code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Roguelike.Model.Board
( Board
, GameBoard (..)
) where
import Control.Monad (forM_)
import Control.Monad.ST (ST, runST)
import qualified Data.Matrix as MI
import qualified Data.Matrix.Mutable as MM
import Roguelike.Model.Defs
-- | The game board: a matrix of fields (each field is a stack of pieces).
newtype Board = Board (MI.Matrix Field)
deriving (Show, Eq)
-- | Interface every playable game board must support.
class GameBoard g where
-- Build a board from a size and a list of initial fields.
-- NOTE(review): the Board instance only uses the X component of the size -- confirm.
mkBoard :: Coords -> [Field] -> g
-- Push the given piece on top of the field at each listed coordinate.
populateFields :: Piece -> [Coords] -> g -> g
-- Convert to the representation consumed by the renderer.
forRendering :: g -> RenderBoard
-- Inclusive lower / exclusive upper corner of the board.
boardMin, boardMax :: g -> Coords
-- Whether the coordinate lies inside [boardMin, boardMax).
isWithinBoard :: g -> Coords -> Bool
-- Move the top piece from the first coordinate onto the second.
moveTop :: Coords -> Coords -> g -> g
-- | Matrix-style (row, column) index derived from board coordinates.
type YXPair = (SingleCoord, SingleCoord)

-- | Swap board coordinates (x, y) into matrix order (y, x).
toYXPair :: Coords -> YXPair
toYXPair c = (getY c, getX c)
-- Matrix-backed implementation of the board interface.
instance GameBoard Board where
-- NOTE(review): only getX of the bounds is passed to MI.matrix -- verify that
-- MI.matrix takes a single size, or that the Y component is intentionally unused.
mkBoard bounds filling = Board $ MI.matrix (getX bounds) filling
-- Mutably pushes the piece on each target cell inside a single ST run.
populateFields what coords (Board b) =
  let yxPairs = map toYXPair coords
  in Board $ runST $ putTop what yxPairs b
forRendering (Board b) = MI.toLists b
-- The origin is fixed; the upper bound is exclusive (cols, rows).
boardMin _ = Coords 0 0
boardMax (Board b) = Coords (MI.cols b) (MI.rows b)
isWithinBoard b (Coords x y) = let minX = getX $ boardMin b
                                   minY = getY $ boardMin b
                                   maxX = getX $ boardMax b
                                   maxY = getY $ boardMax b
                               in x >= minX && x < maxX && y >= minY && y < maxY
-- Pops the top piece at oldPos and pushes it at newPos.
moveTop oldPos newPos (Board b) =
  let srcYXPair = toYXPair oldPos
      destYXPair = toYXPair newPos
      b' = runST $ swapTop srcYXPair destYXPair b
  in Board b'
-- | Push @what@ on top of every listed cell of the matrix, using a mutable
-- copy that is frozen again before returning.
putTop :: Piece -> [YXPair] -> MI.Matrix Field -> ST a (MI.Matrix Field)
putTop piece targets board = do
  mboard <- MI.thaw board
  mapM_ (push mboard) targets
  MI.unsafeFreeze mboard
  where
    push mb yx = do
      fieldStack <- MM.unsafeRead mb yx
      MM.unsafeWrite mb yx (piece : fieldStack)
-- | Move the top piece from @src@ onto the stack at @dest@, mutably.
-- NOTE(review): the (this:rest) pattern is partial -- an empty stack at @src@
-- fails the ST computation; confirm callers guarantee a non-empty source stack.
swapTop :: YXPair -> YXPair -> MI.Matrix Field -> ST a (MI.Matrix Field)
swapTop src dest b = do
  mb <- MI.thaw b
  (this:rest) <- MM.unsafeRead mb src
  MM.unsafeWrite mb src rest
  others <- MM.unsafeRead mb dest
  MM.unsafeWrite mb dest (this:others)
  MI.unsafeFreeze mb
| tr00per/roguelike0 | src/Roguelike/Model/Board.hs | bsd-2-clause | 2,150 | 0 | 14 | 679 | 792 | 402 | 390 | 53 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Reduce
import Prelude hiding (replicate)
import Prelude as P
import Obsidian
import Obsidian.Run.CUDA.Exec
-- import Obsidian.Run.CUDA.SC
import qualified Data.Vector.Storable as V
import Control.Monad.State
import Data.Int
import Data.Word
-- | Capture reduction kernel @r@ for @threads@ threads, run it on a random
-- vector of length @n@, and compare the GPU result against the CPU sum.
performSmall n threads r =
  withCUDA $
  do
    kern <- capture threads (r (+) . splitUp n)
    (inputs :: V.Vector Word32) <- lift $ mkRandomVec (fromIntegral n)
    let cpuresult = V.sum inputs
    useVector inputs $ \i ->
      allocaVector 1 $ \o ->
      do
        o <== (1,kern) <> i
        -- NOTE(review): this binding shadows the kernel argument r.
        r <- peekCUDAVector o
        lift $ putStrLn $ show r
        lift $ putStrLn $ "compare CPU GPU results equal?: " ++ show ((r P.!! 0) == cpuresult)
-- All small-scale runs on 512 elements, one per reduction variant, with the
-- thread counts each variant was written for.
all512 = [performSmall 512 256 mapRed1,
          performSmall 512 256 mapRed2,
          performSmall 512 256 mapRed3,
          performSmall 512 64 mapRed4,
          performSmall 512 64 mapRed5,
          performSmall 512 32 mapRed6,
          performSmall 512 16 mapRed7]
-- all4096 = [performSmall 4096 mapRed1,
-- performSmall 4096 mapRed2,
-- performSmall 4096 mapRed3,
-- performSmall 4096 mapRed4,
-- performSmall 4096 mapRed5,
-- performSmall 4096 mapRed6,
-- performSmall 4096 mapRed7]
performAll512 = sequence_ all512
--performAll4096 = sequence_ all4096
-- ######################################################################
-- Experiment (works only for specific blks/elts combos
-- | Large-scale experiment: capture the same reduction at seven thread counts
-- and run each as a two-stage reduce (per-block, then final), checking every
-- result against the CPU sum. Works only for specific blcks/elts combinations.
performLarge blcks elts r =
  withCUDA $
  do
    kern1 <- capture 32 (r (+) . splitUp elts)
    kern2 <- capture 64 (r (+) . splitUp elts)
    kern3 <- capture 96 (r (+) . splitUp elts)
    kern4 <- capture 128 (r (+) . splitUp elts)
    kern5 <- capture 160 (r (+) . splitUp elts)
    kern6 <- capture 192 (r (+) . splitUp elts)
    kern7 <- capture 256 (r (+) . splitUp elts)
    (inputs :: V.Vector Word32) <- lift $ mkRandomVec (fromIntegral (blcks * elts))
    let cpuresult = V.sum inputs
    useVector inputs $ \i ->
      allocaVector (fromIntegral blcks) $ \(o :: CUDAVector Word32) ->
        allocaVector 1 $ \(o2 :: CUDAVector Word32) -> do
          body cpuresult kern1 i o o2
          body cpuresult kern2 i o o2
          body cpuresult kern3 i o o2
          body cpuresult kern4 i o o2
          body cpuresult kern5 i o o2
          body cpuresult kern6 i o o2
          body cpuresult kern7 i o o2
  where
    -- Two-stage launch: per-block partial sums into o, then a final single
    -- block reduction of o into o2; prints and verifies the scalar result.
    body cpuresult kern i o o2 =
      do
        fill o 0
        o <== (blcks,kern) <> i
        o2 <== (1,kern) <> o
        r <- peekCUDAVector o2
        lift $ putStrLn $ show r
        lift $ putStrLn $ "compare CPU GPU results equal?: " ++ show ((r P.!! 0) == cpuresult)
-- ######################################################################
-- Kernel launch code
-- ######################################################################
-- | Standalone two-stage reduction launch: 4096 blocks of 4096 elements with
-- mapRed5 at 32 threads; returns the contents of the final one-element vector.
launchReduce =
  withCUDA $
  do
    let n = blocks * elts
        blocks = 4096
        elts = 4096
    kern <- capture 32 (mapRed5 (+) . splitUp elts)
    (inputs :: V.Vector Word32) <- lift (mkRandomVec (fromIntegral n))
    useVector inputs (\i ->
      allocaVector (fromIntegral blocks) (\ o ->
        allocaVector 1 (\ o2 -> do
          do o <== (blocks,kern) <> i
             o2 <== (1,kern) <> o
             copyOut o2
          )
        )
      )
-- ######################################################################
-- Main
-- ######################################################################
main = performLarge 512 512 mapRed2
| svenssonjoel/ObsidianGFX | Examples/ReductionTutorial/ReduceExec.hs | bsd-3-clause | 3,770 | 0 | 23 | 1,188 | 1,083 | 543 | 540 | 77 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Home.Views where
import Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes as Ha
import Common.Views
import Area.Links
import Apc.Links
-- | Landing page: links to the area and APC browsers, plus a
-- message-of-the-day panel filled in client-side from @motd.html@.
homePage :: Html
homePage = layout "Home" $ do
  p $ a ! href (viewAreasLink "blc") $ "Browse base layer controllers"
  p $ a ! href viewApcsLink $ "Browse APCs"
  H.div ! Ha.id "motd" $ ""
  script "$.get('motd.html', function(data) { $('#motd').html(data); })"
| hectorhon/autotrace2 | app/Home/Views.hs | bsd-3-clause | 450 | 0 | 12 | 74 | 122 | 65 | 57 | 13 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module YaLedger.Parser.CSV where
import Control.Applicative
import Control.Monad
import Data.Yaml
import qualified Data.ByteString.Lazy as L
import YaLedger.Types
import YaLedger.Parser.Common (loadParserConfig, readUrlLBS)
import YaLedger.Parser.Tables
-- | Configuration for the CSV records importer.
data ParserConfig = ParserConfig {
    pcEncoding :: Maybe String,      -- ^ optional source encoding, passed to convertToUtf8
    pcSeparator :: Char,             -- ^ field separator (FromJSON defaults it to ',')
    pcGeneric :: GenericParserConfig -- ^ shared table-parser settings
  }
  deriving (Eq, Show)
-- Reads the keys "encoding" (optional) and "separator" (optional, default ',')
-- plus the generic table-parser options from the same object.
instance FromJSON ParserConfig where
  parseJSON (Object v) =
    ParserConfig
      <$> v .:? "encoding"
      <*> v .:? "separator" .!= ','
      <*> parseGenericConfig [] v
  parseJSON x = fail $ "CSV parser config: invalid object: " ++ show x
-- | Break raw CSV text into rows and cells using the given separator.
csvCells :: Char -> String -> [[String]]
csvCells sep = map (split sep) . lines
-- | Split CSV text into cells, drop rows excluded by the configured row
-- filter, and convert each remaining row (numbered from 1) into a record.
parseCSV :: LedgerOptions
         -> ParserConfig
         -> FilePath
         -> Currencies
         -> ChartOfAccounts
         -> String
         -> IO [Ext Record]
parseCSV opts pc path currs coa str =
  let rows = csvCells (pcSeparator pc) str
      goodRows = filterRows (pcRowsFilter $ pcGeneric pc) rows
  in zipWithM (convertRow opts (pcGeneric pc) currs coa path) [1..] goodRows
-- | Load the parser config and the CSV file (possibly via URL), recode the
-- contents to UTF-8, and parse them into ledger records.
loadCSV :: LedgerOptions -> FilePath -> Currencies -> ChartOfAccounts -> FilePath -> IO [Ext Record]
loadCSV opts configPath currs coa csvPath = do
  parserConfig <- loadParserConfig configPath
  rawBytes <- readUrlLBS csvPath
  utf8Text <- convertToUtf8 csvPath (pcEncoding parserConfig) rawBytes
  parseCSV opts parserConfig csvPath currs coa utf8Text
| portnov/yaledger | YaLedger/Parser/CSV.hs | bsd-3-clause | 1,539 | 0 | 13 | 340 | 453 | 234 | 219 | 41 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.Quantity.PT.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Quantity.PT.Corpus
import Duckling.Testing.Asserts
-- | Corpus-driven tests for the Portuguese quantity dimension.
tests :: TestTree
tests = testGroup "PT Tests"
  [ makeCorpusTest [This Quantity] corpus
  ]
| rfranek/duckling | tests/Duckling/Quantity/PT/Tests.hs | bsd-3-clause | 603 | 0 | 9 | 96 | 80 | 51 | 29 | 11 | 1 |
module REPL.Suggest (suggest) where
import Text.Printf
import Data.List (intercalate)
import Data.Maybe (fromJust)
import REPL.NPC (npcLookup)
import Rating
import AlbanKnights (findKeyIndices, pickOne, getKeywordIndex)
-- | Produce suggestion lines for one NPC, all NPCs, or an NPC plus an
-- explicit keyword stock. Results are always returned in Left as display text.
-- NOTE(review): the four per-NPC case alternatives are near-duplicates and
-- could be table-driven; left as-is to preserve behavior exactly.
suggest :: [String] -> Rating -> Either String Rating
-- Single argument: show the suggestion line for that NPC from the rating.
suggest [str] r = case npcLookup str of
  "dai" -> Left $ toLine "dai" "ダイ" (dai r)
  "kaour" -> Left $ toLine "kaour" "カオル" (kaour r)
  "eirlys" -> Left $ toLine "eirlys" "アイリース" (eirlys r)
  "elsie" -> Left $ toLine "elsie" "エルシィ" (elsie r)
  _ -> Left "そのようなNPCは存在しません"
-- No arguments: show the lines for all four NPCs.
suggest [] r = Left $ intercalate "\n" $ map wrapToLine table
  where table = [("dai", "ダイ", dai r)
                ,("eirlys", "アイリース", eirlys r)
                ,("kaour", "カオル", kaour r)
                ,("elsie", "エルシィ", elsie r)]
        wrapToLine (npc, name, info) = toLine npc name info
-- NPC plus keywords: build a synthetic stock from the given keywords.
suggest (str:keywords) _
  | Nothing `elem` indices = Left "不正なキーワードが含まれています"
  | otherwise = case npcLookup str of
      "dai" -> Left $ toLine "dai" "ダイ" (Nothing, False, toStock indices)
      "kaour" -> Left $ toLine "kaour" "カオル" (Nothing, False, toStock indices)
      "eirlys" -> Left $ toLine "eirlys" "アイリース" (Nothing, False, toStock indices)
      "elsie" -> Left $ toLine "elsie" "エルシィ" (Nothing, False, toStock indices)
      _ -> Left "そのようなNPCは存在しません"
  where indices = map getKeywordIndex keywords
        toStock = map fromJust
-- | Format one NPC's suggestion line: empty stock, no matching keywords, or a
-- slash-separated list of keyword suggestions with 1-based display indices.
toLine :: String -> String-> Info -> String
toLine _ name (_,_,[]) = printf "【%s】\tストックが空です" name
toLine npc name (_,_,keys) = case findKeyIndices npc keys of
  [] -> printf "【%s】\t該当キーワードがありません" name
  xs -> printf "【%s】\t" name ++ intercalate "/" (map suggestion xs)
  -- Indices >= 99 wrap back around (two keyword pages share one table).
  where suggestion i = let i' = if i >= 99 then i - 99 else i
                       in printf "%s[%d]" (pickOne npc i) (i'+1)
| sandmark/AlbanKnights | src/REPL/Suggest.hs | bsd-3-clause | 2,094 | 0 | 12 | 495 | 692 | 360 | 332 | 37 | 9 |
module Network.SMTP.Server
( SMTPParameters(..)
, smtpParameters
, ClientInfo(..)
, smtpConnection
, runSMTPServer
) where
import Prelude hiding ((.), id)
import Data.List
import Control.Monad
import Control.Monad.Trans.Class
import Control.Monad.IO.Class
import Data.Conduit
import Control.Wire
import Control.Wire.Unsafe.Event
import qualified Data.Attoparsec.ByteString.Char8 as P
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.Text as T
import qualified Data.Text.Lazy.Encoding as TL
import qualified Data.Conduit.List as CL
import Network.SMTP.Address (EmailAddress)
import Network.Socket (SockAddr)
import Data.Conduit.Network
import Network.SMTP.Protocol
-- | State accumulated about the connected SMTP client during a session.
data ClientInfo = ClientInfo { clientAddr :: !SockAddr              -- ^ peer socket address
                             , clientDomain :: !Domain              -- ^ domain announced via HELO/EHLO
                             , clientFrom :: !(Maybe EmailAddress)  -- ^ sender set by MAIL FROM
                             , clientTo :: ![RcptAddress]           -- ^ recipients collected via RCPT TO
                             }
                deriving (Show, Eq)
-- | SMTP validation hook: given the current client information and the datum
-- to check, return 'Nothing' on success or @'Just' reply@ with the error
-- reply to send otherwise.
type Validate m a = ClientInfo -> a -> m (Maybe SMTPReply)
-- TODO:
-- Add VRFY hook.
-- Support custom X- commands
-- | SMTP server parameters. Use ReaderT transformer to pass additional context
-- to validation hooks.
--
-- It is expected that @checkData@ hook would not only validate, but also
-- process incoming mail.
data SMTPParameters m = SMTPParameters { smtpDomain :: !Domain                          -- ^ domain advertised in greetings/EHLO
                                       , extraExts :: ![(Extension, Description)]       -- ^ extensions advertised beyond the built-ins
                                       , checkDomain :: !(Validate m Domain)            -- ^ validates HELO/EHLO domains
                                       , checkSender :: !(Validate m (Maybe EmailAddress)) -- ^ validates MAIL FROM
                                       , checkRcpt :: !(Validate m RcptAddress)         -- ^ validates each RCPT TO
                                       , checkDataReady :: !(Validate m ())             -- ^ gate before accepting DATA
                                       , checkData :: !(Validate m BL.ByteString)       -- ^ validates and processes the message body
                                       }
-- | Default SMTP parameters.
-- | Default SMTP parameters: serves "localhost", advertises no extra
-- extensions, and accepts every domain, sender, recipient and message.
smtpParameters :: Monad m => SMTPParameters m
smtpParameters =
  SMTPParameters { smtpDomain = "localhost"
                 , extraExts = []
                 , checkDomain = accept
                 , checkSender = accept
                 , checkRcpt = accept
                 , checkDataReady = accept
                 , checkData = accept
                 }
  where accept _ _ = return Nothing
type SMTPWire m a b = Wire () () m a b
-- | SMTP incoming session.
-- Input: either SMTP error (in case of failed parse) or incoming command
-- Returns: pair of SMTP reply and (partially filled) ClientInfo
-- | The SMTP session state machine. States: generic (pre-HELO), normal
-- (after HELO/EHLO), transaction (after MAIL FROM). Each state switches to
-- the next on a successfully validated command.
smtpIncoming :: Monad m => SMTPParameters m
             -> SockAddr
             -> SMTPWire m SMTPCommand (SMTPReply, ClientInfo)
smtpIncoming (SMTPParameters {..}) addr = dSwitch $ generic
  ClientInfo { clientAddr = addr
             , clientDomain = ""
             , clientFrom = Nothing
             , clientTo = []
             }
  where supportedExts = [ ("PIPELINING", Nothing) -- By design, TODO: implement pipelining protection
                        , ("SMTPUTF8", Nothing) -- By design
                        ] ++ extraExts
        ok = SMTPReply CComplete
        -- Run a validation hook; on success update the client info, emit the
        -- success reply, and switch to the continuation wire.
        validate :: Monad m => (ClientInfo -> Wire s e m a' b')
                    -> Validate m a
                    -> (a -> ClientInfo -> ClientInfo)
                    -> (a -> SMTPReply)
                    -> ClientInfo
                    -> a
                    -> m ((SMTPReply, ClientInfo), Event (Wire s e m a' b'))
        validate next val update msg ci a = val ci a >>= return . \case
          Just e -> ((e, ci), NoEvent)
          Nothing -> ((msg a, updinfo), Event $ next updinfo)
          where updinfo = update a ci
        -- Commands legal in every state: HELO/EHLO, NOOP, QUIT; anything
        -- else is a sequencing error.
        generic info = mkGen_ $ liftM Right . \case
          Helo domain -> ehlo [] domain
          Ehlo domain -> ehlo supportedExts domain
          Noop -> return ((ok "OK", info), NoEvent)
          -- TODO: Replace OK with whatever in RFC
          Quit -> return ((SMTPReply CServiceClose "OK", info), Event zeroArrow)
          _ -> return ((SMTPReply CBadSequence "Bad sequence of commands", info), NoEvent)
          where ehlo exts = validate normal checkDomain (\a x -> x { clientDomain = a })
                            (\d -> ok $ buildEhloReply $ EhloReply smtpDomain (Just ("greets " <> d)) exts)
                            info
        -- Drop any in-progress mail transaction state.
        clearMail info = info { clientFrom = Nothing
                              , clientTo = []
                              }
        newNormal = normal . clearMail
        -- After HELO/EHLO: accept MAIL FROM or RSET in addition to generics.
        normal info = dSwitch $ ready <|> clear <|> generic info
          where clear = mkPure_ $ \case
                  Rset -> Right ((ok "OK", info), Event $ normal $ clearMail info)
                  _ -> Left ()
                ready = mkGen_ $ \case
                  MailFrom from -> Right <$> validate transaction checkSender (\a x -> x { clientFrom = a }) (const $ ok "OK") info from
                  _ -> return $ Left ()
        -- After MAIL FROM: accept RCPT TO repeatedly, then DATA once at
        -- least one recipient has been accepted.
        transaction info = dSwitch $ mail <|> clear <|> generic info
          where mail = mkGen_ $ \case
                  RcptTo to -> Right <$>
                    validate transaction checkRcpt (\a x -> x { clientTo = a : clientTo x }) (const $ ok "OK") info to
                  Data | clientTo info /= [] -> Right <$>
                    validate newNormal checkDataReady (const id)
                    (const $ SMTPReply CStartInput "Start mail input; end with <CRLF>.<CRLF>")
                    info ()
                  _ -> return $ Left ()
-- | Parse incoming stream and feed to incoming session automaton.
-- | Parse incoming stream and feed to incoming session automaton. Emits the
-- initial greeting, then alternates between command mode and DATA mode.
processSMTP :: forall m. Monad m => SMTPParameters m
            -> SockAddr
            -> Conduit ByteString m SMTPReply
processSMTP pars addr = do
  yield $ SMTPReply CServiceReady $ smtpDomain pars <> " Service ready"
  awaitCmd (smtpIncoming pars addr) ""
  where -- Incrementally run an attoparsec parser over the conduit input,
        -- replying with a syntax error on parse failure and retrying.
        awaitInput :: P.Parser a
                   -> ByteString
                   -> (ByteString -> a -> Conduit ByteString m SMTPReply)
                   -> Conduit ByteString m SMTPReply
        awaitInput tparser tinput run = wait (P.parse tparser) tinput
          where wait parser input
                  | B.null input = await >>= \case
                      Nothing -> process parser ""
                      Just "" -> wait parser ""
                      Just str -> process parser str
                  | otherwise = process parser input
                process parser str = case parser str of
                  -- Map parser error messages onto SMTP syntax-error codes.
                  P.Fail rest ctx (stripPrefix "Failed reading: " -> Just err) -> do
                    let code = if "argument" `elem` ctx then CArgSyntax else CCmdSyntax
                    yield $ SMTPReply code $ T.pack err
                    wait (P.parse tparser) rest
                  P.Fail _ _ err -> error $ "waitInput: error prefix invalid: " ++ err
                  P.Partial p -> wait p ""
                  P.Done rest r -> run rest r
        -- Command mode: step the session wire once per parsed command.
        awaitCmd :: SMTPWire m SMTPCommand (SMTPReply, ClientInfo)
                 -> ByteString
                 -> Conduit ByteString m SMTPReply
        awaitCmd w rest = awaitInput parseCommand rest $ \rest' input -> do
          (Right (r@(SMTPReply code _), ci), w') <- lift $ stepWire w () (Right input)
          yield r
          case code of
            CServiceClose -> return ()
            CStartInput -> awaitData w' w ci rest'
            _ -> awaitCmd w' rest'
        -- DATA mode: read the message body, hand it to checkData, and resume
        -- with the post-DATA wire on success or the pre-DATA wire on failure.
        awaitData :: SMTPWire m SMTPCommand (SMTPReply, ClientInfo)
                  -> SMTPWire m SMTPCommand (SMTPReply, ClientInfo)
                  -> ClientInfo
                  -> ByteString
                  -> Conduit ByteString m SMTPReply
        awaitData goodw badw ci rest = awaitInput parseData rest $ \rest' input -> do
          r <- lift $ checkData pars ci input
          case r of
            Just reply -> do
              yield reply
              awaitCmd badw rest'
            Nothing -> do
              yield $ SMTPReply CComplete "OK"
              awaitCmd goodw rest'
-- | SMTP connection processing conduit.
-- | SMTP connection processing conduit: parses client bytes, runs the SMTP
-- session machine, and renders each reply back to UTF-8 encoded bytes.
smtpConnection :: Monad m => SMTPParameters m -> SockAddr -> Conduit ByteString m ByteString
smtpConnection pars addr = processSMTP pars addr =$= CL.map (BL.toStrict . TL.encodeUtf8 . buildReply)
-- | Run simple SMTP server with given settings.
-- | Run simple SMTP server with given settings; @run@ lowers the user monad
-- to IO for each accepted connection.
runSMTPServer :: MonadIO m => SMTPParameters m -> ServerSettings -> (forall a. m a -> IO a) -> IO ()
runSMTPServer pars server run =
  runTCPServer server $ \c ->
    run $ appSource c $$ smtpConnection pars (appSockAddr c) =$= appSink c
| abbradar/smtp | src/Network/SMTP/Server.hs | bsd-3-clause | 9,344 | 0 | 22 | 3,706 | 2,302 | 1,209 | 1,093 | -1 | -1 |
module SatO.Karma.Chart (Chart, chart) where
import Data.Bifunctor (second)
import Data.Map (Map, foldWithKey)
import Data.Text (Text, unpack)
import Data.Colour.CIE
import Data.Colour.CIE.Illuminant (d65)
import Graphics.Rendering.Chart.Easy
import SatO.Karma.Types (Graph (..))
-- | A ready-to-render karma chart (wraps a Chart.Easy layout action).
newtype Chart = Chart (EC (Layout Double Double) ())

instance ToRenderable Chart where
    toRenderable (Chart x) = toRenderable x
-- | Build the karma chart: one colored line per person, with past values
-- solid and projected values dashed, both clamped at 'maxValue'.
-- NOTE(review): Data.Map's foldWithKey is deprecated -- foldrWithKey is the
-- drop-in replacement; left unchanged here to keep the block byte-identical.
chart :: Map Text Graph -> Chart
chart m = Chart $ do
    -- Interleave even then odd hue steps so adjacent series get distinct colors.
    setColors . fmap (opaque . mkXYZTriple . (/16)) $ [0,2..14] ++ [1,3..15]
    layout_title .= "SatO Karma"
    foldWithKey f (pure ()) m
  where
    f n (Graph curr prev next) p = do
        plot (dline [second (min maxValue) <$> next])
        plot (line n' [second (min maxValue) <$> prev'])
        p
      where
        -- Legend label: name plus the current karma scaled to an integer.
        n' = unpack n ++ " " ++ show (round $ curr * 1000 :: Int)
        -- Keep only points newer than the window start, padding with zeros
        -- so every series starts at the left edge of the window.
        prev' = case filter ((> mint) . fst) prev of
            [] -> [(mint, 0), (0, 0)]
            t'@((t, _):_)
                | t > mint -> (mint, 0) : (t, 0) : t'
                | otherwise -> t'
        -- Left edge of the plotted time window.
        mint = negate 60
-- | Ceiling applied to every plotted karma value.
maxValue :: Double
maxValue = 10
-- | A dashed line plot drawn in the current color (used for projections).
dline :: [[(x,y)]] -> EC l (PlotLines x y)
dline values = liftEC $ do
    color <- currColor
    plot_lines_style . line_dashes .= [5,5]
    plot_lines_style . line_color .= color
    plot_lines_values .= values
-- | Return the curr color from the state
currColor :: EC l (AlphaColour Double)
currColor = liftCState $ do
(c:_) <- use colors
return c
-- | Pick a color on a hue circle in CIE L*a*b* space under the D65
-- illuminant; @p@ selects the angle as a fraction of a full turn.
mkXYZTriple :: Double -> Colour Double
mkXYZTriple p =
    let angle     = 2 * pi * p
        lightness = 60
        chanA     = 20 + 70 * cos angle
        chanB     = 20 + 70 * sin angle
    in cieLAB d65 lightness chanA chanB
| osakunta/karma | src/SatO/Karma/Chart.hs | bsd-3-clause | 1,650 | 0 | 16 | 454 | 719 | 382 | 337 | -1 | -1 |
import Data.Maybe
import System.IO
import System.Environment
import Text.HTML.TagStream
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S
import System.Console.ANSI
import qualified Data.Enumerator as E
import qualified Data.Enumerator.Binary as E
import qualified Data.Enumerator.List as EL
import Blaze.ByteString.Builder (Builder)
import Blaze.ByteString.Builder.Enumerator (unsafeBuilderToByteString, allocBuffer)
-- | Wrap a ByteString in ANSI escape codes: switch the foreground to the
-- given color before it and back to white after it.
color :: Color -> ByteString -> ByteString
color c s = S.concat [sgr c, s, sgr White]
  where
    sgr col = S.pack $ setSGRCode [SetColor Foreground Dull col]
-- | Enumeratee rendering each token with red highlighting.
-- NOTE(review): the name is misspelled ("hightlight"); renaming would require
-- updating the caller in 'main'.
hightlightStream :: Monad m => E.Enumeratee Token Builder m b
hightlightStream = EL.map (showToken (color Red))
-- | Read the HTML file named on the command line, tokenize it, highlight the
-- tokens, and stream the result to stdout through a 4 KiB buffer.
main :: IO ()
main = do
  args <- getArgs
  filename <- maybe (fail "pass file path") return (listToMaybe args)
  let iter = E.enumFile filename
         E.$= tokenStream
         E.$= hightlightStream
         E.$= unsafeBuilderToByteString (allocBuffer 4096)
         E.$$ E.iterHandle stdout
  E.run_ iter
| yihuang/tag-stream | Highlight.hs | bsd-3-clause | 1,143 | 0 | 15 | 267 | 327 | 177 | 150 | 28 | 1 |
module Server
(runServer
) where
import qualified Data.Map.Strict as Map
import ConcurrentUtils
import Control.Monad
import Control.Monad.STM
import Control.Concurrent.STM.TVar
import Network
import System.IO
import Models
import Messages (decode, encode)
import Utils
-- | Accept TCP connections on the default port forever, handling each client
-- on its own thread; the handle is closed when the talk loop ends.
runServer :: ServerId -> CurrentConnection -> ConnectionDB -> MessageQueues -> IO ()
runServer selfId conn db qs = do
  sock <- listenOn defaultPort
  forever $ do
    (handle, host, port) <- accept sock
    forkFinally (talk selfId handle conn db qs) (\_ -> hClose handle)
-- | Per-connection loop: read lines from the client and dispatch each one.
talk :: ServerId -> Handle -> CurrentConnection -> ConnectionDB -> MessageQueues -> IO ()
talk selfId h conn db qs = do
  hSetBuffering h LineBuffering
  forever $ do
    line <- hGetLine h
    handleMessage selfId h conn db qs line
-- | Dispatch one decoded wire message: answer handshakes with our own id,
-- route chat messages, and silently drop anything unparseable.
handleMessage :: ServerId -> Handle -> CurrentConnection -> ConnectionDB -> MessageQueues -> String -> IO ()
handleMessage selfId h conn db qs raw =
  case decode raw of
    Just (Handshake _) -> hPutStrLn h (encode (Handshake selfId))
    Just (Msg sender body) -> printOrQueue conn db qs sender body
    Nothing -> return ()
-- | Print the message if it comes from the currently connected peer,
-- otherwise queue it for later delivery.
printOrQueue :: CurrentConnection -> ConnectionDB -> MessageQueues -> ServerId -> String -> IO ()
printOrQueue conn db qs sender msg = do
  mCurrent <- currentServerId conn db
  case mCurrent of
    Just cid | cid == sender -> putStrLn msg
    _ -> enqueueMessage qs sender msg
-- | Append the message to the sender's queue, creating the queue on demand.
enqueueMessage :: MessageQueues -> ServerId -> String -> IO ()
enqueueMessage qs sender msg =
  updateMap qs (Map.insertWith (flip (++)) sender [msg])
| shterrett/peer-chat | src/Server.hs | bsd-3-clause | 1,634 | 0 | 13 | 358 | 561 | 283 | 278 | 40 | 3 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ordinal.KM.Rules
( rules
) where
import Data.String
import Prelude
import Duckling.Dimensions.Types
import Duckling.Numeral.Types (NumeralData (..))
import Duckling.Ordinal.Helpers
import Duckling.Types
import qualified Duckling.Numeral.Types as TNumeral
-- | Matches ordinals written as the Khmer prefix followed by a number
-- and resolves to the floored numeral value.
ruleOrdinalDigits :: Rule
ruleOrdinalDigits = Rule
  { name = "ordinal (digits)"
  , pattern =
    [ regex "ទី"
    , dimension Numeral
    ]
  , prod = \case
      (_:Token Numeral NumeralData{TNumeral.value = x}:_) ->
        Just . ordinal $ floor x
      _ -> Nothing
  }
-- | All Khmer ordinal rules exposed by this module.
rules :: [Rule]
rules =
  [ ruleOrdinalDigits
  ]
| facebookincubator/duckling | Duckling/Ordinal/KM/Rules.hs | bsd-3-clause | 903 | 0 | 16 | 176 | 180 | 111 | 69 | 25 | 2 |
{-# OPTIONS -Wno-redundant-constraints #-}
module Language.Expression.Match where
{
import Import;
import Language.Expression.Expression;
-- | Combine a match expression with a value expression: witnesses bound by
-- the match are removed from the value's witness list, and the remaining
-- witnesses are merged; the result pairs the match output with the value.
matchBind :: (TestEquality wit,Functor f1,Functor f2) =>
 MatchExpression wit f1 a ->
 ValueExpression wit f2 b ->
 ValueExpression wit (Compose f1 f2) (a,b);
matchBind (MkExpression matchWits f1vca) (MkExpression valueWits f2vtb) = case removeAllMatchingMany matchWits valueWits of
{
 MkRemoveManyFromList newValueWits insM _remM -> MkExpression newValueWits
 (Compose (fmap (\(lx,a) -> fmap (\lb lr -> (a,lb (insM lx lr))) f2vtb) f1vca));
};
-- | Lift a plain functor value into a match expression binding no symbols.
matchSimple :: (Functor f) => f r -> MatchExpression wit f r;
matchSimple fr = MkExpression NilListType (fmap ((,) ()) fr);
-- | Require both matches to succeed, discarding their (unit) results.
matchBoth :: (TestEquality wit,Applicative f) => MatchExpression wit f () -> MatchExpression wit f () -> MatchExpression wit f ();
matchBoth ea eb = liftA2 (\_ _ -> ()) ea eb;
-- | Require every match in the list to succeed, discarding the results.
matchAll :: (TestEquality wit,Applicative f) => [MatchExpression wit f ()] -> MatchExpression wit f ();
matchAll = sequenceA_;
-- | Translate the symbol witnesses of a match expression through the given
-- witness map, re-mapping the bound values accordingly.
matchSymbolMap :: (Functor f) =>
 MapWitness (->) wit1 wit2 -> MatchExpression wit1 f r -> MatchExpression wit2 f r;
matchSymbolMap mapwit (MkExpression wits fcvr) = case mapList mapwit wits of
{
 MkMapList wits' mapvals -> MkExpression wits' (fmap (\(vals,r) -> (mapvals vals,r)) fcvr);
};
}
| AshleyYakeley/expression | src/Language/Expression/Match.hs | bsd-3-clause | 1,413 | 0 | 21 | 307 | 529 | 284 | 245 | 21 | 1 |
module Prac6 where
import Prelude
import FPPrac.Graphics
import FPPrac.Events
import Graphics
import System.FilePath (splitPath, dropExtension)
import CreateGraph
import Debug.Trace
-- | Application state threaded through the event loop.
data MyStore = MyStore
  { myGraph :: Graph -- ^ the graph currently being displayed
  }
initPrac6 graph = MyStore {myGraph = graph}
main = doGraph doPrac6 initPrac6 myGraph drawMypracBottomLine
-- | Event handler: maps an input event to a new state plus output actions.
doPrac6 :: MyStore -> Input -> (MyStore,[Output])
-- ====================================
-- = Add extra doPrac6 clauses here   =
-- ====================================
-- doPrac6 myStore (KeyIn 'r') = (myStore', o)
-- where
-- myStore' = ...
-- o = ...
--
-- Fallback: ignore any unhandled event.
doPrac6 myStore i = (myStore,[])
-- | Render the status bar at the bottom of the window: a white strip with the
-- current graph's name (file name without extension) and a usage hint.
drawMypracBottomLine :: Graph -> Picture
drawMypracBottomLine graph =
  Pictures
    [ Translate 0 (-300 + bottomLineHeight / 2) $ Color white $ rectangleSolid 800 bottomLineHeight
    , Color black $ Line [(-400,height1),(400,height1)]
    , Color black $ Line [(-240,height1),(-240,-300)]
    , Translate (-392) height2 $ Color black $ Scale 0.11 0.11 $ Text "myprac:"
    , Translate (-332) height2 $ Color red $ Scale 0.11 0.11 $ Text $ (case (name graph) of "" -> "" ; xs -> dropExtension $ last $ splitPath xs)
    -- Replace the text below with extra information if needed
    , Translate (-235) height2 $ Color black $ Scale 0.11 0.11 $ Text "Press 'q' to return to node-drawing"
    ]
  where
    height1 = -300 + bottomLineHeight
    height2 = -300 + bottomTextHeight
| christiaanb/fpprac | examples/Prac6.hs | bsd-3-clause | 1,456 | 0 | 13 | 288 | 432 | 235 | 197 | 25 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Low level XMLHttpRequest support. IE6 and older are not supported.
module Haste.Ajax (Method (..), URL, ajaxRequest, noParams) where
import Haste.Prim
import Haste.JSType
import Haste.Callback
import Control.Monad.IO.Class
#ifdef __HASTE__
-- Haste build: the request is carried out by the JavaScript runtime.
foreign import ccall ajaxReq :: JSString -- method
                             -> JSString -- url
                             -> Bool -- async?
                             -> JSString -- POST data
                             -> JSFun (Maybe JSString -> IO ())
                             -> IO ()
#else
-- Native build: AJAX is unavailable; calling this is a hard error.
ajaxReq :: JSString -> JSString -> Bool -> JSString -> JSFun (Maybe JSString -> IO ()) -> IO ()
ajaxReq = error "Tried to use ajaxReq in native code!"
#endif
data Method = GET | POST deriving Show
-- | Pass to 'ajaxRequest' instead of @[]@ when no parameters are needed, to
-- avoid type ambiguity errors (the key/value types would be unconstrained).
noParams :: [((), ())]
noParams = []
-- | Perform an AJAX request.
-- | Perform an AJAX request. Parameters are appended to the URL for GET and
-- sent as the request body for POST; the callback receives the decoded
-- response, or Nothing on failure.
ajaxRequest :: (MonadIO m, JSType a, JSType b, JSType c)
            => Method -- ^ GET or POST. For GET, pass all params in URL.
                      -- For POST, pass all params as post data.
            -> URL -- ^ URL to make AJAX request to.
            -> [(a, b)] -- ^ A list of (key, value) parameters.
            -> (Maybe c -> IO ()) -- ^ Callback to invoke on completion.
            -> m ()
ajaxRequest m url kv cb = liftIO $ do
    _ <- ajaxReq (showm m) url' True pd cb'
    return ()
  where
    showm GET = "GET"
    showm POST = "POST"
    -- Decode the raw JS string before handing it to the user callback.
    cb' = mkCallback $ cb . fromJSS
    fromJSS (Just jss) = fromJSString jss
    fromJSS _ = Nothing
    -- For GET, parameters become the query string; POST keeps the bare URL.
    url' = case m of
      GET
        | null kv -> toJSString url
        | otherwise -> catJSStr "?" [toJSString url, toQueryString kv]
      POST -> toJSString url
    -- POST data: the encoded parameters, or empty when there are none.
    pd = case m of
      GET -> ""
      POST
        | null kv -> ""
        | otherwise -> toQueryString kv
-- | Render key/value pairs as a query string: @k1=v1&k2=v2&...@.
toQueryString :: (JSType a, JSType b) => [(a, b)] -> JSString
toQueryString = catJSStr "&" . map pair
  where pair (k, v) = catJSStr "=" [toJSString k, toJSString v]
| joelburget/haste-compiler | libraries/haste-lib/src/Haste/Ajax.hs | bsd-3-clause | 2,232 | 0 | 15 | 770 | 524 | 279 | 245 | 40 | 5 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ordinal.TA.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Ordinal.Types
import Duckling.Resolve
import Duckling.Testing.Types
-- | Test corpus for Tamil ordinals, evaluated under the TA locale.
corpus :: Corpus
corpus = (testContext {locale = makeLocale TA Nothing}, testOptions, allExamples)
-- | Each group pairs an ordinal value with its Tamil word form and its
-- digits-with-dot form.
allExamples :: [Example]
allExamples = concat
  [ examples (OrdinalData 1)
             [ "முதல்"
             , "1."
             ]
  , examples (OrdinalData 2)
             [ "இரண்டாம்"
             , "2."
             ]
  , examples (OrdinalData 3)
             [ "மூன்றாம்"
             , "3."
             ]
  , examples (OrdinalData 4)
             [ "நான்காம்"
             , "4."
             ]
  , examples (OrdinalData 5)
             [ "ஐந்தாம்"
             , "5."
             ]
  , examples (OrdinalData 6)
             [ "ஆறாம்"
             , "6."
             ]
  , examples (OrdinalData 7)
             [ "ஏழாம்"
             , "7."
             ]
  , examples (OrdinalData 8)
             [ "எட்டாம்"
             , "8."
             ]
  , examples (OrdinalData 9)
             [ "ஒன்பதாம்"
             , "9."
             ]
  , examples (OrdinalData 10)
             [ "பத்தாம்"
             , "10."
             ]
  , examples (OrdinalData 11)
             [ "பதினொன்றாம்"
             , "11."
             ]
  , examples (OrdinalData 12)
             [ "பன்னிரண்டாம்"
             , "12."
             ]
  , examples (OrdinalData 20)
             [ "இருபதாம்"
             , "20."
             ]
  , examples (OrdinalData 21)
             [ "இருபத்திஒன்றாம்"
             , "21."
             ]
  , examples (OrdinalData 22)
             [ "இருபத்திஇரண்டாம்"
             , "22."
             ]
  , examples (OrdinalData 26)
             [ "இருபத்திஆறாம்"
             , "26."
             ]
  , examples (OrdinalData 30)
             [ "முப்பதாம்"
             , "30."
             ]
  , examples (OrdinalData 33)
             [ "முப்பத்துமூன்றாம்"
             , "33."
             ]
  , examples (OrdinalData 50)
             [ "ஐம்பதாம்"
             , "50."
             ]
  , examples (OrdinalData 54)
             [ "ஐம்பத்திநான்காம்"
             , "54."
             ]
  , examples (OrdinalData 65)
             [ "அறுபத்ஐந்தாம்"
             , "65."
             ]
  , examples (OrdinalData 76)
             [ "எழுபத்திஆறாம்"
             , "76."
             ]
  , examples (OrdinalData 87)
             [ "எண்பத்திஏழாம்"
             , "87."
             ]
  ]
| facebookincubator/duckling | Duckling/Ordinal/TA/Corpus.hs | bsd-3-clause | 3,219 | 0 | 9 | 1,257 | 557 | 315 | 242 | 82 | 1 |
module Statistics.Information.Utils.List where
import Data.List
-- | Produce a list containing @n@ copies of @a@ (empty for @n <= 0@).
-- Identical to 'Prelude.replicate'; kept for backwards compatibility.
repeatN :: Int -> a -> [a]
repeatN n a = replicate n a
-- | Number of occurrences of @x@ in the list.
count :: Eq a => a -> [a] -> Int
count x xs = length [y | y <- xs, y == x]
-- | Empirical probability of each distinct element, listed in order of
-- first occurrence.
probs :: (Eq a, Fractional b) => [a] -> [(a, b)]
probs xs = map withProb (nub xs)
  where
    total = fromIntegral (length xs)
    withProb x = (x, fromIntegral (length (filter (== x) xs)) / total)
-- | Just the probabilities from 'probs', without the elements themselves.
hist :: (Eq a, Fractional b) => [a] -> [b]
hist = map snd . probs
| eligottlieb/Shannon | src/Statistics/Information/Utils/List.hs | bsd-3-clause | 423 | 0 | 10 | 99 | 235 | 127 | 108 | 11 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Tensor ( tests ) where
import Control.Applicative (liftA2)
import Distribution.TestSuite.QuickCheck
import Prelude hiding (concat)
import Prelude.Unicode
import Test.QuickCheck (Arbitrary (arbitrary))
import Data.Indexable
import Data.MultiIndex
import Data.Sliceable
import Data.Tensor
-- | All tensor test groups exposed to the Cabal test runner.
tests ∷ IO [Test]
tests = return [appendTest, appendTest', sliceTest, reverseTest]
-- | Property: 'split' is the inverse of 'append' along the first dimension,
-- for vectors and matrices.
appendTest ∷ Test
appendTest = testGroup "Append and Split"
             [ testProperty "2 3" (\x y → split SOne (append SOne (x ∷ Vector ('S 'One) Int) (y ∷ Vector ('S ('S 'One)) Int)) ≡ (x,y))
             , testProperty "2 3" (\x y → split SOne (append SOne (x ∷ Matrix ('S 'One) ('S ('S 'One)) Int) (y ∷ Matrix ('S ('S 'One)) ('S ('S 'One)) Int)) ≡ (x,y))
             ]
-- | Property: prepending a vector with (|:) agrees with appending its
-- singleton lift (t1).
appendTest' ∷ Test
appendTest' = testGroup "Append and |:"
              [ testProperty "[3] [2,3]" (\x y → append SOne (t1 x) y ≡ (x ∷ Vector ('S ('S 'One)) Int) |: (y ∷ Matrix ('S 'One) ('S ('S 'One)) Int))
              ]
-- | Property: slicing with all-inclusive selectors is the identity.
sliceTest ∷ Test
sliceTest = testGroup "Slice"
            [ testProperty "[]" (\x → slice (x ∷ Tensor '[] Int) nilS ≡ x)
            , testProperty "[2,3] [Nothing, Nothing]" (\x → slice (x ∷ Matrix ('S 'One) ('S ('S 'One)) Int) (allCons $ allCons nilS) ≡ x)
            ]
-- | Properties relating 'rev'/'unRev' on nested tensors to flattening via
-- 'concat'/'unConcat' (and 'unT0' for scalar wrappers), across ranks 0-2.
reverseTest ∷ Test
reverseTest = testGroup "Reverse"
              [ testProperty "rev Tensor [] (Tensor [] Int)"
                (\x → rev (x ∷ Tensor '[] (Tensor '[] Int)) ≡ unT0 x)
              , testProperty "rev Tensor [] (Tensor [2] Int)"
                (\x → rev (x ∷ Tensor '[] (Vector ('S 'One) Int)) ≡ unT0 x)
              , testProperty "rev Tensor [] (Tensor [2,3] Int)"
                (\x → rev (x ∷ Tensor '[] (Matrix ('S 'One) ('S ('S 'One)) Int)) ≡ unT0 x)
              , testProperty "rev Tensor [2] (Tensor [] Int)"
                (\x → rev (x ∷ Vector ('S 'One) (Tensor '[] Int)) ≡ unT0 (concat x))
              , testProperty "rev Tensor [2] (Tensor [3] Int)"
                (\x → rev (x ∷ Vector ('S 'One) (Vector ('S ('S 'One)) Int)) ≡ unT0 (concat x))
              , testProperty "rev Tensor [2] (Tensor [3,4] Int)"
                (\x → rev (x ∷ Vector ('S 'One) (Matrix ('S ('S 'One)) ('S ('S ('S 'One))) Int)) ≡ unT0 (concat x))
              , testProperty "rev Tensor [2,3] (Tensor [] Int)"
                (\x → rev (x ∷ Matrix ('S 'One) ('S ('S 'One)) (Tensor '[] Int)) ≡ unT0 (concat $ concat x))
              , testProperty "rev Tensor [2,3] (Tensor [4] Int)"
                (\x → rev (x ∷ Matrix ('S 'One) ('S ('S 'One)) (Vector ('S ('S ('S 'One))) Int)) ≡ unT0 (concat $ concat x))
              , testProperty "rev Tensor [2,3] (Tensor [4,5] Int)"
                (\x → rev (x ∷ Matrix ('S 'One) ('S ('S 'One)) (Matrix ('S ('S ('S 'One))) ('S ('S ('S ('S 'One)))) Int)) ≡ unT0 (concat $ concat x))
              , testProperty "unRev Tensor [] (Tensor [] Int)"
                (\x → unRev (x ∷ Tensor '[] (Tensor '[] Int)) ≡ fmap unT0 x)
              , testProperty "unRev Tensor [] (Tensor [2] Int)"
                (\x → unRev (x ∷ Tensor '[] (Vector ('S 'One) Int)) ≡ fmap unT0 (unConcat x))
              , testProperty "unRev Tensor [] (Tensor [2,3] Int)"
                (\x → unRev (x ∷ Tensor '[] (Matrix ('S 'One) ('S ('S 'One)) Int)) ≡ fmap unT0 (unConcat $ unConcat x))
              , testProperty "unRev Tensor [2] (Tensor [] Int)"
                (\x → unRev (x ∷ Vector ('S 'One) (Tensor '[] Int)) ≡ fmap unT0 x)
              , testProperty "unRev Tensor [2] (Tensor [3] Int)"
                (\x → unRev (x ∷ Vector ('S 'One) (Vector ('S ('S 'One)) Int)) ≡ fmap unT0 (unConcat x))
              , testProperty "unRev Tensor [2] (Tensor [3,4] Int)"
                (\x → unRev (x ∷ Vector ('S 'One) (Matrix ('S ('S 'One)) ('S ('S ('S 'One))) Int)) ≡ fmap unT0 (unConcat $ unConcat x))
              , testProperty "unRev Tensor [2,3] (Tensor [] Int)"
                (\x → unRev (x ∷ Matrix ('S 'One) ('S ('S 'One)) (Tensor '[] Int)) ≡ fmap unT0 x)
              , testProperty "unRev Tensor [2,3] (Tensor [4] Int)"
                (\x → unRev (x ∷ Matrix ('S 'One) ('S ('S 'One)) (Vector ('S ('S ('S 'One))) Int)) ≡ fmap unT0 (unConcat x))
              , testProperty "unRev Tensor [2,3] (Tensor [4,5] Int)"
                (\x → unRev (x ∷ Matrix ('S 'One) ('S ('S 'One)) (Matrix ('S ('S ('S 'One))) ('S ('S ('S ('S 'One)))) Int)) ≡ fmap unT0 (unConcat $ unConcat x))
              ]
-- Generators: scalars wrap one value, rank grows structurally -- a size-one
-- leading dimension lifts with t1, larger dimensions prepend with (:|).
instance Arbitrary e ⇒ Arbitrary (Tensor '[] e) where
    arbitrary = t0 <$> arbitrary

instance Arbitrary (Tensor is e) ⇒ Arbitrary (Tensor ('One ': is) e) where
    arbitrary = t1 <$> arbitrary

instance ( Arbitrary (Tensor is e)
         , Arbitrary (Tensor (i ': is) e)
         ) ⇒ Arbitrary (Tensor ('S i ': is) e) where
    arbitrary = liftA2 (:|) arbitrary arbitrary
| tensor5/tensor | tests/Tensor.hs | bsd-3-clause | 5,274 | 0 | 24 | 1,633 | 2,387 | 1,230 | 1,157 | -1 | -1 |
module Hork.Header where
import Hork.Mem
-- Constants for the locations of things in the header.
-- Byte offsets of the story-file header fields.
-- NOTE(review): the names and offsets match the Z-machine header layout
-- (version byte at 0x00, flags at 0x01, high-memory mark at 0x04, ...);
-- confirm against the Z-Machine Standards Document if extending.
-- All offsets are written in hex for consistency.
hdrVERSION, hdrFLAGS1, hdrHIMEM, hdrPC0, hdrDICTIONARY, hdrOBJTABLE, hdrGLOBALS, hdrSTATIC, hdrFLAGS2, hdrABBREVIATIONS, hdrFILESIZE, hdrCHECKSUM, hdrINTNUMBER, hdrINTVERSION, hdrSTANDARD :: RA
hdrVERSION       = 0x00
hdrFLAGS1        = 0x01
hdrHIMEM         = 0x04
hdrPC0           = 0x06
hdrDICTIONARY    = 0x08
hdrOBJTABLE      = 0x0A
hdrGLOBALS       = 0x0C
hdrSTATIC        = 0x0E
hdrFLAGS2        = 0x10
hdrABBREVIATIONS = 0x18
hdrFILESIZE      = 0x1A
hdrCHECKSUM      = 0x1C
hdrINTNUMBER     = 0x1E
hdrINTVERSION    = 0x1F
hdrSTANDARD      = 0x32
| shepheb/hork | Hork/Header.hs | bsd-3-clause | 556 | 0 | 4 | 81 | 120 | 84 | 36 | 18 | 1 |
import Control.Monad (void)
import Control.Arrow ((&&&))
import Data.IORef ( newIORef
, readIORef
, writeIORef)
import Data.List (groupBy, sort, isInfixOf)
import Data.Char (toLower)
import System.Directory
import qualified Graphics.UI.Threepenny as UI
import Graphics.UI.Threepenny.Core
import Text.Parsec (ParseError)
import HTML
import Parser
import Data
-- | Serve the Threepenny GUI on port 10000, with static assets taken
-- from the local "static" directory.
main :: IO ()
main = startGUI defaultConfig
  { tpPort   = 10000
  , tpStatic = Just "static"
  } setup
-- | Build the page: title, a live search box, a rescan button, and the
-- grouped list of scripts found in the static folder.
setup :: Window -> UI ()
setup w = do
  return w # set title "<= Skriptenliste ="
  UI.addStyleSheet w "stylesheet.css"
  let folder = "static/Skriptensammlung"
  btnRescanFolder <- UI.button #. "button" #+ [string "Ordner neu laden"]
  elInput <- UI.input # set style [("width","300")] # set (attr "type") "text"
  -- Mutable list of the filenames currently known from the folder.
  inputs <- liftIO $ newIORef []
  let drawLayout :: UI ()
      -- Re-render the whole body from the current search string and file list.
      drawLayout = void $ do [search] <- getValuesList [elInput]
                             layout <- mkLayout =<< liftIO (sFilter search `fmap` readIORef inputs)
                             getBody w # set children [layout]
                             UI.setFocus elInput
      mkLayout :: [String] -> UI Element
      mkLayout xs = column [UI.h1 # set text "Skriptenliste", UI.hr
                           ,UI.p #+[ UI.span # set text "Suche: "
                                   , element elInput
                                   , element btnRescanFolder]
                           ,UI.ul #+ makeList xs
                           ,UI.hr
                           ,UI.span # set text "Copyright 2014 by Martin Heuschober"]
      rescanFolder :: UI ()
      -- Re-read the directory, keeping only entries matching ".pdf".
      rescanFolder = liftIO $ do fs <- sFilter ".pdf" `fmap` getDirectoryContents folder
                                 writeIORef inputs fs
      makeList :: [String] -> [UI Element]
      -- Parse, sort and group the filenames; each group becomes a headline
      -- plus a list of links into the static folder.
      makeList ss = let groupedList = map leftOrAutor $ groupByFst leftRightAutor $ sort $ map (skriptum &&& id) ss
                    in map (\(hl,lst) -> UI.li #+ [UI.h2 # set html (renderHTML hl), UI.ul #+ map items lst]) groupedList
           where url file =folder++"/"++urlEscape file
                 items (skrpt, fname) = UI.li #+ [UI.anchor # set UI.href (url fname)
                                                 #+ [UI.span # set html (renderHTML $ skrpt)]]
  -- NOTE(review): 'return drawLayout' uses the ((->) a) monad, i.e. it is
  -- \_ -> drawLayout: the handler deliberately ignores the event payload.
  on (domEvent "livechange") elInput $ return drawLayout
  on UI.click btnRescanFolder $ \_ -> rescanFolder >> drawLayout
  rescanFolder
  drawLayout
-- | Keep only those entries that contain the search term, comparing
-- case-insensitively and ignoring spaces, underscores, and other
-- whitespace on both sides.
sFilter :: String -> [String] -> [String]
sFilter search xs = [x | x <- xs, normalize search `isInfixOf` normalize x]
  where normalize = map toLower . filter (`notElem` " _\r\n\t")
-- | Group adjacent pairs whose first components are related by @eq@.
groupByFst :: (a -> a -> Bool) -> [(a,b)] -> [[(a,b)]]
groupByFst eq = groupBy sameKey
  where sameKey (k1, _) (k2, _) = eq k1 k2
-- | Two parse results belong to the same group when both failed to
-- parse, or both parsed and share the same author.
leftRightAutor :: Either ParseError Skriptum -> Either ParseError Skriptum -> Bool
leftRightAutor a b = case (a, b) of
  (Left _,  Left _)  -> True
  (Right x, Right y) -> autor x == autor y
  _                  -> False
-- | Pick a headline for a non-empty group: the author of the first
-- parsed entry, or the generic warning when parsing failed.
leftOrAutor :: [(Either ParseError Skriptum,String)] -> (String, [(Either ParseError Skriptum,String)])
leftOrAutor [] = error "leftOrAutor: error empty list"
leftOrAutor xs@((tag, _) : _) = (either (const warning) autor tag, xs)
-- | Fallback headline (German) shown for groups of files whose names
-- could not be parsed into author/title/type/semester/year.
warning :: String
warning = "Dateien nicht in der Form: Autor_Titel_Typ_Semester_Jahr.pdf"
-- NOTE(review): orphan instances for parsec's ParseError, defined here
-- only so parse results can be sorted/grouped above. They are degenerate
-- on purpose: every ParseError compares equal.
instance Eq ParseError
  where _ == _ = True
instance Ord ParseError where
  compare _ _ = EQ
| epsilonhalbe/Skriptenliste | Main.hs | bsd-3-clause | 3,719 | 1 | 20 | 1,219 | 1,206 | 620 | 586 | 74 | 2 |
{-# LANGUAGE TypeFamilies, FlexibleContexts, PackageImports #-}
module Network.XmlPush.Http.Server (
HttpSv,
HttpSvArgs(..), Mechanism(..),
HttpPullSvArgs(HttpPullSvArgs), HttpPushArgs(HttpPushArgs),
) where
import Control.Monad
import "monads-tf" Control.Monad.Error
-- import Control.Monad.Base
import Control.Monad.Trans.Control
import Data.Maybe
import Data.HandleLike
import Data.Pipe
import Data.Pipe.List
import Text.XML.Pipe
import Network.XmlPush
import Network.XmlPush.HttpPull.Server.Body
import Network.XmlPush.HttpPush.Body
import Network.TigHTTP.Server
import Network.Sasl
import Network.PeyoTLS.Server
-- | A server-side XML pusher over HTTP, running in either pull or push
-- mode; the mode is chosen per connection (see 'HttpSvArgs').
newtype HttpSv h = HttpSv (Either (HttpPullSv h) (HttpPush h))
-- | Which HTTP transport mechanism to use.
data Mechanism = Pull | Push deriving Show
-- | A selector from the first received 'XmlNode' to a 'Mechanism',
-- together with the argument records for both mechanisms.
data HttpSvArgs h =
	HttpSvArgs (XmlNode -> Mechanism) (HttpPullSvArgs h) (HttpPushArgs h)
-- | 'HttpSv' needs two handles (the second mandatory); reading and
-- writing are simply delegated to whichever mechanism was selected.
instance XmlPusher HttpSv where
	type NumOfHandle HttpSv = Two
	type PusherArgs HttpSv = HttpSvArgs
	generate (Two ch (Just sh)) (HttpSvArgs s pla psa) =
		makeHttpSv ch sh s pla psa
	-- Reject other handle configurations with a diagnosable message
	-- instead of the uninformative "bad".
	generate _ _ = error
		"Network.XmlPush.Http.Server: generate: needs Two handles with the second handle present"
	readFrom (HttpSv e) = either readFrom readFrom e
	writeTo (HttpSv e) = either writeTo writeTo e
-- | Read the first HTTP request on @sh@, parse its body as a single XML
-- node, and use the selector @s@ to decide whether to build a pull- or
-- push-mode pusher; the already-consumed node @rn@ is handed on so it is
-- not lost.
makeHttpSv :: (
	ValidateHandle h, MonadBaseControl IO (HandleMonad h),
	MonadError (HandleMonad h), SaslError (ErrorType (HandleMonad h))
	) => Maybe h -> h -> (XmlNode -> Mechanism) ->
	HttpPullSvArgs h -> HttpPushArgs h -> HandleMonad h (HttpSv h)
makeHttpSv ch sh s pla psa = do
	rq <- getRequest sh
	-- liftBase . print $ requestPath r
	-- NOTE(review): partial pattern — fails in the monad unless the body
	-- parses to exactly one XML node.
	Just [rn] <- runPipe $ requestBody rq
		=$= xmlEvent
		=$= convert fromJust
		=$= xmlNode []
		=$= toList
	-- liftBase . putStrLn $ "here"
	HttpSv `liftM` case s rn of
		Pull -> do
			HttpPullSvTest r w <- generate (One sh) $
				HttpPullSvTestArgs pla [rn]
			-- HttpPullSvTestArgs pla []
			return . Left $ HttpPullSv r w
		Push -> do
			HttpPushTest ps <- generate (Two ch (Just sh)) $
				HttpPushTestArgs psa [rn]
			return $ Right ps
| YoshikuniJujo/xml-push | src/Network/XmlPush/Http/Server.hs | bsd-3-clause | 1,913 | 24 | 19 | 321 | 656 | 348 | 308 | 57 | 2 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[HsBinds]{Abstract syntax: top-level bindings and signatures}
Datatype for: @BindGroup@, @Bind@, @Sig@, @Bind@.
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- Note [Pass sensitive types]
-- in module PlaceHolder
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE BangPatterns #-}
module HsBinds where
import {-# SOURCE #-} HsExpr ( pprExpr, LHsExpr,
MatchGroup, pprFunBind,
GRHSs, pprPatBind )
import {-# SOURCE #-} HsPat ( LPat )
import PlaceHolder ( PostTc,PostRn,DataId,OutputableBndrId )
import HsTypes
import PprCore ()
import CoreSyn
import TcEvidence
import Type
import Name
import NameSet
import BasicTypes
import Outputable
import SrcLoc
import Var
import Bag
import FastString
import BooleanFormula (LBooleanFormula)
import DynFlags
import Data.Data hiding ( Fixity )
import Data.List hiding ( foldr )
import Data.Ord
import Data.Foldable ( Foldable(..) )
{-
************************************************************************
* *
\subsection{Bindings: @BindGroup@}
* *
************************************************************************
Global bindings (where clauses)
-}
-- During renaming, we need bindings where the left-hand sides
-- have been renamed but the the right-hand sides have not.
-- the ...LR datatypes are parametrized by two id types,
-- one for the left and one for the right.
-- Other than during renaming, these will be the same.
type HsLocalBinds id = HsLocalBindsLR id id
-- | Bindings in a 'let' expression
-- or a 'where' clause
data HsLocalBindsLR idL idR
= HsValBinds (HsValBindsLR idL idR)
-- There should be no pattern synonyms in the HsValBindsLR
-- These are *local* (not top level) bindings
-- The parser accepts them, however, leaving the the
-- renamer to report them
| HsIPBinds (HsIPBinds idR)
| EmptyLocalBinds
deriving instance (DataId idL, DataId idR)
=> Data (HsLocalBindsLR idL idR)
type HsValBinds id = HsValBindsLR id id
-- | Value bindings (not implicit parameters)
-- Used for both top level and nested bindings
-- May contain pattern synonym bindings
data HsValBindsLR idL idR
= -- | Before renaming RHS; idR is always RdrName
-- Not dependency analysed
-- Recursive by default
ValBindsIn
(LHsBindsLR idL idR) [LSig idR]
-- | After renaming RHS; idR can be Name or Id
-- Dependency analysed,
-- later bindings in the list may depend on earlier
-- ones.
| ValBindsOut
[(RecFlag, LHsBinds idL)]
[LSig Name]
deriving instance (DataId idL, DataId idR)
=> Data (HsValBindsLR idL idR)
type LHsBind id = LHsBindLR id id
type LHsBinds id = LHsBindsLR id id
type HsBind id = HsBindLR id id
type LHsBindsLR idL idR = Bag (LHsBindLR idL idR)
type LHsBindLR idL idR = Located (HsBindLR idL idR)
data HsBindLR idL idR
= -- | FunBind is used for both functions @f x = e@
-- and variables @f = \x -> e@
--
-- Reason 1: Special case for type inference: see 'TcBinds.tcMonoBinds'.
--
-- Reason 2: Instance decls can only have FunBinds, which is convenient.
-- If you change this, you'll need to change e.g. rnMethodBinds
--
-- But note that the form @f :: a->a = ...@
-- parses as a pattern binding, just like
-- @(f :: a -> a) = ... @
--
-- 'ApiAnnotation.AnnKeywordId's
--
-- - 'ApiAnnotation.AnnFunId', attached to each element of fun_matches
--
-- - 'ApiAnnotation.AnnEqual','ApiAnnotation.AnnWhere',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose',
-- For details on above see note [Api annotations] in ApiAnnotation
FunBind {
fun_id :: Located idL, -- Note [fun_id in Match] in HsExpr
fun_matches :: MatchGroup idR (LHsExpr idR), -- ^ The payload
fun_co_fn :: HsWrapper, -- ^ Coercion from the type of the MatchGroup to the type of
-- the Id. Example:
--
-- @
-- f :: Int -> forall a. a -> a
-- f x y = y
-- @
--
-- Then the MatchGroup will have type (Int -> a' -> a')
-- (with a free type variable a'). The coercion will take
-- a CoreExpr of this type and convert it to a CoreExpr of
-- type Int -> forall a'. a' -> a'
-- Notice that the coercion captures the free a'.
bind_fvs :: PostRn idL NameSet, -- ^ After the renamer, this contains
-- the locally-bound
-- free variables of this defn.
-- See Note [Bind free vars]
fun_tick :: [Tickish Id] -- ^ Ticks to put on the rhs, if any
}
-- | The pattern is never a simple variable;
-- That case is done by FunBind
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnBang',
-- 'ApiAnnotation.AnnEqual','ApiAnnotation.AnnWhere',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose',
-- For details on above see note [Api annotations] in ApiAnnotation
| PatBind {
pat_lhs :: LPat idL,
pat_rhs :: GRHSs idR (LHsExpr idR),
pat_rhs_ty :: PostTc idR Type, -- ^ Type of the GRHSs
bind_fvs :: PostRn idL NameSet, -- ^ See Note [Bind free vars]
pat_ticks :: ([Tickish Id], [[Tickish Id]])
-- ^ Ticks to put on the rhs, if any, and ticks to put on
-- the bound variables.
}
-- | Dictionary binding and suchlike.
-- All VarBinds are introduced by the type checker
| VarBind {
var_id :: idL,
var_rhs :: LHsExpr idR, -- ^ Located only for consistency
var_inline :: Bool -- ^ True <=> inline this binding regardless
-- (used for implication constraints only)
}
| AbsBinds { -- Binds abstraction; TRANSLATION
abs_tvs :: [TyVar],
abs_ev_vars :: [EvVar], -- ^ Includes equality constraints
-- | AbsBinds only gets used when idL = idR after renaming,
-- but these need to be idL's for the collect... code in HsUtil
-- to have the right type
abs_exports :: [ABExport idL],
-- | Evidence bindings
-- Why a list? See TcInstDcls
-- Note [Typechecking plan for instance declarations]
abs_ev_binds :: [TcEvBinds],
-- | Typechecked user bindings
abs_binds :: LHsBinds idL
}
| AbsBindsSig { -- Simpler form of AbsBinds, used with a type sig
-- in tcPolyCheck. Produces simpler desugaring and
-- is necessary to avoid #11405, comment:3.
abs_tvs :: [TyVar],
abs_ev_vars :: [EvVar],
abs_sig_export :: idL, -- like abe_poly
abs_sig_prags :: TcSpecPrags,
abs_sig_ev_bind :: TcEvBinds, -- no list needed here
abs_sig_bind :: LHsBind idL -- always only one, and it's always a
-- FunBind
}
| PatSynBind (PatSynBind idL idR)
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnPattern',
-- 'ApiAnnotation.AnnLarrow','ApiAnnotation.AnnEqual',
-- 'ApiAnnotation.AnnWhere'
-- 'ApiAnnotation.AnnOpen' @'{'@,'ApiAnnotation.AnnClose' @'}'@
-- For details on above see note [Api annotations] in ApiAnnotation
deriving instance (DataId idL, DataId idR)
=> Data (HsBindLR idL idR)
-- Consider (AbsBinds tvs ds [(ftvs, poly_f, mono_f) binds]
--
-- Creates bindings for (polymorphic, overloaded) poly_f
-- in terms of monomorphic, non-overloaded mono_f
--
-- Invariants:
-- 1. 'binds' binds mono_f
-- 2. ftvs is a subset of tvs
-- 3. ftvs includes all tyvars free in ds
--
-- See Note [AbsBinds]
data ABExport id
= ABE { abe_poly :: id -- ^ Any INLINE pragmas is attached to this Id
, abe_mono :: id
, abe_wrap :: HsWrapper -- ^ See Note [ABExport wrapper]
-- Shape: (forall abs_tvs. abs_ev_vars => abe_mono) ~ abe_poly
, abe_prags :: TcSpecPrags -- ^ SPECIALISE pragmas
} deriving Data
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnPattern',
-- 'ApiAnnotation.AnnEqual','ApiAnnotation.AnnLarrow'
-- 'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpen' @'{'@,
-- 'ApiAnnotation.AnnClose' @'}'@,
-- For details on above see note [Api annotations] in ApiAnnotation
data PatSynBind idL idR
= PSB { psb_id :: Located idL, -- ^ Name of the pattern synonym
psb_fvs :: PostRn idR NameSet, -- ^ See Note [Bind free vars]
psb_args :: HsPatSynDetails (Located idR), -- ^ Formal parameter names
psb_def :: LPat idR, -- ^ Right-hand side
psb_dir :: HsPatSynDir idR -- ^ Directionality
}
deriving instance (DataId idL, DataId idR)
=> Data (PatSynBind idL idR)
{-
Note [AbsBinds]
~~~~~~~~~~~~~~~
The AbsBinds constructor is used in the output of the type checker, to record
*typechecked* and *generalised* bindings. Consider a module M, with this
top-level binding, where there is no type signature for M.reverse,
M.reverse [] = []
M.reverse (x:xs) = M.reverse xs ++ [x]
In Hindley-Milner, a recursive binding is typechecked with the *recursive* uses
being *monomorphic*. So after typechecking *and* desugaring we will get something
like this
M.reverse :: forall a. [a] -> [a]
= /\a. letrec
reverse :: [a] -> [a] = \xs -> case xs of
[] -> []
(x:xs) -> reverse xs ++ [x]
in reverse
Notice that 'M.reverse' is polymorphic as expected, but there is a local
definition for plain 'reverse' which is *monomorphic*. The type variable
'a' scopes over the entire letrec.
That's after desugaring. What about after type checking but before
desugaring? That's where AbsBinds comes in. It looks like this:
AbsBinds { abs_tvs = [a]
, abs_exports = [ABE { abe_poly = M.reverse :: forall a. [a] -> [a],
, abe_mono = reverse :: [a] -> [a]}]
, abs_binds = { reverse :: [a] -> [a]
= \xs -> case xs of
[] -> []
(x:xs) -> reverse xs ++ [x] } }
Here,
* abs_tvs says what type variables are abstracted over the binding group,
just 'a' in this case.
* abs_binds is the *monomorphic* bindings of the group
* abs_exports describes how to get the polymorphic Id 'M.reverse' from the
monomorphic one 'reverse'
Notice that the *original* function (the polymorphic one you thought
you were defining) appears in the abe_poly field of the
abs_exports. The bindings in abs_binds are for fresh, local, Ids with
a *monomorphic* Id.
If there is a group of mutually recursive (see Note [Polymorphic
recursion]) functions without type signatures, we get one AbsBinds
with the monomorphic versions of the bindings in abs_binds, and one
element of abe_exports for each variable bound in the mutually
recursive group. This is true even for pattern bindings. Example:
(f,g) = (\x -> x, f)
After type checking we get
AbsBinds { abs_tvs = [a]
, abs_exports = [ ABE { abe_poly = M.f :: forall a. a -> a
, abe_mono = f :: a -> a }
, ABE { abe_poly = M.g :: forall a. a -> a
, abe_mono = g :: a -> a }]
, abs_binds = { (f,g) = (\x -> x, f) }
Note [Polymorphic recursion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
Rec { f x = ...(g ef)...
; g :: forall a. [a] -> [a]
; g y = ...(f eg)... }
These bindings /are/ mutually recursive (f calls g, and g calls f).
But we can use the type signature for g to break the recursion,
like this:
1. Add g :: forall a. [a] -> [a] to the type environment
2. Typecheck the definition of f, all by itself,
including generalising it to find its most general
type, say f :: forall b. b -> b -> [b]
3. Extend the type environment with that type for f
4. Typecheck the definition of g, all by itself,
checking that it has the type claimed by its signature
Steps 2 and 4 each generate a separate AbsBinds, so we end
up with
Rec { AbsBinds { ...for f ... }
; AbsBinds { ...for g ... } }
This approach allows both f and to call each other
polymorphically, even though only g has a signature.
We get an AbsBinds that encompasses multiple source-program
bindings only when
* Each binding in the group has at least one binder that
lacks a user type signature
* The group forms a strongly connected component
Note [ABExport wrapper]
~~~~~~~~~~~~~~~~~~~~~~~
Consider
(f,g) = (\x.x, \y.y)
This ultimately desugars to something like this:
tup :: forall a b. (a->a, b->b)
tup = /\a b. (\x:a.x, \y:b.y)
f :: forall a. a -> a
f = /\a. case tup a Any of
(fm::a->a,gm:Any->Any) -> fm
...similarly for g...
The abe_wrap field deals with impedance-matching between
(/\a b. case tup a b of { (f,g) -> f })
and the thing we really want, which may have fewer type
variables. The action happens in TcBinds.mkExport.
Note [Bind free vars]
~~~~~~~~~~~~~~~~~~~~~
The bind_fvs field of FunBind and PatBind records the free variables
of the definition. It is used for the following purposes
a) Dependency analysis prior to type checking
(see TcBinds.tc_group)
b) Deciding whether we can do generalisation of the binding
(see TcBinds.decideGeneralisationPlan)
c) Deciding whether the binding can be used in static forms
(see TcExpr.checkClosedInStaticForm for the HsStatic case and
TcBinds.isClosedBndrGroup).
Specifically,
* bind_fvs includes all free vars that are defined in this module
(including top-level things and lexically scoped type variables)
* bind_fvs excludes imported vars; this is just to keep the set smaller
* Before renaming, and after typechecking, the field is unused;
it's just an error thunk
-}
-- | Local bindings print as their value or implicit-parameter contents;
-- an empty group prints nothing.
instance (OutputableBndrId idL, OutputableBndrId idR)
       => Outputable (HsLocalBindsLR idL idR) where
  ppr lbs = case lbs of
    HsValBinds bs   -> ppr bs
    HsIPBinds bs    -> ppr bs
    EmptyLocalBinds -> empty
instance (OutputableBndrId idL, OutputableBndrId idR)
         => Outputable (HsValBindsLR idL idR) where
  -- Before renaming: just the bindings interleaved with signatures.
  ppr (ValBindsIn binds sigs)
    = pprDeclList (pprLHsBindsForUser binds sigs)
  -- After renaming: in debug style, show the strongly-connected
  -- components with their rec/nonrec flags; otherwise flatten them.
  ppr (ValBindsOut sccs sigs)
    = getPprStyle $ \ sty ->
      if debugStyle sty then -- Print with sccs showing
        vcat (map ppr sigs) $$ vcat (map ppr_scc sccs)
      else
        pprDeclList (pprLHsBindsForUser (unionManyBags (map snd sccs)) sigs)
    where
      ppr_scc (rec_flag, binds) = pp_rec rec_flag <+> pprLHsBinds binds
      pp_rec Recursive = text "rec"
      pp_rec NonRecursive = text "nonrec"
-- | Render a bag of bindings as a declaration list; an empty bag
-- produces no output at all.
pprLHsBinds :: (OutputableBndrId idL, OutputableBndrId idR)
            => LHsBindsLR idL idR -> SDoc
pprLHsBinds binds =
  if isEmptyLHsBinds binds
     then empty
     else pprDeclList (map ppr (bagToList binds))
pprLHsBindsForUser :: (OutputableBndrId idL, OutputableBndrId idR,
                       OutputableBndrId id2)
                   => LHsBindsLR idL idR -> [LSig id2] -> [SDoc]
-- Unlike pprLHsBinds this
--   a) emits no braces: 'let' and 'where' print several HsBindGroups
--      and should not wrap each group in its own braces
--   b) sorts the output by source location
--   c) interleaves the signatures
pprLHsBindsForUser binds sigs
  = map snd (sortBy (comparing fst) decls)
  where
    decls :: [(SrcSpan, SDoc)]
    decls = [(loc, ppr sig)  | L loc sig  <- sigs] ++
            [(loc, ppr bind) | L loc bind <- bagToList binds]
-- | Print a bunch of declarations, one per line ('vcat'), with the
-- usual 'pprDeeperList' depth limiting. The horizontal alternative
-- { d1; d2; ... } via 'sep' was considered and rejected.
pprDeclList :: [SDoc] -> SDoc   -- Braces with a space
pprDeclList = pprDeeperList vcat
------------
-- | The empty collection of local bindings.
emptyLocalBinds :: HsLocalBindsLR a b
emptyLocalBinds = EmptyLocalBinds

-- | True when no value or implicit-parameter bindings are present.
isEmptyLocalBinds :: HsLocalBindsLR a b -> Bool
isEmptyLocalBinds lbs = case lbs of
  HsValBinds ds   -> isEmptyValBinds ds
  HsIPBinds ds    -> isEmptyIPBinds ds
  EmptyLocalBinds -> True

-- | True when there are neither bindings nor signatures.
isEmptyValBinds :: HsValBindsLR a b -> Bool
isEmptyValBinds vbs = case vbs of
  ValBindsIn ds sigs  -> isEmptyLHsBinds ds && null sigs
  ValBindsOut ds sigs -> null ds && null sigs

emptyValBindsIn, emptyValBindsOut :: HsValBindsLR a b
emptyValBindsIn  = ValBindsIn emptyBag []
emptyValBindsOut = ValBindsOut [] []

emptyLHsBinds :: LHsBindsLR idL idR
emptyLHsBinds = emptyBag

isEmptyLHsBinds :: LHsBindsLR idL idR -> Bool
isEmptyLHsBinds = isEmptyBag
------------
-- | Combine two value-binding groups that are at the same stage (both
-- pre-renaming 'ValBindsIn' or both renamed 'ValBindsOut'). Mixing the
-- two stages violates a compiler invariant, hence the panic.
plusHsValBinds :: HsValBinds a -> HsValBinds a -> HsValBinds a
plusHsValBinds (ValBindsIn ds1 sigs1) (ValBindsIn ds2 sigs2)
  = ValBindsIn (ds1 `unionBags` ds2) (sigs1 ++ sigs2)
plusHsValBinds (ValBindsOut ds1 sigs1) (ValBindsOut ds2 sigs2)
  = ValBindsOut (ds1 ++ ds2) (sigs1 ++ sigs2)
plusHsValBinds _ _
  = panic "HsBinds.plusHsValBinds"
{-
What AbsBinds means
~~~~~~~~~~~~~~~~~~~
AbsBinds tvs
[d1,d2]
[(tvs1, f1p, f1m),
(tvs2, f2p, f2m)]
BIND
means
f1p = /\ tvs -> \ [d1,d2] -> letrec DBINDS and BIND
in fm
gp = ...same again, with gm instead of fm
This is a pretty bad translation, because it duplicates all the bindings.
So the desugarer tries to do a better job:
fp = /\ [a,b] -> \ [d1,d2] -> case tp [a,b] [d1,d2] of
(fm,gm) -> fm
..ditto for gp..
tp = /\ [a,b] -> \ [d1,d2] -> letrec DBINDS and BIND
in (fm,gm)
-}
instance (OutputableBndrId idL, OutputableBndrId idR)
=> Outputable (HsBindLR idL idR) where
ppr mbind = ppr_monobind mbind
ppr_monobind :: (OutputableBndrId idL, OutputableBndrId idR)
=> HsBindLR idL idR -> SDoc
ppr_monobind (PatBind { pat_lhs = pat, pat_rhs = grhss })
= pprPatBind pat grhss
ppr_monobind (VarBind { var_id = var, var_rhs = rhs })
= sep [pprBndr CasePatBind var, nest 2 $ equals <+> pprExpr (unLoc rhs)]
ppr_monobind (FunBind { fun_id = fun,
fun_co_fn = wrap,
fun_matches = matches,
fun_tick = ticks })
= pprTicks empty (if null ticks then empty
else text "-- ticks = " <> ppr ticks)
$$ ifPprDebug (pprBndr LetBind (unLoc fun))
$$ pprFunBind matches
$$ ifPprDebug (ppr wrap)
ppr_monobind (PatSynBind psb) = ppr psb
ppr_monobind (AbsBinds { abs_tvs = tyvars, abs_ev_vars = dictvars
, abs_exports = exports, abs_binds = val_binds
, abs_ev_binds = ev_binds })
= sdocWithDynFlags $ \ dflags ->
if gopt Opt_PrintTypecheckerElaboration dflags then
-- Show extra information (bug number: #10662)
hang (text "AbsBinds" <+> brackets (interpp'SP tyvars)
<+> brackets (interpp'SP dictvars))
2 $ braces $ vcat
[ text "Exports:" <+>
brackets (sep (punctuate comma (map ppr exports)))
, text "Exported types:" <+>
vcat [pprBndr LetBind (abe_poly ex) | ex <- exports]
, text "Binds:" <+> pprLHsBinds val_binds
, text "Evidence:" <+> ppr ev_binds ]
else
pprLHsBinds val_binds
ppr_monobind (AbsBindsSig { abs_tvs = tyvars
, abs_ev_vars = dictvars
, abs_sig_ev_bind = ev_bind
, abs_sig_bind = bind })
= sdocWithDynFlags $ \ dflags ->
if gopt Opt_PrintTypecheckerElaboration dflags then
hang (text "AbsBindsSig" <+> brackets (interpp'SP tyvars)
<+> brackets (interpp'SP dictvars))
2 $ braces $ vcat
[ text "Bind:" <+> ppr bind
, text "Evidence:" <+> ppr ev_bind ]
else
ppr bind
instance (OutputableBndr id) => Outputable (ABExport id) where
ppr (ABE { abe_wrap = wrap, abe_poly = gbl, abe_mono = lcl, abe_prags = prags })
= vcat [ ppr gbl <+> text "<=" <+> ppr lcl
, nest 2 (pprTcSpecPrags prags)
, nest 2 (text "wrap:" <+> ppr wrap)]
instance (OutputableBndr idL, OutputableBndrId idR)
=> Outputable (PatSynBind idL idR) where
ppr (PSB{ psb_id = (L _ psyn), psb_args = details, psb_def = pat,
psb_dir = dir })
= ppr_lhs <+> ppr_rhs
where
ppr_lhs = text "pattern" <+> ppr_details
ppr_simple syntax = syntax <+> ppr pat
ppr_details = case details of
InfixPatSyn v1 v2 -> hsep [ppr v1, pprInfixOcc psyn, ppr v2]
PrefixPatSyn vs -> hsep (pprPrefixOcc psyn : map ppr vs)
RecordPatSyn vs ->
pprPrefixOcc psyn
<> braces (sep (punctuate comma (map ppr vs)))
ppr_rhs = case dir of
Unidirectional -> ppr_simple (text "<-")
ImplicitBidirectional -> ppr_simple equals
ExplicitBidirectional mg -> ppr_simple (text "<-") <+> ptext (sLit "where") $$
(nest 2 $ pprFunBind mg)
pprTicks :: SDoc -> SDoc -> SDoc
-- Print stuff about ticks only when -dppr-debug is on, to avoid
-- them appearing in error messages (from the desugarer); see Trac # 3263
-- Also print ticks in dumpStyle, so that -ddump-hpc actually does
-- something useful.
pprTicks pp_no_debug pp_when_debug = getPprStyle pick
  where
    pick sty
      | debugStyle sty || dumpStyle sty = pp_when_debug
      | otherwise                       = pp_no_debug
{-
************************************************************************
* *
Implicit parameter bindings
* *
************************************************************************
-}
-- | Implicit-parameter bindings from a @let@/@where@, together with the
-- evidence bindings for their uses (present only after type checking).
data HsIPBinds id
  = IPBinds
    [LIPBind id]
    TcEvBinds -- Only in typechecker output; binds
              -- uses of the implicit parameters
deriving instance (DataId id) => Data (HsIPBinds id)

-- | True when there are neither implicit-parameter bindings nor
-- evidence bindings.
isEmptyIPBinds :: HsIPBinds id -> Bool
isEmptyIPBinds (IPBinds is ds) = null is && isEmptyTcEvBinds ds
type LIPBind id = Located (IPBind id)
-- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi' when in a
-- list
-- For details on above see note [Api annotations] in ApiAnnotation
-- | Implicit parameter bindings.
--
-- These bindings start off as (Left "x") in the parser and stay
-- that way until after type-checking when they are replaced with
-- (Right d), where "d" is the name of the dictionary holding the
-- evidence for the implicit parameter.
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnEqual'
-- For details on above see note [Api annotations] in ApiAnnotation
data IPBind id
= IPBind (Either (Located HsIPName) id) (LHsExpr id)
deriving instance (DataId name) => Data (IPBind name)
instance (OutputableBndrId id) => Outputable (HsIPBinds id) where
ppr (IPBinds bs ds) = pprDeeperList vcat (map ppr bs)
$$ ifPprDebug (ppr ds)
instance (OutputableBndrId id) => Outputable (IPBind id) where
ppr (IPBind lr rhs) = name <+> equals <+> pprExpr (unLoc rhs)
where name = case lr of
Left (L _ ip) -> pprBndr LetBind ip
Right id -> pprBndr LetBind id
{-
************************************************************************
* *
\subsection{@Sig@: type signatures and value-modifying user pragmas}
* *
************************************************************************
It is convenient to lump ``value-modifying'' user-pragmas (e.g.,
``specialise this function to these four types...'') in with type
signatures. Then all the machinery to move them into place, etc.,
serves for both.
-}
type LSig name = Located (Sig name)
-- | Signatures and pragmas
data Sig name
= -- | An ordinary type signature
--
-- > f :: Num a => a -> a
--
-- After renaming, this list of Names contains the named and unnamed
-- wildcards brought into scope by this signature. For a signature
-- @_ -> _a -> Bool@, the renamer will give the unnamed wildcard @_@
-- a freshly generated name, e.g. @_w@. @_w@ and the named wildcard @_a@
-- are then both replaced with fresh meta vars in the type. Their names
-- are stored in the type signature that brought them into scope, in
-- this third field to be more specific.
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnDcolon',
-- 'ApiAnnotation.AnnComma'
-- For details on above see note [Api annotations] in ApiAnnotation
TypeSig
[Located name] -- LHS of the signature; e.g. f,g,h :: blah
(LHsSigWcType name) -- RHS of the signature; can have wildcards
-- | A pattern synonym type signature
--
-- > pattern Single :: () => (Show a) => a -> [a]
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnPattern',
-- 'ApiAnnotation.AnnDcolon','ApiAnnotation.AnnForall'
-- 'ApiAnnotation.AnnDot','ApiAnnotation.AnnDarrow'
-- For details on above see note [Api annotations] in ApiAnnotation
| PatSynSig [Located name] (LHsSigType name)
-- P :: forall a b. Req => Prov => ty
-- | A signature for a class method
-- False: ordinary class-method signature
-- True: default class method signature
-- e.g. class C a where
-- op :: a -> a -- Ordinary
-- default op :: Eq a => a -> a -- Generic default
-- No wildcards allowed here
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnDefault',
-- 'ApiAnnotation.AnnDcolon'
| ClassOpSig Bool [Located name] (LHsSigType name)
-- | A type signature in generated code, notably the code
-- generated for record selectors. We simply record
-- the desired Id itself, replete with its name, type
-- and IdDetails. Otherwise it's just like a type
-- signature: there should be an accompanying binding
| IdSig Id
-- | An ordinary fixity declaration
--
-- > infixl 8 ***
--
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnInfix',
-- 'ApiAnnotation.AnnVal'
-- For details on above see note [Api annotations] in ApiAnnotation
| FixSig (FixitySig name)
-- | An inline pragma
--
-- > {#- INLINE f #-}
--
-- - 'ApiAnnotation.AnnKeywordId' :
-- 'ApiAnnotation.AnnOpen' @'{-\# INLINE'@ and @'['@,
-- 'ApiAnnotation.AnnClose','ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnVal','ApiAnnotation.AnnTilde',
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
| InlineSig (Located name) -- Function name
InlinePragma -- Never defaultInlinePragma
-- | A specialisation pragma
--
-- > {-# SPECIALISE f :: Int -> Int #-}
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnOpen' @'{-\# SPECIALISE'@ and @'['@,
-- 'ApiAnnotation.AnnTilde',
-- 'ApiAnnotation.AnnVal',
-- 'ApiAnnotation.AnnClose' @']'@ and @'\#-}'@,
-- 'ApiAnnotation.AnnDcolon'
-- For details on above see note [Api annotations] in ApiAnnotation
| SpecSig (Located name) -- Specialise a function or datatype ...
[LHsSigType name] -- ... to these types
InlinePragma -- The pragma on SPECIALISE_INLINE form.
-- If it's just defaultInlinePragma, then we said
-- SPECIALISE, not SPECIALISE_INLINE
-- | A specialisation pragma for instance declarations only
--
-- > {-# SPECIALISE instance Eq [Int] #-}
--
-- (Class tys); should be a specialisation of the
-- current instance declaration
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnInstance','ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
| SpecInstSig SourceText (LHsSigType name)
-- Note [Pragma source text] in BasicTypes
-- | A minimal complete definition pragma
--
-- > {-# MINIMAL a | (b, c | (d | e)) #-}
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnVbar','ApiAnnotation.AnnComma',
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
| MinimalSig SourceText (LBooleanFormula (Located name))
-- Note [Pragma source text] in BasicTypes
deriving instance (DataId name) => Data (Sig name)
type LFixitySig name = Located (FixitySig name)
data FixitySig name = FixitySig [Located name] Fixity
deriving Data
-- | TsSpecPrags conveys pragmas from the type checker to the desugarer
data TcSpecPrags
= IsDefaultMethod -- ^ Super-specialised: a default method should
-- be macro-expanded at every call site
| SpecPrags [LTcSpecPrag]
deriving Data
-- | A located 'TcSpecPrag'.
type LTcSpecPrag = Located TcSpecPrag

-- | A single type-checked SPECIALISE pragma: the Id to be specialised,
-- a wrapper that specialises the polymorphic function, and the inlining
-- spec for the specialised function.
data TcSpecPrag
  = SpecPrag
      Id            -- The Id being specialised
      HsWrapper     -- Wrapper specialising the polymorphic function
      InlinePragma  -- Inlining spec for the specialised function
  deriving Data
-- | An empty set of specialise pragmas.
noSpecPrags :: TcSpecPrags
noSpecPrags = SpecPrags []

-- | Are any specialise pragmas attached?  (A default-method marker
-- counts as having none.)
hasSpecPrags :: TcSpecPrags -> Bool
hasSpecPrags prags = case prags of
  SpecPrags ps    -> not (null ps)
  IsDefaultMethod -> False

-- | Is this the marker for a default method?
isDefaultMethod :: TcSpecPrags -> Bool
isDefaultMethod prags = case prags of
  IsDefaultMethod -> True
  SpecPrags {}    -> False
-- | Is this a fixity signature?
isFixityLSig :: LSig name -> Bool
isFixityLSig sig = case unLoc sig of
  FixSig {} -> True
  _         -> False

-- | Is this some form of type signature?
isTypeLSig :: LSig name -> Bool
isTypeLSig sig = case unLoc sig of
  TypeSig {}    -> True
  ClassOpSig {} -> True
  IdSig {}      -> True
  _             -> False

-- | Is this a SPECIALISE signature?
isSpecLSig :: LSig name -> Bool
isSpecLSig sig = case unLoc sig of
  SpecSig {} -> True
  _          -> False

-- | Is this a SPECIALISE instance pragma?
isSpecInstLSig :: LSig name -> Bool
isSpecInstLSig sig = case unLoc sig of
  SpecInstSig {} -> True
  _              -> False

-- | Identifies pragmas (SPECIALISE or INLINE).
isPragLSig :: LSig name -> Bool
isPragLSig sig = case unLoc sig of
  SpecSig {}   -> True
  InlineSig {} -> True
  _            -> False

-- | Identifies inline pragmas.
isInlineLSig :: LSig name -> Bool
isInlineLSig sig = case unLoc sig of
  InlineSig {} -> True
  _            -> False

-- | Identifies MINIMAL pragmas.
isMinimalLSig :: LSig name -> Bool
isMinimalLSig sig = case unLoc sig of
  MinimalSig {} -> True
  _             -> False
-- | A short phrase describing a signature, for use in error messages.
hsSigDoc :: Sig name -> SDoc
hsSigDoc sig = case sig of
  TypeSig {}       -> text "type signature"
  PatSynSig {}     -> text "pattern synonym signature"
  ClassOpSig is_deflt _ _
    | is_deflt     -> text "default type signature"
    | otherwise    -> text "class method signature"
  IdSig {}         -> text "id signature"
  SpecSig {}       -> text "SPECIALISE pragma"
  InlineSig _ prag -> ppr (inlinePragmaSpec prag) <+> text "pragma"
  SpecInstSig {}   -> text "SPECIALISE instance pragma"
  FixSig {}        -> text "fixity declaration"
  MinimalSig {}    -> text "MINIMAL pragma"
{-
Check if signatures overlap; this is used when checking for duplicate
signatures. Since some of the signatures contain a list of names, testing for
equality is not enough -- we have to check if they overlap.
-}
instance (OutputableBndrId name) => Outputable (Sig name) where
    ppr = ppr_sig

-- | Render a signature or pragma in (roughly) source syntax.
ppr_sig :: (OutputableBndrId name) => Sig name -> SDoc
ppr_sig (TypeSig vars ty) = pprVarSig (map unLoc vars) (ppr ty)
ppr_sig (ClassOpSig is_deflt vars ty)
  | is_deflt  = text "default" <+> pprVarSig (map unLoc vars) (ppr ty)
  | otherwise = pprVarSig (map unLoc vars) (ppr ty)
ppr_sig (IdSig id) = pprVarSig [id] (ppr (varType id))
ppr_sig (FixSig fix_sig) = ppr fix_sig
ppr_sig (SpecSig var ty inl)
  = pragBrackets (pprSpec (unLoc var) (interpp'SP ty) inl)
ppr_sig (InlineSig var inl)
  = pragBrackets (ppr inl <+> pprPrefixOcc (unLoc var))
ppr_sig (SpecInstSig _ ty)
  = pragBrackets (text "SPECIALIZE instance" <+> ppr ty)
ppr_sig (MinimalSig _ bf) = pragBrackets (pprMinimalSig bf)
ppr_sig (PatSynSig names sig_ty)
  = text "pattern" <+> pprVarSig (map unLoc names) (ppr sig_ty)
instance OutputableBndr name => Outputable (FixitySig name) where
    -- The fixity, then the comma-separated operators it covers.
    ppr (FixitySig names fixity)
      = sep [ ppr fixity
            , hsep (punctuate comma (map (pprInfixOcc . unLoc) names)) ]
-- | Wrap a document in @{-\# ... \#-}@ pragma brackets.
pragBrackets :: SDoc -> SDoc
-- Use 'text' for both delimiters; the rest of this file never spells
-- out ptext (sLit ...) for plain literals, and text is the same thing.
pragBrackets doc = text "{-#" <+> doc <+> text "#-}"
-- | Print @v1, v2 :: ty@, with the type slightly indented on overflow.
pprVarSig :: (OutputableBndr id) => [id] -> SDoc -> SDoc
pprVarSig vars pp_ty =
    sep [ hsep (punctuate comma (map pprPrefixOcc vars)) <+> dcolon
        , nest 2 pp_ty ]
-- | Print the payload of a SPECIALIZE pragma; the inline spec is
-- omitted when it is just the default pragma.
pprSpec :: (OutputableBndr id) => id -> SDoc -> InlinePragma -> SDoc
pprSpec var pp_ty inl
  = text "SPECIALIZE" <+> pp_inl <+> pprVarSig [var] pp_ty
  where
    pp_inl = if isDefaultInlinePragma inl then empty else ppr inl
-- | Print the pragmas the type checker attached to a binding.
pprTcSpecPrags :: TcSpecPrags -> SDoc
pprTcSpecPrags prags = case prags of
  IsDefaultMethod -> text "<default method>"
  SpecPrags ps    -> vcat (map (ppr . unLoc) ps)

instance Outputable TcSpecPrag where
    ppr (SpecPrag var _ inl) = pprSpec var (text "<type>") inl
-- | Print a MINIMAL pragma's boolean formula of method names.
pprMinimalSig :: OutputableBndr name => LBooleanFormula (Located name) -> SDoc
pprMinimalSig lbf = text "MINIMAL" <+> ppr (fmap unLoc (unLoc lbf))
{-
************************************************************************
* *
\subsection[PatSynBind]{A pattern synonym definition}
* *
************************************************************************
-}
-- | How the arguments of a pattern synonym are supplied.
data HsPatSynDetails a
  = InfixPatSyn a a                     -- exactly two, infix
  | PrefixPatSyn [a]                    -- zero or more, prefix
  | RecordPatSyn [RecordPatSynField a]  -- record-style fields
  deriving Data

-- | One field of a record pattern synonym.
-- See Note [Record PatSyn Fields]
data RecordPatSynField a
  = RecordPatSynField
      { recordPatSynSelectorId :: a
        -- ^ Selector name visible in rest of the file
      , recordPatSynPatVar :: a
        -- ^ Filled in by renamer: the name used internally by the pattern
      }
  deriving Data
{-
Note [Record PatSyn Fields]
Consider the following two pattern synonyms.
pattern P x y = ([x,True], [y,'v'])
pattern Q{ x, y } =([x,True], [y,'v'])
In P, we just have two local binders, x and y.
In Q, we have local binders but also top-level record selectors
x :: ([Bool], [Char]) -> Bool and similarly for y.
It would make sense to support record-like syntax
pattern Q{ x=x1, y=y1 } = ([x1,True], [y1,'v'])
when we have a different name for the local and top-level binder,
since it makes the distinction between the two names clear
-}
instance Functor RecordPatSynField where
    -- Map over both the selector name and the internal pattern variable.
    fmap f (RecordPatSynField sel pat)
      = RecordPatSynField (f sel) (f pat)

instance Outputable a => Outputable (RecordPatSynField a) where
    -- Only the user-visible selector name is shown.
    ppr (RecordPatSynField sel _) = ppr sel

instance Foldable RecordPatSynField where
    foldMap f (RecordPatSynField sel pat) = f sel `mappend` f pat

instance Traversable RecordPatSynField where
    -- Effects run left to right: selector first, then the pattern variable.
    traverse f (RecordPatSynField sel pat)
      = RecordPatSynField <$> f sel <*> f pat
instance Functor HsPatSynDetails where
    fmap f (InfixPatSyn left right) = InfixPatSyn (f left) (f right)
    fmap f (PrefixPatSyn args)      = PrefixPatSyn (fmap f args)
    fmap f (RecordPatSyn args)      = RecordPatSyn (map (fmap f) args)

instance Foldable HsPatSynDetails where
    foldMap f (InfixPatSyn left right) = f left `mappend` f right
    foldMap f (PrefixPatSyn args)      = foldMap f args
    foldMap f (RecordPatSyn args)      = foldMap (foldMap f) args

    -- The remaining methods are specialised by hand rather than taking
    -- the foldMap-based defaults.
    foldl1 f (InfixPatSyn left right) = left `f` right
    foldl1 f (PrefixPatSyn args)      = Data.List.foldl1 f args
    foldl1 f (RecordPatSyn args)
      = Data.List.foldl1 f (map (Data.Foldable.foldl1 f) args)

    foldr1 f (InfixPatSyn left right) = left `f` right
    foldr1 f (PrefixPatSyn args)      = Data.List.foldr1 f args
    foldr1 f (RecordPatSyn args)
      = Data.List.foldr1 f (map (Data.Foldable.foldr1 f) args)

    length (InfixPatSyn _ _)   = 2
    length (PrefixPatSyn args) = Data.List.length args
    length (RecordPatSyn args) = Data.List.length args

    null (InfixPatSyn _ _)   = False
    null (PrefixPatSyn args) = Data.List.null args
    null (RecordPatSyn args) = Data.List.null args

    toList (InfixPatSyn left right) = [left, right]
    toList (PrefixPatSyn args)      = args
    toList (RecordPatSyn args)      = foldMap toList args

instance Traversable HsPatSynDetails where
    traverse f (InfixPatSyn left right) = InfixPatSyn <$> f left <*> f right
    traverse f (PrefixPatSyn args)      = PrefixPatSyn <$> traverse f args
    traverse f (RecordPatSyn args)      = RecordPatSyn <$> traverse (traverse f) args
-- | The directionality of a pattern synonym definition.
data HsPatSynDir id
  = Unidirectional
  | ImplicitBidirectional
  | ExplicitBidirectional (MatchGroup id (LHsExpr id))
-- Standalone deriving because of the DataId context.
deriving instance (DataId id) => Data (HsPatSynDir id)
| vTurbine/ghc | compiler/hsSyn/HsBinds.hs | bsd-3-clause | 39,532 | 0 | 18 | 11,540 | 6,144 | 3,322 | 2,822 | 420 | 4 |
{-# LANGUAGE PackageImports, FlexibleContexts #-}
import "monads-tf" Control.Monad.State
import Control.Monad.Trans.Control
import qualified GHC.Event as E
-- | Lift 'E.registerTimeout' into any monad supporting
-- 'MonadBaseControl' 'IO': fire the callback after @i@ microseconds.
registerTimeout :: MonadBaseControl IO m =>
	E.EventManager -> Int -> m () -> m E.TimeoutKey
registerTimeout ev i cb = control $ \run ->
	-- The callback's monadic state is discarded; only its effects run.
	E.registerTimeout ev i $ run cb >> return ()
| YoshikuniJujo/xmpipe | test/testControl.hs | bsd-3-clause | 349 | 4 | 11 | 51 | 111 | 59 | 52 | 8 | 1 |
-- |
module X12.ParserSpec where
import SpecHelper
-- | Run this module's spec stand-alone.
main :: IO ()
main = hspec spec
-- | Placeholder spec for the X12 parser; the real assertion is pending
-- a Parser update.
spec :: Spec
spec =
  describe "X12.Parser" $
    context "Parse an InterchangeVal" $
      it "parses a [SegmentToken] into an InterchangeVal" $
        pendingWith "need to update Parser to work with SegmentToken i/o SegmentTok"
| alexkyllo/xtwelve | test/X12/ParserSpec.hs | bsd-3-clause | 327 | 0 | 14 | 74 | 71 | 35 | 36 | 10 | 1 |
module Main where
import Control.Applicative
import Network.API.Codeship
import System.Environment
-- | Fetch the project list using the API key from the environment
-- and print the raw result.
main :: IO ()
main = do
  key <- CodeshipKey <$> getEnv "CODESHIP_API_KEY"
  result <- projects key
  print result
| filib/codeship-hs | examples/Projects.hs | mit | 207 | 0 | 9 | 32 | 59 | 31 | 28 | 8 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.Waiters
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.EC2.Waiters where
import Network.AWS.EC2.DescribeBundleTasks
import Network.AWS.EC2.DescribeConversionTasks
import Network.AWS.EC2.DescribeConversionTasks
import Network.AWS.EC2.DescribeConversionTasks
import Network.AWS.EC2.DescribeCustomerGateways
import Network.AWS.EC2.DescribeExportTasks
import Network.AWS.EC2.DescribeExportTasks
import Network.AWS.EC2.DescribeImages
import Network.AWS.EC2.DescribeInstances
import Network.AWS.EC2.DescribeInstances
import Network.AWS.EC2.DescribeInstances
import Network.AWS.EC2.DescribeInstances
import Network.AWS.EC2.DescribeInstanceStatus
import Network.AWS.EC2.DescribeInstanceStatus
import Network.AWS.EC2.DescribeSnapshots
import Network.AWS.EC2.DescribeSpotInstanceRequests
import Network.AWS.EC2.DescribeSubnets
import Network.AWS.EC2.DescribeVolumes
import Network.AWS.EC2.DescribeVolumes
import Network.AWS.EC2.DescribeVolumes
import Network.AWS.EC2.DescribeVPCs
import Network.AWS.EC2.DescribeVPNConnections
import Network.AWS.EC2.DescribeVPNConnections
import Network.AWS.EC2.GetPasswordData
import Network.AWS.EC2.Types
import Network.AWS.Prelude
import Network.AWS.Waiter
-- | Polls 'Network.AWS.EC2.DescribeInstances' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
instanceTerminated :: Wait DescribeInstances
instanceTerminated = Wait
    { _waitName      = "InstanceTerminated"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "terminated" AcceptSuccess
            (folding (concatOf dirsReservations) .
             folding (concatOf rInstances) .
             insState . isName . to toTextCI)
        , matchAny "pending" AcceptFailure
            (folding (concatOf dirsReservations) .
             folding (concatOf rInstances) .
             insState . isName . to toTextCI)
        , matchAny "stopping" AcceptFailure
            (folding (concatOf dirsReservations) .
             folding (concatOf rInstances) .
             insState . isName . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeVolumes' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
volumeInUse :: Wait DescribeVolumes
volumeInUse = Wait
    { _waitName      = "VolumeInUse"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "in-use" AcceptSuccess
            (folding (concatOf dvvrsVolumes) . vState . to toTextCI)
        , matchAny "deleted" AcceptFailure
            (folding (concatOf dvvrsVolumes) . vState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeSubnets' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
subnetAvailable :: Wait DescribeSubnets
subnetAvailable = Wait
    { _waitName      = "SubnetAvailable"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "available" AcceptSuccess
            (folding (concatOf dsrsSubnets) . subState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeInstanceStatus' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
systemStatusOK :: Wait DescribeInstanceStatus
systemStatusOK = Wait
    { _waitName      = "SystemStatusOk"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "ok" AcceptSuccess
            (folding (concatOf disrsInstanceStatuses) .
             isSystemStatus . _Just . issStatus . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeCustomerGateways' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
customerGatewayAvailable :: Wait DescribeCustomerGateways
customerGatewayAvailable = Wait
    { _waitName      = "CustomerGatewayAvailable"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "available" AcceptSuccess
            (folding (concatOf dcgrsCustomerGateways) . cgState . to toTextCI)
        , matchAny "deleted" AcceptFailure
            (folding (concatOf dcgrsCustomerGateways) . cgState . to toTextCI)
        , matchAny "deleting" AcceptFailure
            (folding (concatOf dcgrsCustomerGateways) . cgState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeConversionTasks' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
conversionTaskCompleted :: Wait DescribeConversionTasks
conversionTaskCompleted = Wait
    { _waitName      = "ConversionTaskCompleted"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "completed" AcceptSuccess
            (folding (concatOf dctrsConversionTasks) . ctState . to toTextCI)
        , matchAny "cancelled" AcceptFailure
            (folding (concatOf dctrsConversionTasks) . ctState . to toTextCI)
        , matchAny "cancelling" AcceptFailure
            (folding (concatOf dctrsConversionTasks) . ctState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeInstances' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
instanceStopped :: Wait DescribeInstances
instanceStopped = Wait
    { _waitName      = "InstanceStopped"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "stopped" AcceptSuccess
            (folding (concatOf dirsReservations) .
             folding (concatOf rInstances) .
             insState . isName . to toTextCI)
        , matchAny "pending" AcceptFailure
            (folding (concatOf dirsReservations) .
             folding (concatOf rInstances) .
             insState . isName . to toTextCI)
        , matchAny "terminated" AcceptFailure
            (folding (concatOf dirsReservations) .
             folding (concatOf rInstances) .
             insState . isName . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeConversionTasks' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
conversionTaskDeleted :: Wait DescribeConversionTasks
conversionTaskDeleted = Wait
    { _waitName      = "ConversionTaskDeleted"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "deleted" AcceptSuccess
            (folding (concatOf dctrsConversionTasks) . ctState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.GetPasswordData' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
passwordDataAvailable :: Wait GetPasswordData
passwordDataAvailable = Wait
    { _waitName      = "PasswordDataAvailable"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        -- Succeeds as soon as the password field is non-empty.
        [ matchAll True AcceptSuccess (nonEmpty gpdrsPasswordData)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeInstances' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
instanceRunning :: Wait DescribeInstances
instanceRunning = Wait
    { _waitName      = "InstanceRunning"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "running" AcceptSuccess
            (folding (concatOf dirsReservations) .
             folding (concatOf rInstances) .
             insState . isName . to toTextCI)
        , matchAny "shutting-down" AcceptFailure
            (folding (concatOf dirsReservations) .
             folding (concatOf rInstances) .
             insState . isName . to toTextCI)
        , matchAny "terminated" AcceptFailure
            (folding (concatOf dirsReservations) .
             folding (concatOf rInstances) .
             insState . isName . to toTextCI)
        , matchAny "stopping" AcceptFailure
            (folding (concatOf dirsReservations) .
             folding (concatOf rInstances) .
             insState . isName . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeSpotInstanceRequests' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
spotInstanceRequestFulfilled :: Wait DescribeSpotInstanceRequests
spotInstanceRequestFulfilled = Wait
    { _waitName      = "SpotInstanceRequestFulfilled"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "fulfilled" AcceptSuccess
            (folding (concatOf dsirrsSpotInstanceRequests) .
             sirStatus . _Just . sisCode . _Just . to toTextCI)
        , matchAny "schedule-expired" AcceptFailure
            (folding (concatOf dsirrsSpotInstanceRequests) .
             sirStatus . _Just . sisCode . _Just . to toTextCI)
        , matchAny "canceled-before-fulfillment" AcceptFailure
            (folding (concatOf dsirrsSpotInstanceRequests) .
             sirStatus . _Just . sisCode . _Just . to toTextCI)
        , matchAny "bad-parameters" AcceptFailure
            (folding (concatOf dsirrsSpotInstanceRequests) .
             sirStatus . _Just . sisCode . _Just . to toTextCI)
        , matchAny "system-error" AcceptFailure
            (folding (concatOf dsirrsSpotInstanceRequests) .
             sirStatus . _Just . sisCode . _Just . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeVPCs' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
vpcAvailable :: Wait DescribeVPCs
vpcAvailable = Wait
    { _waitName      = "VpcAvailable"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "available" AcceptSuccess
            (folding (concatOf dvrsVPCs) . vpcState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeExportTasks' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
exportTaskCompleted :: Wait DescribeExportTasks
exportTaskCompleted = Wait
    { _waitName      = "ExportTaskCompleted"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "completed" AcceptSuccess
            (folding (concatOf detrsExportTasks) . etState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeVPNConnections' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
vpnConnectionAvailable :: Wait DescribeVPNConnections
vpnConnectionAvailable = Wait
    { _waitName      = "VpnConnectionAvailable"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "available" AcceptSuccess
            (folding (concatOf dvcrsVPNConnections) . vcState . to toTextCI)
        , matchAny "deleting" AcceptFailure
            (folding (concatOf dvcrsVPNConnections) . vcState . to toTextCI)
        , matchAny "deleted" AcceptFailure
            (folding (concatOf dvcrsVPNConnections) . vcState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeExportTasks' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
exportTaskCancelled :: Wait DescribeExportTasks
exportTaskCancelled = Wait
    { _waitName      = "ExportTaskCancelled"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "cancelled" AcceptSuccess
            (folding (concatOf detrsExportTasks) . etState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeVolumes' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
volumeDeleted :: Wait DescribeVolumes
volumeDeleted = Wait
    { _waitName      = "VolumeDeleted"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "deleted" AcceptSuccess
            (folding (concatOf dvvrsVolumes) . vState . to toTextCI)
          -- A missing volume also counts as deleted.
          -- NOTE(review): the EC2 error code is documented as
          -- "InvalidVolume.NotFound"; confirm matchError normalises codes.
        , matchError "InvalidVolumeNotFound" AcceptSuccess
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeBundleTasks' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
bundleTaskComplete :: Wait DescribeBundleTasks
bundleTaskComplete = Wait
    { _waitName      = "BundleTaskComplete"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "complete" AcceptSuccess
            (folding (concatOf dbtrsBundleTasks) . btState . to toTextCI)
        , matchAny "failed" AcceptFailure
            (folding (concatOf dbtrsBundleTasks) . btState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeVPNConnections' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
vpnConnectionDeleted :: Wait DescribeVPNConnections
vpnConnectionDeleted = Wait
    { _waitName      = "VpnConnectionDeleted"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "deleted" AcceptSuccess
            (folding (concatOf dvcrsVPNConnections) . vcState . to toTextCI)
        , matchAny "pending" AcceptFailure
            (folding (concatOf dvcrsVPNConnections) . vcState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeConversionTasks' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
conversionTaskCancelled :: Wait DescribeConversionTasks
conversionTaskCancelled = Wait
    { _waitName      = "ConversionTaskCancelled"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "cancelled" AcceptSuccess
            (folding (concatOf dctrsConversionTasks) . ctState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeImages' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
imageAvailable :: Wait DescribeImages
imageAvailable = Wait
    { _waitName      = "ImageAvailable"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "available" AcceptSuccess
            (folding (concatOf desrsImages) . iState . to toTextCI)
        , matchAny "deregistered" AcceptFailure
            (folding (concatOf desrsImages) . iState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeSnapshots' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
snapshotCompleted :: Wait DescribeSnapshots
snapshotCompleted = Wait
    { _waitName      = "SnapshotCompleted"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "completed" AcceptSuccess
            (folding (concatOf dssrsSnapshots) . sState . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeInstances' every 5 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
instanceExists :: Wait DescribeInstances
instanceExists = Wait
    { _waitName      = "InstanceExists"
    , _waitAttempts  = 40
    , _waitDelay     = 5
    , _waitAcceptors =
        [ matchStatus 200 AcceptSuccess
          -- NOTE(review): EC2 documents this error code as
          -- "InvalidInstanceID.NotFound"; confirm matchError normalises codes.
        , matchError "InvalidInstanceIDNotFound" AcceptRetry
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeInstanceStatus' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
instanceStatusOK :: Wait DescribeInstanceStatus
instanceStatusOK = Wait
    { _waitName      = "InstanceStatusOk"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "ok" AcceptSuccess
            (folding (concatOf disrsInstanceStatuses) .
             isInstanceStatus . _Just . issStatus . to toTextCI)
        ]
    }
-- | Polls 'Network.AWS.EC2.DescribeVolumes' every 15 seconds until a
-- successful state is reached. An error is returned after 40 failed checks.
volumeAvailable :: Wait DescribeVolumes
volumeAvailable = Wait
    { _waitName      = "VolumeAvailable"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchAll "available" AcceptSuccess
            (folding (concatOf dvvrsVolumes) . vState . to toTextCI)
        , matchAny "deleted" AcceptFailure
            (folding (concatOf dvvrsVolumes) . vState . to toTextCI)
        ]
    }
| fmapfmapfmap/amazonka | amazonka-ec2/gen/Network/AWS/EC2/Waiters.hs | mpl-2.0 | 21,473 | 0 | 17 | 8,851 | 2,929 | 1,617 | 1,312 | 413 | 1 |
{-# LANGUAGE CPP #-}
module System.Posix.GracefulSpec ( spec ) where
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.List
import Network
import Network.Socket
#if MIN_VERSION_process(1,0,1)
#else
import System.Cmd
#endif
import System.Directory
import System.Exit
import System.FilePath
import System.Posix.Files
import System.Posix.Signals
import System.Posix.Types
import System.Process
import Test.Hspec
-- | End-to-end scenarios for the graceful server wrapper.
spec :: Spec
spec = describe "graceful" $ do
    -- Worker management
    it "prefork workers" $ run preforkWorkers
    it "restart keep workers > 0" $ run restartKeepWorkers
    it "upgrade keep workers > 0" $ run upgradeKeepWorkers
    it "abort upgrade keep workers > 0" $ run abortUpgradeKeepWorkers
    -- Plain shutdown behaviour
    it "simple access and quit (SIGQUIT)" $ run $ simpleAccessAnd sigQUIT
    it "simple access and stop (SIGINT)" $ run $ simpleAccessAnd sigINT
    it "simple access and stop (SIGTERM)" $ run $ simpleAccessAnd sigTERM
    -- Signals racing a live connection
    it "quit (SIGQUIT) while access" $ run quitWhileAccess
    it "stop (SIGINT) while access" $ run $ stopWhileAccess sigINT
    it "stop (SIGTERM) while access" $ run $ stopWhileAccess sigTERM
    it "restart (SIGHUP) while access" $ run restartWhileAccess
    it "upgrade (SIGUSR2) while access" $ run upgradeWhileAccess
    it "abort upgrade while access" $ run abortUpgradeWhileAccess
-- | Delete @file@ if it is present; do nothing otherwise.
removeFileIfExist :: FilePath -> IO ()
removeFileIfExist file = do
    present <- doesFileExist file
    when present (removeFile file)
-- | Block until @path@ becomes readable, polling every millisecond.
waitStandby :: FilePath -> IO ()
waitStandby path = do
    result <- tryIO (readFile path)
    either (const retry) (const done) result
  where
    retry = threadDelay 1000 >> waitStandby path
    done  = return ()
-- | Poll until exactly @n@ echo-server processes are running,
-- failing if the count ever overshoots @n@.
waitProcessIncreaseTo :: Int -> IO ()
waitProcessIncreaseTo n = do
    threadDelay 1000000
    count <- fmap length ps
    count `shouldSatisfy` (<= n)
    case compare count n of
        LT -> threadDelay 1000 >> waitProcessIncreaseTo n
        _  -> count `shouldBe` n
-- | Poll until exactly @n@ echo-server processes remain,
-- failing if the count ever undershoots @n@.
waitProcessDecreaseTo :: Int -> IO ()
waitProcessDecreaseTo n = do
    threadDelay 1000000
    count <- fmap length ps
    count `shouldSatisfy` (>= n)
    case compare count n of
        GT -> threadDelay 1000 >> waitProcessDecreaseTo n
        _  -> count `shouldBe` n
-- | Build the echo server, boot it, wait for five processes to appear
-- (observed: 1 master + 4 workers), run the scenario, then wait for a
-- complete shutdown.
run :: IO () -> IO ()
run scenario = do
    before <- fmap length ps
    before `shouldBe` 0
    buildAsEchoServer "test/echo.hs"
    let file = "/tmp/echo-server"
    mapM_ (removeFileIfExist . (file ++)) [ ".sock", ".pid" ]
    rawSystem file [] `shouldReturn` ExitSuccess
    waitStandby (file ++ ".pid")
    waitProcessIncreaseTo 5
    scenario
    waitProcessDecreaseTo 0
-- | Signal the current master (PID read from the pid file).
kill :: Signal -> IO ()
kill signal = do
    pid <- readFile "/tmp/echo-server.pid"
    signalProcess signal (read pid)

-- | Signal the pre-upgrade master (PID read from the .old pid file).
killold :: Signal -> IO ()
killold signal = do
    pid <- readFile "/tmp/echo-server.pid.old"
    signalProcess signal (read pid)
-- | 'try' specialised to 'IOException'.
tryIO :: IO a -> IO (Either IOException a)
tryIO = try

-- | Send @str@ and read back up to twice its length.
echo :: Socket -> String -> IO String
echo sock str = do
    _ <- send sock str
    recv sock (2 * length str)

-- | The server echoes the string unchanged.
shouldEcho :: Socket -> String -> Expectation
shouldEcho sock str = echo sock str `shouldReturn` str

-- | The (upgraded) server echoes the string twice.
shouldDouble :: Socket -> String -> Expectation
shouldDouble sock str = echo sock str `shouldReturn` (str ++ str)

-- | Connect once and check a single echo round-trip.
simpleAccess :: IO ()
simpleAccess = access (`shouldEcho` "simpleAccess")
-- | Close a socket; the function was renamed from 'sClose' to 'close'
-- in network 2.4, hence the CPP.
wrapClose :: Socket -> IO ()
#if MIN_VERSION_network(2,4,0)
wrapClose = close
#else
wrapClose = sClose
#endif
-- | Connect to 127.0.0.1:8080, hand the socket to @action@, and close
-- the socket afterwards even if the action throws.
access :: (Socket -> IO ()) -> IO ()
access action = bracket (socket AF_INET Stream 0) wrapClose use
  where
    use sock = do
        addr <- inet_addr "127.0.0.1"
        connect sock (SockAddrInet 8080 addr)
        action sock
-- | GHC renamed @-package-conf@ to @-package-db@ in 7.6, hence the CPP.
packageOption :: String
#if __GLASGOW_HASKELL__ < 706
packageOption = "-package-conf"
#else
packageOption = "-package-db"
#endif
-- | Compile @file@ into /tmp/echo-server against the in-place package db.
buildAsEchoServer :: FilePath -> IO ()
buildAsEchoServer file = do
    removeFileIfExist "/tmp/echo-server"
    confDistDir <- getConfDistDir
    (code, _out, _err) <- readProcessWithExitCode "ghc"
        [ "--make", file
        , "-o", "/tmp/echo-server"
        , packageOption, confDistDir ++ "/package.conf.inplace"
        ] ""
    code `shouldBe` ExitSuccess
-- | The dist directory holding package.conf.inplace: three levels up
-- from the test executable, located via /proc/self/exe (Linux only).
getConfDistDir :: IO FilePath
getConfDistDir = do
    exe <- readSymbolicLink "/proc/self/exe"
    return (takeDirectory (takeDirectory (takeDirectory exe)))
-- | PIDs of all currently running echo-server processes.
ps :: IO [ProcessID]
ps = do
    (_code, out, _err) <- readProcessWithExitCode "ps"
        [ "hopid", "-Cecho-server" ] ""
    return (map read (words out))
-- | One echo round-trip, then send the given signal to the master.
simpleAccessAnd :: Signal -> IO ()
simpleAccessAnd s = do
    simpleAccess
    kill s
-- | Exactly five processes run after boot (observed: master + 4 workers).
preforkWorkers :: IO ()
preforkWorkers = do
    count <- fmap length ps
    count `shouldBe` 5
    kill sigQUIT
-- | SIGHUP replaces the workers while keeping the process count at five.
restartKeepWorkers :: IO ()
restartKeepWorkers = do
    before <- ps
    length before `shouldBe` 5 -- master + 4 worker
    kill sigHUP
    waitProcessDecreaseTo 5
    after <- ps
    length after `shouldBe` 5 -- master + 4 worker
    -- one PID (presumably the master) survives; the workers are restarted
    length (before `intersect` after) `shouldBe` 1
    kill sigQUIT
-- | SIGUSR2 spawns a new master and workers; quitting the old master
-- leaves a completely fresh set of five processes.
upgradeKeepWorkers :: IO ()
upgradeKeepWorkers = do
    before <- ps
    length before `shouldBe` 5 -- master + 4 worker
    kill sigUSR2
    waitProcessIncreaseTo 10
    killold sigQUIT
    waitProcessDecreaseTo 5
    after <- ps
    length after `shouldBe` 5 -- master + 4 worker
    -- no PID survives: master and workers were all upgraded
    length (before `intersect` after) `shouldBe` 0
    kill sigQUIT
-- | Aborting an upgrade (quit the new master, restore the pid file)
-- leaves the original five processes untouched.
abortUpgradeKeepWorkers :: IO ()
abortUpgradeKeepWorkers = do
    before <- ps
    length before `shouldBe` 5 -- master + 4 worker
    kill sigUSR2
    waitProcessIncreaseTo 10
    -- the pid file now names the new master, so this quits the upgrade
    kill sigQUIT
    renameFile "/tmp/echo-server.pid.old" "/tmp/echo-server.pid"
    waitProcessDecreaseTo 5
    after <- ps
    length after `shouldBe` 5 -- master + 4 worker
    -- all original PIDs survive the aborted upgrade
    length (before `intersect` after) `shouldBe` 5
    kill sigQUIT
-- | True exactly for 'Left' values.
left :: Either a b -> Bool
left (Left _)  = True
left (Right _) = False

-- | True exactly for 'Right' values.
right :: Either a b -> Bool
right = not . left
-- | SIGQUIT is graceful: an already-established connection keeps
-- working, so the access completes without an IOException.
quitWhileAccess :: IO ()
quitWhileAccess = do
    outcome <- tryIO $ access $ \sock -> do
        kill sigQUIT
        replicateM_ 100 $ do
            sock `shouldEcho` "quitWhileAccess"
            threadDelay 1000
    outcome `shouldSatisfy` right
-- | SIGINT/SIGTERM stop immediately: the open connection is cut off,
-- so the access fails with an IOException.
stopWhileAccess :: Signal -> IO ()
stopWhileAccess s = do
    outcome <- tryIO $ access $ \sock -> do
        kill s
        replicateM_ 100 $ do
            sock `shouldEcho` "stopWhileAccess"
            threadDelay 1000
    outcome `shouldSatisfy` left
-- | SIGHUP: the old connection keeps echoing through the restart, and
-- a fresh connection works afterwards.
restartWhileAccess :: IO ()
restartWhileAccess = do
    access $ \sock -> do
        kill sigHUP
        replicateM_ 10 $ do
            sock `shouldEcho` "restartWhileAccess"
            threadDelay 1000
    waitProcessDecreaseTo 5
    access $ \sock ->
        replicateM_ 10 $ do
            sock `shouldEcho` "restartWhileAccess"
            threadDelay 1000
    kill sigQUIT
-- | SIGUSR2: the old connection keeps the old behaviour (plain echo);
-- after the old processes quit, new connections get the new binary,
-- which doubles its input.
upgradeWhileAccess :: IO ()
upgradeWhileAccess = do
    buildAsEchoServer "test/double.hs"
    access $ \sock -> do
        kill sigUSR2
        replicateM_ 10 $ do
            sock `shouldEcho` "upgradeWhileAccess"
            threadDelay 1000
    waitProcessIncreaseTo 10
    killold sigQUIT
    waitProcessDecreaseTo 5
    access $ \sock ->
        replicateM_ 10 $ do
            sock `shouldDouble` "upgradeWhileAccess"
            threadDelay 1000
    kill sigQUIT
-- | Aborting an upgrade mid-flight: the surviving server still runs
-- the original binary, so it keeps echoing (not doubling).
abortUpgradeWhileAccess :: IO ()
abortUpgradeWhileAccess = do
    buildAsEchoServer "test/double.hs"
    access $ \sock -> do
        kill sigUSR2
        replicateM_ 10 $ do
            sock `shouldEcho` "upgradeWhileAccess"
            threadDelay 1000
    waitProcessIncreaseTo 10
    -- the pid file names the new master, so this quits the upgrade
    kill sigQUIT
    renameFile "/tmp/echo-server.pid.old" "/tmp/echo-server.pid"
    waitProcessDecreaseTo 5
    access $ \sock ->
        replicateM_ 10 $ do
            sock `shouldEcho` "upgradeWhileAccess"
            threadDelay 1000
    kill sigQUIT
| notogawa/haskell-graceful-debian | test/System/Posix/GracefulSpec.hs | bsd-3-clause | 7,609 | 0 | 15 | 1,743 | 2,233 | 1,074 | 1,159 | 222 | 2 |
-- Name: Java Generics
-- Description: Shows how to interface with Java APIs that use Generics.
-- These language extensions are currently required to support
-- Java Generics.
{-# LANGUAGE MagicHash, FlexibleContexts, TypeFamilies, DataKinds, TypeOperators #-}
-- This imports all the standard library functionality that helps
-- you deal with importing Java methods into Eta. We are hiding certain classes
-- because they are already defined in the standard library
import Java hiding (JInteger, Collection, List, add)
import Control.Monad
-- | Allocate a Java ArrayList and run 'populateArray' on it inside
-- the Java monad.
main :: IO ()
main = java $ do
    list <- newArrayList
    list <.> populateArray 10
-- | Add the integers 0..n to the list, then read each element back
-- and print five times its value.
populateArray :: Int -> Java (ArrayList JInteger) ()
populateArray n = do
    mapM_ (add . newInteger) range
    forM_ range $ \i -> do
        jint <- get i
        io (print (intValue jint * 5))
  where
    range = [0 .. n]
-- The following a declarations of Java wrapper types. These types let you
-- interact directly with the corresponding Java objects.
-- This will not be the final syntax for Java wrapper types, see:
-- https://github.com/typelead/eta/issues/140
-- | Wrapper type for @java.util.Collection@.
data {-# CLASS "java.util.Collection" #-} Collection a
  = Collection (Object# (Collection a))
  deriving Class

-- | Wrapper type for @java.util.List@.
data {-# CLASS "java.util.List" #-} List a
  = List (Object# (List a))
  deriving Class

-- The `Inherits` type family specifies parent classes and interfaces
-- so that the Eta typechecker can statically check inheritance
-- relationships.
type instance Inherits (List a) = '[Collection a]

-- | Wrapper type for @java.util.ArrayList@.
data {-# CLASS "java.util.ArrayList" #-} ArrayList a
  = ArrayList (Object# (ArrayList a))
  deriving Class

type instance Inherits (ArrayList a) = '[List a]

-- | Wrapper type for @java.lang.Integer@.
data {-# CLASS "java.lang.Integer" #-} JInteger
  = JInteger (Object# JInteger)
  deriving Class
foreign import java unsafe "@new" newInteger :: Int -> JInteger
foreign import java unsafe "intValue" intValue :: JInteger -> Int
foreign import java unsafe "@new" newArrayList :: Java c (ArrayList a)
-- The `Extends` multi-parameter typeclass checks whether the first type
-- is a descendant of the second. This static check is facilitated by
-- the `Inherits` type family above.
foreign import java unsafe "add" add ::
(a <: Object, b <: (Collection a)) => a -> Java b Bool
foreign import java unsafe "get" get ::
(a <: Object, b <: (List a)) => Int -> Java b a
| pparkkin/eta | examples/JavaGenerics.hs | bsd-3-clause | 2,319 | 10 | 13 | 415 | 490 | 264 | 226 | -1 | -1 |
{-# LANGUAGE OverloadedStrings, TypeFamilies, TemplateHaskell, QuasiQuotes #-}
module Network.PushNotify.General.YesodPushAppRoutes where
import Yesod
import Network.PushNotify.General.Types
import Control.Concurrent
import Data.Text
-- Yesod subsite to be used for the registration and reception of messages from devices.
-- Template Haskell splice: generates route data types for the
-- 'PushManager' subsite — POST /register (SubRegisterR) and
-- POST /messages (SubMessagesR).
mkYesodSubData "PushManager" [parseRoutes|
/register SubRegisterR POST
/messages SubMessagesR POST
|]
| jimpeak/GSoC-Communicating-with-mobile-devices | push-notify-general/Network/PushNotify/General/YesodPushAppRoutes.hs | mit | 428 | 0 | 5 | 46 | 42 | 28 | 14 | 7 | 0 |
-----------------------------------------------------------------------------
--
-- Stg to C--: heap management functions
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmHeap (
getVirtHp, setVirtHp, setRealHp,
getHpRelOffset, hpRel,
entryHeapCheck, altHeapCheck, noEscapeHeapCheck, altHeapCheckReturnsTo,
heapStackCheckGen,
entryHeapCheck',
mkVirtHeapOffsets, mkVirtConstrOffsets,
mkStaticClosureFields, mkStaticClosure,
allocDynClosure, allocDynClosureCmm,
emitSetDynHdr
) where
#include "HsVersions.h"
import StgSyn
import CLabel
import StgCmmLayout
import StgCmmUtils
import StgCmmMonad
import StgCmmProf (profDynAlloc, dynProfHdr, staticProfHdr)
import StgCmmTicky
import StgCmmClosure
import StgCmmEnv
import MkGraph
import Hoopl
import SMRep
import Cmm
import CmmUtils
import CostCentre
import IdInfo( CafInfo(..), mayHaveCafRefs )
import Id ( Id )
import Module
import DynFlags
import FastString( mkFastString, fsLit )
import Control.Monad (when)
import Data.Maybe (isJust)
-----------------------------------------------------------
-- Initialise dynamic heap objects
-----------------------------------------------------------
-- | Allocate a dynamic closure in the heap and bump the virtual Hp
-- accordingly; returns an expression for Hp+n (valid only until Hp
-- changes — see the long comment below).
allocDynClosure
:: Maybe Id
-> CmmInfoTable
-> LambdaFormInfo
-> CmmExpr -- Cost Centre to stick in the object
-> CmmExpr -- Cost Centre to blame for this alloc
-- (usually the same; sometimes "OVERHEAD")
-> [(NonVoid StgArg, VirtualHpOffset)] -- Offsets from start of object
-- ie Info ptr has offset zero.
-- No void args in here
-> FCode CmmExpr -- returns Hp+n
-- | Worker for 'allocDynClosure' that takes already-compiled Cmm
-- argument expressions instead of STG arguments.
allocDynClosureCmm
:: Maybe Id -> CmmInfoTable -> LambdaFormInfo -> CmmExpr -> CmmExpr
-> [(CmmExpr, VirtualHpOffset)]
-> FCode CmmExpr -- returns Hp+n
-- allocDynClosure allocates the thing in the heap,
-- and modifies the virtual Hp to account for this.
-- The second return value is the graph that sets the value of the
-- returned LocalReg, which should point to the closure after executing
-- the graph.
-- allocDynClosure returns an (Hp+8) CmmExpr, and hence the result is
-- only valid until Hp is changed. The caller should assign the
-- result to a LocalReg if it is required to remain live.
--
-- The reason we don't assign it to a LocalReg here is that the caller
-- is often about to call regIdInfo, which immediately assigns the
-- result of allocDynClosure to a new temp in order to add the tag.
-- So by not generating a LocalReg here we avoid a common source of
-- new temporaries and save some compile time. This can be quite
-- significant - see test T4801.
allocDynClosure mb_id info_tbl lf_info use_cc _blame_cc args_w_offsets
= do { let (args, offsets) = unzip args_w_offsets
; cmm_args <- mapM getArgAmode args -- No void args
; allocDynClosureCmm mb_id info_tbl lf_info
use_cc _blame_cc (zip cmm_args offsets)
}
allocDynClosureCmm mb_id info_tbl lf_info use_cc _blame_cc amodes_w_offsets
= do { virt_hp <- getVirtHp
-- SAY WHAT WE ARE ABOUT TO DO
; let rep = cit_rep info_tbl
; tickyDynAlloc mb_id rep lf_info
; profDynAlloc rep use_cc
-- FIND THE OFFSET OF THE INFO-PTR WORD
; let info_offset = virt_hp + 1
-- info_offset is the VirtualHpOffset of the first
-- word of the new object
-- Remember, virtHp points to last allocated word,
-- ie 1 *before* the info-ptr word of new object.
info_ptr = CmmLit (CmmLabel (cit_lbl info_tbl))
-- ALLOCATE THE OBJECT
; base <- getHpRelOffset info_offset
; emitComment $ mkFastString "allocDynClosure"
; emitSetDynHdr base info_ptr use_cc
; let (cmm_args, offsets) = unzip amodes_w_offsets
; hpStore base cmm_args offsets
-- BUMP THE VIRTUAL HEAP POINTER
; dflags <- getDynFlags
; setVirtHp (virt_hp + heapClosureSize dflags rep)
; getHpRelOffset info_offset
}
-- | Write the dynamic-closure header (info pointer plus, when
-- profiling, the dynamic profiling header) at successive word
-- offsets from @base@.
emitSetDynHdr :: CmmExpr -> CmmExpr -> CmmExpr -> FCode ()
emitSetDynHdr base info_ptr ccs
= do dflags <- getDynFlags
hpStore base (header dflags) [0..]
where
header :: DynFlags -> [CmmExpr]
header dflags = [info_ptr] ++ dynProfHdr dflags ccs
-- ToDo: Parallel stuff
-- No ticky header
-- | Emit one store per (value, word-offset) pair: base[off] := val.
hpStore :: CmmExpr -> [CmmExpr] -> [VirtualHpOffset] -> FCode ()
hpStore base vals offs
 = do dflags <- getDynFlags
      let stores = [ mkStore (cmmOffsetW dflags base off) val
                   | (val, off) <- zip vals offs ]
      emit (catAGraphs stores)
-----------------------------------------------------------
-- Layout of static closures
-----------------------------------------------------------
-- Make a static closure, adding on any extra padding needed for CAFs,
-- and adding a static link field if necessary.
-- | Compute the complete list of literal words for a static closure,
-- deriving padding / static-link / saved-info fields from the info
-- table and CAF information (layout invariants explained below).
mkStaticClosureFields
:: DynFlags
-> CmmInfoTable
-> CostCentreStack
-> CafInfo
-> [CmmLit] -- Payload
-> [CmmLit] -- The full closure
mkStaticClosureFields dflags info_tbl ccs caf_refs payload
= mkStaticClosure dflags info_lbl ccs payload padding
static_link_field saved_info_field
where
info_lbl = cit_lbl info_tbl
-- CAFs must have consistent layout, regardless of whether they
-- are actually updatable or not. The layout of a CAF is:
--
-- 3 saved_info
-- 2 static_link
-- 1 indirectee
-- 0 info ptr
--
-- the static_link and saved_info fields must always be in the
-- same place. So we use isThunkRep rather than closureUpdReqd
-- here:
is_caf = isThunkRep (cit_rep info_tbl)
padding
| is_caf && null payload = [mkIntCLit dflags 0]
| otherwise = []
static_link_field
| is_caf || staticClosureNeedsLink (mayHaveCafRefs caf_refs) info_tbl
= [static_link_value]
| otherwise
= []
saved_info_field
| is_caf = [mkIntCLit dflags 0]
| otherwise = []
-- For a static constructor which has NoCafRefs, we set the
-- static link field to a non-zero value so the garbage
-- collector will ignore it.
static_link_value
| mayHaveCafRefs caf_refs = mkIntCLit dflags 0
| otherwise = mkIntCLit dflags 1 -- No CAF refs
mkStaticClosure :: DynFlags -> CLabel -> CostCentreStack -> [CmmLit]
-> [CmmLit] -> [CmmLit] -> [CmmLit] -> [CmmLit]
-- | Assemble a static closure's words in layout order: info label,
-- static profiling header, word-padded payload, then the padding,
-- static-link and saved-info fields supplied by the caller.
mkStaticClosure dflags info_lbl ccs payload padding static_link_field saved_info_field
  = concat
      [ [CmmLabel info_lbl]
      , staticProfHdr dflags ccs
      , concatMap (padLitToWord dflags) payload
      , padding
      , static_link_field
      , saved_info_field
      ]
-- JD: Simon had elided this padding, but without it the C back end asserts
-- failure. Maybe it's a bad assertion, and this padding is indeed unnecessary?
-- | Pad a literal out to a full word with zero-literals of increasing
-- width (8, 16, 32, then 64 bits) until the word boundary is reached.
padLitToWord :: DynFlags -> CmmLit -> [CmmLit]
padLitToWord dflags lit = lit : padding pad_length
where width = typeWidth (cmmLitType dflags lit)
pad_length = wORD_SIZE dflags - widthInBytes width :: Int
padding n | n <= 0 = []
| n `rem` 2 /= 0 = CmmInt 0 W8 : padding (n-1)
| n `rem` 4 /= 0 = CmmInt 0 W16 : padding (n-2)
| n `rem` 8 /= 0 = CmmInt 0 W32 : padding (n-4)
| otherwise = CmmInt 0 W64 : padding (n-8)
-----------------------------------------------------------
-- Heap overflow checking
-----------------------------------------------------------
{- Note [Heap checks]
~~~~~~~~~~~~~~~~~~
Heap checks come in various forms. We provide the following entry
points to the runtime system, all of which use the native C-- entry
convention.
* gc() performs garbage collection and returns
nothing to its caller
* A series of canned entry points like
r = gc_1p( r )
where r is a pointer. This performs gc, and
then returns its argument r to its caller.
* A series of canned entry points like
gcfun_2p( f, x, y )
where f is a function closure of arity 2
This performs garbage collection, keeping alive the
three argument ptrs, and then tail-calls f(x,y)
These are used in the following circumstances
* entryHeapCheck: Function entry
(a) With a canned GC entry sequence
f( f_clo, x:ptr, y:ptr ) {
Hp = Hp+8
if Hp > HpLim goto L
...
L: HpAlloc = 8
jump gcfun_2p( f_clo, x, y ) }
Note the tail call to the garbage collector;
it should do no register shuffling
(b) No canned sequence
f( f_clo, x:ptr, y:ptr, ...etc... ) {
T: Hp = Hp+8
if Hp > HpLim goto L
...
L: HpAlloc = 8
call gc() -- Needs an info table
goto T }
* altHeapCheck: Immediately following an eval
Started as
case f x y of r { (p,q) -> rhs }
(a) With a canned sequence for the results of f
(which is the very common case since
all boxed cases return just one pointer
...
r = f( x, y )
K: -- K needs an info table
Hp = Hp+8
if Hp > HpLim goto L
...code for rhs...
L: r = gc_1p( r )
goto K }
Here, the info table needed by the call
to gc_1p should be the *same* as the
one for the call to f; the C-- optimiser
spots this sharing opportunity)
(b) No canned sequence for results of f
Note second info table
...
(r1,r2,r3) = call f( x, y )
K:
Hp = Hp+8
if Hp > HpLim goto L
...code for rhs...
L: call gc() -- Extra info table here
goto K
* generalHeapCheck: Anywhere else
e.g. entry to thunk
case branch *not* following eval,
or let-no-escape
Exactly the same as the previous case:
K: -- K needs an info table
Hp = Hp+8
if Hp > HpLim goto L
...
L: call gc()
goto K
-}
--------------------------------------------------------------
-- A heap/stack check at a function or thunk entry point.
entryHeapCheck :: ClosureInfo
-> Maybe LocalReg -- Function (closure environment)
-> Int -- Arity -- not same as len args b/c of voids
-> [LocalReg] -- Non-void args (empty for thunk)
-> FCode ()
-> FCode ()
-- Resolves the node expression (local reg if given, else the static
-- closure label) and whether this is a fast-call function, then
-- delegates to entryHeapCheck'.
entryHeapCheck cl_info nodeSet arity args code
= entryHeapCheck' is_fastf node arity args code
where
node = case nodeSet of
Just r -> CmmReg (CmmLocal r)
Nothing -> CmmLit (CmmLabel $ staticClosureLabel cl_info)
-- ArgGen means a general (slow) argument pattern, hence not fast.
is_fastf = case closureFunInfo cl_info of
Just (_, ArgGen _) -> False
_otherwise -> True
-- | lower-level version for CmmParse
entryHeapCheck' :: Bool -- is a known function pattern
-> CmmExpr -- expression for the closure pointer
-> Int -- Arity -- not same as len args b/c of voids
-> [LocalReg] -- Non-void args (empty for thunk)
-> FCode ()
-> FCode ()
entryHeapCheck' is_fastf node arity args code
= do dflags <- getDynFlags
let is_thunk = arity == 0
args' = map (CmmReg . CmmLocal) args
stg_gc_fun = CmmReg (CmmGlobal GCFun)
stg_gc_enter1 = CmmReg (CmmGlobal GCEnter1)
{- Thunks: jump stg_gc_enter_1
Function (fast): call (NativeNode) stg_gc_fun(fun, args)
Function (slow): call (slow) stg_gc_fun(fun, args)
-}
gc_call upd
| is_thunk
= mkJump dflags NativeNodeCall stg_gc_enter1 [node] upd
| is_fastf
= mkJump dflags NativeNodeCall stg_gc_fun (node : args') upd
| otherwise
= mkJump dflags Slow stg_gc_fun (node : args') upd
updfr_sz <- getUpdFrameOff
-- Loop label so the GC call can re-test the heap check on return.
loop_id <- newLabelC
emitLabel loop_id
heapCheck True True (gc_call updfr_sz <*> mkBranch loop_id) code
-- ------------------------------------------------------------
-- A heap/stack check in a case alternative
-- If there are multiple alts and we need to GC, but don't have a
-- continuation already (the scrut was simple), then we should
-- pre-generate the continuation. (if there are multiple alts it is
-- always a canned GC point).
-- altHeapCheck:
-- If we have a return continuation,
-- then if it is a canned GC pattern,
-- then we do mkJumpReturnsTo
-- else we do a normal call to stg_gc_noregs
-- else if it is a canned GC pattern,
-- then generate the continuation and do mkCallReturnsTo
-- else we do a normal call to stg_gc_noregs
-- | Heap check in a case alternative; the non-yielding variant of
-- 'altOrNoEscapeHeapCheck'.
altHeapCheck :: [LocalReg] -> FCode a -> FCode a
altHeapCheck = altOrNoEscapeHeapCheck False
-- Shared implementation of altHeapCheck / noEscapeHeapCheck: use a
-- canned GC entry point when one matches the live registers,
-- otherwise fall back to the generic GC call.
altOrNoEscapeHeapCheck :: Bool -> [LocalReg] -> FCode a -> FCode a
altOrNoEscapeHeapCheck checkYield regs code = do
dflags <- getDynFlags
case cannedGCEntryPoint dflags regs of
Nothing -> genericGC checkYield code
Just gc -> do
lret <- newLabelC
-- Pre-generate the return continuation that copies results back in.
let (off, _, copyin) = copyInOflow dflags NativeReturn (Young lret) regs []
lcont <- newLabelC
emitOutOfLine lret (copyin <*> mkBranch lcont)
emitLabel lcont
cannedGCReturnsTo checkYield False gc regs lret off code
-- Variant used when a return continuation (label + stack offset)
-- already exists, so the canned GC can jump straight back to it.
altHeapCheckReturnsTo :: [LocalReg] -> Label -> ByteOff -> FCode a -> FCode a
altHeapCheckReturnsTo regs lret off code
= do dflags <- getDynFlags
case cannedGCEntryPoint dflags regs of
Nothing -> genericGC False code
Just gc -> cannedGCReturnsTo False True gc regs lret off code
-- noEscapeHeapCheck is implemented identically to altHeapCheck (which
-- is more efficient), but cannot be optimized away in the non-allocating
-- case because it may occur in a loop
noEscapeHeapCheck :: [LocalReg] -> FCode a -> FCode a
noEscapeHeapCheck regs code = altOrNoEscapeHeapCheck True regs code
-- Emit a heap check whose failure path invokes a canned GC entry
-- point, returning (or jumping, if the continuation is already on the
-- stack) to the given label/offset.
cannedGCReturnsTo :: Bool -> Bool -> CmmExpr -> [LocalReg] -> Label -> ByteOff
-> FCode a
-> FCode a
cannedGCReturnsTo checkYield cont_on_stack gc regs lret off code
= do dflags <- getDynFlags
updfr_sz <- getUpdFrameOff
heapCheck False checkYield (gc_call dflags gc updfr_sz) code
where
reg_exprs = map (CmmReg . CmmLocal) regs
-- Note [stg_gc arguments]
-- NB. we use the NativeReturn convention for passing arguments
-- to the canned heap-check routines, because we are in a case
-- alternative and hence the [LocalReg] was passed to us in the
-- NativeReturn convention.
gc_call dflags label sp
| cont_on_stack
= mkJumpReturnsTo dflags label NativeReturn reg_exprs lret off sp
| otherwise
= mkCallReturnsTo dflags label NativeReturn reg_exprs lret off sp []
-- Fallback heap check: call the generic no-register GC routine and
-- loop back to retry the check afterwards.
genericGC :: Bool -> FCode a -> FCode a
genericGC checkYield code
= do updfr_sz <- getUpdFrameOff
lretry <- newLabelC
emitLabel lretry
call <- mkCall generic_gc (GC, GC) [] [] updfr_sz []
heapCheck False checkYield (call <*> mkBranch lretry) code
-- Select a canned RTS GC entry point matching the live registers'
-- types (up to four GC pointers, or a single unboxed/float/double/
-- long value); Nothing means the generic GC must be used.
cannedGCEntryPoint :: DynFlags -> [LocalReg] -> Maybe CmmExpr
cannedGCEntryPoint dflags regs
= case map localRegType regs of
[] -> Just (mkGcLabel "stg_gc_noregs")
[ty]
| isGcPtrType ty -> Just (mkGcLabel "stg_gc_unpt_r1")
| isFloatType ty -> case width of
W32 -> Just (mkGcLabel "stg_gc_f1")
W64 -> Just (mkGcLabel "stg_gc_d1")
_ -> Nothing
| width == wordWidth dflags -> Just (mkGcLabel "stg_gc_unbx_r1")
| width == W64 -> Just (mkGcLabel "stg_gc_l1")
| otherwise -> Nothing
where
width = typeWidth ty
[ty1,ty2]
| isGcPtrType ty1
&& isGcPtrType ty2 -> Just (mkGcLabel "stg_gc_pp")
[ty1,ty2,ty3]
| isGcPtrType ty1
&& isGcPtrType ty2
&& isGcPtrType ty3 -> Just (mkGcLabel "stg_gc_ppp")
[ty1,ty2,ty3,ty4]
| isGcPtrType ty1
&& isGcPtrType ty2
&& isGcPtrType ty3
&& isGcPtrType ty4 -> Just (mkGcLabel "stg_gc_pppp")
_otherwise -> Nothing
-- Note [stg_gc arguments]
-- It might seem that we could avoid passing the arguments to the
-- stg_gc function, because they are already in the right registers.
-- While this is usually the case, it isn't always. Sometimes the
-- code generator has cleverly avoided the eval in a case, e.g. in
-- ffi/should_run/4221.hs we found
--
-- case a_r1mb of z
-- FunPtr x y -> ...
--
-- where a_r1mb is bound a top-level constructor, and is known to be
-- evaluated. The codegen just assigns x, y and z, and continues;
-- R1 is never assigned.
--
-- So we'll have to rely on optimisations to eliminate these
-- assignments where possible.
-- | The generic GC procedure; no params, no results
generic_gc :: CmmExpr
generic_gc = mkGcLabel "stg_gc_noregs"
-- | Build a CmmExpr naming a garbage-collector entry point in the
-- RTS package by its symbol name.
mkGcLabel :: String -> CmmExpr
mkGcLabel name =
  CmmLit $ CmmLabel $ mkCmmCodeLabel rtsPackageId (fsLit name)
-------------------------------
-- Core heap/stack check: ask getHeapUsage for the words allocated by
-- 'code', emit the check (lazily, see comment), record ticky/real-Hp
-- bookkeeping, then run 'code'.
heapCheck :: Bool -> Bool -> CmmAGraph -> FCode a -> FCode a
heapCheck checkStack checkYield do_gc code
= getHeapUsage $ \ hpHw ->
-- Emit heap checks, but be sure to do it lazily so
-- that the conditionals on hpHw don't cause a black hole
do { dflags <- getDynFlags
; let mb_alloc_bytes
| hpHw > 0 = Just (mkIntExpr dflags (hpHw * (wORD_SIZE dflags)))
| otherwise = Nothing
stk_hwm | checkStack = Just (CmmLit CmmHighStackMark)
| otherwise = Nothing
; codeOnly $ do_checks stk_hwm checkYield mb_alloc_bytes do_gc
; tickyAllocHeap True hpHw
; setRealHp hpHw
; code }
-- General-purpose heap/stack check with explicit byte counts (used by
-- hand-written Cmm); retries via a loop label after calling generic GC.
heapStackCheckGen :: Maybe CmmExpr -> Maybe CmmExpr -> FCode ()
heapStackCheckGen stk_hwm mb_bytes
= do updfr_sz <- getUpdFrameOff
lretry <- newLabelC
emitLabel lretry
call <- mkCall generic_gc (GC, GC) [] [] updfr_sz []
do_checks stk_hwm False mb_bytes (call <*> mkBranch lretry)
-- Note [Single stack check]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
-- When compiling a function we can determine how much stack space it
-- will use. We therefore need to perform only a single stack check at
-- the beginning of a function to see if we have enough stack space.
--
-- The check boils down to comparing Sp-N with SpLim, where N is the
-- amount of stack space needed (see Note [Stack usage] below). *BUT*
-- at this stage of the pipeline we are not supposed to refer to Sp
-- itself, because the stack is not yet manifest, so we don't quite
-- know where Sp is pointing.
-- So instead of referring directly to Sp - as we used to do in the
-- past - the code generator uses (old + 0) in the stack check. That
-- is the address of the first word of the old area, so if we add N
-- we'll get the address of highest used word.
--
-- This makes the check robust. For example, while we need to perform
-- only one stack check for each function, we could in theory place
-- more stack checks later in the function. They would be redundant,
-- but not incorrect (in a sense that they should not change program
-- behaviour). We need to make sure however that a stack check
-- inserted after incrementing the stack pointer checks for a
-- respectively smaller stack space. This would not be the case if the
-- code generator produced direct references to Sp. By referencing
-- (old + 0) we make sure that we always check for a correct amount of
-- stack: when converting (old + 0) to Sp the stack layout phase takes
-- into account changes already made to stack pointer. The idea for
-- this change came from observations made while debugging #8275.
-- Note [Stack usage]
-- ~~~~~~~~~~~~~~~~~~
-- At the moment we convert from STG to Cmm we don't know N, the
-- number of bytes of stack that the function will use, so we use a
-- special late-bound CmmLit, namely
-- CmmHighStackMark
-- to stand for the number of bytes needed. When the stack is made
-- manifest, the number of bytes needed is calculated, and used to
-- replace occurrences of CmmHighStackMark
--
-- The (Maybe CmmExpr) passed to do_checks is usually
-- Just (CmmLit CmmHighStackMark)
-- but can also (in certain hand-written RTS functions)
-- Just (CmmLit 8) or some other fixed value
-- If it is Nothing, we don't generate a stack check at all.
do_checks :: Maybe CmmExpr -- Should we check the stack?
-- See Note [Stack usage]
-> Bool -- Should we check for preemption?
-> Maybe CmmExpr -- Heap headroom (bytes)
-> CmmAGraph -- What to do on failure
-> FCode ()
-- Emits the stack-overflow test (if requested), the Hp bump + heap
-- overflow test (if allocating) or the yield test (if not), and the
-- out-of-line GC block.
do_checks mb_stk_hwm checkYield mb_alloc_lit do_gc = do
dflags <- getDynFlags
gc_id <- newLabelC
let
-- NOTE(review): lazy partial binding — alloc_lit is only forced on
-- the isJust branch below, so the incomplete match never fails.
Just alloc_lit = mb_alloc_lit
bump_hp = cmmOffsetExprB dflags (CmmReg hpReg) alloc_lit
-- Sp overflow if ((old + 0) - CmmHighStack < SpLim)
-- At the beginning of a function old + 0 = Sp
-- See Note [Single stack check]
sp_oflo sp_hwm =
CmmMachOp (mo_wordULt dflags)
[CmmMachOp (MO_Sub (typeWidth (cmmRegType dflags spReg)))
[CmmStackSlot Old 0, sp_hwm],
CmmReg spLimReg]
-- Hp overflow if (Hp > HpLim)
-- (Hp has been incremented by now)
-- HpLim points to the LAST WORD of valid allocation space.
hp_oflo = CmmMachOp (mo_wordUGt dflags)
[CmmReg hpReg, CmmReg (CmmGlobal HpLim)]
alloc_n = mkAssign (CmmGlobal HpAlloc) alloc_lit
case mb_stk_hwm of
Nothing -> return ()
Just stk_hwm -> tickyStackCheck >> (emit =<< mkCmmIfGoto (sp_oflo stk_hwm) gc_id)
if (isJust mb_alloc_lit)
then do
tickyHeapCheck
emitAssign hpReg bump_hp
emit =<< mkCmmIfThen hp_oflo (alloc_n <*> mkBranch gc_id)
else do
when (not (gopt Opt_OmitYields dflags) && checkYield) $ do
-- Yielding if HpLim == 0
let yielding = CmmMachOp (mo_wordEq dflags)
[CmmReg (CmmGlobal HpLim),
CmmLit (zeroCLit dflags)]
emit =<< mkCmmIfGoto yielding gc_id
emitOutOfLine gc_id $
do_gc -- this is expected to jump back somewhere
-- Test for stack pointer exhaustion, then
-- bump heap pointer, and test for heap exhaustion
-- Note that we don't move the heap pointer unless the
-- stack check succeeds. Otherwise we might end up
-- with slop at the end of the current block, which can
-- confuse the LDV profiler.
| lukexi/ghc-7.8-arm64 | compiler/codeGen/StgCmmHeap.hs | bsd-3-clause | 23,495 | 0 | 20 | 6,810 | 3,595 | 1,854 | 1,741 | 284 | 8 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Filipino locale) for the "Passive Scan
     Rules - Beta" ZAP extension: declares the map plus TOC, index,
     full-text search and favorites views. -->
<helpset version="2.0" xml:lang="fil-PH">
<title>Mga Patakaran ng Pasibong Pag-scan - Beta | Ekstensyon ng ZAP</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Mga Nilalaman</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Paghahanap</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Mga Paborito</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/pscanrulesBeta/src/main/javahelp/org/zaproxy/zap/extension/pscanrulesBeta/resources/help_fil_PH/helpset_fil_PH.hs | apache-2.0 | 1,020 | 78 | 68 | 167 | 436 | 219 | 217 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Romanian locale) for the ZAP
     Automation Framework add-on: declares the map plus TOC, index,
     full-text search and favorites views. -->
<helpset version="2.0" xml:lang="ro-RO">
<title>Automation Framework</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/automation/src/main/javahelp/org/zaproxy/addon/automation/resources/help_ro_RO/helpset_ro_RO.hs | apache-2.0 | 965 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
module RefacMoveDef(liftToTopLevel, liftOneLevel, demote,liftingInClientMod) where
import Prelude hiding (putStrLn)
import PrettyPrint
import Data.Maybe
import Data.List
import RefacUtils
import HsName
import AbstractIO
data Direction = UptoTopLevel | UpOneLevel | Down
{--------This function handles refactorings involving moving a defintion--------
According to the Haskell's syntax, a declaration may occur in one of the following six contexts:
1. A top level declaration in the module:
HsModule SrcLoc ModuleName (Maybe [HsExportSpecI i]) [HsImportDeclI i] ds
2. A local declaration in a Match:
HsMatch SrcLoc i [p] (HsRhs e) ds
3. A local declaration in a pattern binding:
HsPatBind SrcLoc p (HsRhs e) ds
4. A local declaration in a Let expression:
HsLet ds e
5. A local declaration in a Case alternative:
HsAlt SrcLoc p (HsRhs e) ds
6. A local declaration in a Do statement:
HsLetStmt ds (HsStmt e p ds)
-}
-- | Lift the local definition at the cursor position to the top level
-- of its module.  Arguments: [fileName, row, col].
--
-- Delegates to 'move', which already contains exactly this
-- parse/locate/dispatch boilerplate for all three move refactorings;
-- delegating removes the triplicated code (and makes 'move' live).
liftToTopLevel args = move UptoTopLevel args
-- | Lift the local definition at the cursor position one level up in
-- its module.  Arguments: [fileName, row, col].
--
-- Delegates to 'move' (direction 'UpOneLevel'), which performs the
-- identical parse/locate/dispatch sequence this function previously
-- duplicated verbatim.
liftOneLevel args = move UpOneLevel args
-- | Demote (move down) the definition at the cursor position.
-- Arguments: [fileName, row, col].
--
-- Delegates to 'move' (direction 'Down'), which performs the identical
-- parse/locate/dispatch sequence this function previously duplicated.
demote args = move Down args
-- Shared driver for the three move refactorings: parse the file,
-- locate the identifier under the cursor, and dispatch on 'Direction'.
-- Errors out if the cursor is not on a valid identifier.
move direction args
= do let fileName = ghead "filename" args
row = read (args!!1)::Int
col = read (args!!2)::Int
modName <- fileNameToModName fileName
(inscps, _, mod, toks) <- parseSourceFile fileName
let pnt = locToPNT fileName (row, col) mod
pn = pNTtoPN pnt
if pn /= defaultPN
then
case direction of
UptoTopLevel ->liftToTopLevel' modName fileName (inscps, mod, toks) pnt
UpOneLevel ->liftOneLevel' modName fileName (inscps, mod, toks) pnt
Down ->demote' modName fileName (mod, toks) pn
else error "\nInvalid cursor position!\n"
{- Refactoring Names: 'liftToTopLevel'
This refactoring lifts a local function/pattern binding to the top level of the module, so as to
make it accessible to other functions in the current module, and those modules that import
current module.
In the current implementation, a definition will be lifted only if none of the identifiers defined in this
definition will cause name clash/capture problems in the current module after lifting.
In the case that the whole current module is exported implicitly, the lifted identifier will be exported
automatically after lifting. If the identifier will cause name clash/ambiguous occurrence problem in a
client module, it will be hided in the import declaration of the client module (Note: this might not be
the best solution, we prefer hiding it in the server module instead of in the client module in the final version).
In the case of indirect importing, it might be time-consuming to trace whether the lifted identifier
will cause any problem in a client module that indirectly imports the current module. The current solution is:
suppose a defintion is lifted to top level in module A, and module A is imported and exported by module B, then
the lifted identifier will be hided in the import declaration of B no matter whether it causes problems in
module B or not.
Function name: liftToTopLevel
parameters: fileName--current file name.
mod -- the scoped abstract syntax tree of the module.
pn -- the function/pattern name to be lifted.
-}
-- Worker for liftToTopLevel: performs the lifting inside the current
-- module and, when the module is exported, propagates the change to
-- all client modules as well.
liftToTopLevel' modName fileName (inscps, mod, toks) pnt@(PNT pn _ _)
= if isLocalFunOrPatName pn mod
then do ((mod',declPns),((toks',m),_))<-runStateT liftToMod ((toks,unmodified),(-1000,0))
if modIsExported mod
then do clients<-clientModsAndFiles modName
refactoredClients <- mapM (liftingInClientMod modName declPns) clients
writeRefactoredFiles False $ ((fileName,m),(toks',mod')):refactoredClients
else do writeRefactoredFiles False [((fileName,m), (toks',mod'))]
else error "\nThe identifier is not a local function/pattern name!"
where
{-step1: divide the module's top level declaration list into three parts:
'parent' is the top level declaration containing the lifted declaration,
'before' and `after` are those declarations before and after 'parent'.
step2: get the declarations to be lifted from parent, bind it to liftedDecls
step3: remove the lifted declarations from parent; extra arguments may be introduced.
step4. test whether there are any names that need to be renamed.
-}
liftToMod = do let (before, parent,after)=divideDecls (hsDecls mod) pnt
when (isClassDecl $ ghead "liftToMod" parent)
$ error "Sorry, the refactorer cannot lift a definition from a class declaration!"
when (isInstDecl $ ghead "liftToMod" parent)
$ error "Sorry, the refactorer cannot lift a definition from an instance declaration!"
let liftedDecls=definingDecls [pn] parent True True
declaredPns=nub $ concatMap definedPNs liftedDecls
pns<-pnsNeedRenaming inscps mod parent liftedDecls declaredPns
(_,dd)<-hsFreeAndDeclaredPNs mod
if pns==[]
then do (parent',liftedDecls',paramAdded)<-addParamsToParentAndLiftedDecl pn dd parent liftedDecls
let liftedDecls''=if paramAdded then filter isFunOrPatBind liftedDecls'
else liftedDecls'
mod'<-moveDecl1 (replaceDecls mod (before++parent'++after))
(Just (ghead "liftToMod" (definedPNs (ghead "liftToMod2" parent')))) [pn] True
return (mod', declaredPns)
else askRenamingMsg pns "lifting"
-- Move the declaration (plus its tokens and type signature) of the
-- first name in pns out of t and re-insert it after defName (or at
-- the top level when topLevel is True).
moveDecl1 t defName pns topLevel
= do ((toks, _),_)<-get
let (declToMove, toksToMove) = getDeclAndToks (ghead "moveDecl1" pns) True toks t
--error$ show (declToMove, toksToMove)
t' <- rmDecl (ghead "moveDecl3" pns) False =<<foldM (flip rmTypeSig) t pns
addDecl t' defName (declToMove, Just toksToMove) topLevel
--get all the declarations define in the scope of t
-- (collects function bindings, pattern bindings and type signatures
-- by a full top-down traversal).
allDeclsIn t = fromMaybe [] (applyTU (full_tdTU (constTU [] `adhocTU` decl)) t)
where decl (d::HsDeclP)
|isFunBind d || isPatBind d || isTypeSig d = Just [d]
decl _ = Just []
-- Abort the refactoring with a message asking the user to rename the
-- clashing identifiers first.
askRenamingMsg pns str = error message
  where
    message =
      "The identifier(s):" ++ showEntities showPNwithLoc pns ++
      " will cause name clash/capture or ambiguity occurrence problem after "
      ++ str ++ ", please do renaming first!"
--Get the subset of 'pns' that need to be renamed before lifting.
pnsNeedRenaming inscps dest parent liftedDecls pns
=do r<-mapM pnsNeedRenaming' pns
return (concat r)
where
pnsNeedRenaming' pn
= do (f,d)<-hsFDsFromInside dest --f: free variable names that may be shadowed by pn
--d: declared variable names that may clash with pn
vs<-hsVisiblePNs pn parent --vs: declared variables that may shadow pn
let inscpNames = map (\(x,_,_,_)->x) $ inScopeInfo inscps
vars = map pNtoName (nub (f `union` d `union` vs) \\ [pn]) -- `union` inscpNames
if elem (pNtoName pn) vars || isInScopeAndUnqualified (pNtoName pn) inscps && findEntity pn dest
then return [pn]
else return []
--This pNtoName takes into account the qualifier.
pNtoName (PN (UnQual i) orig)=i
pNtoName (PN (Qual (PlainModule modName) i ) orig)=modName ++ "." ++ i
--can not simply use PNameToExp, PNameToPat here because of the location information.
-- Rewrite every occurrence of pn in t into a parenthesised application
-- of pn to the extra parameters introduced by the lifting.
addParamsToParent pn [] t = return t
addParamsToParent pn params t
=applyTP(full_buTP (idTP `adhocTP` inExp)) t
where
inExp (exp@(Exp (HsId (HsVar (PNT pname ty loc))))::HsExpP)
| pname==pn
= do let newExp=Exp (HsParen (foldl addParamToExp exp (map pNtoExp params)))
update exp newExp exp
inExp x =return x
addParamToExp exp param
=(Exp (HsApp exp param))
--Do refactoring in the client module.
-- that is to hide the identifer in the import declaration if it will cause any problem in the client module.
liftingInClientMod serverModName pns (modName, fileName)
= do (inscps, exps ,mod ,ts) <- parseSourceFile fileName
let modNames = willBeUnQualImportedBy serverModName mod
if isJust modNames
then let pns' = namesNeedToBeHided mod exps (fromJust modNames) pns
in if pns' /= []
then do (mod', ((ts',m),_))<-runStateT (addHiding serverModName mod pns') ((ts,unmodified),(-1000,0))
return ((fileName,m), (ts',mod'))
else return ((fileName,unmodified), (ts,mod))
else return ((fileName,unmodified),(ts,mod))
-- |True iff the current module's explicit export list re-exports (via a
-- module export item) any of the modules in 'names'.  A module without an
-- explicit export list exports nothing in this sense.
willBeExportedByClientMod names mod =
  case hsModExports mod of
    Nothing   -> False
    Just exps -> any (\y -> isJust (find (\x -> simpModule x == Just y) exps)) names
  where
    -- Project a module export item to its module name; anything else is Nothing.
    simpModule (ModuleE (SN m _)) = Just m
    simpModule _                  = Nothing
-- |Find the names (module name or alias) under which 'modName' is imported
-- unqualified by 'mod', i.e. via an import that is not @qualified@ and has
-- either no import-spec list or a @hiding@ list.  'Nothing' when there is no
-- such import.
willBeUnQualImportedBy::HsName.ModuleName->HsModuleP->Maybe [HsName.ModuleName]
willBeUnQualImportedBy modName mod
  | null unqualImports = Nothing
  | otherwise          = Just (nub (map importedAs unqualImports))
  where
    unqualImports = filter isUnqualImportOfMod (hsModImports mod)
    -- h :: Maybe (Bool, specs); fst == True marks a hiding list, and an
    -- absent spec list also imports everything unqualified.
    isUnqualImportOfMod (HsImportDecl _ (SN modName1 _) qualify _as h)
      = modName == modName1 && not qualify && maybe True fst h
    -- Prefer the alias when one is given, otherwise the module name itself.
    importedAs (HsImportDecl _ (SN name _) _qualify as _h)
      = maybe name simpModName as
    simpModName (SN m _loc) = m
-- |Compute the subset of 'pns' that must be added to the hiding list of the
-- import declaration in 'mod'.  If the client module re-exports any of the
-- importing module names, every lifted name must be hidden; otherwise only
-- those that would clash with unqualified uses, exports, or export-list
-- entries in 'mod'.
namesNeedToBeHided mod exps modNames pns
  | willBeExportedByClientMod modNames mod = pns
  | otherwise                              = concatMap needToBeHided pns
  where
    needToBeHided pn
      | usedWithoutQual name (hsModDecls mod)      -- same name used unqualified in the module body
        || usedWithoutQual name (hsModExports mod) -- same name exported unqualified by an entity export
        || causeNameClashInExports pn name mod exps
      = [pn]
      | otherwise
      = []
      where name = pNtoName pn
-- **************************************************************************************************************--
{- Refactoring Names: 'liftOneLevel'
   Description:
    this refactoring lifts a local function/pattern binding only one level up.
    By 'lifting one-level up' ,I mean:
    case1: In a module (HsModule SrcLoc ModuleName (Maybe [HsExportSpecI i]) [HsImportDeclI i] ds):
           A local declaration D will be lifted to the same level as the 'ds', if D is in the
           where clause of one of ds's element declaration.
    case2: In a match ( HsMatch SrcLoc i [p] (HsRhs e) ds) :
           A local declaration D will be lifted to the same level as the 'ds', if D is in the
           where clause of one of ds's element declaration.
           A declaration D,say,in the rhs expression 'e' will be lifted to 'ds' if D is Not local to
           other declaration list in 'e'
    case3: In a pattern binding (HsPatBind SrcLoc p (HsRhs e) ds):
           A local declaration D will be lifted to the same level as the 'ds', if D is in the
           where clause of one of ds's element declaration.
           A declaration D,say,in the rhs expression 'e' will be lifted to 'ds' if D is Not local to
           other declaration list in 'e'
    case4: In the Let expression (Exp (HsLet ds e)):
           A local declaration D will be lifted to the same level as the 'ds', if D is in the
           where clause of one of ds's element declaration.
           A declaration D, say, in the expression 'e' will be lifted to 'ds' if D is not local to
           other declaration list in 'e'
    case5: In the case Alternative expression:(HsAlt loc p rhs ds)
           A local declaration D will be lifted to the same level as the 'ds', if D is in the
           where clause of one of ds's element declaration.
           A declaration D in 'rhs' will be lifted to 'ds' if D is not local to other declaration
           list in 'rhs'.
    case6: In the do statement expression:(HsLetStmt ds stmts)
           A local declaration D will be lifted to the same level as the 'ds', if D is in the
           where clause of one of ds's element declaration.
           A declaration D in 'stmts' will be lifted to 'ds' if D is not local to other declaration
           list in 'stmts'.

Function name: liftOneLevel
parameters: fileName--current file name.
            mod -- the scoped abstract syntax tree of the module.
            pn  -- the function/pattern name to be lifted.
-}
-- |Driver for the one-level lifting refactoring.  Applies exactly one of the
-- six lifting cases (module / match / pattern binding / let / alt / let-stmt)
-- via a once-topdown traversal, then writes the refactored file(s); if the
-- definition ended up at the top level of an exported module, client modules
-- are refactored too.
liftOneLevel' modName fileName (inscps, mod, toks) pnt@(PNT pn _ _ )
   = if isLocalFunOrPatName pn mod
      then do (mod', ((toks',m),_))<-liftOneLevel''
              let (b, pns) = liftedToTopLevel pnt mod
              if b && modIsExported mod
                then do clients<-clientModsAndFiles modName
                        refactoredClients <- mapM (liftingInClientMod modName pns) clients
                        -- ePutStrLn (show clients)
                        writeRefactoredFiles False $ ((fileName,m),(toks',mod')):refactoredClients
                else writeRefactoredFiles False [((fileName,m), (toks',mod'))]
      else error "\nThe identifer is not a function/pattern name!"
   where
      -- Try each lifting case once, top-down; 'failure' reports when none applies.
      liftOneLevel''=runStateT (applyTP ((once_tdTP (failTP `adhocTP` liftToMod
                                                           `adhocTP` liftToMatch
                                                           `adhocTP` liftToPattern
                                                           `adhocTP` liftToLet
                                                           `adhocTP` liftToAlt
                                                           `adhocTP` liftToLetStmt))
                                         `choiceTP` failure) mod) ((toks,unmodified),(-1000,0))
       where
          --1. The definition will be lifted to top level
          liftToMod (mod@(HsModule loc name exps imps ds):: HsModuleP)
                | definingDecls [pn] (hsDecls ds) False False /=[] --False means not taking type signature into account
            =do ds'<-worker mod ds pn
                return (HsModule loc name exps imps ds')
          liftToMod _ =mzero

          --2. The definition will be lifted to the declaration list of a match
          liftToMatch (match@(HsMatch loc1 name pats rhs ds)::HsMatchP)
                | definingDecls [pn] (hsDecls ds) False False/=[]
            =do ds'<-worker match ds pn
                return (HsMatch loc1 name pats rhs ds')
          liftToMatch (match@(HsMatch loc1 name pats rhs ds)::HsMatchP)
                | definingDecls [pn] (hsDecls rhs) False False /=[]
            = doLifting1 match pn
          liftToMatch _ =mzero

          --3. The definition will be lifted to the declaration list of a pattern binding
          liftToPattern (pat@(Dec (HsPatBind loc p rhs ds))::HsDeclP)
                | definingDecls [pn] (hsDecls ds) False False /=[]
            =do ds'<-worker pat ds pn
                return (Dec (HsPatBind loc p rhs ds'))
          liftToPattern (pat@(Dec (HsPatBind loc p rhs ds))::HsDeclP)
                | definingDecls [pn] (hsDecls rhs) False False /=[]
            =doLifting2 pat pn
          liftToPattern _=mzero

          --4. The definition will be lifted to the declaration list in a let expression.
          liftToLet (letExp@(Exp (HsLet ds e))::HsExpP)
              | definingDecls [pn] (hsDecls ds) False False/=[]
            =do ds' <-worker letExp ds pn
                return (Exp (HsLet ds' e))
          liftToLet (letExp@(Exp (HsLet ds e))::HsExpP) --Attention: ds can be empty!
              | definingDecls [pn] (hsDecls e) False False /=[]
            = doLifting3 letExp pn
          liftToLet _ =mzero

          --5. The definition will be lifted to the declaration list in a alt
          liftToAlt (alt@(HsAlt loc p rhs ds)::(HsAlt (HsExpP) (HsPatP) [HsDeclP]))
              |definingDecls [pn] (hsDecls ds) False False /=[]
            =do ds'<-worker alt ds pn
                return (HsAlt loc p rhs ds')
          liftToAlt (alt@(HsAlt loc p rhs ds)::(HsAlt (HsExpP) (HsPatP) [HsDeclP]))
              |definingDecls [pn] (hsDecls rhs) False False/=[]
            =doLifting4 alt pn
          liftToAlt _=mzero

          --6. The definition will be lifted to the declaration list in a let statement.
          liftToLetStmt (letStmt@(HsLetStmt ds stmts):: (HsStmt (HsExpP) (HsPatP) [HsDeclP]))
               |definingDecls [pn] (hsDecls ds) False False/=[]
            =do ds'<-worker letStmt ds pn
                return (HsLetStmt ds' stmts)
          liftToLetStmt (letStmt@(HsLetStmt ds stmts):: (HsStmt (HsExpP) (HsPatP) [HsDeclP]))
               |definingDecls [pn] (hsDecls stmts) False False /=[]
            = doLifting5 letStmt pn
          liftToLetStmt _=mzero

          -- Reached only when no lifting case matched anywhere in the module.
          failure=idTP `adhocTP` mod
             where
               mod (m::HsModuleP)
                = error ( "Lifting this definition failed. "++
                          " This might be because that the definition to be lifted is defined in a class/instance declaration.")

          -- Shared worker for the "lift out of a where clause" cases: checks
          -- for required renamings, adds parameters for captured free
          -- variables, then moves the declaration up next to its parent.
          worker dest ds pn
            =do let (before, parent,after)=divideDecls ds pnt
                    liftedDecls=definingDecls [pn] (hsDecls parent) True False
                    declaredPns=nub $ concatMap definedPNs liftedDecls
                (_, dd)<-hsFreeAndDeclaredPNs dest
                pns<-pnsNeedRenaming inscps dest parent liftedDecls declaredPns
                if pns==[]
                  then do
                       (parent',liftedDecls',paramAdded)<-addParamsToParentAndLiftedDecl pn dd
                                                           parent liftedDecls
                       let liftedDecls''=if paramAdded then filter isFunOrPatBind liftedDecls'
                                                       else liftedDecls'
                       --True means the new decl will be at the same level with its parent.
                       dest'<-moveDecl1 (replaceDecls dest (before++parent'++after))
                               (Just (ghead "liftToMod" (definedPNs (ghead "worker" parent')))) [pn] False
                       return (hsDecls dest')
                       --parent'<-doMoving declaredPns (ghead "worker" parent) True paramAdded parent'
                       --return (before++parent'++liftedDecls''++after)
                  else askRenamingMsg pns "lifting"

          -- doLifting1..5 handle the "lift out of an rhs" variants; they share
          -- the same rename-check / add-params / move pipeline as 'worker'.
          doLifting1 dest@(HsMatch loc1 name pats parent ds) pn
            = do let liftedDecls=definingDecls [pn] (hsDecls parent) True False
                     declaredPns=nub $ concatMap definedPNs liftedDecls
                 pns<-pnsNeedRenaming inscps dest parent liftedDecls declaredPns
                 (_, dd)<-hsFreeAndDeclaredPNs dest
                 if pns==[]
                   then do (parent',liftedDecls',paramAdded)<-addParamsToParentAndLiftedDecl pn dd parent liftedDecls
                           let liftedDecls''=if paramAdded then filter isFunOrPatBind liftedDecls'
                                                           else liftedDecls'
                           moveDecl1 (HsMatch loc1 name pats parent' ds) Nothing [pn] False
                   else askRenamingMsg pns "lifting"
          doLifting2 dest@(Dec (HsPatBind loc p parent ds)) pn
            = do let liftedDecls=definingDecls [pn] (hsDecls parent) True False
                     declaredPns=nub $ concatMap definedPNs liftedDecls
                 pns<-pnsNeedRenaming inscps dest parent liftedDecls declaredPns
                 (_, dd)<-hsFreeAndDeclaredPNs dest
                 if pns==[]
                   then do (parent',liftedDecls',paramAdded)<-addParamsToParentAndLiftedDecl pn dd parent liftedDecls
                           let liftedDecls''=if paramAdded then filter isFunOrPatBind liftedDecls'
                                                           else liftedDecls'
                           moveDecl1 (Dec (HsPatBind loc p parent' ds)) Nothing [pn] False
                   else askRenamingMsg pns "lifting"
          doLifting3 dest@(Exp (HsLet ds parent)) pn
            = do let liftedDecls=definingDecls [pn] (hsDecls parent) True False
                     declaredPns=nub $ concatMap definedPNs liftedDecls
                 pns<-pnsNeedRenaming inscps dest parent liftedDecls declaredPns
                 (_, dd)<-hsFreeAndDeclaredPNs dest
                 if pns==[]
                   then do (parent',liftedDecls',paramAdded)<-addParamsToParentAndLiftedDecl pn dd parent liftedDecls
                           let liftedDecls''=if paramAdded then filter isFunOrPatBind liftedDecls'
                                                           else liftedDecls'
                           moveDecl1 (Exp (HsLet ds parent')) Nothing [pn] False
                   else askRenamingMsg pns "lifting"
          doLifting4 dest@(HsAlt loc p parent ds) pn
            = do let liftedDecls=definingDecls [pn] (hsDecls parent) True False
                     declaredPns=nub $ concatMap definedPNs liftedDecls
                 pns<-pnsNeedRenaming inscps dest parent liftedDecls declaredPns
                 (_, dd)<-hsFreeAndDeclaredPNs dest
                 if pns==[]
                   then do (parent',liftedDecls',paramAdded)<-addParamsToParentAndLiftedDecl pn dd parent liftedDecls
                           let liftedDecls''=if paramAdded then filter isFunOrPatBind liftedDecls'
                                                           else liftedDecls'
                           moveDecl1 (HsAlt loc p parent' ds) Nothing [pn] False
                   else askRenamingMsg pns "lifting"
          doLifting5 dest@(HsLetStmt ds parent) pn
            = do let liftedDecls=definingDecls [pn] (hsDecls parent) True False
                     declaredPns=nub $ concatMap definedPNs liftedDecls
                 pns<-pnsNeedRenaming inscps dest parent liftedDecls declaredPns
                 (_, dd)<-hsFreeAndDeclaredPNs dest
                 if pns==[]
                   then do (parent',liftedDecls',paramAdded)<-addParamsToParentAndLiftedDecl pn dd parent liftedDecls
                           let liftedDecls''=if paramAdded then filter isFunOrPatBind liftedDecls'
                                                           else liftedDecls'
                           moveDecl1 (HsLetStmt ds parent') Nothing [pn] False
                   else askRenamingMsg pns "lifting"

      -- Did the lift put the definition at the module top level?  If so,
      -- also report which names were declared by the lifted declarations.
      liftedToTopLevel pnt@(PNT pn _ _) (mod@(HsModule loc name exps imps ds):: HsModuleP)
        = if definingDecls [pn] (hsDecls ds) False True /=[]
           then let (_, parent,_) = divideDecls ds pnt
                    liftedDecls=definingDecls [pn] (hsDecls parent) True True
                    declaredPns = nub $ concatMap definedPNs liftedDecls
                in (True, declaredPns)
           else (False, [])

      -- Add any newly-free variables (not already declared at the target
      -- level) as parameters to both the lifted declarations and their uses
      -- in the parent.  Complex pattern bindings cannot take parameters.
      addParamsToParentAndLiftedDecl pn dd parent liftedDecls
        =do (ef,_)<-hsFreeAndDeclaredPNs parent
            (lf,_)<-hsFreeAndDeclaredPNs liftedDecls
            let newParams=((nub lf)\\ (nub ef)) \\ dd --parameters (in PName format) to be added to pn because of lifting
            if newParams/=[]
              then if (any isComplexPatBind liftedDecls)
                     then error "This pattern binding cannot be lifted, as it uses some other local bindings!"
                     else do parent'<-{-addParamsToDecls parent pn newParams True-} addParamsToParent pn newParams parent
                             liftedDecls'<-addParamsToDecls liftedDecls pn newParams True
                             return (parent', liftedDecls',True)
              else return (parent,liftedDecls,False)
--------------------------------End of Lifting-----------------------------------------

{-Refactoring : demote a function/pattern binding(simple or complex) to the declaration where it is used.
  Description: if a declaration D, say, is only used by another declaration F,say, then D can be
               demoted into the local declaration list (where clause) in F.
               So currently, D can not be demoted if more than one declaration use it.

               In a multi-module context, a top-level definition can not be demoted if it is used
               by other modules. In the case that the demoted identifier is in the hiding list of
               import declaration in a client module, it should be removed from the hiding list.

 Function name:demote
 parameters: fileName--current file name.
             mod -- the scoped abstract syntax tree of the module.
             pn  -- the function/pattern name to be demoted.
-}
-- |Driver for the demote refactoring: validates the selection, performs the
-- demotion, and, for an exported top-level definition, also updates client
-- modules (removing the name from hiding lists where necessary).
demote' modName fileName (mod,toks) pn
  =if isFunOrPatName pn mod
     then if isTopLevelPN pn && isExplicitlyExported pn mod
            then error "This definition can not be demoted, as it is explicitly exported by the current module!"
            else do (mod',((toks',m),_))<-doDemoting pn fileName mod toks
                    if isTopLevelPN pn && modIsExported mod
                      then do let demotedDecls'= definingDecls [pn] (hsDecls mod) True False
                                  declaredPns = nub $ concatMap definedPNs demotedDecls'
                              clients<-clientModsAndFiles modName
                              refactoredClients <-mapM (demotingInClientMod declaredPns) clients
                              writeRefactoredFiles False $ ((fileName,m),(toks',mod')):refactoredClients
                      else writeRefactoredFiles False [((fileName,m), (toks',mod'))]
     else error "\nInvalid cursor position!"
--Do refactoring in the client module, that is:
--a) Check whether the identifier is used in the module body
--b) If the identifier is not used but is hidden by the import declaration, then remove it from the hiding.
-- |Errors out if any demoted name is still used (or exported) by the client
-- module; otherwise strips the names from import declarations when mentioned.
demotingInClientMod pns (modName, fileName)
  = do (inscps, exps, mod ,ts) <- parseSourceFile fileName
       if any (\pn->findPN pn (hsModDecls mod) || findPN pn (hsModExports mod)) pns
          then error $ "This definition can not be demoted, as it is used in the client module '"++show modName++"'!"
          else if any (\pn->findPN pn (hsModImports mod)) pns
                 then do (mod',((ts',m),_))<-runStateT (rmItemsFromImport mod pns) ((ts,unmodified),(-1000,0))
                         return ((fileName,m), (ts',mod'))
                 else return ((fileName,unmodified), (ts,mod))
-- |Locate the construct whose declaration list defines 'pn' (module, match,
-- pattern binding, let, alt, or let statement) via a once-topdown traversal
-- and run 'doDemoting'' there, threading the token stream through StateT.
doDemoting pn fileName mod toks
 =runStateT (applyTP ((once_tdTP (failTP `adhocTP` demoteInMod
                                         `adhocTP` demoteInMatch
                                         `adhocTP` demoteInPat
                                         `adhocTP` demoteInLet
                                         `adhocTP` demoteInAlt
                                         `adhocTP` demoteInStmt)) `choiceTP` failure) mod)
            ((toks,unmodified),(-1000,0))
    where
       --1. demote from top level
       demoteInMod (mod@(HsModule loc name exps imps ds):: HsModuleP)
         |definingDecls [pn] ds False False /=[]
         = do mod'<-rmQualifier [pn] mod
              doDemoting' mod' pn
       demoteInMod _ =mzero

       --2. The demoted definition is a local decl in a match
       demoteInMatch (match@(HsMatch loc1 name pats rhs ds)::HsMatchP)
         | definingDecls [pn] ds False False/=[]
         = doDemoting' match pn
       demoteInMatch _ =mzero

       --3. The demoted definition is a local decl in a pattern binding
       demoteInPat (pat@(Dec (HsPatBind loc p rhs ds))::HsDeclP)
         | definingDecls [pn] ds False False /=[]
         = doDemoting' pat pn
       demoteInPat _ =mzero

       --4: The demoted definition is a local decl in a Let expression
       demoteInLet (letExp@(Exp (HsLet ds e))::HsExpP)
         | definingDecls [pn] ds False False/=[]
         = doDemoting' letExp pn
       demoteInLet _=mzero

       --5. The demoted definition is a local decl in a case alternative.
       demoteInAlt (alt@(HsAlt loc p rhs ds)::(HsAlt (HsExpP) (HsPatP) [HsDeclP]))
         | definingDecls [pn] ds False False /=[]
         = doDemoting' alt pn
       demoteInAlt _=mzero

       --6.The demoted definition is a local decl in a Let statement.
       demoteInStmt (letStmt@(HsLetStmt ds stmts):: (HsStmt (HsExpP) (HsPatP) [HsDeclP]))
         | definingDecls [pn] ds False False /=[]
         = doDemoting' letStmt pn
       demoteInStmt _=mzero

       -- Reached only when no case above matched anywhere in the module.
       failure=idTP `adhocTP` mod
           where
             mod (m::HsModuleP)
               = error "Refactoring failed!" --SHOULD GIVE MORE DETAILED ERROR MESSAGE
{- doDemoting' :(MonadPlus m)=>PName->[HsDeclP]->m [HsDeclP]
   parameters: t -- declaration or expression where pn is defined.
               pn -- the function/pattern name to be demoted in PName format
-}
-- |Perform the demotion at the construct 't' that locally defines 'pn':
-- the definition may only be demoted when it is not used by t's own rhs and
-- is used by exactly one sibling match/pattern binding; clashing names at the
-- target place abort with a rename request.
doDemoting' t pn
 = let origDecls=hsDecls t
       demotedDecls'=definingDecls [pn] origDecls True False
       declaredPns=nub $ concatMap definedPNs demotedDecls'
       -- Re-query by all declared names so a whole pattern binding moves together.
       demotedDecls=definingDecls declaredPns origDecls True False
   in if not (usedByRhs t declaredPns)
       then do -- find how many matches/pattern bindings (except the binding defining pn) use 'pn'
              uselist<-uses declaredPns (hsDecls t\\demotedDecls)
              {- From 'hsDecls t' to 'hsDecls t \\ demotedDecls'.
                 Bug fixed 06/09/2004 to handle direct recursive function.
               -}
              case length uselist of
                  0 ->do error "\n Nowhere to demote this function!\n"
                  1 -> --This function is only used by one friend function
                       do (f,d)<-hsFreeAndDeclaredPNs demotedDecls
                          -- remove demoted declarations
                          --Without updating the token stream.
                          let ds=foldl (flip removeTypeSig) (hsDecls t\\demotedDecls) declaredPns
                          --get those variables declared at where the demotedDecls will be demoted to
                          dl <-mapM (flip declaredNamesInTargetPlace ds) declaredPns
                          --make sure free variable in 'f' do not clash with variables in 'dl',
                          --otherwise do renaming.
                          let clashedNames=filter (\x-> elem (pNtoName x) (map pNtoName f)) $ (nub.concat) dl
                          --rename clashed names to new names created automatically,update TOKEN STREAM as well.
                          if clashedNames/=[]
                             then error ("The identifier(s):" ++ showEntities showPNwithLoc clashedNames ++
                                         ", declared in where the definition will be demoted to, will cause name clash/capture"
                                         ++" after demoting, please do renaming first!")
                                  --ds'<-foldM (flip (autoRenameLocalVar True)) ds clashedNames
                             else --duplicate demoted declarations to the right place.
                                  do ds''<-duplicateDecls declaredPns origDecls
                                     return (replaceDecls t ds'')
                  _ ->error "\nThis function/pattern binding is used by more than one friend bindings\n"
       else error "This function can not be demoted as it is used in current level!\n"
    where
       ---find how many matches/pattern bindings use 'pn'-------
       -- Each hit contributes one "Once" marker; the caller only counts them.
       uses pns
         = applyTU (stop_tdTU (failTU `adhocTU` usedInMatch
                                      `adhocTU` usedInPat))
          where
            -- A match counts unless it is itself one of the demoted definitions.
            usedInMatch (match@(HsMatch _ (PNT pname _ _) _ _ _)::HsMatchP)
               | isNothing (find (==pname) pns) && any (flip findPN match) pns
               =return ["Once"]
            usedInMatch _ =mzero

            usedInPat (pat@(Dec (HsPatBind _ p _ _)):: HsDeclP)
                | hsPNs p `intersect` pns ==[] && any (flip findPN pat) pns
               =return ["Once"]
            usedInPat _=mzero

       -- duplicate demotedDecls to the right place (the outer most level where it is used).
       duplicateDecls pns decls
          = do applyTP (once_tdTP (failTP `adhocTP` dupInMatch
                                          `adhocTP` dupInPat)) decls
              --error (show decls' ++ "\n" ++ prettyprint decls')
              -- rmDecl (ghead "moveDecl3" pns) False =<<foldM (flip rmTypeSig) decls' pns
            where
              dupInMatch (match@(HsMatch loc1 name pats rhs ds)::HsMatchP)
                | any (flip findPN match) pns && not (any (flip findPN name) pns)
                = --If not fold parameters.
                  moveDecl pns match False decls False
                  -- If fold parameters.
                  --foldParams pns match decls
              dupInMatch _ =mzero

              dupInPat (pat@(Dec (HsPatBind loc p rhs ds))::HsDeclP)
                 |any (flip findPN pat) pns && not (any (flip findPN p) pns)
                 = moveDecl pns pat False decls False
              dupInPat _ =mzero

              demotedDecls=definingDecls pns decls True False
       ---------------------------------------------------------------------
       -- Names declared inside any match/pattern binding whose rhs uses 'pn'
       -- (i.e. at the place the definition will be demoted to).
       declaredNamesInTargetPlace :: (Term t, MonadPlus m)=>PName->t->m [PName]
       declaredNamesInTargetPlace pn=applyTU (stop_tdTU (failTU
                                                `adhocTU` inMatch
                                                `adhocTU` inPat))
          where
            inMatch (match@(HsMatch loc1 name pats rhs ds)::HsMatchP)
               | findPN pn rhs
                 =(return.snd)=<<hsFDsFromInside match
            inMatch _ =mzero

            inPat (pat@(Dec (HsPatBind loc p rhs ds)):: HsDeclP)
              |findPN pn rhs
                 =(return.snd)=<<hsFDsFromInside pat
            inPat _=mzero
-- |Does the right-hand side of 't' mention any of the given names?
-- Used to reject a demotion when the definition is still needed at the
-- current level.
class (Term t) =>UsedByRhs t where
    usedByRhs:: t->[PName]->Bool

instance UsedByRhs HsExpP where
    -- NOTE(review): partial -- only let-expressions are matched here; any
    -- other expression constructor would be a pattern-match failure.
    -- Presumably callers only apply this at let-expressions; verify.
    usedByRhs (Exp (HsLet ds e)) pns = or $ map (flip findPN e) pns

instance UsedByRhs HsAltP where
    usedByRhs (HsAlt _ _ rhs _) pns =or $ map (flip findPN rhs) pns

instance UsedByRhs HsStmtP where
    usedByRhs (HsLetStmt _ stmt) pns =or $ map (flip findPN stmt) pns

instance UsedByRhs HsMatchP where
    usedByRhs (HsMatch loc1 fun pats rhs ds) pns =or $ map (flip findPN rhs) pns

instance UsedByRhs  HsDeclP where
    usedByRhs (Dec (HsPatBind loc p rhs ds)) pns =or $ map (flip findPN rhs) pns
    usedByRhs _ pn=False

instance UsedByRhs HsModuleP where
    -- A module has no rhs of its own, so demoting from the top level is
    -- never blocked by this check.
    usedByRhs mod pns=False
{- foldParams:remove parameters in the demotedDecls if possible
   parameters: pn -- the function/pattern name to be demoted in PName format
               match--where the demotedDecls will be demoted to
               demotedDecls -- the declarations to be demoted.

   example:
    module Test where        demote 'sq'      module Test where
    sumSquares x y                ===>        sumSquares x y =(sq 0) + (sq y)
      = sq x 0+ sq x y                           where sq y=x ^ y
    sq x y=x^y
-}
--PROBLEM: TYPE SIGNATURE SHOULD BE CHANGED.
--- TEST THIS FUNCTION!!!
-- |Demote the declarations into the where clause of 'match', folding away any
-- parameter that is applied to the same argument at every call site (the
-- argument is substituted into the demoted body instead).  Falls back to a
-- plain move when the call sites disagree or the demoted binding has more
-- than one match.
foldParams pns (match@(HsMatch loc1 name pats rhs ds)::HsMatchP) decls
     =do let matches=concatMap matchesInDecls demotedDecls
             pn=ghead "foldParams" pns --pns /=[]
         params<-allParams pn rhs []
         if (length.nub.map length) params==1 -- have same number of param
             && ((length matches)==1) -- only one 'match' in the demoted declaration
           then do let patsInDemotedDecls=(patsInMatch.(ghead "foldParams")) matches
                       subst=mkSubst patsInDemotedDecls params
                       fstSubst=map fst subst
                       sndSubst=map snd subst
                   -- Drop the foldable arguments from the calls in the parent rhs.
                   rhs'<-rmParamsInParent pn sndSubst rhs
                   ls<-mapM hsFreeAndDeclaredPNs sndSubst
                   -- newNames contains the newly introduced names to the demoted decls---
                   let newNames=(map pNtoName (concatMap fst ls)) \\ (map pNtoName fstSubst)
                   --There may be name clashing because of introducing new names.
                   clashedNames<-getClashedNames fstSubst newNames (ghead "foldParams" matches)
                   {- --automatic renaming
                   demotedDecls'<-foldM (flip (autoRenameLocalVar True)) demotedDecls clashedNames
                   demotedDecls''<- foldM replaceExpWithUpdToks demotedDecls' subst
                   --remove substituted parameters in demoted declarations
                   demotedDecls'''<-rmParamsInDemotedDecls fstSubst demotedDecls'' -}
                   decls' <- foldInDemotedDecls pns clashedNames subst decls
                   let demotedDecls''' = definingDecls pns decls' True False
                   moveDecl pns (HsMatch loc1 name pats rhs' ds) False decls' False
                   -- Type signatures are dropped: they no longer match the folded arity.
                   return (HsMatch loc1 name pats rhs' (ds++(filter (not.isTypeSig) demotedDecls''')))
           else do moveDecl pns match False decls True
                   return (HsMatch loc1 name pats rhs (ds++demotedDecls)) -- no parameter folding
    where
       matchesInDecls ((Dec (HsFunBind loc matches))::HsDeclP)=matches
       matchesInDecls x = []

       patsInMatch ((HsMatch loc1 name pats rhs ds)::HsMatchP)
         =pats

       demotedDecls=definingDecls pns decls True False

       -- Apply the substitution (and any forced renamings) inside the
       -- matches that define the demoted names.
       foldInDemotedDecls pns clashedNames subst decls
          = applyTP (stop_tdTP (failTP `adhocTP` worker)) decls
         where
          worker (match@(HsMatch loc1 (PNT pname _ _) pats rhs ds)::HsMatchP)
            | isJust (find (==pname) pns)
            = do match' <- foldM (flip (autoRenameLocalVar True)) match clashedNames
                 match'' <- foldM replaceExpWithUpdToks match' subst
                 rmParamsInDemotedDecls (map fst subst) match''
          worker _ = mzero

       ------Get all of the parameters supplied to pn ---------------------------
       {- eg. sumSquares x1 y1 x2 y2 = rt x1 y1 + rt x2 y2
              rt x y = x+y
              demote 'rt' to 'sumSquares',
              'allParams pn rhs []' returns [[x1,x2],[y1,y2]]
              where pn is 'rt' and rhs is 'rt x1 y1 + rt x2 y2'
       -}
       -- Repeatedly peels the first argument off every call of pn, collecting
       -- one list per parameter position.
       allParams pn rhs initial -- pn: demoted function/pattern name.
         =do p<-getOneParam pn rhs
             --putStrLn (show p)
             if p/=[] then do rhs'<-rmOneParam pn rhs
                              allParams pn rhs' (initial++[p])
                      else return initial
         where
            getOneParam pn
               =applyTU (stop_tdTU (failTU `adhocTU` worker))
               where
                 worker (Exp (HsApp e1 e2))
                   |(expToPN e1==pn) =return (rmLocs [e2])
                 worker _ =mzero
            rmOneParam pn
              =applyTP (stop_tdTP (failTP `adhocTP` worker))
              where
                worker (Exp (HsApp e1 e2 ))
                  |expToPN e1==pn =return e1
                worker _ =mzero

       -----------remove parameters in demotedDecls-------------------------------
       rmParamsInDemotedDecls ps
         =applyTP (once_tdTP (failTP `adhocTP` worker))
            where worker ((HsMatch loc1 name pats rhs ds)::HsMatchP)
                    = do let pats'=filter (\x->not ((patToPN x /=defaultPN) &&
                                           elem (patToPN x) ps)) pats
                         pats'<-update pats pats' pats
                         return (HsMatch loc1 name pats' rhs ds)

       ----------remove parameters in the parent functions' rhs-------------------
       --Attention: PNT i1 _ _==PNT i2 _ _ = i1 =i2
       rmParamsInParent pn es
         =applyTP (full_buTP (idTP `adhocTP` worker))
            where worker exp@(Exp (HsApp e1 e2))
                    | findPN pn e1 && elem e2 es
                    =update exp e1 exp
                  worker (exp@(Exp (HsParen e1)))
                    |pn==expToPN e1
                    =update exp e1 exp
                  worker x =return x

       getClashedNames oldNames newNames (match::HsMatchP)
         = do (f,d)<-hsFDsFromInside match
              ds'<-mapM (flip hsVisiblePNs match) oldNames
              -- return clashed names
              return (filter (\x->elem (pNtoName x) newNames) --Attention: nub
                                  ( nub (d `union` (nub.concat) ds')))

       ----- make Substitutions between formal and actual parameters.-----------------
       -- Only simple variable patterns whose actual argument is identical at
       -- every call site become substitutions.
       mkSubst pats params
           = catMaybes (zipWith (\x y ->if (patToPN x/=defaultPN) && (length (nub y)==1)
                           then Just (patToPN x,(ghead "mkSubst") y)
                           else Nothing) pats params)

       --substitute an old expression by new expression
       replaceExpWithUpdToks decls subst
          = applyTP (full_buTP (idTP `adhocTP` worker)) decls
                 where worker (e::HsExpP)
                         |(expToPN e/=defaultPN) && (expToPN e)==(fst subst)
                             =update e (snd subst) e
                       worker x=return x
-- |True iff 'pn' is a locally declared name that names a function or
-- pattern binding within 'scope'.
isLocalFunOrPatName pn scope
  | isLocalPN pn = isFunOrPatName pn scope
  | otherwise    = False
-- |Remove the signature for 'pn' from a declaration list: a signature
-- declaring only 'pn' is dropped entirely; a multi-name signature keeps its
-- other names.  Non-signature declarations pass through untouched.
removeTypeSig :: PName -> [HsDeclP] -> [HsDeclP]
removeTypeSig pn = concatMap dropOrPrune
  where
    dropOrPrune sig@(Dec (HsTypeSig loc is c tp))
      | definesTypeSig pn sig && length is == 1
      = []                                        -- sole signee: drop the signature
      | otherwise
      = [Dec (HsTypeSig loc (filter ((/= pn) . pNTtoPN) is) c tp)]
    dropOrPrune other = [other]
-- |Split a declaration list into (before, parent, after) around the first
-- declaration containing the given PNT.  'parent' is a singleton list holding
-- that declaration; when no declaration contains the PNT the whole input is
-- returned as 'before' with empty 'parent' and 'after'.
divideDecls::[HsDeclP]->PNT->([HsDeclP],[HsDeclP],[HsDeclP])
divideDecls ds pnt =
  case break (findPNT pnt) ds of
    (_,      [])           -> (ds, [], [])
    (before, parent:after) -> (before, [parent], after)
| mpickering/HaRe | old/refactorer/RefacMoveDef.hs | bsd-3-clause | 46,373 | 1 | 24 | 16,651 | 9,884 | 5,048 | 4,836 | -1 | -1 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Trustworthy #-}
#endif
-- |
-- Module : Data.ByteString.Short
-- Copyright : (c) Duncan Coutts 2012-2013
-- License : BSD-style
--
-- Maintainer : duncan@community.haskell.org
-- Stability : stable
-- Portability : ghc only
--
-- A compact representation suitable for storing short byte strings in memory.
--
-- In typical use cases it can be imported alongside "Data.ByteString", e.g.
--
-- > import qualified Data.ByteString as B
-- > import qualified Data.ByteString.Short as B
-- > (ShortByteString, toShort, fromShort)
--
-- Other 'ShortByteString' operations clash with "Data.ByteString" or "Prelude"
-- functions however, so they should be imported @qualified@ with a different
-- alias e.g.
--
-- > import qualified Data.ByteString.Short as B.Short
--
module Data.ByteString.Short (
-- * The @ShortByteString@ type
ShortByteString,
-- ** Memory overhead
-- | With GHC, the memory overheads are as follows, expressed in words and
-- in bytes (words are 4 and 8 bytes on 32 or 64bit machines respectively).
--
-- * 'ByteString' unshared: 9 words; 36 or 72 bytes.
--
-- * 'ByteString' shared substring: 5 words; 20 or 40 bytes.
--
-- * 'ShortByteString': 4 words; 16 or 32 bytes.
--
-- For the string data itself, both 'ShortByteString' and 'ByteString' use
-- one byte per element, rounded up to the nearest word. For example,
-- including the overheads, a length 10 'ShortByteString' would take
-- @16 + 12 = 28@ bytes on a 32bit platform and @32 + 16 = 48@ bytes on a
-- 64bit platform.
--
-- These overheads can all be reduced by 1 word (4 or 8 bytes) when the
-- 'ShortByteString' or 'ByteString' is unpacked into another constructor.
--
-- For example:
--
-- > data ThingId = ThingId {-# UNPACK #-} !Int
-- > {-# UNPACK #-} !ShortByteString
--
-- This will take @1 + 1 + 3@ words (the @ThingId@ constructor +
-- unpacked @Int@ + unpacked @ShortByteString@), plus the words for the
-- string data.
-- ** Heap fragmentation
-- | With GHC, the 'ByteString' representation uses /pinned/ memory,
-- meaning it cannot be moved by the GC. This is usually the right thing to
-- do for larger strings, but for small strings using pinned memory can
-- lead to heap fragmentation which wastes space. The 'ShortByteString'
-- type (and the @Text@ type from the @text@ package) use /unpinned/ memory
-- so they do not contribute to heap fragmentation. In addition, with GHC,
-- small unpinned strings are allocated in the same way as normal heap
-- allocations, rather than in a separate pinned area.
-- * Conversions
toShort,
fromShort,
pack,
unpack,
-- * Other operations
empty, null, length, index,
) where
import Data.ByteString.Short.Internal
import Prelude ()
| markflorisson/hpack | testrepo/bytestring-0.10.4.1/Data/ByteString/Short.hs | bsd-3-clause | 2,977 | 0 | 4 | 704 | 116 | 100 | 16 | 10 | 0 |
-- | This is program use uzbl embedded in window to render webpage.
-- Just simple model demo for view, haven't handle event or else.
--
-- You need install uzbl (git clone git://github.com/Dieterbe/uzbl.git) first.
--
-- How to use:
-- ./Uzbl default open Google page.
-- ./Uzbl url will open url you input
--
module Main where
import Graphics.UI.Gtk
import System.Process
import System.Environment
-- |Entry point: create a translucent GTK window, embed a GTK socket in it and
-- launch an external @uzbl-core@ process that plugs into that socket, showing
-- either the URL given on the command line or Google by default.
main :: IO ()
main = do
  -- Init.
  initGUI
  -- Get program arguments.
  args <- getArgs
  let url = case args of
        [arg] -> arg -- get user input url
        _ -> "http://www.google.com" -- set default url
  -- Create window.
  window <- windowNew
  windowSetDefaultSize window 900 600
  windowSetPosition window WinPosCenter
  windowSetOpacity window 0.8 -- this function need window-manager support Alpha channel in X11
  -- Create socket.
  socket <- socketNew
  widgetShow socket -- must show before add to parent
  window `containerAdd` socket
  -- Get socket id.
  socketId <- fmap (show . fromNativeWindowId) $ socketGetId socket
  -- Start uzbl-core process; NOTE(review): the returned ProcessHandle is
  -- discarded, so the child is never reaped or killed explicitly.
  runCommand $ "uzbl-core -s " ++ socketId ++ " -u " ++ url
  -- Show.
  window `onDestroy` mainQuit
  widgetShowAll window
  mainGUI
| k0001/gtk2hs | gtk/demo/embedded/Uzbl.hs | gpl-3.0 | 1,279 | 0 | 13 | 313 | 209 | 110 | 99 | 23 | 2 |
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
module T14885b where
-- |Class with a default method whose default signature re-quantifies 'b';
-- the 'b' in the body is bound by the default signature's @forall@
-- (ScopedTypeVariables + DefaultSignatures).
class Foo1 a where
  foo1 :: forall b. a -> b -> b
  default foo1 :: forall b. a -> b -> b
  foo1 _ x = (x :: b)

-- The same class, but produced through a Template Haskell declaration
-- splice, so the quantification must survive quoting and splicing too.
$([d| class Foo2 a where
        foo2 :: forall b. a -> b -> b
        default foo2 :: forall b. a -> b -> b
        foo2 _ x = (x :: b)
    |])
| sdiehl/ghc | testsuite/tests/th/T14885b.hs | bsd-3-clause | 403 | 0 | 10 | 130 | 81 | 47 | 34 | 12 | 0 |
{-# htermination elemIndex :: Bool -> [Bool] -> Maybe Int #-}
import List
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/List_elemIndex_6.hs | mit | 74 | 0 | 3 | 13 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module THDerive where
import Language.Haskell.TH
-- |Build a standalone deriving declaration (@deriving instance C T@) for the
-- given class and type names, with an empty instance context.
derive :: Name -> Name -> Dec
derive class_ type_ = StandaloneDerivD [] instanceHead
  where
    instanceHead = ConT class_ `AppT` ConT type_
| vladfi1/hs-misc | THDerive.hs | mit | 194 | 0 | 9 | 33 | 61 | 33 | 28 | 6 | 1 |
{-# LANGUAGE TemplateHaskell #-}
-- | Tests for @importify file@ command.
module Test.File
( spec
) where
import Universum
import Data.List (sort)
import Path (Abs, Dir, File, Path, Rel, dirname, fileExtension,
filename, fromRelDir, fromRelFile, fromAbsFile, mkRelFile,
(-<.>), (</>))
import Path.IO (listDir)
import System.Wlog (Severity)
import Test.Hspec (Spec, describe, it, runIO, shouldBe, xit)
import Importify.Main (importifyFileContent)
import Importify.Path (testDataPath)
-- | Top-level spec: discover every directory under the test-data root
-- and build one test group per subfolder.
spec :: Spec
spec = do
    (testFolders, _) <- runIO $ listDir testDataPath
    describe "file:unused" $
        mapM_ (makeTestGroup . (testDataPath </> ) . dirname) testFolders
-- | Build a 'describe' group for one test-case folder, creating one
-- test per @.hs@ file found inside it (sorted for stable ordering).
makeTestGroup :: Path Rel Dir -> Spec
makeTestGroup testCasesPath = do
    (_, testDirPaths) <- runIO $ listDir testCasesPath
    let testHsOnly = sort
                   $ filter ((== ".hs") . fileExtension) testDirPaths
    describe ("subfolder: " ++ fromRelDir (dirname testCasesPath)) $
        mapM_ makeTest testHsOnly
-- | Build one hspec test for a single source file: importify the file
-- and compare against its golden counterpart. Files listed in
-- 'pendingTests' are registered as pending ('xit') instead of run.
makeTest :: Path Abs File -> Spec
makeTest testCasePath = do
    (result, expected) <- runIO $ loadTestData testCasePath
    let testType = if filename testCasePath `elem` pendingTests then xit else it
    testType (fromRelFile $ filename testCasePath) $ result `shouldBe` expected
-- | File names whose tests are known-failing and therefore registered
-- as pending rather than executed.
pendingTests :: [Path Rel File]
pendingTests = [ $(mkRelFile "01-ImportBothUsedQualified.hs") -- Importify can't modify source yet
               ]
-- | Load (actual, expected) sources for one test case: run importify on
-- the file and read its @.golden@ sibling.
-- NOTE(review): the partial @Right@ pattern makes a Left result from
-- 'importifyFileContent' crash with a pattern-match failure rather than
-- a descriptive test error.
loadTestData :: Path Abs File -> IO (Text, Text)
loadTestData testCasePath = do
    goldenExamplePath <- testCasePath -<.> ".golden"
    goldenExampleSrc <- readFile (fromAbsFile goldenExamplePath)
    Right importifiedSrc <- importifyFileContent testCasePath
    return (importifiedSrc, goldenExampleSrc)
| serokell/importify | test/hspec/Test/File.hs | mit | 1,939 | 0 | 14 | 524 | 510 | 276 | 234 | 38 | 2 |
module Main where
import Codec.Picture
import PathTracer
import System.Random
-- | Entry point: seed a fresh random generator, render the 500x300
-- demo image, and write it to @./result.png@.
main :: IO ()
main = do
  gen <- newStdGen
  writePng "./result.png" (render gen 500 300)
-- | Render the demo scene to an image of the given width and height,
-- threading the random generator through 'generateFoldImage' so every
-- pixel gets an independent split of the generator.
render :: StdGen -> Int -> Int -> Image PixelRGB8
render rndGen width height = snd $ generateFoldImage r rndGen width height
  where
    -- Per-pixel step: split the generator, use one half for this pixel
    -- and fold the other half onward for the remaining pixels.
    r rg x y = (rg', pixel)
      where pixel = colorToPixelRGB8 $ renderer rg'' x y
            (rg', rg'') = split rg
            renderer = mkRenderer demoScene width height sphericalPerspective 1
-- | Convert a floating-point color to an 8-bit RGB pixel: each channel
-- is clamped to [0, 1], scaled to the Pixel8 range, and rounded.
colorToPixelRGB8 :: Color -> PixelRGB8
colorToPixelRGB8 (Color r g b) = PixelRGB8 (toByte r) (toByte g) (toByte b)
  where maxVal = fromIntegral (maxBound :: Pixel8)
        toByte = round . (* maxVal) . max 0 . min 1
-- single line comment
{-
- multi-line comment
-
- to use interactive ghc session for it
- $ ghci
- > :l haskell01
-}
import Data.List
import System.IO
-- | Constant with an explicit type annotation.
always5 :: Int
always5 = 5

-- | Previously had no signature; its type was only pinned to 'Int' by
-- later uses together with 'always5', so make the signature explicit.
always7 :: Int
always7 = 7
{-
- uses type inference ; this is what inspired golang
- Int -2^63 to 2^63
- Integer is unbounded whole number, size depends on memory
- Float
- Double with precision upto 11points
- Bool True False
- Char
- Tuple
-}
-- | Largest representable 'Int' on this platform (missing top-level
-- signatures made these rely on the inline annotation; be explicit).
maxInt :: Int
maxInt = maxBound

-- | Smallest representable 'Int' on this platform.
minInt :: Int
minInt = minBound
{-
- can reload this module on 'ghci' by running
- > :r
- > maxInt
-
- this will print value to maxInt
-}
-- sum over an arithmetic range; evaluates to 500500
sumOfNums = sum[1..1000]
sumOfNums = sum[1..1000]
{-
- to checkout what goes on with 'sqrt' function
- *Main> :t sqrt
-}
num9 = 9 :: Int
-- 'sqrt' needs a Floating argument, so convert the Int with 'fromIntegral'
sqrtOf9 = sqrt (fromIntegral num9)
-- more built-in math functions
piVal = pi
ePow9 = exp 9
logOf9 = log 9
squared9 = 9 ** 2
truncateVal = truncate 9.999
roundVal = round 9.999
ceilingVal = ceiling 9.999
floorVal = floor 9.999
--- Also sin,cos,tan,asin,atan,acos,sinh,tanh,cosh,asinh,atanh,acosh
trueAndFalse = True && False
trueOrFalse = True || False
notTrue = not(True)
--- can check help on operators as well; *Main> :t (+)
--- haskell02.hs is list onwards
| abhishekkr/tutorials_as_code | talks-articles/languages-n-runtimes/haskell/DerekBanas.Haskell.LYAH/haskell01.hs | mit | 1,449 | 0 | 7 | 301 | 224 | 129 | 95 | 25 | 1 |
{-# LANGUAGE TupleSections #-}
module ProjectEuler.Problem137
( problem
) where
import Data.Maybe
import Data.Word
import Math.NumberTheory.Powers.Squares
import ProjectEuler.Types
-- | Project Euler problem 137, solved; the answer is computed by 'result'.
problem :: Problem
problem = pureProblem 137 Solved result
{-
I have no clue for this one, but 74049690 looks like a searchable number,
which leads me to http://oeis.org/A081018 and I suspect this is the answer...
Despite that let's do this properly: first let's just denote the series A(x).
A(x) = x F_1 + x^2 F_2 + x^3 F_3 + ...
x A(x) = x^2 F_1 + x^3 F_2 + ...
Therefore:
A(x) + x A(x)
= x F_1 + x^2 (F_2 + F_1) + x^3 (F_3 + F_2) + ...
= x F_1 + x^2 F_3 + x^3 F_4 + ...
= x F_2 + x^2 F_3 + x^3 F_4 + ... (since F_1 = F_2)
= (A(x) - x F_1) / x
from which we can conclude that: A(x) = -x / (x+x^2-1).
Let A(x) = N, we get N x^2 + (N+1) x - N = 0, solution x will be rational
if and only if the equation's discriminant is rational:
b ^ 2 - 4ac = 5N^2 + 2N + 1, which must be a perfect square.
-}
-- some early experiment - this does give us 74049690, as stated in the problem.
{-
an early attempt to find the sequence, this results in:
[(0,1),(2,5),(15,34),(104,233),(714,1597),(4895,10946),(33552,75025),(229970,514229),(1576239,3524578),(10803704,24157817),(74049690,165580141)]
-}
-- | Brute-force search kept for reference: the first 11 values of n for
-- which 5n^2 + 2n + 1 is a perfect square, paired with that square root
-- (see the derivation in the comment above).
_doSearch :: [(Integer, Integer)]
_doSearch =
  take 11
    $ mapMaybe
        (\(v,r) -> (v,) <$> exactSquareRoot r)
        [ (n, 1 + n*(2 + n*5)) | n <- [0..] ]
-- | The Fibonacci sequence starting 0, 1, 1, 2, ... built as a
-- self-referential scan.
fibs :: [Word64]
fibs = 0 : scanl (+) 1 fibs

-- | The n-th "golden nugget": per OEIS A081018 it equals
-- F(2n) * F(2n+1).
fibGoldenNugget :: Int -> Word64
fibGoldenNugget n = fibs !! k * fibs !! (k + 1)
  where k = 2 * n

-- | The 15-th golden nugget, the problem's answer.
result :: Word64
result = fibGoldenNugget 15
| Javran/Project-Euler | src/ProjectEuler/Problem137.hs | mit | 1,678 | 0 | 12 | 372 | 245 | 140 | 105 | 21 | 1 |
{-# LANGUAGE TupleSections #-}
module RL.Generator.Items (ItemConfig(..), itemsGenerator, generateChestItems, randomItemAppearances) where
-- generate random items in dungeon
import RL.Generator
import RL.Item
import RL.Map
import RL.Random
import Control.Monad.Reader (ask)
import Data.Map (Map)
import Data.Ratio
import Data.Maybe (isJust, catMaybes, fromJust, maybeToList, isNothing)
import qualified Data.List as L
import qualified Data.Map as M
-- | Tunables for random item generation.
data ItemConfig = ItemConfig {
    maxItems :: Int,                       -- ^ stop once this many items were generated
    minItems :: Int,                       -- ^ lower bound (currently unused, see TODOs below)
    itemGenChance :: Rational,             -- ^ chance each attempt actually yields an item
    itemAppearances :: Map ItemType String -- ^ per-game randomized item descriptions
}
-- | Keep generating until the generator's counter reaches 'maxItems'.
instance GenConfig ItemConfig where
    generating conf = (< maxItems conf) <$> getCounter
-- | Generate at most one new floor item and record it on the level,
-- returning the level's (possibly extended) item list.
itemsGenerator :: Generator ItemConfig DLevel [(Point, Item)]
itemsGenerator = do
    lvl <- getGData
    -- Nothing means the generation roll failed; keep the list as-is.
    items' <- maybe (items lvl) (:items lvl) <$> generateFloorItem
    setGData (lvl { items = items' })
    return items'
-- generate items in a chest
-- TODO minItems
-- | Generate at most one item for a chest at the given difficulty,
-- prepending it to the accumulated chest contents.
-- TODO minItems is not yet honoured.
generateChestItems :: Difficulty -> Generator ItemConfig [Item] [Item]
generateChestItems d = do
    is <- getGData
    i <- generateItem (typeRarity d) (itemRarity d)
    let is' = maybeToList i ++ is
    setGData is'
    return is'
-- generate an item on the floor
-- | Generate at most one item and pick a random tile for it. The tile
-- must be passable, not a stair, and not already carry a feature.
-- Returns Nothing when either the generation roll or tile pick fails.
generateFloorItem :: Generator ItemConfig DLevel (Maybe (Point, Item))
generateFloorItem = do
    lvl <- getGData
    let tileF p t = not (isStair t) && isPassable t && isNothing (L.lookup p (features lvl))
    i <- generateItem (typeRarity (depth lvl)) (itemRarity (depth lvl)) -- TODO minItems
    p <- randomTile tileF lvl
    return ((,) <$> p <*> i)
-- generate an item using specified rarity functions
-- | Roll against the configured generation chance; on success, pick a
-- random item using the supplied type- and item-rarity weightings and
-- apply the configured appearance overrides to it.
generateItem :: (ItemType -> Rational) -> (Item -> Rational) -> Generator ItemConfig a (Maybe Item)
generateItem f g = do
    conf <- ask
    success <- randomChance (itemGenChance conf)
    if not success
        then return Nothing
        else fmap (updateAppearance (itemAppearances conf)) <$> randomItem f g
-- | Shuffle the descriptions among the potions and among the scrolls so
-- that item appearances are randomized once per game.
randomItemAppearances :: MonadRandom m => m (Map ItemType String)
randomItemAppearances = do
    -- pair each item's type with a description drawn from the shuffle
    let f is = zip (map itemType is) . map itemDescription
    potApps <- M.fromList . f potions <$> shuffle potions
    scrApps <- M.fromList . f scrolls <$> shuffle scrolls
    return (M.union potApps scrApps)
-- | Pick an item type by rarity, then pick a concrete item of that type
-- by item rarity. Returns Nothing when either pick fails.
randomItem :: MonadRandom m => (ItemType -> Rational) -> (Item -> Rational) -> m (Maybe Item)
randomItem f g = do
    t <- pickRarity f itemTypes
    case t of
        Just (Weapon _) -> pickRarity g weapons
        Just (Armor _) -> pickRarity g armors
        Just (Potion _) -> pickRarity g potions
        Just (Scroll _) -> pickRarity g scrolls
        -- was "otherwise ->": that binds a fresh variable shadowing
        -- Prelude.otherwise and only works by accident; use a wildcard
        _ -> return Nothing
-- | Replace an item's description with the randomized appearance for
-- its type, if one is configured; otherwise leave the item unchanged.
updateAppearance :: Map ItemType String -> Item -> Item
updateAppearance apps i =
    case M.lookup (itemType i) apps of
        Just desc -> i { itemDescription = desc }
        Nothing   -> i
-- rarity for item types at depth
-- | Rarity of each item category at a given depth. Weapon, armor,
-- bandage and draught rates are depth-independent; potions and scrolls
-- become more common the deeper you go.
typeRarity :: Difficulty -> ItemType -> Rational
typeRarity d t = case t of
    (Weapon _) -> 1 % 5
    (Armor _)  -> 1 % 7
    (Potion _) -> potionChance
    (Scroll _) -> scrollChance
    Bandage    -> 0 % 10
    Draught    -> 0 % 10
  where
    (potionChance, scrollChance)
        | d == 1    = (1 % 10, 1 % 20)
        | d <= 3    = (1 % 5, 1 % 10)
        | otherwise = (1 % 3, 1 % 4)
-- item rarity at depth
itemRarity :: Difficulty -> Item -> Rational
itemRarity d (Item "Mace" _) = (1 % 10)
itemRarity d (Item "Dagger" _) = (4 % 10)
itemRarity d (Item "Quarterstaff" _) = (2 % 10)
itemRarity d (Item "Sword" _) = (1 % 10)
itemRarity d (Item "Two-Handed Sword" _) = (1 % 20)
itemRarity d (Item "Bow" _) = (1 % 10)
itemRarity d (Item "Arrow" _) = (2 % 10)
itemRarity d (Item "Leather Armor" _) = (4 % 10)
itemRarity d (Item "Chain Mail" _) = (2 % 10)
itemRarity d (Item "Plate Mail" _) = (1 % 10)
itemRarity d (Item "Full Plate" _) = (1 % 50)
itemRarity d (Item "Small Shield" _) = (2 % 10)
itemRarity d (Item "Tower Shield" _) = (1 % 20)
itemRarity d (Item _ (Potion t)) = potionRarities t
itemRarity d (Item _ (Scroll t)) = scrollRarities t
itemRarity d otherwise = (0 % 10)
-- | Relative rarity of each potion type; healing is the most common,
-- the harmful potions slightly rarer, everything else at a flat rate.
potionRarities :: PotionType -> Rational
potionRarities pt = case pt of
    Healing   -> 1 % 8
    Acid      -> 1 % 12
    Darkness  -> 1 % 12
    Confusion -> 1 % 12
    _         -> 1 % 10
-- | All scroll types are currently equally likely.
scrollRarities :: ScrollType -> Rational
scrollRarities _ = (1 % 10)
| MichaelMackus/hsrl | RL/Generator/Items.hs | mit | 4,964 | 0 | 15 | 1,484 | 1,852 | 942 | 910 | 116 | 16 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
module Xml.TasteometerSpec (spec) where
import Data.Text (Text)
import Lastfm
import qualified Lastfm.Tasteometer as Taste
import Test.Hspec
import Text.Xml.Lens
import SpecHelper
-- | Exercise Tasteometer.compare with every combination of user and
-- artist-list arguments, checking each response against 'xmlQuery'.
spec :: Spec
spec = do
  it "compare" $
    publicly (Taste.compare (user "smpcln") (user "MCDOOMDESTROYER"))
      `shouldHaveXml`
      xmlQuery
  it "compare" $
    publicly (Taste.compare (user "smpcln") (artists ["enduser", "venetian snares"]))
      `shouldHaveXml`
      xmlQuery
  it "compare" $
    publicly (Taste.compare (artists ["enduser", "venetian snares"]) (user "smpcln"))
      `shouldHaveXml`
      xmlQuery
  it "compare" $
    publicly (Taste.compare (artists ["enduser", "venetian snares"]) (artists ["enduser", "venetian snares"]))
      `shouldHaveXml`
      xmlQuery
-- | Fold into the comparison score text of a Tasteometer XML response:
-- root/comparison/result/score.
xmlQuery :: Fold Document Text
xmlQuery = root.node "comparison".node "result".node "score".text
| supki/liblastfm | test/api/Xml/TasteometerSpec.hs | mit | 969 | 0 | 14 | 207 | 276 | 149 | 127 | 29 | 1 |
module Model
( Item (..)
, Element, Floor
, parseFloor
) where
import Data.Maybe (catMaybes)
import Data.Set (Set)
import qualified Data.Set as S
import Parser
-- | A thing carried between floors: a generator or a microchip, each
-- tagged with its element.
data Item
  = Generator Element
  | Microchip Element
  deriving (Show, Eq, Ord)
-- | Parse a comma-separated list of items into a set.
parseItems :: Parser (Set Item)
parseItems =
  S.fromList <$> parseList sepP parseItem
  where
    sepP = parseString ", "
-- | Parse a single item such as @a hydrogen generator@ or
-- @and a lithium-compatible microchip@.
parseItem :: Parser Item
parseItem = do
  -- the list separator may or may not carry a leading "and "
  parseEither (parseString "and a ") (parseString "a ")
  parseEither parseGenerator parseMicrochip
  where
    parseGenerator = do
      el <- parseElement
      parseString " generator"
      return $ Generator el
    parseMicrochip = do
      el <- parseElement
      parseString "-compatible microchip"
      return $ Microchip el
-- | An element name such as @hydrogen@, kept as a plain string.
type Element = String

-- | An element is just a run of letters.
parseElement :: Parser Element
parseElement = parseAlphas
-- | A floor name such as @first@, kept as a plain string.
type Floor = String

-- | Parse one input line: the floor's name and the set of items on it
-- (possibly empty, written as "nothing relevant.").
parseFloor :: Parser (Floor, Set Item)
parseFloor = do
  -- renamed from "floor", which shadowed Prelude.floor
  floorName <- parseFloorIntro
  items <- parseEither nothingP parseItems
  return (floorName, items)
  where
    nothingP = parseString "nothing relevant." >> pure S.empty
-- | Parse the leading "The <name> floor contains " of a line and return
-- the floor name.
parseFloorIntro :: Parser Floor
parseFloorIntro = do
  parseString "The "
  -- renamed from "floor", which shadowed Prelude.floor
  floorName <- parseAlphas
  parseString " floor contains "
  return floorName
| CarstenKoenig/AdventOfCode2016 | Day11/Model.hs | mit | 1,265 | 0 | 10 | 309 | 356 | 181 | 175 | 44 | 1 |
module Main where
import Control.Applicative
import Control.Monad
import Data.IORef
import Test.Framework (defaultMain)
import Test.Framework.Providers.HUnit (hUnitTestToTests)
import Test.HUnit
import System.FilePath.Glob (glob)
import Language.Egison.Types
import Language.Egison.Core
import Language.Egison.Primitives
import Language.Egison
-- | Discover every @.egi@ file under @test/@ and run each one as a
-- HUnit test case via test-framework. (Added the previously missing
-- top-level type signature.)
main :: IO ()
main = do
  testCases <- glob "test/**/*.egi"
  defaultMain $ hUnitTestToTests $ test $ map runTestCase testCases
-- | Run one Egison source file as a test: load it, bind its top-level
-- definitions, then deeply evaluate each Test expression. Any Egison
-- error is reported through the HUnit assertion.
runTestCase :: FilePath -> Test
runTestCase file = TestLabel file . TestCase $ do
  env <- initialEnv
  assertEgisonM $ do
    exprs <- loadFile file
    let (bindings, tests) = foldr collectDefsAndTests ([], []) exprs
    env' <- recursiveBind env bindings
    forM_ tests $ evalExprDeep env'
 where
  -- a Left result becomes a failing assertion; Right is success
  assertEgisonM :: EgisonM a -> Assertion
  assertEgisonM m = fromEgisonM m >>= assertString . either show (const "")
  -- split the file's top-level expressions into definitions and tests
  collectDefsAndTests (Define name expr) (bindings, tests) =
    ((name, expr) : bindings, tests)
  collectDefsAndTests (Test expr) (bindings, tests) =
    (bindings, expr : tests)
  collectDefsAndTests _ r = r
| beni55/egison | test/UnitTest.hs | mit | 1,145 | 0 | 16 | 233 | 351 | 185 | 166 | 30 | 3 |
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-dodgy-exports -fno-warn-unused-imports #-}
-- | Reexports "Data.Void.Compat"
-- from a globally unique namespace.
module Data.Void.Compat.Repl.Batteries (
module Data.Void.Compat
) where
import "this" Data.Void.Compat
| haskell-compat/base-compat | base-compat-batteries/src/Data/Void/Compat/Repl/Batteries.hs | mit | 278 | 0 | 5 | 31 | 29 | 22 | 7 | 5 | 0 |
{-# LANGUAGE FlexibleInstances #-}
module ParserSpec where
import Text.Parsec
import Text.Parsec.Pos
import Test.Hspec
import Parser
import AST
-- | Dummy source position shared by all expected ASTs; the parser tests
-- only compare structure, not positions.
u :: SourcePos
u = newPos "test" 0 0

-- | Shorthand for an identifier expression at the dummy position.
v :: String -> Expr
v = IdentExpr u

-- | Shorthand for an integer constant at the dummy position.
c :: Integer -> Expr
c = Constant u
-- | Expected AST for @a || b && c[d]@ (indexing desugars to @*(c + d)@).
exprSample :: Expr
exprSample = BinaryPrim u "||" (v "a")
               (BinaryPrim u "&&" (v "b")
                 (UnaryPrim u "*" (BinaryPrim u "+" (v "c") (v "d"))))
-- | Expected AST for @{int a, b[20]; int *c; a = c + d;}@.
stmtSample1 :: Stmt
stmtSample1 = CompoundStmt u
    [DeclStmt u [(DeclInt, Variable u "a"), (DeclInt, Sequence u "b" 20)],
     DeclStmt u [(DeclPointer DeclInt, Variable u "c")],
     ExprStmt u $ AssignExpr u (v "a") (BinaryPrim u "+" (v "c") (v "d"))]
-- | Expected AST for an if without an else branch wrapping the samples.
stmtSample2 :: Stmt
stmtSample2 = IfStmt u exprSample stmtSample1 (EmptyStmt u)
-- | A for-loop source snippet wrapping the sample if statement.
longTestCase1 :: String
longTestCase1 =
    "for(i = 0; i < n; i = i + 1)"
        ++ "if(a || b && c[d]){int a, b[20]; int *c; a = c + d;}"
-- | A while-loop source snippet over the sample compound statement.
longTestCase2 :: String
longTestCase2 =
    "while(a || b && c[d])"
        ++ "{int a, b[20]; int *c; a = c+d;}"

-- | A full function definition whose body is 'longTestCase2'.
longTestCase3 :: String
longTestCase3 =
    "void f(int a, int b, int *c, int d){" ++ longTestCase2 ++ "}"
-- | Parser unit tests grouped by grammar production: assignment
-- expressions, comma expressions, statements, external declarations,
-- and the desugarings (unary minus, &*, *&, indexing).
spec :: Spec
spec = do
  describe "Parser" $ do
    it "Assign Expr" $ do
      parse assignExpr "" "a = 3" `shouldBe`
        (Right $ AssignExpr u (v "a") (c 3))
      parse assignExpr "" "a || b + 3" `shouldBe`
        (Right $ BinaryPrim u "||" (v "a") (BinaryPrim u "+" (v "b") (c 3)))
      parse assignExpr "" "a || b && c[d]" `shouldBe` (Right exprSample)
      parse assignExpr "" "1 + &a * b" `shouldBe`
        (Right $ BinaryPrim u "+" (c 1)
                  (BinaryPrim u "*" (UnaryPrim u "&" (v "a")) (v "b")))
      parse assignExpr "" "1 + &a * b" `shouldBe`
        (Right $ BinaryPrim u "+" (c 1)
                  (BinaryPrim u "*" (UnaryPrim u "&" (v "a")) (v "b")))
      parse assignExpr "" "a = f(b, c*d)" `shouldBe`
        (Right $ AssignExpr u (v "a") $ ApplyFunc u "f"
                  [(v "b"),
                   BinaryPrim u "*" (v "c") (v "d")])
      parse assignExpr "" "a <= f(b) + *c" `shouldBe`
        (Right $ BinaryPrim u "<=" (v "a") $ BinaryPrim u "+"
                  (ApplyFunc u "f" [(v "b")])
                  (UnaryPrim u "*" (v "c")))
    it "Expression" $ do
      parse expr "" "a = 3, f(b)" `shouldBe`
        (Right $ MultiExpr u [(AssignExpr u (v "a") (c 3)),
                              (ApplyFunc u "f" [(v "b")])])
    it "Statement" $ do
      parse stmt "" ";" `shouldBe` (Right $ EmptyStmt u)
      parse stmt "" "a;" `shouldBe` (Right $ ExprStmt u $ (v "a"))
      parse stmt "" "{int a, b[20]; int *c; a = c + d;}" `shouldBe` (Right stmtSample1)
      parse stmt "" "if(a) ; else ;" `shouldBe`
        (Right $ IfStmt u (v "a") (EmptyStmt u) (EmptyStmt u))
      parse stmt "" "if(a || b && c[d]){int a, b[20]; int *c; a = c + d;}" `shouldBe`
        (Right $ stmtSample2)
      -- a for loop desugars to an initializer plus a while loop
      parse stmt "" longTestCase1 `shouldBe`
        (Right $ CompoundStmt u
                  [ExprStmt u (AssignExpr u (v "i") (c 0)),
                   WhileStmt u (BinaryPrim u "<" (v "i") (v "n"))
                     (CompoundStmt u
                       [stmtSample2,
                        ExprStmt u $ AssignExpr u (v "i")
                                      (BinaryPrim u "+" (v "i") (c 1))])])
      parse stmt "" longTestCase2 `shouldBe`
        (Right $ WhileStmt u exprSample stmtSample1)
      -- double indexing desugars to nested pointer arithmetic
      parse stmt "" "return c[2 + a][b || c];" `shouldBe`
        (Right $ ReturnStmt u $
          UnaryPrim u "*"
            (BinaryPrim u "+"
              (UnaryPrim u "*" (BinaryPrim u "+" (v "c")
                                 (BinaryPrim u "+" (c 2) (v "a"))))
              (BinaryPrim u "||" (v "b") (v "c"))))
    it "Program" $ do
      parse externalDecl "" "int a, *b, c[100];" `shouldBe`
        (Right $ Decl u [(DeclInt, Variable u "a"),
                         (DeclPointer DeclInt, Variable u "b"),
                         (DeclInt, Sequence u "c" 100)])
      parse externalDecl "" "void func(int a, int *b);" `shouldBe`
        (Right $ FuncPrototype u DeclVoid "func"
                  [(DeclInt, "a"), (DeclPointer DeclInt, "b")])
      parse externalDecl "" longTestCase3 `shouldBe`
        (Right $ FuncDef u DeclVoid "f" [(DeclInt, "a"),
                                         (DeclInt, "b"),
                                         (DeclPointer DeclInt, "c"),
                                         (DeclInt, "d")]
                  (CompoundStmt u [WhileStmt u exprSample stmtSample1]))
    it "Syntax Sugar" $ do
      -- unary minus desugars to multiplication by -1
      parse assignExpr "" "-a" `shouldBe`
        (Right $ BinaryPrim u "*" (c (-1)) (v "a"))
      -- &* and *& cancel out
      parse assignExpr "" "&(*a)" `shouldBe` (Right $ (v "a"))
      parse assignExpr "" "*(&a)" `shouldBe` (Right $ (v "a"))
| yu-i9/HaSC | test/ParserSpec.hs | mit | 5,252 | 0 | 27 | 2,099 | 1,730 | 888 | 842 | 103 | 1 |
module AI.DemoNeuron
( Neuron(..)
, L2Neuron(..)
, ReducedNeuron(..)
, NeuronWeights
, Values
, ActivationFunction
, ActivationFunction'
, sigmoidNeuron
, tanhNeuron
, recluNeuron
, sigmoid, sigmoid'
, tanh, tanh'
, reclu, reclu'
, l1Norm, l2Norm
) where
import Numeric.LinearAlgebra
import Numeric.LinearAlgebra.Data
-- | An activation function and (same shape) its derivative.
type ActivationFunction = Double -> Double
type ActivationFunction' = Double -> Double

-- | Marker type for the distance-based (L2) neuron.
data L2Neuron = L2Neuron deriving (Show)

-- | A neuron defined by an activation function, its derivative, and a
-- human-readable name used by the Show instance.
data ReducedNeuron = ReducedNeuron { activation :: ActivationFunction
                                   , activation' :: ActivationFunction'
                                   , description :: String
                                   }

-- | Weight and input vectors, and the scalar a neuron produces.
type NeuronWeights = Vector Double
type Values = Vector Double
type Activation = Double
-- | A Neuron type has two functions -- evaluate and evaluate',
-- both of which are functions from NeuronWeights to input values
-- to doubles.
-- | A neuron evaluates weights against inputs; evaluate' is the
-- derivative counterpart used during training.
class (Show a) => Neuron a where
  evaluate :: a -> NeuronWeights -> Values -> Activation
  evaluate' :: a -> NeuronWeights -> Values -> Activation
-- | A reduced neuron shows as its description string.
instance Show (ReducedNeuron) where
  show = description
-- | Apply the neuron's activation (resp. its derivative) to the dot
-- product of weights and inputs.
instance Neuron (ReducedNeuron) where
  evaluate neuron weights values = activation neuron (dot weights values)
  evaluate' neuron weights values = activation' neuron (dot weights values)
-- | The L2 neuron measures Euclidean distance; its "derivative" slot is
-- the size-checked dot product.
instance Neuron (L2Neuron) where
  evaluate _ = l2Norm
  evaluate' _ = l1Norm
-- | Our provided neuron types: sigmoid, tanh, reclu
-- | Neuron using the logistic sigmoid and its derivative.
sigmoidNeuron :: ReducedNeuron
sigmoidNeuron = ReducedNeuron sigmoid sigmoid' "sigmoid"

-- | Neuron using hyperbolic tangent and its derivative.
tanhNeuron :: ReducedNeuron
tanhNeuron = ReducedNeuron tanh tanh' "tanh"

-- | Neuron using the rectified-linear (softplus) activation.
recluNeuron :: ReducedNeuron
recluNeuron = ReducedNeuron reclu reclu' "reclu"
-- | Compute a dot product, but ensure that the dimensions of both
-- vectors are the same size.
-- | Dot product that errors out when the vector dimensions differ.
-- NOTE(review): despite the name this is a size-checked dot product,
-- not an L1 norm — confirm intent before renaming.
l1Norm :: NeuronWeights -> Values -> Double
l1Norm w v = if size w /= size v
                 then error "Neuron NeuronWeights and values don't align"
                 else dot w v
-- | The sigmoid activation function, a standard activation function defined
-- on the range (0, 1).
-- | Logistic sigmoid, 1 / (1 + e^(-t)); maps the reals onto (0, 1).
sigmoid :: Double -> Activation
sigmoid t = recip (1 + exp (negate t))
-- | The derivative of the sigmoid function conveniently can be computed in
-- terms of the sigmoid function.
-- | Derivative of the sigmoid, expressed through the sigmoid itself:
-- s(t) * (1 - s(t)).
sigmoid' :: Double -> Activation
sigmoid' t = let s = sigmoid t in s * (1 - s)
-- | The hyperbolic tangent activation function is provided in Prelude. Here
-- we provide the derivative. As with the sigmoid function, the derivative
-- of tanh can be computed in terms of tanh.
-- | Derivative of tanh, expressed through tanh itself: 1 - tanh(t)^2.
tanh' :: Double -> Activation
tanh' t = let s = tanh t in 1 - s ^ 2
-- | The rectified linear activation function. This is a more "biologically
-- accurate" activation function that still retains differentiability.
-- | Softplus ("rectified linear") activation: log(1 + e^t); smooth and
-- differentiable everywhere.
reclu :: Double -> Activation
reclu = log . (1 +) . exp
-- | The derivative of the rectified linear activation function is just the
-- sigmoid.
-- | The derivative of softplus is exactly the sigmoid.
reclu' :: Double -> Activation
reclu' = sigmoid
-- | Calculate the distance between a SOM neuron and an input
-- | Euclidean distance between a weight vector and an input vector.
l2Norm :: NeuronWeights -> Values -> Activation
l2Norm a b = sqrt (sum [d ^ 2 | d <- zipWith (-) (toList a) (toList b)])
| qzchenwl/LambdaNet | AI/DemoNeuron.hs | mit | 3,294 | 0 | 11 | 830 | 688 | 382 | 306 | 63 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
module Crypto.Boring.Internal.Context
( EVP_CIPHER_CTX
, EVP_CIPHER
, EVP_MD_CTX
, EVP_MD
, HMAC_CTX
, ErrorCallback
, cryptoCtx
) where
import qualified Data.Map as M
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Context as C
import qualified Language.C.Types as C
import Foreign
import Crypto.Boring.Internal.Prelude
-- Empty data declarations: phantom tags standing in for the opaque
-- BoringSSL C structs of the same names, used in the inline-c type table.
data EVP_MD_CTX
data EVP_MD
data EVP_CIPHER_CTX
data EVP_CIPHER
data HMAC_CTX

-- | Shape of the C error callback: message buffer, its length, and an
-- opaque user-data pointer, returning a C int status.
type ErrorCallback = Ptr C.CChar -> C.CSize -> Ptr () -> IO C.CInt
-- | inline-c context for the crypto bindings: the base, bytestring,
-- foreign-pointer and function-pointer contexts, extended with a type
-- table mapping the C struct names above to their Haskell phantom tags.
cryptoCtx :: C.Context
cryptoCtx = C.baseCtx <> C.bsCtx <> C.fptrCtx <> C.funCtx <> mempty
  { C.ctxTypesTable = M.fromList
    [ ( C.TypeName "EVP_MD_CTX", [t|EVP_MD_CTX|] )
    , ( C.TypeName "EVP_MD", [t|EVP_MD|] )
    , ( C.TypeName "EVP_CIPHER_CTX", [t|EVP_CIPHER_CTX|] )
    , ( C.TypeName "EVP_CIPHER", [t|EVP_CIPHER|] )
    , ( C.TypeName "HMAC_CTX", [t|HMAC_CTX|] )
    ]
  }
} | chpatrick/boring-crypto | src/Crypto/Boring/Internal/Context.hs | mit | 950 | 0 | 12 | 184 | 269 | 171 | 98 | -1 | -1 |
module Odds where
-- | Keep only the odd numbers of a list, preserving order.
odds :: [Int] -> [Int]
odds xs = [x | x <- xs, odd x]
{-# LANGUAGE DeriveDataTypeable #-}
-- Handles indentation in the keymaps. Includes:
-- * (TODO) Auto-indentation to the previous lines indentation
-- * Tab-expansion
-- * Shifting of the indentation for a region of text
module Yi.Buffer.Indent where
import Yi.Buffer.Basic
import Yi.Buffer.Misc
import Yi.Buffer.HighLevel
import Yi.Prelude
import Yi.Buffer.Normal
import Yi.Buffer.Region
import Prelude ()
import Data.Char
import Data.List (span, length, sort, nub, break, reverse, filter, takeWhile, dropWhile)
import Yi.String
{- |
Return either a \t or the number of spaces specified by tabSize in the
IndentSettings. Note that if you actually want to insert a tab character
(for example when editing makefiles) then you should use: @insertB '\t'@.
-}
-- | The text a tab key should insert under the current settings: a run
-- of spaces when tabs are expanded, otherwise a literal tab. (To insert
-- a real tab regardless, use @insertB '\t'@.)
tabB :: BufferM String
tabB = do
  settings <- indentSettingsB
  if expandTabs settings
    then return (replicate (tabSize settings) ' ')
    else return "\t"
{-|
Retrieve the current indentation settings for the buffer.
-}
-- | The indentation settings of the buffer's current mode.
indentSettingsB :: BufferM IndentSettings
indentSettingsB = withModeB (\Mode {modeIndentSettings = x} -> return x)
{-|
A specialisation of 'autoIndentHelperB'.
This is the most basic and the user is encouraged to
specialise 'autoIndentHelperB' on their own.
-}
-- | Default auto-indent: hints are the indents of the lines above plus,
-- from the previous line, its indent, its indent + shiftWidth, and the
-- column of its last unclosed opening bracket.
autoIndentB :: IndentBehaviour -> BufferM ()
autoIndentB indentBehave = do
  autoIndentHelperB fetchPreviousIndentsB indentsOfString indentBehave
  where
    -- Returns the indentation hints considering the given
    -- string as the line above the current one.
    -- The hints added are:
    --   the indent of the given string,
    --   that indent plus the mode's shift width,
    --   and the offset of the last open bracket, if any.
    indentsOfString :: String -> BufferM [Int]
    indentsOfString input =
      do indent <- indentOfB input
         bracketHints <- lastOpenBracketHint input
         indentSettings <- indentSettingsB
         return $ indent : (indent + shiftWidth indentSettings) : bracketHints
{-|
This takes two arguments the first is a function to
obtain indentation hints from lines above the current one.
The second is a function to obtain a set of indentation hints
from the previous line. Both of these are in the 'BufferM'
monad although the second seems like it is unnecessary.
However we must take into account the length of tabs which come
from the the tab settings and hence we must be in the 'BufferM'
monad.
To get the straightforward behaviour of the indents of all previous
lines until one of them has zero indent call this with:
@autoIndentHelperB fetchPreviousIndentsB (fmap (: []) indentOfB)@
However commonly we wish to have something more interesting for
the second argument, in particular we commonly wish to have the
last opening bracket of the previous line as well as its indent.
-}
autoIndentHelperB :: BufferM [ Int ]
                  -- ^ Action to fetch hints from previous lines
                  -> (String -> BufferM [ Int ])
                  -- ^ Action to calculate hints from previous line
                  -> IndentBehaviour
                  -- ^ Sets the indent behaviour,
                  -- see 'Yi.Buffer.IndentBehaviour' for a description
                  -> BufferM ()
autoIndentHelperB getUpwards getPrevious indentBehave =
  -- collect hints from above (without moving the point), from the
  -- previous line, and cycle the current line's indent through them
  do upwardHints <- savingExcursionB getUpwards
     previousLine <- getNextLineB Backward
     previousHints <- getPrevious previousLine
     let allHints = (upwardHints ++ previousHints)
     cycleIndentsB indentBehave allHints
-- | Cycles through the indentation hints. It does this without
-- requiring to set/get any state. We just look at the current
-- indentation of the current line and moving to the largest
-- indent that is
-- | Move the current line's indent to the next hint in the requested
-- direction. Stateless: only the line's current indent and the (sorted,
-- de-duplicated) hint list determine the target.
cycleIndentsB :: IndentBehaviour -> [Int] -> BufferM ()
cycleIndentsB _ [] = return ()
cycleIndentsB indentBehave indents =
  do currentLine <- readLnB
     currentIndent <- indentOfB currentLine
     indentToB $ chooseIndent currentIndent (sort $ nub $ indents)
  where
    -- Is the function to choose the indent from the given current
    -- indent to the given list of indentation hints.
    chooseIndent :: Int -> [ Int ] -> Int
    chooseIndent =
      case indentBehave of
        IncreaseCycle -> chooseIncreaseCycle
        DecreaseCycle -> chooseDecreaseCycle
        IncreaseOnly -> chooseIncreaseOnly
        DecreaseOnly -> chooseDecreaseOnly
    -- Choose the indentation hint which is one more than the current
    -- indentation hint unless the current is the largest or larger than
    -- all the indentation hints in which case choose the smallest
    -- (which will often be zero)
    chooseIncreaseCycle :: Int -> [ Int ] -> Int
    chooseIncreaseCycle currentIndent hints =
      -- If 'above' is null we wrap to the head of 'below', which is the
      -- smallest hint; otherwise we move to the hint just above current.
      head $ (above ++ below)
      where
        (below, above) = span (<= currentIndent) hints
    -- NOTE: the helpers below assume the hint list is sorted and
    -- non-empty.
    -- Choose the indentation hint one less than the current indentation
    -- unless the current indentation is the smallest (usually zero)
    -- in which case choose the largest indentation hint.
    chooseDecreaseCycle :: Int -> [ Int ] -> Int
    chooseDecreaseCycle currentIndent hints =
      -- If 'below' is null we wrap to the largest hint; otherwise we
      -- take the largest hint not greater than the current indent.
      last $ (above ++ below)
      where
        (below, above) = span (< currentIndent) hints
    -- Increase to the next larger hint, or stay put if none is larger.
    chooseIncreaseOnly :: Int -> [ Int ] -> Int
    chooseIncreaseOnly currentIndent hints =
      head $ filter (> currentIndent) hints ++ [ currentIndent ]
    -- Decrease to the next smaller hint, or stay put if none is smaller.
    chooseDecreaseOnly :: Int -> [ Int ] -> Int
    chooseDecreaseOnly currentIndent hints =
      last $ currentIndent : filter (< currentIndent) hints
{-|
A function generally useful as the first argument to
'autoIndentHelperB'. This searches the lines above
the current line for the indentations of each line
until we get to a line which has no indentation
*and* is not empty. Indicating that we have reached
the outer scope.
-}
-- | Indents of the lines above the point, walking upwards until a line
-- with zero indent that is not blank (i.e. the enclosing outer scope)
-- or the top of the buffer is reached.
fetchPreviousIndentsB :: BufferM [Int]
fetchPreviousIndentsB =
  -- Move up one line,
  do moveOffset <- lineMoveRel (-1)
     line <- readLnB
     indent <- indentOfB line
     -- So if we didn't manage to move upwards
     -- or the current offset was zero *and* the line
     -- was non-blank then we return just the current
     -- indent (it might be the first line but indented some.)
     if moveOffset == 0 ||
        ( indent == 0 &&
          any (not . isSpace) line )
        then return [ indent ]
        else (indent :) <$> fetchPreviousIndentsB
{-| An application of 'autoIndentHelperB' which adds more
indentation hints using the given keywords.
The offsets of the first set of keywords are used as hints.
For the second set of keywords it is not the offsets of the
keywords themselves but the offset of the first non-white
characters after the keywords.
In addition to the keyword hints we also do the same as the
default ('autoIndentB') which is to use any non-closed
opening brackets as hints.
-}
autoIndentWithKeywordsB :: [ String ]   -- ^ Keywords to act as hints
                        -> [ String ]   -- ^ Keywords to act as offset hints
                        -> IndentBehaviour
                        -> BufferM ()
autoIndentWithKeywordsB firstKeywords secondKeywords =
  autoIndentHelperB fetchPreviousIndentsB getPreviousLineHints
  where
    -- Hints from the previous line: its indent, indent + 2, unclosed
    -- bracket columns, keyword columns, and columns after keywords.
    getPreviousLineHints :: String -> BufferM [ Int ]
    getPreviousLineHints input =
      do indent <- indentOfB input
         bracketHints <- lastOpenBracketHint input
         keyHintsOne <- keywordHints firstKeywords input
         keyHintsTwo <- keywordAfterHints secondKeywords input
         return $ indent : (indent + 2) : ( bracketHints ++
                                            keyHintsOne ++
                                            keyHintsTwo )
-- | Returns the position of the last opening bracket on the
-- line which is not closed on the same line.
-- Note that if we have unmatched parentheses such as "( ]"
-- then we may not get the correct answer, but in that case
-- then arguably we don't really care if we get the correct
-- answer (at least if we get it wrong the user may notice
-- their error).
-- We return a list here as it's a convenient way of returning
-- no hint in the case of there being no non-closed bracket
-- and normally such a hint will be part of a list of hints
-- anyway.
-- NOTE: this could be easily modified to return the indentations
-- of *all* the non-closed opening brackets. But I think this is
-- not what you generally want.
-- TODO: we also do not care whether or not the bracket is within
-- a string or escaped. If someone feels up to caring about that
-- by all means please fix this.
-- | Column (tab-aware, via 'spacingOfB') of the last opening bracket on
-- the line that is not closed on the same line; empty list when there
-- is none. Mismatched bracket kinds ("( ]") and brackets inside strings
-- are not detected — see the notes in the surrounding comments.
lastOpenBracketHint :: String -> BufferM [ Int ]
lastOpenBracketHint input =
  case getOpen 0 $ reverse input of
    Nothing -> return []
    Just s -> (: []) <$> spacingOfB s
  where
    -- Scan the reversed line with a nesting count: a closing bracket
    -- increments it, an opening bracket decrements it, and an opening
    -- bracket seen at count zero is the unmatched one — return the
    -- (still reversed) prefix that precedes it, whose width is the
    -- bracket's column.
    getOpen :: Int -> String -> Maybe String
    -- We of course return nothing, there is no bracket to give a hint.
    getOpen _ [] = Nothing
    getOpen i (c : rest)
      -- If it is opening and we have no closing to match
      -- then we return the rest of the line
      | isOpening c && i == 0 = Just rest
      -- If i is not zero then we have matched one of the
      -- closing parentheses and we can decrease the nesting count.
      | isOpening c = getOpen (i - 1) rest
      -- If the character is a closing bracket then we must increase
      -- the nesting count
      | isClosing c = getOpen (i + 1) rest
      -- If it is just a normal character forget about it and move on.
      | otherwise = getOpen i rest
    isOpening :: Char -> Bool
    isOpening '(' = True
    isOpening '[' = True
    isOpening '{' = True
    isOpening _ = False
    isClosing :: Char -> Bool
    isClosing ')' = True
    isClosing ']' = True
    isClosing '}' = True
    isClosing _ = False
-- | Returns the offsets of all the given keywords
-- within the given string. This is potentially useful
-- as providing indentation hints.
-- | Tab-aware columns of every occurrence of one of the given keywords
-- within the given line, for use as indentation hints.
keywordHints :: [ String ] -> String -> BufferM [ Int ]
keywordHints keywords =
  getHints 0
  where
    -- Walk the line keeping a running column @i@; tab widths are
    -- accounted for through 'spacingOfB'.
    getHints :: Int -> String -> BufferM [ Int ]
    getHints _i [] = return []
    getHints i input
      -- nothing but white space remains: no further hints
      | null rest = return []
      -- leading white space: measure its width and continue after it
      | not $ null white = do spaceSize <- spacingOfB white
                              getHints (i + spaceSize) rest
      -- a keyword starts here: its column is a hint
      -- (was: any (== initNonWhite) keywords; `elem` is the idiom)
      | initNonWhite `elem` keywords = (i :) <$> whiteRestHints
      -- an ordinary word: skip over it
      | otherwise = whiteRestHints
      where
        -- leading non-white word and what follows it
        (initNonWhite, whiteRest) = break isSpace input
        -- leading white space and what follows it
        (white, rest) = span isSpace input
        -- hints from after the leading word; only valid when there is
        -- no leading white space
        whiteRestHints = getHints (i + length initNonWhite) whiteRest
-- | Returns the offsets of anything that isn't white space 'after'
-- a keyword on the given line.
-- This is essentially then the same as 'keywordHints' except that
-- for each keyword on the input rather than return the offset at
-- the start of the keyword we return the offset of the first non-white
-- character after the keyword.
-- | Like 'keywordHints', but each hint is the tab-aware column of the
-- first non-white character *after* a keyword, rather than of the
-- keyword itself. Keywords that end the line contribute no hint.
keywordAfterHints :: [ String ] -> String -> BufferM [ Int ]
keywordAfterHints keywords =
  getHints 0
  where
    -- Walk the line keeping a running column @i@; tab widths are
    -- accounted for through 'spacingOfB'.
    getHints :: Int -> String -> BufferM [ Int ]
    getHints _i [] = return []
    getHints i input
      -- leading white space: measure its width and continue after it
      | not $ null indentation = do indent <- spacingOfB indentation
                                    getHints (i + indent) nonWhite
      -- a keyword that is not the last word on the line: the column of
      -- the next non-white character is a hint
      -- (was: any (== key) keywords; `elem` is the idiom)
      | key `elem` keywords
        && not (null afterwhite) = do indent <- spacingOfB white
                                      let hint = i + length key + indent
                                      tailHints <- getHints hint afterwhite
                                      return $ hint : tailHints
      -- no hint here; retry from just after this word
      | otherwise = afterKeyHints
      where
        -- leading white space and the rest
        (indentation, nonWhite) = span isSpace input
        -- the word at the current position and what follows it; only
        -- meaningful when 'indentation' is null
        (key, afterkey) = break isSpace input
        -- the white space after the word and what follows that
        (white, afterwhite) = span isSpace afterkey
        -- hints from after the current word
        afterKeyHints = getHints (i + length key) afterkey
{-|
  Returns the indentation of a given string. Note that this depends
  on the current indentation settings.
-}
indentOfB :: String -> BufferM Int
indentOfB input = spacingOfB (takeWhile isSpace input)
{-| Returns the length of a given string taking into account the
    white space and the indentation settings.
-}
spacingOfB :: String -> BufferM Int
spacingOfB text = do
  settings <- indentSettingsB
  -- A tab is as wide as the current tab-size setting; everything else is 1.
  let charWidth c = if c == '\t' then tabSize settings else 1
  return $ sum $ map charWidth text
{-| Indents the current line to the given indentation level.
    In addition moves the point according to where it was on the
    line originally. If we were somewhere within the indentation
    (ie at the start of the line or on an empty line) then we want
    to just go to the end of the (new) indentation.
    However if we are currently pointing somewhere within the text
    of the line then we wish to remain pointing to the same character.
-}
indentToB :: Int -> BufferM ()
indentToB level = do
  indentSettings <- indentSettingsB
  lineRegion <- regionOfB Line
  modifyRegionClever (rePadString indentSettings level) lineRegion
-- | Indent the current line as much as the previous non-blank line.
indentAsPreviousB :: BufferM ()
indentAsPreviousB = do
  previousLine <- getNextNonBlankLineB Backward
  indentOfB previousLine >>= indentToB
-- | Insert a newline at point and indent the new line as the previous one.
newlineAndIndentB :: BufferM ()
newlineAndIndentB = do
  newlineB
  indentAsPreviousB
-- | Set the padding of the string to newCount, filling in tabs if
-- expandTabs is set in the buffer's IndentSettings.
rePadString :: IndentSettings -> Int -> String -> String
rePadString indentSettings newCount input
  | newCount <= 0             = stripped
  | expandTabs indentSettings = replicate newCount ' ' ++ stripped
  | otherwise                 = replicate nTabs '\t' ++ replicate nSpaces ' ' ++ stripped
  where
    -- The line with its original leading white space removed.
    stripped         = dropWhile isSpace input
    -- How many whole tabs fit in the requested width, and the spaces left over.
    (nTabs, nSpaces) = newCount `divMod` tabSize indentSettings
-- | Shifts right (or left if num is negative) num times, filling in tabs if
-- expandTabs is set in the buffer's IndentSettings.
indentString :: IndentSettings -> Int -> String -> String
indentString indentSettings numOfShifts input = rePadString indentSettings newCount input
  where
    -- Width of the existing indentation; we'll assume nothing but tabs and spaces.
    currentWidth = sum [ if c == '\t' then tabSize indentSettings else 1
                       | c <- takeWhile isSpace input ]
    newCount     = currentWidth + shiftWidth indentSettings * numOfShifts
-- | Increases the indentation on the region by the given amount of shiftWidth.
-- Blank lines are left untouched.
shiftIndentOfRegion :: Int -> Region -> BufferM ()
shiftIndentOfRegion shiftCount region = do
  indentSettings <- indentSettingsB
  let shiftLine l = if null l then l else indentString indentSettings shiftCount l
  modifyRegionB (mapLines shiftLine) region
  moveTo $ regionStart region
  firstNonSpaceB
-- | Remove all leading white space from every line in the region.
deleteIndentOfRegion :: Region -> BufferM ()
deleteIndentOfRegion region = modifyRegionB (mapLines $ dropWhile isSpace) region
-- | Return the number of spaces at the beginning of the line, up to the point.
indentOfCurrentPosB :: BufferM Int
indentOfCurrentPosB = do
  originalPoint <- pointB
  moveToSol
  startOfLine <- pointB
  -- Restore the point before measuring; the region still spans sol..point.
  moveTo originalPoint
  spacingOfB =<< readRegionB (mkRegion originalPoint startOfLine)
| codemac/yi-editor | src/Yi/Buffer/Indent.hs | gpl-2.0 | 18,405 | 10 | 15 | 4,575 | 2,564 | 1,350 | 1,214 | 196 | 9 |
{-# LANGUAGE ScopedTypeVariables #-}
module TestHelper (qc, mustFail) where
import Prelude
import Data.List
import Test.QuickCheck as QC
import Test.HUnit as HU
import Test.QuickCheck.Monadic (monadicIO, run)
import Control.Exception (try, evaluate, SomeException)
-- | Run a QuickCheck property as an HUnit assertion: quiet output,
-- 1000 test cases, and the QuickCheck report as the failure message.
qc :: QC.Testable prop => prop -> HU.Assertion
qc test = do
  let args = stdArgs { chatty = False, maxSuccess = 1000 }
  result <- quickCheckWithResult args test
  case result of
    Success _ _ _ -> return ()
    _             -> assertFailure (output result)
-- | A property that succeeds only if forcing @x@ throws an exception;
-- otherwise fail with the given message.
mustFail :: a -> String -> Property
mustFail x msg = monadicIO $ run $ do
  outcome <- try (evaluate x)
  case outcome of
    Left (_ :: SomeException) -> return () -- expected failure occurred.
    Right _                   -> error msg
| Erdwolf/autotool-bonn | src/Haskell/Blueprint/TestHelper.hs | gpl-2.0 | 755 | 0 | 13 | 161 | 248 | 132 | 116 | 21 | 2 |
import Data.List (maximumBy, minimumBy, transpose, group, sort)
import Data.Function (on)
import Control.Monad (liftM)
-- | Read the puzzle input and print, per column, the most common
-- character and the least common character.
main :: IO ()
main = do
  columns <- (transpose . lines) <$> readFile "input.txt"
  putStrLn (map mode columns)
  putStrLn (map lode columns)
-- | The most common element of a (non-empty) list.
mode :: Ord a => [a] -> a
mode = head . maximumBy (compare `on` length) . group . sort
-- | The least common element of a (non-empty) list.
lode :: Ord a => [a] -> a
lode = head . minimumBy (compare `on` length) . group . sort
| ayron/AoC | 2016/Day 6/solution.hs | gpl-3.0 | 375 | 3 | 10 | 68 | 164 | 81 | 83 | 9 | 1 |
-- | Insertion sort: sort a list of Ints in ascending order by
-- inserting each element into its place in the sorted tail.
insertSort :: [Int] -> [Int]
insertSort []       = []
insertSort (x:rest) = insertOne x (insertSort rest)
  where
    insertOne y []     = [y]
    insertOne y (z:zs)
      | y > z     = z : insertOne y zs
      | otherwise = y : z : zs
-- | An alternative formulation of insertion sort using foldr.
-- NOTE: renamed from @insertSort@ — the file previously declared
-- @insertSort@ (and its type signature) twice, which is a
-- duplicate-binding compile error in Haskell.
insertSortFoldr :: [Int] -> [Int]
insertSortFoldr = foldr insert []
  where insert y []     = [y]
        insert y (x:xs) = if y > x then x:(insert y xs) else y:x:xs
| YuKitAs/tech-note | algorithm/sorting-algo/insertion-sort.hs | gpl-3.0 | 488 | 0 | 10 | 168 | 232 | 123 | 109 | 10 | 3 |
module Main where
import Hip.Prelude
import Hip.HipSpec
import Prelude (Bool(..))
-- Boolean conjunction, defined here (rather than imported from the
-- Prelude) so the theorem prover can see its defining equations.
False && _ = False
True  && a = a

-- Boolean disjunction.
True  || _ = True
False || a = a

-- Boolean negation.
not True  = False
not False = True
-- Commutativity of the two connectives.
prop_and_comm :: Bool -> Bool -> Prop Bool
prop_and_comm x y = y && x =:= x && y
prop_or_comm :: Bool -> Bool -> Prop Bool
prop_or_comm x y = y || x =:= x || y

-- Idempotence.
prop_and_idem :: Bool -> Prop Bool
prop_and_idem x = x && x =:= x
prop_or_idem :: Bool -> Prop Bool
prop_or_idem x = x || x =:= x

-- Identity and absorbing ("zero") elements.
prop_and_identity :: Bool -> Prop Bool
prop_and_identity x = x && True =:= x
prop_and_zero :: Bool -> Prop Bool
prop_and_zero x = x && False =:= False
prop_or_zero :: Bool -> Prop Bool
prop_or_zero x = x || True =:= True
prop_or_identity :: Bool -> Prop Bool
prop_or_identity x = x || False =:= x

-- The defining equations of negation.
prop_not_true :: Prop Bool
prop_not_true = not True =:= False
prop_not_false :: Prop Bool
prop_not_false = not False =:= True

-- Associativity, and associativity combined with commutativity.
prop_and_assoc :: Bool -> Bool -> Bool -> Prop Bool
prop_and_assoc x y z = x && (y && z) =:= (x && y) && z
prop_or_assoc :: Bool -> Bool -> Bool -> Prop Bool
prop_or_assoc x y z = x || (y || z) =:= (x || y) || z
prop_and_assoc_comm :: Bool -> Bool -> Bool -> Prop Bool
prop_and_assoc_comm x y z = y && (x && z) =:= x && (y && z)
prop_or_assoc_comm :: Bool -> Bool -> Bool -> Prop Bool
prop_or_assoc_comm x y z = y || (x || z) =:= x || (y || z)

-- Absorption laws.
prop_and_absorb :: Bool -> Bool -> Prop Bool
prop_and_absorb x y = x && (x || y) =:= x
prop_or_absorb :: Bool -> Bool -> Prop Bool
prop_or_absorb x y = x || (x && y) =:= x

-- Double negation and the complement laws.
prop_not_involutive :: Bool -> Prop Bool
prop_not_involutive x = not (not x) =:= x
prop_and_complement :: Bool -> Prop Bool
prop_and_complement x = x && not x =:= False
prop_or_complement :: Bool -> Prop Bool
prop_or_complement x = x || not x =:= True

-- Distributivity in both directions.
prop_and_distrib :: Bool -> Bool -> Bool -> Prop Bool
prop_and_distrib x y z = (x || y) && (x || z) =:= x || (y && z)
prop_or_distrib :: Bool -> Bool -> Bool -> Prop Bool
prop_or_distrib x y z = (x && y) || (x && z) =:= x && (y || z)

-- De Morgan's laws.
prop_de_morgan_0 :: Bool -> Bool -> Prop Bool
prop_de_morgan_0 x y = not x && not y =:= not (x || y)
prop_de_morgan_1 :: Bool -> Bool -> Prop Bool
prop_de_morgan_1 x y = not x || not y =:= not (x && y)
-- Run HipSpec over this file; 'True' is supplied as a background constant.
main = hipSpec "Bool.hs" [fun0 "True" True]
| danr/hipspec | examples/old-examples/hip/Bool.hs | gpl-3.0 | 2,273 | 0 | 9 | 504 | 1,027 | 520 | 507 | 57 | 1 |
-- "Data/RoseTrie/RoseTrie.hs" provides the RoseTrie data type, a tree combining
-- properties of a Trie and a RoseTrie.
--
-- Copyright (C) 2008-2016 Ramin Honary.
--
-- This library is free software: you can redistribute it and/or modify it under the terms of
-- the GNU General Public License as published by the Free Software Foundation, either version 3 of
-- the License, or (at your option) any later version.
--
-- This software is distributed in the hope that it will be useful, but WITHOUT ANY
-- WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-- details.
--
-- You should have received a copy of the GNU General Public License along with
-- this program (see the file called "LICENSE"). If not, see the URL:
-- <http://www.gnu.org/licenses/agpl.html>.
-- | A "trie" based on 'Data.Map.Map' where you can store objects @o@ to an arbitrary path
-- constructed of paths-segments @p@. The idea of the 'RoseTrie' data structure is that it behaves
-- exctly like a 'Data.Map.Map' except each individual 'branch' node is labeled with a path segmet,
-- and can be accessed and altered arbitrarily.
--
-- Because of the way similar paths @[p]@ are merged, when you perform a 'Data.Foldable.foldr',
-- 'mergeWithKey', or 'Data.Traversable.traverse' operation, you have a choice of how to order the
-- objects @o@, with 'DepthFirst' or 'BreadthFirst'. Functions like 'elems' and 'assocs' require an
-- additional 'RunRoseTrie' parameter to decide the ordering of the objects @o@.
--
-- Therefore, this data type instantiates 'Data.Foldable.Foldable' only when
-- it is paired with a 'RunRoseTrie' to determine if the 'Data.Foldable.foldr' will occur in
-- 'DepthFirst' or 'BreadthFirst' order.
module Data.Tree.RoseTrie where
import Prelude hiding (id, (.), mapM, foldr, foldl, sum, concat)
import Control.Arrow
import Control.Applicative
import Control.Category
import Control.DeepSeq
import Control.Monad hiding (mapM, forM, msum)
import Control.Monad.Identity hiding (mapM, forM, msum)
import Control.Monad.State hiding (mapM, forM, msum)
import Data.Foldable
import Data.Lens.Minimal
import Data.Maybe
import Data.Monoid
import Data.Typeable
import qualified Data.Map as M
import Data.Traversable
import Data.Word
----------------------------------------------------------------------------------------------------
-- | A 'RoseTrie' is just a @newtype@ around a pair of two elements forming a node, the first being the
-- leaf of the node, and the second being the branches of the node. The leaf may or may not exist,
-- so it is wrapped in a 'Data.Maybe.Maybe' data structure.
--
-- When you associate an object @o@ at a path @[p]@, a walk is performed, with each segment of the
-- path @[p]@ selecting a branch that contains another sub-node. When the path @[p]@ is empty, the
-- walk stops and the object @o@ is placed into the current sub-node.
newtype RoseTrie p o = RoseTrie (Maybe o, M.Map p (RoseTrie p o)) deriving (Eq, Ord, Show, Typeable)

instance Functor (RoseTrie p) where { fmap f (RoseTrie (o, m)) = RoseTrie (fmap f o, fmap (fmap f) m); }

-- Under 'Sum', tries merge by taking the union of their paths, 'mappend'ing
-- leaves that collide.
instance (Ord p, Monoid o) => Monoid (Sum (RoseTrie p o)) where
  mempty = Sum Data.Tree.RoseTrie.empty
  mappend (Sum a) (Sum b) = Sum $ unionWith mappend a b

-- Under 'Product', only the paths common to both tries survive.
instance (Ord p, Monoid o) => Monoid (Product (RoseTrie p o)) where
  mempty = Product Data.Tree.RoseTrie.empty
  mappend (Product a) (Product b) = Product $ intersectionWith mappend a b

instance (NFData a, NFData b) => NFData (RoseTrie a b) where
  rnf (RoseTrie (o, m)) = deepseq o $! deepseq m ()

-- Folding needs a 'RunRoseTrie' (carried by 'ReduceRoseTrie') to fix whether
-- leaves are visited 'DepthFirst' or 'BreadthFirst'.
instance Foldable (ReduceRoseTrie p) where
  foldr f b (ReduceRoseTrie control tree) = foldr f b $ elems control tree

-- Traversal flattens to an association list, traverses the leaves, and
-- rebuilds the trie with 'fromList'.
instance Ord p => Traversable (ReduceRoseTrie p) where
  traverse f (ReduceRoseTrie control tree) = fmap (ReduceRoseTrie control . fromList) $
    traverse (\ (p, o) -> (,) <$> pure p <*> f o) $ assocs control tree
----------------------------------------------------------------------------------------------------
-- | This class provides 'fromRoseTrie', which generates a data structure of type @d@ from a
-- 'RoseTrie' of type @RoseTrie p o@, similar to how the 'Prelude.Read' class can generate a data
-- structure from a 'Prelude.String'.
class DataFromRoseTrie d p o where { fromRoseTrie :: RoseTrie p o -> d; }

-- | This class provides 'toRoseTrie', which generates a 'RoseTrie' of type @RoseTrie p o@ from a
-- data type of type @d@, similar to how the 'Prelude.Show' class can generate a 'Prelude.String'
-- from a data structure.
class DataToRoseTrie d p o where { toRoseTrie :: d -> RoseTrie p o; }
----------------------------------------------------------------------------------------------------
-- | This data type controls algorithms like 'mergeWithKeyM' where monadic evaluation needs to occur
-- in a certain order. This simple operation code decides whether evaluation of leaves happens
-- before evaluation of sub-'RoseTrie's ('BreadthFirst') or whether evaluation of leaves happens after
-- evaluation of sub-'RoseTrie's ('DepthFirst').
data RunRoseTrie
  = DepthFirst
    -- ^ will have the 'Rule' 'Data.Tree.RoseTrie.Leaf's evaluated such that the longest branches
    -- evaluate first.
  | BreadthFirst
    -- ^ will have the 'Rule' 'Data.Tree.RoseTrie.Leaf's evaluated such that the shortest branches
    -- evaluate first.
  deriving (Eq, Ord, Show, Typeable, Enum, Bounded)

-- | Like 'RunRoseTrie', but pairs the 'RunRoseTrie' value with the 'RoseTrie' data type itself. This is used to
-- instantiate 'Data.Foldable.Foldable' and 'Data.Traversable.Traversable', which means in order to
-- use 'Data.Foldable.foldr' or 'Data.Traversable.traverse', it is first necessary to store the tree
-- in this data type along with the 'RunRoseTrie' operator indicating the order in which the leaf
-- objects @o@ will be retrieved.
data ReduceRoseTrie p o = ReduceRoseTrie{ reduceRoseTrieBy :: RunRoseTrie, getReduced :: RoseTrie p o }
  deriving (Eq, Ord, Show, Typeable)

instance Functor (ReduceRoseTrie p) where
  fmap f (ReduceRoseTrie control tree) = ReduceRoseTrie control $ fmap f tree
----------------------------------------------------------------------------------------------------
-- | The isomorphism between the 'RoseTrie' newtype and the @(leaf, branches)@ pair it wraps.
roseTrie :: Monad m => Iso m (RoseTrie p o) (Maybe o, M.Map p (RoseTrie p o))
roseTrie = newIso (\ (RoseTrie o) -> o, RoseTrie)

-- | The empty 'RoseTrie'.
empty :: RoseTrie p o
empty = RoseTrie (Nothing, M.empty)

-- | Since 'RoseTrie' does not directly instantiate 'Data.Monoid.Monoid', it cannot be used with the
-- 'Data.Lens.Minimal.new' function. So the 'newRoseTrie' function is provided which behaves
-- similarly. In other words, this function takes a list of transformation functions that modify a
-- 'RoseTrie', and starting with an 'empty' 'RoseTrie', applies each transformation in order to
-- build the 'RoseTrie'.
newRoseTrie :: [RoseTrie p o -> Identity (RoseTrie p o)] -> RoseTrie p o
newRoseTrie = with Data.Tree.RoseTrie.empty

-- | Lens focusing on the optional leaf of the root node.
leaf :: Monad m => Lens m (RoseTrie p o) (Maybe o)
leaf = isoLens roseTrie >>> tuple0

-- | Lens focusing on the branch map of the root node.
branches :: Monad m => Lens m (RoseTrie p o) (M.Map p (RoseTrie p o))
branches = isoLens roseTrie >>> tuple1
-- | This is a focusing lens that focuses on a 'RoseTrie' node at a given path @[p]@, rather than an
-- element at the given path.
node :: (Monad m, Ord p) => [p] -> Lens m (RoseTrie p o) (RoseTrie p o)
node px = Lens
  ( -- Fetch: walk the path through the branch maps, substituting the empty
    -- trie wherever a path segment is missing.
    return . fix
      (\loop px -> case px of
        []   -> id
        p:px -> maybe Data.Tree.RoseTrie.empty (loop px) . fetch (branches >>> mapLens p)
      ) px
    -- Update: compose a branch lens per path segment; 'notEmpty' prunes
    -- sub-tries that become empty so no vacant nodes are left behind.
  , Data.Lens.Minimal.alter' $ foldl
      (\lens p -> lens >>> branches >>> mapLens p >>>
        notEmpty Data.Tree.RoseTrie.null Data.Tree.RoseTrie.empty
      ) id px
  )

-- | Focuses on an individual leaf at the given path.
path :: (Monad m, Ord p) => [p] -> Lens m (RoseTrie p o) (Maybe o)
path px = node px >>> leaf
-- | This function merges two trees together, given a leaf-merging function that can optionally
-- create or remove leaves based on whether or not leaves exist on the left and right at any given
-- point in the path @[p]@.
--
-- Also required are two 'RoseTrie' functions: a function that can convert the first (left)
-- 'RoseTrie' parameter to a 'RoseTrie' of the resultant type, and a function that can convert the
-- second (right) 'RoseTrie' parameter to a 'RoseTrie' of the resultant type. These functions are
-- used for when leaves exist only on the left 'RoseTrie', or for when leaves only exist on the
-- right 'RoseTrie'.
--
-- The given leaf-merging function is called for every single sub-'RoseTrie' node where the path
-- @[p]@ exists in both the overlay and target 'RoseTrie's. Each sub-'RoseTrie' node may or may not
-- have a 'Leaf'.
--
-- * If the 'RoseTrie' node for the overlay 'RoseTrie' and the target 'RoseTrie' are both without
--   leaves, the merging function is passed 'Prelude.Nothing' as both arguments to the updating
--   function.
--
-- * If only the target 'RoseTrie' has a 'Leaf', the overlay 'Leaf' as passed with 'Prelude.Just' as
--   the first (left) argument to the updating function, and 'Prelude.Nothing' is passed as the
--   second (right) argument.
--
-- * If only the overlay 'RoseTrie' has a leaf, 'Prelude.Nothing' is passed as the first (left)
--   argument to the merging function, and the overlay 'Leaf' is passed with 'Prelude.Just' as the
--   second (right) argument.
--
-- * If both the target and the overlay 'RoseTrie's have 'Leaf's, both 'Leaf's are passed with
--   'Prelude.Just' to the merging function.
--
-- Also, it is necessary to specify (as the first parameter to this function) the 'RunRoseTrie'
-- type, which indicates 'DepthFirst' or 'BreadthFirst' evaluation.
mergeWithKeyM
  :: forall m p a b c . (Monad m, Ord p)
  => RunRoseTrie
  -> ([p] -> Maybe a -> Maybe b -> m (Maybe c))
  -> (RoseTrie p a -> m (RoseTrie p c))
  -> (RoseTrie p b -> m (RoseTrie p c))
  -> RoseTrie p a -> RoseTrie p b -> m (RoseTrie p c)
mergeWithKeyM control = loop [] where
  loop px merge left right (RoseTrie (leftLeaf, leftBranches)) (RoseTrie (rightLeaf, rightBranches)) = do
    -- NOTE: the locals 'leaf' and 'map' shadow this module's 'leaf' lens and
    -- 'Prelude.map'; they are the monadic actions producing the merged leaf
    -- and the merged branch map respectively.
    let leaf = merge px leftLeaf rightLeaf
    let map = liftM (M.fromList . concat) $
          mapM (\ (p, leftIfPaired) -> do
              -- 'Left' marks a branch present on both sides (recurse);
              -- 'Right' marks an already-converted one-sided branch.
              tree <- uncurry (loop (px++[p]) merge left right) ||| id $ leftIfPaired
              -- Drop sub-tries that merged to nothing.
              return $ if Data.Tree.RoseTrie.null tree then [] else [(p, tree)]
            )
            ( let wrap f = fmap (Right . f) in M.assocs $
                M.mergeWithKey (\ _ a b -> Just $ Left (a, b))
                  (wrap left) (wrap right) leftBranches rightBranches
            )
    -- 'BreadthFirst' evaluates the leaf action before the branch action;
    -- 'DepthFirst' evaluates the branches first.
    if control==BreadthFirst
      then ap (ap (return $ curry RoseTrie) leaf) map
      else ap (ap (return $ flip $ curry RoseTrie) map) leaf
----------------------------------------------------------------------------------------------------
-- $MapLikeFunctions
-- In this section I have made my best effort to create API functions as similar as possible to that
-- of the "Data.Map" module.
----------------------------------------------------------------------------------------------------

-- | Apply a pure update function to the leaf (if any) at the given path.
alter :: Ord p => (Maybe o -> Maybe o) -> [p] -> RoseTrie p o -> RoseTrie p o
alter f p = runIdentity . alterM (return . f) p

-- | Monadic version of 'alter'.
alterM :: (Monad m, Ord p) => (Maybe o -> m (Maybe o)) -> [p] -> RoseTrie p o -> m (RoseTrie p o)
alterM f p = alter' (path p) f

-- | Insert a leaf at a given address, updating it with the combining function if it already exist.
-- The combining function receives the old value on the left and the new value on the right.
insertWith :: Ord p => (o -> o -> o) -> [p] -> o -> RoseTrie p o -> RoseTrie p o
insertWith append p o = Data.Tree.RoseTrie.alter (Just . maybe o (`append` o)) p

-- | Insert a leaf at a given address, replacing any existing leaf.
insert :: Ord p => [p] -> o -> RoseTrie p o -> RoseTrie p o
insert = insertWith (flip const)

-- | Update a leaf at a given address; returning 'Prelude.Nothing' deletes the leaf.
update :: Ord p => (o -> Maybe o) -> [p] -> RoseTrie p o -> RoseTrie p o
update = Data.Tree.RoseTrie.alter . maybe Nothing

-- | Delete a leaf or 'Branch' at a given address.
delete :: Ord p => [p] -> RoseTrie p o -> RoseTrie p o
delete = Data.Tree.RoseTrie.alter (const Nothing)
-- | Create a 'RoseTrie' from a list of associations, the 'Prelude.fst' element containing the
-- branches, the 'Prelude.snd' element containing the leaf value. This is the inverse operation of
-- 'assocs'.
fromListWith :: Ord p => (o -> o -> o) -> [([p], o)] -> RoseTrie p o
fromListWith append = foldr (uncurry $ insertWith append) Data.Tree.RoseTrie.empty

-- | Like 'fromListWith' but called with @('Prelude.flip' 'Prelude.const')@.
fromList :: Ord p => [([p], o)] -> RoseTrie p o
fromList = fromListWith (flip const)

-- | Create a 'RoseTrie' with @()@ nodes. This is useful for times when the structure of the tree is
-- all you need.
blankRoseTrie :: Ord p => [[p]] -> RoseTrie p ()
blankRoseTrie = fromList . fmap (id &&& const ())

-- | Create a 'RoseTrie' containing only a single 'path' to a single element.
singleton :: Ord p => [p] -> a -> RoseTrie p a
singleton p o = newRoseTrie [path p <~ Just o]

-- | This function is analogous to the 'Data.Map.lookup' function, which returns a value stored in a
-- leaf, or nothing if there is no leaf at the given path.
lookup :: Ord p => [p] -> RoseTrie p a -> Maybe a
lookup px = (~> (path px))
-- | This function works like 'lookup', but takes a key predicate to match keys of the tree, rather
-- than using @('Prelude.==')@. This means the efficient O(log n) 'Data.Map.Map' 'Data.Map.lookup'
-- function in the "Data.Map" module cannot be used, each key must be inspected one-by-one making
-- this algorithm O(n^2). This also means multiple values may match the given key predicate. Lookups
-- are always performed in 'DepthFirst' order, this helps improve efficiency a little bit, as the
-- matches nearest the beggining of each list of 'Data.Map.assocs' are chosen first, and lazily
-- taking only the first few matches will save us from searching the entire tree.
--
-- Take note of the different types @p@ and @b@. This means the path @p@ you use to search the
-- 'RoseTrie' need not be the same type as the branches @b@ of the 'RoseTrie', and what is returned
-- are the actual branches @b@ that matched the path @p@, not the path @p@ itself.
slowLookup :: Ord b => (p -> b -> Bool) -> [p] -> RoseTrie b a -> [([b], a)]
slowLookup f = loop [] where
  loop branchPath px t = case px of
    -- End of the query path: emit this node's leaf (if any).
    []   -> maybe [] (\o -> [(branchPath, o)]) $ t~>leaf
    -- Try every branch whose key satisfies the predicate (list monad).
    p:px -> do
      (b, t) <- filter (f p . fst) (M.assocs $ t~>branches)
      loop (branchPath++[b]) px t

-- | This function calls 'slowLookup' and returns only the first result. This can be used to take
-- advantage of Haskell's laziness and save time by halting the search for matching paths as soon as
-- the first match is found.
slowLookup1 :: Ord b => (p -> b -> Bool) -> [p] -> RoseTrie b a -> Maybe ([b], a)
slowLookup1 f p t = case slowLookup f p t of { [] -> Nothing; o:_ -> Just o; }
-- | Get all items and their associated path, in the leaf ordering selected by the 'RunRoseTrie'.
assocs :: RunRoseTrie -> RoseTrie p a -> [([p], a)]
assocs control = loop [] where
  loop px (RoseTrie (o, m)) =
    -- 'BreadthFirst' puts this node's leaf before the sub-trie leaves;
    -- 'DepthFirst' flips the append so it comes after.
    (if control==BreadthFirst then id else flip) (++)
      (maybe [] (return . (,) px) o)
      (M.assocs m >>= \ (p, o) -> loop (px++[p]) o)
-- | Like 'assocs' but restricts the resulting list of associations to only include elements that
-- lie along a given path. This function walks through the tree with the given path, and collects
-- every 'leaf' along the way. Where there is a leaf, the path is partitioned into the path up to
-- the leaf and the path after the leaf. The list of returned values are these partitioned paths
-- paired with their associated leaves.
partitions :: (Eq p, Ord p) => RunRoseTrie -> [p] -> RoseTrie p a -> [(([p], [p]), a)]
partitions control = partitionsWith control (\a b -> guard (a == b) >> return a)

-- | Like 'partitions', but allows you to use a matching function other than ('Prelude.==').
-- The matching function should return 'Prelude.Nothing' for non-matching path elements, and a
-- 'Prelude.Just' containing a path element that may have been transformed by the matching function.
partitionsWith
  :: (Eq p, Ord p)
  => RunRoseTrie -> (p -> q -> Maybe r) -> [q] -> RoseTrie p a -> [(([r], [q]), a)]
partitionsWith control match path = runIdentity .
  partitionWithM control (\a b -> return $ match a b) path

-- | Like 'partitionsWith' but uses a monadic matching function.
partitionWithM
  :: (Eq p, Ord p, Monad m)
  => RunRoseTrie -> (p -> q -> m (Maybe r)) -> [q] -> RoseTrie p a -> m [(([r], [q]), a)]
partitionWithM control match = loop [] where
  -- Emit this node's leaf (if any) paired with (consumed path, remaining path).
  getleaf path qx = return . maybe [] (return . (,) (path, qx)) . (~> leaf)
  loop path qx tree = case qx of
    [] -> getleaf path [] tree
    q:qx -> liftM2 ((case control of { DepthFirst -> flip; BreadthFirst -> id; }) (++))
      (getleaf path (q:qx) tree)
      ( liftM concat $ forM (M.assocs $ tree~>branches) $ \ (p, tree) ->
          match p q >>= maybe (return []) (\r -> loop (path++[r]) qx tree)
      )
-- | Apply @'Prelude.map' 'Prelude.snd'@ to the result of 'assocs', behaves just like how
-- 'Data.Map.elems' or 'Data.Array.IArray.elems' works.
-- This function is not implemented in terms of 'assocs' to avoid stacking the paths, as the paths
-- will be ignored.
elems :: RunRoseTrie -> RoseTrie p a -> [a]
elems control = loop where
  append = (case control of{ DepthFirst -> flip; BreadthFirst -> id; }) (++)
  loop (RoseTrie (a, m)) = append (maybe [] return a) $ M.elems m >>= loop

-- | Counts the number of *nodes*, which includes the number of 'Branch'es and 'Leaf's. Remember
-- that 'node's that contain 'branches' may not necessarily contain 'leaf' elements.
-- NOTE(review): as written this adds 1 per 'leaf' only and never counts the
-- branch map entries, so @size == leafCount@ and the claim above does not
-- match the code — confirm whether branches were meant to be included.
size :: RoseTrie p a -> Word64
size (RoseTrie (o, m)) = maybe 0 (const 1) o + sum (size <$> M.elems m)

-- | Counts the number of 'leaf's only.
leafCount :: RoseTrie p a -> Word64
leafCount = sum . fmap (const 1) . ReduceRoseTrie DepthFirst

-- | Counts the number of branches only, not leaves.
branchCount :: RoseTrie p a -> Word64
branchCount (RoseTrie (_, m)) = fromIntegral (M.size m) + sum (branchCount <$> M.elems m)

-- | 'Prelude.True' if the trie has no leaf and no branches at all.
null :: RoseTrie p a -> Bool
null (RoseTrie (o, m)) = isNothing o && M.null m
----------------------------------------------------------------------------------------------------
-- | Since this function does not merge trees monadically, it is not important whether merging
-- happens in 'DepthFirst' or 'BreadthFirst' order.
mergeWithKey
  :: Ord p
  => ([p] -> Maybe a -> Maybe b -> Maybe c)
  -> (RoseTrie p a -> RoseTrie p c)
  -> (RoseTrie p b -> RoseTrie p c)
  -> RoseTrie p a -> RoseTrie p b -> RoseTrie p c
mergeWithKey a b c d e = runIdentity $
  mergeWithKeyM BreadthFirst (\k o -> return . a k o) (return . b) (return . c) d e

-- | Like 'mergeWithKeyM' but the merging function ignores the path.
mergeWithM
  :: (Monad m, Ord p)
  => RunRoseTrie
  -> (Maybe a -> Maybe b -> m (Maybe c))
  -> (RoseTrie p a -> m (RoseTrie p c))
  -> (RoseTrie p b -> m (RoseTrie p c))
  -> RoseTrie p a -> RoseTrie p b -> m (RoseTrie p c)
mergeWithM control f = mergeWithKeyM control (const f)

-- | Pure version of 'mergeWithM'.
mergeWith
  :: Ord p
  => (Maybe a -> Maybe b -> Maybe c)
  -> (RoseTrie p a -> RoseTrie p c)
  -> (RoseTrie p b -> RoseTrie p c)
  -> RoseTrie p a -> RoseTrie p b -> RoseTrie p c
mergeWith f = mergeWithKey (const f)
----------------------------------------------------------------------------------------------------
-- | Monadic union keyed by path: leaves present on both sides are combined
-- with the given function; one-sided leaves are kept unchanged.
unionWithKeyM
  :: (Monad m, Ord p)
  => RunRoseTrie
  -> ([p] -> a -> a -> m a)
  -> RoseTrie p a -> RoseTrie p a -> m (RoseTrie p a)
unionWithKeyM control f =
  mergeWithKeyM control
    (\k a b -> maybe (return Nothing) (>>= (return . Just)) $
      (f <$> pure k <*> a <*> b) <|> fmap return a <|> fmap return b
    ) return return

unionWithKey :: Ord p => ([p] -> a -> a -> a) -> RoseTrie p a -> RoseTrie p a -> RoseTrie p a
unionWithKey f a = runIdentity . unionWithKeyM BreadthFirst (\k a -> return . f k a) a

unionWithM :: (Monad m, Ord p) => RunRoseTrie -> (a -> a -> m a) -> RoseTrie p a -> RoseTrie p a -> m (RoseTrie p a)
unionWithM control f = unionWithKeyM control (const f)

unionWith :: Ord p => (a -> a -> a) -> RoseTrie p a -> RoseTrie p a -> RoseTrie p a
unionWith f a = runIdentity . unionWithM BreadthFirst (\a -> return . f a) a

-- | Left-biased union, as in 'Data.Map.union'.
union :: Ord p => RoseTrie p a -> RoseTrie p a -> RoseTrie p a
union = unionWith const

unionsWith :: Ord p => (a -> a -> a) -> [RoseTrie p a] -> RoseTrie p a
unionsWith overlap = foldl (unionWith overlap) Data.Tree.RoseTrie.empty

-- | NOTE(review): 'union' is left-biased ('const') but 'unions' folds with
-- @'Prelude.flip' 'Prelude.const'@, making later tries win — the opposite
-- bias of 'Data.Map.unions'. Confirm this asymmetry is intended.
unions :: Ord p => [RoseTrie p a] -> RoseTrie p a
unions = unionsWith (flip const)
----------------------------------------------------------------------------------------------------
-- | Monadic intersection keyed by path: only leaves present on both sides
-- survive, combined with the given function.
intersectionWithKeyM
  :: (Monad m, Ord p)
  => RunRoseTrie
  -> ([p] -> a -> b -> m c)
  -> RoseTrie p a -> RoseTrie p b -> m (RoseTrie p c)
intersectionWithKeyM control f =
  mergeWithKeyM control
    (\k a b -> maybe (return Nothing) (>>= (return . Just)) $ f <$> pure k <*> a <*> b)
    (return . const Data.Tree.RoseTrie.empty) (return . const Data.Tree.RoseTrie.empty)

intersectionWithKey :: Ord p => ([p] -> a -> b -> c) -> RoseTrie p a -> RoseTrie p b -> RoseTrie p c
intersectionWithKey f a = runIdentity . intersectionWithKeyM BreadthFirst (\k a -> return . f k a) a

intersectionWithM :: (Monad m, Ord p) => RunRoseTrie -> (a -> b -> m c) -> RoseTrie p a -> RoseTrie p b -> m (RoseTrie p c)
intersectionWithM control f = intersectionWithKeyM control (const f)

intersectionWith :: Ord p => (a -> b -> c) -> RoseTrie p a -> RoseTrie p b -> RoseTrie p c
intersectionWith f a = runIdentity . intersectionWithM BreadthFirst (\a -> return . f a ) a

-- | Left-biased intersection, as in 'Data.Map.intersection'.
intersection :: Ord p => RoseTrie p a -> RoseTrie p b -> RoseTrie p a
intersection = intersectionWith const
-- | Intersect a list of tries pairwise, combining colliding leaves with the
-- given function. An empty list yields the empty trie.
--
-- BUG FIX: this previously folded with 'Data.Tree.RoseTrie.empty' as the
-- seed, and since intersecting anything with the empty trie is empty, the
-- result was always 'Data.Tree.RoseTrie.empty'. The fold is now seeded with
-- the first trie in the list.
intersectionsWith :: Ord p => (a -> a -> a) -> [RoseTrie p a] -> RoseTrie p a
intersectionsWith overlap trees = case trees of
  []   -> Data.Tree.RoseTrie.empty
  t:ts -> foldl (intersectionWith overlap) t ts

-- | 'intersectionsWith' keeping the later of any two colliding leaves.
intersections :: Ord p => [RoseTrie p a] -> RoseTrie p a
intersections = intersectionsWith (flip const)
----------------------------------------------------------------------------------------------------
-- | Monadic difference keyed by path: leaves of the first trie survive unless
-- the second trie has a leaf at the same path, in which case the given
-- function decides whether (and how) the leaf is kept.
differenceWithKeyM
  :: (Monad m, Ord p)
  => RunRoseTrie
  -> ([p] -> a -> b -> m (Maybe a))
  -> RoseTrie p a -> RoseTrie p b -> m (RoseTrie p a)
differenceWithKeyM control f =
  mergeWithKeyM control
    (\k a b -> fromMaybe (return Nothing) $ (f <$> pure k <*> a <*> b) <|> fmap (return . Just) a)
    return (return . const Data.Tree.RoseTrie.empty)

differenceWithKey :: Ord p => ([p] -> a -> b -> Maybe a) -> RoseTrie p a -> RoseTrie p b -> RoseTrie p a
differenceWithKey f a = runIdentity . differenceWithKeyM BreadthFirst (\k a -> return . f k a) a

differenceWithM :: (Monad m, Ord p) => RunRoseTrie -> (a -> b -> m (Maybe a)) -> RoseTrie p a -> RoseTrie p b -> m (RoseTrie p a)
differenceWithM control f = differenceWithKeyM control (const f)

differenceWith :: Ord p => (a -> b -> Maybe a) -> RoseTrie p a -> RoseTrie p b -> RoseTrie p a
differenceWith f a = runIdentity . differenceWithM BreadthFirst (\a -> return . f a) a

-- | Remove from the first trie every leaf whose path also holds a leaf in the
-- second trie, as in 'Data.Map.difference'.
difference :: Ord p => RoseTrie p a -> RoseTrie p b -> RoseTrie p a
difference = differenceWith (\ _ _ -> Nothing)
-- | Subtract each subsequent trie in the list from the first, combining
-- colliding leaves with the given function. An empty list yields the empty
-- trie.
--
-- BUG FIX: this previously folded with 'Data.Tree.RoseTrie.empty' as the
-- seed, and since the empty trie minus anything is still empty, the result
-- was always 'Data.Tree.RoseTrie.empty'. The fold is now seeded with the
-- first trie in the list.
differencesWith :: Ord p => (a -> a -> Maybe a) -> [RoseTrie p a] -> RoseTrie p a
differencesWith overlap trees = case trees of
  []   -> Data.Tree.RoseTrie.empty
  t:ts -> foldl (differenceWith overlap) t ts

-- | 'differencesWith' deleting every colliding leaf.
differences :: Ord p => [RoseTrie p a] -> RoseTrie p a
differences = differencesWith (\ _ _ -> Nothing)
----------------------------------------------------------------------------------------------------
-- | This function computes the cartesian of two trees. For example, if the 'assocs' of two trees
-- are:
--
-- @
-- -- tree X             tree Y
-- [( [a, b, c], t ),    [( [b, c], w ),
--  ( [a, b   ], u ),     ( [a   ], x )]
--  ( [b      ], v )]
-- @
--
-- Then the 'product' of these two trees X and Y is the evaluation of 'fromList' on:
--
-- @
-- [( [a, b, c] ++ [b, c], t<>w ),
--  ( [a, b, c] ++ [a   ], t<>x ),
--  ( [a, b,  ] ++ [b, c], u<>w ),
--  ( [a, b,  ] ++ [a,  ], u<>x ),
--  ( [b,     ] ++ [b, c], v<>w ),
--  ( [b,     ] ++ [a   ], v<>x )]
-- @
productWith :: Ord p => (a -> b -> c) -> RoseTrie p a -> RoseTrie p b -> RoseTrie p c
productWith append a b = fromList $ do
  -- Every (path, leaf) of the left trie paired with every one of the right.
  (pA, oA) <- assocs BreadthFirst a
  (pB, oB) <- assocs BreadthFirst b
  [(pA++pB, append oA oB)]

-- | Like 'productWith' but uses 'Data.Monoid.mappend' as the function that computes the product of
-- each element.
product :: (Ord p, Monoid a) => RoseTrie p a -> RoseTrie p a -> RoseTrie p a
product = productWith mappend
----------------------------------------------------------------------------------------------------
-- | If you have read the chapter about zippers in the book "Learn You a Haskell for Great Good",
-- you might appreciate that a zipper is provided for 'RoseTrie' in this module, and a number of
-- useful "Control.Monad.State"ful APIs are also provided, namely 'goto' and 'back'.
--
-- Although it should be noted usually, 'Data.Lens.Minimal.Lens'es, 'Data.Foldable.fold's,
-- 'Data.Traversable.traversal's, and 'mergeWithKeyM' are all you will need.
--
-- The pair is the currently-focused sub-trie together with the stack of
-- (branch key, parent trie) steps taken to reach it.
newtype ZipRoseTrie p o = ZipRoseTrie (RoseTrie p o, [(p, RoseTrie p o)])
  deriving (Eq, Ord, Typeable)

zipRoseTrie :: Monad m => Iso m (ZipRoseTrie p o) (RoseTrie p o, [(p, RoseTrie p o)])
zipRoseTrie = newIso (\ (ZipRoseTrie o) -> o, ZipRoseTrie)

-- | Lens on the currently-focused sub-trie.
zipperSubRoseTrie :: Monad m => Lens m (ZipRoseTrie p o) (RoseTrie p o)
zipperSubRoseTrie = isoLens zipRoseTrie >>> tuple0

-- | Lens on the walk history back to the root.
zipperHistory :: Monad m => Lens m (ZipRoseTrie p o) [(p, RoseTrie p o)]
zipperHistory = isoLens zipRoseTrie >>> tuple1
-- | A monadic function type that keeps the 'ZipRoseTrie' in a 'Control.Monad.State.StateT' for you, and
-- instantiates 'Control.Monad.State.MonadState' such that 'Control.Monad.State.get' and
-- 'Control.Monad.State.put' operate on leaves of the 'RoseTrie'. Use 'goto', 'back', and 'home' to
-- navigate the 'RoseTrie'.
newtype UpdateRoseTrieT p o m a = UpdateRoseTrieT (StateT (ZipRoseTrie p o) m a)
-- | 'UpdateRoseTrieT' specialized to the 'Identity' monad.
type UpdateRoseTrie p o a = UpdateRoseTrieT p o Identity a
instance Functor m => Functor (UpdateRoseTrieT p o m) where
  fmap f (UpdateRoseTrieT m) = UpdateRoseTrieT (fmap f m)
instance (Functor m, Applicative m, Monad m) => Applicative (UpdateRoseTrieT p o m) where
  pure = UpdateRoseTrieT . pure
  UpdateRoseTrieT f <*> UpdateRoseTrieT x = UpdateRoseTrieT (f <*> x)
instance (Functor m, Applicative m, Monad m) => Monad (UpdateRoseTrieT p o m) where
  return = pure
  UpdateRoseTrieT m >>= f = UpdateRoseTrieT (m >>= unwrap . f)
    where unwrap (UpdateRoseTrieT n) = n
-- | 'Control.Monad.State.get' and 'Control.Monad.State.put' read and write the leaf value
-- ('Maybe' o) of the subtree currently focused by the zipper, not the zipper itself.
instance (Ord p, Functor m, Applicative m, Monad m) => MonadState (Maybe o) (UpdateRoseTrieT p o m) where
  state f = UpdateRoseTrieT $ StateT $ \st -> do
    -- Run 'f' on the focused node's leaf, then write the possibly-updated leaf
    -- back into the focused subtree of the zipper state.
    (a, l) <- return $ f $ st~>zipperSubRoseTrie~>leaf
    return (a, with st [zipperSubRoseTrie >>> leaf <~ l])
instance MonadTrans (UpdateRoseTrieT p o) where
  lift = UpdateRoseTrieT . lift
-- | Run the 'UpdateRoseTrieT' function, returning the modified 'RoseTrie' and the last
-- result returned by the 'UpdateRoseTrieT' function. The zipper is sent 'home' first so
-- every modified subtree is merged back into the root before it is returned.
runUpdateRoseTrieT :: (Functor m, Applicative m, Monad m, Ord p) => UpdateRoseTrieT p o m a -> RoseTrie p o -> m (a, RoseTrie p o)
runUpdateRoseTrieT f tree = do
  (result, finalZip) <- runStateT (unwrap (f <* home)) (ZipRoseTrie (tree, []))
  return (result, finalZip~>zipperSubRoseTrie)
  where
    unwrap (UpdateRoseTrieT m) = m
-- | Analogous to 'Control.Monad.State.execStateT': does the same thing as 'runUpdateRoseTrieT'
-- but discards the final return value of the 'UpdateRoseTrieT' function.
execUpdateRoseTrieT :: (Functor m, Applicative m, Monad m, Ord p) => UpdateRoseTrieT p o m a -> RoseTrie p o -> m (RoseTrie p o)
execUpdateRoseTrieT f tree = snd <$> runUpdateRoseTrieT f tree
-- | Analogous to 'Control.Monad.State.evalStateT', does the same thing as 'runUpdateRoseTrieT' but
-- discards the updated 'RoseTrie' and only keeps the last return value of the 'UpdateRoseTrieT'
-- function.
evalUpdateRoseTrieT :: (Functor m, Applicative m, Monad m, Ord p) => UpdateRoseTrieT p o m a -> RoseTrie p o -> m a
-- Mirrors 'execUpdateRoseTrieT' ('fmap snd .'); a plain projection needs no Kleisli '>=>'.
evalUpdateRoseTrieT f = fmap fst . runUpdateRoseTrieT f
-- | Go to the node with the given path. If the path does not exist, it is created.
goto :: (Functor m, Applicative m, Monad m, Ord p) => [p] -> UpdateRoseTrieT p o m ()
goto px = case px of
  [] -> return ()
  (p:px) -> do
    -- Look up the branch labelled 'p' in the focused subtree (substituting an empty
    -- trie when it is missing), focus it, and push the previous focus onto the
    -- history stack so 'back' can merge any edits into it later.
    UpdateRoseTrieT $ do
      t <- gets $ fromMaybe Data.Tree.RoseTrie.empty . M.lookup p . (~> (branches . zipperSubRoseTrie))
      modify (\st -> with st [zipperSubRoseTrie <~ t, zipperHistory $= ((p, st~>zipperSubRoseTrie) :)])
    goto px
-- | Go up one level in the tree, storing the current sub-tree into the upper tree, unless the
-- current tree is 'Void', in which case it is deleted from the upper tree. Returns 'Prelude.False'
-- if we are already at the root of the 'RoseTrie' and could not go back.
back :: (Functor m, Applicative m, Monad m, Ord p) => UpdateRoseTrieT p o m Bool
back = UpdateRoseTrieT $ state $ \st -> case st~>zipperHistory of
  [] -> (False, st)
  -- Pop the most recent (label, parent) pair: re-focus the parent with the current
  -- subtree 'u' inserted under label 'p' -- or dropped entirely when 'u' is empty.
  (p, RoseTrie (t, m)):hist -> (,) True $ let u = st~>zipperSubRoseTrie in with st
    [ zipperSubRoseTrie <~ RoseTrie (t, (if Data.Tree.RoseTrie.null u then id else M.insert p u) m)
    , zipperHistory <~ hist
    ]
-- | Returns 'Prelude.True' if we are at the top level of the tree.
atTop :: (Functor m, Applicative m, Monad m) => UpdateRoseTrieT p o m Bool
atTop = UpdateRoseTrieT $ gets $ Prelude.null . (~> zipperHistory)
-- | Go back to the top level of the tree, merging edits upward step by step.
home :: (Functor m, Applicative m, Monad m, Ord p) => UpdateRoseTrieT p o m ()
home = do
  top <- atTop
  unless top (back >> home)
-- | Return the current path, from the root down to the focused node.
getPath :: (Functor m, Applicative m, Monad m, Ord p) => UpdateRoseTrieT p o m [p]
getPath = UpdateRoseTrieT $ gets $ reverse . map fst . (~> zipperHistory)
----------------------------------------------------------------------------------------------------
-- | This data type lets you store a "diff", that is a structure tracking the differences, between
-- two 'RoseTrie's. This is essentially the result of a 'mergeWithKeyM' operation tracking all of the
-- changes that would happen in a data structure without actually applying the changes. Traversing
-- over the 'RoseTrie' of 'RoseTrieDiff's with 'Data.Traversable.traverse' to actually convert the
-- 'RoseTrieDiff's would then apply the changes.
data RoseTrieDiff a b
  = LeftOnly a -- ^ Something exists in the "left" branches but not in the "right" branches.
  | RightOnly b -- ^ Something exists in the "right" branches but not in the "left" branches.
  | RoseTrieDiff a b -- ^ Something exists in the "left" and "right" branches but they are not equal.
  deriving (Eq, Typeable)
-- | Produce a difference report of two trees with the given comparison predicate. If the predicate
-- returns 'Prelude.True', the node does not appear in the resultant 'RoseTrie'. If there is a
-- difference, the difference is recorded into a node in the resultant 'RoseTrie'.
treeDiffWithM
  :: forall m p a b . (Monad m, Ord p)
  => RunRoseTrie
  -> ([p] -> a -> b -> m Bool)
  -> RoseTrie p a -> RoseTrie p b -> m (RoseTrie p (RoseTrieDiff a b))
treeDiffWithM control compare =
  mergeWithKeyM control merge (return . fmap LeftOnly) (return . fmap RightOnly) where
  -- 'merge' receives a 'Maybe' value for each side; 'msum' picks the first case that
  -- applies: both sides present (run 'compare', dropping equal nodes), left-only, or
  -- right-only. The outer 'Maybe' layer is what 'msum' selects over, the inner 'm'
  -- action is only run for the chosen case.
  merge p a b = fromMaybe (return Nothing) $ msum
    [ a >>= \a -> b >>= \b -> return $
      compare p a b >>= \same -> return $ if same then Nothing else return $ RoseTrieDiff a b
    , a >>= Just . return . Just . LeftOnly
    , b >>= Just . return . Just . RightOnly
    ]
-- | Like 'treeDiffWithM' but with a pure comparison predicate.
treeDiffWith :: Ord p => ([p] -> a -> b -> Bool) -> RoseTrie p a -> RoseTrie p b -> RoseTrie p (RoseTrieDiff a b)
treeDiffWith f a b = runIdentity $ treeDiffWithM BreadthFirst (\p x y -> return (f p x y)) a b
-- | Calls 'treeDiffWithM' using 'Prelude.(==)' as the comparison predicate.
treeDiffM :: (Monad m, Eq a, Ord p) => RunRoseTrie -> RoseTrie p a -> RoseTrie p a -> m (RoseTrie p (RoseTrieDiff a a))
treeDiffM control = treeDiffWithM control (\_ x y -> return (x == y))
-- | Pure version of 'treeDiffM', comparing elements with 'Prelude.(==)'.
treeDiff :: (Eq a, Ord p) => RoseTrie p a -> RoseTrie p a -> RoseTrie p (RoseTrieDiff a a)
treeDiff a b = runIdentity (treeDiffM BreadthFirst a b)
| RaminHAL9001/rose-trie | src/Data/Tree/RoseTrie.hs | gpl-3.0 | 32,423 | 13 | 27 | 6,444 | 8,696 | 4,546 | 4,150 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.URLMaps.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the list of UrlMap resources available to the specified
-- project.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.urlMaps.list@.
module Network.Google.Resource.Compute.URLMaps.List
(
-- * REST Resource
URLMapsListResource
-- * Creating a Request
, urlMapsList
, URLMapsList
-- * Request Lenses
, umlOrderBy
, umlProject
, umlFilter
, umlPageToken
, umlMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.urlMaps.list@ method which the
-- 'URLMapsList' request conforms to.
type URLMapsListResource =
  -- Path components first, then query parameters, in the order the
  -- 'GoogleRequest' instance below supplies them.
  "compute" :> "v1" :> "projects" :>
    Capture "project" Text :>
      "global" :> "urlMaps" :>
        QueryParam "orderBy" Text :>
          QueryParam "filter" Text :>
            QueryParam "pageToken" Text :>
              QueryParam "maxResults" (Textual Word32) :>
                QueryParam "alt" AltJSON :>
                  Get '[JSON] URLMapList
-- | Retrieves the list of UrlMap resources available to the specified
-- project.
--
-- /See:/ 'urlMapsList' smart constructor.
data URLMapsList = URLMapsList'
  { _umlOrderBy :: !(Maybe Text)
  , _umlProject :: !Text
  , _umlFilter :: !(Maybe Text)
  , _umlPageToken :: !(Maybe Text)
  , _umlMaxResults :: !(Textual Word32)
  } deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'URLMapsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'umlOrderBy'
--
-- * 'umlProject'
--
-- * 'umlFilter'
--
-- * 'umlPageToken'
--
-- * 'umlMaxResults'
urlMapsList
  :: Text -- ^ 'umlProject'
  -> URLMapsList
urlMapsList projectId = URLMapsList'
  { _umlOrderBy = Nothing
  , _umlProject = projectId
  , _umlFilter = Nothing
  , _umlPageToken = Nothing
  , _umlMaxResults = 500
  }
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- orderBy=\"creationTimestamp desc\". This sorts results based on the
-- creationTimestamp field in reverse chronological order (newest result
-- first). Use this to sort resources like operations so that the newest
-- operation is returned first. Currently, only sorting by name or
-- creationTimestamp desc is supported.
umlOrderBy :: Lens' URLMapsList (Maybe Text)
umlOrderBy = lens _umlOrderBy (\record value -> record {_umlOrderBy = value})
-- | Project ID for this request.
umlProject :: Lens' URLMapsList Text
umlProject = lens _umlProject (\record value -> record {_umlProject = value})
-- | Sets a filter expression for filtering listed resources, in the form
-- filter={expression}. Your {expression} must be in the format: field_name
-- comparison_string literal_string. The field_name is the name of the
-- field you want to compare. Only atomic field types are supported
-- (string, number, boolean). The comparison_string must be either eq
-- (equals) or ne (not equals). The literal_string is the string value to
-- filter to. The literal value must be valid for the type of field you are
-- filtering by (string, number, boolean). For string fields, the literal
-- value is interpreted as a regular expression using RE2 syntax. The
-- literal value must match the entire field. For example, to filter for
-- instances that do not have a name of example-instance, you would use
-- filter=name ne example-instance. You can filter on nested fields. For
-- example, you could filter on instances that have set the
-- scheduling.automaticRestart field to true. Use filtering on nested
-- fields to take advantage of labels to organize and search for results
-- based on label values. To filter on multiple expressions, provide each
-- separate expression within parentheses. For example,
-- (scheduling.automaticRestart eq true) (zone eq us-central1-f). Multiple
-- expressions are treated as AND expressions, meaning that resources must
-- match all expressions to pass the filters.
umlFilter :: Lens' URLMapsList (Maybe Text)
umlFilter = lens _umlFilter (\record value -> record {_umlFilter = value})
-- | Specifies a page token to use. Set pageToken to the nextPageToken
-- returned by a previous list request to get the next page of results.
umlPageToken :: Lens' URLMapsList (Maybe Text)
umlPageToken = lens _umlPageToken (\record value -> record {_umlPageToken = value})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than maxResults, Compute Engine
-- returns a nextPageToken that can be used to get the next page of results
-- in subsequent list requests.
umlMaxResults :: Lens' URLMapsList Word32
-- '_Coerce' converts between the wire wrapper 'Textual Word32' and plain 'Word32'.
umlMaxResults =
  lens _umlMaxResults (\record value -> record {_umlMaxResults = value}) . _Coerce
instance GoogleRequest URLMapsList where
        type Rs URLMapsList = URLMapList
        type Scopes URLMapsList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute",
               "https://www.googleapis.com/auth/compute.readonly"]
        -- Arguments to 'go' must appear in the same order as the 'Capture' and
        -- 'QueryParam' components of 'URLMapsListResource'.
        requestClient URLMapsList'{..}
          = go _umlProject _umlOrderBy _umlFilter _umlPageToken
              (Just _umlMaxResults)
              (Just AltJSON)
              computeService
          where go
                  = buildClient (Proxy :: Proxy URLMapsListResource)
                      mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/URLMaps/List.hs | mpl-2.0 | 6,417 | 0 | 18 | 1,415 | 678 | 409 | 269 | 94 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidPublisher.Edits.Listings.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a localized store listing.
--
-- /See:/ <https://developers.google.com/android-publisher Google Play Android Developer API Reference> for @androidpublisher.edits.listings.get@.
module Network.Google.Resource.AndroidPublisher.Edits.Listings.Get
(
-- * REST Resource
EditsListingsGetResource
-- * Creating a Request
, editsListingsGet
, EditsListingsGet
-- * Request Lenses
, elgXgafv
, elgUploadProtocol
, elgPackageName
, elgAccessToken
, elgUploadType
, elgLanguage
, elgEditId
, elgCallback
) where
import Network.Google.AndroidPublisher.Types
import Network.Google.Prelude
-- | A resource alias for @androidpublisher.edits.listings.get@ method which the
-- 'EditsListingsGet' request conforms to.
type EditsListingsGetResource =
  -- Path components first, then query parameters, in the order the
  -- 'GoogleRequest' instance below supplies them.
  "androidpublisher" :> "v3" :> "applications" :>
    Capture "packageName" Text :>
      "edits" :>
        Capture "editId" Text :>
          "listings" :>
            Capture "language" Text :>
              QueryParam "$.xgafv" Xgafv :>
                QueryParam "upload_protocol" Text :>
                  QueryParam "access_token" Text :>
                    QueryParam "uploadType" Text :>
                      QueryParam "callback" Text :>
                        QueryParam "alt" AltJSON :>
                          Get '[JSON] Listing
-- | Gets a localized store listing.
--
-- /See:/ 'editsListingsGet' smart constructor.
data EditsListingsGet = EditsListingsGet'
  { _elgXgafv :: !(Maybe Xgafv)
  , _elgUploadProtocol :: !(Maybe Text)
  , _elgPackageName :: !Text
  , _elgAccessToken :: !(Maybe Text)
  , _elgUploadType :: !(Maybe Text)
  , _elgLanguage :: !Text
  , _elgEditId :: !Text
  , _elgCallback :: !(Maybe Text)
  } deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'EditsListingsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'elgXgafv'
--
-- * 'elgUploadProtocol'
--
-- * 'elgPackageName'
--
-- * 'elgAccessToken'
--
-- * 'elgUploadType'
--
-- * 'elgLanguage'
--
-- * 'elgEditId'
--
-- * 'elgCallback'
editsListingsGet
  :: Text -- ^ 'elgPackageName'
  -> Text -- ^ 'elgLanguage'
  -> Text -- ^ 'elgEditId'
  -> EditsListingsGet
editsListingsGet pkg lang edit = EditsListingsGet'
  { _elgXgafv = Nothing
  , _elgUploadProtocol = Nothing
  , _elgPackageName = pkg
  , _elgAccessToken = Nothing
  , _elgUploadType = Nothing
  , _elgLanguage = lang
  , _elgEditId = edit
  , _elgCallback = Nothing
  }
-- | V1 error format.
elgXgafv :: Lens' EditsListingsGet (Maybe Xgafv)
elgXgafv = lens _elgXgafv (\record value -> record {_elgXgafv = value})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
elgUploadProtocol :: Lens' EditsListingsGet (Maybe Text)
elgUploadProtocol =
  lens _elgUploadProtocol (\record value -> record {_elgUploadProtocol = value})
-- | Package name of the app.
elgPackageName :: Lens' EditsListingsGet Text
elgPackageName =
  lens _elgPackageName (\record value -> record {_elgPackageName = value})
-- | OAuth access token.
elgAccessToken :: Lens' EditsListingsGet (Maybe Text)
elgAccessToken =
  lens _elgAccessToken (\record value -> record {_elgAccessToken = value})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
elgUploadType :: Lens' EditsListingsGet (Maybe Text)
elgUploadType =
  lens _elgUploadType (\record value -> record {_elgUploadType = value})
-- | Language localization code (a BCP-47 language tag; for example,
-- \"de-AT\" for Austrian German).
elgLanguage :: Lens' EditsListingsGet Text
elgLanguage = lens _elgLanguage (\record value -> record {_elgLanguage = value})
-- | Identifier of the edit.
elgEditId :: Lens' EditsListingsGet Text
elgEditId = lens _elgEditId (\record value -> record {_elgEditId = value})
-- | JSONP
elgCallback :: Lens' EditsListingsGet (Maybe Text)
elgCallback = lens _elgCallback (\record value -> record {_elgCallback = value})
instance GoogleRequest EditsListingsGet where
        type Rs EditsListingsGet = Listing
        type Scopes EditsListingsGet =
             '["https://www.googleapis.com/auth/androidpublisher"]
        -- Arguments to 'go' must appear in the same order as the 'Capture' and
        -- 'QueryParam' components of 'EditsListingsGetResource'.
        requestClient EditsListingsGet'{..}
          = go _elgPackageName _elgEditId _elgLanguage
              _elgXgafv
              _elgUploadProtocol
              _elgAccessToken
              _elgUploadType
              _elgCallback
              (Just AltJSON)
              androidPublisherService
          where go
                  = buildClient
                      (Proxy :: Proxy EditsListingsGetResource)
                      mempty
| brendanhay/gogol | gogol-android-publisher/gen/Network/Google/Resource/AndroidPublisher/Edits/Listings/Get.hs | mpl-2.0 | 5,482 | 0 | 21 | 1,365 | 862 | 501 | 361 | 128 | 1 |
-- Copyright (C) 2016-2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE DeriveDataTypeable #-}
module RPM.Tags(Tag(..),
Null(..),
findByteStringTag,
findTag,
findStringTag,
findStringListTag,
findWord16Tag,
findWord16ListTag,
findWord32Tag,
findWord32ListTag,
mkTag,
tagValue)
where
import Data.Bits((.&.), shiftR)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as C
import Data.Data(Data, cast, gmapQi, showConstr, toConstr)
import Data.List(find)
import Data.Maybe(fromMaybe, listToMaybe)
import Data.Typeable(Typeable)
import Data.Word
import Text.PrettyPrint.HughesPJClass(Pretty(..))
import Text.PrettyPrint(text)
import RPM.Internal.Numbers
{-# ANN module "HLint: ignore Use camelCase" #-}
-- The character lists are actually lists of characters, ignore the suggestions
-- to use String instead
{-# ANN module "HLint: ignore Use String" #-}
-- | A single RPM header tag paired with its decoded value. The 'DEPRECATED',
-- 'INTERNAL', 'OBSOLETE', 'UNIMPLEMENTED' and 'UNUSED' constructors wrap another
-- 'Tag' to mark its status; 'mkTag' applies these wrappers based on the numeric
-- tag ID it decodes. Constructor names and payload types mirror the upstream
-- RPM tag table (rpmtag.h) -- presumably one constructor per known tag number;
-- verify against 'mkTag' when adding entries.
data Tag = DEPRECATED Tag
         | INTERNAL Tag
         | OBSOLETE Tag
         | UNIMPLEMENTED Tag
         | UNUSED Tag
         | HeaderImage Null
         | HeaderSignatures Null
         | HeaderImmutable Null
         | HeaderRegions Null
         | HeaderI18NTable [String]
         | SigBase Null
         | SigSize Word32
         | SigLEMD5_1 Null
         | SigPGP BS.ByteString
         | SigLEMD5_2 Null
         | SigMD5 BS.ByteString
         | SigGPG BS.ByteString
         | SigPGP5 Null
         | SigBadSHA1_1 Null
         | SigBadSHA1_2 Null
         | PubKeys [String]
         | DSAHeader BS.ByteString
         | RSAHeader BS.ByteString
         | SHA1Header String
         | LongSigSize Word64
         | LongArchiveSize Word64
         | Name String
         | Version String
         | Release String
         | Epoch Word32
         | Summary BS.ByteString
         | Description BS.ByteString
         | BuildTime Word32
         | BuildHost String
         | InstallTime Word32
         | Size Word32
         | Distribution String
         | Vendor String
         | GIF BS.ByteString
         | XPM BS.ByteString
         | License String
         | Packager String
         | Group BS.ByteString
         | ChangeLog [String]
         | Source [String]
         | Patch [String]
         | URL String
         | OS String
         | Arch String
         | PreIn String
         | PostIn String
         | PreUn String
         | PostUn String
         | OldFileNames [String]
         | FileSizes [Word32]
         | FileStates [Char]
         | FileModes [Word16]
         | FileUIDs [Word32]
         | FileGIDs [Word32]
         | FileRDevs [Word16]
         | FileMTimes [Word32]
         | FileMD5s [String]
         | FileLinkTos [String]
         | FileFlags [Word32]
         | Root Null
         | FileUserName [String]
         | FileGroupName [String]
         | Exclude Null
         | Exclusive Null
         | Icon BS.ByteString
         | SourceRPM String
         | FileVerifyFlags [Word32]
         | ArchiveSize Word32
         | ProvideName [String]
         | RequireFlags [Word32]
         | RequireName [String]
         | RequireVersion [String]
         | NoSource [Word32]
         | NoPatch [Word32]
         | ConflictFlags [Word32]
         | ConflictName [String]
         | ConflictVersion [String]
         | DefaultPrefix String
         | BuildRoot String
         | InstallPrefix String
         | ExcludeArch [String]
         | ExcludeOS [String]
         | ExclusiveArch [String]
         | ExclusiveOS [String]
         | AutoReqProv String
         | RPMVersion String
         | TriggerScripts [String]
         | TriggerName [String]
         | TriggerVersion [String]
         | TriggerFlags [Word32]
         | TriggerIndex [Word32]
         | VerifyScript String
         | ChangeLogTime [Word32]
         | ChangeLogName [String]
         | ChangeLogText [String]
         | BrokenMD5 Null
         | PreReq Null
         | PreInProg [String]
         | PostInProg [String]
         | PreUnProg [String]
         | PostUnProg [String]
         | BuildArchs [String]
         | ObsoleteName [String]
         | VerifyScriptProg [String]
         | TriggerScriptProg [String]
         | DocDir Null
         | Cookie String
         | FileDevices [Word32]
         | FileINodes [Word32]
         | FileLangs [String]
         | Prefixes [String]
         | InstPrefixes [String]
         | TriggerIn Null
         | TriggerUn Null
         | TriggerPostUn Null
         | AutoReq Null
         | AutoProv Null
         | Capability Word32
         | SourcePackage Word32
         | OldOrigFileNames Null
         | BuildPreReq Null
         | BuildRequires Null
         | BuildConflicts Null
         | BuildMacros Null
         | ProvideFlags [Word32]
         | ProvideVersion [String]
         | ObsoleteFlags [Word32]
         | ObsoleteVersion [String]
         | DirIndexes [Word32]
         | BaseNames [String]
         | DirNames [String]
         | OrigDirIndexes [Word32]
         | OrigBaseNames [String]
         | OrigDirNames [String]
         | OptFlags String
         | DistURL String
         | PayloadFormat String
         | PayloadCompressor String
         | PayloadFlags String
         | InstallColor Word32
         | InstallTID Word32
         | RemoveTID Word32
         | SHA1RHN Null
         | RHNPlatform String
         | Platform String
         | PatchesName [String]
         | PatchesFlags [Word32]
         | PatchesVersion [String]
         | CacheCTime Word32
         | CachePkgPath String
         | CachePkgSize Word32
         | CachePkgMTime Word32
         | FileColors [Word32]
         | FileClass [Word32]
         | ClassDict [String]
         | FileDependsX [Word32]
         | FileDependsN [Word32]
         | DependsDict [(Word32, Word32)]
         | SourcePkgID BS.ByteString
         | FileContexts [String]
         | FSContexts [String]
         | ReContexts [String]
         | Policies [String]
         | PreTrans String
         | PostTrans String
         | PreTransProg [String]
         | PostTransProg [String]
         | DistTag String
         | OldSuggestsName [String]
         | OldSuggestsVersion [String]
         | OldSuggestsFlags [Word32]
         | OldEnhancesName [String]
         | OldEnhancesVersion [String]
         | OldEnhancesFlags [Word32]
         | Priority [Word32]
         | CVSID String
         | BLinkPkgID [String]
         | BLinkHdrID [String]
         | BLinkNEVRA [String]
         | FLinkPkgID [String]
         | FLinkHdrID [String]
         | FLinkNEVRA [String]
         | PackageOrigin String
         | TriggerPreIn Null
         | BuildSuggests Null
         | BuildEnhances Null
         | ScriptStates [Word32]
         | ScriptMetrics [Word32]
         | BuildCPUClock Word32
         | FileDigestAlgos [Word32]
         | Variants [String]
         | XMajor Word32
         | XMinor Word32
         | RepoTag String
         | Keywords [String]
         | BuildPlatforms [String]
         | PackageColor Word32
         | PackagePrefColor Word32
         | XattrsDict [String]
         | FileXattrsx [Word32]
         | DepAttrsDict [String]
         | ConflictAttrsx [Word32]
         | ObsoleteAttrsx [Word32]
         | ProvideAttrsx [Word32]
         | RequireAttrsx [Word32]
         | BuildProvides Null
         | BuildObsoletes Null
         | DBInstance Word32
         | NVRA String
         | FileNames [String]
         | FileProvide [String]
         | FileRequire [String]
         | FSNames [String]
         | FSSizes [Word64]
         | TriggerConds [String]
         | TriggerType [String]
         | OrigFileNames [String]
         | LongFileSizes [Word64]
         | LongSize Word64
         | FileCaps [String]
         | FileDigestAlgo Word32
         | BugURL String
         | EVR String
         | NVR String
         | NEVR String
         | NEVRA String
         | HeaderColor Word32
         | Verbose Word32
         | EpochNum Word32
         | PreInFlags Word32
         | PostInFlags Word32
         | PreUnFlags Word32
         | PostUnFlags Word32
         | PreTransFlags Word32
         | PostTransFlags Word32
         | VerifyScriptFlags Word32
         | TriggerScriptFlags [Word32]
         | Collections [String]
         | PolicyNames [String]
         | PolicyTypes [String]
         | PolicyTypesIndexes [Word32]
         | PolicyFlags [Word32]
         | PolicyVCS String
         | OrderName [String]
         | OrderVersion [String]
         | OrderFlags [Word32]
         | MSSFManifest [String]
         | MSSFDomain [String]
         | InstFileNames [String]
         | RequireNEVRs [String]
         | ProvideNEVRs [String]
         | ObsoleteNEVRs [String]
         | ConflictNEVRs [String]
         | FileNLinks [Word32]
         | RecommendName [String]
         | RecommendVersion [String]
         | RecommendFlags [Word32]
         | SuggestName [String]
         | SuggestVersion [String]
         | SuggestFlags [Word32]
         | SupplementName [String]
         | SupplementVersion [String]
         | SupplementFlags [Word32]
         | EnhanceName [String]
         | EnhanceVersion [String]
         | EnhanceFlags [Word32]
         | RecommendNEVRs [String]
         | SuggestNEVRs [String]
         | SupplementNEVRs [String]
         | EnhanceNEVRs [String]
         | Encoding String
         | FileTriggerIn Null
         | FileTriggerUn Null
         | FileTriggerPostUn Null
         | FileTriggerScripts [String]
         | FileTriggerScriptProg [String]
         | FileTriggerScriptFlags [Word32]
         | FileTriggerName [String]
         | FileTriggerIndex [Word32]
         | FileTriggerVersion [String]
         | FileTriggerFlags [Word32]
         | TransFileTriggerIn Null
         | TransFileTriggerUn Null
         | TransFileTriggerPostUn Null
         | TransFileTriggerScripts [String]
         | TransFileTriggerScriptProg [String]
         | TransFileTriggerScriptFlags [Word32]
         | TransFileTriggerName [String]
         | TransFileTriggerIndex [Word32]
         | TransFileTriggerVersion [String]
         | TransFileTriggerFlags [Word32]
         | RemovePathPostFixes String
         | FileTriggerPriorities [Word32]
         | TransFileTriggerPriorities [Word32]
         | FileTriggerConds [String]
         | FileTriggerType [String]
         | TransFileTriggerConds [String]
         | TransFileTriggerType [String]
         | FileSignatures [String]
         | FileSignatureLength Word32
 deriving(Eq, Show, Data, Typeable)
instance Pretty Tag where
    -- Rendering via 'show' is far simpler than writing a pretty-printer that
    -- enumerates every single 'Tag' constructor.
    pPrint tag = text (show tag)
-- | Placeholder payload for tags that carry no data at all.
data Null = Null
 deriving(Eq, Show, Data, Typeable)
mkTag :: BS.ByteString -> Int -> Word32 -> Word32 -> Word32 -> Maybe Tag
mkTag store tag ty offset count = case tag of
61 -> maker mkNull >>= Just . HeaderImage
62 -> maker mkNull >>= Just . HeaderSignatures
63 -> maker mkNull >>= Just . HeaderImmutable
64 -> maker mkNull >>= Just . HeaderRegions
100 -> maker mkStringArray >>= Just . HeaderI18NTable
256 -> maker mkNull >>= Just . SigBase
257 -> maker mkWord32 >>= listToMaybe >>= Just . SigSize
258 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . SigLEMD5_1
259 -> maker mkBinary >>= Just . SigPGP
260 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . SigLEMD5_2
261 -> maker mkBinary >>= Just . SigMD5
262 -> maker mkBinary >>= Just . SigGPG
263 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . SigPGP5
264 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . SigBadSHA1_1
265 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . SigBadSHA1_2
266 -> maker mkStringArray >>= Just . PubKeys
267 -> maker mkBinary >>= Just . DSAHeader
268 -> maker mkBinary >>= Just . RSAHeader
269 -> maker mkString >>= Just . SHA1Header
270 -> maker mkWord64 >>= listToMaybe >>= Just . LongSigSize
271 -> maker mkWord64 >>= listToMaybe >>= Just . LongArchiveSize
1000 -> maker mkString >>= Just . Name
1001 -> maker mkString >>= Just . Version
1002 -> maker mkString >>= Just . Release
1003 -> maker mkWord32 >>= listToMaybe >>= Just . Epoch
1004 -> maker mkI18NString >>= Just . Summary
1005 -> maker mkI18NString >>= Just . Description
1006 -> maker mkWord32 >>= listToMaybe >>= Just . BuildTime
1007 -> maker mkString >>= Just . BuildHost
1008 -> maker mkWord32 >>= listToMaybe >>= Just . InstallTime
1009 -> maker mkWord32 >>= listToMaybe >>= Just . Size
1010 -> maker mkString >>= Just . Distribution
1011 -> maker mkString >>= Just . Vendor
1012 -> maker mkBinary >>= Just . GIF
1013 -> maker mkBinary >>= Just . XPM
1014 -> maker mkString >>= Just . License
1015 -> maker mkString >>= Just . Packager
1016 -> maker mkI18NString >>= Just . Group
1017 -> maker mkStringArray >>= Just . INTERNAL . ChangeLog
1018 -> maker mkStringArray >>= Just . Source
1019 -> maker mkStringArray >>= Just . Patch
1020 -> maker mkString >>= Just . URL
1021 -> maker mkString >>= Just . OS
1022 -> maker mkString >>= Just . Arch
1023 -> maker mkString >>= Just . PreIn
1024 -> maker mkString >>= Just . PostIn
1025 -> maker mkString >>= Just . PreUn
1026 -> maker mkString >>= Just . PostUn
1027 -> maker mkStringArray >>= Just . OBSOLETE . OldFileNames
1028 -> maker mkWord32 >>= Just . FileSizes
1029 -> maker mkChar >>= Just . FileStates
1030 -> maker mkWord16 >>= Just . FileModes
1031 -> maker mkWord32 >>= Just . INTERNAL . OBSOLETE . FileUIDs
1032 -> maker mkWord32 >>= Just . INTERNAL . OBSOLETE . FileGIDs
1033 -> maker mkWord16 >>= Just . FileRDevs
1034 -> maker mkWord32 >>= Just . FileMTimes
1035 -> maker mkStringArray >>= Just . FileMD5s
1036 -> maker mkStringArray >>= Just . FileLinkTos
1037 -> maker mkWord32 >>= Just . FileFlags
1038 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . Root
1039 -> maker mkStringArray >>= Just . FileUserName
1040 -> maker mkStringArray >>= Just . FileGroupName
1041 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . Exclude
1042 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . Exclusive
1043 -> maker mkBinary >>= Just . Icon
1044 -> maker mkString >>= Just . SourceRPM
1045 -> maker mkWord32 >>= Just . FileVerifyFlags
1046 -> maker mkWord32 >>= listToMaybe >>= Just . ArchiveSize
1047 -> maker mkStringArray >>= Just . ProvideName
1048 -> maker mkWord32 >>= Just . RequireFlags
1049 -> maker mkStringArray >>= Just . RequireName
1050 -> maker mkStringArray >>= Just . RequireVersion
1051 -> maker mkWord32 >>= Just . NoSource
1052 -> maker mkWord32 >>= Just . NoPatch
1053 -> maker mkWord32 >>= Just . ConflictFlags
1054 -> maker mkStringArray >>= Just . ConflictName
1055 -> maker mkStringArray >>= Just . ConflictVersion
1056 -> maker mkString >>= Just . INTERNAL . DEPRECATED . DefaultPrefix
1057 -> maker mkString >>= Just . INTERNAL . OBSOLETE . BuildRoot
1058 -> maker mkString >>= Just . INTERNAL . DEPRECATED . InstallPrefix
1059 -> maker mkStringArray >>= Just . ExcludeArch
1060 -> maker mkStringArray >>= Just . ExcludeOS
1061 -> maker mkStringArray >>= Just . ExclusiveArch
1062 -> maker mkStringArray >>= Just . ExclusiveOS
1063 -> maker mkString >>= Just . INTERNAL . AutoReqProv
1064 -> maker mkString >>= Just . RPMVersion
1065 -> maker mkStringArray >>= Just . TriggerScripts
1066 -> maker mkStringArray >>= Just . TriggerName
1067 -> maker mkStringArray >>= Just . TriggerVersion
1068 -> maker mkWord32 >>= Just . TriggerFlags
1069 -> maker mkWord32 >>= Just . TriggerIndex
1079 -> maker mkString >>= Just . VerifyScript
1080 -> maker mkWord32 >>= Just . ChangeLogTime
1081 -> maker mkStringArray >>= Just . ChangeLogName
1082 -> maker mkStringArray >>= Just . ChangeLogText
1083 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . BrokenMD5
1084 -> maker mkNull >>= Just . INTERNAL . PreReq
1085 -> maker mkStringArray >>= Just . PreInProg
1086 -> maker mkStringArray >>= Just . PostInProg
1087 -> maker mkStringArray >>= Just . PreUnProg
1088 -> maker mkStringArray >>= Just . PostUnProg
1089 -> maker mkStringArray >>= Just . BuildArchs
1090 -> maker mkStringArray >>= Just . ObsoleteName
1091 -> maker mkStringArray >>= Just . VerifyScriptProg
1092 -> maker mkStringArray >>= Just . TriggerScriptProg
1093 -> maker mkNull >>= Just . INTERNAL . DocDir
1094 -> maker mkString >>= Just . Cookie
1095 -> maker mkWord32 >>= Just . FileDevices
1096 -> maker mkWord32 >>= Just . FileINodes
1097 -> maker mkStringArray >>= Just . FileLangs
1098 -> maker mkStringArray >>= Just . Prefixes
1099 -> maker mkStringArray >>= Just . InstPrefixes
1100 -> maker mkNull >>= Just . INTERNAL . TriggerIn
1101 -> maker mkNull >>= Just . INTERNAL . TriggerUn
1102 -> maker mkNull >>= Just . INTERNAL . TriggerPostUn
1103 -> maker mkNull >>= Just . INTERNAL . AutoReq
1104 -> maker mkNull >>= Just . INTERNAL . AutoProv
1105 -> maker mkWord32 >>= listToMaybe >>= Just . INTERNAL . OBSOLETE . Capability
1106 -> maker mkWord32 >>= listToMaybe >>= Just . SourcePackage
1107 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . OldOrigFileNames
1108 -> maker mkNull >>= Just . INTERNAL . BuildPreReq
1109 -> maker mkNull >>= Just . INTERNAL . BuildRequires
1110 -> maker mkNull >>= Just . INTERNAL . BuildConflicts
1111 -> maker mkNull >>= Just . INTERNAL . UNUSED . BuildMacros
1112 -> maker mkWord32 >>= Just . ProvideFlags
1113 -> maker mkStringArray >>= Just . ProvideVersion
1114 -> maker mkWord32 >>= Just . ObsoleteFlags
1115 -> maker mkStringArray >>= Just . ObsoleteVersion
1116 -> maker mkWord32 >>= Just . DirIndexes
1117 -> maker mkStringArray >>= Just . BaseNames
1118 -> maker mkStringArray >>= Just . DirNames
1119 -> maker mkWord32 >>= Just . OrigDirIndexes
1120 -> maker mkStringArray >>= Just . OrigBaseNames
1121 -> maker mkStringArray >>= Just . OrigDirNames
1122 -> maker mkString >>= Just . OptFlags
1123 -> maker mkString >>= Just . DistURL
1124 -> maker mkString >>= Just . PayloadFormat
1125 -> maker mkString >>= Just . PayloadCompressor
1126 -> maker mkString >>= Just . PayloadFlags
1127 -> maker mkWord32 >>= listToMaybe >>= Just . InstallColor
1128 -> maker mkWord32 >>= listToMaybe >>= Just . InstallTID
1129 -> maker mkWord32 >>= listToMaybe >>= Just . RemoveTID
1130 -> maker mkNull >>= Just . INTERNAL . OBSOLETE . SHA1RHN
1131 -> maker mkString >>= Just . INTERNAL . OBSOLETE . RHNPlatform
1132 -> maker mkString >>= Just . Platform
1133 -> maker mkStringArray >>= Just . DEPRECATED . PatchesName
1134 -> maker mkWord32 >>= Just . DEPRECATED . PatchesFlags
1135 -> maker mkStringArray >>= Just . DEPRECATED . PatchesVersion
1136 -> maker mkWord32 >>= listToMaybe >>= Just . INTERNAL . OBSOLETE . CacheCTime
1137 -> maker mkString >>= Just . INTERNAL . OBSOLETE . CachePkgPath
1138 -> maker mkWord32 >>= listToMaybe >>= Just . INTERNAL . OBSOLETE . CachePkgSize
1139 -> maker mkWord32 >>= listToMaybe >>= Just . INTERNAL . OBSOLETE . CachePkgMTime
1140 -> maker mkWord32 >>= Just . FileColors
1141 -> maker mkWord32 >>= Just . FileClass
1142 -> maker mkStringArray >>= Just . ClassDict
1143 -> maker mkWord32 >>= Just . FileDependsX
1144 -> maker mkWord32 >>= Just . FileDependsN
1145 -> maker mkWord32 >>= Just . DependsDict . map (\x -> ((x `shiftR` 24) .&. 0xff, x .&. 0x00ffffff))
1146 -> maker mkBinary >>= Just . SourcePkgID
1147 -> maker mkStringArray >>= Just . OBSOLETE . FileContexts
1148 -> maker mkStringArray >>= Just . FSContexts
1149 -> maker mkStringArray >>= Just . ReContexts
1150 -> maker mkStringArray >>= Just . Policies
1151 -> maker mkString >>= Just . PreTrans
1152 -> maker mkString >>= Just . PostTrans
1153 -> maker mkStringArray >>= Just . PreTransProg
1154 -> maker mkStringArray >>= Just . PostTransProg
1155 -> maker mkString >>= Just . DistTag
1156 -> maker mkStringArray >>= Just . OBSOLETE . OldSuggestsName
1157 -> maker mkStringArray >>= Just . OBSOLETE . OldSuggestsVersion
1158 -> maker mkWord32 >>= Just . OBSOLETE . OldSuggestsFlags
1159 -> maker mkStringArray >>= Just . OBSOLETE . OldEnhancesName
1160 -> maker mkStringArray >>= Just . OBSOLETE . OldEnhancesVersion
1161 -> maker mkWord32 >>= Just . OBSOLETE . OldEnhancesFlags
1162 -> maker mkWord32 >>= Just . UNIMPLEMENTED . Priority
1163 -> maker mkString >>= Just . UNIMPLEMENTED . CVSID
1164 -> maker mkStringArray >>= Just . UNIMPLEMENTED . BLinkPkgID
1165 -> maker mkStringArray >>= Just . UNIMPLEMENTED . BLinkHdrID
1166 -> maker mkStringArray >>= Just . UNIMPLEMENTED . BLinkNEVRA
1167 -> maker mkStringArray >>= Just . UNIMPLEMENTED . FLinkPkgID
1168 -> maker mkStringArray >>= Just . UNIMPLEMENTED . FLinkHdrID
1169 -> maker mkStringArray >>= Just . UNIMPLEMENTED . FLinkNEVRA
1170 -> maker mkString >>= Just . UNIMPLEMENTED . PackageOrigin
1171 -> maker mkNull >>= Just . INTERNAL . TriggerPreIn
1172 -> maker mkNull >>= Just . INTERNAL . UNIMPLEMENTED . BuildSuggests
1173 -> maker mkNull >>= Just . INTERNAL . UNIMPLEMENTED . BuildEnhances
1174 -> maker mkWord32 >>= Just . UNIMPLEMENTED . ScriptStates
1175 -> maker mkWord32 >>= Just . UNIMPLEMENTED . ScriptMetrics
1176 -> maker mkWord32 >>= listToMaybe >>= Just . UNIMPLEMENTED . BuildCPUClock
1177 -> maker mkWord32 >>= Just . UNIMPLEMENTED . FileDigestAlgos
1178 -> maker mkStringArray >>= Just . UNIMPLEMENTED . Variants
1179 -> maker mkWord32 >>= listToMaybe >>= Just . UNIMPLEMENTED . XMajor
1180 -> maker mkWord32 >>= listToMaybe >>= Just . UNIMPLEMENTED . XMinor
1181 -> maker mkString >>= Just . UNIMPLEMENTED . RepoTag
1182 -> maker mkStringArray >>= Just . UNIMPLEMENTED . Keywords
1183 -> maker mkStringArray >>= Just . UNIMPLEMENTED . BuildPlatforms
1184 -> maker mkWord32 >>= listToMaybe >>= Just . UNIMPLEMENTED . PackageColor
1185 -> maker mkWord32 >>= listToMaybe >>= Just . UNIMPLEMENTED . PackagePrefColor
1186 -> maker mkStringArray >>= Just . UNIMPLEMENTED . XattrsDict
1187 -> maker mkWord32 >>= Just . UNIMPLEMENTED . FileXattrsx
1188 -> maker mkStringArray >>= Just . UNIMPLEMENTED . DepAttrsDict
1189 -> maker mkWord32 >>= Just . UNIMPLEMENTED . ConflictAttrsx
1190 -> maker mkWord32 >>= Just . UNIMPLEMENTED . ObsoleteAttrsx
1191 -> maker mkWord32 >>= Just . UNIMPLEMENTED . ProvideAttrsx
1192 -> maker mkWord32 >>= Just . UNIMPLEMENTED . RequireAttrsx
1193 -> maker mkNull >>= Just . UNIMPLEMENTED . BuildProvides
1194 -> maker mkNull >>= Just . UNIMPLEMENTED . BuildObsoletes
1195 -> maker mkWord32 >>= listToMaybe >>= Just . DBInstance
1196 -> maker mkString >>= Just . NVRA
5000 -> maker mkStringArray >>= Just . FileNames
5001 -> maker mkStringArray >>= Just . FileProvide
5002 -> maker mkStringArray >>= Just . FileRequire
5003 -> maker mkStringArray >>= Just . UNIMPLEMENTED . FSNames
5004 -> maker mkWord64 >>= Just . UNIMPLEMENTED . FSSizes
5005 -> maker mkStringArray >>= Just . TriggerConds
5006 -> maker mkStringArray >>= Just . TriggerType
5007 -> maker mkStringArray >>= Just . OrigFileNames
5008 -> maker mkWord64 >>= Just . LongFileSizes
5009 -> maker mkWord64 >>= listToMaybe >>= Just . LongSize
5010 -> maker mkStringArray >>= Just . FileCaps
5011 -> maker mkWord32 >>= listToMaybe >>= Just . FileDigestAlgo
5012 -> maker mkString >>= Just . BugURL
5013 -> maker mkString >>= Just . EVR
5014 -> maker mkString >>= Just . NVR
5015 -> maker mkString >>= Just . NEVR
5016 -> maker mkString >>= Just . NEVRA
5017 -> maker mkWord32 >>= listToMaybe >>= Just . HeaderColor
5018 -> maker mkWord32 >>= listToMaybe >>= Just . Verbose
5019 -> maker mkWord32 >>= listToMaybe >>= Just . EpochNum
5020 -> maker mkWord32 >>= listToMaybe >>= Just . PreInFlags
5021 -> maker mkWord32 >>= listToMaybe >>= Just . PostInFlags
5022 -> maker mkWord32 >>= listToMaybe >>= Just . PreUnFlags
5023 -> maker mkWord32 >>= listToMaybe >>= Just . PostUnFlags
5024 -> maker mkWord32 >>= listToMaybe >>= Just . PreTransFlags
5025 -> maker mkWord32 >>= listToMaybe >>= Just . PostTransFlags
5026 -> maker mkWord32 >>= listToMaybe >>= Just . VerifyScriptFlags
5027 -> maker mkWord32 >>= Just . TriggerScriptFlags
5029 -> maker mkStringArray >>= Just . UNIMPLEMENTED . Collections
5030 -> maker mkStringArray >>= Just . PolicyNames
5031 -> maker mkStringArray >>= Just . PolicyTypes
5032 -> maker mkWord32 >>= Just . PolicyTypesIndexes
5033 -> maker mkWord32 >>= Just . PolicyFlags
5034 -> maker mkString >>= Just . PolicyVCS
5035 -> maker mkStringArray >>= Just . OrderName
5036 -> maker mkStringArray >>= Just . OrderVersion
5037 -> maker mkWord32 >>= Just . OrderFlags
5038 -> maker mkStringArray >>= Just . UNIMPLEMENTED . MSSFManifest
5039 -> maker mkStringArray >>= Just . UNIMPLEMENTED . MSSFDomain
5040 -> maker mkStringArray >>= Just . InstFileNames
5041 -> maker mkStringArray >>= Just . RequireNEVRs
5042 -> maker mkStringArray >>= Just . ProvideNEVRs
5043 -> maker mkStringArray >>= Just . ObsoleteNEVRs
5044 -> maker mkStringArray >>= Just . ConflictNEVRs
5045 -> maker mkWord32 >>= Just . FileNLinks
5046 -> maker mkStringArray >>= Just . RecommendName
5047 -> maker mkStringArray >>= Just . RecommendVersion
5048 -> maker mkWord32 >>= Just . RecommendFlags
5049 -> maker mkStringArray >>= Just . SuggestName
5050 -> maker mkStringArray >>= Just . SuggestVersion
5051 -> maker mkWord32 >>= Just . SuggestFlags
5052 -> maker mkStringArray >>= Just . SupplementName
5053 -> maker mkStringArray >>= Just . SupplementVersion
5054 -> maker mkWord32 >>= Just . SupplementFlags
5055 -> maker mkStringArray >>= Just . EnhanceName
5056 -> maker mkStringArray >>= Just . EnhanceVersion
5057 -> maker mkWord32 >>= Just . EnhanceFlags
5058 -> maker mkStringArray >>= Just . RecommendNEVRs
5059 -> maker mkStringArray >>= Just . SuggestNEVRs
5060 -> maker mkStringArray >>= Just . SupplementNEVRs
5061 -> maker mkStringArray >>= Just . EnhanceNEVRs
5062 -> maker mkString >>= Just . Encoding
5063 -> maker mkNull >>= Just . INTERNAL . FileTriggerIn
5064 -> maker mkNull >>= Just . INTERNAL . FileTriggerUn
5065 -> maker mkNull >>= Just . INTERNAL . FileTriggerPostUn
5066 -> maker mkStringArray >>= Just . FileTriggerScripts
5067 -> maker mkStringArray >>= Just . FileTriggerScriptProg
5068 -> maker mkWord32 >>= Just . FileTriggerScriptFlags
5069 -> maker mkStringArray >>= Just . FileTriggerName
5070 -> maker mkWord32 >>= Just . FileTriggerIndex
5071 -> maker mkStringArray >>= Just . FileTriggerVersion
5072 -> maker mkWord32 >>= Just . FileTriggerFlags
5073 -> maker mkNull >>= Just . INTERNAL . TransFileTriggerIn
5074 -> maker mkNull >>= Just . INTERNAL . TransFileTriggerUn
5075 -> maker mkNull >>= Just . INTERNAL . TransFileTriggerPostUn
5076 -> maker mkStringArray >>= Just . TransFileTriggerScripts
5077 -> maker mkStringArray >>= Just . TransFileTriggerScriptProg
5078 -> maker mkWord32 >>= Just . TransFileTriggerScriptFlags
5079 -> maker mkStringArray >>= Just . TransFileTriggerName
5080 -> maker mkWord32 >>= Just . TransFileTriggerIndex
5081 -> maker mkStringArray >>= Just . TransFileTriggerVersion
5082 -> maker mkWord32 >>= Just . TransFileTriggerFlags
5083 -> maker mkString >>= Just . INTERNAL . RemovePathPostFixes
5084 -> maker mkWord32 >>= Just . FileTriggerPriorities
5085 -> maker mkWord32 >>= Just . TransFileTriggerPriorities
5086 -> maker mkStringArray >>= Just . FileTriggerConds
5087 -> maker mkStringArray >>= Just . FileTriggerType
5088 -> maker mkStringArray >>= Just . TransFileTriggerConds
5089 -> maker mkStringArray >>= Just . TransFileTriggerType
5090 -> maker mkStringArray >>= Just . FileSignatures
5091 -> maker mkWord32 >>= listToMaybe >>= Just . FileSignatureLength
_ -> Nothing
where
maker fn = fn store ty offset count
mkNull :: BS.ByteString -> Word32 -> Word32 -> Word32 -> Maybe Null
mkNull _ ty _ _ | ty == 0 = Just Null
| otherwise = Nothing
mkChar :: BS.ByteString -> Word32 -> Word32 -> Word32 -> Maybe [Char]
mkChar store ty offset count | ty == 1 = Just $ C.unpack $ BS.take count' start
| otherwise = Nothing
where
count' = fromIntegral count
start = BS.drop (fromIntegral offset) store
mkWord16 :: BS.ByteString -> Word32 -> Word32 -> Word32 -> Maybe [Word16]
mkWord16 store ty offset count | ty == 3 = Just $ readWords store 2 asWord16 offsets
| otherwise = Nothing
where
offsets = map (\n -> offset + (n*2)) [0 .. count-1]
mkWord32 :: BS.ByteString -> Word32 -> Word32 -> Word32 -> Maybe [Word32]
mkWord32 store ty offset count | ty == 4 = Just $ readWords store 4 asWord32 offsets
| otherwise = Nothing
where
offsets = map (\n -> offset + (n*4)) [0 .. count-1]
mkWord64 :: BS.ByteString -> Word32 -> Word32 -> Word32 -> Maybe [Word64]
mkWord64 store ty offset count | ty == 5 = Just $ readWords store 8 asWord64 offsets
| otherwise = Nothing
where
offsets = map (\n -> offset + (n*8)) [0 .. count-1]
mkString :: BS.ByteString -> Word32 -> Word32 -> Word32 -> Maybe String
mkString store ty offset _ | ty == 6 = Just $ C.unpack $ BS.takeWhile (/= 0) start
| otherwise = Nothing
where
start = BS.drop (fromIntegral offset) store
mkBinary :: BS.ByteString -> Word32 -> Word32 -> Word32 -> Maybe BS.ByteString
mkBinary store ty offset count | ty == 7 = Just $ BS.take count' start
| otherwise = Nothing
where
count' = fromIntegral count
start = BS.drop (fromIntegral offset) store
mkStringArray :: BS.ByteString -> Word32 -> Word32 -> Word32 -> Maybe [String]
mkStringArray store ty offset count | ty == 8 = Just $ map C.unpack $ readStrings start count
| otherwise = Nothing
where
start = BS.drop (fromIntegral offset) store
mkI18NString :: BS.ByteString -> Word32 -> Word32 -> Word32 -> Maybe BS.ByteString
mkI18NString store ty offset _ | ty == 9 = Just $ BS.takeWhile (/= 0) start
| otherwise = Nothing
where
start = BS.drop (fromIntegral offset) store
-- I don't know how to split a ByteString up into chunks of a given size, so here's what I'm doing. Take
-- a list of offsets of where in the ByteString to read. Skip to each of those offsets, grab size bytes, and
-- convert those bytes into the type using the given conversion function. Return that list.
{-# ANN readWords "HLint: ignore Eta reduce" #-}
readWords :: BS.ByteString -> Int -> (BS.ByteString -> a) -> [Word32] -> [a]
readWords bs size conv offsets = map (\offset -> conv $ BS.take size $ BS.drop (fromIntegral offset) bs) offsets
readStrings :: BS.ByteString -> Word32 -> [BS.ByteString]
readStrings bytestring count = take (fromIntegral count) $ BS.split 0 bytestring
-- | Given a 'Tag' name and a list of 'Tag's, find the match and return it as a Maybe.
findTag :: String -> [Tag] -> Maybe Tag
findTag name = find matches
  where
    -- A tag matches when its constructor name equals the requested name.
    matches t = showConstr (toConstr t) == name
-- | Given a 'Tag' name and a list of 'Tag's, find the match, convert it into a
-- 'ByteString', and return it as a Maybe.  Returns 'Nothing' either when no
-- tag of that name exists or when its payload is not a 'BS.ByteString'.
findByteStringTag :: String -> [Tag] -> Maybe BS.ByteString
findByteStringTag name tags = findTag name tags >>= \t -> tagValue t :: Maybe BS.ByteString

-- | Given a 'Tag' name and a list of 'Tag's, find the match, convert it into a
-- String, and return it as a Maybe.
findStringTag :: String -> [Tag] -> Maybe String
findStringTag name tags = findTag name tags >>= \t -> tagValue t :: Maybe String

-- | Given a 'Tag' name and a list of 'Tag's, find all matches, convert them into
-- Strings, and return as a list.  If no results are found, return an empty list.
findStringListTag :: String -> [Tag] -> [String]
findStringListTag name tags = fromMaybe [] $ findTag name tags >>= \t -> tagValue t :: Maybe [String]

-- | Given a 'Tag' name and a list of 'Tag's, find the match, convert it into a
-- Word16, and return it as a Maybe.
findWord16Tag :: String -> [Tag] -> Maybe Word16
findWord16Tag name tags = findTag name tags >>= \t -> tagValue t :: Maybe Word16

-- | Given a 'Tag' name and a list of 'Tag's, find all matches, convert them into
-- Word16, and return as a list.  If no results are found, return an empty list.
findWord16ListTag :: String -> [Tag] -> [Word16]
findWord16ListTag name tags = fromMaybe [] $ findTag name tags >>= \t -> tagValue t :: Maybe [Word16]

-- | Given a 'Tag' name and a list of 'Tag's, find the match, convert it into a
-- Word32, and return it as a Maybe.
findWord32Tag :: String -> [Tag] -> Maybe Word32
findWord32Tag name tags = findTag name tags >>= \t -> tagValue t :: Maybe Word32

-- | Given a 'Tag' name and a list of 'Tag's, find all matches, convert them into
-- Word32, and return as a list.  If no results are found, return an empty list.
findWord32ListTag :: String -> [Tag] -> [Word32]
findWord32ListTag name tags = fromMaybe [] $ findTag name tags >>= \t -> tagValue t :: Maybe [Word32]
-- | Given a 'Tag', return its payload, cast to the caller's requested type.
--
-- 'gmapQi' @0@ applies the query to the constructor's first immediate
-- subterm, and 'cast' succeeds only when that field's actual type matches
-- the expected result type, so a type mismatch yields 'Nothing'.
tagValue :: Typeable a => Tag -> Maybe a
tagValue = gmapQi 0 cast
| dashea/bdcs | haskell-rpm/RPM/Tags.hs | lgpl-2.1 | 46,720 | 0 | 15 | 21,328 | 9,696 | 4,986 | 4,710 | 700 | 301 |
{-
Copyright (C) 2009 Andrejs Sisojevs <andrejs.sisojevs@nextmail.ru>
All rights reserved.
For license and copyright information, see the file COPYRIGHT
-}
--------------------------------------------------------------------------
--------------------------------------------------------------------------
-- | The abbreviation \"PCSI\" stands for \"Parametric Composable String
-- Instantiation\". Admittedly this is not a very clear name; something like
-- \"Input Data for Template Representation\" would be more appropriate, but
-- the clearer name only came to mind after dozens of variables were already
-- bearing this one, and renaming everything did not seem worth the effort.
module Text.PCLT.PCSI where
import qualified Data.ByteString.Lazy.UTF8.Unified as Lazy (ByteString)
import qualified Data.ByteString.Lazy.UTF8.Unified as B hiding (ByteString)
import qualified Data.Map as M
import Data.Map (Map, (!))
import Data.Typeable
import Text.ConstraintedLBS
import Text.PCLT.CommonTypes
-- | By these user fills parameters of templates.
data PCLT_ParamVal =
PlainText_PV String
| PlainTextLBS_PV Lazy.ByteString
| PCSI_PV PCSI
-- Reparsable_PV PCLT_ParamVal PCSI_ParamsValuesMap -- reserved, currently doesn't work
-- | Second argument is a separator between 'PCSI's in first argument
-- Here representation generator automatically adds to each PCSI in
-- list an implicit parameter @__row_idx@, which holds a value
-- of current PCSI index in list, starting from 1.
| PCSIList_PV [PCSI] PCLT_ParamVal
| PVList_PV [PCLT_ParamVal]
-- | In message generation procedure this stands for a value of
-- 2nd argument in which every occurence of newline (as is configured in
-- "Text.PCLT.Config" in config's parameter @pcsNewlineLBS@)
-- is substituded by @newline ++ (replicate n ' ')@,
-- where n is the 1st argument
| Indented_PV Int PCLT_ParamVal
-- | In message generation procedure this stands for a value of
-- @pcsNewlineLBS@ parameter declared in "Text.PCLT.Config".
-- Different systems means different symbol sequences
-- under \"newline\"...
| NewLine_PV
| Nothing_PV
deriving (Show, Typeable)
-- Parameter key -> parameter value, for one template instantiation.
type PCSI_ParamsValuesMap = Map PCLT_ParamKey PCLT_ParamVal

-- | PCSI is the output of our extended Show (to which this package is
-- dedicated), and the input from which a message is generated using a
-- catalog.
data PCSI =
  PCSI {
      pcsiTplID :: PCLT_ID                     -- ID of the template to instantiate.
    , pcsiParamsValsMap :: PCSI_ParamsValuesMap -- Values for the template's parameters.
  }
  deriving (Show, Typeable)
-- * PCSI constructors

-- | Build a 'PCSI' for the given template ID with no parameter values.
empPCSI :: PCLT_ID -> PCSI
empPCSI tpl_id = PCSI tpl_id M.empty
-- | Build a 'PCSI' for the given template ID, taking its parameter values
-- from an association list.
thePCSI :: PCLT_ID -> [(PCLT_ParamKey, PCLT_ParamVal)] -> PCSI
thePCSI tpl_id params_alist = PCSI tpl_id (M.fromList params_alist)
-- * PCSI maths

-- | Merge the parameter maps of every 'PCSI' in the list into the target
-- 'PCSI' (the target's template ID is kept).  Merging uses 'sumPCSI_PVMs'
-- ('M.union'), which is left-biased: keys already in the accumulator win,
-- so the target's own keys take precedence over all list entries, and
-- later list entries take precedence over earlier ones.
addToPCSI :: [PCSI] -> PCSI -> PCSI
addToPCSI l t = foldr
  (\ pcsi2 pcsi1_accum ->
    pcsi1_accum {
        pcsiParamsValsMap = sumPCSI_PVMs
                              (pcsiParamsValsMap pcsi1_accum)
                              (pcsiParamsValsMap pcsi2)
      }
  ) t l
-- | Left-biased merge of two parameter-value maps: on a key collision the
-- entry from the first map is kept (this is just 'M.union').
sumPCSI_PVMs :: PCSI_ParamsValuesMap -> PCSI_ParamsValuesMap -> PCSI_ParamsValuesMap
sumPCSI_PVMs = M.union
-- | Insert the given key/value pairs into a 'PCSI's parameter map,
-- overwriting existing keys.  When the list itself contains duplicate
-- keys, the earliest occurrence wins (it is inserted last by 'foldr').
addPVs2PCSI :: [(PCLT_ParamKey, PCLT_ParamVal)] -> PCSI -> PCSI
addPVs2PCSI pvs pcsi =
  pcsi { pcsiParamsValsMap = newMap }
  where
    newMap = foldr insertOne (pcsiParamsValsMap pcsi) pvs
    insertOne (key, val) m = M.insert key val m
-- * Standards

-- | A separator used in some places of the package in errors'
-- representations (in instances of @ShowAsPCSI@ — a class declared in
-- "Text.PCLT.ShowAsPCSI"): a newline, the text @|----@, and another newline.
--
-- NOTE(review): the trailing "| Andrey-Sisoyev/haskell-PCLT | ..." text on
-- the definition line below is dataset-export residue fused onto the source
-- line; it is not valid Haskell and should be removed when restoring the file.
usualSeparatorInPCSIList :: PCLT_ParamVal
usualSeparatorInPCSIList = PVList_PV [NewLine_PV, PlainText_PV "|----", NewLine_PV] | Andrey-Sisoyev/haskell-PCLT | Text/PCLT/PCSI.hs | lgpl-2.1 | 4,138 | 0 | 12 | 952 | 503 | 307 | 196 | 45 | 1 |
module External.A001481 (a001481) where
import Helpers.PolygonSizes (a001481_pairs)
-- | Infinite list of terms of OEIS A001481, computed from
-- 'a001481_pairs': each entry contributes @a^2 + b^2@ for its first pair.
a001481_list :: [Integer]
a001481_list = map reconstitute a001481_pairs where
  -- Only the first (a, b) pair of each entry is used.  Partial match:
  -- this crashes on an empty entry — assumes 'a001481_pairs' never
  -- produces one (TODO confirm).
  reconstitute ((a,b):_) = a^2 + b^2
-- | The n-th term of A001481, indexed from 1 (so @a001481 1@ is the
-- first term).
a001481 :: Int -> Integer
a001481 n = a001481_list !! pred n
| peterokagey/haskellOEIS | src/External/A001481.hs | apache-2.0 | 263 | 0 | 10 | 39 | 102 | 57 | 45 | 7 | 1 |
import Data.IORef
import Data.Foldable
import Control.Monad
-- | Sort a list of 'Int's with bubble sort over a row of mutable cells.
--
-- Each element is placed in its own 'IORef'; adjacent cells are compared
-- and swapped until the row is ordered, then the cells are read back.
--
-- The previous implementation indexed the reference list with '!!' inside
-- the inner loop (an O(n) lookup per comparison, i.e. O(n^3) overall) and
-- ran one pass more than necessary.  Walking the list of adjacent pairs
-- directly restores the usual O(n^2) bound; n-1 full passes always suffice
-- to sort n elements.  Results are identical for every input, including
-- the empty and singleton lists.
bubbleSort :: [Int] -> IO [Int]
bubbleSort input = do
  refs <- mapM newIORef input
  let passes = length input - 1        -- n-1 passes always suffice
      pairs  = zip refs (drop 1 refs)  -- adjacent (left, right) cells
  forM_ [1 .. passes] $ \_ ->
    forM_ pairs $ \(lo, hi) -> do
      a <- readIORef lo
      b <- readIORef hi
      -- Swap out-of-order neighbours; larger values bubble rightward.
      when (a > b) $ do
        writeIORef lo b
        writeIORef hi a
  mapM readIORef refs
-- | Demo driver: sort a fixed sample list and print the result.
main :: IO ()
main = bubbleSort [9, 2, 15, 7] >>= print
| omefire/HaskellProjects | ioref-bubblesort.hs | apache-2.0 | 528 | 0 | 20 | 176 | 260 | 124 | 136 | 22 | 1 |
-- | Equality with default implementations defined mutually in terms of
-- each other.  An instance must override at least one of 'isEqual' or
-- 'isNotEqual'; otherwise calling either one loops forever.
class BasicEq a where
  isEqual :: a -> a -> Bool
  isEqual x y = not $ isNotEqual x y

  isNotEqual :: a -> a -> Bool
  isNotEqual x y = not $ isEqual x y
-- | Boolean equality: delegate to '(==)', whose truth table is exactly
-- that of the spelled-out pattern match (True/True and False/False are
-- equal, everything else is not).
instance BasicEq Bool where
  isEqual = (==)
| EricYT/Haskell | src/real_haskell/chapter-6/BasicEq.hs | apache-2.0 | 291 | 0 | 8 | 99 | 112 | 55 | 57 | 9 | 0 |
module Application.DiagramDrawer.Draw where
import Graphics.UI.Gtk
import Graphics.Rendering.Cairo
import Data.IORef
import Application.DiagramDrawer.Type
-- | Erase the previous crossmark (when one is given) and draw a new one at
-- the supplied position, rendering into the canvas' draw window.
redrawCrossmarkAfterErase :: DrawingArea
                          -> Maybe (Double,Double)
                          -> (Double,Double)
                          -> IO ()
redrawCrossmarkAfterErase canvas oldMark newPos = do
  drawWin <- widgetGetDrawWindow canvas
  renderWithDrawable drawWin $ do
    maybe (return ()) eraseCrossmark oldMark
    drawCrossmark newPos
-- | \"Erase\" the crossmark centred at the given point by drawing its
-- 20x20 bounding box.
--
-- NOTE(review): only the box outline is stroked; the 'fill' call is
-- commented out, so the interior of the old mark is not actually painted
-- over — confirm whether re-enabling 'fill' is intended.
eraseCrossmark :: (Double,Double) -> Render ()
eraseCrossmark (x,y) = do
  rectangle (x-10) (y-10) 20 20
  --fill
  stroke
-- | Draw a crossmark centred at the given point: a horizontal and a
-- vertical arm, each extending 10 units either side of the centre.
drawCrossmark :: (Double,Double) -> Render ()
drawCrossmark (cx,cy) = do
  mapM_ (\((sx, sy), (ex, ey)) -> moveTo sx sy >> lineTo ex ey)
        [ ((cx - 10, cy), (cx + 10, cy))
        , ((cx, cy - 10), (cx, cy + 10)) ]
  stroke
| wavewave/diagdrawer | lib/Application/DiagramDrawer/Draw.hs | bsd-2-clause | 846 | 0 | 14 | 228 | 320 | 167 | 153 | 27 | 2 |
-- Copyright © 2012 Frank S. Thomas <frank@timepit.eu>
-- All rights reserved.
--
-- Use of this source code is governed by a BSD-style license that
-- can be found in the LICENSE file.
module Web.Ohloh.Lens.NameL where
import Data.Lens.Common
-- | Types exposing a name: 'nameL' is a lens focusing the 'String' name
-- inside a value of type @a@.
class NameL a where
  nameL :: Lens a String
| fthomas/ohloh-hs | Web/Ohloh/Lens/NameL.hs | bsd-3-clause | 293 | 0 | 7 | 53 | 38 | 24 | 14 | 4 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Main where
import Network.HTTP
import Network.Browser
import Network.URI
import Control.Monad.IO.Class (MonadIO(liftIO))
import Control.Monad
import Control.Lens
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as L8
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.Lazy as LText
import qualified Data.Text.Lazy.Encoding as LText
import qualified Data.Aeson as JS
import qualified Data.Aeson.Lens as JS
import Data.Typeable (Typeable)
import Control.Exception
import System.Environment (getArgs)
import System.IO (hFlush, stdout, stderr, hPutStrLn)
import System.Random (randomRIO)
import System.Exit (exitFailure)
import Control.Concurrent
import Dirs (TaskName(..), TaskGroup(..), rawFunName, funHash, showTaskGroup)
------------------------------------------------------------------------
-- Bot type implementation
------------------------------------------------------------------------

-- | A bot action: a function from 'Settings' into Network.Browser's
-- 'BrowserAction' over lazy-'L.ByteString' HTTP streams.
newtype Bot a = MkBot { unBot :: Settings -> BrowserAction (HandleStream L.ByteString) a }

-- | Connection parameters for the game server.
data Settings = Settings
  { serverHostName :: String  -- Host name used as the URI authority.
  , serverPort :: String      -- TCP port, kept as a string for URI building.
  }
instance Functor Bot where
  fmap = liftM

instance Applicative Bot where
  (<*>) = ap
  pure = return

instance Monad Bot where
  return x = MkBot (\_ -> return x)
  -- Thread the Settings environment through both actions (Reader-style bind).
  m >>= f = MkBot (\env -> unBot m env >>= \x -> unBot (f x) env)
  -- Pre-MonadFail style: 'fail' aborts the bot by throwing 'BotFailure' in IO.
  -- NOTE(review): on base >= 4.13 'fail' is no longer a Monad method, so this
  -- definition would need a MonadFail instance — confirm the target GHC.
  fail e = liftIO (throwIO (BotFailure (MkBotFailure e)))

instance MonadIO Bot where
  liftIO = MkBot . const . liftIO
-- | Run a bot against the given server settings inside a browser session,
-- catching any 'BotException' thrown along the way.
runBot :: Bot a -> Settings -> IO (Either BotException a)
runBot action env = try $ browse $ do
  configureBrowser
  unBot action env
-- | Session setup: install a no-op output handler so the HTTP library's
-- logging is silenced.
configureBrowser :: BrowserAction t ()
configureBrowser =
  do setOutHandler $ \_ -> return ()
-- | Details of a non-200 HTTP response from the server.
data HttpException = MkHttpException
  { httpConnError :: ResponseCode       -- Response code that triggered the failure.
  , httpExceptionMethod :: Text         -- Server method being called.
  , httpExceptionArgs :: [(Text,Text)]  -- Arguments that were posted.
  }
  deriving (Show, Typeable)

-- | A failure raised from the bot's own logic (via the Monad 'fail').
data BotFailure = MkBotFailure
  { botFailureMessage :: String
  }
  deriving (Show, Typeable)

-- | Union of everything 'runBot' can catch.
data BotException
  = BotFailure BotFailure
  | HttpException HttpException
  deriving (Show, Typeable)

instance Exception BotException
------------------------------------------------------------------------

-- | POST a form-encoded request to @/method@ on the configured server and
-- return the raw response body.  Any response code other than 200 is
-- turned into an 'HttpException' thrown in IO.
serverMethod :: Text -> [(Text,Text)] -> Bot L.ByteString
serverMethod method args = MkBot $ \env ->
  do let uri = URI
           { uriScheme = "http:"
           , uriAuthority = Just URIAuth
               { uriUserInfo = ""
               , uriRegName = serverHostName env
               , uriPort = ":" ++ serverPort env
               }
           , uriPath = "/" ++ Text.unpack method
           , uriQuery = ""
           , uriFragment = ""
           }
         -- Arguments travel url-encoded in the POST body, not the query string.
         body = L8.pack (urlEncodeVars [ (Text.unpack k, Text.unpack v) | (k,v) <- args])
         req = Request
           { rqMethod = POST
           , rqHeaders = [ Header HdrContentType "application/x-www-form-urlencoded"
                         , Header HdrContentLength (show (L8.length body))
                         ]
           , rqBody = body
           , rqURI = uri
           }
     (_,resp) <- request req
     -- Non-200: raise an exception carrying the call's method and arguments.
     unless (rspCode resp == (2,0,0)) $
       liftIO $ throwIO $ HttpException $ MkHttpException
         { httpConnError = rspCode resp
         , httpExceptionMethod = method
         , httpExceptionArgs = args
         }
     return (rspBody resp)
-- | Call 'serverMethod' and decode the response body as JSON; a body that
-- fails to parse aborts the bot with a diagnostic message.
serverMethodJson :: Text -> [(Text,Text)] -> Bot JS.Value
serverMethodJson method args = do
  raw <- serverMethod method args
  case JS.decode raw of
    Just value -> return value
    Nothing    -> fail (Text.unpack method ++ ": Bad JSON, "
                        ++ LText.unpack (LText.decodeUtf8 raw))
-- | Human-readable rendering of a 'BotException', one detail per line.
prettyException :: BotException -> String
prettyException (HttpException ex) = unlines (header ++ argLines)
  where
    header   = [ show (httpConnError ex)
               , "Method: " ++ Text.unpack (httpExceptionMethod ex)
               , "Args:"
               ]
    argLines = map (\(key, val) -> "  " ++ Text.unpack key ++ ": " ++ Text.unpack val)
                   (httpExceptionArgs ex)
prettyException (BotFailure ex) =
  unlines
    [ "Bot failed:"
    , "  " ++ botFailureMessage ex
    ]
------------------------------------------------------------------------

-- | Ask the server for a fresh session ID.
getSession :: Bot Text
getSession = do
  response <- serverMethodJson "play/getSession" []
  case preview (JS.key "sessionid" . JS._String) response of
    Just sessionId -> return sessionId
    Nothing        -> fail "Unable to parse getSession JSON"
-- | Start the given task within a session.  The task's function is sent
-- as its hash; the group and name identify the task itself.
startTask :: Text -> TaskName -> Bot JS.Value
startTask s tn =
  serverMethodJson "play/startTask"
    [ ("sessionid", s)
    , ("function" , funHash (taskFun tn))
    , ("group" , showTaskGroup (taskGroup tn))
    , ("name" , taskName tn)
    ]
-- | Drop the expression at (taskpath, exprpath) into the input hole with
-- the given ID; the final flag is sent as the @inAsmp@ form field.
addToHole :: Text -> Text -> Text -> Integer -> Bool -> Bot JS.Value
addToHole s tp ep inp asmp = serverMethodJson "play/addToHole" params
  where
    params = [ ("sessionid", s)
             , ("taskpath" , tp)
             , ("exprpath" , ep)
             , ("inputid"  , Text.pack (show inp))
             , ("inAsmp"   , if asmp then "true" else "false")
             ]
-- | Ask the server which rewrites are available at an expression path
-- within a task.
viewRewrites :: Text -> Text -> Text -> Bot JS.Value
viewRewrites s tp ep =
  serverMethodJson "play/viewRewrites"
    [ ("sessionid", s)
    , ("taskpath" , tp)
    , ("exprpath" , ep)
    ]

-- | Apply the rewrite with the given index (as listed by 'viewRewrites')
-- at an expression path within a task.
rewriteInput :: Text -> Text -> Text -> Int -> Bot JS.Value
rewriteInput s tp ep choice =
  serverMethodJson "play/rewriteInput"
    [ ("sessionid", s)
    , ("taskpath" , tp)
    , ("exprpath" , ep)
    , ("choice" , Text.pack (show choice))
    ]

-- | POST @play/grabInput@ for the given task/expression path and return
-- the raw response body.
grabInput :: Text -> Text -> Text -> Bot L.ByteString
grabInput s tp ep =
  serverMethod "play/grabInput"
    [ ("sessionid", s)
    , ("taskpath" , tp)
    , ("exprpath" , ep)
    ]

-- | POST @play/grabExpr@ for the given expression identifier and return
-- the raw response body.
grabExpr :: Text -> Text -> Bot L.ByteString
grabExpr s e =
  serverMethod "play/grabExpr"
    [ ("sessionid", s)
    , ("expr" , e)
    ]
-- | Ask the server to re-check a goal; returns True when the reported
-- result is either @proved@ or @simple@.
updateGoal :: Text -> Integer -> Bot Bool
updateGoal s goalId = do
  response <- serverMethodJson "play/updateGoal"
                [ ("sessionid", s)
                , ("goalid", Text.pack (show goalId))
                ]
  status <- expect "update goal result" $
              preview (JS.key "result" . JS._String) response
  return (status `elem` ["proved", "simple"])
-- | Drop the session's current task, discarding the server's reply.
abandonTask :: Text -> Bot ()
abandonTask s = void (serverMethod "play/abandonTask" [("sessionid", s)])
-- | Open a fresh session, start the given task in it, and return the new
-- session ID (the startTask reply itself is discarded).
startNewTask :: TaskName -> Bot Text
startNewTask tn = do
  sessionId <- getSession
  _ <- startTask sessionId tn
  return sessionId
-- | The task the solver below plays: @task_3g9@ in the 'SafetyLevels'
-- group, with its function pinned by a hard-coded hash.
tutorialTask0 :: TaskName
tutorialTask0 = TaskName
  { taskFun = rawFunName "f4d200b01d00dc20c9d7b5be1277d241492f91a3"
  , taskGroup = SafetyLevels
  , taskName = "task_3g9"
  }
-- | True when the JSON object's @fromInputId@ field is present and null.
isFromConclusion :: JS.Value -> Bool
isFromConclusion = has (JS.key "fromInputId" . JS._Null)
-- | Unwrap a 'Maybe', turning 'Nothing' into a monadic failure carrying
-- the given description.
expect :: Monad m => String -> Maybe a -> m a
expect msg = maybe (fail msg) return
-- | Play through 'tutorialTask0' once: locate the goal fed by the
-- conclusion, drag the conclusion into its input, verify the goal, perform
-- one \"replace\" rewrite on the new input, confirm the goal now fails,
-- then abandon the task.  Prints a progress dot and returns @\"\"@.
task0_solver :: Bot String
task0_solver =
  do s <- getSession
     task <- startTask s tutorialTask0
     -- determine goal with conclusion
     edge <- expect "first goal id" $
       preview ( JS.key "graph" . JS.values . filtered isFromConclusion
               . JS.key "to" . JS.values
               ) task
     firstGoalId <- expect "goal id" $
       preview (JS.key "goalId" . JS._Integer) edge
     firstInputId <- expect "input id" $
       preview (JS.key "inputId" . JS._Integer) edge
     firstGoal <- expect "first goal" $
       preview ( JS.key "goals" . JS.nth (fromIntegral firstGoalId)
               . JS.key "goal" ) task
     -- drag conclusion to first input
     conclusion <- expect "conclusion" $
       preview (JS.key "conc") firstGoal
     concTaskPath <- expect "conc task path" $
       preview (JS.key "taskPath") conclusion
     concExprPath <- expect "conc expr path" $
       preview (JS.key "expr" . JS.key "path") conclusion
     a <- addToHole s (stringify concTaskPath)
                      (stringify concExprPath)
                      firstInputId True
     -- Verify that dragging worked
     r <- updateGoal s firstGoalId
     unless r (fail "first update goal failed")
     -- NOTE(review): a random negative expression id is fetched here;
     -- presumably this exercises the server with a bogus id — confirm intent.
     n <- liftIO (randomRIO (-100000,-1::Int))
     grabExpr s (Text.pack (show n))
     -- Do a rewrite
     h1 <- expect "new input def" $
       preview ( JS.key "holeExprs" . JS.nth (fromIntegral firstGoalId)
               . JS.key "inst" ) a
     h1tp <- expect "new input tp" $
       preview ( JS.key "taskPath" ) h1
     let h1ep = review JS._String "2"
     rewrites <- viewRewrites s (stringify h1tp) (stringify h1ep)
     let isReplace :: JS.Value -> Bool
         isReplace x = Just "replace" == preview (JS.key "name") x
     selection <- expect "safe sub" $
       preview (JS.values . filtered isReplace . asIndex) rewrites
     _ <- rewriteInput s (stringify h1tp) (stringify h1ep) selection
     -- Check goal where it won't work
     r1 <- updateGoal s firstGoalId
     when r1 (fail "broken goal shouldn't have checked")
     abandonTask s
     -- Progress indicator on stdout.
     liftIO (putChar '.' >> hFlush stdout)
     return ""
-- | Encode a JSON value and bring the result back as strict 'Text'.
stringify :: JS.Value -> Text
stringify jsValue = LText.toStrict (LText.decodeUtf8 (JS.encode jsValue))
-- | Entry point: read server settings from the command line, then run the
-- tutorial solver 100 times with a one-second pause after each run.
main :: IO ()
main = do
  settings <- getSettings
  replicateM_ 100 $ do
    _ <- runBot task0_solver settings
    threadDelay 1000000
-- | Parse HOST and PORT from the command line; on any other argument
-- shape, print a usage message to stderr and exit with failure.
getSettings :: IO Settings
getSettings = do
  args <- getArgs
  case args of
    [host, port] -> return (Settings host port)
    _ -> do
      hPutStrLn stderr "Usage: Bot HOST PORT"
      exitFailure
| GaloisInc/verification-game | web-prover/exes/Bot.hs | bsd-3-clause | 9,658 | 0 | 20 | 2,635 | 2,943 | 1,531 | 1,412 | 237 | 2 |
{-# LANGUAGE DeriveDataTypeable, RankNTypes, FlexibleInstances, FlexibleContexts,
KindSignatures, ScopedTypeVariables #-}
module Game.Sample.Sample where
-- Useful sampling functions used when playing games
import qualified Language.Hakaru.ImportanceSampler as IS
import Language.Hakaru.Metropolis hiding (sample)
import Language.Hakaru.Types -- Discrete
import Language.Hakaru.Distribution
import System.Random.Shuffle (shuffle')
import System.Random (newStdGen)
import Data.Typeable (Typeable)
import Game.Sample.Hakaru
-- Shorthand for 'fromIntegral'.
fI = fromIntegral

-- Importance Sampler macros:
-- | Draw a single value from the measure under the given conditions.
--
-- NOTE(review): 'head' is partial — this crashes if 'sampleN' ever yields
-- an empty list; confirm the sampler always produces at least one draw.
sample1 :: (Show a, Ord a) => IS.Measure a -> [Cond] -> IO a
sample1 fncn conds = do
  s <- sampleN 1 fncn conds
  return $ head s
-- | Draw the first @n@ values from the measure under the given conditions,
-- discarding each draw's weight (the second tuple component).
sampleN :: (Show a, Ord a) => Int -> IS.Measure a -> [Cond] -> IO [a]
sampleN n fncn conds = do
  draws <- sample fncn conds
  return (take n (map fst draws))
-- | Shorthand: wrap a distribution as an unconditioned measure.
uncnd :: Typeable a => Dist a -> IS.Measure a
uncnd = IS.unconditioned

-- | Shorthand: wrap a distribution as a conditioned measure.
cnd :: Typeable a => Dist a -> IS.Measure a
cnd = IS.conditioned
-- Get a uniform int on a mn-closed mx-open interval [mn,mx)
--
-- NOTE(review): when the Double draw is exactly mx, this returns
-- @truncate mx@ (i.e. mx itself for integral bounds), which contradicts
-- the half-open claim above — confirm whether @mx - 1@ was intended.
uniformInt :: Int -> Int -> IS.Measure Int
uniformInt mn' mx' = do
  let (mn,mx) = (fI mn', fI mx') :: (Double,Double)
  dbl <- (uncnd $ uniform mn mx) :: IS.Measure Double
  if ((==) dbl mx) then return $ truncate mx
                   else return $ floor dbl
-- Shuffling using hakaru:
--shuffleC' [] d2 = return d2
--shuffleC' d1 d2 = do
-- idx <- uniformInt 0 (length d1)
-- return $ (d1 !! idx) : (removeNth idx d1)
--shuffleCards' d = sample1 (shuffleC' d []) []
-- Shuffling using System.Random:

-- | Return a random permutation of the list, using a fresh generator.
-- NOTE(review): 'shuffle'' from random-shuffle is undefined for an empty
-- list — confirm callers never pass [].
shuffleList d = do
  g <- newStdGen -- TODO: keep random gen in game data type
  return $ shuffle' d (length d) g
| cronburg/deckbuild | Game/Sample/Sample.hs | bsd-3-clause | 1,701 | 0 | 11 | 339 | 481 | 256 | 225 | 33 | 2 |
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- For `Show` instance, it's OK.
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE TupleSections #-}
{- |
Module : Verifier.SAW.Simulator.Value
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer : jhendrix@galois.com
Stability : experimental
Portability : non-portable (language extensions)
-}
module Verifier.SAW.Simulator.Value
( module Verifier.SAW.Simulator.Value
, module Verifier.SAW.Simulator.MonadLazy
) where
import Prelude hiding (mapM)
import Control.Monad (foldM, mapM)
import Data.Kind (Type)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Vector (Vector)
import qualified Data.Vector as V
import Numeric.Natural
import GHC.Stack
import Verifier.SAW.FiniteValue (FiniteType(..), FirstOrderType(..))
import Verifier.SAW.SharedTerm
import Verifier.SAW.TypedAST
import Verifier.SAW.Utils (panic)
import Verifier.SAW.Term.Pretty
import Verifier.SAW.Simulator.MonadLazy
------------------------------------------------------------
-- Values and Thunks
{- | The type of values.
Values are parameterized by the /name/ of an instantiation.
The concrete parameters to use are computed from the name using
a collection of type families (e.g., 'EvalM', 'VBool', etc.). -}
data Value l
  = VFun !LocalName !(Thunk l -> MValue l)
  | VUnit
  | VPair (Thunk l) (Thunk l) -- TODO: should second component be strict?
  | VCtorApp !(PrimName (TValue l)) ![Thunk l] ![Thunk l] -- constructor, parameters, arguments
  | VVector !(Vector (Thunk l))
  | VBool (VBool l)
  | VWord (VWord l)
  | VBVToNat !Int (Value l) -- TODO: don't use @Int@ for this, use @Natural@
  | VIntToNat (Value l)
  | VNat !Natural
  | VInt (VInt l)
  | VIntMod !Natural (VInt l) -- modulus, then representative
  | VArray (VArray l)
  | VString !Text
  | VRecordValue ![(FieldName, Thunk l)]
  | VRecursor
      !(PrimName (TValue l)) -- data type ident
      ![Value l] -- data type parameters
      !(Value l) -- motive function
      !(TValue l) -- type of motive
      !(Map VarIndex (Thunk l, TValue l)) -- constructor eliminators and their types
  | VExtra (Extra l)
  | TValue (TValue l) -- a type embedded as a value
-- | The subset of values that represent types.
data TValue l
  = VVecType !Natural !(TValue l)
  | VBoolType
  | VIntType
  | VIntModType !Natural
  | VArrayType !(TValue l) !(TValue l) -- key type, element type
  | VPiType LocalName !(TValue l) !(PiBody l) -- binder name, domain, codomain
  | VStringType
  | VUnitType
  | VPairType !(TValue l) !(TValue l)
  | VDataType !(PrimName (TValue l)) ![Value l] ![Value l]
  | VRecordType ![(FieldName, TValue l)]
  | VSort !Sort
  | VRecursorType
      !(PrimName (TValue l)) -- data type name
      ![Value l] -- data type parameters
      !(Value l) -- motive function
      !(TValue l) -- type of motive function
  | VTyTerm !Sort !Term
-- | The codomain of a pi type: either genuinely dependent on the
-- argument, or a constant (non-dependent) type.
data PiBody l
  = VDependentPi !(Thunk l -> EvalM l (TValue l))
  | VNondependentPi !(TValue l)
-- | Neutral terms represent computations that are blocked
-- because some internal term cannot be evaluated
-- (e.g., because it is a variable, because its definition
-- is being hidden, etc.)
data NeutralTerm
  = NeutralBox Term -- the thing blocking evaluation
  | NeutralPairLeft NeutralTerm -- left pair projection
  | NeutralPairRight NeutralTerm -- right pair projection
  | NeutralRecordProj NeutralTerm FieldName -- record projection
  | NeutralApp NeutralTerm Term -- function application
  | NeutralRecursor
      NeutralTerm -- recursor value
      [Term] -- indices for the inductive type
      Term -- argument being eliminated
  | NeutralRecursorArg -- recursor application
      Term -- recursor value
      [Term] -- indices for the inductive type
      NeutralTerm -- argument being eliminated
  | NeutralConstant -- A constant value with no definition
      (ExtCns Term)
-- | A lazily-evaluated 'Value' in the evaluation monad of instantiation 'l'.
type Thunk l = Lazy (EvalM l) (Value l)

-- | Evaluation monad for value instantiation 'l'
type family EvalM l :: Type -> Type
-- | Booleans for value instantiation 'l'
type family VBool l :: Type
-- | Words for value instantiation 'l'
type family VWord l :: Type
-- | Integers for value instantiation 'l'
type family VInt l :: Type
-- | SMT arrays for value instantiation 'l'
type family VArray l :: Type
-- | Additional constructors for instantiation 'l'
type family Extra l :: Type

-- | Short-hand for a monadic value.
type MValue l = EvalM l (Value l)
-- | Short-hand for a monadic boolean.
type MBool l = EvalM l (VBool l)
-- | Short-hand for a monadic word.
type MWord l = EvalM l (VWord l)
-- | Short-hand for a monadic integer.
type MInt l = EvalM l (VInt l)
-- | Short-hand for a monadic array.
type MArray l = EvalM l (VArray l)

-- | Short hand to specify that the evaluation monad is a monad (very common)
type VMonad l = Monad (EvalM l)
-- | Short hand to specify that the evaluation monad is a lazy monad.
type VMonadLazy l = MonadLazy (EvalM l)

-- | Language instantiations with a specific monad. The remaining type
-- families are inherited from the underlying instantiation 'l'.
data WithM (m :: Type -> Type) l
type instance EvalM (WithM m l) = m
type instance VBool (WithM m l) = VBool l
type instance VWord (WithM m l) = VWord l
type instance VInt (WithM m l) = VInt l
type instance VArray (WithM m l) = VArray l
type instance Extra (WithM m l) = Extra l
--------------------------------------------------------------------------------
-- | Debug rendering only: thunks are never forced, so compound values
-- print as placeholders and vector elements show as "_".
instance Show (Extra l) => Show (Value l) where
  showsPrec p v =
    case v of
      VFun {} -> showString "<<fun>>"
      VUnit -> showString "()"
      VPair{} -> showString "<<tuple>>"
      VCtorApp s _ps _xv -> shows (primName s)
      VVector xv -> showList (toList xv)
      VBool _ -> showString "<<boolean>>"
      VWord _ -> showString "<<bitvector>>"
      VBVToNat n x -> showString "bvToNat " . shows n . showString " " . showParen True (shows x)
      VIntToNat x -> showString "intToNat " . showParen True (shows x)
      VNat n -> shows n
      VInt _ -> showString "<<integer>>"
      VIntMod n _ -> showString ("<<Z " ++ show n ++ ">>")
      VArray{} -> showString "<<array>>"
      VString s -> shows s
      VRecordValue [] -> showString "{}"
      VRecordValue ((fld,_):_) ->
        showString "{" . showString (Text.unpack fld) . showString " = _, ...}"
      VRecursor d _ _ _ _
        -> showString "<<recursor: " . shows d . showString ">>"
      VExtra x -> showsPrec p x
      TValue x -> showsPrec p x
    where
      -- replace each (unforceable) element by a "_" placeholder
      toList = map (const Nil) . V.toList
-- | Debug rendering of type values.
instance Show (Extra l) => Show (TValue l) where
  showsPrec p v =
    case v of
      VBoolType -> showString "Bool"
      VStringType -> showString "String"
      VIntType -> showString "Integer"
      VIntModType n -> showParen True (showString "IntMod " . shows n)
      VArrayType{} -> showString "Array"
      VPiType _ t _ -> showParen True
                       (shows t . showString " -> ...")
      VUnitType -> showString "#()"
      VPairType x y -> showParen True (shows x . showString " * " . shows y)
      VDataType s ps vs
        | null (ps++vs) -> shows s
        | otherwise -> shows s . showList (ps++vs)
      VRecordType [] -> showString "{}"
      VRecordType ((fld,_):_) ->
        showString "{" . showString (Text.unpack fld) . showString " :: _, ...}"
      VVecType n a -> showString "Vec " . shows n
                      . showString " " . showParen True (showsPrec p a)
      VSort s -> shows s
      VRecursorType{} -> showString "RecursorType"
      VTyTerm _ tm -> showString "TyTerm (" . (\x -> showTerm tm ++ x) . showString ")"
-- | Placeholder used when showing vectors of thunks: each element
-- prints as "_" without being forced.
data Nil = Nil

instance Show Nil where
  show Nil = "_"
------------------------------------------------------------
-- Basic operations on values
-- | Build a right-nested tuple value from a list of thunks.
-- The empty list gives unit; 1-tuples are not representable.
vTuple :: VMonad l => [Thunk l] -> Value l
vTuple ts =
  case ts of
    []       -> VUnit
    [_]      -> error "vTuple: unsupported 1-tuple"
    [x, y]   -> VPair x y
    (x : xs) -> VPair x (ready (vTuple xs))
-- | Build a right-nested pair type from a list of types; a singleton
-- list is the type itself and the empty list is the unit type.
vTupleType :: VMonad l => [TValue l] -> TValue l
vTupleType [] = VUnitType
vTupleType ts = foldr1 VPairType ts
-- | Project and force the first component of a pair value; panics on
-- any non-pair value.
valPairLeft :: (HasCallStack, VMonad l, Show (Extra l)) => Value l -> MValue l
valPairLeft (VPair t1 _) = force t1
valPairLeft v = panic "Verifier.SAW.Simulator.Value.valPairLeft" ["Not a pair value:", show v]

-- | Project and force the second component of a pair value; panics on
-- any non-pair value.
valPairRight :: (HasCallStack, VMonad l, Show (Extra l)) => Value l -> MValue l
valPairRight (VPair _ t2) = force t2
valPairRight v = panic "Verifier.SAW.Simulator.Value.valPairRight" ["Not a pair value:", show v]
-- | Build a record value from a field map (fields in ascending key order).
vRecord :: Map FieldName (Thunk l) -> Value l
vRecord = VRecordValue . Map.assocs
-- | Project and force a named field of a record value; panics when the
-- field is missing or the value is not a record.
valRecordProj :: (HasCallStack, VMonad l, Show (Extra l)) => Value l -> FieldName -> MValue l
-- Pattern guard: when the lookup fails, this equation falls through to
-- the "field not found" panic below.
valRecordProj (VRecordValue fld_map) fld
  | Just t <- lookup fld fld_map = force t
valRecordProj v@(VRecordValue _) fld =
  panic "Verifier.SAW.Simulator.Value.valRecordProj"
    ["Record field not found:", show fld, "in value:", show v]
valRecordProj v _ =
  panic "Verifier.SAW.Simulator.Value.valRecordProj"
    ["Not a record value:", show v]
-- | Apply a function-like value (a 'VFun', or a pi type at the type
-- level) to a single argument; panics on anything else.
apply :: (HasCallStack, VMonad l, Show (Extra l)) => Value l -> Thunk l -> MValue l
apply (VFun _ f) x = f x
apply (TValue (VPiType _ _ body)) x = TValue <$> applyPiBody body x
apply v _x = panic "Verifier.SAW.Simulator.Value.apply" ["Not a function value:", show v]

-- | Apply a function value to a list of arguments, left to right.
applyAll :: (VMonad l, Show (Extra l)) => Value l -> [Thunk l] -> MValue l
applyAll = foldM apply

{-# INLINE applyPiBody #-}
-- | Instantiate the codomain of a pi type with the given argument.
applyPiBody :: VMonad l => PiBody l -> Thunk l -> EvalM l (TValue l)
applyPiBody (VDependentPi f) x = f x
applyPiBody (VNondependentPi t) _ = pure t
-- | View a value as a finite type, if it is a type value of finite shape.
asFiniteTypeValue :: Value l -> Maybe FiniteType
asFiniteTypeValue (TValue tv) = asFiniteTypeTValue tv
asFiniteTypeValue _           = Nothing
-- | View a type value as a 'FiniteType', if it is one. Nested pairs
-- flatten into tuples; records become field maps.
asFiniteTypeTValue :: TValue l -> Maybe FiniteType
asFiniteTypeTValue v =
  case v of
    VBoolType -> return FTBit
    VVecType n v1 -> do
      t1 <- asFiniteTypeTValue v1
      return (FTVec n t1)
    VUnitType -> return (FTTuple [])
    VPairType v1 v2 -> do
      t1 <- asFiniteTypeTValue v1
      t2 <- asFiniteTypeTValue v2
      case t2 of
        FTTuple ts -> return (FTTuple (t1 : ts))
        _ -> return (FTTuple [t1, t2])
    VRecordType elem_tps ->
      -- Was `FTRec <$> Map.fromList <$> ...` (a redundant double fmap);
      -- use composition, matching asFirstOrderTypeTValue below.
      FTRec . Map.fromList <$>
      mapM (\(fld,tp) -> (fld,) <$> asFiniteTypeTValue tp) elem_tps
    _ -> Nothing
-- | View a value as a first-order type, if it is a type value of
-- first-order shape.
asFirstOrderTypeValue :: Value l -> Maybe FirstOrderType
asFirstOrderTypeValue (TValue tv) = asFirstOrderTypeTValue tv
asFirstOrderTypeValue _           = Nothing
-- | View a type value as a 'FirstOrderType', if it is one. Function,
-- data, sort, recursor, and string types are not first-order.
asFirstOrderTypeTValue :: TValue l -> Maybe FirstOrderType
asFirstOrderTypeTValue v =
  case v of
    VBoolType -> return FOTBit
    VVecType n v1 -> FOTVec n <$> asFirstOrderTypeTValue v1
    VIntType -> return FOTInt
    VIntModType m -> return (FOTIntMod m)
    VArrayType a b ->
      FOTArray <$> asFirstOrderTypeTValue a <*> asFirstOrderTypeTValue b
    VUnitType -> return (FOTTuple [])
    -- Nested pairs flatten into a single n-ary tuple.
    VPairType v1 v2 -> do
      t1 <- asFirstOrderTypeTValue v1
      t2 <- asFirstOrderTypeTValue v2
      case t2 of
        FOTTuple ts -> return (FOTTuple (t1 : ts))
        _ -> return (FOTTuple [t1, t2])
    VRecordType elem_tps ->
      FOTRec . Map.fromList <$>
      mapM (traverse asFirstOrderTypeTValue) elem_tps
    VStringType -> Nothing
    VPiType{} -> Nothing
    VDataType{} -> Nothing
    VSort{} -> Nothing
    VRecursorType{} -> Nothing
    VTyTerm{} -> Nothing
-- | A (partial) injective mapping from type values to strings. These
-- are intended to be useful as suffixes for names of type instances
-- of uninterpreted constants.
suffixTValue :: TValue sym -> Maybe String
suffixTValue tv =
  case tv of
    VVecType n a ->
      do a' <- suffixTValue a
         Just ("_Vec_" ++ show n ++ a')
    VBoolType -> Just "_Bool"
    VIntType -> Just "_Int"
    VIntModType n -> Just ("_IntMod_" ++ show n)
    VArrayType a b ->
      do a' <- suffixTValue a
         b' <- suffixTValue b
         Just ("_Array" ++ a' ++ b')
    VPiType _ _ _ -> Nothing
    VUnitType -> Just "_Unit"
    VPairType a b ->
      do a' <- suffixTValue a
         b' <- suffixTValue b
         Just ("_Pair" ++ a' ++ b')
    -- Remaining type forms have no stable printable identity, so the
    -- mapping is partial for them.
    VStringType -> Nothing
    VDataType {} -> Nothing
    VRecordType {} -> Nothing
    VSort {} -> Nothing
    VRecursorType{} -> Nothing
    VTyTerm{} -> Nothing
-- | Reconstruct an (unshared) 'Term' from a neutral term by replaying
-- the blocked eliminations around the boxed term.
neutralToTerm :: NeutralTerm -> Term
neutralToTerm = loop
  where
    loop (NeutralBox tm) = tm
    loop (NeutralPairLeft nt) =
      Unshared (FTermF (PairLeft (loop nt)))
    loop (NeutralPairRight nt) =
      Unshared (FTermF (PairRight (loop nt)))
    loop (NeutralRecordProj nt f) =
      Unshared (FTermF (RecordProj (loop nt) f))
    loop (NeutralApp nt arg) =
      Unshared (App (loop nt) arg)
    -- in NeutralRecursorArg the blocked subterm is the argument;
    -- in NeutralRecursor it is the recursor itself
    loop (NeutralRecursorArg r ixs x) =
      Unshared (FTermF (RecursorApp r ixs (loop x)))
    loop (NeutralRecursor r ixs x) =
      Unshared (FTermF (RecursorApp (loop r) ixs x))
    loop (NeutralConstant ec) =
      Unshared (Constant ec Nothing)
-- | Like 'neutralToTerm', but builds hash-consed terms in the given
-- 'SharedContext'.
neutralToSharedTerm :: SharedContext -> NeutralTerm -> IO Term
neutralToSharedTerm sc = loop
  where
    loop (NeutralBox tm) = pure tm
    loop (NeutralPairLeft nt) =
      scFlatTermF sc . PairLeft =<< loop nt
    loop (NeutralPairRight nt) =
      scFlatTermF sc . PairRight =<< loop nt
    loop (NeutralRecordProj nt f) =
      do tm <- loop nt
         scFlatTermF sc (RecordProj tm f)
    loop (NeutralApp nt arg) =
      do tm <- loop nt
         scApply sc tm arg
    loop (NeutralRecursor nt ixs x) =
      do tm <- loop nt
         scFlatTermF sc (RecursorApp tm ixs x)
    loop (NeutralRecursorArg r ixs nt) =
      do tm <- loop nt
         scFlatTermF sc (RecursorApp r ixs tm)
    loop (NeutralConstant ec) =
      do scTermF sc (Constant ec Nothing)
-- | Pretty-print a neutral term via its 'Term' reconstruction.
ppNeutral :: PPOpts -> NeutralTerm -> SawDoc
ppNeutral opts = ppTerm opts . neutralToTerm

instance Show NeutralTerm where
  show = renderSawDoc defaultPPOpts . ppNeutral defaultPPOpts
| GaloisInc/saw-script | saw-core/src/Verifier/SAW/Simulator/Value.hs | bsd-3-clause | 13,963 | 0 | 17 | 3,328 | 4,396 | 2,199 | 2,197 | -1 | -1 |
--
-- Test.hs
--
-- A top level module for loading test cases.
--
-- Gregory Wright, 18 June 2011
--
module Main where
import Data.Maybe
import Ratio
import Basic
import Canonicalize
import Commutativity
import Expr
import Expression
import Numeric
import Symbol
import IO
import Tensor
import TensorBasics
import TensorUtilities
-- Commuting scalar symbols.
a = Symbol $ simpleSymbol "a"
b = Symbol $ simpleSymbol "b"
c = Symbol $ simpleSymbol "c"
e = Symbol $ simpleSymbol "e"
-- Noncommuting symbols in representation space "foo".
l = Symbol $ ncSymbol "l" "foo"
m = Symbol $ ncSymbol "m" "foo"
n = Symbol $ ncSymbol "n" "foo"
-- Noncommuting symbols in representation space "pauli".
x = Symbol $ ncSymbol "x" "pauli"
y = Symbol $ ncSymbol "y" "pauli"
z = Symbol $ ncSymbol "z" "pauli"
-- A 4-dimensional manifold "s" with the Minkowski metric; the metric
-- tensor comes back unnamed and is bound to g below.
(s, unnamedMetric) = mkManifold "s" 4 minkowski
mu = mkIndex s "mu"
nu = mkIndex_ s "nu" "\\nu"
-- NOTE(review): this index is bound to `rho` but named "delta", which
-- collides with the Kronecker delta's name below -- confirm intent.
rho = mkIndex s "delta"
sigma = mkIndex s "sigma"
g = (fromJust unnamedMetric) "g" "g"
-- Test expressions: metric contractions with the Kronecker delta.
g' = g mu (-nu) * g (-sigma) rho * d (-rho) sigma
g'' = g mu (-nu) * g (-rho) sigma * d (-mu) sigma
g''' = g mu (-nu) * g (-mu) sigma * d (-mu) rho
d = mkKroneckerDelta s "delta"
-- | Test helper: maps the integer constant 1 to 0, any other integer
-- constant to 2, and every other expression to 3.
foo :: Expr -> Expr
foo (Const (I k))
  | k == 1    = Const 0
  | otherwise = Const 2
foo _ = Const 3
-- | Symbolic factorial of an integer constant; constants <= 1 give 1.
-- (Partial: non-constant expressions fall through with no match.)
fct :: Expr -> Expr
fct (Const (I 1)) = Const 1
fct i@(Const (I k))
  | k > 1     = i * fct (i - 1)
  | otherwise = Const 1
-- | Physicists' Hermite polynomial H_n(x), fully expanded, built from
-- the recurrence H_n(x) = 2 x H_{n-1}(x) - 2 (n-1) H_{n-2}(x).
hermite :: Integer -> Expr -> Expr
hermite n x =
  let
    herm 0 _ = 1 :: Expr
    herm 1 x = 2 * x
    herm n x = 2 * x * herm (n - 1) x - 2 * (n - 1) * herm (n - 2) x
  in
    expand $ herm (fromInteger n) x
-- Smoke test: print the 10th Hermite polynomial in the symbolic sum y+x.
main = do
  print $ hermite 10 (y+x)
| gwright83/Wheeler | src/Math/Symbolic/Wheeler/Test.hs | bsd-3-clause | 1,520 | 0 | 15 | 395 | 698 | 363 | 335 | 49 | 3 |
module HaskellGame.Rendering where
import Prelude (
Num(..), Show(..), Integral(..), Ord(..),
IO(..), Integer(), String()
)
import qualified System.Console.ANSI as Console
import qualified Data.List as List
import Control.Concurrent (threadDelay)
import Control.Monad (mapM_)
import Data.List ((++), (!!), length, concat)
import Data.Char (toUpper)
import System.IO (hPutStr, hFlush, Handle())
import HaskellGame.Datatypes
import HaskellGame.Graphics
import HaskellGame.Utils
import HaskellGame.Battle (health, level) -- we need this for printing the status bar
{- Print a message at a screen location, in the given colour,
   resetting terminal attributes afterwards. -}
printAt theScreen (x, y) (colour, text) = do
  -- hSetCursorPosition takes row then column, hence the (y, x) swap.
  Console.hSetCursorPosition theScreen y x
  Console.hSetSGR theScreen [Console.SetColor Console.Foreground Console.Vivid colour]
  hPutStr theScreen text
  Console.hSetSGR theScreen [Console.Reset]
{- Rendering the game world to the console -}
-- | Draw every tile of the map, column by column.
renderMap :: Handle -> Map -> IO ()
renderMap theScreen theMap =
    mapM_ draw [ (x, y) | x <- [0 .. (width theMap) - 1]
                        , y <- [0 .. (height theMap) - 1] ]
  where
    -- Tiles are stored row-major, so index by y first, then x.
    draw (x, y) =
      printAt theScreen (x, y) (Console.White, show (((tiles theMap) !! y) !! x))
{- To render a thing, we need to know how to print it (Show typeclass),
and *where* to print it (Located typeclass) -}
render :: (Graphic a, Located a) => Handle -> a -> IO ()
render theScreen obj = do
  let (x, y) = position obj
  -- Bold intensity so game objects stand out against map tiles.
  Console.hSetSGR theScreen [Console.SetConsoleIntensity Console.BoldIntensity]
  printAt theScreen (x, y) (Console.White, [symbol obj])
-- | Draw the whole scene: map first, then objects and monsters on top
-- of it, and the player last so it is always visible.
renderScene :: Handle -> Scene -> IO ()
renderScene theScreen theScene = do
  renderMap theScreen (map (currentLevel theScene))
  mapM_ (render theScreen) (objects (currentLevel theScene))
  mapM_ (render theScreen) (monsters (currentLevel theScene))
  render theScreen (player theScene)
{- Print the player's status display: hitpoints, experience, level,
   and all stats, in bold green at the given location. -}
showStatus theScreen (x, y) p = do
  Console.hSetSGR theScreen [Console.SetConsoleIntensity Console.BoldIntensity]
  let displayMessage = "HP: " ++ (show (hitpoints p)) ++ " " ++
                       "XP: " ++ (show (experience p)) ++ " " ++
                       "LVL: " ++ (show (level p)) ++ " " ++
                       (concat (List.map show (stats p)))
  printAt theScreen (x, y) (Console.Green, displayMessage)
{- We need to be able to print the game time elapsed nicely -}
showTimeElapsed theScreen (x, y) estate =
  let totalSeconds = (frameNumber estate) `div` (frameRate estate)
      minutes = totalSeconds `div` 60
      seconds = totalSeconds `mod` 60
      message = (pad 2 '0' (show minutes)) ++ ":" ++ (pad 2 '0' (show seconds))
  in do Console.hSetSGR theScreen [Console.SetConsoleIntensity Console.BoldIntensity]
        printAt theScreen (x, y) (Console.Yellow, message)
  where
    -- Left-pad someStr with padChar to desiredLength characters.
    -- NOTE(review): desiredLength shrinks by one on each recursive call
    -- while a character is also prepended, so the gap closes twice per
    -- step (pad 4 '0' "" yields "00"). Harmless here because show on
    -- minutes/seconds is never empty, but worth confirming the intent.
    pad desiredLength padChar someStr =
      if (length someStr) < desiredLength then
        pad (desiredLength - 1) padChar (padChar:someStr)
      else someStr
{- We want a legend printed with the game controls in it -}
showControls theScreen (x, y) = do
  printAt theScreen (x, y) (Console.Magenta, "Controls: [i,j,k,l] = move, a = attack, p = pick up, d = drop")
| KevinCardiff/haskell-game2 | src/HaskellGame/Rendering.hs | bsd-3-clause | 3,338 | 0 | 22 | 696 | 1,120 | 599 | 521 | 60 | 2 |
--{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE ExplicitForAll #-}
--{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
--{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RankNTypes #-}
--{-# LANGUAGE RebindableSyntax #-}
--{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedLists #-}
--{-# LANGUAGE NamedFieldPuns #-}
module FV.Jacob
( expand
, hv2q
) where
import Prelude.Extended
--import Math ( sqrt, atan2, cos, sin, atan, pi )
import Data.Cov (Jacs (..), Vec3, Vec5, fromArray, val )
import qualified Data.Cov.Jac ( Jac (..) )
import qualified Data.Cov.Vec ( Vec (..) )
-- | calculate q 3-vector for a given helix parameterization near vertex position
hv2q :: Vec5 -> Vec3 -> Vec3
hv2q Data.Cov.Vec.Vec {Data.Cov.Vec.v=h_} Data.Cov.Vec.Vec {Data.Cov.Vec.v=v_} = q where
  xx = uidx v_ 0
  yy = uidx v_ 1
  r = sqrt $ xx*xx + yy*yy
  phi = atan2 yy xx
  w0 = uidx h_ 0
  tl0 = uidx h_ 1
  psi0 = uidx h_ 2
  -- d0 = uidx h_ 3
  -- z0 = uidx h_ 4
  xi = mod' (psi0 - phi + 2.0*pi) (2.0*pi)
  cxi = cos xi
  sxi = sin xi
  q = fromArray $
        if w0 /= 0.0
          then [ w0, tl0, psi0 + gamma ]
          else [ w0, tl0, psi0 ]
        where
          oow0 = 1.0/w0
          -- Turning angle to the point of closest approach. The atan must
          -- apply to the whole quotient (cf. `expand` below): the original
          -- `atan r*cxi/(oow0-r*sxi)` parsed as ((atan r)*cxi)/(oow0-r*sxi)
          -- because function application binds tightest.
          gamma = atan $ r*cxi/(oow0 - r*sxi)
-- | Expand the measurement model around vertex position v and momentum
-- parameters q: returns the Jacobians A = dh/dv, B = dh/dq and the
-- constant term h0 of the linearized helix parameterization.
expand :: Vec3 -> Vec3 -> Jacs
expand v q = Jacs {aajacs= aa, bbjacs= bb, h0jacs= h0} where
  v_ = val v
  xx = uidx v_ 0
  yy = uidx v_ 1
  z = uidx v_ 2
  r = sqrt $ xx*xx + yy*yy
  phi = atan2 yy xx
  q_ = val q
  w = uidx q_ 0
  tl = uidx q_ 1
  psi = uidx q_ 2
  -- some more derived quantities
  xi = mod' (psi - phi + 2.0*pi) (2.0*pi)
  cxi = cos xi
  sxi = sin xi
  oow = 1.0 / w
  rw = r * w
  gamma = atan $ r*cxi/(oow - r*sxi)
  sg = sin gamma
  cg = cos gamma
  -- calculate transformed quantities
  psi0 = psi - gamma
  d0 = oow - (oow - r*sxi)/cg
  z0 = z - tl*gamma/w
  -- calc Jacobian (guarding r == 0, where d(r)/dx etc. are taken as 0)
  drdx = if r /= 0.0 then xx/r else 0.0
  drdy = if r /= 0.0 then yy/r else 0.0
  rdxidx = if r /= 0.0 then yy/r else 0.0
  rdxidy = if r /= 0.0 then -xx/r else 0.0
  dgdvar0 = 1.0/(1.0 + rw*rw - 2.0*rw*sxi)
  dgdx = dgdvar0*(w*cxi*drdx + w*(rw - sxi)*rdxidx)
  dgdy = dgdvar0*(w*cxi*drdy + w*(rw - sxi)*rdxidy)
  dgdw = dgdvar0*r*cxi
  dgdpsi = dgdvar0*rw*(rw - sxi)
  -- fill matrix:
  -- d w / d r, d phi, d z
  a11 = 0.0
  a12 = 0.0
  a13 = 0.0
  -- d tl / d x, d y, d z
  a21 = 0.0
  a22 = 0.0
  a23 = 0.0
  -- d psi0 / d x, d y, d z
  a31 = -dgdx
  a32 = -dgdy
  a33 = 0.0
  -- d d0 / d x, d y, d z
  a41 = cxi*rdxidx/cg + sxi*drdx/cg
        - (oow - r*sxi)*sg*dgdx/cg/cg
  a42 = cxi*rdxidy/cg + sxi*drdy/cg
        - (oow - r*sxi)*sg*dgdy/cg/cg
  a43 = 0.0
  -- d z0 / d x, d y, d z
  a51 = -tl/w*dgdx
  a52 = -tl/w*dgdy
  a53 = 1.0
  -- B
  -- d w / d w, d tl, d psi
  b11 = 1.0
  b12 = 0.0
  b13 = 0.0
  -- d tl / d w, d tl, d psi
  b21 = 0.0
  b22 = 1.0
  b23 = 0.0
  -- d psi0 / d w, d tl, d psi
  b31 = -dgdw
  b32 = 0.0
  b33 = 1.0 - dgdpsi
  -- d d0 / d w, d tl, d psi
  b41 = -oow*oow*(1.0 - 1.0/cg)
        - (oow - r*sxi)*sg*dgdw/cg/cg
  b42 = 0.0
  b43 = r*cxi/cg - (oow - r*sxi)*sg*dgdpsi/cg/cg
  -- d z0 / d w, d tl, d psi
  b51 = -tl/w*(dgdw - gamma/w)
  b52 = -gamma/w
  b53 = -tl/w*dgdpsi
  v01 = xx
  v02 = yy
  v03 = z
  q01 = w
  q02 = tl
  q03 = psi
  -- constant term of the linear expansion: h(v,q) - A v0 - B q0
  h0 = fromArray [
      0.0,
      0.0,
      psi0 - a31*v01 - a32*v02 - b31*q01 - b33*q03,
      d0 - a41*v01 - a42*v02 - b41*q01 - b43*q03,
      z0 - a51*v01 - a52*v02 - a53*v03 - b51*q01 - b52*q02 - b53*q03]
  aa = Data.Cov.Jac.Jac { Data.Cov.Jac.v= [a11,a12,a13,a21,a22,a23,a31,a32,a33,a41,a42,a43,a51,a52,a53], Data.Cov.Jac.nr= 5}
  bb = Data.Cov.Jac.Jac { Data.Cov.Jac.v= [b11,b12,b13,b21,b22,b23,b31,b32,b33,b41,b42,b43,b51,b52,b53], Data.Cov.Jac.nr= 5}
{-- aaT = tr aa `debug` ( "v0 --->> " <> (show v) <> --}
{-- "q0 --->> " <> (show q) <> --}
{-- "aa --->> " <> show aa <> --}
{-- "bb --->> " <> show bb <> --}
{-- "h0 --->> " <> (show h0) --}
{-- ) --}
| LATBauerdick/fv.hs | src/FV/Jacob.hs | bsd-3-clause | 4,857 | 0 | 20 | 1,957 | 1,651 | 939 | 712 | 109 | 5 |
module Main where
import Program.Programs
import Syntax
import qualified Transformer.ConsPD
-- | Alias for the ConsPD transformer entry point.
runConsPD = Transformer.ConsPD.runConsPD

-- | Run ConsPD on the doubleAppendo relation with no depth limit (-1),
-- querying with four fresh logic variables.
runDoubleAppendo =
  runConsPD (-1) "doubleAppendo" prog
  where
    prog = Program doubleAppendo $ fresh ["x", "y", "z", "r"] (call "doubleAppendo" [V "x", V "y", V "z", V "r"])

main :: IO ()
main =
  runDoubleAppendo
| kajigor/uKanren_transformations | app/bench/Main.hs | bsd-3-clause | 378 | 0 | 12 | 83 | 121 | 67 | 54 | 11 | 1 |
-- A Lighted Wreath
-- ================
--
-- This program draws the wreath with all its default values.
{-# LANGUAGE NoMonomorphismRestriction #-}
import Wreath
import Diagrams.Prelude
import Diagrams.Backend.SVG.CmdLine
-- Run the program with `dist/build/simulation/simulation -o simulation.svg -w 400`
-- where `-o` sets the output filename, and `-w` sets the diagram width.
-- Render the lit wreath on a black background via the SVG CmdLine backend.
main = defaultMain (litWreath # bg black)
| bobgru/wreath | examples/simulation.hs | bsd-3-clause | 426 | 0 | 8 | 63 | 41 | 26 | 15 | 5 | 1 |
{-# LANGUAGE DeriveDataTypeable, ForeignFunctionInterface, RecordWildCards #-}
-- |
-- Module: Database.MySQL.Base
-- Copyright: (c) 2011 MailRank, Inc.
-- License: BSD3
-- Maintainer: Bryan O'Sullivan <bos@serpentine.com>
-- Stability: experimental
-- Portability: portable
--
-- A low-level client library for the MySQL database, implemented as
-- bindings to the C @mysqlclient@ API.
module Database.MySQL.Base
(
-- * Licensing
-- $license
-- * Resource management
-- $mgmt
-- * Types
ConnectInfo(..)
, SSLInfo(..)
, Seconds
, Protocol(..)
, Option(..)
, defaultConnectInfo
, defaultSSLInfo
, Connection
, Result
, Type(..)
, Row
, MySQLError(errFunction, errNumber, errMessage)
-- * Connection management
, connect
, close
, autocommit
, ping
, changeUser
, selectDB
, setCharacterSet
-- ** Connection information
, threadId
, serverInfo
, hostInfo
, protocolInfo
, characterSet
, sslCipher
, serverStatus
-- * Querying
, query
, insertID
-- ** Escaping
, escape
-- ** Results
, fieldCount
, affectedRows
-- * Working with results
, isResultValid
, freeResult
, storeResult
, useResult
, fetchRow
, fetchFields
, dataSeek
, rowSeek
, rowTell
-- ** Multiple results
, nextResult
-- * Transactions
, commit
, rollback
-- * General information
, clientInfo
, clientVersion
) where
import Control.Applicative ((<$>), (<*>))
import Control.Exception (Exception, throw)
import Control.Monad (forM_, unless, when)
import Data.ByteString.Char8 ()
import Data.ByteString.Internal (ByteString, create, createAndTrim, memcpy)
import Data.ByteString.Unsafe (unsafeUseAsCStringLen)
import Data.IORef (IORef, atomicModifyIORef, newIORef, readIORef, writeIORef)
import Data.Int (Int64)
import Data.List (foldl')
import Data.Typeable (Typeable)
import Data.Word (Word, Word16, Word64)
import Database.MySQL.Base.C
import Database.MySQL.Base.Types
import Foreign.C.String (CString, peekCString, withCString)
import Foreign.C.Types (CULong)
import Foreign.Concurrent (newForeignPtr)
import Foreign.ForeignPtr hiding (newForeignPtr)
import Foreign.Marshal.Array (peekArray)
import Foreign.Ptr (Ptr, castPtr, nullPtr)
import System.IO.Unsafe (unsafePerformIO)
import System.Mem.Weak (Weak, deRefWeak, mkWeakPtr)
-- $license
--
-- /Important licensing note/: This library is BSD-licensed under the
-- terms of the MySQL FOSS License Exception
-- <http://www.mysql.com/about/legal/licensing/foss-exception/>.
--
-- Since this library links against the GPL-licensed @mysqlclient@
-- library, a non-open-source application that uses it /may/ be
-- subject to the terms of the GPL.
-- $mgmt
--
-- Our rules for managing 'Connection' and 'Result' values are
-- unfortunately complicated, thanks to MySQL's lifetime rules.
--
-- At the C @libmysqlclient@ level, a single @MYSQL@ connection may
-- cause multiple @MYSQL_RES@ result values to be created over the
-- course of multiple queries, but only one of these @MYSQL_RES@
-- values may be alive at a time. The programmer is responsible for
-- knowing when to call @mysql_free_result@.
--
-- Meanwhile, up in Haskell-land, we'd like both 'Connection' and
-- 'Result' values to be managed either manually or automatically. In
-- particular, we want finalizers to tidy up after a messy programmer,
-- and we'd prefer it if people didn't need to be mindful of calling
-- @mysql_free_result@. This means that we must wrestle with the
-- lifetime rules. An obvious approach would be to use some monad and
-- type magic to enforce those rules, but then we'd end up with an
-- awkward API.
--
-- Instead, we allow 'Result' values to stay alive for arbitrarily
-- long times, while preserving the right to mark them as
-- invalid. When a @Result@ is marked invalid, its associated
-- @MYSQL_RES@ is freed, and can no longer be used.
--
-- Since all functions over @Result@ values are in the 'IO' monad, we
-- don't risk disrupting pure code by introducing this notion of
-- invalidity. If code tries to use an invalid @Result@, a
-- 'MySQLError' will be thrown. This should /not/ occur in normal
-- code, so there should be no need to use 'isResultValid' to test a
-- @Result@ for validity.
--
-- Each of the following functions will invalidate a 'Result':
--
-- * 'close'
--
-- * 'freeResult'
--
-- * 'nextResult'
--
-- * 'storeResult'
--
-- * 'useResult'
--
-- A 'Result' must be able to keep a 'Connection' alive so that a
-- streaming @Result@ constructed by 'useResult' can continue to pull
-- data from the server, but a @Connection@ must (a) be able to cause
-- the @MYSQL_RES@ behind a @Result@ to be deleted at a moment's notice,
-- while (b) not artificially prolonging the life of either the @Result@
-- or its @MYSQL_RES@.
-- | Parameters for establishing a connection to a MySQL server.
data ConnectInfo = ConnectInfo {
      connectHost :: String          -- ^ Server host name or address.
    , connectPort :: Word16          -- ^ Server TCP port.
    , connectUser :: String          -- ^ User name to authenticate as.
    , connectPassword :: String      -- ^ Password (may be empty).
    , connectDatabase :: String      -- ^ Database to select after connecting.
    , connectOptions :: [Option]     -- ^ Options applied before connecting.
    , connectPath :: FilePath        -- ^ Unix socket path, if used.
    , connectSSL :: Maybe SSLInfo    -- ^ SSL configuration, if any.
    } deriving (Eq, Read, Show, Typeable)
-- | SSL parameters passed through to @mysql_ssl_set@.
data SSLInfo = SSLInfo {
      sslKey :: FilePath      -- ^ Path to the client key file.
    , sslCert :: FilePath     -- ^ Path to the client certificate file.
    , sslCA :: FilePath       -- ^ Path to the certificate authority file.
    , sslCAPath :: FilePath   -- ^ Directory of trusted CA certificates.
    , sslCiphers :: String    -- ^ Comma-separated list of cipher names.
    } deriving (Eq, Read, Show, Typeable)
-- | Errors thrown by this library: connection-level or result-level
-- failures, carrying the failing function name, the MySQL error
-- number, and the server's error message.
data MySQLError = ConnectionError {
      errFunction :: String
    , errNumber :: Int
    , errMessage :: String
    } | ResultError {
      errFunction :: String
    , errNumber :: Int
    , errMessage :: String
    } deriving (Eq, Show, Typeable)

instance Exception MySQLError
-- | Connection to a MySQL database.
data Connection = Connection {
      connFP :: ForeignPtr MYSQL
      -- ^ Keeps the underlying @MYSQL@ handle alive; its finalizer closes it.
    , connClose :: IO ()
      -- ^ Idempotent close action, shared with the finalizer.
    , connResult :: IORef (Maybe (Weak Result))
      -- ^ The at-most-one live result for this connection (held weakly).
    }

-- | Result of a database query.
data Result = Result {
      resFP :: ForeignPtr MYSQL_RES
    , resFields :: {-# UNPACK #-} !Int
      -- ^ Cached field count for this result set.
    , resConnection :: Connection
      -- ^ Keeps the connection alive while a streaming result is in use.
    , resValid :: IORef Bool
      -- ^ Becomes 'False' once the underlying @MYSQL_RES@ has been freed.
    , resFetchFields :: Ptr MYSQL_RES -> IO (Ptr Field)
    , resFetchRow :: Ptr MYSQL_RES -> IO MYSQL_ROW
    , resFetchLengths :: Ptr MYSQL_RES -> IO (Ptr CULong)
    , resFreeResult :: Ptr MYSQL_RES -> IO ()
    } | EmptyResult

-- | A row cursor, used by 'rowSeek' and 'rowTell'.
newtype Row = Row MYSQL_ROW_OFFSET
-- | Default information for setting up a connection.
--
-- Defaults are as follows:
--
-- * Server on @localhost@
--
-- * User @root@
--
-- * No password
--
-- * Database @test@
--
-- * Character set @utf8@
--
-- Use as in the following example:
--
-- > connect defaultConnectInfo { connectHost = "db.example.com" }
defaultConnectInfo :: ConnectInfo
defaultConnectInfo = ConnectInfo {
                       connectHost = "localhost"
                     , connectPort = 3306
                     , connectUser = "root"
                     , connectPassword = ""
                     , connectDatabase = "test"
                     , connectOptions = [CharsetName "utf8"]
                     , connectPath = ""
                     , connectSSL = Nothing
                     }

-- | Default (empty) information for setting up an SSL connection.
defaultSSLInfo :: SSLInfo
defaultSSLInfo = SSLInfo {
                   sslKey = ""
                 , sslCert = ""
                 , sslCA = ""
                 , sslCAPath = ""
                 , sslCiphers = ""
                 }
-- | Connect to a database. Throws 'MySQLError' if any option cannot be
-- set or the connection itself fails.
connect :: ConnectInfo -> IO Connection
connect ConnectInfo{..} = do
  closed <- newIORef False
  ptr0 <- mysql_init nullPtr
  -- SSL must be configured on the handle before mysql_real_connect.
  case connectSSL of
    Nothing -> return ()
    Just SSLInfo{..} -> withString sslKey $ \ckey ->
                        withString sslCert $ \ccert ->
                        withString sslCA $ \cca ->
                        withString sslCAPath $ \ccapath ->
                        withString sslCiphers $ \ccipher ->
                          mysql_ssl_set ptr0 ckey ccert cca ccapath ccipher
                          >> return ()
  -- Likewise, client options apply to the pre-connection handle.
  forM_ connectOptions $ \opt -> do
    r <- mysql_options ptr0 opt
    unless (r == 0) $ connectionError_ "connect" ptr0
  let flags = foldl' (+) 0 . map toConnectFlag $ connectOptions
  ptr <- withString connectHost $ \chost ->
         withString connectUser $ \cuser ->
         withString connectPassword $ \cpass ->
         withString connectDatabase $ \cdb ->
         withString connectPath $ \cpath ->
           mysql_real_connect ptr0 chost cuser cpass cdb
                              (fromIntegral connectPort) cpath flags
  when (ptr == nullPtr) $
    connectionError_ "connect" ptr0
  res <- newIORef Nothing
  -- Close exactly once, whether invoked manually or by the finalizer;
  -- also invalidates any outstanding result first.
  let realClose = do
        cleanupConnResult res
        wasClosed <- atomicModifyIORef closed $ \prev -> (True, prev)
        unless wasClosed $ mysql_close ptr
  fp <- newForeignPtr ptr realClose
  return Connection {
                connFP = fp
              , connClose = realClose
              , connResult = res
              }
-- | Immediately release the @MYSQL_RES@ behind any outstanding 'Result'
-- registered on this connection, marking that 'Result' as invalid.
cleanupConnResult :: IORef (Maybe (Weak Result)) -> IO ()
cleanupConnResult res = do
  mweak <- readIORef res
  case mweak of
    Nothing -> return ()
    Just weak -> do
      mresult <- deRefWeak weak
      case mresult of
        Nothing -> return ()
        Just r  -> freeResult r
-- | Close a connection, and mark any outstanding 'Result' as
-- invalid.
close :: Connection -> IO ()
close = connClose
{-# INLINE close #-}

-- | Ping the server, raising a connection error on failure.
ping :: Connection -> IO ()
ping conn = withConn conn $ \ptr -> mysql_ping ptr >>= check "ping" conn

-- | Thread id of this connection, as reported by @mysql_thread_id@.
threadId :: Connection -> IO Word
threadId conn = fromIntegral <$> withConn conn mysql_thread_id

-- | Server version string (@mysql_get_server_info@).
serverInfo :: Connection -> IO String
serverInfo conn = withConn conn $ \ptr ->
  peekCString =<< mysql_get_server_info ptr

-- | Description of this connection (@mysql_get_host_info@).
hostInfo :: Connection -> IO String
hostInfo conn = withConn conn $ \ptr ->
  peekCString =<< mysql_get_host_info ptr

-- | Client\/server protocol version (@mysql_get_proto_info@).
protocolInfo :: Connection -> IO Word
protocolInfo conn = withConn conn $ \ptr ->
  fromIntegral <$> mysql_get_proto_info ptr

-- | Set the connection's character set.
setCharacterSet :: Connection -> String -> IO ()
setCharacterSet conn cs =
  withCString cs $ \ccs ->
    withConn conn $ \ptr ->
      mysql_set_character_set ptr ccs >>= check "setCharacterSet" conn

-- | Name of the connection's current character set.
characterSet :: Connection -> IO String
characterSet conn = withConn conn $ \ptr ->
  peekCString =<< mysql_character_set_name ptr

-- | The SSL cipher in use, or 'Nothing' when the C call returns NULL.
sslCipher :: Connection -> IO (Maybe String)
sslCipher conn = withConn conn $ \ptr ->
  withPtr peekCString =<< mysql_get_ssl_cipher ptr

-- | Human-readable server status string (@mysql_stat@).
serverStatus :: Connection -> IO String
serverStatus conn = withConn conn $ \ptr -> do
  st <- mysql_stat ptr
  checkNull "serverStatus" conn st
  peekCString st

-- | Client library version string.  Computed once; NOINLINE keeps the
-- unsafePerformIO call from being duplicated by the optimizer.
clientInfo :: String
clientInfo = unsafePerformIO $ peekCString mysql_get_client_info
{-# NOINLINE clientInfo #-}

-- | Client library version number.
clientVersion :: Word
clientVersion = fromIntegral mysql_get_client_version
{-# NOINLINE clientVersion #-}
-- | Turn autocommit on or off.
--
-- By default, MySQL runs with autocommit mode enabled. In this mode,
-- as soon as you modify a table, MySQL stores your modification
-- permanently.
autocommit :: Connection -> Bool -> IO ()
autocommit conn onOff =
  withConn conn $ \ptr ->
    mysql_autocommit ptr flag >>= check "autocommit" conn
  where
    flag | onOff     = 1
         | otherwise = 0
-- | Re-authenticate as a different user, optionally selecting a database.
changeUser :: Connection -> String -> String -> Maybe String -> IO ()
changeUser conn user pass mdb =
  withCString user $ \cuser ->
    withCString pass $ \cpass ->
      withMaybeString mdb $ \cdb ->
        withConn conn $ \ptr ->
          mysql_change_user ptr cuser cpass cdb >>= check "changeUser" conn

-- | Select the default database for this connection.
selectDB :: Connection -> String -> IO ()
selectDB conn db =
  withCString db $ \cdb ->
    withConn conn $ \ptr ->
      mysql_select_db ptr cdb >>= check "selectDB" conn

-- | Submit a query to the server.
query :: Connection -> ByteString -> IO ()
query conn q = withConn conn $ \ptr ->
  unsafeUseAsCStringLen q $ \(p,l) ->
    mysql_real_query ptr p (fromIntegral l) >>= check "query" conn

-- | Return the value generated for an @AUTO_INCREMENT@ column by the
-- previous @INSERT@ or @UPDATE@ statement.
--
-- See <http://dev.mysql.com/doc/refman/5.5/en/mysql-insert-id.html>
insertID :: Connection -> IO Word64
insertID conn = fromIntegral <$> (withConn conn $ mysql_insert_id)

-- | Return the number of fields (columns) in a result.
--
-- * If 'Left' 'Connection', returns the number of columns for the most
--   recent query on the connection.
--
-- * For 'Right' 'Result', returns the number of columns in each row
--   of this result.
--
-- The number of columns may legitimately be zero.
fieldCount :: Either Connection Result -> IO Int
fieldCount (Right EmptyResult) = return 0
fieldCount (Right res)         = return (resFields res)
fieldCount (Left conn)         =
  withConn conn $ fmap fromIntegral . mysql_field_count

-- | Number of rows affected by the last statement.
affectedRows :: Connection -> IO Int64
affectedRows conn = withConn conn $ fmap fromIntegral . mysql_affected_rows
-- | Retrieve a complete result.
--
-- Any previous outstanding 'Result' is first marked as invalid.
storeResult :: Connection -> IO Result
storeResult = frobResult "storeResult" mysql_store_result
                         mysql_fetch_fields_nonblock
                         mysql_fetch_row_nonblock
                         mysql_fetch_lengths_nonblock
                         mysql_free_result_nonblock

-- | Initiate a row-by-row retrieval of a result.
--
-- Any previous outstanding 'Result' is first marked as invalid.
useResult :: Connection -> IO Result
useResult = frobResult "useResult" mysql_use_result
                       mysql_fetch_fields
                       mysql_fetch_row
                       mysql_fetch_lengths
                       mysql_free_result

-- Shared implementation of 'storeResult' and 'useResult': obtain the
-- result with @frob@ and package it together with the fetch\/free
-- functions appropriate to that retrieval mode.
frobResult :: String
           -> (Ptr MYSQL -> IO (Ptr MYSQL_RES))
           -> (Ptr MYSQL_RES -> IO (Ptr Field))
           -> (Ptr MYSQL_RES -> IO MYSQL_ROW)
           -> (Ptr MYSQL_RES -> IO (Ptr CULong))
           -> (Ptr MYSQL_RES -> IO ())
           -> Connection -> IO Result
frobResult func frob fetchFieldsFunc fetchRowFunc fetchLengthsFunc
           myFreeResult conn =
  withConn conn $ \ptr -> do
    -- Invalidate any previous result on this connection first.
    cleanupConnResult (connResult conn)
    res <- frob ptr
    fields <- mysql_field_count ptr
    valid <- newIORef True
    if res == nullPtr
      -- A NULL result with zero columns means the statement produced no
      -- result set; NULL with columns expected is an error.
      then if fields == 0
           then return EmptyResult
           else connectionError func conn
      else do
        -- Free the MYSQL_RES when the Result is garbage collected; the
        -- 'valid' IORef ensures the free runs at most once.
        fp <- newForeignPtr res $ freeResult_ valid myFreeResult res
        let ret = Result {
                    resFP = fp
                  , resFields = fromIntegral fields
                  , resConnection = conn
                  , resValid = valid
                  , resFetchFields = fetchFieldsFunc
                  , resFetchRow = fetchRowFunc
                  , resFetchLengths = fetchLengthsFunc
                  , resFreeResult = myFreeResult
                  }
        -- Register a weak pointer so cleanupConnResult can invalidate
        -- this Result when the next query is issued.
        weak <- mkWeakPtr ret (Just (freeResult_ valid myFreeResult res))
        writeIORef (connResult conn) (Just weak)
        return ret
-- | Immediately free the @MYSQL_RES@ value associated with this
-- 'Result', and mark the @Result@ as invalid.
freeResult :: Result -> IO ()
freeResult Result{..} = withForeignPtr resFP $
                        freeResult_ resValid resFreeResult
freeResult EmptyResult{..} = return ()

-- | Check whether a 'Result' is still valid, i.e. backed by a live
-- @MYSQL_RES@ value.
isResultValid :: Result -> IO Bool
isResultValid Result{..} = readIORef resValid
isResultValid EmptyResult = return False

-- Free the underlying MYSQL_RES exactly once: the IORef is atomically
-- flipped to False, and the free function runs only if it was True.
freeResult_ :: IORef Bool -> (Ptr MYSQL_RES -> IO ()) -> Ptr MYSQL_RES -> IO ()
freeResult_ valid free ptr = do
  wasValid <- atomicModifyIORef valid $ \prev -> (False, prev)
  when wasValid $ free ptr

-- | Fetch the next row of a result.  Returns an empty list when no
-- more rows are available; a 'Nothing' column corresponds to a NULL
-- pointer in the C row.
fetchRow :: Result -> IO [Maybe ByteString]
fetchRow res@Result{..} = withRes "fetchRow" res $ \ptr -> do
  rowPtr <- resFetchRow ptr
  if rowPtr == nullPtr
    then return []
    else do
      lenPtr <- resFetchLengths ptr
      checkNull "fetchRow" resConnection lenPtr
      -- Copy each column's bytes out of the C row into a fresh
      -- ByteString; NULL columns become Nothing via withPtr.
      let go len = withPtr $ \colPtr ->
                   create (fromIntegral len) $ \d ->
                     memcpy d (castPtr colPtr) (fromIntegral len)
      sequence =<< zipWith go <$> peekArray resFields lenPtr
                              <*> peekArray resFields rowPtr
fetchRow EmptyResult{..} = return []

-- | Fetch metadata for all fields (columns) of a result.
fetchFields :: Result -> IO [Field]
fetchFields res@Result{..} = withRes "fetchFields" res $ \ptr -> do
  peekArray resFields =<< resFetchFields ptr
fetchFields EmptyResult{..} = return []

-- | Seek to an absolute row number within a result.
dataSeek :: Result -> Int64 -> IO ()
dataSeek res row = withRes "dataSeek" res $ \ptr ->
  mysql_data_seek ptr (fromIntegral row)

-- | Record the current row cursor, for later use with 'rowSeek'.
rowTell :: Result -> IO Row
rowTell res = withRes "rowTell" res $ \ptr ->
  Row <$> mysql_row_tell ptr

-- | Seek to a previously recorded cursor; returns the old position.
rowSeek :: Result -> Row -> IO Row
rowSeek res (Row row) = withRes "rowSeek" res $ \ptr ->
  Row <$> mysql_row_seek ptr row

-- | Read the next statement result. Returns 'True' if another result
-- is available, 'False' otherwise.
--
-- This function marks the current 'Result' as invalid, if one exists.
nextResult :: Connection -> IO Bool
nextResult conn = withConn conn $ \ptr -> do
  cleanupConnResult (connResult conn)
  i <- mysql_next_result ptr
  case i of
    0  -> return True
    -1 -> return False
    _  -> connectionError "nextResult" conn

-- | Commit the current transaction.
commit :: Connection -> IO ()
commit conn = withConn conn $ \ptr ->
  mysql_commit ptr >>= check "commit" conn

-- | Roll back the current transaction.
rollback :: Connection -> IO ()
rollback conn = withConn conn $ \ptr ->
  mysql_rollback ptr >>= check "rollback" conn

-- | Escape a string for safe inclusion in a query.  The output buffer
-- is sized at @2*n+1@ bytes, the documented worst case for
-- @mysql_real_escape_string@.
escape :: Connection -> ByteString -> IO ByteString
escape conn bs = withConn conn $ \ptr ->
  unsafeUseAsCStringLen bs $ \(p,l) ->
    createAndTrim (l*2 + 1) $ \to ->
      fromIntegral <$> mysql_real_escape_string ptr (castPtr to) p
                                                (fromIntegral l)
-- Run an action with the raw connection pointer, keeping the
-- connection's ForeignPtr alive for the duration.
withConn :: Connection -> (Ptr MYSQL -> IO a) -> IO a
withConn conn = withForeignPtr (connFP conn)

-- Run an action with the raw result pointer, first checking that the
-- Result has not been invalidated.
withRes :: String -> Result -> (Ptr MYSQL_RES -> IO a) -> IO a
withRes func res act = do
  valid <- readIORef (resValid res)
  unless valid . throw $ ResultError func 0 "result is no longer usable"
  withForeignPtr (resFP res) act

-- Marshal a String to a CString, passing NULL for the empty string.
withString :: String -> (CString -> IO a) -> IO a
withString [] act = act nullPtr
withString xs act = withCString xs act

-- Like 'withString', but driven by a Maybe: Nothing becomes NULL.
withMaybeString :: Maybe String -> (CString -> IO a) -> IO a
withMaybeString Nothing act = act nullPtr
withMaybeString (Just xs) act = withCString xs act
-- Throw a connection error unless the C return code is zero.
check :: (Eq a, Num a) => String -> Connection -> a -> IO ()
check func conn r = unless (r == 0) $ connectionError func conn
{-# INLINE check #-}

-- Throw a connection error if the C call returned a NULL pointer.
checkNull :: String -> Connection -> Ptr a -> IO ()
checkNull func conn p = when (p == nullPtr) $ connectionError func conn
{-# INLINE checkNull #-}
-- Apply an action to a pointer, yielding Nothing for NULL input.
withPtr :: (Ptr a -> IO b) -> Ptr a -> IO (Maybe b)
withPtr act p =
  if p == nullPtr
    then return Nothing
    else fmap Just (act p)
-- Raise a ConnectionError named after the failing API function,
-- reading the error number and message from the connection handle.
connectionError :: String -> Connection -> IO a
connectionError func conn = withConn conn (connectionError_ func)

connectionError_ :: String -> Ptr MYSQL -> IO a
connectionError_ func ptr = do
  errno <- mysql_errno ptr
  msg   <- peekCString =<< mysql_error ptr
  throw (ConnectionError func (fromIntegral errno) msg)
| lhuang7/mysql | Database/MySQL/Base.hs | bsd-3-clause | 19,330 | 0 | 22 | 4,786 | 4,476 | 2,360 | 2,116 | 371 | 3 |
{-
OnYourOwn1.hs (adapted from Simple.cpp which is (c) 2004 Astle/Hawkins)
Copyright (c) Sven Panne 2004-2005 <sven.panne@aedion.de>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
-}
import Control.Monad ( unless )
import System.Exit ( exitWith, ExitCode(ExitSuccess) )
import Graphics.UI.GLUT hiding ( initialize )
--------------------------------------------------------------------------------
-- Setup GLUT and OpenGL, drop into the event loop.
--------------------------------------------------------------------------------
main :: IO ()
main = do
  -- Setup the basic GLUT stuff.  This program ignores its command-line
  -- arguments, so the initialization result is discarded explicitly
  -- (avoids a -Wunused-do-bind warning; behavior is unchanged).
  _ <- getArgsAndInitialize
  initialDisplayMode $= [ DoubleBuffered, RGBMode, WithDepthBuffer ]
  -- Create the window
  initialWindowSize $= Size 1024 768
  initialWindowPosition $= Position 100 150
  _ <- createWindow "BOGLGP - Chapter 1 - On Your Own 1"
  initialize
  -- Register the event callback functions
  displayCallback $= display
  reshapeCallback $= Just reshape
  keyboardMouseCallback $= Just keyboardMouseHandler
  -- No need for an idle callback here, this would just hog the CPU
  -- without any visible effect

  -- At this point, control is relinquished to the GLUT event handler.
  -- Control is returned as events occur, via the callback functions.
  mainLoop
--------------------------------------------------------------------------------
-- One time setup, including creating menus, creating a light, setting the
-- shading mode and clear color, and loading textures.
--------------------------------------------------------------------------------
-- One-time GL/GLUT state setup.
initialize :: IO ()
initialize = do
  -- set up the only menu: a right-button popup with a single Exit entry
  attachMenu RightButton (Menu [MenuEntry "Exit" (exitWith ExitSuccess)])
  -- enable depth testing so nearer fragments win
  depthFunc $= Just Less
--------------------------------------------------------------------------------
-- Handle mouse and keyboard events. For this simple demo, just exit on a
-- left click or when q is pressed.
--------------------------------------------------------------------------------
-- Exit on a left mouse click or the q key; any other input event just
-- requests a redisplay.
keyboardMouseHandler :: KeyboardMouseCallback
keyboardMouseHandler key _state _mods _pos =
  case key of
    MouseButton LeftButton -> exitWith ExitSuccess
    Char 'q'               -> exitWith ExitSuccess
    _                      -> postRedisplay Nothing
--------------------------------------------------------------------------------
-- Reset the viewport for window changes.
--------------------------------------------------------------------------------
-- Reset the viewport and projection matrix for a window size change.
reshape :: ReshapeCallback
reshape size@(Size width height) =
  -- ignore degenerate resizes: height 0 would divide by zero below
  unless (height == 0) $ do
    viewport $= (Position 0 0, size)
    matrixMode $= Projection
    loadIdentity
    perspective 90 (fromIntegral width / fromIntegral height) 1 100
    matrixMode $= Modelview 0
--------------------------------------------------------------------------------
-- Clear and redraw the scene.
--------------------------------------------------------------------------------
-- Clear and redraw the scene: one red triangle and one blue
-- five-vertex polygon, then swap buffers.
display :: DisplayCallback
display = do
  -- set up the camera
  loadIdentity
  lookAt (Vertex3 0 1 6) (Vertex3 0 0 0) (Vector3 0 1 0)
  -- clear the screen
  clear [ ColorBuffer, DepthBuffer ]
  -- resolve overloading, not needed in "real" programs
  let color3f = color :: Color3 GLfloat -> IO ()
      vertex3f = vertex :: Vertex3 GLfloat -> IO ()
  -- draw a triangle
  renderPrimitive Triangles $ do
    color3f (Color3 1 0 0)
    vertex3f (Vertex3 2 2.5 (-1))
    color3f (Color3 1 0 0)
    vertex3f (Vertex3 (-3.5) (-2.5) (-1))
    color3f (Color3 1 0 0)
    vertex3f (Vertex3 2 (-4) 0)
  -- draw a polygon
  renderPrimitive Polygon $ do
    color3f (Color3 0 0 1)
    vertex3f (Vertex3 (-1) 2 0)
    color3f (Color3 0 0 1)
    vertex3f (Vertex3 (-3) (-0.5) 0)
    color3f (Color3 0 0 1)
    vertex3f (Vertex3 (-1.5) (-3) 0)
    color3f (Color3 0 0 1)
    vertex3f (Vertex3 1 (-2) 0)
    color3f (Color3 0 0 1)
    vertex3f (Vertex3 1 1 0)
  -- draw everything and swap the display buffer
  swapBuffers
| FranklinChen/hugs98-plus-Sep2006 | packages/GLUT/examples/BOGLGP/Chapter01/OnYourOwn1.hs | bsd-3-clause | 4,096 | 0 | 14 | 786 | 834 | 413 | 421 | 57 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-|
Module : Numeric.ER.BasicTypes.PlusMinus
Description : mini sign datatype
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
A mini enumeration to represent the sign of different numbers and approximations.
-}
module Numeric.ER.BasicTypes.PlusMinus where
import Data.Typeable
import Data.Generics.Basics
import Data.Binary
--import BinaryDerive
-- | The sign of a number.  'Minus' is ordered before 'Plus' (derived Ord).
data PlusMinus = Minus | Plus
    deriving (Eq, Ord, Typeable, Data)

instance Show PlusMinus where
    show Plus = "+"
    show Minus = "-"

{- the following has been generated by BinaryDerive -}
instance Binary PlusMinus where
  put Minus = putWord8 0
  put Plus = putWord8 1
  get = do
    tag_ <- getWord8
    case tag_ of
      0 -> return Minus
      1 -> return Plus
      _ -> fail "no parse"
{- the above has been generated by BinaryDerive -}
-- | Flip a sign.
signNeg :: PlusMinus -> PlusMinus
signNeg Plus = Minus
signNeg Minus = Plus

-- | Multiply two signs: the result is the second sign, negated when the
-- first is 'Minus'.
signMult :: PlusMinus -> PlusMinus -> PlusMinus
signMult Plus s = s
signMult Minus s = signNeg s

-- | Interpret a sign as the number @1@ or @-1@.
signToNum :: Num a => PlusMinus -> a
signToNum Plus = 1
signToNum Minus = -1
| michalkonecny/polypaver | src/Numeric/ER/BasicTypes/PlusMinus.hs | bsd-3-clause | 1,111 | 0 | 11 | 270 | 215 | 112 | 103 | 25 | 1 |
-- BANNERSTART
-- - Copyright 2006-2008, Galois, Inc.
-- - This software is distributed under a standard, three-clause BSD license.
-- - Please see the file LICENSE, distributed with this software, for specific
-- - terms and conditions.
-- Author: Adam Wick <awick@galois.com>
-- BANNEREND
-- |A low-level module for dealing with unprivileged Xen memory operations,
-- including allocating pages, granting access to pages to other domains, and
-- mapping the grants of other domains.
module Hypervisor.Memory(
-- * Types and conversions for dealing with memory.
PFN, MFN
, VPtr, MPtr
, mfnToMPtr, mptrToMFN , mptrToPtr, toMFN, fromMFN, toPFN
, mfnToVPtr, vptrToMFN
, mptrToInteger
, pageSize
-- * Routines for creating, destroying, and modifying pages.
, allocPage
, AllocProt(..), defaultProt
, allocPageProt
, freePage
, withPage
, setPageWritable
, markAsPageTable
, markFrameAsPageTable
, mapFrames
-- * Routines for creating or destroying grant references
-- and grant handles.
, GrantRef(..)
, grantAccess
, endAccess
, GrantHandle(..)
, mapGrants
, unmapGrant
-- * Routines for transferring or copying pages to another domain.
, prepareTransfer
, transferFrame
, completeTransfer
, performFrameCopy
-- * Low-level routines for dealing with frames, address translation,
-- and similar grungy things.
, virtualToMachine
, machineToVirtual
, addressMapped
, systemWMB, systemRMB, systemMB
)
where
import Control.Exception (throwIO)
import Control.Monad
import Data.Binary
import Data.Bits
import Foreign.Ptr
import Foreign.Marshal.Alloc (alloca,allocaBytesAligned)
import Foreign.Marshal.Array (withArray,allocaArray,peekArray)
import Foreign.Storable
import GHC.Generics
import Numeric
#if __GLASGOW_HASKELL__ < 706
import Prelude hiding (catch)
#endif
import Hypervisor.DomainInfo
import Hypervisor.ErrorCodes
--
-- * Types and conversions for dealing with memory.
--
-- |Pseudo-physical frame numbers. These frame numbers have very little to
-- do with the machine address or the virtual address, but are used in some
-- Xen hypercalls.
newtype PFN = PFN Word

-- |Translate an integral value to a PFN.
toPFN :: Integral a => a -> PFN
toPFN x = PFN (fromIntegral x)

-- |Machine frame numbers. These frame numbers identify a physical 4096-byte
-- frame on the underlying hardware.
newtype MFN = MFN Word
  deriving (Eq, Ord, Num, Read, Generic, Storable, Bits)

instance Show MFN where
  show (MFN x) = "MFN 0x" ++ showHex x ""

-- |A virtual address that, if you've mapped it, can be written to and read
-- from as per normal.
type VPtr a = Ptr a

-- |A machine address. These cannot be written to or read from directly, as
-- HaLVM's always run with paging enabled.
-- (Machine addresses are 64 bits wide on x86-64/PAE, 32 bits otherwise.)
#if defined(CONFIG_X86_PAE) || defined(CONFIG_X86_64)
newtype MPtr a = MPtr Word64 deriving Storable
#else
newtype MPtr a = MPtr Word32 deriving Storable
#endif

-- |View a machine address as an Integer.
mptrToInteger :: MPtr a -> Integer
mptrToInteger (MPtr x) = fromIntegral x
-- |Convert a 32-bit word, from some other source, into an MFN. Manufacturing
-- your own MFNs is dangerous, so make sure you know what you're doing if
-- you use this function.
toMFN :: Word -> MFN
toMFN = MFN

-- | This is used when passing MFNs to some primitives.
-- Eventually, we should change the primitives to take MFNs directly.
fromMFN :: MFN -> Word
fromMFN (MFN x) = x

-- |Convert a machine frame number to the initial machine address within the
-- block.  (A frame covers 4096 bytes, hence the shift by 12 bits.)
mfnToMPtr :: MFN -> MPtr a
mfnToMPtr (MFN f) = MPtr (fromIntegral f `shiftL` 12)

-- |Convert a machine frame number to the address at which it is mapped in
-- the address space. Note that, obviously, if the page isn't currently
-- mapped, you'll get an error.
mfnToVPtr :: MFN -> IO (VPtr a)
mfnToVPtr = machineToVirtual . mfnToMPtr

-- |Convert a virtual address to the machine frame underlying its frame. This
-- function will perform the rounding for you. If the page is mapped (if
-- addressMapped) returns True, then this page is guaranteed to succeed.
vptrToMFN :: VPtr a -> IO MFN
vptrToMFN x = do
  p <- virtualToMachine x
  return (mptrToMFN p)

-- |Convert a machine pointer to its machine frame number. This operation
-- is necessarily lossy, so (x == mptrToMFN (mfnToMPtr x)) does not
-- necessarily hold.
mptrToMFN :: MPtr a -> MFN
mptrToMFN (MPtr p) = fromIntegral (p `shiftR` 12)

-- |Convert a machine pointer to a pointer. In order to use this, you should
-- really know what you're doing. Reading to or from the returned address
-- will probably cause a crash.
mptrToPtr :: MPtr a -> Ptr a
mptrToPtr (MPtr p) = intPtrToPtr (fromIntegral p)

-- |The size, in bytes, of a memory page.
pageSize :: Word32
pageSize = 4096
--
-- * Routines for creating, destroying, and modifying pages.
--
{-# DEPRECATED allocPage "Avoid use of this, can impact GC functionality." #-}
-- |Allocate a page outside the garbage-collected heap. These pages
-- are almost always used with grants.
allocPage :: IO (VPtr a)
allocPage = allocPageProt defaultProt

-- | Protection flags for 'allocPageProt'.
data AllocProt = AllocProt
  { protRead    :: Bool
  , protWrite   :: Bool
  , protExec    :: Bool
  , protNoCache :: Bool
  }

-- | These are the Prot flags used by allocPage: readable, writable,
-- executable, cache enabled.
defaultProt :: AllocProt
defaultProt = AllocProt
  { protRead    = True
  , protWrite   = True
  , protExec    = True
  , protNoCache = False
  }

-- Encode the flag record as the bitmask expected by the C allocator:
-- bit 0 = read, bit 1 = write, bit 2 = exec, bit 3 = no-cache.
getProt :: AllocProt -> Int
getProt flags = flag (bit 0) protRead
            .|. flag (bit 1) protWrite
            .|. flag (bit 2) protExec
            .|. flag (bit 3) protNoCache
  where
    flag b p | p flags = b
             | otherwise = 0

{-# DEPRECATED allocPageProt "Avoid use of this, can impact GC functionality." #-}
-- | Allocate with a set of protection flags.  Throws 'ENOMEM' when the
-- underlying allocator returns NULL.
allocPageProt :: AllocProt -> IO (VPtr a)
allocPageProt flags = do
  va <- allocMemory nullPtr 4096 (getProt flags) 1
  if va == nullPtr then throwIO ENOMEM else return $! va

-- |Free a page allocated with allocPage.  Throws 'EINVAL' when the
-- pointer is not page-aligned.
freePage :: VPtr a -> IO ()
freePage x
  | x /= (x `alignPtr` 4096) = throwIO EINVAL
  | otherwise = freeMemory x 4096

-- | Allocate a page, call a function with it, and free it.
withPage :: (VPtr a -> IO b) -> IO b
withPage = allocaBytesAligned 4096 4096
-- |Set a page writable (or not), by toggling bit 1 (the writable bit)
-- of its page-table entry.
setPageWritable :: VPtr a -> Bool -> IO ()
setPageWritable x val = do
  ent <- get_pt_entry x
  set_pt_entry x (modify ent)
  where
    modify a | val = a `setBit` 1
             | otherwise = a `clearBit` 1
-- |Mark the given page as one that will be used as a page table.
-- The given address is a virtual address. This is the analagous
-- version of the MMUEXT_PIN_L?_TABLE case of the MMUext hypercall;
-- the argument specifying what level.
--
-- Note that changing your own page tables is a good way to crash,
-- unless you're very familiar with the HaLVM.
--
-- QUICK GUIDE:
-- Use level '1' for page tables
-- Use level '2' for page directories
-- Use level '3' for PAE base tables / directory pointer tables
-- Use level '4' for PML4
markAsPageTable :: Int -> VPtr a -> DomId -> IO ()
markAsPageTable l addr dom = do
  -- Look up the machine frame backing this virtual address, then pin it.
  ent <- get_pt_entry addr
  let mfn' = fromIntegral (ent `shiftR` 12)
  markFrameAsPageTable l (MFN mfn') dom

-- | As 'markAsPageTable', but starting from a machine frame number.
-- Throws 'EINVAL' for levels outside 1..4.
markFrameAsPageTable :: Int -> MFN -> DomId -> IO ()
markFrameAsPageTable l mfn dom
  | l `notElem` [1 .. 4] = throwIO EINVAL
  | otherwise = do i <- pin_frame l (fromMFN mfn) (fromDomId dom)
                   standardUnitRes i

-- |Map the given list of frames into a contiguous chunk of memory.
mapFrames :: [MFN] -> IO (VPtr a)
mapFrames mfns = withArray (map fromMFN mfns) $ \p -> mapFrames' p (length mfns)
--
-- * Routines for creating or destroying grant references and grant handles.
--
-- | A grant reference: an index into this domain's grant table.
newtype GrantRef = GrantRef { unGrantRef :: Word32 }
  deriving (Eq, Ord, Generic, Storable)

instance Show GrantRef where
  show (GrantRef x) = "grant:" ++ show x

-- Inverse of the Show instance: strip the "grant:" prefix, then parse
-- the numeric reference.
instance Read GrantRef where
  readsPrec d str =
    case splitAt 6 str of
      ("grant:",x) -> map (\ (g,rest) -> (GrantRef g, rest)) (readsPrec d x)
      _            -> []

instance Binary GrantRef where
  put (GrantRef r) = put r
  get = GrantRef `fmap` get
-- |Grant access to a given domain to a given region of memory (starting at
-- the pointer and extending for the given length). The boolean determines
-- if the given domain will be able to write to the memory (True) or not
-- (False).
--
-- The region is split at page boundaries, producing one grant per page
-- touched.
grantAccess :: DomId -> Ptr a -> Word -> Bool -> IO [GrantRef]
grantAccess dom ptr len writable = ga ptr (fromIntegral len)
  where
    ga _ 0 = return []
    ga p l = do
      -- BUG FIX: the page offset must be computed from the current chunk
      -- pointer p, not the original ptr; otherwise every chunk after the
      -- first reuses the initial offset, producing wrong chunk lengths
      -- (grants that straddle page boundaries).
      let pword   = ptrToWordPtr p
          offset  = pword .&. 4095
          clength = minimum [4096, (4096 - offset), l]
          ro      = if writable then 0 else 1
      i <- alloca $ \ rptr -> do
             res <- allocGrant (fromDomId dom) p (fromIntegral clength) ro rptr
             if (res < 0)
               then throwIO (toEnum (-res) :: ErrorCode)
               else peek rptr
      ((GrantRef i):) `fmap` ga (p `plusPtr` fromIntegral clength) (l - clength)
-- |Stop any access grants associated with the given grant reference.
endAccess :: GrantRef -> IO ()
endAccess (GrantRef gr) = do
  res <- endGrant gr
  -- a negative result is a negated ErrorCode
  when (res < 0) $ throwIO (toEnum (-res) :: ErrorCode)

-- |The type of a grant handle, or (in other words), the handle to a
-- grant from another domain that we've mapped.  Holds one word per
-- mapped grant.
newtype GrantHandle = GrantHandle [Word32]
  deriving (Eq, Ord, Show, Read)
-- |Map another domain's grants into our own address space. The return
-- values, if successful, are a pointer to the newly-mapped page in
-- memory and the grant handle. The boolean argument determines whether
-- HALVM should map the page read-only (False) or read\/write (True).
mapGrants :: DomId -> [GrantRef] -> Bool -> IO (VPtr a, GrantHandle)
mapGrants dom refs writable =
  withArray (map unGrantRef refs) $ \ ptr ->
  alloca $ \ resptr ->
  allocaArray count $ \ hndlptr -> do
    res <- mapGrants' dom' readonly ptr count resptr hndlptr nullPtr
    -- Zero is success; a negative result is a negated ErrorCode; a
    -- positive one is a grant-table status code.
    case compare res 0 of
      EQ -> do retptr <- peek resptr
               hnds <- GrantHandle `fmap` peekArray count hndlptr
               return (retptr, hnds)
      LT -> throwIO (toEnum (-res) :: ErrorCode)
      GT -> throwIO (toEnum res :: GrantErrorCode)
  where
    readonly | writable = 0
             | otherwise = 1
    count = length refs
    dom' = fromDomId dom
-- |Unmap the grant of another domain's page. This will make the shared
-- memory inaccessible.
unmapGrant :: GrantHandle -> IO ()
unmapGrant (GrantHandle gh) =
  withArray gh $ \ ptr -> do
    res <- unmapGrants ptr (length gh)
    -- Zero is success; a negative result is a negated ErrorCode; a
    -- positive one is a grant-table status code.
    case compare res 0 of
      EQ -> return ()
      LT -> throwIO (toEnum (-res) :: ErrorCode)
      GT -> throwIO (toEnum res :: GrantErrorCode)
--
-- * Routines for transferring or copying pages to another domain.
--
-- |Allow the given foreign domain to transfer a page to the running domain.
-- The resulting grant reference should be passed to the other domain, for
-- them to use in their transfer request. Usual protocol: Side A does
-- prepareTransfer, Side B does transferFrame, Side A does completeTransfer.
prepareTransfer :: DomId -> IO GrantRef
prepareTransfer dom = do
  res <- prepTransfer (fromDomId dom)
  when (res < 0) $ throwIO (toEnum (-res) :: ErrorCode)
  -- a non-negative result is the freshly allocated grant reference
  return (GrantRef (fromIntegral res))
-- |Transfer the given frame to another domain, using the given grant
-- reference as the transfer mechanism.
transferFrame :: DomId -> GrantRef -> VPtr a -> IO ()
transferFrame dom (GrantRef ref) ptr = do
  res <- transferGrant (fromDomId dom) ref ptr
  -- Zero is success; a negative result is a negated ErrorCode; a
  -- positive one is a grant-table status code.
  case compare res 0 of
    EQ -> return ()
    LT -> throwIO (toEnum (-res) :: ErrorCode)
    GT -> throwIO (toEnum res :: GrantErrorCode)
-- |Complete a grant transfer, returning the provided frame.
--
-- The first provided boolean determines the blocking behavior when the other
-- domain has not yet begun the transfer. If True, then the function will
-- block, under the assumption that the other side will begin the transfer
-- soon. If False, the function will not block, raising EAGAIN if the other
-- side has not yet begun the transfer. In all cases, if the other side has
-- begun the transfer, this routine will block until the transfer completes.
--
-- The second boolean determines if this grant reference should be recycled
-- and prepared for another grant transfer from the same domain upon completion
-- (True), or if the reference should be freed (False).
completeTransfer :: GrantRef -> Bool -> Bool -> IO MFN
completeTransfer gr@(GrantRef ref) block reset = do
  res <- compTransfer ref reset
  let ecode = toEnum (-res) :: ErrorCode
  case compare res 0 of
    -- EAGAIN means the peer has not started the transfer yet; retry
    -- (by recursion) if the caller asked us to block.
    LT | block && ecode == EAGAIN -> completeTransfer gr block reset
       | otherwise -> throwIO ecode
    -- a non-negative result is the transferred frame number
    _ -> return (MFN (fromIntegral res))
-- |Perform a copy of one frame to another frame. If two frame numbers are
-- used, they must be legitimate frame numbers for the calling domain. For
-- use between domains, the function can use grant references, which must
-- be set as read/write for the appropriate domains. The first mfn/ref and
-- domain is the source, the second set is the destination. Note that it is
-- an error to specify an MFN with any other identifier than domidSelf.
performFrameCopy :: (Either GrantRef MFN) -> DomId -> Word16 ->
                    (Either GrantRef MFN) -> DomId -> Word16 ->
                    Word16 ->
                    IO ()
performFrameCopy src sd soff dest dd doff len = do
  -- Each side is either a grant reference (usable cross-domain) or a
  -- raw MFN (only legal for domidSelf).
  let (snum,sisref) = argToVals src sd
      (dnum,disref) = argToVals dest dd
  ret <- perform_grant_copy snum sisref srcDom soff dnum disref destDom doff len
  standardUnitRes ret
  where
    srcDom = fromDomId sd
    destDom = fromDomId dd
    -- Flatten the Either into (number, is-grant-ref) for the C call.
    argToVals :: (Either GrantRef MFN) -> DomId -> (Word, Bool)
    argToVals (Left (GrantRef ref)) _ = (fromIntegral ref, True)
    argToVals (Right (MFN _)) dom | dom /= domidSelf =
      error "Called with an MFN and non-self domain!"
    argToVals (Right (MFN mfn)) _ = (fromIntegral mfn, False)
--
-- * Low-level routines for dealing with frames, address translation,
-- and similar grungy things.
--
-- |Convert a virtual address into a machine-physical address.
-- Throws 'EINVAL' if the page is not present in the page tables.
virtualToMachine :: VPtr a -> IO (MPtr a)
virtualToMachine x = do
  ent <- get_pt_entry x
  -- bit 0 of a page-table entry is the present bit
  if ent == 0 || not (ent `testBit` 0)
    then throwIO EINVAL
    else let inword = ptrToWordPtr x
             inoff = fromIntegral (inword .&. 4095)   -- offset within page
             base = ent .&. (complement 4095)         -- entry minus low bits
         in return (MPtr (fromIntegral (base + inoff)))

-- |Convert a machine-physical address into a virtual address. THIS IS VERY
-- SLOW.  Throws 'EINVAL' when the address is not mapped.
machineToVirtual :: MPtr a -> IO (VPtr a)
machineToVirtual (MPtr x) = machine_to_virtual x >>= \ x' ->
  if x' == nullPtr
    then throwIO EINVAL
    else return x'

-- |Determine if the given address is actually backed with some
-- physical page, thus determining whether or not someone can
-- read or write from the address.
addressMapped :: VPtr a -> IO Bool
addressMapped addr = do
  ent <- get_pt_entry addr
  return (ent `testBit` 0) -- lowest bit is the present bit
--
-- --------------------------------------------------------------------------
--
-- Convert a C-style status code into IO (): zero is success; anything
-- else is treated as a negated ErrorCode and thrown.
standardUnitRes :: Integral a => a -> IO ()
standardUnitRes res
  | res == 0  = return ()
  | otherwise = throwIO (toEnum (fromIntegral (negate res)) :: ErrorCode)
#define C_PFN_T Word32
#if defined(CONFIG_X86_64) || defined(CONFIG_X86_PAE)
# define C_MADDR_T Word64
# define C_SIZE_T Word64
#else
# define C_MADDR_T Word32
# define C_SIZE_T Word32
#endif
#define C_PADDR_T Word32
#define C_VADDR_T (VPtr a)
-- Functions from vmm.h
foreign import ccall unsafe "vmm.h get_pt_entry"
get_pt_entry :: Ptr a -> IO Word64
foreign import ccall unsafe "vmm.h set_pt_entry"
set_pt_entry :: Ptr a -> Word64 -> IO ()
foreign import ccall unsafe "vmm.h machine_to_virtual"
machine_to_virtual :: C_MADDR_T -> IO (VPtr a)
-- Functions from memory.h
foreign import ccall unsafe "memory.h pin_frame"
pin_frame :: Int -> Word -> Word32 -> IO Int
foreign import ccall unsafe "memory.h map_frames"
mapFrames' :: VPtr Word -> Int -> IO (VPtr a)
foreign import ccall unsafe "memory.h system_wmb"
systemWMB :: IO ()
foreign import ccall unsafe "memory.h system_rmb"
systemRMB :: IO ()
foreign import ccall unsafe "memory.h system_mb"
systemMB :: IO ()
-- Functions from runtime_reqs.h
foreign import ccall unsafe "runtime_reqs.h runtime_alloc"
allocMemory :: VPtr a -> Word -> Int -> Int -> IO (VPtr a)
foreign import ccall unsafe "runtime_reqs.h runtime_free"
freeMemory :: VPtr a -> Word -> IO ()
-- functions from grants.h
foreign import ccall unsafe "grants.h alloc_grant"
allocGrant :: Word16 -> VPtr a -> Word16 -> Int -> VPtr Word32 -> IO Int
foreign import ccall unsafe "grants.h end_grant"
endGrant :: Word32 -> IO Int
foreign import ccall unsafe "grants.h map_grants"
mapGrants' :: Word16 -> Int -> VPtr Word32 -> Int ->
VPtr (VPtr a) -> VPtr Word32 -> VPtr Word64 ->
IO Int
foreign import ccall unsafe "grants.h unmap_grants"
unmapGrants :: VPtr Word32 -> Int -> IO Int
foreign import ccall unsafe "grants.h prepare_transfer"
prepTransfer :: Word16 -> IO Int
foreign import ccall unsafe "grants.h transfer_frame"
transferGrant :: Word16 -> Word32 -> VPtr a -> IO Int
foreign import ccall unsafe "grants.h complete_transfer"
compTransfer :: Word32 -> Bool -> IO Int
foreign import ccall unsafe "grants.h copy_frame"
perform_grant_copy :: Word -> Bool -> Word16 -> Word16 ->
Word -> Bool -> Word16 -> Word16 ->
Word16 -> IO Int
| GaloisInc/HaLVM | src/HALVMCore/Hypervisor/Memory.hs | bsd-3-clause | 17,921 | 0 | 20 | 4,106 | 3,910 | 2,065 | 1,845 | 275 | 4 |
module Main where
import Control.Monad (forM_)
import System.Posix.Time
import System.IO
import VectorTest
-- number of timed repetitions per operation
ntry = 10 :: Int
-- iterations per repetition
nloop = 2000000 :: Int

-- Benchmark driver: time a series of Vector3 operations.
main :: IO ()
main = do
  let v1 = initVec 2.1 4.5 8.2
  let v2 = initVec 1.1 2.5 7.2
  let s = 4.7
  -- "noop" ignores its argument and returns the constant v1, giving a
  -- baseline measurement
  tryN "noop " (\x -> v1) ntry
  tryN "new " (\x -> genVec x) ntry
  tryN "add " (\x -> vadd (genVec x) v2) ntry
  tryN "sub " (\x -> vsub (genVec x) v2) ntry
  tryN "scale" (\x -> vscale s (genVec x)) ntry
  tryN "dot " (\x -> initVec (dot (genVec x) v2) 0 0) ntry
  tryN "cross" (\x -> cross (genVec x) v2) ntry
-- | Time @n@ invocations of 'loopN' over @f@ and report the average
-- wall-clock time per invocation on stderr, prefixed with label @a@.
--
-- Fix: the report line says \"avg Time\" but the original printed the
-- *total* elapsed time for all @n@ runs; we now divide by @n@ so the
-- label is truthful.  ('epochTime' has one-second resolution, so the
-- average is coarse for fast runs.)
tryN :: String -> (Int -> Vector3) -> Int -> IO ()
tryN a f n = do
  t0 <- epochTime
  forM_ [1..n] $ \_ -> loopN a f   -- loop index itself is unused
  t1 <- epochTime
  let avg = realToFrac (t1 - t0) / fromIntegral n :: Double
  hPutStrLn stderr (a ++ "avg Time: " ++ show avg)
-- | Inner benchmark loop: runs 'nloop' iterations.
--
-- NOTE(review): the vector computation (@f i@) is commented out below, so
-- both the label @a@ and the function @f@ are currently unused and the
-- loop only prints the iteration index.  Any timings therefore measure
-- 'putStrLn', not vector arithmetic -- confirm whether the commented-out
-- line should be restored.
loopN :: String -> (Int -> Vector3) -> IO ()
loopN a f = do
  forM_ [1..nloop] $ \i -> do
    --let vs = f i
    putStrLn $ show i
  -- hPutStrLn stderr (a ++ " Time: " ++ show (t1 - t0))
-- | Build a 'Vector3' whose x component is the converted loop index;
-- the y and z components are fixed constants.
genVec :: Int -> Vector3
genVec i =
  let xComp = fromIntegral i
  in  initVec xComp 8.4 2.8
| eijian/raytracer | test/Main-t7.hs | bsd-3-clause | 1,032 | 0 | 14 | 272 | 503 | 251 | 252 | 32 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : System.LXC.AttachOptions
-- Copyright : (c) Nickolay Kudasov 2014
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : nickolay.kudasov@gmail.com
--
-- Options and structures to run commands inside LXC containers.
-- You can get more info about LXC at <https://help.ubuntu.com/lts/serverguide/lxc.html> and <https://linuxcontainers.org>.
--
-- Normally you should import @System.LXC@ module only.
--
-----------------------------------------------------------------------------
-- This module is a thin public facade: every exported name is defined in
-- (and re-exported from) "System.LXC.Internal.AttachOptions".
module System.LXC.AttachOptions (
  -- * Attach options
  AttachOptions(..),
  defaultAttachOptions,
  -- * Attach command
  AttachCommand(..),
  -- * Attach @exec@ functions
  AttachExecFn(..),
  attachRunCommand,
  attachRunShell,
  -- * Flags and environment policies
  AttachEnvPolicy(..),
  AttachFlag(..),
  fromAttachEnvPolicy,
  fromAttachFlag,
) where
import System.LXC.Internal.AttachOptions
| fizruk/lxc | src/System/LXC/AttachOptions.hs | bsd-3-clause | 1,004 | 0 | 5 | 142 | 85 | 65 | 20 | 12 | 0 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE DeriveDataTypeable, DeriveFunctor, DeriveFoldable,
DeriveTraversable #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- Note [Pass sensitive types]
-- in module PlaceHolder
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleInstances #-}
-- | Abstract syntax of global declarations.
--
-- Definitions for: @SynDecl@ and @ConDecl@, @ClassDecl@,
-- @InstDecl@, @DefaultDecl@ and @ForeignDecl@.
module HsDecls (
-- * Toplevel declarations
HsDecl(..), LHsDecl, HsDataDefn(..), HsDeriving,
HsDerivingClause(..), LHsDerivingClause,
-- ** Class or type declarations
TyClDecl(..), LTyClDecl,
TyClGroup(..), mkTyClGroup, emptyTyClGroup,
tyClGroupTyClDecls, tyClGroupInstDecls, tyClGroupRoleDecls,
isClassDecl, isDataDecl, isSynDecl, tcdName,
isFamilyDecl, isTypeFamilyDecl, isDataFamilyDecl,
isOpenTypeFamilyInfo, isClosedTypeFamilyInfo,
tyFamInstDeclName, tyFamInstDeclLName,
countTyClDecls, pprTyClDeclFlavour,
tyClDeclLName, tyClDeclTyVars,
hsDeclHasCusk, famDeclHasCusk,
FamilyDecl(..), LFamilyDecl,
-- ** Instance declarations
InstDecl(..), LInstDecl, NewOrData(..), FamilyInfo(..),
TyFamInstDecl(..), LTyFamInstDecl, instDeclDataFamInsts,
DataFamInstDecl(..), LDataFamInstDecl, pprDataFamInstFlavour, pprFamInstLHS,
FamInstEqn, LFamInstEqn, FamEqn(..),
TyFamInstEqn, LTyFamInstEqn, TyFamDefltEqn, LTyFamDefltEqn,
HsTyPats,
LClsInstDecl, ClsInstDecl(..),
-- ** Standalone deriving declarations
DerivDecl(..), LDerivDecl,
-- ** @RULE@ declarations
LRuleDecls,RuleDecls(..),RuleDecl(..), LRuleDecl, RuleBndr(..),LRuleBndr,
collectRuleBndrSigTys,
flattenRuleDecls, pprFullRuleName,
-- ** @VECTORISE@ declarations
VectDecl(..), LVectDecl,
lvectDeclName, lvectInstDecl,
-- ** @default@ declarations
DefaultDecl(..), LDefaultDecl,
-- ** Template haskell declaration splice
SpliceExplicitFlag(..),
SpliceDecl(..), LSpliceDecl,
-- ** Foreign function interface declarations
ForeignDecl(..), LForeignDecl, ForeignImport(..), ForeignExport(..),
noForeignImportCoercionYet, noForeignExportCoercionYet,
CImportSpec(..),
-- ** Data-constructor declarations
ConDecl(..), LConDecl,
HsConDeclDetails, hsConDeclArgTys,
getConNames,
getConDetails,
gadtDeclDetails,
-- ** Document comments
DocDecl(..), LDocDecl, docDeclDoc,
-- ** Deprecations
WarnDecl(..), LWarnDecl,
WarnDecls(..), LWarnDecls,
-- ** Annotations
AnnDecl(..), LAnnDecl,
AnnProvenance(..), annProvenanceName_maybe,
-- ** Role annotations
RoleAnnotDecl(..), LRoleAnnotDecl, roleAnnotDeclName,
-- ** Injective type families
FamilyResultSig(..), LFamilyResultSig, InjectivityAnn(..), LInjectivityAnn,
resultVariableName,
-- * Grouping
HsGroup(..), emptyRdrGroup, emptyRnGroup, appendGroups, hsGroupInstDecls
) where
-- friends:
import GhcPrelude
import {-# SOURCE #-} HsExpr( LHsExpr, HsExpr, HsSplice, pprExpr,
pprSpliceDecl )
-- Because Expr imports Decls via HsBracket
import HsBinds
import HsTypes
import HsDoc
import TyCon
import Name
import BasicTypes
import Coercion
import ForeignCall
import PlaceHolder ( PlaceHolder(..) )
import HsExtension
import NameSet
-- others:
import InstEnv
import Class
import Outputable
import Util
import SrcLoc
import Bag
import Maybes
import Data.Data hiding (TyCon,Fixity, Infix)
{-
************************************************************************
* *
\subsection[HsDecl]{Declarations}
* *
************************************************************************
-}
type LHsDecl id = Located (HsDecl id)
-- ^ When in a list this may have
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi'
--
-- For details on above see note [Api annotations] in ApiAnnotation
-- | A Haskell Declaration
data HsDecl id
= TyClD (TyClDecl id) -- ^ Type or Class Declaration
| InstD (InstDecl id) -- ^ Instance declaration
| DerivD (DerivDecl id) -- ^ Deriving declaration
| ValD (HsBind id) -- ^ Value declaration
| SigD (Sig id) -- ^ Signature declaration
| DefD (DefaultDecl id) -- ^ 'default' declaration
| ForD (ForeignDecl id) -- ^ Foreign declaration
| WarningD (WarnDecls id) -- ^ Warning declaration
| AnnD (AnnDecl id) -- ^ Annotation declaration
| RuleD (RuleDecls id) -- ^ Rule declaration
| VectD (VectDecl id) -- ^ Vectorise declaration
| SpliceD (SpliceDecl id) -- ^ Splice declaration
-- (Includes quasi-quotes)
| DocD (DocDecl) -- ^ Documentation comment declaration
| RoleAnnotD (RoleAnnotDecl id) -- ^ Role annotation declaration
deriving instance (DataId id) => Data (HsDecl id)
-- NB: all top-level fixity decls are contained EITHER
-- in SigDs
-- OR in the ClassDecls in TyClDs
--
-- The former covers
-- a) data constructors
-- b) class methods (but they can be also done in the
-- signatures of class decls)
-- c) imported functions (that have an IfacSig)
-- d) top level decls
--
-- The latter is for class methods only
-- | Haskell Group
--
-- A 'HsDecl' is categorised into a 'HsGroup' before being
-- fed to the renamer.
data HsGroup id
= HsGroup {
hs_valds :: HsValBinds id,
hs_splcds :: [LSpliceDecl id],
hs_tyclds :: [TyClGroup id],
-- A list of mutually-recursive groups;
-- This includes `InstDecl`s as well;
-- Parser generates a singleton list;
-- renamer does dependency analysis
hs_derivds :: [LDerivDecl id],
hs_fixds :: [LFixitySig id],
-- Snaffled out of both top-level fixity signatures,
-- and those in class declarations
hs_defds :: [LDefaultDecl id],
hs_fords :: [LForeignDecl id],
hs_warnds :: [LWarnDecls id],
hs_annds :: [LAnnDecl id],
hs_ruleds :: [LRuleDecls id],
hs_vects :: [LVectDecl id],
hs_docs :: [LDocDecl]
}
deriving instance (DataId id) => Data (HsGroup id)
emptyGroup, emptyRdrGroup, emptyRnGroup :: HsGroup a

-- | An empty group as produced straight after parsing.
emptyRdrGroup = emptyGroup { hs_valds = emptyValBindsIn }

-- | An empty group as produced after renaming.
emptyRnGroup  = emptyGroup { hs_valds = emptyValBindsOut }

-- | All instance declarations, gathered from every type\/class group.
hsGroupInstDecls :: HsGroup id -> [LInstDecl id]
hsGroupInstDecls = concatMap group_instds . hs_tyclds

-- Every field listed explicitly; hs_valds must be overridden by callers
-- (see emptyRdrGroup/emptyRnGroup), hence the error thunk.
emptyGroup = HsGroup { hs_valds  = error "emptyGroup hs_valds: Can't happen"
                     , hs_splcds = []
                     , hs_tyclds = []
                     , hs_derivds = []
                     , hs_fixds  = []
                     , hs_defds  = []
                     , hs_annds  = []
                     , hs_fords  = []
                     , hs_warnds = []
                     , hs_ruleds = []
                     , hs_vects  = []
                     , hs_docs   = [] }
appendGroups :: HsGroup a -> HsGroup a -> HsGroup a
appendGroups
HsGroup {
hs_valds = val_groups1,
hs_splcds = spliceds1,
hs_tyclds = tyclds1,
hs_derivds = derivds1,
hs_fixds = fixds1,
hs_defds = defds1,
hs_annds = annds1,
hs_fords = fords1,
hs_warnds = warnds1,
hs_ruleds = rulds1,
hs_vects = vects1,
hs_docs = docs1 }
HsGroup {
hs_valds = val_groups2,
hs_splcds = spliceds2,
hs_tyclds = tyclds2,
hs_derivds = derivds2,
hs_fixds = fixds2,
hs_defds = defds2,
hs_annds = annds2,
hs_fords = fords2,
hs_warnds = warnds2,
hs_ruleds = rulds2,
hs_vects = vects2,
hs_docs = docs2 }
=
HsGroup {
hs_valds = val_groups1 `plusHsValBinds` val_groups2,
hs_splcds = spliceds1 ++ spliceds2,
hs_tyclds = tyclds1 ++ tyclds2,
hs_derivds = derivds1 ++ derivds2,
hs_fixds = fixds1 ++ fixds2,
hs_annds = annds1 ++ annds2,
hs_defds = defds1 ++ defds2,
hs_fords = fords1 ++ fords2,
hs_warnds = warnds1 ++ warnds2,
hs_ruleds = rulds1 ++ rulds2,
hs_vects = vects1 ++ vects2,
hs_docs = docs1 ++ docs2 }
instance (SourceTextX pass, OutputableBndrId pass)
=> Outputable (HsDecl pass) where
ppr (TyClD dcl) = ppr dcl
ppr (ValD binds) = ppr binds
ppr (DefD def) = ppr def
ppr (InstD inst) = ppr inst
ppr (DerivD deriv) = ppr deriv
ppr (ForD fd) = ppr fd
ppr (SigD sd) = ppr sd
ppr (RuleD rd) = ppr rd
ppr (VectD vect) = ppr vect
ppr (WarningD wd) = ppr wd
ppr (AnnD ad) = ppr ad
ppr (SpliceD dd) = ppr dd
ppr (DocD doc) = ppr doc
ppr (RoleAnnotD ra) = ppr ra
instance (SourceTextX pass, OutputableBndrId pass)
=> Outputable (HsGroup pass) where
ppr (HsGroup { hs_valds = val_decls,
hs_tyclds = tycl_decls,
hs_derivds = deriv_decls,
hs_fixds = fix_decls,
hs_warnds = deprec_decls,
hs_annds = ann_decls,
hs_fords = foreign_decls,
hs_defds = default_decls,
hs_ruleds = rule_decls,
hs_vects = vect_decls })
= vcat_mb empty
[ppr_ds fix_decls, ppr_ds default_decls,
ppr_ds deprec_decls, ppr_ds ann_decls,
ppr_ds rule_decls,
ppr_ds vect_decls,
if isEmptyValBinds val_decls
then Nothing
else Just (ppr val_decls),
ppr_ds (tyClGroupTyClDecls tycl_decls),
ppr_ds (tyClGroupInstDecls tycl_decls),
ppr_ds deriv_decls,
ppr_ds foreign_decls]
where
ppr_ds :: Outputable a => [a] -> Maybe SDoc
ppr_ds [] = Nothing
ppr_ds ds = Just (vcat (map ppr ds))
vcat_mb :: SDoc -> [Maybe SDoc] -> SDoc
-- Concatenate vertically with white-space between non-blanks
vcat_mb _ [] = empty
vcat_mb gap (Nothing : ds) = vcat_mb gap ds
vcat_mb gap (Just d : ds) = gap $$ d $$ vcat_mb blankLine ds
-- | Located Splice Declaration
type LSpliceDecl pass = Located (SpliceDecl pass)
-- | Splice Declaration
data SpliceDecl id
= SpliceDecl -- Top level splice
(Located (HsSplice id))
SpliceExplicitFlag
deriving instance (DataId id) => Data (SpliceDecl id)
instance (SourceTextX pass, OutputableBndrId pass)
=> Outputable (SpliceDecl pass) where
ppr (SpliceDecl (L _ e) f) = pprSpliceDecl e f
{-
************************************************************************
* *
Type and class declarations
* *
************************************************************************
Note [The Naming story]
~~~~~~~~~~~~~~~~~~~~~~~
Here is the story about the implicit names that go with type, class,
and instance decls. It's a bit tricky, so pay attention!
"Implicit" (or "system") binders
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Each data type decl defines
a worker name for each constructor
to-T and from-T convertors
Each class decl defines
a tycon for the class
a data constructor for that tycon
the worker for that constructor
a selector for each superclass
All have occurrence names that are derived uniquely from their parent
declaration.
None of these get separate definitions in an interface file; they are
fully defined by the data or class decl. But they may *occur* in
interface files, of course. Any such occurrence must haul in the
relevant type or class decl.
Plan of attack:
- Ensure they "point to" the parent data/class decl
when loading that decl from an interface file
(See RnHiFiles.getSysBinders)
- When typechecking the decl, we build the implicit TyCons and Ids.
When doing so we look them up in the name cache (RnEnv.lookupSysName),
to ensure correct module and provenance is set
These are the two places that we have to conjure up the magic derived
names. (The actual magic is in OccName.mkWorkerOcc, etc.)
Default methods
~~~~~~~~~~~~~~~
- Occurrence name is derived uniquely from the method name
E.g. $dmmax
- If there is a default method name at all, it's recorded in
the ClassOpSig (in HsBinds), in the DefMethInfo field.
(DefMethInfo is defined in Class.hs)
Source-code class decls and interface-code class decls are treated subtly
differently, which has given me a great deal of confusion over the years.
Here's the deal. (We distinguish the two cases because source-code decls
have (Just binds) in the tcdMeths field, whereas interface decls have Nothing.
In *source-code* class declarations:
- When parsing, every ClassOpSig gets a DefMeth with a suitable RdrName
This is done by RdrHsSyn.mkClassOpSigDM
- The renamer renames it to a Name
- During typechecking, we generate a binding for each $dm for
which there's a programmer-supplied default method:
class Foo a where
op1 :: <type>
op2 :: <type>
op1 = ...
We generate a binding for $dmop1 but not for $dmop2.
The Class for Foo has a Nothing for op2 and
a Just ($dm_op1, VanillaDM) for op1.
The Name for $dmop2 is simply discarded.
In *interface-file* class declarations:
- When parsing, we see if there's an explicit programmer-supplied default method
because there's an '=' sign to indicate it:
class Foo a where
op1 = :: <type> -- NB the '='
op2 :: <type>
We use this info to generate a DefMeth with a suitable RdrName for op1,
and a NoDefMeth for op2
- The interface file has a separate definition for $dmop1, with unfolding etc.
- The renamer renames it to a Name.
- The renamer treats $dmop1 as a free variable of the declaration, so that
the binding for $dmop1 will be sucked in. (See RnHsSyn.tyClDeclFVs)
This doesn't happen for source code class decls, because they *bind* the default method.
Dictionary functions
~~~~~~~~~~~~~~~~~~~~
Each instance declaration gives rise to one dictionary function binding.
The type checker makes up new source-code instance declarations
(e.g. from 'deriving' or generic default methods --- see
TcInstDcls.tcInstDecls1). So we can't generate the names for
dictionary functions in advance (we don't know how many we need).
On the other hand for interface-file instance declarations, the decl
specifies the name of the dictionary function, and it has a binding elsewhere
in the interface file:
instance {Eq Int} = dEqInt
dEqInt :: {Eq Int} <pragma info>
So again we treat source code and interface file code slightly differently.
Source code:
- Source code instance decls have a Nothing in the (Maybe name) field
(see data InstDecl below)
- The typechecker makes up a Local name for the dict fun for any source-code
instance decl, whether it comes from a source-code instance decl, or whether
the instance decl is derived from some other construct (e.g. 'deriving').
- The occurrence name it chooses is derived from the instance decl (just for
documentation really) --- e.g. dNumInt. Two dict funs may share a common
occurrence name, but will have different uniques. E.g.
instance Foo [Int] where ...
instance Foo [Bool] where ...
These might both be dFooList
- The CoreTidy phase externalises the name, and ensures the occurrence name is
unique (this isn't special to dict funs). So we'd get dFooList and dFooList1.
- We can take this relaxed approach (changing the occurrence name later)
because dict fun Ids are not captured in a TyCon or Class (unlike default
methods, say). Instead, they are kept separately in the InstEnv. This
makes it easy to adjust them after compiling a module. (Once we've finished
compiling that module, they don't change any more.)
Interface file code:
- The instance decl gives the dict fun name, so the InstDecl has a (Just name)
in the (Maybe name) field.
- RnHsSyn.instDeclFVs treats the dict fun name as free in the decl, so that we
suck in the dfun binding
-}
-- | Located Declaration of a Type or Class
type LTyClDecl pass = Located (TyClDecl pass)
-- | A type or class declaration.
data TyClDecl pass
= -- | @type/data family T :: *->*@
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnData',
-- 'ApiAnnotation.AnnFamily','ApiAnnotation.AnnDcolon',
-- 'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpenP',
-- 'ApiAnnotation.AnnDcolon','ApiAnnotation.AnnCloseP',
-- 'ApiAnnotation.AnnEqual','ApiAnnotation.AnnRarrow',
-- 'ApiAnnotation.AnnVbar'
-- For details on above see note [Api annotations] in ApiAnnotation
FamDecl { tcdFam :: FamilyDecl pass }
| -- | @type@ declaration
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnEqual',
-- For details on above see note [Api annotations] in ApiAnnotation
SynDecl { tcdLName :: Located (IdP pass) -- ^ Type constructor
, tcdTyVars :: LHsQTyVars pass -- ^ Type variables; for an
-- associated type these
-- include outer binders
, tcdFixity :: LexicalFixity -- ^ Fixity used in the declaration
, tcdRhs :: LHsType pass -- ^ RHS of type declaration
, tcdFVs :: PostRn pass NameSet }
| -- | @data@ declaration
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnData',
-- 'ApiAnnotation.AnnFamily',
-- 'ApiAnnotation.AnnNewType',
-- 'ApiAnnotation.AnnNewType','ApiAnnotation.AnnDcolon'
-- 'ApiAnnotation.AnnWhere',
-- For details on above see note [Api annotations] in ApiAnnotation
DataDecl { tcdLName :: Located (IdP pass) -- ^ Type constructor
, tcdTyVars :: LHsQTyVars pass -- ^ Type variables; for an
-- associated type
-- these include outer binders
-- Eg class T a where
-- type F a :: *
-- type F a = a -> a
-- Here the type decl for 'f'
-- includes 'a' in its tcdTyVars
, tcdFixity :: LexicalFixity -- ^ Fixity used in the declaration
, tcdDataDefn :: HsDataDefn pass
, tcdDataCusk :: PostRn pass Bool -- ^ does this have a CUSK?
, tcdFVs :: PostRn pass NameSet }
| ClassDecl { tcdCtxt :: LHsContext pass, -- ^ Context...
tcdLName :: Located (IdP pass), -- ^ Name of the class
tcdTyVars :: LHsQTyVars pass, -- ^ Class type variables
tcdFixity :: LexicalFixity, -- ^ Fixity used in the declaration
tcdFDs :: [Located (FunDep (Located (IdP pass)))],
-- ^ Functional deps
tcdSigs :: [LSig pass], -- ^ Methods' signatures
tcdMeths :: LHsBinds pass, -- ^ Default methods
tcdATs :: [LFamilyDecl pass], -- ^ Associated types;
tcdATDefs :: [LTyFamDefltEqn pass],
-- ^ Associated type defaults
tcdDocs :: [LDocDecl], -- ^ Haddock docs
tcdFVs :: PostRn pass NameSet
}
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnClass',
-- 'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
-- - The tcdFDs will have 'ApiAnnotation.AnnVbar',
-- 'ApiAnnotation.AnnComma'
-- 'ApiAnnotation.AnnRarrow'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving instance (DataId id) => Data (TyClDecl id)
-- Simple classifiers for TyClDecl
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-- | @True@ <=> argument is a @data@\/@newtype@
-- declaration.
isDataDecl :: TyClDecl pass -> Bool
isDataDecl decl = case decl of
  DataDecl {} -> True
  _           -> False

-- | type or type instance declaration
isSynDecl :: TyClDecl pass -> Bool
isSynDecl decl = case decl of
  SynDecl {} -> True
  _          -> False

-- | type class
isClassDecl :: TyClDecl pass -> Bool
isClassDecl decl = case decl of
  ClassDecl {} -> True
  _            -> False

-- | type/data family declaration
isFamilyDecl :: TyClDecl pass -> Bool
isFamilyDecl decl = case decl of
  FamDecl {} -> True
  _          -> False

-- | type family declaration (open or closed, but not a data family)
isTypeFamilyDecl :: TyClDecl pass -> Bool
isTypeFamilyDecl decl = case decl of
  FamDecl (FamilyDecl { fdInfo = info }) ->
    isOpenTypeFamilyInfo info || isClosedTypeFamilyInfo info
  _ -> False

-- | open type family info
isOpenTypeFamilyInfo :: FamilyInfo pass -> Bool
isOpenTypeFamilyInfo info = case info of
  OpenTypeFamily -> True
  _              -> False

-- | closed type family info
isClosedTypeFamilyInfo :: FamilyInfo pass -> Bool
isClosedTypeFamilyInfo info = case info of
  ClosedTypeFamily {} -> True
  _                   -> False

-- | data family declaration
isDataFamilyDecl :: TyClDecl pass -> Bool
isDataFamilyDecl decl = case decl of
  FamDecl (FamilyDecl { fdInfo = DataFamily }) -> True
  _                                            -> False
-- Dealing with names

-- | Name of the family instantiated by a type-family instance declaration.
tyFamInstDeclName :: TyFamInstDecl pass -> (IdP pass)
tyFamInstDeclName = unLoc . tyFamInstDeclLName

-- | As 'tyFamInstDeclName', but keeps the source location on the name.
-- The name lives on the LHS of the instance equation, inside the
-- implicit-binder wrapper.
tyFamInstDeclLName :: TyFamInstDecl pass -> Located (IdP pass)
tyFamInstDeclLName (TyFamInstDecl { tfid_eqn =
                     (HsIB { hsib_body = FamEqn { feqn_tycon = ln }}) })
  = ln

-- | Located name bound by a type\/class declaration.  'FamDecl' needs a
-- special case because its name lives inside the nested 'FamilyDecl';
-- the fall-through equation uses the 'tcdLName' selector shared by the
-- other constructors.
tyClDeclLName :: TyClDecl pass -> Located (IdP pass)
tyClDeclLName (FamDecl { tcdFam = FamilyDecl { fdLName = ln } }) = ln
tyClDeclLName decl = tcdLName decl

-- | Unlocated version of 'tyClDeclLName'.
tcdName :: TyClDecl pass -> (IdP pass)
tcdName = unLoc . tyClDeclLName

-- | Type variables bound by a type\/class declaration; again 'FamDecl'
-- stores them inside the nested 'FamilyDecl'.
tyClDeclTyVars :: TyClDecl pass -> LHsQTyVars pass
tyClDeclTyVars (FamDecl { tcdFam = FamilyDecl { fdTyVars = tvs } }) = tvs
tyClDeclTyVars d = tcdTyVars d
-- | Count the declarations of each flavour in a list.
countTyClDecls :: [TyClDecl pass] -> (Int, Int, Int, Int, Int)
-- class, synonym decls, data, newtype, family decls
countTyClDecls decls
  = ( count isClassDecl  decls
    , count isSynDecl    decls   -- excluding...
    , count isDataTy     decls   -- ...family...
    , count isNewTy      decls   -- ...instances
    , count isFamilyDecl decls )
  where
    -- Is this a @data@ declaration proper (not a family instance)?
    isDataTy decl = case decl of
      DataDecl { tcdDataDefn = HsDataDefn { dd_ND = DataType } } -> True
      _                                                          -> False
    -- Is this a @newtype@ declaration proper?
    isNewTy decl = case decl of
      DataDecl { tcdDataDefn = HsDataDefn { dd_ND = NewType } } -> True
      _                                                         -> False
-- | Does this declaration have a complete, user-supplied kind signature?
-- See Note [Complete user-supplied kind signatures]
hsDeclHasCusk :: TyClDecl GhcRn -> Bool
hsDeclHasCusk (FamDecl { tcdFam = fam_decl }) = famDeclHasCusk Nothing fam_decl
hsDeclHasCusk (SynDecl { tcdTyVars = tyvars, tcdRhs = rhs })
-- NB: Keep this synchronized with 'getInitialKind'
= hsTvbAllKinded tyvars && rhs_annotated rhs
where
rhs_annotated (L _ ty) = case ty of
HsParTy lty -> rhs_annotated lty
HsKindSig {} -> True
_ -> False
hsDeclHasCusk (DataDecl { tcdDataCusk = cusk }) = cusk
hsDeclHasCusk (ClassDecl { tcdTyVars = tyvars }) = hsTvbAllKinded tyvars
-- Pretty-printing TyClDecl
-- ~~~~~~~~~~~~~~~~~~~~~~~~
instance (SourceTextX pass, OutputableBndrId pass)
=> Outputable (TyClDecl pass) where
ppr (FamDecl { tcdFam = decl }) = ppr decl
ppr (SynDecl { tcdLName = ltycon, tcdTyVars = tyvars, tcdFixity = fixity
, tcdRhs = rhs })
= hang (text "type" <+>
pp_vanilla_decl_head ltycon tyvars fixity [] <+> equals)
4 (ppr rhs)
ppr (DataDecl { tcdLName = ltycon, tcdTyVars = tyvars, tcdFixity = fixity
, tcdDataDefn = defn })
= pp_data_defn (pp_vanilla_decl_head ltycon tyvars fixity) defn
ppr (ClassDecl {tcdCtxt = context, tcdLName = lclas, tcdTyVars = tyvars,
tcdFixity = fixity,
tcdFDs = fds,
tcdSigs = sigs, tcdMeths = methods,
tcdATs = ats, tcdATDefs = at_defs})
| null sigs && isEmptyBag methods && null ats && null at_defs -- No "where" part
= top_matter
| otherwise -- Laid out
= vcat [ top_matter <+> text "where"
, nest 2 $ pprDeclList (map (pprFamilyDecl NotTopLevel . unLoc) ats ++
map ppr_fam_deflt_eqn at_defs ++
pprLHsBindsForUser methods sigs) ]
where
top_matter = text "class"
<+> pp_vanilla_decl_head lclas tyvars fixity (unLoc context)
<+> pprFundeps (map unLoc fds)
instance (SourceTextX pass, OutputableBndrId pass)
=> Outputable (TyClGroup pass) where
ppr (TyClGroup { group_tyclds = tyclds
, group_roles = roles
, group_instds = instds
}
)
= ppr tyclds $$
ppr roles $$
ppr instds
-- | Pretty-print the head of a vanilla type\/data\/class declaration:
-- optional context, then the constructor applied to its type variables,
-- honouring prefix vs. infix syntax.
pp_vanilla_decl_head :: (SourceTextX pass, OutputableBndrId pass)
   => Located (IdP pass)   -- ^ type\/class constructor name
   -> LHsQTyVars pass      -- ^ bound type variables
   -> LexicalFixity        -- ^ was the name written prefix or infix?
   -> HsContext pass       -- ^ context to print before the head
   -> SDoc
pp_vanilla_decl_head thing (HsQTvs { hsq_explicit = tyvars }) fixity context
 = hsep [pprHsContext context, pp_tyvars tyvars]
  where
    pp_tyvars (varl:varsr)
      -- Infix head applied to more than two variables: parenthesise the
      -- infix application and print the rest after, e.g. @(a + b) c@.
      -- The guard @length varsr > 1@ makes the 'head'/'tail' calls safe.
      | fixity == Infix && length varsr > 1
         = hsep [char '(',ppr (unLoc varl), pprInfixOcc (unLoc thing)
         , (ppr.unLoc) (head varsr), char ')'
         , hsep (map (ppr.unLoc) (tail varsr))]
      -- Plain infix application, e.g. @a + b@.
      | fixity == Infix
         = hsep [ppr (unLoc varl), pprInfixOcc (unLoc thing)
         , hsep (map (ppr.unLoc) varsr)]
      -- Prefix application, e.g. @T a b c@.
      | otherwise = hsep [ pprPrefixOcc (unLoc thing)
                  , hsep (map (ppr.unLoc) (varl:varsr))]
    -- No type variables at all: just the bare name.
    pp_tyvars [] = ppr thing
-- | One- or two-word flavour of a declaration, for messages and output
-- (e.g. \"class\", \"type\", \"data family\").
pprTyClDeclFlavour :: TyClDecl a -> SDoc
pprTyClDeclFlavour decl = case decl of
  ClassDecl {} -> text "class"
  SynDecl {}   -> text "type"
  FamDecl { tcdFam = FamilyDecl { fdInfo = info } }
               -> pprFlavour info <+> text "family"
  DataDecl { tcdDataDefn = HsDataDefn { dd_ND = nd } }
               -> ppr nd
{- Note [Complete user-supplied kind signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We kind-check declarations differently if they have a complete, user-supplied
kind signature (CUSK). This is because we can safely generalise a CUSKed
declaration before checking all of the others, supporting polymorphic recursion.
See ghc.haskell.org/trac/ghc/wiki/GhcKinds/KindInference#Proposednewstrategy
and #9200 for lots of discussion of how we got here.
A declaration has a CUSK if we can know its complete kind without doing any
inference, at all. Here are the rules:
- A class or datatype is said to have a CUSK if and only if all of its type
variables are annotated. Its result kind is, by construction, Constraint or *
respectively.
- A type synonym has a CUSK if and only if all of its type variables and its
RHS are annotated with kinds.
- A closed type family is said to have a CUSK if and only if all of its type
variables and its return type are annotated.
- An open type family always has a CUSK -- unannotated type variables (and
return type) default to *.
- Additionally, if -XTypeInType is on, then a data definition with a top-level
:: must explicitly bind all kind variables to the right of the ::.
See test dependent/should_compile/KindLevels, which requires this case.
(Naturally, any kind variable mentioned before the :: should not be bound
after it.)
-}
{- *********************************************************************
* *
TyClGroup
Strongly connected components of
type, class, instance, and role declarations
* *
********************************************************************* -}
{- Note [TyClGroups and dependency analysis]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A TyClGroup represents a strongly connected components of type/class/instance
decls, together with the role annotations for the type/class declarations.
The hs_tyclds :: [TyClGroup] field of a HsGroup is a dependency-order
sequence of strongly-connected components.
Invariants
* The type and class declarations, group_tyclds, may depend on each
other, or earlier TyClGroups, but not on later ones
* The role annotations, group_roles, are role-annotations for some or
all of the types and classes in group_tyclds (only).
* The instance declarations, group_instds, may (and usually will)
depend on group_tyclds, or on earlier TyClGroups, but not on later
ones.
See Note [Dependency analysis of type, class, and instance decls]
in RnSource for more info.
-}
-- | Type or Class Group
data TyClGroup pass -- See Note [TyClGroups and dependency analysis]
= TyClGroup { group_tyclds :: [LTyClDecl pass]
, group_roles :: [LRoleAnnotDecl pass]
, group_instds :: [LInstDecl pass] }
deriving instance (DataId id) => Data (TyClGroup id)
-- | A group containing no declarations at all.
emptyTyClGroup :: TyClGroup pass
emptyTyClGroup = mkTyClGroup [] []

-- | All type\/class declarations across a list of groups, in order.
tyClGroupTyClDecls :: [TyClGroup pass] -> [LTyClDecl pass]
tyClGroupTyClDecls groups = [d | g <- groups, d <- group_tyclds g]

-- | All instance declarations across a list of groups, in order.
tyClGroupInstDecls :: [TyClGroup pass] -> [LInstDecl pass]
tyClGroupInstDecls groups = [d | g <- groups, d <- group_instds g]

-- | All role annotations across a list of groups, in order.
tyClGroupRoleDecls :: [TyClGroup pass] -> [LRoleAnnotDecl pass]
tyClGroupRoleDecls groups = [d | g <- groups, d <- group_roles g]

-- | Build a group from type\/class and instance declarations; role
-- annotations start out empty.
mkTyClGroup :: [LTyClDecl pass] -> [LInstDecl pass] -> TyClGroup pass
mkTyClGroup decls instds = TyClGroup decls [] instds
{- *********************************************************************
* *
Data and type family declarations
* *
********************************************************************* -}
{- Note [FamilyResultSig]
~~~~~~~~~~~~~~~~~~~~~~~~~
This data type represents the return signature of a type family. Possible
values are:
* NoSig - the user supplied no return signature:
type family Id a where ...
* KindSig - the user supplied the return kind:
type family Id a :: * where ...
* TyVarSig - user named the result with a type variable and possibly
provided a kind signature for that variable:
type family Id a = r where ...
type family Id a = (r :: *) where ...
Naming result of a type family is required if we want to provide
injectivity annotation for a type family:
type family Id a = r | r -> a where ...
See also: Note [Injectivity annotation]
Note [Injectivity annotation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A user can declare a type family to be injective:
type family Id a = r | r -> a where ...
* The part after the "|" is called "injectivity annotation".
* "r -> a" part is called "injectivity condition"; at the moment terms
"injectivity annotation" and "injectivity condition" are synonymous
because we only allow a single injectivity condition.
* "r" is the "LHS of injectivity condition". LHS can only contain the
variable naming the result of a type family.
* "a" is the "RHS of injectivity condition". RHS contains space-separated
type and kind variables representing the arguments of a type
family. Variables can be omitted if a type family is not injective in
these arguments. Example:
type family Foo a b c = d | d -> a c where ...
Note that:
(a) naming of type family result is required to provide injectivity
annotation
(b) for associated types if the result was named then injectivity annotation
is mandatory. Otherwise result type variable is indistinguishable from
associated type default.
It is possible that in the future this syntax will be extended to support
more complicated injectivity annotations. For example we could declare that
if we know the result of Plus and one of its arguments we can determine the
other argument:
type family Plus a b = (r :: Nat) | r a -> b, r b -> a where ...
Here injectivity annotation would consist of two comma-separated injectivity
conditions.
See also Note [Injective type families] in TyCon
-}
-- | Located type Family Result Signature
type LFamilyResultSig pass = Located (FamilyResultSig pass)
-- | type Family Result Signature
data FamilyResultSig pass = -- see Note [FamilyResultSig]
NoSig
-- ^ - 'ApiAnnotation.AnnKeywordId' :
-- For details on above see note [Api annotations] in ApiAnnotation
| KindSig (LHsKind pass)
-- ^ - 'ApiAnnotation.AnnKeywordId' :
-- 'ApiAnnotation.AnnOpenP','ApiAnnotation.AnnDcolon',
-- 'ApiAnnotation.AnnCloseP'
-- For details on above see note [Api annotations] in ApiAnnotation
| TyVarSig (LHsTyVarBndr pass)
-- ^ - 'ApiAnnotation.AnnKeywordId' :
-- 'ApiAnnotation.AnnOpenP','ApiAnnotation.AnnDcolon',
-- 'ApiAnnotation.AnnCloseP', 'ApiAnnotation.AnnEqual'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving instance (DataId pass) => Data (FamilyResultSig pass)
-- | Located type Family Declaration
type LFamilyDecl pass = Located (FamilyDecl pass)
-- | type Family Declaration
data FamilyDecl pass = FamilyDecl
{ fdInfo :: FamilyInfo pass -- type/data, closed/open
, fdLName :: Located (IdP pass) -- type constructor
, fdTyVars :: LHsQTyVars pass -- type variables
, fdFixity :: LexicalFixity -- Fixity used in the declaration
, fdResultSig :: LFamilyResultSig pass -- result signature
, fdInjectivityAnn :: Maybe (LInjectivityAnn pass) -- optional injectivity ann
}
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnData', 'ApiAnnotation.AnnFamily',
-- 'ApiAnnotation.AnnWhere', 'ApiAnnotation.AnnOpenP',
-- 'ApiAnnotation.AnnDcolon', 'ApiAnnotation.AnnCloseP',
-- 'ApiAnnotation.AnnEqual', 'ApiAnnotation.AnnRarrow',
-- 'ApiAnnotation.AnnVbar'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving instance (DataId id) => Data (FamilyDecl id)
-- | Located Injectivity Annotation
type LInjectivityAnn pass = Located (InjectivityAnn pass)

-- | If the user supplied an injectivity annotation it is represented using
-- InjectivityAnn. At the moment this is a single injectivity condition - see
-- Note [Injectivity annotation]. `Located name` stores the LHS of injectivity
-- condition. `[Located name]` stores the RHS of injectivity condition. Example:
--
--   type family Foo a b c = r | r -> a c where ...
--
-- This will be represented as "InjectivityAnn `r` [`a`, `c`]"
data InjectivityAnn pass
  = InjectivityAnn (Located (IdP pass)) [Located (IdP pass)]
  -- ^ - 'ApiAnnotation.AnnKeywordId' :
  --     'ApiAnnotation.AnnRarrow', 'ApiAnnotation.AnnVbar'

  -- For details on above see note [Api annotations] in ApiAnnotation

deriving instance (DataId pass) => Data (InjectivityAnn pass)
-- | The flavour of a family declaration: @data family@, open @type family@,
-- or closed @type family ... where@ (carrying its equations).
data FamilyInfo pass
  = DataFamily
  | OpenTypeFamily
     -- | 'Nothing' if we're in an hs-boot file and the user
     -- said "type family Foo x where .."
  | ClosedTypeFamily (Maybe [LTyFamInstEqn pass])

deriving instance (DataId pass) => Data (FamilyInfo pass)
-- | Does this family declaration have a complete, user-supplied kind
-- signature (CUSK)?
--
-- A closed type family has a CUSK iff all its type variables and its
-- result carry explicit kind annotations.  All other (un-associated open)
-- families have CUSKs; for associated ones the enclosing class decides.
famDeclHasCusk :: Maybe Bool
                  -- ^ if associated, does the enclosing class have a CUSK?
               -> FamilyDecl pass -> Bool
famDeclHasCusk mb_class_cusk decl = case decl of
  FamilyDecl { fdInfo      = ClosedTypeFamily _
             , fdTyVars    = tyvars
             , fdResultSig = L _ resultSig }
    -> hsTvbAllKinded tyvars && hasReturnKindSignature resultSig
  _ -> mb_class_cusk `orElse` True
       -- all un-associated open families have CUSKs!
-- | Does this family declaration have a user-supplied return kind
-- signature?  A missing signature ('NoSig') or a bare, unannotated result
-- variable ('UserTyVar') counts as "no"; everything else counts as "yes".
hasReturnKindSignature :: FamilyResultSig a -> Bool
hasReturnKindSignature sig =
  case sig of
    NoSig                        -> False
    TyVarSig (L _ (UserTyVar _)) -> False
    _                            -> True
-- | Maybe return the name of the result type variable: 'Just' only when
-- the result signature names a variable (@= tv@ or @= (tv :: k)@).
resultVariableName :: FamilyResultSig a -> Maybe (IdP a)
resultVariableName sig =
  case sig of
    TyVarSig tv -> Just (hsLTyVarName tv)
    _           -> Nothing
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (FamilyDecl pass) where
  ppr = pprFamilyDecl TopLevel

-- | Pretty-print a family declaration.  At 'TopLevel' the @family@ keyword
-- is printed; inside a class ('NotTopLevel') it is omitted.
pprFamilyDecl :: (SourceTextX pass, OutputableBndrId pass)
              => TopLevelFlag -> FamilyDecl pass -> SDoc
pprFamilyDecl top_level (FamilyDecl { fdInfo = info, fdLName = ltycon
                                    , fdTyVars = tyvars
                                    , fdFixity = fixity
                                    , fdResultSig = L _ result
                                    , fdInjectivityAnn = mb_inj })
  = vcat [ pprFlavour info <+> pp_top_level <+>
           pp_vanilla_decl_head ltycon tyvars fixity [] <+>
           pp_kind <+> pp_inj <+> pp_where
         , nest 2 $ pp_eqns ]
  where
    -- @family@ keyword only for top-level declarations
    pp_top_level = case top_level of
                     TopLevel    -> text "family"
                     NotTopLevel -> empty

    -- result signature: nothing, @:: kind@, or @= tyvar@
    pp_kind = case result of
                NoSig            -> empty
                KindSig  kind    -> dcolon <+> ppr kind
                TyVarSig tv_bndr -> text "=" <+> ppr tv_bndr

    -- injectivity annotation, e.g. @| r -> a b@
    pp_inj = case mb_inj of
               Just (L _ (InjectivityAnn lhs rhs)) ->
                 hsep [ vbar, ppr lhs, text "->", hsep (map ppr rhs) ]
               Nothing -> empty

    -- closed families get a @where@ plus equations (@..@ in hs-boot files)
    (pp_where, pp_eqns) = case info of
      ClosedTypeFamily mb_eqns ->
        ( text "where"
        , case mb_eqns of
            Nothing   -> text ".."
            Just eqns -> vcat $ map (ppr_fam_inst_eqn . unLoc) eqns )
      _ -> (empty, empty)
-- | The keyword introducing a family of the given flavour:
-- @data@ for data families, @type@ for open or closed type families.
pprFlavour :: FamilyInfo pass -> SDoc
pprFlavour info = case info of
  DataFamily          -> text "data"
  OpenTypeFamily      -> text "type"
  ClosedTypeFamily {} -> text "type"
-- Prints e.g. @data family@ / @type family@.
instance Outputable (FamilyInfo pass) where
  ppr info = pprFlavour info <+> text "family"
{- *********************************************************************
* *
Data types and data constructors
* *
********************************************************************* -}
-- | Haskell Data type Definition
data HsDataDefn pass   -- The payload of a data type defn
                       -- Used *both* for vanilla data declarations,
                       --       *and* for data family instances
  = -- | Declares a data type or newtype, giving its constructors
    -- @
    --  data/newtype T a = <constrs>
    --  data/newtype instance T [a] = <constrs>
    -- @
    HsDataDefn { dd_ND     :: NewOrData,
                 dd_ctxt   :: LHsContext pass,        -- ^ Context
                 dd_cType  :: Maybe (Located CType),
                 dd_kindSig:: Maybe (LHsKind pass),
                     -- ^ Optional kind signature.
                     --
                     -- @(Just k)@ for a GADT-style @data@,
                     -- or @data instance@ decl, with explicit kind sig
                     --
                     -- Always @Nothing@ for H98-syntax decls

                 dd_cons   :: [LConDecl pass],
                     -- ^ Data constructors
                     --
                     -- For @data T a = T1 | T2 a@
                     --   the 'LConDecl's all have 'ConDeclH98'.
                     -- For @data T a where { T1 :: T a }@
                     --   the 'LConDecls' all have 'ConDeclGADT'.

                 dd_derivs :: HsDeriving pass  -- ^ Optional 'deriving' clauses

             -- For details on above see note [Api annotations] in ApiAnnotation
   }
deriving instance (DataId id) => Data (HsDataDefn id)
-- | Haskell Deriving clause
type HsDeriving pass = Located [LHsDerivingClause pass]
  -- ^ The optional @deriving@ clauses of a data declaration. "Clauses" is
  -- plural because one can specify multiple deriving clauses using the
  -- @-XDerivingStrategies@ language extension.
  --
  -- The list of 'LHsDerivingClause's corresponds to exactly what the user
  -- requested to derive, in order. If no deriving clauses were specified,
  -- the list is empty.

-- | Located Haskell Deriving Clause
type LHsDerivingClause pass = Located (HsDerivingClause pass)
-- | A single @deriving@ clause of a data declaration.
--
--  - 'ApiAnnotation.AnnKeywordId' :
--       'ApiAnnotation.AnnDeriving', 'ApiAnnotation.AnnStock',
--       'ApiAnnotation.AnnAnyClass', 'ApiAnnotation.AnnNewtype',
--       'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose'
data HsDerivingClause pass
  -- See Note [Deriving strategies] in TcDeriv
  = HsDerivingClause
    { deriv_clause_strategy :: Maybe (Located DerivStrategy)
      -- ^ The user-specified strategy (if any) to use when deriving
      -- 'deriv_clause_tys'.
    , deriv_clause_tys :: Located [LHsSigType pass]
      -- ^ The types to derive.
      --
      -- It uses 'LHsSigType's because, with @-XGeneralizedNewtypeDeriving@,
      -- we can mention type variables that aren't bound by the datatype, e.g.
      --
      -- > data T b = ... deriving (C [a])
      --
      -- should produce a derived instance for @C [a] (T b)@.
    }
deriving instance (DataId id) => Data (HsDerivingClause id)
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (HsDerivingClause pass) where
  ppr (HsDerivingClause { deriv_clause_strategy = dcs
                        , deriv_clause_tys      = L _ dct })
    = hsep [ text "deriving"
           , ppDerivStrategy dcs
           , pp_dct dct ]
      where
        -- This complexity is to distinguish between
        --    deriving Show
        --    deriving (Show)
        -- A singleton is only parenthesised when the type itself is compound.
        pp_dct [a@(HsIB { hsib_body = ty })]
          | isCompoundHsType ty = parens (ppr a)
          | otherwise           = ppr a
        pp_dct _ = parens (interpp'SP dct)

-- | Distinguishes @data@ from @newtype@ declarations.
data NewOrData
  = NewType                     -- ^ @newtype Blah ...@
  | DataType                    -- ^ @data Blah ...@
  deriving( Eq, Data )                -- Needed because Demand derives Eq
-- | Located data Constructor Declaration
type LConDecl pass = Located (ConDecl pass)
      -- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi' when
      --   in a GADT constructor list

  -- For details on above see note [Api annotations] in ApiAnnotation

-- |
--
-- @
-- data T b = forall a. Eq a => MkT a b
--   MkT :: forall b a. Eq a => MkT a b
--
-- data T b where
--      MkT1 :: Int -> T Int
--
-- data T = Int `MkT` Int
--        | MkT2
--
-- data T a where
--      Int `MkT` Int :: T Int
-- @
--
-- - 'ApiAnnotation.AnnKeywordId's : 'ApiAnnotation.AnnOpen',
--       'ApiAnnotation.AnnDotdot','ApiAnnotation.AnnClose',
--       'ApiAnnotation.AnnEqual','ApiAnnotation.AnnVbar',
--       'ApiAnnotation.AnnDarrow',
--       'ApiAnnotation.AnnForall','ApiAnnotation.AnnDot'

-- For details on above see note [Api annotations] in ApiAnnotation
-- | data Constructor Declaration: either GADT syntax (one or more names
-- followed by a full signature type) or Haskell-98 syntax.
data ConDecl pass
  = ConDeclGADT
      { con_names :: [Located (IdP pass)]
      , con_type  :: LHsSigType pass
        -- ^ The type after the @::@
      , con_doc   :: Maybe LHsDocString
        -- ^ A possible Haddock comment.
      }

  | ConDeclH98
      { con_name :: Located (IdP pass)

      , con_qvars :: Maybe (LHsQTyVars pass)
        -- User-written forall (if any), and its implicit
        -- kind variables
        -- Non-Nothing means an explicit user-written forall
        --     e.g. data T a = forall b. MkT b (b->a)
        --     con_qvars = {b}

      , con_cxt :: Maybe (LHsContext pass)
        -- ^ User-written context (if any)

      , con_details :: HsConDeclDetails pass
        -- ^ Arguments

      , con_doc :: Maybe LHsDocString
        -- ^ A possible Haddock comment.
      }
deriving instance (DataId pass) => Data (ConDecl pass)
-- | Haskell data Constructor Declaration Details: the constructor's
-- arguments in prefix, infix, or record syntax.
type HsConDeclDetails pass
   = HsConDetails (LBangType pass) (Located [LConDeclField pass])
-- | All names bound by a constructor declaration: exactly one for H98
-- syntax, possibly several for GADT syntax (@A, B :: ty@).
getConNames :: ConDecl pass -> [Located (IdP pass)]
getConNames decl = case decl of
  ConDeclH98  { con_name  = name  } -> [name]
  ConDeclGADT { con_names = names } -> names
-- don't call with RdrNames, because it can't deal with HsAppsTy
-- | The argument details of a constructor declaration.  For GADT syntax
-- the details are recovered by decomposing the written signature type.
getConDetails :: ConDecl pass -> HsConDeclDetails pass
getConDetails (ConDeclH98  {con_details = details}) = details
getConDetails (ConDeclGADT {con_type    = ty     }) =
  let (details, _res_ty, _cxt, _tvs) = gadtDeclDetails ty
  in details
-- don't call with RdrNames, because it can't deal with HsAppsTy
-- | Decompose a GADT constructor's signature type into its argument
-- details, result type, context, and explicit type-variable binders.
gadtDeclDetails :: LHsSigType pass
                -> ( HsConDeclDetails pass
                   , LHsType pass
                   , LHsContext pass
                   , [LHsTyVarBndr pass] )
gadtDeclDetails HsIB {hsib_body = lbody_ty} = (details,res_ty,cxt,tvs)
  where
    (tvs, cxt, tau) = splitLHsSigmaTy lbody_ty
    (details, res_ty) -- See Note [Sorting out the result type]
      = case tau of
          -- a record-syntax GADT constructor: @C :: { f :: ty } -> res@
          L _ (HsFunTy (L l (HsRecTy flds)) res_ty')
                  -> (RecCon (L l flds), res_ty')
          _other  -> (PrefixCon [], tau)
-- | The argument types of a constructor, in declaration order; record
-- fields contribute their field types.
hsConDeclArgTys :: HsConDeclDetails pass -> [LBangType pass]
hsConDeclArgTys details = case details of
  PrefixCon tys    -> tys
  InfixCon ty1 ty2 -> [ty1, ty2]
  RecCon flds      -> map (cd_fld_type . unLoc) (unLoc flds)
-- | Pretty-print the body of a data/newtype definition; the caller
-- supplies a function rendering the header from the context.
pp_data_defn :: (SourceTextX pass, OutputableBndrId pass)
                  => (HsContext pass -> SDoc)   -- Printing the header
                  -> HsDataDefn pass
                  -> SDoc
pp_data_defn pp_hdr (HsDataDefn { dd_ND = new_or_data, dd_ctxt = L _ context
                                , dd_cType = mb_ct
                                , dd_kindSig = mb_sig
                                , dd_cons = condecls, dd_derivs = derivings })
  | null condecls       -- no constructors: single line, no @=@/@where@ part
  = ppr new_or_data <+> pp_ct <+> pp_hdr context <+> pp_sig
    <+> pp_derivings derivings
  | otherwise
  = hang (ppr new_or_data <+> pp_ct <+> pp_hdr context <+> pp_sig)
       2 (pp_condecls condecls $$ pp_derivings derivings)
  where
    pp_ct = case mb_ct of
               Nothing -> empty
               Just ct -> ppr ct
    -- optional kind signature, @:: k@
    pp_sig = case mb_sig of
               Nothing   -> empty
               Just kind -> dcolon <+> ppr kind
    pp_derivings (L _ ds) = vcat (map ppr ds)
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (HsDataDefn pass) where
   -- An HsDataDefn carries no header of its own, so print a placeholder.
   ppr d = pp_data_defn (\_ -> text "Naked HsDataDefn") d

instance Outputable NewOrData where
  ppr NewType  = text "newtype"
  ppr DataType = text "data"
-- | Pretty-print a list of constructor declarations: GADT syntax gets a
-- @where@ block, H98 syntax gets @= C1 | C2 | ...@.
pp_condecls :: (SourceTextX pass, OutputableBndrId pass)
            => [LConDecl pass] -> SDoc
pp_condecls cs@(L _ ConDeclGADT{} : _) -- In GADT syntax
  = hang (text "where") 2 (vcat (map ppr cs))
pp_condecls cs                    -- In H98 syntax
  = equals <+> sep (punctuate (text " |") (map ppr cs))

instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (ConDecl pass) where
    ppr = pprConDecl

-- | Pretty-print a single constructor declaration (H98 or GADT syntax).
pprConDecl :: (SourceTextX pass, OutputableBndrId pass) => ConDecl pass -> SDoc
pprConDecl (ConDeclH98 { con_name = L _ con
                       , con_qvars = mtvs
                       , con_cxt = mcxt
                       , con_details = details
                       , con_doc = doc })
  = sep [ppr_mbDoc doc, pprHsForAll tvs cxt, ppr_details details]
  where
    -- infix: @ty1 `C` ty2@; prefix: @C ty1 ty2@; record: @C { ... }@
    ppr_details (InfixCon t1 t2) = hsep [ppr t1, pprInfixOcc con, ppr t2]
    ppr_details (PrefixCon tys)  = hsep (pprPrefixOcc con
                                   : map (pprHsType . unLoc) tys)
    ppr_details (RecCon fields)  = pprPrefixOcc con
                                 <+> pprConDeclFields (unLoc fields)
    -- only an explicit user-written forall contributes binders
    tvs = case mtvs of
      Nothing -> []
      Just (HsQTvs { hsq_explicit = tvs }) -> tvs

    cxt = fromMaybe (noLoc []) mcxt

pprConDecl (ConDeclGADT { con_names = cons, con_type = res_ty, con_doc = doc })
  = sep [ppr_mbDoc doc <+> ppr_con_names cons <+> dcolon
         <+> ppr res_ty]

-- | Comma-separated constructor names, each printed in prefix form.
ppr_con_names :: (OutputableBndr a) => [Located a] -> SDoc
ppr_con_names = pprWithCommas (pprPrefixOcc . unLoc)
{-
************************************************************************
* *
Instance declarations
* *
************************************************************************
Note [Type family instance declarations in HsSyn]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The data type FamEqn represents one equation of a type family instance.
Aside from the pass, it is also parameterised over two fields:
feqn_pats and feqn_rhs.
feqn_pats is either LHsTypes (for ordinary data/type family instances) or
LHsQTyVars (for associated type family default instances). In particular:
* An ordinary type family instance declaration looks like this in source Haskell
type instance T [a] Int = a -> a
(or something similar for a closed family)
It is represented by a FamInstEqn, with a *type* (LHsType) in the feqn_pats
field.
* On the other hand, the *default instance* of an associated type looks like
this in source Haskell
class C a where
type T a b
type T a b = a -> b -- The default instance
It is represented by a TyFamDefltEqn, with *type variables* (LHsQTyVars) in
the feqn_pats field.
feqn_rhs is either an HsDataDefn (for data family instances) or an LHsType
(for type family instances).
-}
----------------- Type synonym family instances -------------

-- | Located Type Family Instance Equation
type LTyFamInstEqn pass = Located (TyFamInstEqn pass)
  -- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi'
  --   when in a list

-- For details on above see note [Api annotations] in ApiAnnotation

-- | Located Type Family Default Equation
type LTyFamDefltEqn pass = Located (TyFamDefltEqn pass)

-- | Haskell Type Patterns: the LHS patterns of a family instance equation.
type HsTyPats pass = [LHsType pass]
{- Note [Family instance declaration binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For ordinary data/type family instances, the feqn_pats field of FamEqn stores
the LHS type (and kind) patterns. These type patterns can of course contain
type (and kind) variables, which are bound in the hsib_vars field of the
HsImplicitBndrs in FamInstEqn. Note in particular
* The hsib_vars *includes* any anonymous wildcards. For example
type instance F a _ = a
The hsib_vars will be {a, _}. Remember that each separate wildcard
'_' gets its own unique. In this context wildcards behave just like
an ordinary type variable, only anonymous.
* The hsib_vars *includes* type variables that are already in scope
Eg class C s t where
type F t p :: *
instance C w (a,b) where
type F (a,b) x = x->a
The hsib_vars of the F decl are {a,b,x}, even though the F decl
is nested inside the 'instance' decl.
However after the renamer, the uniques will match up:
instance C w7 (a8,b9) where
type F (a8,b9) x10 = x10->a8
so that we can compare the type pattern in the 'instance' decl and
in the associated 'type' decl
For associated type family default instances (TyFamDefltEqn), instead of using
type patterns with binders in a surrounding HsImplicitBndrs, we use raw type
variables (LHsQTyVars) in the feqn_pats field of FamEqn.
-}
-- | Type Family Instance Equation
type TyFamInstEqn pass = FamInstEqn pass (LHsType pass)

-- | Type Family Default Equation: uses raw type variables on the LHS.
type TyFamDefltEqn pass = FamEqn pass (LHsQTyVars pass) (LHsType pass)
  -- See Note [Type family instance declarations in HsSyn]

-- | Located Type Family Instance Declaration
type LTyFamInstDecl pass = Located (TyFamInstDecl pass)

-- | Type Family Instance Declaration
newtype TyFamInstDecl pass = TyFamInstDecl { tfid_eqn :: TyFamInstEqn pass }
    -- ^
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
    --           'ApiAnnotation.AnnInstance',

    -- For details on above see note [Api annotations] in ApiAnnotation

deriving instance DataId pass => Data (TyFamInstDecl pass)
----------------- Data family instances -------------

-- | Located Data Family Instance Declaration
type LDataFamInstDecl pass = Located (DataFamInstDecl pass)

-- | Data Family Instance Declaration
newtype DataFamInstDecl pass
  = DataFamInstDecl { dfid_eqn :: FamInstEqn pass (HsDataDefn pass) }
    -- ^
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnData',
    --           'ApiAnnotation.AnnNewType','ApiAnnotation.AnnInstance',
    --           'ApiAnnotation.AnnDcolon'
    --           'ApiAnnotation.AnnWhere','ApiAnnotation.AnnOpen',
    --           'ApiAnnotation.AnnClose'

    -- For details on above see note [Api annotations] in ApiAnnotation

deriving instance DataId pass => Data (DataFamInstDecl pass)
----------------- Family instances (common types) -------------

-- | Located Family Instance Equation
type LFamInstEqn pass rhs = Located (FamInstEqn pass rhs)

-- | Family Instance Equation
type FamInstEqn pass rhs
  = HsImplicitBndrs pass (FamEqn pass (HsTyPats pass) rhs)
            -- ^ Here, the @pats@ are type patterns (with kind and type bndrs).
            -- See Note [Family instance declaration binders]

-- | Family Equation
--
-- One equation in a type family instance declaration, data family instance
-- declaration, or type family default.
-- See Note [Type family instance declarations in HsSyn]
-- See Note [Family instance declaration binders]
data FamEqn pass pats rhs
  = FamEqn
       { feqn_tycon  :: Located (IdP pass)
       , feqn_pats   :: pats
       , feqn_fixity :: LexicalFixity -- ^ Fixity used in the declaration
       , feqn_rhs    :: rhs
       }
    -- ^
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnEqual'

    -- For details on above see note [Api annotations] in ApiAnnotation

deriving instance (DataId pass, Data pats, Data rhs)
                => Data (FamEqn pass pats rhs)
----------------- Class instances -------------

-- | Located Class Instance Declaration
type LClsInstDecl pass = Located (ClsInstDecl pass)

-- | Class Instance Declaration
data ClsInstDecl pass
  = ClsInstDecl
      { cid_poly_ty :: LHsSigType pass    -- Context => Class Instance-type
                -- Using a polytype means that the renamer conveniently
                -- figures out the quantified type variables for us.
      , cid_binds         :: LHsBinds pass           -- Class methods
      , cid_sigs          :: [LSig pass]             -- User-supplied pragmatic info
      , cid_tyfam_insts   :: [LTyFamInstDecl pass]   -- Type family instances
      , cid_datafam_insts :: [LDataFamInstDecl pass] -- Data family instances
      , cid_overlap_mode  :: Maybe (Located OverlapMode)
         -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
         --                                    'ApiAnnotation.AnnClose',

        -- For details on above see note [Api annotations] in ApiAnnotation
      }
    -- ^
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnInstance',
    --           'ApiAnnotation.AnnWhere',
    --           'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose',

    -- For details on above see note [Api annotations] in ApiAnnotation

deriving instance (DataId id) => Data (ClsInstDecl id)
----------------- Instances of all kinds -------------

-- | Located Instance Declaration
type LInstDecl pass = Located (InstDecl pass)

-- | Instance Declaration: wraps the three kinds of instance
-- (class, data family, type family) in one sum type.
data InstDecl pass  -- Both class and family instances
  = ClsInstD
      { cid_inst  :: ClsInstDecl pass }
  | DataFamInstD              -- data family instance
      { dfid_inst :: DataFamInstDecl pass }
  | TyFamInstD              -- type family instance
      { tfid_inst :: TyFamInstDecl pass }

deriving instance (DataId id) => Data (InstDecl id)
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (TyFamInstDecl pass) where
  ppr = pprTyFamInstDecl TopLevel

-- | Pretty-print a type family instance declaration; at top level the
-- @instance@ keyword is printed too.
pprTyFamInstDecl :: (SourceTextX pass, OutputableBndrId pass)
                 => TopLevelFlag -> TyFamInstDecl pass -> SDoc
pprTyFamInstDecl top_lvl (TyFamInstDecl { tfid_eqn = eqn })
   = text "type" <+> ppr_instance_keyword top_lvl <+> ppr_fam_inst_eqn eqn
-- | The @instance@ keyword for top-level instances; nothing for
-- associated (in-class) ones.
ppr_instance_keyword :: TopLevelFlag -> SDoc
ppr_instance_keyword top_lvl = case top_lvl of
  TopLevel    -> text "instance"
  NotTopLevel -> empty
-- | Pretty-print one equation of a type family instance: @F pats = rhs@.
ppr_fam_inst_eqn :: (SourceTextX pass, OutputableBndrId pass)
                 => TyFamInstEqn pass -> SDoc
ppr_fam_inst_eqn (HsIB { hsib_body = FamEqn { feqn_tycon  = tycon
                                            , feqn_pats   = pats
                                            , feqn_fixity = fixity
                                            , feqn_rhs    = rhs }})
    = pprFamInstLHS tycon pats fixity [] Nothing <+> equals <+> ppr rhs

-- | Pretty-print an associated-type default equation: @type F tvs = rhs@.
ppr_fam_deflt_eqn :: (SourceTextX pass, OutputableBndrId pass)
                  => LTyFamDefltEqn pass -> SDoc
ppr_fam_deflt_eqn (L _ (FamEqn { feqn_tycon  = tycon
                               , feqn_pats   = tvs
                               , feqn_fixity = fixity
                               , feqn_rhs    = rhs }))
    = text "type" <+> pp_vanilla_decl_head tycon tvs fixity []
                  <+> equals <+> ppr rhs
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (DataFamInstDecl pass) where
  ppr = pprDataFamInstDecl TopLevel

-- | Pretty-print a data family instance declaration.
pprDataFamInstDecl :: (SourceTextX pass, OutputableBndrId pass)
                   => TopLevelFlag -> DataFamInstDecl pass -> SDoc
pprDataFamInstDecl top_lvl (DataFamInstDecl { dfid_eqn = HsIB { hsib_body =
                             FamEqn { feqn_tycon  = tycon
                                    , feqn_pats   = pats
                                    , feqn_fixity = fixity
                                    , feqn_rhs    = defn }}})
  = pp_data_defn pp_hdr defn
  where
    -- The header is the instance LHS, e.g. @instance T [a]@.
    pp_hdr ctxt = ppr_instance_keyword top_lvl
              <+> pprFamInstLHS tycon pats fixity ctxt (dd_kindSig defn)

-- | The @data@ or @newtype@ keyword of a data family instance.
pprDataFamInstFlavour :: DataFamInstDecl pass -> SDoc
pprDataFamInstFlavour (DataFamInstDecl { dfid_eqn = HsIB { hsib_body =
                       FamEqn { feqn_rhs = HsDataDefn { dd_ND = nd }}}})
  = ppr nd
-- | Pretty-print the left-hand side of a family instance: context, head
-- (infix or prefix depending on fixity), and optional kind signature.
pprFamInstLHS :: (SourceTextX pass, OutputableBndrId pass)
   => Located (IdP pass)
   -> HsTyPats pass
   -> LexicalFixity
   -> HsContext pass
   -> Maybe (LHsKind pass)
   -> SDoc
pprFamInstLHS thing typats fixity context mb_kind_sig
                                              -- explicit type patterns
   = hsep [ pprHsContext context, pp_pats typats, pp_kind_sig ]
   where
     -- infix: first pattern, operator, remaining patterns;
     -- prefix: name first, then all patterns
     pp_pats (patl:patsr)
       | fixity == Infix
          = hsep [pprHsType (unLoc patl), pprInfixOcc (unLoc thing)
                 , hsep (map (pprHsType.unLoc) patsr)]
       | otherwise = hsep [ pprPrefixOcc (unLoc thing)
                   , hsep (map (pprHsType.unLoc) (patl:patsr))]
     pp_pats [] = pprPrefixOcc (unLoc thing)

     -- optional @:: k@ after the patterns
     pp_kind_sig
       | Just k <- mb_kind_sig
       = dcolon <+> ppr k
       | otherwise
       = empty
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (ClsInstDecl pass) where
    ppr (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = binds
                     , cid_sigs = sigs, cid_tyfam_insts = ats
                     , cid_overlap_mode = mbOverlap
                     , cid_datafam_insts = adts })
      | null sigs, null ats, null adts, isEmptyBag binds  -- No "where" part
      = top_matter

      | otherwise       -- Laid out
      -- associated type instances first, then data family instances,
      -- then the method bindings interleaved with their signatures
      = vcat [ top_matter <+> text "where"
             , nest 2 $ pprDeclList $
               map (pprTyFamInstDecl NotTopLevel . unLoc)   ats ++
               map (pprDataFamInstDecl NotTopLevel . unLoc) adts ++
               pprLHsBindsForUser binds sigs ]
      where
        top_matter = text "instance" <+> ppOverlapPragma mbOverlap
                                     <+> ppr inst_ty
-- | Pretty-print an optional deriving strategy; empty when absent.
ppDerivStrategy :: Maybe (Located DerivStrategy) -> SDoc
ppDerivStrategy Nothing         = empty
ppDerivStrategy (Just (L _ ds)) = ppr ds
-- | Pretty-print an OVERLAP pragma, reusing the recorded source text
-- (plus the closing @#-}@) when available, otherwise a canonical form.
ppOverlapPragma :: Maybe (Located OverlapMode) -> SDoc
ppOverlapPragma mb =
  case mb of
    Nothing                     -> empty
    Just (L _ (NoOverlap s))    -> maybe_stext s "{-# NO_OVERLAP #-}"
    Just (L _ (Overlappable s)) -> maybe_stext s "{-# OVERLAPPABLE #-}"
    Just (L _ (Overlapping s))  -> maybe_stext s "{-# OVERLAPPING #-}"
    Just (L _ (Overlaps s))     -> maybe_stext s "{-# OVERLAPS #-}"
    Just (L _ (Incoherent s))   -> maybe_stext s "{-# INCOHERENT #-}"
  where
    -- the stored source text omits the closing @#-}@, so re-append it
    maybe_stext NoSourceText     alt = text alt
    maybe_stext (SourceText src) _   = text src <+> text "#-}"
-- Dispatch to the printer for the wrapped instance kind.
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (InstDecl pass) where
    ppr (ClsInstD     { cid_inst  = decl }) = ppr decl
    ppr (TyFamInstD   { tfid_inst = decl }) = ppr decl
    ppr (DataFamInstD { dfid_inst = decl }) = ppr decl
-- Extract the declarations of associated data types from an instance
instDeclDataFamInsts :: [LInstDecl pass] -> [DataFamInstDecl pass]
instDeclDataFamInsts inst_decls = concatMap (do_one . unLoc) inst_decls
  where
    -- A class instance contributes all of its data family instances;
    -- a stand-alone data family instance contributes itself;
    -- type family instances contribute nothing.
    do_one (ClsInstD { cid_inst =
                         ClsInstDecl { cid_datafam_insts = fam_insts } })
                                                   = map unLoc fam_insts
    do_one (DataFamInstD { dfid_inst = fam_inst }) = [fam_inst]
    do_one (TyFamInstD {})                         = []
{-
************************************************************************
* *
\subsection[DerivDecl]{A stand-alone instance deriving declaration}
* *
************************************************************************
-}
-- | Located Deriving Declaration
type LDerivDecl pass = Located (DerivDecl pass)

-- | Deriving Declaration: a stand-alone @deriving instance@ declaration,
-- with optional strategy and overlap pragma.
data DerivDecl pass = DerivDecl
        { deriv_type         :: LHsSigType pass
        , deriv_strategy     :: Maybe (Located DerivStrategy)
        , deriv_overlap_mode :: Maybe (Located OverlapMode)
         -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnDeriving',
         --      'ApiAnnotation.AnnInstance', 'ApiAnnotation.AnnStock',
         --      'ApiAnnotation.AnnAnyClass', 'ApiAnnotation.AnnNewtype',
         --      'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose'

  -- For details on above see note [Api annotations] in ApiAnnotation
        }
deriving instance (DataId pass) => Data (DerivDecl pass)
-- Prints e.g. @deriving newtype instance {-# OVERLAPPING #-} C T@.
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (DerivDecl pass) where
    ppr (DerivDecl { deriv_type = ty
                   , deriv_strategy = ds
                   , deriv_overlap_mode = o })
        = hsep [ text "deriving"
               , ppDerivStrategy ds
               , text "instance"
               , ppOverlapPragma o
               , ppr ty ]
{-
************************************************************************
* *
\subsection[DefaultDecl]{A @default@ declaration}
* *
************************************************************************
There can only be one default declaration per module, but it is hard
for the parser to check that; we pass them all through in the abstract
syntax, and that restriction must be checked in the front end.
-}
-- | Located Default Declaration
type LDefaultDecl pass = Located (DefaultDecl pass)

-- | Default Declaration: @default (ty1, ..., tyn)@, listing the types
-- tried when defaulting ambiguous numeric classes.
data DefaultDecl pass
  = DefaultDecl [LHsType pass]
        -- ^ - 'ApiAnnotation.AnnKeywordId's : 'ApiAnnotation.AnnDefault',
        --          'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose'

        -- For details on above see note [Api annotations] in ApiAnnotation

deriving instance (DataId pass) => Data (DefaultDecl pass)
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (DefaultDecl pass) where
    ppr (DefaultDecl tys)
      = text "default" <+> parens (interpp'SP tys)
{-
************************************************************************
* *
\subsection{Foreign function interface declaration}
* *
************************************************************************
-}
-- foreign declarations are distinguished as to whether they define or use a
-- Haskell name
--
-- * the Boolean value indicates whether the pre-standard deprecated syntax
-- has been used
-- | Located Foreign Declaration
type LForeignDecl pass = Located (ForeignDecl pass)

-- | Foreign Declaration: an FFI @foreign import@ (defines a Haskell name)
-- or @foreign export@ (uses one).  'fd_co' relates the representation type
-- to the written signature type and is filled in by the typechecker.
data ForeignDecl pass
  = ForeignImport
      { fd_name   :: Located (IdP pass)    -- defines this name
      , fd_sig_ty :: LHsSigType pass       -- sig_ty
      , fd_co     :: PostTc pass Coercion  -- rep_ty ~ sig_ty
      , fd_fi     :: ForeignImport }

  | ForeignExport
      { fd_name   :: Located (IdP pass)    -- uses this name
      , fd_sig_ty :: LHsSigType pass       -- sig_ty
      , fd_co     :: PostTc pass Coercion  -- rep_ty ~ sig_ty
      , fd_fe     :: ForeignExport }
    -- ^
    --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnForeign',
    --       'ApiAnnotation.AnnImport','ApiAnnotation.AnnExport',
    --       'ApiAnnotation.AnnDcolon'

    -- For details on above see note [Api annotations] in ApiAnnotation

deriving instance (DataId pass) => Data (ForeignDecl pass)
{-
In both ForeignImport and ForeignExport:
sig_ty is the type given in the Haskell code
rep_ty is the representation for this type, i.e. with newtypes
coerced away and type functions evaluated.
Thus if the declaration is valid, then rep_ty will only use types
such as Int and IO that we know how to make foreign calls with.
-}
-- | Placeholder for 'fd_co' of a foreign import before the typechecker
-- supplies the rep_ty ~ sig_ty coercion.
noForeignImportCoercionYet :: PlaceHolder
noForeignImportCoercionYet = PlaceHolder

-- | Placeholder for 'fd_co' of a foreign export, likewise pre-typechecking.
noForeignExportCoercionYet :: PlaceHolder
noForeignExportCoercionYet = PlaceHolder
-- Specification Of an imported external entity in dependence on the calling
-- convention
--
data ForeignImport = -- import of a C entity
                     --
                     --  * the two strings specifying a header file or library
                     --    may be empty, which indicates the absence of a
                     --    header or object specification (both are not used
                     --    in the case of `CWrapper' and when `CFunction'
                     --    has a dynamic target)
                     --
                     --  * the calling convention is irrelevant for code
                     --    generation in the case of `CLabel', but is needed
                     --    for pretty printing
                     --
                     --  * `Safety' is irrelevant for `CLabel' and `CWrapper'
                     --
                     CImport  (Located CCallConv) -- ccall or stdcall
                              (Located Safety)    -- interruptible, safe or unsafe
                              (Maybe Header)      -- name of C header
                              CImportSpec         -- details of the C entity
                              (Located SourceText) -- original source text for
                                                   -- the C entity
  deriving Data
-- details of an external C entity
--
data CImportSpec = CLabel    CLabelString     -- import address of a C label
                 | CFunction CCallTarget      -- static or dynamic function
                 | CWrapper                   -- wrapper to expose closures
                                              -- (former f.e.d.)
  deriving Data
-- specification of an externally exported entity in dependence on the calling
-- convention
--
data ForeignExport = CExport  (Located CExportSpec) -- contains the calling
                                                    -- convention
                              (Located SourceText)  -- original source text for
                                                    -- the C entity
  deriving Data
-- pretty printing of foreign declarations
--
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (ForeignDecl pass) where
  ppr (ForeignImport { fd_name = n, fd_sig_ty = ty, fd_fi = fimport })
    = hang (text "foreign import" <+> ppr fimport <+> ppr n)
         2 (dcolon <+> ppr ty)
  ppr (ForeignExport { fd_name = n, fd_sig_ty = ty, fd_fe = fexport }) =
    hang (text "foreign export" <+> ppr fexport <+> ppr n)
       2 (dcolon <+> ppr ty)

instance Outputable ForeignImport where
  ppr (CImport  cconv safety mHeader spec (L _ srcText)) =
    ppr cconv <+> ppr safety
      <+> pprWithSourceText srcText (pprCEntity spec "")
    where
      pp_hdr = case mHeader of
               Nothing -> empty
               Just (Header _ header) -> ftext header

      pprCEntity (CLabel lbl) _ =
        doubleQuotes $ text "static" <+> pp_hdr <+> char '&' <> ppr lbl
      pprCEntity (CFunction (StaticTarget st _lbl _ isFun)) src =
        -- only emit the quoted entity string when there is anything to say
        if dqNeeded then doubleQuotes ce else empty
        where
          dqNeeded = (take 6 src == "static")
                  || isJust mHeader
                  || not isFun
                  || st /= NoSourceText
          ce =
                -- We may need to drop leading spaces first
                (if take 6 src == "static" then text "static" else empty)
            <+> pp_hdr
            <+> (if isFun then empty else text "value")
            <+> (pprWithSourceText st empty)
      pprCEntity (CFunction DynamicTarget) _ =
        doubleQuotes $ text "dynamic"
      pprCEntity CWrapper _ = doubleQuotes $ text "wrapper"
instance Outputable ForeignExport where
  ppr (CExport  (L _ (CExportStatic _ lbl cconv)) _) =
    ppr cconv <+> char '"' <> ppr lbl <> char '"'
{-
************************************************************************
* *
\subsection{Transformation rules}
* *
************************************************************************
-}
-- | Located Rule Declarations
type LRuleDecls pass = Located (RuleDecls pass)

   -- Note [Pragma source text] in BasicTypes
-- | Rule Declarations: one @{-# RULES ... #-}@ pragma group, which may
-- contain several individual rules.
data RuleDecls pass = HsRules { rds_src   :: SourceText
                              , rds_rules :: [LRuleDecl pass] }
deriving instance (DataId pass) => Data (RuleDecls pass)
-- | Located Rule Declaration
type LRuleDecl pass = Located (RuleDecl pass)

-- | Rule Declaration: one rewrite rule @"name" forall bndrs. lhs = rhs@.
data RuleDecl pass
  = HsRule                             -- Source rule
        (Located (SourceText,RuleName)) -- Rule name
               -- Note [Pragma source text] in BasicTypes
        Activation
        [LRuleBndr pass]        -- Forall'd vars; after typechecking this
                                --   includes tyvars
        (Located (HsExpr pass)) -- LHS
        (PostRn pass NameSet)   -- Free-vars from the LHS
        (Located (HsExpr pass)) -- RHS
        (PostRn pass NameSet)   -- Free-vars from the RHS
        -- ^
        --  - 'ApiAnnotation.AnnKeywordId' :
        --       'ApiAnnotation.AnnOpen','ApiAnnotation.AnnTilde',
        --       'ApiAnnotation.AnnVal',
        --       'ApiAnnotation.AnnClose',
        --       'ApiAnnotation.AnnForall','ApiAnnotation.AnnDot',
        --       'ApiAnnotation.AnnEqual',

        -- For details on above see note [Api annotations] in ApiAnnotation

deriving instance (DataId pass) => Data (RuleDecl pass)
-- | Concatenate the rules from every RULES pragma group into one flat list.
flattenRuleDecls :: [LRuleDecls pass] -> [LRuleDecl pass]
flattenRuleDecls = concatMap (rds_rules . unLoc)
-- | Located Rule Binder
type LRuleBndr pass = Located (RuleBndr pass)

-- | Rule Binder: a variable in a rule's @forall@, written bare or with a
-- type signature, @(x :: ty)@.
data RuleBndr pass
  = RuleBndr (Located (IdP pass))
  | RuleBndrSig (Located (IdP pass)) (LHsSigWcType pass)
        -- ^
        --  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnDcolon','ApiAnnotation.AnnClose'

        -- For details on above see note [Api annotations] in ApiAnnotation

deriving instance (DataId pass) => Data (RuleBndr pass)
-- | The type signatures attached to rule binders, i.e. those written
-- as @(x :: ty)@; bare binders contribute nothing.
collectRuleBndrSigTys :: [RuleBndr pass] -> [LHsSigWcType pass]
collectRuleBndrSigTys bndrs = concatMap sig_ty bndrs
  where
    sig_ty (RuleBndrSig _ ty) = [ty]
    sig_ty (RuleBndr _)       = []
-- | Print a rule's name, preferring the recorded source text (which
-- carries its own quotes) over a synthesised double-quoted form.
pprFullRuleName :: Located (SourceText, RuleName) -> SDoc
pprFullRuleName (L _ (st, n)) = pprWithSourceText st (doubleQuotes $ ftext n)
instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (RuleDecls pass) where
  ppr (HsRules st rules)
    = pprWithSourceText st (text "{-# RULES")
          <+> vcat (punctuate semi (map ppr rules)) <+> text "#-}"

instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (RuleDecl pass) where
  ppr (HsRule name act ns lhs _fv_lhs rhs _fv_rhs)
        = sep [pprFullRuleName name <+> ppr act,
               nest 4 (pp_forall <+> pprExpr (unLoc lhs)),
               nest 6 (equals <+> pprExpr (unLoc rhs)) ]
        where
          -- @forall x y z.@ prefix, omitted when there are no binders
          pp_forall | null ns   = empty
                    | otherwise = forAllLit <+> fsep (map ppr ns) <> dot

instance (SourceTextX pass, OutputableBndrId pass)
       => Outputable (RuleBndr pass) where
   ppr (RuleBndr name) = ppr name
   ppr (RuleBndrSig name ty) = parens (ppr name <> dcolon <> ppr ty)
{-
************************************************************************
* *
\subsection{Vectorisation declarations}
* *
************************************************************************
A vectorisation pragma, one of
{-# VECTORISE f = closure1 g (scalar_map g) #-}
{-# VECTORISE SCALAR f #-}
{-# NOVECTORISE f #-}
{-# VECTORISE type T = ty #-}
{-# VECTORISE SCALAR type T #-}
-}
-- | Located Vectorise Declaration
type LVectDecl pass = Located (VectDecl pass)

-- | Vectorise Declaration: the various VECTORISE/NOVECTORISE pragmas.
-- The @*In@ forms are pre-typechecking (names); the @*Out@ forms are
-- post-typechecking (TyCons, Classes, ClsInsts).
data VectDecl pass
  = HsVect
      SourceText   -- Note [Pragma source text] in BasicTypes
      (Located (IdP pass))
      (LHsExpr pass)
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnEqual','ApiAnnotation.AnnClose'

        -- For details on above see note [Api annotations] in ApiAnnotation
  | HsNoVect
      SourceText   -- Note [Pragma source text] in BasicTypes
      (Located (IdP pass))
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnClose'

        -- For details on above see note [Api annotations] in ApiAnnotation
  | HsVectTypeIn                -- pre type-checking
      SourceText                -- Note [Pragma source text] in BasicTypes
      Bool                      -- 'TRUE' => SCALAR declaration
      (Located (IdP pass))
      (Maybe (Located (IdP pass))) -- 'Nothing' => no right-hand side
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnType','ApiAnnotation.AnnClose',
        --     'ApiAnnotation.AnnEqual'

        -- For details on above see note [Api annotations] in ApiAnnotation
  | HsVectTypeOut               -- post type-checking
      Bool                      -- 'TRUE' => SCALAR declaration
      TyCon
      (Maybe TyCon)             -- 'Nothing' => no right-hand side
  | HsVectClassIn               -- pre type-checking
      SourceText                -- Note [Pragma source text] in BasicTypes
      (Located (IdP pass))
        -- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
        --     'ApiAnnotation.AnnClass','ApiAnnotation.AnnClose',

        -- For details on above see note [Api annotations] in ApiAnnotation
  | HsVectClassOut              -- post type-checking
      Class
  | HsVectInstIn                -- pre type-checking (always SCALAR)  !!!FIXME: should be superfluous now
      (LHsSigType pass)
  | HsVectInstOut               -- post type-checking (always SCALAR) !!!FIXME: should be superfluous now
      ClsInst

deriving instance (DataId pass) => Data (VectDecl pass)
-- | The 'Name' of the entity a vectorisation declaration concerns.
-- Partial: the two @HsVectInst*@ forms carry no name, so those cases
-- 'panic'; callers should filter them out first (cf. 'lvectInstDecl').
lvectDeclName :: NamedThing (IdP pass) => LVectDecl pass -> Name
lvectDeclName (L _ (HsVect _ (L _ name) _)) = getName name
lvectDeclName (L _ (HsNoVect _ (L _ name))) = getName name
lvectDeclName (L _ (HsVectTypeIn _ _ (L _ name) _)) = getName name
lvectDeclName (L _ (HsVectTypeOut _ tycon _)) = getName tycon
lvectDeclName (L _ (HsVectClassIn _ (L _ name))) = getName name
lvectDeclName (L _ (HsVectClassOut cls)) = getName cls
lvectDeclName (L _ (HsVectInstIn _))
  = panic "HsDecls.lvectDeclName: HsVectInstIn"
lvectDeclName (L _ (HsVectInstOut _))
  = panic "HsDecls.lvectDeclName: HsVectInstOut"
-- | Is this a vectorised-instance declaration (either the pre- or
-- post-type-checking form)?
lvectInstDecl :: LVectDecl pass -> Bool
lvectInstDecl (L _ decl) = case decl of
  HsVectInstIn  {} -> True
  HsVectInstOut {} -> True
  _                -> False
instance (SourceTextX pass, OutputableBndrId pass)
=> Outputable (VectDecl pass) where
ppr (HsVect _ v rhs)
= sep [text "{-# VECTORISE" <+> ppr v,
nest 4 $
pprExpr (unLoc rhs) <+> text "#-}" ]
ppr (HsNoVect _ v)
= sep [text "{-# NOVECTORISE" <+> ppr v <+> text "#-}" ]
ppr (HsVectTypeIn _ False t Nothing)
= sep [text "{-# VECTORISE type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeIn _ False t (Just t'))
= sep [text "{-# VECTORISE type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectTypeIn _ True t Nothing)
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeIn _ True t (Just t'))
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectTypeOut False t Nothing)
= sep [text "{-# VECTORISE type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeOut False t (Just t'))
= sep [text "{-# VECTORISE type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectTypeOut True t Nothing)
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t <+> text "#-}" ]
ppr (HsVectTypeOut True t (Just t'))
= sep [text "{-# VECTORISE SCALAR type" <+> ppr t, text "=", ppr t', text "#-}" ]
ppr (HsVectClassIn _ c)
= sep [text "{-# VECTORISE class" <+> ppr c <+> text "#-}" ]
ppr (HsVectClassOut c)
= sep [text "{-# VECTORISE class" <+> ppr c <+> text "#-}" ]
ppr (HsVectInstIn ty)
= sep [text "{-# VECTORISE SCALAR instance" <+> ppr ty <+> text "#-}" ]
ppr (HsVectInstOut i)
= sep [text "{-# VECTORISE SCALAR instance" <+> ppr i <+> text "#-}" ]
{-
************************************************************************
* *
\subsection[DocDecl]{Document comments}
* *
************************************************************************
-}
-- | Located Documentation comment Declaration
type LDocDecl = Located (DocDecl)
-- | Documentation comment Declaration
data DocDecl
= DocCommentNext HsDocString
| DocCommentPrev HsDocString
| DocCommentNamed String HsDocString
| DocGroup Int HsDocString
deriving Data
-- Okay, I need to reconstruct the document comments, but for now:
instance Outputable DocDecl where
ppr _ = text "<document comment>"
-- | The documentation string carried by any kind of doc declaration.
docDeclDoc :: DocDecl -> HsDocString
docDeclDoc decl = case decl of
  DocCommentNext d      -> d
  DocCommentPrev d      -> d
  DocCommentNamed _ d   -> d
  DocGroup _ d          -> d
{-
************************************************************************
* *
\subsection[DeprecDecl]{Deprecations}
* *
************************************************************************
We use exported entities for things to deprecate.
-}
-- | Located Warning Declarations
type LWarnDecls pass = Located (WarnDecls pass)
-- Note [Pragma source text] in BasicTypes
-- | Warning pragma Declarations
data WarnDecls pass = Warnings { wd_src :: SourceText
, wd_warnings :: [LWarnDecl pass]
}
deriving instance (DataId pass) => Data (WarnDecls pass)
-- | Located Warning pragma Declaration
type LWarnDecl pass = Located (WarnDecl pass)
-- | Warning pragma Declaration
data WarnDecl pass = Warning [Located (IdP pass)] WarningTxt
deriving instance (DataId pass) => Data (WarnDecl pass)
instance OutputableBndr (IdP pass) => Outputable (WarnDecls pass) where
ppr (Warnings (SourceText src) decls)
= text src <+> vcat (punctuate comma (map ppr decls)) <+> text "#-}"
ppr (Warnings NoSourceText _decls) = panic "WarnDecls"
instance OutputableBndr (IdP pass) => Outputable (WarnDecl pass) where
ppr (Warning thing txt)
= hsep ( punctuate comma (map ppr thing))
<+> ppr txt
{-
************************************************************************
* *
\subsection[AnnDecl]{Annotations}
* *
************************************************************************
-}
-- | Located Annotation Declaration
type LAnnDecl pass = Located (AnnDecl pass)
-- | Annotation Declaration
data AnnDecl pass = HsAnnotation
SourceText -- Note [Pragma source text] in BasicTypes
(AnnProvenance (IdP pass)) (Located (HsExpr pass))
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnType'
-- 'ApiAnnotation.AnnModule'
-- 'ApiAnnotation.AnnClose'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving instance (DataId pass) => Data (AnnDecl pass)
instance (SourceTextX pass, OutputableBndrId pass)
=> Outputable (AnnDecl pass) where
ppr (HsAnnotation _ provenance expr)
= hsep [text "{-#", pprAnnProvenance provenance, pprExpr (unLoc expr), text "#-}"]
-- | Annotation Provenance
data AnnProvenance name = ValueAnnProvenance (Located name)
| TypeAnnProvenance (Located name)
| ModuleAnnProvenance
deriving instance Functor AnnProvenance
deriving instance Foldable AnnProvenance
deriving instance Traversable AnnProvenance
deriving instance (Data pass) => Data (AnnProvenance pass)
annProvenanceName_maybe :: AnnProvenance name -> Maybe name
annProvenanceName_maybe (ValueAnnProvenance (L _ name)) = Just name
annProvenanceName_maybe (TypeAnnProvenance (L _ name)) = Just name
annProvenanceName_maybe ModuleAnnProvenance = Nothing
pprAnnProvenance :: OutputableBndr name => AnnProvenance name -> SDoc
pprAnnProvenance ModuleAnnProvenance = text "ANN module"
pprAnnProvenance (ValueAnnProvenance (L _ name))
= text "ANN" <+> ppr name
pprAnnProvenance (TypeAnnProvenance (L _ name))
= text "ANN type" <+> ppr name
{-
************************************************************************
* *
\subsection[RoleAnnot]{Role annotations}
* *
************************************************************************
-}
-- | Located Role Annotation Declaration
type LRoleAnnotDecl pass = Located (RoleAnnotDecl pass)
-- See #8185 for more info about why role annotations are
-- top-level declarations
-- | Role Annotation Declaration
data RoleAnnotDecl pass
= RoleAnnotDecl (Located (IdP pass)) -- type constructor
[Located (Maybe Role)] -- optional annotations
-- ^ - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnType',
-- 'ApiAnnotation.AnnRole'
-- For details on above see note [Api annotations] in ApiAnnotation
deriving instance (DataId pass) => Data (RoleAnnotDecl pass)
-- | Print as concrete source syntax, e.g. @type role T nominal _@,
-- using an underscore for every role left unannotated.
instance OutputableBndr (IdP pass) => Outputable (RoleAnnotDecl pass) where
  ppr (RoleAnnotDecl ltycon roles)
    = text "type role" <+> ppr ltycon <+>
      hsep (map (pp_role . unLoc) roles)
    where
      pp_role Nothing  = underscore
      pp_role (Just r) = ppr r
-- | The type constructor a role annotation applies to.
roleAnnotDeclName :: RoleAnnotDecl pass -> (IdP pass)
roleAnnotDeclName (RoleAnnotDecl (L _ name) _) = name
| ezyang/ghc | compiler/hsSyn/HsDecls.hs | bsd-3-clause | 86,255 | 0 | 18 | 25,069 | 14,068 | 7,672 | 6,396 | 1,009 | 7 |
module Main (main) where
import Autoencoder
import Control.Applicative
import Control.Monad
import Control.Monad.ST
import Data.Array.ST
import Data.Char
import Data.Classifier (Classifier)
import Data.Classifier.NaiveBayes (NaiveBayes)
import Data.Counter (Counter)
import Data.Maybe
import Data.Monoid
import Data.STRef
import Data.Text (Text)
import Data.Time.Clock
import Numeric.LinearAlgebra.HMatrix (Vector)
import Prelude
import System.Environment (getArgs)
import System.Exit (exitFailure)
import System.Random (randomR, getStdRandom, RandomGen)
import Text.Read (readMaybe)
import qualified AI.HNN.FF.Network as Neural
import qualified Data.Classifier as Classifier
import qualified Data.Classifier.NaiveBayes as NaiveBayes
import qualified Data.Counter as Counter
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Numeric.LinearAlgebra.HMatrix as Vector
import qualified System.IO as IO
-- | Command-line entry point; the first argument selects the experiment.
main :: IO ()
main = getArgs >>= \case
  ["bayes", filename] ->
    void $ evaluateBayes filename
  -- View patterns: both numeric arguments must parse with 'readMaybe',
  -- otherwise this case fails and we fall through to the usage error.
  ["neural", filename, readMaybe -> Just trainTimes, readMaybe -> Just layers] ->
    neural filename trainTimes layers
  ["generate_autoencoder", filename] -> do
    IO.hSetBuffering IO.stdout IO.NoBuffering
    t <- getCurrentTime
    autoencoder filename >>= print
    -- Report wall-clock time taken by autoencoder generation.
    getCurrentTime >>= print . (`diffUTCTime` t)
  ["apply_autoencoder", encoder, filename] -> do
    -- NOTE(review): 'read' is partial; a malformed encoder file crashes
    -- with an opaque parse error.  Consider 'readMaybe' + a clean exit.
    (v, (e, _)) <- read <$> readFile encoder :: IO (Counter Text, Autoencoder)
    applyAutoencoder v e filename 10 []
  -- Run the whole experiment grid: Bayes once, then neural networks over
  -- a range of iteration counts and hidden-layer configurations.
  ["everything", filename] -> do
    void $ evaluateBayes filename
    neural filename 10 []
    neural filename 100 []
    neural filename 1000 []
    neural filename 10 [10]
    neural filename 100 [10]
    neural filename 1000 [10]
    neural filename 10 [100]
    neural filename 100 [100]
    neural filename 1000 [100]
    neural filename 10000 [10]
    neural filename 10 [100, 50]
    neural filename 100 [100, 50]
    neural filename 1000 [100, 50]
  _ -> do
    putStrLn "Invalid arguments"
    exitFailure
-- | Train a feed-forward network on autoencoder-compressed feature
-- vectors, printing the network and its evaluation results after each of
-- 9 training rounds.
--
-- NOTE(review): this mirrors 'neural' almost line for line, differing
-- only in the 'encode' step; consider factoring out the shared driver.
applyAutoencoder :: Counter Text -> Encoder -> FilePath -> Int -> [Int] -> IO ()
applyAutoencoder vocab encoder path times layers = do
  -- Shuffle the parsed rows, then split half/half into train and test.
  shuffled <- readFile path >>= getStdRandom . shuffle . extractData
  let (train, test) = splitAt (length shuffled `div` 2) shuffled
  -- Feature vectors are compressed through the autoencoder's encoder.
  let trainVectors = map (\(x, y) -> (encode encoder x, y)) $ classifierToVector boolToVector vocab $ mconcat $ map rowToClassifier train
  let testVectors = map (\(x, y) -> (encode encoder x, y)) $ classifierToVector boolToVector vocab $ mconcat $ map rowToClassifier test
  case trainVectors of
    [] -> putStrLn "No data"
    (v, _) : _ -> do
      startTime <- getCurrentTime
      -- Input width taken from the first encoded vector; one output unit.
      network <- Neural.createNetwork (Vector.size v) layers 1
      IO.hSetBuffering IO.stdout IO.NoBuffering
      -- 9 rounds; the accumulator is (total iterations so far, current
      -- network, cumulative evaluation time) so that evaluation time can
      -- be subtracted from the reported training time below.
      void $ iterateM 9 (0 :: Int, network, 0) $ \(n, net, timeTakenEvaluating) -> do
        let newNet = trainNet net trainVectors
        print layers
        print (n + times)
        print newNet
        endTime <- getCurrentTime
        print $ (endTime `diffUTCTime` startTime) - timeTakenEvaluating
        evalStartTime <- getCurrentTime
        -- Tally (actual label, network output rounded componentwise).
        print $ Counter.fromList $ map (\(x, y) -> (y, Vector.cmap r $ Neural.output newNet tanh x)) testVectors
        evalEndTime <- getCurrentTime
        print $ evalEndTime `diffUTCTime` evalStartTime
        return (n + times, newNet, timeTakenEvaluating + (evalEndTime `diffUTCTime` evalStartTime))
  where r = (fromIntegral :: Int -> Double) . (round :: Double -> Int)
        trainNet = Neural.trainNTimes times 0.8 Neural.tanh Neural.tanh'
-- | Train an autoencoder on the first half of the shuffled data set,
-- returning the training half's vocabulary together with the trained
-- autoencoder.
--
-- The second half of the data is deliberately held out and unused here;
-- the previous version also vectorised it into @testVectors@ but never
-- used the result, so that dead computation has been removed.
autoencoder :: FilePath -> IO (Counter Text, Autoencoder)
autoencoder path = do
  shuffled <- readFile path >>= getStdRandom . shuffle . extractData
  let (train, _held)    = splitAt (length shuffled `div` 2) shuffled
      trainClassifier   = mconcat (map rowToClassifier train)
      vocab             = vocabulary trainClassifier
  case classifierToVector boolToVector vocab trainClassifier of
    [] -> error "no data"
    trainVectors -> do
      -- 1000/1000: size and iteration parameters are passed straight
      -- through; see 'generateAutoencoderIO' for their meaning.
      res <- generateAutoencoderIO (map fst trainVectors) 1000 1000
      return (vocab, res)
-- | Train and evaluate a Naive Bayes classifier over the rows in @fp@
-- using leave-one-out evaluation (each row is classified with its own
-- contribution removed; see 'applyNaiveBayes').  Prints the classifier,
-- the tallied (actual, predicted) pairs, and the timing of each phase.
evaluateBayes :: FilePath -> IO (Counter (Bool, Maybe Bool))
evaluateBayes fp = do
  file <- readFile fp
  startTime <- getCurrentTime
  let res = extractData file
  let classifier = createClassifier res
  -- Forcing the classifier via 'print' makes the timing below meaningful.
  print classifier
  printTimeDiff startTime
  evalStartTime <- getCurrentTime
  let counted = Counter.fromList $ applyNaiveBayes classifier res
  print counted
  printTimeDiff evalStartTime
  return counted
-- | Print the wall-clock time elapsed since @start@.
printTimeDiff :: UTCTime -> IO ()
printTimeDiff start = do
  now <- getCurrentTime
  print (now `diffUTCTime` start)
-- | Pure variant of 'evaluateBayes': leave-one-out Naive Bayes over the
-- rows parsed from the given file contents, tallied into a counter of
-- (actual, predicted) pairs.
bayes :: String -> Counter (Bool, Maybe Bool)
bayes file = Counter.fromList (applyNaiveBayes classifier rows)
  where
    rows       = extractData file
    classifier = createClassifier rows
-- | Train a feed-forward network on raw vocabulary-projection vectors,
-- printing the network and its evaluation results after each of 9
-- training rounds of @times@ iterations each.
--
-- NOTE(review): duplicated almost verbatim in 'applyAutoencoder' (which
-- additionally encodes the vectors); the shared driver could be factored.
neural :: FilePath -> Int -> [Int] -> IO ()
neural path times layers = do
  -- Shuffle the parsed rows, then split half/half into train and test.
  shuffled <- readFile path >>= getStdRandom . shuffle . extractData
  let (train, test) = splitAt (length shuffled `div` 2) shuffled
  let trainClassifier = mconcat $ map rowToClassifier train
  -- The vocabulary comes from the training half only, so test documents
  -- may contain out-of-vocabulary words (which 'counterToVector' drops).
  let vocab = vocabulary trainClassifier
  let trainVectors = classifierToVector boolToVector vocab trainClassifier
  let testVectors = classifierToVector boolToVector vocab $ mconcat $ map rowToClassifier test
  case trainVectors of
    [] -> putStrLn "No data"
    (v, _) : _ -> do
      startTime <- getCurrentTime
      -- Input width taken from the first training vector; one output unit.
      network <- Neural.createNetwork (Vector.size v) layers 1
      IO.hSetBuffering IO.stdout IO.NoBuffering
      -- 9 rounds; the accumulator is (total iterations so far, current
      -- network, cumulative evaluation time) so evaluation time can be
      -- subtracted from the reported training time below.
      void $ iterateM 9 (0 :: Int, network, 0) $ \(n, net, timeTakenEvaluating) -> do
        let newNet = trainNet net trainVectors
        print layers
        print (n + times)
        print newNet
        endTime <- getCurrentTime
        print $ (endTime `diffUTCTime` startTime) - timeTakenEvaluating
        evalStartTime <- getCurrentTime
        -- Tally (actual label, network output rounded componentwise).
        print $ Counter.fromList $ map (\(x, y) -> (y, Vector.cmap r $ Neural.output newNet tanh x)) testVectors
        evalEndTime <- getCurrentTime
        print $ evalEndTime `diffUTCTime` evalStartTime
        return (n + times, newNet, timeTakenEvaluating + (evalEndTime `diffUTCTime` evalStartTime))
  where r = (fromIntegral :: Int -> Double) . (round :: Double -> Int)
        trainNet = Neural.trainNTimes times 0.8 Neural.tanh Neural.tanh'
-- | Run the monadic step @f@ repeatedly, starting from @a@, collecting
-- every intermediate result: @iterateM n a f@ performs @n@ sequenced
-- applications and returns the @n@ produced values in order.
--
-- Any count @n <= 0@ yields the empty list.  The previous definition
-- only terminated on the exact literal @0@, so a negative count caused
-- infinite recursion.
iterateM :: Monad m => Int -> a -> (a -> m a) -> m [a]
iterateM n a f
  | n <= 0    = return []
  | otherwise = do
      v    <- f a
      rest <- iterateM (n - 1) v f
      return (v : rest)
-- | Encode a label as a numeric target for the tanh-activated network:
-- True -> 1, False -> -1 (literals go through the 'Vector' 'Num'
-- instance supplied by hmatrix).
boolToVector :: Bool -> Vector Double
boolToVector b
  | b         = 1
  | otherwise = -1
-- | Combined word counts over every document in the classifier,
-- irrespective of class label.
vocabulary :: Ord b => Classifier a b -> Counter b
vocabulary = Map.foldr (mappend . mconcat) mempty . Classifier.toMap
-- | Project a document's word counts onto the vocabulary, producing a
-- dense vector with one component per vocabulary word, in ascending key
-- order.  Vocabulary words present in the document contribute their
-- count; vocabulary words absent from it contribute 0; document words
-- outside the vocabulary are dropped (the "only in m" branch of
-- 'Map.mergeWithKey' yields 'mempty').
counterToVector :: Ord a => Counter a -> Counter a -> Vector Double
counterToVector (Counter.toMap -> vocab) (Counter.toMap -> m) =
  Vector.vector $ map snd $ Map.toAscList $ Map.mergeWithKey (\_ v _ -> Just $ fromIntegral v) (const mempty) (fmap (const 0)) m vocab
-- | Flatten a classifier into (feature vector, label vector) training
-- pairs: every document becomes its vocabulary projection (via
-- 'counterToVector') paired with @f@ applied to its class label.
classifierToVector :: (Ord a, Ord b) => (a -> Vector Double) -> Counter b -> Classifier a b -> [(Vector Double, Vector Double)]
classifierToVector f vocab (Classifier.toMap -> m) =
  Map.foldrWithKey (\k v a -> fmap ((,) <$> counterToVector vocab <*> pure (f k)) v <> a) [] m
-- | Leave-one-out evaluation: classify every row against the classifier
-- with that row's own contribution removed first, accumulating
-- (actual, predicted) pairs via 'collect'.
applyNaiveBayes :: NaiveBayes Bool Text -> [Row] -> [(Bool, Maybe Bool)]
applyNaiveBayes classifier rows = foldl step [] rows
  where
    step acc row =
      collect (NaiveBayes.remove (rowToClassifier row) classifier) acc row
-- | Prepend one (actual label, predicted label) pair, classifying the
-- row's tokenized body text with the supplied classifier.
collect :: NaiveBayes Bool Text -> [(Bool, Maybe Bool)] -> Row -> [(Bool, Maybe Bool)]
collect cls acc (actual, _, _, _, body) = (actual, predicted) : acc
  where
    predicted = NaiveBayes.test cls (Counter.fromList (process body))
-- | One input record.  Only the first field (the label) and the last
-- field (the document body) are consumed by the code in this module;
-- the middle three 'Text' fields are carried along unused here.
type Row = (Bool, Text, Text, Text, Text)
-- | Parse the input file: one 'Row' per line in Haskell 'Read' syntax.
-- Unparsable lines are silently skipped ('mapMaybe' over 'readMaybe').
extractData :: String -> [Row]
extractData = mapMaybe readMaybe . lines
-- | Build the full Naive Bayes classifier by merging the per-row
-- single-document classifiers.
createClassifier :: [Row] -> NaiveBayes Bool Text
createClassifier rows =
  mconcat [NaiveBayes.fromClassifier (rowToClassifier r) | r <- rows]
-- | Turn one row into a single-document classifier: its label mapped to
-- the word counts of its tokenized body text (see 'process').
rowToClassifier :: Row -> Classifier Bool Text
rowToClassifier (b, _, _, _, c) = Classifier.singleton b $ Counter.fromList $ process c
-- | Tokenize a document body: drop hyphens, split on spaces and then on
-- periods, keep only alphabetic characters lower-cased, and discard any
-- tokens that end up empty.
process :: Text -> [Text]
process txt = filter (not . Text.null) normalized
  where
    normalized   = map (Text.map toLower . Text.filter isAlpha) fragments
    fragments    = concatMap (Text.splitOn ".") (Text.splitOn " " dehyphenated)
    dehyphenated = Text.filter (not . (== '-')) txt
-- shuffle from the haskell wiki @ https://wiki.haskell.org/Random_shuffle
-- Fisher-Yates shuffle over a mutable array in ST; returns the permuted
-- list together with the advanced generator state.
shuffle :: RandomGen g => [a] -> g -> ([a], g)
shuffle xs gen = runST $ do
  g <- newSTRef gen
  ar <- newListArray' xs
  xs' <- forM [1..n] $ \i -> do
    -- Pick a random slot j in [i, n]; emit its element as position i of
    -- the result, and stash the element from slot i into slot j so it
    -- stays available for later draws.
    j <- randomRST (i,n) g
    vi <- readArray ar i
    vj <- readArray ar j
    writeArray ar j vi
    return vj
  gen' <- readSTRef g
  return (xs', gen')
  where
    n = length xs
    -- 1-based array holding the working copy of the input list.
    newListArray' :: [a] -> ST s (STArray s Int a)
    newListArray' = newListArray (1, n)
    -- 'randomR' lifted to thread the generator through the STRef.
    randomRST lohi g = do
      (a, s') <- randomR lohi <$> readSTRef g
      writeSTRef g s'
      return a
| intolerable/project-utilities | produce-results/Main.hs | bsd-3-clause | 9,015 | 0 | 23 | 1,913 | 3,355 | 1,701 | 1,654 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.SMT.SMT
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Abstraction of SMT solvers
-----------------------------------------------------------------------------
{-# LANGUAGE ScopedTypeVariables #-}
module Data.SBV.SMT.SMT where
import qualified Control.Exception as C
import Control.Concurrent (newEmptyMVar, takeMVar, putMVar, forkIO)
import Control.Monad (when, zipWithM)
import Data.Char (isSpace)
import Data.Int (Int8, Int16, Int32, Int64)
import Data.List (intercalate, isPrefixOf, isInfixOf)
import Data.Word (Word8, Word16, Word32, Word64)
import System.Directory (findExecutable)
import System.Process (runInteractiveProcess, waitForProcess, terminateProcess)
import System.Exit (ExitCode(..))
import System.IO (hClose, hFlush, hPutStr, hGetContents, hGetLine)
import qualified Data.Map as M
import Data.SBV.BitVectors.AlgReals
import Data.SBV.BitVectors.Data
import Data.SBV.BitVectors.PrettyNum
import Data.SBV.Utils.TDiff
-- | Extract the final configuration from a result
resultConfig :: SMTResult -> SMTConfig
resultConfig result = case result of
  Unsatisfiable c -> c
  Satisfiable c _ -> c
  Unknown c _     -> c
  ProofError c _  -> c
  TimeOut c       -> c
-- | A 'prove' call results in a 'ThmResult'
newtype ThmResult = ThmResult SMTResult
-- | A 'sat' call results in a 'SatResult'
-- The reason for having a separate 'SatResult' is to have a more meaningful 'Show' instance.
newtype SatResult = SatResult SMTResult
-- | An 'allSat' call results in a 'AllSatResult'. The boolean says whether
-- we should warn the user about prefix-existentials.
newtype AllSatResult = AllSatResult (Bool, [SMTResult])
-- | User friendly way of printing theorem results
instance Show ThmResult where
show (ThmResult r) = showSMTResult "Q.E.D."
"Unknown" "Unknown. Potential counter-example:\n"
"Falsifiable" "Falsifiable. Counter-example:\n" r
-- | User friendly way of printing satisfiablity results
instance Show SatResult where
show (SatResult r) = showSMTResult "Unsatisfiable"
"Unknown" "Unknown. Potential model:\n"
"Satisfiable" "Satisfiable. Model:\n" r
-- | The Show instance of AllSatResults. Note that we have to be careful in being lazy enough
-- as the typical use case is to pull results out as they become available.
instance Show AllSatResult where
show (AllSatResult (e, xs)) = go (0::Int) xs
where uniqueWarn | e = " (Unique up to prefix existentials.)"
| True = ""
go c (s:ss) = let c' = c+1
(ok, o) = sh c' s
in c' `seq` if ok then o ++ "\n" ++ go c' ss else o
go c [] = case c of
0 -> "No solutions found."
1 -> "This is the only solution." ++ uniqueWarn
_ -> "Found " ++ show c ++ " different solutions." ++ uniqueWarn
sh i c = (ok, showSMTResult "Unsatisfiable"
"Unknown" "Unknown. Potential model:\n"
("Solution #" ++ show i ++ ":\n[Backend solver returned no assignment to variables.]") ("Solution #" ++ show i ++ ":\n") c)
where ok = case c of
Satisfiable{} -> True
_ -> False
-- | Instances of 'SatModel' can be automatically extracted from models returned by the
-- solvers. The idea is that the sbv infrastructure provides a stream of 'CW''s (constant-words)
-- coming from the solver, and the type @a@ is interpreted based on these constants. Many typical
-- instances are already provided, so new instances can be declared with relative ease.
--
-- Minimum complete definition: 'parseCWs'
class SatModel a where
-- | Given a sequence of constant-words, extract one instance of the type @a@, returning
-- the remaining elements untouched. If the next element is not what's expected for this
-- type you should return 'Nothing'
parseCWs :: [CW] -> Maybe (a, [CW])
-- | Given a parsed model instance, transform it using @f@, and return the result.
-- The default definition for this method should be sufficient in most use cases.
cvtModel :: (a -> Maybe b) -> Maybe (a, [CW]) -> Maybe (b, [CW])
cvtModel f x = x >>= \(a, r) -> f a >>= \b -> return (b, r)
-- | Parse a signed/sized value from a sequence of CWs
-- Succeeds only when the head of the stream is an integer constant whose
-- kind matches the requested kind @k@; the constant is converted with
-- 'fromIntegral' and the rest of the stream is returned untouched.
genParse :: Integral a => Kind -> [CW] -> Maybe (a, [CW])
genParse k (x@(CW _ (CWInteger i)):r) | kindOf x == k = Just (fromIntegral i, r)
genParse _ _ = Nothing
-- | Base case for 'SatModel' at unit type. Comes in handy if there are no real variables.
instance SatModel () where
parseCWs xs = return ((), xs)
-- | 'Bool' as extracted from a model
instance SatModel Bool where
parseCWs xs = do (x, r) <- genParse KBool xs
return ((x :: Integer) /= 0, r)
-- | 'Word8' as extracted from a model
instance SatModel Word8 where
parseCWs = genParse (KBounded False 8)
-- | 'Int8' as extracted from a model
instance SatModel Int8 where
parseCWs = genParse (KBounded True 8)
-- | 'Word16' as extracted from a model
instance SatModel Word16 where
parseCWs = genParse (KBounded False 16)
-- | 'Int16' as extracted from a model
instance SatModel Int16 where
parseCWs = genParse (KBounded True 16)
-- | 'Word32' as extracted from a model
instance SatModel Word32 where
parseCWs = genParse (KBounded False 32)
-- | 'Int32' as extracted from a model
instance SatModel Int32 where
parseCWs = genParse (KBounded True 32)
-- | 'Word64' as extracted from a model
instance SatModel Word64 where
parseCWs = genParse (KBounded False 64)
-- | 'Int64' as extracted from a model
instance SatModel Int64 where
parseCWs = genParse (KBounded True 64)
-- | 'Integer' as extracted from a model
instance SatModel Integer where
parseCWs = genParse KUnbounded
-- | 'AlgReal' as extracted from a model
instance SatModel AlgReal where
parseCWs (CW KReal (CWAlgReal i) : r) = Just (i, r)
parseCWs _ = Nothing
-- | 'Float' as extracted from a model
instance SatModel Float where
parseCWs (CW KFloat (CWFloat i) : r) = Just (i, r)
parseCWs _ = Nothing
-- | 'Double' as extracted from a model
instance SatModel Double where
parseCWs (CW KDouble (CWDouble i) : r) = Just (i, r)
parseCWs _ = Nothing
instance SatModel CW where
parseCWs (cw : r) = Just (cw, r)
parseCWs [] = Nothing
-- | A list of values as extracted from a model. When reading a list, we
-- go as long as we can (maximal-munch). Note that this never fails, as
-- we can always return the empty list!
instance SatModel a => SatModel [a] where
parseCWs [] = Just ([], [])
parseCWs xs = case parseCWs xs of
Just (a, ys) -> case parseCWs ys of
Just (as, zs) -> Just (a:as, zs)
Nothing -> Just ([], ys)
Nothing -> Just ([], xs)
-- | Tuples extracted from a model
instance (SatModel a, SatModel b) => SatModel (a, b) where
parseCWs as = do (a, bs) <- parseCWs as
(b, cs) <- parseCWs bs
return ((a, b), cs)
-- | 3-Tuples extracted from a model
instance (SatModel a, SatModel b, SatModel c) => SatModel (a, b, c) where
parseCWs as = do (a, bs) <- parseCWs as
((b, c), ds) <- parseCWs bs
return ((a, b, c), ds)
-- | 4-Tuples extracted from a model
instance (SatModel a, SatModel b, SatModel c, SatModel d) => SatModel (a, b, c, d) where
parseCWs as = do (a, bs) <- parseCWs as
((b, c, d), es) <- parseCWs bs
return ((a, b, c, d), es)
-- | 5-Tuples extracted from a model
instance (SatModel a, SatModel b, SatModel c, SatModel d, SatModel e) => SatModel (a, b, c, d, e) where
parseCWs as = do (a, bs) <- parseCWs as
((b, c, d, e), fs) <- parseCWs bs
return ((a, b, c, d, e), fs)
-- | 6-Tuples extracted from a model
instance (SatModel a, SatModel b, SatModel c, SatModel d, SatModel e, SatModel f) => SatModel (a, b, c, d, e, f) where
parseCWs as = do (a, bs) <- parseCWs as
((b, c, d, e, f), gs) <- parseCWs bs
return ((a, b, c, d, e, f), gs)
-- | 7-Tuples extracted from a model
instance (SatModel a, SatModel b, SatModel c, SatModel d, SatModel e, SatModel f, SatModel g) => SatModel (a, b, c, d, e, f, g) where
parseCWs as = do (a, bs) <- parseCWs as
((b, c, d, e, f, g), hs) <- parseCWs bs
return ((a, b, c, d, e, f, g), hs)
-- | Various SMT results that we can extract models out of.
class Modelable a where
-- | Is there a model?
modelExists :: a -> Bool
-- | Extract a model, the result is a tuple where the first argument (if True)
-- indicates whether the model was "probable". (i.e., if the solver returned unknown.)
getModel :: SatModel b => a -> Either String (Bool, b)
-- | Extract a model dictionary. Extract a dictionary mapping the variables to
-- their respective values as returned by the SMT solver. Also see `getModelDictionaries`.
getModelDictionary :: a -> M.Map String CW
-- | Extract a model value for a given element. Also see `getModelValues`.
getModelValue :: SymWord b => String -> a -> Maybe b
getModelValue v r = fromCW `fmap` (v `M.lookup` getModelDictionary r)
-- | Extract a representative name for the model value of an uninterpreted kind.
-- This is supposed to correspond to the value as computed internally by the
-- SMT solver; and is unportable from solver to solver. Also see `getModelUninterpretedValues`.
getModelUninterpretedValue :: String -> a -> Maybe String
getModelUninterpretedValue v r = case v `M.lookup` getModelDictionary r of
Just (CW _ (CWUninterpreted s)) -> Just s
_ -> Nothing
-- | A simpler variant of 'getModel' to get a model out without the fuss.
extractModel :: SatModel b => a -> Maybe b
extractModel a = case getModel a of
Right (_, b) -> Just b
_ -> Nothing
-- | Return all the models from an 'allSat' call, similar to 'extractModel' but
-- is suitable for the case of multiple results.
extractModels :: SatModel a => AllSatResult -> [a]
extractModels (AllSatResult (_, xs)) = [ms | Right (_, ms) <- map getModel xs]
-- | Get dictionaries from an all-sat call. Similar to `getModelDictionary`.
getModelDictionaries :: AllSatResult -> [M.Map String CW]
getModelDictionaries (AllSatResult (_, xs)) = map getModelDictionary xs
-- | Extract value of a variable from an all-sat call. Similar to `getModelValue`.
getModelValues :: SymWord b => String -> AllSatResult -> [Maybe b]
getModelValues s (AllSatResult (_, xs)) = map (s `getModelValue`) xs
-- | Extract value of an uninterpreted variable from an all-sat call. Similar to `getModelUninterpretedValue`.
getModelUninterpretedValues :: String -> AllSatResult -> [Maybe String]
getModelUninterpretedValues s (AllSatResult (_, xs)) = map (s `getModelUninterpretedValue`) xs
-- | 'ThmResult' as a generic model provider
instance Modelable ThmResult where
getModel (ThmResult r) = getModel r
modelExists (ThmResult r) = modelExists r
getModelDictionary (ThmResult r) = getModelDictionary r
-- | 'SatResult' as a generic model provider
instance Modelable SatResult where
getModel (SatResult r) = getModel r
modelExists (SatResult r) = modelExists r
getModelDictionary (SatResult r) = getModelDictionary r
-- | 'SMTResult' as a generic model provider
instance Modelable SMTResult where
getModel (Unsatisfiable _) = Left "SBV.getModel: Unsatisfiable result"
getModel (Unknown _ m) = Right (True, parseModelOut m)
getModel (ProofError _ s) = error $ unlines $ "Backend solver complains: " : s
getModel (TimeOut _) = Left "Timeout"
getModel (Satisfiable _ m) = Right (False, parseModelOut m)
modelExists (Satisfiable{}) = True
modelExists (Unknown{}) = False -- don't risk it
modelExists _ = False
getModelDictionary (Unsatisfiable _) = M.empty
getModelDictionary (Unknown _ m) = M.fromList (modelAssocs m)
getModelDictionary (ProofError _ _) = M.empty
getModelDictionary (TimeOut _) = M.empty
getModelDictionary (Satisfiable _ m) = M.fromList (modelAssocs m)
-- | Extract a model out, will throw error if parsing is unsuccessful
-- The constants of every variable assignment are fed to 'parseCWs'; the
-- parse must consume the entire stream, otherwise we 'error' out with a
-- diagnostic showing what was left over (or the whole model on failure).
parseModelOut :: SatModel a => SMTModel -> a
parseModelOut m = case parseCWs [c | (_, c) <- modelAssocs m] of
                    Just (x, []) -> x
                    Just (_, ys) -> error $ "SBV.getModel: Partially constructed model; remaining elements: " ++ show ys
                    Nothing      -> error $ "SBV.getModel: Cannot construct a model from: " ++ show m
-- | Given an 'allSat' call, we typically want to iterate over it and print the results in sequence. The
-- 'displayModels' function automates this task by calling 'disp' on each result, consecutively. The first
-- 'Int' argument to 'disp' is the current model number. The second argument is a tuple, where the first
-- element indicates whether the model is alleged (i.e., if the solver returned Unknown).
-- Returns the number of models displayed.
displayModels :: SatModel a => (Int -> (Bool, a) -> IO ()) -> AllSatResult -> IO Int
displayModels disp (AllSatResult (_, ms)) = do
    -- Results whose model fails to parse ('Left') are silently skipped.
    inds <- zipWithM display [a | Right a <- map (getModel . SatResult) ms] [(1::Int)..]
    -- Indices run 1..k consecutively, so the last one (or 0) is the count.
    return $ last (0:inds)
  where display r i = disp i r >> return i
-- | Show an SMTResult; generic version
-- The five message arguments supply the phrasing for each outcome:
-- unsat, unknown without/with a model, and sat without/with a model.
showSMTResult :: String -> String -> String -> String -> String -> SMTResult -> String
showSMTResult unsatMsg unkMsg unkMsgModel satMsg satMsgModel result = case result of
  Unsatisfiable _ -> unsatMsg
  -- A completely empty model (no assignments, uninterpreteds, or arrays)
  -- gets the plain message rather than the "with model" variant.
  Satisfiable _ (SMTModel [] [] []) -> satMsg
  Satisfiable _ m -> satMsgModel ++ showModel cfg m
  Unknown _ (SMTModel [] [] []) -> unkMsg
  Unknown _ m -> unkMsgModel ++ showModel cfg m
  ProofError _ [] -> "*** An error occurred. No additional information available. Try running in verbose mode"
  ProofError _ ls -> "*** An error occurred.\n" ++ intercalate "\n" (map ("*** " ++) ls)
  TimeOut _ -> "*** Timeout"
 where cfg = resultConfig result
-- | Show a model in human readable form
-- One line per variable assignment, followed by the uninterpreted
-- function section and the array section (see 'shUI' and 'shUA').
showModel :: SMTConfig -> SMTModel -> String
showModel cfg m = intercalate "\n" (map shM assocs ++ concatMap shUI uninterps ++ concatMap shUA arrs)
  where assocs = modelAssocs m
        uninterps = modelUninterps m
        arrs = modelArrays m
        -- Constants are rendered in the config's print base via 'shCW'.
        shM (s, v) = " " ++ s ++ " = " ++ shCW cfg v
-- | Show a constant value, in the user-specified base
-- Bases 2, 10, and 16 are supported; any other base falls back to
-- decimal with a warning appended.
shCW :: SMTConfig -> CW -> String
shCW cfg = render (printBase cfg)
  where render 2  = binS
        render 10 = show
        render 16 = hexS
        render b  = \w -> show w ++ " -- Ignoring unsupported printBase " ++ show b ++ ", use 2, 10, or 16."
-- | Print uninterpreted function values from models. Very, very crude..
-- The solver-generated name has the shape @prefix_name@; everything up
-- to and including the first underscore is stripped for display (names
-- without an underscore are shown as-is).
shUI :: (String, [String]) -> [String]
shUI (flong, cases) = header : map indent cases
  where
    header = " -- uninterpreted: " ++ displayName
    displayName = case dropWhile (/= '_') flong of
                    ""       -> flong
                    _ : rest -> rest
    indent c = " " ++ c
-- | Print uninterpreted array values from models. Very, very crude..
-- A header naming the array, followed by one indented line per case.
shUA :: (String, [String]) -> [String]
shUA (f, cases) = (" -- array: " ++ f) : [" " ++ c | c <- cases]
-- | Helper function to spin off to an SMT solver.
-- Locates the executable, runs it over the script, and classifies the
-- outcome: 'Right' with the cleaned, non-empty stdout lines on success;
-- 'Left' with a multi-line diagnostic otherwise.
pipeProcess :: SMTConfig -> String -> [String] -> SMTScript -> (String -> String) -> IO (Either String [String])
pipeProcess cfg execName opts script cleanErrs = do
    let nm = show (name (solver cfg))
    mbExecPath <- findExecutable execName
    case mbExecPath of
      Nothing -> return $ Left $ "Unable to locate executable for " ++ nm
                               ++ "\nExecutable specified: " ++ show execName
      Just execPath -> do (ec, contents, allErrors) <- runSolver cfg execPath opts script
                          -- Solver-specific noise is stripped from stderr first.
                          let errors = dropWhile isSpace (cleanErrs allErrors)
                          -- Success requires an empty (cleaned) stderr AND a
                          -- zero exit code after solver-specific translation.
                          case (null errors, xformExitCode (solver cfg) ec) of
                            (True, ExitSuccess) -> return $ Right $ map clean (filter (not . null) (lines contents))
                            (_, ec') -> let errors' = if null errors
                                                      then (if null (dropWhile isSpace contents)
                                                            then "(No error message printed on stderr by the executable.)"
                                                            else contents)
                                                      else errors
                                            -- Prefer the translated exit code; fall back to the raw one.
                                            finalEC = case (ec', ec) of
                                                        (ExitFailure n, _) -> n
                                                        (_, ExitFailure n) -> n
                                                        _ -> 0 -- can happen if ExitSuccess but there is output on stderr
                                        in return $ Left $ "Failed to complete the call to " ++ nm
                                                         ++ "\nExecutable : " ++ show execPath
                                                         ++ "\nOptions : " ++ unwords opts
                                                         ++ "\nExit code : " ++ show finalEC
                                                         ++ "\nSolver output: "
                                                         ++ "\n" ++ line ++ "\n"
                                                         ++ intercalate "\n" (filter (not . null) (lines errors'))
                                                         ++ "\n" ++ line
                                                         ++ "\nGiving up.."
  where clean = reverse . dropWhile isSpace . reverse . dropWhile isSpace
        line = replicate 78 '='
-- | A standard solver interface. If the solver is SMT-Lib compliant, then this function should suffice in
-- communicating with it.
-- Optionally dumps the generated script to the file named in the config,
-- invokes the solver via 'pipeProcess' (timed when requested), and hands
-- the output lines to the supplied @failure@ or @success@ continuation.
standardSolver :: SMTConfig -> SMTScript -> (String -> String) -> ([String] -> a) -> ([String] -> a) -> IO a
standardSolver config script cleanErrs failure success = do
    let msg = when (verbose config) . putStrLn . ("** " ++) -- verbose-only logging
        smtSolver= solver config
        exec = executable smtSolver
        opts = options smtSolver
        isTiming = timing config
        nmSolver = show (name smtSolver)
    msg $ "Calling: " ++ show (unwords (exec:opts))
    -- save a copy of the generated script if the user asked for one
    case smtFile config of
      Nothing -> return ()
      Just f -> do msg $ "Saving the generated script in file: " ++ show f
                   writeFile f (scriptBody script)
    contents <- timeIf isTiming nmSolver $ pipeProcess config exec opts script cleanErrs
    msg $ nmSolver ++ " output:\n" ++ either id (intercalate "\n") contents
    case contents of
      Left e -> return $ failure (lines e)
      Right xs -> return $ success (mergeSExpr xs)
-- | A variant of 'readProcessWithExitCode'; except it knows about continuation strings
-- and can speak SMT-Lib2 (just a little).
-- The script body is sent line by line, then the sat command; when a
-- model script is present and the solver answers "sat"/"unknown", the
-- model-extraction commands are sent too. The process is terminated
-- if anything throws along the way.
runSolver :: SMTConfig -> FilePath -> [String] -> SMTScript -> IO (ExitCode, String, String)
runSolver cfg execPath opts script
 = do (send, ask, cleanUp, pid) <- do
        (inh, outh, errh, pid) <- runInteractiveProcess execPath opts Nothing Nothing
        let send l = hPutStr inh (l ++ "\n") >> hFlush inh
            recv = hGetLine outh
            ask l = send l >> recv
            cleanUp response
                = do hClose inh
                     -- drain stdout/stderr in separate threads so a full pipe
                     -- cannot deadlock us; the MVar is signalled once per stream
                     outMVar <- newEmptyMVar
                     out <- hGetContents outh
                     _ <- forkIO $ C.evaluate (length out) >> putMVar outMVar ()
                     err <- hGetContents errh
                     _ <- forkIO $ C.evaluate (length err) >> putMVar outMVar ()
                     takeMVar outMVar
                     takeMVar outMVar
                     hClose outh
                     hClose errh
                     ex <- waitForProcess pid
                     return $ case response of
                                Nothing -> (ex, out, err)
                                Just (r, vals) -> -- if the status is unknown, prepare for the possibility of not having a model
                                                  -- TBD: This is rather crude and potentially Z3 specific
                                                  let finalOut = intercalate "\n" (r : vals)
                                                  in if "unknown" `isPrefixOf` r && "error" `isInfixOf` (out ++ err)
                                                     then (ExitSuccess, finalOut , "")
                                                     else (ex, finalOut ++ "\n" ++ out, err)
        return (send, ask, cleanUp, pid)
      let executeSolver = do mapM_ send (lines (scriptBody script))
                             response <- case scriptModel script of
                                           Nothing -> do send $ satCmd cfg
                                                         return Nothing
                                           Just ls -> do r <- ask $ satCmd cfg
                                                         vals <- if any (`isPrefixOf` r) ["sat", "unknown"]
                                                                 then do let mls = lines ls
                                                                         when (verbose cfg) $ do putStrLn "** Sending the following model extraction commands:"
                                                                                                 mapM_ putStrLn mls
                                                                         mapM ask mls
                                                                 else return []
                                                         return $ Just (r, vals)
                             cleanUp response
      executeSolver `C.onException` terminateProcess pid
-- | In case the SMT-Lib solver returns a response over multiple lines, compress them so we have
-- each S-Expression spanning only a single line. We'll ignore things like parentheses inside quotes
-- etc., as it should not be an issue
mergeSExpr :: [String] -> [String]
mergeSExpr [] = []
mergeSExpr (x:xs)
  | balance == 0 = x : mergeSExpr xs
  | otherwise    = let (joined, rest) = collect balance xs
                   in unwords (x : joined) : mergeSExpr rest
  where
    balance = parenDiff x
    -- net nesting contributed by one line: '(' counts +1, ')' counts -1
    parenDiff :: String -> Int
    parenDiff = go 0
      where
        go acc ""     = acc
        go acc (c:cs) = let acc' = step c acc in acc' `seq` go acc' cs
        step '(' n = n + 1
        step ')' n = n - 1
        step _   n = n
    -- keep taking lines until the outstanding nesting depth drops to zero
    collect depth ls
      | depth <= 0 = ([], ls)
    collect _ []      = ([], [])
    collect depth (l:ls) =
      let (taken, rest) = collect (depth + parenDiff l) ls
      in (l : taken, rest)
| TomMD/cryptol | sbv/Data/SBV/SMT/SMT.hs | bsd-3-clause | 24,164 | 0 | 33 | 8,480 | 5,930 | 3,102 | 2,828 | 316 | 8 |
{-# OPTIONS_GHC -Wno-orphans #-}
{-|
Module : AERN2.MP.Enclosure
Description : Enclosure operations
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Enclosure classes and operations.
-}
module AERN2.MP.Enclosure
(
IsBall(..), ballFunctionUsingLipschitz
, IsInterval(..), endpointL, endpointR
, fromEndpointsAsIntervals, endpointsAsIntervals, endpointLAsInterval, endpointRAsInterval
, intervalFunctionByEndpoints, intervalFunctionByEndpointsUpDown
, CanPlusMinus(..), (+-)
, CanTestContains(..), CanMapInside(..), specCanMapInside
, CanIntersectAsymmetric(..), CanIntersect
, CanIntersectBy, CanIntersectSameType
, CanIntersectCNBy, CanIntersectCNSameType
, CanUnionAsymmetric(..), CanUnion
, CanUnionBy, CanUnionSameType
, CanUnionCNBy, CanUnionCNSameType
)
where
import MixedTypesNumPrelude
-- import qualified Prelude as P
-- import Control.Arrow
import Test.Hspec
import Test.QuickCheck
import qualified Numeric.CollectErrors as CN
import AERN2.Kleenean
import AERN2.MP.ErrorBound
-- import AERN2.MP.Accuracy
{- ball-specific operations -}
-- | Types that behave like a centre-plus-radius ball enclosure.
class IsBall t where
  type CentreType t
  -- | The centre of the ball.
  centre :: t -> CentreType t
  -- | The centre (as a thin ball of the same type) together with the radius.
  centreAsBallAndRadius :: t-> (t,ErrorBound)
  -- | The centre as a thin ball of the same type.
  centreAsBall :: t -> t
  centreAsBall = fst . centreAsBallAndRadius
  -- | The radius (error bound) of the ball.
  radius :: t -> ErrorBound
  radius = snd . centreAsBallAndRadius
  -- | Apply a function to the radius, leaving the centre untouched.
  updateRadius :: (ErrorBound -> ErrorBound) -> (t -> t)
  {-| When the radius of the ball is implicitly contributed to by imprecision in the centre
      (eg if the centre is a polynomial with inexact coefficients), move all that imprecision
      to the explicit radius, making the centre exact. This may lose some information,
      but as a ball is equivalent to the original.
      For MPBall this function is pointless because it is equivalent to the identity. -}
  makeExactCentre :: t -> t
  makeExactCentre v =
    updateRadius (+r) c
    where
    (c, r) = centreAsBallAndRadius v
-- | Lift ball operations through the CollectErrors (CN) wrapper.
instance (IsBall t => IsBall (CN t)) where
  type CentreType (CN t) = CN (CentreType t)
  centre = fmap centre
  updateRadius f = fmap (updateRadius f)
  -- cannot split a CN value into a pair; deliberately undefined
  centreAsBallAndRadius = error $ "centreAsBallAndRadius not defined for CN types"
{-|
  Computes a ball function @f@ on the centre and updating the error bound using a Lipschitz constant.
-}
ballFunctionUsingLipschitz ::
  (IsBall t, HasEqCertainly t t)
  =>
  (t -> t) {-^ @fThin@: a version of @f@ that works well on thin balls -} ->
  (t -> ErrorBound) {-^ @fLip@: a Lipschitz function of @f@ over large balls -} ->
  (t -> t) {-^ @f@ on *large* balls -}
ballFunctionUsingLipschitz fThin fLip x
  | r == 0 = fThin c -- thin ball: nothing to propagate
  | otherwise = updateRadius (+ (fLip x)*r) (fThin c) -- output error grows by at most Lipschitz * radius
  where
  (c, r) = centreAsBallAndRadius x
{- interval-specific operations -}
-- | Types that can be decomposed into / built from a pair of endpoints.
class IsInterval i where
  type IntervalEndpoint i
  endpoints :: i -> (IntervalEndpoint i, IntervalEndpoint i)
  fromEndpoints :: IntervalEndpoint i -> IntervalEndpoint i -> i
-- | Lift interval operations through the CollectErrors (CN) wrapper.
instance (IsInterval t) => (IsInterval (CN t)) where
  type (IntervalEndpoint (CN t)) = CN (IntervalEndpoint t)
  fromEndpoints l u = CN.lift2 fromEndpoints l u
  endpoints = CN.liftPair endpoints
-- | The left (lower) endpoint.
endpointL :: (IsInterval i) => i -> IntervalEndpoint i
endpointL = fst . endpoints
-- | The right (upper) endpoint.
endpointR :: (IsInterval i) => i -> IntervalEndpoint i
endpointR = snd . endpoints
-- | Both endpoints of an interval, each returned as a thin interval.
endpointsAsIntervals ::
  (IsInterval i) => i -> (i,i)
endpointsAsIntervals x = (lI,rI)
  where
  lI = fromEndpoints l l
  rI = fromEndpoints r r
  (l,r) = endpoints x
-- | The left endpoint as a thin interval.
endpointLAsInterval :: (IsInterval i) => i -> i
endpointLAsInterval = fst . endpointsAsIntervals
-- | The right endpoint as a thin interval.
endpointRAsInterval :: (IsInterval i) => i -> i
endpointRAsInterval = snd . endpointsAsIntervals
-- | Build an interval from two (possibly thick) endpoint intervals,
-- spanning from the smallest lower bound to the largest upper bound.
fromEndpointsAsIntervals ::
  (IsInterval i, CanMinMaxSameType (IntervalEndpoint i)) =>
  i -> i -> i
fromEndpointsAsIntervals l r =
  fromEndpoints lMP uMP
  where
  lMP = min llMP rlMP
  uMP = max luMP ruMP
  (llMP, luMP) = endpoints l
  (rlMP, ruMP) = endpoints r
{- plusMinus (+-) operator -}
-- | Types supporting a "centre plus-minus error" construction.
class CanPlusMinus t1 t2 where
  type PlusMinusType t1 t2
  type PlusMinusType t1 t2 = t1 -- by default the result has the centre's type
  {-| Operator for constructing or enlarging enclosures such as balls or intervals -}
  plusMinus :: t1 -> t2 -> PlusMinusType t1 t2
infixl 6 +-
{-| Operator for constructing or enlarging enclosures such as balls or intervals -}
(+-) :: (CanPlusMinus t1 t2) => t1 -> t2 -> PlusMinusType t1 t2
(+-) = plusMinus
{-|
  Computes a *monotone* ball function @f@ on intervals using the interval endpoints.
-}
intervalFunctionByEndpoints ::
  (IsInterval t, CanMinMaxSameType (IntervalEndpoint t), HasEqCertainly t t)
  =>
  (t -> t) {-^ @fThin@: a version of @f@ that works well on thin intervals -} ->
  (t -> t) {-^ @f@ on *large* intervals -}
intervalFunctionByEndpoints fThin x
  | l !==! u = fThin l -- certainly thin: evaluate once
  | otherwise = fromEndpointsAsIntervals (fThin l) (fThin u)
  where
  (l,u) = endpointsAsIntervals x
{-|
  Computes a *monotone* ball function @f@ on intervals using the interval endpoints.
-}
intervalFunctionByEndpointsUpDown ::
  (IsInterval t)
  =>
  (IntervalEndpoint t -> IntervalEndpoint t) {-^ @fDown@: a version of @f@ working on endpoints, rounded down -} ->
  (IntervalEndpoint t -> IntervalEndpoint t) {-^ @fUp@: a version of @f@ working on endpoints, rounded up -} ->
  (t -> t) {-^ @f@ on intervals rounding *outwards* -}
intervalFunctionByEndpointsUpDown fDown fUp x =
  fromEndpoints (fDown l) (fUp u)
  where
  (l,u) = endpoints x
{- containment -}
class CanTestContains dom e where
  {-| Test if @e@ is inside @dom@. -}
  contains :: dom {-^ @dom@ -} -> e {-^ @e@ -} -> Bool
class CanMapInside dom e where
  {-| Return some value contained in @dom@.
      The returned value does not have to equal the given @e@
      even if @e@ is already inside @dom@.
      All elements of @dom@ should be covered with roughly the same probability
      when calling this function for evenly distributed @e@'s.
      This function is intended mainly for generating values inside @dom@
      for randomised tests.
  -}
  mapInside :: dom {-^ @dom@ -} -> e {-^ @e@ -} -> e
-- | QuickCheck property: 'mapInside' always yields a value that
-- 'contains' accepts.
specCanMapInside ::
  (CanMapInside d e, CanTestContains d e
  , Arbitrary d, Arbitrary e, Show d, Show e)
  =>
  T d -> T e -> Spec
specCanMapInside (T dName :: T d) (T eName :: T e) =
  it ("CanMapInside " ++ dName ++ " " ++ eName) $ do
    property $
      \ (d :: d) (e :: e) ->
        contains d $ mapInside d e
{- intersection -}
-- NOTE(review): the constraint below lists @CanIntersectAsymmetric e1 e2@
-- twice; the second occurrence was presumably meant to be
-- @CanIntersectAsymmetric e2 e1@ (matching the symmetry equation on the
-- result type) -- confirm before changing, as a fix would require
-- instances in both directions at all use sites.
type CanIntersect e1 e2 =
  (CanIntersectAsymmetric e1 e2, CanIntersectAsymmetric e1 e2
  , IntersectionType e1 e2 ~ IntersectionType e2 e1)
{-| A set intersection (usually partial) -}
class CanIntersectAsymmetric e1 e2 where
  type IntersectionType e1 e2
  type IntersectionType e1 e2 = CN e1 -- default: intersection may fail, hence CN
  intersect :: e1 -> e2 -> IntersectionType e1 e2
-- | Intersection whose result has the first operand's type.
type CanIntersectBy e1 e2 =
  (CanIntersect e1 e2, IntersectionType e1 e2 ~ e1)
type CanIntersectSameType e1 =
  (CanIntersectBy e1 e1)
-- | Intersection whose result is the first operand's type wrapped in CN.
type CanIntersectCNBy e1 e2 =
  (CanIntersect e1 e2, IntersectionType e1 e2 ~ CN e1)
type CanIntersectCNSameType e1 =
  (CanIntersectCNBy e1 e1)
-- | Intersecting Booleans: succeeds only when they agree.
instance
  CanIntersectAsymmetric Bool Bool
  where
  intersect b1 b2
    | b1 == b2 = cn b1
    | otherwise =
        CN.noValueNumErrorCertain $ CN.NumError "empty Boolean intersection"
-- | Intersecting Kleeneans: contradictory certainties are an error;
-- TrueOrFalse is absorbed by the other operand.
instance
  CanIntersectAsymmetric Kleenean Kleenean
  where
  intersect CertainTrue CertainFalse =
    CN.noValueNumErrorCertain $ CN.NumError "empty Kleenean intersection"
  intersect CertainFalse CertainTrue =
    CN.noValueNumErrorCertain $ CN.NumError "empty Kleenean intersection"
  intersect TrueOrFalse k2 = cn k2
  intersect k1 _ = cn k1
-- | Lift an intersection through CN on both sides.
instance
  (CanIntersectAsymmetric a b, IntersectionType a b ~ CN c)
  =>
  CanIntersectAsymmetric (CN a) (CN b)
  where
  type IntersectionType (CN a) (CN b) = IntersectionType a b
  intersect = CN.lift2CN intersect
-- The remaining instances wrap a bare operand in 'cn' and delegate
-- to the CN-on-both-sides instance.
instance
  (CanIntersectAsymmetric (CN Bool) (CN b))
  =>
  CanIntersectAsymmetric Bool (CN b)
  where
  type IntersectionType Bool (CN b) = IntersectionType (CN Bool) (CN b)
  intersect b1 = intersect (cn b1)
instance
  (CanIntersectAsymmetric (CN a) (CN Bool))
  =>
  CanIntersectAsymmetric (CN a) Bool
  where
  type IntersectionType (CN a) Bool = IntersectionType (CN a) (CN Bool)
  intersect b1 b2 = intersect b1 (cn b2)
instance
  (CanIntersectAsymmetric (CN Kleenean) (CN b))
  =>
  CanIntersectAsymmetric Kleenean (CN b)
  where
  type IntersectionType Kleenean (CN b) = IntersectionType (CN Kleenean) (CN b)
  intersect k1 = intersect (cn k1)
instance
  (CanIntersectAsymmetric (CN a) (CN Kleenean))
  =>
  CanIntersectAsymmetric (CN a) Kleenean
  where
  type IntersectionType (CN a) Kleenean = IntersectionType (CN a) (CN Kleenean)
  intersect k1 k2 = intersect k1 (cn k2)
{- set union -}
-- NOTE(review): as with 'CanIntersect', the constraint lists
-- @CanUnionAsymmetric e1 e2@ twice; the second was presumably meant to
-- be @CanUnionAsymmetric e2 e1@ -- confirm before changing.
type CanUnion e1 e2 =
  (CanUnionAsymmetric e1 e2, CanUnionAsymmetric e1 e2
  , UnionType e1 e2 ~ UnionType e2 e1)
{-| A set union (usually partial) -}
class CanUnionAsymmetric e1 e2 where
  type UnionType e1 e2
  type UnionType e1 e2 = CN e1 -- default: union may fail, hence CN
  union :: e1 -> e2 -> UnionType e1 e2
-- | Union whose result has the first operand's type.
type CanUnionBy e1 e2 =
  (CanUnion e1 e2, UnionType e1 e2 ~ e1)
type CanUnionSameType e1 =
  (CanUnionBy e1 e1)
-- | Union whose result is the first operand's type wrapped in CN.
type CanUnionCNBy e1 e2 =
  (CanUnion e1 e2, UnionType e1 e2 ~ CN e1)
type CanUnionCNSameType e1 =
  (CanUnionCNBy e1 e1)
-- | Lift a union through CN on both sides.
instance
  (CanUnionAsymmetric a b, UnionType a b ~ CN c)
  =>
  CanUnionAsymmetric (CN a) (CN b)
  where
  type UnionType (CN a) (CN b) = UnionType a b
  union = CN.lift2CN union
-- | Kleenean if-then-else over union-able branches: an undecided
-- condition yields the union of both branches.
instance (CanUnionSameType t, CN.CanTakeCNErrors t) =>
  HasIfThenElse Kleenean t
  where
  type IfThenElseType Kleenean t = t
  ifThenElse CertainTrue e1 _ = e1
  ifThenElse CertainFalse _ e2 = e2
  ifThenElse TrueOrFalse e1 e2 = e1 `union` e2
| michalkonecny/aern2 | aern2-mp/src/AERN2/MP/Enclosure.hs | bsd-3-clause | 9,754 | 7 | 11 | 1,983 | 2,556 | 1,364 | 1,192 | -1 | -1 |
{-|
Module : Database.Relational.Unique
Description : Definition of UNIQUE
Copyright : (c) Alexander Vieth, 2015
Licence : BSD3
Maintainer : aovieth@gmail.com
Stability : experimental
Portability : non-portable (GHC only)
-}
{-# LANGUAGE AutoDeriveTypeable #-}
module Database.Relational.Unique (
UNIQUE(..)
) where
data UNIQUE term = UNIQUE term
| avieth/Relational | Database/Relational/Unique.hs | bsd-3-clause | 376 | 0 | 6 | 75 | 28 | 19 | 9 | 4 | 0 |
module Language.Lambda.Common.Arbitrary where
import Test.QuickCheck
-- | Generates a string like "x_{n}" where n is a positive integer
sym_arbitrary :: Gen String
sym_arbitrary = do
  n <- suchThat (arbitrary :: Gen Int) (> 0)
  return ("x_" ++ show n)
module Baum.BinHeap.Ops where
import Baum.BinHeap.Type
import Baum.Heap.Op ( Position )
import qualified Baum.Heap.Class as C
import Control.Monad ( guard )
import Data.List ( inits, tails )
-- Binomial heaps implement the generic heap interface.
instance C.Heap BinHeap where
    empty = BinHeap []
    isEmpty h = null $ roots h
    -- insertion = merging with a singleton tree of order 0
    insert h x = BinHeap
        $ merge ( roots h )
                [ Node { key = x, children = [] } ]
    -- find the root tree holding the global minimum, remove it, and
    -- merge its children back in; partial: undefined on an empty heap
    deleteMin h | length ( roots h ) > 0 = head $ do
        ( pre, this : post ) <- splits $ roots h
        guard $ isMin this h
        return $ BinHeap
               $ merge ( pre ++ post ) ( children this )
    get h p = gets ( roots h ) p
    decreaseTo h p x =
        BinHeap $ decreaseTo ( roots h ) p x
    equal = (==)
    toList = toll . roots
-- | Merge two order-increasing lists of binomial trees, combining any
-- pair of trees of equal order into one (like binary addition with carry).
merge :: Ord a
      => [BinTree a] -> [ BinTree a ] -> [BinTree a]
merge [] ys = ys
merge xs [] = xs
merge l@(x:xs) r@(y:ys)
    | order x < order y = x : merge xs r
    | order x > order y = y : merge l ys
    | otherwise         = merge [ glue x y ] (merge xs ys)
-- | Make one tree from two trees of equal order: the tree with the
-- smaller root key adopts the other as its last child.
-- Partial: only defined when both trees have the same order.
glue :: Ord a
     => BinTree a -> BinTree a -> BinTree a
glue x y
    | order x == order y && key x < key y = x { children = children x ++ [y] }
    | order x == order y                  = y { children = children y ++ [x] }
-- | Flatten a forest into (position, key) pairs; a position is the
-- path of child indices from a root down into the tree.
toll :: [ BinTree a ] -> [(Position, a )]
toll ts = do
    (k,t) <- zip [ index_starts_at .. ] ts
    ( [k], key t ) : do
        (p,x) <- toll $ children t
        return ( k : p, x )
-- | Is this tree's root key no larger than every root key in the heap?
isMin :: Ord a => BinTree a -> BinHeap a -> Bool
isMin t h = all (\ u -> key t <= key u) (roots h)
index_starts_at = 0
-- | Look up the key at a 'Position' (a path of child indices) in a
-- forest. Returns 'Nothing' for the empty path or any out-of-range
-- index along the path.
gets :: [ BinTree a ] -> Position -> Maybe a
gets ts ps = case ps of
    [] -> Nothing
    p : ps' -> do
        let q = p - index_starts_at
        guard $ 0 <= q && q < length ts
        -- index with the adjusted q: the original used the raw p here,
        -- which is only correct while index_starts_at == 0 and is
        -- inconsistent with 'decreaseTo', which adjusts before splitAt
        let t = ts !! q
        if null ps' then return $ key t else gets ( children t ) ps'
-- | Decrease the key at the given position (path of child indices) to
-- the value @x@, restoring the heap property along the path by keeping
-- the smaller key at each node and pushing the larger one downwards.
-- Partial: the position must be non-empty and in range.
decreaseTo :: Ord a
           => [ BinTree a ] -> Position -> a -> [ BinTree a ]
decreaseTo ts (p : ps) x =
    let ( pre, t : post ) = splitAt (p - index_starts_at) ts
        t' = if null ps
             then Node { key = x , children = children t }
             else if x < key t
                  -- new key beats this node: keep x here, push the old
                  -- key further down the remaining path
                  then Node
                       { key = x
                       , children = decreaseTo ( children t ) ps ( key t )
                       }
                  else Node
                       { key = key t
                       , children = decreaseTo ( children t ) ps x
                       }
    in ( pre ++ t' : post )
splits xs = zip ( inits xs ) ( tails xs )
| Erdwolf/autotool-bonn | src/Baum/BinHeap/Ops.hs | gpl-2.0 | 2,640 | 0 | 15 | 1,048 | 1,140 | 583 | 557 | 68 | 3 |
import CpiLib
import CpiTest
import CpiODE
import CpiSemantics
import CpiLogic
import System.Environment (getArgs)
-- Time points
--tps = (100,(0,25))
-- Basic
-- Atomic threshold formulas over species concentrations.
f1 = Pos (0,infty) (ValGT (Conc (Def "P" [])) (R 0.05))
f2 = Pos (0,infty) (ValLE (Conc (Def "S" ["s"])) (R 0.01))
f3 = Nec (0,infty) (ValGT (Conc (Def "E" ["e"])) (R 0.01))
f4 = Nec (0,infty) (ValGT (Conc (Def "E" ["e"])) (R 0.4))
-- 1-nested TL
-- NOTE(review): f5-f8 (and hence f9-f16) all nest f1; from the group
-- comments one might expect them to range over f1..f4 -- confirm
-- whether the duplication is intentional before relying on it.
f5 = Nec (0,infty) f1
f6 = Nec (0,infty) f1
f7 = Pos (0,infty) f1
f8 = Pos (0,infty) f1
-- 2-nested TL
f9 = Pos (0,infty) f5
f10 = Pos (0,infty) f6
f11 = Nec (0,infty) f7
f12 = Nec (0,infty) f8
-- 3-nested TL
f13 = Nec (0,infty) f9
f14 = Nec (0,infty) f10
f15 = Pos (0,infty) f11
f16 = Pos (0,infty) f12
-- Basic Gtee
-- Context guarantees: the basic formulas under context "In".
f17 = Gtee "In" f1
f18 = Gtee "In" f2
f19 = Gtee "In" f3
f20 = Gtee "In" f4
-- 1-nested Gtee
f21 = Pos (0,infty) f17
f22 = Pos (0,infty) f18
f23 = Pos (0,infty) f19
f24 = Pos (0,infty) f20
f25 = Nec (0,infty) f17
f26 = Nec (0,infty) f18
f27 = Nec (0,infty) f19
f28 = Nec (0,infty) f20
-- nested Gtee and nested TL
f29 = Nec (0,infty) f21
f30 = Nec (0,infty) f22
f31 = Nec (0,infty) f23
f32 = Nec (0,infty) f24
f33 = Pos (0,infty) f25
f34 = Pos (0,infty) f26
f35 = Pos (0,infty) f27
f36 = Pos (0,infty) f28
-- 2 nested Gtees
f37 = Gtee "Q'" f21
f38 = Gtee "Q'" f22
f39 = Gtee "Q'" f23
f40 = Gtee "Q'" f24
f41 = Gtee "Q'" f25
f42 = Gtee "Q'" f26
f43 = Gtee "Q'" f27
f44 = Gtee "Q'" f28
-- sandwich nested Gtees
f45 = Pos (0,infty) f37
f46 = Pos (0,infty) f38
f47 = Pos (0,infty) f39
f48 = Pos (0,infty) f40
f49 = Pos (0,infty) f41
f50 = Pos (0,infty) f42
f51 = Pos (0,infty) f43
f52 = Pos (0,infty) f44
f53 = Nec (0,infty) f37
f54 = Nec (0,infty) f38
f55 = Nec (0,infty) f39
f56 = Nec (0,infty) f40
f57 = Nec (0,infty) f41
f58 = Nec (0,infty) f42
f59 = Nec (0,infty) f43
f60 = Nec (0,infty) f44
-- Profiling driver: checks a handful of formulas with each of the four
-- model-checking strategies; the SCC pragmas name the cost centres, so
-- the structure of this code is deliberately repetitive.
main = do env <- tEnv "models/testGT.cpi"
          res <- getArgs
          -- first CLI argument: number of ODE time points
          let tps = (read(res!!0),(0,25))
          -- NOTE(review): 'pi' shadows Prelude.pi; it is the CPi process here
          let pi = tProc env "Pi"
              mts = processMTS env pi
              pi' = wholeProc env pi mts
              dpdt = dPdt' env mts pi'
              odes = xdot env dpdt
              inits = initials env pi' dpdt
              ts = timePoints (read(res!!0)) (0,25)
              soln = solveODE env pi' dpdt tps
              ss = speciesIn env dpdt
              -- NOTE(review): odes/inits/trace are bound but never used
              -- below; being lazy they are never computed
              trace = timeSeries ts soln ss
          -- each 'let' below shadows the previous r1..r4 on purpose
          let r1 = {-# SCC "f4-Naive" #-} modelCheck
                     env solveODE (Nothing) pi tps f4
              r2 = {-# SCC "f4-DynProg" #-} modelCheckDP
                     env solveODE (Nothing) pi tps f4
              r3 = {-# SCC "f4-Hybrid" #-} modelCheckHy
                     env solveODE (Nothing) pi tps f4
              r4 = {-# SCC "f4-Hybrid2" #-} modelCheckHy2
                     env solveODE (Nothing) pi tps f4
          print $ pretty f4
          print r1
          print r2
          print r3
          print r4
          let r1 = {-# SCC "f20-Naive" #-} modelCheck
                     env solveODE (Nothing) pi tps f20
              r2 = {-# SCC "f20-DynProg" #-} modelCheckDP
                     env solveODE (Nothing) pi tps f20
              r3 = {-# SCC "f20-Hybrid" #-} modelCheckHy
                     env solveODE (Nothing) pi tps f20
              r4 = {-# SCC "f20-Hybrid2" #-} modelCheckHy2
                     env solveODE (Nothing) pi tps f20
          print $ pretty f20
          print r1
          print r2
          print r3
          print r4
          let r1 = {-# SCC "f24-Naive" #-} modelCheck
                     env solveODE (Nothing) pi tps f24
              r2 = {-# SCC "f24-DynProg" #-} modelCheckDP
                     env solveODE (Nothing) pi tps f24
              r3 = {-# SCC "f24-Hybrid" #-} modelCheckHy
                     env solveODE (Nothing) pi tps f24
              r4 = {-# SCC "f24-Hybrid2" #-} modelCheckHy2
                     env solveODE (Nothing) pi tps f24
          print $ pretty f24
          print r1
          print r2
          print r3
          print r4
          let r1 = {-# SCC "f40-Naive" #-} modelCheck
                     env solveODE (Nothing) pi tps f40
              r2 = {-# SCC "f40-DynProg" #-} modelCheckDP
                     env solveODE (Nothing) pi tps f40
              r3 = {-# SCC "f40-Hybrid" #-} modelCheckHy
                     env solveODE (Nothing) pi tps f40
              r4 = {-# SCC "f40-Hybrid2" #-} modelCheckHy2
                     env solveODE (Nothing) pi tps f40
          print $ pretty f40
          print r1
          print r2
          print r3
          print r4
          let r1 = {-# SCC "f48-Naive" #-} modelCheck
                     env solveODE (Nothing) pi tps f48
              r2 = {-# SCC "f48-DynProg" #-} modelCheckDP
                     env solveODE (Nothing) pi tps f48
              r3 = {-# SCC "f48-Hybrid" #-} modelCheckHy
                     env solveODE (Nothing) pi tps f48
              r4 = {-# SCC "f48-Hybrid2" #-} modelCheckHy2
                     env solveODE (Nothing) pi tps f48
          print $ pretty f48
          print r1
          print r2
          print r3
          print r4
| chrisbanks/cpiwb | profileMC4.hs | gpl-3.0 | 5,096 | 0 | 14 | 1,857 | 1,866 | 982 | 884 | 144 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fa-IR">
    <!-- JavaHelp helpset descriptor (Persian/Iran locale) for the ZAP
         "Tips and Tricks" extension: declares the map, TOC, index,
         full-text search and favorites views. -->
    <title>Tips and Tricks | ZAP Extension</title>
    <maps>
        <homeID>top</homeID>
        <mapref location="map.jhm"/>
    </maps>
    <view>
        <name>TOC</name>
        <label>Contents</label>
        <type>org.zaproxy.zap.extension.help.ZapTocView</type>
        <data>toc.xml</data>
    </view>
    <view>
        <name>Index</name>
        <label>نمایه</label>
        <type>javax.help.IndexView</type>
        <data>index.xml</data>
    </view>
    <view>
        <name>Search</name>
        <label>جستجو</label>
        <type>javax.help.SearchView</type>
        <data engine="com.sun.java.help.search.DefaultSearchEngine">
            JavaHelpSearch
        </data>
    </view>
    <view>
        <name>Favorites</name>
        <label>Favorites</label>
        <type>javax.help.FavoritesView</type>
    </view>
</helpset>
</helpset> | veggiespam/zap-extensions | addOns/tips/src/main/javahelp/org/zaproxy/zap/extension/tips/resources/help_fa_IR/helpset_fa_IR.hs | apache-2.0 | 986 | 84 | 64 | 161 | 413 | 209 | 204 | -1 | -1 |
-- Copyright (c) 2014 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -Wall -Werror #-}
-- | Defines a class of monads with the ability to create new
-- 'Position's.
module Control.Monad.Genpos.Class(
MonadGenpos(..)
) where
import Control.Monad.Cont
import Control.Monad.Except
import Control.Monad.List
import Control.Monad.Positions.Class
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans.Journal
import Control.Monad.Writer
import Data.Position.Filename
import Data.Position.Point
-- | An extension to the 'MonadPositions' class that adds the ability
-- to create new 'Point's.
--
-- Minimal complete definition: 'point' and 'filename'.
class MonadPositions m => MonadGenpos m where
  -- | Create a 'Point' from raw data.
  point :: PointInfo
        -- ^ The position info.
        -> m Point
  -- | Create a 'Filename' from raw data.
  filename :: FileInfo
           -- ^ The file name and base path.
           -> m Filename
-- All standard monad transformers get pass-through instances: creating
-- a position in the transformed monad just lifts the operation of the
-- underlying 'MonadGenpos' monad.
instance MonadGenpos m => MonadGenpos (ContT r m) where
  point = lift . point
  filename = lift . filename
instance (MonadGenpos m) => MonadGenpos (ExceptT e m) where
  point = lift . point
  filename = lift . filename
instance (MonadGenpos m) => MonadGenpos (JournalT e m) where
  point = lift . point
  filename = lift . filename
instance MonadGenpos m => MonadGenpos (ListT m) where
  point = lift . point
  filename = lift . filename
instance MonadGenpos m => MonadGenpos (ReaderT r m) where
  point = lift . point
  filename = lift . filename
instance MonadGenpos m => MonadGenpos (StateT s m) where
  point = lift . point
  filename = lift . filename
instance (Monoid w, MonadGenpos m) => MonadGenpos (WriterT w m) where
  point = lift . point
  filename = lift . filename
| saltlang/compiler-toolbox | src/Control/Monad/Genpos/Class.hs | bsd-3-clause | 3,204 | 0 | 8 | 607 | 451 | 263 | 188 | 39 | 0 |
{-# language ForeignFunctionInterface #-}
{-# language GADTSyntax #-}
{-# language KindSignatures #-}
{-# language MagicHash #-}
{-# language UnboxedTuples #-}
{-# language UnliftedFFITypes #-}
{-# language UnliftedNewtypes #-}
{-# OPTIONS_GHC -O2 #-}
import Data.Kind (Type)
import Data.Word
import GHC.Exts
import GHC.IO
import GHC.Word
foreign import ccall unsafe "head_bytearray"
c_head_bytearray_a :: MutableByteArray# RealWorld -> IO Word8
foreign import ccall unsafe "head_bytearray"
c_head_bytearray_b :: MyArray# -> IO Word8
-- | An unlifted newtype around a mutable byte array, used to check that
-- unlifted newtypes are accepted at FFI argument positions.
newtype MyArray# :: TYPE 'UnliftedRep where
  MyArray# :: MutableByteArray# RealWorld -> MyArray#
-- | Boxed (lifted) wrapper around a mutable byte array.
data MutableByteArray :: Type where
  MutableByteArray :: MutableByteArray# RealWorld -> MutableByteArray
main :: IO ()
main = do
  ba@(MutableByteArray ba#) <- luckySingleton
  print =<< readByteArray ba 0
  -- pass the raw MutableByteArray# and its unlifted-newtype wrapper to
  -- the same C function (head_bytearray); both calls receive the same
  -- array, so all three prints should agree
  print =<< c_head_bytearray_a ba#
  print =<< c_head_bytearray_b (MyArray# ba#)
-- | Read the byte at the given index of a mutable byte array.
-- No bounds checking ('readWord8Array#' is unchecked).
readByteArray :: MutableByteArray -> Int -> IO Word8
readByteArray (MutableByteArray b#) (I# i#) = IO $ \s0 ->
  case readWord8Array# b# i# s0 of
    (# s1, w #) -> (# s1, W8# w #)
-- Create a new mutable byte array of length 1 with the sole byte
-- set to the 105.
luckySingleton :: IO MutableByteArray
luckySingleton = IO $ \s0 -> case newByteArray# 1# s0 of
  (# s1, marr# #) -> case writeWord8Array# marr# 0# 105## s1 of
    s2 -> (# s2, MutableByteArray marr# #)
| sdiehl/ghc | testsuite/tests/ffi/should_run/UnliftedNewtypesByteArrayOffset.hs | bsd-3-clause | 1,402 | 0 | 14 | 239 | 344 | 181 | 163 | -1 | -1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Code generation for foreign calls.
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmForeign (
cgForeignCall,
emitPrimCall, emitCCall,
emitForeignCall, -- For CmmParse
emitSaveThreadState,
saveThreadState,
emitLoadThreadState,
loadThreadState,
emitOpenNursery,
emitCloseNursery,
) where
#include "HsVersions.h"
import GhcPrelude hiding( succ, (<*>) )
import StgSyn
import StgCmmProf (storeCurCCS, ccsType, curCCS)
import StgCmmEnv
import StgCmmMonad
import StgCmmUtils
import StgCmmClosure
import StgCmmLayout
import BlockId (newBlockId)
import Cmm
import CmmUtils
import MkGraph
import Type
import RepType
import TysPrim
import CLabel
import SMRep
import ForeignCall
import DynFlags
import Maybes
import Outputable
import UniqSupply
import BasicTypes
import Control.Monad
-----------------------------------------------------------------------------
-- Code generation for Foreign Calls
-----------------------------------------------------------------------------
-- | emit code for a foreign call, and return the results to the sequel.
-- Lowers an STG-level foreign call: marshals the STG arguments, builds
-- the Cmm call target (static label or dynamic function pointer), and
-- either assigns results directly to the sequel's registers or emits an
-- explicit return (see the comment inline and
-- Note [safe foreign call convention]).
cgForeignCall :: ForeignCall -- the op
              -> [StgArg] -- x,y arguments
              -> Type -- result type
              -> FCode ReturnKind
cgForeignCall (CCall (CCallSpec target cconv safety)) stg_args res_ty
  = do { dflags <- getDynFlags
       ; let -- in the stdcall calling convention, the symbol needs @size appended
             -- to it, where size is the total number of bytes of arguments. We
             -- attach this info to the CLabel here, and the CLabel pretty printer
             -- will generate the suffix when the label is printed.
             call_size args
               | StdCallConv <- cconv = Just (sum (map arg_size args))
               | otherwise = Nothing
             -- ToDo: this might not be correct for 64-bit API
             arg_size (arg, _) = max (widthInBytes $ typeWidth $ cmmExprType dflags arg)
                                     (wORD_SIZE dflags)
       ; cmm_args <- getFCallArgs stg_args
       ; (res_regs, res_hints) <- newUnboxedTupleRegs res_ty
       ; let ((call_args, arg_hints), cmm_target)
               = case target of
                   -- value imports are not calls; handled elsewhere
                   StaticTarget _ _ _ False ->
                     panic "cgForeignCall: unexpected FFI value import"
                   StaticTarget _ lbl mPkgId True
                     -> let labelSource
                              = case mPkgId of
                                  Nothing -> ForeignLabelInThisPackage
                                  Just pkgId -> ForeignLabelInPackage pkgId
                            size = call_size cmm_args
                        in ( unzip cmm_args
                           , CmmLit (CmmLabel
                                      (mkForeignLabel lbl size labelSource IsFunction)))
                   -- dynamic call: first argument is the function pointer
                   DynamicTarget -> case cmm_args of
                                      (fn,_):rest -> (unzip rest, fn)
                                      [] -> panic "cgForeignCall []"
             fc = ForeignConvention cconv arg_hints res_hints CmmMayReturn
             call_target = ForeignTarget cmm_target fc
       -- we want to emit code for the call, and then emitReturn.
       -- However, if the sequel is AssignTo, we shortcut a little
       -- and generate a foreign call that assigns the results
       -- directly. Otherwise we end up generating a bunch of
       -- useless "r = r" assignments, which are not merely annoying:
       -- they prevent the common block elimination from working correctly
       -- in the case of a safe foreign call.
       -- See Note [safe foreign call convention]
       --
       ; sequel <- getSequel
       ; case sequel of
           AssignTo assign_to_these _ ->
             emitForeignCall safety assign_to_these call_target call_args
           _something_else ->
             do { _ <- emitForeignCall safety res_regs call_target call_args
                ; emitReturn (map (CmmReg . CmmLocal) res_regs)
                }
       }
{- Note [safe foreign call convention]
The simple thing to do for a safe foreign call would be the same as an
unsafe one: just
emitForeignCall ...
emitReturn ...
but consider what happens in this case
case foo x y z of
(# s, r #) -> ...
The sequel is AssignTo [r]. The call to newUnboxedTupleRegs picks [r]
as the result reg, and we generate
r = foo(x,y,z) returns to L1 -- emitForeignCall
L1:
r = r -- emitReturn
goto L2
L2:
...
Now L1 is a proc point (by definition, it is the continuation of the
safe foreign call). If L2 does a heap check, then L2 will also be a
proc point.
Furthermore, the stack layout algorithm has to arrange to save r
somewhere between the call and the jump to L1, which is annoying: we
would have to treat r differently from the other live variables, which
have to be saved *before* the call.
So we adopt a special convention for safe foreign calls: the results
are copied out according to the NativeReturn convention by the call,
and the continuation of the call should copyIn the results. (The
copyOut code is actually inserted when the safe foreign call is
lowered later). The result regs attached to the safe foreign call are
only used temporarily to hold the results before they are copied out.
We will now generate this:
r = foo(x,y,z) returns to L1
L1:
r = R1 -- copyIn, inserted by mkSafeCall
goto L2
L2:
... r ...
And when the safe foreign call is lowered later (see Note [lower safe
foreign calls]) we get this:
suspendThread()
r = foo(x,y,z)
resumeThread()
R1 = r -- copyOut, inserted by lowerSafeForeignCall
jump L1
L1:
r = R1 -- copyIn, inserted by mkSafeCall
goto L2
L2:
... r ...
Now consider what happens if L2 does a heap check: the Adams
optimisation kicks in and commons up L1 with the heap-check
continuation, resulting in just one proc point instead of two. Yay!
-}
-- | Emit an unsafe (PlayRisky) C call with explicitly hinted
-- result registers and arguments.
emitCCall :: [(CmmFormal,ForeignHint)]
          -> CmmExpr
          -> [(CmmActual,ForeignHint)]
          -> FCode ()
emitCCall hinted_results fn hinted_args =
    void $ emitForeignCall PlayRisky res_regs (ForeignTarget fn conv) actuals
  where
    (actuals, in_hints) = unzip hinted_args
    (res_regs, out_hints) = unzip hinted_results
    conv = ForeignConvention CCallConv in_hints out_hints CmmMayReturn
-- | Emit a call to a callish machine op; always unsafe (PlayRisky).
emitPrimCall :: [CmmFormal] -> CallishMachOp -> [CmmActual] -> FCode ()
emitPrimCall regs op actuals =
    void (emitForeignCall PlayRisky regs (PrimTarget op) actuals)
-- alternative entry point, used by CmmParse
-- | Emit a foreign call. Unsafe calls become a plain unsafe Cmm call
-- bracketed by caller-save/restore of volatile registers; safe calls
-- become a 'CmmForeignCall' block-ender returning to a fresh block,
-- with results returned via the special convention described in
-- Note [safe foreign call convention].
emitForeignCall
   :: Safety
   -> [CmmFormal] -- where to put the results
   -> ForeignTarget -- the op
   -> [CmmActual] -- arguments
   -> FCode ReturnKind
emitForeignCall safety results target args
  | not (playSafe safety) = do
    dflags <- getDynFlags
    let (caller_save, caller_load) = callerSaveVolatileRegs dflags
    emit caller_save
    target' <- load_target_into_temp target
    args' <- mapM maybe_assign_temp args
    emit $ mkUnsafeCall target' results args'
    emit caller_load
    return AssignedDirectly
  | otherwise = do
    dflags <- getDynFlags
    updfr_off <- getUpdFrameOff
    target' <- load_target_into_temp target
    args' <- mapM maybe_assign_temp args
    k <- newBlockId
    let (off, _, copyout) = copyInOflow dflags NativeReturn (Young k) results []
       -- see Note [safe foreign call convention]
    tscope <- getTickScope
    -- store the continuation's block id in its stack slot, then emit
    -- the call itself, the continuation label, and the result copy-in
    emit $
      ( mkStore (CmmStackSlot (Young k) (widthInBytes (wordWidth dflags)))
                (CmmLit (CmmBlock k))
        <*> mkLast (CmmForeignCall { tgt = target'
                                   , res = results
                                   , args = args'
                                   , succ = k
                                   , ret_args = off
                                   , ret_off = updfr_off
                                   , intrbl = playInterruptible safety })
        <*> mkLabel k tscope
        <*> copyout
      )
    return (ReturnedTo k off)
-- | Ensure the expression naming a foreign-call target lives in a
-- fresh temporary; prim-op targets are returned unchanged.
load_target_into_temp :: ForeignTarget -> FCode ForeignTarget
load_target_into_temp tgt = case tgt of
  ForeignTarget expr conv -> do
    tmp <- maybe_assign_temp expr
    return (ForeignTarget tmp conv)
  PrimTarget _ ->
    return tgt
-- | Copy a foreign-call argument into a fresh local temporary and
-- return a reference to that temporary.
--
-- Strictly we would only need the temporary when the expression
-- mentions caller-saves GlobalRegs (see Note [Register Parameter
-- Passing]), but inspecting the expression here would create a black
-- hole when this function is used in a loop by CmmParse.  So we
-- unconditionally introduce the temporary and rely on CmmSink to
-- clean it up later (Yuck, ToDo); the generated code ends up the
-- same, at least for the RTS .cmm sources.
maybe_assign_temp :: CmmExpr -> FCode CmmExpr
maybe_assign_temp expr = do
  dflags <- getDynFlags
  tmp    <- newTemp (cmmExprType dflags expr)
  emitAssign (CmmLocal tmp) expr
  return (CmmReg (CmmLocal tmp))
-- -----------------------------------------------------------------------------
-- Save/restore the thread state in the TSO
-- This stuff can't be done in suspendThread/resumeThread, because it
-- refers to global registers which aren't available in the C world.
-- | Emit the code produced by 'saveThreadState' for the current
-- 'DynFlags'.
emitSaveThreadState :: FCode ()
emitSaveThreadState = getDynFlags >>= saveThreadState >>= emit
-- | Produce code to save the current thread state to @CurrentTSO@:
-- the stack pointer is written back into the TSO's stack object, the
-- nursery is closed, and (when profiling) the current cost-centre
-- stack is saved.
saveThreadState :: MonadUnique m => DynFlags -> m CmmAGraph
saveThreadState dflags = do
  tso <- newTemp (gcWord dflags)
  close_nursery <- closeNursery dflags tso
  pure $ catAGraphs [
    -- tso = CurrentTSO;
    mkAssign (CmmLocal tso) stgCurrentTSO,
    -- tso->stackobj->sp = Sp;
    mkStore (cmmOffset dflags
                       (CmmLoad (cmmOffset dflags
                                           (CmmReg (CmmLocal tso))
                                           (tso_stackobj dflags))
                                (bWord dflags))
                       (stack_SP dflags))
            stgSp,
    close_nursery,
    -- and save the current cost centre stack in the TSO when profiling:
    if gopt Opt_SccProfilingOn dflags then
      mkStore (cmmOffset dflags (CmmReg (CmmLocal tso)) (tso_CCCS dflags)) curCCS
    else mkNop
    ]
-- | Emit code that loads @CurrentTSO@ into a fresh temporary and
-- closes the nursery (see 'closeNursery').
emitCloseNursery :: FCode ()
emitCloseNursery = do
  df      <- getDynFlags
  tso_reg <- newTemp (bWord df)
  close   <- closeNursery df tso_reg
  emit (mkAssign (CmmLocal tso_reg) stgCurrentTSO <*> close)
{- |
@closeNursery dflags tso@ produces code to close the nursery.
A local register holding the value of @CurrentTSO@ is expected for
efficiency.

Closing the nursery corresponds to the following code:

@
  tso = CurrentTSO;
  cn = CurrentNursery;

  // Update the allocation limit for the current thread.  We don't
  // check to see whether it has overflowed at this point, that check is
  // made when we run out of space in the current heap block (stg_gc_noregs)
  // and in the scheduler when context switching (schedulePostRunThread).
  tso->alloc_limit -= Hp + WDS(1) - cn->start;

  // Set cn->free to the next unoccupied word in the block
  cn->free = Hp + WDS(1);
@
-}
closeNursery :: MonadUnique m => DynFlags -> LocalReg -> m CmmAGraph
closeNursery df tso = do
  let tsoreg = CmmLocal tso
  cnreg <- CmmLocal <$> newTemp (bWord df)
  pure $ catAGraphs [
    mkAssign cnreg stgCurrentNursery,

    -- CurrentNursery->free = Hp+1;
    mkStore (nursery_bdescr_free df cnreg) (cmmOffsetW df stgHp 1),

    -- alloc = Hp + WDS(1) - CurrentNursery->start
    let alloc =
          CmmMachOp (mo_wordSub df)
            [ cmmOffsetW df stgHp 1
            , CmmLoad (nursery_bdescr_start df cnreg) (bWord df)
            ]

        alloc_limit = cmmOffset df (CmmReg tsoreg) (tso_alloc_limit df)
    in
      -- tso->alloc_limit -= alloc   (the field is 64-bit, hence MO_Sub W64)
      mkStore alloc_limit (CmmMachOp (MO_Sub W64)
                             [ CmmLoad alloc_limit b64
                             , CmmMachOp (mo_WordTo64 df) [alloc] ])
    ]
-- | Emit the code produced by 'loadThreadState' for the current
-- 'DynFlags'.
emitLoadThreadState :: FCode ()
emitLoadThreadState = getDynFlags >>= loadThreadState >>= emit
-- | Produce code to load the current thread state from @CurrentTSO@:
-- Sp/SpLim are read back from the TSO's stack object, HpAlloc is
-- zeroed, the nursery is opened, and (when profiling) the cost-centre
-- stack is restored.
loadThreadState :: MonadUnique m => DynFlags -> m CmmAGraph
loadThreadState dflags = do
  tso <- newTemp (gcWord dflags)
  stack <- newTemp (gcWord dflags)
  open_nursery <- openNursery dflags tso
  pure $ catAGraphs [
    -- tso = CurrentTSO;
    mkAssign (CmmLocal tso) stgCurrentTSO,
    -- stack = tso->stackobj;
    mkAssign (CmmLocal stack) (CmmLoad (cmmOffset dflags (CmmReg (CmmLocal tso)) (tso_stackobj dflags)) (bWord dflags)),
    -- Sp = stack->sp;
    mkAssign sp (CmmLoad (cmmOffset dflags (CmmReg (CmmLocal stack)) (stack_SP dflags)) (bWord dflags)),
    -- SpLim = stack->stack + RESERVED_STACK_WORDS;
    mkAssign spLim (cmmOffsetW dflags (cmmOffset dflags (CmmReg (CmmLocal stack)) (stack_STACK dflags))
                                (rESERVED_STACK_WORDS dflags)),
    -- HpAlloc = 0;
    --   HpAlloc is assumed to be set to non-zero only by a failed
    --   heap check, see HeapStackCheck.cmm:GC_GENERIC
    mkAssign hpAlloc (zeroExpr dflags),
    open_nursery,
    -- and load the current cost centre stack from the TSO when profiling:
    if gopt Opt_SccProfilingOn dflags
      then storeCurCCS
             (CmmLoad (cmmOffset dflags (CmmReg (CmmLocal tso))
                (tso_CCCS dflags)) (ccsType dflags))
      else mkNop
    ]
-- | Emit code that loads @CurrentTSO@ into a fresh temporary and
-- opens the nursery (see 'openNursery').
emitOpenNursery :: FCode ()
emitOpenNursery = do
  df      <- getDynFlags
  tso_reg <- newTemp (bWord df)
  open    <- openNursery df tso_reg
  emit (mkAssign (CmmLocal tso_reg) stgCurrentTSO <*> open)
{- |
@openNursery dflags tso@ produces code to open the nursery.  A local register
holding the value of @CurrentTSO@ is expected for efficiency.

Opening the nursery corresponds to the following code:

@
   tso = CurrentTSO;
   cn = CurrentNursery;
   bdfree = CurrentNursery->free;
   bdstart = CurrentNursery->start;

   // We *add* the currently occupied portion of the nursery block to
   // the allocation limit, because we will subtract it again in
   // closeNursery.
   tso->alloc_limit += bdfree - bdstart;

   // Set Hp to the last occupied word of the heap block.  Why not the
   // next unoccupied word?  Doing it this way means that we get to use
   // an offset of zero more often, which might lead to slightly smaller
   // code on some architectures.
   Hp = bdfree - WDS(1);

   // Set HpLim to the end of the current nursery block (note that this block
   // might be a block group, consisting of several adjacent blocks).
   HpLim = bdstart + CurrentNursery->blocks*BLOCK_SIZE_W - 1;
@
-}
openNursery :: MonadUnique m => DynFlags -> LocalReg -> m CmmAGraph
openNursery df tso = do
  let tsoreg = CmmLocal tso
  cnreg      <- CmmLocal <$> newTemp (bWord df)
  bdfreereg  <- CmmLocal <$> newTemp (bWord df)
  bdstartreg <- CmmLocal <$> newTemp (bWord df)

  -- These assignments are carefully ordered to reduce register
  -- pressure and generate not completely awful code on x86.  To see
  -- what code we generate, look at the assembly for
  -- stg_returnToStackTop in rts/StgStartup.cmm.
  pure $ catAGraphs [
     mkAssign cnreg stgCurrentNursery,
     mkAssign bdfreereg (CmmLoad (nursery_bdescr_free df cnreg) (bWord df)),

     -- Hp = CurrentNursery->free - 1;
     mkAssign hp (cmmOffsetW df (CmmReg bdfreereg) (-1)),
     mkAssign bdstartreg (CmmLoad (nursery_bdescr_start df cnreg) (bWord df)),

     -- HpLim = CurrentNursery->start +
     --         CurrentNursery->blocks*BLOCK_SIZE_W - 1;
     -- (the blocks field is 32-bit, hence the MO_SS_Conv to word width)
     mkAssign hpLim
         (cmmOffsetExpr df
             (CmmReg bdstartreg)
             (cmmOffset df
               (CmmMachOp (mo_wordMul df) [
                 CmmMachOp (MO_SS_Conv W32 (wordWidth df))
                   [CmmLoad (nursery_bdescr_blocks df cnreg) b32],
                 mkIntExpr df (bLOCK_SIZE df)
                ])
               (-1)
             )
         ),

     -- alloc = bd->free - bd->start
     let alloc =
           CmmMachOp (mo_wordSub df) [CmmReg bdfreereg, CmmReg bdstartreg]

         alloc_limit = cmmOffset df (CmmReg tsoreg) (tso_alloc_limit df)
     in
       -- tso->alloc_limit += alloc   (the field is 64-bit, hence MO_Add W64)
       mkStore alloc_limit (CmmMachOp (MO_Add W64)
                              [ CmmLoad alloc_limit b64
                              , CmmMachOp (mo_WordTo64 df) [alloc] ])
   ]
-- | Addresses of the @free@, @start@ and @blocks@ fields of the
-- @bdescr@ pointed to by the given register (normally the current
-- nursery block descriptor).
nursery_bdescr_free, nursery_bdescr_start, nursery_bdescr_blocks
  :: DynFlags -> CmmReg -> CmmExpr
nursery_bdescr_free dflags cn =
  cmmOffset dflags (CmmReg cn) (oFFSET_bdescr_free dflags)
nursery_bdescr_start dflags cn =
  cmmOffset dflags (CmmReg cn) (oFFSET_bdescr_start dflags)
nursery_bdescr_blocks dflags cn =
  cmmOffset dflags (CmmReg cn) (oFFSET_bdescr_blocks dflags)
-- | Byte offsets of fields in the TSO and stack structures, each
-- adjusted past the fixed closure header (see 'closureField').
tso_stackobj, tso_CCCS, tso_alloc_limit, stack_STACK, stack_SP :: DynFlags -> ByteOff
tso_stackobj dflags = closureField dflags (oFFSET_StgTSO_stackobj dflags)
tso_alloc_limit dflags = closureField dflags (oFFSET_StgTSO_alloc_limit dflags)
tso_CCCS dflags = closureField dflags (oFFSET_StgTSO_cccs dflags)
stack_STACK dflags = closureField dflags (oFFSET_StgStack_stack dflags)
stack_SP dflags = closureField dflags (oFFSET_StgStack_sp dflags)
-- | Adjust a field offset so that it is measured from the start of
-- the closure, i.e. past the fixed header.
closureField :: DynFlags -> ByteOff -> ByteOff
closureField dflags off = fixedHdrSize dflags + off
-- | Expressions reading the STG global registers.
stgSp, stgHp, stgCurrentTSO, stgCurrentNursery :: CmmExpr
stgSp = CmmReg sp
stgHp = CmmReg hp
stgCurrentTSO = CmmReg currentTSO
stgCurrentNursery = CmmReg currentNursery
-- | The STG global registers, wrapped as 'CmmReg's.
sp, spLim, hp, hpLim, currentTSO, currentNursery, hpAlloc :: CmmReg
sp = CmmGlobal Sp
spLim = CmmGlobal SpLim
hp = CmmGlobal Hp
hpLim = CmmGlobal HpLim
currentTSO = CmmGlobal CurrentTSO
currentNursery = CmmGlobal CurrentNursery
hpAlloc = CmmGlobal HpAlloc
-- -----------------------------------------------------------------------------
-- For certain types passed to foreign calls, we adjust the actual
-- value passed to the call. For ByteArray#/Array# we pass the
-- address of the actual array, not the address of the heap object.
-- | Compute the actual Cmm expressions (with foreign hints) to pass
-- for the given STG foreign-call arguments:
--
--   (a) void arguments are dropped;
--   (b) foreign-call shim code is added (see 'add_shim').
--
-- It's (b) that makes this differ from getNonVoidArgAmodes.
getFCallArgs :: [StgArg] -> FCode [(CmmExpr, ForeignHint)]
getFCallArgs args = fmap catMaybes (mapM get args)
  where
    get arg
      | null arg_reps = return Nothing          -- (a) drop void args
      | otherwise = do
          cmm    <- getArgAmode (NonVoid arg)
          dflags <- getDynFlags
          return (Just (add_shim dflags arg_ty cmm, hint))
      where
        arg_ty   = stgArgType arg
        arg_reps = typePrimRep arg_ty
        hint     = typeForeignHint arg_ty
-- | For array-like primitive types we pass the address of the payload
-- rather than the address of the heap object, so skip past the header.
-- Anything else is passed through unchanged.
add_shim :: DynFlags -> Type -> CmmExpr -> CmmExpr
add_shim dflags arg_ty expr
  | tycon `elem` [arrayPrimTyCon, mutableArrayPrimTyCon]
  = cmmOffsetB dflags expr (arrPtrsHdrSize dflags)

  | tycon `elem` [smallArrayPrimTyCon, smallMutableArrayPrimTyCon]
  = cmmOffsetB dflags expr (smallArrPtrsHdrSize dflags)

  | tycon `elem` [byteArrayPrimTyCon, mutableByteArrayPrimTyCon]
  = cmmOffsetB dflags expr (arrWordsHdrSize dflags)

  | otherwise = expr
  where
    -- should be a tycon app, since this is a foreign call
    tycon = tyConAppTyCon (unwrapType arg_ty)
| ezyang/ghc | compiler/codeGen/StgCmmForeign.hs | bsd-3-clause | 19,895 | 0 | 22 | 5,456 | 3,475 | 1,776 | 1,699 | 286 | 6 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE DeriveGeneric #-}
#endif
-- (c) 1999 - 2002 by Martin Erwig [see file COPYRIGHT]
-- | Tree-based implementation of 'Graph' and 'DynGraph'
--
-- You will probably have better performance using the
-- "Data.Graph.Inductive.PatriciaTree" implementation instead.
module Data.Graph.Inductive.Tree (Gr,UGr) where
import Data.Graph.Inductive.Graph
import Control.Applicative (liftA2)
import Control.Arrow (first, second)
import Control.DeepSeq (NFData (..))
import Data.List (foldl', sort)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
#if __GLASGOW_HASKELL__ >= 702
import GHC.Generics (Generic)
#endif
----------------------------------------------------------------------
-- GRAPH REPRESENTATION
----------------------------------------------------------------------
-- | A graph: a finite map from each node to its context.
newtype Gr a b = Gr (GraphRep a b)
#if __GLASGOW_HASKELL__ >= 702
  deriving (Generic)
#endif

-- | The underlying representation: node -> context.
type GraphRep a b = Map Node (Context' a b)
-- | (predecessor adjacencies, node label, successor adjacencies)
type Context' a b = (Adj b,a,Adj b)

-- | A graph with unit node and edge labels.
type UGr = Gr () ()
----------------------------------------------------------------------
-- CLASS INSTANCES
----------------------------------------------------------------------
-- Adjacency lists are unordered, so equality is tested on a
-- canonical (sorted) form of every context.
instance (Eq a, Ord b) => Eq (Gr a b) where
  (Gr g1) == (Gr g2) = fmap canonical g1 == fmap canonical g2
    where
      canonical (p, n, s) = (sort p, n, sort s)
-- Renders as @mkGraph <nodes> <edges>@, parenthesised when needed,
-- matching the 'Read' instance.
instance (Show a, Show b) => Show (Gr a b) where
  showsPrec d g = showParen (d > 10) body
    where
      body = foldr (.) id
        [ showString "mkGraph "
        , shows (labNodes g)
        , showString " "
        , shows (labEdges g)
        ]
-- Parses the @mkGraph <nodes> <edges>@ form produced by 'Show'.
instance (Read a, Read b) => Read (Gr a b) where
  readsPrec p = readParen (p > 10) $ \ r -> do
    ("mkGraph", s) <- lex r
    (ns,t) <- reads s
    (es,u) <- reads t
    return (mkGraph ns es, u)
-- Graph
--
instance Graph Gr where
  empty           = Gr M.empty

  isEmpty (Gr g)  = M.null g

  -- Look up and delete v's context in one Map traversal; if it was
  -- present, tidy the remaining graph with 'cleanSplit'.
  match v gr@(Gr g) = maybe (Nothing, gr)
                            (first Just . uncurry (cleanSplit v))
                      . (\(m,g') -> fmap (flip (,) g') m)
                      $ M.updateLookupWithKey (const (const Nothing)) v g

  -- Insert all nodes (with empty adjacencies) first, then the edges.
  mkGraph vs es   = insEdges es
                    . Gr
                    . M.fromList
                    . map (second (\l -> ([],l,[])))
                    $ vs

  labNodes (Gr g) = map (\(v,(_,l,_))->(v,l)) (M.toList g)

  -- Split out the minimum-keyed node; partial on the empty graph.
  matchAny (Gr g) = maybe (error "Match Exception, Empty Graph")
                          (uncurry (uncurry cleanSplit))
                          (M.minViewWithKey g)

  noNodes (Gr g)  = M.size g

  -- (smallest node, largest node); partial on the empty graph.
  nodeRange (Gr g) = fromMaybe (error "nodeRange of empty graph")
                     $ liftA2 (,) (ix (M.minViewWithKey g))
                                  (ix (M.maxViewWithKey g))
    where
      ix = fmap (fst . fst)

  -- Edges are recovered from the successor lists only (each edge is
  -- stored in both adjacency lists, so this avoids duplicates).
  labEdges (Gr g) = concatMap (\(v,(_,_,s))->map (\(l,w)->(v,w,l)) s) (M.toList g)
-- | After a Node (with its corresponding Context') has been split out
-- of a GraphRep, clean up the remainders: build the full 'Context'
-- for the removed node and delete every reference to it from the
-- adjacency lists of its neighbours.
cleanSplit :: Node -> Context' a b -> GraphRep a b
              -> (Context a b, Gr a b)
cleanSplit v (p,l,s) g = (c, Gr g')
  where
    -- Note: loops are kept only in successor list
    c  = (p', v, l, s)
    p' = rmLoops p
    s' = rmLoops s
    rmLoops = filter ((/=v) . snd)

    -- Drop v from the predecessor lists of its successors and from
    -- the successor lists of its predecessors.
    g' = updAdj s' (clearPred v) . updAdj p' (clearSucc v) $ g
-- DynGraph
--
instance DynGraph Gr where
  -- Insert a context: add the new node's own entry, then register it
  -- in the adjacency lists of its predecessors and successors.
  -- Partial if the node is already present.
  (p,v,l,s) & (Gr g) = Gr
                       . updAdj p (addSucc v)
                       . updAdj s (addPred v)
                       $ M.alter addCntxt v g
    where
      addCntxt = maybe (Just cntxt')
                       (const (error ("Node Exception, Node: "++show v)))
      cntxt' = (p,l,s)
-- Fully evaluating a graph fully evaluates its underlying map.
instance (NFData a, NFData b) => NFData (Gr a b) where
  rnf (Gr g) = rnf g
----------------------------------------------------------------------
-- UTILITIES
----------------------------------------------------------------------
-- | Prepend an edge (labelled @el@) towards @v@ onto the successor
-- list of a context.
addSucc :: Node -> b -> Context' a b -> Context' a b
addSucc v el (pre, lbl, suc) = (pre, lbl, (el, v) : suc)
-- | Prepend an edge (labelled @el@) from @v@ onto the predecessor
-- list of a context.
addPred :: Node -> b -> Context' a b -> Context' a b
addPred v el (pre, lbl, suc) = ((el, v) : pre, lbl, suc)
-- | Remove every successor entry pointing at @v@ from a context.
-- The edge-label argument is ignored.
clearSucc :: Node -> b -> Context' a b -> Context' a b
clearSucc v _ (pre, lbl, suc) = (pre, lbl, [ e | e <- suc, snd e /= v ])
-- | Remove every predecessor entry pointing at @v@ from a context.
-- The edge-label argument is ignored.
clearPred :: Node -> b -> Context' a b -> Context' a b
clearPred v _ (pre, lbl, suc) = ([ e | e <- pre, snd e /= v ], lbl, suc)
-- | For every @(label, node)@ entry of an adjacency list, adjust that
-- node's context in the graph with the supplied (label-aware) update.
updAdj :: Adj b -> (b -> Context' a b -> Context' a b) -> GraphRep a b -> GraphRep a b
updAdj adj f g = foldl' step g adj
  where
    step acc (el, v) = M.adjust (f el) v acc
| scolobb/fgl | Data/Graph/Inductive/Tree.hs | bsd-3-clause | 4,669 | 0 | 15 | 1,413 | 1,749 | 944 | 805 | 79 | 1 |
-- [ ghc-Bugs-1249226 ] runInteractiveProcess and closed stdin.
-- Fixed in rev 1.9 of fptools/libraries/base/cbits/runProcess.c
-- This test doesn't work in GHCi, because FD 0 gets re-allocated to
-- the IO manager pipe, which isn't set to non-blocking mode, and the
-- interactive prompt ends up blocking on a read from this descriptor.
import System.IO
import Control.Concurrent
import System.Process
-- Regression driver: spawn `cat` with stdin already closed, feed it a
-- line on its own pipe, and echo whatever comes back on stdout/stderr.
main = do
  hClose stdin -- everything works as expected if the handle isn't closed.
  putStrLn "Running cat ..."
  (inp, out, err, pid) <- runInteractiveProcess "cat" [] Nothing Nothing
  -- NB: 'pid' is deliberately unused; see the comment below about
  -- avoiding waitForProcess without -threaded.
  forkIO (hPutStrLn inp "foo" >> hClose inp)
  mout <- newEmptyMVar
  merr <- newEmptyMVar
  -- Force each stream fully (length ... `seq`) before publishing it,
  -- so the MVar carries the complete output rather than a lazy thunk.
  forkIO (hGetContents out >>= \s -> length s `seq` putMVar mout s)
  forkIO (hGetContents err >>= \s -> length s `seq` putMVar merr s)
  -- Don't want to deal with waitForProcess and -threaded right now.
  takeMVar mout >>= putStrLn
  takeMVar merr >>= putStrLn
  return ()
| DavidAlphaFox/ghc | libraries/process/tests/process003.hs | bsd-3-clause | 961 | 0 | 12 | 174 | 203 | 99 | 104 | 15 | 1 |
{-# LANGUAGE CPP, BangPatterns, NondecreasingIndentation, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
-- NB: we specifically ignore deprecations. GHC 7.6 marks the .QSem module as
-- deprecated, although it became un-deprecated later. As a result, using 7.6
-- as your bootstrap compiler throws annoying warnings.
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow, 2011
--
-- This module implements multi-module compilation, and is used
-- by --make and GHCi.
--
-- -----------------------------------------------------------------------------
module Eta.Main.GhcMake(
depanal,
load, load', LoadHowMuch(..),
topSortModuleGraph,
IsBoot(..),
summariseModule,
hscSourceToIsBoot,
findExtraSigImports,
implicitRequirements,
noModError, cyclicModuleErr
) where
#ifdef ETA_REPL
import qualified Eta.REPL.Linker as Linker ( unload )
#endif
import qualified Eta.LanguageExtensions as LangExt
import Eta.Main.DriverPhases
import Eta.Main.DriverPipeline
import Eta.Main.DynFlags
import Eta.Main.ErrUtils
import Eta.Main.Finder
import Eta.Main.GhcMonad
import Eta.Main.HeaderInfo
import Eta.Main.HscTypes
import Eta.BasicTypes.Module
import Eta.Iface.TcIface ( typecheckIface )
import Eta.TypeCheck.TcRnMonad ( initIfaceCheck )
import Eta.Main.HscMain
import Eta.Utils.Bag ( listToBag )
import Eta.BasicTypes.BasicTypes
import Eta.Utils.Digraph
import Eta.Utils.Exception ( tryIO, gbracket, gfinally )
import Eta.Utils.FastString
import Eta.Utils.MonadUtils ( allM, MonadIO )
import Eta.Utils.Outputable
import Eta.Utils.Panic
import Eta.BasicTypes.SrcLoc
import Eta.Utils.StringBuffer
import Eta.Utils.UniqFM
import Eta.Utils.UniqDSet
import Eta.TypeCheck.TcBackpack
import Eta.Main.Packages
import Eta.Utils.UniqSet
import Eta.Utils.Util
import Data.Either ( rights, partitionEithers )
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import qualified Eta.Utils.FiniteMap as Map ( insertListWith )
import Eta.Main.FileCleanup
import Control.Concurrent ( forkIOWithUnmask, killThread )
import qualified GHC.Conc as CC
import Control.Concurrent.MVar
import Control.Concurrent.QSem
import Control.Exception
import Control.Monad
import Data.IORef
import Data.List
import qualified Data.List as List
import Data.Maybe
import Data.Ord ( comparing )
import Data.Time
import System.Directory
import System.FilePath
import System.IO ( fixIO )
import System.IO.Error ( isDoesNotExistError )
import GHC.Conc ( getNumProcessors, getNumCapabilities, setNumCapabilities )
#include "HsVersions.h"
-- | Attach a label to the calling thread (visible in debugging and
-- eventlog output).
label_self :: String -> IO ()
label_self thread_name =
  CC.myThreadId >>= \tid -> CC.labelThread tid thread_name
-- -----------------------------------------------------------------------------
-- Loading the program
-- | Perform a dependency analysis starting from the current targets
-- and update the session with the new module graph.
--
-- Dependency analysis entails parsing the @import@ directives and may
-- therefore require running certain preprocessors.
--
-- Note that each 'ModSummary' in the module graph caches its 'DynFlags'.
-- These 'DynFlags' are determined by the /current/ session 'DynFlags' and the
-- @OPTIONS@ and @LANGUAGE@ pragmas of the parsed module. Thus if you want to
-- changes to the 'DynFlags' to take effect you need to call this function
-- again.
--
depanal :: GhcMonad m =>
           [ModuleName]  -- ^ excluded modules
        -> Bool          -- ^ allow duplicate roots
        -> m ModuleGraph
depanal excluded_mods allow_dup_roots = do
  hsc_env <- getSession
  let
    dflags    = hsc_dflags hsc_env
    targets   = hsc_targets hsc_env
    old_graph = hsc_mod_graph hsc_env
  liftIO $ showPass dflags "Chasing dependencies"
  liftIO $ debugTraceMsg dflags 2 (hcat [
             text "Chasing modules from: ",
             hcat (punctuate comma (map pprTarget targets))])
  -- Chase imports starting from the targets; the old graph is passed
  -- in so unchanged summaries can be reused.
  mod_summariesE <- liftIO $ downsweep hsc_env (mgModSummaries old_graph)
                                       excluded_mods allow_dup_roots
  mod_summaries <- reportImportErrors mod_summariesE
  let mod_graph = mkModuleGraph mod_summaries
  -- See Note [Missing home modules] below.
  warnMissingHomeModules hsc_env mod_graph
  setSession hsc_env { hsc_mod_graph = mod_graph }
  return mod_graph
-- Note [Missing home modules]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Sometimes user doesn't want GHC to pick up modules, not explicitly listed
-- in a command line. For example, cabal may want to enable this warning
-- when building a library, so that GHC warns user about modules, not listed
-- neither in `exposed-modules`, nor in `other-modules`.
--
-- Here "home module" means a module, that doesn't come from an other package.
--
-- For example, if GHC is invoked with modules "A" and "B" as targets,
-- but "A" imports some other module "C", then GHC will issue a warning
-- about module "C" not being listed in a command line.
--
-- The warning is enabled by `-Wmissing-home-modules`. See Trac #13129
warnMissingHomeModules :: GhcMonad m => HscEnv -> ModuleGraph -> m ()
warnMissingHomeModules hsc_env mod_graph =
    when (wopt Opt_WarnMissingHomeModules dflags && not (null missing)) $
      logWarnings (listToBag [warn])
  where
    dflags  = hsc_dflags hsc_env
    targets = map targetId (hsc_targets hsc_env)

    is_known_module mod = any (is_my_target mod) targets

    -- We need to be careful to handle the case where (possibly
    -- path-qualified) filenames (aka 'TargetFile') rather than module
    -- names are being passed on the GHC command-line.
    --
    -- For instance, `ghc --make src-exe/Main.hs` and
    -- `ghc --make -isrc-exe Main` are supposed to be equivalent.
    -- Note also that we can't always infer the associated module name
    -- directly from the filename argument.  See Trac #13727.
    is_my_target mod (TargetModule name)
      = moduleName (ms_mod mod) == name
    is_my_target mod (TargetFile target_file _)
      | Just mod_file <- ml_hs_file (ms_location mod)
      = target_file == mod_file ||
        -- We can get a file target even if a module name was
        -- originally specified in a command line because it can
        -- be converted in guessTarget (by appending .hs/.lhs).
        -- So let's convert it back and compare with module name
        mkModuleName (fst $ splitExtension target_file)
          == moduleName (ms_mod mod)
    is_my_target _ _ = False

    missing = map (moduleName . ms_mod) $
      filter (not . is_known_module) (mgModSummaries mod_graph)

    msg
      | gopt Opt_BuildingCabalPackage dflags
      = hang
          (text "These modules are needed for compilation but not listed in your .cabal file's other-modules: ")
          4
          (sep (map ppr missing))
      | otherwise
      =
        hang
          (text "Modules are not listed in command line but needed for compilation: ")
          4
          (sep (map ppr missing))

    warn = makeIntoWarning (Reason Opt_WarnMissingHomeModules)
             (mkPlainErrMsg dflags noSrcSpan msg)
-- | Describes which modules of the module graph need to be loaded.
data LoadHowMuch
   = LoadAllTargets
     -- ^ Load all targets and its dependencies.
   | LoadUpTo ModuleName
     -- ^ Load only the given module and its dependencies.
   | LoadDependenciesOf ModuleName
     -- ^ Load only the dependencies of the given module, but not the module
     -- itself.
-- | Try to load the program. See 'LoadHowMuch' for the different modes.
--
-- This function implements the core of GHC's @--make@ mode. It preprocesses,
-- compiles and loads the specified modules, avoiding re-compilation wherever
-- possible. Depending on the target (see 'DynFlags.hscTarget') compilating
-- and loading may result in files being created on disk.
--
-- Calls the 'reportModuleCompilationResult' callback after each compiling
-- each module, whether successful or not.
--
-- Throw a 'SourceError' if errors are encountered before the actual
-- compilation starts (e.g., during dependency analysis). All other errors
-- are reported using the callback.
--
-- | Analyse dependencies for the current targets and then load them
-- (with the default batch-mode progress 'Messager').
load :: GhcMonad m => LoadHowMuch -> m SuccessFlag
load how_much = depanal [] False >>= load' how_much (Just batchMsg)
-- | Generalized version of 'load' which also supports a custom
-- 'Messager' (for reporting progress) and 'ModuleGraph' (generally
-- produced by calling 'depanal').
load' :: GhcMonad m => LoadHowMuch -> Maybe Messager -> ModuleGraph -> m SuccessFlag
load' how_much mHscMessage mod_graph = do
    modifySession $ \hsc_env -> hsc_env { hsc_mod_graph = mod_graph }
    guessOutputFile
    hsc_env <- getSession

    let hpt1   = hsc_HPT hsc_env
    let dflags = hsc_dflags hsc_env

    -- The "bad" boot modules are the ones for which we have
    -- B.hs-boot in the module graph, but no B.hs
    -- The downsweep should have ensured this does not happen
    -- (see msDeps)
    let all_home_mods =
          mkUniqSet [ ms_mod_name s
                    | s <- mgModSummaries mod_graph, not (isBootSummary s)]
    -- bad_boot_mods = [s | s <- mod_graph, isBootSummary s,
    --                      not (ms_mod_name s `elem` all_home_mods)]
    -- ASSERT( null bad_boot_mods ) return ()

    -- check that the module given in HowMuch actually exists, otherwise
    -- topSortModuleGraph will bomb later.
    let checkHowMuch (LoadUpTo m)           = checkMod m
        checkHowMuch (LoadDependenciesOf m) = checkMod m
        checkHowMuch _ = id

        checkMod m and_then
            | m `elementOfUniqSet` all_home_mods = and_then
            | otherwise = do
                    liftIO $ errorMsg dflags (text "no such module:" <+>
                                              quotes (ppr m))
                    return Failed

    checkHowMuch how_much $ do

        -- mg2_with_srcimps drops the hi-boot nodes, returning a
        -- graph with cycles.  Among other things, it is used for
        -- backing out partially complete cycles following a failed
        -- upsweep, and for removing from hpt all the modules
        -- not in strict downwards closure, during calls to compile.
        let mg2_with_srcimps :: [SCC ModSummary]
            mg2_with_srcimps = topSortModuleGraph True mod_graph Nothing

        -- If we can determine that any of the {-# SOURCE #-} imports
        -- are definitely unnecessary, then emit a warning.
        warnUnnecessarySourceImports mg2_with_srcimps

        let
            -- check the stability property for each module.
            stable_mods@(stable_obj,stable_bco)
                = checkStability hpt1 mg2_with_srcimps all_home_mods

            -- prune bits of the HPT which are definitely redundant now,
            -- to save space.
            pruned_hpt = pruneHomePackageTable hpt1
                                (flattenSCCs mg2_with_srcimps)
                                stable_mods

        _ <- liftIO $ evaluate pruned_hpt

        -- before we unload anything, make sure we don't leave an old
        -- interactive context around pointing to dead bindings.  Also,
        -- write the pruned HPT to allow the old HPT to be GC'd.
        getSession >>= (\_ -> discardIC $ hsc_env { hsc_HPT = pruned_hpt }) >>=
          setSession

        liftIO $ debugTraceMsg dflags 2 (text "Stable obj:" <+> ppr stable_obj $$
                                         text "Stable BCO:" <+> ppr stable_bco)

        -- Unload any modules which are going to be re-linked this time around.
        let stable_linkables = [ linkable
                               | m <- nonDetEltsUniqSet stable_obj ++
                                      nonDetEltsUniqSet stable_bco,
                                 -- It's OK to use nonDetEltsUniqSet here
                                 -- because it only affects linking. Besides
                                 -- this list only serves as a poor man's set.
                                 Just hmi <- [lookupHpt pruned_hpt m],
                                 Just linkable <- [hm_linkable hmi] ]
        liftIO $ unload hsc_env stable_linkables

        -- We could at this point detect cycles which aren't broken by
        -- a source-import, and complain immediately, but it seems better
        -- to let upsweep_mods do this, so at least some useful work gets
        -- done before the upsweep is abandoned.
        --hPutStrLn stderr "after tsort:\n"
        --hPutStrLn stderr (showSDoc (vcat (map ppr mg2)))

        -- Now do the upsweep, calling compile for each module in
        -- turn.  Final result is version 3 of everything.

        -- Topologically sort the module graph, this time including hi-boot
        -- nodes, and possibly just including the portion of the graph
        -- reachable from the module specified in the 2nd argument to load.
        -- This graph should be cycle-free.
        -- If we're restricting the upsweep to a portion of the graph, we
        -- also want to retain everything that is still stable.
        let full_mg :: [SCC ModSummary]
            full_mg = topSortModuleGraph False mod_graph Nothing

            maybe_top_mod = case how_much of
                                LoadUpTo m           -> Just m
                                LoadDependenciesOf m -> Just m
                                _                    -> Nothing

            partial_mg0 :: [SCC ModSummary]
            partial_mg0 = topSortModuleGraph False mod_graph maybe_top_mod

            -- LoadDependenciesOf m: we want the upsweep to stop just
            -- short of the specified module (unless the specified module
            -- is stable).
            partial_mg
                | LoadDependenciesOf _mod <- how_much
                = ASSERT( case last partial_mg0 of
                            AcyclicSCC ms -> ms_mod_name ms == _mod; _ -> False )
                  List.init partial_mg0
                | otherwise
                = partial_mg0

            stable_mg =
                [ AcyclicSCC ms
                | AcyclicSCC ms <- full_mg,
                  stable_mod_summary ms ]

            stable_mod_summary ms =
              ms_mod_name ms `elementOfUniqSet` stable_obj ||
              ms_mod_name ms `elementOfUniqSet` stable_bco

            -- the modules from partial_mg that are not also stable
            -- NB. also keep cycles, we need to emit an error message later
            unstable_mg = filter not_stable partial_mg
              where not_stable (CyclicSCC _) = True
                    not_stable (AcyclicSCC ms)
                       = not $ stable_mod_summary ms

            -- Load all the stable modules first, before attempting to load
            -- an unstable module (#7231).
            mg = stable_mg ++ unstable_mg

        -- clean up between compilations
        -- let cleanup hsc_env = intermediateCleanTempFiles (hsc_dflags hsc_env)
        --                           (flattenSCCs mg2_with_srcimps)
        --                           hsc_env
        let cleanup = cleanCurrentModuleTempFiles . hsc_dflags
        liftIO $ debugTraceMsg dflags 2 (hang (text "Ready for upsweep")
                                            2 (ppr mg))

        n_jobs <- case parMakeCount dflags of
                    Nothing -> liftIO getNumProcessors
                    Just n  -> return n
        let upsweep_fn | n_jobs > 1 = parUpsweep n_jobs
                       | otherwise  = upsweep

        setSession hsc_env{ hsc_HPT = emptyHomePackageTable }
        (upsweep_ok, modsUpswept)
           <- upsweep_fn mHscMessage pruned_hpt stable_mods cleanup mg

        -- Make modsDone be the summaries for each home module now
        -- available; this should equal the domain of hpt3.
        -- Get in in a roughly top .. bottom order (hence reverse).
        let modsDone = reverse modsUpswept

        -- Try and do linking in some form, depending on whether the
        -- upsweep was completely or only partially successful.
        if succeeded upsweep_ok
         then
           -- Easy; just relink it all.
           do liftIO $ debugTraceMsg dflags 2 (text "Upsweep completely successful.")

              -- Clean up after ourselves
              hsc_env1 <- getSession
              liftIO $ cleanCurrentModuleTempFiles dflags

              -- Issue a warning for the confusing case where the user
              -- said '-o foo' but we're not going to do any linking.
              -- We attempt linking if either (a) one of the modules is
              -- called Main, or (b) the user said -no-hs-main, indicating
              -- that main() is going to come from somewhere else.
              --
              let ofile = outputFile dflags
              let no_hs_main = gopt Opt_NoHsMain dflags
              let
                main_mod = mainModIs dflags
                a_root_is_Main = mgElemModule mod_graph main_mod
                do_linking = a_root_is_Main || no_hs_main || ghcLink dflags == LinkDynLib || ghcLink dflags == LinkStaticLib

              when (ghcLink dflags == LinkBinary
                    && isJust ofile && not do_linking) $
                liftIO $ debugTraceMsg dflags 1 $
                    text ("Warning: output was redirected with -o, " ++
                          "but no output will be generated\n" ++
                          "because there is no " ++
                          moduleNameString (moduleName main_mod) ++ " module.")

              -- link everything together
              linkresult <- liftIO $ link (ghcLink dflags) dflags do_linking (hsc_HPT hsc_env1)

              loadFinish Succeeded linkresult

         else
           -- Tricky.  We need to back out the effects of compiling any
           -- half-done cycles, both so as to clean up the top level envs
           -- and to avoid telling the interactive linker to link them.
           do liftIO $ debugTraceMsg dflags 2 (text "Upsweep partially successful.")

              let modsDone_names
                     = map ms_mod modsDone
              let mods_to_zap_names
                     = findPartiallyCompletedCycles modsDone_names
                          mg2_with_srcimps
              -- let mods_to_keep
              --        = filter ((`notElem` mods_to_zap_names).ms_mod)
              --                 modsDone
              let (mods_to_clean, mods_to_keep) =
                    partition ((`notElem` mods_to_zap_names).ms_mod) modsDone
              hsc_env1 <- getSession
              let hpt4' = hsc_HPT hsc_env1

                  -- We must change the lifetime to TFL_CurrentModule for any temp
                  -- file created for an element of mod_to_clean during the upsweep.
                  -- These include preprocessed files and object files for loaded
                  -- modules.
                  unneeded_temps = concat
                    [ms_hspp_file : object_files
                    | ModSummary{ms_mod, ms_hspp_file} <- mods_to_clean
                    , let object_files = maybe [] linkableObjs $
                            lookupHpt hpt4' (moduleName ms_mod)
                            >>= hm_linkable
                    ]
              liftIO $
                changeTempFilesLifetime dflags TFL_CurrentModule unneeded_temps
              liftIO $ cleanCurrentModuleTempFiles dflags

              let hpt4 = retainInTopLevelEnvs (map ms_mod_name mods_to_keep)
                            hpt4'

              -- there should be no Nothings where linkables should be, now
              let just_linkables = True
                  -- isNoLink (ghcLink dflags)
                  --   || allHpt (isJust.hm_linkable)
                  --        (filterHpt ((== HsSrcFile).mi_hsc_src.hm_iface)
                  --                hpt4)
              ASSERT( just_linkables ) do

                -- Link everything together
                linkresult <- liftIO $ link (ghcLink dflags) dflags False hpt4

                modifySession $ \hsc_env -> hsc_env{ hsc_HPT = hpt4 }
                loadFinish Failed linkresult
-- | Finish up after a load: on a failed link, unload everything and forget
-- the program; on a successful link, just reset the interactive context.
loadFinish :: GhcMonad m => SuccessFlag -> SuccessFlag -> m SuccessFlag
loadFinish all_ok linkresult = case linkresult of
  -- The link failed: unload everything and drop the whole program so the
  -- session is back in a clean state, then report failure.
  Failed -> do
    env <- getSession
    liftIO $ unload env []
    setSession =<< discardProg =<< getSession
    return Failed
  -- The link succeeded: empty the interactive context (the module context
  -- becomes the topmost newly loaded module, or the Prelude if none were
  -- loaded) and propagate the overall compile result.
  Succeeded -> do
    setSession =<< discardIC =<< getSession
    return all_ok
-- | Forget the current program (module graph and home package table) while
-- retaining the persistent parts of the 'HscEnv'. Also resets the
-- interactive context via 'discardIC'.
discardProg :: (MonadIO m) => HscEnv -> m HscEnv
discardProg env =
  let env' = env { hsc_mod_graph = emptyMG
                 , hsc_HPT       = emptyHomePackageTable }
  in discardIC env'
-- | Discard the contents of the InteractiveContext, but keep its DynFlags:
-- a fresh context is built from the old context's 'ic_dflags'.
discardIC :: (MonadIO m) => HscEnv -> m HscEnv
discardIC hsc_env =
  fmap (\fresh_ic -> hsc_env { hsc_IC = fresh_ic })
       (newInteractiveContext (ic_dflags (hsc_IC hsc_env)))
-- intermediateCleanTempFiles :: DynFlags -> [ModSummary] -> HscEnv -> IO ()
-- intermediateCleanTempFiles dflags summaries hsc_env
-- = do notIntermediate <- readIORef (filesToNotIntermediateClean dflags)
-- cleanTempFilesExcept dflags (notIntermediate ++ except)
-- where
-- except =
-- -- Save preprocessed files. The preprocessed file *might* be
-- -- the same as the source file, but that doesn't do any
-- -- harm.
-- map ms_hspp_file summaries ++
-- -- Save object files for loaded modules. The point of this
-- -- is that we might have generated and compiled a stub C
-- -- file, and in the case of GHCi the object file will be a
-- -- temporary file which we must not remove because we need
-- -- to load/link it later.
-- hptObjs (hsc_HPT hsc_env)
-- | If there is no -o option, guess the name of target executable
-- by using top-level source file name as a base.
--
-- Leaves the session untouched when the user already supplied -o;
-- otherwise stores the guessed name into 'outputFile' of the session's
-- DynFlags.
guessOutputFile :: GhcMonad m => m ()
guessOutputFile = modifySession $ \env ->
    let dflags = hsc_dflags env
        mod_graph = hsc_mod_graph env
        -- The source path of the main module, if it is in the module graph
        -- and has an on-disk .hs file.
        mainModuleSrcPath :: Maybe String
        mainModuleSrcPath = do
            ms <- mgLookupModule mod_graph (mainModIs dflags)
            ml_hs_file (ms_location ms)
        -- Base name: the main module's source path with its extension dropped.
        name = fmap dropExtension mainModuleSrcPath
        name_exe = do
          -- #if defined(mingw32_HOST_OS)
          -- -- we must add the .exe extention unconditionally here, otherwise
          -- -- when name has an extension of its own, the .exe extension will
          -- -- not be added by DriverPipeline.exeFileName. See #2248
          -- name' <- fmap (<.> "exe") name
          -- #else
          name' <- name
          -- #endif
          mainModuleSrcPath' <- mainModuleSrcPath
          -- #9930: don't clobber input files (unless they ask for it)
          if name' == mainModuleSrcPath'
            then throwGhcException . UsageError $
                 "default output name would overwrite the input file; " ++
                 "must specify -o explicitly"
            else
              -- NOTE(review): unlike stock GHC, this fork prefixes the
              -- guessed executable name with "Run" (e.g. Main.hs ->
              -- RunMain) -- presumably to avoid colliding with the source
              -- file name on case-insensitive filesystems; confirm intent.
              let (path,name'') = splitFileName name'
              in Just (path ++ "Run" ++ name'')
    in
    case outputFile dflags of
        Just _ -> env
        Nothing -> env { hsc_dflags = dflags { outputFile = name_exe } }
-- -----------------------------------------------------------------------------
--
-- | Prune the HomePackageTable
--
-- Before doing an upsweep, we can throw away:
--
-- - For non-stable modules:
-- - all ModDetails, all linked code
-- - all unlinked code that is out of date with respect to
-- the source file
--
-- This is VERY IMPORTANT otherwise we'll end up requiring 2x the
-- space at the end of the upsweep, because the topmost ModDetails of the
-- old HPT holds on to the entire type environment from the previous
-- compilation.
pruneHomePackageTable :: HomePackageTable
                      -> [ModSummary]
                      -> StableModules
                      -> HomePackageTable
pruneHomePackageTable hpt summ (stable_obj, stable_bco)
  = mapHpt prune hpt
  where prune hmi
          -- Stable modules keep their ModDetails; everything else is
          -- emptied to avoid retaining the previous type environment.
          | is_stable modl = hmi'
          | otherwise      = hmi'{ hm_details = emptyModDetails }
          where
           modl = moduleName (mi_module (hm_iface hmi))
           -- Drop a linkable that is older than the current source file:
           -- it is out of date and would otherwise be linked by mistake.
           -- NB: 'ms' is only demanded when hm_linkable is a Just, so a
           -- module absent from ms_map is harmless unless it has a linkable.
           hmi' | Just l <- hm_linkable hmi, linkableTime l < ms_hs_date ms
                = hmi{ hm_linkable = Nothing }
                | otherwise
                = hmi
                where ms = expectJust "prune" (lookupUFM ms_map modl)
        -- Index the summaries by module name for the lookup above.
        ms_map = listToUFM [(ms_mod_name ms, ms) | ms <- summ]
        -- A module is stable if either its object code or byte code is
        -- stable (see checkStability).
        is_stable m =
          m `elementOfUniqSet` stable_obj ||
          m `elementOfUniqSet` stable_bco
-- -----------------------------------------------------------------------------
--
-- | Return (names of) all those in @modsDone@ who are part of a cycle as
-- defined by @theGraph@. A cycle counts as "partially completed" when some,
-- but not all, of its members have been compiled.
findPartiallyCompletedCycles :: [Module] -> [SCC ModSummary] -> [Module]
findPartiallyCompletedCycles modsDone theGraph = concatMap partial theGraph
  where
    -- Acyclic components can never be partially-completed cycles.
    partial (AcyclicSCC _) = []
    partial (CyclicSCC vs)
      -- Some members done, but not the whole cycle: report the done ones.
      | notNull done_here && length done_here < length cycle_mods = done_here
      | otherwise = []
      where
        cycle_mods = nub (map ms_mod vs)
        done_here  = nub [done | done <- modsDone, done `elem` cycle_mods]
-- ---------------------------------------------------------------------------
--
-- | Unload everything *except* the given stable linkables. Only meaningful
-- when linking in memory (GHCi); for every other link mode this is a no-op.
unload :: HscEnv -> [Linkable] -> IO ()
unload hsc_env keep_linkables =
  if ghcLink (hsc_dflags hsc_env) == LinkInMemory
    then Linker.unload hsc_env keep_linkables
    else return ()
-- -----------------------------------------------------------------------------
{- |
Stability tells us which modules definitely do not need to be recompiled.
There are two main reasons for having stability:
- avoid doing a complete upsweep of the module graph in GHCi when
modules near the bottom of the tree have not changed.
- to tell GHCi when it can load object code: we can only load object code
for a module when we also load object code for all of the imports of the
module. So we need to know that we will definitely not be recompiling
any of these modules, and we can use the object code.
The stability check is as follows. Both stableObject and
stableBCO are used during the upsweep phase later.
@
stable m = stableObject m || stableBCO m
stableObject m =
all stableObject (imports m)
&& old linkable does not exist, or is == on-disk .o
&& date(on-disk .o) > date(.hs)
stableBCO m =
all stable (imports m)
&& date(BCO) > date(.hs)
@
These properties embody the following ideas:
- if a module is stable, then:
- if it has been compiled in a previous pass (present in HPT)
then it does not need to be compiled or re-linked.
- if it has not been compiled in a previous pass,
then we only need to read its .hi file from disk and
link it to produce a 'ModDetails'.
- if a modules is not stable, we will definitely be at least
re-linking, and possibly re-compiling it during the 'upsweep'.
All non-stable modules can (and should) therefore be unlinked
before the 'upsweep'.
- Note that objects are only considered stable if they only depend
on other objects. We can't link object code against byte code.
-}
-- | The pair of sets produced by 'checkStability': module names whose
-- object code is stable, and module names whose byte code is stable.
type StableModules =
  ( UniqSet ModuleName  -- stableObject
  , UniqSet ModuleName  -- stableBCO
  )
-- | Compute the sets of stable-object and stable-BCO modules (see the
-- stability note above). Folds over the topologically sorted SCCs so each
-- SCC can consult the stability of the modules it imports.
checkStability
        :: HomePackageTable   -- HPT from last compilation
        -> [SCC ModSummary]   -- current module graph (cyclic)
        -> UniqSet ModuleName -- all home modules
        -> StableModules

checkStability hpt sccs all_home_mods =
  foldl checkSCC (emptyUniqSet, emptyUniqSet) sccs
  where
   -- An SCC is stable as a unit: either all its modules become
   -- stable-object, or all become stable-BCO, or none.
   checkSCC :: StableModules -> SCC ModSummary -> StableModules
   checkSCC (stable_obj, stable_bco) scc0
     | stableObjects = (addListToUniqSet stable_obj scc_mods, stable_bco)
     | stableBCOs    = (stable_obj, addListToUniqSet stable_bco scc_mods)
     | otherwise     = (stable_obj, stable_bco)
     where
        scc = flattenSCC scc0
        scc_mods = map ms_mod_name scc
        -- A home-package import that is outside this SCC.
        home_module m =
          m `elementOfUniqSet` all_home_mods && m `notElem` scc_mods

        scc_allimps = nub (filter home_module (concatMap ms_home_allimps scc))
        -- all imports outside the current SCC, but in the home pkg

        stable_obj_imps = map (`elementOfUniqSet` stable_obj) scc_allimps
        stable_bco_imps = map (`elementOfUniqSet` stable_bco) scc_allimps

        -- Object stability requires *object*-stable imports; BCO stability
        -- only requires each import to be stable in either form.
        stableObjects =
           and stable_obj_imps
           && all object_ok scc

        stableBCOs =
           and (zipWith (||) stable_obj_imps stable_bco_imps)
           && all bco_ok scc

        -- An on-disk object is usable if it is at least as new as the
        -- source AND matches any linkable we already have in memory.
        object_ok ms
          | gopt Opt_ForceRecomp (ms_hspp_opts ms) = False
          | Just t <- ms_obj_date ms  =  t >= ms_hs_date ms
                                         && same_as_prev t
          | otherwise = False
          where
             same_as_prev t = case lookupHpt hpt (ms_mod_name ms) of
                                Just hmi  | Just l <- hm_linkable hmi
                                 -> isObjectLinkable l && t == linkableTime l
                                _other -> True
                -- why '>=' rather than '>' above? If the filesystem stores
                -- times to the nearest second, we may occasionally find that
                -- the object & source have the same modification time,
                -- especially if the source was automatically generated
                -- and compiled. Using >= is slightly unsafe, but it matches
                -- make's behaviour.
                --
                -- But see #5527, where someone ran into this and it caused
                -- a problem.

        -- A BCO is usable if it is in the HPT, is not object code, and is
        -- at least as new as the source.
        bco_ok ms
          | gopt Opt_ForceRecomp (ms_hspp_opts ms) = False
          | otherwise = case lookupHpt hpt (ms_mod_name ms) of
                Just hmi  | Just l <- hm_linkable hmi ->
                        not (isObjectLinkable l) &&
                        linkableTime l >= ms_hs_date ms
                _other  -> False
{- Parallel Upsweep
-
- The parallel upsweep attempts to concurrently compile the modules in the
- compilation graph using multiple Haskell threads.
-
- The Algorithm
-
- A Haskell thread is spawned for each module in the module graph, waiting for
- its direct dependencies to finish building before it itself begins to build.
-
- Each module is associated with an initially empty MVar that stores the
- result of that particular module's compile. If the compile succeeded, then
- the HscEnv (synchronized by an MVar) is updated with the fresh HMI of that
- module, and the module's HMI is deleted from the old HPT (synchronized by an
- IORef) to save space.
-
- Instead of immediately outputting messages to the standard handles, all
- compilation output is deferred to a per-module TQueue. A QSem is used to
- limit the number of workers that are compiling simultaneously.
-
- Meanwhile, the main thread sequentially loops over all the modules in the
- module graph, outputting the messages stored in each module's TQueue.
-}
-- | Each module is given a unique 'LogQueue' to redirect compilation messages
-- to. A 'Nothing' value contains the result of compilation, and denotes the
-- end of the message queue.
data LogQueue = LogQueue !(IORef [Maybe (WarnReason, Severity, SrcSpan, PprStyle, MsgDoc)])
                         -- ^ Queued messages, most recent first (writers
                         -- cons onto the front; the reader reverses).
                         !(MVar ())
                         -- ^ Wake-up signal: writers 'tryPutMVar', the
                         -- printing thread 'takeMVar' before draining.

-- | The graph of modules to compile and their corresponding result 'MVar' and
-- 'LogQueue'.
type CompilationGraph = [(ModSummary, MVar SuccessFlag, LogQueue)]
-- | Build a 'CompilationGraph' out of a list of strongly-connected modules,
-- also returning the first, if any, encountered module cycle. Traversal
-- stops at the first cyclic SCC; anything after it is discarded.
buildCompGraph :: [SCC ModSummary] -> IO (CompilationGraph, Maybe [ModSummary])
buildCompGraph [] = return ([], Nothing)
buildCompGraph (AcyclicSCC ms : sccs) = do
  result_var <- newEmptyMVar
  msg_ref    <- newIORef []
  wakeup     <- newEmptyMVar
  (rest, mb_cycle) <- buildCompGraph sccs
  return ((ms, result_var, LogQueue msg_ref wakeup) : rest, mb_cycle)
buildCompGraph (CyclicSCC mss : _) = return ([], Just mss)
-- | A Module and whether it is a boot module.
type BuildModule = (Module, IsBoot)

-- | 'Bool'-like flag indicating if a module is a boot module or not. We need
-- to treat boot modules specially when building compilation graphs, since
-- they break cycles. Regular source files and signature files are treated
-- equivalently.
data IsBoot = IsBoot | NotBoot
    deriving (Ord, Eq, Show, Read)
-- | Tests if an 'HscSource' is a boot file, primarily for constructing
-- elements of 'BuildModule'. Only 'HsBootFile' maps to 'IsBoot'; regular
-- sources and signature files both map to 'NotBoot'.
hscSourceToIsBoot :: HscSource -> IsBoot
hscSourceToIsBoot src = case src of
  HsBootFile -> IsBoot
  _          -> NotBoot
-- | Pair a module with its boot-ness, forming the key used in the parallel
-- upsweep's dependency map.
mkBuildModule :: ModSummary -> BuildModule
mkBuildModule ms =
  let boot | isBootSummary ms = IsBoot
           | otherwise        = NotBoot
  in (ms_mod ms, boot)
-- | The entry point to the parallel upsweep.
--
-- See also the simpler, sequential 'upsweep'.
parUpsweep
    :: GhcMonad m
    => Int
    -- ^ The number of workers we wish to run in parallel
    -> Maybe Messager
    -> HomePackageTable
    -> StableModules
    -> (HscEnv -> IO ())
    -> [SCC ModSummary]
    -> m (SuccessFlag,
          [ModSummary])
parUpsweep n_jobs mHscMessage old_hpt stable_mods cleanup sccs = do
    hsc_env <- getSession
    let dflags = hsc_dflags hsc_env

    -- Backpack typechecking is not implemented for the parallel driver, so
    -- fail loudly rather than produce wrong results.
    when (not (null (unitIdsToCheck dflags))) $
      throwGhcException (ProgramError "Backpack typechecking not supported with -j")

    -- The bits of shared state we'll be using:

    -- The global HscEnv is updated with the module's HMI when a module
    -- successfully compiles.
    hsc_env_var <- liftIO $ newMVar hsc_env

    -- The old HPT is used for recompilation checking in upsweep_mod. When a
    -- module successfully gets compiled, its HMI is pruned from the old HPT.
    old_hpt_var <- liftIO $ newIORef old_hpt

    -- What we use to limit parallelism with.
    par_sem <- liftIO $ newQSem n_jobs

    -- Bump the RTS capability count to n_jobs for the duration of the
    -- upsweep, but only if the user has not set it themselves (/= 1).
    let updNumCapabilities = liftIO $ do
            n_capabilities <- getNumCapabilities
            unless (n_capabilities /= 1) $ setNumCapabilities n_jobs
            return n_capabilities
    -- Reset the number of capabilities once the upsweep ends.
    let resetNumCapabilities orig_n = liftIO $ setNumCapabilities orig_n

    gbracket updNumCapabilities resetNumCapabilities $ \_ -> do

      -- Sync the global session with the latest HscEnv once the upsweep ends.
      let finallySyncSession io = io `gfinally` do
            hsc_env <- liftIO $ readMVar hsc_env_var
            setSession hsc_env

      finallySyncSession $ do

        -- Build the compilation graph out of the list of SCCs. Module cycles
        -- are handled at the very end, after some useful work gets done. Note
        -- that this list is topologically sorted (by virtue of 'sccs' being
        -- sorted so).
        (comp_graph,cycle) <- liftIO $ buildCompGraph sccs
        let comp_graph_w_idx = zip comp_graph [1..]

        -- The list of all loops in the compilation graph.
        -- NB: For convenience, the last module of each loop (aka the module
        -- that finishes the loop) is prepended to the beginning of the loop.
        let graph = map fstOf3 (reverse comp_graph)
            boot_modules = mkModuleSet [ms_mod ms | ms <- graph, isBootSummary ms]
            comp_graph_loops = go graph boot_modules
              where
                -- Once a boot module has been seen, its loop is complete,
                -- so remove it from the pending boot-module set.
                remove ms bm
                  | isBootSummary ms = delModuleSet bm (ms_mod ms)
                  | otherwise = bm
                go [] _ = []
                go mg@(ms:mss) boot_modules
                  | Just loop <- getModLoop ms mg (`elemModuleSet` boot_modules)
                  = map mkBuildModule (ms:loop) : go mss (remove ms boot_modules)
                  | otherwise
                  = go mss (remove ms boot_modules)

        -- Build a Map out of the compilation graph with which we can
        -- efficiently look up the result MVar associated with a particular
        -- home module.
        let home_mod_map :: Map BuildModule (MVar SuccessFlag, Int)
            home_mod_map =
              Map.fromList [ (mkBuildModule ms, (mvar, idx))
                           | ((ms,mvar,_),idx) <- comp_graph_w_idx ]

        liftIO $ label_self "main --make thread"

        -- For each module in the module graph, spawn a worker thread that
        -- will compile this module.
        let { spawnWorkers = forM comp_graph_w_idx $ \((mod,!mvar,!log_queue),!mod_idx) ->
                forkIOWithUnmask $ \unmask -> do
                  liftIO $ label_self $ unwords
                    [ "worker --make thread"
                    , "for module"
                    , show (moduleNameString (ms_mod_name mod))
                    , "number"
                    , show mod_idx
                    ]
                  -- Replace the default log_action with one that writes each
                  -- message to the module's log_queue. The main thread will
                  -- deal with synchronously printing these messages.
                  --
                  -- Use a local filesToClean var so that we can clean up
                  -- intermediate files in a timely fashion (as soon as
                  -- compilation for that module is finished) without having
                  -- to worry about accidentally deleting a simultaneous
                  -- compile's important files.
                  lcl_files_to_clean <- newIORef emptyFilesToClean
                  let lcl_dflags = dflags { log_action = parLogAction log_queue
                                          , filesToClean = lcl_files_to_clean }

                  -- Unmask asynchronous exceptions and perform the
                  -- thread-local work to compile the module (see
                  -- parUpsweep_one).
                  m_res <- try $ unmask $ prettyPrintGhcErrors lcl_dflags $
                          parUpsweep_one mod home_mod_map comp_graph_loops
                                         lcl_dflags mHscMessage cleanup
                                         par_sem hsc_env_var old_hpt_var
                                         stable_mods mod_idx (length sccs)

                  res <- case m_res of
                      Right flag -> return flag
                      Left exc -> do
                          -- Don't print ThreadKilled exceptions: they are
                          -- used to kill the worker thread in the event of
                          -- a user interrupt, and the user doesn't have to
                          -- be informed about that.
                          when (fromException exc /= Just ThreadKilled)
                               (errorMsg lcl_dflags (text (show exc)))
                          return Failed

                  -- Populate the result MVar.
                  putMVar mvar res

                  -- Write the end marker to the message queue, telling the
                  -- main thread that it can stop waiting for messages from
                  -- this particular compile.
                  writeLogQueue log_queue Nothing

                  -- Add the remaining files that weren't cleaned up to the
                  -- global filesToClean ref, for cleanup later.
                  -- files_kept <- readIORef (filesToClean lcl_dflags)
                  -- addFilesToClean dflags files_kept
                  FilesToClean
                    { ftcCurrentModule = cm_files
                    , ftcGhcSession = gs_files
                    } <- readIORef (filesToClean lcl_dflags)
                  addFilesToClean dflags TFL_CurrentModule $ Set.toList cm_files
                  addFilesToClean dflags TFL_GhcSession $ Set.toList gs_files

            -- Kill all the workers, masking interrupts (since killThread is
            -- interruptible). XXX: This is not ideal.
            ; killWorkers = uninterruptibleMask_ . mapM_ killThread }

        -- Spawn the workers, making sure to kill them later. Collect the
        -- results of each compile.
        results <- liftIO $ bracket spawnWorkers killWorkers $ \_ ->
          -- Loop over each module in the compilation graph in order,
          -- printing each message from its log_queue.
          forM comp_graph $ \(mod,mvar,log_queue) -> do
            printLogs dflags log_queue
            result <- readMVar mvar
            if succeeded result then return (Just mod) else return Nothing

        -- Collect and return the ModSummaries of all the successful compiles.
        -- NB: Reverse this list to maintain output parity with the sequential
        -- upsweep.
        let ok_results = reverse (catMaybes results)

        -- Handle any cycle in the original compilation graph and return the
        -- result of the upsweep.
        case cycle of
            Just mss -> do
                liftIO $ fatalErrorMsg dflags (cyclicModuleErr mss)
                return (Failed,ok_results)
            Nothing  -> do
                let success_flag = successIf (all isJust results)
                return (success_flag,ok_results)

  where
    -- Cons a message onto the queue and ping the reader's wake-up MVar.
    writeLogQueue :: LogQueue -> Maybe (WarnReason,Severity,SrcSpan,PprStyle,MsgDoc) -> IO ()
    writeLogQueue (LogQueue ref sem) msg = do
        atomicModifyIORef ref $ \msgs -> (msg:msgs,())
        _ <- tryPutMVar sem ()
        return ()

    -- The log_action callback that is used to synchronize messages from a
    -- worker thread.
    parLogAction :: LogQueue -> LogAction
    parLogAction log_queue _dflags !reason !severity !srcSpan !style !msg = do
        writeLogQueue log_queue (Just (reason,severity,srcSpan,style,msg))

    -- Print each message from the log_queue using the log_action from the
    -- session's DynFlags. Blocks on the wake-up MVar, drains the queue, and
    -- repeats until the Nothing end marker is seen.
    printLogs :: DynFlags -> LogQueue -> IO ()
    printLogs !dflags (LogQueue ref sem) = read_msgs
      where read_msgs = do
                takeMVar sem
                msgs <- atomicModifyIORef ref $ \xs -> ([], reverse xs)
                print_loop msgs

            print_loop [] = read_msgs
            print_loop (x:xs) = case x of
                Just (reason,severity,srcSpan,style,msg) -> do
                    putLogMsg dflags reason severity srcSpan style msg
                    print_loop xs
                -- Exit the loop once we encounter the end marker.
                Nothing -> return ()
-- | The interruptible subset of the worker threads' work: wait for all
-- dependencies, compile the module under the parallelism semaphore, then
-- publish the result into the shared HscEnv (re-typechecking a loop if this
-- module finishes one).
parUpsweep_one
    :: ModSummary
    -- ^ The module we wish to compile
    -> Map BuildModule (MVar SuccessFlag, Int)
    -- ^ The map of home modules and their result MVar
    -> [[BuildModule]]
    -- ^ The list of all module loops within the compilation graph.
    -> DynFlags
    -- ^ The thread-local DynFlags
    -> Maybe Messager
    -- ^ The messager
    -> (HscEnv -> IO ())
    -- ^ The callback for cleaning up intermediate files
    -> QSem
    -- ^ The semaphore for limiting the number of simultaneous compiles
    -> MVar HscEnv
    -- ^ The MVar that synchronizes updates to the global HscEnv
    -> IORef HomePackageTable
    -- ^ The old HPT
    -> StableModules
    -- ^ Sets of stable objects and BCOs
    -> Int
    -- ^ The index of this module
    -> Int
    -- ^ The total number of modules
    -> IO SuccessFlag
    -- ^ The result of this compile
parUpsweep_one mod home_mod_map comp_graph_loops lcl_dflags mHscMessage cleanup par_sem
               hsc_env_var old_hpt_var stable_mods mod_index num_mods = do

    let this_build_mod = mkBuildModule mod

    let home_imps = map unLoc $ ms_home_imps mod
    let home_src_imps = map unLoc $ ms_home_srcimps mod

    -- All the textual imports of this module.
    let textual_deps = Set.fromList $ mapFst (mkModule (thisPackage lcl_dflags)) $
                            zip home_imps     (repeat NotBoot) ++
                            zip home_src_imps (repeat IsBoot)

    -- Dealing with module loops
    -- ~~~~~~~~~~~~~~~~~~~~~~~~~
    --
    -- Not only do we have to deal with explicit textual dependencies, we also
    -- have to deal with implicit dependencies introduced by import cycles
    -- that are broken by an hs-boot file. We have to ensure that:
    --
    -- 1. A module that breaks a loop must depend on all the modules in the
    --    loop (transitively or otherwise). This is normally always fulfilled
    --    by the module's textual dependencies except in degenerate loops,
    --    e.g.:
    --
    --    A.hs imports B.hs-boot
    --    B.hs doesn't import A.hs
    --    C.hs imports A.hs, B.hs
    --
    --    In this scenario, getModLoop will detect the module loop [A,B] but
    --    the loop finisher B doesn't depend on A. So we have to explicitly
    --    add A in as a dependency of B when we are compiling B.
    --
    -- 2. A module that depends on a module in an external loop can't proceed
    --    until the entire loop is re-typechecked.
    --
    -- These two invariants have to be maintained to correctly build a
    -- compilation graph with one or more loops.

    -- The loop that this module will finish. After this module successfully
    -- compiles, this loop is going to get re-typechecked.
    let finish_loop = listToMaybe
            [ tail loop | loop <- comp_graph_loops
                        , head loop == this_build_mod ]

    -- If this module finishes a loop then it must depend on all the other
    -- modules in that loop because the entire module loop is going to be
    -- re-typechecked once this module gets compiled. These extra dependencies
    -- are this module's "internal" loop dependencies, because this module is
    -- inside the loop in question.
    let int_loop_deps = Set.fromList $
            case finish_loop of
                Nothing   -> []
                Just loop -> filter (/= this_build_mod) loop

    -- If this module depends on a module within a loop then it must wait for
    -- that loop to get re-typechecked, i.e. it must wait on the module that
    -- finishes that loop. These extra dependencies are this module's
    -- "external" loop dependencies, because this module is outside of the
    -- loop(s) in question.
    let ext_loop_deps = Set.fromList
            [ head loop | loop <- comp_graph_loops
                        , any (`Set.member` textual_deps) loop
                        , this_build_mod `notElem` loop ]

    let all_deps = foldl1 Set.union [textual_deps, int_loop_deps, ext_loop_deps]

    -- All of the module's home-module dependencies.
    let home_deps_with_idx =
            [ home_dep | dep <- Set.toList all_deps
                       , Just home_dep <- [Map.lookup dep home_mod_map] ]

    -- Sort the list of dependencies in reverse-topological order. This way,
    -- by the time we get woken up by the result of an earlier dependency,
    -- subsequent dependencies are more likely to have finished. This step
    -- effectively reduces the number of MVars that each thread blocks on.
    let home_deps = map fst $ sortBy (flip (comparing snd)) home_deps_with_idx

    -- Wait for the all the module's dependencies to finish building.
    deps_ok <- allM (fmap succeeded . readMVar) home_deps

    -- We can't build this module if any of its dependencies failed to build.
    if not deps_ok
      then return Failed
      else do
        -- Any hsc_env at this point is OK to use since we only really require
        -- that the HPT contains the HMIs of our dependencies.
        hsc_env <- readMVar hsc_env_var
        old_hpt <- readIORef old_hpt_var

        let logger err = printBagOfErrors lcl_dflags (srcErrorMessages err)

        -- Limit the number of parallel compiles.
        let withSem sem = bracket_ (waitQSem sem) (signalQSem sem)
        mb_mod_info <- withSem par_sem $
            handleSourceError (\err -> do logger err; return Nothing) $ do
                -- Have the ModSummary and HscEnv point to our local
                -- log_action and filesToClean var.
                let lcl_mod = localize_mod mod
                let lcl_hsc_env = localize_hsc_env hsc_env

                -- Compile the module.
                mod_info <- upsweep_mod lcl_hsc_env mHscMessage old_hpt stable_mods lcl_mod
                                        mod_index num_mods
                return (Just mod_info)

        case mb_mod_info of
            Nothing -> return Failed
            Just mod_info -> do
                let this_mod = ms_mod_name mod

                -- Prune the old HPT unless this is an hs-boot module.
                unless (isBootSummary mod) $
                    atomicModifyIORef old_hpt_var $ \old_hpt ->
                        (delFromHpt old_hpt this_mod, ())

                -- Update and fetch the global HscEnv.
                lcl_hsc_env' <- modifyMVar hsc_env_var $ \hsc_env -> do
                    let hsc_env' = hsc_env
                                     { hsc_HPT = addToHpt (hsc_HPT hsc_env)
                                                          this_mod mod_info }
                    -- If this module is a loop finisher, now is the time to
                    -- re-typecheck the loop.
                    hsc_env'' <- case finish_loop of
                        Nothing   -> return hsc_env'
                        Just loop -> typecheckLoop lcl_dflags hsc_env' $
                                     map (moduleName . fst) loop
                    return (hsc_env'', localize_hsc_env hsc_env'')

                -- Clean up any intermediate files.
                cleanup lcl_hsc_env'
                return Succeeded

  where
    -- Point a ModSummary's DynFlags at this worker's log_action and
    -- filesToClean.
    localize_mod mod
        = mod { ms_hspp_opts = (ms_hspp_opts mod)
                 { log_action = log_action lcl_dflags
                 , filesToClean = filesToClean lcl_dflags } }

    -- Likewise for an HscEnv's DynFlags.
    localize_hsc_env hsc_env
        = hsc_env { hsc_dflags = (hsc_dflags hsc_env)
                     { log_action = log_action lcl_dflags
                     , filesToClean = filesToClean lcl_dflags } }
-- -----------------------------------------------------------------------------
--
-- | The upsweep
--
-- This is where we compile each module in the module graph, in a pass
-- from the bottom to the top of the graph.
--
-- There better had not be any cyclic groups here -- we check for them.
upsweep
    :: GhcMonad m
    => Maybe Messager
    -> HomePackageTable            -- ^ HPT from last time round (pruned)
    -> StableModules               -- ^ stable modules (see checkStability)
    -> (HscEnv -> IO ())           -- ^ How to clean up unwanted tmp files
    -> [SCC ModSummary]            -- ^ Mods to do (the worklist)
    -> m (SuccessFlag,
          [ModSummary])
       -- ^ Returns:
       --
       --  1. A flag whether the complete upsweep was successful.
       --  2. The 'HscEnv' in the monad has an updated HPT
       --  3. A list of modules which succeeded loading.
upsweep mHscMessage old_hpt stable_mods cleanup sccs = do
   dflags <- getSessionDynFlags
   (res, done) <- upsweep' old_hpt emptyMG sccs 1 (length sccs) cleanup
                           (unitIdsToCheck dflags) done_holes
   return (res, reverse $ mgModSummaries done)
 where
  done_holes = emptyUniqSet

  -- Worker: threads the shrinking old HPT, the accumulated module graph,
  -- the still-pending unit ids, and the set of instantiated signature holes
  -- through the worklist.
  upsweep'
    :: GhcMonad m
    => HomePackageTable
    -> ModuleGraph
    -> [SCC ModSummary]
    -> Int
    -> Int
    -> (HscEnv -> IO ())
    -> [UnitId]
    -> UniqSet ModuleName
    -> m (SuccessFlag, ModuleGraph)
  -- Worklist exhausted: check any remaining unit ids, then succeed.
  upsweep' _old_hpt done
     [] _ _ _ uids_to_check _
   = do hsc_env <- getSession
        liftIO . runHsc hsc_env $ mapM_ (ioMsgMaybe . tcRnCheckUnitId hsc_env) uids_to_check
        return (Succeeded, done)

  -- A module cycle is a fatal error: report it and give up.
  upsweep' _old_hpt done
     (CyclicSCC ms:_) _ _ _ _ _
   = do dflags <- getSessionDynFlags
        liftIO $ fatalErrorMsg dflags (cyclicModuleErr ms)
        return (Failed, done)

  upsweep' old_hpt done
     (AcyclicSCC mod:mods) mod_index nmods cleanup uids_to_check done_holes
   = do -- putStrLn ("UPSWEEP_MOD: hpt = " ++
        --           show (map (moduleUserString.moduleName.mi_module.hm_iface)
        --                     (moduleEnvElts (hsc_HPT hsc_env)))
        let logger _mod = defaultWarnErrLogger

        hsc_env <- getSession

        -- TODO: Cache this, so that we don't repeatedly re-check
        -- our imports when you run --make.
        let (ready_uids, uids_to_check')
                = partition (\uid -> isEmptyUniqDSet
                    (unitIdFreeHoles uid `uniqDSetMinusUniqSet` done_holes))
                    uids_to_check
            -- Compiling an hsig file fills one more hole.
            done_holes'
                | ms_hsc_src mod == HsigFile
                = addOneToUniqSet done_holes (ms_mod_name mod)
                | otherwise = done_holes
        liftIO . runHsc hsc_env $ mapM_ (ioMsgMaybe . tcRnCheckUnitId hsc_env) ready_uids

        -- Remove unwanted tmp files between compilations
        liftIO (cleanup hsc_env)

        mb_mod_info
            <- handleSourceError
                   (\err -> do logger mod (Just err); return Nothing) $ do
                 mod_info <- liftIO $ upsweep_mod hsc_env mHscMessage old_hpt stable_mods
                                                  mod mod_index nmods
                 logger mod Nothing -- log warnings
                 return (Just mod_info)

        case mb_mod_info of
          Nothing -> return (Failed, done)
          Just mod_info -> do
                let this_mod = ms_mod_name mod

                    -- Add new info to hsc_env
                    hpt1     = addToHpt (hsc_HPT hsc_env) this_mod mod_info
                    hsc_env1 = hsc_env { hsc_HPT = hpt1 }

                    -- Space-saving: delete the old HPT entry
                    -- for mod BUT if mod is a hs-boot
                    -- node, don't delete it. For the
                    -- interface, the HPT entry is probably for the
                    -- main Haskell source file. Deleting it
                    -- would force the real module to be recompiled
                    -- every time.
                    old_hpt1 | isBootSummary mod = old_hpt
                             | otherwise = delFromHpt old_hpt this_mod

                    done' = extendMG done mod

                -- fixup our HomePackageTable after we've finished compiling
                -- a mutually-recursive loop. See reTypecheckLoop, below.
                hsc_env2 <- liftIO $ reTypecheckLoop hsc_env1 mod done'
                setSession hsc_env2

                upsweep' old_hpt1 done' mods (mod_index+1) nmods cleanup uids_to_check' done_holes'
-- | The unit ids that need a typecheck-only pass: each explicitly requested
-- package that is an instantiated (indefinite) unit, together with every
-- unit reachable through its instantiations. Deduplicated and sorted.
unitIdsToCheck :: DynFlags -> [UnitId]
unitIdsToCheck dflags = nubSort (concatMap collect roots)
  where
    roots = explicitPackages (pkgState dflags)
    collect uid
      | (_, Just indef) <- splitUnitIdInsts uid
      = uid : [ u | (_, m) <- indefUnitIdInsts indef
                  , u <- collect (moduleUnitId m) ]
      | otherwise
      = []
-- | The modification time of a module's on-disk interface file, but only
-- when we are in interface-writing-only mode; otherwise 'Nothing'.
--
-- Minor optimization: it should be harmless to check the hi file location
-- always, but it's better to avoid hitting the filesystem if possible.
maybeGetIfaceDate :: DynFlags -> ModLocation -> IO (Maybe UTCTime)
maybeGetIfaceDate dflags location
  | not (writeInterfaceOnlyMode dflags) = return Nothing
  | otherwise = modificationTimeIfExists (ml_hi_file location)
-- | Compile a single module. Always produce a Linkable for it if
-- successful. If no compilation happened, return the old Linkable.
upsweep_mod :: HscEnv
-> Maybe Messager
-> HomePackageTable
-> StableModules
-> ModSummary
-> Int -- index of module
-> Int -- total number of modules
-> IO HomeModInfo
upsweep_mod hsc_env mHscMessage old_hpt (stable_obj, stable_bco) summary mod_index nmods
= let
this_mod_name = ms_mod_name summary
this_mod = ms_mod summary
mb_obj_date = ms_obj_date summary
mb_if_date = ms_iface_date summary
obj_fn = ml_obj_file (ms_location summary)
hs_date = ms_hs_date summary
is_stable_obj = this_mod_name `elementOfUniqSet` stable_obj
is_stable_bco = this_mod_name `elementOfUniqSet` stable_bco
old_hmi = lookupHpt old_hpt this_mod_name
-- We're using the dflags for this module now, obtained by
-- applying any options in its LANGUAGE & OPTIONS_GHC pragmas.
dflags = ms_hspp_opts summary
prevailing_target = hscTarget (hsc_dflags hsc_env)
local_target = hscTarget dflags
-- If OPTIONS_GHC contains -fasm or -fllvm, be careful that
-- we don't do anything dodgy: these should only work to change
-- from -fllvm to -fasm and vice-versa, otherwise we could
-- end up trying to link object code to byte code.
target = if prevailing_target /= local_target
&& (not (isObjectTarget prevailing_target)
|| not (isObjectTarget local_target))
then prevailing_target
else local_target
-- store the corrected hscTarget into the summary
summary' = summary{ ms_hspp_opts = dflags { hscTarget = target } }
-- The old interface is ok if
-- a) we're compiling a source file, and the old HPT
-- entry is for a source file
-- b) we're compiling a hs-boot file
-- Case (b) allows an hs-boot file to get the interface of its
-- real source file on the second iteration of the compilation
-- manager, but that does no harm. Otherwise the hs-boot file
-- will always be recompiled
mb_old_iface
= case old_hmi of
Nothing -> Nothing
Just hm_info | isBootSummary summary -> Just iface
| not (mi_boot iface) -> Just iface
| otherwise -> Nothing
where
iface = hm_iface hm_info
compile_it :: Maybe Linkable -> SourceModified -> IO HomeModInfo
compile_it mb_linkable src_modified =
compileOne' Nothing mHscMessage hsc_env summary' mod_index nmods
mb_old_iface mb_linkable src_modified
compile_it_discard_iface :: Maybe Linkable -> SourceModified
-> IO HomeModInfo
compile_it_discard_iface mb_linkable src_modified =
compileOne' Nothing mHscMessage hsc_env summary' mod_index nmods
Nothing mb_linkable src_modified
-- With the HscNothing target we create empty linkables to avoid
-- recompilation. We have to detect these to recompile anyway if
-- the target changed since the last compile.
is_fake_linkable
| Just hmi <- old_hmi, Just l <- hm_linkable hmi =
null (linkableUnlinked l)
| otherwise =
-- we have no linkable, so it cannot be fake
False
implies False _ = True
implies True x = x
in
case () of
_
-- Regardless of whether we're generating object code or
-- byte code, we can always use an existing object file
-- if it is *stable* (see checkStability).
| is_stable_obj, Just hmi <- old_hmi -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "skipping stable obj mod:" <+> ppr this_mod_name)
return hmi
-- object is stable, and we have an entry in the
-- old HPT: nothing to do
| is_stable_obj, isNothing old_hmi -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling stable on-disk mod:" <+> ppr this_mod_name)
linkable <- liftIO $ findObjectLinkable this_mod obj_fn
(expectJust "upsweep1" mb_obj_date)
compile_it (Just linkable) SourceUnmodifiedAndStable
-- object is stable, but we need to load the interface
-- off disk to make a HMI.
| not (isObjectTarget target), is_stable_bco,
(target /= HscNothing) `implies` not is_fake_linkable ->
ASSERT(isJust old_hmi) -- must be in the old_hpt
let Just hmi = old_hmi in do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "skipping stable BCO mod:" <+> ppr this_mod_name)
return hmi
-- BCO is stable: nothing to do
| not (isObjectTarget target),
Just hmi <- old_hmi,
Just l <- hm_linkable hmi,
not (isObjectLinkable l),
(target /= HscNothing) `implies` not is_fake_linkable,
linkableTime l >= ms_hs_date summary -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling non-stable BCO mod:" <+> ppr this_mod_name)
compile_it (Just l) SourceUnmodified
-- we have an old BCO that is up to date with respect
-- to the source: do a recompilation check as normal.
-- When generating object code, if there's an up-to-date
-- object file on the disk, then we can use it.
-- However, if the object file is new (compared to any
-- linkable we had from a previous compilation), then we
-- must discard any in-memory interface, because this
-- means the user has compiled the source file
-- separately and generated a new interface, that we must
-- read from the disk.
--
| isObjectTarget target,
Just obj_date <- mb_obj_date,
obj_date >= hs_date -> do
case old_hmi of
Just hmi
| Just l <- hm_linkable hmi,
isObjectLinkable l && linkableTime l == obj_date -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling mod with new on-disk obj:" <+> ppr this_mod_name)
compile_it (Just l) SourceUnmodified
_otherwise -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling mod with new on-disk obj2:" <+> ppr this_mod_name)
linkable <- liftIO $ findObjectLinkable this_mod obj_fn obj_date
compile_it_discard_iface (Just linkable) SourceUnmodified
-- See Note [Recompilation checking when typechecking only]
| writeInterfaceOnlyMode dflags,
Just if_date <- mb_if_date,
if_date >= hs_date -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "skipping tc'd mod:" <+> ppr this_mod_name)
compile_it Nothing SourceUnmodified
_otherwise -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling mod:" <+> ppr this_mod_name)
compile_it Nothing SourceModified
-- Note [Recompilation checking when typechecking only]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- If we are compiling with -fno-code -fwrite-interface, there won't
-- be any object code that we can compare against, nor should there
-- be: we're *just* generating interface files. In this case, we
-- want to check if the interface file is new, in lieu of the object
-- file. See also Trac #9243.
-- | Restrict the home package table to the given module names,
-- silently dropping any name that has no entry in the table.
retainInTopLevelEnvs :: [ModuleName] -> HomePackageTable -> HomePackageTable
retainInTopLevelEnvs keep_these hpt = listToHpt (mapMaybe keep keep_these)
  where
    keep name = case lookupHpt hpt name of
                  Just info -> Just (name, info)
                  Nothing   -> Nothing
-- ---------------------------------------------------------------------------
-- Typecheck module loops
{-
See bug #930. This code fixes a long-standing bug in --make. The
problem is that when compiling the modules *inside* a loop, a data
type that is only defined at the top of the loop looks opaque; but
after the loop is done, the structure of the data type becomes
apparent.
The difficulty is then that two different bits of code have
different notions of what the data type looks like.
The idea is that after we compile a module which also has an .hs-boot
file, we re-generate the ModDetails for each of the modules that
depends on the .hs-boot file, so that everyone points to the proper
TyCons, Ids etc. defined by the real module, not the boot module.
Fortunately re-generating a ModDetails from a ModIface is easy: the
function TcIface.typecheckIface does exactly that.
Picking the modules to re-typecheck is slightly tricky. Starting from
the module graph consisting of the modules that have already been
compiled, we reverse the edges (so they point from the imported module
to the importing module), and depth-first-search from the .hs-boot
node. This gives us all the modules that depend transitively on the
.hs-boot module, and those are exactly the modules that we need to
re-typecheck.
Following this fix, GHC can compile itself with --make -O2.
-}
-- | If compiling @ms@ closes an .hs-boot loop, re-typecheck every
-- non-boot module in that loop (see the Note above); otherwise return
-- the session unchanged.
reTypecheckLoop :: HscEnv -> ModSummary -> ModuleGraph -> IO HscEnv
reTypecheckLoop hsc_env ms graph =
    case getModLoop ms summaries appearsAsBoot of
      Nothing   -> return hsc_env
      Just loop ->
        let sources = [ l | l <- loop, not (isBootSummary l) ]
        in typecheckLoop (hsc_dflags hsc_env) hsc_env (map ms_mod_name sources)
  where
    summaries       = mgModSummaries graph
    appearsAsBoot m = m `elemModuleSet` mgBootModules graph
-- | The modules that transitively depend on @ms@'s .hs-boot file, if
-- @ms@ is a source module that also appears as a boot module in the graph.
getModLoop :: ModSummary
           -> [ModSummary]
           -> (Module -> Bool) -- check if a module appears as a boot module in 'graph'
           -> Maybe [ModSummary]
getModLoop ms graph appearsAsBoot
  | isBootSummary ms                = Nothing
  | not (appearsAsBoot (ms_mod ms)) = Nothing
  | otherwise                       = Just (reachableBackwards (ms_mod_name ms) graph)
-- | Re-typecheck the interfaces of every module in a boot-file loop in a
-- single 'fixIO' knot, so that all the regenerated ModDetails point at the
-- final, shared HPT rather than at stale boot-file TyCons.
typecheckLoop :: DynFlags -> HscEnv -> [ModuleName] -> IO HscEnv
typecheckLoop dflags hsc_env mods = do
  debugTraceMsg dflags 2 $
     text "Re-typechecking loop: " <> ppr mods
  new_hpt <-
    fixIO $ \new_hpt -> do
      -- 'new_hpt' here is the *final* table fed back by fixIO; it may be
      -- consulted lazily while typechecking the interfaces below.
      let new_hsc_env = hsc_env{ hsc_HPT = new_hpt }
      mds <- initIfaceCheck new_hsc_env $
                mapM (typecheckIface . hm_iface) hmis
      -- NB: this 'let' deliberately shadows the lambda-bound 'new_hpt';
      -- it builds the table whose value fixIO ties back to the binding
      -- above (classic knot-tying).
      let new_hpt = addListToHpt old_hpt
                        (zip mods [ hmi{ hm_details = details }
                                  | (hmi, details) <- zip hmis mds ])
      return new_hpt
  return hsc_env{ hsc_HPT = new_hpt }
  where
    old_hpt = hsc_HPT hsc_env
    -- Every module in the loop must already be present in the old HPT.
    hmis = map (expectJust "typecheckLoop" . lookupHpt old_hpt) mods
-- | All summaries that (transitively) import the .hs-boot node of @mod@,
-- computed by searching the transposed module graph from that node.
reachableBackwards :: ModuleName -> [ModSummary] -> [ModSummary]
reachableBackwards mod summaries =
    map summaryNodeSummary (reachableG (transposeG graph) root)
  where -- the rest just sets up the graph:
    (graph, lookup_node) = moduleGraphNodes False summaries
    root = expectJust "reachableBackwards" (lookup_node HsBootFile mod)
-- ---------------------------------------------------------------------------
--
-- | Topological sort of the module graph
topSortModuleGraph
          :: Bool
          -- ^ Drop hi-boot nodes? (see below)
          -> ModuleGraph
          -> Maybe ModuleName
             -- ^ Root module name.  If @Nothing@, use the full graph.
          -> [SCC ModSummary]
-- ^ Calculate SCCs of the module graph, possibly dropping the hi-boot nodes
-- The resulting list of strongly-connected-components is in topologically
-- sorted order, starting with the module(s) at the bottom of the
-- dependency graph (ie compile them first) and ending with the ones at
-- the top.
--
-- Drop hi-boot nodes (first boolean arg)?
--
-- - @False@: treat the hi-boot summaries as nodes of the graph,
--   so the graph must be acyclic
--
-- - @True@: eliminate the hi-boot nodes, and instead pretend
--   that a source-import of Foo is an import of Foo
--   The resulting graph has no hi-boot nodes, but can be cyclic
topSortModuleGraph drop_hs_boot_nodes module_graph mb_root_mod
  = map (fmap summaryNodeSummary) $ stronglyConnCompG initial_graph
  where
    summaries = mgModSummaries module_graph
    -- stronglyConnCompG flips the original order, so if we reverse
    -- the summaries we get a stable topological sort.
    (graph, lookup_node) = moduleGraphNodes drop_hs_boot_nodes (reverse summaries)

    initial_graph = case mb_root_mod of
        Nothing -> graph
        Just root_mod ->
            -- restrict the graph to just those modules reachable from
            -- the specified module.  We do this by building a graph with
            -- the full set of nodes, and determining the reachable set from
            -- the specified node.
            let root | Just node <- lookup_node HsSrcFile root_mod
                     , graph `hasVertexG` node
                     = node
                     | otherwise
                     = throwGhcException (ProgramError "module does not exist")
                -- 'seq root' forces the lookup, so a missing root is
                -- reported even if the reachable set is never demanded.
            in graphFromEdgedVertices (seq root (reachableG graph root))
-- A graph node: the summary, its Int key, and the keys of its out-edges.
type SummaryNode = (ModSummary, Int, [Int])

summaryNodeKey :: SummaryNode -> Int
summaryNodeKey (_, k, _) = k

summaryNodeSummary :: SummaryNode -> ModSummary
summaryNodeSummary (s, _, _) = s
-- | Build the dependency graph for the given summaries, returning it
-- together with a lookup function from (source kind, module name) to
-- the corresponding node.
moduleGraphNodes :: Bool -> [ModSummary]
  -> (Graph SummaryNode, HscSource -> ModuleName -> Maybe SummaryNode)
moduleGraphNodes drop_hs_boot_nodes summaries = (graphFromEdgedVertices nodes, lookup_node)
  where
    numbered_summaries = zip summaries [1..]

    lookup_node :: HscSource -> ModuleName -> Maybe SummaryNode
    lookup_node hs_src mod = Map.lookup (mod, hscSourceToIsBoot hs_src) node_map

    lookup_key :: HscSource -> ModuleName -> Maybe Int
    lookup_key hs_src mod = fmap summaryNodeKey (lookup_node hs_src mod)

    node_map :: NodeMap SummaryNode
    node_map = Map.fromList [ ((moduleName (ms_mod s),
                                hscSourceToIsBoot (ms_hsc_src s)), node)
                            | node@(s, _, _) <- nodes ]

    -- We use integers as the keys for the SCC algorithm
    nodes :: [SummaryNode]
    nodes = [ (s, key, out_keys)
            | (s, key) <- numbered_summaries
              -- Drop the hi-boot ones if told to do so
            , not (isBootSummary s && drop_hs_boot_nodes)
            , let out_keys = out_edge_keys hs_boot_key (map unLoc (ms_home_srcimps s)) ++
                             out_edge_keys HsSrcFile   (map unLoc (ms_home_imps s)) ++
                             (-- see [boot-edges] below
                              if drop_hs_boot_nodes || ms_hsc_src s == HsBootFile
                              then []
                              else case lookup_key HsBootFile (ms_mod_name s) of
                                     Nothing -> []
                                     Just k  -> [k]) ]

    -- [boot-edges] if this is a .hs and there is an equivalent
    -- .hs-boot, add a link from the former to the latter.  This
    -- has the effect of detecting bogus cases where the .hs-boot
    -- depends on the .hs, by introducing a cycle.  Additionally,
    -- it ensures that we will always process the .hs-boot before
    -- the .hs, and so the HomePackageTable will always have the
    -- most up to date information.

    -- Drop hs-boot nodes by using HsSrcFile as the key
    hs_boot_key | drop_hs_boot_nodes = HsSrcFile
                | otherwise          = HsBootFile

    -- Missing targets (e.g. package modules) simply produce no edge.
    out_edge_keys :: HscSource -> [ModuleName] -> [Int]
    out_edge_keys hi_boot ms = mapMaybe (lookup_key hi_boot) ms
        -- If we want keep_hi_boot_nodes, then we do lookup_key with
        -- IsBoot; else NotBoot
-- The nodes of the graph are keyed by (mod, is boot?) pairs
-- NB: hsig files show up as *normal* nodes (not boot!), since they don't
-- participate in cycles (for now)
type NodeKey   = (ModuleName, IsBoot)
type NodeMap a = Map.Map NodeKey a

-- | The graph key of a summary: its module name paired with its boot-ness.
msKey :: ModSummary -> NodeKey
msKey (ModSummary { ms_mod = mod, ms_hsc_src = boot })
    = (moduleName mod, hscSourceToIsBoot boot)

mkNodeMap :: [ModSummary] -> NodeMap ModSummary
mkNodeMap summaries = Map.fromList [ (msKey s, s) | s <- summaries]

nodeMapElts :: NodeMap a -> [a]
nodeMapElts = Map.elems
-- | If there are {-# SOURCE #-} imports between strongly connected
-- components in the topological sort, then those imports can
-- definitely be replaced by ordinary non-SOURCE imports: if SOURCE
-- were necessary, then the edge would be part of a cycle.
warnUnnecessarySourceImports :: GhcMonad m => [SCC ModSummary] -> m ()
warnUnnecessarySourceImports sccs = do
    dflags <- getDynFlags
    let warnings = do
          scc <- sccs
          let members    = flattenSCC scc
              cycle_mods = map ms_mod_name members
          m <- members
          i <- ms_home_srcimps m
          -- a SOURCE import pointing outside its own SCC is unnecessary
          if unLoc i `elem` cycle_mods then [] else [mk_warning dflags i]
    logWarnings (listToBag warnings)
  where
    mk_warning :: DynFlags -> Located ModuleName -> WarnMsg
    mk_warning dflags (L loc mod) =
      mkPlainErrMsg dflags loc
        (ptext (sLit "Warning: {-# SOURCE #-} unnecessary in import of ")
         <+> quotes (ppr mod))
-- | Return the successful summaries, or throw all collected errors at
-- once if any summary failed.
reportImportErrors :: MonadIO m => [Either ErrMsg b] -> m [b]
reportImportErrors xs =
    case partitionEithers xs of
      ([], oks)  -> return oks
      (errs, _)  -> throwManyErrors errs
-- | Bundle the messages into a source error and throw it in IO.
throwManyErrors :: MonadIO m => [ErrMsg] -> m ab
throwManyErrors errs = liftIO (throwIO (mkSrcErr (listToBag errs)))
-----------------------------------------------------------------------------
--
-- | Downsweep (dependency analysis)
--
-- Chase downwards from the specified root set, returning summaries
-- for all home modules encountered. Only follow source-import
-- links.
--
-- We pass in the previous collection of summaries, which is used as a
-- cache to avoid recalculating a module summary if the source is
-- unchanged.
--
-- The returned list of [ModSummary] nodes has one node for each home-package
-- module, plus one for any hs-boot files. The imports of these nodes
-- are all there, including the imports of non-home-package modules.
downsweep :: HscEnv
          -> [ModSummary]       -- Old summaries
          -> [ModuleName]       -- Ignore dependencies on these; treat
                                -- them as if they were package modules
          -> Bool               -- True <=> allow multiple targets to have
                                --          the same module name; this is
                                --          very useful for ghc -M
          -> IO [Either ErrMsg ModSummary]
                -- The elts of [ModSummary] all have distinct
                -- (Modules, IsBoot) identifiers, unless the Bool is true
                -- in which case there can be repeats
downsweep hsc_env old_summaries excl_mods allow_dup_roots
   = do
       -- Summarise the targets, failing early on unreadable roots.
       rootSummaries   <- mapM getRootSummary roots
       rootSummariesOk <- reportImportErrors rootSummaries
       let root_map = mkRootMap rootSummariesOk
       checkDuplicates root_map
       -- Chase imports from the roots with a worklist algorithm.
       summs <- loop (concatMap calcDeps rootSummariesOk) root_map
       return summs
     where
        calcDeps = msDeps

        dflags = hsc_dflags hsc_env
        roots  = hsc_targets hsc_env

        old_summary_map :: NodeMap ModSummary
        old_summary_map = mkNodeMap old_summaries

        getRootSummary :: Target -> IO (Either ErrMsg ModSummary)
        getRootSummary (Target (TargetFile file mb_phase) obj_allowed maybe_buf)
           = do exists <- liftIO $ doesFileExist file
                if exists
                    then Right `fmap` summariseFile hsc_env old_summaries file mb_phase
                                         obj_allowed maybe_buf
                    else return $ Left $ mkPlainErrMsg dflags noSrcSpan $
                           text "can't find file:" <+> text file
        getRootSummary (Target (TargetModule modl) obj_allowed maybe_buf)
           = do maybe_summary <- summariseModule hsc_env old_summary_map NotBoot
                                     (L rootLoc modl) obj_allowed
                                     maybe_buf excl_mods
                case maybe_summary of
                   Nothing -> return $ Left $ moduleNotFoundErr dflags modl
                   Just s  -> return s

        rootLoc = mkGeneralSrcSpan (fsLit "<command line>")

        -- In a root module, the filename is allowed to diverge from the module
        -- name, so we have to check that there aren't multiple root files
        -- defining the same module (otherwise the duplicates will be silently
        -- ignored, leading to confusing behaviour).
        checkDuplicates :: NodeMap [Either ErrMsg ModSummary] -> IO ()
        checkDuplicates root_map
           | allow_dup_roots = return ()
           | null dup_roots  = return ()
           | otherwise       = liftIO $ multiRootsErr dflags (head dup_roots)
           where
             dup_roots :: [[ModSummary]]        -- Each at least of length 2
             dup_roots = filterOut isSingleton $ map rights $ nodeMapElts root_map

        loop :: [(Located ModuleName,IsBoot)]
                        -- Work list: process these modules
             -> NodeMap [Either ErrMsg ModSummary]
                        -- Visited set; the range is a list because
                        -- the roots can have the same module names
                        -- if allow_dup_roots is True
             -> IO [Either ErrMsg ModSummary]
                        -- The result includes the worklist, except
                        -- for those mentioned in the visited set
        loop [] done = return (concat (nodeMapElts done))
        loop ((wanted_mod, is_boot) : ss) done
          | Just summs <- Map.lookup key done
          = if isSingleton summs then
                loop ss done
            else
                do { multiRootsErr dflags (rights summs); return [] }
          | otherwise
          = do mb_s <- summariseModule hsc_env old_summary_map
                                       is_boot wanted_mod True
                                       Nothing excl_mods
               case mb_s of
                   Nothing       -> loop ss done
                   Just (Left e) -> loop ss (Map.insert key [Left e] done)
                   Just (Right s)-> loop (calcDeps s ++ ss)
                                         (Map.insert key [Right s] done)
          where
            key = (unLoc wanted_mod, is_boot)
-- | Seed the visited map with the root summaries; a key may map to
-- several summaries when duplicate roots are allowed.
mkRootMap :: [ModSummary] -> NodeMap [Either ErrMsg ModSummary]
mkRootMap summaries = Map.insertListWith (flip (++))
                          [ (msKey s, [Right s]) | s <- summaries ]
                          Map.empty
-- | Returns the dependencies of the ModSummary s.
-- A wrinkle is that for a {-# SOURCE #-} import we return
--      *both* the hs-boot file
--      *and* the source file
-- as "dependencies".  That ensures that the list of all relevant
-- modules always contains B.hs if it contains B.hs-boot.
-- Remember, this pass isn't doing the topological sort.  It's
-- just gathering the list of all relevant ModSummaries
msDeps :: ModSummary -> [(Located ModuleName, IsBoot)]
msDeps s = srcimp_deps ++ ordinary_deps
  where
    srcimp_deps   = concatMap (\m -> [(m, IsBoot), (m, NotBoot)]) (ms_home_srcimps s)
    ordinary_deps = map (\m -> (m, NotBoot)) (ms_home_imps s)
-- | Keep only the imports that refer to the home package: those with no
-- package qualifier, or with the magic "this" qualifier.
home_imps :: [(Maybe FastString, Located ModuleName)] -> [Located ModuleName]
home_imps imps = map snd (filter (is_local . fst) imps)
  where
    is_local Nothing    = True
    is_local (Just pkg) = pkg == fsLit "this" -- "this" is special
-- | All home-package imports of the summary, source and ordinary, unlocated.
ms_home_allimps :: ModSummary -> [ModuleName]
ms_home_allimps ms = map unLoc (ms_home_srcimps ms) ++ map unLoc (ms_home_imps ms)

-- | Home-package {-# SOURCE #-} imports of the module.
ms_home_srcimps :: ModSummary -> [Located ModuleName]
ms_home_srcimps ms = home_imps (ms_srcimps ms)

-- | Home-package ordinary imports of the module.
ms_home_imps :: ModSummary -> [Located ModuleName]
ms_home_imps ms = home_imps (ms_imps ms)
-----------------------------------------------------------------------------
-- Summarising modules
-- We have two types of summarisation:
--
-- * Summarise a file. This is used for the root module(s) passed to
-- cmLoadModules. The file is read, and used to determine the root
-- module name. The module name may differ from the filename.
--
-- * Summarise a module. We are given a module name, and must provide
-- a summary. The finder is used to locate the file in which the module
-- resides.
-- | Summarise a root file: reuse the cached summary when the source
-- timestamp is unchanged (refreshing only the object/interface dates),
-- otherwise preprocess the file and build a fresh summary.
summariseFile
        :: HscEnv
        -> [ModSummary]                 -- old summaries
        -> FilePath                     -- source file name
        -> Maybe Phase                  -- start phase
        -> Bool                         -- object code allowed?
        -> Maybe (StringBuffer,UTCTime)
        -> IO ModSummary
summariseFile hsc_env old_summaries file mb_phase obj_allowed maybe_buf
        -- we can use a cached summary if one is available and the
        -- source file hasn't changed,  But we have to look up the summary
        -- by source file, rather than module name as we do in summarise.
   | Just old_summary <- findSummaryBySourceFile old_summaries file
   = do
        let location = ms_location old_summary
            dflags   = hsc_dflags hsc_env

        src_timestamp <- get_src_timestamp
                -- The file exists; we checked in getRootSummary above.
                -- If it gets removed subsequently, then this
                -- getModificationUTCTime may fail, but that's the right
                -- behaviour.

                -- return the cached summary if the source didn't change
        if ms_hs_date old_summary == src_timestamp &&
           not (gopt Opt_ForceRecomp (hsc_dflags hsc_env))
           then do -- update the object-file timestamp
                obj_timestamp <-
                   if isObjectTarget (hscTarget (hsc_dflags hsc_env))
                       || obj_allowed -- bug #1205
                       then liftIO $ getObjTimestamp location NotBoot
                       else return Nothing
                hi_timestamp <- maybeGetIfaceDate dflags location
                return old_summary{ ms_obj_date   = obj_timestamp
                                  , ms_iface_date = hi_timestamp }
           else
                new_summary src_timestamp

   | otherwise
   = do src_timestamp <- get_src_timestamp
        new_summary src_timestamp
  where
    get_src_timestamp = case maybe_buf of
                           Just (_,t) -> return t
                           Nothing    -> liftIO $ getModificationUTCTime file
                        -- getModificationUTCTime may fail

    new_summary src_timestamp = do
        let dflags = hsc_dflags hsc_env

        let hsc_src = if isHaskellSigFilename file then HsigFile else HsSrcFile

        (dflags', hspp_fn, buf)
            <- preprocessFile hsc_env file mb_phase maybe_buf

        (srcimps, the_imps, L _ mod_name) <- getImports dflags' buf hspp_fn file

        -- Make a ModLocation for this file
        location <- liftIO $ mkHomeModLocation dflags mod_name file

        -- Tell the Finder cache where it is, so that subsequent calls
        -- to findModule will find it, even if it's not on any search path
        mod <- liftIO $ addHomeModuleToFinder hsc_env mod_name location

        -- when the user asks to load a source file by name, we only
        -- use an object file if -fobject-code is on.  See #1205.
        obj_timestamp <-
            if isObjectTarget (hscTarget (hsc_dflags hsc_env))
               || obj_allowed -- bug #1205
                then liftIO $ modificationTimeIfExists (ml_obj_file location)
                else return Nothing

        hi_timestamp <- maybeGetIfaceDate dflags location

        extra_sig_imports  <- findExtraSigImports hsc_env hsc_src mod_name
        required_by_imports <- implicitRequirements hsc_env the_imps

        return (ModSummary { ms_mod = mod, ms_hsc_src = hsc_src,
                             ms_location = location,
                             ms_hspp_file = hspp_fn,
                             ms_hspp_opts = dflags',
                             ms_hspp_buf = Just buf,
                             ms_parsed_mod = Nothing,
                             ms_srcimps = srcimps,
                             ms_textual_imps = the_imps
                                 ++ extra_sig_imports
                                 ++ required_by_imports,
                             ms_hs_date = src_timestamp,
                             ms_iface_date = hi_timestamp,
                             ms_obj_date = obj_timestamp })
-- | The first cached summary whose (plain .hs) source file is @file@,
-- if any.  Cached summaries are expected to have a valid ml_hs_file.
findSummaryBySourceFile :: [ModSummary] -> FilePath -> Maybe ModSummary
findSummaryBySourceFile summaries file =
    case filter matches summaries of
      []       -> Nothing
      (ms : _) -> Just ms
  where
    matches ms =
      ms_hsc_src ms == HsSrcFile
      && expectJust "findSummaryBySourceFile" (ml_hs_file (ms_location ms)) == file
-- Summarise a module, and pick up source and timestamp.
-- | Reuses the cached summary when the source timestamp is unchanged;
-- otherwise locates the file via the Finder and builds a fresh summary.
-- Returns Nothing for excluded or external-package modules.
summariseModule
          :: HscEnv
          -> NodeMap ModSummary -- Map of old summaries
          -> IsBoot             -- IsBoot <=> a {-# SOURCE #-} import
          -> Located ModuleName -- Imported module to be summarised
          -> Bool               -- object code allowed?
          -> Maybe (StringBuffer, UTCTime)
          -> [ModuleName]       -- Modules to exclude
          -> IO (Maybe (Either ErrMsg ModSummary))      -- Its new summary

summariseModule hsc_env old_summary_map is_boot (L loc wanted_mod)
                obj_allowed maybe_buf excl_mods
  | wanted_mod `elem` excl_mods
  = return Nothing

  | Just old_summary <- Map.lookup (wanted_mod, is_boot) old_summary_map
  = do          -- Find its new timestamp; all the
                -- ModSummaries in the old map have valid ml_hs_files
        let location = ms_location old_summary
            src_fn   = expectJust "summariseModule" (ml_hs_file location)

                -- check the modification time on the source file, and
                -- return the cached summary if it hasn't changed.  If the
                -- file has disappeared, we need to call the Finder again.
        case maybe_buf of
           Just (_,t) -> check_timestamp old_summary location src_fn t
           Nothing    -> do
                m <- tryIO (getModificationUTCTime src_fn)
                case m of
                   Right t -> check_timestamp old_summary location src_fn t
                   Left e | isDoesNotExistError e -> find_it
                          | otherwise             -> ioError e

  | otherwise  = find_it
  where
    dflags = hsc_dflags hsc_env

    check_timestamp old_summary location src_fn src_timestamp
       | ms_hs_date old_summary == src_timestamp &&
         not (gopt Opt_ForceRecomp dflags) = do
           -- update the object-file timestamp
           obj_timestamp <-
              if isObjectTarget (hscTarget (hsc_dflags hsc_env))
                  || obj_allowed -- bug #1205
                  then getObjTimestamp location is_boot
                  else return Nothing
           hi_timestamp <- maybeGetIfaceDate dflags location
           return (Just (Right old_summary{ ms_obj_date = obj_timestamp
                                          , ms_iface_date = hi_timestamp}))
       | otherwise =
           -- source changed: re-summarise.
           new_summary location (ms_mod old_summary) src_fn src_timestamp

    find_it = do
        -- Don't use the Finder's cache this time.  If the module was
        -- previously a package module, it may have now appeared on the
        -- search path, so we want to consider it to be a home module.  If
        -- the module was previously a home module, it may have moved.
        uncacheModule hsc_env wanted_mod
        found <- findImportedModule hsc_env wanted_mod Nothing
        case found of
             Found location mod
                | isJust (ml_hs_file location) ->
                        -- Home package
                        just_found location mod
                | otherwise ->
                        -- Drop external-pkg
                        ASSERT(moduleUnitId mod /= thisPackage dflags)
                        return Nothing

             err -> return $ Just $ Left $ noModError dflags loc wanted_mod err
                        -- Not found

    just_found location mod = do
                -- Adjust location to point to the hs-boot source file,
                -- hi file, object file, when is_boot says so
        let location' | IsBoot <- is_boot = addBootSuffixLocn location
                      | otherwise         = location
            src_fn = expectJust "summarise2" (ml_hs_file location')

                -- Check that it exists
                -- It might have been deleted since the Finder last found it
        maybe_t <- modificationTimeIfExists src_fn
        case maybe_t of
          Nothing -> return $ Just $ Left $ noHsFileErr dflags loc src_fn
          Just t  -> new_summary location' mod src_fn t

    new_summary location mod src_fn src_timestamp
      = do
        -- Preprocess the source file and get its imports
        -- The dflags' contains the OPTIONS pragmas
        (dflags', hspp_fn, buf) <- preprocessFile hsc_env src_fn Nothing maybe_buf
        (srcimps, the_imps, L mod_loc mod_name) <- getImports dflags' buf hspp_fn src_fn

        -- NB: Despite the fact that is_boot is a top-level parameter, we
        -- don't actually know coming into this function what the HscSource
        -- of the module in question is.  This is because we may be processing
        -- this module because another module in the graph imported it: in this
        -- case, we know if it's a boot or not because of the {-# SOURCE #-}
        -- annotation, but we don't know if it's a signature or a regular
        -- module until we actually look it up on the filesystem.
        let hsc_src = case is_boot of
                IsBoot -> HsBootFile
                _ | isHaskellSigFilename src_fn -> HsigFile
                  | otherwise                   -> HsSrcFile

        when (mod_name /= wanted_mod) $
                throwOneError $ mkPlainErrMsg dflags' mod_loc $
                              text "File name does not match module name:"
                              $$ text "Saw:" <+> quotes (ppr mod_name)
                              $$ text "Expected:" <+> quotes (ppr wanted_mod)

        -- Find the object timestamp, and return the summary
        obj_timestamp <-
           if isObjectTarget (hscTarget (hsc_dflags hsc_env))
              || obj_allowed -- bug #1205
              then getObjTimestamp location is_boot
              else return Nothing

        hi_timestamp <- maybeGetIfaceDate dflags location

        extra_sig_imports   <- findExtraSigImports hsc_env hsc_src mod_name
        required_by_imports <- implicitRequirements hsc_env the_imps

        return (Just (Right (ModSummary { ms_mod = mod,
                              ms_hsc_src = hsc_src,
                              ms_location = location,
                              ms_hspp_file = hspp_fn,
                              ms_hspp_opts = dflags',
                              ms_hspp_buf = Just buf,
                              ms_parsed_mod = Nothing,
                              ms_srcimps = srcimps,
                              ms_textual_imps = the_imps
                                  ++ extra_sig_imports
                                  ++ required_by_imports,
                              ms_hs_date = src_timestamp,
                              ms_iface_date = hi_timestamp,
                              ms_obj_date = obj_timestamp })))
-- | Timestamp of the object file, if present.  Boot files never have
-- object code, so they always yield Nothing.
getObjTimestamp :: ModLocation -> IsBoot -> IO (Maybe UTCTime)
getObjTimestamp location is_boot
  | is_boot == IsBoot = return Nothing
  | otherwise         = modificationTimeIfExists (ml_obj_file location)
-- | Run the preprocessor over a source file (or accept an in-memory
-- buffer) and return the updated DynFlags, the path of the preprocessed
-- file, and its contents.
preprocessFile :: HscEnv
               -> FilePath
               -> Maybe Phase -- ^ Starting phase
               -> Maybe (StringBuffer,UTCTime)
               -> IO (DynFlags, FilePath, StringBuffer)
-- No buffer supplied: really run the preprocessor over the file on disk.
preprocessFile hsc_env src_fn mb_phase Nothing
  = do
        (dflags', hspp_fn) <- preprocess hsc_env (src_fn, mb_phase)
        buf <- hGetStringBuffer hspp_fn
        return (dflags', hspp_fn, buf)

-- A buffer was supplied (e.g. by an IDE): we cannot run the preprocessor
-- on it, so only parse its OPTIONS pragmas, and refuse if real
-- preprocessing (unlit, CPP, -F) would have been needed.
preprocessFile hsc_env src_fn mb_phase (Just (buf, _time))
  = do
        let dflags = hsc_dflags hsc_env
        let local_opts = getOptions dflags buf src_fn

        (dflags', leftovers, warns)
            <- parseDynamicFilePragma dflags local_opts
        checkProcessArgsResult dflags leftovers
        handleFlagWarnings dflags' warns

        let needs_preprocessing
                | Just (Unlit _) <- mb_phase = True
                | Nothing <- mb_phase, Unlit _ <- startPhase src_fn = True
                  -- note: local_opts is only required if there's no Unlit phase
                | xopt LangExt.Cpp dflags' = True
                | gopt Opt_Pp dflags'      = True
                | otherwise                = False

        when needs_preprocessing $
           -- Fixed typo in the user-facing message:
           -- "preprocesing" -> "preprocessing".
           throwGhcExceptionIO (ProgramError "buffer needs preprocessing; interactive check disabled")

        return (dflags', src_fn, buf)
-----------------------------------------------------------------------------
-- Error messages
-----------------------------------------------------------------------------
-- | Error message for a module the Finder could not locate.
noModError :: DynFlags -> SrcSpan -> ModuleName -> FindResult -> ErrMsg
-- ToDo: we don't have a proper line number for this error
noModError dflags loc wanted_mod err =
  mkPlainErrMsg dflags loc (cannotFindModule dflags wanted_mod err)
-- | Error message for a source file that vanished after being found.
noHsFileErr :: DynFlags -> SrcSpan -> String -> ErrMsg
noHsFileErr dflags loc path =
  mkPlainErrMsg dflags loc (text "Can't find" <+> text path)
-- | Error message for a root target module with no local source.
moduleNotFoundErr :: DynFlags -> ModuleName -> ErrMsg
moduleNotFoundErr dflags mod =
  mkPlainErrMsg dflags noSrcSpan
    (text "module" <+> quotes (ppr mod) <+> text "cannot be found locally")
-- | Report a module defined by more than one root file.  The list must be
-- non-empty; all summaries are assumed to be for the same module.
multiRootsErr :: DynFlags -> [ModSummary] -> IO ()
multiRootsErr _ [] = panic "multiRootsErr"
multiRootsErr dflags summs@(summ1:_) =
    throwOneError $ mkPlainErrMsg dflags noSrcSpan $
      text "module" <+> quotes (ppr (ms_mod summ1)) <+>
      text "is defined in multiple files:" <+>
      sep (map text file_names)
  where
    file_names = map (expectJust "checkDup" . ml_hs_file . ms_location) summs
cyclicModuleErr :: [ModSummary] -> SDoc
-- From a strongly connected component we find
-- a single cycle to report
cyclicModuleErr mss
  = ASSERT( not (null mss) )
    case findCycle graph of
       Nothing   -> ptext (sLit "Unexpected non-cycle") <+> ppr mss
       Just path -> vcat [ ptext (sLit "Module imports form a cycle:")
                         , nest 2 (show_path path) ]
  where
    graph :: [Node NodeKey ModSummary]
    graph = [(ms, msKey ms, get_deps ms) | ms <- mss]

    get_deps :: ModSummary -> [NodeKey]
    get_deps ms = ([ (unLoc m, IsBoot)  | m <- ms_home_srcimps ms ] ++
                   [ (unLoc m, NotBoot) | m <- ms_home_imps    ms ])

    show_path []  = panic "show_path"
    show_path [m] = ptext (sLit "module") <+> ppr_ms m
                    <+> ptext (sLit "imports itself")
    show_path (m1:m2:ms) = vcat ( nest 7 (ptext (sLit "module") <+> ppr_ms m1)
                                : nest 6 (ptext (sLit "imports") <+> ppr_ms m2)
                                : go ms )
       where
         -- the final line points back at m1, closing the printed cycle
         go []     = [ptext (sLit "which imports") <+> ppr_ms m1]
         go (m:ms) = (ptext (sLit "which imports") <+> ppr_ms m) : go ms

    ppr_ms :: ModSummary -> SDoc
    ppr_ms ms = quotes (ppr (moduleName (ms_mod ms))) <+>
                (parens (text (msHsFilePath ms)))
| rahulmutt/ghcvm | compiler/Eta/Main/GhcMake.hs | bsd-3-clause | 96,167 | 0 | 35 | 30,280 | 15,718 | 8,035 | 7,683 | -1 | -1 |
-- | PCP instances of very special form
module PCP.Form where
import PCP.Type
-- | The three-rule instance  w/0, 0/1, 1/w.
form :: String -> PCP Char
form w = zip [w, "0", "1"] ["0", "1", w]
-- | use additional letter w,
-- only increasing rules.  NB: the literal string "w" here is the extra
-- letter itself, distinct from the argument @w@.
iform :: String -> PCP Char
iform w = zip [w, "0", "1", "w"] ["0", "1", "w", "0"]
-- | only increasing rules (again over the extra literal letter "w")
dform :: String -> PCP Char
dform w = zip ["1", "0", "1", "w"] [w, "1", "w", "0"]
-- | Mirror an instance: reverse each word and swap the bits 0 <-> 1
-- on both sides of every rule.
spiegel :: PCP Char -> PCP Char
spiegel rules = [ (mirror l, mirror r) | (l, r) <- rules ]
  where
    mirror = reverse . map flip_bit
    -- deliberately partial: inputs are assumed to be over {'0','1'}
    flip_bit c = case c of '0' -> '1'; '1' -> '0'
| Erdwolf/autotool-bonn | src/PCP/Form.hs | gpl-2.0 | 580 | 0 | 16 | 149 | 269 | 154 | 115 | 14 | 2 |
module Main where
import Control.Monad
import Database.HDBC
import Database.HDBC.MySQL
-- | Open a MySQL connection to host "putterwell", taking every other
-- setting from 'defaultMySQLConnectInfo'.
-- NOTE(review): the host name is hard-coded; presumably a developer
-- test box -- confirm before reusing this module elsewhere.
connectDatabase :: IO Connection
connectDatabase = connectMySQL defaultMySQLConnectInfo
    { mysqlHost = "putterwell"
    }
-- | Exercise the HDBC-MySQL driver: print driver/server info, describe
-- every table, then dump two columns of the @album@ table side by side.
go :: IO ()
go = do
    conn <- connectDatabase
    putStrLn ("driver " ++ show (hdbcDriverName conn))
    putStrLn ("server version " ++ show (dbServerVer conn))

    tables <- getTables conn
    forM_ tables $ \tbl -> do
      putStrLn ("table " ++ tbl)
      columns <- describeTable conn tbl
      forM_ columns $ \(colName, colDesc) ->
        putStrLn (colName ++ " " ++ show colDesc)

    rowsA   <- quickQuery' conn "SELECT a FROM album" []
    rowsStr <- quickQuery' conn "SELECT str FROM album" []
    forM_ (zip rowsA rowsStr) $ \(a, str) ->
      putStrLn ("a=" ++ show a ++ ", str=" ++ show str)
{-
stmt <- prepare conn "INSERT INTO album VALUES (?, ?)"
n <- execute stmt [SqlWord32 3000000000, SqlString "hello"]
commit conn
-}
-- | Run 'go' once, reporting any SqlError via 'handleSqlError'.
-- Simplified: the original wrapped 'go' in @replicateM_ 1@, which is
-- an identity for an @IO ()@ action.
main :: IO ()
main = handleSqlError go
| beastaugh/hdbc-mysql | Test.hs | lgpl-2.1 | 1,098 | 0 | 15 | 340 | 311 | 155 | 156 | 22 | 1 |
{- This is the code extracted from "A reflection on types", by Simon PJ,
Stephanie Weirich, Richard Eisenberg, and Dimitrios Vytiniotis, 2016. -}
-- NB: it includes a negative-recursive function (see delta1), and
-- so will give "simplifier ticks exhausted", at least with -O
{-# LANGUAGE RankNTypes, PolyKinds, TypeOperators,
ScopedTypeVariables, GADTs, FlexibleInstances,
UndecidableInstances, RebindableSyntax,
DataKinds, MagicHash #-}
{-# OPTIONS_GHC -Wno-missing-methods -Wno-redundant-constraints #-}
{-# OPTIONS_GHC -Wno-simplifiable-class-constraints #-}
-- Because we define a local Typeable class and have
-- instance Data.Typeable.Typeable a => Typeable a
module Dynamic where
import Data.Map ( Map )
import qualified Data.Map as Map
import Unsafe.Coerce ( unsafeCoerce )
import Control.Monad ( (<=<) )
import Prelude hiding ( lookup, fromInteger, replicate )
import qualified Prelude
import qualified Data.Typeable
import qualified Data.Data
import Data.Kind
-- Unqualified aliases for the Map operations used by the store below.
lookupMap = Map.lookup
insertMap = Map.insert
-- let's ignore overloaded numbers: with RebindableSyntax in effect,
-- integer literals in this module elaborate via this 'fromInteger',
-- so they are all 'Int'.
fromInteger :: Integer -> Int
fromInteger = Prelude.fromInteger
-- Placeholder stubs: these definitions are elided in the paper's
-- extracted code.
insertStore = undefined
schema = undefined
withTypeable _ _ = undefined
throw# = undefined
toDynamicST = undefined
fromDynamicST = undefined
extendStore :: Typeable a => STRef s a -> a -> Store -> Store
lookupStore :: Typeable a => STRef s a -> Store -> Maybe a

type Key = Int
-- A typed reference is represented by a bare key into the dynamic store.
data STRef s a = STR Key
type Store = Map Key Dynamic

-- Insert the value, wrapped as a Dynamic, under the reference's key.
extendStore (STR k) v s = insertMap k (toDynamicST v) s
-- Look up the key and try to recover the value at its static type.
lookupStore (STR k) s = case lookupMap k s of
                          Just d  -> fromDynamicST d
                          Nothing -> Nothing

toDynamicST :: Typeable a => a -> Dynamic
fromDynamicST :: Typeable a => Dynamic -> Maybe a
eval = undefined
data Term

-- | A naive dynamic type: a closed universe with one constructor per
-- representable type. Contrast with the open 'Dynamic' defined later.
data DynamicSilly = DIntSilly Int
                  | DBoolSilly Bool
                  | DCharSilly Char
                  | DPairSilly DynamicSilly DynamicSilly

toDynInt :: Int -> DynamicSilly
toDynInt = DIntSilly

-- | Projection back out of the closed universe; Nothing on a type mismatch.
fromDynInt :: DynamicSilly -> Maybe Int
fromDynInt (DIntSilly n) = Just n
fromDynInt _ = Nothing

toDynPair :: DynamicSilly -> DynamicSilly -> DynamicSilly
toDynPair = DPairSilly

-- | First projection of a dynamic pair; Nothing if the value is not a pair.
dynFstSilly :: DynamicSilly -> Maybe DynamicSilly
dynFstSilly (DPairSilly x1 x2) = Just x1
dynFstSilly _ = Nothing

eval :: Term -> DynamicSilly
eqT = undefined
-- Instances of the *local* Typeable class for the primitive constructors
-- used below. Method bodies are deliberately omitted ('typeRep' is meant
-- to be compiler-generated); -Wno-missing-methods silences the warnings.
instance Typeable (->)
instance Typeable Maybe
instance Typeable Bool
instance Typeable Int
-- Decomposition: an application is Typeable when both parts are.
instance (Typeable a, Typeable b) => Typeable (a b)
instance Typeable (,)
instance Eq SomeTypeRep

-- | A dynamic value: a value paired with the runtime representation of its
-- type, the latter existentially hidden.
data Dynamic where
  Dyn :: TypeRep a -> a -> Dynamic

toDynamic :: Typeable a => a -> Dynamic
toDynamic x = Dyn typeRep x
eqTNoKind = undefined
eqTNoKind :: TypeRep a -> TypeRep b -> Maybe (a :***: b)
-- Primitive; implemented by compiler

-- | Equality witness used by the "no kind" variant of the API.
data a :***: b where
  ReflNoKind :: a :***: a

fromDynamic :: forall d. Typeable d => Dynamic -> Maybe d
fromDynamic (Dyn (ra :: TypeRep a) (x :: a))
  = case eqT ra (typeRep :: TypeRep d) of
      Nothing   -> Nothing
      Just Refl -> Just x   -- matching Refl brings a ~ d into scope

-- Same as 'fromDynamic', written with Maybe's do-notation.
fromDynamicMonad :: forall d. Typeable d => Dynamic -> Maybe d
fromDynamicMonad (Dyn ra x)
  = do Refl <- eqT ra (typeRep :: TypeRep d)
       return x

cast :: forall a b. (Typeable a, Typeable b) => a -> Maybe b
cast x = do Refl <- eqT (typeRep :: TypeRep a)
                        (typeRep :: TypeRep b)
            return x

-- Generalised cast under an arbitrary context @c@.
gcast :: forall a b c. (Typeable a, Typeable b) => c a -> Maybe (c b)
gcast x = do Refl <- eqT (typeRep :: TypeRep a)
                         (typeRep :: TypeRep b)
             return x
data SameKind :: k -> k -> Type
-- Compile-time check that AppResult and AppResultNoKind share a kind.
type CheckAppResult = SameKind AppResult AppResultNoKind
-- not the most thorough check

foo :: AppResult x -> AppResultNoKind x
foo (App y z) = AppNoKind y z

splitApp :: TypeRep a -> Maybe (AppResult a)
splitApp = undefined

splitAppNoKind = undefined
splitAppNoKind :: TypeRep a -> Maybe (AppResultNoKind a)
-- Primitive; implemented by compiler

data AppResultNoKind t where
  AppNoKind :: TypeRep a -> TypeRep b -> AppResultNoKind (a b)

-- Pair projection, in the variant that does not track kinds.
dynFstNoKind :: Dynamic -> Maybe Dynamic
dynFstNoKind (Dyn rpab x)
  = do AppNoKind rpa rb <- splitAppNoKind rpab
       AppNoKind rp ra <- splitAppNoKind rpa
       Refl <- eqT rp (typeRep :: TypeRep (,))
       return (Dyn ra (fst x))

-- | Dynamic function application: decompose the function's representation,
-- check its head is (->) and its domain matches the argument, then apply.
dynApply :: Dynamic -> Dynamic -> Maybe Dynamic
dynApply (Dyn rf f) (Dyn rx x) = do
  App ra rt2 <- splitApp rf
  App rtc rt1 <- splitApp ra
  Refl <- eqT rtc (typeRep :: TypeRep (->))
  Refl <- eqT rt1 rx
  return (Dyn rt2 (f x))
data TypeRepAbstract (a :: k) -- primitive, indexed by type and kind

-- | The kind-polymorphic Typeable class of the paper (local model; a real
-- 'Data.Typeable'-backed instance is given near the end of the file).
class Typeable (a :: k) where
  typeRep :: TypeRep a

-- | Result of decomposing an application type; the pattern match on 'App'
-- introduces an existential kind @k1@ alongside both sub-representations.
data AppResult (t :: k) where
  App :: forall k1 k (a :: k1 -> k) (b :: k1).
         TypeRep a -> TypeRep b -> AppResult (a b)

dynFst :: Dynamic -> Maybe Dynamic
dynFst (Dyn (rpab :: TypeRep pab) (x :: pab))
  = do App (rpa :: TypeRep pa) (rb :: TypeRep b) <- splitApp rpab
       -- introduces kind |k2|, and types |pa :: k2 -> Type|, |b :: k2|
       App (rp :: TypeRep p) (ra :: TypeRep a) <- splitApp rpa
       -- introduces kind |k1|, and types |p :: k1 -> k2 -> Type|,
       -- |a :: k1|
       Refl <- eqT rp (typeRep :: TypeRep (,))
       -- introduces |p ~ (,)| and
       -- |(k1 -> k2 -> Type) ~ (Type -> Type -> Type)|
       return (Dyn ra (fst x))

eqT :: forall k1 k2 (a :: k1) (b :: k2).
       TypeRep a -> TypeRep b -> Maybe (a :~: b)

-- | Kind-heterogeneous propositional equality.
data (a :: k1) :~: (b :: k2) where
  Refl :: forall k (a :: k). a :~: a
castDance :: (Typeable a, Typeable b) => a -> Maybe b
castDance = castR typeRep typeRep

withTypeable :: TypeRep a -> (Typeable a => r) -> r

-- 'castR' and 'castDance' bounce off each other through 'withTypeable',
-- converting between explicit TypeRep passing and implicit constraints.
castR :: TypeRep a -> TypeRep b -> a -> Maybe b
castR ta tb = withTypeable ta (withTypeable tb castDance)

cmpT = undefined
compareTypeRep = undefined

-- | Existentially quantified representation, usable as an (Eq/Ord) map key.
data SomeTypeRep where
  SomeTypeRep :: TypeRep a -> SomeTypeRep

type TyMapLessTyped = Map SomeTypeRep Dynamic

insertLessTyped :: forall a. Typeable a => a -> TyMapLessTyped -> TyMapLessTyped
insertLessTyped x
  = Map.insert (SomeTypeRep (typeRep :: TypeRep a)) (toDynamic x)

lookupLessTyped :: forall a. Typeable a => TyMapLessTyped -> Maybe a
lookupLessTyped
  = fromDynamic <=< Map.lookup (SomeTypeRep (typeRep :: TypeRep a))

instance Ord SomeTypeRep where
  compare (SomeTypeRep tr1) (SomeTypeRep tr2) = compareTypeRep tr1 tr2

compareTypeRep :: TypeRep a -> TypeRep b -> Ordering -- primitive
data TyMap = Empty | Node Dynamic TyMap TyMap

-- | Search a binary tree of dynamics ordered by their type representation.
lookup :: TypeRep a -> TyMap -> Maybe a
lookup tr1 (Node (Dyn tr2 v) left right) =
  case compareTypeRep tr1 tr2 of
    LT -> lookup tr1 left
    EQ -> castR tr2 tr1 v -- know this cast will succeed
    GT -> lookup tr1 right
lookup tr1 Empty = Nothing

cmpT :: TypeRep a -> TypeRep b -> OrderingT a b
-- definition is primitive

-- | An Ordering that, in the EQ case, additionally witnesses type equality.
data OrderingT a b where
  LTT :: OrderingT a b
  EQT :: OrderingT t t
  GTT :: OrderingT a b
-- | Concrete representation: either a saturated application, or a type
-- constructor tagged with a representation of its kind.
data TypeRep (a :: k) where
  TrApp   :: TypeRep a -> TypeRep b -> TypeRep (a b)
  TrTyCon :: TyCon -> TypeRep k -> TypeRep (a :: k)

data TyCon = TyCon { tc_module :: Module, tc_name :: String }
data Module = Module { mod_pkg :: String, mod_name :: String }

-- Example constructor representation: Maybe, from base:Data.Maybe.
tcMaybe :: TyCon
tcMaybe = TyCon { tc_module = Module { mod_pkg = "base"
                                     , mod_name = "Data.Maybe" }
                , tc_name = "Maybe" }
rt = undefined

delta1 :: Dynamic -> Dynamic
-- NB: this function behaves like a negative-recursive data type
-- and hence leads compiler into an infinite inlining loop,
-- and we get "simplifier ticks exhausted".
-- See Section 7 of the paper "A reflection on types"
delta1 dn = case fromDynamic dn of
              Just f  -> f dn
              Nothing -> dn

-- Self-application through Dynamic: a well-typed looping term.
loop1 = delta1 (toDynamic delta1)

data Rid = MkT (forall a. TypeRep a -> a -> a)

rt :: TypeRep Rid

-- Same construction via the explicitly negative-recursive 'Rid'.
delta :: forall a. TypeRep a -> a -> a
delta ra x = case (eqT ra rt) of
               Just Refl -> case x of MkT y -> y rt x
               Nothing   -> x

loop = delta rt (MkT delta)

throw# :: SomeException -> a

data SomeException where
  SomeException :: Exception e => e -> SomeException

class (Typeable e, Show e) => Exception e where { }
data Company
data Salary

incS :: Float -> Salary -> Salary
incS = undefined

-- some impedance matching with SYB
instance Data.Data.Data Company

-- Bridge from GHC's real Typeable to the local class defined above; the
-- header enables -Wno-simplifiable-class-constraints for this reason.
instance {-# INCOHERENT #-} Data.Typeable.Typeable a => Typeable a

-- | SYB-style "make transformation": apply @f@ only where the types agree,
-- otherwise behave as the identity.
mkT :: (Typeable a, Typeable b) => (b -> b) -> a -> a
mkT f x = case (cast f) of
            Just g  -> g x
            Nothing -> x
data Expr a
frontEnd = undefined

-- | A typed expression packaged with a representation of its type.
data DynExp where
  DE :: TypeRep a -> Expr a -> DynExp

frontEnd :: String -> DynExp

-- Stubs modelling the old (pre kind-indexed TypeRep) Data.Typeable API.
data TyConOld

typeOf = undefined
eqTOld = undefined
funTcOld = undefined :: TyConOld
splitTyConApp = undefined
mkTyCon3 = undefined
boolTcOld = undefined
tupleTc = undefined
mkTyConApp = undefined
instance Eq TypeRepOld
instance Eq TyConOld

data TypeRepOld -- Abstract

class TypeableOld a where
  typeRepOld :: proxy a -> TypeRepOld

data DynamicOld where
  DynOld :: TypeRepOld -> a -> DynamicOld

data Proxy a = Proxy

-- | Old-style fromDynamic: compare untyped representations for equality,
-- then 'unsafeCoerce' — the compiler cannot check this is sound.
fromDynamicOld :: forall d. TypeableOld d => DynamicOld -> Maybe d
fromDynamicOld (DynOld trx x)
  | typeRepOld (Proxy :: Proxy d) == trx = Just (unsafeCoerce x)
  | otherwise = Nothing

-- | Old-style dynApply: decompose the function representation by hand and
-- 'unsafeCoerce' the application when constructor and argument match.
dynApplyOld :: DynamicOld -> DynamicOld -> Maybe DynamicOld
dynApplyOld (DynOld trf f) (DynOld trx x) =
  case splitTyConApp trf of
    (tc, [t1,t2]) | tc == funTcOld && t1 == trx ->
      Just (DynOld t2 ((unsafeCoerce f) x))
    _ -> Nothing
data DynamicClosed where
  DynClosed :: TypeRepClosed a -> a -> DynamicClosed

-- | A closed-world representation type: only Bool, functions and pairs.
data TypeRepClosed (a :: Type) where
  TBool :: TypeRepClosed Bool
  TFun  :: TypeRepClosed a -> TypeRepClosed b -> TypeRepClosed (a -> b)
  TProd :: TypeRepClosed a -> TypeRepClosed b -> TypeRepClosed (a, b)

lookupPil = undefined
lookupPil :: Typeable a => [Dynamic] -> Maybe a

-- Another closed dynamic universe, this time with embedded functions.
data Dyn1 = Dyn1 Int
          | DynFun (Dyn1 -> Dyn1)
          | DynPair (Dyn1, Dyn1)

data TypeEnum = IntType | FloatType | BoolType | DateType | StringType

data Schema = Object [Schema] |
              Field TypeEnum |
              Array Schema

schema :: Typeable a => a -> Schema
| sdiehl/ghc | testsuite/tests/dependent/should_compile/dynamic-paper.hs | bsd-3-clause | 10,379 | 0 | 15 | 2,695 | 3,304 | 1,728 | 1,576 | -1 | -1 |
where
-- NOTE(review): this file is an indentation-test fixture (vim-haskell
-- "expected" output). The signature chains two `=>` before a `->` and the
-- body ignores the constraints -- presumably intentional test input for
-- the indenter, not meant to type-check; confirm before "fixing".
foo :: Monad m
    => Functor m
    => MonadIO m
    -> Int
foo x = x
| ruchee/vimrc | vimfiles/bundle/vim-haskell/tests/indent/test016/expected.hs | mit | 92 | 4 | 5 | 44 | 35 | 17 | 18 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE CPP #-}
module Yesod.Core.Dispatch
( -- * Quasi-quoted routing
parseRoutes
, parseRoutesNoCheck
, parseRoutesFile
, parseRoutesFileNoCheck
, mkYesod
-- ** More fine-grained
, mkYesodData
, mkYesodSubData
, mkYesodDispatch
, mkYesodSubDispatch
-- ** Path pieces
, PathPiece (..)
, PathMultiPiece (..)
, Texts
-- * Convert to WAI
, toWaiApp
, toWaiAppPlain
, warp
, warpDebug
, warpEnv
, mkDefaultMiddlewares
, defaultMiddlewaresNoLogging
-- * WAI subsites
, WaiSubsite (..)
, subHelper
) where
import Prelude hiding (exp)
import Yesod.Core.Internal.TH
import Language.Haskell.TH.Syntax (qLocation)
import Web.PathPieces
import qualified Network.Wai as W
import Data.ByteString.Lazy.Char8 ()
import Data.Text (Text)
import Data.Monoid (mappend)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Blaze.ByteString.Builder
import Network.HTTP.Types (status301, status307)
import Yesod.Routes.Parse
import Yesod.Core.Types
import Yesod.Core.Class.Yesod
import Yesod.Core.Class.Dispatch
import Yesod.Core.Internal.Run
import Safe (readMay)
import System.Environment (getEnvironment)
import Network.Wai.Middleware.Autohead
import Network.Wai.Middleware.AcceptOverride
import Network.Wai.Middleware.RequestLogger
import Network.Wai.Middleware.Gzip
import Network.Wai.Middleware.MethodOverride
import qualified Network.Wai.Handler.Warp
import System.Log.FastLogger
import Control.Monad.Logger
import Control.Monad (when)
import qualified Paths_yesod_core
import Data.Version (showVersion)
import qualified System.Random.MWC as MWC
-- | Convert the given argument into a WAI application, executable with any WAI
-- handler. This function will provide no middlewares; if you want commonly
-- used middlewares, please use 'toWaiApp'.
toWaiAppPlain :: YesodDispatch site => site -> IO W.Application
toWaiAppPlain site = do
    appLogger      <- makeLogger site
    sessionBackend <- makeSessionBackend site
    randomGen      <- MWC.createSystemRandom
    let runnerEnv = YesodRunnerEnv
            { yreLogger         = appLogger
            , yreSite           = site
            , yreSessionBackend = sessionBackend
            , yreGen            = randomGen
            }
    return (toWaiAppYre runnerEnv)
-- Shared dispatch core: clean the request path first. A 'Left' result
-- triggers a redirect to the canonical path; a 'Right' carries the cleaned
-- segments on to the site's dispatcher.
toWaiAppYre :: YesodDispatch site => YesodRunnerEnv site -> W.Application
toWaiAppYre yre req =
    let site = yreSite yre
    in case cleanPath site (W.pathInfo req) of
         Left pieces  -> sendRedirect site pieces req
         Right pieces -> yesodDispatch yre req { W.pathInfo = pieces }
-- Respond with a redirect to the canonical form of the requested path,
-- preserving any query string.
sendRedirect :: Yesod master => master -> [Text] -> W.Application
sendRedirect y segments' env sendResponse = sendResponse response
  where
    response = W.responseLBS status headers "Redirecting"

    headers =
        [ ("Content-Type", "text/plain")
        , ("Location", Blaze.ByteString.Builder.toByteString dest')
        ]

    -- Ensure that non-GET requests get redirected correctly (307 keeps the
    -- request method; 301 may be rewritten to GET by clients). See:
    -- https://github.com/yesodweb/yesod/issues/951
    status
        | W.requestMethod env == "GET" = status301
        | otherwise                    = status307

    dest = joinPath y (resolveApproot y env) segments' []

    -- Re-attach the original query string, if any.
    dest'
        | S.null (W.rawQueryString env) = dest
        | otherwise = dest `mappend`
            Blaze.ByteString.Builder.fromByteString (W.rawQueryString env)
-- | Same as 'toWaiAppPlain', but provides a default set of middlewares. This
-- set may change with future releases, but currently covers:
--
-- * Logging
--
-- * GZIP compression
--
-- * Automatic HEAD method handling
--
-- * Request method override with the _method query string parameter
--
-- * Accept header override with the _accept query string parameter
toWaiApp :: YesodDispatch site => site -> IO W.Application
toWaiApp site = makeLogger site >>= \logger -> toWaiAppLogger logger site
-- | Like 'toWaiAppPlain' but reuses a caller-supplied 'Logger'; logs an
-- "Application launched" message, then wraps the app in the default
-- middlewares from 'mkDefaultMiddlewares'.
toWaiAppLogger :: YesodDispatch site => Logger -> site -> IO W.Application
toWaiAppLogger logger site = do
    sb <- makeSessionBackend site
    gen <- MWC.createSystemRandom
    let yre = YesodRunnerEnv
                { yreLogger         = logger
                , yreSite           = site
                , yreSessionBackend = sb
                , yreGen            = gen
                }
    -- The TH splice embeds this source location into the log entry.
    messageLoggerSource
        site
        logger
        $(qLocation >>= liftLoc)
        "yesod-core"
        LevelInfo
        (toLogStr ("Application launched" :: S.ByteString))
    middleware <- mkDefaultMiddlewares logger
    return $ middleware $ toWaiAppYre yre
-- | A convenience method to run an application using the Warp webserver on the
-- specified port. Automatically calls 'toWaiApp'. Provides a default set of
-- middlewares. This set may change at any point without a breaking version
-- number. Currently, it includes:
--
-- If you need more fine-grained control of middlewares, please use 'toWaiApp'
-- directly.
--
-- Since 1.2.0
warp :: YesodDispatch site => Int -> site -> IO ()
warp port site = do
    logger <- makeLogger site
    toWaiAppLogger logger site >>= Network.Wai.Handler.Warp.runSettings (
        Network.Wai.Handler.Warp.setPort port $
        Network.Wai.Handler.Warp.setServerName serverValue $
        -- Exceptions caught by Warp are forwarded to the site's logger,
        -- but only those 'shouldLog'' deems worth displaying.
        Network.Wai.Handler.Warp.setOnException (\_ e ->
            when (shouldLog' e) $
            messageLoggerSource
                site
                logger
                $(qLocation >>= liftLoc)
                "yesod-core"
                LevelError
                (toLogStr $ "Exception from Warp: " ++ show e)) $
        Network.Wai.Handler.Warp.defaultSettings)
  where
    shouldLog' = Network.Wai.Handler.Warp.defaultShouldDisplayException

    -- Value passed to Warp's 'setServerName',
    -- e.g. "Warp/x.y.z + Yesod/a.b.c (core)".
    serverValue :: S8.ByteString
    serverValue = S8.pack $ concat
        [ "Warp/"
        , Network.Wai.Handler.Warp.warpVersion
        , " + Yesod/"
        , showVersion Paths_yesod_core.version
        , " (core)"
        ]
-- | A default set of middlewares: request logging (to the given 'Logger')
-- on top of 'defaultMiddlewaresNoLogging'.
--
-- Since 1.2.0
mkDefaultMiddlewares :: Logger -> IO W.Middleware
mkDefaultMiddlewares logger = do
    requestLogger <- mkRequestLogger def
        { destination  = Network.Wai.Middleware.RequestLogger.Logger (loggerSet logger)
        , outputFormat = Apache FromSocket
        }
    return (requestLogger . defaultMiddlewaresNoLogging)
-- | All of the default middlewares, excluding logging.
--
-- Since 1.2.12
--
-- Composition of the accept-header override, automatic HEAD handling,
-- gzip compression and _method override middlewares.
defaultMiddlewaresNoLogging :: W.Middleware
defaultMiddlewaresNoLogging = acceptOverride . autohead . gzip def . methodOverride
-- | Deprecated synonym for 'warp'.
--
-- Kept only for backwards compatibility; behaves exactly like 'warp'.
warpDebug :: YesodDispatch site => Int -> site -> IO ()
warpDebug = warp
{-# DEPRECATED warpDebug "Please use warp instead" #-}
-- | Runs your application using default middlewares (i.e., via 'toWaiApp'). It
-- reads port information from the PORT environment variable, as used by tools
-- such as Keter and the FP Complete School of Haskell.
--
-- Note that the exact behavior of this function may be modified slightly over
-- time to work correctly with external tools, without a change to the type
-- signature.
warpEnv :: YesodDispatch site => site -> IO ()
warpEnv site = do
    environment <- getEnvironment
    case lookup "PORT" environment of
        Nothing -> error "warpEnv: no PORT environment variable found"
        Just rawPort ->
            case readMay rawPort of
                Just port -> warp port site
                Nothing -> error $
                    "warpEnv: invalid PORT environment variable: " ++ show rawPort
| urbanslug/yesod | yesod-core/Yesod/Core/Dispatch.hs | mit | 7,633 | 0 | 19 | 1,863 | 1,415 | 800 | 615 | 158 | 3 |
-- | Wrapper around Data.Graph with support for edge labels
{-# LANGUAGE ScopedTypeVariables #-}
module Distribution.Client.Utils.LabeledGraph (
-- * Graphs
Graph
, Vertex
-- ** Building graphs
, graphFromEdges
, graphFromEdges'
, buildG
, transposeG
-- ** Graph properties
, vertices
, edges
-- ** Operations on the underlying unlabeled graph
, forgetLabels
, topSort
) where
import Data.Array
import Data.Graph (Vertex, Bounds)
import Data.List (sortBy)
import Data.Maybe (mapMaybe)
import qualified Data.Graph as G
{-------------------------------------------------------------------------------
Types
-------------------------------------------------------------------------------}
-- | Adjacency-list graph: for each vertex, its outgoing edges as
-- (label, target) pairs.
type Graph e = Array Vertex [(e, Vertex)]
-- | A labeled edge: (source, label, target).
type Edge e = (Vertex, e, Vertex)
{-------------------------------------------------------------------------------
  Building graphs
-------------------------------------------------------------------------------}

-- | Construct an edge-labeled graph
--
-- This is a simple adaptation of the definition in Data.Graph
graphFromEdges :: forall key node edge. Ord key
               => [ (node, key, [(edge, key)]) ]
               -> ( Graph edge
                  , Vertex -> (node, key, [(edge, key)])
                  , key -> Maybe Vertex
                  )
graphFromEdges edges0 =
    (graph, \v -> vertex_map ! v, key_vertex)
  where
    -- Vertices are 0..n-1, assigned to input entries in ascending key order.
    max_v        = length edges0 - 1
    bounds0      = (0, max_v) :: (Vertex, Vertex)
    sorted_edges = sortBy lt edges0
    edges1       = zipWith (,) [0..] sorted_edges

    -- Adjacency lists silently drop edges whose target key is unknown
    -- ('mapMaybe mk_edge').
    graph      = array bounds0 [(v, (mapMaybe mk_edge ks)) | (v, (_, _, ks)) <- edges1]
    key_map    = array bounds0 [(v, k) | (v, (_, k, _)) <- edges1]
    vertex_map = array bounds0 edges1

    (_,k1,_) `lt` (_,k2,_) = k1 `compare` k2

    mk_edge :: (edge, key) -> Maybe (edge, Vertex)
    mk_edge (edge, key) = do v <- key_vertex key ; return (edge, v)

    -- returns Nothing for non-interesting vertices
    -- Binary search over 'key_map', which is sorted by construction.
    key_vertex :: key -> Maybe Vertex
    key_vertex k = findVertex 0 max_v
      where
        findVertex a b
          | a > b = Nothing
          | otherwise = case compare k (key_map ! mid) of
                          LT -> findVertex a (mid-1)
                          EQ -> Just mid
                          GT -> findVertex (mid+1) b
          where
            mid = a + (b - a) `div` 2
-- | Like 'graphFromEdges', discarding the key-to-vertex lookup function.
graphFromEdges' :: Ord key
                => [ (node, key, [(edge, key)]) ]
                -> ( Graph edge
                   , Vertex -> (node, key, [(edge, key)])
                   )
graphFromEdges' x = (graph, vertexInfo)
  where
    (graph, vertexInfo, _) = graphFromEdges x
-- | Flip the direction of every edge, keeping labels attached.
transposeG :: Graph e -> Graph e
transposeG g = buildG (bounds g) (reverseE g)

-- | Build a labeled graph from an explicit edge list.
buildG :: Bounds -> [Edge e] -> Graph e
buildG bnds es = accumArray (flip (:)) [] bnds (map toAdj es)
  where
    -- Turn an edge into an (index, adjacency-entry) pair for accumArray.
    toAdj (v, e, w) = (v, (e, w))

-- | All edges of the graph, reversed.
reverseE :: Graph e -> [Edge e]
reverseE g = map (\(v, e, w) -> (w, e, v)) (edges g)
{-------------------------------------------------------------------------------
  Graph properties
-------------------------------------------------------------------------------}

-- | Every vertex of the graph, in index order.
vertices :: Graph e -> [Vertex]
vertices g = indices g

-- | Every labeled edge of the graph.
edges :: Graph e -> [Edge e]
edges g = concatMap outgoing (vertices g)
  where
    outgoing v = [ (v, e, w) | (e, w) <- g ! v ]
{-------------------------------------------------------------------------------
  Operations on the underlying unlabelled graph
-------------------------------------------------------------------------------}

-- | Drop the edge labels, yielding a plain 'Data.Graph' graph.
forgetLabels :: Graph e -> G.Graph
forgetLabels g = fmap (map snd) g

-- | Topological sort, delegating to 'Data.Graph' on the unlabeled graph.
topSort :: Graph e -> [Vertex]
topSort g = G.topSort (forgetLabels g)
| Helkafen/cabal | cabal-install/Distribution/Client/Utils/LabeledGraph.hs | bsd-3-clause | 3,641 | 0 | 15 | 918 | 1,082 | 615 | 467 | 66 | 3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.