code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Control.Concurrent (forkIO, killThread)
import Control.Monad.Trans (liftIO)
import Happstack.Server.SimpleHTTP
import Rest.Driver.Happstack (apiToHandler')
import Api (api)
import ApiTypes (ServerData (..), runBlogApi)
import Example (exampleBlog)
-- | Run the server: build the example in-memory state, serve it over
-- HTTP in a background thread, and shut that thread down once the
-- process is asked to terminate.
main :: IO ()
main = do
  -- Set up the server state
  serverData <- exampleBlog
  -- Start happstack
  putStrLn "Starting happstack server on http://localhost:3000"
  -- NOTE(review): the first 'Conf' field is presumably the port (3000,
  -- matching the message above) - confirm against happstack's 'Conf'.
  tid <- forkIO $ simpleHTTP (Conf 3000 Nothing Nothing 60 Nothing) (handle serverData)
  -- Exit gracefully: block until termination is requested, then stop
  -- the server thread.
  waitForTermination
  killThread tid
-- | Request handler: adapt the REST 'api' to a Happstack handler,
-- running each request's action against the shared 'ServerData'.
handle :: ServerData -> ServerPartT IO Response
handle serverData = apiToHandler' (liftIO . runBlogApi serverData) api
| tinkerthaler/basic-invoice-rest | happstack/Main.hs | bsd-3-clause | 796 | 0 | 11 | 124 | 197 | 109 | 88 | 18 | 1 |
module Main
where
import Pipes
import Pipes.Parse
import qualified Pipes.Prelude as P
import qualified Pipes.ZMQ3 as PZ
import qualified System.ZMQ3 as Z
import Control.Concurrent(threadDelay)
import Control.Concurrent.Async
import Control.Monad (forever, unless)
import Control.Applicative((<$>), (<*>))
import qualified Control.Foldl as L
import Data.ByteString.Char8 (pack, unpack)
import Text.Printf
import System.Random (randomRIO)
-- | Endlessly publish random weather updates on the given ZeroMQ
-- socket.  Each message is a space-separated "zipcode temperature
-- humidity" triple; subscribers filter on the zipcode prefix.
pubServerThread :: Z.Sender t => Z.Socket t -> IO r
pubServerThread s = forever $ do
  threadDelay (25) -- be gentle with the CPU
  zipcode <- randomRIO (10000::Int, 11000)
  temperature <- randomRIO (-10::Int, 35)
  humidity <- randomRIO (10::Int, 60)
  let update = pack $ unwords [show zipcode, show temperature, show humidity]
  Z.send s [] update
-- | Drain a producer with a pure left fold from "Control.Foldl".
fold' :: (Monad m )=> L.Fold a b -> Producer a m () -> m b
fold' (L.Fold step begin done) = P.fold step begin done
-- | Precompose a fold with a pure function; will be part of foldl later on
-- @fold (mapped f folder) list == fold folder (map f list)@
mapped :: (a -> b) -> L.Fold b r -> L.Fold a r
mapped f (L.Fold step begin done) =
  L.Fold (\acc input -> step acc (f input)) begin done
-- | Integer mean of the inputs (sum `div` count).  Note: 'div' raises a
-- divide-by-zero error when the input stream is empty.
average :: L.Fold Int Int
average = div <$> L.sum <*> L.length
-- | Wire a ZeroMQ pub server and sub client together over an in-process
-- transport, subscribe to zipcode "10001", and report averages over
-- batches of ten updates.
main :: IO ()
main = do
  -- create and use exactly one context in a single process
  Z.withContext $ \ctx ->
    Z.withSocket ctx Z.Pub $ \pubSocket ->
    Z.withSocket ctx Z.Sub $ \subSocket -> do
      Z.bind pubSocket "inproc://pubserver"
      putStrLn "Starting pub server"
      -- NOTE(review): the Async handle is discarded, so failures in the
      -- publisher are not observed; the thread dies with the process.
      async $ pubServerThread pubSocket
      Z.connect subSocket "inproc://pubserver"
      -- prefix subscription: only messages starting with "10001" arrive
      Z.subscribe subSocket (pack "10001")
      evalStateT reporter (processedData subSocket)
  where
    -- Repeatedly fold ten parsed updates into averages and print a
    -- report, until the producer is exhausted.
    reporter :: StateT (Producer (Int, Int, Int) IO ()) IO ()
    reporter = loop
      where
        loop = do
          (avgTemp, avgHum) <- fold' averages (input >-> P.take 10)
          liftIO $ printf "-- Report: average temperature is %d°C, average humidity is %d%% \n" avgTemp avgHum
          eof <- isEndOfInput
          unless eof loop
    -- Average the temperature and humidity components of each triple
    -- in a single pass over the input.
    averages :: L.Fold (Int, Int, Int) (Int, Int)
    averages =
      let avgTemp :: L.Fold (Int, Int, Int) Int
          avgTemp = mapped (\(_, t, _) -> t) average
          avgHumidity :: L.Fold (Int, Int, Int) Int
          avgHumidity = mapped (\(_, _, h) -> h) average
      in (,) <$> avgTemp <*> avgHumidity
    -- Parse each raw message into (zipcode, temperature, humidity).
    -- NOTE(review): the three-element list pattern and 'read' are
    -- partial - a malformed message crashes the consumer.
    processedData :: Z.Socket Z.Sub -> Producer (Int, Int, Int) IO ()
    processedData subSocket = for (PZ.fromZMQ subSocket) $ \bs -> do
      let [zipcode, temperature, humidity] = map read $ words (unpack bs)
      liftIO $ printf "At NY City (%d), temperature of %d and humidity %d\n" zipcode temperature humidity
      yield (zipcode, temperature, humidity)
| PierreR/pipes-zmq3 | sub.hs | bsd-3-clause | 2,998 | 0 | 17 | 873 | 977 | 512 | 465 | 60 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative
import Control.Monad
import Data.List (intercalate)
import Data.Monoid (Monoid(..))
import ListUtils (sortOn)
import Parser (CostCentre(..), CostCentreData(..), TimeAlloc(..))
import Parser (parser)
import System.Environment (getArgs)
import TextUtils (showText)
import qualified Data.Attoparsec.Text.Lazy as P
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.IO as TLIO
import qualified GUI
import qualified Graphics.UI.WX as WX
import qualified Parser
-- | Read a GHC profiling report named on the command line, print the
-- most expensive cost centres, then launch the wx GUI on the parse.
main :: IO ()
main = do
  -- NOTE(review): partial pattern - fails unless exactly one argument
  -- is supplied.
  [filename] <- getArgs
  parseRes <- P.parse parser <$> TLIO.readFile filename
  res <-
    case parseRes of
      P.Fail remaining contexts err ->
        -- show only the first ten words of the unconsumed input
        fail $
        "Parse error near " ++ (show . TL.intercalate " " . take 10 . TL.splitOn " ") remaining ++
        " in (" ++ intercalate "." contexts ++ "): " ++ show err
      P.Done _ res -> return res
  let
    -- key a cost centre by its fully qualified name, carrying its
    -- individual and inherited time/alloc figures
    pair cc =
      ( Parser.ccFullyQualifiedName cc
      , (ccIndividual cc, ccInherited cc)
      )
    -- merge duplicate cost centres (mappend), sort by descending
    -- inherited time, keep the top 800
    accumulatedCosts =
      take 800 .
      sortOn (negate . timePercent . snd . snd) .
      Map.toList .
      Map.fromListWith mappend .
      map (pair . ccData) $ Parser.allCostCentres res
  forM_ accumulatedCosts $ \(fqName, (individual, inherited)) ->
    TIO.putStrLn $ T.concat [fqName, ":", showTime inherited, "(", showTime individual, ")"]
  WX.start $ GUI.gui res
  where
    showTime = showText . timePercent
| Peaker/HaskProfileGui | Main.hs | bsd-3-clause | 1,549 | 0 | 22 | 315 | 508 | 282 | 226 | 44 | 2 |
module Hess.Generate
( generate
) where
import Data.List
import Hess.Type
-- | Render a single CSS definition: selectors, optional pseudoclasses
-- (each prefixed with @:@), then the property block.
generateDef :: Definition -> String
generateDef (Def sels pseudoclasses properties) = ss ++ pcs ++ " { " ++ props ++ " }"
  where
    ss = intercalate ", " $ map show sels
    props = intercalate "; " $ map show properties
    -- 'null' instead of 'length ... > 0': constant-time and idiomatic
    pcs
      | null pseudoclasses = ""
      | otherwise = ":" ++ intercalate ":" (map show pseudoclasses)
-- | Render a whole stylesheet, one generated definition per line.
generate :: [Definition] -> String
generate defs = intercalate "\n" (map generateDef defs)
| vincenthz/hess | Hess/Generate.hs | bsd-3-clause | 493 | 10 | 11 | 110 | 176 | 91 | 85 | 14 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Console.Terminal.Color
-- Copyright : (C) 2013 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability : non-portable
--
----------------------------------------------------------------------------
module System.Console.Terminal.Color
( Color(..)
, AsColor(..)
, HasColor(..)
) where
import Control.Applicative
import Control.Lens
import Data.Ix
import Data.Data
import GHC.Generics
import System.Console.Terminal.Util
import qualified System.Console.ANSI as ANSI
#ifdef USE_TERMINFO
import qualified System.Console.Terminfo.Color as Terminfo
#endif
-- | The eight basic terminal colors.  Declaration order is significant:
-- the derived 'Enum', 'Ord', and 'Ix' instances follow it, and it
-- matches the constructor order of the ANSI and terminfo mappings below.
data Color
  = Black
  | Red
  | Green
  | Yellow
  | Blue
  | Magenta
  | Cyan
  | White
  deriving (Eq,Ord,Show,Read,Ix,Enum,Bounded,Data,Typeable,Generic)
-- Template Haskell: generates the 'HasColor' class with a 'color' lens.
makeClassy ''Color
-- | Types that are, or may contain, a 'Color'.  The profunctor @p@ and
-- functor @f@ parameters let each instance decide how strong '_Color'
-- is (an equality, iso, or prism - see the examples below).
class AsColor p f t where
  -- |
  -- @
  -- '_Color' :: 'Equality'' 'Color' 'Color'
  -- '_Color' :: 'Iso'' 'ANSI.Color' 'Color'
  -- '_Color' :: 'Prism'' 'Terminfo.Color' 'Color'
  -- @
  _Color :: Overloaded' p f t Color
  -- |
  -- @
  -- '_Black' :: 'Prism'' 'Color' ()
  -- '_Red' :: 'Prism'' 'Color' ()
  -- ...
  -- @
  _Black, _Red, _Green, _Yellow, _Blue, _Magenta, _Cyan, _White :: (Choice p, Applicative f) => Overloaded' p f t ()
  -- default definitions: focus the contained 'Color', then match the
  -- specific shade with 'eq'
  _Black = _Color.eq Black
  _Red = _Color.eq Red
  _Green = _Color.eq Green
  _Yellow = _Color.eq Yellow
  _Blue = _Color.eq Blue
  _Magenta = _Color.eq Magenta
  _Cyan = _Color.eq Cyan
  _White = _Color.eq White
-- | A 'Color' trivially contains itself; '_Color' is the identity.
instance AsColor p f Color where
  _Color = id
#ifdef USE_TERMINFO
-- | Terminfo colors are a superset of the eight basic shades:
-- 'Terminfo.ColorNumber' values have no 'Color' counterpart, so this
-- '_Color' is a 'Prism' rather than an 'Iso'.
instance (Choice p, Applicative f) => AsColor p f Terminfo.Color where
  _Color = prism' bt seta where
    bt Black = Terminfo.Black
    bt Red = Terminfo.Red
    bt Green = Terminfo.Green
    bt Yellow = Terminfo.Yellow
    bt Blue = Terminfo.Blue
    bt Magenta = Terminfo.Magenta
    bt Cyan = Terminfo.Cyan
    bt White = Terminfo.White
    seta Terminfo.Black = Just Black
    seta Terminfo.Red = Just Red
    seta Terminfo.Green = Just Green
    seta Terminfo.Yellow = Just Yellow
    seta Terminfo.Blue = Just Blue
    seta Terminfo.Magenta = Just Magenta
    seta Terminfo.Cyan = Just Cyan
    seta Terminfo.White = Just White
    seta Terminfo.ColorNumber{} = Nothing
  -- the shade prisms match the terminfo constructor directly instead of
  -- going through '_Color'
  _Black = eq Terminfo.Black
  _Red = eq Terminfo.Red
  _Green = eq Terminfo.Green
  _Yellow = eq Terminfo.Yellow
  _Blue = eq Terminfo.Blue
  _Magenta = eq Terminfo.Magenta
  _Cyan = eq Terminfo.Cyan
  _White = eq Terminfo.White
#endif
-- | ANSI colors are in exact one-to-one correspondence with 'Color',
-- so '_Color' is a full 'Iso' here.
instance (Profunctor p, Functor f) => AsColor p f ANSI.Color where
  _Color = iso sa bt where
    bt Black = ANSI.Black
    bt Red = ANSI.Red
    bt Green = ANSI.Green
    bt Yellow = ANSI.Yellow
    bt Blue = ANSI.Blue
    bt Magenta = ANSI.Magenta
    bt Cyan = ANSI.Cyan
    bt White = ANSI.White
    sa ANSI.Black = Black
    sa ANSI.Red = Red
    sa ANSI.Green = Green
    sa ANSI.Yellow = Yellow
    sa ANSI.Blue = Blue
    sa ANSI.Magenta = Magenta
    sa ANSI.Cyan = Cyan
    sa ANSI.White = White
  -- 'en' presumably builds a unit prism on the given ANSI constructor
  -- (imported from "System.Console.Terminal.Util") - TODO confirm
  _Black = en ANSI.Black
  _Red = en ANSI.Red
  _Green = en ANSI.Green
  _Yellow = en ANSI.Yellow
  _Blue = en ANSI.Blue
  _Magenta = en ANSI.Magenta
  _Cyan = en ANSI.Cyan
  _White = en ANSI.White
-- | An 'ANSI.Color' has a 'Color' via the '_Color' iso above.
instance HasColor ANSI.Color where
  color = _Color
| lens/terminal | src/System/Console/Terminal/Color.hs | bsd-3-clause | 3,731 | 0 | 10 | 935 | 978 | 530 | 448 | 69 | 0 |
module Utils where
import qualified Data.Aeson as Aeson
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy.Char8 as BSL
import qualified Data.Text.Encoding as TE
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as TL
import Web.Scotty.Internal.Types
-- | Strictify a lazy 'TL.Text' and encode it as UTF-8 bytes.
tlToBS :: TL.Text -> ByteString
tlToBS lazyText = TE.encodeUtf8 (TL.toStrict lazyText)
-- | Turn a lazy ByteString into lazy 'Text' byte-per-'Char' (Char8
-- semantics: each byte becomes the char with that code point; no UTF-8
-- decoding happens here).
bslToText :: BSL.ByteString -> Text
bslToText bytes = TL.pack (BSL.unpack bytes)
-- | All values bound to the given key among the request parameters.
paramValue :: Text -> [Param] -> [Text]
paramValue key params = [snd p | p <- params, fst p == key]
-- | Does this parameter's key equal the given name?
hasParam :: Text -> Param -> Bool
hasParam name param = fst param == name
-- | Decode an optional JSON value into the target type; 'Nothing' on
-- absence or on a failed conversion.
parseValue :: Aeson.FromJSON a => Maybe Aeson.Value -> Maybe a
parseValue maybeValue = do
  value <- maybeValue
  case Aeson.fromJSON value of
    Aeson.Success parsed -> Just parsed
    Aeson.Error _ -> Nothing
| freizl/hoauth2 | hoauth2-example/src/Utils.hs | bsd-3-clause | 773 | 0 | 9 | 122 | 270 | 151 | 119 | 21 | 2 |
{-# OPTIONS_GHC -Wall #-}
module Euler.E096 ( main, isSolved, sumIs45Filter, blah ) where
import Control.Monad ( foldM )
import Data.IntSet ( IntSet, (\\) )
import qualified Data.IntSet as IS
import Data.List ( transpose )
import Data.Maybe
import Data.Vector ( Vector, (!), (//) )
import qualified Data.Vector as V
import Euler.E96Data ( e96Data )
-- | Split the raw puzzle file into 9x9 digit grids.  The input comes in
-- chunks of 10 lines: a title line followed by 9 rows of digits.
parse :: [String] -> [[[Int]]]
parse xs = case splitAt 10 xs of
  ([],_) -> []
  (x,others) -> parseOnePuzzle x:parse others
  where
    -- NOTE(review): 'read [d]' errors on non-digit characters; the
    -- bundled data is trusted to be well-formed.
    parseOneLine :: String -> [Int]
    parseOneLine = map (\d -> read [d])
    -- drop the title line, parse the 9 digit rows
    parseOnePuzzle :: [String] -> [[Int]]
    parseOnePuzzle (_:blahs) = map parseOneLine blahs
    parseOnePuzzle _ = error "parseOnePuzzle fail"
-- | Index groups into the flat 81-cell board: the 9 rows, 9 columns and
-- 9 3x3 blocks.  Each group must end up containing the digits 1..9.
allCombos :: [[Int]]
allCombos = rows ++ cols ++ blocks
  where
    -- row r covers flat indices 9r .. 9r+8
    rows :: [[Int]]
    rows = map row [0..8]
      where
        row r = map (9*r +) [0..8]
    cols :: [[Int]]
    cols = transpose rows
    blocks :: [[Int]]
    blocks = [getBlock rows kr kc | kr <- [0..2], kc <- [0..2]]
      where
        -- select the 9 flat indices making up block (kr, kc)
        getBlock :: [[Int]] -> Int -> Int -> [Int]
        getBlock xs kr kc = concatMap filterRow filteredCols
          where
            filterRow :: [Int] -> [Int]
            filterRow rs = map (rs !!) rowInds
            filteredCols :: [[Int]]
            filteredCols = map (xs !!) colInds
            rowInds, colInds :: [Int]
            rowInds = map (kr*3 +) [0..2]
            colInds = map (kc*3 +) [0..2]
-- | The puzzles parsed from the bundled Project Euler data set.
rawPuzzles :: [[[Int]]]
rawPuzzles = parse e96Data
-- | A cell: either a solved digit, or the set of still-possible digits.
data Entry = Known Int | Unknown IntSet deriving Eq
-- | A board: 81 cells in row-major order.
data Sudoku = Sudoku !(Vector Entry) deriving Eq
-- | Render the board as 9 lines of cells; unsolved cells print their
-- candidate list, so lines need not be exactly 9 characters wide.
instance Show Sudoku where
  show (Sudoku v) = init $ unlines $ map (concatMap show) entries
    where
      entries :: [[Entry]]
      entries = f $ V.toList v
      -- chop the flat 81-cell list back into rows of 9
      f x = case splitAt 9 x of
        ([],_) -> []
        (x', others) -> x':(f others)
instance Show Entry where
  show (Known k) = show k
  show (Unknown ks) = show (IS.toList ks)
-- | Is this cell already solved?
isKnown :: Entry -> Bool
isKnown entry = case entry of
  Known _ -> True
  Unknown _ -> False
-- | Is this cell still undecided?
isUnknown :: Entry -> Bool
isUnknown entry = not (isKnown entry)
-- | Interpret an input digit: 0 marks a blank cell (all nine digits
-- possible), anything else is a given.
toEntry :: Int -> Entry
toEntry n
  | n == 0 = Unknown (IS.fromList [1..9])
  | otherwise = Known n
-- | Build a board from a 9x9 grid of digits (row-major flattening).
toSudoku :: [[Int]] -> Sudoku
toSudoku grid = Sudoku (V.fromList (map toEntry (concat grid)))
-- | Every board from the bundled data.
puzzles :: [Sudoku]
puzzles = map toSudoku rawPuzzles
-- | 1+2+...+9: the sum of any complete row, column or block.
totalSum :: Int
totalSum = sum [1..9]
-- take a list of entries, collect the known numbers, and filter these out of the unknown numbers
-- e.g. simpleFilter [Know 3, Unknown [1,2,3], Unknown [3,4]] == [Known 3, Unknown [1,2], Known 4]
-- Returns 'Nothing' when two cells in the group share the same solved
-- digit (a contradiction).  Otherwise iterates to a fixpoint, promoting
-- singleton candidate sets to 'Known' along the way.
simpleFilter :: [Entry] -> Maybe [Entry]
simpleFilter entries0
  | IS.size knownSet /= length knownList = Nothing -- two knowns are the same - invalid puzzle
  | entries0 /= entries = simpleFilter entries
  | otherwise = Just entries
  where
    entries = map myFilter entries0
    knownSet :: IntSet
    knownSet = IS.fromList knownList
    -- every digit already fixed somewhere in this group
    knownList = foldr g [] entries0
      where
        g (Known k) acc = k : acc
        g _ acc = acc
    -- strip the fixed digits from a cell's candidates; a single
    -- remaining candidate solves the cell
    myFilter :: Entry -> Entry
    myFilter (Unknown oldSet)
      | newSetSize == 1 = Known (IS.findMin newSet)
      | otherwise = Unknown newSet
      where
        newSetSize = IS.size newSet
        newSet = oldSet \\ knownSet
    myFilter known@(Known _) = known
-- | Ad-hoc sample group for exercising the filters interactively.
blah :: [Entry]
blah = [Known 1, Known 2, Known 3, Known 4, Unknown (IS.fromList [5,7,8]), Known 6, Unknown (IS.fromList [7,8]), Unknown (IS.fromList [5,7]), Known 9]
--sumIs45Filter :: [Entry] -> Maybe [Entry]
-- NOTE(review): work in progress.  'remainder' is computed but unused
-- and only the raw candidate lists are returned; presumably this was
-- meant to prune candidates using the sum-to-45 constraint.  The type
-- signature is commented out, so the inferred type is whatever the
-- body yields.  TODO finish or remove (it is exported).
sumIs45Filter entries0 = unknowns
  where
    knownSum = sum $ map (\(Known k) -> k) $ filter isKnown entries0
    remainder = totalSum - knownSum
    unknowns = map (\(Unknown k) -> IS.toList k) $ filter isUnknown entries0
    -- unknowns
-- | Apply 'simpleFilter' to one index group and write the filtered
-- cells back into the board; 'Nothing' propagates a contradiction.
filterGroup :: Sudoku -> [Int] -> Maybe Sudoku
filterGroup (Sudoku cells) idxs =
  fmap rebuild (simpleFilter (map (cells !) idxs))
  where
    rebuild newGroup = Sudoku (cells // zip idxs newGroup)
--combinations :: [[Int]] -> [[Int]]
--combinations (x:xs) = concatMap (\x' -> x':combos) x
-- where
-- combos = combinations xs
-- | Filter every row, column and block, repeating until the board stops
-- changing.  'Nothing' means some group reached a contradiction.
--
-- Rewritten with Maybe's monad instead of 'isNothing'/'fromJust': same
-- behaviour, no partial functions.
filterAll :: Sudoku -> Maybe Sudoku
filterAll sudoku0 = do
  sudoku <- foldM filterGroup sudoku0 allCombos
  if sudoku0 /= sudoku
    then filterAll sudoku
    else Just sudoku
--guess :: (Int,Int) -> Sudoku -> Maybe Sudoku
--guess (idx, pickMe) (Sudoku v0) = filterAll (Sudoku v)
-- where
-- v = v0 // [(idx, newSet)]
-- newSet = case v0 ! idx of
-- (Known _) -> error "guess was told to replace an already solved number"
-- (Unknown is) -> if IS.member pickMe is
-- then Known pickMe
-- else error $ "guess was told to guess a number " ++
-- show pickMe ++ " but that isn't one of the candidates " ++ show (IS.toList is)
-- | A board is solved when every cell holds a 'Known' digit.
isSolved :: Sudoku -> Bool
isSolved (Sudoku cells) = V.all isKnown cells
-- | Each puzzle after constraint propagation ('Nothing' = contradiction).
filteredPuzzles :: [Maybe Sudoku]
filteredPuzzles = fmap filterAll puzzles
-- | Demo entry point: print the second puzzle after propagation.
main :: IO ()
main = do
  print (filteredPuzzles !! 1)
-- putStrLn ""
-- print (filteredPuzzles !! 1 >>= guess (1,4) >>= guess (2,5))
| ghorn/euler | Euler/E096.hs | bsd-3-clause | 5,139 | 0 | 14 | 1,340 | 1,688 | 910 | 778 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Reflex.Dom.Widget.Resize where
import Reflex.Class
import Reflex.Time
import Reflex.Dom.Builder.Class
import Reflex.Dom.Builder.Immediate
import Reflex.Dom.Class
import Reflex.Dom.Widget.Basic
import Reflex.PerformEvent.Class
import Reflex.PostBuild.Class
import Reflex.TriggerEvent.Class
import Control.Monad
import Control.Monad.Fix
import Control.Monad.IO.Class
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import GHCJS.DOM.Element
import GHCJS.DOM.EventM (on)
import qualified GHCJS.DOM.GlobalEventHandlers as Events (scroll)
import GHCJS.DOM.Types (MonadJSM, liftJSM, uncheckedCastTo, HTMLElement(..))
import GHCJS.DOM.HTMLElement (getOffsetWidth, getOffsetHeight)
import qualified GHCJS.DOM.Types as DOM
-- | A widget that wraps the given widget in a div and fires an event when resized.
-- Adapted from @github.com\/marcj\/css-element-queries@
--
-- This function can cause strange scrollbars to appear in some circumstances.
-- These can be hidden with pseudo selectors, for example, in webkit browsers:
-- .wrapper *::-webkit-scrollbar { width: 0px; background: transparent; }
--
-- Equivalent to 'resizeDetectorWithStyle' with an empty style string.
resizeDetector :: (MonadJSM m, DomBuilder t m, PostBuild t m, TriggerEvent t m, PerformEvent t m, MonadHold t m, DomBuilderSpace m ~ GhcjsDomSpace, MonadJSM (Performable m), MonadFix m) => m a -> m (Event t (), a)
resizeDetector = resizeDetectorWithStyle ""
-- | Like 'resizeDetector', wrapping the widget in a div carrying the
-- given inline style.
resizeDetectorWithStyle :: (MonadJSM m, DomBuilder t m, PostBuild t m, TriggerEvent t m, PerformEvent t m, MonadHold t m, DomBuilderSpace m ~ GhcjsDomSpace, MonadJSM (Performable m), MonadFix m)
  => Text -- ^ A css style string. Warning: It should not contain the "position" style attribute.
  -> m a -- ^ The embedded widget
  -> m (Event t (), a) -- ^ An 'Event' that fires on resize, and the result of the embedded widget
resizeDetectorWithStyle styleString = resizeDetectorWithAttrs ("style" =: styleString)
-- | Like 'resizeDetector', but the wrapper div takes arbitrary
-- attributes.  The trick: two hidden overflow containers ("expand" and
-- "shrink") are kept scrolled to their extremes; any resize of the
-- parent disturbs their scroll offsets, which fires a scroll event that
-- we turn into the resize event.
resizeDetectorWithAttrs :: (MonadJSM m, DomBuilder t m, PostBuild t m, TriggerEvent t m, PerformEvent t m, MonadHold t m, DomBuilderSpace m ~ GhcjsDomSpace, MonadJSM (Performable m), MonadFix m)
  => Map Text Text -- ^ A map of attributes. Warning: It should not modify the "position" style attribute.
  -> m a -- ^ The embedded widget
  -> m (Event t (), a) -- ^ An 'Event' that fires on resize, and the result of the embedded widget
resizeDetectorWithAttrs attrs w = do
  let childStyle = "position: absolute; left: 0; top: 0;"
      containerAttrs = "style" =: "position: absolute; left: 0; top: 0; right: 0; bottom: 0; overflow: scroll; z-index: -1; visibility: hidden;"
  (parent, (expand, expandChild, shrink, w')) <- elAttr' "div" (Map.unionWith (<>) attrs ("style" =: "position: relative;")) $ do
    w' <- w
    elAttr "div" containerAttrs $ do
      (expand, (expandChild, _)) <- elAttr' "div" containerAttrs $ elAttr' "div" ("style" =: childStyle) $ return ()
      (shrink, _) <- elAttr' "div" containerAttrs $ elAttr "div" ("style" =: (childStyle <> "width: 200%; height: 200%;")) $ return ()
      return (expand, expandChild, shrink, w')
  let p = uncheckedCastTo HTMLElement $ _element_raw parent
      -- re-arm the detector: size the expand child slightly beyond its
      -- container, push both containers' scroll offsets to the maximum,
      -- and remember the parent's current size
      reset = do
        let e = uncheckedCastTo HTMLElement $ _element_raw expand
            s = _element_raw shrink
        eow <- getOffsetWidth e
        eoh <- getOffsetHeight e
        let ecw = eow + 10
            ech = eoh + 10
        setAttribute (_element_raw expandChild) ("style" :: Text) (childStyle <> "width: " <> T.pack (show ecw) <> "px;" <> "height: " <> T.pack (show ech) <> "px;")
        esw <- getScrollWidth e
        setScrollLeft e esw
        esh <- getScrollHeight e
        setScrollTop e esh
        ssw <- getScrollWidth s
        setScrollLeft s ssw
        ssh <- getScrollHeight s
        setScrollTop s ssh
        lastWidth <- getOffsetWidth p
        lastHeight <- getOffsetHeight p
        return (Just lastWidth, Just lastHeight)
      -- only fire (and re-arm) when the parent's size actually changed
      resetIfChanged ds = do
        pow <- getOffsetWidth p
        poh <- getOffsetHeight p
        if ds == (Just pow, Just poh)
          then return Nothing
          else fmap Just reset
  pb <- delay 0 =<< getPostBuild
  expandScroll <- wrapDomEvent (DOM.uncheckedCastTo DOM.HTMLElement $ _element_raw expand) (`on` Events.scroll) $ return ()
  shrinkScroll <- wrapDomEvent (DOM.uncheckedCastTo DOM.HTMLElement $ _element_raw shrink) (`on` Events.scroll) $ return ()
  size0 <- performEvent $ fmap (const $ liftJSM reset) pb
  -- recursive knot: resize events update the stored dimensions, and the
  -- stored dimensions gate which scroll events count as resizes
  rec resize <- performEventAsync $ fmap (\d cb -> (liftIO . cb) =<< liftJSM (resetIfChanged d)) $ tag (current dimensions) $ leftmost [expandScroll, shrinkScroll]
      dimensions <- holdDyn (Nothing, Nothing) $ leftmost [ size0, fmapMaybe id resize ]
  return (fmapMaybe void resize, w')
| mightybyte/reflex-dom | reflex-dom-core/src/Reflex/Dom/Widget/Resize.hs | bsd-3-clause | 4,966 | 0 | 21 | 951 | 1,387 | 719 | 668 | 83 | 2 |
module Main where
import System.Console.Haskeline
import System.IO
import System.Environment
import System.Exit
import System.FilePath ((</>), addTrailingPathSeparator)
import Data.Maybe
import Data.Version
import Control.Monad.Trans.State.Strict ( execStateT, get, put )
import Control.Monad.Trans ( liftIO )
import Control.Monad ( when )
import Core.CoreParser
import Core.ShellParser
import Core.TT
import Core.Typecheck
import Core.ProofShell
import Core.Evaluate
import Core.Constraints
import Idris.AbsSyntax
import Idris.Parser
import Idris.REPL
import Idris.ElabDecls
import Idris.Primitives
import Idris.Imports
import Idris.Error
import Util.System ( getLibFlags, getIdrisLibDir, getIncFlags )
import Util.DynamicLinker
import Pkg.Package
import Paths_idris
-- | Main program: read the command line options and hand over to
-- 'runIdris', which either services an informational flag, builds
-- packages, or starts the REPL.
main :: IO ()
main = do
  args <- getArgs
  -- the final interpreter state is not needed; discard it so main can
  -- carry an explicit IO () signature
  _ <- execStateT (runIdris (parseArgs args)) idrisInit
  return ()
-- | Dispatch on the parsed options: informational flags print and exit,
-- package clean/build requests are serviced, and otherwise we drop into
-- the main REPL.
runIdris :: [Opt] -> Idris ()
runIdris opts = do
  -- each of these helpers exits the process when its flag is present
  when (Ver `elem` opts) $ liftIO showver
  when (Usage `elem` opts) $ liftIO usage
  when (ShowIncs `elem` opts) $ liftIO showIncs
  when (ShowLibs `elem` opts) $ liftIO showLibs
  when (ShowLibdir `elem` opts) $ liftIO showLibdir
  -- --clean requests: clean the named packages, then exit
  case opt getPkgClean opts of
    [] -> return ()
    fs -> do liftIO $ mapM_ cleanPkg fs
             liftIO $ exitWith ExitSuccess
  -- package build requests take precedence over the REPL
  case opt getPkg opts of
    [] -> idrisMain opts -- in Idris.REPL
    fs -> liftIO $ mapM_ (buildPkg (WarnOnly `elem` opts)) fs
-- | Print the usage text and exit successfully.
usage :: IO ()
usage = do putStrLn usagemsg
           exitWith ExitSuccess

-- | Print the version banner and exit successfully.
showver :: IO ()
showver = do putStrLn $ "Idris version " ++ ver
             exitWith ExitSuccess

-- | Print the C linker flags and exit (for external build tooling).
showLibs :: IO ()
showLibs = do libFlags <- getLibFlags
              putStrLn libFlags
              exitWith ExitSuccess

-- | Print the library install directory and exit.
showLibdir :: IO ()
showLibdir = do dir <- getIdrisLibDir
                putStrLn dir
                exitWith ExitSuccess

-- | Print the C include flags and exit (for external build tooling).
showIncs :: IO ()
showIncs = do incFlags <- getIncFlags
              putStrLn incFlags
              exitWith ExitSuccess
-- | The help text printed by 'usage'.  The dashed rule is sized to the
-- version string so it underlines the banner exactly.
usagemsg :: String
usagemsg = "Idris version " ++ ver ++ "\n" ++
           "--------------" ++ map (const '-') ver ++ "\n" ++
           "Usage: idris [input file] [options]\n" ++
           "Options:\n" ++
           "\t--quiet Quiet mode (for editors)\n" ++
           "\t--check Type check only\n" ++
           "\t-o [file] Specify output filename\n" ++
           "\t-i [dir] Add directory to the list of import paths\n" ++
           "\t--ibcsubdir [dir] Write IBC files into sub directory\n" ++
           "\t--noprelude Don't import the prelude\n" ++
           "\t--total Require functions to be total by default\n" ++
           "\t--warnpartial Warn about undeclared partial functions\n" ++
           "\t--typeintype Disable universe checking\n" ++
           "\t--log [level] Type debugging log level\n" ++
           "\t-S Do no further compilation of code generator output\n" ++
           "\t-c Compile to object files rather than an executable\n" ++
           "\t--ideslave Ideslave mode (for editors; in/ouput wrapped in s-expressions)\n" ++
           "\t--libdir Show library install directory and exit\n" ++
           "\t--link Show C library directories and exit (for C linking)\n" ++
           "\t--include Show C include directories and exit (for C linking)\n" ++
           "\t--target [target] Type the target: C, Java, bytecode, javascript, node\n"
| byorgey/Idris-dev | src/Main.hs | bsd-3-clause | 3,554 | 0 | 28 | 1,030 | 696 | 360 | 336 | 80 | 3 |
module Color.Spectrum.Observer where
-- | A model of an observer's chromatic response.
-- NOTE(review): 'XYZ' is not imported in this module, and the
-- functional-dependency syntax needs MultiParamTypeClasses and
-- FunctionalDependencies, which are not enabled here - presumably this
-- is a fragment; confirm it compiles in context.
class Num a => ObserverModel m a | m -> a where
  -- range over which 'chromaticResponse' is defined (units unspecified
  -- here; presumably wavelength - TODO confirm)
  chromaticResponseWindow :: m -> (a, a)
  chromaticResponse :: m -> a -> a -> XYZ a
{- |
Module : $Header$
Copyright : (c) Simon Bergot
License : BSD3
Maintainer : simon.bergot@gmail.com
Stability : unstable
Portability : portable
Subparsers allows the creation of complex command line
applications organized around commands.
-}
module System.Console.ArgParser.SubParser (
mkSubParser
, mkSubParserWithName
) where
import qualified Data.List as L
import qualified Data.Map as M
import Data.Maybe
import System.Console.ArgParser.BaseType
import System.Console.ArgParser.Parser
import System.Console.ArgParser.Run
import System.Environment
-- | Create a parser composed of a list of subparsers.
--
-- Each subparser is associated with a command which the user
-- must type to activate.  The application's name is taken from the
-- program name.
mkSubParser :: [(Arg, CmdLnInterface a)] -> IO (CmdLnInterface a)
mkSubParser parsers = do
  progName <- getProgName
  return (mkSubParserWithName progName parsers)
-- | Same that "mkSubParser" but allows a custom name
mkSubParserWithName :: String -> [(Arg, CmdLnInterface a)] -> CmdLnInterface a
mkSubParserWithName name parsers = CmdLnInterface
  parser cmdSpecialFlags name Nothing Nothing Nothing
  where
    -- the base parser always fails ("command not found"), so results
    -- can only come from the command special flag below
    parser = liftParam EmptyParam
    cmdSpecialFlags = command:defaultSpecialFlags
    command = mkSpecialFlag name parsers
-- | Build the special flag that treats the first positional argument as
-- a command name and re-runs argument parsing with the matching
-- sub-interface, renamed "topname subname" for help output.
mkSpecialFlag :: String -> [(Arg, CmdLnInterface a)] -> SpecialFlag a
mkSpecialFlag topname subapps = (parser, action) where
  parser = liftParam $ CommandParam cmdMap id
  action _ (posargs, flagargs) =
    case listToMaybe posargs >>= flip M.lookup cmdMap of
      -- NOTE(review): reached only when the parser above matched, in
      -- which case the lookup should succeed - hence "impossible"
      Nothing -> error "impossible"
      Just subapp -> parseNiceArgs
        (drop 1 posargs, flagargs)
        (subapp `setAppName` (topname ++ " " ++ getAppName subapp))
  cmdMap = M.fromList subapps
-- | A parameter spec that always fails; used as the base parser so that
-- only the command dispatch can produce a result.
data EmptyParam a = EmptyParam
instance ParamSpec EmptyParam where
  getParser _ = Parser $ \args -> (Left "command not found", args)
  getParamDescr _ = []
-- | Dispatch on the first positional argument: carries the command
-- table and a conversion applied to "did the argument name a command".
data CommandParam appT resT = CommandParam
  (M.Map String (CmdLnInterface appT))
  (Bool -> resT)
instance ParamSpec (CommandParam resT) where
  getParser (CommandParam cmdMap convert) = Parser cmdParser where
    cmdParser (pos, flags) = case pos of
      [] -> (Left "No command provided", (pos, flags))
      -- consume everything: the matched sub-interface reparses the rest
      arg:_ -> (Right $ convert isMatch, ([], M.empty)) where
        isMatch = arg `M.member` cmdMap
  getParamDescr (CommandParam cmdMap _) = summary:commands where
    cmds = M.elems cmdMap
    names = map getAppName cmds
    descrs = map (fromMaybe "" . getAppDescr) cmds
    -- e.g. "{cmd1,cmd2}" shown on the usage line
    summaryUsage = const $ "{" ++ L.intercalate "," names ++ "}"
    summary = ParamDescr
      summaryUsage "commands arguments" summaryUsage "" ""
    singleCmdDescr name descr = ParamDescr
      (const "") "commands arguments" (const name) descr ""
    commands = zipWith singleCmdDescr names descrs
| sbergot/ArgParser | src/System/Console/ArgParser/SubParser.hs | bsd-3-clause | 2,965 | 0 | 15 | 725 | 726 | 390 | 336 | 53 | 2 |
{-# LANGUAGE InstanceSigs #-}
module PFDS.Commons.HoodMelvilleQueue where
import qualified PFDS.Commons.Queue as Q
-- | State machine for the incremental front/rear rotation (the
-- Hood-Melville scheme): both lists are reversed step by step
-- ('Reversing'), then stitched together ('Appending') into 'Done'.
data RotationState a =
  Idle
  | Reversing Int [a] [a] [a] [a]
  | Appending Int [a] [a]
  | Done [a]
-- | A queue: front length, front list, in-progress rotation, rear
-- length, rear list (rear is kept in reverse order).
data Queue a = Q Int [a] (RotationState a) Int [a]
-- | Advance the rotation by one step.  Clause order matters: the
-- two-cons 'Reversing' case must be tried before the terminal one.
-- The counter tracks how many reversed front elements are still valid.
exec :: RotationState a -> RotationState a
exec (Reversing ok (x:f) f' (y:r) r') = Reversing (ok+1) f (x:f') r (y:r')
exec (Reversing ok [] f' [y] r') = Appending ok f' (y:r')
exec (Appending 0 f' r') = Done r'
exec (Appending ok (x:f') r') = Appending (ok-1) f' (x:r')
exec state = state
-- | Account for a head removal that happens mid-rotation: one fewer
-- element of the reversed front is still valid.
invalidate :: RotationState a -> RotationState a
invalidate (Reversing ok f f' r r') = Reversing (ok-1) f f' r r'
invalidate (Appending 0 f' (x:r')) = Done r'
invalidate (Appending ok f' r') = Appending (ok-1) f' r'
invalidate state = state
-- | Run two rotation steps, and fold a completed rotation back into
-- the queue as its new front list.
exec2 :: Queue a -> Queue a
exec2 (Q lenf front state lenr rear) =
  case exec (exec state) of
    Done front' -> Q lenf front' Idle lenr rear
    state' -> Q lenf front state' lenr rear
-- | Restore the invariant |rear| <= |front|: start a rotation when the
-- rear outgrows the front, and always advance any rotation in progress.
check :: Queue a -> Queue a
check q@(Q lenf f state lenr r) = if lenr <= lenf
  then exec2 q
  else exec2 (Q (lenf+lenr) f newstate 0 [])
  where newstate = Reversing 0 f [] r []
-- | The queue interface.  Rotation work is spread across operations via
-- 'check', in the Hood-Melville style.
instance Q.Queue Queue where
  empty :: Queue a
  empty = Q 0 [] Idle 0 []
  isEmpty :: Queue a -> Bool
  isEmpty (Q lenf _ _ _ _) = lenf == 0
  snoc :: Queue a -> a -> Queue a
  snoc (Q lenf f state lenr r) x = check (Q lenf f state (lenr+1) (x:r))
  head :: Queue a -> a
  head (Q _ [] _ _ _) = error "empty"
  head (Q _ (x:_) _ _ _) = x
  -- removing the head also invalidates one element of any rotation
  -- that is currently copying the front list
  tail :: Queue a -> Queue a
  tail (Q _ [] _ _ _) = error "empty"
  tail (Q lenf (x:f) state lenr r) = check (Q (lenf-1) f (invalidate state) lenr r)
| matonix/pfds | src/PFDS/Commons/HoodMelvilleQueue.hs | bsd-3-clause | 1,703 | 0 | 10 | 445 | 934 | 479 | 455 | 42 | 2 |
module Resolve.DNS.Helper.DNS where
import Network.Socket
import Resolve.Types
import Resolve.DNS.Types
import qualified Resolve.DNS.Helper.UDP as UDP
import qualified Resolve.DNS.Helper.LiveTCP as TCP
import qualified Resolve.DNS.Truncation as T
import Control.Exception
-- | Upstream resolver address: host and service/port of the DNS server
-- that queries are forwarded to (shared by the UDP and TCP transports).
data Config = Config { host :: HostName
                     , port :: ServiceName
                     }
-- | Build a resolver backed by both a UDP and a TCP transport to the
-- same server, combined via 'T.truncation' (presumably: retry over TCP
-- when the UDP answer is truncated - confirm against that module).
-- 'bracketOnError' tears the transports down if construction fails
-- part-way; otherwise 'delete' on the result releases both.
new :: Config -> IO (Resolver Message Message)
new c = do
  bracketOnError
    (do
      u <- UDP.new $ UDP.Config {UDP.host = host c, UDP.port = port c}
      t <- TCP.new $ TCP.Config {TCP.host = host c, TCP.port = port c}
      return $ Resolver { resolve = T.truncation $ T.Config {T.udp = resolve u, T.tcp = resolve t}
                        , delete = do
                            delete u
                            delete t
                        }
    )
    (\r -> delete r)
    return
| riaqn/resolve | src/Resolve/DNS/Transport/Helper/DNS.hs | bsd-3-clause | 903 | 0 | 17 | 318 | 274 | 154 | 120 | 22 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
-- | A library of commonly used data analysis functions.
--
-- Note that this is an initial, demo version of the library. The final version
-- will be much more fully featured.
module DataAnalysis.Library
( -- * General purpose
-- ** Streaming functions
-- $streaming
mapStream
, mapField
, filterStream
, filterField
, sumStream
, sumField
, movingGroupsOf
-- ** Lens helpers
, shown
-- * Mathematical
, exponentialMovingAverage
-- * Financial
-- ** Price differentials
, UpDown (..)
, stocksToUpDown
-- *** Lenses
, HasUpDown (..)
) where
-- Nota bene! The documentation for this module will be generated as Haddocks
-- and given to users, so it must be kept clean and understandable.
import Control.Lens
import Data.Conduit
import Data.Conduit.Analysis
import qualified Data.Conduit.List as CL
import Data.Text (Text, pack)
import Data.Time (Day)
-- $streaming
--
-- The following functions come in two flavors: those operating on the streamed
-- value itself, and those operating on a single field of the streamed value.
-- The latter are convenience functions to make many kinds of common analysis
-- simple, such as taking the logarithm of a specific field in a data
-- structure.
-- | Perform the given transformation on all values in the stream.
--
-- @
-- yieldMany [1..10] =$= mapStream (+1) =$= sumStream
-- @
mapStream :: Monad m => (a -> b) -> Conduit a m b
mapStream f = CL.map f

-- | Perform the given transformation on the given field in the stream.
--
-- @
-- yieldMany (zip [1..10] [11..20]) =$= mapField _1 (+1) =$= sumField _1
-- @
mapField :: Monad m => Lens s t a b -> (a -> b) -> Conduit s m t
mapField field f = CL.map (over field f)

-- | Keep only the values from the stream passing the given predicate function.
--
-- @
-- yieldMany [1..10] =$= filterStream even =$= sumStream
-- @
filterStream :: Monad m => (a -> Bool) -> Conduit a m a
filterStream predicate = CL.filter predicate

-- | Keep only the values from the stream where the given field passes the
-- given predicate function.
--
-- @
-- yieldMany ([1..10], [11..20]) =$= filterField _1 even =$= sumField _2
-- @
filterField :: Monad m => Lens' s a -> (a -> Bool) -> Conduit s m s
filterField field predicate = CL.filter (\s -> predicate (s ^. field))
-- | Sum all of the values in a stream.
sumStream :: (Monad m, Num a) => Consumer a m a
sumStream = CL.fold (\acc x -> acc + x) 0

-- | Sum up a specific field in a stream.
sumField :: (Monad m, Num a) => Getter s a -> Consumer s m a
sumField field = CL.fold step 0
  where
    step acc s = acc + view field s

-- | Convert a value to its textual representation.
--
-- Uses the @Show@ instance for the type.
shown :: Show a => IndexPreservingGetter a Text
shown = to (\x -> pack (show x))
-- | The difference either up or down of a stock price from one day to the
-- next.  Both components are clamped at zero (see 'stocksToUpDown'), so
-- at most one of them is positive for a given day.
data UpDown = UpDown
    { _udDate :: !Day
    , _udUp :: !Double
    , _udDown :: !Double
    }
    deriving Show
-- Template Haskell: generates the 'HasUpDown' class and field lenses.
makeClassy ''UpDown
-- | Convert a stream of stock prices to a stream of up/down values.
--
-- You must provide the names of the date and adjusted close price fields.
--
-- NOTE(review): each output pairs an element with its successor and
-- computes up = current - next, i.e. this treats the stream as
-- newest-first - confirm against the data source.
stocksToUpDown :: Monad m
               => Getter stock Day -- ^ date field
               -> Getter stock Double -- ^ adjusted close
               -> Conduit stock m UpDown
stocksToUpDown stockDate stockAdjClose =
    await >>= maybe (return ()) loop
  where
    loop today = do
      myesterday <- await
      case myesterday of
        Nothing -> return ()
        Just yesterday -> do
          -- clamp at zero so one component carries the gain, the other
          -- the loss, never both
          let ud = UpDown
                { _udDate = today ^. stockDate
                , _udUp = max 0 $ (today ^. stockAdjClose) - (yesterday ^. stockAdjClose)
                , _udDown = max 0 $ (yesterday ^. stockAdjClose) - (today ^. stockAdjClose)
                }
          yield ud
          -- slide the window: yesterday becomes the next "today"
          loop yesterday
| glebovitz/demo | src/DataAnalysis/Library.hs | mit | 4,443 | 0 | 22 | 1,248 | 757 | 428 | 329 | 74 | 2 |
--
-- | Configuration for generating X509 certificates
--
-- Users are expected to provide configuration (from file or by hand) and use
-- 'fromConfiguration' to turn them into description.
-- Then, full description can be turned into certificates via 'genCertificate'
--
-- Configuration --fromConfiguration--> Description --genCertificate--> Certificate
module Cardano.X509.Configuration
(
-- * Configuration for Certificates
TLSConfiguration(..)
, ServerConfiguration(..)
, CertConfiguration(..)
, DirConfiguration(..)
-- * Description of Certificates
, CertDescription(..)
-- * Effectful Functions
, ConfigurationKey(..)
, ErrInvalidTLSConfiguration
, ErrInvalidExpiryDays
, fromConfiguration
, decodeConfigFile
, genCertificate
) where
import Universum
import Control.Monad ((>=>))
import Crypto.PubKey.RSA (PrivateKey, PublicKey)
import Data.ASN1.OID (OIDable (..))
import Data.Hourglass (Minutes (..), Period (..), dateAddPeriod,
timeAdd)
import Data.Semigroup ((<>))
import Data.String (fromString)
import Data.X509 (DistinguishedName (..), DnElement (..),
ExtAuthorityKeyId (..), ExtBasicConstraints (..),
ExtExtendedKeyUsage (..), ExtKeyUsage (..),
ExtKeyUsageFlag (..), ExtKeyUsagePurpose (..),
ExtSubjectAltName (..), ExtSubjectKeyId (..),
ExtensionRaw, Extensions (..), PubKey (..),
SignedCertificate, extensionEncode, hashDN)
import Data.X509.Validation (ValidationChecks (..), defaultChecks)
import Data.Yaml (decodeFileEither, parseMonad, withObject)
import GHC.Generics (Generic)
import System.IO (FilePath)
import Time.System (dateCurrent)
import Time.Types (DateTime (..))
import Data.X509.Extra (parseSAN, signAlgRSA256, signCertificate)
import Data.Aeson
import qualified Data.Aeson.Types as Aeson
import qualified Data.HashMap.Lazy as HM
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.X509 as X509
--
-- Configuration of Certificates
--
-- | Foreign Configuration, pulled from within a .yaml file
data TLSConfiguration = TLSConfiguration
    { tlsCa      :: CertConfiguration -- ^ settings for the root CA certificate
    , tlsServer  :: ServerConfiguration -- ^ settings for the server certificate
    , tlsClients :: [CertConfiguration] -- ^ settings for each client certificate
    } deriving (Generic, Show, Eq)
-- | Decode a 'TLSConfiguration' from an object with @ca@, @server@ and
-- @clients@ keys.
instance FromJSON TLSConfiguration where
    parseJSON = withObject "TLSConfiguration" $ \o -> do
        ca <- o .: "ca"
        server <- o .: "server"
        clients <- o .: "clients"
        pure (TLSConfiguration ca server clients)
-- | Inverse of the 'FromJSON' instance: emits the same three keys.
instance ToJSON TLSConfiguration where
    toJSON (TLSConfiguration ca server clients) = object
        [ "ca" .= ca
        , "server" .= server
        , "clients" .= clients
        ]
-- | Output directories configuration
data DirConfiguration = DirConfiguration
    { outDirServer  :: FilePath -- ^ where the server certificate is written
    , outDirClients :: FilePath -- ^ where the client certificates are written
    , outDirCA      :: Maybe FilePath -- ^ optional output dir for the CA certificate
    } deriving (Generic, Show, Eq)
-- | Foreign Certificate Configuration
data CertConfiguration = CertConfiguration
    { certOrganization :: String -- ^ X.509 organization (O) attribute
    , certCommonName   :: String -- ^ X.509 common name (CN) attribute
    , certExpiryDays   :: Int -- ^ validity in days; must be > 0 (checked by 'genCertificate')
    } deriving (Generic, Show, Eq)
-- | Decode a 'CertConfiguration' from an object with @organization@,
-- @commonName@ and @expiryDays@ keys.
instance FromJSON CertConfiguration where
    parseJSON = withObject "CertConfiguration" $ \o -> do
        org <- o .: "organization"
        cn <- o .: "commonName"
        days <- o .: "expiryDays"
        pure (CertConfiguration org cn days)
-- | Inverse of the 'FromJSON' instance: emits the same three keys.
instance ToJSON CertConfiguration where
    toJSON (CertConfiguration org cn days) = object
        [ "organization" .= org
        , "commonName" .= cn
        , "expiryDays" .= days
        ]
-- | Foreign Server Certificate Configuration (SANS extra options)
data ServerConfiguration = ServerConfiguration
    { serverConfiguration :: CertConfiguration -- ^ the common certificate settings
    , serverAltNames      :: NonEmpty String -- ^ subject alternative names (at least one)
    } deriving (Generic, Show, Eq)
-- NOTE We keep the declaration structure 'flat' such that servers config
-- are simply client config with an extra field 'altDNS'
-- | Parse the shared certificate fields plus the extra @altDNS@ key.
--
-- Reuses the 'CertConfiguration' 'FromJSON' instance instead of duplicating
-- its parser inline, so the two cannot drift apart (the error label and the
-- parsed keys are identical to the previous inline copy).
instance FromJSON ServerConfiguration where
    parseJSON v = ServerConfiguration
        <$> parseJSON v
        <*> withObject "ServerConfiguration" (\o -> o .: "altDNS") v
-- | Flat encoding: the wrapped 'CertConfiguration' fields are emitted at the
-- same level as @altDNS@ (matching the 'FromJSON' instance above).
instance ToJSON ServerConfiguration where
    toJSON conf = object
        [ "organization" .= certOrganization cc
        , "commonName" .= certCommonName cc
        , "expiryDays" .= certExpiryDays cc
        , "altDNS" .= serverAltNames conf
        ]
      where
        cc = serverConfiguration conf
--
-- Description of Certificates
--
-- | Internal full-representation of a certificate
data CertDescription m pub priv outdir = CertDescription
    { certConfiguration :: CertConfiguration -- ^ the foreign configuration it was built from
    , certSerial        :: Int -- ^ serial number (1 = CA, 2 = server, 3.. = clients)
    , certExtensions    :: [ExtensionRaw] -- ^ X.509 v3 extensions to embed
    , certIssuer        :: DistinguishedName -- ^ DN of the signing authority
    , certSubject       :: DistinguishedName -- ^ DN of the certificate holder
    , certGenKeys       :: m (pub, priv) -- ^ action producing this certificate's key pair
    , certSigningKey    :: priv -- ^ key used to sign (the CA's, or its own for the CA)
    , certOutDir        :: outdir -- ^ where to write the result
    , certFilename      :: String -- ^ base filename (no extension)
    , certChecks        :: ValidationChecks -- ^ validation options for this certificate
    }
--
-- Effectful Functions
--
-- | Type-alias for signature readability
newtype ConfigurationKey = ConfigurationKey
    { getConfigurationKey :: String
    } deriving (Eq, Show)
-- | Thrown by 'genCertificate' when the configured expiry is not positive.
newtype ErrInvalidExpiryDays
    = ErrInvalidExpiryDays String
    deriving (Show)
instance Exception ErrInvalidExpiryDays
-- | Thrown by 'decodeConfigFile' when the YAML file cannot be decoded.
newtype ErrInvalidTLSConfiguration
    = ErrInvalidTLSConfiguration String
    deriving (Show)
instance Exception ErrInvalidTLSConfiguration
-- | Describe a list of certificates to generate & sign from a foreign config
--
-- Description can then be used with @genCertificate@ to obtain corresponding
-- certificate
fromConfiguration
    :: (Applicative m)
    => TLSConfiguration -- ^ Foreign TLS configuration / setup
    -> DirConfiguration -- ^ Output directories configuration
    -> m (pub, priv)    -- ^ Key pair generator
    -> (pub, priv)      -- ^ Initial / Root key pair
    -> (CertDescription m pub priv (Maybe String), [CertDescription m pub priv String])
    -- ^ PKI description matching provided conf, fst = CA, snd = server & clients
fromConfiguration tlsConf dirConf genKeys (caPub, caPriv) =
  let
    caDN = mkDistinguishedName (tlsCa tlsConf)
    -- The CA is self-signed: issuer == subject, and it signs with its own
    -- private key. Serial numbers are fixed: 1 = CA, 2 = server, 3.. = clients.
    caConfig = CertDescription
      { certConfiguration = tlsCa tlsConf
      , certSerial = 1
      , certExtensions = caExtensionsV3 caDN
      , certIssuer = caDN
      , certSubject = caDN
      , certGenKeys = pure (caPub, caPriv)
      , certSigningKey = caPriv
      , certOutDir = outDirCA dirConf
      , certFilename = "ca"
      , certChecks = defaultChecks
      }
    ServerConfiguration tlsServer' serverAltDNS = tlsServer tlsConf
    svDN = mkDistinguishedName tlsServer'
    -- Server certificate: issued by the CA, carries the subject alt names.
    svConfig = CertDescription
      { certConfiguration = tlsServer'
      , certSerial = 2
      , certExtensions = svExtensionsV3 svDN caDN serverAltDNS
      , certIssuer = caDN
      , certSubject = svDN
      , certGenKeys = genKeys
      , certSigningKey = caPriv
      , certOutDir = outDirServer dirConf
      , certFilename = "server"
      , certChecks = defaultChecks
      }
    -- One description per client; the first is "client", later ones are
    -- "client_1", "client_2", ... FQHN checking is disabled for clients.
    clConfigs = forEach (tlsClients tlsConf) $ \(i, tlsClient) ->
      let
        clDN = mkDistinguishedName tlsClient
        suffix = if i == 0 then "" else "_" <> show i
      in CertDescription
        { certConfiguration = tlsClient
        , certSerial = 3 + i
        , certExtensions = clExtensionsV3 clDN caDN
        , certIssuer = caDN
        , certSubject = clDN
        , certGenKeys = genKeys
        , certSigningKey = caPriv
        , certOutDir = outDirClients dirConf
        , certFilename = "client" <> suffix
        , certChecks = defaultChecks { checkFQHN = False }
        }
  in
    (caConfig, svConfig : clConfigs)
-- | Decode a configuration file (.yaml). The expected file structure is:
-- <configuration-key>:
-- tls:
-- ca: ...
-- server: ...
-- clients: ...
--
-- where the 'configuration-key' represents the target environment (dev, test,
-- bench, etc.).
decodeConfigFile
    :: (MonadIO m, MonadThrow m)
    => ConfigurationKey -- ^ Target configuration Key
    -> FilePath         -- ^ Target configuration file
    -> m TLSConfiguration
decodeConfigFile (ConfigurationKey cKey) filepath =
    decodeFileMonad filepath >>= parseMonad parser
  where
    errMsg key = "property '"<> key <> "' " <> "not found in configuration file."
    -- YAML decoding failures are rethrown as ErrInvalidTLSConfiguration.
    decodeFileMonad = (liftIO . decodeFileEither) >=> either
        (throwM . ErrInvalidTLSConfiguration . show)
        return
    -- Descend two levels: <configuration-key> then "tls".
    parser = withObject "TLS Configuration" (parseK cKey >=> parseK "tls")
    -- Look up a key in an object, failing with a helpful message when absent.
    parseK :: FromJSON a => String -> Aeson.Object -> Aeson.Parser a
    parseK key = maybe (fail $ errMsg key) parseJSON . HM.lookup (toText key)
-- | Generate & sign a certificate from a certificate description
genCertificate
    :: CertDescription IO PublicKey PrivateKey filename
    -> IO (PrivateKey, SignedCertificate)
genCertificate desc = do
    ((pub, priv), now) <- (,) <$> (certGenKeys desc) <*> dateCurrent
    let conf = certConfiguration desc
    -- Reject non-positive expiry before building anything.
    when (certExpiryDays conf <= 0) $
        throwM $ ErrInvalidExpiryDays "expiry days should be a positive integer"
    -- NOTE(review): certVersion = 2 is the zero-based encoding of X.509 v3.
    -- Validity starts one minute in the past, presumably to tolerate small
    -- clock skew between peers — confirm.
    let cert = X509.Certificate
          { X509.certVersion = 2
          , X509.certSerial = fromIntegral (certSerial desc)
          , X509.certSignatureAlg = signAlgRSA256
          , X509.certValidity = (addMinutes (-1) now, addDays (certExpiryDays conf) now)
          , X509.certPubKey = PubKeyRSA pub
          , X509.certExtensions = Extensions (Just $ certExtensions desc)
          , X509.certIssuerDN = certIssuer desc
          , X509.certSubjectDN = certSubject desc
          }
    (priv,) <$> signCertificate (certSigningKey desc) cert
  where
    addDays :: Int -> DateTime -> DateTime
    addDays n time@(DateTime date _) =
        time { dtDate = dateAddPeriod date (mempty { periodDays = n }) }
    addMinutes :: Int -> DateTime -> DateTime
    addMinutes n time =
        timeAdd time (Minutes $ fromIntegral n)
--
-- Internals
--
-- | X.509 v3 extensions for the self-signed CA certificate: certificate- and
-- CRL-signing key usage, CA basic constraints (path length 0), and matching
-- subject/authority key identifiers (both derived from the CA's own DN).
caExtensionsV3 :: DistinguishedName -> [ExtensionRaw]
caExtensionsV3 dn =
    [ extensionEncode True (ExtKeyUsage [KeyUsage_keyCertSign, KeyUsage_cRLSign])
    , extensionEncode True (ExtBasicConstraints True (Just 0))
    , extensionEncode False (ExtSubjectKeyId keyId)
    , extensionEncode False (ExtAuthorityKeyId keyId)
    ]
  where
    -- Self-signed, so subject and authority identifiers coincide.
    keyId = hashDN dn
-- | X.509 v3 extensions shared by server and client ("user") certificates:
-- signature/encipherment key usage, non-CA basic constraints, key
-- identifiers, plus the given extended-key-usage purpose.
usExtensionsV3 :: ExtKeyUsagePurpose -> DistinguishedName -> DistinguishedName -> [ExtensionRaw]
usExtensionsV3 purpose subDN issDN =
    [ extensionEncode True keyUsage
    , extensionEncode False extendedKeyUsage
    , extensionEncode False basicConstraints
    , extensionEncode False subjectKeyId
    , extensionEncode False authorityKeyId
    ]
  where
    keyUsage = ExtKeyUsage [KeyUsage_digitalSignature, KeyUsage_keyEncipherment]
    basicConstraints = ExtBasicConstraints False Nothing
    subjectKeyId = ExtSubjectKeyId (hashDN subDN)
    authorityKeyId = ExtAuthorityKeyId (hashDN issDN)
    extendedKeyUsage = ExtExtendedKeyUsage [purpose]
-- | Server extensions: the common user extension set specialised to server
-- authentication, prefixed with the subject-alternative-name extension.
svExtensionsV3 :: DistinguishedName -> DistinguishedName -> NonEmpty String -> [ExtensionRaw]
svExtensionsV3 subDN issDN altNames =
    extensionEncode False subjectAltName
        : usExtensionsV3 KeyUsagePurpose_ServerAuth subDN issDN
  where
    subjectAltName = ExtSubjectAltName (map parseSAN (NonEmpty.toList altNames))
-- | Client extensions: the common user extension set specialised to client
-- authentication.
clExtensionsV3 :: DistinguishedName -> DistinguishedName -> [ExtensionRaw]
clExtensionsV3 subDN issDN = usExtensionsV3 KeyUsagePurpose_ClientAuth subDN issDN
-- | Map a function over a list paired with each element's 0-based index.
--
-- Uses an open-ended index list: 'zipWith' already truncates to the shorter
-- argument, so the previous @[0..(length xs - 1)]@ upper bound only added an
-- extra O(n) traversal of the list. Behaviour is unchanged.
forEach :: [a] -> ((Int, a) -> b) -> [b]
forEach xs fn = zipWith (curry fn) [0..] xs
-- Build the X.501 distinguished name (organization + common name attributes)
-- from a foreign certificate configuration.
mkDistinguishedName :: CertConfiguration -> DistinguishedName
mkDistinguishedName conf = DistinguishedName
    [ (getObjectID DnOrganization, fromString $ certOrganization conf)
    , (getObjectID DnCommonName, fromString $ certCommonName conf)
    ]
| input-output-hk/pos-haskell-prototype | x509/src/Cardano/X509/Configuration.hs | mit | 12,841 | 53 | 20 | 3,495 | 2,524 | 1,466 | 1,058 | -1 | -1 |
module SafeHello where
import MonadHandle
import System.IO (IOMode(..))
{-- snippet safeHello --}
-- | Open the file for writing, emit a single greeting line, and close the
-- handle — all through the abstract 'MonadHandle' interface.
safeHello :: MonadHandle h m => FilePath -> m ()
safeHello path =
  openFile path WriteMode >>= \outHandle ->
    hPutStrLn outHandle "hello world" >> hClose outHandle
{-- /snippet safeHello --}
| binesiyu/ifl | examples/ch15/SafeHello.hs | mit | 266 | 0 | 8 | 49 | 80 | 41 | 39 | 8 | 1 |
{- |
Module : $Header$
Description : A module for working with series
Copyright : (c) Nicholas Orton
License : GPL
Maintainer : no | <nick@orton.biz>
Stability : provisional
Portability : portable
Code : https://github.com/nick-orton/project_euler/blob/master/Lib/Numz.lhs
-}
module Euler.Numz where
-- |The set of Natural Numbers ℕ is the set of all counting integers:
-- {1,2,3,...}
nats :: [Integer]
nats = [1 .. ] -- infinite, lazily generated; consumers must take finitely many
-- | A number is natural when truncating away its fractional component
-- (towards negative infinity) leaves the value unchanged.
isNat :: (RealFrac a) => a -> Bool
isNat x = fromInteger (floor x) == x
-- Unit check for 'isNat'.
testisNat = isNat 25 && not (isNat 25.1)
-- | The set of Square Numbers is the set of all integers whose square root
-- is a whole number.
isSqr :: (Floating a, RealFrac a) => a -> Bool
isSqr = isNat . sqrt
-- Unit check for 'isSqr'.
testisSqr = isSqr 25 && not (isSqr 10)
-- |The factorial of a non-negative integer n, denoted by n!, is the product
-- of all positive integers less than or equal to n
-- NOTE(review): diverges for negative or non-integral arguments — the
-- recursion then never reaches the 0/1 base cases. Literal patterns rely on
-- equality of the numeric type.
(!) :: (Num t) => t -> t
(!) 0 = 1
(!) 1 = 1
(!) n = n * (!) (n - 1)
-- for example: 5! = 5 x 4 x 3 x 2 x 1 = 120
testFactorial = ((!) 5 == 120) && ((!) 0 == 1)
-- | Fibonacci Series: <http://oeis.org/A000045>
--
-- Each step conses the next Fibonacci number onto the accumulator, so the
-- two most recent values always sit at the front. Partial: the match fails
-- on lists shorter than two elements, which 'fibs' never produces.
fib :: [Integer] -> [Integer]
fib (y : z : xs) = next : fib (next : y : z : xs)
  where
    next = y + z
-- The infinite Fibonacci sequence: 1, 1, 2, 3, 5, ...
fibs = fib [0, 1]
-- Unit check for 'fibs'.
testfib = take 7 fibs == [1, 1, 2, 3, 5, 8, 13]
-- | Triangle Number Series: <http://oeis.org/A000217>
--
-- The sequence of triangle numbers is generated by adding the natural
-- numbers, so the 7th triangle number is 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28.
--
-- Tn = n(n+1)/2: 1, 3, 6, 10, 15, ...
tris :: [Integer]
tris = map nthTriangle nats
  where
    nthTriangle n = div (n * (n + 1)) 2
-- Unit check: the first ten triangle numbers.
testtris = take 10 tris == [1, 3, 6, 10, 15, 21, 28, 36, 45, 55]
-- | Pentagonal Number Series: <http://oeis.org/A000326>
--
-- Pn = n(3n-1)/2: 1, 5, 12, 22, 35, ...
pents :: [Integer]
pents = map nthPentagonal nats
  where
    nthPentagonal n = div (n * (3 * n - 1)) 2
-- Unit check: the first five pentagonal numbers.
testPents = take 5 pents == [1, 5, 12, 22, 35]
-- | Hexagonal Number Series: <http://oeis.org/A000384>
--
-- Hn = n(2n-1): 1, 6, 15, 28, 45, ...
hexes :: [Integer]
hexes = map nthHexagonal nats
  where
    nthHexagonal n = n * (2 * n - 1)
-- Unit check: the first five hexagonal numbers.
testHexes = take 5 hexes == [1, 6, 15, 28, 45]
-- | Module self-test: True only when every individual series check passes.
-- Fixed to include 'testtris', which the original conjunction omitted.
testNumz = testisNat && testisSqr && testfib && testFactorial
           && testtris && testPents && testHexes
| nick-orton/euler | src/Euler/Numz.hs | gpl-3.0 | 2,344 | 0 | 13 | 532 | 648 | 374 | 274 | 29 | 1 |
{-|
Module : Expr
Description : Expressions
Copyright : (c) Daniel Selsam, 2016
License : GPL-3
Maintainer : daniel.selsam@gmail.com
API for expressions
-}
module Kernel.Expr (
Expr(..)
, LocalData(LocalData), VarData, SortData, ConstantData(ConstantData), BindingData, AppData, LetData
, BinderInfo(..)
, mkVar, mkLocal, mkLocalDefault, mkLocalData, mkLocalDataDefault, mkConstant, mkSort
, mkLambda, mkLambdaDefault, mkPi, mkPiDefault, mkArrow, mkLet
, mkApp, mkAppSeq
, varIdx
, sortLevel
, localName, localType
, constName, constLevels
, bindingName, bindingDomain, bindingBody, bindingInfo
, letName, letType, letVal, letBody
, appFn, appArg, getOperator, getAppArgs, getAppOpArgs, getAppRevArgs, getAppOpRevArgs, mkRevAppSeq
, exprHasLocal, exprHasLevelParam, hasFreeVars, closed
, abstractPi, abstractPiSeq, abstractLambda, abstractLambdaSeq
, instantiate, instantiateSeq, instantiateLevelParams
, findInExpr
, isConstant, maybeConstant
, innerBodyOfLambda
, mkProp
) where
import Kernel.Expr.Internal
| dselsam/tc | src/Kernel/Expr.hs | gpl-3.0 | 1,064 | 0 | 5 | 166 | 217 | 146 | 71 | 26 | 0 |
module Analysis.CriticalPairs
( CriticalPairType (..)
, CriticalPair (..)
, getCriticalPairMatches
, getCriticalPairComatches
, getNacIndexOfCriticalPair
, getNacMatchOfCriticalPair
, getCriticalPairType
, isDeleteUse
, isProduceDangling
, isProduceForbid
-- * Finding Critical Pairs
, findCriticalPairs
, findAllDeleteUse
, findAllProduceForbid
, findAllProduceDangling
, findAllDeleteUseAndProduceDangling
) where
import Data.Maybe (mapMaybe)
import Abstract.Category
import Abstract.Category.Finitary
import Abstract.Constraint
import Abstract.Rewriting.DPO hiding (calculateComatch)
import qualified Abstract.Rewriting.DPO.DiagramAlgorithms as Diagram
-- | Data representing the type of a 'CriticalPair'
data CriticalPairType =
    FreeOverlap -- ^ overlapping matches that cause no conflict
  | DeleteUse -- ^ first rule deletes an element the second uses
  | ProduceForbid -- ^ first rule's result violates a NAC of the second
  | ProduceDangling -- ^ first rule creates an edge on a node the second deletes
  deriving(Eq,Show)
-- | A Critical Pair is defined as two matches (m1,m2) from the left
-- side of their productions to a same graph.
-- It assumes that the derivation of the production with match @m1@ causes
-- a conflict with the production with match @m2@
--
-- This diagram shows graphs and morphisms names used in the algorithms below
--
-- p1 = production (L1,K1,R1,[N1])
--
-- p2 = production (L2,K2,R2,[N2])
--
-- @
-- N1 N2
-- ^ ^
-- r l │ │n
-- R1◀─────K1────▶L1 L2◀────K2─────▶R2
-- │ │ \\ / │ │
-- m1'│ k│ m1\\ /m2 │ │
-- ▼ ▼ ▼ ▼ ▼
-- P1◀─────D1───────▶G◀───────D2──────▶P2
-- r' l'
-- @
--
-- m2' :: from L2 to P1
--
-- h21 :: from L2 to D1
--
-- q21 (nacMatch) :: from N2 to P1
data CriticalPair morph = CriticalPair {
    -- the conflicting matches (m1, m2) into the same graph
    matches :: (morph,morph),
    -- the comatches (m1', m2'); only recorded for produce-forbid pairs
    comatches :: Maybe (morph,morph),
    nacMatch :: Maybe (morph, Int), --if it is a ProduceForbid, here is the index of the nac
    cpType :: CriticalPairType
    } deriving (Eq,Show)
-- | Whether the pair was classified as a delete-use conflict.
isDeleteUse :: CriticalPair morph -> Bool
isDeleteUse = (DeleteUse ==) . cpType
-- | Whether the pair was classified as a produce-dangling conflict.
isProduceDangling :: CriticalPair morph -> Bool
isProduceDangling = (ProduceDangling ==) . cpType
-- | Whether the pair was classified as a produce-forbid conflict.
isProduceForbid :: CriticalPair morph -> Bool
isProduceForbid = (ProduceForbid ==) . cpType
-- | Returns the matches (m1,m2)
getCriticalPairMatches :: CriticalPair morph -> (morph,morph)
getCriticalPairMatches = matches
-- | Returns the comatches (m1',m2')
-- ('Nothing' unless the analysis recorded them; only produce-forbid does).
getCriticalPairComatches :: CriticalPair morph -> Maybe (morph,morph)
getCriticalPairComatches = comatches
-- | Returns the type of a Critical Pair
getCriticalPairType :: CriticalPair morph -> CriticalPairType
getCriticalPairType = cpType
-- | Returns the NAC match of a 'CriticalPair', when one was recorded
-- (only produce-forbid pairs carry one).
--
-- Replaces a manual @case@ over 'Maybe' with the equivalent 'fmap'.
getNacMatchOfCriticalPair :: CriticalPair morph -> Maybe morph
getNacMatchOfCriticalPair = fmap fst . nacMatch
-- | Returns the NAC index of a 'CriticalPair', when one was recorded
-- (only produce-forbid pairs carry one).
--
-- Replaces a manual @case@ over 'Maybe' with the equivalent 'fmap'.
getNacIndexOfCriticalPair :: CriticalPair morph -> Maybe Int
getNacIndexOfCriticalPair = fmap snd . nacMatch
-- | Finds all Critical Pairs between two given Productions, excluding those
-- whose initial graph violates some of the given constraints.
findCriticalPairs :: (E'PairCofinitary morph, DPO morph) => MorphismsConfig morph -> [Constraint morph] -> Production morph -> Production morph -> [CriticalPair morph]
findCriticalPairs conf constraints p1 p2 = concat
  [ findAllDeleteUseAndProduceDangling conf constraints p1 p2
  , findAllProduceForbid conf constraints p1 p2
  ]
-- ** Conflicts
-- *** Delete-Use
-- | Enumerate all delete-use conflicts caused by the application of the first
-- given rule before the second, excluding those whose initial graph violates
-- some of the given constraints.
--
-- This occurs when the first rule deletes some element that the second rule
-- uses.
findAllDeleteUse :: (E'PairCofinitary morph, DPO morph) => MorphismsConfig morph -> [Constraint morph] -> Production morph -> Production morph -> [CriticalPair morph]
findAllDeleteUse conf constraints p1 p2 =
  -- Candidate pairs are jointly surjective applicable matches (m1, m2);
  -- keep only those whose shared codomain satisfies the constraints and
  -- that the diagram algorithm classifies as delete-use.
  let matchPairs = findJointSurjectiveApplicableMatches conf p1 p2
  in [ CriticalPair (m1,m2) Nothing Nothing DeleteUse
     | (m1,m2) <- matchPairs, codomain m1 `satisfiesAllConstraints` constraints
     , Diagram.isDeleteUse conf p1 (m1,m2) ]
-- *** Produce-Dangling
-- | Enumerate all produce-dangling conflicts caused by the application of the
-- first given rule before the second, excluding those whose initial graph
-- violates some of the given constraints.
--
-- This occurs when the first rule creates an edge onto a node that is deleted
-- by the second.
findAllProduceDangling :: (E'PairCofinitary morph, DPO morph) => MorphismsConfig morph -> [Constraint morph] -> Production morph -> Production morph -> [CriticalPair morph]
findAllProduceDangling conf constraints p1 p2 =
  -- Same candidate enumeration as 'findAllDeleteUse', but classified with
  -- the produce-dangling diagram check.
  let matchPairs = findJointSurjectiveApplicableMatches conf p1 p2
  in [ CriticalPair (m1,m2) Nothing Nothing ProduceDangling
     | (m1,m2) <- matchPairs, codomain m1 `satisfiesAllConstraints` constraints
     , Diagram.isProduceDangling conf p1 p2 (m1,m2) ]
-- DeleteUse and Produce-Dangling
-- | Enumerate all delete-use and produce-dangling conflicts caused by the
-- application of the first given rule before the second, excluding those whose
-- initial graph violates some of the given constraints.
--
-- This is more efficient than calculating each kind of conflict separately.
findAllDeleteUseAndProduceDangling :: (E'PairCofinitary morph, DPO morph) => MorphismsConfig morph -> [Constraint morph] -> Production morph -> Production morph -> [CriticalPair morph]
findAllDeleteUseAndProduceDangling conf constraints p1 p2 =
  let
    matchPairs = findJointSurjectiveApplicableMatches conf p1 p2
    -- Short-circuit the constraint check when there are no constraints,
    -- avoiding the codomain computation per pair.
    isValidPair (m1,_)
      | null constraints = True
      | otherwise = codomain m1 `satisfiesAllConstraints` constraints
    -- 'deleteUseDangling' classifies each pair in one pass:
    -- Left = delete-use, Right = produce-dangling, Nothing = no conflict.
    conflicts = mapMaybe (Diagram.deleteUseDangling conf p1 p2) $ filter isValidPair matchPairs
  in map categorizeConflict conflicts
  where
    categorizeConflict x = case x of
      (Left m) -> CriticalPair m Nothing Nothing DeleteUse
      (Right m) -> CriticalPair m Nothing Nothing ProduceDangling
-- *** Produce-Forbid
-- | Enumerate all produce-forbid conflicts caused by the application of the
-- first given rule before the second, excluding those whose initial graph
-- violates some of the given constraints.
--
-- These are caused when the application of the first rule causes
-- the violation of a NAC for the second rule.
findAllProduceForbid :: (E'PairCofinitary morph, DPO morph) => MorphismsConfig morph -> [Constraint morph] -> Production morph -> Production morph -> [CriticalPair morph]
findAllProduceForbid conf constraints p1 p2 =
  -- Check every NAC of p2 (paired with its index, so the result can report
  -- which NAC was violated); these pairs also carry their comatches.
  let conflicts = concatMap (Diagram.produceForbidOneNac conf constraints p1 p2) (zip (nacs p2) [0..])
  in [ CriticalPair matches (Just comatches) (Just nac) ProduceForbid
     | (matches, comatches, nac) <- conflicts ]
| rodrigo-machado/verigraph | src/library/Analysis/CriticalPairs.hs | gpl-3.0 | 7,471 | 0 | 13 | 1,548 | 1,345 | 733 | 612 | 91 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.DeleteLayer
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deletes a specified layer. You must first stop and then delete all associated
-- instances or unassign registered instances. For more information, see <http://docs.aws.amazon.com/opsworks/latest/userguide/workinglayers-basics-delete.html How toDelete a Layer>.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack, or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_DeleteLayer.html>
module Network.AWS.OpsWorks.DeleteLayer
(
-- * Request
DeleteLayer
-- ** Request constructor
, deleteLayer
-- ** Request lenses
, dlLayerId
-- * Response
, DeleteLayerResponse
-- ** Response constructor
, deleteLayerResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
-- | Request payload: the sole field is the identifier of the layer to delete.
-- NOTE(review): generated code; 'Monoid' and 'IsString' are lifted from the
-- underlying 'Text' via newtype deriving.
newtype DeleteLayer = DeleteLayer
    { _dlLayerId :: Text
    } deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DeleteLayer' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dlLayerId' @::@ 'Text'
--
deleteLayer :: Text -- ^ 'dlLayerId'
            -> DeleteLayer
deleteLayer p1 = DeleteLayer
    { _dlLayerId = p1
    }
-- | The layer ID.
dlLayerId :: Lens' DeleteLayer Text
dlLayerId = lens _dlLayerId (\s a -> s { _dlLayerId = a })
-- | 'DeleteLayer' has an empty response body; success is indicated by the
-- HTTP response alone ('nullResponse' below).
data DeleteLayerResponse = DeleteLayerResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeleteLayerResponse' constructor.
deleteLayerResponse :: DeleteLayerResponse
deleteLayerResponse = DeleteLayerResponse
-- The request is always sent to the service root path, with no query string.
instance ToPath DeleteLayer where
    toPath = const "/"
instance ToQuery DeleteLayer where
    toQuery = const mempty
instance ToHeaders DeleteLayer
-- Body serialises as @{"LayerId": ...}@.
instance ToJSON DeleteLayer where
    toJSON DeleteLayer{..} = object
        [ "LayerId" .= _dlLayerId
        ]
instance AWSRequest DeleteLayer where
    type Sv DeleteLayer = OpsWorks
    type Rs DeleteLayer = DeleteLayerResponse

    request  = post "DeleteLayer"
    response = nullResponse DeleteLayerResponse
| romanb/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/DeleteLayer.hs | mpl-2.0 | 3,323 | 0 | 9 | 693 | 361 | 222 | 139 | 48 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.DataPipeline.AddTags
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Adds or modifies tags for the specified pipeline.
--
-- <http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_AddTags.html>
module Network.AWS.DataPipeline.AddTags
(
-- * Request
AddTags
-- ** Request constructor
, addTags
-- ** Request lenses
, atPipelineId
, atTags
-- * Response
, AddTagsResponse
-- ** Response constructor
, addTagsResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.DataPipeline.Types
import qualified GHC.Exts
-- | Request payload: a pipeline identifier plus the tags to attach.
-- NOTE(review): generated code.
data AddTags = AddTags
    { _atPipelineId :: Text
    , _atTags       :: List "tags" Tag
    } deriving (Eq, Read, Show)
-- | 'AddTags' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'atPipelineId' @::@ 'Text'
--
-- * 'atTags' @::@ ['Tag']
--
addTags :: Text -- ^ 'atPipelineId'
        -> AddTags
addTags p1 = AddTags
    { _atPipelineId = p1
    , _atTags       = mempty
    }
-- | The ID of the pipeline.
atPipelineId :: Lens' AddTags Text
atPipelineId = lens _atPipelineId (\s a -> s { _atPipelineId = a })
-- | The tags to add, as key/value pairs.
-- The trailing '_List' iso unwraps the serialisation wrapper to a plain list.
atTags :: Lens' AddTags [Tag]
atTags = lens _atTags (\s a -> s { _atTags = a }) . _List
-- | 'AddTags' has an empty response body; success is indicated by the HTTP
-- response alone ('nullResponse' below).
data AddTagsResponse = AddTagsResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'AddTagsResponse' constructor.
addTagsResponse :: AddTagsResponse
addTagsResponse = AddTagsResponse
-- The request is always sent to the service root path, with no query string.
instance ToPath AddTags where
    toPath = const "/"
instance ToQuery AddTags where
    toQuery = const mempty
instance ToHeaders AddTags
-- Body serialises as @{"pipelineId": ..., "tags": [...]}@.
instance ToJSON AddTags where
    toJSON AddTags{..} = object
        [ "pipelineId" .= _atPipelineId
        , "tags"       .= _atTags
        ]
instance AWSRequest AddTags where
    type Sv AddTags = DataPipeline
    type Rs AddTags = AddTagsResponse

    request  = post "AddTags"
    response = nullResponse AddTagsResponse
| romanb/amazonka | amazonka-datapipeline/gen/Network/AWS/DataPipeline/AddTags.hs | mpl-2.0 | 2,957 | 0 | 10 | 718 | 421 | 255 | 166 | 54 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module HollaBack.Testing.Date.Parser (specs) where
import Data.Attoparsec.Text (parseOnly,
maybeResult,
eitherResult,
Parser)
import Data.Maybe (fromJust)
import Data.Text (Text)
import Test.Hspec (Spec,
describe,
pending,
it)
import Test.Hspec.HUnit
import Test.HUnit.Base ((~?=))
import HollaBack.Date.Types
import HollaBack.Date.Parser
-- | Hspec entry point for the date-parser suite.
-- NOTE(review): only the "tag" specs are active; the remaining describe
-- blocks in this file are commented out below.
specs :: Spec
specs = do
  describe "tag" $ do
    it "Parses a well-formed tag" $
      parseOnly tag "+Work" ~?= Right work
    it "Does not consume past a second +" $
      parseOnly tag "+Work+School" ~?= Right work
    it "Does not parse raw strings" $
      fails $ parseOnly tag "Work"
-- describe_tags,
-- describe_timeUnit,
-- describe_timeKeyword,
-- describe_day,
-- describe_date,
-- describe_dateTimeSpec,
-- describe_time]
--
--describe_tag :: Specs
--describe_tag =
-- describe "tag" [
-- it "Parses a well-formed tag"
-- (parseOnly tag "+Work" ~?= Right work),
-- it "Does not consume past a second +"
-- (parseOnly tag "+Work+School" ~?= Right work),
-- it "Does not parse raw strings"
-- (fails $ parseOnly tag "Work")
-- ]
--
--describe_tags :: Specs
--describe_tags =
-- describe "tags" [
-- it "Parses an empty string as an empty list"
-- (parseOnly tags "" ~?= Right []),
-- it "Parses 1 tag"
-- (parseOnly tags "+Work" ~?= Right [work]),
-- it "Parses many tags"
-- (parseOnly tags "+Work+School" ~?= Right [work, school])
-- ]
--
--describe_timeKeyword :: Specs
--describe_timeKeyword =
-- describe "timeKeyword" [
-- it "parses minutes"
-- (parseOnly timeKeyword "mi" ~?= Right Minutes),
-- it "parses hours"
-- (parseOnly timeKeyword "h" ~?= Right Hours),
-- it "parses days"
-- (parseOnly timeKeyword "d" ~?= Right Days),
-- it "parses weeks"
-- (parseOnly timeKeyword "w" ~?= Right Weeks),
-- it "parses months"
-- (parseOnly timeKeyword "mo" ~?= Right Months),
-- it "parses years"
-- (parseOnly timeKeyword "y" ~?= Right Years),
-- it "fails to parse other tokens"
-- (fails $ parseOnly timeKeyword "z")
-- ]
--
--describe_timeUnit :: Specs
--describe_timeUnit =
-- describe "timeUnit" [
-- it "parses well formed TimeUnits"
-- (parseOnly timeUnit "10d" ~?= Right (TimeUnit 10 Days)),
-- it "fails decimal days"
-- (fails $ parseOnly timeKeyword "10.5d")
-- ]
--
--describe_day :: Specs
--describe_day =
-- describe "day" [
-- it "parses mon"
-- (parseOnly day "mon" ~?= Right Monday),
-- it "parses monday"
-- (parseOnly day "monday" ~?= Right Monday),
-- it "parses tue"
-- (parseOnly day "tue" ~?= Right Tuesday),
-- it "parses tues"
-- (parseOnly day "tues" ~?= Right Tuesday),
-- it "parses tuesday"
-- (parseOnly day "tuesday" ~?= Right Tuesday),
-- it "parses wed"
-- (parseOnly day "wed" ~?= Right Wednesday),
-- it "parses wednesday"
-- (parseOnly day "wednesday" ~?= Right Wednesday),
-- it "parses thu"
-- (parseOnly day "thu" ~?= Right Thursday),
-- it "parses thur"
-- (parseOnly day "thur" ~?= Right Thursday),
-- it "parses thurs"
-- (parseOnly day "thurs" ~?= Right Thursday),
-- it "parses thursday"
-- (parseOnly day "thursday" ~?= Right Thursday),
-- it "parses fri"
-- (parseOnly day "fri" ~?= Right Friday),
-- it "parses friday"
-- (parseOnly day "friday" ~?= Right Friday),
-- it "parses sat"
-- (parseOnly day "sat" ~?= Right Saturday),
-- it "parses saturday"
-- (parseOnly day "saturday" ~?= Right Saturday),
-- it "parses sun"
-- (parseOnly day "sun" ~?= Right Sunday),
-- it "parses sunday"
-- (parseOnly day "sunday" ~?= Right Sunday)
-- ]
--
--describe_date :: Specs
--describe_date =
-- describe "date" [
-- it "parses well-formed dates"
-- (parseOnly date "may25" ~?= Right (Date May 25)),
-- it "parses with leading 0s"
-- (parseOnly date "mar05" ~?= Right (Date March 5)),
-- it "parses full month names"
-- (parseOnly date "january3" ~?= Right (Date January 3)),
-- it "is case insensitive"
-- (parseOnly date "January3" ~?= Right (Date January 3))
-- ]
--
--describe_time :: Specs
--describe_time =
-- describe "time" [
-- it "parses AM times"
-- (parseOnly time "3am" ~?= Right threeAm),
-- it "parses PM times"
-- (parseOnly time "9pm" ~?= Right ninePm),
-- it "is case insensitive about the time half"
-- (parseOnly time "9PM" ~?= Right ninePm),
-- it "fails on out-of-bounds times"
-- (pending "implement bounds checks")
-- ]
-- where threeAm = TimeOfDay 3 0 0
-- ninePm = TimeOfDay 21 0 0
--
--describe_dateTimeSpec :: Specs
--describe_dateTimeSpec =
-- describe "dateTimeSpec" [
-- it "parses RelativeDateTime"
-- (parseOnly dateTimeSpec "13d" ~?= Right relativeDateTime),
-- it "parses SpecificDateTime"
-- (parseOnly dateTimeSpec "Mar13-6pm" ~?= Right specificDateTime),
-- it "parses SpecificWeekdayTime"
-- (parseOnly dateTimeSpec "thurs-1pm" ~?= Right specificWeekdayTime),
-- it "parses SpecificWeekday"
-- (parseOnly dateTimeSpec "fri" ~?= Right specificWeekday),
-- it "parses SpecificTime"
-- (parseOnly dateTimeSpec "5AM" ~?= Right specificTime)
-- ]
-- where relativeDateTime = RelativeDateTime $ TimeUnit 13 Days
-- specificDateTime = SpecificDateTime (Date March 13) $ TimeOfDay 18 0 0
-- specificWeekdayTime = SpecificWeekdayTime Thursday $ TimeOfDay 13 0 0
-- specificWeekday = SpecificWeekday Friday
-- specificTime = SpecificTime $ TimeOfDay 5 0 0
---- Fixtures
-- | Fixture: the "Work" tag.
work :: Tag
work = Tag "Work"

-- | Fixture: the "School" tag.
school :: Tag
school = Tag "School"

-- | Whether an 'Either' holds its 'Left' alternative.
isLeft :: Either a b -> Bool
isLeft = either (const True) (const False)

-- | Assert that a parse attempt failed (i.e. returned 'Left').
fails res = isLeft res ~?= True
| MichaelXavier/HollaBack | HollaBack/Testing/Date/Parser.hs | bsd-2-clause | 6,041 | 0 | 12 | 1,514 | 409 | 288 | 121 | 33 | 1 |
-- |
-- Module : Crypto.PubKey.ECIES
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unknown
--
-- IES with Elliptic curve <https://en.wikipedia.org/wiki/Integrated_Encryption_Scheme>
--
-- This is a simple cryptographic system between 2 parties using Elliptic Curve.
--
-- The sending party create a shared secret using the receiver public key, and use the shared secret
-- to generate cryptographic material for an symmetric encryption scheme (preferably authenticated encryption).
--
-- The receiving party receive the temporary ephemeral public key which is combined to its secret key
-- to create the shared secret which just like on the sending is used to generate cryptographic material.
--
-- This module doesn't provide any symmetric data encryption capability or any mean to derive
-- cryptographic key material for a symmetric key from the shared secret.
-- this is left to the user for now.
--
module Crypto.PubKey.ECIES
( deriveEncrypt
, deriveDecrypt
) where
import Crypto.ECC
import Crypto.Error
import Crypto.Random
import Crypto.Internal.Proxy
-- | Generate a fresh ephemeral key pair and derive the shared secret
-- from it and the receiver's public key (the ECIES encryption side).
--
-- Returns the ephemeral public point (to be transmitted to the
-- receiver) together with the derived 'SharedSecret'.
deriveEncrypt :: (MonadRandom randomly, EllipticCurveDH curve)
              => proxy curve -- ^ representation of the curve
              -> Point curve -- ^ the public key of the receiver
              -> randomly (CryptoFailable (Point curve, SharedSecret))
deriveEncrypt proxy pub = do
    KeyPair ephPoint ephScalar <- curveGenerateKeyPair proxy
    let attach secret = (ephPoint, secret)
    return (fmap attach (ecdh proxy ephScalar pub))
-- | Recompute the shared secret on the receiving side from the
-- sender's ephemeral point @R@ and the receiver's secret scalar.
deriveDecrypt :: EllipticCurveDH curve
              => proxy curve  -- ^ representation of the curve
              -> Point curve  -- ^ the received @R@ (supposedly, randomly generated on the encrypt side)
              -> Scalar curve -- ^ the secret key of the receiver
              -> CryptoFailable SharedSecret
deriveDecrypt proxy ephPoint priv = ecdh proxy priv ephPoint
| tekul/cryptonite | Crypto/PubKey/ECIES.hs | bsd-3-clause | 2,173 | 0 | 12 | 484 | 235 | 136 | 99 | 20 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Typechecking class declarations
-}
{-# LANGUAGE CPP #-}
module TcClassDcl ( tcClassSigs, tcClassDecl2,
findMethodBind, instantiateMethod,
tcClassMinimalDef,
HsSigFun, mkHsSigFun, lookupHsSig, emptyHsSigs,
tcMkDeclCtxt, tcAddDeclCtxt, badMethodErr,
tcATDefault
) where
#include "HsVersions.h"
import HsSyn
import TcEnv
import TcPat( addInlinePrags, lookupPragEnv, emptyPragEnv )
import TcEvidence( idHsWrapper )
import TcBinds
import TcUnify
import TcHsType
import TcMType
import Type ( getClassPredTys_maybe, varSetElemsWellScoped, piResultTys )
import TcType
import TcRnMonad
import BuildTyCl( TcMethInfo )
import Class
import Coercion ( pprCoAxiom )
import DynFlags
import FamInst
import FamInstEnv
import Id
import Name
import NameEnv
import NameSet
import Var
import VarEnv
import VarSet
import Outputable
import SrcLoc
import TyCon
import Maybes
import BasicTypes
import Bag
import FastString
import BooleanFormula
import Util
import Control.Monad
import Data.List ( mapAccumL )
{-
Dictionary handling
~~~~~~~~~~~~~~~~~~~
Every class implicitly declares a new data type, corresponding to dictionaries
of that class. So, for example:
class (D a) => C a where
op1 :: a -> a
op2 :: forall b. Ord b => a -> b -> b
would implicitly declare
data CDict a = CDict (D a)
(a -> a)
(forall b. Ord b => a -> b -> b)
(We could use a record decl, but that means changing more of the existing apparatus.
One step at a time!)
For classes with just one superclass+method, we use a newtype decl instead:
class C a where
        op :: forall b. a -> b -> b
generates
newtype CDict a = CDict (forall b. a -> b -> b)
Now DictTy in Type is just a form of type synonym:
DictTy c t = TyConTy CDict `AppTy` t
Death to "ExpandingDicts".
************************************************************************
* *
Type-checking the class op signatures
* *
************************************************************************
-}
-- | Typecheck the method signatures of a class declaration, returning
-- one 'TcMethInfo' per class method, paired with its default-method
-- information (vanilla binding, generic-default signature, or none).
tcClassSigs :: Name -- Name of the class
            -> [LSig Name]
            -> LHsBinds Name
            -> TcM [TcMethInfo] -- Exactly one for each method
tcClassSigs clas sigs def_methods
  = do { traceTc "tcClassSigs 1" (ppr clas)
       ; gen_dm_prs <- concat <$> mapM (addLocM tc_gen_sig) gen_sigs
       ; let gen_dm_env :: NameEnv Type
             gen_dm_env = mkNameEnv gen_dm_prs
       ; op_info <- concat <$> mapM (addLocM (tc_sig gen_dm_env)) vanilla_sigs
       ; let op_names = mkNameSet [ n | (n,_,_) <- op_info ]
       ; sequence_ [ failWithTc (badMethodErr clas n)
                   | n <- dm_bind_names, not (n `elemNameSet` op_names) ]
                   -- Value binding for non class-method (ie no TypeSig)
       ; sequence_ [ failWithTc (badGenericMethod clas n)
                   | (n,_) <- gen_dm_prs, not (n `elem` dm_bind_names) ]
                   -- Generic signature without value binding
       ; traceTc "tcClassSigs 2" (ppr clas)
       ; return op_info }
  where
    vanilla_sigs = [L loc (nm,ty) | L loc (ClassOpSig False nm ty) <- sigs]
    gen_sigs = [L loc (nm,ty) | L loc (ClassOpSig True nm ty) <- sigs]
    dm_bind_names :: [Name] -- These ones have a value binding in the class decl
    dm_bind_names = [op | L _ (FunBind {fun_id = L _ op}) <- bagToList def_methods]
    tc_sig :: NameEnv Type -> ([Located Name], LHsSigType Name)
           -> TcM [TcMethInfo]
    tc_sig gen_dm_env (op_names, op_hs_ty)
      = do { traceTc "ClsSig 1" (ppr op_names)
           ; op_ty <- tcClassSigType op_names op_hs_ty -- Class tyvars already in scope
           ; traceTc "ClsSig 2" (ppr op_names)
           ; return [ (op_name, op_ty, f op_name) | L _ op_name <- op_names ] }
      where
        -- Classify the default-method info for one selector name
        f nm | Just ty <- lookupNameEnv gen_dm_env nm = Just (GenericDM ty)
             | nm `elem` dm_bind_names = Just VanillaDM
             | otherwise = Nothing
    tc_gen_sig (op_names, gen_hs_ty)
      = do { gen_op_ty <- tcClassSigType op_names gen_hs_ty
           ; return [ (op_name, gen_op_ty) | L _ op_name <- op_names ] }
{-
************************************************************************
* *
Class Declarations
* *
************************************************************************
-}
-- | Typecheck the default-method bindings of a single class
-- declaration, producing one separate binding group per default method.
tcClassDecl2 :: LTyClDecl Name -- The class declaration
             -> TcM (LHsBinds Id)
tcClassDecl2 (L loc (ClassDecl {tcdLName = class_name, tcdSigs = sigs,
                                tcdMeths = default_binds}))
  = recoverM (return emptyLHsBinds) $
    setSrcSpan loc $
    do { clas <- tcLookupLocatedClass class_name
       -- We make a separate binding for each default method.
       -- At one time I used a single AbsBinds for all of them, thus
       -- AbsBind [d] [dm1, dm2, dm3] { dm1 = ...; dm2 = ...; dm3 = ... }
       -- But that desugars into
       -- ds = \d -> (..., ..., ...)
       -- dm1 = \d -> case ds d of (a,b,c) -> a
       -- And since ds is big, it doesn't get inlined, so we don't get good
       -- default methods. Better to make separate AbsBinds for each
       ; let (tyvars, _, _, op_items) = classBigSig clas
             prag_fn = mkPragEnv sigs default_binds
             sig_fn = mkHsSigFun sigs
             clas_tyvars = snd (tcSuperSkolTyVars tyvars)
             pred = mkClassPred clas (mkTyVarTys clas_tyvars)
       ; this_dict <- newEvVar pred
       ; let tc_item = tcDefMeth clas clas_tyvars this_dict
                                 default_binds sig_fn prag_fn
       ; dm_binds <- tcExtendTyVarEnv clas_tyvars $
                     mapM tc_item op_items
       ; return (unionManyBags dm_binds) }
tcClassDecl2 d = pprPanic "tcClassDecl2" (ppr d)
tcDefMeth :: Class -> [TyVar] -> EvVar -> LHsBinds Name
          -> HsSigFun -> TcPragEnv -> ClassOpItem
          -> TcM (LHsBinds TcId)
-- Generate code for default methods
-- This is incompatible with Hugs, which expects a polymorphic
-- default method for every class op, regardless of whether or not
-- the programmer supplied an explicit default decl for the class.
-- (If necessary we can fix that, but we don't have a convenient Id to hand.)
tcDefMeth _ _ _ _ _ prag_fn (sel_id, Nothing)
  = do { -- No default method
         mapM_ (addLocM (badDmPrag sel_id))
               (lookupPragEnv prag_fn (idName sel_id))
       ; return emptyBag }
tcDefMeth clas tyvars this_dict binds_in hs_sig_fn prag_fn
          (sel_id, Just (dm_name, dm_spec))
  | Just (L bind_loc dm_bind, bndr_loc) <- findMethodBind sel_name binds_in
  = do { -- First look up the default method -- It should be there!
         global_dm_id <- tcLookupId dm_name
       ; global_dm_id <- addInlinePrags global_dm_id prags
       ; local_dm_name <- setSrcSpan bndr_loc (newLocalName sel_name)
       -- Base the local_dm_name on the selector name, because
       -- type errors from tcInstanceMethodBody come from here
       ; spec_prags <- discardConstraints $
                       tcSpecPrags global_dm_id prags
       ; warnTc NoReason
                (not (null spec_prags))
                (text "Ignoring SPECIALISE pragmas on default method"
                 <+> quotes (ppr sel_name))
       ; let hs_ty = lookupHsSig hs_sig_fn sel_name
                     `orElse` pprPanic "tc_dm" (ppr sel_name)
             -- We need the HsType so that we can bring the right
             -- type variables into scope
             --
             -- Eg. class C a where
             -- op :: forall b. Eq b => a -> [b] -> a
             -- gen_op :: a -> a
             -- generic gen_op :: D a => a -> a
             -- The "local_dm_ty" is precisely the type in the above
             -- type signatures, ie with no "forall a. C a =>" prefix
             local_dm_ty = instantiateMethod clas global_dm_id (mkTyVarTys tyvars)
             lm_bind = dm_bind { fun_id = L bind_loc local_dm_name }
             -- Substitute the local_meth_name for the binder
             -- NB: the binding is always a FunBind
             warn_redundant = case dm_spec of
                                GenericDM {} -> True
                                VanillaDM -> False
                -- For GenericDM, warn if the user specifies a signature
                -- with redundant constraints; but not for VanillaDM, where
                -- the default method may well be 'error' or something
             ctxt = FunSigCtxt sel_name warn_redundant
       ; local_dm_sig <- instTcTySig ctxt hs_ty local_dm_ty local_dm_name
       ; (ev_binds, (tc_bind, _))
               <- checkConstraints (ClsSkol clas) tyvars [this_dict] $
                  tcPolyCheck NonRecursive no_prag_fn local_dm_sig
                              (L bind_loc lm_bind)
       ; let export = ABE { abe_poly = global_dm_id
                          -- We have created a complete type signature in
                          -- instTcTySig, hence it is safe to call
                          -- completeSigPolyId
                          , abe_mono = completeIdSigPolyId local_dm_sig
                          , abe_wrap = idHsWrapper
                          , abe_prags = IsDefaultMethod }
             full_bind = AbsBinds { abs_tvs = tyvars
                                  , abs_ev_vars = [this_dict]
                                  , abs_exports = [export]
                                  , abs_ev_binds = [ev_binds]
                                  , abs_binds = tc_bind }
       ; return (unitBag (L bind_loc full_bind)) }
  -- NOTE(review): presumably unreachable -- a declared default method
  -- should always come with a binding; hence the panic.  TODO confirm.
  | otherwise = pprPanic "tcDefMeth" (ppr sel_id)
  where
    sel_name = idName sel_id
    prags = lookupPragEnv prag_fn sel_name
    no_prag_fn = emptyPragEnv -- No pragmas for local_meth_id;
                              -- they are all for meth_id
---------------
-- | Compute the minimal complete definition of a class: the MINIMAL
-- pragma if one was given, otherwise all methods lacking a default.
-- Warns when a user-supplied MINIMAL does not imply the default one.
tcClassMinimalDef :: Name -> [LSig Name] -> [TcMethInfo] -> TcM ClassMinimalDef
tcClassMinimalDef _clas sigs op_info
  = case findMinimalDef sigs of
      Nothing -> return defMindef
      Just mindef -> do
        -- Warn if the given mindef does not imply the default one
        -- That is, the given mindef should at least ensure that the
        -- class ops without default methods are required, since we
        -- have no way to fill them in otherwise
        whenIsJust (isUnsatisfied (mindef `impliesAtom`) defMindef) $
                   (\bf -> addWarnTc NoReason (warningMinimalDefIncomplete bf))
        return mindef
  where
    -- By default require all methods without a default
    -- implementation whose names don't start with '_'
    defMindef :: ClassMinimalDef
    defMindef = mkAnd [ noLoc (mkVar name)
                      | (name, _, Nothing) <- op_info
                      , not (startsWithUnderscore (getOccName name)) ]
instantiateMethod :: Class -> Id -> [TcType] -> TcType
-- Take a class operation, say
-- op :: forall ab. C a => forall c. Ix c => (b,c) -> a
-- Instantiate it at [ty1,ty2]
-- Return the "local method type":
-- forall c. Ix x => (ty2,c) -> ty1
-- (The ASSERT checks that the predicate we strip off really is the
-- class constraint itself.)
instantiateMethod clas sel_id inst_tys
  = ASSERT( ok_first_pred ) local_meth_ty
  where
    rho_ty = piResultTys (idType sel_id) inst_tys
    (first_pred, local_meth_ty) = tcSplitPredFunTy_maybe rho_ty
             `orElse` pprPanic "tcInstanceMethod" (ppr sel_id)
    ok_first_pred = case getClassPredTys_maybe first_pred of
                      Just (clas1, _tys) -> clas == clas1
                      Nothing -> False
       -- The first predicate should be of form (C a b)
       -- where C is the class in question
---------------------------
-- | Map from class-method selector 'Name' to its user-written signature.
type HsSigFun = NameEnv (LHsSigType Name)
-- | An empty signature map.
emptyHsSigs :: HsSigFun
emptyHsSigs = emptyNameEnv
-- | Build a lookup table from method name to its user-written class-op
-- signature.  Generic-default signatures are excluded (note the 'False').
mkHsSigFun :: [LSig Name] -> HsSigFun
mkHsSigFun sigs = mkNameEnv [(n, hs_ty)
                            | L _ (ClassOpSig False ns hs_ty) <- sigs
                            , L _ n <- ns ]
-- | Look up the user-written signature for a class method, if any.
lookupHsSig :: HsSigFun -> Name -> Maybe (LHsSigType Name)
lookupHsSig = lookupNameEnv
---------------------------
findMethodBind :: Name -- Selector name
               -> LHsBinds Name -- A group of bindings
               -> Maybe (LHsBind Name, SrcSpan)
-- Returns the binding, and the binding
-- site of the method binder
-- ('Nothing' when no FunBind for the selector exists in the group)
findMethodBind sel_name binds
  = foldlBag mplus Nothing (mapBag f binds)
  where
    f bind@(L _ (FunBind { fun_id = L bndr_loc op_name }))
      | op_name == sel_name
      = Just (bind, bndr_loc)
    f _other = Nothing
---------------------------
-- | Extract the first MINIMAL pragma from the class signatures, if any.
findMinimalDef :: [LSig Name] -> Maybe ClassMinimalDef
findMinimalDef = firstJusts . map toMinimalDef
  where
    toMinimalDef :: LSig Name -> Maybe ClassMinimalDef
    toMinimalDef (L _ (MinimalSig _ (L _ bf))) = Just (fmap unLoc bf)
    toMinimalDef _ = Nothing
{-
Note [Polymorphic methods]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
class Foo a where
op :: forall b. Ord b => a -> b -> b -> b
instance Foo c => Foo [c] where
op = e
When typechecking the binding 'op = e', we'll have a meth_id for op
whose type is
op :: forall c. Foo c => forall b. Ord b => [c] -> b -> b -> b
So tcPolyBinds must be capable of dealing with nested polytypes;
and so it is. See TcBinds.tcMonoBinds (with type-sig case).
Note [Silly default-method bind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we pass the default method binding to the type checker, it must
look like op2 = e
not $dmop2 = e
otherwise the "$dm" stuff comes out error messages. But we want the
"$dm" to come out in the interface file. So we typecheck the former,
and wrap it in a let, thus
$dmop2 = let op2 = e in op2
This makes the error messages right.
************************************************************************
* *
Error messages
* *
************************************************************************
-}
-- | Error-message context: @In the \<flavour\> declaration for 'T'@.
tcMkDeclCtxt :: TyClDecl Name -> SDoc
tcMkDeclCtxt decl = hsep [text "In the", pprTyClDeclFlavour decl,
                      text "declaration for", quotes (ppr (tcdName decl))]
-- | Run an action with 'tcMkDeclCtxt' pushed onto the error context.
tcAddDeclCtxt :: TyClDecl Name -> TcM a -> TcM a
tcAddDeclCtxt decl thing_inside
  = addErrCtxt (tcMkDeclCtxt decl) thing_inside
-- | Error for a binding of a method the class does not declare.
badMethodErr :: Outputable a => a -> Name -> SDoc
badMethodErr clas op
  = hsep [text "Class", quotes (ppr clas),
          text "does not have a method", quotes (ppr op)]
-- | Error for a generic-default signature that has no accompanying binding.
badGenericMethod :: Outputable a => a -> Name -> SDoc
badGenericMethod clas op
  = hsep [text "Class", quotes (ppr clas),
          text "has a generic-default signature without a binding", quotes (ppr op)]
{-
badGenericInstanceType :: LHsBinds Name -> SDoc
badGenericInstanceType binds
= vcat [text "Illegal type pattern in the generic bindings",
nest 2 (ppr binds)]
missingGenericInstances :: [Name] -> SDoc
missingGenericInstances missing
= text "Missing type patterns for" <+> pprQuotedList missing
dupGenericInsts :: [(TyCon, InstInfo a)] -> SDoc
dupGenericInsts tc_inst_infos
= vcat [text "More than one type pattern for a single generic type constructor:",
nest 2 (vcat (map ppr_inst_ty tc_inst_infos)),
text "All the type patterns for a generic type constructor must be identical"
]
where
ppr_inst_ty (_,inst) = ppr (simpleInstInfoTy inst)
-}
-- | Error for a pragma attached to a default method that has no
-- accompanying binding.
badDmPrag :: Id -> Sig Name -> TcM ()
badDmPrag sel_id prag
  -- Use 'text' rather than 'ptext (sLit ...)' for consistency with the
  -- rest of this module ('text' is the standard spelling).
  = addErrTc (text "The" <+> hsSigDoc prag <+> text "for default method"
              <+> quotes (ppr sel_id)
              <+> text "lacks an accompanying binding")
-- | Warning shown by 'tcClassMinimalDef' when a user-supplied MINIMAL
-- pragma fails to require a method that has no default implementation.
warningMinimalDefIncomplete :: ClassMinimalDef -> SDoc
warningMinimalDefIncomplete mindef
  = vcat [ text "The MINIMAL pragma does not require:"
         , nest 2 (pprBooleanFormulaNice mindef)
         , text "but there is no default implementation." ]
tcATDefault :: Bool -- If a warning should be emitted when a default instance
                    -- definition is not provided by the user
            -> SrcSpan
            -> TCvSubst
            -> NameSet
            -> ClassATItem
            -> TcM [FamInst]
-- ^ Construct default instances for any associated types that
-- aren't given a user definition
-- Returns [] or singleton
tcATDefault emit_warn loc inst_subst defined_ats (ATI fam_tc defs)
  -- User supplied instances ==> everything is OK
  | tyConName fam_tc `elemNameSet` defined_ats
  = return []
  -- No user instance, have defaults ==> instantiate them
  -- Example: class C a where { type F a b :: *; type F a b = () }
  -- instance C [x]
  -- Then we want to generate the decl: type F [x] b = ()
  | Just (rhs_ty, _loc) <- defs
  = do { let (subst', pat_tys') = mapAccumL subst_tv inst_subst
                                            (tyConTyVars fam_tc)
             rhs' = substTyUnchecked subst' rhs_ty
             tcv_set' = tyCoVarsOfTypes pat_tys'
             (tv_set', cv_set') = partitionVarSet isTyVar tcv_set'
             tvs' = varSetElemsWellScoped tv_set'
             cvs' = varSetElemsWellScoped cv_set'
       ; rep_tc_name <- newFamInstTyConName (L loc (tyConName fam_tc)) pat_tys'
       ; let axiom = mkSingleCoAxiom Nominal rep_tc_name tvs' cvs'
                                     fam_tc pat_tys' rhs'
           -- NB: no validity check. We check validity of default instances
           -- in the class definition. Because type instance arguments cannot
           -- be type family applications and cannot be polytypes, the
           -- validity check is redundant.
       ; traceTc "mk_deflt_at_instance" (vcat [ ppr fam_tc, ppr rhs_ty
                                              , pprCoAxiom axiom ])
       ; fam_inst <- newFamInst SynFamilyInst axiom
       ; return [fam_inst] }
  -- No defaults ==> generate a warning
  | otherwise -- defs = Nothing
  = do { when emit_warn $ warnMissingAT (tyConName fam_tc)
       ; return [] }
  where
    -- Instantiate one family tyvar: either via the instance substitution,
    -- or (if unbound there) with a fresh tyvar of the substituted kind.
    subst_tv subst tc_tv
      | Just ty <- lookupVarEnv (getTvSubstEnv subst) tc_tv
      = (subst, ty)
      | otherwise
      = (extendTvSubst subst tc_tv ty', ty')
      where
        ty' = mkTyVarTy (updateTyVarKind (substTyUnchecked subst) tc_tv)
-- | Warn (under @-Wmissing-methods@) that an associated type has
-- neither a user-supplied instance nor a default declaration.
warnMissingAT :: Name -> TcM ()
warnMissingAT name
  = do { warn <- woptM Opt_WarnMissingMethods
       ; traceTc "warn" (ppr name <+> ppr warn)
       ; warnTc (Reason Opt_WarnMissingMethods) warn -- Warn only if -Wmissing-methods
                -- A single 'text' literal: the old spelling ended one
                -- fragment with a trailing space, so '<+>' produced a
                -- doubled space before the quoted name.
                (text "No explicit associated type or default declaration for"
                 <+> quotes (ppr name)) }
| mcschroeder/ghc | compiler/typecheck/TcClassDcl.hs | bsd-3-clause | 19,449 | 0 | 19 | 6,303 | 3,354 | 1,755 | 1,599 | 252 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
-- | This module provides facilities for patching incoming 'Requests' to
-- correct the value of 'rqClientAddr' if the snap server is running behind a
-- proxy.
--
-- Example usage:
--
-- @
-- m :: Snap ()
-- m = undefined -- code goes here
--
-- applicationHandler :: Snap ()
-- applicationHandler = behindProxy X_Forwarded_For m
-- @
--
module Snap.Util.Proxy
( ProxyType(..)
, behindProxy
) where
------------------------------------------------------------------------------
import Control.Applicative (Alternative ((<|>)))
import Control.Monad (mfilter)
import qualified Data.ByteString.Char8 as S (breakEnd, dropWhile, null, readInt, spanEnd)
import Data.Char (isSpace)
import Data.Maybe (fromMaybe)
import Snap.Core (MonadSnap, Request (rqClientAddr, rqClientPort), getHeader, modifyRequest)
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- | What kind of proxy is this? Affects which headers 'behindProxy' pulls the
-- original remote address from.
--
-- Currently only proxy servers that send @X-Forwarded-For@ or @Forwarded-For@
-- are supported.
data ProxyType = NoProxy -- ^ no proxy, leave the request alone
                         -- ('behindProxy' becomes the identity)
               | X_Forwarded_For -- ^ Use the @Forwarded-For@ or
                                 -- @X-Forwarded-For@ header
  deriving (Read, Show, Eq, Ord)
------------------------------------------------------------------------------
-- | Rewrite 'rqClientAddr' if we're behind a proxy.
--
-- Example:
--
-- @
-- ghci> :set -XOverloadedStrings
-- ghci> import qualified "Data.Map" as M
-- ghci> import qualified "Snap.Test" as T
-- ghci> let r = T.get \"\/foo\" M.empty >> T.addHeader \"X-Forwarded-For\" \"1.2.3.4\"
-- ghci> let h = 'Snap.Core.getsRequest' 'rqClientAddr' >>= 'Snap.Core.writeBS')
-- ghci> T.runHandler r h
-- HTTP\/1.1 200 OK
-- server: Snap\/test
-- date: Fri, 08 Aug 2014 14:32:29 GMT
--
-- 127.0.0.1
-- ghci> T.runHandler r ('behindProxy' 'X_Forwarded_For' h)
-- HTTP\/1.1 200 OK
-- server: Snap\/test
-- date: Fri, 08 Aug 2014 14:33:02 GMT
--
-- 1.2.3.4
-- @
-- | Wrap a handler so that, for 'X_Forwarded_For', the incoming request
-- is rewritten by 'xForwardedFor' before the handler runs.  'NoProxy'
-- leaves the handler completely untouched.
behindProxy :: MonadSnap m => ProxyType -> m a -> m a
behindProxy NoProxy         handler = handler
behindProxy X_Forwarded_For handler = modifyRequest xForwardedFor >> handler
{-# INLINE behindProxy #-}
------------------------------------------------------------------------------
-- | Rewrite 'rqClientAddr' and 'rqClientPort' from the
-- @(X-)Forwarded-For@ / @(X-)Forwarded-Port@ headers, falling back to
-- the original values when the headers are absent or empty.
xForwardedFor :: Request -> Request
xForwardedFor req = req { rqClientAddr = ip
                        , rqClientPort = port
                        }
  where
    -- Take the last comma-separated element of the header value and
    -- trim surrounding whitespace (proxies append addresses with ',').
    extract = fst . S.spanEnd isSpace . S.dropWhile isSpace . snd . S.breakEnd (== ',')
    -- 'mfilter' rejects an empty extracted address, keeping the original.
    ip = fromMaybe (rqClientAddr req) $ mfilter (not . S.null) $ fmap extract $
         getHeader "Forwarded-For" req <|>
         getHeader "X-Forwarded-For" req
    -- Keep the original port unless the header parses as an 'Int'.
    port = maybe (rqClientPort req) fst $ (S.readInt =<<) $ fmap extract $
           getHeader "Forwarded-Port" req <|>
           getHeader "X-Forwarded-Port" req
{-# INLINE xForwardedFor #-}
| snapframework/snap-core | src/Snap/Util/Proxy.hs | bsd-3-clause | 3,207 | 0 | 14 | 713 | 430 | 263 | 167 | 29 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Get
-- Copyright : (c) Andrea Vezzosi 2008
-- Duncan Coutts 2011
-- John Millikin 2012
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- The 'cabal get' command.
-----------------------------------------------------------------------------
module Distribution.Client.Get (
get
) where
import Distribution.Package
( PackageId, packageId, packageName )
import Distribution.Simple.Setup
( Flag(..), fromFlag, fromFlagOrDefault )
import Distribution.Simple.Utils
( notice, die, info, writeFileAtomic )
import Distribution.Verbosity
( Verbosity )
import Distribution.Text(display)
import qualified Distribution.PackageDescription as PD
import Distribution.Client.Setup
( GlobalFlags(..), GetFlags(..), RepoContext(..) )
import Distribution.Client.Types
import Distribution.Client.Targets
import Distribution.Client.Dependency
import Distribution.Client.FetchUtils
import qualified Distribution.Client.Tar as Tar (extractTarGzFile)
import Distribution.Client.IndexUtils as IndexUtils
( getSourcePackages )
import Distribution.Client.Compat.Process
( readProcessWithExitCode )
import Distribution.Compat.Exception
( catchIO )
import Control.Exception
( finally )
import Control.Monad
( filterM, forM_, unless, when )
import Data.List
( sortBy )
import qualified Data.Map
import Data.Maybe
( listToMaybe, mapMaybe )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
( mempty )
#endif
import Data.Ord
( comparing )
import System.Directory
( createDirectoryIfMissing, doesDirectoryExist, doesFileExist
, getCurrentDirectory, setCurrentDirectory
)
import System.Exit
( ExitCode(..) )
import System.FilePath
( (</>), (<.>), addTrailingPathSeparator )
import System.Process
( rawSystem )
-- | Entry point for the 'cabal get' command.
get :: Verbosity
    -> RepoContext
    -> GlobalFlags
    -> GetFlags
    -> [UserTarget]
    -> IO ()
get verbosity _ _ _ [] =
    notice verbosity "No packages requested. Nothing to do."
get verbosity repoCtxt globalFlags getFlags userTargets = do
  -- Fork from a source repository iff the source-repository flag was given;
  -- otherwise unpack tarballs (in which case targets must be tarball-able).
  let useFork = case (getSourceRepository getFlags) of
        NoFlag -> False
        _ -> True
  unless useFork $
    mapM_ checkTarget userTargets
  sourcePkgDb <- getSourcePackages verbosity repoCtxt
  pkgSpecifiers <- resolveUserTargets verbosity repoCtxt
                   (fromFlag $ globalWorldFile globalFlags)
                   (packageIndex sourcePkgDb)
                   userTargets
  pkgs <- either (die . unlines . map show) return $
            resolveWithoutDependencies
              (resolverParams sourcePkgDb pkgSpecifiers)
  unless (null prefix) $
    createDirectoryIfMissing True prefix
  if useFork
    then fork pkgs
    else unpack pkgs
  where
    resolverParams sourcePkgDb pkgSpecifiers =
        --TODO: add command-line constraint and preference args for unpack
        standardInstallPolicy mempty sourcePkgDb pkgSpecifiers
    prefix = fromFlagOrDefault "" (getDestDir getFlags)
    -- Clone/branch every package from its declared source repository.
    fork :: [UnresolvedSourcePackage] -> IO ()
    fork pkgs = do
      let kind = fromFlag . getSourceRepository $ getFlags
      branchers <- findUsableBranchers
      mapM_ (forkPackage verbosity branchers prefix kind) pkgs
    -- Fetch and unpack every package's tarball into 'prefix'.
    unpack :: [UnresolvedSourcePackage] -> IO ()
    unpack pkgs = do
      forM_ pkgs $ \pkg -> do
        location <- fetchPackage verbosity repoCtxt (packageSource pkg)
        let pkgid = packageId pkg
            descOverride | usePristine = Nothing
                         | otherwise = packageDescrOverride pkg
        case location of
          LocalTarballPackage tarballPath ->
            unpackPackage verbosity prefix pkgid descOverride tarballPath
          RemoteTarballPackage _tarballURL tarballPath ->
            unpackPackage verbosity prefix pkgid descOverride tarballPath
          RepoTarballPackage _repo _pkgid tarballPath ->
            unpackPackage verbosity prefix pkgid descOverride tarballPath
          LocalUnpackedPackage _ ->
            error "Distribution.Client.Get.unpack: the impossible happened."
      where
        usePristine = fromFlagOrDefault False (getPristine getFlags)
-- | Reject targets that cannot be fetched as tarballs (local
-- directories and local .cabal files).
checkTarget :: UserTarget -> IO ()
checkTarget target =
    case target of
      UserTargetLocalDir dir -> bail dir
      UserTargetLocalCabalFile file -> bail file
      _ -> return ()
  where
    bail t = die $ "The 'get' command is for tarball packages. "
                ++ "The target '" ++ t ++ "' is not a tarball."
-- ------------------------------------------------------------
-- * Unpacking the source tarball
-- ------------------------------------------------------------
-- | Unpack a fetched source tarball into @prefix\/\<pkgid\>@, refusing
-- to overwrite an existing file or directory there, and optionally
-- replacing the .cabal file with the latest revision from the index.
unpackPackage :: Verbosity -> FilePath -> PackageId
              -> PackageDescriptionOverride
              -> FilePath -> IO ()
unpackPackage verbosity prefix pkgid descOverride pkgPath = do
  let pkgdirname = display pkgid
      pkgdir = prefix </> pkgdirname
      pkgdir' = addTrailingPathSeparator pkgdir
  existsDir <- doesDirectoryExist pkgdir
  when existsDir $ die $
    "The directory \"" ++ pkgdir' ++ "\" already exists, not unpacking."
  existsFile <- doesFileExist pkgdir
  when existsFile $ die $
    "A file \"" ++ pkgdir ++ "\" is in the way, not unpacking."
  notice verbosity $ "Unpacking to " ++ pkgdir'
  Tar.extractTarGzFile prefix pkgdirname pkgPath
  case descOverride of
    Nothing -> return ()
    Just pkgtxt -> do
      let descFilePath = pkgdir </> display (packageName pkgid) <.> "cabal"
      info verbosity $
        "Updating " ++ descFilePath
        ++ " with the latest revision from the index."
      writeFileAtomic descFilePath pkgtxt
-- ------------------------------------------------------------
-- * Forking the source repository
-- ------------------------------------------------------------
-- | An action that creates a branch/clone in the given directory,
-- returning the child process's exit code.
data BranchCmd = BranchCmd (Verbosity -> FilePath -> IO ExitCode)
data Brancher = Brancher
    { brancherBinary :: String
      -- ^ Name of the VCS executable, e.g. @\"git\"@.
    , brancherBuildCmd :: PD.SourceRepo -> Maybe BranchCmd
      -- ^ Build the branch command for a repo, when the repo
      -- description carries enough information (e.g. a location).
    }
-- | The set of all supported branch drivers.
allBranchers :: [(PD.RepoType, Brancher)]
allBranchers =
[ (PD.Bazaar, branchBzr)
, (PD.Darcs, branchDarcs)
, (PD.Git, branchGit)
, (PD.Mercurial, branchHg)
, (PD.SVN, branchSvn)
]
-- | Find which usable branch drivers (selected from 'allBranchers') are
-- available and usable on the local machine.
--
-- Each driver's main command is run with @--help@, and if the child process
-- exits successfully, that brancher is considered usable.
findUsableBranchers :: IO (Data.Map.Map PD.RepoType Brancher)
findUsableBranchers = do
let usable (_, brancher) = flip catchIO (const (return False)) $ do
let cmd = brancherBinary brancher
(exitCode, _, _) <- readProcessWithExitCode cmd ["--help"] ""
return (exitCode == ExitSuccess)
pairs <- filterM usable allBranchers
return (Data.Map.fromList pairs)
-- | Fork a single package from a remote source repository to the local
-- file system.
forkPackage :: Verbosity
            -> Data.Map.Map PD.RepoType Brancher
               -- ^ Branchers supported by the local machine.
            -> FilePath
               -- ^ The directory in which new branches or repositories will
               -- be created.
            -> (Maybe PD.RepoKind)
               -- ^ Which repo to choose.
            -> SourcePackage loc
               -- ^ The package to fork.
            -> IO ()
forkPackage verbosity branchers prefix kind src = do
  let desc = PD.packageDescription (packageDescription src)
      pkgid = display (packageId src)
      pkgname = display (packageName src)
      destdir = prefix </> pkgname
  -- Refuse to clobber an existing directory or file at the destination.
  destDirExists <- doesDirectoryExist destdir
  when destDirExists $ do
    die ("The directory " ++ show destdir ++ " already exists, not forking.")
  destFileExists <- doesFileExist destdir
  when destFileExists $ do
    die ("A file " ++ show destdir ++ " is in the way, not forking.")
  let repos = PD.sourceRepos desc
  case findBranchCmd branchers repos kind of
    Just (BranchCmd io) -> do
      exitCode <- io verbosity destdir
      case exitCode of
        ExitSuccess -> return ()
        ExitFailure _ -> die ("Couldn't fork package " ++ pkgid)
    Nothing -> case repos of
      [] -> die ("Package " ++ pkgid
                 ++ " does not have any source repositories.")
      _ -> die ("Package " ++ pkgid
                ++ " does not have any usable source repositories.")
-- | Given a set of possible branchers, and a set of possible source
-- repositories, find a repository that is both 1) likely to be specific to
-- this source version and 2) is supported by the local machine.
findBranchCmd :: Data.Map.Map PD.RepoType Brancher -> [PD.SourceRepo]
              -> (Maybe PD.RepoKind) -> Maybe BranchCmd
findBranchCmd branchers allRepos maybeKind = cmd where
    -- Sort repositories by kind, from This to Head to Unknown. Repositories
    -- with equivalent kinds are selected based on the order they appear in
    -- the Cabal description file.
    repos' = sortBy (comparing thisFirst) allRepos
    thisFirst r = case PD.repoKind r of
        PD.RepoThis -> 0 :: Int
        PD.RepoHead -> case PD.repoTag r of
            -- If the type is 'head' but the author specified a tag, they
            -- probably meant to create a 'this' repository but screwed up.
            Just _ -> 0
            Nothing -> 1
        PD.RepoKindUnknown _ -> 2
    -- If the user has specified the repo kind, filter out the repositories
    -- she's not interested in.
    repos = maybe repos' (\k -> filter ((==) k . PD.repoKind) repos') maybeKind
    repoBranchCmd repo = do
        t <- PD.repoType repo
        brancher <- Data.Map.lookup t branchers
        brancherBuildCmd brancher repo
    -- First repo (in sorted order) for which a local brancher succeeds.
    cmd = listToMaybe (mapMaybe repoBranchCmd repos)
-- | Branch driver for Bazaar.
branchBzr :: Brancher
branchBzr = Brancher "bzr" $ \repo -> do
    src <- PD.repoLocation repo
    -- Pin the branch with "-r tag:<tag>" when the repo specifies a tag.
    let args dst = case PD.repoTag repo of
            Just tag -> ["branch", src, dst, "-r", "tag:" ++ tag]
            Nothing -> ["branch", src, dst]
    return $ BranchCmd $ \verbosity dst -> do
        notice verbosity ("bzr: branch " ++ show src)
        rawSystem "bzr" (args dst)
-- | Branch driver for Darcs.
branchDarcs :: Brancher
branchDarcs = Brancher "darcs" $ \repo -> do
    src <- PD.repoLocation repo
    -- Pass "-t <tag>" when the repo pins a tag.
    let args dst = case PD.repoTag repo of
            Just tag -> ["get", src, dst, "-t", tag]
            Nothing -> ["get", src, dst]
    return $ BranchCmd $ \verbosity dst -> do
        notice verbosity ("darcs: get " ++ show src)
        rawSystem "darcs" (args dst)
-- | Branch driver for Git.
branchGit :: Brancher
branchGit = Brancher "git" $ \repo -> do
    src <- PD.repoLocation repo
    let branchArgs = case PD.repoBranch repo of
            Just b -> ["--branch", b]
            Nothing -> []
    -- After a successful clone, check out the pinned tag (if any),
    -- restoring the original working directory even on failure.
    let postClone dst = case PD.repoTag repo of
            Just t -> do
                cwd <- getCurrentDirectory
                setCurrentDirectory dst
                finally
                    (rawSystem "git" (["checkout", t] ++ branchArgs))
                    (setCurrentDirectory cwd)
            Nothing -> return ExitSuccess
    return $ BranchCmd $ \verbosity dst -> do
        notice verbosity ("git: clone " ++ show src)
        code <- rawSystem "git" (["clone", src, dst] ++ branchArgs)
        case code of
            ExitFailure _ -> return code
            ExitSuccess -> postClone dst
-- | Branch driver for Mercurial.
branchHg :: Brancher
branchHg = Brancher "hg" $ \repo -> do
    src <- PD.repoLocation repo
    -- "--branch"/"--rev" select the pinned branch/tag, when present.
    let branchArgs = case PD.repoBranch repo of
            Just b -> ["--branch", b]
            Nothing -> []
    let tagArgs = case PD.repoTag repo of
            Just t -> ["--rev", t]
            Nothing -> []
    let args dst = ["clone", src, dst] ++ branchArgs ++ tagArgs
    return $ BranchCmd $ \verbosity dst -> do
        notice verbosity ("hg: clone " ++ show src)
        rawSystem "hg" (args dst)
-- | Branch driver for Subversion.
branchSvn :: Brancher
branchSvn = Brancher "svn" $ \repo -> do
    src <- PD.repoLocation repo
    -- Plain checkout: no branch/tag selection is passed for svn.
    let args dst = ["checkout", src, dst]
    return $ BranchCmd $ \verbosity dst -> do
        notice verbosity ("svn: checkout " ++ show src)
        rawSystem "svn" (args dst)
| gbaz/cabal | cabal-install/Distribution/Client/Get.hs | bsd-3-clause | 12,820 | 0 | 22 | 3,419 | 2,989 | 1,529 | 1,460 | 253 | 6 |
{-# LANGUAGE ViewPatterns #-}
{- The spirit of guarded recursion is that we can only
re-enter the recursion with a smaller value.
Can we enforce this with view patterns and a type system that
tracks effective refinement types?
-}
import Control.Arrow
-- | Peano naturals: 'Z' is zero, 'S' is successor.
data Nat = Z | S Nat deriving Show
-- Small fixtures.  Note the forward reference: 'fib' is defined below.
five = S (S (S (S (S Z))))
six = S five
eight = fib six -- fib 6 == 8
twentyOne = fib eight -- fib 8 == 21
-- Here comes the challenge: we cannot use the
-- induction hypothesis on the right hand sides
-- of our equations, only in the patterns!
-- (And not on top-level!)
-- | Addition by recursion on the second argument.  The view pattern
-- @(plus m -> sum)@ performs the recursive call on the *predecessor*
-- while matching, so the right-hand side only uses the finished result.
-- (The local binding @sum@ shadows 'Prelude.sum'.)
plus m Z = m
plus m (S (plus m -> sum)) = S sum
--- |
--- +--> :: Nat -> (S effective ° Nat) -> Nat
-- | Fibonacci in the guarded-recursion style: both recursive calls
-- happen inside view patterns.  '&&&' pairs @fib n@ with @n@ itself so
-- the predecessor can be matched (and 'fib'-ed) a second time.
fib :: Nat -> Nat
fib zero@Z = zero
fib one@(S Z) = one
fib (S (fib&&&id -> (f, S (fib -> g)))) = f `plus` g
--- | |
--- | +--> :: (S (S effective) ° Nat) -> Nat
--- |
--- +--> :: (S effective ° Nat) -> Nat
-- Okay, so how do we write an Ackermann function?
-- The (curried) original:
-- Ack Z = S
-- Ack (S m) = Iter (Ack m)
-- Iter f Z = f (S Z)
-- Iter f (S n) = f (Iter f n)
{-
ack Z = S
ack (S (iter . ack -> result)) = result
iter f Z = f (S Z)
iter f (f . iter f -> result) = result
-}
-- | Only 'fromInteger' is implemented (enough for numeric literals);
-- the remaining 'Num' methods are left at their defaults.
-- NOTE(review): diverges for negative literals (n - 1 never reaches 0)
-- -- TODO confirm this is intended for a demo file.
instance Num Nat where
  fromInteger 0 = Z
  fromInteger n = S $ fromInteger $ n - 1
-- From http://rosettacode.org/wiki/Ackermann_function#Haskell
{-
ack Z n = S n
ack (S m) Z = ack m (S Z)
ack m@(S m1) (S n) = ack m1 (ack m n)
-}
-- | Ackermann function with the recursive calls moved into view
-- patterns, mirroring the Rosetta Code version quoted above.
-- (The local binding @second@ shadows 'Control.Arrow.second'.)
ack Z n = S n
ack (S (ack -> f)) Z = f (S Z)
ack m@(S (ack -> f)) (S (ack m -> second)) = f second
| minad/omega | mosaic/GuardedFib.hs | bsd-3-clause | 1,552 | 0 | 13 | 435 | 347 | 189 | 158 | 19 | 1 |
module Data.JSTarget.Op where
import Prelude hiding (GT, LT)
-- | Binary operators of the JavaScript target language, covering
-- arithmetic, logical, comparison and bitwise forms.
data BinOp
  = Add
  | Mul
  | Sub
  | Div
  | Mod
  | And
  | Or
  | Eq
  | Neq
  | LT
  | GT
  | LTE
  | GTE
  | Shl
  | ShrL  -- logical (zero-fill) shift right
  | ShrA  -- arithmetic (sign-extending) shift right
  | BitAnd
  | BitOr
  | BitXor
    deriving (Eq)
-- | Render each operator exactly as it is spelled in JavaScript source.
instance Show BinOp where
  show op = case op of
    Add    -> "+"
    Mul    -> "*"
    Sub    -> "-"
    Div    -> "/"
    Mod    -> "%"
    And    -> "&&"
    Or     -> "||"
    Eq     -> "=="
    Neq    -> "!="
    LT     -> "<"
    GT     -> ">"
    LTE    -> "<="
    GTE    -> ">="
    Shl    -> "<<"
    ShrL   -> ">>>"
    ShrA   -> ">>"
    BitAnd -> "&"
    BitOr  -> "|"
    BitXor -> "^"
-- | Returns the precedence of the given operator as an int. Higher number
-- means higher priority (same numbering as JavaScript's grammar).
opPrec :: BinOp -> Int
opPrec op = case op of
  Mul    -> 100
  Div    -> 100
  Mod    -> 100
  Add    -> 70
  Sub    -> 70
  Shl    -> 60
  ShrA   -> 60
  ShrL   -> 60
  LT     -> 50
  GT     -> 50
  LTE    -> 50
  GTE    -> 50
  Eq     -> 30
  Neq    -> 30
  BitAnd -> 25
  BitXor -> 24
  BitOr  -> 23
  And    -> 20
  Or     -> 10
-- | Is the given operator associative? (Associative operators allow
-- parenthesis-free chaining when pretty-printing.)
opIsAssoc :: BinOp -> Bool
opIsAssoc op = op `elem` [Mul, Add, BitAnd, BitOr, BitXor, And, Or]
| joelburget/haste-compiler | src/Data/JSTarget/Op.hs | bsd-3-clause | 1,418 | 0 | 6 | 480 | 484 | 258 | 226 | 72 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sl-SI">
<title>Regular Expression Tester</title>
<maps>
<homeID>regextester</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/regextester/src/main/javahelp/help_sl_SI/helpset_sl_SI.hs | apache-2.0 | 978 | 77 | 66 | 157 | 409 | 207 | 202 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="id-ID">
<title>Mulai Cepat | Ekstensi ZAP</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Konten</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Pencarian</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorit</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/diff/src/main/javahelp/org/zaproxy/zap/extension/diff/resources/help_id_ID/helpset_id_ID.hs | apache-2.0 | 971 | 78 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
module Expr1 where
import Control.Parallel.Strategies
-- | Fibonacci-style function whose two recursive calls are sparked in
-- parallel with 'rpar'; the extra @+ 1@ matches the original definition
-- (it counts the node itself, as in the classic parfib benchmark).
fib n
  | n <= 1 = 1
  | otherwise = n1_2 + n2_2 + 1
  where
    n1 = fib (n - 1)
    n2 = fib (n - 2)
    -- Fix: the original where-clause also bound @n1_2 = fib 42@, a
    -- conflicting duplicate definition of n1_2 in the same scope,
    -- which GHC rejects. The stray binding has been removed.
    (n1_2, n2_2) =
      runEval
        (do n1' <- rpar n1
            n2' <- rpar n2
            return (n1', n2'))
| RefactoringTools/HaRe | old/testing/introThreshold/Expr1.hs | bsd-3-clause | 365 | 0 | 12 | 182 | 135 | 68 | 67 | 14 | 1 |
{-
- Instant Insanity using Closed Type Families, but no DataKinds
-
- See: http://stackoverflow.com/questions/26538595
-}
{-# OPTIONS_GHC -ftype-function-depth=400 #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
import Prelude hiding (all, flip, map, filter )
-- | Kind-polymorphic proxy carrying only a type.
data Proxy a = Proxy

-- Mentioning @Proxy (Solutions Cubes)@ forces the type checker to run
-- the whole type-level solver below; this file is a compile-time
-- performance test, so typechecking is the interesting part.
main = print (Proxy :: Proxy (Solutions Cubes))

-- Face colours, encoded as empty data types (no DataKinds).
data R -- Red
data G -- Green
data B -- Blue
data W -- White

-- A cube with faces up, front, right, back, left, down.
data Cube u f r b l d

-- Type-level booleans, again as plain empty types.
data True
data False
-- | Type-level boolean AND.
type family And b1 b2 :: * where
  And True True = True
  And b1 b2 = False

-- | Type-level inequality, via the non-linear first equation.
type family NE x y :: * where
  NE x x = False
  NE x y = True

-- | Type-level equality.
type family EQ x y :: * where
  EQ a a = True
  EQ a b = False

-- Type-level lists as plain datatypes.
data Nil = Nil
data Cons x xs = Cons x xs

-- | True iff every element of the list is True.
type family All l :: * where
  All Nil = True
  All (Cons False xs) = False
  All (Cons True xs) = All xs

-- | Type-level list concatenation.
type family ListConcat xs ys :: * where
  ListConcat Nil ys = ys
  ListConcat (Cons x xs) ys = Cons x (ListConcat xs ys)

-- | Cons @a@ onto @as@ only when the flag @b@ is True.
type family AppendIf b a as :: * where
  AppendIf False a as = as
  AppendIf True a as = Cons a as
-- Cube moves, as uninhabited tags interpreted by 'Apply'.
data Rotate
data Twist
data Flip

-- | Apply a move to a cube by permuting its six faces.
type family Apply f a :: * where
  Apply Rotate (Cube u f r b l d) = (Cube u r b l f d)
  Apply Twist (Cube u f r b l d) = (Cube f r u l d b)
  Apply Flip (Cube u f r b l d) = (Cube d l b r f u)

-- | Map a move over a type-level list of cubes.
type family Map f as :: * where
  Map f Nil = Nil
  Map f (Cons a as) = Cons (Apply f a) (Map f as)

-- MapAppend(2,3): xs ++ map f xs, iterated so a list is extended with
-- one, two or three successive applications of the move.
type family MapAppend f as :: * where
  MapAppend f xs = ListConcat xs (Map f xs)

type family MapAppend2 f as :: * where
  MapAppend2 f xs = ListConcat xs (MapAppend f (Map f xs))

type family MapAppend3 f as :: * where
  MapAppend3 f xs = ListConcat xs (MapAppend2 f (Map f xs))

-- Iterate2/3/4: per element, keep the element plus 1, 2 resp. 3
-- successive applications of the move. Not referenced by 'Solutions';
-- retained from the original development.
type family Iterate2 f as :: * where
  Iterate2 f Nil = Nil
  Iterate2 f (Cons a as) = ListConcat (Cons (Apply f a) (Cons a Nil)) (Iterate2 f as)

type family Iterate3 f as :: * where
  Iterate3 f (Cons a as) =
    ListConcat (Cons a
                (Cons (Apply f a)
                 (Cons (Apply f (Apply f a))
                  Nil)))
               (Iterate3 f as)

type family Iterate4 f as :: * where
  Iterate4 f Nil = Nil
  Iterate4 f (Cons a as) =
    ListConcat (Cons a
                (Cons (Apply f a)
                 (Cons (Apply f (Apply f a))
                  (Cons (Apply f (Apply f (Apply f a)))
                   Nil))))
               (Iterate4 f as)
-- | All 24 orientations of a cube: flip doubles, twist triples,
-- rotate quadruples the list (2 * 3 * 4 = 24).
type family Orientations c :: * where
  Orientations c = MapAppend3 Rotate (MapAppend2 Twist (MapAppend Flip (Cons c Nil)))

-- The four puzzle cubes and the puzzle itself.
type Cube1 = Cube B G W G B R
type Cube2 = Cube W G B W R R
type Cube3 = Cube G W R B R R
type Cube4 = Cube B R G G W W
type Cubes = Cons Cube1 (Cons Cube2 (Cons Cube3 (Cons Cube4 Nil)))

-- | Two stacked cubes are compatible when their four visible side
-- faces (front, right, back, left) all differ pairwise.
type family Compatible c d :: * where
  Compatible (Cube u1 f1 r1 b1 l1 d1) (Cube u2 f2 r2 b2 l2 d2) =
    All (Cons (NE f1 f2) (Cons (NE r1 r2) (Cons (NE b1 b2) (Cons (NE l1 l2) Nil))))

-- | A cube may join a partial solution if compatible with every member.
type family Allowed c cs :: * where
  Allowed c Nil = True
  Allowed c (Cons s ss) = And (Compatible c s) (Allowed c ss)

-- | Extend one partial solution with every allowed orientation.
type family MatchingOrientations as sol :: * where
  MatchingOrientations Nil sol = Nil
  MatchingOrientations (Cons o os) sol =
    AppendIf (Allowed o sol) (Cons o sol) (MatchingOrientations os sol)

-- | Extend every partial solution with every allowed orientation.
type family AllowedCombinations os sols :: * where
  AllowedCombinations os Nil = Nil
  AllowedCombinations os (Cons sol sols) =
    ListConcat (MatchingOrientations os sol) (AllowedCombinations os sols)

-- | Solve cube by cube, starting from the single empty solution.
type family Solutions c :: * where
  Solutions Nil = Cons Nil Nil
  Solutions (Cons c cs) = AllowedCombinations (Orientations c) (Solutions cs)
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/perf/compiler/T9872c.hs | bsd-3-clause | 3,681 | 0 | 20 | 1,097 | 1,491 | 807 | 684 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
module T8357 where
import Data.Kind (Type)
import GHC.TypeLits
-- | A labelled field: the Symbol @sy@ paired with its value type.
data (:::) (sy :: Symbol) ty
-- | A record key, indexed by its label.
data Key (sy :: Symbol)
-- | A record described by a type-level list of labelled fields.
data Rec (rs :: [Type])
-- | Build a one-field record. (Bodies are stubs: this module is a
-- type-checker regression test, only typechecking matters.)
(*=) :: Key sy -> ty -> Rec '[sy ::: ty]
(*=) = undefined
-- | Merge two records; the field lists are concatenated by 'Union'.
(.*.) :: (Union xs ys ~ rs) => Rec xs -> Rec ys -> Rec rs
(.*.) = undefined
-- | Type-level concatenation of field lists.
type family Union (xs :: [Type]) (ys :: [Type]) :: [Type] where
  Union ((sy ::: t) ': xs) ys = (sy ::: t) ': Union xs ys
  Union '[] ys = ys
fFoo :: Key "foo"
fFoo = undefined
fBar :: Key "bar"
fBar = undefined
-- These bindings exist only to be typechecked.
foo = fFoo *= "foo"
bar = fBar *= "bar"
both = foo .*. bar
| sdiehl/ghc | testsuite/tests/ghci/scripts/T8357.hs | bsd-3-clause | 734 | 0 | 10 | 159 | 287 | 170 | 117 | -1 | -1 |
{-# LANGUAGE CPP #-}
-- | Hard wired things related to registers.
-- This is module is preventing the native code generator being able to
-- emit code for non-host architectures.
--
-- TODO: Do a better job of the overloading, and eliminate this module.
-- We'd probably do better with a Register type class, and hook this to
-- Instruction somehow.
--
-- TODO: We should also make arch specific versions of RegAlloc.Graph.TrivColorable
module TargetReg (
targetVirtualRegSqueeze,
targetRealRegSqueeze,
targetClassOfRealReg,
targetMkVirtualReg,
targetRegDotColor,
targetClassOfReg
)
where
#include "HsVersions.h"
import Reg
import RegClass
import Format
import Outputable
import Unique
import Platform
import qualified X86.Regs as X86
import qualified X86.RegInfo as X86
import qualified PPC.Regs as PPC
import qualified SPARC.Regs as SPARC
-- | Architecture-dispatched virtual-register squeeze estimate; panics
-- for targets that have no native code generator backend.
targetVirtualRegSqueeze :: Platform -> RegClass -> VirtualReg -> Int
targetVirtualRegSqueeze platform =
    case platformArch platform of
        ArchX86        -> X86.virtualRegSqueeze
        ArchX86_64     -> X86.virtualRegSqueeze
        ArchPPC        -> PPC.virtualRegSqueeze
        ArchSPARC      -> SPARC.virtualRegSqueeze
        ArchPPC_64 _   -> PPC.virtualRegSqueeze
        ArchARM _ _ _  -> panic "targetVirtualRegSqueeze ArchARM"
        ArchARM64      -> panic "targetVirtualRegSqueeze ArchARM64"
        ArchAlpha      -> panic "targetVirtualRegSqueeze ArchAlpha"
        ArchMipseb     -> panic "targetVirtualRegSqueeze ArchMipseb"
        ArchMipsel     -> panic "targetVirtualRegSqueeze ArchMipsel"
        ArchJavaScript -> panic "targetVirtualRegSqueeze ArchJavaScript"
        ArchUnknown    -> panic "targetVirtualRegSqueeze ArchUnknown"
-- | Architecture-dispatched real-register squeeze estimate; panics for
-- targets that have no native code generator backend.
targetRealRegSqueeze :: Platform -> RegClass -> RealReg -> Int
targetRealRegSqueeze platform =
    case platformArch platform of
        ArchX86        -> X86.realRegSqueeze
        ArchX86_64     -> X86.realRegSqueeze
        ArchPPC        -> PPC.realRegSqueeze
        ArchSPARC      -> SPARC.realRegSqueeze
        ArchPPC_64 _   -> PPC.realRegSqueeze
        ArchARM _ _ _  -> panic "targetRealRegSqueeze ArchARM"
        ArchARM64      -> panic "targetRealRegSqueeze ArchARM64"
        ArchAlpha      -> panic "targetRealRegSqueeze ArchAlpha"
        ArchMipseb     -> panic "targetRealRegSqueeze ArchMipseb"
        ArchMipsel     -> panic "targetRealRegSqueeze ArchMipsel"
        ArchJavaScript -> panic "targetRealRegSqueeze ArchJavaScript"
        ArchUnknown    -> panic "targetRealRegSqueeze ArchUnknown"
-- | Register class of a real register on the given target. Note the
-- x86 variants additionally need the platform itself.
targetClassOfRealReg :: Platform -> RealReg -> RegClass
targetClassOfRealReg platform =
    case platformArch platform of
        ArchX86        -> X86.classOfRealReg platform
        ArchX86_64     -> X86.classOfRealReg platform
        ArchPPC        -> PPC.classOfRealReg
        ArchSPARC      -> SPARC.classOfRealReg
        ArchPPC_64 _   -> PPC.classOfRealReg
        ArchARM _ _ _  -> panic "targetClassOfRealReg ArchARM"
        ArchARM64      -> panic "targetClassOfRealReg ArchARM64"
        ArchAlpha      -> panic "targetClassOfRealReg ArchAlpha"
        ArchMipseb     -> panic "targetClassOfRealReg ArchMipseb"
        ArchMipsel     -> panic "targetClassOfRealReg ArchMipsel"
        ArchJavaScript -> panic "targetClassOfRealReg ArchJavaScript"
        ArchUnknown    -> panic "targetClassOfRealReg ArchUnknown"
-- | Make a virtual register of the right flavour for the target.
targetMkVirtualReg :: Platform -> Unique -> Format -> VirtualReg
targetMkVirtualReg platform =
    case platformArch platform of
        ArchX86        -> X86.mkVirtualReg
        ArchX86_64     -> X86.mkVirtualReg
        ArchPPC        -> PPC.mkVirtualReg
        ArchSPARC      -> SPARC.mkVirtualReg
        ArchPPC_64 _   -> PPC.mkVirtualReg
        ArchARM _ _ _  -> panic "targetMkVirtualReg ArchARM"
        ArchARM64      -> panic "targetMkVirtualReg ArchARM64"
        ArchAlpha      -> panic "targetMkVirtualReg ArchAlpha"
        ArchMipseb     -> panic "targetMkVirtualReg ArchMipseb"
        ArchMipsel     -> panic "targetMkVirtualReg ArchMipsel"
        ArchJavaScript -> panic "targetMkVirtualReg ArchJavaScript"
        ArchUnknown    -> panic "targetMkVirtualReg ArchUnknown"
-- | Colour used for a real register when emitting graph-colouring
-- debug output in dot format.
targetRegDotColor :: Platform -> RealReg -> SDoc
targetRegDotColor platform =
    case platformArch platform of
        ArchX86        -> X86.regDotColor platform
        ArchX86_64     -> X86.regDotColor platform
        ArchPPC        -> PPC.regDotColor
        ArchSPARC      -> SPARC.regDotColor
        ArchPPC_64 _   -> PPC.regDotColor
        ArchARM _ _ _  -> panic "targetRegDotColor ArchARM"
        ArchARM64      -> panic "targetRegDotColor ArchARM64"
        ArchAlpha      -> panic "targetRegDotColor ArchAlpha"
        ArchMipseb     -> panic "targetRegDotColor ArchMipseb"
        ArchMipsel     -> panic "targetRegDotColor ArchMipsel"
        ArchJavaScript -> panic "targetRegDotColor ArchJavaScript"
        ArchUnknown    -> panic "targetRegDotColor ArchUnknown"
-- | Register class of any register, virtual or real.
targetClassOfReg :: Platform -> Reg -> RegClass
targetClassOfReg _        (RegVirtual vr) = classOfVirtualReg vr
targetClassOfReg platform (RegReal rr)    = targetClassOfRealReg platform rr
| acowley/ghc | compiler/nativeGen/TargetReg.hs | bsd-3-clause | 5,044 | 0 | 9 | 1,168 | 867 | 428 | 439 | 98 | 12 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Tests.Util
-- Copyright : (C) 2015 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Richard Eisenberg (eir@cis.upenn.edu)
-- Stability : experimental
--
-- Utility definitions for testing glambda
--
----------------------------------------------------------------------------
module Tests.Util (
module Test.Tasty,
testCase,
(@?=), (@=?), (@?) )
where
import Language.Glambda.Util
import Test.Tasty
import Test.Tasty.HUnit ( testCase, (@?), Assertion )
import Text.PrettyPrint.ANSI.Leijen
import Text.Parsec ( ParseError )
import Data.Function
import Language.Haskell.TH
import Control.Monad
-- | Render an \"Expected X; got Y\" diagnostic for failed assertions.
prettyError :: Pretty a => a -> a -> String
prettyError expected actual =
  render $ text "Expected" <+> squotes (pretty expected) <> semi <+>
           text "got" <+> squotes (pretty actual)
-- | Like HUnit's assertion operator, but failure messages are rendered
-- with 'Pretty'. Actual value on the left, expected on the right.
(@?=) :: (Eq a, Pretty a) => a -> a -> Assertion
act @?= exp = (act == exp) @? prettyError exp act

-- | Flipped variant: expected on the left, actual on the right.
(@=?) :: (Eq a, Pretty a) => a -> a -> Assertion
exp @=? act = (act == exp) @? prettyError exp act
-- Generate an @Eq ParseError@ instance (comparing via 'show') at
-- compile time, but only if 'reifyInstances' finds none already in
-- scope -- avoiding a duplicate-instance error on compilers/libraries
-- that provide one.
$( do decs <- reifyInstances ''Eq [ConT ''ParseError]
      case decs of -- GHC 7.6 eagerly typechecks the instance, sometimes
                   -- reporting a duplicate. Urgh. So we can't quote it.
        [] -> liftM (:[]) $
              instanceD (return []) (appT (conT ''Eq) (conT ''ParseError))
                [ valD (varP '(==)) (normalB [| (==) `on` show |]) [] ]
        _ -> return [] )
-- | Orphan instance so 'Either' results pretty-print in test output.
instance (Pretty a, Pretty b) => Pretty (Either a b) where
  pretty (Left x) = text "Left" <+> pretty x
  pretty (Right x) = text "Right" <+> pretty x
| ajnsit/glambda | tests/Tests/Util.hs | bsd-3-clause | 1,779 | 0 | 18 | 405 | 517 | 283 | 234 | 30 | 1 |
{-# LANGUAGE QuasiQuotes, TypeFamilies, TemplateHaskell, MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances, ViewPatterns #-}
{-# LANGUAGE CPP #-}
module YesodCoreTest.CleanPath (cleanPathTest, Widget) where
import Test.Hspec
import Yesod.Core
import Network.Wai
import Network.Wai.Test
import Network.HTTP.Types (status200, decodePathSegments)
import qualified Data.ByteString.Lazy.Char8 as L8
import qualified Data.Text as TS
import qualified Data.Text.Encoding as TE
import Control.Arrow ((***))
import Network.HTTP.Types (encodePath)
import Data.Monoid (mappend)
import Blaze.ByteString.Builder.Char.Utf8 (fromText)
-- | A trivial subsite used to exercise dispatch through 'cleanPath'.
data Subsite = Subsite

-- | Accessor handed to the route quasi-quoter below.
getSubsite :: a -> Subsite
getSubsite = const Subsite
instance RenderRoute Subsite where
    data Route Subsite = SubsiteRoute [TS.Text]
        deriving (Eq, Show, Read)
    -- A subsite route renders back to its raw path pieces, no query.
    renderRoute (SubsiteRoute x) = (x, [])
instance ParseRoute Subsite where
    -- Every path parses; the pieces are kept verbatim.
    parseRoute (x, _) = Just $ SubsiteRoute x
-- | Subsite dispatch: always respond 200 with the 'show'n path info as
-- the body and the marker string \"SUBSITE\" as the Content-Type, so
-- tests can tell the subsite handled the request.
instance YesodSubDispatch Subsite master where
    yesodSubDispatch _ req f = f $ responseLBS
        status200
        [ ("Content-Type", "SUBSITE")
        ] $ L8.pack $ show (pathInfo req)
data Y = Y
-- Routes under test; trailing-slash canonicalisation differs per route
-- (see the 'cleanPath' cases in the Yesod instance below).
mkYesod "Y" [parseRoutes|
/foo FooR GET
/foo/#String FooStringR GET
/bar BarR GET
/subsite SubsiteR Subsite getSubsite
/plain PlainR GET
|]
instance Yesod Y where
    approot = ApprootStatic "http://test"
    -- Subsite paths are passed through untouched.
    cleanPath _ s@("subsite":_) = Right s
    -- /bar is canonical WITH a trailing slash: "bar/" is accepted,
    -- bare "bar" redirects to "bar/".
    cleanPath _ ["bar", ""] = Right ["bar"]
    cleanPath _ ["bar"] = Left ["bar", ""]
    -- Everything else: drop empty segments (no trailing slash);
    -- redirect (Left) only when that actually changed the path.
    cleanPath _ s =
        if corrected == s
            then Right s
            else Left corrected
      where
        corrected = filter (not . TS.null) s
    -- Render a route: empty path becomes a single slash, and a query
    -- parameter with an empty value is rendered as a bare key.
    joinPath Y ar pieces' qs' =
        fromText ar `mappend` encodePath pieces qs
      where
        pieces = if null pieces' then [""] else pieces'
        qs = map (TE.encodeUtf8 *** go) qs'
        go "" = Nothing
        go x = Just $ TE.encodeUtf8 x
-- Plain-text handlers; each returns a fixed marker body so the tests
-- below can identify which route was dispatched.
getFooR :: Handler RepPlain
getFooR = return $ RepPlain "foo"
-- | Echoes the captured path segment.
getFooStringR :: String -> Handler RepPlain
getFooStringR = return . RepPlain . toContent
getBarR, getPlainR :: Handler RepPlain
getBarR = return $ RepPlain "bar"
getPlainR = return $ RepPlain "plain"
-- | Top-level spec: request-level redirect/dispatch behaviour plus
-- pure route-parsing checks.
cleanPathTest :: Spec
cleanPathTest =
    describe "Test.CleanPath" $ do
        it "remove trailing slash" removeTrailingSlash
        it "noTrailingSlash" noTrailingSlash
        it "add trailing slash" addTrailingSlash
        it "has trailing slash" hasTrailingSlash
        it "/foo/something" fooSomething
        it "subsite dispatch" subsiteDispatch
        it "redirect with query string" redQueryString
        it "parsing" $ do
            parseRoute (["foo"], []) `shouldBe` Just FooR
            parseRoute (["foo", "bar"], []) `shouldBe` Just (FooStringR "bar")
            parseRoute (["subsite", "some", "path"], []) `shouldBe` Just (SubsiteR $ SubsiteRoute ["some", "path"])
            parseRoute (["ignore", "me"], []) `shouldBe` (Nothing :: Maybe (Route Y))
-- | Run a WAI test session against the application.
runner :: Session () -> IO ()
runner f = toWaiApp Y >>= runSession f

-- /foo/ redirects (301) to the canonical /foo.
removeTrailingSlash :: IO ()
removeTrailingSlash = runner $ do
    res <- request defaultRequest
                { pathInfo = decodePathSegments "/foo/"
                }
    assertStatus 301 res
    assertHeader "Location" "http://test/foo" res

-- /foo is served directly, no redirect.
noTrailingSlash :: IO ()
noTrailingSlash = runner $ do
    res <- request defaultRequest
                { pathInfo = decodePathSegments "/foo"
                }
    assertStatus 200 res
    assertContentType "text/plain; charset=utf-8" res
    assertBody "foo" res

-- /bar redirects (301) to the slash-canonical /bar/.
addTrailingSlash :: IO ()
addTrailingSlash = runner $ do
    res <- request defaultRequest
                { pathInfo = decodePathSegments "/bar"
                }
    assertStatus 301 res
    assertHeader "Location" "http://test/bar/" res

-- /bar/ is served directly.
hasTrailingSlash :: IO ()
hasTrailingSlash = runner $ do
    res <- request defaultRequest
                { pathInfo = decodePathSegments "/bar/"
                }
    assertStatus 200 res
    assertContentType "text/plain; charset=utf-8" res
    assertBody "bar" res

-- Captured segment is echoed back by the handler.
fooSomething :: IO ()
fooSomething = runner $ do
    res <- request defaultRequest
                { pathInfo = decodePathSegments "/foo/something"
                }
    assertStatus 200 res
    assertContentType "text/plain; charset=utf-8" res
    assertBody "something" res

-- Subsite receives the remaining path verbatim (incl. empty segment).
subsiteDispatch :: IO ()
subsiteDispatch = runner $ do
    res <- request defaultRequest
                { pathInfo = decodePathSegments "/subsite/1/2/3/"
                }
    assertStatus 200 res
    assertContentType "SUBSITE" res
    assertBody "[\"1\",\"2\",\"3\",\"\"]" res

-- A trailing-slash redirect must preserve the query string.
redQueryString :: IO ()
redQueryString = runner $ do
    res <- request defaultRequest
                { pathInfo = decodePathSegments "/plain/"
                , rawQueryString = "?foo=bar"
                }
    assertStatus 301 res
    assertHeader "Location" "http://test/plain?foo=bar" res
| ygale/yesod | yesod-core/test/YesodCoreTest/CleanPath.hs | mit | 4,902 | 0 | 16 | 1,188 | 1,330 | 681 | 649 | 120 | 1 |
module UntypedLambda.Compiler
( toIota )
where
import Prelude hiding (id)
import UntypedLambda.Syntax
import qualified Iota.Syntax as I
toIota :: Term -> I.Module
toIota = I.Defs . topLevel
-- | Translate a top-level term into a list of Iota function
-- definitions. Only constants and applications are supported; anything
-- else is reported as untranslatable.
topLevel :: Term -> [I.FunDef]
topLevel (Const n) = [I.FunDef "main" [] (I.Const n)]
topLevel t@(App _ _) = defs ("main" : names) t
-- Fix: the original message was missing the space before "into",
-- producing e.g. "... of Var \"x\"into a list ...".
topLevel t = error $ "no valid translation of " ++ show t ++ " into a list of function definitions"
-- | Translate an application spine into function definitions,
-- consuming one fresh name per definition; the function part is
-- lambda-lifted first. Only the application case is implemented;
-- every other shape is still TODO.
defs :: [I.Name] -> Term -> [I.FunDef]
defs (id:ids) (App t1 t2) = defs ids t2 ++ liftedLambdas ++ [I.FunDef id (params t1') (body t1')]
  where (t1', liftedLambdas) = lambdaLift t1
defs _ _ = error "TODO"
-- Placeholder: should lift lambdas to top level; currently returns a
-- dummy variable and no lifted definitions.
lambdaLift :: Term -> (Term, [I.FunDef])
lambdaLift _ = (Var "x", [])
-- Placeholder: no parameters computed yet.
params :: Term -> [I.Name]
params _ = []
-- Placeholder: constant body.
body :: Term -> I.Expr
body _ = I.Const 42
-- | Infinite supply of distinct helper names: "a", "a'", "a''", ...
names :: [I.Name]
names = iterate (++ "'") "a"
| mmakowski/ambc | src/UntypedLambda/Compiler.hs | mit | 857 | 0 | 9 | 173 | 395 | 213 | 182 | 23 | 1 |
{-# LANGUAGE FlexibleContexts, DeriveDataTypeable, DeriveFunctor, TypeFamilies #-}
module Wf.Control.Eff.Authenticate
( Authenticate(..)
, AuthenticationType(..)
, authenticate
, authenticationTransfer
) where
import Control.Eff (Eff, Member, send, inj)
import Data.Typeable (Typeable)
import Wf.Network.Http.Response (Response)
-- | Associates an authentication method with its credential (key) and
-- user types.
class AuthenticationType auth where
    type AuthenticationUserType auth :: *
    type AuthenticationKeyType auth :: *

-- | Authentication effect: either verify a key and continue with the
-- resulting user, or let the handler transform an outgoing response.
data Authenticate auth a =
    Authenticate auth (AuthenticationKeyType auth) (AuthenticationUserType auth -> a) |
    AuthenticationTransfer auth (Response ()) (Response () -> a)
    deriving (Typeable, Functor)

-- | Request authentication of the given key; yields the user on
-- success (failure semantics are up to the effect handler).
authenticate :: (Typeable auth, Member (Authenticate auth) r)
    => auth -> AuthenticationKeyType auth -> Eff r (AuthenticationUserType auth)
authenticate auth key = send . inj . Authenticate auth key $ id

-- | Hand the response to the effect handler so it can decorate it for
-- authentication purposes.
authenticationTransfer :: (Typeable auth, Member (Authenticate auth) r)
    => auth -> Response () -> Eff r (Response ())
authenticationTransfer auth response = send . inj . AuthenticationTransfer auth response $ id
| bigsleep/Wf | wf-authenticate/src/Wf/Control/Eff/Authenticate.hs | mit | 1,107 | 0 | 11 | 183 | 324 | 178 | 146 | 22 | 1 |
--
-- HUnit tests for the parser
--
-- Create an executable
-- > ghc -o runparsertests testaspparse.hs aspparse.hs
-- > ./runparsertests
--
-- Or in the interpreter:
-- ghci> :l testaspparse.hs
-- ghci> runTestTT tests
--
--
-- Copyright 2012,2013 Vesa Luukkala
--
-- Permission is hereby granted, free of charge, to any person obtaining
-- a copy of this software and associated documentation files (the
-- "Software"), to deal in the Software without restriction, including
-- without limitation the rights to use, copy, modify, merge, publish,
-- distribute, sublicense, and/or sell copies of the Software, and to
-- permit persons to whom the Software is furnished to do so, subject to
-- the following conditions:
--
-- The above copyright notice and this permission notice shall be
-- included in all copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-- NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-- LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--
import qualified Data.List as List
import qualified Data.Map as Map
import System.IO.Unsafe
import Data.IORef
import Text.ParserCombinators.Parsec
import AspParse
import Test.HUnit
{--
parse ruleorfact "" "k { in(X) : vtx(X) }.\n"
Right (Fact [Card (Sym (Const "k")) (Sym (Const "any")) [Typed [Plain (Const "in") [Sym (Var "X")] True,Plain (Const "vtx") [Sym (Var "X")] True]] True])
--}
-- We'd like to do this, but we get
-- No instance for (Eq ParseError)
-- See http://www.haskell.org/pipermail/libraries/2008-September/010655.html
-- for one solution to add the Eqs to parsec types
--truleorfact1 = TestCase $ assertEqual "ruleorfact 1" (Right (Fact [Card (Sym (Const "k")) (Sym (Const "any")) [Typed [Plain (Const "in") [Sym (Var "X")] True,Plain (Const "vtx") [Sym (Var "X")] True]] True]))
-- (parse ruleorfact "" "k { in(X) : vtx(X) }.\n")
-- We cheat a bit and unwrap the error as a valid, but in practice neverseen value
-- of right type.
-- | Run parser @p@ (source name @a1@, input @a2@) and unwrap the result.
-- A parse failure is collapsed to the dummy value @Fact []@, which is
-- never produced by a successful parse, so test cases can compare results
-- with 'assertEqual' even though Parsec's 'ParseError' has no 'Eq'
-- instance.  Note the error message itself is discarded.
wrapparser p a1 a2 = either (const (Fact [])) id (parse p a1 a2)
-- The return type should be [Rules], rather than above.
-- Is this a problem in the types?
-- | Like 'wrapparser' but for parsers whose result is a list of rules
-- (e.g. 'rulebase'); a parse failure collapses to @[Fact []]@.
wrapparser' p a1 a2 = either (const [Fact []]) id (parse p a1 a2)
-- The return type should be [Body], rather than above.
-- | Like 'wrapparser' but for parsers returning a list of body literals
-- (e.g. 'body'); a parse failure collapses to @[Empty]@.
wrapparser_bl p a1 a2 = either (const [Empty]) id (parse p a1 a2)
-- The return type should be [Body], rather than above.
-- | Like 'wrapparser' but for parsers returning a single body literal
-- (e.g. 'genrel', 'bexpr', 'mychoice'); a parse failure collapses to
-- @Empty@.
wrapparser_bl' p a1 a2 = either (const Empty) id (parse p a1 a2)
-- | Like 'wrapparser' but for expression parsers (e.g. 'numericexpr');
-- a parse failure collapses to the sentinel @Sym (Const "Erroneous
-- MyExpr")@, which some tests (e.g. nexpr 4) assert against directly.
wrapparser_exp p a1 a2 = either (const (Sym (Const "Erroneous MyExpr"))) id (parse p a1 a2)
-- | Like 'wrapparser_exp' but for parsers returning a list of
-- expressions; a parse failure collapses to the singleton sentinel list.
wrapparser_exp' p a1 a2 = either (const [Sym (Const "Erroneous MyExpr")]) id (parse p a1 a2)
-- | Like 'wrapparser' but for atom parsers; a parse failure collapses to
-- the sentinel @Const "Erroneous Atom"@.
wrapparser_atom p a1 a2 = either (const (Const "Erroneous Atom")) id (parse p a1 a2)
-- Tests for 'ruleorfact': the input is either a plain fact (here a
-- cardinality fact) or a rule with a ":-" body.
truleorfact1 = TestCase $ assertEqual "ruleorfact 1" (Fact [Card (Sym (Const "k")) (Sym (Const "any")) [Typed [Plain (Const "in") [Sym (Var "X")] True,Plain (Const "vtx") [Sym (Var "X")] True]] True]) (wrapparser ruleorfact "" "k { in(X) : vtx(X) }.\n")
truleorfact2 = TestCase $ assertEqual "ruleorfact 2" (Rule (Plain (Const "a") [Sym (Var "X"),Sym (Var "Y")] True) [Plain (Const "in") [Sym (Var "X")] True,Plain (Const "in") [Sym (Var "Y")] True,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y")] False,Plain (Const "vtx") [Sym (Var "X")] True,Plain (Const "vtx") [Sym (Var "Y")] True]) (wrapparser ruleorfact "" "a(X,Y) :- in(X), in(Y), not arc(X, Y), vtx(X), vtx(Y).\n")
-- Tests for 'rulebase': a whole program, i.e. a sequence of rules, facts,
-- denials (":- body."), shows and const definitions.  Tests 12 and 13
-- exercise syntactically broken input, which collapses to [Fact []].
trulebase1 = TestCase $ assertEqual "rulebase 1" [Deny [Plain (Const "blab") [Sym (Var "Baa")] True,Plain (Const "bii") [] True]] (wrapparser' rulebase "" ":- blab(Baa),\n bii.")
trulebase2 = TestCase $ assertEqual "rulebase 2" [Fact [Card (Sym (Const "any")) (Sym (Const "any")) [Typed [Plain (Const "true") [Sym (Var "A")] True,Plain (Const "atom") [Sym (Var "A")] True]] True]] (wrapparser' rulebase "" "{ true(A) : atom(A) }.")
trulebase3 = TestCase $ assertEqual "rulebase 3" [Rule (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) [Plain (Const "blab") [Sym (Var "Baa")] True,Plain (Const "bii") [] True]] (wrapparser' rulebase "" "blub(Foo,Bar,Goo) :- blab(Baa),\n bii.")
trulebase4 = TestCase $ assertEqual "rulebase 4" [Rule (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) [Plain (Const "blab") [Sym (Var "Baa")] True,Plain (Const "bii") [] True],Deny [Plain (Const "zuu") [Sym (Var "Zaa")] True,Plain (Const "zii") [] True]] (wrapparser' rulebase "" "blub(Foo,Bar,Goo) :- blab(Baa),\n bii.\n\n:- zuu(Zaa),\n zii.")
trulebase5 = TestCase $ assertEqual "rulebase 5" [Rule (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) [Plain (Const "blab") [Sym (Var "Baa")] True,Plain (Const "bii") [] True],Deny [Plain (Const "zuu") [Sym (Var "Zaa")] True,Plain (Const "zii") [] True]] (wrapparser' rulebase "" "blub(Foo,Bar,Goo) :- blab(Baa),\n bii.:- zuu(Zaa),\n zii.")
trulebase6 = TestCase $ assertEqual "rulebase 6" [Deny [Plain (Const "zuu") [Sym (Var "Zaa")] True,Plain (Const "zii") [] True],Rule (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) [Plain (Const "blab") [Sym (Var "Baa")] True,Plain (Const "bii") [] True]] (wrapparser' rulebase "" "\n:- zuu(Zaa),\n zii.blub(Foo,Bar,Goo) :- blab(Baa),\n bii.\n")
trulebase7 = TestCase $ assertEqual "rulebase 7" [Rule (Plain (Const "p") [Sym (Var "X")] True) [Plain (Const "q") [Sym (Var "X")] True,Plain (Const "d") [Sym (Var "X")] True]] (wrapparser' rulebase "" "p(X) :- q(X), d(X).\n")
trulebase8 = TestCase $ assertEqual "rulebase 8" [Rule (Plain (Const "ready") [Sym (Var "A")] True) [Plain (Const "rdftype") [Sym (Var "A"),Sym (Const "\"wp1:Activity\"")] True,Plain (Const "missing_commit") [Sym (Var "A")] False]] (wrapparser' rulebase "" "ready(A) :- rdftype(A,\"wp1:Activity\"), not missing_commit(A).")
trulebase9 = TestCase $ assertEqual "rulebase 9" [Rule (Plain (Const "ready") [Sym (Var "A")] True) [Plain (Const "rdftype") [Sym (Var "A"),Sym (Const "\"wp1:Activity\"")] True,Plain (Const "missing_commit") [Sym (Var "A")] False],Rule (Plain (Const "missing_commit") [Sym (Var "A")] True) [Plain (Const "\"rdf:type\"") [Sym (Var "A"),Sym (Const "\"wp1:Activity\"")] True,Plain (Const "\"wp1:uses\"") [Sym (Var "A"),Sym (Var "Cap")] True,Plain (Const "\"wp1:commits\"") [Sym (Var "Cap"),Sym (Var "A")] False]] (wrapparser' rulebase "" "ready(A) :- rdftype(A,\"wp1:Activity\"), not missing_commit(A).\n\nmissing_commit(A) :- \n \"rdf:type\"(A,\"wp1:Activity\"),\n \"wp1:uses\"(A,Cap),\n not \"wp1:commits\"(Cap,A).\n ")
trulebase10 = TestCase $ assertEqual "rulebase 10" [Show [Plain (Const "deadlock") [Sym (Var "_")] True],Show [Plain (Const "conflict") [Sym (Var "_"),Sym (Var "_"),Sym (Var "_")] True],Show [Plain (Const "banish") [Sym (Var "_")] True]] (wrapparser' rulebase "" "show deadlock(_).\nshow conflict(_,_,_).\nshow banish(_).")
trulebase11 = TestCase $ assertEqual "rulebase 11" [Rule (Plain (Const "ready") [Sym (Var "A")] True) [Plain (Const "rdftype") [Sym (Var "A"),Sym (Const "\"wp1:Activity\"")] True,Plain (Const "missing_commit") [Sym (Var "A")] False],Rule (Plain (Const "missing_commit") [Sym (Var "A")] True) [Plain (Const "\"rdf:type\"") [Sym (Var "A"),Sym (Const "\"wp1:Activity\"")] True,Plain (Const "\"wp1:uses\"") [Sym (Var "A"),Sym (Var "Cap")] True,Plain (Const "\"wp1:commits\"") [Sym (Var "Cap"),Sym (Var "A")] False],Show [Plain (Const "deadlock") [Sym (Var "_")] True],Show [Plain (Const "conflict") [Sym (Var "_"),Sym (Var "_"),Sym (Var "_")] True],Show [Plain (Const "banish") [Sym (Var "_")] True]] (wrapparser' rulebase "" "ready(A) :- rdftype(A,\"wp1:Activity\"), not missing_commit(A).\n\nmissing_commit(A) :- \n \"rdf:type\"(A,\"wp1:Activity\"),\n \"wp1:uses\"(A,Cap),\n not \"wp1:commits\"(Cap,A).\nshow deadlock(_).\nshow conflict(_,_,_).\nshow banish(_).")
-- Here we lose information, the actual error message is not passed.
-- Could wrap it as a string inside a fact, but yuk ...
trulebase12 = TestCase $ assertEqual "rulebase 12" ([Fact []]) (wrapparser' rulebase "" "ready(A :- \"rdf:type\"(A,\"wp1:Activity\"), not missing_commit(A).")
trulebase13 = TestCase $ assertEqual "rulebase 13" ([Fact []]) (wrapparser' rulebase "" "ready(A) :- \"rdf:type\" A,\"wp1:Activity\"), not missing_commit(A).")
trulebase14 = TestCase $ assertEqual "rulebase 14" [Fact [Card (Sym (Const "k")) (Sym (Const "any")) [Typed [Plain (Const "in") [Sym (Var "X")] True,Plain (Const "vtx") [Sym (Var "X")] True]] True],Deny [Plain (Const "in") [Sym (Var "X")] True,Plain (Const "in") [Sym (Var "Y")] True,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y")] False,Plain (Const "vtx") [Sym (Var "X")] True,Plain (Const "vtx") [Sym (Var "Y")] True]] (wrapparser' rulebase "" "k { in(X) : vtx(X) }.\n:- in(X), in(Y), not arc(X, Y), vtx(X), vtx(Y).\n")
trulebase15 = TestCase $ assertEqual "rulebase 15" [Consts [Assign (Const "k") (Number (Const "10"))]]
                (wrapparser' rulebase "" "const k = 10.")
-- Tests for 'constdef' ("const k = 10.") and 'fact' (ground or
-- anonymous-variable facts).
tconstdef1 = TestCase $ assertEqual "constdef 1" (Consts [Assign (Const "k") (Number (Const "10"))]) (wrapparser constdef "" "const k = 10.")
tfact1 = TestCase $ assertEqual "fact 1" (Fact [Plain (Const "waitingfor") [Sym (Var "_"),Sym (Var "_")] True]) (wrapparser fact "" "waitingfor(_,_).")
tfact2 = TestCase $ assertEqual "fact 2" (Fact [Plain (Const "waitingfor") [Sym (Var "X"),Sym (Var "Y")] True]) (wrapparser fact "" "waitingfor(X,Y).")
-- Tests for 'deny': headless integrity constraints ":- body.", including
-- cardinality bounds, comparisons and negated literals in the body.
tdeny1 = TestCase $ assertEqual "deny 1" (Deny [Plain (Const "blab") [Sym (Var "Baa")] True,Plain (Const "bii") [] True]) (wrapparser deny "" ":- blab(Baa), bii.")
tdeny2 = TestCase $ assertEqual "deny 2" (Deny [Plain (Const "blab") [Sym (Var "Baa")] True,Plain (Const "bii") [] True]) (wrapparser deny "" ":- blab(Baa),\n bii.")
tdeny3 = TestCase $ assertEqual "deny 3" (Deny [Card (Sym (Const "any")) (Arith Minus (Sym (Const "k")) (Number (Const "1"))) [Typed [Plain (Const "sat") [Sym (Var "C")] True,Plain (Const "clause") [Sym (Var "C")] True]] True]) (wrapparser deny "" ":- { sat(C) : clause(C) } k-1.")
tdeny4 = TestCase $ assertEqual "deny 4" (Deny [Plain (Const "vtx") [Sym (Var "X")] True,Plain (Const "vtx") [Sym (Var "Y")] True,BExpr Lt (Sym (Var "X")) (Sym (Var "Y")),Plain (Const "r") [Sym (Var "X"),Sym (Var "Y")] False]) (wrapparser deny "" ":- vtx(X), vtx(Y), X < Y, not r(X, Y).")
tdeny5 = TestCase $ assertEqual "deny 5" (Deny [Plain (Const "ok") [] False]) (wrapparser deny "" ":- not ok.")
tdeny6 = TestCase $ assertEqual "deny 6" (Deny [Plain (Const "vtx") [Sym (Var "X")] True,Plain (Const "occurs") [Sym (Var "X")] True,Plain (Const "r") [Sym (Var "X")] False]) (wrapparser deny "" ":- vtx(X), occurs(X), not r(X).")
-- Tests for 'rule' ("head :- body."), covering choice/count heads,
-- typed ("l : t") and weighed ("l = w") literals, arithmetic in atom
-- arguments, and broken input (tests 10, 14, 15) collapsing to (Fact []).
trule1 = TestCase $ assertEqual "rule 1" (Rule (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) [Plain (Const "blab") [Sym (Var "Baa")] True,Plain (Const "bii") [] True]) (wrapparser rule "" "blub(Foo,Bar,Goo) :- blab(Baa), bii.")
trule2 = TestCase $ assertEqual "rule 2" (Rule (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) [Plain (Const "blab") [Sym (Var "Baa")] True,Plain (Const "bii") [] True]) (wrapparser rule "" "blub(Foo,Bar,Goo) :- blab(Baa),\n bii.")
-- trule3 = TestCase $ assertEqual "rule 3" (Rule (Plain (Const "ok") [] True) [Count (Sym (Const "k")) (Sym (Const "any")) [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True)]] True]) (wrapparser rule "" "ok :- k [ lc(X, Y) : arc(X, Y, L) = L ] .")
trule3 = TestCase $ assertEqual "rule 3" (Rule (Plain (Const "ok") [] True) [Count (Sym (Const "k")) (Sym (Const "any")) [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True]] True]) (wrapparser rule "" "ok :- k [ lc(X, Y) : arc(X, Y, L) = L ] .")
trule4 = TestCase $ assertEqual "rule 4" (Rule (Plain (Const "initial") [Sym (Var "X")] True) [Plain (Const "occurs") [Sym (Var "X")] True,Typed [Plain (Const "occurs") [Sym (Var "Y")] False,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))],Plain (Const "vtx") [Sym (Var "X")] True]) (wrapparser rule "" "initial(X) :- occurs(X), not occurs(Y) : Y < X, vtx(X).")
trule5 = TestCase $ assertEqual "rule 5" (Rule (Card (Sym (Const "any")) (Number (Const "1")) [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True]] True) [Plain (Const "vtx") [Sym (Var "X")] True]) (wrapparser rule "" "{ lc(X, Y) : arc(X, Y, L) } 1 :- vtx(X)." )
trule6 = TestCase $ assertEqual "rule 6" (Rule (Plain (Const "occurs") [Sym (Var "X")] True) [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True]) (wrapparser rule "" "occurs(X) :- lc(X, Y), arc(X, Y, L).")
trule7 = TestCase $ assertEqual "rule 7" (Rule (Plain (Const "initial") [Sym (Var "X")] True) [Plain (Const "occurs") [Sym (Var "X")] True,Typed [Plain (Const "occurs") [Sym (Var "Y")] False,Plain (Const "vtx") [Sym (Var "X")] True,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))],Plain (Const "vtx") [Sym (Var "X")] True]) (wrapparser rule "" "initial(X) :- occurs(X), not occurs(Y) : vtx(X) : Y < X, vtx(X).")
trule8 = TestCase $ assertEqual "rule 8" (Rule (Plain (Const "r") [Sym (Var "Y")] True) [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Plain (Const "initial") [Sym (Var "X")] True,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True]) (wrapparser rule "" "r(Y) :- lc(X, Y), initial(X), arc(X, Y, L).")
trule9 = TestCase $ assertEqual "rule 9" (Rule (Plain (Const "r") [Sym (Var "Y")] True) [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Plain (Const "r") [Sym (Var "X")] True,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True]) (wrapparser rule "" "r(Y) :- lc(X, Y), r(X), arc(X, Y, L).")
trule10 = TestCase $ assertEqual "rule 10" (Fact []) (wrapparser rule "" "denied(X,Y).")
trule11 = TestCase $ assertEqual "rule 11" (Rule (Card (Number (Const "1")) (Sym (Const "any")) [Plain (Const "p") [Sym (Var "X")] True,Plain (Const "t") [Sym (Var "X")] True] True) [Card (Number (Const "1")) (Number (Const "2")) [Plain (Const "r") [Sym (Var "X")] True,Plain (Const "s") [Sym (Var "X")] True,Plain (Const "t") [Sym (Var "X")] False] True]) (wrapparser rule "" "1 { p(X), t(X) } :- 1 {r(X), s(X), not t(X)} 2.")
trule12 = TestCase $ assertEqual "rule 12" (Rule (Card (Number (Const "1")) (Sym (Const "any")) [Plain (Const "p") [] True,Plain (Const "t") [] True] True) [Card (Number (Const "1")) (Number (Const "2")) [Plain (Const "r") [] True,Plain (Const "s") [] True,Plain (Const "t") [] False] True]) (wrapparser rule "" "1 { p, t } :- 1 {r, s, not t} 2.")
trule13 = TestCase $ assertEqual "rule 13" (Rule (Plain (Const "ready") [Sym (Var "A")] True) [Plain (Const "\"rdf:type\"") [Sym (Var "A"),Sym (Const "\"wp1:Activity\"")] True,Plain (Const "missing_commit") [Sym (Var "A")] False]) (wrapparser rule "" "ready(A) :- \"rdf:type\"(A,\"wp1:Activity\"), not missing_commit(A).")
trule14 = TestCase $ assertEqual "rule 14" (Fact []) (wrapparser rule "" "ready(A :- \"rdf:type\"(A,\"wp1:Activity\"), not missing_commit(A).")
trule15 = TestCase $ assertEqual "rule 15" (Fact []) (wrapparser rule "" "ready(A) :- \"rdf:type\" A,\"wp1:Activity\"), not missing_commit(A).")
trule16 = TestCase $ assertEqual "rule 16" (Rule (Plain (Const "border") [Arith Plus (Arith Mult (Arith Minus (Sym (Var "N")) (Number (Const "1"))) (Sym (Var "R"))) (Number (Const "1"))] True) [Plain (Const "number") [Sym (Var "N")] True,Plain (Const "sqrt") [Sym (Var "R")] True,BExpr LtEq (Sym (Var "N")) (Sym (Var "R"))]) (wrapparser rule "" "border((N-1)*R+1) :- number(N), sqrt(R), N<=R.")
trule17 = TestCase $ assertEqual "rule 17" (Rule (Plain (Const "dist") [Func (Const "#abs") [Arith Minus (Sym (Var "RK1")) (Sym (Var "RK2"))] True] True) [Plain (Const "restaurant") [Sym (Var "RN1"),Sym (Var "RK1")] True,Plain (Const "restaurant") [Sym (Var "RN2"),Sym (Var "RK2")] True]) (wrapparser rule "" "dist(#abs(RK1-RK2)) :- restaurant(RN1,RK1), restaurant(RN2,RK2).")
-- trule = TestCase $ assertEqual "rule X" (wrapparser rule "" "")
-- tdeny = TestCase $ assertEqual "deny X" (wrapparser deny "" "")
-- trulebase = TestCase $ assertEqual "rulebase X" (wrapparser' rulebase "" "")
-- Tests for 'body': a comma-separated list of body literals, including
-- typed literals, comparisons, assignments with #min/#max and
-- cardinality literals with arithmetic bounds.
tbody1 = TestCase $ assertEqual "body 1" [Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True,Plain (Const "blab") [Sym (Var "Baa")] True,Plain (Const "bii") [] True] (wrapparser_bl body "" "blub(Foo,Bar,Goo), blab(Baa), bii")
tbody2 = TestCase $ assertEqual "body 2" [Plain (Const "vtx") [Sym (Var "X")] True,Plain (Const "vtx") [Sym (Var "Y")] True,BExpr Lt (Sym (Var "X")) (Sym (Var "Y")),Plain (Const "r") [Sym (Var "X"),Sym (Var "Y")] False] (wrapparser_bl body "" "vtx(X), vtx(Y), X < Y, not r(X, Y)")
tbody3 = TestCase $ assertEqual "body 3" [Typed [Plain (Const "arc_S") [Sym (Var "X"),Sym (Var "Y")] False,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y")] True],Plain (Const "vtx") [Sym (Var "Y")] True] (wrapparser_bl body "" "not arc_S(X, Y) : arc(X, Y), vtx(Y)")
tbody4 = TestCase $ assertEqual "body 4" [Typed [Plain (Const "arc_S") [Sym (Var "X"),Sym (Var "Y")] False,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y")] True]] (wrapparser_bl body "" "not arc_S(X, Y) : arc(X, Y)")
tbody5 = TestCase $ assertEqual "body 5" [Plain (Const "occurs") [Sym (Var "X")] True,Typed [Plain (Const "occurs") [Sym (Var "Y")] False,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))],Plain (Const "vtx") [Sym (Var "X")] True] (wrapparser_bl body "" "occurs(X), not occurs(Y) : Y < X, vtx(X)")
tbody6 = TestCase $ assertEqual "body 6" [Typed [Plain (Const "occurs") [Sym (Var "Y")] False,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))],Plain (Const "vtx") [Sym (Var "X")] True] (wrapparser_bl body "" "not occurs(Y) : Y < X, vtx(X)")
tbody7 = TestCase $ assertEqual "body 7" [Plain (Const "occurs") [Sym (Var "X")] True,Typed [Plain (Const "occurs") [Sym (Var "Y")] False,Plain (Const "vtx") [Sym (Var "X")] True,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))],Plain (Const "vtx") [Sym (Var "X")] True] (wrapparser_bl body "" "occurs(X), not occurs(Y) : vtx(X) : Y < X, vtx(X)")
tbody8 = TestCase $ assertEqual "body 8" [Plain (Const "waitingfor") [Sym (Var "_"),Sym (Var "_")] True] (wrapparser_bl body "" "waitingfor(_,_).")
tbody9 = TestCase $ assertEqual "body 9" [Assignment (Var "M") (Optimize False [Typed [Plain (Const "est") [Sym (Var "I"),Sym (Var "S")] True,Plain (Const "est") [Sym (Var "I"),Sym (Var "S")] True,Weighed (Sym (Var "S")) (Plain (Const "hasest") [Sym (Var "I")] True) True]] True) True,Assignment (Var "N") (Optimize True [Typed [Plain (Const "est") [Sym (Var "J"),Sym (Var "T")] True,Plain (Const "est") [Sym (Var "J"),Sym (Var "T")] True,Weighed (Sym (Var "T")) (Plain (Const "hasest") [Sym (Var "J")] True) True]] True) True,Plain (Const "sest") [Sym (Var "P")] True,Plain (Const "est") [] True] (wrapparser_bl body "" "M = #min [ est(I,S) : est(I,S) : hasest(I) = S ],\nN = #max [ est(J,T) : est(J,T) : hasest(J) = T ], sest(P), est.")
tbody10 = TestCase $ assertEqual "body 10" [Card (Sym (Const "any")) (Arith Mult (Sym (Const "k")) (Number (Const "1"))) [Typed [Plain (Const "sat") [Sym (Var "C")] True,Plain (Const "clause") [Sym (Var "C")] True]] True] (wrapparser_bl body "" "{ sat(C) : clause(C) } k*1.")
-- tbody = TestCase $ assertEqual "body X" (wrapparser_bl body "" "")
-- Tests for 'genrel': a single body literal — plain atom, comparison,
-- choice "{...}", count "[...]", typed literal, alternative "a;b;c",
-- #min/#max optimization, assignment, or a function term like #abs(..).
tgenrel1 = TestCase $ assertEqual "genrel 1" (Plain (Const "blub") [] True) (wrapparser_bl' genrel "" "blub")
tgenrel2 = TestCase $ assertEqual "genrel 2" (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) (wrapparser_bl' genrel "" "blub(Foo,Bar,Goo)")
tgenrel3 = TestCase $ assertEqual "genrel 3" (Plain (Const "waitingfor") [Sym (Var "_"),Sym (Var "_")] True) (wrapparser_bl' genrel "" "waitingfor(_,_)")
tgenrel4 = TestCase $ assertEqual "genrel 4" (BExpr Gt (Sym (Var "X")) (Sym (Var "Y"))) (wrapparser_bl' genrel "" "X > Y")
tgenrel5 = TestCase $ assertEqual "genrel 5" (Card (Sym (Const "any")) (Sym (Const "any")) [Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True] True)
             (wrapparser_bl' genrel "" "{blub(Foo,Bar,Goo)}")
tgenrel6 = TestCase $ assertEqual "genrel 6" (Card (Sym (Const "any")) (Sym (Const "any")) [Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True,Plain (Const "blab") [Sym (Var "Zub"),Sym (Var "Zap"),Sym (Var "Zoo")] True] True) (wrapparser_bl' genrel "" "{blub(Foo,Bar,Goo),blab(Zub,Zap,Zoo)}")
tgenrel7 = TestCase $ assertEqual "genrel 7" (Card (Number (Const "1")) (Number (Const "2")) [Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True] True) (wrapparser_bl' genrel "" "1{blub(Foo,Bar,Goo)}2")
tgenrel8 = TestCase $ assertEqual "genrel 8" (Card (Number (Const "1")) (Number (Const "1")) [Plain (Const "maps_to") [Sym (Var "X"),Sym (Var "U")] True] True) (wrapparser_bl' genrel "" "1 { maps_to(X, U) } 1")
tgenrel9 = TestCase $ assertEqual "genrel 9" (Card (Sym (Const "any")) (Number (Const "1")) [Typed [Plain (Const "sat") [Sym (Var "C")] True,Plain (Const "clause") [Sym (Var "C")] True]] True) (wrapparser_bl' genrel "" "{ sat(C) : clause(C) } 1")
tgenrel10 = TestCase $ assertEqual "genrel 10" (Card (Sym (Const "any")) (Arith Minus (Sym (Const "k")) (Number (Const "1"))) [Typed [Plain (Const "sat") [Sym (Var "C")] True,Plain (Const "clause") [Sym (Var "C")] True]] True) (wrapparser_bl' genrel "" "{ sat(C) : clause(C) } k-1")
tgenrel11 = TestCase $ assertEqual "genrel 11" (Card (Sym (Const "any")) (Sym (Const "any")) [Typed [Plain (Const "true") [Sym (Var "A")] True,Plain (Const "atom") [Sym (Var "A")] True]] True)
              (wrapparser_bl' genrel "" "{ true(A) : atom(A) }")
tgenrel12 = TestCase $ assertEqual "genrel 12" (Card (Number (Const "2")) (Number (Const "2")) [Plain (Const "i") [Sym (Var "A"),Sym (Const "\"wp1:active\""),Sym (Const "\"yes\"")] True,Plain (Const "d") [Sym (Var "A"),Sym (Const "\"wp1:active\""),Sym (Const "\"no\"")] True] True) (wrapparser_bl' genrel "" "2{i(A,\"wp1:active\",\"yes\"),d(A,\"wp1:active\",\"no\")}2")
tgenrel13 = TestCase $ assertEqual "genrel 13" (Card (Sym (Var "M")) (Sym (Const "any")) [Typed [Plain (Const "\"wp1:commits\"") [Sym (Var "Cap"),Sym (Var "A3")] True,Plain (Const "\"rdf:type\"") [Sym (Var "A3"),Sym (Const "\"wp1:Activity\"")] True]] True) (wrapparser_bl' genrel "" "M{\"wp1:commits\"(Cap,A3):\"rdf:type\"(A3,\"wp1:Activity\")}")
tgenrel14 = TestCase $ assertEqual "genrel 14" (Card (Sym (Const "any")) (Number (Const "2")) [Plain (Const "i") [Sym (Var "A"),Sym (Const "\"wp1:active\""),Sym (Const "\"yes\"")] True,Plain (Const "d") [Sym (Var "A"),Sym (Const "\"wp1:active\""),Sym (Const "\"no\"")] True] True) (wrapparser_bl' genrel "" "{i(A,\"wp1:active\",\"yes\"),d(A,\"wp1:active\",\"no\")}2")
tgenrel15 = TestCase $ assertEqual "genrel 15" (Card (Sym (Const "any")) (Sym (Const "any")) [Plain (Const "blub") [Sym (Var "Foo"),Sym (Const "\"Bar\""),Sym (Var "Goo")] True] True) (wrapparser_bl' genrel "" "{blub(Foo,\"Bar\",Goo)}")
-- tgenrel16 = TestCase $ assertEqual "genrel 16" (Count (Sym (Const "any")) (Sym (Const "any")) [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True)]] True) (wrapparser_bl' genrel "" "[ lc(X, Y) : arc(X, Y, L) = L ]")
tgenrel16 = TestCase $ assertEqual "genrel 16" (Count (Sym (Const "any")) (Sym (Const "any")) [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True]] True) (wrapparser_bl' genrel "" "[ lc(X, Y) : arc(X, Y, L) = L ]")
-- Note that the vtx should not be parsed
tgenrel17 = TestCase $ assertEqual "genrel 17" (Typed [Plain (Const "occurs") [Sym (Var "Y")] False,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))]) (wrapparser_bl' genrel "" "not occurs(Y) : Y < X, vtx(X)")
tgenrel18 = TestCase $ assertEqual "genrel 18" (Typed [Plain (Const "f") [] True,Plain (Const "vtx") [Sym (Var "Y")] True,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))]) (wrapparser_bl' genrel "" "f : vtx(Y) : Y < X")
tgenrel19 = TestCase $ assertEqual "genrel 19" (Plain (Const "person") [Alternative [Sym (Const "a"),Sym (Const "b"),Sym (Const "c")]] True) (wrapparser_bl' genrel "" "person(a; b; c)")
tgenrel20 = TestCase $ assertEqual "genrel 20" (Optimize True [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True]] True) (wrapparser_bl' genrel "" "#max [ lc(X, Y) : arc(X, Y, L) = L ]")
tgenrel21 = TestCase $ assertEqual "genrel 21" (Assignment (Var "M") (Optimize False [Typed [Plain (Const "est") [Sym (Var "I"),Sym (Var "S")] True,Plain (Const "est") [Sym (Var "I"),Sym (Var "S")] True,Weighed (Sym (Var "S")) (Plain (Const "hasest") [Sym (Var "I")] True) True]] True) True) (wrapparser_bl' genrel "" "M = #min [ est(I,S) : est(I,S) : hasest(I) = S ]")
tgenrel22 = TestCase $ assertEqual "genrel 22" (Plain (Const "dist") [Func (Const "#abs") [Arith Minus (Sym (Var "RK1")) (Sym (Var "RK2"))] True] True) (wrapparser_bl' genrel "" "dist(#abs(RK1-RK2))")
-- tgenrel = TestCase $ assertEqual "genrel X" (wrapparser_bl' genrel "" "")
-- Tests for 'numericexpr': full arithmetic expressions including
-- precedence, parentheses, mod/div, alternatives "a;b", ranges "a..b"
-- and function terms such as #abs(..).
tnumericexpr1 = TestCase $ assertEqual "numericexpr 1" (Arith Plus (Sym (Const "k")) (Number (Const "2"))) (wrapparser_exp numericexpr "" "k + 2")
tnumericexpr2 = TestCase $ assertEqual "numericexpr 2" (Sym (Const "k")) (wrapparser_exp numericexpr "" "k")
tnumericexpr3 = TestCase $ assertEqual "numericexpr 3" (Number (Const "1")) (wrapparser_exp numericexpr "" "1")
tnumericexpr4 = TestCase $ assertEqual "numericexpr 4" (Arith Mult (Arith Plus (Sym (Const "k")) (Number (Const "2"))) (Sym (Var "Z"))) (wrapparser_exp numericexpr "" "(k + 2) * Z")
tnumericexpr5 = TestCase $ assertEqual "numericexpr 5" (Arith Plus (Sym (Const "k")) (Arith Mult (Number (Const "2")) (Sym (Var "Z")))) (wrapparser_exp numericexpr "" "k + 2 * Z")
tnumericexpr6 = TestCase $ assertEqual "numericexpr 6" (Arith Div (Arith Mod (Sym (Const "k")) (Sym (Const "\"jopi\""))) (Sym (Var "Z"))) (wrapparser_exp numericexpr "" "(k mod \"jopi\") / (Z)")
tnumericexpr7 = TestCase $ assertEqual "numericexpr 7" (Alternative [Sym (Var "X"),Sym (Var "Y"),Sym (Const "k"),Number (Const "1")]) (wrapparser_exp numericexpr "" "X;Y;k;1")
tnumericexpr8 = TestCase $ assertEqual "numericexpr 8" (Arith Range (Number (Const "1")) (Sym (Var "X"))) (wrapparser_exp numericexpr "" "1..X")
tnumericexpr9 = TestCase $ assertEqual "numericexpr 9" (Arith Div (Arith Range (Number (Const "1")) (Sym (Var "X"))) (Number (Const "2"))) (wrapparser_exp numericexpr "" "1..X/2")
tnumericexpr10 = TestCase $ assertEqual "numericexpr 10" (Arith Plus (Arith Mult (Arith Minus (Sym (Var "N")) (Number (Const "1"))) (Sym (Var "R"))) (Number (Const "1"))) (wrapparser_exp numericexpr "" "(N-1)*R+1")
tnumericexpr11 = TestCase $ assertEqual "numericexpr 11" (Func (Const "#abs") [Arith Minus (Sym (Var "N")) (Number (Const "1")),Sym (Var "X"),Number (Const "1")] True) (wrapparser_exp numericexpr "" "#abs(N-1, X, 1)")
-- tnumericexpr = TestCase $ assertEqual "numericexpr X" (wrapparser_exp numericexpr "" "")
-- Tests for 'numeric': a single numeric atom (constant, number or
-- variable); trailing operators and operands are left unconsumed.
-- Note that "+ 2" are not parsed
tnumeric1 = TestCase $ assertEqual "numeric 1" (Number (Const "1")) (wrapparser_exp numeric "" "1 + 2")
tnumeric2 = TestCase $ assertEqual "numeric 2" (Sym (Const "k")) (wrapparser_exp numeric "" "k + 2")
tnumeric3 = TestCase $ assertEqual "numeric 3" (Sym (Const "k")) (wrapparser_exp numeric "" "k")
tnumeric4 = TestCase $ assertEqual "numeric 4" (Number (Const "1")) (wrapparser_exp numeric "" "1")
tnumeric5 = TestCase $ assertEqual "numeric 5" (Sym (Var "X")) (wrapparser_exp numeric "" "X")
-- tnumeric = TestCase $ assertEqual "numeric X" (wrapparser_exp numeric "" "")
-- Tests for 'nexpr': one binary arithmetic expression; extra operands
-- after the first pair are left unconsumed (test 2), and malformed
-- input yields the 'wrapparser_exp' sentinel (test 4).
tnexpr1 = TestCase $ assertEqual "nexpr 1" (Arith Plus (Sym (Const "k")) (Number (Const "2"))) (wrapparser_exp nexpr "" "k + 2")
tnexpr2 = TestCase $ assertEqual "nexpr 2" (Arith Plus (Sym (Const "k")) (Number (Const "2"))) (wrapparser_exp nexpr "" "k + 2 + Z")
tnexpr3 = TestCase $ assertEqual "nexpr 3" (Arith Plus (Sym (Const "k")) (Sym (Var "Z"))) (wrapparser_exp nexpr "" "k + Z")
tnexpr4 = TestCase $ assertEqual "nexpr 4" (Sym (Const "Erroneous MyExpr")) (wrapparser_exp nexpr "" "k + + Z")
-- tnexpr = TestCase $ assertEqual "nexpr X" (wrapparser_exp nexpr "" "")
-- Tests for 'bexpr': comparison expressions between numeric atoms.
tbexpr1 = TestCase $ assertEqual "bexpr 1" (BExpr Gt (Sym (Const "k")) (Number (Const "2"))) (wrapparser_bl' bexpr "" "k > 2")
tbexpr2 = TestCase $ assertEqual "bexpr 2" (BExpr Lt (Sym (Var "X")) (Sym (Var "Y"))) (wrapparser_bl' bexpr "" "X < Y")
-- tbexpr = TestCase $ assertEqual "bexpr X" (wrapparser_bl' bexpr "" "")
-- Tests for 'mycount': count/weight literals "l [ ... ] u" with optional
-- bounds ("any" when absent) and weighed elements "lit = W".
tmycount1 = TestCase $ assertEqual "mycount 1" (Count (Number (Const "1")) (Number (Const "2")) [Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True] True) (wrapparser_bl' mycount "" "1 [blub(Foo,Bar,Goo)] 2")
tmycount2 = TestCase $ assertEqual "mycount 2" (Count (Sym (Var "M")) (Sym (Const "any")) [Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True] True) (wrapparser_bl' mycount "" "M[blub(Foo,Bar,Goo)]")
tmycount3 = TestCase $ assertEqual "mycount 3" (Count (Sym (Const "any")) (Sym (Const "any")) [Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True] True) (wrapparser_bl' mycount "" "[blub(Foo,Bar,Goo)]")
tmycount4 = TestCase $ assertEqual "mycount 4" (Count (Sym (Const "any")) (Arith Minus (Sym (Const "k")) (Number (Const "1"))) [Typed [Plain (Const "sat") [Sym (Var "C")] True,Plain (Const "clause") [Sym (Var "C")] True]] True) (wrapparser_bl' mycount "" "[ sat(C) : clause(C) ] k-1")
-- tmycount5 = TestCase $ assertEqual "mycount 5" (Count (Sym (Const "k")) (Sym (Const "any")) [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True)]] True) (wrapparser_bl' mycount "" "k [ lc(X, Y) : arc(X, Y, L) = L ]")
tmycount5 = TestCase $ assertEqual "mycount 5" (Count (Sym (Const "k")) (Sym (Const "any")) [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True]] True) (wrapparser_bl' mycount "" "k [ lc(X, Y) : arc(X, Y, L) = L ]")
tmycount6 = TestCase $ assertEqual "mycount 6" (Count (Sym (Const "k")) (Sym (Const "any")) [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) False]] True) (wrapparser_bl' mycount "" "k [ lc(X, Y) : not arc(X, Y, L) = L]")
-- tmycount = TestCase $ assertEqual "mycount X" (wrapparser_bl' mycount "" "")
-- Tests for 'mychoice': choice/cardinality literals "l { ... } u" with
-- optional bounds; malformed input (test 9) collapses to Empty.
tmychoice1 = TestCase $ assertEqual "mychoice 1" (Card (Number (Const "1")) (Number (Const "2")) [Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True] True) (wrapparser_bl' mychoice "" "1 {blub(Foo,Bar,Goo)} 2")
tmychoice2 = TestCase $ assertEqual "mychoice 2" (Card (Sym (Var "M")) (Sym (Const "any")) [Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True] True) (wrapparser_bl' mychoice "" "M{blub(Foo,Bar,Goo)}")
tmychoice3 = TestCase $ assertEqual "mychoice 3" (Card (Sym (Const "any")) (Sym (Const "any")) [Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True] True) (wrapparser_bl' mychoice "" "{blub(Foo,Bar,Goo)}")
tmychoice4 = TestCase $ assertEqual "mychoice 4" (Card (Sym (Const "any")) (Arith Minus (Sym (Const "k")) (Number (Const "1"))) [Typed [Plain (Const "sat") [Sym (Var "C")] True,Plain (Const "clause") [Sym (Var "C")] True]] True) (wrapparser_bl' mychoice "" "{ sat(C) : clause(C) } k-1")
tmychoice5 = TestCase $ assertEqual "mychoice 5" (Card (Sym (Const "any")) (Number (Const "1")) [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True,Plain (Const "mooh") [Sym (Var "Z")] True]] True) (wrapparser_bl' mychoice "" "{ lc(X, Y) : arc(X, Y, L) : mooh(Z) } 1")
tmychoice6 = TestCase $ assertEqual "mychoice 6" (Card (Number (Const "1")) (Sym (Const "any")) [Plain (Const "p") [] True,Plain (Const "t") [] True] True) (wrapparser_bl' mychoice "" "1 { p, t }")
tmychoice7 = TestCase $ assertEqual "mychoice 7" (Card (Sym (Const "any")) (Sym (Const "any")) [Plain (Const "p") [] True,Plain (Const "t") [] True,Plain (Const "x") [] False] True) (wrapparser_bl' mychoice "" "{ p, t, not x}")
tmychoice8 = TestCase $ assertEqual "mychoice 8" (Card (Sym (Const "any")) (Sym (Const "any")) [Plain (Const "p") [] True,Plain (Const "t") [] True,Plain (Const "x") [] False] True) (wrapparser_bl' mychoice "" "{ p, t, not x }")
tmychoice9 = TestCase $ assertEqual "mychoice 9" Empty (wrapparser_bl' mychoice "" "1 { p, not t t}")
-- tmychoice = TestCase $ assertEqual "mychoice X" (wrapparser_bl' mychoice "" "")
-- Tests for 'rel': a single relation literal (possibly typed "l : t" or
-- weighed "l = W"); 'rel' does not consume past the first literal, so
-- comma-separated continuations are left unparsed (tests 1, 6, 7, 8).
-- Note that the latter "no" is not parsed
trel1 = TestCase $ assertEqual "rel 1" (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) (wrapparser_bl' rel "" "blub(Foo,Bar,Goo),\"no\"")
-- trel2 = TestCase $ assertEqual "rel 2" (Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True)) (wrapparser_bl' rel "" "arc(X, Y, L) = L")
trel2 = TestCase $ assertEqual "rel 2" (Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True) (wrapparser_bl' rel "" "arc(X, Y, L) = L")
trel3 = TestCase $ assertEqual "rel 3" (Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True]) (wrapparser_bl' rel "" "lc(X,Y) : arc(X, Y, L)")
-- trel4 = TestCase $ assertEqual "rel 4" (Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True)]) (wrapparser_bl' rel "" "lc(X,Y) : arc(X, Y, L) = L")
trel4 = TestCase $ assertEqual "rel 4" (Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True]) (wrapparser_bl' rel "" "lc(X,Y) : arc(X, Y, L) = L")
trel5 = TestCase $ assertEqual "rel 5" (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) (wrapparser_bl' rel "" "blub(Foo,Bar,Goo)")
-- Only the first atom is parsed in the following three
trel6 = TestCase $ assertEqual "rel 6" (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) (wrapparser_bl' rel "" "blub(Foo,Bar,Goo),chunga(Foo,Bar,Goo)")
trel7 = TestCase $ assertEqual "rel 7" (Plain (Const "r") [] True) (wrapparser_bl' rel "" "r, s, not t")
trel8 = TestCase $ assertEqual "rel 8" (Plain (Const "r") [] False) (wrapparser_bl' rel "" "not r, s, not t")
trel9 = TestCase $ assertEqual "rel 9" (Typed [Plain (Const "ttask") [Sym (Var "I"),Sym (Var "D")] True,Plain (Const "ttask") [Sym (Var "I"),Sym (Var "D")] True,Plain (Const "haslet") [Sym (Var "I")] False,Weighed (Sym (Var "D")) (Plain (Const "tsklet") [Sym (Var "I")] True) False]) (wrapparser_bl' rel "" "ttask(I,D) : ttask(I,D) : not haslet(I) : not tsklet(I) = D")
trel10 = TestCase $ assertEqual "rel 10" (Typed [Plain (Const "f") [] True,Plain (Const "vtx") [Sym (Var "Y")] True,BExpr Lt (Sym (Var "Y")) (Sym (Var "X")),Assign (Var "L") (Arith Range (Sym (Var "X")) (Sym (Var "Y")))]) (wrapparser_bl' rel "" "f : vtx(Y) : Y < X : L = X..Y.")
-- trel = TestCase $ assertEqual "rel X" (wrapparser_bl' rel "" "")
-- Tables for the typed-relation ('trel'), atom-only ('atomrel'),
-- simple-relation ('srel'), weighted ('wrel'), negated ('negrel') and
-- assignment-capable ('arel') literal parsers.
ttrel1 = TestCase $ assertEqual "trel 1" (Typed [Plain (Const "arc_S") [Sym (Var "X"),Sym (Var "Y")] True,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y")] True]) (wrapparser_bl' trel "" "arc_S(X, Y) : arc(X, Y)")
ttrel2 = TestCase $ assertEqual "trel 2" (Typed [Plain (Const "arc_S") [Sym (Var "X"),Sym (Var "Y")] False,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Yo")] True]) (wrapparser_bl' trel "" "not arc_S(X, Y) : arc(X, Yo)")
-- ttrel3 = TestCase $ assertEqual "trel 3" (Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True)]) (wrapparser_bl' trel "" "lc(X, Y) : arc(X, Y, L) = L")
ttrel3 = TestCase $ assertEqual "trel 3" (Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True]) (wrapparser_bl' trel "" "lc(X, Y) : arc(X, Y, L) = L")
ttrel4 = TestCase $ assertEqual "trel 4" (Typed [Plain (Const "occurs") [Sym (Var "Y")] False,Plain (Const "vtx") [Sym (Var "X")] True,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))]) (wrapparser_bl' trel "" "not occurs(Y) : vtx(X) : Y < X")
-- Not typed, fails
ttrel5 = TestCase $ assertEqual "trel 5" Empty (wrapparser_bl' trel "" "blub(Foo,Bar,Goo)")
ttrel6 = TestCase $ assertEqual "trel 6" (Typed [Plain (Const "f") [] True,Plain (Const "vtx") [Sym (Var "Y")] True,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))]) (wrapparser_bl' trel "" "f : vtx(Y) : Y < X.")
-- Make sure we do not parse too far
ttrel7 = TestCase $ assertEqual "trel 7" Empty (wrapparser_bl' trel "" "forth(J,NI+1,S-M) :")
ttrel8 = TestCase $ assertEqual "trel 8" Empty (wrapparser_bl' trel "" "forth(J,NI+1,S-M) :-")
-- ttrel = TestCase $ assertEqual "trel X" (wrapparser_bl' trel "" "")
tatomrel1 = TestCase $ assertEqual "atomrel 1" (Plain (Const "blub") [] True) (wrapparser_bl' atomrel "" "blub(Foo,Bar,Goo)")
-- Only 1st atom is parsed
tatomrel2 = TestCase $ assertEqual "atomrel 2" (Plain (Const "blub") [] True) (wrapparser_bl' atomrel "" "blub(Foo,Bar,Goo),chunga(Foo,Bar,Goo)")
tatomrel3 = TestCase $ assertEqual "atomrel 3" (Plain (Const "r") [] True) (wrapparser_bl' atomrel "" "r, s, not t")
-- tatomrel = TestCase $ assertEqual "atomrel X" (wrapparser_bl' atomrel "" "")
tsrel1 = TestCase $ assertEqual "srel 1" (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) (wrapparser_bl' srel "" "blub(Foo,Bar,Goo)")
-- Only the first one handled
tsrel2 = TestCase $ assertEqual "srel 2" (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] True) (wrapparser_bl' srel "" "blub(Foo,Bar,Goo),chunga(Foo,Bar,Goo)")
-- Dies on first comma
tsrel3 = TestCase $ assertEqual "srel 3" Empty (wrapparser_bl' srel "" "r, s, not t")
tsrel4 = TestCase $ assertEqual "srel 4" (Plain (Const "blub") [Sym (Var "Foo"),Sym (Const "\"Bar\""),Sym (Var "Goo")] True) (wrapparser_bl' srel "" "blub(Foo,\"Bar\",Goo)")
tsrel5 = TestCase $ assertEqual "srel 5" (Plain (Const "border") [Arith Plus (Arith Mult (Arith Minus (Sym (Var "N")) (Number (Const "1"))) (Sym (Var "R"))) (Number (Const "1")),Sym (Var "N")] True)
         (wrapparser_bl' srel "" "border((N-1)*R+1,N).")
-- tsrel = TestCase $ assertEqual "srel X" (wrapparser_bl' srel "" "")
-- twrel1 = TestCase $ assertEqual "wrel 1" (Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True)) (wrapparser_bl' wrel "" "arc(X, Y, L) = L")
twrel1 = TestCase $ assertEqual "wrel 1" (Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True) (wrapparser_bl' wrel "" "arc(X, Y, L) = L")
twrel2 = TestCase $ assertEqual "wrel 2" (Weighed (Arith Range (Number (Const "1")) (Sym (Var "L"))) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True) (wrapparser_bl' wrel "" "arc(X, Y, L) = 1..L")
twrel3 = TestCase $ assertEqual "wrel 3" (Assign (Var "L") (Arith Range (Number (Const "1")) (Sym (Var "X")))) (wrapparser_bl' wrel "" "L = 1..X")
-- twrel = TestCase $ assertEqual "wrel X" (wrapparser_bl' wrel "" "")
tnegrel1 = TestCase $ assertEqual "negrel 1" (Plain (Const "blub") [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] False) (wrapparser_bl' negrel "" "not blub(Foo,Bar,Goo)")
-- tnegrel = TestCase $ assertEqual "negrel X" (wrapparser_bl' negrel "" "")
-- tarel1 = TestCase $ assertEqual "arel X" (Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True)) (wrapparser_bl' arel "" "arc(X, Y, L) = L")
tarel1 = TestCase $ assertEqual "arel 1" (Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True) (wrapparser_bl' arel "" "arc(X, Y, L) = L")
tarel2 = TestCase $ assertEqual "arel 2" (Plain (Const "border") [Arith Plus (Arith Mult (Arith Minus (Sym (Var "N")) (Number (Const "1"))) (Sym (Var "R"))) (Number (Const "1")),Sym (Var "N")] True) (wrapparser_bl' arel "" "border((N-1)*R+1,N).")
-- tarel = TestCase $ assertEqual "arel X" (wrapparser_bl' arel "" "")
-- Tables for the negatable assignment relation (arel''), argument lists
-- (args/fargs), single elements (myelem), atoms, variables, and a block of
-- alternative-relation (altrel) cases that is currently commented out.
tarel''1 = TestCase $ assertEqual "arel'' 1" (Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) False) (wrapparser_bl' arel'' "" "not arc(X, Y, L) = L")
tarel''2 = TestCase $ assertEqual "arel'' 2" Empty (wrapparser_bl' arel'' "" "not M = [ est(I,S) : est(I,S) : hasest(I) = S ]")
-- tarel'' = TestCase $ assertEqual "arel'' X" (wrapparser_bl' arel'' "" "")
targs1 = TestCase $ assertEqual "args 1" [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] (wrapparser_exp' args "" "(Foo, Bar, Goo)")
targs2 = TestCase $ assertEqual "args 2" [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] (wrapparser_exp' args "" "(Foo,Bar, Goo)")
targs3 = TestCase $ assertEqual "args 3" [Sym (Var "Foo"),Sym (Var "Bar"),Sym (Var "Goo")] (wrapparser_exp' args "" "(Foo,Bar,Goo)")
targs4 = TestCase $ assertEqual "args 4" [Sym (Var "Foo")] (wrapparser_exp' args "" "(Foo)")
targs5 = TestCase $ assertEqual "args 5" [Sym (Var "Foo")] (wrapparser_exp' args "" "( Foo)")
targs6 = TestCase $ assertEqual "args 6" [Sym (Const "foo")] (wrapparser_exp' args "" "(foo)")
targs7 = TestCase $ assertEqual "args 7" [Sym (Const "foo"),Sym (Var "Bar"),Sym (Const "goo")] (wrapparser_exp' args "" "(foo,Bar,goo)")
targs8 = TestCase $ assertEqual "args 8" [Sym (Var "Foo"),Sym (Var "Bar")] (wrapparser_exp' args "" " ( Foo,Bar )")
-- fails
targs9 = TestCase $ assertEqual "args 9" [Sym (Const "Erroneous MyExpr")] (wrapparser_exp' args "" "")
targs10 = TestCase $ assertEqual "args 10" [Arith Range (Number (Const "1")) (Sym (Const "k"))] (wrapparser_exp' args "" "(1 .. k)")
targs11 = TestCase $ assertEqual "args 11" [Sym (Const "foo"),Sym (Var "Bar"),Number (Const "5")] (wrapparser_exp' args "" "(foo,Bar,5)")
targs12 = TestCase $ assertEqual "args 12" [Sym (Const "foo"),Sym (Const "\"Bar\""),Number (Const "5")] (wrapparser_exp' args "" "(foo,\"Bar\",5)")
targs13 = TestCase $ assertEqual "args 13" [Alternative [Sym (Var "X"),Arith Plus (Sym (Var "X")) (Number (Const "1"))],Sym (Var "Y")] (wrapparser_exp' args "" "(X;X+1,Y)")
targs14 = TestCase $ assertEqual "args 14" [Arith Plus (Arith Mult (Arith Minus (Sym (Var "N")) (Number (Const "1"))) (Sym (Var "R"))) (Number (Const "1")),Sym (Var "N")] (wrapparser_exp' args "" "((N-1)*R+1,N)")
targs15 = TestCase $ assertEqual "args 15" [Func (Const "#abs") [Arith Minus (Sym (Var "RK1")) (Sym (Var "RK2"))] True,Sym (Var "Z")] (wrapparser_exp' args "" "(#abs(RK1-RK2),Z)")
-- This does not work, but it really should, have a look at farg!
-- Or does it work still???
targs16 = TestCase $ assertEqual "args 16" [Func (Const "#abs") [Arith Minus (Sym (Var "RK1")) (Sym (Var "RK2"))] True] (wrapparser_exp' args "" "(#abs(RK1-RK2))")
-- targs = TestCase $ assertEqual "args X" (wrapparser_exp' args "" "")
tfargs1 = TestCase $ assertEqual "fargs 1" [Func (Const "#abs") [Arith Minus (Sym (Var "RK1")) (Sym (Var "RK2"))] True] (wrapparser_exp' fargs "" "(#abs(RK1-RK2))")
tfargs2 = TestCase $ assertEqual "fargs 2" [Func (Const "#abs") [Arith Minus (Sym (Var "RK1")) (Sym (Var "RK2"))] True,Sym (Var "Z")] (wrapparser_exp' fargs "" "(#abs(RK1-RK2),Z)")
tmyelem1 = TestCase $ assertEqual "myelem 1" (Sym (Var "Foo")) (wrapparser_exp myelem "" "Foo")
tmyelem2 = TestCase $ assertEqual "myelem 2" (Sym (Const "foo")) (wrapparser_exp myelem "" "foo")
tmyelem3 = TestCase $ assertEqual "myelem 3" (Sym (Var "_")) (wrapparser_exp myelem "" "_")
tmyelem4 = TestCase $ assertEqual "myelem 4" (Sym (Const "\"foo\"")) (wrapparser_exp myelem "" "\"foo\"")
-- tmyelem = TestCase $ assertEqual "myelem X" (wrapparser_exp myelem "" "")
tatom1 = TestCase $ assertEqual "atom 1" (Const "Erroneous Atom") (wrapparser_atom atom "" "Foo")
tatom2 = TestCase $ assertEqual "atom 2" (Const "foo") (wrapparser_atom atom "" "foo")
tatom3 = TestCase $ assertEqual "atom 3" (Const "\"Foo\"") (wrapparser_atom atom "" "\"Foo\"")
tatom4 = TestCase $ assertEqual "atom 4" (Const "\"wp1:Person\"") (wrapparser_atom atom "" "\"wp1:Person\"")
-- Should this fail?
tatom5 = TestCase $ assertEqual "atom 5" (Const "Erroneous Atom") (wrapparser_atom atom "" "1")
tatom6 = TestCase $ assertEqual "atom 6" (Const "m") (wrapparser_atom atom "" "m")
-- tatom = TestCase $ assertEqual "atom X" (wrapparser_atom atom "" "")
tnvariable1 = TestCase $ assertEqual "nvariable 1" (Var "Foo") (wrapparser_atom nvariable "" "Foo")
tnvariable2 = TestCase $ assertEqual "nvariable 2" (Var "F") (wrapparser_atom nvariable "" "F")
-- Only parses the first token, X
tnvariable3 = TestCase $ assertEqual "nvariable 3" (Var "X") (wrapparser_atom nvariable "" "X > Y")
-- tnvariable = TestCase $ assertEqual "nvariable X" (wrapparser_atom nvariable "" "")
{--
taltrel1 = TestCase $ assertEqual "altrel 1" (Alternative [Plain (Const "arc_S") [Sym (Var "X"),Sym (Var "Y")] True,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y")] True]) (wrapparser_bl' altrel "" "arc_S(X, Y) ; arc(X, Y)")
taltrel2 = TestCase $ assertEqual "altrel 2" (Alternative [Plain (Const "arc_S") [Sym (Var "X"),Sym (Var "Y")] False,Plain (Const "arc") [Sym (Var "X"),Sym (Var "Yo")] True]) (wrapparser_bl' altrel "" "not arc_S(X, Y) ; arc(X, Yo)")
taltrel3 = TestCase $ assertEqual "altrel 3" (Alternative [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True)]) (wrapparser_bl' altrel "" "lc(X, Y) ; arc(X, Y, L) = L")
taltrel4 = TestCase $ assertEqual "altrel 4" (Alternative [Plain (Const "occurs") [Sym (Var "Y")] False,Plain (Const "vtx") [Sym (Var "X")] True,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))]) (wrapparser_bl' altrel "" "not occurs(Y) ; vtx(X) ; Y < X")
taltrel5 = TestCase $ assertEqual "altrel 5" Empty (wrapparser_bl' altrel "" "blub(Foo;Bar,Bar;Foo,Goo;Moo)")
taltrel6 = TestCase $ assertEqual "altrel 6" (Alternative [Plain (Const "f") [] True,Plain (Const "g") [] True]) (wrapparser_bl' altrel "" "f;g")
taltrel7 = TestCase $ assertEqual "altrel 7" (Alternative [Plain (Const "f") [] True,Plain (Const "g") [] True]) (wrapparser_bl' altrel "" "f; g")
taltrel8 = TestCase $ assertEqual "altrel 8" (Alternative [Plain (Const "f") [] True,Plain (Const "g") [] True]) (wrapparser_bl' altrel "" "f ; g")
taltrel9 = TestCase $ assertEqual "altrel 9" (Alternative [Plain (Const "f") [] True,Plain (Const "g") [] True]) (wrapparser_bl' altrel "" "f ;g")
taltrel10 = TestCase $ assertEqual "altrel 10" (Alternative [Plain (Const "f") [] True,Plain (Const "g") [] True,Plain (Const "x") [] True,Plain (Const "y") [] True,Plain (Const "z") [] True]) (wrapparser_bl' altrel "" "f ;g;x;y;z")
taltrel11 = TestCase $ assertEqual "altrel 11" (Alternative [Plain (Const "f") [] True,Plain (Const "vtx") [Sym (Var "Y")] True,BExpr Lt (Sym (Var "Y")) (Sym (Var "X"))]) (wrapparser_bl' altrel "" "f ; vtx(Y) ; Y < X.")
-- taltrel = TestCase $ assertEqual "altrel X" (wrapparser_bl' altrel "" "")
--}
-- Tables for alternative expressions, built-in functions, #show/#hide
-- directives, assignments and #min/#max optimization statements.
taltexpr1 = TestCase $ assertEqual "altexpr 1" (Alternative [Sym (Const "f"),Sym (Const "g")]) (wrapparser_exp altexpr "" "f;g")
taltexpr2 = TestCase $ assertEqual "altexpr 2" (Alternative [Sym (Const "f"),Sym (Const "g")]) (wrapparser_exp altexpr "" "f; g")
taltexpr3 = TestCase $ assertEqual "altexpr 3" (Alternative [Sym (Const "f"),Sym (Const "g"),Sym (Const "x"),Sym (Const "y"),Sym (Const "z")]) (wrapparser_exp altexpr "" "f ;g;x;y;z")
taltexpr4 = TestCase $ assertEqual "altexpr 4" (Alternative [Sym (Const "f"),Sym (Const "g"),Sym (Const "x"),Sym (Const "y"),Sym (Var "Z")]) (wrapparser_exp altexpr "" "f ;g;x;y;Z")
taltexpr5 = TestCase $ assertEqual "altexpr 5" (Alternative [Sym (Var "Foo"),Sym (Var "Bar")]) (wrapparser_exp altexpr "" "Foo;Bar")
-- taltexpr = TestCase $ assertEqual "altexpr X" (wrapparser_expr altexpr "" "")
tfunc1 = TestCase $ assertEqual "func 1" (Func (Const "#abs") [Sym (Var "N"),Arith Plus (Sym (Var "N")) (Number (Const "1"))] True) (wrapparser_exp func "" "#abs(N,N+1)")
-- tfunc = TestCase $ assertEqual "func X" (wrapparser_expr func "" "")
tshoworhide1 = TestCase $ assertEqual "showorhide 1" (Show [Plain (Const "waitingfor") [Sym (Var "_"),Sym (Var "_")] True]) (wrapparser showorhide "" "show waitingfor(_,_).")
tshoworhide2 = TestCase $ assertEqual "showorhide 2" (GShow [Plain (Const "waitingfor") [Sym (Var "_"),Sym (Var "_")] True]) (wrapparser showorhide "" "#show waitingfor(_,_).")
tshoworhide3 = TestCase $ assertEqual "showorhide 3" (GHide [Plain (Const "waitingfor") [Sym (Var "_"),Sym (Var "_")] True]) (wrapparser showorhide "" "#hide waitingfor(_,_).")
tshoworhide4 = TestCase $ assertEqual "showorhide 4" (GHide [Empty]) (wrapparser showorhide "" "#hide.")
tshoworhide5 = TestCase $ assertEqual "showorhide 5" (GHide [Arity (Const "pos") "3"]) (wrapparser showorhide "" "#hide pos/3.")
tshoworhide6 = TestCase $ assertEqual "showorhide 6" (GShow [Arity (Const "pos") "3"]) (wrapparser showorhide "" "#show pos/3.")
-- tshoworhide = TestCase $ assertEqual "showorhide X" (wrapparser showorhide "" "")
tmyassign1 = TestCase $ assertEqual "myassign 1" (Assignment (Var "M") (Optimize False [Typed [Plain (Const "est") [Sym (Var "I"),Sym (Var "S")] True,Plain (Const "est") [Sym (Var "I"),Sym (Var "S")] True,Weighed (Sym (Var "S")) (Plain (Const "hasest") [Sym (Var "I")] True) True]] True) True) (wrapparser_bl' myassign "" "M = #min [ est(I,S) : est(I,S) : hasest(I) = S ]")
tmyassign2 = TestCase $ assertEqual "myassign 2" (Assignment (Var "M") (Count (Sym (Const "any")) (Sym (Const "any")) [Typed [Plain (Const "est") [Sym (Var "I"),Sym (Var "S")] True,Plain (Const "est") [Sym (Var "I"),Sym (Var "S")] True,Weighed (Sym (Var "S")) (Plain (Const "hasest") [Sym (Var "I")] True) True]] True) True) (wrapparser_bl' myassign "" "M = [ est(I,S) : est(I,S) : hasest(I) = S ]")
-- tmyassign = TestCase $ assertEqual "myassign X" (wrapparser_bl' myassign "" "")
tmyoptimize1 = TestCase $ assertEqual "myoptimize 1" (Optimize True [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True]] True) (wrapparser_bl' myoptimize "" "#max [ lc(X, Y) : arc(X, Y, L) = L ]")
tmyoptimize2 = TestCase $ assertEqual "myoptimize 2" (Optimize True [Typed [Plain (Const "lc") [Sym (Var "X"),Sym (Var "Y")] True,Weighed (Sym (Var "L")) (Plain (Const "arc") [Sym (Var "X"),Sym (Var "Y"),Sym (Var "L")] True) True]] True) (wrapparser_bl' myoptimize "" "max [ lc(X, Y) : arc(X, Y, L) = L ]")
-- tmyoptimize = TestCase $ assertEqual "myoptimize X" (wrapparser_bl' myoptimize "" "")
-- | Every test case above, registered as one HUnit suite.
-- Fixes relative to the original list:
--   * the duplicate tarel1 entry was removed from the wrel/negrel line
--     (it already appears in the arel group above);
--   * tfunc1 was defined above but never registered, so it is added.
tests = TestList [ truleorfact1, truleorfact2, trulebase1, trulebase2, trulebase3, trulebase4,
                   trulebase5, trulebase6, trulebase7, trulebase8, trulebase9, trulebase10,
                   trulebase11, trulebase12, trulebase13, trulebase14, trulebase15, tconstdef1,
                   tfact1, tfact2, tdeny1, tdeny2, tdeny3, tdeny4, tdeny5, tdeny6,
                   trule1, trule2, trule3, trule4, trule5, trule6, trule7, trule8, trule9,
                   trule10, trule11, trule12, trule13, trule14, trule15, trule16, trule17,
                   tbody1, tbody2, tbody3, tbody4, tbody5, tbody6, tbody7, tbody8, tbody9, tbody10,
                   tgenrel1, tgenrel2, tgenrel3, tgenrel4, tgenrel5, tgenrel6, tgenrel7, tgenrel8,
                   tgenrel9, tgenrel10, tgenrel11, tgenrel12, tgenrel13, tgenrel14, tgenrel15,
                   tgenrel16, tgenrel17, tgenrel18, tgenrel19, tgenrel20, tgenrel21, tgenrel22,
                   tarel1, tarel2,
                   tarel''1, tarel''2,
                   tnumericexpr1, tnumericexpr2, tnumericexpr3, tnumericexpr4, tnumericexpr5,
                   tnumericexpr6, tnumericexpr7, tnumericexpr8, tnumericexpr9, tnumericexpr10,
                   tnumericexpr11,
                   tnumeric1, tnumeric2, tnumeric3, tnumeric4, tnumeric5,
                   tnexpr1, tnexpr2, tnexpr3, tnexpr4,
                   tbexpr1, tbexpr2,
                   tmycount1, tmycount2, tmycount3, tmycount4, tmycount5, tmycount6,
                   tmychoice1, tmychoice2, tmychoice3, tmychoice4, tmychoice5, tmychoice6,
                   tmychoice7, tmychoice8, tmychoice9,
                   trel1, trel2, trel3, trel4, trel5, trel6, trel7, trel8, trel9, trel10,
                   ttrel1, ttrel2, ttrel3, ttrel4, ttrel5, ttrel6, ttrel7, ttrel8,
                   tatomrel1, tatomrel2, tatomrel3,
                   tsrel1, tsrel2, tsrel3, tsrel4, tsrel5,
                   twrel1, twrel2, twrel3, tnegrel1,
                   targs1, targs2, targs3, targs4, targs5, targs6, targs7, targs8, targs9,
                   targs10, targs11, targs12, targs13, targs14, targs15, targs16,
                   tfargs1, tfargs2,
                   tmyelem1, tmyelem2, tmyelem3, tmyelem4,
                   tatom1, tatom2, tatom3, tatom4, tatom5, tatom6,
                   tnvariable1, tnvariable2, tnvariable3,
                   taltexpr1, taltexpr2, taltexpr3, taltexpr4, taltexpr5,
                   tfunc1,
                   --taltrel1,taltrel2,taltrel3,taltrel4,taltrel5,taltrel6,taltrel7,
                   -- taltrel8,taltrel9,taltrel10,taltrel11
                   tshoworhide1, tshoworhide2, tshoworhide3, tshoworhide4, tshoworhide5, tshoworhide6,
                   tmyassign1, tmyassign2, tmyoptimize1, tmyoptimize2
                 ]
-- runTestTT tests
-- | Run the whole suite; 'runTestTT' prints progress and a summary to
-- stdout. (The original final line had corpus metadata fused onto it,
-- which made the file unparseable; that junk has been removed.)
main =
  do runTestTT tests
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
-- NOTE(review): this module lives under JSFFI.Generated and follows the
-- mechanical pattern of GHCJS DOM bindings; presumably it is produced by
-- a binding generator and should be regenerated rather than hand-edited
-- -- confirm before changing.
module GHCJS.DOM.JSFFI.Generated.Worker
       (js_newWorker, newWorker, js_postMessage, postMessage,
        js_terminate, terminate, message, Worker, castToWorker,
        gTypeWorker)
       where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
-- Raw JavaScript constructor call: @new Worker(scriptUrl)@.
foreign import javascript unsafe "new window[\"Worker\"]($1)"
        js_newWorker :: JSString -> IO (JSRef Worker)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Worker Mozilla Worker documentation>
newWorker ::
          (MonadIO m, ToJSString scriptUrl) => scriptUrl -> m Worker
newWorker scriptUrl
  = liftIO
      (js_newWorker (toJSString scriptUrl) >>= fromJSRefUnchecked)
foreign import javascript unsafe "$1[\"postMessage\"]($2, $3)"
        js_postMessage ::
        JSRef Worker -> JSRef SerializedScriptValue -> JSRef Array -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Worker.postMessage Mozilla Worker.postMessage documentation>
-- 'Nothing' arguments are passed to the JavaScript side as null.
postMessage ::
            (MonadIO m, IsSerializedScriptValue message,
             IsArray messagePorts) =>
              Worker -> Maybe message -> Maybe messagePorts -> m ()
postMessage self message messagePorts
  = liftIO
      (js_postMessage (unWorker self)
         (maybe jsNull (unSerializedScriptValue . toSerializedScriptValue)
            message)
         (maybe jsNull (unArray . toArray) messagePorts))
foreign import javascript unsafe "$1[\"terminate\"]()" js_terminate
        :: JSRef Worker -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Worker.terminate Mozilla Worker.terminate documentation>
terminate :: (MonadIO m) => Worker -> m ()
terminate self = liftIO (js_terminate (unWorker self))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Worker.onmessage Mozilla Worker.onmessage documentation>
-- Event name for subscribing to the worker's \"message\" events.
-- (The original line had corpus metadata fused onto it; removed.)
message :: EventName Worker MessageEvent
message = unsafeEventName (toJSString "message")
module PE0014 where
import Data.List (maximumBy)
import Data.Ord (comparing)
-- | One step of the Collatz map: halve an even number, otherwise 3n+1.
collatzStep :: Integral a => a -> a
collatzStep n
  | even n    = n `div` 2
  | otherwise = 3 * n + 1

-- | The full Collatz chain starting at @n@ and ending at 1.
-- Inputs below 1 yield the empty chain.
--
-- >>> collatzChain 3
-- [3,10,5,16,8,4,2,1]
collatzChain :: Integral a => a -> [a]
collatzChain n = case compare n 1 of
  EQ -> [1]
  GT -> n : collatzChain (collatzStep n)
  LT -> []

-- | Pair each starting value with the length of its Collatz chain.
--
-- >>> collatzChainLengths [1..4]
-- [(1,1),(2,2),(3,8),(4,3)]
collatzChainLengths :: [Int] -> [(Int, Int)]
collatzChainLengths = map (\start -> (start, length (collatzChain start)))
-- | Print the starting number (below one million) whose Collatz chain is
-- longest, paired with that chain's length.
main :: IO ()
main =
  print (maximumBy (comparing snd) (collatzChainLengths [1 .. 999999]))
| mvidner/projecteuler | src/PE0014.hs | mit | 668 | 0 | 11 | 140 | 256 | 135 | 121 | 15 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Main where
import qualified Graphics.Vty as V
import qualified Brick.AttrMap as A
import qualified Brick.Main as M
import Brick.Types (Widget)
import qualified Brick.Types as T
import Brick.Util as B
import qualified Brick.Widgets.Border as B
import qualified Brick.Widgets.Center as C
import Brick.Widgets.Core (str, vBox, vLimit, withAttr)
import qualified Brick.Widgets.GroupedList as GL()
import qualified Brick.Widgets.List as L
import qualified Data.Vector as V
import Core.Types (ApplicationState (..),
initializeApplicationState)
import qualified Git
-- | Render the full interface: the staged-files box, the unstaged-files
-- box, and a help line, stacked vertically and centered.
drawUI :: ApplicationState -> [Widget]
drawUI (ApplicationStateConst {..}) = [layout]
  where
    layout =
      C.vCenter $
        vBox
          [ C.vCenter (fileListBox stagedFiles "Staged Files" position)
          , C.vCenter (fileListBox unstagedFiles "Unstaged Files" unstagedPos)
          , hints
          ]
    -- NOTE(review): 'position' appears to be one cursor index shared by
    -- both lists; for the second box it is offset by the staged list's
    -- length -- confirm against Core.Types.
    unstagedPos = position - V.length (L.listElements stagedFiles)
    hints = vBox (map C.hCenter [str "Press ESC to exit"])
-- | A bordered, height-limited (8 rows) list of files. Highlighting of the
-- selected row is enabled only while the shared cursor position falls
-- within this list's index range.
fileListBox :: L.List Git.File -> String -> Int -> Widget
fileListBox files label cursor =
  B.borderWithLabel (str label) $
    vLimit 8 $
      L.renderList files (drawFileListElement inRange)
  where
    inRange = cursor > 0 && cursor <= V.length (L.listElements files)
-- | Event handler: ESC quits; every other event is delegated to the
-- state's own handler and the loop continues.
appEvent :: ApplicationState -> V.Event -> T.EventM (T.Next ApplicationState)
appEvent st ev
  | V.EvKey V.KEsc [] <- ev = M.halt st
  | otherwise               = T.handleEvent ev st >>= M.continue
-- | Render one file entry, centered. The highlight attribute is applied
-- only when both the cross-list colouring flag and the list-local
-- selection flag are set; otherwise the neutral (mempty) attribute is used.
drawFileListElement :: Bool -> Bool -> Git.File -> Widget
drawFileListElement colorize selected (Git.File {name = fileName}) =
  C.hCenter (withAttr chosenAttr (str fileName))
  where
    chosenAttr
      | colorize && selected = realSelectedAttr
      | otherwise            = mempty
-- | Attribute name for the entry under the cursor: brick's built-in
-- list-selection attribute extended with a "custom" segment (see theMap).
realSelectedAttr :: A.AttrName
realSelectedAttr = L.listSelectedAttr <> "custom"
-- | Attribute map: ordinary selected list rows are bright yellow on black;
-- the 'realSelectedAttr' row inverts that to black on bright yellow.
theMap :: A.AttrMap
theMap = A.attrMap V.defAttr
    [ (L.listAttr <> L.listSelectedAttr, V.brightYellow `B.on` V.black)
    , (realSelectedAttr, V.black `B.on` V.brightYellow)
    ]
-- | The brick application record wiring together drawing, event handling
-- and attributes. No cursor is shown and vty events pass through unchanged.
theApp :: M.App ApplicationState V.Event
theApp =
    M.App { M.appDraw = drawUI
          , M.appChooseCursor = M.neverShowCursor
          , M.appHandleEvent = appEvent
          , M.appStartEvent = return
          , M.appAttrMap = const theMap
          , M.appLiftVtyEvent = id
          }
-- | Entry point: build the initial application state, then run the brick
-- event loop. The final state returned by 'M.defaultMain' is not needed
-- and is discarded explicitly. (The original used 'void', but
-- Control.Monad is never imported in this module, so that did not
-- compile; the explicit bind-and-return avoids the missing import.)
main :: IO ()
main = do
  initialState <- initializeApplicationState
  _ <- M.defaultMain theApp initialState
  return ()
| zsedem/gitcommander | Main.hs | mit | 2,916 | 0 | 13 | 897 | 751 | 418 | 333 | 58 | 2 |
module BibleRot where
import Rotations
import Data.List
import Control.Monad.Random
{-
This is an example of using rotations to modify text based off of the king james version of the bible from Gutenberg
The first thing we need to do is run through the text in order to figure out where our chapter and paragraph breaks should be.
It'd probably be easiest to do something with parsec
-}
{-
import Text.Parsec
import Text.Parsec.Combinator
import Text.Parsec.Char
import Text.Parsec.String
-}
{-
The basic layout is as follows:
A chapter starts with a single line,
followed by two extraneous newlines
A chapter is followed by an arbitrary number of paragraphs
ended with four extraneous newlines
-}
{- we'll assume that a "lines" has already performed on the string -}
-- | The Gutenberg text uses DOS line endings throughout.
endOfLine :: String
endOfLine = "\r\n"

-- | Chapters are separated by five consecutive line endings.
breakIntoChapters :: String -> [String]
breakIntoChapters = breakBySubstring (concat (replicate 5 endOfLine))

-- | Split a chapter into its title and its body text.
-- NOTE(review): partial -- indexes the first two pieces and will fail at
-- use sites if the three-line-ending separator never occurs; the original
-- behaviour (including its laziness) is kept as-is.
breakChapter :: String -> (String,String)
breakChapter chapter = (pieces !! 0, pieces !! 1)
  where
    pieces = breakBySubstring (concat (replicate 3 endOfLine)) chapter

-- | Paragraphs are separated by two consecutive line endings.
breakParagraph :: String -> [String]
breakParagraph = breakBySubstring (concat (replicate 2 endOfLine))
-- | Split a list on every occurrence of the delimiter @sub@.
-- Segments are accumulated back-to-front; leading empty segments of the
-- accumulator (i.e. trailing delimiters in the input) are dropped.
breakBySubstring :: Eq a => [a] -> [a] -> [[a]]
breakBySubstring sub input = breakBySubstring' input sub []

-- | Worker for 'breakBySubstring': walks the input one element at a time,
-- starting a fresh segment whenever the delimiter matches at the front.
breakBySubstring' :: Eq a => [a] -> [a] -> [[a]] -> [[a]]
breakBySubstring' [] _ accum =
  reverse (map reverse (dropWhile null accum))
breakBySubstring' ys@(y:rest) sub accum
  | take delimLen ys == sub = breakBySubstring' (drop delimLen ys) sub ([] : accum)
  | otherwise               = breakBySubstring' rest sub (addFirst y accum)
  where
    delimLen = length sub

-- | Cons an element onto the head segment, creating one if none exists.
addFirst :: a -> [[a]] -> [[a]]
addFirst x []       = [[x]]
addFirst x (s : ss) = (x : s) : ss
-- | Split text into sentences, keeping the terminating '.', '?' or '!'
-- attached to each sentence. A trailing fragment with no terminator is
-- kept as the final element.
breakSentences :: String -> [String]
breakSentences text = breakSentences' text []

-- | Worker for 'breakSentences': accumulates finished sentences in
-- reverse order and reverses once at the end.
breakSentences' :: String -> [String] -> [String]
breakSentences' [] acc = reverse acc
breakSentences' text@(_:_) acc =
  case break (`elem` ".?!") text of
    (sentence, stop : rest) -> breakSentences' rest ((sentence ++ [stop]) : acc)
    (fragment, [])          -> breakSentences' [] (fragment : acc)
-- | Parse the whole text into chapters: each chapter is its title paired
-- with its paragraphs, where a paragraph is a list of sentences.
parseBible :: String -> [(String,[[String]])]
parseBible raw =
  [ (title, map breakSentences (breakParagraph body))
  | chapter <- breakIntoChapters raw
  , let (title, body) = breakChapter chapter
  ]
-- | Inverse of 'parseBible': rebuild one flat string from parsed chapters.
flattenBible :: [(String,[[String]])] -> String
flattenBible = concatMap flattenChapter

-- | A chapter is its title, three line endings, then its paragraphs.
flattenChapter :: (String, [[String]]) -> String
flattenChapter (title, paragraphs) =
  title ++ endOfLine ++ endOfLine ++ endOfLine
        ++ concatMap flattenParagraph paragraphs

-- | Sentences within a paragraph are joined with DOS line endings.
flattenParagraph :: [String] -> String
flattenParagraph = intercalate "\r\n"
-- | Read a King James text from @f1@, parse it, randomly rotate its
-- chapters (via 'rotateChapterWithTitle' from the Rotations module,
-- run in IO with 'evalRandIO'), and write the flattened result to @f2@.
rotateBible :: String -> String -> IO ()
rotateBible f1 f2 = do
  s <- readFile f1
  let b = parseBible s
  b' <- evalRandIO $ rotateChapterWithTitle b
  writeFile f2 (flattenBible b')
| clarissalittler/nanogenmo-2015 | BibleRot.hs | mit | 2,852 | 0 | 13 | 717 | 878 | 467 | 411 | 42 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Y2021.M01.D21.Exercise where
import Y2021.M01.D20.Solution -- for wineries
import Data.Aeson.WikiDatum (Name)
import Graph.Query
import Graph.JSON.Cypher
import qualified Graph.JSON.Cypher.Read.Rows as RR
{--
Okay, yesterday, we got some lat/longs, or, more precisely, some LongLats
for wineries via wikipedia. Today, we're going to compare this against the
wine graph database.
To set up the wine graph database, upload the CSV files from the neo4j
repository, following the instructions of the README there:
https://github.com/lju-lazarevic/wine
Now, that done, compare the wineries from the wikidata-set to the ones in the
wine graph database. What matches do we have?
--}
import Data.Map (Map)
import Data.Set (Set)
{--
>>> readWineries (wineriesDir ++ wineriesJSON)
...
>>> let wikiwineries = it
--}
-- | A graph-node index as returned by neo4j's @id()@ function.
type Idx = Integer
-- | Winery name mapped to its graph-node index.
type IxWineries = Map Name Idx
-- | Cypher query returning every Winery node's name and internal node id.
wineriesQuery :: Cypher
wineriesQuery = "MATCH (w:Winery) RETURN w.name AS winery, id(w) AS ix"
-- | Exercise stub: run 'wineriesQuery' against the graph endpoint and
-- collect the (winery, ix) rows into an 'IxWineries' map. Unimplemented.
wineriesFromGraph :: Endpoint -> IO IxWineries
wineriesFromGraph = undefined
-- How many wineries are in the graph database?
-- | Exercise stub: intersect the wikidata winery names with the winery
-- names found in the graph database. Unimplemented.
wineries2wineries :: Wineries -> Set Name -> Set Name
wineries2wineries = undefined
-- How many wineries are in the intersection?
{-- BONUS -------------------------------------------------------
Are there wineries that should intersect, but don't? Which ones? How would
you fix this non-intersection (if it exists) to be in the intersection-set?
... actually, the winery's country helps quite a bit to answer this bonus,
so we will defer it until tomorrow.
--}
-- hmmm, shall we upload LongLat data to the graph? YES! ... but not today.
| geophf/1HaskellADay | exercises/HAD/Y2021/M01/D21/Exercise.hs | mit | 1,681 | 0 | 7 | 266 | 144 | 91 | 53 | 17 | 1 |
module Day5 where
import Data.List (group, isInfixOf)
-- | Nice rule 1: the string contains at least three vowels.
rule1 :: String -> Bool
rule1 s = length [c | c <- s, c `elem` "aeiou"] >= 3

-- | Nice rule 2: at least one letter appears twice in a row.
rule2 :: String -> Bool
rule2 = any ((>= 2) . length) . group

-- | Nice rule 3: none of the forbidden pairs occur anywhere.
rule3 :: String -> Bool
rule3 s = not (any (`isInfixOf` s) ["ab", "cd", "pq", "xy"])

-- | A string is nice exactly when all three rules hold.
isNice :: String -> Bool
isNice s = and [rule1 s, rule2 s, rule3 s]
-- | Read candidate strings from stdin (one per line) and report how many
-- of them are nice.
main :: IO ()
main = do
  input <- getContents
  let nice = filter isNice (lines input)
  putStrLn ("Count of nice strings: " ++ show (length nice))
| tomwadeson/adventofcode | 05.hs | mit | 593 | 0 | 12 | 126 | 251 | 137 | 114 | 15 | 1 |
{-
Functional Morphology: Latin internal dictionary
Copyright (C) 2004 Author: Markus Forsberg
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
module DictON where
import BuildON
import Dictionary
import TypesON
-- | The complete internal Old Norse dictionary: every noun and verb
-- paradigm listed below, packed into a 'Dictionary'.
oldnorseDict :: Dictionary
oldnorseDict = dictionary $ nouns ++ verbs
-- | Noun entries.  Each @dN...@ function (from "BuildON") applies an
-- inflection paradigm (named after its model word) to the quoted
-- citation form.
nouns = [
 d1heimr "heimr",
 d1songr "söngr",
 d1kottr "köttr",
 d1barn "barn",
 d1nidr "niðr",
 d2sjar "sjár",
 d2sker "sker",
 d1smjor "smjör",
 d1kvadi "kvæði",
 d1hirdir "hirðir",
 d1nal "nál",
 d1stod "stöð",
 d1ben "ben",
 d1heidr "heiðr",
 d1gygr "gýgr",
 d1stadr "staðr",
 d1gestr "gestr",
 d1naud "nauð",
 d2holl "höll",
 d1ylgr "ylgr",
 d1fingr "fingr",
 d2vetr "vetr",
 d1fotr "fótr",
 d1bok "bók",
 d2vik "vík",
 d2flo "fló",
 d1kyr "kýr",
 d1fadir "faðir",
 d2dottir "dóttir",
 d1gefandi "gefandi",
 d1timi "tími",
 d2auga "auga",
 d1bryti "bryti",
 d1vokvi "vökvi",
 d1tunga "tunga",
 d1brynja "brynja",
 d2kirkja "kirkja",
 d1volva "völva",
 d1elli "elli",
 d2iski "iski",
 d1gorsimi "görsimi"
 ]
-- | Verb entries, paradigm function applied to the citation form.
-- NOTE(review): @d1stodva "herja"@ reuses the stödva paradigm for a
-- different verb — possibly a copy-paste slip; confirm against BuildON.
verbs = [
 d1elska "elska",
 d1stodva "stöðva",
 d1stodva "herja",
 d1spa "spá",
 d1krefja "krefja",
 d1lyja "lýja",
 d1bita "bíta"
 ]
] | icemorph/icemorph | bin/FM/oldnorse/DictON.hs | cc0-1.0 | 1,954 | 36 | 7 | 464 | 367 | 188 | 179 | 56 | 1 |
--
-- Copyright (c) 2013 Bonelli Nicola <bonelli@antifork.org>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
module CGrep.CGrep (sanitizeOptions, cgrepDispatch) where
import qualified CGrep.Strategy.BoyerMoore as BoyerMoore
import qualified CGrep.Strategy.Levenshtein as Levenshtein
import qualified CGrep.Strategy.Regex as Regex
import qualified CGrep.Strategy.Cpp.Tokenizer as CppTokenizer
import qualified CGrep.Strategy.Cpp.Semantic as CppSemantic
import qualified CGrep.Strategy.Generic.Semantic as Semantic
import Control.Monad.Trans.Reader
import CGrep.Lang
import CGrep.Common
import CGrep.Output
import Data.List
import Data.Maybe
import Options
-- | True when the language detected for @path@ (via 'getFileLang') is one
-- of the languages in @xs@.
hasLanguage :: FilePath -> Options -> [Lang] -> Bool
hasLanguage path opt xs = isJust $ getFileLang opt path >>= (`elemIndex` xs)
-- | Disable the C\/C++ token-class filters for files that are not C or
-- C++: those options only make sense for the C\/C++ tokenizer, so for any
-- other language they are forced off.  Note that 'number' is deliberately
-- left untouched.
sanitizeOptions :: FilePath -> Options -> Options
sanitizeOptions path opt =
    if hasLanguage path opt [C, Cpp]
    then opt
    else opt { identifier = False
             , keyword    = False
             , directive  = False
             , header     = False
             , string     = False
             , char       = False
             , oper       = False
             }
-- | True when at least one token-class filter (identifier, keyword,
-- directive, header, number, string, char, operator) is enabled, i.e.
-- the tokenizer-based strategies are relevant.
hasTokenizerOpt :: Options -> Bool
hasTokenizerOpt opt = or
    [ identifier opt
    , keyword opt
    , directive opt
    , header opt
    , number opt
    , string opt
    , char opt
    , oper opt
    ]
-- | Select the search strategy from the options, in priority order:
-- edit-distance (Levenshtein), plain text (Boyer-Moore), semantic search
-- (C\/C++-aware or generic), C\/C++ tokenizer, and finally regex.
cgrepDispatch :: FilePath -> [Text8] -> ReaderT Options IO [Output]
cgrepDispatch filename patterns = do
    opt <- ask
    case () of
        _ | not (regex opt) && not (hasTokenizerOpt opt) && not (semantic opt) && edit_dist opt -> Levenshtein.search filename patterns
          | not (regex opt) && not (hasTokenizerOpt opt) && not (semantic opt) -> BoyerMoore.search filename patterns
          | not (regex opt) && semantic opt && hasLanguage filename opt [C,Cpp] -> CppSemantic.search filename patterns
          | not (regex opt) && semantic opt -> Semantic.search filename patterns
          | not (regex opt) -> CppTokenizer.search filename patterns
          | regex opt -> Regex.search filename patterns
          | otherwise -> undefined -- unreachable: the guards above cover both regex and non-regex cases
| beni55/cgrep | src/CGrep/CGrep.hs | gpl-2.0 | 3,243 | 0 | 18 | 1,067 | 656 | 366 | 290 | 49 | 2 |
--
-- Copyright (c) 2014 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
module Main where
import Prelude hiding (catch)
import Control.Monad
import Control.Exception
import Control.Applicative
import System
import System.IO
import Directory
import ShellTools
import InformTools
import UpgradeEngine
import Migrations
import Data.List
import qualified Data.Map as M
import qualified Data.Text as T
-- MODIFY THIS WHEN FORMAT CHANGES
-- | Current database format version; bump whenever the format changes
-- and add a corresponding migration.
latestVersion :: Int
latestVersion = 41
----------------------------------
-- | True when the database daemon is running, i.e. @pidof dbd@ produced
-- a pid.  Upgrading while dbd is live would race with it.
dbdRunning :: IO Bool
dbdRunning = do
    pid <- spawnShell' "pidof dbd"
    return $ pid /= Nothing
-- | Snapshot \/config (excluding previous backups) into
-- \/config\/backups\/\<current-version\> so 'rollback' can restore it on
-- a failed migration.
backupConfig :: IO ()
backupConfig = do
    current <- getCurrentVersion
    inform $ "Backing up config version " ++ show current
    safeSpawnShell $ "mkdir -p " ++ dstPath current
    safeSpawnShell $ "rsync -avz --exclude 'backups' /config/ " ++ dstPath current
    return ()
  where
    -- backup directory for a given schema version
    dstPath v = "/config/backups/" ++ show v
-- | Log and run a single migration step.
performMigration :: Migration -> IO ()
performMigration m = do
    inform $ "Migrating " ++ show (sourceVersion m) ++ " -> " ++ show (targetVersion m)
    runMigrate m
-- | Repeatedly apply the migration registered for the current version
-- until the database reaches @target@.  Assumes each migration advances
-- the stored version, otherwise this recursion would not terminate.
upgradeTo :: Int -> IO ()
upgradeTo target = do
    current <- getCurrentVersion
    when (target /= current) $
        do let m = getMigrationFromVer current
           performMigration m
           upgradeTo target
-- | Restore \/config from the backup taken for @version@ (made by
-- 'backupConfig'), discarding any partial migration changes.
rollback :: Int -> IO ()
rollback version = do
    inform $ "Rolling back changes to version " ++ show version
    safeSpawnShell $ "rsync -avz --delete --exclude 'backups' /config/backups/" ++ show version ++ "/ /config"
    inform $ "Rollback done. Database was NOT upgraded."
    return ()
--
-- build comparison stuffs
--
-- | Key\/value contents of a xenclient.conf build-information file.
-- Missing keys are represented as empty strings (see 'readBuildInfo').
data BuildInfo = BuildInfo {
      biBuildNum :: String    -- ^ "build"
    , biBuildDate :: String   -- ^ "build_date"
    , biBuildBranch :: String -- ^ "build_branch"
    , biVersion :: String     -- ^ "version"
    , biRelease :: String     -- ^ "release"
    }
-- | Parse a @key=value@ build-info file (e.g. \/etc\/xenclient.conf) into
-- a 'BuildInfo'.  Lines that are not exactly @key=value@ are ignored and
-- missing keys default to "".  Uses 'M.findWithDefault' in place of the
-- original @maybe "" id . M.lookup@ spelling.
readBuildInfo :: String -> IO BuildInfo
readBuildInfo filename =
    parse <$> readFile filename
    where
      parse = fromMap . toMap . lines
      toMap = foldl' insert M.empty
      -- keep only lines of the form "key = value" (whitespace stripped)
      insert m line = case map strip . split '=' $ line of
                        [k,v] -> M.insert k v m
                        _ -> m
      fromMap m = BuildInfo {
            biBuildNum    = get "build"
          , biBuildDate   = get "build_date"
          , biBuildBranch = get "build_branch"
          , biVersion     = get "version"
          , biRelease     = get "release"
          }
        where get k = M.findWithDefault "" k m
-- | Remove the \/sha1sum entry from every disk node of every VM in the
-- database, forcing measured disks to be re-hashed.
wipeShaSums :: IO ()
wipeShaSums =
    do inform "clearing hash sums from all measured disks"
       xformVmJSON wipe
    where
      -- apply wipe_disk to each child under /config/disk of a VM tree
      wipe vm_tree = jsMapChildren wipe_disk "/config/disk" vm_tree
      wipe_disk disk_tree = jsRm "/sha1sum" disk_tree
-- | Delete the UIVM suspend image if present; a stale image from a
-- previous build cannot be safely resumed.
wipeSuspendImages :: IO ()
wipeSuspendImages =
    do e <- doesFileExist uivmp
       when e $ do
         inform $ "removing UIVM suspend image: " ++ show uivmp
         removeFile uivmp
    where
      uivmp = "/storage/uivm/uivm-suspend-image"
-- Split a list over an element
-- | Split a list on every occurrence of @sep@, dropping the separators:
-- @split ',' ",a," == ["", "a", ""]@.  The result is never empty.
split :: (Eq a) => a -> [a] -> [[a]]
split sep xs =
    case break (== sep) xs of
      (chunk, [])       -> [chunk]
      (chunk, _ : rest) -> chunk : split sep rest
strip = T.unpack . T.strip . T.pack
-- | Version-independent upgrade work: when the build number recorded in
-- \/config differs from the one in \/etc (i.e. the platform was
-- reinstalled\/upgraded), wipe stale suspend images and copy the new
-- build info into \/config.  Additionally, if the rehash flag file
-- exists, clear all stored disk hashes and remove the flag.
buildUpgrade :: IO ()
buildUpgrade = do
    have_etc_conf <- doesFileExist etc_path
    have_conf_conf <- doesFileExist config_path
    rehash <- doesFileExist rehash_path
    when ( not have_etc_conf ) $ error "fatal error, /etc/xenclient.conf does not exist!"
    if not have_conf_conf
       then do_it
       else do bi  <- readBuildInfo etc_path
               bi' <- readBuildInfo config_path
               if biBuildNum bi /= biBuildNum bi'
                  then do_it
                  else inform $ "build number has not changed"
    when rehash $ rehash_vms >> removeFile rehash_path
  where
    etc_path    = "/etc/xenclient.conf"
    config_path = "/config/xenclient.conf"
    rehash_path = "/config/rehash-vms"
    -- actions taken when the build number changed (or no record exists)
    do_it = do
      inform $ "build number has changed - performing appropriate actions"
      wipeSuspendImages
      safeSpawnShell $ "cp " ++ etc_path ++ " " ++ config_path
      inform $ "appropriate performed"
    rehash_vms = do
      inform $ "rehash flag present"
      wipeShaSums
-- | Entry point: refuse to run while dbd is live, back up and migrate the
-- database to 'latestVersion' (rolling back on any exception), then run
-- the version-independent 'buildUpgrade' steps.
main = do
    dbd <- dbdRunning
    when dbd $ inform "Cannot upgrade while database daemon 'dbd' is running" >> exitFailure
    current <- getCurrentVersion
    if ( current < latestVersion )
       then do backupConfig
               upgradeTo latestVersion
                 `catch` (errorRollback current)
       else inform $ "Database is already in latest version, " ++ show latestVersion
    -- some upgrade stuff which does not relate to db version
    buildUpgrade
  where
    -- on any exception during migration, restore the pre-upgrade backup
    errorRollback :: Int -> SomeException -> IO ()
    errorRollback version err = do
      inform $ "Error: " ++ show err
      rollback version
| OpenXT/manager | upgrade-db/Upgrade.hs | gpl-2.0 | 5,604 | 0 | 13 | 1,463 | 1,300 | 652 | 648 | 125 | 3 |
import Data.List
-- | Local copy of 'Data.Function.on': lift a binary function through a
-- unary projection, @(f `on` g) x y = f (g x) (g y)@.
on :: (b -> b -> c) -> (a -> b) -> a -> a -> c
(f `on` g) x y = f (g x) (g y)
-- | Demo of Data.List: prints one example per function, roughly in the
-- order they are presented in "Learn You a Haskell".
main = do
  -- interleaving and flattening
  print $ intersperse '.' "MONKEY"
  print $ intersperse 0 [1,2,3,4,5]
  print $ intercalate " " ["hey","there","guys"]
  print $ intercalate [0,0,0] [[1,2,3],[4,5,6],[7,8,9]]
  print $ transpose [[1,2,3],[4,5,6],[7,8,9]]
  print $ transpose ["hey","there","guys"]
  print $ map sum $ transpose [[0,3,5,9],[10,0,0,9],[8,5,1,-1]]
  print $ concat ["foo","bar","car"]
  print $ concat [[3,4,5],[2,3,4],[2,1,1]]
  print $ concatMap (replicate 4) [1..3]
  -- boolean folds and predicates
  print $ and $ map (>4) [5,6,7,8]
  print $ and $ map (==4) [4,4,4,3,4]
  print $ or $ map (==4) [2,3,4,5,6,1]
  print $ or $ map (>4) [1,2,3]
  print $ any (==4) [2,3,5,6,1,4]
  print $ all (>4) [6,9,10]
  print $ all (`elem` ['A'..'Z']) "HEYGUYSwhatsup"
  print $ any (`elem` ['A'..'Z']) "HEYGUYSwhatsup"
  -- infinite lists and splitting
  print $ take 10 $ iterate (*2) 1
  print $ take 3 $ iterate (++ "haha") "haha"
  print $ splitAt 3 "heyman"
  print $ splitAt 100 "heyman"
  print $ splitAt (-3) "heyman"
  print $ takeWhile (>3) [6,5,4,3,2,1,2,3,4,5,4,3,2,1]
  print $ takeWhile (/=' ') "This is a sentence"
  print $ sum $ takeWhile (<10000) $ map (^3) [1..]
  print $ dropWhile (/=' ') "This is a sentence"
  print $ dropWhile (<3) [1,2,2,2,3,4,5,4,3,2,1]
  -- tuples are (value, year, month, day)
  let stock = [(994.4,2008,9,1),(995.2,2008,9,2),(999.2,2008,9,3),(1001.4,2008,9,4),(998.3,2008,9,5)]
  print $ head (dropWhile (\(val,y,m,d) -> val < 1000) stock)
  print $ break (==4) [1,2,3,4,5,6,7]
  print $ span (/=4) [1,2,3,4,5,6,7]
  -- sorting and grouping
  print $ sort [8,5,3,2,1,6,4,2]
  print $ sort "This will be sorted soon"
  print $ group [1,1,1,1,2,2,2,2,3,3,2,2,2,5,6,7]
  print $ map (\l@(x:xs) -> (x,length l)) . group . sort $ [1,1,1,1,2,2,2,2,3,3,2,2,2,5,6,7]
  print $ inits "w00t"
  print $ tails "w00t"
  -- substring and prefix/suffix tests
  print $ "cat" `isInfixOf` "im a cat burglar"
  print $ "Cat" `isInfixOf` "im a cat burglar"
  print $ "cats" `isInfixOf` "im a cat burglar"
  print $ "hey" `isPrefixOf` "hey there!"
  print $ "hey" `isPrefixOf` "oh hey there!"
  print $ "there!" `isSuffixOf` "oh hey there!"
  print $ "there!" `isSuffixOf` "oh hey there"
  -- partitioning and searching
  print $ partition (`elem` ['A'..'Z']) "BOBsidneyMORGANeddy"
  print $ partition (>3) [1,3,5,6,3,2,1,0,3,7]
  print $ span (`elem` ['A'..'Z']) "BOBsidneyMORGANeddy"
  print $ find (>4) [1,2,3,4,5,6]
  print $ find (>9) [1,2,3,4,5,6]
  print $ 4 `elemIndex` [1,2,3,4,5,6]
  print $ 10 `elemIndex` [1,2,3,4,5,6]
  print $ ' ' `elemIndices` "Where are the spaces?"
  print $ findIndex (==4) [5,3,2,1,6,4]
  print $ findIndex (==7) [5,3,2,1,6,4]
  print $ findIndices (`elem` ['A'..'Z']) "Where Are The Caps?"
  -- zipping more than two lists
  print $ zipWith3 (\x y z -> x + y + z) [1,2,3] [4,5,2,2] [2,2,3]
  print $ zip4 [2,3,3] [2,2,2] [5,5,3] [2,2,2]
  -- lines/words round-trips
  print $ lines "first line\nsecond line\nthird line"
  print $ unlines ["first line","second line","third line"]
  print $ words "hey these are the words in this sentence"
  print $ words "hey these are the words in this\nsentence"
  print $ unwords ["hey","there","mate"]
  -- set-like operations (note: nub is O(n^2))
  print $ nub [1,2,3,4,3,2,1,2,3,4,3,2,1]
  print $ nub "Lots of words and stuff"
  print $ delete 'h' "hey there ghang!"
  print $ delete 'h' .delete 'h' $ "hey there ghang!"
  print $ delete 'h' .delete 'h' .delete 'h' $ "hey there ghang!"
  print $ [1..10] \\ [2,5,9]
  print $ "Im a big baby" \\ "big"
  print $ "hey man" `union` "man what's up"
  print $ [1..7] `union` [5..10]
  print $ [1..7] `intersect` [5..10]
  print $ insert 4 [1,2,3,5,6,7]
  print $ insert 'g' $ ['a'..'f'] ++ ['h'..'z']
  print $ insert 3 [1,2,4,3,2,1]
  -- the *By variants with the local `on`
  let values = [-4.3,-2.4,-1.2,0.4,2.3,5.9,10.5,29.1,5.3,-2.4,-14.5,2.9,2.3]
  print $ groupBy (\x y -> (x > 0) == (y > 0)) values
  print $ groupBy ((==) `on` (> 0)) values
  let xs = [[5,4,5,4,4],[1,2,3],[3,5,4,3],[],[2],[2,2]]
  print $ sortBy (compare `on` length) xs
| solvery/lang-features | haskell/list_3.hs | gpl-2.0 | 4,198 | 3 | 16 | 1,109 | 2,468 | 1,394 | 1,074 | 85 | 1 |
module Hero.UI where
import Control.Monad(when)
import Data.Maybe(fromJust)
import Data.Array.IArray(assocs)
import UI.NCurses
import Hero.Map
-- | Update the NCurses UI by displaying the whole map.
-- | Render the whole map into the ncurses window, one tile per cell.
-- Tiles arrive from 'assocs' in ascending key order; the first tuple
-- component of a 'Point' is treated as the row index.
displayMap :: Map -> Update ()
displayMap m = do moveCursor 0 0
                  go 0 . assocs $ m
    where
      -- walk the (Point, Tile) list, tracking the current row
      go :: Integer -> [(Point, Tile)] -> Update ()
      go _ [] = return ()
      go y ((pt,t):xs) = do when (y /= y') $
                              -- row changed: jump to column 0 of the new row
                              moveCursor y' 0
                            drawString . show $ t
                            go y' xs
          where
            y' = fst pt
-- | Block until a character key is pressed, ignoring every other event.
-- Unlike the previous version, this never calls the partial 'fromJust':
-- a 'Nothing' event simply triggers another wait instead of crashing.
nextChar :: Window -> Curses (Char)
nextChar w = do
    ev <- getEvent w Nothing
    case ev of
      Just (EventCharacter c) -> return c
      _                       -> nextChar w
| brennie/hero-old | Hero/UI.hs | gpl-3.0 | 818 | 0 | 12 | 330 | 281 | 142 | 139 | 20 | 2 |
-- Sortable Test
-- Copyright (C) 2015 Jonathan Lamothe
-- <jonathan@jlamothe.net>
-- This program is free software: you can redistribute it and/or
-- modify it under the terms of the GNU General Public License as
-- published by the Free Software Foundation, either version 3 of the
-- License, or (at your option) any later version.
-- This program is distributed in the hope that it will be useful, but
-- WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see
-- <http://www.gnu.org/licenses/>.
module Daily.Types where
import Data.Map (Map)
import Data.Time.Calendar (Day)
-- | Result of processing the raw input: the field (column) names plus a
-- 'ProcessedRecord' for each calendar day seen.
data ProcessedData =
  ProcessedData { processedFields :: [String] -- ^ field names, in column order
                , processedRecords :: Map Day ProcessedRecord -- ^ per-day stats
                } deriving (Eq, Show)

-- | One 'Stats' entry per field, in 'processedFields' order.
type ProcessedRecord = [Stats]

-- | Summary statistics for a single field over one day.
data Stats =
  Stats { statSum :: Double -- ^ sum of samples
        , statMax :: Double -- ^ largest sample
        , statMin :: Double -- ^ smallest sample
        , statAvg :: Double -- ^ arithmetic mean
        , statStdDev :: Double -- ^ standard deviation
        } deriving (Eq, Show)
-- jl
| jlamothe/sortable-test | Daily/Types.hs | gpl-3.0 | 1,217 | 0 | 9 | 282 | 141 | 92 | 49 | 15 | 0 |
module SignaturesAnalyzerSpec(spec) where
import Language.Mulang.Analyzer hiding (result, spec)
import Test.Hspec
-- | Wrap the expected signature strings in an otherwise-empty analysis result.
result signatures
  = emptyCompletedAnalysisResult { signatures = signatures }

-- | Run a signatures analysis for the given language, source and comment style.
run language content style = analyse (signaturesAnalysis (CodeSample language content) style)
-- | One example per supported signature comment style.
-- NOTE(review): the "SignturesAnalyzer" label is misspelled, but it is a
-- runtime string used in test output, so it is left unchanged here.
spec = describe "SignturesAnalyzer" $ do
  it "handles MulangStyle" $ do
    (run Haskell "f x = x + 1" MulangStyle ) `shouldReturn` (result ["-- f(x)"])
    (run Haskell "f :: Int -> String" MulangStyle ) `shouldReturn` (result ["-- f(Int): String"])

  it "handles HaskellStyle" $ do
    (run Haskell "f x = x + 1" HaskellStyle ) `shouldReturn` (result ["-- f x"])

  it "handles PrologStyle" $ do
    (run Prolog "f(X):-g(X)." PrologStyle ) `shouldReturn` (result ["%% f/1"])

  it "handles UntypedCStyle" $ do
    (run Haskell "f x = x + 1" UntypedCStyle) `shouldReturn` (result ["// f(x)"])
| mumuki/mulang | spec/SignaturesAnalyzerSpec.hs | gpl-3.0 | 908 | 0 | 14 | 179 | 268 | 140 | 128 | 16 | 1 |
{-# LANGUAGE Rank2Types, NoMonomorphismRestriction, ScopedTypeVariables #-}
module Database.Design.Ampersand.Test.Parser.ParserTest (
parseReparse, parseScripts, showErrors
) where
import Prelude hiding (readFile)
import Database.Design.Ampersand.ADL1.PrettyPrinters(prettyPrint)
import Database.Design.Ampersand.Core.ParseTree
import Database.Design.Ampersand.Input.ADL1.CtxError (Guarded(..),whenChecked,CtxError)
import Database.Design.Ampersand.Input.ADL1.Parser
import Database.Design.Ampersand.Input.Parsing
import Database.Design.Ampersand.Misc.Options(Options)
import System.IO (hPutStrLn, stderr)
-- Tries to parse all the given files
-- | Try to parse each file in turn, logging the outcome.  Returns True
-- when every file parses; stops (returning False) at the first failure.
parseScripts :: Options -> [FilePath] -> IO Bool
parseScripts _ [] = return True
parseScripts opts (f:fs) =
    do parsed <- parseADL opts f
       case parsed of
         Checked _ -> do { putStrLn ("Parsed: " ++ f); parseScripts opts fs }
         Errors e -> do { putStrLn ("Cannot parse: " ++ f); showErrors e; return False }
-- | Print a value's 'Show' rendering on stderr, followed by a newline.
printErrLn :: Show a => a -> IO ()
printErrLn x = hPutStrLn stderr $ show x
-- | Print every context error to stderr, one per line.
showErrors :: [CtxError] -> IO ()
showErrors = mapM_ printErrLn
-- | Parse a script into a 'P_Context', discarding the parser's extra output.
parse :: FilePath -> String -> Guarded P_Context
parse file txt = whenChecked (runParser pContext file txt) (Checked . fst)

-- | Round-trip check: parse, pretty-print, and parse the pretty-printed
-- text again (under a synthetic "**pretty" file name).
parseReparse :: FilePath -> String -> Guarded P_Context
parseReparse file txt = whenChecked (parse file txt) reparse
    where reparse p = parse (file ++ "**pretty") (prettyPrint p)
| DanielSchiavini/ampersand | src/Database/Design/Ampersand/Test/Parser/ParserTest.hs | gpl-3.0 | 1,504 | 0 | 14 | 250 | 478 | 260 | 218 | 28 | 2 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module BlastItWithPiss.MonadChoice
(module Control.Monad.Random
,MonadChoice
,chooseFromList
,chooseFromListMaybe
,generateRandomString
,generateSymbolString
) where
import Import
import Control.Monad.Trans.Class
import Control.Monad.Trans.Resource
import Control.Monad.Trans.State.Strict
import Control.Monad.Random
import System.Random.Shuffle
type MonadChoice a = (MonadRandom a, MonadIO a, MonadBaseControl IO a, Applicative a)
{-# INLINABLE chooseFromList #-}
-- | Pick a uniformly random element.  Partial: calling it with an empty
-- list is a programming error and aborts with 'error'.
chooseFromList :: MonadChoice m => [a] -> m a
chooseFromList []  = error "chooseFromList supplied with empty list."
chooseFromList [x] = return x
chooseFromList xs  = do
    i <- getRandomR (0, length xs - 1)
    return (xs !! i)
{-# INLINABLE chooseFromListMaybe #-}
-- | Total variant of 'chooseFromList': Nothing for an empty list,
-- otherwise Just a uniformly random element.
chooseFromListMaybe :: MonadChoice m => [a] -> m (Maybe a)
chooseFromListMaybe []  = return Nothing
chooseFromListMaybe [x] = return (Just x)
chooseFromListMaybe xs  = do
    i <- getRandomR (0, length xs - 1)
    return (Just (xs !! i))
{-# INLINABLE generateRandomString #-}
-- | A random string whose length is drawn from @lengthBounds@ and whose
-- characters are drawn independently and uniformly from @charBounds@.
generateRandomString :: MonadChoice m => (Int, Int) -> (Char, Char) -> m String
generateRandomString lengthBounds charBounds = do
    n  <- getRandomR lengthBounds
    cs <- getRandomRs charBounds
    return (take n cs)
{-# INLINABLE generateSymbolString #-}
-- | A shuffled mixed-alphabet string of length at most @maxlength@: up to
-- maxlength\/6 characters each of digits, upper\/lower Latin, upper\/lower
-- Cyrillic, and spaces, shuffled together.
generateSymbolString :: MonadChoice m => Int -> m String
generateSymbolString maxlength = do
    let plength = maxlength `div` 6
    num <- generateRandomString (0, plength) ('0', '9')
    beng <- generateRandomString (0, plength) ('A', 'Z')
    seng <- generateRandomString (0, plength) ('a', 'z')
    brus <- generateRandomString (0, plength) ('А', 'Я')
    srus <- generateRandomString (0, plength) ('а', 'я')
    spc <- generateRandomString (0, plength) (' ', ' ')
    shuffleM (num++beng++seng++brus++srus++spc)
-- | Orphan instance: thread randomness through 'ResourceT' by delegating
-- every operation to the base monad via 'lift'.
instance MonadRandom m => MonadRandom (ResourceT m) where
    {-# INLINE getRandom #-}
    getRandom = lift getRandom
    {-# INLINE getRandoms #-}
    getRandoms = lift getRandoms
    {-# INLINE getRandomR #-}
    getRandomR = lift . getRandomR
    {-# INLINE getRandomRs #-}
    getRandomRs = lift . getRandomRs
module Main where
import System.Environment(getArgs)
import Control.Monad
import Data.List
import qualified Data.IntMap as IM
-- | For each decimal digit d, the repeating cycle of last digits of
-- d^1, d^2, d^3, ...  (e.g. powers of 2 end in 2,4,8,6,2,4,...).
digitRemainders :: IM.IntMap [Int]
digitRemainders = IM.fromList [
    (0,[0]),
    (1,[1]),
    (2,[2,4,8,6]),
    (3,[3,9,7,1]),
    (4,[4,6]),
    (5,[5]),
    (6,[6]),
    (7,[7,9,3,1]),
    (8,[8,4,2,6]),
    (9,[9,1])]
-- | How often each last digit 0..9 occurs among a^1 .. a^n, computed
-- without exponentiation: the last digit of a^k cycles with the period
-- stored in 'digitRemainders' for a's last digit.
digitStatistics :: Int -> Integer -> [(Int,Integer)]
digitStatistics a n =
    let remainders = digitRemainders IM.! (a `mod` 10)
        -- d full cycles fit into n powers, with m leftover positions
        (d,m) = n `divMod` genericLength remainders
        -- every cycle element occurs d times; the first m occur once more
        countMap = IM.fromList $ zipWith (\x c -> (x,d+c))
                       remainders $ genericReplicate m 1 ++ repeat 0
    in map (\x -> (x,IM.findWithDefault 0 x countMap)) [0..9]
-- | Input line "a n" -> comma-separated "digit: count" pairs for the
-- last digits of a^1 .. a^n.
processLine :: String -> String
processLine line =
    let [as,ns] = words line  -- assumes exactly two whitespace-separated fields
        stats = digitStatistics (read as) (read ns)
    in intercalate ", " $ map (\(d,c) -> show d ++ ": " ++ show c) stats
-- | Read the input file named by the first CLI argument and print one
-- statistics line per input line.
main :: IO ()
main = do
    args <- getArgs
    contents <- readFile (head args)
    mapM_ (putStrLn . processLine) (lines contents)
| cryptica/CodeEval | Challenges/144_DigitStatistics/main.hs | gpl-3.0 | 1,151 | 0 | 16 | 363 | 526 | 302 | 224 | 31 | 1 |
import System.Exit
import Test.HUnit
import StackMachine.Emulator (Opcode(..),memSize)
import StackMachine.SMAssembler
-- | Run the assembler test suite; exit nonzero when any test errored or failed.
main = do
  Counts _ _ errors failures <- runTestTT tests
  if errors > 0 || failures > 0 then exitWith $ ExitFailure 1 else exitWith ExitSuccess
-- Expected assembly output is (code memory, data memory).  For Prs, the
-- string's characters are pushed into high data memory in reverse order,
-- delimited by 0 terminators, and the operand is the address memSize-2.
testAdr = TestCase $ assertEqual "test Adr assembly"
          ([fromEnum Adr,-2],[0])
          (assemble "0 Adr -2  ; some comment")

testLit = TestCase $ assertEqual "test Lit assembly"
          ([fromEnum Lit,0],[0])
          (assemble "0 Lit 0  ; some comment")

-- Prs variants A-F differ only in where spaces sit inside the quoted literal.
testPrsA = TestCase $ assertEqual "test Prs assembly - A"
           ([fromEnum Prs,memSize-2], [0,fromEnum 'c',fromEnum 'b',fromEnum 'a',0])
           (assemble "0 Prs 'abc'")

testPrsB = TestCase $ assertEqual "test Prs assembly - B"
           ([fromEnum Prs,memSize-2], [0,fromEnum 'c',fromEnum 'b',fromEnum 'a',fromEnum ' ',0])
           (assemble "0 Prs ' abc'")

testPrsC = TestCase $ assertEqual "test Prs assembly - C"
           ([fromEnum Prs,memSize-2], [0,fromEnum ' ', fromEnum 'c',fromEnum 'b',fromEnum 'a',0])
           (assemble "0 Prs 'abc '")

testPrsD = TestCase $ assertEqual "test Prs assembly - D"
           ([fromEnum Prs,memSize-2], [0,fromEnum 'c',fromEnum 'b',fromEnum ' ',fromEnum 'a',0])
           (assemble "0 Prs 'a bc'")

testPrsE = TestCase $ assertEqual "test Prs assembly - E"
           ([fromEnum Prs,memSize-2], [0,fromEnum ' ',fromEnum 'c',fromEnum 'b',fromEnum 'a',fromEnum ' ',0])
           (assemble "0 Prs ' abc '")

testPrsF = TestCase $ assertEqual "test Prs assembly - F"
           ([fromEnum Prs,memSize-2], [0,fromEnum 'c',fromEnum 'b',fromEnum ' ', fromEnum ' ',fromEnum 'a',0])
           (assemble "0 Prs 'a  bc'")

-- all Prs cases bundled together
testPrs = TestList [ testPrsA
                   , testPrsB
                   , testPrsC
                   , testPrsD
                   , testPrsE
                   , testPrsF
                   ]

-- top-level suite run by 'main'
tests = TestList [ TestLabel "testAdrAssembly" testAdr
                 , TestLabel "testLitAssembly" testLit
                 , TestLabel "testPrsAssembly" testPrs
                 ]
| zebbo/stack-machine | test/SMAssemblerTests.hs | gpl-3.0 | 2,464 | 0 | 10 | 966 | 662 | 355 | 307 | 40 | 2 |
module MyStandardFunctions where
-- | Does the list contain at least one 'True'?  (Reimplementation of
-- 'or'.)  Replaces the @if x == True then True else ...@ anti-idiom with
-- '||', which also short-circuits on the first 'True'.
myOr :: [Bool] -> Bool
myOr []     = False
myOr (x:xs) = x || myOr xs
-- | Does any element satisfy the predicate?  (Reimplementation of 'any'.)
-- Uses '||' instead of comparing @f x@ against 'True'; short-circuits on
-- the first match.
myAny :: (a -> Bool) -> [a] -> Bool
myAny _ []     = False
myAny f (x:xs) = f x || myAny f xs
-- | Is the value present in the list?  (Reimplementation of 'elem'.)
-- Uses '||' instead of an explicit if\/then\/else; short-circuits on the
-- first occurrence.
myElem :: Eq a => a -> [a] -> Bool
myElem _ []     = False
myElem x (l:ls) = x == l || myElem x ls
-- | Reverse a list.  Uses an accumulator, which is O(n); the previous
-- @last xs : myReverse (init xs)@ formulation re-walked the list on every
-- step and was O(n^2).
myReverse :: [a] -> [a]
myReverse = go []
  where
    -- prepend each element onto the accumulator, naturally reversing
    go acc []     = acc
    go acc (x:xs) = go (x:acc) xs
-- | Flatten one level of nesting.  (Reimplementation of 'concat'.)
squish :: [[a]] -> [a]
squish = foldr (++) []
-- | Map a list-producing function and flatten the results.
-- (Reimplementation of 'concatMap'.)
squishMap :: (a -> [b]) -> [a] -> [b]
squishMap f = foldr (\x acc -> f x ++ acc) []
-- | Flatten one level of nesting, defined in terms of 'squishMap' (the
-- point of the exercise).  Was previously left 'undefined'.
squishAgain :: [[a]] -> [a]
squishAgain = squishMap id
| dkensinger/haskell | haskellbook/mystandardfunctions.hs | gpl-3.0 | 682 | 0 | 8 | 176 | 395 | 211 | 184 | 30 | 2 |
-- | Category-theory-flavoured definition of Monad in terms of 'join'
-- rather than (>>=); deliberately shadows the Prelude class.  With this
-- formulation, bind is recovered as @m >>= f = join (fmap f m)@.
class Functor m => Monad m where
    join :: m (m a) -> m a
    return :: a -> m a
module Qual5(module Qual1) where
import qualified Qual1 as Q
-- | Trivial exported value (fixture for the qualified-export test).
f :: Bool
f = True
-- | Ignores its first argument and multiplies the second by 12
-- (2 * g * z with g = 3 * k = 6); exercises nested where-clauses.
(/**) :: a -> Int -> Int
_ /** z = 2 * g * z -- 12 * z
  where
    g = 3 * k -- 6
      where
        k = 2
infixr 5 /**
-- | Cons-style list with a self-qualified recursive occurrence
-- ('Qual5.Listt3') -- exercises qualified-name resolution in the test.
data Listt3 a = a :>>< (Qual5.Listt3 a)
              | Emptyy
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.DescribeVpcEndpoints
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes one or more of your VPC endpoints.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeVpcEndpoints.html>
module Network.AWS.EC2.DescribeVpcEndpoints
(
-- * Request
DescribeVpcEndpoints
-- ** Request constructor
, describeVpcEndpoints
-- ** Request lenses
, dve1DryRun
, dve1Filters
, dve1MaxResults
, dve1NextToken
, dve1VpcEndpointIds
-- * Response
, DescribeVpcEndpointsResponse
-- ** Response constructor
, describeVpcEndpointsResponse
-- ** Response lenses
, dverNextToken
, dverVpcEndpoints
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- | Request record for the DescribeVpcEndpoints EC2 call; field semantics
-- are documented on the corresponding lenses below.
data DescribeVpcEndpoints = DescribeVpcEndpoints
    { _dve1DryRun         :: Maybe Bool -- ^ permission check only
    , _dve1Filters        :: List "Filter" Filter -- ^ result filters
    , _dve1MaxResults     :: Maybe Int -- ^ page size
    , _dve1NextToken      :: Maybe Text -- ^ pagination token
    , _dve1VpcEndpointIds :: List "item" Text -- ^ specific endpoint IDs
    } deriving (Eq, Read, Show)
-- | 'DescribeVpcEndpoints' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dve1DryRun' @::@ 'Maybe' 'Bool'
--
-- * 'dve1Filters' @::@ ['Filter']
--
-- * 'dve1MaxResults' @::@ 'Maybe' 'Int'
--
-- * 'dve1NextToken' @::@ 'Maybe' 'Text'
--
-- * 'dve1VpcEndpointIds' @::@ ['Text']
--
-- Minimal request: every field unset/empty; populate via the lenses.
describeVpcEndpoints :: DescribeVpcEndpoints
describeVpcEndpoints = DescribeVpcEndpoints
    { _dve1DryRun         = Nothing
    , _dve1VpcEndpointIds = mempty
    , _dve1Filters        = mempty
    , _dve1MaxResults     = Nothing
    , _dve1NextToken      = Nothing
    }
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have the
-- required permissions, the error response is 'DryRunOperation'. Otherwise, it is 'UnauthorizedOperation'.
dve1DryRun :: Lens' DescribeVpcEndpoints (Maybe Bool)
dve1DryRun = lens _dve1DryRun (\rq x -> rq { _dve1DryRun = x })
-- | One or more filters.
--
-- 'service-name': The name of the AWS service.
--
-- 'vpc-id': The ID of the VPC in which the endpoint resides.
--
-- 'vpc-endpoint-id': The ID of the endpoint.
--
-- 'vpc-endpoint-state': The state of the endpoint. ('pending' | 'available' | 'deleting' | 'deleted')
--
--
dve1Filters :: Lens' DescribeVpcEndpoints [Filter]
dve1Filters = lens _dve1Filters (\rq x -> rq { _dve1Filters = x }) . _List
-- | The maximum number of items to return for this request. The request returns a
-- token that you can specify in a subsequent call to get the next set of
-- results.
--
-- Constraint: If the value is greater than 1000, we return only 1000 items.
dve1MaxResults :: Lens' DescribeVpcEndpoints (Maybe Int)
dve1MaxResults = lens _dve1MaxResults (\rq x -> rq { _dve1MaxResults = x })
-- | The token for the next set of items to return. (You received this token from
-- a prior call.)
dve1NextToken :: Lens' DescribeVpcEndpoints (Maybe Text)
dve1NextToken = lens _dve1NextToken (\rq x -> rq { _dve1NextToken = x })
-- | One or more endpoint IDs.
dve1VpcEndpointIds :: Lens' DescribeVpcEndpoints [Text]
dve1VpcEndpointIds = lens _dve1VpcEndpointIds (\rq x -> rq { _dve1VpcEndpointIds = x }) . _List
-- | Response record: the matching endpoints plus an optional pagination token.
data DescribeVpcEndpointsResponse = DescribeVpcEndpointsResponse
    { _dverNextToken    :: Maybe Text -- ^ token for the next page, if any
    , _dverVpcEndpoints :: List "item" VpcEndpoint -- ^ matching endpoints
    } deriving (Eq, Read, Show)
-- | 'DescribeVpcEndpointsResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dverNextToken' @::@ 'Maybe' 'Text'
--
-- * 'dverVpcEndpoints' @::@ ['VpcEndpoint']
--
-- Minimal response value: no endpoints, no continuation token.
describeVpcEndpointsResponse :: DescribeVpcEndpointsResponse
describeVpcEndpointsResponse = DescribeVpcEndpointsResponse
    { _dverVpcEndpoints = mempty
    , _dverNextToken    = Nothing
    }
-- | The token to use when requesting the next set of items. If there are no
-- additional items to return, the string is empty.
dverNextToken :: Lens' DescribeVpcEndpointsResponse (Maybe Text)
dverNextToken = lens _dverNextToken (\rs x -> rs { _dverNextToken = x })
-- | Information about the endpoints.
dverVpcEndpoints :: Lens' DescribeVpcEndpointsResponse [VpcEndpoint]
dverVpcEndpoints = lens _dverVpcEndpoints (\rs x -> rs { _dverVpcEndpoints = x }) . _List
-- | EC2 Query API requests all go to the service root.
instance ToPath DescribeVpcEndpoints where
    toPath = const "/"
-- | Serialise the request into EC2 query parameters; list-valued fields
-- expand into numbered Filter.N / VpcEndpointId.N entries.
instance ToQuery DescribeVpcEndpoints where
    toQuery DescribeVpcEndpoints{..} = mconcat
        [ "DryRun"        =? _dve1DryRun
        , "Filter"        `toQueryList` _dve1Filters
        , "MaxResults"    =? _dve1MaxResults
        , "NextToken"     =? _dve1NextToken
        , "VpcEndpointId" `toQueryList` _dve1VpcEndpointIds
        ]
instance ToHeaders DescribeVpcEndpoints
-- | Wire the request to the EC2 service: POST action name, XML response body.
instance AWSRequest DescribeVpcEndpoints where
    type Sv DescribeVpcEndpoints = EC2
    type Rs DescribeVpcEndpoints = DescribeVpcEndpointsResponse

    request  = post "DescribeVpcEndpoints"
    response = xmlResponse
-- | Decode the response XML; a missing vpcEndpointSet yields an empty list.
instance FromXML DescribeVpcEndpointsResponse where
    parseXML x = DescribeVpcEndpointsResponse
        <$> x .@? "nextToken"
        <*> x .@? "vpcEndpointSet" .!@ mempty
| romanb/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeVpcEndpoints.hs | mpl-2.0 | 6,190 | 0 | 10 | 1,293 | 802 | 487 | 315 | 84 | 1 |
{--License: license.txt --}
module CCAR.Parser.CCARParsec
(readExpr, readExprTree, Stress)
where
import Import
import Data.Text as Text
import CCAR.Model.CcarDataTypes
import CCAR.Model.Maturity
import Control.Monad
-- | Build a CCARError value reporting an unparsable input.
syntaxError i = CCARError $ Text.append "Invalid symbol " i

-- | Any single punctuation character the grammar treats as a symbol.
symbol :: Parser Char
symbol = oneOf "!#$%&|*+-/:<=>?@^_-"
-- | Parse a stress-scenario script into its statements; a parse failure
-- is swallowed and reported as the empty list.
readExprTree :: Text -> [Stress]
readExprTree input = case parse parseStatements (Text.unpack $ msg $ syntaxError input)
                                (Text.unpack input) of
    Left err -> []
    Right val -> val
-- | Like 'readExprTree' but JSON-encodes the result; a parse failure is
-- reported as an encoded 'syntaxError' instead of an empty list.
readExpr :: Text -> Value
readExpr input = case parse (parseStatements) (Text.unpack $ msg $ syntaxError input) (Text.unpack input) of
    Left err -> toJSON $ syntaxError input
    Right val -> toJSON val
{--
Basis points are usually in integer.
Percentages can have floating points therefore lets use rational numbers
--}
-- | Skip one or more whitespace characters (mandatory separator).
spaces :: Parser ()
spaces = skipMany1 space
-- | A leading '-' denotes 'Negative'.
parseNeg :: Parser Sign
parseNeg = char '-' >> return Negative

-- | A leading '+' denotes 'Positive'.
parsePos :: Parser Sign
parsePos = char '+' >> return Positive

-- | Optional sign; when neither '-' nor '+' is present, default to
-- 'Positive' without consuming input.
parseSign :: Parser Sign
parseSign =
        try parseNeg
    <|> try parsePos
    <|> return Positive
    <?> "Error parsing sign"
-- | Parse a basis-point shock: @bps \<sign\> \<integer\>@.
parseBasisPoints :: Parser StressValue
parseBasisPoints = do
    string "bps"
    spaces
    sign <- parseSign
    many space
    pNum <- many1 alphaNum
    return $ BasisPoints sign $ read (pNum)
-- | Parse a percentage shock: @pct \<sign\> \<num\> % \<denom\>@, read as the
-- rational num\/denom.  A zero denominator yields a 'StressValueError'
-- rather than dividing by zero.
parsePercentage :: Parser StressValue
parsePercentage = do
    string "pct"
    spaces
    sign <- parseSign
    many space
    pNum <- many1 alphaNum
    spaces
    many space
    string "%"
    spaces
    pDenom <- liftM read $ many1 digit
    if (pDenom == (0 :: Integer))
        then return $ StressValueError "Divide by zero"
        else return $ Percentage sign $ read (pNum ++ "%" ++ (show pDenom))
-- | A stress value is either a percentage or a basis-point shock.
parseStressValue :: Parser StressValue
parseStressValue = try parsePercentage
                <|> try parseBasisPoints
                <?> "Error parsing stress value"
-- | Parse:
-- @Create Currency Shock for major \<ccy\> minor \<ccy\> \<stress value\>@.
parseCurrencyStress :: Parser Stress
parseCurrencyStress = do
    string "Create"
    spaces
    string "Currency"
    spaces
    string "Shock"
    spaces
    string "for"
    spaces
    string "major"
    spaces
    curr1 <- many1 alphaNum
    spaces
    string "minor"
    spaces
    curr2 <-many1 alphaNum
    spaces
    stressValue <- parseStressValue
    return $ CurrencyStress (CurrencyPair (Currency curr1) (Currency curr2)) stressValue
-- | Parse: @Create Equity Shock for \<symbol\> \<stress value\>@.
parseEquityStress :: Parser Stress
parseEquityStress = do
    string "Create"
    spaces
    string "Equity"
    spaces
    string "Shock"
    spaces
    string "for"
    spaces
    equitySymbol <- many1 alphaNum
    spaces
    stressValue <- parseStressValue
    return $ EquityStress (Equity equitySymbol) stressValue
-- | Parse:
-- @Create Option Shock for \<symbol\> Exp \<month\> \<year\> Strike \<price\> \<stress value\>@.
parseOptionStress :: Parser Stress
parseOptionStress = do
    string "Create"
    spaces
    string "Option"
    spaces
    string "Shock"
    spaces
    string "for"
    spaces
    optionSymbol <- many1 alphaNum
    spaces
    string "Exp"
    spaces
    month <- many1 alphaNum -- Read of month needs to support APR/4 and should be less than 13
    spaces
    year <- many1 alphaNum
    spaces
    string "Strike"
    spaces
    price <- many1 alphaNum
    spaces
    stressValue <- parseStressValue
    return $ OptionStress (CCAROption optionSymbol (Exp (read month) (read year)) (Str $ read price))
                stressValue
-- | Parse one point of a tenor curve: @(\<n\>\<Y|M\> -> \<stress value\>)@,
-- e.g. @(10Y -> bps +25)@.  Year maturities are range-checked via
-- 'checkBounds'; unknown period letters become 'InvalidMaturity'.
parseTenorValue :: Parser (Mat, StressValue)
parseTenorValue = do
    string "("
    tenorValue <- many1 digit
    tenorPeriod <- many1 alphaNum
    many space
    string "->"
    many space
    stressValue <- parseStressValue
    string ")"
    return ((createMat tenorValue tenorPeriod), stressValue)
    where
        createMat tenorValue tenorPeriod =
            case tenorPeriod of
                "Y" -> checkBounds (MatY (read tenorValue))
                "M" -> MatM (read tenorValue)
                _ -> InvalidMaturity
-- | Parse a bracketed, comma-separated list of tenor points:
-- @[ (2Y -> ...), (5Y -> ...) ]@.
parseTenorCurve :: Parser [(Mat, StressValue)]
parseTenorCurve = do
    string "["
    many space
    tenors <- sepBy parseTenorValue (char ',')
    many space
    string "]"
    return tenors
-- | Parse a bare maturity such as @10Y@ or @6M@.
-- NOTE(review): unlike 'parseTenorValue', the "Y" case here skips
-- 'checkBounds' — confirm whether out-of-range years should be rejected
-- for vega expiries too.
parseMaturity :: Parser Mat
parseMaturity = do
    tenorValue <- many1 digit
    tenorPeriod <- many1 alphaNum
    return $ createMat tenorValue tenorPeriod
    where
        createMat tenorValue tenorPeriod =
            case tenorPeriod of
                "Y" -> MatY (read tenorValue)
                "M" -> MatM (read tenorValue)
                _ -> InvalidMaturity
-- | Parse: @Create Rates Shock for \<ccy\> \<tenor curve\>@.
parseRatesStress :: Parser Stress
parseRatesStress = do
    string "Create"
    spaces
    string "Rates"
    spaces
    string "Shock"
    spaces
    string "for"
    spaces
    currency <- many1 alphaNum
    many space
    tenors <- parseTenorCurve
    return $ TenorStress (Currency currency) tenors
-- | Parse:
-- @Create Rates Vega Shock for \<ccy\> Expiry = \<maturity\> \<tenor curve\>@.
parseRatesVegaStress :: Parser Stress
parseRatesVegaStress = do
    string "Create"
    spaces
    string "Rates"
    spaces
    string "Vega"
    spaces
    string "Shock"
    spaces
    string "for"
    spaces
    currency <- many1 alphaNum
    spaces
    string "Expiry"
    spaces
    string "="
    spaces
    tenor <- parseMaturity
    many space
    curve <- parseTenorCurve
    return $ TenorVegaStress (Currency currency) tenor curve
-- | Fallback alternative: always succeeds with a generic 'StressError'.
parserError :: Parser Stress
parserError = do
    return $ StressError $ syntaxError "Unknown error"
-- | A single statement: try each stress form in turn, falling back to
-- 'parserError' (which always succeeds with a 'StressError').
parseExpr :: Parser Stress
parseExpr = do
    try parseEquityStress <|> try parseCurrencyStress
        <|> try parseOptionStress
        <|> try parseRatesStress
        <|> try parseRatesVegaStress
        <|> parserError
-- | Parse zero or more statements, each terminated by 'eol'.  Simplifies
-- the original @do { x <- endBy ...; return x }@ anti-idiom to the bare
-- parser, which is behaviorally identical.
parseStatements :: Parser [Stress]
parseStatements = endBy parseExpr eol
-- | Statement terminator: ";\n" preferred, otherwise a bare ";".
eol :: Parser String
eol = do
    try (string ";\n")
    <|> try (string ";")
module Main where
-- | Build a multiplication table: every ordered pair of input numbers
-- together with its product, the first component varying slowest.
table :: [Integer] -> [(Integer, Integer, Integer)]
table ns = concatMap (\a -> map (\b -> (a, b, a * b)) ns) ns
| Olical/langs | haskell/day1/table.hs | unlicense | 124 | 0 | 7 | 33 | 68 | 40 | 28 | 3 | 1 |
{- |
Module : Bio.Motions.Format.Handle
Description : OutputBackend instance for binary format
License : Apache
Stability : experimental
Portability : unportable
-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TupleSections #-}
module Bio.Motions.Format.Handle
( BinaryBackend
, openBinaryOutput
, openBinaryInput
, withBinaryInput
, seekBinaryKF
) where
import Bio.Motions.Format.ProtoStream
import Bio.Motions.Format.DumpSerialisation
import Bio.Motions.Format.DumpDeserialisation
import qualified Bio.Motions.Format.Proto.Header as ProtoHeader
import Bio.Motions.Representation.Dump
import Bio.Motions.Representation.Class
import qualified Bio.Motions.Callback.Class as CC
import Bio.Motions.Types
import Bio.Motions.Output
import Bio.Motions.Input
import Foreign.C.String
import Foreign.C.Types
import Foreign.Ptr
import Foreign.Storable
import Foreign.ForeignPtr
import Data.ByteString.Unsafe
import qualified Data.ByteString.Lazy as BL
import Text.ProtocolBuffers
import Data.IORef
import Data.Maybe
import Control.Monad
import Control.Exception(bracket)
import Control.Monad.IO.Class
type ProtoHandle = Ptr HStream
data Mode = Reading | Appending
data BinaryBackend = BinaryBackend
{ handle :: ProtoHandle
, framesPerKF :: Int
-- ^Frames per keyframe
, framesSinceLastKF :: IORef Int
-- ^Frames written/read since last keyframe (including that keyframe)
, mode :: Mode
, keyframeIterator :: IORef (Ptr HKeyframeIterator)
, deltaIterator :: IORef (Ptr HDeltaIterator)
, header :: ProtoHeader.Header
}
-- | Writing side: a full keyframe is emitted once 'framesPerKF' frames
-- have been written since the previous keyframe; otherwise a delta is
-- appended.
instance OutputBackend BinaryBackend where
    getNextPush state@BinaryBackend{..} = do
        cur <- readIORef framesSinceLastKF
        pure $ if cur == framesPerKF then
            PushDump $ \dump callbacks step _ _ -> appendKeyframe state dump callbacks step
          else
            PushMove $ \move callbacks step _ -> appendDelta state move callbacks step
    -- Close the C-side stream, then release both iterators held in IORefs.
    closeBackend BinaryBackend{..} = protoClose handle
        >> freeRef keyframeIterator protoFreeKeyframeIterator
        >> freeRef deltaIterator protoFreeDeltaIterator
      where freeRef r f = readIORef r >>= f
    -- Nothing special to do for the final frame in this format.
    pushLastFrame _ _ _ _ _ = pure ()
-- |Create a 'BinaryBackend' writing a fresh @\<outputPrefix\>.bin@ file.
-- The serialised protobuf header is handed to the C side at open time.
openBinaryOutput ::
    Int
    -- ^Number of frames per keyframe
    -> OutputSettings
    -> Dump
    -> IO BinaryBackend
openBinaryOutput framesPerKF OutputSettings{..} dump = do
    handle <- openOutput
    -- Hack. We want the first getNextPush to return PushDump, so
    -- we set framesSinceLastKF to what the if statement is expecting.
    framesSinceLastKF <- newIORef framesPerKF
    let mode = Appending
    -- Iterators are only used when reading; keep them null for output.
    keyframeIterator <- newIORef nullPtr
    deltaIterator <- newIORef nullPtr
    pure BinaryBackend{..}
  where
    openOutput = withCString path $ \cPath ->
        unsafeUseAsCStringLen bytes $ \(ptr, len) ->
            protoOpenNew cPath (fromIntegral framesPerKF) (castPtr ptr) (fromIntegral len)
    header = getHeader simulationName simulationDescription binderTypesNames chainNames dump
    bytes = BL.toStrict . messagePut $ header
    path = outputPrefix ++ ".bin"
-- |Append a protobuf value to a stream, using a protostream function.
-- The message is serialised to a strict ByteString and passed to the
-- C side as a raw pointer/length pair (no copy is made on this side).
genericAppend :: (ReflectDescriptor msg, Wire msg) =>
    ProtoHandle
    -- ^Handle to a protostream object
    -> (Ptr HStream -> Ptr () -> CSize -> IO ())
    -- ^libprotostream handler function
    -> msg
    -- ^Protobuf message to write
    -> IO ()
genericAppend stream f msg =
    unsafeUseAsCStringLen bytes $ \(ptr, len) ->
        f stream (castPtr ptr) (fromIntegral len)
    --TODO toStrict is slow
  where bytes = BL.toStrict . messagePut $ msg
-- |Serialise and append a full keyframe, resetting the
-- frames-since-keyframe counter to 1 (the keyframe counts as a frame).
appendKeyframe :: BinaryBackend -> Dump -> CC.Callbacks -> StepCounter -> IO ()
appendKeyframe BinaryBackend{..} dump callbacks step = do
    writeIORef framesSinceLastKF 1
    genericAppend handle protoAppendKeyframe $ getKeyframe dump callbacks step
-- |Serialise and append a single move as a delta frame, bumping the
-- frames-since-keyframe counter.
appendDelta :: BinaryBackend -> Move -> CC.Callbacks -> StepCounter -> IO ()
appendDelta BinaryBackend{..} move callbacks step = do
    modifyIORef framesSinceLastKF (+1)
    genericAppend handle protoAppendDelta $ serialiseMove move callbacks step
-- |Open an existing binary file for reading. Exactly one input file
-- must be specified; fails if the file holds no valid first keyframe.
openBinaryInput :: InputSettings -> IO BinaryBackend
openBinaryInput InputSettings{..} = do
    handle <- case inputFiles of
        [file] -> withCString file protoOpenExisting
        _ -> fail "Specify exactly one binary input file"
    framesPerKF <- fromIntegral <$> protoGetFPKF handle
    framesSinceLastKF <- newIORef 1
    let mode = Reading
    kfIt <- protoIterKeyframes handle
    deltaIterator <- protoIterDeltas kfIt >>= newIORef
    keyframeIterator <- newIORef kfIt
    -- The C validity check returns non-zero for a usable iterator.
    valid <- (/= 0) <$> protoValidKeyframeIterator handle kfIt
    unless valid $ fail "Cannot get iterator for first frame. Is the file empty?"
    header <- genericGet (protoGetHeader handle) protoFreeHeader id
    pure BinaryBackend{..}
-- |Bracketed variant of 'openBinaryInput': runs the action with the
-- backend plus the chain names and binder-type names decoded from the
-- file header, closing the backend afterwards even on exceptions.
withBinaryInput :: InputSettings -> (BinaryBackend -> [String] -> [String] -> IO a) -> IO a
withBinaryInput s f = bracket open closeBackend (\backend -> f backend (getChainNames' backend) (binderTN backend))
  where
    open = openBinaryInput s
    getChainNames' = fromMaybe (error "error getting chain names") . getChainNames . header
    binderTN = getBinderTypesNames . header
-- | Seek `i` frames from the beginning and return the Dump at this position. `i` must be divisible by fpkf.
-- Also rewinds the delta iterator to the start of the target keyframe
-- and resets the frame counter.
seekBinaryKF :: BinaryBackend -> Int -> IO Dump
seekBinaryKF BinaryBackend{..} i = do
    when (0 /= i `mod` framesPerKF) $ fail "not divisible by fpkf"
    kfi <- readIORef keyframeIterator
    protoAdvanceKeyframeIterator kfi $ fromIntegral (i `div` framesPerKF)
    valid <- (/= 0) <$> protoValidKeyframeIterator handle kfi
    unless valid $ fail "out of bounds"
    -- The old delta iterator belonged to the previous keyframe; free it
    -- and install a fresh one for the keyframe we just seeked to.
    readIORef deltaIterator >>= protoFreeDeltaIterator
    protoIterDeltas kfi >>= writeIORef deltaIterator
    writeIORef framesSinceLastKF 1
    liftIO $ genericGet (protoGetKeyframe kfi) protoFreeKeyframe $
        fromMaybe (error "invalid keyframe") . deserialiseDump header
-- |Get a value from a stream using a protostream getter and its associated deallocation function.
-- The C buffer is wrapped without copying; the finalizer frees it when
-- the ByteString is garbage-collected.
genericGet :: (Wire msg, ReflectDescriptor msg) =>
    (Ptr (Ptr ()) -> Ptr CSize -> IO ())
    -- ^protostream getter function
    -> (Ptr () -> IO ())
    -- ^protostream finalizer (i.e. free)
    -> (msg -> b)
    -- ^deserialising function
    -> IO b
genericGet getter finalizer fun = do
    -- Out-parameters for the C getter: buffer pointer and its size.
    ptrPtr' <- mallocForeignPtr
    sizePtr' <- mallocForeignPtr
    withForeignPtr ptrPtr' $ \ptrPtr ->
        withForeignPtr sizePtr' $ \sizePtr -> do
            getter ptrPtr sizePtr
            buf <- peek ptrPtr
            size <- fromIntegral <$> peek sizePtr
            bs <- unsafePackCStringFinalizer (castPtr buf) size $ finalizer buf
            let msg = case messageGet $ BL.fromStrict bs of
                    Right (m, _) -> m
                    Left e -> error $ "protobuf decoding error: " ++ e
            pure $ fun msg
-- | Reading side: replay moves from the file. When the current delta
-- iterator is exhausted, advance to the next keyframe and diff it
-- against the representation's current state to recover the implied move.
instance (MonadIO m, ReadRepresentation m repr) => MoveProducer m repr BinaryBackend where
    getMove BinaryBackend{..} repr score = do
        delta <- liftIO (readIORef deltaIterator)
        kfi <- liftIO (readIORef keyframeIterator)
        valid <- liftIO (protoValidDeltaIterator kfi delta)
        ret <- case valid of
            -- No more deltas under this keyframe: move to the next one.
            0 -> do
                liftIO $ protoAdvanceKeyframeIterator kfi 1
                liftIO (protoValidKeyframeIterator handle kfi) >>= \case
                    0 -> pure Nothing  -- end of stream
                    _ -> getDiffed kfi >>= \ret -> liftIO $ do
                        protoFreeDeltaIterator delta
                        protoIterDeltas kfi >>= writeIORef deltaIterator
                        pure $ Just ret
            -- A delta is available: decode it and advance.
            _ -> liftIO $ do
                ret <- getDelta delta
                protoAdvanceDeltaIterator delta
                pure $ Just ret
        case ret of
            Just move -> MakeMove move <$> CC.updateCallback repr score move
            Nothing -> pure Stop
      where
        getDelta delta = genericGet (protoGetDelta delta) protoFreeDelta $ \msg ->
            fromMaybe (error "failed to decode Move") $ deserialiseMove msg
        -- Recover the move between two keyframes as a diff of dumps.
        getDiffed kfi = do
            dump' <- liftIO $ genericGet (protoGetKeyframe kfi) protoFreeKeyframe $
                fromMaybe (error "failed to load keyframe") . deserialiseDump header
            dump <- makeDump repr
            case diffDumps dump dump' of
                Right m -> pure m
                Left e -> error $ "adjacent keyframes don't match: " ++ e
    {-# INLINABLE getMove #-}
| Motions/motions | src/Bio/Motions/Format/Handle.hs | apache-2.0 | 8,896 | 0 | 25 | 2,236 | 2,161 | 1,079 | 1,082 | 172 | 2 |
-- | Generates the API for a configured kernel.
module RISK.API
( generateAPI
) where
import Text.Printf
import RISK.Compile (indent, block)
import RISK.Config
import RISK.Spec
-- | Generates the API files for a configured RISK kernel: one C header
-- and one C source file per partition.
generateAPI :: Spec -> IO ()
generateAPI spec = mapM_ emit (partitionMemory (configure spec))
  where
    -- Write both files for a single partition.
    emit (name, memory) = do
      writeFile ("risk_api_" ++ name ++ ".h") (headerFile name memory)
      writeFile ("risk_api_" ++ name ++ ".c") (cFile name memory)
-- | Render the C header (declarations only) for one partition's API:
-- the entry point, yield, and one recv/send-init/send function per channel.
headerFile :: Name -> PartitionMemory -> String
headerFile name memory = unlines
  [ "// The RISK API for the \"" ++ name ++ "\" partition."
  , ""
  , "#ifdef __cplusplus"
  , "extern \"C\" {"
  , "#endif"
  , ""
  , "#include \"risk_lib.h\""
  , ""
  , "// The main entry point for the partition."
  , printf "void %s_main(void);" name
  , ""
  , "// Yield control back to kernel."
  , "void risk_yield(void);"
  , ""
  , "// Receive message on incoming channels.  If no messages are available on a channel, size will be zero."
  , unlines [ printf "void %s_from_%s_recv_msg(word * size, word * msg);" name sender | (_, sender) <- recvBuffers memory ]
  , "// Initialize outgoing channels for transmission.  Must be called after every yield."
  , unlines [ printf "void %s_to_%s_send_init(void);" name receiver | (_, receiver) <- sendBuffers memory ]
  , "// Transmit message on outgoing channels."
  , unlines [ printf "void %s_to_%s_send_msg(word size, word * msg);" name receiver | (_, receiver) <- sendBuffers memory ]
  , ""
  , "#ifdef __cplusplus"
  , "}"
  , "#endif"
  , ""
  ]
-- | Render the C implementation file for one partition's API: a
-- ring-buffer receive function per incoming channel and an append-style
-- send function per outgoing channel.
cFile :: Name -> PartitionMemory -> String
cFile name memory = unlines
  [ "// The RISK API for the \"" ++ name ++ "\" partition."
  , printf ""
  , printf "#include \"risk_api_%s.h\"" name
  , printf ""
  , concatMap recvMessage $ recvBuffers memory
  , concatMap sendMessage $ sendBuffers memory
  , printf ""
  ]
  where
  -- Emit the receive-side C code for one incoming channel.
  -- The buffer is a power-of-two ring; indices are wrapped with `mask`.
  recvMessage :: (Int, Name) -> String
  recvMessage (size, sender) = unlines
    [ printf "// Receive buffer from %s." sender
    , printf "extern word const * const %s_recv_buffer;  // 0x%x words" prefix (2 ^ size :: Int)
    , printf ""
    , printf "// Head and tail indecies of receive buffer from %s.  Head is managed by the partition.  Tail is managed by the kernel." sender
    , printf "extern word * const %s_head_index;" prefix
    , printf "extern word const * const %s_tail_index;" prefix
    , printf ""
    , printf "// Receives a message from the %s partition.  If no messages are available, size will be zero." sender
    , printf "void %s_recv_msg(word * size, word * msg)" prefix
    , block $ unlines
      [ "word i;"
      , printf "if (*%s_head_index == *%s_tail_index)" prefix prefix
      , indent "*size = 0;"
      , printf "else"
      , block $ unlines
        [ printf "*size = %s_recv_buffer[*%s_head_index & 0x%x];" prefix prefix mask
        , printf "*%s_head_index = *%s_head_index + 1;" prefix prefix
        , printf "for (i = 0; i < *size; i++)"
        , block $ unlines
          [ printf "msg[i] = %s_recv_buffer[*%s_head_index & 0x%x];" prefix prefix mask
          , printf "*%s_head_index = *%s_head_index + 1;" prefix prefix
          ]
        ]
      ]
    , ""
    ]
    where
    prefix :: String
    prefix = printf "%s_from_%s" name sender
    -- Index mask for the power-of-two ring buffer.
    mask :: Int
    mask = 2 ^ size - 1
  -- Emit the send-side C code for one outgoing channel.
  sendMessage :: (Int, Name) -> String
  sendMessage (size, receiver) = unlines
    [ printf "// Sending buffer to %s." receiver
    , printf "extern word * const %s_send_buffer;  // 0x%x words" prefix (2 ^ size :: Int)
    , printf ""
    , printf "static word %s_send_index;" prefix
    , printf ""
    , printf "// Initialize the send channel.  Resets the index to the send buffer."
    , printf "// Zeros the first (size) element in the buffer to clear it."
    , printf "void %s_send_init(void)" prefix
    , block $ unlines
      [ printf "%s_send_index = 0;" prefix
      , printf "%s_send_buffer[0] = 0;" prefix
      ]
    , printf ""
    , printf "// Sends a message to the %s partition." receiver
    , printf "void %s_send_msg(word size, word * msg)" prefix
    , block $ unlines
      [ printf "word i;"
      , printf "%s_send_buffer[%s_send_index++] = size;" prefix prefix
      , printf "for (i = 0; i < size; i++)"
      , indent $ printf "%s_send_buffer[%s_send_index++] = msg[i];" prefix prefix
      , printf "%s_send_buffer[%s_send_index] = 0;" prefix prefix
      ]
    , ""
    ]
    where
    prefix :: String
    prefix = printf "%s_to_%s" name receiver
| tomahawkins/risk | RISK/API.hs | bsd-2-clause | 4,533 | 0 | 18 | 1,117 | 920 | 485 | 435 | 100 | 1 |
{-# LANGUAGE CPP, TemplateHaskell #-}
{-| Shim library for supporting various Template Haskell versions
-}
{-
Copyright (C) 2018 Ganeti Project Contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.THH.Compat
( gntInstanceD
, gntDataD
, extractDataDConstructors
, myNotStrict
) where
import Language.Haskell.TH
-- | Convert Names to DerivClauses
--
-- template-haskell 2.12 (GHC 8.2) has changed the DataD class of
-- constructors to expect [DerivClause] instead of [Names]. Handle this in a
-- backwards-compatible way.
#if MIN_VERSION_template_haskell(2,12,0)
derivesFromNames :: [Name] -> [DerivClause]
derivesFromNames names = [DerivClause Nothing $ map ConT names]
#else
derivesFromNames :: [Name] -> Cxt
derivesFromNames names = map ConT names
#endif
-- | DataD "constructor" function
--
-- Handle TH 2.11 and 2.12 changes in a transparent manner using the pre-2.11
-- API.
gntDataD :: Cxt -> Name -> [TyVarBndr] -> [Con] -> [Name] -> Dec
gntDataD x y z a b =
#if MIN_VERSION_template_haskell(2,12,0)
  -- 2.12: extra kind-signature slot (Nothing) and a [DerivClause].
  DataD x y z Nothing a $ derivesFromNames b
#elif MIN_VERSION_template_haskell(2,11,0)
  -- 2.11: kind-signature slot present, deriving still a plain Cxt.
  DataD x y z Nothing a $ map ConT b
#else
  DataD x y z a b
#endif
-- | InstanceD "constructor" function
--
-- Handle TH 2.11 and 2.12 changes in a transparent manner using the pre-2.11
-- API.
gntInstanceD :: Cxt -> Type -> [Dec] -> Dec
gntInstanceD x y =
#if MIN_VERSION_template_haskell(2,11,0)
  -- 2.11 added an overlap-mode slot; we never request overlap.
  InstanceD Nothing x y
#else
  InstanceD x y
#endif
-- | Extract constructors from a DataD instance
--
-- Handle TH 2.11 changes by abstracting pattern matching against DataD.
extractDataDConstructors :: Info -> Maybe [Con]
extractDataDConstructors info =
  case info of
#if MIN_VERSION_template_haskell(2,11,0)
    -- TH >= 2.11 adds a 'Maybe Kind' field before the constructor list.
    TyConI (DataD _ _ _ Nothing cons _) -> Just cons
#else
    TyConI (DataD _ _ _ cons _) -> Just cons
#endif
    _ -> Nothing
-- | Strict has been replaced by Bang, so redefine NotStrict in terms of the
-- latter.
#if MIN_VERSION_template_haskell(2,11,0)
myNotStrict :: Bang
myNotStrict = Bang NoSourceUnpackedness NoSourceStrictness
#else
myNotStrict = NotStrict
#endif
| mbakke/ganeti | src/Ganeti/THH/Compat.hs | bsd-2-clause | 3,350 | 0 | 10 | 561 | 282 | 166 | 116 | 21 | 2 |
import System.IO
-- | Echo standard input to standard output (with a trailing newline).
main :: IO ()
main = putStrLn =<< getContents
| mightymoose/RubyQuiz | quiz_2/solution.hs | bsd-2-clause | 50 | 0 | 5 | 8 | 15 | 8 | 7 | 2 | 1 |
module Combinatorics.Symbolic.MathExpr.Parser where
import Combinatorics.Symbolic.MathExpr
import Text.Parsec
import Text.Parsec.Expr
import Text.Parsec.Token
import Text.Parsec.Language (haskell)
{-
A few things
I probably want to parse the tex version of the symbols
or atleast something similiar
-}
-- All parsers below are unimplemented placeholders; each is intended
-- to parse the rendering of the corresponding 'MathExpr' symbol (the
-- module comment above suggests a TeX-like syntax is the target).
pInfinity = undefined
pLit = undefined
pAdd = undefined
pSumInv = undefined
pProduct = undefined
pProductInv = undefined
pLog = undefined
pExpon = undefined
pRaise = undefined
pSigma = undefined
pBigProduct = undefined
pLim = undefined
pDerivative = undefined
pAntiDerivative = undefined
pFactorial = undefined
pFunction = undefined
pApp = undefined
pVar = undefined
| jfischoff/symbolic-combinatorics | src/Combinatorics/Symbolic/MathExpr/Parser.hs | bsd-3-clause | 864 | 0 | 5 | 283 | 133 | 83 | 50 | 24 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-
Copyright (c) 2013, Markus Barenhoff <alios@alios.org>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Data.ISO8211.Parser (DataFile
,DataDescriptiveRecord(..)
,DataRecord
,DataDescriptiveField
,DataStructureCode (..)
,DataTypeCode (..)
,LexicalLevel (..)
,DataStructure (..)
,dsSingleData, dsLinearStruct, dsMultiDimStruct
,DataFormat (..)
,DataField(..)
,DataFieldT(..)
,parseDataFile
) where
import Data.ByteString (ByteString)
import Data.Binary
import Data.Binary.Get
import Data.Bits
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import Data.Tree
import Data.Attoparsec as P
import qualified Data.Attoparsec.ByteString.Char8 as C8
import Data.Data
import Data.Map (Map)
import qualified Data.Map as Map
import Data.SafeCopy (base, deriveSafeCopy)
import Data.Typeable
--
-- external type definitions
--
data LexicalLevel =
LexicalLevel0 | LexicalLevel1 | LexicalLevel2
deriving (Eq, Ord, Read, Show, Data, Typeable, Enum)
$(deriveSafeCopy 0 'base ''LexicalLevel)
data DataFormat = CharacterData (Maybe Integer)
| ImplicitPoint (Maybe Integer)
| ExplicitPoint (Maybe Integer)
| BitString (Maybe Integer)
| SubFieldLabel (Maybe Integer)
| SignedInt Integer
| UnsignedInt Integer
deriving (Eq, Ord, Read, Show, Data, Typeable)
$(deriveSafeCopy 0 'base ''DataFormat)
data DataFieldT =
DFString !String | DFInteger !Integer | DFReal !Double | DFByteString !ByteString
deriving (Eq, Ord, Read, Show, Data, Typeable)
$(deriveSafeCopy 0 'base ''DataFieldT)
data DataStructure = SD DataFieldT
| LS (Map String DataFieldT)
| MDS [Map String DataFieldT]
deriving (Eq, Ord, Read, Show, Data, Typeable)
$(deriveSafeCopy 0 'base ''DataStructure)
type DataFile = (DataDescriptiveRecord, [DataRecord])
type DataRecord = Tree DataFieldR
type DataFieldR = (String, DataStructure)
data DataDescriptiveRecord =
DDR {
ddrFileName :: !String,
ddrFieldStructure :: ![(String, String)],
ddrDataFormats :: Map String DataDescriptiveField -- [(String, DataDescriptiveField)]
} deriving (Show, Eq)
class DataField t where
fromDataField :: DataFieldT -> t
instance DataField String where
fromDataField (DFString s) = s
fromDataField v = error $ "fromDataFieldT is not a String: " ++ show v
instance DataField Integer where
fromDataField (DFInteger i) = i
fromDataField v = error $ "fromDataFieldT is not an Integer: " ++ show v
instance DataField Double where
fromDataField (DFReal r) = r
fromDataField v = error $ "fromDataFieldT is not a Double: " ++ show v
instance DataField ByteString where
fromDataField (DFByteString bs) = bs
fromDataField v = error $ "fromDataFieldT is not a ByteString: " ++ show v
type DataDescriptiveField =
(DataStructureCode, DataTypeCode, LexicalLevel, String, [(String, DataFormat)])
data DataStructureCode =
SingleDataItem | LinearStructure | MultiDimStructure
deriving (Eq, Ord, Read, Show, Data, Typeable, Enum)
data DataTypeCode =
CharacterString | ImplicitPointInt | ImplicitPointReal | BinaryForm | MixedDataType
deriving (Eq, Ord, Read, Show, Data, Typeable)
--
-- internal type definitions
--
type DataDescr = (DataStructureCode, [String])
type FieldControlField =
(DataStructureCode, DataTypeCode, LexicalLevel, String, [(String, String)])
instance Enum DataTypeCode where
toEnum 0 = CharacterString
toEnum 1 = ImplicitPointInt
toEnum 2 = ImplicitPointReal
toEnum 5 = BinaryForm
toEnum 6 = MixedDataType
toEnum i = error $ "Unknown DataTypeCode" ++ show i
fromEnum CharacterString = 0
fromEnum ImplicitPointInt = 1
fromEnum ImplicitPointReal = 2
fromEnum BinaryForm = 5
fromEnum MixedDataType = 6
-- | Partial projection: extract the single data item.
-- Calls 'error' if the structure is not an 'SD'.
dsSingleData :: DataStructure -> DataFieldT
dsSingleData (SD f) = f
dsSingleData t = error $ "not a Single Data Item: " ++ show t

-- | Partial projection: extract the linear structure's field map.
-- Calls 'error' if the structure is not an 'LS'.
dsLinearStruct :: DataStructure -> (Map String DataFieldT)
dsLinearStruct (LS fs) = fs
dsLinearStruct t = error $ "not a Linear Structure: " ++ show t

-- | Partial projection: extract the multi-dimensional structure's rows.
-- Calls 'error' if the structure is not an 'MDS'.
dsMultiDimStruct :: DataStructure -> [Map String DataFieldT]
dsMultiDimStruct (MDS fss) = fss
dsMultiDimStruct t = error $ "not a Multi Dim Structure: " ++ show t
--
-- exports
--
parseDataFile :: Parser DataFile
parseDataFile = do
ddr <- parseDDR <?> "data descriptive record"
drs <- manyTill (parseDR ddr) (try endOfInput) <?> "data record"
return (ddr, drs)
parseDDR :: Parser DataDescriptiveRecord
parseDDR = fmap snd parseDDR'
parseDDR' :: Parser ((Char, Char, Char, Char, Char, String), DataDescriptiveRecord)
parseDDR' = do
(ichglvl, lid, ext, ver, appi, fcl, extCharSet, fieldAreaLen) <- parseLeader
(shieldTagF, ds) <- parseEntryMap
bs <- P.take fieldAreaLen
let ((_, rr):rs) = [(t, BS.take l $ BS.drop p bs) | (t,p,l) <- ds]
let (_,_,_,fname, fstruct) =
either error id $ parseOnly (parseFieldControlField fcl shieldTagF) rr
let rs'= [(a, either error id $ parseOnly
(parseDataDescriptiveField fcl) b) | (a,b) <- rs]
let ddr = DDR fname fstruct $ Map.fromList rs'
let retval = ((ichglvl, lid, ext, ver, appi, extCharSet), ddr)
return retval
parseDR :: DataDescriptiveRecord -> Parser DataRecord
parseDR ddr = fmap snd (parseDR' ddr)
parseDR' :: DataDescriptiveRecord -> Parser ((Char, Char, Char, Char, Char, String), DataRecord)
parseDR' ddr = do
(ichglvl, lid, ext, ver, appi, _, extCharSet, fieldAreaLen) <- parseLeader
(_, ds) <- parseEntryMap
bs <- P.take fieldAreaLen
let bss = [ (t, BS.take l $ BS.drop p bs) | (t,p,l) <- ds]
let dfs = map (\(t, lbs) -> (t, either error id $
parseOnly (ddrParserLookup ddr t) lbs)) bss
return ((ichglvl, lid, ext, ver, appi, extCharSet), drsToTree' ddr dfs)
--
-- internal
--
unitTerminator, recordTerminator :: Char
unitTerminator = '\US'
recordTerminator = '\RS'
parseUT, parseRT :: Parser ()
parseUT = do _ <- C8.char unitTerminator; return ()
parseRT = do _ <- C8.char recordTerminator; return ()
ddrParserLookup :: DataDescriptiveRecord -> String -> Parser DataStructure
ddrParserLookup ddr t = do
let (sc, _, esc, _, dfs) = maybe (error $ "unknown field: " ++ t) id
$ Map.lookup t (ddrDataFormats ddr)
dfsParser dfs esc sc
dfsParser ::
[(String, DataFormat)] -> LexicalLevel -> DataStructureCode
-> Parser DataStructure
dfsParser dfs esc sc =
let mfp = sequence $ map fieldParser $ dfs
fieldParser (t, p) = do
v <- dataFormatToParser esc p
return $ (t, v)
in do res <- case sc of
SingleDataItem -> do
(_, r) <- fmap head $ mfp
_ <- parseRT
return $ SD r
LinearStructure -> do
r <- fmap (LS . Map.fromList) mfp
_ <- parseRT
return $ r
MultiDimStructure ->
fmap (MDS . (map Map.fromList) ) $ mfp `manyTill` (try $ parseRT)
return res
dataFormatToParser :: LexicalLevel -> DataFormat -> Parser DataFieldT
dataFormatToParser _ (CharacterData l) = fmap DFString $
case l of
Nothing -> C8.anyChar `manyTill` (try $ parseUT)
Just i -> count (fromInteger i) C8.anyChar
dataFormatToParser esc (ImplicitPoint l) = do
(DFString s) <- dataFormatToParser esc (CharacterData l)
return $ if (length s == 0)
then DFInteger 0
else DFInteger (read s)
dataFormatToParser esc (ExplicitPoint l) = do
(DFString s) <- dataFormatToParser esc (CharacterData l)
return $ if (length s == 0)
then DFReal 0.0
else DFReal (read s)
dataFormatToParser _ (BitString l) = fmap DFByteString $
case l of
Nothing -> fmap BS.pack $ anyWord8 `manyTill` (try $ parseUT)
Just l' -> P.take (fromInteger l' `div` 8)
dataFormatToParser _ (UnsignedInt l) = do
bs <- P.take (fromInteger l)
case l of
1 -> return $ DFInteger $ toInteger $ parseUInt8 (BL.fromChunks [bs])
2 -> return $ DFInteger $ toInteger $ parseUInt16 (BL.fromChunks [bs])
4 -> return $ DFInteger $ toInteger$ parseUInt32 (BL.fromChunks [bs])
i -> error $ "invalid int length: " ++ show i
dataFormatToParser _ (SignedInt l) = do
bs <- P.take (fromInteger l)
case l of
1 -> return $ DFInteger $ toInteger $ parseInt8 (BL.fromChunks [bs])
2 -> return $ DFInteger $ toInteger $ parseInt16 (BL.fromChunks [bs])
4 -> return $ DFInteger $ toInteger $ parseInt32 (BL.fromChunks [bs])
i -> error $ "invalid int length: " ++ show i
dataFormatToParser _ (SubFieldLabel _) = error $ "@ subfield label not implemented"
parseUInt8' :: BL.ByteString -> Word8
parseUInt8' = runGet getWord8
parseUInt16' :: BL.ByteString -> Word16
parseUInt16' = runGet getWord16le
parseUInt32' :: BL.ByteString -> Word32
parseUInt32' = runGet getWord32le
parseInt8,parseUInt8, parseInt16,parseUInt16,parseInt32,parseUInt32 :: BL.ByteString -> Integer
parseUInt8 = toInteger . parseUInt8'
parseInt8 = sintParser . parseUInt8'
parseUInt16 = toInteger . parseUInt16'
parseInt16 = sintParser . parseUInt16'
parseUInt32 = toInteger . parseUInt32'
parseInt32 = sintParser . parseUInt32'
-- | Reinterpret an unsigned word as a signed Integer using two's
-- complement: if the most significant bit is set, the value is the
-- negated two's complement; otherwise it is taken as-is.
sintParser :: (Integral a, Bits a) => a -> Integer
sintParser p =
  let ui = p
      -- two's complement of the raw value
      c2 = toInteger $ 1 + (complement ui)
      -- sign bit (highest bit of the fixed-width word)
      msbSet = testBit ui ((bitSize ui) - 1)
  in if (msbSet) then (negate c2) else (toInteger ui)
drsToTree' :: DataDescriptiveRecord -> [DataFieldR] -> Tree DataFieldR
drsToTree' ddr dfs =
let cs' k = ddrLookupChildFields ddr k
cs k = filter (\(k',_) -> k' `elem` (cs' k)) dfs
in head $ unfoldForest (\b@(k,_) -> (b, cs k)) dfs
-- | All child field tags of the given parent tag, taken from the DDR's
-- field structure (parent, child) pairs.  Later pairs end up earlier in
-- the result, matching the original accumulator order.
ddrLookupChildFields :: DataDescriptiveRecord -> String -> [String]
ddrLookupChildFields ddr fn = go [] (ddrFieldStructure ddr)
  where
    go :: [String] -> [(String, String)] -> [String]
    go acc [] = acc
    go acc ((parent, child) : rest)
      | parent == fn = go (child : acc) rest
      | otherwise    = go acc rest
-- | Run the parser, yielding 'Just' its result, or 'Nothing' (consuming
-- no input) if it fails.
tryMaybe :: Parser a -> Parser (Maybe a)
tryMaybe p = option Nothing (fmap Just (try p))
parseDataFormatLength :: Parser (Maybe Integer)
parseDataFormatLength = tryMaybe $ do
_ <- C8.char8 '('
ds <- C8.digit `manyTill` C8.char8 ')'
return $ read ds
parseDataFormat' :: Parser DataFormat
parseDataFormat' =
let parsers =
[ do _ <- C8.char 'A'
fl <- parseDataFormatLength
return $ CharacterData fl
, do _ <- C8.char 'I'
fl <- parseDataFormatLength
return $ ImplicitPoint fl
, do _ <- C8.char 'R'
fl <- parseDataFormatLength
return $ ExplicitPoint fl
, do _ <- C8.char 'B'
fl <- parseDataFormatLength
return $ BitString fl
, do _ <- C8.char '@'
fl <- parseDataFormatLength
return $ SubFieldLabel fl
, do _ <- C8.char 'b'
t <- choice $ map try
[ C8.char '1', C8.char '2']
l <- fmap toInteger $ parseInt 1
case t of
'1' -> return $ UnsignedInt l
'2' -> return $ SignedInt l
i -> error $ "must be 1 (unsigned) or 2 (signed) not: " ++ show i
]
in choice $ map try parsers
parseDataFormat :: Parser (Integer, DataFormat)
parseDataFormat = do
mul <- tryMaybe $ fmap read $ try $ many1 C8.digit
fmt <- parseDataFormat'
return (maybe 1 id mul, fmt)
parseDataFormats :: Parser [DataFormat]
parseDataFormats = do
_ <- C8.char '('
fmts <- sepBy parseDataFormat (C8.char ',')
_ <- C8.char ')'
return . concat . map (\ (c,f) -> replicate (fromInteger c) f) $ fmts
parseLeader ::
Parser (Char, Char, Char, Char, Char, Int, String, Int)
parseLeader = do
len <- parseInt 5 <?> "record len"
ichglvl <- C8.anyChar <?> "interchange level"
lid <- C8.anyChar <?> "leader identifier"
ext <- C8.anyChar <?> "In line code extension indicator"
ver <- C8.anyChar <?> "version number"
appi <- C8.anyChar <?> "application indicator"
fcl <- parseFCL <?> "field control length"
baseAddr <- parseInt 5 <?> "field control length"
extCharSet <- count 3 C8.anyChar <?> "Extended character set indicator"
let fieldAreaLen = len - baseAddr
return (ichglvl, lid, ext, ver, appi, fcl, extCharSet, fieldAreaLen)
parseFCL :: Parser Int
parseFCL = choice [
try $ parseInt 2,
do _ <- count 2 $ C8.char ' '
return 0
]
parseEntryMap :: Parser (Int, [(String, Int, Int)])
parseEntryMap = do
sFieldLengthF <- parseInt 1 <?> "size of field length field"
sFieldPosF <- parseInt 1 <?> "size of field position field"
_ <- C8.char8 '0'
sFieldTagF <- parseInt 1 <?> "size of field tag field"
let dirParser =
do ftag <- count sFieldTagF C8.anyChar
flen <- parseInt sFieldLengthF
fpos <- parseInt sFieldPosF
return (ftag, fpos, flen)
ds <- manyTill dirParser (try $ parseRT)
return (sFieldTagF, ds)
parseInt :: Int -> Parser Int
parseInt l = fmap read (count l $ C8.anyChar)
parseDDFCtrl :: Parser (DataStructureCode, DataTypeCode, Char, Char, LexicalLevel)
parseDDFCtrl = do
s <- parseInt 1 <?> "data structure code"
t <- parseInt 1 <?> "data type code"
_ <- (count 2 $ C8.char '0') <?> "required 00 characters"
f <- C8.anyChar <?> "printable field terminator"
u <- C8.anyChar <?> "printable unit terminator"
esc' <- count 3 C8.anyChar
let esc =
if (esc' == " ")
then 0 else if (esc' == "-A ")
then 1 else if (esc' == "%/@") then 2 else
error $ "unknown escape sequence " ++ esc'
return (toEnum s, toEnum t, f,u, toEnum esc)
parseTagPair :: Int -> Parser (String, String)
parseTagPair sFieldTagF = do
k <- count sFieldTagF C8.anyChar
v <- count sFieldTagF C8.anyChar
return (k,v)
parseFieldControlField :: Int -> Int -> Parser FieldControlField
parseFieldControlField fcl sFieldTagF = do
fieldCtrlBS <- P.take fcl
case (parseOnly parseDDFCtrl fieldCtrlBS) of
Left err -> fail err
Right (SingleDataItem, CharacterString, _ ,_,esc) ->
do name <- manyTill C8.anyChar (try $ parseUT) <?> "name"
fieldTags <- manyTill (parseTagPair sFieldTagF) (try $ parseRT)
return (toEnum 0, toEnum 0,esc,name, fieldTags)
Right _ -> fail $ "first DDR filed must have tag 0000"
parseDataDescr :: DataStructureCode -> Parser DataDescr
parseDataDescr SingleDataItem = do
fn <- manyTill C8.anyChar (try $ parseUT)
return (SingleDataItem, [fn])
parseDataDescr LinearStructure = do
fs <- (many1 $ C8.satisfy $ C8.notInClass (unitTerminator:"!")) `sepBy` (try $ C8.char '!')
parseUT
return (LinearStructure, fs)
parseDataDescr MultiDimStructure = do
_ <- C8.char '*'
fs <- (many1 $ C8.satisfy $ C8.notInClass (unitTerminator:"!")) `sepBy` (try $ C8.char '!')
parseUT
return (MultiDimStructure, fs)
parseDataDescriptiveField ::
Int -> Parser DataDescriptiveField
parseDataDescriptiveField fcl = do
fieldCtrlBS <- P.take fcl
case (parseOnly parseDDFCtrl fieldCtrlBS) of
Left err -> fail err
Right (s,t,_,_,esc) ->
do name <- manyTill C8.anyChar (try $ parseUT) <?> "name"
label <- (do (_,ls) <- parseDataDescr s; return ls) <?> "label"
format <- (do fs <- parseDataFormats; parseRT; return fs) <?> "format"
return (s, t, esc,name, zip label format)
| alios/nauticlib | src/Data/ISO8211/Parser.hs | bsd-3-clause | 17,795 | 0 | 20 | 4,638 | 5,412 | 2,792 | 2,620 | 377 | 11 |
module Data.Rivers.Idiom where
import Prelude (($))
-- | An Applicative-style ("idiom") interface for stream-like
-- containers.  The default methods are defined in terms of each
-- other, so a minimal complete definition is ('pure' or 'srepeat')
-- together with ('<>' or 'zip').
class Idiom f where
      -- | Embed a value; by default a synonym of 'srepeat'.
      pure :: a -> f a
      -- | Repeat a value; by default a synonym of 'pure'.
      srepeat :: a -> f a
      -- | Pointwise application (analogue of Applicative's @\<*\>@).
      (<>) :: f (a -> b) -> f a -> f b
      -- | Map a function over the container.
      smap :: (a -> b) -> f a -> f b
      -- | Combine two containers pointwise with a binary function.
      zip :: (a -> b -> c) -> f a -> f b -> f c
      pure = srepeat
      (<>) = zip ($)
      srepeat = pure
      smap f s = pure f <> s
      zip g s t = pure g <> s <> t
| d-rive/rivers | Data/Rivers/Idiom.hs | bsd-3-clause | 468 | 0 | 10 | 246 | 206 | 108 | 98 | 13 | 0 |
module Data.Time.LocalTime
(
module Data.Time.LocalTime.TimeZone,
module Data.Time.LocalTime.TimeOfDay,
module Data.Time.LocalTime.LocalTime
) where
import Data.Time.LocalTime.TimeZone
import Data.Time.LocalTime.TimeOfDay
import Data.Time.LocalTime.LocalTime
| bergmark/time | lib/Data/Time/LocalTime.hs | bsd-3-clause | 272 | 0 | 5 | 30 | 54 | 39 | 15 | 8 | 0 |
{-# LANGUAGE FlexibleInstances #-}
module Patch where
import Control.Applicative
import Control.Monad.ST
import Data.Array
import Data.List
import Data.Random
import Data.Random.Extras
import System.Random
import Row
import Tile
import Utilities
import Maze
-- | Width and height of a patch, measured in tiles.
data Dimensions = Dimensions Int Int deriving Show
-- | A rectangular patch: its dimensions plus one entry per row.
data Patch a = Patch Dimensions [a]
-- | The concrete patch used throughout: rows of tiles.
type PR = Patch (Row Tile)
-- | Render one row per line; the dimensions are not shown.
instance Show PR where
  show (Patch _ p) = unlines $ map show p
-- | Map over the row list, leaving the dimensions unchanged.
instance Functor Patch where
  fmap f (Patch d l) = Patch d $ fmap f l
-- | Build an empty patch of the given size: a solid wall row at the
-- top and bottom, with @h - 2@ empty rows between them.
emptyP :: Dimensions -> PR
emptyP d@(Dimensions w h) = Patch d rows
  where
    rows = wall w : replicate (h - 2) (emptyR w) ++ [wall w]
-- | Apply @f@ to the tile at point @c@.  Points outside the walkable
-- interior (see 'validPoint') leave the patch unchanged.
updateP :: (Tile -> Tile) -> Point -> PR -> PR
updateP f c@(Point x y) p@(Patch dims rs)
  | (not . validPoint p) c = p
  | otherwise = Patch dims $ update (updateR f x) y rs
-- | A point is valid when it lies strictly inside the border walls
-- and the tile there currently holds a 'Space'.
validPoint :: PR -> Point -> Bool
validPoint p@(Patch (Dimensions w h) _) c@(Point x y) =
  x > 0 && y > 0 && x < w-1 && y < h-1 && isSpaceP p c
-- | Whether the tile at the given point is a 'Space'.
-- NOTE(review): uses the partial index @(!!)@ — callers must keep @y@
-- within the row list (e.g. via 'validPoint').
isSpaceP :: PR -> Point -> Bool
isSpaceP (Patch _ rs) (Point x y) = isSpaceR (rs !! y) x
-- | Largest interior x coordinate (the two wall columns excluded).
maxXP :: PR -> Int
maxXP (Patch (Dimensions x _) _) = x - 2
-- | Largest interior y coordinate (the two wall rows excluded).
maxYP :: PR -> Int
maxYP (Patch (Dimensions _ y) _) = y - 2
-- | Apply @f@ to a uniformly chosen 'Space' tile of the patch.
randUpdateP :: (Tile -> Tile) -> PR -> IO PR
randUpdateP f p = do
  chosen <- randPoint p
  return (updateP f chosen p)
-- | Pick a uniformly random 'Space' point from the patch.
randPoint :: PR -> IO Point
randPoint p = runRVar (choice $ tsP p Space) StdRandom
-- | All points whose tile equals @t@, collected row by row.
-- NOTE(review): @findIndices (const True) rSpaces@ is simply every
-- row index; it supplies the y coordinate for each row in 'zipWith'.
tsP :: PR -> Tile -> [Point]
tsP (Patch _ rs) t = concat $ zipWith coords indexes rSpaces
  where coords y = map (`Point` y)
        indexes = findIndices (const True) rSpaces
        rSpaces = fmap (tsR t) rs
-- | Location of the up-staircase.
-- NOTE(review): partial — 'head' fails if the patch has no 'UStairs'.
usP :: PR -> Point
usP p = head $ tsP p UStairs
-- | Location of the down-staircase.
-- NOTE(review): partial — 'head' fails if the patch has no 'DStairs'.
dsP :: PR -> Point
dsP p = head $ tsP p DStairs
-- | A small empty 50x20 patch, handy for interactive testing.
testP :: PR
testP = emptyP (Dimensions 50 20)
-- | Generate a random @w@ x @h@ maze and convert it into a tile patch.
mazeP :: Dimensions -> IO PR
mazeP (Dimensions w h) = newStdGen >>= stToIO . maze w h >>= return . mazePatch
-- | Render an abstract 'Maze' (arrays of right\/below walls) as a
-- patch.  Each maze cell expands to a 2x2 tile area: a cell row whose
-- right-hand walls come from 'rightWalls', followed by a wall row
-- whose openings come from 'belowWalls'; a solid top row and left
-- column close the border.
mazePatch :: Maze -> PR
mazePatch m = Patch dims $ topLine : (concat (map rowLine [0..maxY]))
    where
      topLine = Row $ replicate width Wall
      rowLine y = [rowRLines y, rowBLines y]
      rowRLines y = Row $ concat $
                    [Wall] : map rowSeg [0..maxX]
        where rowSeg x = if walls ! (x,y) then [Space, Wall] else [Space, Space]
              walls = rightWalls m
      rowBLines y = Row $ concat $
                    (map rowSeg [0..maxX]) ++ [[Wall]]
        where rowSeg x = if walls ! (x,y) then [Wall, Wall] else [Wall, Space]
              walls = belowWalls m
      dims = Dimensions width height
      -- Patch size: two tiles per cell plus the shared border line.
      width = (1+) $ (2*) $ maxX+1
      height = (2*) $ maxY+1
      maxX = fst $ snd $ bounds $ rightWalls m
      maxY = snd $ snd $ bounds $ rightWalls m
| hans25041/gringotz-level | src/Patch.hs | bsd-3-clause | 2,621 | 0 | 14 | 689 | 1,228 | 640 | 588 | 74 | 3 |
{-# LANGUAGE OverloadedStrings, PackageImports #-}
import Control.Applicative
import Control.Monad
import "monads-tf" Control.Monad.State
import Control.Concurrent
import Data.HandleLike
import Network
import Network.PeyoTLS.Server
import Network.PeyoTLS.ReadFile
import "crypto-random" Crypto.Random
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
-- | Entry point: load the server key and certificate, then accept TLS
-- connections on port 443 forever.  Each connection is handled on its
-- own thread with a forked RNG: the request headers are read up to the
-- blank line and echoed to stdout, and a fixed chunked HTTP response
-- is sent back before closing.
main :: IO ()
main = do
    key <- readKey "localhost.key"
    cert <- readCertificateChain "localhost.crt"
    rng0 <- cprgCreate <$> createEntropyPool :: IO SystemRNG
    sock <- listenOn $ PortNumber 443
    void . (`runStateT` rng0) . forever $ do
        (h, _, _) <- liftIO $ accept sock
        rng <- StateT $ return . cprgFork
        liftIO . forkIO . (`run` rng) $ do
            conn <- open h ["TLS_RSA_WITH_AES_128_CBC_SHA"] [(key, cert)]
                Nothing
            doUntil BS.null (hlGetLine conn) >>= liftIO . mapM_ BSC.putStrLn
            hlPut conn $ BS.concat [
                "HTTP/1.1 200 OK\r\n",
                "Transfer-Encoding: chunked\r\n",
                "Content-Type: text/plain\r\n\r\n",
                "5\r\nHello0\r\n\r\n" ]
            hlClose conn
-- | Run the action repeatedly, collecting its results, until a result
-- satisfies the predicate.  The satisfying result is the final element
-- of the returned list.
doUntil :: Monad m => (a -> Bool) -> m a -> m [a]
doUntil p rd = do
    x <- rd
    if p x
        then return [x]
        else do
            rest <- doUntil p rd
            return (x : rest)
| YoshikuniJujo/forest | subprojects/tls-analysis/server/examples/simpleServer.hs | bsd-3-clause | 1,171 | 12 | 17 | 214 | 414 | 222 | 192 | 34 | 2 |
module PPTest.Builders.Nfa (specs) where
import qualified Data.Char as C
import qualified Data.Graph.Inductive.Graph as Gr
import PP
import PP.Builders.Nfa
import PP.Grammars.Lexical
import Test.Hspec
-- Utilities
-- Build an NFA straight from a regex string; the Right pattern match
-- is deliberately partial — test inputs are known to parse.
getNfa expr = let Right ast = (parseAst expr :: To RegExpr) in buildNfa ast
-- Keep only labelled edges that carry a concrete input symbol.
isValue (_, _, NfaValue _) = True
isValue _ = False
-- | Hspec cases checking Thompson-style NFA construction: each case
-- spells out the full expected node/edge graph for one regex.
specs = describe "PPTest.Builders.Nfa" $ do
  it "should build the correct automaton ((a|b)*abb)" $ do
    -- Dragon Book (2nd edition, fr), page 142, figure 3.34
    let expr = "(a|b)*abb"
    let e = Gr.mkGraph [(0,NfaInitial),(1,NfaNode),(2,NfaNode),(3,NfaNode),
                        (4,NfaNode),(5,NfaNode),(6,NfaNode),(7,NfaNode),
                        (8,NfaNode),(9,NfaNode),(10,NfaFinal expr)]
                       [(0,1,NfaEmpty),(0,7,NfaEmpty),(1,2,NfaEmpty),
                        (1,4,NfaEmpty),(2,3,NfaValue 'a'),(3,6,NfaEmpty),
                        (4,5,NfaValue 'b'),(5,6,NfaEmpty),(6,1,NfaEmpty),
                        (6,7,NfaEmpty),(7,8,NfaValue 'a'),(8,9,NfaValue 'b'),
                        (9,10,NfaValue 'b')]
    getNfa expr `shouldBe` e
  it "should build the correct automaton (a+)" $ do
    let expr = "a+"
    let e = Gr.mkGraph [(0,NfaInitial),(1,NfaNode),(2,NfaNode),(3,NfaFinal expr)]
                       [(0,1,NfaEmpty),(1,2,NfaValue 'a'),
                        (2,1,NfaEmpty),(2,3,NfaEmpty)]
    getNfa expr `shouldBe` e
  it "should build the correct automaton (a?)" $ do
    let expr = "a?"
    let e = Gr.mkGraph [(0,NfaInitial),(1,NfaNode),(2,NfaNode),(3,NfaFinal expr)]
                       [(0,1,NfaEmpty),(0,3,NfaEmpty),
                        (1,2,NfaValue 'a'),(2,3,NfaEmpty)]
    getNfa expr `shouldBe` e
  it "should build the correct automaton ([a-c])" $ do
    let expr = "[a-c]"
    let e = Gr.mkGraph [(0,NfaInitial),(1,NfaNode),(2,NfaNode),(3,NfaNode),
                        (4,NfaNode),(5,NfaNode),(6,NfaNode),(7,NfaFinal expr)]
                       [(0,1,NfaEmpty),(0,3,NfaEmpty),(0,5,NfaEmpty),
                        (1,2,NfaValue 'a'),(2,7,NfaEmpty),(3,4,NfaValue 'b'),
                        (4,7,NfaEmpty),(5,6,NfaValue 'c'),(6,7,NfaEmpty)]
    getNfa expr `shouldBe` e
  it "should build the correct automaton ([a-c0-2.-])" $ do
    let expr = "[a-c0-2.-]"
    let e = Gr.mkGraph [(0,NfaInitial),(1,NfaNode),(2,NfaNode),(3,NfaNode),
                        (4,NfaNode),(5,NfaNode),(6,NfaNode),(7,NfaNode),
                        (8,NfaNode),(9,NfaNode),(10,NfaNode),(11,NfaNode),
                        (12,NfaNode),(13,NfaNode),(14,NfaNode),(15,NfaNode),
                        (16,NfaNode),(17,NfaFinal expr)]
                       [(0,1,NfaEmpty),(0,3,NfaEmpty),(0,5,NfaEmpty),
                        (0,7,NfaEmpty),(0,9,NfaEmpty),(0,11,NfaEmpty),
                        (0,13,NfaEmpty),(0,15,NfaEmpty),(1,2,NfaValue 'a'),
                        (2,17,NfaEmpty),(3,4,NfaValue 'b'),(4,17,NfaEmpty),
                        (5,6,NfaValue 'c'),(6,17,NfaEmpty),(7,8,NfaValue '0'),
                        (8,17,NfaEmpty),(9,10,NfaValue '1'),(10,17,NfaEmpty),
                        (11,12,NfaValue '2'),(12,17,NfaEmpty),(13,14,NfaValue '.'),
                        (14,17,NfaEmpty),(15,16,NfaValue '-'),(16,17,NfaEmpty)]
    getNfa expr `shouldBe` e
  it "should build the correct automaton (.)" $ do
    let expr = "."
    -- '.' must transition on every ASCII character.
    let e = [c | c <- [minBound..maxBound], C.isAscii c]
    let values = map (\(_, _, NfaValue c) -> c) $ filter isValue $ Gr.labEdges $ getNfa expr
    values `shouldBe` e
  it "should combine multiple NFA in one correctly" $ do
    let a = Gr.mkGraph [(0,NfaInitial),(1,NfaFinal "a")]
                       [(0,1,NfaValue 'a')]
    let b = Gr.mkGraph [(0,NfaInitial),(1,NfaNode),(2,NfaFinal "bc")]
                       [(0,1,NfaValue 'b'),(1,2,NfaValue 'c')]
    let e = Gr.mkGraph [(0,NfaInitial),(1,NfaNode),(2,NfaFinal "a"),(3,NfaNode),
                        (4,NfaNode),(5,NfaFinal "bc")]
                       [(0,1,NfaEmpty),(0,3,NfaEmpty),(1,2,NfaValue 'a'),
                        (3,4,NfaValue 'b'),(4,5,NfaValue 'c')]
    combineNfa [a,b] `shouldBe` e
| chlablak/platinum-parsing | test/PPTest/Builders/Nfa.hs | bsd-3-clause | 4,212 | 0 | 21 | 1,268 | 1,902 | 1,136 | 766 | 73 | 1 |
module Matterhorn.State.ThemeListOverlay
( enterThemeListMode
, themeListSelectDown
, themeListSelectUp
, themeListPageDown
, themeListPageUp
, setTheme
)
where
import Prelude ()
import Matterhorn.Prelude
import Brick ( invalidateCache )
import Brick.Themes ( themeToAttrMap )
import qualified Brick.Widgets.List as L
import qualified Data.Text as T
import qualified Data.Vector as Vec
import Lens.Micro.Platform ( (.=) )
import Network.Mattermost.Types
import Matterhorn.State.ListOverlay
import Matterhorn.Themes
import Matterhorn.Types
-- | Show the theme list overlay and issue a request to gather the
-- first set of matching themes.
enterThemeListMode :: MH ()
enterThemeListMode =
    enterListOverlayMode (csCurrentTeam.tsThemeListOverlay)
        ThemeListOverlay () setInternalTheme getThemesMatching
-- | Move the selection up in the theme list overlay by one theme.
themeListSelectUp :: MH ()
themeListSelectUp = themeListMove L.listMoveUp
-- | Move the selection down in the theme list overlay by one theme.
themeListSelectDown :: MH ()
themeListSelectDown = themeListMove L.listMoveDown
-- | Move the selection up in the theme list overlay by one page
-- (themeListPageSize).
themeListPageUp :: MH ()
themeListPageUp = themeListMove (L.listMoveBy (-1 * themeListPageSize))
-- | Move the selection down in the theme list overlay by one page
-- (themeListPageSize).
themeListPageDown :: MH ()
themeListPageDown = themeListMove (L.listMoveBy themeListPageSize)
-- | Transform the theme list results in some way, e.g. by moving the
-- cursor, and then check to see whether the modification warrants a
-- prefetch of more search results.
themeListMove :: (L.List Name InternalTheme -> L.List Name InternalTheme) -> MH ()
themeListMove = listOverlayMove (csCurrentTeam.tsThemeListOverlay)
-- | The number of themes in a "page" for cursor movement purposes.
themeListPageSize :: Int
themeListPageSize = 10
-- | Return the built-in themes whose name or description contains the
-- search string, case-insensitively.  The scope and session arguments
-- are unused: the theme list is static.
getThemesMatching :: ()
                  -> Session
                  -> Text
                  -> IO (Vec.Vector InternalTheme)
getThemesMatching _ _ searchString =
    return $ Vec.fromList $ filter themeMatches internalThemes
    where needle = T.toLower searchString
          themeMatches t =
              needle `T.isInfixOf` T.toLower (internalThemeName t) ||
              needle `T.isInfixOf` T.toLower (internalThemeDesc t)
-- | Overlay selection callback: switch to the chosen theme.
-- NOTE(review): the False return value is passed back to the list
-- overlay machinery; confirm its meaning in 'enterListOverlayMode'.
setInternalTheme :: InternalTheme -> MH Bool
setInternalTheme t = do
    setTheme $ internalThemeName t
    return False
-- | Set the UI theme by name.  An unknown name re-opens the theme
-- list overlay instead of failing; a known name invalidates the
-- render cache and installs the theme's attribute map.
setTheme :: Text -> MH ()
setTheme name =
    case lookupTheme name of
        Nothing -> enterThemeListMode
        Just it -> do
            mh invalidateCache
            csResources.crTheme .= (themeToAttrMap $ internalTheme it)
| matterhorn-chat/matterhorn | src/Matterhorn/State/ThemeListOverlay.hs | bsd-3-clause | 2,857 | 0 | 15 | 633 | 553 | 299 | 254 | -1 | -1 |
module Hydra.Stages.Validate (validate) where
import Hydra.Data
-- | Sanity-check a flattened model: the number of equations must
-- equal the number of variables.  Returns the symbol table unchanged
-- on success and aborts with a descriptive error otherwise.
validate :: SymTab -> SymTab
validate st
  | neq == nvar = st
  | otherwise   = error msg
  where
    neq  = length (equations st)
    nvar = variableNumber st
    msg  = "Number of equations (" ++ show neq ++ ") and variables (" ++ show nvar ++ ") do not agree in the model." ++ "\n"
module Main where
import Data.Maybe
import Test.HUnit
import Market.Types
import Market.Util
import Market.Coins
---------------------------------------
-- | Run the whole HUnit suite and report pass/fail counts.
main :: IO Counts
main = runTestTT tests
-- | All market-model tests, labelled for readable failure output.
tests :: Test
tests =
  TestList
    [ TestLabel "Orderbook equality works" quoteBookEq
    , TestLabel "Aggregate and disaggregate" (roundTripAggregation asksSample)
    , TestLabel "Return Total Value" (getTotalValue asksSample)
    , TestLabel "Fee worsening of book works" (testFeeBook book shavedBook)
    , TestLabel "Inverting orderbook works" (invertOrderbook book invertedBook)
    , TestLabel "Merging orderbook works" (mergeOrderbook firstBook otherBook mergedBook)
    , TestLabel "Numerically Unstable Book" (unstableOrderbook bk1 bk2)
    ]
---------------------------- TESTS --------------------------------
-- FIX ME! I'm getting double precision rounding error problems in aggregation/disaggregation.
-- | Sample ask-side price levels used by several tests below.
asksSample :: [(Price Double, Vol BTC)]
asksSample = [(256, 0.5),(512, 1),(576, 0.25)]
-- | 'disaggregate' must be a left inverse of 'aggregate'.
roundTripAggregation :: [(Price Double, Vol BTC)] -> Test
roundTripAggregation samples = TestCase $
  do
    assertEqual "Aggregation followed by disaggregation is modifying list"
        samples (disaggregate $ aggregate samples)
-- | Orderbook equality must ignore how volume at one price level is
-- split across quotes, but not price or total-volume differences.
quoteBookEq :: Test
quoteBookEq = TestCase $ do
    assertBool "Fragmenting quotes should still compare equal" (book == book')
    assertBool "Different prices but books compare equal" (book /= book2)
    assertBool "Different volumes but books compare equal" (book /= book3)
----------
-- Base book plus variants for 'quoteBookEq': book' fragments a bid
-- (bid3 = bid1 + bid2), book2 moves an ask price, book3 an ask volume.
book, book', book2, book3 :: QuoteBook USD BTC () ()
book  = QuoteBook{ bids = [bid1, bid2]
                 , asks = [ask1]
                 , counter = ()}
book' = book { bids = [bid3]}
book2 = book { asks = [ask2]}
book3 = book { asks = [ask3]}
bid1, bid2, bid3, ask1, ask2, ask3 :: Quote USD BTC ()
bid1 = Quote { side = Bid, price = 600, volume = 0.7, qtail = ()}
bid2 = Quote { side = Bid, price = 600, volume = 0.3, qtail = ()}
bid3 = Quote { side = Bid, price = 600, volume = 1.0, qtail = ()}
ask1 = Quote { side = Ask, price = 1000, volume = 1, qtail = ()}
ask2 = Quote { side = Ask, price = 1001, volume = 1, qtail = ()}
ask3 = Quote { side = Ask, price = 1000, volume = 1.1, qtail = ()}
----------
-- Expected result of @feeBook 1.007 book@ (see 'testFeeBook'):
-- bid prices divided and ask prices multiplied by the fee factor.
shavedBook :: QuoteBook USD BTC () ()
shavedBook =
    QuoteBook
        { bids = [bid1', bid2']
        , asks = [ask1']
        , counter = ()}
bid1', bid2', ask1' :: Quote USD BTC ()
bid1' = Quote { side = Bid, price = 600 / 1.007, volume = 0.7, qtail = ()}
bid2' = Quote { side = Bid, price = 600 / 1.007, volume = 0.3, qtail = ()}
ask1' = Quote { side = Ask, price = 1000 * 1.007, volume = 1, qtail = ()}
----------
-- Expected result of @invert book@ (see 'invertOrderbook'): sides
-- swap, prices become reciprocals and volumes are re-denominated.
invertedBook :: QuoteBook BTC USD () ()
invertedBook =
    QuoteBook
        { bids = [bid1'']
        , asks = [ask1'', ask2'']
        , counter = ()}
ask1'', ask2'', bid1'' :: Quote BTC USD ()
ask1'' = Quote { side = Ask, price = 1/600, volume = 0.7 * 600, qtail = ()}
ask2'' = Quote { side = Ask, price = 1/600, volume = 0.3 * 600, qtail = ()}
bid1'' = Quote { side = Bid, price = 1/1000, volume = 1 * 1000, qtail = ()}
----------
-- Inputs for the merge test: a USD/BTC book and a USD/LTC book that
-- share the USD side (see 'mergeOrderbook').
firstBook :: QuoteBook USD BTC () ()
firstBook = QuoteBook
    { bids = [fb1, fb2]
    , asks = [fa1]
    , counter = ()}
fb1, fb2, fa1 :: Quote USD BTC ()
fa1 = Quote { side = Ask, price = 1000, volume = 1, qtail = ()}
fb1 = Quote { side = Bid, price = 500, volume = 0.7, qtail = ()}
fb2 = Quote { side = Bid, price = 500, volume = 0.3, qtail = ()}
otherBook :: QuoteBook USD LTC Int Int
otherBook =
    QuoteBook
        { bids = [b1, b2, b3]
        , asks = [a1, a2, a3]
        , counter = 555}
b1, b2, b3, a1, a2, a3 :: Quote USD LTC Int
b1 = Quote { side = Bid, price = 9, volume = 100, qtail = -1}
b2 = Quote { side = Bid, price = 8, volume = 20, qtail = -2}
b3 = Quote { side = Bid, price = 7, volume = 200, qtail = -3}
a1 = Quote { side = Ask, price = 10, volume = 40, qtail = -4}
a2 = Quote { side = Ask, price = 20, volume = 4, qtail = -5}
a3 = Quote { side = Ask, price = 30, volume = 1, qtail = -6}
----------
-- Expected result of @firstBook `merge` otherBook@ — a BTC/LTC book
-- (see 'mergeOrderbook').
mergedBook :: QuoteBook BTC LTC () ()
mergedBook =
    QuoteBook
        { bids = [ Quote {side = Bid, price = Price 0.00900000, volume = Vol 100.00000000, qtail = ()}
                 , Quote {side = Bid, price = Price 0.00800000, volume = Vol  12.50000000, qtail = ()}]
        , asks = [ Quote {side = Ask, price = Price 0.02000000, volume = Vol  35.00000000, qtail = ()}
                 , Quote {side = Ask, price = Price 0.02000000, volume = Vol   5.00000000, qtail = ()}
                 , Quote {side = Ask, price = Price 0.04000000, volume = Vol   4.00000000, qtail = ()}
                 , Quote {side = Ask, price = Price 0.06000000, volume = Vol   0.66666667, qtail = ()}]
        , counter = ()}
----------
-- | Requesting volume 1.6 must fill 0.5\@256, 1\@512 and 0.1\@576;
-- 'totalValue' returns the cost of those fills and the filled volume.
getTotalValue :: [(Price Double, Vol BTC)] -> Test
getTotalValue samples = TestCase $ do
    assertEqual "Returned wrong value for requested volume"
        (Right (256*0.5+512*1+576*0.1, 1.6)) (totalValue 1.6 $ aggregate samples)
-- | Applying a 0.7% fee factor to a book must yield the fixture book.
testFeeBook :: (Coin p, Coin v, Eq q, Show q, Show c) => QuoteBook p v q c -> QuoteBook p v q c -> Test
testFeeBook sample result = TestCase $ do
    assertEqual "Applied fees mismatch expectation" result (feeBook 1.007 sample)
-- | Inverting a book must yield the fixture book with price and
-- volume roles swapped.
invertOrderbook :: (Coin p, Coin v, Eq q, Show q, Show c) => QuoteBook p v q c -> QuoteBook v p q c -> Test
invertOrderbook sample result = TestCase $ do
    -- Fixed copy-pasted assertion message ("Applied fees mismatch
    -- expectation" came from testFeeBook and was misleading here).
    assertEqual "Inverted orderbook mismatch expectation" result (invert sample)
-- | Merging two books that share the @p1@ currency must produce the
-- fixture cross book.
mergeOrderbook
    :: ( Coin p1, Coin v1, Eq q1, Show q1, Show c1
       , Coin v2, Eq q2, Show q2, Show c2)
    => QuoteBook p1 v1 q1 c1
    -> QuoteBook p1 v2 q2 c2
    -> QuoteBook v1 v2 () ()
    -> Test
mergeOrderbook part1 part2 result = TestCase $ do
    assertEqual "Merged orderbook does not match" result (part1 `merge` part2)
----------
-- | Regression test: invert, merge and fee-adjust two awkward books
-- and check the resulting best ask; 99999 is only a sentinel for an
-- empty ask side.
unstableOrderbook
    :: ( Coin p1, Coin v1, Coin v2)
    => QuoteBook p1 v1 () ()
    -> QuoteBook v1 v2 () ()
    -> Test
unstableOrderbook quoteBook1 quoteBook2 = TestCase $ do
    let bk = feeBook (1.007 * 1.0025 * 1.007) $ merge (invert quoteBook1) quoteBook2
        a' = fromMaybe 99999 (getBestPrice' (asks bk))
    assertEqual "Merged orderbook ask does not match" 143.63699 a'
-- Awkward fixture books for 'unstableOrderbook'.
bk1 :: QuoteBook BRL BTC () ()
bk1 =
    QuoteBook
        { asks = [ Quote {side = Ask, price = Price 9039.90769, volume = Vol 0.04297070, qtail = ()}
                 , Quote {side = Ask, price = Price 9039.9579 , volume = Vol 0.08888888, qtail = ()}
                 , Quote {side = Ask, price = Price 9040.0000 , volume = Vol 0.09999999, qtail = ()}]
        , bids = [ Quote {side = Bid, price = Price 8905.00101, volume = Vol 0.59051716, qtail = ()}
                 , Quote {side = Bid, price = Price 8905.001  , volume = Vol 0.11791000, qtail = ()}
                 , Quote {side = Bid, price = Price 8891.0    , volume = Vol 0.02300000, qtail = ()}]
        , counter = ()}
bk2 :: QuoteBook BTC LTC () ()
bk2 =
    QuoteBook -- first ask is a problem, has VERY small Volume at low price (cost < 1E-8)
        { asks = [ Quote {side = Ask, price = Price 0.01563000, volume = Vol  0.00000036, qtail = ()}
                 , Quote {side = Ask, price = Price 0.01563000, volume = Vol  0.01514199, qtail = ()}
                 , Quote {side = Ask, price = Price 0.01564000, volume = Vol  0.01803125, qtail = ()}]
        , bids = [ Quote {side = Bid, price = Price 0.01561000, volume = Vol  0.02098053, qtail = ()}
                 , Quote {side = Bid, price = Price 0.01561000, volume = Vol 92.27239253, qtail = ()}
                 , Quote {side = Bid, price = Price 0.01561000, volume = Vol 46.00000000, qtail = ()}]
        , counter = ()}
| dimitri-xyz/market-model | test/Test.hs | bsd-3-clause | 7,590 | 0 | 16 | 1,889 | 2,860 | 1,661 | 1,199 | 144 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module RDSTests.Util
( testRDS
, withDBInstance
, waitUntilNotFound
, withDBSnapshot
, withEventSubscription
, withDBParameterGroup
, withDBSecurityGroup
, withOptionGroup
)
where
import qualified Control.Concurrent as CC
import qualified Control.Exception.Lifted as E
import Control.Monad.IO.Class (liftIO, MonadIO)
import Control.Monad.Trans.Resource (ResourceT, runResourceT, MonadResource, MonadBaseControl)
import Data.List (find)
import Data.Text (Text)
import Cloud.AWS.RDS
import Cloud.AWS.RDS.Types
-- | Run an RDS action against the given region inside a fresh
-- ResourceT scope.
testRDS
    :: Text
    -> RDS (ResourceT IO) a
    -> IO a
testRDS region request = do
    runResourceT $ runRDS $ do
        setRegion region
        request
-- | Bracket an action with a freshly created DB instance: first wait
-- until any leftover instance with the same identifier is gone, then
-- create it; the instance is always deleted afterwards, skipping the
-- final snapshot.
withDBInstance
    :: (MonadBaseControl IO m, MonadResource m)
    => CreateDBInstanceRequest
    -> (DBInstance -> RDS m a)
    -> RDS m a
withDBInstance req = E.bracket
    (deleted >> createDBInstance req)
    (\dbi -> deleteDBInstance (dbInstanceIdentifier dbi) SkipFinalSnapshot)
  where
    describe = describeDBInstances Nothing Nothing Nothing
    -- A leftover instance matches when it carries the same identifier.
    f dbi = dbInstanceIdentifier dbi == createDBInstanceIdentifier req
    -- Instances can only be deleted once they report "available".
    g dbi = dbInstanceStatus dbi == Just "available"
    delete dbi = deleteDBInstance (dbInstanceIdentifier dbi) SkipFinalSnapshot
    deleted = waitUntilNotFound describe f g delete
-- | Poll until no resource matches the target predicate: delete a
-- matching resource once it becomes deletable, otherwise sleep ten
-- seconds and look again.
waitUntilNotFound
    :: (MonadIO m, Functor m, MonadBaseControl IO m, MonadResource m)
    => RDS m [a] -- describe resources
    -> (a -> Bool) -- is target resource
    -> (a -> Bool) -- is deletable resource
    -> (a -> RDS m b) -- delete resource
    -> RDS m ()
waitUntilNotFound describe match deletable delete = do
    rs <- describe
    case find match rs of
        Nothing -> return ()
        Just r
            | deletable r -> do
                delete r
                waitUntilNotFound describe match deletable delete
            | otherwise -> do
                -- Not yet deletable: wait 10 seconds before retrying.
                liftIO $ CC.threadDelay 10000000
                waitUntilNotFound describe match deletable delete
-- | Bracket an action with a DB snapshot of the given instance;
-- the snapshot is deleted afterwards.
withDBSnapshot
    :: (MonadBaseControl IO m, MonadResource m)
    => Text
    -> Text
    -> (DBSnapshot -> RDS m a)
    -> RDS m a
withDBSnapshot dbiid dbsid = E.bracket
    (createDBSnapshot dbiid dbsid)
    (deleteDBSnapshot . dbSnapshotIdentifier)
-- | Bracket an action with an event subscription on the given SNS
-- topic ARN; the subscription is deleted afterwards.
withEventSubscription
    :: (MonadBaseControl IO m, MonadResource m)
    => Text
    -> Text
    -> (EventSubscription -> RDS m a)
    -> RDS m a
withEventSubscription name arn = E.bracket
    (createEventSubscription Nothing [] arn [] Nothing name)
    (deleteEventSubscription . eventSubscriptionCustSubscriptionId)
-- | Bracket an action with a MySQL 5.5 parameter group; deleted
-- afterwards.
withDBParameterGroup
    :: (MonadBaseControl IO m, MonadResource m)
    => Text
    -> (DBParameterGroup -> RDS m a)
    -> RDS m a
withDBParameterGroup name = E.bracket
    (createDBParameterGroup "MySQL5.5" name "hspec-test")
    (deleteDBParameterGroup . dbParameterGroupName)
-- | Bracket an action with a DB security group; deleted afterwards.
withDBSecurityGroup
    :: (MonadBaseControl IO m, MonadResource m)
    => Text
    -> (DBSecurityGroup -> RDS m a)
    -> RDS m a
withDBSecurityGroup name = E.bracket
    (createDBSecurityGroup name "hspec-test")
    (deleteDBSecurityGroup . dbSecurityGroupName)
-- | Bracket an action with an oracle-ee 11.2 option group; deleted
-- afterwards.
withOptionGroup
    :: (MonadBaseControl IO m, MonadResource m)
    => Text
    -> (OptionGroup -> RDS m a)
    -> RDS m a
withOptionGroup name = E.bracket
    (createOptionGroup "oracle-ee" "11.2" "hspec-test" name)
    (deleteOptionGroup . optionGroupName)
| worksap-ate/aws-sdk | test/RDSTests/Util.hs | bsd-3-clause | 3,417 | 0 | 15 | 792 | 973 | 500 | 473 | 99 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- |
-- Module: $HEADER$
-- Description: TODO
-- Copyright: (c) 2016 Peter Trško
-- License: BSD3
--
-- Stability: experimental
-- Portability: GHC specific language extensions.
--
-- TODO
module Data.DHT.Type.Hash
( DhtHash(..)
, Bound(..)
, succNum
, predNum
, isWholeSpace
)
where
import Prelude (Bounded(maxBound, minBound), Integral, Num((-), (+)))
import Control.Applicative (liftA2)
import Control.Arrow ((***))
import Data.Bool (Bool(False, True), (&&), (||), otherwise)
import Data.Eq (Eq((/=), (==)))
import Data.Function (const)
import Data.Functor (Functor(fmap))
import Data.Ord (Ord((<=)))
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Text.Show (Show)
-- | Represents one bound of an interval.
--
-- @
-- [x, y] = ('Including' x, 'Including' y)
-- [x, y) = ('Including' x, 'Excluding' y)
-- (x, y] = ('Excluding' x, 'Including' y)
-- (x, y) = ('Excluding' x, 'Excluding' y)
-- @
data Bound a = Including a | Excluding a
  deriving (Eq, Generic, Ord, Show, Typeable)

-- | Map over the carried value while preserving inclusivity.
instance Functor Bound where
    fmap f = \case
        Including a -> Including (f a)
        Excluding a -> Excluding (f a)
-- | Evaluate a 'Bound'. Used to convert an interval into another
-- interval where both bounds are inclusive.
bound
    :: (a -> a)
    -- ^ Function applied in the 'Excluding' case to the bound value.
    -- Usually 'succ' for a min bound and 'pred' for a max bound.
    -> Bound a
    -> a
bound f = \case
    Including a -> a
    Excluding a -> f a
-- | DHT hash space is circular, i.e. 'maxBound' is immediately followed by
-- 'minBound' and vice versa.
-- | DHT hash space is circular, i.e. 'maxBound' is immediately
-- followed by 'minBound' and vice versa.
class (Bounded a, Eq a, Ord a, Show a) => DhtHash a where
    {-# MINIMAL succ, pred #-}

    -- |
    -- @
    -- 'succ' 'maxBound' = 'minBound'
    -- @
    succ :: a -> a

    -- |
    -- @
    -- 'pred' 'minBound' = 'maxBound'
    -- @
    pred :: a -> a

    -- | Check if element is inside a specified interval. Note that this
    -- function handles the circular notion of hash space correctly. For
    -- example, in a hash space 0, 1, ... 9 this predicate holds:
    -- 0 ∈ (7, 1].
    --
    -- Following hold:
    --
    -- @
    -- forall x,y ∈ I.
    --     x \< y =\> ((y, x) = I \\ [x, y]
    --         ∧ (y, x] = I \\ (x, y]
    --         ∧ [y, x) = I \\ [x, y)
    --         ∧ [y, x] = I \\ (x, y))
    -- @
    --
    -- @
    -- forall x ∈ I.
    --     (x, x] = [x, x) = I
    --     ∧ [x, x] = {x}
    --     ∧ (x, x) = I \\ {x}
    -- @
    inInterval :: (Bound a, Bound a) -> a -> Bool
    inInterval bs = case bs of
        -- Degenerate (equal-bound) intervals get special meanings;
        -- all other intervals go through inInterval'.
        (Including b1, Excluding b2)
          | b1 == b2 -> const True -- [x, x) = I
          | otherwise -> inInterval'
        (Excluding b1, Including b2)
          | b1 == b2 -> const True -- (x, x] = I
          | otherwise -> inInterval'
        (Including b1, Including b2)
          | b1 == b2 -> (== b1) -- [x, x] = {x}
          | otherwise -> inInterval'
        (Excluding b1, Excluding b2)
          | b1 == b2 -> (/= b1) -- (x, x) = I \ {x}
          | otherwise -> inInterval'
      where
        inInterval'
          | minb <= maxb = unsafeInInterval bs'
          | otherwise =
            -- We need to split the interval when bounds include the point
            -- where "end" and "beginning" of the DHT circle meet. For
            -- example, in a hash space 0, 1, ... 9, the interval (7, 1] is
            -- first converted into the inclusive interval [8, 1] and then
            -- split into two sub-intervals [8, 9] and [0, 1].
            unsafeInInterval (minb, maxBound)
                <||> unsafeInInterval (minBound, maxb)
          where
            -- Bounds (minb, maxb) need to be always inclusive, hence the
            -- conversion.
            bs'@(minb, maxb) = (bound succ *** bound pred) bs
            -- Pointwise (||) on predicates.
            (<||>) = liftA2 (||)

    -- | Assumes that @lowerBound < upperBound@ in
    -- @'unsafeInInterval' (lowerBound, upperBound)@.
    unsafeInInterval :: (a, a) -> a -> Bool
    unsafeInInterval (minb, maxb) a = minb <= a && a <= maxb
-- | Successor for plain integral hash representations.
succNum :: Integral a => a -> a
succNum n = n + 1
-- | Predecessor for plain integral hash representations.
predNum :: Integral a => a -> a
predNum = subtract 1
-- | Predicate that checks if the bounds represent the whole identifier space
-- @I@, which can be represented as:
--
-- @
-- forall x ∈ I. (x, x] = [x, x) = I
-- @
-- | True exactly for the half-open equal-bound intervals @(x, x]@ and
-- @[x, x)@, which denote the whole identifier space I.
isWholeSpace :: Eq a => (Bound a, Bound a) -> Bool
isWholeSpace bs = case bs of
    (Including lo, Excluding hi) -> lo == hi
    (Excluding lo, Including hi) -> lo == hi
    _ -> False
| FPBrno/dht-api | src/Data/DHT/Type/Hash.hs | bsd-3-clause | 4,747 | 0 | 14 | 1,452 | 961 | 559 | 402 | 70 | 3 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcBinds]{TcBinds}
-}
{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
module TcBinds ( tcLocalBinds, tcTopBinds, tcValBinds,
tcHsBootSigs, tcPolyCheck,
chooseInferredQuantifiers,
badBootDeclErr ) where
import GhcPrelude
import {-# SOURCE #-} TcMatches ( tcGRHSsPat, tcMatchesFun )
import {-# SOURCE #-} TcExpr ( tcMonoExpr )
import {-# SOURCE #-} TcPatSyn ( tcPatSynDecl, tcPatSynBuilderBind )
import CoreSyn (Tickish (..))
import CostCentre (mkUserCC, CCFlavour(DeclCC))
import DynFlags
import FastString
import GHC.Hs
import TcSigs
import TcRnMonad
import TcOrigin
import TcEnv
import TcUnify
import TcSimplify
import TcEvidence
import TcHsType
import TcPat
import TcMType
import FamInstEnv( normaliseType )
import FamInst( tcGetFamInstEnvs )
import TyCon
import TcType
import Type( mkStrLitTy, tidyOpenType, splitTyConApp_maybe, mkCastTy)
import TysPrim
import TysWiredIn( mkBoxedTupleTy )
import Id
import Var
import VarSet
import VarEnv( TidyEnv )
import Module
import Name
import NameSet
import NameEnv
import SrcLoc
import Bag
import ErrUtils
import Digraph
import Maybes
import Util
import BasicTypes
import Outputable
import PrelNames( ipClassName )
import TcValidity (checkValidType)
import UniqFM
import UniqSet
import qualified GHC.LanguageExtensions as LangExt
import ConLike
import Control.Monad
import Data.Foldable (find)
#include "HsVersions.h"
{-
************************************************************************
* *
\subsection{Type-checking bindings}
* *
************************************************************************
@tcBindsAndThen@ typechecks a @HsBinds@. The "and then" part is because
it needs to know something about the {\em usage} of the things bound,
so that it can create specialisations of them. So @tcBindsAndThen@
takes a function which, given an extended environment, E, typechecks
the scope of the bindings returning a typechecked thing and (most
important) an LIE. It is this LIE which is then used as the basis for
specialising the things bound.
@tcBindsAndThen@ also takes a "combiner" which glues together the
bindings and the "thing" to make a new "thing".
The real work is done by @tcBindWithSigsAndThen@.
Recursive and non-recursive binds are handled in essentially the same
way: because of uniques there are no scoping issues left. The only
difference is that non-recursive bindings can bind primitive values.
Even for non-recursive binding groups we add typings for each binder
to the LVE for the following reason. When each individual binding is
checked the type of its LHS is unified with that of its RHS; and
type-checking the LHS of course requires that the binder is in scope.
At the top-level the LIE is sure to contain nothing but constant
dictionaries, which we resolve at the module level.
Note [Polymorphic recursion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The game plan for polymorphic recursion in the code above is
* Bind any variable for which we have a type signature
to an Id with a polymorphic type. Then when type-checking
the RHSs we'll make a full polymorphic call.
This fine, but if you aren't a bit careful you end up with a horrendous
amount of partial application and (worse) a huge space leak. For example:
f :: Eq a => [a] -> [a]
f xs = ...f...
If we don't take care, after typechecking we get
f = /\a -> \d::Eq a -> let f' = f a d
in
\ys:[a] -> ...f'...
Notice the stupid construction of (f a d), which is of course
identical to the function we're executing. In this case, the
polymorphic recursion isn't being used (but that's a very common case).
This can lead to a massive space leak, from the following top-level defn
(post-typechecking)
ff :: [Int] -> [Int]
ff = f Int dEqInt
Now (f dEqInt) evaluates to a lambda that has f' as a free variable; but
f' is another thunk which evaluates to the same thing... and you end
up with a chain of identical values all hung onto by the CAF ff.
ff = f Int dEqInt
= let f' = f Int dEqInt in \ys. ...f'...
= let f' = let f' = f Int dEqInt in \ys. ...f'...
in \ys. ...f'...
Etc.
NOTE: a bit of arity analysis would push the (f a d) inside the (\ys...),
which would make the space leak go away in this case
Solution: when typechecking the RHSs we always have in hand the
*monomorphic* Ids for each binding. So we just need to make sure that
if (Method f a d) shows up in the constraints emerging from (...f...)
we just use the monomorphic Id. We achieve this by adding monomorphic Ids
to the "givens" when simplifying constraints. That's what the "lies_avail"
is doing.
Then we get
f = /\a -> \d::Eq a -> letrec
fm = \ys:[a] -> ...fm...
in
fm
-}
-- | Typecheck the top-level value bindings and signatures of a module,
-- extending the global environment with SPECIALISE pragmas for
-- imported Ids and with COMPLETE pragma matches.
tcTopBinds :: [(RecFlag, LHsBinds GhcRn)] -> [LSig GhcRn]
           -> TcM (TcGblEnv, TcLclEnv)
-- The TcGblEnv contains the new tcg_binds and tcg_spects
-- The TcLclEnv has an extended type envt for the new bindings
tcTopBinds binds sigs
  = do { -- Pattern synonym bindings populate the global environment
         (binds', (tcg_env, tcl_env)) <- tcValBinds TopLevel binds sigs $
            do { gbl <- getGblEnv
               ; lcl <- getLclEnv
               ; return (gbl, lcl) }
       ; specs <- tcImpPrags sigs -- SPECIALISE prags for imported Ids
       ; complete_matches <- setEnvs (tcg_env, tcl_env) $ tcCompleteSigs sigs
       ; traceTc "complete_matches" (ppr binds $$ ppr sigs)
       ; traceTc "complete_matches" (ppr complete_matches)
       ; let { tcg_env' = tcg_env { tcg_imp_specs
                                     = specs ++ tcg_imp_specs tcg_env
                                  , tcg_complete_matches
                                     = complete_matches
                                         ++ tcg_complete_matches tcg_env }
                          `addTypecheckedBinds` map snd binds' }
       ; return (tcg_env', tcl_env) }
       -- The top level bindings are flattened into a giant
       -- implicitly-mutually-recursive LHsBinds
-- Note [Typechecking Complete Matches]
-- Much like when a user bundled a pattern synonym, the result types of
-- all the constructors in the match pragma must be consistent.
--
-- If we allowed pragmas with inconsistent types then it would be
-- impossible to ever match every constructor in the list and so
-- the pragma would be useless.
-- This is only used in `tcCompleteSig`. We fold over all the conlikes,
-- this accumulator keeps track of the first `ConLike` with a concrete
-- return type. After fixing the return type, all other constructors with
-- a fixed return type must agree with this.
--
-- The fields of `Fixed` cache the first conlike and its return type so
-- that that we can compare all the other conlikes to it. The conlike is
-- stored for error messages.
--
-- `Nothing` in the case that the type is fixed by a type signature
-- 'AcceptAny': no conlike has pinned a result TyCon yet;
-- 'Fixed': the result TyCon is fixed, by the recorded conlike if any.
data CompleteSigType = AcceptAny | Fixed (Maybe ConLike) TyCon
-- | Typecheck the {-# COMPLETE #-} pragmas of a module: every conlike
-- in a pragma must agree on a single result TyCon, either inferred
-- from the conlikes themselves or given explicitly in the pragma.
-- See Note [Typechecking Complete Matches].
tcCompleteSigs  :: [LSig GhcRn] -> TcM [CompleteMatch]
tcCompleteSigs sigs =
  let
      doOne :: Sig GhcRn -> TcM (Maybe CompleteMatch)
      doOne c@(CompleteMatchSig _ _ lns mtc)
        = fmap Just $ do
           addErrCtxt (text "In" <+> ppr c) $
            case mtc of
              Nothing -> infer_complete_match
              Just tc -> check_complete_match tc
        where
          checkCLTypes acc = foldM checkCLType (acc, []) (unLoc lns)

          infer_complete_match = do
            (res, cls) <- checkCLTypes AcceptAny
            case res of
              AcceptAny -> failWithTc ambiguousError
              Fixed _ tc  -> return $ mkMatch cls tc

          check_complete_match tc_name = do
            ty_con <- tcLookupLocatedTyCon tc_name
            (_, cls) <- checkCLTypes (Fixed Nothing ty_con)
            return $ mkMatch cls ty_con

          mkMatch :: [ConLike] -> TyCon -> CompleteMatch
          mkMatch cls ty_con = CompleteMatch {
            -- foldM is a left-fold and will have accumulated the ConLikes in
            -- the reverse order. foldrM would accumulate in the correct order,
            -- but would type-check the last ConLike first, which might also be
            -- confusing from the user's perspective. Hence reverse here.
            completeMatchConLikes = reverse (map conLikeName cls),
            completeMatchTyCon = tyConName ty_con
            }
      doOne _ = return Nothing

      ambiguousError :: SDoc
      ambiguousError =
        text "A type signature must be provided for a set of polymorphic"
          <+> text "pattern synonyms."

      -- See note [Typechecking Complete Matches]
      checkCLType :: (CompleteSigType, [ConLike]) -> Located Name
                  -> TcM (CompleteSigType, [ConLike])
      checkCLType (cst, cs) n = do
        cl <- addLocM tcLookupConLike n
        let   (_,_,_,_,_,_, res_ty) = conLikeFullSig cl
              res_ty_con = fst <$> splitTyConApp_maybe res_ty
        case (cst, res_ty_con) of
          (AcceptAny, Nothing) -> return (AcceptAny, cl:cs)
          (AcceptAny, Just tc) -> return (Fixed (Just cl) tc, cl:cs)
          (Fixed mfcl tc, Nothing)  -> return (Fixed mfcl tc, cl:cs)
          (Fixed mfcl tc, Just tc') ->
            if tc == tc'
              then return (Fixed mfcl tc, cl:cs)
              else case mfcl of
                     Nothing ->
                       addErrCtxt (text "In" <+> ppr cl) $
                         failWithTc typeSigErrMsg
                     Just cl -> failWithTc (errMsg cl)
             where
              typeSigErrMsg :: SDoc
              typeSigErrMsg =
                text "Couldn't match expected type"
                      <+> quotes (ppr tc)
                      <+> text "with"
                      <+> quotes (ppr tc')

              errMsg :: ConLike -> SDoc
              errMsg fcl =
                text "Cannot form a group of complete patterns from patterns"
                  <+> quotes (ppr fcl) <+> text "and" <+> quotes (ppr cl)
                  <+> text "as they match different type constructors"
                  <+> parens (quotes (ppr tc)
                               <+> text "resp."
                               <+> quotes (ppr tc'))
  -- For some reason I haven't investigated further, the signatures come in
  -- backwards wrt. declaration order. So we reverse them here, because it makes
  -- a difference for incomplete match suggestions.
  in  mapMaybeM (addLocM doOne) (reverse sigs) -- process in declaration order
-- | Type-check the (type-signature-only) declarations of an hs-boot file,
-- producing one 'Id' per signed name.
tcHsBootSigs :: [(RecFlag, LHsBinds GhcRn)] -> [LSig GhcRn] -> TcM [Id]
-- A hs-boot file has only one BindGroup, and it only has type
-- signatures in it. The renamer checked all this
tcHsBootSigs binds sigs
  = do  { checkTc (null binds) badBootDeclErr
        ; concat <$> mapM (addLocM tc_boot_sig) (filter isTypeLSig sigs) }
  where
    tc_boot_sig (TypeSig _ lnames hs_ty) = mapM f lnames
      where
        f (L _ name)
          = do { sigma_ty <- tcHsSigWcType (FunSigCtxt name False) hs_ty
               ; return (mkVanillaGlobal name sigma_ty) }
        -- Notice that we make GlobalIds, not LocalIds
    -- The renamer guarantees only TypeSigs survive; anything else here
    -- is a compiler bug, hence the panic.
    tc_boot_sig s = pprPanic "tcHsBootSigs/tc_boot_sig" (ppr s)
-- | Error emitted when an hs-boot file contains value bindings
-- (only type signatures are allowed there).
badBootDeclErr :: MsgDoc
badBootDeclErr = text "Illegal declarations in an hs-boot file"
------------------------
-- | Type-check a group of local (let/where) bindings -- either ordinary
-- value bindings or implicit-parameter bindings -- and run the
-- continuation @thing_inside@ in the extended environment.
tcLocalBinds :: HsLocalBinds GhcRn -> TcM thing
             -> TcM (HsLocalBinds GhcTcId, thing)

tcLocalBinds (EmptyLocalBinds x) thing_inside
  = do  { thing <- thing_inside
        ; return (EmptyLocalBinds x, thing) }

tcLocalBinds (HsValBinds x (XValBindsLR (NValBinds binds sigs))) thing_inside
  = do  { (binds', thing) <- tcValBinds NotTopLevel binds sigs thing_inside
        ; return (HsValBinds x (XValBindsLR (NValBinds binds' sigs)), thing) }
-- By this stage the renamer has replaced ValBinds with NValBinds,
-- so meeting one is a compiler bug.
tcLocalBinds (HsValBinds _ (ValBinds {})) _ = panic "tcLocalBinds"

tcLocalBinds (HsIPBinds x (IPBinds _ ip_binds)) thing_inside
  = do  { ipClass <- tcLookupClass ipClassName
        ; (given_ips, ip_binds') <-
            mapAndUnzipM (wrapLocSndM (tc_ip_bind ipClass)) ip_binds

            -- If the binding binds ?x = E, we must now
            -- discharge any ?x constraints in expr_lie
            -- See Note [Implicit parameter untouchables]
        ; (ev_binds, result) <- checkConstraints (IPSkol ips)
                                  [] given_ips thing_inside

        ; return (HsIPBinds x (IPBinds ev_binds ip_binds') , result) }
  where
    ips = [ip | (L _ (IPBind _ (Left (L _ ip)) _)) <- ip_binds]

        -- I wonder if we should do these one at a time
        -- Consider     ?x = 4
        --              ?y = ?x + 1
    tc_ip_bind ipClass (IPBind _ (Left (L _ ip)) expr)
       = do { ty <- newOpenFlexiTyVarTy
            ; let p = mkStrLitTy $ hsIPNameFS ip
            ; ip_id <- newDict ipClass [ p, ty ]
            ; expr' <- tcMonoExpr expr (mkCheckExpType ty)
            ; let d = toDict ipClass p ty `fmap` expr'
            ; return (ip_id, (IPBind noExtField (Right ip_id) d)) }
    tc_ip_bind _ (IPBind _ (Right {}) _) = panic "tc_ip_bind"
    tc_ip_bind _ (XIPBind nec) = noExtCon nec

    -- Coerces a `t` into a dictionary for `IP "x" t`.
    -- co : t -> IP "x" t
    toDict ipClass x ty = mkHsWrap $ mkWpCastR $
                          wrapIP $ mkClassPred ipClass [x,ty]

tcLocalBinds (HsIPBinds _ (XHsIPBinds nec)) _ = noExtCon nec
tcLocalBinds (XHsLocalBindsLR nec) _ = noExtCon nec
{- Note [Implicit parameter untouchables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We add the type variables in the types of the implicit parameters
as untouchables, not so much because we really must not unify them,
but rather because we otherwise end up with constraints like this
Num alpha, Implic { wanted = alpha ~ Int }
The constraint solver solves alpha~Int by unification, but then
doesn't float that solved constraint out (it's not an unsolved
wanted). Result disaster: the (Num alpha) is again solved, this
time by defaulting. No no no.
However [Oct 10] this is all handled automatically by the
untouchable-range idea.
-}
-- | Type-check a collection of value-binding SCCs together with their
-- type signatures, then run @thing_inside@ in the extended environment.
-- Also type-checks pattern-synonym builder bindings, which are returned
-- as extra non-recursive groups.
tcValBinds :: TopLevelFlag
           -> [(RecFlag, LHsBinds GhcRn)] -> [LSig GhcRn]
           -> TcM thing
           -> TcM ([(RecFlag, LHsBinds GhcTcId)], thing)

tcValBinds top_lvl binds sigs thing_inside
  = do  {   -- Typecheck the signatures
            -- It's easier to do so now, once for all the SCCs together
            -- because a single signature  f,g :: <type>
            -- might relate to more than one SCC
        ; (poly_ids, sig_fn) <- tcAddPatSynPlaceholders patsyns $
                                tcTySigs sigs

                -- Extend the envt right away with all the Ids
                -- declared with complete type signatures
                -- Do not extend the TcBinderStack; instead
                -- we extend it on a per-rhs basis in tcExtendForRhs
        ; tcExtendSigIds top_lvl poly_ids $ do
            { (binds', (extra_binds', thing)) <- tcBindGroups top_lvl sig_fn prag_fn binds $ do
                   { thing <- thing_inside
                     -- See Note [Pattern synonym builders don't yield dependencies]
                     --     in GHC.Rename.Binds
                   ; patsyn_builders <- mapM tcPatSynBuilderBind patsyns
                   ; let extra_binds = [ (NonRecursive, builder) | builder <- patsyn_builders ]
                   ; return (extra_binds, thing) }
            ; return (binds' ++ extra_binds', thing) }}
  where
    patsyns = getPatSynBinds binds
    -- Pragmas indexed by binder name; the bag union flattens all SCCs.
    prag_fn = mkPragEnv sigs (foldr (unionBags . snd) emptyBag binds)
------------------------
tcBindGroups :: TopLevelFlag -> TcSigFun -> TcPragEnv
             -> [(RecFlag, LHsBinds GhcRn)] -> TcM thing
             -> TcM ([(RecFlag, LHsBinds GhcTcId)], thing)
-- Typecheck a whole lot of value bindings,
-- one strongly-connected component at a time
-- Here a "strongly connected component" has the straightforward
-- meaning of a group of bindings that mention each other,
-- ignoring type signatures (that part comes later)

tcBindGroups _ _ _ [] thing_inside
  = do  { thing <- thing_inside
        ; return ([], thing) }

tcBindGroups top_lvl sig_fn prag_fn (group : groups) thing_inside
  = do  { -- See Note [Closed binder groups]
          type_env <- getLclTypeEnv
        ; let closed = isClosedBndrGroup type_env (snd group)
          -- Each group is checked with the remaining groups type-checked
          -- inside its scope, so later groups see earlier binders.
        ; (group', (groups', thing))
             <- tc_group top_lvl sig_fn prag_fn group closed $
                tcBindGroups top_lvl sig_fn prag_fn groups thing_inside
        ; return (group' ++ groups', thing) }
-- Note [Closed binder groups]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- A mutually recursive group is "closed" if all of the free variables of
-- the bindings are closed. For example
--
-- > h = \x -> let f = ...g...
-- > g = ....f...x...
-- > in ...
--
-- Here @g@ is not closed because it mentions @x@; and hence neither is @f@
-- closed.
--
-- So we need to compute closed-ness on each strongly connected components,
-- before we sub-divide it based on what type signatures it has.
--
------------------------
tc_group :: forall thing.
            TopLevelFlag -> TcSigFun -> TcPragEnv
         -> (RecFlag, LHsBinds GhcRn) -> IsGroupClosed -> TcM thing
         -> TcM ([(RecFlag, LHsBinds GhcTcId)], thing)

-- Typecheck one strongly-connected component of the original program.
-- We get a list of groups back, because there may
-- be specialisations etc as well

tc_group top_lvl sig_fn prag_fn (NonRecursive, binds) closed thing_inside
        -- A single non-recursive binding
        -- We want to keep non-recursive things non-recursive
        -- so that we desugar unlifted bindings correctly
  = do { let bind = case bagToList binds of
                 [bind] -> bind
                 []     -> panic "tc_group: empty list of binds"
                 _      -> panic "tc_group: NonRecursive binds is not a singleton bag"
       ; (bind', thing) <- tc_single top_lvl sig_fn prag_fn bind closed
                                     thing_inside
       ; return ( [(NonRecursive, bind')], thing) }

tc_group top_lvl sig_fn prag_fn (Recursive, binds) closed thing_inside
  =     -- To maximise polymorphism, we do a new
        -- strongly-connected-component analysis, this time omitting
        -- any references to variables with type signatures.
        -- (This used to be optional, but isn't now.)
        -- See Note [Polymorphic recursion] in HsBinds.
    do  { traceTc "tc_group rec" (pprLHsBinds binds)
          -- Pattern synonyms may not be defined recursively; report the
          -- first offender up front.
        ; whenIsJust mbFirstPatSyn $ \lpat_syn ->
            recursivePatSynErr (getLoc lpat_syn) binds
        ; (binds1, thing) <- go sccs
        ; return ([(Recursive, binds1)], thing) }
                -- Rec them all together
  where
    mbFirstPatSyn = find (isPatSyn . unLoc) binds
    isPatSyn PatSynBind{} = True
    isPatSyn _ = False

    sccs :: [SCC (LHsBind GhcRn)]
    sccs = stronglyConnCompFromEdgedVerticesUniq (mkEdges sig_fn binds)

    -- Type-check the refined SCCs in order, extending the environment
    -- with each sub-group's binders before checking the next.
    go :: [SCC (LHsBind GhcRn)] -> TcM (LHsBinds GhcTcId, thing)
    go (scc:sccs) = do  { (binds1, ids1) <- tc_scc scc
                        ; (binds2, thing) <- tcExtendLetEnv top_lvl sig_fn
                                                            closed ids1 $
                                             go sccs
                        ; return (binds1 `unionBags` binds2, thing) }
    go []         = do  { thing <- thing_inside; return (emptyBag, thing) }

    tc_scc (AcyclicSCC bind) = tc_sub_group NonRecursive [bind]
    tc_scc (CyclicSCC binds) = tc_sub_group Recursive    binds

    tc_sub_group rec_tc binds =
      tcPolyBinds sig_fn prag_fn Recursive rec_tc closed binds
-- | Fail with an error for a pattern synonym defined inside a recursive
-- binding group; pattern synonyms may not be recursive.
recursivePatSynErr ::
     OutputableBndrId p =>
     SrcSpan -- ^ The location of the first pattern synonym binding
             --   (for error reporting)
  -> LHsBinds (GhcPass p)
  -> TcM a
recursivePatSynErr loc binds
  = failAt loc $
    hang (text "Recursive pattern synonym definition with following bindings:")
       2 (vcat (map ppr_one (bagToList binds)))
  where
    -- One line per binding: its binders, then where it was defined.
    ppr_one (L bind_loc bind)
      = pprWithCommas ppr (collectHsBindBinders bind)
          <+> parens (text "defined at" <+> ppr bind_loc)
-- | Type-check a single non-recursive binding: either a pattern-synonym
-- declaration (handled specially, updating the global env) or an
-- ordinary binding handed to 'tcPolyBinds'.
tc_single :: forall thing.
            TopLevelFlag -> TcSigFun -> TcPragEnv
          -> LHsBind GhcRn -> IsGroupClosed -> TcM thing
          -> TcM (LHsBinds GhcTcId, thing)
tc_single _top_lvl sig_fn _prag_fn
          (L _ (PatSynBind _ psb@PSB{ psb_id = L _ name }))
          _ thing_inside
  = do { (aux_binds, tcg_env) <- tcPatSynDecl psb (sig_fn name)
       ; thing <- setGblEnv tcg_env thing_inside
       ; return (aux_binds, thing)
       }

tc_single top_lvl sig_fn prag_fn lbind closed thing_inside
  = do { (binds1, ids) <- tcPolyBinds sig_fn prag_fn
                                      NonRecursive NonRecursive
                                      closed
                                      [lbind]
       ; thing <- tcExtendLetEnv top_lvl sig_fn closed ids thing_inside
       ; return (binds1, thing) }
------------------------
type BKey = Int -- Just number off the bindings

-- | Build the dependency graph for the refined SCC analysis: one node
-- per binding, with an edge to each binding that it mentions, EXCEPT
-- edges to binders with a complete type signature (polymorphic
-- recursion through a signature needs no dependency edge).
mkEdges :: TcSigFun -> LHsBinds GhcRn -> [Node BKey (LHsBind GhcRn)]
-- See Note [Polymorphic recursion] in HsBinds.
mkEdges sig_fn binds
  = [ DigraphNode bind key [key | n <- nonDetEltsUniqSet (bind_fvs (unLoc bind)),
                         Just key <- [lookupNameEnv key_map n], no_sig n ]
    | (bind, key) <- keyd_binds
    ]
    -- It's OK to use nonDetEltsUFM here as stronglyConnCompFromEdgedVertices
    -- is still deterministic even if the edges are in nondeterministic order
    -- as explained in Note [Deterministic SCC] in Digraph.
  where
    bind_fvs (FunBind { fun_ext = fvs }) = fvs
    bind_fvs (PatBind { pat_ext = fvs }) = fvs
    bind_fvs _                           = emptyNameSet

    no_sig :: Name -> Bool
    no_sig n = not (hasCompleteSig sig_fn n)

    keyd_binds = bagToList binds `zip` [0::BKey ..]

    key_map :: NameEnv BKey     -- Which binding it comes from
    key_map = mkNameEnv [(bndr, key) | (L _ bind, key) <- keyd_binds
                                     , bndr <- collectHsBindBinders bind ]
------------------------
tcPolyBinds :: TcSigFun -> TcPragEnv
            -> RecFlag         -- Whether the group is really recursive
            -> RecFlag         -- Whether it's recursive after breaking
                               -- dependencies based on type signatures
            -> IsGroupClosed   -- Whether the group is closed
            -> [LHsBind GhcRn] -- None are PatSynBind
            -> TcM (LHsBinds GhcTcId, [TcId])

-- Typechecks a single bunch of values bindings all together,
-- and generalises them.  The bunch may be only part of a recursive
-- group, because we use type signatures to maximise polymorphism
--
-- Returns a list because the input may be a single non-recursive binding,
-- in which case the dependency order of the resulting bindings is
-- important.
--
-- Knows nothing about the scope of the bindings
-- None of the bindings are pattern synonyms

tcPolyBinds sig_fn prag_fn rec_group rec_tc closed bind_list
  = setSrcSpan loc                              $
    recoverM (recoveryCode binder_names sig_fn) $ do
        -- Set up main recover; take advantage of any type sigs

    { traceTc "------------------------------------------------" Outputable.empty
    ; traceTc "Bindings for {" (ppr binder_names)
    ; dflags   <- getDynFlags
      -- Choose NoGen / InferGen / CheckGen; see decideGeneralisationPlan.
    ; let plan = decideGeneralisationPlan dflags bind_list closed sig_fn
    ; traceTc "Generalisation plan" (ppr plan)
    ; result@(_, poly_ids) <- case plan of
         NoGen              -> tcPolyNoGen rec_tc prag_fn sig_fn bind_list
         InferGen mn        -> tcPolyInfer rec_tc prag_fn sig_fn mn bind_list
         CheckGen lbind sig -> tcPolyCheck prag_fn sig lbind

    ; traceTc "} End of bindings for" (vcat [ ppr binder_names, ppr rec_group
                                            , vcat [ppr id <+> ppr (idType id) | id <- poly_ids]
                                            ])

    ; return result }
  where
    binder_names = collectHsBindListBinders bind_list
    loc = foldr1 combineSrcSpans (map getLoc bind_list)
         -- The mbinds have been dependency analysed and
         -- may no longer be adjacent; so find the narrowest
         -- span that includes them all
--------------
-- If typechecking the binds fails, then return with each
-- signature-less binder given type (forall a.a), to minimise
-- subsequent error messages
-- | If typechecking the binds fails, return an empty bag of bindings
-- plus one dummy 'Id' per binder, so that subsequent error messages
-- are minimised.  Binders with a complete signature keep their signed
-- 'Id'; the rest get type @forall a. a@.
recoveryCode :: [Name] -> TcSigFun -> TcM (LHsBinds GhcTcId, [Id])
recoveryCode binder_names sig_fn
  = do { traceTc "tcBindsWithSigs: error recovery" (ppr binder_names)
       ; return (emptyBag, map dummy_id binder_names) }
  where
    dummy_id name =
      case sig_fn name >>= completeSigPolyId_maybe of
        Just poly_id -> poly_id
        Nothing      -> mkLocalId name forall_a_a
-- | The dummy type @forall a. a@ used for signature-less binders
-- during error recovery.
forall_a_a :: TcType
-- At one point I had (forall r (a :: TYPE r). a), but of course
-- that type is ill-formed: it mentions 'r' which escapes r's scope.
-- Another alternative would be (forall (a :: TYPE kappa). a), where
-- kappa is a unification variable. But I don't think we need that
-- complication here. I'm going to just use (forall (a::*). a).
-- See #15276
forall_a_a = mkSpecForAllTys [alphaTyVar] alphaTy
{- *********************************************************************
* *
tcPolyNoGen
* *
********************************************************************* -}
tcPolyNoGen     -- No generalisation whatsoever
  :: RecFlag       -- Whether it's recursive after breaking
                   -- dependencies based on type signatures
  -> TcPragEnv -> TcSigFun
  -> [LHsBind GhcRn]
  -> TcM (LHsBinds GhcTcId, [TcId])

tcPolyNoGen rec_tc prag_fn tc_sig_fn bind_list
  = do { (binds', mono_infos) <- tcMonoBinds rec_tc tc_sig_fn
                                             (LetGblBndr prag_fn)
                                             bind_list
       ; mono_ids' <- mapM tc_mono_info mono_infos
       ; return (binds', mono_ids') }
  where
    tc_mono_info (MBI { mbi_poly_name = name, mbi_mono_id = mono_id })
      = do { _specs <- tcSpecPrags mono_id (lookupPragEnv prag_fn name)
           ; return mono_id }
           -- NB: tcPrags generates error messages for
           --     specialisation pragmas for non-overloaded sigs
           -- Indeed that is why we call it here!
           -- So we can safely ignore _specs
{- *********************************************************************
* *
tcPolyCheck
* *
********************************************************************* -}
tcPolyCheck :: TcPragEnv
            -> TcIdSigInfo     -- Must be a complete signature
            -> LHsBind GhcRn   -- Must be a FunBind
            -> TcM (LHsBinds GhcTcId, [TcId])
-- There is just one binding,
--   it is a Funbind
--   it has a complete type signature,
tcPolyCheck prag_fn
            (CompleteSig { sig_bndr  = poly_id
                         , sig_ctxt  = ctxt
                         , sig_loc   = sig_loc })
            (L loc (FunBind { fun_id = (L nm_loc name)
                            , fun_matches = matches }))
  = setSrcSpan sig_loc $
    do { traceTc "tcPolyCheck" (ppr poly_id $$ ppr sig_loc)
         -- Skolemise the signature's binders/theta.
       ; (tv_prs, theta, tau) <- tcInstType tcInstSkolTyVars poly_id
                -- See Note [Instantiate sig with fresh variables]

         -- A fresh monomorphic Id for the RHS; its type is the
         -- instantiated tau-type of the signature.
       ; mono_name <- newNameAt (nameOccName name) nm_loc
       ; ev_vars   <- newEvVars theta
       ; let mono_id   = mkLocalId mono_name tau
             skol_info = SigSkol ctxt (idType poly_id) tv_prs
             skol_tvs  = map snd tv_prs

       ; (ev_binds, (co_fn, matches'))
           <- checkConstraints skol_info skol_tvs ev_vars $
              tcExtendBinderStack [TcIdBndr mono_id NotTopLevel]  $
              tcExtendNameTyVarEnv tv_prs $
              setSrcSpan loc $
              tcMatchesFun (L nm_loc mono_name) matches (mkCheckExpType tau)

       ; let prag_sigs = lookupPragEnv prag_fn name
       ; spec_prags <- tcSpecPrags poly_id prag_sigs
       ; poly_id    <- addInlinePrags poly_id prag_sigs

       ; mod <- getModule
       ; tick <- funBindTicks nm_loc mono_id mod prag_sigs

         -- Wrap the monomorphic binding in an AbsBinds that abstracts
         -- over the skolems and evidence, exporting poly_id.
       ; let bind' = FunBind { fun_id      = L nm_loc mono_id
                             , fun_matches = matches'
                             , fun_co_fn   = co_fn
                             , fun_ext     = placeHolderNamesTc
                             , fun_tick    = tick }

             export = ABE { abe_ext   = noExtField
                          , abe_wrap  = idHsWrapper
                          , abe_poly  = poly_id
                          , abe_mono  = mono_id
                          , abe_prags = SpecPrags spec_prags }

             abs_bind = L loc $
                        AbsBinds { abs_ext      = noExtField
                                 , abs_tvs      = skol_tvs
                                 , abs_ev_vars  = ev_vars
                                 , abs_ev_binds = [ev_binds]
                                 , abs_exports  = [export]
                                 , abs_binds    = unitBag (L loc bind')
                                 , abs_sig      = True }

       ; return (unitBag abs_bind, [poly_id]) }

-- Precondition violated: not a CompleteSig + FunBind.
tcPolyCheck _prag_fn sig bind
  = pprPanic "tcPolyCheck" (ppr sig $$ ppr bind)
-- | Compute the cost-centre ticks for a function binding: if the binding
-- carries an @SCC@ pragma, build a 'ProfNote' for it (using the pragma's
-- explicit name if given, otherwise the function's own name, qualified
-- by the module name); otherwise no ticks.
funBindTicks :: SrcSpan -> TcId -> Module -> [LSig GhcRn]
             -> TcM [Tickish TcId]
funBindTicks loc fun_id mod sigs
  | (mb_cc_str : _) <- [ cc_name | L _ (SCCFunSig _ _ _ cc_name) <- sigs ]
      -- this can only be a singleton list, as duplicate pragmas are rejected
      -- by the renamer
  , let cc_str
          | Just cc_str <- mb_cc_str
          = sl_fs $ unLoc cc_str
          | otherwise
          = getOccFS (Var.varName fun_id)
        cc_name = moduleNameFS (moduleName mod) `appendFS` consFS '.' cc_str
  = do
      flavour <- DeclCC <$> getCCIndexM cc_name
      let cc = mkUserCC cc_name mod loc flavour
      return [ProfNote cc True True]
  | otherwise
  = return []
{- Note [Instantiate sig with fresh variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's vital to instantiate a type signature with fresh variables.
For example:
type T = forall a. [a] -> [a]
f :: T;
f = g where { g :: T; g = <rhs> }
We must not use the same 'a' from the defn of T at both places!!
(Instantiation is only necessary because of type synonyms. Otherwise,
it's all cool; each signature has distinct type variables from the renamer.)
-}
{- *********************************************************************
* *
tcPolyInfer
* *
********************************************************************* -}
tcPolyInfer
  :: RecFlag       -- Whether it's recursive after breaking
                   -- dependencies based on type signatures
  -> TcPragEnv -> TcSigFun
  -> Bool         -- True <=> apply the monomorphism restriction
  -> [LHsBind GhcRn]
  -> TcM (LHsBinds GhcTcId, [TcId])
tcPolyInfer rec_tc prag_fn tc_sig_fn mono bind_list
  = do { -- Type-check the RHSs monomorphically, capturing the constraints
         -- they generate at a deeper TcLevel for generalisation.
         (tclvl, wanted, (binds', mono_infos))
             <- pushLevelAndCaptureConstraints  $
                tcMonoBinds rec_tc tc_sig_fn LetLclBndr bind_list

       ; let name_taus  = [ (mbi_poly_name info, idType (mbi_mono_id info))
                          | info <- mono_infos ]
             sigs       = [ sig | MBI { mbi_sig = Just sig } <- mono_infos ]
             infer_mode = if mono then ApplyMR else NoRestrictions

       ; mapM_ (checkOverloadedSig mono) sigs

       ; traceTc "simplifyInfer call" (ppr tclvl $$ ppr name_taus $$ ppr wanted)
         -- Solve what we can and decide what to quantify over.
       ; (qtvs, givens, ev_binds, residual, insoluble)
                 <- simplifyInfer tclvl infer_mode sigs name_taus wanted
       ; emitConstraints residual

       ; let inferred_theta = map evVarPred givens
       ; exports <- checkNoErrs $
                    mapM (mkExport prag_fn insoluble qtvs inferred_theta) mono_infos

       ; loc <- getSrcSpanM
       ; let poly_ids = map abe_poly exports
             abs_bind = L loc $
                        AbsBinds { abs_ext = noExtField
                                 , abs_tvs = qtvs
                                 , abs_ev_vars = givens, abs_ev_binds = [ev_binds]
                                 , abs_exports = exports, abs_binds = binds'
                                 , abs_sig = False }

       ; traceTc "Binding:" (ppr (poly_ids `zip` map idType poly_ids))
       ; return (unitBag abs_bind, poly_ids) }
         -- poly_ids are guaranteed zonked by mkExport
--------------
mkExport :: TcPragEnv
         -> Bool                        -- True <=> there was an insoluble type error
                                        --          when typechecking the bindings
         -> [TyVar] -> TcThetaType      -- Both already zonked
         -> MonoBindInfo
         -> TcM (ABExport GhcTc)
-- Only called for generalisation plan InferGen, not by CheckGen or NoGen
--
-- mkExport generates exports with
--      zonked type variables,
--      zonked poly_ids
-- The former is just because no further unifications will change
-- the quantified type variables, so we can fix their final form
-- right now.
-- The latter is needed because the poly_ids are used to extend the
-- type environment; see the invariant on TcEnv.tcExtendIdEnv

-- Pre-condition: the qtvs and theta are already zonked

mkExport prag_fn insoluble qtvs theta
         mono_info@(MBI { mbi_poly_name = poly_name
                        , mbi_sig       = mb_sig
                        , mbi_mono_id   = mono_id })
  = do  { mono_ty <- zonkTcType (idType mono_id)
        ; poly_id <- mkInferredPolyId insoluble qtvs theta poly_name mb_sig mono_ty

        -- NB: poly_id has a zonked type
        ; poly_id <- addInlinePrags poly_id prag_sigs
        ; spec_prags <- tcSpecPrags poly_id prag_sigs
                -- tcPrags requires a zonked poly_id

        -- See Note [Impedance matching]
        -- NB: we have already done checkValidType, including an ambiguity check,
        --     on the type; either when we checked the sig or in mkInferredPolyId
        ; let poly_ty     = idType poly_id
              sel_poly_ty = mkInfSigmaTy qtvs theta mono_ty
                -- This type is just going into tcSubType,
                -- so Inferred vs. Specified doesn't matter

        ; wrap <- if sel_poly_ty `eqType` poly_ty  -- NB: eqType ignores visibility
                  then return idHsWrapper  -- Fast path; also avoids complaint when we infer
                                           -- an ambiguous type and have AllowAmbiguousType
                                           -- e..g infer  x :: forall a. F a -> Int
                  else addErrCtxtM (mk_impedance_match_msg mono_info sel_poly_ty poly_ty) $
                       tcSubType_NC sig_ctxt sel_poly_ty poly_ty

        ; warn_missing_sigs <- woptM Opt_WarnMissingLocalSignatures
        ; when warn_missing_sigs $
              localSigWarn Opt_WarnMissingLocalSignatures poly_id mb_sig

        ; return (ABE { abe_ext = noExtField
                      , abe_wrap = wrap
                        -- abe_wrap :: idType poly_id ~ (forall qtvs. theta => mono_ty)
                      , abe_poly  = poly_id
                      , abe_mono  = mono_id
                      , abe_prags = SpecPrags spec_prags }) }
  where
    prag_sigs = lookupPragEnv prag_fn poly_name
    sig_ctxt  = InfSigCtxt poly_name
mkInferredPolyId :: Bool  -- True <=> there was an insoluble error when
                          --          checking the binding group for this Id
                 -> [TyVar] -> TcThetaType
                 -> Name -> Maybe TcIdSigInst -> TcType
                 -> TcM TcId
mkInferredPolyId insoluble qtvs inferred_theta poly_name mb_sig_inst mono_ty
  -- A complete signature already fixes the polymorphic Id; reuse it.
  | Just (TISI { sig_inst_sig = sig })  <- mb_sig_inst
  , CompleteSig { sig_bndr = poly_id } <- sig
  = return poly_id

  | otherwise  -- Either no type sig or partial type sig
  = checkNoErrs $  -- The checkNoErrs ensures that if the type is ambiguous
                   -- we don't carry on to the impedance matching, and generate
                   -- a duplicate ambiguity error.  There is a similar
                   -- checkNoErrs for complete type signatures too.
    do { fam_envs <- tcGetFamInstEnvs
       ; let (_co, mono_ty') = normaliseType fam_envs Nominal mono_ty
               -- Unification may not have normalised the type,
               -- (see Note [Lazy flattening] in TcFlatten) so do it
               -- here to make it as uncomplicated as possible.
               -- Example: f :: [F Int] -> Bool
               -- should be rewritten to f :: [Char] -> Bool, if possible
               --
               -- We can discard the coercion _co, because we'll reconstruct
               -- it in the call to tcSubType below

       ; (binders, theta') <- chooseInferredQuantifiers inferred_theta
                                (tyCoVarsOfType mono_ty') qtvs mb_sig_inst

       ; let inferred_poly_ty = mkForAllTys binders (mkPhiTy theta' mono_ty')

       ; traceTc "mkInferredPolyId" (vcat [ppr poly_name, ppr qtvs, ppr theta'
                                          , ppr inferred_poly_ty])
       ; unless insoluble $
         addErrCtxtM (mk_inf_msg poly_name inferred_poly_ty) $
         checkValidType (InfSigCtxt poly_name) inferred_poly_ty
         -- See Note [Validity of inferred types]
         -- If we found an insoluble error in the function definition, don't
         -- do this check; otherwise (#14000) we may report an ambiguity
         -- error for a rather bogus type.

       ; return (mkLocalId poly_name inferred_poly_ty) }
-- | Decide which type variables to quantify over, and with what theta,
-- for an inferred (or partially-signed) binder.
chooseInferredQuantifiers :: TcThetaType   -- inferred
                          -> TcTyVarSet    -- tvs free in tau type
                          -> [TcTyVar]     -- inferred quantified tvs
                          -> Maybe TcIdSigInst
                          -> TcM ([TyVarBinder], TcThetaType)
chooseInferredQuantifiers inferred_theta tau_tvs qtvs Nothing
  = -- No type signature (partial or complete) for this binder,
    do { let free_tvs = closeOverKinds (growThetaTyVars inferred_theta tau_tvs)
                        -- Include kind variables!  #7916
             my_theta = pickCapturedPreds free_tvs inferred_theta
             binders  = [ mkTyVarBinder Inferred tv
                        | tv <- qtvs
                        , tv `elemVarSet` free_tvs ]
       ; return (binders, my_theta) }

chooseInferredQuantifiers inferred_theta tau_tvs qtvs
                          (Just (TISI { sig_inst_sig   = sig  -- Always PartialSig
                                      , sig_inst_wcx   = wcx
                                      , sig_inst_theta = annotated_theta
                                      , sig_inst_skols = annotated_tvs }))
  = -- Choose quantifiers for a partial type signature
    do { psig_qtv_prs <- zonkTyVarTyVarPairs annotated_tvs

            -- Check whether the quantified variables of the
            -- partial signature have been unified together
            -- See Note [Quantified variables in partial type signatures]
       ; mapM_ report_dup_tyvar_tv_err  (findDupTyVarTvs psig_qtv_prs)

            -- Check whether a quantified variable of the partial type
            -- signature is not actually quantified.  How can that happen?
            -- See Note [Quantification and partial signatures] Wrinkle 4
            --     in TcSimplify
       ; mapM_ report_mono_sig_tv_err [ n | (n,tv) <- psig_qtv_prs
                                          , not (tv `elem` qtvs) ]

       ; let psig_qtvs = mkVarSet (map snd psig_qtv_prs)

       ; annotated_theta      <- zonkTcTypes annotated_theta
       ; (free_tvs, my_theta) <- choose_psig_context psig_qtvs annotated_theta wcx

       ; let keep_me    = free_tvs `unionVarSet` psig_qtvs
             final_qtvs = [ mkTyVarBinder vis tv
                          | tv <- qtvs -- Pulling from qtvs maintains original order
                          , tv `elemVarSet` keep_me
                          , let vis | tv `elemVarSet` psig_qtvs = Specified
                                    | otherwise                 = Inferred ]

       ; return (final_qtvs, my_theta) }
  where
    report_dup_tyvar_tv_err (n1,n2)
      | PartialSig { psig_name = fn_name, psig_hs_ty = hs_ty } <- sig
      = addErrTc (hang (text "Couldn't match" <+> quotes (ppr n1)
                        <+> text "with" <+> quotes (ppr n2))
                     2 (hang (text "both bound by the partial type signature:")
                           2 (ppr fn_name <+> dcolon <+> ppr hs_ty)))

      | otherwise -- Can't happen; by now we know it's a partial sig
      = pprPanic "report_tyvar_tv_err" (ppr sig)

    report_mono_sig_tv_err n
      | PartialSig { psig_name = fn_name, psig_hs_ty = hs_ty } <- sig
      = addErrTc (hang (text "Can't quantify over" <+> quotes (ppr n))
                     2 (hang (text "bound by the partial type signature:")
                           2 (ppr fn_name <+> dcolon <+> ppr hs_ty)))
      | otherwise -- Can't happen; by now we know it's a partial sig
      = pprPanic "report_mono_sig_tv_err" (ppr sig)

    choose_psig_context :: VarSet -> TcThetaType -> Maybe TcType
                        -> TcM (VarSet, TcThetaType)
    choose_psig_context _ annotated_theta Nothing
      = do { let free_tvs = closeOverKinds (tyCoVarsOfTypes annotated_theta
                                            `unionVarSet` tau_tvs)
           ; return (free_tvs, annotated_theta) }

    choose_psig_context psig_qtvs annotated_theta (Just wc_var_ty)
      = do { let free_tvs = closeOverKinds (growThetaTyVars inferred_theta seed_tvs)
                            -- growThetaVars just like the no-type-sig case
                            -- Omitting this caused #12844
                 seed_tvs = tyCoVarsOfTypes annotated_theta  -- These are put there
                            `unionVarSet` tau_tvs            --       by the user

           ; let keep_me  = psig_qtvs `unionVarSet` free_tvs
                 my_theta = pickCapturedPreds keep_me inferred_theta

           -- Fill in the extra-constraints wildcard hole with inferred_theta,
           -- so that the Hole constraint we have already emitted
           -- (in tcHsPartialSigType) can report what filled it in.
           -- NB: my_theta already includes all the annotated constraints
           ; let inferred_diff = [ pred
                                 | pred <- my_theta
                                 , all (not . (`eqType` pred)) annotated_theta ]
           ; ctuple <- mk_ctuple inferred_diff

           ; case tcGetCastedTyVar_maybe wc_var_ty of
               -- We know that wc_co must have type kind(wc_var) ~ Constraint, as it
               -- comes from the checkExpectedKind in TcHsType.tcAnonWildCardOcc. So, to
               -- make the kinds work out, we reverse the cast here.
               Just (wc_var, wc_co) -> writeMetaTyVar wc_var (ctuple `mkCastTy` mkTcSymCo wc_co)
               Nothing              -> pprPanic "chooseInferredQuantifiers 1" (ppr wc_var_ty)

           ; traceTc "completeTheta" $
                vcat [ ppr sig
                     , ppr annotated_theta, ppr inferred_theta
                     , ppr inferred_diff ]
           ; return (free_tvs, my_theta) }

    mk_ctuple preds = return (mkBoxedTupleTy preds)
       -- Hack alert!  See TcHsType:
       -- Note [Extra-constraint holes in partial type signatures]
mk_impedance_match_msg :: MonoBindInfo
                       -> TcType -> TcType
                       -> TidyEnv -> TcM (TidyEnv, SDoc)
-- This is a rare but rather awkward error message
mk_impedance_match_msg (MBI { mbi_poly_name = name, mbi_sig = mb_sig })
                       inf_ty sig_ty tidy_env
 = do { (tidy_env1, inf_ty) <- zonkTidyTcType tidy_env  inf_ty
      ; (tidy_env2, sig_ty) <- zonkTidyTcType tidy_env1 sig_ty
      ; let msg = vcat [ text "When checking that the inferred type"
                       , nest 2 $ ppr name <+> dcolon <+> ppr inf_ty
                       , text "is as general as its" <+> what <+> text "signature"
                       , nest 2 $ ppr name <+> dcolon <+> ppr sig_ty ]
      ; return (tidy_env2, msg) }
  where
    what = case mb_sig of
             Nothing                     -> text "inferred"
             Just sig | isPartialSig sig -> text "(partial)"
                      | otherwise        -> empty
-- | Build the "When checking the inferred type" error context,
-- tidying and zonking the offending type first.
mk_inf_msg :: Name -> TcType -> TidyEnv -> TcM (TidyEnv, SDoc)
mk_inf_msg name ty env
  = do { (env', ty') <- zonkTidyTcType env ty
       ; let doc = vcat [ text "When checking the inferred type"
                        , nest 2 (ppr name <+> dcolon <+> ppr ty') ]
       ; return (env', doc) }
-- | Warn the user about polymorphic local binders that lack type signatures.
-- No warning if a signature is present, or if the binder's type is not
-- a sigma-type (monomorphic binders are uninteresting).
localSigWarn :: WarningFlag -> Id -> Maybe TcIdSigInst -> TcM ()
localSigWarn flag id mb_sig =
  case mb_sig of
    Just _  -> return ()
    Nothing
      | isSigmaTy (idType id)
      -> warnMissingSignatures flag
           (text "Polymorphic local binding with no type signature:") id
      | otherwise
      -> return ()
-- | Emit a missing-signature warning for @id@, prefixed by @msg@,
-- with the binder's type tidied for display.
warnMissingSignatures :: WarningFlag -> SDoc -> Id -> TcM ()
warnMissingSignatures flag msg id
  = do { tidy_env <- tcInitTidyEnv
       ; let (tidy_env', ty) = tidyOpenType tidy_env (idType id)
             warn_doc = sep [ msg
                            , nest 2 (pprPrefixName (idName id) <+> dcolon <+> ppr ty) ]
       ; addWarnTcM (Reason flag) (tidy_env', warn_doc) }
checkOverloadedSig :: Bool -> TcIdSigInst -> TcM ()
-- Example:
--   f :: Eq a => a -> a
--   K f = e
-- The MR applies, but the signature is overloaded, and it's
-- best to complain about this directly
-- c.f #11339
checkOverloadedSig monomorphism_restriction_applies sig
  | monomorphism_restriction_applies
  , not (null (sig_inst_theta sig))
  = setSrcSpan (sig_loc orig_sig) $
    failWith $
    hang (text "Overloaded signature conflicts with monomorphism restriction")
       2 (ppr orig_sig)
  | otherwise
  = return ()
  where
    orig_sig = sig_inst_sig sig
{- Note [Partial type signatures and generalisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If /any/ of the signatures in the group is a partial type signature
f :: _ -> Int
then we *always* use the InferGen plan, and hence tcPolyInfer.
We do this even for a local binding with -XMonoLocalBinds, when
we normally use NoGen.
Reasons:
* The TcSigInfo for 'f' has a unification variable for the '_',
whose TcLevel is one level deeper than the current level.
(See pushTcLevelM in tcTySig.) But NoGen doesn't increase
the TcLevel like InferGen, so we lose the level invariant.
* The signature might be f :: forall a. _ -> a
so it really is polymorphic. It's not clear what it would
mean to use NoGen on this, and indeed the ASSERT in tcLhs,
in the (Just sig) case, checks that if there is a signature
then we are using LetLclBndr, and hence a nested AbsBinds with
increased TcLevel
It might be possible to fix these difficulties somehow, but there
doesn't seem much point. Indeed, adding a partial type signature is a
way to get per-binding inferred generalisation.
We apply the MR if /all/ of the partial signatures lack a context.
In particular (#11016):
f2 :: (?loc :: Int) => _
f2 = ?loc
It's stupid to apply the MR here. This test includes an extra-constraints
wildcard; that is, we don't apply the MR if you write
f3 :: _ => blah
Note [Quantified variables in partial type signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f :: forall a. a -> a -> _
f x y = g x y
g :: forall b. b -> b -> _
g x y = [x, y]
Here, 'f' and 'g' are mutually recursive, and we end up unifying 'a' and 'b'
together, which is fine. So we bind 'a' and 'b' to TyVarTvs, which can then
unify with each other.
But now consider:
f :: forall a b. a -> b -> _
f x y = [x, y]
We want to get an error from this, because 'a' and 'b' get unified.
So we make a test, one per partial signature, to check that the
explicitly-quantified type variables have not been unified together.
#14449 showed this up.
Note [Validity of inferred types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to check inferred type for validity, in case it uses language
extensions that are not turned on. The principle is that if the user
simply adds the inferred type to the program source, it'll compile fine.
See #8883.
Examples that might fail:
- the type might be ambiguous
- an inferred theta that requires type equalities e.g. (F a ~ G b)
or multi-parameter type classes
- an inferred type that includes unboxed tuples
Note [Impedance matching]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f 0 x = x
f n x = g [] (not x)
g [] y = f 10 y
g _ y = f 9 y
After typechecking we'll get
f_mono_ty :: a -> Bool -> Bool
g_mono_ty :: [b] -> Bool -> Bool
with constraints
(Eq a, Num a)
Note that f is polymorphic in 'a' and g in 'b'; and these are not linked.
The types we really want for f and g are
f :: forall a. (Eq a, Num a) => a -> Bool -> Bool
g :: forall b. [b] -> Bool -> Bool
We can get these by "impedance matching":
tuple :: forall a b. (Eq a, Num a) => (a -> Bool -> Bool, [b] -> Bool -> Bool)
tuple a b d1 d1 = let ...bind f_mono, g_mono in (f_mono, g_mono)
f a d1 d2 = case tuple a Any d1 d2 of (f, g) -> f
g b = case tuple Integer b dEqInteger dNumInteger of (f,g) -> g
Suppose the shared quantified tyvars are qtvs and constraints theta.
Then we want to check that
forall qtvs. theta => f_mono_ty is more polymorphic than f's polytype
and the proof is the impedance matcher.
Notice that the impedance matcher may do defaulting. See #7173.
It also cleverly does an ambiguity check; for example, rejecting
f :: F a -> F a
where F is a non-injective type function.
-}
{-
Note [SPECIALISE pragmas]
~~~~~~~~~~~~~~~~~~~~~~~~~
There is no point in a SPECIALISE pragma for a non-overloaded function:
reverse :: [a] -> [a]
{-# SPECIALISE reverse :: [Int] -> [Int] #-}
But SPECIALISE INLINE *can* make sense for GADTS:
data Arr e where
ArrInt :: !Int -> ByteArray# -> Arr Int
ArrPair :: !Int -> Arr e1 -> Arr e2 -> Arr (e1, e2)
(!:) :: Arr e -> Int -> e
{-# SPECIALISE INLINE (!:) :: Arr Int -> Int -> Int #-}
{-# SPECIALISE INLINE (!:) :: Arr (a, b) -> Int -> (a, b) #-}
(ArrInt _ ba) !: (I# i) = I# (indexIntArray# ba i)
(ArrPair _ a1 a2) !: i = (a1 !: i, a2 !: i)
When (!:) is specialised it becomes non-recursive, and can usefully
be inlined. Scary! So we only warn for SPECIALISE *without* INLINE
for a non-overloaded function.
************************************************************************
* *
tcMonoBinds
* *
************************************************************************
@tcMonoBinds@ deals with a perhaps-recursive group of HsBinds.
The signatures have been dealt with already.
-}
-- | Information about one monomorphic binder produced by 'tcLhs':
-- the original 'Name', its instantiated type signature (if any), and
-- the monomorphic Id used while typechecking the right-hand sides.
data MonoBindInfo = MBI { mbi_poly_name :: Name
                        , mbi_sig :: Maybe TcIdSigInst
                        , mbi_mono_id :: TcId }
-- | Typecheck a (perhaps mutually recursive) group of bindings monomorphically.
-- The first equation is a special case for a single, non-recursive,
-- signature-less FunBind; the second handles the general group.
tcMonoBinds :: RecFlag -- Whether the binding is recursive for typechecking purposes
                       -- i.e. the binders are mentioned in their RHSs, and
                       -- we are not rescued by a type signature
            -> TcSigFun -> LetBndrSpec
            -> [LHsBind GhcRn]
            -> TcM (LHsBinds GhcTcId, [MonoBindInfo])
tcMonoBinds is_rec sig_fn no_gen
     [ L b_loc (FunBind { fun_id = L nm_loc name
                        , fun_matches = matches
                        , fun_ext = fvs })]
                             -- Single function binding,
  | NonRecursive <- is_rec -- ...binder isn't mentioned in RHS
  , Nothing <- sig_fn name -- ...with no type signature
  = -- Note [Single function non-recursive binding special-case]
    -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -- In this very special case we infer the type of the
    -- right hand side first (it may have a higher-rank type)
    -- and *then* make the monomorphic Id for the LHS
    -- e.g. f = \(x::forall a. a->a) -> <body>
    -- We want to infer a higher-rank type for f
    setSrcSpan b_loc $
    do { ((co_fn, matches'), rhs_ty)
           <- tcInferInst $ \ exp_ty ->
              -- tcInferInst: see TcUnify,
              -- Note [Deep instantiation of InferResult] in TcUnify
              tcExtendBinderStack [TcIdBndr_ExpType name exp_ty NotTopLevel] $
              -- We extend the error context even for a non-recursive
              -- function so that in type error messages we show the
              -- type of the thing whose rhs we are type checking
              tcMatchesFun (L nm_loc name) matches exp_ty
       ; mono_id <- newLetBndr no_gen name rhs_ty
       ; return (unitBag $ L b_loc $
                     FunBind { fun_id = L nm_loc mono_id,
                               fun_matches = matches', fun_ext = fvs,
                               fun_co_fn = co_fn, fun_tick = [] },
                 [MBI { mbi_poly_name = name
                      , mbi_sig = Nothing
                      , mbi_mono_id = mono_id }]) }
tcMonoBinds _ sig_fn no_gen binds
  = do { tc_binds <- mapM (wrapLocM (tcLhs sig_fn no_gen)) binds
       -- Bring the monomorphic Ids, into scope for the RHSs
       ; let mono_infos = getMonoBindInfo tc_binds
             rhs_id_env = [ (name, mono_id)
                          | MBI { mbi_poly_name = name
                                , mbi_sig = mb_sig
                                , mbi_mono_id = mono_id } <- mono_infos
                          , case mb_sig of
                              Just sig -> isPartialSig sig
                              Nothing -> True ]
                -- A monomorphic binding for each term variable that lacks
                -- a complete type sig. (Ones with a sig are already in scope.)
       ; traceTc "tcMonoBinds" $ vcat [ ppr n <+> ppr id <+> ppr (idType id)
                                      | (n,id) <- rhs_id_env]
       ; binds' <- tcExtendRecIds rhs_id_env $
                   mapM (wrapLocM tcRhs) tc_binds
       ; return (listToBag binds', mono_infos) }
------------------------
-- tcLhs typechecks the LHS of the bindings, to construct the environment in which
-- we typecheck the RHSs. Basically what we are doing is this: for each binder:
-- if there's a signature for it, use the instantiated signature type
-- otherwise invent a type variable
-- You see that quite directly in the FunBind case.
--
-- But there's a complication for pattern bindings:
-- data T = MkT (forall a. a->a)
-- MkT f = e
-- Here we can guess a type variable for the entire LHS (which will be refined to T)
-- but we want to get (f::forall a. a->a) as the RHS environment.
-- The simplest way to do this is to typecheck the pattern, and then look up the
-- bound mono-ids. Then we want to retain the typechecked pattern to avoid re-doing
-- it; hence the TcMonoBind data type in which the LHS is done but the RHS isn't
-- | A half-typechecked binding: its LHS has been processed (yielding the
-- monomorphic binder info and, for pattern bindings, the typechecked
-- pattern), but the RHS has not yet been typechecked.
data TcMonoBind -- Half completed; LHS done, RHS not done
  = TcFunBind MonoBindInfo SrcSpan (MatchGroup GhcRn (LHsExpr GhcRn))
  | TcPatBind [MonoBindInfo] (LPat GhcTcId) (GRHSs GhcRn (LHsExpr GhcRn))
              TcSigmaType
-- | Typecheck the left-hand side of one binding, building the environment
-- in which the RHSs are typechecked: use the instantiated signature where
-- there is one, otherwise invent a fresh type variable.
tcLhs :: TcSigFun -> LetBndrSpec -> HsBind GhcRn -> TcM TcMonoBind
-- Only called with plan InferGen (LetBndrSpec = LetLclBndr)
-- or NoGen (LetBndrSpec = LetGblBndr)
-- CheckGen is used only for functions with a complete type signature,
-- and tcPolyCheck doesn't use tcMonoBinds at all
tcLhs sig_fn no_gen (FunBind { fun_id = L nm_loc name
                             , fun_matches = matches })
  | Just (TcIdSig sig) <- sig_fn name
  = -- There is a type signature.
    -- It must be partial; if complete we'd be in tcPolyCheck!
    -- e.g. f :: _ -> _
    -- f x = ...g...
    -- Just g = ...f...
    -- Hence always typechecked with InferGen
    do { mono_info <- tcLhsSigId no_gen (name, sig)
       ; return (TcFunBind mono_info nm_loc matches) }
  | otherwise -- No type signature
  = do { mono_ty <- newOpenFlexiTyVarTy
       ; mono_id <- newLetBndr no_gen name mono_ty
       ; let mono_info = MBI { mbi_poly_name = name
                             , mbi_sig = Nothing
                             , mbi_mono_id = mono_id }
       ; return (TcFunBind mono_info nm_loc matches) }
tcLhs sig_fn no_gen (PatBind { pat_lhs = pat, pat_rhs = grhss })
  = -- See Note [Typechecking pattern bindings]
    do { sig_mbis <- mapM (tcLhsSigId no_gen) sig_names
       ; let inst_sig_fun = lookupNameEnv $ mkNameEnv $
                            [ (mbi_poly_name mbi, mbi_mono_id mbi)
                            | mbi <- sig_mbis ]
            -- See Note [Existentials in pattern bindings]
       ; ((pat', nosig_mbis), pat_ty)
            <- addErrCtxt (patMonoBindsCtxt pat grhss) $
               tcInferNoInst $ \ exp_ty ->
               tcLetPat inst_sig_fun no_gen pat exp_ty $
               mapM lookup_info nosig_names
       ; let mbis = sig_mbis ++ nosig_mbis
       ; traceTc "tcLhs" (vcat [ ppr id <+> dcolon <+> ppr (idType id)
                               | mbi <- mbis, let id = mbi_mono_id mbi ]
                          $$ ppr no_gen)
       ; return (TcPatBind mbis pat' grhss pat_ty) }
  where
    bndr_names = collectPatBinders pat
    (nosig_names, sig_names) = partitionWith find_sig bndr_names
    find_sig :: Name -> Either Name (Name, TcIdSigInfo)
    find_sig name = case sig_fn name of
                      Just (TcIdSig sig) -> Right (name, sig)
                      _ -> Left name
    -- After typechecking the pattern, look up the binder
    -- names that lack a signature, which the pattern has brought
    -- into scope.
    lookup_info :: Name -> TcM MonoBindInfo
    lookup_info name
      = do { mono_id <- tcLookupId name
           ; return (MBI { mbi_poly_name = name
                         , mbi_sig = Nothing
                         , mbi_mono_id = mono_id }) }
tcLhs _ _ other_bind = pprPanic "tcLhs" (ppr other_bind)
  -- AbsBind, VarBind impossible
-- AbsBind, VarBind impossible
-------------------
-- | Instantiate the (partial) signature for a binder and make the
-- corresponding monomorphic Id.
tcLhsSigId :: LetBndrSpec -> (Name, TcIdSigInfo) -> TcM MonoBindInfo
tcLhsSigId no_gen (name, sig)
  = do { inst_sig <- tcInstSig sig
       ; mono_id <- newSigLetBndr no_gen name inst_sig
       ; return (MBI { mbi_poly_name = name
                     , mbi_sig = Just inst_sig
                     , mbi_mono_id = mono_id }) }
------------
-- | Make the let-binder for a signature binding.  With NoGen (LetGblBndr)
-- and a complete signature we reuse the polymorphic Id itself (adding any
-- inline pragmas); otherwise a fresh binder at the instantiated tau type.
newSigLetBndr :: LetBndrSpec -> Name -> TcIdSigInst -> TcM TcId
newSigLetBndr (LetGblBndr prags) name (TISI { sig_inst_sig = id_sig })
  | CompleteSig { sig_bndr = poly_id } <- id_sig
  = addInlinePrags poly_id (lookupPragEnv prags name)
newSigLetBndr no_gen name (TISI { sig_inst_tau = tau })
  = newLetBndr no_gen name tau
-------------------
-- | Typecheck the right-hand side of a half-completed binding, with the
-- monomorphic Ids (and, for FunBinds, any signature tyvars) in scope.
tcRhs :: TcMonoBind -> TcM (HsBind GhcTcId)
tcRhs (TcFunBind info@(MBI { mbi_sig = mb_sig, mbi_mono_id = mono_id })
                 loc matches)
  = tcExtendIdBinderStackForRhs [info] $
    tcExtendTyVarEnvForRhs mb_sig $
    do { traceTc "tcRhs: fun bind" (ppr mono_id $$ ppr (idType mono_id))
       ; (co_fn, matches') <- tcMatchesFun (L loc (idName mono_id))
                                 matches (mkCheckExpType $ idType mono_id)
       ; return ( FunBind { fun_id = L loc mono_id
                          , fun_matches = matches'
                          , fun_co_fn = co_fn
                          , fun_ext = placeHolderNamesTc
                          , fun_tick = [] } ) }
tcRhs (TcPatBind infos pat' grhss pat_ty)
  = -- When we are doing pattern bindings we *don't* bring any scoped
    -- type variables into scope unlike function bindings
    -- Why not? They are not completely rigid.
    -- That's why we have the special case for a single FunBind in tcMonoBinds
    tcExtendIdBinderStackForRhs infos $
    do { traceTc "tcRhs: pat bind" (ppr pat' $$ ppr pat_ty)
       ; grhss' <- addErrCtxt (patMonoBindsCtxt pat' grhss) $
                   tcGRHSsPat grhss pat_ty
       ; return ( PatBind { pat_lhs = pat', pat_rhs = grhss'
                          , pat_ext = NPatBindTc placeHolderNamesTc pat_ty
                          , pat_ticks = ([],[]) } )}
-- | Bring a signature's named wildcards and skolems into scope while
-- typechecking a right-hand side; a no-op when there is no signature.
tcExtendTyVarEnvForRhs :: Maybe TcIdSigInst -> TcM a -> TcM a
tcExtendTyVarEnvForRhs mb_sig thing_inside =
  case mb_sig of
    Nothing  -> thing_inside
    Just sig -> tcExtendTyVarEnvFromSig sig thing_inside
-- | Extend the type-variable environment with both the wildcard bindings
-- and the skolem pairs of an instantiated signature.
tcExtendTyVarEnvFromSig :: TcIdSigInst -> TcM a -> TcM a
tcExtendTyVarEnvFromSig sig_inst thing_inside
  | TISI { sig_inst_skols = skol_prs, sig_inst_wcs = wcs } <- sig_inst
  = tcExtendNameTyVarEnv wcs $
    tcExtendNameTyVarEnv skol_prs $
    thing_inside
tcExtendIdBinderStackForRhs :: [MonoBindInfo] -> TcM a -> TcM a
-- Extend the TcBinderStack for the RHS of the binding, with
-- the monomorphic Id. That way, if we have, say
-- f = \x -> blah
-- and something goes wrong in 'blah', we get a "relevant binding"
-- looking like f :: alpha -> beta
-- This applies if 'f' has a type signature too:
-- f :: forall a. [a] -> [a]
-- f x = True
-- We can't unify True with [a], and a relevant binding is f :: [a] -> [a]
-- If we had the *polymorphic* version of f in the TcBinderStack, it
-- would not be reported as relevant, because its type is closed
tcExtendIdBinderStackForRhs infos thing_inside
  = tcExtendBinderStack [ TcIdBndr mono_id NotTopLevel
                        | MBI { mbi_mono_id = mono_id } <- infos ]
                        thing_inside
    -- NotTopLevel: it's a monomorphic binding
---------------------
-- | Collect the 'MonoBindInfo's from a list of half-typechecked bindings.
getMonoBindInfo :: [Located TcMonoBind] -> [MonoBindInfo]
getMonoBindInfo = concatMap (infos_of . unLoc)
  where
    infos_of (TcFunBind info _ _)    = [info]
    infos_of (TcPatBind infos _ _ _) = infos
{- Note [Typechecking pattern bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Look at:
- typecheck/should_compile/ExPat
- #12427, typecheck/should_compile/T12427{a,b}
data T where
MkT :: Integral a => a -> Int -> T
and suppose t :: T. Which of these pattern bindings are ok?
E1. let { MkT p _ = t } in <body>
E2. let { MkT _ q = t } in <body>
E3. let { MkT (toInteger -> r) _ = t } in <body>
* (E1) is clearly wrong because the existential 'a' escapes.
What type could 'p' possibly have?
* (E2) is fine, despite the existential pattern, because
q::Int, and nothing escapes.
* Even (E3) is fine. The existential pattern binds a dictionary
for (Integral a) which the view pattern can use to convert the
a-valued field to an Integer, so r :: Integer.
An easy way to see all three is to imagine the desugaring.
For (E2) it would look like
let q = case t of MkT _ q' -> q'
in <body>
We typecheck pattern bindings as follows. First tcLhs does this:
1. Take each type signature q :: ty, partial or complete, and
instantiate it (with tcLhsSigId) to get a MonoBindInfo. This
gives us a fresh "mono_id" qm :: instantiate(ty), where qm has
a fresh name.
     Any fresh unification variables in instantiate(ty) are born here, not
deep under implications as would happen if we allocated them when
we encountered q during tcPat.
2. Build a little environment mapping "q" -> "qm" for those Ids
with signatures (inst_sig_fun)
3. Invoke tcLetPat to typecheck the pattern.
- We pass in the current TcLevel. This is captured by
TcPat.tcLetPat, and put into the pc_lvl field of PatCtxt, in
PatEnv.
- When tcPat finds an existential constructor, it binds fresh
type variables and dictionaries as usual, increments the TcLevel,
and emits an implication constraint.
- When we come to a binder (TcPat.tcPatBndr), it looks it up
in the little environment (the pc_sig_fn field of PatCtxt).
Success => There was a type signature, so just use it,
checking compatibility with the expected type.
Failure => No type signature.
Infer case: (happens only outside any constructor pattern)
use a unification variable
at the outer level pc_lvl
Check case: use promoteTcType to promote the type
to the outer level pc_lvl. This is the
place where we emit a constraint that'll blow
up if existential capture takes place
Result: the type of the binder is always at pc_lvl. This is
crucial.
4. Throughout, when we are making up an Id for the pattern-bound variables
(newLetBndr), we have two cases:
- If we are generalising (generalisation plan is InferGen or
CheckGen), then the let_bndr_spec will be LetLclBndr. In that case
we want to bind a cloned, local version of the variable, with the
type given by the pattern context, *not* by the signature (even if
there is one; see #7268). The mkExport part of the
generalisation step will do the checking and impedance matching
against the signature.
     - If for some reason we are not generalising (plan = NoGen), the
LetBndrSpec will be LetGblBndr. In that case we must bind the
global version of the Id, and do so with precisely the type given
in the signature. (Then we unify with the type from the pattern
context type.)
And that's it! The implication constraints check for the skolem
escape. It's quite simple and neat, and more expressive than before
e.g. GHC 8.0 rejects (E2) and (E3).
Example for (E1), starting at level 1. We generate
p :: beta:1, with constraints (forall:3 a. Integral a => a ~ beta)
The (a~beta) can't float (because of the 'a'), nor be solved (because
beta is untouchable.)
Example for (E2), we generate
q :: beta:1, with constraint (forall:3 a. Integral a => Int ~ beta)
The beta is untouchable, but floats out of the constraint and can
be solved absolutely fine.
************************************************************************
* *
Generalisation
* *
********************************************************************* -}
-- | How a binding group should be generalised; see
-- 'decideGeneralisationPlan' for how the choice is made.
data GeneralisationPlan
  = NoGen -- No generalisation, no AbsBinds
  | InferGen -- Implicit generalisation; there is an AbsBinds
       Bool -- True <=> apply the MR; generalise only unconstrained type vars
  | CheckGen (LHsBind GhcRn) TcIdSigInfo
             -- One FunBind with a signature
             -- Explicit generalisation
-- A consequence of the no-AbsBinds choice (NoGen) is that there is
-- no "polymorphic Id" and "monomorphic Id"; there is just the one
-- | Debug-printing only; used in tracing output.
instance Outputable GeneralisationPlan where
  ppr NoGen = text "NoGen"
  ppr (InferGen b) = text "InferGen" <+> ppr b
  ppr (CheckGen _ s) = text "CheckGen" <+> ppr s
-- | Choose the 'GeneralisationPlan' for a binding group.  Priority:
-- partial signatures force InferGen; a single FunBind with a complete
-- signature gets CheckGen; otherwise NoGen (MonoLocalBinds and the group
-- is not closed) or InferGen with the monomorphism restriction.
decideGeneralisationPlan
   :: DynFlags -> [LHsBind GhcRn] -> IsGroupClosed -> TcSigFun
   -> GeneralisationPlan
decideGeneralisationPlan dflags lbinds closed sig_fn
  | has_partial_sigs = InferGen (and partial_sig_mrs)
  | Just (bind, sig) <- one_funbind_with_sig = CheckGen bind sig
  | do_not_generalise closed = NoGen
  | otherwise = InferGen mono_restriction
  where
    binds = map unLoc lbinds
    partial_sig_mrs :: [Bool]
    -- One for each partial signature (so empty => no partial sigs)
    -- The Bool is True if the signature has no constraint context
    -- so we should apply the MR
    -- See Note [Partial type signatures and generalisation]
    partial_sig_mrs
      = [ null theta
        | TcIdSig (PartialSig { psig_hs_ty = hs_ty })
            <- mapMaybe sig_fn (collectHsBindListBinders lbinds)
        , let (_, L _ theta, _) = splitLHsSigmaTyInvis (hsSigWcType hs_ty) ]
    has_partial_sigs = not (null partial_sig_mrs)
    mono_restriction = xopt LangExt.MonomorphismRestriction dflags
                    && any restricted binds
    do_not_generalise (IsGroupClosed _ True) = False
        -- The 'True' means that all of the group's
        -- free vars have ClosedTypeId=True; so we can ignore
        -- -XMonoLocalBinds, and generalise anyway
    do_not_generalise _ = xopt LangExt.MonoLocalBinds dflags
    -- With OutsideIn, all nested bindings are monomorphic
    -- except a single function binding with a signature
    one_funbind_with_sig
      | [lbind@(L _ (FunBind { fun_id = v }))] <- lbinds
      , Just (TcIdSig sig) <- sig_fn (unLoc v)
      = Just (lbind, sig)
      | otherwise
      = Nothing
    -- The Haskell 98 monomorphism restriction
    restricted (PatBind {}) = True
    restricted (VarBind { var_id = v }) = no_sig v
    restricted (FunBind { fun_id = v, fun_matches = m }) = restricted_match m
                                                           && no_sig (unLoc v)
    restricted b = pprPanic "isRestrictedGroup/unrestricted" (ppr b)
    restricted_match mg = matchGroupArity mg == 0
        -- No args => like a pattern binding
        -- Some args => a function binding
    no_sig n = not (hasCompleteSig sig_fn n)
-- | Work out whether a binding group is "closed" in the sense of
-- MonoLocalBinds: compute each binder's open free variables and check
-- whether all of them have closed types.
isClosedBndrGroup :: TcTypeEnv -> Bag (LHsBind GhcRn) -> IsGroupClosed
isClosedBndrGroup type_env binds
  = IsGroupClosed fv_env type_closed
  where
    type_closed = allUFM (nameSetAll is_closed_type_id) fv_env
    fv_env :: NameEnv NameSet
    fv_env = mkNameEnv $ concatMap (bindFvs . unLoc) binds
    bindFvs :: HsBindLR GhcRn GhcRn -> [(Name, NameSet)]
    bindFvs (FunBind { fun_id = L _ f
                     , fun_ext = fvs })
       = let open_fvs = get_open_fvs fvs
         in [(f, open_fvs)]
    bindFvs (PatBind { pat_lhs = pat, pat_ext = fvs })
       = let open_fvs = get_open_fvs fvs
         in [(b, open_fvs) | b <- collectPatBinders pat]
    bindFvs _
       = []
    get_open_fvs fvs = filterNameSet (not . is_closed) fvs
    is_closed :: Name -> ClosedTypeId
    is_closed name
      | Just thing <- lookupNameEnv type_env name
      = case thing of
          AGlobal {} -> True
          ATcId { tct_info = ClosedLet } -> True
          _ -> False
      | otherwise
      = True -- The free-var set for a top level binding mentions
             -- imported things too; see the matching note below
    is_closed_type_id :: Name -> Bool
    -- We've already removed Global and ClosedLet Ids
    is_closed_type_id name
      | Just thing <- lookupNameEnv type_env name
      = case thing of
          ATcId { tct_info = NonClosedLet _ cl } -> cl
          ATcId { tct_info = NotLetBound } -> False
          ATyVar {} -> False
          -- In-scope type variables are not closed!
          _ -> pprPanic "is_closed_id" (ppr name)
      | otherwise
      = True -- The free-var set for a top level binding mentions
             -- imported things too, so that we can report unused imports
             -- These won't be in the local type env.
             -- Ditto class method etc from the current module
{- *********************************************************************
* *
Error contexts and messages
* *
********************************************************************* -}
-- This one is called on LHS, when pat and grhss are both Name
-- and on RHS, when pat is TcId and grhss is still Name
-- | Error-message context for a pattern binding.
patMonoBindsCtxt :: (OutputableBndrId p, Outputable body)
                 => LPat (GhcPass p) -> GRHSs GhcRn body -> SDoc
patMonoBindsCtxt pat grhss
  = hang (text "In a pattern binding:") 2 (pprPatBind pat grhss)
| sdiehl/ghc | compiler/typecheck/TcBinds.hs | bsd-3-clause | 73,468 | 1 | 23 | 23,085 | 11,678 | 6,155 | 5,523 | 832 | 9 |
-- hisg - IRC stats generator.
--
-- Copyright (c) 2009, 2010 Antoine Kalmbach <antoine dot kalmbach at jyu dot fi>
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
-- * Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the author nor the
-- names of its contributors may be used to endorse or promote products
-- derived from this software without specific prior written permission.
--
-- For further details, see LICENSE.
module Hisg.Stats where
import Data.List
import Data.Maybe
import Control.Parallel.Strategies
import qualified Data.Map as M
import qualified Data.Set as Set
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy.Char8 as L
import Text.Regex.PCRE.Light (compile, match)
import Text.Printf
import Hisg.MapReduce
import Hisg.Formats.Irssi
import Control.Arrow
import Control.Parallel (pseq)
import Control.DeepSeq
-- | Orphan instance: a strict ByteString is already fully evaluated once
-- in WHNF, so forcing to normal form is a no-op.
instance NFData S.ByteString where
    rnf _ = () -- not built into Control.Parallel.Strategies
-- | An hourly-distribution map with one zero entry per hour of the day,
-- keyed by the zero-padded hour string @"00"@ .. @"23"@.
emptyHourStats :: M.Map S.ByteString Int
emptyHourStats =
    M.fromList [ (S.pack (printf "%02d" hour), 0) | hour <- [0 .. 23 :: Int] ]
-- | Per-user counters: the list holds [lines, words, kicks] (in that
-- order, as used by 'incKick' / 'incMessage'), paired with the per-hour
-- line distribution.  NOTE(review): the old "night, morning, afternoon,
-- evening" wording predates 'HourStats' — confirm against callers.
type UserStats = ([Int], HourStats)
-- | Lines spoken per zero-padded hour string ("00".."23").
type HourStats = M.Map S.ByteString Int
-- | All statistics, keyed by nickname.
type StatsMap = M.Map S.ByteString UserStats
-- | Calculates statistics for an user, currently lines, words and kicks given.
-- Splits the input into lines, folds 'matchAll' over each chunk in
-- parallel via 'mapReduce', and merges the per-chunk maps with 'sumUser'.
calcUserStats :: [L.ByteString] -> StatsMap
calcUserStats = mapReduce rseq (foldl' matchAll M.empty . L.lines)
                          rseq (M.unionsWith sumUser)
-- | Chains all matchers over one log line: first messages, then kicks.
-- TODO: implement this chaining less manually (the original comment's
-- wording here was mangled by an automated rename).
matchAll :: StatsMap -> L.ByteString -> StatsMap
matchAll statsMap line = let converted = conv line in
    snd . matchKick . matchMessage $ (converted, statsMap)
-- | Increases the message line count and word count and modifies an user's hour
-- distribution should the regexp match.  The capture order (hour, nick,
-- contents) follows 'normalMessageRegex'; non-matching lines pass through
-- unchanged.  Note: the "word count" here is actually the character count
-- of the message body ('S.length contents') — confirm intent upstream.
matchMessage :: (S.ByteString, StatsMap) -> (S.ByteString, StatsMap) -- our modified map if the line matches
matchMessage lineStats = case match (compile normalMessageRegex []) (fst lineStats) [] of
    Just (_:hour:nick:contents:_)
        -> second (M.insertWith' (incMessage hour) nick newValue) lineStats
      where
        newValue = ([1, contents `pseq` S.length contents, 0], M.adjust succ hour emptyHourStats)
    _ -> lineStats
-- | Increases the kick count of a user if the regex matches; the kicker's
-- nick is the fifth capture of 'kickMessageRegex'.
matchKick :: (S.ByteString, StatsMap) -> (S.ByteString, StatsMap)
matchKick lineStats@(line, statsMap) = case match (compile kickMessageRegex []) line [] of
    Just (_:_:_:_:nick:_)
        -- for some reason this deepseq makes the whole thing run in constant space.
        -- why? i deduced that it likely results from the resulting strictness, i.e.
        -- the map is evaluated deeply before we increase a kick, ultimately allowing
        -- the compiler to conclude that this was the final match. however, doing this
        -- on every failure (i.e. using it as the Nothing) only slows the program down,
        -- approximately to 300%, but cuts GC time to 2-3%. where's the tradeoff. thus
        -- it is faster to parse for hypothetical kicks (or whatever) than do a strict
        -- evaluation on *every* parsing failure.
        -> second (\m -> m `deepseq` M.insertWith' incKick nick ([0, 0, 1], M.empty) m) lineStats
    _ -> lineStats
conv = S.concat . L.toChunks
-- | Combining function for 'M.insertWith'': ignores the new value and bumps
-- the existing user's kick counter.  Partial: the list pattern assumes the
-- [lines, words, kicks] three-element invariant of 'UserStats'.
incKick :: UserStats -> UserStats -> UserStats
incKick _ ([l, w, k], ts) = ([l, w, succ k], ts)
-- | Combining function for 'M.insertWith'': folds a new message (with word
-- count @wc'@ and hour key @ts@) into an existing user's stats, bumping the
-- line count and the matching hour bucket.  Partial on the list invariant.
incMessage :: S.ByteString -> UserStats -> UserStats -> UserStats
incMessage ts ([_, wc', _], _) ([lc, wc, kc], hs) = wc `pseq` wc' `pseq` lc `pseq` ([succ lc, wc+wc', kc], incHour hs)
    where
        incHour = ts `pseq` M.adjust succ ts
-- | Joins two sets of user data into one: element-wise sum of the counter
-- lists and a union (summing collisions) of the hour distributions.
-- Partial: both list patterns assume the three-element counter invariant.
sumUser :: UserStats -> UserStats -> UserStats
sumUser (xs@[l, w, k], hs) (xs'@[l', w', k'], hs') = (zipWith (+) xs xs', M.unionWith (+) hs hs')
| ane/hisg | src/Hisg/Stats.hs | bsd-3-clause | 4,655 | 0 | 15 | 888 | 988 | 576 | 412 | 47 | 2 |
{-|
Description: SDL timer support.
-}
module Graphics.UI.SDL.Timer
( module Ticks
) where
import Graphics.UI.SDL.Timer.Ticks as Ticks
| abbradar/MySDL | src/Graphics/UI/SDL/Timer.hs | bsd-3-clause | 151 | 0 | 4 | 33 | 25 | 19 | 6 | 3 | 0 |
-- |
-- Module : Network.TLS.Sending
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unknown
--
-- the Sending module contains calls related to marshalling packets according
-- to the TLS state
--
module Network.TLS.Sending (writePacket) where
import Control.Applicative
import Control.Monad.State
import Control.Concurrent.MVar
import Data.IORef
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Network.TLS.Types (Role(..))
import Network.TLS.Cap
import Network.TLS.Struct
import Network.TLS.Record
import Network.TLS.Packet
import Network.TLS.Context.Internal
import Network.TLS.Parameters
import Network.TLS.State
import Network.TLS.Handshake.State
import Network.TLS.Cipher
import Network.TLS.Util
-- | 'makeRecord' creates a Header and a content bytestring related to a packet;
-- this doesn't change any state.
makeRecord :: Packet -> RecordM (Record Plaintext)
makeRecord pkt = do
    ver <- getRecordVersion
    return $ Record (packetType pkt) ver (fragmentPlaintext $ writePacketContent pkt)
  where writePacketContent (Handshake hss) = encodeHandshakes hss
        writePacketContent (Alert a) = encodeAlerts a
        writePacketContent (ChangeCipherSpec) = encodeChangeCipherSpec
        writePacketContent (AppData x) = x
-- | Marshall a ciphertext record into wire bytes: encoded header followed
-- by the record content.
encodeRecord :: Record Ciphertext -> RecordM ByteString
encodeRecord record = return $ B.concat [ encodeHeader hdr, content ]
  where (hdr, content) = recordToRaw record
-- | writePacket transforms a packet into marshalled data related to current state,
-- updating state on the go.  For handshake packets each message is also fed
-- into the handshake transcript/digest as required; after a ChangeCipherSpec
-- the transmit encryption state is switched.
-- NOTE(review): 'updateVerifiedData' is always given 'ClientRole' here —
-- confirm this path is only used on the client side.
writePacket :: Context -> Packet -> IO (Either TLSError ByteString)
writePacket ctx pkt@(Handshake hss) = do
    forM_ hss $ \hs -> do
        case hs of
            Finished fdata -> usingState_ ctx $ updateVerifiedData ClientRole fdata
            _ -> return ()
        let encoded = encodeHandshake hs
        usingHState ctx $ do
            when (certVerifyHandshakeMaterial hs) $ addHandshakeMessage encoded
            when (finishHandshakeTypeMaterial $ typeOfHandshake hs) $ updateHandshakeDigest encoded
    prepareRecord ctx (makeRecord pkt >>= engageRecord >>= encodeRecord)
writePacket ctx pkt = do
    d <- prepareRecord ctx (makeRecord pkt >>= engageRecord >>= encodeRecord)
    when (pkt == ChangeCipherSpec) $ switchTxEncryption ctx
    return d
-- before TLS 1.1, the block cipher IV is made of the residual of the previous block,
-- so we use cstIV as is; in the other cases we generate an explicit IV
prepareRecord :: Context -> RecordM a -> IO (Either TLSError a)
prepareRecord ctx f = do
    ver <- usingState_ ctx (getVersionWithDefault $ maximum $ supportedVersions $ ctxSupported ctx)
    txState <- readMVar $ ctxTxState ctx
    -- sz is the IV size of the current tx cipher, or 0 when no cipher is
    -- set or the cipher carries no per-record IV
    let sz = case stCipher $ txState of
                  Nothing -> 0
                  Just cipher -> if hasRecordIV $ bulkF $ cipherBulk cipher
                                  then bulkIVSize $ cipherBulk cipher
                                  else 0 -- to not generate IV
    if hasExplicitBlockIV ver && sz > 0
        then do newIV <- getStateRNG ctx sz
                runTxState ctx (modify (setRecordIV newIV) >> f)
        else runTxState ctx f
-- | Promote the pending transmit crypto state (established by the
-- handshake) to the active one, and arm the empty-packet countermeasure
-- for CBC ciphers on TLS <= 1.0 clients (BEAST mitigation).
switchTxEncryption :: Context -> IO ()
switchTxEncryption ctx = do
    tx <- usingHState ctx (fromJust "tx-state" <$> gets hstPendingTxState)
    (ver, cc) <- usingState_ ctx $ do v <- getVersion
                                      c <- isClientContext
                                      return (v, c)
    liftIO $ modifyMVar_ (ctxTxState ctx) (\_ -> return tx)
    -- set empty packet counter measure if conditions are met
    when (ver <= TLS10 && cc == ClientRole && isCBC tx && supportedEmptyPacket (ctxSupported ctx)) $ liftIO $ writeIORef (ctxNeedEmptyPacket ctx) True
  where isCBC tx = maybe False (\c -> bulkBlockSize (cipherBulk c) > 0) (stCipher tx)
| tolysz/hs-tls | core/Network/TLS/Sending.hs | bsd-3-clause | 3,995 | 0 | 18 | 952 | 1,003 | 508 | 495 | 66 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Parser where
import Control.Applicative
import Control.Comonad.Cofree
import Control.Monad (void)
import Data.Char (digitToInt)
import Data.List (foldl')
import Data.String (IsString(..))
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Text.Trifecta
import Text.Parser.Token.Highlight (Highlight(..))
import Text.Trifecta.Delta
import Types
-- | Parse a whole document from 'T.Text'; on failure the pretty-printed
-- trifecta diagnostic is returned as the error text.
parseText :: T.Text -> Either T.Text AST
parseText t =
  case parseByteString parseTopLevel (Lines 0 0 0 0) (T.encodeUtf8 t) of
    Success ast -> Right ast
    Failure diag -> Left (T.pack (show diag))
-- | Parse a file from disk; 'Nothing' on parse failure (trifecta prints
-- the diagnostic to stderr via 'parseFromFile').
parseFile :: FilePath -> IO (Maybe AST)
parseFile = parseFromFile parseTopLevel
-- | A top-level document is a sequence of lists, wrapped in a 'List' node.
parseTopLevel :: Parser AST
parseTopLevel = (() :<) . List <$> many' parseList
-- | Zero or more occurrences of @p@, tolerating leading, separating and
-- trailing insignificant input (see 'skipStuff').
many' :: Parser a -> Parser [a]
many' p = skipStuff *> sepEndBy p skipStuff
-- | Consume and discard insignificant input: commas, whitespace and
-- newlines, and @;@-to-end-of-line comments.
skipStuff :: Parser ()
skipStuff =
  void . many . choice $
    [ void comma
    , void (oneOf " \t\n")
    , void (char ';' >> many (noneOf "\n") >> newline)
    ]
-- | Any expression form; alternatives are tried in order, so compound
-- forms (lists, @#(@, vectors, maps, sets, quotes) come before literals
-- and the catch-all atom.
parseExpr :: Parser AST
parseExpr =
  choice [ parseList
         , parseAnonFunction
         , parseVec
         , parseMap
         , parseSet
         , parseQuote
         , parseLitString
         , parseLitRegex
         , parseAtom
         ]
-- | A parenthesised list of expressions.
parseList :: Parser AST
parseList = (() :<) . List <$> parens (many' parseExpr)
-- | An anonymous-function literal @#(...)@.
-- NOTE(review): the result is a plain 'List' node, so the @#(@ marker is
-- lost in the AST — confirm whether a distinct constructor was intended.
parseAnonFunction :: Parser AST
parseAnonFunction = (() :<) . List <$> between (text "#(")
                                              (text ")")
                                              (many' parseExpr)
-- | A vector literal @[...]@.
parseVec :: Parser AST
parseVec = (() :<) . Vec <$> brackets (many' parseExpr)
-- | A map literal @{k v, ...}@ parsed as key/value pairs.
parseMap :: Parser AST
parseMap = (() :<) . Map <$> braces (many' pair)
  where
    -- a key expression, insignificant input, then its value expression
    pair = (,) <$> (parseExpr <* skipStuff) <*> parseExpr
-- | A set literal @#{...}@.
parseSet :: Parser AST
parseSet =
  (() :<) . Set <$> between (text "#{")
                            (text "}")
                            (many' parseExpr)
-- | A quoted form @'expr@.
parseQuote :: Parser AST
parseQuote = do
  _ <- text "'"
  expr <- parseExpr
  return (() :< Quote expr)
-- | A regex literal @#"..."@; 'try' lets the leading @#@ backtrack so
-- @#(@ / @#{@ forms are not swallowed.  Escaped quotes inside the regex
-- are not handled (the body is simply everything up to the next @"@).
parseLitRegex :: Parser AST
parseLitRegex = fmap ((() :<) . CljRegexLiteral . CljRegex . T.pack) . try $ do
  void $ text "#"
  between (text "\"")
          (text "\"")
          (many (noneOf "\""))
-- | A double-quoted string literal with Java-style escapes.
parseLitString :: Parser AST
parseLitString = (() :<) . CljStringLiteral . CljString <$> javaStringLiteral
-- | A bare atom: one or more characters that are not delimiters,
-- whitespace, commas or newlines.
parseAtom :: Parser AST
parseAtom = (() :<) . Atom . T.pack <$> some (noneOf " ()[]{},\n")
-- | A Java-style string literal: double quotes, backslash escapes
-- ('escapeCode'), the empty escape @\\&@ and string gaps
-- (backslash-whitespace-backslash).  Unlike Haskell literals, a bare
-- newline is accepted inside the string (see @stringLetter@).
javaStringLiteral :: (TokenParsing m, IsString s) => m s
javaStringLiteral = fromString <$> token (highlight StringLiteral lit) where
  lit = Prelude.foldr (maybe id (:)) ""
    <$> between (char '"') (char '"' <?> "end of string") (many stringChar)
    <?> "string"
  stringChar = Just <$> stringLetter
           <|> stringEscape
           <?> "string character"
  stringLetter = satisfy (\c -> (c == '\n') || ((c /= '"') && (c /= '\\') && (c > '\026')))
  stringEscape = highlight EscapeCode $ char '\\' *> esc where
    esc = Nothing <$ escapeGap
      <|> Nothing <$ escapeEmpty
      <|> Just <$> escapeCode
    escapeEmpty = char '&'
    escapeGap = skipSome space *> (char '\\' <?> "end of string gap")
{-# INLINE javaStringLiteral #-}
-- | One escape code after a backslash: a mnemonic (@\\n@ etc.), a unicode
-- escape (@\\uXXXX@), a numeric escape (decimal, @\\oNNN@, @\\xNN@), an
-- ASCII control-code name (@\\NUL@, @\\BS@, ...) or a caret control
-- (@\\^A@).  Three-letter codes are tried before two-letter ones so that
-- e.g. @\\SOH@ is not parsed as @\\SO@ followed by @H@.
escapeCode :: TokenParsing m => m Char
escapeCode = (charEsc <|> charUnicode <|> charNum <|> charAscii <|> charControl) <?> "escape code"
  where
  charControl = (\c -> toEnum (fromEnum c - fromEnum '@')) <$> (char '^' *> (upper <|> char '@'))
  charNum = toEnum . fromInteger <$> num where
    num = decimal
      <|> (char 'o' *> number 8 octDigit)
      <|> (char 'x' *> number 16 hexDigit)
  charUnicode = toEnum . fromInteger <$> (char 'u' *> number 16 hexDigit)
  charEsc = choice $ parseEsc <$> escMap
  parseEsc (c,code) = code <$ char c
  escMap = zip "abfnrtv\\\"\'" "\a\b\f\n\r\t\v\\\"\'"
  charAscii = choice $ parseAscii <$> asciiMap
  parseAscii (asc,code) = try $ code <$ string asc
  asciiMap = zip (ascii3codes ++ ascii2codes) (ascii3 ++ ascii2)
  ascii2codes, ascii3codes :: [String]
  ascii2codes = [ "BS","HT","LF","VT","FF","CR","SO"
                , "SI","EM","FS","GS","RS","US","SP"]
  ascii3codes = ["NUL","SOH","STX","ETX","EOT","ENQ","ACK"
                ,"BEL","DLE","DC1","DC2","DC3","DC4","NAK"
                ,"SYN","ETB","CAN","SUB","ESC","DEL"]
  ascii2, ascii3 :: String
  ascii2 = "\BS\HT\LF\VT\FF\CR\SO\SI\EM\FS\GS\RS\US\SP"
  ascii3 = "\NUL\SOH\STX\ETX\EOT\ENQ\ACK\BEL\DLE\DC1\DC2\DC3\DC4\NAK\SYN\ETB\CAN\SUB\ESC\DEL"
-- | Parse one or more digits (accepted by @baseDigit@) as an unsigned
-- integer in the given base, accumulating left to right.
number :: TokenParsing m => Integer -> m Char -> m Integer
number base baseDigit = accumulate <$> some baseDigit
  where
    accumulate = foldl' step 0
    step acc d = base * acc + toInteger (digitToInt d)
| ethercrow/unused-defns | Parser.hs | bsd-3-clause | 4,797 | 0 | 15 | 1,256 | 1,632 | 872 | 760 | 117 | 2 |
module SubListKata.Day7 (sublist, Result(..)) where
import Data.List(isInfixOf)
-- | How two lists relate to each other.
data Result = Equal | Unequal | Sublist | Superlist
            deriving(Show, Eq)

-- | Classify the relationship of the first list to the second: identical
-- lists are 'Equal'; a contiguous infix of the other is a 'Sublist'; a list
-- containing the other as an infix is a 'Superlist'; otherwise 'Unequal'.
-- (An empty list is an infix of everything, so the empty cases fall out of
-- the guards below.)
sublist :: (Ord a) => [a] -> [a] -> Result
sublist xs ys
    | xs == ys          = Equal
    | xs `isInfixOf` ys = Sublist
    | ys `isInfixOf` xs = Superlist
    | otherwise         = Unequal
| Alex-Diez/haskell-tdd-kata | old-katas/src/SubListKata/Day7.hs | bsd-3-clause | 457 | 0 | 8 | 149 | 178 | 94 | 84 | 13 | 1 |
{-# LANGUAGE OverloadedStrings, NoImplicitPrelude #-}
module Grab where
import BasePrelude hiding (try, takeWhile)
import Control.Lens
import Data.Attoparsec.Text.Lazy
import qualified Data.ByteString.Lazy as BSL
import qualified Data.Text.Lazy.Encoding as TLE
import qualified Data.Text.Lazy as TL
import Graphics.Rendering.Cairo hiding (x, y, width)
import Network.Wreq
import System.Directory as D
import System.FilePath
-- | Chess piece colour.
data Color = Black
           | White
-- | The six chess piece kinds; 'Enum'\/'Bounded' let 'pieces' enumerate
-- them all.
data Piece = King
           | Queen
           | Rook
           | Bishop
           | Knight
           | Pawn
           deriving (Enum, Bounded)
-- | The single-letter code Wikipedia uses for a piece in its chess SVG
-- file names.
wikipediaChar :: Piece -> [Char]
wikipediaChar piece = case piece of
  King   -> "k"
  Queen  -> "q"
  Rook   -> "r"
  Bishop -> "b"
  Knight -> "n"
  Pawn   -> "p"
-- | Wikipedia file-description page for the 45px SVG of the given piece:
-- "d" marks the dark (black) variant, "l" the light (white) one.
url :: Color -> Piece -> [Char]
url color piece =
  "http://en.wikipedia.org/wiki/File:Chess_"
    <> wikipediaChar piece
    <> shade
    <> "t45.svg"
  where
    shade = case color of
      Black -> "d"
      White -> "l"

-- | Every piece kind.
pieces :: [Piece]
pieces = [minBound ..]
-- | Download the SVG for one piece to @path@: fetch the Wikipedia File:
-- page, scrape the \"Original file\" anchor out of its HTML, then fetch
-- that URL and write the body to disk.
download :: FilePath -> Color -> Piece -> IO ()
download path color piece = do
  putStrLn ("here we go" <> path)
  r <- get (url color piece)
  -- The scraped href is protocol-relative, hence the "http:" prefix.
  s <- get ("http:" <> (TL.unpack $ parse' (r ^. responseBody)))
  BSL.writeFile path (s ^. responseBody)
  return ()
  where
    -- Scan forward through the page for the first matching anchor.
    urlParser = anchorParser <|> (anyChar *> urlParser)
    anchorParser = string "<a href=\"" *> takeWhile (/= '"') <* anchorEnd
    anchorEnd = takeWhile (/= '>') *> string ">Original file</a>"
    -- NOTE(review): 'fromJust' crashes if the page layout changes and no
    -- "Original file" anchor is found — consider surfacing a real error.
    parse' s = TL.fromStrict (fromJust (maybeResult (parse urlParser (TLE.decodeUtf8 s))))
-- | Create @/tmp/pieces@ and download both colour variants of every
-- piece's SVG into it.
prep :: IO ()
prep = do
  D.createDirectoryIfMissing True "/tmp/pieces"
  forM_ pieces $ \piece -> do
    -- e.g. "/tmp/pieces/k-b.svg" for the black king
    let path color = ("/tmp/pieces/" <> wikipediaChar piece <> "-" <> (case color of Black -> "b"; White -> "w") <> ".svg")
    download (path White) White piece
    download (path Black) Black piece
-- | Paint the dark square for cell @i@ (0..63, row-major) of an 8x8 board
-- whose side length is @size@.  Cells with even row+column parity get a
-- brown square; odd cells are left alone so the light background shows.
checker :: Double -> Int -> Render ()
checker size i =
  case (row + col) `mod` 2 of
    0 -> do
      rectangle x y width width
      -- dark-brown fill
      setSourceRGBA (147 / 255) (69 / 255) (20/ 255) 1
      fill
    1 ->
      return ()
    -- NOTE(review): only 0 and 1 are matched, which is fine while i is
    -- non-negative; a negative i would cause a pattern-match failure.
  where
    width = size / 8
    x = (fromIntegral col) * width
    y = (fromIntegral row) * width
    row = i `div` 8
    col = i `mod` 8
-- | Render a 500x500 chequerboard PNG to @/tmp/neptune.png@: a light wash
-- over the whole surface, then the dark squares drawn cell by cell via
-- 'checker'.
board :: IO ()
board =
  withImageSurface FormatRGB24 isize isize $ \surface -> do
    renderWith surface $ do
      rectangle 0 0 size size
      -- light background colour
      setSourceRGBA (231 / 255) (171 / 255) (87 / 255) 1
      fill
      forM_ [0..63] (checker size)
    surfaceWriteToPNG surface "/tmp/neptune.png"
  where
    size = 500 :: Double
    isize = 500 :: Int
| hlian/zwischenzug | Grab.hs | bsd-3-clause | 2,593 | 0 | 20 | 619 | 935 | 495 | 440 | 75 | 2 |
-----------------------------------------------------------------------------
--
-- Module : Text.XML.Plist
-- Copyright : (c) Yuras Shumovich 2009
-- License : BSD3
--
-- Maintainer : shumovichy@gmail.com
-- Stability : experimental
-- Portability : portable
--
-- |Library for generation and parsing Mac OS X plist format
--
-----------------------------------------------------------------------------
module Text.XML.Plist (
PlObject(..),
writePlistToFile,
readPlistFromFile,
objectToPlist,
plistToObject,
objectToXml,
xmlToObject,
fromPlString,
fromPlBool,
fromPlInteger,
fromPlReal,
fromPlArray,
fromPlDict
) where
import Text.XML.Plist.PlObject
import Text.XML.Plist.Read
import Text.XML.Plist.Write
| tomgr/webcspm | src/Text/XML/Plist.hs | bsd-3-clause | 731 | 0 | 5 | 92 | 86 | 63 | 23 | 17 | 0 |
-- | aws-sdk is an AWS library for Haskell
--
-- Put your AWS AccessKey and SecretAccessKey into a configuration
-- file. Write the following in /./\//aws.config/.
--
-- > accessKey: your-access-key
-- > secretAccessKey: your-secret-access-key
--
-- The following is quick example(DescribeInstances).
--
-- > module Example where
-- >
-- > import Data.Conduit
-- > import qualified Data.Conduit.List as CL
-- > import Control.Monad.IO.Class (liftIO)
-- > import Control.Monad.Trans.Class (lift)
-- >
-- > import AWS
-- > import AWS.EC2
-- > import qualified AWS.EC2.Util as Util
-- >
-- > main :: IO ()
-- > main = do
-- > cred <- loadCredential
-- > doc <- runResourceT $
-- > runEC2 cred $
-- > Util.list $ describeInstances [] []
-- > print doc
-- > putStr "Length: "
-- > print $ length doc
{-# LANGUAGE OverloadedStrings #-}
module AWS
( -- * Credentials
Credential
, AccessKey
, SecretAccessKey
, newCredential
, loadCredential
, loadCredentialFromFile
-- * Environment
, AWS
, AWSException(..)
, getLastRequestId
) where
import AWS.Credential
import AWS.Class
| IanConnolly/aws-sdk-fork | AWS.hs | bsd-3-clause | 1,163 | 0 | 5 | 272 | 80 | 64 | 16 | 14 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.CreateImage
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates an Amazon EBS-backed AMI from an Amazon EBS-backed instance that
-- is either running or stopped.
--
-- If you customized your instance with instance store volumes or EBS
-- volumes in addition to the root device volume, the new AMI contains
-- block device mapping information for those volumes. When you launch an
-- instance from this new AMI, the instance automatically launches with
-- those additional volumes.
--
-- For more information, see
-- <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/creating-an-ami-ebs.html Creating Amazon EBS-Backed Linux AMIs>
-- in the /Amazon Elastic Compute Cloud User Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-CreateImage.html AWS API Reference> for CreateImage.
module Network.AWS.EC2.CreateImage
(
-- * Creating a Request
createImage
, CreateImage
-- * Request Lenses
, ciiNoReboot
, ciiDescription
, ciiBlockDeviceMappings
, ciiDryRun
, ciiInstanceId
, ciiName
-- * Destructuring the Response
, createImageResponse
, CreateImageResponse
-- * Response Lenses
, cirsImageId
, cirsResponseStatus
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'createImage' smart constructor.
-- | The EC2 @CreateImage@ request: which instance to image, what to call
-- the new AMI, and the optional tweaks below.
data CreateImage = CreateImage'
    { _ciiNoReboot            :: !(Maybe Bool)       -- ^ image live, skipping the clean shutdown
    , _ciiDescription         :: !(Maybe Text)       -- ^ free-form AMI description
    , _ciiBlockDeviceMappings :: !(Maybe [BlockDeviceMapping]) -- ^ extra volume mappings
    , _ciiDryRun              :: !(Maybe Bool)       -- ^ permission check only
    , _ciiInstanceId          :: !Text               -- ^ instance to image
    , _ciiName                :: !Text               -- ^ name for the new AMI
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Build a 'CreateImage' request for the given instance id and image name;
-- every optional field starts out unset.
--
-- Optional fields, settable through their lenses: 'ciiNoReboot',
-- 'ciiDescription', 'ciiBlockDeviceMappings' and 'ciiDryRun'.
createImage
    :: Text -- ^ 'ciiInstanceId'
    -> Text -- ^ 'ciiName'
    -> CreateImage
createImage instanceId imageName =
    CreateImage'
    { _ciiInstanceId = instanceId
    , _ciiName = imageName
    , _ciiNoReboot = Nothing
    , _ciiDescription = Nothing
    , _ciiBlockDeviceMappings = Nothing
    , _ciiDryRun = Nothing
    }
-- | Whether to skip the clean shutdown EC2 normally performs before
-- imaging.  Defaults to 'False' (shut down, image, reboot); when 'True' the
-- instance is imaged live and file-system integrity cannot be guaranteed.
ciiNoReboot :: Lens' CreateImage (Maybe Bool)
ciiNoReboot = lens _ciiNoReboot (\rq v -> rq { _ciiNoReboot = v })

-- | A description for the new image.
ciiDescription :: Lens' CreateImage (Maybe Text)
ciiDescription = lens _ciiDescription (\rq v -> rq { _ciiDescription = v })

-- | Block device mappings for the image (unwrapped from the optional list).
ciiBlockDeviceMappings :: Lens' CreateImage [BlockDeviceMapping]
ciiBlockDeviceMappings =
    lens _ciiBlockDeviceMappings (\rq v -> rq { _ciiBlockDeviceMappings = v })
        . _Default . _Coerce

-- | Dry-run flag: check permissions only.  With the required permissions
-- the error response is 'DryRunOperation', otherwise
-- 'UnauthorizedOperation'.
ciiDryRun :: Lens' CreateImage (Maybe Bool)
ciiDryRun = lens _ciiDryRun (\rq v -> rq { _ciiDryRun = v })

-- | The ID of the instance to image.
ciiInstanceId :: Lens' CreateImage Text
ciiInstanceId = lens _ciiInstanceId (\rq v -> rq { _ciiInstanceId = v })

-- | A name for the new image.
--
-- Constraints: 3-128 alphanumeric characters, parentheses, square
-- brackets, spaces, periods, slashes, dashes, single quotes, at-signs, or
-- underscores.
ciiName :: Lens' CreateImage Text
ciiName = lens _ciiName (\rq v -> rq { _ciiName = v })
-- Wire format: a POST query against the EC2 2015-04-15 API; the parameter
-- names below must match EC2's query protocol exactly, and the response is
-- decoded from XML.
instance AWSRequest CreateImage where
        type Rs CreateImage = CreateImageResponse
        request = postQuery eC2
        response
          = receiveXML
              (\ s h x ->
                 CreateImageResponse' <$>
                   (x .@? "imageId") <*> (pure (fromEnum s)))
instance ToHeaders CreateImage where
        toHeaders = const mempty
instance ToPath CreateImage where
        toPath = const "/"
instance ToQuery CreateImage where
        toQuery CreateImage'{..}
          = mconcat
              ["Action" =: ("CreateImage" :: ByteString),
               "Version" =: ("2015-04-15" :: ByteString),
               "NoReboot" =: _ciiNoReboot,
               "Description" =: _ciiDescription,
               toQuery
                 (toQueryList "BlockDeviceMapping" <$>
                    _ciiBlockDeviceMappings),
               "DryRun" =: _ciiDryRun,
               "InstanceId" =: _ciiInstanceId, "Name" =: _ciiName]
-- | /See:/ 'createImageResponse' smart constructor.
-- | Response to @CreateImage@: the new AMI's id (when present in the XML)
-- plus the HTTP status code.
data CreateImageResponse = CreateImageResponse'
    { _cirsImageId        :: !(Maybe Text) -- ^ id of the new AMI
    , _cirsResponseStatus :: !Int          -- ^ HTTP status code
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Build a 'CreateImageResponse' carrying the given HTTP status; the
-- image id ('cirsImageId') starts out unset.
createImageResponse
    :: Int -- ^ 'cirsResponseStatus'
    -> CreateImageResponse
createImageResponse status =
    CreateImageResponse'
    { _cirsResponseStatus = status
    , _cirsImageId = Nothing
    }
-- | The ID of the new AMI.
cirsImageId :: Lens' CreateImageResponse (Maybe Text)
cirsImageId = lens _cirsImageId (\rs v -> rs { _cirsImageId = v })

-- | The response status code.
cirsResponseStatus :: Lens' CreateImageResponse Int
cirsResponseStatus = lens _cirsResponseStatus (\rs v -> rs { _cirsResponseStatus = v })
| fmapfmapfmap/amazonka | amazonka-ec2/gen/Network/AWS/EC2/CreateImage.hs | mpl-2.0 | 6,783 | 0 | 13 | 1,466 | 967 | 582 | 385 | 114 | 1 |
module Cardano.Wallet.API.V1.Handlers.Wallets where
import Universum
import Servant
import Cardano.Wallet.API.Request
import Cardano.Wallet.API.Response
import Cardano.Wallet.API.V1.Types as V1
import qualified Cardano.Wallet.API.V1.Wallets as Wallets
import Cardano.Wallet.WalletLayer (PassiveWalletLayer)
import qualified Cardano.Wallet.WalletLayer as WalletLayer
import Pos.Core.Common (Coin (..))
-- | All the @Servant@ handlers for wallet-specific operations.
--
-- NOTE(review): the position of each handler in this ':<|>' chain must
-- mirror the route order declared in 'Wallets.API'; reordering here
-- silently rewires endpoints.
handlers :: PassiveWalletLayer IO -> ServerT Wallets.API Handler
handlers pwl = newWallet pwl
          :<|> listWallets pwl
          :<|> updatePassword pwl
          :<|> deleteWallet pwl
          :<|> getWallet pwl
          :<|> updateWallet pwl
          :<|> getUtxoStatistics pwl
-- | Creates a new wallet or restores an existing one from a 'NewWallet'
-- payload, returning the created\/restored wallet's 'Wallet'
-- representation.
newWallet :: PassiveWalletLayer IO
          -> NewWallet
          -> Handler (APIResponse Wallet)
newWallet pwl req =
    -- FIXME(adn) Do not allow creation or restoration of wallets if the
    -- underlying node is still catching up.
    liftIO (WalletLayer.createWallet pwl (WalletLayer.CreateWallet req))
        >>= either throwM (pure . single)
-- | Returns the full (paginated) list of wallets, applying the supplied
-- filter and sort operations.
listWallets :: PassiveWalletLayer IO
            -> RequestParams
            -> FilterOperations '[WalletId, Coin] Wallet
            -> SortOperations Wallet
            -> Handler (APIResponse [Wallet])
listWallets pwl params fops sops = do
    allWallets <- liftIO (WalletLayer.getWallets pwl)
    respondWith params fops sops (pure allWallets)
-- | Updates the given wallet's password from a 'PasswordUpdate' payload
-- and returns the resulting wallet.
updatePassword :: PassiveWalletLayer IO
               -> WalletId
               -> PasswordUpdate
               -> Handler (APIResponse Wallet)
updatePassword pwl wid pwdUpdate =
    liftIO (WalletLayer.updateWalletPassword pwl wid pwdUpdate)
        >>= either throwM (pure . single)
-- | Deletes an existing wallet.
deleteWallet :: PassiveWalletLayer IO
             -> WalletId
             -> Handler NoContent
deleteWallet pwl wid =
    liftIO (WalletLayer.deleteWallet pwl wid)
        >>= either throwM (\() -> pure NoContent)
-- | Gets a specific wallet by id.
getWallet :: PassiveWalletLayer IO
          -> WalletId
          -> Handler (APIResponse Wallet)
getWallet pwl wid =
    liftIO (WalletLayer.getWallet pwl wid)
        >>= either throwM (pure . single)
-- | Applies a 'WalletUpdate' to the given wallet and returns the updated
-- wallet.
updateWallet :: PassiveWalletLayer IO
             -> WalletId
             -> WalletUpdate
             -> Handler (APIResponse Wallet)
updateWallet pwl wid upd =
    liftIO (WalletLayer.updateWallet pwl wid upd)
        >>= either throwM (pure . single)
-- | Computes UTxO statistics (log10 histogram) over the given wallet's
-- UTxO set.
getUtxoStatistics
    :: PassiveWalletLayer IO
    -> WalletId
    -> Handler (APIResponse UtxoStatistics)
getUtxoStatistics pwl wid =
    liftIO (WalletLayer.getUtxos pwl wid) >>= either throwM summarise
  where
    summarise utxos =
        pure (single (V1.computeUtxoStatistics V1.log10 (map snd utxos)))
| input-output-hk/cardano-sl | wallet/src/Cardano/Wallet/API/V1/Handlers/Wallets.hs | apache-2.0 | 3,471 | 0 | 13 | 990 | 829 | 409 | 420 | -1 | -1 |
{-# LANGUAGE CPP, DataKinds, TypeOperators, TypeApplications, TypeFamilies #-}
#if __GLASGOW_HASKELL__ >= 805
{-# LANGUAGE NoStarIsType #-}
#endif
{-# OPTIONS_GHC -fdefer-type-errors #-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.Normalise #-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.KnownNat.Solver #-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.Extra.Solver #-}
module ErrorTests where
import Data.Proxy
import GHC.TypeLits
import GHC.TypeLits.Extra
-- Each testFailN below is expected NOT to typecheck; the file is compiled
-- with -fdefer-type-errors and the resulting error text is compared against
-- the matching testFailNErrors list further down.  Do not "fix" these.
testFail1 :: Proxy (GCD 6 8) -> Proxy 4
testFail1 = id
testFail2 :: Proxy ((GCD 6 8) + x) -> Proxy (x + (GCD 6 9))
testFail2 = id
testFail3 :: Proxy (CLog 3 10) -> Proxy 2
testFail3 = id
testFail4 :: Proxy ((CLog 3 10) + x) -> Proxy (x + (CLog 2 9))
testFail4 = id
testFail5 :: Proxy (CLog 0 4) -> Proxy 100
testFail5 = id
testFail6 :: Proxy (CLog 1 4) -> Proxy 100
testFail6 = id
testFail7 :: Proxy (CLog 4 0) -> Proxy 0
testFail7 = id
testFail8 :: Proxy (CLog 1 (1^y)) -> Proxy y
testFail8 = id
testFail9 :: Proxy (CLog 0 (0^y)) -> Proxy y
testFail9 = id
testFail10 :: Integer
testFail10 = natVal (Proxy :: Proxy (CLog 1 4))
testFail11 :: Integer
testFail11 = natVal (Proxy :: Proxy ((CLog 4 4) - (CLog 2 4)))
testFail12 :: Proxy (Div 4 0) -> Proxy 4
testFail12 = id
testFail13 :: Proxy (Mod 4 0) -> Proxy 4
testFail13 = id
testFail14 :: Proxy (FLog 0 4) -> Proxy 100
testFail14 = id
testFail15 :: Proxy (FLog 1 4) -> Proxy 100
testFail15 = id
testFail16 :: Proxy (FLog 4 0) -> Proxy 0
testFail16 = id
testFail17 :: Proxy (LCM 6 8) -> Proxy 48
testFail17 = id
testFail18 :: Proxy ((LCM 6 8) + x) -> Proxy (x + (LCM 6 9))
testFail18 = id
testFail19 :: Integer
testFail19 = natVal (Proxy :: Proxy (Log 3 0))
testFail20 :: Integer
testFail20 = natVal (Proxy :: Proxy (Log 3 10))
testFail21 :: Proxy a -> Proxy b -> Proxy (Min a (a*b)) -> Proxy a
testFail21 _ _ = id
testFail22 :: Proxy a -> Proxy b -> Proxy (Max a (a*b)) -> Proxy (a*b)
testFail22 _ _ = id
testFail23' :: ((1 <=? Div l r) ~ False) => Proxy l -> Proxy r -> ()
testFail23' _ _ = ()
testFail23 :: ()
testFail23 = testFail23' (Proxy @18) (Proxy @3)
testFail24 :: Proxy x -> Proxy y -> Proxy z -> Proxy (z <=? Max x y) -> Proxy True
testFail24 _ _ _ = id
testFail25 :: Proxy x -> Proxy y -> Proxy (x+1 <=? Max x y) -> Proxy True
testFail25 _ _ = id
-- While n ~ (Max x y) implies x <= n (see test46), the reverse is not true.
testFail26' :: ((x <=? n) ~ True) => Proxy x -> Proxy y -> Proxy n -> Proxy ((Max x y)) -> Proxy n
testFail26' _ _ _ = id
testFail26 = testFail26' (Proxy @4) (Proxy @6) (Proxy @6)
testFail27 :: Proxy n -> Proxy (n + 2 <=? Max (n + 1) 1) -> Proxy True
testFail27 _ = id
-- Expected compiler-error fragments for the deferred-type-error definitions
-- above.  These strings must match GHC's error renderer, hence the CPP
-- split: GHC >= 9.0 switched to the "Expected:"/"Actual:" layout, and
-- GHC >= 9.2 reports (<=?) mismatches via Data.Type.Ord.OrdCond.
#if __GLASGOW_HASKELL__ >= 900
testFail1Errors =
  ["Expected: Proxy (GCD 6 8) -> Proxy 4"
  ,"  Actual: Proxy 4 -> Proxy 4"
  ]
testFail2Errors =
  ["Expected: Proxy (GCD 6 8 + x) -> Proxy (x + GCD 6 9)"
  ,"  Actual: Proxy (GCD 6 8 + x) -> Proxy (GCD 6 8 + x)"
  ]
testFail3Errors =
  ["Expected: Proxy (CLog 3 10) -> Proxy 2"
  ,"  Actual: Proxy 2 -> Proxy 2"
  ]
testFail4Errors =
  ["Expected: Proxy (CLog 3 10 + x) -> Proxy (x + CLog 2 9)"
  ,"  Actual: Proxy (CLog 3 10 + x) -> Proxy (CLog 3 10 + x)"
  ]
testFail5Errors =
  ["Expected: Proxy (CLog 0 4) -> Proxy 100"
  ,"  Actual: Proxy 100 -> Proxy 100"
  ]
testFail6Errors =
  ["Expected: Proxy (CLog 1 4) -> Proxy 100"
  ,"  Actual: Proxy 100 -> Proxy 100"
  ]
testFail7Errors =
  ["Expected: Proxy (CLog 4 0) -> Proxy 0"
  ,"  Actual: Proxy 0 -> Proxy 0"
  ]
testFail8Errors =
  ["Expected: Proxy (CLog 1 (1 ^ y)) -> Proxy y"
  ,"  Actual: Proxy y -> Proxy y"
  ]
testFail9Errors =
  ["Expected: Proxy (CLog 0 (0 ^ y)) -> Proxy y"
  ,"  Actual: Proxy y -> Proxy y"
  ]
testFail12Errors =
  ["Expected: Proxy (Div 4 0) -> Proxy 4"
  ,"  Actual: Proxy 4 -> Proxy 4"
  ]
testFail13Errors =
  ["Expected: Proxy (Mod 4 0) -> Proxy 4"
  ,"  Actual: Proxy 4 -> Proxy 4"
  ]
testFail14Errors =
  ["Expected: Proxy (FLog 0 4) -> Proxy 100"
  ,"  Actual: Proxy 100 -> Proxy 100"
  ]
testFail15Errors =
  ["Expected: Proxy (FLog 1 4) -> Proxy 100"
  ,"  Actual: Proxy 100 -> Proxy 100"
  ]
testFail16Errors =
  ["Expected: Proxy (FLog 4 0) -> Proxy 0"
  ,"  Actual: Proxy 0 -> Proxy 0"
  ]
testFail17Errors =
  ["Expected: Proxy (LCM 6 8) -> Proxy 48"
  ,"  Actual: Proxy 48 -> Proxy 48"
  ]
testFail18Errors =
  ["Expected: Proxy (LCM 6 8 + x) -> Proxy (x + LCM 6 9)"
  ,"  Actual: Proxy (LCM 6 8 + x) -> Proxy (LCM 6 8 + x)"
  ]
testFail19Errors =
  ["Couldn't match type: FLog 3 0"
  ,"               with: CLog 3 0"]
testFail20Errors =
  ["Couldn't match type: FLog 3 10"
  ,"               with: CLog 3 10"]
testFail21Errors =
  ["Expected: Proxy (Min a (a * b)) -> Proxy a"
  ,"  Actual: Proxy a -> Proxy a"
  ]
testFail22Errors =
  ["Expected: Proxy (Max a (a * b)) -> Proxy (a * b)"
  ,"  Actual: Proxy (Max a (a * b)) -> Proxy (Max a (a * b))"]
testFail27Errors =
  ["Expected: Proxy ((n + 2) <=? Max (n + 1) 1) -> Proxy 'True"
  ,"  Actual: Proxy 'True -> Proxy 'True"
  ]
#else
testFail1Errors =
  ["Expected type: Proxy (GCD 6 8) -> Proxy 4"
  ,"Actual type: Proxy 4 -> Proxy 4"
  ]
testFail2Errors =
  ["Expected type: Proxy (GCD 6 8 + x) -> Proxy (x + GCD 6 9)"
  ,"Actual type: Proxy (x + GCD 6 9) -> Proxy (x + GCD 6 9)"
  ]
testFail3Errors =
  ["Expected type: Proxy (CLog 3 10) -> Proxy 2"
  ,"Actual type: Proxy 2 -> Proxy 2"
  ]
testFail4Errors =
  ["Expected type: Proxy (CLog 3 10 + x) -> Proxy (x + CLog 2 9)"
  ,"Actual type: Proxy (x + CLog 2 9) -> Proxy (x + CLog 2 9)"
  ]
testFail5Errors =
  ["Expected type: Proxy (CLog 0 4) -> Proxy 100"
  ,"Actual type: Proxy 100 -> Proxy 100"
  ]
testFail6Errors =
  ["Expected type: Proxy (CLog 1 4) -> Proxy 100"
  ,"Actual type: Proxy 100 -> Proxy 100"
  ]
testFail7Errors =
  ["Expected type: Proxy (CLog 4 0) -> Proxy 0"
  ,"Actual type: Proxy 0 -> Proxy 0"
  ]
testFail8Errors =
  ["Expected type: Proxy (CLog 1 (1 ^ y)) -> Proxy y"
  ,"Actual type: Proxy y -> Proxy y"
  ]
testFail9Errors =
  ["Expected type: Proxy (CLog 0 (0 ^ y)) -> Proxy y"
  ,"Actual type: Proxy y -> Proxy y"
  ]
testFail12Errors =
  ["Expected type: Proxy (Div 4 0) -> Proxy 4"
  ,"Actual type: Proxy 4 -> Proxy 4"
  ]
testFail13Errors =
  ["Expected type: Proxy (Mod 4 0) -> Proxy 4"
  ,"Actual type: Proxy 4 -> Proxy 4"
  ]
testFail14Errors =
  ["Expected type: Proxy (FLog 0 4) -> Proxy 100"
  ,"Actual type: Proxy 100 -> Proxy 100"
  ]
testFail15Errors =
  ["Expected type: Proxy (FLog 1 4) -> Proxy 100"
  ,"Actual type: Proxy 100 -> Proxy 100"
  ]
testFail16Errors =
  ["Expected type: Proxy (FLog 4 0) -> Proxy 0"
  ,"Actual type: Proxy 0 -> Proxy 0"
  ]
testFail17Errors =
  ["Expected type: Proxy (LCM 6 8) -> Proxy 48"
  ,"Actual type: Proxy 48 -> Proxy 48"
  ]
testFail18Errors =
  ["Expected type: Proxy (LCM 6 8 + x) -> Proxy (x + LCM 6 9)"
  ,"Actual type: Proxy (x + LCM 6 9) -> Proxy (x + LCM 6 9)"
  ]
testFail19Errors =
  ["Couldn't match type ‘FLog 3 0’ with ‘CLog 3 0’"]
testFail20Errors =
  ["Couldn't match type ‘FLog 3 10’ with ‘CLog 3 10’"]
testFail21Errors =
  ["Expected type: Proxy (Min a (a * b)) -> Proxy a"
  ,"Actual type: Proxy a -> Proxy a"
  ]
testFail22Errors =
  ["Expected type: Proxy (Max a (a * b)) -> Proxy (a * b)"
  ,"Actual type: Proxy (a * b) -> Proxy (a * b)"]
testFail27Errors =
  ["Expected type: Proxy ((n + 2) <=? Max (n + 1) 1) -> Proxy 'True"
  ,"Actual type: Proxy 'True -> Proxy 'True"
  ]
#endif
testFail10Errors =
  ["Couldn't match type ‘'False’ with ‘'True’"]
testFail11Errors =
#if __GLASGOW_HASKELL__ >= 902
  ["Couldn't match type ‘Data.Type.Ord.OrdCond"
  ,"                       (CmpNat (CLog 2 4) (CLog 4 4)) 'True 'True 'False’"
  ,"with ‘'True’"]
#else
  ["Couldn't match type ‘CLog 2 4 <=? CLog 4 4’ with ‘'True’"]
#endif
testFail23Errors =
#if __GLASGOW_HASKELL__ >= 804
  ["Couldn't match type ‘'True’ with ‘'False’"]
#else
  ["Couldn't match type ‘1 <=? Div 18 3’ with ‘'False’"]
#endif
testFail24Errors =
#if __GLASGOW_HASKELL__ >= 902
  ["Couldn't match type ‘Data.Type.Ord.OrdCond"
  ,"                       (CmpNat z (Max x y)) 'True 'True 'False’"
  ,"with ‘'True’"]
#else
  ["Couldn't match type ‘z <=? Max x y’ with ‘'True’"]
#endif
testFail25Errors =
#if __GLASGOW_HASKELL__ >= 902
  ["Couldn't match type ‘Data.Type.Ord.OrdCond"
  ,"                       (CmpNat (x + 1) (Max x y)) 'True 'True 'False’"
  ,"with ‘'True’"]
#else
  ["Couldn't match type ‘(x + 1) <=? Max x y’ with ‘'True’"]
#endif
testFail26Errors =
  ["Could not deduce: Max x y ~ n"
  ,"from the context: (x <=? n) ~ 'True"
  ]
| christiaanb/ghc-typelits-extra | tests/ErrorTests.hs | bsd-2-clause | 8,568 | 0 | 13 | 1,944 | 1,495 | 805 | 690 | 140 | 1 |
module Notify (testNotify) where
import Common
import Control.Applicative
import Control.Concurrent
import Control.Monad
import Data.Function
import Data.List
import Database.PostgreSQL.Simple.Notification
import qualified Data.ByteString as B
-- TODO: Test with payload, but only for PostgreSQL >= 9.0
-- (when that feature was introduced).
-- | LISTEN on two channels, NOTIFY them from a second connection, and check
-- that both notifications arrive with the right channel names, empty
-- payloads, and a matching server PID.
testNotify :: TestEnv -> Assertion
testNotify TestEnv{..} =
    withConn $ \conn2 -> do
      execute_ conn "LISTEN foo"
      execute_ conn "LISTEN bar"
      results_mv <- newEmptyMVar
      -- Collect both notifications on a helper thread; getNotification
      -- blocks until one arrives.
      forkIO $ replicateM 2 (getNotification conn)
                 >>= putMVar results_mv
      -- NOTE(review): fixed delay to give the listener thread time to start
      -- before the NOTIFYs fire — timing-based, so in principle racy.
      threadDelay 100000
      execute_ conn2 "NOTIFY foo"
      execute_ conn2 "NOTIFY bar"
      -- Sort by channel so arrival order doesn't matter; the list pattern
      -- throws if fewer/more than two notifications are delivered.
      [n1, n2] <- sortBy (compare `on` notificationChannel)
                    <$> takeMVar results_mv
      assertEqual "n1" "bar" (notificationChannel n1)
      assertEqual "n2" "foo" (notificationChannel n2)
      -- Other sanity checks
      assertEqual "Server PIDs match" (notificationPid n1) (notificationPid n2)
      assertBool "notificationData is empty" $
          all (B.null . notificationData) [n1, n2]
| tomjaguarpaw/postgresql-simple | test/Notify.hs | bsd-3-clause | 1,163 | 0 | 13 | 286 | 273 | 138 | 135 | -1 | -1 |
module Main where
-- Deliberately odd 'where' layout with unbound names ('baz', 'bax'): this
-- file is a round-trip fixture for layout normalisation, not meant to
-- compile cleanly.  Do not reformat.
foo x = baz
  where foo = 2
two = 4 where bax = 4
| mpickering/ghc-exactprint | tests/examples/transform/NormaliseLayout.hs | bsd-3-clause | 77 | 0 | 8 | 29 | 31 | 18 | 13 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import GitHub.Data.Id (Id (..))
import qualified GitHub.Endpoints.Users.PublicSSHKeys as PK
import qualified GitHub.Auth as Auth
-- | Fetch one public SSH key by numeric id using a hard-coded OAuth token
-- and print either the key or the error.
main :: IO ()
main = do
  result <- PK.publicSSHKey' (Auth.OAuth "auth_token") (Id 18528451)
  case result of
    Left err -> putStrLn ("Error: " ++ show err)
    Right key -> print key
| jwiegley/github | samples/Users/PublicSSHKeys/ShowPublicSSHKey.hs | bsd-3-clause | 441 | 0 | 12 | 76 | 144 | 79 | 65 | 12 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Route53.ListHostedZones
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | To retrieve a list of your hosted zones, send a 'GET' request to the '2013-04-01/hostedzone' resource. The response to this request includes a 'HostedZones' element with
-- zero, one, or multiple 'HostedZone' child elements. By default, the list of
-- hosted zones is displayed on a single page. You can control the length of the
-- page that is displayed by using the 'MaxItems' parameter. You can use the 'Marker'
-- parameter to control the hosted zone that the list begins with.
--
-- Amazon Route 53 returns a maximum of 100 items. If you set MaxItems to a
-- value greater than 100, Amazon Route 53 returns only the first 100.
--
-- <http://docs.aws.amazon.com/Route53/latest/APIReference/API_ListHostedZones.html>
module Network.AWS.Route53.ListHostedZones
(
-- * Request
ListHostedZones
-- ** Request constructor
, listHostedZones
-- ** Request lenses
, lhzDelegationSetId
, lhzMarker
, lhzMaxItems
-- * Response
, ListHostedZonesResponse
-- ** Response constructor
, listHostedZonesResponse
-- ** Response lenses
, lhzrHostedZones
, lhzrIsTruncated
, lhzrMarker
, lhzrMaxItems
, lhzrNextMarker
) where
import Network.AWS.Prelude
import Network.AWS.Request.RestXML
import Network.AWS.Route53.Types
import qualified GHC.Exts
-- | The Route53 @ListHostedZones@ request; all three fields are optional
-- paging\/filtering controls.
data ListHostedZones = ListHostedZones
    { _lhzDelegationSetId :: Maybe Text -- ^ restrict to one delegation set
    , _lhzMarker          :: Maybe Text -- ^ resume token from a previous page
    , _lhzMaxItems        :: Maybe Text -- ^ page-size cap, as decimal text
    } deriving (Eq, Ord, Read, Show)
-- | Build a 'ListHostedZones' request with every field unset.
--
-- Settable fields (via their lenses): 'lhzDelegationSetId', 'lhzMarker'
-- and 'lhzMaxItems'.
listHostedZones :: ListHostedZones
listHostedZones = ListHostedZones
    { _lhzDelegationSetId = Nothing
    , _lhzMarker          = Nothing
    , _lhzMaxItems        = Nothing
    }
-- | Restrict the listing to hosted zones of one delegation set.
lhzDelegationSetId :: Lens' ListHostedZones (Maybe Text)
lhzDelegationSetId =
    lens _lhzDelegationSetId (\rq v -> rq { _lhzDelegationSetId = v })

-- | To fetch the next page of a truncated listing, pass the previous
-- response's 'NextMarker' value here.
lhzMarker :: Lens' ListHostedZones (Maybe Text)
lhzMarker = lens _lhzMarker (\rq v -> rq { _lhzMarker = v })

-- | Maximum number of hosted zones to return per page.
lhzMaxItems :: Lens' ListHostedZones (Maybe Text)
lhzMaxItems = lens _lhzMaxItems (\rq v -> rq { _lhzMaxItems = v })
-- | One page of hosted zones plus the paging bookkeeping needed to fetch
-- the next one.
data ListHostedZonesResponse = ListHostedZonesResponse
    { _lhzrHostedZones :: List "HostedZone" HostedZone -- ^ zones on this page
    , _lhzrIsTruncated :: Bool       -- ^ whether more pages remain
    , _lhzrMarker      :: Maybe Text -- ^ marker this page was requested with
    , _lhzrMaxItems    :: Text       -- ^ page-size cap that was applied
    , _lhzrNextMarker  :: Maybe Text -- ^ where the next page starts
    } deriving (Eq, Read, Show)
-- | Build a 'ListHostedZonesResponse' from the two required fields; the
-- hosted-zone list starts empty and both markers start unset.
--
-- Other fields, settable via lenses: 'lhzrHostedZones', 'lhzrMarker' and
-- 'lhzrNextMarker'.
listHostedZonesResponse :: Bool -- ^ 'lhzrIsTruncated'
                        -> Text -- ^ 'lhzrMaxItems'
                        -> ListHostedZonesResponse
listHostedZonesResponse truncated maxItems = ListHostedZonesResponse
    { _lhzrHostedZones = mempty
    , _lhzrIsTruncated = truncated
    , _lhzrMarker      = Nothing
    , _lhzrMaxItems    = maxItems
    , _lhzrNextMarker  = Nothing
    }
-- | The hosted zones associated with the current AWS account.
lhzrHostedZones :: Lens' ListHostedZonesResponse [HostedZone]
lhzrHostedZones =
    lens _lhzrHostedZones (\rs v -> rs { _lhzrHostedZones = v }) . _List

-- | Whether more hosted zones remain beyond this page.  When 'True', pass
-- 'lhzrNextMarker' back via 'lhzMarker' to fetch the next page.
lhzrIsTruncated :: Lens' ListHostedZonesResponse Bool
lhzrIsTruncated = lens _lhzrIsTruncated (\rs v -> rs { _lhzrIsTruncated = v })

-- | The marker this page of results was requested with, if any.
lhzrMarker :: Lens' ListHostedZonesResponse (Maybe Text)
lhzrMarker = lens _lhzrMarker (\rs v -> rs { _lhzrMarker = v })

-- | The page-size cap that was applied to this response; when the account
-- has more zones than this, 'lhzrIsTruncated' is 'True'.
lhzrMaxItems :: Lens' ListHostedZonesResponse Text
lhzrMaxItems = lens _lhzrMaxItems (\rs v -> rs { _lhzrMaxItems = v })

-- | Where to continue listing when 'lhzrIsTruncated' is 'True': feed this
-- value into the next request's 'lhzMarker'.
lhzrNextMarker :: Lens' ListHostedZonesResponse (Maybe Text)
lhzrNextMarker = lens _lhzrNextMarker (\rs v -> rs { _lhzrNextMarker = v })
-- Wire format: a REST GET against the fixed 2013-04-01 path; query keys and
-- XML element names below must match Route53's API exactly.  Paging is
-- driven by NextMarker via the AWSPager instance.
instance ToPath ListHostedZones where
    toPath = const "/2013-04-01/hostedzone"
instance ToQuery ListHostedZones where
    toQuery ListHostedZones{..} = mconcat
        [ "delegationsetid" =? _lhzDelegationSetId
        , "marker"          =? _lhzMarker
        , "maxitems"        =? _lhzMaxItems
        ]
instance ToHeaders ListHostedZones
instance ToXMLRoot ListHostedZones where
    toXMLRoot = const (namespaced ns "ListHostedZones" [])
instance ToXML ListHostedZones
instance AWSRequest ListHostedZones where
    type Sv ListHostedZones = Route53
    type Rs ListHostedZones = ListHostedZonesResponse
    request  = get
    response = xmlResponse
instance FromXML ListHostedZonesResponse where
    parseXML x = ListHostedZonesResponse
        <$> x .@? "HostedZones" .!@ mempty
        <*> x .@ "IsTruncated"
        <*> x .@? "Marker"
        <*> x .@ "MaxItems"
        <*> x .@? "NextMarker"
instance AWSPager ListHostedZones where
    page rq rs
        | stop (rs ^. lhzrIsTruncated) = Nothing
        | otherwise = Just $ rq
            & lhzMarker .~ rs ^. lhzrNextMarker
| kim/amazonka | amazonka-route53/gen/Network/AWS/Route53/ListHostedZones.hs | mpl-2.0 | 7,593 | 0 | 16 | 1,627 | 960 | 568 | 392 | 99 | 1 |
{- Linux library copier and binary shimmer
-
- Copyright 2013 Joey Hess <joey@kitenet.net>
-
- License: BSD-2-clause
-}
module Utility.LinuxMkLibs where
import Control.Applicative
import Data.Maybe
import System.Directory
import System.FilePath
import Data.List.Utils
import System.Posix.Files
import Data.Char
import Control.Monad.IfElse
import Utility.PartialPrelude
import Utility.Directory
import Utility.Process
import Utility.Monad
import Utility.Path
{- Installs a library. If the library is a symlink to another file,
 - install the file it links to, and update the symlink to be relative.
 -
 - Returns the directory the library was found in, or Nothing when the
 - given path does not exist as a regular file.
 -}
installLib :: (FilePath -> FilePath -> IO ()) -> FilePath -> FilePath -> IO (Maybe FilePath)
installLib installfile top lib = ifM (doesFileExist lib)
    ( do
        installfile top lib
        checksymlink lib
        return $ Just $ takeDirectory lib
    , return Nothing
    )
  where
    -- If the installed file (checked inside the top directory) is a
    -- symlink, also install its target, rewrite the installed link to a
    -- relative one, and recurse in case the target is itself a symlink.
    checksymlink f = whenM (isSymbolicLink <$> getSymbolicLinkStatus (inTop top f)) $ do
        l <- readSymbolicLink (inTop top f)
        -- Resolve the link target against the link's own directory,
        -- then re-express it relative to that directory.
        let absl = absPathFrom (takeDirectory f) l
        let target = relPathDirToFile (takeDirectory f) absl
        installfile top absl
        nukeFile (top ++ f)
        createSymbolicLink target (inTop top f)
        checksymlink absl
-- Prefix a path with the top directory by plain concatenation.
-- Note that f is not relative, so cannot use </>
inTop :: FilePath -> FilePath -> FilePath
inTop = (++)
{- Parse ldd output, getting all the libraries that the input files
 - link to. Note that some of the libraries may not exist
 - (eg, linux-vdso.so) -}
parseLdd :: String -> [FilePath]
parseLdd = mapMaybe (parseline . dropWhile isSpace) . lines
  where
    -- An interesting line looks like "libfoo.so => /path/libfoo.so (addr)";
    -- keep the first word after the final " => ", if any.
    parseline l = lastMaybe (split " => " l) >>= headMaybe . words
{- Get all glibc libs and other support files, including gconv files
 -
 - XXX Debian specific. -}
glibcLibs :: IO [FilePath]
-- Asks dpkg for the file lists of libc6 and libgcc1 (for the current
-- architecture) and keeps only shared objects and gconv support files.
glibcLibs = lines <$> readProcess "sh"
    ["-c", "dpkg -L libc6:$(dpkg --print-architecture) libgcc1:$(dpkg --print-architecture) | egrep '\\.so|gconv'"]
| avengerpenguin/propellor | src/Utility/LinuxMkLibs.hs | bsd-2-clause | 1,911 | 6 | 15 | 331 | 444 | 226 | 218 | 37 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Loading interface files
-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module LoadIface (
-- Importing one thing
tcLookupImported_maybe, importDecl,
checkWiredInTyCon, ifCheckWiredInThing,
-- RnM/TcM functions
loadModuleInterface, loadModuleInterfaces,
loadSrcInterface, loadSrcInterface_maybe,
loadInterfaceForName, loadInterfaceForModule,
-- IfM functions
loadInterface,
loadSysInterface, loadUserInterface, loadPluginInterface,
findAndReadIface, readIface, -- Used when reading the module's old interface
loadDecls, -- Should move to TcIface and be renamed
initExternalPackageState,
moduleFreeHolesPrecise,
pprModIfaceSimple,
ifaceStats, pprModIface, showIface
) where
#include "HsVersions.h"
import GhcPrelude
import {-# SOURCE #-} TcIface( tcIfaceDecl, tcIfaceRules, tcIfaceInst,
tcIfaceFamInst, tcIfaceVectInfo,
tcIfaceAnnotations, tcIfaceCompleteSigs )
import DynFlags
import IfaceSyn
import IfaceEnv
import HscTypes
import BasicTypes hiding (SuccessFlag(..))
import TcRnMonad
import Constants
import PrelNames
import PrelInfo
import PrimOp ( allThePrimOps, primOpFixity, primOpOcc )
import MkId ( seqId )
import TysPrim ( funTyConName )
import Rules
import TyCon
import Annotations
import InstEnv
import FamInstEnv
import Name
import NameEnv
import Avail
import Module
import Maybes
import ErrUtils
import Finder
import UniqFM
import SrcLoc
import Outputable
import BinIface
import Panic
import Util
import FastString
import Fingerprint
import Hooks
import FieldLabel
import RnModIface
import UniqDSet
import Control.Monad
import Control.Exception
import Data.IORef
import System.FilePath
{-
************************************************************************
* *
* tcImportDecl is the key function for "faulting in" *
* imported things
* *
************************************************************************
The main idea is this. We are chugging along type-checking source code, and
find a reference to GHC.Base.map. We call tcLookupGlobal, which doesn't find
it in the EPS type envt. So it
1 loads GHC.Base.hi
2 gets the decl for GHC.Base.map
3 typechecks it via tcIfaceDecl
4 and adds it to the type env in the EPS
Note that DURING STEP 4, we may find that map's type mentions a type
constructor that also needs to be loaded; the same faulting-in process is
then repeated (lazily) for that type constructor.
Notice that for imported things we read the current version from the EPS
mutable variable. This is important in situations like
...$(e1)...$(e2)...
where the code that e1 expands to might import some defns that
also turn out to be needed by the code that e2 expands to.
-}
-- | Look up an imported 'Name', consulting the combined HPT/EPS type
-- environment first and faulting the declaration in from an interface
-- file otherwise.  Returns @Failed err@ if we can't find the interface
-- file for the thing.
tcLookupImported_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
tcLookupImported_maybe name = do
    hsc_env  <- getTopEnv
    mb_thing <- liftIO (lookupTypeHscEnv hsc_env name)
    case mb_thing of
        Just thing -> return (Succeeded thing)
        Nothing    -> tcImportDecl_maybe name
-- | Entry point for *source-code* uses of 'importDecl'.  A wired-in
-- thing is returned directly (after making sure its home interface has
-- been read, for the sake of its instances and rules); anything else is
-- faulted in from its interface file.
tcImportDecl_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
tcImportDecl_maybe name =
    case wiredInNameTyThing_maybe name of
        Nothing -> initIfaceTcRn (importDecl name)
        Just thing -> do
            -- See Note [Loading instances for wired-in things]
            when (needWiredInHomeIface thing) $
                initIfaceTcRn (loadWiredInHomeIface name)
            return (Succeeded thing)
importDecl :: Name -> IfM lcl (MaybeErr MsgDoc TyThing)
-- Get the TyThing for this Name from an interface file
-- It's not a wired-in thing -- the caller caught that
importDecl name
  = ASSERT( not (isWiredInName name) )
    do  { traceIf nd_doc

        -- Load the interface, which should populate the PTE
        ; mb_iface <- ASSERT2( isExternalName name, ppr name )
                      loadInterface nd_doc (nameModule name) ImportBySystem
        ; case mb_iface of {
                Failed err_msg -> return (Failed err_msg) ;
                Succeeded _ -> do

        -- Now look it up again; this time we should find it
        -- (loadInterface added the module's decls to the EPS PTE)
        { eps <- getEps
        ; case lookupTypeEnv (eps_PTE eps) name of
            Just thing -> return $ Succeeded thing
            Nothing    -> let doc = whenPprDebug (found_things_msg eps $$ empty)
                                    $$ not_found_msg
                          in return $ Failed doc
        }}}
  where
    nd_doc = text "Need decl for" <+> ppr name
    -- Shown when the interface loaded but the declaration is missing;
    -- usually indicates an inconsistent or bogus .hi/.hi-boot file.
    not_found_msg = hang (text "Can't find interface-file declaration for" <+>
                          pprNameSpace (occNameSpace (nameOccName name)) <+> ppr name)
                       2 (vcat [text "Probable cause: bug in .hi-boot file, or inconsistent .hi file",
                                text "Use -ddump-if-trace to get an idea of which file caused the error"])
    -- Debug aid: list what *was* found in the module, to contrast with
    -- the name we failed to find.
    found_things_msg eps =
        hang (text "Found the following declarations in" <+> ppr (nameModule name) <> colon)
           2 (vcat (map ppr $ filter is_interesting $ nameEnvElts $ eps_PTE eps))
      where
        is_interesting thing = nameModule name == nameModule (getName thing)
{-
************************************************************************
* *
Checks for wired-in things
* *
************************************************************************
Note [Loading instances for wired-in things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to make sure that we have at least *read* the interface files
for any module with an instance decl or RULE that we might want.
* If the instance decl is an orphan, we have a whole separate mechanism
(loadOrphanModules)
* If the instance decl is not an orphan, then the act of looking at the
TyCon or Class will force in the defining module for the
TyCon/Class, and hence the instance decl
* BUT, if the TyCon is a wired-in TyCon, we don't really need its interface;
but we must make sure we read its interface in case it has instances or
rules. That is what LoadIface.loadWiredInHomeIface does. It's called
from TcIface.{tcImportDecl, checkWiredInTyCon, ifCheckWiredInThing}
* HOWEVER, only do this for TyCons. There are no wired-in Classes. There
are some wired-in Ids, but we don't want to load their interfaces. For
example, Control.Exception.Base.recSelError is wired in, but that module
is compiled late in the base library, and we don't want to force it to
load before it's been compiled!
All of this is done by the type checker. The renamer plays no role.
(It used to, but no longer.)
-}
checkWiredInTyCon :: TyCon -> TcM ()
-- Ensure that the home module of the TyCon (and hence its instances)
-- are loaded. See Note [Loading instances for wired-in things]
-- It might not be a wired-in tycon (see the calls in TcUnify),
-- in which case this is a no-op.
checkWiredInTyCon tc
  | not (isWiredInName tc_name)
  = return ()
  | otherwise
  = do  { mod <- getModule
        ; traceIf (text "checkWiredInTyCon" <+> ppr tc_name $$ ppr mod)
        ; ASSERT( isExternalName tc_name )
          when (mod /= nameModule tc_name)
               (initIfaceTcRn (loadWiredInHomeIface tc_name))
                -- Don't look for (non-existent) Float.hi when
                -- compiling Float.hs, which mentions Float of course
                -- A bit yukky to call initIfaceTcRn here
        }
  where
    tc_name = tyConName tc
ifCheckWiredInThing :: TyThing -> IfL ()
-- Even though we are in an interface file, we want to make
-- sure the instances of a wired-in thing are loaded (imagine f :: Double -> Double)
-- Ditto want to ensure that RULES are loaded too
-- See Note [Loading instances for wired-in things]
ifCheckWiredInThing thing
  = do  { mod <- getIfModule
                -- Check whether we are typechecking the interface for this
                -- very module.  E.g when compiling the base library in --make mode
                -- we may typecheck GHC.Base.hi. At that point, GHC.Base is not in
                -- the HPT, so without the test we'll demand-load it into the PIT!
                -- C.f. the same test in checkWiredInTyCon above
        ; let name = getName thing
        ; ASSERT2( isExternalName name, ppr name )
          when (needWiredInHomeIface thing && mod /= nameModule name)
               (loadWiredInHomeIface name) }
-- | Do we need to read the home interface of this wired-in thing in
-- order to see its instances and rules?
-- Only for TyCons; see Note [Loading instances for wired-in things]
needWiredInHomeIface :: TyThing -> Bool
needWiredInHomeIface thing = case thing of
    ATyCon {} -> True
    _         -> False
{-
************************************************************************
* *
loadSrcInterface, loadOrphanModules, loadInterfaceForName
These three are called from TcM-land
* *
************************************************************************
-}
-- | Load the interface corresponding to an @import@ directive in
-- source code.  On a failure, fail in the monad with an error message.
loadSrcInterface :: SDoc
                 -> ModuleName
                 -> IsBootInterface     -- {-# SOURCE #-} ?
                 -> Maybe FastString    -- "package", if any
                 -> RnM ModIface
loadSrcInterface doc mod want_boot maybe_pkg = do
    res <- loadSrcInterface_maybe doc mod want_boot maybe_pkg
    case res of
        Failed err      -> failWithTc err
        Succeeded iface -> return iface
-- | Like 'loadSrcInterface', but returns a 'MaybeErr'.
loadSrcInterface_maybe :: SDoc
                       -> ModuleName
                       -> IsBootInterface     -- {-# SOURCE #-} ?
                       -> Maybe FastString    -- "package", if any
                       -> RnM (MaybeErr MsgDoc ModIface)
loadSrcInterface_maybe doc mod want_boot maybe_pkg
  -- We must first find which Module this import refers to.  This involves
  -- calling the Finder, which as a side effect will search the filesystem
  -- and create a ModLocation.  If successful, loadIface will read the
  -- interface; it will call the Finder again, but the ModLocation will be
  -- cached from the first search.
  = do { hsc_env <- getTopEnv
       ; res <- liftIO $ findImportedModule hsc_env mod maybe_pkg
       ; case res of
           Found _ mod -> initIfaceTcRn $ loadInterface doc mod (ImportByUser want_boot)
           -- TODO: Make sure this error message is good
           err         -> return (Failed (cannotFindModule (hsc_dflags hsc_env) mod err)) }
-- | Load interface directly for a fully qualified 'Module'.  (This is a fairly
-- rare operation, but in particular it is used to load orphan modules
-- in order to pull their instances into the global package table and to
-- handle some operations in GHCi).
loadModuleInterface :: SDoc -> Module -> TcM ModIface
loadModuleInterface doc m = initIfaceTcRn (loadSysInterface doc m)
-- | Load interfaces for a collection of modules.
loadModuleInterfaces :: SDoc -> [Module] -> TcM ()
loadModuleInterfaces doc mods = case mods of
    [] -> return ()   -- nothing to do; don't even enter the Iface monad
    _  -> initIfaceTcRn (mapM_ one mods)
  where
    one m = loadSysInterface (doc <+> parens (ppr m)) m
-- | Loads the interface for a given Name.
-- Should only be called for an imported name;
-- otherwise loadSysInterface may not find the interface
loadInterfaceForName :: SDoc -> Name -> TcRn ModIface
loadInterfaceForName doc name
  = do { when debugIsOn $  -- Check pre-condition: the name must not be
                           -- local to the module being compiled
         do { this_mod <- getModule
            ; MASSERT2( not (nameIsLocalOrFrom this_mod name), ppr name <+> parens doc ) }
       ; ASSERT2( isExternalName name, ppr name )
         initIfaceTcRn $ loadSysInterface doc (nameModule name) }
-- | Loads the interface for a given Module.
loadInterfaceForModule :: SDoc -> Module -> TcRn ModIface
loadInterfaceForModule doc m
  = do
    -- Should not be called with this module: its interface would be
    -- sought on disk even though we are in the middle of compiling it.
    when debugIsOn $ do
      this_mod <- getModule
      MASSERT2( this_mod /= m, ppr m <+> parens doc )
    initIfaceTcRn $ loadSysInterface doc m
{-
*********************************************************
* *
loadInterface
The main function to load an interface
for an imported module, and put it in
the External Package State
* *
*********************************************************
-}
-- | An 'IfM' function to load the home interface for a wired-in thing,
-- so that we're sure that we see its instance declarations and rules
-- See Note [Loading instances for wired-in things]
loadWiredInHomeIface :: Name -> IfM lcl ()
loadWiredInHomeIface name
  = ASSERT( isWiredInName name )
    -- Loaded only for its effect on the EPS; the interface itself is
    -- deliberately discarded.
    do _ <- loadSysInterface doc (nameModule name); return ()
  where
    doc = text "Need home interface for wired-in thing" <+> ppr name
------------------
-- | Loads a system interface and throws an exception if it fails
loadSysInterface :: SDoc -> Module -> IfM lcl ModIface
loadSysInterface doc m = loadInterfaceWithException doc m ImportBySystem

------------------
-- | Loads a user interface and throws an exception if it fails.  The
-- first parameter indicates whether we should import the boot variant
-- of the module.
loadUserInterface :: Bool -> SDoc -> Module -> IfM lcl ModIface
loadUserInterface is_boot doc m =
    loadInterfaceWithException doc m (ImportByUser is_boot)

-- | Loads an interface on behalf of a compiler plugin, throwing an
-- exception if it fails.
loadPluginInterface :: SDoc -> Module -> IfM lcl ModIface
loadPluginInterface doc m =
    loadInterfaceWithException doc m ImportByPlugin

------------------
-- | A wrapper for 'loadInterface' that throws an exception if it fails
loadInterfaceWithException :: SDoc -> Module -> WhereFrom -> IfM lcl ModIface
loadInterfaceWithException doc m from =
    withException (loadInterface doc m from)
------------------
loadInterface :: SDoc -> Module -> WhereFrom
              -> IfM lcl (MaybeErr MsgDoc ModIface)

-- loadInterface looks in both the HPT and PIT for the required interface
-- If not found, it loads it, and puts it in the PIT (always).
-- If it can't find a suitable interface file, we
--      a) modify the PackageIfaceTable to have an empty entry
--         (to avoid repeated complaints)
--      b) return (Left message)
--
-- It's not necessarily an error for there not to be an interface
-- file -- perhaps the module has changed, and that interface
-- is no longer used

loadInterface doc_str mod from
  | isHoleModule mod
  -- Hole modules get special treatment
  = do dflags <- getDynFlags
       -- Redo search for our local hole module
       loadInterface doc_str (mkModule (thisPackage dflags) (moduleName mod)) from
  | otherwise
  = do  {       -- Read the state
          (eps,hpt) <- getEpsAndHpt
        ; gbl_env <- getGblEnv

        ; traceIf (text "Considering whether to load" <+> ppr mod <+> ppr from)

                -- Check whether we have the interface already
        ; dflags <- getDynFlags
        ; case lookupIfaceByModule dflags hpt (eps_PIT eps) mod of {
            Just iface
                -> return (Succeeded iface) ;   -- Already loaded
                        -- The (src_imp == mi_boot iface) test checks that the already-loaded
                        -- interface isn't a boot iface.  This can conceivably happen,
                        -- if an earlier import had a {-# SOURCE #-} import before we got
                        -- to real imports.  I think.
            _ -> do {

        -- READ THE MODULE IN
        ; read_result <- case (wantHiBootFile dflags eps mod from) of
                           Failed err -> return (Failed err)
                           Succeeded hi_boot_file ->
                            -- Stoutly warn against an EPS-updating import
                            -- of one's own boot file! (one-shot only)
                            -- See Note [Do not update EPS with your own hi-boot]
                            -- in MkIface.
                            WARN( hi_boot_file &&
                                  fmap fst (if_rec_types gbl_env) == Just mod,
                                  ppr mod )
                            computeInterface doc_str hi_boot_file mod
        ; case read_result of {
            Failed err -> do
                { let fake_iface = emptyModIface mod

                ; updateEps_ $ \eps ->
                    eps { eps_PIT = extendModuleEnv (eps_PIT eps) (mi_module fake_iface) fake_iface }
                        -- Not found, so add an empty iface to
                        -- the EPS map so that we don't look again

                ; return (Failed err) } ;

        -- Found and parsed!
        -- We used to have a sanity check here that looked for:
        --  * System importing ..
        --  * a home package module ..
        --  * that we know nothing about (mb_dep == Nothing)!
        --
        -- But this is no longer valid because thNameToGhcName allows users to
        -- cause the system to load arbitrary interfaces (by supplying an appropriate
        -- Template Haskell original-name).
            Succeeded (iface, loc) ->
        let
            loc_doc = text loc
        in
        initIfaceLcl (mi_semantic_module iface) loc_doc (mi_boot iface) $ do

        --      Load the new ModIface into the External Package State
        --      Even home-package interfaces loaded by loadInterface
        --      (which only happens in OneShot mode; in Batch/Interactive
        --      mode, home-package modules are loaded one by one into the HPT)
        --      are put in the EPS.
        --
        -- The main thing is to add the ModIface to the PIT, but
        -- we also take the
        --      IfaceDecls, IfaceClsInst, IfaceFamInst, IfaceRules, IfaceVectInfo
        -- out of the ModIface and put them into the big EPS pools

        -- NB: *first* we do loadDecls, so that the provenance of all the locally-defined
        ---    names is done correctly (notably, whether this is an .hi file or .hi-boot file).
        --     If we do loadExport first the wrong info gets into the cache (unless we
        --     explicitly tag each export which seems a bit of a bore)

        ; ignore_prags      <- goptM Opt_IgnoreInterfacePragmas
        ; new_eps_decls     <- loadDecls ignore_prags (mi_decls iface)
        ; new_eps_insts     <- mapM tcIfaceInst (mi_insts iface)
        ; new_eps_fam_insts <- mapM tcIfaceFamInst (mi_fam_insts iface)
        ; new_eps_rules     <- tcIfaceRules ignore_prags (mi_rules iface)
        ; new_eps_anns      <- tcIfaceAnnotations (mi_anns iface)
        ; new_eps_vect_info <- tcIfaceVectInfo mod (mkNameEnv new_eps_decls) (mi_vect_info iface)
        ; new_eps_complete_sigs <- tcIfaceCompleteSigs (mi_complete_sigs iface)

        -- The version of the iface that lives in the PIT has its bulky
        -- payload fields panicked out: those parts now live in the EPS
        -- pools instead, and must never be consulted via the PIT copy.
        ; let { final_iface = iface {
                                mi_decls     = panic "No mi_decls in PIT",
                                mi_insts     = panic "No mi_insts in PIT",
                                mi_fam_insts = panic "No mi_fam_insts in PIT",
                                mi_rules     = panic "No mi_rules in PIT",
                                mi_anns      = panic "No mi_anns in PIT"
                              }
               }

        ; updateEps_ $ \ eps ->
           if elemModuleEnv mod (eps_PIT eps) || is_external_sig dflags iface
                then eps
                else
                 eps {
                   eps_PIT          = extendModuleEnv (eps_PIT eps) mod final_iface,
                   eps_PTE          = addDeclsToPTE (eps_PTE eps) new_eps_decls,
                   eps_rule_base    = extendRuleBaseList (eps_rule_base eps)
                                                         new_eps_rules,
                   eps_complete_matches
                                    = extendCompleteMatchMap
                                          (eps_complete_matches eps)
                                          new_eps_complete_sigs,
                   eps_inst_env     = extendInstEnvList (eps_inst_env eps)
                                                        new_eps_insts,
                   eps_fam_inst_env = extendFamInstEnvList (eps_fam_inst_env eps)
                                                           new_eps_fam_insts,
                   eps_vect_info    = plusVectInfo (eps_vect_info eps)
                                                   new_eps_vect_info,
                   eps_ann_env      = extendAnnEnvList (eps_ann_env eps)
                                                       new_eps_anns,
                   eps_mod_fam_inst_env
                                    = let
                                        fam_inst_env =
                                          extendFamInstEnvList emptyFamInstEnv
                                                               new_eps_fam_insts
                                      in
                                      extendModuleEnv (eps_mod_fam_inst_env eps)
                                                      mod
                                                      fam_inst_env,
                   eps_stats        = addEpsInStats (eps_stats eps)
                                                    (length new_eps_decls)
                                                    (length new_eps_insts)
                                                    (length new_eps_rules) }

        ; return (Succeeded final_iface)
    }}}}
-- | Returns @True@ if a 'ModIface' comes from an external package.
-- In this case, we should NOT load it into the EPS; the entities
-- should instead come from the local merged signature interface.
is_external_sig :: DynFlags -> ModIface -> Bool
is_external_sig dflags iface =
       -- It's a signature iface...
       mi_semantic_module iface /= mi_module iface
       -- and it's not from the local package
    && moduleUnitId (mi_module iface) /= thisPackage dflags
-- | This is an improved version of 'findAndReadIface' which can also
-- handle the case when a user requests @p[A=<B>]:M@ but we only
-- have an interface for @p[A=<A>]:M@ (the indefinite interface).
-- If we are not trying to build code, we load the interface we have,
-- *instantiating it* according to how the holes are specified.
-- (Of course, if we're actually building code, this is a hard error.)
--
-- In the presence of holes, 'computeInterface' has an important invariant:
-- to load module M, its set of transitively reachable requirements must
-- have an up-to-date local hi file for that requirement. Note that if
-- we are loading the interface of a requirement, this does not
-- apply to the requirement itself; e.g., @p[A=<A>]:A@ does not require
-- A.hi to be up-to-date (and indeed, we MUST NOT attempt to read A.hi, unless
-- we are actually typechecking p.)
computeInterface ::
       SDoc -> IsBootInterface -> Module
    -> TcRnIf gbl lcl (MaybeErr MsgDoc (ModIface, FilePath))
computeInterface doc_str hi_boot_file mod0 = do
    MASSERT( not (isHoleModule mod0) )
    dflags <- getDynFlags
    case splitModuleInsts mod0 of
        -- Indefinite instantiation while typechecking an indefinite
        -- unit: read the uninstantiated interface and rename it
        -- according to the hole instantiation.
        (imod, Just indef) | not (unitIdIsDefinite (thisPackage dflags)) -> do
            r <- findAndReadIface doc_str imod mod0 hi_boot_file
            case r of
                Succeeded (iface0, path) -> do
                    hsc_env <- getTopEnv
                    r <- liftIO $
                            rnModIface hsc_env (indefUnitIdInsts (indefModuleUnitId indef))
                                       Nothing iface0
                    case r of
                        Right x   -> return (Succeeded (x, path))
                        -- Renaming can fail (e.g. requirement mismatch);
                        -- surface that as a source error.
                        Left errs -> liftIO . throwIO . mkSrcErr $ errs
                Failed err -> return (Failed err)
        -- Otherwise just read the interface off disk.
        (mod, _) ->
            findAndReadIface doc_str mod mod0 hi_boot_file
-- | Compute the signatures which must be compiled in order to
-- load the interface for a 'Module'.  The output of this function
-- is always a subset of 'moduleFreeHoles'; it is more precise
-- because in signature @p[A=<A>,B=<B>]:B@, although the free holes
-- are A and B, B might not depend on A at all!
--
-- If this is invoked on a signature, this does NOT include the
-- signature itself; e.g. precise free module holes of
-- @p[A=<A>,B=<B>]:B@ never includes B.
moduleFreeHolesPrecise
    :: SDoc -> Module
    -> TcRnIf gbl lcl (MaybeErr MsgDoc (UniqDSet ModuleName))
moduleFreeHolesPrecise doc_str mod
 | moduleIsDefinite mod = return (Succeeded emptyUniqDSet)
 | otherwise =
    case splitModuleInsts mod of
        (imod, Just indef) -> do
            let insts = indefUnitIdInsts (indefModuleUnitId indef)
            traceIf (text "Considering whether to load" <+> ppr mod <+>
                     text "to compute precise free module holes")
            (eps, hpt) <- getEpsAndHpt
            dflags <- getDynFlags
            -- Try the cheap cached answers before reading anything off
            -- disk.
            case tryEpsAndHpt dflags eps hpt `firstJust` tryDepsCache eps imod insts of
                Just r  -> return (Succeeded r)
                Nothing -> readAndCache imod insts
        (_, Nothing) -> return (Succeeded emptyUniqDSet)
  where
    -- An already-loaded interface records its free holes directly.
    tryEpsAndHpt dflags eps hpt =
        fmap mi_free_holes (lookupIfaceByModule dflags hpt (eps_PIT eps) mod)
    -- The EPS caches uninstantiated free holes keyed by installed
    -- module; rename them for this instantiation.
    tryDepsCache eps imod insts =
        case lookupInstalledModuleEnv (eps_free_holes eps) imod of
            Just ifhs  -> Just (renameFreeHoles ifhs insts)
            _otherwise -> Nothing
    -- Slow path: read the uninstantiated interface, cache its free
    -- holes in the EPS, and return the renamed set.
    readAndCache imod insts = do
        mb_iface <- findAndReadIface (text "moduleFreeHolesPrecise" <+> doc_str) imod mod False
        case mb_iface of
            Succeeded (iface, _) -> do
                let ifhs = mi_free_holes iface
                -- Cache it
                updateEps_ (\eps ->
                    eps { eps_free_holes = extendInstalledModuleEnv (eps_free_holes eps) imod ifhs })
                return (Succeeded (renameFreeHoles ifhs insts))
            Failed err -> return (Failed err)
wantHiBootFile :: DynFlags -> ExternalPackageState -> Module -> WhereFrom
               -> MaybeErr MsgDoc IsBootInterface
-- Figure out whether we want Foo.hi or Foo.hi-boot
wantHiBootFile dflags eps mod from
  = case from of
       ImportByUser usr_boot
          -- A user {-# SOURCE #-} import of a module in another
          -- package is rejected outright.
          | usr_boot && not this_package
          -> Failed (badSourceImport mod)
          | otherwise -> Succeeded usr_boot

       ImportByPlugin
          -> Succeeded False

       ImportBySystem
          | not this_package   -- If the module to be imported is not from this package
          -> Succeeded False   -- don't look it up in eps_is_boot, because that is keyed
                               -- on the ModuleName of *home-package* modules only.
                               -- We never import boot modules from other packages!

          | otherwise
          -> case lookupUFM (eps_is_boot eps) (moduleName mod) of
                Just (_, is_boot) -> Succeeded is_boot
                Nothing           -> Succeeded False
                     -- The boot-ness of the requested interface,
                     -- based on the dependencies in directly-imported modules
  where
    this_package = thisPackage dflags == moduleUnitId mod
-- | Error message for a @{-# SOURCE #-}@ import of a module that lives
-- in another package (never allowed; see 'wantHiBootFile').
badSourceImport :: Module -> SDoc
badSourceImport mod
  = hang (text "You cannot {-# SOURCE #-} import a module from another package")
       -- Use 'text' throughout, consistent with the rest of this
       -- module (previously one fragment used ptext/sLit).
       2 (text "but" <+> quotes (ppr mod) <+> text "is from package"
          <+> quotes (ppr (moduleUnitId mod)))
-----------------------------------------------------
-- Loading type/class/value decls
-- We pass the full Module name here, replete with
-- its package info, so that we can build a Name for
-- each binder with the right package info in it
-- All subsequent lookups, including crucially lookups during typechecking
-- the declaration itself, will find the fully-glorious Name
--
-- We handle ATs specially. They are not main declarations, but also not
-- implicit things (in particular, adding them to `implicitTyThings' would mess
-- things up in the renaming/type checking of source programs).
-----------------------------------------------------
-- Extend the package type environment with a batch of freshly loaded
-- (Name, TyThing) pairs.
addDeclsToPTE :: PackageTypeEnv -> [(Name,TyThing)] -> PackageTypeEnv
addDeclsToPTE = extendNameEnvList
-- Run 'loadDecl' over every declaration of the interface and flatten
-- the per-declaration results into one list.
loadDecls :: Bool
          -> [(Fingerprint, IfaceDecl)]
          -> IfL [(Name,TyThing)]
loadDecls ignore_prags ver_decls =
    fmap concat (mapM (loadDecl ignore_prags) ver_decls)
loadDecl :: Bool                    -- Don't load pragmas into the decl pool
         -> (Fingerprint, IfaceDecl)
         -> IfL [(Name,TyThing)]    -- The list can be poked eagerly, but the
                                    -- TyThings are forkM'd thunks
loadDecl ignore_prags (_version, decl)
  = do  {       -- Populate the name cache with final versions of all
                -- the names associated with the decl
          let main_name = ifName decl

        -- Typecheck the thing, lazily
        -- NB. Firstly, the laziness is there in case we never need the
        -- declaration (in one-shot mode), and secondly it is there so that
        -- we don't look up the occurrence of a name before calling mk_new_bndr
        -- on the binder.  This is important because we must get the right name
        -- which includes its nameParent.

        ; thing <- forkM doc $ do { bumpDeclStats main_name
                                  ; tcIfaceDecl ignore_prags decl }

        -- Populate the type environment with the implicitTyThings too.
        --
        -- Note [Tricky iface loop]
        -- ~~~~~~~~~~~~~~~~~~~~~~~~
        -- Summary: The delicate point here is that 'mini-env' must be
        -- buildable from 'thing' without demanding any of the things
        -- 'forkM'd by tcIfaceDecl.
        --
        -- In more detail: Consider the example
        --      data T a = MkT { x :: T a }
        -- The implicitTyThings of T are:  [ <datacon MkT>, <selector x>]
        -- (plus their workers, wrappers, coercions etc etc)
        --
        -- We want to return an environment
        --      [ "MkT" -> <datacon MkT>, "x" -> <selector x>, ... ]
        -- (where the "MkT" is the *Name* associated with MkT, etc.)
        --
        -- We do this by mapping the implicit_names to the associated
        -- TyThings.  By the invariant on ifaceDeclImplicitBndrs and
        -- implicitTyThings, we can use getOccName on the implicit
        -- TyThings to make this association: each Name's OccName should
        -- be the OccName of exactly one implicitTyThing.  So the key is
        -- to define a "mini-env"
        --
        -- [ 'MkT' -> <datacon MkT>, 'x' -> <selector x>, ... ]
        -- where the 'MkT' here is the *OccName* associated with MkT.
        --
        -- However, there is a subtlety: due to how type checking needs
        -- to be staged, we can't poke on the forkM'd thunks inside the
        -- implicitTyThings while building this mini-env.
        -- If we poke these thunks too early, two problems could happen:
        --    (1) When processing mutually recursive modules across
        --        hs-boot boundaries, poking too early will do the
        --        type-checking before the recursive knot has been tied,
        --        so things will be type-checked in the wrong
        --        environment, and necessary variables won't be in
        --        scope.
        --
        --    (2) Looking up one OccName in the mini_env will cause
        --        others to be looked up, which might cause that
        --        original one to be looked up again, and hence loop.
        --
        -- The code below works because of the following invariant:
        -- getOccName on a TyThing does not force the suspended type
        -- checks in order to extract the name.  For example, we don't
        -- poke on the "T a" type of <selector x> on the way to
        -- extracting <selector x>'s OccName.  Of course, there is no
        -- reason in principle why getting the OccName should force the
        -- thunks, but this means we need to be careful in
        -- implicitTyThings and its helper functions.
        --
        -- All a bit too finely-balanced for my liking.

        -- This mini-env and lookup function mediates between the
        -- 'Name's n and the map from 'OccName's to the implicit TyThings
        ; let mini_env = mkOccEnv [(getOccName t, t) | t <- implicitTyThings thing]
              lookup n = case lookupOccEnv mini_env (getOccName n) of
                           Just thing -> thing
                           Nothing    ->
                             pprPanic "loadDecl" (ppr main_name <+> ppr n $$ ppr (decl))

        ; implicit_names <- mapM lookupIfaceTop (ifaceDeclImplicitBndrs decl)

--         ; traceIf (text "Loading decl for " <> ppr main_name $$ ppr implicit_names)
        ; return $ (main_name, thing) :
                      -- uses the invariant that implicit_names and
                      -- implicitTyThings are bijective
                      [(n, lookup n) | n <- implicit_names]
        }
  where
    doc = text "Declaration for" <+> ppr (ifName decl)
-- Record that one more declaration has actually been used.
bumpDeclStats :: Name -> IfL ()
bumpDeclStats name = do
    traceIf (text "Loading decl for" <+> ppr name)
    updateEps_ $ \eps ->
        let stats = eps_stats eps
        in eps { eps_stats = stats { n_decls_out = n_decls_out stats + 1 } }
{-
*********************************************************
* *
\subsection{Reading an interface file}
* *
*********************************************************
Note [Home module load error]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the sought-for interface is in the current package (as determined
by -package-name flag) then it jolly well should already be in the HPT
because we process home-package modules in dependency order. (Except
in one-shot mode; see notes with hsc_HPT decl in HscTypes).
It is possible (though hard) to get this error through user behaviour.
* Suppose package P (modules P1, P2) depends on package Q (modules Q1,
Q2, with Q2 importing Q1)
* We compile both packages.
* Now we edit package Q so that it somehow depends on P
* Now recompile Q with --make (without recompiling P).
* Then Q1 imports, say, P1, which in turn depends on Q2. So Q2
is a home-package module which is not yet in the HPT! Disaster.
This actually happened with P=base, Q=ghc-prim, via the AMP warnings.
See Trac #8320.
-}
findAndReadIface :: SDoc
                 -- The unique identifier of the on-disk module we're
                 -- looking for
                 -> InstalledModule
                 -- The *actual* module we're looking for.  We use
                 -- this to check the consistency of the requirements
                 -- of the module we read out.
                 -> Module
                 -> IsBootInterface     -- True  <=> Look for a .hi-boot file
                                        -- False <=> Look for .hi file
                 -> TcRnIf gbl lcl (MaybeErr MsgDoc (ModIface, FilePath))
        -- Nothing <=> file not found, or unreadable, or illegible
        -- Just x  <=> successfully found and parsed
        -- It *doesn't* add an error to the monad, because
        -- sometimes it's ok to fail... see notes with loadInterface
findAndReadIface doc_str mod wanted_mod_with_insts hi_boot_file
  = do traceIf (sep [hsep [text "Reading",
                           if hi_boot_file
                             then text "[boot]"
                             else Outputable.empty,
                           text "interface for",
                           ppr mod <> semi],
                     nest 4 (text "reason:" <+> doc_str)])

       -- Check for GHC.Prim, and return its static interface
       -- TODO: make this check a function
       if mod `installedModuleEq` gHC_PRIM
           then do
               iface <- getHooked ghcPrimIfaceHook ghcPrimIface
               return (Succeeded (iface,
                                  "<built in interface for GHC.Prim>"))
           else do
               dflags <- getDynFlags
               -- Look for the file
               hsc_env <- getTopEnv
               mb_found <- liftIO (findExactModule hsc_env mod)
               case mb_found of
                   InstalledFound loc mod -> do
                       -- Found file, so read it
                       let file_path = addBootSuffix_maybe hi_boot_file
                                                           (ml_hi_file loc)

                       -- See Note [Home module load error]
                       if installedModuleUnitId mod `installedUnitIdEq` thisPackage dflags &&
                          not (isOneShot (ghcMode dflags))
                           then return (Failed (homeModError mod loc))
                           else do r <- read_file file_path
                                   checkBuildDynamicToo r
                                   return r
                   err -> do
                       traceIf (text "...not found")
                       dflags <- getDynFlags
                       return (Failed (cannotFindInterface dflags
                                           (installedModuleName mod) err))
    where read_file file_path = do
              traceIf (text "readIFace" <+> text file_path)
              -- Figure out what is recorded in mi_module.  If this is
              -- a fully definite interface, it'll match exactly, but
              -- if it's indefinite, the inside will be uninstantiated!
              dflags <- getDynFlags
              let wanted_mod =
                    case splitModuleInsts wanted_mod_with_insts of
                        (_, Nothing) -> wanted_mod_with_insts
                        (_, Just indef_mod) ->
                          indefModuleToModule dflags
                              (generalizeIndefModule indef_mod)
              read_result <- readIface wanted_mod file_path
              case read_result of
                Failed err      -> return (Failed (badIfaceFile file_path err))
                Succeeded iface -> return (Succeeded (iface, file_path))

          -- Don't forget to fill in the package name...
          -- When -dynamic-too is in effect, check that a matching
          -- dynamic interface exists; if it doesn't (or its hash
          -- differs), flip the canGenerateDynamicToo flag off.
          checkBuildDynamicToo (Succeeded (iface, filePath)) = do
              dflags <- getDynFlags
              -- Indefinite interfaces are ALWAYS non-dynamic, and
              -- that's OK.
              let is_definite_iface = moduleIsDefinite (mi_module iface)
              when is_definite_iface $
                whenGeneratingDynamicToo dflags $ withDoDynamicToo $ do
                  let ref = canGenerateDynamicToo dflags
                      dynFilePath = addBootSuffix_maybe hi_boot_file
                                  $ replaceExtension filePath (dynHiSuf dflags)
                  r <- read_file dynFilePath
                  case r of
                      Succeeded (dynIface, _)
                       | mi_mod_hash iface == mi_mod_hash dynIface ->
                          return ()
                       | otherwise ->
                          do traceIf (text "Dynamic hash doesn't match")
                             liftIO $ writeIORef ref False
                      Failed err ->
                          do traceIf (text "Failed to load dynamic interface file:" $$ err)
                             liftIO $ writeIORef ref False
          checkBuildDynamicToo _ = return ()
-- | @readIface@ tries just the one file; it does not search.
readIface :: Module -> FilePath
          -> TcRnIf gbl lcl (MaybeErr MsgDoc ModIface)
        -- Failed err      <=> file not found, or unreadable, or illegible
        -- Succeeded iface <=> successfully found and parsed
readIface wanted_mod file_path
  = do { res <- tryMostM $
                readBinIface CheckHiWay QuietBinIFaceReading file_path
       ; dflags <- getDynFlags
       ; case res of
           Right iface
               -- NB: This check is NOT just a sanity check, it is
               -- critical for correctness of recompilation checking
               -- (it lets us tell when -this-unit-id has changed.)
               | wanted_mod == actual_mod
                           -> return (Succeeded iface)
               | otherwise -> return (Failed err)
               where
                 actual_mod = mi_module iface
                 err = hiModuleNameMismatchWarn dflags wanted_mod actual_mod
           Left exn    -> return (Failed (text (showException exn)))
       }
{-
*********************************************************
*                                                       *
        Initialising the External Package State
*                                                       *
*********************************************************
-}
-- | The initial (empty) 'ExternalPackageState': no interfaces have been
-- loaded yet; only the rule base is pre-seeded with GHC's built-in rules.
initExternalPackageState :: ExternalPackageState
initExternalPackageState
  = EPS {
      eps_is_boot          = emptyUFM,
      eps_PIT              = emptyPackageIfaceTable,
      eps_free_holes       = emptyInstalledModuleEnv,
      eps_PTE              = emptyTypeEnv,
      eps_inst_env         = emptyInstEnv,
      eps_fam_inst_env     = emptyFamInstEnv,
      eps_rule_base        = mkRuleBase builtinRules,
        -- Initialise the EPS rule pool with the built-in rules
      eps_mod_fam_inst_env
                           = emptyModuleEnv,
      eps_vect_info        = noVectInfo,
      eps_complete_matches = emptyUFM,
      eps_ann_env          = emptyAnnEnv,
      -- Rule counters start at the number of built-in rules read in
      eps_stats            = EpsStats { n_ifaces_in = 0, n_decls_in = 0, n_decls_out = 0
                                      , n_insts_in = 0, n_insts_out = 0
                                      , n_rules_in = length builtinRules, n_rules_out = 0 }
    }
{-
*********************************************************
* *
Wired-in interface for GHC.Prim
* *
*********************************************************
-}
-- | The hand-built interface for GHC.Prim.  It is never read from disk:
-- it carries only the exports and fixities (no declarations).
ghcPrimIface :: ModIface
ghcPrimIface
  = (emptyModIface gHC_PRIM) {
        mi_exports  = ghcPrimExports,
        mi_decls    = [],
        mi_fixities = fixities,
        mi_fix_fn   = mkIfaceFixCache fixities
    }
  where
    fixities = (getOccName seqId, Fixity NoSourceText 0 InfixR)
                                                      -- seq is infixr 0
             : (occName funTyConName, funTyFixity)    -- trac #10145
             : mapMaybe mkFixity allThePrimOps
    -- Only primops that declare a fixity contribute an entry
    mkFixity op = (,) (primOpOcc op) <$> primOpFixity op
{-
*********************************************************
* *
\subsection{Statistics}
* *
*********************************************************
-}
-- | Render the interface-loading statistics held in the EPS
-- ("Renamer stats"), counting interfaces, declarations, instance
-- declarations and rules read versus actually imported.
ifaceStats :: ExternalPackageState -> SDoc
ifaceStats eps = hcat [text "Renamer stats: ", body]
  where
    stats = eps_stats eps
    -- One "N <what> M read" line for a used/read counter pair
    imported out what inn = hsep [int out, text what, int inn, text "read"]
    body = vcat
        [ int (n_ifaces_in stats) <+> text "interfaces read"
        , imported (n_decls_out stats)
                   "type/class/variable imported, out of"
                   (n_decls_in stats)
        , imported (n_insts_out stats)
                   "instance decls imported, out of"
                   (n_insts_in stats)
        , imported (n_rules_out stats)
                   "rule decls imported, out of"
                   (n_rules_in stats)
        ]
{-
************************************************************************
* *
Printing interfaces
* *
************************************************************************
-}
-- | Read binary interface, and print it out
showIface :: HscEnv -> FilePath -> IO ()
showIface hsc_env filename = do
   -- skip the hi way check; we don't want to worry about profiled vs.
   -- non-profiled interfaces, for example.
   -- NOTE(review): 's' appears to tag uniques allocated during the read
   -- — confirm against initTcRnIf's documentation.
   iface <- initTcRnIf 's' hsc_env () () $
       readBinIface IgnoreHiWay TraceBinIFaceReading filename
   let dflags = hsc_dflags hsc_env
   putLogMsg dflags NoReason SevDump noSrcSpan
      (defaultDumpStyle dflags) (pprModIface iface)
-- Show a ModIface but don't display details; suitable for ModIfaces stored in
-- the EPT.
pprModIfaceSimple :: ModIface -> SDoc
pprModIfaceSimple iface =
    ppr (mi_module iface)
      $$ pprDeps (mi_deps iface)
      $$ nest 2 exports
  where
    exports = vcat (map pprExport (mi_exports iface))
-- | Show a 'ModIface' in full: header line with flags and version,
-- the recorded hashes, then exports, dependencies, usages, decls, etc.
pprModIface :: ModIface -> SDoc
pprModIface iface
 = vcat [ text "interface"
                <+> ppr (mi_module iface) <+> pp_hsc_src (mi_hsc_src iface)
                <+> (if mi_orphan iface then text "[orphan module]" else Outputable.empty)
                <+> (if mi_finsts iface then text "[family instance module]" else Outputable.empty)
                <+> (if mi_hpc iface then text "[hpc]" else Outputable.empty)
                <+> integer hiVersion
        , nest 2 (text "interface hash:" <+> ppr (mi_iface_hash iface))
        , nest 2 (text "ABI hash:" <+> ppr (mi_mod_hash iface))
        , nest 2 (text "export-list hash:" <+> ppr (mi_exp_hash iface))
        , nest 2 (text "orphan hash:" <+> ppr (mi_orphan_hash iface))
        , nest 2 (text "flag hash:" <+> ppr (mi_flag_hash iface))
        , nest 2 (text "opt_hash:" <+> ppr (mi_opt_hash iface))
        , nest 2 (text "hpc_hash:" <+> ppr (mi_hpc_hash iface))
        , nest 2 (text "sig of:" <+> ppr (mi_sig_of iface))
        , nest 2 (text "used TH splices:" <+> ppr (mi_used_th iface))
        , nest 2 (text "where")
        , text "exports:"
        , nest 2 (vcat (map pprExport (mi_exports iface)))
        , pprDeps (mi_deps iface)
        , vcat (map pprUsage (mi_usages iface))
        , vcat (map pprIfaceAnnotation (mi_anns iface))
        , pprFixities (mi_fixities iface)
        , vcat [ppr ver $$ nest 2 (ppr decl) | (ver,decl) <- mi_decls iface]
        , vcat (map ppr (mi_insts iface))
        , vcat (map ppr (mi_fam_insts iface))
        , vcat (map ppr (mi_rules iface))
        , pprVectInfo (mi_vect_info iface)
        , ppr (mi_warns iface)
        , pprTrustInfo (mi_trust iface)
        , pprTrustPkg (mi_trust_pkg iface)
        , vcat (map ppr (mi_complete_sigs iface))
        ]
  where
    -- Marker for the kind of source the interface came from
    pp_hsc_src HsBootFile = text "[boot]"
    pp_hsc_src HsigFile   = text "[hsig]"
    pp_hsc_src HsSrcFile  = Outputable.empty
{-
When printing export lists, we print like this:
Avail f f
AvailTC C [C, x, y] C(x,y)
AvailTC C [x, y] C!(x,y) -- Exporting x, y but not C
-}
-- | Print one export item; see the scheme documented just above.
pprExport :: IfaceExport -> SDoc
pprExport (Avail n)          = ppr n
pprExport (AvailTC _ [] [])  = Outputable.empty
pprExport (AvailTC parent kids flds)
  | (k : rest) <- kids, parent == k = ppr parent <> children rest flds
  | otherwise                       = ppr parent <> vbar <> children kids flds
  where
    -- The braced list of sub-names and field labels, if any
    children [] [] = Outputable.empty
    children ns fs =
        braces (hsep (map ppr ns ++ map (ppr . flLabel) fs))
-- | Render one recorded 'Usage' (a dependency noted in the interface).
pprUsage :: Usage -> SDoc
pprUsage usage@UsagePackageModule{}
  = pprUsageImport usage usg_mod
pprUsage usage@UsageHomeModule{}
  = pprUsageImport usage usg_mod_name $$
    nest 2 (
        maybe Outputable.empty (\v -> text "exports: " <> ppr v) (usg_exports usage) $$
        vcat [ ppr n <+> ppr v | (n,v) <- usg_entities usage ]
        )
pprUsage usage@UsageFile{}
  = hsep [text "addDependentFile",
          doubleQuotes (text (usg_file_path usage)),
          ppr (usg_file_hash usage)]
pprUsage usage@UsageMergedRequirement{}
  = hsep [text "merged", ppr (usg_mod usage), ppr (usg_mod_hash usage)]
-- | Shared rendering for module-usage entries: an "import" line with
-- the safety marker, the module, and its recorded hash.
pprUsageImport :: Outputable a => Usage -> (Usage -> a) -> SDoc
pprUsageImport usage get_mod =
    hsep [ text "import"
         , safety
         , ppr (get_mod usage)
         , ppr (usg_mod_hash usage)
         ]
  where
    safety = if usg_safe usage then text "safe" else text " -/ "
-- | Render the 'Dependencies' block of an interface.
pprDeps :: Dependencies -> SDoc
pprDeps (Deps { dep_mods = mods, dep_pkgs = pkgs, dep_orphs = orphs,
                dep_finsts = finsts })
  = vcat [text "module dependencies:" <+> fsep (map ppr_mod mods),
          text "package dependencies:" <+> fsep (map ppr_pkg pkgs),
          text "orphans:" <+> fsep (map ppr orphs),
          text "family instance modules:" <+> fsep (map ppr finsts)
         ]
  where
    -- A trailing "[boot]" marks a dependency on an hs-boot file
    ppr_mod (mod_name, boot) = ppr mod_name <+> ppr_boot boot
    -- A trailing "*" marks a package that is required to be trusted
    ppr_pkg (pkg,trust_req)  = ppr pkg <>
                               (if trust_req then text "*" else Outputable.empty)
    ppr_boot True  = text "[boot]"
    ppr_boot False = Outputable.empty
-- | Render the fixity declarations of an interface, or nothing when
-- there are none.
pprFixities :: [(OccName, Fixity)] -> SDoc
pprFixities fixes
  | null fixes = Outputable.empty
  | otherwise  = text "fixities" <+> pprWithCommas one fixes
  where
    one (occ, fix) = ppr fix <+> ppr occ
-- | Render the vectorisation info recorded in an interface.
pprVectInfo :: IfaceVectInfo -> SDoc
pprVectInfo (IfaceVectInfo { ifaceVectInfoVar            = vars
                           , ifaceVectInfoTyCon          = tycons
                           , ifaceVectInfoTyConReuse     = tyconsReuse
                           , ifaceVectInfoParallelVars   = parallelVars
                           , ifaceVectInfoParallelTyCons = parallelTyCons
                           }) =
  vcat
  [ text "vectorised variables:" <+> hsep (map ppr vars)
  , text "vectorised tycons:" <+> hsep (map ppr tycons)
  , text "vectorised reused tycons:" <+> hsep (map ppr tyconsReuse)
  , text "parallel variables:" <+> hsep (map ppr parallelVars)
  , text "parallel tycons:" <+> hsep (map ppr parallelTyCons)
  ]
-- | Render the Safe Haskell trust setting of the module.
pprTrustInfo :: IfaceTrustInfo -> SDoc
pprTrustInfo trust = text "trusted:" <+> ppr trust
-- | Render whether the module requires its own package to be trusted.
pprTrustPkg :: Bool -> SDoc
pprTrustPkg tpkg = text "require own pkg trusted:" <+> ppr tpkg
-- Delegate to the dedicated pretty-printer below.
instance Outputable Warnings where
    ppr = pprWarns
-- | Render deprecation/warning pragmas recorded in the interface.
pprWarns :: Warnings -> SDoc
pprWarns NoWarnings     = Outputable.empty
pprWarns (WarnAll txt)  = text "Warn all" <+> ppr txt
pprWarns (WarnSome prs) =
    text "Warnings" <+> vcat [ ppr name <+> ppr txt | (name, txt) <- prs ]
-- | Render one ANN pragma payload and its target.
pprIfaceAnnotation :: IfaceAnnotation -> SDoc
pprIfaceAnnotation (IfaceAnnotation { ifAnnotatedTarget = target, ifAnnotatedValue = serialized })
  = ppr target <+> text "annotated by" <+> ppr serialized
{-
*********************************************************
* *
\subsection{Errors}
* *
*********************************************************
-}
-- | Error document for an interface file that could not be parsed.
badIfaceFile :: String -> SDoc -> SDoc
badIfaceFile file err
  = vcat [text "Bad interface file:" <+> text file,
          nest 4 err]
-- | Error document for an interface whose recorded mi_module differs
-- from the module we asked for.  The two cases distinguish a mismatch
-- within the same unit from one across units.
hiModuleNameMismatchWarn :: DynFlags -> Module -> Module -> MsgDoc
hiModuleNameMismatchWarn dflags requested_mod read_mod
 | moduleUnitId requested_mod == moduleUnitId read_mod =
    sep [text "Interface file contains module" <+> quotes (ppr read_mod) <> comma,
         text "but we were expecting module" <+> quotes (ppr requested_mod),
         sep [text "Probable cause: the source code which generated interface file",
             text "has an incompatible module name"
            ]
        ]
 | otherwise =
  -- ToDo: This will fail to have enough qualification when the package IDs
  -- are the same
  withPprStyle (mkUserStyle dflags alwaysQualify AllTheWay) $
    -- we want the Modules below to be qualified with package names,
    -- so reset the PrintUnqualified setting.
    hsep [ text "Something is amiss; requested module "
         , ppr requested_mod
         , text "differs from name found in the interface file"
         , ppr read_mod
         , parens (text "if these names look the same, try again with -dppr-debug")
         ]
homeModError :: InstalledModule -> ModLocation -> SDoc
-- See Note [Home module load error]
homeModError mod location
  = text "attempting to use module " <> quotes (ppr mod)
    <> (case ml_hs_file location of
           Just file -> space <> parens (text file)
           Nothing   -> Outputable.empty)
    <+> text "which is not loaded"
| shlevy/ghc | compiler/iface/LoadIface.hs | bsd-3-clause | 52,642 | 479 | 24 | 17,086 | 8,598 | 4,602 | 3,996 | -1 | -1 |
-------------------------------------------------------------------------------
--
-- | Main API for compiling plain Haskell source code.
--
-- This module implements compilation of a Haskell source. It is
-- /not/ concerned with preprocessing of source files; this is handled
-- in "DriverPipeline".
--
-- There are various entry points depending on what mode we're in:
-- "batch" mode (@--make@), "one-shot" mode (@-c@, @-S@ etc.), and
-- "interactive" mode (GHCi). There are also entry points for
-- individual passes: parsing, typechecking/renaming, desugaring, and
-- simplification.
--
-- All the functions here take an 'HscEnv' as a parameter, but none of
-- them return a new one: 'HscEnv' is treated as an immutable value
-- from here on in (although it has mutable components, for the
-- caches).
--
-- Warning messages are dealt with consistently throughout this API:
-- during compilation warnings are collected, and before any function
-- in @HscMain@ returns, the warnings are either printed, or turned
-- into a real compilation error if the @-Werror@ flag is enabled.
--
-- (c) The GRASP/AQUA Project, Glasgow University, 1993-2000
--
-------------------------------------------------------------------------------
module HscMain
(
-- * Making an HscEnv
newHscEnv
-- * Compiling complete source files
, Messager, batchMsg
, HscStatus (..)
, hscCompileOneShot
, hscCompileCmmFile
, hscCompileCore
, genericHscCompileGetFrontendResult
, genModDetails
, hscSimpleIface
, hscWriteIface
, hscNormalIface
, hscGenHardCode
, hscInteractive
-- * Running passes separately
, hscParse
, hscTypecheckRename
, hscDesugar
, makeSimpleIface
, makeSimpleDetails
, hscSimplify -- ToDo, shouldn't really export this
-- * Support for interactive evaluation
, hscParseIdentifier
, hscTcRcLookupName
, hscTcRnGetInfo
, hscCheckSafe
, hscGetSafe
#ifdef GHCI
, hscIsGHCiMonad
, hscGetModuleInterface
, hscRnImportDecls
, hscTcRnLookupRdrName
, hscStmt, hscStmtWithLocation
, hscDecls, hscDeclsWithLocation
, hscTcExpr, hscImport, hscKcType
, hscCompileCoreExpr
-- * Low-level exports for hooks
, hscCompileCoreExpr'
#endif
-- We want to make sure that we export enough to be able to redefine
-- hscFileFrontEnd in client code
, hscParse', hscSimplify', hscDesugar', tcRnModule'
, getHscEnv
, hscSimpleIface', hscNormalIface'
, oneShotMsg
, hscFileFrontEnd, genericHscFrontend, dumpIfaceStats
) where
#ifdef GHCI
import Id
import BasicTypes ( HValue )
import ByteCodeGen ( byteCodeGen, coreExprToBCOs )
import Linker
import CoreTidy ( tidyExpr )
import Type ( Type )
import PrelNames
import {- Kind parts of -} Type ( Kind )
import CoreMonad ( lintInteractiveExpr )
import DsMeta ( templateHaskellNames )
import VarEnv ( emptyTidyEnv )
import Panic
import GHC.Exts
#endif
import Module
import Packages
import RdrName
import HsSyn
import CoreSyn
import StringBuffer
import Parser
import Lexer
import SrcLoc
import TcRnDriver
import TcIface ( typecheckIface )
import TcRnMonad
import IfaceEnv ( initNameCache )
import LoadIface ( ifaceStats, initExternalPackageState )
import PrelInfo
import MkIface
import Desugar
import SimplCore
import TidyPgm
import CorePrep
import CoreToStg ( coreToStg )
import qualified StgCmm ( codeGen )
import StgSyn
import CostCentre
import ProfInit
import TyCon
import Name
import SimplStg ( stg2stg )
import Cmm
import CmmParse ( parseCmmFile )
import CmmBuildInfoTables
import CmmPipeline
import CmmInfo
import CodeOutput
import NameEnv ( emptyNameEnv )
import NameSet ( emptyNameSet )
import InstEnv
import FamInstEnv
import Fingerprint ( Fingerprint )
import Hooks
import DynFlags
import ErrUtils
import Outputable
import HscStats ( ppSourceStats )
import HscTypes
import MkExternalCore ( emitExternalCore )
import FastString
import UniqFM ( emptyUFM )
import UniqSupply
import Bag
import Exception
import qualified Stream
import Stream (Stream)
import Util
import Data.List
import Control.Monad
import Data.Maybe
import Data.IORef
import System.FilePath as FilePath
import System.Directory
#include "HsVersions.h"
{- **********************************************************************
%* *
Initialisation
%* *
%********************************************************************* -}
-- | Construct a fresh 'HscEnv' for the given 'DynFlags': empty caches,
-- no targets or module graph, and a newly initialised EPS.
newHscEnv :: DynFlags -> IO HscEnv
newHscEnv dflags = do
    eps_var <- newIORef initExternalPackageState
    us      <- mkSplitUniqSupply 'r'
    nc_var  <- newIORef (initNameCache us knownKeyNames)
    fc_var  <- newIORef emptyUFM
    mlc_var <- newIORef emptyModuleEnv
    return HscEnv { hsc_dflags       = dflags,
                    hsc_targets      = [],
                    hsc_mod_graph    = [],
                    hsc_IC           = emptyInteractiveContext dflags,
                    hsc_HPT          = emptyHomePackageTable,
                    hsc_EPS          = eps_var,
                    hsc_NC           = nc_var,
                    hsc_FC           = fc_var,
                    hsc_MLC          = mlc_var,
                    hsc_type_env_var = Nothing }
-- | Names the compiler knows a priori (wired-in things, basic known-key
-- names and, when GHCi is built, the Template Haskell names).
knownKeyNames :: [Name]      -- Put here to avoid loops involving DsMeta,
knownKeyNames =              -- where templateHaskellNames are defined
    map getName wiredInThings
        ++ basicKnownKeyNames
#ifdef GHCI
        ++ templateHaskellNames
#endif
-- -----------------------------------------------------------------------------
-- | Return the warnings accumulated so far in the 'Hsc' monad.
getWarnings :: Hsc WarningMessages
getWarnings = Hsc $ \_ w -> return (w, w)
-- | Discard all warnings accumulated so far.
clearWarnings :: Hsc ()
clearWarnings = Hsc $ \_ _ -> return ((), emptyBag)
-- | Append the given warnings to those already accumulated.
logWarnings :: WarningMessages -> Hsc ()
logWarnings w = Hsc $ \_ w0 -> return ((), w0 `unionBags` w)
-- | Fetch the 'HscEnv' carried by the 'Hsc' monad.
getHscEnv :: Hsc HscEnv
getHscEnv = Hsc $ \e w -> return (e, w)
-- | Print the accumulated warnings (or throw, under -Werror), then
-- clear them so they are not reported twice.
handleWarnings :: Hsc ()
handleWarnings = do
    dflags <- getDynFlags
    w <- getWarnings
    liftIO $ printOrThrowWarnings dflags w
    clearWarnings
-- | Log the warnings in the monad; if any errors are present, raise a
-- SourceError exception instead of continuing.
logWarningsReportErrors :: Messages -> Hsc ()
logWarningsReportErrors (warns, errs) = do
    logWarnings warns
    unless (isEmptyBag errs) (throwErrors errs)
-- | Throw some errors as a 'SourceError' exception in IO.
throwErrors :: ErrorMessages -> Hsc a
throwErrors = liftIO . throwIO . mkSrcErr
-- | Deal with errors and warnings returned by a compilation step
--
-- In order to reduce dependencies to other parts of the compiler, functions
-- outside the "main" parts of GHC return warnings and errors as a parameter
-- and signal success via by wrapping the result in a 'Maybe' type. This
-- function logs the returned warnings and propagates errors as exceptions
-- (of type 'SourceError').
--
-- This function assumes the following invariants:
--
--  1. If the second result indicates success (is of the form 'Just x'),
--     there must be no error messages in the first result.
--
--  2. If there are no error messages, but the second result indicates failure
--     there should be warnings in the first result. That is, if the action
--     failed, it must have been due to the warnings (i.e., @-Werror@).
ioMsgMaybe :: IO (Messages, Maybe a) -> Hsc a
ioMsgMaybe ioA = do
    ((warns,errs), mb_r) <- liftIO ioA
    logWarnings warns
    case mb_r of
        Nothing -> throwErrors errs
        -- Invariant 1: success implies there were no errors
        Just r  -> ASSERT( isEmptyBag errs ) return r
-- | Variant of 'ioMsgMaybe' that logs warnings but silently discards
-- any error messages, returning 'Nothing' on failure.
ioMsgMaybe' :: IO (Messages, Maybe a) -> Hsc (Maybe a)
ioMsgMaybe' act = do
    ((ws, _errs), res) <- liftIO act
    logWarnings ws
    return res
-- -----------------------------------------------------------------------------
-- | Lookup things in the compiler's environment
#ifdef GHCI
-- | Find all 'Name's a 'RdrName' may refer to in the interactive context.
hscTcRnLookupRdrName :: HscEnv -> RdrName -> IO [Name]
hscTcRnLookupRdrName hsc_env0 rdr_name = runInteractiveHsc hsc_env0 $ do
    hsc_env <- getHscEnv
    ioMsgMaybe $ tcRnLookupRdrName hsc_env rdr_name
#endif
-- | Look up a 'Name' in the interactive context, returning its 'TyThing'
-- if it is in scope.
hscTcRcLookupName :: HscEnv -> Name -> IO (Maybe TyThing)
hscTcRcLookupName hsc_env0 name = runInteractiveHsc hsc_env0 $ do
    hsc_env <- getHscEnv
    ioMsgMaybe' $ tcRnLookupName hsc_env name
      -- ignore errors: the only error we're likely to get is
      -- "name not found", and the Maybe in the return type
      -- is used to indicate that.
-- | Get the 'TyThing', fixity, and class/family instances for a 'Name'
-- (errors are swallowed via 'ioMsgMaybe'').
hscTcRnGetInfo :: HscEnv -> Name -> IO (Maybe (TyThing, Fixity, [ClsInst], [FamInst]))
hscTcRnGetInfo hsc_env0 name
  = runInteractiveHsc hsc_env0 $
    do { hsc_env <- getHscEnv
       ; ioMsgMaybe' $ tcRnGetInfo hsc_env name }
#ifdef GHCI
-- | Check whether the named type can serve as the GHCi monad.
hscIsGHCiMonad :: HscEnv -> String -> IO Name
hscIsGHCiMonad hsc_env name
  = runHsc hsc_env $ ioMsgMaybe $ isGHCiMonad hsc_env name
-- | Load (if necessary) and return the interface for the given module.
hscGetModuleInterface :: HscEnv -> Module -> IO ModIface
hscGetModuleInterface hsc_env0 mod = runInteractiveHsc hsc_env0 $ do
    hsc_env <- getHscEnv
    ioMsgMaybe $ getModuleInterface hsc_env mod
-- -----------------------------------------------------------------------------
-- | Rename some import declarations, yielding the resulting global
-- reader environment.
hscRnImportDecls :: HscEnv -> [LImportDecl RdrName] -> IO GlobalRdrEnv
hscRnImportDecls hsc_env0 import_decls = runInteractiveHsc hsc_env0 $ do
    hsc_env <- getHscEnv
    ioMsgMaybe $ tcRnImportDecls hsc_env import_decls
#endif
-- -----------------------------------------------------------------------------
-- | parse a file, returning the abstract syntax
hscParse :: HscEnv -> ModSummary -> IO HsParsedModule
hscParse hsc_env mod_summary = runHsc hsc_env $ hscParse' mod_summary
-- internal version, that doesn't fail due to -Werror
hscParse' :: ModSummary -> Hsc HsParsedModule
hscParse' mod_summary = do
    dflags <- getDynFlags
    let src_filename  = ms_hspp_file mod_summary
        maybe_src_buf = ms_hspp_buf  mod_summary

    --------------------------  Parser  ----------------
    liftIO $ showPass dflags "Parser"
    {-# SCC "Parser" #-} do

    -- sometimes we already have the buffer in memory, perhaps
    -- because we needed to parse the imports out of it, or get the
    -- module name.
    buf <- case maybe_src_buf of
               Just b  -> return b
               Nothing -> liftIO $ hGetStringBuffer src_filename

    let loc = mkRealSrcLoc (mkFastString src_filename) 1 1

    case unP parseModule (mkPState dflags buf loc) of
        PFailed span err ->
            liftIO $ throwOneError (mkPlainErrMsg dflags span err)

        POk pst rdr_module -> do
            -- parser warnings are logged here; errors abort the pipeline
            logWarningsReportErrors (getMessages pst)
            liftIO $ dumpIfSet_dyn dflags Opt_D_dump_parsed "Parser" $
                                   ppr rdr_module
            liftIO $ dumpIfSet_dyn dflags Opt_D_source_stats "Source Statistics" $
                                   ppSourceStats False rdr_module

            -- To get the list of extra source files, we take the list
            -- that the parser gave us,
            --   - eliminate files beginning with '<'.  gcc likes to use
            --     pseudo-filenames like "<built-in>" and "<command-line>"
            --   - normalise them (eliminate differences between ./f and f)
            --   - filter out the preprocessed source file
            --   - filter out anything beginning with tmpdir
            --   - remove duplicates
            --   - filter out the .hs/.lhs source filename if we have one
            --
            let n_hspp = FilePath.normalise src_filename
                srcs0 = nub $ filter (not . (tmpDir dflags `isPrefixOf`))
                            $ filter (not . (== n_hspp))
                            $ map FilePath.normalise
                            $ filter (not . (isPrefixOf "<"))
                            $ map unpackFS
                            $ srcfiles pst
                srcs1 = case ml_hs_file (ms_location mod_summary) of
                          Just f  -> filter (/= FilePath.normalise f) srcs0
                          Nothing -> srcs0

            -- sometimes we see source files from earlier
            -- preprocessing stages that cannot be found, so just
            -- filter them out:
            srcs2 <- liftIO $ filterM doesFileExist srcs1

            return HsParsedModule {
                      hpm_module    = rdr_module,
                      hpm_src_files = srcs2
                   }
-- | Renamed syntax produced by the typechecker, if it was kept.
-- XXX: should this really be a Maybe X? Check under which circumstances this
-- can become a Nothing and decide whether this should instead throw an
-- exception/signal an error.
type RenamedStuff =
        (Maybe (HsGroup Name, [LImportDecl Name], Maybe [LIE Name],
                Maybe LHsDocString))
-- | Rename and typecheck a module, additionally returning the renamed syntax
hscTypecheckRename :: HscEnv -> ModSummary -> HsParsedModule
                   -> IO (TcGblEnv, RenamedStuff)
hscTypecheckRename hsc_env mod_summary rdr_module = runHsc hsc_env $ do
    -- True: ask the typechecker to keep the renamed syntax
    tc_result <- tcRnModule' hsc_env mod_summary True rdr_module

        -- This 'do' is in the Maybe monad!
    let rn_info = do decl <- tcg_rn_decls tc_result
                     let imports = tcg_rn_imports tc_result
                         exports = tcg_rn_exports tc_result
                         doc_hdr = tcg_doc_hdr tc_result
                     return (decl,imports,exports,doc_hdr)

    return (tc_result, rn_info)
-- wrapper around tcRnModule to handle safe haskell extras
tcRnModule' :: HscEnv -> ModSummary -> Bool -> HsParsedModule
            -> Hsc TcGblEnv
tcRnModule' hsc_env sum save_rn_syntax mod = do
    tcg_res <- {-# SCC "Typecheck-Rename" #-}
               ioMsgMaybe $
                   tcRnModule hsc_env (ms_hsc_src sum) save_rn_syntax mod

    tcSafeOK <- liftIO $ readIORef (tcg_safeInfer tcg_res)
    dflags   <- getDynFlags

    -- end of the Safe Haskell line, how to respond to user?
    if not (safeHaskellOn dflags) || (safeInferOn dflags && not tcSafeOK)
        -- if safe haskell off or safe infer failed, wipe trust
        then wipeTrust tcg_res emptyBag

        -- module safe, throw warning if needed
        else do
            tcg_res' <- hscCheckSafeImports tcg_res
            safe <- liftIO $ readIORef (tcg_safeInfer tcg_res')
            when (safe && wopt Opt_WarnSafe dflags)
                 (logWarnings $ unitBag $
                     mkPlainWarnMsg dflags (warnSafeOnLoc dflags) $ errSafe tcg_res')
            return tcg_res'
  where
    pprMod t  = ppr $ moduleName $ tcg_mod t
    errSafe t = quotes (pprMod t) <+> text "has been inferred as safe!"
-- | Convert a typechecked module to Core
hscDesugar :: HscEnv -> ModSummary -> TcGblEnv -> IO ModGuts
hscDesugar hsc_env mod_summary tc_result =
    runHsc hsc_env $ hscDesugar' (ms_location mod_summary) tc_result
-- | Internal desugaring worker, running in the 'Hsc' monad.
hscDesugar' :: ModLocation -> TcGblEnv -> Hsc ModGuts
hscDesugar' mod_location tc_result = do
    hsc_env <- getHscEnv
    r <- ioMsgMaybe $
      {-# SCC "deSugar" #-}
      deSugar hsc_env mod_location tc_result

    -- always check -Werror after desugaring, this is the last opportunity for
    -- warnings to arise before the backend.
    handleWarnings
    return r
-- | Make a 'ModIface' from the results of typechecking. Used when
-- not optimising, and the interface doesn't need to contain any
-- unfoldings or other cross-module optimisation info.
-- ToDo: the old interface is only needed to get the version numbers,
-- we should use fingerprint versions instead.
makeSimpleIface :: HscEnv -> Maybe ModIface -> TcGblEnv -> ModDetails
                -> IO (ModIface,Bool)
makeSimpleIface hsc_env maybe_old_iface tc_result details = runHsc hsc_env $ do
    safe_mode <- hscGetSafeMode tc_result
    ioMsgMaybe $ do
        mkIfaceTc hsc_env (fmap mi_iface_hash maybe_old_iface) safe_mode
                  details tc_result
-- | Make a 'ModDetails' from the results of typechecking. Used when
-- typechecking only, as opposed to full compilation.
makeSimpleDetails :: HscEnv -> TcGblEnv -> IO ModDetails
makeSimpleDetails hsc_env tc_result = mkBootModDetailsTc hsc_env tc_result
{- **********************************************************************
%* *
The main compiler pipeline
%* *
%********************************************************************* -}
{-
--------------------------------
The compilation proper
--------------------------------
It's the task of the compilation proper to compile Haskell, hs-boot and core
files to either byte-code, hard-code (C, asm, LLVM, ect) or to nothing at all
(the module is still parsed and type-checked. This feature is mostly used by
IDE's and the likes). Compilation can happen in either 'one-shot', 'batch',
'nothing', or 'interactive' mode. 'One-shot' mode targets hard-code, 'batch'
mode targets hard-code, 'nothing' mode targets nothing and 'interactive' mode
targets byte-code.
The modes are kept separate because of their different types and meanings:
* In 'one-shot' mode, we're only compiling a single file and can therefore
discard the new ModIface and ModDetails. This is also the reason it only
targets hard-code; compiling to byte-code or nothing doesn't make sense when
we discard the result.
* 'Batch' mode is like 'one-shot' except that we keep the resulting ModIface
and ModDetails. 'Batch' mode doesn't target byte-code since that require us to
return the newly compiled byte-code.
* 'Nothing' mode has exactly the same type as 'batch' mode but they're still
kept separate. This is because compiling to nothing is fairly special: We
don't output any interface files, we don't run the simplifier and we don't
generate any code.
* 'Interactive' mode is similar to 'batch' mode except that we return the
compiled byte-code together with the ModIface and ModDetails.
Trying to compile a hs-boot file to byte-code will result in a run-time error.
This is the only thing that isn't caught by the type-system.
-}
-- | Progress-reporting callback: session, (index, total), the
-- recompilation verdict and the module under consideration.
type Messager = HscEnv -> (Int,Int) -> RecompileRequired -> ModSummary -> IO ()
-- | Run the recompilation check and, when recompilation is needed, the
-- frontend.  Returns either the old 'ModIface' (no recompilation
-- required) or the typechecking result paired with the old interface
-- hash (if any).
genericHscCompileGetFrontendResult ::
                     Bool -- always do basic recompilation check?
                  -> Maybe TcGblEnv
                  -> Maybe Messager
                  -> HscEnv
                  -> ModSummary
                  -> SourceModified
                  -> Maybe ModIface  -- Old interface, if available
                  -> (Int,Int)       -- (i,n) = module i of n (for msgs)
                  -> IO (Either ModIface (TcGblEnv, Maybe Fingerprint))
genericHscCompileGetFrontendResult always_do_basic_recompilation_check m_tc_result
                                   mHscMessage hsc_env mod_summary source_modified mb_old_iface mod_index
  = do
    let msg what = case mHscMessage of
                       Just hscMessage -> hscMessage hsc_env mod_index what mod_summary
                       Nothing -> return ()

        skip iface = do
            msg UpToDate
            return $ Left iface

        compile mb_old_hash reason = do
            msg reason
            tc_result <- runHsc hsc_env $ genericHscFrontend mod_summary
            return $ Right (tc_result, mb_old_hash)

        stable = case source_modified of
                     SourceUnmodifiedAndStable -> True
                     _                         -> False

    case m_tc_result of
        Just tc_result
            | not always_do_basic_recompilation_check ->
                return $ Right (tc_result, Nothing)
        _ -> do
            (recomp_reqd, mb_checked_iface)
                <- {-# SCC "checkOldIface" #-}
                   checkOldIface hsc_env mod_summary
                                 source_modified mb_old_iface
            -- save the interface that comes back from checkOldIface.
            -- In one-shot mode we don't have the old iface until this
            -- point, when checkOldIface reads it from the disk.
            let mb_old_hash = fmap mi_iface_hash mb_checked_iface

            case mb_checked_iface of
                Just iface | not (recompileRequired recomp_reqd) ->
                    -- If the module used TH splices when it was last compiled,
                    -- then the recompilation check is not accurate enough (#481)
                    -- and we must ignore it.  However, if the module is stable
                    -- (none of the modules it depends on, directly or indirectly,
                    -- changed), then we *can* skip recompilation. This is why
                    -- the SourceModified type contains SourceUnmodifiedAndStable,
                    -- and it's pretty important: otherwise ghc --make would
                    -- always recompile TH modules, even if nothing at all has
                    -- changed. Stability is just the same check that make is
                    -- doing for us in one-shot mode.
                    case m_tc_result of
                        Nothing
                            | mi_used_th iface && not stable ->
                                compile mb_old_hash (RecompBecause "TH")
                        _ ->
                            skip iface
                _ ->
                    case m_tc_result of
                        Nothing -> compile mb_old_hash recomp_reqd
                        Just tc_result ->
                            return $ Right (tc_result, mb_old_hash)
-- | Run the (hookable) frontend: parse, typecheck and rename.
genericHscFrontend :: ModSummary -> Hsc TcGblEnv
genericHscFrontend mod_summary =
    getHooked hscFrontendHook genericHscFrontend' >>= ($ mod_summary)
-- | Default frontend implementation; external-core input is rejected.
genericHscFrontend' :: ModSummary -> Hsc TcGblEnv
genericHscFrontend' mod_summary
    | ExtCoreFile <- ms_hsc_src mod_summary =
        panic "GHC does not currently support reading External Core files"
    | otherwise =
        hscFileFrontEnd mod_summary
--------------------------------------------------------------
-- Compilers
--------------------------------------------------------------
-- | Compile a single module in one-shot (@-c@) mode; the actual worker
-- can be overridden via 'hscCompileOneShotHook'.
hscCompileOneShot :: HscEnv
                  -> FilePath
                  -> ModSummary
                  -> SourceModified
                  -> IO HscStatus
hscCompileOneShot env =
    lookupHook hscCompileOneShotHook hscCompileOneShot' (hsc_dflags env) env
-- Compile Haskell, boot and extCore in OneShot mode.
hscCompileOneShot' :: HscEnv
                   -> FilePath
                   -> ModSummary
                   -> SourceModified
                   -> IO HscStatus
hscCompileOneShot' hsc_env extCore_filename mod_summary src_changed
  = do
    -- One-shot mode needs a knot-tying mutable variable for interface
    -- files. See TcRnTypes.TcGblEnv.tcg_type_env_var.
    type_env_var <- newIORef emptyNameEnv
    let mod = ms_mod mod_summary
        hsc_env' = hsc_env{ hsc_type_env_var = Just (mod, type_env_var) }

        msg what = oneShotMsg hsc_env' what

        skip = do msg UpToDate
                  dumpIfaceStats hsc_env'
                  return HscUpToDate

        compile mb_old_hash reason = runHsc hsc_env' $ do
            liftIO $ msg reason
            tc_result <- genericHscFrontend mod_summary
            guts0 <- hscDesugar' (ms_location mod_summary) tc_result
            dflags <- getDynFlags
            case hscTarget dflags of
                HscNothing -> return HscNotGeneratingCode
                _ ->
                    case ms_hsc_src mod_summary of
                        -- Boot files only produce an interface
                        HsBootFile ->
                            do (iface, changed, _) <- hscSimpleIface' tc_result mb_old_hash
                               liftIO $ hscWriteIface dflags iface changed mod_summary
                               return HscUpdateBoot
                        _ ->
                            do guts <- hscSimplify' guts0
                               (iface, changed, _details, cgguts) <- hscNormalIface' extCore_filename guts mb_old_hash
                               liftIO $ hscWriteIface dflags iface changed mod_summary
                               return $ HscRecomp cgguts mod_summary

        -- XXX This is always False, because in one-shot mode the
        -- concept of stability does not exist.  The driver never
        -- passes SourceUnmodifiedAndStable in here.
        stable = case src_changed of
                     SourceUnmodifiedAndStable -> True
                     _                         -> False

    (recomp_reqd, mb_checked_iface)
        <- {-# SCC "checkOldIface" #-}
           checkOldIface hsc_env' mod_summary src_changed Nothing
    -- save the interface that comes back from checkOldIface.
    -- In one-shot mode we don't have the old iface until this
    -- point, when checkOldIface reads it from the disk.
    let mb_old_hash = fmap mi_iface_hash mb_checked_iface

    case mb_checked_iface of
        Just iface | not (recompileRequired recomp_reqd) ->
            -- If the module used TH splices when it was last compiled,
            -- then the recompilation check is not accurate enough (#481)
            -- and we must ignore it.  However, if the module is stable
            -- (none of the modules it depends on, directly or indirectly,
            -- changed), then we *can* skip recompilation. This is why
            -- the SourceModified type contains SourceUnmodifiedAndStable,
            -- and it's pretty important: otherwise ghc --make would
            -- always recompile TH modules, even if nothing at all has
            -- changed. Stability is just the same check that make is
            -- doing for us in one-shot mode.
            if mi_used_th iface && not stable
            then compile mb_old_hash (RecompBecause "TH")
            else skip
        _ ->
            compile mb_old_hash recomp_reqd
--------------------------------------------------------------
-- NoRecomp handlers
--------------------------------------------------------------
-- | Rebuild the 'ModDetails' for a module by typechecking its (already
-- loaded) interface file, reporting interface statistics afterwards.
genModDetails :: HscEnv -> ModIface -> IO ModDetails
genModDetails hsc_env old_iface = do
    details <- {-# SCC "tcRnIface" #-}
               initIfaceCheck hsc_env (typecheckIface old_iface)
    dumpIfaceStats hsc_env
    return details
--------------------------------------------------------------
-- Progress displayers.
--------------------------------------------------------------
-- | Progress reporting for one-shot mode.  The only message ever printed
-- is the note that compilation was skipped; all other recompilation
-- outcomes stay silent here.
oneShotMsg :: HscEnv -> RecompileRequired -> IO ()
oneShotMsg hsc_env UpToDate =
    compilationProgressMsg (hsc_dflags hsc_env) "compilation IS NOT required"
oneShotMsg _ _ = return ()
-- | Progress 'Messager' for batch (@--make@) mode: prints a
-- \"[i of n] Compiling ...\" line per module, with the recompilation
-- reason appended when one is known.  \"Skipping\" lines only appear at
-- verbosity >= 2.
batchMsg :: Messager
batchMsg hsc_env mod_index recomp mod_summary = emit recomp
  where
    dflags = hsc_dflags hsc_env

    emit MustCompile          = showMsg "Compiling " ""
    emit (RecompBecause why)  = showMsg "Compiling " (" [" ++ why ++ "]")
    emit UpToDate
      | verbosity dflags >= 2 = showMsg "Skipping " ""
      | otherwise             = return ()

    showMsg msg reason =
        compilationProgressMsg dflags $
            (showModuleIndex mod_index ++
             msg ++ showModMsg dflags (hscTarget dflags)
                               (recompileRequired recomp) mod_summary)
            ++ reason
--------------------------------------------------------------
-- FrontEnds
--------------------------------------------------------------
-- | The standard frontend pipeline for a source file: parse it, then
-- rename and typecheck it via 'tcRnModule'' (the @False@ flag is passed
-- straight through to 'tcRnModule'').
hscFileFrontEnd :: ModSummary -> Hsc TcGblEnv
hscFileFrontEnd mod_summary = do
    parsed_mod <- hscParse' mod_summary
    hsc_env <- getHscEnv
    tcRnModule' hsc_env mod_summary False parsed_mod
--------------------------------------------------------------
-- Safe Haskell
--------------------------------------------------------------
-- Note [Safe Haskell Trust Check]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Safe Haskell checks that an import is trusted according to the following
-- rules for an import of module M that resides in Package P:
--
-- * If M is recorded as Safe and all its trust dependencies are OK
-- then M is considered safe.
-- * If M is recorded as Trustworthy and P is considered trusted and
-- all M's trust dependencies are OK then M is considered safe.
--
-- By trust dependencies we mean that the check is transitive. So if
-- a module M that is Safe relies on a module N that is trustworthy,
-- importing module M will first check (according to the second case)
-- that N is trusted before checking M is trusted.
--
-- This is a minimal description, so please refer to the user guide
-- for more details. The user guide is also considered the authoritative
-- source in this matter, not the comments or code.
-- Note [Safe Haskell Inference]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Safe Haskell does Safe inference on modules that don't have any specific
-- safe haskell mode flag. The basic approach to this is:
-- * When deciding if we need to do a Safe language check, treat
-- an unmarked module as having -XSafe mode specified.
-- * For checks, don't throw errors but return them to the caller.
-- * Caller checks if there are errors:
-- * For modules explicitly marked -XSafe, we throw the errors.
-- * For unmarked modules (inference mode), we drop the errors
-- and mark the module as being Unsafe.
-- | Check that the safe imports of the module being compiled are valid.
-- If not we either issue a compilation error if the module is explicitly
-- using Safe Haskell, or mark the module as unsafe if we're in safe
-- inference mode.
--
-- Additionally, under @-XSafe@ any user-written RULES are dropped (with a
-- warning), since rewrite rules can change program semantics.
hscCheckSafeImports :: TcGblEnv -> Hsc TcGblEnv
hscCheckSafeImports tcg_env = do
    dflags   <- getDynFlags
    tcg_env' <- checkSafeImports dflags tcg_env
    case safeLanguageOn dflags of
        True -> do
            -- we nuke user written RULES in -XSafe
            logWarnings $ warns dflags (tcg_rules tcg_env')
            return tcg_env' { tcg_rules = [] }
        False
              -- user defined RULES, so not safe or already unsafe
            | safeInferOn dflags && not (null $ tcg_rules tcg_env') ||
              safeHaskell dflags == Sf_None
            -> wipeTrust tcg_env' $ warns dflags (tcg_rules tcg_env')

              -- trustworthy OR safe inferred with no RULES
            | otherwise
            -> return tcg_env'

  where
    -- One warning per user-written RULE explaining why it was dropped.
    warns dflags rules = listToBag $ map (warnRules dflags) rules
    warnRules dflags (L loc (HsRule n _ _ _ _ _ _)) =
        mkPlainWarnMsg dflags loc $
            text "Rule \"" <> ftext n <> text "\" ignored" $+$
            text "User defined rules are disabled under Safe Haskell"
-- | Validate that safe imported modules are actually safe. For modules in the
-- HomePackage (the package the module we are compiling in resides) this just
-- involves checking its trust type is 'Safe' or 'Trustworthy'. For modules
-- that reside in another package we also must check that the external package
-- is trusted. See the Note [Safe Haskell Trust Check] above for more
-- information.
--
-- The code for this is quite tricky as the whole algorithm is done in a few
-- distinct phases in different parts of the code base. See
-- RnNames.rnImportDecl for where package trust dependencies for a module are
-- collected and unioned. Specifically see the Note [RnNames . Tracking Trust
-- Transitively] and the Note [RnNames . Trust Own Package].
-- | Check that every safe import of the module being compiled refers to a
-- module that is actually safe to import (see Note [Safe Haskell Trust
-- Check] above).  On success, the trusted-package requirements discovered
-- along the way are merged into the module's 'ImportAvails'.
checkSafeImports :: DynFlags -> TcGblEnv -> Hsc TcGblEnv
checkSafeImports dflags tcg_env
    = do
        -- We want to use the warning state specifically for detecting if safe
        -- inference has failed, so store and clear any existing warnings.
        oldErrs <- getWarnings
        clearWarnings

        imps <- mapM condense imports'
        pkgs <- mapM checkSafe imps

        -- grab any safe haskell specific errors and restore old warnings
        errs <- getWarnings
        clearWarnings
        logWarnings oldErrs

        -- See the Note [Safe Haskell Inference]
        case (not $ isEmptyBag errs) of
            -- We have errors!
            True ->
                -- did we fail safe inference or fail -XSafe?
                case safeInferOn dflags of
                    True  -> wipeTrust tcg_env errs
                    False -> liftIO . throwIO . mkSrcErr $ errs
            -- All good matey!
            False -> do
                when (packageTrustOn dflags) $ checkPkgTrust dflags pkg_reqs
                -- add in trusted package requirements for this module
                let new_trust = emptyImportAvails { imp_trust_pkgs = catMaybes pkgs }
                return tcg_env { tcg_imports = imp_info `plusImportAvails` new_trust }

  where
    imp_info = tcg_imports tcg_env     -- ImportAvails
    imports  = imp_mods imp_info       -- ImportedMods
    imports' = moduleEnvToList imports -- (Module, [ImportedModsVal])
    pkg_reqs = imp_trust_pkgs imp_info -- [PackageId]

    -- Collapse the (possibly many) import declarations of a single module
    -- into one entry, rejecting modules imported both safely and unsafely.
    condense :: (Module, [ImportedModsVal]) -> Hsc (Module, SrcSpan, IsSafeImport)
    condense (_, [])   = panic "HscMain.condense: Pattern match failure!"
    condense (m, x:xs) = do (_,_,l,s) <- foldlM cond' x xs
                            -- we turn all imports into safe ones when
                            -- inference mode is on.
                            let s' = if safeInferOn dflags then True else s
                            return (m, l, s')

    -- ImportedModsVal = (ModuleName, Bool, SrcSpan, IsSafeImport)
    cond' :: ImportedModsVal -> ImportedModsVal -> Hsc ImportedModsVal
    cond' v1@(m1,_,l1,s1) (_,_,_,s2)
        | s1 /= s2
        = throwErrors $ unitBag $ mkPlainErrMsg dflags l1
              (text "Module" <+> ppr m1 <+>
              (text $ "is imported both as a safe and unsafe import!"))
        | otherwise
        = return v1

    -- easier interface to work with
    checkSafe (_, _, False) = return Nothing
    checkSafe (m, l, True ) = fst `fmap` hscCheckSafe' dflags m l
-- | Check whether a module may be safely imported.
--
-- A result of @True@ means the import is safe; @False@ means it is not,
-- although an exception may be raised before we get a chance to return.
hscCheckSafe :: HscEnv -> Module -> SrcSpan -> IO Bool
hscCheckSafe hsc_env m l = runHsc hsc_env $ do
    dflags <- getDynFlags
    (_self, pkgs) <- hscCheckSafe' dflags m l
    when (packageTrustOn dflags) $ checkPkgTrust dflags pkgs
    -- no logged Safe Haskell warnings means the import is safe
    isEmptyBag `fmap` getWarnings
-- | Report whether a module is trusted, together with the list of
-- packages that must themselves be trusted for the import to be safe.
hscGetSafe :: HscEnv -> Module -> SrcSpan -> IO (Bool, [PackageId])
hscGetSafe hsc_env m l = runHsc hsc_env $ do
    dflags <- getDynFlags
    (self, deps) <- hscCheckSafe' dflags m l
    good <- fmap isEmptyBag getWarnings
    clearWarnings -- don't want them printed...
    -- include the module's own package requirement, if it has one
    let deps' = case self of
                    Just p  -> p : deps
                    Nothing -> deps
    return (good, deps')
-- | Is a module trusted? If not, throw or log errors depending on the type.
-- Return (regardless of trusted or not) if the trust type requires the modules
-- own package be trusted and a list of other packages required to be trusted
-- (these later ones haven't been checked) but the own package trust has been.
hscCheckSafe' :: DynFlags -> Module -> SrcSpan -> Hsc (Maybe PackageId, [PackageId])
hscCheckSafe' dflags m l = do
    (tw, pkgs) <- isModSafe m l
    case tw of
        False              -> return (Nothing, pkgs)
        True | isHomePkg m -> return (Nothing, pkgs)
             -- a home-package Trustworthy module never needs its own
             -- package trusted, so there is no self-requirement
             | otherwise   -> return (Just $ modulePackageId m, pkgs)
  where
    -- Load the module's interface and decide whether it is safe; logs
    -- (rather than throws) Safe-Haskell trust failures so the caller can
    -- turn them into inference failures.
    isModSafe :: Module -> SrcSpan -> Hsc (Bool, [PackageId])
    isModSafe m l = do
        iface <- lookup' m
        case iface of
            -- can't load iface to check trust!
            Nothing -> throwErrors $ unitBag $ mkPlainErrMsg dflags l
                         $ text "Can't load the interface file for" <+> ppr m
                           <> text ", to check that it can be safely imported"

            -- got iface, check trust
            Just iface' ->
                let trust = getSafeMode $ mi_trust iface'
                    trust_own_pkg = mi_trust_pkg iface'
                    -- check module is trusted
                    safeM = trust `elem` [Sf_SafeInferred, Sf_Safe, Sf_Trustworthy]
                    -- check package is trusted
                    safeP = packageTrusted trust trust_own_pkg m
                    -- pkg trust reqs
                    pkgRs = map fst $ filter snd $ dep_pkgs $ mi_deps iface'
                    -- General errors we throw but Safe errors we log
                    errs = case (safeM, safeP) of
                        (True, True ) -> emptyBag
                        (True, False) -> pkgTrustErr
                        (False, _   ) -> modTrustErr
                in do
                    logWarnings errs
                    return (trust == Sf_Trustworthy, pkgRs)

                where
                    pkgTrustErr = unitBag $ mkPlainErrMsg dflags l $
                        sep [ ppr (moduleName m)
                                <> text ": Can't be safely imported!"
                            , text "The package (" <> ppr (modulePackageId m)
                                <> text ") the module resides in isn't trusted."
                            ]
                    modTrustErr = unitBag $ mkPlainErrMsg dflags l $
                        sep [ ppr (moduleName m)
                                <> text ": Can't be safely imported!"
                            , text "The module itself isn't safe." ]

    -- | Check the package a module resides in is trusted. Safe compiled
    -- modules are trusted without requiring that their package is trusted. For
    -- trustworthy modules, modules in the home package are trusted but
    -- otherwise we check the package trust flag.
    packageTrusted :: SafeHaskellMode -> Bool -> Module -> Bool
    packageTrusted Sf_None             _ _ = False -- shouldn't hit these cases
    packageTrusted Sf_Unsafe           _ _ = False -- prefer for completeness.
    packageTrusted _ _ _
        | not (packageTrustOn dflags)      = True
    packageTrusted Sf_Safe         False _ = True
    packageTrusted Sf_SafeInferred False _ = True
    packageTrusted _ _ m
        | isHomePkg m = True
        | otherwise   = trusted $ getPackageDetails (pkgState dflags)
                                                    (modulePackageId m)

    -- Find the interface for a module, consulting both the home-package
    -- table and the external package interface table.
    lookup' :: Module -> Hsc (Maybe ModIface)
    lookup' m = do
        hsc_env <- getHscEnv
        hsc_eps <- liftIO $ hscEPS hsc_env
        let pkgIfaceT = eps_PIT hsc_eps
            homePkgT  = hsc_HPT hsc_env
            iface     = lookupIfaceByModule dflags homePkgT pkgIfaceT m
#ifdef GHCI
        -- the 'lookupIfaceByModule' method will always fail when calling from GHCi
        -- as the compiler hasn't filled in the various module tables
        -- so we need to call 'getModuleInterface' to load from disk
        iface' <- case iface of
            Just _  -> return iface
            Nothing -> snd `fmap` (liftIO $ getModuleInterface hsc_env m)
        return iface'
#else
        return iface
#endif

    isHomePkg :: Module -> Bool
    isHomePkg m
        | thisPackage dflags == modulePackageId m = True
        | otherwise                               = False
-- | Check that every package in the list is trusted, failing with a
-- source error that names each untrusted package otherwise.
checkPkgTrust :: DynFlags -> [PackageId] -> Hsc ()
checkPkgTrust dflags pkgs
    | null untrusted = return ()
    | otherwise      = (liftIO . throwIO . mkSrcErr . listToBag) untrusted
  where
    untrusted = catMaybes (map complain pkgs)
    complain pkg
        | trusted $ getPackageDetails (pkgState dflags) pkg
        = Nothing
        | otherwise
        = Just $ mkPlainErrMsg dflags noSrcSpan
               $ text "The package (" <> ppr pkg <> text ") is required" <>
                 text " to be trusted but it isn't!"
-- | Set module to unsafe and wipe trust information.
--
-- Make sure to call this method to set a module to inferred unsafe,
-- it should be a central and single failure method.
wipeTrust :: TcGblEnv -> WarningMessages -> Hsc TcGblEnv
wipeTrust tcg_env whyUnsafe = do
    dflags <- getDynFlags

    -- surface the inference failure to the user when -fwarn-unsafe is on
    when (wopt Opt_WarnUnsafe dflags)
         (logWarnings $ unitBag $
             mkPlainWarnMsg dflags (warnUnsafeOnLoc dflags) (whyUnsafe' dflags))

    -- record the "inferred unsafe" verdict and drop any package trust
    -- requirements the module had accumulated
    liftIO $ writeIORef (tcg_safeInfer tcg_env) False
    return $ tcg_env { tcg_imports = wiped_trust }

  where
    wiped_trust   = (tcg_imports tcg_env) { imp_trust_pkgs = [] }
    pprMod        = ppr $ moduleName $ tcg_mod tcg_env
    -- Explanation printed with the warning: the offending flags plus the
    -- collected Safe Haskell error messages.
    whyUnsafe' df = vcat [ quotes pprMod <+> text "has been inferred as unsafe!"
                         , text "Reason:"
                         , nest 4 $ (vcat $ badFlags df) $+$
                                    (vcat $ pprErrMsgBagWithLoc whyUnsafe)
                         ]
    badFlags df   = concat $ map (badFlag df) unsafeFlags
    badFlag df (str,loc,on,_)
        | on df     = [mkLocMessage SevOutput (loc df) $
                          text str <+> text "is not allowed in Safe Haskell"]
        | otherwise = []
-- | Compute the final Safe Haskell mode for a typechecked module
-- (resolving inference against any explicitly requested mode).
hscGetSafeMode :: TcGblEnv -> Hsc SafeHaskellMode
hscGetSafeMode tcg_env =
    getDynFlags >>= \dflags -> liftIO (finalSafeMode dflags tcg_env)
--------------------------------------------------------------
-- Simplifiers
--------------------------------------------------------------
-- | Run the Core-to-Core optimisation pipeline ('core2core') over the
-- desugared module, in plain 'IO'.
hscSimplify :: HscEnv -> ModGuts -> IO ModGuts
hscSimplify hsc_env modguts = runHsc hsc_env $ hscSimplify' modguts

-- | 'Hsc'-monad version of 'hscSimplify'.
hscSimplify' :: ModGuts -> Hsc ModGuts
hscSimplify' ds_result = do
    hsc_env <- getHscEnv
    {-# SCC "Core2Core" #-}
        liftIO $ core2core hsc_env ds_result
--------------------------------------------------------------
-- Interface generators
--------------------------------------------------------------
-- | Make an interface straight from the typechecker output, building the
-- 'ModDetails' with 'mkBootModDetailsTc' (i.e. without desugaring or
-- simplification).  The returned 'Bool' is True iff the interface is
-- unchanged from the supplied old fingerprint.
hscSimpleIface :: HscEnv
               -> TcGblEnv
               -> Maybe Fingerprint
               -> IO (ModIface, Bool, ModDetails)
hscSimpleIface hsc_env tc_result mb_old_iface
    = runHsc hsc_env $ hscSimpleIface' tc_result mb_old_iface

-- | 'Hsc'-monad version of 'hscSimpleIface'.
hscSimpleIface' :: TcGblEnv
                -> Maybe Fingerprint
                -> Hsc (ModIface, Bool, ModDetails)
hscSimpleIface' tc_result mb_old_iface = do
    hsc_env   <- getHscEnv
    details   <- liftIO $ mkBootModDetailsTc hsc_env tc_result
    safe_mode <- hscGetSafeMode tc_result
    (new_iface, no_change)
        <- {-# SCC "MkFinalIface" #-}
           ioMsgMaybe $
               mkIfaceTc hsc_env mb_old_iface safe_mode details tc_result
    -- And the answer is ...
    liftIO $ dumpIfaceStats hsc_env
    return (new_iface, no_change, details)
-- | Tidy the simplified program and build the final interface for a
-- normal (non-boot) module, also emitting External Core to the given
-- file.  Returns the interface, whether it changed, the 'ModDetails',
-- and the tidied 'CgGuts' ready for code generation.
hscNormalIface :: HscEnv
               -> FilePath
               -> ModGuts
               -> Maybe Fingerprint
               -> IO (ModIface, Bool, ModDetails, CgGuts)
hscNormalIface hsc_env extCore_filename simpl_result mb_old_iface =
    runHsc hsc_env $ hscNormalIface' extCore_filename simpl_result mb_old_iface

-- | 'Hsc'-monad version of 'hscNormalIface'.
hscNormalIface' :: FilePath
                -> ModGuts
                -> Maybe Fingerprint
                -> Hsc (ModIface, Bool, ModDetails, CgGuts)
hscNormalIface' extCore_filename simpl_result mb_old_iface = do
    hsc_env <- getHscEnv
    (cg_guts, details) <- {-# SCC "CoreTidy" #-}
                          liftIO $ tidyProgram hsc_env simpl_result

    -- BUILD THE NEW ModIface and ModDetails
    -- and emit external core if necessary
    -- This has to happen *after* code gen so that the back-end
    -- info has been set. Not yet clear if it matters waiting
    -- until after code output
    (new_iface, no_change)
        <- {-# SCC "MkFinalIface" #-}
           ioMsgMaybe $
               mkIface hsc_env mb_old_iface details simpl_result

    -- Emit external core
    -- This should definitely be here and not after CorePrep,
    -- because CorePrep produces unqualified constructor wrapper declarations,
    -- so its output isn't valid External Core (without some preprocessing).
    liftIO $ emitExternalCore (hsc_dflags hsc_env) extCore_filename cg_guts
    liftIO $ dumpIfaceStats hsc_env

    -- Return the prepared code.
    return (new_iface, no_change, details, cg_guts)
--------------------------------------------------------------
-- BackEnd combinators
--------------------------------------------------------------
-- | Write the module's interface file, skipping the write when
-- @no_change@ says the interface is unchanged.  When compiling
-- \"dynamic-too\", a dynamic interface file is written as well.
hscWriteIface :: DynFlags -> ModIface -> Bool -> ModSummary -> IO ()
hscWriteIface dflags iface no_change mod_summary = do
    let ifaceFile = ml_hi_file (ms_location mod_summary)
    unless no_change $
        {-# SCC "writeIface" #-}
        writeIfaceFile dflags ifaceFile iface
    whenGeneratingDynamicToo dflags $ do
        -- TODO: We should do a no_change check for the dynamic
        --       interface file too
        -- TODO: Should handle the dynamic hi filename properly
        let dynIfaceFile  = replaceExtension ifaceFile (dynHiSuf dflags)
            dynIfaceFile' = addBootSuffix_maybe (mi_boot iface) dynIfaceFile
            dynDflags     = dynamicTooMkDynamicDynFlags dflags
        writeIfaceFile dynDflags dynIfaceFile' iface
-- | Compile to hard-code.
--
-- Takes the tidied 'CgGuts' through CorePrep, STG conversion, Cmm code
-- generation and finally code output, returning the (possibly updated)
-- output filename and the stub C file if one was produced.
hscGenHardCode :: HscEnv -> CgGuts -> ModSummary -> FilePath
               -> IO (FilePath, Maybe FilePath) -- ^ @Just f@ <=> _stub.c is f
hscGenHardCode hsc_env cgguts mod_summary output_filename = do
    let CgGuts{ -- This is the last use of the ModGuts in a compilation.
                -- From now on, we just use the bits we need.
                cg_module   = this_mod,
                cg_binds    = core_binds,
                cg_tycons   = tycons,
                cg_foreign  = foreign_stubs0,
                cg_dep_pkgs = dependencies,
                cg_hpc_info = hpc_info } = cgguts
        dflags = hsc_dflags hsc_env
        location = ms_location mod_summary
        data_tycons = filter isDataTyCon tycons
        -- cg_tycons includes newtypes, for the benefit of External Core,
        -- but we don't generate any code for newtypes

    -------------------
    -- PREPARE FOR CODE GENERATION
    -- Do saturation and convert to A-normal form
    prepd_binds <- {-# SCC "CorePrep" #-}
                   corePrepPgm dflags hsc_env core_binds data_tycons ;
    -----------------  Convert to STG ------------------
    (stg_binds, cost_centre_info)
        <- {-# SCC "CoreToStg" #-}
           myCoreToStg dflags this_mod prepd_binds

    -- profiling initialisation code is appended to the foreign stubs
    let prof_init = profilingInitCode this_mod cost_centre_info
        foreign_stubs = foreign_stubs0 `appendStubC` prof_init

    ------------------  Code generation ------------------
    cmms <- {-# SCC "NewCodeGen" #-}
            tryNewCodeGen hsc_env this_mod data_tycons
                          cost_centre_info
                          stg_binds hpc_info

    ------------------  Code output -----------------------
    rawcmms0 <- {-# SCC "cmmToRawCmm" #-}
                cmmToRawCmm dflags cmms

    -- dump each raw Cmm group as it streams past, without forcing the
    -- whole stream up front
    let dump a = do dumpIfSet_dyn dflags Opt_D_dump_cmm_raw "Raw Cmm"
                        (ppr a)
                    return a
        rawcmms1 = Stream.mapM dump rawcmms0

    (output_filename, (_stub_h_exists, stub_c_exists))
        <- {-# SCC "codeOutput" #-}
           codeOutput dflags this_mod output_filename location
                      foreign_stubs dependencies rawcmms1
    return (output_filename, stub_c_exists)
-- | Compile tidied Core to byte code for the interpreter, also writing
-- out the C-side foreign stubs (if any).  Only available when GHC is
-- built with the interpreter.
hscInteractive :: HscEnv
               -> CgGuts
               -> ModSummary
               -> IO (Maybe FilePath, CompiledByteCode, ModBreaks)
#ifdef GHCI
hscInteractive hsc_env cgguts mod_summary = do
    let dflags = hsc_dflags hsc_env
    let CgGuts{ -- This is the last use of the ModGuts in a compilation.
                -- From now on, we just use the bits we need.
                cg_module    = this_mod,
                cg_binds     = core_binds,
                cg_tycons    = tycons,
                cg_foreign   = foreign_stubs,
                cg_modBreaks = mod_breaks } = cgguts

        location = ms_location mod_summary
        data_tycons = filter isDataTyCon tycons
        -- cg_tycons includes newtypes, for the benefit of External Core,
        -- but we don't generate any code for newtypes

    -------------------
    -- PREPARE FOR CODE GENERATION
    -- Do saturation and convert to A-normal form
    prepd_binds <- {-# SCC "CorePrep" #-}
                   corePrepPgm dflags hsc_env core_binds data_tycons

    -----------------  Generate byte code ------------------
    comp_bc <- byteCodeGen dflags this_mod prepd_binds data_tycons mod_breaks

    ------------------ Create f-x-dynamic C-side stuff ---
    (_istub_h_exists, istub_c_exists)
        <- outputForeignStubs dflags this_mod location foreign_stubs

    return (istub_c_exists, comp_bc, mod_breaks)
#else
hscInteractive _ _ = panic "GHC not compiled with interpreter"
#endif
------------------------------
-- | Compile a stand-alone Cmm file (@.cmm@): parse it, run it through the
-- Cmm pipeline, and write the resulting object code.
hscCompileCmmFile :: HscEnv -> FilePath -> FilePath -> IO ()
hscCompileCmmFile hsc_env filename output_filename = runHsc hsc_env $ do
    let dflags = hsc_dflags hsc_env
    cmm <- ioMsgMaybe $ parseCmmFile dflags filename
    liftIO $ do
        us <- mkSplitUniqSupply 'S'
        let initTopSRT = initUs_ us emptySRT
        dumpIfSet_dyn dflags Opt_D_dump_cmm "Parsed Cmm" (ppr cmm)
        (_, cmmgroup) <- cmmPipeline hsc_env initTopSRT cmm
        rawCmms <- cmmToRawCmm dflags (Stream.yield cmmgroup)
        _ <- codeOutput dflags no_mod output_filename no_loc NoStubs [] rawCmms
        return ()
  where
    -- There is no Haskell module here, only an output path; the other
    -- ModLocation fields must never be forced.
    no_mod = panic "hscCmmFile: no_mod"
    no_loc = ModLocation{ ml_hs_file  = Just filename,
                          ml_hi_file  = panic "hscCmmFile: no hi file",
                          ml_obj_file = panic "hscCmmFile: no obj file" }
-------------------- Stuff for new code gen ---------------------
tryNewCodeGen :: HscEnv -> Module -> [TyCon]
              -> CollectedCCs
              -> [StgBinding]
              -> HpcInfo
              -> IO (Stream IO CmmGroup ())
-- Note we produce a 'Stream' of CmmGroups, so that the
-- backend can be run incrementally. Otherwise it generates all
-- the C-- up front, which has a significant space cost.
tryNewCodeGen hsc_env this_mod data_tycons
              cost_centre_info stg_binds hpc_info = do
    let dflags = hsc_dflags hsc_env

    let cmm_stream :: Stream IO CmmGroup ()
        cmm_stream = {-# SCC "StgCmm" #-}
            StgCmm.codeGen dflags this_mod data_tycons
                           cost_centre_info stg_binds hpc_info

        -- codegen consumes a stream of CmmGroup, and produces a new
        -- stream of CmmGroup (not necessarily synchronised: one
        -- CmmGroup on input may produce many CmmGroups on output due
        -- to proc-point splitting).

    -- dump each group as it is produced, preserving streaming
    let dump1 a = do dumpIfSet_dyn dflags Opt_D_dump_cmm
                         "Cmm produced by new codegen" (ppr a)
                     return a

        ppr_stream1 = Stream.mapM dump1 cmm_stream

    -- We are building a single SRT for the entire module, so
    -- we must thread it through all the procedures as we cps-convert them.
    us <- mkSplitUniqSupply 'S'

    -- When splitting, we generate one SRT per split chunk, otherwise
    -- we generate one SRT for the whole module.
    let pipeline_stream
          | gopt Opt_SplitObjs dflags
            = {-# SCC "cmmPipeline" #-}
              -- per-chunk SRT: emit each (possibly empty) SRT alongside
              -- its chunk and discard the accumulator at the end
              let run_pipeline us cmmgroup = do
                      let (topSRT', us') = initUs us emptySRT
                      (topSRT, cmmgroup) <- cmmPipeline hsc_env topSRT' cmmgroup
                      let srt | isEmptySRT topSRT = []
                              | otherwise         = srtToData topSRT
                      return (us', srt ++ cmmgroup)

              in do _ <- Stream.mapAccumL run_pipeline us ppr_stream1
                    return ()

          | otherwise
            = {-# SCC "cmmPipeline" #-}
              -- single SRT: accumulate across the whole module and yield
              -- its data at the very end of the stream
              let initTopSRT   = initUs_ us emptySRT
                  run_pipeline = cmmPipeline hsc_env
              in do topSRT <- Stream.mapAccumL run_pipeline initTopSRT ppr_stream1
                    Stream.yield (srtToData topSRT)

    let dump2 a = do dumpIfSet_dyn dflags Opt_D_dump_cmm "Output Cmm" $ ppr a
                     return a

        ppr_stream2 = Stream.mapM dump2 pipeline_stream

    return ppr_stream2
-- | Translate prepared Core into STG ('coreToStg') and then run the
-- STG-to-STG pipeline ('stg2stg'), returning the final bindings together
-- with the collected cost-centre information.
myCoreToStg :: DynFlags -> Module -> CoreProgram
            -> IO ( [StgBinding]   -- output program
                  , CollectedCCs ) -- cost centre info (declared and used)
myCoreToStg dflags this_mod prepd_binds = do
    first_pass <- {-# SCC "Core2Stg" #-}
                  coreToStg dflags this_mod prepd_binds
    second_pass <- {-# SCC "Stg2Stg" #-}
                   stg2stg dflags this_mod first_pass
    return second_pass
{- **********************************************************************
%* *
\subsection{Compiling a do-statement}
%* *
%********************************************************************* -}
{-
When the UnlinkedBCOExpr is linked you get an HValue of type *IO [HValue]* When
you run it you get a list of HValues that should be the same length as the list
of names; add them to the ClosureEnv.
A naked expression returns a singleton Name [it]. The stmt is lifted into the
IO monad as explained in Note [Interactively-bound Ids in GHCi] in HscTypes
-}
#ifdef GHCI
-- | Compile a stmt all the way to an HValue, but don't run it
--
-- We return Nothing to indicate an empty statement (or comment only), not a
-- parse error.
--
-- Delegates to 'hscStmtWithLocation' with the source named
-- @\"\<interactive\>\"@ starting at line 1.
hscStmt :: HscEnv -> String -> IO (Maybe ([Id], IO [HValue], FixityEnv))
hscStmt hsc_env stmt = hscStmtWithLocation hsc_env stmt "<interactive>" 1
-- | Compile a stmt all the way to an HValue, but don't run it
--
-- We return Nothing to indicate an empty statement (or comment only), not a
-- parse error.
hscStmtWithLocation :: HscEnv
                    -> String -- ^ The statement
                    -> String -- ^ The source
                    -> Int    -- ^ Starting line
                    -> IO (Maybe ([Id], IO [HValue], FixityEnv))
hscStmtWithLocation hsc_env0 stmt source linenumber =
    runInteractiveHsc hsc_env0 $ do
        maybe_stmt <- hscParseStmtWithLocation source linenumber stmt
        case maybe_stmt of
            Nothing -> return Nothing

            Just parsed_stmt -> do
                -- Rename and typecheck it
                hsc_env <- getHscEnv
                (ids, tc_expr, fix_env) <- ioMsgMaybe $ tcRnStmt hsc_env parsed_stmt

                -- Desugar it
                ds_expr <- ioMsgMaybe $ deSugarExpr hsc_env tc_expr
                liftIO (lintInteractiveExpr "desugar expression" hsc_env ds_expr)
                handleWarnings

                -- Then code-gen, and link it
                -- It's important NOT to have package 'interactive' as thisPackageId
                -- for linking, else we try to link 'main' and can't find it.
                -- Whereas the linker already knows to ignore 'interactive'
                let src_span = srcLocSpan interactiveSrcLoc
                hval <- liftIO $ hscCompileCoreExpr hsc_env src_span ds_expr
                -- the compiled statement is by construction an IO action
                -- producing the bound values; see the module note on
                -- Interactively-bound Ids in GHCi
                let hval_io = unsafeCoerce# hval :: IO [HValue]

                return $ Just (ids, hval_io, fix_env)
-- | Compile a group of declarations (as entered at the GHCi prompt).
--
-- Delegates to 'hscDeclsWithLocation' with the source named
-- @\"\<interactive\>\"@ starting at line 1.
hscDecls :: HscEnv
         -> String -- ^ The statement
         -> IO ([TyThing], InteractiveContext)
hscDecls hsc_env str = hscDeclsWithLocation hsc_env str "<interactive>" 1
-- | Compile a group of declarations (as entered at the GHCi prompt),
-- link them, and bind them in the interactive context.
--
-- Returns the newly defined 'TyThing's together with the extended
-- 'InteractiveContext' in which they are in scope.
hscDeclsWithLocation :: HscEnv
                     -> String -- ^ The statement
                     -> String -- ^ The source
                     -> Int    -- ^ Starting line
                     -> IO ([TyThing], InteractiveContext)
hscDeclsWithLocation hsc_env0 str source linenumber =
  runInteractiveHsc hsc_env0 $ do
    L _ (HsModule{ hsmodDecls = decls }) <-
        hscParseThingWithLocation source linenumber parseModule str

    {- Rename and typecheck it -}
    hsc_env <- getHscEnv
    tc_gblenv <- ioMsgMaybe $ tcRnDeclsi hsc_env decls

    {- Grab the new instances -}
    -- We grab the whole environment because of the overlapping that may have
    -- been done. See the notes at the definition of InteractiveContext
    -- (ic_instances) for more details.
    let finsts = tcg_fam_insts tc_gblenv
        insts  = tcg_insts     tc_gblenv

    let defaults = tcg_default tc_gblenv

    {- Desugar it -}
    -- We use a basically null location for iNTERACTIVE
    let iNTERACTIVELoc = ModLocation{ ml_hs_file  = Nothing,
                                      ml_hi_file  = panic "hsDeclsWithLocation:ml_hi_file",
                                      ml_obj_file = panic "hsDeclsWithLocation:ml_obj_file"}
    ds_result <- hscDesugar' iNTERACTIVELoc tc_gblenv

    {- Simplify -}
    simpl_mg <- liftIO $ hscSimplify hsc_env ds_result

    {- Tidy -}
    (tidy_cg, _mod_details) <- liftIO $ tidyProgram hsc_env simpl_mg

    let dflags = hsc_dflags hsc_env
        !CgGuts{ cg_module    = this_mod,
                 cg_binds     = core_binds,
                 cg_tycons    = tycons,
                 cg_modBreaks = mod_breaks } = tidy_cg
        data_tycons = filter isDataTyCon tycons

    {- Prepare For Code Generation -}
    -- Do saturation and convert to A-normal form
    prepd_binds <- {-# SCC "CorePrep" #-}
                   liftIO $ corePrepPgm dflags hsc_env core_binds data_tycons

    {- Generate byte code -}
    cbc <- liftIO $ byteCodeGen dflags this_mod
                                prepd_binds data_tycons mod_breaks

    let src_span = srcLocSpan interactiveSrcLoc
    liftIO $ linkDecls hsc_env src_span cbc

    let tcs = filterOut isImplicitTyCon (mg_tcs simpl_mg)

        ext_ids = [ id | id <- bindersOfBinds core_binds
                       , isExternalName (idName id)
                       , not (isDFunId id || isImplicitId id) ]
            -- We only need to keep around the external bindings
            -- (as decided by TidyPgm), since those are the only ones
            -- that might be referenced elsewhere.
            -- The DFunIds are in 'insts' (see Note [ic_tythings] in HscTypes
            -- Implicit Ids are implicit in tcs

        tythings = map AnId ext_ids ++ map ATyCon tcs

    let icontext = hsc_IC hsc_env
        ictxt1   = extendInteractiveContext icontext tythings
        ictxt    = ictxt1 { ic_instances = (insts, finsts)
                          , ic_default   = defaults }

    return (tythings, ictxt)
-- | Parse a single import declaration, by parsing the string as a module
-- and insisting on exactly one import in it.
hscImport :: HscEnv -> String -> IO (ImportDecl RdrName)
hscImport hsc_env str = runInteractiveHsc hsc_env $ do
    (L _ (HsModule{hsmodImports=is})) <-
        hscParseThing parseModule str
    case is of
        [i] -> return (unLoc i)
        _   -> liftIO $ throwOneError $
                   mkPlainErrMsg (hsc_dflags hsc_env) noSrcSpan $
                       ptext (sLit "parse error in import declaration")
-- | Typecheck an expression (but don't run it)
-- Returns its most general type
--
-- The input is parsed as a statement; only a bare expression statement
-- ('BodyStmt') is accepted, anything else is rejected as \"not an
-- expression\".
hscTcExpr :: HscEnv
          -> String -- ^ The expression
          -> IO Type
hscTcExpr hsc_env0 expr = runInteractiveHsc hsc_env0 $ do
    hsc_env <- getHscEnv
    maybe_stmt <- hscParseStmt expr
    case maybe_stmt of
        Just (L _ (BodyStmt expr _ _ _)) ->
            ioMsgMaybe $ tcRnExpr hsc_env expr
        _ ->
            throwErrors $ unitBag $ mkPlainErrMsg (hsc_dflags hsc_env) noSrcSpan
                (text "not an expression:" <+> quotes (text expr))
-- | Find the kind of a type
-- Currently this does *not* generalise the kinds of the type
hscKcType
  :: HscEnv
  -> Bool            -- ^ Normalise the type
  -> String          -- ^ The type as a string
  -> IO (Type, Kind) -- ^ Resulting type (possibly normalised) and kind
hscKcType hsc_env0 normalise str = runInteractiveHsc hsc_env0 $ do
    hsc_env <- getHscEnv
    hscParseType str >>= ioMsgMaybe . tcRnType hsc_env normalise
-- | Parse a GHCi statement from @\"\<interactive\>\"@ at line 1.
hscParseStmt :: String -> Hsc (Maybe (GhciLStmt RdrName))
hscParseStmt = hscParseThing parseStmt

-- | Parse a GHCi statement with an explicit source name and start line.
hscParseStmtWithLocation :: String -> Int -> String
                         -> Hsc (Maybe (GhciLStmt RdrName))
hscParseStmtWithLocation source linenumber stmt =
    hscParseThingWithLocation source linenumber parseStmt stmt

-- | Parse a type expression.
hscParseType :: String -> Hsc (LHsType RdrName)
hscParseType = hscParseThing parseType
#endif
-- | Parse a single identifier in the interactive context.
hscParseIdentifier :: HscEnv -> String -> IO (Located RdrName)
hscParseIdentifier hsc_env str =
    runInteractiveHsc hsc_env $ hscParseThing parseIdentifier str

-- | Run a parser on a string, attributing it to @\"\<interactive\>\"@
-- starting at line 1.
hscParseThing :: (Outputable thing) => Lexer.P thing -> String -> Hsc thing
hscParseThing = hscParseThingWithLocation "<interactive>" 1
-- | Run a parser on a string with an explicit source name and start
-- line, logging parser warnings and dumping the result under
-- @-ddump-parsed@.  Parse failures are thrown as source errors.
hscParseThingWithLocation :: (Outputable thing) => String -> Int
                          -> Lexer.P thing -> String -> Hsc thing
hscParseThingWithLocation source linenumber parser str
  = {-# SCC "Parser" #-} do
    dflags <- getDynFlags
    liftIO $ showPass dflags "Parser"

    let buf = stringToStringBuffer str
        loc = mkRealSrcLoc (fsLit source) linenumber 1

    case unP parser (mkPState dflags buf loc) of
        PFailed span err -> do
            let msg = mkPlainErrMsg dflags span err
            throwErrors $ unitBag msg

        POk pst thing -> do
            logWarningsReportErrors (getMessages pst)
            liftIO $ dumpIfSet_dyn dflags Opt_D_dump_parsed "Parser" (ppr thing)
            return thing
-- | Compile a bare 'CoreProgram' (wrapped into a vanilla 'ModGuts' via
-- 'mkModGuts') all the way to object code, optionally running the
-- simplifier first.
hscCompileCore :: HscEnv -> Bool -> SafeHaskellMode -> ModSummary
               -> CoreProgram -> FilePath -> FilePath -> IO ()
hscCompileCore hsc_env simplify safe_mode mod_summary binds output_filename extCore_filename
  = runHsc hsc_env $ do
        guts <- maybe_simplify (mkModGuts (ms_mod mod_summary) safe_mode binds)
        (iface, changed, _details, cgguts) <- hscNormalIface' extCore_filename guts Nothing
        liftIO $ hscWriteIface (hsc_dflags hsc_env) iface changed mod_summary
        _ <- liftIO $ hscGenHardCode hsc_env cgguts mod_summary output_filename
        return ()

  where
    maybe_simplify mod_guts | simplify  = hscSimplify' mod_guts
                            | otherwise = return mod_guts
-- Makes a "vanilla" ModGuts.
-- Only the module, Safe Haskell mode and bindings come from the
-- arguments; every other field gets an empty or default value.
mkModGuts :: Module -> SafeHaskellMode -> CoreProgram -> ModGuts
mkModGuts mod safe binds =
    ModGuts {
        mg_module          = mod,
        mg_boot            = False,
        mg_exports         = [],
        mg_deps            = noDependencies,
        mg_dir_imps        = emptyModuleEnv,
        mg_used_names      = emptyNameSet,
        mg_used_th         = False,
        mg_rdr_env         = emptyGlobalRdrEnv,
        mg_fix_env         = emptyFixityEnv,
        mg_tcs             = [],
        mg_insts           = [],
        mg_fam_insts       = [],
        mg_patsyns         = [],
        mg_rules           = [],
        mg_vect_decls      = [],
        mg_binds           = binds,
        mg_foreign         = NoStubs,
        mg_warns           = NoWarnings,
        mg_anns            = [],
        mg_hpc_info        = emptyHpcInfo False,
        mg_modBreaks       = emptyModBreaks,
        mg_vect_info       = noVectInfo,
        mg_inst_env        = emptyInstEnv,
        mg_fam_inst_env    = emptyFamInstEnv,
        mg_safe_haskell    = safe,
        mg_trust_pkg       = False,
        mg_dependent_files = []
    }
{- **********************************************************************
%* *
Desugar, simplify, convert to bytecode, and link an expression
%* *
%********************************************************************* -}
#ifdef GHCI
-- | Compile a single Core expression to a linked 'HValue', going through
-- the 'hscCompileCoreExprHook' so plugins/front-ends can override it.
hscCompileCoreExpr :: HscEnv -> SrcSpan -> CoreExpr -> IO HValue
hscCompileCoreExpr hsc_env =
  lookupHook hscCompileCoreExprHook hscCompileCoreExpr' (hsc_dflags hsc_env) hsc_env

-- | Default implementation: simplify, tidy, core-prep, lint, convert to
-- BCOs and link.
hscCompileCoreExpr' :: HscEnv -> SrcSpan -> CoreExpr -> IO HValue
hscCompileCoreExpr' hsc_env srcspan ds_expr
  | rtsIsProfiled
  = throwIO (InstallationError "You can't call hscCompileCoreExpr in a profiled compiler")
    -- Otherwise you get a seg-fault when you run it

  | otherwise
  = do let dflags = hsc_dflags hsc_env
       -- Simplify it
       simpl_expr <- simplifyExpr dflags ds_expr
       -- Tidy it (temporary, until coreSat does cloning)
       let tidy_expr = tidyExpr emptyTidyEnv simpl_expr
       -- Prepare for codegen
       prepd_expr <- corePrepExpr dflags hsc_env tidy_expr
       -- Lint if necessary
       lintInteractiveExpr "hscCompileExpr" hsc_env prepd_expr
       -- Convert to BCOs
       bcos <- coreExprToBCOs dflags (icInteractiveModule (hsc_IC hsc_env)) prepd_expr
       -- Link it, returning the resulting HValue directly
       linkExpr hsc_env srcspan bcos
#endif
{- **********************************************************************
%* *
Statistics on reading interfaces
%* *
%********************************************************************* -}
-- | Dump statistics about interface-file reading (from the external
-- package state) when either of the relevant dump flags is enabled.
dumpIfaceStats :: HscEnv -> IO ()
dumpIfaceStats hsc_env = do
    eps <- readIORef (hsc_EPS hsc_env)
    let dflags = hsc_dflags hsc_env
        -- Same gating as before: -ddump-if-trace or -ddump-rn-stats.
        wanted = dopt Opt_D_dump_if_trace dflags
              || dopt Opt_D_dump_rn_stats dflags
    dumpIfSet dflags wanted "Interface statistics" (ifaceStats eps)
{- **********************************************************************
%* *
Progress Messages: Module i of n
%* *
%********************************************************************* -}
-- | Render a compilation progress marker such as @[ 2 of 10] @.
-- The index is right-padded with spaces to the width of the total,
-- so successive messages line up.
showModuleIndex :: (Int, Int) -> String
showModuleIndex (i, n) = "[" ++ pad ++ i_str ++ " of " ++ n_str ++ "] "
  where
    n_str = show n
    i_str = show i
    -- Negative replicate counts yield the empty string, so an index
    -- wider than the total is simply left unpadded.
    pad   = replicate (length n_str - length i_str) ' '
| lukexi/ghc-7.8-arm64 | compiler/main/HscMain.hs | bsd-3-clause | 66,217 | 7 | 28 | 19,708 | 11,068 | 5,667 | 5,401 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ConstraintKinds #-}
-- | Cache information about previous builds
module Stack.Build.Cache
( tryGetBuildCache
, tryGetConfigCache
, tryGetCabalMod
, getInstalledExes
, buildCacheTimes
, tryGetFlagCache
, deleteCaches
, markExeInstalled
, markExeNotInstalled
, writeFlagCache
, writeBuildCache
, writeConfigCache
, writeCabalMod
, setTestSuccess
, unsetTestSuccess
, checkTestSuccess
, writePrecompiledCache
, readPrecompiledCache
) where
import Control.Exception.Enclosed (handleIO)
import Control.Monad.Catch (MonadThrow)
import Control.Monad.IO.Class
import Control.Monad.Logger (MonadLogger)
import Control.Monad.Reader
import qualified Crypto.Hash.SHA256 as SHA256
import qualified Data.Binary as Binary (encode)
import Data.Binary.VersionTagged
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Base16 as B16
import Data.Map (Map)
import Data.Maybe (fromMaybe, mapMaybe)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import GHC.Generics (Generic)
import Path
import Path.IO
import Stack.Types.Build
import Stack.Constants
import Stack.Types
-- | Directory containing the marker files that record an executable as
-- installed at the given location.
exeInstalledDir :: (MonadReader env m, HasEnvConfig env, MonadThrow m)
                => InstallLocation -> m (Path Abs Dir)
exeInstalledDir loc = do
    root <- case loc of
        Snap  -> installationRootDeps
        Local -> installationRootLocal
    return (root </> $(mkRelDir "installed-packages"))
-- | Get all of the installed executables, by parsing the marker file
-- names in 'exeInstalledDir' back into package identifiers.
getInstalledExes :: (MonadReader env m, HasEnvConfig env, MonadIO m, MonadThrow m)
                 => InstallLocation -> m [PackageIdentifier]
getInstalledExes loc = do
    dir <- exeInstalledDir loc
    -- A missing/unreadable directory just means nothing is installed yet.
    listing <- liftIO $ handleIO (const (return ([], []))) (listDirectory dir)
    let files = snd listing
    return (mapMaybe (parsePackageIdentifierFromString . toFilePath . filename) files)
-- | Mark the given executable as installed by creating a marker file
-- named after its package identifier.
markExeInstalled :: (MonadReader env m, HasEnvConfig env, MonadIO m, MonadThrow m)
                 => InstallLocation -> PackageIdentifier -> m ()
markExeInstalled loc ident = do
    dir <- exeInstalledDir loc
    createTree dir
    relFile <- parseRelFile (packageIdentifierString ident)
    -- TODO consideration for the future: list all of the executables
    -- installed, and invalidate this file in getInstalledExes if they no
    -- longer exist
    liftIO $ writeFile (toFilePath (dir </> relFile)) "Installed"

-- | Mark the given executable as not installed by deleting its marker
-- file (a no-op when the file does not exist).
markExeNotInstalled :: (MonadReader env m, HasEnvConfig env, MonadIO m, MonadThrow m)
                    => InstallLocation -> PackageIdentifier -> m ()
markExeNotInstalled loc ident = do
    dir <- exeInstalledDir loc
    relFile <- parseRelFile (packageIdentifierString ident)
    removeFileIfExists (dir </> relFile)
-- | Stored on disk to know whether the flags have changed or any
-- files have changed.
--
-- Serialized with "Data.Binary.VersionTagged": the structural/semantic
-- version instances below invalidate old caches when the layout changes.
data BuildCache = BuildCache
    { buildCacheTimes :: !(Map FilePath FileCacheInfo)
      -- ^ Modification times of files.
    }
    deriving (Generic)

-- Instances required by taggedEncodeFile/decodeFileOrFailDeep.
instance Binary BuildCache
instance HasStructuralInfo BuildCache
instance HasSemanticVersion BuildCache
instance NFData BuildCache
-- | Try to read the dirtiness cache for the given package directory,
-- projecting out just the per-file modification times.
tryGetBuildCache :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m, MonadLogger m, HasEnvConfig env)
                 => Path Abs Dir -> m (Maybe (Map FilePath FileCacheInfo))
tryGetBuildCache dir = do
    mcache <- tryGetCache buildCacheFile dir
    return (buildCacheTimes `fmap` mcache)

-- | Try to read the configuration cache for the given package directory.
tryGetConfigCache :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m, MonadLogger m, HasEnvConfig env)
                  => Path Abs Dir -> m (Maybe ConfigCache)
tryGetConfigCache dir = tryGetCache configCacheFile dir

-- | Try to read the mod time of the cabal file from the last build.
tryGetCabalMod :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m, MonadLogger m, HasEnvConfig env)
               => Path Abs Dir -> m (Maybe ModTime)
tryGetCabalMod dir = tryGetCache configCabalMod dir
-- | Try to load a cache: resolve the cache file for the directory and
-- decode it, yielding 'Nothing' on any decode failure.
tryGetCache :: (MonadIO m, BinarySchema a)
            => (Path Abs Dir -> m (Path Abs File))
            -> Path Abs Dir
            -> m (Maybe a)
tryGetCache getCachePath dir = getCachePath dir >>= decodeFileOrFailDeep
-- | Write the dirtiness cache for this package's files.
writeBuildCache :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m, MonadLogger m, HasEnvConfig env)
                => Path Abs Dir -> Map FilePath FileCacheInfo -> m ()
writeBuildCache dir times =
    writeCache dir buildCacheFile BuildCache { buildCacheTimes = times }
-- | Write the dirtiness cache for this package's configuration.
writeConfigCache :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m, MonadLogger m, HasEnvConfig env)
                 => Path Abs Dir
                 -> ConfigCache
                 -> m ()
writeConfigCache dir cache = writeCache dir configCacheFile cache

-- | Record the cabal file's modification time; see 'tryGetCabalMod'.
writeCabalMod :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m, MonadLogger m, HasEnvConfig env)
              => Path Abs Dir
              -> ModTime
              -> m ()
writeCabalMod dir modTime = writeCache dir configCabalMod modTime
-- | Delete the caches for the project.
deleteCaches :: (MonadIO m, MonadReader env m, HasConfig env, MonadLogger m, MonadThrow m, HasEnvConfig env)
             => Path Abs Dir -> m ()
deleteCaches dir = do
    {- FIXME confirm that this is acceptable to remove
    bfp <- buildCacheFile dir
    removeFileIfExists bfp
    -}
    -- Only the config cache is removed at present.
    configCacheFile dir >>= removeFileIfExists
-- | Write a value to a cache file, tagged with its schema version.
writeCache :: (BinarySchema a, MonadIO m)
           => Path Abs Dir
           -> (Path Abs Dir -> m (Path Abs File))
           -> a
           -> m ()
writeCache dir getCachePath content = do
    cachePath <- getCachePath dir
    taggedEncodeFile cachePath content
-- | Path of the flag-cache file for an installed library or executable,
-- named after the ghc-pkg id / package identifier respectively.
flagCacheFile :: (MonadIO m, MonadThrow m, MonadReader env m, HasEnvConfig env)
              => Installed
              -> m (Path Abs File)
flagCacheFile installed = do
    let basename = case installed of
            Library _ gid    -> ghcPkgIdString gid
            Executable ident -> packageIdentifierString ident
    rel <- parseRelFile basename
    dir <- flagCacheLocal
    return (dir </> rel)
-- | Loads the flag cache for the given installed extra-deps.
tryGetFlagCache :: (MonadIO m, MonadThrow m, MonadReader env m, HasEnvConfig env)
                => Installed
                -> m (Maybe ConfigCache)
tryGetFlagCache gid = flagCacheFile gid >>= decodeFileOrFailDeep

-- | Persist the flag cache for the given installed package, creating
-- the parent directory if needed.
writeFlagCache :: (MonadIO m, MonadReader env m, HasEnvConfig env, MonadThrow m)
               => Installed
               -> ConfigCache
               -> m ()
writeFlagCache gid cache = do
    file <- flagCacheFile gid
    liftIO (createTree (parent file))
    liftIO (taggedEncodeFile file cache)
-- | Mark a test suite as having succeeded.
setTestSuccess :: (MonadIO m, MonadLogger m, MonadThrow m, MonadReader env m, HasConfig env, HasEnvConfig env)
               => Path Abs Dir
               -> m ()
setTestSuccess dir = writeCache dir testSuccessFile True

-- | Mark a test suite as not having succeeded.
unsetTestSuccess :: (MonadIO m, MonadLogger m, MonadThrow m, MonadReader env m, HasConfig env, HasEnvConfig env)
                 => Path Abs Dir
                 -> m ()
unsetTestSuccess dir = writeCache dir testSuccessFile False

-- | Check if the test suite already passed; a missing or unreadable
-- cache counts as not passed.
checkTestSuccess :: (MonadIO m, MonadLogger m, MonadThrow m, MonadReader env m, HasConfig env, HasEnvConfig env)
                 => Path Abs Dir
                 -> m Bool
checkTestSuccess dir = do
    msuccess <- tryGetCache testSuccessFile dir
    return (fromMaybe False msuccess)
--------------------------------------
-- Precompiled Cache
--
-- Idea is simple: cache information about packages built in other snapshots,
-- and then for identical matches (same flags, config options, dependencies)
-- just copy over the executables and reregister the libraries.
--------------------------------------
-- | The file containing information on the given package/configuration
-- combination. The filename contains a hash of the non-directory configure
-- options for quick lookup if there's a match.
--
-- NOTE(review): the bytes fed into the hash below are part of the on-disk
-- cache format — changing them invalidates all existing caches.
precompiledCacheFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
                     => PackageIdentifier
                     -> ConfigureOpts
                     -> Set GhcPkgId -- ^ dependencies
                     -> m (Path Abs File)
precompiledCacheFile pkgident copts installedPackageIDs = do
    ec <- asks getEnvConfig
    -- Path segments keyed by compiler, Cabal version and package, so
    -- caches from different toolchains never collide.
    compiler <- parseRelDir $ compilerVersionString $ envConfigCompilerVersion ec
    cabal <- parseRelDir $ versionString $ envConfigCabalVersion ec
    pkg <- parseRelDir $ packageIdentifierString pkgident
    -- In Cabal versions 1.22 and later, the configure options contain the
    -- installed package IDs, which is what we need for a unique hash.
    -- Unfortunately, earlier Cabals don't have the information, so we must
    -- supplement it with the installed package IDs directly. In 20/20
    -- hindsight, we would simply always do that, but previous Stack releases
    -- used only the options, and we don't want to invalidate old caches
    -- unnecessarily.
    --
    -- See issue: https://github.com/commercialhaskell/stack/issues/1103
    let cacheInput
          | envConfigCabalVersion ec >= $(mkVersion "1.22") =
              Binary.encode $ coNoDirs copts
          | otherwise =
              Binary.encode
                  ( coNoDirs copts
                  , installedPackageIDs
                  )
    -- We only pay attention to non-directory options. We don't want to avoid a
    -- cache hit just because it was installed in a different directory.
    copts' <- parseRelFile $ S8.unpack $ B16.encode $ SHA256.hashlazy cacheInput
    -- Final layout: <stack-root>/precompiled/<compiler>/<cabal>/<pkg>/<hash>
    return $ getStackRoot ec
         </> $(mkRelDir "precompiled")
         </> compiler
         </> cabal
         </> pkg
         </> copts'
-- | Write out information about a newly built package so later builds
-- with an identical configuration can reuse its library and executables.
writePrecompiledCache :: (MonadThrow m, MonadReader env m, HasEnvConfig env, MonadIO m)
                      => BaseConfigOpts
                      -> PackageIdentifier
                      -> ConfigureOpts
                      -> Set GhcPkgId -- ^ dependencies
                      -> Installed -- ^ library
                      -> Set Text -- ^ executables
                      -> m ()
writePrecompiledCache baseConfigOpts pkgident copts depIDs mghcPkgId exes = do
    file <- precompiledCacheFile pkgident copts depIDs
    createTree (parent file)
    -- For a library, record the path to its registration .conf file in
    -- the snapshot package DB; executables have no library entry.
    mlibpath <- case mghcPkgId of
        Executable _   -> return Nothing
        Library _ ipid -> do
            conf <- parseRelFile (ghcPkgIdString ipid ++ ".conf")
            return (Just (toFilePath (bcoSnapDB baseConfigOpts </> conf)))
    -- Absolute paths of every installed executable.
    exePaths <- forM (Set.toList exes) $ \exe -> do
        name <- parseRelFile (T.unpack exe)
        return (toFilePath (bcoSnapInstallRoot baseConfigOpts </> bindirSuffix </> name))
    liftIO $ taggedEncodeFile file PrecompiledCache
        { pcLibrary = mlibpath
        , pcExes = exePaths
        }
-- | Check the cache for a precompiled package matching the given
-- configuration; 'Nothing' when no usable entry exists.
readPrecompiledCache :: (MonadThrow m, MonadReader env m, HasEnvConfig env, MonadIO m)
                     => PackageIdentifier -- ^ target package
                     -> ConfigureOpts
                     -> Set GhcPkgId -- ^ dependencies
                     -> m (Maybe PrecompiledCache)
readPrecompiledCache pkgident copts depIDs =
    precompiledCacheFile pkgident copts depIDs >>= decodeFileOrFailDeep
| rvion/stack | src/Stack/Build/Cache.hs | bsd-3-clause | 12,323 | 0 | 17 | 3,332 | 2,694 | 1,377 | 1,317 | 229 | 2 |
-- ----------------------------------------------------------------------------
-- | Handle conversion of CmmData to LLVM code.
--
module LlvmCodeGen.Data (
genLlvmData
) where
#include "HsVersions.h"
import Llvm
import LlvmCodeGen.Base
import BlockId
import CLabel
import Cmm
import FastString
import qualified Outputable
-- ----------------------------------------------------------------------------
-- * Constants
--
-- | The string appended to a variable name to create its structure type alias
-- (e.g. a global @foo@ gets a type alias named @foo_struct@).
structStr :: LMString
structStr = fsLit "_struct"
-- ----------------------------------------------------------------------------
-- * Top level
--
-- | Pass a CmmStatic section to an equivalent Llvm code. Produces a
-- global variable of a named struct type plus the type alias itself.
genLlvmData :: (Section, CmmStatics) -> LlvmM LlvmData
genLlvmData (sec, Statics lbl xs) = do
    label  <- strCLabel_llvm lbl
    static <- mapM genData xs

    let types   = map getStatType static
        strucTy = LMStruct types
        -- The struct type gets its own alias, named <label>_struct.
        alias   = LMAlias (label `appendFS` structStr, strucTy)
        struct  = Just (LMStaticStruc static alias)
        -- Linkage follows the Cmm label's visibility.
        link | externallyVisibleCLabel lbl = ExternallyVisible
             | otherwise                   = Internal
        -- Read-only sections become LLVM constants.
        constancy | isSecConstant sec = Constant
                  | otherwise         = Global
        glob = LMGlobalVar label alias link Nothing Nothing constancy

    return ([LMGlobal glob struct], [alias])
-- | Should a data in this section be considered constant (i.e. placed in
-- read-only memory)?
isSecConstant :: Section -> Bool
isSecConstant sec = case sec of
    Text                    -> True
    ReadOnlyData            -> True
    RelocatableReadOnlyData -> True
    ReadOnlyData16          -> True
    Data                    -> False
    UninitialisedData       -> False
    OtherSection _          -> False
-- ----------------------------------------------------------------------------
-- * Generate static data
--
-- | Handle static data: strings become NUL-terminated i8 arrays,
-- uninitialised blocks become untyped arrays, literals are delegated
-- to 'genStaticLit'.
genData :: CmmStatic -> LlvmM LlvmStatic
genData (CmmString str) = do
    let bytes      = [LMStaticLit (LMIntLit (fromIntegral b) i8) | b <- str]
        terminated = bytes ++ [LMStaticLit (LMIntLit 0 i8)]
    return (LMStaticArray terminated (LMArray (length terminated) i8))

genData (CmmUninitialised bytes)
    = return (LMUninitType (LMArray bytes i8))

genData (CmmStaticLit lit)
    = genStaticLit lit
-- | Generate Llvm code for a static literal.
--
-- Will either generate the code or leave it unresolved if it is a 'CLabel'
-- which isn't yet known.
genStaticLit :: CmmLit -> LlvmM LlvmStatic
-- Integer literal at the Cmm-declared bit width.
genStaticLit (CmmInt i w)
    = return $ LMStaticLit (LMIntLit i (LMInt $ widthInBits w))
-- Floating literal, converted from Rational at the matching float width.
genStaticLit (CmmFloat r w)
    = return $ LMStaticLit (LMFloatLit (fromRational r) (widthToLlvmFloat w))
-- Vector literal: each element must itself resolve to a plain literal.
genStaticLit (CmmVec ls)
    = do sls <- mapM toLlvmLit ls
         return $ LMStaticLit (LMVectorLit sls)
  where
    toLlvmLit :: CmmLit -> LlvmM LlvmLit
    toLlvmLit lit = do
        slit <- genStaticLit lit
        case slit of
          LMStaticLit llvmLit -> return llvmLit
          _ -> panic "genStaticLit"

-- Leave unresolved, will fix later
genStaticLit cmm@(CmmLabel l) = do
    var <- getGlobalPtr =<< strCLabel_llvm l
    dflags <- getDynFlags
    let ptr = LMStaticPointer var
        lmty = cmmToLlvmType $ cmmLitType dflags cmm
    -- Cast the pointer to the integer type the Cmm literal expects.
    return $ LMPtoI ptr lmty

-- Label plus a constant byte offset.
genStaticLit (CmmLabelOff label off) = do
    dflags <- getDynFlags
    var <- genStaticLit (CmmLabel label)
    let offset = LMStaticLit $ LMIntLit (toInteger off) (llvmWord dflags)
    return $ LMAdd var offset

-- Difference of two labels plus an offset: (l1 - l2) + off.
genStaticLit (CmmLabelDiffOff l1 l2 off) = do
    dflags <- getDynFlags
    var1 <- genStaticLit (CmmLabel l1)
    var2 <- genStaticLit (CmmLabel l2)
    let var = LMSub var1 var2
        offset = LMStaticLit $ LMIntLit (toInteger off) (llvmWord dflags)
    return $ LMAdd var offset

-- A block reference is just its info-table label.
genStaticLit (CmmBlock b) = genStaticLit $ CmmLabel $ infoTblLbl b

genStaticLit (CmmHighStackMark)
    = panic "genStaticLit: CmmHighStackMark unsupported!"
-- -----------------------------------------------------------------------------
-- * Misc
--
-- | Abort compilation with a message qualified by this module's name.
panic :: String -> a
panic msg = Outputable.panic ("LlvmCodeGen.Data." ++ msg)
| jwiegley/ghc-release | compiler/llvmGen/LlvmCodeGen/Data.hs | gpl-3.0 | 4,136 | 0 | 16 | 928 | 1,025 | 517 | 508 | 78 | 3 |
{-# LANGUAGE CPP #-}
import Control.Monad
import Data.IORef
import Control.Exception (SomeException, catch)
import Distribution.Simple
import Distribution.Simple.BuildPaths (autogenModulesDir)
import Distribution.Simple.InstallDirs as I
import Distribution.Simple.LocalBuildInfo as L
import qualified Distribution.Simple.Setup as S
import qualified Distribution.Simple.Program as P
import Distribution.Simple.Utils (createDirectoryIfMissingVerbose, rewriteFile, notice, installOrdinaryFiles)
import Distribution.Compiler
import Distribution.PackageDescription
import Distribution.Text
import System.Environment
import System.Exit
import System.FilePath ((</>), splitDirectories,isAbsolute)
import System.Directory
import qualified System.FilePath.Posix as Px
import System.Process
-- After Idris is built, we need to check and install the prelude and other libs
-- -----------------------------------------------------------------------------
-- Idris Command Path
-- make on mingw32 exepects unix style separators
#ifdef mingw32_HOST_OS
-- On Windows, build the path with POSIX separators since make expects them.
(<//>) = (Px.</>)
idrisCmd local = Px.joinPath $ splitDirectories $ ".." <//> ".." <//> buildDir local <//> "idris" <//> "idris"
#else
-- Path to the freshly built idris executable, relative to the libs dirs.
idrisCmd local = ".." </> ".." </> buildDir local </> "idris" </> "idris"
#endif
-- -----------------------------------------------------------------------------
-- Make Commands
-- use GNU make on FreeBSD
#if defined(freebsd_HOST_OS) || defined(dragonfly_HOST_OS)\
    || defined(openbsd_HOST_OS) || defined(netbsd_HOST_OS)
mymake = "gmake"
#else
mymake = "make"
#endif
-- Run make with the given verbosity and arguments.
make verbosity =
   P.runProgramInvocation verbosity . P.simpleProgramInvocation mymake
#ifdef mingw32_HOST_OS
-- Windows only: invoke windres to compile resource files.
windres verbosity = P.runProgramInvocation verbosity . P.simpleProgramInvocation "windres"
#endif
-- -----------------------------------------------------------------------------
-- Flags
-- | Is the @gmp@ cabal flag enabled? Unset counts as disabled.
usesGMP :: S.ConfigFlags -> Bool
usesGMP flags =
    case lookup (FlagName "gmp") (S.configConfigurationsFlags flags) of
        Just True -> True
        _         -> False

-- | Is the @execonly@ cabal flag enabled? Unset counts as disabled.
execOnly :: S.ConfigFlags -> Bool
execOnly flags =
    case lookup (FlagName "execonly") (S.configConfigurationsFlags flags) of
        Just True -> True
        _         -> False

-- | Is the @release@ cabal flag enabled? Unset counts as disabled.
isRelease :: S.ConfigFlags -> Bool
isRelease flags =
    case lookup (FlagName "release") (S.configConfigurationsFlags flags) of
        Just True -> True
        _         -> False

-- | Is the @freestanding@ cabal flag enabled? Unset counts as disabled.
isFreestanding :: S.ConfigFlags -> Bool
isFreestanding flags =
    case lookup (FlagName "freestanding") (S.configConfigurationsFlags flags) of
        Just True -> True
        _         -> False
-- -----------------------------------------------------------------------------
-- Clean
-- | Post-clean hook: run @make clean@ in the bundled libs directory.
idrisClean _ flags _ _ = make verb ["-C", "libs", "clean", "IDRIS=idris"]
  where
    verb = S.fromFlag (S.cleanVerbosity flags)
-- -----------------------------------------------------------------------------
-- Configure
-- | Short git hash of HEAD, with any trailing newline stripped; falls
-- back to @"PRE"@ when git is unavailable or fails.
gitHash :: IO String
gitHash = do
    raw <- Control.Exception.catch
               (readProcess "git" ["rev-parse", "--short", "HEAD"] "")
               (\e -> const (return "PRE") (e :: SomeException))
    return (takeWhile (/= '\n') raw)
-- Put the Git hash into a module for use in the program
-- For release builds, just put the empty string in the module
generateVersionModule verbosity dir release = do
    hash <- gitHash
    let versionModulePath = dir </> "Version_idris" Px.<.> "hs"
    putStrLn $ "Generating " ++ versionModulePath ++
        if release then " for release" else " for prerelease " ++ hash
    createDirectoryIfMissingVerbose verbosity True dir
    -- rewriteFile only touches the file when contents changed, avoiding
    -- needless rebuilds.
    rewriteFile versionModulePath (versionModuleContents hash)

  -- Body of the generated Version_idris module: gitHash is "" for
  -- releases, "git:<hash>" otherwise.
  where versionModuleContents h = "module Version_idris where\n\n" ++
                                  "gitHash :: String\n" ++
                                  if release
                                    then "gitHash = \"\"\n"
                                    else "gitHash = \"git:" ++ h ++ "\"\n"
-- Generate a module that contains the lib path for a freestanding Idris.
-- The generated Target_idris exposes getDataDir/getDataFileName, mirroring
-- the Paths_* API.
generateTargetModule verbosity dir targetDir = do
    let absPath = isAbsolute targetDir
    let targetModulePath = dir </> "Target_idris" Px.<.> "hs"
    putStrLn $ "Generating " ++ targetModulePath
    createDirectoryIfMissingVerbose verbosity True dir
    rewriteFile targetModulePath (versionModuleContents absPath targetDir)
            -- BUG FIX: the @if@ expression below must be parenthesized.
            -- Previously the trailing @getDataFileName@ definitions were
            -- parsed as part of the /else/ branch, so an absolute target
            -- directory generated a module with no getDataFileName at all.
            where versionModuleContents absolute td = "module Target_idris where\n\n" ++
                                    "import System.FilePath\n" ++
                                    "import System.Environment\n" ++
                                    "getDataDir :: IO String\n" ++
                                    (if absolute
                                        then "getDataDir = return \"" ++ td ++ "\"\n"
                                        else "getDataDir = do \n" ++
                                             "   expath <- getExecutablePath\n" ++
                                             "   execDir <- return $ dropFileName expath\n" ++
                                             "   return $ execDir ++ \"" ++ td ++ "\"\n")
                                    ++ "getDataFileName :: FilePath -> IO FilePath\n"
                                    ++ "getDataFileName name = do\n"
                                    ++ "   dir <- getDataDir\n"
                                    ++ "   return (dir ++ \"/\" ++ name)"
-- a module that has info about existence and location of a bundled toolchain
generateToolchainModule verbosity srcDir toolDir = do
    let commonContent = "module Tools_idris where\n\n"
        -- Presence of a toolchain dir decides the generated flags.
        toolContent = case toolDir of
            Just dir -> "hasBundledToolchain = True\n" ++
                        "getToolchainDir = \"" ++ dir ++ "\"\n"
            Nothing  -> "hasBundledToolchain = False\n" ++
                        "getToolchainDir = \"\""
        toolPath = srcDir </> "Tools_idris" Px.<.> "hs"
    createDirectoryIfMissingVerbose verbosity True srcDir
    rewriteFile toolPath (commonContent ++ toolContent)
-- | Post-configure hook: clean the RTS, generate the Version module, and
-- generate the Target/Tools modules when building freestanding.
-- Fix: drop the unused @version@ where-binding from the original.
idrisConfigure _ flags _ local = do
    configureRTS
    generateVersionModule verbosity (autogenModulesDir local) (isRelease (configFlags local))
    if isFreestanding $ configFlags local
        then do
            -- Freestanding builds bake in toolchain and library locations
            -- taken from the environment.
            toolDir <- lookupEnv "IDRIS_TOOLCHAIN_DIR"
            generateToolchainModule verbosity (autogenModulesDir local) toolDir
            targetDir <- lookupEnv "IDRIS_LIB_DIR"
            case targetDir of
                Just d -> generateTargetModule verbosity (autogenModulesDir local) d
                Nothing -> error $ "Trying to build freestanding without a target directory."
                                ++ " Set it by defining IDRIS_LIB_DIR."
        else
            generateToolchainModule verbosity (autogenModulesDir local) Nothing
    where
        verbosity = S.fromFlag $ S.configVerbosity flags
        -- This is a hack. I don't know how to tell cabal that a data file needs
        -- installing but shouldn't be in the distribution. And it won't make the
        -- distribution if it's not there, so instead I just delete
        -- the file after configure.
        configureRTS = make verbosity ["-C", "rts", "clean"]
-- | Pre-sdist hook: generate the Version/Target/Tools modules so they are
-- present in the source distribution, then defer to the default hook.
-- Fix: drop the unused @dir@ binding (sDistDirectory was never used).
idrisPreSDist args flags = do
  let verb = S.fromFlag (S.sDistVerbosity flags)
  generateVersionModule verb "src" True
  generateTargetModule verb "src" "./libs"
  generateToolchainModule verb "src" Nothing
  preSDist simpleUserHooks args flags
-- | sdist hook wrapper: add every git-tracked file to extra-src-files
-- before handing off to the wrapped sdist action.
idrisSDist sdist pkgDesc bi hooks flags = do
    tracked <- liftM lines (readProcess "git" ["ls-files"] "")
    let pkgDesc' = pkgDesc { extraSrcFiles = extraSrcFiles pkgDesc ++ tracked }
    sdist pkgDesc' bi hooks flags
-- | Post-sdist hook: remove the generated Version_idris/Target_idris
-- modules again (they were only created for the tarball); any failure to
-- delete is ignored. Then run the default post-sdist hook.
idrisPostSDist args flags desc lbi = do
  Control.Exception.catch (do let file = "src" </> "Version_idris" Px.<.> "hs"
                              let targetFile = "src" </> "Target_idris" Px.<.> "hs"
                              putStrLn $ "Removing generated modules:\n "
                                        ++ file ++ "\n" ++ targetFile
                              removeFile file
                              removeFile targetFile)
             -- Best-effort cleanup: swallow any exception.
             (\e -> let e' = (e :: SomeException) in return ())
  postSDist simpleUserHooks args flags desc lbi
-- -----------------------------------------------------------------------------
-- Build
-- | Build a HookedBuildInfo that passes the current git hash to CPP
-- via -DVERSION.
getVersion :: Args -> S.BuildFlags -> IO HookedBuildInfo
getVersion _ _ = do
    h <- gitHash
    let buildinfo = emptyBuildInfo { cppOptions = ["-DVERSION=" ++ h] } :: BuildInfo
    return (Just buildinfo, [])
-- | Pre-build hook. On Windows, compile the icon resource with windres
-- and link the object into the idris executable; elsewhere a no-op.
idrisPreBuild args flags = do
#ifdef mingw32_HOST_OS
        createDirectoryIfMissingVerbose verbosity True dir
        windres verbosity ["icons/idris_icon.rc","-o", dir++"/idris_icon.o"]
        return (Nothing, [("idris", emptyBuildInfo { ldOptions = [dir ++ "/idris_icon.o"] })])
    where
        verbosity = S.fromFlag $ S.buildVerbosity flags
        dir = S.fromFlagOrDefault "dist" $ S.buildDistPref flags
#else
        return (Nothing, [])
#endif
-- | Post-build hook: build the standard libraries and the RTS with make,
-- unless the @execonly@ flag restricted the build to the executable.
idrisBuild _ flags _ local = unless (execOnly (configFlags local)) $ do
    buildStdLib
    buildRTS
    where
        verbosity = S.fromFlag $ S.buildVerbosity flags
        buildStdLib = do
            putStrLn "Building libraries..."
            makeBuild "libs"
            where
                -- The libs Makefile compiles with the just-built idris.
                makeBuild dir = make verbosity [ "-C", dir, "build" , "IDRIS=" ++ idrisCmd local]
        -- The RTS optionally gets GMP support via a make variable.
        buildRTS = make verbosity (["-C", "rts", "build"] ++
                                   gmpflag (usesGMP (configFlags local)))
        gmpflag False = []
        gmpflag True = ["GMP=-DIDRIS_GMP"]
-- -----------------------------------------------------------------------------
-- Copy/Install
-- | Shared copy/install hook body: install the standard libraries, the
-- RTS and the man page, unless @execonly@ was set at configure time.
idrisInstall verbosity copy pkg local = unless (execOnly (configFlags local)) $ do
    installStdLib
    installRTS
    installManPage
    where
        -- Destination data directory for the chosen CopyDest.
        target = datadir $ L.absoluteInstallDirs pkg local copy

        installStdLib = do
            let target' = target -- </> "libs"
            putStrLn $ "Installing libraries in " ++ target'
            makeInstall "libs" target'

        installRTS = do
            let target' = target </> "rts"
            putStrLn $ "Installing run time system in " ++ target'
            makeInstall "rts" target'

        installManPage = do
            let mandest = mandir (L.absoluteInstallDirs pkg local copy) ++ "/man1"
            notice verbosity $ unwords ["Copying man page to", mandest]
            installOrdinaryFiles verbosity mandest [("man", "idris.1")]

        -- Delegate the actual file copying to each directory's Makefile.
        makeInstall src target =
            make verbosity [ "-C", src, "install" , "TARGET=" ++ target, "IDRIS=" ++ idrisCmd local]
-- -----------------------------------------------------------------------------
-- Test
-- FIXME: We use the __GLASGOW_HASKELL__ macro because MIN_VERSION_cabal seems
-- to be broken !
-- There are two "dataDir" in cabal, and they don't relate to each other.
-- When fetching modules, idris uses the second path (in the pkg record),
-- which by default is the root folder of the project.
-- We want it to be the install directory where we put the idris libraries.
-- | Repoint the package's dataDir at the given install location (see the
-- comments above: idris resolves its libraries through this field).
fixPkg pkg target = pkg { dataDir = target }
-- The "Args" argument of the testHooks has been added in cabal 1.22.0,
-- and should therefore be ignored for prior versions.
#if __GLASGOW_HASKELL__ < 710
-- Older Cabal: swallow the extra argument ourselves.
originalTestHook _ = testHook simpleUserHooks
#else
originalTestHook = testHook simpleUserHooks
#endif

-- Run the default test hook against a package whose dataDir points at
-- the real install location, so tests find the installed libraries.
idrisTestHook args pkg local hooks flags = do
  let target = datadir $ L.absoluteInstallDirs pkg local NoCopyDest
  originalTestHook args (fixPkg pkg target) local hooks flags
-- -----------------------------------------------------------------------------
-- Main
-- Install libraries during both copy and install
-- See https://github.com/haskell/cabal/issues/709
-- Entry point: default Cabal build with the custom hooks wired in.
main = defaultMainWithHooks $ simpleUserHooks
    { postClean = idrisClean
    , postConf = idrisConfigure
    , preBuild = idrisPreBuild
    , postBuild = idrisBuild
    -- Install libraries during both copy and install
    -- (see https://github.com/haskell/cabal/issues/709).
    , postCopy = \_ flags pkg local ->
                   idrisInstall (S.fromFlag $ S.copyVerbosity flags)
                                (S.fromFlag $ S.copyDest flags) pkg local
    , postInst = \_ flags pkg local ->
                   idrisInstall (S.fromFlag $ S.installVerbosity flags)
                                NoCopyDest pkg local
    , preSDist = idrisPreSDist
    , sDistHook = idrisSDist (sDistHook simpleUserHooks)
    , postSDist = idrisPostSDist
#if __GLASGOW_HASKELL__ < 710
    -- Pre-1.22 testHook lacks the Args parameter; pass a dummy.
    , testHook = idrisTestHook ()
#else
    , testHook = idrisTestHook
#endif
    }
| ben-schulz/Idris-dev | Setup.hs | bsd-3-clause | 12,983 | 0 | 17 | 3,418 | 2,555 | 1,315 | 1,240 | 197 | 3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.