code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE NoImplicitPrelude #-}
-- |
-- Module: $HEADER$
-- Description: Concrete proxies for types from Data.Word
-- Copyright: (c) 2014, Peter Trško
-- License: BSD3
--
-- Maintainer: peter.trsko@gmail.com
-- Stability: experimental
-- Portability: NoImplicitPrelude
--
-- Concrete proxies for types from "Data.Word".
module Data.Proxy.Word
(
word
, word8
, word16
, word32
, word64
)
where
import Data.Proxy (Proxy(Proxy))
import Data.Word (Word, Word8, Word16, Word32, Word64)
-- | Type proxy for 'Word' (native-size unsigned integer).
word :: Proxy Word
word = Proxy
{-# INLINE word #-}

-- | Type proxy for 'Word8'.
word8 :: Proxy Word8
word8 = Proxy
{-# INLINE word8 #-}

-- | Type proxy for 'Word16'.
word16 :: Proxy Word16
word16 = Proxy
{-# INLINE word16 #-}

-- | Type proxy for 'Word32'.
word32 :: Proxy Word32
word32 = Proxy
{-# INLINE word32 #-}

-- | Type proxy for 'Word64'.
word64 :: Proxy Word64
word64 = Proxy
{-# INLINE word64 #-}
|
trskop/type-proxies
|
src/Data/Proxy/Word.hs
|
bsd-3-clause
| 980
| 0
| 6
| 213
| 147
| 96
| 51
| 25
| 1
|
{-# LANGUAGE TupleSections #-}
{-@ LIQUID "--cabaldir" @-}
{-@ LIQUID "--diff" @-}
import Data.Maybe
import Data.Monoid (mconcat, mempty)
import System.Exit
import Control.Applicative ((<$>))
import Control.DeepSeq
import Text.PrettyPrint.HughesPJ
import CoreSyn
import Var
import System.Console.CmdArgs.Verbosity (whenLoud)
import System.Console.CmdArgs.Default
import qualified Language.Fixpoint.Config as FC
import qualified Language.Haskell.Liquid.DiffCheck as DC
import Language.Fixpoint.Misc
import Language.Fixpoint.Interface
import Language.Fixpoint.Types (sinfo)
import Language.Haskell.Liquid.Types
import Language.Haskell.Liquid.Errors
import Language.Haskell.Liquid.CmdLine
import Language.Haskell.Liquid.GhcInterface
import Language.Haskell.Liquid.Constraint.Generate
import Language.Haskell.Liquid.Constraint.ToFixpoint
import Language.Haskell.Liquid.Constraint.Types
import Language.Haskell.Liquid.TransformRec
import Language.Haskell.Liquid.Annotate (mkOutput)
import System.Environment (getArgs)
-- | Entry point: parse options, liquid-check every listed file, and
-- exit with a code derived from the combined result.  'exitWith' never
-- returns, hence the fully polymorphic @IO b@.
main :: IO b
main = do cfg0 <- getOpts
          -- Output is a monoid, so per-file results fold into one.
          res <- mconcat <$> mapM (checkOne cfg0) (files cfg0)
          let ecode = resultExit $ {- traceShow "RESULT" $ -} o_result res
          -- putStrLn $ "ExitCode: " ++ show ecode
          exitWith ecode

-- | Check one file: extract GHC info, then either report the
-- extraction failure or run the checker proper.
checkOne :: Config -> FilePath -> IO (Output Doc)
checkOne cfg0 t = getGhcInfo cfg0 t >>= either errOut (liquidOne t)
  where
    -- Turn a failed GHC extraction into a reported Output.
    errOut r = exitWithResult cfg0 t $ mempty { o_result = r}
-- | The main checking pipeline for a single target file:
-- transform the GHC core, optionally slice out unchanged binds
-- (diff-checking), generate refinement constraints, solve them, and
-- merge the new result with any cached old result.
liquidOne :: FilePath -> GhcInfo -> IO (Output Doc)
liquidOne target info =
  do donePhase Loud "Extracted Core using GHC"
     let cfg = config $ spec info
     whenLoud $ do putStrLn "**** Config **************************************************"
                   print cfg
     whenLoud $ do putStrLn $ showpp info
                   putStrLn "*************** Original CoreBinds ***************************"
                   putStrLn $ showpp (cbs info)
     let cbs' = transformScope (cbs info)
     whenLoud $ do donePhase Loud "transformRecExpr"
                   putStrLn "*************** Transform Rec Expr CoreBinds *****************"
                   putStrLn $ showpp cbs'
                   putStrLn "*************** Slicing Out Unchanged CoreBinds *****************"
     -- dc is Nothing unless diff-checking (or target-var thinning) applies.
     dc <- prune cfg cbs' target info
     let cbs'' = maybe cbs' DC.newBinds dc
     let info' = maybe info (\z -> info {spec = DC.newSpec z}) dc
     -- ($!) + deepseq force constraint generation inside this phase's timing.
     let cgi = {-# SCC "generateConstraints" #-} generateConstraints $! info' {cbs = cbs''}
     cgi `deepseq` donePhase Loud "generateConstraints"
     out <- solveCs cfg target cgi info' dc
     donePhase Loud "solve"
     -- Old (cached) output for unchanged binds is merged back in.
     let out' = mconcat [maybe mempty DC.oldOutput dc, out]
     DC.saveResult target out'
     exitWithResult cfg target out'
-- checkedNames :: Maybe DC.DiffCheck -> Maybe [Name.Name]
-- | Pretty-printed names of the binders actually checked this run
-- (Nothing when no diff-check narrowed the scope).
checkedNames dc = concatMap names . DC.newBinds <$> dc
  where
    -- NOTE(review): 'showpp $ shvar v' applies showpp on top of shvar,
    -- which already showpp's -- looks redundant; confirm intent.
    names (NonRec v _ ) = [showpp $ shvar v]
    names (Rec xs) = map (shvar . fst) xs
    shvar = showpp . varName

-- prune :: Config -> [CoreBind] -> FilePath -> GhcInfo -> IO (Maybe Diff)
-- | Decide which binds to check: explicit target vars win, otherwise
-- diff-checking slices against the saved version, otherwise check all.
prune cfg cbs target info
  | not (null vs) = return . Just $ DC.DC (DC.thin cbs vs) mempty sp
  | diffcheck cfg = DC.slice target cbs sp
  | otherwise = return Nothing
  where
    vs = tgtVars sp
    sp = spec info

-- | Hand the generated constraints to the fixpoint solver and package
-- the result (solution, checked names, warnings) as an Output.
solveCs cfg target cgi info dc
  = do finfo <- cgInfoFInfo info cgi
       (r, sol) <- solve fx finfo
       let names = checkedNames dc
       let warns = logErrors cgi
       let annm = annotMap cgi
       let res = ferr sol r
       let out0 = mkOutput cfg res sol annm
       return $ out0 { o_vars = names } { o_errors = warns} { o_result = res }
  where
    -- NOTE(review): fromJust assumes the smt solver option is always
    -- set by this point (presumably defaulted in getOpts) -- confirm.
    fx = def { FC.solver = fromJust (smtsolver cfg)
             , FC.real = real cfg
             , FC.native = native cfg
             , FC.srcFile = target
             -- , FC.stats = True
             }
    ferr s r = fmap (tidyError s) $ result $ sinfo <$> r

-- writeCGI tgt cgi = {-# SCC "ConsWrite" #-} writeFile (extFileName Cgi tgt) str
--   where
--     str = {-# SCC "PPcgi" #-} showpp cgi
|
Kyly/liquidhaskell
|
Liquid.hs
|
bsd-3-clause
| 4,468
| 3
| 16
| 1,376
| 1,124
| 577
| 547
| 82
| 2
|
module Santa where
import Control.Concurrent.STM
import Control.Concurrent
import System.Random
import Control.Concurrent.MVar
-- | Announce that an elf has joined Santa in the study.
meetInStudy :: Int -> IO ()
meetInStudy n = putStrLn ("Elf " ++ show n ++ " is meeting in the study.")

-- | Announce that a reindeer is out delivering toys.
deliverToys :: Int -> IO ()
deliverToys n = putStrLn ("Reindeer " ++ show n ++ " is delivering toys.")

-- | One round of work for a helper: join the group, wait to be let in
-- through the entry gate, perform the task (holding the console lock),
-- then leave through the exit gate.
helper1 :: Group -> MVar () -> IO () -> IO ()
helper1 grp consoleLock task = do
  (entry, exit) <- joinGroup grp
  passGate entry
  withMVar consoleLock (const task)
  passGate exit

-- | Specialise 'helper1' for elves and reindeer respectively.
elf1, reindeer1 :: Group -> Int -> MVar () -> IO ()
elf1 grp n lock = helper1 grp lock (meetInStudy n)
reindeer1 grp n lock = helper1 grp lock (deliverToys n)
-- | A Gate has a fixed capacity and a TVar counting how many threads
-- may still pass.  Gates are created closed (0 remaining).
data Gate = MkGate Int (TVar Int)

newGate :: Int -> STM Gate
newGate n = do
  tv <- newTVar 0
  return $ MkGate n tv

-- | Helper side: block (via 'check'/retry) until the gate is open,
-- then consume one slot.
passGate :: Gate -> IO ()
passGate (MkGate n tv) = do
  atomically $ do
    n_left <- readTVar tv
    check $ n_left > 0
    writeTVar tv $ n_left - 1

-- | Santa's side: open the gate to full capacity, then wait until
-- every slot has been consumed before returning.
operateGate :: Gate -> IO ()
operateGate (MkGate n tv) = do
  atomically $ writeTVar tv n
  atomically $ do
    n_left <- readTVar tv
    check $ n_left == 0
-- | A Group has a fixed size and a TVar of (free slots, entry gate,
-- exit gate) for the current round.
data Group = MkGroup Int (TVar (Int, Gate, Gate))

newGroup :: Int -> IO Group
newGroup n = atomically $ do
  g1 <- newGate n
  g2 <- newGate n
  tv <- newTVar (n, g1, g2)
  return $ MkGroup n tv

-- | Helper side: claim a place (retrying while the group is full) and
-- learn which gates to use for this round.
joinGroup :: Group -> IO (Gate, Gate)
joinGroup (MkGroup n tv) = atomically $ do
  (n_left, g1, g2) <- readTVar tv
  check $ n_left > 0
  writeTVar tv (n_left - 1, g1, g2)
  return (g1, g2)

-- | Santa's side: wait for the group to fill, reset it with fresh
-- gates for the next round, and return the full batch's gates.
-- Left in STM so 'santa' can compose two of these with 'orElse'.
awaitGroup :: Group -> STM (Gate, Gate)
awaitGroup (MkGroup n tv) = do
  (n_left, g1, g2) <- readTVar tv
  check $ n_left == 0
  new_g1 <- newGate n
  new_g2 <- newGate n
  writeTVar tv (n, new_g1, new_g2)
  return (g1, g2)
-- | Run an action over and over, never returning.
-- (Local definition shadowing 'Control.Monad.forever', fixed to IO.)
forever :: IO () -> IO ()
forever act = act >> forever act

-- | Sleep for a random interval of up to one second.
randomDelay :: IO ()
randomDelay = getStdRandom (randomR (1, 1000000)) >>= threadDelay
-- | Fork a thread in which one elf endlessly rests, then works.
elf :: Group -> Int -> MVar () -> IO ThreadId
elf grp n lock = forkIO (forever (randomDelay >> elf1 grp n lock))

-- | Fork a thread in which one reindeer endlessly rests, then works.
reindeer :: Group -> Int -> MVar () -> IO ThreadId
reindeer grp n lock = forkIO (forever (randomDelay >> reindeer1 grp n lock))
-- | One of Santa's rounds: wait for either group to be complete --
-- 'orElse' tries the reindeer group first, so delivering toys takes
-- priority over meeting elves -- then usher the batch through its gates.
santa :: Group -> Group -> MVar () -> IO ()
santa elf_gp deer_gp mon = do
  withMVar mon $ \_ -> putStrLn "------------------------------------"
  (task, (in_gate, out_gate)) <- atomically $ orElse
    (chooseGroup deer_gp "deliver toys")
    (chooseGroup elf_gp "meet in my study")
  withMVar mon $ \_ -> putStrLn ("Ho! Ho! Ho! Let's " ++ task ++ ".")
  operateGate in_gate
  operateGate out_gate
  where
    -- Pair a completed group's gates with its task description.
    chooseGroup :: Group -> String -> STM (String, (Gate, Gate))
    chooseGroup gp task = do
      gates <- awaitGroup gp
      return (task, gates)
-- | Wire everything up: ten elves sharing a group of size three, nine
-- reindeer in a group of nine, and Santa looping forever.
runSanta :: IO ()
runSanta = do
  pmon <- newMVar ()   -- lock serialising console output
  elf_group <- newGroup 3
  sequence_ [elf elf_group n pmon | n <- [1..10]]
  deer_group <- newGroup 9
  sequence_ [reindeer deer_group n pmon | n <- [1..9]]
  forever (santa elf_group deer_group pmon)
|
aztecrex/haskell-experiments-scratch
|
src/Santa.hs
|
bsd-3-clause
| 2,980
| 0
| 12
| 695
| 1,304
| 631
| 673
| 93
| 1
|
{-# LANGUAGE OverloadedStrings, DeriveFunctor, ViewPatterns #-}
module Crawl.FloorItems (
SquareItems(..), knownItems, possiblyAny,
FloorItems, trackFloorItems, scanFloorItems,
wantItem, wantItemPickup,
butcherable, isBook, isRune
) where
import Control.Applicative ((<$>), (<*>), liftA2)
import Control.Monad (mplus)
import qualified Data.HashMap.Strict as H
import qualified Data.HashSet as HS
import qualified Reactive.Banana as R
import qualified Data.Text as T
import Crawl.BananaUtils
import Crawl.Bindings
import Crawl.Equipment
import Crawl.Inventory
import Crawl.Item
import Crawl.LevelInfo
import Crawl.Messages
import Crawl.Move
import Crawl.ParseItem
-- | What we believe is lying on one map square.
data SquareItems item =
    Empty                 -- ^ nothing here
  | SingleItem item       -- ^ exactly one item
  | UnexploredStack item  -- ^ top item known, more unseen underneath
  | ExploredStack [item]  -- ^ full stack contents known
  | BigStack              -- ^ too many to list; needs a ScanBigStack
  deriving (Functor)
-- | Every item we positively know about on the square.  An unexplored
-- stack contributes only its visible top item; a big stack none.
knownItems :: SquareItems item -> [item]
knownItems sq = case sq of
  Empty -> []
  SingleItem i -> [i]
  UnexploredStack i -> [i]
  ExploredStack is -> is
  BigStack -> []
-- | Could any item on the square satisfy the predicate?  An unexplored
-- stack might hide anything, so it always counts; a BigStack does not
-- (it is handled by explicit scanning instead).
possiblyAny :: (item -> Bool) -> SquareItems item -> Bool
possiblyAny p sq = case sq of
  Empty -> False
  SingleItem i -> p i
  UnexploredStack _ -> True
  ExploredStack is -> any p is
  BigStack -> False
-- Store results of scanning for items.
-- At each square, we store the result of looking at that square with 'x',
-- along with the item tile data for that square at that time
-- (so a later tile change tells us the cached look is stale).
type FloorItems = H.HashMap Coord (Maybe Int, SquareItems Item)
-- | Accumulate floor-item knowledge by folding several classes of game
-- messages (examine results, "you see here", item lists, big-stack
-- prompts) into a per-square map, and dropping entries for squares
-- whose items have visibly vanished.
trackFloorItems :: R.Behavior t Coord ->
                   R.Behavior t LevelInfo ->
                   R.Behavior t MouseMode ->
                   R.Event t Message ->
                   R.Behavior t (Int, Move) ->
                   R.Behavior t Coord ->
                   R.Event t Move ->
                   R.Event t MouseMode ->
                   R.Behavior t FloorItems
trackFloorItems cursor level inputModeB messages0 lastMove loc moves inputModeE =
  fmap ((fmap . fmap . fmap) parseItem) $
  -- automatically clear out entries when we can see the item is gone
  -- XXX wouldn't it be better to remove the entry from the accumB?
  liftA2 (H.intersectionWith (flip const)) (fmap _levelItemTiles level) $
  R.accumB H.empty $
  (handleItemMessages <$> cursor <*> level R.<@> (itemMessages $ R.whenE ((== MOUSE_MODE_TARGET) <$> inputModeB) messages))
  `R.union` (handleYouSeeHereMessages <$> loc <*> level R.<@> youSeeHereMessages messages)
  `R.union` (handleThingsThatAreHereMessages <$> loc <*> level R.<@>
             thingsThatAreHereMessages (R.whenE ((/= MOUSE_MODE_TARGET) <$> inputModeB) messages) (R.filterE (== MOUSE_MODE_COMMAND) inputModeE))
  `R.union` (handleManyItemsHereMessages <$> loc <*> level R.<@> R.filterE ((== "<lightgrey>There are many items here.<lightgrey>") . _msgText) messages)
  `R.union` (handleScanBigStackMessages <$> loc <*> level R.<@> (filterBy (T.stripSuffix "? ((y)es/(n)o/(a)ll/(m)enu/*?g,/q)<lightgrey>") . filterBy (T.stripPrefix "<cyan>Pick up ") . fmap _msgText) scanBigStackMessages)
  `R.union` (handleMove <$> loc R.<@> moves)
  where
    -- Ignore messages produced while auto-exploring; big-stack scan
    -- replies are recognised only right after a ScanBigStack move.
    messages = R.whenE ((\(_, m) -> case m of AutoExplore -> False; _ -> True) <$> lastMove) messages0
    scanBigStackMessages = R.whenE ((\(_, m) -> case m of ScanBigStack -> True; _ -> False) <$> lastMove) messages0
    -- Record the result of examining the cursor square with 'x'.
    handleItemMessages c ll imsgs = H.insert c (tile, items)
      where
        tile = H.lookup c (_levelItemTiles ll)
        items = case imsgs of
          [] -> Empty
          [item] -> SingleItem item
          [item, "<lightgrey>There is something else lying underneath.<lightgrey>"] -> UnexploredStack item
          -- is it possible that we're throwing away ExploredStack data?
          _ -> error $ "strange itemMessages: " ++ show imsgs
    handleYouSeeHereMessages l ll item = H.insert l (H.lookup l (_levelItemTiles ll), SingleItem item)
    handleThingsThatAreHereMessages l ll items = H.insert l (H.lookup l (_levelItemTiles ll), ExploredStack items)
    handleManyItemsHereMessages l ll _ = H.insert l (H.lookup l (_levelItemTiles ll), BigStack)
    -- Grow an ExploredStack one item at a time from pickup prompts.
    handleScanBigStackMessages l ll m old = H.insert l (H.lookup l (_levelItemTiles ll), newItems) old
      where
        newItems = case H.lookup l old of
          Just (_, BigStack) -> ExploredStack [m]
          Just (_, ExploredStack e) -> ExploredStack (e ++ [m])
          _ -> error "handleScanBigStackMessages: unexpected previous floorItems contents"
    -- could potentially do something cleverer here:
    -- accumulate the item list and only update on "Okay, then.",
    -- but this should be fine
    -- Moves that change what is on the floor invalidate cached info.
    handleMove l Butcher = H.delete l
    handleMove l Pray = H.delete l
    handleMove l (PickUp _) = H.delete l
    handleMove _ BurnBooks = const H.empty -- trigger rescan; XXX just delete books
    handleMove _ GoDown = const H.empty
    handleMove _ _ = id
-- | Pick the next scanning move, if any: examine a big stack we are
-- standing on, otherwise look at the nearest-listed square in line of
-- sight whose item tile no longer matches our cached data.
scanFloorItems :: LevelInfo -> Coord -> FloorItems -> Maybe Move
scanFloorItems level l@(Coord lx ly) floorItems = scanBigStackHere `mplus` scanNearby
  where
    scanBigStackHere = -- examine big stack if we're on one
      case H.lookup l floorItems of
        Just (_, BigStack) -> Just ScanBigStack
        _ -> Nothing
    scanNearby = -- find items in LOS whose tile data has changed
      case [ c | (c, t) <- H.toList (_levelItemTiles level), c `HS.member` _levelLOS level, fmap fst (H.lookup c floorItems) /= Just (Just t) ] of
        -- ScanItem takes the offset relative to our own position.
        Coord x y : _ -> Just (ScanItem (x-lx) (y-ly))
        _ -> Nothing
-- | Collect runs of floor-item messages: a prompt resets the
-- accumulator, floor-item lines append to it, and the examine-filter
-- message flushes the accumulated batch as one event.
itemMessages :: R.Event t Message -> R.Event t [T.Text]
itemMessages messages =
  R.filterJust $
  fst . R.mapAccum [] . fmap handleMessage $ messages
  where
    handleMessage Message { _msgChannel = MSGCH_PROMPT } _acc = (Nothing, [])
    handleMessage Message { _msgChannel = MSGCH_FLOOR_ITEMS, _msgText = t } acc = (Nothing, acc ++ [t])
    handleMessage Message { _msgChannel = MSGCH_EXAMINE_FILTER } acc = (Just acc, [])
    handleMessage _ acc = (Nothing, acc)
-- | Single-item "You see here ..." floor-item messages, with the
-- colour-tag prefix stripped off.
youSeeHereMessages :: R.Event t Message -> R.Event t T.Text
youSeeHereMessages = filterBy (T.stripPrefix "<lightgrey>You see here " . _msgText) . R.filterE ((== MSGCH_FLOOR_ITEMS) . _msgChannel)
-- | Collect the item list printed after "Things that are here:".
-- The accumulator is Just items while a list is in progress; plain
-- messages append, and various interrupting channels abort the list.
thingsThatAreHereMessages :: R.Event t Message -> R.Event t a -> R.Event t [T.Text]
thingsThatAreHereMessages messages commandMode =
  R.filterJust . fst . R.mapAccum Nothing $ fmap handleMessage messages `R.union` fmap handleCommandMode commandMode
  where
    handleMessage Message { _msgChannel = MSGCH_FLOOR_ITEMS, _msgText = "<lightgrey>Things that are here:<lightgrey>" } Nothing = (Nothing, Just [])
    handleMessage Message { _msgChannel = MSGCH_PLAIN, _msgText = "<lightgrey>There are no objects that can be picked up here.<lightgrey>" } Nothing = (Nothing, Just [])
    -- Emit the running list on every appended line, so the final value
    -- is already out by the time the mode-change event arrives.
    handleMessage Message { _msgChannel = MSGCH_PLAIN, _msgText = item } acc = (\x -> (x, x)) (fmap (++ [item]) acc)
    -- ugh. we have to do it this way because the floorItems must be updated *before* the input_mode = 0 event arrives,
    -- or we will choose a move based on the old value.
    -- In 0.17 we started getting these here frequently, not sure why,
    -- but it seems safe to ignore them
    handleMessage Message { _msgChannel = MSGCH_DURATION } _ = (Nothing, Nothing)
    handleMessage Message { _msgChannel = MSGCH_FOOD } _ = (Nothing, Nothing)
    handleMessage Message { _msgChannel = MSGCH_SOUND } _ = (Nothing, Nothing)
    handleMessage Message { _msgChannel = MSGCH_ROTTEN_MEAT } _ = (Nothing, Nothing)
    handleMessage Message { _msgChannel = MSGCH_TIMED_PORTAL } _ = (Nothing, Nothing)
    handleMessage Message { _msgChannel = MSGCH_MONSTER_WARNING } _ = (Nothing, Nothing)
    handleMessage _ Nothing = (Nothing, Nothing)
    handleMessage _ (Just _) = error "unexpected message in thingsThatAreHereMessages"
    -- Leaving for command mode ends any in-progress list.
    handleCommandMode _ _ = (Nothing, Nothing)
-- The rest should probably get split out into its own module.
-- | Do we want to interact with this item at all?  Butcherable corpses
-- are always wanted; anything else only when not restricted to corpses
-- and worth picking up.
wantItem :: Bool -> Inventory -> Item -> Bool
wantItem corpsesOnly inv item
  | butcherable item = True
  | corpsesOnly = False
  | otherwise = wantItemPickup inv item
-- | Should we pick the item up?  Food, a whitelist of useful (or
-- unidentified) potions and scrolls, and runes are always wanted;
-- anything else only if it upgrades our current equipment.
-- NOTE(review): gold is explicitly skipped -- presumably collected
-- some other way; confirm against the move logic.
wantItemPickup :: Inventory -> Item -> Bool
wantItemPickup inv item = case itemData item of
  ItemGold -> False
  ItemFood _ -> True
  ItemPotion Nothing -> True
  ItemPotion (Just POT_CURING) -> True
  ItemPotion (Just POT_HEAL_WOUNDS) -> True
  ItemPotion (Just POT_HASTE) -> True
  ItemPotion (Just POT_MIGHT) -> True
  ItemPotion (Just POT_AGILITY) -> True
  ItemPotion (Just POT_RESISTANCE) -> True
  ItemPotion (Just POT_EXPERIENCE) -> True
  ItemPotion (Just POT_BENEFICIAL_MUTATION) -> True
  ItemScroll Nothing -> True
  ItemScroll (Just SCR_REMOVE_CURSE) -> True
  ItemScroll (Just SCR_ENCHANT_WEAPON) -> True
  ItemScroll (Just SCR_ENCHANT_ARMOUR) -> True
  ItemScroll (Just SCR_TELEPORTATION) -> True
  ItemScroll (Just SCR_MAGIC_MAPPING) -> True
  ItemScroll (Just SCR_ACQUIREMENT) -> True
  ItemMiscellany (Just MISC_RUNE_OF_ZOT) -> True
  _ -> isEquipmentUpgrade inv item
-- | Corpses whose boolean flag is False can be butchered.
-- NOTE(review): the flag's meaning (rotten? skeletal?) is not visible
-- here -- confirm against the 'ItemCorpse' definition.
butcherable :: Item -> Bool
butcherable item = case itemData item of
  ItemCorpse _ False -> True
  _ -> False

-- | Is the item any kind of book?
isBook :: Item -> Bool
isBook item = case itemData item of
  ItemBook {} -> True
  _ -> False

-- | Is the item a rune of Zot?
isRune :: Item -> Bool
isRune item = case itemData item of
  ItemMiscellany (Just MISC_RUNE_OF_ZOT) -> True
  _ -> False
|
rwbarton/rw
|
Crawl/FloorItems.hs
|
bsd-3-clause
| 9,418
| 0
| 18
| 2,083
| 2,753
| 1,445
| 1,308
| 153
| 20
|
-- http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=ITP1_5_C
-- Print a Chessboard
-- input:
-- 3 4
-- 5 6
-- 3 3
-- 2 2
-- 1 1
-- 0 0
-- output:
-- #.#.
-- .#.#
-- #.#.
--
-- #.#.#.
-- .#.#.#
-- #.#.#.
-- .#.#.#
-- #.#.#.
--
-- #.#
-- .#.
-- #.#
--
-- #.
-- .#
--
-- #
--
import Control.Applicative
import qualified Control.Monad as Monad
-- | The whole program is the input loop.
main = loop

-- | Read "H W" pairs, one per line, printing a chessboard for each,
-- until the terminating "0 0" pair.
loop :: IO ()
loop = do
  -- NOTE(review): the pattern assumes exactly two numbers per line,
  -- as the judge's input format guarantees; other input crashes.
  [h,w] <- map (read :: String -> Int) . words <$> getLine
  Monad.when (h /= 0 || w /= 0) $ do
    printChessboard h w
    loop
-- | Print an h-row, w-column chessboard ('#' in the top-left corner),
-- followed by the required blank separator line.
printChessboard :: Int -> Int -> IO ()
printChessboard h w = do
  mapM_ (putStrLn . squareLine w) [1..h]
  putStrLn ""
-- | One board row: odd rows start with '#', even rows with '.'.
squareLine :: Int -> Int -> String
squareLine w i
  | odd i = takeSquare w "#" "."
  | otherwise = takeSquare w "." "#"
-- | The first w characters of the infinite alternation s1,s2,s1,s2,...
takeSquare :: Int -> String -> String -> String
takeSquare w s1 s2 = take w (concat (repeat (s1 ++ s2)))
|
ku00/aoj-haskell
|
src/ITP1_5_C.hs
|
bsd-3-clause
| 975
| 0
| 12
| 282
| 315
| 173
| 142
| 20
| 2
|
{-# LANGUAGE RecordWildCards #-}
-- | EKG monitoring.
module Pos.Infra.Reporting.Ekg
( withEkgServer
, registerEkgMetrics
, EkgNodeMetrics (..)
, registerEkgNodeMetrics
) where
import Universum
import Node (Node)
import Node.Util.Monitor (registerMetrics)
import qualified System.Metrics as Metrics
import qualified System.Remote.Monitoring.Wai as Monitoring
import Pos.Core.Metrics.Constants (cardanoNamespace)
import Pos.Infra.Statistics (EkgParams (..))
import Pos.Infra.Util.Monitor (stopMonitor)
-- | All you need in order to register EKG metrics on a time-warp node.
data EkgNodeMetrics = EkgNodeMetrics
    { enmStore :: Metrics.Store
    }

-- | Register various network-related EKG metrics (relevant to a Node)
-- under the cardano namespace, into the node's metrics store.
registerEkgNodeMetrics
    :: EkgNodeMetrics
    -> Node
    -> IO ()
registerEkgNodeMetrics ekgNodeMetrics nd =
    registerMetrics (Just cardanoNamespace) nd (enmStore ekgNodeMetrics)
-- | Register RTS/GC ekg metrics into the given store.
registerEkgMetrics
    :: Metrics.Store
    -> IO ()
registerEkgMetrics = Metrics.registerGcMetrics
-- | Bracket an EKG web server, so you can look at the metrics in your browser.
-- The server is started before the action runs and is stopped
-- afterwards even if the action throws.
withEkgServer
    :: EkgParams
    -> Metrics.Store
    -> IO t
    -> IO t
withEkgServer EkgParams {..} ekgStore act = bracket acquire release (const act)
  where
    -- ekgHost / ekgPort are bound by the RecordWildCards pattern.
    acquire = Monitoring.forkServerWith ekgStore ekgHost ekgPort
    release = stopMonitor
|
input-output-hk/pos-haskell-prototype
|
infra/src/Pos/Infra/Reporting/Ekg.hs
|
mit
| 1,484
| 0
| 9
| 317
| 281
| 163
| 118
| 34
| 1
|
{- scheduled activities
-
- Copyright 2013-2014 Joey Hess <id@joeyh.name>
-
- License: BSD-2-clause
-}
module Utility.Scheduled (
Schedule(..),
Recurrance(..),
ScheduledTime(..),
NextTime(..),
WeekDay,
MonthDay,
YearDay,
nextTime,
calcNextTime,
startTime,
fromSchedule,
fromScheduledTime,
toScheduledTime,
fromRecurrance,
toRecurrance,
toSchedule,
parseSchedule,
prop_schedule_roundtrips,
prop_past_sane,
) where
import Utility.Data
import Utility.QuickCheck
import Utility.PartialPrelude
import Utility.Misc
import Data.List
import Data.Time.Clock
import Data.Time.LocalTime
import Data.Time.Calendar
import Data.Time.Calendar.WeekDate
import Data.Time.Calendar.OrdinalDate
import Data.Tuple.Utils
import Data.Char
import Control.Applicative
import Prelude
{- Some sort of scheduled event: a recurrance plus a time of day. -}
data Schedule = Schedule Recurrance ScheduledTime
  deriving (Eq, Read, Show, Ord)

data Recurrance
  = Daily
  | Weekly (Maybe WeekDay)
  | Monthly (Maybe MonthDay)
  | Yearly (Maybe YearDay)
  | Divisible Int Recurrance
  -- ^ Days, Weeks, or Months of the year evenly divisible by a number.
  -- (Divisible Year is years evenly divisible by a number.)
  deriving (Eq, Read, Show, Ord)

type WeekDay = Int
type MonthDay = Int
type YearDay = Int

data ScheduledTime
  = AnyTime
  | SpecificTime Hour Minute
  deriving (Eq, Read, Show, Ord)

type Hour = Int
type Minute = Int

-- | Next time a Schedule should take effect. The NextTimeWindow is used
-- when a Schedule is allowed to start at some point within the window.
data NextTime
  = NextTimeExactly LocalTime
  | NextTimeWindow LocalTime LocalTime
  deriving (Eq, Read, Show)

-- | The earliest moment of a NextTime.
startTime :: NextTime -> LocalTime
startTime (NextTimeExactly t) = t
startTime (NextTimeWindow t _) = t
-- | IO wrapper around 'calcNextTime', using the current local time.
nextTime :: Schedule -> Maybe LocalTime -> IO (Maybe NextTime)
nextTime schedule lasttime = do
  now <- getCurrentTime
  tz <- getTimeZone now
  return $ calcNextTime schedule lasttime $ utcToLocalTime tz now
-- | Calculate the next time that fits a Schedule, based on the
-- last time it occurred, and the current time.
calcNextTime :: Schedule -> Maybe LocalTime -> LocalTime -> Maybe NextTime
calcNextTime schedule@(Schedule recurrance scheduledtime) lasttime currenttime
  | scheduledtime == AnyTime = do
    next <- findfromtoday True
    return $ case next of
      NextTimeWindow _ _ -> next
      -- An AnyTime schedule widens an exact hit to a whole-day window.
      NextTimeExactly t -> window (localDay t) (localDay t)
  | otherwise = NextTimeExactly . startTime <$> findfromtoday False
  where
  findfromtoday anytime = findfrom recurrance afterday today
    where
    today = localDay currenttime
    -- Start searching tomorrow when today's slot is already spent.
    afterday = sameaslastrun || toolatetoday
    toolatetoday = not anytime && localTimeOfDay currenttime >= nexttime
    sameaslastrun = lastrun == Just today
  lastrun = localDay <$> lasttime
  nexttime = case scheduledtime of
    AnyTime -> TimeOfDay 0 0 0
    SpecificTime h m -> TimeOfDay h m 0
  exactly d = NextTimeExactly $ LocalTime d nexttime
  window startd endd = NextTimeWindow
    (LocalTime startd nexttime)
    (LocalTime endd (TimeOfDay 23 59 0))
  -- Safety valve around the recursive search below.
  findfrom r afterday candidate
    | ynum candidate > (ynum (localDay currenttime)) + 100 =
      -- avoid possible infinite recursion
      error $ "bug: calcNextTime did not find a time within 100 years to run " ++
        show (schedule, lasttime, currenttime)
    | otherwise = findfromChecked r afterday candidate
  findfromChecked r afterday candidate = case r of
    Daily
      | afterday -> Just $ exactly $ addDays 1 candidate
      | otherwise -> Just $ exactly candidate
    Weekly Nothing
      | afterday -> skip 1
      | otherwise -> case (wday <$> lastrun, wday candidate) of
        (Nothing, _) -> Just $ window candidate (addDays 6 candidate)
        (Just old, curr)
          | old == curr -> Just $ window candidate (addDays 6 candidate)
          | otherwise -> skip 1
    Monthly Nothing
      | afterday -> skip 1
      | maybe True (candidate `oneMonthPast`) lastrun ->
        Just $ window candidate (endOfMonth candidate)
      | otherwise -> skip 1
    Yearly Nothing
      | afterday -> skip 1
      | maybe True (candidate `oneYearPast`) lastrun ->
        Just $ window candidate (endOfYear candidate)
      | otherwise -> skip 1
    Weekly (Just w)
      | w < 0 || w > maxwday -> Nothing
      | w == wday candidate -> if afterday
        then Just $ exactly $ addDays 7 candidate
        else Just $ exactly candidate
      | otherwise -> Just $ exactly $
        addDays (fromIntegral $ (w - wday candidate) `mod` 7) candidate
    Monthly (Just m)
      | m < 0 || m > maxmday -> Nothing
      -- TODO can be done more efficiently than recursing
      | m == mday candidate -> if afterday
        then skip 1
        else Just $ exactly candidate
      | otherwise -> skip 1
    Yearly (Just y)
      | y < 0 || y > maxyday -> Nothing
      | y == yday candidate -> if afterday
        then skip 365
        else Just $ exactly candidate
      | otherwise -> skip 1
    Divisible n r'@Daily -> handlediv n r' yday (Just maxyday)
    Divisible n r'@(Weekly _) -> handlediv n r' wnum (Just maxwnum)
    Divisible n r'@(Monthly _) -> handlediv n r' mnum (Just maxmnum)
    Divisible n r'@(Yearly _) -> handlediv n r' ynum Nothing
    Divisible _ r'@(Divisible _ _) -> findfrom r' afterday candidate
    where
    skip n = findfrom r False (addDays n candidate)
    -- A divisor must be positive and no larger than the quantity's max.
    handlediv n r' getval mmax
      | n > 0 && maybe True (n <=) mmax =
        findfromwhere r' (divisible n . getval) afterday candidate
      | otherwise = Nothing
  -- Repeatedly find the recurrance's next day until it satisfies p.
  findfromwhere r p afterday candidate
    | maybe True (p . getday) next = next
    | otherwise = maybe Nothing (findfromwhere r p True . getday) next
    where
    next = findfrom r afterday candidate
    getday = localDay . startTime
  divisible n v = v `rem` n == 0
-- | Check if the new Day occurs one month or more past the old Day.
--
-- The previous implementation built the threshold with
-- @fromGregorian y (m+1) d@; for December (m == 12) the month 13 was
-- clipped back into the same year, so a date only days later in that
-- December wrongly counted as a month past.  'addGregorianMonthsClip'
-- rolls the year over correctly while still clipping the day of month
-- (e.g. Jan 31 + 1 month = Feb 28), matching the old non-December
-- behaviour.
oneMonthPast :: Day -> Day -> Bool
new `oneMonthPast` old = addGregorianMonthsClip 1 old <= new
-- | Check if the new Day occurs one year or more past the old Day.
oneYearPast :: Day -> Day -> Bool
new `oneYearPast` old = threshold <= new
  where
    threshold = fromGregorian (y + 1) m d
    (y, m, d) = toGregorian old
-- | The last day of the month the given day falls in.
endOfMonth :: Day -> Day
endOfMonth day = fromGregorian y m (gregorianMonthLength y m)
  where
    (y, m, _d) = toGregorian day
-- | The last day of the year the given day falls in
-- (the end of month 'maxmnum', i.e. December).
endOfYear :: Day -> Day
endOfYear day =
  let (y,_m,_d) = toGregorian day
  in endOfMonth (fromGregorian y maxmnum 1)
-- extracting various quantities from a Day

-- | ISO 8601 day of week: 1 (Monday) .. 7 (Sunday).
wday :: Day -> Int
wday = thd3 . toWeekDate

-- | ISO 8601 week number within the year.
wnum :: Day -> Int
wnum = snd3 . toWeekDate

-- | Day of the month.
mday :: Day -> Int
mday = thd3 . toGregorian

-- | Month number.
mnum :: Day -> Int
mnum = snd3 . toGregorian

-- | Day of the year.
yday :: Day -> Int
yday = snd . toOrdinalDate

-- | Year number.
ynum :: Day -> Int
ynum = fromIntegral . fst . toOrdinalDate

-- Calendar max values.
maxyday :: Int
maxyday = 366 -- with leap days

maxwnum :: Int
maxwnum = 53 -- some years have more than 52

maxmday :: Int
maxmday = 31

maxmnum :: Int
maxmnum = 12

maxwday :: Int
maxwday = 7
-- | Render a Recurrance as the phrase 'toRecurrance' parses back.
fromRecurrance :: Recurrance -> String
fromRecurrance (Divisible n r) =
  fromRecurrance' (++ "s divisible by " ++ show n) r
fromRecurrance r = fromRecurrance' ("every " ++) r

-- | Render with a caller-supplied decoration of the unit word.
fromRecurrance' :: (String -> String) -> Recurrance -> String
fromRecurrance' a Daily = a "day"
fromRecurrance' a (Weekly n) = onday n (a "week")
fromRecurrance' a (Monthly n) = onday n (a "month")
fromRecurrance' a (Yearly n) = onday n (a "year")
fromRecurrance' a (Divisible _n r) = fromRecurrance' a r -- not used

-- | Prefix "on day N of " when a specific day is set.
onday :: Maybe Int -> String -> String
onday (Just n) s = "on day " ++ show n ++ " of " ++ s
onday Nothing s = s
-- | Parse the textual forms produced by 'fromRecurrance' (plus some
-- variants) back into a Recurrance.
toRecurrance :: String -> Maybe Recurrance
toRecurrance s = case words s of
  ("every":"day":[]) -> Just Daily
  ("on":"day":sd:"of":"every":something:[]) -> withday sd something
  ("every":something:[]) -> noday something
  ("days":"divisible":"by":sn:[]) ->
    Divisible <$> getdivisor sn <*> pure Daily
  ("on":"day":sd:"of":something:"divisible":"by":sn:[]) ->
    Divisible
      <$> getdivisor sn
      <*> withday sd something
  ("every":something:"divisible":"by":sn:[]) ->
    Divisible
      <$> getdivisor sn
      <*> noday something
  (something:"divisible":"by":sn:[]) ->
    Divisible
      <$> getdivisor sn
      <*> noday something
  _ -> Nothing
  where
  -- Map a unit word to its constructor; a trailing "s" is stripped
  -- and retried, so "weeks" works too.
  constructor "week" = Just Weekly
  constructor "month" = Just Monthly
  constructor "year" = Just Yearly
  constructor u
    | "s" `isSuffixOf` u = constructor $ reverse $ drop 1 $ reverse u
    | otherwise = Nothing
  withday sd u = do
    c <- constructor u
    d <- readish sd
    Just $ c (Just d)
  noday u = do
    c <- constructor u
    Just $ c Nothing
  -- Only positive divisors are accepted.
  getdivisor sn = do
    n <- readish sn
    if n > 0
      then Just n
      else Nothing
-- | Render a time of day in 12-hour AM/PM form; the minutes part is
-- shown only when nonzero.
fromScheduledTime :: ScheduledTime -> String
fromScheduledTime AnyTime = "any time"
fromScheduledTime (SpecificTime h m) =
  show h' ++ minutes ++ " " ++ ampm
  where
    minutes
      | m > 0 = ':' : pad 2 (show m)
      | otherwise = ""
    pad n s = replicate (n - length s) '0' ++ s
    (h', ampm)
      | h == 0 = (12, "AM")
      | h < 12 = (h, "AM")
      | h == 12 = (h, "PM")
      | otherwise = (h - 12, "PM")
-- | Parse "any time", "H[:MM] AM/PM", or 24-hour "H[:MM]".
toScheduledTime :: String -> Maybe ScheduledTime
toScheduledTime "any time" = Just AnyTime
toScheduledTime v = case words v of
  (s:ampm:[])
    | map toUpper ampm == "AM" ->
      go s h0
    | map toUpper ampm == "PM" ->
      go s (\h -> (h0 h) + 12)
    | otherwise -> Nothing
  (s:[]) -> go s id
  _ -> Nothing
  where
  -- 12 AM is midnight, so hour 12 maps to 0 before any PM offset.
  h0 h
    | h == 12 = 0
    | otherwise = h
  -- Missing minutes default to 0.
  go :: String -> (Int -> Int) -> Maybe ScheduledTime
  go s adjust =
    let (h, m) = separate (== ':') s
    in SpecificTime
      <$> (adjust <$> readish h)
      <*> if null m then Just 0 else readish m
-- | Render a whole Schedule, e.g. "every day at any time".
fromSchedule :: Schedule -> String
fromSchedule (Schedule recurrance scheduledtime) =
  fromRecurrance recurrance ++ " at " ++ fromScheduledTime scheduledtime

-- | Like 'parseSchedule', but discarding the error message.
toSchedule :: String -> Maybe Schedule
toSchedule = eitherToMaybe . parseSchedule
-- | Parse "<recurrance> at <time>", reporting which half was bad.
parseSchedule :: String -> Either String Schedule
parseSchedule s = do
  r <- maybe (Left $ "bad recurrance: " ++ recurrance) Right
    (toRecurrance recurrance)
  t <- maybe (Left $ "bad time of day: " ++ scheduledtime) Right
    (toScheduledTime scheduledtime)
  Right $ Schedule r t
  where
  -- Split the word list on the literal word "at".
  (rws, tws) = separate (== "at") (words s)
  recurrance = unwords rws
  scheduledtime = unwords tws
instance Arbitrary Schedule where
  arbitrary = Schedule <$> arbitrary <*> arbitrary

instance Arbitrary ScheduledTime where
  arbitrary = oneof
    [ pure AnyTime
    , SpecificTime
      <$> choose (0, 23)
      -- minute 0 never generated -- NOTE(review): presumably because
      -- fromScheduledTime omits ":00", which would break round-trips;
      -- confirm.
      <*> choose (1, 59)
    ]

instance Arbitrary Recurrance where
  arbitrary = oneof
    [ pure Daily
    , Weekly <$> arbday
    , Monthly <$> arbday
    , Yearly <$> arbday
    , Divisible
      <$> positive arbitrary
      <*> oneof -- no nested Divisibles
        [ pure Daily
        , Weekly <$> arbday
        , Monthly <$> arbday
        , Yearly <$> arbday
        ]
    ]
    where
    arbday = oneof
      [ Just <$> nonNegative arbitrary
      , pure Nothing
      ]
-- | Rendering then parsing a Schedule must give it back.
prop_schedule_roundtrips :: Schedule -> Bool
prop_schedule_roundtrips s = toSchedule (fromSchedule s) == Just s

-- | Fixed examples pinning down oneMonthPast / oneYearPast, in both
-- directions (swapping the pair must flip the answer).
prop_past_sane :: Bool
prop_past_sane = and
  [ all (checksout oneMonthPast) (mplus1 ++ yplus1)
  , all (not . (checksout oneMonthPast)) (map swap (mplus1 ++ yplus1))
  , all (checksout oneYearPast) yplus1
  , all (not . (checksout oneYearPast)) (map swap yplus1)
  ]
  where
  mplus1 = -- new date, with old date 1+ months before it
    [ (fromGregorian 2014 01 15, fromGregorian 2013 12 15)
    , (fromGregorian 2014 01 15, fromGregorian 2013 02 15)
    , (fromGregorian 2014 02 15, fromGregorian 2013 01 15)
    , (fromGregorian 2014 03 01, fromGregorian 2013 01 15)
    , (fromGregorian 2014 03 01, fromGregorian 2013 12 15)
    , (fromGregorian 2015 01 01, fromGregorian 2010 01 01)
    ]
  yplus1 = -- new date, with old date 1+ years before it
    [ (fromGregorian 2014 01 15, fromGregorian 2012 01 16)
    , (fromGregorian 2014 01 15, fromGregorian 2013 01 14)
    , (fromGregorian 2022 12 31, fromGregorian 2000 01 01)
    ]
  checksout cmp (new, old) = new `cmp` old
  swap (a,b) = (b,a)
|
sjfloat/propellor
|
src/Utility/Scheduled.hs
|
bsd-2-clause
| 11,697
| 142
| 18
| 2,453
| 4,382
| 2,220
| 2,162
| 325
| 18
|
{-# OPTIONS_GHC -Wall #-}
module Type.Unify (unify) where
import Control.Monad (zipWithM_)
import Control.Monad.Except (ExceptT, lift, liftIO, throwError, runExceptT)
import qualified Data.Map as Map
import qualified Data.UnionFind.IO as UF
import qualified AST.Variable as Var
import qualified Reporting.Region as R
import qualified Reporting.Error.Type as Error
import qualified Type.State as TS
import Type.Type as Type
-- KICK OFF UNIFICATION
-- | Unify two type variables; on mismatch, build a type-error report
-- from their source types and record it against the region.
unify :: Error.Hint -> R.Region -> Variable -> Variable -> TS.Solver ()
unify hint region expected actual =
  do  result <- runExceptT (guardedUnify ExpectedActual expected actual)
      case result of
        Right state ->
            return state

        Left (Mismatch _subExpected _subActual maybeReason) ->
            let
              mkError =
                do  expectedSrcType <- Type.toSrcType expected
                    actualSrcType <- Type.toSrcType actual
                    -- NOTE(review): `Error` here is a module qualifier,
                    -- not a data constructor -- this argument looks
                    -- garbled or truncated; confirm against upstream.
                    mergeHelp expected actual Error
                    let info = Error.MismatchInfo hint expectedSrcType actualSrcType maybeReason
                    return (Error.Mismatch info)
            in
              TS.addError region =<< liftIO mkError
-- UNIFICATION HELPERS
-- | The unification monad: a solver computation that can fail early with a
-- 'Mismatch'.
type Unify =
  ExceptT Mismatch TS.Solver


-- | Everything needed while unifying two variables: which side is
-- "expected" vs "actual" ('_orientation'), plus both variables together
-- with their already-dereferenced union-find descriptors.
data Context = Context
    { _orientation :: Orientation
    , _first :: Variable
    , _firstDesc :: Descriptor
    , _second :: Variable
    , _secondDesc :: Descriptor
    }
-- | Records whether '_first' holds the expected type (and '_second' the
-- actual one) or vice versa.  Error messages must present expected/actual
-- the way the user wrote them, so this flips whenever we swap sides.
data Orientation = ExpectedActual | ActualExpected


-- | Swap the two sides of a 'Context', flipping the orientation to match.
reorient :: Context -> Context
reorient (Context orientation var1 desc1 var2 desc2) =
  Context (flipOrientation orientation) var2 desc2 var1 desc1
  where
    flipOrientation ExpectedActual = ActualExpected
    flipOrientation ActualExpected = ExpectedActual
-- ERROR MESSAGES
-- | A failed unification: the expected variable, the actual variable, and
-- optionally a more precise reason for the clash.
data Mismatch
    = Mismatch Variable Variable (Maybe Error.Reason)


-- | Abort unification with an error, reordering first/second into
-- expected/actual (and flipping the reason) according to the orientation.
mismatch :: Context -> Maybe Error.Reason -> Unify a
mismatch (Context orientation first _ second _) maybeReason =
  let
    (expected, actual, orientedReason) =
      case orientation of
        ExpectedActual ->
            (first, second, maybeReason)

        ActualExpected ->
            (second, first, Error.flipReason <$> maybeReason)
  in
    throwError (Mismatch expected actual orientedReason)
-- | Reason: clashed with a rigid (user-written) type variable.
badRigid :: Maybe String -> Error.Reason
badRigid maybeName =
  Error.BadVar (Just (Error.Rigid maybeName)) Nothing


-- | Reason: failed to satisfy a super type constraint.
badSuper :: Super -> Error.Reason
badSuper super =
  Error.BadVar (Just (errorSuper super)) Nothing


-- | Reason: two special variable kinds that cannot be reconciled.
doubleBad :: Error.VarType -> Error.VarType -> Error.Reason
doubleBad vt1 vt2 =
  Error.BadVar (Just vt1) (Just vt2)


-- | Translate a super type constraint into its error-message counterpart.
errorSuper :: Super -> Error.VarType
errorSuper Number     = Error.Number
errorSuper Comparable = Error.Comparable
errorSuper Appendable = Error.Appendable
errorSuper CompAppend = Error.CompAppend
-- MERGE
-- | Point both variables in the context at a shared descriptor with the
-- given content.
merge :: Context -> Content -> Unify ()
merge (Context _ first _ second _) content =
  liftIO $ mergeHelp first second content


-- | Union the two variables in the union-find structure.  The combined
-- descriptor takes the given content and the lower (more general) of the
-- two ranks; the mark and copy caches are reset.
mergeHelp :: Variable -> Variable -> Content -> IO ()
mergeHelp first second content =
  UF.union' first second $ \desc1 desc2 ->
      return $
        Descriptor
          { _content = content
          , _rank = min (_rank desc1) (_rank desc2)
          , _mark = noMark
          , _copy = Nothing
          }
-- | Allocate a fresh variable with the given content at the lower of the
-- two ranks in the context, and register it with the solver so it is
-- generalized at the right time.
fresh :: Context -> Content -> Unify Variable
fresh (Context _ _ desc1 _ desc2) content =
  do  freshVariable <-
          liftIO $ UF.fresh $
            Descriptor
              { _content = content
              , _rank = min (_rank desc1) (_rank desc2)
              , _mark = noMark
              , _copy = Nothing
              }
      lift (TS.register freshVariable)
-- ACTUALLY UNIFY THINGS
-- | Unify two variables unless they are already in the same union-find
-- equivalence class, in which case there is nothing to do.
guardedUnify :: Orientation -> Variable -> Variable -> Unify ()
guardedUnify orientation left right =
  do  equivalent <- liftIO $ UF.equivalent left right
      if equivalent
        then return ()
        else
          do  leftDesc <- liftIO $ UF.descriptor left
              rightDesc <- liftIO $ UF.descriptor right
              actuallyUnify (Context orientation left leftDesc right rightDesc)


-- | Recursively unify two sub-variables, preserving the orientation of the
-- enclosing unification.
subUnify :: Context -> Variable -> Variable -> Unify ()
subUnify context var1 var2 =
  guardedUnify (_orientation context) var1 var2
-- | Dispatch on the content of the first variable.  Each helper below
-- handles one shape on the left-hand side against any content on the right.
actuallyUnify :: Context -> Unify ()
actuallyUnify context@(Context _ _ firstDesc _ secondDesc) =
  let
    secondContent = _content secondDesc
  in
  case _content firstDesc of
    Error ->
        -- If there was an error, just pretend it is okay. This lets us avoid
        -- "cascading" errors where one problem manifests as multiple message.
        return ()

    Var Flex Nothing _ ->
        unifyFlex context secondContent

    Var Flex (Just super) _ ->
        unifySuper context super secondContent

    Var Rigid maybeSuper maybeName ->
        unifyRigid context maybeSuper maybeName secondContent

    Atom name ->
        unifyAtom context name secondContent

    Alias name args realVar ->
        unifyAlias context name args realVar secondContent

    Structure term ->
        unifyStructure context term secondContent
-- UNIFY FLEXIBLE VARIABLES
-- | Unify an unconstrained flexible variable with any other content.
--
-- A flex variable with no super type constraint places no demands, so it
-- simply adopts whatever the other side is.  The one exception is 'Error',
-- which is left alone to avoid cascading error reports.  (The original
-- spelled out every non-Error constructor — Var, Atom, Alias, Structure —
-- and merged in each case; the wildcard is exactly equivalent.)
unifyFlex :: Context -> Content -> Unify ()
unifyFlex context otherContent =
  case otherContent of
    Error ->
        return ()

    _ ->
        merge context otherContent
-- UNIFY RIGID VARIABLES
-- | Unify a rigid variable (a user-written type variable, possibly with a
-- super type constraint) with the other content.  A rigid variable only
-- unifies with flex variables whose demands it already satisfies; anything
-- concrete (atom, alias, structure) or another rigid variable is an error.
unifyRigid :: Context -> Maybe Super -> Maybe String -> Content -> Unify ()
unifyRigid context maybeSuper maybeName otherContent =
  case otherContent of
    Error ->
        return ()

    Var Flex otherMaybeSuper _ ->
        case (maybeSuper, otherMaybeSuper) of
          (_, Nothing) ->
              -- the flex side demands nothing, so it becomes this rigid var
              merge context (Var Rigid maybeSuper maybeName)

          (Nothing, Just _) ->
              -- the flex side demands a constraint the rigid var cannot prove
              mismatch context (Just (badRigid maybeName))

          (Just super, Just otherSuper) ->
              case combineSupers super otherSuper of
                Right newSuper | newSuper == otherSuper ->
                    merge context otherContent

                _ ->
                    mismatch context (Just (badRigid maybeName))

    Var Rigid _ otherMaybeName ->
        mismatch context $ Just $
          doubleBad (Error.Rigid maybeName) (Error.Rigid otherMaybeName)

    Atom _ ->
        mismatch context (Just (badRigid maybeName))

    Alias _ _ _ ->
        mismatch context (Just (badRigid maybeName))

    Structure _ ->
        mismatch context (Just (badRigid maybeName))
-- UNIFY SUPER VARIABLES
-- | Unify a flex variable carrying a super type constraint (number,
-- comparable, appendable, or compappend) with the other content.
unifySuper :: Context -> Super -> Content -> Unify ()
unifySuper context super otherContent =
  case otherContent of
    Structure term ->
        unifySuperStructure context super term

    Atom name ->
        if atomMatchesSuper super name then
            merge context otherContent
        else
            mismatch context (Just (badSuper super))

    Var Rigid Nothing maybeName ->
        -- a plain rigid var cannot prove any super constraint
        mismatch context (Just (doubleBad (errorSuper super) (Error.Rigid maybeName)))

    Var Rigid (Just otherSuper) maybeName ->
        case combineSupers super otherSuper of
          Right newSuper | newSuper == super ->
              merge context otherContent

          _ ->
              mismatch context $ Just $
                doubleBad (errorSuper super) (Error.Rigid maybeName)

    Var Flex Nothing _ ->
        merge context (Var Flex (Just super) Nothing)

    Var Flex (Just otherSuper) _ ->
        -- both sides constrained: keep the stronger combined constraint
        case combineSupers super otherSuper of
          Left reason ->
              mismatch context (Just reason)

          Right newSuper ->
              merge context (Var Flex (Just newSuper) Nothing)

    Alias _ _ realVar ->
        -- expand the alias and retry against the underlying type
        subUnify context (_first context) realVar

    Error ->
        return ()
-- | Combine two super type constraints into the strongest one that implies
-- both, or fail when they are incompatible (e.g. number with appendable).
--
-- Comparable + Appendable strengthens to CompAppend; Number absorbs
-- Comparable; CompAppend absorbs both Comparable and Appendable.
combineSupers :: Super -> Super -> Either Error.Reason Super
combineSupers firstSuper secondSuper =
  case firstSuper of
    Number ->
      case secondSuper of
        Number     -> Right Number
        Comparable -> Right Number
        _          -> clash

    Comparable ->
      case secondSuper of
        Number     -> Right Number
        Comparable -> Right Comparable
        Appendable -> Right CompAppend
        CompAppend -> Right CompAppend

    Appendable ->
      case secondSuper of
        Appendable -> Right Appendable
        Comparable -> Right CompAppend
        CompAppend -> Right CompAppend
        Number     -> clash

    CompAppend ->
      case secondSuper of
        Comparable -> Right CompAppend
        Appendable -> Right CompAppend
        CompAppend -> Right CompAppend
        Number     -> clash
  where
    clash =
      Left (doubleBad (errorSuper firstSuper) (errorSuper secondSuper))
-- | True when the canonical name is one of the given primitive type names.
isPrimitiveFrom :: [String] -> Var.Canonical -> Bool
isPrimitiveFrom prims var =
  any (`Var.isPrim` var) prims


-- | Does a concrete atomic type (Int, Float, Char, String) satisfy the
-- given super type constraint?
atomMatchesSuper :: Super -> Var.Canonical -> Bool
atomMatchesSuper Number     name = isPrimitiveFrom ["Int", "Float"] name
atomMatchesSuper Comparable name = isPrimitiveFrom ["Int", "Float", "Char", "String"] name
atomMatchesSuper Appendable name = Var.isPrim "String" name
atomMatchesSuper CompAppend name = Var.isPrim "String" name
-- | Check that a concrete structure satisfies a super type constraint.
-- Only lists and tuples can: lists are appendable, and comparable/compappend
-- when their element is comparable; tuples of up to 6 entries are comparable
-- when every entry is.  Everything else fails the constraint.
unifySuperStructure :: Context -> Super -> Term1 Variable -> Unify ()
unifySuperStructure context super term =
  do  appStructure <- liftIO (collectApps (Structure term))
      case appStructure of
        Other ->
            mismatch context (Just (badSuper super))

        List variable ->
            case super of
              Number ->
                  mismatch context (Just (badSuper super))

              Appendable ->
                  merge context (Structure term)

              Comparable ->
                  do  merge context (Structure term)
                      unifyComparableRecursive (_orientation context) variable

              CompAppend ->
                  do  merge context (Structure term)
                      unifyComparableRecursive (_orientation context) variable

        Tuple entries ->
            case super of
              Number ->
                  mismatch context (Just (badSuper super))

              Appendable ->
                  mismatch context (Just (badSuper super))

              Comparable ->
                  -- comparability is only defined for tuples up to 6 entries
                  if length entries > 6 then
                      mismatch context (Just (Error.TooLongComparableTuple (length entries)))

                  else
                      do  merge context (Structure term)
                          mapM_ (unifyComparableRecursive (_orientation context)) entries

              CompAppend ->
                  mismatch context (Just (badSuper super))
-- | Constrain a sub-variable (a list element or tuple entry) to be
-- comparable by unifying it with a fresh comparable flex variable created
-- at the same rank.
unifyComparableRecursive :: Orientation -> Variable -> Unify ()
unifyComparableRecursive orientation var =
  do  compVar <-
          liftIO $
            do  desc <- UF.descriptor var
                UF.fresh $
                  Descriptor
                    { _content = Var Flex (Just Comparable) Nothing
                    , _rank = _rank desc
                    , _mark = noMark
                    , _copy = Nothing
                    }
      guardedUnify orientation compVar var
-- | The result of flattening a chain of type applications: a list type, a
-- tuple type, or anything else.
data AppStructure
    = List Variable
    | Tuple [Variable]
    | Other


-- | Flatten nested type applications to discover whether a content is a
-- list or tuple type.
collectApps :: Content -> IO AppStructure
collectApps content =
    collectAppsHelp [] content


-- Walk down the spine of App1 nodes, accumulating the arguments in order.
-- (The repeated append is quadratic but argument lists are tiny.)
collectAppsHelp :: [Variable] -> Content -> IO AppStructure
collectAppsHelp args content =
  case (content, args) of
    (Structure (App1 func arg), _) ->
        collectAppsHelp (args ++ [arg]) =<< getContent func

    (Atom name, [arg]) | Var.isList name ->
        return (List arg)

    (Atom name, _) | Var.isTuple name ->
        return (Tuple args)

    _ ->
        return Other


-- | Dereference a variable to its current union-find content.
getContent :: Variable -> IO Content
getContent variable =
  _content <$> UF.descriptor variable
-- UNIFY ATOMS
-- | Unify a concrete atomic type (like Int or String) with other content.
unifyAtom :: Context -> Var.Canonical -> Content -> Unify ()
unifyAtom context name otherContent =
  case otherContent of
    Error ->
        return ()

    Var Flex Nothing _ ->
        merge context (Atom name)

    Var Flex (Just super) _ ->
        if atomMatchesSuper super name then
            merge context (Atom name)
        else
            -- flip the reason: the constraint lives on the other side
            mismatch context (Just ((Error.flipReason (badSuper super))))

    Var Rigid _ maybeName ->
        mismatch context (Just (Error.flipReason (badRigid maybeName)))

    Atom otherName ->
        if name == otherName then
            merge context otherContent
        else
            mismatch context $
              -- special-cased hint for the very common Int vs Float mixup
              if isIntFloat name otherName || isIntFloat otherName name then
                  Just Error.IntFloat
              else
                  Nothing

    Alias _ _ realVar ->
        -- expand the alias and retry against the underlying type
        subUnify context (_first context) realVar

    Structure _ ->
        mismatch context Nothing


-- | True when the first name is Int and the second is Float.
isIntFloat :: Var.Canonical -> Var.Canonical -> Bool
isIntFloat name otherName =
  Var.isPrim "Int" name && Var.isPrim "Float" otherName
-- UNIFY ALIASES
-- | Unify a type alias with other content.  Two aliases with the same name
-- unify argument-by-argument (keeping the alias for nicer error messages);
-- in every other non-trivial case the alias is expanded to its underlying
-- type and unification continues there.
unifyAlias :: Context -> Var.Canonical -> [(String, Variable)] -> Variable -> Content -> Unify ()
unifyAlias context name args realVar otherContent =
  case otherContent of
    Error ->
        return ()

    Var Flex Nothing _ ->
        -- keep the alias so inferred types and errors stay readable
        merge context (Alias name args realVar)

    Var _ _ _ ->
        subUnify context realVar (_second context)

    Atom _ ->
        subUnify context realVar (_second context)

    Alias otherName otherArgs otherRealVar ->
        if name == otherName then
            do  zipWithM_ (subUnify context) (map snd args) (map snd otherArgs)
                merge context otherContent
        else
            subUnify context realVar otherRealVar

    Structure _ ->
        subUnify context realVar (_second context)
-- UNIFY STRUCTURES
-- | Unify a structural type (application, function, or record) with other
-- content.  Two structures must have the same shape and then unify
-- component-wise; records get special treatment via 'unifyRecord'.
unifyStructure :: Context -> Term1 Variable -> Content -> Unify ()
unifyStructure context term otherContent =
  case otherContent of
    Error ->
        return ()

    Var Flex Nothing _ ->
        merge context (Structure term)

    Var Flex (Just super) _ ->
        -- reorient so the super-constraint side is "first" again
        unifySuper (reorient context) super (Structure term)

    Var Rigid _ maybeName ->
        mismatch context (Just (Error.flipReason (badRigid maybeName)))

    Atom _ ->
        mismatch context Nothing

    Alias _ _ realVar ->
        subUnify context (_first context) realVar

    Structure otherTerm ->
        case (term, otherTerm) of
          (App1 func arg, App1 otherFunc otherArg) ->
              do  subUnify context func otherFunc
                  subUnify context arg otherArg
                  merge context otherContent

          (Fun1 arg result, Fun1 otherArg otherResult) ->
              do  subUnify context arg otherArg
                  subUnify context result otherResult
                  merge context otherContent

          (EmptyRecord1, EmptyRecord1) ->
              merge context otherContent

          -- a record with no own fields is just its extension
          (Record1 fields ext, EmptyRecord1) | Map.null fields ->
              subUnify context ext (_second context)

          (EmptyRecord1, Record1 fields ext) | Map.null fields ->
              subUnify context (_first context) ext

          (Record1 fields extension, Record1 otherFields otherExtension) ->
              do  firstStructure <- gatherFields context fields extension
                  secondStructure <- gatherFields context otherFields otherExtension
                  unifyRecord context firstStructure secondStructure

          _ ->
              mismatch context Nothing
-- UNIFY RECORDS
-- | Unify two fully-gathered records.  First reconcile the record *shapes*
-- (which fields are unique to each side and whether each side is open via
-- an extension variable), then unify the types of the shared fields.
unifyRecord :: Context -> RecordStructure -> RecordStructure -> Unify ()
unifyRecord context firstStructure secondStructure =
  do  let (RecordStructure expFields expVar expStruct) = firstStructure
      let (RecordStructure actFields actVar actStruct) = secondStructure

      -- call after unifying extension, make sure record shape matches before
      -- looking into whether the particular field types match.
      let unifySharedFields otherFields ext =
            do  let sharedFields = Map.intersectionWith (,) expFields actFields
                _ <- traverse (uncurry (subUnify context)) sharedFields
                let allFields = Map.union (Map.map fst sharedFields) otherFields
                merge context (Structure (Record1 allFields ext))

      let uniqueExpFields = Map.difference expFields actFields
      let uniqueActFields = Map.difference actFields expFields

      case (expStruct, Map.null uniqueExpFields, actStruct, Map.null uniqueActFields) of
        -- identical field sets: unify the extensions directly
        (_, True, _, True) ->
            do  subUnify context expVar actVar
                unifySharedFields Map.empty expVar

        -- a closed record cannot absorb the other side's extra fields
        (Empty, _, _, False) ->
            mismatch context (Just (Error.MessyFields (Map.keys uniqueExpFields) (Map.keys uniqueActFields)))

        (_, False, Empty, _) ->
            mismatch context (Just (Error.MessyFields (Map.keys uniqueExpFields) (Map.keys uniqueActFields)))

        -- one side has extras: push them into the other side's extension
        (_, False, _, True) ->
            do  subRecord <- fresh context (Structure (Record1 uniqueExpFields expVar))
                subUnify context subRecord actVar
                unifySharedFields Map.empty subRecord

        (_, True, _, False) ->
            do  subRecord <- fresh context (Structure (Record1 uniqueActFields actVar))
                subUnify context expVar subRecord
                unifySharedFields Map.empty subRecord

        -- both open with extras: meet in a fresh shared extension
        (Extension, False, Extension, False) ->
            do  let subFields = Map.union uniqueExpFields uniqueActFields
                subExt <- fresh context (Var Flex Nothing Nothing)
                expRecord <- fresh context (Structure (Record1 uniqueActFields subExt))
                actRecord <- fresh context (Structure (Record1 uniqueExpFields subExt))
                subUnify context expVar expRecord
                subUnify context actRecord actVar
                unifySharedFields subFields subExt
-- GATHER RECORD STRUCTURE
-- | A record flattened to a single level: all fields collected from nested
-- Record1 layers, the final extension variable, and whether that extension
-- is known to be the empty record or still open.
data RecordStructure = RecordStructure
    { _fields :: Map.Map String Variable
    , _extVar :: Variable
    , _extStruct :: ExtensionStructure
    }


data ExtensionStructure
    = Empty
    | Extension


-- | Chase a record's extension chain, unioning the fields of every nested
-- Record1 layer into one flat map.
gatherFields :: Context -> Map.Map String Variable -> Variable -> Unify RecordStructure
gatherFields context fields variable =
  do  desc <- liftIO (UF.descriptor variable)
      case _content desc of
        Structure (Record1 subFields subExt) ->
            -- Map.union is left-biased, so inner (earlier) fields win
            gatherFields context (Map.union fields subFields) subExt

        Structure EmptyRecord1 ->
            return (RecordStructure fields variable Empty)

        Alias _ _ var ->
            -- TODO may be dropping useful alias info here
            gatherFields context fields var

        _ ->
            return (RecordStructure fields variable Extension)
|
mgold/Elm
|
src/Type/Unify.hs
|
bsd-3-clause
| 18,653
| 0
| 20
| 5,648
| 5,278
| 2,591
| 2,687
| 429
| 13
|
{-# LANGUAGE NoImplicitPrelude, MagicHash, ScopedTypeVariables, KindSignatures,
UnboxedTuples, FlexibleContexts, UnliftedFFITypes, TypeOperators,
AllowAmbiguousTypes, DataKinds, TypeFamilies #-}
-----------------------------------------------------------------------------
-- |
-- Module : Java.Utils
-- Copyright : (c) Rahul Muttineni 2016
--
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : rahulmutt@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- The utility functions for the Java FFI.
--
-----------------------------------------------------------------------------
module Java.Utils
( JClass(..)
, getClass
, toString
, equals
, instanceOf
, classObject
, hashCode
, Proxy(..)
, eqObject#
, toString#
, safeDowncast
, Void
, Comparator(..)
, Comparable(..)
, Enum(..) )
where
import GHC.Base
import Data.Proxy
import Java.StringBase (fromJString, toJString)
-- | Wrapper for @java.lang.Class@ objects, tagged with the Eta type the
-- class describes.
data {-# CLASS "java.lang.Class" #-} JClass a = JClass (Object# (JClass a))
  deriving Class

-- | Fetch the 'JClass' for a type, looked up by its class identifier.
-- NOTE(review): this goes through 'forName' at runtime, so it assumes the
-- identifier from 'classIdentifier' is loadable by the current class
-- loader -- confirm for exotic (array/primitive) class names.
getClass :: forall a. Class a => Proxy a -> JClass a
getClass _ = forName (classIdentifier (proxy# :: Proxy# a))

foreign import java unsafe "@static java.lang.Class.forName" forName :: String -> JClass a
-- | Runtime class of an object (Java's @obj.getClass()@).
foreign import java unsafe "getClass" classObject :: (a <: Object) => a -> JClass a

-- | Java's @Object.toString()@, returning a 'JString'.
foreign import java unsafe toString :: (a <: Object) => a -> JString

-- | Java's @Object.hashCode()@.
foreign import java unsafe hashCode :: (a <: Object) => a -> Int

-- | Java's @Object.equals(Object)@, between any two reference types.
foreign import java unsafe equals :: (a <: Object, b <: Object)
                                  => a -> b -> Bool

-- Unboxed-object variants of equals/toString, for low-level code.
foreign import java unsafe "equals" eqObject# :: Object# a -> Object# b -> Bool

foreign import java unsafe "toString" toString# :: Object# a -> String

-- | Checked cast helper: Just the target type if the object is an
-- instance of the given class, Nothing otherwise.
foreign import java unsafe "@static eta.base.Utils.convertInstanceOfObject"
  castObject :: (t <: Object, o <: Object) => o -> JClass t -> Maybe t

-- | Java's @instanceof@ test against a class object.
foreign import java unsafe "@static eta.base.Utils.instanceOf"
  instanceOf :: (o <: Object) => o -> JClass t -> Bool
{-# INLINE safeDowncast #-}
-- | Checked downcast between reference types: 'Nothing' when the object is
-- not an instance of the target class @b@.
safeDowncast :: forall a b. (Class a, Class b) => a -> Maybe b
safeDowncast x = castObject x (getClass (Proxy :: Proxy b))
-- Start java.lang.Void
-- | Placeholder for @java.lang.Void@ (uninstantiable in Java).
data {-# CLASS "java.lang.Void" #-} Void = Void (Object# Void)
  deriving Class
-- End java.lang.Void

-- Start java.util.Comparator
data {-# CLASS "java.util.Comparator" #-} Comparator t = Comparator (Object# (Comparator t))
  deriving Class

-- | @Comparator.compare@.  NB: shadows 'Prelude.compare' when this module
-- is imported unqualified without an import list.
foreign import java unsafe "@interface compare"
  compare :: (t <: Object, b <: (Comparator t)) => t -> t -> Java b Int

-- | @Comparator.equals@ (renamed to avoid clashing with 'equals' above).
foreign import java unsafe "@interface equals"
  equalsComparator :: (t <: Object, b <: (Comparator t)) => Object -> Java b Bool
-- End java.util.Comparator

-- Start java.lang.Enum
data {-# CLASS "java.lang.Enum" #-} Enum e = Enum (Object# (Enum e))
  deriving Class

-- An Enum inherits from Object and is Comparable to its own element type.
type instance Inherits (Enum e) = '[Object, Comparable e]

foreign import java unsafe getDeclaringClass :: (e <: Enum e) => Java e (JClass e)

foreign import java unsafe name :: (e <: Enum e) => Java e String

foreign import java unsafe ordinal :: (e <: Enum e) => Java e Int
-- End java.lang.Enum
|
rahulmutt/ghcvm
|
libraries/base/Java/Utils.hs
|
bsd-3-clause
| 3,171
| 27
| 10
| 614
| 836
| 464
| 372
| -1
| -1
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1998
\section[DataCon]{@DataCon@: Data Constructors}
-}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module DataCon (
-- * Main data types
DataCon, DataConRep(..),
SrcStrictness(..), SrcUnpackedness(..),
HsSrcBang(..), HsImplBang(..),
StrictnessMark(..),
ConTag,
-- ** Equality specs
EqSpec, mkEqSpec, eqSpecTyVar, eqSpecType,
eqSpecPair, eqSpecPreds,
substEqSpec,
-- ** Field labels
FieldLbl(..), FieldLabel, FieldLabelString,
-- ** Type construction
mkDataCon, buildAlgTyCon, fIRST_TAG,
-- ** Type deconstruction
dataConRepType, dataConSig, dataConInstSig, dataConFullSig,
dataConName, dataConIdentity, dataConTag, dataConTyCon,
dataConOrigTyCon, dataConUserType,
dataConUnivTyVars, dataConExTyVars, dataConAllTyVars,
dataConEqSpec, dataConTheta,
dataConStupidTheta,
dataConInstArgTys, dataConOrigArgTys, dataConOrigResTy,
dataConInstOrigArgTys, dataConRepArgTys,
dataConFieldLabels, dataConFieldType,
dataConSrcBangs,
dataConSourceArity, dataConRepArity, dataConRepRepArity,
dataConIsInfix,
dataConWorkId, dataConWrapId, dataConWrapId_maybe,
dataConImplicitTyThings,
dataConRepStrictness, dataConImplBangs, dataConBoxer,
splitDataProductType_maybe,
-- ** Predicates on DataCons
isNullarySrcDataCon, isNullaryRepDataCon, isTupleDataCon, isUnboxedTupleCon,
isVanillaDataCon, classDataCon, dataConCannotMatch,
isBanged, isMarkedStrict, eqHsBang, isSrcStrict, isSrcUnpacked,
specialPromotedDc, isLegacyPromotableDataCon, isLegacyPromotableTyCon,
-- ** Promotion related functions
promoteDataCon
) where
#include "HsVersions.h"
import {-# SOURCE #-} MkId( DataConBoxer )
import Type
import ForeignCall ( CType )
import Coercion
import Unify
import TyCon
import FieldLabel
import Class
import Name
import PrelNames
import NameEnv
import Var
import Outputable
import ListSetOps
import Util
import BasicTypes
import FastString
import Module
import Binary
import qualified Data.Data as Data
import qualified Data.Typeable
import Data.Char
import Data.Word
import Data.List( mapAccumL, find )
{-
Data constructor representation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following Haskell data type declaration
data T = T !Int ![Int]
Using the strictness annotations, GHC will represent this as
data T = T Int# [Int]
That is, the Int has been unboxed. Furthermore, the Haskell source construction
T e1 e2
is translated to
case e1 of { I# x ->
case e2 of { r ->
T x r }}
That is, the first argument is unboxed, and the second is evaluated. Finally,
pattern matching is translated too:
case e of { T a b -> ... }
becomes
case e of { T a' b -> let a = I# a' in ... }
To keep ourselves sane, we name the different versions of the data constructor
differently, as follows.
Note [Data Constructor Naming]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Each data constructor C has two, and possibly up to four, Names associated with it:
OccName Name space Name of Notes
---------------------------------------------------------------------------
The "data con itself" C DataName DataCon In dom( GlobalRdrEnv )
The "worker data con" C VarName Id The worker
The "wrapper data con" $WC VarName Id The wrapper
The "newtype coercion" :CoT TcClsName TyCon
EVERY data constructor (incl for newtypes) has the former two (the
data con itself, and its worker. But only some data constructors have a
wrapper (see Note [The need for a wrapper]).
Each of these three has a distinct Unique. The "data con itself" name
appears in the output of the renamer, and names the Haskell-source
data constructor. The type checker translates it into either the wrapper Id
(if it exists) or worker Id (otherwise).
The data con has one or two Ids associated with it:
The "worker Id", is the actual data constructor.
* Every data constructor (newtype or data type) has a worker
* The worker is very like a primop, in that it has no binding.
* For a *data* type, the worker *is* the data constructor;
it has no unfolding
* For a *newtype*, the worker has a compulsory unfolding which
does a cast, e.g.
newtype T = MkT Int
The worker for MkT has unfolding
\\(x:Int). x `cast` sym CoT
Here CoT is the type constructor, witnessing the FC axiom
axiom CoT : T = Int
The "wrapper Id", \$WC, goes as follows
* Its type is exactly what it looks like in the source program.
* It is an ordinary function, and it gets a top-level binding
like any other function.
* The wrapper Id isn't generated for a data type if there is
nothing for the wrapper to do. That is, if its defn would be
\$wC = C
Note [The need for a wrapper]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Why might the wrapper have anything to do? Two reasons:
* Unboxing strict fields (with -funbox-strict-fields)
data T = MkT !(Int,Int)
\$wMkT :: (Int,Int) -> T
\$wMkT (x,y) = MkT x y
    Notice that the worker has two fields where the wrapper has
just one. That is, the worker has type
MkT :: Int -> Int -> T
* Equality constraints for GADTs
data T a where { MkT :: a -> T [a] }
The worker gets a type with explicit equality
constraints, thus:
MkT :: forall a b. (a=[b]) => b -> T a
The wrapper has the programmer-specified type:
\$wMkT :: a -> T [a]
\$wMkT a x = MkT [a] a [a] x
The third argument is a coercion
[a] :: [a]~[a]
INVARIANT: the dictionary constructor for a class
never has a wrapper.
A note about the stupid context
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Data types can have a context:
data (Eq a, Ord b) => T a b = T1 a b | T2 a
and that makes the constructors have a context too
(notice that T2's context is "thinned"):
T1 :: (Eq a, Ord b) => a -> b -> T a b
T2 :: (Eq a) => a -> T a b
Furthermore, this context pops up when pattern matching
(though GHC hasn't implemented this, but it is in H98, and
I've fixed GHC so that it now does):
f (T2 x) = x
gets inferred type
f :: Eq a => T a b -> a
I say the context is "stupid" because the dictionaries passed
are immediately discarded -- they do nothing and have no benefit.
It's a flaw in the language.
Up to now [March 2002] I have put this stupid context into the
type of the "wrapper" constructors functions, T1 and T2, but
that turned out to be jolly inconvenient for generics, and
record update, and other functions that build values of type T
(because they don't have suitable dictionaries available).
So now I've taken the stupid context out. I simply deal with
it separately in the type checker on occurrences of a
constructor, either in an expression or in a pattern.
[May 2003: actually I think this decision could easily be
reversed now, and probably should be. Generics could be
disabled for types with a stupid context; record updates now
(H98) needs the context too; etc. It's an unforced change, so
I'm leaving it for now --- but it does seem odd that the
wrapper doesn't include the stupid context.]
[July 04] With the advent of generalised data types, it's less obvious
what the "stupid context" is. Consider
C :: forall a. Ord a => a -> a -> T (Foo a)
Does the C constructor in Core contain the Ord dictionary? Yes, it must:
f :: T b -> Ordering
f = /\b. \x:T b.
case x of
C a (d:Ord a) (p:a) (q:a) -> compare d p q
Note that (Foo a) might not be an instance of Ord.
************************************************************************
* *
\subsection{Data constructors}
* *
************************************************************************
-}
-- | A data constructor
--
--  - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
--    'ApiAnnotation.AnnClose','ApiAnnotation.AnnComma'

--  For details on above see note [Api annotations] in ApiAnnotation
data DataCon
  = MkData {
        dcName    :: Name,      -- This is the name of the *source data con*
                                -- (see "Note [Data Constructor Naming]" above)
        dcUnique :: Unique,     -- Cached from Name
        dcTag    :: ConTag,     -- ^ Tag, used for ordering 'DataCon's

        -- Running example:
        --
        --      *** As declared by the user
        --  data T a where
        --    MkT :: forall x y. (x~y,Ord x) => x -> y -> T (x,y)

        --      *** As represented internally
        --  data T a where
        --    MkT :: forall a. forall x y. (a~(x,y),x~y,Ord x) => x -> y -> T a
        --
        -- The next six fields express the type of the constructor, in pieces
        -- e.g.
        --
        --      dcUnivTyVars  = [a]
        --      dcExTyVars    = [x,y]
        --      dcEqSpec      = [a~(x,y)]
        --      dcOtherTheta  = [x~y, Ord x]
        --      dcOrigArgTys  = [x,y]
        --      dcRepTyCon    = T

        dcVanilla :: Bool,      -- True <=> This is a vanilla Haskell 98 data constructor
                                --          Its type is of form
                                --              forall a1..an . t1 -> ... tm -> T a1..an
                                --          No existentials, no coercions, nothing.
                                -- That is: dcExTyVars = dcEqSpec = dcOtherTheta = []
                -- NB 1: newtypes always have a vanilla data con
                -- NB 2: a vanilla constructor can still be declared in GADT-style
                --       syntax, provided its type looks like the above.
                --       The declaration format is held in the TyCon (algTcGadtSyntax)

        dcUnivTyVars :: [TyVar],        -- Universally-quantified type vars [a,b,c]
                                        -- INVARIANT: length matches arity of the dcRepTyCon
                                        --- result type of (rep) data con is exactly (T a b c)

        dcExTyVars   :: [TyVar],        -- Existentially-quantified type vars
                -- In general, the dcUnivTyVars are NOT NECESSARILY THE SAME AS THE TYVARS
                -- FOR THE PARENT TyCon. With GADTs the data con might not even have
                -- the same number of type variables.
                -- [This is a change (Oct05): previously, vanilla datacons guaranteed to
                --  have the same type variables as their parent TyCon, but that seems ugly.]
                -- INVARIANT: the UnivTyVars and ExTyVars all have distinct OccNames
                -- Reason: less confusing, and easier to generate IfaceSyn

        dcEqSpec :: [EqSpec],   -- Equalities derived from the result type,
                                -- _as written by the programmer_

                -- This field allows us to move conveniently between the two ways
                -- of representing a GADT constructor's type:
                --      MkT :: forall a b. (a ~ [b]) => b -> T a
                --      MkT :: forall b. b -> T [b]
                -- Each equality is of the form (a ~ ty), where 'a' is one of
                -- the universally quantified type variables

                -- The next two fields give the type context of the data constructor
                -- (aside from the GADT constraints,
                --  which are given by the dcEqSpec)
                -- In GADT form, this is *exactly* what the programmer writes, even if
                -- the context constrains only universally quantified variables
                --      MkT :: forall a b. (a ~ b, Ord b) => a -> T a b
        dcOtherTheta :: ThetaType,  -- The other constraints in the data con's type
                                    -- other than those in the dcEqSpec

        dcStupidTheta :: ThetaType,     -- The context of the data type declaration
                                        --      data Eq a => T a = ...
                                        -- or, rather, a "thinned" version thereof
                -- "Thinned", because the Report says
                -- to eliminate any constraints that don't mention
                -- tyvars free in the arg types for this constructor
                --
                -- INVARIANT: the free tyvars of dcStupidTheta are a subset of dcUnivTyVars
                -- Reason: dcStupidTheta is gotten by thinning the stupid theta from the tycon
                --
                -- "Stupid", because the dictionaries aren't used for anything.
                -- Indeed, [as of March 02] they are no longer in the type of
                -- the wrapper Id, because that makes it harder to use the wrap-id
                -- to rebuild values after record selection or in generics.

        dcOrigArgTys :: [Type],         -- Original argument types
                                        -- (before unboxing and flattening of strict fields)
        dcOrigResTy :: Type,            -- Original result type, as seen by the user
                -- NB: for a data instance, the original user result type may
                -- differ from the DataCon's representation TyCon.  Example
                --      data instance T [a] where MkT :: a -> T [a]
                -- The OrigResTy is T [a], but the dcRepTyCon might be :T123

        -- Now the strictness annotations and field labels of the constructor
        dcSrcBangs :: [HsSrcBang],
                -- See Note [Bangs on data constructor arguments]
                --
                -- The [HsSrcBang] as written by the programmer.
                --
                -- Matches 1-1 with dcOrigArgTys
                -- Hence length = dataConSourceArity dataCon

        dcFields  :: [FieldLabel],
                -- Field labels for this constructor, in the
                -- same order as the dcOrigArgTys;
                -- length = 0 (if not a record) or dataConSourceArity.

        -- The curried worker function that corresponds to the constructor:
        -- It doesn't have an unfolding; the code generator saturates these Ids
        -- and allocates a real constructor when it finds one.
        dcWorkId :: Id,

        -- Constructor representation
        dcRep      :: DataConRep,

        -- Cached; see Note [DataCon arities] (if present in this version)
        -- dcRepArity == length dataConRepArgTys
        dcRepArity    :: Arity,
        -- dcSourceArity == length dcOrigArgTys
        dcSourceArity :: Arity,

        -- Result type of constructor is T t1..tn
        dcRepTyCon  :: TyCon,           -- Result tycon, T

        dcRepType   :: Type,    -- Type of the constructor
                                --      forall a x y. (a~(x,y), x~y, Ord x) =>
                                --        x -> y -> T a
                                -- (this is *not* of the constructor wrapper Id:
                                --  see Note [Data con representation] below)
        -- Notice that the existential type parameters come *second*.
        -- Reason: in a case expression we may find:
        --      case (e :: T t) of
        --        MkT x y co1 co2 (d:Ord x) (v:r) (w:F s) -> ...
        -- It's convenient to apply the rep-type of MkT to 't', to get
        --      forall x y. (t~(x,y), x~y, Ord x) => x -> y -> T t
        -- and use that to check the pattern.  Mind you, this is really only
        -- used in CoreLint.

        dcInfix :: Bool,        -- True <=> declared infix
                                -- Used for Template Haskell and 'deriving' only
                                -- The actual fixity is stored elsewhere

        dcPromoted :: TyCon    -- The promoted TyCon
                               -- See Note [Promoted data constructors] in TyCon
  }
  deriving Data.Typeable.Typeable
-- | How the data constructor is represented at runtime: either the worker
-- alone ('NoDataConRep'), or a wrapper that massages the source arguments
-- into the worker's representation arguments.
data DataConRep
  = NoDataConRep              -- No wrapper

  | DCR { dcr_wrap_id :: Id   -- Takes src args, unboxes/flattens,
                              -- and constructs the representation

        , dcr_boxer   :: DataConBoxer

        , dcr_arg_tys :: [Type]  -- Final, representation argument types,
                                 -- after unboxing and flattening,
                                 -- and *including* all evidence args

        , dcr_stricts :: [StrictnessMark]  -- 1-1 with dcr_arg_tys
                -- See also Note [Data-con worker strictness] in MkId.hs

        , dcr_bangs :: [HsImplBang]  -- The actual decisions made (including failures)
                                     -- about the original arguments; 1-1 with orig_arg_tys
                                     -- See Note [Bangs on data constructor arguments]

    }
-- Algebraic data types always have a worker, and
-- may or may not have a wrapper, depending on whether
-- the wrapper does anything.
--
-- Data types have a worker with no unfolding
-- Newtypes just have a worker, which has a compulsory unfolding (just a cast)
-- _Neither_ the worker _nor_ the wrapper take the dcStupidTheta dicts as arguments
-- The wrapper (if it exists) takes dcOrigArgTys as its arguments
-- The worker takes dataConRepArgTys as its arguments
-- If the worker is absent, dataConRepArgTys is the same as dcOrigArgTys
-- The 'NoDataConRep' case is important
-- Not only is this efficient,
-- but it also ensures that the wrapper is replaced
-- by the worker (because it *is* the worker)
-- even when there are no args. E.g. in
-- f (:) x
-- the (:) *is* the worker.
-- This is really important in rule matching,
-- (We could match on the wrappers,
-- but that makes it less likely that rules will match
-- when we bring bits of unfoldings together.)
-------------------------
-- | Bangs on data constructor arguments as the user wrote them in the
-- source code.
--
-- (HsSrcBang _ SrcUnpack SrcLazy) and
-- (HsSrcBang _ SrcUnpack NoSrcStrict) (without StrictData) makes no sense, we
-- emit a warning (in checkValidDataCon) and treat it like
-- (HsSrcBang _ NoSrcUnpack SrcLazy)
data HsSrcBang =
  HsSrcBang (Maybe SourceText) -- Note [Pragma source text] in BasicTypes
            SrcUnpackedness    -- what unpack pragma the user wrote, if any
            SrcStrictness      -- what strictness mark the user wrote, if any
  deriving (Data.Data, Data.Typeable)
-- | Bangs of data constructor arguments as generated by the compiler
-- after consulting HsSrcBang, flags, etc.
-- See Note [Bangs on data constructor arguments].
data HsImplBang
  = HsLazy  -- ^ Lazy field
  | HsStrict  -- ^ Strict but not unpacked field
  | HsUnpack (Maybe Coercion)
    -- ^ Strict and unpacked field
    -- co :: arg-ty ~ product-ty HsBang
  deriving (Data.Data, Data.Typeable)
-- | What strictness annotation the user wrote
data SrcStrictness = SrcLazy -- ^ Lazy, ie '~'
                   | SrcStrict -- ^ Strict, ie '!'
                   | NoSrcStrict -- ^ no strictness annotation
     deriving (Eq, Data.Data, Data.Typeable)
-- | What unpackedness the user requested
data SrcUnpackedness = SrcUnpack -- ^ {-# UNPACK #-} specified
                     | SrcNoUnpack -- ^ {-# NOUNPACK #-} specified
                     | NoSrcUnpack -- ^ no unpack pragma
     deriving (Eq, Data.Data, Data.Typeable)
-------------------------
-- StrictnessMark is internal only, used to indicate strictness
-- of the DataCon *worker* fields
data StrictnessMark = MarkedStrict | NotMarkedStrict
-- | An 'EqSpec' is a tyvar/type pair representing an equality made in
-- rejigging a GADT constructor: the universal tyvar (left) must equal
-- the type (right) for the constructor to apply.
data EqSpec = EqSpec TyVar  -- the universally-quantified variable
                     Type   -- the type it is forced to equal
-- | Make an 'EqSpec' pairing a universal tyvar with the type it must equal.
mkEqSpec :: TyVar -> Type -> EqSpec
mkEqSpec = EqSpec
-- | Project the type-variable (left-hand) side of an 'EqSpec'.
eqSpecTyVar :: EqSpec -> TyVar
eqSpecTyVar = fst . eqSpecPair

-- | Project the type (right-hand) side of an 'EqSpec'.
eqSpecType :: EqSpec -> Type
eqSpecType = snd . eqSpecPair

-- | View an 'EqSpec' as a (tyvar, type) pair.
eqSpecPair :: EqSpec -> (TyVar, Type)
eqSpecPair (EqSpec tv ty) = (tv, ty)

-- | Render a list of 'EqSpec's as primitive (unlifted) equality predicates.
eqSpecPreds :: [EqSpec] -> ThetaType
eqSpecPreds = map (\(EqSpec tv ty) -> mkPrimEqPred (mkTyVarTy tv) ty)
-- | Substitute in an 'EqSpec'. Precondition: if the LHS of the EqSpec
-- is mapped in the substitution, it is mapped to a type variable, not
-- a full type.
substEqSpec :: TCvSubst -> EqSpec -> EqSpec
substEqSpec subst (EqSpec tv ty)
  = EqSpec tv' (substTy subst ty)
  where
    -- getTyVar panics unless the substituted LHS really is a tyvar,
    -- which is exactly the precondition stated above
    tv' = getTyVar "substEqSpec" (substTyVar subst tv)
-- | An 'EqSpec' prints exactly like the (tyvar, type) pair it denotes.
instance Outputable EqSpec where
  ppr spec = ppr (eqSpecPair spec)
{- Note [Bangs on data constructor arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T = MkT !Int {-# UNPACK #-} !Int Bool
When compiling the module, GHC will decide how to represent
MkT, depending on the optimisation level, and settings of
flags like -funbox-small-strict-fields.
Terminology:
* HsSrcBang: What the user wrote
Constructors: HsSrcBang
* HsImplBang: What GHC decided
Constructors: HsLazy, HsStrict, HsUnpack
* If T was defined in this module, MkT's dcSrcBangs field
records the [HsSrcBang] of what the user wrote; in the example
[ HsSrcBang _ NoSrcUnpack SrcStrict
, HsSrcBang _ SrcUnpack SrcStrict
      , HsSrcBang _ NoSrcUnpack NoSrcStrict]
* However, if T was defined in an imported module, the importing module
must follow the decisions made in the original module, regardless of
the flag settings in the importing module.
Also see Note [Bangs on imported data constructors] in MkId
* The dcr_bangs field of the dcRep field records the [HsImplBang]
If T was defined in this module, Without -O the dcr_bangs might be
[HsStrict, HsStrict, HsLazy]
With -O it might be
[HsStrict, HsUnpack _, HsLazy]
With -funbox-small-strict-fields it might be
[HsUnpack, HsUnpack _, HsLazy]
With -XStrictData it might be
[HsStrict, HsUnpack _, HsStrict]
Note [Data con representation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The dcRepType field contains the type of the representation of a constructor
This may differ from the type of the constructor *Id* (built
by MkId.mkDataConId) for two reasons:
a) the constructor Id may be overloaded, but the dictionary isn't stored
e.g. data Eq a => T a = MkT a a
b) the constructor may store an unboxed version of a strict field.
Here's an example illustrating both:
        data Ord a => T a = MkT !Int a
Here
T :: Ord a => Int -> a -> T a
but the rep type is
Trep :: Int# -> a -> T a
Actually, the unboxed part isn't implemented yet!
************************************************************************
* *
\subsection{Instances}
* *
************************************************************************
-}
-- | Data constructors are compared purely by their 'Unique'.
instance Eq DataCon where
    a == b = getUnique a == getUnique b
    a /= b = not (a == b)

-- | Ordering on data constructors is inherited from their 'Unique's.
instance Ord DataCon where
    compare a b = getUnique a `compare` getUnique b
    a <  b = compare a b == LT
    a <= b = compare a b /= GT
    a >  b = compare a b == GT
    a >= b = compare a b /= LT
instance Uniquable DataCon where
    getUnique = dcUnique
instance NamedThing DataCon where
    getName = dcName
-- A DataCon is printed by its (occurrence) name.
instance Outputable DataCon where
    ppr con = ppr (dataConName con)
instance OutputableBndr DataCon where
    pprInfixOcc con = pprInfixName (dataConName con)
    pprPrefixOcc con = pprPrefixName (dataConName con)
-- Deliberately abstract: we never want generic traversals to walk
-- into a DataCon.
instance Data.Data DataCon where
    -- don't traverse?
    toConstr _ = abstractConstr "DataCon"
    gunfold _ _ = error "gunfold"
    dataTypeOf _ = mkNoRepType "DataCon"
instance Outputable HsSrcBang where
    ppr (HsSrcBang _ prag mark) = ppr prag <+> ppr mark
instance Outputable HsImplBang where
    ppr HsLazy = text "Lazy"
    ppr (HsUnpack Nothing) = text "Unpacked"
    ppr (HsUnpack (Just co)) = text "Unpacked" <> parens (ppr co)
    ppr HsStrict = text "StrictNotUnpacked"
-- Printed exactly as the user would write them ('~', '!', or nothing).
instance Outputable SrcStrictness where
    ppr SrcLazy = char '~'
    ppr SrcStrict = char '!'
    ppr NoSrcStrict = empty
instance Outputable SrcUnpackedness where
    ppr SrcUnpack = text "{-# UNPACK #-}"
    ppr SrcNoUnpack = text "{-# NOUNPACK #-}"
    ppr NoSrcUnpack = empty
instance Outputable StrictnessMark where
    ppr MarkedStrict = text "!"
    ppr NotMarkedStrict = empty
-- | Binary (de)serialisation for 'SrcStrictness', used in interface files.
--
-- The encoding must round-trip: 'put_' writes SrcLazy/SrcStrict/NoSrcStrict
-- as tags 0/1/2, so 'get' must map those tags back to the same constructors.
-- BUG FIX: tag 1 was previously decoded as 'SrcLazy', silently turning
-- every user-written strictness annotation lazy when read back from an
-- interface file.
instance Binary SrcStrictness where
    put_ bh SrcLazy     = putByte bh 0
    put_ bh SrcStrict   = putByte bh 1
    put_ bh NoSrcStrict = putByte bh 2

    get bh =
      do h <- getByte bh
         case h of
           0 -> return SrcLazy
           1 -> return SrcStrict   -- was SrcLazy; must invert put_
           _ -> return NoSrcStrict
-- | Binary (de)serialisation for 'SrcUnpackedness' as a single byte tag;
-- 'get' is the exact inverse of 'put_'.
instance Binary SrcUnpackedness where
    put_ bh su = putByte bh $ case su of
                   SrcNoUnpack -> 0
                   SrcUnpack   -> 1
                   NoSrcUnpack -> 2

    get bh = do tag <- getByte bh
                case tag of
                  0 -> return SrcNoUnpack
                  1 -> return SrcUnpack
                  _ -> return NoSrcUnpack
-- | Compare strictness annotations.  Two 'HsUnpack's with coercions are
-- considered equal when the coercions have equal *types* (we do not
-- compare the coercions themselves).
eqHsBang :: HsImplBang -> HsImplBang -> Bool
eqHsBang HsLazy HsLazy = True
eqHsBang HsStrict HsStrict = True
eqHsBang (HsUnpack Nothing) (HsUnpack Nothing) = True
eqHsBang (HsUnpack (Just c1)) (HsUnpack (Just c2))
  = eqType (coercionType c1) (coercionType c2)
eqHsBang _ _ = False
-- | Is the field strict in the implementation (unpacked or plain strict)?
isBanged :: HsImplBang -> Bool
isBanged HsLazy = False
isBanged _      = True

-- | Did the user write a @!@ annotation on this field?
isSrcStrict :: SrcStrictness -> Bool
isSrcStrict s = case s of
                  SrcStrict -> True
                  _         -> False

-- | Did the user write an @UNPACK@ pragma on this field?
isSrcUnpacked :: SrcUnpackedness -> Bool
isSrcUnpacked s = case s of
                    SrcUnpack -> True
                    _         -> False

-- | Is this worker field marked strict?
isMarkedStrict :: StrictnessMark -> Bool
isMarkedStrict MarkedStrict    = True
isMarkedStrict NotMarkedStrict = False
{-
************************************************************************
* *
\subsection{Construction}
* *
************************************************************************
-}
-- | Build a new data constructor
mkDataCon :: Name
          -> Bool           -- ^ Is the constructor declared infix?
          -> TyConRepName   -- ^ TyConRepName for the promoted TyCon
          -> [HsSrcBang]    -- ^ Strictness/unpack annotations, from user
          -> [FieldLabel]   -- ^ Field labels for the constructor,
                            -- if it is a record, otherwise empty
          -> [TyVar]        -- ^ Universally quantified type variables
          -> [TyVar]        -- ^ Existentially quantified type variables
          -> [EqSpec]       -- ^ GADT equalities
          -> ThetaType      -- ^ Theta-type occurring before the arguments proper
          -> [Type]         -- ^ Original argument types
          -> Type           -- ^ Original result type
          -> RuntimeRepInfo -- ^ See comments on 'TyCon.RuntimeRepInfo'
          -> TyCon          -- ^ Representation type constructor
          -> ThetaType      -- ^ The "stupid theta", context of the data
                            -- declaration e.g. @data Eq a => T a ...@
          -> Id             -- ^ Worker Id
          -> DataConRep     -- ^ Representation
          -> DataCon
-- Can get the tag from the TyCon
mkDataCon name declared_infix prom_info
          arg_stricts   -- Must match orig_arg_tys 1-1
          fields
          univ_tvs ex_tvs
          eq_spec theta
          orig_arg_tys orig_res_ty rep_info rep_tycon
          stupid_theta work_id rep
-- Warning: mkDataCon is not a good place to check invariants.
-- If the programmer writes the wrong result type in the decl, thus:
--      data T a where { MkT :: S }
-- then it's possible that the univ_tvs may hit an assertion failure
-- if you pull on univ_tvs. This case is checked by checkValidDataCon,
-- so the error is detected properly... it's just that assertions here
-- are a little dodgy.
  = con
  where
    -- Haskell-98 style: no existentials, no GADT equalities, no context
    is_vanilla = null ex_tvs && null eq_spec && null theta
    -- NB: 'con' is knot-tied: 'tag' and 'rep_arg_tys' below mention it
    con = MkData {dcName = name, dcUnique = nameUnique name,
                  dcVanilla = is_vanilla, dcInfix = declared_infix,
                  dcUnivTyVars = univ_tvs, dcExTyVars = ex_tvs,
                  dcEqSpec = eq_spec,
                  dcOtherTheta = theta,
                  dcStupidTheta = stupid_theta,
                  dcOrigArgTys = orig_arg_tys, dcOrigResTy = orig_res_ty,
                  dcRepTyCon = rep_tycon,
                  dcSrcBangs = arg_stricts,
                  dcFields = fields, dcTag = tag, dcRepType = rep_ty,
                  dcWorkId = work_id,
                  dcRep = rep,
                  dcSourceArity = length orig_arg_tys,
                  dcRepArity = length rep_arg_tys,
                  dcPromoted = promoted }
        -- The 'arg_stricts' passed to mkDataCon are simply those for the
        -- source-language arguments.  We add extra ones for the
        -- dictionary arguments right here.
    tag = assoc "mkDataCon" (tyConDataCons rep_tycon `zip` [fIRST_TAG..]) con
    rep_arg_tys = dataConRepArgTys con
    -- NB: This type is user-facing for datatypes that don't need wrappers;
    -- so it's important to use mkSpecForAllTys
    rep_ty = mkSpecForAllTys univ_tvs $ mkSpecForAllTys ex_tvs $
             mkFunTys rep_arg_tys $
             mkTyConApp rep_tycon (mkTyVarTys univ_tvs)
      -- See Note [Promoted data constructors] in TyCon
    prom_binders = map (mkNamedBinder Specified)
                       ((univ_tvs `minusList` map eqSpecTyVar eq_spec) ++
                        ex_tvs) ++
                   map mkAnonBinder theta ++
                   map mkAnonBinder orig_arg_tys
    prom_res_kind = orig_res_ty
    promoted
      = mkPromotedDataCon con name prom_info prom_binders prom_res_kind roles rep_info
    roles = map (const Nominal) (univ_tvs ++ ex_tvs) ++
            map (const Representational) orig_arg_tys
-- | The 'Name' of the 'DataCon', giving it a unique, rooted identification
dataConName :: DataCon -> Name
dataConName = dcName
-- | The tag used for ordering 'DataCon's within their 'TyCon'
-- (assigned from fIRST_TAG in 'mkDataCon')
dataConTag :: DataCon -> ConTag
dataConTag  = dcTag
-- | The (representation) type constructor that we are building via this
-- data constructor
dataConTyCon :: DataCon -> TyCon
dataConTyCon = dcRepTyCon
-- | The original type constructor used in the definition of this data
-- constructor. In case of a data family instance, that will be the family
-- type constructor; otherwise it is the representation tycon itself.
dataConOrigTyCon :: DataCon -> TyCon
dataConOrigTyCon dc
  | Just (tc, _) <- tyConFamInst_maybe (dcRepTyCon dc) = tc
  | otherwise                                          = dcRepTyCon dc
-- | The representation type of the data constructor, i.e. the sort
-- type that will represent values of this type at runtime
dataConRepType :: DataCon -> Type
dataConRepType = dcRepType
-- | Should the 'DataCon' be presented infix?
dataConIsInfix :: DataCon -> Bool
dataConIsInfix = dcInfix
-- | The universally-quantified type variables of the constructor
dataConUnivTyVars :: DataCon -> [TyVar]
dataConUnivTyVars = dcUnivTyVars
-- | The existentially-quantified type variables of the constructor
dataConExTyVars :: DataCon -> [TyVar]
dataConExTyVars = dcExTyVars
-- | Both the universal and existential type variables of the constructor,
-- universals first.
dataConAllTyVars :: DataCon -> [TyVar]
dataConAllTyVars dc = dcUnivTyVars dc ++ dcExTyVars dc
-- | Equalities derived from the result type of the data constructor, as written
-- by the programmer in any GADT declaration. This includes *all* GADT-like
-- equalities, including those written in by hand by the programmer
-- (i.e. @~@ and @~~@ constraints in the theta are rendered as EqSpecs too,
-- whenever one side is a bare type variable).
dataConEqSpec :: DataCon -> [EqSpec]
dataConEqSpec (MkData { dcEqSpec = eq_spec, dcOtherTheta = theta })
  = eq_spec ++
    [ spec  -- heterogeneous equality (~~, four args: two kinds, two types)
    | Just (tc, [_k1, _k2, ty1, ty2]) <- map splitTyConApp_maybe theta
    , tc `hasKey` heqTyConKey
    , spec <- case (getTyVar_maybe ty1, getTyVar_maybe ty2) of
                  (Just tv1, _) -> [mkEqSpec tv1 ty2]
                  (_, Just tv2) -> [mkEqSpec tv2 ty1]
                  _             -> []
    ] ++
    [ spec  -- homogeneous equality (~, three args: one kind, two types)
    | Just (tc, [_k, ty1, ty2]) <- map splitTyConApp_maybe theta
    , tc `hasKey` eqTyConKey
    , spec <- case (getTyVar_maybe ty1, getTyVar_maybe ty2) of
                  (Just tv1, _) -> [mkEqSpec tv1 ty2]
                  (_, Just tv2) -> [mkEqSpec tv2 ty1]
                  _             -> []
    ]
-- | The *full* constraints on the constructor type: the GADT equalities
-- rendered as predicates, followed by the remaining context.
dataConTheta :: DataCon -> ThetaType
dataConTheta dc = eqSpecPreds (dcEqSpec dc) ++ dcOtherTheta dc
-- | Get the Id of the 'DataCon' worker: a function that is the "actual"
-- constructor and has no top level binding in the program. The type may
-- be different from the obvious one written in the source program.
dataConWorkId :: DataCon -> Id
dataConWorkId = dcWorkId
-- | Get the Id of the 'DataCon' wrapper, if any: the function that wraps
-- the "actual" constructor so it has the type visible in the source
-- program (c.f. 'dataConWorkId').  Returns 'Nothing' when there is no
-- wrapper, i.e. when the representation is 'NoDataConRep'.
dataConWrapId_maybe :: DataCon -> Maybe Id
dataConWrapId_maybe dc =
  case dcRep dc of
    DCR { dcr_wrap_id = wrap_id } -> Just wrap_id
    NoDataConRep                  -> Nothing
-- | Returns an Id which looks like the Haskell-source constructor: the
-- wrapper when it exists (see 'dataConWrapId_maybe'), failing over to
-- the worker (see 'dataConWorkId') otherwise.
dataConWrapId :: DataCon -> Id
dataConWrapId dc =
  case dcRep dc of
    DCR { dcr_wrap_id = wrap_id } -> wrap_id
    NoDataConRep                  -> dcWorkId dc  -- worker = wrapper
-- | Find all the 'Id's implicitly brought into scope by the data constructor. Currently,
-- the union of the 'dataConWorkId' and the 'dataConWrapId' (when present)
dataConImplicitTyThings :: DataCon -> [TyThing]
dataConImplicitTyThings (MkData { dcWorkId = work, dcRep = rep })
  = [AnId work] ++ wrap_ids
  where
    wrap_ids = case rep of
                 NoDataConRep               -> []
                 DCR { dcr_wrap_id = wrap } -> [AnId wrap]
-- | The labels for the fields of this particular 'DataCon';
-- empty unless it is a record constructor
dataConFieldLabels :: DataCon -> [FieldLabel]
dataConFieldLabels = dcFields
-- | Extract the type for any given labelled field of the 'DataCon'.
-- Panics if the label does not belong to this constructor; callers must
-- only pass labels from 'dataConFieldLabels'.
dataConFieldType :: DataCon -> FieldLabelString -> Type
dataConFieldType con label
  = case find ((== label) . flLabel . fst) (dcFields con `zip` dcOrigArgTys con) of
      Just (_, ty) -> ty
      Nothing -> pprPanic "dataConFieldType" (ppr con <+> ppr label)
-- | Strictness/unpack annotations, from user; or, for imported
-- DataCons, from the interface file
-- The list is in one-to-one correspondence with the arity of the 'DataCon'
dataConSrcBangs :: DataCon -> [HsSrcBang]
dataConSrcBangs = dcSrcBangs
-- | Source-level arity of the data constructor.
dataConSourceArity :: DataCon -> Arity
dataConSourceArity = dcSourceArity

-- | Number of actual fields in the /representation/ of the data
-- constructor.  May exceed the source arity: the extra ones are the
-- existentially quantified dictionaries.
dataConRepArity :: DataCon -> Arity
dataConRepArity = dcRepArity
-- | The number of fields in the /representation/ of the constructor
-- AFTER taking into account the unpacking of any unboxed tuple fields
dataConRepRepArity :: DataCon -> RepArity
dataConRepRepArity dc = typeRepArity (dataConRepArity dc) (dataConRepType dc)
-- | Does the 'DataCon' take no arguments in its original source type?
isNullarySrcDataCon :: DataCon -> Bool
isNullarySrcDataCon = null . dcOrigArgTys

-- | Does the 'DataCon' take no arguments in its runtime representation?
isNullaryRepDataCon :: DataCon -> Bool
isNullaryRepDataCon = (== 0) . dataConRepArity
-- | Give the demands on the arguments of a Core constructor application
-- (Con dc args); one mark per representation argument.
dataConRepStrictness :: DataCon -> [StrictnessMark]
dataConRepStrictness dc =
  case dcRep dc of
    DCR { dcr_stricts = strs } -> strs
    -- No wrapper: every representation argument is unmarked
    NoDataConRep -> map (const NotMarkedStrict) (dataConRepArgTys dc)

-- | The implementation decisions about the strictness/unpack of each
-- source program argument to the data constructor; one per source argument.
dataConImplBangs :: DataCon -> [HsImplBang]
dataConImplBangs dc =
  case dcRep dc of
    DCR { dcr_bangs = bangs } -> bangs
    -- No wrapper: nothing was unpacked, so every source argument is lazy
    NoDataConRep -> replicate (dcSourceArity dc) HsLazy

-- | The boxer of the wrapper, if the constructor has one.
dataConBoxer :: DataCon -> Maybe DataConBoxer
dataConBoxer dc =
  case dcRep dc of
    DCR { dcr_boxer = boxer } -> Just boxer
    NoDataConRep              -> Nothing
-- | The \"signature\" of the 'DataCon' returns, in order:
--
-- 1) The result of 'dataConAllTyVars',
--
-- 2) All the 'ThetaType's relating to the 'DataCon' (coercion, dictionary, implicit
--    parameter - whatever), i.e. 'dataConTheta'
--
-- 3) The type arguments to the constructor
--
-- 4) The /original/ result type of the 'DataCon'
dataConSig :: DataCon -> ([TyVar], ThetaType, [Type], Type)
dataConSig con@(MkData {dcUnivTyVars = univ_tvs, dcExTyVars = ex_tvs,
                        dcOrigArgTys = arg_tys, dcOrigResTy = res_ty})
  = (univ_tvs ++ ex_tvs, dataConTheta con, arg_tys, res_ty)
dataConInstSig
  :: DataCon
  -> [Type]    -- Instantiate the *universal* tyvars with these types
  -> ([TyVar], ThetaType, [Type])  -- Return instantiated existentials
                                   -- theta and arg tys
-- ^ Instantiate the universal tyvars of a data con,
--   returning the instantiated existentials, constraints, and args
dataConInstSig (MkData { dcUnivTyVars = univ_tvs, dcExTyVars = ex_tvs
                       , dcEqSpec = eq_spec, dcOtherTheta = theta
                       , dcOrigArgTys = arg_tys })
               univ_tys
  = (ex_tvs'
    , substTheta subst (eqSpecPreds eq_spec ++ theta)
    , substTys subst arg_tys)
  where
    univ_subst = zipTvSubst univ_tvs univ_tys
    -- Thread the substitution through the existential binders so that
    -- their kinds are instantiated too
    (subst, ex_tvs') = mapAccumL Type.substTyVarBndr univ_subst ex_tvs
-- | The \"full signature\" of the 'DataCon' returns, in order:
--
-- 1) The result of 'dataConUnivTyVars'
--
-- 2) The result of 'dataConExTyVars'
--
-- 3) The GADT equalities
--
-- 4) The result of 'dataConDictTheta'
--
-- 5) The original argument types to the 'DataCon' (i.e. before
--    any change of the representation of the type)
--
-- 6) The original result type of the 'DataCon'
dataConFullSig :: DataCon
               -> ([TyVar], [TyVar], [EqSpec], ThetaType, [Type], Type)
dataConFullSig (MkData {dcUnivTyVars = univ_tvs, dcExTyVars = ex_tvs,
                        dcEqSpec = eq_spec, dcOtherTheta = theta,
                        dcOrigArgTys = arg_tys, dcOrigResTy = res_ty})
  = (univ_tvs, ex_tvs, eq_spec, theta, arg_tys, res_ty)
-- | The declared result type of the constructor, before any rejigging.
dataConOrigResTy :: DataCon -> Type
dataConOrigResTy = dcOrigResTy

-- | The \"stupid theta\" of the 'DataCon', such as @data Eq a@ in:
--
-- > data Eq a => T a = ...
dataConStupidTheta :: DataCon -> ThetaType
dataConStupidTheta = dcStupidTheta
dataConUserType :: DataCon -> Type
-- ^ The user-declared type of the data constructor
-- in the nice-to-read form:
--
-- > T :: forall a b. a -> b -> T [a]
--
-- rather than:
--
-- > T :: forall a c. forall b. (c~[a]) => a -> b -> T c
--
-- NB: If the constructor is part of a data instance, the result type
-- mentions the family tycon, not the internal one.
dataConUserType (MkData { dcUnivTyVars = univ_tvs,
                          dcExTyVars = ex_tvs, dcEqSpec = eq_spec,
                          dcOtherTheta = theta, dcOrigArgTys = arg_tys,
                          dcOrigResTy = res_ty })
  -- Universals fixed by an EqSpec are dropped: the equality is baked
  -- into the (family) result type the user sees
  = mkSpecForAllTys ((univ_tvs `minusList` map eqSpecTyVar eq_spec) ++
                     ex_tvs) $
    mkFunTys theta $
    mkFunTys arg_tys $
    res_ty
-- | Finds the instantiated types of the arguments required to construct a 'DataCon' representation
-- NB: these INCLUDE any dictionary args
--     but EXCLUDE the data-declaration context, which is discarded
-- It's all post-flattening etc; this is a representation type
dataConInstArgTys :: DataCon    -- ^ A datacon with no existentials or equality constraints
                                -- However, it can have a dcTheta (notably it can be a
                                -- class dictionary, with superclasses)
                  -> [Type]     -- ^ Instantiated at these types
                  -> [Type]
dataConInstArgTys dc@(MkData {dcUnivTyVars = univ_tvs,
                              dcExTyVars = ex_tvs}) inst_tys
 = ASSERT2( length univ_tvs == length inst_tys
          , text "dataConInstArgTys" <+> ppr dc $$ ppr univ_tvs $$ ppr inst_tys)
   ASSERT2( null ex_tvs, ppr dc )
   map (substTyWith univ_tvs inst_tys) (dataConRepArgTys dc)
-- | Returns just the instantiated /value/ argument types of a 'DataCon',
-- (excluding dictionary args)
dataConInstOrigArgTys
        :: DataCon      -- Works for any DataCon
        -> [Type]       -- Includes existential tyvar args, but NOT
                        -- equality constraints or dicts
        -> [Type]
-- For vanilla datacons, it's all quite straightforward
-- But for the call in MatchCon, we really do want just the value args
dataConInstOrigArgTys dc@(MkData {dcOrigArgTys = arg_tys,
                                  dcUnivTyVars = univ_tvs,
                                  dcExTyVars = ex_tvs}) inst_tys
  = ASSERT2( length tyvars == length inst_tys
           , text "dataConInstOrigArgTys" <+> ppr dc $$ ppr tyvars $$ ppr inst_tys )
    map (substTyWith tyvars inst_tys) arg_tys
  where
    -- Instantiation covers universals AND existentials, in that order
    tyvars = univ_tvs ++ ex_tvs
-- | The argument types of the wrapper, excluding all dictionary arguments
-- and without substituting for any type variables.
dataConOrigArgTys :: DataCon -> [Type]
dataConOrigArgTys = dcOrigArgTys
-- | Returns the arg types of the worker, including *all*
-- evidence, after any flattening has been done and without substituting for
-- any type variables
dataConRepArgTys :: DataCon -> [Type]
dataConRepArgTys (MkData { dcRep = rep
                         , dcEqSpec = eq_spec
                         , dcOtherTheta = theta
                         , dcOrigArgTys = orig_arg_tys })
  = case rep of
      -- No wrapper: worker takes the context dicts then the source args
      NoDataConRep -> ASSERT( null eq_spec ) theta ++ orig_arg_tys
      DCR { dcr_arg_tys = arg_tys } -> arg_tys
-- | The string @package:module.name@ identifying a constructor, which is attached
-- to its info table and used by the GHCi debugger and the heap profiler
dataConIdentity :: DataCon -> [Word8]
-- We want this string to be UTF-8, so we get the bytes directly from the FastStrings.
dataConIdentity dc = bytesFS (unitIdFS (moduleUnitId mod)) ++
                  fromIntegral (ord ':') : bytesFS (moduleNameFS (moduleName mod)) ++
                  fromIntegral (ord '.') : bytesFS (occNameFS (nameOccName name))
  where name = dataConName dc
        -- DataCon names are always external, so nameModule is safe here
        mod  = ASSERT( isExternalName name ) nameModule name
-- | Is this the data constructor of a (boxed or unboxed) tuple?
isTupleDataCon :: DataCon -> Bool
isTupleDataCon dc = isTupleTyCon (dcRepTyCon dc)

-- | Is this the data constructor of an unboxed tuple?
isUnboxedTupleCon :: DataCon -> Bool
isUnboxedTupleCon dc = isUnboxedTupleTyCon (dcRepTyCon dc)

-- | Vanilla 'DataCon's are those that are nice boring Haskell 98 constructors
isVanillaDataCon :: DataCon -> Bool
isVanillaDataCon = dcVanilla
-- | Should this DataCon be allowed in a type even without -XDataKinds?
-- Currently, only Lifted & Unlifted
specialPromotedDc :: DataCon -> Bool
specialPromotedDc = isKindTyCon . dataConTyCon
-- | Was this datacon promotable before GHC 8.0? That is, is it promotable
-- without -XTypeInType
isLegacyPromotableDataCon :: DataCon -> Bool
isLegacyPromotableDataCon dc
  =  null (dataConEqSpec dc)  -- no GADTs
  && null (dataConTheta dc)   -- no context
  && not (isFamInstTyCon (dataConTyCon dc))   -- no data instance constructors
  && all isLegacyPromotableTyCon (nameEnvElts $
                                  tyConsOfType (dataConUserType dc))
-- | Was this tycon promotable before GHC 8.0? That is, is it promotable
-- without -XTypeInType
isLegacyPromotableTyCon :: TyCon -> Bool
isLegacyPromotableTyCon tc
  = isVanillaAlgTyCon tc ||
      -- This returns True more often than it should, but it's quite painful
      -- to make this fully accurate. And no harm is caused; we just don't
      -- require -XTypeInType every time we need to. (We'll always require
      -- -XDataKinds, though, so there's no standards-compliance issue.)
    isFunTyCon tc || isKindTyCon tc
-- | The single data constructor of a class's dictionary 'TyCon'.
-- Asserts that the class tycon has exactly one constructor.
classDataCon :: Class -> DataCon
classDataCon clas = case tyConDataCons (classTyCon clas) of
                      (dict_constr:no_more) -> ASSERT( null no_more ) dict_constr
                      [] -> panic "classDataCon"
dataConCannotMatch :: [Type] -> DataCon -> Bool
-- Returns True iff the data con *definitely cannot* match a
--                  scrutinee of type (T tys)
--                  where T is the dcRepTyCon for the data con
dataConCannotMatch tys con
  -- Fast paths: no constraints, or fully polymorphic scrutinee,
  -- means no contradiction is possible
  | null inst_theta   = False   -- Common
  | all isTyVarTy tys = False   -- Also common
  | otherwise = typesCantMatch (concatMap predEqs inst_theta)
  where
    (_, inst_theta, _) = dataConInstSig con tys
    -- TODO: could gather equalities from superclasses too
    predEqs pred = case classifyPredType pred of
                     EqPred NomEq ty1 ty2 -> [(ty1, ty2)]
                     ClassPred eq [_, ty1, ty2]
                       | eq `hasKey` eqTyConKey -> [(ty1, ty2)]
                     _ -> []
{-
%************************************************************************
%* *
Promoting of data types to the kind level
* *
************************************************************************
-}
-- | The promoted 'TyCon' of this 'DataCon';
-- see Note [Promoted data constructors] in TyCon.
promoteDataCon :: DataCon -> TyCon
promoteDataCon = dcPromoted
{-
************************************************************************
* *
\subsection{Splitting products}
* *
************************************************************************
-}
-- | Extract the type constructor, type argument, data constructor and it's
-- /representation/ argument types from a type if it is a product type.
--
-- Precisely, we return @Just@ for any type that is all of:
--
-- * Concrete (i.e. constructors visible)
--
-- * Single-constructor
--
-- * Not existentially quantified
--
-- Whether the type is a @data@ type or a @newtype@
splitDataProductType_maybe
        :: Type                         -- ^ A product type, perhaps
        -> Maybe (TyCon,                -- The type constructor
                  [Type],               -- Type args of the tycon
                  DataCon,              -- The data constructor
                  [Type])               -- Its /representation/ arg types
        -- Rejecting existentials is conservative.  Maybe some things
        -- could be made to work with them, but I'm not going to sweat
        -- it through till someone finds it's important.
splitDataProductType_maybe ty
  | Just (tycon, ty_args) <- splitTyConApp_maybe ty
  , Just con <- isDataProductTyCon_maybe tycon
  = Just (tycon, ty_args, con, dataConInstArgTys con ty_args)
  | otherwise
  = Nothing
{-
************************************************************************
* *
Building an algebraic data type
* *
************************************************************************
buildAlgTyCon is here because it is called from TysWiredIn, which can
depend on this module, but not on BuildTyCl.
-}
-- | Build an algebraic 'TyCon' of kind @*@ (lifted); lives here rather
-- than in BuildTyCl so that TysWiredIn can call it.
buildAlgTyCon :: Name
              -> [TyVar]               -- ^ Kind variables and type variables
              -> [Role]
              -> Maybe CType
              -> ThetaType             -- ^ Stupid theta
              -> AlgTyConRhs
              -> RecFlag
              -> Bool                  -- ^ True <=> was declared in GADT syntax
              -> AlgTyConFlav
              -> TyCon
buildAlgTyCon tc_name ktvs roles cType stupid_theta rhs
              is_rec gadt_syn parent
  = mkAlgTyCon tc_name binders liftedTypeKind ktvs roles cType stupid_theta
               rhs parent is_rec gadt_syn
  where
    -- Binders are anonymous except where a later binder mentions them
    binders = mkTyBindersPreferAnon ktvs liftedTypeKind
|
mcschroeder/ghc
|
compiler/basicTypes/DataCon.hs
|
bsd-3-clause
| 49,218
| 0
| 20
| 14,464
| 5,694
| 3,242
| 2,452
| 487
| 5
|
-- | FFI wrapper for the .NET @System.Collections.IDictionaryEnumerator@
-- interface: typed property getters over a tagged managed 'Object'.
module Dotnet.System.Collections.IDictionaryEnumerator where

import Dotnet
import qualified Dotnet.System.Collections.DictionaryEntry
import qualified Dotnet.System.Object

-- Phantom tag: an IDictionaryEnumerator is a managed Object carrying it.
data IDictionaryEnumerator_ a
type IDictionaryEnumerator a = Dotnet.System.Object.Object (IDictionaryEnumerator_ a)

-- Property getter: the current entry as a DictionaryEntry (key/value pair).
foreign import dotnet
  "method System.Collections.IDictionaryEnumerator.get_Entry"
  get_Entry :: IDictionaryEnumerator obj -> IO (Dotnet.System.Collections.DictionaryEntry.DictionaryEntry a0)

-- Property getter: the value of the current entry.
foreign import dotnet
  "method System.Collections.IDictionaryEnumerator.get_Value"
  get_Value :: IDictionaryEnumerator obj -> IO (Dotnet.System.Object.Object a0)

-- Property getter: the key of the current entry.
foreign import dotnet
  "method System.Collections.IDictionaryEnumerator.get_Key"
  get_Key :: IDictionaryEnumerator obj -> IO (Dotnet.System.Object.Object a0)
|
alekar/hugs
|
dotnet/lib/Dotnet/System/Collections/IDictionaryEnumerator.hs
|
bsd-3-clause
| 813
| 0
| 10
| 79
| 147
| 86
| 61
| -1
| -1
|
------------------------------------------------------------------------------
-- | The Heist snaplet makes it easy to add Heist to your application and use
-- it in other snaplets.
--
module Snap.Snaplet.Heist
(
-- * Heist and its type class
Heist
, HasHeist(..)
-- * Initializer Functions
-- $initializerSection
, heistInit
, heistInit'
, Unclassed.heistReloader
, Unclassed.setInterpreted
, Unclassed.getCurHeistConfig
, addTemplates
, addTemplatesAt
, Unclassed.addConfig
, getHeistState
, modifyHeistState
, withHeistState
-- * Handler Functions
-- $handlerSection
, gRender
, gRenderAs
, gHeistServe
, gHeistServeSingle
, chooseMode
, cRender
, cRenderAs
, cHeistServe
, cHeistServeSingle
, render
, renderAs
, heistServe
, heistServeSingle
, heistLocal
, withSplices
, renderWithSplices
-- * Writing Splices
-- $spliceSection
, Unclassed.SnapletHeist
, Unclassed.SnapletCSplice
, Unclassed.SnapletISplice
, clearHeistCache
) where
------------------------------------------------------------------------------
import Prelude hiding (id, (.))
import Control.Monad.State
import Data.ByteString (ByteString)
import Heist
------------------------------------------------------------------------------
import Snap.Snaplet
import Snap.Snaplet.Heist.Internal
import qualified Snap.Snaplet.HeistNoClass as Unclassed
import Snap.Snaplet.HeistNoClass ( heistInit
, heistInit'
, clearHeistCache
)
------------------------------------------------------------------------------
-- | A single snaplet should never need more than one instance of Heist as a
-- subsnaplet.  This type class allows you to make it easy for other snaplets
-- to get the lens that identifies the heist snaplet.  Here's an example of
-- how the heist snaplet might be declared:
--
-- > data App = App { _heist :: Snaplet (Heist App) }
-- > makeLenses ''App
-- >
-- > instance HasHeist App where heistLens = subSnaplet heist
-- >
-- > appInit = makeSnaplet "app" "" Nothing $ do
-- >     h <- nestSnaplet "heist" heist $ heistInit "templates"
-- >     addConfig h heistConfigWithMyAppSplices
-- >     return $ App h
--
-- Every wrapper in this module uses 'heistLens' to locate the Heist snaplet,
-- so implementing this single method is all a base state type needs to do.
class HasHeist b where
    -- | A lens to the Heist snaplet. The b parameter to Heist will
    -- typically be the base state of your application.
    heistLens :: SnapletLens (Snaplet b) (Heist b)
-- $initializerSection
-- This section contains functions for use in setting up your Heist state
-- during initialization.
------------------------------------------------------------------------------
-- | Registers templates with the Heist snaplet's 'HeistState'.  Other
-- snaplets should use this to contribute their own templates, which are
-- read automatically from the @templates@ directory under the current
-- snaplet's filesystem root.
addTemplates :: HasHeist b
             => Snaplet (Heist b)
             -> ByteString
                 -- ^ The url prefix for the template routes
             -> Initializer b v ()
addTemplates heistSnaplet urlPrefix =
    withTop' heistLens $ Unclassed.addTemplates heistSnaplet urlPrefix
------------------------------------------------------------------------------
-- | Like 'addTemplates', but lets you say explicitly where the templates
-- live in the filesystem.  The template directory is an absolute path, which
-- gives more flexibility in template location; call 'getSnapletFilePath' if
-- you still want the templates inside the snaplet's own directory structure.
addTemplatesAt :: HasHeist b
               => Snaplet (Heist b)
               -> ByteString
                  -- ^ URL prefix for template routes
               -> FilePath
                  -- ^ Path to templates
               -> Initializer b v ()
addTemplatesAt heistSnaplet urlPrefix templateDir =
    withTop' heistLens $
        Unclassed.addTemplatesAt heistSnaplet urlPrefix templateDir
------------------------------------------------------------------------------
-- | Retrieves the Heist snaplet's current 'HeistState'.  (The previous
-- comment here, "More general function allowing arbitrary HeistState
-- modification", was copied from 'modifyHeistState'; this function only
-- reads the state, it does not modify it.)
getHeistState :: (HasHeist b)
              => Handler b v (HeistState (Handler b b))
getHeistState = Unclassed.getHeistState heistLens
------------------------------------------------------------------------------
-- | Applies an arbitrary transformation to the Heist snaplet's 'HeistState'
-- during initialization.
modifyHeistState :: (HasHeist b)
                 => (HeistState (Handler b b) -> HeistState (Handler b b))
                 -- ^ HeistState modifying function
                 -> Initializer b v ()
modifyHeistState f = Unclassed.modifyHeistState' heistLens f
------------------------------------------------------------------------------
-- | Runs a pure function over the Heist snaplet's 'HeistState' and returns
-- its result.
withHeistState :: (HasHeist b)
               => (HeistState (Handler b b) -> a)
               -- ^ HeistState function to run
               -> Handler b v a
withHeistState f = Unclassed.withHeistState' heistLens f
-- $handlerSection
-- This section contains functions in the 'Handler' monad that you'll use in
-- processing requests. Functions beginning with a 'g' prefix use generic
-- rendering that checks the preferred rendering mode and chooses
-- appropriately. Functions beginning with a 'c' prefix use compiled template
-- rendering. The other functions use the older interpreted rendering.
-- Interpreted splices added with addConfig will only work if you use
-- interpreted rendering.
--
-- The generic functions are useful if you are writing general snaplets that
-- use heist, but need to work for applications that use either interpreted
-- or compiled mode.
------------------------------------------------------------------------------
-- | Generic version of 'render'\/'cRender': renders the named template using
-- whichever rendering mode the Heist snaplet is configured for.
gRender :: HasHeist b
        => ByteString
        -- ^ Template name
        -> Handler b v ()
gRender = withTop' heistLens . Unclassed.gRender
------------------------------------------------------------------------------
-- | Generic version of 'renderAs'\/'cRenderAs': renders the named template
-- with the given content type, in the configured rendering mode.
gRenderAs :: HasHeist b
          => ByteString
          -- ^ Content type to render with
          -> ByteString
          -- ^ Template name
          -> Handler b v ()
gRenderAs contentType tmpl =
    withTop' heistLens $ Unclassed.gRenderAs contentType tmpl
------------------------------------------------------------------------------
-- | Generic version of 'heistServe'\/'cHeistServe': serves any template,
-- using the configured rendering mode.
gHeistServe :: HasHeist b => Handler b v ()
gHeistServe = withTop' heistLens $ Unclassed.gHeistServe
------------------------------------------------------------------------------
-- | Generic version of 'heistServeSingle'\/'cHeistServeSingle': serves one
-- specific template, using the configured rendering mode.
gHeistServeSingle :: HasHeist b
                  => ByteString
                  -- ^ Template name
                  -> Handler b v ()
gHeistServeSingle = withTop' heistLens . Unclassed.gHeistServeSingle
------------------------------------------------------------------------------
-- | Picks one of two handler actions according to the Heist snaplet's
-- configured default rendering mode.
chooseMode :: HasHeist b
           => Handler b v a
           -- ^ A compiled action
           -> Handler b v a
           -- ^ An interpreted action
           -> Handler b v a
chooseMode compiledAction interpretedAction =
    withTop' heistLens (gets _defMode) >>= dispatch
  where
    -- One equation per rendering mode configured in the snaplet state.
    dispatch Unclassed.Compiled    = compiledAction
    dispatch Unclassed.Interpreted = interpretedAction
------------------------------------------------------------------------------
-- | Renders a compiled template as text\/html.  Returns 'empty' (so routing
-- can fall through) when the template does not exist.
cRender :: HasHeist b
        => ByteString
        -- ^ Template name
        -> Handler b v ()
cRender = withTop' heistLens . Unclassed.cRender
------------------------------------------------------------------------------
-- | Renders a compiled template with the given content type.  Returns
-- 'empty' when the template does not exist.
cRenderAs :: HasHeist b
          => ByteString
          -- ^ Content type to render with
          -> ByteString
          -- ^ Template name
          -> Handler b v ()
cRenderAs contentType tmpl =
    withTop' heistLens $ Unclassed.cRenderAs contentType tmpl
------------------------------------------------------------------------------
-- | Compiled-rendering counterpart of 'heistServe'.
cHeistServe :: HasHeist b => Handler b v ()
cHeistServe = withTop' heistLens $ Unclassed.cHeistServe
------------------------------------------------------------------------------
-- | Compiled-rendering analogue of 'fileServeSingle'.  Throws an error when
-- the given template is not found.
cHeistServeSingle :: HasHeist b
                  => ByteString
                  -- ^ Template name
                  -> Handler b v ()
cHeistServeSingle = withTop' heistLens . Unclassed.cHeistServeSingle
------------------------------------------------------------------------------
-- | Renders an interpreted template as text\/html.  Returns 'empty' when
-- the template does not exist.
render :: HasHeist b
       => ByteString
       -- ^ Template name
       -> Handler b v ()
render = withTop' heistLens . Unclassed.render
------------------------------------------------------------------------------
-- | Renders an interpreted template with the given content type.  Returns
-- 'empty' when the template does not exist.
renderAs :: HasHeist b
         => ByteString
         -- ^ Content type to render with
         -> ByteString
         -- ^ Template name
         -> Handler b v ()
renderAs contentType tmpl =
    withTop' heistLens $ Unclassed.renderAs contentType tmpl
------------------------------------------------------------------------------
-- | Serves every template, similar to 'serveDirectory'.  Returns 'empty'
-- when the template named by the request path does not exist.  Templates
-- whose names begin with an underscore (e.g. @_nav.tpl@) are never served,
-- which lets you keep partials such as a navbar out of the public URL
-- space while still including them from other templates.
heistServe :: HasHeist b => Handler b v ()
heistServe = withTop' heistLens $ Unclassed.heistServe
------------------------------------------------------------------------------
-- | Serves one specific template, similar to 'fileServeSingle'.  Throws an
-- error when the given template is not found.
heistServeSingle :: HasHeist b
                 => ByteString
                 -- ^ Template name
                 -> Handler b v ()
heistServeSingle = withTop' heistLens . Unclassed.heistServeSingle
------------------------------------------------------------------------------
-- | Renders a template with some extra splices bound.  Sugar for the common
-- combination of 'heistLocal', @bindSplices@, and 'render'.
renderWithSplices :: HasHeist b
                  => ByteString
                  -- ^ Template name
                  -> Splices (Unclassed.SnapletISplice b)
                  -- ^ Splices to bind
                  -> Handler b v ()
renderWithSplices tmpl splices =
    Unclassed.renderWithSplices' heistLens tmpl splices
------------------------------------------------------------------------------
-- | Runs a handler with additional splices bound into the Heist
-- 'HeistState' for the duration of that handler.
withSplices :: HasHeist b
            => Splices (Unclassed.SnapletISplice b)
            -- ^ Splices to bind
            -> Handler b v a
            -- ^ Handler to run
            -> Handler b v a
withSplices splices action = Unclassed.withSplices' heistLens splices action
------------------------------------------------------------------------------
-- | Runs a handler with a locally modified 'HeistState'.  Handy when a set
-- of splices is customised for one specific action:
--
-- > heistLocal (bindSplices mySplices) handlerThatNeedsSplices
heistLocal :: HasHeist b
           => (HeistState (Handler b b) -> HeistState (Handler b b))
           -- ^ HeistState modifying function
           -> Handler b v a
           -- ^ Handler to run
           -> Handler b v a
heistLocal modifyState action =
    Unclassed.heistLocal' heistLens modifyState action
-- $spliceSection
-- The type signature for SnapletHeist uses @(Handler b b)@ as the Heist
-- snaplet's runtime monad. This means that your splices must use the
-- top-level snaplet's @Handler b b@ monad. The reasons for this are beyond
-- the scope of this discussion, but the result is that 'lift' inside a splice
-- only works with @Handler b b@ actions. When you're writing your own
-- snaplets using some snaplet-specific monad @Handler b v@ you still have to
-- use @Handler b b@ for your splices. If the splices need any of the context
-- provided by the @v@, you must pass it in as a parameter to the splice
-- function.
|
sopvop/snap
|
src/Snap/Snaplet/Heist.hs
|
bsd-3-clause
| 13,135
| 0
| 11
| 3,008
| 1,481
| 826
| 655
| 139
| 2
|
{-# OPTIONS -Wall #-}
-----------------------------------------------------------------------------
-- |
-- Module : CRoundtrip.hs (executable)
-- Copyright : (c) 2008 Benedikt Huber
-- License : BSD-style
-- Maintainer : benedikt.huber@gmail.com
--
-- This module is invoked just like gcc. It preprocesses the C source file in the given argument list,
-- parses it, pretty prints it again, and compares the two ASTs.
--
-- Tests are logged, and serialized into a result file.
-- If `CRoundtrip' finishes without runtime error, it always returns ExitSuccess.
--
-- see 'TestEnvironment'.
-----------------------------------------------------------------------------
module Main (main) where
import Control.Monad.State
import System.FilePath (takeBaseName)
import Text.PrettyPrint
import Language.C
import Language.C.Analysis
import Language.C.Test.Environment
import Language.C.Test.Framework
import Language.C.Test.ParseTests
import Language.C.Test.TestMonad
-- | Entry point: hand control to the test framework's driver, supplying our
-- usage text and the roundtrip test action.
main :: IO ()
main = defaultMain usage roundtripTest
-- | Usage banner shown by 'defaultMain'.  Built with pretty-printer
-- combinators: '$$' stacks with the second doc nested, '$+$' adds a blank
-- line before the environment-variable help.
usage :: Doc
usage = text "./CRoundTrip <gcc-args> file.(c|hc|i)"
        $$ (nest 4 $ text "Roundtrip Test Driver: preprocess, parse, typecheck, pretty print, compile pretty printed, parse again, compare ASTs")
        $+$ envHelpDoc []
-- | Extract the single C source file (plus remaining gcc arguments) from the
-- argument list and run the roundtrip pipeline on it.  Any other outcome of
-- argument munging aborts initialization with a descriptive message.
roundtripTest :: [String] -> TestMonad ()
roundtripTest argv =
  case mungeCcArgs argv of
    Groked [cSource] gccArgs -> roundtripTest' cSource gccArgs
    Groked cFiles _ -> abort $ "More than one c source file given: " ++ unwords cFiles
    Ignore -> abort $ "No C source file found in argument list: `cc " ++ unwords argv ++ "'"
    Unknown err -> abort $ "Could not munge CC args: " ++ err ++ " in `cc " ++ unwords argv ++ "'"
  where
    -- All failure branches report against the same original argument list.
    abort = errorOnInit argv
-- | The actual roundtrip pipeline for one C source file:
-- preprocess -> parse -> typecheck -> pretty print -> parse the
-- pretty-printed output -> syntax-check it with the compiler -> compare
-- both ASTs.  Each stage logs a numbered test result; a failed parse stage
-- aborts via 'exitTest', while other failures are merely recorded.
roundtripTest' :: FilePath -> [String] -> TestMonad ()
roundtripTest' origFile gccArgs = do
  modify $ setTmpTemplate (takeBaseName origFile)
  (cFile, preFile) <- runCPP origFile gccArgs
  modify $ setTestRunResults (emptyTestResults (takeBaseName origFile) [cFile])
  -- parse the preprocessed source
  let parseTest1 = initializeTestResult (parseTestTemplate { testName = "01-parse" }) [origFile]
  parseResult <- runParseTest preFile (initPos cFile)
  addTestM $
    setTestStatus parseTest1 $
    either (uncurry testFailWithReport) (testOkNoReport . snd) parseResult
  -- no AST means nothing further can run: bail out of this test
  ast <- either (const exitTest) (return . fst) parseResult
  -- typecheck
  let tcTest = initializeTestResult (parseTestTemplate { testName = "02-typecheck"}) [origFile]
  tcResult <- runTypecheckTest ast
  addTestM $
    setTestStatus tcTest $
    either testFailNoReport testOkNoReport tcResult
  -- pretty print
  let prettyTest = initializeTestResult (ppTestTemplate { testName = "03-pretty-print" }) [origFile]
  ((prettyFile,report),metric) <- runPrettyPrint ast
  addTestM $
    setTestStatus prettyTest $
    testOkWithReport metric report
  -- parse again (TODO: factor out code duplication with first parse test)
  let parseTest2 = initializeTestResult (parseTestTemplate { testName = "04-parse-pretty-printed" }) [prettyFile]
  parseResult2 <- runParseTest prettyFile (initPos prettyFile)
  addTestM $
    setTestStatus parseTest2 $
    either (uncurry testFailWithReport) (testOkNoReport . snd) parseResult2
  ast2 <- either (const exitTest) (return . fst) parseResult2
  -- compile the pretty-printed output (syntax check only, no codegen)
  let compileTest = initializeTestResult (compileTestTemplate { testName = "05-compile"}) [prettyFile]
  compileResult <- runCompileTest ("-fsyntax-only":gccArgs) prettyFile
  addTestM $
    setTestStatus compileTest $
    either (uncurry testFailWithReport) (testOkNoReport . snd) compileResult
  -- check that original and re-parsed ASTs are equivalent
  let equivTest = initializeTestResult (equivTestTemplate { testName = "06-orig-equiv-pp" }) []
  equivResult <- runEquivTest ast ast2
  addTestM $
    setTestStatus equivTest $
    either (uncurry testFailure) testOkNoReport equivResult
  return ()
-- XXX: TODO: Move to framework ?
-- | Typecheck a translation unit and measure how long it takes.
runTypecheckTest :: CTranslUnit -- ^ preprocessed file
                 -> TestMonad (Either String PerfMeasure) -- ^ either errMsg (decls,elapsedTime)
runTypecheckTest ast = do
  -- typecheck
  dbgMsg $ "Starting Typecheck\n"
  (tcResult, elapsed) <-
    -- ($!) forces the result to WHNF inside 'time' so the elapsed figure
    -- actually covers the analysis, not just building a thunk.
    time $ do return $! case runTrav_ (analyseAST ast) of
                          Left errs -> Left (concatMap show errs)
                          Right _ -> Right ()
  -- check error and add test
  dbgMsg $ "TypeCheck result: " ++ show tcResult ++ "\n"
  case tcResult of
    Left err -> return $ Left err
    -- decl count is not tracked here, hence the 0 in the measure
    Right _ -> return $ Right (PerfMeasure (0,elapsed))
|
micknelso/language-c
|
test/src/CRoundTrip.hs
|
bsd-3-clause
| 4,637
| 0
| 17
| 952
| 1,054
| 533
| 521
| 74
| 4
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ro-RO">
<title>FuzzDB Files</title>
<maps>
<homeID>fuzzdb</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/fuzzdb/src/main/javahelp/help_ro_RO/helpset_ro_RO.hs
|
apache-2.0
| 960
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.PreProcess
-- Copyright : (c) 2003-2005, Isaac Jones, Malcolm Wallace
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This defines a 'PreProcessor' abstraction which represents a pre-processor
-- that can transform one kind of file into another. There is also a
-- 'PPSuffixHandler' which is a combination of a file extension and a function
-- for configuring a 'PreProcessor'. It defines a bunch of known built-in
-- preprocessors like @cpp@, @cpphs@, @c2hs@, @hsc2hs@, @happy@, @alex@ etc and
-- lists them in 'knownSuffixHandlers'. On top of this it provides a function
-- for actually preprocessing some sources given a bunch of known suffix
-- handlers. This module is not as good as it could be, it could really do with
-- a rewrite to address some of the problems we have with pre-processors.
module Distribution.Simple.PreProcess (preprocessComponent, preprocessExtras,
knownSuffixHandlers, ppSuffixes,
PPSuffixHandler, PreProcessor(..),
mkSimplePreProcessor, runSimplePreProcessor,
ppCpp, ppCpp', ppGreenCard, ppC2hs, ppHsc2hs,
ppHappy, ppAlex, ppUnlit, platformDefines
)
where
import Distribution.Simple.PreProcess.Unlit
import Distribution.Package
import qualified Distribution.ModuleName as ModuleName
import Distribution.PackageDescription as PD
import qualified Distribution.InstalledPackageInfo as Installed
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.CCompiler
import Distribution.Simple.Compiler
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.BuildPaths
import Distribution.Simple.Utils
import Distribution.Simple.Program
import Distribution.Simple.Test.LibV09
import Distribution.System
import Distribution.Text
import Distribution.Version
import Distribution.Verbosity
import Control.Monad
import Data.Maybe (fromMaybe)
import Data.List (nub, isSuffixOf)
import System.Directory (doesFileExist)
import System.Info (os, arch)
import System.FilePath (splitExtension, dropExtensions, (</>), (<.>),
takeDirectory, normalise, replaceExtension,
takeExtensions)
-- |The interface to a preprocessor, which may be implemented using an
-- external program, but need not be. The arguments are the name of
-- the input file, the name of the output file and a verbosity level.
-- Here is a simple example that merely prepends a comment to the given
-- source file:
--
-- > ppTestHandler :: PreProcessor
-- > ppTestHandler =
-- > PreProcessor {
-- > platformIndependent = True,
-- > runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
-- > do info verbosity (inFile++" has been preprocessed to "++outFile)
-- > stuff <- readFile inFile
-- > writeFile outFile ("-- preprocessed as a test\n\n" ++ stuff)
-- > return ExitSuccess
--
-- We split the input and output file names into a base directory and the
-- rest of the file name. The input base dir is the path in the list of search
-- dirs that this file was found in. The output base dir is the build dir where
-- all the generated source files are put.
--
-- The reason for splitting it up this way is that some pre-processors don't
-- simply generate one output .hs file from one input file but have
-- dependencies on other generated files (notably c2hs, where building one
-- .hs file may require reading other .chi files, and then compiling the .hs
-- file may require reading a generated .h file). In these cases the generated
-- files need to embed relative path names to each other (eg the generated .hs
-- file mentions the .h file in the FFI imports). This path must be relative to
-- the base directory where the generated files are located, it cannot be
-- relative to the top level of the build tree because the compilers do not
-- look for .h files relative to there, ie we do not use \"-I .\", instead we
-- use \"-I dist\/build\" (or whatever dist dir has been set by the user)
--
-- Most pre-processors do not care of course, so mkSimplePreProcessor and
-- runSimplePreProcessor functions handle the simple case.
--
data PreProcessor = PreProcessor {
  -- Is the output of the pre-processor platform independent? eg happy output
  -- is portable haskell but c2hs's output is platform dependent.
  -- This matters since only platform independent generated code can be
  -- included into a source tarball.
  platformIndependent :: Bool,
  -- TODO: deal with pre-processors that have implementation dependent output
  --       eg alex and happy have --ghc flags. However we can't really include
  --       ghc-specific code into supposedly portable source tarballs.
  runPreProcessor :: (FilePath, FilePath) -- Location of the source file relative to a base dir
                  -> (FilePath, FilePath) -- Output file name, relative to an output base dir
                  -> Verbosity -- verbosity
                  -> IO ()     -- Should exit if the preprocessor fails
  }
-- | Function to determine paths to possible extra C sources for a
-- preprocessor: just takes the path to the build directory and uses
-- this to search for C sources with names that match the
-- preprocessor's output name format.  See 'ppHsc2hsExtras' for an example.
type PreProcessorExtras = FilePath -> IO [FilePath]
-- | Adapt a simple preprocessing function (input path, output path,
-- verbosity) to the 'runPreProcessor' interface, which supplies each path
-- split into a base directory and a relative file name.
mkSimplePreProcessor :: (FilePath -> FilePath -> Verbosity -> IO ())
                      -> (FilePath, FilePath)
                      -> (FilePath, FilePath) -> Verbosity -> IO ()
mkSimplePreProcessor simplePP (inBase, inRel) (outBase, outRel) verbosity =
    simplePP source target verbosity
  where
    -- Rejoin and normalise each (base, relative) pair into a plain path.
    source = normalise (inBase </> inRel)
    target = normalise (outBase </> outRel)
-- | Run a 'PreProcessor' on plain input\/output file paths, using the
-- current directory as the base for both.
runSimplePreProcessor :: PreProcessor -> FilePath -> FilePath -> Verbosity
                      -> IO ()
runSimplePreProcessor pp source target verbosity =
    runPreProcessor pp (".", source) (".", target) verbosity
-- |A preprocessor for turning non-Haskell files with the given extension
-- into plain Haskell source files.  The 'String' is the file extension
-- (without the leading dot); the function builds the 'PreProcessor' from
-- the component's build context.
type PPSuffixHandler
    = (String, BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor)
-- | Apply preprocessors to the sources from 'hsSourceDirs' for a given
-- component (lib, exe, test suite, or benchmark).  Dispatches on the
-- component kind, then preprocesses every module of that component.
preprocessComponent :: PackageDescription
                    -> Component
                    -- ^ Component whose modules are preprocessed
                    -> LocalBuildInfo
                    -> ComponentLocalBuildInfo
                    -> Bool
                    -- ^ True when preprocessing for sdist
                    -> Verbosity
                    -> [PPSuffixHandler]
                    -- ^ Available preprocessors, keyed by extension
                    -> IO ()
preprocessComponent pd comp lbi clbi isSrcDist verbosity handlers = case comp of
  (CLib lib@Library{ libBuildInfo = bi }) -> do
    let dirs = hsSourceDirs bi ++ [autogenModulesDir lbi clbi]
    setupMessage verbosity "Preprocessing library" (packageId pd)
    forM_ (map ModuleName.toFilePath $ libModules lib) $
      pre dirs (buildDir lbi) (localHandlers bi)
  (CExe exe@Executable { buildInfo = bi, exeName = nm }) -> do
    let exeDir = buildDir lbi </> nm </> nm ++ "-tmp"
        dirs   = hsSourceDirs bi ++ [autogenModulesDir lbi clbi]
    setupMessage verbosity ("Preprocessing executable '" ++ nm ++ "' for") (packageId pd)
    forM_ (map ModuleName.toFilePath $ otherModules bi) $
      pre dirs exeDir (localHandlers bi)
    -- the Main module is named by path, not module name, hence dropExtensions
    pre (hsSourceDirs bi) exeDir (localHandlers bi) $
      dropExtensions (modulePath exe)
  CTest test@TestSuite{ testName = nm } -> do
    setupMessage verbosity ("Preprocessing test suite '" ++ nm ++ "' for") (packageId pd)
    case testInterface test of
      TestSuiteExeV10 _ f ->
          preProcessTest test f $ buildDir lbi </> testName test
              </> testName test ++ "-tmp"
      TestSuiteLibV09 _ _ -> do
          -- detailed-0.9 suites are driven through a generated stub Main
          let testDir = buildDir lbi </> stubName test
                  </> stubName test ++ "-tmp"
          writeSimpleTestStub test testDir
          preProcessTest test (stubFilePath test) testDir
      TestSuiteUnsupported tt -> die $ "No support for preprocessing test "
                                    ++ "suite type " ++ display tt
  CBench bm@Benchmark{ benchmarkName = nm } -> do
    setupMessage verbosity ("Preprocessing benchmark '" ++ nm ++ "' for") (packageId pd)
    case benchmarkInterface bm of
      BenchmarkExeV10 _ f ->
          preProcessBench bm f $ buildDir lbi </> benchmarkName bm
              </> benchmarkName bm ++ "-tmp"
      BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark "
                                    ++ "type " ++ display tt
  where
    builtinHaskellSuffixes = ["hs", "lhs", "hsig", "lhsig"]
    builtinCSuffixes       = cSourceExtensions
    builtinSuffixes        = builtinHaskellSuffixes ++ builtinCSuffixes
    -- per-extension handlers instantiated with this component's build info
    localHandlers bi = [(ext, h bi lbi clbi) | (ext, h) <- handlers]
    pre dirs dir lhndlrs fp =
      preprocessFile dirs dir isSrcDist fp verbosity builtinSuffixes lhndlrs
    preProcessTest test = preProcessComponent (testBuildInfo test)
                          (testModules test)
    preProcessBench bm = preProcessComponent (benchmarkBuildInfo bm)
                         (benchmarkModules bm)
    -- shared worker for test suites and benchmarks: preprocess every module,
    -- then the executable entry point itself
    preProcessComponent bi modules exePath dir = do
        let biHandlers = localHandlers bi
            sourceDirs = hsSourceDirs bi ++ [ autogenModulesDir lbi clbi ]
        sequence_ [ preprocessFile sourceDirs dir isSrcDist
                (ModuleName.toFilePath modu) verbosity builtinSuffixes
                biHandlers
              | modu <- modules ]
        preprocessFile (dir : (hsSourceDirs bi)) dir isSrcDist
            (dropExtensions $ exePath) verbosity
            builtinSuffixes biHandlers
--TODO: try to list all the modules that could not be found
-- not just the first one. It's annoying and slow due to the need
-- to reconfigure after editing the .cabal file each time.
-- |Find the first extension of the file that exists, and preprocess it
-- if required.  If only an ordinary source file exists, nothing is done;
-- preprocessor output is only regenerated when the source is newer than
-- any existing output.
preprocessFile
    :: [FilePath]               -- ^source directories
    -> FilePath                 -- ^build directory
    -> Bool                     -- ^preprocess for sdist
    -> FilePath                 -- ^module file name
    -> Verbosity                -- ^verbosity
    -> [String]                 -- ^builtin suffixes
    -> [(String, PreProcessor)] -- ^possible preprocessors
    -> IO ()
preprocessFile searchLoc buildLoc forSDist baseFile verbosity builtinSuffixes handlers = do
    -- look for files in the various source dirs with this module name
    -- and a file extension of a known preprocessor
    psrcFiles <- findFileWithExtension' (map fst handlers) searchLoc baseFile
    case psrcFiles of
        -- no preprocessor file exists, look for an ordinary source file
        -- just to make sure one actually exists at all for this module.
        -- Note: by looking in the target/output build dir too, we allow
        -- source files to appear magically in the target build dir without
        -- any corresponding "real" source file. This lets custom Setup.hs
        -- files generate source modules directly into the build dir without
        -- the rest of the build system being aware of it (somewhat dodgy)
      Nothing -> do
                 bsrcFiles <- findFileWithExtension builtinSuffixes (buildLoc : searchLoc) baseFile
                 case bsrcFiles of
                   Nothing -> die $ "can't find source for " ++ baseFile
                                 ++ " in " ++ intercalate ", " searchLoc
                   _       -> return ()
        -- found a pre-processable file in one of the source dirs
      Just (psrcLoc, psrcRelFile) -> do
            let (srcStem, ext) = splitExtension psrcRelFile
                psrcFile = psrcLoc </> psrcRelFile
                -- lookup cannot fail: ext came from the handlers' keys above
                pp = fromMaybe (error "Distribution.Simple.PreProcess: Just expected")
                               (lookup (tailNotNull ext) handlers)
            -- Preprocessing files for 'sdist' is different from preprocessing
            -- for 'build'. When preprocessing for sdist we preprocess to
            -- avoid that the user has to have the preprocessors available.
            -- ATM, we don't have a way to specify which files are to be
            -- preprocessed and which not, so for sdist we only process
            -- platform independent files and put them into the 'buildLoc'
            -- (which we assume is set to the temp. directory that will become
            -- the tarball).
            --TODO: eliminate sdist variant, just supply different handlers
            when (not forSDist || forSDist && platformIndependent pp) $ do
              -- look for existing pre-processed source file in the dest dir to
              -- see if we really have to re-run the preprocessor.
              ppsrcFiles <- findFileWithExtension builtinSuffixes [buildLoc] baseFile
              recomp <- case ppsrcFiles of
                          Nothing -> return True
                          Just ppsrcFile ->
                              psrcFile `moreRecentFile` ppsrcFile
              when recomp $ do
                let destDir = buildLoc </> dirName srcStem
                createDirectoryIfMissingVerbose verbosity True destDir
                runPreProcessorWithHsBootHack pp
                   (psrcLoc, psrcRelFile)
                   (buildLoc, srcStem <.> "hs")
  where
    dirName = takeDirectory
    -- drop the leading '.' from an extension, tolerating the empty string
    tailNotNull [] = []
    tailNotNull x  = tail x
    -- FIXME: This is a somewhat nasty hack. GHC requires that hs-boot files
    -- be in the same place as the hs files, so if we put the hs file in dist/
    -- then we need to copy the hs-boot file there too. This should probably be
    -- done another way. Possibly we should also be looking for .lhs-boot
    -- files, but I think that preprocessors only produce .hs files.
    runPreProcessorWithHsBootHack pp
      (inBaseDir, inRelativeFile)
      (outBaseDir, outRelativeFile) = do
        runPreProcessor pp
          (inBaseDir, inRelativeFile)
          (outBaseDir, outRelativeFile) verbosity
        exists <- doesFileExist inBoot
        when exists $ copyFileVerbose verbosity inBoot outBoot
      where
        inBoot  = replaceExtension inFile  "hs-boot"
        outBoot = replaceExtension outFile "hs-boot"
        inFile  = normalise (inBaseDir  </> inRelativeFile)
        outFile = normalise (outBaseDir </> outRelativeFile)
-- ------------------------------------------------------------
-- * known preprocessors
-- ------------------------------------------------------------
-- | Preprocessor for greencard files: runs the @greencard@ program with FFI
-- output (@-tffi@).  Output is platform dependent.
ppGreenCard :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppGreenCard _ lbi _ = PreProcessor
    { platformIndependent = False
    , runPreProcessor = mkSimplePreProcessor $ \source target verbosity ->
          rawSystemProgramConf verbosity greencardProgram (withPrograms lbi)
              ["-tffi", "-o" ++ target, source]
    }
-- | Strip literate-Haskell markup, producing plain Haskell source.  Useful
-- in front of preprocessors that can't handle literate input.  We also need
-- a way to chain preprocessors.
ppUnlit :: PreProcessor
ppUnlit = PreProcessor
    { platformIndependent = True
    , runPreProcessor = mkSimplePreProcessor $ \source target _verbosity ->
          withUTF8FileContents source $ \contents ->
              -- 'unlit' yields either the unlit text or an error message
              either (writeUTF8File target) die (unlit source contents)
    }
-- | C preprocessor with no extra arguments; see 'ppCpp''.
ppCpp :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppCpp = ppCpp' []
-- | C preprocessor taking extra command-line arguments.  Uses the
-- compiler's built-in cpp for GHC (>= 6.6) and GHCJS, and falls back to
-- cpphs for anything else.
ppCpp' :: [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppCpp' extraArgs bi lbi clbi =
    case compilerFlavor (compiler lbi) of
      GHC   -> ppGhcCpp ghcProgram (>= Version [6,6] []) allArgs bi lbi clbi
      GHCJS -> ppGhcCpp ghcjsProgram (const True) allArgs bi lbi clbi
      _     -> ppCpphs allArgs bi lbi clbi
  where
    -- cpp options from the build info come before any caller-supplied args
    allArgs = getCppOptions bi lbi ++ extraArgs
-- | Run a GHC-family compiler as a C preprocessor (@-E -cpp@).  The @xHs@
-- predicate decides, per compiler version, whether to pass @-x hs@ so the
-- compiler does not unlit literate input itself.
ppGhcCpp :: Program -> (Version -> Bool)
         -> [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppGhcCpp program xHs extraArgs _bi lbi clbi =
  PreProcessor {
    platformIndependent = False,
    runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
      (prog, version, _) <- requireProgramVersion verbosity
                              program anyVersion (withPrograms lbi)
      rawSystemProgram verbosity prog $
          ["-E", "-cpp"]
          -- This is a bit of an ugly hack. We're going to
          -- unlit the file ourselves later on if appropriate,
          -- so we need GHC not to unlit it now or it'll get
          -- double-unlitted. In the future we might switch to
          -- using cpphs --unlit instead.
       ++ (if xHs version then ["-x", "hs"] else [])
       ++ [ "-optP-include", "-optP"++ (autogenModulesDir lbi clbi </> cppHeaderName) ]
       ++ ["-o", outFile, inFile]
       ++ extraArgs
  }
-- | Run the standalone @cpphs@ preprocessor.  The @--include@ flag for the
-- autogenerated cpp header is only supported from cpphs 1.6 onward.
ppCpphs :: [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppCpphs extraArgs _bi lbi clbi =
  PreProcessor {
    platformIndependent = False,
    runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
      (cpphsProg, cpphsVersion, _) <- requireProgramVersion verbosity
                                        cpphsProgram anyVersion (withPrograms lbi)
      rawSystemProgram verbosity cpphsProg $
          ("-O" ++ outFile) : inFile
        : "--noline" : "--strip"
        : (if cpphsVersion >= Version [1,6] []
             then ["--include="++ (autogenModulesDir lbi clbi </> cppHeaderName)]
             else [])
        ++ extraArgs
  }
-- | Run @hsc2hs@ over a source file.  hsc2hs compiles and runs a small C
-- program, so we pass through the full C compiler/linker configuration:
-- gcc paths, framework dirs (OS X), include/lib dirs and options from both
-- the current package and its dependencies.
ppHsc2hs :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppHsc2hs bi lbi clbi =
    PreProcessor {
      platformIndependent = False,
      runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
        (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi)
        rawSystemProgramConf verbosity hsc2hsProgram (withPrograms lbi) $
            [ "--cc=" ++ programPath gccProg
            , "--ld=" ++ programPath gccProg ]
          -- Additional gcc options
          ++ [ "--cflag=" ++ opt | opt <- programDefaultArgs  gccProg
                                       ++ programOverrideArgs gccProg ]
          ++ [ "--lflag=" ++ opt | opt <- programDefaultArgs  gccProg
                                       ++ programOverrideArgs gccProg ]
          -- OSX frameworks:
          ++ [ what ++ "=-F" ++ opt
             | isOSX
             , opt <- nub (concatMap Installed.frameworkDirs pkgs)
             , what <- ["--cflag", "--lflag"] ]
          ++ [ "--lflag=" ++ arg
             | isOSX
             , opt <- PD.frameworks bi ++ concatMap Installed.frameworks pkgs
             , arg <- ["-framework", opt] ]
          -- Note that on ELF systems, wherever we use -L, we must also use -R
          -- because presumably that -L dir is not on the normal path for the
          -- system's dynamic linker. This is needed because hsc2hs works by
          -- compiling a C program and then running it.
          ++ [ "--cflag="   ++ opt | opt <- platformDefines lbi ]
          -- Options from the current package:
          ++ [ "--cflag=-I" ++ dir | dir <- PD.includeDirs  bi ]
          ++ [ "--cflag="   ++ opt | opt <- PD.ccOptions    bi
                                         ++ PD.cppOptions   bi ]
          ++ [ "--cflag="   ++ opt | opt <-
                  [ "-I" ++ autogenModulesDir lbi clbi,
                    "-include", autogenModulesDir lbi clbi </> cppHeaderName ] ]
          ++ [ "--lflag=-L" ++ opt | opt <- PD.extraLibDirs bi ]
          ++ [ "--lflag=-Wl,-R," ++ opt | isELF
                                        , opt <- PD.extraLibDirs bi ]
          ++ [ "--lflag=-l" ++ opt | opt <- PD.extraLibs    bi ]
          ++ [ "--lflag="   ++ opt | opt <- PD.ldOptions    bi ]
          -- Options from dependent packages
          ++ [ "--cflag=" ++ opt
             | pkg <- pkgs
             , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ]
                   ++ [ opt         | opt <- Installed.ccOptions   pkg ] ]
          ++ [ "--lflag=" ++ opt
             | pkg <- pkgs
             , opt <- [ "-L" ++ opt | opt <- Installed.libraryDirs    pkg ]
                   ++ [ "-Wl,-R," ++ opt | isELF
                                         , opt <- Installed.libraryDirs    pkg ]
                   ++ [ "-l" ++ opt | opt <- Installed.extraLibraries pkg ]
                   ++ [ opt         | opt <- Installed.ldOptions      pkg ] ]
          ++ ["-o", outFile, inFile]
    }
  where
    -- TODO: installedPkgs contains ALL dependencies associated with
    -- the package, but we really only want to look at packages for the
    -- *current* dependency. We should use PackageIndex.dependencyClosure
    -- on the direct depends of the component. The signature of this
    -- function was recently refactored, so this should be fixable
    -- now. Tracked with #2971 (which has a test case.)
    pkgs = PackageIndex.topologicalOrder (packageHacks (installedPkgs lbi))
    isOSX = case buildOS of OSX -> True; _ -> False
    isELF = case buildOS of OSX -> False; Windows -> False; AIX -> False; _ -> True;
    packageHacks = case compilerFlavor (compiler lbi) of
      GHC   -> hackRtsPackage
      GHCJS -> hackRtsPackage
      _     -> id
    -- We don't link in the actual Haskell libraries of our dependencies, so
    -- the -u flags in the ldOptions of the rts package mean linking fails on
    -- OS X (it's ld is a tad stricter than gnu ld). Thus we remove the
    -- ldOptions for GHC's rts package:
    hackRtsPackage index =
      case PackageIndex.lookupPackageName index (PackageName "rts") of
        [(_, [rts])]
           -> PackageIndex.insert rts { Installed.ldOptions = [] } index
        _  -> error "No (or multiple) ghc rts package is registered!!"
-- | Extra C sources produced by hsc2hs: every @*_hsc.c@ file found
-- (recursively) under the given build directory.
ppHsc2hsExtras :: PreProcessorExtras
ppHsc2hsExtras buildBaseDir =
  fmap (filter (isSuffixOf "_hsc.c"))
       (getDirectoryContentsRecursive buildBaseDir)
-- | Preprocessor that runs @c2hs@ (requires c2hs >= 0.15), using gcc as
-- the C preprocessor and feeding it cpp options from the current
-- package and all installed dependencies.
ppC2hs :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppC2hs bi lbi clbi =
  PreProcessor {
    platformIndependent = False,
    runPreProcessor = \(inBaseDir, inRelativeFile)
                       (outBaseDir, outRelativeFile) verbosity -> do
      (c2hsProg, _, _) <- requireProgramVersion verbosity
                            c2hsProgram (orLaterVersion (Version [0,15] []))
                            (withPrograms lbi)
      (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi)
      rawSystemProgram verbosity c2hsProg $
          -- Options from the current package:
           [ "--cpp=" ++ programPath gccProg, "--cppopts=-E" ]
        ++ [ "--cppopts=" ++ opt | opt <- getCppOptions bi lbi ]
        ++ [ "--cppopts=-include" ++ (autogenModulesDir lbi clbi </> cppHeaderName) ]
        ++ [ "--include=" ++ outBaseDir ]
          -- Options from dependent packages
       ++ [ "--cppopts=" ++ opt
          | pkg <- pkgs
          , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ]
                ++ [ opt | opt@('-':c:_) <- Installed.ccOptions pkg
                         -- c2hs only takes -D, -I and -U cpp flags here.
                         , c `elem` "DIU" ] ]
          --TODO: install .chi files for packages, so we can --include
          -- those dirs here, for the dependencies
           -- input and output files
        ++ [ "--output-dir=" ++ outBaseDir
           , "--output=" ++ outRelativeFile
           , inBaseDir </> inRelativeFile ]
  }
  where
    pkgs = PackageIndex.topologicalOrder (installedPkgs lbi)
-- | Extra C sources produced by c2hs: every @*.chs.c@ file found
-- (recursively) under the given directory.
ppC2hsExtras :: PreProcessorExtras
ppC2hsExtras d =
  fmap (filter (\p -> takeExtensions p == ".chs.c"))
       (getDirectoryContentsRecursive d)
--TODO: perhaps use this with hsc2hs too
--TODO: remove cc-options from cpphs for cabal-version: >= 1.10
-- | Collect the cpp flags for this package: the platform defines, the
-- package's own cpp-options, its include dirs as -I flags, and any
-- -D/-I/-U flags found among its cc-options.
getCppOptions :: BuildInfo -> LocalBuildInfo -> [String]
getCppOptions bi lbi = concat
    [ platformDefines lbi
    , cppOptions bi
    , [ "-I" ++ dir | dir <- PD.includeDirs bi ]
    , [ opt | opt@('-':c:_) <- PD.ccOptions bi, c `elem` "DIU" ]
    ]
-- | The cpp @-D@ defines describing the compiler, build platform and
-- host platform, in the form GHC's own cpp conventions use
-- (e.g. @-D__GLASGOW_HASKELL__=710@, @-Dlinux_HOST_OS=1@).
platformDefines :: LocalBuildInfo -> [String]
platformDefines lbi =
  case compilerFlavor comp of
    GHC  ->
      ["-D__GLASGOW_HASKELL__=" ++ versionInt version] ++
      ["-D" ++ os   ++ "_BUILD_OS=1"] ++
      ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++
      map (\os'   -> "-D" ++ os'   ++ "_HOST_OS=1")   osStr ++
      map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
    GHCJS ->
      compatGlasgowHaskell ++
      ["-D__GHCJS__=" ++ versionInt version] ++
      ["-D" ++ os   ++ "_BUILD_OS=1"] ++
      ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++
      map (\os'   -> "-D" ++ os'   ++ "_HOST_OS=1")   osStr ++
      map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
    JHC  -> ["-D__JHC__=" ++ versionInt version]
    HaskellSuite {} ->
      ["-D__HASKELL_SUITE__"] ++
        map (\os'   -> "-D" ++ os'   ++ "_HOST_OS=1")   osStr ++
        map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
    _    -> []
  where
    comp = compiler lbi
    Platform hostArch hostOS = hostPlatform lbi
    version = compilerVersion comp
    -- For GHCJS, also emit __GLASGOW_HASKELL__ for the GHC version it
    -- is compatible with, if known.
    compatGlasgowHaskell =
      maybe [] (\v -> ["-D__GLASGOW_HASKELL__=" ++ versionInt v])
               (compilerCompatVersion GHC comp)
    -- TODO: move this into the compiler abstraction
    -- FIXME: this forces GHC's crazy 4.8.2 -> 408 convention on all
    -- the other compilers. Check if that's really what they want.
    versionInt :: Version -> String
    versionInt (Version { versionBranch = [] }) = "1"
    versionInt (Version { versionBranch = [n] }) = show n
    versionInt (Version { versionBranch = n1:n2:_ })
      = -- 6.8.x -> 608
        -- 6.10.x -> 610
        let s1 = show n1
            s2 = show n2
            middle = case s2 of
                     _ : _ : _ -> ""
                     _         -> "0"
        in s1 ++ middle ++ s2
    -- Spellings of each OS/arch as they appear in cpp symbols; an empty
    -- list means no _HOST_OS/_HOST_ARCH define is emitted for it.
    osStr = case hostOS of
      Linux     -> ["linux"]
      Windows   -> ["mingw32"]
      OSX       -> ["darwin"]
      FreeBSD   -> ["freebsd"]
      OpenBSD   -> ["openbsd"]
      NetBSD    -> ["netbsd"]
      DragonFly -> ["dragonfly"]
      Solaris   -> ["solaris2"]
      AIX       -> ["aix"]
      HPUX      -> ["hpux"]
      IRIX      -> ["irix"]
      HaLVM     -> []
      IOS       -> ["ios"]
      Android   -> ["android"]
      Ghcjs     -> ["ghcjs"]
      Hurd      -> ["hurd"]
      OtherOS _ -> []
    archStr = case hostArch of
      I386        -> ["i386"]
      X86_64      -> ["x86_64"]
      PPC         -> ["powerpc"]
      PPC64       -> ["powerpc64"]
      Sparc       -> ["sparc"]
      Arm         -> ["arm"]
      Mips        -> ["mips"]
      SH          -> []
      IA64        -> ["ia64"]
      S390        -> ["s390"]
      Alpha       -> ["alpha"]
      Hppa        -> ["hppa"]
      Rs6000      -> ["rs6000"]
      M68k        -> ["m68k"]
      Vax         -> ["vax"]
      JavaScript  -> ["javascript"]
      OtherArch _ -> []
-- | Preprocessor for @.y@/@.ly@ grammars: runs @happy@.  The output is
-- platform independent; @-agc@ (array-based, GHC-specific, with line
-- coverage) is used for GHC-family compilers.
ppHappy :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppHappy _ lbi _ = basePP { platformIndependent = True }
  where
    basePP = standardPP lbi happyProgram flags
    flags = case compilerFlavor (compiler lbi) of
              GHC   -> ["-agc"]
              GHCJS -> ["-agc"]
              _     -> []
-- | Preprocessor for @.x@ lexers: runs @alex@.  The output is platform
-- independent; @-g@ (GHC-optimised output) is used for GHC-family
-- compilers.
ppAlex :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor
ppAlex _ lbi _ = basePP { platformIndependent = True }
  where
    basePP = standardPP lbi alexProgram flags
    flags = case compilerFlavor (compiler lbi) of
              GHC   -> ["-g"]
              GHCJS -> ["-g"]
              _     -> []
-- | Build a simple preprocessor that invokes the given external program
-- as @prog args -o OUTFILE INFILE@.
standardPP :: LocalBuildInfo -> Program -> [String] -> PreProcessor
standardPP lbi prog args = PreProcessor
  { platformIndependent = False
  , runPreProcessor     = mkSimplePreProcessor invoke
  }
  where
    invoke inFile outFile verbosity =
      rawSystemProgramConf verbosity prog (withPrograms lbi)
        (args ++ ["-o", outFile, inFile])
-- |Convenience function; get the suffixes of these preprocessors.
ppSuffixes :: [ PPSuffixHandler ] -> [String]
ppSuffixes = map fst
-- |Standard preprocessors: GreenCard, c2hs, hsc2hs, happy, alex and cpphs.
-- Maps a file suffix to the preprocessor that handles it; order here is
-- the order suffixes are tried.
knownSuffixHandlers :: [ PPSuffixHandler ]
knownSuffixHandlers =
  [ ("gc",     ppGreenCard)
  , ("chs",    ppC2hs)
  , ("hsc",    ppHsc2hs)
  , ("x",      ppAlex)
  , ("y",      ppHappy)
  , ("ly",     ppHappy)
  , ("cpphs",  ppCpp)
  ]
-- |Standard preprocessors with possible extra C sources: c2hs, hsc2hs.
knownExtrasHandlers :: [ PreProcessorExtras ]
knownExtrasHandlers = [ ppC2hsExtras, ppHsc2hsExtras ]
-- | Find any extra C sources generated by preprocessing that need to
-- be added to the component (addresses issue #238).
-- Each component kind keeps its preprocessor output in a different
-- subdirectory of the build dir; 'die' on test/benchmark kinds we
-- cannot preprocess.
preprocessExtras :: Component
                 -> LocalBuildInfo
                 -> IO [FilePath]
preprocessExtras comp lbi = case comp of
  CLib _ -> pp $ buildDir lbi
  (CExe Executable { exeName = nm }) ->
    pp $ buildDir lbi </> nm </> nm ++ "-tmp"
  CTest test -> do
    case testInterface test of
      TestSuiteExeV10 _ _ ->
          pp $ buildDir lbi </> testName test </> testName test ++ "-tmp"
      TestSuiteLibV09 _ _ ->
          pp $ buildDir lbi </> stubName test </> stubName test ++ "-tmp"
      TestSuiteUnsupported tt -> die $ "No support for preprocessing test "
                                    ++ "suite type " ++ display tt
  CBench bm -> do
    case benchmarkInterface bm of
      BenchmarkExeV10 _ _ ->
          pp $ buildDir lbi </> benchmarkName bm </> benchmarkName bm ++ "-tmp"
      BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark "
                                    ++ "type " ++ display tt
  where
    -- Collect all extra sources each known handler finds under dir,
    -- qualified back with dir.
    pp dir = (map (dir </>) . concat) `fmap` forM knownExtrasHandlers ($ dir)
|
headprogrammingczar/cabal
|
Cabal/Distribution/Simple/PreProcess.hs
|
bsd-3-clause
| 29,824
| 163
| 27
| 8,458
| 5,468
| 3,020
| 2,448
| 441
| 40
|
-----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 2011
--
-- CmmLint: checking the correctness of Cmm statements and expressions
--
-----------------------------------------------------------------------------
{-# LANGUAGE GADTs, CPP #-}
module CmmLint (
cmmLint, cmmLintGraph
) where
import Hoopl
import Cmm
import CmmUtils
import CmmLive
import CmmSwitch (switchTargetsToList)
import PprCmm ()
import BlockId
import FastString
import Outputable
import DynFlags
import Control.Monad (liftM, ap)
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative (Applicative(..))
#endif
-- Things to check:
-- - invariant on CmmBlock in CmmExpr (see comment there)
-- - check for branches to blocks that don't exist
-- - check types
-- -----------------------------------------------------------------------------
-- Exported entry points:
-- | Lint a whole Cmm group; Nothing on success, Just an error doc on
-- the first failure.
cmmLint :: (Outputable d, Outputable h)
        => DynFlags -> GenCmmGroup d h CmmGraph -> Maybe SDoc
cmmLint dflags tops = runCmmLint dflags (mapM_ (lintCmmDecl dflags)) tops
-- | Lint a single graph.
cmmLintGraph :: DynFlags -> CmmGraph -> Maybe SDoc
cmmLintGraph dflags g = runCmmLint dflags (lintCmmGraph dflags) g
-- Run a lint action and, on failure, render the error together with the
-- program that triggered it.
runCmmLint :: Outputable a => DynFlags -> (a -> CmmLint b) -> a -> Maybe SDoc
runCmmLint dflags l p =
  case unCL (l p) dflags of
    Left err -> Just (vcat [ptext $ sLit ("Cmm lint error:"),
                            nest 2 err,
                            ptext $ sLit ("Program was:"),
                            nest 2 (ppr p)])
    Right _  -> Nothing
-- | Lint one top-level declaration; data sections need no checking.
lintCmmDecl :: DynFlags -> GenCmmDecl h i CmmGraph -> CmmLint ()
lintCmmDecl dflags (CmmProc _ lbl _ g)
  = addLintInfo (text "in proc " <> ppr lbl) $ lintCmmGraph dflags g
lintCmmDecl _ (CmmData {})
  = return ()
-- | Lint every block of a graph.  The liveness analysis is forced first
-- (via seq) purely for its error side-effect, see comment below.
lintCmmGraph :: DynFlags -> CmmGraph -> CmmLint ()
lintCmmGraph dflags g =
    cmmLocalLiveness dflags g `seq` mapM_ (lintCmmBlock labels) blocks
    -- cmmLiveness throws an error if there are registers
    -- live on entry to the graph (i.e. undefined
    -- variables)
  where
       blocks = toBlockList g
       labels = setFromList (map entryLabel blocks)
-- | Lint one basic block: its middle nodes, then its last node (whose
-- branch targets are checked against @labels@).
lintCmmBlock :: BlockSet -> CmmBlock -> CmmLint ()
lintCmmBlock labels block
  = addLintInfo (text "in basic block " <> ppr (entryLabel block)) $ do
      let (_, middle, last) = blockSplit block
      mapM_ lintCmmMiddle (blockToList middle)
      lintCmmLast labels last
-- -----------------------------------------------------------------------------
-- lintCmmExpr
-- Checks whether a CmmExpr is "type-correct", and check for obvious-looking
-- byte/word mismatches.
-- | Lint an expression and return its type.  CmmRegOff is checked by
-- rewriting it into the equivalent MO_Add expression.
lintCmmExpr :: CmmExpr -> CmmLint CmmType
lintCmmExpr (CmmLoad expr rep) = do
  _ <- lintCmmExpr expr
  -- Disabled, if we have the inlining phase before the lint phase,
  -- we can have funny offsets due to pointer tagging. -- EZY
  -- when (widthInBytes (typeWidth rep) >= wORD_SIZE) $
  --   cmmCheckWordAddress expr
  return rep
lintCmmExpr expr@(CmmMachOp op args) = do
  dflags <- getDynFlags
  tys <- mapM lintCmmExpr args
  -- Argument widths must match what the MachOp expects.
  if map (typeWidth . cmmExprType dflags) args == machOpArgReps dflags op
        then cmmCheckMachOp op args tys
        else cmmLintMachOpErr expr (map (cmmExprType dflags) args) (machOpArgReps dflags op)
lintCmmExpr (CmmRegOff reg offset)
  = do dflags <- getDynFlags
       let rep = typeWidth (cmmRegType dflags reg)
       lintCmmExpr (CmmMachOp (MO_Add rep)
                   [CmmReg reg, CmmLit (CmmInt (fromIntegral offset) rep)])
lintCmmExpr expr =
  do dflags <- getDynFlags
     return (cmmExprType dflags expr)
-- Check for some common byte/word mismatches (eg. Sp + 1)
-- Check for some common byte/word mismatches (eg. Sp + 1)
-- Normalises @lit `op` reg@ to @reg `op` lit@ first, then just returns
-- the MachOp's result type.
cmmCheckMachOp :: MachOp -> [CmmExpr] -> [CmmType] -> CmmLint CmmType
cmmCheckMachOp op [lit@(CmmLit (CmmInt { })), reg@(CmmReg _)] tys
  = cmmCheckMachOp op [reg, lit] tys
cmmCheckMachOp op _ tys
  = do dflags <- getDynFlags
       return (machOpResultType dflags op tys)
{-
isOffsetOp :: MachOp -> Bool
isOffsetOp (MO_Add _) = True
isOffsetOp (MO_Sub _) = True
isOffsetOp _ = False
-- This expression should be an address from which a word can be loaded:
-- check for funny-looking sub-word offsets.
_cmmCheckWordAddress :: CmmExpr -> CmmLint ()
_cmmCheckWordAddress e@(CmmMachOp op [arg, CmmLit (CmmInt i _)])
| isOffsetOp op && notNodeReg arg && i `rem` fromIntegral (wORD_SIZE dflags) /= 0
= cmmLintDubiousWordOffset e
_cmmCheckWordAddress e@(CmmMachOp op [CmmLit (CmmInt i _), arg])
| isOffsetOp op && notNodeReg arg && i `rem` fromIntegral (wORD_SIZE dflags) /= 0
= cmmLintDubiousWordOffset e
_cmmCheckWordAddress _
= return ()
-- No warnings for unaligned arithmetic with the node register,
-- which is used to extract fields from tagged constructor closures.
notNodeReg :: CmmExpr -> Bool
notNodeReg (CmmReg reg) | reg == nodeReg = False
notNodeReg _ = True
-}
-- | Lint a middle (open/open) node.  Assignments must agree with the
-- register's type (ignoring pointerhood); comments, ticks and unwind
-- info are trivially fine.
lintCmmMiddle :: CmmNode O O -> CmmLint ()
lintCmmMiddle node = case node of
  CmmComment _ -> return ()
  CmmTick _    -> return ()
  CmmUnwind{}  -> return ()
  CmmAssign reg expr -> do
            dflags <- getDynFlags
            erep <- lintCmmExpr expr
            let reg_ty = cmmRegType dflags reg
            if (erep `cmmEqType_ignoring_ptrhood` reg_ty)
                then return ()
                else cmmLintAssignErr (CmmAssign reg expr) erep reg_ty
  CmmStore l r -> do
            _ <- lintCmmExpr l
            _ <- lintCmmExpr r
            return ()
  CmmUnsafeForeignCall target _formals actuals -> do
            lintTarget target
            mapM_ lintCmmExpr actuals
-- | Lint a last (open/closed) node: every control-flow target must be a
-- block in @labels@, conditions must be genuine conditionals, and a
-- switch scrutinee must be word-sized.
lintCmmLast :: BlockSet -> CmmNode O C -> CmmLint ()
lintCmmLast labels node = case node of
  CmmBranch id -> checkTarget id
  CmmCondBranch e t f -> do
            dflags <- getDynFlags
            mapM_ checkTarget [t,f]
            _ <- lintCmmExpr e
            checkCond dflags e
  CmmSwitch e ids -> do
            dflags <- getDynFlags
            mapM_ checkTarget $ switchTargetsToList ids
            erep <- lintCmmExpr e
            if (erep `cmmEqType_ignoring_ptrhood` bWord dflags)
              then return ()
              else cmmLintErr (text "switch scrutinee is not a word: " <>
                               ppr e <> text " :: " <> ppr erep)
  CmmCall { cml_target = target, cml_cont = cont } -> do
          _ <- lintCmmExpr target
          maybe (return ()) checkTarget cont
  CmmForeignCall tgt _ args succ _ _ _ -> do
          lintTarget tgt
          mapM_ lintCmmExpr args
          checkTarget succ
 where
  checkTarget id
     | setMember id labels = return ()
     | otherwise = cmmLintErr (text "Branch to nonexistent id" <+> ppr id)
-- | Lint the callee of a foreign call; primitive targets carry no
-- expression to check.
lintTarget :: ForeignTarget -> CmmLint ()
lintTarget (ForeignTarget e _) = lintCmmExpr e >> return ()
lintTarget (PrimTarget {})     = return ()
-- | A branch condition must be a comparison MachOp or a word-sized 0/1
-- literal; anything else is a lint error.
checkCond :: DynFlags -> CmmExpr -> CmmLint ()
checkCond _ (CmmMachOp mop _) | isComparisonMachOp mop = return ()
checkCond dflags (CmmLit (CmmInt x t)) | x == 0 || x == 1, t == wordWidth dflags = return () -- constant values
checkCond _ expr
    = cmmLintErr (hang (text "expression is not a conditional:") 2
                         (ppr expr))
-- -----------------------------------------------------------------------------
-- CmmLint monad
-- just a basic error monad:
-- | The lint monad: a Reader over DynFlags combined with an Either for
-- short-circuiting on the first error.
newtype CmmLint a = CmmLint { unCL :: DynFlags -> Either SDoc a }
instance Functor CmmLint where
      fmap = liftM
-- Applicative is defined via the Monad instance because this module
-- still supports GHC < 7.10 (see the CPP import guard above).
instance Applicative CmmLint where
      pure = return
      (<*>) = ap
instance Monad CmmLint where
  CmmLint m >>= k = CmmLint $ \dflags ->
                                case m dflags of
                                Left e -> Left e
                                Right a -> unCL (k a) dflags
  return a = CmmLint (\_ -> Right a)
instance HasDynFlags CmmLint where
    getDynFlags = CmmLint (\dflags -> Right dflags)
-- | Fail the lint with the given message.
cmmLintErr :: SDoc -> CmmLint a
cmmLintErr msg = CmmLint (\_ -> Left msg)
-- | Prefix any error produced by the inner action with a context note.
addLintInfo :: SDoc -> CmmLint a -> CmmLint a
addLintInfo info thing = CmmLint $ \dflags ->
   case unCL thing dflags of
        Left err -> Left (hang info 2 err)
        Right a  -> Right a
-- | Error: MachOp applied to arguments of the wrong widths.
cmmLintMachOpErr :: CmmExpr -> [CmmType] -> [Width] -> CmmLint a
cmmLintMachOpErr expr argsRep opExpectsRep
     = cmmLintErr (text "in MachOp application: " $$
                   nest 2 (ppr expr) $$
                      (text "op is expecting: " <+> ppr opExpectsRep) $$
                      (text "arguments provide: " <+> ppr argsRep))
-- | Error: assignment where the rhs type does not match the register.
cmmLintAssignErr :: CmmNode e x -> CmmType -> CmmType -> CmmLint a
cmmLintAssignErr stmt e_ty r_ty
  = cmmLintErr (text "in assignment: " $$
                nest 2 (vcat [ppr stmt,
                              text "Reg ty:" <+> ppr r_ty,
                              text "Rhs ty:" <+> ppr e_ty]))
{-
cmmLintDubiousWordOffset :: CmmExpr -> CmmLint a
cmmLintDubiousWordOffset expr
= cmmLintErr (text "offset is not a multiple of words: " $$
nest 2 (ppr expr))
-}
|
urbanslug/ghc
|
compiler/cmm/CmmLint.hs
|
bsd-3-clause
| 8,908
| 0
| 16
| 2,314
| 2,259
| 1,108
| 1,151
| 155
| 7
|
module Name
( Name -- :: *; Show, Eq, Ord, Hash
, (%) -- :: Name -> Int -> Name
, name, prim -- :: String -> Name
, strip -- :: Name -> Name
-- names
, vr, sk, dp, sp, tr, dm, un, df, el, eq
, isSimpleName
, isSkolemnName
, isEltName
, getIndex
)
where
{-
Paradox/Equinox -- Copyright (c) 2003-2007, Koen Claessen, Niklas Sorensson
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-}
import Str
---------------------------------------------------------------------------
-- name
-- | A name is either a user-level name, an internal \"primitive\" name
-- (shown with a leading @$@), or a name tagged with a numeric index
-- (shown with a @!@ separator).  Indices can nest.
data Name
  = Name !Str
  | Prim !Str
  | !Name :% Int
 deriving ( Eq, Ord )
instance Show Name where
  show (Name a)  = show a
  show (Prim a)  = "$" ++ show a
  show (nm :% i) = show nm ++ "!" ++ show i
-- functions
-- | Build an ordinary name from a string.
name :: String -> Name
name = Name . str
-- | Build a primitive (internal) name from a string.
prim :: String -> Name
prim = Prim . str
-- | Attach a numeric index to a name.
(%) :: Name -> Int -> Name
nm % i = nm :% i
-- | Drop all numeric indices, recovering the underlying base name.
strip :: Name -> Name
strip nm =
  case nm of
    inner :% _ -> strip inner
    base       -> base
-- internal names
-- Well-known primitive names used throughout the prover: variables,
-- equality, skolem functions, etc.  'el' is deliberately the empty
-- plain name (used to tag domain elements).
vr = prim "X"
eq = prim "eq"
sk = prim "sk"
dp = prim "dp"
sp = prim "sp"
tr = prim "truth"
dm = prim "dm"
un = prim "un"
df = prim "df"
el = name ""
-- Test a predicate against a name, also looking through any numeric
-- indices wrapped around it.
isName :: (Name -> Bool) -> Name -> Bool
isName p nm | p nm = True
isName p (nm :% _) = isName p nm
isName _ _ = False
-- | True only for plain, unindexed, non-primitive names.
isSimpleName :: Name -> Bool
isSimpleName nm =
  case nm of
    Name _ -> True
    _      -> False
-- (Exported spelling "isSkolemnName" is kept for compatibility.)
isSkolemnName :: Name -> Bool
isSkolemnName = isName (== sk)
isEltName :: Name -> Bool
isEltName = isName (== el)
-- | The outermost numeric index of a name, or 0 if it has none.
getIndex :: Name -> Int
getIndex nm =
  case nm of
    _ :% i -> i
    _      -> 0
---------------------------------------------------------------------------
-- the end.
|
msakai/folkung
|
Haskell/Name.hs
|
mit
| 2,500
| 0
| 8
| 562
| 536
| 289
| 247
| 56
| 1
|
module Dice.TestInstances where
import Test.QuickCheck
import Control.Monad.Random
import D20.Dice
-- Orphan instances for the Die enumeration (defined in D20.Dice), used
-- only to drive QuickCheck generation in the test suite.
instance Bounded Die where
  minBound = Four
  maxBound = Percentile
-- Uniform random generation over the enumeration, going through the
-- Enum instance in both directions.
instance Random Die where
  random g =
    case randomR (fromEnum (minBound :: Die),fromEnum (maxBound :: Die)) g of
      (r,g') -> (toEnum r,g')
  randomR (a,b) g =
    case randomR (fromEnum a,fromEnum b) g of
      (r,g') -> (toEnum r,g')
-- Arbitrary dice are chosen uniformly from the Bounded range.
instance Arbitrary Die where
  arbitrary = choose (minBound,maxBound)
|
elkorn/d20
|
test/Properties/Dice/TestInstances.hs
|
mit
| 486
| 0
| 10
| 99
| 190
| 105
| 85
| 16
| 0
|
{-# LANGUAGE NoImplicitPrelude #-}
module IHaskell.CSS (ihaskellCSS) where
import IHaskellPrelude
-- | The CSS injected into the notebook frontend by IHaskell: styling
-- for Hoogle results in the pager, the basic type/error displays, code
-- awaiting highlighting, and hlint suggestions.  Kept as a list of
-- lines joined with 'unlines'.
ihaskellCSS :: String
ihaskellCSS =
  unlines
    [
    -- Custom IHaskell CSS
      "/* Styles used for the Hoogle display in the pager */"
    , ".hoogle-doc {"
    , "display: block;"
    , "padding-bottom: 1.3em;"
    , "padding-left: 0.4em;"
    , "}"
    , ".hoogle-code {"
    , "display: block;"
    , "font-family: monospace;"
    , "white-space: pre;"
    , "}"
    , ".hoogle-text {"
    , "display: block;"
    , "}"
    , ".hoogle-name {"
    , "color: green;"
    , "font-weight: bold;"
    , "}"
    , ".hoogle-head {"
    , "font-weight: bold;"
    , "}"
    , ".hoogle-sub {"
    , "display: block;"
    , "margin-left: 0.4em;"
    , "}"
    , ".hoogle-package {"
    , "font-weight: bold;"
    , "font-style: italic;"
    , "}"
    , ".hoogle-module {"
    , "font-weight: bold;"
    , "}"
    , ".hoogle-class {"
    , "font-weight: bold;"
    , "}"
    ,
    -- Styles used for basic displays
      ".get-type {"
    , "color: green;"
    , "font-weight: bold;"
    , "font-family: monospace;"
    , "display: block;"
    , "white-space: pre-wrap;"
    , "}"
    , ".show-type {"
    , "color: green;"
    , "font-weight: bold;"
    , "font-family: monospace;"
    , "margin-left: 1em;"
    , "}"
    , ".mono {"
    , "font-family: monospace;"
    , "display: block;"
    , "}"
    , ".err-msg {"
    , "color: red;"
    , "font-style: italic;"
    , "font-family: monospace;"
    , "white-space: pre;"
    , "display: block;"
    , "}"
    , "#unshowable {"
    , "color: red;"
    , "font-weight: bold;"
    , "}"
    , ".err-msg.in.collapse {"
    , "padding-top: 0.7em;"
    , "}"
    ,
    -- Code that will get highlighted before it is highlighted
      ".highlight-code {"
    , "white-space: pre;"
    , "font-family: monospace;"
    , "}"
    ,
    -- Hlint styles
      ".suggestion-warning { "
    , "font-weight: bold;"
    , "color: rgb(200, 130, 0);"
    , "}"
    , ".suggestion-error { "
    , "font-weight: bold;"
    , "color: red;"
    , "}"
    , ".suggestion-name {"
    , "font-weight: bold;"
    , "}"
    ]
|
gibiansky/IHaskell
|
src/IHaskell/CSS.hs
|
mit
| 2,154
| 0
| 6
| 651
| 275
| 183
| 92
| 88
| 1
|
-- :l codes/haskell/Scene.hs
module Scene
where
import Geom
-- Camera
-- Maximum ray-recursion depth.
maxdepth = 9
-- View reference point (eye position).
vrp = [0.0, 0.0, (-20.0)]
-- View-plane window bounds in world coordinates.
xleft = (-12)
xright = 12
ytop = 9
ybottom = (-9)
-- Screen resolution in pixels.
swidth = 640
sheight = 480
-- World-space size of one pixel.
dx = (xright - xleft) / swidth
dy = (ytop - ybottom) / sheight
-- Map a pixel index to the world coordinate of its centre.
xcoord px = xleft + (0.5 + px) * dx
ycoord py = ytop - (0.5 + py) * dy
-- minimum distance CHECK
sdist = 0.0001
-- datatype defs
-- | Surface material: RGB colour and ambient term, plus scalar
-- coefficients for diffuse, reflection, transparency, index of
-- refraction, specular strength and the Phong exponent.
data Material = Mat {color, am :: [Double], df, ref, t, ior, spec, phong :: Double}
                deriving (Eq)
instance Show Material where
  show s = show (color s)
-- Materials are ordered by their diffuse coefficient.  The original
-- instance defined only (<) and (>); Ord's minimal complete definition
-- is 'compare' or (<=), and with neither given, the class defaults for
-- compare/min/max/(<=) are mutually recursive, so e.g. 'minimum' over
-- Materials looped forever.  Defining 'compare' fixes that while
-- leaving the behaviour of (<) and (>) unchanged.
instance Ord Material where
  compare x y = compare (df x) (df y)
-- | The RGB colour of a material.
getMaterial :: Material -> [Double]
getMaterial c = color c
-- | A sphere: scene-object id, centre, radius and material.
data Sphere = Sp {spref :: Int, center :: [Double], rad :: Double, mat :: Material}
              deriving (Eq)
instance Show Sphere where
  show s = show (center s)
-- Spheres are ordered by radius.  Defining 'compare' (instead of only
-- (<), as before) satisfies Ord's minimal complete definition; with
-- only (<), calling compare/min/minimum would loop via the circular
-- class defaults.
instance Ord Sphere where
  compare x y = compare (rad x) (rad y)
-- | The colour of a sphere's material.
getSColor :: Sphere -> [Double]
getSColor c = color (mat c)
-- | An infinite plane: scene-object id, normal vector, plane offset @d@
-- and material.
data Plane = Pl {plref :: Int, normal :: [Double], d :: Double, matp :: Material}
             deriving (Eq)
instance Show Plane where
  show p = show (normal p)
-- Planes are ordered by offset.  As with Material/Sphere, 'compare' is
-- defined explicitly because an instance with only (<) does not satisfy
-- Ord's minimal complete definition and loops through the defaults.
instance Ord Plane where
  compare x y = compare (d x) (d y)
-- | A ray/surface hit record: intersection point, surface normal,
-- distance along the ray, and the id of the object hit.
data HitCo = Hc {vi, vsn :: [Double], dist :: Double, insref :: Int}
             deriving (Eq)
instance Show HitCo where
  show h = show [(vi h),(vsn h),[(dist h)]]
-- Hits are ordered by distance.  The original instance defined only (<)
-- and (>); without 'compare' (or (<=)) the Ord defaults are circular,
-- so e.g. 'minimum' over hit records diverged.  Defining 'compare'
-- fixes that and keeps (<)/(>) behaviour identical.
instance Ord HitCo where
  compare x y = compare (dist x) (dist y)
-- | Minimum of a non-empty list of pairs, comparing by first component
-- only.  The original recursed into Prelude's 'minimum', which compares
-- whole pairs — inconsistent with the fst-only comparison here, and it
-- diverged whenever the snd component's Ord instance was incomplete
-- (as HitCo's was).  Recursing into 'minimumhc' keeps the comparison
-- consistently on fst.
minimumhc :: Ord a => [(a, b)] -> (a, b)
minimumhc [] = error "Prelude.minimum: empty list"
minimumhc [x] = x
minimumhc (x:y:xs) = if (fst x) < (fst y) then minimumhc (x:xs)
                                          else minimumhc (y:xs)
-- Neutral "zero" material and hit record, used as placeholders.
nullmat = Mat [0,0,0] [0,0,0] 0 0 0 0 0 0
nullhc = Hc [0,0,0] [0,0,0] 0 0
-- | A point light: position, colour, normalised direction and a Warn
-- exponent controlling the falloff.
data Light = Li {coord, lcolor, nl :: [Double], warnc :: Double}
             deriving (Eq)
-- Lights
--lights = [[(-10.0),15.0,0.0]]
-- The single scene light; vmults/vnorm come from Geom.
lights = [Li [-20,45,0] (vmults 3 [1,1,1]) (vnorm [20,-45,0]) 16]
bg_color = [0.2,0.7,1.0]
--- objects
--spheres = []
-- Scene geometry: five spheres and one ground plane, each with its own
-- material.
spheres = [Sp 100 [0, 0, 12] 5.0 (Mat [1.0,0,0] [0.2,0,0] 0.6 0.2 0 1 0.8 40)]
       ++ [Sp 101 [-9,6,8] 4.0 (Mat [0,1.0,0] [0,0.1,0] 0.3 0.4 0 1 0.3 10)]
       ++ [Sp 102 [-4,-3,0] 4.0 (Mat [0,0.3,1.0] [0.2,0.2,0.2] 0.8 0 0 1 0.15 5)]
       ++ [Sp 103 [3,4,3] 2.9 (Mat [0.4,0.8,1.0] [0,0,0] 0.2 0.1 0.8 1.33 0.95 60)]
       ++ [Sp 104 [-1,2,3] 1.0 (Mat [1,1,1] [0,0,0] 0.05 0 1 1.7 1 100)]
--planes = []
planes = [Pl 20 [0, 1, 0] (-6) (Mat [1.0,0.5,0] [0.1,0.2,0.2] 0.6 0 0 1 0.2 7)]
|
pnex2000/hastracer
|
Scene.hs
|
mit
| 2,553
| 10
| 13
| 641
| 1,376
| 778
| 598
| 61
| 2
|
{- |
Module : $Header$
Description : The entrypoint of the application.
Author : Nils 'bash0r' Jonsson
Copyright : (c) 2015 Nils 'bash0r' Jonsson
License : MIT
Maintainer : aka.bash0r@gmail.com
Stability : unstable
Portability : non-portable (Portability is untested.)
The entrypoint of the application.
-}
module Main
( main
) where
import Control.Applicative
import Control.Monad
import System.Environment
import qualified Headergen.Commands.Creation as Creation
import qualified Headergen.Commands.Help as Help
import qualified Headergen.Commands.Initialization as Initialization
-- | Entry point: route the first command-line word to the matching
-- subcommand handler, passing the remaining arguments along.  Anything
-- unrecognised (including no arguments) shows the help.
main :: IO ()
main = getArgs >>= dispatch
  where
    dispatch ("init"   : rest) = Initialization.command rest
    dispatch ("create" : rest) = Creation.command rest
    dispatch ("help"   : rest) = Help.command rest
    dispatch _                 = Help.command []
|
aka-bash0r/headergen
|
src/Main.hs
|
mit
| 960
| 0
| 9
| 220
| 157
| 88
| 69
| 16
| 4
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
module Main (main) where
import Control.Monad.Writer (execWriter, tell)
import qualified Data.Configurator as Config
import Data.Text (Text)
import qualified Data.Text as Text
import Options.Applicative
import System.Directory
import System.FilePath ((</>))
import TBR.Core
import TBR.Monad
import TBR.Script
--------------------------------------------------------------------------------
-- Argument parsing and dispatch
-- | Lift a 'String' into any monad as 'Text'; used as an
-- optparse-applicative argument reader.
text :: Monad m => String -> m Text
text s = return (Text.pack s)
-- | Resolve the runtime configuration: reads the optional
-- @~/.tbr/config@ file, falling back to @~/.tbr/tbr.txt@ for the
-- target path, and pairs it with the parsed subcommand.
readConfiguration :: Argument -> IO (Command, Configuration)
readConfiguration Argument{..} = runScriptT . scriptIO $ do
    home <- getHomeDirectory
    let defaultTarget = home </> ".tbr" </> "tbr.txt"
    config <- Config.load [Config.Optional "$(HOME)/.tbr/config"]
    (argCommand,) <$>
        (Configuration <$> Config.lookupDefault
                            defaultTarget config "tbr.target"
                       <*> pure argDryRun)
-- | Dispatch the correct subcommand based on the options.
-- Each Command constructor maps 1:1 onto a TBR.Core action, run inside
-- the BooksM monad with the resolved configuration.
dispatch :: Command -> Configuration -> IO ()
dispatch cmd = flip runBooksM $
    case cmd of
        Add{..}    -> add addTitle addAuthor addList
        Finish{..} -> finish finishQuery
        List{..}   -> list listList
        Move{..}   -> move moveQuery moveList
        Random{..} -> random randomList
        Remove{..} -> remove removeQuery
        Search{..} -> search searchQuery
        Start{..}  -> start startQuery
        Status     -> status
        Stop{..}   -> stop stopQuery stopList
-- | Represents the subcommands offered by the program.
-- Maybe-typed fields are optional on the command line; the
-- corresponding core action picks a default (e.g. the current list).
data Command = Add    { addTitle    :: Text
                      , addAuthor   :: Text
                      , addList     :: Maybe Text }
             | Finish { finishQuery :: Maybe Text }
             | List   { listList    :: Maybe Text }
             | Move   { moveQuery   :: Text
                      , moveList    :: Text }
             | Random { randomList  :: Maybe Text }
             | Remove { removeQuery :: Text }
             | Search { searchQuery :: Text }
             | Start  { startQuery  :: Text }
             | Status
             | Stop   { stopQuery   :: Maybe Text
                      , stopList    :: Maybe Text }
    deriving (Show, Eq)
-- | Represents all the command line arguments accepted by the program.
data Argument = Argument { argDryRun  :: Bool
                         , argCommand :: Command
                         } deriving (Show, Eq)
-- One optparse-applicative parser per subcommand; they share the two
-- positional argument readers below.
addParser, finishParser, listParser, moveParser, randomParser, removeParser,
    searchParser, startParser, stopParser :: Parser Command
-- | Positional LIST argument.
listArgument :: Parser Text
listArgument = argument text (metavar "LIST")
-- | Positional QUERY argument.
queryParser :: Parser Text
queryParser = argument text (metavar "QUERY")
addParser    = Add    <$> argument text (metavar "TITLE")
                      <*> argument text (metavar "AUTHOR")
                      <*> optional listArgument
finishParser = Finish <$> optional queryParser
listParser   = List   <$> optional listArgument
moveParser   = Move   <$> queryParser
                      <*> listArgument
randomParser = Random <$> optional listArgument
removeParser = Remove <$> queryParser
searchParser = Search <$> queryParser
startParser  = Start  <$> queryParser
stopParser   = Stop   <$> optional queryParser
                      <*> optional listArgument
-- | Parser for all subcommands.
-- Built with a Writer so each 'cmd' line just appends one subcommand
-- (name, parser, description) to the subparser.
commandParser :: Parser Command
commandParser = subparser . execWriter $ do
    cmd addParser     "add"     "Add a book."
    cmd finishParser  "finish"  "Mark a book finished."
    cmd listParser    "list"    "List all books to be read."
    cmd moveParser    "move"    "Move a book between lists."
    cmd randomParser  "random"  "Suggest a random book."
    cmd removeParser  "remove"  "Remove a book."
    cmd searchParser  "search"  "Search for a book in the list."
    cmd startParser   "start"   "Start reading a book."
    cmd (pure Status) "status"  "Show reading status."
    cmd stopParser    "stop"    "Stop reading a book."
  where
    cmd parser name desc = tell $ command name (info parser $ progDesc desc)
-- | Build the complete command line argument parser.
argumentParser :: Parser Argument
argumentParser = Argument
    <$> switch (long "dry-run" <> short 'n' <> help "Don't change the file.")
    <*> commandParser
-- | Entry point: parse the command line, resolve configuration, then
-- run the selected subcommand.
main :: IO ()
main = execParser (info (helper <*> argumentParser)
                        (fullDesc <> header "A tool to maintain reading lists."))
   >>= readConfiguration
   >>= uncurry dispatch
|
abhinav/tbr
|
app/main.hs
|
mit
| 4,729
| 0
| 13
| 1,417
| 1,091
| 573
| 518
| 97
| 10
|
module Triangle (TriangleType(..), triangleType) where
import Data.List (sort)
data TriangleType = Equilateral
                  | Isosceles
                  | Scalene
                  | Illegal
                  deriving (Eq, Show)
-- | Classify a triangle by its three side lengths.  The sides are
-- sorted first, so the checks only need to look at the ordered triple
-- (shortest, middle, longest): non-positive sides or a violated
-- triangle inequality are Illegal, all-equal is Equilateral, any two
-- equal is Isosceles, otherwise Scalene.
triangleType :: (Ord a, Num a) => a -> a -> a -> TriangleType
triangleType a b c =
  case sort [a, b, c] of
    [x, y, z]
      | x <= 0 || x + y < z -> Illegal
      | x == z              -> Equilateral
      | x == y || y == z    -> Isosceles
      | otherwise           -> Scalene
    _ -> Illegal -- unreachable: the list above always has three elements
|
genos/online_problems
|
exercism/haskell/triangle/src/Triangle.hs
|
mit
| 545
| 0
| 11
| 240
| 191
| 103
| 88
| 13
| 1
|
module Bindings.ESKit (OptimisationProblem(OptimisationProblem),
DistributionHandler(CMAHandler, SepCMAHandler, CSAHandler),
n, mu, lambda, fitnessFunction, distributionHandlerSettings,
optimize)
where
import qualified Bindings.ESKitLowlevel as LL
import Control.Monad
import Data.Maybe
import Foreign
import Foreign.C.Types
-- | A black-box optimisation problem: dimensionality @n@, optional
-- population parameters @mu@ and @lambda@ (when Nothing, the native
-- optimizer's defaults are read back and used), the fitness function to
-- evaluate candidate points, and the point-distribution strategy.
data OptimisationProblem = OptimisationProblem {
      n :: Int,
      mu :: Maybe Int,
      lambda :: Maybe Int,
      fitnessFunction :: [Double] -> Double,
      distributionHandlerSettings :: DistributionHandler
    };
-- | The ESKit distribution strategies; each carries a pair of sigma
-- bounds, destructured as (sigmalow, sigmahigh) in 'setupDistribHandler'.
data DistributionHandler =
  CMAHandler (Double, Double) |
  SepCMAHandler (Double, Double) |
  CSAHandler (Double, Double)
-- | Allocate and configure the native distribution-handler object for
-- the chosen strategy, attach it to the optimizer, and return it as an
-- opaque pointer.  Must be released with 'destroyDistribHandler' using
-- the same DistributionHandler constructor.
setupDistribHandler :: CSize -> Ptr LL.C'ekOptimizer -> DistributionHandler -> IO (Ptr CChar)
setupDistribHandler np eo (CMAHandler (sigmalow, sigmahigh)) = do
  p <- LL.c'mkekCMA np
  LL.c'ekCMA_setSigma p (realToFrac sigmahigh) (realToFrac sigmalow)
  LL.c'ekCMA_setOptimizer p eo
  return $ castPtr p
setupDistribHandler np eo (SepCMAHandler (sigmalow, sigmahigh)) = do
  p <- LL.c'mkekSepCMA np
  LL.c'ekSepCMA_setSigma p (realToFrac sigmahigh) (realToFrac sigmalow)
  LL.c'ekSepCMA_setOptimizer p eo
  return $ castPtr p
setupDistribHandler np eo (CSAHandler (sigmalow, sigmahigh)) = do
  p <- LL.c'mkekCSA np
  LL.c'ekCSA_setSigma p (realToFrac sigmahigh) (realToFrac sigmalow)
  LL.c'ekCSA_setOptimizer p eo
  return $ castPtr p
-- | Release the native distribution-handler object allocated by
-- 'setupDistribHandler', dispatching on the handler kind so the
-- matching C destructor is called.
destroyDistribHandler :: Ptr CChar -> DistributionHandler -> IO ()
destroyDistribHandler dh handler =
  case handler of
    CMAHandler _    -> LL.c'delekCMA (castPtr dh)
    SepCMAHandler _ -> LL.c'delekSepCMA (castPtr dh)
    CSAHandler _    -> LL.c'delekCSA (castPtr dh)
-- | Run the evolution-strategy optimisation loop and return the best
-- point found.
--
-- NOTE(review): this wraps a stateful FFI loop in 'unsafePerformIO';
-- that is only defensible if the native optimizer is deterministic and
-- the allocated objects never escape — worth confirming against the
-- ESKit C API.  The Left branch of the result is currently dead code
-- (see the commented-out stop-criterion analysis below).
optimize :: OptimisationProblem -> Either String [Double]
optimize (OptimisationProblem { n = np, mu = mmu, lambda = mlambda,
                                fitnessFunction = fitness,
                                distributionHandlerSettings = dhsettings }) =
  unsafePerformIO $ do
    eo <- LL.c'mkekOptimizer (fromIntegral np)
    dh <- setupDistribHandler (fromIntegral np) eo dhsettings
    eod <- peek eo
    -- When mu/lambda are not given, fall back to the values the native
    -- optimizer was constructed with.
    let lambdav = (flip fromMaybe (fmap fromIntegral mlambda) (LL.c'ekOptimizer'lambda eod))
    (LL.c'ekOptimizer_setMuLambda eo)
      (flip fromMaybe (fmap fromIntegral mmu) (LL.c'ekOptimizer'mu eod))
      lambdav
    LL.c'ekOptimizer_start eo
    -- One generation per iteration: sample lambda points, write each
    -- point's fitness back into the native structure, update, and ask
    -- the optimizer whether a stop criterion fired.
    let continueOptimization =
          do
            LL.c'ekOptimizer_sampleCloud eo
            allPts <- liftM LL.c'ekOptimizer'points (peek eo)
            forM_ [0..((fromIntegral lambdav)-1)] $ \i ->
              do
                ptp <- peekElemOff allPts i
                pt <- peek ptp
                let xcoordp = LL.c'ekPoint'x pt
                xcoord <- mapM (liftM (realToFrac :: CDouble -> Double) . peekElemOff xcoordp) [0..(np-1)]
                poke ptp (pt {LL.c'ekPoint'fitness = realToFrac $ fitness xcoord })
            LL.c'ekOptimizer_update eo
            stop <- LL.c'ekOptimizer_stop eo
            if stop == 0
              then
                continueOptimization
              else
                return stop
    whyStop <- continueOptimization
    destroyDistribHandler dh dhsettings
    ret <-
      do
        bestPoint <- liftM LL.c'ekOptimizer'bestPoint (peek eo)
        let xcoordp = LL.c'ekPoint'x bestPoint
        xcoord <- mapM (liftM (realToFrac :: CDouble -> Double) . peekElemOff xcoordp) [0..(np-1)]
        return $ Right xcoord
{-
         case ()
           of
             () | whyStop == LL.c'ekStopCriterionId_LowSigma ->
             () | whyStop == LL.c'ekStopCriterionId_DistributionNotSet ->
                    return $ Left "No point distribution handler has been associated with the opimiser"
             () | whyStop == LL.c'ekStopCriterionId_NoEffectAxis ->
                    return $ Left "Axes of the Gaussian distribution are beyond what numerical precision can handle"
             () | whyStop == LL.c'ekStopCriterionId_NoEffectCoord ->
                    return $ Left "Eigen vector basis of the Gaussian distribution are beyond what numerical precision can handle"
             () | whyStop == LL.c'ekStopCriterionId_ConditionCov ->
                    return $ Left "Covariance matrix conditionning is beyond what numerical precision can handle."
             () ->
                    return $ Left "The eigenvalue solver failed."
-}
    LL.c'delekOptimizer eo
    return ret
{-
To do: tests:
optimize (OptimisationProblem { n = 2, mu = Nothing, lambda = Nothing, fitnessFunction = \(x:y:[]) -> (1-x)^2 + 100 * (y - x^2)^2, distributionHandlerSettings = CMAHandler (0.0001,1000)})
=> Should be close to [1,1]
optimize (OptimisationProblem { n = 1, mu = Nothing, lambda = Nothing, fitnessFunction = \(x:[]) -> abs((x-100) * (x - 200) * (x - 300) * (x + 400)), distributionHandlerSettings = CMAHandler (0.0001,1000)})
=> Several solutions (obviously).
-}
|
A1kmm/bindings-eskit
|
src/Bindings/ESKit.hs
|
mit
| 5,402
| 0
| 23
| 1,717
| 1,080
| 540
| 540
| 85
| 2
|
module Data.FixedWidth.LargeFile where
import qualified Data.Text as StrictText
import qualified Data.Text.Lazy as LazyText
import qualified Data.Text.Lazy.IO as LazyTextIO
import System.IO (Handle, IOMode(..), openFile)
-- | Read every line available on the handle as strict 'StrictText.Text'.
-- Implemented over 'LazyTextIO.hGetContents', i.e. lazy I/O: the handle
-- is consumed incrementally, so it must not be closed until the
-- resulting list has been fully forced.
hGetLines :: Handle -> IO [StrictText.Text]
hGetLines handle =
  fmap (map LazyText.toStrict . LazyText.lines)
       (LazyTextIO.hGetContents handle)
-- | Open the named file for reading and return its lines as strict text.
-- The handle is left open (lazy I/O via 'hGetLines').
getLines :: String -> IO [StrictText.Text]
getLines filename = hGetLines =<< openFile filename ReadMode
|
michaelochurch/fixedwidth-hs
|
Data/FixedWidth/LargeFile.hs
|
mit
| 498
| 0
| 11
| 64
| 147
| 84
| 63
| 11
| 1
|
module Chapter05.Arith3Broken where
main :: IO ()
main = do
  -- Print each arithmetic result on its own line: 3, 10, 1, 4.
  mapM_ print [1 + 2, 10, negate (-1), 5 + blah]
  where
    blah = negate 1
-- print x where x = (negate -1)
-- let x = (negate -1)
-- print x
-- print x where x = (negate -1)
-- print y where
-- blah = negate 1
-- y = ((+) 0 blah)
|
brodyberg/LearnHaskell
|
HaskellProgramming.hsproj/Chapter05/Arith3Broken.hs
|
mit
| 373
| 0
| 11
| 129
| 91
| 49
| 42
| 8
| 1
|
-- |
module Main where
-- | @triangle n@ returns the nth (1-indexed) row of Pascal's triangle.
--
-- Each interior entry is the sum of the two adjacent entries of the
-- previous row, computed with 'zipWith' instead of repeated @(!!)@
-- indexing (the original was O(k^2) per row).  The explicit @n == 2@
-- base case was redundant and has been dropped: @zipWith (+) [1] []@
-- is @[]@, giving @[1, 1]@ for free.
--
-- Like the original, this diverges for @n < 1@.
triangle :: Int -> [Int]
triangle 1 = [1]
triangle n = let prev = triangle (n - 1)
             in 1 : zipWith (+) prev (tail prev) ++ [1]
main :: IO ()
main = do
  n <- readLn
  mapM_ (putStrLn . rowText) [1 .. n]
  where
    -- Render one row; every number is followed by a single space
    -- (including the last one), matching the original output exactly.
    rowText = concatMap ((++ " ") . show) . triangle
|
jerryzh168/hackerrank
|
FunctionalProgramming/Recursion/PascalsTriangle.hs
|
mit
| 391
| 0
| 20
| 129
| 228
| 119
| 109
| 12
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Y2017.M10.D09.Solution where
{--
So, 'yesterday' we saved our data out as JSON, read it back in, and then
visualized it. The data was the NYT article archive, and, doing that, we
saw we had a lot of data to visualize.
Today, let's pare that down a smidge.
--}
import Control.Arrow (second)
import Data.Aeson
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.List (sortOn)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromJust)
import Data.Ord
import Data.Set (Set)
import qualified Data.Set as Set
-- below imports available via 1HaskellADay git repository
import Data.Hierarchy -- hint: use this to visualize your results
import Store.SQL.Util.Pivots
import Y2017.M10.D04.Solution
import Y2017.M10.D05.Solution
-- From the grouping that you formed before (so, you have to reformulate that
-- grouping again, today), pare it down to the top 5 topics, that is to say:
-- the topics that have the most articles
-- | Article count per topic: every topic in the grouping paired with
-- the total number of articles filed under it.
topicality :: Grouping -> [(Topic, Int)]
topicality grouping = [ topicCount entry | entry <- Map.toList grouping ]
-- | Pair the key with the total number of elements across all the
-- lists stored in the inner map (i.e. the article count for a topic).
topicCount :: (a, Map b [c]) -> (a, Int)
topicCount (key, inner) = (key, sum (map length (Map.elems inner)))
{--
>>> subjs <- (rows . fromJust . decode <$> BL.readFile "Y2017/M10/D05/subj.json") :: IO [Subject]
>>> pivs <- (rows . fromJust . decode <$> BL.readFile "Y2017/M10/D05/art-subj.json") :: IO [Pivot]
>>> arts <- (rows . fromJust . decode <$> BL.readFile "Y2017/M10/D05/art.json") :: IO [ArticleSummary]
>>> let grp = graphTopics subjs pivs arts
>>> let tops = topicality grp
>>> let top5 = take 5 (sortOn (Down . snd) tops)
>>> top5
[("Social networks",42),("Presidents",30),("Hurricanes",25),
("Books",19),("Floods",16)]
--}
-- Now that you have topicality, reform the group to contain only those
-- top 5 topics (and no other topics)

-- | Restrict a grouping to the given set of topics, discarding every
-- other key.  Filtering the map in place avoids the original
-- toList\/filter\/fromList round trip, which rebuilt the whole map in
-- O(n log n) for no benefit.
reformGrouping :: Set Topic -> Grouping -> Grouping
reformGrouping tops = Map.filterWithKey (\topic _ -> topic `Set.member` tops)
{--
>>> let subgrps = reformGrouping (Set.fromList (map fst top5)) grp
>>> length subgrps
5
--}
-- Great! Now let's visualize that subset. What do you get? Tweet your results
{--
>>> visualize "Y2017/M10/D09/topics.json" subgrps
$ php -S 127.0.0.1:8080 &
... and we see the concentric circle representation Y2017/M10/D09/top5-topics.png
--}
{-- BONUS -----------------------------------------------------------------
* What are the topics of the NYT archive slice that have 10 or more articles?
* How many topic make the cut of ten or more articles?
* Chart those topics
--}
-- | Topics with ten or more articles, most-covered first.  Relies on
-- the list being sorted descending so 'takeWhile' can cut off early.
top10sTopics :: Grouping -> [(Topic, Int)]
top10sTopics grouping =
  takeWhile (\(_, count) -> count >= 10) (sortOn (Down . snd) (topicality grouping))
{--
>>> let top10s = top10sTopics grp
>>> length top10s
21
>>> top10s
[("Social networks",42),("Presidents",30),("Hurricanes",25),("Books",19),
("Floods",16),("Aliens",15),("Immigration policy",15),("Motion pictures",15),
("Theater",14),("White supremacists",14),("Bills",13),("Storm damage",13),
("Students",13),("Families & family life",12),("Tournaments & championships",12),
("Deportation",11),("Politics",11),("Tennis",11),("Art galleries & museums",10),
("Criminal investigations",10),("Political parties",10)]
>>> let subgrps = reformGrouping (Set.fromList (map fst top10s)) grp
>>> visualize "Y2017/M10/D09/top10-topics.json" subgrps
And now we see the new presentation. NOICE!
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2017/M10/D09/Solution.hs
|
mit
| 3,424
| 0
| 11
| 513
| 326
| 199
| 127
| 25
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Main
where
import qualified Data.ByteString.Char8 as B
import Control.Monad.State
import Data.Conduit
import Data.Conduit.Network
import Network.Socket (withSocketsDo)
-- Mostly from: http://www.yesodweb.com/blog/2014/03/network-conduit-async
-- Interactive demo: prints usage instructions, waits for <ENTER>, then
-- proxies data from a TCP client of this server (port 4002) through
-- 'echo' into the conduit-101-02 server on port 4000.
main = do
  putStrLn "-----------------------------------------------"
  putStrLn "EXAMPLE: Reading from one socket and writing to"
  putStrLn "         another socket"
  putStrLn "-----------------------------------------------"
  putStrLn "Start the example server from conduit-101-02 for writing."
  putStrLn ""
  putStrLn "Press <ENTER> when ready to continue."
  putStrLn ""
  putStr "> "
  _ <- getLine
  putStrLn ""
  putStrLn "Use 'telnet 127.0.0.1 4002' or equivalent on port 4002"
  putStrLn "to read data from the socket."
  putStrLn ""
  putStrLn "Writing to the conduit-101-02 server will continue until"
  putStrLn "a newline is entered."
  putStrLn ""
  putStrLn "Press <Ctrl-C> to stop this server."
  putStrLn ""
  putStr "> "
  -- For each incoming connection on 4002, open a client connection to
  -- 4000 and stream: client source -> echo (logs to stdout) -> server sink.
  withSocketsDo $ do
    runTCPServer (serverSettings 4002 "*") $ \client ->
      runTCPClient (clientSettings 4000 "localhost") $ \server -> do
        (appSource client $= echo $$ appSink server)
-- | Pass every chunk through unchanged, additionally writing it to
-- stdout as a side effect.
echo :: ConduitM B.ByteString B.ByteString IO ()
echo = awaitForever $ \chunk -> do
  liftIO (B.putStr chunk)
  yield chunk
|
stormont/conduit-examples
|
examples/conduit-101-04/Main.hs
|
mit
| 1,356
| 0
| 19
| 251
| 293
| 132
| 161
| 37
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-domainname.html
module Stratosphere.Resources.ApiGatewayDomainName where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.ApiGatewayDomainNameEndpointConfiguration
-- | Full data type definition for ApiGatewayDomainName. See
-- 'apiGatewayDomainName' for a more convenient constructor.
data ApiGatewayDomainName =
  ApiGatewayDomainName
  { _apiGatewayDomainNameCertificateArn :: Maybe (Val Text)
    -- ^ optional certificate ARN (CertificateArn)
  , _apiGatewayDomainNameDomainName :: Val Text
    -- ^ required custom domain name (DomainName)
  , _apiGatewayDomainNameEndpointConfiguration :: Maybe ApiGatewayDomainNameEndpointConfiguration
    -- ^ optional endpoint configuration (EndpointConfiguration)
  , _apiGatewayDomainNameRegionalCertificateArn :: Maybe (Val Text)
    -- ^ optional regional certificate ARN (RegionalCertificateArn)
  } deriving (Show, Eq)
-- Serialise to a CloudFormation resource of type
-- "AWS::ApiGateway::DomainName"; unset 'Maybe' fields are dropped from
-- the properties map via 'catMaybes'.
instance ToResourceProperties ApiGatewayDomainName where
  toResourceProperties ApiGatewayDomainName{..} =
    ResourceProperties
    { resourcePropertiesType = "AWS::ApiGateway::DomainName"
    , resourcePropertiesProperties =
        hashMapFromList $ catMaybes
        [ fmap (("CertificateArn",) . toJSON) _apiGatewayDomainNameCertificateArn
        , (Just . ("DomainName",) . toJSON) _apiGatewayDomainNameDomainName
        , fmap (("EndpointConfiguration",) . toJSON) _apiGatewayDomainNameEndpointConfiguration
        , fmap (("RegionalCertificateArn",) . toJSON) _apiGatewayDomainNameRegionalCertificateArn
        ]
    }
-- | Constructor for 'ApiGatewayDomainName' containing required fields as
-- arguments.  All optional fields start as 'Nothing'; set them through
-- the lenses below.
apiGatewayDomainName
  :: Val Text -- ^ 'agdnDomainName'
  -> ApiGatewayDomainName
apiGatewayDomainName domainNamearg =
  ApiGatewayDomainName
  { _apiGatewayDomainNameCertificateArn = Nothing
  , _apiGatewayDomainNameDomainName = domainNamearg
  , _apiGatewayDomainNameEndpointConfiguration = Nothing
  , _apiGatewayDomainNameRegionalCertificateArn = Nothing
  }
-- | Lens for the optional CertificateArn property.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-domainname.html#cfn-apigateway-domainname-certificatearn
agdnCertificateArn :: Lens' ApiGatewayDomainName (Maybe (Val Text))
agdnCertificateArn = lens _apiGatewayDomainNameCertificateArn (\s a -> s { _apiGatewayDomainNameCertificateArn = a })
-- | Lens for the required DomainName property.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-domainname.html#cfn-apigateway-domainname-domainname
agdnDomainName :: Lens' ApiGatewayDomainName (Val Text)
agdnDomainName = lens _apiGatewayDomainNameDomainName (\s a -> s { _apiGatewayDomainNameDomainName = a })
-- | Lens for the optional EndpointConfiguration property.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-domainname.html#cfn-apigateway-domainname-endpointconfiguration
agdnEndpointConfiguration :: Lens' ApiGatewayDomainName (Maybe ApiGatewayDomainNameEndpointConfiguration)
agdnEndpointConfiguration = lens _apiGatewayDomainNameEndpointConfiguration (\s a -> s { _apiGatewayDomainNameEndpointConfiguration = a })
-- | Lens for the optional RegionalCertificateArn property.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-domainname.html#cfn-apigateway-domainname-regionalcertificatearn
agdnRegionalCertificateArn :: Lens' ApiGatewayDomainName (Maybe (Val Text))
agdnRegionalCertificateArn = lens _apiGatewayDomainNameRegionalCertificateArn (\s a -> s { _apiGatewayDomainNameRegionalCertificateArn = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/Resources/ApiGatewayDomainName.hs
|
mit
| 3,387
| 0
| 15
| 352
| 455
| 260
| 195
| 41
| 1
|
{-
Suggest better pragmas
OPTIONS_GHC -cpp => LANGUAGE CPP
OPTIONS_GHC -fglasgow-exts => LANGUAGE ... (in HSE)
OPTIONS_GHC -XFoo => LANGUAGE Foo
LANGUAGE A, A => LANGUAGE A
-- do not do LANGUAGE A, LANGUAGE B to combine
<TEST>
{-# OPTIONS_GHC -cpp #-} -- {-# LANGUAGE CPP #-}
{-# OPTIONS -cpp #-} -- {-# LANGUAGE CPP #-}
{-# OPTIONS_YHC -cpp #-}
{-# OPTIONS_GHC -XFoo #-} -- {-# LANGUAGE Foo #-}
{-# OPTIONS_GHC -fglasgow-exts #-} -- ???
{-# LANGUAGE A, B, C, A #-} -- {-# LANGUAGE A, B, C #-}
{-# LANGUAGE A #-}
{-# OPTIONS_GHC -cpp -foo #-} -- {-# LANGUAGE CPP #-} {-# OPTIONS_GHC -foo #-}
{-# OPTIONS_GHC -cpp #-} \
{-# LANGUAGE CPP, Text #-} --
{-# LANGUAGE A #-} \
{-# LANGUAGE B #-}
{-# LANGUAGE A #-} \
{-# LANGUAGE B, A #-} -- {-# LANGUAGE A, B #-}
</TEST>
-}
module Hint.Pragma where
import Hint.Type
import Data.List
import Data.Maybe
import Util
-- | Module-level hint: report duplicated LANGUAGE extensions and turn
-- OPTIONS\/OPTIONS_GHC flags with LANGUAGE equivalents into a single
-- combined LANGUAGE pragma plus any residual OPTIONS pragmas.
pragmaHint :: ModuHint
pragmaHint _ x = languageDupes lang ++ [pragmaIdea old $ [LanguagePragma an (map toNamed ns2) | ns2 /= []] ++ catMaybes new | old /= []]
    where
        -- NOTE: the comprehension pattern @x@ shadows the module @x@;
        -- @modulePragmas x@ in the generator still refers to the module,
        -- since a generator's source expression is evaluated in the
        -- outer scope.
        lang = [x | x@LanguagePragma{} <- modulePragmas x]
        (old,new,ns) = unzip3 [(old,new,ns) | old <- modulePragmas x, Just (new,ns) <- [optToLanguage old]]
        -- extensions gained from OPTIONS flags, minus those already
        -- declared in an existing LANGUAGE pragma
        ns2 = nub (concat ns) \\ concat [map fromNamed n | LanguagePragma _ n <- lang]
-- | Build the "Use better pragmas" 'Idea' replacing pragmas @xs@ with
-- @ys@, anchored at the first pragma's source location.  Partial on an
-- empty @xs@ ('head'); all call sites in this module guard for that.
pragmaIdea :: [ModulePragma S] -> [ModulePragma S] -> Idea
pragmaIdea xs ys = rawIdea Error "Use better pragmas" (toSrcLoc $ ann $ head xs) (f xs) (f ys) ""
    where f = unlines . map prettyPrint
-- | Find duplicate LANGUAGE extensions: within a single pragma, and
-- across pairs of pragmas that share at least one extension (the pair
-- is suggested to merge into one deduplicated pragma).
languageDupes :: [ModulePragma S] -> [Idea]
languageDupes [] = []
languageDupes (a@(LanguagePragma _ x):xs) =
    (if nub_ x `neqList` x
        then [pragmaIdea [a] [LanguagePragma an $ nub_ x]]
        else [pragmaIdea [a,b] [LanguagePragma an (nub_ $ x ++ y)] | b@(LanguagePragma _ y) <- xs, notNull $ intersect_ x y]) ++
    languageDupes xs
-- | Translate a single GHC command-line flag into the LANGUAGE
-- extensions it enables, or 'Nothing' for flags with no equivalent.
strToLanguage :: String -> Maybe [String]
strToLanguage flag
    | flag == "-cpp" = Just ["CPP"]
    | flag == "-fglasgow-exts" = Just $ map show glasgowExts
    | "-X" `isPrefixOf` flag = Just [drop 2 flag]
    | otherwise = Nothing
-- | If an OPTIONS pragma (toolless, or targeted at GHC) contains flags
-- with LANGUAGE equivalents, return the residual pragma ('Nothing' when
-- every flag converted) paired with the extension names gained.
-- Returns 'Nothing' for non-OPTIONS pragmas, other tools, or when no
-- flag converts (guard failure falls through to the final clause).
optToLanguage :: ModulePragma S -> Maybe (Maybe (ModulePragma S), [String])
optToLanguage (OptionsPragma sl tool val)
    | maybe True (== GHC) tool && any isJust vs = Just (res, concat $ catMaybes vs)
    where
        strs = words val
        vs = map strToLanguage strs
        -- flags that could not be converted survive in a residual pragma
        keep = concat $ zipWith (\v s -> [s | isNothing v]) vs strs
        res = if null keep then Nothing else Just $ OptionsPragma sl tool (unwords keep)
optToLanguage _ = Nothing
|
alphaHeavy/hlint
|
src/Hint/Pragma.hs
|
gpl-2.0
| 2,604
| 0
| 14
| 569
| 770
| 397
| 373
| 33
| 2
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Editor
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- The top level editor state, and operations on it. This is inside an
-- internal module for easy re-export with Yi.Types bits.
module Yi.Editor ( Editor(..), EditorM(..), MonadEditor(..)
, runEditor
, acceptedInputsOtherWindow
, addJumpAtE
, addJumpHereE
, alternateBufferE
, askConfigVariableA
, bufferSet
, buffersA
, closeBufferAndWindowE
, closeBufferE
, closeOtherE
, clrStatus
, commonNamePrefix
, currentBuffer
, currentRegexA
, currentWindowA
, deleteBuffer
, deleteTabE
, emptyEditor
, findBuffer
, findBufferWith
, findBufferWithName
, findWindowWith
, focusWindowE
, getBufferStack
, getBufferWithName
, getBufferWithNameOrCurrent
, getEditorDyn
, getRegE
, jumpBackE
, jumpForwardE
, killringA
, layoutManagerNextVariantE
, layoutManagerPreviousVariantE
, layoutManagersNextE
, layoutManagersPreviousE
, moveTabE
, moveWinNextE
, moveWinPrevE
, newBufferE
, newEmptyBufferE
, newTabE
, newTempBufferE
, newWindowE
, nextTabE
, nextWinE
, pendingEventsA
, prevWinE
, previousTabE
, printMsg
, printMsgs
, printStatus
, pushWinToFirstE
, putEditorDyn
, searchDirectionA
, setDividerPosE
, setRegE
, setStatus
, shiftOtherWindow
, splitE
, statusLine
, statusLineInfo
, statusLinesA
, stringToNewBuffer
, swapWinWithFirstE
, switchToBufferE
, switchToBufferWithNameE
, tabsA
, tryCloseE
, windows
, windowsA
, windowsOnBufferE
, withCurrentBuffer
, withEveryBuffer
, withGivenBuffer
, withGivenBufferAndWindow
, withOtherWindow
, withWindowE
) where
import Control.Applicative
import Control.Lens
import Control.Monad
import Control.Monad.Reader hiding (mapM, forM_ )
import Control.Monad.State hiding (get, put, mapM, forM_)
import Data.Binary
import Data.Default
import qualified Data.DelayList as DelayList
import Data.DynamicState.Serializable
import Data.Foldable hiding (forM_)
import Data.List (delete, (\\))
import Data.List.NonEmpty (fromList, NonEmpty(..), nub)
import qualified Data.List.NonEmpty as NE
import qualified Data.List.PointedList as PL (atEnd, moveTo)
import qualified Data.List.PointedList.Circular as PL
import qualified Data.Map as M
import Data.Maybe
import qualified Data.Monoid as Mon
import Data.Semigroup
import qualified Data.Text as T
import Prelude hiding (foldl,concatMap,foldr,all)
import System.FilePath (splitPath)
import Yi.Buffer
import Yi.Config
import Yi.Interact as I
import Yi.JumpList
import Yi.KillRing
import Yi.Layout
import Yi.Monad
import Yi.Rope (YiString, fromText, empty)
import qualified Yi.Rope as R
import Yi.String
import Yi.Style (defaultStyle)
import Yi.Tab
import Yi.Types
import Yi.Utils hiding ((+~))
import Yi.Window
-- Only the persistent parts of the editor are serialised: buffer stack,
-- buffers, ref supply, tabs, dynamic state, status height, killring and
-- current regex.  Transient fields (status lines, search direction,
-- pending events, close actions) are ignored by 'put' and restored from
-- 'emptyEditor' defaults by 'get' — keep the two ends in sync.
instance Binary Editor where
    put (Editor bss bs supply ts dv _sl msh kr re _dir _ev _cwa ) =
        -- a NonEmpty is written as its head followed by the tail list,
        -- mirroring the (:|) <$> get <*> get in 'get' below
        let putNE (x :| xs) = put x >> put xs
        in putNE bss >> put bs >> put supply >> put ts
           >> put dv >> put msh >> put kr >> put re
    get = do
        bss <- (:|) <$> get <*> get
        bs <- get
        supply <- get
        ts <- get
        dv <- get
        msh <- get
        kr <- get
        re <- get
        return $ emptyEditor { bufferStack = bss
                             , buffers = bs
                             , refSupply = supply
                             , tabs_ = ts
                             , dynamic = dv
                             , maxStatusHeight = msh
                             , killring = kr
                             , currentRegex = re
                             }
-- | The initial state: a single tab holding one window on the "console"
-- scratch buffer.
emptyEditor :: Editor
emptyEditor = Editor
  { buffers = M.singleton (bkey buf) buf
  , tabs_ = PL.singleton tab
  , bufferStack = bkey buf :| []
    -- refs 0 (buffer), 1 (window) and 2 (tab) are taken below, so the
    -- supply of fresh references starts at 3
  , refSupply = 3
  , currentRegex = Nothing
  , searchDirection = Forward
  , dynamic = mempty
  , statusLines = DelayList.insert (maxBound, ([""], defaultStyle)) []
  , killring = krEmpty
  , pendingEvents = []
  , maxStatusHeight = 1
  , onCloseActions = M.empty
  }
  where buf = newB 0 (MemBuffer "console") mempty
        win = (dummyWindow (bkey buf)) { wkey = WindowRef 1 , isMini = False }
        tab = makeTab1 2 win
-- ---------------------------------------------------------------------
-- Generate lenses, suffixed with "A", for every field of 'Editor'.
makeLensesWithSuffix "A" ''Editor
-- | Window list of the currently focused tab.
windows :: Editor -> PL.PointedList Window
windows e = e ^. windowsA
-- | Lens onto the window list of the current tab.
windowsA :: Lens' Editor (PL.PointedList Window)
windowsA = currentTabA . tabWindowsA
-- | Lens onto all tabs; routed through 'fixCurrentBufferA_' so the top
-- of the buffer stack tracks the focused window's buffer.
tabsA :: Lens' Editor (PL.PointedList Tab)
tabsA = fixCurrentBufferA_ . tabs_A
-- | Lens onto the currently focused tab.
currentTabA :: Lens' Editor Tab
currentTabA = tabsA . PL.focus
-- | Read a 'YiConfigVariable' out of the static configuration.
askConfigVariableA :: (YiConfigVariable b, MonadEditor m) => m b
askConfigVariableA = do cfg <- askCfg
                        return $ cfg ^. configVariable
-- ---------------------------------------------------------------------
-- Buffer operations
-- | Allocate a fresh unique reference from the editor's supply.
newRef :: MonadEditor m => m Int
newRef = withEditor (refSupplyA %= (+ 1) >> use refSupplyA)
-- | Allocate a fresh 'BufferRef'.
newBufRef :: MonadEditor m => m BufferRef
newBufRef = liftM BufferRef newRef
-- | Create and fill a new buffer, using contents of string.
-- | Does not focus the window, or make it the current window.
-- | Call newWindowE or switchToBufferE to take care of that.
stringToNewBuffer :: MonadEditor m
                  => BufferId -- ^ The buffer identifier
                  -> YiString -- ^ The contents with which to populate
                              -- the buffer
                  -> m BufferRef
stringToNewBuffer nm cs = withEditor $ do
    u <- newBufRef
    defRegStyle <- configRegionStyle <$> askCfg
    insertBuffer $ newB u nm cs
    m <- asks configFundamentalMode
    -- apply the configured region style and fundamental mode to the
    -- freshly created buffer
    withGivenBuffer u $ do
      putRegionStyle defRegStyle
      setAnyMode m
    return u
-- | Register the buffer in the buffer map and buffer stack.
insertBuffer :: MonadEditor m => FBuffer -> m ()
insertBuffer b = withEditor . modify $ \e ->
    -- insert buffers at the end, so that
    -- "background" buffers do not interfere.
    e { bufferStack = nub (bufferStack e <> (bkey b :| []))
      , buffers = M.insert (bkey b) b (buffers e)}
-- Prevent possible space leaks in the editor structure
-- | Force every element of the container to WHNF, returning it as-is.
forceFold1 :: Foldable t => t a -> t a
forceFold1 x = foldr seq x x
-- | Like 'forceFold1' but forces each 'Tab' via 'forceTab'.
forceFoldTabs :: Foldable t => t Tab -> t Tab
forceFoldTabs x = foldr (seq . forceTab) x x
-- | Delete a buffer (and release resources associated with it).
-- A no-op when the given buffer is the only buffer left.
deleteBuffer :: MonadEditor m => BufferRef -> m ()
deleteBuffer k = withEditor $ do
  -- If the buffer has an associated close action execute that now.
  -- Unless the buffer is the last buffer in the editor. In which case
  -- it cannot be closed and, I think, the close action should not be
  -- applied.
  --
  -- The close actions seem dangerous, but I know of no other simple
  -- way to resolve issues related to what buffer receives actions
  -- after the minibuffer closes.
  gets bufferStack >>= \case
    _ :| [] -> return ()
    _ -> M.lookup k <$> gets onCloseActions
           >>= \m_action -> fromMaybe (return ()) m_action
  -- Now try deleting the buffer. Checking, once again, that it is not
  -- the last buffer.
  bs <- gets bufferStack
  ws <- use windowsA
  case bs of
    b0 :| nextB : _ -> do
      let pickOther w = if bufkey w == k then w {bufkey = other} else w
          visibleBuffers = bufkey <$> toList ws
          -- This ‘head’ always works because we witness that length of
          -- bs ≥ 2 (through case) and ‘delete’ only deletes up to 1
          -- element so we at worst we end up with something like
          -- ‘head $ [] ++ [foo]’ when bs ≡ visibleBuffers
          bs' = NE.toList bs
          other = head $ (bs' \\ visibleBuffers) ++ delete k bs'
      when (b0 == k) $
        -- we delete the currently selected buffer: the next buffer
        -- will become active in the main window, therefore it must be
        -- assigned a new window.
        switchToBufferE nextB
      -- NOTE: This *only* works if not all bufferStack buffers are
      -- equivalent to ‘k’. Assuring that there are no duplicates in
      -- the bufferStack is equivalent in this case because of its
      -- length.
      modify $ \e ->
        e & bufferStackA %~ fromList . forceFold1 . NE.filter (k /=)
          & buffersA %~ M.delete k
          & tabs_A %~ forceFoldTabs . fmap (mapWindows pickOther)
      -- all windows open on that buffer must switch to another
      -- buffer.
      windowsA . mapped . bufAccessListA %= forceFold1 . filter (k /=)
    _ -> return () -- Don't delete the last buffer.
-- | Return the buffers we have, /in no particular order/
bufferSet :: Editor -> [FBuffer]
bufferSet = M.elems . buffers
-- | Return a prefix that can be removed from all buffer paths while
-- keeping them unique.
commonNamePrefix :: Editor -> [FilePath]
commonNamePrefix = commonPrefix . fmap (dropLast . splitPath)
                   . fbufs . fmap (^. identA) . bufferSet
    where dropLast [] = []
          dropLast x = init x
          -- only file-backed buffers contribute a path
          fbufs xs = [ x | FileBuffer x <- xs ]
          -- drop the last component, so that it is never hidden.
-- | The buffer stack resolved to the actual buffers (most recent first).
getBufferStack :: MonadEditor m => m (NonEmpty FBuffer)
getBufferStack = withEditor $ do
    bufMap <- gets buffers
    gets $ fmap (bufMap M.!) . bufferStack
-- | Safe lookup of a buffer by key.
findBuffer :: MonadEditor m => BufferRef -> m (Maybe FBuffer)
findBuffer k = withEditor (gets (M.lookup k . buffers))
-- | Find buffer with this key
-- Partial: calls 'error' when the key is absent; prefer 'findBuffer'
-- when the key is not known to be valid.
findBufferWith :: BufferRef -> Editor -> FBuffer
findBufferWith k e = case M.lookup k (buffers e) of
    Just x  -> x
    Nothing -> error "Editor.findBufferWith: no buffer has this key"
-- | Find buffers with this name
findBufferWithName :: T.Text -> Editor -> [BufferRef]
findBufferWithName n e =
    let bufs = M.elems $ buffers e
        -- names are compared after stripping the common path prefix
        sameIdent b = shortIdentString (length $ commonNamePrefix e) b == n
    in map bkey $ filter sameIdent bufs
-- | Find buffer with given name. Fail if not found.
getBufferWithName :: MonadEditor m => T.Text -> m BufferRef
getBufferWithName bufName = withEditor $ do
  bs <- gets $ findBufferWithName bufName
  case bs of
    []    -> fail ("Buffer not found: " ++ T.unpack bufName)
    b:_   -> return b
-- | Make all buffers visible by splitting the current window list.
-- FIXME: rename to displayAllBuffersE; make sure buffers are not open twice.
openAllBuffersE :: EditorM ()
openAllBuffersE = do
  bs <- gets bufferSet
  forM_ bs $ ((%=) windowsA . PL.insertRight =<<) . newWindowE False . bkey
------------------------------------------------------------------------
-- | Perform action with any given buffer, using the last window that
-- was used for that buffer.
withGivenBuffer :: MonadEditor m => BufferRef -> BufferM a -> m a
withGivenBuffer k f = do
    b <- gets (findBufferWith k)
    withGivenBufferAndWindow (b ^. lastActiveWindowA) k f
-- | Perform action with any given buffer.  Runs the 'BufferM' action,
-- writes the updated buffer back, feeds deletions into the killring
-- (when configured to accumulate), and re-dispatches the updates to any
-- configured buffer-update handlers.
withGivenBufferAndWindow :: MonadEditor m
                         => Window -> BufferRef -> BufferM a -> m a
withGivenBufferAndWindow w k f = withEditor $ do
  accum <- asks configKillringAccumulate
  let edit e = let b = findBufferWith k e
                   (v, us, b') = runBufferFull w b f
               in (e & buffersA .~ mapAdjust' (const b') k (buffers e)
                     & killringA %~
                       -- only pure deletes feed the killring; the folds
                       -- are applied in reverse to preserve order
                       if accum && all updateIsDelete us
                       then foldl (.) id $ reverse [ krPut dir s
                                                   | Delete _ dir s <- us ]
                       else id
                  , (us, v))
  (us, v) <- getsAndModify edit
  updHandler <- return . bufferUpdateHandler =<< ask
  unless (null us || null updHandler) $
    forM_ updHandler (\h -> withGivenBufferAndWindow w k (h us))
  return v
-- | Perform action with current window's buffer
withCurrentBuffer :: MonadEditor m => BufferM a -> m a
withCurrentBuffer f = withEditor $ do
  w <- use currentWindowA
  withGivenBufferAndWindow w (bufkey w) f
-- | Run the action in every buffer on the stack, collecting the results.
withEveryBuffer :: MonadEditor m => BufferM a -> m [a]
withEveryBuffer action =
    withEditor (gets bufferStack) >>= mapM (`withGivenBuffer` action) . NE.toList
-- | Lens onto the focused window of the current tab.
currentWindowA :: Lens' Editor Window
currentWindowA = windowsA . PL.focus
-- | Return the current buffer
currentBuffer :: Editor -> BufferRef
currentBuffer = NE.head . bufferStack
-----------------------
-- Handling of status
-- | Prints a message with 'defaultStyle'.
printMsg :: MonadEditor m => T.Text -> m ()
printMsg s = printStatus ([s], defaultStyle)
-- | Prints all given messages with 'defaultStyle'.
printMsgs :: MonadEditor m => [T.Text] -> m ()
printMsgs s = printStatus (s, defaultStyle)
-- | Show a transient status (display duration 1).
printStatus :: MonadEditor m => Status -> m ()
printStatus = setTmpStatus 1
-- | Set the "background" status line
setStatus :: MonadEditor m => Status -> m ()
setStatus = setTmpStatus maxBound
-- | Clear the status line
clrStatus :: EditorM ()
clrStatus = setStatus ([""], defaultStyle)
-- | Text of the topmost status line.
statusLine :: Editor -> [T.Text]
statusLine = fst . statusLineInfo
-- | Topmost status entry (text and style).
statusLineInfo :: Editor -> Status
statusLineInfo = snd . head . statusLines
-- | Push a status entry with the given display duration, and mirror it
-- into the "messages" buffer (created on first use).
setTmpStatus :: MonadEditor m => Int -> Status -> m ()
setTmpStatus delay s = withEditor $ do
  statusLinesA %= DelayList.insert (delay, s)
  -- also show in the messages buffer, so we don't lose any message
  bs <- gets (filter ((== MemBuffer "messages") . view identA) . M.elems . buffers)
  b <- case bs of
         (b':_) -> return $ bkey b'
         [] -> stringToNewBuffer (MemBuffer "messages") mempty
  let m = listify $ R.fromText <$> fst s
  withGivenBuffer b $ botB >> insertN (m `R.snoc` '\n')
-- ---------------------------------------------------------------------
-- kill-register (vim-style) interface to killring.
--
-- Note that our vim keymap currently has its own registers
-- and doesn't use killring.
-- | Put string into yank register
setRegE :: R.YiString -> EditorM ()
setRegE s = killringA %= krSet s
-- | Return the contents of the yank register
getRegE :: EditorM R.YiString
getRegE = uses killringA krGet
-- ---------------------------------------------------------------------
-- | Dynamically-extensible state components.
--
-- These hooks are used by keymaps to store values that result from
-- Actions (i.e. that result from IO), as opposed to the pure values
-- they generate themselves, and can be stored internally.
--
-- The `dynamic' field is a type-indexed map.
--
-- | Retrieve a value from the extensible state
getEditorDyn :: (MonadEditor m, YiVariable a, Default a, Functor m) => m a
getEditorDyn = fromMaybe def <$> getDyn (use dynamicA) (assign dynamicA)
-- | Insert a value into the extensible state, keyed by its type
putEditorDyn :: (MonadEditor m, YiVariable a, Functor m) => a -> m ()
putEditorDyn = putDyn (use dynamicA) (assign dynamicA)
-- | Like fnewE, create a new buffer filled with the String @s@,
-- Switch the current window to this buffer. Doesn't associate any
-- file with the buffer (unlike fnewE) and so is good for popup
-- internal buffers (like scratch)
newBufferE :: BufferId   -- ^ buffer name
           -> YiString   -- ^ buffer contents
           -> EditorM BufferRef
newBufferE f s = do
    b <- stringToNewBuffer f s
    switchToBufferE b
    return b
-- | Like 'newBufferE' but defaults to empty contents.
newEmptyBufferE :: BufferId -> EditorM BufferRef
newEmptyBufferE f = newBufferE f Yi.Rope.empty
-- | Switch to the nth entry of the current window's buffer access
-- list; fails when there is no such entry.  The @(!!)@ is safe because
-- of the preceding bounds check.
alternateBufferE :: Int -> EditorM ()
alternateBufferE n = do
    Window { bufAccessList = lst } <- use currentWindowA
    if null lst || (length lst - 1) < n
      then fail "no alternate buffer"
      else switchToBufferE $ lst!!n
-- | Create a new zero size window on a given buffer
newZeroSizeWindow :: Bool -> BufferRef -> WindowRef -> Window
newZeroSizeWindow mini bk ref = Window mini bk [] 0 0 emptyRegion ref 0 Nothing
-- | Create a new window onto the given buffer.
newWindowE :: Bool -> BufferRef -> EditorM Window
newWindowE mini bk = newZeroSizeWindow mini bk . WindowRef <$> newRef
-- | Attach the specified buffer to the current window; the previously
-- shown buffer is pushed onto the window's access list.
switchToBufferE :: BufferRef -> EditorM ()
switchToBufferE bk = windowsA . PL.focus %= \w ->
    w & bufkeyA .~ bk
      & bufAccessListA %~ forceFold1 . (bufkey w:) . filter (bk /=)
-- | Attach the specified buffer to some other window than the current one
switchToBufferOtherWindowE :: BufferRef -> EditorM ()
switchToBufferOtherWindowE b = shiftOtherWindow >> switchToBufferE b
-- | Switch to the buffer specified as parameter. If the buffer name
-- is empty, switch to the next buffer.
switchToBufferWithNameE :: T.Text -> EditorM ()
switchToBufferWithNameE "" = alternateBufferE 0
switchToBufferWithNameE bufName = switchToBufferE =<< getBufferWithName bufName
-- | Close a buffer.
-- Note: close the current buffer if the empty string is given
closeBufferE :: T.Text -> EditorM ()
closeBufferE nm = deleteBuffer =<< getBufferWithNameOrCurrent nm
-- | Resolve a buffer by name, falling back to the current buffer when
-- the given name is empty.
getBufferWithNameOrCurrent :: MonadEditor m => T.Text -> m BufferRef
getBufferWithNameOrCurrent t = withEditor $
    if T.null t
      then gets currentBuffer
      else getBufferWithName t
------------------------------------------------------------------------
-- | Close current buffer and window, unless it's the last one.
closeBufferAndWindowE :: EditorM ()
closeBufferAndWindowE = do
  -- Fetch the current buffer *before* closing the window. Required
  -- for the onCloseBufferE actions to work as expected by the
  -- minibuffer. The tryCloseE, since it uses tabsA, will have the
  -- current buffer "fixed" to the buffer of the window that is
  -- brought into focus. If the current buffer is accessed after the
  -- tryCloseE then the current buffer may not be the same as the
  -- buffer before tryCloseE. This would be bad.
  b <- gets currentBuffer
  tryCloseE
  deleteBuffer b
-- | Rotate focus to the next window
nextWinE :: EditorM ()
nextWinE = windowsA %= PL.next
-- | Rotate focus to the previous window
prevWinE :: EditorM ()
prevWinE = windowsA %= PL.previous
-- | Swaps the focused window with the first window. Useful for
-- layouts such as 'HPairOneStack', for which the first window is the
-- largest.
swapWinWithFirstE :: EditorM ()
swapWinWithFirstE = windowsA %= swapFocus (fromJust . PL.moveTo 0)
-- | Moves the focused window to the first window, and moves all other
-- windows down the stack.
pushWinToFirstE :: EditorM ()
pushWinToFirstE = windowsA %= pushToFirst
  where
    pushToFirst ws = case PL.delete ws of
      Nothing -> ws
      Just ws' -> PL.insertLeft (ws ^. PL.focus) (fromJust $ PL.moveTo 0 ws')
-- | Swap focused window with the next one
moveWinNextE :: EditorM ()
moveWinNextE = windowsA %= swapFocus PL.next
-- | Swap focused window with the previous one
moveWinPrevE :: EditorM ()
moveWinPrevE = windowsA %= swapFocus PL.previous
-- | A "fake" accessor that fixes the current buffer after a change of
-- the current window.
--
-- Enforces invariant that top of buffer stack is the buffer of the
-- current window.
fixCurrentBufferA_ :: Lens' Editor Editor
fixCurrentBufferA_ = lens id (\_old new -> let
    ws = windows new
    b = findBufferWith (bufkey $ PL._focus ws) new
    newBufferStack = nub (bkey b NE.<| bufferStack new)
    -- make sure we do not hold to old versions by seqing the length.
    in NE.length newBufferStack `seq` new & bufferStackA .~ newBufferStack)
-- | Counterpart of fixCurrentBufferA_: fix the current window to point to the
-- right buffer.
fixCurrentWindowE :: EditorM ()
fixCurrentWindowE =
    gets currentBuffer >>= \b -> windowsA . PL.focus . bufkeyA .= b
-- | Run a buffer action against the given window's buffer.
withWindowE :: Window -> BufferM a -> EditorM a
withWindowE w = withGivenBufferAndWindow w (bufkey w)
-- | Find the window with the given key in the current tab.
-- Partial: 'head' fails if no such window exists.
findWindowWith :: WindowRef -> Editor -> Window
findWindowWith k e =
    head $ concatMap (\win -> [win | wkey win == k]) $ windows e
-- | Return the windows that are currently open on the buffer whose
-- key is given
windowsOnBufferE :: BufferRef -> EditorM [Window]
windowsOnBufferE k = do
  ts <- use tabsA
  let tabBufEq = concatMap (\win -> [win | bufkey win == k]) . (^. tabWindowsA)
  return $ concatMap tabBufEq ts
-- | bring the editor focus the window with the given key.
--
-- Fails if no window with the given key is found.
-- Fix: the failure message previously rendered 'show wkey' — the field
-- accessor function, not the key searched for — and lacked a space
-- before "found". It now shows the missing key 'k'.
focusWindowE :: WindowRef -> EditorM ()
focusWindowE k = do
  -- Find the tab index and window index
  ts <- use tabsA
      -- Scan one tab's windows, counting until the wanted key is hit;
      -- once found (True), the accumulator is carried through unchanged.
  let check (False, i) win = if wkey win == k
                             then (True, i)
                             else (False, i + 1)
      check r@(True, _) _win = r
      -- Scan all tabs the same way, tracking (found, tabIdx, winIdx).
      searchWindowSet (False, tabIndex, _) ws =
        case foldl check (False, 0) (ws ^. tabWindowsA) of
          (True, winIndex) -> (True, tabIndex, winIndex)
          (False, _) -> (False, tabIndex + 1, 0)
      searchWindowSet r@(True, _, _) _ws = r
  case foldl searchWindowSet (False, 0, 0) ts of
    (False, _, _) -> fail $ "No window with key " ++ show k ++ " found. (focusWindowE)"
    (True, tabIndex, winIndex) -> do
      -- Both moveTo calls are safe: the indices were just found above.
      assign tabsA (fromJust $ PL.moveTo tabIndex ts)
      windowsA %= fromJust . PL.moveTo winIndex
-- | Split the current window, opening a second window onto current buffer.
-- TODO: unfold newWindowE here?
splitE :: EditorM ()
splitE = do
w <- gets currentBuffer >>= newWindowE False
windowsA %= PL.insertRight w
-- | Cycle to the next layout manager, or the first one if the current
-- one is nonstandard.
layoutManagersNextE :: EditorM ()
layoutManagersNextE = withLMStackE PL.next
-- | Cycle to the previous layout manager, or the first one if the
-- current one is nonstandard.
layoutManagersPreviousE :: EditorM ()
layoutManagersPreviousE = withLMStackE PL.previous
-- | Helper function for 'layoutManagersNext' and 'layoutManagersPrevious'
withLMStackE :: (PL.PointedList AnyLayoutManager
-> PL.PointedList AnyLayoutManager)
-> EditorM ()
withLMStackE f = askCfg >>= \cfg ->
currentTabA . tabLayoutManagerA %= go (layoutManagers cfg)
where
go [] lm = lm
go lms lm =
case findPL (layoutManagerSameType lm) lms of
Nothing -> head lms
Just lmsPL -> f lmsPL ^. PL.focus
-- | Next variant of the current layout manager, as given by 'nextVariant'
layoutManagerNextVariantE :: EditorM ()
layoutManagerNextVariantE = currentTabA . tabLayoutManagerA %= nextVariant
-- | Previous variant of the current layout manager, as given by
-- 'previousVariant'
layoutManagerPreviousVariantE :: EditorM ()
layoutManagerPreviousVariantE =
currentTabA . tabLayoutManagerA %= previousVariant
-- | Sets the given divider position on the current tab
setDividerPosE :: DividerRef -> DividerPosition -> EditorM ()
setDividerPosE ref = assign (currentTabA . tabDividerPositionA ref)
-- | Creates a new tab containing a window that views the current buffer.
newTabE :: EditorM ()
newTabE = do
bk <- gets currentBuffer
win <- newWindowE False bk
ref <- newRef
tabsA %= PL.insertRight (makeTab1 ref win)
-- | Moves to the next tab in the round robin set of tabs
nextTabE :: EditorM ()
nextTabE = tabsA %= PL.next
-- | Moves to the previous tab in the round robin set of tabs
previousTabE :: EditorM ()
previousTabE = tabsA %= PL.previous
-- | Moves the focused tab to the given index, or to the end if the
-- index is not specified.
-- | Move the focused tab to the given index; 'Nothing' means the end.
-- Fails if the requested index does not exist.
moveTabE :: Maybe Int -> EditorM ()
moveTabE Nothing = do
  lastIndex <- subtract 1 <$> uses tabsA PL.length
  tabsA %= fromJust . PL.moveTo lastIndex
moveTabE (Just n) = do
  mTabs <- uses tabsA (PL.moveTo n)
  case mTabs of
    Nothing -> fail $ "moveTab " ++ show n ++ ": no such tab"
    Just ts -> assign tabsA ts
-- | Deletes the current tab. If there is only one tab open then error out.
-- When the last tab is focused, move focus to the left, otherwise
-- move focus to the right.
deleteTabE :: EditorM ()
deleteTabE = tabsA %= fromMaybe failure . deleteTab
  where failure = error "deleteTab: cannot delete sole tab"
        -- When the focused tab is the last one, focus moves left after
        -- deletion; otherwise it moves right.
        deleteTab tabs = if PL.atEnd tabs
                         then PL.deleteLeft tabs
                         else PL.deleteRight tabs
-- | Close the current window. If there is only one tab open and the tab
-- contains only one window then do nothing.
tryCloseE :: EditorM ()
tryCloseE = do
    -- Tab/window counts decide whether to close a window or a whole tab.
    ntabs <- uses tabsA PL.length
    nwins <- uses windowsA PL.length
    -- Do nothing when this is the sole window of the sole tab.
    unless (ntabs == 1 && nwins == 1) $ if nwins == 1
      -- Could the Maybe response from deleteLeft be used instead of the
      -- def 'if'?
      then tabsA %= fromJust . PL.deleteLeft
      else windowsA %= fromJust . PL.deleteLeft
-- | Make the current window the only window on the screen
closeOtherE :: EditorM ()
closeOtherE = windowsA %= PL.deleteOthers
-- | Switch focus to some other window. If none is available, create one.
shiftOtherWindow :: MonadEditor m => m ()
shiftOtherWindow = withEditor $ do
len <- uses windowsA PL.length
if len == 1
then splitE
else nextWinE
-- | Execute the argument in the context of an other window. Create
-- one if necessary. The current window is re-focused after the
-- argument has completed.
withOtherWindow :: MonadEditor m => m a -> m a
withOtherWindow f = do
shiftOtherWindow
x <- f
withEditor prevWinE
return x
acceptedInputs :: EditorM [T.Text]
acceptedInputs = do
km <- defaultKm <$> askCfg
keymap <- withCurrentBuffer $ gets (withMode0 modeKeymap)
let l = I.accepted 3 . I.mkAutomaton . extractTopKeymap . keymap $ km
return $ fmap T.unwords l
-- | Shows the current key bindings in a new window
acceptedInputsOtherWindow :: EditorM ()
acceptedInputsOtherWindow = do
ai <- acceptedInputs
b <- stringToNewBuffer (MemBuffer "keybindings") (fromText $ T.unlines ai)
w <- newWindowE False b
windowsA %= PL.insertRight w
-- | Defines an action to be executed when the current buffer is closed.
--
-- Used by the minibuffer to assure the focus is restored to the
-- buffer that spawned the minibuffer.
--
-- todo: These actions are not restored on reload.
--
-- todo: These actions should probably be very careful at what they
-- do.
--
-- TODO: All in all, this is a very ugly way to achieve the purpose.
-- The nice way to proceed is to somehow attach the miniwindow to the
-- window that has spawned it.
onCloseBufferE :: BufferRef -> EditorM () -> EditorM ()
onCloseBufferE b a =
onCloseActionsA %= M.insertWith' (\_ old_a -> old_a >> a) b a
addJumpHereE :: EditorM ()
addJumpHereE = addJumpAtE =<< withCurrentBuffer pointB
-- | Record the given point in the current window's jump list, unless it
-- duplicates the most recent jump.
addJumpAtE :: Point -> EditorM ()
addJumpAtE point = do
  w <- use currentWindowA
  shouldAddJump <- case jumpList w of
    Just (PL.PointedList _ (Jump mark bf) _) -> do
      -- The previous jump's buffer may have been closed in the meantime.
      bfStillAlive <- gets (M.lookup bf . buffers)
      case bfStillAlive of
        Nothing -> return False
        _ -> do
          p <- withGivenBuffer bf . use $ markPointA mark
          -- Only add when (point, buffer) differs from the last jump.
          return $! (p, bf) /= (point, bufkey w)
    _ -> return True
  when shouldAddJump $ do
    -- Pin the position with a fresh mark so it survives edits.
    m <- withCurrentBuffer setMarkHereB
    let bf = bufkey w
        j = Jump m bf
    assign currentWindowA $ w & jumpListA %~ addJump j
    return ()
jumpBackE :: EditorM ()
jumpBackE = addJumpHereE >> modifyJumpListE jumpBack
jumpForwardE :: EditorM ()
jumpForwardE = modifyJumpListE jumpForward
-- | Apply a navigation function to the current window's jump list and,
-- if it yields a jump, move there (switching buffer and point).
modifyJumpListE :: (JumpList -> JumpList) -> EditorM ()
modifyJumpListE f = do
  w <- use currentWindowA
  case f $ w ^. jumpListA of
    -- No jump available in that direction: do nothing.
    Nothing -> return ()
    Just (PL.PointedList _ (Jump mark bf) _) -> do
      switchToBufferE bf
      -- Restore the point recorded by the jump's mark.
      withCurrentBuffer $ use (markPointA mark) >>= moveTo
      currentWindowA . jumpListA %= f
-- | Creates an in-memory buffer with a unique name.
-- | Create an in-memory buffer named @tmp-N@ for the smallest N whose
-- name is not already taken.
newTempBufferE :: EditorM BufferRef
newTempBufferE = do
  e <- gets id
  -- increment the index of the hint until no buffer is found with that name
  let find_next currentName (nextName:otherNames) =
        case findBufferWithName currentName e of
          (_b : _) -> find_next nextName otherNames
          [] -> currentName
      -- Unreachable: candidates come from the infinite list below.
      find_next _ [] = error "Looks like nearly infinite list has just ended."
      next_tmp_name = find_next name names
      -- Infinite candidate stream: "tmp-0", "tmp-1", ...
      (name : names) = (fmap (("tmp-" Mon.<>) . T.pack . show) [0 :: Int ..])
  newEmptyBufferE (MemBuffer next_tmp_name)
|
atsukotakahashi/wi
|
src/library/Yi/Editor.hs
|
gpl-2.0
| 29,751
| 0
| 23
| 7,925
| 6,795
| 3,541
| 3,254
| -1
| -1
|
(a, e) -> b
|
hmemcpy/milewski-ctfp-pdf
|
src/content/3.7/code/haskell/snippet06.hs
|
gpl-3.0
| 11
| 1
| 5
| 3
| 15
| 7
| 8
| -1
| -1
|
-- | Primitive function definitions for HScheme.
--
-- Copyright 2008 Mats Klingberg
--
-- This file is part of hscheme and is licensed under the GNU GPL, see the
-- LICENSE file for the full license text.
module Primitives (
-- * Exported Haskell functions
getPrimitiveEnv
-- * Primitive Scheme functions
-- ** Numerical operations
-- $numops
-- ** Numerical comparisons
-- $numcomp
-- ** List operations
-- $listops
) where
-- System imports
import Data.List
import Control.Monad.Error
import Data.IORef
-- Local imports
import Types
-- $numops
-- The following binary numerical operations are supported:
--
-- [@+@] addition
--
-- [@-@] subtraction
--
-- [@*@] multiplication
--
-- [@quotient@] Integer division
-- $numcomp
-- The basic primitive functions for comparing numbers (all are binary):
--
-- [@=@] Test for numeric equality
--
-- [@<@] Less than
--
-- [@>@] Greater than
--
-- [@<=@] Less than or equal
--
-- [@>=@] Greater than or equal
-- $listops
-- Basic list primitives:
--
-- [@car@] Return the first element of a pair (or the head of a list).
--
-- [@cdr@] Return the second element of a pair (or the tail of a list).
--
-- [@cons@] Create a new pair from two elements. If the second element is a
-- pair itself, the new structure is called a list.
-- | List of primitive functions
primitives :: [(String, PrimitiveFunction)]
primitives = [
("+", numericFoldOp (+) 0),
("-", minus),
("*", numericFoldOp (*) 1),
("quotient", numericBinOp quot),
("=", numericCompare (==)),
("<", numericCompare (<)),
(">", numericCompare (>)),
("<=", numericCompare (<=)),
(">=", numericCompare (>=)),
("car", car),
("cdr", cdr),
("cons", cons)
]
-- | Get an environment with primitive functions defined.
getPrimitiveEnv :: IO Env
getPrimitiveEnv =
mapM addBinding primitives >>= newIORef
where addBinding (name, func) = do f <- newIORef $ PrimFunc name func
return (name, f)
-----------------------
-- Numerical operations
-----------------------
-- | Get a number (or throw an error)
getNumber :: Expr -> IOThrowsError Integer
-- Unwrap a numeric literal.
getNumber (Number x) = return x
-- Anything else is reported as a type error expecting "Integer".
getNumber notNumber = throwError $ TypeError "Integer" notNumber
-- | Create numerical fold operators
-- Folds 'func' strictly over all arguments starting from 'start';
-- fails with a type error if any argument is not a number.
numericFoldOp :: (Integer -> Integer -> Integer) -> Integer -> PrimitiveFunction
numericFoldOp func start args = fmap (Number . foldl' func start) (mapM getNumber args)
-- | Create numerical binary operators
numericBinOp :: (Integer -> Integer -> Integer) -> PrimitiveFunction
-- Exactly two arguments: apply the operator to both numbers.
numericBinOp func (a:b:[]) = liftM2 (\x y -> Number $ func x y) (getNumber a) (getNumber b)
-- Any other arity is an error.
numericBinOp _ args = throwError $ NumArgs 2 args
-- | Subtraction
--
-- This is a little special since a unary minus should negate it's argument,
-- while binary (or m-ary) minus should subtract _from_ it's first argument.
minus :: PrimitiveFunction
-- Minus needs at least one argument.
minus [] = throwError $ NumArgs 1 []
-- Unary minus negates its sole argument.
minus (x:[]) = fmap (Number . negate) (getNumber x)
-- N-ary minus subtracts the remaining arguments from the first.
minus (x:xs) = getNumber x >>= \num -> numericFoldOp (-) num xs
-- | Numerical comparisons
-- Lift a binary comparison to an n-ary Scheme predicate: true iff the
-- comparison holds between every adjacent pair of arguments. Zero or
-- one argument yields True (vacuous truth), as in the original.
numericCompare :: (Integer -> Integer -> Bool) -> PrimitiveFunction
numericCompare cmp args = do
    nums <- mapM getNumber args
    return $ Bool $ and $ zipWith cmp nums (drop 1 nums)
------------------
-- List operations
------------------
-- | First element of list or pair
car :: PrimitiveFunction
-- Head of a non-empty proper list.
car [List (x:xs)] = return x
-- First element of a dotted (improper) list.
car [Dotted (x:xs) _] = return x
-- car of the empty list is a type error here.
car [List []] = throwError $ TypeError "List" $ List []
-- Any other single argument is not a pair.
car [noList] = throwError $ TypeError "List" noList
-- car is unary.
car args = throwError $ NumArgs 1 args
-- | Tail of list or second element of pair
cdr :: PrimitiveFunction
-- Tail of a non-empty proper list.
cdr [List (x:xs)] = return $ List xs
-- cdr of a one-element dotted list is the final (dotted) value itself.
cdr [Dotted [x] y] = return y
-- Otherwise drop the head, keeping the dotted tail.
cdr [Dotted (x:xs) y] = return $ Dotted xs y
-- cdr of the empty list is a type error here.
cdr [List []] = throwError $ TypeError "List" $ List []
-- Any other single argument is not a pair.
cdr [noList] = throwError $ TypeError "List" noList
-- cdr is unary.
cdr args = throwError $ NumArgs 1 args
-- | Create a pair or add a new head to list
cons :: PrimitiveFunction
-- Consing onto a proper list keeps it proper.
cons [x, List xs] = return $ List (x:xs)
-- Consing onto a dotted list extends its proper prefix.
cons [x, Dotted xs y] = return $ Dotted (x:xs) y
-- Otherwise the result is a plain dotted pair.
cons [x, y] = return $ Dotted [x] y
-- cons is binary.
cons args = throwError $ NumArgs 2 args
|
matkli/hscheme
|
Primitives.hs
|
gpl-3.0
| 4,313
| 0
| 12
| 940
| 1,123
| 625
| 498
| 61
| 1
|
-- | F-algebra over 'ListF' computing list length: each ConsF layer
-- adds one to the accumulated count; NilF contributes zero.
lenAlg :: ListF e Int -> Int
lenAlg (ConsF e n) = n + 1
lenAlg NilF = 0
|
hmemcpy/milewski-ctfp-pdf
|
src/content/3.8/code/haskell/snippet21.hs
|
gpl-3.0
| 71
| 0
| 7
| 18
| 43
| 21
| 22
| 3
| 1
|
{-# LANGUAGE NoImplicitPrelude, GeneralizedNewtypeDeriving, TypeFamilies #-}
module Lamdu.Sugar.Names.Get
( fromExpression, fromBody
) where
import Prelude.Compat
import Control.Monad.Trans.State (State, runState)
import qualified Control.Monad.Trans.State as State
import Lamdu.Sugar.Names.CPS (CPS(..))
import Lamdu.Sugar.Names.Walk (MonadNaming)
import qualified Lamdu.Sugar.Names.Walk as Walk
import Lamdu.Sugar.Types
newtype Collect name (m :: * -> *) a = Collect { unCollect :: State [name] a }
deriving (Functor, Applicative, Monad)
runCollect :: Collect name m a -> (a, [name])
runCollect = (`runState` []) . unCollect
instance Monad m => MonadNaming (Collect name m) where
type OldName (Collect name m) = name
type NewName (Collect name m) = ()
type TM (Collect name m) = m
opRun = pure $ Walk.InTransaction (return . fst . runCollect)
opWithParamName _ = cpsTellName
opWithLetName _ = cpsTellName
opWithDefName = cpsTellName
opWithTagName = cpsTellName
opGetName _ = tellName
tellName :: Walk.NameConvertor (Collect name m)
tellName name = Collect (State.modify (name:))
cpsTellName :: Walk.CPSNameConvertor (Collect name m)
cpsTellName name = CPS $ \k -> (,) <$> tellName name <*> k
-- | Returns all the *foldable* names in the given expression
-- (excluding names hidden behind transactions)
fromExpression :: Monad m => Expression name m a -> [name]
fromExpression = snd . runCollect . Walk.toExpression
fromBody :: Monad m => Body name m expr -> [name]
fromBody = snd . runCollect . Walk.toBody pure
|
da-x/lamdu
|
Lamdu/Sugar/Names/Get.hs
|
gpl-3.0
| 1,621
| 0
| 10
| 324
| 494
| 280
| 214
| 32
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CognitoSync.UnsubscribeFromDataset
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Unsubscribes from receiving notifications when a dataset is modified by
-- another device.
--
-- <http://docs.aws.amazon.com/cognitosync/latest/APIReference/API_UnsubscribeFromDataset.html>
module Network.AWS.CognitoSync.UnsubscribeFromDataset
(
-- * Request
UnsubscribeFromDataset
-- ** Request constructor
, unsubscribeFromDataset
-- ** Request lenses
, ufdDatasetName
, ufdDeviceId
, ufdIdentityId
, ufdIdentityPoolId
-- * Response
, UnsubscribeFromDatasetResponse
-- ** Response constructor
, unsubscribeFromDatasetResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.RestJSON
import Network.AWS.CognitoSync.Types
import qualified GHC.Exts
data UnsubscribeFromDataset = UnsubscribeFromDataset
{ _ufdDatasetName :: Text
, _ufdDeviceId :: Text
, _ufdIdentityId :: Text
, _ufdIdentityPoolId :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'UnsubscribeFromDataset' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ufdDatasetName' @::@ 'Text'
--
-- * 'ufdDeviceId' @::@ 'Text'
--
-- * 'ufdIdentityId' @::@ 'Text'
--
-- * 'ufdIdentityPoolId' @::@ 'Text'
--
unsubscribeFromDataset :: Text -- ^ 'ufdIdentityPoolId'
-> Text -- ^ 'ufdIdentityId'
-> Text -- ^ 'ufdDatasetName'
-> Text -- ^ 'ufdDeviceId'
-> UnsubscribeFromDataset
unsubscribeFromDataset p1 p2 p3 p4 = UnsubscribeFromDataset
{ _ufdIdentityPoolId = p1
, _ufdIdentityId = p2
, _ufdDatasetName = p3
, _ufdDeviceId = p4
}
-- | The name of the dataset from which to unsubscribe.
ufdDatasetName :: Lens' UnsubscribeFromDataset Text
ufdDatasetName = lens _ufdDatasetName (\s a -> s { _ufdDatasetName = a })
-- | The unique ID generated for this device by Cognito.
ufdDeviceId :: Lens' UnsubscribeFromDataset Text
ufdDeviceId = lens _ufdDeviceId (\s a -> s { _ufdDeviceId = a })
-- | Unique ID for this identity.
ufdIdentityId :: Lens' UnsubscribeFromDataset Text
ufdIdentityId = lens _ufdIdentityId (\s a -> s { _ufdIdentityId = a })
-- | A name-spaced GUID (for example,
-- us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito.
-- The ID of the pool to which this identity belongs.
ufdIdentityPoolId :: Lens' UnsubscribeFromDataset Text
ufdIdentityPoolId =
lens _ufdIdentityPoolId (\s a -> s { _ufdIdentityPoolId = a })
data UnsubscribeFromDatasetResponse = UnsubscribeFromDatasetResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'UnsubscribeFromDatasetResponse' constructor.
unsubscribeFromDatasetResponse :: UnsubscribeFromDatasetResponse
unsubscribeFromDatasetResponse = UnsubscribeFromDatasetResponse
instance ToPath UnsubscribeFromDataset where
toPath UnsubscribeFromDataset{..} = mconcat
[ "/identitypools/"
, toText _ufdIdentityPoolId
, "/identities/"
, toText _ufdIdentityId
, "/datasets/"
, toText _ufdDatasetName
, "/subscriptions/"
, toText _ufdDeviceId
]
instance ToQuery UnsubscribeFromDataset where
toQuery = const mempty
instance ToHeaders UnsubscribeFromDataset
instance ToJSON UnsubscribeFromDataset where
toJSON = const (toJSON Empty)
instance AWSRequest UnsubscribeFromDataset where
type Sv UnsubscribeFromDataset = CognitoSync
type Rs UnsubscribeFromDataset = UnsubscribeFromDatasetResponse
request = delete
response = nullResponse UnsubscribeFromDatasetResponse
|
dysinger/amazonka
|
amazonka-cognito-sync/gen/Network/AWS/CognitoSync/UnsubscribeFromDataset.hs
|
mpl-2.0
| 4,607
| 0
| 9
| 1,022
| 563
| 341
| 222
| 73
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Run.Projects.Locations.Configurations.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Get information about a configuration.
--
-- /See:/ <https://cloud.google.com/run/ Cloud Run Admin API Reference> for @run.projects.locations.configurations.get@.
module Network.Google.Resource.Run.Projects.Locations.Configurations.Get
(
-- * REST Resource
ProjectsLocationsConfigurationsGetResource
-- * Creating a Request
, projectsLocationsConfigurationsGet
, ProjectsLocationsConfigurationsGet
-- * Request Lenses
, plcgXgafv
, plcgUploadProtocol
, plcgAccessToken
, plcgUploadType
, plcgName
, plcgCallback
) where
import Network.Google.Prelude
import Network.Google.Run.Types
-- | A resource alias for @run.projects.locations.configurations.get@ method which the
-- 'ProjectsLocationsConfigurationsGet' request conforms to.
type ProjectsLocationsConfigurationsGetResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Configuration
-- | Get information about a configuration.
--
-- /See:/ 'projectsLocationsConfigurationsGet' smart constructor.
data ProjectsLocationsConfigurationsGet =
ProjectsLocationsConfigurationsGet'
{ _plcgXgafv :: !(Maybe Xgafv)
, _plcgUploadProtocol :: !(Maybe Text)
, _plcgAccessToken :: !(Maybe Text)
, _plcgUploadType :: !(Maybe Text)
, _plcgName :: !Text
, _plcgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsConfigurationsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plcgXgafv'
--
-- * 'plcgUploadProtocol'
--
-- * 'plcgAccessToken'
--
-- * 'plcgUploadType'
--
-- * 'plcgName'
--
-- * 'plcgCallback'
projectsLocationsConfigurationsGet
:: Text -- ^ 'plcgName'
-> ProjectsLocationsConfigurationsGet
projectsLocationsConfigurationsGet pPlcgName_ =
ProjectsLocationsConfigurationsGet'
{ _plcgXgafv = Nothing
, _plcgUploadProtocol = Nothing
, _plcgAccessToken = Nothing
, _plcgUploadType = Nothing
, _plcgName = pPlcgName_
, _plcgCallback = Nothing
}
-- | V1 error format.
plcgXgafv :: Lens' ProjectsLocationsConfigurationsGet (Maybe Xgafv)
plcgXgafv
= lens _plcgXgafv (\ s a -> s{_plcgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plcgUploadProtocol :: Lens' ProjectsLocationsConfigurationsGet (Maybe Text)
plcgUploadProtocol
= lens _plcgUploadProtocol
(\ s a -> s{_plcgUploadProtocol = a})
-- | OAuth access token.
plcgAccessToken :: Lens' ProjectsLocationsConfigurationsGet (Maybe Text)
plcgAccessToken
= lens _plcgAccessToken
(\ s a -> s{_plcgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plcgUploadType :: Lens' ProjectsLocationsConfigurationsGet (Maybe Text)
plcgUploadType
= lens _plcgUploadType
(\ s a -> s{_plcgUploadType = a})
-- | The name of the configuration to retrieve. For Cloud Run (fully
-- managed), replace {namespace_id} with the project ID or number.
plcgName :: Lens' ProjectsLocationsConfigurationsGet Text
plcgName = lens _plcgName (\ s a -> s{_plcgName = a})
-- | JSONP
plcgCallback :: Lens' ProjectsLocationsConfigurationsGet (Maybe Text)
plcgCallback
= lens _plcgCallback (\ s a -> s{_plcgCallback = a})
instance GoogleRequest
ProjectsLocationsConfigurationsGet
where
type Rs ProjectsLocationsConfigurationsGet =
Configuration
type Scopes ProjectsLocationsConfigurationsGet =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsLocationsConfigurationsGet'{..}
= go _plcgName _plcgXgafv _plcgUploadProtocol
_plcgAccessToken
_plcgUploadType
_plcgCallback
(Just AltJSON)
runService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsConfigurationsGetResource)
mempty
|
brendanhay/gogol
|
gogol-run/gen/Network/Google/Resource/Run/Projects/Locations/Configurations/Get.hs
|
mpl-2.0
| 5,078
| 5
| 16
| 1,098
| 695
| 408
| 287
| 104
| 1
|
--
-- Copyright 2017-2018 Azad Bolour
-- Licensed under GNU Affero General Public License v3.0 -
-- https://github.com/azadbolour/boardgame/blob/master/LICENSE.md
--
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE DisambiguateRecordFields #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveFunctor #-}
module Bolour.Plane.Domain.LineSegment (
LineSegment(..)
) where
import Bolour.Plane.Domain.Axis (Axis)
import qualified Bolour.Plane.Domain.Axis as Axis
import Bolour.Plane.Domain.Point (Point, Point(Point))
-- | A line segment within a 2-dimensional grid.
-- Each point of a line segment may or may not have a value (represented as Maybe val).
data LineSegment val = LineSegment {
-- | The direction of the line segment - along the X or Y axis.
axis :: Axis
-- | The line number among all grid lines along the same direction.
, lineNumber :: Int
-- | The index of the beginning point of this line segment along its line within the grid.
, begin :: Int
-- | The index of the end point of this line segment along its line within the grid.
, end :: Int
-- | The values/non-values of the points in this line segment.
, segment :: [Maybe val]
} deriving (Functor)
-- | Grid row of the point at the given offset into the segment: fixed
-- at the line number for an X-axis segment, advancing from 'begin' for
-- a Y-axis one.
row :: LineSegment val -> Int -> Int
row LineSegment { axis, lineNumber, begin } offset =
  case axis of
    Axis.X -> lineNumber
    Axis.Y -> begin + offset
-- | Grid column of the point at the given offset into the segment:
-- advancing from 'begin' for an X-axis segment, fixed at the line
-- number for a Y-axis one.
column :: LineSegment val -> Int -> Int
column LineSegment { axis, lineNumber, begin } offset =
  case axis of
    Axis.X -> begin + offset
    Axis.Y -> lineNumber
-- | The grid point at the given offset into the segment, combining
-- 'row' and 'column'.
pointAt :: LineSegment val -> Int -> Point
pointAt line offset = Point (row line offset) (column line offset)
|
azadbolour/boardgame
|
haskell-server/src/Bolour/Plane/Domain/LineSegment.hs
|
agpl-3.0
| 1,686
| 0
| 10
| 336
| 299
| 178
| 121
| 29
| 2
|
module TinyThreePassCompiler where
import Data.List.Split
import Data.List
data AST = Imm Int
| Arg Int
| Add AST AST
| Sub AST AST
| Mul AST AST
| Div AST AST
deriving (Eq, Show)
--
data Production = FromToken Token
| Expression AST
| Term AST
| Factor AST
deriving (Eq, Show)
--
data Token = TChar Char
| TInt Int
| TStr String
deriving (Eq, Show)
--
alpha, digit :: String
alpha = ['a'..'z'] ++ ['A'..'Z']
digit = ['0'..'9']
-- Lexer: single-character operators and brackets become TChar tokens,
-- maximal digit runs become TInt, maximal alphabetic runs become TStr,
-- and everything else (whitespace) is skipped.
tokenize :: String -> [Token]
tokenize [] = []
tokenize input@(c:rest)
  | c `elem` "-+*/()[]" = TChar c : tokenize rest
  | not (null num)      = TInt (read num) : tokenize afterNum
  | not (null ident)    = TStr ident : tokenize afterIdent
  | otherwise           = tokenize rest
  where
    (num, afterNum)     = span (`elem` digit) input
    (ident, afterIdent) = span (`elem` alpha) input
--
-- compile :: String -> [String]
-- compile = pass3 . pass2 . pass1
(==>) :: Maybe a -> (a -> b) -> b
(==>) (Just a) f = f a
-- (==>) Nothing _ = No errors
extract :: Maybe a -> a
extract (Just a) = a
-- extract Nothing = No errors
-- Build an AST node from an operator token and two operands.
-- NOTE(review): partial — only '+', '-', '*', '/' are handled; any
-- other token crashes with a pattern-match failure.
mergeHelper :: Token -> AST -> AST -> AST
mergeHelper (TChar '+') a b = Add a b
mergeHelper (TChar '-') a b = Sub a b
mergeHelper (TChar '*') a b = Mul a b
mergeHelper (TChar '/') a b = Div a b
-- Single-step grammar reduction over a window of up to three
-- productions; windows that match no rule are returned unchanged.
parse :: [Token] -> [Production] -> [Production]
-- A literal reduces to a factor.
parse _ [FromToken (TInt i)] = [Factor $ Imm i]
-- A variable reduces to an argument reference via its position in the
-- parameter list. NOTE(review): '==>' is partial — an identifier not
-- in 'params' crashes.
parse params [FromToken (TStr s)] = [Factor (elemIndex (TStr s) params ==> Arg)]
-- Parenthesised expression reduces to a factor.
parse _ [FromToken (TChar '('), Expression expr, FromToken (TChar ')')] = [Factor expr]
-- expression (+|-) term reduces to an expression.
parse _ [Expression expr, FromToken (TChar '+'), Term term] = [Expression $ Add expr term]
parse _ [Expression expr, FromToken (TChar '-'), Term term] = [Expression $ Sub expr term]
-- term (*|/) factor reduces to a term.
parse _ [Term term, FromToken (TChar '*'), Factor fact] = [Term $ Mul term fact]
parse _ [Term term, FromToken (TChar '/'), Factor fact] = [Term $ Div term fact]
-- Promotion: factor -> term -> expression.
parse _ [Factor fact] = [Term fact]
parse _ [Term term] = [Expression term]
-- No rule applies: leave the window untouched.
parse _ production = production
--
-- Repeatedly reduce the production list in chunks of three until one
-- production remains.
parseHelper :: [Token] -> [Production] -> [Production]
parseHelper _ [p] = [p]
parseHelper p [a,b,c] = parse p [a,b,c]
parseHelper p pro = let after = ((chunksOf 3 pro) >>= (parse p)) in
  case length after of
    1 -> after
    -- If a full pass made no progress, hold the head fixed and recurse
    -- on the rest to break the stalemate.
    _ -> if after /= pro then parseHelper p after
         else parseHelper p (head after : (parseHelper p $ init after))
-- parseHelper p (h:t)
-- First compiler pass: tokenize the source, split off the parameter
-- list (delimited by '[' ... ']'), reduce the remaining tokens to an
-- AST. NOTE(review): the '[params, tokens0]' pattern and the final
-- case are partial — malformed input crashes.
pass1 :: String -> AST
pass1 code = let
  [params, tokens0] = (splitOn [TChar ']'] (tail $ tokenize code))
  -- collapse = zipWith3 (\x y z -> [x, y, z]) tokens0 (tail tokens0) (drop 2 tokens0)
  production = tokens0 >>= (\x -> parse params [FromToken x]) in
  case parseHelper params production of
    [Expression parsed] -> parsed
    [Term parsed] -> parsed
    [Factor parsed] -> parsed
--
-- Constant folding for one node: an arithmetic node whose children are
-- both literals collapses to a literal; everything else passes through.
evaluate :: AST -> AST
evaluate node = case node of
  Add (Imm a) (Imm b) -> Imm (a + b)
  Sub (Imm a) (Imm b) -> Imm (a - b)
  Mul (Imm a) (Imm b) -> Imm (a * b)
  Div (Imm a) (Imm b) -> Imm (div a b)
  other               -> other
-- pass2 :: AST -> AST
-- pass2 = undefined
-- pass3 :: AST -> [String]
-- pass3 = undefined
|
ice1000/OI-codes
|
codewars/101-200/tiny-three-pass-compiler-also-give-up.hs
|
agpl-3.0
| 3,151
| 0
| 16
| 805
| 1,413
| 738
| 675
| 73
| 3
|
-- Copyright (c) 2013-2014 PivotCloud, Inc.
--
-- Aws.Lambda
--
-- Please feel free to contact us at licensing@pivotmail.com with any
-- contributions, additions, or other feedback; we would love to hear from
-- you.
--
-- Licensed under the Apache License, Version 2.0 (the "License"); you may
-- not use this file except in compliance with the License. You may obtain a
-- copy of the License at http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-- License for the specific language governing permissions and limitations
-- under the License.
module Aws.Lambda
( module Aws.Lambda.Core
, module Aws.Lambda.Types
, module Aws.Lambda.Commands.AddEventSource
, module Aws.Lambda.Commands.DeleteFunction
, module Aws.Lambda.Commands.GetEventSource
, module Aws.Lambda.Commands.GetFunction
, module Aws.Lambda.Commands.GetFunctionConfiguration
, module Aws.Lambda.Commands.InvokeAsync
, module Aws.Lambda.Commands.ListEventSources
, module Aws.Lambda.Commands.ListFunctions
, module Aws.Lambda.Commands.RemoveEventSource
, module Aws.Lambda.Commands.UpdateFunctionConfiguration
, module Aws.Lambda.Commands.UploadFunction
) where
import Aws.Lambda.Core
import Aws.Lambda.Types
import Aws.Lambda.Commands.AddEventSource
import Aws.Lambda.Commands.DeleteFunction
import Aws.Lambda.Commands.GetEventSource
import Aws.Lambda.Commands.GetFunction
import Aws.Lambda.Commands.GetFunctionConfiguration
import Aws.Lambda.Commands.InvokeAsync
import Aws.Lambda.Commands.ListEventSources
import Aws.Lambda.Commands.ListFunctions
import Aws.Lambda.Commands.RemoveEventSource
import Aws.Lambda.Commands.UpdateFunctionConfiguration
import Aws.Lambda.Commands.UploadFunction
|
alephcloud/hs-aws-lambda
|
src/Aws/Lambda.hs
|
apache-2.0
| 1,881
| 0
| 5
| 196
| 216
| 161
| 55
| 27
| 0
|
--Final Project ALP.
--Student: Marcos Pividori
-- |This module presents the configuration of the database used and the types of data stored in it.
{-# LANGUAGE OverloadedStrings, TypeFamilies, TemplateHaskell,
QuasiQuotes, MultiParamTypeClasses, GeneralizedNewtypeDeriving, FlexibleContexts, GADTs #-}
module DataBase where
import Yesod
import Data.Time (UTCTime(..))
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persist|
Devices
usuario String
password String
regId String
UniqueUsuario usuario
deriving Show
Image
filename String
usuario String
date UTCTime
deriving Show
Audio
filename String
usuario String
date UTCTime
deriving Show
Ubicacion
usuario String
lati Double
lngi Double
datei UTCTime
|]
|
MarcosPividori/Yesod-server-for-GCM
|
DataBase.hs
|
apache-2.0
| 802
| 0
| 7
| 175
| 49
| 31
| 18
| 6
| 0
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
-- | A module responsible for identifying occurrences dictionary
-- entries in the Skladnica treebank.
module NLP.Skladnica.Map
( MapCfg (..)
, mapMWEs
) where
import Control.Monad (forM_)
-- import qualified Data.ByteString as BS
import Data.Either (lefts)
import Data.Maybe (mapMaybe)
import qualified Data.Set as S
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Tree as R
import qualified System.FilePath.Find as F
import qualified NLP.Skladnica as Skl
import qualified NLP.Walenty as W
import qualified NLP.Skladnica.Walenty.Mapping as Mapping
import qualified NLP.Skladnica.Walenty.Prune as P
import qualified NLP.Skladnica.Walenty.Search2 as Q
-- import qualified NLP.Skladnica.Walenty.Select as Select
import qualified NLP.Skladnica.Walenty.MweTree as MWE
import qualified NLP.Skladnica.Walenty.NcpNEs as NE
import qualified NLP.Skladnica.Walenty.Sejf as Sejf
------------------------------------------------------------------------------
-- Tests
--------------------------------------------------------------------------------
-- | Recursively retrieve all XML files (files matching the @*.xml@ name
-- pattern) beneath the given directory.
getXmlFiles :: FilePath -> IO [FilePath]
getXmlFiles = F.find F.always (F.fileName F.~~? "*.xml")
------------------------------------------------------------------------------
-- Grammar Extraction Tests
--------------------------------------------------------------------------------
-- | Mapping configuration: where to find the treebank and the (optional)
-- MWE sources.  Any 'Nothing' source is simply skipped by 'mapMWEs'.
data MapCfg = MapCfg
  { skladnicaDir   :: FilePath
    -- ^ directory scanned recursively for Składnica XML files
  , mayWalentyPath :: Maybe (FilePath, FilePath)
  -- ^ The first component is the Walenty file, the second one
  -- is the expansion file
  , maySejfPath    :: Maybe FilePath
    -- ^ SEJF dictionary file, if any
  , mayNcpPath     :: Maybe FilePath
    -- ^ NCP corpus directory (source of named entities), if any
  } deriving (Show, Eq, Ord)
-- | Map individual MWEs on Składnica and print the resulting trees on stdin.
-- mapMWEs
-- :: FilePath -- ^ Skladnica directory
-- -> FilePath -- ^ Walenty file
-- -> FilePath -- ^ Walenty expansion file
-- -> FilePath -- ^ SEJF file
-- -> FilePath -- ^ NCP directory (for NEs)
-- -> IO ()
-- | Mark multi-word-expression (MWE) occurrences in every Składnica tree
-- under 'skladnicaDir' and print each resulting annotated tree as XML on
-- stdout.  MWEs are drawn from up to three (individually optional)
-- sources: Walenty lexicalized verbal entries, the SEJF dictionary, and
-- named entities found in the NCP corpus.
mapMWEs
  :: MapCfg
  -> IO ()
mapMWEs MapCfg{..} = do
  -- read *lexicalized* verbal entries from Walenty
  walenty <- case mayWalentyPath of
    Just paths -> uncurry readWalenty paths
    Nothing -> return []
  -- read SEJF dictionary
  sejf0 <- case maySejfPath of
    Just sejfPath -> Sejf.readSejf sejfPath
    Nothing -> return []
  -- TODO: it's not nice that we have to specify case sensitivity
  -- in many places, this leads to problems
  let sejf = sejfPartition Sejf.IgnoreCase Sejf.orth sejf0
  -- read NCP-NEs dictionary
  nes0 <- nubOrd <$> case mayNcpPath of
    Just ncpPath -> NE.nesInCorpus ncpPath
    Nothing -> return []
  let nes = sejfPartition Sejf.CaseSensitive NE.orth nes0
  -- per each XML file...
  xmlFiles <- getXmlFiles skladnicaDir
  forM_ xmlFiles (procPath walenty sejf nes)
  where
    -- Process one Składnica file: filter the dictionaries down to entries
    -- whose component words all occur in the sentence, compile the
    -- survivors into queries, and mark their matches in every tree.
    procPath walenty sejf0 nes0 skladnicaXML = do
      sklForest <- forestFromXml skladnicaXML
      -- word forms of the sentence, taken from the first tree
      -- (presumably all trees in a file concern the same sentence — confirm)
      let sentSet = case sklForest of
            sklTree : _ -> S.fromList $
              map Skl.orth (Mapping.terminals sklTree)
            _ -> S.empty
          sejf =
            [ entry
            | (entry, wordSet) <- sejf0
            -- , wordSet `S.isSubsetOf` sentSet ]
            -- TODO: and again, below, we have to account for potential case...
            , wordSet `S.isSubsetOf`
              (S.fromList . map T.toCaseFold . S.toList) sentSet ]
          nes =
            [ entry
            | (entry, wordSet) <- nes0
            , wordSet `S.isSubsetOf` sentSet ]
      -- compile each surviving dictionary entry into a query paired with
      -- its provenance record
      let exprs1 = map ((,walentyInfo) . Mapping.querify) walenty
          exprs2 = map ((,sejfInfo) . Sejf.querify' Sejf.IgnoreCase) sejf
          exprs3 = map
            ( \ne ->
                ( Sejf.querifyOrth' Sejf.CaseSensitive $ NE.orth ne
                , nesInfo ne )
            ) nes
      forM_ sklForest $ \sklTree -> do
        let mweTree = Mapping.markSklTree (exprs1 ++ exprs2 ++ exprs3) sklTree
        T.putStrLn . MWE.renderXml $
          let path = drop (length skladnicaDir) skladnicaXML
          in MWE.outToXml
             . MWE.Top mweTree
             $ M.fromList [("file", T.pack path)]
    -- provenance records attached to matched MWEs in the XML output
    walentyInfo = genericInfo "walenty"
    sejfInfo = genericInfo "sejf"
    genericInfo orig = MWE.MweInfo
      { MWE.origin = Just orig
      , MWE.mweTyp = Nothing
      , MWE.reading = Nothing }
    nesInfo NE.NE{..} = MWE.MweInfo
      { MWE.origin = Just "nkjp"
      , MWE.mweTyp = Just $ neType `T.append` case neSubType of
          Nothing -> ""
          Just subType -> "-" `T.append` subType
      , MWE.reading = Nothing }
------------------------------------------------------------------------------
-- Utils
--------------------------------------------------------------------------------
-- readWalenty
-- :: FilePath -- ^ Walenty filePath
-- -> FilePath -- ^ Expansion file
-- -> IO ()
-- Read the lexicalized verbal entries from Walenty, expanding them with
-- the given expansion map and pruning them down to lexical constraints.
readWalenty walentyPath expansionPath = do
  expMap <- W.readExpMap expansionPath
  entries <- W.readWalenty walentyPath
  -- Pruning happens twice on purpose: once before expansion, so that
  -- insignificant expansions are not treated as lexical constraints, and
  -- once after (added 01/06/2016), to drop the non-lexical constraints
  -- that expansion itself introduces.
  let verbs = S.toList . S.fromList
            . mapMaybe P.pruneVerb
            . map (W.expandVerb expMap)
            . mapMaybe P.pruneVerb
            . lefts
            $ entries
  return verbs
-- | Read the given Składnica XML file and extract its chosen trees.
forestFromXml :: FilePath -> IO [Skl.Tree Skl.Node Skl.IsHead]
forestFromXml xml =
  Skl.forest Skl.chosen 0 . Skl.mkDAG <$> Skl.readTop xml
-- | Use `Sejf.partition` to determine the component words of each entry
-- and pair the entry with them (as a set, on the second position).
-- Entries with fewer than two component words are dropped.
sejfPartition
  :: Sejf.CaseSensitivity
  -> (a -> T.Text)
  -> [a]
  -> [(a, S.Set T.Text)]
sejfPartition caseSens f = mapMaybe $ \entry ->
  let wordSet = S.fromList . Sejf.partition . normalize . f $ entry
  in if S.size wordSet > 1
       then Just (entry, wordSet)
       else Nothing
  where
    -- fold character case away up front when matching is case-insensitive
    normalize = case caseSens of
      Sejf.CaseSensitive -> id
      Sejf.IgnoreCase -> T.toCaseFold
--------------------------------------------------
-- Misc
--------------------------------------------------
-- | /O(n log n)/ duplicate removal via an intermediate 'S.Set'.  Note:
-- unlike 'Data.List.nub', the result comes back in ascending order rather
-- than first-occurrence order.
nubOrd :: (Ord a) => [a] -> [a]
nubOrd xs = S.toList (S.fromList xs)
|
kawu/skladnica-with-walenty
|
src/NLP/Skladnica/Map.hs
|
bsd-2-clause
| 6,928
| 8
| 20
| 1,748
| 1,415
| 778
| 637
| 121
| 6
|
{-# LANGUAGE ConstraintKinds, FlexibleContexts, FlexibleInstances,
MultiParamTypeClasses, TypeFamilies #-}
module HaskHOL.Core.Overloadings
( module HaskHOL.Core.Kernel
, module HaskHOL.Core.Basics
, module HaskHOL.Core.Parser
, module HaskHOL.Core.Overloadings
)
where
import HaskHOL.Core.Lib
import HaskHOL.Core.Kernel hiding
(tyApp, destFunTy, primTYBETA, primTYAPP, primTYAPP2, primTYABS,
primINST, primINST_TYPE_FULL, primINST_TYPE, primDEDUCT_ANTISYM,
primEQ_MP, primASSUME, primBETA, primABS, primMK_COMB, primTRANS,
primREFL, varSubst, destEq, destTyComb, destTyAbs, destComb, destAbs,
destVar, mkTyComb, mkTyAbs, mkComb, mkAbs, mkVar, inst, typeMatch,
mkUTypes, mkUType, typeOf)
import qualified HaskHOL.Core.Kernel as K
import HaskHOL.Core.Basics hiding
(destNumeral, destLet, destList, destCons, destTyEx, destTyAll, destUExists,
destNeg, destDisj, destExists, destForall, destImp, destConj, destIff,
destTyBinder, destBinder, destGAbs, listMkBinop, mkBinop, destBinop,
destBinary, mkIComb, bodyTyabs, bndvarTyabs, body, bndvar, rand, rator,
listMkTyAbs, listMkAbs, listMkTyComb, listMkComb, alphaTyabs, alpha,
subst, mkEq, alphaUtype, tysubst)
import qualified HaskHOL.Core.Basics as B
import HaskHOL.Core.Parser hiding
(newTypeAbbrev, prioritizeOverload, overloadInterface, overrideInterface,
reduceInterface, makeOverloadable)
import qualified HaskHOL.Core.Parser as P
import HaskHOL.Core.State.Monad (HOL, Theory, Constraint)
-- Overloading Skeletons
-- | Bridges the "rep" classes ('HOLTypeRep', 'HOLTermRep', 'HOLThmRep')
-- and the kernel/basics primitives: 'overload' converts any accepted
-- representation @b@ into the concrete HOL value @a@ inside the 'HOL'
-- monad.  The associated 'OverloadTy' constraint names the rep class
-- required for that conversion.
class Overload a b where
    type family OverloadTy a b cls thry :: Constraint
    overload :: OverloadTy a b cls thry => b -> HOL cls thry a
instance Overload HOLType ty where
type OverloadTy HOLType ty cls thry = HOLTypeRep ty cls thry
overload = toHTy
instance Overload HOLTerm tm where
type OverloadTy HOLTerm tm cls thry = HOLTermRep tm cls thry
overload = toHTm
instance Overload HOLThm thm where
type OverloadTy HOLThm thm cls thry = HOLThmRep thm cls thry
overload = toHThm
instance (Overload a1 b1, Overload a2 b2) => Overload (a1, a2) (b1, b2) where
type OverloadTy (a1, a2) (b1, b2) cls thry =
(OverloadTy a1 b1 cls thry, OverloadTy a2 b2 cls thry)
overload = overload `ffCombM` overload
instance (Overload a1 b1, Overload a2 b2, Overload a3 b3) =>
Overload (a1, a2, a3) (b1, b2, b3) where
type OverloadTy (a1, a2, a3) (b1, b2, b3) cls thry =
(OverloadTy a1 b1 cls thry, OverloadTy a2 b2 cls thry,
OverloadTy a3 b3 cls thry)
overload (x, y, z) =
do {x' <- overload x; y' <- overload y; z' <- overload z; return (x',y',z')}
-- Has the potential for a space leak for large argument lists due to mapM.
instance Overload a b => Overload [a] [b] where
type OverloadTy [a] [b] cls thry = OverloadTy a b cls thry
overload = mapM overload
-- One off to clean up overloadings related to type substitution
instance Overload K.TypeOp K.TypeOp where
type OverloadTy K.TypeOp K.TypeOp cls thry = ()
overload = return
-- | Lift a one-argument kernel/basics function to accept any valid
-- representation of its argument, converting via 'overload' first.
overload1 :: (Overload a b, OverloadTy a b cls thry)
          => (a -> HOL cls thry c) -> b -> HOL cls thry c
overload1 f x = join (f <$!> overload x)
-- | Like 'overload1', but for two-argument functions.
overload2 :: (Overload a1 b1, OverloadTy a1 b1 cls thry,
              Overload a2 b2, OverloadTy a2 b2 cls thry)
          => (a1 -> a2 -> HOL cls thry c) -> b1 -> b2 -> HOL cls thry c
overload2 f x y = join (f <$!> overload x <*> overload y)
-- | Like 'overload1', but for three-argument functions.
overload3 :: (Overload a1 b1, OverloadTy a1 b1 cls thry,
              Overload a2 b2, OverloadTy a2 b2 cls thry,
              Overload a3 b3, OverloadTy a3 b3 cls thry)
          => (a1 -> a2 -> a3 -> HOL cls thry c)
          -> b1 -> b2 -> b3 -> HOL cls thry c
overload3 f x y z = join (f <$!> overload x <*> overload y <*> overload z)
-- Kernel Type Functions
destFunTy :: HOLTypeRep ty cls thry => ty -> HOL cls thry (HOLType, HOLType)
destFunTy = overload1 K.destFunTy
tyApp :: HOLTypeRep ty cls thry => TypeOp -> [ty] -> HOL cls thry HOLType
tyApp = overload2 K.tyApp
mkUType :: (HOLTypeRep ty1 cls thry, HOLTypeRep ty2 cls thry)
=> ty1 -> ty2 -> HOL cls thry HOLType
mkUType = overload2 K.mkUType
mkUTypes :: (HOLTypeRep ty1 cls thry, HOLTypeRep ty2 cls thry)
=> [ty1] -> ty2 -> HOL cls thry HOLType
mkUTypes = overload2 K.mkUTypes
typeMatch :: (HOLTypeRep ty1 cls thry, HOLTypeRep ty2 cls thry,
HOLTypeRep ty3 cls thry, HOLTypeRep ty4 cls thry,
HOLTypeRep ty5 cls thry)
=> ty1 -> ty2
-> ([(ty3, ty4)], [(K.TypeOp, ty5)], [(K.TypeOp, K.TypeOp)])
-> HOL cls thry SubstTrip
typeMatch = overload3 K.typeMatch
{-# INLINEABLE typeMatch_NIL #-}
typeMatch_NIL :: (HOLTypeRep ty1 cls thry, HOLTypeRep ty2 cls thry)
=> ty1 -> ty2 -> HOL cls thry SubstTrip
typeMatch_NIL x y =
HaskHOL.Core.Overloadings.typeMatch x y (([], [], [])::SubstTrip)
-- Kernel Term Functions
typeOf :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLType
typeOf = overload1 (return . K.typeOf)
mkVar :: HOLTypeRep ty cls thry => Text -> ty -> HOL cls thry HOLTerm
mkVar x = overload1 (return . K.mkVar x)
mkAbs :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry)
=> tm1 -> tm2 -> HOL cls thry HOLTerm
mkAbs = overload2 K.mkAbs
mkComb :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry)
=> tm1 -> tm2 -> HOL cls thry HOLTerm
mkComb = overload2 K.mkComb
mkTyAbs :: (HOLTypeRep ty cls thry, HOLTermRep tm cls thry)
=> ty -> tm -> HOL cls thry HOLTerm
mkTyAbs = overload2 K.mkTyAbs
mkTyComb :: (HOLTermRep tm cls thry, HOLTypeRep ty cls thry)
=> tm -> ty -> HOL cls thry HOLTerm
mkTyComb = overload2 K.mkTyComb
destVar :: HOLTermRep tm cls thry => tm -> HOL cls thry (Text, HOLType)
destVar = overload1 K.destVar
destAbs :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destAbs = overload1 K.destAbs
destComb :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destComb = overload1 K.destComb
destTyAbs :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLType, HOLTerm)
destTyAbs = overload1 K.destTyAbs
destTyComb :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLType)
destTyComb = overload1 K.destTyComb
destEq :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destEq = overload1 K.destEq
varSubst :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry,
HOLTermRep tm3 cls thry)
=> [(tm1, tm2)] -> tm3 -> HOL cls thry HOLTerm
varSubst = overload2 K.varSubst
-- Is there a cleaner way to do this? Overloading requires ambiguous types.
-- | Instantiation overloaded over the representation of the pairs in the
-- substitution environment: the instances below accept (type rep, type
-- rep), ('TypeOp', type rep) and ('TypeOp', 'TypeOp') pairs.  'instHOL'
-- instantiates a term (via 'K.inst'); 'instTypeHOL' instantiates a
-- theorem (via 'K.primINST_TYPE').
class InstHOL a b cls thry where
    instHOL :: [(a, b)] -> HOLTerm -> HOL cls thry HOLTerm
    instTypeHOL :: [(a, b)] -> HOLThm -> HOL cls thry HOLThm
instance (HOLTypeRep l cls thry, HOLTypeRep r cls thry) =>
InstHOL l r cls thry where
instHOL penv tm =
do env <- mapM (toHTy `ffCombM` toHTy) penv
return $! K.inst env tm
instTypeHOL penv thm =
do env <- mapM (toHTy `ffCombM` toHTy) penv
return $! K.primINST_TYPE env thm
instance HOLTypeRep r cls thry => InstHOL TypeOp r cls thry where
instHOL penv tm =
do env <- mapM (return `ffCombM` toHTy) penv
return $! K.inst env tm
instTypeHOL penv thm =
do env <- mapM (return `ffCombM` toHTy) penv
return $! K.primINST_TYPE env thm
instance InstHOL TypeOp TypeOp cls thry where
instHOL penv tm = return $! K.inst penv tm
instTypeHOL penv thm = return $! K.primINST_TYPE penv thm
inst :: (InstHOL a b cls thry, HOLTermRep tm cls thry)
=> [(a, b)] -> tm -> HOL cls thry HOLTerm
inst penv = overload1 (instHOL penv)
-- Kernel Theorem Functions
{-|
A redefinition of 'K.primREFL' to overload it for all valid term
representations as defined by 'HOLTermRep'.
-}
primREFL :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLThm
primREFL = overload1 (return . K.primREFL)
{-|
A redefinition of 'K.primTRANS' to overload it for all valid theorem
representations as defined by 'HOLThmRep'.
-}
primTRANS :: (HOLThmRep thm1 cls thry, HOLThmRep thm2 cls thry)
=> thm1 -> thm2 -> HOL cls thry HOLThm
primTRANS = overload2 K.primTRANS
{-|
A redefinition of 'K.primMK_COMB' to overload it for all valid theorem
representations as defined by 'HOLThmRep'.
-}
primMK_COMB :: (HOLThmRep thm1 cls thry, HOLThmRep thm2 cls thry)
=> thm1 -> thm2 -> HOL cls thry HOLThm
primMK_COMB = overload2 K.primMK_COMB
{-|
A redefinition of 'K.primABS' to overload it for all valid term and theorem
representations as defined by 'HOLTermRep' and 'HOLThmRep'.
-}
primABS :: (HOLTermRep tm cls thry, HOLThmRep thm cls thry)
=> tm -> thm -> HOL cls thry HOLThm
primABS = overload2 K.primABS
{-|
A redefinition of 'K.primBETA' to overload it for all valid term
representations as defined by 'HOLTermRep'.
-}
primBETA :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLThm
primBETA = overload1 K.primBETA
{-|
A redefinition of 'K.primASSUME' to overload it for all valid term
representations as defined by 'HOLTermRep'.
-}
primASSUME :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLThm
primASSUME = overload1 K.primASSUME
{-|
A redefinition of 'K.primEQ_MP' to overload it for all valid theorem
representations as defined by 'HOLThmRep'.
-}
primEQ_MP :: (HOLThmRep thm1 cls thry, HOLThmRep thm2 cls thry)
=> thm1 -> thm2 -> HOL cls thry HOLThm
primEQ_MP = overload2 K.primEQ_MP
{-|
A redefinition of 'K.primDEDUCT_ANTISYM' to overload it for all valid theorem
representations as defined by 'HOLThmRep'.
-}
primDEDUCT_ANTISYM :: (HOLThmRep thm1 cls thry, HOLThmRep thm2 cls thry)
=> thm1 -> thm2 -> HOL cls thry HOLThm
primDEDUCT_ANTISYM = overload2 (\ x -> return . K.primDEDUCT_ANTISYM x)
{-|
A redefinition of 'K.primINST_TYPE' to overload it for all valid theorem
representations as defined by 'HOLThmRep'.
-}
primINST_TYPE :: (InstHOL a b cls thry, HOLThmRep thm cls thry)
=> [(a, b)] -> thm -> HOL cls thry HOLThm
primINST_TYPE penv = overload1 (instTypeHOL penv)
{-|
A redefinition of 'K.primINST_TYPE_FULL' to overload it for all valid theorem
representations as defined by 'HOLThmRep'.
-}
primINST_TYPE_FULL :: HOLThmRep thm cls thry
=> SubstTrip -> thm -> HOL cls thry HOLThm
primINST_TYPE_FULL tyenv = overload1 (return . K.primINST_TYPE_FULL tyenv)
{-|
A redefinition of 'K.primINST' to overload it for all valid theorem
representations as defined by 'HOLThmRep'.
-}
primINST :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry,
HOLThmRep thm cls thry)
=> [(tm1, tm2)] -> thm -> HOL cls thry HOLThm
primINST = overload2 K.primINST
{-|
A redefinition of 'K.primTYABS' to overload it for all valid theorem
representations as defined by 'HOLThmRep'.
-}
primTYABS :: (HOLTypeRep ty cls thry, HOLThmRep thm cls thry)
=> ty -> thm -> HOL cls thry HOLThm
primTYABS = overload2 K.primTYABS
{-|
A redefinition of 'K.primTYAPP2' to overload it for all valid type and theorem
representations as defined by 'HOLTypeRep' and 'HOLThmRep'.
-}
primTYAPP2 :: (HOLTypeRep ty1 cls thry, HOLTypeRep ty2 cls thry,
HOLThmRep thm cls thry)
=> ty1 -> ty2 -> thm -> HOL cls thry HOLThm
primTYAPP2 = overload3 K.primTYAPP2
{-|
A redefinition of 'K.primTYAPP' to overload it for all valid type and theorem
representations as defined by 'HOLTypeRep' and 'HOLThmRep'.
-}
primTYAPP :: (HOLTypeRep ty cls thry, HOLThmRep thm cls thry)
=> ty -> thm -> HOL cls thry HOLThm
primTYAPP = overload2 K.primTYAPP
{-|
A redefinition of 'K.primTYBETA' to overload it for all valid term
representations as defined by 'HOLTermRep'.
-}
primTYBETA :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLThm
primTYBETA = overload1 K.primTYBETA
-- Core "Basic" Functions
tysubst :: (HOLTypeRep ty1 cls thry, HOLTypeRep ty2 cls thry,
HOLTypeRep ty3 cls thry)
=> [(ty1, ty2)] -> ty3 -> HOL cls thry HOLType
tysubst = overload2 B.tysubst
alphaUtype :: (HOLTypeRep ty1 cls thry, HOLTypeRep ty2 cls thry)
=> ty1 -> ty2 -> HOL cls thry HOLType
alphaUtype = overload2 B.alphaUtype
mkEq :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry)
=> tm1 -> tm2 -> HOL cls thry HOLTerm
mkEq = overload2 B.mkEq
subst :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry,
HOLTermRep tm3 cls thry)
=> [(tm1, tm2)] -> tm3 -> HOL cls thry HOLTerm
subst = overload2 B.subst
alpha :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry)
=> tm1 -> tm2 -> HOL cls thry HOLTerm
alpha = overload2 B.alpha
alphaTyabs :: (HOLTypeRep ty cls thry, HOLTermRep tm cls thry)
=> ty -> tm -> HOL cls thry HOLTerm
alphaTyabs = overload2 B.alphaTyabs
listMkComb :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry)
=> tm1 -> [tm2] -> HOL cls thry HOLTerm
listMkComb = overload2 B.listMkComb
listMkTyComb :: (HOLTermRep tm cls thry, HOLTypeRep ty cls thry)
=> tm -> [ty] -> HOL cls thry HOLTerm
listMkTyComb = overload2 B.listMkTyComb
listMkAbs :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry)
=> [tm1] -> tm2 -> HOL cls thry HOLTerm
listMkAbs = overload2 B.listMkAbs
listMkTyAbs :: (HOLTypeRep ty cls thry, HOLTermRep tm cls thry)
=> [ty] -> tm -> HOL cls thry HOLTerm
listMkTyAbs = overload2 B.listMkTyAbs
rator :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLTerm
rator = overload1 B.rator
rand :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLTerm
rand = overload1 B.rand
bndvar :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLTerm
bndvar = overload1 B.bndvar
body :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLTerm
body = overload1 B.body
bndvarTyabs :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLType
bndvarTyabs = overload1 B.bndvarTyabs
bodyTyabs :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLTerm
bodyTyabs = overload1 B.bodyTyabs
mkIComb :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry)
=> tm1 -> tm2 -> HOL cls thry HOLTerm
mkIComb = overload2 B.mkIComb
destBinary :: HOLTermRep tm cls thry
=> Text -> tm -> HOL cls thry (HOLTerm, HOLTerm)
destBinary s = overload1 (B.destBinary s)
destBinop :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry)
=> tm1 -> tm2 -> HOL cls thry (HOLTerm, HOLTerm)
destBinop = overload2 B.destBinop
mkBinop :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry,
HOLTermRep tm3 cls thry)
=> tm1 -> tm2 -> tm3 -> HOL cls thry HOLTerm
mkBinop = overload3 B.mkBinop
listMkBinop :: (HOLTermRep tm1 cls thry, HOLTermRep tm2 cls thry)
=> tm1 -> [tm2] -> HOL cls thry HOLTerm
listMkBinop = overload2 B.listMkBinop
destGAbs :: HOLTermRep tm cls thry
=> tm -> HOL cls thry (HOLTerm, HOLTerm)
destGAbs = overload1 B.destGAbs
destBinder :: HOLTermRep tm cls thry
=> Text -> tm -> HOL cls thry (HOLTerm, HOLTerm)
destBinder op = overload1 (B.destBinder op)
destTyBinder :: HOLTermRep tm cls thry
=> Text -> tm -> HOL cls thry (HOLType, HOLTerm)
destTyBinder op = overload1 (B.destTyBinder op)
destIff :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destIff = overload1 B.destIff
destConj :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destConj = overload1 B.destConj
destImp :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destImp = overload1 B.destImp
destForall :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destForall = overload1 B.destForall
destExists :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destExists = overload1 B.destExists
destNeg :: HOLTermRep tm cls thry => tm -> HOL cls thry HOLTerm
destNeg = overload1 B.destNeg
destDisj :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destDisj = overload1 B.destDisj
destUExists :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destUExists = overload1 B.destUExists
destTyAll :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLType, HOLTerm)
destTyAll = overload1 B.destTyAll
destTyEx :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLType, HOLTerm)
destTyEx = overload1 B.destTyEx
destCons :: HOLTermRep tm cls thry => tm -> HOL cls thry (HOLTerm, HOLTerm)
destCons = overload1 B.destCons
destList :: HOLTermRep tm cls thry => tm -> HOL cls thry [HOLTerm]
destList = overload1 B.destList
destLet :: HOLTermRep tm cls thry
=> tm -> HOL cls thry ([(HOLTerm, HOLTerm)], HOLTerm)
destLet = overload1 B.destLet
destNumeral :: HOLTermRep tm cls thry => tm -> HOL cls thry Integer
destNumeral = overload1 B.destNumeral
-- Parser Functions
{-|
A redefinition of 'P.makeOverloadable' to overload it for all valid type
representations as defined by 'HOLTypeRep'.
-}
makeOverloadable :: HOLTypeRep ty Theory thry
=> Text -> ty -> HOL Theory thry ()
makeOverloadable s = overload1 (P.makeOverloadable s)
{-|
A redefinition of 'P.reduceInterface' to overload it for all valid term
representations as defined by 'HOLTermRep'.
-}
reduceInterface :: HOLTermRep tm Theory thry
=> Text -> tm -> HOL Theory thry ()
reduceInterface s = overload1 (P.reduceInterface s)
{-|
A redefinition of 'P.overrideInterface' to overload it for all valid term
representations as defined by 'HOLTermRep'.
-}
overrideInterface :: HOLTermRep tm Theory thry
=> Text -> tm -> HOL Theory thry ()
overrideInterface s = overload1 (P.overrideInterface s)
{-|
A redefinition of 'P.overloadInterface' to overload it for all valid term
representations as defined by 'HOLTermRep'.
-}
overloadInterface :: HOLTermRep tm Theory thry
=> Text -> tm -> HOL Theory thry ()
overloadInterface s = overload1 (P.overloadInterface s)
{-|
A redefinition of 'P.prioritizeOverload' to overload it for all valid type
representations as defined by 'HOLTypeRep'.
-}
prioritizeOverload :: HOLTypeRep ty Theory thry => ty -> HOL Theory thry ()
prioritizeOverload = overload1 P.prioritizeOverload
{-|
A redefinition of 'P.newTypeAbbrev' to overload it for all valid type
representations as defined by 'HOLTypeRep'.
-}
newTypeAbbrev :: HOLTypeRep ty Theory thry => Text -> ty -> HOL Theory thry ()
newTypeAbbrev s = overload1 (P.newTypeAbbrev s)
|
ecaustin/haskhol-core
|
src/HaskHOL/Core/Overloadings.hs
|
bsd-2-clause
| 18,394
| 0
| 12
| 3,982
| 5,731
| 3,000
| 2,731
| 305
| 1
|
{-# LANGUAGE InstanceSigs #-}
module Control.Effect.Parameterised.State where
-- Bye Monads... as we know them
import Prelude hiding (Monad(..))
import Control.Effect.Parameterised
newtype State s1 s2 a = State { runState :: s1 -> (a, s2) }
-- State parameterised monad
-- ... just like the
-- | The classic state monad, generalised so that the state type may change
-- across a computation (a parameterised monad).
instance PMonad State where
    return :: a -> State s s a
    return a = State $ \s -> (a, s)

    (>>=) :: State s1 s2 a -> (a -> State s2 s3 b) -> State s1 s3 b
    State m >>= k = State $ \s0 ->
        -- lazy pattern on the intermediate pair, matching the original's
        -- let-binding semantics
        let (a, s1) = m s0
        in runState (k a) s1
|
dorchard/effect-monad
|
src/Control/Effect/Parameterised/State.hs
|
bsd-2-clause
| 562
| 0
| 12
| 149
| 211
| 118
| 93
| 12
| 0
|
{-
- Driver file for running tests at a command line prompt. This is the standard
- pass-through to the test-framework package.
-
- Refer to CabalTest.hs for the setup used when running "cabal test".
-}
module Main (main) where
import Test.Framework (defaultMain)
import ChessTools.Test.Suite (tests)
-- | Entry point: hand the test suite to test-framework's default
-- command-line runner.
main :: IO ()
main = defaultMain tests
|
malcolmt/chess-tools
|
src/ChessTools/Test/ConsoleTest.hs
|
bsd-3-clause
| 349
| 0
| 6
| 63
| 49
| 29
| 20
| 5
| 1
|
{-# LANGUAGE RecordWildCards #-}
-- | read/write ImpulseTracker instruments
module Codec.Tracker.IT.Instrument (
Instrument (..)
, Envelope (..)
, getInstrument
, putInstrument
) where
import Control.Monad
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import Data.List.Split
import Data.Tuple
-- | Envelope given by a list of nodes.  25 node slots are always stored on
-- disk (see 'getEnvelope'); 'numNodes' says how many are in use.
data Envelope = Envelope { flag :: Word8 -- ^ envelope flag byte (presumably enable/loop/sustain bits — confirm against the IT spec)
                         , numNodes :: Word8 -- ^ number of nodes actually in use
                         , loopStart :: Word8
                         , loopEnd :: Word8
                         , sustainLoopStart :: Word8
                         , sustainLoopEnd :: Word8
                         , nodes :: [(Word16, Word8)] -- 75 bytes
                         , epad0 :: Word8 -- ^ trailing padding byte
                         }
        deriving (Show, Eq)
-- | Impulsetracker instrument.  Field order matches the on-disk layout
-- read by 'getInstrument' / written by 'putInstrument'.
data Instrument = Instrument { magicNumber :: Word32 -- ^ \"IMPI\"
                             , fileName :: [Word8] -- ^ 12 bytes
                             , ipad0 :: Word8 -- ^ padding
                             , newNoteAction :: Word8 -- ^ New note action:
                                                      --
                                                      -- 0: cut,
                                                      -- 1: continue,
                                                      -- 2: note off,
                                                      -- 3: note fade
                             , duplicateCheckType :: Word8 -- ^ Duplicate check type:
                                                      --
                                                      -- 0: off,
                                                      -- 1: note,
                                                      -- 2: sample,
                                                      -- 3: instrument
                             , duplicateCheckAction :: Word8 -- ^ Duplicate check action
                                                      --
                                                      -- 0: cut,
                                                      -- 1: note off,
                                                      -- 2: note fade
                             , fadeOut :: Word16 -- ^ fade-out rate (units per IT spec — confirm)
                             , pitchPanSeparation :: Word8
                             , pitchPanCenter :: Word8
                             , globalVolume :: Word8
                             , defaultPan :: Word8
                             , ipad1 :: [Word8] -- ^ padding (2 bytes)
                             , version :: Word16
                             , sampleNum :: Word8
                             , ipad2 :: Word8
                             , name :: [Word8] -- 26 bytes
                             , ipad3 :: [Word8] -- ^ padding (6 bytes)
                             , noteSampleTable :: [(Word8, Word8)] -- 240 bytes
                             , volumeEnvelope :: Envelope
                             , panningEnvelope :: Envelope
                             , pitchEnvelope :: Envelope
                             }
        deriving (Show, Eq)
-- | Read a single envelope node from the monad state.  On disk the
-- y-value byte comes first, followed by the x-position word
-- (little-endian); the pair is returned as (x, y).
getNode :: Get (Word16, Word8)
getNode = label "IT.Instrument Node" $ do
    yVal <- getWord8
    xPos <- getWord16le
    return (xPos, yVal)
-- | Read an `Envelope` from the monad state: six header bytes, 25 node
-- slots, and one padding byte, in declaration order of the 'Envelope'
-- fields.
getEnvelope :: Get Envelope
getEnvelope = label "IT.Instrument Envelope" $ do
    flg  <- getWord8
    num  <- getWord8
    lpS  <- getWord8
    lpE  <- getWord8
    susS <- getWord8
    susE <- getWord8
    ns   <- replicateM 25 getNode
    pad  <- getWord8
    return (Envelope flg num lpS lpE susS susE ns pad)
-- | Write a single envelope node to the buffer.
--
-- BUG FIX: the node's on-disk layout is y-value byte first, then the
-- x-position word (this is exactly what 'getNode' reads — note its swap).
-- The previous implementation wrote the word first, so serialized
-- envelopes could not be read back by 'getNode'.
putNode :: (Word16, Word8) -> Put
putNode (x, y) = putWord8 y >> putWord16le x
-- | Write an `Envelope` to the buffer: six header bytes, the node list,
-- and the padding byte.
-- NOTE(review): writes however many nodes the list contains; the on-disk
-- format stores 25 slots (cf. 'getEnvelope'), so callers must keep the
-- list at that length.
putEnvelope :: Envelope -> Put
putEnvelope Envelope{..} = do
  mapM_ putWord8 [ flag, numNodes, loopStart, loopEnd
                 , sustainLoopStart, sustainLoopEnd
                 ]
  mapM_ putNode nodes
  putWord8 epad0
-- | Read an `Instrument` from the monad state.  The applicative chain
-- consumes fields in exactly the declaration order of 'Instrument'; see
-- the record definition for each field's meaning and width.
getInstrument :: Get Instrument
getInstrument = label "IT.Instrument" $
    Instrument <$> getWord32le <*> replicateM 12 getWord8 <*> getWord8
               <*> getWord8 <*> getWord8 <*> getWord8 <*> getWord16le
               <*> getWord8 <*> getWord8 <*> getWord8 <*> getWord8
               <*> replicateM 2 getWord8 <*> getWord16le <*> getWord8
               <*> getWord8 <*> replicateM 26 getWord8 <*> replicateM 6 getWord8
               <*> getNoteSampleTable <*> getEnvelope <*> getEnvelope <*> getEnvelope
-- | Read the 240-byte note/sample table from the monad state as 120
-- (note, sample) pairs, in storage order.
--
-- Rewritten to read pairs directly instead of chunking a 240-byte list
-- with a partial lambda (@\\[a,b] -> (a,b)@), which was non-total and
-- needlessly depended on 'chunksOf'.
getNoteSampleTable :: Get [(Word8, Word8)]
getNoteSampleTable = label "IT.Instrument NoteSampleTable" $
    replicateM 120 ((,) <$> getWord8 <*> getWord8)
-- | Write an `Instrument` to the buffer, mirroring the field order read
-- by 'getInstrument'.
putInstrument :: Instrument -> Put
putInstrument Instrument{..} = do
  putWord32le magicNumber
  mapM_ putWord8 fileName
  mapM_ putWord8
    [ ipad0
    , newNoteAction
    , duplicateCheckType
    , duplicateCheckAction
    ]
  putWord16le fadeOut
  mapM_ putWord8
    [ pitchPanSeparation
    , pitchPanCenter
    , globalVolume
    , defaultPan
    ]
  mapM_ putWord8 ipad1
  putWord16le version
  mapM_ putWord8 ([sampleNum, ipad2] ++ name ++ ipad3)
  -- flatten the (note, sample) pairs back into the flat 240-byte table
  mapM_ putWord8 (foldr (\(f,s) a -> f : s : a) [] noteSampleTable)
  mapM_ putEnvelope
    [ volumeEnvelope
    , panningEnvelope
    , pitchEnvelope
    ]
|
riottracker/modfile
|
src/Codec/Tracker/IT/Instrument.hs
|
bsd-3-clause
| 6,460
| 0
| 27
| 3,421
| 935
| 536
| 399
| 94
| 1
|
module ProjectEuler.Problem033 (solution033) where
import Data.Ratio
-- | Digit-cancelling fractions ab/bc (a, b, c nonzero digits) whose value
-- survives "cancelling" the shared digit b, i.e. ab/bc == a/c; the guard
-- @9ac + bc == 10ab@ is that equation cleared of denominators.  (The
-- trivial a == b == c cases also satisfy it, but they equal 1 and do not
-- affect the product below.)
reductibleFractions :: [Ratio Integer]
reductibleFractions =
  [ (10 * a + b) % (10 * b + c)
  | a <- [1 .. 9]
  , b <- [1 .. 9]
  , c <- [1 .. 9]
  , 9 * a * c + b * c == 10 * a * b
  ]

-- | Denominator, in lowest terms, of the product of the digit-cancelling
-- fractions.
solution033 :: Integer
solution033 = denominator (product reductibleFractions)
|
guillaume-nargeot/project-euler-haskell
|
src/ProjectEuler/Problem033.hs
|
bsd-3-clause
| 315
| 0
| 13
| 60
| 149
| 81
| 68
| 6
| 1
|
{-# LANGUAGE CPP #-}
module TcInteract (
solveSimpleGivens, -- Solves [Ct]
solveSimpleWanteds, -- Solves Cts
solveCallStack, -- for use in TcSimplify
) where
#include "HsVersions.h"
import BasicTypes ( infinity, IntWithInf, intGtLimit )
import HsTypes ( HsIPName(..) )
import TcCanonical
import TcFlatten
import VarSet
import Type
import InstEnv( DFunInstType, lookupInstEnv, instanceDFunId )
import CoAxiom( sfInteractTop, sfInteractInert )
import Var
import TcType
import Name
import PrelNames ( knownNatClassName, knownSymbolClassName,
typeableClassName, coercibleTyConKey,
heqTyConKey, ipClassKey )
import TysWiredIn ( typeNatKind, typeSymbolKind, heqDataCon,
coercibleDataCon )
import TysPrim ( eqPrimTyCon, eqReprPrimTyCon )
import Id( idType )
import CoAxiom ( Eqn, CoAxiom(..), CoAxBranch(..), fromBranches )
import Class
import TyCon
import DataCon( dataConWrapId )
import FunDeps
import FamInst
import FamInstEnv
import Unify ( tcUnifyTyWithTFs )
import TcEvidence
import Outputable
import TcRnTypes
import TcSMonad
import Bag
import MonadUtils ( concatMapM )
import Data.List( partition, foldl', deleteFirstsBy )
import SrcLoc
import VarEnv
import Control.Monad
import Maybes( isJust )
import Pair (Pair(..))
import Unique( hasKey )
import DynFlags
import Util
import qualified GHC.LanguageExtensions as LangExt
{-
**********************************************************************
* *
* Main Interaction Solver *
* *
**********************************************************************
Note [Basic Simplifier Plan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1. Pick an element from the WorkList if there exists one with depth
less than our context-stack depth.
2. Run it down the 'stage' pipeline. Stages are:
- canonicalization
- inert reactions
- spontaneous reactions
  - top-level interactions
 Each stage returns a StopOrContinue and may have side-effected
the inerts or worklist.
The threading of the stages is as follows:
- If (Stop) is returned by a stage then we start again from Step 1.
- If (ContinueWith ct) is returned by a stage, we feed 'ct' on to
the next stage in the pipeline.
3. If the element has survived (i.e. ContinueWith x) the last stage
   then we add it to the inerts and jump back to Step 1.
If in Step 1 no such element exists, we have exceeded our context-stack
depth and will simply fail.
Note [Unflatten after solving the simple wanteds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We unflatten after solving the wc_simples of an implication, and before attempting
to float. This means that
* The fsk/fmv flatten-skolems only survive during solveSimples. We don't
need to worry about them across successive passes over the constraint tree.
(E.g. we don't need the old ic_fsk field of an implication.
* When floating an equality outwards, we don't need to worry about floating its
associated flattening constraints.
* Another tricky case becomes easy: Trac #4935
type instance F True a b = a
type instance F False a b = b
[w] F c a b ~ gamma
(c ~ True) => a ~ gamma
(c ~ False) => b ~ gamma
Obviously this is soluble with gamma := F c a b, and unflattening
will do exactly that after solving the simple constraints and before
attempting the implications. Before, when we were not unflattening,
we had to push Wanted funeqs in as new givens. Yuk!
Another example that becomes easy: indexed_types/should_fail/T7786
[W] BuriedUnder sub k Empty ~ fsk
[W] Intersect fsk inv ~ s
[w] xxx[1] ~ s
[W] forall[2] . (xxx[1] ~ Empty)
=> Intersect (BuriedUnder sub k Empty) inv ~ Empty
Note [Running plugins on unflattened wanteds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There is an annoying mismatch between solveSimpleGivens and
solveSimpleWanteds, because the latter needs to fiddle with the inert
set, unflatten and zonk the wanteds. It passes the zonked wanteds
to runTcPluginsWanteds, which produces a replacement set of wanteds,
some additional insolubles and a flag indicating whether to go round
the loop again. If so, prepareInertsForImplications is used to remove
the previous wanteds (which will still be in the inert set). Note
that prepareInertsForImplications will discard the insolubles, so we
must keep track of them separately.
-}
-- | Solve a batch of Given constraints, returning any insoluble
-- Givens that were discovered along the way.  Plugins may generate
-- fresh Givens, so we keep solving until they produce nothing new.
solveSimpleGivens :: [Ct] -> TcS Cts
solveSimpleGivens givens
  | null givens  -- Common fast path: nothing to solve
  = return emptyCts
  | otherwise
  = do { traceTcS "solveSimpleGivens {" (ppr givens)
       ; loop givens
       ; given_insols <- takeGivenInsolubles
       ; traceTcS "End solveSimpleGivens }" (text "Insoluble:" <+> pprCts given_insols)
       ; return given_insols }
  where
    -- Solve this batch, then re-run with whatever the plugins emitted,
    -- stopping once the plugins are quiescent.
    loop gs = do { solveSimples (listToBag gs)
                 ; new_gs <- runTcPluginsGiven
                 ; unless (null new_gs) (loop new_gs) }
-- | Solve a bag of simple Wanted constraints, iterating the
-- solve/plugin loop until a fixed point or the iteration limit.
-- See Note [The solveSimpleWanteds loop]
solveSimpleWanteds :: Cts -> TcS WantedConstraints
-- NB: 'simples' may contain /derived/ equalities, floated
--     out from a nested implication. So don't discard deriveds!
solveSimpleWanteds simples
  = do { traceTcS "solveSimpleWanteds {" (ppr simples)
       ; dflags <- getDynFlags
       ; (n,wc) <- go 1 (solverIterations dflags) (emptyWC { wc_simple = simples })
       ; traceTcS "solveSimpleWanteds end }" $
             vcat [ text "iterations =" <+> ppr n
                  , text "residual =" <+> ppr wc ]
       ; return wc }
  where
    -- 'go n limit wc': iteration n, bounded by -fsolver-iterations;
    -- returns the iteration count plus the residual constraints
    go :: Int -> IntWithInf -> WantedConstraints -> TcS (Int, WantedConstraints)
    go n limit wc
      | n `intGtLimit` limit
      = failTcS (hang (text "solveSimpleWanteds: too many iterations"
                       <+> parens (text "limit =" <+> ppr limit))
                    2 (vcat [ text "Set limit with -fsolver-iterations=n; n=0 for no limit"
                            , text "Simples =" <+> ppr simples
                            , text "WC =" <+> ppr wc ]))
      | isEmptyBag (wc_simple wc)
      = return (n,wc)
      | otherwise
      = do { -- Solve
             (unif_count, wc1) <- solve_simple_wanteds wc
             -- Run plugins
           ; (rerun_plugin, wc2) <- runTcPluginsWanted wc1
             -- See Note [Running plugins on unflattened wanteds]
             -- Stop only when no unifications happened AND the plugins
             -- do not want another round
           ; if unif_count == 0 && not rerun_plugin
             then return (n, wc2)             -- Done
             else do { traceTcS "solveSimple going round again:" (ppr rerun_plugin)
                     ; go (n+1) limit wc2 } }      -- Loop
solve_simple_wanteds :: WantedConstraints -> TcS (Int, WantedConstraints)
-- Try solving these constraints
-- Affects the unification state (of course) but not the inert set
-- Returns the number of unifications performed, plus the residual
-- (unflattened) constraints
solve_simple_wanteds (WC { wc_simple = simples1, wc_insol = insols1, wc_impl = implics1 })
  = nestTcS $
    do { solveSimples simples1
       ; (implics2, tv_eqs, fun_eqs, insols2, others) <- getUnsolvedInerts
       ; (unif_count, unflattened_eqs) <- reportUnifications $
                                          unflatten tv_eqs fun_eqs
            -- See Note [Unflatten after solving the simple wanteds]
       ; return ( unif_count
                , WC { wc_simple = others `andCts` unflattened_eqs
                     , wc_insol  = insols1 `andCts` insols2
                     , wc_impl   = implics1 `unionBags` implics2 }) }
{- Note [The solveSimpleWanteds loop]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Solving a bunch of simple constraints is done in a loop,
(the 'go' loop of 'solveSimpleWanteds'):
1. Try to solve them; unflattening may lead to improvement that
was not exploitable during solving
2. Try the plugin
3. If step 1 did improvement during unflattening; or if the plugin
wants to run again, go back to step 1
Non-obviously, improvement can also take place during
the unflattening that takes place in step (1). See TcFlatten,
See Note [Unflattening can force the solver to iterate]
-}
-- The main solver loop implements Note [Basic Simplifier Plan]
---------------------------------------------------------------
solveSimples :: Cts -> TcS ()
-- Returns the final InertSet in TcS
-- Has no effect on work-list or residual-implications
-- The constraints are initially examined in left-to-right order
solveSimples cts
  = {-# SCC "solveSimples" #-}
    do { -- Push all the constraints onto the work-list, then drain it
         updWorkListTcS (\wl -> foldrBag extendWorkListCt wl cts)
       ; solve_loop }
  where
    -- Pop one work item at a time and run it through the pipeline,
    -- until the work-list is empty
    solve_loop
      = {-# SCC "solve_loop" #-}
        do { sel <- selectNextWorkItem
           ; case sel of
              Nothing -> return ()
              Just ct -> do { runSolverPipeline thePipeline ct
                            ; solve_loop } }
-- | Extract the (inert) givens and invoke the plugins on them.
-- Remove solved givens from the inert set and emit insolubles, but
-- return new work produced so that 'solveSimpleGivens' can feed it back
-- into the main solver.
runTcPluginsGiven :: TcS [Ct]
runTcPluginsGiven
  = do { plugins <- getTcPlugins
       ; if null plugins then return [] else
    do { givens <- getInertGivens
       ; if null givens then return [] else
    do { p <- runTcPlugins plugins (givens,[],[])
       ; let (solved_givens, _, _) = pluginSolvedCts p
         -- Solved givens are dropped from the inerts; bad constraints
         -- become insolubles; new constraints are returned as work
       ; updInertCans (removeInertCts solved_givens)
       ; mapM_ emitInsoluble (pluginBadCts p)
       ; return (pluginNewCts p) } } }
-- | Given a bag of (flattened, zonked) wanteds, invoke the plugins on
-- them and produce an updated bag of wanteds (possibly with some new
-- work) and a bag of insolubles.  The boolean indicates whether
-- 'solveSimpleWanteds' should feed the updated wanteds back into the
-- main solver.
runTcPluginsWanted :: WantedConstraints -> TcS (Bool, WantedConstraints)
runTcPluginsWanted wc@(WC { wc_simple = simples1, wc_insol = insols1, wc_impl = implics1 })
  | isEmptyBag simples1
  = return (False, wc)
  | otherwise
  = do { plugins <- getTcPlugins
       ; if null plugins then return (False, wc) else
    do { given <- getInertGivens
       ; simples1 <- zonkSimples simples1    -- Plugin requires zonked inputs
       ; let (wanted, derived) = partition isWantedCt (bagToList simples1)
       ; p <- runTcPlugins plugins (given, derived, wanted)
       ; let (_, _,                solved_wanted)   = pluginSolvedCts p
             (_, unsolved_derived, unsolved_wanted) = pluginInputCts p
             new_wanted                             = pluginNewCts p
-- SLPJ: I'm deeply suspicious of this
--             ; updInertCans (removeInertCts $ solved_givens ++ solved_deriveds)
         -- Record evidence for each wanted the plugins solved, then
         -- rebuild the WantedConstraints from what is left over
       ; mapM_ setEv solved_wanted
       ; return ( notNull (pluginNewCts p)
                , WC { wc_simple = listToBag new_wanted `andCts` listToBag unsolved_wanted
                                   `andCts` listToBag unsolved_derived
                     , wc_insol  = listToBag (pluginBadCts p) `andCts` insols1
                     , wc_impl   = implics1 } ) } }
  where
    -- Install plugin-provided evidence into a solved Wanted
    setEv :: (EvTerm,Ct) -> TcS ()
    setEv (ev,ct) = case ctEvidence ct of
      CtWanted { ctev_dest = dest } -> setWantedEvTerm dest ev
      _ -> panic "runTcPluginsWanted.setEv: attempt to solve non-wanted!"
-- | A triple of (given, derived, wanted) constraints to pass to plugins
type SplitCts  = ([Ct], [Ct], [Ct])

-- | A solved triple of constraints, with evidence for wanteds
type SolvedCts = ([Ct], [Ct], [(EvTerm,Ct)])

-- | Represents collections of constraints generated by typechecker
-- plugins; threaded through the fold in 'runTcPlugins'
data TcPluginProgress = TcPluginProgress
    { pluginInputCts  :: SplitCts
      -- ^ Original inputs to the plugins with solved/bad constraints
      -- removed, but otherwise unmodified
    , pluginSolvedCts :: SolvedCts
      -- ^ Constraints solved by plugins
    , pluginBadCts    :: [Ct]
      -- ^ Constraints reported as insoluble by plugins
    , pluginNewCts    :: [Ct]
      -- ^ New constraints emitted by plugins
    }
-- | Fetch the typechecker plugins registered in the global environment.
getTcPlugins :: TcS [TcPluginSolver]
getTcPlugins = fmap tcg_tc_plugins getGblEnv
-- | Starting from a triple of (given, derived, wanted) constraints,
-- invoke each of the typechecker plugins in turn and return
--
-- * the remaining unmodified constraints,
-- * constraints that have been solved,
-- * constraints that are insoluble, and
-- * new work.
--
-- Note that new work generated by one plugin will not be seen by
-- other plugins on this pass (but the main constraint solver will be
-- re-invoked and they will see it later).  There is no check that new
-- work differs from the original constraints supplied to the plugin:
-- the plugin itself should perform this check if necessary.
runTcPlugins :: [TcPluginSolver] -> SplitCts -> TcS TcPluginProgress
runTcPlugins plugins all_cts
  = foldM do_plugin initialProgress plugins
  where
    -- Run one plugin on the not-yet-solved inputs and merge its result
    do_plugin :: TcPluginProgress -> TcPluginSolver -> TcS TcPluginProgress
    do_plugin p solver = do
        result <- runTcPluginTcS (uncurry3 solver (pluginInputCts p))
        return $ progress p result

    -- Fold a plugin's verdict into the running progress record
    progress :: TcPluginProgress -> TcPluginResult -> TcPluginProgress
    progress p (TcPluginContradiction bad_cts) =
       p { pluginInputCts = discard bad_cts (pluginInputCts p)
         , pluginBadCts   = bad_cts ++ pluginBadCts p
         }
    progress p (TcPluginOk solved_cts new_cts) =
      p { pluginInputCts  = discard (map snd solved_cts) (pluginInputCts p)
        , pluginSolvedCts = add solved_cts (pluginSolvedCts p)
        , pluginNewCts    = new_cts ++ pluginNewCts p
        }

    initialProgress = TcPluginProgress all_cts ([], [], []) [] []

    -- Remove the given constraints from every component of the triple
    discard :: [Ct] -> SplitCts -> SplitCts
    discard cts (xs, ys, zs) =
        (xs `without` cts, ys `without` cts, zs `without` cts)

    without :: [Ct] -> [Ct] -> [Ct]
    without = deleteFirstsBy eqCt

    -- Constraints are compared by flavour and predicate type
    eqCt :: Ct -> Ct -> Bool
    eqCt c c' = case (ctEvidence c, ctEvidence c') of
      (CtGiven   pred _ _, CtGiven   pred' _ _) -> pred `eqType` pred'
      (CtWanted  pred _ _, CtWanted  pred' _ _) -> pred `eqType` pred'
      (CtDerived pred _  , CtDerived pred' _  ) -> pred `eqType` pred'
      (_                 , _                  ) -> False

    add :: [(EvTerm,Ct)] -> SolvedCts -> SolvedCts
    add xs scs = foldl' addOne scs xs

    -- File each solved constraint under its flavour (evidence is only
    -- meaningful for Wanteds)
    addOne :: SolvedCts -> (EvTerm,Ct) -> SolvedCts
    addOne (givens, deriveds, wanteds) (ev,ct) = case ctEvidence ct of
      CtGiven  {} -> (ct:givens, deriveds, wanteds)
      CtDerived{} -> (givens, ct:deriveds, wanteds)
      CtWanted {} -> (givens, deriveds, (ev,ct):wanteds)
-- | A work item is simply a constraint pulled off the work-list
type WorkItem = Ct

-- | One stage of the solver pipeline: consume a work item and either
-- stop (discharged) or continue with a possibly-rewritten constraint
type SimplifierStage = WorkItem -> TcS (StopOrContinue Ct)
runSolverPipeline :: [(String,SimplifierStage)] -- The pipeline
                  -> WorkItem                   -- The work item
                  -> TcS ()
-- Run this item down the pipeline, leaving behind new work and inerts
-- A work item that survives every stage is added to the inert set
runSolverPipeline pipeline workItem
  = do { wl <- getWorkList
       ; traceTcS "Start solver pipeline {" $
                  vcat [ text "work item =" <+> ppr workItem
                       , text "rest of worklist =" <+> ppr wl ]

       ; bumpStepCountTcS    -- One step for each constraint processed
       ; final_res  <- run_pipeline pipeline (ContinueWith workItem)

       ; final_is <- getTcSInerts
       ; case final_res of
           Stop ev s       -> do { traceFireTcS ev s
                                 ; traceTcS "End solver pipeline (discharged) }"
                                       (text "inerts =" <+> ppr final_is)
                                 ; return () }
           ContinueWith ct -> do { traceFireTcS (ctEvidence ct) (text "Kept as inert")
                                 ; traceTcS "End solver pipeline (kept as inert) }" $
                                       vcat [ text "final_item =" <+> ppr ct
                                            , pprTvBndrs (varSetElems $ tyCoVarsOfCt ct)
                                            , text "inerts =" <+> ppr final_is]
                                 ; addInertCan ct }
       }
  where -- Thread the item through each stage, short-circuiting on Stop
        run_pipeline :: [(String,SimplifierStage)] -> StopOrContinue Ct
                     -> TcS (StopOrContinue Ct)
        run_pipeline [] res        = return res
        run_pipeline _ (Stop ev s) = return (Stop ev s)
        run_pipeline ((stg_name,stg):stgs) (ContinueWith ct)
          = do { traceTcS ("runStage " ++ stg_name ++ " {")
                          (text "workitem = " <+> ppr ct)
               ; res <- stg ct
               ; traceTcS ("end stage " ++ stg_name ++ " }") empty
               ; run_pipeline stgs res }
{-
Example 1:
Inert: {c ~ d, F a ~ t, b ~ Int, a ~ ty} (all given)
Reagent: a ~ [b] (given)
React with (c~d) ==> IR (ContinueWith (a~[b])) True []
React with (F a ~ t) ==> IR (ContinueWith (a~[b])) False [F [b] ~ t]
React with (b ~ Int) ==> IR (ContinueWith (a~[Int]) True []
Example 2:
Inert: {c ~w d, F a ~g t, b ~w Int, a ~w ty}
Reagent: a ~w [b]
React with (c ~w d) ==> IR (ContinueWith (a~[b])) True []
React with (F a ~g t) ==> IR (ContinueWith (a~[b])) True [] (can't rewrite given with wanted!)
etc.
Example 3:
Inert: {a ~ Int, F Int ~ b} (given)
Reagent: F a ~ b (wanted)
React with (a ~ Int) ==> IR (ContinueWith (F Int ~ b)) True []
React with (F Int ~ b) ==> IR Stop True [] -- after substituting we re-canonicalize and get nothing
-}
-- | The stages each work item runs through, in order.
-- See Note [Basic Simplifier Plan]
thePipeline :: [(String,SimplifierStage)]
thePipeline = [ ("canonicalization",        TcCanonical.canonicalize)
              , ("interact with inerts",    interactWithInertsStage)
              , ("top-level reactions",     topReactionsStage) ]
{-
*********************************************************************************
* *
The interact-with-inert Stage
* *
*********************************************************************************
Note [The Solver Invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We always add Givens first. So you might think that the solver has
the invariant
If the work-item is Given,
then the inert item must Given
But this isn't quite true. Suppose we have,
c1: [W] beta ~ [alpha], c2 : [W] blah, c3 :[W] alpha ~ Int
After processing the first two, we get
c1: [G] beta ~ [alpha], c2 : [W] blah
Now, c3 does not interact with the given c1, so when we spontaneously
solve c3, we must re-react it with the inert set. So we can attempt a
reaction between inert c2 [W] and work-item c3 [G].
It *is* true that [Solver Invariant]
If the work-item is Given,
AND there is a reaction
then the inert item must Given
or, equivalently,
If the work-item is Given,
and the inert item is Wanted/Derived
then there is no reaction
-}
-- Interaction result of  WorkItem <~> Ct

type StopNowFlag = Bool    -- True <=> stop after this interaction

-- | Dispatch the work item to the interaction handler for its
-- canonical form.  See Note [The Solver Invariant]
interactWithInertsStage :: WorkItem -> TcS (StopOrContinue Ct)
-- Precondition: if the workitem is a CTyEqCan then it will not be able to
-- react with anything at this stage.
interactWithInertsStage wi
  = do { inerts <- getTcSInerts
       ; let ics = inert_cans inerts
       ; case wi of
             CTyEqCan    {} -> interactTyVarEq ics wi
             CFunEqCan   {} -> interactFunEq   ics wi
             CIrredEvCan {} -> interactIrred   ics wi
             CDictCan    {} -> interactDict    ics wi
             _ -> pprPanic "interactWithInerts" (ppr wi) }
                -- CHoleCan are put straight into inert_frozen, so never get here
                -- CNonCanonical have been canonicalised
-- | What to do with the existing inert constraint after it has
-- interacted with a work item; see 'solveOneFromTheOther'
data InteractResult
   = IRKeep      -- Keep the existing inert constraint in the inert set
   | IRReplace   -- Replace the existing inert constraint with the work item
   | IRDelete    -- Delete the existing inert constraint from the inert set

instance Outputable InteractResult where
  ppr IRKeep    = text "keep"
  ppr IRReplace = text "replace"
  ppr IRDelete  = text "delete"
solveOneFromTheOther :: CtEvidence  -- Inert
                     -> CtEvidence  -- WorkItem
                     -> TcS (InteractResult, StopNowFlag)
-- Preconditions:
-- 1) inert and work item represent evidence for the /same/ predicate
-- 2) ip/class/irred constraints only; not used for equalities
-- Decide which of two matching constraints to keep, setting evidence
-- for whichever Wanted gets solved.  Guard order matters here: the
-- Derived cases must come first, then Wanted, leaving Given/Given last.
solveOneFromTheOther ev_i ev_w
  | isDerived ev_w         -- Work item is Derived; just discard it
  = return (IRKeep, True)

  | isDerived ev_i            -- The inert item is Derived, we can just throw it away,
  = return (IRDelete, False)  -- The ev_w is inert wrt earlier inert-set items,
                              -- so it's safe to continue on from this point

  | CtWanted { ctev_loc = loc_w } <- ev_w
  , prohibitedSuperClassSolve (ctEvLoc ev_i) loc_w
  = return (IRDelete, False)

  | CtWanted { ctev_dest = dest } <- ev_w
       -- Inert is Given or Wanted
  = do { setWantedEvTerm dest (ctEvTerm ev_i)
       ; return (IRKeep, True) }

  | CtWanted { ctev_loc = loc_i } <- ev_i   -- Work item is Given
  , prohibitedSuperClassSolve (ctEvLoc ev_w) loc_i
  = return (IRKeep, False)  -- Just discard the un-usable Given
                            -- This never actually happens because
                            -- Givens get processed first

  | CtWanted { ctev_dest = dest } <- ev_i
  = do { setWantedEvTerm dest (ctEvTerm ev_w)
       ; return (IRReplace, True) }

  -- So they are both Given
  -- See Note [Replacement vs keeping]
  | lvl_i == lvl_w
  = do { binds <- getTcEvBindsMap
       ; return (same_level_strategy binds, True) }

  | otherwise   -- Both are Given, levels differ
  = return (different_level_strategy, True)
  where
     pred  = ctEvPred ev_i
     loc_i = ctEvLoc ev_i
     loc_w = ctEvLoc ev_w
     lvl_i = ctLocLevel loc_i
     lvl_w = ctLocLevel loc_w

     -- Implicit parameters keep the innermost Given; everything else
     -- keeps the outermost.  See Note [Replacement vs keeping]
     different_level_strategy
       | isIPPred pred, lvl_w > lvl_i = IRReplace
       | lvl_w < lvl_i                = IRReplace
       | otherwise                    = IRKeep

     same_level_strategy binds -- Both Given
       | GivenOrigin (InstSC s_i) <- ctLocOrigin loc_i
       = case ctLocOrigin loc_w of
            GivenOrigin (InstSC s_w) | s_w < s_i -> IRReplace
                                     | otherwise -> IRKeep
            _                                    -> IRReplace

       | GivenOrigin (InstSC {}) <- ctLocOrigin loc_w
       = IRKeep

       | has_binding binds ev_w
       , not (has_binding binds ev_i)
       = IRReplace

       | otherwise = IRKeep

     has_binding binds ev = isJust (lookupEvBind binds (ctEvId ev))
{-
Note [Replacement vs keeping]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have two Given constraints both of type (C tys), say, which should
we keep? More subtle than you might think!
* Constraints come from different levels (different_level_strategy)
- For implicit parameters we want to keep the innermost (deepest)
one, so that it overrides the outer one.
See Note [Shadowing of Implicit Parameters]
- For everything else, we want to keep the outermost one. Reason: that
makes it more likely that the inner one will turn out to be unused,
and can be reported as redundant. See Note [Tracking redundant constraints]
in TcSimplify.
    It transpires that using the outermost one is responsible for an
8% performance improvement in nofib cryptarithm2, compared to
just rolling the dice. I didn't investigate why.
* Constraints coming from the same level (i.e. same implication)
- Always get rid of InstSC ones if possible, since they are less
useful for solving. If both are InstSC, choose the one with
the smallest TypeSize
See Note [Solving superclass constraints] in TcInstDcls
- Keep the one that has a non-trivial evidence binding.
Example: f :: (Eq a, Ord a) => blah
then we may find [G] d3 :: Eq a
[G] d2 :: Eq a
with bindings d3 = sc_sel (d1::Ord a)
We want to discard d2 in favour of the superclass selection from
the Ord dictionary.
Why? See Note [Tracking redundant constraints] in TcSimplify again.
* Finally, when there is still a choice, use IRKeep rather than
IRReplace, to avoid unnecessary munging of the inert set.
Doing the depth-check for implicit parameters, rather than making the work item
always override, is important. Consider
data T a where { T1 :: (?x::Int) => T Int; T2 :: T a }
f :: (?x::a) => T a -> Int
f T1 = ?x
f T2 = 3
We have a [G] (?x::a) in the inert set, and at the pattern match on T1 we add
two new givens in the work-list: [G] (?x::Int)
[G] (a ~ Int)
Now consider these steps
- process a~Int, kicking out (?x::a)
- process (?x::Int), the inner given, adding to inert set
- process (?x::a), the outer given, overriding the inner given
Wrong! The depth-check ensures that the inner implicit parameter wins.
(Actually I think that the order in which the work-list is processed means
that this chain of events won't happen, but that's very fragile.)
*********************************************************************************
* *
interactIrred
* *
*********************************************************************************
-}
-- Two pieces of irreducible evidence: if their types are *exactly identical*
-- we can rewrite them. We can never improve using this:
-- if we want ty1 :: Constraint and have ty2 :: Constraint it clearly does not
-- mean that (ty1 ~ ty2)
interactIrred :: InertCans -> Ct -> TcS (StopOrContinue Ct)
interactIrred inerts workItem@(CIrredEvCan { cc_ev = ev_w })
  | let pred = ctEvPred ev_w
        (matching_irreds, others)
          = partitionBag (\ct -> ctPred ct `tcEqTypeNoKindCheck` pred)
                         (inert_irreds inerts)
  , (ct_i : rest) <- bagToList matching_irreds
  , let ctev_i = ctEvidence ct_i
  = ASSERT( null rest )  -- At most one inert irred should match
    do { (inert_effect, stop_now) <- solveOneFromTheOther ctev_i ev_w
       ; case inert_effect of
            IRKeep    -> return ()
            IRDelete  -> updInertIrreds (\_ -> others)
            IRReplace -> updInertIrreds (\_ -> others `snocCts` workItem)
                         -- These const upd's assume that solveOneFromTheOther
                         -- has no side effects on InertCans
       ; if stop_now then
            return (Stop ev_w (text "Irred equal" <+> parens (ppr inert_effect)))
       ; else
            continueWith workItem }

  | otherwise
  = continueWith workItem

interactIrred _ wi = pprPanic "interactIrred" (ppr wi)
{-
*********************************************************************************
* *
interactDict
* *
*********************************************************************************
-}
-- | Interact a canonical class constraint with the inert dictionaries:
-- special-case CallStack wanteds, discharge against a matching inert,
-- handle Given implicit parameters, or try functional-dependency work.
interactDict :: InertCans -> Ct -> TcS (StopOrContinue Ct)
interactDict inerts workItem@(CDictCan { cc_ev = ev_w, cc_class = cls, cc_tyargs = tys })
  | isWanted ev_w
  , Just ip_name      <- isCallStackDict cls tys
  , OccurrenceOf func <- ctLocOrigin (ctEvLoc ev_w)
  -- If we're given a CallStack constraint that arose from a function
  -- call, we need to push the current call-site onto the stack instead
  -- of solving it directly from a given.
  -- See Note [Overview of implicit CallStacks]
  = do { let loc = ctEvLoc ev_w

       -- First we emit a new constraint that will capture the
       -- given CallStack.
       ; let new_loc      = setCtLocOrigin loc (IPOccOrigin (HsIPName ip_name))
                            -- We change the origin to IPOccOrigin so
                            -- this rule does not fire again.
                            -- See Note [Overview of implicit CallStacks]

       ; mb_new <- newWantedEvVar new_loc (ctEvPred ev_w)
       ; emitWorkNC (freshGoals [mb_new])

       -- Then we solve the wanted by pushing the call-site onto the
       -- newly emitted CallStack.
       ; let ev_cs = EvCsPushCall func (ctLocSpan loc) (getEvTerm mb_new)
       ; solveCallStack ev_w ev_cs
       ; stopWith ev_w "Wanted CallStack IP" }

  | Just ctev_i <- lookupInertDict inerts cls tys
  = do { (inert_effect, stop_now) <- solveOneFromTheOther ctev_i ev_w
       ; case inert_effect of
           IRKeep    -> return ()
           IRDelete  -> updInertDicts $ \ ds -> delDict ds cls tys
           IRReplace -> updInertDicts $ \ ds -> addDict ds cls tys workItem
       ; if stop_now then
            return (Stop ev_w (text "Dict equal" <+> parens (ppr inert_effect)))
         else
            continueWith workItem }

  | cls `hasKey` ipClassKey
  , isGiven ev_w
  = interactGivenIP inerts workItem

  | otherwise
  = do { addFunDepWork inerts ev_w cls
       ; continueWith workItem  }

interactDict _ wi = pprPanic "interactDict" (ppr wi)
addFunDepWork :: InertCans -> CtEvidence -> Class -> TcS ()
-- Add derived constraints from type-class functional dependencies.
-- Compares the work item against every inert dictionary of the same class.
addFunDepWork inerts work_ev cls
  = mapBagM_ add_fds (findDictsByClass (inert_dicts inerts) cls)
               -- No need to check flavour; fundeps work between
               -- any pair of constraints, regardless of flavour
               -- Importantly we don't throw workitem back in the
               -- worklist because this can cause loops (see #5236)
  where
    work_pred = ctEvPred work_ev
    work_loc  = ctEvLoc work_ev
    add_fds inert_ct
      = emitFunDepDeriveds $
        improveFromAnother derived_loc inert_pred work_pred
               -- We don't really rewrite tys2, see below _rewritten_tys2, so that's ok
               -- NB: We do create FDs for given to report insoluble equations that arise
               -- from pairs of Givens, and also because of floating when we approximate
               -- implications. The relevant test is: typecheck/should_fail/FDsFromGivens.hs
      where
        inert_pred  = ctPred inert_ct
        inert_loc   = ctLoc inert_ct
        derived_loc = work_loc { ctl_origin = FunDepOrigin1 work_pred work_loc
                                              inert_pred inert_loc }
{-
**********************************************************************
* *
Implicit parameters
* *
**********************************************************************
-}
interactGivenIP :: InertCans -> Ct -> TcS (StopOrContinue Ct)
-- Work item is Given (?x:ty)
-- The new Given shadows any inert Givens for the same implicit parameter.
-- See Note [Shadowing of Implicit Parameters]
interactGivenIP inerts workItem@(CDictCan { cc_ev = ev, cc_class = cls
                                          , cc_tyargs = tys@(ip_str:_) })
  = do { updInertCans $ \cans -> cans { inert_dicts = addDict filtered_dicts cls tys workItem }
       ; stopWith ev "Given IP" }
  where
    dicts           = inert_dicts inerts
    ip_dicts        = findDictsByClass dicts cls
    other_ip_dicts  = filterBag (not . is_this_ip) ip_dicts
    filtered_dicts  = addDictsByClass dicts cls other_ip_dicts

    -- Pick out any Given constraints for the same implicit parameter
    is_this_ip (CDictCan { cc_ev = ev, cc_tyargs = ip_str':_ })
       = isGiven ev && ip_str `tcEqType` ip_str'
    is_this_ip _ = False

interactGivenIP _ wi = pprPanic "interactGivenIP" (ppr wi)
{-
Note [Shadowing of Implicit Parameters]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following example:
f :: (?x :: Char) => Char
f = let ?x = 'a' in ?x
The "let ?x = ..." generates an implication constraint of the form:
?x :: Char => ?x :: Char
Furthermore, the signature for `f` also generates an implication
constraint, so we end up with the following nested implication:
?x :: Char => (?x :: Char => ?x :: Char)
Note that the wanted (?x :: Char) constraint may be solved in
two incompatible ways: either by using the parameter from the
signature, or by using the local definition. Our intention is
that the local definition should "shadow" the parameter of the
signature, and we implement this as follows: when we add a new
*given* implicit parameter to the inert set, it replaces any existing
givens for the same implicit parameter.
This works for the normal cases but it has an odd side effect
in some pathological programs like this:
-- This is accepted, the second parameter shadows
f1 :: (?x :: Int, ?x :: Char) => Char
f1 = ?x
-- This is rejected, the second parameter shadows
f2 :: (?x :: Int, ?x :: Char) => Int
f2 = ?x
Both of these are actually wrong: when we try to use either one,
we'll get two incompatible wanted constraints (?x :: Int, ?x :: Char),
which would lead to an error.
I can think of two ways to fix this:
1. Simply disallow multiple constraints for the same implicit
parameter---this is never useful, and it can be detected completely
syntactically.
2. Move the shadowing machinery to the location where we nest
implications, and add some code here that will produce an
error if we get multiple givens for the same implicit parameter.
**********************************************************************
* *
interactFunEq
* *
**********************************************************************
-}
interactFunEq :: InertCans -> Ct -> TcS (StopOrContinue Ct)
-- Try interacting the work item with the inert set
-- If an inert CFunEqCan has the same LHS, one discharges the other;
-- otherwise attempt derived improvement (built-in families, injectivity)
interactFunEq inerts workItem@(CFunEqCan { cc_ev = ev, cc_fun = tc
                                         , cc_tyargs = args, cc_fsk = fsk })
  | Just (CFunEqCan { cc_ev = ev_i
                    , cc_fsk = fsk_i }) <- matching_inerts
  = if ev_i `funEqCanDischarge` ev
    then  -- Rewrite work-item using inert
    do { traceTcS "reactFunEq (discharge work item):" $
         vcat [ text "workItem =" <+> ppr workItem
              , text "inertItem=" <+> ppr ev_i ]
       ; reactFunEq ev_i fsk_i ev fsk
       ; stopWith ev "Inert rewrites work item" }
    else  -- Rewrite inert using work-item
    ASSERT2( ev `funEqCanDischarge` ev_i, ppr ev $$ ppr ev_i )
    do { traceTcS "reactFunEq (rewrite inert item):" $
         vcat [ text "workItem =" <+> ppr workItem
              , text "inertItem=" <+> ppr ev_i ]
       ; updInertFunEqs $ \ feqs -> insertFunEq feqs tc args workItem
               -- Do the updInertFunEqs before the reactFunEq, so that
               -- we don't kick out the inertItem as well as consuming it!
       ; reactFunEq ev fsk ev_i fsk_i
       ; stopWith ev "Work item rewrites inert" }

  | otherwise   -- Try improvement
  = do { improveLocalFunEqs loc inerts tc args fsk
       ; continueWith workItem }
  where
    loc             = ctEvLoc ev
    funeqs          = inert_funeqs inerts
    matching_inerts = findFunEq funeqs tc args

interactFunEq _ workItem = pprPanic "interactFunEq" (ppr workItem)
improveLocalFunEqs :: CtLoc -> InertCans -> TyCon -> [TcType] -> TcTyVar
                   -> TcS ()
-- Generate derived improvement equalities, by comparing
-- the current work item with inert CFunEqs
-- E.g.   x + y ~ z,   x + y' ~ z   =>   [D] y ~ y'
improveLocalFunEqs loc inerts fam_tc args fsk
  | not (null improvement_eqns)
  = do { traceTcS "interactFunEq improvements: " $
         vcat [ text "Eqns:" <+> ppr improvement_eqns
              , text "Candidates:" <+> ppr funeqs_for_tc
              , text "Model:" <+> ppr model ]
       ; mapM_ (unifyDerived loc Nominal) improvement_eqns }
  | otherwise
  = return ()
  where
    model         = inert_model inerts
    funeqs        = inert_funeqs inerts
    funeqs_for_tc = findFunEqsByTyCon funeqs fam_tc
    rhs           = lookupFlattenTyVar model fsk
                    -- See Note [lookupFlattenTyVar]

    --------------------
    improvement_eqns
      | Just ops <- isBuiltInSynFamTyCon_maybe fam_tc
      =    -- Try built-in families, notably for arithmetic
        concatMap (do_one_built_in ops) funeqs_for_tc

      | Injective injective_args <- familyTyConInjectivityInfo fam_tc
      =    -- Try improvement from type families with injectivity annotations
        concatMap (do_one_injective injective_args) funeqs_for_tc

      | otherwise
      = []

    --------------------
    do_one_built_in ops (CFunEqCan { cc_tyargs = iargs, cc_fsk = ifsk })
      = sfInteractInert ops args rhs iargs (lookupFlattenTyVar model ifsk)
    do_one_built_in _ _ = pprPanic "interactFunEq 1" (ppr fam_tc)

    --------------------
    -- See Note [Type inference for type families with injectivity]
    do_one_injective injective_args
                     (CFunEqCan { cc_tyargs = iargs, cc_fsk = ifsk })
      | rhs `tcEqType` lookupFlattenTyVar model ifsk
      = [ Pair arg iarg | (arg, iarg, True)
                           <- zip3 args iargs injective_args ]
      | otherwise
      = []
    do_one_injective _ _ = pprPanic "interactFunEq 2" (ppr fam_tc)
-------------
-- | Look a flatten-skolem up in the model, returning its nominal
-- rewrite if one is recorded, and the variable itself (as a type)
-- otherwise.  See Note [lookupFlattenTyVar]
lookupFlattenTyVar :: InertModel -> TcTyVar -> TcType
lookupFlattenTyVar model ftv
  | Just (CTyEqCan { cc_rhs = rhs, cc_eq_rel = NomEq }) <- lookupVarEnv model ftv
  = rhs
  | otherwise
  = mkTyVarTy ftv
reactFunEq :: CtEvidence -> TcTyVar    -- From this  :: F args1 ~ fsk1
           -> CtEvidence -> TcTyVar    -- Solve this :: F args2 ~ fsk2
           -> TcS ()
-- Use the first equality to discharge the second: for a Given we emit
-- the skolem equality as new Given work; otherwise we fill the fmv.
reactFunEq from_this fsk1 solve_this fsk2
  | CtGiven { ctev_evar = evar, ctev_loc = loc } <- solve_this
  = do { let fsk_eq_co = mkTcSymCo (mkTcCoVarCo evar) `mkTcTransCo`
                         ctEvCoercion from_this
                         -- :: fsk2 ~ fsk1
             fsk_eq_pred = mkTcEqPredLikeEv solve_this
                             (mkTyVarTy fsk2) (mkTyVarTy fsk1)
       ; new_ev <- newGivenEvVar loc (fsk_eq_pred, EvCoercion fsk_eq_co)
       ; emitWorkNC [new_ev] }

  | otherwise
  = do { traceTcS "reactFunEq" (ppr from_this $$ ppr fsk1 $$
                                ppr solve_this $$ ppr fsk2)
       ; dischargeFmv solve_this fsk2 (ctEvCoercion from_this) (mkTyVarTy fsk1)
       ; traceTcS "reactFunEq done" (ppr from_this $$ ppr fsk1 $$
                                     ppr solve_this $$ ppr fsk2) }
{- Note [lookupFlattenTyVar]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have an injective function F and
inert_funeqs: F t1 ~ fsk1
F t2 ~ fsk2
model fsk1 ~ fsk2
We never rewrite the RHS (cc_fsk) of a CFunEqCan. But we /do/ want to
get the [D] t1 ~ t2 from the injectiveness of F. So we look up the
cc_fsk of CFunEqCans in the model when trying to find derived
equalities arising from injectivity.
Note [Type inference for type families with injectivity]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have a type family with an injectivity annotation:
type family F a b = r | r -> b
Then if we have two CFunEqCan constraints for F with the same RHS
F s1 t1 ~ rhs
F s2 t2 ~ rhs
then we can use the injectivity to get a new Derived constraint on
the injective argument
[D] t1 ~ t2
That in turn can help GHC solve constraints that would otherwise require
guessing. For example, consider the ambiguity check for
f :: F Int b -> Int
We get the constraint
[W] F Int b ~ F Int beta
where beta is a unification variable. Injectivity lets us pick beta ~ b.
Injectivity information is also used at the call sites. For example:
g = f True
gives rise to
[W] F Int b ~ Bool
from which we can derive b. This requires looking at the defining equations of
a type family, ie. finding equation with a matching RHS (Bool in this example)
and inferring values of type variables (b in this example) from the LHS patterns
of the matching equation. For closed type families we have to perform
an additional apartness check for the selected equation, to ensure that the
selected equation is guaranteed to fire for the given LHS arguments.
These new constraints are simply *Derived* constraints; they have no evidence.
We could go further and offer evidence from decomposing injective type-function
applications, but that would require new evidence forms, and an extension to
FC, so we don't do that right now (Dec 14).
See also Note [Injective type families] in TyCon
Note [Cache-caused loops]
~~~~~~~~~~~~~~~~~~~~~~~~~
It is very dangerous to cache a rewritten wanted family equation as 'solved' in our
solved cache (which is the default behaviour or xCtEvidence), because the interaction
may not be contributing towards a solution. Here is an example:
Initial inert set:
[W] g1 : F a ~ beta1
Work item:
[W] g2 : F a ~ beta2
The work item will react with the inert yielding the _same_ inert set plus:
i) Will set g2 := g1 `cast` g3
ii) Will add to our solved cache that [S] g2 : F a ~ beta2
iii) Will emit [W] g3 : beta1 ~ beta2
Now, the g3 work item will be spontaneously solved to [G] g3 : beta1 ~ beta2
and then it will react the item in the inert ([W] g1 : F a ~ beta1). So it
will set
g1 := g ; sym g3
and what is g? Well it would ideally be a new goal of type (F a ~ beta2) but
remember that we have this in our solved cache, and it is ... g2! In short we
created the evidence loop:
g2 := g1 ; g3
g3 := refl
g1 := g2 ; sym g3
To avoid this situation we do not cache as solved any workitems (or inert)
which did not really make a 'step' towards proving some goal. Solved's are
just an optimization so we don't lose anything in terms of completeness of
solving.
Note [Efficient Orientation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we are interacting two FunEqCans with the same LHS:
(inert) ci :: (F ty ~ xi_i)
(work) cw :: (F ty ~ xi_w)
We prefer to keep the inert (else we pass the work item on down
the pipeline, which is a bit silly). If we keep the inert, we
will (a) discharge 'cw'
(b) produce a new equality work-item (xi_w ~ xi_i)
Notice the orientation (xi_w ~ xi_i) NOT (xi_i ~ xi_w):
new_work :: xi_w ~ xi_i
cw := ci ; sym new_work
Why? Consider the simplest case when xi1 is a type variable. If
we generate xi1~xi2, processing that constraint will kick out 'ci'.
If we generate xi2~xi1, there is less chance of that happening.
Of course it can and should still happen if xi1=a, xi1=Int, say.
But we want to avoid it happening needlessly.
Similarly, if we *can't* keep the inert item (because inert is Wanted,
and work is Given, say), we prefer to orient the new equality (xi_i ~
xi_w).
Note [Carefully solve the right CFunEqCan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
---- OLD COMMENT, NOW NOT NEEDED
---- because we now allow multiple
---- wanted FunEqs with the same head
Consider the constraints
c1 :: F Int ~ a -- Arising from an application line 5
c2 :: F Int ~ Bool -- Arising from an application line 10
Suppose that 'a' is a unification variable, arising only from
flattening. So there is no error on line 5; it's just a flattening
variable. But there is (or might be) an error on line 10.
Two ways to combine them, leaving either (Plan A)
c1 :: F Int ~ a -- Arising from an application line 5
c3 :: a ~ Bool -- Arising from an application line 10
or (Plan B)
c2 :: F Int ~ Bool -- Arising from an application line 10
c4 :: a ~ Bool -- Arising from an application line 5
Plan A will unify c3, leaving c1 :: F Int ~ Bool as an error
on the *totally innocent* line 5. An example is test SimpleFail16
where the expected/actual message comes out backwards if we use
the wrong plan.
The second is the right thing to do. Hence the isMetaTyVarTy
test when solving pairwise CFunEqCan.
**********************************************************************
* *
interactTyVarEq
* *
**********************************************************************
-}
interactTyVarEq :: InertCans -> Ct -> TcS (StopOrContinue Ct)
-- Interact a canonical (tv ~ rhs) work item with the inert set:
-- solve it from a matching inert, solve it by unification, or add it
-- to the inerts.
-- CTyEqCans are always consumed, so always returns Stop
interactTyVarEq inerts workItem@(CTyEqCan { cc_tyvar = tv
                                          , cc_rhs = rhs
                                          , cc_ev = ev
                                          , cc_eq_rel = eq_rel })
  | (ev_i : _) <- [ ev_i | CTyEqCan { cc_ev = ev_i, cc_rhs = rhs_i }
                             <- findTyEqs inerts tv
                         , ev_i `eqCanDischarge` ev
                         , rhs_i `tcEqType` rhs ]
  =  -- Inert:     a ~ ty
     -- Work item: a ~ ty
     -- Discharge the work item with the inert's evidence, adjusting
     -- the role with tcDowngradeRole if necessary
    do { setEvBindIfWanted ev $
         EvCoercion (tcDowngradeRole (eqRelRole eq_rel)
                                     (ctEvRole ev_i)
                                     (ctEvCoercion ev_i))
       ; stopWith ev "Solved from inert" }

  | Just tv_rhs <- getTyVar_maybe rhs
  , (ev_i : _) <- [ ev_i | CTyEqCan { cc_ev = ev_i, cc_rhs = rhs_i }
                             <- findTyEqs inerts tv_rhs
                         , ev_i `eqCanDischarge` ev
                         , rhs_i `tcEqType` mkTyVarTy tv ]
  =  -- Inert:     a ~ b
     -- Work item: b ~ a
     -- Same as the previous case, but with the inert oriented the
     -- other way round, hence the extra mkTcSymCo
    do { setEvBindIfWanted ev $
         EvCoercion (mkTcSymCo $
                     tcDowngradeRole (eqRelRole eq_rel)
                                     (ctEvRole ev_i)
                                     (ctEvCoercion ev_i))
       ; stopWith ev "Solved from inert (r)" }

  | otherwise
  = -- Not solvable from the inerts; try spontaneous unification,
    -- otherwise keep the work item as a new inert
    do { tclvl <- getTcLevel
       ; if canSolveByUnification tclvl ev eq_rel tv rhs
         then do { solveByUnification ev tv rhs
                 ; n_kicked <- kickOutAfterUnification tv
                 ; return (Stop ev (text "Solved by unification" <+> ppr_kicked n_kicked)) }

         else do { traceTcS "Can't solve tyvar equality"
                       (vcat [ text "LHS:" <+> ppr tv <+> dcolon <+> ppr (tyVarKind tv)
                             , ppWhen (isMetaTyVar tv) $
                               nest 4 (text "TcLevel of" <+> ppr tv
                                       <+> text "is" <+> ppr (metaTyVarTcLevel tv))
                             , text "RHS:" <+> ppr rhs <+> dcolon <+> ppr (typeKind rhs)
                             , text "TcLevel =" <+> ppr tclvl ])
                 ; addInertEq workItem
                 ; return (Stop ev (text "Kept as inert")) } }

interactTyVarEq _ wi = pprPanic "interactTyVarEq" (ppr wi)
-- @canSolveByUnification tclvl ev eq_rel tv xi@ says whether the equality
-- (tv ~ xi) can be solved spontaneously, by unifying the touchable
-- unification variable tv.
canSolveByUnification :: TcLevel -> CtEvidence -> EqRel
                      -> TcTyVar -> Xi -> Bool
-- True <=> the equality (tv ~ rhs) may be solved right now by
-- spontaneously unifying tv := rhs
canSolveByUnification lvl ev rel tv rhs
  = case rel of
      ReprEq -> False    -- Never solve representational equalities this way
      NomEq  -> nominal_ok
  where
    nominal_ok
      | isGiven ev                      -- See Note [Touchables and givens]
      = False
      | isTouchableMetaTyVar lvl tv
      = case metaTyVarInfo tv of
          SigTv -> rhs_is_ok rhs        -- SigTvs may only unify with tyvars
          _     -> True
      | otherwise                       -- Untouchable
      = False

    -- When tv is a SigTv, the RHS must itself be a suitable type variable
    rhs_is_ok ty
      | Just tv' <- tcGetTyVar_maybe ty
      = case tcTyVarDetails tv' of
          MetaTv { mtv_info = SigTv } -> True
          MetaTv {}                   -> False
          SkolemTv {}                 -> True
          FlatSkol {}                 -> False
          RuntimeUnk                  -> True
      | otherwise
      = False
solveByUnification :: CtEvidence -> TcTyVar -> Xi -> TcS ()
-- Spontaneously solve (tv ~ xi) by unifying tv := xi, then discharge
-- the evidence with a reflexivity coercion.
--
-- Preconditions:
--   * kind(xi) equals kind(tv)
--   * the CtEvidence is Wanted or Derived, and nominal
--
-- No occurs check is needed here: we always arrive from a CTyEqCan,
-- a *canonical* constraint, whose invariants guarantee that in
-- (a ~ xi) the variable a does not occur in xi.
-- See TcRnTypes.Ct invariants.
--
-- Postcondition: tv is unified (by side effect) with xi;
-- we often write tv := xi
solveByUnification ev tv xi
  = do { let lhs_ty = mkTyVarTy tv
       ; traceTcS "Sneaky unification:" $
         vcat [ text "Unifies:" <+> ppr tv <+> text ":=" <+> ppr xi
              , text "Coercion:" <+> pprEq lhs_ty xi
              , text "Left Kind is:" <+> ppr (typeKind lhs_ty)
              , text "Right Kind is:" <+> ppr (typeKind xi) ]
       ; unifyTyVar tv xi
       ; setEvBindIfWanted ev (EvCoercion (mkTcNomReflCo xi)) }
ppr_kicked :: Int -> SDoc
-- Report how many inert constraints were kicked out; silent when none
ppr_kicked n
  | n == 0    = empty
  | otherwise = parens (int n <+> text "kicked out")
{- Note [Avoid double unifications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The spontaneous solver has to return a given which mentions the unified unification
variable *on the left* of the equality. Here is what happens if not:
Original wanted: (a ~ alpha), (alpha ~ Int)
We spontaneously solve the first wanted, without changing the order!
given : a ~ alpha [having unified alpha := a]
Now the second wanted comes along, but he cannot rewrite the given, so we simply continue.
At the end we spontaneously solve that guy, *reunifying* [alpha := Int]
We avoid this problem by orienting the resulting given so that the unification
variable is on the left. [Note that alternatively we could attempt to
enforce this at canonicalization]
See also Note [No touchables as FunEq RHS] in TcSMonad; avoiding
double unifications is the main reason we disallow touchable
unification variables as RHS of type family equations: F xis ~ alpha.
************************************************************************
* *
* Functional dependencies, instantiation of equations
* *
************************************************************************
When we spot an equality arising from a functional dependency,
we now use that equality (a "wanted") to rewrite the work-item
constraint right away. This avoids two dangers
Danger 1: If we send the original constraint on down the pipeline
it may react with an instance declaration, and in delicate
situations (when a Given overlaps with an instance) that
may produce new insoluble goals: see Trac #4952
Danger 2: If we don't rewrite the constraint, it may re-react
with the same thing later, and produce the same equality
again --> termination worries.
To achieve this required some refactoring of FunDeps.hs (nicer
now!).
-}
emitFunDepDeriveds :: [FunDepEqn CtLoc] -> TcS ()
-- Emit the [D] equalities arising from functional-dependency improvement
emitFunDepDeriveds = mapM_ emit_fd_eqn
  where
    emit_fd_eqn (FDEqn { fd_qtvs = qtvs, fd_eqs = pairs, fd_loc = loc })
      | null qtvs   -- Common shortcut: nothing to instantiate
      = mapM_ (unifyDerived loc Nominal) pairs
      | otherwise   -- Instantiate the quantified variables first
                    -- (instFlexiTcS takes account of kind substitution)
      = do { (subst, _) <- instFlexiTcS qtvs
           ; mapM_ (emit_one loc subst) pairs }

    emit_one loc subst (Pair t1 t2)
      = unifyDerived loc Nominal $
        Pair (Type.substTyUnchecked subst t1) (Type.substTyUnchecked subst t2)
{-
**********************************************************************
* *
The top-reaction Stage
* *
**********************************************************************
-}
topReactionsStage :: WorkItem -> TcS (StopOrContinue Ct)
-- Final pipeline stage: react the work item with top-level instances
-- and family axioms, tagging any Stop message with its provenance
topReactionsStage work_item
  = do { res <- doTopReact work_item
       ; case res of
           Stop ev doc       -> return (Stop ev (text "Top react:" <+> doc))
           ContinueWith item -> continueWith item }
doTopReact :: WorkItem -> TcS (StopOrContinue Ct)
-- The work item did not react with the inert set, so try interaction
-- with top-level instances.
--
-- NB: superclasses are *not* added here; that happens during
-- canonicalization.  See Note [Adding superclasses].
doTopReact wi
  = do { traceTcS "doTopReact" (ppr wi)
       ; case wi of
           CFunEqCan {} -> doTopReactFunEq wi
           CDictCan {}  -> do { is <- getTcSInerts
                              ; doTopReactDict is wi }
           -- No other work item reacts with top-level equations
           _other       -> continueWith wi }
--------------------
doTopReactDict :: InertSet -> Ct -> TcS (StopOrContinue Ct)
-- Try to use type-class instance declarations to simplify the
-- class-constraint work item.  Four cases, by flavour/cache status.
doTopReactDict inerts work_item@(CDictCan { cc_ev = fl, cc_class = cls
                                          , cc_tyargs = xis })
  | isGiven fl   -- Never use instances for Given constraints;
                 -- just try functional-dependency improvement
  = do { try_fundep_improvement
       ; continueWith work_item }

  | Just ev <- lookupSolvedDict inerts cls xis   -- Cached: reuse evidence
  = do { setEvBindIfWanted fl (ctEvTerm ev)
       ; stopWith fl "Dict/Top (cached)" }

  | isDerived fl  -- Use type-class instances for Deriveds, in the hope
                  -- of generating some improvements
                  -- C.f. Example 3 of Note [The improvement story]
                  -- It's easy because no evidence is involved
  = do { dflags <- getDynFlags
       ; lkup_inst_res <- matchClassInst dflags inerts cls xis dict_loc
       ; case lkup_inst_res of
           GenInst { lir_new_theta = preds
                   , lir_safe_over = s } ->
             do { emitNewDeriveds dict_loc preds
                ; unless s $ insertSafeOverlapFailureTcS work_item
                ; stopWith fl "Dict/Top (solved)" }

           NoInstance ->
             do { -- If there is no instance, try improvement
                  try_fundep_improvement
                ; continueWith work_item } }

  | otherwise  -- Wanted, but not cached: really solve from an instance
  = do { dflags <- getDynFlags
       ; lkup_inst_res <- matchClassInst dflags inerts cls xis dict_loc
       ; case lkup_inst_res of
           GenInst { lir_new_theta = theta
                   , lir_mk_ev     = mk_ev
                   , lir_safe_over = s } ->
             do { addSolvedDict fl cls xis
                ; unless s $ insertSafeOverlapFailureTcS work_item
                ; solve_from_instance theta mk_ev }
           NoInstance ->
             do { try_fundep_improvement
                ; continueWith work_item } }
  where
    dict_pred   = mkClassPred cls xis
    dict_loc    = ctEvLoc fl
    dict_origin = ctLocOrigin dict_loc
    deeper_loc  = zap_origin (bumpCtLocDepth dict_loc)

    zap_origin loc  -- After applying an instance we can set ScOrigin to
                    -- infinity, so that prohibitedSuperClassSolve never fires
      | ScOrigin {} <- dict_origin
      = setCtLocOrigin loc (ScOrigin infinity)
      | otherwise
      = loc

    solve_from_instance :: [TcPredType]
                        -> ([EvTerm] -> EvTerm) -> TcS (StopOrContinue Ct)
    -- Bind the work item's evidence using the instance, emitting new
    -- Wanteds for the instance's context (theta)
    -- Precondition: evidence term matches the predicate workItem
    solve_from_instance theta mk_ev
      | null theta
      = do { traceTcS "doTopReact/found nullary instance for" $ ppr fl
           ; setWantedEvBind (ctEvId fl) (mk_ev [])
           ; stopWith fl "Dict/Top (solved, no new work)" }
      | otherwise
      = do { checkReductionDepth deeper_loc dict_pred
           ; traceTcS "doTopReact/found non-nullary instance for" $ ppr fl
           ; evc_vars <- mapM (newWanted deeper_loc) theta
           ; setWantedEvBind (ctEvId fl) (mk_ev (map getEvTerm evc_vars))
           ; emitWorkNC (freshGoals evc_vars)
           ; stopWith fl "Dict/Top (solved, more work)" }

    -- We didn't solve it; so try functional dependencies with
    -- the instance environment, and return
    -- See also Note [Weird fundeps]
    try_fundep_improvement
      = do { traceTcS "try_fundeps" (ppr work_item)
           ; instEnvs <- getInstEnvs
           ; emitFunDepDeriveds $
             improveFromInstEnv instEnvs mk_ct_loc dict_pred }

    mk_ct_loc :: PredType   -- From instance decl
              -> SrcSpan    -- also from instance decl
              -> CtLoc
    mk_ct_loc inst_pred inst_loc
      = dict_loc { ctl_origin = FunDepOrigin2 dict_pred dict_origin
                                              inst_pred inst_loc }

doTopReactDict _ w = pprPanic "doTopReactDict" (ppr w)
--------------------
doTopReactFunEq :: Ct -> TcS (StopOrContinue Ct)
-- React a CFunEqCan (F args ~ fsk) with the top-level family instances
-- and built-in axioms.  The matcher expands synonyms for us; see
-- Note [MATCHING-SYNONYMS].
doTopReactFunEq wi@(CFunEqCan { cc_ev = ev, cc_fun = fam_tc
                              , cc_tyargs = args, cc_fsk = fsk })
  = do { mb_match <- matchFam fam_tc args
       ; case mb_match of
           Just (ax_co, rhs_ty)   -- Found an applicable axiom: reduce
             -> reduce_top_fun_eq ev fsk ax_co rhs_ty
           Nothing                -- No axiom: try injectivity improvement
             -> do { improveTopFunEqs (ctEvLoc ev) fam_tc args fsk
                   ; continueWith wi } }
doTopReactFunEq w = pprPanic "doTopReactFunEq" (ppr w)
reduce_top_fun_eq :: CtEvidence -> TcTyVar -> TcCoercion -> TcType
                  -> TcS (StopOrContinue Ct)
-- Found an applicable top-level axiom: use it to reduce
--   old_ev :: F args ~ fsk
--   ax_co  :: F args ~ rhs_ty
reduce_top_fun_eq old_ev fsk ax_co rhs_ty
  | Just (tc, tc_args) <- tcSplitTyConApp_maybe rhs_ty
  , isTypeFamilyTyCon tc
  , tc_args `lengthIs` tyConArity tc    -- Short-cut
  = -- RHS is itself a saturated family application: reuse fsk
    -- Try shortcut; see Note [Short cut for top-level reaction]
    shortCutReduction old_ev fsk ax_co tc tc_args

  | isGiven old_ev  -- Not shortcut
  = do { let final_co = mkTcSymCo (ctEvCoercion old_ev) `mkTcTransCo` ax_co
             -- final_co :: fsk ~ rhs_ty
       ; new_ev <- newGivenEvVar deeper_loc (mkPrimEqPred (mkTyVarTy fsk) rhs_ty,
                                             EvCoercion final_co)
       ; emitWorkNC [new_ev] -- Non-canonical; that will mean we flatten rhs_ty
       ; stopWith old_ev "Fun/Top (given)" }

  -- So old_ev is Wanted or Derived
  | not (fsk `elemVarSet` tyCoVarsOfType rhs_ty)
  = -- No occurs problem: just fill fsk with rhs_ty
    do { dischargeFmv old_ev fsk ax_co rhs_ty
       ; traceTcS "doTopReactFunEq" $
         vcat [ text "old_ev:" <+> ppr old_ev
              , nest 2 (text ":=") <+> ppr ax_co ]
       ; stopWith old_ev "Fun/Top (wanted)" }

  | otherwise -- We must not assign ufsk := ...ufsk...!
  = -- Introduce a fresh alpha to break the cycle, and emit the
    -- loopy (alpha ~ rhs_ty) equality as non-canonical work
    do { alpha_ty <- newFlexiTcSTy (tyVarKind fsk)
       ; new_ev <- case old_ev of
           CtWanted {}  -> do { (ev, _) <- newWantedEq loc Nominal alpha_ty rhs_ty
                              ; updWorkListTcS $
                                extendWorkListEq (mkNonCanonical ev)
                              ; return ev }
           CtDerived {} -> do { ev <- newDerivedNC loc pred
                              ; updWorkListTcS (extendWorkListDerived loc ev)
                              ; return ev }
                        where pred = mkPrimEqPred alpha_ty rhs_ty
           _ -> pprPanic "reduce_top_fun_eq" (ppr old_ev)
           -- By emitting this as non-canonical, we deal with all
           -- flattening, occurs-check, and ufsk := ufsk issues
       ; let final_co = ax_co `mkTcTransCo` mkTcSymCo (ctEvCoercion new_ev)
             -- ax_co :: fam_tc args ~ rhs_ty
             -- ev    :: alpha ~ rhs_ty
             -- ufsk := alpha
             -- final_co :: fam_tc args ~ alpha
       ; dischargeFmv old_ev fsk final_co alpha_ty
       ; traceTcS "doTopReactFunEq (occurs)" $
         vcat [ text "old_ev:" <+> ppr old_ev
              , nest 2 (text ":=") <+> ppr final_co
              , text "new_ev:" <+> ppr new_ev ]
       ; stopWith old_ev "Fun/Top (wanted)" }
  where
    loc        = ctEvLoc old_ev
    deeper_loc = bumpCtLocDepth loc
improveTopFunEqs :: CtLoc -> TyCon -> [TcType] -> TcTyVar -> TcS ()
-- Emit Derived improvement equalities for (fam_tc args ~ fsk), using
-- injectivity annotations or built-in interaction rules.
-- The RHS is looked up through the model; see Note [lookupFlattenTyVar]
improveTopFunEqs loc fam_tc args fsk
  = do { fam_envs <- getFamInstEnvs
       ; model    <- getInertModel
       ; let rhs = lookupFlattenTyVar model fsk
       ; eqns <- improve_top_fun_eqs fam_envs fam_tc args rhs
       ; mapM_ (unifyDerived loc Nominal) eqns }
improve_top_fun_eqs :: FamInstEnvs
                    -> TyCon -> [TcType] -> TcType
                    -> TcS [Eqn]
-- Compute improvement equalities for (fam_tc args ~ rhs_ty), from
-- built-in interaction rules or from injectivity annotations.
improve_top_fun_eqs fam_envs fam_tc args rhs_ty
  | Just ops <- isBuiltInSynFamTyCon_maybe fam_tc
  = -- Built-in family (e.g. type-level arithmetic): use its own rules
    return (sfInteractTop ops args rhs_ty)

  -- see Note [Type inference for type families with injectivity]
  | isOpenTypeFamilyTyCon fam_tc
  , Injective injective_args <- familyTyConInjectivityInfo fam_tc
  = -- it is possible to have several compatible equations in an open type
    -- family but we only want to derive equalities from one such equation.
    concatMapM (injImproveEqns injective_args) (take 1 $
      buildImprovementData (lookupFamInstEnvByTyCon fam_envs fam_tc)
                           fi_tys fi_rhs (const Nothing))

  | Just ax <- isClosedSynFamilyTyConWithAxiom_maybe fam_tc
  , Injective injective_args <- familyTyConInjectivityInfo fam_tc
  = -- Closed family: consider every branch, with an apartness check
    concatMapM (injImproveEqns injective_args) $
        buildImprovementData (fromBranches (co_ax_branches ax))
                             cab_lhs cab_rhs Just

  | otherwise
  = return []
  where
      buildImprovementData
        :: [a]                     -- axioms for a TF (FamInst or CoAxBranch)
        -> (a -> [Type])           -- get LHS of an axiom
        -> (a -> Type)             -- get RHS of an axiom
        -> (a -> Maybe CoAxBranch) -- Just => apartness check required
        -> [( [Type], TCvSubst, TyVarSet, Maybe CoAxBranch )]
           -- Result:
           -- ( [arguments of a matching axiom]
           -- , RHS-unifying substitution
           -- , axiom variables without substitution
           -- , Maybe matching axiom [Nothing - open TF, Just - closed TF ] )
      buildImprovementData axioms axiomLHS axiomRHS wrap =
          [ (ax_args, subst, unsubstTvs, wrap axiom)
          | axiom <- axioms
          , let ax_args = axiomLHS axiom
          , let ax_rhs  = axiomRHS axiom
          , Just subst <- [tcUnifyTyWithTFs False ax_rhs rhs_ty]
          , let tvs            = tyCoVarsOfTypes ax_args
                notInSubst tv  = not (tv `elemVarEnv` getTvSubstEnv subst)
                unsubstTvs     = filterVarSet (notInSubst <&&> isTyVar) tvs ]

      injImproveEqns :: [Bool]
                     -> ([Type], TCvSubst, TyCoVarSet, Maybe CoAxBranch)
                     -> TcS [Eqn]
      -- Turn one matching axiom into equalities between the actual
      -- arguments and the axiom's, at the injective positions only
      injImproveEqns inj_args (ax_args, theta, unsubstTvs, cabr) = do
        (theta', _) <- instFlexiTcS (varSetElems unsubstTvs)
        let subst = theta `unionTCvSubst` theta'
        return [ Pair arg (substTyUnchecked subst ax_arg)
               | case cabr of
                   Just cabr' -> apartnessCheck (substTys subst ax_args) cabr'
                   _          -> True
               , (arg, ax_arg, True) <- zip3 args ax_args inj_args ]
shortCutReduction :: CtEvidence -> TcTyVar -> TcCoercion
                  -> TyCon -> [TcType] -> TcS (StopOrContinue Ct)
-- See Note [Top-level reductions for type functions]
-- The axiom rewrote (F args) to another family application
-- (fam_tc tc_args); rather than invent a fresh flatten variable for
-- it, reuse fsk and emit a new canonical (fam_tc xis ~ fsk).
shortCutReduction old_ev fsk ax_co fam_tc tc_args
  = ASSERT( ctEvEqRel old_ev == NomEq)
    do { (xis, cos) <- flattenManyNom old_ev tc_args
               -- ax_co  :: F args ~ G tc_args
               -- cos    :: xis ~ tc_args
               -- old_ev :: F args ~ fsk
               -- G cos ; sym ax_co ; old_ev :: G xis ~ fsk

       ; new_ev <- case ctEvFlavour old_ev of
           Given -> newGivenEvVar deeper_loc
                         ( mkPrimEqPred (mkTyConApp fam_tc xis) (mkTyVarTy fsk)
                         , EvCoercion (mkTcTyConAppCo Nominal fam_tc cos
                                        `mkTcTransCo` mkTcSymCo ax_co
                                        `mkTcTransCo` ctEvCoercion old_ev) )

           Derived -> newDerivedNC deeper_loc $
                      mkPrimEqPred (mkTyConApp fam_tc xis)
                                   (mkTyVarTy fsk)

           Wanted ->
             do { (new_ev, new_co) <- newWantedEq deeper_loc Nominal
                                        (mkTyConApp fam_tc xis) (mkTyVarTy fsk)
                ; setWantedEq (ctev_dest old_ev) $
                  ax_co `mkTcTransCo` mkTcSymCo (mkTcTyConAppCo Nominal
                                                   fam_tc cos)
                        `mkTcTransCo` new_co
                ; return new_ev }

       -- Put the new canonical CFunEqCan back on the work list
       ; let new_ct = CFunEqCan { cc_ev = new_ev, cc_fun = fam_tc
                                , cc_tyargs = xis, cc_fsk = fsk }
       ; updWorkListTcS (extendWorkListFunEq new_ct)
       ; stopWith old_ev "Fun/Top (shortcut)" }
  where
    deeper_loc = bumpCtLocDepth (ctEvLoc old_ev)
dischargeFmv :: CtEvidence -> TcTyVar -> TcCoercion -> TcType -> TcS ()
-- (dischargeFmv ev fmv co xi)
--     [W] ev :: F tys ~ fmv
--         co :: F tys ~ xi
-- Precondition: fmv is not filled, and fmv `notElem` xi
--
-- Then set fmv := xi,
--      set ev  := co
--      kick out any inert things that are now rewritable
--
-- Does not evaluate 'co' if 'ev' is Derived
dischargeFmv ev fmv co xi
  = ASSERT2( not (fmv `elemVarSet` tyCoVarsOfType xi), ppr ev $$ ppr fmv $$ ppr xi )
    do { setEvBindIfWanted ev (EvCoercion co)  -- ev := co (no-op for Derived)
       ; unflattenFmv fmv xi                   -- fmv := xi
       ; n_kicked <- kickOutAfterUnification fmv
       ; traceTcS "dischargeFmv" (ppr fmv <+> equals <+> ppr xi $$ ppr_kicked n_kicked) }
{- Note [Top-level reductions for type functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
c.f. Note [The flattening story] in TcFlatten
Suppose we have a CFunEqCan F tys ~ fmv/fsk, and a matching axiom.
Here is what we do, in four cases:
* Wanteds: general firing rule
(work item) [W] x : F tys ~ fmv
instantiate axiom: ax_co : F tys ~ rhs
Then:
Discharge fmv := alpha
Discharge x := ax_co ; sym x2
New wanted [W] x2 : alpha ~ rhs (Non-canonical)
This is *the* way that fmv's get unified; even though they are
"untouchable".
NB: it can be the case that fmv appears in the (instantiated) rhs.
In that case the new Non-canonical wanted will be loopy, but that's
ok. But it's good reason NOT to claim that it is canonical!
* Wanteds: short cut firing rule
Applies when the RHS of the axiom is another type-function application
(work item) [W] x : F tys ~ fmv
instantiate axiom: ax_co : F tys ~ G rhs_tys
It would be a waste to create yet another fmv for (G rhs_tys).
Instead (shortCutReduction):
- Flatten rhs_tys (cos : rhs_tys ~ rhs_xis)
- Add G rhs_xis ~ fmv to flat cache (note: the same old fmv)
- New canonical wanted [W] x2 : G rhs_xis ~ fmv (CFunEqCan)
- Discharge x := ax_co ; G cos ; x2
* Givens: general firing rule
(work item) [G] g : F tys ~ fsk
instantiate axiom: ax_co : F tys ~ rhs
Now add non-canonical given (since rhs is not flat)
[G] (sym g ; ax_co) : fsk ~ rhs (Non-canonical)
* Givens: short cut firing rule
Applies when the RHS of the axiom is another type-function application
(work item) [G] g : F tys ~ fsk
instantiate axiom: ax_co : F tys ~ G rhs_tys
It would be a waste to create yet another fsk for (G rhs_tys).
Instead (shortCutReduction):
- Flatten rhs_tys: flat_cos : tys ~ flat_tys
- Add new Canonical given
[G] (sym (G flat_cos) ; co ; g) : G flat_tys ~ fsk (CFunEqCan)
Note [Cached solved FunEqs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
When trying to solve, say (FunExpensive big-type ~ ty), it's important
to see if we have reduced (FunExpensive big-type) before, lest we
simply repeat it. Hence the lookup in inert_solved_funeqs. Moreover
we must use `funEqCanDischarge` because both uses might (say) be Wanteds,
and we *still* want to save the re-computation.
Note [MATCHING-SYNONYMS]
~~~~~~~~~~~~~~~~~~~~~~~~
When trying to match a dictionary (D tau) to a top-level instance, or a
type family equation (F taus_1 ~ tau_2) to a top-level family instance,
we do *not* need to expand type synonyms because the matcher will do that for us.
Note [RHS-FAMILY-SYNONYMS]
~~~~~~~~~~~~~~~~~~~~~~~~~~
The RHS of a family instance is represented as yet another constructor which is
like a type synonym for the real RHS the programmer declared. Eg:
type instance F (a,a) = [a]
Becomes:
:R32 a = [a] -- internal type synonym introduced
F (a,a) ~ :R32 a -- instance
When we react a family instance with a type family equation in the work list
we keep the synonym-using RHS without expansion.
Note [FunDep and implicit parameter reactions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Currently, our story of interacting two dictionaries (or a dictionary
and top-level instances) for functional dependencies, and implicit
parameters, is that we simply produce new Derived equalities. So for example
class D a b | a -> b where ...
Inert:
d1 :g D Int Bool
WorkItem:
d2 :w D Int alpha
We generate the extra work item
cv :d alpha ~ Bool
where 'cv' is currently unused. However, this new item can perhaps be
spontaneously solved to become given and react with d2,
discharging it in favour of a new constraint d2' thus:
d2' :w D Int Bool
d2 := d2' |> D Int cv
Now d2' can be discharged from d1
We could be more aggressive and try to *immediately* solve the dictionary
using those extra equalities, but that requires those equalities to carry
evidence and derived do not carry evidence.
If that were the case with the same inert set and work item we might discharge
d2 directly:
cv :w alpha ~ Bool
d2 := d1 |> D Int cv
But in general it's a bit painful to figure out the necessary coercion,
so we just take the first approach. Here is a better example. Consider:
class C a b c | a -> b
And:
[Given] d1 : C T Int Char
[Wanted] d2 : C T beta Int
In this case, it's *not even possible* to solve the wanted immediately.
So we should simply output the functional dependency and add this guy
[but NOT its superclasses] back in the worklist. Even worse:
[Given] d1 : C T Int beta
[Wanted] d2: C T beta Int
Then it is solvable, but it's very hard to detect this on the spot.
It's exactly the same with implicit parameters, except that the
"aggressive" approach would be much easier to implement.
Note [Weird fundeps]
~~~~~~~~~~~~~~~~~~~~
Consider class Het a b | a -> b where
het :: m (f c) -> a -> m b
class GHet (a :: * -> *) (b :: * -> *) | a -> b
instance GHet (K a) (K [a])
instance Het a b => GHet (K a) (K b)
The two instances don't actually conflict on their fundeps,
although it's pretty strange. So they are both accepted. Now
try [W] GHet (K Int) (K Bool)
This triggers fundeps from both instance decls;
[D] K Bool ~ K [a]
[D] K Bool ~ K beta
And there's a risk of complaining about Bool ~ [a]. But in fact
the Wanted matches the second instance, so we never get as far
as the fundeps.
Trac #7875 is a case in point.
Note [Overriding implicit parameters]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f :: (?x::a) -> Bool -> a
g v = let ?x::Int = 3
in (f v, let ?x::Bool = True in f v)
This should probably be well typed, with
g :: Bool -> (Int, Bool)
So the inner binding for ?x::Bool *overrides* the outer one.
Hence a work-item Given overrides an inert-item Given.
-}
{- *******************************************************************
* *
Class lookup
* *
**********************************************************************-}
-- | Indicates if Instance met the Safe Haskell overlapping instances safety
-- check.
--
-- See Note [Safe Haskell Overlapping Instances] in TcSimplify
-- See Note [Safe Haskell Overlapping Instances Implementation] in TcSimplify
-- | Whether a match passed Safe Haskell's overlapping-instances safety check.
type SafeOverlapping = Bool

-- | Result of looking up an instance for a class constraint.
data LookupInstResult
  = NoInstance            -- ^ No usable instance was found
  | GenInst { lir_new_theta :: [TcPredType]        -- ^ freshly emitted sub-goals
            , lir_mk_ev     :: [EvTerm] -> EvTerm  -- ^ builds evidence from the sub-goals' evidence
            , lir_safe_over :: SafeOverlapping     -- ^ Safe Haskell overlap check result
            }

instance Outputable LookupInstResult where
  ppr NoInstance = text "NoInstance"
  ppr (GenInst { lir_new_theta = ev
               , lir_safe_over = s })
    = text "GenInst" <+> vcat [ppr ev, ss]
    where ss = text $ if s then "[safe]" else "[unsafe]"
matchClassInst :: DynFlags -> InertSet -> Class -> [Type] -> CtLoc -> TcS LookupInstResult
-- First check whether there is an in-scope Given that could
-- match this constraint. In that case, do not use top-level
-- instances. See Note [Instance and Given overlap]
matchClassInst dflags inerts clas tys loc
  | not (xopt LangExt.IncoherentInstances dflags)
  , not (naturallyCoherentClass clas)
  , let matchable_givens = matchableGivens loc pred inerts
  , not (isEmptyBag matchable_givens)
  -- A matchable Given exists: refuse to commit to any top-level instance now.
  = do { traceTcS "Delaying instance application" $
           vcat [ text "Work item=" <+> pprClassPred clas tys
                , text "Potential matching givens:" <+> ppr matchable_givens ]
       ; return NoInstance }
  where
    pred = mkClassPred clas tys

-- Otherwise do the real lookup, with tracing on either side of it.
matchClassInst dflags _ clas tys loc
  = do { traceTcS "matchClassInst" $ vcat [ text "pred =" <+> ppr (mkClassPred clas tys) ]
       ; res <- match_class_inst dflags clas tys loc
       ; traceTcS "matchClassInst result" $ ppr res
       ; return res }
-- | Dispatch on the class: built-in solving for KnownNat, KnownSymbol,
-- constraint tuples, Typeable, lifted equality and Coercible; every other
-- class goes through the ordinary instance environment.
match_class_inst :: DynFlags -> Class -> [Type] -> CtLoc -> TcS LookupInstResult
match_class_inst dflags clas tys loc
  | cls_name == knownNatClassName    = matchKnownNat clas tys
  | cls_name == knownSymbolClassName = matchKnownSymbol clas tys
  | isCTupleClass clas               = matchCTuple clas tys
  | cls_name == typeableClassName    = matchTypeable clas tys
  | clas `hasKey` heqTyConKey        = matchLiftedEquality tys
  | clas `hasKey` coercibleTyConKey  = matchLiftedCoercible tys
  | otherwise                        = matchInstEnv dflags clas tys loc
  where
    cls_name = className clas
{- Note [Instance and Given overlap]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Example, from the OutsideIn(X) paper:
instance P x => Q [x]
instance (x ~ y) => R y [x]
wob :: forall a b. (Q [b], R b a) => a -> Int
g :: forall a. Q [a] => [a] -> Int
g x = wob x
This will generate the implication constraint:
Q [a] => (Q [beta], R beta [a])
If we react (Q [beta]) with its top-level axiom, we end up with a
(P beta), which we have no way of discharging. On the other hand,
if we react R beta [a] with the top-level we get (beta ~ a), which
is solvable and can help us rewrite (Q [beta]) to (Q [a]) which is
now solvable by the given Q [a].
The solution is that:
In matchClassInst (and thus in topReact), we return a matching
instance only when there is no Given in the inerts which is
unifiable to this particular dictionary.
We treat any meta-tyvar as "unifiable" for this purpose,
*including* untouchable ones
The end effect is that, much as we do for overlapping instances, we
delay choosing a class instance if there is a possibility of another
instance OR a given to match our constraint later on. This fixes
Trac #4981 and #5002.
Other notes:
* The check is done *first*, so that it also covers classes
with built-in instance solving, such as
- constraint tuples
- natural numbers
- Typeable
* The given-overlap problem is arguably not easy to appear in practice
due to our aggressive prioritization of equality solving over other
constraints, but it is possible. I've added a test case in
typecheck/should-compile/GivenOverlapping.hs
* Another "live" example is Trac #10195; another is #10177.
* We ignore the overlap problem if -XIncoherentInstances is in force:
see Trac #6002 for a worked-out example where this makes a
difference.
* Moreover notice that our goals here are different than the goals of
the top-level overlapping checks. There we are interested in
validating the following principle:
If we inline a function f at a site where the same global
instance environment is available as the instance environment at
the definition site of f then we should get the same behaviour.
But for the Given Overlap check our goal is just related to completeness of
constraint solving.
-}
{- *******************************************************************
* *
Class lookup in the instance environment
* *
**********************************************************************-}
-- | Look the class constraint up in the instance environment proper.
-- Returns 'NoInstance' when nothing matches, and also when more than one
-- instance matches (or Safe Haskell's overlap check failed) — in those
-- cases the choice is deferred until more is known about the constraint.
matchInstEnv :: DynFlags -> Class -> [Type] -> CtLoc -> TcS LookupInstResult
matchInstEnv dflags clas tys loc
  = do { instEnvs <- getInstEnvs
       ; let safeOverlapCheck = safeHaskell dflags `elem` [Sf_Safe, Sf_Trustworthy]
             (matches, unify, unsafeOverlaps) = lookupInstEnv True instEnvs clas tys
             safeHaskFail = safeOverlapCheck && not (null unsafeOverlaps)
       ; case (matches, unify, safeHaskFail) of

            -- Nothing matches
            ([], _, _)
              -> do { traceTcS "matchClass not matching" $
                        vcat [ text "dict" <+> ppr pred ]
                    ; return NoInstance }

            -- A single match (& no safe haskell failure)
            ([(ispec, inst_tys)], [], False)
              -> do { let dfun_id = instanceDFunId ispec
                    ; traceTcS "matchClass success" $
                        vcat [text "dict" <+> ppr pred,
                              text "witness" <+> ppr dfun_id
                                             <+> ppr (idType dfun_id) ]
                      -- Record that this dfun is needed
                    ; match_one (null unsafeOverlaps) dfun_id inst_tys }

            -- More than one matches (or Safe Haskell fail!). Defer any
            -- reactions of a multitude until we learn more about the reagent
            (matches, _, _)
              -> do { traceTcS "matchClass multiple matches, deferring choice" $
                        vcat [text "dict" <+> ppr pred,
                              text "matches" <+> ppr matches]
                    ; return NoInstance } }
  where
    pred = mkClassPred clas tys

    match_one :: SafeOverlapping -> DFunId -> [DFunInstType] -> TcS LookupInstResult
    -- Instantiate the dfun for the unique match and emit its theta as goals.
    -- See Note [DFunInstType: instantiating types] in InstEnv
    match_one so dfun_id mb_inst_tys
      = do { checkWellStagedDFun pred dfun_id loc
           ; (tys, theta) <- instDFunType dfun_id mb_inst_tys
           ; return $ GenInst { lir_new_theta = theta
                              , lir_mk_ev     = EvDFunApp dfun_id tys
                              , lir_safe_over = so } }
{- ********************************************************************
* *
Class lookup for CTuples
* *
***********************************************************************-}
-- | Solve a constraint-tuple class, e.g. @(c1, c2)@: emit each component
-- constraint as a new goal and assemble the dictionary with the tuple's
-- single data constructor — for constraint tuples the data constructor
-- *is* the dfun. Precondition: @isCTupleClass clas@ holds.
matchCTuple :: Class -> [Type] -> TcS LookupInstResult
matchCTuple clas tys
  = let con   = tyConSingleDataCon (classTyCon clas)
        mk_ev = EvDFunApp (dataConWrapId con) tys
    in return (GenInst { lir_new_theta = tys
                       , lir_mk_ev     = mk_ev
                       , lir_safe_over = True })
{- ********************************************************************
* *
Class lookup for Literals
* *
***********************************************************************-}
-- | Built-in solving for @KnownNat n@: only a literal Nat type solves it,
-- via an 'EvNum' literal coerced into the dictionary by 'makeLitDict'.
matchKnownNat :: Class -> [Type] -> TcS LookupInstResult
matchKnownNat clas [ty]     -- clas = KnownNat
  | Just n <- isNumLitTy ty = makeLitDict clas ty (EvNum n)
matchKnownNat _ _           = return NoInstance

-- | Built-in solving for @KnownSymbol s@: only a literal Symbol solves it,
-- via an 'EvStr' literal coerced into the dictionary by 'makeLitDict'.
matchKnownSymbol :: Class -> [Type] -> TcS LookupInstResult
matchKnownSymbol clas [ty]  -- clas = KnownSymbol
  | Just n <- isStrLitTy ty = makeLitDict clas ty (EvStr n)
matchKnownSymbol _ _        = return NoInstance
makeLitDict :: Class -> Type -> EvLit -> TcS LookupInstResult
-- makeLitDict adds a coercion that will convert the literal into a dictionary
-- of the appropriate type. See Note [KnownNat & KnownSymbol and EvLit]
-- in TcEvidence. The coercion happens in 2 steps:
--
--     Integer -> SNat n     -- representation of literal to singleton
--     SNat n  -> KnownNat n -- singleton to dictionary
--
-- The process is mirrored for Symbols:
--     String    -> SSymbol n
--     SSymbol n -> KnownSymbol n
makeLitDict clas ty evLit
  | Just (_, co_dict) <- tcInstNewTyCon_maybe (classTyCon clas) [ty]
        -- co_dict :: KnownNat n ~ SNat n
  , [ meth ] <- classMethods clas
  , Just tcRep <- tyConAppTyCon_maybe  -- SNat
                  $ funResultTy        -- SNat n
                  $ dropForAlls        -- KnownNat n => SNat n
                  $ idType meth        -- forall n. KnownNat n => SNat n
  , Just (_, co_rep) <- tcInstNewTyCon_maybe tcRep [ty]
        -- co_rep :: SNat n ~ Integer
  , let ev_tm = mkEvCast (EvLit evLit) (mkTcSymCo (mkTcTransCo co_dict co_rep))
  = return $ GenInst { lir_new_theta = []
                     , lir_mk_ev     = \_ -> ev_tm
                     , lir_safe_over = True }

  | otherwise
  -- The class's sole method did not have the expected singleton shape;
  -- this is a compiler invariant violation, so panic with the evidence.
  = panicTcS (text "Unexpected evidence for" <+> ppr (className clas)
              $$ vcat (map (ppr . idType) (classMethods clas)))
{- ********************************************************************
* *
Class lookup for Typeable
* *
***********************************************************************-}
-- | Assumes that we've checked that this is the 'Typeable' class,
-- and it was applied to the correct argument.
matchTypeable :: Class -> [Type] -> TcS LookupInstResult
matchTypeable clas [k,t]  -- clas = Typeable
  -- For the first two cases, See Note [No Typeable for polytypes or qualified types]
  | isForAllTy k                      = return NoInstance   -- Polytype
  | isJust (tcSplitPredFunTy_maybe t) = return NoInstance   -- Qualified type

  -- Now cases that do work
  | k `eqType` typeNatKind                 = doTyLit knownNatClassName t
  | k `eqType` typeSymbolKind              = doTyLit knownSymbolClassName t
  | Just (tc, ks) <- splitTyConApp_maybe t -- See Note [Typeable (T a b c)]
  , onlyNamedBndrsApplied tc ks            = doTyConApp clas t ks
  | Just (f,kt) <- splitAppTy_maybe t      = doTyApp clas t f kt
matchTypeable _ _ = return NoInstance
-- | Typeable representation for a type constructor applied to some kinds:
-- emit a Typeable goal for every kind argument and combine their evidence
-- with 'EvTypeableTyCon'.
doTyConApp :: Class -> Type -> [Kind] -> TcS LookupInstResult
doTyConApp clas ty args
  = return $ GenInst { lir_new_theta = kind_preds
                     , lir_mk_ev     = \tms -> EvTypeable ty (EvTypeableTyCon tms)
                     , lir_safe_over = True }
  where
    kind_preds = map (mk_typeable_pred clas) args
-- Representation for concrete kinds. We just use the kind itself,
-- but first we must make sure that we've instantiated all kind-
-- polymorphism, but no more: every binder consumed by the supplied
-- arguments must be named (kind) binder, and no named binder may remain.
onlyNamedBndrsApplied :: TyCon -> [KindOrType] -> Bool
onlyNamedBndrsApplied tc ks =
    all isNamedBinder consumed && all (not . isNamedBinder) remaining
  where
    (consumed, remaining) = splitAtList ks (tyConBinders tc)
doTyApp :: Class -> Type -> Type -> KindOrType -> TcS LookupInstResult
-- Representation for an application of a type to a type-or-kind.
-- This may happen when the type expression starts with a type variable.
-- Example (ignoring kind parameter):
--   Typeable (f Int Char) -->
--   (Typeable (f Int), Typeable Char) -->
--   (Typeable f, Typeable Int, Typeable Char) --> (after some simp. steps)
--   Typeable f
doTyApp clas ty f tk
  | isForAllTy (typeKind f)
  = return NoInstance -- We can't solve until we know the ctr.
  | otherwise
  -- The lambda's [t1,t2] pattern matches exactly the two goals emitted here.
  = return $ GenInst [mk_typeable_pred clas f, mk_typeable_pred clas tk]
                     (\[t1,t2] -> EvTypeable ty $ EvTypeableTyApp t1 t2)
                     True
-- Emit a `Typeable` constraint for the given type, pairing it with its kind
-- (Typeable is kind-indexed: its two arguments are [typeKind ty, ty]).
mk_typeable_pred :: Class -> Type -> PredType
mk_typeable_pred clas ty = mkClassPred clas [ typeKind ty, ty ]
-- Typeable is implied by KnownNat/KnownSymbol. In the case of a type literal
-- we generate a sub-goal for the appropriate class. See #10348 for what
-- happens when we fail to do this.
doTyLit :: Name -> Type -> TcS LookupInstResult
doTyLit kc t = do { kc_clas <- tcLookupClass kc
                  ; let kc_pred    = mkClassPred kc_clas [ t ]
                        -- exactly one sub-goal is emitted, so mk_ev must
                        -- receive exactly one evidence term
                        mk_ev [ev] = EvTypeable t $ EvTypeableTyLit ev
                        mk_ev _    = panic "doTyLit"
                  ; return (GenInst [kc_pred] mk_ev True) }
{- Note [Typeable (T a b c)]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For type applications we always decompose using binary application,
via doTyApp, until we get to a *kind* instantiation. Example
Proxy :: forall k. k -> *
To solve Typeable (Proxy (* -> *) Maybe) we
- First decompose with doTyApp,
to get (Typeable (Proxy (* -> *))) and Typeable Maybe
- Then solve (Typeable (Proxy (* -> *))) with doTyConApp
If we attempt to short-cut by solving it all at once, via
doTyConApp
Note [No Typeable for polytypes or qualified types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not support impredicative typeable, such as
Typeable (forall a. a->a)
Typeable (Eq a => a -> a)
Typeable (() => Int)
Typeable (((),()) => Int)
See Trac #9858. For forall's the case is clear: we simply don't have
a TypeRep for them. For qualified but not polymorphic types, like
(Eq a => a -> a), things are murkier. But:
* We don't need a TypeRep for these things. TypeReps are for
monotypes only.
* Perhaps we could treat `=>` as another type constructor for `Typeable`
purposes, and thus support things like `Eq Int => Int`, however,
at the current state of affairs this would be an odd exception as
no other class works with impredicative types.
For now we leave it off, until we have a better story for impredicativity.
-}
-- | Solve an implicit-parameter CallStack constraint by binding the
-- supplied call-stack evidence (coerced to dictionary form) to the
-- wanted's evidence variable.
solveCallStack :: CtEvidence -> EvCallStack -> TcS ()
solveCallStack ev ev_cs = do
  -- We're given ev_cs :: CallStack, but the evidence term should be a
  -- dictionary, so we have to coerce ev_cs to a dictionary for
  -- `IP ip CallStack`. See Note [Overview of implicit CallStacks]
  let ev_tm = mkEvCast (EvCallStack ev_cs) (wrapIP (ctEvPred ev))
  setWantedEvBind (ctEvId ev) ev_tm
{- ********************************************************************
* *
Class lookup for lifted equality
* *
***********************************************************************-}
-- See also Note [The equality types story] in TysPrim
-- | Solve lifted heterogeneous equality by emitting the corresponding
-- primitive equality goal and boxing its evidence with 'heqDataCon'.
matchLiftedEquality :: [Type] -> TcS LookupInstResult
matchLiftedEquality args
  = return (GenInst { lir_new_theta = [ mkTyConApp eqPrimTyCon args ]
                    , lir_mk_ev     = EvDFunApp (dataConWrapId heqDataCon) args
                    , lir_safe_over = True })
-- See also Note [The equality types story] in TysPrim
-- | Solve @Coercible@ by emitting the corresponding representational
-- primitive equality goal and boxing its evidence with 'coercibleDataCon'.
-- Only the three-argument form @[k, t1, t2]@ is legal; anything else is a
-- compiler invariant violation, hence the panic in the second equation.
matchLiftedCoercible :: [Type] -> TcS LookupInstResult
matchLiftedCoercible args@[k, t1, t2]
  = return (GenInst { lir_new_theta = [ mkTyConApp eqReprPrimTyCon args' ]
                    , lir_mk_ev     = EvDFunApp (dataConWrapId coercibleDataCon)
                                               args
                    , lir_safe_over = True })
  where
    -- the primitive tycon takes both kinds explicitly
    args' = [k, k, t1, t2]
matchLiftedCoercible args = pprPanic "matchLiftedCoercible" (ppr args)
|
mcschroeder/ghc
|
compiler/typecheck/TcInteract.hs
|
bsd-3-clause
| 89,059
| 48
| 22
| 26,640
| 12,788
| 6,647
| 6,141
| -1
| -1
|
{-# LANGUAGE PackageImports #-}
-- | Pass-through module: re-exports "Control.Monad.Zip" from @base@
-- unchanged. The explicit @\"base\"@ package qualifier (PackageImports)
-- prevents the import from resolving to this very module, which shares
-- the same module name.
module Control.Monad.Zip (module M) where
import "base" Control.Monad.Zip as M
|
silkapp/base-noprelude
|
src/Control/Monad/Zip.hs
|
bsd-3-clause
| 116
| 0
| 4
| 18
| 23
| 17
| 6
| 3
| 0
|
{-# LANGUAGE TupleSections, OverloadedStrings, QuasiQuotes, TemplateHaskell, TypeFamilies, RecordWildCards,
DeriveGeneric ,MultiParamTypeClasses ,FlexibleInstances #-}
module Protocol.ROC.PointTypes.PointType89 where
import GHC.Generics
import qualified Data.ByteString as BS
import Data.Word
import Data.Binary
import Data.Binary.Get
import Protocol.ROC.Float
import Protocol.ROC.Utils
-- | ROC protocol point type 89 record; field order mirrors the wire
-- layout consumed by 'pointType89Parser'.
data PointType89 = PointType89 {
  pointType89ChartType              :: !PointType89ChartType               -- ^ chart type byte
 ,pointType89HistPntNumber          :: !PointType89HistPntNumber           -- ^ history point number byte
 ,pointType89DynamicPointDataTLPRef :: !PointType89DynamicPointDataTLPRef  -- ^ TLP reference bytes
 ,pointType89TextString             :: !PointType89TextString              -- ^ 10-byte text label
 ,pointType89UnitsString            :: !PointType89UnitsString             -- ^ 10-byte units label
 ,pointType89ScalingOption          :: !PointType89ScalingOption           -- ^ scaling option flag
 ,pointType89UserUpperScaleRange    :: !PointType89UserUpperScaleRange     -- ^ user upper scale range (IEEE float)
 ,pointType89UserLowerScaleRange    :: !PointType89UserLowerScaleRange     -- ^ user lower scale range (IEEE float)
} deriving (Read,Eq, Show, Generic)

-- Wire-representation aliases for the fields above.
type PointType89ChartType              = Word8
type PointType89HistPntNumber          = Word8
type PointType89DynamicPointDataTLPRef = [Word8]
type PointType89TextString             = BS.ByteString
type PointType89UnitsString            = BS.ByteString
type PointType89ScalingOption          = Bool
type PointType89UserUpperScaleRange    = Float
type PointType89UserLowerScaleRange    = Float
-- | Deserialise one PointType89 record. Fields are read strictly in wire
-- order: two bytes, a TLP reference, two 10-byte strings, a flag, and two
-- IEEE 32-bit floats.
pointType89Parser :: Get PointType89
pointType89Parser = do
  chart      <- getWord8          -- chart type
  histPnt    <- getWord8          -- history point number
  tlpRef     <- getTLP            -- dynamic point data TLP reference
  label      <- getByteString 10  -- text string
  units      <- getByteString 10  -- units string
  scaling    <- anyButNull        -- scaling option
  upperRange <- getIeeeFloat32    -- user upper scale range
  lowerRange <- getIeeeFloat32    -- user lower scale range
  return
    (PointType89 chart histPnt tlpRef label units scaling upperRange lowerRange)
|
jqpeterson/roc-translator
|
src/Protocol/ROC/PointTypes/PointType89.hs
|
bsd-3-clause
| 2,368
| 0
| 9
| 805
| 288
| 161
| 127
| 55
| 1
|
module Text.Highlighter.Lexers.PythonTraceback (lexer) where
import qualified Text.Highlighter.Lexers.Python as Python
import Text.Regex.PCRE.Light
import Text.Highlighter.Types
-- | Highlighter lexer for Python traceback output (alias \"pytb\").
-- Runs in multiline mode and starts in the 'root'' state.
lexer :: Lexer
lexer = Lexer
    { lName       = "Python Traceback"
    , lAliases    = ["pytb"]
    , lExtensions = [".pytb"]
    , lMimetypes  = ["text/x-python-traceback"]
    , lStart      = root'
    , lFlags      = [multiline]
    }
-- | Top-level state: recognise the start of a traceback — either the
-- \"Traceback (most recent call last):\" header or (via lookahead) a bare
-- \"  File ...\" line — and switch to the 'intb'' state; any other line
-- passes through tokenised as Other.
root' :: TokenMatcher
root' =
    [ tokNext "^Traceback \\(most recent call last\\):\\n" (Arbitrary "Generic" :. Arbitrary "Traceback") (GoTo intb')
    , tokNext "^(?= File \"[^\"]+\", line \\d+)" (Arbitrary "Generic" :. Arbitrary "Traceback") (GoTo intb')
    , tok "^.*\\n" (Arbitrary "Other")
    ]
-- | Inside-traceback state: matches \"File ...\" location lines (with or
-- without a function name), source lines (delegated to the Python lexer),
-- elided \"...\" markers, and the final exception line — which pops back
-- to the root state.
intb' :: TokenMatcher
intb' =
    [ tok "^(  File )(\"[^\"]+\")(, line )(\\d+)(, in )(.+)(\\n)" (ByGroups [(Arbitrary "Text"), (Arbitrary "Name" :. Arbitrary "Builtin"), (Arbitrary "Text"), (Arbitrary "Literal" :. Arbitrary "Number"), (Arbitrary "Text"), (Arbitrary "Name"), (Arbitrary "Text")])
    , tok "^(  File )(\"[^\"]+\")(, line )(\\d+)(\\n)" (ByGroups [(Arbitrary "Text"), (Arbitrary "Name" :. Arbitrary "Builtin"), (Arbitrary "Text"), (Arbitrary "Literal" :. Arbitrary "Number"), (Arbitrary "Text")])
    , tok "^(    )(.+)(\\n)" (ByGroups [(Arbitrary "Text"), (Using Python.lexer), (Arbitrary "Text")])
    , tok "^([ \\t]*)(...)(\\n)" (ByGroups [(Arbitrary "Text"), (Arbitrary "Comment"), (Arbitrary "Text")])
    , tokNext "^(.+)(: )(.+)(\\n)" (ByGroups [(Arbitrary "Generic" :. Arbitrary "Error"), (Arbitrary "Text"), (Arbitrary "Name"), (Arbitrary "Text")]) Pop
    , tokNext "^([a-zA-Z_][a-zA-Z0-9_]*)(:?\\n)" (ByGroups [(Arbitrary "Generic" :. Arbitrary "Error"), (Arbitrary "Text")]) Pop
    ]
|
chemist/highlighter
|
src/Text/Highlighter/Lexers/PythonTraceback.hs
|
bsd-3-clause
| 1,717
| 0
| 12
| 277
| 536
| 295
| 241
| 25
| 1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NamedFieldPuns #-}
module Keys where
import Prelude hiding (fail, readFile)
import GHC.Generics
import Data.Aeson (ToJSON)
import qualified Data.ByteString.Lazy.Char8 as BSL
import Control.Monad (replicateM)
import System.IO.Strict (readFile)
import System.Random (randomRIO)
import Data.Digest.Pure.SHA (hmacSha1, showDigest)
import qualified Rel.Cmd as Cmd
import qualified Rel.Log as Log
import Util.Request (Request)
import Monad.Result
import AppMonad
-- | A named RSA keypair plus the shared secret used for HMAC signatures.
data KeySet = KeySet
  { privKey :: FilePath  -- ^ path to the private key file in the keystore
  , pubKey  :: String    -- ^ public key material (\"ssh-rsa <base64>\")
  , name    :: String    -- ^ randomly generated key name (also the filename)
  , secret  :: String    -- ^ HMAC secret, stored in the pubkey comment field
  } deriving (Show, Generic)

instance ToJSON KeySet
-- | Request handler: create a fresh keyset, log it, and respond with it.
-- The incoming request itself is ignored.
request :: Request -> App ()
request _ =
  do
    key <- create
    _ <- Log.debug $ "Created keyset: " ++ (show key)
    ok key
-- | Generate a new RSA keypair under a random name in the keystore.
-- The random secret is placed in the public key's comment (-C), and the
-- key is created with an empty passphrase (-N "").
create :: App KeySet
create =
  do
    name <- randomName
    secret <- randomSecret
    privKey <- (++ "/" ++ name) `fmap` getKeystorePath
    _ <- Cmd.run "ssh-keygen" ["-t", "rsa", "-f", privKey, "-q", "-N", "", "-C", secret]
    (pubKey, _) <- readPubKey $ privKey ++ ".pub"
    return $ KeySet { privKey, pubKey, name, secret }
-- | Load an existing keyset by name from the keystore; the secret is
-- recovered from the public key's comment field.
load :: String -> App KeySet
load name =
  do
    privKey <- (++ "/" ++ name) `fmap` getKeystorePath
    (pubKey, secret) <- readPubKey $ privKey ++ ".pub"
    return $ KeySet { privKey, pubKey, name, secret }
-- | Check an HMAC-SHA1 signature header of the form @sha1=<hexdigest>@
-- against the payload, using the keyset's shared secret.
-- NOTE(review): the comparison uses plain '==', which is not constant-time;
-- for untrusted input a timing-safe comparison would be preferable.
validSig :: String -> KeySet -> BSL.ByteString -> App Bool
validSig sig key text =
    return $ sig == "sha1=" ++ expected
  where expected = showDigest $ hmacSha1 secret' text
        secret'  = BSL.pack $ secret key
-- | Fetch the secret (the pubkey comment field) for a key.
-- NOTE(review): @keyName@ is passed to 'readPubKey' as-is, whereas 'load'
-- first prepends the keystore path and appends \".pub\" — confirm callers
-- supply a full pubkey path here.
getSecret :: String -> App String
getSecret keyName = snd `fmap` readPubKey keyName
-- | Read a public key, returning the key part separately from the comment.
-- NOTE(review): only keys of exactly three whitespace-separated words
-- (\"ssh-rsa\", key, comment) parse; a comment containing spaces, a missing
-- comment, or another key type all 'fail'.
readPubKey :: FilePath -> App (String, String)
readPubKey path = (safe $ readFile path) >>= parts . words
  where parts ["ssh-rsa", k, desc] = return ("ssh-rsa " ++ k, desc)
        parts _ = fail $ "Parse error in pubkey: " ++ path
-- | Choose one element of the list uniformly at random.
-- NOTE(review): indexing with (!!) means an empty list is an error.
pick :: [a] -> IO a
pick xs = do
  i <- randomRIO (0, length xs - 1)
  return (xs !! i)

-- | Draw @n@ independent random picks (with replacement) from the list.
pickN :: Int -> [a] -> IO [a]
pickN n xs = replicateM n (pick xs)
-- | 24-character random key name (used as the keystore filename).
randomName :: App String
randomName = safe $ pickN 24 nameChars

-- | 64-character random HMAC secret.
randomSecret :: App String
randomSecret = safe $ pickN 64 hmacChars

-- | Characters allowed in key names.
-- NOTE(review): '0' appears twice (\"...67890\" after the leading '0'),
-- slightly biasing generation towards '0' — \"1234567890\" was likely meant.
nameChars :: [Char]
nameChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890"

-- | Characters allowed in HMAC secrets: name characters plus punctuation.
hmacChars :: [Char]
hmacChars = nameChars ++ "`~!@#$%^&*()-_=+[{]}|;:'<,>.?/"
|
shmookey/pure
|
app/Keys.hs
|
bsd-3-clause
| 2,463
| 0
| 10
| 537
| 804
| 445
| 359
| 67
| 2
|
module Data.Concurrent.Queue.Roq.Util
(
generate_key
, retry_dirty_write
, retry_dirty_read
)
where
import Prelude -- hiding (catch)
import Control.Distributed.Process hiding (call)
import Data.Concurrent.Queue.Roq.Hroq
import Data.Concurrent.Queue.Roq.Mnesia
-- import Data.Time.Clock
import Data.Thyme.Clock
-- ---------------------------------------------------------------------
-- | Generate a queue key from the current wall-clock time.
-- NOTE(review): uniqueness is only as good as the printed timestamp's
-- resolution — two calls in the same instant would yield equal keys.
generate_key :: Process QKey
generate_key = do
  k <- liftIO $ getCurrentTime
  return $ QK (show k)
-- ---------------------------------------------------------------------
{-
retry_dirty_write(0, _, _) ->
error;
retry_dirty_write(N, TableName, Record) ->
case catch(mnesia:dirty_write(TableName, Record)) of
ok ->
ok;
{'EXIT', Reason} ->
?warn({dirty_write, TableName, N, Record, Reason}),
timer:sleep(100),
retry_dirty_write(N-1, TableName, Record)
end.
-}
-- retry_dirty_write :: Int -> TableName -> QEntry -> Process ()
-- | Write a record to the table. Despite the name (kept from the Erlang
-- original shown above), the retry/backoff logic is currently commented
-- out: the count @n@ is ignored and a single 'dirty_write' is attempted.
retry_dirty_write n tableName record = do
  dirty_write tableName record
{-
catch op handler
where
op = dirty_write tableName record
handler :: SomeException -> Process ()
handler e = do
logm $ "retry_dirty_write:" ++ (show (tableName, n, record, e))
liftIO $ threadDelay (100*1000) -- Haskell sleep takes us
retry_dirty_write (n-1) tableName record
-}
-- ---------------------------------------------------------------------
{-
retry_dirty_read(0, _, _) ->
error;
retry_dirty_read(N, TableName, Key) ->
case catch(mnesia:dirty_read(TableName, Key)) of
{'EXIT', Reason} ->
?warn({dirty_read, TableName, N, Key, Reason}),
retry_dirty_read(N-1, TableName, Key);
Data ->
{ok, Data}
end.
-}
{-
retry_dirty_read :: (Show a,Indexable a, Typeable b) -- , Serialize b)
=> Int -> TableName -> a -> Process [b]
retry_dirty_read 0 tableName key = do
logm $ "retry_dirty_read:giving up" ++ (show (0,tableName,key))
return [] -- TODO: throw an exception, blow up, etc
retry_dirty_read n tableName key = do
logm $ "retry_dirty_read:" ++ (show (n,tableName,key))
catch op handler
where
op = dirty_read tableName key :: [b]
handler :: (Typeable b) => SomeException -> Process [b]
handler e = do
logm $ "retry_dirty_read:exception" ++ (show (n,tableName,key,e))
liftIO $ threadDelay (100*1000) -- Haskell sleep takes us
retry_dirty_read (n - 1) tableName key
-}
-- | Read a value from the table. As with 'retry_dirty_write', the retry
-- count @n@ is currently ignored (the retrying versions are commented out
-- above); a single 'dirty_read' is performed.
retry_dirty_read ::
  Integer -> TableName -> MetaKey -> Process (Maybe Meta)
retry_dirty_read n tableName key = do
  -- logm $ "retry_dirty_read:" ++ (show (n,tableName,key))
  dirty_read tableName key
|
alanz/hroq
|
src/Data/Concurrent/Queue/Roq/Util.hs
|
bsd-3-clause
| 2,653
| 0
| 10
| 514
| 174
| 101
| 73
| 20
| 1
|
{-# LANGUAGE ParallelListComp #-}
-- The Timber compiler <timber-lang.org>
--
-- Copyright 2008-2009 Johan Nordlander <nordland@csee.ltu.se>
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the names of the copyright holder and any identified
-- contributors, nor the names of their affiliations, may be used to
-- endorse or promote products derived from this software without
-- specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
-- OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
-- ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-- OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-- HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-- STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-- ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-- POSSIBILITY OF SUCH DAMAGE.
module Prepare4C(prepare4c) where
import Control.Monad
import Common
import Kindle
import PP
import qualified Core
import qualified Core2Kindle
-- Establishes that:
-- every struct function simply relays all work to a global function with an explicit "this" parameter
-- every EEnter expression has a variable as its body
-- every ENew expression occurs at the rhs of a Val binding
-- Removes the type arguments from all type constructors except Array
-- Replaces type variables with the special type constructor POLY
-- Adds type casts wherever necessary
-- Replaces type abstraction and application with BITSET parameters indicating pointer/non-pointer status
-- Replaces nullary structs with out-of-band pointers (casts from the corresponding tag value)
-- Replaces type constructor switching by switching on integer tags (embedded as pointer values or as explicit tag fields)
-- Makes field selection from matched struct variants explicit
-- Removes CBreak commands that appear directly under a CSeq (without any intervening CSwitch)
-- Removes redundant defaults in CSwitch commands
-- (Not yet):
-- Flattens the struct subtyping graph by inlining the first coercion field
-- Ditto for enums...
-- | Entry point for the prepare-for-C pass (see the pass description in
-- the comments above): process the module inside a fresh local store.
prepare4c e2 e3 m = localStore (pModule e2 e3 m)
-- ===========================
-- Local environment
-- ===========================
-- | Local environment threaded through the pass.
data Env = Env { decls   :: Decls,                  -- all structs in scope
                 tenv    :: TEnv,                   -- all fun and val variables in scope
                 strinfo :: Map Name (Int,[Bool]),  -- number of ptr fields + 1 / relevance flags of tvars for each struct
                 polyenv :: Map Name (Exp,Int),     -- status location (refexp, bit-number) for type variables in scope
                 conval  :: Map Name Int,           -- numerical values of all variant constructor names
                 nulls   :: [Name],                 -- lists all nullary struct variants
                 tagged  :: [Name],                 -- lists all struct variants that use an explicit tag field
                 this    :: Maybe Name }

-- | The empty starting environment.
env0 = Env { decls = [], tenv = [], strinfo = [], polyenv = [], conval = [], nulls = [], tagged = [], this = Nothing }
-- | Extend the environment with struct declarations @ds@, precomputing
-- per-struct info: constructor tag values, nullary variants, and which
-- variants/unions need an explicit tag field (those with more than one
-- non-nullary constructor).
addDecls ds env = env { decls = ds ++ decls env, strinfo = info ++ strinfo env, conval = convals ++ conval env,
                        nulls = nullcons ++ nulls env, tagged = taggedcons ++ taggedunions ++ tagged env }
  where unioncons    = unions ds
        allvariants  = map (variants ds) unioncons
        -- number each union's variants 0,1,2,... for tag values
        convals      = concat (map (`zip` [0..]) allvariants)
        -- variants with no fields of their own
        nullcons     = [ n | (n, Struct _ [] (Extends _)) <- ds ]
        -- unions whose only non-nullary variant is unique need no tag
        singlecons   = map head . filter ((==1) . length) . map (\\nullcons) $ allvariants
        taggedcons   = dom convals \\ (singlecons++nullcons)
        taggedunions = [ n | (n,ns) <- unioncons `zip` allvariants, any (`elem` taggedcons) ns ]
        info         = mapSnd structInfo ds
        -- ptr-field count (+ terminator) and tyvar relevance flags
        structInfo (Struct vs te _) = (length (ptrFields te []), map (`elem` relevant) vs)
          where relevant = rng (varFields te)
-- | Extend the variable environment with an arbitrary TEnv.
addTEnv te env = env { tenv = te ++ tenv env }

-- | Extend the variable environment with value bindings only.
addVals te env = env { tenv = mapSnd ValT te ++ tenv env }

-- | Record status locations for the given type variables.
addPolyEnv vs is es env = env { polyenv = mkPolyEnv vs is es ++ polyenv env }

-- Create a polyTag ATEnv on basis of polymorphic arity and return a mapping
-- from each tyvar to its corresponding arg/bit.
-- NOTE(review): no clause handles a non-empty @vs@ with an exhausted index
-- list — the three arguments are assumed to be aligned in length.
mkPolyEnv [] _ _ = []
mkPolyEnv (v:vs) (i:is) es = (v, (es!!(i`div`32), i`mod`32)) : mkPolyEnv vs is es

-- | Set the current \"this\" (self) variable.
setThis x env = env { this = Just x }
-- | Look up a value variable's type; the @ValT@ pattern fails (with the
-- tag @xx@ in the lookup error) if @x@ is bound as a function.
findValT xx te x = t
  where ValT t = lookup'' xx te x

-- | Look up a function's type, pairing its tyvars with the instantiation @ts@.
findFunT te x ts = (vs `zip` ts, ts', t)
  where FunT vs ts' t = lookup'' "B" te x

-- | A struct's tyvar substitution plus its field environment; tuples get a
-- synthetic environment built from the supply of field names.
findStructTEnv xx env (TCon n ts)
  | isTuple n = (abcSupply `zip` ts, abcSupply `zip` map (ValT . tVar) (take (width n) abcSupply))
  | otherwise = (vs `zip` ts, te)
  where Struct vs te _ = lookup'' xx (decls env) n

-- | Pointer-field count (+1) and tyvar relevance flags for a struct
-- (see the 'strinfo' field of 'Env'); tuples are computed directly.
findStructInfo env n
  | isTuple n = (width n + 1, take (width n) (repeat True))
  | otherwise = lookup' (strinfo env) n

-- | Status location (ref expression, bit number) for a type variable.
findPolyTag xx env v = lookup'' (xx ++ ": " ++ show (polyenv env)) (polyenv env) v

-- | Integer literal carrying a variant constructor's tag value.
conLit env n = lInt (lookup'' "GGG" (conval env) n)

-- | All constructors of the union the first alternative's constructor
-- belongs to (a tuple constructor stands alone).
allCons env (ACon n _ _ _ : _)
  | isTuple n = [n]
  | otherwise = variants (decls env) n0
  where Struct _ _ (Extends n0) = lookup'' "Apa" (decls env) n
allCons env _ = []

-- | Externally visible type arity of a struct, taken from its root.
visibleArity env n = structArity ds (structRoot ds n)
  where ds = decls env
-- =====================================
-- Replacing polymorphism with polyTags
-- =====================================
-- Create a list of polyTag types on basis of polymorphic arity:
-- one BITS32 word per (up to) 32 type variables.
polyTagTypes 0 = []
polyTagTypes a | a <= 32   = [tBITS32]
               | otherwise = tBITS32 : polyTagTypes (a-32)

-- Create a polyTag TEnv on basis of polymorphic arity (the externally
-- visible part). Up to 4 tyvars are covered by the GCINFO slot alone;
-- beyond that, explicit BITS32 words (from _ABCSupply) are added.
polyTagEnv0 a | a <= 4    = [(prim GCINFO, ValT tPOLY)]
              | otherwise = (prim GCINFO, ValT tPOLY) :
                            _ABCSupply `zip` map ValT (polyTagTypes a)

-- Create a polyTag TEnv on basis of polymorphic arity (the existentially
-- quantified part), using the lower-case name supply.
polyTagEnv1 a = _abcSupply `zip` map ValT (polyTagTypes a)
-- Create a polyTag struct binding from a list of type arguments:
-- bindings for the visible part (bs0, built from the relevant visible
-- args) followed by the existential part (bs1).
polyTagBinds env n ts = bs0 ++ bs1
  where mkBind (x,ValT t) e = (x, Val t e)
        ts0   = zipFilter vflags ts  -- relevant visible type args
        ts1   = drop va ts           -- existentially quantified args
        va    = visibleArity env n
        l_ts0 = length ts0
        te0   = polyTagEnv0 l_ts0
        es0   = polyTagArgs env ts0
        -- small arity: fold the (offset-scaled) tag words into the GCINFO
        -- call; larger arity: pass them as separate arguments
        bs0 | l_ts0 <= 4 = zipWith mkBind te0 [ECall (gcInfoName n) [] (map offset es0)]
            | otherwise  = zipWith mkBind te0 (ECall (gcInfoName n) [] [] : es0)
        bs1 = zipWith mkBind (polyTagEnv1 (length ts1)) (polyTagArgs env ts1)
        offset e = ECall (prim IntTimes) [] [ELit (lInt (d + 2)),e]
        (d,vflags) = findStructInfo env n
-- | Derive the name of the generated gcinfo binding for a struct name,
-- falling back to a tag-based spelling when the raw name is not a legal
-- C identifier.
gcInfoName n@(Name s t m a)
  | isClosure n = Name (gcinfoSym ++ s) 0 m a
  | okForC s    = Name (gcinfoSym ++ s) t m a
  | otherwise   = Name (gcinfoSym ++ "_sym_" ++ show t) t m a
gcInfoName (Tuple n a) = Name (gcinfoSym ++ "TUP" ++ show n) 0 Nothing a
gcInfoName (Prim p a)  = Name (gcinfoSym ++ strRep2 p) 0 Nothing a
-- Create a list of polyTag Exp arguments from a list of type arguments.
polyTagArgs env [] = []
-- Fast path: every arg is a tyvar, all drawn in order and in full from a
-- single status word — pass that word through unchanged.
polyTagArgs env ts
  | vars && ordered && total = [e0]
  where vars    = l_ts == length vs && length (nub es) == 1
        l_ts    = length ts
        vs      = [ n | TVar n _ <- ts ]
        (es,is) = unzip (map (findPolyTag "XX" env) vs)
        e0      = head es
        ordered = is == [0..l_ts-1]
        total   = length [ v | (v,(e,_)) <- polyenv env, e == e0 ] == l_ts
-- General case: build one 32-bit word per group of up to 32 type args,
-- with bit k describing the pointer/non-pointer status of arg k.
polyTagArgs env ts = args (length ts) ts
  where
    args 0 [] = []
    args a ts | a <= 32   = [arg 0 ts]
              -- NOTE(review): `splitAt a` with a > 32 puts everything in
              -- ts0 and leaves ts1 empty — `splitAt 32` looks intended;
              -- confirm behaviour for arities above 32.
              | otherwise = arg 0 ts0 : args (a-32) ts1
      where (ts0,ts1) = splitAt a ts
    arg k [] = intlit 0
    -- a tyvar's status bit is copied (shifted into place) from its word
    arg k (TVar n _ : ts) = bor (arg (k+1) ts) (shift (band e (mask i)) i k)
      where (e,i) = findPolyTag "YY" env n
    -- scalar primitives set their bit outright
    arg k (TCon (Prim p _) _ : ts)
      | p `elem` scalarPrims = bor (mask k) (arg (k+1) ts)
    arg k (_ : ts) = arg (k+1) ts
    shift e i k | i < k     = shiftL e (ELit (lInt (k-i)))
                | i > k     = shiftR e (ELit (lInt (i-k)))
                | otherwise = e
    shiftL e1 e2 = ECall (prim SHIFTL32) [] [e1,e2]
    shiftR e1 e2 = ECall (prim SHIFTR32) [] [e1,e2]
    mask i = intlit (2^i)
    -- bitwise-or with constant folding for a zero operand
    bor e1 e2 | e1 == intlit 0 = e2
              | e2 == intlit 0 = e1
              | otherwise      = ECall (prim OR32) [] [e1,e2]
    band e1 e2 = ECall (prim AND32) [] [e1,e2]
    intlit i = ECast tBITS32 (ELit (lInt i))
-- =============================
-- Prepare modules and types
-- =============================
-- | Process one module: build the environment from the imported interface
-- (dsi/e2) plus the module's own declarations, prepare the struct decls
-- and bindings, and append the generated gcinfo bindings and anything
-- accumulated in the local store. Note that the @(bf,bs)@ bind shadows
-- the original @bs@ with the prepared bindings.
pModule e2 dsi (Module m ns ds bs)
  = do -- tr (render (vcat (map pr dsi))
       let (_,_,_,Core.Binds _ te2 _) = e2
       tei <- Core2Kindle.c2kTEnv dsi te2
       let env1 = addTEnv (primTEnv++tei) (addDecls (primDecls++dsi) env0)
           env  = addTEnv (mapSnd typeOf bs) (addDecls ds env1)
       (bf,bs) <- pBinds pBind env bs
       bs' <- currentStore
       return (Module m ns (pDecls env ds) (gcinfo env ds ++ flatBinds bf ++ bs ++ reverse bs'))
-- Prepare structs declarations: erase polymorphism from each field,
-- inject a Tag field for tagged structs, and surround the fields with
-- the polyTag environments (visible tags before, hidden tags after).
pDecls env ds = map f ds
  where f (n,Struct vs te _) = (n, Struct [] (polyTagEnv0 l_vs0 ++ tagSig ++ mapSnd pType te ++ polyTagEnv1 l_vs1) Top)
          where tagSig = if n `elem` tagged env then [(prim Tag, ValT tInt)] else []
                l_vs0 = length (zipFilter vflags vs)
                l_vs1 = length (drop va vs)
                va = visibleArity env n
                (_,vflags) = findStructInfo env n
-- gcinfo types: these must match corresponding defines in gc.c
gcSTD = ELit (lInt 0)    -- standard (small, fixed-layout) object
gcARRAY = ELit (lInt 1)  -- array object
gcTUPLE = ELit (lInt 2)  -- tuple object
gcBIG = ELit (lInt 3)    -- big object with explicit bit references
gcMUT = ELit (lInt 4)    -- mutable object
-- Generate gcinfo for structs: for every non-null struct, emit a GCINFO
-- value listing its pointer fields.  Small structs (at most 4 hidden type
-- variables) get one padded layout per sample subset of those variables;
-- larger structs use the gcBIG encoding with explicit bit references.
gcinfo env ds = map f (prune ds (nulls env))
  where f (n,Struct vs te cs) = (gcInfoName n, Val tPOLY (ECall (prim GCINFO) [] es))
          where es | l_vs1 <= 4 = concat [ EVar n : gcSTD : pad l_es0 (ptrFields te vs) | vs <- sampleSpaces vs1 ]
                   | otherwise = EVar n : gcBIG : es0 ++ concat (map bitRef (varFields te)) ++ [ELit (lInt 0)]
                es0 = ptrFields te []
                l_es0 = length es0
                (d,vflags) = findStructInfo env n
                vs1 = zipFilter vflags vs
                l_vs1 = length vs1
                idx = vs1 `zip` [0..]
                -- bitRef: field name plus the word/bit position of the
                -- type variable that decides whether it is a pointer.
                bitRef (n,v) = [ EVar n, ELit (lInt (i `div` 32 + 1)), ELit (lInt (i `mod` 32)) ]
                  where i = lookup'' "DDD" idx v
        pad n es = es ++ replicate (n - length es) (ELit (lInt 0))
        -- ptrFields: names of fields that are pointers when the variables
        -- in vs are assumed to be non-pointers; zero-terminated.
        ptrFields te vs = map EVar (dom (filter (isPtr vs) te)) ++ [ELit (lInt 0)]
        -- All subsets of the type-variable list (power set).
        sampleSpaces [] = [[]]
        sampleSpaces (v:vs) = [ vs1 ++ vs2 | vs1 <- [[],[v]], vs2 <- sampleSpaces vs ]
        isPtr vs (n,FunT _ _ _) = False
        isPtr vs (n,ValT (TVar v _)) = v `notElem` vs
        isPtr vs (Prim Next _, _) = False -- the next field in Msg and its subtypes is custom handled during timerQ scanning.
        isPtr vs (n,ValT (TCon k _)) = not (isScalar k)
        isScalar (Prim p _) = p `elem` scalarPrims
        isScalar n = False
        varFields te = [ (n,v) | (n,ValT (TVar v _)) <- te ]
-- Simplify types: drop quantifiers from function types and prepend the
-- polyTag parameters that replace them at runtime.
pType (ValT t) = ValT (erase t)
pType (FunT vs ts t) = FunT [] (polyTagTypes (length vs) ++ map erase ts) (erase t)
-- Erase polymorphism from atomic types: type constructors lose their
-- arguments, type variables become the universal POLY type.
erase (TCon n _) = TCon n []
erase (TVar _ _) = tPOLY
-- Erase polymorphism across a whole type environment.
eraseEnv te = mapSnd erase te
-- =============================
-- Prepare bindings and commands
-- =============================
-- Prepare a list of bindings with the supplied preparer, collecting the
-- individual binding-prefix functions into one composed prefix.
pBinds f env xs = do
    prepared <- mapM (f env) xs
    let (prefixes, xs') = unzip prepared
    return (foldr (.) id prefixes, xs')
-- Prepare top-level & cmd bindings (assume code is lambda-lifted).
-- Value bindings: prepare the right-hand side and cast it to the erased
-- declared type.  Function bindings: introduce fresh polyTag parameters
-- for the erased quantifier list and prepare the body command.
pBind env (x, Val t e) = do (bf,t',e) <- pRhsExp env e
                            return (bf, (x, Val (erase t) (cast t t' e)))
pBind env (x, Fun vs t te c) = do te' <- newEnv paramSym (polyTagTypes (length vs))
                                  c <- pCmd (addVals te (addPolyEnv vs [0..] (map EVar (dom te')) env)) t c
                                  return (id, (x, Fun [] (erase t) (te' ++ eraseEnv te) c))
-- Prepare struct bindings (assume code is lambda-lifted).
-- Value fields: prepare against the expected field type and cast.
pSBind _ te0 env (x,Val t e) = do (bf,e) <- pExpT env t e
                                  return (bf, (x, Val (erase t0) (cast t0 t e)))
  where t0 = findValT "1" te0 x
-- Method that is already a monomorphic forwarding call of the right
-- shape: leave it untouched.  If okAlready fails, this equation falls
-- through to the general case below.
pSBind _ te0 env (x,Fun [] t te c@(CRet (ECall f [] (EThis:es))))
  | okAlready = return (id, (x, Fun [] t te c))
  where (_,ts0,t0) = findFunT te0 x []
        okAlready = t == erase t0 && rng te == map erase ts0 && es == map EVar (dom te)
-- General method: lift the body out into a fresh top-level function that
-- takes `this` explicitly, and replace the method by a forwarding call.
-- (Uses parallel list comprehensions to pair fresh parameters with the
-- original parameter environment.)
pSBind ty te0 env (x,Fun vs t te c)
  = do y <- newName thisSym
       te0 <- newEnv paramSym ts0
       te' <- newEnv paramSym (polyTagTypes (length vs))
       let bs0 = [ (x, Val t (cast t t0 (EVar x0))) | (x0,t0) <- te0 | (x,t) <- te ]
           te1 = [ if isEVar e then (x,t) else xt0 | (x, Val t e) <- bs0 | xt0 <- te0 ]
           bs1 = [ b | b@(_,Val _ e) <- bs0, not (isEVar e) ]
           te1' = te' ++ eraseEnv te1
           env' = addPolyEnv vs [0..] (map EVar (dom te')) (rebindPolyEnv ty y env)
       c <- pCmd (setThis y (addVals ((y,ty):te) env')) t0 c
       f <- newName functionSym
       addToStore (f, Fun [] t0' ((y,erase ty):te1') (cBind bs1 c))
       return (id, (x, Fun [] t0' te1' (CRet (ECall f [] (EThis : map EVar (dom te1'))))))
  where (_,ts0,t0) = findFunT te0 x []
        t0' = erase t0
        -- Rebind the polyTags of the enclosing struct's own type
        -- variables as selections from the new `this` parameter.
        rebindPolyEnv (TCon n ts) y env = addPolyEnv vs is (map (ESel (EVar y)) _abcSupply) env
          where ts1 = drop (visibleArity env n) ts
                (vs,is) = unzip [ (v,i) | (TVar v _, i) <- ts1 `zip` [0..] ]
-- Prepare commands.  Each case prepares its embedded expressions against
-- the expected types, threads the resulting binding-prefix functions
-- (bf) through, and recurses into continuation commands.  t0 is the
-- result type expected of CRet.
pCmd env t0 (CRet e) = do (bf,e) <- pExpT env t0 e
                          return (bf (CRet e))
pCmd env t0 (CRun e c) = do (bf,_,e) <- pExp env e
                            liftM (bf . CRun e) (pCmd env t0 c)
pCmd env t0 (CBind _ [] c) = pCmd env t0 c
pCmd env t0 (CBind False bs c) = do (bf,bs) <- pBinds pBind env bs
                                    liftM (bf . CBind False bs) (pCmd env' t0 c)
  where env' = addTEnv (mapSnd typeOf bs) env
-- Recursive binding group: the bindings are prepared in the extended
-- environment, and any extracted prefix bindings are flattened into the
-- same recursive group.
pCmd env t0 (CBind True bs c) = do (bf,bs) <- pBinds pBind env' bs
                                   liftM (CBind True (flatBinds bf ++ bs)) (pCmd env' t0 c)
  where env' = addTEnv (mapSnd typeOf bs) env
pCmd env t0 (CUpd x e c) = do (bf,e) <- pExpT env (findValT "2" (tenv env) x) e
                              liftM (bf . CUpd x e) (pCmd env t0 c)
pCmd env t0 (CUpdS e x e' c) = do (bf,t1,e) <- pExp env e
                                  let (s,te) = findStructTEnv "AA" env t1
                                  (bf',e') <- pExpT env (findValT "3" te x) e'
                                  liftM (bf . bf' . CUpdS e x e') (pCmd env t0 c)
-- Array update: elements are stored as POLY, index as Int.
pCmd env t0 (CUpdA e i e' c) = do (bf,TCon (Prim Array _) [t],e) <- pExp env e
                                  (bf',i) <- pExpT env tInt i
                                  (bf'',e') <- pExpT env tPOLY e'
                                  liftM (bf . bf' . bf'' . CUpdA e i e') (pCmd env t0 c)
pCmd env t0 (CSeq c c') = liftM2 mkSeq (pCmd env t0 c) (pCmd env t0 c')
pCmd env t0 (CBreak) = return CBreak
pCmd env t0 (CRaise e) = do (bf,e) <- pExpT env tInt e
                            return (bf (CRaise e))
pCmd env t0 (CWhile e c c') = do (bf,e) <- pExpT env tBool e
                                 c <- pCmd env t0 c
                                 liftM (bf . CWhile e c) (pCmd env t0 c')
pCmd env t0 (CCont) = return CCont
-- Switch: literal alternatives over simple scalar types switch on the
-- value itself; constructor switches split null (untagged, int-encoded)
-- from tagged alternatives and add defaults for absent constructors.
-- Non-variable scrutinees are first bound to a temporary (mkVarSwitch).
pCmd env t0 (CSwitch e alts)
  | any litA alts = if simple (litType (firstLit alts)) then
                        do (bf,e) <- pExpT env tInt e
                           alts <- mapM (pAlt env e tInt t0) alts
                           return (bf (CSwitch e alts))
                    else mkVarSwitch env t0 e alts
  | isEVar e || all nullA alts = do (bf,t,e) <- pExp env e
                                    alts <- mapM (pAlt env e t t0) alts
                                    let (alts0,alts1) = partition nullA [ a | a@(ACon _ _ _ _) <- alts ]
                                        altsW = [ a | a@(AWild _) <- alts ]
                                    return (bf (mkSwitch env e (alts0++absent0 altsW) (alts1++absent1 altsW)))
  | otherwise = mkVarSwitch env t0 e alts
  where nullA (ACon k _ _ _) = k `elem` nulls env
        nullA _ = False
        absent = allCons env alts \\ [ k | ACon k _ _ _ <- alts ]
        (abs0,abs1) = partition (`elem` nulls env) absent
        absent0 [] = [ ACon k [] [] CBreak | k <- abs0 ]
        absent0 (AWild d : _) = [ ACon k [] [] d | k <- abs0 ]
        absent1 altsW = [ a | a <- altsW, not (null abs1) ]
        litA (ALit _ _) = True
        litA _ = False
        firstLit (ALit l _ : _) = l
        firstLit (_ : as) = firstLit as
        simple (TCon (Prim Int _) []) = True
        simple (TCon (Prim Char _) []) = True
        simple _ = False
-- Build a switch from null (int-encoded, alts0) and tagged (alts1)
-- constructor alternatives, collapsing degenerate cases: a single
-- exhaustive alternative becomes its body, and mixed null/tagged
-- switches are nested (tagged alternatives become the default branch).
mkSwitch env (EVar _) [] [] = CBreak
mkSwitch env e [] [] = CSwitch e [AWild CBreak]
mkSwitch env e [] [ACon n _ _ c]
  | n `notElem` tagged env = c
mkSwitch env e [] [AWild c] = c
mkSwitch env e [] alts1 = CSwitch (ESel e (prim Tag)) (map (mkLitAlt env) alts1)
mkSwitch env e alts0@[ACon n _ _ c] []
  | allCons env alts0 == [n] = c
mkSwitch env e alts0 [] = CSwitch (ECast tInt e) (map (mkLitAlt env) alts0)
mkSwitch env e alts0 alts1 = mkSwitch env e (alts0++[AWild d]) []
  where d = mkSwitch env e [] alts1
-- Switch on an arbitrary expression: if it is already a variable, switch
-- directly; otherwise bind it to a fresh temporary first so the
-- scrutinee is evaluated exactly once.
mkVarSwitch env t0 e alts
  | isEVar e = do (bf,t,e) <- pExp env e
                  alts <- mapM (pAlt env e t t0) alts
                  return (bf (CSwitch e alts))
  | otherwise = do (bf,t,e) <- pExp env e
                   x <- newName tempSym
                   c <- pCmd (addVals [(x,t)] env) t0 (CSwitch (EVar x) alts)
                   return (bf (cBind [(x,Val t e)] c))
-- Turn a nullary constructor alternative into the corresponding literal
-- alternative; any other alternative is returned unchanged.
mkLitAlt env alt =
    case alt of
        ACon n [] [] c -> ALit (conLit env n) c
        _              -> alt
-- Prepare switch alternatives.  Constructor alternatives are rewritten
-- into nullary alternatives whose body first rebinds the polyTag
-- parameters and the constructor's fields as selections from the
-- downcast scrutinee.
pAlt env _ _ t0 (AWild c) = liftM AWild (pCmd env t0 c)
pAlt env _ _ t0 (ALit l c) = liftM (ALit l) (pCmd env t0 c)
pAlt env e (TCon _ ts) t0 (ACon k vs te c)
  = do te' <- newEnv paramSym (polyTagTypes (length vs))
       c <- pCmd (addPolyEnv vs [0..] (map EVar (dom te')) (addVals te env)) t0 c
       return (ACon k [] [] (cBind (bs0 te' ++ bs1) c))
  where bs0 te = zipWith mkBind te (_abcSupply `zip` repeat (ValT tBITS32))
        (_,te0) = findStructTEnv "KKK" env (TCon k (ts ++ map tVar vs))
        bs1 = filter (not . isDummy . fst) (zipWith mkBind te te0)
        mkBind (x,t) (y,ValT u) = (x, Val t (cast t u (ESel (ECast (TCon k (ts ++ map tVar vs)) e) y)))
-- Smart sequencing of two commands: if the first command's tail (its
-- "anchor") is a CBreak, splice the second command in its place; if it
-- is a CCont, the second command is dead and dropped; otherwise keep an
-- explicit CSeq.  anchor splits a command into its straight-line prefix
-- (as a reconstruction function) and its final command.
mkSeq c1 c2 = case anchor c1 of
                  (bf,CBreak) -> bf c2
                  (bf,CCont) -> c1
                  _ -> CSeq c1 c2
  where anchor (CBind r bs c) = (CBind r bs . bf, c')
          where (bf,c') = anchor c
        anchor (CRun e c) = (CRun e . bf, c')
          where (bf,c') = anchor c
        anchor (CUpd x e c) = (CUpd x e . bf, c')
          where (bf,c') = anchor c
        anchor (CUpdS e x e' c) = (CUpdS e x e' . bf, c')
          where (bf,c') = anchor c
        anchor (CUpdA e i e' c) = (CUpdA e i e' . bf, c')
          where (bf,c') = anchor c
        anchor c = (id, c)
-- =============================
-- Prepare expressions
-- =============================
-- Prepare a right-hand-side expression.  ENew (possibly under an
-- explicit cast) is handled without introducing a temporary, since the
-- result is consumed directly by the binding; everything else goes
-- through the general pExp.
pRhsExp env (ENew n ts bs) = pNewExp env n ts bs
pRhsExp env (ECast t (ENew n ts bs))
  = do (bf,t',e) <- pNewExp env n ts bs
       return (bf, t, cast t t' e)
pRhsExp env e = pExp env e
-- Prepare a struct construction.  Null constructors are represented as
-- plain int literals; otherwise the field bindings are prepared and the
-- polyTag bindings (bs'') and optional Tag binding (bs') are prepended.
pNewExp env n ts bs
  | n `elem` nulls env = return (id, t0, cast t0 tInt (ELit (conLit env n)))
  | otherwise = do (bf,bs) <- pBinds (pSBind t0 te0) env bs
                   return (bf, t0, ENew n [] (bs''++bs'++bs))
  where bs' = if n `elem` tagged env then [(prim Tag, Val tInt (ELit (conLit env n)))] else []
        bs'' = polyTagBinds env n ts
        t0 = TCon n ts
        (_,te0) = findStructTEnv "BB" env t0
-- Prepare a reference-cell binding: the right-hand side is prepared and
-- cast to the erased declared type of the field.
pRefBind te0 env (x,Val _ e) = do (bf,t,e) <- pRhsExp env e
                                  return (bf, (x, Val (erase t0) (cast t0 t e)))
  where t0 = findValT "1" te0 x
-- Prepare an expression in an arbitrary position and match its type with the expected one
pExpT env expected e0 = do
    (bf, actual, e1) <- pExp env e0
    return (bf, cast expected actual e1)
-- Insert a coercion from (erased) type t1 to (erased) type t0.
-- Small scalar primitives travel to/from POLY via an Int cast; floats
-- need dedicated conversion primitives; everything else is a plain cast.
cast t0 t1 e
  | u0 == u1 = e
  | u0 == tPOLY && smallPrim u1 = ECast tPOLY (ECast tInt e)
  | smallPrim u0 && u1 == tPOLY = ECast u0 (ECast tInt e)
  | u0 == tPOLY && u1 == tFloat = ECall (prim Float2POLY) [] [e]
  | u0 == tFloat && u1 == tPOLY = ECall (prim POLY2Float) [] [e]
  | otherwise = ECast u0 e
  where u0 = erase t0
        u1 = erase t1
-- Is this a primitive type small enough to fit in an Int?
smallPrim (TCon (Prim p _) _) = p `elem` smallPrims
smallPrim _ = False
-- Prepare a list of expressions against a matching list of expected
-- types, composing the resulting binding prefixes left to right.
pExpTs _ [] [] = return (id, [])
pExpTs env (t : ts) (e : es) = do
    (bfHead, e')  <- pExpT env t e
    (bfTail, es') <- pExpTs env ts es
    return (bfHead . bfTail, e' : es')
-- Prepare an expression in an arbitrary position and compute its type.
-- Selections and calls are specialized through the substitution returned
-- by the environment lookups; method entry on a non-variable receiver
-- first binds the receiver to a temporary.
pExp env (EVar x) = return (id, findValT "4" (tenv env) x, EVar x)
pExp env (ELit l) = return (id, litType l, ELit l)
pExp env (EThis) = return (id, findValT "5" (tenv env) x, EVar x)
  where x = fromJust (this env)
pExp env (ESel e l) = do (bf,t1,e) <- pExp env e
                         let (s,te) = findStructTEnv "CC" env t1
                             t = findValT ("6" ++ " e: " ++ render (pr e) ++ " te: " ++ show te) te l
                         specialize s t bf (ESel e l)
pExp env (ECall f ts es) = do (bf,es) <- pExpTs env ts0 es
                              specialize s t bf (ECall f [] (polyTagArgs env ts ++ es))
  where (s,ts0,t) = findFunT (tenv env) f ts
pExp env (EEnter (EVar x) f ts es) = do let t1 = findValT "7" (tenv env) x
                                        let (s,te) = findStructTEnv "DD" env t1
                                            (s',ts0,t) = findFunT te f ts
                                        (bf,es) <- pExpTs env ts0 es
                                        specialize (s'@@s) t bf (EEnter (EVar x) f [] (polyTagArgs env ts ++ es))
pExp env (EEnter e f ts es) = do (bf1,t1,e) <- pRhsExp env e
                                 let (s,te) = findStructTEnv "EE" env t1
                                     (s',ts0,t) = findFunT te f ts
                                 (bf2,es) <- pExpTs env ts0 es
                                 x <- newName tempSym
                                 specialize (s'@@s) t (bf1 . bf2 . cBind [(x, Val (erase t1) e)]) (EEnter (EVar x) f [] (polyTagArgs env ts ++ es))
pExp env (ECast t e) = do (bf,t',e) <- pExp env e
                          return (bf, t, cast t t' e)
-- ENew in expression position is hoisted out into a temporary binding.
pExp env (ENew n ts bs)
  | n `elem` nulls env = return (id, tInt, ELit (conLit env n))
  | otherwise = do (bf,t,e) <- pNewExp env n ts bs
                   x <- newName tempSym
                   return (bf . cBind [(x, Val (erase t) e)], t, EVar x)
-- Apply the substitution to the result type and cast the prepared
-- expression from the generic type to the specialized one.
specialize su ty bf e =
    let ty' = subst su ty
    in return (bf, ty', cast ty' ty e)
|
UBMLtonGroup/timberc
|
src/Prepare4C.hs
|
bsd-3-clause
| 28,152
| 3
| 19
| 11,839
| 9,959
| 5,065
| 4,894
| 360
| 8
|
{-# LANGUAGE OverloadedStrings #-}
module TestConfig where
import Data.Map
import Data.Maybe
import Data.OpenSRS.Types
import Data.Time
-- | Testing contacts: every contact role maps to the same sample contact.
testContacts :: Map String Contact
testContacts =
    fromList [ (role, sharedContact)
             | role <- ["owner", "admin", "billing", "tech"] ]
  where
    sharedContact =
        Contact (Just "Jane")
                (Just "Doe")
                (Just "Frobozz Pty Ltd")
                (Just "jane.doe@anchor.com.au")
                (Just "+61.299999999")
                Nothing
                (Just "Frobozz Pty Ltd")
                (Just "Level 50")
                (Just "1 George Street")
                (Just "Sydney")
                (Just "NSW")
                (Just "2000")
                (Just "AU")
-- | Testing nameservers: two fixed hosts with loopback-range addresses.
testNameservers :: [Nameserver]
testNameservers =
    [ mkNs "ns1.example.com" "127.0.0.1"
    , mkNs "ns2.example.com" "127.0.0.2"
    ]
  where
    mkNs host addr = Nameserver (Just host) (Just "0") (Just addr)
-- | Valid password (accepted by the automated tests).
validPassword :: String
validPassword = "automatedTest124"
-- | Invalid password (contains characters/format the service rejects).
invalidPassword :: String
invalidPassword = "Hi; I am trash!"
|
anchor/haskell-opensrs
|
tests/TestConfig.hs
|
bsd-3-clause
| 1,261
| 0
| 9
| 454
| 279
| 155
| 124
| 32
| 1
|
-- | This module defines commonly useful functions that are related specifically with vectors and matrices.
module FPNLA.Matrix.Utils where
import FPNLA.Matrix (Matrix(..))
import Debug.Trace (trace)
-- | Prints a matrix to the standard output.
-- This operation requires the elements of the matrix to have an instance of 'Show' but does not require a 'Show' instance for the matrix data type.
print_m :: (Show e, Matrix m e) => m e -> IO ()
print_m mi = for 0 0
    -- Row-major traversal: print each element followed by a space,
    -- newline at end of row, stop after the last row.  The final guard
    -- is unreachable (j < n and j >= n cover all cases) and only exists
    -- to silence the pattern checker.
    where (m,n) = dim_m mi
          for i j | i >= m = return ()
                  | j < n = (putStr . show $ elem_m i j mi) >> putStr " " >> for i (j+1)
                  | j >= n = putStrLn "" >> for (i+1) 0
                  | otherwise = trace "print_m" undefined
|
mauroblanco/fpnla
|
src/FPNLA/Matrix/Utils.hs
|
bsd-3-clause
| 737
| 0
| 12
| 205
| 223
| 113
| 110
| 10
| 1
|
{-# LANGUAGE DeriveGeneric, GeneralizedNewtypeDeriving #-}
-- | Actors perceiving other actors and the dungeon level.
--
-- Visibility works according to KISS. Everything that player sees is real.
-- There are no unmarked hidden tiles and only solid tiles can be marked,
-- so there are no invisible walls and to pass through an illusory wall,
-- you have to use a turn bumping into it first. Only tiles marked with Suspect
-- can turn out to be another tile. (So, if all tiles are marked with
-- Suspect, the player knows nothing for sure, but this should be avoided,
-- because searching becomes too time-consuming.)
-- Each actor sees adjacent tiles, even when blind, so adjacent tiles are
-- known, so the actor can decide accurately whether to pass thorugh
-- or alter, etc.
--
-- Items are always real and visible. Actors are real, but can be invisible.
-- Invisible actors in walls can't be hit, but are hinted at when altering
-- the tile, so the player can flee or block. Invisible actors in open
-- space can be hit.
module Game.LambdaHack.Common.Perception
( PerVisible(..)
, PerSmelled(..)
, Perception(..)
, PerLid
, PerFid
, totalVisible, totalSmelled
, emptyPer, nullPer, addPer, diffPer
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import Data.Binary
import qualified Data.EnumMap.Strict as EM
import qualified Data.EnumSet as ES
import GHC.Generics (Generic)
import Game.LambdaHack.Common.Types
import Game.LambdaHack.Common.Point
-- | Visible positions.
newtype PerVisible = PerVisible {pvisible :: ES.EnumSet Point}
deriving (Show, Eq, Binary)
-- | Smelled positions.
newtype PerSmelled = PerSmelled {psmelled :: ES.EnumSet Point}
deriving (Show, Eq, Binary)
-- | The type representing the perception of a faction on a level.
data Perception = Perception
{ psight :: PerVisible
, psmell :: PerSmelled
}
deriving (Show, Eq, Generic)
instance Binary Perception
-- | Perception of a single faction, indexed by level identifier.
type PerLid = EM.EnumMap LevelId Perception
-- | Perception indexed by faction identifier.
-- This can't be added to @FactionDict@, because clients can't see it
-- for other factions.
type PerFid = EM.EnumMap FactionId PerLid
-- | The set of tiles visible by at least one hero.
totalVisible :: Perception -> ES.EnumSet Point
totalVisible per = pvisible (psight per)
-- | The set of tiles smelt by at least one hero.
totalSmelled :: Perception -> ES.EnumSet Point
totalSmelled per = psmelled (psmell per)
-- | A perception that sees and smells nothing.
emptyPer :: Perception
emptyPer = Perception (PerVisible ES.empty) (PerSmelled ES.empty)
-- | Whether the perception is completely empty.
nullPer :: Perception -> Bool
nullPer = (== emptyPer)
-- | Union of two perceptions, componentwise.
addPer :: Perception -> Perception -> Perception
addPer per1 per2 = Perception { psight = sightBoth, psmell = smellBoth }
 where
  sightBoth = PerVisible $ totalVisible per1 `ES.union` totalVisible per2
  smellBoth = PerSmelled $ totalSmelled per1 `ES.union` totalSmelled per2
-- | Difference of two perceptions, componentwise.
diffPer :: Perception -> Perception -> Perception
diffPer per1 per2 = Perception { psight = sightLeft, psmell = smellLeft }
 where
  sightLeft = PerVisible $ totalVisible per1 ES.\\ totalVisible per2
  smellLeft = PerSmelled $ totalSmelled per1 ES.\\ totalSmelled per2
|
LambdaHack/LambdaHack
|
engine-src/Game/LambdaHack/Common/Perception.hs
|
bsd-3-clause
| 3,230
| 0
| 9
| 654
| 519
| 310
| 209
| 51
| 1
|
{-# LANGUAGE FlexibleContexts, RankNTypes #-}
module Cloud.AWS.EC2.SecurityGroup
( describeSecurityGroups
, createSecurityGroup
, deleteSecurityGroup
, authorizeSecurityGroupIngress
, authorizeSecurityGroupEgress
, revokeSecurityGroupIngress
, revokeSecurityGroupEgress
) where
import Data.Text (Text)
import Data.ByteString (ByteString)
import Data.Conduit
import Control.Applicative
import Control.Monad.Trans.Resource (MonadThrow, MonadResource, MonadBaseControl)
import Cloud.AWS.Lib.Parser.Unordered (XmlElement, (.<))
import Cloud.AWS.EC2.Internal
import Cloud.AWS.EC2.Types
import Cloud.AWS.EC2.Query
-- | Call the EC2 @DescribeSecurityGroups@ action and stream the parsed
-- 'SecurityGroup' items from the @securityGroupInfo@ element.
describeSecurityGroups
    :: (MonadResource m, MonadBaseControl IO m)
    => [Text] -- ^ GroupNames
    -> [Text] -- ^ GroupIds
    -> [Filter] -- ^ Filters
    -> EC2 m (ResumableSource m SecurityGroup)
describeSecurityGroups names ids filters =
    ec2QuerySource "DescribeSecurityGroups" params path
        $ itemConduit $ \xml ->
        SecurityGroup
        <$> xml .< "ownerId"
        <*> xml .< "groupId"
        <*> xml .< "groupName"
        <*> xml .< "groupDescription"
        <*> xml .< "vpcId"
        <*> ipPermissionsConv "ipPermissions" xml
        <*> ipPermissionsConv "ipPermissionsEgress" xml
        <*> resourceTagConv xml
  where
    path = itemsPath "securityGroupInfo"
    params =
        [ "GroupName" |.#= names
        , "GroupId" |.#= ids
        , filtersParam filters
        ]
-- | Parse a named item set of IP permissions (used for both the ingress
-- and egress permission lists of a security group response).
ipPermissionsConv :: (MonadThrow m, Applicative m)
    => Text -> XmlElement -> m [IpPermission]
ipPermissionsConv name = itemsSet name conv
  where
    conv e = IpPermission
        <$> e .< "ipProtocol"
        <*> e .< "fromPort"
        <*> e .< "toPort"
        <*> itemsSet "groups" uidConv e
        <*> itemsSet "ipRanges" (.< "cidrIp") e
    -- Parse one user-id/group pair inside the "groups" item set.
    uidConv e = UserIdGroupPair
        <$> e .< "userId"
        <*> e .< "groupId"
        <*> e .< "groupName"
-- | Call the EC2 @CreateSecurityGroup@ action and return the new
-- group's id as reported in the @groupId@ element of the response.
createSecurityGroup
    :: (MonadResource m, MonadBaseControl IO m)
    => Text -- ^ GroupName
    -> Text -- ^ GroupDescription
    -> Maybe Text -- ^ VpcId
    -> EC2 m (Maybe Text) -- ^ GroupId
createSecurityGroup name desc vpc =
    ec2Query "CreateSecurityGroup" params (.< "groupId")
  where
    params =
        [ "GroupName" |= name
        , "GroupDescription" |= desc
        , "VpcId" |=? vpc
        ]
-- | Call the EC2 @DeleteSecurityGroup@ action; the group may be named
-- by id or by name via 'SecurityGroupRequest'.  Returns the response's
-- @return@ flag.
deleteSecurityGroup
    :: (MonadResource m, MonadBaseControl IO m)
    => SecurityGroupRequest
    -> EC2 m Bool
deleteSecurityGroup param =
    ec2Query "DeleteSecurityGroup" params (.< "return")
  where
    params = [securityGroupRequestParam param]
-- | Render a security-group reference as the matching query parameter.
securityGroupRequestParam :: SecurityGroupRequest -> QueryParam
securityGroupRequestParam req = case req of
    SecurityGroupRequestGroupId gid -> "GroupId" |= gid
    SecurityGroupRequestGroupName gname -> "GroupName" |= gname
-- | Add ingress rules to a security group
-- (@AuthorizeSecurityGroupIngress@).
authorizeSecurityGroupIngress
    :: (MonadResource m, MonadBaseControl IO m)
    => SecurityGroupRequest
    -> [IpPermission]
    -> EC2 m Bool
authorizeSecurityGroupIngress =
    securityGroupQuery "AuthorizeSecurityGroupIngress"
-- | Add egress rules to a security group
-- (@AuthorizeSecurityGroupEgress@); egress rules apply to VPC groups,
-- so the group is identified by id only.
authorizeSecurityGroupEgress
    :: (MonadResource m, MonadBaseControl IO m)
    => Text -- ^ GroupId
    -> [IpPermission]
    -> EC2 m Bool
authorizeSecurityGroupEgress gid =
    securityGroupQuery "AuthorizeSecurityGroupEgress"
        $ SecurityGroupRequestGroupId gid
-- | Remove ingress rules from a security group
-- (@RevokeSecurityGroupIngress@).
revokeSecurityGroupIngress
    :: (MonadResource m, MonadBaseControl IO m)
    => SecurityGroupRequest
    -> [IpPermission]
    -> EC2 m Bool
revokeSecurityGroupIngress =
    securityGroupQuery "RevokeSecurityGroupIngress"
-- | Remove egress rules from a security group
-- (@RevokeSecurityGroupEgress@); identified by id only, as for
-- authorization.
revokeSecurityGroupEgress
    :: (MonadResource m, MonadBaseControl IO m)
    => Text -- ^ GroupId
    -> [IpPermission]
    -> EC2 m Bool
revokeSecurityGroupEgress gid =
    securityGroupQuery "RevokeSecurityGroupEgress"
        $ SecurityGroupRequestGroupId gid
-- | Shared driver for the authorize/revoke actions: send the given
-- action with the group reference and serialized permissions, and read
-- the response's @return@ flag.
securityGroupQuery
    :: (MonadResource m, MonadBaseControl IO m)
    => ByteString -- ^ Action
    -> SecurityGroupRequest
    -> [IpPermission]
    -> EC2 m Bool
securityGroupQuery act param ipps =
    ec2Query act params (.< "return")
  where
    params =
        [ securityGroupRequestParam param
        , "IpPermissions" |.#. map ipPermissionParams ipps
        ]
-- | Serialize one IP permission into its query parameters, including
-- the nested group-pair and CIDR range lists.
ipPermissionParams :: IpPermission -> [QueryParam]
ipPermissionParams ipp =
    [ "IpProtocol" |= ipPermissionIpProtocol ipp
    , "FromPort" |=? ipPermissionFromPort ipp
    , "ToPort" |=? ipPermissionToPort ipp
    , "Groups" |.#. map groupPairParams (ipPermissionGroups ipp)
    , "IpRanges" |.#. map (\a -> ["CidrIp" |= a]) (ipPermissionIpRanges ipp)
    ]
  where
    groupPairParams gp =
        [ "UserId" |=? userIdGroupPairUserId gp
        , "GroupId" |=? userIdGroupPairGroupId gp
        , "GroupName" |=? userIdGroupPairGroupName gp
        ]
|
worksap-ate/aws-sdk
|
Cloud/AWS/EC2/SecurityGroup.hs
|
bsd-3-clause
| 4,743
| 0
| 19
| 1,074
| 1,098
| 588
| 510
| 130
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Data.Promotion.Prelude.Eq
-- Copyright : (C) 2014 Jan Stolarek
-- License : BSD-style (see LICENSE)
-- Maintainer : Jan Stolarek (jan.stolarek@p.lodz.pl)
-- Stability : experimental
-- Portability : non-portable
--
-- Provided promoted definitions related to type-level equality.
--
-----------------------------------------------------------------------------
{-# LANGUAGE ExplicitNamespaces #-}
module Data.Promotion.Prelude.Eq (
PEq(..), (:==$), (:==$$), (:==$$$), (:/=$), (:/=$$), (:/=$$$)
) where
import Data.Singletons.Prelude.Eq
|
int-index/singletons
|
src/Data/Promotion/Prelude/Eq.hs
|
bsd-3-clause
| 657
| 0
| 5
| 89
| 67
| 54
| 13
| 4
| 0
|
module Main where
import Graphics.ChalkBoard
import Control.Applicative
-- | A straight line of thickness @th@ animated from start point @s@
-- toward end point @e@ (the moving endpoint is interpolated over the
-- animation's age).
example1 :: Point -> Point -> R -> Active (Board Bool)
example1 s e th = id
    $ fmap (\ (a,b) -> straightLine (s,(a,b)) th)
    $ scale 1
    $ actLerp age s e
-- | A flickering sequence of 16 shrinking line segments between the
-- four square corners, drawn in the given color on white.  @srcs@ is a
-- cyclic (infinite) corner list; @targets@ is the same list offset by
-- one, so each segment connects consecutive corners.
example2 :: O RGB -> Active (Board RGB)
example2 rgb
    = fmap (scale 0.2)
    $ fmap (choose rgb white .$)
    $ flicker [ example1 f' t' 0.05
              | (f,t,i) <- zip3 srcs targets [0..15]
              , let (f',t') = scale ((60-fromInteger i)/60) (f,t)
              ]
  where
    srcs = [ (-1,1), (1,1), (1,-1), (-1,-1) ] ++ srcs
    targets = tail srcs
-- | 'example2' with an additional rotation whose total angle is
-- @pi * r@, applied progressively over the animation's age.
example3 :: R -> O RGB -> Active (Board RGB)
example3 r rgb = rot <*> ex
  where
    ex = example2 rgb
    rot = fmap (\ ui -> rotate (pi * ui * r)) age `streach` ex
-- Three variants of example3 shown in sequence with a lerp transition.
examples = turnPages lerpAct [example3 0 red,example3 1 yellow,example3 0 blue]
-- Open a ChalkBoard window and draw each simulated frame in turn
-- (10 frames per second of animation), then shut the board down.
-- The commented-out lines optionally pipe frames to ffmpeg for video
-- capture.
main = startChalkBoard [] $ \ cb -> do
        let loop (brd:rest) = do
                drawChalkBoard cb brd
                loop rest
            loop [] = return ()
        -- startMyWriteStream cb "ffmpeg -f image2pipe -vcodec ppm -i - -vcodec libx264 -b 500k -vpre hq -vpre main square.mp4"
        loop (simulate 10 examples)
        -- endWriteStream cb
        exitChalkBoard cb
|
andygill/chalkboard2
|
tests/anim1/Main.hs
|
bsd-3-clause
| 1,131
| 9
| 17
| 266
| 533
| 277
| 256
| 29
| 2
|
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TemplateHaskell #-}
module Main
where
-- Where a declaration starts with "prop_smoketest_" and has no type sig, it is
-- a Bool that needs to be checked; otherwise we just care that things in here
-- typecheck.
--
-- TODO make some proper quickcheck tests, especially using the polymorphic TH
-- test generator stuff.
--
-- Contributers: Any changes that simplify or improve the structure of the
-- Compose module are very welcome, as long as they don't break this test
-- module.
import Data.Shapely
import Data.Shapely.Normal as Sh
import Data.Shapely.Normal.TypeIndexed
import Data.Shapely.Spine
import Data.Proxy
import Control.Monad(forM)
import Data.Foldable(toList)
import Data.Tree
import Test.QuickCheck.All
-- Run every prop_* property in this module via Template Haskell;
-- report "Ok" on success, abort with an error otherwise.
main = do
    passed <- $quickCheckAll
    if passed
        then putStrLn "Ok"
        else error "Some tests failed"
-- ---------------------------
s :: (Int,()) :+: (Char,()) :+: (Bool :*! String)
-- Either (Int,()) (Either (Char,()) (Bool,(String,())))
s = Right (Right (True,("true",())))
p :: (Int,(Char,(Bool,())))
p = 1 *: 'a' *! True
-- (1,('a',(True,())))
prop_smoketest_constructorsOfNormal_prod = constructorsOfNormal ('a',('b',())) 'x' 'y' == ('x',('y',()))
{-
-- CONCATABLE
concated_p :: (Int,(Char,(Bool,(Int,(Char,(Bool,()))))))
concated_p = Sh.concat (p, (p, ()))
prop_smoketest_concated_s = ( Sh.concat $ (Right s :: Either (Either (Int,()) (Either (Char,()) (Bool,()))) (Either (Int,()) (Either (Char,()) (Bool,(String,())))) ) )
== Right (Right (Right (Right (Right (True,("true",()))))))
-}
prop_smoketest_distributeTerm =
let s' = Right (Right (1,(True,("true",(9,())))))
in s' == 1 *< s >* 9
multiply1
:: Either
(Int, (Int, ()))
(Either
(Int, (Char, ()))
(Either
(Int, (Bool, (String, ())))
(Either
(Char, (Int, ()))
(Either
(Char, (Char, ()))
(Either
(Char, (Bool, (String, ())))
(Either
(Bool, ([Char], (Int, ())))
(Either
(Bool, ([Char], (Char, ())))
(Bool, ([Char], (Bool, (String, ())))))))))))
multiply1 = s >*< s
prop_smoketest_multiply2 = p >*< () >*< p == 1 *: 'a' *: True *: 1 *: 'a' *! True
prop_smoketest_multiply_monoid = and $
[ () >*< p == p
, p >*< () == p
, () >*< s == s
, s >*< () == s
, (p >*< p) >*< p == p >*< (p >*< p)
, (s >*< p) >*< p == s >*< (p >*< p)
, (p >*< s) >*< p == p >*< (s >*< p)
, (p >*< p) >*< s == p >*< (p >*< s)
, (s >*< s) >*< p == s >*< (s >*< p)
, (p >*< s) >*< s == p >*< (s >*< s)
, (s >*< s) >*< s == s >*< (s >*< s)
]
-- REVERSABLE
s_rev :: Either (Bool,(String,())) (Either (Char,()) (Int,()))
s_rev = Sh.reverse s
p_rev :: (Bool,(Char,(Int,())))
p_rev = Sh.reverse p
p_empty_rev :: ()
p_empty_rev = Sh.reverse ()
-- SHIFTING:
sr :: Either (Bool,(String,())) (Either (Int,()) (Char,()))
sr = shiftr s
sl :: Either (Char,()) (Either (Bool,(String,())) (Int,()))
sl = shiftl s
pr :: (Bool,(Int,(Char,())))
pr = shiftr p
pl :: (Char,(Bool,(Int,())))
pl = shiftl p
-- FANIN
prop_smoketest_fanin_prod = Sh.fanin (\i c b-> if b then (i,c) else (9,'z')) p == (1,'a')
prop_smoketest_unfanin_prod = Sh.fanin (Sh.unfanin(\(i,(c,(b,())))-> if b then (i,c) else (9,'z'))) p == (1,'a')
prop_smoketest_fanin_sum =
-- the sum arg must be unambiguous, but hopefully in practice a
-- type signature won't be necessary (when e.g. the sum is a
-- TH-generated instance):
let s' = Right $ Right (1,([2..5],())) :: Either (Int,()) ( Either () (Int,([Int],())) )
in fanin ((+1), (3, (foldr (+), ()))) s' == 15
prop_smoketest_unfanin_sum =
let f (Left (_,())) = "a"
f (Right (Left (_,()))) = "b"
f (Right (Right (_,(s,())))) = s
in fanin (unfanin f) s == "true"
-- NOTE: 'ary' is required for inference here
prop_smoketest_ary_with_unfanin = (unfanin (_4 `ary` (shiftl . Sh.reverse)) 1 2 3 4) == (3,(2,(1,(4,()))))
-- APPEND
{-
appended :: (Int,(Char,(Bool,(Int,(Char,(Bool,()))))))
appended = p .++. p
appended_s
:: Either
(Char, ())
(Either
(Int, ()) (Either (Int, ()) (Either (Char, ()) (Bool, (String,())))))
appended_s = let s_ss = (Right s) :: Either ( Either (Char,()) (Int,()) ) ( Either (Int,()) (Either (Char,()) (Bool,(String,()))) )
in append s_ss
-- == Right (Right (Right (Right (True,()))))
-}
-- Homogeneous
prop_smoketest_toList = ( toList $ toFList (1,(2,(3,()))) ) == [1,2,3]
prop_smoketest_toList2 = null $ toList $ toFList ()
prop_smoketest_homogenous_inferrence = (\(a,as) -> a == 1) $ fromFList $ toFList (1,(2,(3,())))
-- CARTESIAN-ESQUE
-- NOTE: ambiguous without `==`
prop_smoketest_fanout_prod = fanout (head,(tail,(Prelude.length,()))) [1..3] == (1,([2,3],(3,())))
-- test of inferrence convenience:
prop_smoketest_repeat = (3 ==) $ (\(x,(y,(z,())))-> x+y+z) $ Sh.repeat 1
-- THIS DOESN'T WORK, HOWEVER. any way to restructure fanin to make inferrence possible?
-- repeat_test2 = (3 ==) $ Sh.uncurry (\x y z-> x+y+z) $ Sh.repeat 1
prop_smoketest_repeat2 = (3 ==) $ Sh.fanin (\x y z-> x+y+z) (Sh.repeat 1 :: (Int,(Int,(Int,()))))
prop_smoketest_replicate = (\(_,(a,_)) -> a == 2) $ Sh.replicate (Proxy :: Proxy (Either () (Either () ()))) 2
prop_smoketest_extract = let s' :: Either (Int,()) (Either (Int,()) (Int,()))
s' = Right (Right (1,()))
in extract s' == (1,())
prop_smoketest_factorPrefix = ('a',(True,())) ==
(fst $ factorPrefix (Left ('a',(True,('b',()))) :: Either (Char,(Bool,(Char,()))) (Char,(Bool,())) ))
-------- MASSAGEABLE
mb = ('a',("hi",()))
mb_0 :: Either () (String,(Char,()))
mb_0 = massageNormal ()
mb_1 :: Either (String,(Char,())) ()
mb_1 = massageNormal ()
mb_2 :: Either (Int,(Char,())) (String,(Char,()))
mb_2 = massageNormal mb
mb_3 :: Either (Char,(Int,())) (String,(Char,()))
mb_3 = massageNormal mb
mb_4 :: Either (Char,(String,(Int,()))) (Either () (String,(Char,())))
mb_4 = massageNormal mb
mb_5 :: Either (String,()) (String,(Char,()))
mb_5 = massageNormal mb
mc = Left mb :: Either (Char,(String,())) ()
mc_0 :: Either (Int,()) (Either (String,(Char,())) ())
mc_0 = massageNormal mc
-- Testing ordered tuples:
md = (Left ('a',('b',(3,())))) :: Either (Char,(Char,(Int,()))) ()
md_0 :: Either (Char,(Char,(Bool,()))) (Either () (Char,(Char,(Int,()))))
md_0 = massageNormal md
prop_smoketest_md_1 = ( massageNormal md :: Either (Char,(Int,(Char,()))) (Either () (Char,(Char,(Int,())))) ) == (Right $ Right ('a',('b',(3,()))))
prop_smoketest_md_2 = ( massageNormal ('a',('b',(True,()))) :: Either (Bool,()) (Char,(Char,(Bool,()))) ) == (Right ('a',('b',(True,()))))
prop_smoketest_md_3 = ( massageNormal ('a',('b',())) :: Either (Char,(Char,())) (Either () (Int,())) ) == (Left ('a',('b',())))
{-
-- must not typecheck
massageNormal mb :: Either (String,(Char,())) (Char,(String,()))
massageNormal () :: Either () ()
massageNormal mc :: Either (Int,()) (Either (String,(Char,())) (String,()))
-- ordered product style
massageNormal md :: Either (Char,(Char,(Int,()))) (Either () (Char,(Char,(Int,()))))
massageNormal md :: Either (Char,(Char,(Bool,()))) (Either (Int,()) (Char,(Char,(Int,()))))
-}
-- Testing recursion:
prop_smoketest_mr_id = massage "foo" == "foo"
data OrderedRec = OCons Int Int OrderedRec | ONull deriving Eq
deriveShapely ''OrderedRec
prop_smoketest_orderedr_id = massage (OCons 1 1 (OCons 2 2 ONull)) == (OCons 1 1 (OCons 2 2 ONull))
-- OrderedRec but reordered constructors, plus an extra constructor to
-- demonstrate non-bijective mapping, where the cons is non-ambiguous because
-- it uses ordering-significant matching (because all terms not unique types)
data OrderedRec3 = ONull3 | OCons3 Int Int OrderedRec3 | ONewCons3 OrderedRec3 Int Int deriving Eq
deriveShapely ''OrderedRec3
prop_smoketest_rec_ordered_expand = massage (OCons 1 1 (OCons 2 2 ONull)) == (OCons3 1 1 (OCons3 2 2 ONull3))
-- [a] with both order of products and sums reversed:
data Tsil a = Snoc (Tsil a) a
| Lin
deriving Eq
deriveShapely ''Tsil
prop_smoketest_m_unorderedr = massage "123" == Snoc (Snoc (Snoc Lin '3') '2') '1'
-------- TYPE-INDEXED
prop_smoketest_viewFirstTypeOf_prod = (('a',(False,(True,("potato",())))) `viewFirstTypeOf` True)
== (False,('a',(True,("potato",()))))
prop_smoketest_viewTypeOf_prod = (('a',(False,(True,("potato",())))) `viewFirstTypeOf` "tuber")
== ("potato",('a',(False,(True,()))))
viewTypeOf_sum1 :: Either (Int, ()) (Either (Char, ()) (Bool :*! String))
viewTypeOf_sum1 = s `viewTypeOf` ((1,()) :: (Int,()))
viewTypeOf_sum2 :: Either (Char, ()) (Either (Int, ()) (Bool :*! String))
viewTypeOf_sum2 = s `viewTypeOf` ('a',())
viewTypeOf_sum3 :: Either (Bool :*! String) (Either (Int, ()) (Char, ()))
viewTypeOf_sum3 = s `viewTypeOf` (True,("string",()))
prop_smoketest_viewFirstTypeOf_sum1 = (Left () :: Either () ()) `viewFirstTypeOf` () == Left ()
prop_smoketest_viewFirstTypeOf_sum2 = (Right $ Left () :: Either (Int,()) (Either () ())) `viewFirstTypeOf` () == Left ()
{- MUST NOT TYPECHECK:
('a',(False,(True,("potato",())))) `viewTypeOf` True
(Right $ Left () :: Either (Int,()) (Either () ())) `viewTypeOf` ()
(Left () :: Either () ()) `viewTypeOf` ()
-}
nub_prod :: (Int, (Char, (Bool, ())))
nub_prod = nubType (undefined :: (Int,(Char,(Int,(Int,(Bool,(Bool,())))))))
-------- TH DERIVING:
-- NON-RECURSIVE:
-- Each declaration mirrors a standard "shape", noted in trailing comments.
data A = A deriving (Eq,Show)          -- ()
data B = B Int deriving (Eq,Show)
data C a b = C a b deriving (Eq,Show)  -- (,)
data D a b = D0 a | D1 b deriving (Eq,Show) -- Either
data E a = E0 | E1 a deriving (Eq,Show)     -- Maybe
data F a b c = F0 a b c | F1 a b | F2 a deriving (Eq,Show)
deriveShapely ''A
deriveShapely ''B
deriveShapely ''C
deriveShapely ''D
deriveShapely ''E
deriveShapely ''F
-- RECURSIVE: -------
data Li = Em | Co Char Li deriving Eq
deriveShapely ''Li
-- coerce must be an isomorphism between String and the structurally
-- identical Li; check both directions.
prop_smoketest_th_rec =
    let a = "works"
        b = Co 'w' $ Co 'o' $ Co 'r' $ Co 'k' $ Co 's' $ Em
    in coerce a == b && coerce b == a
data SimpleTree a = SBr (SimpleTree a) a (SimpleTree a)
                  | SEm
                  deriving (Eq,Show)
deriveShapely ''SimpleTree
-- Three mutually-recursive tree types, coerced into the single-type
-- SimpleTree via an explicit spine of recursion targets.
data LRTree a = LRTop (LTree a) a (RTree a)
              | LRTopEm
data LTree a = LBr (LTree a) a (RTree a)
             | LEm
data RTree a = RBr (LTree a) a (RTree a)
             | REm
fmap Prelude.concat $ forM [''LRTree , ''LTree , ''RTree ] deriveShapely
-- test deeper recursive structure:
prop_smoketest_th_rec_multi =
    let lrTree = LRTop (LBr LEm 'b' REm) 'a' (RBr LEm 'b' REm)
        --st0 = (Proxy :: Proxy (LRTree Char), (Proxy :: Proxy (LTree Char), (Proxy :: Proxy (RTree Char), ())))
        st0 = spine :: LRTree Char :-: LTree Char :-! RTree Char
        st1 = spine :: LRTree :-: LTree :-! RTree
    in coerceWith st0 lrTree == SBr (SBr SEm 'b' SEm) 'a' (SBr SEm 'b' SEm) &&
       coerceWith st1 lrTree == SBr (SBr SEm 'b' SEm) 'a' (SBr SEm 'b' SEm)
-- These demonstrate the need for parameter-agnostic spine elements: our type
-- is recursive, with the parameters flip-flopping. Lots of other examples.
data Simple2Tree a b = S2Br (Simple2Tree b a) a b (Simple2Tree b a)
                     | S2Em
                     deriving (Eq,Show)
deriveShapely ''Simple2Tree
data LR2Tree a b = LR2Top (L2Tree b a) a b (R2Tree b a)
                 | LR2TopEm
data L2Tree a b = L2Br (L2Tree b a) a b (R2Tree b a)
                | L2Em
data R2Tree a b = R2Br (L2Tree b a) a b (R2Tree b a)
                | R2Em
fmap Prelude.concat $ forM [''LR2Tree , ''L2Tree , ''R2Tree ] deriveShapely
-- test deeper recursive structure:
prop_smoketest_th_rec_multi_parameter_agnostic =
    let lrTree = LR2Top (L2Br (L2Br L2Em 'c' True R2Em) False 'b' R2Em) 'a' True (R2Br L2Em False 'b' R2Em)
        st = spine :: LR2Tree :-: L2Tree :-! R2Tree
        -- this avoids enumerating a/b, b/a variants for all types:
        -- st = spine :: LR2Tree Char Bool :-: L2Tree Char Bool :-: R2Tree Char Bool :-:
        --              LR2Tree Bool Char :-: L2Tree Bool Char :-! R2Tree Bool Char
    in coerceWith st lrTree == S2Br (S2Br (S2Br S2Em 'c' True S2Em) False 'b' S2Em) 'a' True (S2Br S2Em False 'b' S2Em)
-- 'coerce' should handle regular recursion with parameter shuffling, because
-- it uses Unapplied:
data RegRecParams1 a b = RRPCons1 a b (RegRecParams1 b a) | RRPNil1 deriving (Eq,Show)
data RegRecParams2 a b = RRPCons2 a b (RegRecParams2 b a) | RRPNil2 deriving (Eq,Show)
fmap Prelude.concat $ forM [''RegRecParams1, ''RegRecParams2] deriveShapely
prop_smoketest_th_rec_reg_param_swapping_coerce =
    (coerce $ RRPCons1 'a' True RRPNil1) == RRPCons2 'a' True RRPNil2
-- Coercing a type to itself must be the identity.
coerce_recursive_self :: [Char]
coerce_recursive_self = coerce "where the instance shows source/target term equality, and equality in outer constructors"
-- exercise coerce with recursive Functor type application
data OurTree a = OurNode a (OurForest a) deriving (Eq, Functor, Show)
data OurForest a = OurEmptyForest | OurForestCons (OurTree a) (OurForest a)
    deriving (Eq, Functor, Show) -- really a list
fmap Prelude.concat $ forM [''Tree, ''OurTree, ''OurForest] deriveShapely
ourTree = OurNode 'a' (OurForestCons (OurNode 'b' OurEmptyForest) (OurForestCons (OurNode 'c' OurEmptyForest) OurEmptyForest))
theirTree = Node 'a' ( [ Node 'b' [] , Node 'c' [] ])
-- Round-trip between Data.Tree-style Node/[] and OurTree/OurForest.
prop_smoketest_coerceWith_type_application = coerceWith (spine :: OurTree :-! OurForest) ourTree == theirTree &&
                                             coerceWith (spine :: [] :-! Tree) theirTree == ourTree
{- TODO WE WOULD LIKE TO SUPPORT THIS:
- where we need Shapely of OurForest to inline the newtype wrapper
data OurTree a = OurNode a (OurForest a) deriving (Functor, Show)
newtype OurForest a = OurForest [OurTree a] deriving ( Functor, Show)
fmap Prelude.concat $ forM [''Tree, ''OurTree, ''OurForest] deriveShapely
ourTree = OurNode 'a' (OurForest [OurNode 'b' (OurForest []) , OurNode 'c' (OurForest []) ])
theirTree = Node 'a' ( [ Node 'b' [] , Node 'c' [] ])
-}
-- Sanity check: terms under Functor applications (Maybe, []) survive coerce
-- between the two structurally identical types.
data WithFunctorTerm1 = WFT1 (Maybe WithFunctorTerm1) (Maybe [Int]) deriving Eq
data WithFunctorTerm2 = WFT2 (Maybe WithFunctorTerm2) (Maybe [Int]) deriving Eq
fmap Prelude.concat $ forM [''WithFunctorTerm1, ''WithFunctorTerm2] deriveShapely
prop_smoketest_functor_term_sanity = coerce (WFT1 Nothing $ Just [1..3]) == (WFT2 Nothing $ Just [1..3])
-- TODO POLYMORPHISM/INFERRENCE-PRESERVING STUFF WE MIGHT LIKE TO SUPPORT SOMEHOW
-- ------------------------------
{-
prop_smoketest_th_rec_reg_param_swapping_coerce =
(coerce RRPNil1) == (RRPNil2 :: RegRecParams2 Char Bool)
prop_smoketest_th_rec_reg_poly_param_swapping_coerce =
let (x,y) = (RRPNil1,coerce x) :: (RegRecParams1 a b, RegRecParams2 a b)
in y == RRPNil2
-- But note: this is also (at the top level) ambiguous:
-- foo = RRPNil2 == RRPNil2
th_rec_reg_poly_param_swapping_coerce :: (RegRecParams1 a b, RegRecParams2 a b)
th_rec_reg_poly_param_swapping_coerce =
let (x,y) = (RRPNil1, coerce x)
in (x,y)
-- if we can make FactorPrefix look like:
-- class (Product ab)=> FactorPrefix ab abcs cs | ab abcs -> cs, ab cs -> abcs, abcs cs -> ab where
-- we'd get better inferrence, supporting:
prop_smoketest_factorPrefix2 = ( ('a',(True,())) , (Left ('b',())) :: Either (Char,()) () ) ==
(factorPrefix (Left ('a',(True,('b',()))) ))
prop_smoketest_toList2 = ( toList $ toFList () ) == []
-- currently we need: _4th `asLength` as
fanin (1,(2,(3,(4,())))) _4th
-- we'd like this type to be inferable (AGAIN TECHNICALLY POSSIBLE WITH CLOSED TYPE FAMILIES)
prop_smoketest_fanout_prod = fanout (head,(tail,(Prelude.length,()))) [1..3] == (1,([2,3],(3,())))
-}
-- ---------------------------------------------------------------------------
{-
-- TO THINK ABOUT, when doing inlining, deeper structure on next version:
-- these are old notes
newtype Strange0 a = Strange0 (Either a (Strange0 a))
-- must pass `Strange0` as recursive target.
newtype Strange1 = Strange1 [Strange1]
-- e.g. (S1 []) : (S1 [ S1 [], S1 [] ]) : []
-- Either () (AlsoNormal Strange1, (AlsoNormal [Strange1], ()))
-- we take normal form from argument [Strange1]:
-- Either () (Strange1,([Strange1],()))
-- ...but pass along *both* the newtype and inner wrapped type as recursion candidates
data OddTree a rt = OddBranch (OddTree a rt) a rt | OddLeaf
newtype Strange3 a = Strange3 (OddTree a (Strange3 a))
-- Either (AlsoNormal (OddTree a (Strange3 a)), (a, (AlsoNormal (Strange3 a), ()))) ()
-- (this is the same as Strange1)
newtype Strange4 = Strange4 ([Either Strange4 Int]) -- a strange rose tree
-- we have a mutually-recursive structure, but where recursive subterms are not at top-level, same as:
data Strange4' = Cons4' (Either Strange4' Int) Strange4' | Empty4'
-- Either (Either (AlsoNormal (Strange4')) (Int,()) , (AlsoNormal Strange4', ())) ()
-- \ ____________________________________ /
-- Normal (Either Strange4' Int)
--
-- We can't wrap in AlsoNormal, because an instance AlsoNormal (Either
-- Strange4' Int) would overlap . But if that Either was a type we didn't have
-- a Shapely instance for, we'd need to generate it. But we'd in turn need to
-- generate the instance for the newtype-wrapped type, since we need its
-- recursive Strange4' term bound. So...
--
-- A different approach seems in order:
-- - reify all *exposed* types on the RHS of type declaration
-- - add AlsoNormal wrappers everywhere necessary to break cycles
-- this might mean doing AlsoNormal [Foo] but keeping [Bar]
--
-- Or maybe transform to a "flat" type first? by running an `mconcat`, e.g.
-- Strange4' becomes:
-- data Strange4' = Cons4'A Strange4' Strange4'
-- | Cons4'B Int Strange4'
-- | Empty4'
-- And `data Bar a= Bar Int ((a,Char) , Int)` becomes:
-- data Bar a = Bar Int a Char Int
--
-- But then how de we differentiate between an Int term (which we shouldn't try
-- to "unpack") and a Foo term? Just if it has arguments or not?
--
-- Perhaps look at other generics libraries and see what they do.
-}
|
jberryman/shapely-data
|
test/Main.hs
|
bsd-3-clause
| 18,518
| 0
| 24
| 3,930
| 5,235
| 2,966
| 2,269
| 210
| 3
|
module Language.Deb where
-- | Untyped lambda terms with de Bruijn indices, plus @let@.
data Expr =
    VarE Int
  | LamE Expr
  | AppE Expr Expr
  | LetE Expr Expr

-- | Render a term in abstract-binding-tree notation, e.g.
-- @app(1; lam(.0))@.  A leading '.' marks a binder's scope.
instance Show Expr where
  show (VarE n)      = show n
  show (LamE body)   = "lam(." ++ show body ++ ")"
  show (AppE fn arg) = "app(" ++ show fn ++ "; " ++ show arg ++ ")"
  show (LetE rhs bd) = "let(" ++ show rhs ++ "; ." ++ show bd ++ ")"
|
lambdageek/small
|
src/Language/Deb.hs
|
bsd-3-clause
| 344
| 0
| 13
| 115
| 152
| 74
| 78
| 13
| 0
|
{-# LANGUAGE GADTs #-}
-- |
-- Module : Data.StableTree.Properties
-- Copyright : Jeremy Groven
-- License : BSD3
--
-- Various functions for getting interested data about 'StableTree's and
-- 'Tree's.
module Data.StableTree.Properties
( getKey
, completeKey
, size
, lookup
, keys
, elems
, assocs
, treeContents
, toMap
, stableChildren
, bottomChildren
, branchChildren
, selectNode
) where
import qualified Data.StableTree.Key as Key
import Data.StableTree.Types
import qualified Data.Map as Map
import Control.Arrow ( second )
import Data.Map ( Map )
import Prelude hiding ( lookup )
-- |Get the key of the first entry in this branch. If the branch is empty,
-- returns Nothing.
--
-- Only @IBottom0 Nothing@ represents an empty tree; every other
-- constructor carries at least one key.
getKey :: Tree d c k v -> Maybe k
getKey (Bottom (k,_) _ _ _) = Just $ Key.unwrap k
getKey (IBottom0 Nothing) = Nothing
getKey (IBottom0 (Just (k,_))) = Just $ Key.unwrap k
getKey (IBottom1 (k,_) _ _) = Just $ Key.unwrap k
getKey (Branch _ (k,_,_) _ _ _) = Just $ Key.unwrap k
getKey (IBranch0 _ (k,_,_)) = Just $ Key.unwrap k
getKey (IBranch1 _ (k,_,_) _) = Just $ Key.unwrap k
getKey (IBranch2 _ (k,_,_) _ _ _) = Just $ Key.unwrap k
-- |Get the key of the first entry in this complete branch. This function is
-- total.
--
-- The @Complete@ index restricts callers to the two constructors matched
-- here, which is why no other cases are needed.
completeKey :: Tree d Complete k v -> k
completeKey (Bottom (k,_) _ _ _) = Key.unwrap k
completeKey (Branch _ (k,_,_) _ _ _) = Key.unwrap k
-- |Get the total number of k/v pairs in the tree
size :: StableTree k v -> ValueCount
size = getValueCount
-- |Get the value associated with the given key, or Nothing if there is no
-- value for the key.
--
-- At bottom levels the key is looked up directly in the leaf map; at
-- branch levels 'selectNode' picks the child that could hold the key and
-- the search recurses one depth level down.
lookup :: Ord k => k -> StableTree k v -> Maybe v
lookup key tree =
  case tree of
    StableTree_I i -> lookup' key i
    StableTree_C c -> lookup' key c
  where
    lookup' :: Ord k => k -> Tree d c k v -> Maybe v
    lookup' k t =
      case t of
        Bottom _ _ _ _     -> Map.lookup k $ bottomChildren t
        IBottom0 _         -> Map.lookup k $ bottomChildren t
        IBottom1 _ _ _     -> Map.lookup k $ bottomChildren t
        Branch _ _ _ _ _   -> lookup'' k t
        IBranch0 _ _       -> lookup'' k t
        IBranch1 _ _ _     -> lookup'' k t
        IBranch2 _ _ _ _ _ -> lookup'' k t
    -- The case split above refines the depth index to (S d), which is what
    -- lets lookup'' call 'selectNode' (branch-only) safely.
    lookup'' :: Ord k => k -> Tree (S d) c k v -> Maybe v
    lookup'' k t =
      case selectNode k t of
        Left (_, inc)         -> lookup' k inc
        Right (_, comp, _, _) -> lookup' k comp
-- |Get the keys in the map
keys :: Ord k => StableTree k v -> [k]
keys = map fst . assocs
-- |Get the elements stored in the map
elems :: Ord k => StableTree k v -> [v]
elems = map snd . assocs
-- |Get the key/value pairs in the map
--
-- Pairs are produced by an in-order walk: complete children (in ascending
-- key order) first, then the trailing incomplete child, if any.
assocs :: Ord k => StableTree k v -> [(k, v)]
assocs tree =
  case tree of
    StableTree_I i -> assocs' i
    StableTree_C c -> assocs' c
  where
    assocs' :: Ord k => Tree d c k v -> [(k, v)]
    assocs' t =
      case t of
        Bottom _ _ _ _     -> Map.assocs $ bottomChildren t
        IBottom0 _         -> Map.assocs $ bottomChildren t
        IBottom1 _ _ _     -> Map.assocs $ bottomChildren t
        Branch _ _ _ _ _   -> assocs'' t
        IBranch0 _ _       -> assocs'' t
        IBranch1 _ _ _     -> assocs'' t
        IBranch2 _ _ _ _ _ -> assocs'' t
    assocs'' :: Ord k => Tree (S d) c k v -> [(k, v)]
    assocs'' t =
      let (completes, mincomplete) = branchChildren t
          ckeys = concat [assocs' ct | (_, ct) <- Map.elems completes]
          ikeys = case mincomplete of
                    Nothing -> []
                    Just (_, _, it) -> assocs' it
      in ckeys ++ ikeys
-- |Convert an entire Tree into a k/v map.
--
-- Bottoms are converted directly; branches union the contents of all
-- their children (including the trailing incomplete child, if present).
treeContents :: Ord k => Tree d c k v -> Map k v
treeContents t =
  case t of
    (Bottom _ _ _ _)     -> bottomChildren t
    (IBottom0 _)         -> bottomChildren t
    (IBottom1 _ _ _)     -> bottomChildren t
    (Branch _ _ _ _ _)   -> recur $ branchChildren t
    (IBranch0 _ _)       -> recur $ branchChildren t
    (IBranch1 _ _ _)     -> recur $ branchChildren t
    (IBranch2 _ _ _ _ _) -> recur $ branchChildren t
  where
    recur :: Ord k
          => ( Map k (ValueCount, Tree d Complete k v)
             , Maybe (k, ValueCount, Tree d Incomplete k v))
          -> Map k v
    recur x =
      case x of
        ( completes, Nothing) ->
          Map.unions $ map (treeContents . snd) $ Map.elems completes
        ( completes, Just (_k, _c, iv)) ->
          Map.unions $ treeContents iv:map (treeContents . snd) (Map.elems completes)
-- |Convert a 'StableTree' into a normal key/value Map
toMap :: Ord k => StableTree k v -> Map k v
toMap (StableTree_I i) = treeContents i
toMap (StableTree_C c) = treeContents c
-- |Either get the StableTree "children" of a 'StableTree', or get the
-- key/value map if the tree is already a bottom.
--
-- Left is returned for bottoms (leaf key/value pairs); Right is returned
-- for branches, wrapping each child back up as a 'StableTree'.
stableChildren :: Ord k
               => StableTree k v
               -> Either (Map k v) (Map k (ValueCount, StableTree k v))
stableChildren tree =
  case tree of
    StableTree_I i -> stableChildren' i
    StableTree_C c -> stableChildren' c
  where
    stableChildren' :: Ord k
                    => Tree d c k v
                    -> Either (Map k v) (Map k (ValueCount, StableTree k v))
    stableChildren' t =
      case t of
        (Bottom _ _ _ _)     -> Left $ bottomChildren t
        (IBottom0 _)         -> Left $ bottomChildren t
        (IBottom1 _ _ _)     -> Left $ bottomChildren t
        (Branch _ _ _ _ _)   -> Right $ branchChildren' t
        (IBranch0 _ _)       -> Right $ branchChildren' t
        (IBranch1 _ _ _)     -> Right $ branchChildren' t
        (IBranch2 _ _ _ _ _) -> Right $ branchChildren' t
    -- Re-wrap raw children: complete subtrees as StableTree_C, and the
    -- optional trailing incomplete subtree as StableTree_I.
    branchChildren' :: Ord k
                    => Tree (S d) c k v
                    -> Map k (ValueCount, StableTree k v)
    branchChildren' t =
      let (compMap, minc) = branchChildren t
          stableMap = Map.map (second StableTree_C) compMap
          fullMap = case minc of
                      Nothing ->
                        stableMap
                      Just (k, c, i) ->
                        Map.insert k (c, StableTree_I i) stableMap
      in fullMap
-- |Non-recursive function to simply get the immediate children of the given
-- branch. This will either give the key/value map of a Bottom, or the key/tree
-- map of a non-bottom branch.
--
-- The @Z@ depth index limits this to bottom constructors; the first two
-- pairs and the terminal pair are folded into the map alongside @terms@.
bottomChildren :: Ord k
               => Tree Z c k v
               -> Map k v
bottomChildren (Bottom (k1,v1) (k2,v2) terms (kt,vt)) =
  let terms' = Map.mapKeys Key.fromKey terms
      conts  = Map.insert (Key.unwrap k1) v1
             $ Map.insert (Key.unwrap k2) v2
             $ Map.insert (Key.fromKey kt) vt
             terms'
  in conts
bottomChildren (IBottom0 Nothing) =
  Map.empty
bottomChildren (IBottom0 (Just (k,v))) =
  Map.singleton (Key.unwrap k) v
bottomChildren (IBottom1 (k1,v1) (k2,v2) terms) =
  let terms' = Map.mapKeys Key.fromKey terms
      conts  = Map.insert (Key.unwrap k1) v1
             $ Map.insert (Key.unwrap k2) v2
             terms'
  in conts
-- |Get the 'Tree's stored under the given Tree. The Tree type prevents this
-- function from being called on bottom Trees.
--
-- Returns the complete children keyed by their first key, plus the
-- optional trailing incomplete child with its first key and count.
branchChildren :: Ord k
               => Tree (S d) c k v
               -> ( Map k (ValueCount, Tree d Complete k v)
                  , Maybe (k, ValueCount, Tree d Incomplete k v))
branchChildren (Branch _d (k1,c1,v1) (k2,c2,v2) terms (kt,ct,vt)) =
  let terms' = Map.mapKeys Key.fromKey terms
      conts  = Map.insert (Key.unwrap k1) (c1,v1)
             $ Map.insert (Key.unwrap k2) (c2,v2)
             $ Map.insert (Key.fromKey kt) (ct,vt)
             terms'
  in (conts, Nothing)
branchChildren (IBranch0 _d (ik,ic,iv)) =
  (Map.empty, Just (Key.unwrap ik, ic, iv))
branchChildren (IBranch1 _d (k1,c1,v1) mIncomplete) =
  ( Map.singleton (Key.unwrap k1) (c1,v1)
  , mIncomplete >>= (\(k,c,v) -> return (Key.unwrap k,c,v)))
branchChildren (IBranch2 _d (k1,c1,v1) (k2,c2,v2) terms mIncomplete) =
  let terms' = Map.mapKeys Key.fromKey terms
      conts  = Map.insert (Key.unwrap k1) (c1,v1)
             $ Map.insert (Key.unwrap k2) (c2,v2)
             terms'
  in (conts, mIncomplete >>= \(k,c,v) -> return (Key.unwrap k, c, v))
-- |Choose the child node most likely to hold the given key. If this returns
-- Left, then the chosen node is the Incomplete node. In the Right case, the
-- sole Complete node is the best node. The Complete nodes in the first slot of
-- the quad are the nodes that came before the chosen node, while the nodes in
-- the third slot are the nodes that came after. This is useful for changing a
-- specific node, and then patching things back together with the
-- `Data.StableTree.Build.merge` function.
--
-- The case alternatives below are order-sensitive: the guarded incomplete
-- case must be tried before the generic Right cases, and its guard failing
-- falls through to "key goes with the last complete node".
selectNode :: Ord k
           => k
           -> Tree (S d) c k v
           -> Either ( [Tree d Complete k v], Tree d Incomplete k v )
                     ( [Tree d Complete k v], Tree d Complete k v
                     , [Tree d Complete k v], Maybe (Tree d Incomplete k v) )
selectNode key branch =
  let (completes, minc) = branchChildren branch
      pairs  = Map.toAscList completes
      minc_t = Prelude.fmap (\(_, _, t) -> t) minc
      test   = \(k, _) -> k <= key
      -- begin_k is every tree whose lowest key is leq to the given key
      (begin_k, after_k) = span test pairs
      begin = [ t | (_, (_, t)) <- begin_k ]
      after = [ t | (_, (_, t)) <- after_k ]
  in case (reverse begin, after, minc) of
      ([], [], Nothing) -> -- empty branch
        error "this is totally unreachable. branches are _not_ empty"
      ([], [], Just (_, _, i)) -> -- only choice is the incomplete
        Left ([], i)
      (_, [], Just (k, _, t)) | k <= key -> -- key goes with the incomplete
        Left (begin, t)
      ([], t:rest, _) -> -- key is before everything
        Right ([], t, rest, minc_t)
      (t:rest, _, _) -> -- key goes with "t"
        Right (reverse rest, t, after, minc_t)
|
tsuraan/stable-tree
|
src/Data/StableTree/Properties.hs
|
bsd-3-clause
| 9,851
| 0
| 16
| 2,994
| 3,585
| 1,857
| 1,728
| 203
| 9
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE PostfixOperators #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
--------------------------------------------------------------------------------
-- UART client example, corresponding to
-- smaccm/models/Trusted_Build_Test/test_uart_active2.
--
-- (c) 2015 Galois, Inc.
--
--------------------------------------------------------------------------------
module Main where
import Ivory.Language
import Ivory.Stdlib
import Ivory.Tower
import Tower.AADL
import qualified Ivory.Tower.HAL.Bus.Interface as I
import Tower.Odroid.UART
--------------------------------------------------------------------------------
-- | UART exercise: a periodic "sender" monitor transmits five copies of a
-- rolling A..Z character, and a "receiver" monitor echoes any input bytes.
testSerial :: Tower e ()
testSerial = do
  towerModule towerDepModule
  towerDepends towerDepModule
  per <- period (2000`ms`)
  -- Driver wrapper
  (b, o) <- uartTower
  monitor "sender" $ do
    c <- stateInit "charState" (ival 65) -- 'A'
    packet <- stateInit "packet" (izero :: Init UartPacket)
    handler per "periodicHandler" $ do
      e <- emitter (I.backpressureTransmit b) 1 -- Send to wrapper
      callback $ \_msg -> do
        -- Fill the first 5 bytes of the packet with the current character,
        -- wrapping the character back to 'A' after 'Z'.
        for 5 $ \ix -> do
          let arr = packet ~> stringDataL
          c' <- deref c
          store (arr!ix) c'
          call_ printf2 "Sending code: 0x%x --> %c\n" c' c'
          ifte_ (c' >? 90) -- 'Z'
            (store c 65)
            (c += 1)
        store (packet ~> stringLengthL) 5
        call_ printf0 "Sent!\n"
        emit e (constRef packet)
    handler (I.backpressureComplete b) "resp" $ do
      callback $ \_msg -> do
        call_ printf0 "Received response.\n"
  monitor "receiver" $ do
    handler o "receiverHandler" $ do
      callback $ \msg -> do -- Receive from wrapper
        -- Print each valid byte of the incoming packet.
        len <- msg ~>* stringLengthL
        let d = msg ~> stringDataL
        arrayMap $ \ix -> do
          when (fromIx ix <? len) $
            call_ printf1 "Received input: %c\n" =<< deref (d!ix)
--------------------------------------------------------------------------------
-- Compiler
-- | Build entry point: compile 'testSerial' to AADL using the UART config.
main :: IO ()
main = compileTowerAADL id p testSerial
  where
  p _ = return uartConfig
--------------------------------------------------------------------------------
-- Helpers
-- Imported C printf wrappers at three arities (Ivory quasiquoter).
[ivory|
import (stdio.h, printf) void printf0(string x)
import (stdio.h, printf) void printf1(string x, uint8_t y)
import (stdio.h, printf) void printf2(string x, uint8_t y, uint8_t z)
|]
-- | Module collecting the external printf symbols and the UART driver
-- dependency so generated code links against them.
towerDepModule :: Module
towerDepModule = package "towerDeps" $ do
  incl printf0
  incl printf1
  incl printf2
  depend uartModule
|
GaloisInc/tower-camkes-odroid
|
test/SerialTest.hs
|
bsd-3-clause
| 2,741
| 0
| 26
| 578
| 585
| 294
| 291
| 61
| 1
|
module PackageDBsSpec (main, spec) where
import Prelude ()
import Prelude.Compat
import qualified Control.Exception as E
import Data.List (intercalate)
import PackageDBs
import System.Directory (getCurrentDirectory, setCurrentDirectory)
import System.Environment.Compat
import System.FilePath (searchPathSeparator)
import Test.Hspec
-- | Test entry point: run the full 'spec' under hspec.
main :: IO ()
main = hspec spec
-- | Run an action with the working directory temporarily changed to
-- @workingDir@, restoring the original directory afterwards (even if the
-- action throws).
withCurrentDirectory :: FilePath -> IO a -> IO a
withCurrentDirectory workingDir action =
  E.bracket getCurrentDirectory setCurrentDirectory $ \_ ->
    setCurrentDirectory workingDir >> action
-- | Run an action with environment variable @name@ temporarily set to
-- @value@; the previous value (or absence) is restored afterwards, even on
-- exceptions.
withEnv :: String -> String -> IO a -> IO a
withEnv name value action = E.bracket saved restore (const body)
  where
    body    = setEnv name value >> action
    saved   = lookup name <$> getEnvironment
    restore = maybe (unsetEnv name) (setEnv name)
-- | Neutralize every environment variable that influences package-db
-- discovery by setting each to the empty string for the duration of the
-- action.
clearEnv :: IO a -> IO a
clearEnv inner =
  withEnv "GHC_PACKAGE_PATH" "" $
    withEnv "HASKELL_PACKAGE_SANDBOX" "" $
      withEnv "HASKELL_PACKAGE_SANDBOXES" "" inner
-- | Join directories with the platform's search-path separator
-- (':' on POSIX, ';' on Windows).
combineDirs :: [FilePath] -> String
combineDirs dirs = intercalate [searchPathSeparator] dirs
-- | Behavior of package-db discovery: a trailing separator in
-- GHC_PACKAGE_PATH keeps the user db enabled, the HASKELL_PACKAGE_SANDBOX*
-- variables take precedence over GHC_PACKAGE_PATH, and a cabal sandbox
-- config is picked up from the working directory unless env overrides it.
spec :: Spec
spec = do
    describe "getPackageDBsFromEnv" $ do
        it "uses global and user when no env or sandboxing used" $
            withCurrentDirectory "test" $ clearEnv $ do
                dbs <- getPackageDBsFromEnv
                dbs `shouldBe` PackageDBs True True []
        it "respects GHC_PACKAGE_PATH" $
            withCurrentDirectory "test" $ clearEnv
              $ withEnv "GHC_PACKAGE_PATH" (combineDirs ["foo", "bar", ""]) $ do
                dbs <- getPackageDBsFromEnv
                dbs `shouldBe` PackageDBs False True ["foo", "bar"]
        it "HASKELL_PACKAGE_SANDBOXES trumps GHC_PACKAGE_PATH" $
            withCurrentDirectory "test" $ clearEnv
              $ withEnv "GHC_PACKAGE_PATH" (combineDirs ["foo1", "bar1", ""])
              $ withEnv "HASKELL_PACKAGE_SANDBOXES" (combineDirs ["foo2", "bar2", ""]) $ do
                dbs <- getPackageDBsFromEnv
                dbs `shouldBe` PackageDBs False True ["foo2", "bar2"]
        it "HASKELL_PACKAGE_SANDBOX trumps GHC_PACKAGE_PATH" $
            withCurrentDirectory "test" $ clearEnv
              $ withEnv "GHC_PACKAGE_PATH" (combineDirs ["foo1", "bar1", ""])
              $ withEnv "HASKELL_PACKAGE_SANDBOX" (combineDirs ["foo2"]) $ do
                dbs <- getPackageDBsFromEnv
                dbs `shouldBe` PackageDBs True True ["foo2"]
        it "respects cabal sandboxes" $
            withCurrentDirectory "test/sandbox" $ clearEnv $ do
                dbs <- getPackageDBsFromEnv
                dbs `shouldBe` PackageDBs False True ["/home/me/doctest-haskell/.cabal-sandbox/i386-osx-ghc-7.6.3-packages.conf.d"]
        it "env trumps cabal sandboxes" $
            withCurrentDirectory "test/sandbox" $ clearEnv
              $ withEnv "GHC_PACKAGE_PATH" (combineDirs ["foo", "bar"]) $ do
                dbs <- getPackageDBsFromEnv
                dbs `shouldBe` PackageDBs False False ["foo", "bar"]
|
ekmett/doctest
|
test/PackageDBsSpec.hs
|
mit
| 2,873
| 0
| 16
| 681
| 763
| 385
| 378
| 61
| 1
|
module B1.Graphics.Rendering.OpenGL.Shapes
( opaqueBubble
) where
import Graphics.Rendering.OpenGL
import B1.Graphics.Rendering.OpenGL.Utils
-- | Draw a filled, bordered "bubble" (a rectangle with two clipped
-- corners).  Blending is disabled while drawing so fill and border are
-- fully opaque, then re-enabled for subsequent rendering.
opaqueBubble :: GLfloat -> GLfloat -> GLfloat
    -> Color4 GLfloat -> Color4 GLfloat -> IO ()
opaqueBubble width height padding fillColor borderColor = do
  blend $= Disabled
  color fillColor >> fillRectangle width height padding
  color borderColor >> drawRectangle width height padding
  blend $= Enabled
-- | Filled bubble body: the clipped rectangle rendered as a 'Polygon'.
fillRectangle :: GLfloat -> GLfloat -> GLfloat -> IO ()
fillRectangle = renderRectangle Polygon
-- | Bubble outline: the clipped rectangle rendered as a 'LineLoop'.
drawRectangle :: GLfloat -> GLfloat -> GLfloat -> IO ()
drawRectangle = renderRectangle LineLoop
-- | Emit the six vertices of a rectangle centered on the origin with its
-- top-right and bottom-left corners clipped by @padding@.  The vertex
-- order matters: it traces the outline continuously so 'LineLoop' and
-- 'Polygon' both render correctly.
renderRectangle :: PrimitiveMode -> GLfloat -> GLfloat -> GLfloat -> IO ()
renderRectangle primitiveMode width height padding =
  renderPrimitive primitiveMode $ do
    vertex $ vertex2 left top
    vertex $ vertex2 (right - padding) top
    vertex $ vertex2 right (top - padding)
    vertex $ vertex2 right bottom
    vertex $ vertex2 (left + padding) bottom
    vertex $ vertex2 left (bottom + padding)
  where
    halfWidth = width / 2
    halfHeight = height / 2
    left = -halfWidth
    right = halfWidth
    top = halfHeight
    bottom = -halfHeight
|
madjestic/b1
|
src/B1/Graphics/Rendering/OpenGL/Shapes.hs
|
bsd-3-clause
| 1,211
| 0
| 11
| 240
| 379
| 189
| 190
| 30
| 1
|
{-# LANGUAGE DefaultSignatures, FlexibleInstances, FlexibleContexts #-}
------------------------------------------------------------------------------
-- |
-- Module: Database.PostgreSQL.Simple.ToRow
-- Copyright: (c) 2011 MailRank, Inc.
-- (c) 2011-2012 Leon P Smith
-- License: BSD3
-- Maintainer: Leon P Smith <leon@melding-monads.com>
-- Stability: experimental
--
-- The 'ToRow' typeclass, for rendering a collection of
-- parameters to a SQL query.
--
-- Predefined instances are provided for tuples containing up to ten
-- elements.
--
------------------------------------------------------------------------------
module Database.PostgreSQL.Simple.ToRow
(
ToRow(..)
) where
import Database.PostgreSQL.Simple.ToField (Action(..), ToField(..))
import Database.PostgreSQL.Simple.Types (Only(..), (:.)(..))
import GHC.Generics
-- | A collection type that can be turned into a list of rendering
-- 'Action's.
--
-- Instances should use the 'toField' method of the 'ToField' class
-- to perform conversion of each element of the collection.
class ToRow a where
    toRow :: a -> [Action]
    -- Default implementation derives 'toRow' structurally via
    -- "GHC.Generics" (see the GToRow class below).
    default toRow :: (Generic a, GToRow (Rep a)) => a -> [Action]
    toRow = gtoRow . from
-- Tuple instances up to 10 elements: each field is rendered with
-- 'toField' in positional order.
instance ToRow () where
    toRow _ = []
instance (ToField a) => ToRow (Only a) where
    toRow (Only v) = [toField v]
instance (ToField a, ToField b) => ToRow (a,b) where
    toRow (a,b) = [toField a, toField b]
instance (ToField a, ToField b, ToField c) => ToRow (a,b,c) where
    toRow (a,b,c) = [toField a, toField b, toField c]
instance (ToField a, ToField b, ToField c, ToField d) => ToRow (a,b,c,d) where
    toRow (a,b,c,d) = [toField a, toField b, toField c, toField d]
instance (ToField a, ToField b, ToField c, ToField d, ToField e)
    => ToRow (a,b,c,d,e) where
    toRow (a,b,c,d,e) =
        [toField a, toField b, toField c, toField d, toField e]
instance (ToField a, ToField b, ToField c, ToField d, ToField e, ToField f)
    => ToRow (a,b,c,d,e,f) where
    toRow (a,b,c,d,e,f) =
        [toField a, toField b, toField c, toField d, toField e, toField f]
instance (ToField a, ToField b, ToField c, ToField d, ToField e, ToField f,
          ToField g)
    => ToRow (a,b,c,d,e,f,g) where
    toRow (a,b,c,d,e,f,g) =
        [toField a, toField b, toField c, toField d, toField e, toField f,
         toField g]
instance (ToField a, ToField b, ToField c, ToField d, ToField e, ToField f,
          ToField g, ToField h)
    => ToRow (a,b,c,d,e,f,g,h) where
    toRow (a,b,c,d,e,f,g,h) =
        [toField a, toField b, toField c, toField d, toField e, toField f,
         toField g, toField h]
instance (ToField a, ToField b, ToField c, ToField d, ToField e, ToField f,
          ToField g, ToField h, ToField i)
    => ToRow (a,b,c,d,e,f,g,h,i) where
    toRow (a,b,c,d,e,f,g,h,i) =
        [toField a, toField b, toField c, toField d, toField e, toField f,
         toField g, toField h, toField i]
instance (ToField a, ToField b, ToField c, ToField d, ToField e, ToField f,
          ToField g, ToField h, ToField i, ToField j)
    => ToRow (a,b,c,d,e,f,g,h,i,j) where
    toRow (a,b,c,d,e,f,g,h,i,j) =
        [toField a, toField b, toField c, toField d, toField e, toField f,
         toField g, toField h, toField i, toField j]
-- Homogeneous lists: one Action per element.
instance (ToField a) => ToRow [a] where
    toRow = map toField
-- (:.) concatenates two rows, left before right.
instance (ToRow a, ToRow b) => ToRow (a :. b) where
    toRow (a :. b) = toRow a ++ toRow b
-- Type class for default implementation of ToRow using generics
--
-- The instances walk a generic representation: metadata (M1) is skipped,
-- products (:*:) are concatenated left-to-right, record fields (K1) are
-- rendered with 'toField', and the unit representation (U1) is empty.
class GToRow f where
    gtoRow :: f p -> [Action]
instance GToRow f => GToRow (M1 c i f) where
    gtoRow (M1 x) = gtoRow x
instance (GToRow f, GToRow g) => GToRow (f :*: g) where
    gtoRow (f :*: g) = gtoRow f ++ gtoRow g
instance (ToField a) => GToRow (K1 R a) where
    gtoRow (K1 a) = [toField a]
instance GToRow U1 where
    gtoRow _ = []
|
timmytofu/postgresql-simple
|
src/Database/PostgreSQL/Simple/ToRow.hs
|
bsd-3-clause
| 3,902
| 0
| 11
| 876
| 1,632
| 905
| 727
| -1
| -1
|
-- A simple push button calcualtor without operator precedence
module CalcModel (
Number,
Calc,
BinOp, plus, minus, times, divide,
clearCalc, enterDigit, enterDecimalPoint, enterBinOp, evaluate
) where
import Data.Char (isDigit)
import Control.Monad (when)
import Numeric (showGFloat)
-- we could change this to rational
type Number = Double
-- | Full calculator state: the digits being entered, the pending operator,
-- and the running total it will be applied to.
data Calc = Calc {
    number :: [Digit],
    operator :: BinOp,
    total :: Number,
    resetOnNum :: Bool -- a state flag, after pressing '=', if we enter an
  }                    -- operator then we're carrying on the previous
                       -- calculation, otherwise we should start a new one.
data Digit = Digit Int -- in range [0..9]
           | DecimalPoint
  deriving Eq
-- | A binary operator on 'Number's, wrapped so the pending operation can
-- be stored in the calculator state.
data BinOp = BinOp (Number -> Number -> Number)
plus, minus, times, divide :: BinOp
plus = BinOp (+)
minus = BinOp (-)
times = BinOp (*)
divide = BinOp (/)
-- | Initial state: no digits entered, a pending 'plus' onto a zero total
-- (so the first number entered simply becomes the total).
clearCalc :: Calc
clearCalc = Calc {
    number = [],
    operator = plus,
    total = 0,
    resetOnNum = True
  }
-- Maybe for the case when the operation makes no sense
-- | Append a digit (0-9) to the number being entered.  Returns the new
-- display string and state, or Nothing when the input is rejected
-- (out-of-range digit, or a zero as the first digit).
--
-- NOTE(review): leading zeros are rejected outright, so a number such as
-- "0.5" cannot be started by pressing '0' -- confirm this is intended.
enterDigit :: Int -> Calc -> Maybe (String, Calc)
enterDigit digit calc
  | inRange && not leadingZero =
      let newNumber = number calc ++ [Digit digit]
          -- After '=' was pressed, entering a digit starts a fresh
          -- calculation: the running total is cleared.
          calc' | resetOnNum calc = calc { number = newNumber
                                         , total = 0
                                         , resetOnNum = False }
                | otherwise       = calc { number = newNumber }
      in Just (show newNumber, calc')
  | otherwise = Nothing
  where
    inRange     = 0 <= digit && digit <= 9   -- O(1), vs. digit `elem` [0..9]
    leadingZero = null (number calc) && digit == 0
-- | Append a decimal point to the number being entered.  At most one
-- decimal point is allowed per number; a second attempt is rejected.
enterDecimalPoint :: Calc -> Maybe (String, Calc)
enterDecimalPoint calc
  | DecimalPoint `elem` number calc = Nothing
  | resetOnNum calc =
      -- After '=' was pressed, start a fresh calculation.
      Just ( show extended
           , calc { number = extended, total = 0, resetOnNum = False } )
  | otherwise =
      Just (show extended, calc { number = extended })
  where
    extended = number calc ++ [DecimalPoint]
-- | Fold the pending number into the running total using the previously
-- stored operator, then remember the newly chosen operator for next time.
enterBinOp :: BinOp -> Calc -> Maybe (String, Calc)
enterBinOp binop calc = Just (showNumber newTotal, newCalc)
  where
    BinOp apply = operator calc
    newTotal    = apply (total calc) (digitsToNumber (number calc))
    newCalc     = Calc { number = []
                       , operator = binop
                       , total = newTotal
                       , resetOnNum = False
                       }
-- | The '=' key: fold the pending number into the total and display it.
-- Unlike 'enterBinOp', the next operator resets to 'plus' and the
-- resetOnNum flag is set so a following digit starts a new calculation.
evaluate :: Calc -> Maybe (String, Calc)
evaluate calc = Just (showNumber newTotal, newCalc)
  where
    BinOp apply = operator calc
    newTotal    = apply (total calc) (digitsToNumber (number calc))
    newCalc     = Calc { number = []
                       , operator = plus
                       , total = newTotal
                       , resetOnNum = True
                       }
-- Renders digits for display; showList concatenates with no brackets or
-- separators, so e.g. [Digit 1, DecimalPoint, Digit 5] shows as "1.5".
instance Show Digit where
  show (Digit n) = show n
  show DecimalPoint = "."
  showList = showString . concatMap show
-- | Convert the entered digits to a 'Number'.  Empty input is 0; a leading
-- decimal point is padded with a zero and a trailing one is trimmed before
-- the final conversion.
digitsToNumber :: [Digit] -> Number
digitsToNumber [] = 0
digitsToNumber digits@(DecimalPoint:_) = digitsToNumber (Digit 0:digits)
digitsToNumber digits | last digits == DecimalPoint
                      = digitsToNumber (init digits)
                      | otherwise = read (show digits) --CHEAT! parses the rendered display string
-- | Digits of precision to display, or Nothing for as much as possible.
precision :: Maybe Int   -- signature added: was inferred-only
precision = Just 5

-- | Render a result for display, trimming trailing fractional zeros
-- (e.g. "2.50000" -> "2.5", "2.00000" -> "2").
showNumber :: Number -> String
showNumber num
  | '.' `elem` numStr = stripTrailingZeros numStr
  | otherwise         = numStr
  where
    numStr = showGFloat precision num ""
    -- Drop trailing '0's, then a dangling '.', working on the reversed
    -- string.  Total case-match replaces the partial head/tail of the
    -- original; showGFloat always emits a digit before the '.', so the
    -- dropWhile can never consume the whole string anyway.
    stripTrailingZeros s =
      case dropWhile (== '0') (reverse s) of
        '.' : rest -> reverse rest
        rest       -> reverse rest
-- | Interactive console driver for manual testing: reads one keypress per
-- line ('0'-'9', '.', operators, '=' and 'c' to clear; 'q' quits) and
-- echoes the display string after each accepted input.
testProg :: IO ()
testProg = do
    evalLoop clearCalc
  where evalLoop :: Calc -> IO ()
        evalLoop calc = do
          putStr "calc> "
          line <- getLine
          when (line /= "q") $ do
            result <- case line of
                        [digit] | isDigit digit
                            -> return $ enterDigit (read [digit]) calc
                        "." -> return $ enterDecimalPoint calc
                        "+" -> return $ enterBinOp plus calc
                        "-" -> return $ enterBinOp minus calc
                        "*" -> return $ enterBinOp times calc
                        "/" -> return $ enterBinOp divide calc
                        "=" -> return $ evaluate calc
                        "c" -> return $ Just ("0",clearCalc)
                        _   -> do putStrLn "invalid input"
                                  return Nothing
            -- Rejected input leaves the state unchanged.
            case result of
              Nothing -> evalLoop calc
              Just (display, calc') -> do putStrLn display
                                          evalLoop calc'
|
keithodulaigh/Hets
|
glade-0.12.5.0/demo/calc/CalcModel.hs
|
gpl-2.0
| 4,832
| 0
| 21
| 1,848
| 1,343
| 713
| 630
| 121
| 10
|
-- Copyright 2016 TensorFlow authors.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Monad.IO.Class (liftIO)
import Data.Int (Int64)
import Test.Framework (defaultMain, Test)
import TensorFlow.Types (ListOf(..), Scalar(..), (/:/))
import TensorFlow.Ops (scalar)
import TensorFlow.Queue
import TensorFlow.Session
( asyncProdNodes
, build
, run
, runSession
, run_
)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit ((@=?))
import qualified Data.ByteString as BS
-- | Test basic queue behaviors.
-- | Test basic queue behaviors: two enqueue/dequeue round trips on a
-- capacity-1 queue holding an (Int64, ByteString) tuple, checking the
-- fetched values exactly.
testBasic :: Test
testBasic = testCase "testBasic" $ runSession $ do
    q :: Queue [Int64, BS.ByteString] <- build $ makeQueue 1 ""
    run_ =<< enqueue q (42 :/ scalar "Hi" :/ Nil)
    x <- run =<< dequeue q
    liftIO $ (Scalar 42 /:/ Scalar "Hi" /:/ Nil) @=? x
    run_ =<< enqueue q (56 :/ scalar "Bar" :/ Nil)
    y <- run =<< dequeue q
    -- Note: we use explicit "Scalar" here to specify the type that was
    -- fetched. Equivalently we could write
    --   56 /:/ "Bar" /:/ Nil :: List [Scalar Int64, Scalar BS.ByteString]
    -- or else allow the types to be determined by future use of the fetched
    -- value.
    let expected = Scalar 56 /:/ Scalar "Bar" /:/ Nil
    liftIO $ expected @=? y
-- | Test queue pumping.
-- | Test queue pumping: a pre-built enqueue node ("pump") is run twice
-- and the dequeue node is fetched twice; both fetches must see the
-- same pre-bound value.
testPump :: Test
testPump = testCase "testPump" $ runSession $ do
    (deq, pump) <- build $ do
        q :: Queue [Int64, BS.ByteString] <- makeQueue 2 "ThePumpQueue"
        (,) <$> dequeue q
            <*> enqueue q (31 :/ scalar "Baz" :/ Nil)
    -- This is a realistic use. The pump inputs are pre-bound to some
    -- nodes that produce values when pumped (e.g. read from a
    -- file).
    run_ (pump, pump)
    (x, y) <- run (deq, deq)
    let expected = Scalar 31 /:/ Scalar "Baz" /:/ Nil
    liftIO $ expected @=? x
    liftIO $ expected @=? y
-- | Test asynchronous pumping: 'asyncProdNodes' keeps running the
-- enqueue node in the background until the session exits; two
-- dequeues must both observe the pumped value.
testAsync :: Test
testAsync = testCase "testAsync" $ runSession $ do
    (deq, pump) <- do
        q :: Queue [Int64, BS.ByteString] <- makeQueue 2 ""
        (,) <$> dequeue q
            <*> enqueue q (10 :/ scalar "Async" :/ Nil)
    -- Pumps the queue until canceled by runSession exiting.
    asyncProdNodes pump
    -- Picks up a couple values and verifies they are as expected.
    let expected = Scalar 10 /:/ Scalar "Async" /:/ Nil
    run deq >>= liftIO . (expected @=?)
    run deq >>= liftIO . (expected @=?)
-- | Run every queue test in this module.
main :: IO ()
main = defaultMain [testBasic, testPump, testAsync]
|
judah/tensorflow-haskell
|
tensorflow-ops/tests/QueueTest.hs
|
apache-2.0
| 3,114
| 0
| 17
| 743
| 725
| 387
| 338
| 55
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Buffer.TextUnit
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- Working with blocks (units) of text.
--
module Yi.Buffer.TextUnit
( TextUnit(..)
, outsideUnit
, leftBoundaryUnit
, unitWord
, unitViWord
, unitViWORD
, unitViWordAnyBnd
, unitViWORDAnyBnd
, unitViWordOnLine
, unitViWORDOnLine
, unitDelimited
, unitSentence, unitEmacsParagraph, unitParagraph
, isAnySep, unitSep, unitSepThisLine, isWordChar
, moveB, maybeMoveB
, transformB, transposeB
, regionOfB, regionOfNonEmptyB, regionOfPartB
, regionWithTwoMovesB
, regionOfPartNonEmptyB, regionOfPartNonEmptyAtB
, readPrevUnitB, readUnitB
, untilB, doUntilB_, untilB_, whileB, doIfCharB
, atBoundaryB
, numberOfB
, deleteB, genMaybeMoveB
, genMoveB, BoundarySide(..), genAtBoundaryB
, checkPeekB
, halfUnit
, deleteUnitB
) where
import Control.Applicative (Applicative ((<*>)), (<$>))
import Control.Monad (void, when, (<=<))
import Data.Char (GeneralCategory (LineSeparator, ParagraphSeparator, Space),
generalCategory, isAlphaNum, isSeparator, isSpace)
import Data.Typeable (Typeable)
import Yi.Buffer.Basic (Direction (..), Point (Point), mayReverse, reverseDir)
import Yi.Buffer.Misc
import Yi.Buffer.Region
import Yi.Rope (YiString)
import qualified Yi.Rope as R (head, reverse, tail, toString)
-- | Designate a given "unit" of text.
data TextUnit = Character -- ^ a single character
              | Line  -- ^ a line of text (between newlines)
              | VLine -- ^ a "vertical" line of text (area of text between two characters at the same column number)
              | Document -- ^ the whole document
                -- ^ 'GenUnit' below: a unit described by the unit that
                -- encloses it plus a direction-aware boundary test.
              | GenUnit {genEnclosingUnit :: TextUnit,
                         genUnitBoundary :: Direction -> BufferM Bool}
       -- there could be more text units, like Page, Searched, etc. it's probably a good
       -- idea to use GenUnit though.
                deriving Typeable
-- | Invert a unit's boundaries, yielding its "negative": for example
-- @outsideUnit unitViWord@ is the unit of spaces between words. Units
-- without boundaries ('Character', 'Document', ...) are returned
-- unchanged, for a lack of a better definition.
outsideUnit :: TextUnit -> TextUnit
outsideUnit unit = case unit of
  GenUnit enclosing boundary -> GenUnit enclosing (boundary . reverseDir)
  other                      -> other
-- | Common boundary checking function: run the condition on @len@
-- characters in specified direction shifted by specified offset.
genBoundary :: Int -- ^ Offset from current position
            -> Int -- ^ Look-ahead
            -> (YiString -> Bool) -- ^ predicate
            -> Direction -- ^ Direction to look in
            -> BufferM Bool
genBoundary ofs len condition dir = condition <$> peekB
  where
    -- Sample @len@ characters around the offset-shifted point; the
    -- backward sample is reversed so the predicate always reads away
    -- from the point. NOTE(review): @max 0 p + len@ parses as
    -- @(max 0 p) + len@ — presumably intended, but worth confirming
    -- against the clamping behaviour at the buffer start.
    peekB = do
      Point p' <- pointB
      let pt@(Point p) = Point (p' + mayNegate ofs)
      case dir of
        Forward -> betweenB pt (Point $ max 0 p + len)
        Backward -> R.reverse <$> betweenB (Point $ p - len) pt
    -- The offset is applied toward the direction being inspected.
    mayNegate = case dir of
      Forward -> id
      Backward -> negate
-- | A word as in use in Emacs (fundamental mode): bounded by the
-- transition from a word character to a non-word character.
unitWord :: TextUnit
unitWord = GenUnit Document (checkPeekB (-1) [isWordChar, not . isWordChar])
-- | Unit delimited on the left and right by the given characters; the
-- boolean argument tells whether the delimiters themselves are
-- included in the unit.
unitDelimited :: Char -> Char -> Bool -> TextUnit
unitDelimited left right included = GenUnit Document $ \direction ->
   case (included,direction) of
       -- Exclusive, looking backward: step off the left delimiter if
       -- the cursor sits on it, then require it just behind the point.
       (False, Backward) -> do
           isCursorOnLeftChar <- (== left) <$> readB
           when isCursorOnLeftChar rightB
           checkPeekB 0 [(== left)] Backward
       -- Exclusive, looking forward: an empty unit (left delimiter
       -- immediately behind) with the cursor on the right delimiter
       -- steps back inside before reporting a boundary.
       (False, Forward) -> do
           isCursorOnRightChar <- (== right) <$> readB
           isTextUnitBlank <- checkPeekB 0 [(== left)] Backward
           if isTextUnitBlank && isCursorOnRightChar
               then leftB >> return True
               else return isCursorOnRightChar
       -- Inclusive: boundaries sit just outside the delimiters.
       (True, Backward) -> checkPeekB 0 [(== left)] Forward
       (True, Forward) -> rightB >> checkPeekB 0 [(== right)] Backward
-- | Word constituents: alphanumerics plus the underscore.
isWordChar :: Char -> Bool
isWordChar ch = ch == '_' || isAlphaNum ch

-- | True only for the newline character.
isNl :: Char -> Bool
isNl ch = ch == '\n'

-- | Tells if a char can end a sentence ('.', '!', '?').
isEndOfSentence :: Char -> Bool
isEndOfSentence ch = ch == '.' || ch == '!' || ch == '?'
-- | Verifies that the string matches all the predicates, pairwise:
-- predicate @i@ must hold for character @i@. A string shorter than
-- the (finite) predicate list fails.
checks :: [Char -> Bool] -> YiString -> Bool
checks preds text = go preds (R.toString text)
  where
    go (p:ps) (c:cs) = p c && go ps cs
    go []     _      = True
    go _      _      = False

-- | Boundary check: the @length conds@ characters found at @offset@
-- (in the given direction) must satisfy @conds@ pairwise.
checkPeekB :: Int -> [Char -> Bool] -> Direction -> BufferM Bool
checkPeekB offset conds dir = genBoundary offset (length conds) (checks conds) dir
-- | First two characters of a 'YiString' without converting the whole
-- string; 'Nothing' when it holds fewer than two characters.
firstTwo :: YiString -> Maybe (Char, Char)
firstTwo t = do
  c  <- R.head t
  c' <- R.tail t >>= R.head
  return (c, c')
-- Boundary test for vi word motions: true when the two characters
-- around the point straddle a class change (per the supplied
-- classifier), ignoring transitions out of whitespace; a pair of
-- newlines (an empty line) always counts.
atViWordBoundary :: (Char -> Int) -> Direction -> BufferM Bool
atViWordBoundary charType = genBoundary (-1) 2 $ \cs -> case firstTwo cs of
  Just (c1, c2) -> isNl c1 && isNl c2 -- stop at empty lines
                || not (isSpace c1) && (charType c1 /= charType c2)
  Nothing -> True

-- As 'atViWordBoundary', but every class transition counts (including
-- into whitespace), and any newline is a boundary.
atAnyViWordBoundary :: (Char -> Int) -> Direction -> BufferM Bool
atAnyViWordBoundary charType = genBoundary (-1) 2 $ \cs -> case firstTwo cs of
  Just (c1, c2) -> isNl c1 || isNl c2 || charType c1 /= charType c2
  Nothing -> True

-- As 'atViWordBoundary', but additionally any newline is a boundary,
-- confining motions to the current line.
atViWordBoundaryOnLine :: (Char -> Int) -> Direction -> BufferM Bool
atViWordBoundaryOnLine charType = genBoundary (-1) 2 $ \cs -> case firstTwo cs of
  Just (c1, c2)-> isNl c1 || isNl c2 || not (isSpace c1) && charType c1 /= charType c2
  Nothing -> True
-- | A vi-style word: bounded by transitions between whitespace, word
-- characters and other symbols (see 'viWordCharType').
unitViWord :: TextUnit
unitViWord = GenUnit Document $ atViWordBoundary viWordCharType
-- | A vi-style WORD: anything bounded by whitespace ('viWORDCharType').
unitViWORD :: TextUnit
unitViWORD = GenUnit Document $ atViWordBoundary viWORDCharType
-- | As 'unitViWord', but every class transition is a boundary
-- (including into whitespace).
unitViWordAnyBnd :: TextUnit
unitViWordAnyBnd = GenUnit Document $ atAnyViWordBoundary viWordCharType
-- | As 'unitViWORD', but every class transition is a boundary.
unitViWORDAnyBnd :: TextUnit
unitViWORDAnyBnd = GenUnit Document $ atAnyViWordBoundary viWORDCharType
-- | As 'unitViWord', additionally treating any newline as a boundary.
unitViWordOnLine :: TextUnit
unitViWordOnLine = GenUnit Document $ atViWordBoundaryOnLine viWordCharType
-- | As 'unitViWORD', additionally treating any newline as a boundary.
unitViWORDOnLine :: TextUnit
unitViWORDOnLine = GenUnit Document $ atViWordBoundaryOnLine viWORDCharType
-- | Character class for vi word motions:
-- 1 = whitespace, 2 = word constituent, 3 = other symbol/punctuation.
viWordCharType :: Char -> Int
viWordCharType c =
  if isSpace c then 1 else if isWordChar c then 2 else 3

-- | Character class for vi WORD motions: whitespace (1) versus
-- everything else (2).
viWORDCharType :: Char -> Int
viWORDCharType c = if isSpace c then 1 else 2
-- | Separator characters (space, tab, Unicode separators). Most of
-- the units above attempt to identify "words" with various
-- punctuation and symbols included or excluded. This set of units is
-- a simple inverse: it is true for "whitespace" or "separators" and
-- false for anything that is not (letters, numbers, symbols,
-- punctuation, whatever).
isAnySep :: Char -> Bool
isAnySep ch =
    isSeparator ch
      || isSpace ch
      || generalCategory ch `elem` separatorCategories
  where
    separatorCategories = [Space, LineSeparator, ParagraphSeparator]
-- Boundary test for separator units: a transition between separator
-- and non-separator characters, or any newline.
atSepBoundary :: Direction -> BufferM Bool
atSepBoundary = genBoundary (-1) 2 $ \cs -> case firstTwo cs of
  Just (c1, c2) -> isNl c1 || isNl c2 || isAnySep c1 /= isAnySep c2
  Nothing -> True
-- | unitSep is true for any kind of whitespace/separator
unitSep :: TextUnit
unitSep = GenUnit Document atSepBoundary
-- | unitSepThisLine is true for any kind of whitespace/separator on this line only
unitSepThisLine :: TextUnit
unitSepThisLine = GenUnit Line atSepBoundary
-- | Is the point at a @Unit@ boundary in the specified @Direction@?
atBoundary :: TextUnit -> Direction -> BufferM Bool
atBoundary Document Backward = (== 0) <$> pointB
atBoundary Document Forward = (>=) <$> pointB <*> sizeB
atBoundary Character _ = return True
atBoundary VLine _ = return True -- a fallacy; this needs a little refactoring.
atBoundary Line direction = checkPeekB 0 [isNl] direction
atBoundary (GenUnit _ atBound) dir = atBound dir

-- | The unit enclosing the given unit; 'Document' for units that do
-- not carry an explicit enclosing unit.
enclosingUnit :: TextUnit -> TextUnit
enclosingUnit (GenUnit enclosing _) = enclosing
enclosingUnit _ = Document
-- | As 'atBoundary', but also true at a boundary of the (transitively)
-- enclosing unit.
atBoundaryB :: TextUnit -> Direction -> BufferM Bool
atBoundaryB Document d = atBoundary Document d
atBoundaryB u d = (||) <$> atBoundary u d <*> atBoundaryB (enclosingUnit u) d
-- | Paragraph to implement emacs-like forward-paragraph/backward-paragraph:
-- bounded by a non-newline followed by two newlines.
unitEmacsParagraph :: TextUnit
unitEmacsParagraph = GenUnit Document $ checkPeekB (-2) [not . isNl, isNl, isNl]
-- | Paragraph that begins and ends in the paragraph, not the empty lines surrounding it.
unitParagraph :: TextUnit
unitParagraph = GenUnit Document $ checkPeekB (-1) [not . isNl, isNl, isNl]
-- | A sentence: ends at sentence punctuation followed by whitespace,
-- within an emacs paragraph; the peek pattern is mirrored per direction.
unitSentence :: TextUnit
unitSentence = GenUnit unitEmacsParagraph $ \dir -> checkPeekB (if dir == Forward then -1 else 0) (mayReverse dir [isEndOfSentence, isSpace]) dir
-- | Unit whose left and right boundaries both sit at the left boundary
-- of the argument unit.
leftBoundaryUnit :: TextUnit -> TextUnit
leftBoundaryUnit u = GenUnit Document (const (atBoundaryB u Backward))
-- | @genAtBoundaryB u d s@ returns whether the point is at a given boundary @(d,s)@ .
-- Boundary @(d,s)@ , taking Word as example, means:
--      Word
--     ^^  ^^
--     12  34
-- 1: (Backward,OutsideBound)
-- 2: (Backward,InsideBound)
-- 3: (Forward,InsideBound)
-- 4: (Forward,OutsideBound)
--
-- rules:
-- genAtBoundaryB u Backward InsideBound = atBoundaryB u Backward
-- genAtBoundaryB u Forward  OutsideBound = atBoundaryB u Forward
genAtBoundaryB :: TextUnit -> Direction -> BoundarySide -> BufferM Bool
genAtBoundaryB u d s = withOffset (off u d s) $ atBoundaryB u d
    where -- Evaluate the boundary test at the point shifted by the
          -- offset for the requested side; the point is restored.
          withOffset 0 f = f
          withOffset ofs f = savingPointB (((ofs +) <$> pointB) >>= moveTo >> f)
          -- Offsets per the diagram above: the two "outside/inside"
          -- cases not covered by the rules need a one-character shift.
          off _ Backward InsideBound = 0
          off _ Backward OutsideBound = 1
          off _ Forward InsideBound = 1
          off _ Forward OutsideBound = 0
-- | Count how many times @unit@ fits inside the @containingUnit@
-- surrounding the point; the point is restored afterwards.
numberOfB :: TextUnit -> TextUnit -> BufferM Int
numberOfB unit containingUnit = savingPointB $ do
  -- Bracket the containing unit, then walk it unit by unit.
  maybeMoveB containingUnit Backward
  start <- pointB
  moveB containingUnit Forward
  end <- pointB
  moveTo start
  length <$> untilB ((>= end) <$> pointB) (moveB unit Forward)
-- | Run @body@ while the condition holds; may run zero times.
whileB :: BufferM Bool -> BufferM a -> BufferM [a]
whileB cond body = untilB (fmap not cond) body

-- | Repeat an action until the condition is fulfilled or the cursor
-- stops moving. The action may be performed zero times.
untilB :: BufferM Bool -> BufferM a -> BufferM [a]
untilB cond body = do
  done <- cond
  if done
    then return []
    else doUntilB cond body
-- | Repeat an action until the condition is fulfilled or the cursor
-- stops moving. The Action is performed at least once.
doUntilB :: BufferM Bool -> BufferM a -> BufferM [a]
doUntilB cond f = loop
  where loop = do
          -- Compare the point before and after the action: a stalled
          -- cursor terminates the loop even if the condition is false,
          -- guaranteeing progress/termination.
          p <- pointB
          x <- f
          p' <- pointB
          stop <- cond
          (x:) <$> if p /= p' && not stop
                     then loop
                     else return []
-- | 'doUntilB' with the collected results thrown away.
doUntilB_ :: BufferM Bool -> BufferM a -> BufferM ()
doUntilB_ cond = void . doUntilB cond

-- | 'untilB' with the collected results thrown away.
untilB_ :: BufferM Bool -> BufferM a -> BufferM ()
untilB_ cond = void . untilB cond

-- | Do an action if the current buffer character passes the predicate.
doIfCharB :: (Char -> Bool) -> BufferM a -> BufferM ()
doIfCharB p action = do
  c <- readB
  when (p c) (void action)
-- | Boundary side: whether the point sits on the side of a boundary
-- facing the unit's interior or the side facing away from it (see the
-- diagram on 'genAtBoundaryB').
data BoundarySide = InsideBound | OutsideBound
    deriving Eq
-- | Generic move operation
-- Warning: moving To the (OutsideBound, Backward) bound of Document is impossible (offset -1!)
-- @genMoveB u b d@: move in direction d until encountering boundary b or unit u. See 'genAtBoundaryB' for boundary explanation.
genMoveB :: TextUnit -> (Direction, BoundarySide) -> Direction -> BufferM ()
-- Document moves are resolved directly against the buffer size.
genMoveB Document (Forward,InsideBound) Forward = moveTo =<< subtract 1 <$> sizeB
genMoveB Document _ Forward = moveTo =<< sizeB
genMoveB Document _ Backward = moveTo 0 -- impossible to go outside beginning of doc.
genMoveB Character _ Forward = rightB
genMoveB Character _ Backward = leftB
-- Vertical moves: when no line below exists, fall back to end of line.
genMoveB VLine _ Forward = do
  ofs <- lineMoveRel 1
  when (ofs < 1) (maybeMoveB Line Forward)
genMoveB VLine _ Backward = lineUp
-- General case: step characters until the requested boundary is hit
-- ('doUntilB_' also stops when the cursor stalls, so this terminates).
genMoveB unit (boundDir, boundSide) moveDir =
  doUntilB_ (genAtBoundaryB unit boundDir boundSide) (moveB Character moveDir)
-- | Generic maybe move operation.
-- As genMoveB, but don't move if we are at boundary already.
genMaybeMoveB :: TextUnit -> (Direction, BoundarySide) -> Direction -> BufferM ()
-- optimized case for Document
genMaybeMoveB Document boundSpec moveDir = genMoveB Document boundSpec moveDir
-- optimized case for start/end of Line
genMaybeMoveB Line (Backward, InsideBound) Backward = moveTo =<< solPointB =<< pointB
genMaybeMoveB Line (Forward, OutsideBound) Forward = moveTo =<< eolPointB =<< pointB
-- General case: 'untilB_' (unlike 'doUntilB_') checks the boundary
-- before the first step, so no move happens when already there.
genMaybeMoveB unit (boundDir, boundSide) moveDir =
  untilB_ (genAtBoundaryB unit boundDir boundSide) (moveB Character moveDir)
-- | Move to the next unit boundary in the given direction.
moveB :: TextUnit -> Direction -> BufferM ()
moveB u d = genMoveB u (d, sideFor d) d
  where
    sideFor Forward  = OutsideBound
    sideFor Backward = InsideBound

-- | As 'moveB', but a no-op when the point already sits at a unit
-- boundary: e.g. @maybeMoveB Line Forward@ is move-to-end-of-line,
-- staying put when already at the end of the current line (and
-- similarly for move-to-start-of-line).
maybeMoveB :: TextUnit -> Direction -> BufferM ()
maybeMoveB u d = genMaybeMoveB u (d, sideFor d) d
  where
    sideFor Forward  = OutsideBound
    sideFor Backward = InsideBound
-- | Swap the unit behind the point with the following unit, leaving
-- the point after the transposed pair (in the spirit of Emacs
-- transpose-words). The four moves bracket the two regions to swap.
transposeB :: TextUnit -> Direction -> BufferM ()
transposeB unit direction = do
  moveB unit (reverseDir direction)
  w0 <- pointB
  moveB unit direction
  w0' <- pointB
  moveB unit direction
  w1' <- pointB
  moveB unit (reverseDir direction)
  w1 <- pointB
  swapRegionsB (mkRegion w0 w0') (mkRegion w1 w1')
  moveTo w1'
-- | Transforms the region given by 'TextUnit' in the 'Direction' with
-- the user-supplied function, replacing the text in place.
transformB :: (YiString -> YiString) -> TextUnit -> Direction -> BufferM ()
transformB f unit direction = do
  start <- pointB
  moveB unit direction
  end <- pointB
  let region = mkRegion start end
  old <- readRegionB region
  replaceRegionB region (f old)
-- | Delete between the point and the next unit boundary.
deleteB :: TextUnit -> Direction -> BufferM ()
deleteB unit dir = regionOfPartNonEmptyB unit dir >>= deleteRegionB

-- | Region spanned by two successive moves; the point is restored
-- afterwards.
regionWithTwoMovesB :: BufferM a -> BufferM b -> BufferM Region
regionWithTwoMovesB move1 move2 = savingPointB $ do
  start <- move1 >> pointB
  end   <- move2 >> pointB
  return (mkRegion start end)
-- | Region of the whole textunit where the current point is.
regionOfB :: TextUnit -> BufferM Region
regionOfB unit = regionWithTwoMovesB (maybeMoveB unit Backward) (maybeMoveB unit Forward)
-- An alternate definition would be the following, but it can return two units if the current point is between them.
-- eg.  "word1 ^ word2" would return both words.
-- regionOfB unit = mkRegion
--                  <$> pointAfter (maybeMoveB unit Backward)
--                  <*> destinationOfMoveB (maybeMoveB unit Forward)
-- | Non-empty region of the whole textunit where the current point is;
-- the point is restored afterwards.
regionOfNonEmptyB :: TextUnit -> BufferM Region
regionOfNonEmptyB unit = savingPointB $ do
  start <- maybeMoveB unit Backward >> pointB
  end   <- moveB unit Forward >> pointB
  return (mkRegion start end)
-- | Region between the point and the next boundary.
-- The region is empty if the point is at the boundary.
regionOfPartB :: TextUnit -> Direction -> BufferM Region
regionOfPartB unit dir = mkRegion <$> pointB <*> destinationOfMoveB (maybeMoveB unit dir)
-- | Non empty region between the point and the next boundary,
-- In fact the region can be empty if we are at the end of file.
regionOfPartNonEmptyB :: TextUnit -> Direction -> BufferM Region
regionOfPartNonEmptyB unit dir = mkRegion <$> pointB <*> destinationOfMoveB (moveB unit dir)
-- | Non-empty region between the given point and the next boundary;
-- the current point is restored afterwards.
regionOfPartNonEmptyAtB :: TextUnit -> Direction -> Point -> BufferM Region
regionOfPartNonEmptyAtB unit dir p = do
  saved <- pointB
  moveTo p
  region <- regionOfPartNonEmptyB unit dir
  moveTo saved
  return region
-- | Read the (non-empty) unit just before the point.
readPrevUnitB :: TextUnit -> BufferM YiString
readPrevUnitB unit = regionOfPartNonEmptyB unit Backward >>= readRegionB

-- | Read the unit surrounding the point.
readUnitB :: TextUnit -> BufferM YiString
readUnitB unit = regionOfB unit >>= readRegionB

-- | Restrict a unit's boundary test to one direction; the other
-- direction never reports a boundary. Units without a boundary test
-- are returned unchanged.
halfUnit :: Direction -> TextUnit -> TextUnit
halfUnit dir (GenUnit enclosing boundary) = GenUnit enclosing restricted
  where restricted d
          | d == dir  = boundary d
          | otherwise = return False
halfUnit _dir unit = unit

-- | Delete the non-empty part-region of the unit next to the point.
deleteUnitB :: TextUnit -> Direction -> BufferM ()
deleteUnitB unit dir = regionOfPartNonEmptyB unit dir >>= deleteRegionB
|
TOSPIO/yi
|
src/library/Yi/Buffer/TextUnit.hs
|
gpl-2.0
| 17,359
| 0
| 16
| 3,836
| 4,195
| 2,200
| 1,995
| 279
| 5
|
module Fun00006 where
import Prelude hiding ((++), (.))
-- NOTE(review): this module appears to be a parser test fixture (gold
-- file) — the point of these definitions is the syntax they exercise
-- (operator definitions with the Prelude versions hidden, fixity
-- declarations), not their runtime behaviour. Keep the code shapes.

-- | List reversal via an explicit accumulator.
rev :: [a] -> [a]
rev xs = go [] xs
  where go acc [] = acc
        go acc (x:xs) = go (x:acc) xs
-- | Local redefinition of list append (Prelude's (++) is hidden).
(++) :: [a] -> [a] -> [a]
[] ++ ys = ys
(x:xs) ++ ys = x:(xs ++ ys)
-- | Alias for the local (++), untyped.
(+++) = (++)
-- | Local redefinition of function composition (Prelude's (.) is hidden).
(f . g) x = f (g x)
-- Fixity declarations at assorted associativities/precedences;
-- :+ has no accompanying definition in this file.
infixr 5 ++
infix 7 :+
infixl 9 +++
infix 3 .
|
charleso/intellij-haskforce
|
tests/gold/parser/Fun00006.hs
|
apache-2.0
| 318
| 2
| 10
| 101
| 211
| 120
| 91
| 15
| 2
|
{-# OPTIONS_HADDOCK hide #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.PixelRectangles.Reset
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- This is a purely internal module for (un-)marshaling Reset.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.PixelRectangles.Reset (
Reset(..), marshalReset
) where
import Graphics.Rendering.OpenGL.GL.GLboolean
import Graphics.Rendering.OpenGL.Raw
--------------------------------------------------------------------------------
-- | On/off switch marshaled to a 'GLboolean': 'Reset' maps to GL true,
-- 'NoReset' to GL false (see 'marshalReset').
data Reset =
     NoReset
   | Reset
   deriving ( Eq, Ord, Show )
-- | Marshal 'Reset' to GL true and 'NoReset' to GL false.
marshalReset :: Reset -> GLboolean
marshalReset = marshalGLboolean . (== Reset)
|
hesiod/OpenGL
|
src/Graphics/Rendering/OpenGL/GL/PixelRectangles/Reset.hs
|
bsd-3-clause
| 920
| 0
| 7
| 126
| 104
| 70
| 34
| 11
| 1
|
module Graphics.UI.Gtk.Layout.MaybeWidget where
import Control.Monad
import Data.IORef
import Graphics.UI.Gtk
import System.Glib.Types
-- | A widget wrapper that shows either a placeholder 'Label' (notebook
-- page 0) or the real widget of type @a@ (page 1); a tab-less
-- 'Notebook' does the switching, and the 'IORef' stores the current
-- activation flag.
data MaybeWidget a = MaybeWidget Notebook a Label (IORef MaybeWidgetParams)
-- | Parameters of a 'MaybeWidget': currently just the activation flag.
type MaybeWidgetParams = Bool
instance WidgetClass (MaybeWidget a)
instance ObjectClass (MaybeWidget a)
instance GObjectClass (MaybeWidget a) where
  toGObject (MaybeWidget nb _ _ _) = toGObject nb
  -- NOTE(review): the cast only reconstructs the notebook; the other
  -- fields are 'undefined' and will crash if forced — confirm no
  -- caller touches them on a cast-back value.
  unsafeCastGObject o = MaybeWidget (unsafeCastGObject o) undefined undefined undefined
-- | Build a 'MaybeWidget' around @w@, initially deactivated (showing
-- the placeholder label built from @label@).
maybeWidgetNewWithLabel :: (WidgetClass a) => a -> Maybe String -> IO (MaybeWidget a)
maybeWidgetNewWithLabel w label = do
 lblW <- labelNew label
 nb <- notebookNew
 -- Page 0: placeholder label; page 1: the real widget.
 _ <- notebookAppendPage nb lblW ""
 _ <- notebookAppendPage nb w ""
 -- Hide the tabs so the notebook is invisible chrome.
 notebookSetShowTabs nb False
 params <- newIORef False
 return $ MaybeWidget nb w lblW params
-- | The wrapped widget itself.
maybeWidgetGetWidget :: MaybeWidget a -> a
maybeWidgetGetWidget (MaybeWidget _ a _ _) = a
-- | Text of the placeholder label.
maybeWidgetLabelText :: Attr (MaybeWidget a) String
maybeWidgetLabelText = newAttr getter setter
 where getter (MaybeWidget _ _ lblW _) = get lblW labelLabel
       setter (MaybeWidget _ _ lblW _) s = set lblW [ labelLabel := s ]
-- | Activation flag: 'True' shows the real widget (page 1), 'False'
-- the placeholder label (page 0). Setting it flips the notebook page
-- only when the value actually changes.
maybeWidgetActivated :: Attr (MaybeWidget a) Bool
maybeWidgetActivated = newAttr getter setter
 where getter (MaybeWidget _ _ _ paramsR) = readIORef paramsR
       setter (MaybeWidget nb _ _ paramsR) v = do
         params <- readIORef paramsR
         when (v /= params) $ do let upd = if v then 1 else 0
                                 notebookSetCurrentPage nb upd
                                 writeIORef paramsR v
-- | Flip the activation state.
maybeWidgetToggle :: MaybeWidget a -> IO()
maybeWidgetToggle w = set w [ maybeWidgetActivated :~ not ]
|
keera-studios/gtk-helpers
|
gtk3/src/Graphics/UI/Gtk/Layout/MaybeWidget.hs
|
bsd-3-clause
| 1,741
| 0
| 15
| 408
| 557
| 273
| 284
| 37
| 2
|
{-# LANGUAGE StandaloneKindSignatures #-}
{-# LANGUAGE GADTs #-}
module SAKS_Fail011 where
import Data.Kind (Type)
-- NOTE(review): this module lives under a should_fail test directory —
-- GHC is expected to reject this declaration, so do not "fix" it.
type G :: Type -> Type
data G where
    MkG :: a -> G a
|
sdiehl/ghc
|
testsuite/tests/saks/should_fail/saks_fail011.hs
|
bsd-3-clause
| 172
| 0
| 7
| 34
| 42
| 26
| 16
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-SP">
  <!-- Serbian (sr-SP) JavaHelp helpset for "SVN Digger Files":
       declares the home map, TOC, index, full-text search and
       favorites views; content lives in map.jhm/toc.xml/index.xml. -->
  <title>SVN Digger Files</title>
  <maps>
    <homeID>svndigger</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
    JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/svndigger/src/main/javahelp/help_sr_SP/helpset_sr_SP.hs
|
apache-2.0
| 967
| 77
| 66
| 157
| 409
| 207
| 202
| -1
| -1
|
module ShowUser where
import qualified Github.Users as Github
import Data.Maybe (fromMaybe)
-- Fetch the GitHub profile for user "mike-burns" and print either a
-- formatted summary or the error returned by the API.
main = do
  possibleUser <- Github.userInfoFor "mike-burns"
  putStrLn $ either (("Error: "++) . show) formatUser possibleUser
-- Render a detailed owner as a multi-line, tab-separated summary.
-- Organizations: prefixed "Organization: ", showing name/login,
-- company, location, blog, profile URL, creation date and bio
-- (no e-mail address or hireable flag).
formatUser user@(Github.DetailedOrganization {}) =
  "Organization: " ++ (formatName userName login) ++ "\t" ++
    (fromMaybe "" company) ++ "\t" ++
    (fromMaybe "" location) ++ "\n" ++
    (fromMaybe "" blog) ++ "\t" ++ "\n" ++
    htmlUrl ++ "\t" ++ (formatDate createdAt) ++ "\n\n" ++
    (fromMaybe "" bio)
  where
    userName = Github.detailedOwnerName user
    login = Github.detailedOwnerLogin user
    company = Github.detailedOwnerCompany user
    location = Github.detailedOwnerLocation user
    blog = Github.detailedOwnerBlog user
    htmlUrl = Github.detailedOwnerHtmlUrl user
    createdAt = Github.detailedOwnerCreatedAt user
    bio = Github.detailedOwnerBio user
-- Users additionally get the e-mail address (in angle brackets) and
-- the "hireable" flag; missing optional fields render as "".
formatUser user@(Github.DetailedUser {}) =
  (formatName userName login) ++ "\t" ++ (fromMaybe "" company) ++ "\t" ++
    (fromMaybe "" location) ++ "\n" ++
    (fromMaybe "" blog) ++ "\t" ++ "<" ++ (fromMaybe "" email) ++ ">" ++ "\n" ++
    htmlUrl ++ "\t" ++ (formatDate createdAt) ++ "\n" ++
    "hireable: " ++ (formatHireable (fromMaybe False isHireable)) ++ "\n\n" ++
    (fromMaybe "" bio)
  where
    userName = Github.detailedOwnerName user
    login = Github.detailedOwnerLogin user
    company = Github.detailedOwnerCompany user
    location = Github.detailedOwnerLocation user
    blog = Github.detailedOwnerBlog user
    email = Github.detailedOwnerEmail user
    htmlUrl = Github.detailedOwnerHtmlUrl user
    createdAt = Github.detailedOwnerCreatedAt user
    isHireable = Github.detailedOwnerHireable user
    bio = Github.detailedOwnerBio user
-- Display name: the real name with the login in parentheses when
-- known, otherwise just the login.
formatName mName login = maybe login (\name -> name ++ "(" ++ login ++ ")") mName
-- Render the hireable flag for humans.
formatHireable hireable = if hireable then "yes" else "no"
-- Render a GitHub date via its Show instance.
formatDate date = show (Github.fromGithubDate date)
|
bitemyapp/github
|
samples/Users/ShowUser.hs
|
bsd-3-clause
| 1,962
| 0
| 25
| 368
| 599
| 306
| 293
| 43
| 1
|
module Literals where
-- Minimal definitions covering the main literal forms — string,
-- overloaded numeric, fractional, nested list, nested tuple — useful
-- as a compiler/tooling test input. Keep the literal shapes as-is.
str :: String
str = "str literal"
num :: Num a => a
num = 0 + 1 + 1010011 * 41231 + 12131
frac :: Fractional a => a
frac = 42.0000001
list :: [[[[a]]]]
list = [[], [[]], [[[]]]]
pair :: ((), ((), (), ()), ())
pair = ((), ((), (), ()), ())
|
sdiehl/ghc
|
testsuite/tests/hiefile/should_compile/hie005.hs
|
bsd-3-clause
| 267
| 0
| 8
| 64
| 176
| 104
| 72
| 11
| 1
|
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
module Shape.Description.AABB where
import Shape.Description
import Control.Lens
import Data.Semigroup
import Linear
import Linear.Affine
-- | Axis-aligned bounding box stored as a centre point plus a per-axis
-- extent. NOTE(review): 'pointInsideAABB' treats '_aabbSize' as a
-- half-extent (centre ± size); confirm all producers agree.
data AABB a = AABB
  { _aabbCenter :: Point V2 a
  , _aabbSize :: V2 a
  } deriving (Eq, Show, Functor)
-- | Bounding box of a shape; composite shapes combine their children
-- via the 'Semigroup' instance. NOTE(review): @ManyShape []@ is not
-- matched and will crash — callers must guarantee non-empty groups.
-- (The single-element @ManyShape [a]@ clause is subsumed by the
-- @(a:as)@ clause with the same result.)
makeAABBFromShape
  :: (Ord a, Integral a)
  => Shape a -> AABB a
makeAABBFromShape (ManyShape [a]) = makeAABBFromShape a
makeAABBFromShape (ManyShape (a:as)) =
  foldr (<>) (makeAABBFromShape a) (map makeAABBFromShape as)
makeAABBFromShape (Circle cen r) = AABB cen (pure r)
makeAABBFromShape (Rectangle cen sz) = AABB cen $ (`div` 2) <$> sz
makeAABBFromShape (Image cen sz _) = AABB cen sz
-- Styling wrappers do not affect geometry.
makeAABBFromShape (WithFillColour _ s) = makeAABBFromShape s
makeAABBFromShape (WithStrokeColour _ s) = makeAABBFromShape s
makeLenses ''AABB
-- | Union of two boxes: take the per-axis extremes of centre ± size,
-- derive the new centre with @divFunc@ (expected to halve min+max) and
-- the new size as the distance from centre to the max corner.
-- NOTE(review): the size computation assumes the centre lands midway
-- (i.e. @divFunc = (`div` 2)@ or similar); other @divFunc@s yield an
-- asymmetric box — confirm intended.
combineAABB
  :: (Num a, Ord a)
  => (a -> a) -> AABB a -> AABB a -> AABB a
combineAABB divFunc a b =
  AABB (P $ V2 cenX cenY) (V2 (maxX - cenX) (maxY - cenY))
  where
    minX =
      min
        (a ^. aabbCenter . _x - a ^. aabbSize . _x)
        (b ^. aabbCenter . _x - b ^. aabbSize . _x)
    minY =
      min
        (a ^. aabbCenter . _y - a ^. aabbSize . _y)
        (b ^. aabbCenter . _y - b ^. aabbSize . _y)
    maxX =
      max
        (a ^. aabbCenter . _x + a ^. aabbSize . _x)
        (b ^. aabbCenter . _x + b ^. aabbSize . _x)
    maxY =
      max
        (a ^. aabbCenter . _y + a ^. aabbSize . _y)
        (b ^. aabbCenter . _y + b ^. aabbSize . _y)
    cenX = divFunc (minX + maxX)
    cenY = divFunc (minY + maxY)
-- | Box union with integer halving for the centre.
instance (Integral a, Ord a) =>
         Semigroup (AABB a) where
  (<>) = combineAABB (`div` 2)
-- | Is a point inside the box (inclusive of the edges)? Size is
-- interpreted as a half-extent: the box spans centre ± size per axis.
pointInsideAABB
  :: (Ord a, Num a, R2 f)
  => AABB a -> f a -> Bool
pointInsideAABB (AABB cen sz) p =
  (p ^. _x >= cen ^. _x - sz ^. _x) &&
  (p ^. _x <= cen ^. _x + sz ^. _x) &&
  (p ^. _y >= cen ^. _y - sz ^. _y) &&
  (p ^. _y <= cen ^. _y + sz ^. _y)

-- | The four corner points (all combinations of centre ± size).
cornerPoints
  :: Num a
  => AABB a -> [V2 a]
cornerPoints (AABB cen sz) =
  V2 <$> [cen ^. _x - sz ^. _x, cen ^. _x + sz ^. _x] <*>
  [cen ^. _y - sz ^. _y, cen ^. _y + sz ^. _y]
-- | Do two boxes overlap (edges touching counts)? Uses the per-axis
-- separation test: the boxes overlap iff, on both X and Y, the
-- distance between centres does not exceed the sum of half-extents.
--
-- This fixes two defects in the previous corner-containment version:
-- (1) boxes can overlap in a cross shape with no corner of either box
-- inside the other, and (2) only the second box's corners were tested,
-- making the relation asymmetric.
aabbOverlap
  :: (Ord a, Num a)
  => AABB a -> AABB a -> Bool
aabbOverlap a b = overlapX && overlapY
  where
    overlapX =
      abs (a ^. aabbCenter . _x - b ^. aabbCenter . _x)
        <= a ^. aabbSize . _x + b ^. aabbSize . _x
    overlapY =
      abs (a ^. aabbCenter . _y - b ^. aabbCenter . _y)
        <= a ^. aabbSize . _y + b ^. aabbSize . _y
|
edwardwas/shape-descr
|
src/lib/Shape/Description/AABB.hs
|
isc
| 2,270
| 0
| 13
| 601
| 1,027
| 533
| 494
| -1
| -1
|
module Main where
import MyLib
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck as QC
import Test.Tasty.SmallCheck as SC
-- | Run the full tasty suite.
main :: IO ()
main = defaultMain tests
-- | Root of the test tree: property-based and unit tests.
tests :: TestTree
tests = testGroup "Tests" [properties, unitTests]
-- | Property tests, checked by both SmallCheck and QuickCheck.
properties :: TestTree
properties = testGroup "Properties" [scProps, qcProps]
-- | Exhaustive, depth-bounded properties (SmallCheck).
-- NOTE(review): the first property mixes @sortInts@ with plain @sort@,
-- unlike the QuickCheck twin below which uses @sortInts@ on both
-- sides; @sort@ is not visibly imported here (presumably from MyLib) —
-- confirm this asymmetry is intentional.
scProps :: TestTree
scProps = testGroup
  "(checked by SmallCheck)"
  [ SC.testProperty "sort == sort . reverse"
    $ \xs -> sortInts xs == sort (reverse xs)
  , SC.testProperty "Fermat's little theorem" $ \x -> flt x 7 == 0
  , SC.testProperty "Fermat's last theorem"
    $ \x y z n -> n >= 3 SC.==> fLT x y z n /= 0
  ]
-- | Randomised properties (QuickCheck), mirroring the SmallCheck set.
qcProps :: TestTree
qcProps = testGroup
  "(checked by QuickCheck)"
  [ QC.testProperty "sort == sort . reverse"
    $ \xs -> sortInts xs == sortInts (reverse xs)
  , QC.testProperty "Fermat's little theorem" $ \x -> flt x 7 == 0
  , QC.testProperty "Fermat's last theorem"
    $ \x y z n -> n >= 3 QC.==> fLT x y z n /= 0
  ]
-- | Example-based HUnit tests of list 'compare' semantics.
unitTests :: TestTree
unitTests = testGroup
  "Unit tests"
  [ testCase "List comparison (different length)"
    $ ([1, 2, 3] :: [Int])
    `compare` ([1, 2] :: [Int])
    @?= GT
  , testCase "List comparison (same length)"
    $ ([1, 2, 3] :: [Int])
    `compare` ([1, 2, 2] :: [Int])
    @?= LT
  ]
|
genos/Programming
|
workbench/tasty-example/test/Test.hs
|
mit
| 1,314
| 0
| 12
| 316
| 458
| 253
| 205
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
module Yesod.Transloadit.OrderedJSON (
encode,
is,
obj,
str
) where
import Data.Monoid (mconcat)
import Data.Text
-- | A key paired with its value; insertion order is preserved.
type KeyValue = (Text, OrderedValue)
-- | JSON value restricted to objects (with ordered keys) and strings.
data OrderedValue = Object [KeyValue] | String Text deriving (Eq, Show)
-- Literal fragments used by the encoder. NOTE(review): values are
-- emitted verbatim — 'encode' performs no escaping of quotes or
-- control characters inside strings.
quote :: Text
quote = "\""
lbrace :: Text
lbrace = "{"
rbrace :: Text
rbrace = "}"
colon :: Text
colon = ":"
comma :: Text
comma = ","
-- | Render one @"key":value@ pair.
encodeKV :: KeyValue -> Text
encodeKV (key, value) = mconcat [quote, key, quote, colon, encode value]

-- | Render a value: strings are quoted verbatim (no escaping), objects
-- keep their key order.
encode :: OrderedValue -> Text
encode value = case value of
  String t   -> mconcat [quote, t, quote]
  Object kvs -> mconcat [lbrace, intercalate comma (map encodeKV kvs), rbrace]
-- | Infix-friendly pair builder: @"key" `is` value@.
is :: Text -> OrderedValue -> KeyValue
is = (,)
-- | Wrap key/value pairs as an object.
obj :: [KeyValue] -> OrderedValue
obj = Object
-- | Wrap text as a JSON string.
str :: Text -> OrderedValue
str = String
|
bobjflong/yesod-transloadit
|
src/Yesod/Transloadit/OrderedJSON.hs
|
mit
| 834
| 0
| 8
| 184
| 296
| 171
| 125
| 31
| 1
|
module Regen where
import Control.Monad
import Data.List
import Data.Either
import Data.Foldable as F
import CharSet
import Seq
-- | A sequence whose elements are alternations.
type TSeq = RSeq RAlt
-- | An alternation: a character set plus alternative sequences.
data RAlt = RAlt CharSet [TSeq] deriving (Eq)
-- | Top-level expression: a sequence or an alternation.
data Expr = Seq TSeq | Alt RAlt deriving (Eq)
-- partitionx :: Eq a => ([b] -> b) -> ([b] -> [b]) -> [[a]] -> [(a, [[a]])]
-- Group sequences by a head-like projection, pairing each group key
-- with the tail-like remainders of its members. The @head@/@tail@
-- parameters shadow the Prelude functions. NOTE(review): 'nub' plus a
-- 'filter' per group makes this quadratic; fine for small inputs.
partitionx head tail sequences =
    map (\group -> (group, map tail (startsWith group))) groups
    where
      groups = nub (map head sequences)
      startsWith v = filter (\x -> v == (head x)) sequences
-- Group by first element, keeping the tails.
partition1 :: [TSeq] -> [(RAlt, [Either TSeq RAlt])]
partition1 = partitionx headSeq tailSeq
-- Group by last element, keeping the inits.
partition9 :: [TSeq] -> [(RAlt, [Either TSeq RAlt])]
partition9 = partitionx lastSeq initSeq
-- | Factor a set of sequences: merge common prefixes (pr), then common
-- suffixes (sf), building alternations for the differing middles.
sq_seq :: [TSeq] -> [TSeq]
sq_seq sequences =
    sf $ pr sequences
    where
      pr :: [TSeq] -> [TSeq]
      pr x = map prepend $ partition1 x
      sf :: [TSeq] -> [TSeq]
      sf x = map append $ partition9 x
      -- Rebuild a sequence from a shared first element and its tails;
      -- multiple tails collapse into a (recursively factored) alternation.
      prepend :: (RAlt, [Either TSeq RAlt]) -> TSeq
      prepend (start, [Left tail]) = prependSeq start tail
      prepend (start, [Right tail]) = newSeq start tail
      prepend (start, tails) = newSeq start (squeezeAlt $ alts tails)
      -- Mirror image of 'prepend' for a shared last element.
      append :: (RAlt, [Either TSeq RAlt]) -> TSeq
      append (end, [Left init]) = appendSeq init end
      append (end, [Right init]) = newSeq init end
      append (end, inits) = newSeq (squeezeAlt $ alts inits) end
      -- Merge remainders into a single alternation: character sets of
      -- bare alternations are unioned, their sequences concatenated.
      alts :: [Either TSeq RAlt] -> RAlt -- length >= 2
      alts seqs = alt cs (ss ++ ss2)
        where
          (ss, as) = partitionEithers seqs
          cs = map (\a -> case a of RAlt x y -> x) as
          ss2 = Data.List.concatMap (\a -> case a of RAlt x y -> y) as
-- | Build an alternation, unioning the character sets with (><).
alt :: [CharSet] -> [TSeq] -> RAlt
alt cs ss = RAlt (Data.List.foldl (><) empty cs) ss
-- | Recursively factor the alternative sequences of an alternation.
squeezeAlt :: RAlt -> RAlt
squeezeAlt (RAlt chars sequences) = RAlt chars (sq_seq sequences)
-- | Factor an expression.
squeeze :: Expr -> Expr
squeeze (Seq s) = Seq $ fmap squeezeAlt s
squeeze (Alt r) = Alt $ squeezeAlt r
-- | Alternation matching exactly one character.
achar :: Char -> RAlt
achar c = RAlt (single c) []
-- | Lift one input line into either a bare character set (single
-- character) or a sequence of single-character alternations.
-- NOTE(review): empty input calls 'error' — callers must pre-filter
-- blank lines.
str2expr :: String -> Either CharSet TSeq -- RSeq CharSet
str2expr str = case str of
    [] -> error "empty input line"
    [c] -> Left $ single c
    c1:c2:cs -> Right $ Data.List.foldl appendSeq (newSeq (achar c1) (achar c2)) (map achar cs)
-- | Fold a list of input strings into one alternation: single-char
-- lines merge into the character set, longer lines become sequences.
range :: [String] -> RAlt
range s = Data.List.foldl altEmAll (RAlt empty []) (map str2expr s)
    where
      altEmAll :: RAlt -> Either CharSet TSeq -> RAlt
      altEmAll (RAlt c s) (Left c2) = RAlt (c >< c2) s
      altEmAll (RAlt c s) (Right s2) = RAlt c (s ++ [s2])
-- | Wrap an alternation as a top-level expression.
sss :: RAlt -> Expr
sss = Alt
-- | Indent every line by four spaces.
shift :: [String] -> [String]
shift = map (replicate 4 ' ' ++)
-- | Pretty-print an expression as an indented @seq[...]@/@alt[...]@
-- tree, one node per output line.
printE :: Expr -> [String]
printE (Seq s) = ["seq["] ++ (F.concatMap (\x -> shift $ printE (sss x)) s) ++ ["]"]
printE (Alt (RAlt c s)) = ["alt["] ++ (shift $ [show c] ++ (Data.List.concatMap printE (map Seq s))) ++ ["]"]
-- | Render each number zero-padded to the given width.
numbers :: [Int] -> Int -> [String]
numbers xs width = map (pad width . show) xs

-- | Left-pad with '0' up to @width@; longer strings are unchanged.
pad :: Int -> String -> String
pad width s = replicate (width - length s) '0' ++ s
-- | Render via 'printE', joining the tree lines with newlines.
instance Show Expr where
  show a = intercalate "\n" $ printE a
|
andy128k/regen
|
Regen.hs
|
mit
| 3,104
| 0
| 15
| 766
| 1,426
| 752
| 674
| 72
| 5
|
module GitHub.Gitignore where

import GitHub.Internal

-- | Base path of the gitignore template endpoints.
templates = "/gitignore/templates"

-- | Path of one specific gitignore template.
-- (Was previously a second, conflicting definition of 'templates',
-- which both failed to compile and left 'template' undefined below.)
template t = (templates <> "/" <> t)

-- | GET /gitignore/templates
listGitignoreTemplates ::
	GitHub GitignoreTemplatesData
listGitignoreTemplates = ghGet templates

-- | GET /gitignore/templates/:templatename
getGitignoreTemplate ::
	Text ->
	GitHub GitIgnoreTemplateData
getGitignoreTemplate = ghGet . template
|
SaneApp/github-api
|
src/GitHub/Gitignore.hs
|
mit
| 402
| 6
| 9
| 47
| 116
| 60
| 56
| -1
| -1
|
import System.Random
-- ||GOAL:
-- || import qualified RapidlyExploringRandomTree as RRT
-- || f = Dynamics {{Function StatePoint ControlInput}}
-- || system = ControlledSystem f
-- ||
-- || random = {{Make a Random instance}}
-- || RRT.build random 10**3 system
-- | Rapidly-Exploring Random Tree Framework
-- Multiple Instances required?
-- | Operations a concrete RRT instantiation must provide.
-- NOTE(review): the @Random rand@ constraints look like they were meant
-- to be @RandomGen rand@ (System.Random) — confirm before use.
class RRTFramework a where
  -- | Draw a random state, returning the updated generator.
  sampleState :: Random rand => rand -> ( StatePoint a, rand )
  -- | Draw a random control for a given state.
  sampleControl :: Random rand => rand -> StatePoint a -> ( ControlInput a, rand )
  -- | Nearest existing state to the target state.
  nearestState :: [StatePoint a] -> StatePoint a -> StatePoint a
  -- | Control that approximately steers from the first state to the second.
  approxControlInput :: StatePoint a -> StatePoint a -> ControlInput a
  -- | Integrate the dynamics one step under the given control.
  forwardSimulate :: StatePoint a -> ControlInput a -> StatePoint a
-- | A state is a triple of floats (assumes x/y/heading — TODO confirm).
type StatePoint a = (Float, Float, Float)

-- | A scalar control input.
type ControlInput a = Float

-- | An RRT is a tree of states connected by control inputs.
type RRT a = Tree (StatePoint a) (ControlInput a)

-- | Edge-list tree representation: (parent, edge label, child).
type Tree a b = [(a, b, a)]

-- | Every node endpoint appearing in the edge list (duplicates kept).
nodes :: Tree a b -> [a]
nodes = concatMap endpoints
  where endpoints (from, _, to) = [from, to]

-- | A tree with no edges.
empty :: Tree a b
empty = []

-- | Attach one new edge at the end of the edge list.
addLeaf :: Tree a b -> (a, b, a) -> Tree a b
addLeaf tree edge = tree ++ [edge]
-- | Build an RRT without a goal.
-- NOTE(review): the @x_rand::StatePoint a@ pattern annotation needs
-- ScopedTypeVariables, and @empty::RRT a@ references a type variable
-- that is not in scope without an explicit forall — this likely does
-- not compile as written; confirm intent before relying on it.
build :: RRTFramework a => Random rand => rand -> Int -> RRT a
build _ 0 = empty::RRT a
build rand iters
  -- Sample a state, extend from the nearest node in the tree built so
  -- far (note: tree is defined recursively below), then attach the edge.
  = let (x_rand::StatePoint a, rand') = sampleState rand
        x_near = nearestState (nodes tree) x_rand
        u_new = approxControlInput x_near x_rand
        x_new = forwardSimulate x_near u_new
        tree = build rand' $ iters - 1
    in addLeaf tree (x_near, u_new, x_new)
-- | TODO: Let us expand the old trees....
-- | Just build rand iters = build' rand iters empty
--build' :: Random -> Int -> RRT a -> RRT a
-- | Build an RRT with a goal, but only in the forward direction...
-- | Build an RRT with a goal in both directions at once...
-- | Build an RRT where we forward bias the results buy adding extra nodes for randomly sampled controls.
-- | TODO: Is this better than just warping the voroni sets?
|
Zomega/thesis
|
Wurm/Trajectory/RRT.hs
|
mit
| 1,960
| 0
| 11
| 450
| 507
| 273
| 234
| -1
| -1
|
-- 1. let x = 5 in x
-- 2. let x = 5 in x * x
-- 3. let x = 5; y = 6 in x * y
-- 4. let x = 3; y = 1000 in x + 3
-- | Product of two locally bound constants: 5 * 6 == 30.
mult1 = a * b
  where
    a = 5
    b = 6
-- (\x -> x) 0
-- (\x -> x) 1
-- (\x -> x) "blah"
-- let id = \x -> x
-- id 0
-- id 1
-- let id x = x
-- id 0
-- id 1
-- let a = b in c
-- (\a -> c) b
-- let x = 10 in x + 9091
-- (\x -> x + 9091) 10
-- c where a = b
-- (\a -> c) b
-- x + 9091 where x = 10
-- (\x -> x + 9091) 10
-- 1. let x = 3; y = 1000 in x * 3 + y
-- 2. let y = 10; x = 10 * 5 + y in x * 5
-- 3. let x = 7; y = negate x; z = y * 10 in z / x + y
|
diminishedprime/.org
|
reading-list/haskell_programming_from_first_principles/02_12.hs
|
mit
| 584
| 0
| 6
| 226
| 46
| 36
| 10
| 3
| 1
|
-- Haskell homework for our Programming Languages Class
-- 1139626 José Alberto Esquivel
-- 1195815 Eduardo Sánchez
--1. Implementar la función recursiva distintos que liste los elementos distintos
--que pertenecen a dos listas. Asumir que los elementos no se repiten dentro de la
--misma lista.
-- | Membership test: True iff @el@ occurs in the list.
-- Uses the standard 'elem' instead of hand-rolled recursion.
contiene :: (Eq a) => [a] -> a -> Bool
contiene xs el = el `elem` xs
-- | Elements that appear in exactly one of the two lists
-- (symmetric difference; each input list assumed duplicate-free).
distintos :: (Eq a) => [a] -> [a] -> [a]
distintos list1 list2 = distintos_aux list1 list2 [] []

-- | Worker: walks the first list, then the second; @rep@ accumulates
-- values seen in both lists, @res@ the values seen in only one.
distintos_aux :: (Eq a) => [a] -> [a] -> [a] -> [a] -> [a]
distintos_aux [] [] rep res = res
-- First list exhausted: restart the walk over the second list.
distintos_aux [] list2 rep res = distintos_aux list2 [] rep res
distintos_aux (x : xs) list2 rep res
  -- Already seen elsewhere (or repeated) -> remember in rep, drop from result.
  | contiene xs x || contiene list2 x || contiene res x || contiene rep x = distintos_aux xs list2 (rep ++ [x]) res
  | otherwise = distintos_aux xs list2 rep (res ++ [x])
--2. Implementar la función recursiva multiplica que obtenga una lista de 1’s que
--represente el resultado en unario de multiplicar dos enteros no negativos en decimal.
-- | Unary representation of a * b: a list of (a * b) ones.
multiplica :: Integer -> Integer -> [Integer]
multiplica a b = repite (a * b)

-- | A list of n ones. The original recursion (@repite n = [1] ++ repite (n-1)@)
-- diverged for negative n; a range is total and returns [] for n <= 0.
repite :: Integer -> [Integer]
repite n = [1 | _ <- [1 .. n]]
--3. Implementar la función recursiva bolos que genere el patrón de acomodo
--común para N bolos. La última línea de bolos puede quedar incompleta!
-- | Bowling-pin layout for n pins: rows [1], [2,3], [4,5,6], [7..10];
-- the last row may be incomplete.
-- NOTE(review): the explicit cases for 3, 6 and 10 produce the same
-- result as the guards below would; they are redundant but harmless.
bolos :: Integer -> [[Integer]]
bolos 0 = []
bolos 1 = [[1]]
bolos 3 = bolos 1 ++ [[2, 3]]
bolos 6 = bolos 3 ++ [[4, 5, 6]]
bolos 10 = bolos 6 ++ [[7, 8, 9, 10]]
bolos n
  | n > 6 = bolos 6 ++ [ bolosaux 7 n ]
  | n > 3 = bolos 3 ++ [ bolosaux 4 n ]
  | n > 1 = bolos 1 ++ [ bolosaux 2 n ]
  | otherwise = [[]]

-- | The partial row [x .. n]; empty once x exceeds n.
bolosaux :: Integer -> Integer -> [Integer]
bolosaux x n = if x <= n then x : ( bolosaux (x+1) n ) else []
--4. Implementar la función obten-mayores en Scheme que dados un árbol binario y
--un valor como argumentos, cree una lista con los valores de los nodos que contengan
--valores mayores que el valor dado como argumento. Los valores en la lista resultante
--pueden, o no, estar ordenados
-- | Binary tree: A left value right, or the empty tree V.
data AB t = A (AB t) t (AB t) | V deriving Show

-- Example tree (a binary search tree).
ab = A (A (A V 2 V)
          5
          (A V 7 V))
       8
       (A V
          9
          (A (A V 11 V)
             15
             V))

-- | Values in the tree strictly greater than the given threshold.
-- NOTE(review): when the node value is <= the threshold, only the right
-- subtree is searched — this is correct only if the tree is a BST
-- (as 'ab' is); for an arbitrary binary tree it would miss values.
obten_mayores :: AB Integer -> Integer -> [Integer]
obten_mayores V valor = []
obten_mayores (A l v r) valor
  | v > valor = [v] ++ (obten_mayores l valor) ++ (obten_mayores r valor)
  | otherwise = (obten_mayores r valor)
--5. Implementar la función internos en Haskell que dado un árbol binario regrese una
--lista con los valores que se encuentran en los nodos internos del árbol
-- | Values stored in the internal (non-leaf) nodes of the tree.
-- A node with two empty children is a leaf and contributes nothing.
internos :: AB Integer -> [Integer]
internos V = []
internos (A V _ V) = []
internos (A izq x der) = x : (internos izq ++ internos der)
--6. Implementar la función recursiva g_distintos que utilizando guardias liste
--los elementos que pertenecen a dos listas.
-- | Elements in exactly one of the two lists, computed with guards:
-- the elements of l1 not in l2, followed by those of l2 not in l1.
g_distintos :: (Eq a) => [a] -> [a] -> [a]
g_distintos l1 l2 = g_distintos_aux l1 l2 ++ g_distintos_aux l2 l1

-- | Elements of the first list that do not occur in the second.
g_distintos_aux :: (Eq a) => [a] -> [a] -> [a]
g_distintos_aux [] _ = []
g_distintos_aux l1@(el:ls) l2
  -- Second list empty: everything remaining in l1 is distinct.
  | l2 == [] = l1
  | contiene l2 el = g_distintos_aux ls l2
  | otherwise = el : g_distintos_aux ls l2
--7. Implementar la función no-recursiva c_tabla en Haskell que utilizando
--"comprensión de listas" obtenga la tabla de multiplicar especificada. Los
--elementos de la tabla deben aparecer en tuplas.
-- | Multiplication table for n: pairs ((n, x), n * x) for x in 1..10.
c_tabla :: Int -> [((Int, Int), Int)]
c_tabla n = map row [1 .. 10]
  where row x = ((n, x), n * x)
--8. Implementar la función no-recursiva f_prodpar en Haskell que utilizando la FOS
--(funciones de orden superior) cree una lista con los productos de los elementos de las
--listas de tamaño impar.
f_prodpar = (\lista -> [ product l | l <- lista, odd(length l) ] )
|
betoesquivel/haskell
|
1139626_1195815_Tarea6.hs
|
mit
| 3,886
| 0
| 11
| 920
| 1,280
| 673
| 607
| 61
| 2
|
module AI
where
import Types
import Util
-- | Overlay a figure (a block rotated r times clockwise) onto the field
-- at column x, row y. Figure cells equal to 1 replace the field cells.
mapFigure :: Figure -> (Int, Int) -> Field -> Field
mapFigure (block, r) (x, y) field = start
  where
    blockM = blockMatrix block
    blockL = length blockM
    figure = (apply r rotateCW) blockM
    -- Negative y: the top rows of the figure are above the field,
    -- so skip them before merging.
    start
      | y < 0 = loop (drop (-y) figure) field
      | otherwise = take y field ++ loop figure (drop y field)
    -- Merge figure rows into field rows, one row at a time.
    loop [] field' = field'
    loop _ [] = []
    loop figure' field'
      = line (head figure') (head field')
      : loop (tail figure') (tail field')
    -- Splice one figure row into one field row at column x.
    line bline fline
      = take x fline
      ++ join bline (drop x fline)
      ++ drop (x + blockL) fline
    -- Cell-wise merge: a figure cell of 1 wins, otherwise keep the field.
    join [] _ = []
    join _ [] = []
    join (x : xs) (y : ys)
      | x == 1 = x : join xs ys
      | otherwise = y : join xs ys

-- | True when the figure is resting on something: painting it one row
-- lower yields fewer solid cells (i.e. it would overlap occupied cells).
hasContact :: Figure -> (Int, Int) -> Field -> Bool
hasContact figure (x, y) field
  = numSolid figure (x, y + 0) field
  > numSolid figure (x, y + 1) field
  where
    numSolid figure position
      = calcBlocks . paintField . mapFigure figure position
-- | Repaint every moving-block cell (value 1) as a settled cell (value 2);
-- all other cell values pass through unchanged.
paintField :: Field -> Field
paintField = (map . map) settle
  where settle cell = if cell == 1 then 2 else cell
-- | Number of cells in the field whose value is greater than 1.
calcBlocks :: Field -> Int
calcBlocks = length . filter (> 1) . concat
-- | Drop the figure straight down from the given position until it makes
-- contact, then paint it into the field as settled blocks.
dropFigure :: Figure -> (Int, Int) -> Field -> Field
dropFigure figure position@(x, y) field
  | hasContact figure position field
  = paintField $ mapFigure figure position field
  | otherwise
  = dropFigure figure (x, y + 1) field
-- | Score a field: rotate it so columns become rows, then for each
-- column count the cells < 2 that lie below the topmost settled cell
-- (i.e. buried free cells). The unused 'width'/'height' bindings of the
-- original have been removed.
fitness :: Field -> Int
fitness field = sum . map buried . rotateCCW $ field
  where
    -- Free cells remaining after the leading run of free cells.
    buried = length . filter (< 2) . dropWhile (< 2)
|
artems/blockbattle
|
src/AI.hs
|
mit
| 1,687
| 0
| 12
| 510
| 744
| 383
| 361
| 46
| 5
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Various sum types.
--
module Network.AWS.Wolf.Types.Sum where
import Data.Aeson.TH
import Network.AWS.Wolf.Prelude
-- | LabelType
--
-- Tags for referencing workers.
--
data LabelType
  = LabelWolf
  | LabelAct
  | LabelDecide
  deriving (Show, Eq)

-- JSON instances use spinal-case constructor names (e.g. "label-wolf").
$(deriveJSON spinalOptions ''LabelType)
|
mfine/wolf
|
src/Network/AWS/Wolf/Types/Sum.hs
|
mit
| 368
| 0
| 8
| 59
| 68
| 44
| 24
| 11
| 0
|
{-|
Module : PostgREST.Auth
Description : PostgREST authorization functions.
This module provides functions to deal with the JWT authorization (http://jwt.io).
It also can be used to define other authorization functions,
in the future Oauth, LDAP and similar integrations can be coded here.
Authentication should always be implemented in an external service.
In the test suite there is an example of simple login function that can be used for a
very simple authentication system inside the PostgreSQL database.
-}
{-# LANGUAGE RecordWildCards #-}
module PostgREST.Auth
( containsRole
, jwtClaims
, JWTClaims
) where
import qualified Crypto.JWT as JWT
import qualified Data.Aeson as JSON
import qualified Data.HashMap.Strict as M
import qualified Data.Vector as V
import Control.Lens (set)
import Control.Monad.Except (liftEither)
import Data.Either.Combinators (mapLeft)
import Data.Time.Clock (UTCTime)
import PostgREST.Config (AppConfig (..), JSPath, JSPathExp (..))
import PostgREST.Error (Error (..))
import Protolude
type JWTClaims = M.HashMap Text JSON.Value
-- | Receives the JWT secret and audience (from config) and a JWT and returns a
-- map of JWT claims.
-- | Receives the JWT secret and audience (from config) and a JWT and
-- returns a map of JWT claims.
jwtClaims :: Monad m =>
  AppConfig -> LByteString -> UTCTime -> ExceptT Error m JWTClaims
-- An empty token yields an empty claim map rather than an error.
jwtClaims _ "" _ = return M.empty
jwtClaims AppConfig{..} payload time = do
  -- A missing configured JWKS is reported as JwtTokenMissing.
  secret <- liftEither . maybeToRight JwtTokenMissing $ configJWKS
  eitherClaims <-
    lift . runExceptT $
      JWT.verifyClaimsAt validation secret time =<< JWT.decodeCompact payload
  liftEither . mapLeft jwtClaimsError $ claimsMap configJwtRoleClaimKey <$> eitherClaims
  where
    -- Audience check plus one second of allowed clock skew.
    validation =
      JWT.defaultJWTValidationSettings audienceCheck & set JWT.allowedSkew 1

    audienceCheck :: JWT.StringOrURI -> Bool
    -- No configured audience means any audience is accepted.
    audienceCheck = maybe (const True) (==) configJwtAudience

    -- Translate JOSE-level errors into our Error type.
    jwtClaimsError :: JWT.JWTError -> Error
    jwtClaimsError JWT.JWTExpired = JwtTokenInvalid "JWT expired"
    jwtClaimsError e = JwtTokenInvalid $ show e

-- | Turn JWT ClaimSet into something easier to work with.
--
-- Also, here the jspath is applied to put the "role" in the map.
claimsMap :: JSPath -> JWT.ClaimsSet -> JWTClaims
claimsMap jspath claims =
  case JSON.toJSON claims of
    val@(JSON.Object o) ->
      -- Any literal "role" claim is replaced by the one found via jspath.
      M.delete "role" o `M.union` role val
    _ ->
      M.empty
  where
    role value =
      maybe M.empty (M.singleton "role") $ walkJSPath (Just value) jspath

-- | Follow a path of object keys / array indices into a JSON value;
-- Nothing as soon as a step does not match the value's shape.
walkJSPath :: Maybe JSON.Value -> JSPath -> Maybe JSON.Value
walkJSPath x [] = x
walkJSPath (Just (JSON.Object o)) (JSPKey key:rest) = walkJSPath (M.lookup key o) rest
walkJSPath (Just (JSON.Array ar)) (JSPIdx idx:rest) = walkJSPath (ar V.!? idx) rest
walkJSPath _ _ = Nothing

-- | Whether a response from jwtClaims contains a role claim
containsRole :: JWTClaims -> Bool
containsRole = M.member "role"
|
steve-chavez/postgrest
|
src/PostgREST/Auth.hs
|
mit
| 2,987
| 0
| 12
| 656
| 666
| 355
| 311
| 49
| 5
|
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses, FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances, FunctionalDependencies, ConstraintKinds #-}
{-# LANGUAGE RankNTypes #-}
module Chimera.Engine.Core.Util (
boxVertex, boxVertexRotated
, cutIntoN
, rot2M, rotate2
, insertIM, insertIM', insertsIM'
, (<=~)
) where
import FreeGame
import Control.Lens
import Control.Monad.State.Class
import qualified Data.IntMap.Strict as IM
infixl 5 <=~
-- | Four corners of an axis-aligned box centred at @pos@ with the given
-- half-size, in order: top-left, top-right, bottom-right, bottom-left.
boxVertex :: Vec2 -> Vec2 -> [Vec2]
boxVertex pos size = [pos - size,
                      pos + V2 (size^._x) (-size^._y),
                      pos + size,
                      pos + V2 (-size^._x) (size^._y)]
-- | Corners of a box of the given half-size, rotated about the origin by
-- @ang@ radians and then translated to @pos@.
-- (The two maps of the original are fused into one traversal.)
boxVertexRotated :: Vec2 -> Vec2 -> Double -> [Vec2]
boxVertexRotated pos size ang =
  map (\v -> pos + v `rotate2` ang) (boxVertex 0 size)
-- | Split a bitmap horizontally into n equal-width strips.
cutIntoN :: Int -> Bitmap -> [Bitmap]
cutIntoN n img = let (w,h) = bitmapSize img; w1 = w `div` n in
  [cropBitmap img (w1,h) (w1*i,0) | i <- [0..n-1]]

-- | 2x2 rotation matrix for angle r (note the negated angle: this pairs
-- with (!*) in 'rotate2' to rotate vectors by r).
rot2M :: Double -> M22 Double
rot2M r = let c = cos(-r); s = sin(-r) in
  V2 (V2 c (-s))
     (V2 s c)

-- | Rotate a 2D vector by r radians.
rotate2 :: Vec2 -> Double -> Vec2
rotate2 v r = rot2M r !* v
-- | Insert a value under a fresh key (one past the current maximum).
insertIM :: a -> IM.IntMap a -> IM.IntMap a
insertIM v = snd . insertIM' v

-- | Insert under a fresh key and also return that key:
-- 0 for an empty map, otherwise max key + 1.
insertIM' :: a -> IM.IntMap a -> (Int, IM.IntMap a)
insertIM' v m = (key, IM.insert key v m)
  where
    key
      | IM.null m = 0
      | otherwise = fst (IM.findMax m) + 1

-- | Insert several values, each under its own fresh key.
insertsIM' :: [a] -> IM.IntMap a -> IM.IntMap a
insertsIM' vs m = foldr insertIM m vs
-- | Monadic in-place update through a lens: read the focused value,
-- run the action on it, and write the result back.
(<=~) :: (MonadState s m) => Lens' s a -> (a -> m a) -> m ()
l <=~ f = l <~ (f =<< use l)
|
myuon/Chimera
|
Chimera/Engine/Core/Util.hs
|
mit
| 1,580
| 0
| 11
| 375
| 730
| 390
| 340
| 41
| 1
|
module Feature.NonexistentSchemaSpec where
import Network.Wai (Application)
import Test.Hspec
import Test.Hspec.Wai
import Protolude hiding (get)
spec :: SpecWith ((), Application)
-- Behaviour when the configured API schema does not exist in the database:
-- the root path still responds, but tables resolve to 404.
spec =
  describe "Non existent api schema" $ do
    it "succeeds when requesting root path" $
      get "/" `shouldRespondWith` 200

    it "gives 404 when requesting a nonexistent table in this nonexistent schema" $
      get "/nonexistent_table" `shouldRespondWith` 404
|
steve-chavez/postgrest
|
test/Feature/NonexistentSchemaSpec.hs
|
mit
| 458
| 0
| 10
| 83
| 102
| 56
| 46
| 12
| 1
|
module Unused.LikelihoodCalculator
( calculateLikelihood
, LanguageConfiguration
) where
import qualified Data.List as L
import qualified Data.Maybe as M
import qualified Unused.ResponseFilter as RF
import Unused.ResultsClassifier (LanguageConfiguration(..), LowLikelihoodMatch(..))
import Unused.Types (TermResults(..), Occurrences(..), RemovalLikelihood(..), Removal(..), totalOccurrenceCount)
-- | Assign a removal likelihood (and explanatory message) to a term's
-- results, based on language-specific low-likelihood rules and on how
-- often the term occurs.
calculateLikelihood :: [LanguageConfiguration] -> TermResults -> TermResults
calculateLikelihood lcs r =
  r { trRemoval = uncurry Removal newLikelihood }
  where
    -- Guards are ordered: explicit language rules first, then
    -- occurrence-count heuristics.
    newLikelihood
      | M.isJust firstAutoLowLikelihood = (Low, autoLowLikelihoodMessage)
      | singleNonTestUsage r && testsExist r = (High, "only the definition and corresponding tests exist")
      | doubleNonTestUsage r && testsExist r = (Medium, "only the definition and one other use, along with tests, exists")
      | totalScore < 2 = (High, "occurs once")
      | totalScore < 6 = (Medium, "used semi-frequently")
      | totalScore >= 6 = (Low, "used frequently")
      | otherwise = (Unknown, "could not determine likelihood")
    totalScore = totalOccurrenceCount r
    -- First language configuration whose auto-low-likelihood rules match.
    firstAutoLowLikelihood = L.find (`RF.autoLowLikelihood` r) lcs
    autoLowLikelihoodMessage = maybe "" languageConfirmationMessage firstAutoLowLikelihood

-- | Human-readable explanation of why a language rule marked the term low.
languageConfirmationMessage :: LanguageConfiguration -> String
languageConfirmationMessage lc =
  langFramework ++ ": allowed term or " ++ lowLikelihoodNames
  where
    langFramework = lcName lc
    lowLikelihoodNames = L.intercalate ", " $ map smName $ lcAutoLowLikelihood lc

-- | Exactly one occurrence outside of tests.
singleNonTestUsage :: TermResults -> Bool
singleNonTestUsage = (1 ==) . oOccurrences . trAppOccurrences

-- | Exactly two occurrences outside of tests.
doubleNonTestUsage :: TermResults -> Bool
doubleNonTestUsage = (2 ==) . oOccurrences . trAppOccurrences

-- | At least one occurrence in test files.
testsExist :: TermResults -> Bool
testsExist = (> 0) . oOccurrences . trTestOccurrences
|
joshuaclayton/unused
|
src/Unused/LikelihoodCalculator.hs
|
mit
| 1,929
| 0
| 11
| 345
| 459
| 258
| 201
| 33
| 1
|
import Control.Monad
-- Repeat each element of the list n times, in order.
f n arr = concatMap (replicate n) arr
-- This part handles the Input and Output and can be used as it is. Do not modify this part.
-- Read the repetition count, then echo each input-line integer n times.
main = do
  n <- readLn :: IO Int
  inputdata <- getContents
  mapM_ putStrLn $ map show $ f n $ map (read :: String -> Int) $ lines inputdata
|
jerryzh168/hackerrank
|
FunctionalProgramming/Introduction/ListReplications.hs
|
mit
| 333
| 0
| 11
| 81
| 106
| 51
| 55
| 7
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module ZoomHub.Web.Errors
( error400,
error401,
error404,
error503,
)
where
import qualified Data.ByteString.Lazy.UTF8 as BU
import Servant (ServerError, err400, err401, err404, err503, errBody, errHeaders)
-- | 400 Bad Request with an HTML body.
error400 :: String -> ServerError
error400 = mkError err400

-- | 401 Unauthorized with an HTML body.
error401 :: String -> ServerError
error401 = mkError err401

-- | 404 Not Found with an HTML body.
error404 :: String -> ServerError
error404 = mkError err404

-- | 503 Service Unavailable with an HTML body.
error503 :: String -> ServerError
error503 = mkError err503

-- | Attach an HTML content-type header and a UTF-8 message body to a
-- base Servant error.
mkError :: ServerError -> String -> ServerError
mkError errorType message =
  errorType
    { errHeaders = [("Content-Type", "text/html; charset=utf-8")],
      errBody = BU.fromString message
    }
|
zoomhub/zoomhub
|
src/ZoomHub/Web/Errors.hs
|
mit
| 704
| 0
| 8
| 125
| 178
| 106
| 72
| 21
| 1
|
module Command.Summary.Calculate (calculateSummary) where
import Data.Either
import Data.List
import Data.Maybe
import Data.Time
import Database.HDBC
import Database.HDBC.Sqlite3
import Text.PrettyPrint.Boxes
import Command.Summary.Types
import Currency
import Settings
import Util
-- amount, frequency, start
data TimeRule = TimeRule Integer String UTCTime
-- percentage, rule amount, start, transaction amount
data CategoryRule = CategoryRule Double Integer UTCTime Integer
-- | Compute the unallocated balance plus the balance of every envelope:
-- total of all accounts, each envelope's amount minus its transactions,
-- then time/category rules applied on top.
calculateSummary :: IO (Integer, [Envelope])
calculateSummary = do
  conn <- getDbConnection
  accountsResult <- quickQuery' conn "SELECT SUM(account.balance) FROM account" []
  -- NOTE(review): head-of-head assumes the aggregate query always
  -- returns one row with one column (true for SUM, but partial).
  let totalAccountBalance = fromSql . head . head $ accountsResult
  envelopesResult <- quickQuery'
    conn
    ("SELECT envelope.id, envelope.name, envelope.amount, SUM([transaction].amount)"
      ++ " FROM envelope"
      ++ " LEFT OUTER JOIN envelope_category ON envelope_category.envelope_id = envelope.id"
      ++ " LEFT OUTER JOIN category ON category.id = envelope_category.category_id OR category.parent_id = envelope_category.category_id"
      ++ " LEFT OUTER JOIN [transaction] ON [transaction].category_id = category.id"
      ++ " GROUP BY envelope.name, envelope.amount"
      ++ " ORDER BY envelope.name ASC")
    []
  -- Rows that fail conversion are silently dropped by calcEnvelopeBalance.
  let eBalances = mapMaybe calcEnvelopeBalance envelopesResult
  eBalancesWithRules <- mapM (calcRules' conn) eBalances
  disconnect conn
  let totalRemainingBalance = totalAccountBalance - envelopeSum eBalancesWithRules
  return (totalRemainingBalance, eBalancesWithRules)
-- | Apply the envelope's time rules and category rules to its balance.
calcRules' :: Connection -> Envelope -> IO Envelope
calcRules' conn envelope@(Envelope eId eName _) = do
  now <- getCurrentTime
  -- TODO: query based on start date?
  timeRulesResult <- quickQuery'
    conn
    ("SELECT time_rule.amount, time_rule.frequency, time_rule.start"
      ++ " FROM time_rule"
      ++ " WHERE time_rule.envelope_id = ?")
    [toSql eId]
  -- TODO: query based on start date
  categoryRulesResult <- quickQuery'
    conn
    ("SELECT category_rule.percentage, category_rule.amount, category_rule.start, [transaction].amount"
      ++ " FROM category_rule"
      ++ " INNER JOIN [transaction] ON category_rule.category_id = [transaction].category_id"
      ++ " WHERE category_rule.envelope_id = ? AND [transaction].[date] >= category_rule.start")
    [toSql eId]
  let timeRules = map timeRuleFromSql timeRulesResult
  let categoryRules = map categoryRuleFromSql categoryRulesResult
  -- Time rules first, then category rules, folded over the envelope.
  let timeRuleEnvelope = foldr (calcTimeRule' now) envelope timeRules
  let categoryRuleEnvelope = foldr calcCategoryRule' timeRuleEnvelope categoryRules
  return categoryRuleEnvelope
-- TODO: don't use fromJust, instead convert from database immediately
-- | Add (occurrences-so-far * rule amount) to the envelope balance.
-- TODO: don't use fromJust, instead convert from database immediately
-- NOTE(review): 'fromJust' crashes on an unrecognised frequency string.
calcTimeRule' :: UTCTime -> TimeRule -> Envelope -> Envelope
calcTimeRule' now (TimeRule rAmount frequency start) (Envelope eId eName eAmount) =
  let n = numberOfOccurrences start now (fromJust (lookup frequency frequencyMap))
  in Envelope eId eName (eAmount + n * rAmount)

-- | Add a percentage of the transaction amount plus the rule amount,
-- rounding the final balance down.
calcCategoryRule' :: CategoryRule -> Envelope -> Envelope
calcCategoryRule' (CategoryRule percentage rAmount _ tAmount) (Envelope eId eName eAmount) =
  let amount = (fromIntegral eAmount) + percentage * (fromIntegral tAmount) + (fromIntegral rAmount)
  in Envelope eId eName (floor amount)

-- | Convert a three-column row into a TimeRule.
-- NOTE(review): partial — any other row shape is a pattern-match failure.
timeRuleFromSql :: [SqlValue] -> TimeRule
timeRuleFromSql (amount:frequency:start:[]) =
  TimeRule (fromSql amount) (fromSql frequency) (fromSql start)

-- | Convert a four-column row into a CategoryRule (partial, as above).
categoryRuleFromSql :: [SqlValue] -> CategoryRule
categoryRuleFromSql (percentage:rAmount:start:tAmount:[]) =
  CategoryRule (fromSql percentage) (fromSql rAmount) (fromSql start) (fromSql tAmount)

-- | Sum of all envelope balances.
envelopeSum :: [Envelope] -> Integer
envelopeSum = sum . map (\(Envelope _ _ amount) -> amount)

-- | Convert a row into an Envelope, subtracting the transaction total
-- when present; Nothing when the row cannot be converted at all.
-- NOTE(review): the final 'otherwise' here is a variable pattern that
-- matches anything — it works, but shadows Prelude.otherwise.
calcEnvelopeBalance :: [SqlValue] -> Maybe Envelope
calcEnvelopeBalance (eId:eName:eAmount:tAmount:[]) =
  case (safeFromSql eId, safeFromSql eName, safeFromSql eAmount, safeFromSql tAmount) of
    (Right eIdConverted, Right eNameConverted, Right eAmountConverted, Right tAmountConverted) ->
      Just (Envelope eIdConverted eNameConverted (eAmountConverted - tAmountConverted))
    (Right eIdConverted, Right eNameConverted, Right eAmountConverted, _) ->
      Just (Envelope eIdConverted eNameConverted eAmountConverted)
    otherwise -> Nothing
|
jpotterm/manila-hs
|
src/Command/Summary/Calculate.hs
|
cc0-1.0
| 4,498
| 0
| 15
| 854
| 1,000
| 506
| 494
| 79
| 3
|
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import qualified SDL
import qualified Common as C
import Control.Monad.Loops (iterateUntilM)
import Data.Foldable (foldl')
-- | User intents derived from SDL events.
data Intent
  = Idle
  | Quit
  | Press Quadrant
  | Release Quadrant
  | Hover Quadrant
  | Leave Quadrant

-- | Application state: exit flag plus the visual state of the four panes.
data World = World
  { exiting :: Bool
  , panes :: PaneMap
  }

-- | One pane per screen quadrant.
data PaneMap = PaneMap
  { topLeft :: Pane
  , topRight :: Pane
  , bottomLeft :: Pane
  , bottomRight :: Pane
  }

-- | Visual state of a pane, driven by mouse interaction.
data Pane
  = Out
  | Over
  | Down
  | Up

-- | The four quadrants of the 640x480 window.
data Quadrant
  = TopLeft
  | TopRight
  | BottomLeft
  | BottomRight

-- | Starting state: not exiting, all panes Out.
initialWorld :: World
initialWorld = World
  { exiting = False
  , panes = initialPanes
  }

initialPanes :: PaneMap
initialPanes = PaneMap
  { topLeft = Out
  , topRight = Out
  , bottomLeft = Out
  , bottomRight = Out
  }
-- | Set up SDL, load the sprite sheet, then loop: poll events, update
-- the world, render — until the world's exiting flag is set.
main :: IO ()
main = C.withSDL $ C.withSDLImage $ do
  C.setHintQuality
  C.withWindow "Lesson 17" (640, 480) $ \w ->
    C.withRenderer w $ \r -> do
      t <- C.loadTextureWithInfo r "./assets/mouse_states.png"
      let doRender = renderWorld r t

      _ <- iterateUntilM
        exiting
        (\x ->
          updateWorld x <$> SDL.pollEvents
          >>= \x' -> x' <$ doRender x'
        )
        initialWorld

      SDL.destroyTexture (fst t)
-- | Fold a batch of SDL events into the world, event by event.
updateWorld :: World -> [SDL.Event] -> World
updateWorld w
  = foldl' (flip applyIntent) w
  . fmap (payloadToIntent . SDL.eventPayload)

-- | Translate a raw SDL event payload into an application intent.
payloadToIntent :: SDL.EventPayload -> Intent
payloadToIntent SDL.QuitEvent = Quit
payloadToIntent (SDL.MouseMotionEvent e) = motionIntent e
payloadToIntent (SDL.MouseButtonEvent e) = buttonIntent e
payloadToIntent _ = Idle

-- | Mouse movement hovers over whichever quadrant contains the cursor.
motionIntent :: SDL.MouseMotionEventData -> Intent
motionIntent e = Hover q
  where
    q = selectQuadrant x y
    (SDL.P (SDL.V2 x y)) = SDL.mouseMotionEventPos e

-- | Mouse button press/release targets the quadrant under the cursor.
buttonIntent :: SDL.MouseButtonEventData -> Intent
buttonIntent e = t q
  where
    q = selectQuadrant x y
    (SDL.P (SDL.V2 x y)) = SDL.mouseButtonEventPos e
    t = if SDL.mouseButtonEventMotion e == SDL.Pressed
        then Press
        else Release
-- | Map a point in the 640x480 window to the quadrant containing it
-- (split at x = 320, y = 240). The original's four guards were already
-- exhaustive, so the unreachable 'undefined' fallback is removed by
-- restructuring into two total comparisons.
selectQuadrant :: (Num a, Ord a) => a -> a -> Quadrant
selectQuadrant x y
  | y < 240   = if x < 320 then TopLeft else TopRight
  | otherwise = if x < 320 then BottomLeft else BottomRight
-- | Dispatch an intent to its world-transforming handler.
applyIntent :: Intent -> World -> World
applyIntent (Press q) = pressWorld q
applyIntent (Release q) = releaseWorld q
applyIntent (Hover q) = hoverWorld q
applyIntent (Leave q) = leaveWorld q
applyIntent Idle = idleWorld
applyIntent Quit = quitWorld

-- | Apply f to the pane of the named quadrant and g to the other three.
updatePaneMap :: (Pane -> Pane) -> (Pane -> Pane) -> Quadrant -> PaneMap -> PaneMap
updatePaneMap f g TopLeft (PaneMap tl tr bl br) = PaneMap (f tl) (g tr) (g bl) (g br)
updatePaneMap f g TopRight (PaneMap tl tr bl br) = PaneMap (g tl) (f tr) (g bl) (g br)
updatePaneMap f g BottomLeft (PaneMap tl tr bl br) = PaneMap (g tl) (g tr) (f bl) (g br)
updatePaneMap f g BottomRight (PaneMap tl tr bl br) = PaneMap (g tl) (g tr) (g bl) (f br)

-- | Press: the targeted pane goes Down; the rest are unchanged.
pressWorld :: Quadrant -> World -> World
pressWorld q w = w { panes = panes' }
  where panes' = updatePaneMap setDown id q (panes w)

-- | Release: the targeted pane goes Up (if it was Down).
releaseWorld :: Quadrant -> World -> World
releaseWorld q w = w { panes = panes' }
  where panes' = updatePaneMap setUp id q (panes w)

-- | Hover: the targeted pane goes Over, all others go Out.
hoverWorld :: Quadrant -> World -> World
hoverWorld q w = w { panes = panes' }
  where panes' = updatePaneMap setOver setOut q (panes w)

-- | Leave: inverse of hover — target goes Out, others go Over.
leaveWorld :: Quadrant -> World -> World
leaveWorld q w = w { panes = panes' }
  where panes' = updatePaneMap setOut setOver q (panes w)

-- | To Out, except a Down pane stays Down.
setOut :: Pane -> Pane
setOut Down = Down
setOut _ = Out

-- | To Over, except Down and Up panes keep their state.
setOver :: Pane -> Pane
setOver Down = Down
setOver Up = Up
setOver _ = Over

-- | Unconditionally Down.
setDown :: Pane -> Pane
setDown _ = Down

-- | Down becomes Up; anything else is unchanged.
setUp :: Pane -> Pane
setUp Down = Up
setUp p = p

-- | No-op intent.
idleWorld :: World -> World
idleWorld = id

-- | Mark the world as exiting; the main loop stops on this flag.
quitWorld :: World -> World
quitWorld w = w { exiting = True }
-- | Clear, draw the four panes, and present the frame.
renderWorld :: SDL.Renderer -> (SDL.Texture, SDL.TextureInfo) -> World -> IO ()
renderWorld r t w = do
  SDL.clear r
  drawWorld r t w
  SDL.present r

-- | Blit one sprite-sheet cell per quadrant: the cell is chosen by the
-- pane's state (getMask), the destination by the quadrant (getPosition).
drawWorld :: SDL.Renderer -> (SDL.Texture, SDL.TextureInfo) -> World -> IO ()
drawWorld r (t, ti) w = do
  renderPane (topLeft $ panes w) TopLeft
  renderPane (topRight $ panes w) TopRight
  renderPane (bottomLeft $ panes w) BottomLeft
  renderPane (bottomRight $ panes w) BottomRight

  where
    tw :: Double
    tw = fromIntegral $ SDL.textureWidth ti
    th = fromIntegral $ SDL.textureHeight ti
    -- One quarter of the sheet: a single pane sprite.
    s = C.mkRect 0 0 (tw / 2) (th / 2)
    mFor c = s `moveTo` getMask c
    pFor c = s `moveTo` getPosition c
    renderPane p q
      = SDL.copy r t
          (Just $ floor <$> mFor p)
          (Just $ floor <$> pFor q)

-- | Source offset in the sprite sheet for a pane state.
getMask :: (Num a) => Pane -> (a, a)
getMask Out = ( 0, 0)
getMask Over = (320, 0)
getMask Down = ( 0, 240)
getMask Up = (320, 240)

-- | Destination offset on screen for a quadrant.
getPosition :: (Num a) => Quadrant -> (a, a)
getPosition TopLeft = ( 0, 0)
getPosition TopRight = (320, 0)
getPosition BottomLeft = ( 0, 240)
getPosition BottomRight = (320, 240)

-- | Relocate a rectangle to the given corner, keeping its dimensions.
moveTo :: SDL.Rectangle a -> (a, a) -> SDL.Rectangle a
moveTo (SDL.Rectangle _ d) (x, y) = SDL.Rectangle (C.mkPoint x y) d
|
palf/haskellSDL2Examples
|
examples/lesson17/src/Lesson17.hs
|
gpl-2.0
| 5,348
| 0
| 21
| 1,395
| 2,072
| 1,081
| 991
| 155
| 2
|
module Expressions where
import qualified Data.Map as Map
import Control.Monad.State
import Control.Monad.Error
-- S-expressions
-- S-expressions
data LispExpr = Blank |
                LispInt Integer |
                LispSymbol String |
                -- anonymous function: body result + parameter names
                LispLambda LispResult FunctionSignature |
                -- special form: arguments are not pre-evaluated
                LispSpecial LispResult FunctionSignature |
                -- named function
                LispFunc LispResult FunctionName FunctionSignature |
                LispList [LispExpr]

type FunctionName = String
type FunctionSignature = [String]
type SymbolTable = Map.Map String LispExpr

-- Context (scope) in which expressions are be evaluated
data Context = Ctx { contextSymbols :: SymbolTable, parentContext :: (Maybe Context) }

-- IO Monad + error handling in String form, IO is the inner monad
type LispError = ErrorT String IO

-- State monad holds a context as the state, the error monad as an inner monad and an evaluation result
type LispResult = StateT Context LispError LispExpr

-- Helper functions for context manipulation

-- Bind a symbol in the current (innermost) scope.
updateSymbol symbol value = modify (\(Ctx sym_table parentCtx) -> (Ctx (Map.insert symbol value sym_table)) parentCtx)

-- Bind a symbol one scope up; a no-op when there is no parent scope.
updateSymbolInParent symbol value = modify (\(Ctx sym_table parent_ctx)->(Ctx sym_table (updatedCtx parent_ctx)))
  where updatedCtx (Just (Ctx sym_table ctx)) = (Just (Ctx (Map.insert symbol value sym_table) ctx))
        updatedCtx Nothing = Nothing

-- Enter a new empty scope whose parent is the given context.
pushContext ctx = Ctx Map.empty (Just ctx)

-- Leave the current scope; the root context pops to itself.
popContext ctx@(Ctx _ Nothing) = ctx
popContext (Ctx _ (Just parentCtx)) = parentCtx

-- Printing s-expressions
instance Show LispExpr where
  show Blank = ""
  show (LispInt x) = show x
  show (LispSymbol x) = x
  show (LispLambda _ sig) = "<lambda (" ++ (unwords sig) ++ ")>"
  show (LispFunc _ name sig) = "<function " ++ (show name) ++ " (" ++ (unwords sig) ++ ")>"
  show (LispSpecial _ sig) = "<special-form (" ++ (unwords sig) ++ ")>"
  show (LispList x) = "(" ++ unwords (map show x) ++ ")"

-- NOTE(review): equality is deliberately shallow — only ints, symbols
-- and the empty list compare equal; non-empty lists are never equal.
instance Eq LispExpr where
  (LispInt a) == (LispInt b) = a == b
  (LispSymbol a) == (LispSymbol b) = a == b
  (LispList []) == (LispList []) = True
  _ == _ = False
|
aksiazek/yali
|
src/Expressions.hs
|
gpl-2.0
| 2,130
| 26
| 17
| 489
| 680
| 354
| 326
| 37
| 2
|
module Handler.Bug where
import Import
-- | Show a single bug; 404 when the id does not exist in the database.
getBugR :: BugId -> Handler Html
getBugR bugId = do
  bug <- runDB $ get404 bugId
  defaultLayout $ do
    -- Page title is the bug's own title.
    setTitle $ toHtml $ bugTitle bug
    $(widgetFile "bug")
|
juanvallejo/portable-bug-tracker
|
Handler/Bug.hs
|
gpl-2.0
| 217
| 0
| 12
| 58
| 74
| 35
| 39
| 8
| 1
|
module Literate where
import Data.Monoid
import Data.DList hiding (foldr, map)
import MarXupParser
import Data.List (isPrefixOf)
import Output
import Config
----------------------------------------------
-- Top-level generation
-- | Render a sequence of Haskell chunks into one document.
-- Uses 'foldMap' (consistent with 'rMarxup' below) instead of
-- @mconcat . map@.
rHaskells :: [Haskell] -> Doc
rHaskells = foldMap rHaskell
-- | Render one Haskell chunk: line markers keep position info, quoted
-- MarXup is rendered recursively, everything else is dropped.
rHaskell :: Haskell -> DList Char
rHaskell (HaskLn pos) = oPos pos <> text "\n"
rHaskell (Quote xs) = mconcat $ map rMarxup xs
rHaskell _ = mempty

-- | Only unquotes invoking a function whose name starts with "haskell"
-- contribute output; their quoted code is rendered inline.
rMarxup :: MarXup -> Doc
rMarxup (Unquote _ [(_,HaskChunk fct),(position,Quote code)]) | "haskell" `isPrefixOf` fct = oPos position <> foldMap rInlineHask code
rMarxup _ = mempty

-- | Render a single inline-Haskell fragment.
-- NOTE(review): the QuotedAntiQuote case is partial — it crashes if
-- 'antiQuoteStrings' (from Config) is empty; confirm it never is.
rInlineHask :: MarXup -> Doc
rInlineHask (TextChunk x) = text x
rInlineHask QuotedAntiQuote = case antiQuoteStrings of
  x:_ -> text x
rInlineHask _ = mempty
|
jyp/MarXup
|
Literate.hs
|
gpl-2.0
| 790
| 0
| 10
| 124
| 280
| 147
| 133
| 21
| 1
|
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
-----------------------------------------------------------------------------
--
-- Module : Math.InnerProduct
-- Copyright : Christian Gosch
-- License : BSD3
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Numeric.Jalla.InnerProduct (
InnerProduct(..)
) where
{-| A class that is used to define the canonical inner product on
/CVector/ type vectors. The functional dependency @o -> f@ means the
scalar result type is determined by the vector type. -}
class InnerProduct o f | o -> f where
  -- | Inner product of two vectors, yielding a scalar of type @f@.
  innerProduct :: o -> o -> f
|
cgo/jalla
|
Numeric/Jalla/InnerProduct.hs
|
gpl-3.0
| 616
| 0
| 8
| 107
| 61
| 42
| 19
| 5
| 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.