| code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
module Physics.Broadphase.Grid where
import GHC.Generics (Generic)
import GHC.Types (Double (D#))
import Control.Monad.ST
import Control.DeepSeq
import Control.Lens
import Data.Foldable (foldl')
import qualified Data.IntMap.Strict as IM
import Data.List (sortBy)
import Data.Maybe (mapMaybe)
import qualified Data.Vector.Unboxed as V
import Data.Vector.Unboxed.Deriving
import Physics.Broadphase.Aabb (Aabb (..), Bounds (..),
aabbCheck, toTaggedAabbs)
import qualified Physics.Constraint as C
import Physics.Contact.ConvexHull
import Physics.World
import Utils.Descending
import Utils.Utils
{- |
The grid is indexed in row-major order:
3 4 5
0 1 2
(where X is horizontal and Y is vertical)
* The grid is only used for shape queries, so it should only contain AABBs.
* We may want a reverse-lookup from shape ID to grid squares in the future.
-}
data Grid = Grid
{ _gridSquares :: IM.IntMap (IM.IntMap TaggedAabb)
, _gridX :: !GridAxis
, _gridY :: !GridAxis
} deriving (Eq, Show, Generic, NFData)
data GridAxis = GridAxis
{ _gridLength :: !Int
, _gridUnit :: !Double
, _gridOrigin :: !Double
} deriving (Eq, Show, Generic, NFData)
data TaggedAabb = TaggedAabb
{ _taggedStatic :: !Bool
, _taggedBox :: !Aabb
} deriving (Eq, Show, Generic, NFData)
makeLenses ''Grid
makeLenses ''GridAxis
toGrid :: (GridAxis, GridAxis) -> World s label -> ST s Grid
toGrid axes@(xAxis, yAxis) world = do
taggedAabbs <- toTaggedAabbs isStatic world
return $ Grid (fromTaggedAabbs axes taggedAabbs) xAxis yAxis
where
isStatic i = (C.isStatic . C._physObjInvMass) <$> readPhysObj world i
culledKeys :: Grid -> Descending (Int, Int)
culledKeys Grid{..} = Descending . uniq . sortBy f . concat $ culledKeys' <$> IM.elems _gridSquares
where f x y = case compare x y of LT -> GT
EQ -> EQ
GT -> LT
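-- (f is simply 'flip compare'; sorting with it puts the keys in descending
-- order, matching the 'Descending' wrapper.)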
culledKeys' :: IM.IntMap TaggedAabb -> [(Int, Int)]
culledKeys' square = mapMaybe colliding $ allPairs $ IM.toDescList square
where colliding ((_, TaggedAabb True _), (_, TaggedAabb True _)) = Nothing
-- ^ Don't check two static shapes for collision.
colliding ((a, TaggedAabb _ boxA), (b, TaggedAabb _ boxB))
| aabbCheck boxA boxB = Just (a, b)
| otherwise = Nothing
allPairs :: [a] -> [(a, a)]
allPairs [] = []
allPairs (x:xs) = f [] x xs
where f accumPairs first [] = accumPairs
f accumPairs first remaining@(x:xs) = f (foldl' g accumPairs remaining) x xs
where g accumPairs x = (first, x):accumPairs
uniq :: Eq a => [a] -> [a]
uniq [] = []
uniq [x] = [x]
uniq (x:y:rest)
| x == y = uniq (x:rest)
| otherwise = x : uniq (y:rest)
fromTaggedAabbs :: (GridAxis, GridAxis) -> V.Vector (Int, Aabb, Bool) -> IM.IntMap (IM.IntMap TaggedAabb)
fromTaggedAabbs (x, y) = V.foldl' insertBox IM.empty
where
insertBox grid (key, box, isStatic) = foldl' insertBoxAt grid indices
where
indices = boxIndices (x, y) box
insertBoxAt grid index =
grid & at index . non IM.empty . at key .~ Just taggedBox
taggedBox = TaggedAabb isStatic box
-- | Flatten a pair of axial indices to a single grid index.
flattenIndex :: Grid -> (Int, Int) -> Int
flattenIndex Grid{..} (x, y) = flattenIndex' _gridX (x, y)
-- | Flatten a pair of axial indices to a single grid index.
flattenIndex' :: GridAxis -> (Int, Int) -> Int
flattenIndex' xAxis@GridAxis{..} (x, y) = x + (y * _gridLength)
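-- For example, on a grid whose X axis has a '_gridLength' of 3 (as in the
-- row-major diagram in the module header), the square at column 1, row 1
-- flattens to index 4: flattenIndex' (GridAxis 3 1 0) (1, 1) == 4.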
-- | Flattened grid index of a given point.
pointIndex :: Grid -> (Double, Double) -> Int
pointIndex grid@Grid{..} (x, y) = flattenIndex' _gridX (i, j)
where i = axialIndex _gridX x
j = axialIndex _gridY y
-- | Index along a single axis.
axialIndex :: GridAxis -> Double -> Int
axialIndex GridAxis{..} val =
floor $ (val - _gridOrigin) / _gridUnit
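-- For example, with a unit of 2 and an origin of 0, the value 5.0 falls in
-- cell 2 (floor (5 / 2)) and the value -0.5 falls in cell -1.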
-- | All flattened grid indices that match a given 'Aabb'.
boxIndices :: (GridAxis, GridAxis) -> Aabb -> [Int]
boxIndices (xAxis, yAxis) Aabb {..} = do
x <- axisRange _aabbx xAxis
y <- axisRange _aabby yAxis
return $ flattenIndex' xAxis (x, y)
where
axisRange (Bounds min max) axis = [minIx .. maxIx]
where
minIx = axialIndex axis (D# min)
maxIx = axialIndex axis (D# max)
| ublubu/shapes | shapes/src/Physics/Broadphase/Grid.hs | mit | 4,986 | 0 | 13 | 1,369 | 1,489 | 807 | 682 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE GADTs #-}
module Bench.ARec where
import Data.Vinyl
import Data.Vinyl.ARec.Internal
import Data.Vinyl.Syntax ()
import Bench.Rec
mkARec :: Int -> ARec ElField Fields
mkARec i = arec (Field i `arcons` Field i `arcons` Field i `arcons` Field i `arcons`
Field i `arcons` Field i `arcons` Field i `arcons` Field i `arcons`
Field i `arcons` Field i `arcons` Field i `arcons` Field i `arcons`
Field i `arcons` Field i `arcons` Field i `arcons` Field 99 `arcons`
arnil)
mkToARec :: Int -> ARec ElField Fields
mkToARec i = toARec (Field i :& Field i :& Field i :& Field i :&
Field i :& Field i :& Field i :& Field i :&
Field i :& Field i :& Field i :& Field i :&
Field i :& Field i :& Field i :& Field 99 :&
RNil)
sumARec :: ARec ElField Fields -> Int
sumARec str =
get #a0 str + get #a1 str + get #a2 str + get #a3 str + get #a4 str
+ get #a5 str + get #a6 str + get #a7 str + get #a8 str
+ get #a9 str + get #a10 str + get #a11 str + get #a12 str
+ get #a13 str + get #a14 str + get #a15 str
where
get label r = rvalf label r
{-# INLINE get #-}
| VinylRecords/Vinyl | benchmarks/Bench/ARec.hs | mit | 1,411 | 0 | 23 | 434 | 530 | 265 | 265 | 31 | 1 |
module Antiqua.Utils where
select :: a -> a -> Bool -> a
select f t b = if b then t else f
(&&&) :: (a -> Bool) -> (a -> Bool ) -> a -> Bool
(&&&) f g x = all id [f x, g x]
(|@|) :: (a, b) -> c -> (a, b, c)
(|@|) (x, y) z = (x, y, z)
(.:) :: (a -> b) -> (x -> y -> a) -> x -> y -> b
(.:) = (.) . (.)
fold3 :: [a] -> b -> c -> (a -> b -> c -> (a, b, c)) -> ([a], b, c)
fold3 xs w y f = foldl (\(acc, w', y') x -> let (p, q, r) = f x w' y' in (p:acc, q, r)) ([], w, y) xs
fold2 :: [a] -> b -> (a -> b -> (a, b)) -> ([a], b)
fold2 xs w f = foldl (\(acc, w') x -> let (p, q) = f x w' in (p:acc, q)) ([], w) xs
-- Returns the first element satisfying the predicate, together with the list
-- with that element removed (the elements before the match come back in
-- reverse order).
splitFind :: (a -> Bool) -> [a] -> Maybe (a, [a])
splitFind _ [] = Nothing
splitFind g xs =
let inner _ _ [] = Nothing
inner f acc (y:ys) =
if f y
then Just (y, acc ++ ys)
else inner f (y:acc) ys
in
inner g [] xs
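-- For example: splitFind (> 2) [1, 2, 3, 4] == Just (3, [2, 1, 4]).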
headOpt :: [a] -> Maybe a
headOpt (x:_) = Just x
headOpt _ = Nothing
clamp :: Ord a => a -> a -> a -> a
clamp mn mx value
| value < mn = mn
| value > mx = mx
| otherwise = value
| olive/antiqua-prime | src/Antiqua/Utils.hs | mit | 1,183 | 0 | 13 | 371 | 748 | 412 | 336 | 30 | 3 |
{-|
Module : Bang
Description : A Domain Specific Language for generating drum compositions
Copyright : (c) Benjamin Kovach, 2014
License : MIT
Maintainer : bkovach13@gmail.com
Stability : experimental
Portability : Mac OSX
The Bang module exports the main functions to actually play a constructed composition. You can use either 'bang' to
play a composition a single time, or 'bangR' to continuously repeat a composition /ad infinitum/.
-}
module Bang
( bang
, bangR
, bangWith
, bangRWith
, Options(..)
, defaultOptions
, play
, module Bang.Music
, module Bang.Interface
, (<>)
)
where
import Control.Concurrent
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.State
import System.Info
import System.MIDI
import Bang.Interface
import Bang.Interpreter
import Bang.Music
data Options = Options {
o_bpm :: Integer,
-- ^ BPM of the composition to play
o_tempo :: Rational
-- ^ Initial 'Tempo' of the composition to play
} deriving (Show, Eq)
-- | Default options to 'bang' with.
--
-- > defaultOptions = Options{ o_bpm = 120, o_tempo = 1 }
defaultOptions :: Options
defaultOptions = Options {o_bpm = 120, o_tempo = 1}
-- | Play a composition over the first system `Destination` for MIDI events.
--
-- > bang = bangWith defaultOptions
bang :: Music PercussionSound -> IO ()
bang = bangWith defaultOptions
-- | 'bang' a composition repeatedly.
--
-- > bangR = bang . mconcat . repeat
bangR :: Music PercussionSound -> IO ()
bangR = bangRWith defaultOptions
-- | 'bangR' with specified 'Options'.
--
-- > bangRWith opts = bangWith opts . mconcat . repeat
bangRWith :: Options -> Music PercussionSound -> IO ()
bangRWith opts = bangWith opts . mconcat . repeat
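-- A minimal usage sketch (assuming 'bd', the single-hit composition used as the
-- dummy note in 'playWith' below, is in scope from the imported Bang modules):
--
-- > main = bangWith defaultOptions { o_bpm = 90 } (mconcat (replicate 4 bd))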
-- | 'bang' with specified 'Options'.
bangWith :: Options -> Music PercussionSound -> IO ()
bangWith opts song = do
dstlist <- enumerateDestinations
case dstlist of
[] -> fail "No MIDI Devices found."
(dst : _) -> do
name <- getName dst
putStrLn $ "Using MIDI device: " ++ name
conn <- openDestination dst
playWith opts conn song
-- | 'play' with specified 'Options'
playWith :: Options -> Connection -> Music PercussionSound -> IO ()
playWith (Options oBpm oTempo) conn song = do
-- Add a dummy note at the end 'cause Windows doesn't play the last one for some reason.
-- This is stupid, but windows is generally unsupported anyway.
let song' | os == "mingw32" || os == "mingw" = song <> bd
| otherwise = song
start conn
evalStateT runComposition (conn, interpret (bpm oBpm $ tempo oTempo song'))
close conn
-- | 'play' a composition over a given 'Connection'
play :: Connection -> Music PercussionSound -> IO ()
play conn song = do
start conn
evalStateT runComposition (conn, interpret song)
close conn
-- | Run a composition by repeatedly updating the `Connection` and sending events as they come.
runComposition :: StateT (Connection, [Primitive PercussionSound]) IO ()
runComposition = do
(conn, evs) <- get
t <- lift $ currentTime conn
case evs of
[] -> return ()
(Rest _ : xs) -> do
put (conn, xs)
lift $ threadDelay 250
runComposition
(e@(Note _ _) : xs) -> do
let (MidiEvent s ev) = drumToMidiEvent e
when (s < t) $ do
put (conn, xs)
lift $ send conn ev
lift $ threadDelay 250
runComposition
| 5outh/Bang | src/Bang.hs | mit | 3,526 | 0 | 16 | 880 | 780 | 401 | 379 | 71 | 3 |
{-# LANGUAGE TemplateHaskell #-}
module UnitTest.CallbackParse.OptinCallback where
import Data.Aeson (Value)
import Data.Yaml.TH (decodeFile)
import Test.Tasty as Tasty
import Web.Facebook.Messenger
import UnitTest.Internal
-----------
-- OPTIN --
-----------
optinCallbackVal :: Value
optinCallbackVal = $$(decodeFile "test/json/callback/optin_callback.json")
optinTest :: TestTree
optinTest = parseTest "Optin Callback" optinCallbackVal
$ standardMessaging (Just 1458692752478)
Nothing
$ CMOptin $ Optin "some_ref_or_another"
$ Just "another_ref"
| Vlix/facebookmessenger | test/UnitTest/CallbackParse/OptinCallback.hs | mit | 669 | 0 | 11 | 175 | 115 | 66 | 49 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module DynamoDbEventStore.Paging
(runStreamRequest
,FeedDirection(..)
,StreamResult(..)
,EventStartPosition(..)
,ReadStreamRequest(..)
,StreamOffset)
where
import DynamoDbEventStore.ProjectPrelude
import qualified Test.QuickCheck as QC
import qualified Pipes.Prelude as P
import Safe
import DynamoDbEventStore.Types (RecordedEvent(..), StreamId, QueryDirection(..))
data FeedDirection = FeedDirectionForward | FeedDirectionBackward
deriving (Eq, Show)
instance QC.Arbitrary FeedDirection where
arbitrary = QC.elements [FeedDirectionForward, FeedDirectionBackward]
data EventStartPosition = EventStartHead | EventStartPosition Int64 deriving (Show, Eq)
type StreamOffset = (FeedDirection, EventStartPosition, Natural)
data StreamResult = StreamResult {
streamResultEvents :: [RecordedEvent]
, streamResultFirst :: Maybe StreamOffset
, streamResultNext :: Maybe StreamOffset
, streamResultPrevious :: Maybe StreamOffset
, streamResultLast :: Maybe StreamOffset
} deriving Show
data ReadStreamRequest = ReadStreamRequest {
rsrStreamId :: StreamId,
rsrStartEventNumber :: Maybe Int64,
rsrMaxItems :: Natural,
rsrDirection :: FeedDirection
} deriving (Show)
buildStreamResult :: FeedDirection -> Maybe Int64 -> [RecordedEvent] -> Maybe Int64 -> Natural -> Maybe StreamResult
buildStreamResult _ Nothing _ _ _ = Nothing
buildStreamResult FeedDirectionBackward (Just lastEvent) events requestedStartEventNumber maxItems =
let
maxEventNumber = maximum $ recordedEventNumber <$> events
startEventNumber = fromMaybe maxEventNumber requestedStartEventNumber
nextEventNumber = startEventNumber - fromIntegral maxItems
in Just StreamResult {
streamResultEvents = events,
streamResultFirst = Just (FeedDirectionBackward, EventStartHead, maxItems),
streamResultNext =
if nextEventNumber >= 0 then
Just (FeedDirectionBackward, EventStartPosition nextEventNumber, maxItems)
else Nothing,
streamResultPrevious = Just (FeedDirectionForward, EventStartPosition (min (startEventNumber + 1) (lastEvent + 1)), maxItems),
streamResultLast =
if nextEventNumber >= 0 then
Just (FeedDirectionForward, EventStartPosition 0, maxItems)
else Nothing
}
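-- Worked example for the backward case: with lastEvent = Just 9, a requested
-- start of Just 9 and maxItems = 3, nextEventNumber is 6, so streamResultNext
-- points at EventStartPosition 6 and streamResultPrevious at EventStartPosition 10.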
buildStreamResult FeedDirectionForward (Just _lastEvent) events requestedStartEventNumber maxItems =
let
maxEventNumber = maximumMay $ recordedEventNumber <$> events
minEventNumber = minimumMay $ recordedEventNumber <$> events
nextEventNumber = fromMaybe (fromMaybe 0 ((\x -> x - 1) <$> requestedStartEventNumber)) ((\x -> x - 1) <$> minEventNumber)
previousEventNumber = (+1) <$> maxEventNumber
in Just StreamResult {
streamResultEvents = events,
streamResultFirst = Just (FeedDirectionBackward, EventStartHead, maxItems),
streamResultNext =
if nextEventNumber >= 0 then
Just (FeedDirectionBackward, EventStartPosition nextEventNumber, maxItems)
else Nothing,
streamResultPrevious = (\eventNumber -> (FeedDirectionForward, EventStartPosition eventNumber, maxItems)) <$> previousEventNumber,
streamResultLast =
if maybe True (> 0) minEventNumber then
Just (FeedDirectionForward, EventStartPosition 0, maxItems)
else Nothing
}
getLastEvent
:: (Monad m)
=> (QueryDirection -> StreamId -> Maybe Int64 -> Natural -> Producer RecordedEvent m ())
-> StreamId
-> m (Maybe Int64)
getLastEvent eventProducer streamId = do
x <- P.head $ eventProducer QueryDirectionBackward streamId Nothing 1
return $ recordedEventNumber <$> x
runStreamRequest
:: (Monad m)
=> (QueryDirection -> StreamId -> Maybe Int64 -> Natural -> Producer RecordedEvent m ())
-> ReadStreamRequest
-> m (Maybe StreamResult)
runStreamRequest eventProducer (ReadStreamRequest streamId startEventNumber maxItems FeedDirectionBackward) =
do
lastEvent <- getLastEvent eventProducer streamId
events <-
P.toListM $
eventProducer QueryDirectionBackward streamId startEventNumber 10
>-> filterLastEvent startEventNumber
>-> maxItemsFilter startEventNumber
return $ buildStreamResult FeedDirectionBackward lastEvent events startEventNumber maxItems
where
maxItemsFilter Nothing = P.take (fromIntegral maxItems)
maxItemsFilter (Just v) = P.takeWhile (\r -> recordedEventNumber r > minimumEventNumber v)
minimumEventNumber start = fromIntegral start - fromIntegral maxItems
filterLastEvent Nothing = P.filter (const True)
filterLastEvent (Just v) = P.filter ((<= v) . recordedEventNumber)
runStreamRequest eventProducer (ReadStreamRequest streamId startEventNumber maxItems FeedDirectionForward) =
do
lastEvent <- getLastEvent eventProducer streamId
events <-
P.toListM $
eventProducer QueryDirectionForward streamId startEventNumber 10
>-> filterFirstEvent startEventNumber
>-> maxItemsFilter startEventNumber
return $ buildStreamResult FeedDirectionForward lastEvent events startEventNumber maxItems
where
maxItemsFilter Nothing = P.take (fromIntegral maxItems)
maxItemsFilter (Just v) = P.takeWhile (\r -> recordedEventNumber r <= maximumEventNumber v)
maximumEventNumber start = fromIntegral start + fromIntegral maxItems - 1
filterFirstEvent Nothing = P.filter (const True)
filterFirstEvent (Just v) = P.filter ((>= v) . recordedEventNumber)
| adbrowne/dynamodb-eventstore | dynamodb-eventstore-web/src/DynamoDbEventStore/Paging.hs | mit | 5,509 | 0 | 16 | 1,010 | 1,382 | 736 | 646 | 110 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeOperators #-}
module Odin.Engine.Eff.Coroutine
( Next
, next
, nextForever
, raceEither
, raceAny
) where
import Control.Monad.Freer
import Control.Monad.Freer.Coroutine
import Data.Function (fix)
type Next = Yield () ()
-- | Pause a coroutine effect to be picked up later. This is useful for control
-- flow.
next :: Member Next r => Eff r a -> Eff r a
next eff = do
yield () $ \() -> ()
eff
-- | Run a computation and yield, then pick back up where you left off again,
-- forever.
nextForever :: Member Next r => Eff r a -> Eff r b
nextForever eff = fix $ \loop -> eff >> next loop
withEither'
:: Member Next r
=> Status r () () a
-> Status r () () b
-> Eff r (Either a b)
withEither' (Done a) _ = return $ Left a
withEither' _ (Done b) = return $ Right b
withEither' (Continue () fa) (Continue () fb) = next $
fa () >>= \case
Done a -> return $ Left a
ca -> fb () >>= \case
Done b -> return $ Right b
cb -> withEither' ca cb
-- | Race two coroutine effects and return the result of the effect that finishes
-- first.
raceEither :: Member Next r => Eff r a -> Eff r b -> Eff r (Either a b)
raceEither effa effb = ((,) <$> interposeC effa <*> interposeC effb) >>= uncurry withEither'
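-- For example, raceEither (nextForever drawFrame) readCommand would finish with
-- Right of readCommand's result once readCommand completes without yielding,
-- since nextForever never finishes ('drawFrame' and 'readCommand' are
-- hypothetical effects here).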
-- | Race all of the coroutine effects until one finishes (which means the
-- computation completes without yielding.)
raceAny :: Member Next r => [Eff r a] -> Eff r a
raceAny effs = mapM interposeC effs >>= checkStats
where checkStats stats = case foldl f (Right []) stats of
Right ts -> next $ sequence ts >>= checkStats
Left a -> return a
f (Left a) _ = Left a
f (Right ts) (Continue () g) = Right $ ts ++ [g ()]
f (Right _ ) (Done a) = Left a
| schell/odin | odin-engine/src/Odin/Engine/Eff/Coroutine.hs | mit | 2,230 | 0 | 14 | 674 | 682 | 343 | 339 | 49 | 4 |
module Problem5 where
import Problem1
smallestMultipleOfAll :: [Int] -> Int
smallestMultipleOfAll = foldl
(\ acc x -> acc * (if acc `multipleOf` x then 1 else x `div` gcd acc x))
1
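-- For example, smallestMultipleOfAll [1..10] == 2520: each step scales the
-- accumulator by (x `div` gcd acc x), so the fold computes the lcm of the list.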
problem5 :: IO ()
problem5 = print $ smallestMultipleOfAll [1..20]
| Strikingwolf/project-euler | src/Problem5.hs | mit | 254 | 0 | 12 | 48 | 98 | 56 | 42 | 8 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-referenceschema.html
module Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationReferenceDataSourceRecordColumn
import Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat
-- | Full data type definition for
-- KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema. See
-- 'kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema' for a
-- more convenient constructor.
data KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema =
KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema
{ _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordColumns :: [KinesisAnalyticsV2ApplicationReferenceDataSourceRecordColumn]
, _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordEncoding :: Maybe (Val Text)
, _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordFormat :: KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat
} deriving (Show, Eq)
instance ToJSON KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema where
toJSON KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema{..} =
object $
catMaybes
[ (Just . ("RecordColumns",) . toJSON) _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordColumns
, fmap (("RecordEncoding",) . toJSON) _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordEncoding
, (Just . ("RecordFormat",) . toJSON) _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordFormat
]
-- | Constructor for
-- 'KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema'
-- containing required fields as arguments.
kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema
:: [KinesisAnalyticsV2ApplicationReferenceDataSourceRecordColumn] -- ^ 'kavardsrsRecordColumns'
-> KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat -- ^ 'kavardsrsRecordFormat'
-> KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema
kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema recordColumnsarg recordFormatarg =
KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema
{ _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordColumns = recordColumnsarg
, _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordEncoding = Nothing
, _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordFormat = recordFormatarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-referenceschema.html#cfn-kinesisanalyticsv2-applicationreferencedatasource-referenceschema-recordcolumns
kavardsrsRecordColumns :: Lens' KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema [KinesisAnalyticsV2ApplicationReferenceDataSourceRecordColumn]
kavardsrsRecordColumns = lens _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordColumns (\s a -> s { _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordColumns = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-referenceschema.html#cfn-kinesisanalyticsv2-applicationreferencedatasource-referenceschema-recordencoding
kavardsrsRecordEncoding :: Lens' KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema (Maybe (Val Text))
kavardsrsRecordEncoding = lens _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordEncoding (\s a -> s { _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordEncoding = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-referenceschema.html#cfn-kinesisanalyticsv2-applicationreferencedatasource-referenceschema-recordformat
kavardsrsRecordFormat :: Lens' KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat
kavardsrsRecordFormat = lens _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordFormat (\s a -> s { _kinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchemaRecordFormat = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsV2ApplicationReferenceDataSourceReferenceSchema.hs | mit | 4,646 | 0 | 13 | 268 | 356 | 209 | 147 | 36 | 1 |
module Tema_14b_PilaConListas_Spec (main, spec) where
import Tema_14.PilaConListas
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Tema_14b" $ do
it "e1" $
show (apila 1 (apila 2 (apila 3 vacia))) `shouldBe` "1|2|3|-"
it "e2" $
show (vacia :: Pila Int) `shouldBe` "-"
it "e3" $
show (apila 4 (apila 1 (apila 2 (apila 3 vacia)))) `shouldBe` "4|1|2|3|-"
it "e4" $
cima (apila 1 (apila 2 (apila 3 vacia))) `shouldBe` 1
it "e5" $
show (desapila (apila 1 (apila 2 (apila 3 vacia)))) `shouldBe` "2|3|-"
it "e6" $
esVacia (apila 1 (apila 2 (apila 3 vacia))) `shouldBe` False
it "e7" $
esVacia vacia `shouldBe` True
| jaalonso/I1M-Cod-Temas | test/Tema_14b_PilaConListas_Spec.hs | gpl-2.0 | 718 | 0 | 20 | 187 | 336 | 168 | 168 | 22 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
import Text.ParserCombinators.Parsec hiding (spaces)
import System.Environment
import Control.Monad
import Control.Monad.Error
import System.IO
import Data.IORef
data LispVal = Atom String
| List [LispVal]
| DottedList [LispVal] LispVal
| Number Integer
| String String
| Bool Bool
| PrimitiveFunc ([LispVal] -> ThrowsError LispVal)
| IOFunc ([LispVal] -> IOThrowsError LispVal)
| Port Handle
| Func {params :: [String], vararg :: (Maybe String),
body :: [LispVal], closure :: Env}
data LispError = NumArgs Integer [LispVal]
| TypeMismatch String LispVal
| Parser ParseError
| BadSpecialForm String LispVal
| NotFunction String String
| UnboundVar String String
| Default String
-- Requires {-# LANGUAGE ExistentialQuantification #-}:
-- allows any unpacker to be used, as long as its result type has an Eq instance.
data Unpacker = forall a. Eq a => AnyUnpacker (LispVal -> ThrowsError a)
instance Show LispVal where show = showVal
instance Show LispError where show = showError
instance Error LispError where
noMsg = Default "An error has occurred"
strMsg = Default
type ThrowsError = Either LispError
type Env = IORef [(String, IORef LispVal)]
trapError action = catchError action (return . show)
extractValue :: ThrowsError a -> a
extractValue (Right val) = val
showError :: LispError -> String
showError (UnboundVar message varname) = message ++ ": " ++ varname
showError (BadSpecialForm message form) = message ++ ": " ++ show form
showError (NotFunction message func) = message ++ ": " ++ show func
showError (NumArgs expected found) = "Expected " ++ show expected
++ " args; found values " ++ unwordsList found
showError (TypeMismatch expected found) = "Invalid type: expected " ++ expected
++ ", found " ++ show found
showError (Parser parseErr) = "Parse error at " ++ show parseErr
symbol :: Parser Char
symbol = oneOf "!#$%&|*+-/:<=>?@^_~"
spaces :: Parser ()
spaces = skipMany1 space
parseString :: Parser LispVal
parseString = do
char '"'
x <- many (noneOf "\"")
char '"'
return $ String x
parseAtom :: Parser LispVal
parseAtom = do
first <- letter <|> symbol
rest <- many (letter <|> digit <|> symbol)
let atom = first:rest
return $ case atom of
"#t" -> Bool True
"#f" -> Bool False
_ -> Atom atom
parseNumber :: Parser LispVal
parseNumber = liftM (Number . read) $ many1 digit
parseList :: Parser LispVal
parseList = liftM List (sepBy parseExpr spaces)
parseDottedList :: Parser LispVal
parseDottedList = do
head <- endBy parseExpr spaces
tail <- char '.' >> spaces >> parseExpr
return $ DottedList head tail
parseQuoted :: Parser LispVal
parseQuoted = do
char '\''
x <- parseExpr
return $ List [Atom "quote", x]
parseExpr :: Parser LispVal
parseExpr = parseAtom
<|> parseString
<|> parseNumber
<|> parseQuoted
<|> do
char '('
x <- try parseList <|> parseDottedList
char ')'
return x
readOrThrow :: Parser a -> String -> ThrowsError a
readOrThrow parser input = case parse parser "lisp" input of
Left err -> throwError $ Parser err
Right val -> return val
readExpr :: String -> ThrowsError LispVal
readExpr = readOrThrow parseExpr
readExprList = readOrThrow (endBy parseExpr spaces)
--readExpr input = case parse parseExpr "lisp" input of
-- Left err -> throwError $ Parser err
-- Right val -> return val
showVal :: LispVal -> String
showVal (String contents) = "\"" ++ contents ++ "\""
showVal (Atom name) = name
showVal (Number n) = show n
showVal (Bool True) = "#t"
showVal (Bool False) = "#f"
showVal (List xs) = "(" ++ unwordsList xs ++ ")"
showVal (DottedList head tail) = "(" ++ unwordsList head ++ " . " ++ showVal tail ++ ")"
showVal (PrimitiveFunc _) = "<primitive>"
showVal (IOFunc _) = "<IO primitive>"
showVal (Port _) = "<IO port>"
showVal (Func {params = args, vararg = varargs, body = body, closure = env}) =
"(lambda (" ++ unwords (map show args) ++
(case varargs of
Nothing -> ""
Just arg -> " . " ++ arg) ++ ") ...)"
unwordsList :: [LispVal] -> String
unwordsList = unwords . map showVal
--------------------------------- evaluator -----------------------------------------
eval :: Env -> LispVal -> IOThrowsError LispVal
eval env val@(String _) = return val
eval env val@(Number _) = return val
eval env val@(Bool _) = return val
eval env (Atom id) = getVar env id
eval env (List [Atom "quote", val]) = return val
eval env (List (Atom "quote" : tail)) = return $ List tail
eval env (List [Atom "load", String filename]) = load filename >>= liftM last . mapM (eval env)
eval env (List [Atom "if", pred, clause, elseClause]) = do
res <- eval env pred
case res of
Bool False -> eval env elseClause
otherwise -> eval env clause
eval env (List [Atom "define", (Atom name), form]) = eval env form >>= defineVar env name
eval env (List (Atom "define" : List (Atom var : params) : body)) = makeNormalFunc env params body >>= defineVar env var
eval env (List (Atom "define" : DottedList (Atom var : params) varargs : body)) =
makeVarargs varargs env params body >>= defineVar env var
eval env (List (Atom "lambda" : List params : body)) = makeNormalFunc env params body
eval env (List (Atom "lambda" : DottedList params varargs : body)) =
makeVarargs varargs env params body
eval env (List (Atom "lambda" : varargs@(Atom _) : body)) =
makeVarargs varargs env [] body
eval env (List [Atom "set!", Atom name, form]) = eval env form >>= setVar env name
eval env (List (Atom "cond" : first : rest)) = evalCondClause env first rest
eval env (List (func : args)) = do
func <- eval env func
argVals <- mapM (eval env) args
apply func argVals
eval env badForm = throwError $ BadSpecialForm "Unrecognized special form " badForm
evalCondClause :: Env -> LispVal -> [LispVal] -> IOThrowsError LispVal
evalCondClause env (List (Atom "else" : [stmt])) _ = eval env stmt
evalCondClause env (List (pred : stmt : [])) rest = do
result <- eval env pred
case result of
Bool True -> eval env stmt
otherwise -> evalCondClause env (head rest) (tail rest)
makeFunc varargs env params body = return $ Func (map showVal params) varargs body env
makeNormalFunc = makeFunc Nothing
makeVarargs = makeFunc . Just . showVal
--------------------------------- evaluator -----------------------------------------
-- eval (List ((Atom "+"):args)) = Number $ sum (map unwrappNum args)
-- eval (List ((Atom "*"):args)) = Number $ product (map unwrappNum args)
-- unwrappNum :: LispVal -> Integer
-- unwrappNum (Number n) = n
apply :: LispVal -> [LispVal] -> IOThrowsError LispVal
apply (PrimitiveFunc func) args = liftThrows $ func args
apply (IOFunc func) args = func args
apply (Func params varargs body closure) args =
if num params /= num args && varargs == Nothing
then throwError $ NumArgs (num params) args
else (liftIO $ bindVars closure $ zip params args) >>= bindVarArgs varargs >>= evalBody
where
remainingArgs = drop (length params) args
num = toInteger . length
evalBody env = liftM last $ mapM (eval env) body
bindVarArgs arg env = case arg of
Just argName -> liftIO $ bindVars env [(argName, List $ remainingArgs)]
Nothing -> return env
ioPrimitives :: [(String, [LispVal] -> IOThrowsError LispVal)]
ioPrimitives = [("apply", applyProc),
("open-input-file", makePort ReadMode),
("open-output-file", makePort WriteMode),
("close-input-port", closePort),
("close-output-port", closePort),
("read", readProc),
("write", writeProc),
("read-contents", readContents),
("read-all", readAll)]
makePort :: IOMode -> [LispVal] -> IOThrowsError LispVal
makePort mode [String filename] = liftM Port $ liftIO $ openFile filename mode
closePort :: [LispVal] -> IOThrowsError LispVal
closePort [Port port] = liftIO $ hClose port >> (return $ Bool True)
closePort _ = return $ Bool False
readProc :: [LispVal] -> IOThrowsError LispVal
readProc [] = readProc [Port stdin]
readProc [Port port] = (liftIO $ hGetLine port) >>= liftThrows . readExpr
writeProc :: [LispVal] -> IOThrowsError LispVal
writeProc [obj] = writeProc [obj, Port stdout]
writeProc [obj, Port port] = liftIO $ hPrint port obj >> (return $ Bool True)
readContents :: [LispVal] -> IOThrowsError LispVal
readContents [String filename] = liftM String $ liftIO $ readFile filename
load :: String -> IOThrowsError [LispVal]
load filename = (liftIO $ readFile filename) >>= liftThrows . readExprList
readAll :: [LispVal] -> IOThrowsError LispVal
readAll [String filename] = liftM List $ load filename
applyProc :: [LispVal] -> IOThrowsError LispVal
applyProc [func, List args] = apply func args
applyProc (func : args) = apply func args
primitives :: [(String, [LispVal] -> ThrowsError LispVal)]
primitives = [("+", numericBinop (+)),
("-", numericBinop (-)),
("*", numericBinop (*)),
("/", numericBinop div),
("mod", numericBinop mod),
("quotient", numericBinop quot),
("remainder", numericBinop rem),
("string?", unaryOp symbolp),
("number?", unaryOp numberp),
("list?", unaryOp listp),
("pair?", unaryOp pairp),
("symbol?", unaryOp symbolp),
("symbol->string", unaryOp symbol2String),
("string->symbol", unaryOp string2Symbol),
("=", numBoolBinop (==)),
("<", numBoolBinop (<)),
(">", numBoolBinop (>)),
("/=", numBoolBinop (>=)),
(">=", numBoolBinop (>=)),
("<=", numBoolBinop (<=)),
("&&", boolBoolBinop (&&)),
("||", boolBoolBinop (||)),
("string=?", strBoolBinop (==)),
("string<?", strBoolBinop (<)),
("string>?", strBoolBinop (>)),
("string<=?", strBoolBinop (<=)),
("string>=?", strBoolBinop (>=)),
("car", car),
("cdr", cdr),
("cons", cons),
("eq?", eqv),
("eqv?", eqv),
("equal?", equal)]
primitiveBindings :: IO Env
primitiveBindings = nullEnv >>= (flip bindVars $ map (makeFunc PrimitiveFunc) primitives
++ map (makeFunc IOFunc) ioPrimitives)
where makeFunc constructor (var, func) = (var, constructor func)
unaryOp :: (LispVal -> LispVal) -> [LispVal] -> ThrowsError LispVal
unaryOp f [v] = return $ f v
symbol2String :: LispVal -> LispVal
symbol2String (Atom s) = String s
symbol2String _ = String ""
string2Symbol :: LispVal -> LispVal
string2Symbol (String s) = Atom s
string2Symbol _ = Atom ""
symbolp, numberp, listp, pairp :: LispVal -> LispVal
symbolp (Atom _) = Bool True
symbolp _ = Bool False
numberp (Number _) = Bool True
numberp _ = Bool False
listp (List _) = Bool True
listp _ = Bool False
pairp (List _) = Bool True
pairp (DottedList _ _) = Bool True
pairp _ = Bool False
numBoolBinop = boolBinop unpackNum
strBoolBinop = boolBinop unpackString
boolBoolBinop = boolBinop unpackBool
boolBinop :: (LispVal -> ThrowsError a) -> (a -> a -> Bool) -> [LispVal] -> ThrowsError LispVal
boolBinop unpackFunc op args = if length args /= 2
then throwError $ NumArgs 2 args
else do
left <- unpackFunc $ args !! 0
right <- unpackFunc $ args !! 1
return $ Bool $ left `op` right
unpackNum :: LispVal -> ThrowsError Integer
unpackNum (Number n) = return n
unpackNum notNum = throwError $ TypeMismatch "number" notNum
unpackString :: LispVal -> ThrowsError String
unpackString (String s) = return s
unpackString (Number n) = return $ show n
unpackString (Bool b) = return $ show b
unpackString notString = throwError $ TypeMismatch "string" notString
unpackBool :: LispVal -> ThrowsError Bool
unpackBool (Bool b) = return b
unpackBool notBool = throwError $ TypeMismatch "boolean" notBool
numericBinop :: (Integer -> Integer -> Integer) -> [LispVal] -> ThrowsError LispVal
numericBinop op singleVal@[_] = throwError $ NumArgs 2 singleVal
numericBinop op params = mapM unpackNum params >>= return . Number . foldl1 op
car :: [LispVal] -> ThrowsError LispVal
car [List (x : xs)] = return x
car [DottedList (x : xs) _] = return x
car [badArg] = throwError $ TypeMismatch "pair" badArg
car badArgList = throwError $ NumArgs 1 badArgList
cdr :: [LispVal] -> ThrowsError LispVal
cdr [List (x : xs)] = return $ List xs
cdr [DottedList [_] x] = return x
cdr [DottedList (_ : x) xs] = return $ DottedList x xs
cdr [badArg] = throwError $ TypeMismatch "pair" badArg
cdr badArgList = throwError $ NumArgs 1 badArgList
cons :: [LispVal] -> ThrowsError LispVal
cons [x1, List []] = return $ List [x1]
cons [x1, List xs] = return $ List $ x1 : xs
cons [x1, DottedList xs xlast] = return $ DottedList (x1 : xs) xlast
cons [x1, x2] = return $ DottedList [x1] x2
cons badArgList = throwError $ NumArgs 2 badArgList
eqv :: [LispVal] -> ThrowsError LispVal
eqv [(Bool arg1), (Bool arg2)] = return $ Bool $ arg1 == arg2
eqv [(String arg1), (String arg2)] = return $ Bool $ arg1 == arg2
eqv [(Number arg1), (Number arg2)] = return $ Bool $ arg1 == arg2
eqv [(Atom arg1), (Atom arg2)] = return $ Bool $ arg1 == arg2
eqv [(DottedList xs x), (DottedList ys y)] = eqv [List $ xs ++ [x], List $ ys ++ [y]]
eqv [(List xs), (List ys)] = return $ Bool $ (length xs == length ys) &&
(all eqvPair $ zip xs ys)
where eqvPair (x, y) = case equal [x,y] of
Left err -> False
Right (Bool val) -> val
eqv [_, _] = return $ Bool False
eqv badArgList = throwError $ NumArgs 2 badArgList
unpackEquals :: LispVal -> LispVal -> Unpacker -> ThrowsError Bool
unpackEquals arg1 arg2 (AnyUnpacker unpacker) = do
unpacked1 <- unpacker arg1
unpacked2 <- unpacker arg2
return $ unpacked1 == unpacked2
`catchError` (const $ return False)
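-- For example, unpackEquals (Number 2) (String "2") (AnyUnpacker unpackString)
-- yields Right True (both values unpack to the string "2"), while with
-- AnyUnpacker unpackNum it yields Right False, because the TypeMismatch error
-- is caught and mapped to False.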
equal :: [LispVal] -> ThrowsError LispVal
equal [arg1, arg2] = do
primitiveEquals <- liftM or $ mapM (unpackEquals arg1 arg2)
[AnyUnpacker unpackNum, AnyUnpacker unpackString, AnyUnpacker unpackBool]
eqvEquals <- eqv [arg1, arg2]
return $ Bool $ (primitiveEquals || let (Bool x) = eqvEquals in x)
equal badArgList = throwError $ NumArgs 2 badArgList
-- :main "(a '(dotted . list) test)"
-- :main "(4124 23 (12 (test)) aa)"
-- :main "(+ 23221 1 123 123 1111111)"
-- :main "(+ 5 (* 2 (* 200 2)))"
-- main = getArgs >>= print . eval . readExpr . head
-------------------------------- Variables --------------------------------
nullEnv :: IO Env
nullEnv = newIORef []
type IOThrowsError = ErrorT LispError IO
liftThrows :: ThrowsError a -> IOThrowsError a
liftThrows (Left err) = throwError err
liftThrows (Right val) = return val
runIOThrows :: IOThrowsError String -> IO String
runIOThrows action = runErrorT (trapError action) >>= return . extractValue
isBound :: Env -> String -> IO Bool
isBound envRef varName = readIORef envRef >>= return . maybe False (const True) . lookup varName
getVar :: Env -> String -> IOThrowsError LispVal
getVar envRef var = do
env <- liftIO $ readIORef envRef
maybe (throwError $ UnboundVar "Getting an unbound variable" var)
(liftIO . readIORef)
(lookup var env)
setVar :: Env -> String -> LispVal -> IOThrowsError LispVal
setVar envRef var value = do
env <- liftIO $ readIORef envRef
maybe (throwError $ UnboundVar "Setting an unbound variable" var)
(liftIO . (flip writeIORef value))
(lookup var env)
return value
defineVar :: Env -> String -> LispVal -> IOThrowsError LispVal
defineVar envRef var value = do
alreadyDefined <- liftIO $ isBound envRef var
if alreadyDefined
then setVar envRef var value >> return value
else liftIO $ do
valueRef <- newIORef value
env <- readIORef envRef
writeIORef envRef ((var, valueRef) : env)
return value
bindVars :: Env -> [(String, LispVal)] -> IO Env
bindVars envRef bidings = readIORef envRef >>= extendEnv bidings >>= newIORef
where
extendEnv bindings env = liftM (++ env) (mapM addBinding bindings)
addBinding (var, value) = do
ref <- newIORef value
return (var, ref)
-------------------------------- Variables --------------------------------
-------------------------------- REPL --------------------------------
flushStr :: String -> IO ()
flushStr str = putStr str >> hFlush stdout
readPrompt :: String -> IO String
readPrompt prompt = flushStr prompt >> getLine
evalString :: Env -> String -> IO String
evalString env expr = runIOThrows $ liftM show $ (liftThrows $ readExpr expr ) >>= eval env
evalAndPrint :: Env -> String -> IO ()
evalAndPrint env expr = evalString env expr >>= putStrLn
until_ :: Monad m => (a -> Bool) -> m a -> (a -> m()) -> m ()
until_ pred prompt action = do
result <- prompt
if pred result
then return ()
else action result >> until_ pred prompt action
runOne :: [String] -> IO ()
runOne args = do
env <- primitiveBindings >>= flip bindVars [("args", List $ map String $ drop 1 args)]
(runIOThrows $ liftM show $ eval env (List [Atom "load", String (args !! 0)]))
>>= hPutStrLn stderr
runRepl :: IO ()
runRepl = primitiveBindings >>= until_ (== "quit") (readPrompt "Lisp>>> ") . evalAndPrint
-------------------------------- REPL --------------------------------
-- (define my-count (counter 111))
--(define (counter inc) (lambda (x) (set! inc (+ x inc)) inc))
-- (my-count 1)
main = do
args <- getArgs
if null args
then runRepl
else runOne $ args
| nabacg/hSchemeParser | src/schemeParser.hs | gpl-2.0 | 18,503 | 152 | 16 | 4,713 | 6,517 | 3,324 | 3,193 | 383 | 3 |
{-|
Module : Cabal2Ebuild
Copyright : (C) 2005, Duncan Coutts
License : GPL-2+
Maintainer : haskell@gentoo.org
A program for generating a Gentoo ebuild from a .cabal file
-}
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License
-- as published by the Free Software Foundation; either version 2
-- of the License, or (at your option) any later version.
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
--
module Cabal2Ebuild
(cabal2ebuild
,convertDependencies
,convertDependency) where
import qualified Distribution.PackageDescription as Cabal
( PackageDescription(..), license)
import qualified Distribution.Package as Cabal ( PackageIdentifier(..)
, Dependency(..))
import qualified Distribution.Version as Cabal ( VersionRange
, cataVersionRange, normaliseVersionRange
, majorUpperBound, mkVersion )
import Distribution.Version (VersionRangeF(..))
import Distribution.Pretty (prettyShow)
import qualified Distribution.Utils.ShortText as ST
import Data.Char (isUpper)
import Data.Maybe
import Portage.Dependency
import qualified Portage.Cabal as Portage
import qualified Portage.PackageId as Portage
import qualified Portage.EBuild as Portage
import qualified Portage.EBuild.CabalFeature as Portage
import qualified Portage.Resolve as Portage
import qualified Portage.EBuild as E
import qualified Portage.Overlay as O
import Portage.Version
-- | Generate a 'Portage.EBuild' from a 'Portage.Category' and a 'Cabal.PackageDescription'.
cabal2ebuild :: Portage.Category -> Cabal.PackageDescription -> Portage.EBuild
cabal2ebuild cat pkg = Portage.ebuildTemplate {
E.name = Portage.cabal_pn_to_PN cabal_pn,
E.category = prettyShow cat,
E.hackage_name= cabalPkgName,
E.version = prettyShow (Cabal.pkgVersion (Cabal.package pkg)),
E.description = ST.fromShortText $ if ST.null (Cabal.synopsis pkg)
then Cabal.description pkg
else Cabal.synopsis pkg,
E.homepage = thisHomepage,
E.license = Portage.convertLicense $ Cabal.license pkg,
E.slot = (E.slot E.ebuildTemplate) ++ (if hasLibs then "/${PV}" else ""),
E.my_pn = if any isUpper cabalPkgName then Just cabalPkgName else Nothing,
E.features = E.features E.ebuildTemplate
++ (if hasLibs then ([ Portage.Lib
, Portage.Profile
, Portage.Haddock
, Portage.Hoogle
]
++ if cabalPkgName == "hscolour"
then []
else [Portage.HsColour])
else [])
++ (if hasTests then [Portage.TestSuite]
else [])
} where
cabal_pn = Cabal.pkgName $ Cabal.package pkg
cabalPkgName = prettyShow cabal_pn
hasLibs = isJust (Cabal.library pkg)
hasTests = (not . null) (Cabal.testSuites pkg)
thisHomepage = if (ST.null $ Cabal.homepage pkg)
then E.homepage E.ebuildTemplate
else ST.fromShortText $ Cabal.homepage pkg
-- | Map 'convertDependency' over ['Cabal.Dependency'].
convertDependencies :: O.Overlay -> Portage.Category -> [Cabal.Dependency] -> [Dependency]
convertDependencies overlay category = map (convertDependency overlay category)
-- | Convert 'Cabal.Dependency' into 'Dependency'.
convertDependency :: O.Overlay -> Portage.Category -> Cabal.Dependency -> Dependency
convertDependency overlay category (Cabal.Dependency pname versionRange _lib)
= convert versionRange
where
pn = case Portage.resolveFullPortageName overlay pname of
Just r -> r
Nothing -> Portage.PackageName category (Portage.normalizeCabalPackageName pname)
mk_p :: DRange -> Dependency
mk_p dr = DependAtom (Atom pn dr (DAttr AnySlot []))
p_v v = fromCabalVersion v
convert :: Cabal.VersionRange -> Dependency
convert = Cabal.cataVersionRange alg . Cabal.normaliseVersionRange
where
alg (ThisVersionF v) = mk_p $ DExact $ p_v v
alg (LaterVersionF v) = mk_p $ DRange (StrictLB $ p_v v) InfinityB
alg (EarlierVersionF v) = mk_p $ DRange ZeroB $ StrictUB $ p_v v
alg (OrLaterVersionF v) = if v == Cabal.mkVersion [0] -- any version
then mk_p $ DRange ZeroB InfinityB
else mk_p $ DRange (NonstrictLB $ p_v v)
InfinityB
alg (OrEarlierVersionF v) = mk_p $ DRange ZeroB $ NonstrictUB $ p_v v
alg (MajorBoundVersionF v) = mk_p $ DRange (NonstrictLB $ p_v v)
$ StrictUB $ p_v $ Cabal.majorUpperBound v
alg (UnionVersionRangesF v1 v2) = DependAnyOf [v1, v2]
alg (IntersectVersionRangesF v1 v2) = DependAllOf [v1, v2]
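-- For example, a Cabal constraint ">= 1.2" (OrLaterVersionF) becomes a DRange
-- with a non-strict lower bound of 1.2 and an infinite upper bound, while
-- "^>= 1.2" (MajorBoundVersionF) instead gets a strict upper bound at the
-- major upper bound, i.e. 1.3.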
| gentoo-haskell/hackport | Cabal2Ebuild.hs | gpl-3.0 | 5,792 | 0 | 16 | 1,994 | 1,154 | 637 | 517 | 81 | 10 |
-- |
-- Copyright : (c) 2010, 2011 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Portability : GHC only
--
-- Support for interaction with the console: argument parsing.
{-# LANGUAGE TemplateHaskell #-}
module Main.Console (
defaultMain
-- * Static information about the Tamarin prover
, programName
-- * Constructing interaction modes for Tamarin prover
, TamarinMode
, tamarinMode
, helpAndExit
-- * Argument parsing
, Arguments
, ArgKey
, ArgVal
-- ** Setting arguments
, updateArg
, addEmptyArg
, helpFlag
-- ** Retrieving arguments
, getArg
, findArg
, argExists
-- * Pretty printing and console output
, lineWidth
, shortLineWidth
, renderDoc
) where
import Data.Maybe
import Data.Version (showVersion)
import Data.Time
import Safe
import Control.Monad
import System.Console.CmdArgs.Explicit
import System.Console.CmdArgs.Text
import System.Exit
import qualified Text.PrettyPrint.Class as PP
import Paths_tamarin_prover (version)
import Language.Haskell.TH
import Development.GitRev
------------------------------------------------------------------------------
-- Static constants for the tamarin-prover
------------------------------------------------------------------------------
-- | Program name
programName :: String
programName = "tamarin-prover"
-- | Version string
versionStr :: String
versionStr = unlines
[ concat
[ programName
, " "
, showVersion version
, ", (C) David Basin, Cas Cremers, Jannik Dreier, Simon Meier, Ralf Sasse, Benedikt Schmidt, ETH Zurich 2010-2018"
]
, concat
[ "Git revision: "
, $(gitHash)
, case $(gitDirty) of
True -> " (with uncommited changes)"
False -> ""
, ", branch: "
, $(gitBranch)
]
, concat
[ "Compiled at: "
, $(stringE =<< runIO (show `fmap` Data.Time.getCurrentTime))
]
, ""
, "This program comes with ABSOLUTELY NO WARRANTY. It is free software, and you"
, "are welcome to redistribute it according to its LICENSE, see"
, "'https://github.com/tamarin-prover/tamarin-prover/blob/master/LICENSE'."
]
-- | Line width to use.
lineWidth :: Int
lineWidth = 110
shortLineWidth :: Int
shortLineWidth = 78
------------------------------------------------------------------------------
-- A simple generic representation of arguments
------------------------------------------------------------------------------
-- | A name of an argument.
type ArgKey = String
-- | A value of an argument.
type ArgVal = String
-- | It is most convenient to view arguments just as 'String' based key-value
-- pairs. If there are multiple values for the same key, then the left-most
-- one is preferred.
type Arguments = [(ArgKey,ArgVal)]
-- | Does an argument exist.
argExists :: String -> Arguments -> Bool
argExists a = isJust . findArg a
-- | Find the value(s) corresponding to the given key.
findArg :: MonadPlus m => ArgKey -> Arguments -> m ArgVal
findArg a' as = msum [ return v | (a,v) <- as, a == a' ]
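-- For example, findArg "mode" [("mode", "prove"), ("mode", "interactive")]
-- evaluates to Just "prove" in the 'Maybe' monad: the left-most value wins.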
-- | Find the value corresponding to the given key. Throw an error if no value
-- exists.
getArg :: ArgKey -> Arguments -> ArgVal
getArg a =
fromMaybe (error $ "getArg: argument '" ++ a ++ "' not found") . findArg a
-- | Add an argument to the front of the list of arguments.
addArg :: ArgKey -> ArgVal -> Arguments -> Arguments
addArg a v = ((a,v):)
-- | Add an argument with the empty string as the value.
addEmptyArg :: String -> Arguments -> Arguments
addEmptyArg a = addArg a ""
-- | Update an argument.
updateArg :: ArgKey -> ArgVal -> Arguments -> Either a Arguments
updateArg a v = Right . addArg a v
-- | Add the help flag.
helpFlag :: Flag Arguments
helpFlag = flagHelpSimple (addEmptyArg "help")
------------------------------------------------------------------------------
-- Modes for using the Tamarin prover
------------------------------------------------------------------------------
-- | A representation of an interaction mode with the Tamarin prover.
data TamarinMode = TamarinMode
{ tmName :: String
, tmCmdArgsMode :: Mode Arguments
-- ^ Run is given a reference to the mode. This enables changing the
-- static information of a mode and keeping the same 'run' function.
-- We use this for implementing the 'main' mode.
, tmRun :: TamarinMode -> Arguments -> IO ()
, tmIsMainMode :: Bool
}
-- | Smart constructor for a 'TamarinMode'.
tamarinMode :: String -> Help
-> (Mode Arguments -> Mode Arguments) -- ^ Changes to default mode.
-> (TamarinMode -> Arguments -> IO ())
-> TamarinMode
tamarinMode name help adaptMode run0 = TamarinMode
{ tmName = name
, tmCmdArgsMode = adaptMode $ Mode
{ modeGroupModes = toGroup []
, modeNames = [name]
, modeValue = []
, modeCheck = updateArg "mode" name
, modeExpandAt = False
, modeReform = const Nothing -- no reform possibility
, modeHelp = help
, modeHelpSuffix = []
, modeArgs = ([], Nothing) -- no positional arguments
, modeGroupFlags = toGroup [] -- no flags
}
, tmRun = run
, tmIsMainMode = False
}
where
run thisMode as
| argExists "help" as = helpAndExit thisMode Nothing
| argExists "version" as = putStrLn versionStr
| otherwise = run0 thisMode as
-- | Display the help message of a tamarin mode and exit.
helpAndExit :: TamarinMode -> Maybe String -> IO ()
helpAndExit tmode mayMsg = do
putStrLn $ showText (Wrap lineWidth)
$ helpText header HelpFormatOne (tmCmdArgsMode tmode)
-- output example info
when (tmIsMainMode tmode) $ do
putStrLn $ unlines
[ separator
, "See 'https://github.com/tamarin-prover/tamarin-prover/blob/master/README.md'"
, "for usage instructions and pointers to examples."
, separator
]
end
where
separator = replicate shortLineWidth '-'
(header, end) = case mayMsg of
Nothing -> ([], return ())
Just msg -> (["error: " ++ msg], exitFailure)
-- | Main function.
defaultMain :: TamarinMode -> [TamarinMode] -> IO ()
defaultMain firstMode otherModes = do
as <- processArgs $ tmCmdArgsMode mainMode
case findArg "mode" as of
Nothing -> error $ "defaultMain: impossible - mode not set"
Just name -> headNote "defaultMain: impossible - no mode found" $ do
tmode <- (mainMode : otherModes)
guard (tmName tmode == name)
return $ tmRun tmode tmode as
where
mainMode = firstMode
{ tmName = programName
, tmCmdArgsMode = (tmCmdArgsMode firstMode)
{ modeNames = [programName]
, modeCheck = updateArg "mode" programName
, modeGroupModes = toGroup (map tmCmdArgsMode $ otherModes)
, modeGroupFlags = (modeGroupFlags $ tmCmdArgsMode firstMode)
{ groupNamed =
[ ("About"
, [ helpFlag
, flagVersion (addEmptyArg "version")
] )
]
}
}
, tmIsMainMode = True
}
------------------------------------------------------------------------------
-- Pretty printing
------------------------------------------------------------------------------
-- | Render a pretty-printing document.
renderDoc :: PP.Doc -> String
renderDoc = PP.renderStyle (PP.defaultStyle { PP.lineLength = lineWidth })
| rsasse/tamarin-prover | src/Main/Console.hs | gpl-3.0 | 7,752 | 0 | 18 | 1,999 | 1,397 | 786 | 611 | 143 | 2 |
-- | Example usage of the library.
module Main
where
import Data.ML.DecisionTree
import Data.ML.TestData.Disease
import Text.PrettyPrint.ANSI.Leijen
main :: IO ()
main = do
let t = fitTree disease attrDisease Minimize (impurity `by` missclassificationError) NoPrune dataDisease
print (pretty t)
putStrLn $ "\n\nDecision on following tuple: " ++ show (head dataDisease)
print (decide t (head dataDisease))
| schnecki/HaskellMachineLearning | src/Main.hs | gpl-3.0 | 433 | 0 | 12 | 80 | 123 | 65 | 58 | 10 | 1 |
module Test where
f :: Int -> Int
f x = x + 1
foreign export ccall f :: Int -> Int
| leroux/testsuite | tests/rts/Test.hs | gpl-3.0 | 85 | 0 | 6 | 24 | 39 | 22 | 17 | 4 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
module LIR where
import qualified Parser as P
import qualified Analyzer as A
import Data.Maybe
import qualified Data.Map.Strict as Map
import Data.List (mapAccumL, intercalate, findIndex, lookup)
import Text.Printf
import Debug.Trace
data VReg = VReg { regsize :: Integer, regid :: Int, regconst :: Bool }
instance Show VReg where
show (VReg { regsize, regid, regconst })
| regconst = "$" ++ (show regid) ++ "c." ++ (show regsize)
| otherwise = "$" ++ (show regid) ++ "." ++ (show regsize)
-- Instructions take the form:
-- - Operator destination [operand-1, [operand-2, ...]]
-- - Operator operand
data Instr = Call VReg Int [VReg] -- dst, func id, [args]
| ICall VReg VReg [VReg] -- dst, func address, [args]
| Return (Maybe VReg)
| Move VReg VReg
| Store VReg VReg
| StoreArray VReg VReg VReg -- v1[v2] = v3
| LoadArray VReg VReg VReg -- v1 = v2[v3]
| Load VReg VReg
| LoadAddressOf VReg VReg
| LoadFuncAddr VReg Int
| LoadInt VReg Integer
| LoadFloat VReg Float
| LoadStringPtr VReg String
| LoadStructMember VReg VReg Int -- dst src offset (size of load is in the dst register)
| StoreStructMember VReg Int VReg -- obj offset src (size of store is in src register)
| Add VReg VReg VReg
| Mult VReg VReg VReg
instance Show Instr where
show (Call dst funcid regs) = printf "call %s = funcs[%d] (%s)" (show dst) funcid (intercalate ", " $ map show regs)
show (ICall dst func regs) = printf "icall %s = %s (%s)" (show dst) (show func) (intercalate ", " $ map show regs)
show (Return Nothing) = printf "ret"
show (Return (Just reg)) = printf "ret %s" (show reg)
show (Move dst src) = printf "move %s = %s" (show dst) (show src)
show (Load dst src) = printf "load %s = %s" (show dst) (show src)
show (Store dst src) = printf "store %s = %s" (show dst) (show src)
show (LoadArray dst arr offset) = printf "loada %s = %s[%s]" (show dst) (show arr) (show offset)
show (StoreArray arr offset val) = printf "storea %s[%s] = %s" (show arr) (show offset) (show val)
show (LoadAddressOf dst obj) = printf "la %s = & %s" (show dst) (show obj)
show (LoadStructMember dst obj offset) = printf "amember %s = %s[%d:%d]" (show dst) (show obj) offset (offset + (fromIntegral . regsize $ dst))
show (StoreStructMember obj offset src) = printf "smember %s[%d:%d] = %s" (show obj) offset (offset + (fromIntegral . regsize $ src)) (show src)
show (Add dst op1 op2) = printf "add %s = %s + %s" (show dst) (show op1) (show op2)
show (Mult dst op1 op2) = printf "mult %s = %s * %s" (show dst) (show op1) (show op2)
show (LoadFuncAddr dst funcid) = printf "lfa %s = funcs[%d]" (show dst) funcid
show (LoadInt dst int) = printf "lii %s = %d" (show dst) int
show (LoadFloat dst float) = printf "lif %s = %f" (show dst) float
show (LoadStringPtr dst str)
| length str > 10 = printf "lis %s = '%s'..." (show dst) (take 10 str)
| otherwise = printf "lis %s = '%s'" (show dst) str
type Term = P.Term
type SymbolRegTable = Map.Map String (Int, VReg)
-- The [VReg] is a list of the registers used in the body for the
-- parameters of this function.
data FuncInstrs = FuncInstrs [VReg] [Instr] -- arg-registers instructions
deriving (Show)
type FuncTable = Map.Map Int FuncInstrs
data LIRState = LIRState { cnextVRegID :: Int,
cnextFuncID :: Int,
csymbols :: SymbolRegTable,
clambdas :: FuncTable,
scopeLevel :: Int }
-- The ID of the function that gets called before main
setupLambdaID :: Int
setupLambdaID = 0
globalScope :: Int
globalScope = 0
defaultCstate = LIRState { cnextVRegID=1,
cnextFuncID=setupLambdaID + 1,
clambdas=Map.empty,
csymbols=Map.fromList [("null", (globalScope, VReg { regsize=A.pointerSize, regid=0, regconst=True }))],
scopeLevel=globalScope }
(↖) :: LIRState -> LIRState -> LIRState
state ↖ newstate = state { cnextVRegID=cnextVRegID newstate,
cnextFuncID=cnextFuncID newstate,
clambdas=clambdas newstate }
data Program = Program { progLambdas :: FuncTable, progSymbols :: SymbolRegTable }
deriving (Show)
-- Helper function to create a new vreg and update the state
newreg :: LIRState -> Integer -> Bool -> (LIRState, VReg)
newreg state@(LIRState { cnextVRegID }) size const = (state { cnextVRegID=cnextVRegID + 1 },
VReg { regsize=size, regid=cnextVRegID, regconst=const })
newregs :: LIRState -> [(Integer, Bool)] -> (LIRState, [VReg])
newregs = mapAccumL (\st (sz, cnst) -> newreg st sz cnst)
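-- For instance, @newregs st [(8, True), (4, False)]@ returns the updated
-- state together with two fresh registers: a constant one of size 8 and a
-- non-constant one of size 4.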
astToLIRToplevel :: [A.TypedTerm] -> Program
astToLIRToplevel terms =
let (finalState, setupCode) = astToLIRToplevel' defaultCstate terms
in Program { progLambdas=Map.insert setupLambdaID (FuncInstrs [] (concat setupCode)) (clambdas finalState),
progSymbols=csymbols finalState }
astToLIRToplevel' :: LIRState -> [A.TypedTerm] -> (LIRState, [[Instr]])
astToLIRToplevel' = mapAccumL astToLIR'
where astToLIR' acc x = let (state, (_, instrs)) = astToLIR acc x
in (state, instrs)
-- Takes a state and a term and returns a new state, a register that
-- holds the value of the term's expression, and a list of instructions
-- that compute that value, assuming the initial state is the one given
-- to this function. If the given term is a definition, the LIRState is
-- updated with the name (if any) and the instructions for computing the
-- definition's value are returned. If the term is a pure declaration
-- (e.g. a typedef or struct), the LIRState is left unchanged, the
-- Maybe VReg is Nothing, and the instruction list is empty.
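-- For illustration (a hypothetical sketch, not from the original source):
-- given @intLiteralTerm@, an integer-literal term whose 'P.tirepr' is 7,
-- lowering it allocates one fresh constant register and emits a single
-- 'LoadInt':
--
-- > snd (astToLIR defaultCstate intLiteralTerm)
-- >   -- ==> (Just reg, [LoadInt reg 7]) for some fresh constant reg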
astToLIR :: LIRState -> A.TypedTerm -> (LIRState, (Maybe VReg, [Instr]))
astToLIR state@(LIRState { csymbols }) (P.TName { P.tsrepr }) =
case Map.lookup tsrepr csymbols of
Just (_, reg) -> (state, (Just reg, []))
Nothing -> error $ "Bug: Somehow the symbol " ++ tsrepr ++ " was not assigned a vreg"
astToLIR state (P.TIntLiteral { P.tirepr }) =
let (newstate, reg) = newreg state A.intSize True
in (newstate, (Just reg, [LoadInt reg tirepr]))
astToLIR state (P.TFloatLiteral { P.tfrepr }) =
let (newstate, reg) = newreg state A.floatSize True
in (newstate, (Just reg, [LoadFloat reg tfrepr]))
astToLIR state (P.TStringLiteral { P.tsrepr }) =
let (newstate, reg) = newreg state A.pointerSize True
in (newstate, (Just reg, [LoadStringPtr reg tsrepr]))
astToLIR state (P.TFuncall { P.tag, P.tfun, P.targs }) =
let (newstate, (Just funcreg, funcinstrs):operandsLIR) = mapAccumL (astToLIR . (state ↖)) state (tfun:targs)
(newstate', callreg) = newreg newstate (A.sizeFromType tag) True
in (state ↖ newstate', (Just callreg,
concat $ [funcinstrs,
concat $ map snd operandsLIR,
[ICall callreg funcreg $ map (fromJust . fst) operandsLIR]]))
astToLIR state@(LIRState { csymbols, scopeLevel }) (P.TDef { P.vartag, P.tname, P.tvalue=Just val }) =
let statewithsym = state { csymbols=Map.insert tname (scopeLevel, reg) csymbols }
(newstate, (Just valuereg, valueinstrs)) = astToLIR statewithsym val
(newstate', reg) = newreg (statewithsym ↖ newstate) varsize (A.isImmutable vartag)
in ((statewithsym ↖ newstate') ,
(Just reg, valueinstrs ++ [ Move reg valuereg ]))
where varsize = A.sizeFromType vartag
astToLIR state@(LIRState { csymbols, scopeLevel }) (P.TDef { P.vartag, P.tname, P.tvalue=Nothing }) =
let (newstate, reg) = newreg state (A.sizeFromType vartag) (A.isImmutable vartag)
in (newstate { csymbols=Map.insert tname (scopeLevel, reg) csymbols }, (Just reg, []))
astToLIR state@(LIRState { clambdas, cnextFuncID, csymbols, scopeLevel }) (P.TLambda { P.tag, P.tbody, P.tbindings }) =
let (lambdaState, _) = astToLIRToplevel' state tbindings
(bodystate, bodyinstrs) = astToLIRToplevel' (lambdaState { csymbols=symbolTableWithoutLocalValues,
scopeLevel=scopeLevel + 1 }) tbody
(regstate, funcPtrReg) = newreg (state ↖ bodystate) (A.sizeFromType tag) True
argRegs = map (snd . ((LIR.csymbols lambdaState) Map.!) . P.tname) tbindings
in (regstate { clambdas=Map.insert cnextFuncID (FuncInstrs argRegs $ concat bodyinstrs) clambdas,
cnextFuncID=cnextFuncID + 1 },
(Just funcPtrReg, [LoadFuncAddr funcPtrReg cnextFuncID]))
where filterSymbolTable (scope, _) = scope == globalScope
symbolTableWithoutLocalValues = Map.filter filterSymbolTable csymbols
astToLIR state (P.TAssign { P.tavar=n@(P.TName { P.tsrepr }), P.tavalue }) =
let (_, (Just namereg, [])) = astToLIR state n
(newstate, (Just valreg, valinstrs)) = astToLIR state tavalue
in (state ↖ newstate, (Just namereg, valinstrs ++ [Move namereg valreg]))
astToLIR state (P.TAssign { P.tavar=P.TDeref { P.toperand }, P.tavalue }) =
let (newstate, (Just opreg, opinstrs)) = astToLIR state toperand
(newstate', (Just valreg, valinstrs)) = astToLIR (state ↖ newstate) tavalue
in (state ↖ newstate', (Nothing, opinstrs ++ valinstrs ++ [Store opreg valreg]))
astToLIR state (P.TAssign { P.tavar=(P.TSubscript { P.ttarget, P.tsubscript }), P.tavalue }) =
let (newstate, (Just targetreg, targetinstrs)) = astToLIR state ttarget
(newstate', (Just subreg, subinstrs)) = astToLIR (state ↖ newstate) tsubscript
(newstate'', (Just valreg, valinstrs)) = astToLIR (state ↖ newstate') tavalue
(finalState, storeinstrs) = makeStore (state ↖ newstate'') (P.tag ttarget) targetreg subreg valreg
in (finalState, (Nothing, targetinstrs ++ subinstrs ++ valinstrs ++ storeinstrs))
where makeStore state (A.Mutable t) targetreg subreg valreg = makeStore state t targetreg subreg valreg
makeStore state (A.Array _ t) targetreg subreg valreg =
let (newstate, [szreg, offsetreg]) = newregs state [(A.pointerSize, True),
(A.pointerSize, True)]
in (newstate, [LoadInt szreg (A.sizeFromType t),
Mult offsetreg szreg subreg,
StoreArray targetreg offsetreg valreg])
makeStore state (A.Ptr t) targetreg subreg valreg =
let (newstate, [szreg, offsetreg, addrreg]) = newregs state [(A.pointerSize, True),
(A.pointerSize, True),
(A.pointerSize, True)]
in (newstate, [LoadInt szreg (A.sizeFromType t),
Mult offsetreg szreg subreg,
Add addrreg offsetreg targetreg,
Store addrreg valreg])
makeStore _ x _ _ _ = error $ "Bug: Unexpected type in assignment to subscript: " ++ (show x)
astToLIR state (P.TAssign { P.tavar=P.TMemberAccess { P.ttarget, P.tmember }, P.tavalue }) =
let (newstate, (Just objreg, objinstrs)) = astToLIR state ttarget
(newstate', (Just srcreg, srcinstrs)) = astToLIR (state ↖ newstate) tavalue
in (state ↖ newstate',
(Nothing, objinstrs ++ srcinstrs ++ [StoreStructMember objreg (A.memberOffset (P.tag ttarget) tmember) srcreg]))
astToLIR state (P.TAssign { P.tavar }) =
error $ "Bug: Unexpected form in lhs of assignment: " ++ (show tavar)
astToLIR state (P.TReturn { P.tvalue=Just val }) =
let (newstate, (reg, instrs)) = astToLIR state val
in (state ↖ newstate, (Nothing, instrs ++ [Return reg]))
astToLIR state (P.TDeref { P.tag, P.toperand }) =
let (newstate, (Just opreg, instrs)) = astToLIR state toperand
(newstate', valreg) = newreg (state ↖ newstate) (A.sizeFromType tag) (A.isImmutable tag)
in (state ↖ newstate', (Just valreg, instrs ++ [Load valreg opreg]))
astToLIR state (P.TSubscript { P.tag, P.ttarget, P.tsubscript }) =
let (newstate, (Just targetreg, targetinstrs)) = astToLIR state ttarget
(newstate', (Just subreg, subinstrs)) = astToLIR (state ↖ newstate) tsubscript
(newstate'', outreg) = newreg (state ↖ newstate') (A.sizeFromType tag) (A.isImmutable tag)
(finalState, loadinstrs) = makeLoad (state ↖ newstate'') (P.tag ttarget) outreg targetreg subreg
in (finalState, (Just outreg, targetinstrs ++ subinstrs ++ loadinstrs))
where makeLoad state (A.Mutable x) outreg targetreg subreg = makeLoad state x outreg targetreg subreg
makeLoad state (A.Ptr t) outreg targetreg subreg = let (newstate, [szreg, offsetreg, addrreg])
= newregs state [(A.pointerSize, True),
(A.pointerSize, True),
(A.pointerSize, True)]
in (newstate, [LoadInt szreg (A.sizeFromType t),
Mult offsetreg szreg subreg,
Add addrreg targetreg offsetreg,
Load outreg addrreg])
makeLoad state (A.Array _ t) outreg targetreg subreg = let (newstate, [szreg, offsetreg])
= newregs state [(A.pointerSize, True),
(A.pointerSize, True)]
in (newstate, [LoadInt szreg (A.sizeFromType t),
Mult offsetreg szreg subreg,
LoadArray outreg targetreg offsetreg])
makeLoad _ t _ _ _ = error $ "Bug: Unexpected type in load from subscript: " ++ (show t)
astToLIR state (P.TAddr { P.toperand }) =
let (newstate, (Just opreg, instrs)) = astToLIR state toperand
(newstate', dstreg) = newreg newstate A.pointerSize True
in (state ↖ newstate', (Just dstreg, instrs ++ [LoadAddressOf dstreg opreg]))
astToLIR state (P.TMemberAccess { P.ttarget, P.tmember, P.tag }) =
let (newstate, (Just opreg, opinstrs)) = astToLIR state ttarget
(newstate', dstreg) = newreg (state ↖ newstate) (A.sizeFromType tag) (A.isImmutable tag)
in (state ↖ newstate',
(Just dstreg, opinstrs ++ [LoadStructMember dstreg opreg $ A.memberOffset (P.tag ttarget) tmember]))
astToLIR state (P.TStructLiteral { P.tag, P.tfieldvalues }) =
-- we synthesize a variable definition and a bunch of assignments,
-- and return the code generated.
-- We don't need a unique name for #tmp_var here, because it will
-- shadow previous names and later names will shadow it.
let tmpvarname = "#tmp_var"
tmpvar = P.TDef { P.tag=A.voidType, P.vartag=tag, P.tname=tmpvarname, P.ttype=undefined, P.tvalue=Nothing }
assignments = map (makeAssignment tmpvar $ P.TName { P.tag=tag, P.tsrepr=tmpvarname }) tfieldvalues
(newstate, instrs) = astToLIRToplevel' state (tmpvar:assignments)
in (state ↖ newstate, (Just $ snd $ (csymbols newstate) Map.! tmpvarname, concat instrs))
where makeAssignment tmpvar varname (fieldname, fieldvalue) =
P.TAssign { P.tag=A.voidType,
P.tavar=P.TMemberAccess { -- we can just wrap it because it's always immutable
P.tag=A.Mutable (fromJust $ lookup fieldname (structFields tag)),
P.ttarget=varname,
P.tmember=fieldname },
P.tavalue=fieldvalue }
structFields (A.Unqualified (A.Struct { A.fields })) = fields
structFields x = error $ "Unexpected type in getStructFromType: " ++ (show x)
astToLIR state (P.TReturn { P.tvalue=Nothing }) =
(state, (Nothing, [Return Nothing]))
astToLIR state (P.TTypedef { }) = (state, (Nothing, []))
astToLIR state (P.TStruct { }) = (state, (Nothing, []))
astToLIR _ _ = error "Bug: Unexpected form in astToLIR"
|
zc1036/Compiler-project
|
src/LIR.hs
|
gpl-3.0
| 17,026
| 0
| 16
| 5,064
| 5,350
| 2,883
| 2,467
| 228
| 9
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigQuery.RowAccessPolicies.GetIAMPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the access control policy for a resource. Returns an empty policy
-- if the resource exists and does not have a policy set.
--
-- /See:/ <https://cloud.google.com/bigquery/ BigQuery API Reference> for @bigquery.rowAccessPolicies.getIamPolicy@.
module Network.Google.Resource.BigQuery.RowAccessPolicies.GetIAMPolicy
(
-- * REST Resource
RowAccessPoliciesGetIAMPolicyResource
-- * Creating a Request
, rowAccessPoliciesGetIAMPolicy
, RowAccessPoliciesGetIAMPolicy
-- * Request Lenses
, rapgipPayload
, rapgipResource
) where
import Network.Google.BigQuery.Types
import Network.Google.Prelude
-- | A resource alias for @bigquery.rowAccessPolicies.getIamPolicy@ method which the
-- 'RowAccessPoliciesGetIAMPolicy' request conforms to.
type RowAccessPoliciesGetIAMPolicyResource =
"bigquery" :>
"v2" :>
CaptureMode "resource" "getIamPolicy" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] GetIAMPolicyRequest :>
Post '[JSON] Policy
-- | Gets the access control policy for a resource. Returns an empty policy
-- if the resource exists and does not have a policy set.
--
-- /See:/ 'rowAccessPoliciesGetIAMPolicy' smart constructor.
data RowAccessPoliciesGetIAMPolicy =
RowAccessPoliciesGetIAMPolicy'
{ _rapgipPayload :: !GetIAMPolicyRequest
, _rapgipResource :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RowAccessPoliciesGetIAMPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rapgipPayload'
--
-- * 'rapgipResource'
rowAccessPoliciesGetIAMPolicy
:: GetIAMPolicyRequest -- ^ 'rapgipPayload'
-> Text -- ^ 'rapgipResource'
-> RowAccessPoliciesGetIAMPolicy
rowAccessPoliciesGetIAMPolicy pRapgipPayload_ pRapgipResource_ =
RowAccessPoliciesGetIAMPolicy'
{_rapgipPayload = pRapgipPayload_, _rapgipResource = pRapgipResource_}
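-- For example (an informal sketch; the request value and the resource path
-- below are placeholders):
--
-- > req = rowAccessPoliciesGetIAMPolicy someGetIAMPolicyRequest
-- >         "projects/my-project/datasets/my-dataset/tables/my-table/rowAccessPolicies/my-policy"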
-- | Multipart request metadata.
rapgipPayload :: Lens' RowAccessPoliciesGetIAMPolicy GetIAMPolicyRequest
rapgipPayload
= lens _rapgipPayload
(\ s a -> s{_rapgipPayload = a})
-- | REQUIRED: The resource for which the policy is being requested. See the
-- operation documentation for the appropriate value for this field.
rapgipResource :: Lens' RowAccessPoliciesGetIAMPolicy Text
rapgipResource
= lens _rapgipResource
(\ s a -> s{_rapgipResource = a})
instance GoogleRequest RowAccessPoliciesGetIAMPolicy
where
type Rs RowAccessPoliciesGetIAMPolicy = Policy
type Scopes RowAccessPoliciesGetIAMPolicy =
'["https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only"]
requestClient RowAccessPoliciesGetIAMPolicy'{..}
= go _rapgipResource (Just AltJSON) _rapgipPayload
bigQueryService
where go
= buildClient
(Proxy ::
Proxy RowAccessPoliciesGetIAMPolicyResource)
mempty
|
brendanhay/gogol
|
gogol-bigquery/gen/Network/Google/Resource/BigQuery/RowAccessPolicies/GetIAMPolicy.hs
|
mpl-2.0
| 3,958
| 0
| 12
| 812
| 389
| 236
| 153
| 66
| 1
|
-- -*- hindent-style: "chris-done" -*-
module Model.Paste where
import Control.Lens
import Crypto.Random
import qualified Data.ByteString.Base16 as BH
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time
import Import
import Text.Pandoc
import Yesod.Form.Bootstrap3
import Yesod.Markdown
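-- Orphan 'Monad' instance for 'FormResult', so that form results can be
-- chained with '>>=' (e.g. @markdownRes >>= mkHtml@ in 'pasteForm' below).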
instance Monad FormResult where
return = pure
FormMissing >>= _ = FormMissing
FormFailure x >>= _ = FormFailure x
FormSuccess x >>= f = f x
makeLensesFor
[("fsAttrs", "_fsAttrs")]
''FieldSettings
pasteForm :: Maybe Markdown -> Html -> MForm Handler (FormResult Paste, Widget)
pasteForm existingText extra =
do let (.=>) = (,)
(markdownRes,markdownView) <-
mreq markdownField
(over _fsAttrs
(mappend ["rows" .=> "20"])
(bfs ("Markdown Input" :: Text)))
existingText
(_,submitButtonView) <- mbootstrapSubmit ("Paste" :: BootstrapSubmit Text)
let widget =
do toWidget [lucius|
textarea {
font-family: monospace;
height: 450px;
}
#pfn-submit-btn {
margin-top: 8px;
margin-right: 30px;
padding-left: 20px;
}
|]
[whamlet|
#{extra}
<div .form-group>
<div .navbar>
<div .container #newpaste-submit>
<ul .nav .navbar-nav .navbar-right>
<li>
<a href="http://pandoc.org/README.html#pandocs-markdown">Pandoc's Markdown
<li>
<a href="https://en.wikibooks.org/wiki/LaTeX/Mathematics">LaTeX Math Guide
<li>
<div #pfn-submit-btn>
^{fvInput submitButtonView}
<div .form-group>
^{fvInput markdownView}
|]
html = markdownRes >>= mkHtml
time <- liftIO getCurrentTime
delKey <- liftIO mkDeleteKey
return (Paste <$> markdownRes <*> html <*> pure time <*> pure delKey
,widget)
markupGuides :: Widget
markupGuides =
[whamlet|
<li>
<a href="http://pandoc.org/demo/example9/pandocs-markdown.html">Markdown guide
<li>
<a href="https://en.wikibooks.org/wiki/LaTeX/Mathematics">LaTeX math guide
|]
errorify :: Either [Text] x -> FormResult x
errorify = \case
Left x -> FormFailure x
Right x -> FormSuccess x
mkHtml :: Markdown -> FormResult Html
mkHtml =
errorify .
over _Right (writePandoc lypasteWriterOptions) .
over _Left (pure . T.pack . show) .
parseMarkdown lypasteReaderOptions
mkDeleteKey :: IO Text
mkDeleteKey = do
entropyPool <- createEntropyPool
let rng :: SystemRNG
rng = cprgCreate entropyPool
(bs, _) = cprgGenerate 20 rng
return (T.decodeUtf8 (BH.encode bs))
lypasteReaderOptions :: ReaderOptions
lypasteReaderOptions =
yesodDefaultReaderOptions { readerExtensions = pandocExtensions }
lypasteWriterOptions :: WriterOptions
lypasteWriterOptions =
yesodDefaultWriterOptions
{ writerHTMLMathMethod = MathJax
"http://static.learnyou.org/mathjax/MathJax.js?config=TeX-AMS-MML_HTMLorMML"
}
|
learnyou/lypaste
|
Model/Paste.hs
|
agpl-3.0
| 3,472
| 0
| 14
| 1,179
| 608
| 320
| 288
| -1
| -1
|
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
module Data.Match where
import Control.Applicative
import Control.Monad
import Data.Foldable
import Data.Traversable
import qualified Data.Logic as Logic
data MatchExpr a
= Match a
| Not (MatchExpr a)
| And (MatchExpr a) (MatchExpr a)
| Or (MatchExpr a) (MatchExpr a)
deriving (Show, Eq, Ord, Functor, Foldable, Traversable)
instance Logic.Logic (MatchExpr a) where
and = And
or = Or
not = Not
match :: (a -> Bool) -> MatchExpr a -> Bool
match p = eval . test p
matchM :: (Applicative m, Monad m) => (a -> m Bool) -> MatchExpr a -> m Bool
matchM p = liftM eval . testM p
test :: (a -> Bool) -> MatchExpr a -> MatchExpr Bool
test = fmap
testM :: (Applicative m, Monad m) => (a -> m Bool) -> MatchExpr a -> m (MatchExpr Bool)
testM = traverse
eval :: MatchExpr Bool -> Bool
eval (Match x) = x
eval (Not x) = not (eval x)
eval (And x y) = eval x && eval y
eval (Or x y) = eval x || eval y
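-- A small usage sketch (not part of the original module): matching the
-- predicate 'even' against boolean combinations of candidate values.
--
-- > match even (Match 2 `And` Not (Match 3))   -- True
-- > match even (Match 1 `Or` Match 3)          -- False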
|
bflyblue/tag
|
Data/Match.hs
|
unlicense
| 1,041
| 0
| 10
| 243
| 441
| 229
| 212
| 32
| 1
|
module Orlin.Types where
import Control.Applicative
import Data.Ratio
import Data.Map.Strict( Map )
import qualified Data.Map.Strict as Map
import Control.Monad.Trans.State
import Control.Monad.Trans.Class
import Orlin.Tokens( Pn )
import qualified Orlin.AST as AST
import Orlin.AST( Loc(..), Ident(..), getIdent, BinderF(..), ExprF(..), Expr(..), NumF(..) )
import Orlin.Compile
import Orlin.Units
type TC a = StateT TCState Comp a
data GType
= GType (TypeF Unit Kind GType)
| GTypeVar Int
deriving (Eq, Show, Ord)
initTCState :: TCState
initTCState = TCState Map.empty
data TCState
= TCState
{ tc_tymap :: Map Int GType
}
runTC :: TC a -> Comp (a, TCState)
runTC m = runStateT m initTCState
type TVar = Int
type TSubst = Map TVar GType
data Kind
= KUnit
| KType
deriving (Eq, Show, Ord)
data Type
= Type Pn (TypeF AST.Unit Kind Type)
deriving (Eq, Show, Ord)
instance Loc Type where
loc (Type pn _) = pn
data TypeF unit kind a
= TyInt
| TyNat
| TyReal unit
| TyIdent Ident
| TyArrow a a
| TyForall Ident (Maybe kind) a
deriving (Eq, Show, Ord)
displayType
:: USubst
-> TSubst
-> GType
-> String
displayType usub tsub (GTypeVar v) =
case Map.lookup v tsub of
Nothing -> "_t" ++ show v
Just ty -> displayType usub tsub ty
displayType usub tsub (GType x) =
case x of
TyInt -> "ℤ"
TyNat -> "ℕ"
TyIdent i -> getIdent i
TyReal (UnitZero) -> "ℝ⟨0⟩"
TyReal u@(Unit m)
| Map.null m -> "ℝ"
| otherwise -> "ℝ〈" ++ displayUnit usub (simplifyUnit u usub) ++ "〉"
TyArrow t1 t2 -> "("++displayType usub tsub t1 ++ " → " ++ displayType usub tsub t2++")"
TyForall i k t -> "∀"++(getIdent i)++", "++displayType usub tsub t
type TypeTable = Map String GType
inferType
:: UnitTable
-> TypeTable
-> Expr ()
-> USubst
-> TSubst
-> Comp (Expr GType, USubst, TSubst)
inferType utab ttab ex@(Expr pn _ x) usub tsub =
case x of
ExprNumber _ -> reduceNumber ex >>= \ex' -> inferNumber utab ttab ex' usub tsub
ExprNumLit nl u ->
do u' <- computeReducedUnit pn utab u
let r = reduceNumLit nl
return (Expr pn (GType (TyReal u')) $ ExprNumber $ NumDec "" r, usub, tsub)
ExprToPower e n ->
do (e',usub1,tsub1) <- inferType utab ttab e usub tsub
uv <- compFreshVar
let ty = GType (TyReal (unitVar uv))
r <- unifyTypes ty (exprTy e') usub1 tsub1
case r of
Just (GType (TyReal u'),_,usub2,tsub2) -> do
u'' <- unitToPower pn u' n
return (Expr pn (GType (TyReal u'')) $ ExprToPower e' n, usub2, tsub2)
_ -> errMsg pn $ "expected real number type"
ExprIdent i ->
case Map.lookup (getIdent i) ttab of
Nothing -> errMsg pn $ unwords ["identifier not in scope:",getIdent i]
Just ty -> return (Expr pn ty $ ExprIdent i, usub, tsub)
ExprApp e1 e2 ->
do (e1',usub1,tsub1) <- inferType utab ttab e1 usub tsub
(e2',usub2,tsub2) <- inferType utab ttab e2 usub1 tsub1
resvar <- compFreshVar
let ty = (GType (TyArrow (exprTy e2') (GTypeVar resvar)))
r <- unifyTypes ty (exprTy e1') usub2 tsub2
case r of
Nothing -> errMsg pn $ unwords ["could not unify"
, displayType usub2 tsub2 (exprTy e1')
, "with"
, displayType usub2 tsub2 ty]
Just (_,_,usub3,tsub3) ->
return (Expr pn (GTypeVar resvar) $ ExprApp e1' e2', usub3, tsub3)
-- ExprAbs i mk e -> do -- FIXME, do kinds!
-- uv <- compFreshVar
-- let utab' = Map.insert (getIdent i) (VarUnitInfo (getIdent i) uv) utab
-- let usub' = Map.insert uv (Left (getIdent i)) usub
-- (e', usub'', tsub') <- inferType utab' ttab e usub' tsub
-- return (Expr pn (GType (TyForall i mk (exprTy e'))) $ ExprTAbs i mk e', usub'', tsub')
-- ExprAbs i mty e -> do
-- targ <- case mty of
-- Just ty -> computeReducedType utab ty
-- Nothing -> fmap GTypeVar $ compFreshVar
-- let ttab' = Map.insert (getIdent i) targ ttab
-- (e',usub',tsub') <- inferType utab ttab' e usub tsub
-- return (Expr pn (GType (TyArrow targ (exprTy e'))) $ ExprAbs i mty e', usub', tsub')
exprTy :: Expr a -> a
exprTy (Expr _ t _) = t
reduceNumLit :: AST.NumLit -> Rational
reduceNumLit (AST.NumLit pn x) =
case x of
NumZero -> 0
NumDec _ r -> r
NumHex _ n -> (n%1)
NumMult x y -> reduceNumLit x * reduceNumLit y
NumDiv x y -> reduceNumLit x / reduceNumLit y
NumPlus x y -> reduceNumLit x + reduceNumLit y
NumMinus x y -> reduceNumLit x - reduceNumLit y
NumNegate x -> - reduceNumLit x
NumToPower x n -> (reduceNumLit x)^n
reduceNumber :: Expr () -> Comp (Expr ())
reduceNumber ex = return ex -- FIXME, implement number folding...
inferNumber
:: UnitTable
-> TypeTable
-> Expr ()
-> USubst
-> TSubst
-> Comp (Expr GType, USubst, TSubst)
inferNumber utab ttab (Expr pn _ (ExprNumber num)) usub tsub =
case num of
NumZero ->
let ty = GType $ TyReal UnitZero
in return (Expr pn ty (ExprNumber NumZero), usub, tsub)
NumDec str r ->
let ty = GType $ TyReal unitDimensionless
in return (Expr pn ty (ExprNumber $ NumDec str r), usub, tsub)
NumHex str r ->
let ty = GType $ TyReal unitDimensionless
in return (Expr pn ty (ExprNumber $ NumHex str r), usub, tsub)
NumMult x y -> do
(x', usub1,tsub1) <- inferType utab ttab x usub tsub
(y', usub2,tsub2) <- inferType utab ttab y usub1 tsub1
ux <- compFreshVar
uy <- compFreshVar
rx <- unifyTypes (GType (TyReal (unitVar ux))) (exprTy x') usub2 tsub2
case rx of
Just (GType (TyReal ux'),_,usub3,tsub3) -> do
ry <- unifyTypes (GType (TyReal (unitVar uy))) (exprTy y') usub3 tsub3
case ry of
Just (GType (TyReal uy'),_,usub4,tsub4) -> do
return (Expr pn (GType (TyReal (unitMul ux' uy'))) $ ExprNumber $ NumMult x' y', usub4, tsub4)
_ -> errMsg pn $ "real number type expected"
_ -> errMsg pn $ "real number type expected"
NumDiv x y -> do
(x', usub1,tsub1) <- inferType utab ttab x usub tsub
(y', usub2,tsub2) <- inferType utab ttab y usub1 tsub1
ux <- compFreshVar
uy <- compFreshVar
rx <- unifyTypes (GType (TyReal (unitVar ux))) (exprTy x') usub2 tsub2
case rx of
Just (GType (TyReal ux'),_,usub3,tsub3) -> do
ry <- unifyTypes (GType (TyReal (unitVar uy))) (exprTy y') usub3 tsub3
case ry of
Just (GType (TyReal uy'),_,usub4,tsub4) -> do
uy'' <- unitInv (loc y) uy'
return (Expr pn (GType (TyReal (unitMul ux' uy''))) $ ExprNumber $ NumDiv x' y', usub4, tsub4)
_ -> errMsg pn $ "real number type expected"
_ -> errMsg pn $ "real number type expected"
NumToPower x n -> do
(x', usub1,tsub1) <- inferType utab ttab x usub tsub
uv <- compFreshVar
let rty = GType (TyReal (unitVar uv))
r <- unifyTypes rty (exprTy x') usub1 tsub1
case r of
Just (GType (TyReal u),_,usub2,tsub2) -> do
u' <- unitToPower (loc x) u n
return (Expr pn (GType (TyReal u')) $ ExprNumber $ NumToPower x' n, usub2, tsub2)
_ -> errMsg pn "real number type required"
NumNegate x -> do
(x', usub1,tsub1) <- inferType utab ttab x usub tsub
uv <- compFreshVar
let rty = GType (TyReal (unitVar uv))
r <- unifyTypes rty (exprTy x') usub1 tsub1
case r of
Nothing -> errMsg pn "real number type required"
Just (t',_,usub2,tsub2) ->
return (Expr pn t' $ ExprNumber $ NumNegate x', usub2, tsub2)
NumPlus x y -> do
(x', usub1,tsub1) <- inferType utab ttab x usub tsub
(y', usub2,tsub2) <- inferType utab ttab y usub1 tsub1
uv <- compFreshVar
let rty = GType (TyReal (unitVar uv))
r <- unifyTypeList [rty,exprTy x',exprTy y'] usub2 tsub2
case r of
Just (rty':_,usub3,tsub3) ->
return (Expr pn rty' (ExprNumber (NumPlus x' y')), usub3, tsub3)
_ -> errMsg pn $ unwords [ "could not unify real number types"
, displayType usub2 tsub2 (exprTy x')
, displayType usub2 tsub2 (exprTy y')
]
NumMinus x y -> do
(x', usub1,tsub1) <- inferType utab ttab x usub tsub
(y', usub2,tsub2) <- inferType utab ttab y usub1 tsub1
uv <- compFreshVar
let rty = GType (TyReal (unitVar uv))
r <- unifyTypeList [rty,exprTy x',exprTy y'] usub2 tsub2
case r of
Just (rty':_,usub3,tsub3) ->
return (Expr pn rty' (ExprNumber (NumMinus x' y')), usub3, tsub3)
_ -> errMsg pn $ unwords [ "could not unify real number types"
, displayType usub2 tsub2 (exprTy x')
, displayType usub2 tsub2 (exprTy y')
]
inferNumber _ _ (Expr pn _ _) _ _ = errMsg pn "Orlin.Types.inferNumber: impossible!"
computeReducedType :: UnitTable -> Type -> Comp GType
computeReducedType utbl (Type pn ty) =
case ty of
TyInt -> return $ GType TyInt
TyNat -> return $ GType TyNat
TyReal u ->
do u' <- computeReducedUnit pn utbl u
return $ GType $ TyReal u'
TyIdent i -> return $ GType $ TyIdent i
TyArrow t1 t2 ->
pure (\x y -> GType $ TyArrow x y)
<*> computeReducedType utbl t1
<*> computeReducedType utbl t2
TyForall i k t ->
fmap (GType . TyForall i k) $ computeReducedType utbl t
unifyTypeList :: [GType] -> USubst -> TSubst -> Comp (Maybe ([GType], USubst, TSubst))
unifyTypeList [] usub tsub = return (Just ([],usub,tsub))
unifyTypeList [t] usub tsub = return (Just ([t],usub,tsub))
unifyTypeList (t1:t2:ts) usub tsub =
do x <- unifyTypes t1 t2 usub tsub
case x of
Nothing -> return Nothing
Just (t1',t2',usub',tsub') ->
do y <- unifyTypeList (t2':ts) usub' tsub'
case y of
Nothing -> return Nothing
Just (ts',usub'',tsub'') -> return (Just (t1':ts',usub'',tsub''))
unifyVar :: TVar -> GType -> USubst -> TSubst -> Comp (Maybe (GType, GType, USubst, TSubst))
unifyVar v ty usub tsub =
case Map.lookup v tsub of
Nothing ->
-- FIXME: need to perform occurs check here...
let tsub' = Map.insert v ty tsub
in return $ Just (ty,ty,usub,tsub')
Just ty' -> unifyTypes ty' ty usub tsub
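-- Attempt to unify two types, threading the unit substitution and the type
-- substitution through; on success returns the (possibly refined) types
-- together with the updated substitutions, on failure returns Nothing.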
unifyTypes :: GType -> GType -> USubst -> TSubst -> Comp (Maybe (GType, GType, USubst, TSubst))
unifyTypes (GTypeVar v) t2 usub tsub = unifyVar v t2 usub tsub
unifyTypes t1 (GTypeVar v) usub tsub = fmap (fmap (\(t2',t1',usub',tsub') -> (t1',t2',usub',tsub')))
$ unifyVar v t1 usub tsub
unifyTypes t1@(GType TyInt) t2@(GType TyInt) usub tsub =
return $ Just (t1,t2,usub,tsub)
unifyTypes t1@(GType TyNat) t2@(GType TyNat) usub tsub =
return $ Just (t1,t2,usub,tsub)
unifyTypes t1@(GType (TyReal u1)) t2@(GType (TyReal u2)) usub tsub =
do x <- unifyUnits u1 u2 usub
case x of
Nothing -> return Nothing
Just (u1',u2',usub') ->
return $ Just (GType (TyReal u1'), GType (TyReal u2'), usub', tsub)
unifyTypes t1@(GType (TyIdent i1)) t2@(GType (TyIdent i2)) usub tsub =
if getIdent i1 == getIdent i2
then return $ Just (t1,t2,usub,tsub)
else return Nothing
unifyTypes (GType (TyArrow s1 s2)) (GType (TyArrow t1 t2)) usub tsub =
do x <- unifyTypes s1 t1 usub tsub
case x of
Nothing -> return Nothing
Just (s1',t1',usub',tsub') ->
do y <- unifyTypes s2 t2 usub' tsub'
case y of
Nothing -> return Nothing
Just (s2',t2',usub'',tsub'') ->
return $ Just (GType (TyArrow s1' s2'), GType (TyArrow t1' t2'), usub'', tsub'')
unifyTypes _ _ _ _ = return Nothing
{-
typeCheckNum :: Pn -> NumF (Expr ()) -> Type -> TC (Expr GType)
typeCheckNum pn num (Type ty_pn (TyReal u)) =
case num of
NumZero -> return (Expr pn (GType (TyReal u)) NumZero)
NumDec str r
| u == UZero && r == 0 -> return (Expr pn (GType (TyReal u)) NumZero)
| u == UZero && r <> 0 -> lift $ errMsg pn $ "non-zero constants cannot be assigned unit 0"
| otherwise = return (Expr pn (GType (TyReal u) (NumDec str r)))
NumHex str r
| u == UZero && r == 0 -> return (Expr pn (GType (TyReal u)) NumZero)
| u == UZero && r <> 0 -> lift $ errMsg pn $ "non-zero constants cannot be assigned unit 0"
| otherwise = return (Expr pn (GType (TyReal u) (NumHex str r)))
NumMult
typeCheckNum pn num _ = lift $ errMsg pn $ "numeric expression must have type 'real'"
typeCheck :: Expr () -> Type -> TC (Expr GType)
typeCheck (Expr pn e) ty =
case e of
ExprNumber num -> typeCheckNum pn num ty
ExprToPower
-}
|
robdockins/orlin
|
src/Orlin/Types.hs
|
bsd-2-clause
| 13,446
| 1
| 30
| 4,210
| 4,697
| 2,362
| 2,335
| 274
| 17
|
{-# LANGUAGE OverloadedStrings #-}
{-
MSP430 instruction set.
-}
module Translator.Assembler.Target.MSP430 (Instr430(..), Suffix(..), Reg(..), Op(..),
module Translator.Assembler.Directive) where
import qualified Translator.Assembler.Directive as G
import Translator.Expression
import Translator.Symbol
import Translator.Assembler.Directive
import Translator.Assembler.InstructionSet
data Instr430 = ADD Suffix Op Op
| ADDC Suffix Op Op
| AND Suffix Op Op
| BIS Suffix Op Op
| CALL Op
| CLRC
| DEC Suffix Op
| DECD Suffix Op
| INC Suffix Op
| INCD Suffix Op
| INV Suffix Op
| JC Lab
| JNC Lab
| JZ Lab
| JMP Op
| MOV Suffix Op Op
| POP Suffix Op
| PUSH Suffix Op
| RLA Suffix Op
| RRA Suffix Op
| RRC Suffix Op
| SUB Suffix Op Op
| SUBC Suffix Op Op
| TST Suffix Op
| XOR Suffix Op Op
| Directive GNUDirective
type Lab = String
data Suffix = B | W deriving (Eq, Show)
data Reg = PC | SP | SR | CG1 | CG2 | R4 | R5 | R6 | R7 | R8 | R9 | R10 | R11 | R12 | R13 | R14 | R15
deriving (Eq, Show)
data Op = RegOp Reg | Indexed Expr Reg | Symbolic Expr | Absolute Expr | Indirect Reg | IndirectInc Reg
| Imm Expr
instance Show Op where
show (RegOp reg) = show reg
show (Indexed n reg) = show n ++ "(" ++ show reg ++ ")"
show (Symbolic n) = show n
show (Absolute n) = '&' : show n
show (Indirect reg) = '@' : show reg
show (IndirectInc reg) = '@' : (show reg ++ "+")
show (Imm n) = '#' : show n
instance InstructionSet Instr430 where
disassemble instr =
let disasm (ADD s op1 op2) = dis "add" s op1 (Just op2)
disasm (ADDC s op1 op2) = dis "addc" s op1 (Just op2)
disasm (AND s op1 op2) = dis "and" s op1 (Just op2)
disasm (BIS s op1 op2) = dis "bis" s op1 (Just op2)
disasm (CALL op) = ("call", Just [show op])
disasm CLRC = ("clrc", Nothing)
disasm (DEC s op) = dis "dec" s op Nothing
disasm (DECD s op) = dis "decd" s op Nothing
disasm (INC s op) = dis "inc" s op Nothing
disasm (INCD s op) = dis "incd" s op Nothing
disasm (INV s op) = dis "inv" s op Nothing
disasm (JC lab) = ("jc", Just [lab])
disasm (JNC lab) = ("jnc", Just [lab])
disasm (JZ lab) = ("jz", Just [lab])
disasm (JMP op) = ("jmp", Just [show op])
disasm (MOV s op1 op2) = dis "mov" s op1 (Just op2)
disasm (POP s op) = dis "pop" s op Nothing
disasm (PUSH s op) = dis "push" s op Nothing
disasm (RLA s op) = dis "rla" s op Nothing
disasm (RRA s op) = dis "rra" s op Nothing
disasm (RRC s op) = dis "rrc" s op Nothing
disasm (SUB s op1 op2) = dis "sub" s op1 (Just op2)
disasm (SUBC s op1 op2) = dis "subc" s op1 (Just op2)
disasm (TST s op) = dis "tst" s op Nothing
disasm (XOR s op1 op2) = dis "xor" s op1 (Just op2)
disasm (Directive dir) = disassemble dir
dis mne s op Nothing = (n mne s, Just [show op])
dis mne s op1 (Just (Indirect r)) = (n mne s, Just [show op1, show (Indexed 0 r)])
dis mne s op1 (Just op2) = (n mne s, Just (map show [op1, op2]))
n mne W = mne
n mne B = mne ++ ".b"
in disasm instr
|
hth313/hthforth
|
src/Translator/Assembler/Target/MSP430.hs
|
bsd-2-clause
| 3,781
| 0
| 15
| 1,511
| 1,462
| 766
| 696
| 82
| 0
|
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Util.WindowProperties
-- Copyright : (c) Roman Cheplyaka
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Roman Cheplyaka <roma@ro-che.info>
-- Stability : unstable
-- Portability : unportable
--
-- EDSL for specifying window properties; various utilities related to window
-- properties.
--
-----------------------------------------------------------------------------
module XMonad.Util.WindowProperties (
-- * EDSL for window properties
-- $edsl
Property(..), hasProperty, focusedHasProperty, allWithProperty,
propertyToQuery,
-- * Helper functions
-- $helpers
getProp32, getProp32s)
where
import Control.Monad
import Foreign.C.Types (CLong)
import XMonad
import XMonad.Actions.TagWindows (hasTag)
import qualified XMonad.StackSet as W
-- $edsl
-- Lets you specify window properties, such as the title, class name or
-- resource, and check them against windows.
--
-- In contrast to ManageHook properties, these are instances of Show and Read,
-- so they can be used in layout definitions etc. For example usage see "XMonad.Layout.IM".
-- | Most of the property constructors are quite self-explanatory.
data Property = Title String
| ClassName String
| Resource String
| Role String -- ^ WM_WINDOW_ROLE property
| Machine String -- ^ WM_CLIENT_MACHINE property
| And Property Property
| Or Property Property
| Not Property
| Const Bool
| Tagged String -- ^ Tagged via 'XMonad.Actions.TagWindows'
deriving (Read, Show)
infixr 9 `And`
infixr 8 `Or`
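-- For instance, a (made-up) property expression selecting a Firefox window
-- that is not tagged "scratchpad":
--
-- > myProp :: Property
-- > myProp = ClassName "Firefox" `And` Not (Tagged "scratchpad")
--
-- It can then be checked with e.g. @focusedHasProperty myProp@.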
-- | Does the given window have this property?
hasProperty :: Property -> Window -> X Bool
hasProperty p w = runQuery (propertyToQuery p) w
-- | Does the focused window have this property?
focusedHasProperty :: Property -> X Bool
focusedHasProperty p = do
ws <- gets windowset
let ms = W.stack $ W.workspace $ W.current ws
case ms of
Just s -> hasProperty p $ W.focus s
Nothing -> return False
-- | Find all existing windows with the specified property
allWithProperty :: Property -> X [Window]
allWithProperty prop = withDisplay $ \dpy -> do
rootw <- asks theRoot
(_,_,wins) <- io $ queryTree dpy rootw
hasProperty prop `filterM` wins
-- | Convert property to 'Query' 'Bool' (see "XMonad.ManageHook")
propertyToQuery :: Property -> Query Bool
propertyToQuery (Title s) = title =? s
propertyToQuery (Resource s) = resource =? s
propertyToQuery (ClassName s) = className =? s
propertyToQuery (Role s) = stringProperty "WM_WINDOW_ROLE" =? s
propertyToQuery (Machine s) = stringProperty "WM_CLIENT_MACHINE" =? s
propertyToQuery (And p1 p2) = propertyToQuery p1 <&&> propertyToQuery p2
propertyToQuery (Or p1 p2) = propertyToQuery p1 <||> propertyToQuery p2
propertyToQuery (Not p) = not `fmap` propertyToQuery p
propertyToQuery (Const b) = return b
propertyToQuery (Tagged s) = ask >>= \w -> liftX (hasTag s w)
-- $helpers
-- | Get a window property from an atom
getProp32 :: Atom -> Window -> X (Maybe [CLong])
getProp32 a w = withDisplay $ \dpy -> io $ getWindowProperty32 dpy a w
-- | Get a window property from a string
getProp32s :: String -> Window -> X (Maybe [CLong])
getProp32s str w = do { a <- getAtom str; getProp32 a w }
|
f1u77y/xmonad-contrib
|
XMonad/Util/WindowProperties.hs
|
bsd-3-clause
| 3,398
| 0
| 12
| 715
| 742
| 403
| 339
| 51
| 2
|
module Sexy.Instances.Monad () where
import Sexy.Instances.Monad.Function ()
import Sexy.Instances.Monad.Maybe ()
import Sexy.Instances.Monad.Either ()
import Sexy.Instances.Monad.List ()
import Sexy.Instances.Monad.IO ()
|
DanBurton/sexy
|
src/Sexy/Instances/Monad.hs
|
bsd-3-clause
| 223
| 0
| 4
| 20
| 60
| 42
| 18
| 6
| 0
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PartialTypeSignatures #-}
module SAWScript.Heapster.CtxPermImpl where
import Data.Kind
import Data.Parameterized.Ctx
import Data.Parameterized.Context
import Data.Parameterized.NatRepr
import Lang.Crucible.Types
import Lang.Crucible.LLVM.MemModel
import Lang.Crucible.CFG.Core
import SAWScript.Heapster.Permissions
import SAWScript.Heapster.CtxMonad as C
----------------------------------------------------------------------
-- * Permission Implications
----------------------------------------------------------------------
data PermImpl (f :: Ctx CrucibleType -> Data.Kind.*) (ctx :: Ctx CrucibleType) where
Impl_Done :: f ctx -> PermImpl f ctx
-- ^ No more elimination; i.e., implements the rule
--
-- -------------------------------
-- Gin | Pin |- Pin
Impl_Fail :: PermImpl f ctx
-- ^ The empty tree, with no disjunctive possibilities; i.e., implements the
-- rule
--
-- ------------------------------
-- Gin | Pin |- Pany
Impl_Catch :: PermImpl f ctx -> PermImpl f ctx -> PermImpl f ctx
-- ^ Copy the same permissions into two different elimination trees, where an
-- 'Impl_Fail' in the first tree "calls" the second tree, just like a
-- try-catch block for exceptions. This implements the rule:
--
-- pf1 = Gin | Pin |- rets1 pf2 = Gin | Pin |- rets2
-- ----------------------------------------------------
-- Gin | Pin |- rets1, rets2
Impl_ElimOr :: Index ctx a -> PermImpl f ctx -> PermImpl f ctx -> PermImpl f ctx
-- ^ Eliminate a 'ValPerm_Or' on the given variable, replacing it with the
-- left- and right-hand sides in the two sub-eliminations
--
-- pf1 = Gin | Pin, x:p1 |- GsPs1 pf2 = Gin | Pin, x:p2 |- GsPs2
-- -----------------------------------------------------------------
-- Gin | Pin, x:(p1 \/ p2) |- GsPs1, GsPs2
Impl_IntroOrL :: Index ctx a -> PermImpl f ctx -> PermImpl f ctx
-- ^ @Impl_IntroOrL x p2 pf@ is the left disjunction introduction rule
--
-- > pf = Gamma | Pin |- e:p1, Pout
-- > ---------------------------------
-- > Gamma | Pin |- e:(p1 \/ p2), Pout
Impl_IntroOrR :: Index ctx a -> PermImpl f ctx -> PermImpl f ctx
-- ^ @Impl_IntroOrR x p1 pf@ is the right disjunction introduction rule
--
-- > pf = Gamma | Pin |- e:p2, Pout
-- > ---------------------------------
-- > Gamma | Pin |- e:(p1 \/ p2), Pout
Impl_ElimExists :: Index ctx a -> TypeRepr tp -> PermImpl f (ctx ::> tp) ->
PermImpl f ctx
-- ^ Eliminate an existential, i.e., a 'ValPerm_Exists', on the given variable
--
-- pf = Gin, z:tp | Pin, x:p, z:true |- rets
-- ------------------------------------------------------
-- Gin | x:(exists z:tp. p) |- rets
Impl_IntroExists :: Index ctx a -> TypeRepr tp -> PermExpr ctx tp ->
ValuePerm (ctx ::> tp) a ->
PermImpl f ctx -> PermImpl f ctx
-- ^ @Intro_Exists x tp e p pf@ is the existential introduction rule
--
-- > pf = Gamma | Pin |- x:[e'/z]p, Pout
-- > --------------------------------------
-- > Gamma | Pin |- x:(exists z:tp.p), Pout
instance C.Weakenable f => C.Weakenable (PermImpl f) where
weaken _ _ _ = error "FIXME HERE: Weakenable instance for PermImpl"
----------------------------------------------------------------------
-- * Permission Implication Monad
----------------------------------------------------------------------
newtype ValuePerms ctx a = ValuePerms [ValuePerm ctx a]
newtype MultiPermSet ctx =
MultiPermSet (Assignment (ValuePerms ctx) ctx)
instance C.Weakenable MultiPermSet where
weaken w sz (MultiPermSet asgn) =
error "FIXME HERE: need Weakenable for ValuePerm"
type PImplM f =
C.CStateT MultiPermSet (C.CContT (PermImpl f) CIdentity)
newtype Flip f a ctx = Flip { unFlip :: f ctx a }
instance C.Weakenable (Flip ValuePerms a) where
weaken _ _ _ = error "FIXME HERE: Weakenable for Flip"
getMultiPerms :: CExpr MultiPermSet ctx -> CExpr (CVar a) ctx ->
CExpr (Flip ValuePerms a) ctx
getMultiPerms =
cOp2 (\(MultiPermSet perms) (CVar ix) -> Flip (perms ! ix))
lookupPerm :: C.Weakenable f =>
CExpr (CVar a :->: PImplM f (Flip ValuePerms a)) ectx
lookupPerm =
clam $ \x ->
C.cget C.>>>= \perms ->
C.creturn $ getMultiPerms perms x
|
GaloisInc/saw-script
|
heapster-saw/src/Verifier/SAW/Heapster/archival/CtxPermImpl.hs
|
bsd-3-clause
| 4,825
| 0
| 12
| 936
| 772
| 432
| 340
| -1
| -1
|
{-# LANGUAGE Rank2Types #-}
module HO where
ho :: (forall a. f a -> a) -> f a -> a
ho f = f
-- This undefined should have type "forall a. f a -> a", but it has "f a -> a"
-- which means either "forall f a. f a -> a" or "exists f a. f a -> a", both
-- of which are wrong.
f :: f a -> a
f = ho undefined
|
sebastiaanvisser/ghc-goals
|
tests/HO.hs
|
bsd-3-clause
| 303
| 0
| 9
| 80
| 67
| 37
| 30
| 6
| 1
|
-- | Parsing and Printing NextStep style (old style) plist files
module Text.NSPlist (
NSPlistValue(..),
parseNSPlist,
readNSPlistFromFile,
showNSPlist,
writeNSPlistToFile
) where
import Text.Parsec (runParser)
import Text.Parsec.Error (ParseError)
import Text.PrettyPrint (render)
import Text.NSPlist.Types (NSPlistValue(..))
import Text.NSPlist.Parsec (pNSPlist)
import Text.NSPlist.Pretty (prettyNSPlist)
parseNSPlist :: String -> Either ParseError NSPlistValue
parseNSPlist = runParser pNSPlist () ""
readNSPlistFromFile :: FilePath -> IO NSPlistValue
readNSPlistFromFile filePath = do
file <- readFile filePath
case runParser pNSPlist () filePath file of
Left parseError -> error ("Cannot parse file " ++ filePath
++ ", error was " ++ show parseError)
Right obj -> return obj
writeNSPlistToFile :: FilePath -> NSPlistValue -> IO ()
writeNSPlistToFile filePath = writeFile filePath . showNSPlist
showNSPlist :: NSPlistValue -> String
showNSPlist = render . prettyNSPlist
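-- A minimal usage sketch (the plist snippet below is made up and may need
-- adjusting to the parser's exact syntax):
--
-- > main :: IO ()
-- > main =
-- >   case parseNSPlist "{ name = \"demo\"; }" of
-- >     Left err  -> putStrLn ("parse failed: " ++ show err)
-- >     Right val -> putStrLn (showNSPlist val)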
|
plancalculus/nextstep-plist
|
Text/NSPlist.hs
|
bsd-3-clause
| 1,065
| 0
| 14
| 211
| 265
| 143
| 122
| 25
| 2
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Lambdasim.Simulation where
import Control.Parallel.Strategies
import Data.DeriveTH
import Data.Record.Label
import Data.Time (UTCTime)
import Text.Printf (printf)
import Prelude ()
import Lambdasim.Prelude
import Lambdasim.Geographical
import Lambdasim.Time
data Simulation = Simulation {
_time :: UTCTime,
_vessels :: [Vessel]
}
data Vessel = Vessel {
_position :: Geog,
_heading :: Angle,
_rudder :: AngularVelocity,
_speed :: Velocity
}
$(derive makeNFData ''Simulation)
$(derive makeNFData ''Vessel)
$(mkLabels [''Simulation, ''Vessel])
vessels :: Simulation :-> [Vessel]
time :: Simulation :-> UTCTime
position :: Vessel :-> Geog
heading :: Vessel :-> Angle
rudder :: Vessel :-> AngularVelocity
speed :: Vessel :-> Velocity
class AdvanceTime a where
advanceBy :: Time -> a -> a
instance Show Simulation where
show s = "Simulation\n"
++ " Time: " ++ show (get time s) ++ "\n"
++ " Vessels: " ++ show (get vessels s)
instance AdvanceTime Simulation where
advanceBy t s = set time (addTime t $ get time s) $
set vessels (map (advanceBy t) (get vessels s)) s
newSimulation :: UTCTime -> Simulation
newSimulation utc = Simulation
{ _time = utc
, _vessels = []
}
addVessel :: Simulation -> Simulation
addVessel = updateVessels (\vs -> newVessel : vs)
updateVessels :: ([Vessel] -> [Vessel]) -> Simulation -> Simulation
updateVessels f s = set vessels (f $ get vessels s) s
updateFirstVessel :: (Vessel -> Vessel) -> Simulation -> Simulation
updateFirstVessel f = updateVessels update
where
update [] = []
update (v:vs) = f v : vs
instance Show Vessel where
show v = printf "Vessel Pos: %s Hdg: %.2f deg" p h
where
p = show (get position v)
h = get heading v /~ degree
instance AdvanceTime Vessel where
advanceBy t v = set position (translate dst hdg pos) $
set heading (normalize360 $ hdg + (rdr * t)) v
where
pos = get position v
hdg = get heading v
rdr = get rudder v
spd = get speed v
dst = spd * t
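-- | Normalise an angle into the half-open range [0°, 360°).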
normalize360 :: Angle -> Angle
normalize360 x | x < deg0 = normalize360 (x + deg360)
| x >= deg360 = normalize360 (x - deg360)
| otherwise = x
where
deg0 = 0.0 *~ degree
deg360 = 360.0 *~ degree
newVessel :: Vessel
newVessel = Vessel
{ _position = mkGeog (-32) 116 0
, _heading = 0 *~ degree
, _rudder = 2 *~ (degree / second)
, _speed = 5 *~ knot
}
|
jystic/lambdasim
|
src/Lambdasim/Simulation.hs
|
bsd-3-clause
| 2,607
| 0
| 12
| 684
| 887
| 470
| 417
| 74
| 2
|
module Main where
import D16Lib
import D16Lib.Parser as P
import System.Environment (getArgs)
main :: IO ()
main = do
file <- head <$> getArgs
parseResult <- P.parseFile file
let endDancers = run dancers <$> parseResult
putStrLn $ "p1: dancers after dance = " ++ (show endDancers)
let c = cycleLength dancers <$> parseResult
let realIters = (iterations `mod`) <$> c
let endDancersP2 = runTimes dancers <$> parseResult <*> realIters
putStrLn $ "p2: cycle length = " ++ (show c) ++ ", only doing " ++ (show realIters) ++ " instead of a billion"
putStrLn $ "p2: dancers after dance = " ++ (show endDancersP2)
|
wfleming/advent-of-code-2016
|
2017/D16/app/Main.hs
|
bsd-3-clause
| 627
| 0
| 12
| 125
| 202
| 101
| 101
| 15
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.LinearAlgebra.Packed.ST
-- Copyright : Copyright (c) 2010, Patrick Perry <patperry@gmail.com>
-- License : BSD3
-- Maintainer : Patrick Perry <patperry@gmail.com>
-- Stability : experimental
--
-- Mutable packed matrices.
--
module Numeric.LinearAlgebra.Packed.ST (
-- * Mutable packed matrices
STPacked,
IOPacked,
create,
-- * Read-only packed matrices
RPacked(..),
-- * Conversions between mutable and immutable packed matrices
freeze,
-- * Creating new packed matrices
new_,
-- * Copying matrices
newCopy,
-- * Vector views of packed matrices
withVectorM,
-- * Packed matrix views of vectors
withFromVector,
withFromVectorM,
) where
import Numeric.LinearAlgebra.Packed.Base
|
patperry/hs-linear-algebra
|
lib/Numeric/LinearAlgebra/Packed/ST.hs
|
bsd-3-clause
| 882
| 0
| 5
| 198
| 69
| 53
| 16
| 12
| 0
|
{-# LANGUAGE BangPatterns, CPP, DeriveFunctor, ScopedTypeVariables #-}
-- | This module allows for incremental decoding and encoding of CSV
-- data. This is useful if you e.g. want to interleave I\/O with
-- parsing or if you want finer grained control over how you deal with
-- type conversion errors.
--
-- Decoding example:
--
-- > main :: IO ()
-- > main = withFile "salaries.csv" ReadMode $ \ csvFile -> do
-- > let loop !_ (Fail _ errMsg) = putStrLn errMsg >> exitFailure
-- > loop acc (Many rs k) = loop (acc + sumSalaries rs) =<< feed k
-- > loop acc (Done rs) = putStrLn $ "Total salaries: " ++
-- > show (sumSalaries rs + acc)
-- >
-- > feed k = do
-- > isEof <- hIsEOF csvFile
-- > if isEof
-- > then return $ k B.empty
-- > else k `fmap` B.hGetSome csvFile 4096
-- > loop 0 (decode NoHeader)
-- > where
-- > sumSalaries rs = sum [salary | Right (_ :: String, salary :: Int) <- rs]
--
-- Encoding example:
--
-- > data Person = Person { name :: !String, salary :: !Int }
-- > deriving Generic
-- >
-- > instance FromNamedRecord Person
-- > instance ToNamedRecord Person
-- > instance DefaultOrdered Person
-- >
-- > persons :: [Person]
-- > persons = [Person "John" 50000, Person "Jane" 60000]
-- >
-- > main :: IO ()
-- > main = putStrLn $ encodeDefaultOrderedByName (go persons)
-- > where
-- > go (x:xs) = encodeNamedRecord x <> go xs
--
module Data.Csv.Incremental
(
-- * Decoding
HeaderParser(..)
, decodeHeader
, decodeHeaderWith
-- $typeconversion
, Parser(..)
-- ** Index-based record conversion
-- $indexbased
, HasHeader(..)
, decode
, decodeWith
, decodeWithP
-- ** Name-based record conversion
-- $namebased
, decodeByName
, decodeByNameWith
, decodeByNameWithP
-- * Encoding
-- ** Index-based record conversion
-- $indexbased
, encode
, encodeWith
, encodeRecord
, Builder
-- ** Name-based record conversion
-- $namebased
, encodeByName
, encodeDefaultOrderedByName
, encodeByNameWith
, encodeDefaultOrderedByNameWith
, encodeNamedRecord
, NamedBuilder
) where
import Control.Applicative ((<|>))
import qualified Data.Attoparsec.ByteString as A
import Data.Attoparsec.ByteString.Char8 (endOfInput)
import qualified Data.ByteString as B
import qualified Data.ByteString.Builder as Builder
import qualified Data.ByteString.Lazy as L
import Data.Semigroup as Semi (Semigroup, (<>))
import qualified Data.Vector as V
import Data.Word (Word8)
import Data.Csv.Conversion hiding (Parser, header, namedRecord, record,
toNamedRecord)
import qualified Data.Csv.Conversion as Conversion
import qualified Data.Csv.Encoding as Encoding
import Data.Csv.Encoding (EncodeOptions(..), Quoting(..), recordSep)
import Data.Csv.Parser
import Data.Csv.Types
import Data.Csv.Util (endOfLine)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid(mappend, mempty))
import Control.Applicative ((<*))
#endif
-- $feed-header
--
-- These functions are sometimes convenient when working with
-- 'HeaderParser', but don't let you do anything you couldn't already
-- do using the 'HeaderParser' constructors directly.
-- $indexbased
--
-- See documentation on index-based conversion in "Data.Csv" for more
-- information.
-- $namebased
--
-- See documentation on name-based conversion in "Data.Csv" for more
-- information.
-- $feed-records
--
-- These functions are sometimes convenient when working with
-- 'Parser', but don't let you do anything you couldn't already do
-- using the 'Parser' constructors directly.
------------------------------------------------------------------------
-- * Decoding headers
-- | An incremental parser that when fed data eventually returns a
-- parsed 'Header', or an error.
data HeaderParser a =
-- | The input data was malformed. The first field contains any
-- unconsumed input and second field contains information about
-- the parse error.
FailH !B.ByteString String
-- | The parser needs more input data before it can produce a
-- result. Use an 'B.empty' string to indicate that no more
-- input data is available. If fed an 'B.empty' string, the
-- continuation is guaranteed to return either 'FailH' or
-- 'DoneH'.
| PartialH (B.ByteString -> HeaderParser a)
-- | The parse succeeded and produced the given 'Header'.
| DoneH !Header a
deriving Functor
instance Show a => Show (HeaderParser a) where
showsPrec d (FailH rest msg) = showParen (d > appPrec) showStr
where
showStr = showString "FailH " . showsPrec (appPrec+1) rest .
showString " " . showsPrec (appPrec+1) msg
showsPrec _ (PartialH _) = showString "PartialH <function>"
showsPrec d (DoneH hdr x) = showParen (d > appPrec) showStr
where
showStr = showString "DoneH " . showsPrec (appPrec+1) hdr .
showString " " . showsPrec (appPrec+1) x
-- Application has precedence one more than the most tightly-binding
-- operator
appPrec :: Int
appPrec = 10
-- | Parse a CSV header in an incremental fashion. When done, the
-- 'HeaderParser' returns any unconsumed input in the second field of
-- the 'DoneH' constructor.
decodeHeader :: HeaderParser B.ByteString
decodeHeader = decodeHeaderWith defaultDecodeOptions
-- | Like 'decodeHeader', but lets you customize how the CSV data is
-- parsed.
decodeHeaderWith :: DecodeOptions -> HeaderParser B.ByteString
decodeHeaderWith !opts = PartialH (go . parser)
where
parser = A.parse (header $ decDelimiter opts)
go (A.Fail rest _ msg) = FailH rest err
where err = "parse error (" ++ msg ++ ")"
-- TODO: Check empty and give attoparsec one last chance to return
-- something:
go (A.Partial k) = PartialH $ \ s -> go (k s)
go (A.Done rest r) = DoneH r rest
------------------------------------------------------------------------
-- * Decoding records
-- $typeconversion
--
-- Just like in the case of non-incremental decoding, there are two
-- ways to convert CSV records to and from and user-defined data
-- types: index-based conversion and name-based conversion.
-- | An incremental parser that when fed data eventually produces some
-- parsed records, converted to the desired type, or an error in case
-- of malformed input data.
data Parser a =
-- | The input data was malformed. The first field contains any
-- unconsumed input and second field contains information about
-- the parse error.
Fail !B.ByteString String
-- | The parser parsed and converted zero or more records. Any
-- records that failed type conversion are returned as @'Left'
-- errMsg@ and the rest as @'Right' val@. Feed a 'B.ByteString'
-- to the continuation to continue parsing. Use an 'B.empty'
-- string to indicate that no more input data is available. If
-- fed an 'B.empty' string, the continuation is guaranteed to
-- return either 'Fail' or 'Done'.
| Many [Either String a] (B.ByteString -> Parser a)
-- | The parser parsed and converted some records. Any records
-- that failed type conversion are returned as @'Left' errMsg@
-- and the rest as @'Right' val@.
| Done [Either String a]
deriving Functor
instance Show a => Show (Parser a) where
showsPrec d (Fail rest msg) = showParen (d > appPrec) showStr
where
showStr = showString "Fail " . showsPrec (appPrec+1) rest .
showString " " . showsPrec (appPrec+1) msg
showsPrec d (Many rs _) = showParen (d > appPrec) showStr
where
showStr = showString "Many " . showsPrec (appPrec+1) rs .
showString " <function>"
showsPrec d (Done rs) = showParen (d > appPrec) showStr
where
showStr = showString "Done " . showsPrec (appPrec+1) rs
-- | Have we read all available input?
data More = Incomplete | Complete
deriving (Eq, Show)
-- | Efficiently deserialize CSV in an incremental fashion. Equivalent
-- to @'decodeWith' 'defaultDecodeOptions'@.
decode :: FromRecord a
=> HasHeader -- ^ Data contains header that should be
-- skipped
-> Parser a
decode = decodeWith defaultDecodeOptions
-- | Like 'decode', but lets you customize how the CSV data is parsed.
decodeWith :: FromRecord a
=> DecodeOptions -- ^ Decoding options
-> HasHeader -- ^ Data contains header that should be
-- skipped
-> Parser a
decodeWith !opts hasHeader = decodeWithP parseRecord opts hasHeader
-- | Like 'decodeWith', but lets you pass an explicit parser value instead of
-- using a typeclass
--
-- @since 0.5.2.0
decodeWithP :: (Record -> Conversion.Parser a)
-> DecodeOptions -- ^ Decoding options
-> HasHeader -- ^ Data contains header that should be
-- skipped
-> Parser a
decodeWithP p !opts hasHeader = case hasHeader of
HasHeader -> go (decodeHeaderWith opts)
NoHeader -> Many [] $ \ s -> decodeWithP' p opts s
where go (FailH rest msg) = Fail rest msg
go (PartialH k) = Many [] $ \ s' -> go (k s')
go (DoneH _ rest) = decodeWithP' p opts rest
------------------------------------------------------------------------
-- | Efficiently deserialize CSV in an incremental fashion. The data
-- is assumed to be preceded by a header. Returns a 'HeaderParser'
-- that when done produces a 'Parser' for parsing the actual records.
-- Equivalent to @'decodeByNameWith' 'defaultDecodeOptions'@.
decodeByName :: FromNamedRecord a
=> HeaderParser (Parser a)
decodeByName = decodeByNameWith defaultDecodeOptions
-- | Like 'decodeByName', but lets you customize how the CSV data is
-- parsed.
decodeByNameWith :: FromNamedRecord a
=> DecodeOptions -- ^ Decoding options
-> HeaderParser (Parser a)
decodeByNameWith !opts = decodeByNameWithP parseNamedRecord opts
-- | Like 'decodeByNameWith', but lets you pass an explicit parser value instead
-- of using a typeclass
--
-- @since 0.5.2.0
decodeByNameWithP :: (NamedRecord -> Conversion.Parser a)
-> DecodeOptions -- ^ Decoding options
-> HeaderParser (Parser a)
decodeByNameWithP p !opts = go (decodeHeaderWith opts)
where
go (FailH rest msg) = FailH rest msg
go (PartialH k) = PartialH $ \ s -> go (k s)
go (DoneH hdr rest) =
DoneH hdr (decodeWithP' (p . toNamedRecord hdr) opts rest)
------------------------------------------------------------------------
-- TODO: 'decodeWithP' should probably not take an initial
-- 'B.ByteString' input.
-- | Like 'decode', but lets you customize how the CSV data is parsed.
decodeWithP' :: (Record -> Conversion.Parser a) -> DecodeOptions -> B.ByteString
-> Parser a
decodeWithP' p !opts = go Incomplete [] . parser
where
go !_ !acc (A.Fail rest _ msg)
| null acc = Fail rest err
| otherwise = Many (reverse acc) (\ s -> Fail (rest `B.append` s) err)
where err = "parse error (" ++ msg ++ ")"
go Incomplete acc (A.Partial k) = Many (reverse acc) cont
where cont s = go m [] (k s)
where m | B.null s = Complete
| otherwise = Incomplete
go Complete _ (A.Partial _) = moduleError "decodeWithP'" msg
where msg = "attoparsec should never return Partial in this case"
go m acc (A.Done rest r)
| B.null rest = case m of
Complete -> Done (reverse acc')
Incomplete -> Many (reverse acc') (cont [])
| otherwise = go m acc' (parser rest)
where cont acc'' s
| B.null s = Done (reverse acc'')
| otherwise = go Incomplete acc'' (parser s)
acc' | blankLine r = acc
| otherwise = let !r' = convert r in r' : acc
parser = A.parse (record (decDelimiter opts) <* (endOfLine <|> endOfInput))
convert = runParser . p
{-# INLINE decodeWithP' #-}
blankLine :: V.Vector B.ByteString -> Bool
blankLine v = V.length v == 1 && (B.null (V.head v))
------------------------------------------------------------------------
-- * Encoding
-- | Efficiently serialize records in an incremental
-- fashion. Equivalent to @'encodeWith' 'defaultEncodeOptions'@.
encode :: ToRecord a => Builder a -> L.ByteString
encode = encodeWith Encoding.defaultEncodeOptions
-- | Like 'encode', but lets you customize how the CSV data is
-- encoded.
encodeWith :: ToRecord a => EncodeOptions -> Builder a
-> L.ByteString
encodeWith opts b =
Builder.toLazyByteString $
runBuilder b (encQuoting opts) (encDelimiter opts) (encUseCrLf opts)
-- | Encode a single record.
encodeRecord :: ToRecord a => a -> Builder a
encodeRecord r = Builder $ \ qtng delim useCrLf ->
Encoding.encodeRecord qtng delim (toRecord r) <> recordSep useCrLf
-- | A builder for building the CSV data incrementally. Just like the
-- @ByteString@ builder, this builder should be used in a
-- right-associative, 'foldr' style. Using '<>' to compose builders in
-- a left-associative, `foldl'` style prevents the building from being
-- incremental.
newtype Builder a = Builder {
runBuilder :: Quoting -> Word8 -> Bool -> Builder.Builder
}
-- | @since 0.5.0.0
instance Semi.Semigroup (Builder a) where
Builder f <> Builder g =
Builder $ \ qtng delim useCrlf ->
f qtng delim useCrlf <> g qtng delim useCrlf
instance Monoid (Builder a) where
mempty = Builder (\ _ _ _ -> mempty)
mappend = (Semi.<>)
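-- | A small usage sketch (not part of the original module), composing in the
-- right-associative style recommended above.  The row type @(String, Int)@ is
-- illustrative; any 'ToRecord' instance works.
encodeRows :: [(String, Int)] -> L.ByteString
encodeRows = encode . foldr (\r b -> encodeRecord r Semi.<> b) mempty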
------------------------------------------------------------------------
-- ** Name-based record conversion
-- | Efficiently serialize named records in an incremental fashion,
-- including the leading header. Equivalent to @'encodeByNameWith'
-- 'defaultEncodeOptions'@. The header is written before any records
-- and dictates the field order.
encodeByName :: ToNamedRecord a => Header -> NamedBuilder a -> L.ByteString
encodeByName = encodeByNameWith Encoding.defaultEncodeOptions
-- | Like 'encodeByName', but header and field order is dictated by
-- the 'Conversion.headerOrder' method.
encodeDefaultOrderedByName :: (DefaultOrdered a, ToNamedRecord a) =>
NamedBuilder a -> L.ByteString
encodeDefaultOrderedByName =
encodeDefaultOrderedByNameWith Encoding.defaultEncodeOptions
-- | Like 'encodeByName', but lets you customize how the CSV data is
-- encoded.
encodeByNameWith :: ToNamedRecord a => EncodeOptions -> Header -> NamedBuilder a
-> L.ByteString
encodeByNameWith opts hdr b =
Builder.toLazyByteString $
encHdr <>
runNamedBuilder b hdr (encQuoting opts) (encDelimiter opts)
(encUseCrLf opts)
where
encHdr
| encIncludeHeader opts =
Encoding.encodeRecord (encQuoting opts) (encDelimiter opts) hdr
<> recordSep (encUseCrLf opts)
| otherwise = mempty
-- | Like 'encodeDefaultOrderedByName', but lets you customize how the
-- CSV data is encoded.
encodeDefaultOrderedByNameWith ::
forall a. (DefaultOrdered a, ToNamedRecord a) =>
EncodeOptions -> NamedBuilder a -> L.ByteString
encodeDefaultOrderedByNameWith opts b =
Builder.toLazyByteString $
encHdr <>
runNamedBuilder b hdr (encQuoting opts)
(encDelimiter opts) (encUseCrLf opts)
where
hdr = Conversion.headerOrder (undefined :: a)
encHdr
| encIncludeHeader opts =
Encoding.encodeRecord (encQuoting opts) (encDelimiter opts) hdr
<> recordSep (encUseCrLf opts)
| otherwise = mempty
-- | Encode a single named record.
encodeNamedRecord :: ToNamedRecord a => a -> NamedBuilder a
encodeNamedRecord nr = NamedBuilder $ \ hdr qtng delim useCrLf ->
Encoding.encodeNamedRecord hdr qtng delim
(Conversion.toNamedRecord nr) <> recordSep useCrLf
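-- A small usage sketch (not part of the original module): assuming a type
-- @Person@ with a 'ToNamedRecord' instance and @OverloadedStrings@ for the
-- header fields, named records can be encoded in the same right-associative
-- style:
--
-- > encodePeople :: [Person] -> L.ByteString
-- > encodePeople = encodeByName (V.fromList ["name", "age"])
-- >              . foldr (\p b -> encodeNamedRecord p Semi.<> b) mempty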
-- | A builder for building the CSV data incrementally. Just like the
-- @ByteString@ builder, this builder should be used in a
-- right-associative, 'foldr' style. Using '<>' to compose builders in
-- a left-associative, `foldl'` style prevents the building from being
-- incremental.
newtype NamedBuilder a = NamedBuilder {
runNamedBuilder :: Header -> Quoting -> Word8 -> Bool -> Builder.Builder
}
-- | @since 0.5.0.0
instance Semigroup (NamedBuilder a) where
NamedBuilder f <> NamedBuilder g =
NamedBuilder $ \ hdr qtng delim useCrlf ->
f hdr qtng delim useCrlf <> g hdr qtng delim useCrlf
instance Monoid (NamedBuilder a) where
mempty = NamedBuilder (\ _ _ _ _ -> mempty)
mappend = (Semi.<>)
------------------------------------------------------------------------
moduleError :: String -> String -> a
moduleError func msg = error $ "Data.Csv.Incremental." ++ func ++ ": " ++ msg
{-# NOINLINE moduleError #-}
|
hvr/cassava
|
src/Data/Csv/Incremental.hs
|
bsd-3-clause
| 17,033
| 0
| 15
| 3,994
| 3,144
| 1,699
| 1,445
| 218
| 5
|
module Utilities.ParseByteString (parseByteString) where
import Control.Applicative ((<$>))
import Data.Word
import Data.Binary.Get
import qualified Data.ByteString.Lazy as BS
data Action m = A1 (m Word8) | A2 (m Word16) | A4 (m Word32)
action 1 = A1 getWord8
action 2 = A2 getWord16be
action 4 = A4 getWord32be
action _ = A1 getWord8
getActionList xs = map action xs
newtype Id a = Id a
getAction :: Action Get -> Get (Action Id)
getAction (A1 act) = A1 . Id <$> act
getAction (A2 act) = A2 . Id <$> act
getAction (A4 act) = A4 . Id <$> act
getActions :: [Action Get] -> Get [Action Id]
getActions = mapM getAction
toVal (A1 (Id v)) = fromIntegral v :: Int
toVal (A2 (Id v)) = fromIntegral v :: Int
toVal (A4 (Id v)) = fromIntegral v :: Int
-- | Decode a 'BS.ByteString' according to a list of field widths: 1, 2 and 4
-- select Word8, big-endian Word16 and big-endian Word32 reads (anything else
-- falls back to a single byte), and positions whose width entry is 0 are
-- dropped from the result.  The 'Eq' constraint is needed for the literal
-- patterns in 'action' and the (/= 0) filter in 'cleanup'.
parseByteString :: (Eq a, Num a) => [a] -> BS.ByteString -> [Int]
parseByteString list byteString = cleanup solution
where
cleanup list1 = map snd . filter ((/= 0) . fst) $ zip list list1
solution = map toVal $ runGet actionList byteString
actionList = getActions (getActionList list)
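-- A small usage sketch (not part of the original module): read a 1-byte, a
-- 2-byte and a 4-byte big-endian field, with a width-0 entry consuming one
-- byte that is then dropped from the result.
--
-- >>> parseByteString [1, 2, 0, 4] (BS.pack [0xAA, 0x01, 0x02, 0xFF, 0x00, 0x00, 0x00, 0x10])
-- [170,258,16]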
|
ckkashyap/Chitra
|
Utilities/ParseByteString.hs
|
bsd-3-clause
| 1,045
| 0
| 12
| 206
| 458
| 238
| 220
| 26
| 1
|
import Data.Bits
import Crypto.PubKey.ECC.Prim
import Crypto.Types.PubKey.ECC
import System.Random
import System.IO.Unsafe
add = pointAdd secp256r1
mul = pointMul secp256r1
secp256r1 = getCurveByName SEC_p256r1
rev (Point x y) = Point x (- y)
point = Point 10 85
bl = Point 32 95
bl234 = mul 234 bl
bl234rev = rev bl234
point234 = mul 234 point
pointBl = add point bl
pointBl234 = mul 234 pointBl
point234' = add pointBl234 bl234rev
g = ecc_g . common_curve $ secp256r1
n = ecc_n . common_curve $ secp256r1
qlen 0 = 0
qlen n = 1 + qlen (n `shiftR` 1)
getR = randomRIO (1, n - 1)
p = unsafePerformIO getR `mul` g
x = unsafePerformIO getR `mul` g
m = unsafePerformIO getR
mp = m `mul` p
mx = m `mul` x
px = p `add` x
mpx = m `mul` px
mp' = mpx `add` rev mx
-- Stretch k to the bit length of the group order n.
constantLen k
    | qlen k < qlen n = k * 2 ^ (qlen n - qlen k) + 1
    | otherwise = k
-- Base-point multiplication with a scalar of fixed bit length:
-- constantLen k * n + k is congruent to k modulo n, so the result is k `mul` g.
mulG k = (constantLen k * n + k) `mul` g
-- Same padding as 'constantLen', but with the modulus passed explicitly
-- (used by 'cPointMul' below).
adjustLen k n = k * 2 ^ (qlen n - qlen k) + 1
cPointMul :: Curve -> Integer -> Point -> Point
cPointMul c@(CurveFP (CurvePrime _ cc)) k p = pointMul c (adjustLen k n * n + k) p
where
n = ecc_n cc
countBit :: Integer -> Int
countBit 0 = 0
countBit n = (if testBit n 0 then 1 else 0) + countBit (n `shiftR` 1)
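-- A minimal sketch (not part of the original file) of two identities the
-- definitions above exercise; 'Point' carries an 'Eq' instance, so both are
-- expected to evaluate to True.
checkSome :: Bool
checkSome = and
  [ mp' == mp             -- m*(p + x) - m*x == m*p (blinding round trip)
  , mulG m == m `mul` g   -- adding multiples of the group order n is harmless
  ]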
|
YoshikuniJujo/forest
|
subprojects/tls-analysis/ecc/testEcc.hs
|
bsd-3-clause
| 1,216
| 0
| 10
| 285
| 586
| 310
| 276
| 41
| 2
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
-- |
-- Module : Data.Array.Nikola.Backend.CUDA.TH.Util
-- Copyright : (c) Geoffrey Mainland 2012
-- License : BSD-style
--
-- Maintainer : Geoffrey Mainland <mainland@apeiron.net>
-- Stability : experimental
-- Portability : non-portable
module Data.Array.Nikola.Backend.CUDA.TH.Util where
import Control.Applicative (Applicative, (<$>), (<*>))
import Control.Monad.State
import Foreign.Storable (Storable)
import Data.Int
import Data.Word
import qualified Foreign.CUDA.Driver as CU
import qualified Foreign.CUDA.ForeignPtr as CU
import Language.Haskell.TH (Q, DecQ, PatQ, ExpQ, StmtQ)
import qualified Language.Haskell.TH as TH
import Data.Array.Nikola.Language.Syntax
import qualified Data.Array.Repa as R
-- Odd that this instance isn't defined anywhere...
instance MonadIO Q where
liftIO = TH.runIO
mkName :: Var -> TH.Name
mkName = TH.mkName . unVar
varP :: Var -> PatQ
varP v = TH.varP (mkName v)
lamsE :: [Var] -> ExpQ -> ExpQ
lamsE vs qe = TH.lamE (map varP vs) qe
varE :: Var -> ExpQ
varE v = TH.varE (mkName v)
valD :: Var -> ExpQ -> DecQ
valD v qe = TH.valD (varP v) (TH.normalB qe) []
letS :: Var -> ExpQ -> StmtQ
letS v qe = TH.letS [valD v qe]
bindS :: Var -> ExpQ -> StmtQ
bindS v qe = TH.bindS (varP v) qe
tupM :: [ExpQ] -> ExpQ
tupM [] = [|()|]
tupM [me] = [|$me|]
tupM (me:mes) = app [|$tupCon <$> $me|] mes
where
tupCon :: ExpQ
tupCon = TH.conE (TH.tupleDataName (1+(length mes)))
app :: ExpQ -> [ExpQ] -> ExpQ
app f [] = f
app f (me:mes) = app [|$f <*> $me|] mes
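-- A small illustration (not part of the original module): 'tupM' tuples up
-- monadic expressions applicatively, e.g.
--
-- > tupM [ [| getLine |], [| getLine |] ]
--
-- splices to @(,) <$> getLine <*> getLine@.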
firstTup :: Int -> ExpQ -> ExpQ
firstTup n qe = do
let x = TH.mkName "x"
let qp = TH.tupP (TH.varP x : replicate (n - 1) TH.wildP)
TH.caseE qe [TH.match qp (TH.normalB (TH.varE x)) []]
-- | @ptrs@ is a singleton or (potentially nested) tuple of 'ForeignDevicePtr's,
-- and @sh@ is a shape.
data NikolaArray ptrs sh = NArray !ptrs !sh
class ToFunParams a where
toFunParams :: a -> ([CU.FunParam] -> IO b) -> IO b
#define baseTypeToFunParams(ty) \
instance ToFunParams ty where { \
; {-# INLINE toFunParams #-} \
; toFunParams x kont = kont [CU.VArg x] \
}
baseTypeToFunParams(Int8)
baseTypeToFunParams(Int16)
baseTypeToFunParams(Int32)
baseTypeToFunParams(Int64)
baseTypeToFunParams(Word8)
baseTypeToFunParams(Word16)
baseTypeToFunParams(Word32)
baseTypeToFunParams(Word64)
baseTypeToFunParams(Float)
baseTypeToFunParams(Double)
instance ToFunParams Int where
{-# INLINE toFunParams #-}
toFunParams i = toFunParams (fromIntegral i :: Int32)
instance ToFunParams Bool where
{-# INLINE toFunParams #-}
toFunParams False = toFunParams (0 :: Word8)
toFunParams True = toFunParams (1 :: Word8)
instance (Storable a) => ToFunParams (CU.ForeignDevicePtr a) where
{-# INLINE toFunParams #-}
toFunParams fdptr kont =
CU.withForeignDevPtr fdptr $ \dptr ->
kont [CU.VArg dptr]
instance ToFunParams R.Z where
{-# INLINE toFunParams #-}
toFunParams R.Z kont = kont []
instance ToFunParams sh => ToFunParams (sh R.:. Int) where
{-# INLINE toFunParams #-}
toFunParams (sh R.:. i) kont =
toFunParams sh $ \fparams_sh -> do
toFunParams i $ \fparams_i -> do
kont (fparams_i ++ fparams_sh)
instance (ToFunParams ptrs, ToFunParams sh) => ToFunParams (NikolaArray ptrs sh) where
{-# INLINE toFunParams #-}
toFunParams (NArray ptrs sh) kont =
toFunParams ptrs $ \fparams_ptrs -> do
toFunParams sh $ \fparams_sh -> do
kont (fparams_ptrs ++ fparams_sh)
instance (ToFunParams a, ToFunParams b) => ToFunParams (a, b) where
{-# INLINE toFunParams #-}
toFunParams (a, b) kont =
toFunParams a $ \fparams_a -> do
toFunParams b $ \fparams_b -> do
kont (fparams_a ++ fparams_b)
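-- A minimal usage sketch (not part of the original module): marshal a Repa
-- shape paired with a scalar into the flat list of 'CU.FunParam's a kernel
-- launch would receive, and simply count the parameters here.
exampleParamCount :: IO Int
exampleParamCount =
    toFunParams (R.Z R.:. (4 :: Int) R.:. (2 :: Int), 1.5 :: Double) $ \ps ->
        return (length ps)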
|
mainland/nikola
|
src/Data/Array/Nikola/Backend/CUDA/TH/Util.hs
|
bsd-3-clause
| 3,929
| 0
| 16
| 832
| 1,230
| 658
| 572
| -1
| -1
|
{-# OPTIONS -fno-implicit-prelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Bool
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- The 'Bool' type and related functions.
--
-----------------------------------------------------------------------------
module Data.Bool (
-- * Booleans
Bool(..),
-- ** Operations
(&&), -- :: Bool -> Bool -> Bool
(||), -- :: Bool -> Bool -> Bool
not, -- :: Bool -> Bool
otherwise, -- :: Bool
) where
|
OS2World/DEV-UTIL-HUGS
|
libraries/Data/Bool.hs
|
bsd-3-clause
| 706
| 0
| 5
| 140
| 52
| 43
| 9
| 7
| 0
|
-- |
-- Module : Simulation.Aivika.Lattice
-- Copyright : Copyright (c) 2016-2017, David Sorokin <david.sorokin@gmail.com>
-- License : BSD3
-- Maintainer : David Sorokin <david.sorokin@gmail.com>
-- Stability : experimental
-- Tested with: GHC 7.10.3
--
-- This module re-exports the library functionality related to nested computations
-- that can be run within lattice nodes.
--
module Simulation.Aivika.Lattice
(-- * Modules
module Simulation.Aivika.Lattice.LIO,
module Simulation.Aivika.Lattice.Estimate,
module Simulation.Aivika.Lattice.Event,
module Simulation.Aivika.Lattice.Generator,
module Simulation.Aivika.Lattice.QueueStrategy,
module Simulation.Aivika.Lattice.Ref.Base) where
import Simulation.Aivika.Lattice.LIO
import Simulation.Aivika.Lattice.Estimate
import Simulation.Aivika.Lattice.Event
import Simulation.Aivika.Lattice.Generator
import Simulation.Aivika.Lattice.QueueStrategy
import Simulation.Aivika.Lattice.Ref.Base
|
dsorokin/aivika-lattice
|
Simulation/Aivika/Lattice.hs
|
bsd-3-clause
| 1,007
| 0
| 5
| 150
| 113
| 86
| 27
| 14
| 0
|
{-|
Module : Data.Number.MPFR.Special
Description : Special functions
Copyright : (c) Aleš Bizjak
License : BSD3
Maintainer : ales.bizjak0@gmail.com
Stability : experimental
Portability : non-portable
For documentation on particular functions see
<http://www.mpfr.org/mpfr-current/mpfr.html#Special-Functions>.
-}
{-# INCLUDE <mpfr.h> #-}
{-# INCLUDE <chsmpfr.h> #-}
module Data.Number.MPFR.Special where
import Data.Number.MPFR.Internal
log :: RoundMode -> Precision -> MPFR -> MPFR
log r p = fst . log_ r p
log2 :: RoundMode -> Precision -> MPFR -> MPFR
log2 r p = fst . log2_ r p
log10 :: RoundMode -> Precision -> MPFR -> MPFR
log10 r p = fst . log10_ r p
exp :: RoundMode -> Precision -> MPFR -> MPFR
exp r p = fst . exp_ r p
exp2 :: RoundMode -> Precision -> MPFR -> MPFR
exp2 r p = fst . exp2_ r p
exp10 :: RoundMode -> Precision -> MPFR -> MPFR
exp10 r p = fst . exp10_ r p
sin :: RoundMode -> Precision -> MPFR -> MPFR
sin r p = fst . sin_ r p
cos :: RoundMode -> Precision -> MPFR -> MPFR
cos r p = fst . cos_ r p
tan :: RoundMode -> Precision -> MPFR -> MPFR
tan r p = fst . tan_ r p
sec :: RoundMode -> Precision -> MPFR -> MPFR
sec r p = fst . sec_ r p
csc :: RoundMode -> Precision -> MPFR -> MPFR
csc r p = fst . csc_ r p
cot :: RoundMode -> Precision -> MPFR -> MPFR
cot r p = fst . cot_ r p
sincos :: RoundMode
-> Precision -- ^ precision to compute sin
-> Precision -- ^ precision to compute cos
-> MPFR
-> (MPFR, MPFR) -- ^ return (sin x, cos x)
sincos r p p' d = case sincos_ r p p' d of
(a, b, _) -> (a, b)
asin :: RoundMode -> Precision -> MPFR -> MPFR
asin r p = fst . asin_ r p
acos :: RoundMode -> Precision -> MPFR -> MPFR
acos r p = fst . acos_ r p
atan :: RoundMode -> Precision -> MPFR -> MPFR
atan r p = fst . atan_ r p
atan2 :: RoundMode -> Precision -> MPFR -> MPFR -> MPFR
atan2 r p d = fst . atan2_ r p d
sinh :: RoundMode -> Precision -> MPFR -> MPFR
sinh r p = fst . sinh_ r p
cosh :: RoundMode -> Precision -> MPFR -> MPFR
cosh r p = fst . cosh_ r p
tanh :: RoundMode -> Precision -> MPFR -> MPFR
tanh r p = fst . tanh_ r p
sinhcosh :: RoundMode
         -> Precision -- ^ precision to compute sinh
         -> Precision -- ^ precision to compute cosh
         -> MPFR
         -> (MPFR, MPFR) -- ^ return (sinh x, cosh x)
sinhcosh r p p' d = case sinhcosh_ r p p' d of
(a, b, _) -> (a, b)
sech :: RoundMode -> Precision -> MPFR -> MPFR
sech r p = fst . sech_ r p
csch :: RoundMode -> Precision -> MPFR -> MPFR
csch r p = fst . csch_ r p
coth :: RoundMode -> Precision -> MPFR -> MPFR
coth r p = fst . coth_ r p
acosh :: RoundMode -> Precision -> MPFR -> MPFR
acosh r p = fst . acosh_ r p
asinh :: RoundMode -> Precision -> MPFR -> MPFR
asinh r p = fst . asinh_ r p
atanh :: RoundMode -> Precision -> MPFR -> MPFR
atanh r p = fst . atanh_ r p
facw :: RoundMode -> Precision -> Word -> MPFR
facw r p = fst . facw_ r p
log1p :: RoundMode -> Precision -> MPFR -> MPFR
log1p r p = fst . log1p_ r p
expm1 :: RoundMode -> Precision -> MPFR -> MPFR
expm1 r p = fst . expm1_ r p
eint :: RoundMode -> Precision -> MPFR -> MPFR
eint r p = fst . eint_ r p
li2 :: RoundMode -> Precision -> MPFR -> MPFR
li2 r p = fst . li2_ r p
gamma :: RoundMode -> Precision -> MPFR -> MPFR
gamma r p = fst . gamma_ r p
lngamma :: RoundMode -> Precision -> MPFR -> MPFR
lngamma r p = fst . lngamma_ r p
lgamma :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
lgamma r p d = case lgamma_ r p d of
(a, b, _) -> (a,b)
zeta :: RoundMode -> Precision -> MPFR -> MPFR
zeta r p = fst . zeta_ r p
zetaw :: RoundMode -> Precision -> Word -> MPFR
zetaw r p = fst . zetaw_ r p
erf :: RoundMode -> Precision -> MPFR -> MPFR
erf r p = fst . erf_ r p
erfc :: RoundMode -> Precision -> MPFR -> MPFR
erfc r p = fst . erfc_ r p
j0 :: RoundMode -> Precision -> MPFR -> MPFR
j0 r p = fst . j0_ r p
j1 :: RoundMode -> Precision -> MPFR -> MPFR
j1 r p = fst . j1_ r p
jn :: RoundMode -> Precision -> Int -> MPFR -> MPFR
jn r p w = fst . jn_ r p w
y0 :: RoundMode -> Precision -> MPFR -> MPFR
y0 r p = fst . y0_ r p
y1 :: RoundMode -> Precision -> MPFR -> MPFR
y1 r p = fst . y1_ r p
yn :: RoundMode -> Precision -> Int -> MPFR -> MPFR
yn r p w = fst . yn_ r p w
fma :: RoundMode -> Precision -> MPFR -> MPFR -> MPFR -> MPFR
fma r p d1 d2 = fst . fma_ r p d1 d2
fms :: RoundMode -> Precision -> MPFR -> MPFR -> MPFR -> MPFR
fms r p d1 d2 = fst . fms_ r p d1 d2
agm :: RoundMode -> Precision -> MPFR -> MPFR -> MPFR
agm r p d1 = fst . agm_ r p d1
hypot :: RoundMode -> Precision -> MPFR -> MPFR -> MPFR
hypot r p d1 = fst . hypot_ r p d1
pi :: RoundMode -> Precision -> MPFR
pi r = fst . pi_ r
log2c :: RoundMode -> Precision -> MPFR
log2c r = fst . log2c_ r
euler :: RoundMode -> Precision -> MPFR
euler r = fst . euler_ r
catalan :: RoundMode -> Precision -> MPFR
catalan r = fst . catalan_ r
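-- A small usage note (not part of the original module): each plain wrapper
-- above is @fst@ of the corresponding underscore variant below, which also
-- returns MPFR's ternary rounding indicator.  Assuming 'Near' is one of the
-- 'RoundMode' constructors in scope, for example:
--
-- > pi Near 200          -- pi to 200 bits of precision
-- > fst (pi_ Near 200)   -- the same value, indicator discarded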
log_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
log_ r p d = withMPFR r p d mpfr_log
log2_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
log2_ r p d = withMPFR r p d mpfr_log2
log10_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
log10_ r p d = withMPFR r p d mpfr_log10
exp_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
exp_ r p d = withMPFR r p d mpfr_exp
exp2_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
exp2_ r p d = withMPFR r p d mpfr_exp2
exp10_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
exp10_ r p d = withMPFR r p d mpfr_exp10
sin_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
sin_ r p d = withMPFR r p d mpfr_sin
cos_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
cos_ r p d = withMPFR r p d mpfr_cos
tan_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
tan_ r p d = withMPFR r p d mpfr_tan
sec_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
sec_ r p d = withMPFR r p d mpfr_sec
csc_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
csc_ r p d = withMPFR r p d mpfr_csc
cot_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
cot_ r p d = withMPFR r p d mpfr_cot
sincos_ :: RoundMode
-> Precision -- ^ precision to compute sin
-> Precision -- ^ precision to compute cos
-> MPFR
-> (MPFR, MPFR, Int)
sincos_ r p p' d = unsafePerformIO go
where go = do ls <- mpfr_custom_get_size (fromIntegral p)
fp <- mallocForeignPtrBytes (fromIntegral ls)
ls' <- mpfr_custom_get_size (fromIntegral p')
fp' <- mallocForeignPtrBytes (fromIntegral ls')
alloca $ \p1 -> do
pokeDummy p1 fp (fromIntegral ls)
alloca $ \p2 -> do
pokeDummy p2 fp' (fromIntegral ls')
with d $ \p3 -> do
r3 <- mpfr_sin_cos p1 p2 p3 ((fromIntegral . fromEnum) r)
r1 <- peekP p1 fp
r2 <- peekP p2 fp'
return (r1, r2, fromIntegral r3)
asin_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
asin_ r p d = withMPFR r p d mpfr_asin
acos_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
acos_ r p d = withMPFR r p d mpfr_acos
atan_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
atan_ r p d = withMPFR r p d mpfr_atan
atan2_ :: RoundMode -> Precision -> MPFR -> MPFR -> (MPFR, Int)
atan2_ r p d d' = withMPFRsBA r p d d' mpfr_atan2
sinh_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
sinh_ r p d = withMPFR r p d mpfr_sinh
cosh_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
cosh_ r p d = withMPFR r p d mpfr_cosh
tanh_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
tanh_ r p d = withMPFR r p d mpfr_tanh
sinhcosh_ :: RoundMode
-> Precision -- ^ precision to compute sinh
-> Precision -- ^ precision to compute cosh
-> MPFR
-> (MPFR, MPFR, Int)
sinhcosh_ r p p' d = unsafePerformIO go
where go = do ls <- mpfr_custom_get_size (fromIntegral p)
fp <- mallocForeignPtrBytes (fromIntegral ls)
ls' <- mpfr_custom_get_size (fromIntegral p')
fp' <- mallocForeignPtrBytes (fromIntegral ls')
alloca $ \p1 -> do
pokeDummy p1 fp (fromIntegral ls)
alloca $ \p2 -> do
pokeDummy p2 fp' (fromIntegral ls')
with d $ \p3 -> do
r3 <- mpfr_sinh_cosh p1 p2 p3 ((fromIntegral . fromEnum) r)
r1 <- peekP p1 fp
r2 <- peekP p2 fp'
return (r1, r2, fromIntegral r3)
sech_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
sech_ r p d = withMPFR r p d mpfr_sech
csch_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
csch_ r p d = withMPFR r p d mpfr_csch
coth_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
coth_ r p d = withMPFR r p d mpfr_coth
acosh_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
acosh_ r p d = withMPFR r p d mpfr_acosh
asinh_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
asinh_ r p d = withMPFR r p d mpfr_asinh
atanh_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
atanh_ r p d = withMPFR r p d mpfr_atanh
facw_ :: RoundMode -> Precision -> Word -> (MPFR, Int)
facw_ r p w = withMPFRUI r p w mpfr_fac_ui
log1p_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
log1p_ r p d = withMPFR r p d mpfr_log1p
expm1_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
expm1_ r p d = withMPFR r p d mpfr_expm1
eint_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
eint_ r p d = withMPFR r p d mpfr_eint
li2_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
li2_ r p d = withMPFR r p d mpfr_li2
gamma_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
gamma_ r p d = withMPFR r p d mpfr_gamma
lngamma_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
lngamma_ r p d = withMPFR r p d mpfr_lngamma
lgamma_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int, Int)
lgamma_ r p d = unsafePerformIO go
where go = do ls <- mpfr_custom_get_size (fromIntegral p)
fp <- mallocForeignPtrBytes (fromIntegral ls)
alloca $ \p1 -> do
pokeDummy p1 fp (fromIntegral ls)
with d $ \p2 ->
alloca $ \p3 -> do
r3 <- mpfr_lgamma p1 p3 p2 ((fromIntegral . fromEnum) r)
r2 <- peek p3
r1 <- peekP p1 fp
return (r1, fromIntegral r2, fromIntegral r3)
zeta_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
zeta_ r p d = withMPFR r p d mpfr_zeta
zetaw_ :: RoundMode -> Precision -> Word -> (MPFR, Int)
zetaw_ r p d = withMPFRUI r p d mpfr_zeta_ui
erf_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
erf_ r p d = withMPFR r p d mpfr_erf
erfc_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
erfc_ r p d = withMPFR r p d mpfr_erfc
j0_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
j0_ r p d = withMPFR r p d mpfr_j0
j1_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
j1_ r p d = withMPFR r p d mpfr_j1
jn_ :: RoundMode -> Precision -> Int -> MPFR -> (MPFR, Int)
jn_ r p i d = withMPFRBAis r p (fromIntegral i) d mpfr_jn
y0_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
y0_ r p d = withMPFR r p d mpfr_y0
y1_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int)
y1_ r p d = withMPFR r p d mpfr_y1
yn_ :: RoundMode -> Precision -> Int -> MPFR -> (MPFR, Int)
yn_ r p i d = withMPFRBAis r p (fromIntegral i) d mpfr_yn
fma_ :: RoundMode -> Precision -> MPFR -> MPFR -> MPFR -> (MPFR, Int)
fma_ r p mp1 mp2 mp3 = unsafePerformIO go
where go = withDummy p $ \p1 ->
with mp1 $ \p2 ->
with mp2 $ \p3 ->
with mp3 $ \p4 ->
mpfr_fma p1 p2 p3 p4 ((fromIntegral . fromEnum) r)
fms_ :: RoundMode -> Precision -> MPFR -> MPFR -> MPFR -> (MPFR, Int)
fms_ r p mp1 mp2 mp3 = unsafePerformIO go
where go = withDummy p $ \p1 ->
with mp1 $ \p2 ->
with mp2 $ \p3 ->
with mp3 $ \p4 ->
mpfr_fms p1 p2 p3 p4 ((fromIntegral . fromEnum) r)
agm_ :: RoundMode -> Precision -> MPFR -> MPFR -> (MPFR,Int)
agm_ r p d1 d2 = withMPFRsBA r p d1 d2 mpfr_agm
hypot_ :: RoundMode -> Precision -> MPFR -> MPFR -> (MPFR,Int)
hypot_ r p d1 d2 = withMPFRsBA r p d1 d2 mpfr_hypot
pi_ :: RoundMode -> Precision -> (MPFR, Int)
pi_ r p = withMPFRC r p mpfr_const_pi
log2c_ :: RoundMode -> Precision -> (MPFR, Int)
log2c_ r p = withMPFRC r p mpfr_const_log2
euler_ :: RoundMode -> Precision -> (MPFR, Int)
euler_ r p = withMPFRC r p mpfr_const_euler
catalan_ :: RoundMode -> Precision -> (MPFR, Int)
catalan_ r p = withMPFRC r p mpfr_const_catalan
freeCache :: IO ()
freeCache = mpfr_free_cache
|
ekmett/hmpfr
|
src/Data/Number/MPFR/Special.hs
|
bsd-3-clause
| 13,527
| 0
| 26
| 4,425
| 5,214
| 2,661
| 2,553
| 281
| 1
|
module Main where
import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.STM.TChan (TChan, dupTChan, newTChan,
readTChan, writeTChan)
import Control.Exception (bracket)
import Control.Monad (forever, unless, when)
import Control.Monad.STM (atomically)
import qualified Data.ByteString.Lazy as ByteString
import Data.Digest.Pure.MD5 (md5)
import Data.Maybe
import Data.Monoid
import Data.Text (isInfixOf, pack)
import Options.Applicative
import Paths_yesod_fast_devel
import System.Console.ANSI
import System.Directory (copyFile, doesDirectoryExist,
findExecutable)
import System.Exit
import System.FilePath (takeDirectory)
import System.FilePath.Glob
import System.FilePath.Posix (takeBaseName)
import System.FSNotify (Event (..), watchTree,
withManager)
import System.IO (BufferMode (..), Handle,
hPutStrLn, hSetBuffering, stderr,
stdout)
import System.Process
data Options
= PatchDevelMain { pdmFilePath :: FilePath}
| PrintPatchedMain
| StartServer { ssFilePath :: FilePath}
options :: ParserInfo Options
options =
info
(allCommands <**> helper)
(header "Faster yesod-devel with GHCi and Browser Sync")
where
allCommands =
subparser
(patchDevelMain <> printPatchedMain <> startServer <>
metavar "patch | server| print-patched-main")
printPatchedMain =
command
"print-patched-main"
(info (pure PrintPatchedMain) (progDesc "Print the patched DevelMain"))
startServer =
command
"server"
(info
(StartServer . fromMaybe "app/DevelMain.hs" <$>
optional (argument str (metavar "devel-main-path")) <**>
helper)
(progDesc "Start the development servers"))
patchDevelMain =
command
"patch"
(info
(PatchDevelMain . fromMaybe "app/DevelMain.hs" <$>
optional (argument str (metavar "devel-main-path")) <**>
helper)
(progDesc "Patch your devel main with browser-sync"))
main :: IO ()
main = do
cmd <- execParser options
case cmd of
PatchDevelMain fp -> initYesodFastDevel fp
StartServer fp -> go fp
PrintPatchedMain ->
putStrLn =<< readFile =<< getDataFileName "PatchedDevelMain.hs"
where
go develMainPth = do
hSetBuffering stdout LineBuffering
hSetBuffering stderr LineBuffering
chan <- atomically newTChan
_ <- forkIO $ do
hPutStrLn stderr "Watching files for changes..."
watchThread chan
_ <- forkIO $ do
hPutStrLn stderr "Spawning browser-sync..."
browserSyncThread
hPutStrLn stderr "Spawning GHCi..."
_ <- replThread develMainPth chan
return ()
initYesodFastDevel :: FilePath -> IO ()
initYesodFastDevel develMainPth = do
verifyDirectory
verifyDevelMain
patchedDevelMain <- getDataFileName "PatchedDevelMain.hs"
copyFile patchedDevelMain develMainPth
putStrLn "Patched `DevelMain.hs`"
browserSyncPth <- findExecutable "browser-sync"
putStrLn "Make sure you have `foreign-store` on your cabal file"
when (isNothing browserSyncPth) $
putStrLn "Install `browser-sync` to have livereload at port 4000"
exitSuccess
where
verifyDirectory = do
let dir = takeDirectory develMainPth
putStrLn ("Verifying `" ++ dir ++ "` exists")
dexists <- doesDirectoryExist dir
unless dexists $ do
hPutStrLn stderr ("Directory `" ++ dir ++ "` not found")
exitFailure
verifyDevelMain = do
putStrLn "Verifying `DevelMain.hs` isn't modified"
userDevelMd5 <- md5 <$> ByteString.readFile develMainPth
originalDevelMd5 <-
md5 <$> (ByteString.readFile =<< getDataFileName "OriginalDevelMain.hs")
patchedDevelMd5 <-
md5 <$> (ByteString.readFile =<< getDataFileName "PatchedDevelMain.hs")
when (userDevelMd5 == patchedDevelMd5) $ do
putStrLn "DevelMain.hs is already patched"
exitSuccess
when (userDevelMd5 /= originalDevelMd5) $ do
hPutStrLn stderr "Found a weird DevelMain.hs on your project"
hPutStrLn stderr "Use `yesod-fast-devel print-patched-main`"
exitFailure
browserSyncThread :: IO ()
browserSyncThread = do
browserSyncPth <- findExecutable "browser-sync"
when (isJust browserSyncPth) $ callCommand cmd
where
cmd =
"browser-sync start --no-open --files=\"devel-main-since\" --proxy \"localhost:3000\" --port 4000"
watchThread :: TChan Event -> IO ()
watchThread writeChan =
withManager $ \mgr
-- start a watching job (in the background)
-> do
_ <- watchTree mgr "." shouldReload (reloadApplication writeChan)
-- sleep forever (until interrupted)
forever $ threadDelay 1000000000
replThread :: FilePath -> TChan Event -> IO ()
replThread develMainPth chan = do
readChan <- atomically (dupTChan chan)
bracket newRepl onError (onSuccess readChan)
where
onError (_, _, _, process) = do
interruptProcessGroupOf process
threadDelay 100000
terminateProcess process
threadDelay 100000
waitForProcess process
onSuccess readChan (Just replIn, _, _, _) = do
hSetBuffering replIn LineBuffering
threadDelay 1000000
hPutStrLn replIn loadString
hPutStrLn replIn startString
forever $ do
event <- atomically (readTChan readChan)
putStrLn "-----------------------------"
setSGR [SetColor Foreground Vivid Yellow]
print event
setSGR [Reset]
putStrLn "-----------------------------"
hPutStrLn replIn loadString
hPutStrLn replIn startString
onSuccess _ (_, _, _, _) = do
hPutStrLn stderr "Can't open GHCi's stdin"
exitFailure
startString = "update"
loadString = ":load " ++ develMainPth
-- Reload only when none of the patterns below occur in the changed path.
-- Note that, despite their names, the helpers return True when the pattern
-- *is* present; the disjunction is negated in the body.
shouldReload :: Event -> Bool
shouldReload event = not (or conditions)
  where
fp =
case event of
Added filePath _ -> filePath
Modified filePath _ -> filePath
Removed filePath _ -> filePath
conditions =
[ notInPath ".git"
, notInPath "yesod-devel"
, notInPath "dist"
, notInFile "#"
, notInPath ".cabal-sandbox"
, notInFile "flycheck_"
, notInPath ".stack-work"
, notInGlob (compile "**/*.sqlite3-*")
, notInGlob (compile "*.sqlite3-*")
, notInFile "stack.yaml"
, notInGlob (compile "*.hi")
, notInGlob (compile "**/*.hi")
, notInGlob (compile "*.o")
, notInGlob (compile "**/*.o")
, notInFile "devel-main-since"
]
notInPath t = t `isInfixOf` pack fp
notInFile t = t `isInfixOf` pack (takeBaseName fp)
notInGlob pt = match pt fp
reloadApplication :: TChan Event -> Event -> IO ()
reloadApplication chan event = atomically (writeTChan chan event)
newRepl :: IO (Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle)
newRepl =
createProcess $ newProc "stack" ["ghci", "--ghc-options", "-O0 -fobject-code"]
newProc :: FilePath -> [String] -> CreateProcess
newProc cmd args =
CreateProcess
{ cmdspec = RawCommand cmd args
, cwd = Nothing
, env = Nothing
, std_in = CreatePipe
, std_out = Inherit
, std_err = Inherit
, close_fds = False
, create_group = True
, delegate_ctlc = False
}
|
haskellbr/yesod-fast-devel
|
Main.hs
|
bsd-3-clause
| 7,801
| 0
| 17
| 2,255
| 1,814
| 904
| 910
| 198
| 3
|
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE PostfixOperators #-}
{-# LANGUAGE NoImplicitPrelude #-}
module CatEye.IOPrimitives where
import qualified UTF8Prelude as P
import qualified Control.Monad.Error as E
import MPSUTF8 hiding (none_of, apply)
import UTF8Prelude hiding ((.), (^), (>), (+), (*), any, not)
import Control.Monad.Error hiding (Error)
import System.IO.UTF8
import CatEye.Type
import CatEye.Constant
import CatEye.Parser
import CatEye.Env
import CatEye.Evaluator
io_primitives :: [([String], [Value] -> IOThrowsError Value)]
io_primitives =
[ x _apply apply_proc
, x _open_input_file (make_port ReadMode)
, x _open_output_file (make_port WriteMode)
, x _close_input_port close_port
, x _close_output_port close_port
, x _read read_proc
, x _write write_proc
, x _read_contents read_contents
, x _read_all read_all
]
where x a b = (a, b)
apply_proc :: [Value] -> IOThrowsError Value
apply_proc [func, List args] = apply func args
apply_proc (func:args) = apply func args
apply_proc _ = not_match
make_port :: IOMode -> [Value] -> IOThrowsError Value
make_port mode [String filename] = openFile filename mode .liftIO ^ Port
make_port _ _ = not_match
close_port :: [Value] -> IOThrowsError Value
close_port [Port port] = liftIO $ hClose port >> (Bool True .return)
close_port _ = return $ Bool False
read_proc :: [Value] -> IOThrowsError Value
read_proc [] = read_proc [Port stdin]
read_proc [Port port] = (liftIO $ hGetLine port)
>>= read_expr > lift_throws
read_proc _ = not_match
write_proc :: [Value] -> IOThrowsError Value
write_proc [x] = write_proc [x, Port stdout]
write_proc [x, Port port] = liftIO $ hPutStrLn port (x.show)
>> Bool True .return
write_proc _ = not_match
read_contents :: [Value] -> IOThrowsError Value
read_contents [String filename] = readFile filename .liftIO ^ String
read_contents _ = not_match
read_all :: [Value] -> IOThrowsError Value
read_all [String filename] = load filename ^ List
read_all _ = not_match
|
nfjinjing/cateye
|
src/CatEye/IOPrimitives.hs
|
bsd-3-clause
| 2,113
| 6
| 9
| 388
| 693
| 376
| 317
| 54
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- | Compile FFI definitions.
module Fay.Compiler.FFI
(emitFayToJs
,emitJsToFay
,compileFFIExp
,jsToFayHash
,fayToJsHash
,typeArity
) where
import Fay.Compiler.Prelude
import Fay.Compiler.Misc
import Fay.Compiler.Print (printJSString)
import Fay.Compiler.QName
import Fay.Exts.NoAnnotation (unAnn)
import qualified Fay.Exts.NoAnnotation as N
import qualified Fay.Exts.Scoped as S
import Fay.Types
import Control.Monad.Error
import Control.Monad.Writer
import Data.Generics.Schemes
import Language.ECMAScript3.Parser as JS
import Language.ECMAScript3.Syntax
import Language.Haskell.Exts.Annotated (SrcSpanInfo, prettyPrint)
import Language.Haskell.Exts.Annotated.Syntax
-- | Compile an FFI expression (also used when compiling top level definitions).
compileFFIExp :: SrcSpanInfo -> Maybe (Name a) -> String -> S.Type -> Compile JsExp
compileFFIExp loc (fmap unAnn -> nameopt) formatstr sig' =
-- substitute newtypes with their child types before calling
-- real compileFFI
compileFFI' . unAnn =<< rmNewtys sig'
where
rmNewtys :: S.Type -> Compile N.Type
rmNewtys typ = case typ of
TyForall _ b c t -> TyForall () (fmap (map unAnn) b) (fmap unAnn c) <$> rmNewtys t
TyFun _ t1 t2 -> TyFun () <$> rmNewtys t1 <*> rmNewtys t2
TyTuple _ b tl -> TyTuple () b <$> mapM rmNewtys tl
TyList _ t -> TyList () <$> rmNewtys t
TyApp _ t1 t2 -> TyApp () <$> rmNewtys t1 <*> rmNewtys t2
t@TyVar{} -> return $ unAnn t
TyCon _ qname -> maybe (TyCon () (unAnn qname)) snd <$> lookupNewtypeConst qname
TyParen _ t -> TyParen () <$> rmNewtys t
TyInfix _ t1 q t2 -> flip (TyInfix ()) (unAnn q) <$> rmNewtys t1 <*> rmNewtys t2
TyKind _ t k -> flip (TyKind ()) (unAnn k) <$> rmNewtys t
TyPromoted {} -> return $ unAnn typ
compileFFI' :: N.Type -> Compile JsExp
compileFFI' sig = do
let name = fromMaybe "<exp>" nameopt
inner <- formatFFI loc formatstr (zip params funcFundamentalTypes)
case JS.parse JS.expression (prettyPrint name) (printJSString (wrapReturn inner)) of
Left err -> throwError (FfiFormatInvalidJavaScript loc inner (show err))
Right exp -> do
config' <- config id
when (configGClosure config') $ warnDotUses loc inner exp
return (body inner)
where
body inner = foldr wrapParam (wrapReturn inner) params
wrapParam pname inner = JsFun Nothing [pname] [] (Just inner)
params = zipWith const uniqueNames [1..typeArity sig]
wrapReturn :: String -> JsExp
wrapReturn inner = thunk $
case lastMay funcFundamentalTypes of
-- Returns a “pure” value;
Just{} -> jsToFay SerializeAnywhere returnType (JsRawExp inner)
-- Base case:
Nothing -> JsRawExp inner
funcFundamentalTypes = functionTypeArgs sig
returnType = last funcFundamentalTypes
-- | Warn about uses of naked x.y which will not play nicely with Google Closure.
warnDotUses :: SrcSpanInfo -> String -> Expression SourcePos -> Compile ()
warnDotUses srcSpanInfo string expr =
when anyrefs $
warn $ printSrcSpanInfo srcSpanInfo ++ ":\nDot ref syntax used in FFI JS code: " ++ string
where
anyrefs = not (null (listify dotref expr)) ||
not (null (listify ldot expr))
dotref :: Expression SourcePos -> Bool
dotref x = case x of
DotRef _ (VarRef _ (Id _ name)) _
| name `elem` globalNames -> False
DotRef{} -> True
_ -> False
ldot :: LValue SourcePos -> Bool
ldot x =
case x of
LDot _ (VarRef _ (Id _ name)) _
| name `elem` globalNames -> False
LDot{} -> True
_ -> False
globalNames = ["Math","console","JSON"]
-- | Make a Fay→JS encoder.
emitFayToJs :: Name a -> [TyVarBind b] -> [([Name c], BangType d)] -> Compile ()
emitFayToJs (unAnn -> name) (map unAnn -> tyvars) (explodeFields -> fieldTypes) = do
qname <- qualify name
let ctrName = printJSString $ unQual qname
tell $ mempty { writerFayToJs = [(ctrName, translator)] }
where
translator =
JsFun Nothing
[JsNameVar "type", argTypes, transcodingObjForced]
(obj : fieldStmts (map (getIndex name tyvars) fieldTypes))
(Just $ JsName obj_)
obj :: JsStmt
obj = JsVar obj_ $
JsObj [("instance",JsLit (JsStr (printJSString (unAnn name))))]
fieldStmts :: [(Int,(N.Name,N.BangType))] -> [JsStmt]
fieldStmts [] = []
fieldStmts ((i,fieldType):fts) =
JsVar obj_v field :
JsIf (JsNeq JsUndefined (JsName obj_v))
[JsSetPropExtern obj_ decl (JsName obj_v)]
[] :
fieldStmts fts
where
obj_v = JsNameVar $ UnQual () (Ident () $ "obj_" ++ d)
decl = JsNameVar $ UnQual () (Ident () d)
(d, field) = declField i fieldType
obj_ = JsNameVar "obj_"
-- Declare/encode Fay→JS field
declField :: Int -> (N.Name,N.BangType) -> (String,JsExp)
declField i (fname,typ) =
(prettyPrint fname
,fayToJs (SerializeUserArg i)
(argType (bangType typ))
(JsGetProp (JsName transcodingObjForced)
(JsNameVar (UnQual () fname))))
-- | A name used for transcoding.
transcodingObj :: JsName
transcodingObj = JsNameVar "obj"
-- | The name used for the forced version of a transcoding variable.
transcodingObjForced :: JsName
transcodingObjForced = JsNameVar "_obj"
-- | Get arg types of a function type.
functionTypeArgs :: N.Type -> [FundamentalType]
functionTypeArgs t = case t of
TyForall _ _ _ i -> functionTypeArgs i
TyFun _ a b -> argType a : functionTypeArgs b
TyParen _ st -> functionTypeArgs st
r -> [argType r]
-- | Convert a Haskell type to an internal FFI representation.
argType :: N.Type -> FundamentalType
argType t = case t of
TyCon _ (UnQual _ (Ident _ "String")) -> StringType
TyCon _ (UnQual _ (Ident _ "Double")) -> DoubleType
TyCon _ (UnQual _ (Ident _ "Int")) -> IntType
TyCon _ (UnQual _ (Ident _ "Bool")) -> BoolType
TyApp _ (TyCon _ (UnQual _ (Ident _ "Ptr"))) _ -> PtrType
TyApp _ (TyCon _ (UnQual _ (Ident _ "Automatic"))) _ -> Automatic
TyApp _ (TyCon _ (UnQual _ (Ident _ "Defined"))) a -> Defined (argType a)
TyApp _ (TyCon _ (UnQual _ (Ident _ "Nullable"))) a -> Nullable (argType a)
TyApp _ (TyCon _ (UnQual _ (Ident _ "Fay"))) a -> JsType (argType a)
TyFun _ x xs -> FunctionType (argType x : functionTypeArgs xs)
TyList _ x -> ListType (argType x)
TyTuple _ _ xs -> TupleType (map argType xs)
TyParen _ st -> argType st
TyApp _ op arg -> userDefined (reverse (arg : expandApp op))
_ ->
-- No semantic point to this, merely to avoid GHC's broken
-- warning.
case t of
TyCon _ (UnQual _ user) -> UserDefined user []
_ -> UnknownType
-- | Extract the type.
bangType :: N.BangType -> N.Type
bangType typ = case typ of
BangedTy _ ty -> ty
UnBangedTy _ ty -> ty
UnpackedTy _ ty -> ty
-- | Expand a type application.
expandApp :: N.Type -> [N.Type]
expandApp (TyParen _ t) = expandApp t
expandApp (TyApp _ op arg) = arg : expandApp op
expandApp x = [x]
-- | Generate a user-defined type.
userDefined :: [N.Type] -> FundamentalType
userDefined (TyCon _ (UnQual _ name):typs) = UserDefined name (map argType typs)
userDefined _ = UnknownType
-- | Translate: JS → Fay.
jsToFay :: SerializeContext -> FundamentalType -> JsExp -> JsExp
jsToFay = translate "jsToFay"
-- | Translate: Fay → JS.
fayToJs :: SerializeContext -> FundamentalType -> JsExp -> JsExp
fayToJs = translate "fayToJs"
-- | Make a translator.
translate :: String -> SerializeContext -> FundamentalType -> JsExp -> JsExp
translate method context typ exp = case typ of
-- Unserialized types
PtrType -> exp
-- Flat types
StringType -> flat "string"
DoubleType -> flat "double"
IntType -> flat "int"
BoolType -> flat "bool"
-- Collapse monad
JsType x | method == "jsToFay" -> js x
-- Otherwise recursive stuff needs the big guns
_ -> recursive
where flat specialize =
JsApp (JsName (JsBuiltIn (Ident () (method ++ "_" ++ specialize))))
[exp]
recursive =
JsApp (JsName (JsBuiltIn (Ident () method)))
[typeRep context typ
,exp]
js ty' =
JsNew (JsBuiltIn "Monad")
[translate method context ty' exp]
-- | Get a JS-representation of a fundamental type for encoding/decoding.
typeRep :: SerializeContext -> FundamentalType -> JsExp
typeRep context typ = case typ of
FunctionType xs -> JsList [JsLit $ JsStr "function",JsList (map (typeRep context) xs)]
JsType x -> JsList [JsLit $ JsStr "action",JsList [typeRep context x]]
ListType x -> JsList [JsLit $ JsStr "list",JsList [typeRep context x]]
TupleType xs -> JsList [JsLit $ JsStr "tuple",JsList (map (typeRep context) xs)]
UserDefined name xs -> JsList [JsLit $ JsStr "user"
,JsLit $ JsStr (unname name)
,JsList (zipWith (\t i -> typeRep (setArg i context) t) xs [0..])]
Defined x -> JsList [JsLit $ JsStr "defined",JsList [typeRep context x]]
Nullable x -> JsList [JsLit $ JsStr "nullable",JsList [typeRep context x]]
_ -> nom
where
setArg i SerializeUserArg{} = SerializeUserArg i
setArg _ c = c
ret = JsList . return . JsLit . JsStr
nom = case typ of
StringType -> ret "string"
DoubleType -> ret "double"
PtrType -> ret "ptr"
Automatic -> ret "automatic"
IntType -> ret "int"
BoolType -> ret "bool"
DateType -> ret "date"
_ ->
case context of
SerializeAnywhere -> ret "unknown"
SerializeUserArg i ->
let args = JsName argTypes
automatic = JsIndex 0 (JsName JsParametrizedType)
thisArg = JsIndex i args
in JsTernaryIf (JsInfix "&&" args thisArg)
thisArg
(JsTernaryIf (JsEq automatic (JsLit "automatic"))
(ret "automatic")
(ret "unknown"))
-- | Get the arity of a type.
typeArity :: Type a -> Int
typeArity t = case t of
TyForall _ _ _ i -> typeArity i
TyFun _ _ b -> 1 + typeArity b
TyParen _ st -> typeArity st
_ -> 0
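-- For example (not part of the original module): a signature
-- @Int -> Int -> Fay Int@ has 'typeArity' 2, which is how 'compileFFI''
-- decides how many lambdas to wrap around the generated JS expression.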
-- | Format the FFI format string with the given arguments.
formatFFI :: SrcSpanInfo -- ^ Source Location.
-> String -- ^ The format string.
-> [(JsName,FundamentalType)] -- ^ Arguments.
-> Compile String -- ^ The JS code.
formatFFI loc formatstr args = go formatstr where
go ('%':'*':xs) = do
these <- mapM inject (zipWith const [1..] args)
rest <- go xs
return (intercalate "," these ++ rest)
go ('%':'%':xs) = do
rest <- go xs
return ('%' : rest)
go ['%'] = throwError (FfiFormatIncompleteArg loc)
go ('%':(span isDigit -> (op,xs))) =
case readMay op of
Nothing -> throwError (FfiFormatBadChars loc op)
Just n -> do
this <- inject n
rest <- go xs
return (this ++ rest)
go (x:xs) = do rest <- go xs
return (x : rest)
go [] = return []
inject n =
case listToMaybe (drop (n-1) args) of
Nothing -> throwError (FfiFormatNoSuchArg loc n)
Just (arg,typ) ->
return (printJSString (fayToJs SerializeAnywhere typ (JsName arg)))
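-- A small illustration (not part of the original module): the placeholders
-- handled above are @%1@, @%2@, ... for positional arguments, @%*@ for all
-- arguments comma-separated, and @%%@ for a literal percent sign.  In user
-- code this surfaces through Fay's @ffi@ function, e.g.
--
-- > addThem :: Int -> Int -> Fay Int
-- > addThem = ffi "%1 + %2"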
-- | Generate n name-typ pairs from the given list.
explodeFields :: [([a], t)] -> [(a, t)]
explodeFields = concatMap $ \(names,typ) -> map (,typ) names
-- | Generate Fay→JS encoding.
fayToJsHash :: [(String, JsExp)] -> [JsStmt]
fayToJsHash cases = [JsExpStmt $ JsApp (JsName $ JsBuiltIn "objConcat") [JsName $ JsBuiltIn "fayToJsHash", JsObj cases]]
-- | Generate JS→Fay decoding.
jsToFayHash :: [(String, JsExp)] -> [JsStmt]
jsToFayHash cases = [JsExpStmt $ JsApp (JsName $ JsBuiltIn "objConcat") [JsName $ JsBuiltIn "jsToFayHash", JsObj cases]]
-- | Make a JS→Fay decoder.
emitJsToFay :: Name a -> [TyVarBind b] -> [([Name c],BangType d)] -> Compile ()
emitJsToFay (unAnn -> name) (map unAnn -> tyvars) (map (unAnn *** unAnn) . explodeFields -> fieldTypes) = do
qname <- qualify name
tell (mempty { writerJsToFay = [(printJSString (unAnn name), translator qname)] })
where
translator qname =
JsFun Nothing [JsNameVar "type", argTypes, transcodingObj] []
(Just $ JsNew (JsConstructor qname)
(map (decodeField . getIndex name tyvars) fieldTypes))
-- Decode JS→Fay field
decodeField :: (Int,(N.Name,N.BangType)) -> JsExp
decodeField (i,(fname,typ)) =
jsToFay (SerializeUserArg i)
(argType (bangType typ))
(JsGetPropExtern (JsName transcodingObj)
(prettyPrint fname))
-- | The argument types used in serialization of parametrized user-defined types.
argTypes :: JsName
argTypes = JsNameVar "argTypes"
-- | Get the index of a name from the set of type variables bindings.
getIndex :: Name a -> [TyVarBind b] -> (Name c,BangType d) -> (Int,(N.Name,N.BangType))
getIndex (unAnn -> name) (map unAnn -> tyvars) (unAnn -> sname,unAnn -> ty) =
case bangType ty of
TyVar _ tyname -> case elemIndex tyname (map tyvar tyvars) of
Nothing -> error $ "unknown type variable " ++ prettyPrint tyname ++
" for " ++ prettyPrint name ++ "." ++ prettyPrint sname ++ "," ++
" vars were: " ++ unwords (map prettyPrint tyvars) ++ ", rest: " ++ show tyvars
Just i -> (i,(sname,ty))
_ -> (0,(sname,ty))
-- | Extract the name from a possibly-kinded tyvar.
tyvar :: N.TyVarBind -> N.Name
tyvar (UnkindedVar _ v) = v
tyvar (KindedVar _ v _) = v
|
fpco/fay
|
src/Fay/Compiler/FFI.hs
|
bsd-3-clause
| 14,618
| 0
| 21
| 4,319
| 4,796
| 2,416
| 2,380
| 282
| 17
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Desugaring expressions.
-}
{-# LANGUAGE CPP, MultiWayIf #-}
{-# LANGUAGE TypeFamilies #-}
module DsExpr ( dsExpr, dsLExpr, dsLExprNoLP, dsLocalBinds
, dsValBinds, dsLit, dsSyntaxExpr ) where
#include "HsVersions.h"
import GhcPrelude
import Match
import MatchLit
import DsBinds
import DsGRHSs
import DsListComp
import DsUtils
import DsArrows
import DsMonad
import Name
import NameEnv
import FamInstEnv( topNormaliseType )
import DsMeta
import HsSyn
-- NB: The desugarer, which straddles the source and Core worlds, sometimes
-- needs to see source types
import TcType
import TcEvidence
import TcRnMonad
import TcHsSyn
import Type
import CoreSyn
import CoreUtils
import MkCore
import DynFlags
import CostCentre
import Id
import MkId
import Module
import ConLike
import DataCon
import TysWiredIn
import PrelNames
import BasicTypes
import Maybes
import VarEnv
import SrcLoc
import Util
import Bag
import Outputable
import PatSyn
import Control.Monad
{-
************************************************************************
* *
dsLocalBinds, dsValBinds
* *
************************************************************************
-}
dsLocalBinds :: LHsLocalBinds GhcTc -> CoreExpr -> DsM CoreExpr
dsLocalBinds (L _ EmptyLocalBinds) body = return body
dsLocalBinds (L loc (HsValBinds binds)) body = putSrcSpanDs loc $
dsValBinds binds body
dsLocalBinds (L _ (HsIPBinds binds)) body = dsIPBinds binds body
-------------------------
-- caller sets location
dsValBinds :: HsValBinds GhcTc -> CoreExpr -> DsM CoreExpr
dsValBinds (ValBindsOut binds _) body = foldrM ds_val_bind body binds
dsValBinds (ValBindsIn {}) _ = panic "dsValBinds ValBindsIn"
-------------------------
dsIPBinds :: HsIPBinds GhcTc -> CoreExpr -> DsM CoreExpr
dsIPBinds (IPBinds ip_binds ev_binds) body
= do { ds_binds <- dsTcEvBinds ev_binds
; let inner = mkCoreLets ds_binds body
-- The dict bindings may not be in
-- dependency order; hence Rec
; foldrM ds_ip_bind inner ip_binds }
where
ds_ip_bind (L _ (IPBind ~(Right n) e)) body
= do e' <- dsLExpr e
return (Let (NonRec n e') body)
-------------------------
-- caller sets location
ds_val_bind :: (RecFlag, LHsBinds GhcTc) -> CoreExpr -> DsM CoreExpr
-- Special case for bindings which bind unlifted variables
-- We need to do a case right away, rather than building
-- a tuple and doing selections.
-- Silently ignore INLINE and SPECIALISE pragmas...
ds_val_bind (NonRecursive, hsbinds) body
| [L loc bind] <- bagToList hsbinds
-- Non-recursive, non-overloaded bindings only come in ones
-- ToDo: in some bizarre case it's conceivable that there
-- could be dict binds in the 'binds'. (See the notes
-- below. Then pattern-match would fail. Urk.)
, isUnliftedHsBind bind
= putSrcSpanDs loc $
-- see Note [Strict binds checks] in DsBinds
if is_polymorphic bind
then errDsCoreExpr (poly_bind_err bind)
-- data Ptr a = Ptr Addr#
-- f x = let p@(Ptr y) = ... in ...
-- Here the binding for 'p' is polymorphic, but does
-- not mix with an unlifted binding for 'y'. You should
-- use a bang pattern. Trac #6078.
else do { when (looksLazyPatBind bind) $
warnIfSetDs Opt_WarnUnbangedStrictPatterns (unlifted_must_be_bang bind)
-- Complain about a binding that looks lazy
-- e.g. let I# y = x in ...
-- Remember, in checkStrictBinds we are going to do strict
-- matching, so (for software engineering reasons) we insist
-- that the strictness is manifest on each binding
-- However, lone (unboxed) variables are ok
; dsUnliftedBind bind body }
where
is_polymorphic (AbsBinds { abs_tvs = tvs, abs_ev_vars = evs })
= not (null tvs && null evs)
is_polymorphic _ = False
unlifted_must_be_bang bind
= hang (text "Pattern bindings containing unlifted types should use" $$
text "an outermost bang pattern:")
2 (ppr bind)
poly_bind_err bind
= hang (text "You can't mix polymorphic and unlifted bindings:")
2 (ppr bind) $$
text "Probable fix: add a type signature"
ds_val_bind (is_rec, binds) _body
| anyBag (isUnliftedHsBind . unLoc) binds -- see Note [Strict binds checks] in DsBinds
= ASSERT( isRec is_rec )
errDsCoreExpr $
hang (text "Recursive bindings for unlifted types aren't allowed:")
2 (vcat (map ppr (bagToList binds)))
-- Ordinary case for bindings; none should be unlifted
ds_val_bind (is_rec, binds) body
= do { MASSERT( isRec is_rec || isSingletonBag binds )
-- we should never produce a non-recursive list of multiple binds
; (force_vars,prs) <- dsLHsBinds binds
; let body' = foldr seqVar body force_vars
; ASSERT2( not (any (isUnliftedType . idType . fst) prs), ppr is_rec $$ ppr binds )
case prs of
[] -> return body
_ -> return (Let (Rec prs) body') }
-- Use a Rec regardless of is_rec.
-- Why? Because it allows the binds to be all
-- mixed up, which is what happens in one rare case
-- Namely, for an AbsBind with no tyvars and no dicts,
-- but which does have dictionary bindings.
-- See notes with TcSimplify.inferLoop [NO TYVARS]
-- It turned out that wrapping a Rec here was the easiest solution
--
-- NB The previous case dealt with unlifted bindings, so we
-- only have to deal with lifted ones now; so Rec is ok
------------------
dsUnliftedBind :: HsBind GhcTc -> CoreExpr -> DsM CoreExpr
dsUnliftedBind (AbsBinds { abs_tvs = [], abs_ev_vars = []
, abs_exports = exports
, abs_ev_binds = ev_binds
, abs_binds = lbinds }) body
= do { let body1 = foldr bind_export body exports
bind_export export b = bindNonRec (abe_poly export) (Var (abe_mono export)) b
; body2 <- foldlBagM (\body lbind -> dsUnliftedBind (unLoc lbind) body)
body1 lbinds
; ds_binds <- dsTcEvBinds_s ev_binds
; return (mkCoreLets ds_binds body2) }
dsUnliftedBind (FunBind { fun_id = L l fun
, fun_matches = matches
, fun_co_fn = co_fn
, fun_tick = tick }) body
-- Can't be a bang pattern (that looks like a PatBind)
-- so must be simply unboxed
= do { (args, rhs) <- matchWrapper (mkPrefixFunRhs (L l $ idName fun))
Nothing matches
; MASSERT( null args ) -- Functions aren't lifted
; MASSERT( isIdHsWrapper co_fn )
; let rhs' = mkOptTickBox tick rhs
; return (bindNonRec fun rhs' body) }
dsUnliftedBind (PatBind {pat_lhs = pat, pat_rhs = grhss, pat_rhs_ty = ty }) body
= -- let C x# y# = rhs in body
-- ==> case rhs of C x# y# -> body
do { rhs <- dsGuarded grhss ty
; let upat = unLoc pat
eqn = EqnInfo { eqn_pats = [upat],
eqn_rhs = cantFailMatchResult body }
; var <- selectMatchVar upat
; result <- matchEquations PatBindRhs [var] [eqn] (exprType body)
; return (bindNonRec var rhs result) }
dsUnliftedBind bind body = pprPanic "dsLet: unlifted" (ppr bind $$ ppr body)
{-
************************************************************************
* *
\subsection[DsExpr-vars-and-cons]{Variables, constructors, literals}
* *
************************************************************************
-}
dsLExpr :: LHsExpr GhcTc -> DsM CoreExpr
dsLExpr (L loc e)
= putSrcSpanDs loc $
do { core_expr <- dsExpr e
-- uncomment this check to test the hsExprType function in TcHsSyn
-- ; MASSERT2( exprType core_expr `eqType` hsExprType e
-- , ppr e <+> dcolon <+> ppr (hsExprType e) $$
-- ppr core_expr <+> dcolon <+> ppr (exprType core_expr) )
; return core_expr }
-- | Variant of 'dsLExpr' that ensures that the result is not levity
-- polymorphic. This should be used when the resulting expression will
-- be an argument to some other function.
-- See Note [Levity polymorphism checking] in DsMonad
-- See Note [Levity polymorphism invariants] in CoreSyn
dsLExprNoLP :: LHsExpr GhcTc -> DsM CoreExpr
dsLExprNoLP (L loc e)
= putSrcSpanDs loc $
do { e' <- dsExpr e
; dsNoLevPolyExpr e' (text "In the type of expression:" <+> ppr e)
; return e' }
dsExpr :: HsExpr GhcTc -> DsM CoreExpr
dsExpr = ds_expr False
ds_expr :: Bool -- are we directly inside an HsWrap?
-- See Wrinkle in Note [Detecting forced eta expansion]
-> HsExpr GhcTc -> DsM CoreExpr
ds_expr _ (HsPar e) = dsLExpr e
ds_expr _ (ExprWithTySigOut e _) = dsLExpr e
ds_expr w (HsVar (L _ var)) = dsHsVar w var
ds_expr _ (HsUnboundVar {}) = panic "dsExpr: HsUnboundVar" -- Typechecker eliminates them
ds_expr w (HsConLikeOut con) = dsConLike w con
ds_expr _ (HsIPVar _) = panic "dsExpr: HsIPVar"
ds_expr _ (HsOverLabel{}) = panic "dsExpr: HsOverLabel"
ds_expr _ (HsLit lit) = dsLit (convertLit lit)
ds_expr _ (HsOverLit lit) = dsOverLit lit
ds_expr _ (HsWrap co_fn e)
= do { e' <- ds_expr True e
; wrap' <- dsHsWrapper co_fn
; dflags <- getDynFlags
; let wrapped_e = wrap' e'
wrapped_ty = exprType wrapped_e
; checkForcedEtaExpansion e wrapped_ty -- See Note [Detecting forced eta expansion]
; warnAboutIdentities dflags e' wrapped_ty
; return wrapped_e }
ds_expr _ (NegApp (L loc (HsOverLit lit@(OverLit { ol_val = HsIntegral i })))
neg_expr)
= do { expr' <- putSrcSpanDs loc $ do
{ dflags <- getDynFlags
; warnAboutOverflowedLiterals dflags
(lit { ol_val = HsIntegral (negateIntegralLit i) })
; dsOverLit' dflags lit }
; dsSyntaxExpr neg_expr [expr'] }
ds_expr _ (NegApp expr neg_expr)
= do { expr' <- dsLExpr expr
; dsSyntaxExpr neg_expr [expr'] }
ds_expr _ (HsLam a_Match)
= uncurry mkLams <$> matchWrapper LambdaExpr Nothing a_Match
ds_expr _ (HsLamCase matches)
= do { ([discrim_var], matching_code) <- matchWrapper CaseAlt Nothing matches
; return $ Lam discrim_var matching_code }
ds_expr _ e@(HsApp fun arg)
= do { fun' <- dsLExpr fun
; dsWhenNoErrs (dsLExprNoLP arg)
(\arg' -> mkCoreAppDs (text "HsApp" <+> ppr e) fun' arg') }
ds_expr _ (HsAppTypeOut e _)
-- ignore type arguments here; they're in the wrappers instead at this point
= dsLExpr e
{-
Note [Desugaring vars]
~~~~~~~~~~~~~~~~~~~~~~
In one situation we can get a *coercion* variable in a HsVar, namely
the support method for an equality superclass:
class (a~b) => C a b where ...
instance (blah) => C (T a) (T b) where ..
Then we get
$dfCT :: forall ab. blah => C (T a) (T b)
$dfCT ab blah = MkC ($c$p1C a blah) ($cop a blah)
$c$p1C :: forall ab. blah => (T a ~ T b)
$c$p1C ab blah = let ...; g :: T a ~ T b = ... } in g
That 'g' in the 'in' part is an evidence variable, and when
converting to core it must become a CO.
Operator sections. At first it looks as if we can convert
\begin{verbatim}
(expr op)
\end{verbatim}
to
\begin{verbatim}
\x -> op expr x
\end{verbatim}
But no! expr might be a redex, and we can lose laziness badly this
way. Consider
\begin{verbatim}
map (expr op) xs
\end{verbatim}
for example. So we convert instead to
\begin{verbatim}
let y = expr in \x -> op y x
\end{verbatim}
If \tr{expr} is actually just a variable, say, then the simplifier
will sort it out.
-}
ds_expr _ e@(OpApp e1 op _ e2)
= -- for the type of y, we need the type of op's 2nd argument
do { op' <- dsLExpr op
; dsWhenNoErrs (mapM dsLExprNoLP [e1, e2])
(\exprs' -> mkCoreAppsDs (text "opapp" <+> ppr e) op' exprs') }
ds_expr _ (SectionL expr op) -- Desugar (e !) to ((!) e)
= do { op' <- dsLExpr op
; dsWhenNoErrs (dsLExprNoLP expr)
(\expr' -> mkCoreAppDs (text "sectionl" <+> ppr expr) op' expr') }
-- dsLExpr (SectionR op expr) -- \ x -> op x expr
ds_expr _ e@(SectionR op expr) = do
core_op <- dsLExpr op
-- for the type of x, we need the type of op's 2nd argument
let (x_ty:y_ty:_, _) = splitFunTys (exprType core_op)
-- See comment with SectionL
y_core <- dsLExpr expr
dsWhenNoErrs (mapM newSysLocalDsNoLP [x_ty, y_ty])
(\[x_id, y_id] -> bindNonRec y_id y_core $
Lam x_id (mkCoreAppsDs (text "sectionr" <+> ppr e)
core_op [Var x_id, Var y_id]))
ds_expr _ (ExplicitTuple tup_args boxity)
= do { let go (lam_vars, args) (L _ (Missing ty))
-- For every missing expression, we need
-- another lambda in the desugaring.
= do { lam_var <- newSysLocalDsNoLP ty
; return (lam_var : lam_vars, Var lam_var : args) }
go (lam_vars, args) (L _ (Present expr))
-- Expressions that are present don't generate
-- lambdas, just arguments.
= do { core_expr <- dsLExprNoLP expr
; return (lam_vars, core_expr : args) }
; dsWhenNoErrs (foldM go ([], []) (reverse tup_args))
-- The reverse is because foldM goes left-to-right
(\(lam_vars, args) -> mkCoreLams lam_vars $
mkCoreTupBoxity boxity args) }
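-- For instance (a sketch, not verbatim output): the boxed tuple section (, e)
-- has one Missing slot followed by a Present expression, so the fold above
-- produces roughly  \lam_var -> (lam_var, e')  where e' is the desugaring of e.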
ds_expr _ (ExplicitSum alt arity expr types)
= do { dsWhenNoErrs (dsLExprNoLP expr)
(\core_expr -> mkCoreConApps (sumDataCon alt arity)
(map (Type . getRuntimeRep) types ++
map Type types ++
[core_expr]) ) }
ds_expr _ (HsSCC _ cc expr@(L loc _)) = do
dflags <- getDynFlags
if gopt Opt_SccProfilingOn dflags
then do
mod_name <- getModule
count <- goptM Opt_ProfCountEntries
uniq <- newUnique
Tick (ProfNote (mkUserCC (sl_fs cc) mod_name loc uniq) count True)
<$> dsLExpr expr
else dsLExpr expr
ds_expr _ (HsCoreAnn _ _ expr)
= dsLExpr expr
ds_expr _ (HsCase discrim matches)
= do { core_discrim <- dsLExpr discrim
; ([discrim_var], matching_code) <- matchWrapper CaseAlt (Just discrim) matches
; return (bindNonRec discrim_var core_discrim matching_code) }
-- Pepe: The binds are in scope in the body but NOT in the binding group
-- This is to avoid silliness in breakpoints
ds_expr _ (HsLet binds body) = do
body' <- dsLExpr body
dsLocalBinds binds body'
-- We need the `ListComp' form to use `deListComp' (rather than the "do" form)
-- because the interpretation of `stmts' depends on what sort of thing it is.
--
ds_expr _ (HsDo ListComp (L _ stmts) res_ty) = dsListComp stmts res_ty
ds_expr _ (HsDo PArrComp (L _ stmts) _) = dsPArrComp (map unLoc stmts)
ds_expr _ (HsDo DoExpr (L _ stmts) _) = dsDo stmts
ds_expr _ (HsDo GhciStmtCtxt (L _ stmts) _) = dsDo stmts
ds_expr _ (HsDo MDoExpr (L _ stmts) _) = dsDo stmts
ds_expr _ (HsDo MonadComp (L _ stmts) _) = dsMonadComp stmts
ds_expr _ (HsIf mb_fun guard_expr then_expr else_expr)
= do { pred <- dsLExpr guard_expr
; b1 <- dsLExpr then_expr
; b2 <- dsLExpr else_expr
; case mb_fun of
Just fun -> dsSyntaxExpr fun [pred, b1, b2]
Nothing -> return $ mkIfThenElse pred b1 b2 }
ds_expr _ (HsMultiIf res_ty alts)
| null alts
= mkErrorExpr
| otherwise
= do { match_result <- liftM (foldr1 combineMatchResults)
(mapM (dsGRHS IfAlt res_ty) alts)
; error_expr <- mkErrorExpr
; extractMatchResult match_result error_expr }
where
mkErrorExpr = mkErrorAppDs nON_EXHAUSTIVE_GUARDS_ERROR_ID res_ty
(text "multi-way if")
{-
\noindent
\underline{\bf Various data construction things}
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-}
ds_expr _ (ExplicitList elt_ty wit xs)
= dsExplicitList elt_ty wit xs
-- We desugar [:x1, ..., xn:] as
-- singletonP x1 +:+ ... +:+ singletonP xn
--
ds_expr _ (ExplicitPArr ty []) = do
emptyP <- dsDPHBuiltin emptyPVar
return (Var emptyP `App` Type ty)
ds_expr _ (ExplicitPArr ty xs) = do
singletonP <- dsDPHBuiltin singletonPVar
appP <- dsDPHBuiltin appPVar
xs' <- mapM dsLExprNoLP xs
let unary fn x = mkApps (Var fn) [Type ty, x]
binary fn x y = mkApps (Var fn) [Type ty, x, y]
return . foldr1 (binary appP) $ map (unary singletonP) xs'
ds_expr _ (ArithSeq expr witness seq)
= case witness of
Nothing -> dsArithSeq expr seq
Just fl -> do { newArithSeq <- dsArithSeq expr seq
; dsSyntaxExpr fl [newArithSeq] }
ds_expr _ (PArrSeq expr (FromTo from to))
= mkApps <$> dsExpr expr <*> mapM dsLExprNoLP [from, to]
ds_expr _ (PArrSeq expr (FromThenTo from thn to))
= mkApps <$> dsExpr expr <*> mapM dsLExprNoLP [from, thn, to]
ds_expr _ (PArrSeq _ _)
= panic "DsExpr.dsExpr: Infinite parallel array!"
-- the parser shouldn't have generated it and the renamer and typechecker
-- shouldn't have let it through
{-
Static Pointers
~~~~~~~~~~~~~~~
See Note [Grand plan for static forms] in StaticPtrTable for an overview.
g = ... static f ...
==>
g = ... makeStatic loc f ...
-}
ds_expr _ (HsStatic _ expr@(L loc _)) = do
expr_ds <- dsLExprNoLP expr
let ty = exprType expr_ds
makeStaticId <- dsLookupGlobalId makeStaticName
dflags <- getDynFlags
let (line, col) = case loc of
RealSrcSpan r -> ( srcLocLine $ realSrcSpanStart r
, srcLocCol $ realSrcSpanStart r
)
_ -> (0, 0)
srcLoc = mkCoreConApps (tupleDataCon Boxed 2)
[ Type intTy , Type intTy
, mkIntExprInt dflags line, mkIntExprInt dflags col
]
putSrcSpanDs loc $ return $
mkCoreApps (Var makeStaticId) [ Type ty, srcLoc, expr_ds ]
{-
\noindent
\underline{\bf Record construction and update}
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For record construction we do this (assuming T has three arguments)
\begin{verbatim}
T { op2 = e }
==>
let err = /\a -> recConErr a
T (recConErr t1 "M.hs/230/op1")
e
(recConErr t1 "M.hs/230/op3")
\end{verbatim}
@recConErr@ then converts its argument string into a proper message
before printing it as
\begin{verbatim}
M.hs, line 230: missing field op1 was evaluated
\end{verbatim}
We also handle @C{}@ as valid construction syntax for an unlabelled
constructor @C@, setting all of @C@'s fields to bottom.
-}
ds_expr _ (RecordCon { rcon_con_expr = con_expr, rcon_flds = rbinds
, rcon_con_like = con_like })
= do { con_expr' <- dsExpr con_expr
; let
(arg_tys, _) = tcSplitFunTys (exprType con_expr')
-- A newtype in the corner should be opaque;
-- hence TcType.tcSplitFunTys
mk_arg (arg_ty, fl)
= case findField (rec_flds rbinds) (flSelector fl) of
(rhs:rhss) -> ASSERT( null rhss )
dsLExprNoLP rhs
[] -> mkErrorAppDs rEC_CON_ERROR_ID arg_ty (ppr (flLabel fl))
unlabelled_bottom arg_ty = mkErrorAppDs rEC_CON_ERROR_ID arg_ty Outputable.empty
labels = conLikeFieldLabels con_like
; con_args <- if null labels
then mapM unlabelled_bottom arg_tys
else mapM mk_arg (zipEqual "dsExpr:RecordCon" arg_tys labels)
; return (mkCoreApps con_expr' con_args) }
{-
Record update is a little harder. Suppose we have the decl:
\begin{verbatim}
data T = T1 {op1, op2, op3 :: Int}
| T2 {op4, op2 :: Int}
| T3
\end{verbatim}
Then we translate as follows:
\begin{verbatim}
r { op2 = e }
===>
let op2 = e in
case r of
  T1 op1 _ op3 -> T1 op1 op2 op3
  T2 op4 _     -> T2 op4 op2
  other        -> recUpdError "M.hs/230"
\end{verbatim}
It's important that we use the constructor Ids for @T1@, @T2@ etc on the
RHSs, and do not generate a Core constructor application directly, because the constructor
might do some argument-evaluation first; and may have to throw away some
dictionaries.
Note [Update for GADTs]
~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a b where
T1 :: { f1 :: a } -> T a Int
Then the wrapper function for T1 has type
$WT1 :: a -> T a Int
But if x::T a b, then
x { f1 = v } :: T a b (not T a Int!)
So we need to cast (T a Int) to (T a b). Sigh.
-}
ds_expr _ expr@(RecordUpd { rupd_expr = record_expr, rupd_flds = fields
, rupd_cons = cons_to_upd
, rupd_in_tys = in_inst_tys, rupd_out_tys = out_inst_tys
, rupd_wrap = dict_req_wrap } )
| null fields
= dsLExpr record_expr
| otherwise
= ASSERT2( notNull cons_to_upd, ppr expr )
do { record_expr' <- dsLExpr record_expr
; field_binds' <- mapM ds_field fields
; let upd_fld_env :: NameEnv Id -- Maps field name to the LocalId of the field binding
upd_fld_env = mkNameEnv [(f,l) | (f,l,_) <- field_binds']
-- It's important to generate the match with matchWrapper,
-- and the right hand sides with applications of the wrapper Id
-- so that everything works when we are doing fancy unboxing on the
-- constructor arguments.
; alts <- mapM (mk_alt upd_fld_env) cons_to_upd
; ([discrim_var], matching_code)
<- matchWrapper RecUpd Nothing (MG { mg_alts = noLoc alts
, mg_arg_tys = [in_ty]
, mg_res_ty = out_ty, mg_origin = FromSource })
-- FromSource is not strictly right, but we
-- want incomplete pattern-match warnings
; return (add_field_binds field_binds' $
bindNonRec discrim_var record_expr' matching_code) }
where
ds_field :: LHsRecUpdField GhcTc -> DsM (Name, Id, CoreExpr)
-- Clone the Id in the HsRecField, because its Name is that
-- of the record selector, and we must not make that a local binder
-- else we shadow other uses of the record selector
-- Hence 'lcl_id'. Cf Trac #2735
ds_field (L _ rec_field) = do { rhs <- dsLExpr (hsRecFieldArg rec_field)
; let fld_id = unLoc (hsRecUpdFieldId rec_field)
; lcl_id <- newSysLocalDs (idType fld_id)
; return (idName fld_id, lcl_id, rhs) }
add_field_binds [] expr = expr
add_field_binds ((_,b,r):bs) expr = bindNonRec b r (add_field_binds bs expr)
-- Awkwardly, for families, the match goes
-- from instance type to family type
(in_ty, out_ty) =
case (head cons_to_upd) of
RealDataCon data_con ->
let tycon = dataConTyCon data_con in
(mkTyConApp tycon in_inst_tys, mkFamilyTyConApp tycon out_inst_tys)
PatSynCon pat_syn ->
( patSynInstResTy pat_syn in_inst_tys
, patSynInstResTy pat_syn out_inst_tys)
mk_alt upd_fld_env con
= do { let (univ_tvs, ex_tvs, eq_spec,
prov_theta, _req_theta, arg_tys, _) = conLikeFullSig con
subst = zipTvSubst univ_tvs in_inst_tys
-- I'm not bothering to clone the ex_tvs
; eqs_vars <- mapM newPredVarDs (substTheta subst (eqSpecPreds eq_spec))
; theta_vars <- mapM newPredVarDs (substTheta subst prov_theta)
; arg_ids <- newSysLocalsDs (substTysUnchecked subst arg_tys)
; let field_labels = conLikeFieldLabels con
val_args = zipWithEqual "dsExpr:RecordUpd" mk_val_arg
field_labels arg_ids
mk_val_arg fl pat_arg_id
= nlHsVar (lookupNameEnv upd_fld_env (flSelector fl) `orElse` pat_arg_id)
inst_con = noLoc $ mkHsWrap wrap (HsConLikeOut con)
-- Reconstruct with the WrapId so that unpacking happens
-- The order here is because of the order in `TcPatSyn`.
wrap = mkWpEvVarApps theta_vars <.>
dict_req_wrap <.>
mkWpTyApps (mkTyVarTys ex_tvs) <.>
mkWpTyApps [ ty
| (tv, ty) <- univ_tvs `zip` out_inst_tys
, not (tv `elemVarEnv` wrap_subst) ]
rhs = foldl (\a b -> nlHsApp a b) inst_con val_args
-- Tediously wrap the application in a cast
-- Note [Update for GADTs]
wrapped_rhs =
case con of
RealDataCon data_con ->
let
wrap_co =
mkTcTyConAppCo Nominal
(dataConTyCon data_con)
[ lookup tv ty
| (tv,ty) <- univ_tvs `zip` out_inst_tys ]
lookup univ_tv ty =
case lookupVarEnv wrap_subst univ_tv of
Just co' -> co'
Nothing -> mkTcReflCo Nominal ty
in if null eq_spec
then rhs
else mkLHsWrap (mkWpCastN wrap_co) rhs
-- eq_spec is always null for a PatSynCon
PatSynCon _ -> rhs
wrap_subst =
mkVarEnv [ (tv, mkTcSymCo (mkTcCoVarCo eq_var))
| (spec, eq_var) <- eq_spec `zip` eqs_vars
, let tv = eqSpecTyVar spec ]
req_wrap = dict_req_wrap <.> mkWpTyApps in_inst_tys
pat = noLoc $ ConPatOut { pat_con = noLoc con
, pat_tvs = ex_tvs
, pat_dicts = eqs_vars ++ theta_vars
, pat_binds = emptyTcEvBinds
, pat_args = PrefixCon $ map nlVarPat arg_ids
, pat_arg_tys = in_inst_tys
, pat_wrap = req_wrap }
; return (mkSimpleMatch RecUpd [pat] wrapped_rhs) }
-- Here is where we desugar the Template Haskell brackets and escapes
-- Template Haskell stuff
ds_expr _ (HsRnBracketOut _ _) = panic "dsExpr HsRnBracketOut"
ds_expr _ (HsTcBracketOut x ps) = dsBracket x ps
ds_expr _ (HsSpliceE s) = pprPanic "dsExpr:splice" (ppr s)
-- Arrow notation extension
ds_expr _ (HsProc pat cmd) = dsProcExpr pat cmd
-- Hpc Support
ds_expr _ (HsTick tickish e) = do
e' <- dsLExpr e
return (Tick tickish e')
-- There is a problem here. The then and else branches
-- have no free variables, so they are open to lifting.
-- We need some way of stopping this.
-- This will make no difference to binary coverage
-- (did you go here: YES or NO), but will affect accurate
-- tick counting.
ds_expr _ (HsBinTick ixT ixF e) = do
e2 <- dsLExpr e
do { ASSERT(exprType e2 `eqType` boolTy)
mkBinaryTickBox ixT ixF e2
}
ds_expr _ (HsTickPragma _ _ _ expr) = do
dflags <- getDynFlags
if gopt Opt_Hpc dflags
then panic "dsExpr:HsTickPragma"
else dsLExpr expr
-- HsSyn constructs that just shouldn't be here:
ds_expr _ (ExprWithTySig {}) = panic "dsExpr:ExprWithTySig"
ds_expr _ (HsBracket {}) = panic "dsExpr:HsBracket"
ds_expr _ (HsArrApp {}) = panic "dsExpr:HsArrApp"
ds_expr _ (HsArrForm {}) = panic "dsExpr:HsArrForm"
ds_expr _ (EWildPat {}) = panic "dsExpr:EWildPat"
ds_expr _ (EAsPat {}) = panic "dsExpr:EAsPat"
ds_expr _ (EViewPat {}) = panic "dsExpr:EViewPat"
ds_expr _ (ELazyPat {}) = panic "dsExpr:ELazyPat"
ds_expr _ (HsAppType {}) = panic "dsExpr:HsAppType" -- removed by typechecker
ds_expr _ (HsDo {}) = panic "dsExpr:HsDo"
ds_expr _ (HsRecFld {}) = panic "dsExpr:HsRecFld"
------------------------------
dsSyntaxExpr :: SyntaxExpr GhcTc -> [CoreExpr] -> DsM CoreExpr
dsSyntaxExpr (SyntaxExpr { syn_expr = expr
, syn_arg_wraps = arg_wraps
, syn_res_wrap = res_wrap })
arg_exprs
= do { fun <- dsExpr expr
; core_arg_wraps <- mapM dsHsWrapper arg_wraps
; core_res_wrap <- dsHsWrapper res_wrap
; let wrapped_args = zipWith ($) core_arg_wraps arg_exprs
; dsWhenNoErrs (zipWithM_ dsNoLevPolyExpr wrapped_args [ mk_doc n | n <- [1..] ])
(\_ -> core_res_wrap (mkApps fun wrapped_args)) }
where
mk_doc n = text "In the" <+> speakNth n <+> text "argument of" <+> quotes (ppr expr)
findField :: [LHsRecField GhcTc arg] -> Name -> [arg]
findField rbinds sel
= [hsRecFieldArg fld | L _ fld <- rbinds
, sel == idName (unLoc $ hsRecFieldId fld) ]
{-
%--------------------------------------------------------------------
Note [Desugaring explicit lists]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Explicit lists are desugared in a cleverer way to prevent some
fruitless allocations. Essentially, whenever we see a list literal
[x_1, ..., x_n] we generate the corresponding expression in terms of
build (see the worked example after this note).
Explicit lists (literals) are desugared to allow build/foldr fusion when
beneficial. This is a bit of a trade-off,
* build/foldr fusion can generate far larger code than the corresponding
cons-chain (e.g. see #11707)
* even when it doesn't produce more code, build can still fail to fuse,
requiring that the simplifier do more work to bring the expression
back into cons-chain form; this costs compile time
* when it works, fusion can be a significant win. Allocations are reduced
by up to 25% in some nofib programs. Specifically,
        Program       Size    Allocs   Runtime   CompTime
        rewrite      +0.0%    -26.3%      0.02     -1.8%
        ansi         -0.3%    -13.8%      0.00     +0.0%
        lift         +0.0%     -8.7%      0.00     -2.3%
At the moment we use a simple heuristic to determine whether build will be
fruitful: for small lists we assume the benefits of fusion will be worthwhile;
for long lists we assume that the benefits will be outweighed by the cost of
code duplication. This magic length threshold is @maxBuildLength@. Also, fusion
won't work at all if rewrite rules are disabled, so we don't use the build-based
desugaring in this case.
We used to have a more complex heuristic which would try to break the list into
"static" and "dynamic" parts and only build-desugar the dynamic part.
Unfortunately, determining "static-ness" reliably is a bit tricky and the
heuristic at times produced surprising behavior (see #11710) so it was dropped.
-}
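{- A worked example (a sketch of the intended shape, not verbatim compiler
output): with rewrite rules enabled, a short literal such as
        [x, y, z]
is desugared via 'build' to roughly
        build (\c n -> x `c` (y `c` (z `c` n)))
which the foldr/build rules can fuse with a good consumer such as 'sum';
if nothing fuses, 'build' is later inlined/rewritten away (its definition is
build g = g (:) []), giving back the plain cons-chain  x : y : z : [].
-}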
{- | The longest list length which we will desugar using @build@.
This is essentially a magic number and its setting is unfortunately rather
arbitrary. The idea here, as mentioned in Note [Desugaring explicit lists],
is to avoid deforesting large static data into large(r) code. Ideally we'd
want a smaller threshold with larger consumers and vice-versa, but we have no
way of knowing what will be consuming our list in the desugaring, which makes
this threshold impossible to set generally correctly.
The effect of reducing this number is that 'build' fusion is applied less
often. From a runtime performance perspective, applying 'build' more liberally
on "moderately" sized lists should rarely hurt and will often expose further
optimization opportunities; if no fusion is possible, the expression is
eventually rule-rewritten back to a plain list. We do, however, pay in compile
time.
-}
maxBuildLength :: Int
maxBuildLength = 32
dsExplicitList :: Type -> Maybe (SyntaxExpr GhcTc) -> [LHsExpr GhcTc]
-> DsM CoreExpr
-- See Note [Desugaring explicit lists]
dsExplicitList elt_ty Nothing xs
= do { dflags <- getDynFlags
; xs' <- mapM dsLExprNoLP xs
; if xs' `lengthExceeds` maxBuildLength
-- Don't generate builds if the list is very long.
|| null xs'
-- Don't generate builds when the [] constructor will do
|| not (gopt Opt_EnableRewriteRules dflags) -- Rewrite rules off
-- Don't generate a build if there are no rules to eliminate it!
-- See Note [Desugaring RULE left hand sides] in Desugar
then return $ mkListExpr elt_ty xs'
else mkBuildExpr elt_ty (mk_build_list xs') }
where
mk_build_list xs' (cons, _) (nil, _)
= return (foldr (App . App (Var cons)) (Var nil) xs')
dsExplicitList elt_ty (Just fln) xs
= do { list <- dsExplicitList elt_ty Nothing xs
; dflags <- getDynFlags
; dsSyntaxExpr fln [mkIntExprInt dflags (length xs), list] }
dsArithSeq :: PostTcExpr -> (ArithSeqInfo GhcTc) -> DsM CoreExpr
dsArithSeq expr (From from)
= App <$> dsExpr expr <*> dsLExprNoLP from
dsArithSeq expr (FromTo from to)
= do dflags <- getDynFlags
warnAboutEmptyEnumerations dflags from Nothing to
expr' <- dsExpr expr
from' <- dsLExprNoLP from
to' <- dsLExprNoLP to
return $ mkApps expr' [from', to']
dsArithSeq expr (FromThen from thn)
= mkApps <$> dsExpr expr <*> mapM dsLExprNoLP [from, thn]
dsArithSeq expr (FromThenTo from thn to)
= do dflags <- getDynFlags
warnAboutEmptyEnumerations dflags from (Just thn) to
expr' <- dsExpr expr
from' <- dsLExprNoLP from
thn' <- dsLExprNoLP thn
to' <- dsLExprNoLP to
return $ mkApps expr' [from', thn', to']
{-
Desugar 'do' and 'mdo' expressions (NOT list comprehensions, they're
handled in DsListComp). Basically does the translation given in the
Haskell 98 report:
-}
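{- A small worked example (a sketch following the report-style translation,
not verbatim Core): with 'bind_op' desugaring to (>>=) and 'fail_op' to
'fail',
        do { Just x <- mxs ; f x }
becomes roughly
        mxs >>= \ scrut -> case scrut of
                             Just x -> f x
                             _      -> fail "Pattern match failure in do expression at ..."
The failure alternative is generated only when the pattern can actually fail;
see 'handle_failure' below.
-}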
dsDo :: [ExprLStmt GhcTc] -> DsM CoreExpr
dsDo stmts
= goL stmts
where
goL [] = panic "dsDo"
goL (L loc stmt:lstmts) = putSrcSpanDs loc (go loc stmt lstmts)
go _ (LastStmt body _ _) stmts
= ASSERT( null stmts ) dsLExpr body
-- The 'return' op isn't used for 'do' expressions
go _ (BodyStmt rhs then_expr _ _) stmts
= do { rhs2 <- dsLExpr rhs
; warnDiscardedDoBindings rhs (exprType rhs2)
; rest <- goL stmts
; dsSyntaxExpr then_expr [rhs2, rest] }
go _ (LetStmt binds) stmts
= do { rest <- goL stmts
; dsLocalBinds binds rest }
go _ (BindStmt pat rhs bind_op fail_op res1_ty) stmts
= do { body <- goL stmts
; rhs' <- dsLExpr rhs
; var <- selectSimpleMatchVarL pat
; match <- matchSinglePat (Var var) (StmtCtxt DoExpr) pat
res1_ty (cantFailMatchResult body)
; match_code <- handle_failure pat match fail_op
; dsSyntaxExpr bind_op [rhs', Lam var match_code] }
go _ (ApplicativeStmt args mb_join body_ty) stmts
= do {
let
(pats, rhss) = unzip (map (do_arg . snd) args)
do_arg (ApplicativeArgOne pat expr _) =
(pat, dsLExpr expr)
do_arg (ApplicativeArgMany stmts ret pat) =
(pat, dsDo (stmts ++ [noLoc $ mkLastStmt (noLoc ret)]))
arg_tys = map hsLPatType pats
; rhss' <- sequence rhss
; let body' = noLoc $ HsDo DoExpr (noLoc stmts) body_ty
; let fun = L noSrcSpan $ HsLam $
MG { mg_alts = noLoc [mkSimpleMatch LambdaExpr pats
body']
, mg_arg_tys = arg_tys
, mg_res_ty = body_ty
, mg_origin = Generated }
; fun' <- dsLExpr fun
; let mk_ap_call l (op,r) = dsSyntaxExpr op [l,r]
; expr <- foldlM mk_ap_call fun' (zip (map fst args) rhss')
; case mb_join of
Nothing -> return expr
Just join_op -> dsSyntaxExpr join_op [expr] }
go loc (RecStmt { recS_stmts = rec_stmts, recS_later_ids = later_ids
, recS_rec_ids = rec_ids, recS_ret_fn = return_op
, recS_mfix_fn = mfix_op, recS_bind_fn = bind_op
, recS_bind_ty = bind_ty
, recS_rec_rets = rec_rets, recS_ret_ty = body_ty }) stmts
= goL (new_bind_stmt : stmts) -- rec_ids can be empty; eg rec { print 'x' }
where
new_bind_stmt = L loc $ BindStmt (mkBigLHsPatTupId later_pats)
mfix_app bind_op
noSyntaxExpr -- Tuple cannot fail
bind_ty
tup_ids = rec_ids ++ filterOut (`elem` rec_ids) later_ids
tup_ty = mkBigCoreTupTy (map idType tup_ids) -- Deals with singleton case
rec_tup_pats = map nlVarPat tup_ids
later_pats = rec_tup_pats
rets = map noLoc rec_rets
mfix_app = nlHsSyntaxApps mfix_op [mfix_arg]
mfix_arg = noLoc $ HsLam
(MG { mg_alts = noLoc [mkSimpleMatch
LambdaExpr
[mfix_pat] body]
, mg_arg_tys = [tup_ty], mg_res_ty = body_ty
, mg_origin = Generated })
mfix_pat = noLoc $ LazyPat $ mkBigLHsPatTupId rec_tup_pats
body = noLoc $ HsDo
DoExpr (noLoc (rec_stmts ++ [ret_stmt])) body_ty
ret_app = nlHsSyntaxApps return_op [mkBigLHsTupId rets]
ret_stmt = noLoc $ mkLastStmt ret_app
-- This LastStmt will be desugared with dsDo,
-- which ignores the return_op in the LastStmt,
-- so we must apply the return_op explicitly
go _ (ParStmt {}) _ = panic "dsDo ParStmt"
go _ (TransStmt {}) _ = panic "dsDo TransStmt"
handle_failure :: LPat GhcTc -> MatchResult -> SyntaxExpr GhcTc -> DsM CoreExpr
-- In a do expression, pattern-match failure just calls
-- the monadic 'fail' rather than throwing an exception
handle_failure pat match fail_op
| matchCanFail match
= do { dflags <- getDynFlags
; fail_msg <- mkStringExpr (mk_fail_msg dflags pat)
; fail_expr <- dsSyntaxExpr fail_op [fail_msg]
; extractMatchResult match fail_expr }
| otherwise
= extractMatchResult match (error "It can't fail")
mk_fail_msg :: DynFlags -> Located e -> String
mk_fail_msg dflags pat = "Pattern match failure in do expression at " ++
showPpr dflags (getLoc pat)
{-
************************************************************************
* *
Desugaring Variables
* *
************************************************************************
-}
dsHsVar :: Bool -- are we directly inside an HsWrap?
-- See Wrinkle in Note [Detecting forced eta expansion]
-> Id -> DsM CoreExpr
dsHsVar w var
| not w
, let bad_tys = badUseOfLevPolyPrimop var ty
, not (null bad_tys)
= do { levPolyPrimopErr var ty bad_tys
; return unitExpr } -- return something eminently safe
| otherwise
= return (varToCoreExpr var) -- See Note [Desugaring vars]
where
ty = idType var
dsConLike :: Bool -- as in dsHsVar
-> ConLike -> DsM CoreExpr
dsConLike w (RealDataCon dc) = dsHsVar w (dataConWrapId dc)
dsConLike _ (PatSynCon ps) = return $ case patSynBuilder ps of
Just (id, add_void)
| add_void -> mkCoreApp (text "dsConLike" <+> ppr ps) (Var id) (Var voidPrimId)
| otherwise -> Var id
_ -> pprPanic "dsConLike" (ppr ps)
{-
************************************************************************
* *
\subsection{Errors and contexts}
* *
************************************************************************
-}
-- Warn about certain types of values discarded in monadic bindings (#3263)
warnDiscardedDoBindings :: LHsExpr GhcTc -> Type -> DsM ()
warnDiscardedDoBindings rhs rhs_ty
| Just (m_ty, elt_ty) <- tcSplitAppTy_maybe rhs_ty
= do { warn_unused <- woptM Opt_WarnUnusedDoBind
; warn_wrong <- woptM Opt_WarnWrongDoBind
; when (warn_unused || warn_wrong) $
do { fam_inst_envs <- dsGetFamInstEnvs
; let norm_elt_ty = topNormaliseType fam_inst_envs elt_ty
-- Warn about discarding non-() things in 'monadic' binding
; if warn_unused && not (isUnitTy norm_elt_ty)
then warnDs (Reason Opt_WarnUnusedDoBind)
(badMonadBind rhs elt_ty)
else
-- Warn about discarding m a things in 'monadic' binding of the same type,
-- but only if we didn't already warn due to Opt_WarnUnusedDoBind
when warn_wrong $
do { case tcSplitAppTy_maybe norm_elt_ty of
Just (elt_m_ty, _)
| m_ty `eqType` topNormaliseType fam_inst_envs elt_m_ty
-> warnDs (Reason Opt_WarnWrongDoBind)
(badMonadBind rhs elt_ty)
_ -> return () } } }
| otherwise -- RHS doesn't have type of form (m ty), which is weird
= return () -- but at least this warning is irrelevant
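-- For example (an illustration, not verbatim GHC output):
--
--      do { mapM putStrLn xs ; ... }        -- discards an [()]
--
-- triggers -Wunused-do-bind when that flag is enabled, while
--
--      do { return (readFile path) ; ... }  -- discards an (IO String) inside IO
--
-- triggers -Wwrong-do-bind (unless the unused-do-bind warning fired first).
-- Writing '_ <- mapM putStrLn xs', or using mapM_, silences the first case.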
badMonadBind :: LHsExpr GhcTc -> Type -> SDoc
badMonadBind rhs elt_ty
= vcat [ hang (text "A do-notation statement discarded a result of type")
2 (quotes (ppr elt_ty))
, hang (text "Suppress this warning by saying")
2 (quotes $ text "_ <-" <+> ppr rhs)
]
{-
************************************************************************
* *
Forced eta expansion and levity polymorphism
* *
************************************************************************
Note [Detecting forced eta expansion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We cannot have levity polymorphic function arguments. See
Note [Levity polymorphism invariants] in CoreSyn. But we *can* have
functions that take levity polymorphism arguments, as long as these
functions are eta-reduced. (See #12708 for an example.)
However, we absolutely cannot do this for functions that have no
binding (i.e., say True to Id.hasNoBinding), like primops and unboxed
tuple constructors. These get eta-expanded in CorePrep.maybeSaturate.
Detecting when this is about to happen is a bit tricky, though. When
the desugarer is looking at the Id itself (let's be concrete and
suppose we have (#,#)), we don't know whether it will be levity
polymorphic. So the right spot seems to be to look after the Id has
been applied to its type arguments. To make the algorithm efficient,
it's important to be able to spot ((#,#) @a @b @c @d) without looking
past all the type arguments. We thus require that
* The body of an HsWrap is not an HsWrap.
With that representation invariant, we simply look inside every HsWrap
to see if its body is an HsVar whose Id hasNoBinding. Then, we look
at the wrapped type. If it has any levity polymorphic arguments, reject.
Interestingly, this approach does not look to see whether the Id in
question will be eta expanded. The logic is this:
* Either the Id in question is saturated or not.
* If it is, then it surely can't have levity polymorphic arguments.
If its wrapped type contains levity polymorphic arguments, reject.
* If it's not, then it can't be eta expanded with levity polymorphic
argument. If its wrapped type contains levity polymorphic arguments, reject.
So, either way, we're good to reject.
Wrinkle
~~~~~~~
Not all polymorphic Ids are wrapped in
HsWrap, due to the lazy instantiation of TypeApplications. (See "Visible type
application", ESOP '16.) But if we spot a levity-polymorphic hasNoBinding Id
without a wrapper, then that is surely a problem and we can reject.
We thus have a parameter to `dsExpr` that tracks whether or not we are
directly in an HsWrap. If we find a levity-polymorphic hasNoBinding Id when
we're not directly in an HsWrap, reject.
-}
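{- An illustrative example (hedged; the signature below is a sketch of the
unboxed-pair constructor's type, not a compiler-verified dump): the constructor
        (#,#) :: forall (r1 :: RuntimeRep) (r2 :: RuntimeRep)
                        (a :: TYPE r1) (b :: TYPE r2). a -> b -> (# a, b #)
has no binding. If an occurrence is instantiated as  (#,#) @rep @LiftedRep @a @b
with 'rep' still a RuntimeRep variable, the first argument type (a :: TYPE rep)
is levity polymorphic, so 'badUseOfLevPolyPrimop' reports it via
'levPolyPrimopErr' rather than letting CorePrep eta-expand over a
levity-polymorphic binder.
-}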
-- | Takes an expression and its instantiated type. If the expression is an
-- HsVar with a hasNoBinding primop and the type has levity-polymorphic arguments,
-- issue an error. See Note [Detecting forced eta expansion]
checkForcedEtaExpansion :: HsExpr GhcTc -> Type -> DsM ()
checkForcedEtaExpansion expr ty
| Just var <- case expr of
HsVar (L _ var) -> Just var
HsConLikeOut (RealDataCon dc) -> Just (dataConWrapId dc)
_ -> Nothing
, let bad_tys = badUseOfLevPolyPrimop var ty
, not (null bad_tys)
= levPolyPrimopErr var ty bad_tys
checkForcedEtaExpansion _ _ = return ()
-- | Is this a hasNoBinding Id with a levity-polymorphic type?
-- Returns the arguments that are levity polymorphic if they are bad;
-- or an empty list otherwise
-- See Note [Detecting forced eta expansion]
badUseOfLevPolyPrimop :: Id -> Type -> [Type]
badUseOfLevPolyPrimop id ty
| hasNoBinding id
= filter isTypeLevPoly arg_tys
| otherwise
= []
where
(binders, _) = splitPiTys ty
arg_tys = mapMaybe binderRelevantType_maybe binders
levPolyPrimopErr :: Id -> Type -> [Type] -> DsM ()
levPolyPrimopErr primop ty bad_tys
= errDs $ vcat [ hang (text "Cannot use primitive with levity-polymorphic arguments:")
2 (ppr primop <+> dcolon <+> ppr ty)
, hang (text "Levity-polymorphic arguments:")
2 (vcat (map (\t -> ppr t <+> dcolon <+> ppr (typeKind t)) bad_tys)) ]
|
ezyang/ghc
|
compiler/deSugar/DsExpr.hs
|
bsd-3-clause
| 47,346
| 7
| 24
| 14,866
| 9,210
| 4,685
| 4,525
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
module Network.Syncthing.Types.Ignore
( Ignore(..)
) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (MonadPlus (mzero))
import Data.Aeson (FromJSON, Value (..), parseJSON, (.:?))
import Data.Text (Text)
-- | Contains the ignores list and a list of all compiled ignore patterns.
data Ignore = Ignore {
getIgnores :: Maybe [Text]
, getPatterns :: Maybe [Text]
} deriving (Eq, Show)
instance FromJSON Ignore where
parseJSON (Object v) =
Ignore <$> (v .:? "ignore")
<*> (v .:? "patterns")
parseJSON _ = mzero
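-- A minimal usage sketch (an assumption for illustration, not part of the
-- upstream documentation): with OverloadedStrings, a response body can be
-- decoded with Data.Aeson.decode:
--
--      decode "{\"ignore\":[\"*.log\"],\"patterns\":[\"(?d)*.log\"]}" :: Maybe Ignore
--        == Just (Ignore (Just ["*.log"]) (Just ["(?d)*.log"]))
--
-- Absent fields parse as Nothing, since both keys use the optional (.:?)
-- accessor above.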
|
jetho/syncthing-hs
|
Network/Syncthing/Types/Ignore.hs
|
bsd-3-clause
| 697
| 0
| 10
| 216
| 181
| 110
| 71
| 16
| 0
|
{-
(c) The University of Glasgow 2006
Functions for working with the typechecker environment (setters, getters...).
-}
{-# LANGUAGE CPP, ExplicitForAll, FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module TcRnMonad(
module TcRnMonad,
module TcRnTypes,
module IOEnv
) where
#include "HsVersions.h"
import TcRnTypes -- Re-export all
import IOEnv -- Re-export all
import TcEvidence
import HsSyn hiding (LIE)
import HscTypes
import Module
import RdrName
import Name
import Type
import TcType
import InstEnv
import FamInstEnv
import PrelNames
import Id
import VarSet
import VarEnv
import ErrUtils
import SrcLoc
import NameEnv
import NameSet
import Bag
import Outputable
import UniqSupply
import UniqFM
import DynFlags
import StaticFlags
import FastString
import Panic
import Util
import Annotations
import BasicTypes( TopLevelFlag )
import Control.Exception
import Data.IORef
import qualified Data.Set as Set
import Control.Monad
#ifdef GHCI
import qualified Data.Map as Map
#endif
{-
************************************************************************
* *
initTc
* *
************************************************************************
-}
-- | Setup the initial typechecking environment
initTc :: HscEnv
-> HscSource
-> Bool -- True <=> retain renamed syntax trees
-> Module
-> RealSrcSpan
-> TcM r
-> IO (Messages, Maybe r)
-- Nothing => error thrown by the thing inside
-- (error messages should have been printed already)
initTc hsc_env hsc_src keep_rn_syntax mod loc do_this
= do { errs_var <- newIORef (emptyBag, emptyBag) ;
tvs_var <- newIORef emptyVarSet ;
keep_var <- newIORef emptyNameSet ;
used_rdr_var <- newIORef Set.empty ;
th_var <- newIORef False ;
th_splice_var<- newIORef False ;
infer_var <- newIORef True ;
lie_var <- newIORef emptyWC ;
dfun_n_var <- newIORef emptyOccSet ;
type_env_var <- case hsc_type_env_var hsc_env of {
Just (_mod, te_var) -> return te_var ;
Nothing -> newIORef emptyNameEnv } ;
dependent_files_var <- newIORef [] ;
static_wc_var <- newIORef emptyWC ;
#ifdef GHCI
th_topdecls_var <- newIORef [] ;
th_topnames_var <- newIORef emptyNameSet ;
th_modfinalizers_var <- newIORef [] ;
th_state_var <- newIORef Map.empty ;
#endif /* GHCI */
let {
dflags = hsc_dflags hsc_env ;
maybe_rn_syntax :: forall a. a -> Maybe a ;
maybe_rn_syntax empty_val
| keep_rn_syntax = Just empty_val
| otherwise = Nothing ;
gbl_env = TcGblEnv {
#ifdef GHCI
tcg_th_topdecls = th_topdecls_var,
tcg_th_topnames = th_topnames_var,
tcg_th_modfinalizers = th_modfinalizers_var,
tcg_th_state = th_state_var,
#endif /* GHCI */
tcg_mod = mod,
tcg_src = hsc_src,
tcg_sig_of = getSigOf dflags (moduleName mod),
tcg_impl_rdr_env = Nothing,
tcg_rdr_env = emptyGlobalRdrEnv,
tcg_fix_env = emptyNameEnv,
tcg_field_env = RecFields emptyNameEnv emptyNameSet,
tcg_default = Nothing,
tcg_type_env = emptyNameEnv,
tcg_type_env_var = type_env_var,
tcg_inst_env = emptyInstEnv,
tcg_fam_inst_env = emptyFamInstEnv,
tcg_ann_env = emptyAnnEnv,
tcg_visible_orphan_mods = mkModuleSet [mod],
tcg_th_used = th_var,
tcg_th_splice_used = th_splice_var,
tcg_exports = [],
tcg_imports = emptyImportAvails,
tcg_used_rdrnames = used_rdr_var,
tcg_dus = emptyDUs,
tcg_rn_imports = [],
tcg_rn_exports = maybe_rn_syntax [],
tcg_rn_decls = maybe_rn_syntax emptyRnGroup,
tcg_binds = emptyLHsBinds,
tcg_imp_specs = [],
tcg_sigs = emptyNameSet,
tcg_ev_binds = emptyBag,
tcg_warns = NoWarnings,
tcg_anns = [],
tcg_tcs = [],
tcg_insts = [],
tcg_fam_insts = [],
tcg_rules = [],
tcg_fords = [],
tcg_vects = [],
tcg_patsyns = [],
tcg_dfun_n = dfun_n_var,
tcg_keep = keep_var,
tcg_doc_hdr = Nothing,
tcg_hpc = False,
tcg_main = Nothing,
tcg_safeInfer = infer_var,
tcg_dependent_files = dependent_files_var,
tcg_tc_plugins = [],
tcg_static_wc = static_wc_var
} ;
lcl_env = TcLclEnv {
tcl_errs = errs_var,
tcl_loc = loc, -- Should be over-ridden very soon!
tcl_ctxt = [],
tcl_rdr = emptyLocalRdrEnv,
tcl_th_ctxt = topStage,
tcl_th_bndrs = emptyNameEnv,
tcl_arrow_ctxt = NoArrowCtxt,
tcl_env = emptyNameEnv,
tcl_bndrs = [],
tcl_tidy = emptyTidyEnv,
tcl_tyvars = tvs_var,
tcl_lie = lie_var,
tcl_tclvl = topTcLevel
} ;
} ;
-- OK, here's the business end!
maybe_res <- initTcRnIf 'a' hsc_env gbl_env lcl_env $
do { r <- tryM do_this
; case r of
Right res -> return (Just res)
Left _ -> return Nothing } ;
-- Check for unsolved constraints
lie <- readIORef lie_var ;
if isEmptyWC lie
then return ()
else pprPanic "initTc: unsolved constraints" (ppr lie) ;
-- Collect any error messages
msgs <- readIORef errs_var ;
let { final_res | errorsFound dflags msgs = Nothing
| otherwise = maybe_res } ;
return (msgs, final_res)
}
initTcInteractive :: HscEnv -> TcM a -> IO (Messages, Maybe a)
-- Initialise the type checker monad for use in GHCi
initTcInteractive hsc_env thing_inside
= initTc hsc_env HsSrcFile False
(icInteractiveModule (hsc_IC hsc_env))
(realSrcLocSpan interactive_src_loc)
thing_inside
where
interactive_src_loc = mkRealSrcLoc (fsLit "<interactive>") 1 1
initTcForLookup :: HscEnv -> TcM a -> IO a
-- The thing_inside is just going to look up something
-- in the environment, so we don't need much setup
initTcForLookup hsc_env thing_inside
= do { (msgs, m) <- initTcInteractive hsc_env thing_inside
; case m of
Nothing -> throwIO $ mkSrcErr $ snd msgs
Just x -> return x }
{-
************************************************************************
* *
Initialisation
* *
************************************************************************
-}
initTcRnIf :: Char -- Tag for unique supply
-> HscEnv
-> gbl -> lcl
-> TcRnIf gbl lcl a
-> IO a
initTcRnIf uniq_tag hsc_env gbl_env lcl_env thing_inside
= do { us <- mkSplitUniqSupply uniq_tag ;
; us_var <- newIORef us ;
; let { env = Env { env_top = hsc_env,
env_us = us_var,
env_gbl = gbl_env,
env_lcl = lcl_env} }
; runIOEnv env thing_inside
}
{-
************************************************************************
* *
Simple accessors
* *
************************************************************************
-}
discardResult :: TcM a -> TcM ()
discardResult a = a >> return ()
getTopEnv :: TcRnIf gbl lcl HscEnv
getTopEnv = do { env <- getEnv; return (env_top env) }
getGblEnv :: TcRnIf gbl lcl gbl
getGblEnv = do { env <- getEnv; return (env_gbl env) }
updGblEnv :: (gbl -> gbl) -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
updGblEnv upd = updEnv (\ env@(Env { env_gbl = gbl }) ->
env { env_gbl = upd gbl })
setGblEnv :: gbl -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
setGblEnv gbl_env = updEnv (\ env -> env { env_gbl = gbl_env })
getLclEnv :: TcRnIf gbl lcl lcl
getLclEnv = do { env <- getEnv; return (env_lcl env) }
updLclEnv :: (lcl -> lcl) -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
updLclEnv upd = updEnv (\ env@(Env { env_lcl = lcl }) ->
env { env_lcl = upd lcl })
setLclEnv :: lcl' -> TcRnIf gbl lcl' a -> TcRnIf gbl lcl a
setLclEnv lcl_env = updEnv (\ env -> env { env_lcl = lcl_env })
getEnvs :: TcRnIf gbl lcl (gbl, lcl)
getEnvs = do { env <- getEnv; return (env_gbl env, env_lcl env) }
setEnvs :: (gbl', lcl') -> TcRnIf gbl' lcl' a -> TcRnIf gbl lcl a
setEnvs (gbl_env, lcl_env) = updEnv (\ env -> env { env_gbl = gbl_env, env_lcl = lcl_env })
-- Command-line flags
xoptM :: ExtensionFlag -> TcRnIf gbl lcl Bool
xoptM flag = do { dflags <- getDynFlags; return (xopt flag dflags) }
doptM :: DumpFlag -> TcRnIf gbl lcl Bool
doptM flag = do { dflags <- getDynFlags; return (dopt flag dflags) }
goptM :: GeneralFlag -> TcRnIf gbl lcl Bool
goptM flag = do { dflags <- getDynFlags; return (gopt flag dflags) }
woptM :: WarningFlag -> TcRnIf gbl lcl Bool
woptM flag = do { dflags <- getDynFlags; return (wopt flag dflags) }
setXOptM :: ExtensionFlag -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
setXOptM flag = updEnv (\ env@(Env { env_top = top }) ->
env { env_top = top { hsc_dflags = xopt_set (hsc_dflags top) flag}} )
unsetGOptM :: GeneralFlag -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
unsetGOptM flag = updEnv (\ env@(Env { env_top = top }) ->
env { env_top = top { hsc_dflags = gopt_unset (hsc_dflags top) flag}} )
unsetWOptM :: WarningFlag -> TcRnIf gbl lcl a -> TcRnIf gbl lcl a
unsetWOptM flag = updEnv (\ env@(Env { env_top = top }) ->
env { env_top = top { hsc_dflags = wopt_unset (hsc_dflags top) flag}} )
-- | Do the thing if the flag is true
whenDOptM :: DumpFlag -> TcRnIf gbl lcl () -> TcRnIf gbl lcl ()
whenDOptM flag thing_inside = do b <- doptM flag
when b thing_inside
whenGOptM :: GeneralFlag -> TcRnIf gbl lcl () -> TcRnIf gbl lcl ()
whenGOptM flag thing_inside = do b <- goptM flag
when b thing_inside
whenWOptM :: WarningFlag -> TcRnIf gbl lcl () -> TcRnIf gbl lcl ()
whenWOptM flag thing_inside = do b <- woptM flag
when b thing_inside
whenXOptM :: ExtensionFlag -> TcRnIf gbl lcl () -> TcRnIf gbl lcl ()
whenXOptM flag thing_inside = do b <- xoptM flag
when b thing_inside
getGhcMode :: TcRnIf gbl lcl GhcMode
getGhcMode = do { env <- getTopEnv; return (ghcMode (hsc_dflags env)) }
withDoDynamicToo :: TcRnIf gbl lcl a -> TcRnIf gbl lcl a
withDoDynamicToo m = do env <- getEnv
let dflags = extractDynFlags env
dflags' = dynamicTooMkDynamicDynFlags dflags
env' = replaceDynFlags env dflags'
setEnv env' m
getEpsVar :: TcRnIf gbl lcl (TcRef ExternalPackageState)
getEpsVar = do { env <- getTopEnv; return (hsc_EPS env) }
getEps :: TcRnIf gbl lcl ExternalPackageState
getEps = do { env <- getTopEnv; readMutVar (hsc_EPS env) }
-- | Update the external package state. Returns the second result of the
-- modifier function.
--
-- This is an atomic operation and forces evaluation of the modified EPS in
-- order to avoid space leaks.
updateEps :: (ExternalPackageState -> (ExternalPackageState, a))
-> TcRnIf gbl lcl a
updateEps upd_fn = do
traceIf (text "updating EPS")
eps_var <- getEpsVar
atomicUpdMutVar' eps_var upd_fn
-- | Update the external package state.
--
-- This is an atomic operation and forces evaluation of the modified EPS in
-- order to avoid space leaks.
updateEps_ :: (ExternalPackageState -> ExternalPackageState)
-> TcRnIf gbl lcl ()
updateEps_ upd_fn = do
traceIf (text "updating EPS_")
eps_var <- getEpsVar
atomicUpdMutVar' eps_var (\eps -> (upd_fn eps, ()))
getHpt :: TcRnIf gbl lcl HomePackageTable
getHpt = do { env <- getTopEnv; return (hsc_HPT env) }
getEpsAndHpt :: TcRnIf gbl lcl (ExternalPackageState, HomePackageTable)
getEpsAndHpt = do { env <- getTopEnv; eps <- readMutVar (hsc_EPS env)
; return (eps, hsc_HPT env) }
{-
************************************************************************
* *
Arrow scopes
* *
************************************************************************
-}
newArrowScope :: TcM a -> TcM a
newArrowScope
= updLclEnv $ \env -> env { tcl_arrow_ctxt = ArrowCtxt (tcl_rdr env) (tcl_lie env) }
-- Return to the stored environment (from the enclosing proc)
escapeArrowScope :: TcM a -> TcM a
escapeArrowScope
= updLclEnv $ \ env ->
case tcl_arrow_ctxt env of
NoArrowCtxt -> env
ArrowCtxt rdr_env lie -> env { tcl_arrow_ctxt = NoArrowCtxt
, tcl_lie = lie
, tcl_rdr = rdr_env }
{-
************************************************************************
* *
Unique supply
* *
************************************************************************
-}
newUnique :: TcRnIf gbl lcl Unique
newUnique
= do { env <- getEnv ;
let { u_var = env_us env } ;
us <- readMutVar u_var ;
case takeUniqFromSupply us of { (uniq, us') -> do {
writeMutVar u_var us' ;
return $! uniq }}}
-- NOTE 1: we strictly split the supply, to avoid the possibility of leaving
-- a chain of unevaluated supplies behind.
-- NOTE 2: we use the uniq in the supply from the MutVar directly, and
-- throw away one half of the new split supply. This is safe because this
-- is the only place we use that unique. Using the other half of the split
-- supply is safer, but slower.
newUniqueSupply :: TcRnIf gbl lcl UniqSupply
newUniqueSupply
= do { env <- getEnv ;
let { u_var = env_us env } ;
us <- readMutVar u_var ;
case splitUniqSupply us of { (us1,us2) -> do {
writeMutVar u_var us1 ;
return us2 }}}
newLocalName :: Name -> TcM Name
newLocalName name = newName (nameOccName name)
newName :: OccName -> TcM Name
newName occ
= do { uniq <- newUnique
; loc <- getSrcSpanM
; return (mkInternalName uniq occ loc) }
newSysName :: OccName -> TcM Name
newSysName occ
= do { uniq <- newUnique
; return (mkSystemName uniq occ) }
newSysLocalId :: FastString -> TcType -> TcRnIf gbl lcl TcId
newSysLocalId fs ty
= do { u <- newUnique
; return (mkSysLocal fs u ty) }
newSysLocalIds :: FastString -> [TcType] -> TcRnIf gbl lcl [TcId]
newSysLocalIds fs tys
= do { us <- newUniqueSupply
; return (zipWith (mkSysLocal fs) (uniqsFromSupply us) tys) }
instance MonadUnique (IOEnv (Env gbl lcl)) where
getUniqueM = newUnique
getUniqueSupplyM = newUniqueSupply
{-
************************************************************************
* *
                Mutable variables
* *
************************************************************************
-}
newTcRef :: a -> TcRnIf gbl lcl (TcRef a)
newTcRef = newMutVar
readTcRef :: TcRef a -> TcRnIf gbl lcl a
readTcRef = readMutVar
writeTcRef :: TcRef a -> a -> TcRnIf gbl lcl ()
writeTcRef = writeMutVar
updTcRef :: TcRef a -> (a -> a) -> TcRnIf gbl lcl ()
-- Returns ()
updTcRef ref fn = liftIO $ do { old <- readIORef ref
; writeIORef ref (fn old) }
updTcRefX :: TcRef a -> (a -> a) -> TcRnIf gbl lcl a
-- Returns previous value
updTcRefX ref fn = liftIO $ do { old <- readIORef ref
; writeIORef ref (fn old)
; return old }
{-
************************************************************************
* *
Debugging
* *
************************************************************************
-}
traceTc :: String -> SDoc -> TcRn ()
traceTc herald doc = traceTcN 1 (hang (text herald) 2 doc)
-- | Typechecker trace
traceTcN :: Int -> SDoc -> TcRn ()
traceTcN level doc
= do dflags <- getDynFlags
when (level <= traceLevel dflags && not opt_NoDebugOutput) $
traceOptTcRn Opt_D_dump_tc_trace doc
traceRn :: SDoc -> TcRn ()
traceRn = traceOptTcRn Opt_D_dump_rn_trace -- Renamer Trace
-- | Output a doc if the given 'DumpFlag' is set.
--
-- By default this logs to stdout
-- However, if the `-ddump-to-file` flag is set,
-- then this will dump output to a file
--
-- Just a wrapper for 'dumpSDoc'
traceOptTcRn :: DumpFlag -> SDoc -> TcRn ()
traceOptTcRn flag doc
= do { dflags <- getDynFlags
; when (dopt flag dflags) (traceTcRn flag doc)
}
traceTcRn :: DumpFlag -> SDoc -> TcRn ()
-- ^ Unconditionally dump some trace output
--
-- The DumpFlag is used only to set the output filename
-- for -ddump-to-file, not to decide whether or not to output
-- That part is done by the caller
traceTcRn flag doc
= do { real_doc <- prettyDoc doc
; dflags <- getDynFlags
; printer <- getPrintUnqualified dflags
; liftIO $ dumpSDoc dflags printer flag "" real_doc }
where
-- Add current location if opt_PprStyle_Debug
prettyDoc :: SDoc -> TcRn SDoc
prettyDoc doc = if opt_PprStyle_Debug
then do { loc <- getSrcSpanM; return $ mkLocMessage SevOutput loc doc }
else return doc -- The full location is usually way too much
getPrintUnqualified :: DynFlags -> TcRn PrintUnqualified
getPrintUnqualified dflags
= do { rdr_env <- getGlobalRdrEnv
; return $ mkPrintUnqualified dflags rdr_env }
-- | Like logInfoTcRn, but for user consumption
printForUserTcRn :: SDoc -> TcRn ()
printForUserTcRn doc
= do { dflags <- getDynFlags
; printer <- getPrintUnqualified dflags
; liftIO (printInfoForUser dflags printer doc) }
-- | Typechecker debug
debugDumpTcRn :: SDoc -> TcRn ()
debugDumpTcRn doc = unless opt_NoDebugOutput $
traceOptTcRn Opt_D_dump_tc doc
{-
traceIf and traceHiDiffs work in the TcRnIf monad, where no RdrEnv is
available. Alas, they behave inconsistently with the other stuff;
e.g. are unaffected by -ddump-to-file.
-}
traceIf, traceHiDiffs :: SDoc -> TcRnIf m n ()
traceIf = traceOptIf Opt_D_dump_if_trace
traceHiDiffs = traceOptIf Opt_D_dump_hi_diffs
traceOptIf :: DumpFlag -> SDoc -> TcRnIf m n ()
traceOptIf flag doc
= whenDOptM flag $ -- No RdrEnv available, so qualify everything
do { dflags <- getDynFlags
; liftIO (putMsg dflags doc) }
{-
************************************************************************
* *
Typechecker global environment
* *
************************************************************************
-}
setModule :: Module -> TcRn a -> TcRn a
setModule mod thing_inside = updGblEnv (\env -> env { tcg_mod = mod }) thing_inside
getIsGHCi :: TcRn Bool
getIsGHCi = do { mod <- getModule
; return (isInteractiveModule mod) }
getGHCiMonad :: TcRn Name
getGHCiMonad = do { hsc <- getTopEnv; return (ic_monad $ hsc_IC hsc) }
getInteractivePrintName :: TcRn Name
getInteractivePrintName = do { hsc <- getTopEnv; return (ic_int_print $ hsc_IC hsc) }
tcIsHsBootOrSig :: TcRn Bool
tcIsHsBootOrSig = do { env <- getGblEnv; return (isHsBootOrSig (tcg_src env)) }
getGlobalRdrEnv :: TcRn GlobalRdrEnv
getGlobalRdrEnv = do { env <- getGblEnv; return (tcg_rdr_env env) }
getRdrEnvs :: TcRn (GlobalRdrEnv, LocalRdrEnv)
getRdrEnvs = do { (gbl,lcl) <- getEnvs; return (tcg_rdr_env gbl, tcl_rdr lcl) }
getImports :: TcRn ImportAvails
getImports = do { env <- getGblEnv; return (tcg_imports env) }
getFixityEnv :: TcRn FixityEnv
getFixityEnv = do { env <- getGblEnv; return (tcg_fix_env env) }
extendFixityEnv :: [(Name,FixItem)] -> RnM a -> RnM a
extendFixityEnv new_bit
= updGblEnv (\env@(TcGblEnv { tcg_fix_env = old_fix_env }) ->
env {tcg_fix_env = extendNameEnvList old_fix_env new_bit})
getRecFieldEnv :: TcRn RecFieldEnv
getRecFieldEnv = do { env <- getGblEnv; return (tcg_field_env env) }
getDeclaredDefaultTys :: TcRn (Maybe [Type])
getDeclaredDefaultTys = do { env <- getGblEnv; return (tcg_default env) }
addDependentFiles :: [FilePath] -> TcRn ()
addDependentFiles fs = do
ref <- fmap tcg_dependent_files getGblEnv
dep_files <- readTcRef ref
writeTcRef ref (fs ++ dep_files)
{-
************************************************************************
* *
Error management
* *
************************************************************************
-}
getSrcSpanM :: TcRn SrcSpan
-- Avoid clash with Name.getSrcLoc
getSrcSpanM = do { env <- getLclEnv; return (RealSrcSpan (tcl_loc env)) }
setSrcSpan :: SrcSpan -> TcRn a -> TcRn a
setSrcSpan (RealSrcSpan real_loc) thing_inside
= updLclEnv (\env -> env { tcl_loc = real_loc }) thing_inside
-- Don't overwrite useful info with useless:
setSrcSpan (UnhelpfulSpan _) thing_inside = thing_inside
addLocM :: (a -> TcM b) -> Located a -> TcM b
addLocM fn (L loc a) = setSrcSpan loc $ fn a
wrapLocM :: (a -> TcM b) -> Located a -> TcM (Located b)
wrapLocM fn (L loc a) = setSrcSpan loc $ do b <- fn a; return (L loc b)
wrapLocFstM :: (a -> TcM (b,c)) -> Located a -> TcM (Located b, c)
wrapLocFstM fn (L loc a) =
setSrcSpan loc $ do
(b,c) <- fn a
return (L loc b, c)
wrapLocSndM :: (a -> TcM (b,c)) -> Located a -> TcM (b, Located c)
wrapLocSndM fn (L loc a) =
setSrcSpan loc $ do
(b,c) <- fn a
return (b, L loc c)
-- Reporting errors
getErrsVar :: TcRn (TcRef Messages)
getErrsVar = do { env <- getLclEnv; return (tcl_errs env) }
setErrsVar :: TcRef Messages -> TcRn a -> TcRn a
setErrsVar v = updLclEnv (\ env -> env { tcl_errs = v })
addErr :: MsgDoc -> TcRn () -- Ignores the context stack
addErr msg = do { loc <- getSrcSpanM; addErrAt loc msg }
failWith :: MsgDoc -> TcRn a
failWith msg = addErr msg >> failM
addErrAt :: SrcSpan -> MsgDoc -> TcRn ()
-- addErrAt is mainly (exclusively?) used by the renamer, where
-- tidying is not an issue, but it's all lazy so the extra
-- work doesn't matter
addErrAt loc msg = do { ctxt <- getErrCtxt
; tidy_env <- tcInitTidyEnv
; err_info <- mkErrInfo tidy_env ctxt
; addLongErrAt loc msg err_info }
addErrs :: [(SrcSpan,MsgDoc)] -> TcRn ()
addErrs msgs = mapM_ add msgs
where
add (loc,msg) = addErrAt loc msg
checkErr :: Bool -> MsgDoc -> TcRn ()
-- Add the error if the bool is False
checkErr ok msg = unless ok (addErr msg)
warnIf :: Bool -> MsgDoc -> TcRn ()
warnIf True msg = addWarn msg
warnIf False _ = return ()
addMessages :: Messages -> TcRn ()
addMessages (m_warns, m_errs)
= do { errs_var <- getErrsVar ;
(warns, errs) <- readTcRef errs_var ;
writeTcRef errs_var (warns `unionBags` m_warns,
errs `unionBags` m_errs) }
discardWarnings :: TcRn a -> TcRn a
-- Ignore warnings inside the thing inside;
-- used to ignore-unused-variable warnings inside derived code
discardWarnings thing_inside
= do { errs_var <- getErrsVar
; (old_warns, _) <- readTcRef errs_var ;
; result <- thing_inside
-- Revert warnings to old_warns
; (_new_warns, new_errs) <- readTcRef errs_var
; writeTcRef errs_var (old_warns, new_errs)
; return result }
{-
************************************************************************
* *
Shared error message stuff: renamer and typechecker
* *
************************************************************************
-}
mkLongErrAt :: SrcSpan -> MsgDoc -> MsgDoc -> TcRn ErrMsg
mkLongErrAt loc msg extra
= do { dflags <- getDynFlags ;
printer <- getPrintUnqualified dflags ;
return $ mkLongErrMsg dflags loc printer msg extra }
addLongErrAt :: SrcSpan -> MsgDoc -> MsgDoc -> TcRn ()
addLongErrAt loc msg extra = mkLongErrAt loc msg extra >>= reportError
reportErrors :: [ErrMsg] -> TcM ()
reportErrors = mapM_ reportError
reportError :: ErrMsg -> TcRn ()
reportError err
= do { traceTc "Adding error:" (pprLocErrMsg err) ;
errs_var <- getErrsVar ;
(warns, errs) <- readTcRef errs_var ;
writeTcRef errs_var (warns, errs `snocBag` err) }
reportWarning :: ErrMsg -> TcRn ()
reportWarning err
= do { let warn = makeIntoWarning err
-- 'err' was built by mkLongErrMsg or something like that,
-- so it's of error severity. For a warning we downgrade
-- its severity to SevWarning
; traceTc "Adding warning:" (pprLocErrMsg warn)
; errs_var <- getErrsVar
; (warns, errs) <- readTcRef errs_var
; writeTcRef errs_var (warns `snocBag` warn, errs) }
try_m :: TcRn r -> TcRn (Either IOEnvFailure r)
-- Does try_m, with a debug-trace on failure
try_m thing
= do { mb_r <- tryM thing ;
case mb_r of
Left exn -> do { traceTc "tryTc/recoverM recovering from" $
text (showException exn)
; return mb_r }
Right _ -> return mb_r }
-----------------------
recoverM :: TcRn r -- Recovery action; do this if the main one fails
-> TcRn r -- Main action: do this first
-> TcRn r
-- Errors in 'thing' are retained
recoverM recover thing
= do { mb_res <- try_m thing ;
case mb_res of
Left _ -> recover
Right res -> return res }
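-- A typical use (illustrative only; 'tc_alt' and 'fallback' are made-up
-- names, not bindings from this module):
--
--      recoverM (return fallback) (tc_alt alt)
--
-- tries the main action and, if it fails, substitutes the fallback so that
-- error recovery can continue; errors already reported by the failed attempt
-- are retained.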
-----------------------
mapAndRecoverM :: (a -> TcRn b) -> [a] -> TcRn [b]
-- Drop elements of the input that fail, so the result
-- list can be shorter than the argument list
mapAndRecoverM _ [] = return []
mapAndRecoverM f (x:xs) = do { mb_r <- try_m (f x)
; rs <- mapAndRecoverM f xs
; return (case mb_r of
Left _ -> rs
Right r -> r:rs) }
-- | Succeeds if applying the argument to all members of the list succeeds,
-- but nevertheless runs it on all arguments, to collect all errors.
mapAndReportM :: (a -> TcRn b) -> [a] -> TcRn [b]
mapAndReportM f xs = checkNoErrs (mapAndRecoverM f xs)
-----------------------
tryTc :: TcRn a -> TcRn (Messages, Maybe a)
-- (tryTc m) executes m, and returns
-- Just r, if m succeeds (returning r)
-- Nothing, if m fails
-- It also returns all the errors and warnings accumulated by m
-- It always succeeds (never raises an exception)
tryTc m
= do { errs_var <- newTcRef emptyMessages ;
res <- try_m (setErrsVar errs_var m) ;
msgs <- readTcRef errs_var ;
return (msgs, case res of
Left _ -> Nothing
Right val -> Just val)
-- The exception is always the IOEnv built-in
-- exception; see IOEnv.failM
}
-----------------------
tryTcErrs :: TcRn a -> TcRn (Messages, Maybe a)
-- Run the thing, returning
-- Just r, if m succeeds with no error messages
-- Nothing, if m fails, or if it succeeds but has error messages
-- Either way, the messages are returned; even in the Just case
-- there might be warnings
tryTcErrs thing
= do { (msgs, res) <- tryTc thing
; dflags <- getDynFlags
; let errs_found = errorsFound dflags msgs
; return (msgs, case res of
Nothing -> Nothing
Just val | errs_found -> Nothing
| otherwise -> Just val)
}
-----------------------
tryTcLIE :: TcM a -> TcM (Messages, Maybe a)
-- Just like tryTcErrs, except that it ensures that the LIE
-- for the thing is propagated only if there are no errors
-- Hence it's restricted to the type-check monad
tryTcLIE thing_inside
= do { ((msgs, mb_res), lie) <- captureConstraints (tryTcErrs thing_inside) ;
; case mb_res of
Nothing -> return (msgs, Nothing)
Just val -> do { emitConstraints lie; return (msgs, Just val) }
}
-----------------------
tryTcLIE_ :: TcM r -> TcM r -> TcM r
-- (tryTcLIE_ r m) tries m;
-- if m succeeds with no error messages, it's the answer
-- otherwise tryTcLIE_ drops everything from m and tries r instead.
tryTcLIE_ recover main
= do { (msgs, mb_res) <- tryTcLIE main
; case mb_res of
Just val -> do { addMessages msgs -- There might be warnings
; return val }
Nothing -> recover -- Discard all msgs
}
-----------------------
checkNoErrs :: TcM r -> TcM r
-- (checkNoErrs m) succeeds iff m succeeds and generates no errors
-- If m fails then (checkNoErrs m) fails.
-- If m succeeds, it checks whether m generated any error messages
-- (it might have recovered internally)
-- If so, it fails too.
-- Regardless, any errors generated by m are propagated to the enclosing context.
checkNoErrs main
= do { (msgs, mb_res) <- tryTcLIE main
; addMessages msgs
; case mb_res of
Nothing -> failM
Just val -> return val
}
whenNoErrs :: TcM () -> TcM ()
whenNoErrs thing = ifErrsM (return ()) thing
ifErrsM :: TcRn r -> TcRn r -> TcRn r
-- ifErrsM bale_out normal
-- does 'bale_out' if there are errors in errors collection
-- otherwise does 'normal'
ifErrsM bale_out normal
= do { errs_var <- getErrsVar ;
msgs <- readTcRef errs_var ;
dflags <- getDynFlags ;
if errorsFound dflags msgs then
bale_out
else
normal }
failIfErrsM :: TcRn ()
-- Useful to avoid error cascades
failIfErrsM = ifErrsM failM (return ())
#ifdef GHCI
checkTH :: a -> String -> TcRn ()
checkTH _ _ = return () -- OK
#else
checkTH :: Outputable a => a -> String -> TcRn ()
checkTH e what = failTH e what -- Raise an error in a stage-1 compiler
#endif
failTH :: Outputable a => a -> String -> TcRn x
failTH e what -- Raise an error in a stage-1 compiler
= failWithTc (vcat [ hang (char 'A' <+> text what
<+> ptext (sLit "requires GHC with interpreter support:"))
2 (ppr e)
, ptext (sLit "Perhaps you are using a stage-1 compiler?") ])
{-
************************************************************************
* *
Context management for the type checker
* *
************************************************************************
-}
getErrCtxt :: TcM [ErrCtxt]
getErrCtxt = do { env <- getLclEnv; return (tcl_ctxt env) }
setErrCtxt :: [ErrCtxt] -> TcM a -> TcM a
setErrCtxt ctxt = updLclEnv (\ env -> env { tcl_ctxt = ctxt })
addErrCtxt :: MsgDoc -> TcM a -> TcM a
addErrCtxt msg = addErrCtxtM (\env -> return (env, msg))
addErrCtxtM :: (TidyEnv -> TcM (TidyEnv, MsgDoc)) -> TcM a -> TcM a
addErrCtxtM ctxt = updCtxt (\ ctxts -> (False, ctxt) : ctxts)
addLandmarkErrCtxt :: MsgDoc -> TcM a -> TcM a
addLandmarkErrCtxt msg = updCtxt (\ctxts -> (True, \env -> return (env,msg)) : ctxts)
-- Helper function for the above
updCtxt :: ([ErrCtxt] -> [ErrCtxt]) -> TcM a -> TcM a
updCtxt upd = updLclEnv (\ env@(TcLclEnv { tcl_ctxt = ctxt }) ->
env { tcl_ctxt = upd ctxt })
popErrCtxt :: TcM a -> TcM a
popErrCtxt = updCtxt (\ msgs -> case msgs of { [] -> []; (_ : ms) -> ms })
getCtLoc :: CtOrigin -> TcM CtLoc
getCtLoc origin
= do { env <- getLclEnv
; return (CtLoc { ctl_origin = origin
, ctl_env = env
, ctl_depth = initialSubGoalDepth }) }
setCtLoc :: CtLoc -> TcM a -> TcM a
-- Set the SrcSpan and error context from the CtLoc
setCtLoc (CtLoc { ctl_env = lcl }) thing_inside
= updLclEnv (\env -> env { tcl_loc = tcl_loc lcl
, tcl_bndrs = tcl_bndrs lcl
, tcl_ctxt = tcl_ctxt lcl })
thing_inside
{-
************************************************************************
* *
Error message generation (type checker)
* *
************************************************************************
The addErrTc functions add an error message, but do not cause failure.
The 'M' variants pass a TidyEnv that has already been used to
tidy up the message; we then use it to tidy the context messages
-}
addErrTc :: MsgDoc -> TcM ()
addErrTc err_msg = do { env0 <- tcInitTidyEnv
; addErrTcM (env0, err_msg) }
addErrsTc :: [MsgDoc] -> TcM ()
addErrsTc err_msgs = mapM_ addErrTc err_msgs
addErrTcM :: (TidyEnv, MsgDoc) -> TcM ()
addErrTcM (tidy_env, err_msg)
= do { ctxt <- getErrCtxt ;
loc <- getSrcSpanM ;
add_err_tcm tidy_env err_msg loc ctxt }
-- Return the error message, instead of reporting it straight away
mkErrTcM :: (TidyEnv, MsgDoc) -> TcM ErrMsg
mkErrTcM (tidy_env, err_msg)
= do { ctxt <- getErrCtxt ;
loc <- getSrcSpanM ;
err_info <- mkErrInfo tidy_env ctxt ;
mkLongErrAt loc err_msg err_info }
-- The failWith functions add an error message and cause failure
failWithTc :: MsgDoc -> TcM a -- Add an error message and fail
failWithTc err_msg
= addErrTc err_msg >> failM
failWithTcM :: (TidyEnv, MsgDoc) -> TcM a -- Add an error message and fail
failWithTcM local_and_msg
= addErrTcM local_and_msg >> failM
checkTc :: Bool -> MsgDoc -> TcM () -- Check that the boolean is true
checkTc True _ = return ()
checkTc False err = failWithTc err
-- Warnings have no 'M' variant, nor failure
warnTc :: Bool -> MsgDoc -> TcM ()
warnTc warn_if_true warn_msg
| warn_if_true = addWarnTc warn_msg
| otherwise = return ()
addWarnTc :: MsgDoc -> TcM ()
addWarnTc msg = do { env0 <- tcInitTidyEnv
; addWarnTcM (env0, msg) }
addWarnTcM :: (TidyEnv, MsgDoc) -> TcM ()
addWarnTcM (env0, msg)
= do { ctxt <- getErrCtxt ;
err_info <- mkErrInfo env0 ctxt ;
add_warn msg err_info }
addWarn :: MsgDoc -> TcRn ()
addWarn msg = add_warn msg Outputable.empty
addWarnAt :: SrcSpan -> MsgDoc -> TcRn ()
addWarnAt loc msg = add_warn_at loc msg Outputable.empty
add_warn :: MsgDoc -> MsgDoc -> TcRn ()
add_warn msg extra_info
= do { loc <- getSrcSpanM
; add_warn_at loc msg extra_info }
add_warn_at :: SrcSpan -> MsgDoc -> MsgDoc -> TcRn ()
add_warn_at loc msg extra_info
= do { dflags <- getDynFlags ;
printer <- getPrintUnqualified dflags ;
let { warn = mkLongWarnMsg dflags loc printer
msg extra_info } ;
reportWarning warn }
tcInitTidyEnv :: TcM TidyEnv
tcInitTidyEnv
= do { lcl_env <- getLclEnv
; return (tcl_tidy lcl_env) }
{-
-----------------------------------
Other helper functions
-}
add_err_tcm :: TidyEnv -> MsgDoc -> SrcSpan
-> [ErrCtxt]
-> TcM ()
add_err_tcm tidy_env err_msg loc ctxt
= do { err_info <- mkErrInfo tidy_env ctxt ;
addLongErrAt loc err_msg err_info }
mkErrInfo :: TidyEnv -> [ErrCtxt] -> TcM SDoc
-- Tidy the error info, trimming excessive contexts
mkErrInfo env ctxts
-- | opt_PprStyle_Debug -- In -dppr-debug style the output
-- = return empty -- just becomes too voluminous
| otherwise
= go 0 env ctxts
where
go :: Int -> TidyEnv -> [ErrCtxt] -> TcM SDoc
go _ _ [] = return Outputable.empty
go n env ((is_landmark, ctxt) : ctxts)
| is_landmark || n < mAX_CONTEXTS -- Too verbose || opt_PprStyle_Debug
= do { (env', msg) <- ctxt env
; let n' = if is_landmark then n else n+1
; rest <- go n' env' ctxts
; return (msg $$ rest) }
| otherwise
= go n env ctxts
mAX_CONTEXTS :: Int -- No more than this number of non-landmark contexts
mAX_CONTEXTS = 3
-- debugTc is useful for monadic debugging code
debugTc :: TcM () -> TcM ()
debugTc thing
| debugIsOn = thing
| otherwise = return ()
{-
************************************************************************
* *
Type constraints
* *
************************************************************************
-}
newTcEvBinds :: TcM EvBindsVar
newTcEvBinds = do { ref <- newTcRef emptyEvBindMap
; uniq <- newUnique
; return (EvBindsVar ref uniq) }
addTcEvBind :: EvBindsVar -> EvBind -> TcM ()
-- Add a binding to the TcEvBinds by side effect
addTcEvBind (EvBindsVar ev_ref _) ev_bind
= do { traceTc "addTcEvBind" $ ppr ev_bind
; bnds <- readTcRef ev_ref
; writeTcRef ev_ref (extendEvBinds bnds ev_bind) }
getTcEvBinds :: EvBindsVar -> TcM (Bag EvBind)
getTcEvBinds (EvBindsVar ev_ref _)
= do { bnds <- readTcRef ev_ref
; return (evBindMapBinds bnds) }
chooseUniqueOccTc :: (OccSet -> OccName) -> TcM OccName
chooseUniqueOccTc fn =
do { env <- getGblEnv
; let dfun_n_var = tcg_dfun_n env
; set <- readTcRef dfun_n_var
; let occ = fn set
; writeTcRef dfun_n_var (extendOccSet set occ)
; return occ }
getConstraintVar :: TcM (TcRef WantedConstraints)
getConstraintVar = do { env <- getLclEnv; return (tcl_lie env) }
setConstraintVar :: TcRef WantedConstraints -> TcM a -> TcM a
setConstraintVar lie_var = updLclEnv (\ env -> env { tcl_lie = lie_var })
emitConstraints :: WantedConstraints -> TcM ()
emitConstraints ct
= do { lie_var <- getConstraintVar ;
updTcRef lie_var (`andWC` ct) }
emitSimple :: Ct -> TcM ()
emitSimple ct
= do { lie_var <- getConstraintVar ;
updTcRef lie_var (`addSimples` unitBag ct) }
emitSimples :: Cts -> TcM ()
emitSimples cts
= do { lie_var <- getConstraintVar ;
updTcRef lie_var (`addSimples` cts) }
emitImplication :: Implication -> TcM ()
emitImplication ct
= do { lie_var <- getConstraintVar ;
updTcRef lie_var (`addImplics` unitBag ct) }
emitImplications :: Bag Implication -> TcM ()
emitImplications ct
= do { lie_var <- getConstraintVar ;
updTcRef lie_var (`addImplics` ct) }
emitInsoluble :: Ct -> TcM ()
emitInsoluble ct
= do { lie_var <- getConstraintVar ;
updTcRef lie_var (`addInsols` unitBag ct) ;
v <- readTcRef lie_var ;
traceTc "emitInsoluble" (ppr v) }
captureConstraints :: TcM a -> TcM (a, WantedConstraints)
-- (captureConstraints m) runs m, and returns the type constraints it generates
captureConstraints thing_inside
= do { lie_var <- newTcRef emptyWC ;
res <- updLclEnv (\ env -> env { tcl_lie = lie_var })
thing_inside ;
lie <- readTcRef lie_var ;
return (res, lie) }
pushLevelAndCaptureConstraints :: TcM a -> TcM (a, TcLevel, WantedConstraints)
pushLevelAndCaptureConstraints thing_inside
= do { env <- getLclEnv
; lie_var <- newTcRef emptyWC ;
; let tclvl' = pushTcLevel (tcl_tclvl env)
; res <- setLclEnv (env { tcl_tclvl = tclvl'
, tcl_lie = lie_var })
thing_inside
; lie <- readTcRef lie_var
; return (res, tclvl', lie) }
pushTcLevelM_ :: TcM a -> TcM a
pushTcLevelM_ = updLclEnv (\ env -> env { tcl_tclvl = pushTcLevel (tcl_tclvl env) })
pushTcLevelM :: TcM a -> TcM (a, TcLevel)
pushTcLevelM thing_inside
= do { env <- getLclEnv
; let tclvl' = pushTcLevel (tcl_tclvl env)
; res <- setLclEnv (env { tcl_tclvl = tclvl' })
thing_inside
; return (res, tclvl') }
getTcLevel :: TcM TcLevel
getTcLevel = do { env <- getLclEnv
; return (tcl_tclvl env) }
setTcLevel :: TcLevel -> TcM a -> TcM a
setTcLevel tclvl thing_inside
= updLclEnv (\env -> env { tcl_tclvl = tclvl }) thing_inside
isTouchableTcM :: TcTyVar -> TcM Bool
isTouchableTcM tv
= do { env <- getLclEnv
; return (isTouchableMetaTyVar (tcl_tclvl env) tv) }
getLclTypeEnv :: TcM TcTypeEnv
getLclTypeEnv = do { env <- getLclEnv; return (tcl_env env) }
setLclTypeEnv :: TcLclEnv -> TcM a -> TcM a
-- Set the local type envt, but do *not* disturb other fields,
-- notably the lie_var
setLclTypeEnv lcl_env thing_inside
= updLclEnv upd thing_inside
where
upd env = env { tcl_env = tcl_env lcl_env,
tcl_tyvars = tcl_tyvars lcl_env }
traceTcConstraints :: String -> TcM ()
traceTcConstraints msg
= do { lie_var <- getConstraintVar
; lie <- readTcRef lie_var
; traceTc (msg ++ ": LIE:") (ppr lie)
}
emitWildcardHoleConstraints :: [(Name, TcTyVar)] -> TcM ()
emitWildcardHoleConstraints wcs
= do { ctLoc <- getCtLoc HoleOrigin
; forM_ wcs $ \(name, tv) -> do {
; let real_span = case nameSrcSpan name of
RealSrcSpan span -> span
UnhelpfulSpan str -> pprPanic "emitWildcardHoleConstraints"
(ppr name <+> quotes (ftext str))
-- Wildcards are defined locally, and so have RealSrcSpans
ctLoc' = setCtLocSpan ctLoc real_span
ty = mkTyVarTy tv
ev = mkLocalId name ty
can = CHoleCan { cc_ev = CtWanted ty ev ctLoc'
, cc_occ = occName name
, cc_hole = TypeHole }
; emitInsoluble can } }
{-
************************************************************************
* *
Template Haskell context
* *
************************************************************************
-}
recordThUse :: TcM ()
recordThUse = do { env <- getGblEnv; writeTcRef (tcg_th_used env) True }
recordThSpliceUse :: TcM ()
recordThSpliceUse = do { env <- getGblEnv; writeTcRef (tcg_th_splice_used env) True }
keepAlive :: Name -> TcRn () -- Record the name in the keep-alive set
keepAlive name
= do { env <- getGblEnv
; traceRn (ptext (sLit "keep alive") <+> ppr name)
; updTcRef (tcg_keep env) (`extendNameSet` name) }
getStage :: TcM ThStage
getStage = do { env <- getLclEnv; return (tcl_th_ctxt env) }
getStageAndBindLevel :: Name -> TcRn (Maybe (TopLevelFlag, ThLevel, ThStage))
getStageAndBindLevel name
= do { env <- getLclEnv;
; case lookupNameEnv (tcl_th_bndrs env) name of
Nothing -> return Nothing
Just (top_lvl, bind_lvl) -> return (Just (top_lvl, bind_lvl, tcl_th_ctxt env)) }
setStage :: ThStage -> TcM a -> TcRn a
setStage s = updLclEnv (\ env -> env { tcl_th_ctxt = s })
{-
************************************************************************
* *
Safe Haskell context
* *
************************************************************************
-}
-- | Mark that safe inference has failed
recordUnsafeInfer :: TcM ()
recordUnsafeInfer = getGblEnv >>= \env -> writeTcRef (tcg_safeInfer env) False
-- | Figure out the final correct safe haskell mode
finalSafeMode :: DynFlags -> TcGblEnv -> IO SafeHaskellMode
finalSafeMode dflags tcg_env = do
safeInf <- readIORef (tcg_safeInfer tcg_env)
return $ case safeHaskell dflags of
Sf_None | safeInferOn dflags && safeInf -> Sf_Safe
| otherwise -> Sf_None
s -> s
{-
************************************************************************
* *
Stuff for the renamer's local env
* *
************************************************************************
-}
getLocalRdrEnv :: RnM LocalRdrEnv
getLocalRdrEnv = do { env <- getLclEnv; return (tcl_rdr env) }
setLocalRdrEnv :: LocalRdrEnv -> RnM a -> RnM a
setLocalRdrEnv rdr_env thing_inside
= updLclEnv (\env -> env {tcl_rdr = rdr_env}) thing_inside
{-
************************************************************************
* *
Stuff for interface decls
* *
************************************************************************
-}
mkIfLclEnv :: Module -> SDoc -> IfLclEnv
mkIfLclEnv mod loc = IfLclEnv { if_mod = mod,
if_loc = loc,
if_tv_env = emptyUFM,
if_id_env = emptyUFM }
-- | Run an 'IfG' (top-level interface monad) computation inside an existing
-- 'TcRn' (typecheck-renaming monad) computation by initializing an 'IfGblEnv'
-- based on 'TcGblEnv'.
initIfaceTcRn :: IfG a -> TcRn a
initIfaceTcRn thing_inside
= do { tcg_env <- getGblEnv
; let { if_env = IfGblEnv {
if_rec_types = Just (tcg_mod tcg_env, get_type_env)
}
; get_type_env = readTcRef (tcg_type_env_var tcg_env) }
; setEnvs (if_env, ()) thing_inside }
initIfaceCheck :: HscEnv -> IfG a -> IO a
-- Used when checking the up-to-date-ness of the old Iface
-- Initialise the environment with no useful info at all
initIfaceCheck hsc_env do_this
= do let rec_types = case hsc_type_env_var hsc_env of
Just (mod,var) -> Just (mod, readTcRef var)
Nothing -> Nothing
gbl_env = IfGblEnv { if_rec_types = rec_types }
initTcRnIf 'i' hsc_env gbl_env () do_this
initIfaceTc :: ModIface
-> (TcRef TypeEnv -> IfL a) -> TcRnIf gbl lcl a
-- Used when type-checking an up-to-date interface file
-- No type envt from the current module, but we do know the module dependencies
initIfaceTc iface do_this
= do { tc_env_var <- newTcRef emptyTypeEnv
; let { gbl_env = IfGblEnv {
if_rec_types = Just (mod, readTcRef tc_env_var)
} ;
; if_lenv = mkIfLclEnv mod doc
}
; setEnvs (gbl_env, if_lenv) (do_this tc_env_var)
}
where
mod = mi_module iface
doc = ptext (sLit "The interface for") <+> quotes (ppr mod)
initIfaceLcl :: Module -> SDoc -> IfL a -> IfM lcl a
initIfaceLcl mod loc_doc thing_inside
= setLclEnv (mkIfLclEnv mod loc_doc) thing_inside
getIfModule :: IfL Module
getIfModule = do { env <- getLclEnv; return (if_mod env) }
--------------------
failIfM :: MsgDoc -> IfL a
-- The Iface monad doesn't have a place to accumulate errors, so we
-- just fall over fast if one happens; it "shouldn't happen".
-- We use IfL here so that we can get context info out of the local env
failIfM msg
= do { env <- getLclEnv
; let full_msg = (if_loc env <> colon) $$ nest 2 msg
; dflags <- getDynFlags
; liftIO (log_action dflags dflags SevFatal noSrcSpan (defaultErrStyle dflags) full_msg)
; failM }
--------------------
forkM_maybe :: SDoc -> IfL a -> IfL (Maybe a)
-- Run thing_inside in an interleaved thread.
-- It shares everything with the parent thread, so this is DANGEROUS.
--
-- It returns Nothing if the computation fails
--
-- It's used for lazily type-checking interface
-- signatures, which is pretty benign
forkM_maybe doc thing_inside
-- NB: Don't share the mutable env_us with the interleaved thread since env_us
-- does not get updated atomically (e.g. in newUnique and newUniqueSupply).
= do { child_us <- newUniqueSupply
; child_env_us <- newMutVar child_us
-- see Note [Masking exceptions in forkM_maybe]
; unsafeInterleaveM $ uninterruptibleMaskM_ $ updEnv (\env -> env { env_us = child_env_us }) $
do { traceIf (text "Starting fork {" <+> doc)
; mb_res <- tryM $
updLclEnv (\env -> env { if_loc = if_loc env $$ doc }) $
thing_inside
; case mb_res of
Right r -> do { traceIf (text "} ending fork" <+> doc)
; return (Just r) }
Left exn -> do {
-- Bleat about errors in the forked thread, if -ddump-if-trace is on
-- Otherwise we silently discard errors. Errors can legitimately
-- happen when compiling interface signatures (see tcInterfaceSigs)
whenDOptM Opt_D_dump_if_trace $ do
dflags <- getDynFlags
let msg = hang (text "forkM failed:" <+> doc)
2 (text (show exn))
liftIO $ log_action dflags dflags SevFatal noSrcSpan (defaultErrStyle dflags) msg
; traceIf (text "} ending fork (badly)" <+> doc)
; return Nothing }
}}
forkM :: SDoc -> IfL a -> IfL a
forkM doc thing_inside
= do { mb_res <- forkM_maybe doc thing_inside
; return (case mb_res of
Nothing -> pgmError "Cannot continue after interface file error"
-- pprPanic "forkM" doc
Just r -> r) }
{-
Note [Masking exceptions in forkM_maybe]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When using GHC-as-API it must be possible to interrupt snippets of code
executed using runStmt (#1381). Since commit 02c4ab04 this is almost possible
by throwing an asynchronous interrupt to the GHC thread. However, there is a
subtle problem: runStmt first typechecks the code before running it, and the
exception might interrupt the type checker rather than the code. Moreover, the
typechecker might be inside an unsafeInterleaveIO (through forkM_maybe), and
more importantly might be inside an exception handler inside that
unsafeInterleaveIO. If that is the case, the exception handler will rethrow the
asynchronous exception as a synchronous exception, and the exception will end
up as the value of the unsafeInterleaveIO thunk (see #8006 for a detailed
discussion). We don't currently know a general solution to this problem, but
we can use uninterruptibleMask_ to avoid the situation.
-}
|
green-haskell/ghc
|
compiler/typecheck/TcRnMonad.hs
|
bsd-3-clause
| 52,814
| 51
| 25
| 16,775
| 12,384
| 6,446
| 5,938
| 867
| 4
|
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
----------------------------------------------------------------
-- ~ 2021.10.17
-- |
-- Module : Data.Number.LogFloat.IArrayTest
-- Copyright : Copyright (c) 2007--2021 wren gayle romano
-- License : BSD3
-- Maintainer : wren@cpan.org
-- Stability : stable
-- Portability : portable (with CPP)
--
-- This module tests the 'unsafeCoerce' version of @IArray UArray LogFloat@
-- instance. That instance and these tests were provided by Felipe
-- Lessa. For GHC 6.8 and above, the instance is automatically
-- derived and so these tests are unnecessary (but should still
-- pass).
--
-- Since SmallCheck isn't included in Hugs (Sept 2006), pass a flag
-- to enable SmallCheck tests. An invocation like the following
-- should suffice:
--
-- @hugs -98 +o -F'cpp -P -traditional -D__HUGS__=200609 -D__USE_SMALLCHECK__'@
----------------------------------------------------------------
module Data.Number.LogFloat.IArrayTest where
import Data.Number.LogFloat
import Data.Array.Unboxed as U
import Test.QuickCheck
#ifdef __USE_SMALLCHECK__
import Test.SmallCheck
#endif
----------------------------------------------------------------
prop_listArray :: [Double] -> Bool
prop_listArray xs = xs' == U.elems arr
where
xs' = map (logFloat . abs) xs
arr :: UArray Int LogFloat
arr = U.listArray (1, length xs') xs'
prop_accumArray :: [Double] -> Bool
prop_accumArray xs = product xs' == arr U.! 1
where
xs' = map (logFloat . abs) xs
arr :: UArray Int LogFloat
arr = U.accumArray (*) 1 (1, 1) [(1,x) | x <- xs']
main :: IO ()
main = do
quickCheck prop_listArray
quickCheck prop_accumArray
-- Trying to guard on the length of the list won't work. Using
-- SmallCheck instead
#ifdef __USE_SMALLCHECK__
smallCheck 5 prop_listArray
smallCheck 5 prop_accumArray
#endif
checkMore 1000 prop_listArray
checkMore 1000 prop_accumArray
where
checkMore n = check (defaultConfig
{ configMaxTest = n
, configMaxFail = n `div` 10
})
----------------------------------------------------------------
----------------------------------------------------------- fin.
|
wrengr/logfloat
|
test/Data/Number/LogFloat/IArrayTest.hs
|
bsd-3-clause
| 2,333
| 0
| 11
| 519
| 342
| 197
| 145
| 25
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Storage.InfluxDB where
import Control.Exception (catch, throw)
import Control.Monad (unless, when)
import Data.Aeson (FromJSON (..), ToJSON (..),
Value (..), decode, encode, object,
(.:), (.=))
import Data.ByteString.Char8 (ByteString, pack)
import Data.ByteString.Lazy (fromChunks)
import Data.Conduit (($$+-))
import Data.Conduit.List (consume)
import qualified Data.HashMap.Strict as HM
import Data.Maybe
import Data.Monoid ((<>))
import Data.Scientific (floatingOrInteger)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import qualified Data.Vector as V
import qualified Data.Yaml as A
import Network.HTTP.Conduit hiding (host, port)
import Network.HTTP.Types.Status
import Prelude hiding (putStr)
import Types.Dynamic
import Types.Shared (Check (ctype, cname),
TaskResult (..))
-- import Debug.Trace
type DB = String
type User = String
type Password = String
type Host = String
type Port = Int
type EnableSSL = Bool
data InfluxDB = InfluxDB
{ base :: !DB
, user :: !User
, pass :: !Password
, host :: !Host
, port :: !Port
, ssl :: !EnableSSL
} deriving Show
config :: InfluxDB
config = InfluxDB "fixmon" "fixmon" "fixmon" "fixmon" 8086 False
data InfluxQueryType = IWrite | IQuery
instance Show InfluxQueryType where
show IWrite = "/write"
show IQuery = "/query"
influxUrl :: InfluxQueryType -> InfluxDB -> String
influxUrl itype db =
let scheme = if ssl db
then "https://"
else "http://"
port' = show . port $ db
in scheme <> host db <> ":" <> port' <> show itype
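-- A small illustrative check against the default 'config' above (not part of
-- the original module): with ssl = False, host = "fixmon" and port = 8086,
--   influxUrl IWrite config == "http://fixmon:8086/write"
--   influxUrl IQuery config == "http://fixmon:8086/query"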
data Series = Series
{ seriesName :: !Text
, seriesData :: !SeriesData
} deriving (Show, Eq)
instance ToJSON Series where
toJSON Series {..} = object
[ "name" .= seriesName
, "columns" .= columns
, "points" .= points
]
where
SeriesData {..} = seriesData
instance FromJSON Series where
parseJSON (A.Array k) = do
if V.null k
then throw $ EmptyException
else do
let (Object v) = V.head k
n <- v .: "name"
c <- v .: "columns"
p <- v .: "points"
return $ Series n (toSD c p)
parseJSON e = error $ show e
toSD :: [Text] -> [[Value]] -> SeriesData
toSD c p = let col = c
po = map (map valToDyn) p
in SeriesData col po
valToDyn :: Value -> Dyn
valToDyn (String x) = to x
valToDyn (Number x) = case floatingOrInteger x of
Left y -> to (y :: Double)
Right y -> to (y :: Int)
valToDyn (A.Bool x) = to x
valToDyn Null = to ("null here" :: Text)
valToDyn e = error $ "bad val " ++ show e
data SeriesData = SeriesData
{ columns :: ![Column]
, points :: ![[Dyn]]
} deriving (Show, Eq)
type Column = Text
complexToSeriesData :: Counter -> Complex -> SeriesData
complexToSeriesData _prefixCounter _x = error "complexToSeriesData, not implemented"
{-- let (c', p') = unzip x
prefixCounter' = prefixCounter <> "."
columns' = map (\y -> prefixCounter' <> y) c'
in SeriesData columns' [p']
--}
rebuildComplex :: InfluxDB -> [TaskResult] -> Complex
rebuildComplex db cs = object
[ "database" .= base db
, "user" .= user db
, "password" .= pass db
, "points" .= A.array points
]
where
points = concatMap convert cs
convert :: TaskResult -> [Complex]
convert (TaskResult vHostname vCheck vTriggers vErrorMSG vCheckStatus (Array a)) =
concatMap (convert . TaskResult vHostname vCheck vTriggers vErrorMSG vCheckStatus) $ V.toList a
convert (TaskResult vHostname vCheck _ _ vCheckStatus (Object c)) =
let vName:vTag:_ = map String $ T.splitOn "." $ ctype vCheck
addition = HM.fromList
[ ("_check_name_", to $ cname vCheck)
, ("_check_type_", to $ ctype vCheck)
, ("_host_name_", to $ vHostname)
]
result = Object $ HM.fromList
[ ("name", vName)
, ("tags", object
[ "host" .= (to vHostname :: Dyn)
, "check_status" .= (String $ T.pack $ show vCheckStatus)
, "type" .= vTag
])
, ("fields", Object $ HM.union c addition)
]
in [result]
convert (TaskResult vHostname vCheck _ vErrorMSG vCheckStatus Null) =
let vName:vTag:_ = map String $ T.splitOn "." $ ctype vCheck
-- TODO add id (see buildCache)
errorComplex = Object $ HM.fromList
[ ("_check_name_", to $ cname vCheck)
, ("_check_type_", to $ ctype vCheck)
, ("_host_name_", to $ vHostname)
, ("_error_", String $ T.pack $ show vErrorMSG)
]
result = Object $ HM.fromList
[ ("name", vName)
, ("tags", object
[ "host" .= (to vHostname :: Dyn)
, "check_status" .= (String $ T.pack $ show vCheckStatus)
, "type" .= vTag
])
, ("fields", errorComplex)
]
in [result]
convert (TaskResult _ _ _ e s c) = error $ "\n\nbad object in rebuildComplex:\nobject: "
++ show c
++ "\nstatus: " ++ show s
++ "\nerror: " ++ show e
saveData :: InfluxDB -> [TaskResult] -> IO ()
saveData db forSave = do
request' <- parseUrl $ influxUrl IWrite db
let series = rebuildComplex db forSave
request = request'
{ method = "POST"
, checkStatus = \_ _ _ -> Nothing
, requestBody = RequestBodyLBS $ encode series
}
-- print $ (" ---------- send json" :: String)
-- putStr $ A.encodePretty series
response <- catch (withManager $ \manager -> responseStatus <$> http request manager) catchConduit
unless (response == ok200 || response == status204 ) $ throw $ DBException $ "Write Influx problem: status = " ++ show response ++ " request = " ++ show request
return ()
where
catchConduit :: HttpException -> IO Status
catchConduit e = throw $ HTTPException $ "Write Influx problem: http exception = " ++ show e
-- 'http://fixmon:8086/query?pretty=true&db=fixmon' --data-urlencode "q=select * from payments limit 10"
rawRequest :: InfluxDB -> Counter -> Text -> IO Dyn
rawRequest db _c raw = do
request' <- parseUrl $ influxUrl IQuery db
let addQueryStr = setQueryString [("db", Just (pack $ base db)), ("u", Just (pack $ user db)), ("p", Just (pack $ pass db)), ("q", Just $ encodeUtf8 raw)]
request'' = request'
{ method = "GET"
, checkStatus = \_ _ _ -> Nothing
}
request = addQueryStr request''
print request
response <- catch (withManager $ \manager -> do
r <- http request manager
result <- responseBody r $$+- consume
return (responseStatus r, result)
) catchConduit
unless (fst response == ok200) $ throw $ DBException $ "Influx problem: status = " ++ show response ++ " request = " ++ show request
let s = (decode . fromChunks . snd $ response :: Maybe Series)
-- print s
when (isNothing s) $ throw $ DBException $ "Influx problem: can't parse result"
return $ undefined -- seriesToDyn c (fromJust s)
where
catchConduit :: HttpException -> IO (Status, [ByteString])
catchConduit e = throw $ DBException $ "Influx problem: http exception = " ++ show e
getData :: InfluxDB -> Text -> Fun -> IO Dyn
-- get last value
getData db vHost (LastFun c p) = do
let (pole, addition) = unCounter c
print c
print vHost
xs <- rawRequest db c $ "select "<> pole <>" from " <> vHost <> addition <> " limit " <> ptt p
case xs of
-- Array xss -> return $ last xss
y -> return y
where
-- counterToIdAndWhere counter =
-- case T.splitOn ":" counter of
-- vId:bucketTypeName:[] -> undefined
getData _ _ _ = error "getData not implemented"
{--
getData db vHost (ChangeFun c) = do
let (pole, addition) = unCounter c
r <- rawRequest db c $ "select " <> pole <> " from " <> vHost <> addition <> " limit 2"
case r of
-- Array (x:y:[]) -> return $ to (x /= y)
_ -> throw EmptyException
getData db vHost (PrevFun c) = do
let (pole, addition) = unCounter c
rawRequest db "last" $ "select last(" <> pole <> ") from " <> vHost <> addition
getData db vHost (EnvValFun c) = do
let (pole, addition) = unCounter c
xs <- rawRequest db c $ "select "<> pole <>" from " <> vHost <> addition <> " limit 1"
case xs of
-- Array xss -> return $ last xss
y -> return y
getData db vHost (AvgFun c p) = do
let (pole, addition) = unCounter c
rawRequest db "mean" $ "select mean(" <> pole <> ") from " <> vHost <> " group by time(" <> pt p <> ") where time > now() - " <> pt p <> withAnd addition
getData db vHost (MinFun c p) = do
let (pole, addition) = unCounter c
rawRequest db "min" $ "select min(" <> pole <> ") from " <> vHost <> " group by time(" <> pt p <> ") where time > now() - " <> pt p <> withAnd addition
getData db vHost (MaxFun c p) = do
let (pole, addition) = unCounter c
rawRequest db "max" $ "select max(" <> pole <> ") from " <> vHost <> " group by time(" <> pt p <> ") where time > now() - " <> pt p <> withAnd addition
getData db vHost (NoDataFun c p) = do
let (pole, addition) = unCounter c
r <- try $ rawRequest db c $ "select " <> pole <> " from " <> vHost <> " where time > now() - " <> pt p <> withAnd addition <> " limit 1"
case r of
Right _ -> return $ to False
Left EmptyException -> return $ to True
Left e -> throw e
--}
withAnd :: Text -> Text
withAnd "" = ""
withAnd x = " and " <> T.drop 6 x
unCounter :: Counter -> (Text, Text)
unCounter _x = undefined {-- case T.splitOn ":" x of
[a] -> (a, T.empty)
[a, b] -> (b, " where " <> T.dropWhileEnd (/= '.') b <> "id = '" <> a <> "' ")
_ -> throw $ DBException "bad counter"
--}
pt :: Period Int -> Text
pt x = (T.pack . show $ (fromIntegral $ un x :: Double)/1000000) <> "s"
ptt :: Period Int -> Text
ptt = T.pack . show . un
seriesToDyn :: Text -> Series -> Dyn
seriesToDyn c s = let col = columns . seriesData $ s
poi = points . seriesData $ s
mapped = catMaybes $ map (\x -> lookup c (zip col x)) poi
in case mapped of
[] -> throw $ TypeException $ "Influx problem: can't find result for counter " ++ show c
[x] -> x
-- xs -> Array xs
_ -> error "ops series to dyn"
-- curl -G 'http://localhost:8086/db/fixmon/series?u=fixmon&p=fixmon' --data-urlencode "q=select status from localhost limit 1"
|
chemist/fixmon
|
src/Storage/InfluxDB.hs
|
bsd-3-clause
| 11,687
| 0
| 21
| 3,940
| 2,749
| 1,455
| 1,294
| 217
| 4
|
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.BitVectors.Rounding
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Implementation of floating-point operations that know about rounding-modes
-----------------------------------------------------------------------------
module Data.SBV.BitVectors.Rounding (RoundingFloat(..)) where
import Data.SBV.BitVectors.Data
import Data.SBV.BitVectors.Model () -- instances only
-- | A class of floating-point (IEEE754) operations that behave
-- differently based on rounding modes. Note that we will never
-- concretely evaluate these, but rather pass down to the SMT solver
-- even when we have a concrete rounding mode supported by Haskell.
-- (i.e., round-to-nearest even.) The extra complexity is just not
-- worth it to support constant folding in that rare case; and if
-- the rounding mode is already round-to-nearest-even then end-users simply
-- use the usual Num instances. (Except for FMA obviously, which has no
-- Haskell equivalent.)
class (SymWord a, Floating a) => RoundingFloat a where
fpAdd :: SRoundingMode -> SBV a -> SBV a -> SBV a
fpSub :: SRoundingMode -> SBV a -> SBV a -> SBV a
fpMul :: SRoundingMode -> SBV a -> SBV a -> SBV a
fpDiv :: SRoundingMode -> SBV a -> SBV a -> SBV a
fpFMA :: SRoundingMode -> SBV a -> SBV a -> SBV a -> SBV a
fpSqrt :: SRoundingMode -> SBV a -> SBV a
-- Default definitions simply piggy back onto FPRound
fpAdd = lift2Rm "fp.add"
fpSub = lift2Rm "fp.sub"
fpMul = lift2Rm "fp.mul"
fpDiv = lift2Rm "fp.div"
fpFMA = lift3Rm "fp.fma"
fpSqrt = lift1Rm "fp.sqrt"
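-- An illustrative use (a sketch, not part of the original module; it assumes
-- the library also exports a symbolic rounding-mode value such as
-- 'sRoundNearestTiesToEven'):
--   fpAdd sRoundNearestTiesToEven (x :: SDouble) (y :: SDouble)
-- builds an addition that the SMT solver evaluates under that rounding mode.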
-- | Lift a 1-argument floating-point op
lift1Rm :: (SymWord a, Floating a) => String -> SRoundingMode -> SBV a -> SBV a
lift1Rm w m a = SBV $ SVal k $ Right $ cache r
where k = kindOf a
r st = do swm <- sbvToSW st m
swa <- sbvToSW st a
newExpr st k (SBVApp (FPRound w) [swm, swa])
-- | Lift a 2-argument floating-point op
lift2Rm :: (SymWord a, Floating a) => String -> SRoundingMode -> SBV a -> SBV a -> SBV a
lift2Rm w m a b = SBV $ SVal k $ Right $ cache r
where k = kindOf a
r st = do swm <- sbvToSW st m
swa <- sbvToSW st a
swb <- sbvToSW st b
newExpr st k (SBVApp (FPRound w) [swm, swa, swb])
-- | Lift a 3-argument floating-point op
lift3Rm :: (SymWord a, Floating a) => String -> SRoundingMode -> SBV a -> SBV a -> SBV a -> SBV a
lift3Rm w m a b c = SBV $ SVal k $ Right $ cache r
where k = kindOf a
r st = do swm <- sbvToSW st m
swa <- sbvToSW st a
swb <- sbvToSW st b
swc <- sbvToSW st c
newExpr st k (SBVApp (FPRound w) [swm, swa, swb, swc])
-- | SFloat instance
instance RoundingFloat Float
-- | SDouble instance
instance RoundingFloat Double
{-# ANN module ("HLint: ignore Reduce duplication" :: String) #-}
|
Copilot-Language/sbv-for-copilot
|
Data/SBV/BitVectors/Rounding.hs
|
bsd-3-clause
| 3,042
| 0
| 13
| 765
| 795
| 399
| 396
| 40
| 1
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Opaleye
import Data.Profunctor.Product (p3)
import Data.Profunctor.Product.Default
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromField (FromField(..))
import Control.Arrow
userTable :: Table
(Column PGInt4, Column PGText, Column PGText)
(Column PGInt4, Column PGText, Column PGText)
userTable = Table "users" (p3 (
required "id",
required "name",
required "email"
))
newtype UserId = UserId Int deriving (Show)
instance FromField UserId where
fromField field bs = UserId <$> fromField field bs
instance QueryRunnerColumnDefault PGInt4 UserId where
queryRunnerColumnDefault = fieldQueryRunnerColumn
data User = User { id :: UserId, name :: String, email :: String } deriving (Show)
makeUserFromTuple :: (Int, String, String) -> User
makeUserFromTuple (id_, name_, e_mail) = User (UserId id_) name_ e_mail
instance Default QueryRunner (Column PGInt4, Column PGText, Column PGText) User where
def = makeUserFromTuple <$> def
getUserRows :: IO [User]
getUserRows = do
conn <- connect defaultConnectInfo { connectDatabase = "scratch"}
runQuery conn $ proc () ->
do
user@(_, pgName, _) <- queryTable userTable -< ()
restrict -< (pgName .== (pgStrictText "John"))
returnA -< user
main :: IO ()
main = do
rows <- getUserRows
putStrLn $ show rows
-- Output
-- >main
-- [User {id = UserId 1, name = "John", email = "john@mail.com"}]
|
vacationlabs/haskell-webapps
|
doc/docs/opaleye/code/opaleye-select-with-condition.hs
|
mit
| 1,603
| 1
| 15
| 304
| 457
| 249
| 208
| 40
| 1
|
--
--
--
-----------------
-- Exercise 5.24.
-----------------
--
--
--
module E'5'24 where
pushRight :: String -> Integer -> String
pushRight string lineLength
= [ ' ' | currentLength <- [1 .. blankCount] ]
++ string
where
blankCount :: Integer
blankCount = lineLength - (toInteger (length string))
{-
pushRight "##" 1
pushRight "##" 2
pushRight "##" 3
pushRight "##" 4
-}
-- "##"
-- "##"
-- " ##"
-- " ##"
-- Other solutions for "pushRight":
-- Just for fun and because the length function wasn't hinted in the book yet.
pushRight' :: String -> Integer -> String
pushRight' string lineLength
| lineLength < 0
|| string == [ character | currentLength <- [1 .. (lineLength - 1)],
character <- string ] = string
| otherwise = " " ++ (pushRight string (lineLength - 1))
-- Q: What are the disadvantages of this?
-- A: Harder to read, ...
pushRight'3 :: String -> Int -> String
pushRight'3 string lineLength
= ( replicate ( lineLength - (length string) ) ' ' )
++ string
|
pascal-knodel/haskell-craft
|
_/links/E'5'24.hs
|
mit
| 1,156
| 0
| 14
| 362
| 243
| 137
| 106
| 17
| 1
|
head' (x:xs) = x
drop' n [] = []
drop' 0 xs = xs
drop' n (x:xs) = drop' (pred n) xs
getName = do
line <- getLine
return line
main = do
line <- getName
if line == "jojon" then putStrLn "ssakit" else main
|
squest/Zenx-trainer-training
|
haskell/teta.hs
|
epl-1.0
| 217
| 0
| 8
| 59
| 116
| 57
| 59
| 10
| 2
|
{-# LANGUAGE Rank2Types #-}
{-| Implementation of the Ganeti Query2 filtering.
The filtering of results should be done in two phases.
In the first phase, before contacting any remote nodes for runtime
data, the filtering should be executed with 'Nothing' for the runtime
context. This will make all non-runtime filters filter correctly,
whereas all runtime filters will respond successfully. As described in
the Python version too, this makes for example /Or/ filters very
inefficient if they contain runtime fields.
Once this first filtering phase has been done, and we have hopefully
eliminated some remote nodes from the list of candidates, we run the
remote data gathering and evaluate the filter again, this time with a
'Just' runtime context. This will make all filters work correctly.
Note that the second run will re-evaluate the config/simple fields,
without caching; this is not perfect, but we consider config accesses
very cheap (and the configuration snapshot we have won't change
between the two runs, hence we will not get inconsistent results).
-}
{-
Copyright (C) 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Query.Filter
( compileFilter
, evaluateFilter
, requestedNames
, makeSimpleFilter
) where
import Control.Applicative
import Control.Monad (liftM)
import qualified Data.Map as Map
import Data.Traversable (traverse)
import Text.JSON (JSValue(..), fromJSString)
import Text.JSON.Pretty (pp_value)
import qualified Text.Regex.PCRE as PCRE
import Ganeti.BasicTypes
import Ganeti.Errors
import Ganeti.Objects
import Ganeti.Query.Language
import Ganeti.Query.Types
import Ganeti.JSON
-- | Compiles a filter based on field names to one based on getters.
compileFilter :: FieldMap a b
-> Filter FilterField
-> ErrorResult (Filter (FieldGetter a b, QffMode))
compileFilter fm =
traverse (\field -> maybe
(Bad . ParameterError $ "Can't find field named '" ++
field ++ "'")
(\(_, g, q) -> Ok (g, q)) (field `Map.lookup` fm))
-- | Processes a field value given a QffMode.
qffField :: QffMode -> JSValue -> ErrorResult JSValue
qffField QffNormal v = Ok v
qffField QffHostname v = Ok v
qffField QffTimestamp v =
case v of
JSArray [secs@(JSRational _ _), JSRational _ _] -> return secs
_ -> Bad $ ProgrammerError
"Internal error: Getter returned non-timestamp for QffTimestamp"
-- | Wraps a getter, filter pair. If the getter is 'FieldRuntime' but
-- we don't have a runtime context, we skip the filtering, returning
-- \"pass\". Otherwise, we pass the actual value to the filter.
wrapGetter :: ConfigData
-> Maybe b
-> a
-> (FieldGetter a b, QffMode)
-> (QffMode -> JSValue -> ErrorResult Bool)
-> ErrorResult Bool
wrapGetter cfg b a (getter, qff) faction =
case tryGetter cfg b a getter of
Nothing -> Ok True -- runtime missing, accepting the value
Just v ->
case v of
ResultEntry RSNormal (Just fval) -> qffField qff fval >>= faction qff
ResultEntry RSNormal Nothing ->
Bad $ ProgrammerError
"Internal error: Getter returned RSNormal/Nothing"
_ -> Ok True -- filter has no data to work with, accepting it
-- | Wrapper alias over field functions to ignore their first Qff argument.
ignoreMode :: a -> QffMode -> a
ignoreMode = const
-- | Helper to evaluate a filter getter (and the value it generates) in
-- a boolean context.
trueFilter :: JSValue -> ErrorResult Bool
trueFilter (JSBool x) = Ok $! x
trueFilter v = Bad . ParameterError $
"Unexpected value '" ++ show (pp_value v) ++
"' in boolean context"
-- | A type synonym for a rank-2 comparator function. This is used so
-- that we can pass the usual '<=', '>', '==' functions to 'binOpFilter'
-- and for them to be used in multiple contexts.
type Comparator = (Eq a, Ord a) => a -> a -> Bool
-- | Equality checker.
--
-- This will handle hostnames correctly, if the mode is set to
-- 'QffHostname'.
eqFilter :: FilterValue -> QffMode -> JSValue -> ErrorResult Bool
-- send 'QffNormal' queries to 'binOpFilter'
eqFilter flv QffNormal jsv = binOpFilter (==) flv jsv
-- and 'QffTimestamp' as well
eqFilter flv QffTimestamp jsv = binOpFilter (==) flv jsv
-- error out if we set 'QffHostname' on a non-string field
eqFilter _ QffHostname (JSRational _ _) =
Bad . ProgrammerError $ "QffHostname field returned a numeric value"
-- test strings via 'compareNameComponent'
eqFilter (QuotedString y) QffHostname (JSString x) =
Ok $ goodLookupResult (fromJSString x `compareNameComponent` y)
-- send all other combinations (all errors) to 'binOpFilter', which
-- has good error messages
eqFilter flv _ jsv = binOpFilter (==) flv jsv
-- | Helper to evaluate a filter getter (and the value it generates)
-- in a boolean context. Note that the order of arguments is reversed from
-- the filter definitions (due to the call chain), so make sure to
-- compare in the reverse order too!
binOpFilter :: Comparator -> FilterValue -> JSValue -> ErrorResult Bool
binOpFilter comp (QuotedString y) (JSString x) =
Ok $! fromJSString x `comp` y
binOpFilter comp (NumericValue y) (JSRational _ x) =
Ok $! x `comp` fromIntegral y
binOpFilter _ expr actual =
Bad . ParameterError $ "Invalid types in comparison, trying to compare " ++
show (pp_value actual) ++ " with '" ++ show expr ++ "'"
-- | Implements the 'RegexpFilter' matching.
regexpFilter :: FilterRegex -> JSValue -> ErrorResult Bool
regexpFilter re (JSString val) =
Ok $! PCRE.match (compiledRegex re) (fromJSString val)
regexpFilter _ x =
Bad . ParameterError $ "Invalid field value used in regexp matching,\
\ expecting string but got '" ++ show (pp_value x) ++ "'"
-- | Implements the 'ContainsFilter' matching.
containsFilter :: FilterValue -> JSValue -> ErrorResult Bool
-- note: the next two implementations are the same, but we have to
-- repeat them due to the encapsulation done by FilterValue
containsFilter (QuotedString val) lst = do
lst' <- fromJVal lst
return $! val `elem` lst'
containsFilter (NumericValue val) lst = do
lst' <- fromJVal lst
return $! val `elem` lst'
-- | Verifies if a given item passes a filter. The runtime context
-- might be missing, in which case most of the filters will consider
-- this as passing the filter.
--
-- Note: we use explicit recursion to reduce unneeded memory use;
-- 'any' and 'all' do not play nice with monadic values, resulting in
-- either too much memory use or in too many thunks being created.
evaluateFilter :: ConfigData -> Maybe b -> a
-> Filter (FieldGetter a b, QffMode)
-> ErrorResult Bool
evaluateFilter _ _ _ EmptyFilter = Ok True
evaluateFilter c mb a (AndFilter flts) = helper flts
where helper [] = Ok True
helper (f:fs) = do
v <- evaluateFilter c mb a f
if v
then helper fs
else Ok False
evaluateFilter c mb a (OrFilter flts) = helper flts
where helper [] = Ok False
helper (f:fs) = do
v <- evaluateFilter c mb a f
if v
then Ok True
else helper fs
evaluateFilter c mb a (NotFilter flt) =
not <$> evaluateFilter c mb a flt
evaluateFilter c mb a (TrueFilter getter) =
wrapGetter c mb a getter $ ignoreMode trueFilter
evaluateFilter c mb a (EQFilter getter val) =
wrapGetter c mb a getter (eqFilter val)
evaluateFilter c mb a (LTFilter getter val) =
wrapGetter c mb a getter $ ignoreMode (binOpFilter (<) val)
evaluateFilter c mb a (LEFilter getter val) =
wrapGetter c mb a getter $ ignoreMode (binOpFilter (<=) val)
evaluateFilter c mb a (GTFilter getter val) =
wrapGetter c mb a getter $ ignoreMode (binOpFilter (>) val)
evaluateFilter c mb a (GEFilter getter val) =
wrapGetter c mb a getter $ ignoreMode (binOpFilter (>=) val)
evaluateFilter c mb a (RegexpFilter getter re) =
wrapGetter c mb a getter $ ignoreMode (regexpFilter re)
evaluateFilter c mb a (ContainsFilter getter val) =
wrapGetter c mb a getter $ ignoreMode (containsFilter val)
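-- An illustrative sketch of the two-phase use described in the module header
-- (not part of the original source; 'cfg', 'item', 'runtime' and 'cflt' are
-- assumed values, with 'cflt' produced by 'compileFilter'):
--   pass1 = evaluateFilter cfg Nothing item cflt        -- runtime filters pass
--   pass2 = evaluateFilter cfg (Just runtime) item cflt -- full evaluation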
-- | Runs a getter with potentially missing runtime context.
tryGetter :: ConfigData -> Maybe b -> a -> FieldGetter a b -> Maybe ResultEntry
tryGetter _ _ item (FieldSimple getter) = Just $ getter item
tryGetter cfg _ item (FieldConfig getter) = Just $ getter cfg item
tryGetter _ rt item (FieldRuntime getter) =
maybe Nothing (\rt' -> Just $ getter rt' item) rt
tryGetter _ _ _ FieldUnknown = Just $
ResultEntry RSUnknown Nothing
-- | Computes the requested names, if only names were requested (and
-- with equality). Otherwise returns 'Nothing'.
requestedNames :: FilterField -> Filter FilterField -> Maybe [FilterValue]
requestedNames _ EmptyFilter = Just []
requestedNames namefield (OrFilter flts) =
liftM concat $ mapM (requestedNames namefield) flts
requestedNames namefield (EQFilter fld val) =
if namefield == fld
then Just [val]
else Nothing
requestedNames _ _ = Nothing
-- | Builds a simple filter from a list of names.
makeSimpleFilter :: String -> [Either String Integer] -> Filter FilterField
makeSimpleFilter _ [] = EmptyFilter
makeSimpleFilter namefield vals =
OrFilter $ map (EQFilter namefield . either QuotedString NumericValue) vals
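-- For example (illustrative only, not part of the original source):
--   makeSimpleFilter "name" [Left "node1", Right 5]
--     == OrFilter [ EQFilter "name" (QuotedString "node1")
--                 , EQFilter "name" (NumericValue 5) ]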
|
narurien/ganeti-ceph
|
src/Ganeti/Query/Filter.hs
|
gpl-2.0
| 10,026
| 0
| 13
| 2,152
| 2,115
| 1,087
| 1,028
| 144
| 5
|
{-# OPTIONS -O2 -Wall #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Data.Store.Rev.Branch
(Branch, new, move, curVersion, newVersion)
where
import Control.Monad (liftM)
import Data.Store.Rev.Change (Change)
import Data.Store.Rev.Version (Version)
import Data.Store.Rev.ViewBranchInternal (BranchData(..), Branch(..), moveView)
import Data.Store.Transaction (Transaction)
import qualified Data.Store.Rev.Version as Version
import qualified Data.Store.Transaction as Transaction
move :: Monad m => Branch -> Version -> Transaction t m ()
move (Branch dataIRef) destVersion = do
BranchData srcVersion views <- Transaction.readIRef dataIRef
mapM_ (moveToDest srcVersion) views
Transaction.writeIRef dataIRef (BranchData destVersion views)
where
moveToDest srcVersion view = moveView view srcVersion destVersion
curVersion :: Monad m => Branch -> Transaction t m Version
curVersion (Branch dataIRef) = brVersion `liftM` Transaction.readIRef dataIRef
-- | A Branch is a mutable version ptr
new :: Monad m => Version -> Transaction t m Branch
new version = Branch `liftM`
Transaction.newIRef (BranchData version [])
newVersion :: Monad m => Branch -> [Change] -> Transaction t m ()
newVersion branch changes = do
version <- curVersion branch
move branch =<< Version.newVersion version changes
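-- An illustrative sketch (not part of the original module; 'someVersion' and
-- 'changes' are assumed to be in scope inside a 'Transaction'):
--   do { b <- new someVersion
--      ; newVersion b changes
--      ; v <- curVersion b   -- 'v' now points at the freshly created version
--      ; ... }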
|
alonho/bottle
|
src/Data/Store/Rev/Branch.hs
|
gpl-3.0
| 1,334
| 0
| 9
| 202
| 400
| 216
| 184
| 26
| 1
|
module GrLang.ReplIntegrationSpec (spec) where
import System.Directory (listDirectory)
import System.Exit (ExitCode (..))
import System.FilePath (takeBaseName, takeExtension)
import System.Process (readProcessWithExitCode)
import Test.Hspec
spec :: Spec
spec = do
files <- runIO $ listDirectory "tests/GrLang/ReplIntegrationSpec"
let cases = map takeBaseName . filter ((== ".lua") . takeExtension) $ files
mapM_ testCase cases
testCase :: String -> SpecWith (Arg (IO ()))
testCase name = it name $ do
(exitCode, stdOut, stdErr) <- readProcessWithExitCode
"stack" ["exec", "verigraph-repl", "--", prefix ++ name ++ ".lua"] ""
exitCode `shouldBe` ExitSuccess
expectedErr <- readFile (prefix ++ name ++ ".stderr")
stdErr `shouldBe` expectedErr
expectedOut <- readFile (prefix ++ name ++ ".stdout")
stdOut `shouldBe` expectedOut
where prefix = "tests/GrLang/ReplIntegrationSpec/"
|
rodrigo-machado/verigraph
|
tests/GrLang/ReplIntegrationSpec.hs
|
gpl-3.0
| 961
| 0
| 15
| 192
| 289
| 154
| 135
| 21
| 1
|
{-# LANGUAGE CPP #-}
#if MIN_VERSION_transformers(0,5,6)
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.RWS.Strict
-- Copyright : (c) Andy Gill 2001,
-- (c) Oregon Graduate Institute of Science and Technology, 2001
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (multi-param classes, functional dependencies)
--
-- Strict RWS monad that uses continuation-passing-style to achieve constant
-- space usage.
--
-- Inspired by the paper
-- /Functional Programming with Overloading and Higher-Order Polymorphism/,
-- Mark P Jones (<http://web.cecs.pdx.edu/~mpj/>)
-- Advanced School of Functional Programming, 1995.
--
-- /Since: mtl-2.3, transformers-0.5.6/
-----------------------------------------------------------------------------
module Control.Monad.RWS.CPS (
-- * The RWS monad
RWS,
rws,
runRWS,
evalRWS,
execRWS,
mapRWS,
withRWS,
-- * The RWST monad transformer
RWST,
runRWST,
evalRWST,
execRWST,
mapRWST,
withRWST,
-- * Strict Reader-writer-state monads
module Control.Monad.RWS.Class,
module Control.Monad,
module Control.Monad.Fix,
module Control.Monad.Trans,
module Data.Monoid,
) where
import Control.Monad.RWS.Class
import Control.Monad.Trans
import Control.Monad.Trans.RWS.CPS (
RWS, rws, runRWS, evalRWS, execRWS, mapRWS, withRWS,
RWST, runRWST, evalRWST, execRWST, mapRWST, withRWST)
import Control.Monad
import Control.Monad.Fix
import Data.Monoid
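-- An illustrative use of the re-exported strict CPS RWS monad (a sketch, not
-- part of the original module):
--   example :: (Int, Int, [String])
--   example = runRWS (do { r <- ask
--                        ; tell ["seen " ++ show r]
--                        ; modify (+ r)
--                        ; get }) 2 40
--   -- example == (42, 42, ["seen 2"])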
#else
-- | This module ordinarily re-exports @Control.Monad.Trans.RWS.CPS@ from
-- @transformers >= 0.5.6@, which is not currently installed. Therefore, this
-- module currently provides nothing; use "Control.Monad.RWS.Lazy" or
-- "Control.Monad.RWS.Strict" instead.
module Control.Monad.RWS.CPS () where
#endif
|
ekmett/mtl
|
Control/Monad/RWS/CPS.hs
|
bsd-3-clause
| 1,995
| 0
| 5
| 367
| 190
| 138
| 52
| 2
| 0
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module RouteTableSpec where
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative
#endif
import Control.Monad
import Data.IP
import Data.IP.RouteTable.Internal
import Data.List (sort, nub)
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck
----------------------------------------------------------------
--
-- Arbitrary
--
instance Arbitrary (AddrRange IPv4) where
arbitrary = arbitraryIP arbitrary 32
instance Arbitrary (AddrRange IPv6) where
arbitrary = arbitraryIP arbitrary 128
instance Arbitrary IPv4 where
arbitrary = arbitraryAdr toIPv4 255 4
instance Arbitrary IPv6 where
arbitrary = arbitraryAdr toIPv6 65535 8
arbitraryAdr :: Routable a => ([Int] -> a) -> Int -> Int -> Gen a
arbitraryAdr func width adrlen = func <$> replicateM adrlen (choose (0, width))
arbitraryIP :: Routable a => Gen a -> Int -> Gen (AddrRange a)
arbitraryIP adrGen msklen = makeAddrRange <$> adrGen <*> choose (0,msklen)
----------------------------------------------------------------
--
-- Spec
--
spec :: Spec
spec = do
describe "fromList" $ do
prop "creates the same tree for random input and ordered input"
(sort_ip :: [AddrRange IPv4] -> Bool)
prop "creates the same tree for random input and ordered input"
(sort_ip :: [AddrRange IPv6] -> Bool)
prop "stores input in the incremental order"
(ord_ip :: [AddrRange IPv4] -> Bool)
prop "stores input in the incremental order"
(ord_ip :: [AddrRange IPv6] -> Bool)
describe "toList" $ do
prop "expands as sorted"
(fromto_ip :: [AddrRange IPv4] -> Bool)
prop "expands as sorted"
(fromto_ip :: [AddrRange IPv6] -> Bool)
sort_ip :: (Routable a, Ord a) => [AddrRange a] -> Bool
sort_ip xs = fromList (zip xs xs) == fromList (zip xs' xs')
where
xs' = sort xs
fromto_ip :: (Routable a, Ord a) => [AddrRange a] -> Bool
fromto_ip xs = nub (sort xs) == nub (sort ys)
where
ys = map fst . toList . fromList $ zip xs xs
ord_ip :: Routable a => [AddrRange a] -> Bool
ord_ip xs = isOrdered . fromList $ zip xs xs
isOrdered :: Routable k => IPRTable k a -> Bool
isOrdered = foldt (\x v -> v && ordered x) True
ordered :: Routable k => IPRTable k a -> Bool
ordered Nil = True
ordered (Node k _ _ l r) = ordered' k l && ordered' k r
where
ordered' _ Nil = True
ordered' k1 (Node k2 _ _ _ _) = k1 >:> k2
|
DanielG/iproute
|
test/RouteTableSpec.hs
|
bsd-3-clause
| 2,526
| 0
| 14
| 563
| 821
| 422
| 399
| 55
| 2
|
{-#
LANGUAGE ExistentialQuantification, KindSignatures,
NoImplicitPrelude, StandaloneDeriving #-}
{-# OPTIONS -Wall #-}
-- | The components for constructing the Orthotope Machine dataflow graph.
-- Most components take three arguments:
--
-- [@vector :: * -> *@] The array dimension. It is a 'Vector' that
-- defines the dimension of the Orthotope on which the OM operates.
--
-- [@gauge :: *@] The array index. The combination @vector gauge@
-- needs to be an instance of 'Algebra.Additive.C' if you want to
-- perform @Shift@ operation.
--
-- [@anot :: *@] The annotations put on each node. If you want to use
-- Annotation, @anot@ needs to be an instance of 'Data.Monoid'.
module Language.Paraiso.OM.Graph
(
Setup(..), Kernel(..), Graph, nmap, imap, getA,
Node(..), Edge(..),
StaticIdx(..),
Inst(..),
)where
import Data.Dynamic
import Data.Tensor.TypeLevel
import qualified Data.Vector as V
import qualified Data.Graph.Inductive as FGL
import Language.Paraiso.Name
import Language.Paraiso.OM.Arithmetic as A
import Language.Paraiso.OM.Reduce as R
import Language.Paraiso.OM.DynValue
import NumericPrelude
-- | An OM Setup, a set of information needed before you start building a 'Kernel'.
data Setup (vector :: * -> *) gauge anot =
Setup {
-- | The list of static orthotopes
-- (its identifier, Realm and Type carried in the form of 'NamedValue')
staticValues :: V.Vector (Named DynValue),
-- | The machine-global annotations
globalAnnotation :: anot
} deriving (Eq, Show)
-- | A 'Kernel' performs a block of calculations on the OM.
data Kernel vector gauge anot =
Kernel {
kernelName :: Name,
dataflow :: Graph vector gauge anot
}
deriving (Show)
instance Nameable (Kernel v g a) where
name = kernelName
-- | The dataflow graph for Orthotope Machine. anot is an additional annotation.
type Graph vector gauge anot = FGL.Gr (Node vector gauge anot) Edge
-- | Map the 'Graph' annotation from one type to another. Unfortunately we cannot make one data
-- type an instance of both 'FGL.Graph' and 'Functor', so 'nmap' is a standalone function.
nmap :: (a -> b) -> Graph v g a -> Graph v g b
nmap f = FGL.nmap (napply f)
where
napply f0 (NValue x a0) = (NValue x $ f0 a0)
napply f0 (NInst x a0) = (NInst x $ f0 a0)
-- | Map the 'Graph' annotation from one type to another, while referring to the node indices.
imap :: (FGL.Node -> a -> b) -> Graph v g a -> Graph v g b
imap f graph = FGL.mkGraph (map (\(i,a) -> (i, update i a)) $ FGL.labNodes graph) (FGL.labEdges graph)
where
update i (NValue x a0) = (NValue x $ f i a0)
update i (NInst x a0) = (NInst x $ f i a0)
-- | The 'Node' for the dataflow 'Graph' of the Orthotope machine.
-- The dataflow graph is a 2-part graph consisting of 'NValue' and 'NInst' nodes.
data Node vector gauge anot =
-- | A value node. An 'NValue' node only connects to 'NInst' nodes.
-- An 'NValue' node has one and only one input edge, and has arbitrary number of output edges.
NValue DynValue anot |
-- | An instruction node. An 'NInst' node only connects to 'NValue' nodes.
-- The number of input and output edges an 'NInst' node has is specified by its 'Arity'.
NInst (Inst vector gauge) anot
deriving (Show)
-- | The 'Edge' label for the dataflow 'Graph'.
-- It keeps track of the order of the arguments.
data Edge =
-- | an unordered edge.
EUnord |
-- | edges where the order matters.
EOrd Int deriving (Eq, Ord, Show)
-- | get annotation of the node.
getA :: Node v g a -> a
getA nd = case nd of
NValue _ x -> x
NInst _ x -> x
instance Functor (Node v g) where
fmap f (NValue x y) = (NValue x (f y))
fmap f (NInst x y) = (NInst x (f y))
newtype StaticIdx = StaticIdx { fromStaticIdx :: Int}
instance Show StaticIdx where
show (StaticIdx x) = "static[" ++ show x ++ "]"
data Inst vector gauge
= Load StaticIdx
| Store StaticIdx
| Reduce R.Operator
| Broadcast
| LoadIndex (Axis vector)
| LoadSize (Axis vector)
| Shift (vector gauge)
| Imm Dynamic
| Arith A.Operator
deriving (Show)
instance Arity (Inst vector gauge) where
arity a = case a of
Load _ -> (0,1)
Store _ -> (1,0)
Reduce _ -> (1,1)
Broadcast -> (1,1)
LoadIndex _ -> (0,1)
LoadSize _ -> (0,1)
Shift _ -> (1,1)
Imm _ -> (0,1)
Arith op -> arity op
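-- For example (illustrative only, not part of the original module):
--   arity (Load (StaticIdx 0)) == (0,1)   -- no inputs, one output
--   arity Broadcast            == (1,1)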
|
nushio3/Paraiso
|
Language/Paraiso/OM/Graph.hs
|
bsd-3-clause
| 4,516
| 0
| 12
| 1,119
| 1,068
| 604
| 464
| 79
| 2
|
{-# LANGUAGE NoImplicitPrelude, MagicHash, UnboxedTuples, BangPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Java.PrimitiveBase
-- Copyright : (c) Rahul Muttineni 2016-2017
--
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : rahulmutt@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- Dealing with native Java primitives.
--
-----------------------------------------------------------------------------
module Java.PrimitiveBase
( Byte(..)
, Short(..)
, JChar(..) )
where
import Data.Bits
import Data.Maybe
import GHC.Prim
import GHC.Base
import GHC.Enum
import GHC.Num
import GHC.Real
import GHC.Read
import GHC.Arr
import GHC.Word hiding (uncheckedShiftL64#, uncheckedShiftRL64#)
import GHC.Show
-- TODO: Add rewrite rules
{- | The Byte type (8-bit signed integer) with associated instances. -}
data Byte = B# JByte#
instance Eq Byte where
(==) (B# x) (B# y) = isTrue# ((jbyte2int# x) ==# (jbyte2int# y))
instance Ord Byte where
compare (B# x) (B# y) = compareInt# (jbyte2int# x) (jbyte2int# y)
(<) (B# x) (B# y) = isTrue# ((jbyte2int# x) <# (jbyte2int# y))
(<=) (B# x) (B# y) = isTrue# ((jbyte2int# x) <=# (jbyte2int# y))
(>=) (B# x) (B# y) = isTrue# ((jbyte2int# x) >=# (jbyte2int# y))
(>) (B# x) (B# y) = isTrue# ((jbyte2int# x) ># (jbyte2int# y))
instance Num Byte where
(B# x#) + (B# y#) = B# (int2jbyte# ((jbyte2int# x#) +# (jbyte2int# y#)))
(B# x#) - (B# y#) = B# (int2jbyte# ((jbyte2int# x#) -# (jbyte2int# y#)))
(B# x#) * (B# y#) = B# (int2jbyte# ((jbyte2int# x#) *# (jbyte2int# y#)))
negate (B# x#) = B# (int2jbyte# (negateInt# (jbyte2int# x#)))
abs x | x >= 0 = x
| otherwise = negate x
signum x | x > 0 = 1
signum 0 = 0
signum _ = -1
fromInteger i = B# (int2jbyte# (integerToInt i))
instance Real Byte where
toRational x = toInteger x % 1
instance Bounded Byte where
minBound = -0x80
maxBound = 0x7F
instance Enum Byte where
succ x
| x /= maxBound = x + 1
| otherwise = succError "Byte"
pred x
| x /= minBound = x - 1
| otherwise = predError "Byte"
toEnum i@(I# i#)
| i >= fromIntegral (minBound::Byte) && i <= fromIntegral (maxBound::Byte)
= B# (int2jbyte# i#)
| otherwise = toEnumError "Byte" i (minBound::Byte, maxBound::Byte)
fromEnum (B# x#) = I# (jbyte2int# x#)
enumFrom = boundedEnumFrom
enumFromThen = boundedEnumFromThen
instance Integral Byte where
quot x@(B# x#) y@(B# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = B# (int2jbyte# ((jbyte2int# x#) `quotInt#` (jbyte2int# y#)))
rem (B# x#) y@(B# y#)
| y == 0 = divZeroError
| otherwise = B# (int2jbyte# ((jbyte2int# x#) `remInt#` (jbyte2int# y#)))
div x@(B# x#) y@(B# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = B# (int2jbyte# ((jbyte2int# x#) `divInt#` (jbyte2int# y#)))
mod (B# x#) y@(B# y#)
| y == 0 = divZeroError
| otherwise = B# (int2jbyte# ((jbyte2int# x#) `modInt#` (jbyte2int# y#)))
quotRem x@(B# x#) y@(B# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = case (jbyte2int# x#) `quotRemInt#` (jbyte2int# y#) of
(# q, r #) ->
(B# (int2jbyte# q),
B# (int2jbyte# r))
divMod x@(B# x#) y@(B# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = case (jbyte2int# x#) `divModInt#` (jbyte2int# y#) of
(# d, m #) ->
(B# (int2jbyte# d),
B# (int2jbyte# m))
toInteger (B# x#) = smallInteger (jbyte2int# x#)
instance Ix Byte where
range (m,n) = [m..n]
unsafeIndex (m,_) i = fromIntegral i - fromIntegral m
inRange (m,n) i = m <= i && i <= n
instance Show Byte where
showsPrec p x = showsPrec p (fromIntegral x :: Int)
instance Read Byte where
readsPrec p s = [(fromIntegral (x::Int), r) | (x, r) <- readsPrec p s]
instance Bits Byte where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
(B# x#) .&. (B# y#) = B# (int2jbyte# (word2Int# (int2Word# (jbyte2int# x#) `and#` int2Word# (jbyte2int# y#))))
(B# x#) .|. (B# y#) = B# (int2jbyte# (word2Int# (int2Word# (jbyte2int# x#) `or#` int2Word# (jbyte2int# y#))))
(B# x#) `xor` (B# y#) = B# (int2jbyte# (word2Int# (int2Word# (jbyte2int# x#) `xor#` int2Word# (jbyte2int# y#))))
complement (B# x#) = B# (int2jbyte# (word2Int# (not# (int2Word# (jbyte2int# x#)))))
(B# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = B# (int2jbyte# (narrow8Int# ((jbyte2int# x#) `iShiftL#` i#)))
| otherwise = B# (int2jbyte# ((jbyte2int# x#) `iShiftRA#` negateInt# i#))
(B# x#) `shiftL` (I# i#) = B# (int2jbyte# (narrow8Int# ((jbyte2int# x#) `iShiftL#` i#)))
(B# x#) `unsafeShiftL` (I# i#) = B# (int2jbyte# (narrow8Int# ((jbyte2int# x#) `uncheckedIShiftL#` i#)))
(B# x#) `shiftR` (I# i#) = B# (int2jbyte# ((jbyte2int# x#) `iShiftRA#` i#))
(B# x#) `unsafeShiftR` (I# i#) = B# (int2jbyte# ((jbyte2int# x#) `uncheckedIShiftRA#` i#))
(B# x#) `rotate` (I# i#)
| isTrue# (i'# ==# 0#)
= B# x#
| otherwise
= B# (int2jbyte# (narrow8Int# (word2Int# ((x'# `uncheckedShiftL#` i'#) `or#`
(x'# `uncheckedShiftRL#` (8# -# i'#))))))
where
!x'# = narrow8Word# (int2Word# (jbyte2int# x#))
!i'# = word2Int# (int2Word# i# `and#` 7##)
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
isSigned _ = True
popCount (B# x#) = I# (word2Int# (popCnt8# (int2Word# (jbyte2int# x#))))
bit = bitDefault
testBit = testBitDefault
instance FiniteBits Byte where
finiteBitSize _ = 8
countLeadingZeros (B# x#) = I# (word2Int# (clz8# (int2Word# (jbyte2int# x#))))
countTrailingZeros (B# x#) = I# (word2Int# (ctz8# (int2Word# (jbyte2int# x#))))
{- | The Short type (16-bit signed integer) with associated instances. -}
data Short = S# JShort#
instance Eq Short where
(==) (S# x) (S# y) = isTrue# ((jshort2int# x) ==# (jshort2int# y))
instance Ord Short where
compare (S# x) (S# y) = compareInt# (jshort2int# x) (jshort2int# y)
(<) (S# x) (S# y) = isTrue# ((jshort2int# x) <# (jshort2int# y))
(<=) (S# x) (S# y) = isTrue# ((jshort2int# x) <=# (jshort2int# y))
(>=) (S# x) (S# y) = isTrue# ((jshort2int# x) >=# (jshort2int# y))
(>) (S# x) (S# y) = isTrue# ((jshort2int# x) ># (jshort2int# y))
instance Num Short where
(S# x#) + (S# y#) = S# (int2jshort# ((jshort2int# x#) +# (jshort2int# y#)))
(S# x#) - (S# y#) = S# (int2jshort# ((jshort2int# x#) -# (jshort2int# y#)))
(S# x#) * (S# y#) = S# (int2jshort# ((jshort2int# x#) *# (jshort2int# y#)))
negate (S# x#) = S# (int2jshort# (negateInt# (jshort2int# x#)))
abs x | x >= 0 = x
| otherwise = negate x
signum x | x > 0 = 1
signum 0 = 0
signum _ = -1
fromInteger i = S# (int2jshort# (integerToInt i))
instance Real Short where
toRational x = toInteger x % 1
instance Bounded Short where
minBound = -0x8000
maxBound = 0x7FFF
instance Enum Short where
succ x
| x /= maxBound = x + 1
| otherwise = succError "Short"
pred x
| x /= minBound = x - 1
| otherwise = predError "Short"
toEnum i@(I# i#)
| i >= fromIntegral (minBound::Short) && i <= fromIntegral (maxBound::Short)
= S# (int2jshort# i#)
| otherwise = toEnumError "Short" i (minBound::Short, maxBound::Short)
fromEnum (S# x#) = I# (jshort2int# x#)
enumFrom = boundedEnumFrom
enumFromThen = boundedEnumFromThen
instance Integral Short where
quot x@(S# x#) y@(S# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = S# (int2jshort# ((jshort2int# x#) `quotInt#` (jshort2int# y#)))
rem (S# x#) y@(S# y#)
| y == 0 = divZeroError
| otherwise = S# (int2jshort# ((jshort2int# x#) `remInt#` (jshort2int# y#)))
div x@(S# x#) y@(S# y#)
| y == 0 = divZeroError
| y == (-1) && x == minBound = overflowError -- Note [Order of tests]
| otherwise = S# (int2jshort# ((jshort2int# x#) `divInt#` (jshort2int# y#)))
mod (S# x#) y@(S# y#)
| y == 0 = divZeroError
| otherwise = S# (int2jshort# ((jshort2int# x#) `modInt#` (jshort2int# y#)))
quotRem x@(S# x#) y@(S# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = case (jshort2int# x#) `quotRemInt#` (jshort2int# y#) of
(# q, r #) ->
(S# (int2jshort# q),
S# (int2jshort# r))
divMod x@(S# x#) y@(S# y#)
| y == 0 = divZeroError
-- Note [Order of tests]
| y == (-1) && x == minBound = (overflowError, 0)
| otherwise = case (jshort2int# x#) `divModInt#` (jshort2int# y#) of
(# d, m #) ->
(S# (int2jshort# d),
S# (int2jshort# m))
toInteger (S# x#) = smallInteger (jshort2int# x#)
instance Ix Short where
range (m,n) = [m..n]
unsafeIndex (m,_) i = fromIntegral i - fromIntegral m
inRange (m,n) i = m <= i && i <= n
instance Show Short where
showsPrec p x = showsPrec p (fromIntegral x :: Int)
instance Read Short where
readsPrec p s = [(fromIntegral (x::Int), r) | (x, r) <- readsPrec p s]
instance Bits Short where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
(S# x#) .&. (S# y#) = S# (int2jshort# (word2Int# (int2Word# (jshort2int# x#) `and#` int2Word# (jshort2int# y#))))
(S# x#) .|. (S# y#) = S# (int2jshort# (word2Int# (int2Word# (jshort2int# x#) `or#` int2Word# (jshort2int# y#))))
(S# x#) `xor` (S# y#) = S# (int2jshort# (word2Int# (int2Word# (jshort2int# x#) `xor#` int2Word# (jshort2int# y#))))
complement (S# x#) = S# (int2jshort# (word2Int# (not# (int2Word# (jshort2int# x#)))))
(S# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = S# (int2jshort# (narrow16Int# ((jshort2int# x#) `iShiftL#` i#)))
| otherwise = S# (int2jshort# ((jshort2int# x#) `iShiftRA#` negateInt# i#))
(S# x#) `shiftL` (I# i#) = S# (int2jshort# (narrow16Int# ((jshort2int# x#) `iShiftL#` i#)))
(S# x#) `unsafeShiftL` (I# i#) = S# (int2jshort# (narrow16Int# ((jshort2int# x#) `uncheckedIShiftL#` i#)))
(S# x#) `shiftR` (I# i#) = S# (int2jshort# ((jshort2int# x#) `iShiftRA#` i#))
(S# x#) `unsafeShiftR` (I# i#) = S# (int2jshort# ((jshort2int# x#) `uncheckedIShiftRA#` i#))
(S# x#) `rotate` (I# i#)
| isTrue# (i'# ==# 0#)
= S# x#
| otherwise
= S# (int2jshort# (narrow16Int# (word2Int# ((x'# `uncheckedShiftL#` i'#) `or#`
(x'# `uncheckedShiftRL#` (16# -# i'#))))))
where
!x'# = narrow16Word# (int2Word# (jshort2int# x#))
!i'# = word2Int# (int2Word# i# `and#` 15##)
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
isSigned _ = True
popCount (S# x#) = I# (word2Int# (popCnt16# (int2Word# (jshort2int# x#))))
bit = bitDefault
testBit = testBitDefault
instance FiniteBits Short where
finiteBitSize _ = 16
countLeadingZeros (S# x#) = I# (word2Int# (clz16# (int2Word# (jshort2int# x#))))
countTrailingZeros (S# x#) = I# (word2Int# (ctz16# (int2Word# (jshort2int# x#))))
{- | The JChar type (16-bit unsigned integer) with associated instances. -}
data JChar = JC# JChar#
instance Eq JChar where
(==) (JC# x) (JC# y) = isTrue# ((jchar2word# x) `eqWord#` (jchar2word# y))
instance Ord JChar where
compare (JC# x) (JC# y) = compareWord# (jchar2word# x) (jchar2word# y)
(<) (JC# x) (JC# y) = isTrue# ((jchar2word# x) `ltWord#` (jchar2word# y))
(<=) (JC# x) (JC# y) = isTrue# ((jchar2word# x) `leWord#` (jchar2word# y))
(>=) (JC# x) (JC# y) = isTrue# ((jchar2word# x) `geWord#` (jchar2word# y))
(>) (JC# x) (JC# y) = isTrue# ((jchar2word# x) `gtWord#` (jchar2word# y))
instance Num JChar where
(JC# x#) + (JC# y#) = JC# (word2jchar# ((jchar2word# x#) `plusWord#` (jchar2word# y#)))
(JC# x#) - (JC# y#) = JC# (word2jchar# ((jchar2word# x#) `minusWord#` (jchar2word# y#)))
(JC# x#) * (JC# y#) = JC# (word2jchar# ((jchar2word# x#) `timesWord#` (jchar2word# y#)))
negate (JC# x#) = JC# (word2jchar# (int2Word# (negateInt# (word2Int# (jchar2word# x#)))))
abs x = x
signum 0 = 0
signum _ = 1
fromInteger i = JC# (word2jchar# (integerToWord i))
instance Real JChar where
toRational x = toInteger x % 1
instance Bounded JChar where
minBound = 0
maxBound = 0xFFFF
instance Enum JChar where
succ x
| x /= maxBound = x + 1
| otherwise = succError "JChar"
pred x
| x /= minBound = x - 1
| otherwise = predError "JChar"
toEnum i@(I# i#)
| i >= 0 && i <= fromIntegral (maxBound::JChar)
= JC# (word2jchar# (int2Word# i#))
| otherwise = toEnumError "JChar" i (minBound::JChar, maxBound::JChar)
fromEnum (JC# x#) = I# (word2Int# (jchar2word# x#))
enumFrom = boundedEnumFrom
enumFromThen = boundedEnumFromThen
instance Integral JChar where
quot x@(JC# x#) y@(JC# y#)
| y /= 0 = JC# (word2jchar# ((jchar2word# x#) `quotWord#` (jchar2word# y#)))
| otherwise = divZeroError
rem (JC# x#) y@(JC# y#)
| y /= 0 = JC# (word2jchar# ((jchar2word# x#) `remWord#` (jchar2word# y#)))
| otherwise = divZeroError
div x@(JC# x#) y@(JC# y#)
| y /= 0 = JC# (word2jchar# ((jchar2word# x#) `quotWord#` (jchar2word# y#)))
| otherwise = divZeroError
mod (JC# x#) y@(JC# y#)
| y /= 0 = JC# (word2jchar# ((jchar2word# x#) `remWord#` (jchar2word# y#)))
| otherwise = divZeroError
quotRem x@(JC# x#) y@(JC# y#)
| y /= 0 = case (jchar2word# x#) `quotRemWord#` (jchar2word# y#) of
(# q, r #) ->
(JC# (word2jchar# q),
JC# (word2jchar# r))
| otherwise = divZeroError
divMod (JC# x#) y@(JC# y#)
| y /= 0 = (JC# (word2jchar# ((jchar2word# x#) `quotWord#` (jchar2word# y#))), JC# (word2jchar# ((jchar2word# x#) `remWord#` (jchar2word# y#))))
| otherwise = divZeroError
toInteger (JC# x#) = smallInteger (word2Int# (jchar2word# x#))
instance Ix JChar where
range (m,n) = [m..n]
unsafeIndex (m,_) i = fromIntegral (i - m)
inRange (m,n) i = m <= i && i <= n
instance Show JChar where
showsPrec p x = showsPrec p (fromIntegral x :: Int)
instance Read JChar where
readsPrec p s = [(fromIntegral (x::Int), r) | (x, r) <- readsPrec p s]
instance Bits JChar where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
(JC# x#) .&. (JC# y#) = JC# (word2jchar# ((jchar2word# x#) `and#` (jchar2word# y#)))
(JC# x#) .|. (JC# y#) = JC# (word2jchar# ((jchar2word# x#) `or#` (jchar2word# y#)))
(JC# x#) `xor` (JC# y#) = JC# (word2jchar# ((jchar2word# x#) `xor#` (jchar2word# y#)))
complement (JC# x#) = JC# (word2jchar# ((jchar2word# x#) `xor#` (jchar2word# mb#)))
where !(JC# mb#) = maxBound
(JC# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = JC# (word2jchar# ((jchar2word# x#) `shiftL#` i#))
| otherwise = JC# (word2jchar# ((jchar2word# x#) `shiftRL#` negateInt# i#))
(JC# x#) `shiftL` (I# i#) = JC# (word2jchar# ((jchar2word# x#) `shiftL#` i#))
(JC# x#) `unsafeShiftL` (I# i#) = JC# (word2jchar# ((jchar2word# x#) `uncheckedShiftL#` i#))
(JC# x#) `shiftR` (I# i#) = JC# (word2jchar# ((jchar2word# x#) `shiftRL#` i#))
(JC# x#) `unsafeShiftR` (I# i#) = JC# (word2jchar# ((jchar2word# x#) `uncheckedShiftRL#` i#))
(JC# x#) `rotate` (I# i#)
| isTrue# (i'# ==# 0#) = JC# x#
| otherwise = JC# (word2jchar# (((jchar2word# x#) `uncheckedShiftL#` i'#) `or#`
((jchar2word# x#) `uncheckedShiftRL#` (16# -# i'#))))
where
!i'# = word2Int# (int2Word# i# `and#` 15##)
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
  isSigned _ = False -- JChar is unsigned (unlike Byte and Short)
popCount (JC# x#) = I# (word2Int# (popCnt16# (jchar2word# x#)))
bit = bitDefault
testBit = testBitDefault
instance FiniteBits JChar where
finiteBitSize _ = 16
countLeadingZeros (JC# x#) = I# (word2Int# (clz16# (jchar2word# x#)))
countTrailingZeros (JC# x#) = I# (word2Int# (ctz16# (jchar2word# x#)))
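-- A small illustration (not part of the original module), assuming int2jbyte#
-- truncates like Java's i2b instruction: arithmetic on these fixed-width types
-- wraps around on overflow rather than failing.
--
-- >>> (maxBound :: Byte) + 1
-- -128
-- >>> fromIntegral (300 :: Int) :: Byte
-- 44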
|
rahulmutt/ghcvm
|
libraries/base/Java/PrimitiveBase.hs
|
bsd-3-clause
| 18,887
| 0
| 19
| 6,330
| 7,586
| 3,878
| 3,708
| 335
| 0
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Network.Helics
( HelicsConfig(..)
, withHelics
, sampler
-- * metric
, recordMetric
, recordCpuUsage
, recordMemoryUsage
-- * transaction
, TransactionType(..)
, TransactionId
, withTransaction
, addAttribute
, setRequestUrl
, setMaxTraceSegments
, TransactionError(..)
, setError
, noticeError
, clearError
-- * segment
, SegmentId
, autoScope
, rootSegment
, genericSegment
, Operation(..)
, DatastoreSegment(..)
, datastoreSegment
, externalSegment
-- * status code
, StatusCode
, statusShutdown
, statusStarting
, statusStopping
, statusStarted
-- * reexports
, def
) where
import Data.IORef
import Data.Default.Class
import qualified Data.ByteString as S
import Network.Helics.Internal.Types
autoScope, rootSegment :: SegmentId
autoScope = SegmentId 0
rootSegment = SegmentId 1
statusShutdown, statusStarting, statusStopping, statusStarted :: StatusCode
statusShutdown = StatusCode 0
statusStarting = StatusCode 1
statusStopping = StatusCode 2
statusStarted = StatusCode 3
-- | Start the New Relic® collector client.
-- You must call this function when running in embedded mode.
withHelics :: HelicsConfig -> IO a -> IO a
withHelics _ m = m
-- | Record a custom metric.
recordMetric :: S.ByteString -> Double -> IO ()
recordMetric _ _ = return ()
-- | Sample and send CPU/memory usage metrics.
sampler :: Int -- ^ sampling frequency (sec)
-> IO ()
sampler _ = return ()
-- | Record CPU usage. Normally you don't need to call this directly; use 'sampler'.
recordCpuUsage :: Double -> Double -> IO ()
recordCpuUsage _ _ = return ()
-- | Record memory usage. Normally you don't need to call this directly; use 'sampler'.
recordMemoryUsage :: Double -> IO ()
recordMemoryUsage _ = return ()
withTransaction :: S.ByteString -- ^ name of transaction
-> TransactionType -> (TransactionId -> IO c) -> IO c
withTransaction _ _ m = do
ref <- newIORef Nothing
m (TransactionId 0 ref)
genericSegment :: SegmentId -- ^ parent segment id
               -> S.ByteString -- ^ name of the segment
-> IO c -- ^ action in segment
-> TransactionId
-> IO c
genericSegment _ _ m _ = m
datastoreSegment :: SegmentId -> DatastoreSegment -> IO a -> TransactionId -> IO a
datastoreSegment _ _ m _ = m
externalSegment :: SegmentId
-> S.ByteString -- ^ host of segment
-> S.ByteString -- ^ name of segment
-> IO a -> TransactionId -> IO a
externalSegment _ _ _ m _ = m
addAttribute :: S.ByteString -> S.ByteString -> TransactionId -> IO ()
addAttribute _ _ _ = return ()
setRequestUrl :: S.ByteString -> TransactionId -> IO ()
setRequestUrl _ _ = return ()
setMaxTraceSegments :: Int -> TransactionId -> IO ()
setMaxTraceSegments _ _ = return ()
setError :: Maybe TransactionError -> TransactionId -> IO ()
setError _ _ = return ()
noticeError :: TransactionError -> TransactionId -> IO ()
noticeError _ _ = return ()
clearError :: TransactionId -> IO ()
clearError _ = return ()
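-- A minimal usage sketch (not part of the original module). The concrete
-- 'TransactionType' constructors live in Network.Helics.Internal.Types, so the
-- type is taken as a parameter here, and 'def' is assumed to be the re-exported
-- default 'HelicsConfig':
--
-- exampleTransaction :: TransactionType -> IO ()
-- exampleTransaction ttype =
--   withHelics def $
--     withTransaction "example" ttype $ \tid -> do
--       addAttribute "key" "value" tid
--       genericSegment rootSegment "work" (putStrLn "doing work") tid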
|
dzotokan/helics
|
dummy/Network/Helics.hs
|
mit
| 3,247
| 0
| 10
| 793
| 792
| 428
| 364
| 87
| 1
|
module Language
( languageField
, languageList
) where
import Import
languageField :: Field Handler Text
languageField = selectFieldList languageList
languageList :: [(Text, Text)]
languageList =
[ ("Afrikaans", "af")
, ("Albanian", "sq")
, ("Basque", "eu")
, ("Belarusian", "be")
, ("Bulgarian", "bg")
, ("Catalan", "ca")
, ("Chinese (Simplified)", "zh-cn")
, ("Chinese (Traditional)", "zh-tw")
, ("Croatian", "hr")
, ("Czech", "cs")
, ("Danish", "da")
, ("Dutch", "nl")
, ("Dutch (Belgium)", "nl-be")
, ("Dutch (Netherlands)", "nl-nl")
, ("English", "en")
, ("English (Australia)", "en-au")
, ("English (Belize)", "en-bz")
, ("English (Canada)", "en-ca")
, ("English (Ireland)", "en-ie")
, ("English (Jamaica)", "en-jm")
, ("English (New Zealand)", "en-nz")
, ("English (Phillipines)", "en-ph")
, ("English (South Africa)", "en-za")
, ("English (Trinidad)", "en-tt")
, ("English (United Kingdom)", "en-gb")
, ("English (United States)", "en-us")
, ("English (Zimbabwe)", "en-zw")
, ("Estonian", "et")
, ("Faeroese", "fo")
, ("Finnish", "fi")
, ("French", "fr")
, ("French (Belgium)", "fr-be")
, ("French (Canada)", "fr-ca")
, ("French (France)", "fr-fr")
, ("French (Luxembourg)", "fr-lu")
, ("French (Monaco)", "fr-mc")
, ("French (Switzerland)", "fr-ch")
, ("Galician", "gl")
, ("Gaelic", "gd")
, ("German", "de")
, ("German (Austria)", "de-at")
, ("German (Germany)", "de-de")
, ("German (Liechtenstein)", "de-li")
, ("German (Luxembourg)", "de-lu")
, ("German (Switzerland)", "de-ch")
, ("Greek", "el")
, ("Hawaiian", "haw")
, ("Hungarian", "hu")
, ("Icelandic", "is")
, ("Indonesian", "in")
, ("Irish", "ga")
, ("Italian", "it")
, ("Italian (Italy)", "it-it")
, ("Italian (Switzerland)", "it-ch")
, ("Japanese", "ja")
, ("Korean", "ko")
, ("Macedonian", "mk")
, ("Norwegian", "no")
, ("Polish", "pl")
, ("Portuguese", "pt")
, ("Portuguese (Brazil)", "pt-br")
, ("Portuguese (Portugal)", "pt-pt")
, ("Romanian", "ro")
, ("Romanian (Moldova)", "ro-mo")
, ("Romanian (Romania)", "ro-ro")
, ("Russian", "ru")
, ("Russian (Moldova)", "ru-mo")
, ("Russian (Russia)", "ru-ru")
, ("Serbian", "sr")
, ("Slovak", "sk")
, ("Slovenian", "sl")
, ("Spanish", "es")
, ("Spanish (Argentina)", "es-ar")
, ("Spanish (Bolivia)", "es-bo")
, ("Spanish (Chile)", "es-cl")
, ("Spanish (Colombia)", "es-co")
, ("Spanish (Costa Rica)", "es-cr")
, ("Spanish (Dominican Republic)", "es-do")
, ("Spanish (Ecuador)", "es-ec")
, ("Spanish (El Salvador)", "es-sv")
, ("Spanish (Guatemala)", "es-gt")
, ("Spanish (Honduras)", "es-hn")
, ("Spanish (Mexico)", "es-mx")
, ("Spanish (Nicaragua)", "es-ni")
, ("Spanish (Panama)", "es-pa")
, ("Spanish (Paraguay)", "es-py")
, ("Spanish (Peru)", "es-pe")
, ("Spanish (Puerto Rico)", "es-pr")
, ("Spanish (Spain)", "es-es")
, ("Spanish (Uruguay)", "es-uy")
, ("Spanish (Venezuela)", "es-ve")
, ("Swedish", "sv")
, ("Swedish (Finland)", "sv-fi")
, ("Swedish (Sweden)", "sv-se")
, ("Turkish", "tr")
, ("Ukranian", "uk")
]
|
vaporware/carnival
|
Language.hs
|
mit
| 3,340
| 0
| 6
| 781
| 916
| 607
| 309
| 104
| 1
|
{-# LANGUAGE ScopedTypeVariables, RecursiveDo #-}
import Data.Char
import System.Environment
import Control.Applicative
import Text.Earley
data Expr
= Expr :+: Expr
| Expr :*: Expr
| Var String
| Lit Int
deriving (Show)
grammar :: forall r. Grammar r (Prod r String Char Expr)
grammar = mdo
whitespace <- rule $ many $ satisfy isSpace
let token :: Prod r String Char a -> Prod r String Char a
token p = whitespace *> p
sym x = token $ symbol x <?> [x]
ident = token $ (:) <$> satisfy isAlpha <*> many (satisfy isAlphaNum) <?> "identifier"
num = token $ some (satisfy isDigit) <?> "number"
expr0 <- rule
$ (Lit . read) <$> num
<|> Var <$> ident
<|> sym '(' *> expr2 <* sym ')'
expr1 <- rule
$ (:*:) <$> expr1 <* sym '*' <*> expr0
<|> expr0
expr2 <- rule
$ (:+:) <$> expr2 <* sym '+' <*> expr1
<|> expr1
return $ expr2 <* whitespace
main :: IO ()
main = do
x:_ <- getArgs
print $ fullParses (parser grammar) x
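-- For reference (a sketch, not part of the original example): '*' binds more
-- tightly than '+' and both chains are left-recursive, so the only parse of
-- "a + b * 2" is
--
--   Var "a" :+: (Var "b" :*: Lit 2)
--
-- which is the single element of the list returned by 'fullParses' above.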
|
bitemyapp/Earley
|
examples/Expr2.hs
|
bsd-3-clause
| 1,006
| 0
| 16
| 273
| 396
| 199
| 197
| 34
| 1
|
{-# LANGUAGE OverloadedStrings #-}
-- | Convenience module for debugging streams. Provides stream transformers
-- that wrap 'InputStream's and 'OutputStream's, sending a description of all
-- data to an 'OutputStream' for debugging.
module System.IO.Streams.Debug
( -- * Debuggers
debugInput
, debugOutput
, debugInputBS
, debugOutputBS
) where
------------------------------------------------------------------------------
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as S
------------------------------------------------------------------------------
import System.IO.Streams.Internal (InputStream (..), OutputStream)
import qualified System.IO.Streams.Internal as Streams
------------------------------------------------------------------------------
debugInput ::
(a -> ByteString) -- ^ function to convert stream elements to
-- 'ByteString'
-> ByteString -- ^ name of this debug stream, will be
-- prepended to debug output
-> OutputStream ByteString -- ^ stream the debug info will be sent to
-> InputStream a -- ^ input stream
-> IO (InputStream a)
debugInput toBS name debugStream inputStream = return $ InputStream produce pb
where
produce = do
m <- Streams.read inputStream
Streams.write (Just $! describe m) debugStream
return m
pb c = do
let s = S.concat [name, ": pushback: ", toBS c, "\n"]
Streams.write (Just s) debugStream
Streams.unRead c inputStream
describe m = S.concat [name, ": got ", describeChunk m, "\n"]
describeChunk Nothing = "EOF"
describeChunk (Just s) = S.concat [ "chunk: ", toBS s ]
------------------------------------------------------------------------------
debugInputBS ::
ByteString -- ^ name of this debug stream, will be
-- prepended to debug output
-> OutputStream ByteString -- ^ stream the debug info will be sent to
-> InputStream ByteString -- ^ input stream
-> IO (InputStream ByteString)
debugInputBS = debugInput condense
------------------------------------------------------------------------------
debugOutput :: (a -> ByteString) -- ^ function to convert stream
-- elements to 'ByteString'
-> ByteString -- ^ name of this debug stream, will be
-- prepended to debug output
-> OutputStream ByteString -- ^ debug stream
-> OutputStream a -- ^ output stream
-> IO (OutputStream a)
debugOutput toBS name debugStream outputStream =
Streams.makeOutputStream f
where
f m = do
Streams.write (Just $ describe m) debugStream
Streams.write m outputStream
describe m = S.concat [name, ": got ", describeChunk m, "\n"]
describeChunk Nothing = "EOF"
describeChunk (Just s) = S.concat [ "chunk: ", toBS s]
------------------------------------------------------------------------------
debugOutputBS ::
ByteString -- ^ name of this debug stream, will be
-- prepended to debug output
-> OutputStream ByteString -- ^ stream the debug info will be sent to
-> OutputStream ByteString -- ^ output stream
-> IO (OutputStream ByteString)
debugOutputBS = debugOutput condense
------------------------------------------------------------------------------
condense :: ByteString -> ByteString
condense s | l < 32 = S.concat [ "\"", s, "\"" ]
| otherwise = S.concat [
"\""
, S.take k s
, " ... "
, S.drop (l - k) s
, "\" ("
, S.pack (show l)
, " bytes)"
]
where
k = 14
l = S.length s
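------------------------------------------------------------------------------
-- A minimal usage sketch (not part of the original module), using the public
-- io-streams API ('fromList', 'stdout', 'toList') to wrap an input stream:
--
-- import qualified System.IO.Streams as IOS
--
-- demo :: IO [ByteString]
-- demo = do
--     input  <- IOS.fromList ["hello", "world"]
--     input' <- debugInputBS "demo" IOS.stdout input
--     IOS.toList input'   -- debug lines are written to stdout as chunks are read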
|
LukeHoersten/io-streams
|
src/System/IO/Streams/Debug.hs
|
bsd-3-clause
| 4,042
| 0
| 14
| 1,240
| 712
| 381
| 331
| 66
| 2
|
import Yi
-- Import the desired UI as needed.
-- Some are not compiled in, so we import none here.
-- import Yi.UI.Vty (start)
-- import Yi.UI.Pango (start)
myConfig :: Config
myConfig = defaultCuaConfig -- replace with defaultVimConfig or defaultEmacsConfig if preferred
defaultUIConfig :: UIConfig
defaultUIConfig = configUI myConfig
-- Change the below to your needs, following the explanation in comments. See
-- module Yi.Config for more information on configuration. Other configuration
-- examples can be found in the examples directory. You can also use or copy
-- another user configuration, which can be found in modules Yi.Users.*
main :: IO ()
main = yi $ myConfig
{
-- Keymap Configuration
defaultKm = defaultKm myConfig,
-- UI Configuration
-- Override the default UI as such:
startFrontEnd = startFrontEnd myConfig,
-- Yi.UI.Vty.start -- for Vty
-- (can be overridden at the command line)
-- Options:
configUI = defaultUIConfig
{
configFontSize = Nothing,
-- 'Just 10' for specifying the size.
configTheme = configTheme defaultUIConfig,
-- darkBlueTheme -- Change the color scheme here.
configWindowFill = ' '
}
}
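-- For example (a sketch using only options mentioned in the comments above),
-- a fixed font size and the dark blue colour scheme would look like:
--
-- configUI = defaultUIConfig
-- {
-- configFontSize = Just 10,
-- configTheme = darkBlueTheme,
-- configWindowFill = ' '
-- }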
|
coreyoconnor/yi
|
example-configs/yi-cua.hs
|
gpl-2.0
| 1,248
| 0
| 10
| 308
| 112
| 71
| 41
| 13
| 1
|
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
module SingletonsBug where
import Control.Applicative
import Data.Traversable (for)
import Data.Kind (Type, Constraint)
-----------------------------------
-- From 'constraints' library
-- import Data.Constraint (Dict(..))
data Dict :: Constraint -> Type where
Dict :: a => Dict a
-----------------------------------
-- From 'singletons' library
-- import Data.Singletons hiding( withSomeSing )
class SingI (a :: k) where
-- | Produce the singleton explicitly. You will likely need the @ScopedTypeVariables@
-- extension to use this method the way you want.
sing :: Sing a
data family Sing (a :: k)
data KProxy (a :: Type) = KProxy
data SomeSing (kproxy :: KProxy k) where
SomeSing :: Sing (a :: k) -> SomeSing ('KProxy :: KProxy k)
-- SingKind :: forall k. KProxy k -> Constraint
class (kparam ~ 'KProxy) => SingKind (kparam :: KProxy k) where
-- | Get a base type from a proxy for the promoted kind. For example,
-- @DemoteRep ('KProxy :: KProxy Bool)@ will be the type @Bool@.
type DemoteRep kparam :: Type
-- | Convert a singleton to its unrefined version.
fromSing :: Sing (a :: k) -> DemoteRep kparam
-- | Convert an unrefined type to an existentially-quantified singleton type.
toSing :: DemoteRep kparam -> SomeSing kparam
withSomeSing :: SingKind ('KProxy :: KProxy k)
=> DemoteRep ('KProxy :: KProxy k)
-> (forall (a :: k). Sing a -> r)
-> r
withSomeSing _ _ = error "urk"
-----------------------------------
data SubscriptionChannel = BookingsChannel
type BookingsChannelSym0 = BookingsChannel
data instance Sing (z_a5I7 :: SubscriptionChannel) where
SBookingsChannel :: Sing BookingsChannel
instance SingKind ('KProxy :: KProxy SubscriptionChannel) where
type DemoteRep ('KProxy :: KProxy SubscriptionChannel) = SubscriptionChannel
fromSing SBookingsChannel = BookingsChannel
toSing BookingsChannel = SomeSing SBookingsChannel
instance SingI BookingsChannel where
sing = SBookingsChannel
type family T (c :: SubscriptionChannel) :: Type
type instance T 'BookingsChannel = Bool
witnessC :: Sing channel -> Dict (Show (T channel), SingI channel)
witnessC SBookingsChannel = Dict
forAllSubscriptionChannels
:: forall m r. (Applicative m)
=> (forall channel. (SingI channel, Show (T channel)) => Sing channel -> m r)
-> m r
forAllSubscriptionChannels f =
withSomeSing BookingsChannel $ \(sChannel) ->
case witnessC sChannel of
Dict -> f sChannel
|
sdiehl/ghc
|
testsuite/tests/indexed-types/should_compile/T9316.hs
|
bsd-3-clause
| 2,822
| 5
| 13
| 503
| 613
| 342
| 271
| 54
| 1
|
{-# LANGUAGE OverloadedStrings, CPP #-}
module Haste.Timer (Timer, Interval (..), setTimer, stopTimer) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Control.Monad.IO.Class
import Haste.Foreign
import Haste.Events.Core
type Identifier = Int
-- | Timer handle.
data Timer = Timer !Identifier !Interval
-- | Interval and repeat for timers.
data Interval
= Once !Int -- ^ Fire once, in n milliseconds.
| Repeat !Int -- ^ Fire every n milliseconds.
-- | Set a timer.
setTimer :: MonadEvent m
=> Interval -- ^ Milliseconds until timer fires.
-> m () -- ^ Function to call when timer fires.
-> m Timer -- ^ Timer handle for interacting with the timer.
setTimer i f = do
f' <- mkHandler $ const f
liftIO $ do
flip Timer i <$> case i of
Once n -> timeout n (f' ())
Repeat n -> interval n (f' ())
timeout :: Int -> IO () -> IO Int
timeout = ffi "(function(t,f){window.setTimeout(f,t);})"
interval :: Int -> IO () -> IO Int
interval = ffi "(function(t,f){window.setInterval(f,t);})"
-- | Stop a timer.
stopTimer :: MonadIO m => Timer -> m ()
stopTimer (Timer ident (Once _)) = liftIO $ clearTimeout ident
stopTimer (Timer ident (Repeat _)) = liftIO $ clearInterval ident
clearTimeout :: Int -> IO ()
clearTimeout = ffi "(function(id){window.clearTimeout(id);})"
clearInterval :: Int -> IO ()
clearInterval = ffi "(function(id){window.clearInterval(id);})"
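-- A minimal usage sketch (not part of the original module), assuming the
-- MonadEvent instance for IO provided by Haste.Events.Core:
--
-- demo :: IO ()
-- demo = do
--   t <- setTimer (Repeat 1000) (putStrLn "tick")
--   _ <- setTimer (Once 5500) (stopTimer t) -- cancel the repeating timer later
--   return ()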
|
jtojnar/haste-compiler
|
libraries/haste-lib/src/Haste/Timer.hs
|
bsd-3-clause
| 1,455
| 0
| 17
| 302
| 394
| 203
| 191
| 40
| 2
|
{-# LANGUAGE CPP #-}
-- ---------------------------------------------------------------------------
-- |
-- Module : Data.Vector.Algorithms.Optimal
-- Copyright : (c) 2008-2010 Dan Doel
-- Maintainer : Dan Doel
-- Stability : Experimental
-- Portability : Portable
--
-- Optimal sorts for very small array sizes, or for small numbers of
-- particular indices in a larger array (to be used, for instance, for
-- sorting a median of 3 values into the lowest position in an array
-- for a median-of-3 quicksort).
-- The code herein was adapted from a C algorithm for optimal sorts
-- of small arrays. The original code was produced for the article
-- /Sorting Revisited/ by Paul Hsieh, available here:
--
-- http://www.azillionmonkeys.com/qed/sort.html
--
-- The LICENSE file contains the relevant copyright information for
-- the reference C code.
module Data.Vector.Algorithms.Optimal
( sort2ByIndex
, sort2ByOffset
, sort3ByIndex
, sort3ByOffset
, sort4ByIndex
, sort4ByOffset
, Comparison
) where
import Prelude hiding (read, length)
import Control.Monad.Primitive
import Data.Vector.Generic.Mutable
import Data.Vector.Algorithms.Common (Comparison)
-- LIQUID: seems to break compilation
#include "../../../include/vector.h"
-- | Sorts the elements at the positions 'off' and 'off + 1' in the given
-- array using the comparison.
{-@ sort2ByOffset
:: (PrimMonad m, MVector v e)
=> Comparison e -> vec:(v (PrimState m) e) -> {v:Nat | (OkRng v vec 1)} -> m ()
@-}
sort2ByOffset :: (PrimMonad m, MVector v e)
=> Comparison e -> v (PrimState m) e -> Int -> m ()
sort2ByOffset cmp a off = sort2ByIndex cmp a off (off + 1)
{-# INLINABLE sort2ByOffset #-}
-- | Sorts the elements at the two given indices using the comparison. This
-- is essentially a compare-and-swap, although the first index is assumed to
-- be the 'lower' of the two.
{-@ sort2ByIndex
:: (PrimMonad m, MVector v e)
=> Comparison e -> vec:(v (PrimState m) e)
-> {v:Nat | (OkRng v vec 0)}
-> {v:Nat | (OkRng v vec 0)}
-> m ()
@-}
sort2ByIndex :: (PrimMonad m, MVector v e)
=> Comparison e -> v (PrimState m) e -> Int -> Int -> m ()
sort2ByIndex cmp a i j = UNSAFE_CHECK(checkIndex) "sort2ByIndex" i (length a)
$ UNSAFE_CHECK(checkIndex) "sort2ByIndex" j (length a) $ do
a0 <- unsafeRead a i
a1 <- unsafeRead a j
case cmp a0 a1 of
GT -> unsafeWrite a i a1 >> unsafeWrite a j a0
_ -> return ()
{-# INLINABLE sort2ByIndex #-}
-- | Sorts the three elements starting at the given offset in the array.
{-@ sort3ByOffset
:: (PrimMonad m, MVector v e)
=> Comparison e -> vec:(v (PrimState m) e) -> {v:Nat | (OkRng v vec 2)} -> m ()
@-}
sort3ByOffset :: (PrimMonad m, MVector v e)
=> Comparison e -> v (PrimState m) e -> Int -> m ()
sort3ByOffset cmp a off = sort3ByIndex cmp a off (off + 1) (off + 2)
{-# INLINABLE sort3ByOffset #-}
-- | Sorts the elements at the three given indices. The indices are assumed
-- to be given from lowest to highest, so if 'l < m < u' then
-- 'sort3ByIndex cmp a m l u' essentially sorts the median of three into the
-- lowest position in the array.
{-@ sort3ByIndex
:: (PrimMonad m, MVector v e)
=> Comparison e -> vec:(v (PrimState m) e)
-> {v:Nat | (OkRng v vec 0)}
-> {v:Nat | (OkRng v vec 0)}
-> {v:Nat | (OkRng v vec 0)}
-> m ()
@-}
sort3ByIndex :: (PrimMonad m, MVector v e)
=> Comparison e -> v (PrimState m) e -> Int -> Int -> Int -> m ()
sort3ByIndex cmp a i j k = UNSAFE_CHECK(checkIndex) "sort3ByIndex" i (length a)
$ UNSAFE_CHECK(checkIndex) "sort3ByIndex" j (length a)
$ UNSAFE_CHECK(checkIndex) "sort3ByIndex" k (length a) $ do
a0 <- unsafeRead a i
a1 <- unsafeRead a j
a2 <- unsafeRead a k
case cmp a0 a1 of
GT -> case cmp a0 a2 of
GT -> case cmp a2 a1 of
LT -> do unsafeWrite a i a2
unsafeWrite a k a0
_ -> do unsafeWrite a i a1
unsafeWrite a j a2
unsafeWrite a k a0
_ -> do unsafeWrite a i a1
unsafeWrite a j a0
_ -> case cmp a1 a2 of
GT -> case cmp a0 a2 of
GT -> do unsafeWrite a i a2
unsafeWrite a j a0
unsafeWrite a k a1
_ -> do unsafeWrite a j a2
unsafeWrite a k a1
_ -> return ()
{-# INLINABLE sort3ByIndex #-}
-- | Sorts the four elements beginning at the offset.
{-@ sort4ByOffset
:: (PrimMonad m, MVector v e)
=> Comparison e -> vec:(v (PrimState m) e) -> {v:Nat | (OkRng v vec 3)} -> m ()
@-}
sort4ByOffset :: (PrimMonad m, MVector v e)
=> Comparison e -> v (PrimState m) e -> Int -> m ()
sort4ByOffset cmp a off = sort4ByIndex cmp a off (off + 1) (off + 2) (off + 3)
{-# INLINABLE sort4ByOffset #-}
-- The horror...
-- | Sorts the elements at the four given indices. Like the 2 and 3 element
-- versions, this assumes that the indices are given in increasing order, so
-- it can be used to sort medians into particular positions and so on.
{-@ sort4ByIndex
:: (PrimMonad m, MVector v e)
=> Comparison e -> vec:(v (PrimState m) e)
-> {v:Nat | (OkRng v vec 0)}
-> {v:Nat | (OkRng v vec 0)}
-> {v:Nat | (OkRng v vec 0)}
-> {v:Nat | (OkRng v vec 0)}
-> m ()
@-}
sort4ByIndex :: (PrimMonad m, MVector v e)
=> Comparison e -> v (PrimState m) e -> Int -> Int -> Int -> Int -> m ()
sort4ByIndex cmp a i j k l = UNSAFE_CHECK(checkIndex) "sort4ByIndex" i (length a)
$ UNSAFE_CHECK(checkIndex) "sort4ByIndex" j (length a)
$ UNSAFE_CHECK(checkIndex) "sort4ByIndex" k (length a)
$ UNSAFE_CHECK(checkIndex) "sort4ByIndex" l (length a) $ do
a0 <- unsafeRead a i
a1 <- unsafeRead a j
a2 <- unsafeRead a k
a3 <- unsafeRead a l
case cmp a0 a1 of
GT -> case cmp a0 a2 of
GT -> case cmp a1 a2 of
GT -> case cmp a1 a3 of
GT -> case cmp a2 a3 of
GT -> do unsafeWrite a i a3
unsafeWrite a j a2
unsafeWrite a k a1
unsafeWrite a l a0
_ -> do unsafeWrite a i a2
unsafeWrite a j a3
unsafeWrite a k a1
unsafeWrite a l a0
_ -> case cmp a0 a3 of
GT -> do unsafeWrite a i a2
unsafeWrite a j a1
unsafeWrite a k a3
unsafeWrite a l a0
_ -> do unsafeWrite a i a2
unsafeWrite a j a1
unsafeWrite a k a0
unsafeWrite a l a3
_ -> case cmp a2 a3 of
GT -> case cmp a1 a3 of
GT -> do unsafeWrite a i a3
unsafeWrite a j a1
unsafeWrite a k a2
unsafeWrite a l a0
_ -> do unsafeWrite a i a1
unsafeWrite a j a3
unsafeWrite a k a2
unsafeWrite a l a0
_ -> case cmp a0 a3 of
GT -> do unsafeWrite a i a1
unsafeWrite a j a2
unsafeWrite a k a3
unsafeWrite a l a0
_ -> do unsafeWrite a i a1
unsafeWrite a j a2
unsafeWrite a k a0
-- unsafeWrite a l a3
_ -> case cmp a0 a3 of
GT -> case cmp a1 a3 of
GT -> do unsafeWrite a i a3
-- unsafeWrite a j a1
unsafeWrite a k a0
unsafeWrite a l a2
_ -> do unsafeWrite a i a1
unsafeWrite a j a3
unsafeWrite a k a0
unsafeWrite a l a2
_ -> case cmp a2 a3 of
GT -> do unsafeWrite a i a1
unsafeWrite a j a0
unsafeWrite a k a3
unsafeWrite a l a2
_ -> do unsafeWrite a i a1
unsafeWrite a j a0
-- unsafeWrite a k a2
-- unsafeWrite a l a3
_ -> case cmp a1 a2 of
GT -> case cmp a0 a2 of
GT -> case cmp a0 a3 of
GT -> case cmp a2 a3 of
GT -> do unsafeWrite a i a3
unsafeWrite a j a2
unsafeWrite a k a0
unsafeWrite a l a1
_ -> do unsafeWrite a i a2
unsafeWrite a j a3
unsafeWrite a k a0
unsafeWrite a l a1
_ -> case cmp a1 a3 of
GT -> do unsafeWrite a i a2
unsafeWrite a j a0
unsafeWrite a k a3
unsafeWrite a l a1
_ -> do unsafeWrite a i a2
unsafeWrite a j a0
unsafeWrite a k a1
-- unsafeWrite a l a3
_ -> case cmp a2 a3 of
GT -> case cmp a0 a3 of
GT -> do unsafeWrite a i a3
unsafeWrite a j a0
-- unsafeWrite a k a2
unsafeWrite a l a1
_ -> do -- unsafeWrite a i a0
unsafeWrite a j a3
-- unsafeWrite a k a2
unsafeWrite a l a1
_ -> case cmp a1 a3 of
GT -> do -- unsafeWrite a i a0
unsafeWrite a j a2
unsafeWrite a k a3
unsafeWrite a l a1
_ -> do -- unsafeWrite a i a0
unsafeWrite a j a2
unsafeWrite a k a1
-- unsafeWrite a l a3
_ -> case cmp a1 a3 of
GT -> case cmp a0 a3 of
GT -> do unsafeWrite a i a3
unsafeWrite a j a0
unsafeWrite a k a1
unsafeWrite a l a2
_ -> do -- unsafeWrite a i a0
unsafeWrite a j a3
unsafeWrite a k a1
unsafeWrite a l a2
_ -> case cmp a2 a3 of
GT -> do -- unsafeWrite a i a0
-- unsafeWrite a j a1
unsafeWrite a k a3
unsafeWrite a l a2
_ -> do -- unsafeWrite a i a0
-- unsafeWrite a j a1
-- unsafeWrite a k a2
-- unsafeWrite a l a3
return ()
{-# INLINABLE sort4ByIndex #-}
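-- A minimal usage sketch (not part of the original module): sort the first
-- three elements of an unboxed vector in place via 'Data.Vector.Unboxed.modify'.
--
-- import qualified Data.Vector.Unboxed as U
--
-- sorted3 :: U.Vector Int
-- sorted3 = U.modify (\mv -> sort3ByOffset compare mv 0) (U.fromList [3, 1, 2])
-- -- sorted3 == U.fromList [1, 2, 3]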
|
ssaavedra/liquidhaskell
|
benchmarks/vector-algorithms-0.5.4.2/Data/Vector/Algorithms/Optimal.hs
|
bsd-3-clause
| 13,173
| 0
| 32
| 6,816
| 2,498
| 1,175
| 1,323
| 175
| 24
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
************************************************************************
* *
\section[OccurAnal]{Occurrence analysis pass}
* *
************************************************************************
The occurrence analyser re-typechecks a core expression, returning a new
core expression with (hopefully) improved usage information.
-}
{-# LANGUAGE CPP, BangPatterns #-}
module OccurAnal (
occurAnalysePgm, occurAnalyseExpr, occurAnalyseExpr_NoBinderSwap
) where
#include "HsVersions.h"
import CoreSyn
import CoreFVs
import CoreUtils ( exprIsTrivial, isDefaultAlt, isExpandableApp,
stripTicksTopE, mkTicks )
import Id
import Name( localiseName )
import BasicTypes
import Module( Module )
import Coercion
import VarSet
import VarEnv
import Var
import Demand ( argOneShots, argsOneShots )
import Maybes ( orElse )
import Digraph ( SCC(..), stronglyConnCompFromEdgedVerticesR )
import Unique
import UniqFM
import Util
import Outputable
import FastString
import Data.List
import Control.Arrow ( second )
{-
************************************************************************
* *
\subsection[OccurAnal-main]{Counting occurrences: main function}
* *
************************************************************************
Here's the externally-callable interface:
-}
occurAnalysePgm :: Module -- Used only in debug output
-> (Activation -> Bool)
-> [CoreRule] -> [CoreVect] -> VarSet
-> CoreProgram -> CoreProgram
occurAnalysePgm this_mod active_rule imp_rules vects vectVars binds
| isEmptyVarEnv final_usage
= occ_anald_binds
| otherwise -- See Note [Glomming]
= WARN( True, hang (text "Glomming in" <+> ppr this_mod <> colon)
2 (ppr final_usage ) )
occ_anald_glommed_binds
where
init_env = initOccEnv active_rule
(final_usage, occ_anald_binds) = go init_env binds
(_, occ_anald_glommed_binds) = occAnalRecBind init_env imp_rule_edges
(flattenBinds occ_anald_binds)
initial_uds
-- It's crucial to re-analyse the glommed-together bindings
-- so that we establish the right loop breakers. Otherwise
-- we can easily create an infinite loop (Trac #9583 is an example)
initial_uds = addIdOccs emptyDetails
(rulesFreeVars imp_rules `unionVarSet`
vectsFreeVars vects `unionVarSet`
vectVars)
-- The RULES and VECTORISE declarations keep things alive! (For VECTORISE declarations,
-- we only get them *until* the vectoriser runs. Afterwards, these dependencies are
-- reflected in 'vectors' — see Note [Vectorisation declarations and occurrences].)
-- Note [Preventing loops due to imported functions rules]
imp_rule_edges = foldr (plusVarEnv_C unionVarSet) emptyVarEnv
[ mapVarEnv (const maps_to) (exprFreeIds arg `delVarSetList` ru_bndrs imp_rule)
| imp_rule <- imp_rules
, let maps_to = exprFreeIds (ru_rhs imp_rule)
`delVarSetList` ru_bndrs imp_rule
, arg <- ru_args imp_rule ]
go :: OccEnv -> [CoreBind] -> (UsageDetails, [CoreBind])
go _ []
= (initial_uds, [])
go env (bind:binds)
= (final_usage, bind' ++ binds')
where
(bs_usage, binds') = go env binds
(final_usage, bind') = occAnalBind env imp_rule_edges bind bs_usage
occurAnalyseExpr :: CoreExpr -> CoreExpr
-- Do occurrence analysis, and discard occurrence info returned
occurAnalyseExpr = occurAnalyseExpr' True -- do binder swap
occurAnalyseExpr_NoBinderSwap :: CoreExpr -> CoreExpr
occurAnalyseExpr_NoBinderSwap = occurAnalyseExpr' False -- do not do binder swap
occurAnalyseExpr' :: Bool -> CoreExpr -> CoreExpr
occurAnalyseExpr' enable_binder_swap expr
= snd (occAnal env expr)
where
env = (initOccEnv all_active_rules) {occ_binder_swap = enable_binder_swap}
-- To be conservative, we say that all inlines and rules are active
all_active_rules = \_ -> True
{-
************************************************************************
* *
\subsection[OccurAnal-main]{Counting occurrences: main function}
* *
************************************************************************
Bindings
~~~~~~~~
-}
type ImpRuleEdges = IdEnv IdSet -- Mapping from FVs of imported RULE LHSs to RHS FVs
noImpRuleEdges :: ImpRuleEdges
noImpRuleEdges = emptyVarEnv
occAnalBind :: OccEnv -- The incoming OccEnv
-> ImpRuleEdges
-> CoreBind
-> UsageDetails -- Usage details of scope
-> (UsageDetails, -- Of the whole let(rec)
[CoreBind])
occAnalBind env top_env (NonRec binder rhs) body_usage
= occAnalNonRecBind env top_env binder rhs body_usage
occAnalBind env top_env (Rec pairs) body_usage
= occAnalRecBind env top_env pairs body_usage
-----------------
occAnalNonRecBind :: OccEnv -> ImpRuleEdges -> Var -> CoreExpr
-> UsageDetails -> (UsageDetails, [CoreBind])
occAnalNonRecBind env imp_rule_edges binder rhs body_usage
| isTyVar binder -- A type let; we don't gather usage info
= (body_usage, [NonRec binder rhs])
| not (binder `usedIn` body_usage) -- It's not mentioned
= (body_usage, [])
| otherwise -- It's mentioned in the body
= (body_usage' +++ rhs_usage4, [NonRec tagged_binder rhs'])
where
(body_usage', tagged_binder) = tagBinder body_usage binder
(rhs_usage1, rhs') = occAnalNonRecRhs env tagged_binder rhs
rhs_usage2 = addIdOccs rhs_usage1 (idUnfoldingVars binder)
rhs_usage3 = addIdOccs rhs_usage2 (idRuleVars binder)
-- See Note [Rules are extra RHSs] and Note [Rule dependency info]
rhs_usage4 = maybe rhs_usage3 (addIdOccs rhs_usage3) $
lookupVarEnv imp_rule_edges binder
-- See Note [Preventing loops due to imported functions rules]
-----------------
occAnalRecBind :: OccEnv -> ImpRuleEdges -> [(Var,CoreExpr)]
-> UsageDetails -> (UsageDetails, [CoreBind])
occAnalRecBind env imp_rule_edges pairs body_usage
= foldr occAnalRec (body_usage, []) sccs
-- For a recursive group, we
-- * occ-analyse all the RHSs
-- * compute strongly-connected components
-- * feed those components to occAnalRec
where
bndr_set = mkVarSet (map fst pairs)
sccs :: [SCC (Node Details)]
sccs = {-# SCC "occAnalBind.scc" #-} stronglyConnCompFromEdgedVerticesR nodes
nodes :: [Node Details]
nodes = {-# SCC "occAnalBind.assoc" #-} map (makeNode env imp_rule_edges bndr_set) pairs
{-
Note [Dead code]
~~~~~~~~~~~~~~~~
Dropping dead code for a cyclic Strongly Connected Component is done
in a very simple way:
the entire SCC is dropped if none of its binders are mentioned
in the body; otherwise the whole thing is kept.
The key observation is that dead code elimination happens after
dependency analysis: so 'occAnalBind' processes SCCs instead of the
original term's binding groups.
Thus 'occAnalBind' does indeed drop 'f' in an example like
letrec f = ...g...
g = ...(...g...)...
in
...g...
when 'g' no longer uses 'f' at all (eg 'f' does not occur in a RULE in
'g'). 'occAnalBind' first consumes 'CyclicSCC g' and then it consumes
'AcyclicSCC f', where 'body_usage' won't contain 'f'.
------------------------------------------------------------
Note [Forming Rec groups]
~~~~~~~~~~~~~~~~~~~~~~~~~
We put bindings {f = ef; g = eg } in a Rec group if "f uses g"
and "g uses f", no matter how indirectly. We do a SCC analysis
with an edge f -> g if "f uses g".
More precisely, "f uses g" iff g should be in scope wherever f is.
That is, g is free in:
a) the rhs 'ef'
b) or the RHS of a rule for f (Note [Rules are extra RHSs])
c) or the LHS or a rule for f (Note [Rule dependency info])
These conditions apply regardless of the activation of the RULE (eg it might be
inactive in this phase but become active later). Once a Rec is broken up
it can never be put back together, so we must be conservative.
The principle is that, regardless of rule firings, every variable is
always in scope.
* Note [Rules are extra RHSs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
A RULE for 'f' is like an extra RHS for 'f'. That way the "parent"
keeps the specialised "children" alive. If the parent dies
(because it isn't referenced any more), then the children will die
too (unless they are already referenced directly).
To that end, we build a Rec group for each cyclic strongly
connected component,
*treating f's rules as extra RHSs for 'f'*.
More concretely, the SCC analysis runs on a graph with an edge
from f -> g iff g is mentioned in
(a) f's rhs
(b) f's RULES
These are rec_edges.
Under (b) we include variables free in *either* LHS *or* RHS of
the rule. The former might seem silly, but see Note [Rule
dependency info]. So in Example [eftInt], eftInt and eftIntFB
will be put in the same Rec, even though their 'main' RHSs are
both non-recursive.
* Note [Rule dependency info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
The VarSet in a SpecInfo is used for dependency analysis in the
occurrence analyser. We must track free vars in *both* lhs and rhs.
Hence use of idRuleVars, rather than idRuleRhsVars in occAnalBind.
Why both? Consider
x = y
RULE f x = v+4
Then if we substitute y for x, we'd better do so in the
rule's LHS too, so we'd better ensure the RULE appears to mention 'x'
as well as 'v'
* Note [Rules are visible in their own rec group]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want the rules for 'f' to be visible in f's right-hand side.
And we'd like them to be visible in other functions in f's Rec
group. E.g. in Note [Specialisation rules] we want f' rule
to be visible in both f's RHS, and fs's RHS.
This means that we must simplify the RULEs first, before looking
at any of the definitions. This is done by Simplify.simplRecBind,
when it calls addLetIdInfo.
------------------------------------------------------------
Note [Choosing loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Loop breaking is surprisingly subtle. First read the section 4 of
"Secrets of the GHC inliner". This describes our basic plan.
We avoid infinite inlinings by choosing loop breakers, and
ensuring that a loop breaker cuts each loop.
Fundamentally, we do SCC analysis on a graph. For each recursive
group we choose a loop breaker, delete all edges to that node,
re-analyse the SCC, and iterate.
But what is the graph? NOT the same graph as was used for Note
[Forming Rec groups]! In particular, a RULE is like an equation for
'f' that is *always* inlined if it is applicable. We do *not* disable
rules for loop-breakers. It's up to whoever makes the rules to make
sure that the rules themselves always terminate. See Note [Rules for
recursive functions] in Simplify.hs
Hence, if
f's RHS (or its INLINE template if it has one) mentions g, and
g has a RULE that mentions h, and
h has a RULE that mentions f
then we *must* choose f to be a loop breaker. Example: see Note
[Specialisation rules].
In general, take the free variables of f's RHS, and augment it with
all the variables reachable by RULES from those starting points. That
is the whole reason for computing rule_fv_env in occAnalBind. (Of
course we only consider free vars that are also binders in this Rec
group.) See also Note [Finding rule RHS free vars]
Note that when we compute this rule_fv_env, we only consider variables
free in the *RHS* of the rule, in contrast to the way we build the
Rec group in the first place (Note [Rule dependency info])
Note that if 'g' has RHS that mentions 'w', we should add w to
g's loop-breaker edges. More concretely there is an edge from f -> g
iff
(a) g is mentioned in f's RHS `xor` f's INLINE rhs
(see Note [Inline rules])
(b) or h is mentioned in f's RHS, and
g appears in the RHS of an active RULE of h
or a transitive sequence of active rules starting with h
Why "active rules"? See Note [Finding rule RHS free vars]
Note that in Example [eftInt], *neither* eftInt *nor* eftIntFB is
chosen as a loop breaker, because their RHSs don't mention each other.
And indeed both can be inlined safely.
Note again that the edges of the graph we use for computing loop breakers
are not the same as the edges we use for computing the Rec blocks.
That's why we compute
- rec_edges for the Rec block analysis
- loop_breaker_edges for the loop breaker analysis
* Note [Finding rule RHS free vars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this real example from Data Parallel Haskell
tagZero :: Array Int -> Array Tag
 {-# INLINE [1] tagZero #-}
tagZero xs = pmap (\x -> fromBool (x==0)) xs
{-# RULES "tagZero" [~1] forall xs n.
pmap fromBool <blah blah> = tagZero xs #-}
So tagZero's RHS mentions pmap, and pmap's RULE mentions tagZero.
However, tagZero can only be inlined in phase 1 and later, while
the RULE is only active *before* phase 1. So there's no problem.
To make this work, we look for the RHS free vars only for
*active* rules. That's the reason for the occ_rule_act field
of the OccEnv.
* Note [Weak loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~
There is a last nasty wrinkle. Suppose we have
Rec { f = f_rhs
RULE f [] = g
h = h_rhs
g = h
...more...
}
Remember that we simplify the RULES before any RHS (see Note
[Rules are visible in their own rec group] above).
So we must *not* postInlineUnconditionally 'g', even though
its RHS turns out to be trivial. (I'm assuming that 'g' is
not chosen as a loop breaker.) Why not? Because then we
drop the binding for 'g', which leaves it out of scope in the
RULE!
Here's a somewhat different example of the same thing
Rec { g = h
; h = ...f...
; f = f_rhs
RULE f [] = g }
Here the RULE is "below" g, but we *still* can't postInlineUnconditionally
g, because the RULE for f is active throughout. So the RHS of h
might rewrite to h = ...g...
So g must remain in scope in the output program!
We "solve" this by:
Make g a "weak" loop breaker (OccInfo = IAmLoopBreaker True)
iff g is a "missing free variable" of the Rec group
A "missing free variable" x is one that is mentioned in an RHS or
INLINE or RULE of a binding in the Rec group, but where the
dependency on x may not show up in the loop_breaker_edges (see
note [Choosing loop breakers] above).
A normal "strong" loop breaker has IAmLoopBreaker False. So
                                       Inline    postInlineUnconditionally
   strong   IAmLoopBreaker False        no          no
   weak     IAmLoopBreaker True         yes         no
   other                                yes         yes
The **sole** reason for this kind of loop breaker is so that
postInlineUnconditionally does not fire. Ugh. (Typically it'll
inline via the usual callSiteInline stuff, so it'll be dead in the
next pass, so the main Ugh is the tiresome complication.)
Note [Rules for imported functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f = /\a. B.g a
RULE B.g Int = 1 + f Int
Note that
* The RULE is for an imported function.
* f is non-recursive
Now we
can get
f Int --> B.g Int Inlining f
--> 1 + f Int Firing RULE
and so the simplifier goes into an infinite loop. This
would not happen if the RULE was for a local function,
because we keep track of dependencies through rules. But
that is pretty much impossible to do for imported Ids. Suppose
f's definition had been
f = /\a. C.h a
where (by some long and devious process), C.h eventually inlines to
B.g. We could only spot such loops by exhaustively following
unfoldings of C.h etc, in case we reach B.g, and hence (via the RULE)
f.
Note that RULES for imported functions are important in practice; they
occur a lot in the libraries.
We regard this potential infinite loop as a *programmer* error.
It's up to the programmer not to write silly rules like
RULE f x = f x
and the example above is just a more complicated version.
Note [Preventing loops due to imported functions rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider:
import GHC.Base (foldr)
{-# RULES "filterList" forall p. foldr (filterFB (:) p) [] = filter p #-}
filter p xs = build (\c n -> foldr (filterFB c p) n xs)
filterFB c p = ...
f = filter p xs
Note that filter is not a loop-breaker, so what happens is:
f = filter p xs
= {inline} build (\c n -> foldr (filterFB c p) n xs)
= {inline} foldr (filterFB (:) p) [] xs
= {RULE} filter p xs
We are in an infinite loop.
A more elaborate example (that I actually saw in practice when I went to
mark GHC.List.filter as INLINABLE) is as follows. Say I have this module:
{-# LANGUAGE RankNTypes #-}
module GHCList where
import Prelude hiding (filter)
import GHC.Base (build)
{-# INLINABLE filter #-}
filter :: (a -> Bool) -> [a] -> [a]
filter p [] = []
filter p (x:xs) = if p x then x : filter p xs else filter p xs
{-# NOINLINE [0] filterFB #-}
filterFB :: (a -> b -> b) -> (a -> Bool) -> a -> b -> b
filterFB c p x r | p x = x `c` r
| otherwise = r
{-# RULES
"filter" [~1] forall p xs. filter p xs = build (\c n -> foldr
(filterFB c p) n xs)
"filterList" [1] forall p. foldr (filterFB (:) p) [] = filter p
#-}
Then (because RULES are applied inside INLINABLE unfoldings, but inlinings
are not), the unfolding given to "filter" in the interface file will be:
filter p [] = []
filter p (x:xs) = if p x then x : build (\c n -> foldr (filterFB c p) n xs)
                        else build (\c n -> foldr (filterFB c p) n xs)
Note that because this unfolding does not mention "filter", filter is not
marked as a strong loop breaker. Therefore at a use site in another module:
filter p xs
= {inline}
case xs of [] -> []
(x:xs) -> if p x then x : build (\c n -> foldr (filterFB c p) n xs)
else build (\c n -> foldr (filterFB c p) n xs)
build (\c n -> foldr (filterFB c p) n xs)
= {inline} foldr (filterFB (:) p) [] xs
= {RULE} filter p xs
And we are in an infinite loop again, except that this time the loop is producing an
infinitely large *term* (an unrolling of filter) and so the simplifier finally
dies with "ticks exhausted"
Because of this problem, we make a small change in the occurrence analyser
designed to mark functions like "filter" as strong loop breakers on the basis that:
1. The RHS of filter mentions the local function "filterFB"
2. We have a rule which mentions "filterFB" on the LHS and "filter" on the RHS
So for each RULE for an *imported* function we are going to add
dependency edges between the *local* FVS of the rule LHS and the
*local* FVS of the rule RHS. We don't do anything special for RULES on
local functions because the standard occurrence analysis stuff is
pretty good at getting loop-breakerness correct there.
It is important to note that even with this extra hack we aren't always going to get
things right. For example, it might be that the rule LHS mentions an imported Id,
and another module has a RULE that can rewrite that imported Id to one of our local
Ids.
Note [Specialising imported functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BUT for *automatically-generated* rules, the programmer can't be
responsible for the "programmer error" in Note [Rules for imported
functions]. In particular, consider specialising a recursive function
defined in another module. If we specialise a recursive function B.g,
we get
g_spec = .....(B.g Int).....
RULE B.g Int = g_spec
Here, g_spec doesn't look recursive, but when the rule fires, it
becomes so. And if B.g was mutually recursive, the loop might
not be as obvious as it is here.
To avoid this,
* When specialising a function that is a loop breaker,
give a NOINLINE pragma to the specialised function
Note [Glomming]
~~~~~~~~~~~~~~~
RULES for imported Ids can make something at the top refer to something at the bottom:
f = \x -> B.g (q x)
h = \y -> 3
RULE: B.g (q x) = h x
Applying this rule makes f refer to h, although f doesn't appear to
depend on h. (And, as in Note [Rules for imported functions], the
dependency might be more indirect. For example, f might mention C.t
rather than B.g, where C.t eventually inlines to B.g.)
NOTICE that this cannot happen for rules whose head is a
locally-defined function, because we accurately track dependencies
through RULES. It only happens for rules whose head is an imported
function (B.g in the example above).
Solution:
- When simplifying, bring all top level identifiers into
scope at the start, ignoring the Rec/NonRec structure, so
that when 'h' pops up in f's rhs, we find it in the in-scope set
(as the simplifier generally expects). This happens in simplTopBinds.
- In the occurrence analyser, if there are any out-of-scope
occurrences that pop out of the top, which will happen after
firing the rule: f = \x -> h x
h = \y -> 3
then just glom all the bindings into a single Rec, so that
the *next* iteration of the occurrence analyser will sort
them all out. This part happens in occurAnalysePgm.
------------------------------------------------------------
Note [Inline rules]
~~~~~~~~~~~~~~~~~~~
None of the above stuff about RULES applies to Inline Rules,
stored in a CoreUnfolding. The unfolding, if any, is simplified
at the same time as the regular RHS of the function (ie *not* like
Note [Rules are visible in their own rec group]), so it should be
treated *exactly* like an extra RHS.
Or, rather, when computing loop-breaker edges,
* If f has an INLINE pragma, and it is active, we treat the
INLINE rhs as f's rhs
* If it's inactive, we treat f as having no rhs
* If it has no INLINE pragma, we look at f's actual rhs
There is a danger that we'll be sub-optimal if we see this
f = ...f...
[INLINE f = ..no f...]
where f is recursive, but the INLINE is not. This can just about
happen with a sufficiently odd set of rules; eg
foo :: Int -> Int
{-# INLINE [1] foo #-}
foo x = x+1
bar :: Int -> Int
{-# INLINE [1] bar #-}
bar x = foo x + 1
{-# RULES "foo" [~1] forall x. foo x = bar x #-}
Here the RULE makes bar recursive; but its INLINE pragma remains
non-recursive. It's tempting to then say that 'bar' should not be
a loop breaker, but an attempt to do so goes wrong in two ways:
a) We may get
$df = ...$cfoo...
$cfoo = ...$df....
[INLINE $cfoo = ...no-$df...]
But we want $cfoo to depend on $df explicitly so that we
put the bindings in the right order to inline $df in $cfoo
and perhaps break the loop altogether. (Maybe this
b)
Example [eftInt]
~~~~~~~~~~~~~~~
Example (from GHC.Enum):
eftInt :: Int# -> Int# -> [Int]
eftInt x y = ...(non-recursive)...
{-# INLINE [0] eftIntFB #-}
eftIntFB :: (Int -> r -> r) -> r -> Int# -> Int# -> r
eftIntFB c n x y = ...(non-recursive)...
{-# RULES
"eftInt" [~1] forall x y. eftInt x y = build (\ c n -> eftIntFB c n x y)
"eftIntList" [1] eftIntFB (:) [] = eftInt
#-}
Note [Specialisation rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this group, which is typical of what SpecConstr builds:
fs a = ....f (C a)....
f x = ....f (C a)....
{-# RULE f (C a) = fs a #-}
So 'f' and 'fs' are in the same Rec group (since f refers to fs via its RULE).
But watch out! If 'fs' is not chosen as a loop breaker, we may get an infinite loop:
- the RULE is applied in f's RHS (see Note [Self-recursive rules] in Simplify)
- fs is inlined (say it's small)
- now there's another opportunity to apply the RULE
This showed up when compiling Control.Concurrent.Chan.getChanContents.
-}
type Node details = (details, Unique, [Unique]) -- The Ints are gotten from the Unique,
-- which is gotten from the Id.
data Details
= ND { nd_bndr :: Id -- Binder
, nd_rhs :: CoreExpr -- RHS, already occ-analysed
, nd_uds :: UsageDetails -- Usage from RHS, and RULES, and stable unfoldings
-- ignoring phase (ie assuming all are active)
-- See Note [Forming Rec groups]
, nd_inl :: IdSet -- Free variables of
-- the stable unfolding (if present and active)
-- or the RHS (if not)
-- but excluding any RULES
-- This is the IdSet that may be used if the Id is inlined
, nd_weak :: IdSet -- Binders of this Rec that are mentioned in nd_uds
-- but are *not* in nd_inl. These are the ones whose
-- dependencies might not be respected by loop_breaker_edges
-- See Note [Weak loop breakers]
, nd_active_rule_fvs :: IdSet -- Free variables of the RHS of active RULES
}
instance Outputable Details where
ppr nd = ptext (sLit "ND") <> braces
(sep [ ptext (sLit "bndr =") <+> ppr (nd_bndr nd)
, ptext (sLit "uds =") <+> ppr (nd_uds nd)
, ptext (sLit "inl =") <+> ppr (nd_inl nd)
, ptext (sLit "weak =") <+> ppr (nd_weak nd)
, ptext (sLit "rule =") <+> ppr (nd_active_rule_fvs nd)
])
makeNode :: OccEnv -> ImpRuleEdges -> VarSet -> (Var, CoreExpr) -> Node Details
makeNode env imp_rule_edges bndr_set (bndr, rhs)
= (details, varUnique bndr, keysUFM node_fvs)
where
details = ND { nd_bndr = bndr
, nd_rhs = rhs'
, nd_uds = rhs_usage3
, nd_weak = node_fvs `minusVarSet` inl_fvs
, nd_inl = inl_fvs
, nd_active_rule_fvs = active_rule_fvs }
-- Constructing the edges for the main Rec computation
-- See Note [Forming Rec groups]
(rhs_usage1, rhs') = occAnalRecRhs env rhs
rhs_usage2 = addIdOccs rhs_usage1 all_rule_fvs -- Note [Rules are extra RHSs]
-- Note [Rule dependency info]
rhs_usage3 = case mb_unf_fvs of
Just unf_fvs -> addIdOccs rhs_usage2 unf_fvs
Nothing -> rhs_usage2
node_fvs = udFreeVars bndr_set rhs_usage3
-- Finding the free variables of the rules
is_active = occ_rule_act env :: Activation -> Bool
rules = filterOut isBuiltinRule (idCoreRules bndr)
rules_w_fvs :: [(Activation, VarSet)] -- Find the RHS fvs
rules_w_fvs = maybe id (\ids -> ((AlwaysActive, ids):)) (lookupVarEnv imp_rule_edges bndr)
-- See Note [Preventing loops due to imported functions rules]
[ (ru_act rule, fvs)
| rule <- rules
, let fvs = exprFreeVars (ru_rhs rule)
`delVarSetList` ru_bndrs rule
, not (isEmptyVarSet fvs) ]
all_rule_fvs = rule_lhs_fvs `unionVarSet` rule_rhs_fvs
rule_rhs_fvs = mapUnionVarSet snd rules_w_fvs
rule_lhs_fvs = mapUnionVarSet (\ru -> exprsFreeVars (ru_args ru)
`delVarSetList` ru_bndrs ru) rules
active_rule_fvs = unionVarSets [fvs | (a,fvs) <- rules_w_fvs, is_active a]
-- Finding the free variables of the INLINE pragma (if any)
unf = realIdUnfolding bndr -- Ignore any current loop-breaker flag
mb_unf_fvs = stableUnfoldingVars unf
-- Find the "nd_inl" free vars; for the loop-breaker phase
inl_fvs = case mb_unf_fvs of
Nothing -> udFreeVars bndr_set rhs_usage1 -- No INLINE, use RHS
Just unf_fvs -> unf_fvs
-- We could check for an *active* INLINE (returning
-- emptyVarSet for an inactive one), but is_active
-- isn't the right thing (it tells about
-- RULE activation), so we'd need more plumbing
-----------------------------
occAnalRec :: SCC (Node Details)
-> (UsageDetails, [CoreBind])
-> (UsageDetails, [CoreBind])
-- The NonRec case is just like a Let (NonRec ...) above
occAnalRec (AcyclicSCC (ND { nd_bndr = bndr, nd_rhs = rhs, nd_uds = rhs_uds}, _, _))
(body_uds, binds)
| not (bndr `usedIn` body_uds)
= (body_uds, binds) -- See Note [Dead code]
| otherwise -- It's mentioned in the body
= (body_uds' +++ rhs_uds,
NonRec tagged_bndr rhs : binds)
where
(body_uds', tagged_bndr) = tagBinder body_uds bndr
-- The Rec case is the interesting one
-- See Note [Loop breaking]
occAnalRec (CyclicSCC nodes) (body_uds, binds)
| not (any (`usedIn` body_uds) bndrs) -- NB: look at body_uds, not total_uds
= (body_uds, binds) -- See Note [Dead code]
| otherwise -- At this point we always build a single Rec
= -- pprTrace "occAnalRec" (vcat
-- [ text "tagged nodes" <+> ppr tagged_nodes
-- , text "lb edges" <+> ppr loop_breaker_edges])
(final_uds, Rec pairs : binds)
where
bndrs = [b | (ND { nd_bndr = b }, _, _) <- nodes]
bndr_set = mkVarSet bndrs
----------------------------
-- Tag the binders with their occurrence info
tagged_nodes = map tag_node nodes
total_uds = foldl add_uds body_uds nodes
final_uds = total_uds `minusVarEnv` bndr_set
add_uds usage_so_far (nd, _, _) = usage_so_far +++ nd_uds nd
tag_node :: Node Details -> Node Details
tag_node (details@ND { nd_bndr = bndr }, k, ks)
| let bndr1 = setBinderOcc total_uds bndr
= (details { nd_bndr = bndr1 }, k, ks)
---------------------------
-- Now reconstruct the cycle
pairs :: [(Id,CoreExpr)]
pairs | isEmptyVarSet weak_fvs = reOrderNodes 0 bndr_set weak_fvs tagged_nodes []
| otherwise = loopBreakNodes 0 bndr_set weak_fvs loop_breaker_edges []
-- If weak_fvs is empty, the loop_breaker_edges will include all
-- the edges in tagged_nodes, so there isn't any point in doing
-- a fresh SCC computation that will yield a single CyclicSCC result.
weak_fvs :: VarSet
weak_fvs = mapUnionVarSet (nd_weak . fstOf3) nodes
-- See Note [Choosing loop breakers] for loop_breaker_edges
loop_breaker_edges = map mk_node tagged_nodes
mk_node (details@(ND { nd_inl = inl_fvs }), k, _)
= (details, k, keysUFM (extendFvs_ rule_fv_env inl_fvs))
------------------------------------
rule_fv_env :: IdEnv IdSet
-- Maps a variable f to the variables from this group
-- mentioned in RHS of active rules for f
-- Domain is *subset* of bound vars (others have no rule fvs)
rule_fv_env = transClosureFV (mkVarEnv init_rule_fvs)
init_rule_fvs -- See Note [Finding rule RHS free vars]
= [ (b, trimmed_rule_fvs)
| (ND { nd_bndr = b, nd_active_rule_fvs = rule_fvs },_,_) <- nodes
, let trimmed_rule_fvs = rule_fvs `intersectVarSet` bndr_set
, not (isEmptyVarSet trimmed_rule_fvs)]
{-
@loopBreakSCC@ is applied to the list of (binder,rhs) pairs for a cyclic
strongly connected component (there's guaranteed to be a cycle). It returns the
same pairs, but
a) in a better order,
b) with some of the Ids having an IAmALoopBreaker pragma
The "loop-breaker" Ids are sufficient to break all cycles in the SCC. This means
that the simplifier can guarantee not to loop provided it never records an inlining
for these no-inline guys.
Furthermore, the order of the binds is such that if we neglect dependencies
on the no-inline Ids then the binds are topologically sorted. This means
that the simplifier will generally do a good job if it works from top to bottom,
recording inlinings for any Ids which aren't marked as "no-inline" as it goes.
-}
type Binding = (Id,CoreExpr)
mk_loop_breaker :: Node Details -> Binding
mk_loop_breaker (ND { nd_bndr = bndr, nd_rhs = rhs}, _, _)
= (setIdOccInfo bndr strongLoopBreaker, rhs)
mk_non_loop_breaker :: VarSet -> Node Details -> Binding
-- See Note [Weak loop breakers]
mk_non_loop_breaker used_in_rules (ND { nd_bndr = bndr, nd_rhs = rhs}, _, _)
| bndr `elemVarSet` used_in_rules = (setIdOccInfo bndr weakLoopBreaker, rhs)
| otherwise = (bndr, rhs)
udFreeVars :: VarSet -> UsageDetails -> VarSet
-- Find the subset of bndrs that are mentioned in uds
udFreeVars bndrs uds = intersectUFM_C (\b _ -> b) bndrs uds
loopBreakNodes :: Int
-> VarSet -- All binders
-> VarSet -- Binders whose dependencies may be "missing"
-- See Note [Weak loop breakers]
-> [Node Details]
-> [Binding] -- Append these to the end
-> [Binding]
-- Return the bindings sorted into a plausible order, and marked with loop breakers.
loopBreakNodes depth bndr_set weak_fvs nodes binds
= go (stronglyConnCompFromEdgedVerticesR nodes) binds
where
go [] binds = binds
go (scc:sccs) binds = loop_break_scc scc (go sccs binds)
loop_break_scc scc binds
= case scc of
AcyclicSCC node -> mk_non_loop_breaker weak_fvs node : binds
CyclicSCC [node] -> mk_loop_breaker node : binds
CyclicSCC nodes -> reOrderNodes depth bndr_set weak_fvs nodes binds
reOrderNodes :: Int -> VarSet -> VarSet -> [Node Details] -> [Binding] -> [Binding]
-- Choose a loop breaker, mark it no-inline,
-- do SCC analysis on the rest, and recursively sort them out
reOrderNodes _ _ _ [] _ = panic "reOrderNodes"
reOrderNodes depth bndr_set weak_fvs (node : nodes) binds
= -- pprTrace "reOrderNodes" (text "unchosen" <+> ppr unchosen $$
-- text "chosen" <+> ppr chosen_nodes) $
loopBreakNodes new_depth bndr_set weak_fvs unchosen $
(map mk_loop_breaker chosen_nodes ++ binds)
where
(chosen_nodes, unchosen) = choose_loop_breaker (score node) [node] [] nodes
approximate_loop_breaker = depth >= 2
new_depth | approximate_loop_breaker = 0
| otherwise = depth+1
-- After two iterations (d=0, d=1) give up
-- and approximate, returning to d=0
choose_loop_breaker :: Int -- Best score so far
-> [Node Details] -- Nodes with this score
-> [Node Details] -- Nodes with higher scores
-> [Node Details] -- Unprocessed nodes
-> ([Node Details], [Node Details])
-- This loop looks for the bind with the lowest score
-- to pick as the loop breaker. The rest accumulate in 'acc'.
choose_loop_breaker _ loop_nodes acc []
= (loop_nodes, acc) -- Done
-- If approximate_loop_breaker is True, we pick *all*
-- nodes with lowest score, else just one
-- See Note [Complexity of loop breaking]
choose_loop_breaker loop_sc loop_nodes acc (node : nodes)
| sc < loop_sc -- Lower score so pick this new one
= choose_loop_breaker sc [node] (loop_nodes ++ acc) nodes
| approximate_loop_breaker && sc == loop_sc
= choose_loop_breaker loop_sc (node : loop_nodes) acc nodes
| otherwise -- Higher score so don't pick it
= choose_loop_breaker loop_sc loop_nodes (node : acc) nodes
where
sc = score node
score :: Node Details -> Int -- Higher score => less likely to be picked as loop breaker
score (ND { nd_bndr = bndr, nd_rhs = rhs }, _, _)
| not (isId bndr) = 100 -- A type or coercion variable is never a loop breaker
| isDFunId bndr = 9 -- Never choose a DFun as a loop breaker
-- Note [DFuns should not be loop breakers]
| Just be_very_keen <- hasStableCoreUnfolding_maybe (idUnfolding bndr)
= if be_very_keen then 6 -- Note [Loop breakers and INLINE/INLINEABLE pragmas]
else 3
-- Data structures are more important than INLINE pragmas
-- so that dictionary/method recursion unravels
-- Note that this case hits all stable unfoldings, so we
-- never look at 'rhs' for stable unfoldings. That's right, because
-- 'rhs' is irrelevant for inlining things with a stable unfolding
| is_con_app rhs = 5 -- Data types help with cases: Note [Constructor applications]
| exprIsTrivial rhs = 10 -- Practically certain to be inlined
-- Used to have also: && not (isExportedId bndr)
-- But I found this sometimes cost an extra iteration when we have
-- rec { d = (a,b); a = ...df...; b = ...df...; df = d }
-- where df is the exported dictionary. Then df makes a really
-- bad choice for loop breaker
-- If an Id is marked "never inline" then it makes a great loop breaker
-- The only reason for not checking that here is that it is rare
-- and I've never seen a situation where it makes a difference,
-- so it probably isn't worth the time to test on every binder
-- | isNeverActive (idInlinePragma bndr) = -10
| isOneOcc (idOccInfo bndr) = 2 -- Likely to be inlined
| canUnfold (realIdUnfolding bndr) = 1
-- The Id has some kind of unfolding
-- Ignore loop-breaker-ness here because that is what we are setting!
| otherwise = 0
-- Checking for a constructor application
-- Cheap and cheerful; the simplifier moves casts out of the way
-- The lambda case is important to spot x = /\a. C (f a)
-- which comes up when C is a dictionary constructor and
-- f is a default method.
-- Example: the instance for Show (ST s a) in GHC.ST
--
-- However we *also* treat (\x. C p q) as a con-app-like thing,
-- Note [Closure conversion]
is_con_app (Var v) = isConLikeId v
is_con_app (App f _) = is_con_app f
is_con_app (Lam _ e) = is_con_app e
is_con_app (Tick _ e) = is_con_app e
is_con_app _ = False
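-- An illustrative sketch of how these scores interact (cf. Note
-- [Constructor applications] below): in
--   rec { d = (v, f, g); f = \x -> ...d...; g = \x -> ...d... }
-- the tuple 'd' is a constructor application and scores 5, while 'f' and
-- 'g' score lower, so one of them (rather than 'd') is picked as the loop
-- breaker and 'd' stays inlinable.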
{-
Note [Complexity of loop breaking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The loop-breaking algorithm knocks out one binder at a time, and
performs a new SCC analysis on the remaining binders. That can
behave very badly in tightly-coupled groups of bindings; in the
worst case it can be (N**2)*log N, because it does a full SCC
on N, then N-1, then N-2 and so on.
To avoid this, we switch plans after 2 (or whatever) attempts:
Plan A: pick one binder with the lowest score, make it
a loop breaker, and try again
Plan B: pick *all* binders with the lowest score, make them
all loop breakers, and try again
Since there are only a small finite number of scores, this will
terminate in a constant number of iterations, rather than O(N)
iterations.
You might think that it's very unlikely, but RULES make it much
more likely. Here's a real example from Trac #1969:
Rec { $dm = \d.\x. op d
{-# RULES forall d. $dm Int d = $s$dm1
forall d. $dm Bool d = $s$dm2 #-}
dInt = MkD .... opInt ...
dBool = MkD .... opBool ...
opInt = $dm dInt
opBool = $dm dBool
$s$dm1 = \x. op dInt
$s$dm2 = \x. op dBool }
The RULES stuff means that we can't choose $dm as a loop breaker
(Note [Choosing loop breakers]), so we must choose at least (say)
opInt *and* opBool, and so on. The number of loop breakers is
linear in the number of instance declarations.
Note [Loop breakers and INLINE/INLINEABLE pragmas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Avoid choosing a function with an INLINE pragma as the loop breaker!
If such a function is mutually-recursive with a non-INLINE thing,
then the latter should be the loop-breaker.
It's vital to distinguish between INLINE and INLINEABLE (the
Bool returned by hasStableCoreUnfolding_maybe). If we start with
Rec { {-# INLINEABLE f #-}
f x = ...f... }
and then worker/wrapper it through strictness analysis, we'll get
Rec { {-# INLINEABLE $wf #-}
$wf p q = let x = (p,q) in ...f...
{-# INLINE f #-}
f x = case x of (p,q) -> $wf p q }
Now it is vital that we choose $wf as the loop breaker, so we can
inline 'f' in '$wf'.
Note [DFuns should not be loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's particularly bad to make a DFun into a loop breaker. See
Note [How instance declarations are translated] in TcInstDcls
We give DFuns a higher score than ordinary CONLIKE things because
if there's a choice we want the DFun to be the non-loop breaker. Eg
rec { sc = /\ a \$dC. $fBWrap (T a) ($fCT @ a $dC)
$fCT :: forall a_afE. (Roman.C a_afE) => Roman.C (Roman.T a_afE)
{-# DFUN #-}
$fCT = /\a \$dC. MkD (T a) ((sc @ a $dC) |> blah) ($ctoF @ a $dC)
}
Here 'sc' (the superclass) looks CONLIKE, but we'll never get to it
if we can't unravel the DFun first.
Note [Constructor applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's really really important to inline dictionaries. Real
example (the Enum Ordering instance from GHC.Base):
rec f = \ x -> case d of (p,q,r) -> p x
g = \ x -> case d of (p,q,r) -> q x
d = (v, f, g)
Here, f and g occur just once; but we can't inline them into d.
On the other hand we *could* simplify those case expressions if
we didn't stupidly choose d as the loop breaker.
But we won't because constructor args are marked "Many".
Inlining dictionaries is really essential to unravelling
the loops in static numeric dictionaries, see GHC.Float.
Note [Closure conversion]
~~~~~~~~~~~~~~~~~~~~~~~~~
We treat (\x. C p q) as a high-score candidate in the letrec scoring algorithm.
The immediate motivation came from the result of a closure-conversion transformation
which generated code like this:
data Clo a b = forall c. Clo (c -> a -> b) c
($:) :: Clo a b -> a -> b
Clo f env $: x = f env x
rec { plus = Clo plus1 ()
; plus1 _ n = Clo plus2 n
; plus2 Zero n = n
; plus2 (Succ m) n = Succ (plus $: m $: n) }
If we inline 'plus' and 'plus1', everything unravels nicely. But if
we choose 'plus1' as the loop breaker (which is entirely possible
otherwise), the loop does not unravel nicely.
@occAnalRhs@ deals with the question of bindings where the Id is marked
by an INLINE pragma. For these we record that anything which occurs
in its RHS occurs many times. This pessimistically assumes that the
inlined binder also occurs many times in its scope, but if it doesn't
we'll catch it next time round. At worst this costs an extra simplifier pass.
ToDo: try using the occurrence info for the inline'd binder.
[March 97] We do the same for atomic RHSs. Reason: see notes with loopBreakSCC.
[June 98, SLPJ] I've undone this change; I don't understand it. See notes with loopBreakSCC.
-}
occAnalRecRhs :: OccEnv -> CoreExpr -- Rhs
-> (UsageDetails, CoreExpr)
-- Returned usage details covers only the RHS,
-- and *not* the RULE or INLINE template for the Id
occAnalRecRhs env rhs = occAnal (rhsCtxt env) rhs
occAnalNonRecRhs :: OccEnv
-> Id -> CoreExpr -- Binder and rhs
-- Binder is already tagged with occurrence info
-> (UsageDetails, CoreExpr)
-- Returned usage details covers only the RHS,
-- and *not* the RULE or INLINE template for the Id
occAnalNonRecRhs env bndr rhs
= occAnal rhs_env rhs
where
-- See Note [Use one-shot info]
env1 = env { occ_one_shots = argOneShots OneShotLam dmd }
-- See Note [Cascading inlines]
rhs_env | certainly_inline = env1
| otherwise = rhsCtxt env1
certainly_inline -- See Note [Cascading inlines]
= case idOccInfo bndr of
OneOcc in_lam one_br _ -> not in_lam && one_br && active && not_stable
_ -> False
dmd = idDemandInfo bndr
active = isAlwaysActive (idInlineActivation bndr)
not_stable = not (isStableUnfolding (idUnfolding bndr))
addIdOccs :: UsageDetails -> VarSet -> UsageDetails
addIdOccs usage id_set = foldVarSet addIdOcc usage id_set
addIdOcc :: Id -> UsageDetails -> UsageDetails
addIdOcc v u | isId v = addOneOcc u v NoOccInfo
| otherwise = u
-- Give a non-committal binder info (i.e NoOccInfo) because
-- a) Many copies of the specialised thing can appear
-- b) We don't want to substitute a BIG expression inside a RULE
-- even if that's the only occurrence of the thing
-- (Same goes for INLINE.)
{-
Note [Cascading inlines]
~~~~~~~~~~~~~~~~~~~~~~~~
By default we use an rhsCtxt for the RHS of a binding. This tells the
occ anal that it's looking at an RHS, which has an effect in
occAnalApp. In particular, for constructor applications, it makes
the arguments appear to have NoOccInfo, so that we don't inline into
them. Thus x = f y
k = Just x
we do not want to inline x.
But there's a problem. Consider
x1 = a0 : []
x2 = a1 : x1
x3 = a2 : x2
g = f x3
First time round, it looks as if x1 and x2 occur as an arg of a
let-bound constructor ==> give them a many-occurrence.
But then x3 is inlined (unconditionally as it happens) and
next time round, x2 will be, and the next time round x1 will be
Result: multiple simplifier iterations. Sigh.
So, when analysing the RHS of x3 we notice that x3 will itself
definitely inline the next time round, and so we analyse x3's rhs in
an ordinary context, not rhsCtxt. Hence the "certainly_inline" stuff.
Annoyingly, we have to approximate SimplUtils.preInlineUnconditionally.
If we say "yes" when preInlineUnconditionally says "no" the simplifier iterates
indefinitely:
x = f y
k = Just x
inline ==>
k = Just (f y)
float ==>
x1 = f y
k = Just x1
This is worse than the slow cascade, so we only want to say "certainly_inline"
if it really is certain. Look at the note with preInlineUnconditionally
for the various clauses.
Expressions
~~~~~~~~~~~
-}
occAnal :: OccEnv
-> CoreExpr
-> (UsageDetails, -- Gives info only about the "interesting" Ids
CoreExpr)
occAnal _ expr@(Type _) = (emptyDetails, expr)
occAnal _ expr@(Lit _) = (emptyDetails, expr)
occAnal env expr@(Var v) = (mkOneOcc env v False, expr)
-- At one stage, I gathered the idRuleVars for v here too,
-- which in a way is the right thing to do.
-- But that went wrong right after specialisation, when
-- the *occurrences* of the overloaded function didn't have any
-- rules in them, so the *specialised* versions looked as if they
-- weren't used at all.
occAnal _ (Coercion co)
= (addIdOccs emptyDetails (coVarsOfCo co), Coercion co)
-- See Note [Gather occurrences of coercion variables]
{-
Note [Gather occurrences of coercion variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to gather info about what coercion variables appear, so that
we can sort them into the right place when doing dependency analysis.
-}
occAnal env (Tick tickish body)
| tickish `tickishScopesLike` SoftScope
= (usage, Tick tickish body')
| Breakpoint _ ids <- tickish
= (usage_lam +++ mkVarEnv (zip ids (repeat NoOccInfo)), Tick tickish body')
-- never substitute for any of the Ids in a Breakpoint
| otherwise
= (usage_lam, Tick tickish body')
where
!(usage,body') = occAnal env body
-- for a non-soft tick scope, we can inline lambdas only
usage_lam = mapVarEnv markInsideLam usage
occAnal env (Cast expr co)
= case occAnal env expr of { (usage, expr') ->
let usage1 = markManyIf (isRhsEnv env) usage
usage2 = addIdOccs usage1 (coVarsOfCo co)
-- See Note [Gather occurrences of coercion variables]
in (usage2, Cast expr' co)
-- If we see let x = y `cast` co
-- then mark y as 'Many' so that we don't
-- immediately inline y again.
}
occAnal env app@(App _ _)
= occAnalApp env (collectArgsTicks tickishFloatable app)
-- Ignore type variables altogether
-- (a) occurrences inside type lambdas are not marked as InsideLam
-- (b) type variables not in environment
occAnal env (Lam x body) | isTyVar x
= case occAnal env body of { (body_usage, body') ->
(body_usage, Lam x body')
}
-- For value lambdas we do a special hack. Consider
-- (\x. \y. ...x...)
-- If we did nothing, x is used inside the \y, so would be marked
-- as dangerous to dup. But in the common case where the abstraction
-- is applied to two arguments this is over-pessimistic.
-- So instead, we just mark each binder with its occurrence
-- info in the *body* of the multiple lambda.
-- Then, the simplifier is careful when partially applying lambdas.
occAnal env expr@(Lam _ _)
= case occAnal env_body body of { (body_usage, body') ->
let
(final_usage, tagged_binders) = tagLamBinders body_usage binders'
-- Use binders' to put one-shot info on the lambdas
really_final_usage
| all isOneShotBndr binders' = final_usage
| otherwise = mapVarEnv markInsideLam final_usage
in
(really_final_usage, mkLams tagged_binders body') }
where
(binders, body) = collectBinders expr
(env_body, binders') = oneShotGroup env binders
occAnal env (Case scrut bndr ty alts)
= case occ_anal_scrut scrut alts of { (scrut_usage, scrut') ->
case mapAndUnzip occ_anal_alt alts of { (alts_usage_s, alts') ->
let
alts_usage = foldr combineAltsUsageDetails emptyDetails alts_usage_s
(alts_usage1, tagged_bndr) = tag_case_bndr alts_usage bndr
total_usage = scrut_usage +++ alts_usage1
in
total_usage `seq` (total_usage, Case scrut' tagged_bndr ty alts') }}
where
-- Note [Case binder usage]
-- ~~~~~~~~~~~~~~~~~~~~~~~~
-- The case binder gets a usage of either "many" or "dead", never "one".
-- Reason: we like to inline single occurrences, to eliminate a binding,
-- but inlining a case binder *doesn't* eliminate a binding.
-- We *don't* want to transform
-- case x of w { (p,q) -> f w }
-- into
-- case x of w { (p,q) -> f (p,q) }
tag_case_bndr usage bndr
= case lookupVarEnv usage bndr of
Nothing -> (usage, setIdOccInfo bndr IAmDead)
Just _ -> (usage `delVarEnv` bndr, setIdOccInfo bndr NoOccInfo)
alt_env = mkAltEnv env scrut bndr
occ_anal_alt = occAnalAlt alt_env
occ_anal_scrut (Var v) (alt1 : other_alts)
| not (null other_alts) || not (isDefaultAlt alt1)
= (mkOneOcc env v True, Var v) -- The 'True' says that the variable occurs
-- in an interesting context; the case has
-- at least one non-default alternative
occ_anal_scrut (Tick t e) alts
| t `tickishScopesLike` SoftScope
-- There is no reason not to look through all ticks here, but only
-- for soft-scoped ticks can we do so without having to
-- update the returned occurrence info (see occAnal)
= second (Tick t) $ occ_anal_scrut e alts
occ_anal_scrut scrut _alts
= occAnal (vanillaCtxt env) scrut -- No need for rhsCtxt
occAnal env (Let bind body)
= case occAnal env body of { (body_usage, body') ->
case occAnalBind env noImpRuleEdges bind body_usage of { (final_usage, new_binds) ->
(final_usage, mkLets new_binds body') }}
occAnalArgs :: OccEnv -> [CoreExpr] -> [OneShots] -> (UsageDetails, [CoreExpr])
occAnalArgs _ [] _
= (emptyDetails, [])
occAnalArgs env (arg:args) one_shots
| isTypeArg arg
= case occAnalArgs env args one_shots of { (uds, args') ->
(uds, arg:args') }
| otherwise
= case argCtxt env one_shots of { (arg_env, one_shots') ->
case occAnal arg_env arg of { (uds1, arg') ->
case occAnalArgs env args one_shots' of { (uds2, args') ->
(uds1 +++ uds2, arg':args') }}}
{-
Applications are dealt with specially because we want
the "build hack" to work.
Note [Arguments of let-bound constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = let y = expensive x in
let z = (True,y) in
(case z of {(p,q)->q}, case z of {(p,q)->q})
We feel free to duplicate the WHNF (True,y), but that means
that y may be duplicated thereby.
If we aren't careful we duplicate the (expensive x) call!
Constructors are rather like lambdas in this way.
-}
occAnalApp :: OccEnv
-> (Expr CoreBndr, [Arg CoreBndr], [Tickish Id])
-> (UsageDetails, Expr CoreBndr)
occAnalApp env (Var fun, args, ticks)
| null ticks = (uds, mkApps (Var fun) args')
| otherwise = (uds, mkTicks ticks $ mkApps (Var fun) args')
where
uds = fun_uds +++ final_args_uds
!(args_uds, args') = occAnalArgs env args one_shots
!final_args_uds = markManyIf (isRhsEnv env && is_exp) args_uds
-- We mark the free vars of the argument of a constructor or PAP
-- as "many", if it is the RHS of a let(rec).
-- This means that nothing gets inlined into a constructor argument
-- position, which is what we want. Typically those constructor
-- arguments are just variables, or trivial expressions.
--
-- This is the *whole point* of the isRhsEnv predicate
-- See Note [Arguments of let-bound constructors]
n_val_args = valArgCount args
fun_uds = mkOneOcc env fun (n_val_args > 0)
is_exp = isExpandableApp fun n_val_args
-- See Note [CONLIKE pragma] in BasicTypes
-- The definition of is_exp should match that in
-- Simplify.prepareRhs
one_shots = argsOneShots (idStrictness fun) n_val_args
-- See Note [Use one-shot info]
occAnalApp env (fun, args, ticks)
= (fun_uds +++ args_uds, mkTicks ticks $ mkApps fun' args')
where
!(fun_uds, fun') = occAnal (addAppCtxt env args) fun
-- The addAppCtxt is a bit cunning. One iteration of the simplifier
-- often leaves behind beta redexs like
-- (\x y -> e) a1 a2
-- Here we would like to mark x,y as one-shot, and treat the whole
-- thing much like a let. We do this by pushing some True items
-- onto the context stack.
!(args_uds, args') = occAnalArgs env args []
markManyIf :: Bool -- If this is true
-> UsageDetails -- Then do markMany on this
-> UsageDetails
markManyIf True uds = mapVarEnv markMany uds
markManyIf False uds = uds
{-
Note [Use one-shot information]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The occurrence analyser propagates one-shot-lambda information in two situations:
* Applications: eg build (\cn -> blah)
Propagate one-shot info from the strictness signature of 'build' to
the \cn
* Let-bindings: eg let f = \c. let ... in \n -> blah
in (build f, build f)
Propagate one-shot info from the demand-info on 'f' to the
lambdas in its RHS (which may not be syntactically at the top)
Some of this is done by the demand analyser, but this way it happens
much earlier, taking advantage of the strictness signature of
imported functions.
Note [Binders in case alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case x of y { (a,b) -> f y }
We treat 'a', 'b' as dead, because they don't physically occur in the
case alternative. (Indeed, a variable is dead iff it doesn't occur in
its scope in the output of OccAnal.) It really helps to know when
binders are unused. See esp the call to isDeadBinder in
Simplify.mkDupableAlt
In this example, though, the Simplifier will bring 'a' and 'b' back to
life, because it binds 'y' to (a,b) (imagine f got inlined and
scrutinised y).
-}
occAnalAlt :: (OccEnv, Maybe (Id, CoreExpr))
-> CoreAlt
-> (UsageDetails, Alt IdWithOccInfo)
occAnalAlt (env, scrut_bind) (con, bndrs, rhs)
= case occAnal env rhs of { (rhs_usage1, rhs1) ->
let
(alt_usg, tagged_bndrs) = tagLamBinders rhs_usage1 bndrs
-- See Note [Binders in case alternatives]
(alt_usg', rhs2) =
wrapAltRHS env scrut_bind alt_usg tagged_bndrs rhs1
in
(alt_usg', (con, tagged_bndrs, rhs2)) }
wrapAltRHS :: OccEnv
-> Maybe (Id, CoreExpr) -- proxy mapping generated by mkAltEnv
-> UsageDetails -- usage for entire alt (p -> rhs)
-> [Var] -- alt binders
-> CoreExpr -- alt RHS
-> (UsageDetails, CoreExpr)
wrapAltRHS env (Just (scrut_var, let_rhs)) alt_usg bndrs alt_rhs
| occ_binder_swap env
, scrut_var `usedIn` alt_usg -- bndrs are not present in alt_usg, so this
-- handles condition (a) in Note [Binder swap]
, not captured -- See condition (b) in Note [Binder swap]
= ( alt_usg' +++ let_rhs_usg
, Let (NonRec tagged_scrut_var let_rhs') alt_rhs )
where
captured = any (`usedIn` let_rhs_usg) bndrs
-- The rhs of the let may include coercion variables
-- if the scrutinee was a cast, so we must gather their
-- usage. See Note [Gather occurrences of coercion variables]
(let_rhs_usg, let_rhs') = occAnal env let_rhs
(alt_usg', tagged_scrut_var) = tagBinder alt_usg scrut_var
wrapAltRHS _ _ alt_usg _ alt_rhs
= (alt_usg, alt_rhs)
{-
************************************************************************
* *
OccEnv
* *
************************************************************************
-}
data OccEnv
= OccEnv { occ_encl :: !OccEncl -- Enclosing context information
, occ_one_shots :: !OneShots -- Tells about linearity
, occ_gbl_scrut :: GlobalScruts
, occ_rule_act :: Activation -> Bool -- Which rules are active
-- See Note [Finding rule RHS free vars]
, occ_binder_swap :: !Bool -- enable the binder_swap
-- See CorePrep Note [Dead code in CorePrep]
}
type GlobalScruts = IdSet -- See Note [Binder swap on GlobalId scrutinees]
-----------------------------
-- OccEncl is used to control whether to inline into constructor arguments
-- For example:
-- x = (p,q) -- Don't inline p or q
-- y = /\a -> (p a, q a) -- Still don't inline p or q
-- z = f (p,q) -- Do inline p,q; it may make a rule fire
-- So OccEncl tells enough about the context to know what to do when
-- we encounter a constructor application or PAP.
data OccEncl
= OccRhs -- RHS of let(rec), albeit perhaps inside a type lambda
-- Don't inline into constructor args here
| OccVanilla -- Argument of function, body of lambda, scrutinee of case, etc.
-- Do inline into constructor args here
instance Outputable OccEncl where
ppr OccRhs = ptext (sLit "occRhs")
ppr OccVanilla = ptext (sLit "occVanilla")
type OneShots = [OneShotInfo]
-- [] No info
--
-- one_shot_info:ctxt Analysing a function-valued expression that
-- will be applied as described by one_shot_info
initOccEnv :: (Activation -> Bool) -> OccEnv
initOccEnv active_rule
= OccEnv { occ_encl = OccVanilla
, occ_one_shots = []
, occ_gbl_scrut = emptyVarSet
, occ_rule_act = active_rule
, occ_binder_swap = True }
vanillaCtxt :: OccEnv -> OccEnv
vanillaCtxt env = env { occ_encl = OccVanilla, occ_one_shots = [] }
rhsCtxt :: OccEnv -> OccEnv
rhsCtxt env = env { occ_encl = OccRhs, occ_one_shots = [] }
argCtxt :: OccEnv -> [OneShots] -> (OccEnv, [OneShots])
argCtxt env []
= (env { occ_encl = OccVanilla, occ_one_shots = [] }, [])
argCtxt env (one_shots:one_shots_s)
= (env { occ_encl = OccVanilla, occ_one_shots = one_shots }, one_shots_s)
isRhsEnv :: OccEnv -> Bool
isRhsEnv (OccEnv { occ_encl = OccRhs }) = True
isRhsEnv (OccEnv { occ_encl = OccVanilla }) = False
oneShotGroup :: OccEnv -> [CoreBndr]
-> ( OccEnv
, [CoreBndr] )
-- The result binders have one-shot-ness set that they might not have had originally.
-- This happens in (build (\cn -> e)). Here the occurrence analyser
-- linearity context knows that c,n are one-shot, and it records that fact in
the binder. This is useful to guide subsequent float-in/float-out transformations
oneShotGroup env@(OccEnv { occ_one_shots = ctxt }) bndrs
= go ctxt bndrs []
where
go ctxt [] rev_bndrs
= ( env { occ_one_shots = ctxt, occ_encl = OccVanilla }
, reverse rev_bndrs )
go [] bndrs rev_bndrs
= ( env { occ_one_shots = [], occ_encl = OccVanilla }
, reverse rev_bndrs ++ bndrs )
go ctxt (bndr:bndrs) rev_bndrs
| isId bndr
= case ctxt of
[] -> go [] bndrs (bndr : rev_bndrs)
(one_shot : ctxt) -> go ctxt bndrs (bndr': rev_bndrs)
where
bndr' = updOneShotInfo bndr one_shot
| otherwise
= go ctxt bndrs (bndr:rev_bndrs)
addAppCtxt :: OccEnv -> [Arg CoreBndr] -> OccEnv
addAppCtxt env@(OccEnv { occ_one_shots = ctxt }) args
= env { occ_one_shots = replicate (valArgCount args) OneShotLam ++ ctxt }
transClosureFV :: UniqFM VarSet -> UniqFM VarSet
-- If (f,g), (g,h) are in the input, then (f,h) is in the output
-- as well as (f,g), (g,h)
transClosureFV env
| no_change = env
| otherwise = transClosureFV (listToUFM new_fv_list)
where
(no_change, new_fv_list) = mapAccumL bump True (ufmToList env)
bump no_change (b,fvs)
| no_change_here = (no_change, (b,fvs))
| otherwise = (False, (b,new_fvs))
where
(new_fvs, no_change_here) = extendFvs env fvs
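-- A rough illustration, with hypothetical variables f, g, h: if the input
-- maps f to {g} and g to {h}, the result maps f to {g,h} and g to {h};
-- each entry is repeatedly extended with the free vars of its free vars
-- until a fixed point is reached.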
-------------
extendFvs_ :: UniqFM VarSet -> VarSet -> VarSet
extendFvs_ env s = fst (extendFvs env s) -- Discard the Bool flag
extendFvs :: UniqFM VarSet -> VarSet -> (VarSet, Bool)
-- (extendFVs env s) returns
-- (s `union` env(s), env(s) `subset` s)
extendFvs env s
| isNullUFM env
= (s, True)
| otherwise
= (s `unionVarSet` extras, extras `subVarSet` s)
where
extras :: VarSet -- env(s)
extras = foldUFM unionVarSet emptyVarSet $
intersectUFM_C (\x _ -> x) env s
{-
************************************************************************
* *
Binder swap
* *
************************************************************************
Note [Binder swap]
~~~~~~~~~~~~~~~~~~
We do these two transformations right here:
(1) case x of b { pi -> ri }
==>
case x of b { pi -> let x=b in ri }
(2) case (x |> co) of b { pi -> ri }
==>
case (x |> co) of b { pi -> let x = b |> sym co in ri }
Why (2)? See Note [Case of cast]
In both cases, in a particular alternative (pi -> ri), we only
add the binding if
(a) x occurs free in (pi -> ri)
(ie it occurs in ri, but is not bound in pi)
(b) the pi does not bind b (or the free vars of co)
We need (a) and (b) for the inserted binding to be correct.
For the alternatives where we inject the binding, we can transfer
all x's OccInfo to b. And that is the point.
Notice that
* The deliberate shadowing of 'x'.
* That (a) rapidly becomes false, so no bindings are injected.
The reason for doing these transformations here is because it allows
us to adjust the OccInfo for 'x' and 'b' as we go.
* Suppose the only occurrences of 'x' are the scrutinee and in the
ri; then this transformation makes it occur just once, and hence
get inlined right away.
* If we do this in the Simplifier, we don't know whether 'x' is used
in ri, so we are forced to pessimistically zap b's OccInfo even
though it is typically dead (ie neither it nor x appear in the
ri). There's nothing actually wrong with zapping it, except that
it's kind of nice to know which variables are dead. My nose
tells me to keep this information as robustly as possible.
The Maybe (Id,CoreExpr) passed to occAnalAlt is the extra let-binding
{x=b}; it's Nothing if the binder-swap doesn't happen.
There is a danger though. Consider
let v = x +# y
in case (f v) of w -> ...v...v...
And suppose that (f v) expands to just v. Then we'd like to
use 'w' instead of 'v' in the alternative. But it may be too
late; we may have substituted the (cheap) x+#y for v in the
same simplifier pass that reduced (f v) to v.
I think this is just too bad. CSE will recover some of it.
Note [Case of cast]
~~~~~~~~~~~~~~~~~~~
Consider case (x `cast` co) of b { I# ->
... (case (x `cast` co) of {...}) ...
We'd like to eliminate the inner case. That is the motivation for
equation (2) in Note [Binder swap]. When we get to the inner case, we
inline x, cancel the casts, and away we go.
Note [Binder swap on GlobalId scrutinees]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the scrutinee is a GlobalId we must take care in two ways
i) In order to *know* whether 'x' occurs free in the RHS, we need its
occurrence info. BUT, we don't gather occurrence info for
GlobalIds. That's what the (small) occ_gbl_scrut env in
OccEnv is for: it says "gather occurrence info for these".
ii) We must call localiseId on 'x' first, in case it's a GlobalId, or
has an External Name. See, for example, SimplEnv Note [Global Ids in
the substitution].
Note [Zap case binders in proxy bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
From the original
case x of cb(dead) { p -> ...x... }
we will get
case x of cb(live) { p -> let x = cb in ...x... }
Core Lint never expects to find an *occurrence* of an Id marked
as Dead, so we must zap the OccInfo on cb before making the
binding x = cb. See Trac #5028.
Historical note [no-case-of-case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We *used* to suppress the binder-swap in case expressions when
-fno-case-of-case is on. Old remarks:
"This happens in the first simplifier pass,
and enhances full laziness. Here's the bad case:
f = \ y -> ...(case x of I# v -> ...(case x of ...) ... )
If we eliminate the inner case, we trap it inside the I# v -> arm,
which might prevent some full laziness happening. I've seen this
in action in spectral/cichelli/Prog.hs:
[(m,n) | m <- [1..max], n <- [1..max]]
Hence the check for NoCaseOfCase."
However, now the full-laziness pass itself reverses the binder-swap, so this
check is no longer necessary.
Historical note [Suppressing the case binder-swap]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This old note describes a problem that is also fixed by doing the
binder-swap in OccAnal:
There is another situation when it might make sense to suppress the
case-expression binder-swap. If we have
case x of w1 { DEFAULT -> case x of w2 { A -> e1; B -> e2 }
...other cases .... }
We'll perform the binder-swap for the outer case, giving
case x of w1 { DEFAULT -> case w1 of w2 { A -> e1; B -> e2 }
...other cases .... }
But there is no point in doing it for the inner case, because w1 can't
be inlined anyway. Furthermore, doing the case-swapping involves
zapping w2's occurrence info (see paragraphs that follow), and that
forces us to bind w2 when doing case merging. So we get
case x of w1 { A -> let w2 = w1 in e1
B -> let w2 = w1 in e2
...other cases .... }
This is plain silly in the common case where w2 is dead.
Even so, I can't see a good way to implement this idea. I tried
not doing the binder-swap if the scrutinee was already evaluated
but that failed big-time:
data T = MkT !Int
case v of w { MkT x ->
case x of x1 { I# y1 ->
case x of x2 { I# y2 -> ...
Notice that because MkT is strict, x is marked "evaluated". But to
eliminate the last case, we must make sure that x (as well as
x1) has unfolding MkT y1. The straightforward thing to do is to do
the binder-swap. So this whole note is a no-op.
It's fixed by doing the binder-swap in OccAnal because we can do the
binder-swap unconditionally and still get occurrence analysis
information right.
-}
mkAltEnv :: OccEnv -> CoreExpr -> Id -> (OccEnv, Maybe (Id, CoreExpr))
-- Does three things: a) makes the occ_one_shots = OccVanilla
-- b) extends the GlobalScruts if possible
-- c) returns a proxy mapping, binding the scrutinee
-- to the case binder, if possible
mkAltEnv env@(OccEnv { occ_gbl_scrut = pe }) scrut case_bndr
= case stripTicksTopE (const True) scrut of
Var v -> add_scrut v case_bndr'
Cast (Var v) co -> add_scrut v (Cast case_bndr' (mkSymCo co))
-- See Note [Case of cast]
_ -> (env { occ_encl = OccVanilla }, Nothing)
where
add_scrut v rhs = ( env { occ_encl = OccVanilla, occ_gbl_scrut = pe `extendVarSet` v }
, Just (localise v, rhs) )
case_bndr' = Var (zapIdOccInfo case_bndr) -- See Note [Zap case binders in proxy bindings]
localise scrut_var = mkLocalId (localiseName (idName scrut_var)) (idType scrut_var)
-- Localise the scrut_var before shadowing it; we're making a
-- new binding for it, and it might have an External Name, or
-- even be a GlobalId; Note [Binder swap on GlobalId scrutinees]
-- Also we don't want any INLINE or NOINLINE pragmas!
{-
************************************************************************
* *
\subsection[OccurAnal-types]{UsageDetails}
* *
************************************************************************
-}
type UsageDetails = IdEnv OccInfo -- A finite map from ids to their usage
-- INVARIANT: never IAmDead
-- (Deadness is signalled by not being in the map at all)
(+++), combineAltsUsageDetails
:: UsageDetails -> UsageDetails -> UsageDetails
(+++) usage1 usage2
= plusVarEnv_C addOccInfo usage1 usage2
combineAltsUsageDetails usage1 usage2
= plusVarEnv_C orOccInfo usage1 usage2
addOneOcc :: UsageDetails -> Id -> OccInfo -> UsageDetails
addOneOcc usage id info
= plusVarEnv_C addOccInfo usage (unitVarEnv id info)
-- ToDo: make this more efficient
emptyDetails :: UsageDetails
emptyDetails = (emptyVarEnv :: UsageDetails)
usedIn :: Id -> UsageDetails -> Bool
v `usedIn` details = isExportedId v || v `elemVarEnv` details
type IdWithOccInfo = Id
tagLamBinders :: UsageDetails -- Of scope
-> [Id] -- Binders
-> (UsageDetails, -- Details with binders removed
[IdWithOccInfo]) -- Tagged binders
-- Used for lambda and case binders
-- It copes with the fact that lambda bindings can have a
-- stable unfolding, used for join points
tagLamBinders usage binders = usage' `seq` (usage', bndrs')
where
(usage', bndrs') = mapAccumR tag_lam usage binders
tag_lam usage bndr = (usage2, setBinderOcc usage bndr)
where
usage1 = usage `delVarEnv` bndr
usage2 | isId bndr = addIdOccs usage1 (idUnfoldingVars bndr)
| otherwise = usage1
tagBinder :: UsageDetails -- Of scope
-> Id -- Binders
-> (UsageDetails, -- Details with binders removed
IdWithOccInfo) -- Tagged binders
tagBinder usage binder
= let
usage' = usage `delVarEnv` binder
binder' = setBinderOcc usage binder
in
usage' `seq` (usage', binder')
setBinderOcc :: UsageDetails -> CoreBndr -> CoreBndr
setBinderOcc usage bndr
| isTyVar bndr = bndr
| isExportedId bndr = case idOccInfo bndr of
NoOccInfo -> bndr
_ -> setIdOccInfo bndr NoOccInfo
-- Don't use local usage info for visible-elsewhere things
-- BUT *do* erase any IAmALoopBreaker annotation, because we're
-- about to re-generate it and it shouldn't be "sticky"
| otherwise = setIdOccInfo bndr occ_info
where
occ_info = lookupVarEnv usage bndr `orElse` IAmDead
{-
************************************************************************
* *
\subsection{Operations over OccInfo}
* *
************************************************************************
-}
mkOneOcc :: OccEnv -> Id -> InterestingCxt -> UsageDetails
mkOneOcc env id int_cxt
| isLocalId id
= unitVarEnv id (OneOcc False True int_cxt)
| id `elemVarEnv` occ_gbl_scrut env
= unitVarEnv id NoOccInfo
| otherwise
= emptyDetails
markMany, markInsideLam :: OccInfo -> OccInfo
markMany _ = NoOccInfo
markInsideLam (OneOcc _ one_br int_cxt) = OneOcc True one_br int_cxt
markInsideLam occ = occ
addOccInfo, orOccInfo :: OccInfo -> OccInfo -> OccInfo
addOccInfo a1 a2 = ASSERT( not (isDeadOcc a1 || isDeadOcc a2) )
NoOccInfo -- Both branches are at least One
-- (Argument is never IAmDead)
-- (orOccInfo orig new) is used
-- when combining occurrence info from branches of a case
orOccInfo (OneOcc in_lam1 _ int_cxt1)
(OneOcc in_lam2 _ int_cxt2)
= OneOcc (in_lam1 || in_lam2)
False -- False, because it occurs in both branches
(int_cxt1 && int_cxt2)
orOccInfo a1 a2 = ASSERT( not (isDeadOcc a1 || isDeadOcc a2) )
NoOccInfo
|
urbanslug/ghc
|
compiler/simplCore/OccurAnal.hs
|
bsd-3-clause
| 76,124
| 0
| 16
| 21,190
| 8,018
| 4,377
| 3,641
| 564
| 7
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Eq
-- Copyright : (c) The University of Glasgow 2005
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : stable
-- Portability : portable
--
-- Equality
--
-----------------------------------------------------------------------------
module Data.Eq (
Eq(..),
) where
#if __GLASGOW_HASKELL__
import GHC.Base
#endif
|
beni55/haste-compiler
|
libraries/ghc-7.8/base/Data/Eq.hs
|
bsd-3-clause
| 577
| 0
| 5
| 91
| 38
| 31
| 7
| 4
| 0
|
{-# LANGUAGE PatternSynonyms #-}
module ShouldCompile where
pattern Single{x} = [x]
-- Selector
selector :: Int
selector = x [5]
update :: [String]
update = ["String"] { x = "updated" }
|
ezyang/ghc
|
testsuite/tests/patsyn/should_compile/records-compile.hs
|
bsd-3-clause
| 189
| 0
| 7
| 34
| 63
| 38
| 25
| 7
| 1
|
module Data.Binary.Format.DWG.ObjectMap (ObjectMap) where
import Control.Applicative
import Data.Binary
import Data.Binary.Get (runGet)
import qualified Data.Binary.Bits.Get as Bits
import Data.Binary.Format.DWG.Types
import Data.Binary.Format.DWG.Util
import Data.Binary.Format.DWG.Bitcoded
import qualified Data.Binary.Format.DWG.Bitcoded as Bits
type Section = (Int, [(DWG_MC, DWG_MC)])
newtype ObjectMap = ObjectMap [(DWG_H, DWG_MC)] deriving (Show)
handlePlusOffset :: DWG_H -> DWG_MC -> DWG_H
handlePlusOffset h o =
case h of
(DWG_H code handle) -> DWG_H code (handle + fromIntegral o)
decodeSection :: Bits.BitGet Section
decodeSection = do
size <- Bits.getWord16be 16
-- "each section is cut off at maximim length of 2032"
let size' = if size > 2032 then 2032 else fromIntegral size
bs <- Bits.getLazyByteString (size' - 2) -- because crc
let s = flip runGet bs $ Bits.runBitGet $ many $ do
-- handle offset from previous handle
ho <- Bits.get
-- location offset
lo <- Bits.get
return (ho, lo)
crc <- Bits.getWord16be 16
return (size', s)
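-- Added note (illustrative): in the Binary instance below, 'get' reads
-- sections until the terminating section of size 2, then uses scanl to turn
-- the per-entry (handle offset, location offset) deltas into absolute
-- (handle, location) pairs, dropping the seed pair with 'tail'.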
instance Binary ObjectMap where
put = undefined
get = do
let lastHandle = DWG_H 0 0
lastLoc = 0
ss <- concatMap snd <$> (Bits.runBitGet $ repeatUntil ((== 2) . fst) $ decodeSection)
return $ ObjectMap $ tail $ scanl (\(h,l) (ho, lo) -> (handlePlusOffset h ho, l + lo)) (lastHandle, lastLoc) ss
|
polachok/yodawg
|
src/Data/Binary/Format/DWG/ObjectMap.hs
|
isc
| 1,541
| 0
| 16
| 398
| 480
| 267
| 213
| 33
| 2
|
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings where
import ClassyPrelude.Yesod
import Control.Exception as CE (throw)
import Data.Aeson (Result (..), fromJSON, withObject, (.!=),
(.:?))
import Data.FileEmbed (embedFile)
import Data.Yaml (decodeEither')
import Language.Haskell.TH.Syntax (Exp, Name, Q)
import Network.Wai.Handler.Warp (HostPreference)
import Yesod.Default.Config2 (applyEnvValue, configSettingsYml)
import Yesod.Default.Util (WidgetFileSettings, widgetFileNoReload,
widgetFileReload)
-- | Runtime settings to configure this application. These settings can be
-- loaded from various sources: defaults, environment variables, config files,
-- theoretically even a database.
data AppSettings = AppSettings
{ appStaticDir :: String
-- ^ Directory from which to serve static files.
, appRoot :: Text
-- ^ Base for all generated URLs.
, appHost :: HostPreference
-- ^ Host/interface the server should bind to.
, appPort :: Int
-- ^ Port to listen on
, appIpFromHeader :: Bool
-- ^ Get the IP address from the header when logging. Useful when sitting
-- behind a reverse proxy.
, appDetailedRequestLogging :: Bool
-- ^ Use detailed request logging system
, appShouldLogAll :: Bool
-- ^ Should all log messages be displayed?
, appReloadTemplates :: Bool
-- ^ Use the reload version of templates
, appMutableStatic :: Bool
-- ^ Assume that files in the static dir may change after compilation
, appSkipCombining :: Bool
-- ^ Perform no stylesheet/script combining
-- Example app-specific configuration values.
, appCopyright :: Text
-- ^ Copyright text to appear in the footer of the page
, appAnalytics :: Maybe Text
-- ^ Google Analytics code
}
instance FromJSON AppSettings where
parseJSON = withObject "AppSettings" $ \o -> do
let defaultDev =
#if DEVELOPMENT
True
#else
False
#endif
appStaticDir <- o .: "static-dir"
appRoot <- o .: "approot"
appHost <- fromString <$> o .: "host"
appPort <- o .: "port"
appIpFromHeader <- o .: "ip-from-header"
appDetailedRequestLogging <- o .:? "detailed-logging" .!= defaultDev
appShouldLogAll <- o .:? "should-log-all" .!= defaultDev
appReloadTemplates <- o .:? "reload-templates" .!= defaultDev
appMutableStatic <- o .:? "mutable-static" .!= defaultDev
appSkipCombining <- o .:? "skip-combining" .!= defaultDev
appCopyright <- o .: "copyright"
appAnalytics <- o .:? "analytics"
return AppSettings {..}
-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
--
-- For more information on modifying behavior, see:
--
-- https://github.com/yesodweb/yesod/wiki/Overriding-widgetFile
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
-- | How static files should be combined.
combineSettings :: CombineSettings
combineSettings = def
-- The rest of this file contains settings which rarely need changing by a
-- user.
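-- Sample usage (inside a Widget; the template name is illustrative):
--
-- > $(widgetFile "homepage")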
widgetFile :: String -> Q Exp
widgetFile = (if appReloadTemplates compileTimeAppSettings
then widgetFileReload
else widgetFileNoReload)
widgetFileSettings
-- | Raw bytes at compile time of @config/settings.yml@
configSettingsYmlBS :: ByteString
configSettingsYmlBS = $(embedFile configSettingsYml)
-- | @config/settings.yml@, parsed to a @Value@.
configSettingsYmlValue :: Value
configSettingsYmlValue = either CE.throw id $ decodeEither' configSettingsYmlBS
-- | A version of @AppSettings@ parsed at compile time from @config/settings.yml@.
compileTimeAppSettings :: AppSettings
compileTimeAppSettings =
case fromJSON $ applyEnvValue False mempty configSettingsYmlValue of
Error e -> error e
Success settings -> settings
-- The following two functions can be used to combine multiple CSS or JS files
-- at compile time to decrease the number of http requests.
-- Sample usage (inside a Widget):
--
-- > $(combineStylesheets 'StaticR [style1_css, style2_css])
combineStylesheets :: Name -> [Route Static] -> Q Exp
combineStylesheets = combineStylesheets'
(appSkipCombining compileTimeAppSettings)
combineSettings
combineScripts :: Name -> [Route Static] -> Q Exp
combineScripts = combineScripts'
(appSkipCombining compileTimeAppSettings)
combineSettings
|
nek0/yocage
|
Settings.hs
|
mit
| 5,163
| 0
| 12
| 1,388
| 676
| 388
| 288
| -1
| -1
|
-- | Transactional variables, for use with 'MonadSTM'.
module Control.Concurrent.STM.CTVar
( -- * @CTVar@s
CTVar
, newCTVar
, readCTVar
, writeCTVar
, modifyCTVar
, modifyCTVar'
, swapCTVar
) where
import Control.Monad.STM.Class
-- * @CTVar@s
-- | Mutate the contents of a 'CTVar'. This is non-strict.
modifyCTVar :: MonadSTM m => CTVar m a -> (a -> a) -> m ()
modifyCTVar ctvar f = do
a <- readCTVar ctvar
writeCTVar ctvar $ f a
-- | Mutate the contents of a 'CTVar' strictly.
modifyCTVar' :: MonadSTM m => CTVar m a -> (a -> a) -> m ()
modifyCTVar' ctvar f = do
a <- readCTVar ctvar
writeCTVar ctvar $! f a
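-- For example (an illustrative sketch), a strict counter increment:
--
-- > increment :: MonadSTM m => CTVar m Int -> m ()
-- > increment v = modifyCTVar' v (+1)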
-- | Swap the contents of a 'CTVar', returning the old value.
swapCTVar :: MonadSTM m => CTVar m a -> a -> m a
swapCTVar ctvar a = do
old <- readCTVar ctvar
writeCTVar ctvar a
return old
|
bitemyapp/dejafu
|
Control/Concurrent/STM/CTVar.hs
|
mit
| 835
| 0
| 9
| 190
| 246
| 123
| 123
| 23
| 1
|
module VM.Core where
import qualified Data.Bits as B
import qualified Data.Vector as V
data CPU a = CPU Int (V.Vector Int) (V.Vector Int) deriving (Eq)
instance (Show a) => Show (CPU a) where
show (CPU c rs _) = "CPU " ++ (show c) ++ " " ++ (show rs)
data Arg a = Reg Int | Mem Int | Val a deriving (Show, Eq)
data Instruction a = Nop
| Mov (Arg a) (Arg a)
| Add (Arg a) (Arg a) (Arg a)
| Sub (Arg a) (Arg a) (Arg a)
| Mul (Arg a) (Arg a) (Arg a)
| And (Arg a) (Arg a) (Arg a)
| Or (Arg a) (Arg a) (Arg a)
| Xor (Arg a) (Arg a) (Arg a)
| Sll (Arg a) (Arg a) (Arg a)
| Srl (Arg a) (Arg a) (Arg a)
| Jmp Int
| Bne (Arg a) (Arg a) Int
| Beq (Arg a) (Arg a) Int
| Blt (Arg a) (Arg a) Int
| Bgt (Arg a) (Arg a) Int
deriving (Show, Eq)
valOf (Reg r) (CPU _ rs _) = rs V.! r
valOf (Mem m) (CPU _ _ mem) = mem V.! m
valOf (Val a) _ = a
final f r1 r2 cpu = f (valOf r1 cpu) (valOf r2 cpu)
updatePair dst f r1 r2 cpu = V.fromList [(dst, (final f r1 r2 cpu))]
combine f (Reg dst) r1 r2 cpu@(CPU c rs mem) = CPU (c+1) (V.update rs (updatePair dst f r1 r2 cpu)) mem
combine f (Mem dst) r1 r2 cpu@(CPU c rs mem) = CPU (c+1) rs (V.update mem (updatePair dst f r1 r2 cpu))
mov dst src cpu = combine (\x _ -> x) dst src dst cpu
add = combine (+)
sub = combine (-)
mul = combine (*)
-- div = combine (/)
xor = combine B.xor
and = combine (B..&.)
or = combine (B..|.)
sll = combine B.shiftL
srl = combine B.shiftR
recount (CPU _ rs mem) c = CPU c rs mem
run :: V.Vector (Instruction Int) -> CPU Int -> CPU Int
run is cpu@(CPU c _ _) =
case (is V.!? c) of
Nothing -> cpu
(Just i) -> run is $ runInstruction i cpu
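-- A small illustrative example (not part of the original module): run a
-- one-instruction program that adds registers 0 and 1 into register 2.
--
-- > demo :: CPU Int
-- > demo = run (V.fromList [Add (Reg 2) (Reg 0) (Reg 1)])
-- >            (CPU 0 (V.fromList [3, 4, 0]) V.empty)
--
-- Here 'demo' evaluates to a CPU with registers [3,4,7] and counter 1.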
runPrint :: V.Vector (Instruction Int) -> CPU Int -> IO ()
runPrint is cpu@(CPU c _ _) = do
case (is V.!? c) of
Nothing -> print cpu
(Just i) -> do
print i
let final = runInstruction i cpu
print final
runPrint is final
runInstruction :: Instruction Int -> CPU Int -> CPU Int
runInstruction (Mov dst src) cpu = mov dst src cpu
runInstruction (Add dst r1 r2) cpu = add dst r1 r2 cpu
runInstruction (Sub dst r1 r2) cpu = sub dst r1 r2 cpu
runInstruction (Mul dst r1 r2) cpu = mul dst r1 r2 cpu
runInstruction (And dst r1 r2) cpu = VM.Core.and dst r1 r2 cpu
runInstruction (Xor dst r1 r2) cpu = xor dst r1 r2 cpu
runInstruction (Or dst r1 r2) cpu = VM.Core.or dst r1 r2 cpu
runInstruction (Sll dst r1 r2) cpu = sll dst r1 r2 cpu
runInstruction (Srl dst r1 r2) cpu = srl dst r1 r2 cpu
runInstruction (Jmp idx) cpu = recount cpu idx
runInstruction (Bne r1 r2 idx) cpu = branchIf (/=) r1 r2 idx cpu
runInstruction (Beq r1 r2 idx) cpu = branchIf (==) r1 r2 idx cpu
runInstruction (Blt r1 r2 idx) cpu = branchIf (<) r1 r2 idx cpu
runInstruction (Bgt r1 r2 idx) cpu = branchIf (>) r1 r2 idx cpu
runInstruction Nop cpu@(CPU c _ _) = recount cpu (c+1)
branchIf f r1 r2 idx cpu@(CPU c rs mem)
| f (valOf r1 cpu) (valOf r2 cpu) = recount cpu idx
| otherwise = CPU (c+1) rs mem
|
gabrielPeart/VM.hs
|
src/VM/Core.hs
|
mit
| 3,297
| 0
| 15
| 1,070
| 1,725
| 877
| 848
| 73
| 2
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.RTCDTMFToneChangeEvent
(js_getTone, getTone, RTCDTMFToneChangeEvent,
castToRTCDTMFToneChangeEvent, gTypeRTCDTMFToneChangeEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"tone\"]" js_getTone ::
RTCDTMFToneChangeEvent -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCDTMFToneChangeEvent.tone Mozilla RTCDTMFToneChangeEvent.tone documentation>
getTone ::
(MonadIO m, FromJSString result) =>
RTCDTMFToneChangeEvent -> m result
getTone self = liftIO (fromJSString <$> (js_getTone (self)))
|
manyoo/ghcjs-dom
|
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/RTCDTMFToneChangeEvent.hs
|
mit
| 1,417
| 6
| 10
| 169
| 365
| 233
| 132
| 24
| 1
|
-- Parser (Syntax Analyzer)
-- Version: 14/06/2017
module Parser where
-- External imports
import Control.Monad.IO.Class
import Text.Parsec
-- Internal imports
import Lexer
import Types
import State
-- -----------------------------------------------------------------------------
-- Parser to Tokens
-- -----------------------------------------------------------------------------
-- - Program Token
programToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
programToken = tokenPrim show updatePositon getToken where
getToken (Program pos) = Just (Program pos)
getToken _ = Nothing
-- - End Token
endToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
endToken = tokenPrim show updatePositon getToken where
getToken (End pos) = Just (End pos)
getToken _ = Nothing
-- - ID Token
idToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
idToken = tokenPrim show updatePositon getToken where
getToken (Id x pos) = Just (Id x pos)
getToken _ = Nothing
-- - Colon Token
colonToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
colonToken = tokenPrim show updatePositon getToken where
getToken (Colon pos) = Just (Colon pos)
getToken _ = Nothing
-- - Comma Token
commaToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
commaToken = tokenPrim show updatePositon getToken where
getToken (Comma pos) = Just (Comma pos)
getToken _ = Nothing
-- - Semicolon Token
semiColonToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
semiColonToken = tokenPrim show updatePositon getToken where
getToken (SemiColon pos) = Just (SemiColon pos)
getToken _ = Nothing
-- - Assign Token
assignToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
assignToken = tokenPrim show updatePositon getToken where
getToken (Assign pos) = Just (Assign pos)
getToken _ = Nothing
-- --------------------------------------------------------
-- Type tokens
-- --------------------------------------------------------
-- - Type Token
typeToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
typeToken = tokenPrim show updatePositon getToken where
getToken (Type x pos) = Just (Type x pos)
getToken _ = Nothing
-- - Nat Token
natToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
natToken = tokenPrim show updatePositon getToken where
getToken (Nat x pos) = if x < 0 then error "Invalid assignment."
else Just (Nat x pos)
getToken _ = Nothing
-- - Int Token
intToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
intToken = tokenPrim show updatePositon getToken where
getToken (Nat x pos) = Just (Int x pos)
getToken (Int x pos) = Just (Int x pos)
getToken _ = Nothing
-- - Real Token
realToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
realToken = tokenPrim show updatePositon getToken where
getToken (Nat x pos) = let y = integerToFloat(x) in Just (Real y pos)
getToken (Int x pos) = let y = integerToFloat(x) in Just (Real y pos)
getToken (Real x pos) = Just (Real x pos)
getToken _ = Nothing
-- - Bool Token
boolToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
boolToken = tokenPrim show updatePositon getToken where
getToken (Bool x pos) = Just (Bool x pos)
getToken _ = Nothing
-- - Text Token
textToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
textToken = tokenPrim show updatePositon getToken where
getToken (Text x pos) = let y = removeQuote x in Just (Text y pos)
getToken _ = Nothing
-- - Array Token
arrayToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
arrayToken = tokenPrim show updatePositon getToken where
getToken (Array x pos) = Just (Array x pos)
getToken _ = Nothing
-- --------------------------------------------------------
-- Operator tokens
-- --------------------------------------------------------
-- - Addition Token
additionToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
additionToken = tokenPrim show updatePositon getToken where
getToken (Addition p) = Just (Addition p)
getToken _ = Nothing
-- - Subtraction Token
subtractionToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
subtractionToken = tokenPrim show updatePositon getToken where
getToken (Subtraction p) = Just (Subtraction p)
getToken _ = Nothing
-- - Multiplication Token
multiplicationToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
multiplicationToken = tokenPrim show updatePositon getToken where
getToken (Multiplication p) = Just (Multiplication p)
getToken _ = Nothing
-- - Division Token
divisionToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
divisionToken = tokenPrim show updatePositon getToken where
getToken (Division p) = Just (Division p)
getToken _ = Nothing
-- - Equality Token
equalityToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
equalityToken = tokenPrim show updatePositon getToken where
getToken (Equality p) = Just (Equality p)
getToken _ = Nothing
-- - Greater Token
greaterToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
greaterToken = tokenPrim show updatePositon getToken where
getToken (Greater p) = Just (Greater p)
getToken _ = Nothing
-- - Greater or equal Token
greaterOrEqualToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
greaterOrEqualToken = tokenPrim show updatePositon getToken where
getToken (GreaterOrEqual p) = Just (GreaterOrEqual p)
getToken _ = Nothing
-- - Smaller Token
smallerToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
smallerToken = tokenPrim show updatePositon getToken where
getToken (Smaller p) = Just (Smaller p)
getToken _ = Nothing
-- - Smaller or equal Token
smallerOrEqualToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
smallerOrEqualToken = tokenPrim show updatePositon getToken where
getToken (SmallerOrEqual p) = Just (SmallerOrEqual p)
getToken _ = Nothing
-- - Denial Token
denialToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
denialToken = tokenPrim show updatePositon getToken where
getToken (Denial p) = Just (Denial p)
getToken _ = Nothing
-- - Dot Token
dotToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
dotToken = tokenPrim show updatePositon getToken where
getToken (Dot p) = Just (Dot p)
getToken _ = Nothing
-- - And Token
andToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
andToken = tokenPrim show updatePositon getToken where
getToken (And p) = Just (And p)
getToken _ = Nothing
-- - Or Token
orToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
orToken = tokenPrim show updatePositon getToken where
getToken (Or p) = Just (Or p)
getToken _ = Nothing
-- --------------------------------------------------------
-- Native function tokens
-- --------------------------------------------------------
-- - Print Token
printToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
printToken = tokenPrim show updatePositon getToken where
getToken (Print p) = Just (Print p)
getToken _ = Nothing
-- - Input Token
inputToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
inputToken = tokenPrim show updatePositon getToken where
getToken (Input p) = Just (Input p)
getToken _ = Nothing
-- - If Token
ifToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
ifToken = tokenPrim show updatePositon getToken where
getToken (If p) = Just (If p)
getToken _ = Nothing
-- - Else Token
elseToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
elseToken = tokenPrim show updatePositon getToken where
getToken (Else p) = Just (Else p)
getToken _ = Nothing
-- - Else if Token
elseIfToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
elseIfToken = tokenPrim show updatePositon getToken where
getToken (Else_If p) = Just (Else_If p)
getToken _ = Nothing
-- - End if Token
endIfToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
endIfToken = tokenPrim show updatePositon getToken where
getToken (End_If p) = Just (End_If p)
getToken _ = Nothing
-- - While Token
whileToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
whileToken = tokenPrim show updatePositon getToken where
getToken (While p) = Just (While p)
getToken _ = Nothing
-- - End while Token
endWhileToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
endWhileToken = tokenPrim show updatePositon getToken where
getToken (End_While p) = Just (End_While p)
getToken _ = Nothing
-- --------------------------------------------------------
-- Other tokens
-- --------------------------------------------------------
-- - OpenParentheses Token
openParenthesesToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
openParenthesesToken = tokenPrim show updatePositon getToken where
getToken (Open_Parentheses p) = Just (Open_Parentheses p)
getToken _ = Nothing
-- - CloseParentheses Token
closeParenthesesToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
closeParenthesesToken = tokenPrim show updatePositon getToken where
getToken (Close_Parentheses p) = Just (Close_Parentheses p)
getToken _ = Nothing
-- - OpenBracket Token
openBracketToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
openBracketToken = tokenPrim show updatePositon getToken where
getToken (Open_Bracket p) = Just (Open_Bracket p)
getToken _ = Nothing
-- - CloseBracket Token
closeBracketToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
closeBracketToken = tokenPrim show updatePositon getToken where
getToken (Close_Bracket p) = Just (Close_Bracket p)
getToken _ = Nothing
-- -----------------------------------------------------------------------------
-- Other necessary functions
-- -----------------------------------------------------------------------------
-- - Ignore Token
ignoreToken :: ParsecT [Token] (Scope, [Var], [Statement]) IO(Token)
ignoreToken = tokenPrim show updatePositon getToken where
getToken t = Just t
-- - Update position
-- SourcePos Position
-- Token Token
-- [Token] All tokens
-- Return Updated position
updatePositon :: SourcePos -> Token -> [Token] -> SourcePos
updatePositon position _ (token:_) = position -- needs improvement
updatePositon position _ [] = position
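-- Illustrative sketch (not part of the original grammar): the token parsers
-- above compose with the usual Parsec combinators. 'declSketch' is a
-- hypothetical helper that only shows the shape of a "type id;" sequence;
-- the real statement grammar lives elsewhere in this project.
declSketch :: ParsecT [Token] (Scope, [Var], [Statement]) IO (Token, Token)
declSketch = do
  t <- typeToken
  i <- idToken
  _ <- semiColonToken
  return (t, i)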
|
brenov/set
|
interpreter/Parser.hs
|
mit
| 10,906
| 0
| 12
| 2,255
| 3,502
| 1,908
| 1,594
| 172
| 4
|
module Main where
import Static.Checker
main :: IO ()
main = return ()
|
mooreniemi/pfpl
|
app/Main.hs
|
mit
| 73
| 0
| 6
| 15
| 29
| 16
| 13
| 4
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Web.TodoMVC.Servant.Shared (
Store,
LnAPI,
todoAPI,
newBigState,
runApp,
runApp_Maybe,
apply2
) where
import Control.Concurrent.STM
import Control.Monad.IO.Class
import Control.Monad.State
import Control.Monad.Trans.Either
import Control.Lens
import Servant
import Todo
data BigState = BigState {
_todoApp :: TodoApp
}
makeLenses ''BigState
type Store = TVar BigState
type LnAPI =
"html" :> Raw
:<|> "dist" :> Raw
:<|> "static" :> Raw
-- GET /todos
-- POST /todos , body = Todo
-- DELETE /todos
-- GET /todos/:todo_id
-- DELETE /todos/:todo_id
-- PUT /todos/:todo_id , body = Todo
:<|> "todos" :> Get '[JSON] [Todo]
:<|> "todos" :> ReqBody '[JSON] Todo :> Post '[JSON] Todo
:<|> "todos" :> Delete '[JSON] Bool
:<|> "todos" :> Capture "todo_id" TodoId :> Get '[JSON] Todo
:<|> "todos" :> Capture "todo_id" TodoId :> Delete '[JSON] TodoId
:<|> "todos" :> Capture "todo_id" TodoId :> ReqBody '[JSON] Todo :> Put '[JSON] Todo
todoAPI :: Proxy LnAPI
todoAPI = Proxy
-- | newBigState
--
newBigState :: IO (TVar BigState)
newBigState = newTVarIO $ BigState newTodoApp
-- | runApp
--
-- simple todo application helper
--
runApp :: MonadIO m => Store -> State TodoApp b -> EitherT ServantErr m b
runApp store cb = do
liftIO $ atomically $ do
v <- readTVar store
let (a, s) = runState cb (_todoApp v)
writeTVar store (set todoApp s v)
return a
-- | runApp_Maybe
--
-- returns an error if our todo action returns Nothing
--
runApp_Maybe :: MonadIO m => Store -> State TodoApp (Maybe b) -> EitherT ServantErr m b
runApp_Maybe store cb = runApp store cb >>= maybe (left err400) return
-- | apply2
--
-- bleh: having some weird type errors
--
apply2 :: MonadIO m => (t -> t1 -> State TodoApp (Maybe b)) -> Store -> t -> t1 -> EitherT ServantErr m b
apply2 f s x y = runApp_Maybe s (f x y)
|
adarqui/todomvc-haskell-servant-purescript
|
haskell_src/Web/TodoMVC/Servant/Shared.hs
|
mit
| 2,177
| 0
| 29
| 508
| 624
| 331
| 293
| 51
| 1
|
{-# LANGUAGE TypeOperators #-}
module Language.CL.C.Types.Pointer where
import Language.CL.C.CodeGen.TypeRepr
import Language.CL.C.HOAS.AST
import Language.CL.C.Types.Classes
import Language.CL.C.Types.Scalar
data Pointer a = Pointer Int
instance LangType a => LangType (Pointer a) where
typeOf = mkPtrTR . typeOf . unsafeUnlift
where unsafeUnlift :: Pointer a -> a
unsafeUnlift = error "unsafe unlift pointer type"
instance LangType a => ParamType (Pointer a)
instance LangType a => RetType (Pointer a)
instance Literal (Pointer a) where
showLit (Pointer a) = show a
instance LangType a => CLEq (Pointer a)
instance LangType a => CLOrd (Pointer a)
ref :: ParamType a => Expression a -> Expression (Pointer a)
ref = mkBuiltInUOp "&"
deref :: RetType a => Expression (Pointer a) -> Expression a
deref = mkBuiltInUOp "*"
(*+*) :: LangType a => Expression (Pointer a) -> Expression CLSize -> Expression (Pointer a)
(*+*) = mkBuiltInBOp "+"
nextP :: LangType a => Expression (Pointer a) -> Expression (Pointer a)
nextP = (*+* 1)
(*-*) :: LangType a => Expression (Pointer a) -> Expression CLSize -> Expression (Pointer a)
(*-*) = mkBuiltInBOp "-"
nullPtr :: Expression (Pointer a)
nullPtr = mkLit $ Pointer 0
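-- Illustrative helper (not part of the original API): dereference the element
-- one past a pointer by combining 'nextP' and 'deref'.
derefNext :: (LangType a, RetType a) => Expression (Pointer a) -> Expression a
derefNext = deref . nextP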
|
pxqr/language-cl-c
|
Language/CL/C/Types/Pointer.hs
|
mit
| 1,238
| 0
| 10
| 217
| 466
| 240
| 226
| 29
| 1
|
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : $Header$
Description : Coding a description language into CASL
Copyright : (c) Stef Joosten, Christian Maeder DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : experimental
Portability : non-portable (imports Logic.Logic)
The translating comorphism from Adl to CASL.
-}
module Comorphisms.Adl2CASL
( Adl2CASL (..)
) where
import Logic.Logic
import Logic.Comorphism
-- Adl
import Adl.Logic_Adl as A
import Adl.As
import Adl.Sign as A
-- CASL
import CASL.Logic_CASL
import CASL.AS_Basic_CASL
import CASL.Sublogic
import CASL.Sign as C
import CASL.Morphism as C
import CASL.Fold
import CASL.Overload
import Common.AS_Annotation
import Common.DefaultMorphism
import Common.DocUtils
import Common.Id
import Common.ProofTree
import Common.Result
import Common.Token
import qualified Common.Lib.Rel as Rel
import qualified Common.Lib.MapSet as MapSet
import Common.Lib.State
import qualified Data.Set as Set
-- | lid of the morphism
data Adl2CASL = Adl2CASL deriving Show
instance Language Adl2CASL -- default is ok
instance Comorphism Adl2CASL
Adl
()
Context
Sen
()
()
A.Sign
A.Morphism
A.Symbol
A.RawSymbol
ProofTree
CASL
CASL_Sublogics
CASLBasicSpec
CASLFORMULA
SYMB_ITEMS
SYMB_MAP_ITEMS
CASLSign
CASLMor
C.Symbol
C.RawSymbol
ProofTree
where
sourceLogic Adl2CASL = Adl
sourceSublogic Adl2CASL = ()
targetLogic Adl2CASL = CASL
mapSublogic Adl2CASL _ = Just $ caslTop
{ has_part = False
, sub_features = LocFilSub
, cons_features = NoSortGen }
map_theory Adl2CASL = mapTheory
map_sentence Adl2CASL s = return . mapSen (mapSign s)
map_morphism Adl2CASL = mapMor
map_symbol Adl2CASL _ = Set.singleton . mapSym
is_model_transportable Adl2CASL = True
has_model_expansion Adl2CASL = True
is_weakly_amalgamable Adl2CASL = True
isInclusionComorphism Adl2CASL = True
mapTheory :: (A.Sign, [Named Sen]) -> Result (CASLSign, [Named CASLFORMULA])
mapTheory (s, ns) = let cs = mapSign s in
return (cs, map (mapNamed $ mapSen cs) ns)
relTypeToPred :: RelType -> PredType
relTypeToPred (RelType c1 c2) = PredType [conceptToId c1, conceptToId c2]
mapSign :: A.Sign -> CASLSign
mapSign s = (C.emptySign ())
{ sortRel = Rel.transClosure $ Rel.union
(Rel.fromKeysSet $ Set.fold (\ sy -> case sy of
Con (C i) -> Set.insert $ simpleIdToId i
_ -> id) Set.empty $ A.symOf s)
$ Rel.map conceptToId $ isas s
, predMap = MapSet.fromList
. map (\ (i, l) -> (transRelId $ idToSimpleId i, l))
. MapSet.toList . MapSet.map relTypeToPred . MapSet.fromMap $ rels s
}
transRelId :: Token -> Id
transRelId t@(Token s p) = simpleIdToId $
if elem s casl_reserved_fwords then Token ("P_" ++ s) p else t
mapSen :: CASLSign -> Sen -> CASLFORMULA
mapSen sig s = case s of
DeclProp r p -> getRelProp sig r p
Assertion _ r ->
let ((v1, s1), (v2, s2), f) = evalState (transRule sig r) 1 in
mkForall [mkVarDecl v1 s1, mkVarDecl v2 s2] f
-- | Translation of morphisms
mapMor :: A.Morphism -> Result CASLMor
mapMor mor = return $ embedMorphism ()
(mapSign $ domOfDefaultMorphism mor) $ mapSign $ codOfDefaultMorphism mor
mapSym :: A.Symbol -> C.Symbol
mapSym s = case s of
Con c -> idToSortSymbol $ conceptToId c
Rel (Sgn n t) -> idToPredSymbol (transRelId n) $ relTypeToPred t
next :: State Int Int
next = do
i <- get
put $ i + 1
return i
getRelPred :: CASLSign -> Relation -> PRED_SYMB
getRelPred sig m@(Sgn t (RelType c1 c2)) = let
ty1 = conceptToId c1
ty2 = conceptToId c2
i = transRelId t
cs = filter (\ pt -> case predArgs pt of
[fr, to] -> leqSort sig ty1 fr && leqSort sig ty2 to
_ -> False)
$ Set.toList $ MapSet.lookup i $ predMap sig
in case cs of
ty : _ ->
Qual_pred_name i (toPRED_TYPE ty) $ tokPos t
_ -> error $ "getRelPred " ++ showDoc m ""
getRelProp :: CASLSign -> Relation -> RangedProp -> CASLFORMULA
getRelProp sig r p =
let qp@(Qual_pred_name _ (Pred_type [fr, to] _) _) = getRelPred sig r
q = propRange p
q1 = Var_decl [mkSimpleId "a"] fr q
q2 = Var_decl [mkSimpleId "b"] to q
q3 = Var_decl [mkSimpleId "c"] fr q
t1 = toQualVar q1
t2 = toQualVar q2
t3 = toQualVar q3
pAppl = Predication qp [t1, t2] q
eqs = fr == to
in case propProp p of
Uni -> mkForallRange [q1, q2, q3]
(Implication
(Conjunction
[pAppl, Predication qp [t3, t2] q] q)
(Strong_equation t1 t3 q)
True q) q
Tot -> mkForallRange [q1] (Quantification Existential [q2] pAppl q) q
Sur -> mkForallRange [q2] (Quantification Existential [q1] pAppl q) q
Inj -> let
q4 = Var_decl [mkSimpleId "c"] to q
t4 = toQualVar q4
in mkForallRange [q1, q2, q4]
(Implication
(Conjunction
[pAppl, Predication qp [t1, t4] q] q)
(Strong_equation t2 t4 q)
True q) q
Sym | eqs -> mkForallRange [q1, q2]
(Equivalence pAppl (Predication qp [t2, t1] q) q) q
Asy | eqs -> mkForallRange [q1, q2]
(Implication pAppl (Negation (Predication qp [t2, t1] q) q) True q) q
Trn | eqs -> mkForallRange [q1, q2, q3]
(Implication
(Conjunction [pAppl, Predication qp [t2, t3] q] q)
(Predication qp [t1, t3] q)
True q) q
Rfx | eqs -> mkForallRange [q1] (Predication qp [t1, t1] q) q
pr -> error $ "getRelProp " ++ showDoc pr ""
transRule :: CASLSign -> Rule
-> State Int ((VAR, SORT), (VAR, SORT), CASLFORMULA)
transRule sig rule =
let myMin v@(ta, sa) (_, sb) =
if leqSort sig sa sb then v else
if leqSort sig sb sa then (ta, sb) else
error $ "transRule.myMin " ++ showDoc (sa, sb) "\n "
++ showDoc rule ""
myVarDecl = uncurry mkVarDecl
in case rule of
Tm m@(Sgn (Token s p) (RelType c1 c2)) -> do
i <- next
j <- next
let v1 = mkNumVar "a" i
v2 = mkNumVar "b" j
isI = s == "I"
ty1' = conceptToId c1
ty2' = conceptToId c2
ty1 = if isI && leqSort sig ty2 ty1 then ty2' else ty1'
ty2 = if isI && leqSort sig ty1 ty2 then ty1' else ty2'
q1 = Qual_var v1 ty1 p
q2 = Qual_var v2 ty2 p
return ((v1, ty1), (v2, ty2),
if s == "V" then True_atom p else
if isI then
if ty1 == ty2 then Strong_equation q1 q2 p else
error $ "transRule.I " ++ showDoc rule ""
else
let qp@(Qual_pred_name _ (Pred_type [fr, to] _) _) = getRelPred sig m
in Predication qp
[ if ty1 == fr then q1 else Sorted_term q1 fr p
, if ty2 == to then q2 else Sorted_term q2 to p] p)
UnExp o e -> do
(v1, v2@(t2, _), f) <- transRule sig e
case o of
Co -> return (v2, v1, f)
Cp -> return (v1, v2, negateForm f nullRange)
_ -> do
k <- next
let v@(_, s) = myMin v1 v2
w = (t2, s)
nf = renameVar sig v1 v $ renameVar sig v2 w f
z = (mkNumVar "c" k, s)
cf = mkExist [myVarDecl z]
$ conjunct [renameVar sig v z nf, renameVar sig w z nf]
-- this is (and always will be) incomplete with respect to compositions
return (v, w, disjunct
$ [ mkStEq (toQualVar $ myVarDecl v) $ toQualVar $ myVarDecl w
| o == K0] ++ [nf, cf])
MulExp o es -> case es of
[] -> error "transRule2"
r : t -> if null t then transRule sig r else do
(v1, v2, f1) <- transRule sig r
(v3, v4, f2) <- transRule sig $ MulExp o t
if elem o [Fc, Fd] then return (v1, v4,
let v23 = myMin v2 v3
f3 = renameVar sig v2 v23 f1
f4 = renameVar sig v3 v23 f2
vs = [myVarDecl v23]
cs = [f3, f4]
in if o == Fc then mkExist vs $ conjunct cs
else mkForall vs $ disjunct cs)
else do
let v13 = myMin v1 v3
v24 = myMin v2 v4
f3 = renameVar sig v1 v13 $ renameVar sig v2 v24 f1
f4 = renameVar sig v3 v13 $ renameVar sig v4 v24 f2
return (v13, v24, case o of
Fi -> conjunct [f3, f4]
Fu -> disjunct [f3, f4]
Ri -> mkImpl f3 f4
Rr -> Implication f4 f3 False nullRange
Re -> mkEqv f3 f4
_ -> error "transRule,MulExp")
renameVarRecord :: CASLSign -> (VAR, SORT) -> (VAR, SORT)
-> Record () CASLFORMULA (TERM ())
renameVarRecord sig from to = (mapRecord id)
{ foldQual_var = \ _ v ty p ->
let (nv, nty) = if (v, ty) == from then to else (v, ty)
qv = Qual_var nv nty p
in if nty == ty then qv else
if leqSort sig nty ty then Sorted_term qv ty p else
error "renameVar"
}
renameVar :: CASLSign -> (VAR, SORT) -> (VAR, SORT) -> CASLFORMULA
-> CASLFORMULA
renameVar sig v = foldFormula . renameVarRecord sig v
|
nevrenato/Hets_Fork
|
Comorphisms/Adl2CASL.hs
|
gpl-2.0
| 9,381
| 0
| 27
| 2,977
| 3,376
| 1,753
| 1,623
| 240
| 23
|
{-# LANGUAGE DeriveDataTypeable #-}
module LexML.Linker.LexerPrim where
import Text.HTML.TagSoup
--import Text.HTML.TagSoup.Parser
import qualified Data.Foldable as F
import Data.Maybe
import Data.Char
import Data.Typeable
import Control.Monad.Except
import qualified Data.Set as S
import Data.Word (Word8)
import Codec.Binary.UTF8.String (encode)
data LexerState =
LexerState {
lsPrevPos :: Maybe TokenPos,
lsCurrentPos :: Int,
lsStream :: [(Int,String)],
lsPrevCharBytes :: [Word8],
lsPrevChars :: String
} deriving (Show)
type AlexInput = LexerState
alexGetByte :: AlexInput -> Maybe (Word8,AlexInput)
alexGetByte ls =
case lsPrevCharBytes ls of
[] -> case lsStream ls of
((_,"") : rest) -> alexGetByte (ls { lsCurrentPos = 0, lsStream = rest })
((p,c:cs) : rest) ->
let (b:bs) = encode [c] in
Just (b, ls { lsCurrentPos = lsCurrentPos ls + 1,
lsPrevPos = Just $ maybe (p,lsCurrentPos ls) id (lsPrevPos ls),
lsPrevChars = c : lsPrevChars ls,
lsPrevCharBytes = bs,
lsStream = (p,cs) : rest})
[] -> Nothing
(b:bs) -> Just (b,ls { lsPrevCharBytes = bs})
alexInputPrevChar :: AlexInput -> Char
alexInputPrevChar = head . lsPrevChars
makeLexStream tags =
LexerState {
lsCurrentPos = 0,
lsPrevChars = [],
lsPrevCharBytes = [],
lsPrevPos = Nothing,
lsStream = [(p,text) | (p, TagText text) <- zip [0 ..] tags ]
}
extract n ls = (reverse . take n . lsPrevChars $ ls, ls { lsPrevChars = [], lsPrevPos = Nothing } )
skip _ ls = ls
data Genero = Masc | Fem deriving (Eq,Ord,Show,Typeable)
type Token = (TokenPos,TokenData)
type TokenPos = (Int,Int)
data TokenData =
Numero !Integer !String
| Palavra !String
| Ponto
| Virgula
| Barra
| PontoeVirgula
| Hifen
| IndicadorOrdinal !Genero
| Paragrafos
| Paragrafo
| Ordinal !Integer !String
| Simbolo !Char
| Ignored !Int
deriving (Eq,Ord,Show,Typeable)
tokenDataLength :: TokenData -> Int
tokenDataLength (Numero _ s) = length s
tokenDataLength (Palavra s) = length s
tokenDataLength Paragrafos = 2
tokenDataLength (Ignored n) = n
tokenDataLength _ = 1
type Action = String -> Maybe TokenData
whitespace :: Action
whitespace = const Nothing
numero :: Action
numero s = case filter isDigit s of
"" -> Nothing
s' -> Just $ Numero (read s') s
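-- For example (illustration only): numero "1.234" keeps only the digits and
-- yields Just (Numero 1234 "1.234"), while numero "abc" yields Nothing.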
palavra :: Action
palavra = Just . Palavra
ponto :: Action
ponto = Just . const Ponto
barra :: Action
barra = Just . const Barra
ordinal :: Action
ordinal s = case filter isDigit s of
"" -> Nothing
s' -> Just $ Ordinal (read s') s
virgula :: Action
virgula = Just . const Virgula
pontoeVirgula :: Action
pontoeVirgula = Just . const PontoeVirgula
hifen :: Action
hifen = Just . const Hifen
indicadorOrdinal :: Action
indicadorOrdinal "\xba" = Just $ IndicadorOrdinal Masc
indicadorOrdinal "\xaa" = Just $ IndicadorOrdinal Fem
indicadorOrdinal "\xb0" = Just $ IndicadorOrdinal Masc
paragrafos :: Action
paragrafos = Just . const Paragrafos
paragrafo :: Action
paragrafo = Just . const Paragrafo
simbolo :: Action
simbolo = Just . Simbolo . head
extractToken :: Int -> Action -> LexerState -> (Maybe Token,LexerState)
extractToken len action ls = ( (,) pos <$> action tokenText, ls')
where
pos = prevPos ls
(tokenText,ls') = extract len ls
prevPos :: LexerState -> (Int,Int)
prevPos ls = fromJust $ lsPrevPos ls -- fst . head . lsStream $ ls, lsCurrentPos ls)
currentPos :: LexerState -> (Int,Int)
currentPos ls =
let ((p,_):_) = lsStream ls in (p,lsCurrentPos ls)
newtype LexError = LexError (Maybe String,Maybe TokenPos) deriving (Eq,Ord,Typeable)
instance Show LexError where
show (LexError (Nothing,p)) = show (LexError (Just "Erro não-específico na análise léxica.",p))
show (LexError (Just m, Nothing)) = m
show (LexError (Just m, Just p)) = m ++ " em " ++ show p
{- instance Except LexError where
noMsg = LexError (Nothing, Nothing)
strMsg s = LexError (Just s,Nothing)
-}
|
lexml/lexml-linker
|
src/main/haskell/LexML/Linker/LexerPrim.hs
|
gpl-2.0
| 4,214
| 0
| 20
| 1,054
| 1,415
| 773
| 642
| 132
| 4
|
module Model
(
NewAgentState
, NewAgentMsg (..)
, NewAgentEnvironment
, NewAgentDef
, NewAgentBehaviour
, NewAgentIn
, NewAgentOut
, NewAgentObservable
) where
import FRP.Chimera
------------------------------------------------------------------------------------------------------------------------
-- DOMAIN-SPECIFIC AGENT-DEFINITIONS
------------------------------------------------------------------------------------------------------------------------
type NewAgentState = Int
data NewAgentMsg = None
type NewAgentEnvironment = ()
type NewAgentDef = AgentDef NewAgentState NewAgentMsg NewAgentEnvironment
type NewAgentBehaviour = AgentBehaviour NewAgentState NewAgentMsg NewAgentEnvironment
type NewAgentIn = AgentIn NewAgentState NewAgentMsg NewAgentEnvironment
type NewAgentOut = AgentOut NewAgentState NewAgentMsg NewAgentEnvironment
type NewAgentObservable = AgentObservable NewAgentState
------------------------------------------------------------------------------------------------------------------------
|
thalerjonathan/phd
|
coding/libraries/chimera/examples/ABS/NewAgents/Model.hs
|
gpl-3.0
| 1,099
| 0
| 5
| 153
| 125
| 76
| 49
| 19
| 0
|
module Carbon.Website.Files (serve) where
{-
This module serves the static files on the website
which are located in /files/.
It uses the Config.File module for configuration.
-}
import Control.Monad.State
import Happstack.Server (Browsing(..), serveDirectory, Response)
import qualified Happstack.Server as S
import Carbon.Config (Config(..))
import Carbon.Website.Monad
serve :: OBW Response
serve = do
conf <- gets config
let b = browsing $ allowBrowsing conf
serveDirectory b fallbackFiles $ fileStorage conf
browsing :: Bool -> Browsing
browsing True = EnableBrowsing
browsing False = DisableBrowsing
{-
serve will try to serve these if the requested file is not found.
-}
fallbackFiles :: [String]
fallbackFiles = ["404.html"]
|
runjak/carbon-adf
|
Carbon/Website/Files.hs
|
gpl-3.0
| 754
| 0
| 11
| 123
| 163
| 93
| 70
| 16
| 1
|
import Prelude hiding (catch)
import System.Environment
import System.IO
import System.Directory
import System.Posix.Files
import Control.Monad
import Control.Concurrent
import Control.Exception
import System.Console.Terminfo
import Text.Printf
import Data.Char (isDigit)
readcontent :: String -> String
readcontent x =
let corr = head $ lines x
in corr
drawbar :: String -> Double -> Double -> IO ()
drawbar path msize dw = do
curpos <- fmap readcontent $ readFile path
let (_, posinfo) = break (== '\t') curpos
dp = read posinfo :: Double
w = round ((dw-10)*dp/msize) :: Int
line = replicate w '*'
spaces = replicate ( (round dw) - w - 10 ) ' '
pct = 100*dp/msize :: Double
putStr $ "\r" ++ line ++ spaces
void $ printf "%.2f" pct
hFlush stdout
threadDelay 500000
goodlink :: FilePath -> FilePath -> IO Bool
goodlink filename fd = do
sl <- fmap isSymbolicLink (getSymbolicLinkStatus fd)
if sl
then fmap (== filename) (readSymbolicLink fd)
else return False
getFilesInDir :: String -> IO [String]
getFilesInDir dirname = fmap (map ((++) dirname)) $ getDirectoryContents dirname
readCmdLine :: String -> IO (Maybe String)
readCmdLine cmdfile = do
filecontent <- fmap Just (readFile cmdfile) `catch` (const $ return Nothing :: IOException -> IO (Maybe String))
case fmap (reverse . takeWhile (/= '/') . reverse . takeWhile (\x -> x /= '\NUL' && x /= ' ')) filecontent of
Just "" -> return Nothing
x -> return x
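-- For example (illustration only): if /proc/1234/cmdline starts with
-- "/usr/bin/rsync\NUL-av\NUL...", readCmdLine returns Just "rsync"; an
-- unreadable or empty cmdline yields Nothing.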
getPid :: String -> IO Int
getPid programname = do
allpids <- fmap (filter (all isDigit . drop 6)) (getFilesInDir "/proc/")
let cmdlinefiles = map (++ "/cmdline") allpids
cmdlines <- fmap (zip allpids) $ mapM readCmdLine cmdlinefiles
let validcmdlines = filter (\(_,x) -> Just programname == x) cmdlines
toPid = read . drop 6 . fst
case validcmdlines of
[] -> error $ "Could not find binary " ++ programname
[x] -> return $ toPid x
xs -> do
hPutStrLn stderr "Warning, more than one process found, using the first one"
return $ toPid $ head xs
main :: IO ()
main = do
(programname,filename) <- getArgs >>=
\x -> case x of
[p,f] -> return (p,f)
_ -> error "Usage: ProgressBar programname filename"
pid <- getPid programname
let dirname = "/proc/" ++ show pid ++ "/fd/"
links <- getFilesInDir dirname
rlinks <- filterM (goodlink filename) links
if null rlinks
then error $ "Can't find " ++ filename
else do
let fd = (reverse . fst . break (=='/') . reverse . head) rlinks
fdinfo = "/proc/" ++ show pid ++ "/fdinfo/" ++ fd
fsize <- fmap (toInteger . fileSize) (getFileStatus filename)
dwm <- fmap (\x -> getCapability x termColumns) (setupTerm "vt100")
case dwm of
Just dw -> forever (drawbar fdinfo (fromIntegral fsize) (fromIntegral dw))
Nothing -> error "Can't get term width"
|
bartavelle/progressbar
|
ProgressBar.hs
|
gpl-3.0
| 3,082
| 0
| 19
| 848
| 1,117
| 554
| 563
| 75
| 4
|
{-# LANGUAGE TemplateHaskell #-}
import qualified Aws as Aws
import qualified Aws.S3 as S3
import qualified Data.ByteString.Lazy as BL (hGetContents)
import Data.Text (pack, unpack)
import Control.Monad (when)
import Data.Conduit (($$+-))
import Data.Maybe (isJust, fromJust)
import Data.Conduit.Binary (sinkFile)
import HFlags
import Network.URI (URI, uriScheme, uriAuthority, uriRegName, uriPath,
parseURI, parseURIReference)
import Network.HTTP.Conduit (withManager, responseBody, RequestBody (..))
import qualified System.IO as IO (withFile, IOMode (ReadMode))
import System.Exit (exitFailure)
newtype S3Path = S3Path URI
deriving Show
newtype FPath = FPath URI
deriving Show
data HSPath = S3P S3Path | FP FPath
deriving Show
data FromTo = FromTo HSPath HSPath
deriving Show
data S3Response = Get S3.GetObjectResponse | Put S3.PutObjectResponse
-- * Commandline args, messaging.
version :: String
version = "v0.1.0.0"
whoami, greeting, usage :: String
whoami = "hs3 simple s3 file upload / download utility."
usage = "\nhs3 <from_uri> <to_uri>"
greeting = whoami ++ version ++ usage
defineFlag "v:verbose" False "Verbose output."
s3Prefix :: String
s3Prefix = "s3://"
say :: String -> IO ()
say = putStrLn
ifv :: Monad m => m () -> m ()
ifv e = when flags_verbose e
parsePaths :: String -> String -> Maybe FromTo
parsePaths p1 p2 =
case parseS3URI p1 of -- one and only one of p1, p2 is a s3:// uri.
Just s3uri -> do
furi <- parseURIReference p2
return $ FromTo (S3P $ S3Path s3uri) (FP $ FPath furi)
Nothing -> do
furi <- parseURIReference p1
s3uri <- parseS3URI p2
return $ FromTo (FP $ FPath furi) (S3P $ S3Path s3uri)
where
s3scheme uri = uriScheme uri == "s3:" && hasBucket uri
hasBucket uri = isJust $ do
auth <- uriAuthority uri
return $ uriRegName auth
parseS3URI uri =
case parseURI uri of
(Just s3uri) | s3scheme s3uri -> Just s3uri
_ -> Nothing
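-- For example (illustration only):
-- parsePaths "s3://bucket/b.dat" "/tmp/a.dat" yields
-- Just (FromTo (S3P ...) (FP ...)), i.e. a download, while swapping the
-- arguments yields an upload, Just (FromTo (FP ...) (S3P ...)).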
checkPaths :: [String] -> IO FromTo
checkPaths [p1, p2] = do
case parsePaths p1 p2 of
Just ps -> return ps
Nothing -> do
putStrLn "One and only one path must be in the form of an s3 uri. \
\e.g s3://bucket/b.dat"
exitFailure
checkPaths _ = do
putStrLn "Two paths are required, e.g. hs3 /tmp/a.dat s3://bucket/b.dat"
exitFailure
s3BucketPath :: S3Path -> (S3.Bucket, FilePath)
s3BucketPath (S3Path s3uri) =
let bucket = pack $ getBucket s3uri
bpath = uriPath s3uri
in (bucket, drop 1 bpath)
where
getBucket = uriRegName . fromJust . uriAuthority
showFetch :: S3.Bucket -> FilePath -> FilePath -> IO ()
showFetch bucket bpath fpath = do
say $ "Fetching from bucket " ++ b ++ " at " ++ bpath ++ " to " ++ fpath
where
b = unpack bucket
responseMessage :: S3Response -> String
responseMessage rsp =
case rsp of
Put r -> show $ r
Get (S3.GetObjectResponse meta _) -> show meta
-- * Fetch a file from S3.
simpleFetch :: S3Path -> FPath -> IO S3.GetObjectResponse
simpleFetch s3uri (FPath furi) = do
cfg <- Aws.baseConfiguration
let (bucket, bpath) = s3BucketPath s3uri
let localpath = uriPath furi
ifv $ showFetch bucket bpath localpath
let s3cfg = Aws.defServiceConfig :: S3.S3Configuration Aws.NormalQuery
withManager $ \mgr -> do
grsp@S3.GetObjectResponse { S3.gorResponse = rsp } <-
Aws.pureAws cfg s3cfg mgr $ S3.getObject bucket (pack bpath)
responseBody rsp $$+- sinkFile localpath
return grsp
-- * Store a file up on S3.
simpleStore :: FPath -> S3Path -> IO S3.PutObjectResponse
simpleStore (FPath furi) s3uri = do
awscfg <- Aws.baseConfiguration
let s3cfg = Aws.defServiceConfig :: S3.S3Configuration Aws.NormalQuery
let (bucket, bpath) = s3BucketPath s3uri
let fpath = uriPath furi
IO.withFile fpath IO.ReadMode $ \hIn -> do
bytes <- BL.hGetContents hIn
withManager $ \mgr -> do
let req = S3.putObject bucket (pack bpath) (RequestBodyLBS bytes)
resp <- Aws.aws awscfg s3cfg mgr req
Aws.readResponseIO resp
-- | Move up/down a file.
s3MoveFile :: FromTo -> IO S3Response
s3MoveFile (FromTo (S3P s3uri) (FP furi)) = do
rsp <- simpleFetch s3uri furi
return $ Get rsp
s3MoveFile (FromTo (FP furi) (S3P s3uri)) = do
rsp <- simpleStore furi s3uri
return $ Put rsp
s3MoveFile _ = do
putStrLn "From to paths must be one S3 uri and one local file path."
exitFailure
main :: IO ()
main = do
args <- $initHFlags greeting
fromto <- checkPaths args
rsp <- s3MoveFile fromto
ifv (say $ responseMessage rsp)
|
RayRacine/hs3
|
src/hs3.hs
|
gpl-3.0
| 4,875
| 0
| 20
| 1,306
| 1,483
| 744
| 739
| 120
| 3
|
{-
CC_Clones - Classic games reimplemented
© Callum Lowcay 2006-2011
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE RecordWildCards #-}
module Tetris.GameState (
GameMode(..), GameState(..), ScoreState(..),
Sfx(..), Channels(..),
Tile(..), Brick(..), Rotation(..), SlideAction(..),
allTiles, clearField, srsCoords,
srsSpawnHeight, tile, tileS,
updateGame, randomBag, nextBrick, resetScoreState
) where
import Common.AniTimer
import Common.Counters
import Common.HighScores
import Common.Util
import Control.Monad.State
import Data.Array
import Data.List
import Data.Maybe
import Debug.Trace
import qualified Common.Queue as Q
import System.Random
data GameMode =
IntroMode | InGameMode | PausedMode | GameOverMode |
HighScoreMode | AllClearBonusMode
deriving (Enum, Eq, Show)
data Tile = Digits | Paused | GameOverTile |
FrameH | FrameV | SidePanel |
RedTile | PinkTile | YellowTile |
OrangeTile | BlueTile | GreyTile | GreenTile
deriving (Enum, Ord, Eq, Show)
allTiles = enumFrom Digits -- A list of all the tiles
data Brick = IBrick | JBrick | LBrick | OBrick | SBrick | TBrick | ZBrick
deriving (Enum, Bounded, Ord, Eq, Show)
data Rotation = RUp | RDown | RLeft | RRight
deriving (Enum, Ord, Eq, Show)
data SlideAction = SlideLeft | SlideRight
deriving (Enum, Ord, Eq, Show)
type Field = Array (Int, Int) (Maybe Tile)
-- The complete state of the game at any point in time
data GameState = GameState {
mode :: GameMode,
highScores :: HighScoreState,
randomState :: StdGen,
brickQueue :: Q.Queue Brick,
gracePeriod :: Bool,
currentBrick :: Brick,
currentRotation :: Rotation,
currentHeight :: Int, -- 0 indexed, axis bottom to top
currentPos :: Int, -- 0 indexed, axis goes left to right
currentSlide :: SlideAction,
slideActive :: Bool,
queuedRotations :: Int, -- number of rotations to perform
fullLines :: [Int], -- completed lines
field :: Field,
initClearField :: Bool, -- True if this is an initial (clear) field
downTimer :: AniTimer,
slideTimer :: AniTimer,
lineTimer :: AniTimer,
-- FTA = Frames To Alignment
downFTA :: Int,
slideFTA :: Int,
lineFTA :: Int,
scoreState :: ScoreState,
sfxEvents :: [(Sfx, Channels)], -- sounds to be played after rendering
dropKey :: Bool,
showPreview :: Bool,
allClearCheat :: Bool -- True if the allClear cheat is in use
} deriving (Show)
data ScoreState = ScoreState {
level :: Int, levelCounter :: CounterState,
score :: Int, scoreCounter :: CounterState,
lastLines :: Int,
totalLines :: Int
} deriving (Show)
-- How many bricks to show in the preview
previewBricks = 1
clearField :: Array (Int, Int) (Maybe Tile)
clearField = array ((0, 0), (9, 21))
[((x, y), Nothing)|x <- [0..9], y <- [0..21]]
data Sfx = SfxTurn | SfxLine
deriving (Enum, Ord, Eq, Show)
data Channels = SfxChannel | ChannelCount
deriving (Enum, Ord, Eq, Show)
-- Coordinates of tiles according to the SRS rotation scheme
-- coords are relative to a 4x4 grid. See http://tetris.wikia.com/wiki/SRS
srsCoords :: Brick -> Rotation -> [(Int, Int)]
srsCoords IBrick RUp = [(0, 1), (1, 1), (2, 1), (3, 1)]
srsCoords IBrick RRight = [(2, 0), (2, 1), (2, 2), (2, 3)]
srsCoords IBrick RDown = [(0, 2), (1, 2), (2, 2), (3, 2)]
srsCoords IBrick RLeft = [(1, 0), (1, 1), (1, 2), (1, 3)]
srsCoords JBrick RUp = [(0, 0), (0, 1), (1, 1), (2, 1)]
srsCoords JBrick RRight = [(1, 0), (2, 0), (1, 1), (1, 2)]
srsCoords JBrick RDown = [(0, 1), (1, 1), (2, 1), (2, 2)]
srsCoords JBrick RLeft = [(1, 0), (1, 1), (1, 2), (0, 2)]
srsCoords LBrick RUp = [(0, 1), (1, 1), (2, 1), (2, 0)]
srsCoords LBrick RRight = [(1, 0), (1, 1), (1, 2), (2, 2)]
srsCoords LBrick RDown = [(0, 2), (0, 1), (1, 1), (2, 1)]
srsCoords LBrick RLeft = [(0, 0), (1, 0), (1, 1), (1, 2)]
srsCoords OBrick _ = [(1, 0), (2, 0), (2, 1), (1, 1)]
srsCoords SBrick RUp = [(0, 1), (1, 1), (1, 0), (2, 0)]
srsCoords SBrick RRight = [(1, 0), (1, 1), (2, 1), (2, 2)]
srsCoords SBrick RDown = [(0, 2), (1, 2), (1, 1), (2, 1)]
srsCoords SBrick RLeft = [(0, 0), (0, 1), (1, 1), (1, 2)]
srsCoords TBrick RUp = [(1, 0), (0, 1), (1, 1), (2, 1)]
srsCoords TBrick RRight = [(2, 1), (1, 0), (1, 1), (1, 2)]
srsCoords TBrick RDown = [(1, 2), (0, 1), (1, 1), (2, 1)]
srsCoords TBrick RLeft = [(0, 1), (1, 0), (1, 1), (1, 2)]
srsCoords ZBrick RUp = [(0, 0), (1, 0), (1, 1), (2, 1)]
srsCoords ZBrick RRight = [(2, 0), (2, 1), (1, 1), (1, 2)]
srsCoords ZBrick RDown = [(0, 1), (1, 1), (1, 2), (2, 2)]
srsCoords ZBrick RLeft = [(1, 0), (1, 1), (0, 1), (0, 2)]
-- Offsets to try for wallkicks, according to the SRS rotation scheme
-- Of course, we try (0, 0) first, no need to encode that here...
srsWallkick :: Brick -> Rotation -> Rotation -> [(Int, Int)]
srsWallkick IBrick RUp RRight = [(-2, 0), ( 1, 0), (-2,-1), ( 1, 2)]
srsWallkick IBrick RRight RUp = [( 2, 0), (-1, 0), ( 2, 1), (-1,-2)]
srsWallkick IBrick RRight RDown = [(-1, 0), ( 2, 0), (-1, 2), ( 2,-1)]
srsWallkick IBrick RDown RRight = [( 1, 0), (-2, 0), ( 1,-2), (-2, 1)]
srsWallkick IBrick RDown RLeft = [( 2, 0), (-1, 0), ( 2, 1), (-1,-2)]
srsWallkick IBrick RLeft RDown = [(-2, 0), ( 1, 0), (-2,-1), ( 1, 2)]
srsWallkick IBrick RLeft RUp = [( 1, 0), (-2, 0), ( 1,-2), (-2, 1)]
srsWallkick IBrick RUp RLeft = [(-1, 0), ( 2, 0), (-1, 2), ( 2,-1)]
srsWallkick _ RUp RRight = [(-1, 0), (-1, 1), ( 0,-2), (-1,-2)]
srsWallkick _ RRight RUp = [( 1, 0), ( 1,-1), ( 0, 2), ( 1, 2)]
srsWallkick _ RRight RDown = [( 1, 0), ( 1,-1), ( 0, 2), ( 1, 2)]
srsWallkick _ RDown RRight = [(-1, 0), (-1, 1), ( 0,-2), (-1,-2)]
srsWallkick _ RDown RLeft = [( 1, 0), ( 1, 1), ( 0,-2), ( 1,-2)]
srsWallkick _ RLeft RDown = [(-1, 0), (-1,-1), ( 0, 2), (-1, 2)]
srsWallkick _ RLeft RUp = [(-1, 0), (-1,-1), ( 0, 2), (-1, 2)]
srsWallkick _ RUp RLeft = [( 1, 0), ( 1, 1), ( 0,-2), ( 1,-2)]
-- Spawning data
srsSpawnHeight = 21
srsSpawnPos = 3
-- Get the result of a rotation attempt, including wallkick offsets
getRotation :: Int -> Int -> Brick ->
Rotation -> Rotation -> Field -> (Rotation, Int, Int)
-- The OBrick can be rotated to any orientation without wallkicks
getRotation height pos OBrick _ to _ = (to, height, pos)
-- The other bricks may involve wallkicks, or may fail
getRotation height pos brick from to field =
let
trials = (0, 0):(srsWallkick brick from to)
fit = find (\trial -> isValidPosition (offsetCoords trial) field) trials
in
case fit of
Nothing -> (from, height, pos)
Just (xOff, yOff) -> (to, height + yOff, pos + xOff)
where
toCoords = toFieldCoords height pos (srsCoords brick to)
offsetCoords (xOff, yOff) =
map (\(x, y) -> (x + xOff, y + yOff)) toCoords
-- Do a clockwise rotation
rotateR :: Field -> Brick -> (Rotation, Int, Int) -> (Rotation, Int, Int)
rotateR field brick (from, height, pos) =
getRotation height pos brick from to field
where
to = case from of
RUp -> RRight
RDown -> RLeft
RLeft -> RUp
RRight -> RDown
-- Do an anti-clockwise rotation
rotateL :: Field -> Brick -> (Rotation, Int, Int) -> (Rotation, Int, Int)
rotateL field brick (from, height, pos) =
getRotation height pos brick from to field
where
to = case from of
RRight -> RUp
RLeft -> RDown
RUp -> RLeft
RDown -> RRight
-- Convert block coordinates to field coordinates
toFieldCoords :: Int -> Int -> [(Int, Int)] -> [(Int, Int)]
toFieldCoords height pos = map (\(x, y) -> (x + pos, height - y))
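-- Worked example (added for illustration, not used by the game): the field
-- coordinates of a freshly spawned T brick in its default orientation.
-- With srsSpawnHeight = 21 and srsSpawnPos = 3 this evaluates to
-- [(4,21),(3,20),(4,20),(5,20)].
spawnedTBrickCoords :: [(Int, Int)]
spawnedTBrickCoords = toFieldCoords srsSpawnHeight srsSpawnPos (srsCoords TBrick RUp)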
-- Determine if a list of field coordinates are a valid block position
isValidPosition :: [(Int, Int)] -> Field -> Bool
isValidPosition coords field =
all (\(x, y) ->
y >= 0 && y < 22 && x >= 0 && x < 10 && (isNothing$ field ! (x, y))) coords
-- Generate a random bag of blocks
randomBag :: RandomGen r => State r [Brick]
randomBag = permutation [minBound..maxBound]
-- How big is a cell
tileS = 26 :: Int
-- Define the colours of the bricks
tile :: Brick -> Tile
tile IBrick = RedTile
tile JBrick = PinkTile
tile LBrick = YellowTile
tile OBrick = OrangeTile
tile SBrick = BlueTile
tile TBrick = GreyTile
tile ZBrick = GreenTile
-- The delay between frames
getDropDelay :: Int -> Bool -> Double
getDropDelay level dropkey = ((1 :: Double) * 10^3) / divisor
where divisor =
(((fromIntegral level) * 12) + 24) * (if dropkey then 10 else 1)
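-- For example (illustration only): at level 1 without the drop key the
-- divisor is (1 * 12 + 24) * 1 = 36, so the delay is roughly 27.8 ms per
-- step; holding the drop key multiplies the divisor by 10 (about 2.8 ms).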
-- How long to display the game over message, in milliseconds
gameOverDelay :: Double
gameOverDelay = ((1 :: Double) * 10^3) * 4
slideDelay :: Double
slideDelay = ((1 :: Double) * 10 ^ 3) / ((fromIntegral tileS) * 4)
lineDelay :: Double
lineDelay = ((1 :: Double) * 10 ^ 3) / ((fromIntegral tileS) * 4)
-- Determine the next GameState from the current GameState
updateGame :: Int -> GameState -> GameState
updateGame delay (state@(GameState {mode = GameOverMode, ..})) =
let
(frames, downTimer') =
runState (advanceFrames delay gameOverDelay) downTimer
done = frames > 0
in state {
mode = if done then IntroMode else GameOverMode,
downTimer = if done then resetTimer else downTimer',
field = if done then clearField else field,
initClearField = done,
showPreview = if done then False else showPreview,
scoreState = if done then resetScoreState scoreState else scoreState,
sfxEvents = []
}
updateGame delay (state@(GameState {mode = HighScoreMode, ..})) =
state {
sfxEvents = [],
scoreState = updateScore delay scoreState
}
updateGame delay (state@(GameState {mode = AllClearBonusMode, ..})) =
let
(downFrames, downTimer') =
runState (advanceFrames delay (getDropDelay 5 False)) downTimer
downFTA' = downFTA - downFrames
in state {
mode = if downFTA' <= 0 then InGameMode else AllClearBonusMode,
initClearField = downFTA' <= 0,
scoreState = updateScore delay scoreState,
downFTA = downFTA',
downTimer = downTimer'
}
updateGame _ (state@(GameState {mode = PausedMode})) = state
updateGame _ (state@(GameState {mode = IntroMode})) = state
updateGame delay (state@(GameState {mode = InGameMode, ..})) = let
(state', doNextBrick) = doTranslation delay state
in
if doNextBrick
then nextBrick (detectLines state')
else doRotations (detectAllClearBonus state')
-- process translations (gravity and sliding), returns True when the
-- current brick has been placed
doTranslation :: Int -> GameState -> (GameState, Bool)
doTranslation delay (state@(GameState {..})) = let
(downFrames, downTimer') =
runState (advanceFrames delay dropDelay) downTimer
(slideFrames, slideTimer') =
runState (advanceFrames delay slideDelay) slideTimer
(lineFrames, lineTimer') =
runState (advanceFrames delay lineDelay) lineTimer
downOffset' = downFTA - downFrames
downFTA' = if downOffset' < 0
then downOffset' `mod` tileS else downOffset'
downCells = if downOffset' < 0
then (abs downOffset' `div` tileS) + 1 else 0
((currentHeight', gracePeriod'), field') =
runState (updateBrick downCells) field
slideOffset' = slideFTA - slideFrames
slideFTA' = if slideOffset' < 0
then if slideActive && (validSlide slideCells /= 0)
then slideOffset' `mod` tileS
else 0
else slideOffset'
slideCellsN = if slideOffset' < 0 && slideActive
then (abs slideOffset' `div` tileS) + 1 else 0
slideCells = case currentSlide of
SlideLeft -> - slideCellsN
SlideRight -> slideCellsN
currentPos' = currentPos + (validSlide slideCells)
lineFTA' = lineFTA - lineFrames
in
(state {
downFTA = downFTA',
slideFTA = slideFTA',
lineFTA = if lineFTA' < 0 then 0 else lineFTA',
downTimer = downTimer',
slideTimer = if slideActive || slideFTA' > 0
then slideTimer' else resetTimer,
lineTimer = if lineFTA' < 0 then resetTimer else lineTimer',
field = if (lineFTA' < 0) && (not$null fullLines)
then if allClearCheat
then clearField else clearLines field' fullLines
else field',
fullLines = if lineFTA' < 0 then [] else fullLines,
gracePeriod = gracePeriod',
currentHeight = if currentHeight' < 0
then srsSpawnHeight else currentHeight',
currentPos = currentPos',
scoreState = updateScore delay scoreState,
sfxEvents = [],
allClearCheat =
allClearCheat && not ((lineFTA' < 0) && (not$null fullLines))
}, currentHeight' < 0)
where
dropDelay = getDropDelay (level scoreState) dropKey
brickFieldCoords height pos = toFieldCoords height pos
(srsCoords currentBrick currentRotation)
-- returns (height, grace)
blockDown 0 height = (height, False)
blockDown downCells height =
if isValidPosition (brickFieldCoords (height - 1) currentPos) field
then blockDown (downCells - 1) (height - 1)
else (height, True)
updateBrick :: Int -> State Field (Int, Bool)
updateBrick downCells = do
let (height', grace') = blockDown downCells currentHeight
if (height' /= currentHeight)
then return (height', grace')
else if grace' && gracePeriod
then do
currentField <- get
put$ mergeField currentField
(brickFieldCoords currentHeight currentPos) (tile currentBrick)
return (-1, False)
else return (currentHeight, gracePeriod || grace')
validSlide 0 = 0
validSlide cells =
if isValidPosition
(brickFieldCoords currentHeight (currentPos + cells)) field
then cells
else if cells < 0
then validSlide (cells + 1) else validSlide (cells - 1)
-- process any rotations that have been queued
doRotations (state@(GameState {..})) = let
(currentRotation', currentHeight', currentPos') =
if queuedRotations > 0
then times queuedRotations (rotateL field currentBrick)$
(currentRotation, currentHeight, currentPos)
else (currentRotation, currentHeight, currentPos)
in
state {
queuedRotations = 0,
currentRotation = currentRotation',
currentHeight = currentHeight',
currentPos = currentPos',
sfxEvents =
(replicate queuedRotations (SfxTurn, SfxChannel)) ++ sfxEvents
}
-- work out the next brick
nextBrick (state@(GameState {..})) = let
(brick, bricks') = Q.dequeue brickQueue
emptyBag = Q.length bricks' < previewBricks
(newBag, randomState') = runState randomBag randomState
gameOver = (not$isEmpty 20) && (not$isEmpty 21)
gameOverKind = if isNewHighScore (score scoreState) highScores
then HighScoreMode else GameOverMode
in
state {
mode = if gameOver then gameOverKind else mode,
initClearField = False,
highScores = if gameOver && gameOverKind == HighScoreMode
then insertHighScore (score scoreState) highScores else highScores,
-- Reset the down timer if we go to game over, this is because I'm
-- reusing the down timer as the game over timer (naughty)
downTimer = if gameOver then setTimer gameOverDelay else resetTimer,
randomState = if emptyBag then randomState' else randomState,
gracePeriod = False,
brickQueue = if emptyBag
then bricks' `Q.enqueueMany` newBag else bricks',
currentBrick = brick,
currentHeight = srsSpawnHeight,
currentPos = srsSpawnPos,
currentRotation = RUp,
queuedRotations = 0, -- cancel all rotations
slideTimer = resetTimer, -- and slides
slideFTA = 0
}
where
isEmpty = emptyLine field
-- Switch to AllClearBonusMode if the condition is met
detectAllClearBonus (state@(GameState {..})) =
if not (fieldEmpty field && (not initClearField)) then state
else state {
mode = AllClearBonusMode,
downTimer = resetTimer,
downFTA = 17 * tileS,
scoreState = scoreAllClear scoreState
}
-- Determine if a line is empty
emptyLine :: Field -> Int -> Bool
emptyLine field y = all (isNothing) [field!(x, y) | x <- [0..9]]
-- Determine if the entire field is empty
fieldEmpty :: Field -> Bool
fieldEmpty field = field == clearField
-- find lines and schedule them for removal
detectLines (state@(GameState {..})) = let
fullLines' = filter (isLine) [0..19]
in
state {
fullLines = fullLines',
sfxEvents = sfxEvents ++
(if null fullLines' then [] else [(SfxLine, SfxChannel)]),
lineTimer = resetTimer,
lineFTA = 19,
scoreState = if (not$null fullLines')
then scoreLines fullLines' scoreState else scoreDrop scoreState
}
where
isLine y = all (\x -> isJust$ field!(x, y)) [0..9]
-- clear all lines scheduled for removal
clearLines :: Field -> [Int] -> Field
clearLines field ys = let
keepLines = filter (not.(`elem` ys)) [0..21]
allLines = take 22$
(map (getLine) keepLines) ++ (repeat blankLine)
in
array ((0, 0), (9, 21))
[((x, y), tile) |
(y, line) <- [0..] `zip` allLines,
(x, tile) <- [0..] `zip` line]
where
getLine y = map (\x -> field!(x, y)) [0..9]
blankLine = replicate 10 Nothing
mergeField :: Field -> [(Int, Int)] -> Tile -> Field
mergeField field coords tile = field // (coords `zip` (repeat$ Just tile))
-- reset the score state
resetScoreState :: ScoreState -> ScoreState
resetScoreState scoreState =
ScoreState {
level = 0, levelCounter = resetCounter 0 (levelCounter scoreState),
score = 0, scoreCounter = resetCounter 0 (scoreCounter scoreState),
lastLines = 0, totalLines = 0
}
-- update the counters mainly
updateScore :: Int -> ScoreState -> ScoreState
updateScore delay (state@(ScoreState {..})) =
state {
scoreCounter = updateCounter delay scoreCounter,
levelCounter = updateCounter delay levelCounter
}
-- update the score when lines are detected
scoreLines :: [Int] -> ScoreState -> ScoreState
scoreLines lines (state@(ScoreState {..})) = let
-- reward splits
points = case length lines of
1 -> 2
2 -> if contiguous lines then 8 else 12
3 -> if contiguous lines then 16 else 24
4 -> 32
_ -> error ("Detected more than 4 lines, this cannot happen")
-- reward high levels and back-to-back combos
totalPoints = points * level + lastLines
scoreCounter' = addCounter totalPoints scoreCounter
totalLines' = totalLines + (length lines)
level' = (totalLines' `div` 20) + 1
levelCounter' = if level' > level
then addCounter (level' - level) levelCounter
else levelCounter
in
state {
scoreCounter = scoreCounter',
levelCounter = levelCounter',
lastLines = length lines,
level = level',
score = score + totalPoints,
totalLines = totalLines'
}
where
contiguous xs = all (uncurry (==)) $xs `zip` [(head xs)..]
-- update the score when a piece is dropped (without any lines)
scoreDrop :: ScoreState -> ScoreState
scoreDrop (state@(ScoreState {..})) =
state {
lastLines = 0
}
-- update the score when the field is cleared
scoreAllClear :: ScoreState -> ScoreState
scoreAllClear (state@(ScoreState {..})) = let
totalPoints = level * 18 * 20
in
state {
score = score + totalPoints,
scoreCounter = addCounter totalPoints scoreCounter,
lastLines = 0
}
|
CLowcay/CC_Clones
|
src/Tetris/GameState.hs
|
gpl-3.0
| 19,117
| 417
| 16
| 3,816
| 7,335
| 4,265
| 3,070
| 410
| 21
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceBroker.GetIAMPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the access control policy for a resource. Returns an empty policy
-- if the resource exists and does not have a policy set.
--
-- /See:/ <https://cloud.google.com/kubernetes-engine/docs/concepts/add-on/service-broker Service Broker API Reference> for @servicebroker.getIamPolicy@.
module Network.Google.Resource.ServiceBroker.GetIAMPolicy
(
-- * REST Resource
GetIAMPolicyResource
-- * Creating a Request
, getIAMPolicy
, GetIAMPolicy
-- * Request Lenses
, gipOptionsRequestedPolicyVersion
, gipXgafv
, gipUploadProtocol
, gipAccessToken
, gipUploadType
, gipResource
, gipCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceBroker.Types
-- | A resource alias for @servicebroker.getIamPolicy@ method which the
-- 'GetIAMPolicy' request conforms to.
type GetIAMPolicyResource =
"v1" :>
CaptureMode "resource" "getIamPolicy" Text :>
QueryParam "options.requestedPolicyVersion"
(Textual Int32)
:>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] GoogleIAMV1__Policy
-- | Gets the access control policy for a resource. Returns an empty policy
-- if the resource exists and does not have a policy set.
--
-- /See:/ 'getIAMPolicy' smart constructor.
data GetIAMPolicy =
GetIAMPolicy'
{ _gipOptionsRequestedPolicyVersion :: !(Maybe (Textual Int32))
, _gipXgafv :: !(Maybe Xgafv)
, _gipUploadProtocol :: !(Maybe Text)
, _gipAccessToken :: !(Maybe Text)
, _gipUploadType :: !(Maybe Text)
, _gipResource :: !Text
, _gipCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GetIAMPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gipOptionsRequestedPolicyVersion'
--
-- * 'gipXgafv'
--
-- * 'gipUploadProtocol'
--
-- * 'gipAccessToken'
--
-- * 'gipUploadType'
--
-- * 'gipResource'
--
-- * 'gipCallback'
getIAMPolicy
:: Text -- ^ 'gipResource'
-> GetIAMPolicy
getIAMPolicy pGipResource_ =
GetIAMPolicy'
{ _gipOptionsRequestedPolicyVersion = Nothing
, _gipXgafv = Nothing
, _gipUploadProtocol = Nothing
, _gipAccessToken = Nothing
, _gipUploadType = Nothing
, _gipResource = pGipResource_
, _gipCallback = Nothing
}
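-- Illustrative sketch (not part of the generated module): a request is built
-- with the smart constructor and adjusted through the lenses below, with lens
-- operators assumed to be in scope (e.g. from Control.Lens):
--
-- > getIAMPolicy "projects/example" & gipOptionsRequestedPolicyVersion ?~ 3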
-- | Optional. The policy format version to be returned. Valid values are 0,
-- 1, and 3. Requests specifying an invalid value will be rejected.
-- Requests for policies with any conditional bindings must specify version
-- 3. Policies without any conditional bindings may specify any valid value
-- or leave the field unset.
gipOptionsRequestedPolicyVersion :: Lens' GetIAMPolicy (Maybe Int32)
gipOptionsRequestedPolicyVersion
= lens _gipOptionsRequestedPolicyVersion
(\ s a -> s{_gipOptionsRequestedPolicyVersion = a})
. mapping _Coerce
-- | V1 error format.
gipXgafv :: Lens' GetIAMPolicy (Maybe Xgafv)
gipXgafv = lens _gipXgafv (\ s a -> s{_gipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
gipUploadProtocol :: Lens' GetIAMPolicy (Maybe Text)
gipUploadProtocol
= lens _gipUploadProtocol
(\ s a -> s{_gipUploadProtocol = a})
-- | OAuth access token.
gipAccessToken :: Lens' GetIAMPolicy (Maybe Text)
gipAccessToken
= lens _gipAccessToken
(\ s a -> s{_gipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
gipUploadType :: Lens' GetIAMPolicy (Maybe Text)
gipUploadType
= lens _gipUploadType
(\ s a -> s{_gipUploadType = a})
-- | REQUIRED: The resource for which the policy is being requested. See the
-- operation documentation for the appropriate value for this field.
gipResource :: Lens' GetIAMPolicy Text
gipResource
= lens _gipResource (\ s a -> s{_gipResource = a})
-- | JSONP
gipCallback :: Lens' GetIAMPolicy (Maybe Text)
gipCallback
= lens _gipCallback (\ s a -> s{_gipCallback = a})
instance GoogleRequest GetIAMPolicy where
type Rs GetIAMPolicy = GoogleIAMV1__Policy
type Scopes GetIAMPolicy =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient GetIAMPolicy'{..}
= go _gipResource _gipOptionsRequestedPolicyVersion
_gipXgafv
_gipUploadProtocol
_gipAccessToken
_gipUploadType
_gipCallback
(Just AltJSON)
serviceBrokerService
where go
= buildClient (Proxy :: Proxy GetIAMPolicyResource)
mempty
|
brendanhay/gogol
|
gogol-servicebroker/gen/Network/Google/Resource/ServiceBroker/GetIAMPolicy.hs
|
mpl-2.0
| 5,639
| 0
| 16
| 1,275
| 801
| 466
| 335
| 116
| 1
|
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
module Data.Function.Decorator.ConstraintLogic where
import GHC.Prim (Constraint)
----------------------------------------------------------------
-- Unused constraint logic stuff.
-- XXX: I can't partially apply a synonym:
{-
type ShowWith c t = (Show t , c t)
-}
-- Did I work around this in a similar example when I was doing the
-- het lists for Max? The point is that I'd like a 'Show' instance
-- that works for 'LogTree c' with any constraint 'c' that implies
-- 'Show'.
--
-- Making some special classes kind of works, e.g.:
class (c1 t , c2 t) => (c1 :&&: c2) t
instance (c1 t , c2 t) => (c1 :&&: c2) t
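-- A hedged illustration (hypothetical function; 'FlexibleContexts' would
-- likely be needed for the constraint head) of how ':&&:' lets one constraint
-- parameter stand in for a conjunction, with the superclass constraints making
-- 'show' and '==' available in the body:
--
-- > showIfEq :: (Show :&&: Eq) t => t -> t -> String
-- > showIfEq x y = if x == y then show x else show x ++ " /= " ++ show y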
-- but this doesn't really give implication. We can almost get
-- implication for constraints built from the combinators with
-- ??? Is there some way to existentially quantify a class parameter?
-- ??? This does not work: 'Not in scope: type variable `p''
{-
class c p => Exists c
instance c p => Exists c
-}
data Where = Here | L Where | R Where
class ((c :: * -> Constraint) `Elem` (cs :: * -> Constraint)) (evidence::Where)
instance (c `Elem` c) Here
instance (c `Elem` cs) evidence => (c `Elem` (c' :&&: cs)) (R evidence)
instance (c `Elem` cs) evidence => (c `Elem` (cs :&&: c')) (L evidence)
{-
type family Lookup c cs (evidence::Where) :: *
type instance Lookup c c Here = c
type instance (c `Elem` cs) evidence =>
-}
-- class Exists (c2 `Elem` c1) => (c1 :: * -> Constraint) :=>: (c2 :: * -> Constraint)
-- instance c :=>: c
-- instance (c1 :=>: c) => (c1 :&&: c2) :=>: c
{-
instance (c2 :=>: c) => (c1 :&&: c2) :=>: c
-}
{-
data Side = L | R
class (c1 :||: c2) (which::Side) t
instance c1 t => (c1 :||: c2) L t
instance c2 t => (c1 :||: c2) R t
-}
-- except the last instance overlaps with the previous one, so we
-- might need something like instance chains here.
|
ntc2/haskell-call-trace
|
experiments/constraint-logic.hs
|
mpl-2.0
| 2,108
| 5
| 11
| 394
| 272
| 167
| 105
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Directory.Users.SignOut
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Signs a user out of all web and device sessions and resets their sign-in
-- cookies. The user will have to sign in by authenticating again.
--
-- /See:/ <https://developers.google.com/admin-sdk/ Admin SDK API Reference> for @directory.users.signOut@.
module Network.Google.Resource.Directory.Users.SignOut
(
-- * REST Resource
UsersSignOutResource
-- * Creating a Request
, usersSignOut
, UsersSignOut
-- * Request Lenses
, usoXgafv
, usoUploadProtocol
, usoAccessToken
, usoUploadType
, usoUserKey
, usoCallback
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @directory.users.signOut@ method which the
-- 'UsersSignOut' request conforms to.
type UsersSignOutResource =
"admin" :>
"directory" :>
"v1" :>
"users" :>
Capture "userKey" Text :>
"signOut" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Post '[JSON] ()
-- | Signs a user out of all web and device sessions and resets their sign-in
-- cookies. The user will have to sign in by authenticating again.
--
-- /See:/ 'usersSignOut' smart constructor.
data UsersSignOut =
UsersSignOut'
{ _usoXgafv :: !(Maybe Xgafv)
, _usoUploadProtocol :: !(Maybe Text)
, _usoAccessToken :: !(Maybe Text)
, _usoUploadType :: !(Maybe Text)
, _usoUserKey :: !Text
, _usoCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'UsersSignOut' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'usoXgafv'
--
-- * 'usoUploadProtocol'
--
-- * 'usoAccessToken'
--
-- * 'usoUploadType'
--
-- * 'usoUserKey'
--
-- * 'usoCallback'
usersSignOut
:: Text -- ^ 'usoUserKey'
-> UsersSignOut
usersSignOut pUsoUserKey_ =
UsersSignOut'
{ _usoXgafv = Nothing
, _usoUploadProtocol = Nothing
, _usoAccessToken = Nothing
, _usoUploadType = Nothing
, _usoUserKey = pUsoUserKey_
, _usoCallback = Nothing
}
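-- A hedged usage sketch (the user key below is hypothetical):
--
-- > exampleSignOut :: UsersSignOut
-- > exampleSignOut = usersSignOut "jane.doe@example.com"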
-- | V1 error format.
usoXgafv :: Lens' UsersSignOut (Maybe Xgafv)
usoXgafv = lens _usoXgafv (\ s a -> s{_usoXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
usoUploadProtocol :: Lens' UsersSignOut (Maybe Text)
usoUploadProtocol
= lens _usoUploadProtocol
(\ s a -> s{_usoUploadProtocol = a})
-- | OAuth access token.
usoAccessToken :: Lens' UsersSignOut (Maybe Text)
usoAccessToken
= lens _usoAccessToken
(\ s a -> s{_usoAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
usoUploadType :: Lens' UsersSignOut (Maybe Text)
usoUploadType
= lens _usoUploadType
(\ s a -> s{_usoUploadType = a})
-- | Identifies the target user in the API request. The value can be the
-- user\'s primary email address, alias email address, or unique user ID.
usoUserKey :: Lens' UsersSignOut Text
usoUserKey
= lens _usoUserKey (\ s a -> s{_usoUserKey = a})
-- | JSONP
usoCallback :: Lens' UsersSignOut (Maybe Text)
usoCallback
= lens _usoCallback (\ s a -> s{_usoCallback = a})
instance GoogleRequest UsersSignOut where
type Rs UsersSignOut = ()
type Scopes UsersSignOut =
'["https://www.googleapis.com/auth/admin.directory.user.security"]
requestClient UsersSignOut'{..}
= go _usoUserKey _usoXgafv _usoUploadProtocol
_usoAccessToken
_usoUploadType
_usoCallback
(Just AltJSON)
directoryService
where go
= buildClient (Proxy :: Proxy UsersSignOutResource)
mempty
|
brendanhay/gogol
|
gogol-admin-directory/gen/Network/Google/Resource/Directory/Users/SignOut.hs
|
mpl-2.0
| 4,740
| 0
| 19
| 1,165
| 717
| 418
| 299
| 104
| 1
|
{-# LANGUAGE TemplateHaskell #-}
deriveFromJSON (unPrefix "assignPost") ''AssignmentPost
|
lspitzner/brittany
|
data/Test322.hs
|
agpl-3.0
| 89
| 0
| 7
| 8
| 18
| 8
| 10
| 2
| 0
|
{-# LANGUAGE OverloadedStrings #-}
module Core.Request.ContentDispositionSpec where
import Core.Request.ContentDisposition
import qualified Misc.Parser as P
import SpecHelper
spec :: Spec
spec =
describe "Core.Request.ContentSpec" $
context "Simple parsing" $
it "parses Content-Disposition" $ do
let contDisp = "form-data; name=\"data\"; filename=\"theFile.png\""
P.parseOnly parse contDisp `shouldBe`
Right (ContentDisposition FormData (Just "data") (Just "theFile.png"))
main :: IO ()
main = hspec spec
|
inq/agitpunkt
|
spec/Core/Request/ContentDispositionSpec.hs
|
agpl-3.0
| 589
| 0
| 13
| 141
| 124
| 66
| 58
| 15
| 1
|
import Test.Hspec
import Primes
main :: IO ()
main = hspec $ do
describe "Dummy" $ do
it "dummy test 1" $ do
(1 + 1) > 1 `shouldBe` True
describe "Divisible" $ do
it "Empty list" $ do
divisible [] 1 `shouldBe` False
divisible [] 2 `shouldBe` False
divisible [] 3 `shouldBe` False
it "Not divisible" $ do
divisible [2] 1 `shouldBe` False
divisible [3, 4, 5] 2 `shouldBe` False
divisible [4, 5, 6, 7, 8] 3 `shouldBe` False
divisible [3, 6, 7, 8] 10 `shouldBe` False
it "Divisible" $ do
divisible [2] 2 `shouldBe` True
divisible [2] 4 `shouldBe` True
divisible [2] 6 `shouldBe` True
divisible [3, 4, 5] 15 `shouldBe` True
divisible [4, 5, 6, 7, 8] 8 `shouldBe` True
divisible [3, 6, 7, 8] 21 `shouldBe` True
it "Primes_3" $ do
take 1 primes `shouldBe` [2]
take 2 primes `shouldBe` [2, 3]
take 3 primes `shouldBe` [2, 3, 5]
take 4 primes `shouldBe` [2, 3, 5, 7]
take 5 primes `shouldBe` [2, 3, 5, 7, 11]
take 6 primes `shouldBe` [2, 3, 5, 7, 11, 13]
take 168 primes `shouldBe` [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997]
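-- A minimal sketch (an assumption, not the actual Primes module) of the two
-- functions these tests exercise: 'divisible' checks a candidate against a
-- list of divisors, and 'primes' is a lazily generated stream of primes.
--
-- > divisible :: [Int] -> Int -> Bool
-- > divisible ds n = any (\d -> n `mod` d == 0) ds
-- >
-- > primes :: [Int]
-- > primes = go [2..]
-- >   where go (p:xs) = p : go [x | x <- xs, x `mod` p /= 0]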
|
orbitgray/ProjectEuler
|
haskell/common/Primes_test.hs
|
lgpl-3.0
| 2,036
| 0
| 17
| 603
| 1,055
| 631
| 424
| 32
| 1
|
module ItemLookup where
import qualified Data.Map as Map
import Data.Map (Map)
itemLookup :: Map Int String
itemLookup = Map.fromList
[ (0, "Air")
, (1, "Stone")
, (2, "Grass Block")
, (3, "Dirt")
, (4, "Cobblestone")
, (5, "Wood Planks")
, (6, "Sapling")
, (7, "Bedrock")
, (8, "Water")
, (9, "Water")
, (10, "Lava")
, (11, "Lava")
, (12, "Sand")
, (13, "Gravel")
, (14, "Gold Ore")
, (15, "Iron Ore")
, (16, "Coal Ore")
, (17, "Wood")
, (18, "Leaves")
, (19, "Sponge")
, (20, "Glass")
, (21, "Lapis Lazuli Ore")
, (22, "Lapis Lazuli Block")
, (23, "Dispenser")
, (24, "Sandstone")
, (25, "Note Block")
, (26, "Bed")
, (27, "Powered Rail")
, (28, "Detector Rail")
, (29, "Sticky Piston")
, (30, "Cobweb")
, (31, "Grass")
, (32, "Dead Bush")
, (33, "Piston")
, (34, "Piston Head")
, (35, "Wool")
, (36, "Block moved by Piston")
, (37, "Dandelion")
, (38, "Poppy")
, (39, "Brown Mushroom")
, (40, "Red Mushroom")
, (41, "Block of Gold")
, (42, "Block of Iron")
, (43, "Double Stone Slab")
, (44, "Stone Slab")
, (45, "Bricks")
, (46, "TNT")
, (47, "Bookshelf")
, (48, "Moss Stone")
, (49, "Obsidian")
, (50, "Torch")
, (51, "Fire")
, (52, "Monster Spawner")
, (53, "Oak Wood Stairs")
, (54, "Chest")
, (55, "Redstone Wire")
, (56, "Diamond Ore")
, (57, "Block of Diamond")
, (58, "Crafting Table")
, (59, "Wheat")
, (60, "Farmland")
, (61, "Furnace")
, (62, "Burning Furnace")
, (63, "Sign")
, (64, "Oak Door")
, (65, "Ladder")
, (66, "Rail")
, (67, "Cobblestone Stairs")
, (68, "Sign")
, (69, "Lever")
, (70, "Stone Pressure Plate")
, (71, "Iron Door")
, (72, "Wooden Pressure Plate")
, (73, "Redstone Ore")
, (74, "Redstone Ore")
, (75, "Redstone Torch (inactive)")
, (76, "Redstone Torch (active)")
, (77, "Stone Button")
, (78, "Snow (layer)")
, (79, "Ice")
, (80, "Snow")
, (81, "Cactus")
, (82, "Clay")
, (83, "Sugar Cane")
, (84, "Jukebox")
, (85, "Oak Fence")
, (86, "Pumpkin")
, (87, "Netherrack")
, (88, "Soul Sand")
, (89, "Glowstone")
, (90, "Portal")
, (91, "Jack o'Lantern")
, (92, "Cake")
, (93, "Redstone Repeater (inactive)")
, (94, "Redstone Repeater (active)")
, (95, "Stained Glass")
, (96, "Trapdoor")
, (97, "Monster Egg")
, (98, "Stone Bricks")
, (99, "Brown Mushroom (block)")
, (100, "Red Mushroom (block)")
, (101, "Iron Bars")
, (102, "Glass Pane")
, (103, "Melon")
, (104, "Pumpkin Stem")
, (105, "Melon Stem")
, (106, "Vines")
, (107, "Fence Gate")
, (108, "Brick Stairs")
, (109, "Stone Brick Stairs")
, (110, "Mycelium")
, (111, "Lily Pad")
, (112, "Nether Brick")
, (113, "Nether Brick Fence")
, (114, "Nether Brick Stairs")
, (115, "Nether Wart")
, (116, "Enchantment Table")
, (117, "Brewing Stand")
, (118, "Cauldron")
, (119, "End Portal")
, (120, "End Portal Frame")
, (121, "End Stone")
, (122, "Dragon Egg")
, (123, "Redstone Lamp (inactive)")
, (124, "Redstone Lamp (active)")
, (125, "Double Wooden Slab")
, (126, "Wooden Slab")
, (127, "Cocoa")
, (128, "Sandstone Stairs")
, (129, "Emerald Ore")
, (130, "Ender Chest")
, (131, "Tripwire Hook")
, (132, "Tripwire")
, (133, "Block of Emerald")
, (134, "Spruce Wood Stairs")
, (135, "Birch Wood Stairs")
, (136, "Jungle Wood Stairs")
, (137, "Command Block")
, (138, "Beacon")
, (139, "Cobblestone Wall")
, (140, "Flower Pot")
, (141, "Carrot")
, (142, "Potato")
, (143, "Wooden Button")
, (144, "Mob head")
, (145, "Anvil")
, (146, "Trapped Chest")
, (147, "Weighted Pressure Plate (Light)")
, (148, "Weighted Pressure Plate (Heavy)")
, (149, "Redstone Comparator")
, (150, "Redstone Comparator (deprecated)")
, (151, "Daylight Sensor")
, (152, "Block of Redstone")
, (153, "Nether Quartz Ore")
, (154, "Hopper")
, (155, "Block of Quartz")
, (156, "Quartz Stairs")
, (157, "Activator Rail")
, (158, "Dropper")
, (159, "Stained Clay")
, (160, "Stained Glass Pane")
, (161, "Leaves (Acacia/Dark Oak)")
, (162, "Wood (Acacia/Dark Oak)")
, (163, "Acacia Wood Stairs")
, (164, "Dark Oak Wood Stairs")
, (165, "Slime Block")
, (166, "Barrier")
, (167, "Iron Trapdoor")
, (168, "Prismarine")
, (169, "Sea Lantern")
, (170, "Hay Bale")
, (171, "Carpet")
, (172, "Hardened Clay")
, (173, "Block of Coal")
, (174, "Packed Ice")
, (175, "Large Flowers")
, (176, "Banner")
, (177, "Banner")
, (178, "Inverted Daylight Sensor")
, (179, "Red Sandstone")
, (180, "Red Sandstone Stairs")
, (181, "Double Red Sandstone Slab")
, (182, "Red Sandstone Slab")
, (183, "Spruce Fence Gate")
, (184, "Birch Fence Gate")
, (185, "Jungle Fence Gate")
, (186, "Dark Oak Fence Gate")
, (187, "Acacia Fence Gate")
, (188, "Spruce Fence")
, (189, "Birch Fence")
, (190, "Jungle Fence")
, (191, "Dark Oak Fence")
, (192, "Acacia Fence")
, (193, "Spruce Door")
, (194, "Birch Door")
, (195, "Jungle Door")
, (196, "Acacia Door")
, (197, "Dark Oak Door")
, (198, "End Rod")
, (199, "Chorus Plant")
, (200, "Chorus Flower")
, (201, "Purpur Block")
, (202, "Purpur Pillar")
, (203, "Purpur Stairs")
, (204, "Purpur Double Slab")
, (205, "Purpur Slab")
, (206, "End Stone Bricks")
, (207, "Beetroot Seeds")
, (208, "Grass Path")
, (209, "End Gateway")
, (210, "Repeating Command Block")
, (211, "Chain Command Block")
, (212, "Frosted Ice")
, (213, "Magma Block")
, (214, "Nether Wart Block")
, (215, "Red Nether Brick")
, (216, "Bone Block")
, (217, "Structure Void")
, (218, "Observer")
, (219, "White Shulker Box")
, (220, "Orange Shulker Box")
, (221, "Magenta Shulker Box")
, (222, "Light Blue Shulker Box")
, (223, "Yellow Shulker Box")
, (224, "Lime Shulker Box")
, (225, "Pink Shulker Box")
, (226, "Gray Shulker Box")
, (227, "Light Gray Shulker Box")
, (228, "Cyan Shulker Box")
, (229, "Purple Shulker Box")
, (230, "Blue Shulker Box")
, (231, "Brown Shulker Box")
, (232, "Green Shulker Box")
, (233, "Red Shulker Box")
, (234, "Black Shulker Box")
, (235, "White Glazed Terracotta")
, (236, "Orange Glazed Terracotta")
, (237, "Magenta Glazed Terracotta")
, (238, "Light Blue Glazed Terracotta")
, (239, "Yellow Glazed Terracotta")
, (240, "Lime Glazed Terracotta")
, (241, "Pink Glazed Terracotta")
, (242, "Gray Glazed Terracotta")
, (243, "Light Gray Glazed Terracotta")
, (244, "Cyan Glazed Terracotta")
, (245, "Purple Glazed Terracotta")
, (246, "Blue Glazed Terracotta")
, (247, "Brown Glazed Terracotta")
, (248, "Green Glazed Terracotta")
, (249, "Red Glazed Terracotta")
, (250, "Black Glazed Terracotta")
, (251, "Concrete")
, (252, "Concrete Powder")
, (255, "Structure Block")
, (256, "Iron Shovel")
, (257, "Iron Pickaxe")
, (258, "Iron Axe")
, (259, "Flint and Steel")
, (260, "Apple")
, (261, "Bow")
, (262, "Arrow")
, (263, "Coal")
, (264, "Diamond")
, (265, "Iron Ingot")
, (266, "Gold Ingot")
, (267, "Iron Sword")
, (268, "Wooden Sword")
, (269, "Wooden Shovel")
, (270, "Wooden Pickaxe")
, (271, "Wooden Axe")
, (272, "Stone Sword")
, (273, "Stone Shovel")
, (274, "Stone Pickaxe")
, (275, "Stone Axe")
, (276, "Diamond Sword")
, (277, "Diamond Shovel")
, (278, "Diamond Pickaxe")
, (279, "Diamond Axe")
, (280, "Stick")
, (281, "Bowl")
, (282, "Mushroom Stew")
, (283, "Golden Sword")
, (284, "Golden Shovel")
, (285, "Golden Pickaxe")
, (286, "Golden Axe")
, (287, "String")
, (288, "Feather")
, (289, "Gunpowder")
, (290, "Wooden Hoe")
, (291, "Stone Hoe")
, (292, "Iron Hoe")
, (293, "Diamond Hoe")
, (294, "Golden Hoe")
, (295, "Seeds")
, (296, "Wheat")
, (297, "Bread")
, (298, "Leather Cap")
, (299, "Leather Tunic")
, (300, "Leather Pants")
, (301, "Leather Boots")
, (302, "Chain Helmet")
, (303, "Chain Chestplate")
, (304, "Chain Leggings")
, (305, "Chain Boots")
, (306, "Iron Helmet")
, (307, "Iron Chestplate")
, (308, "Iron Leggings")
, (309, "Iron Boots")
, (310, "Diamond Helmet")
, (311, "Diamond Chestplate")
, (312, "Diamond Leggings")
, (313, "Diamond Boots")
, (314, "Golden Helmet")
, (315, "Golden Chestplate")
, (316, "Golden Leggings")
, (317, "Golden Boots")
, (318, "Flint")
, (319, "Raw Porkchop")
, (320, "Cooked Porkchop")
, (321, "Painting")
, (322, "Golden Apple")
, (323, "Sign")
, (324, "Oak Door")
, (325, "Bucket")
, (326, "Water Bucket")
, (327, "Lava Bucket")
, (328, "Minecart")
, (329, "Saddle")
, (330, "Iron Door")
, (331, "Redstone")
, (332, "Snowball")
, (333, "Boat")
, (334, "Leather")
, (335, "Milk")
, (336, "Brick")
, (337, "Clay")
, (338, "Sugar Cane")
, (339, "Paper")
, (340, "Book")
, (341, "Slimeball")
, (342, "Minecart with Chest")
, (343, "Minecart with Furnace")
, (344, "Egg")
, (345, "Compass")
, (346, "Fishing Rod")
, (347, "Clock")
, (348, "Glowstone Dust")
, (349, "Raw Fish")
, (350, "Cooked Fish")
, (351, "Dye")
, (352, "Bone")
, (353, "Sugar")
, (354, "Cake")
, (355, "Bed")
, (356, "Redstone Repeater")
, (357, "Cookie")
, (358, "Map")
, (359, "Shears")
, (360, "Melon")
, (361, "Pumpkin Seeds")
, (362, "Melon Seeds")
, (363, "Raw Beef")
, (364, "Steak")
, (365, "Raw Chicken")
, (366, "Cooked Chicken")
, (367, "Rotten Flesh")
, (368, "Ender Pearl")
, (369, "Blaze Rod")
, (370, "Ghast Tear")
, (371, "Gold Nugget")
, (372, "Nether Wart")
, (373, "Potion")
, (374, "Glass Bottle")
, (375, "Spider Eye")
, (376, "Fermented Spider Eye")
, (377, "Blaze Powder")
, (378, "Magma Cream")
, (379, "Brewing Stand")
, (380, "Cauldron")
, (381, "Eye of Ender")
, (382, "Glistering Melon")
, (383, "Spawn Egg")
, (384, "Bottle o' Enchanting")
, (385, "Fire Charge")
, (386, "Book and Quill")
, (387, "Written Book")
, (388, "Emerald")
, (389, "Item Frame")
, (390, "Flower Pot")
, (391, "Carrot")
, (392, "Potato")
, (393, "Baked Potato")
, (394, "Poisonous Potato")
, (395, "Empty Map")
, (396, "Golden Carrot")
, (397, "Mob head")
, (398, "Carrot on a Stick")
, (399, "Nether Star")
, (400, "Pumpkin Pie")
, (401, "Firework Rocket")
, (402, "Firework Star")
, (403, "Enchanted Book")
, (404, "Redstone Comparator")
, (405, "Nether Brick")
, (406, "Nether Quartz")
, (407, "Minecart with TNT")
, (408, "Minecart with Hopper")
, (409, "Prismarine Shard")
, (410, "Prismarine Crystals")
, (411, "Raw Rabbit")
, (412, "Cooked Rabbit")
, (413, "Rabbit Stew")
, (414, "Rabbit's Foot")
, (415, "Rabbit Hide")
, (416, "Armor Stand")
, (417, "Iron Horse Armor")
, (418, "Golden Horse Armor")
, (419, "Diamond Horse Armor")
, (420, "Lead")
, (421, "Name Tag")
, (422, "Minecart with Command Block")
, (423, "Raw Mutton")
, (424, "Cooked Mutton")
, (425, "Banner")
, (426, "End Crystal")
, (427, "Spruce Door")
, (428, "Birch Door")
, (429, "Jungle Door")
, (430, "Acacia Door")
, (431, "Dark Oak Door")
, (432, "Chorus Fruit")
, (433, "Popped Chorus Fruit")
, (434, "Beetroot")
, (435, "Beetroot Seeds")
, (436, "Beetroot Soup")
, (437, "Dragon's Breath")
, (438, "Splash Potion")
, (439, "Spectral Arrow")
, (440, "Tipped Arrow")
, (441, "Lingering Potion")
, (442, "Shield")
, (443, "Elytra")
, (444, "Spruce Boat")
, (445, "Birch Boat")
, (446, "Jungle Boat")
, (447, "Acacia Boat")
, (448, "Dark Oak Boat")
, (449, "Totem of Undying")
, (450, "Shulker Shell")
, (452, "Iron Nugget")
, (2256, "13 Disc")
, (2257, "Cat Disc")
, (2258, "Blocks Disc")
, (2259, "Chirp Disc")
, (2260, "Far Disc")
, (2261, "Mall Disc")
, (2262, "Mellohi Disc")
, (2263, "Stal Disc")
, (2264, "Strad Disc")
, (2265, "Ward Disc")
, (2266, "11 Disc")
, (2267, "Wait Disc")
]
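-- A hedged usage example (the fallback string is arbitrary):
--
-- > itemName :: Int -> String
-- > itemName i = Map.findWithDefault "Unknown" i itemLookup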
|
Xandaros/MinecraftCLI
|
app/ItemLookup.hs
|
bsd-2-clause
| 12,957
| 0
| 7
| 3,727
| 4,200
| 2,797
| 1,403
| 467
| 1
|
{-# LANGUAGE TypeFamilies, QuasiQuotes, TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Yesod.Helpers.Crud
( Item (..)
, Crud (..)
, CrudRoute (..)
, defaultCrud
) where
import Yesod.Core
import Text.Hamlet
import Yesod.Form
import Language.Haskell.TH.Syntax
import Yesod.Persist
import Data.Text (Text)
import Web.Routes.Quasi (toSinglePiece, fromSinglePiece)
-- | An entity which can be displayed by the Crud subsite.
class Item a where
-- | The title of an entity, to be displayed in the list of all entities.
itemTitle :: a -> Text
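-- An instance for a hypothetical entity might look like:
--
-- > instance Item Person where
-- >     itemTitle = personName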
-- | Defines all of the CRUD operations (Create, Read, Update, Delete)
-- necessary to implement this subsite. When using the "Yesod.Form" module and
-- 'ToForm' typeclass, you can probably just use 'defaultCrud'.
data Crud master item = Crud
{ crudSelect :: GHandler (Crud master item) master [(Key item, item)]
, crudReplace :: Key item -> item -> GHandler (Crud master item) master ()
, crudInsert :: item -> GHandler (Crud master item) master (Key item)
, crudGet :: Key item -> GHandler (Crud master item) master (Maybe item)
, crudDelete :: Key item -> GHandler (Crud master item) master ()
}
mkYesodSub "Crud master item"
[ ClassP ''Item [VarT $ mkName "item"]
, ClassP ''SinglePiece [ConT ''Key `AppT` VarT (mkName "item")]
, ClassP ''ToForm [VarT $ mkName "item", VarT $ mkName "master"]
]
#if __GLASGOW_HASKELL__ >= 700
[parseRoutes|
#else
[$parseRoutes|
#endif
/ CrudListR GET
/add CrudAddR GET POST
/edit/#Text CrudEditR GET POST
/delete/#Text CrudDeleteR GET POST
|]
getCrudListR :: (Yesod master, Item item, SinglePiece (Key item))
=> GHandler (Crud master item) master RepHtml
getCrudListR = do
items <- getYesodSub >>= crudSelect
toMaster <- getRouteToMaster
defaultLayout $ do
setTitle "Items"
addWidget
#if __GLASGOW_HASKELL__ >= 700
[hamlet|
#else
[$hamlet|
#endif
<h1>Items
<ul>
$forall item <- items
<li>
<a href="@{toMaster (CrudEditR (toSinglePiece (fst item)))}">
\#{itemTitle (snd item)}
<p>
<a href="@{toMaster CrudAddR}">Add new item
|]
getCrudAddR :: (Yesod master, Item item, SinglePiece (Key item),
ToForm item master)
=> GHandler (Crud master item) master RepHtml
getCrudAddR = crudHelper
"Add new"
(Nothing :: Maybe (Key item, item))
False
postCrudAddR :: (Yesod master, Item item, SinglePiece (Key item),
ToForm item master)
=> GHandler (Crud master item) master RepHtml
postCrudAddR = crudHelper
"Add new"
(Nothing :: Maybe (Key item, item))
True
getCrudEditR :: (Yesod master, Item item, SinglePiece (Key item),
ToForm item master)
=> Text -> GHandler (Crud master item) master RepHtml
getCrudEditR s = do
itemId <- maybe notFound return $ fromSinglePiece s
crud <- getYesodSub
item <- crudGet crud itemId >>= maybe notFound return
crudHelper
"Edit item"
(Just (itemId, item))
False
postCrudEditR :: (Yesod master, Item item, SinglePiece (Key item),
ToForm item master)
=> Text -> GHandler (Crud master item) master RepHtml
postCrudEditR s = do
itemId <- maybe notFound return $ fromSinglePiece s
crud <- getYesodSub
item <- crudGet crud itemId >>= maybe notFound return
crudHelper
"Edit item"
(Just (itemId, item))
True
getCrudDeleteR :: (Yesod master, Item item, SinglePiece (Key item))
=> Text -> GHandler (Crud master item) master RepHtml
getCrudDeleteR s = do
itemId <- maybe notFound return $ fromSinglePiece s
crud <- getYesodSub
item <- crudGet crud itemId >>= maybe notFound return -- Just ensure it exists
toMaster <- getRouteToMaster
defaultLayout $ do
setTitle "Confirm delete"
addWidget
#if __GLASGOW_HASKELL__ >= 700
[hamlet|
#else
[$hamlet|
#endif
<form method="post" action="@{toMaster (CrudDeleteR s)}">
<h1>Really delete?
<p>Do you really want to delete #{itemTitle item}?
<p>
<input type="submit" value="Yes">
\
<a href="@{toMaster CrudListR}">No
|]
postCrudDeleteR :: (Yesod master, Item item, SinglePiece (Key item))
=> Text -> GHandler (Crud master item) master RepHtml
postCrudDeleteR s = do
itemId <- maybe notFound return $ fromSinglePiece s
crud <- getYesodSub
toMaster <- getRouteToMaster
crudDelete crud itemId
redirect RedirectTemporary $ toMaster CrudListR
crudHelper
:: (Item a, Yesod master, SinglePiece (Key a), ToForm a master)
=> Text -> Maybe (Key a, a) -> Bool
-> GHandler (Crud master a) master RepHtml
crudHelper title me isPost = do
crud <- getYesodSub
(errs, form, enctype, hidden) <- runFormPost $ toForm $ fmap snd me
toMaster <- getRouteToMaster
case (isPost, errs) of
(True, FormSuccess a) -> do
eid <- case me of
Just (eid, _) -> do
crudReplace crud eid a
return eid
Nothing -> crudInsert crud a
redirect RedirectTemporary $ toMaster $ CrudEditR
$ toSinglePiece eid
_ -> return ()
defaultLayout $ do
setTitle $ toHtml title
addWidget
#if __GLASGOW_HASKELL__ >= 700
[hamlet|
#else
[$hamlet|
#endif
<p>
<a href="@{toMaster CrudListR}">Return to list
<h1>#{title}
<form method="post" enctype="#{enctype}">
<table>
\^{form}
<tr>
<td colspan="2">
\#{hidden}
<input type="submit">
$maybe e <- me
\
<a href="@{toMaster (CrudDeleteR (toSinglePiece (fst e)))}">Delete
|]
-- | A default 'Crud' value which relies on persistent and "Yesod.Form".
defaultCrud
:: (PersistEntity i, PersistBackend (YesodDB a (GGHandler (Crud a i) a IO)),
YesodPersist a)
=> a -> Crud a i
defaultCrud = const Crud
{ crudSelect = runDB $ selectList [] [] 0 0
, crudReplace = \a -> runDB . replace a
, crudInsert = runDB . insert
, crudGet = runDB . get
, crudDelete = runDB . delete
}
|
chreekat/yesod
|
yesod-form/Yesod/Helpers/Crud.hs
|
bsd-2-clause
| 6,693
| 0
| 18
| 1,986
| 1,593
| 813
| 780
| 128
| 3
|
module Utils.Drasil.Phrase where
import Language.Drasil
import qualified Language.Drasil.Development as D
import Control.Lens ((^.))
import Utils.Drasil.Sentence (sAnd, sOf)
-- | Creates an NP by combining two 'NamedIdea's with the word "and" between
-- their terms. Plural is defaulted to @(phrase t1) "and" (plural t2)@
and_ :: (NamedIdea c, NamedIdea d) => c -> d -> NP
and_ t1 t2 = nounPhrase''
(phrase t1 `sAnd` phrase t2)
(phrase t1 `sAnd` plural t2)
(Replace (atStart t1 `sAnd` phrase t2))
(Replace (titleize t1 `sAnd` titleize t2))
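-- For example (hypothetical chunks): @mass `and_` velocity@ renders roughly as
-- "mass and velocity", with the plural "mass and velocities".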
-- | Same as the 'and_' combinator, except the phrase default of the second term is plural instead of phrase.
and_' :: (NamedIdea c, NamedIdea d) => c -> d -> NP
and_' t1 t2 = nounPhrase''
(phrase t1 `sAnd` plural t2)
(phrase t1 `sAnd` plural t2)
(Replace (atStart t1 `sAnd` plural t2))
(Replace (titleize t1 `sAnd` titleize' t2))
-- | Customizable `and` combinator
andRT :: (NamedIdea c, NamedIdea d) =>
(c -> Sentence) -> (d -> Sentence) -> c -> d -> NP
andRT f1 f2 t1 t2 = nounPhrase''
(phrase t1 `sAnd` plural t2)
(phrase t1 `sAnd` phrase t2)
(Replace (atStart t1 `sAnd` phrase t2))
(Replace (f1 t1 `sAnd` f2 t2))
-- | Case of "T1s with T2", as opposed to "T1 with T2", i.e. the
-- phrase defaults to @(plural t1) "with" (phrase t2)@; the plural pluralizes both.
with :: (NamedIdea c, NamedIdea d) => c -> d -> NP
with t1 t2 = nounPhrase''
(plural t1 +:+ S "with" +:+ phrase t2)
(plural t1 +:+ S "with" +:+ plural t2)
(Replace (atStart' t1 +:+ S "with" +:+ phrase t2))
(Replace (titleize' t1 +:+ S "with" +:+ titleize t2))
-- | Creates a noun phrase by combining two 'NamedIdea's with the word "of" between
-- their terms. Plural is defaulted to @(phrase t1) "of" (plural t2)@
of_ :: (NamedIdea c, NamedIdea d) => c -> d -> NP
of_ t1 t2 = nounPhrase''
(phrase t1 `sOf` phrase t2)
(phrase t1 `sOf` plural t2)
(Replace (atStart t1 `sOf` phrase t2))
(Replace (titleize t1 `sOf` titleize t2))
ofN_ :: (NamedIdea c, NounPhrase d) => c -> d -> NP
ofN_ t1 t2 = nounPhrase''
(phrase t1 `sOf` phraseNP t2)
(phrase t1 `sOf` pluralNP t2)
(Replace (atStart t1 `sOf` phraseNP t2))
(Replace (titleize t1 `sOf` titleizeNP t2))
-- | Creates a noun phrase by combining two 'NamedIdea's with the word "of" between
-- them. 'phrase' is defaulted to @(phrase t1) "of" (plural t2)@. Plural is the same.
of_' :: (NamedIdea c, NamedIdea d) => c -> d -> NP
of_' t1 t2 = nounPhrase''
(phrase t1 `sOf` plural t2)
(phrase t1 `sOf` plural t2)
(Replace (atStart t1 `sOf` plural t2))
(Replace (titleize t1 `sOf` titleize' t2))
-- | Same as 'of_', except the plural default of the second term is phrase.
of_'' :: (NamedIdea c, NamedIdea d) => c -> d -> NP
of_'' t1 t2 = nounPhrase''
(phrase t1 `sOf` phrase t2)
(plural t1 `sOf` phrase t2)
(Replace (atStart t1 `sOf` phrase t2))
(Replace (titleize t1 `sOf` titleize t2))
-- | Same as 'of_', except the phrase default of the first term is plural instead of phrase.
of__ :: (NamedIdea c, NamedIdea d) => c -> d -> NP
of__ t1 t2 = nounPhrase''
(plural t1 `sOf` phrase t2)
(plural t1 `sOf` phrase t2)
(Replace (atStart' t1 `sOf` phrase t2))
(Replace (titleize' t1 `sOf` titleize t2))
-- | Same as 'of__', except the combining Sentence piece is "of a".
ofA :: (NamedIdea c, NamedIdea d) => c -> d -> NP
ofA t1 t2 = nounPhrase''
(plural t1 +:+ S "of a" +:+ phrase t2)
(plural t1 +:+ S "of a" +:+ phrase t2)
(Replace (atStart' t1 +:+ S "of a" +:+ phrase t2))
(Replace (titleize' t1 +:+ S "of a" +:+ titleize t2))
--FIXME: As mentioned in issue #487, the following should be re-examined later,
-- as they may embody a deeper idea in some cases.
-- we might want to eventually restrict the use of these via
-- some kind of type system, which asserts that:
-- 1. t1 `for` t2 means that t1 is a view of part of the reason behind t2
-- 2. t1 `of_` t2 means that t1 is a view of part of the structure of t2
-- | Inserts the word "for" between the titleized versions of
-- two terms
for :: (NamedIdea c, NamedIdea d) => c -> d -> Sentence
for t1 t2 = titleize t1 +:+ S "for" +:+ titleize t2
-- | Similar to 'for', but uses titleized version of term 1 with the abbreviation
-- (if it exists, phrase otherwise) for term 2
for' :: (NamedIdea c, Idea d) => c -> d -> Sentence
for' t1 t2 = titleize t1 +:+ S "for" +:+ short t2
-- | Similar to 'for', but allows one to specify the function to use on each term
-- before inserting for. For example one could use @for'' phrase plural t1 t2@
for'' :: (c -> Sentence) -> (d -> Sentence) -> c -> d -> Sentence
for'' f1 f2 t1 t2 = f1 t1 +:+ S "for" +:+ f2 t2
the' :: (NamedIdea t) => t -> NP
the' t = nounPhrase'' (S "the" +:+ titleize t) (S "the" +:+ titleize' t) CapWords CapWords
the :: (NamedIdea t) => t -> NP
the t = nounPhrase'' (S "the" +:+ phrase t) (S "the" +:+ plural t) CapWords CapWords
theCustom :: (t -> Sentence) -> t -> NP
theCustom f t = nounPhrase''(S "the" +:+ f t) (S "the" +:+ f t) CapFirst CapWords
-- | Combinator for combining two 'NamedChunk's into one.
-- /Does not preserve abbreviations/
compoundNC :: (NamedIdea a, NamedIdea b) => a -> b -> NamedChunk
compoundNC t1 t2 = nc
(t1 ^. uid ++ t2^.uid) (compoundPhrase (t1 ^. term) (t2 ^. term))
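-- For example (hypothetical chunks): @compoundNC fuel tank@ yields a chunk
-- whose UID is the two UIDs appended and whose term reads as the compound
-- phrase "fuel tank".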
compoundNC' :: (NamedIdea a, NamedIdea b) => a -> b -> NamedChunk
compoundNC' t1 t2 = nc
(t1 ^. uid ++ t2 ^. uid) (compoundPhrase'' D.pluralNP D.pluralNP (t1 ^. term) (t2 ^. term))
compoundNC'' :: (NamedIdea a, NamedIdea b) =>
(NP -> Sentence) -> (NP -> Sentence) -> a -> b -> NamedChunk
compoundNC'' f1 f2 t1 t2 = nc
(t1 ^. uid ++ t2 ^. uid) (compoundPhrase'' f1 f2 (t1 ^. term) (t2 ^. term))
compoundNCPlPh :: NamedChunk -> NamedChunk -> NamedChunk
compoundNCPlPh = compoundNC'' D.pluralNP D.phraseNP
compoundNCPlPl :: NamedChunk -> NamedChunk -> NamedChunk
compoundNCPlPl = compoundNC'' D.pluralNP D.pluralNP
-- Hack for "Solution Characteristics Specification": calling 'plural' will pluralize
-- "Characteristics", since it ends the first term (solutionCharacteristic).
compoundNC''' :: (NamedIdea a, NamedIdea b) => (NP -> Sentence) -> a -> b -> NamedChunk
compoundNC''' f1 t1 t2 = nc
(t1 ^. uid ++ t2 ^. uid) (compoundPhrase''' f1 (t1 ^. term) (t2 ^. term))
compoundNCP1 :: NamedChunk -> NamedChunk -> NamedChunk
compoundNCP1 = compoundNC''' D.pluralNP
|
JacquesCarette/literate-scientific-software
|
code/drasil-utils/Utils/Drasil/Phrase.hs
|
bsd-2-clause
| 6,306
| 0
| 11
| 1,267
| 2,145
| 1,118
| 1,027
| 97
| 1
|
module MB.Processors.Mathjax
( mathjaxProcessor
)
where
import qualified Text.Pandoc as Pandoc
import MB.Types
mathjaxProcessor :: Processor
mathjaxProcessor =
nullProcessor { applyWriterOptions = Just mathjaxOpts
}
mathjaxOpts :: Pandoc.WriterOptions -> Pandoc.WriterOptions
mathjaxOpts opts =
opts { Pandoc.writerHTMLMathMethod = Pandoc.MathJax "MathJax/MathJax.js"
}
|
jtdaugherty/mathblog
|
src/MB/Processors/Mathjax.hs
|
bsd-3-clause
| 416
| 0
| 8
| 86
| 82
| 48
| 34
| 10
| 1
|
module Test(main) where
import TestGen
import TestUtil
import TestCustom
-- Check that we managed to export everything
_unused1 x = whenJust
_unused2 x = (&&^)
_unused3 x = system_
_unused4 x = word1
_unused5 x = readFile'
_unused6 x = x :: Seconds
main :: IO ()
main = runTests $ do
testSetup
tests
testCustom
|
ndmitchell/extra
|
test/Test.hs
|
bsd-3-clause
| 328
| 0
| 7
| 72
| 100
| 54
| 46
| 15
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
------------------------------------------------------------------------------------
-- |
-- Copyright : (c) Hans Hoglund 2012
--
-- License : BSD-style
--
-- Maintainer : hans@hanshoglund.se
-- Stability : experimental
-- Portability : non-portable (TF,GNTD)
--
-- Provides miscellaneous instances.
--
-------------------------------------------------------------------------------------
module Music.Prelude.Instances () where
import Data.AffineSpace.Point
import Data.Typeable
import Control.Comonad (extract)
import Music.Dynamics
import Music.Parts
import Music.Pitch
import Music.Score hiding (Fifths, Interval, Note, Pitch)
-- import qualified Data.Music.Lilypond as Lilypond
-- import qualified Data.Music.MusicXml.Simple as Xml
import qualified Music.Score as Score
import Data.Functor.Adjunction (unzipR)
instance HasBackendNote Midi Semitones where
exportNote b = exportNote b . fmap toInteger
exportChord b = exportChord b . fmap (fmap toInteger)
instance HasBackendNote Midi Pitch where
exportNote b = exportNote b . fmap (\p -> semitones (p .-. c))
exportChord b = exportChord b . fmap (fmap (\p -> semitones (p .-. c)))
instance HasBackendNote SuperCollider Semitones where
exportNote b = exportNote b . fmap toInteger
exportChord b = exportChord b . fmap (fmap toInteger)
instance HasBackendNote SuperCollider Pitch where
exportNote b = exportNote b . fmap (\p -> semitones (p .-. c))
exportChord b = exportChord b . fmap (fmap (\p -> semitones (p .-. c)))
-- instance HasBackendNote MusicXml Pitch where
-- exportNote _ (XmlContext d Nothing) = Xml.rest (realToFrac d)
-- exportNote _ (XmlContext d (Just x)) = (`Xml.note` realToFrac d) . snd3 Just . spellPitch 4 $ x
--
-- exportChord _ (XmlContext d Nothing) = Xml.rest (realToFrac d)
-- exportChord _ (XmlContext d (Just xs)) = (`Xml.chord` (realToFrac d)) . fmap (snd3 Just . spellPitch 4) $ xs
--
-- instance HasBackendNote Lilypond Pitch where
-- exportNote _ (LyContext d Nothing) = (^*realToFrac (4*d)) Lilypond.rest
-- exportNote _ (LyContext d (Just x)) = (^*realToFrac (d*4)) . Lilypond.note . pitchLilypond . Lilypond.Pitch . spellPitch 5 $ x
--
-- exportChord _ (LyContext d Nothing) = (^*realToFrac (4*d)) Lilypond.rest
-- exportChord _ (LyContext d (Just xs)) = (^*realToFrac (d*4)) . Lilypond.chord . fmap (pitchLilypond . Lilypond.Pitch . spellPitch 5) $ xs
-- TODO move
snd3 f (a, b, c) = (a, f b, c)
-- pitchLilypond a = Lilypond.NotePitch a Nothing
spellPitch :: (Enum p, Num a, Num o) => Octaves -> Pitch -> (p, a, o)
spellPitch referenceOctave p = (pitchName, pitchAccidental, octave)
where
pitchName = toEnum $ fromEnum $ name p
pitchAccidental = fromIntegral $ accidental p
octave = fromIntegral $ (+ referenceOctave) $ octaves (p .-. c)
instance HasMidiProgram BasicPart where
getMidiChannel _ = 0
getMidiProgram _ = 0
instance HasMidiProgram Music.Parts.Part where
getMidiChannel = defaultMidiChannel
getMidiProgram = fixStrings . defaultMidiProgram
where
fixStrings x = case x of
40 -> 48
41 -> 48
42 -> 48
x -> x
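-- (A reading of the case above, stated as an assumption: the solo string
-- programs, General MIDI 40-42, are remapped to program 48, a string
-- ensemble sound, for ensemble parts.)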
-- instance HasLilypondInstrument BasicPart where
-- getLilypondClef = 0
--
-- instance HasLilypondInstrument Music.Parts.Part where
-- getLilypondClef = defaultClef
--
-- instance HasMusicXmlInstrument BasicPart where
-- getMusicXmlClef = 0
-- getMusicXmlNumberOfStaves = 1
--
-- instance HasMusicXmlInstrument Music.Parts.Part where
-- getMusicXmlClef = defaultClef
-- getMusicXmlNumberOfStaves p
-- | p == harp = 2
-- | p^._instrument == piano = 2
-- | p^._instrument == celesta = 2
-- | otherwise = 1
instance HasDuration Pitch where
_duration = const 1
instance HasDuration a => HasDuration (PartT p a) where
_duration = _duration . extract
instance HasDuration a => HasDuration (ColorT a) where
_duration = _duration . extract
instance HasDuration a => HasDuration (TextT a) where
_duration = _duration . extract
instance HasDuration a => HasDuration (TremoloT a) where
_duration = _duration . extract
instance HasDuration a => HasDuration (HarmonicT a) where
_duration = _duration . extract
instance HasDuration a => HasDuration (SlideT a) where
_duration = _duration . extract
instance HasDuration a => HasDuration (ArticulationT b a) where
_duration = _duration . extract
instance HasDuration a => HasDuration (DynamicT b a) where
_duration = _duration . extract
instance HasDuration a => HasDuration (TieT a) where
_duration = _duration . extract
instance Splittable Pitch where
split _ x = (x,x)
instance Splittable a => Splittable (PartT p a) where
split t = unzipR . fmap (split t)
instance Splittable a => Splittable (ColorT a) where
split t = unzipR . fmap (split t)
instance Splittable a => Splittable (TextT a) where
split t = unzipR . fmap (split t)
instance Splittable a => Splittable (TremoloT a) where
split t = unzipR . fmap (split t)
instance Splittable a => Splittable (HarmonicT a) where
split t = unzipR . fmap (split t)
instance Splittable a => Splittable (SlideT a) where
split t = unzipR . fmap (split t)
instance Splittable a => Splittable (ArticulationT b a) where
split t = unzipR . fmap (split t)
instance Splittable a => Splittable (DynamicT b a) where
split t = unzipR . fmap (split t)
instance Splittable a => Splittable (TieT a) where
split t = unzipR . fmap (split t)
instance Reversible Pitch where
rev = id
instance Reversible (Score a ) where
rev = revDefault
|
music-suite/music-preludes
|
src/Music/Prelude/Instances.hs
|
bsd-3-clause
| 6,127
| 0
| 14
| 1,370
| 1,389
| 733
| 656
| 91
| 1
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[Id]{@Ids@: Value and constructor identifiers}
-}
{-# LANGUAGE CPP #-}
-- |
-- #name_types#
-- GHC uses several kinds of name internally:
--
-- * 'OccName.OccName': see "OccName#name_types"
--
-- * 'RdrName.RdrName': see "RdrName#name_types"
--
-- * 'Name.Name': see "Name#name_types"
--
-- * 'Id.Id' represents names that not only have a 'Name.Name' but also a 'TypeRep.Type' and some additional
-- details (a 'IdInfo.IdInfo' and one of 'Var.LocalIdDetails' or 'IdInfo.GlobalIdDetails') that
-- are added, modified and inspected by various compiler passes. These 'Var.Var' names may either
-- be global or local, see "Var#globalvslocal"
--
-- * 'Var.Var': see "Var#name_types"
module Id (
-- * The main types
Var, Id, isId,
-- ** Simple construction
mkGlobalId, mkVanillaGlobal, mkVanillaGlobalWithInfo,
mkLocalId, mkLocalIdWithInfo, mkExportedLocalId,
mkSysLocal, mkSysLocalM, mkUserLocal, mkUserLocalM,
mkDerivedLocalM,
mkTemplateLocals, mkTemplateLocalsNum, mkTemplateLocal,
mkWorkerId, mkWiredInIdName,
-- ** Taking an Id apart
idName, idType, idUnique, idInfo, idDetails, idRepArity,
recordSelectorFieldLabel,
-- ** Modifying an Id
setIdName, setIdUnique, Id.setIdType,
setIdExported, setIdNotExported,
globaliseId, localiseId,
setIdInfo, lazySetIdInfo, modifyIdInfo, maybeModifyIdInfo,
zapLamIdInfo, zapIdDemandInfo, zapIdUsageInfo, zapFragileIdInfo,
zapIdStrictness,
transferPolyIdInfo,
-- ** Predicates on Ids
isImplicitId, isDeadBinder,
isStrictId,
isExportedId, isLocalId, isGlobalId,
isRecordSelector, isNaughtyRecordSelector,
isClassOpId_maybe, isDFunId,
isPrimOpId, isPrimOpId_maybe,
isFCallId, isFCallId_maybe,
isDataConWorkId, isDataConWorkId_maybe, isDataConId_maybe, idDataCon,
isConLikeId, isBottomingId, idIsFrom,
hasNoBinding,
-- ** Evidence variables
DictId, isDictId, dfunNSilent, isEvVar,
-- ** Inline pragma stuff
idInlinePragma, setInlinePragma, modifyInlinePragma,
idInlineActivation, setInlineActivation, idRuleMatchInfo,
-- ** One-shot lambdas
isOneShotBndr, isOneShotLambda, isProbablyOneShotLambda,
setOneShotLambda, clearOneShotLambda,
updOneShotInfo, setIdOneShotInfo,
isStateHackType, stateHackOneShot, typeOneShot,
-- ** Reading 'IdInfo' fields
idArity,
idCallArity,
idUnfolding, realIdUnfolding,
idSpecialisation, idCoreRules, idHasRules,
idCafInfo,
idOneShotInfo,
idOccInfo,
-- ** Writing 'IdInfo' fields
setIdUnfoldingLazily,
setIdUnfolding,
setIdArity,
setIdCallArity,
setIdSpecialisation,
setIdCafInfo,
setIdOccInfo, zapIdOccInfo,
setIdDemandInfo,
setIdStrictness,
idDemandInfo,
idStrictness,
) where
#include "HsVersions.h"
import CoreSyn ( CoreRule, Unfolding( NoUnfolding ) )
import IdInfo
import BasicTypes
-- Imported and re-exported
import Var( Id, DictId,
idInfo, idDetails, globaliseId, varType,
isId, isLocalId, isGlobalId, isExportedId )
import qualified Var
import TyCon
import Type
import TysPrim
import DataCon
import Demand
import Name
import Module
import Class
import {-# SOURCE #-} PrimOp (PrimOp)
import ForeignCall
import Maybes
import SrcLoc
import Outputable
import Unique
import UniqSupply
import FastString
import Util
import StaticFlags
-- infixl so you can say (id `set` a `set` b)
infixl 1 `setIdUnfoldingLazily`,
`setIdUnfolding`,
`setIdArity`,
`setIdCallArity`,
`setIdOccInfo`,
`setIdOneShotInfo`,
`setIdSpecialisation`,
`setInlinePragma`,
`setInlineActivation`,
`idCafInfo`,
`setIdDemandInfo`,
`setIdStrictness`
{-
************************************************************************
* *
\subsection{Basic Id manipulation}
* *
************************************************************************
-}
idName :: Id -> Name
idName = Var.varName
idUnique :: Id -> Unique
idUnique = Var.varUnique
idType :: Id -> Kind
idType = Var.varType
setIdName :: Id -> Name -> Id
setIdName = Var.setVarName
setIdUnique :: Id -> Unique -> Id
setIdUnique = Var.setVarUnique
-- | Not only does this set the 'Id' 'Type', it also evaluates the type to try and
-- reduce space usage
setIdType :: Id -> Type -> Id
setIdType id ty = seqType ty `seq` Var.setVarType id ty
setIdExported :: Id -> Id
setIdExported = Var.setIdExported
setIdNotExported :: Id -> Id
setIdNotExported = Var.setIdNotExported
localiseId :: Id -> Id
-- Make an Id with the same unique and type as the
-- incoming Id, but with an *Internal* Name and *LocalId* flavour
localiseId id
| ASSERT( isId id ) isLocalId id && isInternalName name
= id
| otherwise
= mkLocalIdWithInfo (localiseName name) (idType id) (idInfo id)
where
name = idName id
lazySetIdInfo :: Id -> IdInfo -> Id
lazySetIdInfo = Var.lazySetIdInfo
setIdInfo :: Id -> IdInfo -> Id
setIdInfo id info = seqIdInfo info `seq` (lazySetIdInfo id info)
    -- Try to avoid space leaks by seq'ing
modifyIdInfo :: (IdInfo -> IdInfo) -> Id -> Id
modifyIdInfo fn id = setIdInfo id (fn (idInfo id))
-- maybeModifyIdInfo tries to avoid unnecessary thrashing
maybeModifyIdInfo :: Maybe IdInfo -> Id -> Id
maybeModifyIdInfo (Just new_info) id = lazySetIdInfo id new_info
maybeModifyIdInfo Nothing id = id
{-
************************************************************************
* *
\subsection{Simple Id construction}
* *
************************************************************************
Absolutely all Ids are made by mkId. It is just like Var.mkId,
but in addition it pins free-tyvar-info onto the Id's type,
where it can easily be found.
Note [Free type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~
At one time we cached the free type variables of the type of an Id
at the root of the type in a TyNote. The idea was to avoid repeating
the free-type-variable calculation. But it turned out to slow down
the compiler overall. I don't quite know why; perhaps finding free
type variables of an Id isn't all that common whereas applying a
substitution (which changes the free type variables) is more common.
Anyway, we removed it in March 2008.
-}
-- | For an explanation of global vs. local 'Id's, see "Var#globalvslocal"
mkGlobalId :: IdDetails -> Name -> Type -> IdInfo -> Id
mkGlobalId = Var.mkGlobalVar
-- | Make a global 'Id' without any extra information at all
mkVanillaGlobal :: Name -> Type -> Id
mkVanillaGlobal name ty = mkVanillaGlobalWithInfo name ty vanillaIdInfo
-- | Make a global 'Id' with no global information but some generic 'IdInfo'
mkVanillaGlobalWithInfo :: Name -> Type -> IdInfo -> Id
mkVanillaGlobalWithInfo = mkGlobalId VanillaId
-- | For an explanation of global vs. local 'Id's, see "Var#globalvslocal"
mkLocalId :: Name -> Type -> Id
mkLocalId name ty = mkLocalIdWithInfo name ty
(vanillaIdInfo `setOneShotInfo` typeOneShot ty)
mkLocalIdWithInfo :: Name -> Type -> IdInfo -> Id
mkLocalIdWithInfo name ty info = Var.mkLocalVar VanillaId name ty info
-- Note [Free type variables]
-- | Create a local 'Id' that is marked as exported.
-- This prevents things attached to it from being removed as dead code.
-- See Note [Exported LocalIds]
mkExportedLocalId :: IdDetails -> Name -> Type -> Id
mkExportedLocalId details name ty = Var.mkExportedLocalVar details name ty vanillaIdInfo
-- Note [Free type variables]
-- | Create a system local 'Id'. These are local 'Id's (see "Var#globalvslocal")
-- that are created by the compiler out of thin air
mkSysLocal :: FastString -> Unique -> Type -> Id
mkSysLocal fs uniq ty = mkLocalId (mkSystemVarName uniq fs) ty
mkSysLocalM :: MonadUnique m => FastString -> Type -> m Id
mkSysLocalM fs ty = getUniqueM >>= (\uniq -> return (mkSysLocal fs uniq ty))
-- | Create a user local 'Id'. These are local 'Id's (see "Var#globalvslocal") with a name and location that the user might recognize
mkUserLocal :: OccName -> Unique -> Type -> SrcSpan -> Id
mkUserLocal occ uniq ty loc = mkLocalId (mkInternalName uniq occ loc) ty
mkUserLocalM :: MonadUnique m => OccName -> Type -> SrcSpan -> m Id
mkUserLocalM occ ty loc = getUniqueM >>= (\uniq -> return (mkUserLocal occ uniq ty loc))
mkDerivedLocalM :: MonadUnique m => (OccName -> OccName) -> Id -> Type -> m Id
mkDerivedLocalM deriv_name id ty
= getUniqueM >>= (\uniq -> return (mkLocalId (mkDerivedInternalName deriv_name uniq (getName id)) ty))
mkWiredInIdName :: Module -> FastString -> Unique -> Id -> Name
mkWiredInIdName mod fs uniq id
= mkWiredInName mod (mkOccNameFS varName fs) uniq (AnId id) UserSyntax
{-
Make some local @Ids@ for a template @CoreExpr@. These have bogus
@Uniques@, but that's OK because the templates are supposed to be
instantiated before use.
-}
-- | Workers get local names. "CoreTidy" will externalise these if necessary
mkWorkerId :: Unique -> Id -> Type -> Id
mkWorkerId uniq unwrkr ty
= mkLocalId (mkDerivedInternalName mkWorkerOcc uniq (getName unwrkr)) ty
-- | Create a /template local/: a family of system local 'Id's in bijection with @Int@s, typically used in unfoldings
mkTemplateLocal :: Int -> Type -> Id
mkTemplateLocal i ty = mkSysLocal (fsLit "tpl") (mkBuiltinUnique i) ty
-- | Create a template local for a series of types
mkTemplateLocals :: [Type] -> [Id]
mkTemplateLocals = mkTemplateLocalsNum 1
-- | Create a template local for a series of types, but start from a specified template local
mkTemplateLocalsNum :: Int -> [Type] -> [Id]
mkTemplateLocalsNum n tys = zipWith mkTemplateLocal [n..] tys
{-
Note [Exported LocalIds]
~~~~~~~~~~~~~~~~~~~~~~~~
We use mkExportedLocalId for things like
- Dictionary functions (DFunId)
- Wrapper and matcher Ids for pattern synonyms
- Default methods for classes
- Pattern-synonym matcher and builder Ids
- etc
They are marked as "exported" in the sense that they should be kept alive
even if apparently unused in other bindings, and not dropped as dead
code by the occurrence analyser. (But "exported" here does not mean
"brought into lexical scope by an import declaration". Indeed these
things are always internal Ids that the user never sees.)
It's very important that they are *LocalIds*, not GlobalIds, for lots
of reasons:
* We want to treat them as free variables for the purpose of
dependency analysis (e.g. CoreFVs.exprFreeVars).
* Look them up in the current substitution when we come across
occurrences of them (in Subst.lookupIdSubst). Lacking this we
can get an out-of-date unfolding, which can in turn make the
simplifier go into an infinite loop (Trac #9857)
* Ensure that, for dfuns, the specialiser does not float dict uses
above their defns, which would prevent good simplifications from happening.
* The strictness analyser treats an occurrence of a GlobalId as
imported and assumes it contains strictness in its IdInfo, which
isn't true if the thing is bound in the same module as the
occurrence.
In CoreTidy we must make all these LocalIds into GlobalIds, so that in
importing modules (in --make mode) we treat them as properly global.
That is what is happening in, say tidy_insts in TidyPgm.
************************************************************************
* *
\subsection{Special Ids}
* *
************************************************************************
-}
-- | If the 'Id' is that for a record selector, extract the 'sel_tycon' and label. Panic otherwise
recordSelectorFieldLabel :: Id -> (TyCon, FieldLabel)
recordSelectorFieldLabel id
= case Var.idDetails id of
RecSelId { sel_tycon = tycon } -> (tycon, idName id)
_ -> panic "recordSelectorFieldLabel"
isRecordSelector :: Id -> Bool
isNaughtyRecordSelector :: Id -> Bool
isPrimOpId :: Id -> Bool
isFCallId :: Id -> Bool
isDataConWorkId :: Id -> Bool
isDFunId :: Id -> Bool
isClassOpId_maybe :: Id -> Maybe Class
isPrimOpId_maybe :: Id -> Maybe PrimOp
isFCallId_maybe :: Id -> Maybe ForeignCall
isDataConWorkId_maybe :: Id -> Maybe DataCon
isRecordSelector id = case Var.idDetails id of
RecSelId {} -> True
_ -> False
isNaughtyRecordSelector id = case Var.idDetails id of
RecSelId { sel_naughty = n } -> n
_ -> False
isClassOpId_maybe id = case Var.idDetails id of
ClassOpId cls -> Just cls
_other -> Nothing
isPrimOpId id = case Var.idDetails id of
PrimOpId _ -> True
_ -> False
isDFunId id = case Var.idDetails id of
DFunId {} -> True
_ -> False
dfunNSilent :: Id -> Int
dfunNSilent id = case Var.idDetails id of
DFunId ns _ -> ns
_ -> pprPanic "dfunSilent: not a dfun:" (ppr id)
isPrimOpId_maybe id = case Var.idDetails id of
PrimOpId op -> Just op
_ -> Nothing
isFCallId id = case Var.idDetails id of
FCallId _ -> True
_ -> False
isFCallId_maybe id = case Var.idDetails id of
FCallId call -> Just call
_ -> Nothing
isDataConWorkId id = case Var.idDetails id of
DataConWorkId _ -> True
_ -> False
isDataConWorkId_maybe id = case Var.idDetails id of
DataConWorkId con -> Just con
_ -> Nothing
isDataConId_maybe :: Id -> Maybe DataCon
isDataConId_maybe id = case Var.idDetails id of
DataConWorkId con -> Just con
DataConWrapId con -> Just con
_ -> Nothing
idDataCon :: Id -> DataCon
-- ^ Get from either the worker or the wrapper 'Id' to the 'DataCon'. Currently used only in the desugarer.
--
-- INVARIANT: @idDataCon (dataConWrapId d) = d@: remember, 'dataConWrapId' can return either the wrapper or the worker
idDataCon id = isDataConId_maybe id `orElse` pprPanic "idDataCon" (ppr id)
hasNoBinding :: Id -> Bool
-- ^ Returns @True@ of an 'Id' which may not have a
-- binding, even though it is defined in this module.
-- Data constructor workers used to be things of this kind, but
-- they aren't any more. Instead, we inject a binding for
-- them at the CorePrep stage.
-- EXCEPT: unboxed tuples, which definitely have no binding
hasNoBinding id = case Var.idDetails id of
PrimOpId _ -> True -- See Note [Primop wrappers]
FCallId _ -> True
DataConWorkId dc -> isUnboxedTupleCon dc
_ -> False
isImplicitId :: Id -> Bool
-- ^ 'isImplicitId' tells whether an 'Id's info is implied by other
-- declarations, so we don't need to put its signature in an interface
-- file, even if it's mentioned in some other interface unfolding.
isImplicitId id
= case Var.idDetails id of
FCallId {} -> True
ClassOpId {} -> True
PrimOpId {} -> True
DataConWorkId {} -> True
DataConWrapId {} -> True
       -- These are implied by their type or class decl;
-- remember that all type and class decls appear in the interface file.
-- The dfun id is not an implicit Id; it must *not* be omitted, because
-- it carries version info for the instance decl
_ -> False
idIsFrom :: Module -> Id -> Bool
idIsFrom mod id = nameIsLocalOrFrom mod (idName id)
{-
Note [Primop wrappers]
~~~~~~~~~~~~~~~~~~~~~~
Currently hasNoBinding claims that PrimOpIds don't have a curried
function definition. But actually they do, in GHC.PrimopWrappers,
which is auto-generated from prelude/primops.txt.pp. So actually, hasNoBinding
could return 'False' for PrimOpIds.
But we'd need to add something in CoreToStg to swizzle any unsaturated
applications of GHC.Prim.plusInt# to GHC.PrimopWrappers.plusInt#.
Nota Bene: GHC.PrimopWrappers is needed *regardless*, because it's
used by GHCi, which does not implement primops directly at all.
-}
isDeadBinder :: Id -> Bool
isDeadBinder bndr | isId bndr = isDeadOcc (idOccInfo bndr)
| otherwise = False -- TyVars count as not dead
{-
************************************************************************
* *
Evidence variables
* *
************************************************************************
-}
isEvVar :: Var -> Bool
isEvVar var = isPredTy (varType var)
isDictId :: Id -> Bool
isDictId id = isDictTy (idType id)
{-
************************************************************************
* *
\subsection{IdInfo stuff}
* *
************************************************************************
-}
---------------------------------
-- ARITY
idArity :: Id -> Arity
idArity id = arityInfo (idInfo id)
setIdArity :: Id -> Arity -> Id
setIdArity id arity = modifyIdInfo (`setArityInfo` arity) id
idCallArity :: Id -> Arity
idCallArity id = callArityInfo (idInfo id)
setIdCallArity :: Id -> Arity -> Id
setIdCallArity id arity = modifyIdInfo (`setCallArityInfo` arity) id
idRepArity :: Id -> RepArity
idRepArity x = typeRepArity (idArity x) (idType x)
-- | Returns true if an application to n args would diverge
isBottomingId :: Id -> Bool
isBottomingId id = isBottomingSig (idStrictness id)
idStrictness :: Id -> StrictSig
idStrictness id = strictnessInfo (idInfo id)
setIdStrictness :: Id -> StrictSig -> Id
setIdStrictness id sig = modifyIdInfo (`setStrictnessInfo` sig) id
zapIdStrictness :: Id -> Id
zapIdStrictness id = modifyIdInfo (`setStrictnessInfo` nopSig) id
-- | This predicate says whether the 'Id' has a strict demand placed on it or
-- has a type such that it can always be evaluated strictly (i.e an
-- unlifted type, as of GHC 7.6). We need to
-- check separately whether the 'Id' has a so-called \"strict type\" because if
-- the demand for the given @id@ hasn't been computed yet but @id@ has a strict
-- type, we still want @isStrictId id@ to be @True@.
isStrictId :: Id -> Bool
isStrictId id
= ASSERT2( isId id, text "isStrictId: not an id: " <+> ppr id )
(isStrictType (idType id)) ||
-- Take the best of both strictnesses - old and new
(isStrictDmd (idDemandInfo id))
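-- Editorial example (added, not from the original source): an Id of unlifted
-- type, say x :: Int#, satisfies 'isStrictId' even before demand analysis has
-- run, because isStrictType (idType x) holds; an Id of lifted type such as
-- y :: Int satisfies it only once its demand info is known to be strict.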
---------------------------------
-- UNFOLDING
idUnfolding :: Id -> Unfolding
-- Do not expose the unfolding of a loop breaker!
idUnfolding id
| isStrongLoopBreaker (occInfo info) = NoUnfolding
| otherwise = unfoldingInfo info
where
info = idInfo id
realIdUnfolding :: Id -> Unfolding
-- Expose the unfolding if there is one, including for loop breakers
realIdUnfolding id = unfoldingInfo (idInfo id)
setIdUnfoldingLazily :: Id -> Unfolding -> Id
setIdUnfoldingLazily id unfolding = modifyIdInfo (`setUnfoldingInfoLazily` unfolding) id
setIdUnfolding :: Id -> Unfolding -> Id
setIdUnfolding id unfolding = modifyIdInfo (`setUnfoldingInfo` unfolding) id
idDemandInfo :: Id -> Demand
idDemandInfo id = demandInfo (idInfo id)
setIdDemandInfo :: Id -> Demand -> Id
setIdDemandInfo id dmd = modifyIdInfo (`setDemandInfo` dmd) id
---------------------------------
-- SPECIALISATION
-- See Note [Specialisations and RULES in IdInfo] in IdInfo.lhs
idSpecialisation :: Id -> SpecInfo
idSpecialisation id = specInfo (idInfo id)
idCoreRules :: Id -> [CoreRule]
idCoreRules id = specInfoRules (idSpecialisation id)
idHasRules :: Id -> Bool
idHasRules id = not (isEmptySpecInfo (idSpecialisation id))
setIdSpecialisation :: Id -> SpecInfo -> Id
setIdSpecialisation id spec_info = modifyIdInfo (`setSpecInfo` spec_info) id
---------------------------------
-- CAF INFO
idCafInfo :: Id -> CafInfo
idCafInfo id = cafInfo (idInfo id)
setIdCafInfo :: Id -> CafInfo -> Id
setIdCafInfo id caf_info = modifyIdInfo (`setCafInfo` caf_info) id
---------------------------------
-- Occurrence INFO
idOccInfo :: Id -> OccInfo
idOccInfo id = occInfo (idInfo id)
setIdOccInfo :: Id -> OccInfo -> Id
setIdOccInfo id occ_info = modifyIdInfo (`setOccInfo` occ_info) id
zapIdOccInfo :: Id -> Id
zapIdOccInfo b = b `setIdOccInfo` NoOccInfo
{-
---------------------------------
-- INLINING
The inline pragma tells us to be very keen to inline this Id, but it's still
OK not to if optimisation is switched off.
-}
idInlinePragma :: Id -> InlinePragma
idInlinePragma id = inlinePragInfo (idInfo id)
setInlinePragma :: Id -> InlinePragma -> Id
setInlinePragma id prag = modifyIdInfo (`setInlinePragInfo` prag) id
modifyInlinePragma :: Id -> (InlinePragma -> InlinePragma) -> Id
modifyInlinePragma id fn = modifyIdInfo (\info -> info `setInlinePragInfo` (fn (inlinePragInfo info))) id
idInlineActivation :: Id -> Activation
idInlineActivation id = inlinePragmaActivation (idInlinePragma id)
setInlineActivation :: Id -> Activation -> Id
setInlineActivation id act = modifyInlinePragma id (\prag -> setInlinePragmaActivation prag act)
idRuleMatchInfo :: Id -> RuleMatchInfo
idRuleMatchInfo id = inlinePragmaRuleMatchInfo (idInlinePragma id)
isConLikeId :: Id -> Bool
isConLikeId id = isDataConWorkId id || isConLike (idRuleMatchInfo id)
{-
---------------------------------
-- ONE-SHOT LAMBDAS
-}
idOneShotInfo :: Id -> OneShotInfo
idOneShotInfo id = oneShotInfo (idInfo id)
-- | Returns whether the lambda associated with the 'Id' is certainly applied at most once
-- This one is the "business end", called externally.
-- It works on type variables as well as Ids, returning True for type variables.
-- Its main purpose is to encapsulate the Horrible State Hack
isOneShotBndr :: Var -> Bool
isOneShotBndr var
| isTyVar var = True
| otherwise = isOneShotLambda var
-- | Should we apply the state hack to values of this 'Type'?
stateHackOneShot :: OneShotInfo
stateHackOneShot = OneShotLam -- Or maybe ProbOneShot?
typeOneShot :: Type -> OneShotInfo
typeOneShot ty
| isStateHackType ty = stateHackOneShot
| otherwise = NoOneShotInfo
isStateHackType :: Type -> Bool
isStateHackType ty
| opt_NoStateHack
= False
| otherwise
= case tyConAppTyCon_maybe ty of
Just tycon -> tycon == statePrimTyCon
_ -> False
-- This is a gross hack. It claims that
-- every function over realWorldStatePrimTy is a one-shot
-- function. This is pretty true in practice, and makes a big
-- difference. For example, consider
-- a `thenST` \ r -> ...E...
-- The early full laziness pass, if it doesn't know that r is one-shot
-- will pull out E (let's say it doesn't mention r) to give
-- let lvl = E in a `thenST` \ r -> ...lvl...
-- When `thenST` gets inlined, we end up with
-- let lvl = E in \s -> case a s of (r, s') -> ...lvl...
-- and we don't re-inline E.
--
-- It would be better to spot that r was one-shot to start with, but
-- I don't want to rely on that.
--
-- Another good example is in fill_in in PrelPack.lhs. We should be able to
-- spot that fill_in has arity 2 (and when Keith is done, we will) but we can't yet.
-- | Returns whether the lambda associated with the 'Id' is certainly applied at most once.
-- You probably want to use 'isOneShotBndr' instead
isOneShotLambda :: Id -> Bool
isOneShotLambda id = case idOneShotInfo id of
OneShotLam -> True
_ -> False
isProbablyOneShotLambda :: Id -> Bool
isProbablyOneShotLambda id = case idOneShotInfo id of
OneShotLam -> True
ProbOneShot -> True
NoOneShotInfo -> False
setOneShotLambda :: Id -> Id
setOneShotLambda id = modifyIdInfo (`setOneShotInfo` OneShotLam) id
clearOneShotLambda :: Id -> Id
clearOneShotLambda id = modifyIdInfo (`setOneShotInfo` NoOneShotInfo) id
setIdOneShotInfo :: Id -> OneShotInfo -> Id
setIdOneShotInfo id one_shot = modifyIdInfo (`setOneShotInfo` one_shot) id
updOneShotInfo :: Id -> OneShotInfo -> Id
-- Combine the info in the Id with new info
updOneShotInfo id one_shot
| do_upd = setIdOneShotInfo id one_shot
| otherwise = id
where
do_upd = case (idOneShotInfo id, one_shot) of
(NoOneShotInfo, _) -> True
(OneShotLam, _) -> False
(_, NoOneShotInfo) -> False
_ -> True
-- The OneShotLambda functions simply fiddle with the IdInfo flag
-- But watch out: this may change the type of something else
-- f = \x -> e
-- If we change the one-shot-ness of x, f's type changes
zapInfo :: (IdInfo -> Maybe IdInfo) -> Id -> Id
zapInfo zapper id = maybeModifyIdInfo (zapper (idInfo id)) id
zapLamIdInfo :: Id -> Id
zapLamIdInfo = zapInfo zapLamInfo
zapFragileIdInfo :: Id -> Id
zapFragileIdInfo = zapInfo zapFragileInfo
zapIdDemandInfo :: Id -> Id
zapIdDemandInfo = zapInfo zapDemandInfo
zapIdUsageInfo :: Id -> Id
zapIdUsageInfo = zapInfo zapUsageInfo
{-
Note [transferPolyIdInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~
This transfer is used in two places:
FloatOut (long-distance let-floating)
SimplUtils.abstractFloats (short-distance let-floating)
Consider the short-distance let-floating:
f = /\a. let g = rhs in ...
Then if we float thus
g' = /\a. rhs
f = /\a. ...[g' a/g]....
we *do not* want to lose g's
* strictness information
* arity
* inline pragma (though that is a bit more debatable)
* occurrence info
Mostly this is just an optimisation, but it's *vital* to
transfer the occurrence info. Consider
NonRec { f = /\a. let Rec { g* = ..g.. } in ... }
where the '*' means 'LoopBreaker'. Then if we float we must get
Rec { g'* = /\a. ...(g' a)... }
NonRec { f = /\a. ...[g' a/g]....}
where g' is also marked as LoopBreaker. If not, terrible things
can happen if we re-simplify the binding (and the Simplifier does
sometimes simplify a term twice); see Trac #4345.
It's not so simple to retain
* worker info
* rules
so we simply discard those. Sooner or later this may bite us.
If we abstract wrt one or more *value* binders, we must modify the
arity and strictness info before transferring it. E.g.
f = \x. e
-->
g' = \y. \x. e
+ substitute (g' y) for g
Notice that g' has an arity one more than the original g
-}
transferPolyIdInfo :: Id -- Original Id
-> [Var] -- Abstract wrt these variables
-> Id -- New Id
-> Id
transferPolyIdInfo old_id abstract_wrt new_id
= modifyIdInfo transfer new_id
where
arity_increase = count isId abstract_wrt -- Arity increases by the
-- number of value binders
old_info = idInfo old_id
old_arity = arityInfo old_info
old_inline_prag = inlinePragInfo old_info
old_occ_info = occInfo old_info
new_arity = old_arity + arity_increase
old_strictness = strictnessInfo old_info
new_strictness = increaseStrictSigArity arity_increase old_strictness
transfer new_info = new_info `setArityInfo` new_arity
`setInlinePragInfo` old_inline_prag
`setOccInfo` old_occ_info
`setStrictnessInfo` new_strictness
|
forked-upstream-packages-for-ghcjs/ghc
|
compiler/basicTypes/Id.hs
|
bsd-3-clause
| 28,863
| 0
| 14
| 7,552
| 4,455
| 2,425
| 2,030
| 380
| 6
|
{-|
Module : Data.Validation
Copyright : (c) Marcin Mrotek, 2016
License : BSD3
Maintainer : marcin.jan.mrotek@gmail.com
Stability : experimental
Accumulating Either-like type.
-}
{-# LANGUAGE
DeriveDataTypeable
, DeriveFunctor
, DeriveFoldable
, DeriveGeneric
#-}
module Data.Validation (Validation(..)) where
import Control.Applicative
import Control.Lens
import Data.Bifunctor
import Data.Bifoldable
import Data.Bitraversable
import Data.Data
import Data.Functor.Alt
import Data.Semigroup
import GHC.Generics
data Validation e a = Success a | Failure e
deriving (Show, Eq, Ord, Data, Functor, Foldable, Generic)
instance Semigroup e => Semigroup (Validation e a) where
(<>) = app
where
app (Failure e1) (Failure e2) = Failure (e1 <> e2)
app v@(Success _) _ = v
app _ v@(Success _) = v
instance Monoid e => Monoid (Validation e a) where
mempty = Failure mempty
mappend = app
where
app (Failure e1) (Failure e2) = Failure (e1 `mappend` e2)
app v@(Success _) _ = v
app _ v@(Success _) = v
instance Semigroup e => Applicative (Validation e) where
pure = Success
(<*>) = app
where
app (Success f) (Success a) = Success (f a)
app (Failure e) (Success _) = Failure e
app (Success _) (Failure e) = Failure e
app (Failure e1) (Failure e2) = Failure (e1 <> e2)
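-- Illustrative usage (added example, not part of the original module): unlike
-- 'Either', errors accumulate through '<*>' thanks to the 'Semigroup'
-- constraint above, while the 'Alt'/'Alternative' combinators below simply
-- keep the first 'Success':
--
-- >>> (,) <$> Failure ["no name"] <*> Failure ["no age"] :: Validation [String] (Int, Int)
-- Failure ["no name","no age"]
--
-- >>> (+) <$> Success 1 <*> Success 2 :: Validation [String] Int
-- Success 3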
altValidation :: Validation e a -> Validation e a -> Validation e a
altValidation (Failure _) v = v
altValidation v@(Success _) _ = v
instance (Monoid e, Semigroup e) => Alternative (Validation e) where
empty = Failure mempty
(<|>) = altValidation
instance Semigroup e => Apply (Validation e) where
(<.>) = (<*>)
instance Alt (Validation e) where
(<!>) = altValidation
instance Traversable (Validation e) where
traverse f v =
case v of
Success a -> Success <$> f a
Failure e -> pure $ Failure e
instance Swapped Validation where
swapped = iso swap swap
where
swap (Success a) = Failure a
swap (Failure e) = Success e
instance Bifunctor Validation where
bimap f g v =
case v of
Success a -> Success (g a)
Failure e -> Failure (f e)
first f v =
case v of
Success a -> Success a
Failure e -> Failure (f e)
second = fmap
instance Bifoldable Validation where
bifoldMap f g v =
case v of
Success a -> g a
Failure e -> f e
instance Bitraversable Validation where
bitraverse f g v =
case v of
Success a -> Success <$> g a
Failure e -> Failure <$> f e
|
marcinmrotek/pipes-key-value-csv
|
src/Data/Validation.hs
|
bsd-3-clause
| 2,592
| 0
| 11
| 694
| 955
| 487
| 468
| 74
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Analyzer.Checker where
import System.FilePath.Find
import System.FilePath.Posix
import qualified Data.ByteString as BS
import qualified Data.Map as M
import Control.Applicative
import qualified Data.ByteString.Char8 as BSC
import Text.Regex
import Data.Maybe(isJust)
import qualified Data.List as L
import Data.List(foldl')
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
-- import Debug.Trace
-- trc xs = trace (concat xs) -- debug on
trc _ a = a -- debug off
type TrieMap a = M.Map a (Trie a)
data Trie a = Empty | Node (Maybe [a]) (TrieMap a) deriving (Show)
insert :: (Eq a,Ord a) => [a] -> Trie a -> Trie a
insert = insert' [] where
insert' rs c Empty = insert' rs c (Node Nothing M.empty)
insert' rs [] (Node _ d) = Node (Just rs) d
insert' rs (x:xs) (Node b dic)
| x `M.member` dic = Node b (M.adjust (insert' (x:rs) xs) x dic)
| otherwise = Node b (M.insert x (insert' (x:rs) xs Empty) dic)
member :: (Ord a,Show a) => [a] -> Trie a -> Maybe [a]
member _ Empty = Nothing
member [] (Node b _) = b
member (x:xs) (Node _ tr) = maybe Nothing (member xs) (M.lookup x tr)
fromList :: (Eq a, Ord a) => [[a]] -> Trie a
fromList = foldr insert Empty
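-- Added note (illustrative, not from the original file): 'insert' stores the
-- *reverse* of the consumed key as the payload, so looking up a reversed,
-- lowercased path recovers the correctly-cased path components in forward
-- order, e.g. (informally)
--
--   member (map CI ["test.h","bin","a"]) (toTrie ["a/BIN/Test.h"])
--     == Just (map CI ["a","BIN","Test.h"])
--
-- which is exactly what 'bestMatch' below relies on.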
(=~) :: String -> String -> Bool
(=~) inp pat = isJust $ matchRegex (mkRegex pat) inp
headerFiles = find always (extension ==? ".h")
headerAndSourceFiles = find always (extension ==? ".h" ||? extension ==? ".c")
newtype CaseInsensitive = CI { content :: T.Text }
instance Eq CaseInsensitive where
(==) (CI a) (CI b) = T.toCaseFold a == T.toCaseFold b
instance Ord CaseInsensitive where
compare (CI a) (CI b) = compare (T.toCaseFold a) (T.toCaseFold b)
instance Show CaseInsensitive where show (CI a) = T.unpack a
toTrie :: [String] -> Trie CaseInsensitive
toTrie hss = fromList [map CI ts | ts <- validPaths]
where allPaths = map T.pack hss
validPaths = concatMap reverseSplitAll allPaths
reverseSplitAll = map reverse . L.tails . splitP
splitP = map T.pack . splitDirectories . T.unpack
type Include = (Int,BS.ByteString)
cutIncludePath :: T.Text -> (T.Text,T.Text,T.Text)
cutIncludePath i
| T.unpack i =~ "^#include[ ]{1,}\"" = ((head . tail . T.split (== '"')) i,"\"","\"")
| T.unpack i =~ "^#include[ ]{1,}<" =
(T.dropAround (\c->(c == '<') || (c == '>')) (fst $ T.breakOn ">" $ snd $ T.breakOn "<" i),"<",">")
| otherwise = error $ "no match for include path: " ++ T.unpack i
fixIncludes :: BSC.ByteString -> Trie CaseInsensitive -> Maybe BSC.ByteString
fixIncludes _content availablePaths
| n == 0 = Nothing
| otherwise = Just $ BSC.unlines $ reverse finalLines
where allLines = BSC.lines _content
(finalLines,n) = foldl' maybeFixLine ([],0) allLines
maybeFixLine (acc,nn) x =
maybe (x:acc,nn)
(\f->(E.encodeUtf8 f:acc,nn+1))
(fixForLine x)
fixForLine x
| BSC.unpack x =~ "^#include[ ]{1,}[<\"]" =
m >>= \s -> return (T.concat ["#include ",start,s,end])
| otherwise = Nothing
where m = bestMatch cut availablePaths
(cut,start,end) = cutIncludePath (E.decodeUtf8 x)
-- | Finds the correct cased match for the include string
-- takes the include string and a list of possible file paths
-- e.g., if include = "a/bin/test.h" and the possible file paths are
-- ["a/BIN/Test.h","bin/Test.h"], it will result in Just "a/BIN/Test.h"
bestMatch :: T.Text -> Trie CaseInsensitive -> Maybe T.Text
bestMatch include validCaseTrie = member reversedIncludeCI validCaseTrie >>=
\matchedPath ->
if (content <$> matchedPath) == (content <$> includeCI)
then Nothing -- include path was spelled correctly
else Just $ T.intercalate "/" [content x | x <- matchedPath]
where
reversedIncludeCI = map CI (reverse $ splitP include)
includeCI = map CI (splitP include)
splitP = map T.pack . splitDirectories . T.unpack
|
marcmo/includeSpellChecker
|
src/Analyzer/Checker.hs
|
bsd-3-clause
| 3,929
| 0
| 14
| 850
| 1,488
| 785
| 703
| 77
| 3
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.EXT.CopyTexture
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.EXT.CopyTexture (
-- * Extension Support
glGetEXTCopyTexture,
gl_EXT_copy_texture,
-- * Functions
glCopyTexImage1DEXT,
glCopyTexImage2DEXT,
glCopyTexSubImage1DEXT,
glCopyTexSubImage2DEXT,
glCopyTexSubImage3DEXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Functions
|
haskell-opengl/OpenGLRaw
|
src/Graphics/GL/EXT/CopyTexture.hs
|
bsd-3-clause
| 709
| 0
| 4
| 98
| 56
| 42
| 14
| 10
| 0
|
module GUI where
import Run (run,numberOfImages)
import ImageQuery (
ImageQueryStatement(GetImageQueryResult,SetImageQueryParameter),
ImageQuery(ImageOfAverage,IslandImage,LineImage,TableQuery,AreaHistogram),
Polarity(Dark,Bright),
Orientation(Horizontal,Vertical),
Channel(Red,Green,Blue),
ImageQueryParameter(Channel,SubRect,StencilImage,Threshold,Smoothing,Polarity),
TableQuery(ValueInPoint,AverageAroundPoint,AverageOfImage,IslandQuery),
IslandQuery(NumberOfIslands,AverageAreaOfIslands,AverageOutlineOfIslands),
Power(One,OneOverTwo,ThreeOverTwo),
forStencil)
import ImageQuery.Parser (imageQueriesParser)
import ImageQuery.Printer (imageQueriesPrinter,imageQueryStatementPrinter)
import ImageLoading (loadImage)
import ImageProcessing (juicyToImage,binarize,chooseChannel,RGB(red))
import Text.Parsec.String (parseFromFile)
import Graphics.UI.WX (
start,
frame,Frame,button,Button,
singleListBox,SingleListBox,
fileSaveDialog,fileOpenDialog,errorDialog,dirOpenDialog,infoDialog,
Prop((:=)),set,text,items,itemCount,sz,position,pt,selection,text,
on,command,
Layout,layout,widget,row,column,minsize,boxed,fill,label,
panel,Panel,choice,entry,staticText,StaticText)
import qualified Graphics.UI.WX as Wx (get,set)
import MVC (
runMVC,Model,asPipe,
managed,asSink,asInput,
spawn,Buffer(Single),atomically,forkIO,
Output,send,
Input,recv)
import Pipes (await,yield)
import Control.Monad.State.Class (get,put,gets)
import Control.Exception (catch,SomeException)
import Control.Monad (forever,when)
import Control.Applicative (Applicative(pure,(<*>)),(<$>),(*>))
import Data.Monoid (mconcat)
-- | The state of the application is a program to be run.
data Program = Program {
imageQueryStatements :: [ImageQueryStatement],
-- ^ The list of statements to be run.
inputPath :: InputPath}
-- ^ The path where the input images are from.
-- | User requests.
data Request =
RequestSaveProgram FilePath |
-- ^ Save the program to the location.
RequestLoadProgram [ImageQueryStatement] |
-- ^ Replace the current program with the list of statements.
RequestRunProgram |
-- ^ Run the current program.
RequestAddStatement Int ImageQueryStatement |
-- ^ Insert the statement at the position into the current list of statements.
RequestInputPath InputPath |
-- ^ Change the input path.
RequestDeleteStatement Int
-- ^ Delete the statement.
-- | Actions on the outside world.
data Response =
ResponseSaveProgram FilePath [ImageQueryStatement] |
-- ^ Save the list of statements to the path.
ResponseProgramChanged [ImageQueryStatement] |
-- ^ Update the GUI because the program has changed.
ResponseRunProgram InputPath [ImageQueryStatement] |
-- ^ Run the program on the input path.
ResponseInputPath InputPath
-- ^ Update the GUI because the input path has changed.
-- | A path to a folder with image files used as input.
type InputPath = FilePath
-- | Effect of the given request on the program and responses.
model :: Model Program Request Response
model = asPipe (forever (do
request <- await
case request of
RequestSaveProgram filepath -> do
imagequerystatements <- gets imageQueryStatements
yield (ResponseSaveProgram filepath imagequerystatements)
RequestLoadProgram imagequerystatements -> do
inputpath <- gets inputPath
put (Program imagequerystatements inputpath)
yield (ResponseProgramChanged imagequerystatements)
RequestRunProgram -> do
Program imagequerystatements inputpath <- get
yield (ResponseRunProgram inputpath imagequerystatements)
RequestAddStatement index imagequerystatement -> do
imagequerystatements <- gets imageQueryStatements
inputpath <- gets inputPath
let (prefix,suffix) = splitAt index imagequerystatements
imagequerystatements' = prefix ++ [imagequerystatement] ++ suffix
put (Program imagequerystatements' inputpath)
yield (ResponseProgramChanged imagequerystatements')
RequestInputPath inputpath -> do
program <- get
put (program {inputPath = inputpath})
yield (ResponseInputPath inputpath)
RequestDeleteStatement index -> do
imagequerystatements <- gets imageQueryStatements
inputpath <- gets inputPath
when (index < length imagequerystatements) (do
let (prefix,suffix) = splitAt index imagequerystatements
imagequerystatements' = prefix ++ tail suffix
put (Program imagequerystatements' inputpath)
yield (ResponseProgramChanged imagequerystatements'))))
-- | The GUI.
gui :: IO ()
gui = start (do
-- Channels for interaction between the GUI and the pure model.
(saveProgramO,saveProgramI) <- spawn Single
(loadProgramO,loadProgramI) <- spawn Single
(runProgramO,runProgramI) <- spawn Single
(addStatementO,addStatementI) <- spawn Single
(programChangedO,programChangedI) <- spawn Single
(changeInputPathO,changeInputPathI) <- spawn Single
(inputPathChangedO,inputPathChangedI) <- spawn Single
(deleteStatementO,deleteStatementI) <- spawn Single
(progressO,progressI) <- spawn Single
-- GUI elements.
parentFrame <- frame [text := "Image Processing",position := pt 100 100]
saveProgramButton <- createSaveProgramButton parentFrame saveProgramO
loadProgramButton <- createLoadProgramButton parentFrame loadProgramO
runProgramButton <- createRunProgramButton parentFrame runProgramO
programListBox <- createProgramListBox parentFrame programChangedI
addStatementPanel <- createAddStatementPanel programListBox parentFrame addStatementO
inputPathButton <- createInputPathButton parentFrame changeInputPathO
inputPathText <- createInputPathText parentFrame inputPathChangedI
deleteStatementButton <- createDeleteStatementButton programListBox parentFrame deleteStatementO
progressText <- createProgressText parentFrame progressI
-- Initialize the GUI.
let wx = managed (\k -> let
inputs = [saveProgramI,loadProgramI,runProgramI,addStatementI,changeInputPathI,deleteStatementI]
sink (ResponseSaveProgram filepath imagequerystatements) = do
writeFile filepath (imageQueriesPrinter imagequerystatements)
sink (ResponseProgramChanged imagequerystatements) = do
atomically (send programChangedO imagequerystatements)
return ()
sink (ResponseRunProgram inputpath imagequerystatements) = do
n <- numberOfImages inputpath
catch (run (\i -> progress i n) inputpath imagequerystatements) (\e ->
errorDialog parentFrame "Error during run" (show (e :: SomeException)))
infoDialog parentFrame "Run finished!" "Success!"
sink (ResponseInputPath inputpath) = do
atomically (send inputPathChangedO inputpath)
return ()
in k (asSink sink,asInput (mconcat inputs)))
progress i n = atomically (send progressO (i,n)) >> return ()
frameLayout = row 5 [
column 5 [
minsize (sz 500 500) (widget programListBox),
row 5 [
widget deleteStatementButton,
widget loadProgramButton,
widget saveProgramButton,
widget runProgramButton],
widget progressText],
column 5 [
widget addStatementPanel,
boxed "Inputs" (row 5 [
widget inputPathButton,
fill (widget inputPathText)])]]
-- Fork a thread that runs the pure model
forkIO (runMVC (Program [] ".") model wx >> return ())
-- Set the layout of the parent frame to 'frameLayout'
set parentFrame [layout := frameLayout])
-- | Create a button to save the program.
createSaveProgramButton :: Frame () -> Output Request -> IO (Button ())
createSaveProgramButton parentFrame saveProgramO = button parentFrame attributes where
attributes = [text := "Save", on command := sendSaveProgramRequest]
sendSaveProgramRequest = do
maybeFilepath <- fileSaveDialog
parentFrame True True "Save Image Queries"
[("Image Query File",["*.imagequery"])] "" ""
case maybeFilepath of
Nothing -> return ()
Just filepath -> do
atomically (send saveProgramO (RequestSaveProgram filepath))
return ()
-- | Create a button to load a program.
createLoadProgramButton :: Frame () -> Output Request -> IO (Button ())
createLoadProgramButton parentFrame loadProgramO = button parentFrame attributes where
attributes = [text := "Load", on command := sendLoadProgramRequest]
sendLoadProgramRequest = do
maybeFilepath <- fileOpenDialog
parentFrame True True "Load Image Queries"
[("Image Query File",["*.imagequery"]),("All Files",["*"])] "" ""
case maybeFilepath of
Nothing -> return ()
Just filepath -> do
parseResult <- parseFromFile imageQueriesParser filepath
case parseResult of
Left message -> errorDialog parentFrame "Parse Error" (show message)
Right imagequerystatements -> do
imagequerystatements' <- forStencil imagequerystatements ((\(StencilImage stencilpath _ ) -> do
image <- loadImage stencilpath
return (StencilImage stencilpath (Just (binarize 0 (chooseChannel red (juicyToImage image)))))))
atomically (send loadProgramO (RequestLoadProgram imagequerystatements'))
return ()
-- | Create a list box to hold the current program
createProgramListBox :: Frame () -> Input [ImageQueryStatement] -> IO (SingleListBox ())
createProgramListBox parentFrame programChangedI = do
programListBox <- singleListBox parentFrame [
items := ["NEW STATEMENT"],
selection := 0]
forkIO (forever (do
maybeImageQueryStatements <- atomically (recv programChangedI)
case maybeImageQueryStatements of
Nothing -> return ()
Just imagequerystatements -> do
index <- Wx.get programListBox selection
itemcount <- Wx.get programListBox itemCount
set programListBox [
items := map imageQueryStatementPrinter imagequerystatements ++ ["NEW STATEMENT"],
selection := if length imagequerystatements >= itemcount
then index + 1
else index]))
return programListBox
-- | Create a button to run the program.
createRunProgramButton :: Frame () -> Output Request -> IO (Button ())
createRunProgramButton parentFrame runProgramO = button parentFrame attributes where
attributes = [text := "Run", on command := sendRunProgramRequest]
sendRunProgramRequest = do
atomically (send runProgramO RequestRunProgram)
return ()
-- | Create a button to delete a statement.
createDeleteStatementButton :: SingleListBox () -> Frame () -> Output Request -> IO (Button ())
createDeleteStatementButton programListBox parentFrame deleteStatementO = button parentFrame attributes where
attributes = [text := "Delete statement", on command := sendDeleteStatementRequest]
sendDeleteStatementRequest = do
index <- Wx.get programListBox selection
atomically (send deleteStatementO (RequestDeleteStatement index))
return ()
-- | Create a button to change the input path.
createInputPathButton :: Frame () -> Output Request -> IO (Button ())
createInputPathButton parentFrame inputPathO =
button parentFrame [
text := "Choose input path",
on command := do
maybeFilepath <- dirOpenDialog parentFrame False "Choose input path" ""
case maybeFilepath of
Nothing -> return ()
Just inputpath -> do
atomically (send inputPathO (RequestInputPath inputpath))
return ()]
-- | Create a text field to contain the chosen input path.
createInputPathText :: Frame () -> Input InputPath -> IO (StaticText ())
createInputPathText parentFrame inputPathChangedI = do
inputPathText <- staticText parentFrame [text := "."]
forkIO (forever (do
maybeInputPath <- atomically (recv inputPathChangedI)
case maybeInputPath of
Nothing -> return ()
Just inputpath -> Wx.set inputPathText [text := inputpath]))
return inputPathText
-- | Create a text field to contain the progress.
createProgressText :: Frame () -> Input (Int,Int) -> IO (StaticText ())
createProgressText parentFrame progressI = do
progressText <- staticText parentFrame [text := "Waiting"]
forkIO (forever (do
maybeProgress <- atomically (recv progressI)
case maybeProgress of
Nothing -> return ()
Just (i,n) -> Wx.set progressText [text := "Running: " ++ show i ++ "/" ++ show n]))
return progressText
-- | Create a panel with various controls and buttons to add program statements.
createAddStatementPanel :: SingleListBox () -> Frame () -> Output Request -> IO (Panel ())
createAddStatementPanel programListBox parentFrame addStatementO = do
addStatementPanel <- panel parentFrame []
let createStatementPanel buttonText statementControl = do
statementPanel <- panel addStatementPanel []
(optionLayouts,getStatement) <- unStatementControl statementControl statementPanel
let sendStatement = do
index <- Wx.get programListBox selection
statement <- getStatement
atomically (send addStatementO (RequestAddStatement index statement))
return ()
statementButton <- button statementPanel [text := buttonText, on command := sendStatement]
Wx.set statementPanel [layout := row 5 (widget statementButton:optionLayouts)]
return statementPanel
averageImagePanel <- createStatementPanel "Average image" averageImageControl
islandImagePanel <- createStatementPanel "Island images" islandImageControl
lineImagePanel <- createStatementPanel "Line image" lineImageControl
areaHistogramPanel <- createStatementPanel "Area histograms" areaHistogramControl
channelPanel <- createStatementPanel "Channel" channelControl
subrectPanel <- createStatementPanel "Subrect" subrectControl
stencilPanel <- createStatementPanel "Stencil" stencilControl
thresholdPanel <- createStatementPanel "Threshold" thresholdControl
smoothingPanel <- createStatementPanel "Smoothing" smoothingControl
polarityPanel <- createStatementPanel "Polarity" polarityControl
valueInPointPanel <- createStatementPanel "Value in point" valueInPointControl
averageAroundPointPanel <- createStatementPanel "Average around point" averageAroundPointControl
averageOfImagePanel <- createStatementPanel "Average of image" averageOfImageControl
islandQueryPanel <- createStatementPanel "Island query" islandQueryControl
Wx.set addStatementPanel [layout := column 5 [
boxed "Parameters" (column 5 [
widget channelPanel,
widget subrectPanel,
widget stencilPanel,
widget thresholdPanel,
widget smoothingPanel,
widget polarityPanel]),
boxed "Output" (column 5 [
widget averageImagePanel,
widget islandImagePanel,
widget lineImagePanel,
widget areaHistogramPanel]),
boxed "Table Entry" (column 5 [
widget valueInPointPanel,
widget averageAroundPointPanel,
widget averageOfImagePanel,
widget islandQueryPanel])]]
return addStatementPanel
-- | A statement control is an input field that allows setting the arguments of a statement.
data StatementControl a = StatementControl {unStatementControl :: Panel () -> IO ([Layout],IO a)}
instance Functor StatementControl where
fmap function = StatementControl . fmap (fmap (fmap (fmap function))) . unStatementControl
instance Applicative StatementControl where
pure value = StatementControl (const (return ([],return value)))
(<*>) functionStatementControl valueStatementControl = StatementControl (\parentPanel -> do
(functionLayouts,getFunction) <- unStatementControl functionStatementControl parentPanel
(valueLayouts,getValue) <- unStatementControl valueStatementControl parentPanel
return (functionLayouts ++ valueLayouts,do
function <- getFunction
value <- getValue
return (function value)))
-- | Given a list of names associated with values yields a statement control
-- that is a drop down menu with the given names.
choiceControl :: [(String,a)] -> StatementControl a
choiceControl choices = StatementControl (\parentPanel -> do
c <- choice parentPanel [items := map fst choices,selection := 0]
let getChoice = do
s <- Wx.get c selection
return (map snd choices !! s)
return ([widget c],getChoice))
-- | An input field for a number.
numberControl :: (Read a) => StatementControl a
numberControl = StatementControl (\parentPanel -> do
e <- entry parentPanel [text := "0"]
let getNumber = do
n <- Wx.get e text
return (read n)
return ([widget e],getNumber))
-- | Give a label to a statement control.
tag :: String -> StatementControl ()
tag name = StatementControl (\_ -> do
return ([label name],return ()))
-- | Average image has no arguments.
averageImageControl :: StatementControl ImageQueryStatement
averageImageControl = pure (GetImageQueryResult ImageOfAverage)
-- | Island image has no arguments
islandImageControl :: StatementControl ImageQueryStatement
islandImageControl = pure (GetImageQueryResult IslandImage)
-- | Line image has the arguments "orientation", "start x", "start y" and "length".
lineImageControl :: StatementControl ImageQueryStatement
lineImageControl =
(\orientation x y l -> GetImageQueryResult (LineImage orientation x y l)) <$>
(tag "orientation:" *> choiceControl [("Horizontal",Horizontal),("Vertical",Vertical)]) <*>
(tag "start x:" *> numberControl) <*>
(tag "start y:" *> numberControl) <*>
(tag "length:" *> numberControl)
-- | Area histogram has the arguments "bin size" and "exponent"
areaHistogramControl :: StatementControl ImageQueryStatement
areaHistogramControl =
(\binsize power -> GetImageQueryResult (AreaHistogram binsize power)) <$>
(tag "bin size:" *> numberControl) <*>
(tag "exponent" *> choiceControl [("One",One),("One over two",OneOverTwo),("Three over two",ThreeOverTwo)])
-- | Channel choice has one argument "color".
channelControl :: StatementControl ImageQueryStatement
channelControl =
SetImageQueryParameter . Channel <$>
choiceControl [("Red",Red),("Green",Green),("Blue",Blue)]
-- | Subrect choice has four arguments "upper left x", "upper left y", "width" and "height"
subrectControl :: StatementControl ImageQueryStatement
subrectControl =
(\x y w h -> SetImageQueryParameter (SubRect (x,y,w,h))) <$>
(tag "upper left x:" *> numberControl) <*>
(tag "upper left y:" *> numberControl) <*>
(tag "width" *> numberControl) <*>
(tag "height" *> numberControl)
-- | Stencil choice prompts the user to choose an input image
stencilControl :: StatementControl ImageQueryStatement
stencilControl = StatementControl (\parentPanel -> do
let getStatement = do
maybeFilepath <- fileOpenDialog
parentPanel True True "Stencil Image"
[("Image Files",["*.png","*.bmp","*.gif"])] "" ""
case maybeFilepath of
Nothing -> return (SetImageQueryParameter (StencilImage "" Nothing))
Just filepath -> catch (do
image <- loadImage filepath
return (SetImageQueryParameter (
StencilImage filepath (Just (
binarize 0 (chooseChannel red (juicyToImage image)))))))
(\e -> do
errorDialog parentPanel "Loading Stencil Failed!" (show (e :: SomeException))
return (SetImageQueryParameter (StencilImage "" Nothing)))
return ([],getStatement))
-- | Threshold choice has a single number argument.
thresholdControl :: StatementControl ImageQueryStatement
thresholdControl =
SetImageQueryParameter . Threshold <$>
numberControl
-- | Smoothing choice has a single number argument.
smoothingControl :: StatementControl ImageQueryStatement
smoothingControl =
SetImageQueryParameter . Smoothing <$>
(tag "half width:" *> numberControl)
-- | Polarity is the choice between dark islands and bright islands.
polarityControl :: StatementControl ImageQueryStatement
polarityControl =
SetImageQueryParameter . Polarity <$>
choiceControl [("Dark",Dark),("Bright",Bright)]
-- | Query for a value at a point has two arguments "x" and "y"
valueInPointControl :: StatementControl ImageQueryStatement
valueInPointControl =
(\x y -> GetImageQueryResult (TableQuery (ValueInPoint x y))) <$>
(tag "x:" *> numberControl) <*>
(tag "y:" *> numberControl)
-- | Query for the average around a point has three arguments "x", "y" and "half width".
averageAroundPointControl :: StatementControl ImageQueryStatement
averageAroundPointControl =
(\x y r -> GetImageQueryResult (TableQuery (AverageAroundPoint x y r))) <$>
(tag "x:" *> numberControl) <*>
(tag "y:" *> numberControl) <*>
(tag "half width:" *> numberControl)
-- | Average of image (table query) has no arguments.
averageOfImageControl :: StatementControl ImageQueryStatement
averageOfImageControl = pure (GetImageQueryResult (TableQuery AverageOfImage))
-- | Queries concerning islands are a choice of "number", "average area" and "average outline"
islandQueryControl :: StatementControl ImageQueryStatement
islandQueryControl =
GetImageQueryResult . TableQuery . IslandQuery <$>
choiceControl [
("Number",NumberOfIslands),
("Average area",AverageAreaOfIslands),
("Average outline",AverageOutlineOfIslands)]
|
phischu/pem-images
|
src/GUI.hs
|
bsd-3-clause
| 22,858
| 0
| 36
| 5,479
| 5,257
| 2,690
| 2,567
| 386
| 6
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE Trustworthy #-}
{-# OPTIONS_HADDOCK show-extensions #-}
{-|
Module : $Header$
Copyright : (c) 2016 Deakin Software & Technology Innovation Lab
License : BSD3
Maintainer : Rhys Adams <rhys.adams@deakin.edu.au>
Stability : unstable
Portability : portable
Eclogues REST API definition. See also the <https://github.com/dstil/eclogues#api README>.
-}
module Eclogues.API (
API
, Get, Mutating, Location, Pointer
-- * Specific endpoints
, ListJobs, JobStatus, KillJob, PatchJob, JobOnScheduler
, DeleteJob, CreateJob
, ListBoxes, BoxStatus, SealBox, CreateBox, DeleteBox
, ListEntities
, UploadFile, DownloadFile
, ListContainers, GetContainer, AddContainer, DeleteContainer
, AtomicActions
-- * Error details
, JobError (..), displayError
, StageExpectation (..), displayStageExpectation, satisfiesStageExpectation
-- * Other types
, AbsoluteURI (..), parseAbsoluteURI, uriPath
, Action ( ActCreateJob, ActKillJob, ActPatchJob, ActDeleteJob
, ActCreateBox, ActSealBox, ActDeleteBox )
, DependencyPatch (..)
, actCreate
) where
import qualified Eclogues.Job as Job
import Control.Applicative (Alternative, (<|>), empty)
import Control.DeepSeq (NFData)
import Data.Aeson (ToJSON (..), FromJSON (..))
import qualified Data.Aeson as Aeson
import Data.Aeson.TH (deriveJSON, defaultOptions)
import Data.Char (toLower)
import Data.List (dropWhileEnd)
import Data.List.NonEmpty (NonEmpty, toList)
import Data.Monoid ((<>))
import Data.Text (Text, intercalate, pack, unpack)
import GHC.Generics (Generic)
import Lens.Micro (Lens', has, lens)
import qualified Network.URI as URI
import Servant.API
( (:>), (:<|>), Capture, JSON, OctetStream, ReqBody, StdMethod (..) )
import qualified Servant.API as S
import Servant.Streaming (StreamBody, StreamResponseGet)
import Web.HttpApiData (FromHttpApiData (..), ToHttpApiData (..))
type Get = S.Get '[JSON]
type Mutating (method :: StdMethod) = S.Verb method 204 '[S.PlainText] S.NoContent
type Location a = S.Headers '[S.Header "Location" AbsoluteURI] a
type Pointer (method :: StdMethod) code a = S.Verb method code '[S.PlainText] (Location a)
type ListJobs = "jobs" :> Get [Job.Status]
type JobStatus = "jobs" :> Capture "id" Job.Name :> Get Job.Status
type KillJob = "jobs" :> Capture "id" Job.Name :> "kill" :> Mutating 'POST
type PatchJob = "jobs" :> Capture "id" Job.Name :> ReqBody '[JSON] DependencyPatch :> Mutating 'PATCH
type JobOnScheduler = "jobs" :> Capture "id" Job.Name :> "scheduler" :> Pointer 'GET 303 S.NoContent
type DeleteJob = "jobs" :> Capture "id" Job.Name :> Mutating 'DELETE
type CreateJob = "jobs" :> ReqBody '[JSON] Job.InputSpec :> Pointer 'POST 201 Job.Name
type ListBoxes = "boxes" :> Get [Job.BoxStatus]
type BoxStatus = "boxes" :> Capture "id" Job.Name :> Get Job.BoxStatus
type SealBox = "boxes" :> Capture "id" Job.Name :> "seal" :> Mutating 'POST
type CreateBox = "boxes" :> ReqBody '[JSON] Job.BoxSpec :> Pointer 'POST 201 S.NoContent
type DeleteBox = "boxes" :> Capture "id" Job.Name :> Mutating 'DELETE
type ListEntities = "entities" :> Get [Job.AnyStatus]
type UploadFile = "files" :> Capture "id" Job.Name :> Capture "filename" Job.FileId :> StreamBody '[OctetStream] :> Mutating 'PUT
type DownloadFile = "files" :> Capture "id" Job.Name :> Capture "filename" Job.FileId :> StreamResponseGet '[OctetStream]
type ListContainers = "containers" :> Get [Job.ContainerId]
type AddContainer = "containers" :> Capture "id" Job.ContainerId :> StreamBody '[OctetStream] :> Mutating 'PUT
type GetContainer = "containers" :> Capture "id" Job.ContainerId :> StreamResponseGet '[OctetStream]
type DeleteContainer = "containers" :> Capture "id" Job.ContainerId :> Mutating 'DELETE
type AtomicActions = "atomic" :> ReqBody '[JSON] [Action] :> Mutating 'POST
-- NB: Make sure the module header is updated with this.
-- | Eclogues API definition.
type API = ListJobs :<|> JobStatus :<|> KillJob
:<|> PatchJob :<|> JobOnScheduler :<|> DeleteJob :<|> CreateJob
:<|> ListBoxes :<|> BoxStatus :<|> CreateBox :<|> SealBox
:<|> DeleteBox
:<|> ListEntities
:<|> UploadFile :<|> DownloadFile
:<|> ListContainers :<|> GetContainer :<|> AddContainer
:<|> DeleteContainer
:<|> AtomicActions
data DependencyPatch = DependencyPatch { addDependsOn :: Job.Dependencies
, removeDependsOn :: [Job.Name] }
deriving (Show, Eq, Generic, NFData)
data Action = ActCreateJob { _a1_spec :: Job.Spec } -- field names just for JSON
| ActKillJob Job.Name
| ActPatchJob { _a2_name :: Job.Name, _a2_patch :: DependencyPatch }
| ActDeleteJob Job.Name
| ActCreateBox { _a3_spec :: Job.BoxSpec }
| ActSealBox Job.Name
| ActDeleteBox Job.Name
deriving (Show, Eq, Generic, NFData)
actCreate :: Job.AnySpec -> Action
actCreate (Job.JobSpec s) = ActCreateJob s
actCreate (Job.BoxSpec s) = ActCreateBox s
-- | A URI with specified scheme and host. The 'uriPath' never ends with @/@.
newtype AbsoluteURI = AbsoluteURI { getAbsoluteURI :: URI.URI } deriving (Show, Eq, Generic, NFData)
parseAbsoluteURI :: (Alternative f) => String -> f AbsoluteURI
parseAbsoluteURI = maybe empty (pure . AbsoluteURI . stripSlash) . URI.parseAbsoluteURI
uriPath :: Lens' AbsoluteURI String
uriPath = lens (URI.uriPath . getAbsoluteURI) (\(AbsoluteURI u) p' -> AbsoluteURI . stripSlash $ u { URI.uriPath = preSlash p' })
where
preSlash [] = []
preSlash s@('/':_) = s
preSlash s = '/':s
stripSlash :: URI.URI -> URI.URI
stripSlash u = u{ URI.uriPath = dropWhileEnd (== '/') $ URI.uriPath u }
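-- Added example (illustrative, not part of the original module): because both
-- 'parseAbsoluteURI' and the 'uriPath' setter go through 'stripSlash',
-- parsing "http://example.com/api/" yields a value whose 'uriPath' is "/api",
-- and setting the path to "v1/" stores it as "/v1".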
instance ToJSON AbsoluteURI where
toJSON = Aeson.String . toUrlPiece
instance FromJSON AbsoluteURI where
parseJSON (Aeson.String s) = parseAbsoluteURI (unpack s) <|> fail "not a valid absolute URI"
parseJSON _ = fail "URI should be string"
instance FromHttpApiData AbsoluteURI where
parseUrlPiece = maybe (Left "not a valid absolute URI") Right . parseAbsoluteURI . unpack
instance ToHttpApiData AbsoluteURI where
toUrlPiece = pack . show . getAbsoluteURI
-- | An expectation of the stage of a job that must be met for an action to be
-- successful.
data StageExpectation = ExpectExtant
| ExpectWaiting -- ^ @has 'Job._Waiting'@
| ExpectTerminated -- ^ 'Job.isTerminationStage'
| ExpectNonFailed -- ^ @(/= 'Job.NeverAvailable') . 'Job.jobAvailability'@
deriving (Show, Eq, Generic, NFData)
satisfiesStageExpectation :: StageExpectation -> Job.Stage -> Bool
satisfiesStageExpectation ExpectExtant = const True
satisfiesStageExpectation ExpectWaiting = has Job._Waiting
satisfiesStageExpectation ExpectTerminated = Job.isTerminationStage
satisfiesStageExpectation ExpectNonFailed = (/= Job.NeverAvailable) . Job.jobAvailability
displayStageExpectation :: StageExpectation -> Text
displayStageExpectation ExpectExtant = "must exist"
displayStageExpectation ExpectTerminated = "must have terminated"
displayStageExpectation ExpectWaiting = "must be waiting for a dependency"
displayStageExpectation ExpectNonFailed = "cannot have failed"
-- | The various ways requests can fail.
data JobError =
NameUsed
-- | Target resource does not exist.
| NoSuch
| JobMustBe { _id :: Job.Name, _expects :: StageExpectation }
| ContainerMustExist Job.ContainerId
| FileAlreadyExists { _job :: Job.Name, _filename :: Job.FileId }
| UploadClosed Job.Name
-- | A job cannot be deleted because some dependants have not terminated.
| OutstandingDependants (NonEmpty Job.Name)
-- | Cannot transition from the current stage to the requested stage.
| InvalidStageTransition Text
-- | Cannot contact the scheduler.
| SchedulerInaccessible
| AtomicActionError { _action :: Action, _error :: JobError }
deriving (Show, Eq, Generic, NFData)
-- | A user-friendly description of a 'JobError'.
displayError :: JobError -> Text
displayError NameUsed = "Id already in use"
displayError NoSuch = "No such resource"
displayError (ContainerMustExist n) = "Container " <> Job.nameText (Job.getContainerName n) <> " must already exist"
displayError (JobMustBe n ex) = "Job " <> Job.nameText n <> " " <> displayStageExpectation ex
displayError (FileAlreadyExists jn fn) = "Job " <> Job.nameText jn <> " already has a file " <> Job.getFileIdText fn
displayError (UploadClosed jn) = "Job " <> Job.nameText jn <> " is closed to new files"
displayError (OutstandingDependants ns) = "The following dependants have not completed: " <> intercalate ", " (Job.nameText <$> toList ns)
displayError (InvalidStageTransition msg) = msg
displayError (AtomicActionError a e) = "Error when processing " <> pack (show a) <> ": " <> displayError e
displayError SchedulerInaccessible = "Cannot contact the backend scheduler"
$(deriveJSON defaultOptions{ Aeson.constructorTagModifier = drop 6 } ''StageExpectation)
$(deriveJSON defaultOptions{ Aeson.fieldLabelModifier = drop 1 } ''JobError)
$(deriveJSON defaultOptions{ Aeson.fieldLabelModifier = uncurry (++) . fmap (fmap toLower) . splitAt 7 } ''DependencyPatch)
$(deriveJSON defaultOptions{ Aeson.sumEncoding = Aeson.TaggedObject "action" "id", Aeson.constructorTagModifier = drop 3, Aeson.fieldLabelModifier = drop 4 } ''Action)
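-- Added note (illustrative, inferred from the options above rather than taken
-- from the original module): with these modifiers, a value like ActKillJob
-- applied to a job name should serialise roughly as
-- {"action":"KillJob","id":<name>}, while a record constructor such as
-- 'ActCreateJob' inlines its field as {"action":"CreateJob","spec":...}.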
|
rimmington/eclogues
|
eclogues/src/Eclogues/API.hs
|
bsd-3-clause
| 9,925
| 0
| 23
| 2,009
| 2,462
| 1,360
| 1,102
| 156
| 3
|