code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module Zendesk.Internal.MockServer where
import Zendesk.API
import Data.Monoid ((<>))
import qualified Network.Wai.Handler.Warp as Warp
import Servant
-- | A 'User' with every field set to 'Nothing'; the fixtures below are
-- built from it by record update.
emptyUser :: User
emptyUser = User Nothing Nothing Nothing Nothing

-- | Mock user with an id and a name but no email.
issac :: User
issac = emptyUser
  { userId = Just 1
  , userName = Just "Issac Newton"
  }

-- | Mock user with an id and a name but no email.
albert :: User
albert = emptyUser
  { userId = Just 2
  , userName = Just "Albert Einstein"
  }

-- | Mock user with id, name and email; this is also the user returned by
-- a successful 'authCheck'.
fred :: User
fred = emptyUser
  { userId = Just 3
  , userName = Just "Fred Flintstone"
  , userEmail = Just "fred.flintstone@gmail.com"
  }

-- | All mock users served by the mock server.
users :: [User]
users = [ issac, albert, fred ]
-- | First mock ticket fixture (no URL or tags).
ticket1 :: Ticket
ticket1 = Ticket
  { ticketId = Just 1
  , ticketUrl = Nothing
  , ticketSubject = Just "This is a subject"
  , ticketDescription = Just "This is a description"
  , ticketTags = Nothing
  }

-- | Second mock ticket fixture (no URL or tags).
ticket2 :: Ticket
ticket2 = Ticket
  { ticketId = Just 2
  , ticketUrl = Nothing
  , ticketSubject = Just "This is another subject"
  , ticketDescription = Just "This is another description"
  , ticketTags = Nothing
  }

-- | All mock tickets served by the mock server.
exampleTickets :: [Ticket]
exampleTickets = [ticket1, ticket2]

-- | A single page containing every mock ticket; no pagination links.
ticketPage :: TicketPage
ticketPage = TicketPage
  { ticketPageCount = length exampleTickets
  , ticketPageNextPage = Nothing
  , ticketPagePrevPage = Nothing
  , ticketPageTickets = exampleTickets
  }
-- | 'BasicAuthCheck' holds the handler we'll use to verify a username and
-- password.  Only the fixed credentials "fred" / "password" are accepted,
-- yielding the 'fred' mock user; anything else is 'Unauthorized'.
authCheck :: BasicAuthCheck User
authCheck = BasicAuthCheck verify
  where
    verify (BasicAuthData username password)
      | username == "fred" && password == "password" = return (Authorized fred)
      | otherwise = return Unauthorized
-- |
-- We need to supply our handlers with the right Context. In this case,
-- Basic Authentication requires a Context Entry with the 'BasicAuthCheck'
-- value. This context is then supplied to 'server' and threaded to the
-- BasicAuth HasServer handlers.
basicAuthServerContext :: Context (BasicAuthCheck User ': '[])
basicAuthServerContext = authCheck :. EmptyContext
-- |
-- An implementation of our server. Here is where we pass all the handlers to
-- our endpoints. In particular, for the BasicAuth protected handler, we need
-- to supply a function that takes 'User' as an argument.
basicAuthServer :: Server API
basicAuthServer = getUsers :<|> getTickets :<|> postTicket
  where
    -- Every handler receives the authenticated 'User'; none of them use it.
    getUsers _user = return (Users users)
    getTickets _user = return ticketPage
    postTicket _user _ticketCreate = return (TicketCreateResponse (Just 0))
-- | The server to run: just the basic-auth protected handlers.
server :: Server API
server = basicAuthServer

-- | WAI application serving the API with the basic-auth context installed.
app :: Application
app = serveWithContext api basicAuthServerContext server

-- | TCP port the mock server listens on.
port :: Int
port = 8080
-- | Start the mock server: announce the port, then block serving requests.
main :: IO ()
main = putStrLn ("Listening on " <> show port) >> Warp.run port app
|
steshaw/zendesk
|
src/Zendesk/Internal/MockServer.hs
|
bsd-2-clause
| 2,916
| 0
| 13
| 572
| 614
| 346
| 268
| 76
| 2
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Graphics.GL.Low.Internal.Types where
import Data.Data (Data, Typeable)
import Foreign.Storable (Storable)
import Graphics.GL (GLuint)
import Graphics.GL.Low.Classes
-- | Newtype over the raw 'GLuint' of a texture unit.
newtype TextureUnit = TextureUnit { fromTextureUnit :: GLuint }
  deriving (Eq, Ord, Read, Show, Num, Integral, Real, Enum, Storable)

-- | Newtype over the raw 'GLuint' of a vertex attribute location.
newtype AttribLocation = AttribLocation { fromAttribLocation :: GLuint }
  deriving (Eq, Ord, Read, Show, Num, Integral, Real, Enum, Storable)

-- | Newtype over the raw 'GLuint' of a uniform location.
newtype UniformLocation = UniformLocation { fromUniformLocation :: GLuint }
  deriving (Eq, Ord, Read, Show, Num, Integral, Real, Enum, Storable)
-- | Handle to a shader program.
newtype Program = Program { fromProgram :: GLuint }
  deriving (Eq, Ord, Read, Show, Storable, Data, Typeable)

-- | Handle to a shader object.
newtype Shader = Shader { fromShader :: GLuint }
  deriving (Eq, Ord, Read, Show, Storable, Data, Typeable)

-- | Handle to a VBO (vertex buffer object).
newtype VBO = VBO { fromVBO :: GLuint }
  deriving (Eq, Ord, Read, Show, Storable, Data, Typeable)

instance GLObject VBO where
  glObjectName (VBO n) = fromIntegral n

instance BufferObject VBO

-- | Handle to an element array buffer object.
newtype ElementArray = ElementArray { fromElementArray :: GLuint }
  deriving (Eq, Ord, Read, Show, Storable, Data, Typeable)

instance GLObject ElementArray where
  glObjectName (ElementArray n) = fromIntegral n

instance BufferObject ElementArray
-- | A framebuffer object is an alternative rendering destination. Once an FBO
-- is bound to framebuffer binding target, it is possible to attach images
-- (textures or RBOs) for color, depth, or stencil rendering.
newtype FBO = FBO { fromFBO :: GLuint }
  deriving (Eq, Ord, Read, Show, Storable, Data, Typeable)

instance Framebuffer FBO where
  framebufferName = glObjectName

instance GLObject FBO where
  glObjectName (FBO n) = fromIntegral n

-- | An RBO is a kind of image object used for rendering. The only thing
-- you can do with an RBO is attach it to an FBO.
-- The phantom parameter @a@ is unused in this module; presumably it tracks
-- the image format — confirm against the public Graphics.GL.Low API.
newtype RBO a = RBO { unRBO :: GLuint }
  deriving (Eq, Ord, Read, Show, Storable, Data, Typeable)

instance GLObject (RBO a) where
  glObjectName (RBO n) = fromIntegral n
-- | A 2D texture. A program can sample a texture if it has been bound to
-- the appropriate texture unit.
-- The phantom parameter @a@ is unused here; presumably it tracks the texel
-- format — confirm against the public Graphics.GL.Low API.
newtype Tex2D a = Tex2D { fromTex2D :: GLuint }
  deriving (Eq, Ord, Read, Show, Storable, Data, Typeable)

instance Texture (Tex2D a) where

instance GLObject (Tex2D a) where
  glObjectName (Tex2D n) = fromIntegral n

-- | A cubemap texture is just six 2D textures. A program can sample a cubemap
-- texture if it has been bound to the appropriate texture unit.
newtype CubeMap a = CubeMap { fromCubeMap :: GLuint }
  deriving (Eq, Ord, Read, Show, Storable, Data, Typeable)

instance Texture (CubeMap a) where

instance GLObject (CubeMap a) where
  glObjectName (CubeMap n) = fromIntegral n

-- | Handle to a VAO (vertex array object).
newtype VAO = VAO { fromVAO :: GLuint }
  deriving (Eq, Ord, Read, Show, Storable, Data, Typeable)

instance GLObject VAO where
  glObjectName (VAO n) = fromIntegral n
|
sgraf812/lowgl
|
Graphics/GL/Low/Internal/Types.hs
|
bsd-2-clause
| 3,164
| 0
| 8
| 593
| 868
| 483
| 385
| 51
| 0
|
-- | The standard \(n\)-simplex
module Math.Topology.SSet.NSimplex where
import Math.Topology.SSet
import Math.Topology.SSet.Effective
-- | The standard simplex, identified by its dimension @n@ (rendered Δ^n).
newtype NSimplex = NSimplex { simplexDimension :: Int }

instance Show NSimplex where
  show (NSimplex n) = "Δ^" ++ show n

-- | A simplex of Δ^n: a list of vertex indices.  Validity ('isGeomSimplex')
-- requires the list to be weakly increasing with at most n+1 entries.
newtype NSimplexSimplex = NSimplexSimplex [Int]
  deriving (Eq, Ord, Show)
-- | True iff the list is sorted in non-decreasing order.  Empty and
-- singleton lists are trivially ordered.
isOrdered :: (Ord a) => [a] -> Bool
isOrdered xs = and (zipWith (<=) xs (drop 1 xs))
-- | Remove the element at the given index; an out-of-range index leaves
-- the list unchanged.
deleteAt :: Int -> [a] -> [a]
deleteAt i xs = [x | (j, x) <- zip [0 ..] xs, j /= i]
instance SSet NSimplex where
  type GeomSimplex NSimplex = NSimplexSimplex

  -- Dimension of a simplex is one less than its vertex count.
  geomSimplexDim (NSimplex d) (NSimplexSimplex vs) = length vs - 1

  -- A valid simplex of Δ^d has at most d+1 vertices, listed in
  -- non-decreasing order (repeats encode degenerate simplices).
  isGeomSimplex (NSimplex d) (NSimplexSimplex vs) = length vs <= d + 1 && isOrdered vs

  -- The i-th face deletes the i-th vertex; a face of a non-degenerate
  -- simplex here is always non-degenerate.
  geomFace (NSimplex d) (NSimplexSimplex vs) i = NonDegen (NSimplexSimplex (deleteAt i vs))
-- TODO: could be more efficient
-- | All ways of picking the given number of elements from the list,
-- preserving the original order within each selection.
choose :: Int -> [a] -> [[a]]
choose k ys = case (k, ys) of
  (0, _) -> [[]]
  (_, []) -> []
  (_, x : xs) -> map (x :) (choose (k - 1) xs) ++ choose k xs
instance FiniteType NSimplex where
  -- Basis in dimension i: all (i+1)-element ordered selections from the
  -- d+1 vertices {0..d}.
  geomBasis (NSimplex d) i = NSimplexSimplex <$> choose (i + 1) [0 .. d]

instance Pointed NSimplex where
  -- The base point is the vertex 0.
  basepoint _ = NSimplexSimplex [0]

instance Effective NSimplex
|
mvr/at
|
src/Math/Topology/SSet/NSimplex.hs
|
bsd-3-clause
| 1,268
| 0
| 10
| 231
| 558
| 296
| 262
| -1
| -1
|
module Package08e where
import GHC.Hs.MyTypes
import GHC.Hs.Types
import GHC.Hs.Utils
import UniqFM
|
sdiehl/ghc
|
testsuite/tests/package/package08e.hs
|
bsd-3-clause
| 100
| 0
| 4
| 11
| 25
| 17
| 8
| 5
| 0
|
{-# OPTIONS -Wall #-}
{-# LANGUAGE OverloadedStrings #-}
module Web.ChatWork
(
me
, Me(..)
, myStatus
, MyStatus(..)
, myTasks
, createRoomMessage
, CreateMessage(..)
, RateLimit(..)
, getChatWorkTokenFromEnv
) where
import Data.ByteString.Char8 as BS
import System.Environment ( lookupEnv )
import Web.ChatWork.Internal
import Web.ChatWork.Endpoints.Base
import Web.ChatWork.Endpoints.Me as Me
import Web.ChatWork.Endpoints.My as My
import Web.ChatWork.Endpoints.Room
-- | Fetch the authenticated user's own account data, given an API token.
me :: ByteString -> IO (ChatWorkAPI Me)
me token = get token Me.endpoint

-- | Fetch the authenticated user's status, given an API token.
myStatus :: ByteString -> IO (ChatWorkAPI MyStatus)
myStatus token = get token My.statusEndpoint
-- | Fetch the authenticated user's task list, given an API token.
-- (Dropped a redundant '$' and folded the signature onto one line to match
-- the sibling 'me' and 'myStatus' definitions.)
myTasks :: ByteString -> IO (ChatWorkAPI [Task])
myTasks token = get token My.tasksEndpoint
-- | Read the API token from the CHATWORK_TOKEN environment variable,
-- returning 'Nothing' when the variable is unset.
getChatWorkTokenFromEnv :: IO (Maybe ByteString)
getChatWorkTokenFromEnv = fmap BS.pack <$> lookupEnv "CHATWORK_TOKEN"
|
eiel/haskell-chatwork
|
src/Web/ChatWork.hs
|
bsd-3-clause
| 891
| 0
| 9
| 140
| 250
| 146
| 104
| 29
| 1
|
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-|
Module : Database.Memcache.SASL
Description : SASL Authentication
Copyright : (c) David Terei, 2016
License : BSD
Maintainer : code@davidterei.com
Stability : stable
Portability : GHC
SASL authentication support for Memcached.
-}
module Database.Memcache.SASL (
-- * Types
Authentication(..), Username, Password,
-- * Operations
authenticate
) where
import Database.Memcache.Errors
import Database.Memcache.Socket
import Database.Memcache.Types
import Control.Exception (throwIO)
import Control.Monad
import Data.ByteString.Char8 as B8 (ByteString, pack, singleton)
-- | Perform SASL authentication with the server.  'NoAuth' is a no-op;
-- 'Auth' runs the PLAIN mechanism via 'saslAuthPlain'.
authenticate :: Socket -> Authentication -> IO ()
{-# INLINE authenticate #-}
authenticate _ NoAuth = return ()
authenticate s (Auth u p) = saslAuthPlain s u p
-- NOTE: For correctness really should check that PLAIN auth is supported first
-- but we'll just assume it is as that's all mainline and other implementations
-- support and one exception is nearly as good as another.

-- | Perform SASL PLAIN authentication: send the NUL-delimited credentials,
-- then throw if the reply has the wrong opcode or a non-'NoError' status.
saslAuthPlain :: Socket -> Username -> Password -> IO ()
{-# INLINE saslAuthPlain #-}
saslAuthPlain s u p = do
  -- PLAIN payload layout: empty authzid, NUL, user, NUL, password.
  let credentials = singleton '\0' <> u <> singleton '\0' <> p
      msg = emptyReq { reqOp = ReqSASLStart (B8.pack "PLAIN") credentials }
  send s msg
  r <- recv s
  -- The reply must be a SASL_START response; anything else is a protocol
  -- error surfaced as an exception.
  when (resOp r /= ResSASLStart) $
    throwIO $ wrongOp r "SASL_START"
  case resStatus r of
    NoError -> return ()
    rs -> throwIO $ OpError rs
-- | List available SASL authentication methods. We could call this but as we
-- only support PLAIN as does the Memcached server, we simply assume PLAIN
-- authentication is supported and try that.
saslListMechs :: Socket -> IO B8.ByteString
{-# INLINE saslListMechs #-}
saslListMechs s = do
  let msg = emptyReq { reqOp = ReqSASLList }
  send s msg
  r <- recv s
  -- Extract the mechanism list; any other opcode is a protocol error.
  v <- case resOp r of
    ResSASLList v -> return v
    _ -> throwIO $ wrongOp r "SASL_LIST"
  case resStatus r of
    NoError -> return v
    rs -> throwIO $ OpError rs
|
dterei/memcache-hs
|
Database/Memcache/SASL.hs
|
bsd-3-clause
| 2,178
| 0
| 15
| 509
| 448
| 231
| 217
| 38
| 3
|
{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Functor.Identity
-- Copyright : (c) Andy Gill 2001,
-- (c) Oregon Graduate Institute of Science and Technology 2001
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : ross@soi.city.ac.uk
-- Stability : experimental
-- Portability : portable
--
-- The identity functor and monad.
--
-- This trivial type constructor serves two purposes:
--
-- * It can be used with functions parameterized by functor or monad classes.
--
-- * It can be used as a base monad to which a series of monad
-- transformers may be applied to construct a composite monad.
-- Most monad transformer modules include the special case of
-- applying the transformer to 'Identity'. For example, @State s@
-- is an abbreviation for @StateT s 'Identity'@.
--
-- /Since: 4.8.0.0/
-----------------------------------------------------------------------------
module Data.Functor.Identity (
Identity(..),
) where
import Control.Monad.Fix
import Data.Coerce
import Data.Foldable
-- | Identity functor and monad. (a non-strict monad)
--
-- /Since: 4.8.0.0/
newtype Identity a = Identity { runIdentity :: a }
  deriving (Eq, Ord, Traversable)
-- | This instance would be equivalent to the derived instances of the
-- 'Identity' newtype if the 'runIdentity' field were removed
instance (Read a) => Read (Identity a) where
  -- Parses @Identity x@, requiring parentheses in contexts of
  -- precedence greater than 10 (function application).
  readsPrec d = readParen (d > 10) $ \ r ->
    [(Identity x,t) | ("Identity",s) <- lex r, (x,t) <- readsPrec 11 s]
-- | This instance would be equivalent to the derived instances of the
-- 'Identity' newtype if the 'runIdentity' field were removed
instance (Show a) => Show (Identity a) where
  -- Renders as @Identity x@, parenthesised above precedence 10, mirroring
  -- the Read instance above.
  showsPrec d (Identity x) = showParen (d > 10) $
    showString "Identity " . showsPrec 11 x
-- ---------------------------------------------------------------------------
-- Identity instances for Functor and Monad

-- | Folding a one-element container: most methods just project the wrapped
-- value, and 'coerce' implements the methods that are representationally
-- the identity.
instance Foldable Identity where
  foldMap = coerce
  elem = (. runIdentity) #. (==)
  foldl = coerce
  foldl' = coerce
  -- A one-element structure always has a first/last element, so the
  -- normally-partial foldl1/foldr1 are total here.
  foldl1 _ = runIdentity
  foldr f z (Identity x) = f x z
  foldr' = foldr
  foldr1 _ = runIdentity
  length _ = 1
  maximum = runIdentity
  minimum = runIdentity
  null _ = False
  product = runIdentity
  sum = runIdentity
  toList (Identity x) = [x]
instance Functor Identity where
  -- fmap on a newtype is representationally the identity.
  fmap = coerce

instance Applicative Identity where
  pure = Identity
  (<*>) = coerce

instance Monad Identity where
  return = Identity
  -- Bind simply unwraps and applies; there is no effect to sequence.
  m >>= k = k (runIdentity m)

instance MonadFix Identity where
  -- Value recursion: tie the knot through 'f' with 'fix'.
  mfix f = Identity (fix (runIdentity . f))
-- | Internal (non-exported) 'Coercible' helper for 'elem'
--
-- See Note [Function coercion] in "Data.Foldable" for more details.
--
-- The first argument is ignored at runtime; it only directs the coercion,
-- so the composition costs nothing.
(#.) :: Coercible b c => (b -> c) -> (a -> b) -> a -> c
(#.) _f = coerce
|
jstolarek/ghc
|
libraries/base/Data/Functor/Identity.hs
|
bsd-3-clause
| 3,203
| 0
| 11
| 843
| 550
| 318
| 232
| 44
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Health.Instances where
import Numeric.Units.Dimensional.Prelude hiding (Activity)
import Data.HasTime
import Data.Fitness.SetRep
import Data.Fitness.TimeDistance
import Data.Vitals
import LuminescentDreams.Database.TimeSeries
import Control.Lens ((^.))
import Data.Aeson
instance ToJSON Weight where
  -- Serialise the weight in kilograms next to its timestamp.
  toJSON w = object [ "date" .= (w ^. wTime)
                    , "weight" .= ((w ^. wWeight) /~ kilo gram)
                    ]

instance FromJSON Weight where
  -- Reattach the kilogram unit on the way in; non-objects fail the parse.
  parseJSON (Object obj) = Weight <$> obj .: "date" <*> ((*~ kilo gram) `fmap` (obj .: "weight"))
  parseJSON _ = mempty

instance Storable Weight where
  timestamp = (^. wTime)
instance ToJSON TimeDistance where
  -- Distances serialise in meters and durations in seconds; the optional
  -- fields pass through the Maybe functor (absent -> null).
  toJSON td = object [ "date" .= (td ^. tdTime)
                     , "activity" .= (td ^. tdActivity)
                     , "distance" .= ((/~ meter) <$> (td ^. tdDistance))
                     , "duration" .= ((/~ second) <$> (td ^. tdDuration))
                     , "comments" .= (td ^. tdComments)
                     ]

instance FromJSON TimeDistance where
  parseJSON (Object obj) = do
    date <- obj .: "date"
    activity <- obj .: "activity"
    -- (.:?) with (.!= Nothing): a missing or null key becomes Nothing
    -- instead of failing the whole parse.
    distance <- obj .:? "distance" .!= Nothing
    duration <- obj .:? "duration" .!= Nothing
    comments <- obj .:? "comments" .!= Nothing
    pure $ TimeDistance date
                        activity
                        ((*~ meter) `fmap` distance)
                        ((*~ second) `fmap` duration)
                        comments
  parseJSON _ = mempty

instance ToJSON TimeDistanceActivity where
  toJSON Cycling = String "Cycling"
  toJSON Running = String "Running"

instance FromJSON TimeDistanceActivity where
  -- Only the exact strings emitted by toJSON are accepted.
  parseJSON (String "Cycling") = pure Cycling
  parseJSON (String "Running") = pure Running
  parseJSON _ = mempty

instance Storable TimeDistance where
  timestamp = (^. tdTime)
instance ToJSON SetRep where
  toJSON sr =
    object [ "date" .= (sr ^. srTime)
           , "activity" .= (sr ^. srActivity)
           , "sets" .= (sr ^. srSets)
           ]

instance FromJSON SetRep where
  parseJSON (Object obj) = do
    date <- obj .: "date"
    activity <- obj .: "activity"
    sets <- obj .: "sets"
    -- NOTE(review): 'read' is partial — a malformed word in "sets" will
    -- throw when the value is forced, rather than failing this parse;
    -- consider readMaybe with an explicit failure.
    pure $ SetRep date activity (read `fmap` words sets)
  parseJSON _ = mempty

instance ToJSON SetRepActivity where
  toJSON Crunches = String "Crunches"
  toJSON Pushups = String "Pushups"
  toJSON Situps = String "Situps"

instance FromJSON SetRepActivity where
  -- Only the exact strings emitted by toJSON are accepted.
  parseJSON (String "Crunches") = pure Crunches
  parseJSON (String "Pushups") = pure Pushups
  parseJSON (String "Situps") = pure Situps
  parseJSON _ = mempty

instance Storable SetRep where
  timestamp = (^. srTime)
instance (HasTime obj, Storable obj) => HasTime (Sample obj) where
  -- Delegate to the wrapped datum's time lens.
  time' = sData . time'

instance ToJSON obj => ToJSON (Sample obj) where
  -- A sample is its id plus the wrapped datum's own JSON form.
  toJSON (Sample id_ val) =
    object [ "id" .= id_
           , "data" .= val
           ]

instance FromJSON obj => FromJSON (Sample obj) where
  parseJSON (Object obj) =
    Sample <$> obj .: "id"
           <*> obj .: "data"
  parseJSON _ = mempty
|
savannidgerinel/health
|
src/Health/Instances.hs
|
bsd-3-clause
| 3,472
| 0
| 12
| 1,123
| 982
| 517
| 465
| 83
| 0
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
module Math.Automata.Simple
( NFA
, DFA
, flattenDFAStates
, faToDot
, textToNFA
, subsetConstruction
) where
import Control.Arrow ( first )
import Control.Applicative ( (<$>), (<*>), (<*), (*>) )
import Control.Lens ( use, _1, _2, (.=), (%=) )
import Control.Monad ( unless, void )
import Control.Monad.Trans.RWS ( evalRWS, tell )
import Control.Monad.Trans.State ( get, put, execState )
import Data.Foldable ( Foldable, foldr, for_, any )
import Data.Functor.Identity ( Identity( Identity ) )
import Data.Hashable ( Hashable )
import Data.HashMap.Lazy ( HashMap )
import qualified Data.HashMap.Lazy as HM
import Data.HashSet ( HashSet )
import qualified Data.HashSet as HS
import qualified Data.List.NonEmpty as NEL
import Data.Maybe ( fromMaybe, mapMaybe )
import Data.Text.Lazy ( Text )
import qualified Data.Text.Lazy as T
import qualified Data.Text.Format as TF
import Text.Parsec ( oneOf, noneOf, many1, between, string, newline, option
, char, sepBy1, many, try, parse, eof, (<?>), lookAhead )
import Text.Parsec.Text.Lazy ( Parser )
import Prelude hiding ( foldr, any )
import qualified Data.HashSet.NonEmpty as NES
import qualified Data.HashMap.NonEmpty as NEHM
-- |Represents a finite automata. The main data structure is the transition
-- map, from states to labels to states. Abstract over DFA/NFA by taking
-- a Foldable parameter @f@ that determines the shape of the targets of
-- a given source/label. Using NES means we have at least one initial state
-- and NEHM means that if we have an entry in the trans map, there is at
-- least one transition out of the src state.
data FA f s l where
  FA :: (Eq s, Eq l, Hashable s, Hashable l, Foldable f)
     => HashMap s (NEHM.NonEmpty l (f s)) -- ^ Transitions
     -> NES.NonEmpty s                    -- ^ Initial states
     -> HashSet s                         -- ^ Final states
     -> FA f s l

-- | An NFA may have several target states per (source, label) pair.
type NFA s l = FA NES.NonEmpty s l

-- TODO: enforce that each state has a transition with each label.
-- | A DFA has exactly one target state per (source, label) pair,
-- expressed by instantiating @f@ at 'Identity'.
type DFA s l = FA Identity s l

deriving instance (Show (f s), Show s, Show l) => Show (FA f s l)
-- |Render an FA to a graphviz DOT format graph.  States are numbered in
-- discovery order; edges are labelled with the (possibly braced) set of
-- labels between each (src,tgt) pair.
faToDot :: forall f . (Foldable f) => FA f Text Text -> Text
faToDot (FA trans inits finals) =
  T.unlines [ "digraph NFA {"
            , "rankdir = LR;"
            , "node [shape = point]; init"
            -- Intercalate avoids an extra newline after transitions
            , T.intercalate "\n" stateTransLines
            , "}"
            ]
  where
    -- Processed source states, next state ID, state -> ID map
    initState = (HS.empty, (0 :: Integer, HM.empty))
    -- The RWS writer collects output lines; its state is 'initState'.
    ((), stateTransLines) = evalRWS (for_ inits go) () initState
    twistedTrans = twistTrans trans
    -- We want to output a single transition for each (src,tgt) pair, so we
    -- need to "twist" the transition map into a map: s -> s -> NonEmptySet l
    -- We use the polymorphic type here, to make sure we don't make a mistake
    -- since instantiated at Text, the input/result maps have the same type!
    twistTrans :: (Eq s, Hashable s, Eq l, Hashable l)
               => HashMap s (NEHM.NonEmpty l (f s))
               -> HashMap s (NEHM.NonEmpty s (NES.NonEmpty l))
    twistTrans =
      -- Here, we don't know if (f s) will have any elements - if it
      -- doesn't, we don't want to add an entry in the new target -> lbls map
      HM.foldrWithKey srcMappingFolder HM.empty
      where
        srcMappingFolder src lbls =
          case foldLbls lbls of
            Nothing -> id
            Just nehm -> HM.insert src nehm
        foldLbls = NEHM.foldrWithKey lblFolder Nothing
        -- For each target, @t@, add a mapping t -> lbl
        lblFolder l ts m = foldr (insertNESUnion l) m ts
        -- If we already have a NEHM, union in the lbl/target mapping,
        -- otherwise construct a new NEHM
        insertNESUnion lbl tgt mbNEHM =
          let sLbl = NES.singleton lbl
          in Just $ case mbNEHM of
               Nothing -> NEHM.singleton tgt sLbl
               Just nehm -> NEHM.insertWith NES.union tgt sLbl nehm
    -- Append one DOT output line via the writer.
    emitLine l = tell [l]
    -- Depth-first walk from the initial states, emitting node and edge
    -- lines for every reachable state exactly once.
    go src = do
      processed <- haveProcessedSrc src
      unless processed $ do
        markSrcProcessed src
        let srcTrans = NEHM.toList <$> src `HM.lookup` twistedTrans
        case srcTrans of
          Nothing ->
            -- If there are no transitions, simply emit the src state
            void $ getOrGenerateStateID src
          Just tgtLbls -> for_ tgtLbls $ \(tgt, lbls) -> do
            transLine src lbls tgt >>= emitLine
            go tgt
    haveProcessedSrc s = (s `HS.member`) <$> use _1
    markSrcProcessed s = _1 %= (s `HS.insert`)
    -- Allocate sequential numeric IDs on first sight of a state, emitting
    -- the state's node line at the same time.
    getOrGenerateStateID s = do
      (nextID, nameMap) <- use _2
      case s `HM.lookup` nameMap of
        Just sid -> return sid
        -- A new state; emit it, and update the next id and id map
        Nothing -> do
          stateLine s nextID >>= emitLine
          _2 .= (succ nextID, HM.insert s nextID nameMap)
          return nextID
    stateLine s sid = do
      let -- If this state is initial, add an arrow from the init point to it
          initText = if s `NES.member` inits
                       then TF.format " init -> {};" (TF.Only sid)
                       else ""
          -- Final states render double-circled, per DOT convention.
          shape :: Text
          shape = if s `HS.member` finals then "doublecircle" else "circle"
          fmtString = "{} [label=\"{}\" shape=\"{}\" ];{}"
      return $ TF.format fmtString (sid, s, shape, initText)
    transLine s ls t = do
      sid <- getOrGenerateStateID s
      tid <- getOrGenerateStateID t
      let fmtString = "{} -> {} [label=\"{}\"];"
      return $ TF.format fmtString (sid, tid, showNES ls)
    -- If the NES has >1 item show it in braces, else show the item alone.
    showNES nes = case NES.toList nes of
      [t] -> t
      ts -> addBraces . T.intercalate ", " $ ts
      where
        addBraces to = T.concat ["{", to, "}"]
-- |Used to turn the states of a DFA produced by subsetConstruction into
-- a DFA that can be converted to DOT, by forming a textual representation
-- of the sets of NFA states.
flattenDFAStates :: DFA (NES.NonEmpty T.Text) T.Text -> DFA T.Text T.Text
flattenDFAStates (FA trans inits finals) =
  FA trans' (NES.map showNES inits) (HS.map showNES finals)
  where
    trans' = renameTargets . renameSources $ trans
    -- fmap inside the two HashMaps and the Identity
    renameTargets = fmap (fmap (fmap showNES))
    -- Yuck, but it seems it's the best we can do!
    renameSources = HM.fromList . fmap (first showNES) . HM.toList
    -- Render a state set as "{a,b,c}".
    showNES :: NES.NonEmpty T.Text -> T.Text
    showNES nes = T.concat [ "{"
                           , T.intercalate "," $ NES.toList nes
                           , "}"
                           ]
-- | subsetConstruction converts a NFA to a DFA, by considering sets of NFA
-- states to be a single DFA state.  Implemented as a worklist walk from the
-- set of initial states.
subsetConstruction :: forall s l . (Show s, Show l) => NFA s l
                   -> DFA (NES.NonEmpty s) l
subsetConstruction (FA trans inits finals) = FA trans' inits' finals'
  where
    -- The DFA's single initial state is the whole set of NFA initial states.
    inits' :: NES.NonEmpty (NES.NonEmpty s)
    inits' = NES.singleton inits
    -- State threaded through 'go': the DFA transitions built so far, plus
    -- the set of DFA states already discovered.
    (trans', states') = execState (go inits) (HM.empty, inits')
    -- The new finals are any state set that contains an original final state
    finals' = HS.filter (any (`HS.member` finals)) $ NES.toHashSet states'
    -- |Lift transitions from single sources to sets of sources, unioning the
    -- underlying maps
    liftTrans :: NES.NonEmpty s -> Maybe (NEHM.NonEmpty l (NES.NonEmpty s))
    liftTrans s =
      case mapMaybe getTrans . NES.toList $ s of
        [] -> Nothing
        nehms -> Just $ foldr1 (NEHM.unionWith NES.union) nehms
      where
        getTrans :: s -> Maybe (NEHM.NonEmpty l (NES.NonEmpty s))
        getTrans src = src `HM.lookup` trans
    -- |Get the set of targets that the liftedTransitions map to.
    getTransTargets :: NEHM.NonEmpty l (NES.NonEmpty s) -> [NES.NonEmpty s]
    getTransTargets = map snd . NEHM.toList
    -- Record this state-set's transitions, then recurse into any target
    -- state-sets not seen before.
    go s = case liftTrans s of
      Nothing -> return ()
      Just sTrans -> do
        let newTransTargets = getTransTargets sTrans
        (transSoFar, done) <- get
        -- Which new states haven't we seen before?
        let todoStates = filter (not . (`NES.member` done)) newTransTargets
            -- Each Set of NFA states is now a single set of the DFA,
            -- furthermore, each label now maps to a single such state, as
            -- indicated by Identity as the functor parameter of FA
            singleTargets = fmap Identity sTrans
            transSoFar' = HM.insert s singleTargets transSoFar
            done' = foldr NES.insert done todoStates
        put (transSoFar', done')
        for_ todoStates go
-- |Parse a NFA from an input Text, rendering any parse error with 'show'.
textToNFA :: Text -> Either String (NFA Text Text)
textToNFA input =
  case parse parseNFADef "" input of
    Left parseErr -> Left (show parseErr)
    Right nfaDef -> Right (nfaDefToNFA nfaDef)
-- |Convert a NFADef into a NFA. At this stage, duplicate states/transitions
-- are removed (by building HashMaps/HashSets, unioning on collision).
nfaDefToNFA :: NFADef -> NFA Text Text
nfaDefToNFA (NFADef is ts fs) =
  FA trans (NES.fromNonEmptyList is) (HS.fromList fs)
  where
    trans = HM.fromListWith (NEHM.unionWith NES.union) singleTrans
    -- ss and ls can be treated as non-empty lists; we use the list Monad to
    -- construct a single list of pairs from source states to a (singleton)
    -- HashMap from labels to targets
    singleTrans = do
      (NFATransDef srcs ls trgs) <- ts
      s <- NEL.toList srcs
      l <- NEL.toList ls
      return (s, NEHM.singleton l (NES.fromNonEmptyList trgs))
-- | Raw parse result of an NFA description; duplicates are removed later
-- by 'nfaDefToNFA'.
data NFADef = NFADef (NEL.NonEmpty Text) -- ^ Initial states
                     [NFATransDef]       -- ^ Transitions
                     [Text]              -- ^ Final states
  deriving Show

-- | One transition line, relating every source, for every label, to
-- every target.
data NFATransDef = NFATransDef (NEL.NonEmpty Text) -- ^ Source states
                               (NEL.NonEmpty Text) -- ^ Labels
                               (NEL.NonEmpty Text) -- ^ Target states
  deriving Show
-- Parse a NFA definition:
-- NFA ::= STATES, "\n", { TRANS, "\n" }, [ STATES "\n" ];
--
-- STATE ::= CHARS
--
-- STATES ::= STATE { ",", STATE };
--
-- TRANS ::= STATES, "--", LABELS, "->", STATES;
--
-- LABELS ::= LABEL { ",", LABELS };
--
-- LABEL ::= CHARS
--
-- CHARS ::= {- Unicode String that doesn't contain unescaped ',', '\', '>', '-' or '\n' -};
--
-- E.g.:
-- 0
-- 0--b,c->1
-- 0--a->0,1
-- 1
--
-- is the NFA that has two states and accepts any string a*(a|b|c).
parseNFADef :: Parser NFADef
parseNFADef =
  -- Line 1: initial states; then zero or more transition lines; then an
  -- optional final-states line (whose trailing newline is optional too).
  NFADef <$> (parseStates "initial" <* newline)
         <*> many (isTransLine *> parseNFATrans <* newline)
         <*> option []
                    (NEL.toList <$>
                      parseStates "final" <* option '\n' newline)
         <* eof
  where
    commaSep1 x = sepBy1 x (char ',')
    -- Since the initial portion of a trans line looks like a state, we use
    -- @try@ to backtrack out if we fail by not being on a trans line. We
    -- don't just use try, because if we fail inside the trans line (e.g.
    -- invalid escape) we don't want to backtrack and attempt to parse the
    -- final states.
    isTransLine = try . lookAhead $ parseStates "source" >> string "--"
    parseTextString c = T.pack <$> many1 c
    parseStates pType =
      toNELOrError (pType ++ " states") <$> commaSep1 parseState
    parseState = parseTextString parseChar
    -- Allow simple escape sequences:
    -- \\ -> \
    -- \- -> -
    -- \, -> ,
    -- \> -> >
    parseChar = do
      c <- noneOf "-,>\n"
      if c == '\\'
        then oneOf "\\-,>" <?> "valid escape char: '\\', or '-' or '>'"
        else return c
    parseNFATrans =
      NFATransDef <$> parseStates "source"
                  <*> between (string "--") (string "->") parseLabels
                  <*> parseStates "target"
    parseLabels = toNELOrError "labels" <$>
                    (commaSep1 . parseTextString $ parseChar)
    -- sepBy1 guarantees non-emptiness, so the 'error' below is unreachable
    -- in practice; it documents the invariant.
    toNELOrError elemType es = fromMaybe err (NEL.nonEmpty es)
      where
        err = error $ "Must have non-empty set of " ++ elemType
|
owst/NFAToDFA
|
src/Math/Automata/Simple.hs
|
bsd-3-clause
| 12,630
| 0
| 20
| 3,796
| 2,836
| 1,533
| 1,303
| 196
| 8
|
module Signal.Wavelet.Repa2 where
import Data.Array.Repa as R
import Data.Array.Repa.Unsafe (unsafeTraverse)
import Signal.Wavelet.Repa.Common
{-# INLINE dwtS #-}
{-# INLINE dwtP #-}
{-# INLINE idwtS #-}
{-# INLINE idwtP #-}
-- | Forward (dwt*) and inverse (idwt*) lattice wavelet transforms, in
-- sequential (S) and parallel (P) variants.  First argument: rotation
-- angles, one per lattice layer; second: the input signal.  Forward
-- transforms extend the signal at the end, inverse ones at the front.
dwtS, dwtP, idwtS, idwtP :: Array U DIM1 Double
                         -> Array U DIM1 Double
                         -> Array U DIM1 Double
dwtS !angles !signal = dwtWorkerS extendEnd angles signal
dwtP !angles !signal = dwtWorkerP extendEnd angles signal
idwtS !angles !signal = dwtWorkerS extendFront angles signal
idwtP !angles !signal = dwtWorkerP extendFront angles signal
-- See: Note [Higher order functions interfere with fusion] in Repa1.hs
{-# INLINE dwtWorkerS #-}
-- | Workers: extend the signal via the supplied extension function, then
-- apply one 'lattice' layer per angle, trimming between layers.  The S
-- variant forces sequentially ('forceS').
dwtWorkerS, dwtWorkerP :: (Source r Double)
                       => (Int -> Array r DIM1 Double -> Array D DIM1 Double)
                       -> Array U DIM1 Double
                       -> Array r DIM1 Double
                       -> Array U DIM1 Double
dwtWorkerS extendF !angles !signal = go layers extendedSignal
  where
    !extendedSignal = forceS $ extendF layers signal
    -- One lattice layer per angle.
    !layers = size . extent $ angles
    {-# INLINE go #-}
    go :: Int -> Array U DIM1 Double -> Array U DIM1 Double
    go !n sig
      | n == 0 = sig
      -- Final layer: no trim afterwards.
      | n == 1 = forceS . lattice (sin_, cos_) $ sig
      | otherwise = go (n - 1) (forceS . trim . lattice (sin_, cos_) $ sig)
      where !sin_ = sin $ angles `unsafeIndex` (Z :. (layers - n))
            !cos_ = cos $ angles `unsafeIndex` (Z :. (layers - n))
{-# INLINE dwtWorkerP #-}
-- Parallel worker: identical to 'dwtWorkerS' except it forces with
-- 'forceP' (and is strict in 'sig' in the loop).
dwtWorkerP extendF !angles !signal = go layers extendedSignal
  where
    !extendedSignal = forceP $ extendF layers signal
    !layers = size . extent $ angles
    {-# INLINE go #-}
    go :: Int -> Array U DIM1 Double -> Array U DIM1 Double
    go !n !sig
      | n == 0 = sig
      | n == 1 = forceP . lattice (sin_, cos_) $ sig
      | otherwise = go (n - 1) (forceP . trim . lattice (sin_, cos_) $ sig)
      where !sin_ = sin $ angles `unsafeIndex` (Z :. (layers - n))
            !cos_ = cos $ angles `unsafeIndex` (Z :. (layers - n))
{-# INLINE lattice #-}
-- | One lattice layer: combine each (even, odd) pair of adjacent samples
-- using the given (sine, cosine) rotation coefficients.
lattice :: (Shape sh)
        => (Double, Double)
        -> Array U (sh :. Int) Double
        -> Array D (sh :. Int) Double
lattice !(!s, !c) !signal = unsafeTraverse signal id baseOp
  where
    {-# INLINE baseOp #-}
    baseOp f !(sh :. i)
      -- Even index: pair with the following sample.
      | even i = let x = f (sh :. i )
                     y = f (sh :. i + 1)
                 in x * c + y * s
      -- Odd index: pair with the preceding sample.
      | otherwise = let x = f (sh :. i - 1)
                        y = f (sh :. i )
                    in x * s - y * c
{-# INLINE extendFront #-}
-- | Cyclically extend the front of the signal with 2*layers-2 samples
-- taken from its end, doubling up in chunks when the extension is longer
-- than the signal itself.
extendFront :: (Source r Double, Shape sh)
            => Int
            -> Array r (sh :. Int) Double
            -> Array D (sh :. Int) Double
extendFront !layers !signal = go (delay signal) initExt initSigSize
  where
    !initExt = 2 * layers - 2 :: Int
    !initSigSize = size . extent $ signal :: Int
    !(sh :. _) = extent signal
    {-# INLINE go #-}
    -- Prepend at most 'sigSize' trailing samples per step until 'ln'
    -- samples have been added in total.
    go !sig !ln !sigSize
      | extSize <= 0 = sig
      | otherwise = go extSignal (ln - extSize) (sigSize + extSize)
      where !extSize = min sigSize ln :: Int
            !extSignal = extract (sh :. sigSize - extSize)
                                 (sh :. extSize) sig R.++ sig
{-# INLINE extendEnd #-}
-- | Cyclically extend the end of the signal with 2*layers-2 samples taken
-- from its front; mirror image of 'extendFront'.
extendEnd :: (Source r Double, Shape sh)
          => Int
          -> Array r (sh :. Int) Double
          -> Array D (sh :. Int) Double
extendEnd !layers !signal = go (delay signal) initExt initSigSize
  where
    !initExt = 2 * layers - 2 :: Int
    !initSigSize = size . extent $ signal :: Int
    !(sh :. _) = extent signal
    {-# INLINE go #-}
    -- Append at most 'sigSize' leading samples per step until 'ln' samples
    -- have been added in total.
    go !sig !ln !sigSize
      | extSize <= 0 = sig
      | otherwise = go extSignal (ln - extSize) (sigSize + extSize)
      where !extSize = min sigSize ln :: Int
            !extSignal = sig R.++ extract (sh :. 0) (sh :. extSize) sig
{-# INLINE trim #-}
-- | Drop one sample from each end of the innermost dimension.
trim :: (Source r Double, Shape sh)
     => Array r (sh :. Int) Double
     -> Array D (sh :. Int) Double
trim !signal = unsafeTraverse signal trimExtent mapElems
  where
    {-# INLINE trimExtent #-}
    -- The result is two samples shorter, clamped at zero.
    trimExtent !(sh :. i) = (sh :. max (i - 2) 0)
    {-# INLINE mapElems #-}
    -- Shift every read by one so the first and last samples are skipped.
    mapElems f !(sh :. i) = f (sh :. (i + 1))
|
jstolarek/lattice-structure-hs
|
src/Signal/Wavelet/Repa2.hs
|
bsd-3-clause
| 4,532
| 0
| 15
| 1,652
| 1,582
| 791
| 791
| -1
| -1
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedLists #-}
module Foreign.Erlang.TermSpec
( spec
)
where
import Data.Binary ( decode
, encode
)
import Data.ByteString.Char8 ( )
import qualified Data.ByteString.Lazy as B
import Data.Word ( )
import Data.List.NonEmpty (NonEmpty(..))
import Foreign.Erlang.Term
import Test.Hspec
import Test.QuickCheck
import Data.Vector ( fromList )
-- | Round-trip and encoding properties for 'Foreign.Erlang.Term'.
spec :: Spec
spec = do
  describe "Pid" $ do
    it "has a Binary instance such that decode is the inverse of encode"
      $ property
      $ \(p :: Pid) -> fromTerm (decode (encode (toTerm p))) `shouldBe` (Just p)
    it "represents all valid Erlang pids" $ property $ \x y z ->
      let p = pid "nodename" x y z
      in  fromTerm (decode (encode (toTerm p))) `shouldBe` (Just p)
  describe "FromTerm/ToTerm" $ do
    it "converts '[a]' back and forth"
      $ property
      $ \(xs :: [Integer]) -> fromTerms (toTerms xs) `shouldBe` Just xs
    it "converts 'Maybe a' back and forth"
      $ property
      $ \(x :: Maybe Bool) -> fromTerm (toTerm x) `shouldBe` Just x
    it "converts 'Either a b' back and forth"
      $ property
      $ \(x :: Either Integer Double) -> fromTerm (toTerm x) `shouldBe` Just x
    it "converts 'NonEmpty a' back and forth"
      $ property
      $ \(h :: Integer) (t :: [Integer]) ->
          let xs = h :| t in fromTerm (toTerm xs) `shouldBe` Just xs
  describe "Integer"
    $ it "has a Binary instance such that decode is the inverse of encode"
    $ property
    $ \(i :: Integer) ->
        fromTerm (decode (encode (integer i))) `shouldBe` (Just i)
  describe "The largest small_big_ext Integer" $ do
    let i = 2 ^ (8 * 255) - 1
    it "has a Binary instance such that decode is the inverse of encode"
      $ fromTerm (decode (encode (integer i)))
      `shouldBe` (Just i)
    -- small_big_ext wire layout: tag 110, magnitude length 255, sign byte 0,
    -- followed by the magnitude as 255 little-endian 0xFF bytes.
    it "is converted to a valid erlang binary"
      $ B.unpack (encode (integer i))
      `shouldBe` (110 : 255 : 0 : replicate 255 255)
  describe "The smallest large_big_ext Integer"
    $ it "has a Binary instance such that decode is the inverse of encode"
    $ let i = 2 ^ (8 * 255)
      in  fromTerm (decode (encode (integer i))) `shouldBe` (Just i)
  describe "Term" $ do
    it "has a Binary instance such that decode is the inverse of encode"
      $ property
      $ \(t :: Term) -> decode (encode t) `shouldBe` t
    it "has an IsString instance that makes atoms"
      $ "testatom" `shouldBe` atom "testatom"
  describe "Pattern Synonyms" $ do
    it "Tuple3" $ do
      toTerm (Atom "test-atom", integer 1, integer 2)
        `shouldBe` Tuple3 "test-atom" (integer 1) (integer 2)
      (case toTerm (Atom "test-atom", integer 1, integer 2) of
          Tuple3 "test-atom" _ _ -> True
          _                      -> False
        )
        `shouldBe` True
    -- Exercises the 'List3' pattern synonym (construction and matching).
    it "List3" $ do
      List (fromList [integer 0, integer 1, integer 2]) Nil
        `shouldBe` toTerm (List3 (integer 0) (integer 1) (integer 2))
      (case List (fromList [integer 0, integer 1, integer 2]) Nil of
          List3 _ _ _ -> True
          _           -> False
        )
        `shouldBe` True
    it "Map2" $ do
      Map (fromList [MapEntry "k1" "v1", MapEntry "k2" "v2"])
        `shouldBe` toTerm (Map2 ("k1" :=> "v1") ("k2" :=> "v2"))
      (case Map (fromList [MapEntry "k1" "v1", MapEntry "k2" "v2"]) of
          Map2 ("k1" :=> "v1") ("k2" :=> "v2") -> True
          _ -> False
        )
        `shouldBe` True
    it "has an IsList that generates lists" $ do
      ["a1", "a2"]
        `shouldBe` toTerm (List (fromList ["a1", "a2"]) Nil)
|
LTI2000/hinterface
|
test/Foreign/Erlang/TermSpec.hs
|
bsd-3-clause
| 9,887
| 0
| 23
| 6,233
| 2,024
| 1,149
| 875
| 343
| 4
|
{-# LANGUAGE MultiWayIf #-}
-- | This module provides the /MSum/ processor.
--
-- The /MSum/ processor runs two strategies in parallel and provides a "sum" on the resulting proof trees and
-- certificates. Informally, the processor behaves like a monoid where open (sub)prooftrees and unknown bounds behave
-- like neutral elements in the proof construction. Closed prooftrees are integrated as left and right child. The
-- certificates are combined, that is `min` for upper bounds and `max` for lower bounds and `Unknown` complexity is
-- treated as a neutral element.
--
-- Motivating usage: msum lower_bound_strategy upper_bound_strategy
module Tct.Core.Processor.MSum
( madd
, maddDeclaration)
where
import Tct.Core.Data
import Tct.Core.Processor.Failing (close)
-- | A proof-tree forking with zero, one or two children ("Gabel" is German
-- for "fork"). 'None' carries no closed subtree; 'One' and 'Two' integrate
-- one or both closed sub-prooftrees.
-- NOTE(review): stock deriving of Functor/Foldable/Traversable requires the
-- corresponding extensions, but only MultiWayIf is declared in this module —
-- presumably they are enabled as default-extensions in the build file; confirm.
data Gabel a = None | One a | Two a a
  deriving (Functor, Foldable, Traversable)
-- | The pair of strategies that the /MSum/ processor runs in parallel.
data Sum i o = Sum
  { left  :: Strategy i o  -- ^ first strategy (e.g. a lower-bound strategy)
  , right :: Strategy i o  -- ^ second strategy (e.g. an upper-bound strategy)
  } deriving Show
-- | A certificate counts as bounded when it is not unbounded.
bounded :: Certificate -> Bool
bounded cert = not (isUnbounded cert)
-- | Build a successful 'Return' from the closed sub-prooftrees collected in a
-- 'Gabel'. The proof object is '()'; the resulting certificate combines the
-- children's certificates with 'min' for upper bounds and 'maz' (a 'max' in
-- which 'Unknown' is a neutral element) for lower bounds.
succeed :: (ProofObject p ~ (), Forking p ~ Gabel, Monad m) => Gabel (ProofTree (Out p)) -> m (Return p)
succeed po = return $ Progress () certf po where
  certf None        = unbounded
  certf (One c)     = c
  certf (Two c1 c2) = Certificate
    { spaceUB = spaceUB c1 `min` spaceUB c2
    , spaceLB = spaceLB c1 `maz` spaceLB c2
    , timeUB  = timeUB c1 `min` timeUB c2
    , timeLB  = timeLB c1 `maz` timeLB c2 }
  -- 'max' treating 'Unknown' as a neutral element.
  Unknown `maz` b = b
  b `maz` Unknown = b
  a `maz` b       = a `max` b
-- | Run both strategies concurrently on the same problem; keep every
-- sub-prooftree whose certificate is bounded, and abort when neither side
-- produced a bound.
instance (ProofData i, ProofData o) => Processor (Sum i o) where
  type ProofObject (Sum i o) = ()
  type In (Sum i o)          = i
  type Out (Sum i o)         = o
  type Forking (Sum i o)     = Gabel
  execute p prob = do
    -- Evaluate one side ('left' or 'right') within the remaining time budget.
    let k pr = evaluate (timeoutRemaining $ pr p) (Open prob)
    (lpt,rpt) <- concurrently (k left) (k right)
    let
      lc = certificate lpt
      rc = certificate rpt
    if
      | bounded lc && bounded rc -> succeed $ Two lpt rpt
      | bounded lc               -> succeed $ One lpt
      | bounded rc               -> succeed $ One rpt
      | otherwise                -> abortWith "None"
-- | Strategy-parser declaration for the 'madd' combinator: two required
-- strategy arguments that are run in parallel.
-- NOTE(review): the declared name is "sum" while the combinator is exported
-- as 'madd' and the module documentation speaks of "msum" — confirm which
-- user-facing name is intended before renaming.
maddDeclaration :: (Declared i o, ProofData i, ProofData o) => Declaration(
  '[ Argument 'Required (Strategy i o)
   , Argument 'Required (Strategy i o)]
   :-> Strategy i o)
maddDeclaration =
  declare
    "sum"
    ["This processor runs both strategies in parallel and returns the successful ones."]
    (strat "left" ["The left strategy."], strat "right" ["The right strategy."])
    madd
-- | Run the two given strategies in parallel via the 'Sum' processor and
-- close the resulting proof tree.
madd :: (ProofData i, ProofData o, Show p) => Strategy i o -> Strategy i o -> Strategy i p
madd l r = processor (Sum { left = l, right = r }) .>>> close
|
ComputationWithBoundedResources/tct-core
|
src/Tct/Core/Processor/MSum.hs
|
bsd-3-clause
| 2,700
| 0
| 14
| 692
| 854
| 446
| 408
| -1
| -1
|
{-# LANGUAGE TypeSynonymInstances, MultiParamTypeClasses #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.MultiToggle.TabBarDecoration
-- Copyright : (c) 2018 Lucian Poston
-- License : BSD-style (see LICENSE)
--
-- Maintainer : <lucianposton@pm.me>
-- Stability : unstable
-- Portability : unportable
--
-- Provides a simple transformer for use with "XMonad.Layout.MultiToggle" to
-- dynamically toggle "XMonad.Layout.TabBarDecoration".
-----------------------------------------------------------------------------
module XMonad.Layout.MultiToggle.TabBarDecoration (
SimpleTabBar(..)
) where
import XMonad.Layout.MultiToggle
import XMonad
import XMonad.Layout.LayoutModifier
import XMonad.Layout.TabBarDecoration
-- $usage
-- To use this module with "XMonad.Layout.MultiToggle", add the @SIMPLETABBAR@
-- to your layout For example, from a basic layout like
--
-- > layout = tiled ||| Full
--
-- Add @SIMPLETABBAR@ by changing it this to
--
-- > layout = mkToggle (single SIMPLETABBAR) (tiled ||| Full)
--
-- You can now dynamically toggle the 'XMonad.Layout.TabBarDecoration'
-- transformation by adding a key binding such as @mod-x@ as follows.
--
-- > ...
-- > , ((modm, xK_x ), sendMessage $ Toggle SIMPLETABBAR)
-- > ...
-- | Transformer for "XMonad.Layout.TabBarDecoration".
data SimpleTabBar = SIMPLETABBAR deriving (Read, Show, Eq)
instance Transformer SimpleTabBar Window where
    -- Toggling on wraps the layout with 'simpleTabBar'; toggling off strips
    -- the two 'ModifiedLayout' wrappers that 'simpleTabBar' adds, restoring
    -- the underlying layout.
    transform _ x k = k (simpleTabBar x) (\(ModifiedLayout _ (ModifiedLayout _ x')) -> x')
|
xmonad/xmonad-contrib
|
XMonad/Layout/MultiToggle/TabBarDecoration.hs
|
bsd-3-clause
| 1,594
| 0
| 12
| 245
| 148
| 98
| 50
| 10
| 0
|
-- | Matrices
------------------------------------------------------------------------------
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE DataKinds #-}
module Value.Matrix where
------------------------------------------------------------------------------
import Structure
import Value.Number
import Value.Vector
import Constraint.Addable
import Constraint.Multipliable
import Constraint.VectorSpace
import GHC.TypeLits
import CLaSH.Sized.Vector as V
import Prelude as P
import Data.List
------------------------------------------------------------------------------
-- | Matrix data type
data Matrix (m :: Nat) (n :: Nat) r where
Matrix :: (Expression e r, VectorSpace r) => Vec m (Vec n e) -> Matrix m n r
------------------------------------------------------------------------------
-- ALL MATRICES ARE VALUES
-- ALL VALUES ARE EXPRESSIONS THAT EVALUATE TO THEMSELVES
-- | A matrix is a value, so it evaluates to itself.
instance Expression (Matrix m n r) (Matrix m n r) where
  evaluate = Success
instance Value (Matrix m n r)
-- ALL EXPRESSIONS MUST BE SHOWABLE
-- | Render a matrix as a bracketed list of bracketed rows,
-- e.g. @[[a, b], [c, d]]@.
instance Show (Matrix m n r) where
  show (Matrix rows) = bracketed (P.map rowString (toList rows))
    where
      rowString r  = bracketed (P.map show (toList r))
      bracketed xs = "[" P.++ intercalate ", " xs P.++ "]"
------------------------------------------------------------------------------
-- CONSTRAINT & OPERATOR IMPLEMENTATION
-- | Matrix Addition
instance Addable (Matrix m n r) (Matrix m n r) (Matrix m n r) where
  -- Element-wise addition: succeeds only if every element-level addition
  -- succeeds; otherwise the collected failure message is returned.
  add (Success (Matrix a)) (Success (Matrix b)) = if hasSucceeded c then Success (Matrix (fromSuccess c)) else Failure (failureMessage c)
    where
      c = unwrapGuardedVec $ V.map (\row -> unwrapGuardedVec row) $ v -- unwrap the Guards to the outside
      v = V.zipWith (\ra rb -> V.zipWith add (V.map evaluate ra) (V.map evaluate rb)) a b
  -- Failures in either operand short-circuit.
  add (Failure s) _ = Failure s
  add _ (Failure s) = Failure s
-- | Matrix Scalar Multiplication
instance Multipliable Number (Matrix m n r) (Matrix m n r) where
  -- Multiply every element of the matrix by the scalar; element-level
  -- failures are collected and propagated.
  multiply (Success a) (Success (Matrix b)) = if hasSucceeded c then Success (Matrix (fromSuccess c)) else Failure (failureMessage c)
    where
      c = unwrapGuardedVec $ V.map (\row -> unwrapGuardedVec row) $ v -- unwrap the Guards to the outside
      v = V.map (\row -> V.map (multiply (Success a)) (V.map evaluate row)) b
  -- Failures in either operand short-circuit.
  multiply (Failure s) _ = Failure s
  multiply _ (Failure s) = Failure s
-- | Matrix Scalar Multiplication
-- | Scalar multiplication from the right is defined by commuting to the
-- scalar-on-the-left instance.
instance Multipliable (Matrix m n r) Number (Matrix m n r) where
  multiply = flip multiply
-- | Matrix is a Vector Space
instance (KnownNat m, KnownNat n, VectorSpace r) => VectorSpace (Matrix m n r) where
  -- The zero matrix: every entry is the underlying space's zero vector.
  zeroVector = Matrix $ V.repeat (V.repeat zeroVector)
  -- Element-wise negation; element-level failures are collected and propagated.
  negateVector (Success (Matrix a)) = if hasSucceeded c then Success (Matrix (fromSuccess c)) else Failure (failureMessage c)
    where
      c = unwrapGuardedVec $ V.map (\row -> unwrapGuardedVec row) $ v -- unwrap the Guards to the outside
      v = V.map (\row -> V.map negateVector (V.map evaluate row)) a
  negateVector (Failure s) = Failure s
-- SPECIFIC MATRIX TYPE INSTANCES
-- | Matrix Multiplication
instance (KnownNat m, KnownNat n, KnownNat k, Multipliable r r r) => Multipliable (Matrix m n r) (Matrix n k r) (Matrix m k r) where
  -- Row-by-column products: each result entry is the dot product of a row of
  -- the left matrix with a column of the right one.
  multiply (Success (Matrix a)) (Success (Matrix b)) = if hasSucceeded v then Success (Matrix (fromSuccess v)) else Failure $ failureMessage v
    where
      -- Transpose the right operand so its columns become rows.
      b' = V.transpose b
      -- Dot product of two vectors of expressions, folded from 'zeroVector'.
      dot j k = V.foldr add (Success zeroVector) $ V.zipWith multiply (V.map evaluate j) (V.map evaluate k)
      v = unwrapGuardedVec $ V.map (\r -> unwrapGuardedVec $ V.map (dot r) b') a
  -- Failures in either operand short-circuit.
  multiply (Failure s) _ = Failure s
  multiply _ (Failure s) = Failure s
-- SPECIFIC VECTOR TYPE OPERATORS
-- | Transpose Operator
data Transpose m n r where
  Transpose :: Matrix m n r -> Transpose m n r
-- | all operators are expressions
instance (KnownNat m, KnownNat n) => Expression (Transpose m n r) (Matrix n m r) where
  --evaluate (Transpose Nil) = Success Nil
  -- Transposition always succeeds; the row and column dimensions swap
  -- (m x n becomes n x m, as reflected in the instance head).
  evaluate (Transpose (Matrix vec)) = Success $ Matrix (V.transpose vec)
-- | all expressions are showable
instance Show (Transpose m n r) where
  show (Transpose a) = "(" P.++ show a P.++ "^T)"
|
Conflagrationator/HMath
|
src/Value/Matrix.hs
|
bsd-3-clause
| 4,289
| 0
| 21
| 836
| 1,460
| 757
| 703
| 56
| 0
|
-----------------------------------------------------------------------------
-- | Module : Control.Monad.Trans.Chronicle
--
-- The 'ChronicleT' monad, a hybrid error/writer monad that allows
-- both accumulating outputs and aborting computation with a final
-- output.
-----------------------------------------------------------------------------
module Control.Monad.Chronicle (
-- * Type class for Chronicle-style monads
MonadChronicle(..)
-- * The ChronicleT monad transformer
, Chronicle, runChronicle, ChronicleT(..)
, module Data.Monoid
, module Control.Monad
, module Control.Monad.Trans
) where
import Data.Monoid (Monoid(..))
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.Chronicle (Chronicle)
import Control.Monad.Chronicle.Class
|
Zemyla/these
|
Control/Monad/Chronicle.hs
|
bsd-3-clause
| 1,028
| 0
| 6
| 332
| 101
| 71
| 30
| 11
| 0
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
The @TyCon@ datatype
-}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module ETA.Types.TyCon(
-- * Main TyCon data types
TyCon, FieldLabel,
AlgTyConRhs(..), visibleDataCons,
TyConParent(..), isNoParent,
FamTyConFlav(..), Role(..),
-- ** Constructing TyCons
mkAlgTyCon,
mkClassTyCon,
mkFunTyCon,
mkPrimTyCon,
mkKindTyCon,
mkLiftedPrimTyCon,
mkTupleTyCon,
mkSynonymTyCon,
mkFamilyTyCon,
mkPromotedDataCon,
mkPromotedTyCon,
-- ** Predicates on TyCons
isAlgTyCon,
isClassTyCon, isFamInstTyCon,
isFunTyCon,
isPrimTyCon,
isTupleTyCon, isUnboxedTupleTyCon, isBoxedTupleTyCon,
isTypeSynonymTyCon,
isDecomposableTyCon,
isPromotedDataCon, isPromotedTyCon,
isPromotedDataCon_maybe, isPromotedTyCon_maybe,
promotableTyCon_maybe, promoteTyCon,
isDataTyCon, isProductTyCon, isDataProductTyCon_maybe,
isEnumerationTyCon,
isNewTyCon, isAbstractTyCon,
isFamilyTyCon, isOpenFamilyTyCon,
isTypeFamilyTyCon, isDataFamilyTyCon,
isOpenTypeFamilyTyCon, isClosedSynFamilyTyCon_maybe,
isBuiltInSynFamTyCon_maybe,
isUnLiftedTyCon,
isGadtSyntaxTyCon, isDistinctTyCon, isDistinctAlgRhs,
isInjectiveTyCon, isGenerativeTyCon, isGenInjAlgRhs,
isTyConAssoc, tyConAssoc_maybe,
isRecursiveTyCon,
isImplicitTyCon,
-- ETA-specific
isObjectTyCon,
-- ** Extracting information out of TyCons
tyConName,
tyConKind,
tyConUnique,
tyConTyVars,
tyConCType, tyConCType_maybe,
tyConDataCons, tyConDataCons_maybe,
tyConSingleDataCon_maybe, tyConSingleAlgDataCon_maybe,
tyConFamilySize,
tyConStupidTheta,
tyConArity,
tyConRoles,
tyConParent,
tyConTuple_maybe, tyConClass_maybe,
tyConFamInst_maybe, tyConFamInstSig_maybe, tyConFamilyCoercion_maybe,
synTyConDefn_maybe, synTyConRhs_maybe, famTyConFlav_maybe,
algTyConRhs,
newTyConRhs, newTyConEtadArity, newTyConEtadRhs,
unwrapNewTyCon_maybe, unwrapNewTyConEtad_maybe,
tupleTyConBoxity, tupleTyConSort, tupleTyConArity,
-- ** Manipulating TyCons
tcExpandTyCon_maybe, coreExpandTyCon_maybe,
makeTyConAbstract,
newTyConCo, newTyConCo_maybe,
pprPromotionQuote,
-- * Primitive representations of Types
PrimRep(..), --PrimElemRep(..),
tyConPrimRep, isVoidRep, isGcPtrRep, isObjectRep, getObjectClass,
primRepSizeW, --primElemRepSizeB,
-- * Recursion breaking
RecTcChecker, initRecTc, checkRecTc
) where
#include "HsVersions.h"
import {-# SOURCE #-} ETA.Types.TypeRep ( Kind, Type, PredType )
import {-# SOURCE #-} ETA.BasicTypes.DataCon ( DataCon, isVanillaDataCon )
import ETA.BasicTypes.Var
import ETA.Types.Class
import ETA.BasicTypes.BasicTypes
import ETA.Main.DynFlags
import ETA.Prelude.ForeignCall
import ETA.BasicTypes.Name
import ETA.BasicTypes.NameSet
import ETA.Types.CoAxiom
import ETA.Prelude.PrelNames
import ETA.Utils.Maybes
import ETA.Utils.Outputable
import ETA.Main.Constants
import ETA.Utils.Util
import qualified Data.Data as Data
import Data.Typeable (Typeable)
-- TODO: Refactor to FastString?
import Data.Text (Text)
{-
-----------------------------------------------
Notes about type families
-----------------------------------------------
Note [Type synonym families]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Type synonym families, also known as "type functions", map directly
onto the type functions in FC:
type family F a :: *
type instance F Int = Bool
..etc...
* Reply "yes" to isTypeFamilyTyCon, and isFamilyTyCon
* From the user's point of view (F Int) and Bool are simply
equivalent types.
* A Haskell 98 type synonym is a degenerate form of a type synonym
family.
* Type functions can't appear in the LHS of a type function:
type instance F (F Int) = ... -- BAD!
* Translation of type family decl:
type family F a :: *
translates to
a FamilyTyCon 'F', whose FamTyConFlav is OpenSynFamilyTyCon
type family G a :: * where
G Int = Bool
G Bool = Char
G a = ()
translates to
a FamilyTyCon 'G', whose FamTyConFlav is ClosedSynFamilyTyCon, with the
appropriate CoAxiom representing the equations
* In the future we might want to support
* injective type families (allow decomposition)
but we don't at the moment [2013]
Note [Data type families]
~~~~~~~~~~~~~~~~~~~~~~~~~
See also Note [Wrappers for data instance tycons] in MkId.lhs
* Data type families are declared thus
data family T a :: *
data instance T Int = T1 | T2 Bool
Here T is the "family TyCon".
* Reply "yes" to isDataFamilyTyCon, and isFamilyTyCon
* The user does not see any "equivalent types" as he did with type
synonym families. He just sees constructors with types
T1 :: T Int
T2 :: Bool -> T Int
* Here's the FC version of the above declarations:
data T a
data R:TInt = T1 | T2 Bool
axiom ax_ti : T Int ~ R:TInt
The R:TInt is the "representation TyCons".
It has an AlgTyConParent of
FamInstTyCon T [Int] ax_ti
* The axiom ax_ti may be eta-reduced; see
Note [Eta reduction for data family axioms] in TcInstDcls
* The data constructor T2 has a wrapper (which is what the
source-level "T2" invokes):
$WT2 :: Bool -> T Int
$WT2 b = T2 b `cast` sym ax_ti
* A data instance can declare a fully-fledged GADT:
data instance T (a,b) where
X1 :: T (Int,Bool)
X2 :: a -> b -> T (a,b)
Here's the FC version of the above declaration:
data R:TPair a where
X1 :: R:TPair Int Bool
X2 :: a -> b -> R:TPair a b
axiom ax_pr :: T (a,b) ~ R:TPair a b
$WX1 :: forall a b. a -> b -> T (a,b)
$WX1 a b (x::a) (y::b) = X2 a b x y `cast` sym (ax_pr a b)
The R:TPair are the "representation TyCons".
We have a bit of work to do, to unpick the result types of the
data instance declaration for T (a,b), to get the result type in the
representation; e.g. T (a,b) --> R:TPair a b
The representation TyCon R:TList, has an AlgTyConParent of
FamInstTyCon T [(a,b)] ax_pr
* Notice that T is NOT translated to a FC type function; it just
becomes a "data type" with no constructors, which can be coerced
into R:TInt, R:TPair by the axioms. These
axioms come into play when (and *only* when) you
- use a data constructor
- do pattern matching
Rather like newtype, in fact
As a result
- T behaves just like a data type so far as decomposition is concerned
- (T Int) is not implicitly converted to R:TInt during type inference.
Indeed the latter type is unknown to the programmer.
- There *is* an instance for (T Int) in the type-family instance
environment, but it is only used for overlap checking
- It's fine to have T in the LHS of a type function:
type instance F (T a) = [a]
It was this last point that confused me! The big thing is that you
should not think of a data family T as a *type function* at all, not
even an injective one! We can't allow even injective type functions
on the LHS of a type function:
type family injective G a :: *
type instance F (G Int) = Bool
is no good, even if G is injective, because consider
type instance G Int = Bool
type instance F Bool = Char
So a data type family is not an injective type function. It's just a
data type with some axioms that connect it to other data types.
Note [Associated families and their parent class]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*Associated* families are just like *non-associated* families, except
that they have a TyConParent of AssocFamilyTyCon, which identifies the
parent class.
However there is an important sharing relationship between
* the tyConTyVars of the parent Class
* the tyConTyvars of the associated TyCon
class C a b where
data T p a
type F a q b
Here the 'a' and 'b' are shared with the 'Class'; that is, they have
the same Unique.
This is important. In an instance declaration we expect
* all the shared variables to be instantiated the same way
* the non-shared variables of the associated type should not
be instantiated at all
instance C [x] (Tree y) where
data T p [x] = T1 x | T2 p
type F [x] q (Tree y) = (x,y,q)
Note [TyCon Role signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Every tycon has a role signature, assigning a role to each of the tyConTyVars
(or of equal length to the tyConArity, if there are no tyConTyVars). An
example demonstrates these best: say we have a tycon T, with parameters a at
nominal, b at representational, and c at phantom. Then, to prove
representational equality between T a1 b1 c1 and T a2 b2 c2, we need to have
nominal equality between a1 and a2, representational equality between b1 and
b2, and nothing in particular (i.e., phantom equality) between c1 and c2. This
might happen, say, with the following declaration:
data T a b c where
MkT :: b -> T Int b c
Data and class tycons have their roles inferred (see inferRoles in TcTyDecls),
as do vanilla synonym tycons. Family tycons have all parameters at role N,
though it is conceivable that we could relax this restriction. (->)'s and
tuples' parameters are at role R. Each primitive tycon declares its roles;
it's worth noting that (~#)'s parameters are at role N. Promoted data
constructors' type arguments are at role R. All kind arguments are at role
N.
************************************************************************
* *
\subsection{The data type}
* *
************************************************************************
-}
-- | TyCons represent type constructors. Type constructors are introduced by
-- things such as:
--
-- 1) Data declarations: @data Foo = ...@ creates the @Foo@ type constructor of
-- kind @*@
--
-- 2) Type synonyms: @type Foo = ...@ creates the @Foo@ type constructor
--
-- 3) Newtypes: @newtype Foo a = MkFoo ...@ creates the @Foo@ type constructor
-- of kind @* -> *@
--
-- 4) Class declarations: @class Foo where@ creates the @Foo@ type constructor
-- of kind @*@
--
-- This data type also encodes a number of primitive, built in type constructors
-- such as those for function and tuple types.
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.lhs
data TyCon
= -- | The function type constructor, @(->)@
FunTyCon {
tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant:
-- identical to Unique of Name stored in
-- tyConName field.
tyConName :: Name, -- ^ Name of the constructor
tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just
-- the return kind)
tyConArity :: Arity -- ^ Number of arguments this TyCon must
-- receive to be considered saturated
-- (including implicit kind variables)
}
-- | Algebraic type constructors, which are defined to be those
-- arising @data@ type and @newtype@ declarations. All these
-- constructors are lifted and boxed. See 'AlgTyConRhs' for more
-- information.
| AlgTyCon {
tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant:
-- identical to Unique of Name stored in
-- tyConName field.
tyConName :: Name, -- ^ Name of the constructor
tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just
-- the return kind)
tyConArity :: Arity, -- ^ Number of arguments this TyCon must
-- receive to be considered saturated
-- (including implicit kind variables)
tyConTyVars :: [TyVar], -- ^ The kind and type variables used in the
-- type constructor.
-- Invariant: length tyvars = arity
-- Precisely, this list scopes over:
--
-- 1. The 'algTcStupidTheta'
-- 2. The cached types in algTyConRhs.NewTyCon
-- 3. The family instance types if present
--
-- Note that it does /not/ scope over the data
-- constructors.
tcRoles :: [Role], -- ^ The role for each type variable
-- This list has the same length as tyConTyVars
-- See also Note [TyCon Role signatures]
tyConCType :: Maybe CType,-- ^ The C type that should be used
-- for this type when using the FFI
-- and CAPI
algTcGadtSyntax :: Bool, -- ^ Was the data type declared with GADT
-- syntax? If so, that doesn't mean it's a
-- true GADT; only that the "where" form
-- was used. This field is used only to
-- guide pretty-printing
algTcStupidTheta :: [PredType], -- ^ The \"stupid theta\" for the data
-- type (always empty for GADTs). A
-- \"stupid theta\" is the context to
-- the left of an algebraic type
-- declaration, e.g. @Eq a@ in the
-- declaration @data Eq a => T a ...@.
algTcRhs :: AlgTyConRhs, -- ^ Contains information about the
-- data constructors of the algebraic type
algTcRec :: RecFlag, -- ^ Tells us whether the data type is part
-- of a mutually-recursive group or not
algTcParent :: TyConParent, -- ^ Gives the class or family declaration
-- 'TyCon' for derived 'TyCon's representing
-- class or family instances, respectively.
-- See also 'synTcParent'
tcPromoted :: Maybe TyCon -- ^ Promoted TyCon, if any
}
-- | Represents the infinite family of tuple type constructors,
-- @()@, @(a,b)@, @(# a, b #)@ etc.
| TupleTyCon {
tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant:
-- identical to Unique of Name stored in
-- tyConName field.
tyConName :: Name, -- ^ Name of the constructor
tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just
-- the return kind)
tyConArity :: Arity, -- ^ Number of arguments this TyCon must
-- receive to be considered saturated
-- (including implicit kind variables)
tyConTupleSort :: TupleSort,-- ^ Is this a boxed, unboxed or constraint
-- tuple?
tyConTyVars :: [TyVar], -- ^ List of type and kind variables in this
-- TyCon. Includes implicit kind variables.
-- Invariant:
-- length tyConTyVars = tyConArity
dataCon :: DataCon, -- ^ Corresponding tuple data constructor
tcPromoted :: Maybe TyCon
-- ^ Nothing for unboxed tuples
}
-- | Represents type synonyms
| SynonymTyCon {
tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant:
-- identical to Unique of Name stored in
-- tyConName field.
tyConName :: Name, -- ^ Name of the constructor
tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just
-- the return kind)
tyConArity :: Arity, -- ^ Number of arguments this TyCon must
-- receive to be considered saturated
-- (including implicit kind variables)
tyConTyVars :: [TyVar], -- ^ List of type and kind variables in this
-- TyCon. Includes implicit kind variables.
-- Invariant: length tyConTyVars = tyConArity
tcRoles :: [Role], -- ^ The role for each type variable
-- This list has the same length as tyConTyVars
-- See also Note [TyCon Role signatures]
synTcRhs :: Type -- ^ Contains information about the expansion
-- of the synonym
}
-- | Represents type families
| FamilyTyCon {
tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant:
-- identical to Unique of Name stored in
-- tyConName field.
tyConName :: Name, -- ^ Name of the constructor
tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just
-- the return kind)
tyConArity :: Arity, -- ^ Number of arguments this TyCon must
-- receive to be considered saturated
-- (including implicit kind variables)
tyConTyVars :: [TyVar], -- ^ The kind and type variables used in the
-- type constructor.
-- Invariant: length tyvars = arity
-- Precisely, this list scopes over:
--
-- 1. The 'algTcStupidTheta'
-- 2. The cached types in 'algTyConRhs.NewTyCon'
-- 3. The family instance types if present
--
-- Note that it does /not/ scope over the data
-- constructors.
famTcFlav :: FamTyConFlav, -- ^ Type family flavour: open, closed,
-- abstract, built-in. See comments for
-- FamTyConFlav
famTcParent :: TyConParent -- ^ TyCon of enclosing class for
-- associated type families
}
-- | Primitive types; cannot be defined in Haskell. This includes
-- the usual suspects (such as @Int#@) as well as foreign-imported
-- types and kinds
| PrimTyCon {
tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant:
-- identical to Unique of Name stored in
-- tyConName field.
tyConName :: Name, -- ^ Name of the constructor
tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just
-- the return kind)
tyConArity :: Arity, -- ^ Number of arguments this TyCon must
-- receive to be considered saturated
-- (including implicit kind variables)
tcRoles :: [Role], -- ^ The role for each type variable
-- This list has the same length as tyConTyVars
-- See also Note [TyCon Role signatures]
primTyConRep :: PrimRep,-- ^ Many primitive tycons are unboxed, but
-- some are boxed (represented by
-- pointers). This 'PrimRep' holds that
-- information. Only relevant if tyConKind = *
isUnLifted :: Bool -- ^ Most primitive tycons are unlifted (may
-- not contain bottom) but other are lifted,
-- e.g. @RealWorld@
}
-- | Represents promoted data constructor.
| PromotedDataCon { -- See Note [Promoted data constructors]
tyConUnique :: Unique, -- ^ Same Unique as the data constructor
tyConName :: Name, -- ^ Same Name as the data constructor
tyConArity :: Arity,
tyConKind :: Kind, -- ^ Translated type of the data constructor
tcRoles :: [Role], -- ^ Roles: N for kind vars, R for type vars
dataCon :: DataCon -- ^ Corresponding data constructor
}
-- | Represents promoted type constructor.
| PromotedTyCon {
tyConUnique :: Unique, -- ^ Same Unique as the type constructor
tyConName :: Name, -- ^ Same Name as the type constructor
tyConArity :: Arity, -- ^ n if ty_con :: * -> ... -> * n times
tyConKind :: Kind, -- ^ Always TysPrim.superKind
ty_con :: TyCon -- ^ Corresponding type constructor
}
deriving Typeable
-- | Names of the fields in an algebraic record type.
-- Currently just a 'Name'; there is no separate field-label type.
type FieldLabel = Name
-- | Represents right-hand-sides of 'TyCon's for algebraic types.
-- Consumed by e.g. 'visibleDataCons', 'isGenInjAlgRhs' and 'isDistinctAlgRhs'.
data AlgTyConRhs

    -- | Says that we know nothing about this data type, except that
    -- it's represented by a pointer. Used when we export a data type
    -- abstractly into an .hi file.
  = AbstractTyCon
      Bool      -- True  <=> It's definitely a distinct data type,
                --           equal only to itself; ie not a newtype
                -- False <=> Not sure
                -- See Note [AbstractTyCon and type equality]

    -- | Represents an open type family without a fixed right hand
    -- side. Additional instances can appear at any time.
    --
    -- These are introduced by either a top level declaration:
    --
    -- > data T a :: *
    --
    -- Or an associated data type declaration, within a class declaration:
    --
    -- > class C a b where
    -- >   data T b :: *
  | DataFamilyTyCon

    -- | Information about those 'TyCon's derived from a @data@
    -- declaration. This includes data types with no constructors at
    -- all.
  | DataTyCon {
        data_cons :: [DataCon],
            -- ^ The data type constructors; can be empty if the
            --   user declares the type to have no constructors
            --
            -- INVARIANT: Kept in order of increasing 'DataCon'
            -- tag (see the tag assignment in DataCon.mkDataCon)
        is_enum :: Bool   -- ^ Cached value: is this an enumeration type?
                          --   See Note [Enumeration types]
    }

    -- | Information about those 'TyCon's derived from a @newtype@ declaration
  | NewTyCon {
        data_con :: DataCon,    -- ^ The unique constructor for the @newtype@.
                                --   It has no existentials

        nt_rhs :: Type,         -- ^ Cached value: the argument type of the
                                -- constructor, which is just the representation
                                -- type of the 'TyCon' (remember that @newtype@s
                                -- do not exist at runtime so need a different
                                -- representation type).
                                --
                                -- The free 'TyVar's of this type are the
                                -- 'tyConTyVars' from the corresponding 'TyCon'

        nt_etad_rhs :: ([TyVar], Type),
            -- ^ Same as the 'nt_rhs', but this time eta-reduced.
            -- Hence the list of 'TyVar's in this field may be
            -- shorter than the declared arity of the 'TyCon'.
            -- See Note [Newtype eta]

        nt_co :: CoAxiom Unbranched
            -- The axiom coercion that creates the @newtype@
            -- from the representation 'Type'.
            -- See Note [Newtype coercions]
            -- Invariant: arity = #tvs in nt_etad_rhs;
            -- See Note [Newtype eta]
            -- Watch out! If any newtypes become transparent
            -- again check Trac #1072.
    }
{-
Note [AbstractTyCon and type equality]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
TODO
-}
-- | Extract those 'DataCon's that we are able to learn about. Note
-- that visibility in this sense does not correspond to visibility in
-- the context of any particular user program!
visibleDataCons :: AlgTyConRhs -> [DataCon]
visibleDataCons rhs =
  case rhs of
    AbstractTyCon {}             -> []   -- constructors are hidden
    DataFamilyTyCon {}           -> []   -- a family declaration has no constructors of its own
    DataTyCon { data_cons = cs } -> cs
    NewTyCon { data_con = c }    -> [c]
-- | Both type classes as well as family instances imply implicit
-- type constructors. These implicit type constructors refer to their parent
-- structure (ie, the class or family from which they derive) using a type of
-- the following form. We use 'TyConParent' for both algebraic and synonym
-- types, but the variant 'ClassTyCon' will only be used by algebraic 'TyCon's.
data TyConParent
  = -- | An ordinary type constructor has no parent.
    NoParentTyCon

  -- | Type constructors representing a class dictionary.
  -- See Note [ATyCon for classes] in TypeRep
  | ClassTyCon
        Class           -- INVARIANT: the classTyCon of this Class is the
                        -- current tycon

  -- | An *associated* type of a class.
  | AssocFamilyTyCon
        Class           -- The class in whose declaration the family is declared
                        -- See Note [Associated families and their parent class]

  -- | Type constructors representing an instance of a *data* family.
  -- Parameters:
  --
  --  1) The type family in question
  --
  --  2) Instance types; free variables are the 'tyConTyVars'
  --  of the current 'TyCon' (not the family one). INVARIANT:
  --  the number of types matches the arity of the family 'TyCon'
  --
  --  3) A 'CoTyCon' identifying the representation
  --  type with the type instance family
  | FamInstTyCon          -- See Note [Data type families]
        (CoAxiom Unbranched)  -- The coercion axiom.
                -- Generally of kind   T ty1 ty2 ~ R:T a b c
                -- where T is the family TyCon,
                -- and R:T is the representation TyCon (ie this one)
                -- and a,b,c are the tyConTyVars of this TyCon
                --
                -- BUT may be eta-reduced; see TcInstDcls
                --     Note [Eta reduction for data family axioms]

        -- Cached fields of the CoAxiom, but adjusted to
        -- use the tyConTyVars of this TyCon
        TyCon     -- The family TyCon
        [Type]    -- Argument types (mentions the tyConTyVars of this TyCon)
                  -- Match in length the tyConTyVars of the family TyCon
        -- E.g.  data instance T [a] = ...
        -- gives a representation tycon:
        --      data R:TList a = ...
        --      axiom co a :: T [a] ~ R:TList a
        -- with R:TList's algTcParent = FamInstTyCon T [a] co
-- Debug pretty-printing for 'TyConParent'.
instance Outputable TyConParent where
    ppr parent = case parent of
      NoParentTyCon         -> text "No parent"
      ClassTyCon cls        -> text "Class parent" <+> ppr cls
      AssocFamilyTyCon cls  -> text "Class parent (assoc. family)" <+> ppr cls
      FamInstTyCon _ tc tys -> text "Family parent (family instance)"
                               <+> ppr tc <+> sep (map ppr tys)
-- | Checks the invariants of a 'TyConParent' given the appropriate type class
-- name, if any
okParent :: Name -> TyConParent -> Bool
okParent tc_name parent =
  case parent of
    NoParentTyCon             -> True
    AssocFamilyTyCon cls      -> tc_name `elem` map tyConName (classATs cls)
    ClassTyCon cls            -> tc_name == tyConName (classTyCon cls)
    FamInstTyCon _ fam_tc tys -> tyConArity fam_tc == length tys
-- | True exactly when the parent is 'NoParentTyCon'.
isNoParent :: TyConParent -> Bool
isNoParent parent =
  case parent of
    NoParentTyCon -> True
    _             -> False
--------------------
-- | The flavour of a type synonym family ('FamilyTyCon'): whether it is
-- open, closed (and with which axiom), abstract, or built-in.
data FamTyConFlav
  = -- | An open type synonym family  e.g. @type family F x y :: * -> *@
    OpenSynFamilyTyCon

    -- | A closed type synonym family  e.g.
    -- @type family F x where { F Int = Bool }@
  | ClosedSynFamilyTyCon
      (CoAxiom Branched) -- The one axiom for this family

    -- | A closed type synonym family declared in an hs-boot file with
    -- type family F a where ..
  | AbstractClosedSynFamilyTyCon

    -- | Built-in type family used by the TypeNats solver
  | BuiltInSynFamTyCon BuiltInSynFamily
{-
Note [Closed type families]
~~~~~~~~~~~~~~~~~~~~~~~~~
* In an open type family you can add new instances later. This is the
usual case.
* In a closed type family you can only put equations where the family
is defined.
Note [Promoted data constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A data constructor can be promoted to become a type constructor,
via the PromotedTyCon alternative in TyCon.
* Only data constructors with
(a) no kind polymorphism
(b) no constraints in its type (eg GADTs)
are promoted. Existentials are ok; see Trac #7347.
* The TyCon promoted from a DataCon has the *same* Name and Unique as
the DataCon. Eg. If the data constructor Data.Maybe.Just(unique 78,
say) is promoted to a TyCon whose name is Data.Maybe.Just(unique 78)
* The *kind* of a promoted DataCon may be polymorphic. Example:
type of DataCon Just :: forall (a:*). a -> Maybe a
kind of (promoted) tycon Just :: forall (a:box). a -> Maybe a
The kind is not identical to the type, because of the */box
kind signature on the forall'd variable; so the tyConKind field of
PromotedTyCon is not identical to the dataConUserType of the
DataCon. But it's the same modulo changing the variable kinds,
done by DataCon.promoteType.
* Small note: We promote the *user* type of the DataCon. Eg
data T = MkT {-# UNPACK #-} !(Bool, Bool)
The promoted kind is
MkT :: (Bool,Bool) -> T
*not*
MkT :: Bool -> Bool -> T
Note [Enumeration types]
~~~~~~~~~~~~~~~~~~~~~~~~
We define datatypes with no constructors to *not* be
enumerations; this fixes Trac #2578.  Otherwise we
end up generating an empty table for
<mod>_<type>_closure_tbl
which is used by tagToEnum# to map Int# to constructors
in an enumeration. The empty table apparently upset
the linker.
Moreover, all the data constructors must be enumerations, meaning
they have type (forall abc. T a b c). GADTs are not enumerations.
For example consider
data T a where
T1 :: T Int
T2 :: T Bool
T3 :: T a
What would [T1 ..] be? [T1,T3] :: T Int? Easiest thing is to exclude them.
See Trac #4528.
Note [Newtype coercions]
~~~~~~~~~~~~~~~~~~~~~~~~
The NewTyCon field nt_co is a CoAxiom which is used for coercing from
the representation type of the newtype, to the newtype itself. For
example,
newtype T a = MkT (a -> a)
the NewTyCon for T will contain nt_co = CoT where CoT t : T t ~ t -> t.
In the case that the right hand side is a type application
ending with the same type variables as the left hand side, we
"eta-contract" the coercion. So if we had
newtype S a = MkT [a]
then we would generate the arity 0 axiom CoS : S ~ []. The
primary reason we do this is to make newtype deriving cleaner.
In the paper we'd write
axiom CoT : (forall t. T t) ~ (forall t. [t])
and then when we used CoT at a particular type, s, we'd say
CoT @ s
which encodes as (TyConApp instCoercionTyCon [TyConApp CoT [], s])
Note [Newtype eta]
~~~~~~~~~~~~~~~~~~
Consider
newtype Parser a = MkParser (IO a) deriving Monad
Are these two types equal (to Core)?
Monad Parser
Monad IO
which we need to make the derived instance for Monad Parser.
Well, yes. But to see that easily we eta-reduce the RHS type of
Parser, in this case to IO, so that even unsaturated applications
of Parser will work right. This eta reduction is done when the type
constructor is built, and cached in NewTyCon. The cached field is
only used in coreExpandTyCon_maybe.
Here's an example that I think showed up in practice
Source code:
newtype T a = MkT [a]
newtype Foo m = MkFoo (forall a. m a -> Int)
w1 :: Foo []
w1 = ...
w2 :: Foo T
w2 = MkFoo (\(MkT x) -> case w1 of MkFoo f -> f x)
After desugaring, and discarding the data constructors for the newtypes,
we get:
w2 :: Foo T
w2 = w1
And now Lint complains unless Foo T == Foo [], and that requires T==[]
This point carries over to the newtype coercion, because we need to
say
w2 = w1 `cast` Foo CoT
so the coercion tycon CoT must have
kind: T ~ []
and arity: 0
************************************************************************
* *
\subsection{PrimRep}
* *
************************************************************************
Note [rep swamp]
GHC has a rich selection of types that represent "primitive types" of
one kind or another. Each of them makes a different set of
distinctions, and mostly the differences are for good reasons,
although it's probably true that we could merge some of these.
Roughly in order of "includes more information":
- A Width (cmm/CmmType) is simply a binary value with the specified
number of bits. It may represent a signed or unsigned integer, a
floating-point value, or an address.
data Width = W8 | W16 | W32 | W64 | W80 | W128
- Size, which is used in the native code generator, is Width +
floating point information.
data Size = II8 | II16 | II32 | II64 | FF32 | FF64 | FF80
it is necessary because e.g. the instruction to move a 64-bit float
on x86 (movsd) is different from the instruction to move a 64-bit
integer (movq), so the mov instruction is parameterised by Size.
- CmmType wraps Width with more information: GC ptr, float, or
other value.
data CmmType = CmmType CmmCat Width
data CmmCat -- "Category" (not exported)
= GcPtrCat -- GC pointer
| BitsCat -- Non-pointer
| FloatCat -- Float
It is important to have GcPtr information in Cmm, since we generate
info tables containing pointerhood for the GC from this. As for
why we have float (and not signed/unsigned) here, see Note [Signed
vs unsigned].
- ArgRep makes only the distinctions necessary for the call and
return conventions of the STG machine. It is essentially CmmType
+ void.
- PrimRep makes a few more distinctions than ArgRep: it divides
non-GC-pointers into signed/unsigned and addresses, information
that is necessary for passing these values to foreign functions.
There's another tension here: whether the type encodes its size in
bytes, or whether its size depends on the machine word size. Width
and CmmType have the size built-in, whereas ArgRep and PrimRep do not.
This means to turn an ArgRep/PrimRep into a CmmType requires DynFlags.
On the other hand, CmmType includes some "nonsense" values, such as
CmmType GcPtrCat W32 on a 64-bit machine.
-}
-- | A 'PrimRep' is an abstraction of a type.  It contains information that
-- the code generator needs in order to pass arguments, return results,
-- and store values of this type.
data PrimRep
  = VoidRep
  | PtrRep         -- ^ Pointer to a (GC-managed) Haskell value; see 'isGcPtrRep'
  | IntRep         -- ^ Signed, word-sized value
  | WordRep        -- ^ Unsigned, word-sized value
  | Int64Rep       -- ^ Signed, 64 bit value (with 32-bit words only)
  | Word64Rep      -- ^ Unsigned, 64 bit value (with 32-bit words only)
  | AddrRep        -- ^ A pointer, but /not/ to a Haskell value (use 'PtrRep')
  | FloatRep
  | DoubleRep
  -- ETA-specific representations
  | BoolRep
  | CharRep
  | ByteRep
  | ShortRep
  | ObjectRep Text   -- ^ Object reference; the 'Text' is presumably the class
                     --   name (see 'getObjectClass') -- TODO confirm
  | ArrayRep PrimRep -- ^ Array whose elements have the given 'PrimRep'
  -- | VecRep Int PrimElemRep -- ^ A vector
  deriving( Eq, Show )
-- data PrimElemRep
-- = Int8ElemRep
-- | Int16ElemRep
-- | Int32ElemRep
-- | Int64ElemRep
-- | Word8ElemRep
-- | Word16ElemRep
-- | Word32ElemRep
-- | Word64ElemRep
-- | FloatElemRep
-- | DoubleElemRep
-- deriving( Eq, Show )
-- 'PrimRep' is printed via its derived 'Show' instance.
instance Outputable PrimRep where
    ppr = text . show
-- instance Outputable PrimElemRep where
-- ppr r = text (show r)
-- | Does this 'PrimRep' denote the absence of a runtime value?
isVoidRep :: PrimRep -> Bool
isVoidRep rep =
  case rep of
    VoidRep -> True
    _       -> False
-- | Is this 'PrimRep' a pointer the garbage collector must follow ('PtrRep')?
isGcPtrRep :: PrimRep -> Bool
isGcPtrRep rep =
  case rep of
    PtrRep -> True
    _      -> False
-- | Is this 'PrimRep' object-like, i.e. an 'ObjectRep' or an 'ArrayRep'?
isObjectRep :: PrimRep -> Bool
isObjectRep rep =
  case rep of
    ObjectRep _ -> True
    ArrayRep _  -> True
    _           -> False
-- | Extract the class-name 'Text' carried by an 'ObjectRep'.
-- Calls 'error' on every other 'PrimRep', including 'ArrayRep', whose
-- class-name computation is not implemented (see the commented-out code).
getObjectClass :: PrimRep -> Text
getObjectClass (ObjectRep t) = t
-- Fix: the previous clause bound 'rep' without using it (-Wunused-matches)
-- and applied '$' to a plain string literal redundantly.
getObjectClass (ArrayRep _)  = error "getObjectClass: Array type"
                               -- arrayWrap . objectWrap $ getObjectClass rep
getObjectClass rep           = error $ "getObjectClass: " ++ show rep
-- | Find the size of a 'PrimRep', in words.
--
-- The sizes here are hard-coded rather than derived from 'DynFlags', so the
-- 'DynFlags' argument is ignored (kept only for interface compatibility).
primRepSizeW :: DynFlags -> PrimRep -> Int
primRepSizeW _ IntRep        = 1
primRepSizeW _ WordRep       = 1
primRepSizeW _ Int64Rep      = 2
primRepSizeW _ Word64Rep     = 2
primRepSizeW _ FloatRep      = 1 -- NB. might not take a full word
primRepSizeW _ DoubleRep     = 2
primRepSizeW _ AddrRep       = 1
primRepSizeW _ PtrRep        = 1
primRepSizeW _ VoidRep       = 0
primRepSizeW _ BoolRep       = 1
primRepSizeW _ CharRep       = 1
primRepSizeW _ ByteRep       = 1
primRepSizeW _ ShortRep      = 1
primRepSizeW _ (ObjectRep _) = 1
primRepSizeW _ (ArrayRep _)  = 1
  -- NOTE(review): 'ArrayRep' previously had no equation, making this function
  -- non-exhaustive (runtime pattern-match failure).  An array value is a
  -- single reference, like 'ObjectRep' -- confirm this is the intended size.
-- Also fixed: three clauses bound 'dflags' without using it (-Wunused-matches).
-- primRepSizeW dflags (VecRep len rep) = len * primElemRepSizeB rep `quot` 4
-- primElemRepSizeB :: PrimElemRep -> Int
-- primElemRepSizeB Int8ElemRep = 1
-- primElemRepSizeB Int16ElemRep = 2
-- primElemRepSizeB Int32ElemRep = 4
-- primElemRepSizeB Int64ElemRep = 8
-- primElemRepSizeB Word8ElemRep = 1
-- primElemRepSizeB Word16ElemRep = 2
-- primElemRepSizeB Word32ElemRep = 4
-- primElemRepSizeB Word64ElemRep = 8
-- primElemRepSizeB FloatElemRep = 4
-- primElemRepSizeB DoubleElemRep = 8
{-
************************************************************************
* *
\subsection{TyCon Construction}
* *
************************************************************************
Note: the TyCon constructors all take a Kind as one argument, even though
they could, in principle, work out their Kind from their other arguments.
But to do so they need functions from Types, and that makes a nasty
module mutual-recursion. And they aren't called from many places.
So we compromise, and move their Kind calculation to the call site.
-}
-- | Given the name of the function type constructor and its kind, create the
-- corresponding 'TyCon'. It is recommended to use 'TypeRep.funTyCon' if you
-- want this functionality.
mkFunTyCon :: Name -> Kind -> TyCon
mkFunTyCon name kind =
  FunTyCon { tyConUnique = nameUnique name
           , tyConName   = name
           , tyConKind   = kind
           , tyConArity  = 2   -- (->) is always applied to exactly two types
           }
-- | This is the making of an algebraic 'TyCon'. Notably, you have to
-- pass in the generic (in the -XGenerics sense) information about the
-- type constructor - you can get hold of it easily (see Generics
-- module)
mkAlgTyCon :: Name
           -> Kind              -- ^ Kind of the resulting 'TyCon'
           -> [TyVar]           -- ^ 'TyVar's scoped over: see 'tyConTyVars'.
                                --   Arity is inferred from the length of this
                                --   list
           -> [Role]            -- ^ The roles for each TyVar
           -> Maybe CType       -- ^ The C type this type corresponds to
                                --   when using the CAPI FFI
           -> [PredType]        -- ^ Stupid theta: see 'algTcStupidTheta'
           -> AlgTyConRhs       -- ^ Information about data constructors
           -> TyConParent
           -> RecFlag           -- ^ Is the 'TyCon' recursive?
           -> Bool              -- ^ Was the 'TyCon' declared with GADT syntax?
           -> Maybe TyCon       -- ^ Promoted version
           -> TyCon
mkAlgTyCon name kind tyvars roles cType stupid rhs parent is_rec gadt_syn prom_tc
  = AlgTyCon {
        tyConName        = name,
        tyConUnique      = nameUnique name,
        tyConKind        = kind,
        tyConArity       = length tyvars,
        tyConTyVars      = tyvars,
        tcRoles          = roles,
        tyConCType       = cType,
        algTcStupidTheta = stupid,
        algTcRhs         = rhs,
        -- The parent invariant is checked here in debug builds
        algTcParent      = ASSERT2( okParent name parent, ppr name $$ ppr parent ) parent,
        algTcRec         = is_rec,
        algTcGadtSyntax  = gadt_syn,
        tcPromoted       = prom_tc
    }
-- | Simpler specialization of 'mkAlgTyCon' for classes: no C type, no
-- stupid theta, not GADT syntax, and the parent is the class itself.
mkClassTyCon :: Name -> Kind -> [TyVar] -> [Role] -> AlgTyConRhs -> Class
             -> RecFlag -> TyCon
mkClassTyCon name kind tyvars roles rhs clas is_rec
  = mkAlgTyCon name kind tyvars roles Nothing [] rhs (ClassTyCon clas)
               is_rec False
               Nothing -- Class TyCons are not promoted
-- | Construct a (boxed or unboxed) tuple 'TyCon' of the given arity.
mkTupleTyCon :: Name
             -> Kind    -- ^ Kind of the resulting 'TyCon'
             -> Arity   -- ^ Arity of the tuple
             -> [TyVar] -- ^ 'TyVar's scoped over: see 'tyConTyVars'
             -> DataCon
             -> TupleSort    -- ^ Whether the tuple is boxed or unboxed
             -> Maybe TyCon  -- ^ Promoted version
             -> TyCon
mkTupleTyCon name kind arity tyvars con sort prom_tc
  = TupleTyCon {
        tyConUnique    = nameUnique name,
        tyConName      = name,
        tyConKind      = kind,
        tyConArity     = arity,
        tyConTupleSort = sort,
        tyConTyVars    = tyvars,
        dataCon        = con,
        tcPromoted     = prom_tc
    }
-- | Create an unlifted primitive 'TyCon', such as @Int#@
mkPrimTyCon :: Name -> Kind -> [Role] -> PrimRep -> TyCon
mkPrimTyCon name kind roles rep
  = mkPrimTyCon' name kind roles rep True  -- True <=> unlifted
-- | Kind constructors: built as primitive 'TyCon's with no roles and no
-- runtime representation ('VoidRep').
mkKindTyCon :: Name -> Kind -> TyCon
mkKindTyCon name kind
  = mkPrimTyCon' name kind [] VoidRep True
-- | Create a lifted primitive 'TyCon' such as @RealWorld@
mkLiftedPrimTyCon :: Name -> Kind -> [Role] -> PrimRep -> TyCon
mkLiftedPrimTyCon name kind roles rep
  = mkPrimTyCon' name kind roles rep False  -- False <=> lifted
-- | Shared worker for the primitive-'TyCon' builders above; the final 'Bool'
-- says whether the resulting 'TyCon' is unlifted.
mkPrimTyCon' :: Name -> Kind -> [Role] -> PrimRep -> Bool -> TyCon
mkPrimTyCon' name kind roles rep is_unlifted
  = PrimTyCon {
        tyConName    = name,
        tyConUnique  = nameUnique name,
        tyConKind    = kind,
        tyConArity   = length roles,  -- arity inferred from the role list
        tcRoles      = roles,
        primTyConRep = rep,
        isUnLifted   = is_unlifted
    }
-- | Create a type synonym 'TyCon' whose right-hand side is the given 'Type';
-- arity is inferred from the number of 'TyVar's.
mkSynonymTyCon :: Name -> Kind -> [TyVar] -> [Role] -> Type -> TyCon
mkSynonymTyCon name kind tyvars roles rhs
  = SynonymTyCon {
        tyConName   = name,
        tyConUnique = nameUnique name,
        tyConKind   = kind,
        tyConArity  = length tyvars,
        tyConTyVars = tyvars,
        tcRoles     = roles,
        synTcRhs    = rhs
    }
-- | Create a type family 'TyCon'
mkFamilyTyCon:: Name -> Kind -> [TyVar] -> FamTyConFlav -> TyConParent
-> TyCon
mkFamilyTyCon name kind tyvars flav parent
= FamilyTyCon
{ tyConUnique = nameUnique name
, tyConName = name
, tyConKind = kind
, tyConArity = length tyvars
, tyConTyVars = tyvars
, famTcFlav = flav
, famTcParent = parent
}
-- | Create a promoted data constructor 'TyCon'
-- Somewhat dodgily, we give it the same Name
-- as the data constructor itself; when we pretty-print
-- the TyCon we add a quote; see the Outputable TyCon instance
mkPromotedDataCon :: DataCon -> Name -> Unique -> Kind -> [Role] -> TyCon
mkPromotedDataCon con name unique kind roles =
  PromotedDataCon { tyConName   = name
                  , tyConUnique = unique
                  , tyConArity  = length roles  -- one argument per role
                  , tcRoles     = roles
                  , tyConKind   = kind
                  , dataCon     = con
                  }
-- | Create a promoted type constructor 'TyCon'
-- Somewhat dodgily, we give it the same Name
-- as the type constructor itself
mkPromotedTyCon :: TyCon -> Kind -> TyCon
mkPromotedTyCon tc kind =
  PromotedTyCon { tyConName   = tc_name
                , tyConUnique = tc_uniq
                , tyConArity  = tyConArity tc
                , tyConKind   = kind
                , ty_con      = tc
                }
  where
    tc_name = getName tc    -- shared with the unpromoted tycon
    tc_uniq = getUnique tc
-- | Is this the function type constructor @(->)@?
isFunTyCon :: TyCon -> Bool
isFunTyCon tc =
  case tc of
    FunTyCon {} -> True
    _           -> False
-- | Test if the 'TyCon' is algebraic but abstract (invisible data constructors)
isAbstractTyCon :: TyCon -> Bool
isAbstractTyCon tc =
  case tc of
    AlgTyCon { algTcRhs = AbstractTyCon {} } -> True
    _                                        -> False
-- | Make an algebraic 'TyCon' abstract. Panics if the supplied 'TyCon' is not
-- algebraic
makeTyConAbstract :: TyCon -> TyCon
makeTyConAbstract tc@(AlgTyCon { algTcRhs = rhs })
= tc { algTcRhs = AbstractTyCon (isDistinctAlgRhs rhs) }
makeTyConAbstract tc = pprPanic "makeTyConAbstract" (ppr tc)
-- | Does this 'TyCon' represent something that cannot be defined in Haskell?
isPrimTyCon :: TyCon -> Bool
isPrimTyCon tc =
  case tc of
    PrimTyCon {} -> True
    _            -> False
-- | Is this 'TyCon' unlifted (i.e. cannot contain bottom)? Note that this can
-- only be true for primitive and unboxed-tuple 'TyCon's
isUnLiftedTyCon :: TyCon -> Bool
isUnLiftedTyCon tc =
  case tc of
    PrimTyCon { isUnLifted = is_unlifted } -> is_unlifted
    TupleTyCon { tyConTupleSort = sort }   -> not (isBoxed (tupleSortBoxity sort))
    _                                      -> False
-- | Returns @True@ if the supplied 'TyCon' resulted from either a
-- @data@ or @newtype@ declaration
isAlgTyCon :: TyCon -> Bool
isAlgTyCon tc =
  case tc of
    AlgTyCon {}   -> True
    TupleTyCon {} -> True
    _             -> False
isDataTyCon :: TyCon -> Bool
-- ^ Returns @True@ for data types that are /definitely/ represented by
-- heap-allocated constructors. These are scrutinised by Core-level
-- @case@ expressions, and they get info tables allocated for them.
--
-- Generally, the function will be true for all @data@ types and false
-- for @newtype@s, unboxed tuples and type family 'TyCon's. But it is
-- not guaranteed to return @True@ in all cases that it could.
--
-- NB: for a data type family, only the /instance/ 'TyCon's
--     get an info table. The family declaration 'TyCon' does not
isDataTyCon (AlgTyCon {algTcRhs = rhs})
  = case rhs of
        DataTyCon {}       -> True
        NewTyCon {}        -> False
        DataFamilyTyCon {} -> False
        AbstractTyCon {}   -> False      -- We don't know, so return False
isDataTyCon (TupleTyCon {tyConTupleSort = sort}) = isBoxed (tupleSortBoxity sort)
isDataTyCon _ = False
-- | 'isInjectiveTyCon' is true of 'TyCon's for which this property holds
-- (where X is the role passed in):
--   If (T a1 b1 c1) ~X (T a2 b2 c2), then (a1 ~X1 a2), (b1 ~X2 b2), and (c1 ~X3 c2)
-- (where X1, X2, and X3, are the roles given by tyConRolesX tc X)
-- See also Note [Decomposing equalities] in TcCanonical
--
-- NB: the equations overlap, so their order is significant: the 'Phantom'
-- equation must come first, and the (AlgTyCon, Nominal) equation must
-- precede the (AlgTyCon, Representational) one.
isInjectiveTyCon :: TyCon -> Role -> Bool
isInjectiveTyCon _ Phantom = False
isInjectiveTyCon (FunTyCon {}) _ = True
isInjectiveTyCon (AlgTyCon {}) Nominal = True
isInjectiveTyCon (AlgTyCon {algTcRhs = rhs}) Representational
  = isGenInjAlgRhs rhs
isInjectiveTyCon (TupleTyCon {}) _ = True
isInjectiveTyCon (SynonymTyCon {}) _ = False
isInjectiveTyCon (FamilyTyCon {}) _ = False
isInjectiveTyCon (PrimTyCon {}) _ = True
isInjectiveTyCon (PromotedDataCon {}) _ = True
isInjectiveTyCon (PromotedTyCon {ty_con = tc}) r
  = isInjectiveTyCon tc r   -- defer to the underlying type constructor
-- | 'isGenerativeTyCon' is true of 'TyCon's for which this property holds
-- (where X is the role passed in):
--   If (T tys ~X t), then (t's head ~X T).
-- See also Note [Decomposing equalities] in TcCanonical
isGenerativeTyCon :: TyCon -> Role -> Bool
isGenerativeTyCon = isInjectiveTyCon
  -- as it happens, generativity and injectivity coincide, but there's
  -- no a priori reason this must be the case
-- | Is this an 'AlgTyConRhs' of a 'TyCon' that is generative and injective
-- with respect to representational equality?
isGenInjAlgRhs :: AlgTyConRhs -> Bool
isGenInjAlgRhs rhs =
  case rhs of
    DataTyCon {}           -> True
    DataFamilyTyCon {}     -> False
    AbstractTyCon distinct -> distinct
    NewTyCon {}            -> False
-- | 'isDistinctTyCon' is true of 'TyCon's that are equal only to
-- themselves, even via coercions (except for unsafeCoerce).
-- This excludes newtypes, type functions, type synonyms.
-- It relates directly to the FC consistency story:
--     If the axioms are consistent,
--     and  co : S tys ~ T tys, and S,T are "distinct" TyCons,
--     then S=T.
-- Cf Note [Pruning dead case alternatives] in Unify
isDistinctTyCon :: TyCon -> Bool
isDistinctTyCon tc =
  case tc of
    AlgTyCon { algTcRhs = rhs } -> isDistinctAlgRhs rhs
    FunTyCon {}                 -> True
    TupleTyCon {}               -> True
    PrimTyCon {}                -> True
    PromotedDataCon {}          -> True
    _                           -> False
-- | The 'AlgTyConRhs' half of 'isDistinctTyCon'.
isDistinctAlgRhs :: AlgTyConRhs -> Bool
isDistinctAlgRhs rhs =
  case rhs of
    DataTyCon {}           -> True
    DataFamilyTyCon {}     -> False
    AbstractTyCon distinct -> distinct
    NewTyCon {}            -> False
-- | Is this the 'TyCon' for a @newtype@?
isNewTyCon :: TyCon -> Bool
isNewTyCon tc =
  case tc of
    AlgTyCon { algTcRhs = NewTyCon {} } -> True
    _                                   -> False
-- | Take a 'TyCon' apart into the 'TyVar's it scopes over, the 'Type' it expands
-- into, and (possibly) a coercion from the representation type to the @newtype@.
-- Returns @Nothing@ if this is not possible.
unwrapNewTyCon_maybe :: TyCon -> Maybe ([TyVar], Type, CoAxiom Unbranched)
unwrapNewTyCon_maybe (AlgTyCon { tyConTyVars = tvs,
                                 algTcRhs = NewTyCon { nt_co = co,
                                                       nt_rhs = rhs }})
  = Just (tvs, rhs, co)
unwrapNewTyCon_maybe _ = Nothing
-- | Like 'unwrapNewTyCon_maybe', but returns the eta-reduced 'nt_etad_rhs'
-- (and its possibly shorter 'TyVar' list) instead of the full right-hand side.
-- See Note [Newtype eta].
unwrapNewTyConEtad_maybe :: TyCon -> Maybe ([TyVar], Type, CoAxiom Unbranched)
unwrapNewTyConEtad_maybe (AlgTyCon { algTcRhs = NewTyCon { nt_co = co,
                                                           nt_etad_rhs = (tvs,rhs) }})
  = Just (tvs, rhs, co)
unwrapNewTyConEtad_maybe _ = Nothing
isProductTyCon :: TyCon -> Bool
-- True of datatypes or newtypes that have
--   one, vanilla, data constructor
-- (tuples always qualify; abstract and family rhs's never do)
isProductTyCon tc@(AlgTyCon {}) = case algTcRhs tc of
                                    DataTyCon{ data_cons = [data_con] }
                                                -> isVanillaDataCon data_con
                                    NewTyCon {} -> True
                                    _           -> False
isProductTyCon (TupleTyCon {})  = True
isProductTyCon _                = False
isDataProductTyCon_maybe :: TyCon -> Maybe DataCon
-- True of datatypes (not newtypes) with
--   one, vanilla, data constructor;
-- returns that constructor
isDataProductTyCon_maybe (AlgTyCon { algTcRhs = DataTyCon { data_cons = cons } })
  | [con] <- cons          -- Singleton
  , isVanillaDataCon con   -- Vanilla
  = Just con
isDataProductTyCon_maybe (TupleTyCon { dataCon = con })
  = Just con
isDataProductTyCon_maybe _ = Nothing
-- | Is this a 'TyCon' representing a regular H98 type synonym (@type@)?
isTypeSynonymTyCon :: TyCon -> Bool
isTypeSynonymTyCon tc =
  case tc of
    SynonymTyCon {} -> True
    _               -> False
-- As for newtypes, it is in some contexts important to distinguish between
-- closed synonyms and synonym families, as synonym families have no unique
-- right hand side to which a synonym family application can expand.
--
isDecomposableTyCon :: TyCon -> Bool
-- True iff we can decompose (T a b c) into ((T a b) c)
--   I.e. is it injective?
-- Specifically NOT true of synonyms (open and otherwise)
-- Ultimately we may have injective associated types
-- in which case this test will become more interesting
--
-- It'd be unusual to call isDecomposableTyCon on a regular H98
-- type synonym, because you should probably have expanded it first
-- But regardless, it's not decomposable
isDecomposableTyCon (SynonymTyCon {}) = False
isDecomposableTyCon (FamilyTyCon {})  = False
isDecomposableTyCon _other            = True   -- everything else is injective
-- | Is this an algebraic 'TyCon' declared with the GADT syntax?
isGadtSyntaxTyCon :: TyCon -> Bool
isGadtSyntaxTyCon tc =
  case tc of
    AlgTyCon { algTcGadtSyntax = res } -> res
    _                                  -> False
-- | Is this an algebraic 'TyCon' which is just an enumeration of values?
-- See Note [Enumeration types] in TyCon
isEnumerationTyCon :: TyCon -> Bool
isEnumerationTyCon tc =
  case tc of
    AlgTyCon { algTcRhs = DataTyCon { is_enum = res } } -> res
    TupleTyCon { tyConArity = arity }                   -> arity == 0
    _                                                   -> False
-- | Is this a 'TyCon', synonym or otherwise, that defines a family?
isFamilyTyCon :: TyCon -> Bool
isFamilyTyCon tc =
  case tc of
    FamilyTyCon {}                             -> True
    AlgTyCon { algTcRhs = DataFamilyTyCon {} } -> True
    _                                          -> False
-- | Is this a 'TyCon', synonym or otherwise, that defines a family with
-- instances?
isOpenFamilyTyCon :: TyCon -> Bool
isOpenFamilyTyCon tc =
  case tc of
    FamilyTyCon { famTcFlav = OpenSynFamilyTyCon } -> True
    AlgTyCon { algTcRhs = DataFamilyTyCon }        -> True
    _                                              -> False
-- | Is this a type family 'TyCon' (of any 'FamTyConFlav')?
isTypeFamilyTyCon :: TyCon -> Bool
isTypeFamilyTyCon (FamilyTyCon {}) = True
isTypeFamilyTyCon _ = False
-- | Is this an /open/ type family, i.e. one whose instances may appear
-- anywhere?
isOpenTypeFamilyTyCon :: TyCon -> Bool
isOpenTypeFamilyTyCon tc =
  case tc of
    FamilyTyCon { famTcFlav = OpenSynFamilyTyCon } -> True
    _                                              -> False
-- leave out abstract closed families here
-- | If this 'TyCon' is a closed type synonym family with a visible
-- definition, return its defining axiom.
isClosedSynFamilyTyCon_maybe :: TyCon -> Maybe (CoAxiom Branched)
isClosedSynFamilyTyCon_maybe
  (FamilyTyCon {famTcFlav = ClosedSynFamilyTyCon ax}) = Just ax
isClosedSynFamilyTyCon_maybe _ = Nothing
-- | If this 'TyCon' is a built-in type family (used by the TypeNats solver),
-- return its 'BuiltInSynFamily' implementation.
isBuiltInSynFamTyCon_maybe :: TyCon -> Maybe BuiltInSynFamily
isBuiltInSynFamTyCon_maybe
  (FamilyTyCon {famTcFlav = BuiltInSynFamTyCon ops }) = Just ops
isBuiltInSynFamTyCon_maybe _ = Nothing
-- | Is this a data family 'TyCon', i.e. one arising from a @data family@
-- declaration (whose instances may appear separately)?
isDataFamilyTyCon :: TyCon -> Bool
isDataFamilyTyCon (AlgTyCon {algTcRhs = DataFamilyTyCon {}}) = True
isDataFamilyTyCon _ = False
-- | Is this 'TyCon' an associated type of some class, i.e. does
-- 'tyConAssoc_maybe' succeed on it?
isTyConAssoc :: TyCon -> Bool
isTyConAssoc tc = isJust (tyConAssoc_maybe tc)
-- | The 'Class' in whose declaration this 'TyCon' is associated, if its
-- parent is an 'AssocFamilyTyCon'.
tyConAssoc_maybe :: TyCon -> Maybe Class
tyConAssoc_maybe tc
  | AssocFamilyTyCon cls <- tyConParent tc = Just cls
  | otherwise                              = Nothing
-- The unit tycon didn't used to be classed as a tuple tycon
-- but I thought that was silly so I've undone it
-- If it can't be for some reason, it should be a AlgTyCon
isTupleTyCon :: TyCon -> Bool
-- ^ Does this 'TyCon' represent a tuple?
--
-- NB: when compiling @Data.Tuple@, the tycons won't reply @True@ to
-- 'isTupleTyCon', because they are built as 'AlgTyCons'.  However they
-- get spat into the interface file as tuple tycons, so I don't think
-- it matters.
isTupleTyCon (TupleTyCon {}) = True
isTupleTyCon _ = False
-- | Is this the 'TyCon' for an unboxed tuple?
isUnboxedTupleTyCon :: TyCon -> Bool
isUnboxedTupleTyCon tc =
  case tc of
    TupleTyCon { tyConTupleSort = sort } -> not (isBoxed (tupleSortBoxity sort))
    _                                    -> False
-- | Is this the 'TyCon' for a boxed tuple?
isBoxedTupleTyCon :: TyCon -> Bool
isBoxedTupleTyCon tc =
  case tc of
    TupleTyCon { tyConTupleSort = sort } -> isBoxed (tupleSortBoxity sort)
    _                                    -> False
-- | Extract the boxity of the given 'TyCon', if it is a 'TupleTyCon'.
-- Panics otherwise (via the partial 'tyConTupleSort' selector)
tupleTyConBoxity :: TyCon -> Boxity
tupleTyConBoxity tc = tupleSortBoxity (tyConTupleSort tc)
-- | Extract the 'TupleSort' of the given 'TyCon', if it is a 'TupleTyCon'.
-- Panics otherwise (partial record selector)
tupleTyConSort :: TyCon -> TupleSort
tupleTyConSort = tyConTupleSort
-- | Extract the arity of the given 'TyCon', if it is a 'TupleTyCon'.
-- Panics otherwise
tupleTyConArity :: TyCon -> Arity
tupleTyConArity = tyConArity
-- | Is this a recursive 'TyCon'?
isRecursiveTyCon :: TyCon -> Bool
isRecursiveTyCon tc =
  case tc of
    AlgTyCon { algTcRec = Recursive } -> True
    _                                 -> False
-- | The cached promoted version of this 'TyCon', if any; only algebraic and
-- tuple 'TyCon's carry one ('tcPromoted').
promotableTyCon_maybe :: TyCon -> Maybe TyCon
promotableTyCon_maybe (AlgTyCon { tcPromoted = prom }) = prom
promotableTyCon_maybe (TupleTyCon { tcPromoted = prom }) = prom
promotableTyCon_maybe _ = Nothing
-- | Fetch the promoted version of a 'TyCon', panicking if it has none.
promoteTyCon :: TyCon -> TyCon
promoteTyCon tc =
  maybe (pprPanic "promoteTyCon" (ppr tc)) id (promotableTyCon_maybe tc)
-- | Is this a PromotedTyCon?
isPromotedTyCon :: TyCon -> Bool
isPromotedTyCon tc =
  case tc of
    PromotedTyCon {} -> True
    _                -> False
-- | Retrieves the underlying type constructor if this is a PromotedTyCon
isPromotedTyCon_maybe :: TyCon -> Maybe TyCon
isPromotedTyCon_maybe tycon =
  case tycon of
    PromotedTyCon { ty_con = tc } -> Just tc
    _                             -> Nothing
-- | Is this a PromotedDataCon?
isPromotedDataCon :: TyCon -> Bool
isPromotedDataCon tc =
  case tc of
    PromotedDataCon {} -> True
    _                  -> False
-- | Retrieves the underlying 'DataCon' if this is a PromotedDataCon
isPromotedDataCon_maybe :: TyCon -> Maybe DataCon
isPromotedDataCon_maybe tc =
  case tc of
    PromotedDataCon { dataCon = dc } -> Just dc
    _                                -> Nothing
-- | Identifies implicit tycons that, in particular, do not go into interface
-- files (because they are implicitly reconstructed when the interface is
-- read).
--
-- Note that:
--
-- * Associated families are implicit, as they are re-constructed from
-- the class declaration in which they reside, and
--
-- * Family instances are /not/ implicit as they represent the instance body
-- (similar to a @dfun@ does that for a class instance).
isImplicitTyCon :: TyCon -> Bool
isImplicitTyCon (FunTyCon {}) = True
isImplicitTyCon (TupleTyCon {}) = True
isImplicitTyCon (PrimTyCon {}) = True
isImplicitTyCon (PromotedDataCon {}) = True
isImplicitTyCon (PromotedTyCon {}) = True
-- Algebraic and family tycons are implicit only when they are associated
-- with a class; stand-alone declarations go into interface files.
isImplicitTyCon (AlgTyCon { algTcParent = AssocFamilyTyCon {} }) = True
isImplicitTyCon (AlgTyCon {}) = False
isImplicitTyCon (FamilyTyCon { famTcParent = AssocFamilyTyCon {} }) = True
isImplicitTyCon (FamilyTyCon {}) = False
isImplicitTyCon (SynonymTyCon {}) = False

-- | The @CTYPE@ pragma information attached to an algebraic 'TyCon', if any.
tyConCType_maybe :: TyCon -> Maybe CType
tyConCType_maybe tc@(AlgTyCon {}) = tyConCType tc
tyConCType_maybe _ = Nothing

-- | Does this 'TyCon' have the unique of the @jobject@ primitive TyCon?
-- (Presumably the JVM object type in this Eta backend — compared by key.)
isObjectTyCon :: TyCon -> Bool
isObjectTyCon = (`hasKey` jobjectPrimTyConKey)
{-
-----------------------------------------------
-- Expand type-constructor applications
-----------------------------------------------
-}
-- | Expand a (saturated or over-saturated) application of a type synonym.
tcExpandTyCon_maybe, coreExpandTyCon_maybe
  :: TyCon
  -> [tyco]                 -- ^ Arguments to 'TyCon'
  -> Maybe ([(TyVar,tyco)],
            Type,
            [tyco])         -- ^ Returns a 'TyVar' substitution, the body
                            -- type of the synonym (not yet substituted)
                            -- and any arguments remaining from the
                            -- application

-- ^ Used to create the view the /typechecker/ has on 'TyCon's.
-- We expand (closed) synonyms only, cf. 'coreExpandTyCon_maybe'
tcExpandTyCon_maybe (SynonymTyCon { tyConTyVars = tvs
                                  , synTcRhs = rhs }) tys
  = expand tvs rhs tys
tcExpandTyCon_maybe _ _ = Nothing

---------------
-- ^ Used to create the view /Core/ has on 'TyCon's. We expand
-- not only closed synonyms like 'tcExpandTyCon_maybe',
-- but also non-recursive @newtype@s
coreExpandTyCon_maybe tycon tys = tcExpandTyCon_maybe tycon tys
----------------
-- | Instantiate the template type of a synonym against a list of arguments.
--
-- Pairs up the synonym's type variables with the arguments.  If there are at
-- least as many arguments as variables, returns the substitution, the
-- (unsubstituted) body type, and the leftover arguments; if the synonym is
-- under-applied, returns 'Nothing'.
--
-- This walks the two lists together once, instead of comparing @length tvs@
-- with @length tys@ up front, so it never traverses more of the (possibly
-- long) argument list than the number of type variables requires.
expand :: [TyVar] -> Type -- Template
       -> [a]             -- Args
       -> Maybe ([(TyVar,a)], Type, [a]) -- Expansion
expand tvs rhs tys = go tvs tys []
  where
    -- Accumulate substitution pairs; reverse at the end to preserve the
    -- left-to-right order of the type variables.
    go [] rest acc = Just (reverse acc, rhs, rest)
    go (_:_) [] _ = Nothing -- more tyvars than args: under-applied
    go (tv:tvs') (ty:tys') acc = go tvs' tys' ((tv, ty) : acc)
-- | As 'tyConDataCons_maybe', but returns the empty list of constructors if no
-- constructors could be found
tyConDataCons :: TyCon -> [DataCon]
-- It's convenient for tyConDataCons to return the
-- empty list for type synonyms etc
tyConDataCons tycon = tyConDataCons_maybe tycon `orElse` []

-- | Determine the 'DataCon's originating from the given 'TyCon', if the 'TyCon'
-- is the sort that can have any constructors (note: this does not include
-- abstract algebraic types)
tyConDataCons_maybe :: TyCon -> Maybe [DataCon]
tyConDataCons_maybe (AlgTyCon {algTcRhs = DataTyCon { data_cons = cons }})
  = Just cons
tyConDataCons_maybe (AlgTyCon {algTcRhs = NewTyCon { data_con = con }})
  = Just [con]
tyConDataCons_maybe (TupleTyCon {dataCon = con})
  = Just [con]
tyConDataCons_maybe _
  = Nothing

-- | Determine the number of value constructors a 'TyCon' has. Panics if the
-- 'TyCon' is not algebraic or a tuple
tyConFamilySize :: TyCon -> Int
tyConFamilySize (AlgTyCon {algTcRhs = DataTyCon {data_cons = cons}}) =
  length cons
tyConFamilySize (AlgTyCon {algTcRhs = NewTyCon {}}) = 1
-- A data family declaration carries no constructors of its own.
tyConFamilySize (AlgTyCon {algTcRhs = DataFamilyTyCon {}}) = 0
tyConFamilySize (TupleTyCon {}) = 1
tyConFamilySize other = pprPanic "tyConFamilySize:" (ppr other)

-- | Extract an 'AlgTyConRhs' with information about data constructors from an
-- algebraic or tuple 'TyCon'. Panics for any other sort of 'TyCon'
algTyConRhs :: TyCon -> AlgTyConRhs
algTyConRhs (AlgTyCon {algTcRhs = rhs}) = rhs
-- A tuple is presented as an ordinary single-constructor data type;
-- it counts as an enumeration only when nullary.
algTyConRhs (TupleTyCon {dataCon = con, tyConArity = arity})
  = DataTyCon { data_cons = [con], is_enum = arity == 0 }
algTyConRhs other = pprPanic "algTyConRhs" (ppr other)
-- | Get the list of roles for the type parameters of a TyCon
tyConRoles :: TyCon -> [Role]
-- See also Note [TyCon Role signatures]
tyConRoles tc
  = case tc of
    { FunTyCon {} -> const_role Representational
    ; AlgTyCon { tcRoles = roles } -> roles
    ; TupleTyCon {} -> const_role Representational
    ; SynonymTyCon { tcRoles = roles } -> roles
    ; FamilyTyCon {} -> const_role Nominal
    ; PrimTyCon { tcRoles = roles } -> roles
    ; PromotedDataCon { tcRoles = roles } -> roles
    ; PromotedTyCon {} -> const_role Nominal
    }
  where
    -- One copy of the same role for each of the TyCon's parameters.
    const_role r = replicate (tyConArity tc) r
-- | Extract the bound type variables and the right hand side of a @newtype@
-- 'TyCon'. Panics if the 'TyCon' is not a newtype.
newTyConRhs :: TyCon -> ([TyVar], Type)
newTyConRhs (AlgTyCon {tyConTyVars = tvs, algTcRhs = NewTyCon { nt_rhs = rhs }})
  = (tvs, rhs)
newTyConRhs tycon = pprPanic "newTyConRhs" (ppr tycon)

-- | The number of type parameters that need to be passed to a newtype to
-- resolve it. May be less than in the definition if it can be eta-contracted.
newTyConEtadArity :: TyCon -> Int
newTyConEtadArity (AlgTyCon {algTcRhs = NewTyCon { nt_etad_rhs = tvs_rhs }})
  = length (fst tvs_rhs)
newTyConEtadArity tycon = pprPanic "newTyConEtadArity" (ppr tycon)

-- | Extract the bound type variables and type expansion of an eta-contracted
-- @newtype@ 'TyCon'. Panics if the 'TyCon' is not a newtype.
newTyConEtadRhs :: TyCon -> ([TyVar], Type)
newTyConEtadRhs (AlgTyCon {algTcRhs = NewTyCon { nt_etad_rhs = tvs_rhs }}) = tvs_rhs
newTyConEtadRhs tycon = pprPanic "newTyConEtadRhs" (ppr tycon)

-- | Extracts the @newtype@ coercion from such a 'TyCon', which can be used to
-- construct something with the @newtype@s type from its representation type
-- (right hand side). If the supplied 'TyCon' is not a @newtype@, returns
-- @Nothing@
newTyConCo_maybe :: TyCon -> Maybe (CoAxiom Unbranched)
newTyConCo_maybe (AlgTyCon {algTcRhs = NewTyCon { nt_co = co }}) = Just co
newTyConCo_maybe _ = Nothing

-- | As 'newTyConCo_maybe', but panics if the 'TyCon' is not a newtype.
newTyConCo :: TyCon -> CoAxiom Unbranched
newTyConCo tc = case newTyConCo_maybe tc of
  Just co -> co
  Nothing -> pprPanic "newTyConCo" (ppr tc)

-- | Find the primitive representation of a 'TyCon'
tyConPrimRep :: TyCon -> PrimRep
tyConPrimRep (PrimTyCon {primTyConRep = rep}) = rep
-- Everything that is not a primitive TyCon is represented by a pointer;
-- the assertion excludes unboxed tuples, which have no single PrimRep.
tyConPrimRep tc = ASSERT(not (isUnboxedTupleTyCon tc)) PtrRep

-- | Find the \"stupid theta\" of the 'TyCon'. A \"stupid theta\" is the context
-- to the left of an algebraic type declaration, e.g. @Eq a@ in the declaration
-- @data Eq a => T a ...@
tyConStupidTheta :: TyCon -> [PredType]
tyConStupidTheta (AlgTyCon {algTcStupidTheta = stupid}) = stupid
tyConStupidTheta (TupleTyCon {}) = []
tyConStupidTheta tycon = pprPanic "tyConStupidTheta" (ppr tycon)
-- | Extract the 'TyVar's bound by a vanilla type synonym
-- and the corresponding (unsubstituted) right hand side.
synTyConDefn_maybe :: TyCon -> Maybe ([TyVar], Type)
synTyConDefn_maybe (SynonymTyCon {tyConTyVars = tyvars, synTcRhs = ty})
  = Just (tyvars, ty)
synTyConDefn_maybe _ = Nothing

-- | Extract the information pertaining to the right hand side of a type synonym
-- (@type@) declaration.
synTyConRhs_maybe :: TyCon -> Maybe Type
synTyConRhs_maybe (SynonymTyCon {synTcRhs = rhs}) = Just rhs
synTyConRhs_maybe _ = Nothing

-- | Extract the flavour of a type family (with all the extra information that
-- it carries)
famTyConFlav_maybe :: TyCon -> Maybe FamTyConFlav
famTyConFlav_maybe (FamilyTyCon {famTcFlav = flav}) = Just flav
famTyConFlav_maybe _ = Nothing

-- | If the given 'TyCon' has a /single/ data constructor, i.e. it is a @data@
-- type with one alternative, a tuple type or a @newtype@ then that constructor
-- is returned. If the 'TyCon' has more than one constructor, or represents a
-- primitive or function type constructor then @Nothing@ is returned. In any
-- other case, the function panics
tyConSingleDataCon_maybe :: TyCon -> Maybe DataCon
tyConSingleDataCon_maybe (TupleTyCon {dataCon = c})
  = Just c
tyConSingleDataCon_maybe (AlgTyCon {algTcRhs = DataTyCon { data_cons = [c] }})
  = Just c
tyConSingleDataCon_maybe (AlgTyCon {algTcRhs = NewTyCon { data_con = c }})
  = Just c
tyConSingleDataCon_maybe _
  = Nothing

tyConSingleAlgDataCon_maybe :: TyCon -> Maybe DataCon
-- Returns (Just con) for single-constructor *algebraic* data types
-- *not* newtypes
tyConSingleAlgDataCon_maybe (TupleTyCon {dataCon = c})
  = Just c
tyConSingleAlgDataCon_maybe (AlgTyCon {algTcRhs = DataTyCon { data_cons= [c] }})
  = Just c
tyConSingleAlgDataCon_maybe _
  = Nothing

-- | Is this 'TyCon' that for a class instance?
isClassTyCon :: TyCon -> Bool
isClassTyCon (AlgTyCon {algTcParent = ClassTyCon _}) = True
isClassTyCon _ = False

-- | If this 'TyCon' is that for a class instance, return the class it is for.
-- Otherwise returns @Nothing@
tyConClass_maybe :: TyCon -> Maybe Class
tyConClass_maybe (AlgTyCon {algTcParent = ClassTyCon clas}) = Just clas
tyConClass_maybe _ = Nothing

-- | Return the 'TupleSort' if this is a tuple 'TyCon', @Nothing@ otherwise.
tyConTuple_maybe :: TyCon -> Maybe TupleSort
tyConTuple_maybe (TupleTyCon {tyConTupleSort = sort}) = Just sort
tyConTuple_maybe _ = Nothing
----------------------------------------------------------------------------
-- | The parent of this 'TyCon' ('NoParentTyCon' if it has none).
tyConParent :: TyCon -> TyConParent
tyConParent (AlgTyCon {algTcParent = parent}) = parent
tyConParent (FamilyTyCon {famTcParent = parent}) = parent
tyConParent _ = NoParentTyCon
----------------------------------------------------------------------------
-- | Is this 'TyCon' that for a data family instance?
isFamInstTyCon :: TyCon -> Bool
isFamInstTyCon tc = case tyConParent tc of
  FamInstTyCon {} -> True
  _ -> False

-- | If this 'TyCon' is a data family instance, return the family, the
-- instance types and the coercion axiom; @Nothing@ otherwise.
tyConFamInstSig_maybe :: TyCon -> Maybe (TyCon, [Type], CoAxiom Unbranched)
tyConFamInstSig_maybe tc
  = case tyConParent tc of
    FamInstTyCon ax f ts -> Just (f, ts, ax)
    _ -> Nothing

-- | If this 'TyCon' is that of a family instance, return the family in question
-- and the instance types. Otherwise, return @Nothing@
tyConFamInst_maybe :: TyCon -> Maybe (TyCon, [Type])
tyConFamInst_maybe tc
  = case tyConParent tc of
    FamInstTyCon _ f ts -> Just (f, ts)
    _ -> Nothing

-- | If this 'TyCon' is that of a family instance, return a 'TyCon' which
-- represents a coercion identifying the representation type with the type
-- instance family. Otherwise, return @Nothing@
tyConFamilyCoercion_maybe :: TyCon -> Maybe (CoAxiom Unbranched)
tyConFamilyCoercion_maybe tc
  = case tyConParent tc of
    FamInstTyCon co _ _ -> Just co
    _ -> Nothing
{-
************************************************************************
* *
\subsection[TyCon-instances]{Instance declarations for @TyCon@}
* *
************************************************************************
@TyCon@s are compared by comparing their @Unique@s.
The strictness analyser needs @Ord@. It is a lexicographic order with
the property @(a<=b) || (b<=a)@.
-}
-- 'TyCon's are compared and ordered by their 'Unique's; see the note above
-- about the lexicographic-order requirement of the strictness analyser.
instance Eq TyCon where
  a == b = case (a `compare` b) of { EQ -> True; _ -> False }
  a /= b = case (a `compare` b) of { EQ -> False; _ -> True }

instance Ord TyCon where
  a <= b = case (a `compare` b) of { LT -> True; EQ -> True; GT -> False }
  a < b = case (a `compare` b) of { LT -> True; EQ -> False; GT -> False }
  a >= b = case (a `compare` b) of { LT -> False; EQ -> True; GT -> True }
  a > b = case (a `compare` b) of { LT -> False; EQ -> False; GT -> True }
  compare a b = getUnique a `compare` getUnique b

instance Uniquable TyCon where
  getUnique tc = tyConUnique tc

instance Outputable TyCon where
  -- At the moment a promoted TyCon has the same Name as its
  -- corresponding TyCon, so we add the quote to distinguish it here
  ppr tc = pprPromotionQuote tc <> ppr (tyConName tc)

-- | The quote (if any) to print in front of a promoted 'TyCon''s name.
pprPromotionQuote :: TyCon -> SDoc
pprPromotionQuote (PromotedDataCon {}) = char '\'' -- Quote promoted DataCons
                                                   -- in types
pprPromotionQuote (PromotedTyCon {}) = ifPprDebug (char '\'')
pprPromotionQuote _ = empty -- However, we don't quote TyCons
                            -- in kinds e.g.
                            -- type family T a :: Bool -> *
                            -- cf Trac #5952.
                            -- Except with -dppr-debug

instance NamedThing TyCon where
  getName = tyConName

instance Data.Data TyCon where
  -- don't traverse?
  toConstr _ = abstractConstr "TyCon"
  gunfold _ _ = error "gunfold"
  dataTypeOf _ = mkNoRepType "TyCon"
{-
************************************************************************
* *
Walking over recursive TyCons
* *
************************************************************************
Note [Expanding newtypes and products]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When expanding a type to expose a data-type constructor, we need to be
careful about newtypes, lest we fall into an infinite loop. Here are
the key examples:
newtype Id x = MkId x
newtype Fix f = MkFix (f (Fix f))
newtype T = MkT (T -> T)
Type Expansion
--------------------------
T T -> T
Fix Maybe Maybe (Fix Maybe)
Id (Id Int) Int
Fix Id NO NO NO
Notice that we can expand T, even though it's recursive.
And we can expand Id (Id Int), even though the Id shows up
twice at the outer level.
So, when expanding, we keep track of when we've seen a recursive
newtype at outermost level; and bale out if we see it again.
We sometimes want to do the same for product types, so that the
strictness analyser doesn't unbox infinitely deeply.
The function that manages this is checkRecTc.
-}
-- | Tracks the recursive 'TyCon's already expanded at the outermost level,
-- so expansion can bale out on a loop.
-- See Note [Expanding newtypes and products] above.
newtype RecTcChecker = RC NameSet

-- | A checker that has seen no recursive 'TyCon's yet.
initRecTc :: RecTcChecker
initRecTc = RC emptyNameSet

checkRecTc :: RecTcChecker -> TyCon -> Maybe RecTcChecker
-- Nothing => Recursion detected
-- Just rec_tcs => Keep going
checkRecTc (RC rec_nts) tc
  | not (isRecursiveTyCon tc) = Just (RC rec_nts) -- non-recursive: nothing to record
  | tc_name `elemNameSet` rec_nts = Nothing       -- seen before: bale out
  | otherwise = Just (RC (extendNameSet rec_nts tc_name))
  where
    tc_name = tyConName tc
|
alexander-at-github/eta
|
compiler/ETA/Types/TyCon.hs
|
bsd-3-clause
| 74,853
| 0
| 16
| 22,396
| 8,534
| 4,929
| 3,605
| 729
| 8
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# OPTIONS_HADDOCK hide #-}
module Distribution.Compat.Environment
( getEnvironment, lookupEnv, setEnv, unsetEnv )
where
import Prelude ()
import qualified Prelude
import Distribution.Compat.Prelude
#ifndef mingw32_HOST_OS
#if __GLASGOW_HASKELL__ < 708
import Foreign.C.Error (throwErrnoIf_)
#endif
#endif
import qualified System.Environment as System
#if __GLASGOW_HASKELL__ >= 706
import System.Environment (lookupEnv)
#if __GLASGOW_HASKELL__ >= 708
import System.Environment (unsetEnv)
#endif
#else
import Distribution.Compat.Exception (catchIO)
#endif
import Distribution.Compat.Stack
#ifdef mingw32_HOST_OS
import Foreign.C
import GHC.Windows
#else
import Foreign.C.Types
import Foreign.C.String
import Foreign.C.Error (throwErrnoIfMinus1_)
import System.Posix.Internals ( withFilePath )
#endif /* mingw32_HOST_OS */
-- | The current process environment as name/value pairs.
getEnvironment :: NoCallStackIO [(String, String)]
#ifdef mingw32_HOST_OS
-- On Windows, the names of environment variables are case-insensitive, but are
-- often given in mixed-case (e.g. "PATH" is "Path"), so we have to normalise
-- them.
getEnvironment = fmap upcaseVars System.getEnvironment
  where
    upcaseVars = map upcaseVar
    upcaseVar (var, val) = (map toUpper var, val)
#else
getEnvironment = System.getEnvironment
#endif
#if __GLASGOW_HASKELL__ < 706
-- | @lookupEnv var@ returns the value of the environment variable @var@, or
-- @Nothing@ if there is no such value.
lookupEnv :: String -> IO (Maybe String)
-- 'System.getEnv' throws on a missing variable; translate that into Nothing.
lookupEnv name = (Just `fmap` System.getEnv name) `catchIO` const (return Nothing)
#endif /* __GLASGOW_HASKELL__ < 706 */
-- | @setEnv name value@ sets the specified environment variable to @value@.
--
-- Throws `Control.Exception.IOException` if either @name@ or @value@ is the
-- empty string or contains an equals sign.
setEnv :: String -> String -> IO ()
setEnv key value_ = setEnv_ key value
  where
    -- NOTE: Anything that follows NUL is ignored on both POSIX and Windows,
    -- so we strip it manually rather than hand a value with an embedded NUL
    -- to the platform primitive.
    value = takeWhile (/= '\NUL') value_
-- | Platform-specific worker for 'setEnv'.
setEnv_ :: String -> String -> IO ()
#ifdef mingw32_HOST_OS
-- Windows: use the Win32 API (SetEnvironmentVariableW).
setEnv_ key value = withCWString key $ \k -> withCWString value $ \v -> do
    success <- c_SetEnvironmentVariable k v
    unless success (throwGetLastError "setEnv")
  where
    _ = callStack -- TODO: attach CallStack to exception
# if defined(i386_HOST_ARCH)
# define WINDOWS_CCONV stdcall
# elif defined(x86_64_HOST_ARCH)
# define WINDOWS_CCONV ccall
# else
# error Unknown mingw32 arch
# endif /* i386_HOST_ARCH */
foreign import WINDOWS_CCONV unsafe "windows.h SetEnvironmentVariableW"
  c_SetEnvironmentVariable :: LPTSTR -> LPTSTR -> Prelude.IO Bool
#else
-- POSIX: call setenv(3) with overwrite = 1.
setEnv_ key value = do
    withFilePath key $ \ keyP ->
      withFilePath value $ \ valueP ->
        throwErrnoIfMinus1_ "setenv" $
          c_setenv keyP valueP (fromIntegral (fromEnum True))
  where
    _ = callStack -- TODO: attach CallStack to exception
foreign import ccall unsafe "setenv"
  c_setenv :: CString -> CString -> CInt -> Prelude.IO CInt
#endif /* mingw32_HOST_OS */
#if __GLASGOW_HASKELL__ < 708
-- | @unsetEnv name@ removes the specified environment variable from the
-- environment of the current process.
--
-- Throws `Control.Exception.IOException` if @name@ is the empty string or
-- contains an equals sign.
--
-- @since 4.7.0.0
unsetEnv :: String -> IO ()
#ifdef mingw32_HOST_OS
unsetEnv key = withCWString key $ \k -> do
  success <- c_SetEnvironmentVariable k nullPtr
  unless success $ do
    -- We consider unsetting an environment variable that does not exist not as
    -- an error, hence we ignore eRROR_ENVVAR_NOT_FOUND.
    err <- c_GetLastError
    unless (err == eRROR_ENVVAR_NOT_FOUND) $ do
      throwGetLastError "unsetEnv"
#else
-- POSIX: unsetenv(3); a non-zero return becomes an IOError.
unsetEnv key = withFilePath key (throwErrnoIf_ (/= 0) "unsetEnv" . c_unsetenv)
#if __GLASGOW_HASKELL__ > 706
foreign import ccall unsafe "__hsbase_unsetenv" c_unsetenv :: CString -> Prelude.IO CInt
#else
-- HACK: We hope very hard that !UNSETENV_RETURNS_VOID
foreign import ccall unsafe "unsetenv" c_unsetenv :: CString -> Prelude.IO CInt
#endif
#endif
#endif
|
sopvop/cabal
|
Cabal/Distribution/Compat/Environment.hs
|
bsd-3-clause
| 4,214
| 14
| 14
| 695
| 529
| 314
| 215
| 35
| 1
|
import Text.Read
-- | Table of binary operators usable in RPN expressions.
-- NB: "/" is integer division ('div').
operators :: [(String, Integer -> Integer -> Integer)]
operators = [("+", (+)), ("-", (-)), ("*", (*)), ("/", div)]

-- | One step of reverse-Polish evaluation over a stack of intermediate
-- results.  'Nothing' signals an evaluation error (unreadable token, stack
-- underflow, or division by zero) and is propagated to the end.
rpolishIter :: Maybe [Integer] -> [String] -> Maybe [Integer]
rpolishIter mns [] = mns
rpolishIter (Just ns) (s : ss) = case lookup s operators of
  Just o -> case ns of
    y : x : ns'
      -- Guard against the runtime exception 'div' throws on a zero divisor:
      -- report it as an ordinary evaluation failure instead of crashing.
      | s == "/" && y == 0 -> Nothing
      | otherwise -> rpolishIter (Just $ x `o` y : ns') ss
    -- Fewer than two operands on the stack: underflow.
    _ -> Nothing
  -- Not an operator: try to read an integer literal and push it.
  Nothing -> rpolishIter (fmap (: ns) $ readMaybe s) ss
rpolishIter Nothing _ = Nothing

-- | Evaluate a reverse-Polish token list, returning the final stack
-- (top of stack first), or 'Nothing' on any error.
rpolish :: [String] -> Maybe [Integer]
rpolish = rpolishIter $ Just []
|
YoshikuniJujo/funpaala
|
samples/15_fold/rpolish0.hs
|
bsd-3-clause
| 533
| 0
| 15
| 106
| 272
| 150
| 122
| 13
| 3
|
-- | Assignment of unique IDs to values.
-- Inspired by the 'intern' package.
{-# LANGUAGE RecordWildCards, ScopedTypeVariables, BangPatterns #-}
module Twee.Label(Label, unsafeMkLabel, labelNum, label, find) where
import Data.IORef
import System.IO.Unsafe
import qualified Data.Map.Strict as Map
import Data.Map.Strict(Map)
import qualified Data.DynamicArray as DynamicArray
import Data.DynamicArray(Array)
import Data.Typeable
import GHC.Exts
import Unsafe.Coerce
import Data.Int
-- | A value of type @a@ which has been given a unique ID.
newtype Label a =
  Label {
    -- | The unique ID of a label.
    labelNum :: Int32 }
  deriving (Eq, Ord, Show)

-- | Construct a @'Label' a@ from its unique ID, which must be the 'labelNum' of
-- an already existing 'Label'. Extremely unsafe!
unsafeMkLabel :: Int32 -> Label a
unsafeMkLabel = Label

-- The global cache of labels.
-- NOINLINE is essential: the unsafePerformIO must be shared so that every
-- call site sees the same IORef.
{-# NOINLINE cachesRef #-}
cachesRef :: IORef Caches
cachesRef = unsafePerformIO (newIORef (Caches 0 Map.empty DynamicArray.newArray))
-- | The global label state: a counter plus both directions of the
-- value/label mapping.
data Caches =
  Caches {
    -- The next id number to assign.
    caches_nextId :: {-# UNPACK #-} !Int32,
    -- A map from values to labels, keyed first by the value's TypeRep
    -- (values of different types share the one ID counter).
    caches_from :: !(Map TypeRep (Cache Any)),
    -- The reverse map from labels to values.
    caches_to :: !(Array Any) }

-- | A per-type map from values to their assigned ID numbers.
type Cache a = Map a Int32
-- | Atomically update the global caches with @f@, retrying from scratch if
-- another update (possibly a reentrant one) slipped in meanwhile.
atomicModifyCaches :: (Caches -> (Caches, a)) -> IO a
atomicModifyCaches f = do
  -- N.B. atomicModifyIORef' ref f evaluates f ref *after* doing the
  -- compare-and-swap. This causes bad things to happen when 'label'
  -- is used reentrantly (i.e. the Ord instance itself calls label).
  -- This function only lets the swap happen if caches_nextId didn't
  -- change (i.e., no new values were inserted).
  !caches <- readIORef cachesRef
  -- First compute the update.
  let !(!caches', !x) = f caches
  -- Now see if anyone else updated the cache in between
  -- (can happen if f called 'label', or in a concurrent setting).
  ok <- atomicModifyIORef' cachesRef $ \cachesNow ->
    if caches_nextId caches == caches_nextId cachesNow
      then (caches', True)
      else (cachesNow, False)
  if ok then return x else atomicModifyCaches f
-- Versions of unsafeCoerce with slightly more type checking: each one pins
-- down the container shape so only the element type is actually coerced.
toAnyCache :: Cache a -> Cache Any
toAnyCache = unsafeCoerce

fromAnyCache :: Cache Any -> Cache a
fromAnyCache = unsafeCoerce

toAny :: a -> Any
toAny = unsafeCoerce

fromAny :: Any -> a
fromAny = unsafeCoerce
-- | Assign a label to a value.
--
-- Fast path: a plain read of the global cache.  Slow path: an atomic
-- insert via 'atomicModifyCaches', re-checking first because another
-- thread (or a reentrant call) may have inserted the value meanwhile.
{-# NOINLINE label #-}
label :: forall a. (Typeable a, Ord a) => a -> Label a
label x =
  unsafeDupablePerformIO $ do
    -- Common case: label is already there.
    caches <- readIORef cachesRef
    case tryFind caches of
      Just l -> return l
      Nothing -> do
        -- Rare case: label was not there.
        -- (Bindings renamed from the original, whose lambda argument and
        -- result shadowed the outer 'caches' and the argument 'x'.)
        lbl <- atomicModifyCaches $ \cachesNow ->
          case tryFind cachesNow of
            Just l -> (cachesNow, l)
            Nothing ->
              insert cachesNow
        return lbl
  where
    ty = typeOf x

    -- Look up x's label in the given snapshot of the caches, if present.
    tryFind :: Caches -> Maybe (Label a)
    tryFind Caches{..} =
      Label <$> (Map.lookup ty caches_from >>= Map.lookup x . fromAnyCache)

    -- Insert x with a fresh ID, extending both directions of the mapping.
    insert :: Caches -> (Caches, Label a)
    insert caches@Caches{..} =
      if n < 0 then error "label overflow" else
        (caches {
          caches_nextId = n+1,
          caches_from = Map.insert ty (toAnyCache (Map.insert x n cache)) caches_from,
          caches_to = DynamicArray.updateWithDefault undefined (fromIntegral n) (toAny x) caches_to },
         Label n)
      where
        n = caches_nextId
        cache =
          fromAnyCache $
            Map.findWithDefault Map.empty ty caches_from
-- | Recover the underlying value from a label.
find :: Label a -> a
-- N.B. must force n before calling readIORef, otherwise a call of
-- the form
--   find (label x)
-- doesn't work.
find (Label !n) = unsafeDupablePerformIO $ do
  Caches{..} <- readIORef cachesRef
  -- ($!) forces the looked-up value before leaving the unsafe block.
  x <- return $! fromAny (DynamicArray.getWithDefault undefined (fromIntegral n) caches_to)
  return x
|
nick8325/kbc
|
src/Twee/Label.hs
|
bsd-3-clause
| 3,995
| 0
| 19
| 912
| 916
| 493
| 423
| 83
| 4
|
-- | Custom infix operator: @l \*\ t = (l - 1) * t - (l + 3)@.
(\*\) :: Num a => a -> a -> a
(\*\) l t = (l - 1) * t - (l + 3)
|
YoshikuniJujo/funpaala
|
samples/05_function/golf.hs
|
bsd-3-clause
| 42
| 0
| 8
| 13
| 34
| 17
| 17
| 1
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -Wno-orphans #-}
-- | Use persistent-mongodb the same way you would use other persistent
-- libraries and refer to the general persistent documentation.
-- There are some new MongoDB specific filters under the filters section.
-- These help extend your query into a nested document.
--
-- However, at some point you will find the normal Persistent APIs lacking.
-- and want lower level-level MongoDB access.
-- There are functions available to make working with the raw driver
-- easier: they are under the Entity conversion section.
-- You should still use the same connection pool that you are using for Persistent.
--
-- MongoDB is a schema-less database.
-- The MongoDB Persistent backend does not help perform migrations.
-- Unlike SQL backends, uniqueness constraints cannot be created for you.
-- You must place a unique index on unique fields.
module Database.Persist.MongoDB
(
-- * Entity conversion
collectionName
, docToEntityEither
, docToEntityThrow
, recordToDocument
, documentFromEntity
, toInsertDoc
, entityToInsertDoc
, updatesToDoc
, filtersToDoc
, toUniquesDoc
-- * MongoDB specific queries
-- $nested
, (->.), (~>.), (?&->.), (?&~>.), (&->.), (&~>.)
-- ** Filters
-- $filters
, nestEq, nestNe, nestGe, nestLe, nestIn, nestNotIn
, anyEq, nestAnyEq, nestBsonEq, anyBsonEq
, inList, ninList
, (=~.)
-- non-operator forms of filters
, NestedField(..)
, MongoRegexSearchable
, MongoRegex
-- ** Updates
-- $updates
, nestSet, nestInc, nestDec, nestMul, push, pull, pullAll, addToSet, eachOp
-- * Key conversion helpers
, BackendKey(..)
, keyToOid
, oidToKey
, recordTypeFromKey
, readMayObjectId
, readMayMongoKey
, keyToText
-- * PersistField conversion
, fieldName
-- * using connections
, withConnection
, withMongoPool
, withMongoDBConn
, withMongoDBPool
, createMongoDBPool
, runMongoDBPool
, runMongoDBPoolDef
, ConnectionPool
, Connection
, MongoAuth (..)
-- * Connection configuration
, MongoConf (..)
, defaultMongoConf
, defaultHost
, defaultAccessMode
, defaultPoolStripes
, defaultConnectionIdleTime
, defaultStripeConnections
, applyDockerEnv
-- ** using raw MongoDB pipes
, PipePool
, createMongoDBPipePool
, runMongoDBPipePool
-- * network type
, HostName
-- * MongoDB driver types
, Database
, DB.Action
, DB.AccessMode(..)
, DB.master
, DB.slaveOk
, (DB.=:)
, DB.ObjectId
, DB.MongoContext
, DB.PortID
-- * Database.Persist
, module Database.Persist
) where
import Control.Exception (throw, throwIO)
import Control.Monad (liftM, (>=>), forM_, unless)
import Control.Monad.IO.Class (liftIO)
import qualified Control.Monad.IO.Class as Trans
import Control.Monad.IO.Unlift (MonadUnliftIO, withRunInIO)
import Control.Monad.Trans.Reader (ask, runReaderT)
import Data.Acquire (mkAcquire)
import Data.Aeson (Value (Number), (.:), (.:?), (.!=), FromJSON(..), ToJSON(..), withText, withObject)
import Data.Aeson.Types (modifyFailure)
import Data.Bits (shiftR)
import Data.Bson (ObjectId(..))
import qualified Data.ByteString as BS
import Data.Conduit
import Data.Maybe (mapMaybe, fromJust)
import Data.Monoid (mappend)
import qualified Data.Serialize as Serialize
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import qualified Data.Traversable as Traversable
import qualified Data.Pool as Pool
import Data.Time (NominalDiffTime)
import Data.Time.Calendar (Day(..))
#ifdef HIGH_PRECISION_DATE
import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds)
#endif
import Data.Word (Word16)
import Network.Socket (HostName)
import Numeric (readHex)
import System.Environment (lookupEnv)
import Unsafe.Coerce (unsafeCoerce)
import Web.PathPieces (PathPiece(..))
import Web.HttpApiData (ToHttpApiData(..), FromHttpApiData(..), parseUrlPieceMaybe, parseUrlPieceWithPrefix, readTextData)
#ifdef DEBUG
import FileLocation (debug)
#endif
import qualified Database.MongoDB as DB
import Database.MongoDB.Query (Database)
import Database.Persist
import qualified Database.Persist.Sql as Sql
instance HasPersistBackend DB.MongoContext where
  type BaseBackend DB.MongoContext = DB.MongoContext
  persistBackend = id

-- | Type-level helper: fixes the @record@ type associated with a 'Key'.
-- The result is never meant to be evaluated.
recordTypeFromKey :: Key record -> record
recordTypeFromKey _ = error "recordTypeFromKey"

-- | Wrapper avoiding an orphan 'FromJSON' instance for 'NominalDiffTime'.
newtype NoOrphanNominalDiffTime = NoOrphanNominalDiffTime NominalDiffTime
  deriving (Show, Eq, Num)

instance FromJSON NoOrphanNominalDiffTime where
  parseJSON (Number x) = (return . NoOrphanNominalDiffTime . fromRational . toRational) x
  parseJSON _ = fail "couldn't parse diff time"

-- | Wrapper avoiding an orphan 'FromJSON' instance for 'DB.PortID'.
newtype NoOrphanPortID = NoOrphanPortID DB.PortID deriving (Show, Eq)

instance FromJSON NoOrphanPortID where
  parseJSON (Number x) = (return . NoOrphanPortID . DB.PortNumber . fromIntegral ) cnvX
    where cnvX :: Word16
          cnvX = round x
  parseJSON _ = fail "couldn't parse port number"
-- | A raw MongoDB connection: a wire 'DB.Pipe' plus the database it targets.
data Connection = Connection DB.Pipe DB.Database
type ConnectionPool = Pool.Pool Connection

instance ToHttpApiData (BackendKey DB.MongoContext) where
  toUrlPiece = keyToText

instance FromHttpApiData (BackendKey DB.MongoContext) where
  parseUrlPiece input = do
    -- Accept the ObjectId either with or without an "o" prefix.
    s <- parseUrlPieceWithPrefix "o" input <!> return input
    MongoKey <$> readTextData s
    where
      -- Left-biased fallback: use the right operand only if the left failed.
      infixl 3 <!>
      Left _ <!> y = y
      x <!> _ = x

-- | ToPathPiece is used to convert a key to/from text
instance PathPiece (BackendKey DB.MongoContext) where
  toPathPiece = toUrlPiece
  fromPathPiece = parseUrlPieceMaybe

-- | Render a Mongo backend key as the text of its ObjectId.
keyToText :: BackendKey DB.MongoContext -> Text
keyToText = T.pack . show . unMongoKey

-- | Convert a Text to a Key
readMayMongoKey :: Text -> Maybe (BackendKey DB.MongoContext)
readMayMongoKey = fmap MongoKey . readMayObjectId
-- | Parse a 'DB.ObjectId' from text.  Succeeds only when exactly one parse
-- consumes the entire input; @Nothing@ otherwise.
readMayObjectId :: Text -> Maybe DB.ObjectId
readMayObjectId txt =
  case [ oid | (oid, rest) <- reads (T.unpack txt) :: [(DB.ObjectId, String)]
             , null rest ] of
    [oid] -> Just oid
    _     -> Nothing
instance PersistField DB.ObjectId where
  toPersistValue = oidToPersistValue
  fromPersistValue oid@(PersistObjectId _) = Right $ persistObjectIdToDbOid oid
  -- A raw ByteString is re-tagged as an ObjectId and decoded recursively.
  fromPersistValue (PersistByteString bs) = fromPersistValue (PersistObjectId bs)
  fromPersistValue _ = Left $ T.pack "expected PersistObjectId"

instance Sql.PersistFieldSql DB.ObjectId where
  sqlType _ = Sql.SqlOther "doesn't make much sense for MongoDB"

instance Sql.PersistFieldSql (BackendKey DB.MongoContext) where
  sqlType _ = Sql.SqlOther "doesn't make much sense for MongoDB"

-- | Run an action with a connection pool built from a 'MongoConf'.
withConnection :: (Trans.MonadIO m)
               => MongoConf
               -> (ConnectionPool -> m b) -> m b
withConnection mc =
  withMongoDBPool (mgDatabase mc) (T.unpack $ mgHost mc) (mgPort mc) (mgAuth mc) (mgPoolStripes mc) (mgStripeConnections mc) (mgConnectionIdleTime mc)

-- | As 'withMongoDBPool', with one stripe holding one connection.
withMongoDBConn :: (Trans.MonadIO m)
                => Database -> HostName -> DB.PortID
                -> Maybe MongoAuth -> NominalDiffTime
                -> (ConnectionPool -> m b) -> m b
withMongoDBConn dbname hostname port mauth connectionIdleTime = withMongoDBPool dbname hostname port mauth 1 1 connectionIdleTime
-- | Open a raw wire connection to a single host.
createPipe :: HostName -> DB.PortID -> IO DB.Pipe
createPipe hostname port = DB.connect (DB.Host hostname port)

-- | Connect to the primary of a replica set and authenticate.
-- (Sic: the spelling of the name is kept as-is.)
createReplicatSet :: (DB.ReplicaSetName, [DB.Host]) -> Database -> Maybe MongoAuth -> IO Connection
createReplicatSet rsSeed dbname mAuth = do
  pipe <- DB.openReplicaSet rsSeed >>= DB.primary
  testAccess pipe dbname mAuth
  return $ Connection pipe dbname

-- | Build a striped connection pool over a replica set.
createRsPool :: (Trans.MonadIO m) => Database -> ReplicaSetConfig
             -> Maybe MongoAuth
             -> Int -- ^ pool size (number of stripes)
             -> Int -- ^ stripe size (number of connections per stripe)
             -> NominalDiffTime -- ^ time a connection is left idle before closing
             -> m ConnectionPool
createRsPool dbname (ReplicaSetConfig rsName rsHosts) mAuth connectionPoolSize stripeSize connectionIdleTime = do
  Trans.liftIO $ Pool.createPool
    (createReplicatSet (rsName, rsHosts) dbname mAuth)
    (\(Connection pipe _) -> DB.close pipe)
    connectionPoolSize
    connectionIdleTime
    stripeSize

-- | Authenticate against the database when credentials were supplied;
-- a no-op for 'Nothing'.  (The @return undefined@ result is discarded,
-- never inspected.)
testAccess :: DB.Pipe -> Database -> Maybe MongoAuth -> IO ()
testAccess pipe dbname mAuth = do
  _ <- case mAuth of
    Just (MongoAuth user pass) -> DB.access pipe DB.UnconfirmedWrites dbname (DB.auth user pass)
    Nothing -> return undefined
  return ()

-- | Open a single authenticated connection to one host.
createConnection :: Database -> HostName -> DB.PortID -> Maybe MongoAuth -> IO Connection
createConnection dbname hostname port mAuth = do
  pipe <- createPipe hostname port
  testAccess pipe dbname mAuth
  return $ Connection pipe dbname
createMongoDBPool :: (Trans.MonadIO m) => Database -> HostName -> DB.PortID
-> Maybe MongoAuth
-> Int -- ^ pool size (number of stripes)
-> Int -- ^ stripe size (number of connections per stripe)
-> NominalDiffTime -- ^ time a connection is left idle before closing
-> m ConnectionPool
createMongoDBPool dbname hostname port mAuth connectionPoolSize stripeSize connectionIdleTime = do
Trans.liftIO $ Pool.createPool
(createConnection dbname hostname port mAuth)
(\(Connection pipe _) -> DB.close pipe)
connectionPoolSize
connectionIdleTime
stripeSize
createMongoPool :: (Trans.MonadIO m) => MongoConf -> m ConnectionPool
createMongoPool c@MongoConf{mgReplicaSetConfig = Just (ReplicaSetConfig rsName hosts)} =
createRsPool
(mgDatabase c)
(ReplicaSetConfig rsName ((DB.Host (T.unpack $ mgHost c) (mgPort c)):hosts))
(mgAuth c)
(mgPoolStripes c) (mgStripeConnections c) (mgConnectionIdleTime c)
createMongoPool c@MongoConf{mgReplicaSetConfig = Nothing} =
createMongoDBPool
(mgDatabase c) (T.unpack (mgHost c)) (mgPort c)
(mgAuth c)
(mgPoolStripes c) (mgStripeConnections c) (mgConnectionIdleTime c)
type PipePool = Pool.Pool DB.Pipe
-- | A pool of plain MongoDB pipes.
-- The database parameter has not been applied yet.
-- This is useful for switching between databases (on the same host and port).
-- Unlike the normal pool, no authentication is available.
-- | Build a striped pool of raw, unauthenticated pipes to a single host.
-- See 'PipePool' for why this exists alongside 'createMongoDBPool'.
createMongoDBPipePool :: (Trans.MonadIO m) => HostName -> DB.PortID
                      -> Int -- ^ pool size (number of stripes)
                      -> Int -- ^ stripe size (number of connections per stripe)
                      -> NominalDiffTime -- ^ time a connection is left idle before closing
                      -> m PipePool
createMongoDBPipePool hostname port connectionPoolSize stripeSize connectionIdleTime =
    Trans.liftIO $
        Pool.createPool openPipe DB.close connectionPoolSize connectionIdleTime stripeSize
  where
    openPipe = createPipe hostname port
-- | Create a pool from the configuration and hand it to the callback.
withMongoPool :: (Trans.MonadIO m) => MongoConf -> (ConnectionPool -> m b) -> m b
withMongoPool conf connectionReader = do
    pool <- createMongoPool conf
    connectionReader pool
-- | Create a single-host pool from the individual connection parameters
-- and hand it to the callback.
withMongoDBPool :: (Trans.MonadIO m) =>
  Database -> HostName -> DB.PortID -> Maybe MongoAuth -> Int -> Int -> NominalDiffTime -> (ConnectionPool -> m b) -> m b
withMongoDBPool dbname hostname port mauth poolStripes stripeConnections connectionIdleTime connectionReader =
    createMongoDBPool dbname hostname port mauth poolStripes stripeConnections connectionIdleTime
        >>= connectionReader
-- | run a pool created with 'createMongoDBPipePool'
--
-- Checks a raw pipe out of the pool, runs the action against the given
-- database with the given access mode, and returns the pipe when done.
-- 'withRunInIO' lets the 'IO'-based pool combinator run an action in @m@.
runMongoDBPipePool :: MonadUnliftIO m => DB.AccessMode -> Database -> DB.Action m a -> PipePool -> m a
runMongoDBPipePool accessMode db action pool =
  withRunInIO $ \run ->
  Pool.withResource pool $ \pipe ->
  run $ DB.access pipe accessMode db action
-- | Check a 'Connection' out of the pool and run the action against its
-- database with the given access mode.
runMongoDBPool :: MonadUnliftIO m => DB.AccessMode -> DB.Action m a -> ConnectionPool -> m a
runMongoDBPool accessMode action pool =
    withRunInIO $ \unlift ->
        Pool.withResource pool $ \(Connection pipe db) ->
            unlift (DB.access pipe accessMode db action)
-- | use default 'AccessMode' ('defaultAccessMode', i.e. confirmed writes)
runMongoDBPoolDef :: MonadUnliftIO m => DB.Action m a -> ConnectionPool -> m a
runMongoDBPoolDef = runMongoDBPool defaultAccessMode
-- | Build a query that selects a record by its key, projecting only the
-- entity's own fields.
queryByKey :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
           => Key record -> DB.Query
queryByKey k = base { DB.project = projectionFromKey k }
  where
    base = DB.select (keyToMongoDoc k) (collectionNameFromKey k)
-- | Build a selection (selector + collection, no projection) for a key.
selectByKey :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
            => Key record -> DB.Selection
selectByKey k = DB.select selector collection
  where
    selector   = keyToMongoDoc k
    collection = collectionNameFromKey k
-- | Translate a list of persistent updates into a MongoDB update document.
updatesToDoc :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
             => [Update record] -> DB.Document
updatesToDoc = map updateToMongoField
-- | Translate a single field update into a MongoDB update operator field,
-- e.g. @{"$set": {fname: value}}@.  'Left' carries a generic persistent
-- update; 'Right' carries a Mongo-specific operation.
updateToBson :: Text
             -> PersistValue
             -> Either PersistUpdate MongoUpdateOperation
             -> DB.Field
updateToBson fname v up =
#ifdef DEBUG
  debug (
#endif
    opName DB.:= DB.Doc [fname DB.:= opValue]
#ifdef DEBUG
  )
#endif
  where
    inc = "$inc"
    mul = "$mul"
    (opName, opValue) = case up of
      Left pup -> case (pup, v) of
        -- assigning null means removing the field entirely
        (Assign, PersistNull) -> ("$unset", DB.Int64 1)
        (Assign,a)    -> ("$set", DB.val a)
        (Add, a)      -> (inc, DB.val a)
        -- subtraction is expressed as a negative $inc
        (Subtract, PersistInt64 i) -> (inc, DB.Int64 (-i))
        (Multiply, PersistInt64 i)  -> (mul, DB.Int64 i)
        (Multiply, PersistDouble d) -> (mul, DB.Float d)
        (Subtract, _) -> error "expected PersistInt64 for a subtraction"
        -- fixed: this message previously said "subtraction"
        (Multiply, _) -> error "expected PersistInt64 or PersistDouble for a multiplication"
        -- Obviously this could be supported for floats by multiplying with 1/x
        (Divide, _)   -> throw $ PersistMongoDBUnsupported "divide not supported"
        (BackendSpecificUpdate bsup, _) -> throw $ PersistMongoDBError $
          T.pack $ "did not expect BackendSpecificUpdate " ++ T.unpack bsup
      Right mup -> case mup of
        MongoEach op -> case op of
          -- $pullAll removes every listed value, so no $each wrapper
          MongoPull -> ("$pullAll", DB.val v)
          _ -> (opToText op, DB.Doc ["$each" DB.:= DB.val v])
        MongoSimple x -> (opToText x, DB.val v)
-- | Translate one persistent update (generic or backend-specific) into a
-- MongoDB update operator field.
updateToMongoField :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
                   => Update record -> DB.Field
updateToMongoField (Update field v up) = updateToBson (fieldName field) (toPersistValue v) (Left up)
updateToMongoField (BackendUpdate up)  = mongoUpdateToDoc up
-- | Convert a unique key into a MongoDB selector document, pairing each
-- database field name with its value.
toUniquesDoc :: forall record. (PersistEntity record) => Unique record -> [DB.Field]
toUniquesDoc uniq = zipWith (DB.:=) labels vals
  where
    labels = map (unDBName . snd) (persistUniqueToFieldNames uniq)
    vals   = map DB.val (persistUniqueToValues uniq)
-- | convert a PersistEntity into document fields.
-- for inserts only: nulls are ignored so they will be unset in the document.
-- 'recordToDocument' includes nulls
toInsertDoc :: forall record. (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
            => record -> DB.Document
toInsertDoc record = zipFilter (embeddedFields $ toEmbedEntityDef entDef)
    (map toPersistValue $ toPersistFields record)
  where
    entDef = entityDef $ Just record
    -- Walk field definitions and values in lockstep, dropping any
    -- null-like value (and its field) from the resulting document.
    zipFilter :: [EmbedFieldDef] -> [PersistValue] -> DB.Document
    zipFilter [] _  = []
    zipFilter _  [] = []
    zipFilter (fd:efields) (pv:pvs) =
        if isNull pv then recur else
          (fieldToLabel fd DB.:= embeddedVal (emFieldEmbed fd) pv):recur
      where
        recur = zipFilter efields pvs

        -- empty maps and lists are treated the same as explicit null
        isNull PersistNull = True
        isNull (PersistMap m) = null m
        isNull (PersistList l) = null l
        isNull _ = False

    -- make sure to removed nulls from embedded entities also
    embeddedVal :: Maybe EmbedEntityDef -> PersistValue -> DB.Value
    embeddedVal (Just emDef) (PersistMap m) = DB.Doc $
        zipFilter (embeddedFields emDef) $ map snd m
    embeddedVal je@(Just _) (PersistList l) = DB.Array $ map (embeddedVal je) l
    embeddedVal _ pv = DB.val pv
-- | Build an insert document for an entity: the key fields followed by
-- the record's non-null fields.
entityToInsertDoc :: forall record. (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
                  => Entity record -> DB.Document
entityToInsertDoc (Entity key record) = keyDoc ++ bodyDoc
  where
    keyDoc  = keyToMongoDoc key
    bodyDoc = toInsertDoc record
-- | The MongoDB collection name for a record type (its database entity name).
collectionName :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
               => record -> Text
collectionName record = unDBName (entityDB (entityDef (Just record)))
-- | Convert a PersistEntity into document fields.
-- Unlike 'toInsertDoc', nulls are included.
recordToDocument :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
                 => record -> DB.Document
recordToDocument record = zipToDoc fieldNames (toPersistFields record)
  where
    fieldNames = map fieldDB (entityFields (entityDef (Just record)))
-- | Full document for an entity: key fields plus every record field
-- (nulls included, see 'recordToDocument').
documentFromEntity :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
                   => Entity record -> DB.Document
documentFromEntity (Entity key record) =
    keyToMongoDoc key ++ recordToDocument record
-- | Pair database field names with values, stopping at the shorter list.
zipToDoc :: PersistField a => [DBName] -> [a] -> [DB.Field]
zipToDoc (name:names) (x:xs) =
    (unDBName name DB.:= DB.val (toPersistValue x)) : zipToDoc names xs
zipToDoc _ _ = []
-- | Database label for an embedded field definition.
fieldToLabel :: EmbedFieldDef -> Text
fieldToLabel fd = unDBName (emFieldDB fd)
-- | Like 'keyFrom_id' but throws 'PersistMongoDBError' in @m@ on failure.
keyFrom_idEx :: (Trans.MonadIO m, PersistEntity record) => DB.Value -> m (Key record)
keyFrom_idEx idVal = either raise return (keyFrom_id idVal)
  where
    raise err = liftIO $ throwIO $ PersistMongoDBError $
        "could not convert key: " `mappend` T.pack (show idVal) `mappend` err
-- | Decode a Mongo @_id@ value into a persistent key.  A map value is a
-- composite key (one value per primary field); anything else is a
-- single-field key.
keyFrom_id :: (PersistEntity record) => DB.Value -> Either Text (Key record)
keyFrom_id idVal =
    case cast idVal of
        PersistMap m -> keyFromValues (map snd m)
        single       -> keyFromValues [single]
-- | It would make sense to define the instance for ObjectId
-- and then use newtype deriving
-- however, that would create an orphan instance
--
-- Serializes the 12-byte ObjectId as its 24-character hex string.
instance ToJSON (BackendKey DB.MongoContext) where
    toJSON (MongoKey (Oid x y)) = toJSON $ DB.showHexLen 8 x $ DB.showHexLen 16 y ""
-- Parses the 24-character hex string back into a 12-byte ObjectId.
-- NOTE(review): the failure message says "Invalid base64" but the input
-- is hex — confirm before changing the user-visible text.
instance FromJSON (BackendKey DB.MongoContext) where
    parseJSON = withText "MongoKey" $ \t ->
        maybe
          (fail "Invalid base64")
          (return . MongoKey . persistObjectIdToDbOid . PersistObjectId)
          $ fmap (i2bs (8 * 12) . fst) $ headMay $ readHex $ T.unpack t
      where
        -- should these be exported from Types/Base.hs ?
        headMay []    = Nothing
        headMay (x:_) = Just x
-- taken from crypto-api
-- | @i2bs bitLen i@ converts @i@ to a big-endian 'ByteString' of
-- @bitLen@ bits (@bitLen@ must be a multiple of 8).
i2bs :: Int -> Integer -> BS.ByteString
i2bs l i = BS.unfoldr step (l - 8)
  where
    step shift
        | shift < 0 = Nothing
        | otherwise = Just (fromIntegral (i `shiftR` shift), shift - 8)
{-# INLINE i2bs #-}
-- | older versions versions of haddock (like that on hackage) do not show that this defines
-- @BackendKey DB.MongoContext = MongoKey { unMongoKey :: DB.ObjectId }@
instance PersistCore DB.MongoContext where
    newtype BackendKey DB.MongoContext = MongoKey { unMongoKey :: DB.ObjectId }
        deriving (Show, Read, Eq, Ord, PersistField)
instance PersistStoreWrite DB.MongoContext where
    -- insert lets MongoDB generate the _id and converts it back to a Key
    insert record = DB.insert (collectionName record) (toInsertDoc record)
        >>= keyFrom_idEx
    insertMany [] = return []
    insertMany records@(r:_) = mapM keyFrom_idEx =<<
        DB.insertMany (collectionName r) (map toInsertDoc records)
    insertEntityMany [] = return ()
    insertEntityMany ents@(Entity _ r : _) =
        DB.insertMany_ (collectionName r) (map entityToInsertDoc ents)
    -- insertKey supplies the _id explicitly
    insertKey k record = DB.insert_ (collectionName record) $
        entityToInsertDoc (Entity k record)
    -- save = insert-or-replace by _id
    repsert k record = DB.save (collectionName record) $
        documentFromEntity (Entity k record)
    replace k record = do
        DB.replace (selectByKey k) (recordToDocument record)
        return ()
    delete k =
        DB.deleteOne DB.Select {
          DB.coll = collectionNameFromKey k
        , DB.selector = keyToMongoDoc k
        }
    update _ [] = return ()
    update key upds =
        DB.modify
            (DB.Select (keyToMongoDoc key) (collectionNameFromKey key))
            $ updatesToDoc upds
    -- findAndModify returns the updated document, which is decoded back
    -- into the record; a missing document raises 'KeyNotFound'.
    updateGet key upds = do
        result <- DB.findAndModify (queryByKey key) (updatesToDoc upds)
        either err instantiate result
      where
        instantiate doc = do
            Entity _ rec <- fromPersistValuesThrow t doc
            return rec
        err msg = Trans.liftIO $ throwIO $ KeyNotFound $ show key ++ msg
        t = entityDefFromKey key
instance PersistStoreRead DB.MongoContext where
    -- Look up one document by key; decode it (throwing on marshal errors)
    -- and discard the redundant key from the decoded entity.
    get k = do
        d <- DB.findOne (queryByKey k)
        case d of
            Nothing -> return Nothing
            Just doc -> do
                Entity _ ent <- fromPersistValuesThrow t doc
                return $ Just ent
      where
        t = entityDefFromKey k
instance PersistUniqueRead DB.MongoContext where
    -- Find one document matching all fields of the unique constraint.
    -- 'dummyFromUnique' is only used for its type, never evaluated.
    getBy uniq = do
        mdoc <- DB.findOne $
            (DB.select (toUniquesDoc uniq) (collectionName rec)) {DB.project = projectionFromRecord rec}
        case mdoc of
            Nothing -> return Nothing
            Just doc -> liftM Just $ fromPersistValuesThrow t doc
      where
        t = entityDef $ Just rec
        rec = dummyFromUnique uniq
instance PersistUniqueWrite DB.MongoContext where
    deleteBy uniq =
        DB.delete DB.Select {
          DB.coll = collectionName $ dummyFromUnique uniq
        , DB.selector = toUniquesDoc uniq
        }
    -- upsert delegates to upsertBy using the record's single unique key
    upsert newRecord upds = do
        uniq <- onlyUnique newRecord
        upsertBy uniq newRecord upds
    -- -        let uniqKeys = map DB.label uniqueDoc
    -- -        let insDoc = DB.exclude uniqKeys $ toInsertDoc newRecord
    --         let selection = DB.select uniqueDoc $ collectionName newRecord
    -- -        if null upds
    -- -          then DB.upsert selection ["$set" DB.=: insDoc]
    -- -          else do
    -- -            DB.upsert selection ["$setOnInsert" DB.=: insDoc]
    -- -            DB.modify selection $ updatesToDoc upds
    -- -        -- because findAndModify $setOnInsert is broken we do a separate get now
    --
    -- NOTE: implemented as get-then-write, so it is not atomic; the
    -- post-write getBy below can observe a concurrent delete (UpsertError).
    upsertBy uniq newRecord upds = do
        let uniqueDoc = toUniquesDoc uniq :: [DB.Field]
        let uniqKeys = map DB.label uniqueDoc :: [DB.Label]
        let insDoc = DB.exclude uniqKeys $ toInsertDoc newRecord :: DB.Document
        let selection = DB.select uniqueDoc $ collectionName newRecord :: DB.Selection
        mdoc <- getBy uniq
        case mdoc of
          Nothing -> unless (null upds) (DB.upsert selection ["$setOnInsert" DB.=: insDoc])
          Just _ -> unless (null upds) (DB.modify selection $ DB.exclude uniqKeys $ updatesToDoc upds)
        newMdoc <- getBy uniq
        case newMdoc of
          Nothing -> err "possible race condition: getBy found Nothing"
          Just doc -> return doc
      where
        err = Trans.liftIO . throwIO . UpsertError
        {-
        -- cannot use findAndModify
        -- because $setOnInsert is crippled
        -- https://jira.mongodb.org/browse/SERVER-2643
        result <- DB.findAndModifyOpts
            selection
            (DB.defFamUpdateOpts ("$setOnInsert" DB.=: insDoc : ["$set" DB.=: insDoc]))
              { DB.famUpsert = True }
        either err instantiate result
      where
        -- this is only possible when new is False
        instantiate Nothing = error "upsert: impossible null"
        instantiate (Just doc) =
            fromPersistValuesThrow (entityDef $ Just newRecord) doc
        -}
-- | It would make more sense to call this _id, but GHC treats leading underscore in special ways
id_ :: T.Text
id_ = "_id"
-- _id is always the primary key in MongoDB
-- but _id can contain any unique value
keyToMongoDoc :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
              => Key record -> DB.Document
keyToMongoDoc k = maybe simpleKey compositeKey (entityPrimary (entityDefFromKey k))
  where
    values = keyToValues k
    -- no declared primary: the key is the plain _id value
    simpleKey = zipToDoc [DBName id_] values
    -- composite primary: _id is a sub-document of the primary fields
    compositeKey pdef = [id_ DB.=: zipToDoc (map fieldDB (compositeFields pdef)) values]
-- | Entity definition for the record type a key belongs to.
entityDefFromKey :: PersistEntity record => Key record -> EntityDef
entityDefFromKey k = entityDef (Just (recordTypeFromKey k))
-- | Collection name for the record type a key belongs to.
collectionNameFromKey :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext)
                      => Key record -> Text
collectionNameFromKey k = collectionName (recordTypeFromKey k)
-- | Projection selecting (value 1) every declared field of the entity.
projectionFromEntityDef :: EntityDef -> DB.Projector
projectionFromEntityDef eDef =
    [ unDBName (fieldDB fDef) DB.=: (1 :: Int) | fDef <- entityFields eDef ]
-- | Projection for the record type a key belongs to.
projectionFromKey :: PersistEntity record => Key record -> DB.Projector
projectionFromKey k = projectionFromEntityDef (entityDefFromKey k)
-- | Projection for a record's own fields.
projectionFromRecord :: PersistEntity record => record -> DB.Projector
projectionFromRecord record = projectionFromEntityDef (entityDef (Just record))
instance PersistQueryWrite DB.MongoContext where
    updateWhere _ [] = return ()
    updateWhere filts upds =
        DB.modify DB.Select {
          DB.coll = collectionName $ dummyFromFilts filts
        , DB.selector = filtersToDoc filts
        } $ updatesToDoc upds
    -- deletes every document matching the filters in one driver call
    deleteWhere filts = do
        DB.delete DB.Select {
          DB.coll = collectionName $ dummyFromFilts filts
        , DB.selector = filtersToDoc filts
        }
instance PersistQueryRead DB.MongoContext where
    count filts = do
        i <- DB.count query
        return $ fromIntegral i
      where
        query = DB.select (filtersToDoc filts) $
                  collectionName $ dummyFromFilts filts
    -- | uses cursor option NoCursorTimeout
    -- If there is no sorting, it will turn the $snapshot option on
    -- and explicitly closes the cursor when done
    selectSourceRes filts opts = do
        context <- ask
        return (pullCursor context `fmap` mkAcquire (open context) (close context))
      where
        close :: DB.MongoContext -> DB.Cursor -> IO ()
        close context cursor = runReaderT (DB.closeCursor cursor) context
        open :: DB.MongoContext -> IO DB.Cursor
        open = runReaderT (DB.find (makeQuery filts opts)
                   -- it is an error to apply $snapshot when sorting
                   { DB.snapshot = noSort
                   , DB.options = [DB.NoCursorTimeout]
                   })
        -- fetch batches until the cursor is exhausted, yielding each
        -- decoded entity downstream
        pullCursor context cursor = do
            mdoc <- liftIO $ runReaderT (DB.nextBatch cursor) context
            case mdoc of
                [] -> return ()
                docs -> do
                    forM_ docs $ fromPersistValuesThrow t >=> yield
                    pullCursor context cursor
        t = entityDef $ Just $ dummyFromFilts filts
        (_, _, orders) = limitOffsetOrder opts
        noSort = null orders
    selectFirst filts opts = DB.findOne (makeQuery filts opts)
                 >>= Traversable.mapM (fromPersistValuesThrow t)
      where
        t = entityDef $ Just $ dummyFromFilts filts
    -- stream only the _id of each matching document, converted to a Key
    selectKeysRes filts opts = do
        context <- ask
        let make = do
              cursor <- liftIO $ flip runReaderT context $ DB.find $ (makeQuery filts opts) {
                  DB.project = [id_ DB.=: (1 :: Int)]
                }
              pullCursor context cursor
        return $ return make
      where
        pullCursor context cursor = do
            mdoc <- liftIO $ runReaderT (DB.next cursor) context
            case mdoc of
                Nothing -> return ()
                Just [_id DB.:= idVal] -> do
                    k <- liftIO $ keyFrom_idEx idVal
                    yield k
                    pullCursor context cursor
                Just y -> liftIO $ throwIO $ PersistMarshalError $ T.pack $ "Unexpected in selectKeys: " ++ show y
-- | Translate an ordering option into a Mongo sort field (1 = ascending,
-- -1 = descending).  Only 'Asc' and 'Desc' are meaningful here.
orderClause :: PersistEntity val => SelectOpt val -> DB.Field
orderClause (Asc f)  = fieldName f DB.=: ( 1 :: Int)
orderClause (Desc f) = fieldName f DB.=: (-1 :: Int)
orderClause _        = error "orderClause: expected Asc or Desc"
-- | Assemble a full Mongo query from persistent filters and select
-- options (limit, offset, ordering), projecting the entity's fields.
makeQuery :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => [Filter record] -> [SelectOpt record] -> DB.Query
makeQuery filts opts = baseQuery
    { DB.limit   = fromIntegral limit
    , DB.skip    = fromIntegral offset
    , DB.sort    = sortFields
    , DB.project = projectionFromRecord (dummyFromFilts filts)
    }
  where
    baseQuery = DB.select (filtersToDoc filts) (collectionName (dummyFromFilts filts))
    (limit, offset, rawOrders) = limitOffsetOrder opts
    sortFields = map orderClause rawOrders
-- | Translate a filter list into a Mongo selector document.
-- An empty list selects everything; otherwise filters are AND-ed.
filtersToDoc :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => [Filter record] -> DB.Document
filtersToDoc filts =
#ifdef DEBUG
  debug $
#endif
    if null filts then [] else multiFilter AndDollar filts
-- | Translate a single filter (plain, backend-specific, OR, or AND)
-- into a Mongo selector document.
filterToDocument :: (PersistEntity val, PersistEntityBackend val ~ DB.MongoContext) => Filter val -> DB.Document
filterToDocument f =
    case f of
      Filter field v filt -> [filterToBSON (fieldName field) v filt]
      BackendFilter mf -> mongoFilterToDoc mf
      -- The empty filter case should never occur when the user uses ||.
      -- An empty filter list will throw an exception in multiFilter
      --
      -- The alternative would be to create a query which always returns true
      -- However, I don't think an end user ever wants that.
      FilterOr fs  -> multiFilter OrDollar fs
      -- Ignore an empty filter list instead of throwing an exception.
      -- \$and is necessary in only a few cases, but it makes query construction easier
      FilterAnd [] -> []
      FilterAnd fs -> multiFilter AndDollar fs
-- | Which Mongo combinator joins a list of sub-filters.
data MultiFilter = OrDollar | AndDollar deriving Show

-- | The Mongo operator name for a combinator.
toMultiOp :: MultiFilter -> Text
toMultiOp OrDollar  = orDollar
toMultiOp AndDollar = andDollar
-- | Join sub-filters with @$or@ / @$and@.  Empty sub-documents are
-- dropped first; a @$or@ over fewer than one usable filter is an error,
-- while @$and@ degenerates gracefully (empty list or single document).
multiFilter :: forall record. (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => MultiFilter -> [Filter record] -> [DB.Field]
multiFilter _ [] = throw $ PersistMongoDBError "An empty list of filters was given"
multiFilter multi filters =
    case (multi, usableDocs) of
        -- a $or must have at least 2 items
        (OrDollar,  [])    -> orError
        (AndDollar, [])    -> []
        (OrDollar,  [_])   -> orError
        (AndDollar, [doc]) -> doc
        (_,         docs)  -> [toMultiOp multi DB.:= DB.Array (map DB.Doc docs)]
  where
    usableDocs = filter (not . null) (map filterToDocument filters)
    orError = throw $ PersistMongoDBError $
        "An empty list of filters was given to one side of ||."
-- | Mongo operator names used when building selectors.
existsDollar, orDollar, andDollar :: Text
existsDollar = "$exists"
orDollar = "$or"
andDollar = "$and"
-- | Translate one field comparison into a Mongo selector field.
-- Equality and inequality get special handling for null: a stored null
-- and an absent field must both compare equal to Nothing.
filterToBSON :: forall a. ( PersistField a)
             => Text
             -> FilterValue a
             -> PersistFilter
             -> DB.Field
filterToBSON fname v filt = case filt of
    Eq -> nullEq
    Ne -> nullNeq
    _  -> notEquality
  where
    dbv = toValue v
    notEquality = fname DB.=: [showFilter filt DB.:= dbv]
    -- x == Nothing matches both an explicit null and a missing field
    nullEq = case dbv of
      DB.Null -> orDollar DB.=:
        [ [fname DB.:= DB.Null]
        , [fname DB.:= DB.Doc [existsDollar DB.:= DB.Bool False]]
        ]
      _ -> fname DB.:= dbv
    -- x /= Nothing requires the field to exist AND not be null
    nullNeq = case dbv of
      DB.Null ->
        fname DB.:= DB.Doc
          [ showFilter Ne DB.:= DB.Null
          , existsDollar DB.:= DB.Bool True
          ]
      _ -> notEquality
    showFilter Ne = "$ne"
    showFilter Gt = "$gt"
    showFilter Lt = "$lt"
    showFilter Ge = "$gte"
    showFilter Le = "$lte"
    showFilter In = "$in"
    showFilter NotIn = "$nin"
    -- Eq is handled by nullEq above and never reaches showFilter
    showFilter Eq = error "EQ filter not expected"
    showFilter (BackendSpecificFilter bsf) = throw $ PersistMongoDBError $ T.pack $ "did not expect BackendSpecificFilter " ++ T.unpack bsf
-- | Translate a Mongo-specific filter operator for a named field into a
-- selector document.
mongoFilterToBSON :: forall typ. PersistField typ
                  => Text
                  -> MongoFilterOperator typ
                  -> DB.Document
mongoFilterToBSON fname (PersistFilterOperator v op) = [filterToBSON fname v op]
mongoFilterToBSON fname (MongoFilterOperator bval)   = [fname DB.:= bval]
-- | Translate a Mongo-specific update for a named field.  A 'Right'
-- value is a list and is wrapped in 'PersistList' before conversion.
mongoUpdateToBson :: forall typ. PersistField typ
                  => Text
                  -> UpdateValueOp typ
                  -> DB.Field
mongoUpdateToBson fname (UpdateValueOp (Left v) op) =
    updateToBson fname (toPersistValue v) op
mongoUpdateToBson fname (UpdateValueOp (Right vs) op) =
    updateToBson fname (PersistList (map toPersistValue vs)) op
-- | Translate a backend-specific update, using the dotted path for a
-- nested field or the plain label for an array field.
mongoUpdateToDoc :: PersistEntity record => MongoUpdate record -> DB.Field
mongoUpdateToDoc (NestedUpdate field op) = mongoUpdateToBson (nestedFieldName field) op
mongoUpdateToDoc (ArrayUpdate field op)  = mongoUpdateToBson (fieldName field) op
-- | Translate a backend-specific filter into a selector document.
mongoFilterToDoc :: PersistEntity record => MongoFilter record -> DB.Document
mongoFilterToDoc (NestedFilter field op) = mongoFilterToBSON (nestedFieldName field) op
mongoFilterToDoc (ArrayFilter field op) = mongoFilterToBSON (fieldName field) op
mongoFilterToDoc (NestedArrayFilter field op) = mongoFilterToBSON (nestedFieldName field) op
mongoFilterToDoc (RegExpFilter fn (reg, opts)) = [ fieldName fn DB.:= DB.RegEx (DB.Regex reg opts)]
-- | Render a nested field path as a dotted Mongo label,
-- e.g. @"address.city"@.
nestedFieldName :: forall record typ. PersistEntity record => NestedField record typ -> Text
nestedFieldName = T.intercalate "." . nesFldName
  where
    -- collect each segment's label; the Last* constructors terminate
    -- the path, the Mid* constructors recurse
    nesFldName :: forall r1 r2. (PersistEntity r1) => NestedField r1 r2 -> [DB.Label]
    nesFldName (nf1 `LastEmbFld` nf2)          = [fieldName nf1, fieldName nf2]
    nesFldName ( f1 `MidEmbFld`  f2)           = fieldName f1 : nesFldName f2
    nesFldName ( f1 `MidNestFlds` f2)          = fieldName f1 : nesFldName f2
    nesFldName ( f1 `MidNestFldsNullable` f2)  = fieldName f1 : nesFldName f2
    nesFldName (nf1 `LastNestFld` nf2)         = [fieldName nf1, fieldName nf2]
    nesFldName (nf1 `LastNestFldNullable` nf2) = [fieldName nf1, fieldName nf2]
-- | Convert a filter's right-hand side into a BSON value.
toValue :: forall a. PersistField a => FilterValue a -> DB.Value
toValue (FilterValue v)   = DB.val (toPersistValue v)
toValue (UnsafeValue v)   = DB.val (toPersistValue v)
toValue (FilterValues vs) = DB.val (map toPersistValue vs)
-- | Database label for an entity field; the synthetic @Id@ field maps
-- to Mongo's @_id@.
fieldName :: forall record typ. (PersistEntity record) => EntityField record typ -> DB.Label
fieldName f
    | fieldHaskell def == HaskellName "Id" = id_
    | otherwise = unDBName (fieldDB def)
  where
    def = persistFieldDef f
-- | Decode a raw document into an entity, returning the marshal error
-- message on failure.
docToEntityEither :: forall record. (PersistEntity record) => DB.Document -> Either T.Text (Entity record)
docToEntityEither doc = entity
  where
    entDef = entityDef $ Just (getType entity)
    entity = eitherFromPersistValues entDef doc
    -- getType is never evaluated: it exists only to tie the result type
    -- back to the entity definition lookup above (a type-level trick).
    getType :: Either err (Entity ent) -> ent
    getType = error "docToEntityEither/getType: never here"
-- | Like 'docToEntityEither' but throws 'PersistMarshalError' in @m@.
docToEntityThrow :: forall m record. (Trans.MonadIO m, PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => DB.Document -> m (Entity record)
docToEntityThrow doc =
    either raise return (docToEntityEither doc)
  where
    raise = Trans.liftIO . throwIO . PersistMarshalError
-- | Decode a document against an explicit entity definition, throwing a
-- 'PersistMarshalError' prefixed with the entity name on failure.
fromPersistValuesThrow :: (Trans.MonadIO m, PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => EntityDef -> [DB.Field] -> m (Entity record)
fromPersistValuesThrow entDef doc =
    either raise return (eitherFromPersistValues entDef doc)
  where
    raise t = Trans.liftIO . throwIO $ PersistMarshalError $
        unHaskellName (entityHaskell entDef) `mappend` ": " `mappend` t
-- | Apply a function to the 'Left' side of an 'Either', leaving 'Right'
-- untouched (same as 'Data.Bifunctor.first').
mapLeft :: (a -> c) -> Either a b -> Either c b
mapLeft f = either (Left . f) Right
-- | Decode a document: pull out @_id@ as the key, reorder the remaining
-- fields to match the entity definition, and build the record.
-- Error messages are suffixed with the offending document.
eitherFromPersistValues :: (PersistEntity record) => EntityDef -> [DB.Field] -> Either T.Text (Entity record)
eitherFromPersistValues entDef doc = case mKey of
    Nothing -> addDetail $ Left $ "could not find _id field: "
    Just kpv -> do
        body <- addDetail (fromPersistValues (map snd $ orderPersistValues (toEmbedEntityDef entDef) castDoc))
        key <- keyFromValues [kpv]
        return $ Entity key body
  where
    addDetail :: Either Text a -> Either Text a
    addDetail = mapLeft (\msg -> msg `mappend` " for doc: " `mappend` T.pack (show doc))
    castDoc = assocListFromDoc doc
    -- normally _id is the first field
    mKey = lookup id_ castDoc
-- | unlike many SQL databases, MongoDB makes no guarantee of the ordering
-- of the fields returned in the document.
-- Ordering might be maintained if persistent were the only user of the db,
-- but other tools may be using MongoDB.
--
-- Persistent creates a Haskell record from a list of PersistValue
-- But most importantly it puts all PersistValues in the proper order
orderPersistValues :: EmbedEntityDef -> [(Text, PersistValue)] -> [(Text, PersistValue)]
orderPersistValues entDef castDoc = reorder
  where
    castColumns = map nameAndEmbed (embeddedFields entDef)
    nameAndEmbed fdef = (fieldToLabel fdef, emFieldEmbed fdef)

    -- TODO: the below reasoning should be re-thought now that we are no longer inserting null: searching for a null column will look at every returned field before giving up
    -- Also, we are now doing the _id lookup at the start.
    --
    -- we have an alist of fields that need to be the same order as entityColumns
    --
    -- this naive lookup is O(n^2)
    -- reorder = map (fromJust . (flip Prelude.lookup $ castDoc)) castColumns
    --
    -- this is O(n * log(n))
    -- reorder = map (\c -> (M.fromList castDoc) M.! c) castColumns
    --
    -- and finally, this is O(n * log(n))
    -- * do an alist lookup for each column
    -- * but once we found an item in the alist use a new alist without that item for future lookups
    -- * so for the last query there is only one item left
    --
    reorder :: [(Text, PersistValue)]
    reorder = match castColumns castDoc []
      where
        match :: [(Text, Maybe EmbedEntityDef)]
              -> [(Text, PersistValue)]
              -> [(Text, PersistValue)]
              -> [(Text, PersistValue)]
        -- when there are no more Persistent castColumns we are done
        --
        -- allow extra mongoDB fields that persistent does not know about
        -- another application may use fields we don't care about
        -- our own application may set extra fields with the raw driver
        match [] _ values = values
        match (column:columns) fields values =
            let (found, unused) = matchOne fields []
            in match columns unused $ values ++
                [(fst column, nestedOrder (snd column) (snd found))]
          where
            -- recursively reorder embedded documents and lists of them
            nestedOrder (Just em) (PersistMap m) =
                PersistMap $ orderPersistValues em m
            nestedOrder (Just em) (PersistList l) =
                PersistList $ map (nestedOrder (Just em)) l
            -- implied: nestedOrder Nothing found = found
            nestedOrder _ found = found

            matchOne (field:fs) tried =
                if fst column == fst field
                    -- snd drops the name now that it has been used to make the match
                    -- persistent will add the field name later
                    then (field, tried ++ fs)
                    else matchOne fs (field:tried)
            -- if field is not found, assume it was a Nothing
            --
            -- a Nothing could be stored as null, but that would take up space.
            -- instead, we want to store no field at all: that takes less space.
            -- Also, another ORM may be doing the same
            -- Also, this adding a Maybe field means no migration required
            matchOne [] tried = ((fst column, PersistNull), tried)
-- | View a BSON document as an association list of persistent values.
assocListFromDoc :: DB.Document -> [(Text, PersistValue)]
assocListFromDoc = Prelude.map (\field -> (DB.label field, cast (DB.value field)))
-- | Serialize an ObjectId into its persistent representation.
oidToPersistValue :: DB.ObjectId -> PersistValue
oidToPersistValue oid = PersistObjectId (Serialize.encode oid)
-- | Wrap an ObjectId as a persistent key.
oidToKey :: (ToBackendKey DB.MongoContext record) => DB.ObjectId -> Key record
oidToKey oid = fromBackendKey (MongoKey oid)
-- | Deserialize a 'PersistObjectId' back into an ObjectId.  Throws (in
-- pure code) on a decode failure or on any other 'PersistValue'
-- constructor.
persistObjectIdToDbOid :: PersistValue -> DB.ObjectId
persistObjectIdToDbOid (PersistObjectId k) =
    either badDecode id (Serialize.decode k)
  where
    badDecode msg = throw $ PersistError $ T.pack $ "error decoding " ++ (show k) ++ ": " ++ msg
persistObjectIdToDbOid _ = throw $ PersistInvalidField "expected PersistObjectId"
-- | Extract the raw ObjectId from a persistent key.
keyToOid :: ToBackendKey DB.MongoContext record => Key record -> DB.ObjectId
keyToOid k = unMongoKey (toBackendKey k)
-- Bidirectional mapping between persistent values and BSON values.
instance DB.Val PersistValue where
  val (PersistInt64 x)   = DB.Int64 x
  val (PersistText x)    = DB.String x
  val (PersistDouble x)  = DB.Float x
  val (PersistBool x)    = DB.Bool x
#ifdef HIGH_PRECISION_DATE
  -- nanoseconds since the epoch, stored as Int64
  val (PersistUTCTime x) = DB.Int64 $ round $ 1000 * 1000 * 1000 * (utcTimeToPOSIXSeconds x)
#else
  -- this is just millisecond precision: https://jira.mongodb.org/browse/SERVER-1460
  val (PersistUTCTime x) = DB.UTC x
#endif
  val (PersistDay d)     = DB.Int64 $ fromInteger $ toModifiedJulianDay d
  val (PersistNull)      = DB.Null
  val (PersistList l)    = DB.Array $ map DB.val l
  val (PersistMap m)     = DB.Doc $ map (\(k, v)-> (DB.=:) k v) m
  val (PersistByteString x) = DB.Bin (DB.Binary x)
  val x@(PersistObjectId _) = DB.ObjId $ persistObjectIdToDbOid x
  val (PersistTimeOfDay _)  = throw $ PersistMongoDBUnsupported "PersistTimeOfDay not implemented for the MongoDB backend. only PersistUTCTime currently implemented"
  val (PersistRational _)   = throw $ PersistMongoDBUnsupported "PersistRational not implemented for the MongoDB backend"
  val (PersistArray a)      = DB.val $ PersistList a
  val (PersistDbSpecific _) = throw $ PersistMongoDBUnsupported "PersistDbSpecific not implemented for the MongoDB backend"
  -- cast' maps BSON back to PersistValue; binary-like BSON subtypes all
  -- collapse to PersistByteString
  cast' (DB.Float x)  = Just (PersistDouble x)
  cast' (DB.Int32 x)  = Just $ PersistInt64 $ fromIntegral x
  cast' (DB.Int64 x)  = Just $ PersistInt64 x
  cast' (DB.String x) = Just $ PersistText x
  cast' (DB.Bool x)   = Just $ PersistBool x
  cast' (DB.UTC d)    = Just $ PersistUTCTime d
  cast' DB.Null       = Just $ PersistNull
  cast' (DB.Bin (DB.Binary b))   = Just $ PersistByteString b
  cast' (DB.Fun (DB.Function f)) = Just $ PersistByteString f
  cast' (DB.Uuid (DB.UUID uid))  = Just $ PersistByteString uid
  cast' (DB.Md5 (DB.MD5 md5))    = Just $ PersistByteString md5
  cast' (DB.UserDef (DB.UserDefined bs)) = Just $ PersistByteString bs
  cast' (DB.RegEx (DB.Regex us1 us2))    = Just $ PersistByteString $ E.encodeUtf8 $ T.append us1 us2
  cast' (DB.Doc doc)  = Just $ PersistMap $ assocListFromDoc doc
  cast' (DB.Array xs) = Just $ PersistList $ mapMaybe DB.cast' xs
  cast' (DB.ObjId x)  = Just $ oidToPersistValue x
  -- the remaining BSON types throw from pure code when encountered
  cast' (DB.JavaScr _) = throw $ PersistMongoDBUnsupported "cast operation not supported for javascript"
  cast' (DB.Sym _)     = throw $ PersistMongoDBUnsupported "cast operation not supported for sym"
  cast' (DB.Stamp _)   = throw $ PersistMongoDBUnsupported "cast operation not supported for stamp"
  cast' (DB.MinMax _)  = throw $ PersistMongoDBUnsupported "cast operation not supported for minmax"
cast :: DB.Value -> PersistValue
-- since we have case analysys this won't ever be Nothing
-- However, unsupported types do throw an exception in pure code
-- probably should re-work this to throw in IO
cast = fromJust . DB.cast'
-- | Serialize an ObjectId as its two machine words, in order.
instance Serialize.Serialize DB.ObjectId where
    put (DB.Oid w1 w2) = Serialize.put w1 >> Serialize.put w2
    get = DB.Oid <$> Serialize.get <*> Serialize.get
-- Phantom helpers: these are never evaluated; they exist only so that a
-- Unique/Filter argument can drive type-class resolution for its record
-- type (e.g. when computing a collection name).
dummyFromUnique :: Unique v -> v
dummyFromUnique _ = error "dummyFromUnique"
dummyFromFilts :: [Filter v] -> v
dummyFromFilts _ = error "dummyFromFilts"
-- | Username and password used to authenticate against the database.
data MongoAuth = MongoAuth DB.Username DB.Password deriving Show
-- | Information required to connect to a mongo database
data MongoConf = MongoConf
    { mgDatabase :: Text            -- ^ database name
    , mgHost     :: Text            -- ^ host name or address (primary, for replica sets)
    , mgPort     :: DB.PortID       -- ^ port to connect to
    , mgAuth     :: Maybe MongoAuth -- ^ optional credentials
    , mgAccessMode :: DB.AccessMode -- ^ read/write confirmation mode
    , mgPoolStripes :: Int          -- ^ number of pool stripes
    , mgStripeConnections :: Int    -- ^ connections per stripe
    , mgConnectionIdleTime :: NominalDiffTime -- ^ idle time before a connection is closed
    -- | YAML fields for this are @rsName@ and @rsSecondaries@
    -- mgHost is assumed to be the primary
    , mgReplicaSetConfig :: Maybe ReplicaSetConfig
    } deriving Show
-- Defaults used by 'defaultMongoConf' and the FromJSON parser.
defaultHost :: Text
defaultHost = "127.0.0.1"
-- | confirmed writes with write concern w=1
defaultAccessMode :: DB.AccessMode
defaultAccessMode = DB.ConfirmWrites ["w" DB.:= DB.Int32 1]
defaultPoolStripes, defaultStripeConnections :: Int
defaultPoolStripes = 1
defaultStripeConnections = 10
-- | seconds a connection may sit idle before the pool closes it
defaultConnectionIdleTime :: NominalDiffTime
defaultConnectionIdleTime = 20
-- | A configuration for the given database name with every other field
-- at its default (localhost, default port, no auth, confirmed writes).
defaultMongoConf :: Text -> MongoConf
defaultMongoConf dbName = MongoConf
  { mgDatabase = dbName
  , mgHost = defaultHost
  , mgPort = DB.defaultPort
  , mgAuth = Nothing
  , mgAccessMode = defaultAccessMode
  , mgPoolStripes = defaultPoolStripes
  , mgStripeConnections = defaultStripeConnections
  , mgConnectionIdleTime = defaultConnectionIdleTime
  , mgReplicaSetConfig = Nothing
  }
-- | Replica set name plus the list of secondary hosts
-- (the primary comes from 'mgHost').
data ReplicaSetConfig = ReplicaSetConfig DB.ReplicaSetName [DB.Host]
    deriving Show
-- Parse a MongoConf from a YAML/JSON object; unknown or deprecated keys
-- (poolsize) fail with an explanatory message.
instance FromJSON MongoConf where
    parseJSON v = modifyFailure ("Persistent: error loading MongoDB conf: " ++) $
      flip (withObject "MongoConf") v $ \o ->do
        db                  <- o .:  "database"
        host                <- o .:? "host" .!= defaultHost
        NoOrphanPortID port <- o .:? "port" .!= NoOrphanPortID DB.defaultPort
        poolStripes         <- o .:? "poolstripes" .!= defaultPoolStripes
        stripeConnections   <- o .:? "connections" .!= defaultStripeConnections
        NoOrphanNominalDiffTime connectionIdleTime <- o .:? "connectionIdleTime" .!= NoOrphanNominalDiffTime defaultConnectionIdleTime
        mUser               <- o .:? "user"
        mPass               <- o .:? "password"
        accessString        <- o .:? "accessMode" .!= confirmWrites
        mRsName             <- o .:? "rsName"
        rsSecondaires       <- o .:? "rsSecondaries" .!= []

        -- "poolsize" is deprecated; reject it outright with guidance
        mPoolSize <- o .:? "poolsize"
        case mPoolSize of
          Nothing -> return ()
          Just (_::Int) -> fail "specified deprecated poolsize attribute. Please specify a connections. You can also specify a pools attribute which defaults to 1. Total connections opened to the db are connections * pools"

        accessMode <- case accessString of
             "ReadStaleOk"       -> return DB.ReadStaleOk
             "UnconfirmedWrites" -> return DB.UnconfirmedWrites
             "ConfirmWrites"     -> return defaultAccessMode
             badAccess -> fail $ "unknown accessMode: " ++ T.unpack badAccess

        -- NOTE(review): this uses 'error' (pure exception) rather than
        -- 'fail' (parse failure) for the missing-rsName case — confirm
        -- whether that asymmetry is intentional.
        let rs = case (mRsName, rsSecondaires) of
                     (Nothing, []) -> Nothing
                     (Nothing, _) -> error "found rsSecondaries key. Also expected but did not find a rsName key"
                     (Just rsName, hosts) -> Just $ ReplicaSetConfig rsName $ fmap DB.readHostPort hosts

        return MongoConf {
            mgDatabase = db
          , mgHost = host
          , mgPort = port
          , mgAuth =
              -- credentials only when both user and password are present
              case (mUser, mPass) of
                (Just user, Just pass) -> Just (MongoAuth user pass)
                _ -> Nothing
          , mgPoolStripes = poolStripes
          , mgStripeConnections = stripeConnections
          , mgAccessMode = accessMode
          , mgConnectionIdleTime = connectionIdleTime
          , mgReplicaSetConfig = rs
          }
      where
        confirmWrites = "ConfirmWrites"
-- | Wire 'MongoConf' into persistent's configuration machinery: pools
-- are 'ConnectionPool's, actions run in mongoDB's 'DB.Action' monad.
instance PersistConfig MongoConf where
    type PersistConfigBackend MongoConf = DB.Action
    type PersistConfigPool MongoConf = ConnectionPool
    createPoolConfig = createMongoPool
    -- The access mode stored in the conf decides the read/write concern
    -- for every action run against the pool.
    runPool c = runMongoDBPool (mgAccessMode c)
    loadConfig = parseJSON
-- | Docker integration: if the linked-container environment variable
-- @MONGODB_PORT_27017_TCP_ADDR@ is set, point the configuration's host
-- at it; otherwise return the configuration unchanged.
applyDockerEnv :: MongoConf -> IO MongoConf
applyDockerEnv mconf =
  maybe mconf (\h -> mconf { mgHost = T.pack h })
    <$> lookupEnv "MONGODB_PORT_27017_TCP_ADDR"
-- ---------------------------
-- * MongoDB specific Filters
-- $filters
--
-- You can find example usage for all of Persistent in our test cases:
-- <https://github.com/yesodweb/persistent/blob/master/persistent-test/EmbedTest.hs#L144>
--
-- These filters create a query that reaches deeper into a document with
-- nested fields.
-- Hook the MongoDB-specific filter/update types into persistent's
-- backend-specific extension points.
type instance BackendSpecificFilter DB.MongoContext record = MongoFilter record
type instance BackendSpecificUpdate DB.MongoContext record = MongoUpdate record
-- | A path drilling into nested (embedded) fields of a record, used to
-- build dotted-field queries such as @object.field@.  "Emb" steps go
-- through list-of-embedded fields, "Nest" steps through plain nested
-- fields; "Last" constructors terminate the path, "Mid" ones continue it,
-- and the "Nullable" variants step through a 'Maybe' field.
data NestedField record typ
  = forall emb. PersistEntity emb => EntityField record [emb] `LastEmbFld` EntityField emb typ
  | forall emb. PersistEntity emb => EntityField record [emb] `MidEmbFld` NestedField emb typ
  | forall nest. PersistEntity nest => EntityField record nest `MidNestFlds` NestedField nest typ
  | forall nest. PersistEntity nest => EntityField record (Maybe nest) `MidNestFldsNullable` NestedField nest typ
  | forall nest. PersistEntity nest => EntityField record nest `LastNestFld` EntityField nest typ
  | forall nest. PersistEntity nest => EntityField record (Maybe nest) `LastNestFldNullable` EntityField nest typ
-- | A MongoRegex represents a Regular expression.
-- It is a tuple of the expression and the options for the regular expression, respectively
-- Options are listed here: <http://docs.mongodb.org/manual/reference/operator/query/regex/>
-- If you use the same options you may want to define a helper such as @r t = (t, "ims")@
type MongoRegex = (Text, Text)
-- | Mark the subset of 'PersistField's that can be searched by a mongoDB regex
-- Anything stored as PersistText or an array of PersistText would be valid
class PersistField typ => MongoRegexSearchable typ where
-- Regex search works on text fields, optional text fields, and (nested)
-- lists thereof; the class is empty and serves purely as a constraint.
instance MongoRegexSearchable Text
instance MongoRegexSearchable rs => MongoRegexSearchable (Maybe rs)
instance MongoRegexSearchable rs => MongoRegexSearchable [rs]
-- | Filter using a Regular expression.
(=~.) :: forall record searchable. (MongoRegexSearchable searchable, PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => EntityField record searchable -> MongoRegex -> Filter record
fld =~. val = BackendFilter $ RegExpFilter fld val
data MongoFilterOperator typ = PersistFilterOperator (FilterValue typ) PersistFilter
| MongoFilterOperator DB.Value
data UpdateValueOp typ =
UpdateValueOp
(Either typ [typ])
(Either PersistUpdate MongoUpdateOperation)
deriving Show
data MongoUpdateOperation = MongoEach MongoUpdateOperator
| MongoSimple MongoUpdateOperator
deriving Show
data MongoUpdateOperator = MongoPush
| MongoPull
| MongoAddToSet
deriving Show
-- | Render a 'MongoUpdateOperator' as its literal MongoDB operator keyword.
opToText :: MongoUpdateOperator -> Text
opToText op = case op of
  MongoPush -> "$push"
  MongoPull -> "$pull"
  MongoAddToSet -> "$addToSet"
data MongoFilter record =
forall typ. PersistField typ =>
NestedFilter
(NestedField record typ)
(MongoFilterOperator typ)
| forall typ. PersistField typ =>
ArrayFilter
(EntityField record [typ])
(MongoFilterOperator typ)
| forall typ. PersistField typ =>
NestedArrayFilter
(NestedField record [typ])
(MongoFilterOperator typ)
| forall typ. MongoRegexSearchable typ =>
RegExpFilter
(EntityField record typ)
MongoRegex
data MongoUpdate record =
forall typ. PersistField typ =>
NestedUpdate
(NestedField record typ)
(UpdateValueOp typ)
| forall typ. PersistField typ =>
ArrayUpdate
(EntityField record [typ])
(UpdateValueOp typ)
-- | Point to an array field with an embedded object and give a deeper query into the embedded object.
-- Use with 'nestEq'.
(->.) :: forall record emb typ. PersistEntity emb => EntityField record [emb] -> EntityField emb typ -> NestedField record typ
(->.) = LastEmbFld
-- | Point to an array field with an embedded object and give a deeper query into the embedded object.
-- This level of nesting is not the final level.
-- Use '->.' or '&->.' to point to the final level.
(~>.) :: forall record typ emb. PersistEntity emb => EntityField record [emb] -> NestedField emb typ -> NestedField record typ
(~>.) = MidEmbFld
-- | Point to a nested field to query. This field is not an array type.
-- Use with 'nestEq'.
(&->.) :: forall record typ nest. PersistEntity nest => EntityField record nest -> EntityField nest typ -> NestedField record typ
(&->.) = LastNestFld
-- | Same as '&->.', but Works against a Maybe type
(?&->.) :: forall record typ nest. PersistEntity nest => EntityField record (Maybe nest) -> EntityField nest typ -> NestedField record typ
(?&->.) = LastNestFldNullable
-- | Point to a nested field to query. This field is not an array type.
-- This level of nesting is not the final level.
-- Use '->.' or '&>.' to point to the final level.
(&~>.) :: forall val nes nes1. PersistEntity nes1 => EntityField val nes1 -> NestedField nes1 nes -> NestedField val nes
(&~>.) = MidNestFlds
-- | Same as '&~>.', but works against a Maybe type
(?&~>.) :: forall val nes nes1. PersistEntity nes1 => EntityField val (Maybe nes1) -> NestedField nes1 nes -> NestedField val nes
(?&~>.) = MidNestFldsNullable
infixr 4 =~.
infixr 5 ~>.
infixr 5 &~>.
infixr 5 ?&~>.
infixr 6 &->.
infixr 6 ?&->.
infixr 6 ->.
infixr 4 `nestEq`
infixr 4 `nestNe`
infixr 4 `nestGe`
infixr 4 `nestLe`
infixr 4 `nestIn`
infixr 4 `nestNotIn`
infixr 4 `anyEq`
infixr 4 `nestAnyEq`
infixr 4 `nestBsonEq`
infixr 4 `anyBsonEq`
infixr 4 `nestSet`
infixr 4 `push`
infixr 4 `pull`
infixr 4 `pullAll`
infixr 4 `addToSet`
-- | The normal Persistent equality test '==.' is not generic enough.
-- Instead use this with the drill-down arrow operators such as '->.'
--
-- using this as the only query filter is similar to the following in the mongoDB shell
--
-- > db.Collection.find({"object.field": item})
nestEq, nestNe, nestGe, nestLe, nestIn, nestNotIn :: forall record typ.
    ( PersistField typ , PersistEntityBackend record ~ DB.MongoContext)
    => NestedField record typ
    -> typ
    -> Filter record
-- Each function fixes the comparison operator passed to 'nestedFilterOp'.
nestEq = nestedFilterOp Eq
nestNe = nestedFilterOp Ne
nestGe = nestedFilterOp Ge
nestLe = nestedFilterOp Le
nestIn = nestedFilterOp In
nestNotIn = nestedFilterOp NotIn
nestedFilterOp :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => PersistFilter -> NestedField record typ -> typ -> Filter record
nestedFilterOp op nf v = BackendFilter $
NestedFilter nf $ PersistFilterOperator (FilterValue v) op
-- | same as `nestEq`, but give a BSON Value
nestBsonEq :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => NestedField record typ -> DB.Value -> Filter record
nf `nestBsonEq` val = BackendFilter $
NestedFilter nf $ MongoFilterOperator val
-- | Like '(==.)' but for an embedded list.
-- Checks to see if the list contains an item.
--
-- In Haskell we need different equality functions for embedded fields that are lists or non-lists to keep things type-safe.
--
-- using this as the only query filter is similar to the following in the mongoDB shell
--
-- > db.Collection.find({arrayField: arrayItem})
anyEq :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => EntityField record [typ] -> typ -> Filter record
fld `anyEq` val = BackendFilter $
ArrayFilter fld $ PersistFilterOperator (FilterValue val) Eq
-- | Like nestEq, but for an embedded list.
-- Checks to see if the nested list contains an item.
nestAnyEq :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => NestedField record [typ] -> typ -> Filter record
fld `nestAnyEq` val = BackendFilter $
NestedArrayFilter fld $ PersistFilterOperator (FilterValue val) Eq
-- | same as `anyEq`, but give a BSON Value
anyBsonEq :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => EntityField record [typ] -> DB.Value -> Filter record
fld `anyBsonEq` val = BackendFilter $
ArrayFilter fld $ MongoFilterOperator val
nestSet, nestInc, nestDec, nestMul :: forall record typ.
( PersistField typ , PersistEntityBackend record ~ DB.MongoContext)
=> NestedField record typ
-> typ
-> Update record
nestSet = nestedUpdateOp Assign
nestInc = nestedUpdateOp Add
nestDec = nestedUpdateOp Subtract
nestMul = nestedUpdateOp Multiply
-- | Array update operators, mapping directly onto MongoDB's
-- @$push@, @$pull@ and @$addToSet@ respectively.
push, pull, addToSet :: forall record typ.
    ( PersistField typ
    , PersistEntityBackend record ~ DB.MongoContext
    ) => EntityField record [typ] -> typ -> Update record
push = backendArrayOperation MongoPush
pull = backendArrayOperation MongoPull
addToSet = backendArrayOperation MongoAddToSet
backendArrayOperation ::
forall record typ.
(PersistField typ, BackendSpecificUpdate (PersistEntityBackend record) record ~ MongoUpdate record)
=> MongoUpdateOperator -> EntityField record [typ] -> typ
-> Update record
backendArrayOperation op fld val = BackendUpdate $
ArrayUpdate fld $ UpdateValueOp (Left val) (Right $ MongoSimple op)
-- | equivalent to $each
--
-- > eachOp push field []
--
-- @eachOp pull@ will get translated to @$pullAll@
eachOp :: forall record typ.
( PersistField typ, PersistEntityBackend record ~ DB.MongoContext)
=> (EntityField record [typ] -> typ -> Update record)
-> EntityField record [typ] -> [typ]
-> Update record
eachOp haskellOp fld val = case haskellOp fld (error "eachOp: undefined") of
BackendUpdate (ArrayUpdate _ (UpdateValueOp (Left _) (Right (MongoSimple op)))) -> each op
BackendUpdate (ArrayUpdate{}) -> error "eachOp: unexpected ArrayUpdate"
BackendUpdate (NestedUpdate{}) -> error "eachOp: did not expect NestedUpdate"
Update{} -> error "eachOp: did not expect Update"
where
each op = BackendUpdate $ ArrayUpdate fld $
UpdateValueOp (Right val) (Right $ MongoEach op)
pullAll :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => EntityField record [typ] -> [typ] -> Update record
fld `pullAll` val = eachOp pull fld val
nestedUpdateOp :: forall record typ.
( PersistField typ
, PersistEntityBackend record ~ DB.MongoContext
) => PersistUpdate -> NestedField record typ -> typ -> Update record
nestedUpdateOp op nf v = BackendUpdate $
NestedUpdate nf $ UpdateValueOp (Left v) (Left op)
-- | Intersection of lists: if any value in the field is found in the list.
inList :: PersistField typ => EntityField v [typ] -> [typ] -> Filter v
-- NOTE(review): 'unsafeCoerce' presumably casts @EntityField v [typ]@ to
-- the element-typed field 'Filter' expects — confirm against persistent's
-- Filter internals before touching this.
f `inList` a = Filter (unsafeCoerce f) (FilterValues a) In
infix 4 `inList`
-- | No intersection of lists: if no value in the field is found in the list.
ninList :: PersistField typ => EntityField v [typ] -> [typ] -> Filter v
-- BUGFIX: this previously used the 'In' operator, which made 'ninList'
-- behave identically to 'inList'.  'NotIn' implements the documented
-- "no intersection" semantics (MongoDB's @$nin@).
-- NOTE(review): the 'unsafeCoerce' mirrors 'inList'; confirm against
-- persistent's Filter internals.
f `ninList` a = Filter (unsafeCoerce f) (FilterValues a) NotIn
infix 4 `ninList`
|
naushadh/persistent
|
persistent-mongoDB/Database/Persist/MongoDB.hs
|
mit
| 59,967
| 0
| 21
| 14,516
| 15,228
| 7,929
| 7,299
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./CspCASL/SymbItems.hs
Description : syntactic csp-casl symbols
Copyright : (c) Christian Maeder, DFKI GmbH 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
-}
module CspCASL.SymbItems where
import CspCASL.AS_CspCASL_Process
import CspCASL.CspCASL_Keywords
import CspCASL.Print_CspCASL
import CspCASL.Parse_CspCASL_Process
import CASL.AS_Basic_CASL
import CASL.SymbolParser
import CASL.ToDoc
import Common.AnnoState
import Common.Doc hiding (braces)
import Common.DocUtils
import Common.Id
import Common.Keywords
import Common.Lexer
import Common.Parsec
import Common.Token
import Text.ParserCombinators.Parsec
import Control.Monad
import Data.Data
import qualified Data.Set as Set
-- | A kinded list of CspCASL symbols.
data CspSymbItems = CspSymbItems CspSymbKind [CspSymb]
  deriving (Show, Eq, Ord, Typeable, Data)
-- | A kinded list of symbol mappings.
data CspSymbMapItems = CspSymbMapItems CspSymbKind [CspSymbMap]
  deriving (Show, Eq, Ord, Typeable, Data)
-- | Symbol kinds: plain CASL kinds plus CspCASL processes and channels.
data CspSymbKind = CaslKind SYMB_KIND | ProcessKind | ChannelKind
  deriving (Show, Eq, Ord, Typeable, Data)
-- | A symbol: an identifier with an optional type annotation.
data CspSymb = CspSymb Id (Maybe CspType)
  deriving (Show, Eq, Ord, Typeable, Data)
-- for channels with sorts we may re-use A_type that is ambiguous
data CspType = CaslType TYPE | ProcType ProcProfile
  deriving (Show, Eq, Ord, Typeable, Data)
-- | A symbol, optionally mapped to a target symbol.
data CspSymbMap = CspSymbMap CspSymb (Maybe CspSymb)
  deriving (Show, Eq, Ord, Typeable, Data)
-- | Render a symbol-kind keyword, pluralised according to the length of
-- the given list.  An implicit CASL kind renders as nothing at all.
pluralCspSympKind :: CspSymbKind -> [a] -> Doc
pluralCspSympKind k l = case k of
  CaslKind Implicit -> empty
  CaslKind c -> keyword $ pluralS_symb_list c l
  ProcessKind -> keyword processS
  ChannelKind -> keyword $ channelS ++ appendS l
instance Pretty CspSymbKind where
pretty k = pluralCspSympKind k [()]
instance Pretty CspType where
pretty t = case t of
CaslType c -> colon <> pretty c
ProcType p -> printProcProfile p
instance Pretty CspSymb where
pretty (CspSymb i ms) = pretty i <+> pretty ms
instance Pretty CspSymbMap where
pretty (CspSymbMap s ms) = pretty s <+> case ms of
Nothing -> empty
Just t -> mapsto <+> pretty t
instance Pretty CspSymbItems where
pretty (CspSymbItems k l) = pluralCspSympKind k l <+> ppWithCommas l
instance Pretty CspSymbMapItems where
pretty (CspSymbMapItems k l) = pluralCspSympKind k l <+> ppWithCommas l
commAlpha :: AParser st CommAlpha
commAlpha = fmap Set.fromList $ single commType <|> bracedList
-- | parsing a possibly qualified identifier
cspSymb :: AParser st CspSymb
cspSymb =
do i <- parseCspId
do
_ <- colonST
t <- fmap CaslType (opOrPredType cspKeywords) <|>
fmap (ProcType . ProcProfile []) commAlpha
return $ CspSymb i $ Just t
<|> do
ts <- parenList cspSortId
colonT
cs <- commAlpha
return $ CspSymb i $ Just $ ProcType $ ProcProfile ts cs
<|> return (CspSymb i Nothing)
-- | Parse one symbol, optionally followed by a @|->@ mapping (with an
-- optional kind keyword) to a second symbol.
cspSymbMap :: AParser st CspSymbMap
cspSymbMap = do
  s <- cspSymb
  mt <- optionMaybe $ asKey mapsTo >> optional cspSymbKind >> cspSymb
  return $ CspSymbMap s mt
-- | Parse a kind keyword: @channel(s)@, @process@, or a CASL symbol kind.
cspSymbKind :: AParser st CspSymbKind
cspSymbKind =
      (pluralKeyword channelS >> return ChannelKind)
  <|> (asKey processS >> return ProcessKind)
  <|> ((CaslKind . fst) <$> symbKind)
-- | parse a comma separated list of symbols
cspSymbs :: AParser st [CspSymb]
cspSymbs =
do s <- cspSymb
do
_ <- commaT `followedWith` parseCspId
is <- cspSymbs
return $ s : is
<|> return [s]
{- | Parse a possible kinded list of comma separated CspCASL symbols.
The argument is a list of keywords to avoid as identifiers. -}
cspSymbItems :: AParser st CspSymbItems
cspSymbItems = fmap (CspSymbItems $ CaslKind Implicit) cspSymbs <|> do
k <- cspSymbKind
fmap (CspSymbItems k) cspSymbs
-- | parse a comma separated list of symbols
cspSymbMaps :: AParser st [CspSymbMap]
cspSymbMaps =
do s <- cspSymbMap
do
_ <- commaT `followedWith` parseCspId
is <- cspSymbMaps
return $ s : is
<|> return [s]
-- | parse a possible kinded list of CspCASL symbol mappings
cspSymbMapItems :: AParser st CspSymbMapItems
cspSymbMapItems = fmap (CspSymbMapItems $ CaslKind Implicit) cspSymbMaps
<|> do
k <- cspSymbKind
fmap (CspSymbMapItems k) cspSymbMaps
|
spechub/Hets
|
CspCASL/SymbItems.hs
|
gpl-2.0
| 4,469
| 0
| 17
| 957
| 1,221
| 616
| 605
| 105
| 4
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
#ifdef LEKSAH_WITH_CODE_MIRROR
{-# LANGUAGE RecordWildCards #-}
#endif
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.TextEditor.CodeMirror
-- Copyright : 2007-2013 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL Nothing
--
-- Maintainer : maintainer@leksah.org
-- Stability : provisional
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module IDE.TextEditor.CodeMirror (
CodeMirror(..)
#ifdef LEKSAH_WITH_CODE_MIRROR
, TextEditor(..)
, EditorBuffer(..)
, EditorView(..)
, EditorIter(..)
, EditorMark(..)
, EditorTag(..)
, EditorTagTable(..)
, newCMBuffer
#endif
) where
import Data.Typeable (Typeable)
import Graphics.UI.Gtk (scrolledWindowSetShadowType)
import Graphics.UI.Gtk.General.Enums (ShadowType(..))
import Data.Text (Text)
import Text.Show (Show)
import Data.Tuple (snd, fst)
import Data.Function (($), (.))
import Data.Maybe (Maybe, Maybe(..))
import GHC.Base (Functor(..), Monad(..))
import Data.Int (Int)
import System.IO (FilePath)
import Data.List ((++))
import Data.Bool (Bool(..), not)
import GHC.Real (fromIntegral, RealFrac(..))
import GHC.Num (Num(..))
import Data.Eq (Eq(..))
import GHC.Float (Double)
import qualified Data.Text as T (pack)
#ifdef LEKSAH_WITH_CODE_MIRROR
import Control.Monad (unless)
import Data.Text (pack, unpack)
import IDE.TextEditor.Class (TextEditor(..))
import Graphics.UI.Gtk.WebKit.Types (WebView(..))
import Control.Monad.Reader (ReaderT(..))
import Language.Javascript.JSaddle
(valToObject, (#), JSContextRef, JSObjectRef, jsg, (<#), obj, js2,
js, JSM, js1, valToText, valToStr, js3, js0, MakeValueRef(..), MakeStringRef(..),
JSStringRef, JSValueRef, valToBool, strToText, valToNumber, MakeObjectRef)
import Control.Applicative ((<$>))
import Control.Monad.Reader.Class (MonadReader(..))
import Control.Concurrent (putMVar, newEmptyMVar, takeMVar, MVar, tryTakeMVar)
import IDE.Core.Types (IDEM)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Lens ((^.), IndexPreservingGetter)
import Graphics.UI.Gtk.WebKit.WebView
(webViewLoadUri, webViewLoadString, webViewGetMainFrame,
loadFinished, webViewNew)
import qualified GHCJS.CodeMirror as CM (getDataDir)
import System.Glib.Signals (after, on)
import Graphics.UI.Gtk.WebKit.JavaScriptCore.WebFrame
(webFrameGetGlobalContext)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import Text.Hamlet (shamlet)
import Graphics.UI.Gtk
(ScrolledWindow, menuPopup, menuAttachToWidget, menuNew,
popupMenuSignal, eventModifier, widgetAddEvents, keyReleaseEvent,
leaveNotifyEvent, motionNotifyEvent, keyPressEvent,
buttonReleaseEvent, buttonPressEvent, focusInEvent,
widgetGrabFocus, widgetGetParent, castToScrolledWindow,
containerAdd, scrolledWindowNew, Rectangle(..),
EventMask(..), Modifier(..), ContainerClass, mainIteration,
castToWidget,
widgetGetWindow
)
import Data.Maybe (fromJust)
import IDE.Core.State (onIDE, reflectIDE, leksahOrPackageDir)
import Graphics.UI.Editor.Basics (Connection(..))
import System.Log.Logger (debugM)
#endif
data CodeMirror = CodeMirror deriving( Typeable, Show )
#ifdef LEKSAH_WITH_CODE_MIRROR
data CodeMirrorState = CodeMirrorState {
cmContext :: JSContextRef
, cmObject :: JSObjectRef }
type CM = ReaderT (WebView, CodeMirrorState) JSM
webView :: CM WebView
webView = fst <$> ask
codeMirror :: CM JSObjectRef
codeMirror = cmObject . snd <$> ask
-- | Run a CodeMirror action against the given editor reference.
-- Blocks until the editor's JavaScript state is available (it is
-- published to the MVar from the WebView's load-finished handler).
runCM :: CodeMirrorRef -> CM a -> IDEM a
runCM (v, mvar) f = liftIO $ do
    s <- guiTakeMVar mvar
    runReaderT (runReaderT f (v, s)) (cmContext s)
  where
    -- Read the MVar without blocking the GTK main loop: if the state is
    -- not there yet, spin the GTK event loop one iteration at a time
    -- until it appears, then put the value back so later calls see it.
    -- NOTE(review): this is a busy-wait on 'mainIteration'; confirm the
    -- load-finished handler always fills the MVar, otherwise this loops
    -- forever.
    guiTakeMVar mvar = do
        maybeValue <- tryTakeMVar mvar
        case maybeValue of
            Just value -> do
                putMVar mvar value
                return value
            Nothing -> do
                debugM "leksah" "looping"
                s <- loop mvar
                debugM "leksah" "done looping"
                return s
    loop mvar = do
        maybeValue <- tryTakeMVar mvar
        case maybeValue of
            Just value -> do
                putMVar mvar value
                return value
            Nothing -> do
                mainIteration
                loop mvar
-- | A CodeMirror editor: the hosting WebView plus its (eventually
-- initialised) JavaScript-side state.
type CodeMirrorRef = (WebView, MVar CodeMirrorState)
body = js "body"
value = js "value"
setSize = js2 "setSize"
mode = js "mode"
line = js "line"
ch = js "ch"
left = js "left"
top = js "top"
right = js "right"
bottom = js "bottom"
lastLine = js0 "lastLine"
getRange = js2 "getRange"
setValue = js1 "setValue"
setBookmark' = js2 "setBookmark"
find = js0 "find"
from = js "from"
getCursor :: (MakeValueRef a0, MakeObjectRef o) => a0 -> IndexPreservingGetter o (JSM JSValueRef)
getCursor = js1 "getCursor"
isClean = js0 "isClean"
markText = js3 "markText"
className = js "className"
clearHistory = js0 "clearHistory"
callUndo = js0 "undo"
undo' = js "undo"
callRedo = js0 "redo"
redo' = js "redo"
historySize = js0 "historySize"
replaceRange = js3 "replaceRange"
insertAt = js2 "replaceRange"
replaceSelection = js1 "replaceSelection"
posFromIndex = js1 "posFromIndex"
lineCount = js0 "lineCount"
somethingSelected = js0 "somethingSelected"
setSelection = js2 "setSelection"
placeCursorAt = js1 "setSelection"
markClean = js0 "markClean"
coordsChar = js2 "coordsChar"
charCoords = js2 "charCoords"
scrollIntoView = js2 "scrollIntoView"
getAllMarks = js0 "getAllMarks"
indexFromPos = js1 "indexFromPos"
getLineText :: (MakeValueRef a0, MakeObjectRef o) => a0 -> IndexPreservingGetter o (JSM JSValueRef)
getLineText = js1 "getLine"
jsLength = js "length"
cmIter :: CodeMirrorRef -> Int -> Int -> CM (EditorIter CodeMirror)
cmIter cm l c = do
lift $ do
i <- obj
i ^. line <# (fromIntegral l :: Double)
i ^. ch <# (fromIntegral c :: Double)
return $ CMIter cm i
-- | Create a new CodeMirror-backed editor buffer containing @contents@.
-- @mbFilename@ is currently unused.  Returns as soon as the page load is
-- started; the JavaScript-side editor state is delivered to the MVar
-- from the load-finished handler, and consumers block on it via 'runCM'.
newCMBuffer :: Maybe FilePath -> Text -> IDEM (EditorBuffer CodeMirror)
newCMBuffer mbFilename contents = do
    ideR <- ask
    liftIO $ do
        debugM "leksah" "newCMBuffer"
        scrolledWindow <- scrolledWindowNew Nothing Nothing
        scrolledWindowSetShadowType scrolledWindow ShadowIn
        cmWebView <- webViewNew
        containerAdd scrolledWindow cmWebView
        dataDir <- liftIO $ leksahOrPackageDir "ghcjs-codemirror" CM.getDataDir
        s <- newEmptyMVar
        cmWebView `on` loadFinished $ \ _ -> do
            debugM "leksah" "newCMBuffer loadFinished"
            cmContext <- webViewGetMainFrame cmWebView >>= webFrameGetGlobalContext
            let runjs f = f `runReaderT` cmContext
            runjs $ do
                document <- jsg "document"
                codeMirror <- jsg "CodeMirror"
                code <- obj
                code ^. value <# contents
                code ^. mode <# "haskell"
                -- JS equivalent: CodeMirror(document.body, {value:…, mode:"haskell"})
                cmObject <- codeMirror # (document ^. body, code) >>= valToObject
                cmObject ^. setSize "100%" "100%"
                liftIO $ debugM "leksah" "newCMBuffer loaded"
                -- RecordWildCards packs cmContext and cmObject into the state
                liftIO . putMVar s $ CodeMirrorState{..}
        -- NOTE(review): the <script> elements below are never closed with
        -- </script>; HTML parsers treat content after an unclosed script
        -- tag as script text.  Verify WebKit actually loads all of these.
        webViewLoadString cmWebView (T.pack $
                "<html><head>"
             ++ "<script src=\"lib/codemirror.js\">"
             ++ "<link rel=\"stylesheet\" href=\"lib/codemirror.css\">"
             ++ "<script src=\"mode/javascript/javascript.js\">"
             ++ "<script src=\"mode/haskell/haskell.js\">"
             ++ "</head>"
             ++ "<body style=\"margin:0;padding:0 auto;\">"
             ++ "</body></html>"
            ) Nothing (T.pack $ "file://" ++ dataDir ++ "/codemirror.html")
        debugM "leksah" "newCMBuffer loading"
        return $ CMBuffer (cmWebView, s)
instance TextEditor CodeMirror where
data EditorBuffer CodeMirror = CMBuffer CodeMirrorRef
data EditorView CodeMirror = CMView CodeMirrorRef
data EditorMark CodeMirror = CMMark JSObjectRef | CMCursor JSValueRef
data EditorIter CodeMirror = CMIter CodeMirrorRef JSObjectRef
data EditorTagTable CodeMirror = CMTagTable CodeMirrorRef
data EditorTag CodeMirror = CMTag
newBuffer = newCMBuffer
applyTagByName (CMBuffer cm) name (CMIter _ first) (CMIter _ last) = runCM cm $ do
m <- codeMirror
lift $ do
o <- obj
o ^. className <# name
m ^. markText first last o
return ()
beginNotUndoableAction (CMBuffer cm) = return () -- TODO
beginUserAction (CMBuffer cm) = return () -- TODO
canRedo (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ (m ^. historySize ^. redo') >>= valToBool
canUndo (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ (m ^. historySize ^. undo') >>= valToBool
copyClipboard (CMBuffer cm) _ = return () -- TODO
createMark (CMView cm) _refType (CMIter _ i) _tooltip = runCM cm $ do
m <- codeMirror
lift $ do
o <- obj
m ^. setBookmark' i o
return ()
cutClipboard (CMBuffer cm) _ _ = return () -- TODO
delete (CMBuffer cm) (CMIter _ first) (CMIter _ last) = runCM cm $ do
m <- codeMirror
lift $ m ^. replaceRange "" first last
return ()
deleteSelection (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ m ^. replaceSelection ""
return ()
endNotUndoableAction (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ m ^. clearHistory
return ()
endUserAction (CMBuffer cm) = return () -- TODO
getEndIter (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ do
i <- obj
l <- m ^. lastLine
i ^. line <# l
i ^. ch <# m ^. getLineText l ^. jsLength
return $ CMIter cm i
getInsertMark (CMBuffer cm) = runCM cm . lift $ CMCursor <$> makeValueRef "head"
getIterAtLine (CMBuffer cm) line = runCM cm $ cmIter cm line 0
getIterAtMark (CMBuffer cm) (CMMark mark) = runCM cm $ do
lift $ CMIter cm <$> (mark ^. find ^. from >>= valToObject)
getIterAtMark (CMBuffer cm) (CMCursor c) = runCM cm $ do
m <- codeMirror
lift $ CMIter cm <$> ((m ^. getCursor c) >>= valToObject)
getIterAtOffset (CMBuffer cm) offset = runCM cm $ do
m <- codeMirror
lift $ CMIter cm <$> ((m ^. posFromIndex (fromIntegral offset :: Double)) >>= valToObject)
getLineCount (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ round <$> ((m ^. lineCount) >>= valToNumber)
getModified (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ not <$> ((m ^. isClean) >>= valToBool)
getSelectionBoundMark (CMBuffer cm) = runCM cm . lift $ CMCursor <$> makeValueRef "anchor"
getSelectionBounds (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ do
start <- (m ^. getCursor "start") >>= valToObject
end <- (m ^. getCursor "end") >>= valToObject
return (CMIter cm start, CMIter cm end)
getInsertIter (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ CMIter cm <$> (m ^. getCursor "head" >>= valToObject)
getSlice (CMBuffer cm) (CMIter _ first) (CMIter _ last) includeHidenChars = runCM cm $ do
m <- codeMirror
lift $ m ^. getRange first last >>= valToText
getStartIter (CMBuffer cm) = runCM cm $ cmIter cm 0 0
getTagTable (CMBuffer cm) = return $ CMTagTable cm
getText (CMBuffer cm) (CMIter _ first) (CMIter _ last) includeHidenChars = runCM cm $ do
m <- codeMirror
lift $ m ^. getRange first last >>= valToText
hasSelection (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ (m ^. somethingSelected) >>= valToBool
insert (CMBuffer cm) (CMIter _ p) text = runCM cm $ do
m <- codeMirror
lift $ m ^. insertAt text p >> return ()
newView (CMBuffer cm) mbFontString = return (CMView cm)
pasteClipboard (CMBuffer cm) clipboard (CMIter _ p) defaultEditable = return () -- TODO
placeCursor (CMBuffer cm) (CMIter _ i) = runCM cm $ do
m <- codeMirror
lift $ m ^. placeCursorAt i >> return ()
redo (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ m ^. callRedo
return ()
removeTagByName (CMBuffer cm) name = runCM cm $ do
m <- codeMirror
lift $ do
marks <- m ^. getAllMarks
-- TODO
return ()
selectRange (CMBuffer cm) (CMIter _ first) (CMIter _ last) = runCM cm $ do
m <- codeMirror
lift $ m ^. setSelection first last >> return ()
setModified (CMBuffer cm) modified = unless modified . runCM cm $ do
m <- codeMirror
lift $ m ^. markClean >> return ()
setStyle (CMBuffer cm) _style = return () -- TODO
setText (CMBuffer cm) text = runCM cm $ do
m <- codeMirror
lift $ m ^. setValue text
return ()
undo (CMBuffer cm) = runCM cm $ do
m <- codeMirror
lift $ m ^. callUndo
return ()
bufferToWindowCoords (CMView cm) point = return point -- TODO
drawTabs (CMView _) = return () -- TODO
getBuffer (CMView cm) = return $ CMBuffer cm
getWindow (CMView cm) = runCM cm $ do
v <- webView
liftIO $ widgetGetWindow v
getIterAtLocation (CMView cm) x y = runCM cm $ do
m <- codeMirror
lift $ do
pos <- obj
pos ^. left <# (fromIntegral x :: Double)
pos ^. top <# (fromIntegral y :: Double)
CMIter cm <$> (m ^. coordsChar pos "window" >>= valToObject)
getIterLocation (CMView cm) (CMIter _ i) = runCM cm $ do
m <- codeMirror
lift $ do
rect <- (m ^. charCoords i "window" >>= valToObject)
l <- rect ^. left >>= valToNumber
r <- rect ^. right >>= valToNumber
t <- rect ^. top >>= valToNumber
b <- rect ^. bottom >>= valToNumber
return $ Rectangle (round l) (round t) (round $ r - l) (round $ b - t)
getOverwrite (CMView cm) = return False -- TODO
getScrolledWindow (CMView (v,_)) = liftIO . fmap (castToScrolledWindow . fromJust) $ widgetGetParent v
getEditorWidget (CMView (v,_)) = return $ castToWidget v
grabFocus (CMView cm) = runCM cm $ do
v <- webView
liftIO $ widgetGrabFocus v
scrollToMark (CMView cm) m withMargin mbAlign = do
i <- getIterAtMark (CMBuffer cm) m
scrollToIter (CMView cm) i withMargin mbAlign
scrollToIter (CMView cm) (CMIter _ i) withMargin mbAlign = runCM cm $ do
m <- codeMirror
lift $ m ^. scrollIntoView i withMargin >> return ()
setFont (CMView cm) mbFontString = return () -- TODO
setIndentWidth (CMView cm) width = return () -- TODO
setWrapMode (CMView cm) width = return () -- TODO
setRightMargin (CMView cm) mbRightMargin = return () -- TODO
setShowLineNumbers (CMView cm) show = return () -- TODO
setTabWidth (CMView cm) width = return () -- TODO
backwardCharC (CMIter cm i) = runCM cm $ do
m <- codeMirror
lift $ do
n <- m ^. indexFromPos i >>= valToNumber
i2 <- m ^. posFromIndex (n - 1)
return (CMIter cm i2)
backwardFindCharC (CMIter cm i) pred mbLimit = return Nothing -- TODO
backwardWordStartC (CMIter cm i) = return Nothing -- TODO
backwardToLineStartC (CMIter cm i) = runCM cm $ do
m <- codeMirror
lift $ do
i2 <- obj
i2 ^. line <# i ^. line
i2 ^. ch <# (0 :: Double)
return $ CMIter cm i2
endsWord (CMIter cm i) = return False -- TODO
forwardCharC (CMIter cm i) = runCM cm $ do
m <- codeMirror
lift $ do
n <- m ^. indexFromPos i >>= valToNumber
i2 <- m ^. posFromIndex (n + 1)
return (CMIter cm i2)
forwardCharsC (CMIter cm i) d = runCM cm $ do
m <- codeMirror
lift $ do
n <- m ^. indexFromPos i >>= valToNumber
i2 <- m ^. posFromIndex (n + fromIntegral d)
return (CMIter cm i2)
forwardFindCharC (CMIter cm i) pred mbLimit = return Nothing -- TODO
forwardSearch (CMIter cm i) str pred mbLimit = return Nothing -- TODO
forwardToLineEndC (CMIter cm i) = runCM cm $ do
m <- codeMirror
lift $ do
i2 <- obj
l <- i ^. line >>= makeValueRef
i2 ^. line <# l
i2 ^. ch <# m ^. getLineText l ^. jsLength
return $ CMIter cm i2
forwardWordEndC (CMIter cm i) = return Nothing -- TODO
getChar (CMIter cm i) = return Nothing -- TODO
getCharsInLine (CMIter cm i) = runCM cm $ do
m <- codeMirror
lift $ round <$> (m ^. getLineText (i ^. line) ^. jsLength >>= valToNumber)
getLine (CMIter cm i) = runCM cm $ do
lift $ round <$> (i ^. line >>= valToNumber)
getLineOffset (CMIter cm i) = runCM cm $ do
lift $ round <$> (i ^. ch >>= valToNumber)
getOffset (CMIter cm i) = runCM cm $ do
m <- codeMirror
lift $ round <$> (m ^. indexFromPos i >>= valToNumber)
isStart i@(CMIter cm _) = do
start <- getStartIter (CMBuffer cm)
iterEqual start i
isEnd i@(CMIter cm _) = do
end <- getEndIter (CMBuffer cm)
iterEqual end i
iterEqual (CMIter cm i1) (CMIter _ i2) = runCM cm . lift $ do
l1 <- i1 ^. line >>= valToNumber
l2 <- i2 ^. line >>= valToNumber
if l1 /= l2
then return False
else do
c1 <- i1 ^. ch >>= valToNumber
c2 <- i2 ^. ch >>= valToNumber
return $ c1 == c2
startsLine i = (== 0) <$> getLineOffset i
startsWord (CMIter cm i) = return False -- TODO
atEnd (CMIter cm _) = getEndIter (CMBuffer cm)
atLine (CMIter cm _) l = runCM cm $ do
m <- codeMirror
lift $ do
i2 <- obj
i2 ^. line <# (fromIntegral l :: Double)
i2 ^. ch <# (0 :: Double)
return $ CMIter cm i2
atLineOffset (CMIter cm i) column = runCM cm $ do
m <- codeMirror
lift $ do
i2 <- obj
i2 ^. line <# i ^. line
i2 ^. ch <# (fromIntegral column :: Double)
return $ CMIter cm i2
atOffset (CMIter cm _ ) offset = getIterAtOffset (CMBuffer cm) offset
atStart (CMIter cm _) = getStartIter (CMBuffer cm)
newTag (CMTagTable cm) name = return CMTag -- TODO
lookupTag (CMTagTable cm) name = return Nothing -- TODO
background (CMTag) color = return () -- TODO
underline (CMTag) value = return () -- TODO
afterFocusIn (CMView (v, _)) f = do
ideR <- ask
liftIO $ do
id1 <- v `after` focusInEvent $ lift $ reflectIDE f ideR >> return False
return [ConnectC id1]
afterModifiedChanged (CMBuffer cm) f = return [] -- TODO
afterMoveCursor (CMView cm) f = return [] -- TODO
afterToggleOverwrite (CMView cm) f = return [] -- TODO
onButtonPress (CMView (v, _)) f = do
id1 <- v `onIDE` buttonPressEvent $ f
return [ConnectC id1]
onButtonRelease (CMView (v, _)) f = do
id1 <- v `onIDE` buttonReleaseEvent $ f
return [ConnectC id1]
onCompletion (CMView cm) start cancel = return [] -- TODO
onKeyPress (CMView (v, _)) f = do
id1 <- v `onIDE` keyPressEvent $ f
return [ConnectC id1]
onMotionNotify (CMView (v, _)) f = do
id1 <- v `onIDE` motionNotifyEvent $ f
return [ConnectC id1]
onLeaveNotify (CMView (v, _)) f = do
id1 <- v `onIDE` leaveNotifyEvent $ f
return [ConnectC id1]
onKeyRelease (CMView (v, _)) f = do
id1 <- v `onIDE` keyReleaseEvent $ f
return [ConnectC id1]
onLookupInfo (CMView (v, _)) f = do
ideR <- ask
liftIO $ do
v `widgetAddEvents` [ButtonReleaseMask]
id1 <- (`reflectIDE` ideR) $ v `onIDE` buttonReleaseEvent $ do
mod <- lift $ eventModifier
case mod of
[Control] -> f >> return True
_ -> return False
return [ConnectC id1]
onMotionNotifyEvent (CMView cm) f = return [] -- TODO
onPopulatePopup (CMView (v, _)) f = do
ideR <- ask
liftIO $ do
id1 <- on v popupMenuSignal $ do
menu <- menuNew
menuAttachToWidget menu v
reflectIDE (f menu) ideR
menuPopup menu Nothing
return True
return [ConnectC id1]
onSelectionChanged (CMBuffer cm) f = return [] -- TODO
#endif
|
cocreature/leksah
|
src/IDE/TextEditor/CodeMirror.hs
|
gpl-2.0
| 21,354
| 0
| 21
| 6,472
| 7,106
| 3,558
| 3,548
| 27
| 0
|
import Test.DocTest
-- | Run the doctest suite over the library sources.
main :: IO ()
main = doctest args
  where
    -- Same arguments as before: add @lib@ to the include path, scan @lib@.
    args = ["-ilib", "lib"]
|
thsutton/aeson-diff
|
test/doctests.hs
|
bsd-2-clause
| 67
| 0
| 6
| 12
| 30
| 16
| 14
| 3
| 1
|
-- |
-- Module : Foundation.IO.Terminal
-- License : BSD-style
-- Maintainer : Foundation
-- Stability : experimental
-- Portability : portable
--
module Foundation.IO.Terminal
( putStrLn
, putStr
, stdin
, stdout
, getArgs
, exitFailure
, exitSuccess
) where
import Basement.Imports
import qualified Prelude
import System.IO (stdin, stdout)
import System.Exit
import qualified System.Environment as SE (getArgs)
-- | Print a string to standard output
putStr :: String -> IO ()
putStr s = Prelude.putStr (toList s)
-- | Print a string with a newline to standard output
putStrLn :: String -> IO ()
putStrLn s = Prelude.putStrLn (toList s)
-- | Get the arguments from the terminal command
getArgs :: IO [String]
getArgs = fmap (fmap fromList) SE.getArgs
|
vincenthz/hs-foundation
|
foundation/Foundation/IO/Terminal.hs
|
bsd-3-clause
| 819
| 0
| 7
| 196
| 155
| 95
| 60
| 19
| 1
|
-- | A negatively-recursive datatype: a 'U' wraps a predicate over 'U'
-- itself. Minimal reproducer kept as a compiler test case.
data U = MkU (U -> Bool)
|
batterseapower/chsc
|
examples/toys/GHCBug.hs
|
bsd-3-clause
| 25
| 0
| 8
| 7
| 17
| 9
| 8
| 1
| 0
|
{-# LANGUAGE OverloadedStrings #-}
-- | Main test suite.
module Main where
import Control.Monad
import Data.Monoid
import TryHaskell
-- | Main entry point: evaluate a sequence of expressions and check each
-- result against the expected 'EvalResult'.
main :: IO ()
main =
  do let evalExpr expr stdinLines files =
           do putStrLn ("Running " ++ expr)
              muevalOrType expr stdinLines files
         expect expected action =
           do actual <- action
              unless (actual == expected)
                     (error ("Assertion failed:\n\n" ++
                             show expected ++ " == " ++ show actual))
     expect (SuccessResult ("'a'","Char","'a'") mempty mempty)
            (evalExpr "'a'" mempty mempty)
     expect (SuccessResult ("123","Num a => a","123") [] mempty)
            (evalExpr "123" mempty mempty)
     expect (ErrorResult "Evaluation killed!")
            (evalExpr "sum [1..]" mempty mempty)
     expect (SuccessResult ("head []","a","") mempty mempty)
            (evalExpr "head []" mempty mempty)
     expect (SuccessResult ("id","a -> a","") mempty mempty)
            (evalExpr "id" mempty mempty)
     expect (SuccessResult ("putStrLn","String -> IO ()","") mempty mempty)
            (evalExpr "putStrLn" mempty mempty)
     expect (GetInputResult mempty mempty)
            (evalExpr "getLine" mempty mempty)
     expect (SuccessResult ("getLine","IO String","\"x\"") mempty mempty)
            (evalExpr "getLine" ["x"] mempty)
     expect (SuccessResult ("(*)","Num a => a -> a -> a","") mempty mempty)
            (evalExpr "(*)" mempty mempty)
     expect (ErrorResult "Evaluation killed!")
            (evalExpr "let r = r in r :: ()" mempty mempty)
|
chrisdone/tryhaskell
|
src/test/Main.hs
|
bsd-3-clause
| 1,438
| 0
| 16
| 411
| 469
| 237
| 232
| 34
| 1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Compiler
-- Copyright : Isaac Jones 2003-2004
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This should be a much more sophisticated abstraction than it is. Currently
-- it's just a bit of data about the compiler, like it's flavour and name and
-- version. The reason it's just data is because currently it has to be in
-- 'Read' and 'Show' so it can be saved along with the 'LocalBuildInfo'. The
-- only interesting bit of info it contains is a mapping between language
-- extensions and compiler command line flags. This module also defines a
-- 'PackageDB' type which is used to refer to package databases. Most compilers
-- only know about a single global package collection but GHC has a global and
-- per-user one and it lets you create arbitrary other package databases. We do
-- not yet fully support this latter feature.
module Distribution.Simple.Compiler (
-- * Haskell implementations
module Distribution.Compiler,
Compiler(..),
showCompilerId, showCompilerIdWithAbi,
compilerFlavor, compilerVersion,
compilerCompatFlavor,
compilerCompatVersion,
compilerInfo,
-- * Support for package databases
PackageDB(..),
PackageDBStack,
registrationPackageDB,
absolutePackageDBPaths,
absolutePackageDBPath,
-- * Support for optimisation levels
OptimisationLevel(..),
flagToOptimisationLevel,
-- * Support for debug info levels
DebugInfoLevel(..),
flagToDebugInfoLevel,
-- * Support for language extensions
Flag,
languageToFlags,
unsupportedLanguages,
extensionsToFlags,
unsupportedExtensions,
parmakeSupported,
reexportedModulesSupported,
renamingPackageFlagsSupported,
unifiedIPIDRequired,
packageKeySupported,
unitIdSupported,
coverageSupported,
profilingSupported,
backpackSupported,
libraryDynDirSupported,
-- * Support for profiling detail levels
ProfDetailLevel(..),
knownProfDetailLevels,
flagToProfDetailLevel,
showProfDetailLevel,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Compiler
import Distribution.Version
import Distribution.Text
import Language.Haskell.Extension
import Distribution.Simple.Utils
import qualified Data.Map as Map (lookup)
import System.Directory (canonicalizePath)
-- | Everything Cabal knows about one compiler: its identity, claimed
-- compatibilities, supported languages/extensions, and a free-form
-- property map (queried by the @*Supported@ predicates below).
data Compiler = Compiler {
        compilerId              :: CompilerId,
        -- ^ Compiler flavour and version.
        compilerAbiTag          :: AbiTag,
        -- ^ Tag for distinguishing incompatible ABI's on the same
        -- architecture/os.
        compilerCompat          :: [CompilerId],
        -- ^ Other implementations that this compiler claims to be
        -- compatible with.
        compilerLanguages       :: [(Language, Flag)],
        -- ^ Supported language standards.
        compilerExtensions      :: [(Extension, Flag)],
        -- ^ Supported extensions.
        compilerProperties      :: Map String String
        -- ^ A key-value map for properties not covered by the above fields.
      }
      deriving (Eq, Generic, Typeable, Show, Read)

instance Binary Compiler
-- | Render the compiler's flavour and version, e.g. @ghc-8.0.2@.
showCompilerId :: Compiler -> String
showCompilerId comp = display (compilerId comp)
-- | Like 'showCompilerId', but with the ABI tag (if any) appended after
-- a dash.
showCompilerIdWithAbi :: Compiler -> String
showCompilerIdWithAbi comp = display (compilerId comp) ++ abiSuffix
  where
    abiSuffix = case compilerAbiTag comp of
                  NoAbiTag  -> []
                  AbiTag xs -> '-' : xs
-- | The flavour component of the compiler's id.
compilerFlavor :: Compiler -> CompilerFlavor
compilerFlavor comp = case compilerId comp of
                        CompilerId f _ -> f
-- | The version component of the compiler's id.
compilerVersion :: Compiler -> Version
compilerVersion comp = case compilerId comp of
                         CompilerId _ v -> v
-- | Is this compiler compatible with the compiler flavour we're interested in?
--
-- For example this checks if the compiler is actually GHC or is another
-- compiler that claims to be compatible with some version of GHC, e.g. GHCJS.
--
-- > if compilerCompatFlavor GHC compiler then ... else ...
--
compilerCompatFlavor :: CompilerFlavor -> Compiler -> Bool
compilerCompatFlavor flavor comp =
       flavor == compilerFlavor comp
    || flavor `elem` map (\(CompilerId flavor' _) -> flavor') (compilerCompat comp)
-- | Is this compiler compatible with the compiler flavour we're interested in,
-- and if so what version does it claim to be compatible with.
--
-- For example this checks if the compiler is actually GHC-7.x or is another
-- compiler that claims to be compatible with some GHC-7.x version.
--
-- > case compilerCompatVersion GHC compiler of
-- >   Just (Version (7:_)) -> ...
-- >   _                    -> ...
--
compilerCompatVersion :: CompilerFlavor -> Compiler -> Maybe Version
compilerCompatVersion flavor comp
  | compilerFlavor comp == flavor = Just (compilerVersion comp)
  | otherwise                     = listToMaybe compatVersions
  where
    -- Versions this compiler explicitly claims compatibility with, for
    -- the requested flavour only.
    compatVersions = [ v | CompilerId fl v <- compilerCompat comp
                         , fl == flavor ]
-- | Summarise a 'Compiler' as the externally-consumed 'CompilerInfo'.
compilerInfo :: Compiler -> CompilerInfo
compilerInfo c =
    CompilerInfo (compilerId c)
                 (compilerAbiTag c)
                 (Just (compilerCompat c))
                 (Just (map fst (compilerLanguages c)))
                 (Just (map fst (compilerExtensions c)))
-- ------------------------------------------------------------
-- * Package databases
-- ------------------------------------------------------------
-- |Some compilers have a notion of a database of available packages.
-- For some there is just one global db of packages, other compilers
-- support a per-user or an arbitrary db specified at some location in
-- the file system. This can be used to build isolated environments of
-- packages, for example to build a collection of related packages
-- without installing them globally.
--
data PackageDB = GlobalPackageDB
               | UserPackageDB
               | SpecificPackageDB FilePath
    deriving (Eq, Generic, Ord, Show, Read)

instance Binary PackageDB
-- | We typically get packages from several databases, and stack them
-- together. This type lets us be explicit about that stacking. For example
-- typical stacks include:
--
-- > [GlobalPackageDB]
-- > [GlobalPackageDB, UserPackageDB]
-- > [GlobalPackageDB, SpecificPackageDB "package.conf.inplace"]
--
-- Note that the 'GlobalPackageDB' is invariably at the bottom since it
-- contains the rts, base and other special compiler-specific packages.
--
-- We are not restricted to using just the above combinations. In particular
-- we can use several custom package dbs and the user package db together.
--
-- When it comes to writing, the top most (last) package is used.
--
type PackageDBStack = [PackageDB]
-- | Return the package that we should register into. This is the package db at
-- the top of the stack.
--
registrationPackageDB :: PackageDBStack -> PackageDB
registrationPackageDB dbs =
    case dbs of
      [] -> error "internal error: empty package db set"
      _  -> last dbs
-- | Make package paths absolute
absolutePackageDBPaths :: PackageDBStack -> NoCallStackIO PackageDBStack
absolutePackageDBPaths dbs = traverse absolutePackageDBPath dbs
-- | Canonicalise the path inside a 'SpecificPackageDB'; the global and
-- user databases are returned unchanged.
absolutePackageDBPath :: PackageDB -> NoCallStackIO PackageDB
absolutePackageDBPath db =
    case db of
      GlobalPackageDB         -> return GlobalPackageDB
      UserPackageDB           -> return UserPackageDB
      SpecificPackageDB path  -> fmap SpecificPackageDB (canonicalizePath path)
-- ------------------------------------------------------------
-- * Optimisation levels
-- ------------------------------------------------------------
-- | Some compilers support optimising. Some have different levels.
-- For compilers that do not support multiple levels, the level is just
-- capped to the level they do support.
--
data OptimisationLevel = NoOptimisation
                       | NormalOptimisation
                       | MaximumOptimisation
    deriving (Bounded, Enum, Eq, Generic, Read, Show)

instance Binary OptimisationLevel
-- | Parse an optimisation-level flag value. 'Nothing' (flag absent)
-- means normal optimisation; a numeric string is mapped onto the
-- 'OptimisationLevel' enumeration, anything else is an error.
flagToOptimisationLevel :: Maybe String -> OptimisationLevel
flagToOptimisationLevel Nothing  = NormalOptimisation
flagToOptimisationLevel (Just s) =
    case reads s of
      [(n, "")]
        | inRange   -> toEnum n
        | otherwise -> error $ "Bad optimisation level: " ++ show n
                            ++ ". Valid values are 0..2"
        where
          inRange = n >= fromEnum (minBound :: OptimisationLevel)
                 && n <= fromEnum (maxBound :: OptimisationLevel)
      _ -> error $ "Can't parse optimisation level " ++ s
-- ------------------------------------------------------------
-- * Debug info levels
-- ------------------------------------------------------------
-- | Some compilers support emitting debug info. Some have different
-- levels. For compilers that do not support multiple levels, the level
-- is just capped to the level they do support.
--
data DebugInfoLevel = NoDebugInfo
                    | MinimalDebugInfo
                    | NormalDebugInfo
                    | MaximalDebugInfo
    deriving (Bounded, Enum, Eq, Generic, Read, Show)

instance Binary DebugInfoLevel
-- | Parse a debug-info-level flag value; mirrors
-- 'flagToOptimisationLevel' but over the 0..3 range.
flagToDebugInfoLevel :: Maybe String -> DebugInfoLevel
flagToDebugInfoLevel Nothing  = NormalDebugInfo
flagToDebugInfoLevel (Just s) =
    case reads s of
      [(n, "")]
        | inRange   -> toEnum n
        | otherwise -> error $ "Bad debug info level: " ++ show n
                            ++ ". Valid values are 0..3"
        where
          inRange = n >= fromEnum (minBound :: DebugInfoLevel)
                 && n <= fromEnum (maxBound :: DebugInfoLevel)
      _ -> error $ "Can't parse debug info level " ++ s
-- ------------------------------------------------------------
-- * Languages and Extensions
-- ------------------------------------------------------------
-- | The subset of @langs@ for which the compiler has no flag.
unsupportedLanguages :: Compiler -> [Language] -> [Language]
unsupportedLanguages comp langs =
    filter (isNothing . languageToFlag comp) langs
-- | The (non-empty) flags selecting the given language; 'Nothing'
-- defaults to Haskell98.
languageToFlags :: Compiler -> Maybe Language -> [Flag]
languageToFlags comp mlang =
    filter (not . null) (catMaybes (map (languageToFlag comp) langs))
  where
    langs = maybe [Haskell98] (\l -> [l]) mlang
-- | Look up the compiler flag for a language standard, if supported.
languageToFlag :: Compiler -> Language -> Maybe Flag
languageToFlag comp lang = lookup lang (compilerLanguages comp)
-- |For the given compiler, return the extensions it does not support.
unsupportedExtensions :: Compiler -> [Extension] -> [Extension]
unsupportedExtensions comp = filter (isNothing . extensionToFlag comp)
-- | A raw compiler command-line flag (see 'compilerLanguages' and
-- 'compilerExtensions' for where these come from).
type Flag = String
-- |For the given compiler, return the flags for the supported extensions.
extensionsToFlags :: Compiler -> [Extension] -> [Flag]
extensionsToFlags comp exts =
    nub (filter (not . null) (catMaybes (map (extensionToFlag comp) exts)))
-- | Look up the compiler flag for an extension, if supported.
extensionToFlag :: Compiler -> Extension -> Maybe Flag
extensionToFlag comp ext = lookup ext (compilerExtensions comp)
-- The predicates below query capability strings recorded in
-- 'compilerProperties' via 'ghcSupported' (defined further down).
-- | Does this compiler support parallel --make mode?
parmakeSupported :: Compiler -> Bool
parmakeSupported = ghcSupported "Support parallel --make"
-- | Does this compiler support reexported-modules?
reexportedModulesSupported :: Compiler -> Bool
reexportedModulesSupported = ghcSupported "Support reexported-modules"
-- | Does this compiler support thinning/renaming on package flags?
renamingPackageFlagsSupported :: Compiler -> Bool
renamingPackageFlagsSupported = ghcSupported
  "Support thinning and renaming package flags"
-- | Does this compiler have unified IPIDs (so no package keys)
unifiedIPIDRequired :: Compiler -> Bool
unifiedIPIDRequired = ghcSupported "Requires unified installed package IDs"
-- | Does this compiler support package keys?
packageKeySupported :: Compiler -> Bool
packageKeySupported = ghcSupported "Uses package keys"
-- | Does this compiler support unit IDs?
unitIdSupported :: Compiler -> Bool
unitIdSupported = ghcSupported "Uses unit IDs"
-- | Does this compiler support Backpack?
backpackSupported :: Compiler -> Bool
backpackSupported = ghcSupported "Support Backpack"
-- | Does this compiler support a package database entry with:
-- "dynamic-library-dirs"?
libraryDynDirSupported :: Compiler -> Bool
libraryDynDirSupported comp =
    case compilerFlavor comp of
      GHC -> inGhc80Window || fromGhc81
      _   -> False
  where
    v = compilerVersion comp
    -- Not just v >= mkVersion [8,0,1,20161022], as there
    -- are many GHC 8.1 nightlies which don't support this.
    inGhc80Window = v >= mkVersion [8,0,1,20161022] && v < mkVersion [8,1]
    fromGhc81     = v >= mkVersion [8,1,20161021]
-- | Does this compiler support Haskell program coverage?
coverageSupported :: Compiler -> Bool
coverageSupported comp = compilerFlavor comp `elem` [GHC, GHCJS]
-- | Does this compiler support profiling?
profilingSupported :: Compiler -> Bool
profilingSupported comp = compilerFlavor comp `elem` [GHC, GHCJS, LHC]
-- | Utility function for GHC only features
ghcSupported :: String -> Compiler -> Bool
ghcSupported key comp
  | isGhcLike = Map.lookup key (compilerProperties comp) == Just "YES"
  | otherwise = False
  where
    -- GHCJS inherits GHC's property table, so it counts as GHC-like.
    isGhcLike = case compilerFlavor comp of
                  GHC   -> True
                  GHCJS -> True
                  _     -> False
-- ------------------------------------------------------------
-- * Profiling detail level
-- ------------------------------------------------------------
-- | Some compilers (notably GHC) support profiling and can instrument
-- programs so the system can account costs to different functions. There are
-- different levels of detail that can be used for this accounting.
-- For compilers that do not support this notion or the particular detail
-- levels, this is either ignored or just capped to some similar level
-- they do support.
--
data ProfDetailLevel = ProfDetailNone
                     | ProfDetailDefault
                     | ProfDetailExportedFunctions
                     | ProfDetailToplevelFunctions
                     | ProfDetailAllFunctions
                     | ProfDetailOther String
                       -- ^ A level string not recognised by Cabal,
                       -- passed through as-is (see 'flagToProfDetailLevel').
    deriving (Eq, Generic, Read, Show)

instance Binary ProfDetailLevel
-- | Parse a profiling-detail flag value, matching case-insensitively
-- against the primary names and aliases in 'knownProfDetailLevels'.
-- Unrecognised strings are preserved in 'ProfDetailOther'.
flagToProfDetailLevel :: String -> ProfDetailLevel
flagToProfDetailLevel "" = ProfDetailDefault
flagToProfDetailLevel s  =
    maybe (ProfDetailOther s) id (lookup (lowercase s) table)
  where
    table = [ (name, value)
            | (primary, aliases, value) <- knownProfDetailLevels
            , name <- primary : aliases ]
-- | The known profiling detail levels as
-- @(primary name, aliases, value)@ triples; consumed by
-- 'flagToProfDetailLevel'.
knownProfDetailLevels :: [(String, [String], ProfDetailLevel)]
knownProfDetailLevels =
  [ ("default",            [],                  ProfDetailDefault)
  , ("none",               [],                  ProfDetailNone)
  , ("exported-functions", ["exported"],        ProfDetailExportedFunctions)
  , ("toplevel-functions", ["toplevel", "top"], ProfDetailToplevelFunctions)
  , ("all-functions",      ["all"],             ProfDetailAllFunctions)
  ]
-- | Render a detail level back to its primary flag name; inverse of
-- 'flagToProfDetailLevel' for the known levels.
showProfDetailLevel :: ProfDetailLevel -> String
showProfDetailLevel ProfDetailNone              = "none"
showProfDetailLevel ProfDetailDefault           = "default"
showProfDetailLevel ProfDetailExportedFunctions = "exported-functions"
showProfDetailLevel ProfDetailToplevelFunctions = "toplevel-functions"
showProfDetailLevel ProfDetailAllFunctions      = "all-functions"
showProfDetailLevel (ProfDetailOther other)     = other
|
mydaum/cabal
|
Cabal/Distribution/Simple/Compiler.hs
|
bsd-3-clause
| 15,445
| 0
| 16
| 3,486
| 2,372
| 1,332
| 1,040
| 227
| 6
|
module DerivingRead where
import DerivingUtils
-- Derive a 'Read' implementation for the type described by @t@, built
-- from reader combinators (readParenArg, readArgument, readToken, ...)
-- resolved out of the standard-names environment.
-- NOTE(review): depends on project-local helpers (stdvalue, convCon,
-- alt1, opapp, fun, ...) whose semantics are not visible here.
deriveRead stdnames src t@(_,TypeInfo{constructors=cs}) =
  do let pv = stdvalue stdnames mod_Prelude
     readParenArg <- pv "readParenArg"
     readArgument <- pv "readArgument"
     readToken <- pv "readToken"
     HsVar readsPrec <- pv "readsPrec"
     readAp <- pv "readAp"
     readChoice <- pv "readChoice"
     let d = var (localVal "d")
         alt = alt1 src readsPrec d
         -- One reader per constructor: nullary constructors read just
         -- their name; others read the name followed by n arguments,
         -- inside readParenArg.
         rdCon ConInfo{conName=c0,conArity=n} =
             case n of
               0 -> rdConName cn c
               _ -> rdParenArg (comp (rdConName cn c:replicate n rdArg))
           where
             c = convCon t c0
             cn = getBaseName c0
         rdConName cn c = rdToken (con c) (str src cn)
         rdToken = opapp readToken
         rdParenArg a = opapp readParenArg d a
         rdArg = ident readArgument
         comp = foldl1 (opapp readAp)
         choice = foldr1 (opapp readChoice)
     return [fun src [alt (choice (map rdCon cs))]]
|
forste/haReFork
|
tools/base/transforms/Deriving/DerivingRead.hs
|
bsd-3-clause
| 899
| 11
| 15
| 225
| 300
| 153
| 147
| 25
| 2
|
{-# LANGUAGE GADTs, KindSignatures #-}
-- Test for trac #810
-- Should be able to infer bool :: Bool and integer :: Integer, so
-- we should know that they both have Show instances.
module Foo where
import Data.Kind (Type)
-- | Pairs a container @a b@ with a value of its index type @b@; @b@ is
-- existentially hidden by the GADT.
data Pair :: (Type -> Type) -> Type where
  Pair :: a b -> b -> Pair a
-- | Singleton selector: matching on 'A' / 'B' refines the index to
-- 'Bool' / 'Integer' respectively.
data Sel :: Type -> Type where
  A :: Sel Bool
  B :: Sel Integer
-- | Show the second component; the GADT match on the selector fixes its
-- type (Bool or Integer), so a Show instance is available in each branch.
showSnd :: Pair Sel -> String
showSnd p =
  case p of
    Pair A b -> show b
    Pair B i -> show i
|
sdiehl/ghc
|
testsuite/tests/gadt/gadt20.hs
|
bsd-3-clause
| 480
| 0
| 8
| 115
| 132
| 72
| 60
| -1
| -1
|
import System.Environment
import System.Process
import Data.Maybe
-- Interrogate the given GHC binary ("+RTS --info", then "--info") and
-- print make-style KEY=value lines for the testsuite configuration.
main = do
  [ghc] <- getArgs
  info <- readProcess ghc ["+RTS", "--info"] ""
  let fields = read info :: [(String,String)]
  getGhcFieldOrFail fields "HostOS" "Host OS"
  getGhcFieldOrFail fields "WORDSIZE" "Word size"
  getGhcFieldOrFail fields "TARGETPLATFORM" "Target platform"
  getGhcFieldOrFail fields "TargetOS_CPP" "Target OS"
  getGhcFieldOrFail fields "TargetARCH_CPP" "Target architecture"
  -- NOTE(review): 'info' and 'fields' below deliberately shadow the
  -- bindings above; from here on they refer to the "--info" output.
  info <- readProcess ghc ["--info"] ""
  let fields = read info :: [(String,String)]
  getGhcFieldOrFail fields "GhcStage" "Stage"
  getGhcFieldOrFail fields "GhcDebugged" "Debug on"
  getGhcFieldOrFail fields "GhcWithNativeCodeGen" "Have native code generator"
  getGhcFieldOrFail fields "GhcWithInterpreter" "Have interpreter"
  getGhcFieldOrFail fields "GhcUnregisterised" "Unregisterised"
  getGhcFieldOrFail fields "GhcWithSMP" "Support SMP"
  getGhcFieldOrFail fields "GhcRTSWays" "RTS ways"
  getGhcFieldOrDefault fields "GhcDynamicByDefault" "Dynamic by default" "NO"
  getGhcFieldOrDefault fields "GhcDynamic" "GHC Dynamic" "NO"
  getGhcFieldOrDefault fields "GhcProfiled" "GHC Profiled" "NO"
  getGhcFieldProgWithDefault fields "AR" "ar command" "ar"
  getGhcFieldProgWithDefault fields "LLC" "LLVM llc command" "llc"
  -- The package-db flag was renamed in GHC 7.6 (>= [7,5] development).
  let pkgdb_flag = case lookup "Project version" fields of
        Just v
         | parseVersion v >= [7,5] -> "package-db"
        _ -> "package-conf"
  putStrLn $ "GhcPackageDbFlag" ++ '=':pkgdb_flag
  let minGhcVersion711 = case lookup "Project version" fields of
        Just v
         | parseVersion v >= [7,11] -> "YES"
        _ -> "NO"
  putStrLn $ "MinGhcVersion711" ++ '=':minGhcVersion711
-- | Emit @mkvar=value@ for @key@, or abort if the field is missing.
getGhcFieldOrFail :: [(String,String)] -> String -> String -> IO ()
getGhcFieldOrFail fields mkvar key =
    getGhcField fields mkvar key id (fail ("No field: " ++ key))
-- | Emit @mkvar=value@ for @key@, falling back to @mkvar=deflt@ when
-- the field is absent.
getGhcFieldOrDefault :: [(String,String)] -> String -> String -> String -> IO ()
getGhcFieldOrDefault fields mkvar key deflt =
    getGhcField fields mkvar key id (putStrLn (mkvar ++ '=' : deflt))
-- | Like 'getGhcFieldOrDefault', but treats the value as a program path:
-- "$topdir" is expanded against the LibDir field and backslashes are
-- normalised to forward slashes.
getGhcFieldProgWithDefault
  :: [(String,String)]
  -> String -> String -> String
  -> IO ()
getGhcFieldProgWithDefault fields mkvar key deflt =
    getGhcField fields mkvar key fixPath emitDefault
  where
    fixPath val = fixSlashes (fixTopdir topdir val)
    topdir      = fromMaybe "" (lookup "LibDir" fields)
    emitDefault = putStrLn (mkvar ++ '=' : deflt)
-- | Core lookup/emit helper: print @mkvar=fix value@ when @key@ is
-- present in @fields@, otherwise run the fallback action.
getGhcField
  :: [(String,String)] -> String -> String
  -> (String -> String)
  -> IO ()
  -> IO ()
getGhcField fields mkvar key fix on_fail =
    maybe on_fail emit (lookup key fields)
  where
    emit val = putStrLn (mkvar ++ '=' : fix val)
-- | Replace the first occurrence of the literal "$topdir" with the given
-- directory; the remainder of the string is copied unchanged.
fixTopdir :: String -> String -> String
fixTopdir topdir = go
  where
    go ""                                 = ""
    go ('$':'t':'o':'p':'d':'i':'r':rest) = topdir ++ rest
    go (c:rest)                           = c : go rest
-- | Normalise Windows path separators: every backslash becomes '/'.
fixSlashes :: FilePath -> FilePath
fixSlashes = map toForward
  where
    toForward '\\' = '/'
    toForward ch   = ch
-- | Split a dotted version string into its numeric components,
-- e.g. @"7.10.3" -> [7,10,3]@. (Uses 'read', so each component must be
-- a valid Int — same precondition as before.)
parseVersion :: String -> [Int]
parseVersion v =
    case break (== '.') v of
      (n, rest) -> read n : parseRest rest
  where
    parseRest ""       = []
    parseRest ('.':v') = parseVersion v'
    parseRest _        = error "bug in parseVersion"
|
oldmanmike/ghc
|
testsuite/mk/ghc-config.hs
|
bsd-3-clause
| 3,147
| 0
| 17
| 626
| 1,024
| 500
| 524
| 75
| 3
|
{-# LANGUAGE FlexibleContexts #-}
module TcRun038_B where
-- | Single-method class; instances are supplied by importing modules.
class Foo a where
    op :: a -> Int

-- Note the (Foo Int) constraint here; and the fact
-- that there is no (Foo Int) instance in this module
-- It's in the importing module!
bar :: Foo Int => Int -> Int
bar x = op x + 7
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_run/TcRun038_B.hs
|
bsd-3-clause
| 294
| 0
| 7
| 76
| 57
| 31
| 26
| 6
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Database.PostgreSQL.Simple.Classy.Exceptions.ExecStatus
( AsExecStatus (..)
) where
import Prelude (Eq,Show (show))
import Data.Profunctor (Profunctor)
import Data.Functor (Functor)
import Data.String (IsString (fromString))
import Data.Maybe (Maybe (Just,Nothing))
import Control.Category (id,(.))
import Control.Lens (Choice,Optic',prism')
import Control.Applicative (Applicative)
import Database.PostgreSQL.Simple (ExecStatus (..))
-- $setup
-- >>> :set -XOverloadedStrings
-- >>> import Control.Lens ((^?),(#))
-- >>> import Data.Maybe (Maybe(Just,Nothing))
-- >>> import Data.ByteString (ByteString (..))
-- >>> import Data.String (String)
-- >>> import Data.Text (Text)
-- | Types that carry an optic onto 'ExecStatus'.
class AsExecStatus p f s where
  _ExecStatus ::
    Optic' p f s ExecStatus

-- | The trivial case: an 'ExecStatus' is itself, so the optic is 'id'.
instance (Profunctor p, Functor f) => AsExecStatus p f ExecStatus where
  _ExecStatus =
    id
-- |
-- >>> ("TuplesOk"::String) ^? _ExecStatus
-- Just TuplesOk
-- >>> ("CopyOut"::ByteString) ^? _ExecStatus
-- Just CopyOut
-- >>> ("Spanner"::Text) ^? _ExecStatus
-- Nothing
-- >>> ("tuplesok"::String) ^? _ExecStatus
-- Nothing
-- >>> (_ExecStatus # CopyIn) :: String
-- "CopyIn"
-- >>> (_ExecStatus # CopyIn) :: Text
-- "CopyIn"
-- >>> (_ExecStatus # CopyIn) :: ByteString
-- "CopyIn"
-- A prism between any string-like type and 'ExecStatus': review renders
-- via 'show'/'fromString'; preview matches the exact constructor names
-- (case-sensitive, as the doctests above demonstrate).
instance ( Choice p
         , Applicative f
         , Eq a
         , Show a
         , IsString a
         )
  => AsExecStatus p f a where
  _ExecStatus =
    prism'
      (fromString . show)
      (\s -> case s of
        "EmptyQuery"    -> Just EmptyQuery
        "CommandOk"     -> Just CommandOk
        "TuplesOk"      -> Just TuplesOk
        "CopyOut"       -> Just CopyOut
        "CopyIn"        -> Just CopyIn
        "BadResponse"   -> Just BadResponse
        "NonfatalError" -> Just NonfatalError
        "FatalError"    -> Just FatalError
        _               -> Nothing
      )
|
mankyKitty/classy-pgsql-errors
|
src/Database/PostgreSQL/Simple/Classy/Exceptions/ExecStatus.hs
|
mit
| 2,002
| 0
| 12
| 456
| 377
| 222
| 155
| 40
| 0
|
module Main where
{-
- Testing module
-}
import Brain
import Test.Hspec
import Test.QuickCheck
import Control.Monad.Trans.State.Lazy
import Control.Exception
import Control.Monad
import System.Process
-- | 'format' must strip invalid characters while leaving an
-- already-clean program (and the empty program) unchanged.
checkFormat :: Bool
checkFormat = and
    [ format prog1 == prog1
    , format prog2 == "..><,,..>"
    , format prog3 == "..[]>+-..]]"
    , format prog4 == []
    ]
  where
    prog1 = "..>[]>>.,<+-"
    prog2 = "he..><,,..>lo"
    prog3 = "ja..[]>+-@m3..]]"
    prog4 = []
-- Each check below builds a small 'Deque' fixture, runs one Brain
-- operation in its State transformer, and compares the resulting state.
checkRight :: IO Bool
checkRight = do
  let blank = Deque [1,2,3] 4 [5,6,7]
  (i,d) <- execStateT moveRight (([],[]), blank)
  return $ d == Deque [4,1,2,3] 5 [6,7]

checkLeft :: IO Bool
checkLeft = do
  let blank = Deque [1,2,3] 4 [5,6,7]
  (i,d) <- execStateT moveLeft (([],[]), blank)
  return $ d == Deque [2,3] 1 [4,5,6,7]

checkIncrement :: IO Bool
checkIncrement = do
  let blank = Deque [1,2,3] 4 [5,6,7]
  (i,d) <- execStateT increment (([],[]), blank)
  return $ d == Deque [1,2,3] 5 [5,6,7]

checkDecrement :: IO Bool
checkDecrement = do
  let blank = Deque [1,2,3] 4 [5,6,7]
  (i,d) <- execStateT decrement (([],[]), blank)
  return $ d == Deque [1,2,3] 3 [5,6,7]

-- consume should hand back the next program character and move it to
-- the consumed side, leaving the deque untouched.
checkConsume :: IO Bool
checkConsume = do
  let blank = Deque [1,2,3] 4 [5,6,7]
  (i,((con, uncon),d)) <- runStateT consume (([],">...<"), blank)
  return $ i == '>' && con == ">" && uncon == "...<" && d == blank

-- Current cell is zero, so the whole bracketed body must be skipped.
checkSkipBracket :: IO Bool
checkSkipBracket = do
  let blank = Deque [1,2,3] 0 [5,6,7]
  (i,((con, uncon),d)) <- runStateT executeBrainState (([],"[[+]]"), blank)
  return $ i == () && con == "]]+[[" && uncon == "" && d == blank

-- "[>]" walks right until it reaches the first zero cell.
checkLoops :: IO Bool
checkLoops = do
  let blank = Deque [1,2,3] 4 [5,6,7,0,8]
  (i,((con, uncon),d)) <- runStateT executeBrainState (([],"[>]"), blank)
  return $ i == () && con == "]>[" && uncon == "" && d == Deque [7,6,5,4,1,2,3] 0 [8]

checkUnconsume :: IO Bool
checkUnconsume = do
  let blank = Deque [1,2,3] 4 [5,6,7]
  (i,((con, uncon),d)) <- runStateT unconsume ((">..<",">...<"), blank)
  return $ i == '>' && con == "..<" && uncon == ">>...<" && d == blank

-- Expected to throw (asserted via shouldThrow in 'main').
checkUnmatchedEndBracket :: IO ()
checkUnmatchedEndBracket = do
  let blank = Deque [1,2,3] 0 [5,6,7]
  print "Begin"
  (i,d) <- runStateT executeBrainState (("","[++"), blank)
  when (i == ()) $ print "Hello again"
  print "End"
  return ()

-- Expected to throw (asserted via shouldThrow in 'main').
checkUnmatchedBeginBracket :: IO ()
checkUnmatchedBeginBracket = do
  let blank = Deque [1,2,3] 4 [5,6,7]
  _ <- runStateT executeBrainState (("","++]"), blank)
  return ()

checkZero :: Bool
checkZero = isZero $ Deque [1,2,3] 0 [4,5,6]

checkNotZero :: Bool
checkNotZero = isNotZero $ Deque [1,2,3] 4 [5,6,7]

checkLikeStacks :: Bool
checkLikeStacks = stackBracket '[' "[" == "[[" &&
                  stackBracket ']' "]" == "]]"

checkMatchingBrackets :: Bool
checkMatchingBrackets = stackBracket '[' "][[]]" == "[[]]" &&
                        stackBracket ']' "[[[]]" == "[[]]"
-- | Entry point: runs the hspec suites for program filtering, state
-- operations, bracket handling, failure modes, and end-to-end execution.
-- Fixes: adds the missing top-level type signature, and corrects the
-- "shoule" typo in one test description.
main :: IO ()
main = do
  hspec $ describe "Testing program filtering" $ do
    it "should filter correctly" $ checkFormat `shouldBe` True
  hspec $ describe "Testing State operations" $ do
    it "should move right correctly" $ checkRight `shouldReturn` True
    it "should move left correctly" $ checkLeft `shouldReturn` True
    it "should increment correctly" $ checkIncrement `shouldReturn` True
    it "should decrement correctly" $ checkDecrement `shouldReturn` True
    it "should consume correctly" $ checkConsume `shouldReturn` True
    it "should unconsume correctly" $ checkUnconsume `shouldReturn` True
    it "should show zero correctly" $ checkZero `shouldBe` True
    it "should show nonzero correctly" $ checkNotZero `shouldBe` True
  hspec $ describe "Testing bracket operations" $ do
    it "should error on empty stack" $ evaluate (stackBracket 'a' []) `shouldThrow` anyException
    it "should ignore non stack chars" $ (stackBracket 'a' "[]") `shouldBe` "[]"
    it "should append like stack symbols" $ checkLikeStacks `shouldBe` True
    it "should cancel out matching brackets" $ checkMatchingBrackets `shouldBe` True
    it "should skip open brackets on zero" $ checkSkipBracket `shouldReturn` True
    it "should loop properly" $ checkLoops `shouldReturn` True
  hspec $ describe "Test bad programs" $ do
    it "Should error on unmatched end bracket" $ checkUnmatchedEndBracket `shouldThrow` anyException
    it "Should error on unmatched begin bracket" $ checkUnmatchedBeginBracket `shouldThrow` anyException
  hspec $ describe "Total functionality" $ do
    it "Should execute hello world properly" $ callCommand "sh test/testHello.sh" `shouldReturn` ()
|
allonsy/brain
|
test/brainTest.hs
|
mit
| 4,581
| 0
| 15
| 915
| 1,861
| 997
| 864
| 104
| 1
|
{-|
Module : Data.Algorithm.PPattern.Geometry.Point
Description : Simple 2D point
Copyright : (c) Laurent Bulteau, Romeo Rizzi, Stéphane Vialette, 2016
License : MIT
Maintainer : vialette@gmail.com
Stability : experimental
-}
module Data.Algorithm.PPattern.Geometry.Point
(
-- * The @Point@ type
Point(..)
-- * Constructing
, mk
-- * Accessing
, xCoord
, yCoord
-- * Rendering
, toTuple
-- * Modifying
, updateXCoord
, updateXCoord'
, updateYCoord
, updateYCoord'
)
where
-- | A 2D point with integer coordinates, stored as an @(x, y)@ pair.
newtype Point = Point (Int, Int) deriving (Show, Eq, Ord)
{-|
Make a point from x and y coordinates.
-}
mk :: Int -> Int -> Point
mk = curry Point

{-|
Transform a point to a pair.
-}
toTuple :: Point -> (Int, Int)
toTuple (Point xy) = xy

{-|
Get x-coordinate of a point.
-}
xCoord :: Point -> Int
xCoord = fst . toTuple

{-|
Get y-coordinate of a point.
-}
yCoord :: Point -> Int
yCoord = snd . toTuple
{-|
Make a new point from a point by updating its x-coordinate.
-}
updateXCoord :: Int -> Point -> Point
updateXCoord x p = mk x (yCoord p)

{-|
Make a new point from a point by function-updating its x-coordinate.
-}
updateXCoord' :: (Int -> Int) -> Point -> Point
updateXCoord' f p = mk (f (xCoord p)) (yCoord p)

{-|
Make a new point from a point by updating its y-coordinate.
-}
updateYCoord :: Int -> Point -> Point
updateYCoord y p = mk (xCoord p) y

{-|
Make a new point from a point by function-updating its y-coordinate.
-}
updateYCoord' :: (Int -> Int) -> Point -> Point
updateYCoord' f p = mk (xCoord p) (f (yCoord p))
|
vialette/ppattern-tmp
|
src/Data/Algorithm/PPattern/Geometry/Point.hs
|
mit
| 1,703
| 0
| 8
| 446
| 410
| 235
| 175
| 30
| 1
|
module Main where
-- | Print the classic greeting.
main :: IO ()
main = putStrLn greeting
  where
    greeting = "Hello, World!"
|
archdragon/haskell_exercises
|
hello_world/hello_world.hs
|
mit
| 50
| 0
| 5
| 8
| 12
| 7
| 5
| 2
| 1
|
{-# LANGUAGE BangPatterns #-}
module Stage.Diffusion
( DiffSolver (..)
, diffusion)
where
import Model
import FieldElt
import Stage.Linear
import Data.Array.Repa as R
import Data.Array.Repa.Eval as R
import Data.Vector.Unboxed
-- | Which numerical scheme 'diffusion' uses.
data DiffSolver
        = DiffStable Int        -- ^ iterative 'linearSolver', with iteration count
        | DiffUnstable          -- ^ single-pass 'unstableSolver'
-- | Diffuse a field at a certain rate.
--
-- The diffusion coefficient @a@ scales with the timestep, the rate and
-- the square of the field width; the field is forced with
-- 'deepSeqArray' before solving. (Both solvers are given the field
-- twice — as source and as initial estimate.)
diffusion
        :: (FieldElt a, Num a, Elt a, Unbox a)
        => DiffSolver
        -> Delta
        -> Rate
        -> Field a
        -> IO (Field a)
diffusion !solver !delta !rate field
 = {-# SCC diffusion #-}
   field `deepSeqArray`
   let  _ :. _ :. width' = R.extent field
        !width  = fromIntegral width'
   in case solver of
        DiffUnstable
         -> let !a = delta * rate * width * width
            in  unstableSolver field field a
        DiffStable iters
         -> let !a = delta * rate * width * width
                !c = 1 + 4 * a
            in  linearSolver field field a c iters
{-# SPECIALIZE diffusion
        :: DiffSolver -> Delta -> Rate
        -> Field Float
        -> IO (Field Float) #-}
{-# SPECIALIZE diffusion
        :: DiffSolver -> Delta -> Rate
        -> Field (Float, Float)
        -> IO (Field (Float, Float)) #-}
|
gscalzo/HaskellTheHardWay
|
gloss-try/gloss-master/gloss-examples/raster/Fluid/src-repa/Stage/Diffusion.hs
|
mit
| 1,283
| 0
| 17
| 465
| 289
| 150
| 139
| -1
| -1
|
{- |
Module : Numeric.Information.IT
Description : Information quantities
Copyright : (c) Malte Harder
License : MIT
Maintainer : malte.harder@gmail.com
Stability : experimental
Portability : portable
This module contains various function to calculate information
theoretic quantities
-}
module Numeric.Information.IT (
-- * Expected values
expected
-- * Entropies
, entropy
, condEntropy
-- * Divergences
, divKL
, divJS
-- * Mutual Information
, mutualInfo
, mutualInfoJ
, condMutualInfo
-- * Information flow
-- * Alternative Implementations
-- ** Expected values
, expected'
-- ** Entropies
, entropy'
, condEntropy'
-- * Mutual Information
, mutualInfo'
-- -- * Information flow
--, informationFlow
-- * Helper functions
, log2
, eventDivergence
) where
import Numeric.Information.Distribution
import Numeric.Probability.Distribution hiding (expected)
import qualified Data.List as List
import Control.Monad (liftM, liftM2)
import Control.Arrow (first, second)
-- | Base-two logarithm, the unit of all information quantities below.
log2 :: (Floating prob)
        => prob
        -> prob
log2 x = logBase 2 x
-- | Probabilities at or below this threshold are dropped when summing
-- (see 'expected'), avoiding log-of-zero terms in the entropy quantities.
minVal :: (Fractional prob) => prob
minVal = 1e-15
-- | Summing a function over the events of a distribution
-- weighted by the events probabilities
expected :: (Fractional prob, Ord prob, Ord a)
            => Dist prob a -- ^ p(x)
            -> (a -> prob) -- ^ f : X -> R
            -> prob -- ^ Sum_x p(x)*f(x)
expected p f =
    -- normalise, then fold f(x) * p(x) over every event whose
    -- probability is above the numerical cut-off
    sum' [ f x * w | (x, w) <- decons (normS p), w > minVal ]
-- | Entropy of a distribution H(A)
-- (the '$!' and 'seq' force the event and its probability eagerly before
-- taking the logarithm)
entropy :: (Floating prob, Ord prob, Ord a)
           => Dist prob a
           -> prob
entropy p = - expected p (\a -> log2 $! a `seq` p ?= a)
-- | Conditional entropy H(A|B)
condEntropy :: (Floating prob, Ord prob, Ord a, Ord b)
               => (a -> Dist prob b)    -- ^ conditional distribution p(y|x)
               -> Dist prob a           -- ^ marginal p(x)
               -> prob
condEntropy p_ygx p_x =
    -- build the joint p(y,x), then average -log2 p(y|x) over it
    let p_yx = p_ygx -|- p_x
    in - expected p_yx (\(y,x) -> log2 $ p_ygx x ?= y)
-- | Kullback Leibler Divergence of two distributions
divKL :: (Floating prob, Ord prob, Ord a)
=> Dist prob a
-> Dist prob a
-> prob
divKL p q = expected p (\a -> log2 $ (p ?= a)/(q ?= a))
-- | Jensen Shannon Divergence
divJS :: (Floating prob, Ord prob, Ord a)
         => prob -- ^ pi
         -> Dist prob a
         -> Dist prob a
         -> prob
divJS w p q = entropy mixture - convex w (entropy p) (entropy q)
  where
    -- the pi-weighted mixture distribution of p and q
    mixture = combine (convex w) p q
-- | Mutual information of two random variables
mutualInfo :: (Floating prob, Ord prob, Ord a, Ord b)
              => (a -> Dist prob b) -- ^ conditional distribution p(y|x)
              -> Dist prob a        -- ^ marginal p(x)
              -> prob
mutualInfo p_ygx p_x =
    let p_yx = p_ygx -|- p_x        -- joint p(y,x)
        p_y = margin fst p_yx       -- marginal p(y)
    in expected p_yx (\(y,x) ->
        -- average log2 of p(y,x) / (p(y) p(x)) over the joint
        log2 $ eventDivergence p_yx p_y p_x (y,x))
-- | Mutual information of two random variables given by a joint distribution
mutualInfoJ :: (Floating prob, Ord prob, Ord a, Ord b)
               => Dist prob (a,b) -- ^
               -> prob
mutualInfoJ joint = expected joint pointwise
  where
    pA = margin fst joint
    pB = margin snd joint
    -- log2 of the ratio p(a,b) / (p(a) p(b)) for one joint event
    pointwise (a,b) = log2 (eventDivergence joint pA pB (a,b))
-- | Conditional mutual information I(A;B|C)
-- computed via the identity I(A;B|C) = H(A|C) - H(A|B,C)
condMutualInfo :: (Floating prob, Ord prob, Ord a, Ord b, Ord c)
                  => (c -> Dist prob a)     -- ^ p(x|z)
                  -> ((b,c) -> Dist prob a) -- ^ p(x|y,z)
                  -> Dist prob c            -- ^ p(z)
                  -> Dist prob (b,c)        -- ^ p(y,z)
                  -> prob
condMutualInfo p_xgz p_xgyz p_z p_yz
    = condEntropy p_xgz p_z - condEntropy p_xgyz p_yz
{-
-- | Information flow between two random variables
informationFlow :: (Floating prob, Ord prob, Ord a, Ord b)
=> (a -> Dist prob b) -- ^ Interventional distribution
-> Dist prob a
-> prob
informationFlow p_ygx p_x =
let p_yx = p_ygx -|- p_x
in expected p_yx (\(y,x) ->
log2 $ (p_ygx x ?= y)/
(expected p_x (\x' -> (p_ygx x' ?= y) )))
-- | Imposed information flow
imposedInformationFlow :: (Floating prob, Ord prob, Ord a, Ord b, Ord c)
=> ((b,c) -> Dist prob a) -- ^ Interventional distribution
-> (c -> Dist prob b) -- ^ Interventional distribution
-> Dist prob c
-> prob
imposedInformationFlow p_xgyz p_ygz p_z =
let p_yz = p_ygz -|- p_z
in expected p_yz (\(y,z) -> expected (p_xgyz (y,z))
(\x ->
log2 $ (p_xgyz (y,z) ?= x)/
(expected (p_ygz z)
(\y' -> (p_xgyz (y',z) ?= x)))))
-}
-- Alternative definitions of entropy and mutual information
-- maybe more efficient to use
-- | Summing a function over the events of a distribution,
-- weighted by the events probabilities with custom combinator
expected' :: (Fractional prob, Ord prob, Ord a)
             => ((prob,prob) -> prob) -- ^ combinator applied to (f x, p x)
             -> Dist prob a
             -> (a -> prob)
             -> prob
expected' comb p f =
    -- the combinator sees the pair (f x, weight) for each surviving event
    sum' [ comb (f x, w) | (x, w) <- decons (normS p), w > minVal ]
-- | Entropy via the custom-combinator fold: - Sum_x p(x) * log2 p(x).
entropy' :: (Floating prob, Ord prob, Ord a)
            => Dist prob a -- ^
            -> prob
entropy' p = negate (expected' wLogW p (const 1.0))
  where
    -- only the weight matters here; the mapped value is a dummy 1.0
    wLogW (_, w) = w * log2 w
-- | Conditional entropy via the custom-combinator fold.
condEntropy' :: (Floating prob, Ord prob, Ord a, Ord b)
                => (a -> Dist prob b) -- ^
                -> Dist prob a
                -> prob
condEntropy' p_ygx p_x =
    let p_yx = p_ygx -|- p_x
    -- here c = 1/p(x) and d = p(y,x), so d * log2 (d*c) = p(y,x) * log2 p(y|x)
    in - expected' (\(c,d) -> d * log2 (d*c)) p_yx (\(y,x) -> 1.0/(p_x ?= x) )
-- | Mutual information via the custom-combinator fold.
mutualInfo' :: (Floating prob, Ord prob, Ord a, Ord b)
               => (a -> Dist prob b)
               -> Dist prob a
               -> prob
mutualInfo' p_ygx p_x =
    let p_yx = p_ygx -|- p_x
        p_y = margin fst p_yx
    -- here c = 1/(p(x) p(y)) and d = p(y,x), giving d * log2 (d / (p(x) p(y)))
    in expected' (\(c,d) -> d * log2 (d*c)) p_yx (\(y,x) -> 1.0/((p_x ?= x)*(p_y ?= y)))
-- Helper function
-- | Pointwise ratio p(a,b) / (p(a) p(b)) for a single joint event.
eventDivergence :: (Fractional prob, Ord a, Ord b)
                   => Dist prob (a,b)
                   -> Dist prob a
                   -> Dist prob b
                   -> (a,b)
                   -> prob
eventDivergence joint pA pB (a,b) = jointProb / productProb
  where
    jointProb   = joint ?= (a,b)
    productProb = (pA ?= a) * (pB ?= b)
|
mahrz/hit
|
src/Numeric/Information/IT.hs
|
mit
| 6,370
| 4
| 14
| 2,163
| 1,713
| 924
| 789
| 117
| 1
|
{-# LANGUAGE FlexibleInstances #-}
module Qy.Chat.Types (
emptyRoomMap
, Room(..)
, Client(..)
, getRoomChan
, ErrorMsg(..)
, IncomingMsg(..)
, ChanMessage(..)
, UserName
, RoomName
, RoomMap
)
where
import Data.Text
import Control.Concurrent.STM
import Control.Concurrent.STM.TChan
import qualified Data.Set as Set
import Data.HashMap.Strict as M
import qualified Network.WebSockets as WS
import Data.Aeson
import Data.Aeson.Types
import qualified Control.Applicative as A
import Control.Applicative ((<$>), (<*>))
import Data.Function (on)
-- Readability aliases: users and rooms are identified by plain Text.
type UserName = Text
type RoomName = Text
-- | Shared mutable map from room name to room state.
type RoomMap = TVar (HashMap RoomName Room)
-- | A fresh, empty room map.
emptyRoomMap :: IO RoomMap
emptyRoomMap = newTVarIO M.empty
-- | A chat room: its name, its current members, and its message channel.
data Room = Room { roomName :: RoomName                   -- ^ identifying name (used for Eq/Ord)
                 , roomClients :: TVar (Set.Set Client)   -- ^ clients currently in the room
                 , roomChan :: TChan ChanMessage          -- ^ channel carrying room messages
                 }
-- | Accessor for a room's message channel (alias for 'roomChan').
-- Fix: the original lacked a top-level type signature (-Wall warning,
-- and the inferred type is less obvious to readers).
getRoomChan :: Room -> TChan ChanMessage
getRoomChan = roomChan
-- | A connected chat client.
data Client = Client { clientName :: Text                            -- ^ identifying name (used for Eq/Ord)
                     , clientRooms :: TVar (Set.Set Room)            -- ^ rooms the client has joined
                     , clientChan :: TChan ChanMessage               -- ^ the client's private channel
                     , clientRoomChans :: TVar [TChan ChanMessage]   -- ^ duplicated channels of joined rooms
                     }
-- Clients are compared purely by name.
instance Eq Client where
    a == b = clientName a == clientName b
instance Ord Client where
    compare a b = compare (clientName a) (clientName b)
-- Rooms, likewise, are compared purely by name.
instance Eq Room where
    a == b = roomName a == roomName b
instance Ord Room where
    compare a b = compare (roomName a) (roomName b)
-- | Errors reported back to the websocket client.
data ErrorMsg = ParserError Text             -- ^ malformed incoming JSON (wire code 1)
              | InitialConnectionError Text  -- ^ failure during initial connect (wire code 2)
              | ForbiddenJoinError Text      -- ^ join request refused (wire code 3)
              deriving Show
-- | Every error is encoded as @{"type":"error","code":n,"payload":t}@,
-- where the numeric code identifies the error variant.
instance ToJSON ErrorMsg where
    toJSON e =
        object [ "type" .= ("error" :: Text)
               , "code" .= code
               , "payload" .= payload]
      where
        (code, payload) = case e of
            ParserError t            -> (1 :: Int, t)
            InitialConnectionError t -> (2, t)
            ForbiddenJoinError t     -> (3, t)
-- | Messages a client may send: join/leave a room, or post text to one.
data IncomingMsg = UserJoin { iRoomName :: !Text }
                 | UserLeave { iRoomName :: !Text }
                 | UserSendText { iRoomName :: !Text
                                , iMsg :: !Text }
                 deriving Show
-- | Decode an incoming client message; the @"type"@ field selects the
-- constructor ("join" / "leave" / "msg").
instance FromJSON IncomingMsg where
    parseJSON o@(Object v) = do
        typ <- v .: "type" :: Parser Text
        case typ of
            "join" -> UserJoin <$> v .: "roomname"
            "leave" -> UserLeave <$> v .: "roomname"
            "msg" -> UserSendText <$>
                     v .: "roomname" <*>
                     v .: "payload"
            -- unknown type tags become a parse failure, not a crash
            _ -> typeMismatch "invalid msg type" o
    parseJSON invalid = typeMismatch "Invalid" invalid
-- | Messages carried on room and client channels.
data ChanMessage = Broadcast !RoomName !UserName !Text  -- ^ user text posted to a room
                 | Tell !UserName !Text                 -- ^ direct message to one user
                 | JoinNotice !RoomName !UserName       -- ^ a user joined the room
                 | LeaveNotice !RoomName !UserName      -- ^ a user left the room
                 | Notice !RoomName !Text               -- ^ server notice to a room
-- | Wire encoding for channel messages; the @"type"@ tag mirrors the
-- incoming message vocabulary.
instance ToJSON ChanMessage where
    toJSON (Broadcast r u m) =
        object [ "type" .= ("msg" :: Text)
               , "username" .= u
               , "roomname" .= r
               , "payload" .= m]
    toJSON (JoinNotice r u) =
        object [ "type" .= ("join" :: Text)
               , "username" .= u
               , "roomname" .= r]
    toJSON (LeaveNotice r u) =
        object [ "type" .= ("leave" :: Text)
               , "username" .= u
               , "roomname" .= r]
    -- Fix: these two cases were 'undefined', so encoding a Tell or Notice
    -- crashed the sending thread at runtime ('toLazyByteString = encode'
    -- below calls straight into this instance). Encode them with their own
    -- type tags instead.
    -- NOTE(review): "tell" and "notice" are new wire values -- confirm the
    -- client-side protocol accepts them.
    toJSON (Tell u m) =
        object [ "type" .= ("tell" :: Text)
               , "username" .= u
               , "payload" .= m]
    toJSON (Notice r m) =
        object [ "type" .= ("notice" :: Text)
               , "roomname" .= r
               , "payload" .= m]
-- Websocket (de)serialisation. Each instance is deliberately one-directional:
-- the unused direction is left 'undefined' and must never be called.
instance WS.WebSocketsData (Either ErrorMsg IncomingMsg) where
    fromLazyByteString s =
        case eitherDecode s of
            -- a JSON parse failure is surfaced as a ParserError value
            Left m -> Left . ParserError $ pack m
            Right i -> Right i
    toLazyByteString = undefined  -- receive-only instance
instance WS.WebSocketsData ChanMessage where
    fromLazyByteString = undefined  -- send-only instance
    toLazyByteString = encode
instance WS.WebSocketsData ErrorMsg where
    fromLazyByteString = undefined  -- send-only instance
    toLazyByteString = encode
|
realli/chatqy
|
src/Qy/Chat/Types.hs
|
mit
| 3,972
| 0
| 14
| 1,336
| 1,079
| 598
| 481
| 140
| 1
|
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
import Data.Foldable (for_)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import Triangle (rows)
-- | Run the spec with fail-fast enabled, so the first failure stops the run.
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
-- | Table-driven tests for 'rows': each case is (label, input, expected triangle).
specs :: Spec
specs = describe "rows" $ for_ cases runCase
  where
    runCase (label, n, want) =
        it label (rows n `shouldBe` want)
    cases =
        [ ("no rows"      ,  0, [])
        , ("single row"   ,  1, [[1]])
        , ("two rows"     ,  2, [[1], [1, 1]])
        , ("three rows"   ,  3, [[1], [1, 1], [1, 2, 1]])
        , ("four rows"    ,  4, [[1], [1, 1], [1, 2, 1], [1, 3, 3, 1]])
        , ("negative rows", -1, [])
        ]
|
c19/Exercism-Haskell
|
pascals-triangle/test/Tests.hs
|
mit
| 978
| 0
| 10
| 394
| 319
| 197
| 122
| 17
| 1
|
import CodeGen
import Control.Monad ( when )
import HughesSaft ( saft_fluid, saft_entropy, yuwu_correlation, hughes_X, hughes_HB )
import WaterSaft ( water_saft, water_entropy, water_X, mu, water_saft_by_hand )
import IdealGas ( idealgas )
import FMT ( n, n2, n2mxx, n2x )
import SFMT ( sfmt )
import Rosenfeld ( fmt )
import WhiteBear ( whitebear, tensorwhitebear, whitebear_m2, correlation_gross, gSigmaA, gSigmaS,
                   gSigmaA_m2, gSigmaS_m2, gSigmaA_by_hand, gSigmaA_automagic )
import System.Environment ( getArgs )
-- | Emit generated C++ sources for each functional named on the command
-- line. Each @gen@ call pairs an output path with the code to generate;
-- only files explicitly requested in @todo@ are written.
main :: IO ()
main =
  do todo <- getArgs
     -- Idiom fix: 'if cond then act else return ()' replaced by 'when'
     -- (requires Control.Monad (when) in the import list).
     let gen f x = when (f `elem` todo) (writeFile f x)
     -- chemical-potential contribution shared by several functionals
     let nmu = "nmu" === integrate (n*mu)
     gen "src/HardFluidFast.cpp" $
       defineFunctional (idealgas + whitebear + nmu) ["R", "mu"] "HardFluid"
     gen "src/HardRosenfeldFluidFast.cpp" $
       defineFunctional (idealgas + fmt + nmu) ["R", "mu"] "HardRosenfeldFluid"
     gen "src/HardSpheresNoTensor2Fast.cpp" $
       defineFunctional whitebear ["R"] "HardSpheresNoTensor2"
     gen "src/WhiteBearMarkIIFast.cpp" $
       defineFunctional whitebear_m2 ["R"] "WhiteBearMarkII"
     gen "src/TensorWhiteBearFast.cpp" $
       defineFunctional tensorwhitebear ["R"] "TensorWhiteBear"
     gen "src/n2DensityFast.cpp" $
       defineTransformation n2 ["R"] "n2Density"
     gen "src/TensorDensityXXFast.cpp" $
       defineTransformation n2mxx ["R"] "TensorDensityXX"
     gen "src/VectorDensityXFast.cpp" $
       defineTransformation n2x ["R"] "VectorDensityX"
     gen "src/gSigmaSFast.cpp" $
       defineTransformation gSigmaS ["R"] "gSigmaS"
     gen "src/gSigmaAFast.cpp" $
       defineTransformation gSigmaA ["R"] "gSigmaA"
     gen "src/gSigmaA_by_handFast.cpp" $
       generateHeader gSigmaA_by_hand ["R"] "gSigmaA_by_hand"
     gen "src/gSigmaA_automagicFast.cpp" $
       generateHeader gSigmaA_automagic ["R"] "gSigmaA_automagic"
     gen "src/gSigmaSm2Fast.cpp" $
       defineTransformation gSigmaS_m2 ["R"] "gSigmaSm2"
     gen "src/CorrelationGrossCorrectFast.cpp" $
       defineTransformation correlation_gross ["R"] "CorrelationGrossCorrect"
     gen "src/gSigmaAm2Fast.cpp" $
       defineTransformation gSigmaA_m2 ["R"] "gSigmaAm2"
     gen "src/YuWuCorrelationFast.cpp" $
       defineTransformation yuwu_correlation ["R"] "YuWuCorrelationFast"
     gen "src/SaftFluid2Fast.cpp" $
       defineFunctional saft_fluid ["R", "epsilon_association", "kappa_association",
                                    "epsilon_dispersion", "lambda_dispersion", "length_scaling",
                                    "mu"] "SaftFluid2"
     gen "src/WaterSaftFast.cpp" $
       defineFunctional water_saft ["R", "epsilon_association", "kappa_association",
                                    "epsilon_dispersion", "lambda_dispersion", "length_scaling",
                                    "mu"] "WaterSaft"
     gen "src/WaterSaft_by_handFast.cpp" $
       defineFunctional water_saft_by_hand ["R", "epsilon_association", "kappa_association",
                                            "epsilon_dispersion", "lambda_dispersion", "length_scaling",
                                            "mu"] "WaterSaft_by_hand"
     gen "src/WaterXFast.cpp" $
       defineTransformation water_X ["R", "epsilon_association", "kappa_association",
                                     "epsilon_dispersion", "lambda_dispersion", "length_scaling",
                                     "mu"] "WaterX"
     gen "src/HughesXFast.cpp" $
       defineTransformation hughes_X ["R", "epsilon_association", "kappa_association",
                                      "epsilon_dispersion", "lambda_dispersion", "length_scaling",
                                      "mu"] "HughesX"
     gen "src/HughesHBFast.cpp" $
       defineTransformation hughes_HB ["R", "epsilon_association", "kappa_association",
                                       "epsilon_dispersion", "lambda_dispersion", "length_scaling",
                                       "mu"] "HughesHB"
     gen "src/WaterEntropyFast.cpp" $
       defineFunctional water_entropy ["R", "epsilon_association", "kappa_association",
                                       "epsilon_dispersion", "lambda_dispersion", "length_scaling",
                                       "mu"] "WaterEntropy"
     gen "src/EntropySaftFluid2Fast.cpp" $
       defineFunctionalNoGradient saft_entropy
         ["R", "epsilon_association", "kappa_association",
          "epsilon_dispersion", "lambda_dispersion", "length_scaling"] "EntropySaftFluid2"
|
droundy/deft
|
src/haskell/functionals.hs
|
gpl-2.0
| 4,533
| 0
| 13
| 1,233
| 862
| 450
| 412
| 81
| 2
|
import Control.Arrow
-- Custom HLint hint suggesting (&&&) be replaced by (***).
-- NOTE(review): the binding name "error" and the file's location under
-- tests/test-hints-fail suggest this is a deliberately failing hint used
-- by HLint's own test suite -- confirm before "fixing" it.
error = (&&&) ==> (***)
|
alphaHeavy/hlint
|
tests/test-hints-fail.hs
|
gpl-2.0
| 45
| 0
| 5
| 7
| 19
| 12
| 7
| 2
| 1
|
{-# LANGUAGE DeriveDataTypeable, MultiParamTypeClasses #-}
module Rewriting.Check where
import Rewriting.TRS
import Rewriting.Overlap
import qualified Machine.Numerical.Config as C
import Autolib.Size
import Autolib.Set
import Autolib.FiniteMap
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Reporter
import qualified Autolib.Reporter.Set
import Data.Typeable
-- | Properties of a term rewriting system that can be checked.
data Check = Left_Linear        -- ^ no variable occurs twice in any left-hand side
           | Linear             -- ^ both rule sides linear, rhs vars subset of lhs vars
           | Non_Overlapping    -- ^ no critical pairs between rules
           | Constructor        -- ^ declared but not implemented (see catch-all in check)
           | Max_Rules Int      -- ^ upper bound on the number of rules
           | Max_Size Int       -- ^ upper bound on the size of a rule side
           | Max_Symbols Int    -- ^ upper bound on distinct function symbols
           | Max_Variables Int  -- ^ upper bound on distinct variables
           deriving ( Eq, Ord, Typeable )
-- | Run a 'Check' against a TRS inside the 'Reporter' monad.
-- All user-facing messages are German by design; do not translate the
-- runtime strings.
instance (Symbol c, Symbol v) => C.Check Check ( TRS v c ) where
    check Non_Overlapping trs = do
        inform $ text "Das System soll nicht-überlappend sein."
        -- reject once per critical pair found ('overlaps' runs in the list monad)
        sequence_ $ do
            ol <- overlaps trs
            return $ reject $ toDoc ol
    check Left_Linear trs = do
        inform $ text "Das System soll links-linear sein."
        sequence_ $ do
            ( k, rule ) <- zip [0..] $ regeln trs
            return $ do
                inform $ text "prüfe Regel" <+> toDoc rule
                linear ( text "linke Seite der Regel" ) ( lhs rule )
    check Linear trs = do
        inform $ text "Das System soll linear sein."
        sequence_ $ do
            ( k, rule ) <- zip [0..] $ regeln trs
            return $ do
                inform $ text "prüfe Regel" <+> toDoc rule
                linear ( text "linke Seite der Regel" ) ( lhs rule )
                linear ( text "rechte Seite der Regel" ) ( rhs rule )
                -- additionally: every rhs variable must occur on the lhs
                let lvs = vars ( lhs rule ) ; rvs = vars ( rhs rule )
                Autolib.Reporter.Set.subeq
                    ( text "Menge der Variablen in der rechten Seite:", rvs )
                    ( text "Menge der Variablen in der linken Seite:", lvs )
    check ( Max_Rules n ) trs =
        bounder ( text "Anzahl der Regeln" ) n ( length $ regeln trs )
    check ( Max_Size n ) trs =
        bounder ( text "Größe einer Regelseite" ) n
            ( maximum $ 0 : map size ( terms trs ) )
    check ( Max_Symbols n ) trs =
        bounder ( text "Anzahl verschiedener Symbole" ) n
            ( cardinality $ unionManySets $ map syms $ terms trs )
    check ( Max_Variables n ) trs =
        bounder ( text "Anzahl verschiedener Variablen" ) n
            ( cardinality $ unionManySets $ map vars $ terms trs )
    -- catch-all for checks without an implementation (e.g. Constructor)
    check ch trs = do
        reject $ vcat
            [ text "Test für" <+> toDoc ch <+> text "nicht implementiert."
            , text "(please file a bug report)"
            ]
-- | Report an allowed bound versus an actual value, and fail unless the
-- value is within the bound.
bounder :: Doc -> Int -> Int -> Reporter ()
bounder name bound value = do
    inform report
    assert (value <= bound) (text "OK?")
  where
    report = vcat
        [ name
        , text "erlaubt :" <+> toDoc bound
        , text "vorhanden:" <+> toDoc value
        ]
-- | All variables that occur more than once in the term @t@.
-- Counts occurrences by folding (+) into a finite map keyed by variable,
-- then keeps the keys whose count exceeds one (runs in the list monad).
-- NOTE(review): no type signature in the original; the types come from
-- the Autolib term representation -- confirm before adding one.
mehrfache_variablen t = do
    ( v, c ) <- fmToList $ addListToFM_C (+) emptyFM $ do
        p <- varpos t
        let Var v = peek t p
        return ( v, 1 )
    guard $ c > 1
    return v
-- | Reject (showing the offending variables) unless every variable occurs
-- at most once in term @t@; @name@ labels the term in the report.
linear name t = do
    let vs = mehrfache_variablen t
    when ( not $ null vs ) $ reject $ vcat
        [ text "in diesem Term" <+> parens name
        , nest 4 $ toDoc t
        , text "kommen diese Variablen mehrfach vor:"
        , nest 4 $ toDoc vs
        ]
-- | Both sides of every rule of the system, as a flat list of terms.
terms trs = concatMap (\rule -> [ lhs rule, rhs rule ]) (regeln trs)
-- | Parser for 'Check' values; one alternative per constructor.
instance Reader Check where
    atomic_readerPrec d = readerParenPrec d $ \ d -> do
        ((do my_reserved "Left_Linear"
             return (Left_Linear))
         <|>
         (do my_reserved "Linear"
             return (Linear))
         <|>
         (do my_reserved "Non_Overlapping"
             return (Non_Overlapping))
         <|>
         (do my_reserved "Constructor"
             return (Constructor))
         <|>
         -- constructors taking an argument are only parsed below
         -- precedence 9, matching the printer's docParen threshold
         (do guard (d < 9)
             my_reserved "Max_Rules"
             aa <- readerPrec 9
             return (Max_Rules aa))
         <|>
         (do guard (d < 9)
             my_reserved "Max_Size"
             aa <- readerPrec 9
             return (Max_Size aa))
         <|>
         (do guard (d < 9)
             my_reserved "Max_Symbols"
             aa <- readerPrec 9
             return (Max_Symbols aa))
         <|>
         (do guard (d < 9)
             my_reserved "Max_Variables"
             aa <- readerPrec 9
             return (Max_Variables aa)))
-- | Pretty-printer for 'Check'; inverse of the 'Reader' instance.
instance ToDoc Check where
    toDocPrec _ Left_Linear = text "Left_Linear"
    toDocPrec _ Linear = text "Linear"
    toDocPrec _ Non_Overlapping = text "Non_Overlapping"
    toDocPrec _ Constructor = text "Constructor"
    -- unary constructors are parenthesised at precedence 10 and above
    toDocPrec prec (Max_Rules k) = docParen (prec >= 10)
        (text "Max_Rules" </> fsep [toDocPrec 10 k])
    toDocPrec prec (Max_Size k) = docParen (prec >= 10)
        (text "Max_Size" </> fsep [toDocPrec 10 k])
    toDocPrec prec (Max_Symbols k) = docParen (prec >= 10)
        (text "Max_Symbols" </> fsep [toDocPrec 10 k])
    toDocPrec prec (Max_Variables k) = docParen (prec >= 10)
        (text "Max_Variables" </> fsep [toDocPrec 10 k])
|
Erdwolf/autotool-bonn
|
src/Rewriting/Check.hs
|
gpl-2.0
| 5,536
| 0
| 22
| 2,269
| 1,610
| 773
| 837
| 130
| 1
|
-- Three nested applications of the RingF functor.
-- NOTE(review): the name suggests two layers but three are nested here --
-- confirm against the accompanying book text before changing.
type RingF2 a = RingF (RingF (RingF a))
|
hmemcpy/milewski-ctfp-pdf
|
src/content/3.8/code/haskell/snippet08.hs
|
gpl-3.0
| 39
| 0
| 9
| 7
| 23
| 12
| 11
| 1
| 0
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module Main ( main ) where
import Data.Aeson (decode, FromJSON)
import Data.ByteString.Lazy.Char8 (ByteString, unpack)
import Data.Maybe (fromMaybe)
import Data.Text.Lazy (Text)
import Data.Text.Lazy.Encoding (encodeUtf8)
import Foreign.C.Error
import GHC.Generics (Generic)
import GHC.IO.Exception
import Network.FastCGI
import System.IO
import System.IO.Error
import System.Lock.FLock
import System.Log.Logger
import System.Log.Handler.Simple (streamHandler, GenericHandler)
import System.Log.Handler (setFormatter)
import System.Log.Formatter
import System.Posix.User
import System.Process
import Text.XHtml
import Text.Regex.PCRE ((=~))
-- | Global log verbosity for the updater; also controls whether the git
-- shell pipeline runs with tracing and unsilenced output (see 'silence').
_DEBUG_LEVEL :: Priority
--_DEBUG_LEVEL = WARNING
--_DEBUG_LEVEL = INFO
_DEBUG_LEVEL = DEBUG
-- | The single field of the GitHub push payload this program uses: the ref.
data GithubResponse = GithubResponse { ref :: !Text } deriving (Show, Generic)
-- FromJSON is derived generically, so the JSON key must be exactly "ref".
instance FromJSON GithubResponse
-- | Fallback extraction of the @"ref"@ field with a regex, used when aeson
-- cannot decode the payload; yields the empty string when nothing matches.
parseJsonByHand :: String -> IO String
parseJsonByHand payload =
    case refGroups of
        (found:_) -> return found
        []        -> return ""
  where
    (_, _, _, refGroups) =
        (payload =~ ("\\{\"ref\":\"(.*?)\"" :: String)) :: (String, String, String, [String])
-- | Minimal acknowledgement page echoed back to GitHub, optionally showing
-- the raw request payload.
page :: Maybe String -> Html
page mRequest = (body << h1 << ("Thanks GitHub!!"::String)) +++ (p << requestText)
  where
    requestText = case mRequest of
        Nothing      -> "Request: Nothing!" :: String
        Just request -> ("Request: "::String) ++ request
-- | Decode the webhook JSON and pull out the pushed ref, when it parses.
getRepo :: ByteString -> Maybe Text
getRepo jsonPayload = fmap ref (decode jsonPayload)
-- | Map a pushed ref to the checkout directory it should update;
-- refs not listed here are ignored.
repoPath :: String -> Maybe FilePath
repoPath refName = lookup refName pathTable
  where
    pathTable =
        [ ("refs/heads/develop",   "/var/www/html/api/latest")
        , ("refs/heads/v0_hotfix", "/var/www/html/api/0")
        , ("refs/heads/v3_hotfix", "/var/www/html/api/3")
        , ("refs/heads/v4_hotfix", "/var/www/html/api/4")
        , ("refs/heads/v5_hotfix", "/var/www/html/api/5")
        ]
-- | Map a pushed ref to the remote-tracking branch to fast-forward to.
repoRemoteBranch :: String -> Maybe String
repoRemoteBranch refName = lookup refName branchTable
  where
    branchTable =
        [ ("refs/heads/develop",   "origin/develop")
        , ("refs/heads/v0_hotfix", "origin/v0_hotfix")
        , ("refs/heads/v3_hotfix", "origin/v3_hotfix")
        , ("refs/heads/v4_hotfix", "origin/v4_hotfix")
        , ("refs/heads/v5_hotfix", "origin/v5_hotfix")
        ]
-- | Map a pushed ref to the local branch name to check out.
repoLocalBranch :: String -> Maybe String
repoLocalBranch refName = lookup refName branchTable
  where
    branchTable =
        [ ("refs/heads/develop",   "develop")
        , ("refs/heads/v0_hotfix", "v0_hotfix")
        , ("refs/heads/v3_hotfix", "v3_hotfix")
        , ("refs/heads/v4_hotfix", "v4_hotfix")
        , ("refs/heads/v5_hotfix", "v5_hotfix")
        ]
-- | Shell fragment appended after each git command: discards stdout
-- unless the global log level is DEBUG.
silence :: String
silence = if _DEBUG_LEVEL == DEBUG then " " else " >/dev/null "
-- | Build the shell pipeline that fast-forwards the checkout at @path@ to
-- @remoteBranch@: stash local changes, fetch, switch to @localBranch@ if a
-- different branch is checked out, fast-forward merge, pop the stash.
-- NOTE(review): path/branch values are spliced into the shell string
-- unescaped. They come only from the fixed repo* tables above, but this
-- would be shell injection if ever fed untrusted input.
gitCommand :: String -> String -> String -> String
gitCommand path remoteBranch localBranch =
    (if _DEBUG_LEVEL == DEBUG then "set -x && " else "") ++
    "cd " ++ path ++ silence ++ " && " ++
    "git stash " ++ silence ++ " && " ++
    "git fetch -p origin " ++ silence ++ " && " ++
    "(if [ \"$(git branch | cut -f 2 -d ' ')\" != \""++ localBranch ++ "\" ] ; then git checkout " ++ localBranch ++ silence ++ " ; fi) && " ++
    "git merge --ff-only " ++ remoteBranch ++ silence ++ " && " ++
    "git stash pop" ++ silence
-- | Run the assembled git pipeline through the shell and wait for it.
-- NOTE(review): 'CreateProcess' is built with a full record literal; on a
-- newer 'process' version with additional fields this will not compile --
-- confirm against the pinned dependency version.
runGitCommand :: String -> String -> String -> String -> IO ()
runGitCommand mylog path remoteBranch localBranch = do
    let command = gitCommand path remoteBranch localBranch
    liftIO $ debugM mylog $ "Running command: " ++ command
    let shellProc = CreateProcess {
          cmdspec = ShellCommand command,
          cwd = Nothing,
          env = Nothing,
          std_in = Inherit,
          std_out = UseHandle stdout,
          std_err = Inherit,
          close_fds = False,
          create_group = False }
    (_, _, _, procHandle) <- createProcess shellProc
    -- block until git finishes; the exit code itself is discarded
    _ <- waitForProcess procHandle
    return ()
-- | Update the working copy that corresponds to the pushed ref, if any.
-- Takes an exclusive, non-blocking file lock on the checkout's index.php so
-- concurrent webhook deliveries cannot run git simultaneously; an already
-- held lock is logged and skipped, not treated as an error.
updateRepo :: String -> String -> IO ()
updateRepo mylog repo = do
    let path = repoPath repo
    let remoteBranch = repoRemoteBranch repo
    let localBranch = repoLocalBranch repo
    case (path, remoteBranch, localBranch) of
      (Just path', Just remoteBranch', Just localBranch') -> do
        -- lock on the directory's index.php file
        catchIOError (runGitWithLock path' remoteBranch' localBranch') handler
      -- unknown refs are silently ignored
      _ -> return ()
  where
    runGitWithLock :: String -> String -> String -> IO()
    runGitWithLock path remoteBranch localBranch = do
        withLock (path ++ "/index.php") Exclusive NoBlock $
            runGitCommand mylog path remoteBranch localBranch
    handler :: IOError -> IO ()
    -- eWOULDBLOCK means another delivery holds the lock: benign, just log it
    handler IOError{ ioe_errno = Just errorNumber } | (Errno errorNumber) == eWOULDBLOCK =
        infoM mylog $ "Tried to update repo (" ++ repo ++ "), but another program has a lock on it."
    handler ex = warningM mylog $ "When updating repo (" ++ repo ++ "), unhandled exception: " ++ show ex
-- | The CGI entry point: set up stderr logging, read the GitHub webhook
-- payload, extract the pushed ref (aeson decode, with a regex fallback),
-- run the matching repo update, and answer with a small HTML page.
cgiMain :: CGI CGIResult
cgiMain = do
    liftIO $ hSetEncoding stderr utf8
    myStreamHandler <- fmap withFormatter $ liftIO $ streamHandler stderr _DEBUG_LEVEL
    let mylog = rootLoggerName
    liftIO $ updateGlobalLogger mylog (setLevel _DEBUG_LEVEL)
    liftIO $ updateGlobalLogger mylog (setHandlers [myStreamHandler])
    -- logged to diagnose permission problems when git runs as the CGI user
    uid <- liftIO getRealUserID
    euid <- liftIO getEffectiveUserID
    liftIO $ debugM mylog $ "Real User ID: " ++ (show uid) ++ ", Effective User ID: " ++ (show euid)
    payloadString <- getInput "payload"
    payloadByteString <- getInputFPS "payload"
    liftIO $ debugM mylog $ "Payload String: " ++ (fromMaybe "(No payload uploaded!)" payloadString)
    case payloadByteString of
      Nothing -> output $ renderHtml $ page Nothing
      Just payload' -> do
        let repo = fmap encodeUtf8 $ getRepo payload'
        case repo of
          -- aeson failed: fall back to the hand-rolled regex extraction
          Nothing -> do
            repo' <- liftIO $ parseJsonByHand $ unpack payload'
            case repo' of
              "" -> liftIO $ debugM mylog $ "Couldn't get repo to update!"
              _ -> liftIO $ updateRepo mylog repo'
          Just repo' -> do
            liftIO $ debugM mylog $ "Updating repo: " ++ (unpack repo')
            liftIO $ updateRepo mylog (unpack repo')
        output $ renderHtml $ page payloadString
-- | FastCGI entry point; 'handleErrors' reports CGI exceptions to the client.
main :: IO ()
main = runFastCGI (handleErrors cgiMain)
-- | Attach the updater's log-line format to a stream handler.
withFormatter :: GenericHandler Handle -> GenericHandler Handle
withFormatter h =
    setFormatter h (simpleLogFormatter "[$time GitHub-Updater $prio] $msg")
|
cdepillabout/haskell-github-updater
|
Main.hs
|
gpl-3.0
| 6,680
| 0
| 29
| 1,496
| 1,788
| 906
| 882
| 140
| 4
|
-- License: GPL v2 or later
{-# LANGUAGE ForeignFunctionInterface #-}
module Display(doDisplay, initDisplay) where
import Graphics.UI.GLUT
import qualified Sim
import Data.Array.Unboxed
import Data.Word(Word32)
import Control.Monad(when, unless, liftM2, forM_)
import System.Random
import ArrayUtils (arraySize)
import Foreign.Marshal.Array (withArray)
import Foreign.Ptr (Ptr)
import Foreign.C.Types (CFloat(CFloat))
-- This C code is for speed. I first wrote this code before vertex buffers
-- even existed in the OpenGL standard (and before I knew about them);
-- it's quite likely that modern GHC and OpenGL (ES) 2.0 could be plenty fast
-- without using C code.
foreign import ccall unsafe "foreignPollution" foreignPollution :: Word32 -> Ptr Double -> Word32 -> Word32 -> IO ()
-- We don't use the Random instance of GLfloat (CFloat) directly
-- because it uses realToFrac which might be slow.
-- | Uniform random 'GLfloat' in the given inclusive range: sample a plain
-- 'Float' and wrap it with the 'CFloat' constructor.
randomRIOGLF :: (Float, Float) -> IO GLfloat
randomRIOGLF bnds = fmap CFloat (randomRIO bnds)
-- | One-time OpenGL setup: a unit 2D projection and standard alpha blending.
initDisplay :: IO ()
initDisplay = do
    ortho2D 0 1 0 1
    blend $= Enabled ; blendFunc $= (SrcAlpha, OneMinusSrcAlpha)
-- Possibly the three int-like arguments should be better distinguished
-- or combined ("data TimeSituation"?) somehow. Or even made into one
-- value, simStepsSinceLastUpdate(Float), which could be used fine as a random
-- seed (which ms is currently used for), most likely (just transform it appropriately).
-- TODO.
-- | Render one frame of the world: floors, creatures, particles, machines,
-- the pollution overlay (via C) and a night-time mask, then swap buffers.
doDisplay :: Sim.World -> Int{-milliseconds per step-}
    -> Word32{-current time-} -> Word32{-last update time-} -> IO ()
doDisplay (Sim.World worldMap worldParticles worldCreatures worldPollution worldHour)
    msPerStep ms lastUpdateTime = do
  -- fraction of a simulation step elapsed since the last world update;
  -- used to interpolate particle positions and the day/night cycle
  let simStepsSinceLastUpdate = fromIntegral (ms - lastUpdateTime) / fromIntegral msPerStep :: GLfloat
  let ((minX,minY),(maxX,maxY)) = bounds worldMap
  let (numX,numY) = (fromIntegral $ maxX + 1 - minX, fromIntegral $ maxY + 1 - minY)
  let rotationReference = Vector3 0 0 1 :: Vector3 GLfloat
  let distFromCenter = 0.5 :: GLfloat
  -- run a drawing action with the origin moved to the centre of cell (x,y)
  let translatingTo :: Integral coord => (coord,coord) -> IO a -> IO a
      translatingTo (x,y) io = preservingMatrix $ do
        translate (Vector3 (fromIntegral x + distFromCenter) (fromIntegral y + distFromCenter) 0 :: Vector3 GLfloat)
        io
  preservingMatrix $ do
    -- scale so the whole grid maps onto the unit square of the projection
    scale (recip numX) (recip numY :: GLfloat) 1
    -- draw the floors
    --timeL "flor" $
    renderPrimitive Quads $ forM_ (assocs worldMap) $ \ ((x,y), _maybeMachine) -> do
      let (x',y') = (fromIntegral x :: GLfloat, fromIntegral y :: GLfloat)
      color (Color3 0.50 0.50 0.60 :: Color3 GLfloat); vertex (Vertex2 x' y' )
      color (Color3 0.45 0.55 0.45 :: Color3 GLfloat); vertex (Vertex2 (x'+1) y' )
      color (Color3 0.60 0.60 0.50 :: Color3 GLfloat); vertex (Vertex2 (x'+1) (y'+1) )
      color (Color3 0.40 0.35 0.40 :: Color3 GLfloat); vertex (Vertex2 x' (y'+1) )
    -- draw the creatures (what about multiple creatures in the same place?
    -- I guess one just gets drawn first, then the next...)
    --timeL "cret" $
    forM_ worldCreatures $ \ (loc, creature) -> translatingTo loc $ do
      case creature of
        Sim.Water _rng -> do
          color (Color4 0.4 0.5 0.9 0.6 :: Color4 GLfloat)
          renderPrimitive Quads $ do
            let shortDist = 0.5 :: GLfloat
            let longDist = 0.5 :: GLfloat
            vertex $ Vertex2 (-longDist) (-shortDist)
            vertex $ Vertex2 ( longDist) (-shortDist)
            vertex $ Vertex2 ( longDist) ( shortDist)
            vertex $ Vertex2 (-longDist) ( shortDist)
        Sim.Creature { Sim.creatureEnergy = energy } -> do
          -- a simple humanoid built from three quads: legs/torso, arms, head;
          -- torso colour brightens with the creature's energy
          renderPrimitive Quads $ do
            let legSpread = 0.35 :: GLfloat
            let legHeight = -0.4 :: GLfloat
            let waistSpread = 0.2 :: GLfloat
            let waistHeight = 0 :: GLfloat
            let armSpread = 0.3 :: GLfloat
            let shoulderSpread = 0.2 :: GLfloat
            let neckSpread = 0.1 :: GLfloat
            let neckHeight = 0.2 :: GLfloat
            let headHeight = 0.45 :: GLfloat
            let headSpread = 0.2 :: GLfloat
            color (Color3 0.5 0.5 (0.7 + realToFrac energy / 20) :: Color3 GLfloat)
            vertex $ Vertex2 (-legSpread) (legHeight)
            vertex $ Vertex2 ( legSpread) (legHeight)
            vertex $ Vertex2 ( waistSpread) (waistHeight)
            vertex $ Vertex2 (-waistSpread) (waistHeight)
            color (Color3 0.4 0.9 0.6 :: Color3 GLfloat)
            vertex $ Vertex2 (-armSpread) (waistHeight)
            vertex $ Vertex2 ( armSpread) (waistHeight)
            vertex $ Vertex2 ( shoulderSpread) (neckHeight)
            vertex $ Vertex2 (-shoulderSpread) (neckHeight)
            color (Color3 0.9 0.7 0.5 :: Color3 GLfloat)
            vertex $ Vertex2 (-neckSpread) (neckHeight)
            vertex $ Vertex2 ( neckSpread) (neckHeight)
            vertex $ Vertex2 ( headSpread) (headHeight)
            vertex $ Vertex2 (-headSpread) (headHeight)
    -- draw the particles (what relation to machines? what about the dangerous particles?)
    --timeL "part" $
    forM_ worldParticles $ \ (loc, Sim.Particle dir pType) -> translatingTo loc $ do
      rotate (Sim.dirAngle dir) rotationReference
      -- interpolate the particle forward along its heading between sim steps
      translate (Vector3 (simStepsSinceLastUpdate) 0 0)
      --hmm, could separate list by particle-type and
      --encompass more with each renderPrimitive...
      case pType of
        Sim.Energy strength -> do
          color (Color4 0.9 0.9 0.2 (log $ fromIntegral strength) :: Color4 GLfloat)
          renderPrimitive Quads $ do
            let shortDist = 0.15 :: GLfloat
            let longDist = 0.25 :: GLfloat
            vertex $ Vertex2 (-longDist) (-shortDist)
            vertex $ Vertex2 ( longDist) (-shortDist)
            vertex $ Vertex2 ( longDist) ( shortDist)
            vertex $ Vertex2 (-longDist) ( shortDist)
        -- could draw this after the machines...
        Sim.Chaos _rng -> do
          -- keep emitting random translucent triangles until a 1-in-10
          -- random draw ends the loop
          let
            io :: IO ()
            io = do
              alpha1 <- randomRIOGLF (0.7,1.0)
              color (Color4 0.7 0.2 0.7 alpha1 :: Color4 GLfloat)
              let randPos = randomRIOGLF (-0.5, 0.5)
              let randVertex = vertex =<< liftM2 Vertex2 randPos randPos
              randVertex ; randVertex ; randVertex
              randVal <- randomRIO (1, 10 :: Int)
              unless (randVal == 1) io
          renderPrimitive Triangles $ io
    -- draw the machines
    --timeL "mach" $
    forM_ [(l,m) | (l,Just m) <- assocs worldMap] $ \ (loc,machine) -> translatingTo loc $ do
      case machine of
        Sim.Generator dir energy -> do
          rotate (Sim.dirAngle dir) (Vector3 0 0 1 :: Vector3 GLfloat)
          renderPrimitive Quads $ do
            let dist = 0.3 :: GLfloat
            color (Color3 0.6 0.6 0.6 :: Color3 GLfloat)
            vertex $ Vertex2 (-dist) (-dist)
            vertex $ Vertex2 ( dist) (-dist)
            vertex $ Vertex2 ( dist) ( dist)
            vertex $ Vertex2 (-dist) ( dist)
            let shortDist = 0.1 :: GLfloat
            -- the inner stripe brightens with stored energy
            let yellow = fromIntegral energy / 10
            color (Color3 yellow yellow 0.0 :: Color3 GLfloat)
            vertex $ Vertex2 (-shortDist) (-shortDist)
            vertex $ Vertex2 ( dist) (-shortDist)
            vertex $ Vertex2 ( dist) ( shortDist)
            vertex $ Vertex2 (-shortDist) ( shortDist)
        Sim.Mirror mdir lSilvered rSilvered -> do
          rotate (case mdir of { Sim.NW_SE -> 0; Sim.SW_NE -> -90 } :: GLfloat) rotationReference
          let
            dist = 0.25 :: GLfloat
            silverDepth = 0.07 -- orthogonal, in both directions
          renderPrimitive Quads $ do
            color (Color3 0.9 0.9 0.9 :: Color3 GLfloat)
            vertex $ Vertex2 (-dist) (distFromCenter)
            vertex $ Vertex2 (distFromCenter) (-dist)
            vertex $ Vertex2 (dist) (-distFromCenter)
            vertex $ Vertex2 (-distFromCenter) (dist)
            -- silvered backing drawn only on the coated side(s)
            color (Color3 0.7 0.7 0.8 :: Color3 GLfloat)
            when lSilvered $ do
              vertex $ Vertex2 (-(distFromCenter-silverDepth)) (dist+silverDepth)
              vertex $ Vertex2 (dist+silverDepth) (-(distFromCenter-silverDepth))
              vertex $ Vertex2 (dist) (-distFromCenter)
              vertex $ Vertex2 (-distFromCenter) (dist)
            when rSilvered $ do
              vertex $ Vertex2 (-dist) (distFromCenter)
              vertex $ Vertex2 (distFromCenter) (-dist)
              vertex $ Vertex2 (distFromCenter-silverDepth) (-(dist+silverDepth))
              vertex $ Vertex2 (-(dist+silverDepth)) (distFromCenter-silverDepth)
        Sim.Greenery -> do
          -- tree: a polygon canopy plus triangles for trunk and branches
          renderPrimitive Polygon $ do
            color (Color3 0.2 0.9 0.3 :: Color3 GLfloat)
            vertex $ Vertex2 (-0.3) (0.2 :: GLfloat)
            vertex $ Vertex2 (0) (distFromCenter)
            vertex $ Vertex2 (0.3) (0.2 :: GLfloat)
            vertex $ Vertex2 (0.5) (-0.3 :: GLfloat)
            vertex $ Vertex2 (0.3) (-0.4 :: GLfloat)
            vertex $ Vertex2 (0.1) (-0.35 :: GLfloat)
            vertex $ Vertex2 (-0.1) (-0.35 :: GLfloat)
            vertex $ Vertex2 (-0.3) (-0.4 :: GLfloat)
            vertex $ Vertex2 (-0.5) (-0.3 :: GLfloat)
          renderPrimitive Triangles $ do
            color (Color3 0.6 0.4 0.3 :: Color3 GLfloat)
            let trunkWidth = 0.1 :: GLfloat
            let trunkY = 0.25 :: GLfloat
            vertex $ Vertex2 (0) (trunkY)
            vertex $ Vertex2 (trunkWidth) (-distFromCenter)
            vertex $ Vertex2 (-trunkWidth) (-distFromCenter)
            let branchTipY = -0.1 :: GLfloat
            let branchTipX = 0.25 :: GLfloat
            let branchStartHigh = -0.3 :: GLfloat
            let branchStartLow = -0.4 :: GLfloat
            vertex $ Vertex2 (0) (branchStartHigh)
            vertex $ Vertex2 (branchTipX) (branchTipY)
            vertex $ Vertex2 (0) (branchStartLow)
            vertex $ Vertex2 (0) (branchStartHigh)
            vertex $ Vertex2 (-branchTipX) (branchTipY)
            vertex $ Vertex2 (0) (branchStartLow)
        Sim.Storm energy _rng -> do
          -- recursive: draws 'amount' random triangles, cycling through the
          -- four cell edges via 'side'
          let
            io :: Double -> Int -> IO ()
            io amount side = do
              alpha1 <- randomRIOGLF (0.7,1.0)
              color (Color4 0.4 0.3 0.7 alpha1 :: Color4 GLfloat)
              let randPos a b = randomRIOGLF (a, b)
              let randVertex = [
                    vertex =<< liftM2 Vertex2 (randPos 0 0.5) (randPos (-0.5) 0.5),
                    vertex =<< liftM2 Vertex2 (randPos (-0.5) 0) (randPos (-0.5) 0.5),
                    vertex =<< liftM2 Vertex2 (randPos (-0.5) 0.5) (randPos 0 0.5),
                    vertex =<< liftM2 Vertex2 (randPos (-0.5) 0.5) (randPos (-0.5) 0)]
                    !! side
              randVertex ; randVertex ; randVertex
              unless (amount < 0) ( io (amount - 1) ( (side + 1) `mod` 4 ) )
          renderPrimitive Triangles $ io ((energy+2) * 4) 0
        Sim.Mountain -> do
          -- How about shadows i.e. "The sun rises in the east"?
          -- five overlapping triangles at different offsets/shades
          renderPrimitive Triangles $ do
            let width = 0.3 :: GLfloat
            let height = 0.4 :: GLfloat
            let offsetX = 0.15 :: GLfloat
            let offsetY = 0.35 :: GLfloat
            color (Color3 0.45 0.4 0.35 :: Color3 GLfloat)
            vertex $ Vertex2 ((-width)-offsetX) (offsetY-0.1)
            vertex $ Vertex2 (-offsetX) (height+offsetY-0.1)
            vertex $ Vertex2 (width-offsetX) (offsetY-0.1)
            color (Color3 0.25 0.2 0.15 :: Color3 GLfloat)
            vertex $ Vertex2 ((-width)+offsetX) (offsetY)
            vertex $ Vertex2 (offsetX) (height+offsetY)
            vertex $ Vertex2 (width+offsetX) (offsetY)
            color (Color3 0.35 0.4 0.45 :: Color3 GLfloat)
            vertex $ Vertex2 (-width) (0)
            vertex $ Vertex2 (0) (height)
            vertex $ Vertex2 (width) (0)
            color (Color3 0.2 0.2 0.2 :: Color3 GLfloat)
            vertex $ Vertex2 ((-width)-offsetX) (-offsetY)
            vertex $ Vertex2 (-offsetX) (height-offsetY)
            vertex $ Vertex2 (width-offsetX) (-offsetY)
            color (Color3 0.3 0.4 0.3 :: Color3 GLfloat)
            vertex $ Vertex2 ((-width)+offsetX) ((-offsetY)+0.05)
            vertex $ Vertex2 (offsetX) ((height-offsetY)+0.05)
            vertex $ Vertex2 (width+offsetX) ((-offsetY)+0.05)
        Sim.Riverbed{} -> error "Unimplemented yet"
    -- draw the pollution!
    -- (the 'resolution' should depend on windowsize / (i.e.?) number of places displayed)
    -- currently just 5x5 per place though
    -- should it be invisible where really low?
    do
      let (width, height) = arraySize worldPollution
      -- marshalling takes about 1 ms by last measurement
      withArray (elems worldPollution) (\cArr -> foreignPollution ms cArr (fromIntegral width) (fromIntegral height))
    -- draw the night-time! (er.) (HACK!!!)
    do
      let dayFraction = case worldHour of Sim.WorldHour h -> (realToFrac h + simStepsSinceLastUpdate) / realToFrac Sim.dayLength
      let dayLight = if dayFraction >= 0.5 then 0 else sin (dayFraction * pi * 2)
      let nightMasking = (1 - dayLight) / 2 /10--since it doesn't do anything
      color (Color4 0.1 0.1 0.3 nightMasking :: Color4 GLfloat)
      -- a translucent quad over the whole grid
      renderPrimitive Quads $ do
        vertex $ Vertex2 0 (0::GLfloat)
        vertex $ Vertex2 numX (0::GLfloat)
        vertex $ Vertex2 numX numY
        vertex $ Vertex2 0 numY
  swapBuffers
  reportErrors
|
idupree/Pollutocracy
|
Display.hs
|
gpl-3.0
| 12,241
| 20
| 34
| 2,759
| 4,435
| 2,216
| 2,219
| 232
| 10
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Chap03.Data.RedBlackTree10 where
import qualified Chap03.Exercise10 as Ex10
import Chap02.Data.Set
import Chap03.Data.RedBlackTree
import Control.Applicative (liftA2, pure)
import Data.Foldable
import Prelude hiding (foldr)
import Test.QuickCheck (Arbitrary(..), sized)
-- | Wrapper around 'RedBlackTree' whose 'Set' instance uses the
-- Exercise-10 insertion algorithm instead of the default one.
newtype RedBlackTree10 a = C3E10 {unC3E10 :: RedBlackTree a}
-- | Delegates to the wrapped tree's 'Show'.
instance Show a => Show (RedBlackTree10 a) where
  show (C3E10 t) = show t
-- | Folds over the wrapped tree.
instance Foldable RedBlackTree10 where
  foldr f z (C3E10 h) = foldr f z h
-- | 'Set' operations delegate to the wrapped tree; insertion uses the
-- Exercise-10 algorithm.
instance Ord a => Set RedBlackTree10 a where
  empty = C3E10 empty
  member x = member x . unC3E10
  insert x = C3E10 . Ex10.insert x . unC3E10
-- | Builds a random tree by inserting QuickCheck's size-parameter-many
-- arbitrary elements into 'empty'.
instance (Arbitrary a, Ord a) => Arbitrary (RedBlackTree10 a) where
  arbitrary = sized arb
    where
      arb 0 = pure empty
      arb n = liftA2 insert arbitrary $ arb (n - 1)
|
stappit/okasaki-pfds
|
src/Chap03/Data/RedBlackTree10.hs
|
gpl-3.0
| 917
| 0
| 11
| 188
| 311
| 167
| 144
| 23
| 0
|
{-# LANGUAGE ExistentialQuantification #-}
import Data.Aeson (ToJSON (toJSON), encode)
import Data.ByteString.Lazy (toStrict)
import Data.Text.Buildable (Buildable (build))
import Data.Text.Encoding (decodeUtf8)
import qualified Data.Text.IO as TIO
import Serokell.Util (show')
import qualified RSCoin.Core as C
import qualified RSCoin.Explorer.WebTypes as W
-- | Existential wrapper: any value that is simultaneously 'Buildable',
-- 'Show'able, and JSON-serializable, so the demo can treat them uniformly.
data V =
    forall v. (Buildable v, Show v, ToJSON v) => V v
-- Stub 'Buildable' instances: these web types have no real 'build'
-- implementation, so each one renders a fixed placeholder string.
instance Buildable W.ServerError where
    build _ = "build is not defined"
instance Buildable W.ControlMsg where
    build _ = "build is not defined"
instance Buildable W.AddressInfoMsg where
    build _ = "build is not defined"
instance Buildable W.OutcomingMsg where
    build _ = "build is not defined"
-- | Print each demo value through every one of its interfaces:
-- 'Show', 'Buildable' (via 'show''), 'toJSON', and full JSON encoding.
main :: IO ()
main = mapM_ dump values
  where
    coin = C.Coin 0 0.3242342
    key = C.testBankPublicKey
    hash = C.unsafeHash ()
    addr = C.Address key
    tx = C.Transaction [(hash, 0, coin)] [(addr, coin)]
    err = W.ParseError "github" "error"
    introMsg = W.CMSetAddress addr
    aiMsg = W.AIGetTransactions (0, 2)
    outMsg = W.OMTxNumber addr 8 (10, 10)
    values =
        [ V coin
        , V key
        , V hash
        , V addr
        , V tx
        , V err
        , V introMsg
        , V aiMsg
        , V outMsg]
    -- Render one wrapped value in all four formats, separated by headers.
    dump (V v) = do
        TIO.putStrLn "Show"
        print v
        TIO.putStrLn "Buildable"
        TIO.putStrLn $ show' v
        TIO.putStrLn "ToJSON"
        print $ toJSON v
        TIO.putStrLn "encode"
        TIO.putStrLn . decodeUtf8 . toStrict . encode . toJSON $ v
        TIO.putStrLn "___"
|
input-output-hk/rscoin-haskell
|
src/JsonDemo/Main.hs
|
gpl-3.0
| 1,834
| 0
| 17
| 665
| 524
| 276
| 248
| 51
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CodeDeploy.StopDeployment
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Attempts to stop an ongoing deployment.
--
-- <http://docs.aws.amazon.com/codedeploy/latest/APIReference/API_StopDeployment.html>
module Network.AWS.CodeDeploy.StopDeployment
(
-- * Request
StopDeployment
-- ** Request constructor
, stopDeployment
-- ** Request lenses
, sdDeploymentId
-- * Response
, StopDeploymentResponse
-- ** Response constructor
, stopDeploymentResponse
-- ** Response lenses
, sdrStatus
, sdrStatusMessage
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CodeDeploy.Types
import qualified GHC.Exts
-- | Request payload: the single deployment to stop.
newtype StopDeployment = StopDeployment
    { _sdDeploymentId :: Text
    } deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'StopDeployment' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'sdDeploymentId' @::@ 'Text'
--
stopDeployment :: Text -- ^ 'sdDeploymentId'
               -> StopDeployment
stopDeployment deploymentId = StopDeployment
    { _sdDeploymentId = deploymentId
    }
-- | The unique ID of a deployment.
sdDeploymentId :: Lens' StopDeployment Text
sdDeploymentId = lens _sdDeploymentId (\s a -> s { _sdDeploymentId = a })
-- | Response payload: optional stop status plus an optional message.
data StopDeploymentResponse = StopDeploymentResponse
    { _sdrStatus :: Maybe StopStatus
    , _sdrStatusMessage :: Maybe Text
    } deriving (Eq, Read, Show)
-- | 'StopDeploymentResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'sdrStatus' @::@ 'Maybe' 'StopStatus'
--
-- * 'sdrStatusMessage' @::@ 'Maybe' 'Text'
--
stopDeploymentResponse :: StopDeploymentResponse
stopDeploymentResponse = StopDeploymentResponse Nothing Nothing
-- | The status of the stop deployment operation:
--
-- Pending: The stop operation is pending. Succeeded: The stop operation
-- succeeded.
sdrStatus :: Lens' StopDeploymentResponse (Maybe StopStatus)
sdrStatus = lens _sdrStatus (\s a -> s { _sdrStatus = a })
-- | An accompanying status message.
sdrStatusMessage :: Lens' StopDeploymentResponse (Maybe Text)
sdrStatusMessage = lens _sdrStatusMessage (\s a -> s { _sdrStatusMessage = a })
-- The request is sent to the service root; parameters travel in the JSON body.
instance ToPath StopDeployment where
    toPath = const "/"
instance ToQuery StopDeployment where
    toQuery = const mempty
instance ToHeaders StopDeployment
instance ToJSON StopDeployment where
    toJSON StopDeployment{..} = object
        [ "deploymentId" .= _sdDeploymentId
        ]
instance AWSRequest StopDeployment where
    type Sv StopDeployment = CodeDeploy
    type Rs StopDeployment = StopDeploymentResponse
    request = post "StopDeployment"
    response = jsonResponse
-- Both response fields are optional in the wire format.
instance FromJSON StopDeploymentResponse where
    parseJSON = withObject "StopDeploymentResponse" $ \o -> StopDeploymentResponse
        <$> o .:? "status"
        <*> o .:? "statusMessage"
|
dysinger/amazonka
|
amazonka-codedeploy/gen/Network/AWS/CodeDeploy/StopDeployment.hs
|
mpl-2.0
| 3,898
| 0
| 11
| 839
| 516
| 311
| 205
| 61
| 1
|
{-# LANGUAGE TemplateHaskell, DataKinds #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Model.URL
( URI
, validHDL
, hdlURL
, parseURL
-- for testing
, urlLink
) where
import Control.Monad (guard)
import Data.Aeson (ToJSON(..))
import Data.Char (isDigit)
import Data.Maybe (fromMaybe, isNothing)
import Database.PostgreSQL.Typed.Types (PGParameter(..), PGColumn(..))
import Language.Haskell.TH.Lift (deriveLiftMany)
import Network.URI
import qualified Text.Blaze as H
-- | Prepare a URI value for using in a query or storing in a table:
-- render it to its textual form with no escaping applied.
toPG :: URI -> String
toPG = flip (uriToString id) ""
-- | Extract a URI value, after it has been retrieved using a query.
-- Errors out if the stored string is not a parseable URI.
fromPG :: String -> URI
fromPG s = fromMaybe (error $ "pgDecode URI: " ++ s) (parseURI s)
-- | From URI value into value to be provided to database
-- (delegates to the @text@ column codecs after rendering with 'toPG').
instance PGParameter "text" URI where
  pgEncode t = pgEncode t . toPG
  pgEncodeValue e t = pgEncodeValue e t . toPG
  pgLiteral t = pgLiteral t . toPG
-- | From database value to URI value
instance PGColumn "text" URI where
  pgDecode t = fromPG . pgDecode t
  pgDecodeValue e t = fromPG . pgDecodeValue e t
-- | Format a URL value for inclusion in a JSON object
instance ToJSON URI where
  toJSON = toJSON . show
-- | Format a URI for display in a server side generated html page;
-- special doi:/hdl: URIs are first expanded to http form via 'urlLink'.
instance H.ToValue URI where
  toValue = H.stringValue . show . urlLink
  preEscapedToValue = H.preEscapedStringValue . show . urlLink
-- | A valid HDL handle consists of digits with periods interleaved, ending
-- with a slash (followed by anything). See handle.net for more information.
-- Implemented as a two-state scanner over the string.
validHDL :: String -> Bool
validHDL = needDigit (0 :: Int)
  where
    -- A digit is required next; anything else (including end of input) fails.
    needDigit dots (c:rest) | isDigit c = afterDigit dots rest
    needDigit _ _ = False
    -- After a digit: '/' terminates (valid only if a '.' was seen),
    -- '.' starts another digit group, and anything else must be a digit.
    afterDigit dots ('/':_) = dots > 0
    afterDigit dots ('.':rest) = needDigit (succ dots) rest
    afterDigit dots rest = needDigit dots rest
-- | Build an HDL url from a DOI: a scheme-only @hdl:@ URI whose path is
-- the DOI, with no authority, query, or fragment.
hdlURL :: String -> URI
hdlURL doi = URI "hdl:" Nothing doi "" ""
-- | Start from either a shorthand DOI value or a doi/hdl scheme or doi domain, and
-- expand out to canonical HDL based URI. For all other http/https URLs, pass value through
parseURL :: String -> Maybe URI
-- Shorthand DOI ("10.<digit>...") is rewritten to a "doi:" URI and re-parsed.
parseURL d@('1':'0':'.':c:_) | isDigit c = parseURL $ "doi:" ++ d
parseURL s = do
  u <- parseURI s
  -- doi:/hdl: URIs without an authority, and http links to dx.doi.org or
  -- doi.org, are normalized to an authority-less "hdl:" URI whose path
  -- must be a valid HDL handle.
  -- NOTE(review): only "http:" doi.org links are normalized; https://doi.org
  -- falls through to the pass-through branch — confirm this is intended.
  if uriScheme u `elem` ["doi:","hdl:"] && isNothing (uriAuthority u) ||
     uriScheme u == "http:"
     && (uriAuthority u == Just (URIAuth "" "dx.doi.org" "") || uriAuthority u == Just (URIAuth "" "doi.org" ""))
    then do
      let p = dropWhile ('/' ==) $ uriPath u
      guard $ validHDL p
      return u
        { uriScheme = "hdl:"
        , uriAuthority = Nothing
        , uriPath = p
        }
    else do
      -- Any other scheme than http/https is rejected.
      guard $ uriScheme u `elem` ["http:","https:"]
      return u
-- | Utility for building a URI value from a domain and path: forces the
-- http scheme, installs the given host, and roots the existing path.
httpAuth :: String -> URI -> URI
httpAuth a u = u{ uriScheme = "http:", uriAuthority = Just (URIAuth "" a ""), uriPath = '/':uriPath u }
-- | Expand special doi and hdl scheme URIs to equivalent http scheme URIs.
-- Allow http URIs to pass through
urlLink :: URI -> URI
urlLink u@URI{ uriScheme = "hdl:" } = httpAuth "hdl.handle.net" u
urlLink u@URI{ uriScheme = "doi:" } = httpAuth "doi.org" u
urlLink u = u
-- Template Haskell: derive 'Lift' instances so URI values can appear in splices.
deriveLiftMany [''URIAuth, ''URI]
|
databrary/databrary
|
src/Model/URL.hs
|
agpl-3.0
| 3,220
| 0
| 15
| 701
| 926
| 491
| 435
| -1
| -1
|
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE OverloadedStrings #-}
-- | Defines conversion between Cabal package names and
-- Google3 package names.
module Google.Google3.Tools.Cabal2Build.PackageName (
google3packageNameForCabalPackageName,
google3packageNameForCabalPackageId,
cabalVersionForSegment) where
import qualified Data.Text as Text
import qualified Distribution.Package as Cabal
import qualified Distribution.Text as Cabal
import qualified Distribution.Version as Cabal
import qualified Google.Google3.Name as Google3
import Text.Read (readMaybe)
-- | Converts an unversioned Cabal package name to a valid Google3 package
-- name. Dashes become underscores.
--
-- e.g. "utf8-string" becomes "//third_party/haskell/utf8_string"
google3packageNameForCabalPackageName :: Cabal.PackageName
                                      -> Google3.PackageName
google3packageNameForCabalPackageName name =
    Google3.PackageName
      [ "third_party"
      , "haskell"
      , Text.pack (map dashToUnderscore (Cabal.display name))
      ]
  where
    dashToUnderscore '-' = '_'
    dashToUnderscore c = c
-- | Converts a versioned Cabal package name to a valid Google3 package
-- name.
-- e.g. "utf8-string-0.3.6" becomes "//third_party/haskell/utf8_string/v0_3_6"
google3packageNameForCabalPackageId :: Cabal.PackageIdentifier
                                    -> Google3.PackageName
google3packageNameForCabalPackageId (Cabal.PackageIdentifier name version) =
    Google3.subpackageName basePackage [versionDir]
  where
    basePackage = google3packageNameForCabalPackageName name
    -- "0.3.6" -> "v0_3_6"
    versionDir =
        Text.cons 'v' (Text.intercalate "_" (map (Text.pack . show) numbers))
    numbers = Cabal.versionNumbers version
-- | Converts a Google3 version directory name into a Cabal version.
-- For example, "v0_3_6" becomes 0.3.6. Returns 'Nothing' when the
-- leading 'v' is missing or any segment is not a number.
cabalVersionForSegment :: Text.Text -> Maybe Cabal.Version
cabalVersionForSegment segment =
    case Text.uncons segment of
      Just ('v', rest) ->
        Cabal.mkVersion <$> mapM (readMaybe . Text.unpack) (Text.splitOn "_" rest)
      _ -> Nothing
|
google/cabal2bazel
|
src/Google/Google3/Tools/Cabal2Build/PackageName.hs
|
apache-2.0
| 2,668
| 0
| 12
| 517
| 350
| 202
| 148
| 36
| 2
|
import System.Random
import Data.Char as C
import Data.List as L
main :: IO ()
main = undefined -- TODO: entry point is still a stub; nothing below is wired up yet
-- | Connection weights and activations are plain doubles.
type Weight = Double
-- data Network a b = Input (a -> Weight)
--                | Neuron Weight [(Network a b, Weight)]
--                | Output (Weight -> b) (Network a b)
-- | A neuron: its bias plus weighted connections to upstream neurons.
data Neuron = Neuron Weight [(Neuron, Weight)] deriving (Show)
-- | A network: input readers, neurons, and an output writer.
data Network a b = Network [a -> Weight] [Neuron] ([Weight] -> b)
-- fun1 = average . map (fromIntegral . length) . words
-- fun2 = (\str -> foldl (\acc c -> acc + (fromIntegral (ord c) / 122)) 0 str)
-- fun3 = (\str -> (foldl (\acc c -> acc + if c `elem` "aoeui" then 1 else 0) 0 str) / fromIntegral (length str) )
-- neuronB1 = Neuron 0.5 [(neuronA1, 0.5), (neuronA2, 0.5), (neuronA3, 0.5)]
-- neuronB2 = Neuron 0.5 [(neuronA1, 0.5), (neuronA2, 0.5), (neuronA3, 0.5)]
-- neuronB3 = Neuron 0.5 [(neuronA1, 0.5), (neuronA2, 0.5), (neuronA3, 0.5)]
-- neuronB4 = Neuron 0.5 [(neuronA1, 0.5), (neuronA2, 0.5), (neuronA3, 0.5)]
-- neuronC1 = Neuron 0.5 [(neuronB1, 0.5), (neuronB2, 0.5), (neuronB3, 0.5), (neuronB4, 0.5)]
-- neuronC2 = Neuron 0.5 [(neuronB1, 0.5), (neuronB2, 0.5), (neuronB3, 0.5), (neuronB4, 0.5)]
-- Maybe inline this?
-- | Standard logistic function, mapping any real to the open interval (0, 1).
sigmoid :: Floating a => a -> a
sigmoid t = 1 / (1 + exp (-t))
-- | Inverse of 'sigmoid' (the logit function): @diomgis (sigmoid x) == x@.
-- Only meaningful for inputs strictly between 0 and 1.
-- (Type signature added: the top-level binding previously had none,
-- which -Wall flags and which obscured the intended Floating contract.)
diomgis :: Floating a => a -> a
diomgis t = log ((-t) / (t - 1))
-- Precondition: the container is non-empty (length xs > 0).
average :: (Fractional a, Foldable t) => t a -> a
average xs = total / fromIntegral count
  where
    total = sum xs
    count = length xs
-- | Activation of a neuron for input @a@: the sigmoid of the weighted
-- average of upstream activations, shifted by the negated bias.
-- NOTE(review): recursion re-evaluates every upstream neuron on each call
-- (no memoization), and an empty connection list divides by zero — confirm
-- callers always supply non-empty @neurons@.
update :: Neuron -> Weight -> Weight
update (Neuron bias neurons) a = sigmoid $ weightedSum / (fromIntegral (length neurons))
  where
    weightedSum = foldl (\acc (neuron, weight) -> acc + update neuron a * weight) (-bias) neurons
-- | 'map' with the arguments swapped, for pipeline-style call sites.
for :: [a] -> (a -> b) -> [b]
for xs f = map f xs
-- runNetwork :: Network a b -> a -> b
-- runNetwork (Network inputReaders neurons outputWriter ) input = outputWriter $
-- for inputReaders (\inputReader ->
-- for neurons (\neuron ->
-- update neuron (inputReader input)
-- )
-- )
-- trainNeuron :: Weight -> a -> Network a b -> Network a b
-- trainNeuron expected input (Neuron bias neurons) = (Neuron bias newNeurons)
-- where
-- newNeurons = map (\(network, weight) -> let result = update network input in (network, newWeight weight expected result)) neurons
-- newWeight w e r = w * (0.5 + diomgis (abs (e - r)))
-- trainNeuron _ _ n = n
-- train :: a -> b -> (b -> b -> Weight) -> Network a b -> Network a b
-- train input expected scoring network = undefined
-- where result = runNetwork network input
-- score = scoring result expected
|
niilohlin/neural
|
neural2.hs
|
apache-2.0
| 2,545
| 0
| 12
| 579
| 394
| 229
| 165
| 18
| 1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
-- Some basic structures about nodes in a graph, etc.
module Spark.Core.StructuresInternal(
NodeName(..),
NodePath(..),
NodeId(..),
FieldName(..),
FieldPath(..),
ComputationID(..),
catNodePath,
fieldName,
unsafeFieldName,
emptyFieldPath,
nullFieldPath,
headFieldPath,
fieldPath,
prettyNodePath,
) where
import qualified Data.Text as T
import Data.ByteString(ByteString)
import GHC.Generics (Generic)
import Data.Hashable(Hashable)
import Data.List(intercalate)
import qualified Data.Aeson as A
import Data.String(IsString(..))
import Data.Vector(Vector)
import qualified Data.Vector as V
import Spark.Core.Internal.Utilities
-- | The name of a node (without path information)
newtype NodeName = NodeName { unNodeName :: T.Text } deriving (Eq, Ord)
-- | The user-defined path of the node in the hierarchical representation of the graph.
newtype NodePath = NodePath { unNodePath :: Vector NodeName } deriving (Eq, Ord)
-- | The unique ID of a node. It is based on the parents of the node
-- and all the relevant intrinsic values of the node.
newtype NodeId = NodeId { unNodeId :: ByteString } deriving (Eq, Ord, Generic)
-- | The name of a field in a sql structure
-- This structure ensures that proper escaping happens if required.
-- TODO: prevent the constructor from being used, it should be checked first.
newtype FieldName = FieldName { unFieldName :: T.Text } deriving (Eq)
-- | A path to a nested field in a sql structure.
-- This structure ensures that proper escaping happens if required.
newtype FieldPath = FieldPath { unFieldPath :: Vector FieldName } deriving (Eq)
{-| A unique identifier for a computation (a batch of nodes sent for execution
to Spark).
-}
data ComputationID = ComputationID {
  unComputationID :: !T.Text
} deriving (Eq, Show, Generic)
-- | A safe constructor for field names that fixes all the issues relevant to
-- SQL escaping
-- TODO: proper implementation (currently never fails)
fieldName :: T.Text -> Either String FieldName
fieldName = Right . FieldName
-- | Constructs the field name, but will fail if the content is not correct.
-- (With the current 'fieldName' it can never actually fail.)
unsafeFieldName :: (HasCallStack) => T.Text -> FieldName
unsafeFieldName = forceRight . fieldName
-- | A safe constructor for field names that fixes all the issues relevant to SQL escaping
-- TODO: proper implementation (currently wraps the whole text as one segment)
fieldPath :: T.Text -> Either String FieldPath
fieldPath x = Right . FieldPath . V.singleton $ FieldName x
-- | The path with no segments.
emptyFieldPath :: FieldPath
emptyFieldPath = FieldPath V.empty
-- | True when the path has no segments.
nullFieldPath :: FieldPath -> Bool
nullFieldPath = V.null . unFieldPath
-- | The first segment of the path, if any.
headFieldPath :: FieldPath -> Maybe FieldName
headFieldPath (FieldPath segments) =
  if V.null segments then Nothing else Just (V.head segments)
-- | The concatenated path, segments joined with "/". This is the inverse
-- function of fieldPath.
-- | TODO: this one should be hidden?
catNodePath :: NodePath -> T.Text
catNodePath (NodePath np) =
  T.intercalate "/" (unNodeName <$> V.toList np)
prettyNodePath :: NodePath -> T.Text
-- Only a single slash, double slashes are reserved for the case
-- of global paths (including session and computation)
prettyNodePath np = "/" <> catNodePath np
-- Abbreviated display: long IDs are truncated to five characters plus "..".
instance Show NodeId where
  show (NodeId bs) = let s = show bs in
    if length s > 9 then
      (drop 1 . take 6) s ++ ".."
    else
      s
instance Show NodeName where
  show (NodeName nn) = T.unpack nn
instance Show NodePath where
  show np = T.unpack $ T.concat ["NPath(", catNodePath np, ")" ]
instance Show FieldPath where
  show (FieldPath l) =
    intercalate "." (show <$> V.toList l)
instance Show FieldName where
  -- TODO(kps) escape the '.' characters in the field name
  show (FieldName fn) = T.unpack fn
instance Hashable NodeId
instance IsString FieldName where
  fromString = FieldName . T.pack
-- JSON serialization is just the unwrapped text content of each newtype.
instance A.ToJSON NodeName where
  toJSON = A.toJSON . unNodeName
instance A.FromJSON NodeName where
  -- TODO: more parse checks
  parseJSON x = NodeName <$> A.parseJSON x
instance A.ToJSON NodePath where
  toJSON = A.toJSON . unNodePath
instance A.FromJSON NodePath where
  parseJSON x = NodePath <$> A.parseJSON x
instance A.ToJSON FieldName where
  toJSON = A.toJSON . unFieldName
instance A.ToJSON FieldPath where
  toJSON = A.toJSON . unFieldPath
-- Ordering compares the underlying text.
instance Ord FieldName where
  compare f1 f2 = compare (unFieldName f1) (unFieldName f2)
instance A.ToJSON ComputationID where
  toJSON = A.toJSON . unComputationID
|
krapsh/kraps-haskell
|
src/Spark/Core/StructuresInternal.hs
|
apache-2.0
| 4,463
| 0
| 13
| 784
| 1,037
| 576
| 461
| 89
| 1
|
-- | Distribute @c@ candies among @n@ people: give 1, 2, 3, ... candies
-- in turn, wrapping around, and hand the entire remaining supply to the
-- next person once it no longer covers the scheduled gift.
-- e.g. @distribution_Candies 7 4 == [1,2,3,1]@
distribution_Candies :: Int -> Int -> [Int]
distribution_Candies c n = go 1 c ([], replicate n 0)
  where
    -- @(served, queue)@ is a zipper over the people; wrap when the queue empties.
    go :: Int -> Int -> ([Int], [Int]) -> [Int]
    go gift remaining (served, []) = go gift remaining ([], served)
    go gift remaining (served, p:ps)
      | gift >= remaining = served ++ (p + remaining) : ps
      | otherwise =
          go (gift + 1) (remaining - gift) (served ++ [p + gift], ps)
-- | Demo: print the candy distribution for two sample inputs.
main :: IO ()
main = mapM_ (print . uncurry distribution_Candies) [(7, 4), (10, 3)]
|
ccqpein/Arithmetic-Exercises
|
Distribute-Candies-to-People/DCtP.hs
|
apache-2.0
| 514
| 0
| 13
| 133
| 268
| 141
| 127
| 12
| 2
|
-- |
-- Module : Crypto.PubKey.RSA.Prim
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : Good
--
module Crypto.PubKey.RSA.Prim
(
-- * decrypt primitive
dp
-- * encrypt primitive
, ep
) where
import Data.ByteString (ByteString)
import Crypto.PubKey.RSA.Types (Blinder(..))
import Crypto.Types.PubKey.RSA
import Crypto.Number.ModArithmetic (expFast, expSafe)
import Crypto.Number.Serialize (os2ip, i2ospOf_)
{- dpSlow computes the decrypted message not using any precomputed cache value.
   only n and d need to valid. -}
-- Uses the timing-resistant exponentiation ('expSafe') on the secret exponent.
dpSlow :: PrivateKey -> ByteString -> ByteString
dpSlow pk c = i2ospOf_ (private_size pk) $ expSafe (os2ip c) (private_d pk) (private_n pk)
{- dpFast computes the decrypted message more efficiently if the
   precomputed private values are available. mod p and mod q are faster
   to compute than mod pq -}
dpFast :: Blinder -> PrivateKey -> ByteString -> ByteString
dpFast (Blinder r rm1) pk c =
    i2ospOf_ (private_size pk) (multiplication rm1 (m2 + h * (private_q pk)) (private_n pk))
  where
    -- Blind the ciphertext with r^e before exponentiation; rm1 (the
    -- inverse factor carried by the 'Blinder') removes it at the end.
    re = expFast r (public_e $ private_pub pk) (private_n pk)
    iC = multiplication re (os2ip c) (private_n pk)
    -- CRT recombination from the residues mod p and mod q.
    m1 = expSafe iC (private_dP pk) (private_p pk)
    m2 = expSafe iC (private_dQ pk) (private_q pk)
    h = ((private_qinv pk) * (m1 - m2)) `mod` (private_p pk)
-- Same CRT computation as 'dpFast' but without message blinding.
dpFastNoBlinder :: PrivateKey -> ByteString -> ByteString
dpFastNoBlinder pk c = i2ospOf_ (private_size pk) (m2 + h * (private_q pk))
  where iC = os2ip c
        m1 = expSafe iC (private_dP pk) (private_p pk)
        m2 = expSafe iC (private_dQ pk) (private_q pk)
        h = ((private_qinv pk) * (m1 - m2)) `mod` (private_p pk)
-- | Compute the RSA decrypt primitive.
-- if the p and q numbers are available, then dpFast is used
-- otherwise, we use dpSlow which only need d and n.
-- A blinder, when supplied, selects the blinded CRT variant.
dp :: Maybe Blinder -> PrivateKey -> ByteString -> ByteString
dp blinder pk
    | private_p pk /= 0 && private_q pk /= 0 = maybe dpFastNoBlinder dpFast blinder $ pk
    | otherwise = dpSlow pk
-- | Compute the RSA encrypt primitive: m^e mod n, using the fast
-- (non-constant-time) exponentiation since only public values are involved.
ep :: PublicKey -> ByteString -> ByteString
ep pk m = i2ospOf_ (public_size pk) $ expFast (os2ip m) (public_e pk) (public_n pk)
-- | Multiply two integers in Zm, reducing the product modulo m.
multiplication :: Integer -> Integer -> Integer -> Integer
multiplication x y m = mod (x * y) m
|
vincenthz/hs-crypto-pubkey
|
Crypto/PubKey/RSA/Prim.hs
|
bsd-2-clause
| 2,503
| 0
| 11
| 566
| 700
| 374
| 326
| 33
| 1
|
module Prettyprinter.Render.String (
renderString,
renderShowS,
) where
import Prettyprinter.Internal (SimpleDocStream, renderShowS)
-- | Render a 'SimpleDocStream' to a 'String' by applying the
-- difference-list produced by 'renderShowS' to the empty string.
renderString :: SimpleDocStream ann -> String
renderString = ($ "") . renderShowS
|
quchen/prettyprinter
|
prettyprinter/src/Prettyprinter/Render/String.hs
|
bsd-2-clause
| 270
| 0
| 6
| 41
| 55
| 32
| 23
| 6
| 1
|
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Language.Haskell.Liquid.Bare.RefToLogic (
Transformable
, txRefToLogic
) where
import Language.Haskell.Liquid.Types
import Language.Haskell.Liquid.Misc (mapSnd)
import Language.Haskell.Liquid.Bare.Env
import Language.Fixpoint.Types hiding (Def, R)
import Language.Fixpoint.Misc (errorstar, traceShow)
import Language.Fixpoint.Types.Names
import Language.Haskell.Liquid.GHC.Misc (dropModuleUnique)
import qualified Data.HashMap.Strict as M
import Control.Applicative ((<$>))
-- | Apply every logic-map and inline binding as a substitution over @r@.
txRefToLogic :: (Transformable r) => LogicMap -> InlnEnv -> r -> r
txRefToLogic = tx'
class Transformable a where
  -- | Substitute the definition bound to a single 'Symbol'.
  tx :: Symbol -> Either LMap TInline -> a -> a
  -- | Default: fold 'tx' over all bindings; logic-map entries are tagged
  -- 'Left', inline entries 'Right'.
  tx' :: LogicMap -> InlnEnv -> a -> a
  tx' lmap imap x = M.foldrWithKey tx x limap
    where
      limap = M.fromList ((mapSnd Left <$> (M.toList $ logic_map lmap)) ++ (mapSnd Right <$> M.toList imap))
-- Structural instances: each one pushes 'tx' through its sub-terms.
instance (Transformable a) => (Transformable [a]) where
  tx s m xs = tx s m <$> xs
instance Transformable DataConP where
  tx s m x = x { tyConsts = tx s m (tyConsts x)
               , tyArgs = mapSnd (tx s m) <$> tyArgs x
               , tyRes = tx s m (tyRes x)
               }
instance Transformable TInline where
  tx s m (TI xs e) = TI xs (tx s m e)
instance (Transformable r) => Transformable (RType c v r) where
  tx s m = fmap (tx s m)
instance Transformable RReft where
  tx s m = fmap (tx s m)
-- Invariant: the substituted symbol must never be the refinement's own
-- bound variable.
instance Transformable Reft where
  tx s m (Reft (v, p)) = if v == s
                           then errorstar "Transformable: this should not happen"
                           else Reft(v, tx s m p)
-- OLD instance Transformable Refa where
-- OLD   tx s m (RConc p) = RConc $ tx s m p
-- OLD   tx _ _ (RKvar x sub) = RKvar x sub
instance (Transformable a, Transformable b) => Transformable (Either a b) where
  tx s m (Left x) = Left (tx s m x)
  tx s m (Right x) = Right (tx s m x)
-- Predicate traversal: leaves are untouched, applications of the
-- substituted symbol are expanded via 'txPApp'.
instance Transformable Pred where
  tx _ _ PTrue = PTrue
  tx _ _ PFalse = PFalse
  tx _ _ PTop = PTop
  tx s m (PAnd ps) = PAnd (tx s m <$> ps)
  tx s m (POr ps) = POr (tx s m <$> ps)
  tx s m (PNot p) = PNot (tx s m p)
  tx s m (PImp p1 p2) = PImp (tx s m p1) (tx s m p2)
  tx s m (PIff p1 p2) = PIff (tx s m p1) (tx s m p2)
  tx s m (PBexp (EApp f es)) = txPApp (s, m) f (tx s m <$> es)
  tx s m (PBexp e) = PBexp (tx s m e)
  tx s m (PAtom r e1 e2) = PAtom r (tx s m e1) (tx s m e2)
  tx s m (PAll xss p) = PAll xss $ txQuant xss s m p
  tx _ _ (PExist _ _) = error "tx: PExist is for fixpoint internals only"
  -- tx s m (PExist xss p) = PExist xss $ txQuant xss s m p
  tx _ _ p@(PKVar _ _) = p
-- Refuse to substitute under a quantifier that captures the symbol.
txQuant xss s m p
  | s `elem` (fst <$> xss) = errorstar "Transformable.tx on Pred: this should not happen"
  | otherwise = tx s m p
-- Expression traversal: a variable matching the substituted symbol is
-- replaced by its definition ('mexpr'); applications go through 'txEApp'.
instance Transformable Expr where
  tx s m (EVar s')
    | cmpSymbol s s' = mexpr s' m
    | otherwise = EVar s'
  tx s m (EApp f es) = txEApp (s, m) f (tx s m <$> es)
  tx _ _ (ESym c) = ESym c
  tx _ _ (ECon c) = ECon c
  --tx _ _ (ELit l s') = ELit l s'
  tx s m (ENeg e) = ENeg (tx s m e)
  tx s m (EBin o e1 e2) = EBin o (tx s m e1) (tx s m e2)
  tx s m (EIte p e1 e2) = EIte (tx s m p) (tx s m e1) (tx s m e2)
  tx s m (ECst e s') = ECst (tx s m e) s'
  tx _ _ EBot = EBot
instance Transformable (Measure t c) where
  tx s m x = x{eqns = tx s m <$> (eqns x)}
instance Transformable (Def t c) where
  tx s m x = x{body = tx s m (body x)}
instance Transformable Body where
  tx s m (E e) = E $ tx s m e
  tx s m (P p) = P $ tx s m p
  tx s m (R v p) = R v $ tx s m p
-- Definition for a bare variable occurrence: only nullary logic-map
-- entries and inline expressions can replace a variable directly.
mexpr _ (Left (LMap _ [] e)) = e
mexpr s (Left (LMap _ _ _)) = EVar s
mexpr _ (Right (TI _ (Right e))) = e
mexpr s s' = errorstar ("mexpr on " ++ show s ++ "\t" ++ show s')
-- Expand an application of the substituted symbol by substituting the
-- actual arguments for the definition's formals.
txEApp (s, (Left (LMap _ xs e))) f es
  | cmpSymbol s (val f)
  = subst (mkSubst $ zip xs es) $ dropArgs (length xs - length es) e
  | otherwise
  = EApp f es
txEApp (s, (Right (TI xs (Right e)))) f es
  | cmpSymbol s (val f)
  = subst (mkSubst $ zip xs es) e
  | otherwise
  = EApp f es
txEApp (s, (Right (TI _ (Left _)))) f es
  | cmpSymbol s (val f)
  = errorstar "txEApp: deep internal error"
  | otherwise
  = EApp f es
-- HACK for currying, but it only works on runFun things
-- TODO: make it work for any curried function
dropArgs 0 e = e
dropArgs n (EApp _ [e,_]) = dropArgs (n-1) e
dropArgs n e = error $ "dropArgs on " ++ show (n, e)
-- Like 'txEApp' but for boolean (predicate) positions: predicate-valued
-- inlines substitute directly, everything else falls back to 'txEApp'.
txPApp (s, (Right (TI xs (Left e)))) f es
  | cmpSymbol s (val f)
  = subst (mkSubst $ zip xs es) e
  | otherwise
  = PBexp $ EApp f es
txPApp (s, m) f es = PBexp $ txEApp (s, m) f es
-- Symbols are compared after stripping the GHC uniques.
cmpSymbol s1 {- symbol in Core -} s2 {- logical Symbol-}
  = dropModuleNamesAndUnique s1 == dropModuleNamesAndUnique s2
dropModuleNamesAndUnique = dropModuleUnique {- . dropModuleNames -}
|
abakst/liquidhaskell
|
src/Language/Haskell/Liquid/Bare/RefToLogic.hs
|
bsd-3-clause
| 4,939
| 1
| 16
| 1,452
| 2,316
| 1,168
| 1,148
| 109
| 1
|
{-# LANGUAGE OverloadedStrings, EmptyDataDecls, FlexibleContexts, GADTs, GeneralizedNewtypeDeriving, MultiParamTypeClasses, QuasiQuotes, TemplateHaskell, TypeFamilies #-}
module Main where
import Protolude
import Network.HTTP.Client (newManager, defaultManagerSettings)
import qualified Data.Text as T
import Database.Persist.Sqlite (withSqliteConn)
import Database.Persist.Sql (SqlBackend)
import Control.Monad.Logger (runNoLoggingT, NoLoggingT(..))
import Control.Monad.Trans.Resource (runResourceT)
import Control.Monad.Trans.Reader (ReaderT(..))
import qualified Api
import qualified Args
import qualified Response
import qualified Store
-- | Parse the command line and dispatch to the selected subcommand.
main :: IO ()
main = Args.execParser Args.commandParser >>= run
  where
    run (Args.Fetch fetchOptions) = fetchRoutes fetchOptions
    run (Args.Check checkOptions) = checkRoutes checkOptions
-- | Run an action against the SQLite database at the given path,
-- with Persistent's logging suppressed.
withDB :: Text -> (SqlBackend -> IO a) -> IO a
withDB store action = runNoLoggingT $ withSqliteConn store $ NoLoggingT <$> action
-- | Report the fetch configuration, then prepare the HTTP manager and
-- run the store migrations. The actual route querying below is still
-- commented out.
fetchRoutes :: Args.FetchOptions -> IO ()
fetchRoutes options = do
  let host = Args.fetchOptionsHost options
      port = Args.fetchOptionsPort options
      store = Args.fetchOptionsStore options
  putStrLn $ "Endpoint: " <> host <> ":" <> (show port) <> "\n"
          <> "Store: " <> store
  -- NOTE(review): 'manager' is created but unused until the commented
  -- query code below is re-enabled.
  manager <- newManager defaultManagerSettings
  withDB store $ runReaderT (void $ Store.runMigrationSilent Store.migrateAll)
  --
  -- query <- runExceptT $ Api.runRoute manager host port
  --
  -- case query of
  --   Left err -> putStrLn $ "Error: " <> Api.explainError err
  --   Right resp -> case Response.responseRoutes resp of
  --     Nothing -> putStrLn $ "Service: " <> Response.responseCode resp
  --     Just routes -> do
  --       putStrLn $ "Success: " <> Response.responseCode resp
  --       mapM_ checkRoute routes
  --       mapM_ storeRoute routes
  -- where
  --   storeRoute route = flip Store.runSqlPool pool $ do
  --     Store.insert $ (Store.toStorableRoute route :: Store.Route)
  --   checkRoute route = putStrLn $ "Duration: " <> (T.pack . show . Response.routeDuration) route <> "s, "
  --                              <> "Distance: " <> (T.pack . show . Response.routeDistance) route <> "m"
  --
-- | Report the check options that were parsed from the command line.
checkRoutes :: Args.CheckOptions -> IO ()
checkRoutes options = putStrLn summary
  where
    summary = "Verbose: " <> show verbose <> "\n" <> "Store: " <> store
    verbose = Args.checkOptionsVerbose options
    store = Args.checkOptionsStore options
|
daniel-j-h/hosrm-qa
|
src/Main.hs
|
bsd-3-clause
| 2,514
| 0
| 13
| 539
| 455
| 248
| 207
| 37
| 2
|
import GitYrp
import Test.QuickCheck
import Text.Printf
import System.Exit
-- | Succeed silently on a QuickCheck 'Success'; otherwise print the
-- result and abort the test run.
--
-- Fixes: matching with @Success {}@ instead of the positional pattern
-- @Success _ _ _@ keeps this robust when QuickCheck adds fields to
-- 'Success'; the trailing @return ()@ was redundant because
-- 'exitFailure' already inhabits any @IO a@.
succOrFail :: Result -> IO ()
succOrFail Success {} = return ()
succOrFail x = print x >> exitFailure
-- | Print a left-padded label for the property, then run it with
-- QuickCheck and return the result.
testOne :: Testable p => (String, p) -> IO Result
testOne (label, prop) = do
  printf "%-25s: " label
  quickCheckResult prop
-- | Run every registered property; abort with a non-zero exit code on
-- the first failure.
main :: IO ()
main = do
  results <- mapM testOne tests
  mapM_ succOrFail results
-- | An empty file list yields no cover letter.
prop_getCoverLetterFilename1 =
  getCoverLetterFilename [] == Nothing
-- | A plain @0000-cover-letter.patch@ is found among other files.
prop_getCoverLetterFilename2 =
  getCoverLetterFilename ["foo", x] == Just x
  where
    x = "0000-cover-letter.patch"
-- | A version-prefixed cover letter (@v2-...@) is also recognized.
prop_getCoverLetterFilename3 =
  getCoverLetterFilename ["foo", x] == Just x
  where
    x = "v2-0000-cover-letter.patch"
-- | Everything from the @*** BLURB HERE ***@ marker onwards is dropped.
prop_cutOffBlurb =
  cutOffBlurb "foo\nbar\n*** BLURB HERE ***\nqux\nquuux" == "foo\nbar\n"
-- | Named properties to run.  The labels now name the property under
-- test so a failing case is identifiable in the output (the old
-- \"xy*\" labels were meaningless and inconsistent with the
-- \"cutOffBlurb\" entry).
tests =
  [ ("getCoverLetterFilename1", prop_getCoverLetterFilename1)
  , ("getCoverLetterFilename2", prop_getCoverLetterFilename2)
  , ("getCoverLetterFilename3", prop_getCoverLetterFilename3)
  , ("cutOffBlurb", prop_cutOffBlurb)
  ]
|
yaccz/git-yrp
|
tests/unit/main.hs
|
bsd-3-clause
| 977
| 0
| 7
| 177
| 281
| 149
| 132
| 26
| 1
|
module Main where
import qualified Nlp1
import qualified Nlp2
import qualified Nlp3
import qualified Nlp4
import System.Environment
import Control.Monad
q = join [Nlp1.answers, Nlp2.answers, Nlp3.answers, Nlp4.answers]
-- | Run the answer selected by the first command-line argument, a
-- 0-based index into 'q'.
--
-- Fixes: the original @getArgs >>= (q !!) . read . head@ crashed with
-- opaque 'Prelude.head' / 'Prelude.read' / @(!!)@ errors on a missing
-- argument, a non-numeric argument, or an out-of-range index; those
-- cases now fail with a clear message.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> error "usage: pass the 0-based index of the answer to run"
    (a:_) ->
      case reads a of
        [(n, "")] | n >= 0 && n < length q -> q !! n
        _ -> error ("invalid answer index: " ++ a)
|
Iruyan-Zak/Nlp100
|
app/Main.hs
|
bsd-3-clause
| 278
| 0
| 8
| 45
| 89
| 54
| 35
| 10
| 1
|
module Math.IRT.Model.Rasch
( RaschModel (..)
) where
import Statistics.Distribution
import Math.IRT.Internal.Distribution
import Math.IRT.Internal.LogLikelihood
import Math.IRT.Model.FourPLM ( FourPLM(..) )
import Math.IRT.Model.Generic
-- | The Rasch model, characterised solely by an item difficulty
-- parameter; all other 4PL parameters are fixed (see 'toFourPLM').
data RaschModel = RaschModel { difficulty :: !Double
                             } deriving (Show)
-- | Delegates to the equivalent four-parameter logistic model.
instance Distribution RaschModel where
    cumulative m = cumulative (toFourPLM m)
-- | Density delegates to the equivalent 'FourPLM'.  'quantile' is
-- intentionally unsupported; the error message now identifies the
-- module and function instead of the unhelpful "This shouldn't be
-- needed".
instance ContDistr RaschModel where
    density = density . toFourPLM
    quantile _ = error "Math.IRT.Model.Rasch.quantile: not supported for RaschModel"
-- | Density derivative delegates to the equivalent 'FourPLM'.
instance DensityDeriv RaschModel where
    densityDeriv = densityDeriv . toFourPLM
-- | Conversions from the other parameterisations: only the difficulty
-- parameter (@b@, always the second argument) survives; the remaining
-- parameters are discarded.
instance GenericModel RaschModel where
    fromRasch = RaschModel
    fromOnePLM = RaschModel
    fromTwoPLM _ b = RaschModel b
    fromThreePLM _ b _ = RaschModel b
    fromFourPLM _ b _ _ = RaschModel b
-- | Log-likelihood delegates to the equivalent 'FourPLM'.
instance LogLikelihood RaschModel where
    logLikelihood b = logLikelihood b . toFourPLM
-- | Embed a Rasch model into the four-parameter logistic model with
-- the model's difficulty and fixed values 1.0, 0.0, 1.0 for the other
-- three parameters (presumably discrimination, lower and upper
-- asymptote — TODO confirm against 'FourPLM's field order).
toFourPLM :: RaschModel -> FourPLM
toFourPLM (RaschModel sb) = FourPLM 1.0 sb 0.0 1.0
|
argiopetech/irt
|
Math/IRT/Model/Rasch.hs
|
bsd-3-clause
| 1,043
| 0
| 9
| 233
| 262
| 144
| 118
| 28
| 1
|
module Network.Wai.Session.VCache.SimpleSpec (spec) where
import Network.Wai.Session.VCache.Simple
import Test.Tasty
import Test.Tasty.QuickCheck as QC
import Test.QuickCheck
import Test.QuickCheck.Instances
-- | Test tree for "Network.Wai.Session.VCache.Simple".
spec :: TestTree
spec = testGroup "Network.Wai.Session.VCache.Simple" properties
  where
    properties =
      [ QC.testProperty "`someFunction` should pass" someFunction
      ]
-- | Double negation leaves a 'Bool' unchanged.
someFunction :: Bool -> Property
someFunction b = not (not b) === b
|
athanclark/wai-session-alt
|
test/Network/Wai/Session/VCache/SimpleSpec.hs
|
bsd-3-clause
| 425
| 0
| 8
| 59
| 102
| 61
| 41
| 12
| 1
|
{-# LANGUAGE OverloadedStrings, FlexibleInstances, TypeSynonymInstances #-}
--------------------------------------------------------------------
-- |
-- Module : Text.Feed.Util
-- Copyright : (c) Galois, Inc. 2008
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <sof@galois.com>
-- Stability : provisional
-- Portability:
--
--------------------------------------------------------------------
module Text.Feed.Util where
import Text.Feed.Types
import System.Time
import System.Locale
import Data.Default
import Data.List
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.XML.Types as XMLTypes
import Text.XML
-- NOTE(review): these are orphan instances ('Element' and 'Name' are
-- defined in xml-types/xml-conduit, 'Default' in data-default) and may
-- clash with instances defined elsewhere.
instance Default Element where def = Element def [] []
instance Default Name where def = Name "" Nothing Nothing
-- | An XML attribute: a name paired with its text value.
type Attr = (Name,Text)
-- | Translate a calendar time into the date format expected by the
-- feed kind: RFC 3339 for Atom and RDF, RFC 822-style for RSS.
toFeedDateText :: FeedKind -> ClockTime -> {-Date-}String
toFeedDateText fk ct = formatCalendarTime defaultTimeLocale fmt (toUTCTime ct)
  where
    fmt = case fk of
      AtomKind{} -> "%Y-%m-%dT%H:%M:%SZ"
      RSSKind{}  -> "%a, %d %b %Y %H:%M:%S GMT"
      RDFKind{}  -> "%Y-%m-%dT%H:%M:%SZ"
-- xml helpers like those in the old xml and feed packages
-- | Get the text value of an XML element: concatenates all (nested)
-- text nodes, ignoring everything else.
--
-- Fix: @catMaybes . map f@ replaced by the idiomatic 'mapMaybe' f
-- (same behavior, single pass), and the stale trailing comment removed.
strContent :: Element -> Text
strContent e = T.concat $ mapMaybe nodeText $ elementNodes e
-- | The text content of a node: the text itself for a content node,
-- the concatenated text of all descendants for an element node, and
-- 'Nothing' for any other node kind.
--
-- Fix: @catMaybes . map f@ replaced by the idiomatic 'mapMaybe'.
nodeText :: Node -> Maybe Text
nodeText (NodeContent t) = Just t
nodeText (NodeElement e) = Just $ T.concat $ mapMaybe nodeText $ elementNodes e
nodeText _ = Nothing
-- | Keep only the element nodes from a list of XML content.
onlyElems :: [Node] -> [Element]
onlyElems nodes = [ el | NodeElement el <- nodes ]
-- -- | Select only the elements from a parent.
-- elChildren :: Element -> [Element]
-- elChildren e = [ x | Elem x <- elContent e ]
-- -- | Select only the text from a list of XML content.
-- onlyText :: [Node] -> [CData]
-- onlyText xs = [ x | Text x <- xs ]
-- | Find all immediate children with the given name.
findChildren :: Name -> Element -> [Element]
findChildren q = filterChildren ((== q) . elementName)
-- | Keep the immediate child elements satisfying the predicate.
filterChildren :: (Element -> Bool) -> Element -> [Element]
filterChildren p = filter p . onlyElems . elementNodes
-- -- | Filter all immediate children wrt a given predicate over their names.
-- filterChildrenName :: (Name -> Bool) -> Element -> [Element]
-- filterChildrenName p e = filter (p.elName) (onlyElems (elContent e))
-- | The first immediate child with the given name, if any.
findChild :: Name -> Element -> Maybe Element
findChild q = listToMaybe . findChildren q
-- -- | Find an immediate child with the given name.
-- filterChild :: (Element -> Bool) -> Element -> Maybe Element
-- filterChild p e = listToMaybe (filterChildren p e)
-- -- | Find an immediate child with name matching a predicate.
-- filterChildName :: (Name -> Bool) -> Element -> Maybe Element
-- filterChildName p e = listToMaybe (filterChildrenName p e)
-- | The left-most occurrence of an element matching the given name.
findElement :: Name -> Element -> Maybe Element
findElement q = listToMaybe . findElements q
-- -- | Filter the left-most occurrence of an element wrt. given predicate.
-- filterElement :: (Element -> Bool) -> Element -> Maybe Element
-- filterElement p e = listToMaybe (filterElements p e)
-- -- | Filter the left-most occurrence of an element wrt. given predicate.
-- filterElementName :: (Name -> Bool) -> Element -> Maybe Element
-- filterElementName p e = listToMaybe (filterElementsName p e)
-- | Find all non-nested occurrences of an element with the given name
-- (once an element matches, its children are not searched further).
findElements :: Name -> Element -> [Element]
findElements qn = filterElementsName (== qn)
-- | Find all non-nested occurrences of an element satisfying the
-- predicate (once an element matches, its children are not searched
-- further).
filterElements :: (Element -> Bool) -> Element -> [Element]
filterElements p el =
  if p el
    then [el]
    else concatMap (filterElements p) (onlyElems (elementNodes el))
-- | Find all non-nested occurrences of an element whose name satisfies
-- the predicate (once an element matches, its children are not
-- searched further).
filterElementsName :: (Name -> Bool) -> Element -> [Element]
filterElementsName p = filterElements (p . elementName)
-- | Look up the value of the named attribute on an element.
findAttr :: Name -> Element -> Maybe Text
findAttr name = lookupAttr name . elementAttributes
-- | Look up an attribute by name in an attribute list.
lookupAttr :: Name -> [Attr] -> Maybe Text
lookupAttr name = lookupAttrBy (== name)
-- | The value of the first attribute whose name satisfies the predicate.
lookupAttrBy :: (Name -> Bool) -> [Attr] -> Maybe Text
lookupAttrBy p = fmap snd . find (p . fst)
-- -- | Lookup the value of the first attribute whose name
-- -- satisfies the given predicate.
-- findAttrBy :: (Name -> Bool) -> Element -> Maybe String
-- findAttrBy p e = lookupAttrBy p (elAttribs e)
-- Text.XML.Light
-- | Create an unqualified name (no namespace, no prefix).
unqual :: Text -> Name
unqual local = Name local Nothing Nothing
-- | Build an element with an unqualified name from the given
-- attributes and child nodes.
mkelement :: Text -> [Attr] -> [Node] -> Element
mkelement name attrs nodes = Element (unqual name) attrs nodes
-- | A smart element constructor which uses the type of its argument
-- to determine what sort of element to make.
class Elementable t where
  -- | Build an element named by the first argument whose attributes
  -- and children are derived from the second.
  toElement :: Text -> t -> Element
-- | 'Text' becomes the element's sole text node.
instance Elementable Text where
  toElement n t = mkelement n [] [NodeContent t]
-- | A list of elements becomes the element's children.
instance Elementable [Element] where
  toElement n es = mkelement n [] $ map NodeElement es
-- | A list of attributes becomes the element's attributes (no children).
instance Elementable [Attr] where
  toElement n as = mkelement n as []
-- | One attribute plus text content.
instance Elementable (Attr,Text) where
  toElement n (a,t) = mkelement n [a] [NodeContent t]
-- Data.XML.Types
-- | Immediate child elements, silently dropping any child that fails
-- to convert back from the @Data.XML.Types@ representation.
elementChildren :: Element -> [Element]
elementChildren e =
  [ child
  | x <- XMLTypes.elementChildren (toXMLElement e)
  , Right child <- [fromXMLElement x]
  ]
-- ?
-- | NOTE(review): despite the name, this returns the element's
-- concatenated text content (via 'XMLTypes.elementText'), not a
-- serialized XML rendering — confirm intent with callers.
showElement :: Element -> Text
showElement el = T.concat (XMLTypes.elementText (toXMLElement el))
|
haskell-pkg-janitors/feed
|
Text/Feed/Util.hs
|
bsd-3-clause
| 6,811
| 0
| 14
| 1,433
| 1,235
| 680
| 555
| 72
| 3
|
module Day8 where
import Data.List
import Debug.Trace
import Text.Parsec
import Data.Bifunctor
import qualified Data.Matrix as Mat
import qualified Data.Vector as Vec
-- | Character for a pixel that is off.
bOff = '.'
-- | Character for a pixel that is on.
bOn = '#'
-- Screen dimensions: 6 rows by 50 columns.
totalRows = 6
totalCols = 50
-- | The screen is a character matrix of 'bOn'/'bOff' pixels.
type Screen = Mat.Matrix Char
-- | A screen transformation produced by one parsed instruction.
type ScreenFn = Screen -> Screen
-- | The all-off starting screen.
empty = Mat.matrix totalRows totalCols (const bOff)
-- | Build a screen by applying each instruction, in order, to the
-- blank screen.
--
-- Fix: strict 'foldl'' instead of lazy 'foldl', which would build a
-- chain of thunks proportional to the instruction count.
mkScreen :: [String] -> Screen
mkScreen = foldl' (flip apply) empty
-- | Count the lit pixels on the screen.
countPixels :: Screen -> Int
countPixels scr = length [ px | px <- Mat.toList scr, px == bOn ]
-- | Parse one instruction line into a screen transformation.
--
-- Fix: the original mapped a parse failure to @const empty@, so a
-- single malformed line silently wiped the whole screen; an
-- unparseable instruction now leaves the screen unchanged ('id').
apply :: String -> ScreenFn
apply = unwrap . parse instrParser "(unknown)"
  where
    unwrap = either (const id) id
    instrParser = try rectParser <|> rotateParser
-- | Rotate a list to the right by @n@ positions, wrapping around.
--
-- Fix: the offset is reduced modulo the list length, so any @n@
-- (negative, zero, or larger than the length) rotates correctly.  The
-- original @reverse . (drop n <> take n) . reverse@ silently returned
-- the list unchanged whenever @n@ exceeded the length.
shift :: Int -> [a] -> [a]
shift _ [] = []
shift n xs = drop k xs ++ take k xs
  where
    len = length xs
    k = (len - n) `mod` len
-- rect nxm
-- | Turn on every pixel in the @w@×@h@ rectangle anchored at the
-- top-left corner (matrix coordinates are 1-based).
turnOnRect :: Int -> Int -> ScreenFn
turnOnRect w h scr = foldl turnOn scr coords
  where
    turnOn s pt = Mat.setElem bOn pt s
    coords = [ (r, c) | r <- [1..h], c <- [1..w] ]
-- | Parse @rect WxH@ into the corresponding screen transformation.
rectParser :: Parsec String st ScreenFn
rectParser =
  turnOnRect <$> (string "rect" *> spaces *> numberParser)
             <*> (char 'x' *> numberParser)
-- rotate column x=1 by 1
-- | Rotate a screen column by @n@ pixels toward higher row indices,
-- wrapping around.  @col@ is 0-based (as in the instruction text);
-- Data.Matrix is 1-based, hence the @col + 1@.
rotateCol :: Int -> Int -> ScreenFn
rotateCol col n s = setCol shifted (col + 1) s
  where shifted = shift n $ Vec.toList $ Mat.getCol (col + 1) s
-- | Rotate a screen row by @n@ pixels toward higher column indices,
-- wrapping around.  @row@ is 0-based; the matrix is 1-based.
--
-- Fix: added the missing top-level type signature for consistency
-- with 'rotateCol' and the other helpers.
rotateRow :: Int -> Int -> ScreenFn
rotateRow row n s = setRow shifted (row + 1) s
  where shifted = shift n $ Vec.toList $ Mat.getRow (row + 1) s
-- | Replace an entire (1-based) row of the matrix with the given list.
setRow :: [a] -> Int -> Mat.Matrix a -> Mat.Matrix a
setRow newRow = Mat.mapRow (\j _ -> newRow !! (j - 1))
setCol col = Mat.mapCol (\c _ -> col !! (c - 1))
-- | Parse @rotate (column x|row y)=T by N@ into the corresponding
-- screen transformation.
rotateParser :: Parsec String st ScreenFn
rotateParser = do
  string "rotate"
  spaces
  fn <- rotateFnParser
  spaces
  oneOf "xy"          -- axis letter; the meaning comes from column/row
  char '='
  tgt <- numberParser -- 0-based row/column index
  spaces
  string "by"
  spaces
  n <- numberParser   -- rotation amount
  return $ fn tgt n
-- | Parse the @column@/@row@ keyword and select the matching rotation.
rotateFnParser :: Parsec String st (Int -> Int -> ScreenFn)
rotateFnParser = chooseFn <$> rowOrCol
  where
    -- chooseFn looks non-exhaustive, but rowOrCol can only ever
    -- produce the strings "column" or "row".
    rowOrCol = try (string "column") <|> string "row"
    chooseFn "column" = rotateCol
    chooseFn "row" = rotateRow
-- | Parse a non-negative decimal integer.
numberParser :: Parsec String st Int
numberParser = fmap read (many1 digit)
|
amirci/aoc2016-hs
|
src/Day8.hs
|
bsd-3-clause
| 2,104
| 0
| 10
| 464
| 836
| 427
| 409
| 66
| 2
|
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable, FlexibleInstances #-}
-- TODO: The DRY principle is violated in the instance declarations of
-- 'UniqueDeclaration'. Fix by writing an abstraction.
--
-- TODO: The readability of the TH code here would be enhanced by writing in a
-- comment something similar to an example of the output of Template Haskell.
-- Add this.
--
-- TODO: The understandability of this library could be improved by writing
-- something similar to an example of the output of Template Haskell in the
-- 8documentation8. Add this. (SEE NEXT TODO before removing this TODO.)
--
-- TODO: ^^ Replacing the @8@s with @*@ (asterisks) in the above TODO breaks
-- haddock -_-. It fails with this:
--
-- @
-- % cabal haddock
-- Running Haddock for global-0.1.0.1...
-- Preprocessing library global-0.1.0.1...
-- Haddock coverage:
--
-- src/Data/Global.hs:13:1: parse error on input ‘module’
-- @
--
-- At *least* file a bug report, please.
module Data.Global
( UniqueDeclaration(..)
, UDEmpty(..)
, UN
, un
, UT
, ut
, Cnt
, UV
, monomorphic
, QSemQuantity
, translateExtsToTH'
, utl
, ud
, uninitialized
) where
import Control.Applicative
import Control.Concurrent.Chan
import Control.Concurrent.FairRWLock as FairRWLock
import Control.Concurrent.MSampleVar as MSampleVar
import Control.Concurrent.MSem as MSem
import Control.Concurrent.MSemN as MSemN
import Control.Concurrent.MVar
import Control.Concurrent.QSem
import Control.Concurrent.QSemN
import Control.Concurrent.STM.TVar
import Control.Concurrent.STM.TMVar
import Control.Concurrent.STM.TChan
import Data.Data
import Data.IORef
import Data.Tagged
import Debug.Trace.LocationTH
import qualified Language.Haskell.Exts.Syntax as Exts (Exp)
import Language.Haskell.Exts.QQ
import Fork.Bairyn.Language.Haskell.SyntaxTrees.ExtsToTH
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import System.IO.Unsafe (unsafePerformIO)
import Text.Printf
-- | Types that can be uniquely declared on the top level.
--
-- Like 'Monad', this type class itself is not "magical". Its instances,
-- however, may be primitive, at least conceptually, much like 'IO's 'Monad'
-- instance.
--
-- Individual instances may be accompanied with certain caveats. Each
-- individual instance should include in its documentation what these are.
-- These caveats may affect surrounding code, perhaps in ways detrimental to
-- the program's performance or efficiency; users should thus consider
-- isolating "global" declarations in their own @.Global@ module; this is not
-- necessarily necessary for every instance. See the documentation of the
-- particular instance to see how the declarations should be declared.
--
-- The type should be monomorphic, or concrete
-- enough, to be type safe, so that the references
-- cannot be treated as multiple concrete types
-- (writing @[Integer]@ to a reference that has the
-- type @IORef [a]@ and subsequently reading @[Char]@
-- can cause the program to core dump). Ensuring
-- this safety is the responsibility of the
-- implementer of the instances of this type class;
-- other users of this library who do not extend this
-- class's functionality generally do not need to be
-- concerned whether the program will run correctly
-- at run-time, since the mistakes, which can violate
-- type safety, will be caught at compile-time and
-- the code will not build (this is, however, not
-- intrinsically guaranteed, much like the monad
-- laws: they are expected to be followed). It is
-- worth reiterating that instances of this class
-- need to be sure to not allow code with such
-- erroneous types to compile. For more information
-- about type safety, see the documentation of
-- 'unsafePerformIO'.
--
-- Example:
--
-- @
-- un \"lives\" =:: ([| 3 |], ut [t| Integer |] :: UT TVar)
-- @
--
-- @lives@ would then refer to the 'TVar' and would initially contain the value @3@.
class UniqueDeclaration u where
    -- | Declare uniquely: splice top-level declarations binding the
    -- given name to a globally unique reference of kind @u@.
    (=::) ::
        UN u       -- ^ Name of reference.
     -> (UV, UT u) -- ^ Initial value, accompanied with the internal type
                   -- and tagged with the unique constructor so that the
                   -- correct instance can be unambiguously determined.
                   --
                   -- An initial value may not make sense in some
                   -- contexts; implementations of instances may choose
                   -- to ignore this value, as well as the internal type
                   -- and unique constructor.  Implementations should
                   -- document how this parameter is used.
     -> Q [Dec]    -- ^ Top-level declarations for the unique declaration.
                   --
                   -- At least a definition for the name and a type
                   -- signature should be provided.
-- | Declaring unique 'IORef's; for thread-safe handling of mutable data, see 'TVar'.
--
-- The initial value is used so that the reference refers initially to that value.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same preconditions apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration IORef where
    (Tagged name) =:: (uvq, Tagged typq) = do
        uv <- uvq
        typ <- typq
        -- Splice-time check: the internal type must be monomorphic,
        -- otherwise the shared reference would not be type safe.
        $(assert [| monomorphic typ |]) . return $
            -- name :: IORef <typ>
            [ SigD name $ AppT (ConT ''IORef) typ
            -- NOINLINE so the reference is never duplicated by inlining.
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            -- name = unsafeUDeclInternal (newIORef <uv>)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ AppE (VarE 'newIORef) uv) []
            ]
-- | Declaring unique 'MVar's; see also 'TMVar'; caveats are the same as those of 'IORef's.
--
-- The initial value is used so that the reference refers initially to that value.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration MVar where
    (Tagged name) =:: (uvq, Tagged typq) = do
        uv <- uvq
        typ <- typq
        -- Splice-time check: the internal type must be monomorphic,
        -- otherwise the shared reference would not be type safe.
        $(assert [| monomorphic typ |]) . return $
            -- name :: MVar <typ>
            [ SigD name $ AppT (ConT ''MVar) typ
            -- NOINLINE so the reference is never duplicated by inlining.
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            -- name = unsafeUDeclInternal (newMVar <uv>)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ AppE (VarE 'newMVar) uv) []
            ]
-- | Declaring unique 'MVar's that are initially empty; see also 'TMVar'.
--
-- The initial value is ignored.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration (UDEmpty MVar) where
    -- The initial value is ignored (the MVar starts empty).
    (Tagged name) =:: (_, Tagged typq) = do
        typ <- typq
        -- Splice-time check: the internal type must be monomorphic,
        -- otherwise the shared reference would not be type safe.
        $(assert [| monomorphic typ |]) . return $
            -- name :: MVar <typ>
            [ SigD name $ AppT (ConT ''MVar) typ
            -- NOINLINE so the reference is never duplicated by inlining.
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            -- name = unsafeUDeclInternal newEmptyMVar
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ VarE 'newEmptyMVar) []
            ]
-- | Declaring unique 'Chan's that are initially empty; for thread-safe atomic accessing of channels, see 'TChan'; caveats are the same as those of 'MVar's that are initially empty.
--
-- The initial value is ignored.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration (UDEmpty Chan) where
    -- The initial value is ignored (the channel starts empty).
    (Tagged name) =:: (_, Tagged typq) = do
        typ <- typq
        -- Splice-time check: the internal type must be monomorphic,
        -- otherwise the shared reference would not be type safe.
        $(assert [| monomorphic typ |]) . return $
            -- name :: Chan <typ>
            [ SigD name $ AppT (ConT ''Chan) typ
            -- NOINLINE so the reference is never duplicated by inlining.
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            -- name = unsafeUDeclInternal newChan
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ VarE 'newChan) []
            ]
-- | Declaring unique 'QSem's.
--
-- The initial value; which is, in this case, determines the initial quantity
-- of the semaphore; is passed to 'newQSem'; the types thus must match. The
-- internal type is ignored.
--
-- NB: When multiple units of a resource are needed simultaneously, consider using 'QSemN's to avoid deadlocks.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration (Const QSem) where
    -- The internal type is ignored; uv is the initial semaphore
    -- quantity passed to newQSem.  No monomorphism check is needed
    -- since QSem has no type parameter.
    (Tagged name) =:: (uvq, _) = do
        uv <- uvq
        return $
            -- name :: QSem
            [ SigD name $ ConT ''QSem
            -- NOINLINE so the reference is never duplicated by inlining.
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            -- name = unsafeUDeclInternal (newQSem <uv>)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ AppE (VarE 'newQSem) uv) []
            ]
-- | Declaring unique 'QSemN's.
--
-- The initial value; which is, in this case, determines the initial quantity
-- of the semaphore; is passed to 'newQSemN'; the types thus must match. The
-- internal type is ignored.
--
-- NB: When multiple units of a resource are needed simultaneously, consider using 'QSemN's to avoid deadlocks.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration (Const QSemN) where
    -- The internal type is ignored; uv is the initial semaphore
    -- quantity passed to newQSemN.  No monomorphism check is needed
    -- since QSemN has no type parameter.
    (Tagged name) =:: (uvq, _) = do
        uv <- uvq
        return $
            -- name :: QSemN
            [ SigD name $ ConT ''QSemN
            -- NOINLINE so the reference is never duplicated by inlining.
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            -- name = unsafeUDeclInternal (newQSemN <uv>)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ AppE (VarE 'newQSemN) uv) []
            ]
-- | Declaring unique 'RWLock's.
--
-- The initial value and the internal type are ignored.
--
-- NB: When multiple units of a resource are needed simultaneously, consider using 'QSemN's to avoid deadlocks.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration (Const RWLock) where
    -- Both the initial value and the internal type are ignored; the
    -- lock is created with FairRWLock.new.
    (Tagged name) =:: (_, _) = do
        return $
            -- name :: RWLock
            [ SigD name $ ConT ''RWLock
            -- NOINLINE so the reference is never duplicated by inlining.
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            -- name = unsafeUDeclInternal FairRWLock.new
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ VarE 'FairRWLock.new) []
            ]
-- | Declaring unique 'MSampleVar's; caveats are the same as those of 'IORef's.
--
-- The initial value is used so that the reference refers initially to that value.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration MSampleVar where
    -- The initial value seeds the sample variable via 'MSampleVar.newSV';
    -- the declared type must be monomorphic, which the 'assert' enforces
    -- when this declaration splice runs.
    (Tagged name) =:: (uvq, Tagged typq) = do
        uv <- uvq
        typ <- typq
        $(assert [| monomorphic typ |]) . return $
            [ SigD name $ AppT (ConT ''MSampleVar) typ
              -- NOINLINE is essential: inlining would duplicate the
              -- 'unsafePerformIO'-backed reference.
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ AppE (VarE 'MSampleVar.newSV) uv) []
            ]
-- | Declaring unique 'MSampleVar's that are initially empty; caveats are the same as those of 'MVar's that are initially empty.
--
-- The initial value is ignored.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration (UDEmpty MSampleVar) where
    -- The initial-value argument is ignored (hence the wildcard): the sample
    -- variable starts empty via 'MSampleVar.newEmptySV'. The declared type
    -- must still be monomorphic, checked when the splice runs.
    (Tagged name) =:: (_, Tagged typq) = do
        typ <- typq
        $(assert [| monomorphic typ |]) . return $
            [ SigD name $ AppT (ConT ''MSampleVar) typ
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ VarE 'MSampleVar.newEmptySV) []
            ]
-- | Declaring unique 'MSem's.
--
-- The initial value — which, in this case, determines the initial quantity
-- of the semaphore — is passed to 'newMSem'; the types thus must match. The
-- internal type is given to the 'MSem' constructor to construct a semaphore
-- based on an integral type.
--
-- NB: When multiple units of a resource are needed simultaneously, consider using 'MSemN's to avoid deadlocks.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration MSem where
    -- The initial value supplies the semaphore's starting quantity and is
    -- passed to 'MSem.new'; the declared type is the integral type on which
    -- the semaphore is based.
    (Tagged name) =:: (uvq, Tagged typq) = do
        uv <- uvq
        typ <- typq
        -- Reject polymorphic types, consistent with the sibling instances
        -- ('MSampleVar', 'TVar', 'TMVar', …): a polymorphic top-level
        -- declaration backed by 'unsafePerformIO' would be unsound.
        -- NOTE(review): the original omitted this check for 'MSem' and
        -- 'MSemN' only; confirm that was not intentional.
        $(assert [| monomorphic typ |]) . return $
            [ SigD name $ AppT (ConT ''MSem) typ
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ AppE (VarE 'MSem.new) uv) []
            ]
-- | Declaring unique 'MSemN's.
--
-- The initial value — which, in this case, determines the initial quantity
-- of the semaphore — is passed to 'newMSemN'; the types thus must match. The
-- internal type is given to the 'MSemN' constructor to construct a semaphore
-- based on an integral type.
--
-- NB: When multiple units of a resource are needed simultaneously, consider using 'MSemN's to avoid deadlocks.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration MSemN where
    -- The initial value supplies the semaphore's starting quantity and is
    -- passed to 'MSemN.new'; the declared type is the integral type on which
    -- the semaphore is based.
    (Tagged name) =:: (uvq, Tagged typq) = do
        uv <- uvq
        typ <- typq
        -- Reject polymorphic types, consistent with the sibling instances
        -- ('MSampleVar', 'TVar', 'TMVar', …): a polymorphic top-level
        -- declaration backed by 'unsafePerformIO' would be unsound.
        -- NOTE(review): the original omitted this check for 'MSem' and
        -- 'MSemN' only; confirm that was not intentional.
        $(assert [| monomorphic typ |]) . return $
            [ SigD name $ AppT (ConT ''MSemN) typ
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ AppE (VarE 'MSemN.new) uv) []
            ]
-- | Declaring unique 'TVar's; caveats are the same as those of 'IORef's.
--
-- The initial value is used so that the reference refers initially to that value.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration TVar where
    -- The initial value seeds the 'TVar' via 'newTVarIO'; the declared type
    -- must be monomorphic, enforced by the 'assert' when the splice runs.
    (Tagged name) =:: (uvq, Tagged typq) = do
        uv <- uvq
        typ <- typq
        $(assert [| monomorphic typ |]) . return $
            [ SigD name $ AppT (ConT ''TVar) typ
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ AppE (VarE 'newTVarIO) uv) []
            ]
-- | Declaring unique 'TMVar's; caveats are the same as those of 'IORef's.
--
-- The initial value is used so that the reference refers initially to that value.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration TMVar where
    -- The initial value fills the 'TMVar' via 'newTMVarIO'; the declared
    -- type must be monomorphic, enforced by the 'assert' when the splice runs.
    (Tagged name) =:: (uvq, Tagged typq) = do
        uv <- uvq
        typ <- typq
        $(assert [| monomorphic typ |]) . return $
            [ SigD name $ AppT (ConT ''TMVar) typ
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ AppE (VarE 'newTMVarIO) uv) []
            ]
-- | Declaring unique 'TMVar's that are initially empty; caveats are the same as those of 'MVar's that are initially empty.
--
-- The initial value is ignored.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration (UDEmpty TMVar) where
    -- The initial-value argument is ignored: the 'TMVar' starts empty via
    -- 'newEmptyTMVarIO'. The declared type must still be monomorphic.
    (Tagged name) =:: (_, Tagged typq) = do
        typ <- typq
        $(assert [| monomorphic typ |]) . return $
            [ SigD name $ AppT (ConT ''TMVar) typ
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ VarE 'newEmptyTMVarIO) []
            ]
-- | Declaring unique 'TChan's that are initially empty; caveats are the same as those of 'MVar's that are initially empty.
--
-- The initial value is ignored.
--
-- These preconditions apply to GHC 7.0.4 and base-0.4.3.1 and likely similar versions and implementations as well.
--
-- In its low-level implementation, this instance uses 'unsafePerformIO';
-- thus, the same caveats apply to this instance, particularly those
-- regarding top-level declarations (referential transparency cannot be
-- violated here). As of base-4.3.1.0, these conditions, that the user needs
-- to be aware of, are the following:
-- * Compile the declarations with the compiler flag -fno-cse. This
-- prevents multiple references from being substituted to refer to the
-- same data. This flag thus does not affect the semantics of the
-- program, but may potentially adversely affect its performance; thus,
-- isolating in a @.Global@ module may be advisable in some cases. This
-- condition is not strictly necessary when only one declaration is made
-- in a module, since the compiler cannot substitute multiple references
-- to refer to same data.
--
-- If your code behaves differently when optimizations are enabled,
-- ensure that this flag is indeed being used when the declarations are being compiled.
-- Setting or passing this flag is NOT handled automatically by this
-- implementation; it is the responsibility of users of this
-- implementation to ensure that such appropriate behaviour is set when
-- necessary.
--
-- This can be accomplished by placing the line @{-# OPTIONS_GHC -fno-cse #-}@ in
-- the file in which the declarations are declared, before the "module"
-- line.
instance UniqueDeclaration (UDEmpty TChan) where
    -- The initial-value argument is ignored: the 'TChan' starts empty via
    -- 'newTChanIO'. The declared type must still be monomorphic.
    (Tagged name) =:: (_, Tagged typq) = do
        typ <- typq
        $(assert [| monomorphic typ |]) . return $
            [ SigD name $ AppT (ConT ''TChan) typ
            , PragmaD (InlineP name NoInline FunLike AllPhases)
            , ValD (VarP name) (NormalB $ AppE (VarE 'unsafeUDeclInternal) $ VarE 'newTChanIO) []
            ]
-- | Identity type wrapper that indicates that the unique declaration should be "empty" by default.
--
-- Used at the instance head (e.g. @UDEmpty TMVar@, @UDEmpty TChan@) to select
-- the initially-empty constructor; such instances ignore the initial value.
newtype UDEmpty u a = UDEmpty {unUDEmpty :: u a}
    deriving (Eq, Ord, Show, Read, Data, Typeable)
-- | Tagged name type.
type UN u = Tagged (Cnt u) Name

-- | Construct a name for a unique declaration from a string.
un :: (UniqueDeclaration u) => String -> UN u
un str = Tagged (mkName str)
-- | Tagged unique declaration type.
type UT c = Tagged (Cnt c) TypeQ

-- | Tagged unique declaration type constructor.
ut :: (UniqueDeclaration c) => TypeQ -> UT c
ut typq = Tagged typq
-- | Transform a container with kind @* -> *@ into a concrete type @*@ by
-- applying the type '()' to the constructor.
--
-- Intended to be used for tagging types for unique declarations.
type Cnt c = c ()
-- | An expression for a value contained in a unique declaration.
--
-- Currently a plain TH 'ExpQ'; see 'utl' for building these from "Exts" ASTs.
type UV = ExpQ
-- | Check that a type is monomorphic (contains no type variables).
--
-- Returns 'True' for monomorphic types; for polymorphic types it never
-- returns 'False' — instead it calls '$failure', aborting the enclosing
-- declaration splice with a descriptive message.
monomorphic :: Type -> Bool
monomorphic (ConT _) = True
monomorphic (TupleT _) = True
monomorphic (ArrowT) = True
monomorphic (ListT) = True
-- Recurse into both sides of an application: previously @AppT _ _@ was
-- accepted unconditionally, which let polymorphic types such as @Maybe a@
-- slip past the monomorphism assertion.
monomorphic (AppT a b) = monomorphic a && monomorphic b
-- A kind signature does not affect monomorphism of the underlying type.
monomorphic (SigT t _) = monomorphic t
monomorphic t = $failure $ printf "type is too polymorphic: `%s'" (pprint t)
-- | Type of 'unsafeUDeclInternal', used only by this library.
type UnsafeUDeclInternal a = IO a -> a
-- | The type of values that supply an initial quantity for quantity semaphores.
type QSemQuantity = Int
-- | Internal means of constructing unique values, used only by this library.
--
-- 'unsafeUDeclInternal' should never be used directly, outside this
-- library.
unsafeUDeclInternal :: UnsafeUDeclInternal a
-- NOINLINE is load-bearing: inlining would duplicate the 'unsafePerformIO'
-- application at each use site, yielding distinct references.
{-# NOINLINE unsafeUDeclInternal #-}
unsafeUDeclInternal = unsafePerformIO
-- | Translate an "Exts" AST to a Template Haskell AST, failing when the translation result is not a Template Haskell AST.
--
-- This is defined in terms of 'Language.Haskell.Exts.QQ.translateExtsToTH'
translateExtsToTH' :: Exts.Exp -> Exp
translateExtsToTH' expr =
    case translateExtsToTH expr of
        Right th -> th
        Left _   -> $failure $ printf "translating Exts AST to Template Haskell AST resulted in Exts AST"
-- | Apply translateExtsToTH' and lift the result into the 'Q' monad.
--
-- This is often used with 'ud' to refer to variables whose names are not required to be in scope when the quotation is expanded, in a very roundabout way.
--
-- "utl" can be thought of as a mnemonic for "unique", "translate" and "lift"; and will be updated appropriately to reflect changes to 'UV'.
--
-- For example, to enable self-referential recursion by referring to
-- variables whose names are not yet in scope, an expression quotation
-- @[| … |]@ can usually be written as @utl [ud| … |]@.
utl :: Exts.Exp -> UV
utl expr = return (translateExtsToTH' expr)
-- | Alias to the 'QuasiQuoter' 'hs', which does not require names to be in scope when the quotation is expanded, which enables self-referential recursion.
--
-- Kept as a plain alias so call sites read naturally as @[ud| … |]@.
ud :: QuasiQuoter
ud = hs
-- | An alternative to providing an initial value.
--
-- Warning: attempting to read uninitialized references can cause the program to crash.
--
-- The generated expression invokes '$failure' when forced, so the crash
-- occurs on first read, not at declaration time.
uninitialized :: Q Exp
uninitialized = [| $failure "uninitialized" |]
|
bairyn/global
|
src/Data/Global.hs
|
bsd-3-clause
| 40,628
| 0
| 16
| 8,490
| 3,497
| 2,094
| 1,403
| 186
| 1
|
module Vectorise.Env (
Scope(..),
-- * Local Environments
LocalEnv(..),
emptyLocalEnv,
-- * Global Environments
GlobalEnv(..),
initGlobalEnv,
extendImportedVarsEnv,
extendFamEnv,
setPAFunsEnv,
setPRFunsEnv,
modVectInfo
) where
import HscTypes
import InstEnv
import FamInstEnv
import CoreSyn
import Type
import Class
import TyCon
import DataCon
import VarEnv
import VarSet
import Var
import NameSet
import Name
import NameEnv
import FastString
import TysPrim
import TysWiredIn
import Data.Maybe
-- |Indicates what scope something (a variable) is in.
--
data Scope a b
        = Global a      -- ^payload carried by a global (top-level) entity
        | Local b       -- ^payload carried by a locally bound entity
-- LocalEnv -------------------------------------------------------------------

-- |The local environment.
--
data LocalEnv
        = LocalEnv {
            -- Mapping from local variables to their vectorised and lifted versions.
            local_vars :: VarEnv (Var, Var)
            -- In-scope type variables.
          , local_tyvars :: [TyVar]
            -- Mapping from tyvars to their PA dictionaries.
          , local_tyvar_pa :: VarEnv CoreExpr
            -- Local binding name (see 'emptyLocalEnv' for the default, "fn").
          , local_bind_name :: FastString
          }
-- |Create an empty local environment.
--
emptyLocalEnv :: LocalEnv
emptyLocalEnv
  = LocalEnv
    { local_vars      = emptyVarEnv   -- no local variables yet
    , local_tyvars    = []            -- no type variables in scope yet
    , local_tyvar_pa  = emptyVarEnv   -- hence no PA dictionaries either
    , local_bind_name = fsLit "fn"    -- default binding name
    }
-- GlobalEnv ------------------------------------------------------------------

-- |The global environment: entities that exist at top-level.
--
data GlobalEnv
        = GlobalEnv
        { global_vars :: VarEnv Var
          -- ^Mapping from global variables to their vectorised versions — aka the /vectorisation
          -- map/.
        , global_vect_decls :: VarEnv (Type, CoreExpr)
          -- ^Mapping from global variables that have a vectorisation declaration to the right-hand
          -- side of that declaration and its type. This mapping only applies to non-scalar
          -- vectorisation declarations. All variables with a scalar vectorisation declaration are
          -- mentioned in 'global_scalar_vars'.
        , global_scalar_vars :: VarSet
          -- ^Purely scalar variables. Code which mentions only these variables doesn't have to be
          -- lifted. This includes variables from the current module that have a scalar
          -- vectorisation declaration and those that the vectoriser determines to be scalar.
        , global_scalar_tycons :: NameSet
          -- ^Type constructors whose values can only contain scalar data. This includes type
          -- constructors that appear in a 'VECTORISE SCALAR type' pragma or 'VECTORISE type' pragma
          -- *without* a right-hand side in the current or an imported module as well as type
          -- constructors that are automatically identified as scalar by the vectoriser (in
          -- 'Vectorise.Type.Env'). Scalar code may only operate on such data.
          --
          -- NB: Not all type constructors in that set are members of the 'Scalar' type class
          -- (which can be trivially marshalled across scalar code boundaries).
        , global_novect_vars :: VarSet
          -- ^Variables that are not vectorised. (They may be referenced in the right-hand sides
          -- of vectorisation declarations, though.)
        , global_tycons :: NameEnv TyCon
          -- ^Mapping from TyCons to their vectorised versions.
          -- TyCons which do not have to be vectorised are mapped to themselves.
        , global_datacons :: NameEnv DataCon
          -- ^Mapping from DataCons to their vectorised versions.
        , global_pa_funs :: NameEnv Var
          -- ^Mapping from TyCons to their PA dfuns.
        , global_pr_funs :: NameEnv Var
          -- ^Mapping from TyCons to their PR dfuns.
        , global_inst_env :: (InstEnv, InstEnv)
          -- ^External package inst-env & home-package inst-env for class instances.
        , global_fam_inst_env :: FamInstEnvs
          -- ^External package inst-env & home-package inst-env for family instances.
        , global_bindings :: [(Var, CoreExpr)]
          -- ^Hoisted bindings.
        }
-- |Create an initial global environment.
--
-- We add scalar variables and type constructors identified by vectorisation pragmas already here
-- to the global table, so that we can query scalarness during vectorisation, and especially, when
-- vectorising the scalar entities' definitions themselves.
--
initGlobalEnv :: VectInfo -> [CoreVect] -> (InstEnv, InstEnv) -> FamInstEnvs -> GlobalEnv
initGlobalEnv info vectDecls instEnvs famInstEnvs
  = GlobalEnv
  { global_vars          = mapVarEnv snd $ vectInfoVar info
  , global_vect_decls    = mkVarEnv vects
  , global_scalar_vars   = vectInfoScalarVars info `extendVarSetList` scalar_vars
  , global_scalar_tycons = vectInfoScalarTyCons info `addListToNameSet` scalar_tycons
  , global_novect_vars   = mkVarSet novects
  , global_tycons        = mapNameEnv snd $ vectInfoTyCon info
  , global_datacons      = mapNameEnv snd $ vectInfoDataCon info
  , global_pa_funs       = emptyNameEnv
  , global_pr_funs       = emptyNameEnv
  , global_inst_env      = instEnvs
  , global_fam_inst_env  = famInstEnvs
  , global_bindings      = []
  }
  where
    -- Non-scalar vectorisation declarations, paired with their RHS and its type.
    vects         = [(var, (ty, exp)) | Vect var (Just exp@(Var rhs_var)) <- vectDecls
                                      , let ty = varType rhs_var]
                    -- FIXME: we currently only allow RHSes consisting of a
                    --   single variable to be able to obtain the type without
                    --   inference — see also 'TcBinds.tcVect'
    -- Variables whose pragmas mark them as scalar (no vectorised RHS given).
    scalar_vars   = [var | Vect var Nothing <- vectDecls] ++
                    [var | VectInst var <- vectDecls] ++
                    [dataConWrapId doubleDataCon, dataConWrapId floatDataCon, dataConWrapId intDataCon] -- TODO: fix this hack
    novects       = [var | NoVect var <- vectDecls]
    scalar_tycons = [tyConName tycon | VectType True tycon Nothing <- vectDecls] ++
                    [tyConName tycon | VectType _ tycon (Just tycon') <- vectDecls
                                     , tycon == tycon'] ++
                    map tyConName [doublePrimTyCon, intPrimTyCon, floatPrimTyCon] -- TODO: fix this hack
                    -- - for 'VectType True tycon Nothing', we checked that the type does not
                    --   contain arrays (or type variables that could be instatiated to arrays)
                    -- - for 'VectType _ tycon (Just tycon')', where the two tycons are the same,
                    --   we also know that there can be no embedded arrays
-- Operators on Global Environments -------------------------------------------

-- |Extend the list of global variables in an environment.
--
extendImportedVarsEnv :: [(Var, Var)] -> GlobalEnv -> GlobalEnv
extendImportedVarsEnv ps genv = genv { global_vars = vars' }
  where
    vars' = extendVarEnvList (global_vars genv) ps
-- |Extend the list of type family instances.
--
extendFamEnv :: [FamInst] -> GlobalEnv -> GlobalEnv
extendFamEnv new genv =
  let (g_fam_inst, l_fam_inst) = global_fam_inst_env genv
  in  genv { global_fam_inst_env = (g_fam_inst, extendFamInstEnvList l_fam_inst new) }
-- |Set the list of PA functions in an environment.
--
setPAFunsEnv :: [(Name, Var)] -> GlobalEnv -> GlobalEnv
setPAFunsEnv ps genv = genv { global_pa_funs = paFuns }
  where
    paFuns = mkNameEnv ps
-- |Set the list of PR functions in an environment.
--
setPRFunsEnv :: [(Name, Var)] -> GlobalEnv -> GlobalEnv
setPRFunsEnv ps genv = genv { global_pr_funs = prFuns }
  where
    prFuns = mkNameEnv ps
-- |Compute vectorisation information that goes into 'ModGuts' (and is stored in interface files).
-- The incoming 'vectInfo' is that from the 'HscEnv' and 'EPS'. The outgoing one contains only the
-- declarations for the currently compiled module; this includes variables, type constructors, and
-- data constructors referenced in VECTORISE pragmas, even if they are defined in an imported
-- module.
--
-- The variables explicitly include class selectors and dfuns.
--
modVectInfo :: GlobalEnv -> [Id] -> [TyCon] -> [CoreVect]-> VectInfo -> VectInfo
modVectInfo env mg_ids mg_tyCons vectDecls info
  = info
    { vectInfoVar          = mk_env ids      (global_vars     env)
    , vectInfoTyCon        = mk_env tyCons   (global_tycons   env)
    , vectInfoDataCon      = mk_env dataCons (global_datacons env)
      -- Subtract the incoming (imported) info so only this module's
      -- additions are recorded in the outgoing 'VectInfo'.
    , vectInfoScalarVars   = global_scalar_vars   env `minusVarSet`  vectInfoScalarVars   info
    , vectInfoScalarTyCons = global_scalar_tycons env `minusNameSet` vectInfoScalarTyCons info
    }
  where
    vectIds        = [id    | Vect     id    _   <- vectDecls] ++
                     [id    | VectInst id        <- vectDecls]
    vectTypeTyCons = [tycon | VectType _ tycon _ <- vectDecls] ++
                     [tycon | VectClass tycon    <- vectDecls]
    vectDataCons   = concatMap tyConDataCons vectTypeTyCons
    ids            = mg_ids ++ vectIds ++ dataConIds ++ selIds
    tyCons         = mg_tyCons ++ vectTypeTyCons
    dataCons       = concatMap tyConDataCons mg_tyCons ++ vectDataCons
    dataConIds     = map dataConWorkId dataCons
    -- All class selector ids of the classes among 'tyCons'.
    selIds         = concat [ classAllSelIds cls
                            | tycon <- tyCons
                            , cls   <- maybeToList . tyConClass_maybe $ tycon]
    -- Produce an entry for every declaration that is mentioned in the domain of the 'inspectedEnv'
    mk_env decls inspectedEnv
      = mkNameEnv [(name, (decl, to))
                  | decl     <- decls
                  , let name = getName decl
                  , Just to  <- [lookupNameEnv inspectedEnv name]]
|
nomeata/ghc
|
compiler/vectorise/Vectorise/Env.hs
|
bsd-3-clause
| 10,185
| 0
| 15
| 3,119
| 1,393
| 811
| 582
| 119
| 1
|
-- |
-- Module: Trace.Hpc.Codecov.Types
-- Copyright: (c) 2014 Guillaume Nargeot
-- License: BSD3
-- Maintainer: Guillaume Nargeot <guillaume+hackage@nargeot.com>
-- Stability: experimental
--
-- Types to represent hpc code coverage data.
module Trace.Hpc.Codecov.Types where
import Network.Curl
import Trace.Hpc.Mix
-- | Coverage data for one code entity: the hpc mix entries, their tix
-- (tick count) values, and the corresponding source text.
type CoverageEntry = (
    [MixEntry], -- mix entries
    [Integer],  -- tix values
    [String])   -- entry source code
-- | Coverage classification of a single source position
-- (constructor semantics are defined where 'Lix' values are computed).
data Hit = Full
         | Partial
         | None
         | Irrelevant
    deriving (Eq, Show)

-- | Per-line coverage representation: a list of 'Hit's.
type Lix = [Hit]
-- | Result of the POST request to codecov.io
data PostResult =
    PostSuccess URLString URLString -- ^ Codecov job url and wait url
  | PostFailure String              -- ^ error message
|
guillaume-nargeot/codecov-haskell
|
src/Trace/Hpc/Codecov/Types.hs
|
bsd-3-clause
| 761
| 0
| 6
| 187
| 109
| 74
| 35
| 16
| 0
|
{- ORMOLU_DISABLE -}
-- Options passed to Haddock
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
-- CPP: GHC >= 7.8 for Safe Haskell
#if __GLASGOW_HASKELL__ >= 708
{-
The 'Data.DList.Unsafe' module exports 'UnsafeDList' and 'unsafeApplyDList',
which allow breaking the invariant of the 'DList' newtype. Therefore, we
explicitly mark 'Data.DList.Unsafe' as unsafe.
-}
{-# LANGUAGE Unsafe #-}
#endif
-----------------------------------------------------------------------------
{-|
Module: Data.DList.Unsafe
Copyright: © 2006-2009 Don Stewart, 2013-2020 Sean Leather
License: BSD-3-Clause
Maintainer: sean.leather@gmail.com
Stability: stable
This module exports the 'DList' constructor, 'UnsafeDList', and the record label,
'unsafeApplyDList', both of which can be used to create unsafe 'DList' values
that break the invariant preserved by the names exported from 'Data.DList'.
-}
{- ORMOLU_ENABLE -}
module Data.DList.Unsafe (DList (UnsafeDList, unsafeApplyDList)) where
import Data.DList.Internal
|
spl/dlist
|
Data/DList/Unsafe.hs
|
bsd-3-clause
| 1,092
| 0
| 5
| 136
| 37
| 30
| 7
| 7
| 0
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Monad (replicateM_, liftM)
import Control.Concurrent (threadDelay)
import Control.Monad.IO.Class (liftIO)
import Data.Int (Int64)
import Test.Tasty
import Test.Tasty.HUnit
import qualified Test.Tasty.QuickCheck as QC
import qualified Test.QuickCheck.Monadic as QCM
import qualified Data.ByteString as BS
import Web.Scotty (scottyApp, middleware, get, html, raise)
import Network.Wai (Application)
import qualified Network.Wai.Test as WT
import System.Metrics
import qualified System.Metrics.Counter as Counter
import qualified System.Metrics.Distribution as Distribution
import Network.Wai.Metrics
-- Send a GET request for the given raw path to a WAI Application.
httpGet :: BS.ByteString -> Application -> IO WT.SResponse
httpGet path app = WT.runSession (WT.srequest (WT.SRequest request "")) app
  where
    request = WT.setRawPathInfo WT.defaultRequest path
-- Inclusive range check: is x within [low, high]?
between :: Ord a => a -> a -> a -> Bool
between low high x = x >= low && x <= high
-- Return the state of Wai Metrics after running n times
-- an action over a fresh scotty server
testServer :: (Application -> IO a) -> Int -> IO WaiMetrics
testServer action times = do
  store <- newStore
  waiMetrics <- registerWaiMetrics store
  -- Throwaway scotty app with the metrics middleware and three routes:
  -- a plain page, a route that raises an error, and a slow (~100ms) route.
  app <- scottyApp $ do
    middleware (metrics waiMetrics)
    get "/" $ html "Ping"
    get "/error" $ raise "error"
    get "/wait" $ liftIO (threadDelay 100000) >> html "Ping"
  replicateM_ times (action app)
  return waiMetrics
-- Return the number of requests after running n times
-- an action over a fresh scotty server
readRequestCounter :: (Application -> IO a) -> Int -> IO Int64
readRequestCounter action times =
  testServer action times >>= Counter.read . requestCounter
-- Return the number of server errors after running n times
-- an action over a fresh scotty server
readErrorCounter :: (Application -> IO a) -> Int -> IO Int64
readErrorCounter action times =
  testServer action times >>= Counter.read . serverErrorCounter
-- Return the response time distribution after running n times
-- an action over a fresh scotty server
readResponseTime :: (Application -> IO a) -> Int -> IO Distribution.Stats
readResponseTime action times =
  testServer action times >>= Distribution.read . latencyDistribution
-- Property: after n GET requests, the request counter reads exactly n.
testRequestCounterScotty :: QC.NonNegative Int -> QC.Property
testRequestCounterScotty (QC.NonNegative n) = QCM.monadicIO $ do
  hits <- QCM.run (readRequestCounter (httpGet "") n)
  QCM.assert (fromIntegral hits == n)
-- Property: after n requests to the error route, the error counter reads n.
testErrorCounterScotty :: QC.NonNegative Int -> QC.Property
testErrorCounterScotty (QC.NonNegative n) = QCM.monadicIO $ do
  errs <- QCM.run (readErrorCounter (httpGet "/error") n)
  QCM.assert (fromIntegral errs == n)
-- Each request to /wait sleeps 100ms, so the mean latency should land
-- just above 0.1s.
testResponseTimeScotty :: IO()
testResponseTimeScotty = do
  stats <- readResponseTime (httpGet "/wait") 3
  assert (between 0.1 0.11 (Distribution.mean stats))
-- The full middleware test suite: two QuickCheck properties and one
-- HUnit timing check.
tests :: TestTree
tests = testGroup "Metrics tests"
  [ QC.testProperty "Request counter must be incremented in middleware" testRequestCounterScotty
  , QC.testProperty "Error counter must be incremented in middleware" testErrorCounterScotty
  , testCase "Request time average must be measured in middleware" testResponseTimeScotty
  ]
-- Run the tasty test suite.
main :: IO()
main = defaultMain tests
|
ambiata/wai-middleware-metrics
|
tests.hs
|
bsd-3-clause
| 3,379
| 0
| 15
| 589
| 901
| 462
| 439
| 64
| 1
|
{-# LANGUAGE MultiParamTypeClasses, FlexibleContexts #-}
module Opaleye.Internal.Binary where
import Opaleye.Internal.Column (Column(Column))
import qualified Opaleye.Internal.Tag as T
import qualified Opaleye.Internal.PackMap as PM
import qualified Opaleye.Internal.QueryArr as Q
import qualified Opaleye.Internal.PrimQuery as PQ
import qualified Opaleye.Internal.HaskellDB.PrimQuery as HPQ
import Data.Profunctor (Profunctor, dimap)
import Data.Profunctor.Product (ProductProfunctor, empty, (***!))
import qualified Data.Profunctor.Product as PP
import Data.Profunctor.Product.Default (Default, def)
import Control.Applicative (Applicative, pure, (<*>))
import Control.Arrow ((***))
-- | The 'PM.PM' action used when running a 'Binaryspec': each pair of
--   primitive expressions (one from each side of the binary operation)
--   is replaced by a symbol built from the name @"binary"@ and the
--   given 'T.Tag', while the (symbol, pair) associations are collected
--   for later use in the generated query.
--   NOTE(review): the exact symbol-generation scheme lives in
--   'PM.extractAttr' -- confirm there.
extractBinaryFields :: T.Tag -> (HPQ.PrimExpr, HPQ.PrimExpr)
                    -> PM.PM [(HPQ.Symbol, (HPQ.PrimExpr, HPQ.PrimExpr))]
                             HPQ.PrimExpr
extractBinaryFields = PM.extractAttr "binary"
-- | Specifies how to pair up the columns of the two arguments of a
--   binary relational operation and produce the result columns.
--   Wraps a 'PM.PackMap' from pairs of primitive expressions to a
--   single primitive expression.
data Binaryspec columns columns' =
  Binaryspec (PM.PackMap (HPQ.PrimExpr, HPQ.PrimExpr) HPQ.PrimExpr
                         (columns, columns) columns')

-- | Unwrap a 'Binaryspec' and run its 'PM.PackMap' with the supplied
--   effectful pairing function.
runBinaryspec :: Applicative f => Binaryspec columns columns'
              -> ((HPQ.PrimExpr, HPQ.PrimExpr) -> f HPQ.PrimExpr)
              -> (columns, columns) -> f columns'
runBinaryspec (Binaryspec b) = PM.packmap b
-- | The 'Binaryspec' for a single column: pair the two underlying
--   primitive expressions and rebuild a 'Column' from the result.
binaryspecColumn :: Binaryspec (Column a) (Column a)
binaryspecColumn =
  Binaryspec (PM.PackMap (\pair (Column lhs, Column rhs) ->
                             fmap Column (pair (lhs, rhs))))
-- | Build a query combining two sub-queries with the given binary
--   relational operator, pairing up their columns according to the
--   'Binaryspec'.
sameTypeBinOpHelper :: PQ.BinOp -> Binaryspec columns columns'
                    -> Q.Query columns -> Q.Query columns -> Q.Query columns'
sameTypeBinOpHelper binop binaryspec q1 q2 = Q.simpleQueryArr q where
  q ((), startTag) = (newColumns, newPrimQuery, T.next endTag)
    where -- Run the two sub-queries in sequence, threading the tag so
          -- that symbols generated in each are distinct.
          (columns1, primQuery1, midTag) = Q.runSimpleQueryArr q1 ((), startTag)
          (columns2, primQuery2, endTag) = Q.runSimpleQueryArr q2 ((), midTag)
          -- Replace each pair of column expressions with a symbol
          -- (tagged with endTag) and collect the pairings in 'pes'.
          (newColumns, pes) =
            PM.run (runBinaryspec binaryspec (extractBinaryFields endTag)
                      (columns1, columns2))
          newPrimQuery = PQ.Binary binop pes (primQuery1, primQuery2)
-- | The default 'Binaryspec' for a single column is 'binaryspecColumn'.
instance Default Binaryspec (Column a) (Column a) where
  def = binaryspecColumn
-- {
-- Boilerplate instance definitions. Theoretically, these are derivable.

instance Functor (Binaryspec a) where
  fmap f (Binaryspec g) = Binaryspec (fmap f g)

instance Applicative (Binaryspec a) where
  pure = Binaryspec . pure
  Binaryspec f <*> Binaryspec x = Binaryspec (f <*> x)

-- | The contravariant mapping is duplicated ('f *** f') because a
--   'Binaryspec' consumes a /pair/ of column structures.
instance Profunctor Binaryspec where
  dimap f g (Binaryspec b) = Binaryspec (dimap (f *** f) g b)

instance ProductProfunctor Binaryspec where
  empty = PP.defaultEmpty
  (***!) = PP.defaultProfunctorProduct
-- }
|
silkapp/haskell-opaleye
|
src/Opaleye/Internal/Binary.hs
|
bsd-3-clause
| 2,816
| 0
| 13
| 644
| 827
| 464
| 363
| 50
| 1
|
{-# LANGUAGE OverloadedStrings, RankNTypes #-}
-- | It should be noted that most of the code snippets below depend on the
-- OverloadedStrings language pragma.
--
-- Scotty is set up by default for development mode. For production servers,
-- you will likely want to modify 'Trans.settings' and the 'defaultHandler'. See
-- the comments on each of these functions for more information.
module Web.Scotty
( -- * scotty-to-WAI
scotty, scottyApp, scottyOpts, scottySocket, Options(..)
-- * Defining Middleware and Routes
--
-- | 'Middleware' and routes are run in the order in which they
-- are defined. All middleware is run first, followed by the first
-- route that matches. If no route matches, a 404 response is given.
, middleware, get, post, put, delete, patch, options, addroute, matchAny, notFound
-- ** Route Patterns
, capture, regex, function, literal
-- ** Accessing the Request, Captures, and Query Parameters
, request, header, headers, body, bodyReader, param, params, jsonData, files
-- ** Modifying the Response and Redirecting
, status, addHeader, setHeader, redirect
-- ** Setting Response Body
--
-- | Note: only one of these should be present in any given route
-- definition, as they completely replace the current 'Response' body.
, text, html, file, json, stream, raw
-- ** Exceptions
, raise, rescue, next, defaultHandler, liftAndCatchIO
-- * Parsing Parameters
, Param, Trans.Parsable(..), Trans.readEither
-- * Types
, ScottyM, ActionM, RoutePattern, File
) where
-- With the exception of this, everything else better just import types.
import qualified Web.Scotty.Trans as Trans
import Data.Aeson (FromJSON, ToJSON)
import qualified Data.ByteString as BS
import Data.ByteString.Lazy.Char8 (ByteString)
import Data.Text.Lazy (Text)
import Network (Socket)
import Network.HTTP.Types (Status, StdMethod)
import Network.Wai (Application, Middleware, Request, StreamingBody)
import Network.Wai.Handler.Warp (Port)
import Web.Scotty.Internal.Types (ScottyT, ActionT, Param, RoutePattern, Options, File)
-- | The Scotty configuration monad, specialised to 'Text' errors over 'IO'.
type ScottyM = ScottyT Text IO

-- | The Scotty request-handler monad, specialised to 'Text' errors over 'IO'.
type ActionM = ActionT Text IO

-- | Run a scotty application using the warp server.
scotty :: Port -> ScottyM () -> IO ()
scotty p = Trans.scottyT p id

-- | Run a scotty application using the warp server, passing extra options.
scottyOpts :: Options -> ScottyM () -> IO ()
scottyOpts opts = Trans.scottyOptsT opts id

-- | Run a scotty application using the warp server, passing extra options,
-- and listening on the provided socket. This allows the user to provide, for
-- example, a Unix named socket, which can be used when reverse HTTP proxying
-- into your application.
scottySocket :: Options -> Socket -> ScottyM () -> IO ()
scottySocket opts sock = Trans.scottySocketT opts sock id

-- | Turn a scotty application into a WAI 'Application', which can be
-- run with any WAI handler.
scottyApp :: ScottyM () -> IO Application
scottyApp = Trans.scottyAppT id

-- | Global handler for uncaught exceptions.
--
-- Uncaught exceptions normally become 500 responses.
-- You can use this to selectively override that behavior.
--
-- Note: IO exceptions are lifted into Scotty exceptions by default.
-- This has security implications, so you probably want to provide your
-- own defaultHandler in production which does not send out the error
-- strings as 500 responses.
defaultHandler :: (Text -> ActionM ()) -> ScottyM ()
defaultHandler = Trans.defaultHandler

-- | Use given middleware. Middleware is nested such that the first declared
-- is the outermost middleware (it has first dibs on the request and last action
-- on the response). Every middleware is run on each request.
middleware :: Middleware -> ScottyM ()
middleware = Trans.middleware
-- | Throw an exception, which can be caught with 'rescue'. Uncaught exceptions
-- turn into HTTP 500 responses.
raise :: Text -> ActionM a
raise = Trans.raise

-- | Abort execution of this action and continue pattern matching routes.
-- Like an exception, any code after 'next' is not executed.
--
-- As an example, these two routes overlap. The only way the second one will
-- ever run is if the first one calls 'next'.
--
-- > get "/foo/:bar" $ do
-- >   w :: Text <- param "bar"
-- >   unless (w == "special") next
-- >   text "You made a request to /foo/special"
-- >
-- > get "/foo/:baz" $ do
-- >   w <- param "baz"
-- >   text $ "You made a request to: " <> w
next :: ActionM a
next = Trans.next

-- | Catch an exception thrown by 'raise'.
--
-- > raise "just kidding" `rescue` (\msg -> text msg)
rescue :: ActionM a -> (Text -> ActionM a) -> ActionM a
rescue = Trans.rescue

-- | Like 'liftIO', but catch any IO exceptions and turn them into Scotty exceptions.
liftAndCatchIO :: IO a -> ActionM a
liftAndCatchIO = Trans.liftAndCatchIO

-- | Redirect to given URL. Like throwing an uncatchable exception. Any code after the call to redirect
-- will not be run.
--
-- > redirect "http://www.google.com"
--
-- OR
--
-- > redirect "/foo/bar"
redirect :: Text -> ActionM a
redirect = Trans.redirect

-- | Get the 'Request' object.
request :: ActionM Request
request = Trans.request

-- | Get list of uploaded files.
files :: ActionM [File]
files = Trans.files

-- | Get a request header. Header name is case-insensitive.
header :: Text -> ActionM (Maybe Text)
header = Trans.header

-- | Get all the request headers. Header names are case-insensitive.
headers :: ActionM [(Text, Text)]
headers = Trans.headers

-- | Get the request body.
--
-- Note: incompatible with 'bodyReader', which reads the body in chunks.
body :: ActionM ByteString
body = Trans.body

-- | Get an IO action that reads body chunks
--
-- * This is incompatible with 'body' since 'body' consumes all chunks.
bodyReader :: ActionM (IO BS.ByteString)
bodyReader = Trans.bodyReader
-- | Parse the request body as a JSON object and return it. Raises an exception if parse is unsuccessful.
jsonData :: FromJSON a => ActionM a
jsonData = Trans.jsonData

-- | Get a parameter. First looks in captures, then form data, then query parameters.
--
-- * Raises an exception which can be caught by 'rescue' if parameter is not found.
--
-- * If parameter is found, but 'read' fails to parse to the correct type, 'next' is called.
--   This means captures are somewhat typed, in that a route won't match if a correctly typed
--   capture cannot be parsed.
param :: Trans.Parsable a => Text -> ActionM a
param = Trans.param

-- | Get all parameters from capture, form and query (in that order).
params :: ActionM [Param]
params = Trans.params

-- | Set the HTTP response status. Default is 200.
status :: Status -> ActionM ()
status = Trans.status

-- | Add to the response headers. Header names are case-insensitive.
addHeader :: Text -> Text -> ActionM ()
addHeader = Trans.addHeader

-- | Set one of the response headers. Will override any previously set value for that header.
-- Header names are case-insensitive.
setHeader :: Text -> Text -> ActionM ()
setHeader = Trans.setHeader

-- | Set the body of the response to the given 'Text' value. Also sets \"Content-Type\"
-- header to \"text/plain; charset=utf-8\" if it has not already been set.
text :: Text -> ActionM ()
text = Trans.text

-- | Set the body of the response to the given 'Text' value. Also sets \"Content-Type\"
-- header to \"text/html; charset=utf-8\" if it has not already been set.
html :: Text -> ActionM ()
html = Trans.html

-- | Send a file as the response. Doesn't set the \"Content-Type\" header, so you probably
-- want to do that on your own with 'setHeader'.
file :: FilePath -> ActionM ()
file = Trans.file

-- | Set the body of the response to the JSON encoding of the given value. Also sets \"Content-Type\"
-- header to \"application/json; charset=utf-8\" if it has not already been set.
json :: ToJSON a => a -> ActionM ()
json = Trans.json

-- | Set the body of the response to a StreamingBody. Doesn't set the
-- \"Content-Type\" header, so you probably want to do that on your
-- own with 'setHeader'.
stream :: StreamingBody -> ActionM ()
stream = Trans.stream

-- | Set the body of the response to the given (lazy) 'ByteString' value. Doesn't set the
-- \"Content-Type\" header, so you probably want to do that on your own with 'setHeader'.
raw :: ByteString -> ActionM ()
raw = Trans.raw
-- HTTP-verb helpers: each is 'addroute' specialised to one 'StdMethod'.

-- | get = 'addroute' 'GET'
get :: RoutePattern -> ActionM () -> ScottyM ()
get = Trans.get

-- | post = 'addroute' 'POST'
post :: RoutePattern -> ActionM () -> ScottyM ()
post = Trans.post

-- | put = 'addroute' 'PUT'
put :: RoutePattern -> ActionM () -> ScottyM ()
put = Trans.put

-- | delete = 'addroute' 'DELETE'
delete :: RoutePattern -> ActionM () -> ScottyM ()
delete = Trans.delete

-- | patch = 'addroute' 'PATCH'
patch :: RoutePattern -> ActionM () -> ScottyM ()
patch = Trans.patch

-- | options = 'addroute' 'OPTIONS'
options :: RoutePattern -> ActionM () -> ScottyM ()
options = Trans.options

-- | Add a route that matches regardless of the HTTP verb.
matchAny :: RoutePattern -> ActionM () -> ScottyM ()
matchAny = Trans.matchAny

-- | Specify an action to take if nothing else is found. Note: this _always_ matches,
-- so should generally be the last route specified.
notFound :: ActionM () -> ScottyM ()
notFound = Trans.notFound

-- | Define a route with a 'StdMethod', 'Text' value representing the path spec,
-- and a body ('Action') which modifies the response.
--
-- > addroute GET "/" $ text "beam me up!"
--
-- The path spec can include values starting with a colon, which are interpreted
-- as /captures/. These are named wildcards that can be looked up with 'param'.
--
-- > addroute GET "/foo/:bar" $ do
-- >     v <- param "bar"
-- >     text v
--
-- >>> curl http://localhost:3000/foo/something
-- something
addroute :: StdMethod -> RoutePattern -> ActionM () -> ScottyM ()
addroute = Trans.addroute

-- | Match requests using a regular expression.
-- Named captures are not yet supported.
--
-- > get (regex "^/f(.*)r$") $ do
-- >    path <- param "0"
-- >    cap <- param "1"
-- >    text $ mconcat ["Path: ", path, "\nCapture: ", cap]
--
-- >>> curl http://localhost:3000/foo/bar
-- Path: /foo/bar
-- Capture: oo/ba
--
regex :: String -> RoutePattern
regex = Trans.regex

-- | Standard Sinatra-style route. Named captures are prepended with colons.
-- This is the default route type generated by OverloadedString routes. i.e.
--
-- > get (capture "/foo/:bar") $ ...
--
-- and
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- > ...
-- > get "/foo/:bar" $ ...
--
-- are equivalent.
capture :: String -> RoutePattern
capture = Trans.capture

-- | Build a route based on a function which can match using the entire 'Request' object.
-- 'Nothing' indicates the route does not match. A 'Just' value indicates
-- a successful match, optionally returning a list of key-value pairs accessible
-- by 'param'.
--
-- > get (function $ \req -> Just [("version", pack $ show $ httpVersion req)]) $ do
-- >     v <- param "version"
-- >     text v
--
-- >>> curl http://localhost:3000/
-- HTTP/1.1
--
function :: (Request -> Maybe [Param]) -> RoutePattern
function = Trans.function

-- | Build a route that requires the requested path match exactly, without captures.
literal :: String -> RoutePattern
literal = Trans.literal
|
beni55/scotty
|
Web/Scotty.hs
|
bsd-3-clause
| 11,272
| 0
| 9
| 2,115
| 1,544
| 919
| 625
| 108
| 1
|
{-# LANGUAGE RankNTypes #-}
--
-- Examples for package basic-lens
--
module Basic where
import Control.Applicative (pure)
import Control.Applicative (Applicative)
import Control.Lens.Basic
infixl 0 ^.
-- | Infix flipped 'view': @s ^. l@ extracts the target of lens @l@ from @s@.
(^.) :: s -> Lens s t a b -> a
(^.) = flip view

infixl 1 .~
-- | Infix 'set': @(l .~ b) s@ replaces the target of lens @l@ in @s@ with @b@.
(.~) :: Lens s t a b -> b -> s -> t
(.~) = set

infixl 1 %~
-- | Infix 'over': @(l %~ f) s@ applies @f@ to the target of lens @l@ in @s@.
(%~) :: Lens s t a b -> (a -> b) -> s -> t
(%~) = over
-- | A person with a name and an age.
data Person = Person String Int deriving Show

-- | Lens onto a person's name.
name :: Lens Person Person String String
name k (Person n a) = fmap (\n' -> Person n' a) (k n)

-- | Lens onto a person's age.
age :: Lens Person Person Int Int
age k (Person n a) = fmap (\a' -> Person n a') (k a)
-- | A book with a title, an author and some content.
data Book = Book String Person String deriving Show

-- | Lens onto a book's title.
title :: Lens Book Book String String
title k (Book t a c) = fmap (\t' -> Book t' a c) (k t)

-- | Lens onto a book's author.
author :: Lens Book Book Person Person
author k (Book t a c) = fmap (\a' -> Book t a' c) (k a)
-- | Example person used by the doctests below.
person1 :: Person
person1 = Person "foo" 14

-- | Example book used by the doctests below.
book1 :: Book
book1 = Book "hoge" person1 "content"
{-|
>>> book1 ^. author . age
14
>>> (title .~ "moge") book1
Book "moge" (Person "foo" 14) "content"
>>> (author . name %~ reverse) book1
Book "hoge" (Person "oof" 14) "content"
-}
|
notae/haskell-exercise
|
lens/Basic.hs
|
bsd-3-clause
| 1,123
| 0
| 8
| 246
| 455
| 246
| 209
| 28
| 1
|
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Trustworthy #-}
#endif
-- |
-- Module : Data.ByteString.Base64.Lazy
-- Copyright : (c) 2012 Ian Lynagh
--
-- License : BSD-style
-- Maintainer : Emily Pillmore <emilypi@cohomolo.gy>,
-- Herbert Valerio Riedel <hvr@gnu.org>,
-- Mikhail Glushenkov <mikhail.glushenkov@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
-- Fast and efficient encoding and decoding of base64-encoded
-- lazy bytestrings.
--
-- @since 1.0.0.0
module Data.ByteString.Base64.Lazy
(
encode
, decode
, decodeLenient
) where
import Data.ByteString.Base64.Internal
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as LC
import Data.Char
-- | Encode a string into base64 form.  The result will always be a
-- multiple of 4 bytes in length.
--
-- The lazy chunks are regrouped into multiples of 3 bytes first, so
-- that each chunk encodes independently without introducing '='
-- padding in the middle of the stream (base64 maps each 3-byte group
-- to 4 output characters).
encode :: L.ByteString -> L.ByteString
encode = L.fromChunks . map B64.encode . reChunkIn 3 . L.toChunks
-- | Decode a base64-encoded string. This function strictly follows
-- the specification in
-- <http://tools.ietf.org/rfc/rfc4648 RFC 4648>.
decode :: L.ByteString -> Either String L.ByteString
decode b =
    -- An Either result forces the entire output into memory at once
    -- anyway, so we keep it simple and round-trip through a strict
    -- byte string.
    -- TODO: Use L.{fromStrict,toStrict} once we can rely on
    -- a new enough bytestring
    fmap (\s -> L.fromChunks [s]) (B64.decode (S.concat (L.toChunks b)))
-- | Decode a base64-encoded string.  This function is lenient in
-- following the specification from
-- <http://tools.ietf.org/rfc/rfc4648 RFC 4648>, and will not generate
-- parse errors no matter how poor its input.
--
-- Non-alphabet characters are dropped up front, and the chunks are
-- regrouped into multiples of 4 so each strict chunk decodes
-- independently.
decodeLenient :: L.ByteString -> L.ByteString
decodeLenient = L.fromChunks . map B64.decodeLenient . reChunkIn 4 . L.toChunks
              . LC.filter goodChar
    where -- We filter out and '=' padding here, but B64.decodeLenient
          -- handles that
          goodChar c = isDigit c || isAsciiUpper c || isAsciiLower c
                    || c == '+' || c == '/'
|
bos/base64-bytestring
|
Data/ByteString/Base64/Lazy.hs
|
bsd-3-clause
| 2,382
| 0
| 13
| 581
| 319
| 189
| 130
| 24
| 2
|
{-# LANGUAGE RecordWildCards #-}
module Passman.Engine.Strength where
import Math.NumberTheory.Logarithms (intLog2)
-- | A named strength setting (presumably for password derivation --
--   see the Passman.Engine modules; confirm there).
data Strength = Strength { strengthName :: String     -- ^ human-readable name of the setting
                         , strengthBits :: Int        -- ^ strength in bits
                         , strengthIterations :: Int  -- ^ iteration count
                         }
-- | A strength setting counts as legacy when
--   @log2(iterations) * bits@ falls below 1000.
strengthIsLegacy :: Strength -> Bool
strengthIsLegacy s = intLog2 (strengthIterations s) * strengthBits s < 1000
|
chwthewke/passman-hs
|
src/Passman/Engine/Strength.hs
|
bsd-3-clause
| 442
| 0
| 8
| 147
| 80
| 47
| 33
| 8
| 1
|
-- | Some more DOM examples.
{-# LANGUAGE EmptyDataDecls #-}
module Demo.Properties where
import Language.Fay.FFI
import Language.Fay.Prelude
import Language.Fay.DOM
-- | Register 'updateBody' to run once the page has loaded.
main :: Fay ()
main = addEventListener "load" updateBody False
-- | Exercise the DOM/FFI bindings: log some values, overwrite the
--   document body, then read the new contents back and log them.
updateBody :: Fay ()
updateBody = do
  printList [[1,2,3],[4,5,6]]
  print thebody
  print thewindow
  setInnerHtml thebody "Hai!"
  inner <- getInnerHtml thebody
  print' ("'" ++ inner ++ "'")
-- | Log a list of lists of numbers via console.log.
printList :: [[Double]] -> Fay ()
printList = ffi "console.log(%1)"

-- | Print a string via console.log.
print' :: String -> Fay ()
print' = ffi "console['log'](%1)"

-- | Print any foreign value via console.log.
print :: Foreign a => a -> Fay ()
print = ffi "console['log'](%1)"

-- | Attach a window event listener: event name, handler, useCapture flag.
addEventListener :: String -> Fay () -> Bool -> Fay ()
addEventListener = ffi "window['addEventListener'](%1,%2,%3)"

-- | Set the innerHTML of an element.
setInnerHtml :: Element -> String -> Fay ()
setInnerHtml = ffi "%1['innerHTML']=%2"

-- | Read the innerHTML of an element.
getInnerHtml :: Element -> Fay String
getInnerHtml = ffi "%1['innerHTML']"

-- | The document body element.
thebody :: Element
thebody = ffi "document.body"

-- | The window object (typed as 'Element' for convenience).
thewindow :: Element
thewindow = ffi "window"
|
faylang/fay-server
|
modules/project/Demo/Properties.hs
|
bsd-3-clause
| 1,068
| 0
| 10
| 177
| 328
| 170
| 158
| 31
| 1
|
module Chapter9.ListsSpec where
import Test.Hspec
-- | Total version of 'tail': 'Nothing' on the empty list, otherwise
--   'Just' the tail.  (Previously an 'undefined' kata stub; this
--   implementation satisfies the pending spec below.)
safeTail :: [a] -> Maybe [a]
safeTail []       = Nothing
safeTail (_ : xs) = Just xs
-- | The list @[1..6]@, written as a range literal as the kata
--   ("use range instead") asks.  Same value as the original
--   explicit enumeration.
useRangeInstead :: [Integer]
useRangeInstead = [1 .. 6]
-- | Split a string on spaces, like 'words' for space-separated input,
--   built from 'dropWhile' and 'takeWhile' as the kata asks.
--   (Previously an 'undefined' stub; satisfies the pending spec below.)
myWords :: String -> [String]
myWords s = case dropWhile (== ' ') s of
  ""   -> []
  rest -> takeWhile (/= ' ') rest : myWords (dropWhile (/= ' ') rest)
-- | Squares of 1..3, as a list comprehension.
squares :: [Int]
squares = [n ^ 2 | n <- [1 .. 3]]

-- | Squares of the even numbers in 1..4, as a comprehension with a guard.
squareEvens :: [Int]
squareEvens = [n ^ 2 | n <- [1 .. 4], rem n 2 == 0]
-- | Hspec spec for the Chapter 9 list katas.  The 'xit' cases stay
--   pending until the corresponding stubs are implemented.
--   Fixes the typo "myWods" in the second pending description.
spec :: Spec
spec =
  describe "List" $ do
    it "use range instead" $ useRangeInstead `shouldBe` [1, 2, 3, 4, 5, 6]
    xit "implement safeTail" $ safeTail [1, 2] `shouldBe` Just [2]
    xit "implement myWords with takeWhile and dropWhile" $ myWords fun `shouldBe` funList
    it "use list comprehension" $ squares `shouldBe` [1, 4, 9]
    it "use list comprehension with predicate" $ squareEvens `shouldBe` [4, 16]
  where
    fun = "all i wanna do is have some fun"
    funList = ["all", "i", "wanna", "do", "is", "have", "some", "fun"]
|
yannick-cw/haskell_katas
|
test/Chapter9/ListsSpec.hs
|
bsd-3-clause
| 905
| 0
| 11
| 198
| 347
| 200
| 147
| 22
| 1
|
module Random where
import Control.Monad
import System.Random
import Text.Printf
bounds = 1.9 :: Float
-- | An infinite stream of random 'Float's drawn from
--   @(-bounds, bounds)@, threading the generator state through the
--   list.  (Adds the previously missing type signature.)
randomFloats :: RandomGen g => g -> [Float]
randomFloats g =
  let (a, g') = randomR (-bounds, bounds) g in a : randomFloats g'
-- | An infinite stream of random pairs, each component drawn from
--   @(-bounds, bounds)@.  (Adds the previously missing type signature.)
randomPairs :: RandomGen g => g -> [(Float, Float)]
randomPairs g =
  let (a, g')  = randomR (-bounds, bounds) g
      (b, g'') = randomR (-bounds, bounds) g'
  in (a, b) : randomPairs g''
-- | Print the first n random pairs, one per line, in the form
--   @a+b*I@ with six decimal places.  (Adds the previously missing
--   type signature; drops a redundant @do@.)
emitRandomPairs' :: RandomGen g => Int -> g -> IO ()
emitRandomPairs' n g =
  forM_ (take n $ randomPairs g) $ \(a, b) ->
    putStrLn $ printf "%.6f" a ++ "+" ++ printf "%.6f" b ++ "*I"
emitRandomPairs n seed = emitRandomPairs' n (mkStdGen seed)
|
erantapaa/parse-complex
|
src/Random.hs
|
bsd-3-clause
| 603
| 0
| 15
| 127
| 246
| 128
| 118
| 15
| 1
|
{-|
Description : Essential Critical Pairs generator.
This module provides functions that generate all essential critical
pairs between two productions. The essential critical pairs are
classified with 'CriticalPairType', but each pair can be only a
'DeleteUse'.
-}
module Analysis.EssentialCriticalPairs
( findEssentialCriticalPairs
, findAllEssentialDeleteUse
) where
import Abstract.Category
import Abstract.Category.Adhesive
import Abstract.Category.FindMorphism
import Abstract.Category.Finitary
import Abstract.Category.Limit
import Abstract.Rewriting.DPO
import Analysis.CriticalPairs
-- | Finds all Essential Critical Pairs between two given Productions.
--
-- Currently a synonym for 'findAllEssentialDeleteUse', since essential
-- critical pairs can only be of the 'DeleteUse' kind (see the module
-- header).
findEssentialCriticalPairs :: (E'PairCofinitary morph, DPO morph, MInitialPushout morph, Complete morph, Cocomplete morph) =>
  MorphismsConfig morph -> Production morph -> Production morph -> [CriticalPair morph]
findEssentialCriticalPairs = findAllEssentialDeleteUse
-- | Get all essential delete-use and organize them in a list of 'CriticalPair'.
--
-- Each pair of matches (m1, m2) surviving 'isEssentialDeleteUse' is
-- wrapped as a 'DeleteUse' critical pair; the first two tuple
-- components (the initial-pushout parts) are only needed for the
-- filtering and are discarded here.
findAllEssentialDeleteUse :: (E'PairCofinitary morph, DPO morph, MInitialPushout morph, Complete morph, Cocomplete morph) =>
  MorphismsConfig morph -> Production morph -> Production morph -> [CriticalPair morph]
findAllEssentialDeleteUse conf p1 p2 =
  map (\(_,_,m1,m2) -> CriticalPair (m1,m2) Nothing Nothing DeleteUse) essentialCPs
  where
    essentialCPs =
      filter
        (isEssentialDeleteUse conf)
        (findPotentialEssentialCPs conf p1 p2)
-- | Generates all "epi" pairs for essential delete-use,
-- returns part of the initial pushout to avoid recalculations.
findPotentialEssentialCPs :: (DPO morph, E'PairCofinitary morph, MInitialPushout morph, Cocomplete morph) => MorphismsConfig morph -> Production morph -> Production morph -> [(morph,morph, morph,morph)]
findPotentialEssentialCPs conf p1 p2 = satisfyingPairs
  where
    -- M-initial pushout of p1's left-hand morphism; l1' and c are
    -- returned to the caller so 'isEssentialDeleteUse' need not
    -- recompute them.
    (_,l1',c) = calculateMInitialPushout (leftMorphism p1)
    -- Jointly surjective pairs from the codomain of l1' and the left
    -- object of p2, both under the configured match restriction.
    pairs = findJointSurjections (matchRestriction conf, codomain l1') (matchRestriction conf, leftObject p2)
    -- Shift each pair along the pushout of e1 with c to obtain the
    -- actual matches (m1, m2).
    shiftedPairs =
      map
        (\(e1,e2) ->
          let (m1,d1') = calculatePushout e1 c
              m2 = d1' <&> e2
          in (l1', c, m1, m2)
        )
        pairs
    -- Keep only pairs where both matches satisfy the rewriting
    -- conditions of their respective production.
    satisfyingPairs = filter (\(_,_,m1,m2) -> satisfiesRewritingConditions conf p1 m1 && satisfiesRewritingConditions conf p2 m2) shiftedPairs
-- | A pair of monomorphic matches (with precalculated initial pushout (l1',c) elements)
-- is an essential delete use when the
-- pullback of the composition of the c (from the initial pushout) with m1
-- and m2 is a pushout (guaranteed by the construction of 'findPotentialEssentialCPs')
-- and does not exist morphism from S1 to B that commutes.
isEssentialDeleteUse :: (DPO morph, Complete morph) => MorphismsConfig morph -> (morph,morph, morph,morph) -> Bool
isEssentialDeleteUse conf (l1',c,m1,m2) = null commuting
  where
    -- o1: one projection of the pullback of (m1 . c) and m2.
    (_,o1) = calculatePullback (m1 <&> c) m2
    -- All candidate morphisms s1 between the domains of o1 and l1'.
    alls1 = findMorphismsFromDomains conf o1 l1'
    -- Essential iff no candidate commutes, i.e. l1' . s1 == o1.
    commuting = filter (\s1 -> l1' <&> s1 == o1) alls1
-- | All morphisms (under the configured match restriction) between
--   the domains of the two given morphisms.
findMorphismsFromDomains :: FindMorphism morph => MorphismsConfig morph -> morph -> morph -> [morph]
findMorphismsFromDomains conf f g =
  findMorphisms (matchRestriction conf) (domain f) (domain g)
|
Verites/verigraph
|
src/library/Analysis/EssentialCriticalPairs.hs
|
apache-2.0
| 3,313
| 0
| 14
| 606
| 734
| 397
| 337
| 41
| 1
|
import AI.HNN.FF.Network
import Numeric.LinearAlgebra
-- | Training set for an "is this a three?" classifier: each sample is
--   a 5x5 binary image flattened into a 25-element input vector,
--   paired with a one-element target (1 = three, 0 = not a three).
samples :: Samples Double
samples = [
  (fromList [ 1, 1, 1, 1, 1
            , 0, 0, 0, 0, 1
            , 0, 0, 1, 1, 1
            , 0, 0, 0, 0, 1
            , 1, 1, 1, 1, 1 ], fromList [1]), -- three
  (fromList [ 1, 1, 1, 1, 1
            , 0, 0, 0, 0, 1
            , 1, 1, 1, 1, 1
            , 0, 0, 0, 0, 1
            , 1, 1, 1, 1, 1 ], fromList [1]), -- three
  (fromList [ 0, 1, 1, 1, 1
            , 0, 0, 0, 0, 1
            , 0, 0, 0, 1, 1
            , 0, 0, 0, 0, 1
            , 0, 1, 1, 1, 1 ], fromList [1]), -- three
  (fromList [ 1, 1, 1, 1, 1
            , 1, 0, 0, 0, 1
            , 1, 0, 0, 0, 1
            , 1, 0, 0, 0, 1
            , 1, 1, 1, 1, 1 ], fromList [0]), -- not a three
  (fromList [ 1, 1, 1, 1, 1
            , 1, 0, 0, 0, 0
            , 1, 0, 0, 0, 0
            , 1, 0, 0, 0, 0
            , 1, 0, 0, 0, 0 ], fromList [0]), -- not a three
  (fromList [ 0, 1, 1, 1, 0
            , 0, 1, 0, 1, 0
            , 0, 1, 1, 1, 0
            , 0, 1, 0, 1, 0
            , 0, 1, 1, 1, 0 ], fromList [0]), -- not a three
  (fromList [ 0, 0, 1, 0, 0
            , 0, 1, 1, 0, 0
            , 1, 0, 1, 0, 0
            , 0, 0, 1, 0, 0
            , 0, 0, 1, 0, 0 ], fromList [0]) ] -- not a three
-- | Train the network on 'samples', print its output on every training
--   input, then on one unseen test image.
main :: IO ()
main = do
  -- 25 inputs (a flattened 5x5 image), one hidden layer of 250
  -- neurons, 1 output.
  n <- createNetwork 25 [250] 1
  -- 10000 training iterations with tanh activation; 0.5 is presumably
  -- the learning rate -- confirm against the hnn documentation.
  let n' = trainNTimes 10000 0.5 tanh tanh' n samples
  mapM_ (putStrLn . show . output n' tanh . fst) samples
  putStrLn "-------------"
  putStrLn . show . output n' tanh $ testInput
  where testInput = fromList [ 0, 0, 1, 1, 1
                             , 0, 0, 0, 0, 1
                             , 0, 0, 1, 1, 1
                             , 0, 0, 0, 0, 1
                             , 0, 0, 1, 1, 1 ]
{-
OUTPUT:
fromList [0.9996325368507625]
fromList [0.9997784075859734]
fromList [0.9996165887689248]
fromList [-2.8107935971909852e-2]
fromList [7.001808876464477e-3]
fromList [2.54989546107178e-2]
fromList [5.286805464313172e-4]
-------------
fromList [0.9993713524712442]
-}
|
marcusbuffett/hnn
|
examples/ff/three.hs
|
bsd-3-clause
| 2,063
| 0
| 11
| 883
| 878
| 548
| 330
| 51
| 1
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1999
Analysis functions over data types. Specifically, detecting recursive types.
This stuff is only used for source-code decls; it's recorded in interface
files for imported data types.
-}
{-# LANGUAGE CPP #-}
module Eta.TypeCheck.TcTyDecls(
calcRecFlags, RecTyInfo(..),
calcSynCycles, calcClassCycles,
RoleAnnots, extractRoleAnnots, emptyRoleAnnots, lookupRoleAnnots
) where
import Eta.Types.TypeRep
import Eta.HsSyn.HsSyn
import Eta.Types.Class
import Eta.Types.Type
import Eta.Types.Kind
import Eta.TypeCheck.TcRnTypes ( SelfBootInfo(..) )
import Eta.Types.TyCon
import Eta.BasicTypes.DataCon
import Eta.BasicTypes.Var
import Eta.BasicTypes.Name
import Eta.BasicTypes.NameEnv
import Eta.BasicTypes.VarEnv
import Eta.BasicTypes.VarSet
import Eta.BasicTypes.NameSet
import Eta.Types.Coercion ( ltRole )
import Eta.Utils.Digraph
import Eta.BasicTypes.BasicTypes
import Eta.BasicTypes.SrcLoc
import Eta.Utils.Outputable
import Eta.Utils.UniqSet
import Eta.Utils.Util
import Eta.Utils.Maybes
import Data.List
--TODO: #if __GLASGOW_HASKELL__ < 709
-- import Control.Applicative (Applicative(..))
-- #endif
import Control.Monad
#include "HsVersions.h"
{-
************************************************************************
* *
Cycles in class and type synonym declarations
* *
************************************************************************
Checking for class-decl loops is easy, because we don't allow class decls
in interface files.
We allow type synonyms in hi-boot files, but we *trust* hi-boot files,
so we don't check for loops that involve them. So we only look for synonym
loops in the module being compiled.
We check for type synonym and class cycles on the *source* code.
Main reasons:
a) Otherwise we'd need a special function to extract type-synonym tycons
from a type, whereas we already have the free vars pinned on the decl
b) If we checked for type synonym loops after building the TyCon, we
can't do a hoistForAllTys on the type synonym rhs, (else we fall into
a black hole) which seems unclean. Apart from anything else, it'd mean
that a type-synonym rhs could have for-alls to the right of an arrow,
which means adding new cases to the validity checker
Indeed, in general, checking for cycles beforehand means we need to
be less careful about black holes through synonym cycles.
The main disadvantage is that a cycle that goes via a type synonym in an
.hi-boot file can lead the compiler into a loop, because it assumes that cycles
only occur entirely within the source code of the module being compiled.
But hi-boot files are trusted anyway, so this isn't much worse than (say)
a kind error.
[ NOTE ----------------------------------------------
If we reverse this decision, this comment came from tcTyDecl1, and should
go back there
-- dsHsType, not tcHsKindedType, to avoid a loop. tcHsKindedType does hoisting,
-- which requires looking through synonyms... and therefore goes into a loop
-- on (erroneously) recursive synonyms.
-- Solution: do not hoist synonyms, because they'll be hoisted soon enough
-- when they are substituted
We'd also need to add back in this definition
synonymTyConsOfType :: Type -> [TyCon]
-- Does not look through type synonyms at all
-- Return a list of synonym tycons
synonymTyConsOfType ty
= nameEnvElts (go ty)
where
go :: Type -> NameEnv TyCon -- The NameEnv does duplicate elim
go (TyVarTy v) = emptyNameEnv
go (TyConApp tc tys) = go_tc tc tys
go (AppTy a b) = go a `plusNameEnv` go b
go (FunTy a b) = go a `plusNameEnv` go b
go (ForAllTy _ ty) = go ty
go_tc tc tys | isTypeSynonymTyCon tc = extendNameEnv (go_s tys)
(tyConName tc) tc
| otherwise = go_s tys
go_s tys = foldr (plusNameEnv . go) emptyNameEnv tys
---------------------------------------- END NOTE ]
-}
-- | One dependency-graph vertex per type-synonym declaration: the
--   declaration itself, keyed by its name, with edges to the free
--   variables recorded on the declaration ('tcdFVs').
mkSynEdges :: [LTyClDecl Name] -> [(LTyClDecl Name, Name, [Name])]
mkSynEdges syn_decls = [ (ldecl, name, nameSetElems fvs)
                       | ldecl@(L _ (SynDecl { tcdLName = L _ name
                                             , tcdFVs = fvs })) <- syn_decls ]
-- | Strongly connected components of the type-synonym dependency
--   graph; any non-trivial SCC is a synonym cycle.
calcSynCycles :: [LTyClDecl Name] -> [SCC (LTyClDecl Name)]
calcSynCycles = stronglyConnCompFromEdgedVertices . mkSynEdges
{-
Note [Superclass cycle check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We can't allow cycles via superclasses because it would result in the
type checker looping when it canonicalises a class constraint (superclasses
are added during canonicalisation). More precisely, given a constraint
C ty1 .. tyn
we want to instantiate all of C's superclasses, transitively, and
that set must be finite. So if
class (D b, E b a) => C a b
then when we encounter the constraint
C ty1 ty2
we'll instantiate the superclasses
(D ty2, E ty2 ty1)
and then *their* superclasses, and so on. This set must be finite!
It is OK for superclasses to be type synonyms for other classes, so
must "look through" type synonyms. Eg
type X a = C [a]
class X a => C a -- No! Recursive superclass!
We want definitions such as:
class C cls a where cls a => a -> a
class C D a => D a where
to be accepted, even though a naive acyclicity check would reject the
program as having a cycle between D and its superclass. Why? Because
when we instantiate
D ty1
we get the superclass
C D ty1
and C has no superclasses, so we have terminated with a finite set.
More precisely, the rule is this: the superclasses sup_C of a class C
are rejected iff:
C \elem expand(sup_C)
Where expand is defined as follows:
(1) expand(a ty1 ... tyN) = expand(ty1) \union ... \union expand(tyN)
(2) expand(D ty1 ... tyN) = {D}
\union sup_D[ty1/x1, ..., tyP/xP]
\union expand(ty(P+1)) ... \union expand(tyN)
where (D x1 ... xM) is a class, P = min(M,N)
(3) expand(T ty1 ... tyN) = expand(ty1) \union ... \union expand(tyN)
where T is not a class
Eqn (1) is conservative; when there's a type variable at the head,
look in all the argument types. Eqn (2) expands superclasses; the
third component of the union is like Eqn (1). Eqn (3) happens mainly
when the context is a (constraint) tuple, such as (Eq a, Show a).
Furthermore, expand always looks through type synonyms.
-}
-- | Compute the superclass cycles of a class, each reported as a path
--   of 'TyCon's; an empty result means no cycle was detected.
--   See Note [Superclass cycle check] above for the expansion rules.
calcClassCycles :: Class -> [[TyCon]]
calcClassCycles cls
  = nubBy eqAsCycle $
    expandTheta (unitUniqSet cls) [classTyCon cls] (classSCTheta cls) []
  where
    -- The last TyCon in the cycle is always the same as the first
    eqAsCycle xs ys = any (xs ==) (cycles (tail ys))
    cycles xs = take n . map (take n) . tails . cycle $ xs
      where n = length xs

    -- No more superclasses to expand ==> no problems with cycles
    -- See Note [Superclass cycle check]
    expandTheta :: UniqSet Class -- Path of Classes to here in set form
                -> [TyCon]       -- Path to here
                -> ThetaType     -- Superclass work list
                -> [[TyCon]]     -- Input error paths
                -> [[TyCon]]     -- Final error paths
    expandTheta _    _    []           = id
    expandTheta seen path (pred:theta) = expandType seen path pred . expandTheta seen path theta

    expandType seen path (TyConApp tc tys)
      -- Expand unsaturated classes to their superclass theta if they are yet unseen.
      -- If they have already been seen then we have detected an error!
      | Just cls <- tyConClass_maybe tc
      , let (env, remainder) = papp (classTyVars cls) tys
            rest_tys = either (const []) id remainder
      = if cls `elementOfUniqSet` seen
        then (reverse (classTyCon cls:path):)
             . flip (foldr (expandType seen path)) tys
        else expandTheta (addOneToUniqSet seen cls) (tc:path)
                         (substTys (mkTopTvSubst env) (classSCTheta cls))
             . flip (foldr (expandType seen path)) rest_tys

      -- For synonyms, try to expand them: some arguments might be
      -- phantoms, after all. We can expand with impunity because at
      -- this point the type synonym cycle check has already happened.
      | Just (tvs, rhs) <- synTyConDefn_maybe tc
      , let (env, remainder) = papp tvs tys
            rest_tys = either (const []) id remainder
      = expandType seen (tc:path) (substTy (mkTopTvSubst env) rhs)
        . flip (foldr (expandType seen path)) rest_tys

      -- For non-class, non-synonyms, just check the arguments
      | otherwise
      = flip (foldr (expandType seen path)) tys

    expandType _    _    (TyVarTy {})     = id
    expandType _    _    (LitTy {})       = id
    expandType seen path (AppTy t1 t2)    = expandType seen path t1 . expandType seen path t2
    expandType seen path (FunTy t1 t2)    = expandType seen path t1 . expandType seen path t2
    expandType seen path (ForAllTy _tv t) = expandType seen path t

    -- Partially zip tyvars against argument types, returning the
    -- matched substitution plus whichever side's leftovers remain.
    papp :: [TyVar] -> [Type] -> ([(TyVar, Type)], Either [TyVar] [Type])
    papp []       tys      = ([], Right tys)
    papp tvs      []       = ([], Left tvs)
    papp (tv:tvs) (ty:tys) = ((tv, ty):env, remainder)
      where (env, remainder) = papp tvs tys
{-
************************************************************************
* *
Deciding which type constructors are recursive
* *
************************************************************************
Identification of recursive TyCons
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The knot-tying parameters: @rec_details_list@ is an alist mapping @Name@s to
@TyThing@s.
Identifying a TyCon as recursive serves two purposes
1. Avoid infinite types. Non-recursive newtypes are treated as
"transparent", like type synonyms, after the type checker. If we did
this for all newtypes, we'd get infinite types. So we figure out for
each newtype whether it is "recursive", and add a coercion if so. In
effect, we are trying to "cut the loops" by identifying a loop-breaker.
2. Avoid infinite unboxing. This has nothing to do with newtypes.
Suppose we have
data T = MkT Int T
f (MkT x t) = f t
Well, this function diverges, but we don't want the strictness analyser
to diverge. But the strictness analyser will diverge because it looks
deeper and deeper into the structure of T. (I believe there are
examples where the function does something sane, and the strictness
analyser still diverges, but I can't see one now.)
Now, concerning (1), the FC2 branch currently adds a coercion for ALL
newtypes. I did this as an experiment, to try to expose cases in which
the coercions got in the way of optimisations. If it turns out that we
can indeed always use a coercion, then we don't risk recursive types,
and don't need to figure out what the loop breakers are.
For newtype *families* though, we will always have a coercion, so they
are always loop breakers! So you can easily adjust the current
algorithm by simply treating all newtype families as loop breakers (and
indeed type families). I think.
For newtypes, we label some as "recursive" such that
INVARIANT: there is no cycle of non-recursive newtypes
In any loop, only one newtype need be marked as recursive; it is
a "loop breaker". Labelling more than necessary as recursive is OK,
provided the invariant is maintained.
A newtype M.T is defined to be "recursive" iff
(a) it is declared in an hi-boot file (see RdrHsSyn.hsIfaceDecl)
(b) it is declared in a source file, but that source file has a
companion hi-boot file which declares the type
or (c) one can get from T's rhs to T via type
synonyms, or non-recursive newtypes *in M*
e.g. newtype T = MkT (T -> Int)
(a) is conservative; declarations in hi-boot files are always
made loop breakers. That's why in (b) we can restrict attention
to tycons in M, because any loops through newtypes outside M
will be broken by those newtypes
(b) ensures that a newtype is not treated as a loop breaker in one place
and later as a non-loop-breaker. This matters in GHCi particularly, when
a newtype T might be embedded in many types in the environment, and then
T's source module is compiled. We don't want T's recursiveness to change.
The "recursive" flag for algebraic data types is irrelevant (never consulted)
for types with more than one constructor.
An algebraic data type M.T is "recursive" iff
it has just one constructor, and
(a) it is declared in an hi-boot file (see RdrHsSyn.hsIfaceDecl)
(b) it is declared in a source file, but that source file has a
companion hi-boot file which declares the type
or (c) one can get from its arg types to T via type synonyms,
or by non-recursive newtypes or non-recursive product types in M
e.g. data T = MkT (T -> Int) Bool
Just like newtype in fact
A type synonym is recursive if one can get from its
right hand side back to it via type synonyms. (This is
reported as an error.)
A class is recursive if one can get from its superclasses
back to it. (This is an error too.)
Hi-boot types
~~~~~~~~~~~~~
A data type read from an hi-boot file will have an AbstractTyCon as its AlgTyConRhs
and will respond True to isAbstractTyCon. The idea is that we treat these as if one
could get from these types to anywhere. So when we see
module Baz where
import {-# SOURCE #-} Foo( T )
newtype S = MkS T
then we mark S as recursive, just in case. What that means is that if we see
import Baz( S )
newtype R = MkR S
then we don't need to look inside S to compute R's recursiveness. Since S is imported
(not from an hi-boot file), one cannot get from R back to S except via an hi-boot file,
and that means that some data type will be marked recursive along the way. So R is
unconditionally non-recursive (i.e. there'll be a loop breaker elsewhere if necessary)
This in turn means that we grovel through fewer interface files when computing
recursiveness, because we need only look at the type decls in the module being
compiled, plus the outer structure of directly-mentioned types.
-}
-- | Results of the recursiveness/promotability/role calculation over one
-- strongly connected group of type and class declarations.
data RecTyInfo = RTI { rti_promotable :: Bool            -- ^ is the whole group promotable?
                     , rti_roles      :: Name -> [Role]  -- ^ inferred roles, per tycon name
                     , rti_is_rec     :: Name -> RecFlag -- ^ loop-breaker marking, per tycon name
                     }
calcRecFlags :: SelfBootInfo -> Bool  -- hs-boot file?
             -> RoleAnnots -> [TyThing] -> RecTyInfo
-- The 'boot_names' are the things declared in M.hi-boot, if M is the current module.
-- Any type constructors in boot_names are automatically considered loop breakers
calcRecFlags boot_details is_boot mrole_env tyclss
  = RTI { rti_promotable = is_promotable
        , rti_roles      = roles
        , rti_is_rec     = is_rec }
  where
    rec_tycon_names = mkNameSet (map tyConName all_tycons)
    all_tycons = mapMaybe getTyCon tyclss
                   -- Recursion of newtypes/data types can happen via
                   -- the class TyCon, so tyclss includes the class tycons

    -- The group is promotable only if every tycon in it is promotable
    is_promotable = all (isPromotableTyCon rec_tycon_names) all_tycons

    roles = inferRoles is_boot mrole_env all_tycons

    ----------------- Recursion calculation ----------------
    -- A tycon is Recursive iff it is in the set of chosen loop breakers
    is_rec n | n `elemNameSet` rec_names = Recursive
             | otherwise                 = NonRecursive

    boot_name_set = case boot_details of
                      NoSelfBoot                -> emptyNameSet
                      SelfBoot { sb_tcs = tcs } -> tcs
    rec_names = boot_name_set     `unionNameSet`
                nt_loop_breakers  `unionNameSet`
                prod_loop_breakers

    -------------------------------------------------
    -- NOTE
    -- These edge-construction loops rely on
    -- every loop going via tyclss, the types and classes
    -- in the module being compiled. Stuff in interface
    -- files should be correctly marked. If not (e.g. a
    -- type synonym in a hi-boot file) we can get an infinite
    -- loop. We could program round this, but it'd make the code
    -- rather less nice, so I'm not going to do that yet.

    single_con_tycons = [ tc | tc <- all_tycons
                             , not (tyConName tc `elemNameSet` boot_name_set)
                               -- Remove the boot_name_set because they are
                               -- going to be loop breakers regardless.
                             , isSingleton (tyConDataCons tc) ]
        -- Both newtypes and data types, with exactly one data constructor

    (new_tycons, prod_tycons) = partition isNewTyCon single_con_tycons
        -- NB: we do *not* call isProductTyCon because that checks
        --     for vanilla-ness of data constructors; and that depends
        --     on empty existential type variables; and that is figured
        --     out by tcResultType; which uses tcMatchTy; which uses
        --     coreView; which calls coreExpandTyCon_maybe; which uses
        --     the recursiveness of the TyCon.  Result... a black hole.
        -- YUK YUK YUK

    --------------- Newtypes ----------------------
    nt_loop_breakers = mkNameSet (findLoopBreakers nt_edges)
    is_rec_nt tc = tyConName tc `elemNameSet` nt_loop_breakers
        -- is_rec_nt is a locally-used helper function

    -- Dependency edges among the local single-constructor newtypes
    nt_edges = [(t, mk_nt_edges t) | t <- new_tycons]

    mk_nt_edges nt      -- Invariant: nt is a newtype
        = [ tc | tc <- nameEnvElts (tyConsOfType (new_tc_rhs nt))
                        -- tyConsOfType looks through synonyms
               , tc `elem` new_tycons ]
           -- If not (tc `elem` new_tycons) we know that either it's a local *data* type,
           -- or it's imported.  Either way, it can't form part of a newtype cycle

    --------------- Product types ----------------------
    prod_loop_breakers = mkNameSet (findLoopBreakers prod_edges)

    prod_edges = [(tc, mk_prod_edges tc) | tc <- prod_tycons]

    mk_prod_edges tc    -- Invariant: tc is a product tycon
        -- head is safe: single_con_tycons guarantees exactly one data constructor
        = concatMap (mk_prod_edges1 tc) (dataConOrigArgTys (head (tyConDataCons tc)))

    mk_prod_edges1 ptc ty = concatMap (mk_prod_edges2 ptc) (nameEnvElts (tyConsOfType ty))

    mk_prod_edges2 ptc tc
        | tc `elem` prod_tycons   = [tc]              -- Local product
        | tc `elem` new_tycons    = if is_rec_nt tc   -- Local newtype
                                    then []
                                    else mk_prod_edges1 ptc (new_tc_rhs tc)
           -- At this point we know that either it's a local non-product data type,
           -- or it's imported.  Either way, it can't form part of a cycle
        | otherwise               = []

    -- Right-hand side of a newtype, with the type variables ignored
    new_tc_rhs :: TyCon -> Type
    new_tc_rhs tc = snd (newTyConRhs tc)    -- Ignore the type variables
-- | Project the 'TyCon' out of a 'TyThing', if there is one.
getTyCon :: TyThing -> Maybe TyCon
getTyCon thing = case thing of
    ATyCon tc -> Just tc
    _         -> Nothing
findLoopBreakers :: [(TyCon, [TyCon])] -> [Name]
-- Finds a set of tycons that cut all loops
findLoopBreakers deps
  = go [(tc, tc, ds) | (tc, ds) <- deps]
  where
    -- For each cyclic SCC, pick its first tycon as a loop breaker, then
    -- recurse on the remaining vertices of that SCC to cut any cycles
    -- left after removing the chosen breaker.
    go edges = [ name
               | CyclicSCC ((tc, _, _) : edges') <- stronglyConnCompFromEdgedVerticesR edges,
                 name <- tyConName tc : go edges' ]
{-
************************************************************************
* *
Promotion calculation
* *
************************************************************************
See Note [Checking whether a group is promotable]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We only want to promote a TyCon if all its data constructors
are promotable; it'd be very odd to promote some but not others.
But the data constructors may mention this or other TyCons.
So we treat the recursive uses as all OK (ie promotable) and
do one pass to check that each TyCon is promotable.
Currently type synonyms are not promotable, though that
could change.
-}
-- | Can this tycon be promoted to the kind level?  @rec_tycons@ holds the
-- names of the mutually recursive group, which are treated as promotable
-- while checking (see Note [Checking whether a group is promotable]).
isPromotableTyCon :: NameSet -> TyCon -> Bool
isPromotableTyCon rec_tycons tc
  =  isAlgTyCon tc    -- Only algebraic; not even synonyms
                      -- (we could reconsider the latter)
  && ok_kind (tyConKind tc)
  && case algTyConRhs tc of
       DataTyCon { data_cons = cs } -> all ok_con cs
       NewTyCon { data_con = c }    -> ok_con c
       AbstractTyCon {}             -> False
       DataFamilyTyCon {}           -> False
  where
    ok_kind kind = all isLiftedTypeKind args && isLiftedTypeKind res
      where  -- Checks for * -> ... -> * -> *
        (args, res) = splitKindFunTys kind

    -- See Note [Promoted data constructors] in TyCon
    ok_con con = all (isLiftedTypeKind . tyVarKind) ex_tvs
              && null eq_spec   -- No constraints
              && null theta
              && all (isPromotableType rec_tycons) orig_arg_tys
      where
        (_, ex_tvs, eq_spec, theta, orig_arg_tys, _) = dataConFullSig con
isPromotableType :: NameSet -> Type -> Bool
-- Must line up with DataCon.promoteType
-- But the function lives here because we must treat the
-- *recursive* tycons as promotable
isPromotableType rec_tcs con_arg_ty
  = go con_arg_ty
  where
    -- Only fully saturated applications of tycons that are either in the
    -- recursive group or independently promotable
    go (TyConApp tc tys) =  tys `lengthIs` tyConArity tc
                         && (tyConName tc `elemNameSet` rec_tcs
                             || isJust (promotableTyCon_maybe tc))
                         && all go tys
    go (FunTy arg res)   = go arg && go res
    go (TyVarTy {})      = True
    go _                 = False   -- AppTy, ForAllTy, LitTy are not promotable
{-
************************************************************************
* *
Role annotations
* *
************************************************************************
-}
-- | Map from a tycon's 'Name' to the user-written role annotation decl
-- for that tycon, if any.
type RoleAnnots = NameEnv (LRoleAnnotDecl Name)

-- | Collect the role annotation decls of a group, keyed by annotated tycon.
extractRoleAnnots :: TyClGroup Name -> RoleAnnots
extractRoleAnnots (TyClGroup { group_roles = roles })
  = mkNameEnv [ (tycon, role_annot)
              | role_annot@(L _ (RoleAnnotDecl (L _ tycon) _)) <- roles ]

emptyRoleAnnots :: RoleAnnots
emptyRoleAnnots = emptyNameEnv

lookupRoleAnnots :: RoleAnnots -> Name -> Maybe (LRoleAnnotDecl Name)
lookupRoleAnnots = lookupNameEnv
{-
************************************************************************
* *
Role inference
* *
************************************************************************
Note [Role inference]
~~~~~~~~~~~~~~~~~~~~~
The role inference algorithm walks over datatype definitions to infer the roles on the
parameters. Although these roles are stored in the tycons, we can perform this
algorithm on the built tycons, as long as we don't peek at an as-yet-unknown
roles field! Ah, the magic of laziness.
First, we choose appropriate initial roles. For families and classes, roles
(including initial roles) are N. For datatypes, we start with the role in the
role annotation (if any), or otherwise use Phantom. This is done in
initialRoleEnv1.
The function irGroup then propagates role information until it reaches a
fixpoint, preferring N over (R or P) and R over P. To aid in this, we have a
monad RoleM, which is a combination reader and state monad. In its state are
the current RoleEnv, which gets updated by role propagation, and an update
bit, which we use to know whether or not we've reached the fixpoint. The
environment of RoleM contains the tycon whose parameters we are inferring, and
a VarEnv from parameters to their positions, so we can update the RoleEnv.
Between tycons, this reader information is missing; it is added by
addRoleInferenceInfo.
There are two kinds of tycons to consider: algebraic ones (excluding classes)
and type synonyms. (Remember, families don't participate -- all their parameters
are N.) An algebraic tycon processes each of its datacons, in turn. Note that
a datacon's universally quantified parameters might be different from the parent
tycon's parameters, so we use the datacon's univ parameters in the mapping from
vars to positions. Note also that we don't want to infer roles for existentials
(they're all at N, too), so we put them in the set of local variables. As an
optimisation, we skip any tycons whose roles are already all Nominal, as there is
nowhere else for them to go. For synonyms, we just analyse their right-hand sides.
irType walks through a type, looking for uses of a variable of interest and
propagating role information. Because anything used under a phantom position
is at phantom and anything used under a nominal position is at nominal, the
irType function can assume that anything it sees is at representational. (The
other possibilities are pruned when they're encountered.)
The rest of the code is just plumbing.
How do we know that this algorithm is correct? It should meet the following
specification:
Let Z be a role context -- a mapping from variables to roles. The following
rules define the property (Z |- t : r), where t is a type and r is a role:
Z(a) = r' r' <= r
------------------------- RCVar
Z |- a : r
---------- RCConst
Z |- T : r -- T is a type constructor
Z |- t1 : r
Z |- t2 : N
-------------- RCApp
Z |- t1 t2 : r
forall i<=n. (r_i is R or N) implies Z |- t_i : r_i
roles(T) = r_1 .. r_n
---------------------------------------------------- RCDApp
Z |- T t_1 .. t_n : R
Z, a:N |- t : r
---------------------- RCAll
Z |- forall a:k.t : r
We also have the following rules:
For all datacon_i in type T, where a_1 .. a_n are universally quantified
and b_1 .. b_m are existentially quantified, and the arguments are t_1 .. t_p,
then if forall j<=p, a_1 : r_1 .. a_n : r_n, b_1 : N .. b_m : N |- t_j : R,
then roles(T) = r_1 .. r_n
roles(->) = R, R
roles(~#) = N, N
With -dcore-lint on, the output of this algorithm is checked in checkValidRoles,
called from checkValidTycon.
Note [Role-checking data constructor arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a where
MkT :: Eq b => F a -> (a->a) -> T (G a)
Then we want to check the roles at which 'a' is used
in MkT's type. We want to work on the user-written type,
so we need to take into account
* the arguments: (F a) and (a->a)
* the context: C a b
* the result type: (G a) -- this is in the eq_spec
-}
type RoleEnv = NameEnv [Role]  -- from tycon names to roles

-- This, and any of the functions it calls, must *not* look at the roles
-- field of a tycon we are inferring roles about!
-- See Note [Role inference]
-- | Run role inference over a group of tycons and return a lookup
-- function from tycon 'Name' to its inferred roles.  The final
-- environment is computed once and shared by all lookups; asking for a
-- name outside the group is a panic.
inferRoles :: Bool -> RoleAnnots -> [TyCon] -> Name -> [Role]
inferRoles is_boot annots tycons
  = get_roles
  where
    start_env = initialRoleEnv is_boot annots tycons
    final_env = irGroup start_env tycons

    get_roles tc_name
      | Just rs <- lookupNameEnv final_env tc_name = rs
      | otherwise = pprPanic "inferRoles" (ppr tc_name)
-- | Build the starting role environment: every tycon in the group is
-- seeded with its initial role guesses (see 'initialRoleEnv1').
initialRoleEnv :: Bool -> RoleAnnots -> [TyCon] -> RoleEnv
initialRoleEnv is_boot annots tycons
  = extendNameEnvList emptyNameEnv (map (initialRoleEnv1 is_boot annots) tycons)
-- | Initial role guesses for one tycon, paired with its name.
-- Families start (and stay) all-Nominal; algebraic tycons and synonyms
-- start from the user's role annotation where present, otherwise from a
-- per-tycon default.
initialRoleEnv1 :: Bool -> RoleAnnots -> TyCon -> (Name, [Role])
initialRoleEnv1 is_boot annots_env tc
  | isFamilyTyCon      tc = (name, map (const Nominal) tyvars)
  | isAlgTyCon         tc = (name, default_roles)
  | isTypeSynonymTyCon tc = (name, default_roles)
  | otherwise             = pprPanic "initialRoleEnv1" (ppr tc)
  where name       = tyConName tc
        tyvars     = tyConTyVars tc
        (kvs, tvs) = span isKindVar tyvars

        -- if the number of annotations in the role annotation decl
        -- is wrong, just ignore it. We check this in the validity check.
        role_annots
          = case lookupNameEnv annots_env name of
              Just (L _ (RoleAnnotDecl _ annots))
                | annots `equalLength` tvs -> map unLoc annots
              _                            -> map (const Nothing) tvs

        -- Kind variables are always Nominal; each type variable takes its
        -- annotation if given, falling back to default_role
        default_roles = map (const Nominal) kvs ++
                        zipWith orElse role_annots (repeat default_role)

        default_role
          | isClassTyCon tc = Nominal
          | is_boot         = Representational
          | otherwise       = Phantom
-- | Propagate role information over the whole group until a fixpoint is
-- reached: keep re-running inference while some role was strengthened.
irGroup :: RoleEnv -> [TyCon] -> RoleEnv
irGroup env tcs
  | changed   = irGroup env' tcs
  | otherwise = env'
  where
    (env', changed) = runRoleM env (mapM_ irTyCon tcs)
-- | Infer role contributions of a single tycon: algebraic tycons process
-- their class (if any), stupid theta, and visible data constructors;
-- synonyms process their right-hand side; everything else is a no-op.
irTyCon :: TyCon -> RoleM ()
irTyCon tc
  | isAlgTyCon tc
  = do { old_roles <- lookupRoles tc
       ; unless (all (== Nominal) old_roles) $  -- also catches data families,
                                                -- which don't want or need role inference
         do { whenIsJust (tyConClass_maybe tc) (irClass tc_name)
            ; addRoleInferenceInfo tc_name (tyConTyVars tc) $
              mapM_ (irType emptyVarSet) (tyConStupidTheta tc)  -- See #8958
            ; mapM_ (irDataCon tc_name) (visibleDataCons $ algTyConRhs tc) }}

  | Just ty <- synTyConRhs_maybe tc
  = addRoleInferenceInfo tc_name (tyConTyVars tc) $
    irType emptyVarSet ty

  | otherwise
  = return ()

  where
    tc_name = tyConName tc
-- any type variable used in an associated type must be Nominal
irClass :: Name -> Class -> RoleM ()
irClass tc_name cls
  = addRoleInferenceInfo tc_name cls_tvs $
    mapM_ ir_at (classATs cls)
  where
    cls_tvs    = classTyVars cls
    cls_tv_set = mkVarSet cls_tvs

    -- Force Nominal on every class tyvar mentioned by the associated type
    ir_at at_tc
      = mapM_ (updateRole Nominal) (varSetElems nvars)
      where nvars = (mkVarSet $ tyConTyVars at_tc) `intersectVarSet` cls_tv_set
-- See Note [Role inference]
irDataCon :: Name -> DataCon -> RoleM ()
irDataCon tc_name datacon
  = addRoleInferenceInfo tc_name univ_tvs $
    mapM_ (irType ex_var_set) (eqSpecPreds eq_spec ++ theta ++ arg_tys)
      -- See Note [Role-checking data constructor arguments]
  where
    (univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res_ty) = dataConFullSig datacon
    -- Existentials are treated as local variables (always Nominal),
    -- so they are excluded from role updates
    ex_var_set = mkVarSet ex_tvs
-- | Walk a type, updating the role of every variable of interest it
-- mentions.  Variables in @lcls@ are locally bound (existentials and
-- foralls) and are never updated.  Everything reached by 'go' is in a
-- representational position; nominal/phantom positions are pruned in
-- 'go_app' before recursing.  See Note [Role inference].
irType :: VarSet -> Type -> RoleM ()
irType = go
  where
    go lcls (TyVarTy tv)     = unless (tv `elemVarSet` lcls) $
                               updateRole Representational tv
    -- The argument of a bare AppTy must be Nominal
    go lcls (AppTy t1 t2)    = go lcls t1 >> mark_nominal lcls t2
    go lcls (TyConApp tc tys)
      = do { roles <- lookupRolesX tc
           ; zipWithM_ (go_app lcls) roles tys }
    go lcls (FunTy t1 t2)    = go lcls t1 >> go lcls t2
    go lcls (ForAllTy tv ty) = go (extendVarSet lcls tv) ty
    go _    (LitTy {})       = return ()

    -- Dispatch on the role of an argument position of a TyConApp
    go_app _ Phantom _ = return ()                  -- nothing to do here
    go_app lcls Nominal ty = mark_nominal lcls ty   -- all vars below here are N
    go_app lcls Representational ty = go lcls ty

    -- Every free variable of ty (other than the locals) must be Nominal
    mark_nominal lcls ty = let nvars = tyVarsOfType ty `minusVarSet` lcls in
                           mapM_ (updateRole Nominal) (varSetElems nvars)
-- like lookupRoles, but with Nominal tags at the end for oversaturated TyConApps
lookupRolesX :: TyCon -> RoleM [Role]
lookupRolesX tc = do
    known <- lookupRoles tc
    return (known ++ repeat Nominal)
-- gets the roles either from the environment or the tycon
lookupRoles :: TyCon -> RoleM [Role]
lookupRoles tc = do
    env <- getRoleEnv
    case lookupNameEnv env (tyConName tc) of
        Nothing    -> return (tyConRoles tc)  -- not in the group being inferred
        Just roles -> return roles
-- tries to update a role; won't ever update a role "downwards"
updateRole :: Role -> TyVar -> RoleM ()
updateRole role tv = do
    positions <- getVarNs
    case lookupVarEnv positions tv of
        Nothing -> pprPanic "updateRole" (ppr tv)
        Just n  -> do
            tc_name <- getTyConName
            updateRoleEnv tc_name n role
-- the state in the RoleM monad
data RoleInferenceState = RIS { role_env :: RoleEnv  -- roles inferred so far
                              , update   :: Bool }   -- did this pass strengthen anything?

-- the environment in the RoleM monad
type VarPositions = VarEnv Int  -- maps each tyvar to its argument position

data RoleInferenceInfo = RII { var_ns :: VarPositions
                             , name   :: Name }      -- tycon currently under inference

-- See [Role inference]
-- Hand-rolled reader (per-tycon info) + state monad
newtype RoleM a = RM { unRM :: Maybe RoleInferenceInfo
                            -> RoleInferenceState
                            -> (a, RoleInferenceState) }
-- Functor/Applicative derived from the Monad instance (pre-AMP style)
instance Functor RoleM where
    fmap = liftM

instance Applicative RoleM where
    pure  = return
    (<*>) = ap

instance Monad RoleM where
    return x = RM $ \_ state -> (x, state)
    -- Thread the reader info unchanged and the state left-to-right
    a >>= f  = RM $ \m_info state -> let (a', state') = unRM a m_info state in
                                     unRM (f a') m_info state'
-- | Run a 'RoleM' computation with no tycon-local info, returning the
-- final role environment together with the did-anything-change flag.
runRoleM :: RoleEnv -> RoleM () -> (RoleEnv, Bool)
runRoleM env thing = (role_env end_state, update end_state)
  where
    start_state = RIS { role_env = env, update = False }
    end_state   = snd (unRM thing Nothing start_state)
-- | Supply the per-tycon reader info (tycon name plus tyvar positions)
-- to an inner computation.  The ASSERT checks that infos never nest.
addRoleInferenceInfo :: Name -> [TyVar] -> RoleM a -> RoleM a
addRoleInferenceInfo name tvs thing
  = RM $ \_nothing state -> ASSERT( isNothing _nothing )
                            unRM thing (Just info) state
  where info = RII { var_ns = mkVarEnv (zip tvs [0..]), name = name }
-- | Read the current role environment out of the inference state.
getRoleEnv :: RoleM RoleEnv
getRoleEnv = RM $ \_ state -> (role_env state, state)
getVarNs :: RoleM VarPositions
getVarNs = RM $ \m_info state ->
case m_info of
Nothing -> panic "getVarNs"
Just (RII { var_ns = var_ns }) -> (var_ns, state)
-- | Fetch the name of the tycon under inference.
-- Panics if called outside 'addRoleInferenceInfo'.
getTyConName :: RoleM Name
getTyConName = RM $ \m_info state ->
    case m_info of
        Just info -> (name info, state)
        Nothing   -> panic "getTyConName"
-- | Record that argument @n@ of tycon @name@ needs at least the given
-- role.  Only ever strengthens: if the new role is stronger ('ltRole')
-- than the stored one, replace it and raise the update flag so the
-- fixpoint loop in 'irGroup' runs again; otherwise leave state untouched.
updateRoleEnv :: Name -> Int -> Role -> RoleM ()
updateRoleEnv name n role
  = RM $ \_ state@(RIS { role_env = role_env }) -> ((),
         case lookupNameEnv role_env name of
           Nothing -> pprPanic "updateRoleEnv" (ppr name)
           -- Partial match (old_role : after) assumed safe because n comes
           -- from VarPositions, built from the tycon's own tyvars
           Just roles -> let (before, old_role : after) = splitAt n roles in
                         if role `ltRole` old_role
                         then let roles'    = before ++ role : after
                                  role_env' = extendNameEnv role_env name roles' in
                              RIS { role_env = role_env', update = True }
                         else state )
|
rahulmutt/ghcvm
|
compiler/Eta/TypeCheck/TcTyDecls.hs
|
bsd-3-clause
| 34,725
| 2
| 19
| 9,569
| 4,795
| 2,540
| 2,255
| 314
| 10
|
-- Copyright Corey O'Connor
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
-- | This provides a mock terminal implementation that is nice for
-- testing. This transforms the output operations to visible characters
-- which is useful for testing.
module Graphics.Vty.Output.Mock
( MockData
, mockTerminal
)
where
import Graphics.Vty.Image (DisplayRegion)
import Graphics.Vty.Output.Interface
import Blaze.ByteString.Builder.Word (writeWord8)
import Control.Monad.Trans
import qualified Data.ByteString as BS
import Data.IORef
import qualified Data.String.UTF8 as UTF8
-- | Mutable cell holding the bytes most recently handed to the mock
-- terminal's 'outputByteBuffer', wrapped as UTF-8.
type MockData = IORef (UTF8.UTF8 BS.ByteString)

-- | The mock display terminal produces a string representation of
-- the requested picture. There is *not* an isomorphism between the
-- string representation and the picture. The string representation is
-- a simplification of the picture that is only useful in debugging VTY
-- without considering terminal specific issues.
--
-- The mock implementation is useful in manually determining if the
-- sequence of terminal operations matches the expected sequence. The
-- requirement of the produced representation is simplicity in parsing
-- the text representation and determining how the picture was mapped to
-- terminal operations.
--
-- The string representation is a sequence of identifiers where each
-- identifier is the name of an operation in the algebra.
mockTerminal :: (Applicative m, MonadIO m) => DisplayRegion -> m (MockData, Output)
mockTerminal r = liftIO $ do
    -- Initially undefined; filled in by the first outputByteBuffer call
    outRef <- newIORef undefined
    newAssumedStateRef <- newIORef initialAssumedState
    let t = Output
            { terminalID = "mock terminal"
            , releaseTerminal = return ()
            , reserveDisplay = return ()
            , releaseDisplay = return ()
            , ringTerminalBell = return ()
            , supportsBell = return False
            , supportsItalics = return False
            , supportsStrikethrough = return False
            , setDisplayBounds = const $ return ()
            , displayBounds = return r
            -- Capture the output bytes into outRef instead of a real terminal
            , outputByteBuffer = \bytes -> do
                putStrLn $ "mock outputByteBuffer of " ++ show (BS.length bytes) ++ " bytes"
                writeIORef outRef $ UTF8.fromRep bytes
            , contextColorCount = 16
            , supportsCursorVisibility = True
            , supportsMode = const False
            , setMode = const $ const $ return ()
            , getModeStatus = const $ return False
            , assumedStateRef = newAssumedStateRef
            , mkDisplayContext = \tActual rActual -> return $ DisplayContext
                { contextRegion = rActual
                , contextDevice = tActual
                -- A cursor move is always visualized as the single
                -- character 'M'
                , writeMoveCursor = \_x _y -> writeWord8 $ toEnum $ fromEnum 'M'
                -- Show cursor is always visualized as the single
                -- character 'S'
                , writeShowCursor = writeWord8 $ toEnum $ fromEnum 'S'
                -- Hide cursor is always visualized as the single
                -- character 'H'
                , writeHideCursor = writeWord8 $ toEnum $ fromEnum 'H'
                -- An attr change is always visualized as the single
                -- character 'A'
                , writeSetAttr = \_ _fattr _diffs _attr -> writeWord8 $ toEnum $ fromEnum 'A'
                -- default attr is always visualized as the single
                -- character 'D'
                , writeDefaultAttr = const $ writeWord8 $ toEnum $ fromEnum 'D'
                -- row end is always visualized as the single character
                -- 'E'
                , writeRowEnd = writeWord8 $ toEnum $ fromEnum 'E'
                , inlineHack = return ()
                }
            }
    return (outRef, t)
|
jtdaugherty/vty
|
src/Graphics/Vty/Output/Mock.hs
|
bsd-3-clause
| 3,857
| 0
| 21
| 1,143
| 591
| 342
| 249
| 48
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.RDS.CreateEventSubscription
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates an RDS event notification subscription. This action requires a topic
-- ARN (Amazon Resource Name) created by either the RDS console, the SNS
-- console, or the SNS API. To obtain an ARN with SNS, you must create a topic
-- in Amazon SNS and subscribe to the topic. The ARN is displayed in the SNS
-- console.
--
-- You can specify the type of source (SourceType) you want to be notified of,
-- provide a list of RDS sources (SourceIds) that triggers the events, and
-- provide a list of event categories (EventCategories) for events you want to
-- be notified of. For example, you can specify SourceType = db-instance,
-- SourceIds = mydbinstance1, mydbinstance2 and EventCategories = Availability,
-- Backup.
--
-- If you specify both the SourceType and SourceIds, such as SourceType =
-- db-instance and SourceIdentifier = myDBInstance1, you will be notified of all
-- the db-instance events for the specified source. If you specify a SourceType
-- but do not specify a SourceIdentifier, you will receive notice of the events
-- for that source type for all your RDS sources. If you do not specify either
-- the SourceType nor the SourceIdentifier, you will be notified of events
-- generated from all RDS sources belonging to your customer account.
--
-- <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateEventSubscription.html>
module Network.AWS.RDS.CreateEventSubscription
(
-- * Request
CreateEventSubscription
-- ** Request constructor
, createEventSubscription
-- ** Request lenses
, cesEnabled
, cesEventCategories
, cesSnsTopicArn
, cesSourceIds
, cesSourceType
, cesSubscriptionName
, cesTags
-- * Response
, CreateEventSubscriptionResponse
-- ** Response constructor
, createEventSubscriptionResponse
-- ** Response lenses
, cesrEventSubscription
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.RDS.Types
import qualified GHC.Exts
-- | Request parameters for the RDS CreateEventSubscription action.
data CreateEventSubscription = CreateEventSubscription
    { _cesEnabled          :: Maybe Bool          -- activate the subscription on creation?
    , _cesEventCategories  :: List "member" Text  -- event categories to subscribe to
    , _cesSnsTopicArn      :: Text                -- target SNS topic ARN
    , _cesSourceIds        :: List "member" Text  -- specific sources to watch
    , _cesSourceType       :: Maybe Text          -- e.g. db-instance
    , _cesSubscriptionName :: Text
    , _cesTags             :: List "member" Tag
    } deriving (Eq, Read, Show)
-- | Smart constructor for 'CreateEventSubscription'.  Only the two
-- mandatory fields are taken; everything else starts empty/unset.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cesEnabled' @::@ 'Maybe' 'Bool'
--
-- * 'cesEventCategories' @::@ ['Text']
--
-- * 'cesSnsTopicArn' @::@ 'Text'
--
-- * 'cesSourceIds' @::@ ['Text']
--
-- * 'cesSourceType' @::@ 'Maybe' 'Text'
--
-- * 'cesSubscriptionName' @::@ 'Text'
--
-- * 'cesTags' @::@ ['Tag']
--
createEventSubscription :: Text -- ^ 'cesSubscriptionName'
                        -> Text -- ^ 'cesSnsTopicArn'
                        -> CreateEventSubscription
createEventSubscription subscriptionName snsTopicArn = CreateEventSubscription
    { _cesEnabled          = Nothing
    , _cesEventCategories  = mempty
    , _cesSnsTopicArn      = snsTopicArn
    , _cesSourceIds        = mempty
    , _cesSourceType       = Nothing
    , _cesSubscriptionName = subscriptionName
    , _cesTags             = mempty
    }
-- | A Boolean value; set to true to activate the subscription, set to false to
-- create the subscription but not activate it.
cesEnabled :: Lens' CreateEventSubscription (Maybe Bool)
cesEnabled = lens _cesEnabled (\s a -> s { _cesEnabled = a })

-- | A list of event categories for a SourceType that you want to subscribe to.
-- You can see a list of the categories for a given SourceType in the <http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Events.html Events>
-- topic in the Amazon RDS User Guide or by using the DescribeEventCategories
-- action.
cesEventCategories :: Lens' CreateEventSubscription [Text]
cesEventCategories =
    lens _cesEventCategories (\s a -> s { _cesEventCategories = a })
        . _List

-- | The Amazon Resource Name (ARN) of the SNS topic created for event
-- notification. The ARN is created by Amazon SNS when you create a topic and
-- subscribe to it.
cesSnsTopicArn :: Lens' CreateEventSubscription Text
cesSnsTopicArn = lens _cesSnsTopicArn (\s a -> s { _cesSnsTopicArn = a })

-- | The list of identifiers of the event sources for which events will be
-- returned. If not specified, then all sources are included in the response. An
-- identifier must begin with a letter and must contain only ASCII letters,
-- digits, and hyphens; it cannot end with a hyphen or contain two consecutive
-- hyphens.
--
-- Constraints:
--
-- If SourceIds are supplied, SourceType must also be provided. If the source
-- type is a DB instance, then a 'DBInstanceIdentifier' must be supplied. If the
-- source type is a DB security group, a 'DBSecurityGroupName' must be supplied. If the source type is a DB parameter group, a
-- 'DBParameterGroupName' must be supplied. If the source type is a DB snapshot, a 'DBSnapshotIdentifier' must be supplied.
cesSourceIds :: Lens' CreateEventSubscription [Text]
cesSourceIds = lens _cesSourceIds (\s a -> s { _cesSourceIds = a }) . _List

-- | The type of source that will be generating the events. For example, if you
-- want to be notified of events generated by a DB instance, you would set this
-- parameter to db-instance. if this value is not specified, all events are
-- returned.
--
-- Valid values: db-instance | db-parameter-group | db-security-group |
-- db-snapshot
cesSourceType :: Lens' CreateEventSubscription (Maybe Text)
cesSourceType = lens _cesSourceType (\s a -> s { _cesSourceType = a })

-- | The name of the subscription.
--
-- Constraints: The name must be less than 255 characters.
cesSubscriptionName :: Lens' CreateEventSubscription Text
cesSubscriptionName =
    lens _cesSubscriptionName (\s a -> s { _cesSubscriptionName = a })

-- | Tags to attach to the new event subscription.
cesTags :: Lens' CreateEventSubscription [Tag]
cesTags = lens _cesTags (\s a -> s { _cesTags = a }) . _List
-- | Response payload: the event subscription created by RDS, if returned.
newtype CreateEventSubscriptionResponse = CreateEventSubscriptionResponse
    { _cesrEventSubscription :: Maybe EventSubscription
    } deriving (Eq, Read, Show)
-- | Smart constructor for 'CreateEventSubscriptionResponse' with no
-- subscription set.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cesrEventSubscription' @::@ 'Maybe' 'EventSubscription'
--
createEventSubscriptionResponse :: CreateEventSubscriptionResponse
createEventSubscriptionResponse =
    CreateEventSubscriptionResponse { _cesrEventSubscription = Nothing }
-- | The event subscription returned by the service, if any.
cesrEventSubscription :: Lens' CreateEventSubscriptionResponse (Maybe EventSubscription)
cesrEventSubscription =
    lens _cesrEventSubscription (\s a -> s { _cesrEventSubscription = a })
-- All RDS query actions are posted to the service root path.
instance ToPath CreateEventSubscription where
    toPath _ = "/"
-- Serialise every request field into the AWS query string
instance ToQuery CreateEventSubscription where
    toQuery CreateEventSubscription{..} = mconcat
        [ "Enabled"          =? _cesEnabled
        , "EventCategories"  =? _cesEventCategories
        , "SnsTopicArn"      =? _cesSnsTopicArn
        , "SourceIds"        =? _cesSourceIds
        , "SourceType"       =? _cesSourceType
        , "SubscriptionName" =? _cesSubscriptionName
        , "Tags"             =? _cesTags
        ]
-- No extra headers beyond the defaults
instance ToHeaders CreateEventSubscription

instance AWSRequest CreateEventSubscription where
    type Sv CreateEventSubscription = RDS
    type Rs CreateEventSubscription = CreateEventSubscriptionResponse

    -- Sent as an HTTP POST with the action name; response parsed via FromXML
    request  = post "CreateEventSubscription"
    response = xmlResponse
instance FromXML CreateEventSubscriptionResponse where
parseXML = withElement "CreateEventSubscriptionResult" $ \x -> CreateEventSubscriptionResponse
<$> x .@? "EventSubscription"
|
kim/amazonka
|
amazonka-rds/gen/Network/AWS/RDS/CreateEventSubscription.hs
|
mpl-2.0
| 8,720
| 0
| 10
| 1,783
| 893
| 552
| 341
| 94
| 1
|
{-# LANGUAGE FlexibleContexts, TypeOperators, MultiParamTypeClasses, ScopedTypeVariables #-}
{- |
Module : BaseTransCFJavaNew
Description : Basic translation of dependently typed FCore to Java
Copyright : (c) 2014—2015 The F2J Project Developers (given in AUTHORS.txt)
License : BSD3
Maintainer : Jeremy <bixuanxbi@gmail.com>, Tomas <tomtau@connect.hku.hk>
Stability : stable
Portability : non-portable (MPTC)
This module implements the basic translation of FCore to Java. For
more information, please refer to the paper on wiki.
-}
module BaseTransCFJavaNew where
import qualified Language.Java.Syntax as J
import Lens.Micro
import ClosureFNew
import Inheritance
import JavaEDSL
import MonadLib hiding (Alt)
import Panic
import qualified Src as S
import StringPrefixes
-- | Reflexive upcast: every 'Translate' trivially embeds into itself.
instance (:<) (Translate m) (Translate m) where
   up = id
-- | Statements that initialise captured variables inside a closure body.
type InitVars = [J.BlockStmt]
-- | Assemble a Java compilation unit from an optional package declaration
-- and a list of type declarations; the import list is always empty.
createCUB :: Maybe J.PackageDecl -> [J.TypeDecl] -> J.CompilationUnit
createCUB pkg typeDecls = J.CompilationUnit pkg [] typeDecls
-- | Declare a final local variable @tempName@ of class type @className@,
-- initialised from @expr@.  A downcast is inserted unless the target type
-- is already java.lang.Object.
initClass :: String -> String -> J.Exp -> J.BlockStmt
initClass className tempName expr = localFinalVar ty (varDecl tempName rhs)
  where
    ty = classTy className
    rhs
      | ty == objClassTy = expr
      | otherwise = cast ty expr
-- | Variable index used throughout translation.
type Var = Int -- Either normal variable or class name
-- | A translated Java value: a variable name, or (special case) a literal.
type TransJavaExp = Either J.Name J.Literal -- either variable or special case: Lit
-- | Result of translating one expression: generated statements, the value
-- expression, and its FCore type.
type TransType = ([J.BlockStmt], TransJavaExp, Type TransBind)
-- | Type information attached to a binder; 'None' marks a binder whose
-- type is not needed during translation.
data TyBind = TB { unTB :: Type (Var, TyBind) }
            | None
-- | Translation-time binding: variable index paired with its type info.
type TransBind = (Var, TyBind)
-- | Open-recursive record of translation operations.  Extensions built
-- with ':<' override individual fields (see 'trans').
data Translate m =
  T
  { translateM :: Expr TransBind -> m TransType
    -- ^ Translate one FCore expression
  , translateScopeM :: Scope TransBind -> Maybe Int -> m ([J.BlockStmt], TransJavaExp, Scope TransBind)
    -- ^ Translate a binder scope; the 'Maybe Int' is a pre-allocated
    -- variable index (supplied by the recursive-binding case, 'Mu')
  , translateApply :: Bool -> m TransType -> Expr TransBind -> m TransType
  , translateIf :: m TransType -> m TransType -> m TransType -> m TransType
  , translateLet :: TransType -> (TransBind -> Expr TransBind) -> m TransType
  , translateScopeTyp :: Int -> Int -> [J.BlockStmt] -> Scope TransBind -> m ([J.BlockStmt], TransJavaExp, Scope TransBind) -> String -> m ([J.BlockStmt], Scope TransBind)
  , genApply :: J.Exp -> Scope TransBind -> String -> J.Type -> J.Type -> m [J.BlockStmt]
  , genRes :: Scope TransBind -> [J.BlockStmt] -> m [J.BlockStmt]
  , applyRetType :: Type TransBind -> m (Maybe J.Type)
  , genClone :: m Bool
  , withApply :: m Bool
  , getPrefix :: m String
    -- ^ Package prefix for generated runtime classes (e.g. the Closure class)
  , javaType :: Type TransBind -> m J.Type
  , chooseCastBox :: Type TransBind -> m (String -> J.Exp -> J.BlockStmt, J.Type)
  , stackMainBody :: Type Int -> m [J.BlockStmt]
  , genClosureVar :: Bool -> Int -> TransJavaExp -> m J.Exp
  , createWrap :: String -> Expr TransBind -> m (J.CompilationUnit, Type TransBind)
  }
-- | Produce a fresh local-variable name (prefix plus counter) and bump
-- the name-supply counter held in the state monad.
getNewVarName :: MonadState Int m => Translate m -> m String
getNewVarName _ = do
  counter <- get
  put (counter + 1)
  return (localvarstr ++ show counter)
-- | Bind the Java expression @e@ to a new final local variable @varId@,
-- whose Java type is derived from the FCore type @t@.
assignVar :: Monad m => Translate m -> Type TransBind -> String -> J.Exp -> m J.BlockStmt
assignVar this t varId e = do
  jTy <- javaType this t
  let decl = varDecl varId e
  return (localFinalVar jTy decl)
-- | Name of the runtime class backing a tuple of the given width
-- (tuples.TupleN).  Widths above 50 are unsupported and abort.
getTupleClassName :: [a] -> String
getTupleClassName tuple
  | lengthOfTuple > 50 = panic "The length of tuple is too long (>50)!"
  | otherwise = namespace ++ "tuples.Tuple" ++ show lengthOfTuple
  where
    lengthOfTuple = length tuple
-- | Generate statements for an if-expression whose condition has already
-- been translated: @(s1, j1)@ are the condition's statements and value,
-- and @n@ numbers the shared result variable that both branches assign.
genIfBody :: MonadState Int m
          => Translate m
          -> m TransType
          -> m TransType
          -> ([J.BlockStmt], TransJavaExp)
          -> Int
          -> m TransType
genIfBody this m2 m3 (s1, j1) n = do
  (s2, j2, t2) <- m2
  (s3, j3, _) <- m3
  let ifvarname = ifresultstr ++ show n
  aType <- javaType this t2 -- result variable is typed after the then-branch
  let ifresdecl = localVar aType (varDeclNoInit ifvarname)
  -- Each branch runs its own statements, then assigns into the shared local.
  let thenPart = J.StmtBlock $ block (s2 ++ [bsAssign (name [ifvarname]) (unwrap j2)])
  let elsePart = J.StmtBlock $ block (s3 ++ [bsAssign (name [ifvarname]) (unwrap j3)])
  let ifstmt = bStmt $ J.IfThenElse (unwrap j1) thenPart elsePart
  return (s1 ++ [ifresdecl, ifstmt], var ifvarname, t2)
-- | Generate statements that invoke a translated closure: store the
-- closure value @j1@ in a fresh local, write the argument @j2@ into its
-- input field, run apply, then cast/box the output field into a second
-- fresh local, which becomes the result.
getS3 :: MonadState Int m
      => Translate m
      -> J.Exp
      -> J.Exp
      -> Scope TransBind
      -> J.Type
      -> m ([J.BlockStmt], TransJavaExp)
getS3 this j1 j2 retTyp ctempCastTyp = do
  (n :: Int) <- get
  put (n + 2) -- two fresh names: the closure local and the result local
  let f = localvarstr ++ show n
  let xf = localvarstr ++ show (n + 1)
  let fexp = left . var $ f
  let fd = localVar ctempCastTyp (varDecl f j1)
  let fs = assignField (fieldAccExp fexp closureInput) j2
  (castBox, typ) <- chooseCastBox this (scope2ctyp retTyp)
  apply <- genApply this fexp retTyp xf typ ctempCastTyp
  let fout = fieldAccess fexp closureOutput
  res <- genRes this retTyp [castBox xf fout]
  -- Order matters: declare closure, set input, apply, read/cast output.
  let r = fd : fs : apply ++ res
  return (r, var xf)
-- | Materialise a runtime representation of a type: declare a fresh
-- TypeHouse-like object and store the type's name @str@ in its type field.
createTypeHouse :: MonadState Int m
                => Translate m
                -> String
                -> m ([J.BlockStmt], TransJavaExp)
createTypeHouse this str = do
  (idx :: Int) <- get
  put (idx + 1)
  let tyVar = localvarstr ++ show idx
      stmts =
        [ localVar typeOfType (varDecl tyVar typeHouseCreate)
        , assignField (fieldAccExp (left . var $ tyVar) typeField) (J.Lit (J.String str))
        ]
  return (stmts, var tyVar)
-- | Substitute expression @t@ for the variable with index @n@ throughout
-- a scope (mutually recursive with 'substExpr').
substEScope :: Int -> Expr TransBind -> Scope TransBind -> Scope TransBind
substEScope n t scope =
  case scope of
    Body body -> Body (substExpr n t body)
    Type ty f -> Type (substExpr n t ty) (\a -> substEScope n t (f a))
-- | Substitute expression @t@ for the variable with index @n@ in an
-- expression.  Only variables and Pi binders are traversed; every other
-- form is returned unchanged.
substExpr :: Int -> Expr TransBind -> Expr TransBind -> Expr TransBind
substExpr n t e =
  case e of
    Var _ (x, _) | x == n -> t
    Pi n' s -> Pi n' (substEScope n t s)
    other -> other
-- | Translation dispatcher
--
-- Ties the open-recursion knot: @self@ is the (possibly extended) final
-- translator, upcast to the base 'Translate' interface, and every field
-- is implemented in terms of it so derived translators can override
-- individual operations.
trans :: (MonadState Int m, selfType :< Translate m) => Base selfType (Translate m)
trans self =
  let this = up self
  in T
     { translateM = translateM' this
     , translateScopeM = translateScopeM' this
     , translateApply = translateApply' this
     , translateIf = translateIf' this
     , translateLet = translateLet' this
     , translateScopeTyp = translateScopeTyp' this
     , genApply = \f _ _ _ _ -> return [bStmt $ applyMethodCall f]
     , genRes = const return
     , applyRetType = applyRetType' this
     , genClone = return False -- do not generate clone method
     , withApply = return True
     , getPrefix = return namespace
     , javaType = javaType' this
     , chooseCastBox = chooseCastBox' this
     , stackMainBody = \_ -> return []
     , genClosureVar = \_ _ j1 -> return (unwrap j1)
     , createWrap = createWrap' this
     }
-- Field functions
-- | Translate a single FCore expression into Java statements plus a result
-- value.  Fix over the original: the 'TupleType' and 'Pi' alternatives no
-- longer bind names they never use (silences -Wunused-matches).
translateM' this e =
  case e of
    -- Variables become references to previously declared locals.
    Var _ (i, t) -> return ([], var (localvarstr ++ show i), unTB t)
    Lit lit ->
      case lit of
        (S.Int i) -> return ([], Right $ J.Int i, JClass "java.lang.Integer")
        (S.UnitLit) -> return ([], Right J.Null, Unit)
        (S.String s) -> return ([], Right $ J.String s, JClass "java.lang.String")
        (S.Bool b) -> return ([], Right $ J.Boolean b, JClass "java.lang.Boolean")
        (S.Char c) -> return ([], Right $ J.Char c, JClass "java.lang.Character")
    PrimOp e1 op e2 -> do
      (s1, j1, _) <- translateM this e1
      (s2, j2, _) <- translateM this e2
      let j1' = unwrap j1
      let j2' = unwrap j2
      -- Result type follows the operator class, not the operands.
      let (jexpr, typ) =
            case op of
              (S.Arith realOp) -> (J.BinOp j1' realOp j2', JClass "java.lang.Integer")
              (S.Compare realOp) -> (J.BinOp j1' realOp j2', JClass "java.lang.Boolean")
              (S.Logic realOp) -> (J.BinOp j1' realOp j2', JClass "java.lang.Boolean")
      newVarName <- getNewVarName this
      assignExpr <- assignVar this typ newVarName jexpr
      return (s1 ++ s2 ++ [assignExpr], var newVarName, typ)
    If e1 e2 e3 -> translateIf this (translateM this e1) (translateM this e2) (translateM this e3)
    Tuple tuple ->
      case tuple of
        -- Singleton tuples are represented by the element itself.
        [t] -> do
          (s1, j1, t1) <- translateM this t
          return (s1, j1, TupleType [t1])
        _ -> do
          tuple' <- mapM (translateM this) tuple
          let (statements, exprs, types) = unzip3 tuple' & _1 %~ concat
          newVarName <- getNewVarName this
          let c = getTupleClassName tuple
          let rhs = instCreat (classTyp c) (map unwrap exprs)
          assignExpr <- assignVar this (JClass c) newVarName rhs
          return (statements ++ [assignExpr], var newVarName, TupleType types)
    Proj index expr -> do
      ret@(statement, javaExpr, exprType) <- translateM this expr
      case exprType of
        -- Projection from a singleton tuple is the identity.
        TupleType [_] -> return ret
        TupleType types -> do
          newVarName <- getNewVarName this
          let typ = types !! (index - 1) -- 1-based projection index
          aType <- javaType this typ
          let rhs = cast aType (fieldAccess (unwrap javaExpr) ("_" ++ show index))
          assignExpr <- assignVar this typ newVarName rhs
          return (statement ++ [assignExpr], var newVarName, typ)
        _ -> panic "BaseTransCFJava.trans: expected tuple type"
    App e1 e2 -> translateApply this False (translateM this e1) e2
    Let _ expr body -> do
      (s1, j1, t1) <- translateM this expr
      translateLet this (s1, j1, t1) body
    Lam _ se -> do
      (s, je, t) <- translateScopeM this se Nothing
      return (s, je, Pi "_" t)
    -- Recursive binding: pre-allocate the variable index so the body can
    -- refer to the function being defined.
    Mu _ (Type t se) -> do
      n <- get
      put (n + 1)
      (expr, je, _) <- translateScopeM this (se (n, TB t)) (Just n)
      return (expr, je, t)
    SeqExprs es -> do
      es' <- mapM (translateM this) es
      let (_, lastExp, lastType) = last es'
      let statements = concatMap (\(x, _, _) -> x) es'
      return (statements, lastExp, lastType)
    JNew c args -> do
      args' <- mapM (translateM this) args
      let (statements, exprs, types) = unzip3 args' & _1 %~ concat
      let rhs = J.InstanceCreation
                  (map
                     (\y -> case y of
                        JClass x -> J.ActualType $ J.ClassRefType $ J.ClassType [(J.Ident x, [])]
                        _ -> sorry "BaseTransCFJava.trans.JNew: no idea how to do")
                     types)
                  (J.ClassType [(J.Ident c, [])])
                  (map unwrap exprs)
                  Nothing
      let typ = JClass c
      newVarName <- getNewVarName this
      assignExpr <- assignVar this typ newVarName rhs
      return (statements ++ [assignExpr], var newVarName, typ)
    JMethod c m args r -> do
      args' <- mapM (translateM this) args
      let (statements, exprs, types) = unzip3 args' & _1 %~ concat
      let exprs' = map unwrap exprs
      let refTypes =
            map
              (\y -> case y of
                 JClass x -> J.ClassRefType $ J.ClassType [(J.Ident x, [])]
                 _ -> sorry "BaseTransCFJava.trans.JMethod: no idea how to do")
              types
      -- Either an instance call (receiver expression) or a static call
      -- (class name).
      (classStatement, rhs) <- case c of
        Right ce ->
          do
            (classS, classE, _) <- translateM this ce
            return
              (classS, J.MethodInv $ J.PrimaryMethodCall
                         (unwrap classE)
                         refTypes
                         (J.Ident m)
                         exprs')
        Left cn ->
          return
            ([], J.MethodInv $ J.TypeMethodCall
                   (J.Name [J.Ident cn])
                   refTypes
                   (J.Ident m)
                   exprs')
      if r /= "java.lang.Void"
        then do
          let typ = JClass r
          newVarName <- getNewVarName this
          assignExpr <- assignVar this typ newVarName rhs
          return (statements ++ classStatement ++ [assignExpr], var newVarName, typ)
        -- void methods: emit the call for effect, result is Unit/null.
        else return
               (statements ++ classStatement ++ [J.BlockStmt $ J.ExpStmt rhs], Right J.Null, Unit)
    JField c fName r -> do
      (classStatement, classExpr, _) <- case c of
        Right ce ->
          translateM this ce
        -- Static field: the type component is discarded by the '_' pattern
        -- above, so this 'undefined' is never forced.
        Left cn ->
          return ([], Left $ J.Name [J.Ident cn], undefined)
      newVarName <- getNewVarName this
      aType <- javaType this r
      let rhs = J.Cast aType $ J.FieldAccess $ J.PrimaryFieldAccess (unwrap classExpr)
                                                                    (J.Ident fName)
          assignExpr = localFinalVar aType $ varDecl newVarName rhs
      return (classStatement ++ [assignExpr], var newVarName, r)
    -- Casts are erased: only the recorded type changes.
    CastUp t e -> do
      (s, v, _) <- translateM this e
      return (s, v, t)
    CastDown t e -> do
      (s, v, _) <- translateM this e
      return (s, v, t)
    -- Types appearing at the term level get a runtime representation.
    JClass className -> do
      (stmts, jvar) <- createTypeHouse this className
      return (stmts, jvar, Star)
    Star -> do
      (stmts, jvar) <- createTypeHouse this "Star"
      return (stmts, jvar, Star)
    TupleType _ -> do
      (stmts, jvar) <- createTypeHouse this "TupleType"
      return (stmts, jvar, Star)
    Unit -> do
      (stmts, jvar) <- createTypeHouse this "Unit"
      return (stmts, jvar, Star)
    Pi _ _ -> do
      (stmts, jvar) <- createTypeHouse this "Pi"
      return (stmts, jvar, Star)
    -- Error related
    _ -> panic "BaseTransCFJavaNew.trans: don't know how to do"
-- | Translate a scope (a chain of binders ending in a body).
--
-- For a 'Type' binder a closure class is generated: @x1@ indexes the
-- closure object (reusing the pre-allocated index @m@ when given, as for
-- recursive bindings) and @x2@ indexes the bound variable extracted from
-- the closure's input field.
translateScopeM' this e m =
  case e of
    Body t -> do
      (s, je, t1) <- translateM this t
      return (s, je, Body t1)
    Type t g -> do
      n <- get
      -- Reuse m as the closure index if supplied, else allocate both fresh.
      let (x1, x2) = maybe (n + 1, n + 2) (\i -> (i, n + 1)) m
      put (x2 + 1)
      let nextInClosure = g (x2, TB t)
      typT1 <- javaType this t
      let flag = typT1 == objClassTy -- plain Object needs no downcast
      let accessField = fieldAccess (left $ var (localvarstr ++ show x1)) closureInput
      -- Bind the closure's input field to the scope variable x2.
      let xf = localFinalVar typT1
                 (varDecl (localvarstr ++ show x2)
                    (if flag
                       then accessField
                       else cast typT1 accessField))
      closureClass <- liftM2 (++) (getPrefix this) (return "Closure")
      (cvar, t1) <- translateScopeTyp
                      this
                      x1
                      n
                      [xf]
                      nextInClosure
                      (translateScopeM this nextInClosure Nothing)
                      closureClass
      let fstmt = [localVar closureType (varDecl (localvarstr ++ show n) (funInstCreate n))]
      -- The returned scope substitutes a fresh variable for x2 so callers
      -- can instantiate the binder themselves.
      return
        (cvar ++ fstmt, var (localvarstr ++ show n), Type t (\a -> substEScope x2 (Var "_" a) t1))
-- | Translate a function application.  Fix over the original: the scope's
-- type component was bound as @t1@ but never used; it is now a wildcard.
-- NOTE(review): the pattern on @m1@ is intentionally partial — it assumes
-- a well-typed program whose function position has a Pi type.
translateApply' this flag m1 m2 = do
  (s1, j1', Pi _ (Type _ g)) <- m1
  (n :: Int) <- get
  put (n + 1)
  (s2, j2, _) <- translateM this m2
  -- Instantiate the scope with a fresh untyped binder, then substitute the
  -- argument expression for it to obtain the result type.
  let retTyp = substEScope n m2 (g (n, None))
  j1 <- genClosureVar this flag (getArity retTyp) j1'
  (s3, nje3) <- getS3 this j1 (unwrap j2) retTyp closureType
  return (s2 ++ s1 ++ s3, nje3, scope2ctyp retTyp)
-- TODO: This is not dependently typed
-- | Translate a let-binding: keep the bound expression's statements,
-- declare a fresh local for its value, then translate the body with that
-- variable in scope.
translateLet' this (bindStmts, bindVal, bindTy) body = do
  (idx :: Int) <- get
  put (idx + 1)
  (bodyStmts, bodyVal, bodyTy) <- translateM this (body (idx, TB bindTy))
  jTy <- javaType this bindTy
  let varName = localvarstr ++ show idx
      decl = localVar jTy (varDecl varName (unwrap bindVal))
  return (bindStmts ++ [decl] ++ bodyStmts, bodyVal, bodyTy)
-- | Translate an if-expression: run the condition's translation first,
-- then hand the branches to 'genIfBody' with a fresh counter value used
-- to name the shared result variable.
translateIf' this condM thenM elseM = do
  idx <- get
  put (idx + 1)
  (condStmts, condVal, _) <- condM
  genIfBody this thenM elseM (condStmts, condVal) idx
-- | Generate the local closure class for a scope.  @x1@ indexes the
-- captured enclosing closure and @f@ numbers the generated class.
-- Fix over the original: the pointless alias @let fc = f@ is removed.
translateScopeTyp' this x1 f initVars _ otherStmts closureClass = do
  cloneable <- genClone this
  (ostmts, oexpr, t1) <- otherStmts
  return
    ([ localClassDecl (closureTransName ++ show f) closureClass
         (closureBodyGen
            -- Capture the enclosing closure object in a field.
            [ memberDecl $ fieldDecl (classTy closureClass)
                (varDecl (localvarstr ++ show x1) J.This)
            ]
            -- Body: init vars, inner statements, then store the result in
            -- the closure's output field.
            (initVars ++ ostmts ++ [bsAssign (name [closureOutput]) (unwrap oexpr)])
            f
            cloneable
            (classTy closureClass))
     ], t1)
-- | Map an FCore type to the Java type representing it at runtime:
-- functions (Pi) become the generated Closure class, tuples become the
-- tuples.TupleN classes, anything else is java.lang.Object.
-- Fixes over the original: the Pi scope was bound as @s@ but never used
-- (now a wildcard), and @liftM2 (++) m (return "Closure")@ is replaced by
-- straightforward do-notation.
javaType' this typ =
  case typ of
    (JClass c) -> return (classTy c)
    (Pi _ _) -> do
      prefix <- getPrefix this
      return (classTy (prefix ++ "Closure"))
    (TupleType tuple) ->
      case tuple of
        [t] -> javaType this t -- singleton tuples are unwrapped
        _ -> return (classTy (getTupleClassName tuple))
    _ -> return objClassTy
-- | For an FCore type, choose the statement generator that casts/boxes a
-- Java expression into a named final local, together with the Java type
-- used for the cast.  Fix over the original: the convoluted
-- @liftM2 (++) m (return "Closure")@ is replaced by do-notation.
chooseCastBox' this typ =
  case typ of
    (JClass c) -> return (initClass c, classTy c)
    (Pi _ _) -> do
      prefix <- getPrefix this
      let closureClass = prefix ++ "Closure"
      return (initClass closureClass, classTy closureClass)
    (TupleType tuple) ->
      case tuple of
        [t] -> chooseCastBox this t -- singleton tuples are unwrapped
        _ ->
          let tupleClassName = getTupleClassName tuple
          in return (initClass tupleClassName, classTy tupleClassName)
    _ -> return (initClass "Object", objClassTy)
-- | Wrap a translated top-level expression into a complete compilation
-- unit holding a single wrapper class named @name@ whose body computes
-- and returns the expression's value.
createWrap' this name expr = do
  (stmts, resultExpr, resultTy) <- translateM this expr
  returnType <- applyRetType this resultTy
  let classBody = stmts ++ [bStmt $ J.Return $ Just (unwrap resultExpr)]
      javaCode = wrapperClass False name classBody returnType mainBody
  return (createCUB Nothing [javaCode], resultTy)
-- | Java return type for the generated wrapper: known primitive wrappers
-- are unboxed; everything else defaults to java.lang.Object.
applyRetType' _ t = return (Just jTy)
  where
    jTy = case t of
      JClass "java.lang.Integer" -> J.PrimType J.IntT
      JClass "java.lang.Boolean" -> J.PrimType J.BooleanT
      _ -> objClassTy
|
bixuanzju/fcore
|
lib/archive/BaseTransCFJavaNew.hs
|
bsd-2-clause
| 17,815
| 0
| 26
| 5,898
| 6,349
| 3,167
| 3,182
| 396
| 36
|
{-# LANGUAGE PolyKinds, TypeFamilies, MagicHash, DataKinds, TypeInType, RankNTypes #-}
module T11473 where -- testsuite file: expected to FAIL to compile (see below)
import GHC.Exts
import GHC.Types
type family Boxed (a :: k) :: * -- lifted type corresponding to (possibly unlifted) a
type instance Boxed Char# = Char
type instance Boxed Char = Char
class BoxIt (a :: TYPE lev) where -- levity-polymorphic class over TYPE lev
    boxed :: a -> Boxed a
instance BoxIt Char# where boxed x = C# x -- unlifted instance
instance BoxIt Char where boxed = id -- lifted instance
-- This should be an error: there is no way we can produce code for both Lifted
-- and Unlifted levities
hello :: forall (lev :: Levity). forall (a :: TYPE lev). BoxIt a => a -> Boxed a
hello x = boxed x
|
mcschroeder/ghc
|
testsuite/tests/typecheck/should_fail/T11473.hs
|
bsd-3-clause
| 587
| 0
| 9
| 120
| 169
| 94
| 75
| 13
| 1
|
{-
NOTA BENE: Do NOT use ($) anywhere in this module! The type of ($) is
slightly magical (it can return unlifted types), and it is wired in.
But, it is also *defined* in this module, with a non-magical type.
GHC gets terribly confused (and *hangs*) if you try to use ($) in this
module, because it has different types in different scenarios.
This is not a problem in general, because the type ($), being wired in, is not
written out to the interface file, so importing files don't get confused.
The problem is only if ($) is used here. So don't!
---------------------------------------------
The overall structure of the GHC Prelude is a bit tricky.
a) We want to avoid "orphan modules", i.e. ones with instance
decls that don't belong either to a tycon or a class
defined in the same module
b) We want to avoid giant modules
So the rough structure is as follows, in (linearised) dependency order
GHC.Prim Has no implementation. It defines built-in things, and
by importing it you bring them into scope.
The source file is GHC.Prim.hi-boot, which is just
copied to make GHC.Prim.hi
GHC.Base Classes: Eq, Ord, Functor, Monad
Types: list, (), Int, Bool, Ordering, Char, String
Data.Tuple Types: tuples, plus instances for GHC.Base classes
GHC.Show Class: Show, plus instances for GHC.Base/GHC.Tup types
GHC.Enum Class: Enum, plus instances for GHC.Base/GHC.Tup types
Data.Maybe Type: Maybe, plus instances for GHC.Base classes
GHC.List List functions
GHC.Num Class: Num, plus instances for Int
Type: Integer, plus instances for all classes so far (Eq, Ord, Num, Show)
Integer is needed here because it is mentioned in the signature
of 'fromInteger' in class Num
GHC.Real Classes: Real, Integral, Fractional, RealFrac
plus instances for Int, Integer
Types: Ratio, Rational
        plus instances for classes so far
Rational is needed here because it is mentioned in the signature
of 'toRational' in class Real
GHC.ST The ST monad, instances and a few helper functions
Ix Classes: Ix, plus instances for Int, Bool, Char, Integer, Ordering, tuples
GHC.Arr Types: Array, MutableArray, MutableVar
Arrays are used by a function in GHC.Float
GHC.Float Classes: Floating, RealFloat
Types: Float, Double, plus instances of all classes so far
This module contains everything to do with floating point.
It is a big module (900 lines)
With a bit of luck, many modules can be compiled without ever reading GHC.Float.hi
Other Prelude modules are much easier with fewer complex dependencies.
-}
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE CPP
, NoImplicitPrelude
, BangPatterns
, ExplicitForAll
, MagicHash
, UnboxedTuples
, ExistentialQuantification
, RankNTypes
#-}
-- -Wno-orphans is needed for things like:
-- Orphan rule: "x# -# x#" ALWAYS forall x# :: Int# -# x# x# = 0
{-# OPTIONS_GHC -Wno-orphans #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Base
-- Copyright : (c) The University of Glasgow, 1992-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- Basic data types and classes.
--
-----------------------------------------------------------------------------
#include "MachDeps.h"
module GHC.Base
(
module GHC.Base,
module GHC.Classes,
module GHC.CString,
module GHC.Magic,
module GHC.Types,
module GHC.Prim, -- Re-export GHC.Prim and [boot] GHC.Err,
-- to avoid lots of people having to
module GHC.Err -- import it explicitly
)
where
import GHC.Types
import GHC.Classes
import GHC.CString
import GHC.Magic
import GHC.Prim
import GHC.Err
import {-# SOURCE #-} GHC.IO (failIO,mplusIO)
import GHC.Tuple () -- Note [Depend on GHC.Tuple]
import GHC.Integer () -- Note [Depend on GHC.Integer]
infixr 9 .
infixr 5 ++
infixl 4 <$
infixl 1 >>, >>=
infixr 1 =<<
infixr 0 $, $!
infixl 4 <*>, <*, *>, <**>
default () -- Double isn't available yet
{-
Note [Depend on GHC.Integer]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The Integer type is special because TidyPgm uses
GHC.Integer.Type.mkInteger to construct Integer literal values
Currently it reads the interface file whether or not the current
module *has* any Integer literals, so it's important that
GHC.Integer.Type (in package integer-gmp or integer-simple) is
compiled before any other module. (There's a hack in GHC to disable
this for packages ghc-prim, integer-gmp, integer-simple, which aren't
allowed to contain any Integer literals.)
Likewise we implicitly need Integer when deriving things like Eq
instances.
The danger is that if the build system doesn't know about the dependency
on Integer, it'll compile some base module before GHC.Integer.Type,
resulting in:
Failed to load interface for ‘GHC.Integer.Type’
There are files missing in the ‘integer-gmp’ package,
Bottom line: we make GHC.Base depend on GHC.Integer; and everything
else either depends on GHC.Base, or does not have NoImplicitPrelude
(and hence depends on Prelude).
Note [Depend on GHC.Tuple]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Similarly, tuple syntax (or ()) creates an implicit dependency on
GHC.Tuple, so we use the same rule as for Integer --- see Note [Depend on
GHC.Integer] --- to explain this to the build system. We make GHC.Base
depend on GHC.Tuple, and everything else depends on GHC.Base or Prelude.
-}
#if 0
-- for use when compiling GHC.Base itself doesn't work
-- (This CPP branch is never compiled: it is a debugging stub with minimal
-- definitions; the real ones presumably come from GHC.Types/GHC.Classes —
-- see the module re-exports above.)
data  Bool  =  False | True
data Ordering = LT | EQ | GT
data Char = C# Char#
type  String = [Char]
data Int = I# Int#
data  ()  =  ()
data [] a = MkNil
not True = False
(&&) True True = True
otherwise = True
build = errorWithoutStackTrace "urk"
foldr = errorWithoutStackTrace "urk"
#endif
-- | The 'Maybe' type encapsulates an optional value. A value of type
-- @'Maybe' a@ either contains a value of type @a@ (represented as @'Just' a@),
-- or it is empty (represented as 'Nothing'). Using 'Maybe' is a good way to
-- deal with errors or exceptional cases without resorting to drastic
-- measures such as 'error'.
--
-- The 'Maybe' type is also a monad. It is a simple kind of error
-- monad, where all errors are represented by 'Nothing'. A richer
-- error monad can be built using the 'Data.Either.Either' type.
--
data Maybe a = Nothing | Just a
  deriving (Eq, Ord) -- derived Ord follows constructor order: Nothing < Just _
-- | The class of monoids (types with an associative binary operation that
-- has an identity). Instances should satisfy the following laws:
--
-- * @mappend mempty x = x@
--
-- * @mappend x mempty = x@
--
-- * @mappend x (mappend y z) = mappend (mappend x y) z@
--
-- * @mconcat = 'foldr' mappend mempty@
--
-- The method names refer to the monoid of lists under concatenation,
-- but there are many other instances.
--
-- Some types can be viewed as a monoid in more than one way,
-- e.g. both addition and multiplication on numbers.
-- In such cases we often define @newtype@s and make those instances
-- of 'Monoid', e.g. 'Sum' and 'Product'.
class Monoid a where
        mempty :: a
        -- ^ Identity of 'mappend'
        mappend :: a -> a -> a
        -- ^ An associative operation
        mconcat :: [a] -> a
        -- ^ Fold a list using the monoid.
        -- For most types, the default definition for 'mconcat' will be
        -- used, but the function is included in the class definition so
        -- that an optimized version can be provided for specific types.
        mconcat = foldr mappend mempty
        -- right fold: x1 `mappend` (x2 `mappend` (... `mappend` mempty))
instance Monoid [a] where
        {-# INLINE mempty #-}
        mempty = []
        {-# INLINE mappend #-}
        mappend = (++)
        {-# INLINE mconcat #-}
        -- written as a comprehension rather than 'concat' so it desugars
        -- to a fusible build/foldr form
        mconcat xss = [x | xs <- xss, x <- xs]
        -- See Note: [List comprehensions and inlining]
{-
Note: [List comprehensions and inlining]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The list monad operations are traditionally described in terms of concatMap:
xs >>= f = concatMap f xs
Similarly, mconcat for lists is just concat. Here in Base, however, we don't
have concatMap, and we'll refrain from adding it here so it won't have to be
hidden in imports. Instead, we use GHC's list comprehension desugaring
mechanism to define mconcat and the Applicative and Monad instances for lists.
We mark them INLINE because the inliner is not generally too keen to inline
build forms such as the ones these desugar to without our insistence. Defining
these using list comprehensions instead of foldr has an additional potential
benefit, as described in compiler/deSugar/DsListComp.lhs: if optimizations
needed to make foldr/build forms efficient are turned off, we'll get reasonably
efficient translations anyway.
-}
-- pointwise lifting: combine the results of both functions at each argument
instance Monoid b => Monoid (a -> b) where
        mempty _ = mempty
        mappend f g x = f x `mappend` g x
instance Monoid () where
        -- Should it be strict?
        mempty = ()
        _ `mappend` _ = ()
        mconcat _ = ()
-- tuple monoids combine component-wise
instance (Monoid a, Monoid b) => Monoid (a,b) where
        mempty = (mempty, mempty)
        (a1,b1) `mappend` (a2,b2) =
                (a1 `mappend` a2, b1 `mappend` b2)
instance (Monoid a, Monoid b, Monoid c) => Monoid (a,b,c) where
        mempty = (mempty, mempty, mempty)
        (a1,b1,c1) `mappend` (a2,b2,c2) =
                (a1 `mappend` a2, b1 `mappend` b2, c1 `mappend` c2)
instance (Monoid a, Monoid b, Monoid c, Monoid d) => Monoid (a,b,c,d) where
        mempty = (mempty, mempty, mempty, mempty)
        (a1,b1,c1,d1) `mappend` (a2,b2,c2,d2) =
                (a1 `mappend` a2, b1 `mappend` b2,
                 c1 `mappend` c2, d1 `mappend` d2)
instance (Monoid a, Monoid b, Monoid c, Monoid d, Monoid e) =>
                Monoid (a,b,c,d,e) where
        mempty = (mempty, mempty, mempty, mempty, mempty)
        (a1,b1,c1,d1,e1) `mappend` (a2,b2,c2,d2,e2) =
                (a1 `mappend` a2, b1 `mappend` b2, c1 `mappend` c2,
                 d1 `mappend` d2, e1 `mappend` e2)
-- lexicographical ordering
instance Monoid Ordering where
        mempty = EQ
        LT `mappend` _ = LT
        EQ `mappend` y = y
        GT `mappend` _ = GT
-- | Lift a semigroup into 'Maybe' forming a 'Monoid' according to
-- <http://en.wikipedia.org/wiki/Monoid>: \"Any semigroup @S@ may be
-- turned into a monoid simply by adjoining an element @e@ not in @S@
-- and defining @e*e = e@ and @e*s = s = s*e@ for all @s ∈ S@.\" Since
-- there is no \"Semigroup\" typeclass providing just 'mappend', we
-- use 'Monoid' instead.
instance Monoid a => Monoid (Maybe a) where
  -- 'Nothing' plays the adjoined identity element described above
  mempty = Nothing
  Nothing `mappend` m = m
  m `mappend` Nothing = m
  Just m1 `mappend` Just m2 = Just (m1 `mappend` m2)
-- Writer-like pair: the first component accumulates with 'mappend' while
-- the function acts on the second component.
instance Monoid a => Applicative ((,) a) where
    pure x = (mempty, x)
    (u, f) <*> (v, x) = (u `mappend` v, f x)
instance Monoid a => Monad ((,) a) where
    (u, a) >>= k = case k a of (v, b) -> (u `mappend` v, b)
-- Lifted pointwise over the actions' results.
instance Monoid a => Monoid (IO a) where
    mempty = pure mempty
    mappend = liftA2 mappend
{- | The 'Functor' class is used for types that can be mapped over.
Instances of 'Functor' should satisfy the following laws:
> fmap id == id
> fmap (f . g) == fmap f . fmap g
The instances of 'Functor' for lists, 'Data.Maybe.Maybe' and 'System.IO.IO'
satisfy these laws.
-}
class Functor f where
    -- | Map a function over the structure's element(s) / the action's result.
    fmap :: (a -> b) -> f a -> f b
    -- | Replace all locations in the input with the same value.
    -- The default definition is @'fmap' . 'const'@, but this may be
    -- overridden with a more efficient version.
    (<$) :: a -> f b -> f a
    (<$) = fmap . const
-- | A functor with application, providing operations to
--
-- * embed pure expressions ('pure'), and
--
-- * sequence computations and combine their results ('<*>').
--
-- A minimal complete definition must include implementations of these
-- functions satisfying the following laws:
--
-- [/identity/]
--
-- @'pure' 'id' '<*>' v = v@
--
-- [/composition/]
--
-- @'pure' (.) '<*>' u '<*>' v '<*>' w = u '<*>' (v '<*>' w)@
--
-- [/homomorphism/]
--
-- @'pure' f '<*>' 'pure' x = 'pure' (f x)@
--
-- [/interchange/]
--
-- @u '<*>' 'pure' y = 'pure' ('$' y) '<*>' u@
--
-- The other methods have the following default definitions, which may
-- be overridden with equivalent specialized implementations:
--
-- * @u '*>' v = 'pure' ('const' 'id') '<*>' u '<*>' v@
--
-- * @u '<*' v = 'pure' 'const' '<*>' u '<*>' v@
--
-- As a consequence of these laws, the 'Functor' instance for @f@ will satisfy
--
-- * @'fmap' f x = 'pure' f '<*>' x@
--
-- If @f@ is also a 'Monad', it should satisfy
--
-- * @'pure' = 'return'@
--
-- * @('<*>') = 'ap'@
--
-- (which implies that 'pure' and '<*>' satisfy the applicative functor laws).
class Functor f => Applicative f where
    -- Minimal complete definition: 'pure' and '(<*>)' (see laws above).
    -- | Lift a value.
    pure :: a -> f a
    -- | Sequential application.
    (<*>) :: f (a -> b) -> f a -> f b
    -- | Sequence actions, discarding the value of the first argument.
    (*>) :: f a -> f b -> f b
    a1 *> a2 = (id <$ a1) <*> a2
    -- This is essentially the same as liftA2 (const id), but if the
    -- Functor instance has an optimized (<$), we want to use that instead.
    -- | Sequence actions, discarding the value of the second argument.
    (<*) :: f a -> f b -> f a
    (<*) = liftA2 const
-- | A variant of '<*>' with the arguments reversed.
--
-- Note that this is /not/ @'flip' ('<*>')@: via 'liftA2', the first
-- (operand) action's effects are still sequenced before the second
-- (function) action's.
(<**>) :: Applicative f => f a -> f (a -> b) -> f b
(<**>) = liftA2 (flip ($))
-- | Lift a function to actions.
-- This function may be used as a value for `fmap` in a `Functor` instance.
liftA :: Applicative f => (a -> b) -> f a -> f b
liftA f a = pure f <*> a
-- Caution: since this may be used for `fmap`, we can't use the obvious
-- definition of liftA = fmap.
-- | Lift a binary function to actions.
liftA2 :: Applicative f => (a -> b -> c) -> f a -> f b -> f c
liftA2 f a b = fmap f a <*> b
-- | Lift a ternary function to actions.
liftA3 :: Applicative f => (a -> b -> c -> d) -> f a -> f b -> f c -> f d
liftA3 f a b c = fmap f a <*> b <*> c
-- Inlinable, and specialised at the common IO and Maybe types so call
-- sites there stay cheap.
{-# INLINEABLE liftA #-}
{-# SPECIALISE liftA :: (a1->r) -> IO a1 -> IO r #-}
{-# SPECIALISE liftA :: (a1->r) -> Maybe a1 -> Maybe r #-}
{-# INLINEABLE liftA2 #-}
{-# SPECIALISE liftA2 :: (a1->a2->r) -> IO a1 -> IO a2 -> IO r #-}
{-# SPECIALISE liftA2 :: (a1->a2->r) -> Maybe a1 -> Maybe a2 -> Maybe r #-}
{-# INLINEABLE liftA3 #-}
{-# SPECIALISE liftA3 :: (a1->a2->a3->r) -> IO a1 -> IO a2 -> IO a3 -> IO r #-}
{-# SPECIALISE liftA3 :: (a1->a2->a3->r) ->
                         Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe r #-}
-- | The 'join' function is the conventional monad join operator. It
-- is used to remove one level of monadic structure, projecting its
-- bound argument into the outer level.
--
-- By the definition below, e.g. @join (Just (Just x)) = Just x@ and
-- @join [[1,2],[3]] = [1,2,3]@.
join :: (Monad m) => m (m a) -> m a
join x = x >>= id
{- | The 'Monad' class defines the basic operations over a /monad/,
a concept from a branch of mathematics known as /category theory/.
From the perspective of a Haskell programmer, however, it is best to
think of a monad as an /abstract datatype/ of actions.
Haskell's @do@ expressions provide a convenient syntax for writing
monadic expressions.
Instances of 'Monad' should satisfy the following laws:
* @'return' a '>>=' k = k a@
* @m '>>=' 'return' = m@
* @m '>>=' (\\x -> k x '>>=' h) = (m '>>=' k) '>>=' h@
Furthermore, the 'Monad' and 'Applicative' operations should relate as follows:
* @'pure' = 'return'@
* @('<*>') = 'ap'@
The above laws imply:
* @'fmap' f xs = xs '>>=' 'return' . f@
* @('>>') = ('*>')@
and that 'pure' and ('<*>') satisfy the applicative functor laws.
The instances of 'Monad' for lists, 'Data.Maybe.Maybe' and 'System.IO.IO'
defined in the "Prelude" satisfy these laws.
-}
class Applicative m => Monad m where
-- | Sequentially compose two actions, passing any value produced
-- by the first as an argument to the second.
(>>=) :: forall a b. m a -> (a -> m b) -> m b
-- | Sequentially compose two actions, discarding any value produced
-- by the first, like sequencing operators (such as the semicolon)
-- in imperative languages.
(>>) :: forall a b. m a -> m b -> m b
-- NB: deliberately NOT defaulted to (*>); see the Note below.
m >> k = m >>= \_ -> k -- See Note [Recursive bindings for Applicative/Monad]
{-# INLINE (>>) #-}
-- | Inject a value into the monadic type.
return :: a -> m a
-- Default simply delegates to the Applicative 'pure'.
return = pure
-- | Fail with a message. This operation is not part of the
-- mathematical definition of a monad, but is invoked on pattern-match
-- failure in a @do@ expression.
--
-- As part of the MonadFail proposal (MFP), this function is moved
-- to its own class 'MonadFail' (see "Control.Monad.Fail" for more
-- details). The definition here will be removed in a future
-- release.
fail :: String -> m a
fail s = errorWithoutStackTrace s
{- Note [Recursive bindings for Applicative/Monad]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The original Applicative/Monad proposal stated that after
implementation, the designated implementation of (>>) would become
(>>) :: forall a b. m a -> m b -> m b
(>>) = (*>)
by default. You might be inclined to change this to reflect the stated
proposal, but you really shouldn't! Why? Because people tend to define
such instances the /other/ way around: in particular, it is perfectly
legitimate to define an instance of Applicative (*>) in terms of (>>),
which would lead to an infinite loop for the default implementation of
Monad! And people do this in the wild.
This turned into a nasty bug that was tricky to track down, and rather
than eliminate it everywhere upstream, it's easier to just retain the
original default.
-}
-- | Same as '>>=', but with the arguments interchanged.
-- A list specialisation is provided because this use is common.
{-# SPECIALISE (=<<) :: (a -> [b]) -> [a] -> [b] #-}
(=<<) :: Monad m => (a -> m b) -> m a -> m b
f =<< x = x >>= f
-- | Conditional execution of 'Applicative' expressions. For example,
--
-- > when debug (putStrLn "Debugging")
--
-- outputs the string @Debugging@ when the Boolean @debug@ is 'True',
-- and otherwise does nothing.
when :: (Applicative f) => Bool -> f () -> f ()
{-# INLINEABLE when #-}
{-# SPECIALISE when :: Bool -> IO () -> IO () #-}
{-# SPECIALISE when :: Bool -> Maybe () -> Maybe () #-}
when True  act = act
when False _   = pure ()
-- | Evaluate each action in the sequence from left to right,
-- and collect the results.
sequence :: Monad m => [m a] -> m [a]
{-# INLINE sequence #-}
-- Deliberately defined via mapM (not the other way round); see
-- Note [sequence and mapM] below.
sequence = mapM id
-- Note: [sequence and mapM]
-- | @'mapM' f@ is equivalent to @'sequence' . 'map' f@.
mapM :: Monad m => (a -> m b) -> [a] -> m [b]
{-# INLINE mapM #-}
mapM f as = foldr k (return []) as
where
-- Run the action for one element and cons its result onto the rest.
k a r = do { x <- f a; xs <- r; return (x:xs) }
{-
Note: [sequence and mapM]
~~~~~~~~~~~~~~~~~~~~~~~~~
Originally, we defined
mapM f = sequence . map f
This relied on list fusion to produce efficient code for mapM, and led to
excessive allocation in cryptarithm2. Defining
sequence = mapM id
relies only on inlining a tiny function (id) and beta reduction, which tends to
be a more reliable aspect of simplification. Indeed, this does not lead to
similar problems in nofib.
-}
-- | Promote a function to a monad.
-- Written with do-notation rather than 'fmap' (cf. the comment on 'ap'
-- below: instances may define the class methods via these helpers).
liftM :: (Monad m) => (a1 -> r) -> m a1 -> m r
liftM f m1 = do { x1 <- m1; return (f x1) }
-- | Promote a function to a monad, scanning the monadic arguments from
-- left to right. For example,
--
-- > liftM2 (+) [0,1] [0,2] = [0,2,1,3]
-- > liftM2 (+) (Just 1) Nothing = Nothing
--
liftM2 :: (Monad m) => (a1 -> a2 -> r) -> m a1 -> m a2 -> m r
liftM2 g ma mb = do
  a <- ma
  b <- mb
  return (g a b)
-- | Promote a function to a monad, scanning the monadic arguments from
-- left to right (cf. 'liftM2').
liftM3 :: (Monad m) => (a1 -> a2 -> a3 -> r) -> m a1 -> m a2 -> m a3 -> m r
liftM3 f m1 m2 m3 = do { x1 <- m1; x2 <- m2; x3 <- m3; return (f x1 x2 x3) }
-- | Promote a function to a monad, scanning the monadic arguments from
-- left to right (cf. 'liftM2').
liftM4 :: (Monad m) => (a1 -> a2 -> a3 -> a4 -> r) -> m a1 -> m a2 -> m a3 -> m a4 -> m r
liftM4 f m1 m2 m3 m4 = do { x1 <- m1; x2 <- m2; x3 <- m3; x4 <- m4; return (f x1 x2 x3 x4) }
-- | Promote a function to a monad, scanning the monadic arguments from
-- left to right (cf. 'liftM2').
liftM5 :: (Monad m) => (a1 -> a2 -> a3 -> a4 -> a5 -> r) -> m a1 -> m a2 -> m a3 -> m a4 -> m a5 -> m r
liftM5 f m1 m2 m3 m4 m5 = do { x1 <- m1; x2 <- m2; x3 <- m3; x4 <- m4; x5 <- m5; return (f x1 x2 x3 x4 x5) }
-- Inlining and specialisation pragmas for the liftM family; IO and
-- Maybe cover the common monomorphic uses.
{-# INLINEABLE liftM #-}
{-# SPECIALISE liftM :: (a1->r) -> IO a1 -> IO r #-}
{-# SPECIALISE liftM :: (a1->r) -> Maybe a1 -> Maybe r #-}
{-# INLINEABLE liftM2 #-}
{-# SPECIALISE liftM2 :: (a1->a2->r) -> IO a1 -> IO a2 -> IO r #-}
{-# SPECIALISE liftM2 :: (a1->a2->r) -> Maybe a1 -> Maybe a2 -> Maybe r #-}
{-# INLINEABLE liftM3 #-}
{-# SPECIALISE liftM3 :: (a1->a2->a3->r) -> IO a1 -> IO a2 -> IO a3 -> IO r #-}
{-# SPECIALISE liftM3 :: (a1->a2->a3->r) -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe r #-}
{-# INLINEABLE liftM4 #-}
{-# SPECIALISE liftM4 :: (a1->a2->a3->a4->r) -> IO a1 -> IO a2 -> IO a3 -> IO a4 -> IO r #-}
{-# SPECIALISE liftM4 :: (a1->a2->a3->a4->r) -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> Maybe r #-}
{-# INLINEABLE liftM5 #-}
{-# SPECIALISE liftM5 :: (a1->a2->a3->a4->a5->r) -> IO a1 -> IO a2 -> IO a3 -> IO a4 -> IO a5 -> IO r #-}
{-# SPECIALISE liftM5 :: (a1->a2->a3->a4->a5->r) -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> Maybe a5 -> Maybe r #-}
{- | In many situations, the 'liftM' operations can be replaced by uses of
'ap', which promotes function application.
> return f `ap` x1 `ap` ... `ap` xn
is equivalent to
> liftMn f x1 x2 ... xn
-}
ap :: (Monad m) => m (a -> b) -> m a -> m b
-- Effects are sequenced left to right, matching '<*>'.
ap m1 m2 = do { x1 <- m1; x2 <- m2; return (x1 x2) }
-- Since many Applicative instances define (<*>) = ap, we
-- cannot define ap = (<*>)
{-# INLINEABLE ap #-}
{-# SPECIALISE ap :: IO (a -> b) -> IO a -> IO b #-}
{-# SPECIALISE ap :: Maybe (a -> b) -> Maybe a -> Maybe b #-}
-- instances for Prelude types
-- Reader functor: mapping over a function is post-composition.
instance Functor ((->) r) where
fmap = (.)
-- Reader applicative: 'pure' ignores the environment (the K combinator);
-- '<*>' passes the environment to both sides (the S combinator).
instance Applicative ((->) a) where
pure = const
(<*>) f g x = f x (g x)
-- Reader monad: the continuation also receives the environment.
instance Monad ((->) r) where
f >>= k = \ r -> k (f r) r
-- Pair functor: maps over the second component only.
instance Functor ((,) a) where
fmap f (x,y) = (x, f y)
-- Maybe: a Nothing anywhere short-circuits the whole computation.
instance Functor Maybe where
fmap _ Nothing = Nothing
fmap f (Just a) = Just (f a)
instance Applicative Maybe where
pure = Just
Just f <*> m = fmap f m
Nothing <*> _m = Nothing
Just _m1 *> m2 = m2
Nothing *> _m2 = Nothing
instance Monad Maybe where
(Just x) >>= k = k x
Nothing >>= _ = Nothing
(>>) = (*>)
-- Pattern-match failure in do-notation yields Nothing.
fail _ = Nothing
-- -----------------------------------------------------------------------------
-- The Alternative class definition
infixl 3 <|>
-- | A monoid on applicative functors.
--
-- If defined, 'some' and 'many' should be the least solutions
-- of the equations:
--
-- * @some v = (:) '<$>' v '<*>' many v@
--
-- * @many v = some v '<|>' 'pure' []@
class Applicative f => Alternative f where
-- | The identity of '<|>'
empty :: f a
-- | An associative binary operation
(<|>) :: f a -> f a -> f a
-- | One or more.
some :: f a -> f [a]
some v = some_v
where
-- some_v and many_v are mutually recursive so the computation @v@
-- is shared between them.
many_v = some_v <|> pure []
some_v = (fmap (:) v) <*> many_v
-- | Zero or more.
many :: f a -> f [a]
many v = many_v
where
-- Same mutual recursion as in 'some'.
many_v = some_v <|> pure []
some_v = (fmap (:) v) <*> many_v
-- Maybe: first Just wins; Nothing is the identity.
instance Alternative Maybe where
empty = Nothing
Nothing <|> r = r
l <|> _ = l
-- -----------------------------------------------------------------------------
-- The MonadPlus class definition
-- | Monads that also support choice and failure.
class (Alternative m, Monad m) => MonadPlus m where
-- | the identity of 'mplus'. It should also satisfy the equations
--
-- > mzero >>= f = mzero
-- > v >> mzero = mzero
--
mzero :: m a
mzero = empty
-- | an associative operation
mplus :: m a -> m a -> m a
mplus = (<|>)
-- Maybe takes the defaults: mzero = Nothing, mplus = (<|>).
instance MonadPlus Maybe
----------------------------------------------
-- The list type
instance Functor [] where
{-# INLINE fmap #-}
fmap = map
-- See Note: [List comprehensions and inlining]
instance Applicative [] where
{-# INLINE pure #-}
pure x = [x]
{-# INLINE (<*>) #-}
-- All pairings; the functions vary in the outer loop.
fs <*> xs = [f x | f <- fs, x <- xs]
{-# INLINE (*>) #-}
xs *> ys = [y | _ <- xs, y <- ys]
-- See Note: [List comprehensions and inlining]
instance Monad [] where
{-# INLINE (>>=) #-}
xs >>= f = [y | x <- xs, y <- f x]
{-# INLINE (>>) #-}
(>>) = (*>)
{-# INLINE fail #-}
-- Pattern-match failure in do-notation yields the empty list.
fail _ = []
instance Alternative [] where
empty = []
(<|>) = (++)
instance MonadPlus []
{-
A few list functions that appear here because they are used here.
The rest of the prelude list functions are in GHC.List.
-}
----------------------------------------------
-- foldr/build/augment
----------------------------------------------
-- | 'foldr', applied to a binary operator, a starting value (typically
-- the right-identity of the operator), and a list, reduces the list
-- using the binary operator, from right to left:
--
-- > foldr f z [x1, x2, ..., xn] == x1 `f` (x2 `f` ... (xn `f` z)...)
foldr :: (a -> b -> b) -> b -> [a] -> b
-- foldr _ z [] = z
-- foldr f z (x:xs) = f x (foldr f z xs)
{-# INLINE [0] foldr #-}
-- Inline only in the final stage, after the foldr/cons rule has had a chance
-- Also note that we inline it when it has *two* parameters, which are the
-- ones we are keen about specialising!
foldr k z = go
where
-- Worker loop: k and z are free variables, captured once.
go [] = z
go (y:ys) = y `k` go ys
-- | A list producer that can be fused with 'foldr'.
-- This function is merely
--
-- > build g = g (:) []
--
-- but GHC's simplifier will transform an expression of the form
-- @'foldr' k z ('build' g)@, which may arise after inlining, to @g k z@,
-- which avoids producing an intermediate list.
-- (The rewrite itself is performed by the "fold/build" RULE below.)
build :: forall a. (forall b. (a -> b -> b) -> b -> b) -> [a]
{-# INLINE [1] build #-}
-- The INLINE is important, even though build is tiny,
-- because it prevents [] getting inlined in the version that
-- appears in the interface file. If [] *is* inlined, it
-- won't match with [] appearing in rules in an importing module.
--
-- The "1" says to inline in phase 1
build g = g (:) []
-- | A list producer that can be fused with 'foldr'.
-- This function is merely
--
-- > augment g xs = g (:) xs
--
-- but GHC's simplifier will transform an expression of the form
-- @'foldr' k z ('augment' g xs)@, which may arise after inlining, to
-- @g k ('foldr' k z xs)@, which avoids producing an intermediate list.
augment :: forall a. (forall b. (a->b->b) -> b -> b) -> [a] -> [a]
{-# INLINE [1] augment #-}
augment g xs = g (:) xs
-- Fusion rules connecting foldr with build/augment producers.
{-# RULES
"fold/build" forall k z (g::forall b. (a->b->b) -> b -> b) .
foldr k z (build g) = g k z
"foldr/augment" forall k z xs (g::forall b. (a->b->b) -> b -> b) .
foldr k z (augment g xs) = g k (foldr k z xs)
"foldr/id" foldr (:) [] = \x -> x
"foldr/app" [1] forall ys. foldr (:) ys = \xs -> xs ++ ys
-- Only activate this from phase 1, because that's
-- when we disable the rule that expands (++) into foldr
-- The foldr/cons rule looks nice, but it can give disastrously
-- bloated code when compiling
-- array (a,b) [(1,2), (2,2), (3,2), ...very long list... ]
-- i.e. when there are very very long literal lists
-- So I've disabled it for now. We could have special cases
-- for short lists, I suppose.
-- "foldr/cons" forall k z x xs. foldr k z (x:xs) = k x (foldr k z xs)
"foldr/single" forall k z x. foldr k z [x] = k x z
"foldr/nil" forall k z. foldr k z [] = z
"foldr/cons/build" forall k z x (g::forall b. (a->b->b) -> b -> b) .
foldr k z (x:build g) = k x (g k z)
"augment/build" forall (g::forall b. (a->b->b) -> b -> b)
(h::forall b. (a->b->b) -> b -> b) .
augment g (build h) = build (\c n -> g c (h c n))
"augment/nil" forall (g::forall b. (a->b->b) -> b -> b) .
augment g [] = build g
#-}
-- This rule is true, but not (I think) useful:
-- augment g (augment h t) = augment (\cn -> g c (h c n)) t
----------------------------------------------
-- map
----------------------------------------------
-- | 'map' @f xs@ is the list obtained by applying @f@ to each element
-- of @xs@, i.e.,
--
-- > map f [x1, x2, ..., xn] == [f x1, f x2, ..., f xn]
-- > map f [x1, x2, ...] == [f x1, f x2, ...]
map :: (a -> b) -> [a] -> [b]
{-# NOINLINE [0] map #-}
-- We want the RULEs "map" and "map/coerce" to fire first.
-- map is recursive, so won't inline anyway,
-- but saying so is more explicit, and silences warnings
map _ [] = []
map f (x:xs) = f x : map f xs
-- Note eta expanded
-- c is the cons used to rebuild the result; f is the mapped function.
mapFB :: (elt -> lst -> lst) -> (a -> elt) -> a -> lst -> lst
{-# INLINE [0] mapFB #-}
mapFB c f = \x ys -> c (f x) ys
-- The rules for map work like this.
--
-- Up to (but not including) phase 1, we use the "map" rule to
-- rewrite all saturated applications of map with its build/fold
-- form, hoping for fusion to happen.
-- In phase 1 and 0, we switch off that rule, inline build, and
-- switch on the "mapList" rule, which rewrites the foldr/mapFB
-- thing back into plain map.
--
-- It's important that these two rules aren't both active at once
-- (along with build's unfolding) else we'd get an infinite loop
-- in the rules. Hence the activation control below.
--
-- The "mapFB" rule optimises compositions of map.
--
-- This same pattern is followed by many other functions:
-- e.g. append, filter, iterate, repeat, etc.
{-# RULES
"map" [~1] forall f xs. map f xs = build (\c n -> foldr (mapFB c f) n xs)
"mapList" [1] forall f. foldr (mapFB (:) f) [] = map f
"mapFB" forall c f g. mapFB (mapFB c f) g = mapFB c (f.g)
#-}
-- See Breitner, Eisenberg, Peyton Jones, and Weirich, "Safe Zero-cost
-- Coercions for Haskell", section 6.5:
-- http://research.microsoft.com/en-us/um/people/simonpj/papers/ext-f/coercible.pdf
{-# RULES "map/coerce" [1] map coerce = coerce #-}
----------------------------------------------
-- append
----------------------------------------------
-- | Append two lists, i.e.,
--
-- > [x1, ..., xm] ++ [y1, ..., yn] == [x1, ..., xm, y1, ..., yn]
-- > [x1, ..., xm] ++ [y1, ...] == [x1, ..., xm, y1, ...]
--
-- If the first list is not finite, the result is the first list.
(++) :: [a] -> [a] -> [a]
{-# NOINLINE [1] (++) #-} -- We want the RULE to fire first.
-- It's recursive, so won't inline anyway,
-- but saying so is more explicit
(++) [] ys = ys
(++) (x:xs) ys = x : xs ++ ys
-- Up to phase 1, rewrite (++) into augment form so it can fuse.
{-# RULES
"++" [~1] forall xs ys. xs ++ ys = augment (\c n -> foldr c n xs) ys
#-}
-- |'otherwise' is defined as the value 'True'. It helps to make
-- guards more readable. eg.
--
-- > f x | x < 0 = ...
-- > | otherwise = ...
otherwise :: Bool
-- Simply 'True', so a trailing @| otherwise@ guard always succeeds.
otherwise = True
----------------------------------------------
-- Type Char and String
----------------------------------------------
-- | A 'String' is a list of characters. String constants in Haskell are values
-- of type 'String'.
--
type String = [Char]
-- | Convert an 'Int' to a 'Char' with no range check -- the caller must
-- ensure the value is a valid code point (hence "unsafe").
unsafeChr :: Int -> Char
unsafeChr (I# i#) = C# (chr# i#)
-- | The 'Prelude.fromEnum' method restricted to the type 'Data.Char.Char'.
ord :: Char -> Int
ord (C# c#) = I# (ord# c#)
-- | This 'String' equality predicate is used when desugaring
-- pattern-matches against strings. Two strings are equal when they
-- have the same length and agree character-by-character.
eqString :: String -> String -> Bool
eqString (c1:cs1) (c2:cs2) = c1 == c2 && eqString cs1 cs2
eqString []       []       = True
eqString _        _        = False
{-# RULES "eqString" (==) = eqString #-}
-- eqString also has a BuiltInRule in PrelRules.lhs:
-- eqString (unpackCString# (Lit s1)) (unpackCString# (Lit s2) = s1==s2
----------------------------------------------
-- 'Int' related definitions
----------------------------------------------
-- Bounds of 'Int', selected by CPP on the platform word size.
maxInt, minInt :: Int
{- Seems clumsy. Should perhaps put minInt and MaxInt directly into MachDeps.h -}
#if WORD_SIZE_IN_BITS == 31
minInt = I# (-0x40000000#)
maxInt = I# 0x3FFFFFFF#
#elif WORD_SIZE_IN_BITS == 32
minInt = I# (-0x80000000#)
maxInt = I# 0x7FFFFFFF#
#else
minInt = I# (-0x8000000000000000#)
maxInt = I# 0x7FFFFFFFFFFFFFFF#
#endif
----------------------------------------------
-- The function type
----------------------------------------------
-- | Identity function.
id :: a -> a
id x = x
-- Assertion function. This simply ignores its boolean argument.
-- The compiler may rewrite it to @('assertError' line)@.
-- | If the first argument evaluates to 'True', then the result is the
-- second argument. Otherwise an 'AssertionFailed' exception is raised,
-- containing a 'String' with the source file and line number of the
-- call to 'assert'.
--
-- Assertions can normally be turned on or off with a compiler flag
-- (for GHC, assertions are normally on unless optimisation is turned on
-- with @-O@ or the @-fignore-asserts@
-- option is given). When assertions are turned off, the first
-- argument to 'assert' is ignored, and the second argument is
-- returned as the result.
-- SLPJ: in 5.04 etc 'assert' is in GHC.Prim,
-- but from Template Haskell onwards it's simply
-- defined here in Base.lhs
assert :: Bool -> a -> a
assert _pred r = r
-- Runtime no-ops; NOTE(review): presumably hooks for the GHCi
-- debugger -- confirm before relying on any other behaviour.
breakpoint :: a -> a
breakpoint r = r
breakpointCond :: Bool -> a -> a
breakpointCond _ r = r
-- Existential wrapper: hides the type of the contained value.
data Opaque = forall a. O a
-- | @const x@ is a unary function which evaluates to @x@ for all inputs.
--
-- For instance,
--
-- >>> map (const 42) [0..3]
-- [42,42,42,42]
const :: a -> b -> a
const x _ = x
-- | Function composition.
{-# INLINE (.) #-}
-- Make sure it has TWO args only on the left, so that it inlines
-- when applied to two functions, even if there is no final argument
(.) :: (b -> c) -> (a -> b) -> a -> c
(.) f g = \x -> f (g x)
-- | @'flip' f@ takes its (first) two arguments in the reverse order of @f@.
flip :: (a -> b -> c) -> b -> a -> c
flip f x y = f y x
-- | Application operator. This operator is redundant, since ordinary
-- application @(f x)@ means the same as @(f '$' x)@. However, '$' has
-- low, right-associative binding precedence, so it sometimes allows
-- parentheses to be omitted; for example:
--
-- > f $ g $ h x = f (g (h x))
--
-- It is also useful in higher-order situations, such as @'map' ('$' 0) xs@,
-- or @'Data.List.zipWith' ('$') fs xs@.
{-# INLINE ($) #-}
($) :: (a -> b) -> a -> b
f $ x = f x
-- | Strict (call-by-value) application operator. It takes a function and an
-- argument, evaluates the argument to weak head normal form (WHNF), then calls
-- the function with that value.
($!) :: (a -> b) -> a -> b
-- The bang pattern forces the argument to WHNF before the call.
f $! x = let !vx = x in f vx -- see #2273
-- | @'until' p f@ yields the result of applying @f@ until @p@ holds.
until :: (a -> Bool) -> (a -> a) -> a -> a
until p f = go
where
-- The local worker closes over p and f instead of passing them
-- on every iteration.
go x | p x = x
| otherwise = go (f x)
-- | 'asTypeOf' is a type-restricted version of 'const'. It is usually
-- used as an infix operator, and its typing forces its first argument
-- (which is usually overloaded) to have the same type as the second.
asTypeOf :: a -> a -> a
-- Operationally just 'const'; the restricted type does all the work.
asTypeOf = const
----------------------------------------------
-- Functor/Applicative/Monad instances for IO
----------------------------------------------
instance Functor IO where
fmap f x = x >>= (pure . f)
instance Applicative IO where
{-# INLINE pure #-}
{-# INLINE (*>) #-}
pure = returnIO
m *> k = m >>= \ _ -> k
(<*>) = ap
instance Monad IO where
{-# INLINE (>>) #-}
{-# INLINE (>>=) #-}
(>>) = (*>)
(>>=) = bindIO
fail s = failIO s
-- NOTE(review): '<|>' delegates to mplusIO -- presumably it runs the
-- second action when the first throws an IOError; confirm in GHC.IO.
instance Alternative IO where
empty = failIO "mzero"
(<|>) = mplusIO
instance MonadPlus IO
-- Primitive IO plumbing: these thread the RealWorld state token
-- explicitly through unboxed tuples.
returnIO :: a -> IO a
returnIO x = IO (\ s -> (# s, x #))
bindIO :: IO a -> (a -> IO b) -> IO b
bindIO (IO m) k = IO (\ s -> case m s of (# new_s, a #) -> unIO (k a) new_s)
thenIO :: IO a -> IO b -> IO b
thenIO (IO m) k = IO (\ s -> case m s of (# new_s, _ #) -> unIO k new_s)
-- Unwrap an IO action to its underlying state-transformer function.
unIO :: IO a -> (State# RealWorld -> (# State# RealWorld, a #))
unIO (IO a) = a
{- |
Returns the 'tag' of a constructor application; this function is used
by the deriving code for Eq, Ord and Enum.
The primitive dataToTag# requires an evaluated constructor application
as its argument, so we provide getTag as a wrapper that performs the
evaluation before calling dataToTag#. We could have dataToTag#
evaluate its argument, but we prefer to do it this way because (a)
dataToTag# can be an inline primop if it doesn't need to do any
evaluation, and (b) we want to expose the evaluation to the
simplifier, because it might be possible to eliminate the evaluation
in the case when the argument is already known to be evaluated.
-}
{-# INLINE getTag #-}
getTag :: a -> Int#
-- The bang forces the argument before dataToTag# is applied.
getTag !x = dataToTag# x
----------------------------------------------
-- Numeric primops
----------------------------------------------
-- Definitions of the boxed PrimOps; these will be
-- used in the case of partial applications, etc.
{-# INLINE quotInt #-}
{-# INLINE remInt #-}
quotInt, remInt, divInt, modInt :: Int -> Int -> Int
(I# x) `quotInt` (I# y) = I# (x `quotInt#` y)
(I# x) `remInt` (I# y) = I# (x `remInt#` y)
(I# x) `divInt` (I# y) = I# (x `divInt#` y)
(I# x) `modInt` (I# y) = I# (x `modInt#` y)
quotRemInt :: Int -> Int -> (Int, Int)
(I# x) `quotRemInt` (I# y) = case x `quotRemInt#` y of
(# q, r #) ->
(I# q, I# r)
divModInt :: Int -> Int -> (Int, Int)
(I# x) `divModInt` (I# y) = case x `divModInt#` y of
(# q, r #) -> (I# q, I# r)
-- divMod in terms of quotRem: when the operands' signs differ (and the
-- division is inexact) the quotient is decremented and the remainder
-- adjusted so it takes the divisor's sign.
divModInt# :: Int# -> Int# -> (# Int#, Int# #)
x# `divModInt#` y#
| isTrue# (x# ># 0#) && isTrue# (y# <# 0#) =
case (x# -# 1#) `quotRemInt#` y# of
(# q, r #) -> (# q -# 1#, r +# y# +# 1# #)
| isTrue# (x# <# 0#) && isTrue# (y# ># 0#) =
case (x# +# 1#) `quotRemInt#` y# of
(# q, r #) -> (# q -# 1#, r +# y# -# 1# #)
| otherwise =
x# `quotRemInt#` y#
-- Wrappers for the shift operations. The uncheckedShift# family are
-- undefined when the amount being shifted by is greater than the size
-- in bits of Int#, so these wrappers perform a check and return
-- either zero or -1 appropriately.
--
-- Note that these wrappers still produce undefined results when the
-- second argument (the shift amount) is negative.
-- | Shift the argument left by the specified number of bits
-- (which must be non-negative).
shiftL# :: Word# -> Int# -> Word#
a `shiftL#` b | isTrue# (b >=# WORD_SIZE_IN_BITS#) = 0##
| otherwise = a `uncheckedShiftL#` b
-- | Shift the argument right by the specified number of bits
-- (which must be non-negative).
-- The "RL" means "right, logical" (as opposed to RA for arithmetic)
-- (although an arithmetic right shift wouldn't make sense for Word#)
shiftRL# :: Word# -> Int# -> Word#
a `shiftRL#` b | isTrue# (b >=# WORD_SIZE_IN_BITS#) = 0##
| otherwise = a `uncheckedShiftRL#` b
-- | Shift the argument left by the specified number of bits
-- (which must be non-negative).
iShiftL# :: Int# -> Int# -> Int#
a `iShiftL#` b | isTrue# (b >=# WORD_SIZE_IN_BITS#) = 0#
| otherwise = a `uncheckedIShiftL#` b
-- | Shift the argument right (signed) by the specified number of bits
-- (which must be non-negative).
-- The "RA" means "right, arithmetic" (as opposed to RL for logical)
-- An overlong arithmetic shift saturates to the sign: -1 for negative
-- inputs, 0 otherwise.
iShiftRA# :: Int# -> Int# -> Int#
a `iShiftRA#` b | isTrue# (b >=# WORD_SIZE_IN_BITS#) = if isTrue# (a <# 0#)
then (-1#)
else 0#
| otherwise = a `uncheckedIShiftRA#` b
-- | Shift the argument right (unsigned) by the specified number of bits
-- (which must be non-negative).
-- The "RL" means "right, logical" (as opposed to RA for arithmetic)
iShiftRL# :: Int# -> Int# -> Int#
a `iShiftRL#` b | isTrue# (b >=# WORD_SIZE_IN_BITS#) = 0#
| otherwise = a `uncheckedIShiftRL#` b
-- Rules for C strings (the functions themselves are now in GHC.CString)
-- These rewrite literal-string unpacking into build form so it fuses.
{-# RULES
"unpack" [~1] forall a . unpackCString# a = build (unpackFoldrCString# a)
"unpack-list" [1] forall a . unpackFoldrCString# a (:) [] = unpackCString# a
"unpack-append" forall a n . unpackFoldrCString# a (:) n = unpackAppendCString# a n
-- There's a built-in rule (in PrelRules.lhs) for
-- unpackFoldr "foo" c (unpackFoldr "baz" c n) = unpackFoldr "foobaz" c n
#-}
|
tolysz/prepare-ghcjs
|
spec-lts8/base/GHC/Base.hs
|
bsd-3-clause
| 42,231
| 187
| 46
| 11,059
| 6,462
| 3,638
| 2,824
| 397
| 2
|
-- | Thin wrapper module that simply re-exports the 'Dotnet.Object' type.
module Dotnet.System.ObjectTy (Dotnet.Object(..)) where
import qualified Dotnet
--data Object_ a
--type Object a = Dotnet.Object (Object_ a)
|
FranklinChen/Hugs
|
dotnet/lib/Dotnet/System/ObjectTy.hs
|
bsd-3-clause
| 147
| 0
| 5
| 23
| 24
| 17
| 7
| 2
| 0
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset (hi-IN locale) for the "Common Library" help pages:
     declares the content map plus TOC, index, search and favorites views. -->
<helpset version="2.0" xml:lang="hi-IN">
<title>Common Library</title>
<maps>
<homeID>commonlib</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/commonlib/src/main/javahelp/help_hi_IN/helpset_hi_IN.hs
|
apache-2.0
| 965
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
module IRTS.CodegenC (codegenC) where
import Idris.AbsSyntax
import IRTS.Bytecode
import IRTS.Lang
import IRTS.Simplified
import IRTS.Defunctionalise
import IRTS.System
import IRTS.CodegenCommon
import Idris.Core.TT
import Util.System
import Numeric
import Data.Char
import Data.Bits
import Data.List (intercalate)
import System.Process
import System.Exit
import System.IO
import System.Directory
import System.FilePath ((</>), (<.>))
import Control.Monad
import Debug.Trace
-- | Top-level C code generator: emits C for the simplified declarations
-- carried in the codegen info, then additionally emits a C header when
-- interface generation is requested.
codegenC :: CodeGenerator
codegenC ci = do
    codegenC' (simpleDecls ci)
              (outputFile ci)
              (outputType ci)
              (includes ci)
              (compileObjs ci)
              (map libFlag (compileLibs ci) ++ map includeFlag (importDirs ci))
              (compilerFlags ci)
              (exportDecls ci)
              (interfaces ci)
              (debugLevel ci)
    when (interfaces ci) $ codegenH (exportDecls ci)
  where
    libFlag lib     = "-l" ++ lib   -- linker flag for one library
    includeFlag dir = "-I" ++ dir   -- include-path flag for one directory
-- Core of the C backend: turn declarations into bytecode, render C
-- source, then either write it out raw or hand it to the C compiler.
codegenC' :: [(Name, SDecl)] ->
String -> -- output file name
OutputType -> -- generate executable if True, only .o if False
[FilePath] -> -- include files
[String] -> -- extra object files
[String] -> -- extra compiler flags (libraries)
[String] -> -- extra compiler flags (anything)
[ExportIFace] ->
Bool -> -- interfaces too (so make a .o instead)
DbgLevel ->
IO ()
codegenC' defs out exec incs objs libs flags exports iface dbg
= do -- print defs
let bc = map toBC defs
let h = concatMap toDecl (map fst bc)
let cc = concatMap (uncurry toC) bc
let hi = concatMap ifaceC (concatMap getExp exports)
d <- getDataDir
mprog <- readFile (d </> "rts" </> "idris_main" <.> "c")
-- Full C source: headers, optional trace define, prototypes, bodies,
-- then either the RTS main() (for executables) or the iface code.
let cout = headers incs ++ debug dbg ++ h ++ cc ++
(if (exec == Executable) then mprog else hi)
case exec of
MavenProject -> putStrLn ("FAILURE: output type not supported")
Raw -> writeSource out cout
_ -> do
-- Write the generated C to a temp file and invoke the C compiler.
(tmpn, tmph) <- tempfile
hPutStr tmph cout
hFlush tmph
hClose tmph
comp <- getCC
libFlags <- getLibFlags
incFlags <- getIncFlags
envFlags <- getEnvFlags
let args = [gccDbg dbg] ++
gccFlags iface ++
-- # Any flags defined here which alter the RTS API must also be added to config.mk
["-DHAS_PTHREAD", "-DIDRIS_ENABLE_STATS",
"-I."] ++ objs ++ ["-x", "c"] ++ envFlags ++
(if (exec == Executable) then [] else ["-c"]) ++
[tmpn] ++
(if not iface then concatMap words libFlags else []) ++
concatMap words incFlags ++
(if not iface then concatMap words libs else []) ++
concatMap words flags ++
["-o", out]
-- putStrLn (show args)
exit <- rawSystem comp args
when (exit /= ExitSuccess) $
putStrLn ("FAILURE: " ++ show comp ++ " " ++ show args)
where
getExp (Export _ _ exp) = exp
-- Render one #include line per header, always adding the RTS headers.
headers xs =
concatMap
(\h -> "#include <" ++ h ++ ">\n")
(xs ++ ["idris_rts.h", "idris_bitstring.h", "idris_stdfgn.h"])
debug TRACE = "#define IDRIS_TRACE\n\n"
debug _ = ""
-- We're using signed integers now. Make sure we get consistent semantics
-- out of them from gcc. See e.g. http://thiemonagel.de/2010/01/signed-integer-overflow/
gccFlags i = if i then ["-fwrapv"]
else ["-fwrapv", "-fno-strict-overflow"]
gccDbg DEBUG = "-g"
gccDbg TRACE = "-O2"
gccDbg _ = "-O2"
-- | Mangle an Idris 'Name' into a valid C identifier: prefix
-- @_idris_@ and replace every non-alphanumeric character with an
-- escape of the form @_&lt;codepoint&gt;_@.
cname :: Name -> String
cname n = "_idris_" ++ (showCG n >>= escape)
  where
    -- keep letters and digits, escape everything else by code point
    escape ch
      | isAlpha ch || isDigit ch = [ch]
      | otherwise                = '_' : show (fromEnum ch) ++ "_"
-- | Produce @n@ levels of indentation, four spaces per level.
indent :: Int -> String
indent level = concat (replicate level "    ")
-- Map a bytecode register to the C macro the RTS uses for it.
creg RVal = "RVAL" -- the return-value register
creg (L i) = "LOC(" ++ show i ++ ")" -- local stack slot i
creg (T i) = "TOP(" ++ show i ++ ")" -- top-of-stack slot i
creg Tmp = "REG1" -- scratch register
-- | Forward declaration for a compiled function; every generated
-- function shares the signature @void f(VM*, VAL*)@.
toDecl :: Name -> String
toDecl f = "void " ++ cname f ++ "(VM*, VAL*);\n"
-- | Emit the C body for one definition: open the function, set up a
-- stack frame (INITFRAME), then compile each bytecode instruction in
-- order at indentation level 1.
toC :: Name -> [BC] -> String
toC f code
= -- "/* " ++ show code ++ "*/\n\n" ++
"void " ++ cname f ++ "(VM* vm, VAL* oldbase) {\n" ++
indent 1 ++ "INITFRAME;\n" ++
concatMap (bcc 1) code ++ "}\n\n"
-- | Render a Haskell 'String' as a C string literal: escape quotes and
-- backslashes, emit control characters as hex escapes, pass printable
-- ASCII through, and encode everything else as UTF-8 byte escapes.
showCStr :: String -> String
showCStr cs = '"' : concatMap encodeChar cs ++ "\""
  where
    -- Encode one character as C source text.
    encodeChar :: Char -> String
    encodeChar '"'  = "\\\""
    encodeChar '\\' = "\\\\"
    encodeChar c
      -- The literal is closed and reopened around each hex escape so a
      -- following digit cannot extend it: "\x0a""3" rather than the
      -- incorrect "\x0a3".
      | code < 0x10 = "\"\"\\x0" ++ showHex code "\"\""
      | code < 0x20 = "\"\"\\x"  ++ showHex code "\"\""
      | code < 0x7f = [c] -- 0x7f = \DEL
      | otherwise   = concatMap byteEscape (utf8 code)
      where code = ord c

    -- One quote-delimited hex escape for a single UTF-8 byte.
    byteEscape :: Int -> String
    byteEscape b = "\"\"\\x" ++ showHex b "\"\""

    -- Break a code point into its UTF-8 bytes (multi-byte form only;
    -- the single-byte ASCII range is handled above).
    utf8 :: Int -> [Int]
    utf8 x = markHead h (length rest) : map (+ 0x80) rest
      where
        (h : rest) = chunks x []
        -- peel off 6-bit groups, most significant first
        chunks 0 acc = acc
        chunks v acc = chunks (shiftR v 6) ((v .&. 0x3f) : acc)
        -- stamp the length marker onto the head byte
        markHead b 1 = b + 0xc0
        markHead b 2 = b + 0xe0
        markHead b 3 = b + 0xf0
        markHead _ _ = error "Can't happen: Invalid UTF8 character"
-- | Compile a single bytecode instruction to C statements, indented to
-- level @i@.  Register reads/writes go through 'creg'; control flow
-- becomes C if/else chains or switch statements.
bcc :: Int -> BC -> String
bcc i (ASSIGN l r) = indent i ++ creg l ++ " = " ++ creg r ++ ";\n"
bcc i (ASSIGNCONST l c)
= indent i ++ creg l ++ " = " ++ mkConst c ++ ";\n"
where
-- Render a literal via the RTS constructor macro for its type.
mkConst (I i) = "MKINT(" ++ show i ++ ")"
-- Small bignums fit a tagged int; bigger ones go through GMP.
mkConst (BI i) | i < (2^30) = "MKINT(" ++ show i ++ ")"
| otherwise = "MKBIGC(vm,\"" ++ show i ++ "\")"
mkConst (Fl f) = "MKFLOAT(vm, " ++ show f ++ ")"
mkConst (Ch c) = "MKINT(" ++ show (fromEnum c) ++ ")"
mkConst (Str s) = "MKSTR(vm, " ++ showCStr s ++ ")"
mkConst (B8 x) = "idris_b8const(vm, " ++ show x ++ "U)"
mkConst (B16 x) = "idris_b16const(vm, " ++ show x ++ "U)"
mkConst (B32 x) = "idris_b32const(vm, " ++ show x ++ "UL)"
mkConst (B64 x) = "idris_b64const(vm, " ++ show x ++ "ULL)"
-- if it's a type constant, we won't use it, but equally it shouldn't
-- report an error. These might creep into generated for various reasons
-- (especially if erasure is disabled).
mkConst c | isTypeConst c = "MKINT(42424242)"
mkConst c = error $ "mkConst of (" ++ show c ++ ") not implemented"
bcc i (UPDATE l r) = indent i ++ creg l ++ " = " ++ creg r ++ ";\n"
-- Nullary constructors with small tags use preallocated cells.
bcc i (MKCON l loc tag []) | tag < 256
= indent i ++ creg l ++ " = NULL_CON(" ++ show tag ++ ");\n"
bcc i (MKCON l loc tag args)
= indent i ++ alloc loc tag ++
indent i ++ setArgs 0 args ++ "\n" ++
indent i ++ creg l ++ " = " ++ creg Tmp ++ ";\n"
-- "MKCON(vm, " ++ creg l ++ ", " ++ show tag ++ ", " ++
-- show (length args) ++ concatMap showArg args ++ ");\n"
where showArg r = ", " ++ creg r
setArgs i [] = ""
setArgs i (x : xs) = "SETARG(" ++ creg Tmp ++ ", " ++ show i ++ ", " ++ creg x ++
"); " ++ setArgs (i + 1) xs
-- Fresh allocation vs. in-place reuse of an old cell (loc = Just old).
alloc Nothing tag
= "allocCon(" ++ creg Tmp ++ ", vm, " ++ show tag ++ ", " ++
show (length args) ++ ", 0);\n"
alloc (Just old) tag
= "updateCon(" ++ creg Tmp ++ ", " ++ creg old ++ ", " ++ show tag ++ ", " ++
show (length args) ++ ");\n"
bcc i (PROJECT l loc a) = indent i ++ "PROJECT(vm, " ++ creg l ++ ", " ++ show loc ++
", " ++ show a ++ ");\n"
bcc i (PROJECTINTO r t idx)
= indent i ++ creg r ++ " = GETARG(" ++ creg t ++ ", " ++ show idx ++ ");\n"
-- Constructor-tag case with few alternatives: emit an if/else chain
-- instead of a switch (falls through to the switch form otherwise).
bcc i (CASE True r code def)
| length code < 4 = showCase i def code
where
showCode :: Int -> [BC] -> String
showCode i bc = "{\n" ++ concatMap (bcc (i + 1)) bc ++
indent i ++ "}\n"
showCase :: Int -> Maybe [BC] -> [(Int, [BC])] -> String
showCase i Nothing [(t, c)] = indent i ++ showCode i c
showCase i (Just def) [] = indent i ++ showCode i def
showCase i def ((t, c) : cs)
= indent i ++ "if (CTAG(" ++ creg r ++ ") == " ++ show t ++ ") " ++ showCode i c
++ indent i ++ "else\n" ++ showCase i def cs
-- General case: C switch on the constructor tag (CTAG when safe,
-- raw TAG otherwise).
bcc i (CASE safe r code def)
= indent i ++ "switch(" ++ ctag safe ++ "(" ++ creg r ++ ")) {\n" ++
concatMap (showCase i) code ++
showDef i def ++
indent i ++ "}\n"
where
ctag True = "CTAG"
ctag False = "TAG"
showCase i (t, bc) = indent i ++ "case " ++ show t ++ ":\n"
++ concatMap (bcc (i+1)) bc ++ indent (i + 1) ++ "break;\n"
showDef i Nothing = ""
showDef i (Just c) = indent i ++ "default:\n"
++ concatMap (bcc (i+1)) c ++ indent (i + 1) ++ "break;\n"
-- Case on constants: ints/chars compare directly, strings via strcmp,
-- bignums via bigEqConst; each alternative becomes an if/else link.
bcc i (CONSTCASE r code def)
| intConsts code
-- = indent i ++ "switch(GETINT(" ++ creg r ++ ")) {\n" ++
-- concatMap (showCase i) code ++
-- showDef i def ++
-- indent i ++ "}\n"
= concatMap (iCase (creg r)) code ++
indent i ++ "{\n" ++ showDefS i def ++ indent i ++ "}\n"
| strConsts code
= concatMap (strCase ("GETSTR(" ++ creg r ++ ")")) code ++
indent i ++ "{\n" ++ showDefS i def ++ indent i ++ "}\n"
| bigintConsts code
= concatMap (biCase (creg r)) code ++
indent i ++ "{\n" ++ showDefS i def ++ indent i ++ "}\n"
| otherwise = error $ "Can't happen: Can't compile const case " ++ show code
where
-- classify the alternatives by the type of their first constant
intConsts ((I _, _ ) : _) = True
intConsts ((Ch _, _ ) : _) = True
intConsts _ = False
bigintConsts ((BI _, _ ) : _) = True
bigintConsts _ = False
strConsts ((Str _, _ ) : _) = True
strConsts _ = False
strCase sv (s, bc) =
indent i ++ "if (strcmp(" ++ sv ++ ", " ++ show s ++ ") == 0) {\n" ++
concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
biCase bv (BI b, bc) =
indent i ++ "if (bigEqConst(" ++ bv ++ ", " ++ show b ++ ")) {\n"
++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
iCase v (I b, bc) =
indent i ++ "if (GETINT(" ++ v ++ ") == " ++ show b ++ ") {\n"
++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
iCase v (Ch b, bc) =
indent i ++ "if (GETINT(" ++ v ++ ") == " ++ show (fromEnum b) ++ ") {\n"
++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
showCase i (t, bc) = indent i ++ "case " ++ show t ++ ":\n"
++ concatMap (bcc (i+1)) bc ++
indent (i + 1) ++ "break;\n"
showDef i Nothing = ""
showDef i (Just c) = indent i ++ "default:\n"
++ concatMap (bcc (i+1)) c ++
indent (i + 1) ++ "break;\n"
showDefS i Nothing = ""
showDefS i (Just c) = concatMap (bcc (i+1)) c
-- Calls and stack management map directly onto RTS macros.
bcc i (CALL n) = indent i ++ "CALL(" ++ cname n ++ ");\n"
bcc i (TAILCALL n) = indent i ++ "TAILCALL(" ++ cname n ++ ");\n"
bcc i (SLIDE n) = indent i ++ "SLIDE(vm, " ++ show n ++ ");\n"
bcc i REBASE = indent i ++ "REBASE;\n"
bcc i (RESERVE 0) = ""
bcc i (RESERVE n) = indent i ++ "RESERVE(" ++ show n ++ ");\n"
bcc i (ADDTOP 0) = ""
bcc i (ADDTOP n) = indent i ++ "ADDTOP(" ++ show n ++ ");\n"
bcc i (TOPBASE n) = indent i ++ "TOPBASE(" ++ show n ++ ");\n"
bcc i (BASETOP n) = indent i ++ "BASETOP(" ++ show n ++ ");\n"
bcc i STOREOLD = indent i ++ "STOREOLD;\n"
bcc i (OP l fn args) = indent i ++ doOp (creg l ++ " = ") fn args ++ ";\n"
-- Foreign calls marshal each argument C-ward (irts_c) and the result
-- back into a VAL (c_irts).
bcc i (FOREIGNCALL l rty (FStr fn) args)
= indent i ++
c_irts (toFType rty) (creg l ++ " = ")
(fn ++ "(" ++ showSep "," (map fcall args) ++ ")") ++ ";\n"
where fcall (t, arg) = irts_c (toFType t) (creg arg)
bcc i (NULL r) = indent i ++ creg r ++ " = NULL;\n" -- clear, so it'll be GCed
bcc i (ERROR str) = indent i ++ "fprintf(stderr, " ++ show str ++ "); fprintf(stderr, \"\\n\"); exit(-1);\n"
-- bcc i c = error (show c) -- indent i ++ "// not done yet\n"
-- Deconstruct the Foreign type in the defunctionalised expression and build
-- a foreign type description for c_irts and irts_c
-- Integer (arithmetic) foreign types only; any other constructor is a
-- compiler bug, reported via 'error'.
toAType (FCon i)
| i == sUN "C_IntChar" = ATInt ITChar
| i == sUN "C_IntNative" = ATInt ITNative
| i == sUN "C_IntBits8" = ATInt (ITFixed IT8)
| i == sUN "C_IntBits16" = ATInt (ITFixed IT16)
| i == sUN "C_IntBits32" = ATInt (ITFixed IT32)
| i == sUN "C_IntBits64" = ATInt (ITFixed IT64)
toAType t = error (show t ++ " not defined in toAType")
-- Map a defunctionalised foreign-type description to the backend's
-- FType; unrecognised shapes default to FAny rather than failing.
toFType (FCon c)
| c == sUN "C_Str" = FString
| c == sUN "C_Float" = FArith ATFloat
| c == sUN "C_Ptr" = FPtr
| c == sUN "C_MPtr" = FManagedPtr
| c == sUN "C_Unit" = FUnit
toFType (FApp c [_,ity])
| c == sUN "C_IntT" = FArith (toAType ity)
toFType (FApp c [_])
| c == sUN "C_Any" = FAny
toFType t = FAny
-- Marshal a raw C expression @x@ into an Idris VAL, prefixed with the
-- assignment lvalue @l@ (FUnit produces no wrapping, FAny no conversion).
c_irts (FArith (ATInt ITNative)) l x = l ++ "MKINT((i_int)(" ++ x ++ "))"
c_irts (FArith (ATInt ITChar)) l x = c_irts (FArith (ATInt ITNative)) l x
c_irts (FArith (ATInt (ITFixed ity))) l x
= l ++ "idris_b" ++ show (nativeTyWidth ity) ++ "const(vm, " ++ x ++ ")"
c_irts FString l x = l ++ "MKSTR(vm, " ++ x ++ ")"
c_irts FUnit l x = x
c_irts FPtr l x = l ++ "MKPTR(vm, " ++ x ++ ")"
c_irts FManagedPtr l x = l ++ "MKMPTR(vm, " ++ x ++ ")"
c_irts (FArith ATFloat) l x = l ++ "MKFLOAT(vm, " ++ x ++ ")"
c_irts FAny l x = l ++ x
-- Inverse of c_irts: marshal an Idris VAL expression @x@ out to the
-- corresponding raw C value.
irts_c (FArith (ATInt ITNative)) x = "GETINT(" ++ x ++ ")"
irts_c (FArith (ATInt ITChar)) x = irts_c (FArith (ATInt ITNative)) x
irts_c (FArith (ATInt (ITFixed ity))) x
= "(" ++ x ++ "->info.bits" ++ show (nativeTyWidth ity) ++ ")"
irts_c FString x = "GETSTR(" ++ x ++ ")"
irts_c FUnit x = x
irts_c FPtr x = "GETPTR(" ++ x ++ ")"
irts_c FManagedPtr x = "GETMPTR(" ++ x ++ ")"
irts_c (FArith ATFloat) x = "GETFLOAT(" ++ x ++ ")"
irts_c FAny x = x
-- Call an RTS fixed-width primitive: idris_b<width><op>(vm, args...).
bitOp v op ty args = v ++ "idris_b" ++ show (nativeTyWidth ty) ++ op ++ "(vm, " ++ intercalate ", " (map creg args) ++ ")"
-- Width-coercion primitive: idris_b<from><op><to>(vm, arg).
bitCoerce v op input output arg
= v ++ "idris_b" ++ show (nativeTyWidth input) ++ op ++ show (nativeTyWidth output) ++ "(vm, " ++ creg arg ++ ")"
-- C type name for a signed integer of the given native width.
signedTy :: NativeTy -> String
signedTy t = "int" ++ show (nativeTyWidth t) ++ "_t"
-- | Emit C for a single primitive operation.  @v@ is the assignment
-- prefix (e.g. @"RVAL = "@), so every equation yields one complete
-- expression to be terminated by the caller.
-- Native machine-int arithmetic, bitwise and comparison operators.
doOp v (LPlus (ATInt ITNative)) [l, r] = v ++ "ADD(" ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LMinus (ATInt ITNative)) [l, r] = v ++ "INTOP(-," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LTimes (ATInt ITNative)) [l, r] = v ++ "MULT(" ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LUDiv ITNative) [l, r] = v ++ "UINTOP(/," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSDiv (ATInt ITNative)) [l, r] = v ++ "INTOP(/," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LURem ITNative) [l, r] = v ++ "UINTOP(%," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSRem (ATInt ITNative)) [l, r] = v ++ "INTOP(%," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LAnd ITNative) [l, r] = v ++ "INTOP(&," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LOr ITNative) [l, r] = v ++ "INTOP(|," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LXOr ITNative) [l, r] = v ++ "INTOP(^," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSHL ITNative) [l, r] = v ++ "INTOP(<<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LLSHR ITNative) [l, r] = v ++ "UINTOP(>>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LASHR ITNative) [l, r] = v ++ "INTOP(>>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LCompl ITNative) [x] = v ++ "INTOP(~," ++ creg x ++ ")"
doOp v (LEq (ATInt ITNative)) [l, r] = v ++ "INTOP(==," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLt (ATInt ITNative)) [l, r] = v ++ "INTOP(<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLe (ATInt ITNative)) [l, r] = v ++ "INTOP(<=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGt (ATInt ITNative)) [l, r] = v ++ "INTOP(>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGe (ATInt ITNative)) [l, r] = v ++ "INTOP(>=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LLt ITNative) [l, r] = v ++ "UINTOP(<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LLe ITNative) [l, r] = v ++ "UINTOP(<=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LGt ITNative) [l, r] = v ++ "UINTOP(>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LGe ITNative) [l, r] = v ++ "UINTOP(>=," ++ creg l ++ ", " ++ creg r ++ ")"
-- Char ops share the native-int representation, so delegate.
doOp v (LPlus (ATInt ITChar)) [l, r] = doOp v (LPlus (ATInt ITNative)) [l, r]
doOp v (LMinus (ATInt ITChar)) [l, r] = doOp v (LMinus (ATInt ITNative)) [l, r]
doOp v (LTimes (ATInt ITChar)) [l, r] = doOp v (LTimes (ATInt ITNative)) [l, r]
doOp v (LUDiv ITChar) [l, r] = doOp v (LUDiv ITNative) [l, r]
doOp v (LSDiv (ATInt ITChar)) [l, r] = doOp v (LSDiv (ATInt ITNative)) [l, r]
doOp v (LURem ITChar) [l, r] = doOp v (LURem ITNative) [l, r]
doOp v (LSRem (ATInt ITChar)) [l, r] = doOp v (LSRem (ATInt ITNative)) [l, r]
doOp v (LAnd ITChar) [l, r] = doOp v (LAnd ITNative) [l, r]
doOp v (LOr ITChar) [l, r] = doOp v (LOr ITNative) [l, r]
doOp v (LXOr ITChar) [l, r] = doOp v (LXOr ITNative) [l, r]
doOp v (LSHL ITChar) [l, r] = doOp v (LSHL ITNative) [l, r]
doOp v (LLSHR ITChar) [l, r] = doOp v (LLSHR ITNative) [l, r]
doOp v (LASHR ITChar) [l, r] = doOp v (LASHR ITNative) [l, r]
doOp v (LCompl ITChar) [x] = doOp v (LCompl ITNative) [x]
doOp v (LEq (ATInt ITChar)) [l, r] = doOp v (LEq (ATInt ITNative)) [l, r]
doOp v (LSLt (ATInt ITChar)) [l, r] = doOp v (LSLt (ATInt ITNative)) [l, r]
doOp v (LSLe (ATInt ITChar)) [l, r] = doOp v (LSLe (ATInt ITNative)) [l, r]
doOp v (LSGt (ATInt ITChar)) [l, r] = doOp v (LSGt (ATInt ITNative)) [l, r]
doOp v (LSGe (ATInt ITChar)) [l, r] = doOp v (LSGe (ATInt ITNative)) [l, r]
doOp v (LLt ITChar) [l, r] = doOp v (LLt ITNative) [l, r]
doOp v (LLe ITChar) [l, r] = doOp v (LLe ITNative) [l, r]
doOp v (LGt ITChar) [l, r] = doOp v (LGt ITNative) [l, r]
doOp v (LGe ITChar) [l, r] = doOp v (LGe ITNative) [l, r]
-- Double-precision float ops.
doOp v (LPlus ATFloat) [l, r] = v ++ "FLOATOP(+," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LMinus ATFloat) [l, r] = v ++ "FLOATOP(-," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LTimes ATFloat) [l, r] = v ++ "FLOATOP(*," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSDiv ATFloat) [l, r] = v ++ "FLOATOP(/," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LEq ATFloat) [l, r] = v ++ "FLOATBOP(==," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLt ATFloat) [l, r] = v ++ "FLOATBOP(<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLe ATFloat) [l, r] = v ++ "FLOATBOP(<=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGt ATFloat) [l, r] = v ++ "FLOATBOP(>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGe ATFloat) [l, r] = v ++ "FLOATBOP(>=," ++ creg l ++ ", " ++ creg r ++ ")"
-- Arbitrary-precision (GMP-backed) ops and conversions.
doOp v (LIntFloat ITBig) [x] = v ++ "idris_castBigFloat(vm, " ++ creg x ++ ")"
doOp v (LFloatInt ITBig) [x] = v ++ "idris_castFloatBig(vm, " ++ creg x ++ ")"
doOp v (LPlus (ATInt ITBig)) [l, r] = v ++ "idris_bigPlus(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LMinus (ATInt ITBig)) [l, r] = v ++ "idris_bigMinus(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LTimes (ATInt ITBig)) [l, r] = v ++ "idris_bigTimes(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSDiv (ATInt ITBig)) [l, r] = v ++ "idris_bigDivide(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSRem (ATInt ITBig)) [l, r] = v ++ "idris_bigMod(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LEq (ATInt ITBig)) [l, r] = v ++ "idris_bigEq(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLt (ATInt ITBig)) [l, r] = v ++ "idris_bigLt(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLe (ATInt ITBig)) [l, r] = v ++ "idris_bigLe(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGt (ATInt ITBig)) [l, r] = v ++ "idris_bigGt(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGe (ATInt ITBig)) [l, r] = v ++ "idris_bigGe(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
-- Casts between int, float, string and bignum.
doOp v (LIntFloat ITNative) [x] = v ++ "idris_castIntFloat(" ++ creg x ++ ")"
doOp v (LFloatInt ITNative) [x] = v ++ "idris_castFloatInt(" ++ creg x ++ ")"
doOp v (LSExt ITNative ITBig) [x] = v ++ "idris_castIntBig(vm, " ++ creg x ++ ")"
doOp v (LTrunc ITBig ITNative) [x] = v ++ "idris_castBigInt(vm, " ++ creg x ++ ")"
doOp v (LStrInt ITBig) [x] = v ++ "idris_castStrBig(vm, " ++ creg x ++ ")"
doOp v (LIntStr ITBig) [x] = v ++ "idris_castBigStr(vm, " ++ creg x ++ ")"
doOp v (LIntStr ITNative) [x] = v ++ "idris_castIntStr(vm, " ++ creg x ++ ")"
doOp v (LStrInt ITNative) [x] = v ++ "idris_castStrInt(vm, " ++ creg x ++ ")"
doOp v (LIntStr (ITFixed _)) [x] = v ++ "idris_castBitsStr(vm, " ++ creg x ++ ")"
doOp v LFloatStr [x] = v ++ "idris_castFloatStr(vm, " ++ creg x ++ ")"
doOp v LStrFloat [x] = v ++ "idris_castStrFloat(vm, " ++ creg x ++ ")"
-- Fixed-width (Bits8/16/32/64) ops via the idris_b* RTS primitives.
doOp v (LSLt (ATInt (ITFixed ty))) [x, y] = bitOp v "SLt" ty [x, y]
doOp v (LSLe (ATInt (ITFixed ty))) [x, y] = bitOp v "SLte" ty [x, y]
doOp v (LEq (ATInt (ITFixed ty))) [x, y] = bitOp v "Eq" ty [x, y]
doOp v (LSGe (ATInt (ITFixed ty))) [x, y] = bitOp v "SGte" ty [x, y]
doOp v (LSGt (ATInt (ITFixed ty))) [x, y] = bitOp v "SGt" ty [x, y]
doOp v (LLt (ITFixed ty)) [x, y] = bitOp v "Lt" ty [x, y]
doOp v (LLe (ITFixed ty)) [x, y] = bitOp v "Lte" ty [x, y]
doOp v (LGe (ITFixed ty)) [x, y] = bitOp v "Gte" ty [x, y]
doOp v (LGt (ITFixed ty)) [x, y] = bitOp v "Gt" ty [x, y]
doOp v (LSHL (ITFixed ty)) [x, y] = bitOp v "Shl" ty [x, y]
doOp v (LLSHR (ITFixed ty)) [x, y] = bitOp v "LShr" ty [x, y]
doOp v (LASHR (ITFixed ty)) [x, y] = bitOp v "AShr" ty [x, y]
doOp v (LAnd (ITFixed ty)) [x, y] = bitOp v "And" ty [x, y]
doOp v (LOr (ITFixed ty)) [x, y] = bitOp v "Or" ty [x, y]
doOp v (LXOr (ITFixed ty)) [x, y] = bitOp v "Xor" ty [x, y]
doOp v (LCompl (ITFixed ty)) [x] = bitOp v "Compl" ty [x]
doOp v (LPlus (ATInt (ITFixed ty))) [x, y] = bitOp v "Plus" ty [x, y]
doOp v (LMinus (ATInt (ITFixed ty))) [x, y] = bitOp v "Minus" ty [x, y]
doOp v (LTimes (ATInt (ITFixed ty))) [x, y] = bitOp v "Times" ty [x, y]
doOp v (LUDiv (ITFixed ty)) [x, y] = bitOp v "UDiv" ty [x, y]
doOp v (LSDiv (ATInt (ITFixed ty))) [x, y] = bitOp v "SDiv" ty [x, y]
doOp v (LURem (ITFixed ty)) [x, y] = bitOp v "URem" ty [x, y]
doOp v (LSRem (ATInt (ITFixed ty))) [x, y] = bitOp v "SRem" ty [x, y]
-- Sign/zero extensions and truncations between integer widths.
doOp v (LSExt (ITFixed from) ITBig) [x]
= v ++ "MKBIGSI(vm, (" ++ signedTy from ++ ")" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LSExt ITNative (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, GETINT(" ++ creg x ++ "))"
doOp v (LSExt ITChar (ITFixed to)) [x]
= doOp v (LSExt ITNative (ITFixed to)) [x]
doOp v (LSExt (ITFixed from) ITNative) [x]
= v ++ "MKINT((i_int)((" ++ signedTy from ++ ")" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ "))"
doOp v (LSExt (ITFixed from) ITChar) [x]
= doOp v (LSExt (ITFixed from) ITNative) [x]
doOp v (LSExt (ITFixed from) (ITFixed to)) [x]
| nativeTyWidth from < nativeTyWidth to = bitCoerce v "S" from to x
doOp v (LZExt ITNative (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, (uintptr_t)GETINT(" ++ creg x ++ "))"
doOp v (LZExt ITChar (ITFixed to)) [x]
= doOp v (LZExt ITNative (ITFixed to)) [x]
doOp v (LZExt (ITFixed from) ITNative) [x]
= v ++ "MKINT((i_int)" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LZExt (ITFixed from) ITChar) [x]
= doOp v (LZExt (ITFixed from) ITNative) [x]
doOp v (LZExt (ITFixed from) ITBig) [x]
= v ++ "MKBIGUI(vm, " ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LZExt ITNative ITBig) [x]
= v ++ "MKBIGUI(vm, (uintptr_t)GETINT(" ++ creg x ++ "))"
doOp v (LZExt (ITFixed from) (ITFixed to)) [x]
| nativeTyWidth from < nativeTyWidth to = bitCoerce v "Z" from to x
doOp v (LTrunc ITNative (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, GETINT(" ++ creg x ++ "))"
doOp v (LTrunc ITChar (ITFixed to)) [x]
= doOp v (LTrunc ITNative (ITFixed to)) [x]
doOp v (LTrunc (ITFixed from) ITNative) [x]
= v ++ "MKINT((i_int)" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LTrunc (ITFixed from) ITChar) [x]
= doOp v (LTrunc (ITFixed from) ITNative) [x]
doOp v (LTrunc ITBig (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, ISINT(" ++ creg x ++ ") ? GETINT(" ++ creg x ++ ") : mpz_get_ui(GETMPZ(" ++ creg x ++ ")))"
doOp v (LTrunc (ITFixed from) (ITFixed to)) [x]
| nativeTyWidth from > nativeTyWidth to = bitCoerce v "T" from to x
-- C math-library wrappers.
doOp v LFExp [x] = v ++ flUnOp "exp" (creg x)
doOp v LFLog [x] = v ++ flUnOp "log" (creg x)
doOp v LFSin [x] = v ++ flUnOp "sin" (creg x)
doOp v LFCos [x] = v ++ flUnOp "cos" (creg x)
doOp v LFTan [x] = v ++ flUnOp "tan" (creg x)
doOp v LFASin [x] = v ++ flUnOp "asin" (creg x)
doOp v LFACos [x] = v ++ flUnOp "acos" (creg x)
doOp v LFATan [x] = v ++ flUnOp "atan" (creg x)
doOp v LFSqrt [x] = v ++ flUnOp "sqrt" (creg x)
doOp v LFFloor [x] = v ++ flUnOp "floor" (creg x)
doOp v LFCeil [x] = v ++ flUnOp "ceil" (creg x)
doOp v LFNegate [x] = v ++ "MKFLOAT(vm, -GETFLOAT(" ++ (creg x) ++ "))"
-- String functions which don't need to know we're UTF8
doOp v LStrConcat [l,r] = v ++ "idris_concat(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v LStrLt [l,r] = v ++ "idris_strlt(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v LStrEq [l,r] = v ++ "idris_streq(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v LReadStr [_] = v ++ "idris_readStr(vm, stdin)"
doOp v LWriteStr [_,s]
= v ++ "MKINT((i_int)(idris_writeStr(stdout"
++ ",GETSTR("
++ creg s ++ "))))"
-- String functions which need to know we're UTF8
doOp v LStrHead [x] = v ++ "idris_strHead(vm, " ++ creg x ++ ")"
doOp v LStrTail [x] = v ++ "idris_strTail(vm, " ++ creg x ++ ")"
doOp v LStrCons [x, y] = v ++ "idris_strCons(vm, " ++ creg x ++ "," ++ creg y ++ ")"
doOp v LStrIndex [x, y] = v ++ "idris_strIndex(vm, " ++ creg x ++ "," ++ creg y ++ ")"
doOp v LStrRev [x] = v ++ "idris_strRev(vm, " ++ creg x ++ ")"
doOp v LStrLen [x] = v ++ "idris_strlen(vm, " ++ creg x ++ ")"
doOp v LFork [x] = v ++ "MKPTR(vm, vmThread(vm, " ++ cname (sMN 0 "EVAL") ++ ", " ++ creg x ++ "))"
doOp v LPar [x] = v ++ creg x -- "MKPTR(vm, vmThread(vm, " ++ cname (MN 0 "EVAL") ++ ", " ++ creg x ++ "))"
-- Char<->Int conversions are identities at the representation level.
doOp v (LChInt ITNative) args = v ++ creg (last args)
doOp v (LChInt ITChar) args = doOp v (LChInt ITNative) args
doOp v (LIntCh ITNative) args = v ++ creg (last args)
doOp v (LIntCh ITChar) args = doOp v (LIntCh ITNative) args
doOp v LSystemInfo [x] = v ++ "idris_systemInfo(vm, " ++ creg x ++ ")"
doOp v LNoOp args = v ++ creg (last args)
-- Pointer primitives (declared as %extern in Builtins.idr)
doOp v (LExternal rf) [_,x]
| rf == sUN "prim__readFile"
= v ++ "idris_readStr(vm, GETPTR(" ++ creg x ++ "))"
doOp v (LExternal wf) [_,x,s]
| wf == sUN "prim__writeFile"
= v ++ "MKINT((i_int)(idris_writeStr(GETPTR(" ++ creg x
++ "),GETSTR("
++ creg s ++ "))))"
doOp v (LExternal vm) [] | vm == sUN "prim__vm" = v ++ "MKPTR(vm, vm)"
doOp v (LExternal si) [] | si == sUN "prim__stdin" = v ++ "MKPTR(vm, stdin)"
doOp v (LExternal so) [] | so == sUN "prim__stdout" = v ++ "MKPTR(vm, stdout)"
doOp v (LExternal se) [] | se == sUN "prim__stderr" = v ++ "MKPTR(vm, stderr)"
doOp v (LExternal nul) [] | nul == sUN "prim__null" = v ++ "MKPTR(vm, NULL)"
doOp v (LExternal eqp) [x, y] | eqp == sUN "prim__eqPtr"
= v ++ "MKINT((i_int)(GETPTR(" ++ creg x ++ ") == GETPTR(" ++ creg y ++ ")))"
doOp v (LExternal eqp) [x, y] | eqp == sUN "prim__eqManagedPtr"
= v ++ "MKINT((i_int)(GETMPTR(" ++ creg x ++ ") == GETMPTR(" ++ creg y ++ ")))"
doOp v (LExternal rp) [p, i] | rp == sUN "prim__registerPtr"
= v ++ "MKMPTR(vm, GETPTR(" ++ creg p ++ "), GETINT(" ++ creg i ++ "))"
-- Anything unhandled here is a code generator bug.
doOp _ op args = error $ "doOp not implemented (" ++ show (op, args) ++ ")"
-- | Wrap a unary C math-library call on a float value:
-- @flUnOp "sin" v@ yields @MKFLOAT(vm, sin(GETFLOAT(v)))@.
flUnOp :: String -> String -> String
flUnOp fn arg = concat ["MKFLOAT(vm, ", fn, "(GETFLOAT(", arg, ")))"]
-------------------- Interface file generation
-- First, the wrappers in the C file
-- | Emit the C-side wrapper for one export: exported data types become
-- typedefs of VAL; exported functions become C functions that push
-- their arguments onto the Idris stack, call the compiled function,
-- and marshal the result back out.
ifaceC :: Export -> String
ifaceC (ExportData n) = "typedef VAL " ++ cdesc n ++ ";\n"
ifaceC (ExportFun n cn ret args)
= ctype ret ++ " " ++ cdesc cn ++
"(VM* vm" ++ showArgs (zip argNames args) ++ ") {\n"
++ mkBody n (zip argNames args) ret ++ "}\n\n"
where showArgs [] = ""
showArgs ((n, t) : ts) = ", " ++ ctype t ++ " " ++ n ++
showArgs ts
argNames = zipWith (++) (repeat "arg") (map show [0..])
-- reserve at least 3 slots: the IO wrapper below (retval FIO) uses them
mkBody n as t = indent 1 ++ "INITFRAME;\n" ++
indent 1 ++ "RESERVE(" ++ show (max (length as) 3) ++ ");\n" ++
push 0 as ++ call n ++ retval t
where push i [] = ""
push i ((n, t) : ts) = indent 1 ++ c_irts (toFType t)
("TOP(" ++ show i ++ ") = ") n
++ ";\n" ++ push (i + 1) ts
call _ = indent 1 ++ "STOREOLD;\n" ++
indent 1 ++ "BASETOP(0);\n" ++
indent 1 ++ "ADDTOP(" ++ show (length as) ++ ");\n" ++
indent 1 ++ "CALL(" ++ cname n ++ ");\n"
-- an IO result must additionally be run through call__IO
retval (FIO t)
= indent 1 ++ "TOP(0) = NULL;\n" ++
indent 1 ++ "TOP(1) = NULL;\n" ++
indent 1 ++ "TOP(2) = RVAL;\n" ++
indent 1 ++ "STOREOLD;\n" ++
indent 1 ++ "BASETOP(0);\n" ++
indent 1 ++ "ADDTOP(3);\n" ++
indent 1 ++ "CALL(" ++ cname (sUN "call__IO") ++ ");\n" ++
retval t
retval t = indent 1 ++ "return " ++ irts_c (toFType t) "RVAL" ++ ";\n"
-- | Translate an exported FFI type description into the C type name
-- used in generated wrapper signatures and headers.
ctype (FCon c)
  | c == sUN "C_Str"   = "char*"
  | c == sUN "C_Float" = "float"
  | c == sUN "C_Ptr"   = "void*"
  | c == sUN "C_MPtr"  = "void*"
  | c == sUN "C_Unit"  = "void"
ctype (FApp c [_,ity])
  | c == sUN "C_IntT" = carith ity
ctype (FApp c [_])
  | c == sUN "C_Any" = "VAL"
ctype (FStr s) = s
ctype FUnknown = "void*"
ctype (FIO t) = ctype t
-- BUG FIX: this was previously @error "..." ++ show t@, which parses
-- as @(error "...") ++ show t@ — the offending type was appended to
-- the (never-returned) result of 'error' and so never appeared in the
-- message.  Concatenate inside the call instead.
ctype t = error ("Can't happen: Not a valid interface type " ++ show t)
-- C type names for exportable integer arithmetic types; only char and
-- native int may cross the exported C interface directly.
carith (FCon i)
| i == sUN "C_IntChar" = "char"
| i == sUN "C_IntNative" = "int"
carith t = error "Can't happen: Not an exportable arithmetic type"
-- The C-side name of an export must be supplied as a literal string.
cdesc (FStr s) = s
cdesc s = error "Can't happen: Not a valid C name"
-- Then, the header files
-- | Emit one C header file per export interface.
codegenH :: [ExportIFace] -> IO ()
codegenH = mapM_ writeIFace
-- | Write the include-guarded header for one export interface.  Only
-- the C FFI descriptor (FFI_C) is handled; any other FFI is silently
-- skipped.
writeIFace :: ExportIFace -> IO ()
writeIFace (Export ffic hdr exps)
| ffic == sNS (sUN "FFI_C") ["FFI_C"]
= do let hfile = "#ifndef " ++ hdr_guard hdr ++ "\n" ++
"#define " ++ hdr_guard hdr ++ "\n\n" ++
"#include <idris_rts.h>\n\n" ++
concatMap hdr_export exps ++ "\n" ++
"#endif\n\n"
writeFile hdr hfile
| otherwise = return ()
-- | Build a C include-guard macro name from a header file name:
-- alphanumerics are uppercased, everything else becomes an underscore,
-- and the whole name is prefixed with two underscores.
hdr_guard fname = "__" ++ [if isAlphaNum c then toUpper c else '_' | c <- fname]
-- | One header line per export: a typedef for exported data types, a
-- prototype (matching the wrapper emitted by 'ifaceC') for functions.
hdr_export :: Export -> String
hdr_export (ExportData n) = "typedef VAL " ++ cdesc n ++ ";\n"
hdr_export (ExportFun n cn ret args)
= ctype ret ++ " " ++ cdesc cn ++
"(VM* vm" ++ showArgs (zip argNames args) ++ ");\n"
where showArgs [] = ""
showArgs ((n, t) : ts) = ", " ++ ctype t ++ " " ++ n ++
showArgs ts
argNames = zipWith (++) (repeat "arg") (map show [0..])
|
osa1/Idris-dev
|
src/IRTS/CodegenC.hs
|
bsd-3-clause
| 31,711
| 0
| 28
| 9,074
| 14,411
| 7,168
| 7,243
| 570
| 24
|
module BST () where
import Language.Haskell.Liquid.Prelude
{-@
data Bst [blen] k v <l :: x0:k -> x1:k -> Prop, r :: x0:k -> x1:k -> Prop>
= Empty
| Bind (key :: k)
(value :: v)
(left :: Bst <l, r> (k <l key>) v)
(right :: Bst <l, r> (k <r key>) v)
@-}
{-@ measure blen :: (Bst k v) -> Int
blen(Empty) = 0
blen(Bind k v l r) = 1 + (blen l) + (blen r)
@-}
{-@ invariant {v:Bst k v | (blen v) >= 0} @-}
-- | A (possibly empty) binary search tree.  The ordering invariant is
-- imposed by the LiquidHaskell refinement above, not by the Haskell type.
data Bst k v = Empty | Bind k v (Bst k v) (Bst k v)
{-@
data Pair k v <p :: x0:k -> x1:k -> Prop, l :: x0:k -> x1:k -> Prop, r :: x0:k -> x1:k -> Prop>
= P (fld0 :: k) (fld1 :: v) (tree :: Bst <l, r> (k <p fld0>) v)
@-}
-- | A key/value binding together with a residual tree (used by getMin).
data Pair k v = P k v (Bst k v)
-- insert :: (Eq k, Ord k) => k -> v -> Bst k v -> Bst k v
-- | Insert a key/value pair, replacing the stored value when the key
-- is already present; otherwise descend into the appropriate subtree.
insert k v Empty = Bind k v Empty Empty
insert k v (Bind k' v' l r) =
  case compare k k' of
    EQ -> Bind k v l r
    LT -> Bind k' v' (insert k v l) r
    GT -> Bind k' v' l (insert k v r)
-- delete :: (Eq k, Ord k) => k -> Bst k v -> Bst k v
-- | Remove a key if present.  When the deleted node has a right
-- subtree, it is replaced by that subtree's minimum binding (via
-- getMin); otherwise the left subtree is promoted.
delete _ Empty = Empty
delete k' (Bind k v l r)
| k' == k =
case r of
Empty -> l
_ -> let P kmin vmin r' = getMin r in Bind kmin vmin l r'
| k' < k = Bind k v (delete k' l) r
| otherwise = Bind k v l (delete k' r)
-- | Split off the minimum binding of a non-empty tree, returning it
-- paired with the remaining tree.  Deliberately partial: calling it on
-- Empty is a programming error.
getMin (Bind k v Empty rt) = P k v rt
getMin (Bind k v lt rt) = P k0min v0min (Bind k v l' rt)
where P k0min v0min l' = getMin lt
getMin _ = error "getMin"
-- Runtime checks used as LiquidHaskell proof obligations.
-- chkMin x t: every key in t is strictly greater than x.
chkMin x Empty = liquidAssertB True
chkMin x (Bind k v lt rt) = liquidAssertB (x<k) && chkMin x lt && chkMin x rt
-- chk t: t satisfies the BST ordering invariant at every node.
chk Empty = liquidAssertB True
chk (Bind k v lt rt) = chk lt && chk rt && chkl k lt && chkr k rt
-- chkl k t: the root key of t (if any) is strictly below k.
chkl k Empty = liquidAssertB True
chkl k (Bind kl _ _ _) = liquidAssertB (kl < k)
-- chkr k t: the root key of t (if any) is strictly above k.
chkr k Empty = liquidAssertB True
chkr k (Bind kr _ _ _) = liquidAssertB (k < kr)
-- Concrete test values; 'choose' is the nondeterministic integer
-- source from Language.Haskell.Liquid.Prelude (imported above).
key, key1, val, val1 :: Int
key = choose 0
val = choose 1
key1 = choose 0
val1 = choose 1
-- A small tree plus a fold-based bulk builder.
bst = insert key val $ insert key1 val1 Empty
mkBst = foldl (\t (k, v) -> insert k v t) Empty
-- Properties discharged via the runtime checks above.
prop = chk bst
prop1 = chk $ mkBst $ zip [1..] [1..]
propDelete = chk $ delete 1 bst
propMin = chkMin x t
where pr = getMin bst
P x _ t = pr
|
ssaavedra/liquidhaskell
|
tests/pos/BST.hs
|
bsd-3-clause
| 2,233
| 2
| 13
| 740
| 833
| 414
| 419
| 42
| 2
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.StablePtr
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : ffi@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- This module is part of the Foreign Function Interface (FFI) and will usually
-- be imported via the module "Foreign".
--
-----------------------------------------------------------------------------
module Foreign.StablePtr
( -- * Stable references to Haskell values
StablePtr -- abstract
, newStablePtr -- :: a -> IO (StablePtr a)
, deRefStablePtr -- :: StablePtr a -> IO a
, freeStablePtr -- :: StablePtr a -> IO ()
, castStablePtrToPtr -- :: StablePtr a -> Ptr ()
, castPtrToStablePtr -- :: Ptr () -> StablePtr a
, -- ** The C-side interface
-- $cinterface
) where
#ifdef __GLASGOW_HASKELL__
import GHC.Stable
#endif
#ifdef __HUGS__
import Hugs.StablePtr
#endif
#ifdef __NHC__
import NHC.FFI
( StablePtr
, newStablePtr
, deRefStablePtr
, freeStablePtr
, castStablePtrToPtr
, castPtrToStablePtr
)
#endif
-- $cinterface
--
-- The following definition is available to C programs inter-operating with
-- Haskell code when including the header @HsFFI.h@.
--
-- > typedef void *HsStablePtr; /* C representation of a StablePtr */
--
-- Note that no assumptions may be made about the values representing stable
-- pointers. In fact, they need not even be valid memory addresses. The only
-- guarantee provided is that if they are passed back to Haskell land, the
-- function 'deRefStablePtr' will be able to reconstruct the
-- Haskell value referred to by the stable pointer.
|
beni55/haste-compiler
|
libraries/ghc-7.8/base/Foreign/StablePtr.hs
|
bsd-3-clause
| 1,913
| 0
| 5
| 416
| 105
| 83
| 22
| 10
| 0
|
{-# LANGUAGE OverloadedStrings #-}
import Network.Wai
import Network.Wai.Handler.Warp
import Blaze.ByteString.Builder (fromByteString)
-- | Serve a single static HTML page (a multipart upload form) on port
-- 3000; the incoming request is ignored entirely.
main = run 3000 app
  where
    app _ = return (responseBuilder status200 headers body)
    headers = [("Content-Type", "text/html")]
    body = fromByteString "<form method='post' enctype='multipart/form-data'><textarea name='foo'></textarea><input type='submit'></form>"
|
jberryman/wai
|
warp/attic/undrained.hs
|
mit
| 371
| 0
| 9
| 44
| 66
| 37
| 29
| 8
| 1
|
{-# OPTIONS_GHC -Wmissing-signatures -Werror -Wwarn=missing-signatures #-}
module Werror02 where
-- this should generate missing-signatures warning
-- NOTE: the type signature is deliberately omitted — this testsuite
-- module (see the OPTIONS_GHC line above) checks that
-- -Wwarn=missing-signatures demotes the warning despite -Werror.
foo () = ()
|
ezyang/ghc
|
testsuite/tests/warnings/should_compile/Werror02.hs
|
bsd-3-clause
| 161
| 0
| 6
| 20
| 18
| 11
| 7
| 3
| 1
|
{-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE FlexibleInstances #-}
module T7171 where
import T7171a
import Data.ByteString
-- this works
-- test1 :: [Int] -> [Int]
-- test1 = test
-- this fails
-- NOTE(review): GHC testsuite regression for #7171 — `test` is
-- imported from T7171a; instantiating it at ByteString should
-- typecheck just like the [Int] variant above (presumably exercising
-- functional-dependency improvement; confirm against T7171a).
test2 :: ByteString -> ByteString
test2 = test
|
ghc-android/ghc
|
testsuite/tests/typecheck/should_compile/T7171.hs
|
bsd-3-clause
| 254
| 0
| 5
| 44
| 32
| 22
| 10
| 6
| 1
|
module LightSpecular where
import qualified Color3
import qualified Direction
import qualified Normal
import qualified Reflection
import qualified Spaces
import qualified Specular
import qualified Vector3f
-- | Phong specular term: reflect the view vector about the surface
-- normal, clamp its alignment with the light direction to be
-- non-negative, raise it to the surface exponent, and modulate the
-- scaled light colour by the surface specular colour.
specularPhong :: Direction.T Spaces.Eye -> Direction.T Spaces.Eye -> Normal.T -> Color3.T -> Float -> Specular.T -> Vector3f.T
specularPhong stl view n light_color light_intensity (Specular.S surface_spec surface_exponent) =
  Vector3f.mult3 scaled surface_spec
  where
    reflected = Reflection.reflection view n
    alignment = (max 0.0 (Vector3f.dot3 reflected stl)) ** surface_exponent
    scaled    = Vector3f.scale (Vector3f.scale light_color light_intensity) alignment
-- | Blinn-Phong specular term.
-- NOTE(review): this body is currently byte-for-byte identical to
-- 'specularPhong' — it reflects about the normal rather than using a
-- half-vector. Confirm whether that duplication is intentional in the
-- documentation source.
specularBlinnPhong :: Direction.T Spaces.Eye -> Direction.T Spaces.Eye -> Normal.T -> Color3.T -> Float -> Specular.T -> Vector3f.T
specularBlinnPhong stl view n light_color light_intensity (Specular.S surface_spec surface_exponent) =
  Vector3f.mult3 scaled surface_spec
  where
    reflected = Reflection.reflection view n
    alignment = (max 0.0 (Vector3f.dot3 reflected stl)) ** surface_exponent
    scaled    = Vector3f.scale (Vector3f.scale light_color light_intensity) alignment
|
io7m/r2
|
com.io7m.r2.documentation/src/main/resources/com/io7m/r2/documentation/haskell/LightSpecular.hs
|
isc
| 1,264
| 0
| 14
| 210
| 342
| 176
| 166
| 24
| 1
|
{-# OPTIONS_GHC -fplugin Foreign.Storable.Generic.Plugin #-}
{-# OPTIONS_GHC -fplugin-opt=Foreign.Storable.Generic.Plugin:-crash #-}
-- | 'GStorable' instances for the concrete test types.
-- The Foreign.Storable.Generic plugin (enabled by the OPTIONS_GHC
-- pragmas above, with -crash making plugin failures fatal) optimises
-- these Generic-derived instances at compile time.
module Instances where
import Types
import Foreign.Storable.Generic
-- Instance bodies are empty: every method comes from the
-- Generic-based defaults provided by the GStorable class.
instance GStorable Flat
instance GStorable Nested
instance GStorable Nested2
|
mkloczko/derive-storable-plugin
|
test/ids/Concrete/Instances.hs
|
mit
| 281
| 0
| 5
| 28
| 36
| 20
| 16
| 8
| 0
|
-- | Re-export of "Data.Type.Bool" under the Rebase module namespace.
-- This module adds nothing of its own; it exists so downstream code
-- can import everything through the Rebase.* hierarchy.
module Rebase.Data.Type.Bool
(
  module Data.Type.Bool
)
where

import Data.Type.Bool
|
nikita-volkov/rebase
|
library/Rebase/Data/Type/Bool.hs
|
mit
| 86
| 0
| 5
| 12
| 23
| 16
| 7
| 4
| 0
|
{-#LANGUAGE DeriveGeneric #-}
-- | Test fixture types for the derive-storable plugin: a flat record
-- of primitives, a second flat shape, two levels of nesting, and
-- type synonyms over each (this file lives under test/ids/TypeSynonym,
-- presumably exercising synonym resolution — confirm against the
-- test driver).
module Types where
import GHC.Generics
data Flat = Flat Int Double deriving (Generic)
data Flat2 = Flat2 Double Char deriving (Generic)
data Nested = Nested Flat Flat2 deriving (Generic)
data Nested2 = Nested2 Nested deriving (Generic)
-- Synonyms for the above, one per data type.
type TFlat = Flat
type TNested = Nested
type TNested2 = Nested2
|
mkloczko/derive-storable-plugin
|
test/ids/TypeSynonym/Types.hs
|
mit
| 364
| 0
| 6
| 85
| 98
| 58
| 40
| 10
| 0
|
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
module OpaleyeDemo.Ids where
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Opaleye (Column, Nullable, PGInt4, PGOrd,
PGText)
--------------------------------------------------------------------------------
-- | Todo Id. The polymorphic wrapper 'TodoId'' lets one record shape
-- serve both the Haskell side ('TodoId' = Int) and the Opaleye column
-- side; the TH splice generates the product-profunctor adaptor
-- 'pTodoId' plus its instances.
data TodoId' a = TodoId { todoId :: a } deriving Show
makeAdaptorAndInstance "pTodoId" ''TodoId'
type TodoId = TodoId' Int
type TodoIdColumn = TodoId' (Column PGInt4)
type TodoIdColumnMaybe = TodoId' (Maybe (Column PGInt4))
type TodoIdColumnNullable = TodoId' (Column (Nullable PGInt4))
--------------------------------------------------------------------------------
-- | Priority indicator. Optional on both sides: 'Maybe Int' in
-- Haskell, a nullable PGInt4 column in the database.
data Prio' a = Prio { prio :: a } deriving Show
makeAdaptorAndInstance "pPrio" ''Prio'
type Prio = Prio' (Maybe Int)
type PrioColumn = Prio' (Column (Nullable PGInt4))
--------------------------------------------------------------------------------
-- | Hashtag. A plain 'String' in Haskell, a PGText column (nullable
-- variant included) in the database.
data HashtagStr' a = HashtagStr { hashtagStr :: a } deriving Show
makeAdaptorAndInstance "pHashtagStr" ''HashtagStr'
type HashtagStr = HashtagStr' String
type HashtagStrColumn = HashtagStr' (Column PGText)
type HashtagStrColumnNullable = HashtagStr' (Column (Nullable PGText))
|
charlydagos/haskell-sql-edsl-demo
|
code/opaleye/src/OpaleyeDemo/Ids.hs
|
mit
| 1,461
| 0
| 9
| 274
| 283
| 162
| 121
| 24
| 0
|
module HOI4.Types (
-- Used by Settings
HOI4Data (..)
, HOI4State (..)
, HOI4Info (..)
-- Features
, HOI4EvtDesc (..), HOI4Event (..), HOI4Option (..)
-- Low level
, HOI4Scope (..)
, Party (..)
-- , AIWillDo (..)
-- , AIModifier (..)
-- , aiWillDo
) where
import Data.List (foldl')
import Data.Text (Text)
import qualified Data.Text as T
import Data.HashMap.Strict (HashMap)
import Abstract -- everything
import SettingsTypes ( PPT, GameState (..), Settings (..)
, IsGame (..), IsGameData (..), IsGameState (..))
--import Doc
--------------------------------------------
-- Types used by toplevel Settings module --
--------------------------------------------
-- | Everything the HOI4 handlers carry at the toplevel: the global
-- settings plus per-file raw event scripts and the events parsed
-- from them.
data HOI4Data = HOI4Data {
    hoi4settings :: Settings -- ^ global program settings
    ,   hoi4eventScripts :: HashMap String GenericScript -- ^ raw scripts, keyed by source file
    ,   hoi4events :: HashMap Text HOI4Event -- ^ parsed events, keyed by event id
    -- etc.
    }
-- State
-- | Mutable processing state threaded through the HOI4 handlers.
data HOI4State = HOI4State {
        hoi4scopeStack :: [HOI4Scope] -- ^ innermost scope at the head
    ,   hoi4currentFile :: Maybe FilePath -- ^ file currently being processed, if any
    ,   hoi4currentIndent :: Maybe Int -- ^ current output indent level, if any
    }
-------------------
-- Feature types --
-------------------
-- | The three script forms an event description can take.
data HOI4EvtDesc
    = HOI4EvtDescSimple Text  -- desc = key
    | HOI4EvtDescConditional GenericScript Text
        -- desc = { text = key trigger = conditions }
    | HOI4EvtDescCompound GenericScript
        -- desc = { trigger = { conditional_expressions } }
    deriving (Show)
-- Object that accumulates info about an event. Fields are Maybe /
-- empty-by-default because they are filled in incrementally as the
-- event's script sections are parsed.
data HOI4Event = HOI4Event
    {   hoi4evt_id :: Maybe Text -- event id
    ,   hoi4evt_title :: Maybe Text -- event title l10n key
    ,   hoi4evt_desc :: [HOI4EvtDesc] -- descriptions (may be conditional)
    ,   hoi4evt_picture :: Maybe Text -- event picture
    ,   hoi4evt_scope :: HOI4Scope -- type of thing the event happens to
    ,   hoi4evt_trigger :: Maybe GenericScript -- conditions for the event to fire
    ,   hoi4evt_is_triggered_only :: Maybe Bool -- fired only by other scripts?
    ,   hoi4evt_mean_time_to_happen :: Maybe GenericScript -- MTTH script, if any
    ,   hoi4evt_immediate :: Maybe GenericScript -- effects applied on firing
    ,   hoi4evt_hide_window :: Bool -- event window suppressed?
    ,   hoi4evt_options :: [HOI4Option] -- player-selectable options
    ,   hoi4evt_path :: Maybe FilePath -- source file
    } deriving (Show)
-- | One selectable option within an event.
data HOI4Option = HOI4Option
    {   hoi4opt_name :: Maybe Text -- option label l10n key
    ,   hoi4opt_trigger :: Maybe GenericScript -- conditions for the option to appear
    ,   hoi4opt_ai_chance :: Maybe GenericScript -- AI selection weighting script
    ,   hoi4opt_effects :: Maybe GenericScript -- effects when chosen
    } deriving (Show)
-- | Access to HOI4-specific data for any game type @g@ whose scope is
-- 'HOI4Scope'. Methods read/write the script and event tables held in
-- the game state (cf. 'HOI4Data').
class (IsGame g,
       Scope g ~ HOI4Scope,
       IsGameData (GameData g),
       IsGameState (GameState g)) => HOI4Info g where
    -- | Look up an event's title l10n key by its id.
    getEventTitle :: Monad m => Text -- ^ Event ID
                              -> PPT g m (Maybe Text)
    -- | Raw event scripts, keyed by source file.
    getEventScripts :: Monad m => PPT g m (HashMap FilePath GenericScript)
    setEventScripts :: Monad m => HashMap FilePath GenericScript -> PPT g m ()
    -- | Parsed events, keyed by event id.
    getEvents :: Monad m => PPT g m (HashMap Text HOI4Event)
------------------------------
-- Shared lower level types --
------------------------------
-- TODO: expand these. Initial scopes assumed from event types.
-- | Kind of game object an event or script clause applies to.
data HOI4Scope
    = HOI4NoScope
    | HOI4Country
    | HOI4Province
    deriving (Show, Eq, Ord, Enum, Bounded)
-- | Political parties/ideologies in HOI4.
data Party
    = Communism
    | Democratic
    | Fascism
    | Neutrality
    deriving (Show, Eq, Ord, Enum, Bounded)
-- AI decision factors
{- deferred
data AIWillDo = AIWillDo
{ awd_base :: Maybe Double
, awd_modifiers :: [AIModifier]
} deriving (Show)
data AIModifier = AIModifier
{ aim_factor :: Maybe Double
, aim_triggers :: GenericScript
} deriving (Show)
newAIWillDo :: AIWillDo
newAIWillDo = AIWillDo Nothing []
newAIModifier :: AIModifier
newAIModifier = AIModifier Nothing []
aiWillDo :: GenericScript -> AIWillDo
aiWillDo = foldl' aiWillDoAddSection newAIWillDo
aiWillDoAddSection :: AIWillDo -> GenericStatement -> AIWillDo
aiWillDoAddSection awd (Statement (GenericLhs left) OpEq right) = case T.toLower left of
"factor" -> case floatRhs right of
Just fac -> awd { awd_base = Just fac }
_ -> awd
"modifier" -> case right of
CompoundRhs scr -> awd { awd_modifiers = awd_modifiers awd ++ [awdModifier scr] }
_ -> awd
_ -> awd
aiWillDoAddSection awd _ = awd
awdModifier :: GenericScript -> AIModifier
awdModifier = foldl' awdModifierAddSection newAIModifier
awdModifierAddSection :: AIModifier -> GenericStatement -> AIModifier
awdModifierAddSection aim stmt@(Statement (GenericLhs left) OpEq right) = case T.toLower left of
"factor" -> case floatRhs right of
Just fac -> aim { aim_factor = Just fac }
Nothing -> aim
_ -> -- the rest of the statements are just the conditions.
aim { aim_triggers = aim_triggers aim ++ [stmt] }
awdModifierAddSection aim _ = aim
-}
|
HairyDude/pdxparse
|
src/HOI4/Types.hs
|
mit
| 4,729
| 0
| 11
| 1,168
| 693
| 412
| 281
| -1
| -1
|
module IEEE754 (floatToDecimal) where
import Data.Char
type Bits = [Int]
-- | Interpret a 32-bit IEEE 754 bit pattern, given as a string of
-- '0'/'1' characters, as the 'Float' value it encodes: sign times
-- (integer part + fractional part) of the shifted mantissa.
floatToDecimal :: String -> Float
floatToDecimal str = sign * (toF (bitsToInt wholeBits) + bitsToFrac fracBits)
  where
    bits                  = toBits str
    sign                  = factor bits
    ex                    = IEEE754.exponent bits
    (wholeBits, fracBits) = splitParts ex (mantissa bits)
-- ##################################################################
-- Split bits into integer and fraction part based on the exponent
-- ##################################################################
-- | Split a bit string into its integer and fractional halves.
-- The exponent fixes where the binary point falls; bits are first
-- zero-padded via 'fillBits' so the split index is always in range.
splitParts :: Int -> Bits -> (Bits, Bits)
splitParts e bits =
  let padded = fillBits e bits
      cut    = if e > 0 then e + 1 else 1
  in splitAt cut padded
-- | A list of @n@ copies of the given element (empty for
-- non-positive @n@). Uses the standard 'replicate' instead of the
-- hand-rolled @take n . repeat@.
repeatN :: Int -> a -> [a]
repeatN n e = replicate n e
-- | Zero-pad a bit string so the exponent's binary point falls
-- inside it: pad on the right for positive exponents (the "+ 2"
-- accounts for the implicit leading bit plus one fraction slot),
-- on the left otherwise.
fillBits :: Int -> Bits -> Bits
fillBits e bits
  | e > 0     = bits ++ padding
  | otherwise = padding ++ bits
  where
    padding = repeatN gap 0
    gap
      | e > 0     = maximum [0, e - (length bits) + 2]
      | otherwise = abs e
-- ##################################################################
-- Retrieving different parts of the IEEE754 Float
-- ##################################################################
-- | Exponent bias for single-precision IEEE 754.
-- Explicit 'Int' signatures added: these previously relied on the
-- monomorphism restriction plus use-site inference.
cBIAS :: Int
cBIAS = 127
-- | Number of algebraic-sign bits.
cSIGN_BIT :: Int
cSIGN_BIT = 1
-- | Field widths of the exponent and mantissa (8 + 23 + 1 = 32).
cEXP_BITS, cMAN_BITS :: Int
cEXP_BITS = 8
cMAN_BITS = 23
-- | Algebraic sign of the encoded value: +1 when the leading (sign)
-- bit is 0, -1 otherwise — including the degenerate empty bit
-- string, which falls through to the catch-all.
factor :: Bits -> Float
factor bits = case bits of
  (0 : _) -> 1.0
  _       -> (-1.0)
-- | Unbiased exponent: the cEXP_BITS bits following the sign bit,
-- read as an integer, minus the bias.
exponent :: Bits -> Int
exponent bits = raw - cBIAS
  where
    raw = bitsToInt (take cEXP_BITS (drop cSIGN_BIT bits))
-- | Mantissa bits with the implicit leading 1 restored: skip the
-- sign and exponent fields, take cMAN_BITS bits, prepend 1.
mantissa :: Bits -> Bits
mantissa bits = 1 : take cMAN_BITS (drop (cSIGN_BIT + cEXP_BITS) bits)
-- ##################################################################
-- Converting Types
-- ##################################################################
-- | Turn a string of '0'/'1' characters into numeric bits by
-- subtracting the ASCII code of '0' (48) from each character.
-- Assumes the input contains only '0' and '1' — not validated here.
toBits :: String -> Bits
toBits = map (\c -> ord c - 48)
-- | 'fromIntegral' pinned to Int -> Float, to keep call sites terse
-- and monomorphic.
toF :: Int -> Float
toF n = fromIntegral n
-- | Convert a big-endian bit string to its integer value: reverse to
-- least-significant-first, pair with place values, sum.
-- Made total: the original @foldl1 (+)@ crashed on an empty bit
-- string; 'sum' yields 0 instead (backward compatible — in-module
-- callers always pass a non-empty list).
bitsToInt :: Bits -> Int
bitsToInt xs = sum (zipWith (*) weights (reverse xs))
  where
    -- place values 1, 2, 4, ...; zipWith truncates to the bit count
    weights = map (2 ^) [0 ..]
-- | Convert a fraction bit string (bits after the binary point,
-- most significant first) to its decimal value using place values
-- 1/2, 1/4, 1/8, ...
-- Made total: the original @foldl1 (+)@ crashed on an empty bit
-- string; 'sum' yields 0.0 instead. All terms are non-negative, so
-- the fold order change cannot alter the result.
bitsToFrac :: Bits -> Float
bitsToFrac xs = sum (zipWith (*) weights (map toF xs))
  where
    weights = map ((1.0 /) . (2.0 **)) [1.0 ..]
|
HenningBrandt/Study
|
IEEE754/IEEE754.hs
|
mit
| 2,700
| 0
| 13
| 645
| 732
| 399
| 333
| 44
| 2
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.