Dataset schema:
  code       string (length 2 to 1.05M)
  repo_name  string (length 5 to 101)
  path       string (length 4 to 991)
  language   string (3 distinct values)
  license    string (5 distinct values)
  size       int64 (2 to 1.05M)
module Examples.Language.MicroKanren where

import Language.MicroKanren
import Numeric.Natural

q5 :: Goal Natural
q5 = callFresh $ \q -> Variable q === Term 5

aAndB :: Goal Natural
aAndB = conj (callFresh $ \a -> Variable a === Term 7)
             (callFresh $ \b -> disj (Variable b === Term 5)
                                     (Variable b === Term 6))

bs :: Goal Natural
bs = callFresh $ \b -> disj (Variable b === Term 1) (Variable b === Term 2)

fives :: Var -> Goal Natural
fives x = disj (Variable x === Term 5) (fives x)

sixes :: Var -> Goal Natural
sixes x = disj (Variable x === Term 6) (sixes x)

fivesAndSixes :: Goal Natural
fivesAndSixes = callFresh $ disj <$> fives <*> sixes

q3 = callFresh $ \q ->
       callFresh $ \x ->
         callFresh $ \z ->
           conj (Variable x === Variable z)
                (conj (Term (Just 3) === Variable z)
                      (Variable q === Variable x))

q = callFresh $ \q -> callFresh $ \x -> Variable x === Term 3

main :: IO ()
main = do
  print $ q5 emptyState
  print $ aAndB emptyState
  print $ bs emptyState
  print $ take 5 $ fivesAndSixes emptyState
  print $ run 1 q3
  print $ run 1 q5
  print $ run 5 fivesAndSixes
repo_name: joneshf/MicroKanren
path: examples/Examples/Language/MicroKanren.hs
language: Haskell
license: bsd-3-clause
size: 1,172
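The sample above exercises the microKanren combinators one goal at a time. As an illustration only, here is one more goal built from the same names the file already uses (callFresh, conj, ===, Variable, Term, run); the module name is hypothetical, and this is a sketch against the API visible in the sample rather than the package's documented surface.

module Examples.Language.MicroKanren.Extra where

import Language.MicroKanren
import Numeric.Natural

-- A goal stating that x and y stand for the same value, and that
-- value is 3; it combines conj with the two forms of === seen above.
sameAsThree :: Var -> Var -> Goal Natural
sameAsThree x y =
  conj (Variable x === Variable y)
       (Variable y === Term 3)

-- Ask for the first answer, mirroring the sample's use of run.
demo :: IO ()
demo = print $ run 1 (callFresh $ \x -> callFresh $ \y -> sameAsThree x y)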
{-# LANGUAGE TemplateHaskell #-}

module Main where

import Control.Exception.IOChoice.TH
import Control.Exception

(|||>) :: IO a -> IO a -> IO a
(|||>) = $(newIOChoice [''ErrorCall, ''ArithException])

main :: IO ()
main = do
    a0 <- evaluate (1 `div` 0) |||> return 3
    putStrLn $ "Should be 3: " ++ show a0
    a1 <- error "Unexpected answer!" |||> return "expected answer."
    putStrLn $ "This is an " ++ a1
    a2 <- ioError (userError "IO Error!") |||> return "IO Exception is handled by default."
    putStrLn a2
    a3 <- assert False (return "should be fail.") |||> return "this should not be seen."
    putStrLn $ "This message should not be printed: " ++ a3
repo_name: kazu-yamamoto/io-choice
path: examples/derive-test.hs
language: Haskell
license: bsd-3-clause
size: 669
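The test above derives a custom choice operator with newIOChoice and checks which exceptions it swallows. The following sketch reuses the same splice in ordinary application code; the module name, operator name, and readConfigOrDefault helper are made up for illustration, and the only assumption about the library is what the test itself demonstrates (the generated operator handles the listed exceptions plus IOException by default).

{-# LANGUAGE TemplateHaskell #-}

module ConfigFallback where

import Control.Exception (ErrorCall, ArithException)
import Control.Exception.IOChoice.TH

-- The same splice as in the test above: the generated operator falls
-- through to its right argument on ErrorCall, ArithException, and
-- (by default) IOException.
(||?>) :: IO a -> IO a -> IO a
(||?>) = $(newIOChoice [''ErrorCall, ''ArithException])

-- Try to read a config file, falling back to a built-in default when
-- the read throws one of the handled exception types.
readConfigOrDefault :: FilePath -> IO String
readConfigOrDefault path = readFile path ||?> return "# default config"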
{-# LANGUAGE CPP #-} -- | -- Module: Data.Aeson -- Copyright: (c) 2011, 2012 Bryan O'Sullivan -- (c) 2011 MailRank, Inc. -- License: Apache -- Maintainer: Bryan O'Sullivan <bos@serpentine.com> -- Stability: experimental -- Portability: portable -- -- Types and functions for working efficiently with JSON data. -- -- (A note on naming: in Greek mythology, Aeson was the father of Jason.) module Data.Aeson ( -- * How to use this library -- $use -- ** Working with the AST -- $ast -- ** Decoding to a Haskell value -- $haskell -- ** Decoding a mixed-type object -- $mixed -- ** Automatically decoding data types -- $typeable -- ** Pitfalls -- $pitfalls -- * Encoding and decoding -- $encoding_and_decoding decode , decode' , eitherDecode , eitherDecode' , encode -- ** Variants for strict bytestrings , decodeStrict , decodeStrict' , eitherDecodeStrict , eitherDecodeStrict' -- * Core JSON types , Value(..) , Array , Object -- * Convenience types , DotNetTime(..) -- * Type conversion , FromJSON(..) , Result(..) , fromJSON , ToJSON(..) #ifdef GENERICS -- ** Generic JSON classes , GFromJSON(..) , GToJSON(..) , genericToJSON , genericParseJSON #endif -- * Inspecting @'Value's@ , withObject , withText , withArray , withNumber , withBool -- * Constructors and accessors , (.=) , (.:) , (.:?) , (.!=) , object -- * Parsing , json , json' ) where import Data.Aeson.Encode (encode) import Data.Aeson.Parser.Internal (decodeWith, decodeStrictWith, eitherDecodeWith, eitherDecodeStrictWith, jsonEOF, json, jsonEOF', json') import Data.Aeson.Types import qualified Data.ByteString as B import qualified Data.ByteString.Lazy as L -- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'. -- If this fails due to incomplete or invalid input, 'Nothing' is -- returned. -- -- The input must consist solely of a JSON document, with no trailing -- data except for whitespace. This restriction is necessary to ensure -- that if data is being lazily read from a file handle, the file -- handle will be closed in a timely fashion once the document has -- been parsed. -- -- This function parses immediately, but defers conversion. See -- 'json' for details. decode :: (FromJSON a) => L.ByteString -> Maybe a decode = decodeWith jsonEOF fromJSON {-# INLINE decode #-} -- | Efficiently deserialize a JSON value from a strict 'B.ByteString'. -- If this fails due to incomplete or invalid input, 'Nothing' is -- returned. -- -- The input must consist solely of a JSON document, with no trailing -- data except for whitespace. -- -- This function parses immediately, but defers conversion. See -- 'json' for details. decodeStrict :: (FromJSON a) => B.ByteString -> Maybe a decodeStrict = decodeStrictWith jsonEOF fromJSON {-# INLINE decodeStrict #-} -- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'. -- If this fails due to incomplete or invalid input, 'Nothing' is -- returned. -- -- The input must consist solely of a JSON document, with no trailing -- data except for whitespace. This restriction is necessary to ensure -- that if data is being lazily read from a file handle, the file -- handle will be closed in a timely fashion once the document has -- been parsed. -- -- This function parses and performs conversion immediately. See -- 'json'' for details. decode' :: (FromJSON a) => L.ByteString -> Maybe a decode' = decodeWith jsonEOF' fromJSON {-# INLINE decode' #-} -- | Efficiently deserialize a JSON value from a lazy 'L.ByteString'. -- If this fails due to incomplete or invalid input, 'Nothing' is -- returned. 
-- -- The input must consist solely of a JSON document, with no trailing -- data except for whitespace. -- -- This function parses and performs conversion immediately. See -- 'json'' for details. decodeStrict' :: (FromJSON a) => B.ByteString -> Maybe a decodeStrict' = decodeStrictWith jsonEOF' fromJSON {-# INLINE decodeStrict' #-} -- | Like 'decode' but returns an error message when decoding fails. eitherDecode :: (FromJSON a) => L.ByteString -> Either String a eitherDecode = eitherDecodeWith jsonEOF fromJSON {-# INLINE eitherDecode #-} -- | Like 'decodeStrict' but returns an error message when decoding fails. eitherDecodeStrict :: (FromJSON a) => B.ByteString -> Either String a eitherDecodeStrict = eitherDecodeStrictWith jsonEOF fromJSON {-# INLINE eitherDecodeStrict #-} -- | Like 'decode'' but returns an error message when decoding fails. eitherDecode' :: (FromJSON a) => L.ByteString -> Either String a eitherDecode' = eitherDecodeWith jsonEOF' fromJSON {-# INLINE eitherDecode' #-} -- | Like 'decodeStrict'' but returns an error message when decoding fails. eitherDecodeStrict' :: (FromJSON a) => B.ByteString -> Either String a eitherDecodeStrict' = eitherDecodeStrictWith jsonEOF' fromJSON {-# INLINE eitherDecodeStrict' #-} -- $use -- -- This section contains basic information on the different ways to -- decode data using this library. These range from simple but -- inflexible, to complex but flexible. -- -- The most common way to use the library is to define a data type, -- corresponding to some JSON data you want to work with, and then -- write either a 'FromJSON' instance, a to 'ToJSON' instance, or both -- for that type. For example, given this JSON data: -- -- > { "name": "Joe", "age": 12 } -- -- we create a matching data type: -- -- > data Person = Person -- > { name :: Text -- > , age :: Int -- > } deriving Show -- -- To decode data, we need to define a 'FromJSON' instance: -- -- > {-# LANGUAGE OverloadedStrings #-} -- > -- > instance FromJSON Person where -- > parseJSON (Object v) = Person <$> -- > v .: "name" <*> -- > v .: "age" -- > -- A non-Object value is of the wrong type, so fail. -- > parseJSON _ = mzero -- -- We can now parse the JSON data like so: -- -- > >>> decode "{\"name\":\"Joe\",\"age\":12}" :: Maybe Person -- > Just (Person {name = "Joe", age = 12}) -- -- To encode data, we need to define a 'ToJSON' instance: -- -- > instance ToJSON Person where -- > toJSON (Person name age) = object ["name" .= name, "age" .= age] -- -- We can now encode a value like so: -- -- > >>> encode (Person {name = "Joe", age = 12}) -- > "{\"name\":\"Joe\",\"age\":12}" -- -- There are predefined 'FromJSON' and 'ToJSON' instances for many -- types. Here's an example using lists and 'Int's: -- -- > >>> decode "[1,2,3]" :: Maybe [Int] -- > Just [1,2,3] -- -- And here's an example using the 'Data.Map.Map' type to get a map of -- 'Int's. -- -- > >>> decode "{\"foo\":1,\"bar\":2}" :: Maybe (Map String Int) -- > Just (fromList [("bar",2),("foo",1)]) -- While the notes below focus on decoding, you can apply almost the -- same techniques to /encoding/ data. (The main difference is that -- encoding always succeeds, but decoding has to handle the -- possibility of failure, where an input doesn't match our -- expectations.) -- -- See the documentation of 'FromJSON' and 'ToJSON' for some examples -- of how you can automatically derive instances in some -- circumstances. -- $ast -- -- Sometimes you want to work with JSON data directly, without first -- converting it to a custom data type. 
This can be useful if you want -- to e.g. convert JSON data to YAML data, without knowing what the -- contents of the original JSON data was. The 'Value' type, which is -- an instance of 'FromJSON', is used to represent an arbitrary JSON -- AST (abstract syntax tree). Example usage: -- -- > >>> decode "{\"foo\": 123}" :: Maybe Value -- > Just (Object (fromList [("foo",Number 123)])) -- -- > >>> decode "{\"foo\": [\"abc\",\"def\"]}" :: Maybe Value -- > Just (Object (fromList [("foo",Array (fromList [String "abc",String "def"]))])) -- -- Once you have a 'Value' you can write functions to traverse it and -- make arbitrary transformations. -- $haskell -- -- Any instance of 'FromJSON' can be specified (but see the -- \"Pitfalls\" section here&#8212;"Data.Aeson#pitfalls"): -- -- > λ> decode "[1,2,3]" :: Maybe [Int] -- > Just [1,2,3] -- -- Alternatively, there are instances for standard data types, so you -- can use them directly. For example, use the 'Data.Map.Map' type to -- get a map of 'Int's. -- -- > λ> :m + Data.Map -- > λ> decode "{\"foo\":1,\"bar\":2}" :: Maybe (Map String Int) -- > Just (fromList [("bar",2),("foo",1)]) -- $mixed -- -- The above approach with maps of course will not work for mixed-type -- objects that don't follow a strict schema, but there are a couple -- of approaches available for these. -- -- The 'Object' type contains JSON objects: -- -- > λ> decode "{\"name\":\"Dave\",\"age\":2}" :: Maybe Object -- > Just (fromList) [("name",String "Dave"),("age",Number 2)] -- -- You can extract values from it with a parser using 'parse', -- 'parseEither' or, in this example, 'parseMaybe': -- -- > λ> do result <- decode "{\"name\":\"Dave\",\"age\":2}" -- > flip parseMaybe result $ \obj -> do -- > age <- obj .: "age" -- > name <- obj .: "name" -- > return (name ++ ": " ++ show (age*2)) -- > -- > Just "Dave: 4" -- -- Considering that any type that implements 'FromJSON' can be used -- here, this is quite a powerful way to parse JSON. See the -- documentation in 'FromJSON' for how to implement this class for -- your own data types. -- -- The downside is that you have to write the parser yourself; the -- upside is that you have complete control over the way the JSON is -- parsed. -- $typeable -- -- If you don't want fine control and would prefer the JSON be parsed -- to your own data types automatically according to some reasonably -- sensible isomorphic implementation, you can use the generic parser -- based on 'Data.Typeable.Typeable' and 'Data.Data.Data'. Switch to -- the 'Data.Aeson.Generic' module, and you can do the following: -- -- > λ> decode "[1]" :: Maybe [Int] -- > Just [1] -- > λ> :m + Data.Typeable Data.Data -- > λ> :set -XDeriveDataTypeable -- > λ> data Person = Person { personName :: String, personAge :: Int } deriving (Data,Typeable,Show) -- > λ> encode Person { personName = "Chris", personAge = 123 } -- > "{\"personAge\":123,\"personName\":\"Chris\"}" -- > λ> decode "{\"personAge\":123,\"personName\":\"Chris\"}" :: Maybe Person -- > Just (Person { -- > personName = "Chris", personAge = 123 -- > }) -- -- Be aware that the encoding may not always be what you'd naively -- expect: -- -- > λ> data Foo = Foo Int Int deriving (Data,Typeable,Show) -- > λ> encode (Foo 1 2) -- > "[1,2]" -- -- With this approach, it's best to treat the -- 'Data.Aeson.Generic.decode' and 'Data.Aeson.Generic.encode' -- functions as an isomorphism, and not to rely upon (or care about) -- the specific intermediate representation. 
-- $pitfalls -- #pitfalls# -- -- Note that the JSON standard requires that the top-level value be -- either an array or an object. If you try to use 'decode' with a -- result type that is /not/ represented in JSON as an array or -- object, your code will typecheck, but it will always \"fail\" at -- runtime: -- -- > >>> decode "1" :: Maybe Int -- > Nothing -- > >>> decode "1" :: Maybe String -- > Nothing -- -- So stick to objects (e.g. maps in Haskell) or arrays (lists or -- vectors in Haskell): -- -- > >>> decode "[1,2,3]" :: Maybe [Int] -- > Just [1,2,3] -- -- When encoding to JSON you can encode anything that's an instance of -- 'ToJSON', and this may include simple types. So beware that this -- aspect of the API is not isomorphic. You can round-trip arrays and -- maps, but not simple values: -- -- > >>> encode [1,2,3] -- > "[1,2,3]" -- > >>> decode (encode [1]) :: Maybe [Int] -- > Just [1] -- > >>> encode 1 -- > "1" -- > >>> decode (encode (1 :: Int)) :: Maybe Int -- > Nothing -- -- Alternatively, see 'Data.Aeson.Parser.value' to parse non-top-level -- JSON values. -- $encoding_and_decoding -- -- Encoding and decoding are each two-step processes. -- -- * To encode a value, it is first converted to an abstract syntax -- tree (AST), using 'ToJSON'. This generic representation is then -- encoded as bytes. -- -- * When decoding a value, the process is reversed: the bytes are -- converted to an AST, then the 'FromJSON' class is used to convert -- to the desired type. -- -- For convenience, the 'encode' and 'decode' functions combine both -- steps.
repo_name: moonKimura/aeson-0.6.2.1
path: Data/Aeson.hs
language: Haskell
license: bsd-3-clause
size: 12,627
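The Haddock text in the module above introduces its Person example in fragments. Assembled into one standalone file it looks roughly like the sketch below; the module name is made up, and the explicit Control.Applicative import matches the pre-AMP GHC versions this aeson release targeted.

{-# LANGUAGE OverloadedStrings #-}

module PersonExample where

import Control.Applicative ((<$>), (<*>))
import Control.Monad (mzero)
import Data.Aeson
import Data.Text (Text)

data Person = Person
  { name :: Text
  , age  :: Int
  } deriving Show

instance FromJSON Person where
  parseJSON (Object v) = Person <$> v .: "name" <*> v .: "age"
  -- A non-Object value is of the wrong type, so fail.
  parseJSON _          = mzero

instance ToJSON Person where
  toJSON (Person n a) = object ["name" .= n, "age" .= a]

main :: IO ()
main = do
  print (decode "{\"name\":\"Joe\",\"age\":12}" :: Maybe Person)
  print (encode (Person "Joe" 12))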
{-# LANGUAGE ScopedTypeVariables, CPP, BangPatterns, RankNTypes #-} #if __GLASGOW_HASKELL__ == 700 -- This is needed as a workaround for an old bug in GHC 7.0.1 (Trac #4498) {-# LANGUAGE MonoPatBinds #-} #endif #if __GLASGOW_HASKELL__ >= 703 {-# LANGUAGE Unsafe #-} #endif {-# OPTIONS_HADDOCK not-home #-} -- | Copyright : (c) 2010 - 2011 Simon Meier -- License : BSD3-style (see LICENSE) -- -- Maintainer : Simon Meier <iridcode@gmail.com> -- Stability : unstable, private -- Portability : GHC -- -- *Warning:* this module is internal. If you find that you need it then please -- contact the maintainers and explain what you are trying to do and discuss -- what you would need in the public API. It is important that you do this as -- the module may not be exposed at all in future releases. -- -- Core types and functions for the 'Builder' monoid and its generalization, -- the 'Put' monad. -- -- The design of the 'Builder' monoid is optimized such that -- -- 1. buffers of arbitrary size can be filled as efficiently as possible and -- -- 2. sequencing of 'Builder's is as cheap as possible. -- -- We achieve (1) by completely handing over control over writing to the buffer -- to the 'BuildStep' implementing the 'Builder'. This 'BuildStep' is just told -- the start and the end of the buffer (represented as a 'BufferRange'). Then, -- the 'BuildStep' can write to as big a prefix of this 'BufferRange' in any -- way it desires. If the 'BuildStep' is done, the 'BufferRange' is full, or a -- long sequence of bytes should be inserted directly, then the 'BuildStep' -- signals this to its caller using a 'BuildSignal'. -- -- We achieve (2) by requiring that every 'Builder' is implemented by a -- 'BuildStep' that takes a continuation 'BuildStep', which it calls with the -- updated 'BufferRange' after it is done. Therefore, only two pointers have -- to be passed in a function call to implement concatenation of 'Builder's. -- Moreover, many 'Builder's are completely inlined, which enables the compiler -- to sequence them without a function call and with no boxing at all. -- -- This design gives the implementation of a 'Builder' full access to the 'IO' -- monad. Therefore, utmost care has to be taken to not overwrite anything -- outside the given 'BufferRange's. Moreover, further care has to be taken to -- ensure that 'Builder's and 'Put's are referentially transparent. See the -- comments of the 'builder' and 'put' functions for further information. -- Note that there are /no safety belts/ at all, when implementing a 'Builder' -- using an 'IO' action: you are writing code that might enable the next -- buffer-overflow attack on a Haskell server! -- module Data.ByteString.Builder.Internal ( -- * Buffer management Buffer(..) , BufferRange(..) , newBuffer , bufferSize , byteStringFromBuffer , ChunkIOStream(..) 
, buildStepToCIOS , ciosUnitToLazyByteString , ciosToLazyByteString -- * Build signals and steps , BuildSignal , BuildStep , finalBuildStep , done , bufferFull , insertChunk , fillWithBuildStep -- * The Builder monoid , Builder , builder , runBuilder , runBuilderWith -- ** Primitive combinators , empty , append , flush , ensureFree -- , sizedChunksInsert , byteStringCopy , byteStringInsert , byteStringThreshold , lazyByteStringCopy , lazyByteStringInsert , lazyByteStringThreshold , shortByteString , maximalCopySize , byteString , lazyByteString -- ** Execution , toLazyByteStringWith , AllocationStrategy , safeStrategy , untrimmedStrategy , customStrategy , L.smallChunkSize , L.defaultChunkSize , L.chunkOverhead -- * The Put monad , Put , put , runPut -- ** Execution , putToLazyByteString , putToLazyByteStringWith , hPut -- ** Conversion to and from Builders , putBuilder , fromPut -- -- ** Lifting IO actions -- , putLiftIO ) where import Control.Arrow (second) #if !(MIN_VERSION_base(4,11,0)) && MIN_VERSION_base(4,9,0) import Data.Semigroup (Semigroup((<>))) #endif #if !(MIN_VERSION_base(4,8,0)) import Data.Monoid import Control.Applicative (Applicative(..),(<$>)) #endif import qualified Data.ByteString as S import qualified Data.ByteString.Internal as S import qualified Data.ByteString.Lazy.Internal as L import qualified Data.ByteString.Short.Internal as Sh #if __GLASGOW_HASKELL__ >= 611 import qualified GHC.IO.Buffer as IO (Buffer(..), newByteBuffer) import GHC.IO.Handle.Internals (wantWritableHandle, flushWriteBuffer) import GHC.IO.Handle.Types (Handle__, haByteBuffer, haBufferMode) import System.IO (hFlush, BufferMode(..)) import Data.IORef #else import qualified Data.ByteString.Lazy as L #endif import System.IO (Handle) #if MIN_VERSION_base(4,4,0) #if MIN_VERSION_base(4,7,0) import Foreign #else import Foreign hiding (unsafeForeignPtrToPtr) #endif import Foreign.ForeignPtr.Unsafe (unsafeForeignPtrToPtr) import System.IO.Unsafe (unsafeDupablePerformIO) #else import Foreign import GHC.IO (unsafeDupablePerformIO) #endif ------------------------------------------------------------------------------ -- Buffers ------------------------------------------------------------------------------ -- | A range of bytes in a buffer represented by the pointer to the first byte -- of the range and the pointer to the first byte /after/ the range. data BufferRange = BufferRange {-# UNPACK #-} !(Ptr Word8) -- First byte of range {-# UNPACK #-} !(Ptr Word8) -- First byte /after/ range -- | A 'Buffer' together with the 'BufferRange' of free bytes. The filled -- space starts at offset 0 and ends at the first free byte. data Buffer = Buffer {-# UNPACK #-} !(ForeignPtr Word8) {-# UNPACK #-} !BufferRange -- | Combined size of the filled and free space in the buffer. {-# INLINE bufferSize #-} bufferSize :: Buffer -> Int bufferSize (Buffer fpbuf (BufferRange _ ope)) = ope `minusPtr` unsafeForeignPtrToPtr fpbuf -- | Allocate a new buffer of the given size. {-# INLINE newBuffer #-} newBuffer :: Int -> IO Buffer newBuffer size = do fpbuf <- S.mallocByteString size let pbuf = unsafeForeignPtrToPtr fpbuf return $! Buffer fpbuf (BufferRange pbuf (pbuf `plusPtr` size)) -- | Convert the filled part of a 'Buffer' to a strict 'S.ByteString'. 
{-# INLINE byteStringFromBuffer #-} byteStringFromBuffer :: Buffer -> S.ByteString byteStringFromBuffer (Buffer fpbuf (BufferRange op _)) = S.PS fpbuf 0 (op `minusPtr` unsafeForeignPtrToPtr fpbuf) -- | Prepend the filled part of a 'Buffer' to a lazy 'L.ByteString' -- trimming it if necessary. {-# INLINE trimmedChunkFromBuffer #-} trimmedChunkFromBuffer :: AllocationStrategy -> Buffer -> L.ByteString -> L.ByteString trimmedChunkFromBuffer (AllocationStrategy _ _ trim) buf k | S.null bs = k | trim (S.length bs) (bufferSize buf) = L.Chunk (S.copy bs) k | otherwise = L.Chunk bs k where bs = byteStringFromBuffer buf ------------------------------------------------------------------------------ -- Chunked IO Stream ------------------------------------------------------------------------------ -- | A stream of chunks that are constructed in the 'IO' monad. -- -- This datatype serves as the common interface for the buffer-by-buffer -- execution of a 'BuildStep' by 'buildStepToCIOS'. Typical users of this -- interface are 'ciosToLazyByteString' or iteratee-style libraries like -- @enumerator@. data ChunkIOStream a = Finished Buffer a -- ^ The partially filled last buffer together with the result. | Yield1 S.ByteString (IO (ChunkIOStream a)) -- ^ Yield a /non-empty/ strict 'S.ByteString'. -- | A smart constructor for yielding one chunk that ignores the chunk if -- it is empty. {-# INLINE yield1 #-} yield1 :: S.ByteString -> IO (ChunkIOStream a) -> IO (ChunkIOStream a) yield1 bs cios | S.null bs = cios | otherwise = return $ Yield1 bs cios -- | Convert a @'ChunkIOStream' ()@ to a lazy 'L.ByteString' using -- 'unsafeDupablePerformIO'. {-# INLINE ciosUnitToLazyByteString #-} ciosUnitToLazyByteString :: AllocationStrategy -> L.ByteString -> ChunkIOStream () -> L.ByteString ciosUnitToLazyByteString strategy k = go where go (Finished buf _) = trimmedChunkFromBuffer strategy buf k go (Yield1 bs io) = L.Chunk bs $ unsafeDupablePerformIO (go <$> io) -- | Convert a 'ChunkIOStream' to a lazy tuple of the result and the written -- 'L.ByteString' using 'unsafeDupablePerformIO'. {-# INLINE ciosToLazyByteString #-} ciosToLazyByteString :: AllocationStrategy -> (a -> (b, L.ByteString)) -> ChunkIOStream a -> (b, L.ByteString) ciosToLazyByteString strategy k = go where go (Finished buf x) = second (trimmedChunkFromBuffer strategy buf) $ k x go (Yield1 bs io) = second (L.Chunk bs) $ unsafeDupablePerformIO (go <$> io) ------------------------------------------------------------------------------ -- Build signals ------------------------------------------------------------------------------ -- | 'BuildStep's may be called *multiple times* and they must not rise an -- async. exception. type BuildStep a = BufferRange -> IO (BuildSignal a) -- | 'BuildSignal's abstract signals to the caller of a 'BuildStep'. There are -- three signals: 'done', 'bufferFull', or 'insertChunks signals data BuildSignal a = Done {-# UNPACK #-} !(Ptr Word8) a | BufferFull {-# UNPACK #-} !Int {-# UNPACK #-} !(Ptr Word8) (BuildStep a) | InsertChunk {-# UNPACK #-} !(Ptr Word8) S.ByteString (BuildStep a) -- | Signal that the current 'BuildStep' is done and has computed a value. {-# INLINE done #-} done :: Ptr Word8 -- ^ Next free byte in current 'BufferRange' -> a -- ^ Computed value -> BuildSignal a done = Done -- | Signal that the current buffer is full. {-# INLINE bufferFull #-} bufferFull :: Int -- ^ Minimal size of next 'BufferRange'. -> Ptr Word8 -- ^ Next free byte in current 'BufferRange'. 
-> BuildStep a -- ^ 'BuildStep' to run on the next 'BufferRange'. This 'BuildStep' -- may assume that it is called with a 'BufferRange' of at least the -- required minimal size; i.e., the caller of this 'BuildStep' must -- guarantee this. -> BuildSignal a bufferFull = BufferFull -- | Signal that a 'S.ByteString' chunk should be inserted directly. {-# INLINE insertChunk #-} insertChunk :: Ptr Word8 -- ^ Next free byte in current 'BufferRange' -> S.ByteString -- ^ Chunk to insert. -> BuildStep a -- ^ 'BuildStep' to run on next 'BufferRange' -> BuildSignal a insertChunk op bs = InsertChunk op bs -- | Fill a 'BufferRange' using a 'BuildStep'. {-# INLINE fillWithBuildStep #-} fillWithBuildStep :: BuildStep a -- ^ Build step to use for filling the 'BufferRange'. -> (Ptr Word8 -> a -> IO b) -- ^ Handling the 'done' signal -> (Ptr Word8 -> Int -> BuildStep a -> IO b) -- ^ Handling the 'bufferFull' signal -> (Ptr Word8 -> S.ByteString -> BuildStep a -> IO b) -- ^ Handling the 'insertChunk' signal -> BufferRange -- ^ Buffer range to fill. -> IO b -- ^ Value computed while filling this 'BufferRange'. fillWithBuildStep step fDone fFull fChunk !br = do signal <- step br case signal of Done op x -> fDone op x BufferFull minSize op nextStep -> fFull op minSize nextStep InsertChunk op bs nextStep -> fChunk op bs nextStep ------------------------------------------------------------------------------ -- The 'Builder' monoid ------------------------------------------------------------------------------ -- | 'Builder's denote sequences of bytes. -- They are 'Monoid's where -- 'mempty' is the zero-length sequence and -- 'mappend' is concatenation, which runs in /O(1)/. newtype Builder = Builder (forall r. BuildStep r -> BuildStep r) -- | Construct a 'Builder'. In contrast to 'BuildStep's, 'Builder's are -- referentially transparent. {-# INLINE builder #-} builder :: (forall r. BuildStep r -> BuildStep r) -- ^ A function that fills a 'BufferRange', calls the continuation with -- the updated 'BufferRange' once its done, and signals its caller how -- to proceed using 'done', 'bufferFull', or 'insertChunk'. -- -- This function must be referentially transparent; i.e., calling it -- multiple times with equally sized 'BufferRange's must result in the -- same sequence of bytes being written. If you need mutable state, -- then you must allocate it anew upon each call of this function. -- Moroever, this function must call the continuation once its done. -- Otherwise, concatenation of 'Builder's does not work. Finally, this -- function must write to all bytes that it claims it has written. -- Otherwise, the resulting 'Builder' is not guaranteed to be -- referentially transparent and sensitive data might leak. -> Builder builder = Builder -- | The final build step that returns the 'done' signal. finalBuildStep :: BuildStep () finalBuildStep !(BufferRange op _) = return $ Done op () -- | Run a 'Builder' with the 'finalBuildStep'. {-# INLINE runBuilder #-} runBuilder :: Builder -- ^ 'Builder' to run -> BuildStep () -- ^ 'BuildStep' that writes the byte stream of this -- 'Builder' and signals 'done' upon completion. runBuilder b = runBuilderWith b finalBuildStep -- | Run a 'Builder'. {-# INLINE runBuilderWith #-} runBuilderWith :: Builder -- ^ 'Builder' to run -> BuildStep a -- ^ Continuation 'BuildStep' -> BuildStep a runBuilderWith (Builder b) = b -- | The 'Builder' denoting a zero-length sequence of bytes. This function is -- only exported for use in rewriting rules. Use 'mempty' otherwise. 
{-# INLINE[1] empty #-} empty :: Builder empty = Builder (\cont -> (\range -> cont range)) -- This eta expansion (hopefully) allows GHC to worker-wrapper the -- 'BufferRange' in the 'empty' base case of loops (since -- worker-wrapper requires (TODO: verify this) that all paths match -- against the wrapped argument. -- | Concatenate two 'Builder's. This function is only exported for use in rewriting -- rules. Use 'mappend' otherwise. {-# INLINE[1] append #-} append :: Builder -> Builder -> Builder append (Builder b1) (Builder b2) = Builder $ b1 . b2 #if MIN_VERSION_base(4,9,0) instance Semigroup Builder where {-# INLINE (<>) #-} (<>) = append #endif instance Monoid Builder where {-# INLINE mempty #-} mempty = empty {-# INLINE mappend #-} #if MIN_VERSION_base(4,9,0) mappend = (<>) #else mappend = append #endif {-# INLINE mconcat #-} mconcat = foldr mappend mempty -- | Flush the current buffer. This introduces a chunk boundary. {-# INLINE flush #-} flush :: Builder flush = builder step where step k !(BufferRange op _) = return $ insertChunk op S.empty k ------------------------------------------------------------------------------ -- Put ------------------------------------------------------------------------------ -- | A 'Put' action denotes a computation of a value that writes a stream of -- bytes as a side-effect. 'Put's are strict in their side-effect; i.e., the -- stream of bytes will always be written before the computed value is -- returned. -- -- 'Put's are a generalization of 'Builder's. The typical use case is the -- implementation of an encoding that might fail (e.g., an interface to the -- 'zlib' compression library or the conversion from Base64 encoded data to -- 8-bit data). For a 'Builder', the only way to handle and report such a -- failure is ignore it or call 'error'. In contrast, 'Put' actions are -- expressive enough to allow reportng and handling such a failure in a pure -- fashion. -- -- @'Put' ()@ actions are isomorphic to 'Builder's. The functions 'putBuilder' -- and 'fromPut' convert between these two types. Where possible, you should -- use 'Builder's, as sequencing them is slightly cheaper than sequencing -- 'Put's because they do not carry around a computed value. newtype Put a = Put { unPut :: forall r. (a -> BuildStep r) -> BuildStep r } -- | Construct a 'Put' action. In contrast to 'BuildStep's, 'Put's are -- referentially transparent in the sense that sequencing the same 'Put' -- multiple times yields every time the same value with the same side-effect. {-# INLINE put #-} put :: (forall r. (a -> BuildStep r) -> BuildStep r) -- ^ A function that fills a 'BufferRange', calls the continuation with -- the updated 'BufferRange' and its computed value once its done, and -- signals its caller how to proceed using 'done', 'bufferFull', or -- 'insertChunk' signals. -- -- This function must be referentially transparent; i.e., calling it -- multiple times with equally sized 'BufferRange's must result in the -- same sequence of bytes being written and the same value being -- computed. If you need mutable state, then you must allocate it anew -- upon each call of this function. Moroever, this function must call -- the continuation once its done. Otherwise, monadic sequencing of -- 'Put's does not work. Finally, this function must write to all bytes -- that it claims it has written. Otherwise, the resulting 'Put' is -- not guaranteed to be referentially transparent and sensitive data -- might leak. -> Put a put = Put -- | Run a 'Put'. 
{-# INLINE runPut #-} runPut :: Put a -- ^ Put to run -> BuildStep a -- ^ 'BuildStep' that first writes the byte stream of -- this 'Put' and then yields the computed value using -- the 'done' signal. runPut (Put p) = p $ \x (BufferRange op _) -> return $ Done op x instance Functor Put where fmap f p = Put $ \k -> unPut p (\x -> k (f x)) {-# INLINE fmap #-} -- | Synonym for '<*' from 'Applicative'; used in rewriting rules. {-# INLINE[1] ap_l #-} ap_l :: Put a -> Put b -> Put a ap_l (Put a) (Put b) = Put $ \k -> a (\a' -> b (\_ -> k a')) -- | Synonym for '*>' from 'Applicative' and '>>' from 'Monad'; used in -- rewriting rules. {-# INLINE[1] ap_r #-} ap_r :: Put a -> Put b -> Put b ap_r (Put a) (Put b) = Put $ \k -> a (\_ -> b k) instance Applicative Put where {-# INLINE pure #-} pure x = Put $ \k -> k x {-# INLINE (<*>) #-} Put f <*> Put a = Put $ \k -> f (\f' -> a (\a' -> k (f' a'))) {-# INLINE (<*) #-} (<*) = ap_l {-# INLINE (*>) #-} (*>) = ap_r instance Monad Put where {-# INLINE return #-} return = pure {-# INLINE (>>=) #-} Put m >>= f = Put $ \k -> m (\m' -> unPut (f m') k) {-# INLINE (>>) #-} (>>) = (*>) -- Conversion between Put and Builder ------------------------------------- -- | Run a 'Builder' as a side-effect of a @'Put' ()@ action. {-# INLINE[1] putBuilder #-} putBuilder :: Builder -> Put () putBuilder (Builder b) = Put $ \k -> b (k ()) -- | Convert a @'Put' ()@ action to a 'Builder'. {-# INLINE fromPut #-} fromPut :: Put () -> Builder fromPut (Put p) = Builder $ \k -> p (\_ -> k) -- We rewrite consecutive uses of 'putBuilder' such that the append of the -- involved 'Builder's is used. This can significantly improve performance, -- when the bound-checks of the concatenated builders are fused. -- ap_l rules {-# RULES "ap_l/putBuilder" forall b1 b2. ap_l (putBuilder b1) (putBuilder b2) = putBuilder (append b1 b2) "ap_l/putBuilder/assoc_r" forall b1 b2 (p :: Put a). ap_l (putBuilder b1) (ap_l (putBuilder b2) p) = ap_l (putBuilder (append b1 b2)) p "ap_l/putBuilder/assoc_l" forall (p :: Put a) b1 b2. ap_l (ap_l p (putBuilder b1)) (putBuilder b2) = ap_l p (putBuilder (append b1 b2)) #-} -- ap_r rules {-# RULES "ap_r/putBuilder" forall b1 b2. ap_r (putBuilder b1) (putBuilder b2) = putBuilder (append b1 b2) "ap_r/putBuilder/assoc_r" forall b1 b2 (p :: Put a). ap_r (putBuilder b1) (ap_r (putBuilder b2) p) = ap_r (putBuilder (append b1 b2)) p "ap_r/putBuilder/assoc_l" forall (p :: Put a) b1 b2. ap_r (ap_r p (putBuilder b1)) (putBuilder b2) = ap_r p (putBuilder (append b1 b2)) #-} -- combined ap_l/ap_r rules {-# RULES "ap_l/ap_r/putBuilder/assoc_r" forall b1 b2 (p :: Put a). ap_l (putBuilder b1) (ap_r (putBuilder b2) p) = ap_l (putBuilder (append b1 b2)) p "ap_r/ap_l/putBuilder/assoc_r" forall b1 b2 (p :: Put a). ap_r (putBuilder b1) (ap_l (putBuilder b2) p) = ap_l (putBuilder (append b1 b2)) p "ap_l/ap_r/putBuilder/assoc_l" forall (p :: Put a) b1 b2. ap_l (ap_r p (putBuilder b1)) (putBuilder b2) = ap_r p (putBuilder (append b1 b2)) "ap_r/ap_l/putBuilder/assoc_l" forall (p :: Put a) b1 b2. ap_r (ap_l p (putBuilder b1)) (putBuilder b2) = ap_r p (putBuilder (append b1 b2)) #-} -- Lifting IO actions --------------------- {- -- | Lift an 'IO' action to a 'Put' action. 
{-# INLINE putLiftIO #-} putLiftIO :: IO a -> Put a putLiftIO io = put $ \k br -> io >>= (`k` br) -} ------------------------------------------------------------------------------ -- Executing a Put directly on a buffered Handle ------------------------------------------------------------------------------ -- | Run a 'Put' action redirecting the produced output to a 'Handle'. -- -- The output is buffered using the 'Handle's associated buffer. If this -- buffer is too small to execute one step of the 'Put' action, then -- it is replaced with a large enough buffer. hPut :: forall a. Handle -> Put a -> IO a #if __GLASGOW_HASKELL__ >= 611 hPut h p = do fillHandle 1 (runPut p) where fillHandle :: Int -> BuildStep a -> IO a fillHandle !minFree step = do next <- wantWritableHandle "hPut" h fillHandle_ next where -- | We need to return an inner IO action that is executed outside -- the lock taken on the Handle for two reasons: -- -- 1. GHC.IO.Handle.Internals mentions in "Note [async]" that -- we should never do any side-effecting operations before -- an interuptible operation that may raise an async. exception -- as long as we are inside 'wantWritableHandle' and the like. -- We possibly run the interuptible 'flushWriteBuffer' right at -- the start of 'fillHandle', hence entering it a second time is -- not safe, as it could lead to a 'BuildStep' being run twice. -- -- FIXME (SM): Adapt this function or at least its documentation, -- as it is OK to run a 'BuildStep' twice. We dropped this -- requirement in favor of being able to use -- 'unsafeDupablePerformIO' and the speed improvement that it -- brings. -- -- 2. We use the 'S.hPut' function to also write to the handle. -- This function tries to take the same lock taken by -- 'wantWritableHandle'. Therefore, we cannot call 'S.hPut' -- inside 'wantWritableHandle'. -- fillHandle_ :: Handle__ -> IO (IO a) fillHandle_ h_ = do makeSpace =<< readIORef refBuf fillBuffer =<< readIORef refBuf where refBuf = haByteBuffer h_ freeSpace buf = IO.bufSize buf - IO.bufR buf makeSpace buf | IO.bufSize buf < minFree = do flushWriteBuffer h_ s <- IO.bufState <$> readIORef refBuf IO.newByteBuffer minFree s >>= writeIORef refBuf | freeSpace buf < minFree = flushWriteBuffer h_ | otherwise = #if __GLASGOW_HASKELL__ >= 613 return () #else -- required for ghc-6.12 flushWriteBuffer h_ #endif fillBuffer buf | freeSpace buf < minFree = error $ unlines [ "Data.ByteString.Builder.Internal.hPut: internal error." , " Not enough space after flush." , " required: " ++ show minFree , " free: " ++ show (freeSpace buf) ] | otherwise = do let !br = BufferRange op (pBuf `plusPtr` IO.bufSize buf) res <- fillWithBuildStep step doneH fullH insertChunkH br touchForeignPtr fpBuf return res where fpBuf = IO.bufRaw buf pBuf = unsafeForeignPtrToPtr fpBuf op = pBuf `plusPtr` IO.bufR buf {-# INLINE updateBufR #-} updateBufR op' = do let !off' = op' `minusPtr` pBuf !buf' = buf {IO.bufR = off'} writeIORef refBuf buf' doneH op' x = do updateBufR op' -- We must flush if this Handle is set to NoBuffering. -- If it is set to LineBuffering, be conservative and -- flush anyway (we didn't check for newlines in the data). -- Flushing must happen outside this 'wantWriteableHandle' -- due to the possible async. exception. 
case haBufferMode h_ of BlockBuffering _ -> return $ return x _line_or_no_buffering -> return $ hFlush h >> return x fullH op' minSize nextStep = do updateBufR op' return $ fillHandle minSize nextStep -- 'fillHandle' will flush the buffer (provided there is -- really less than 'minSize' space left) before executing -- the 'nextStep'. insertChunkH op' bs nextStep = do updateBufR op' return $ do S.hPut h bs fillHandle 1 nextStep #else hPut h p = go =<< buildStepToCIOS strategy (runPut p) where strategy = untrimmedStrategy L.smallChunkSize L.defaultChunkSize go (Finished buf x) = S.hPut h (byteStringFromBuffer buf) >> return x go (Yield1 bs io) = S.hPut h bs >> io >>= go #endif -- | Execute a 'Put' and return the computed result and the bytes -- written during the computation as a lazy 'L.ByteString'. -- -- This function is strict in the computed result and lazy in the writing of -- the bytes. For example, given -- -- @ --infinitePut = sequence_ (repeat (putBuilder (word8 1))) >> return 0 -- @ -- -- evaluating the expression -- -- @ --fst $ putToLazyByteString infinitePut -- @ -- -- does not terminate, while evaluating the expression -- -- @ --L.head $ snd $ putToLazyByteString infinitePut -- @ -- -- does terminate and yields the value @1 :: Word8@. -- -- An illustrative example for these strictness properties is the -- implementation of Base64 decoding (<http://en.wikipedia.org/wiki/Base64>). -- -- @ --type DecodingState = ... -- --decodeBase64 :: 'S.ByteString' -> DecodingState -> 'Put' (Maybe DecodingState) --decodeBase64 = ... -- @ -- -- The above function takes a strict 'S.ByteString' supposed to represent -- Base64 encoded data and the current decoding state. -- It writes the decoded bytes as the side-effect of the 'Put' and returns the -- new decoding state, if the decoding of all data in the 'S.ByteString' was -- successful. The checking if the strict 'S.ByteString' represents Base64 -- encoded data and the actual decoding are fused. This makes the common case, -- where all data represents Base64 encoded data, more efficient. It also -- implies that all data must be decoded before the final decoding -- state can be returned. 'Put's are intended for implementing such fused -- checking and decoding/encoding, which is reflected in their strictness -- properties. {-# NOINLINE putToLazyByteString #-} putToLazyByteString :: Put a -- ^ 'Put' to execute -> (a, L.ByteString) -- ^ Result and lazy 'L.ByteString' -- written as its side-effect putToLazyByteString = putToLazyByteStringWith (safeStrategy L.smallChunkSize L.defaultChunkSize) (\x -> (x, L.Empty)) -- | Execute a 'Put' with a buffer-allocation strategy and a continuation. For -- example, 'putToLazyByteString' is implemented as follows. -- -- @ --putToLazyByteString = 'putToLazyByteStringWith' -- ('safeStrategy' 'L.smallChunkSize' 'L.defaultChunkSize') (\x -> (x, L.empty)) -- @ -- {-# INLINE putToLazyByteStringWith #-} putToLazyByteStringWith :: AllocationStrategy -- ^ Buffer allocation strategy to use -> (a -> (b, L.ByteString)) -- ^ Continuation to use for computing the final result and the tail of -- its side-effect (the written bytes). 
-> Put a -- ^ 'Put' to execute -> (b, L.ByteString) -- ^ Resulting lazy 'L.ByteString' putToLazyByteStringWith strategy k p = ciosToLazyByteString strategy k $ unsafeDupablePerformIO $ buildStepToCIOS strategy (runPut p) ------------------------------------------------------------------------------ -- ByteString insertion / controlling chunk boundaries ------------------------------------------------------------------------------ -- Raw memory ------------- -- | Ensure that there are at least 'n' free bytes for the following 'Builder'. {-# INLINE ensureFree #-} ensureFree :: Int -> Builder ensureFree minFree = builder step where step k br@(BufferRange op ope) | ope `minusPtr` op < minFree = return $ bufferFull minFree op k | otherwise = k br -- | Copy the bytes from a 'BufferRange' into the output stream. wrappedBytesCopyStep :: BufferRange -- ^ Input 'BufferRange'. -> BuildStep a -> BuildStep a wrappedBytesCopyStep !(BufferRange ip0 ipe) k = go ip0 where go !ip !(BufferRange op ope) | inpRemaining <= outRemaining = do copyBytes op ip inpRemaining let !br' = BufferRange (op `plusPtr` inpRemaining) ope k br' | otherwise = do copyBytes op ip outRemaining let !ip' = ip `plusPtr` outRemaining return $ bufferFull 1 ope (go ip') where outRemaining = ope `minusPtr` op inpRemaining = ipe `minusPtr` ip -- Strict ByteStrings ------------------------------------------------------------------------------ -- | Construct a 'Builder' that copies the strict 'S.ByteString's, if it is -- smaller than the treshold, and inserts it directly otherwise. -- -- For example, @byteStringThreshold 1024@ copies strict 'S.ByteString's whose size -- is less or equal to 1kb, and inserts them directly otherwise. This implies -- that the average chunk-size of the generated lazy 'L.ByteString' may be as -- low as 513 bytes, as there could always be just a single byte between the -- directly inserted 1025 byte, strict 'S.ByteString's. -- {-# INLINE byteStringThreshold #-} byteStringThreshold :: Int -> S.ByteString -> Builder byteStringThreshold maxCopySize = \bs -> builder $ step bs where step !bs@(S.PS _ _ len) !k br@(BufferRange !op _) | len <= maxCopySize = byteStringCopyStep bs k br | otherwise = return $ insertChunk op bs k -- | Construct a 'Builder' that copies the strict 'S.ByteString'. -- -- Use this function to create 'Builder's from smallish (@<= 4kb@) -- 'S.ByteString's or if you need to guarantee that the 'S.ByteString' is not -- shared with the chunks generated by the 'Builder'. -- {-# INLINE byteStringCopy #-} byteStringCopy :: S.ByteString -> Builder byteStringCopy = \bs -> builder $ byteStringCopyStep bs {-# INLINE byteStringCopyStep #-} byteStringCopyStep :: S.ByteString -> BuildStep a -> BuildStep a byteStringCopyStep (S.PS ifp ioff isize) !k0 br0@(BufferRange op ope) -- Ensure that the common case is not recursive and therefore yields -- better code. | op' <= ope = do copyBytes op ip isize touchForeignPtr ifp k0 (BufferRange op' ope) | otherwise = do wrappedBytesCopyStep (BufferRange ip ipe) k br0 where op' = op `plusPtr` isize ip = unsafeForeignPtrToPtr ifp `plusPtr` ioff ipe = ip `plusPtr` isize k br = do touchForeignPtr ifp -- input consumed: OK to release here k0 br -- | Construct a 'Builder' that always inserts the strict 'S.ByteString' -- directly as a chunk. -- -- This implies flushing the output buffer, even if it contains just -- a single byte. You should therefore use 'byteStringInsert' only for large -- (@> 8kb@) 'S.ByteString's. 
Otherwise, the generated chunks are too -- fragmented to be processed efficiently afterwards. -- {-# INLINE byteStringInsert #-} byteStringInsert :: S.ByteString -> Builder byteStringInsert = \bs -> builder $ \k (BufferRange op _) -> return $ insertChunk op bs k -- Short bytestrings ------------------------------------------------------------------------------ -- | Construct a 'Builder' that copies the 'SH.ShortByteString'. -- {-# INLINE shortByteString #-} shortByteString :: Sh.ShortByteString -> Builder shortByteString = \sbs -> builder $ shortByteStringCopyStep sbs -- | Copy the bytes from a 'SH.ShortByteString' into the output stream. {-# INLINE shortByteStringCopyStep #-} shortByteStringCopyStep :: Sh.ShortByteString -- ^ Input 'SH.ShortByteString'. -> BuildStep a -> BuildStep a shortByteStringCopyStep !sbs k = go 0 (Sh.length sbs) where go !ip !ipe !(BufferRange op ope) | inpRemaining <= outRemaining = do Sh.copyToPtr sbs ip op inpRemaining let !br' = BufferRange (op `plusPtr` inpRemaining) ope k br' | otherwise = do Sh.copyToPtr sbs ip op outRemaining let !ip' = ip + outRemaining return $ bufferFull 1 ope (go ip' ipe) where outRemaining = ope `minusPtr` op inpRemaining = ipe - ip -- Lazy bytestrings ------------------------------------------------------------------------------ -- | Construct a 'Builder' that uses the thresholding strategy of 'byteStringThreshold' -- for each chunk of the lazy 'L.ByteString'. -- {-# INLINE lazyByteStringThreshold #-} lazyByteStringThreshold :: Int -> L.ByteString -> Builder lazyByteStringThreshold maxCopySize = L.foldrChunks (\bs b -> byteStringThreshold maxCopySize bs `mappend` b) mempty -- TODO: We could do better here. Currently, Large, Small, Large, leads to -- an unnecessary copy of the 'Small' chunk. -- | Construct a 'Builder' that copies the lazy 'L.ByteString'. -- {-# INLINE lazyByteStringCopy #-} lazyByteStringCopy :: L.ByteString -> Builder lazyByteStringCopy = L.foldrChunks (\bs b -> byteStringCopy bs `mappend` b) mempty -- | Construct a 'Builder' that inserts all chunks of the lazy 'L.ByteString' -- directly. -- {-# INLINE lazyByteStringInsert #-} lazyByteStringInsert :: L.ByteString -> Builder lazyByteStringInsert = L.foldrChunks (\bs b -> byteStringInsert bs `mappend` b) mempty -- | Create a 'Builder' denoting the same sequence of bytes as a strict -- 'S.ByteString'. -- The 'Builder' inserts large 'S.ByteString's directly, but copies small ones -- to ensure that the generated chunks are large on average. -- {-# INLINE byteString #-} byteString :: S.ByteString -> Builder byteString = byteStringThreshold maximalCopySize -- | Create a 'Builder' denoting the same sequence of bytes as a lazy -- 'L.ByteString'. -- The 'Builder' inserts large chunks of the lazy 'L.ByteString' directly, -- but copies small ones to ensure that the generated chunks are large on -- average. -- {-# INLINE lazyByteString #-} lazyByteString :: L.ByteString -> Builder lazyByteString = lazyByteStringThreshold maximalCopySize -- FIXME: also insert the small chunk for [large,small,large] directly. -- Perhaps it makes even sense to concatenate the small chunks in -- [large,small,small,small,large] and insert them directly afterwards to avoid -- unnecessary buffer spilling. Hmm, but that uncontrollably increases latency -- => no good! -- | The maximal size of a 'S.ByteString' that is copied. -- @2 * 'L.smallChunkSize'@ to guarantee that on average a chunk is of -- 'L.smallChunkSize'. 
maximalCopySize :: Int maximalCopySize = 2 * L.smallChunkSize ------------------------------------------------------------------------------ -- Builder execution ------------------------------------------------------------------------------ -- | A buffer allocation strategy for executing 'Builder's. -- The strategy -- -- > 'AllocationStrategy' firstBufSize bufSize trim -- -- states that the first buffer is of size @firstBufSize@, all following buffers -- are of size @bufSize@, and a buffer of size @n@ filled with @k@ bytes should -- be trimmed iff @trim k n@ is 'True'. data AllocationStrategy = AllocationStrategy (Maybe (Buffer, Int) -> IO Buffer) {-# UNPACK #-} !Int (Int -> Int -> Bool) -- | Create a custom allocation strategy. See the code for 'safeStrategy' and -- 'untrimmedStrategy' for examples. {-# INLINE customStrategy #-} customStrategy :: (Maybe (Buffer, Int) -> IO Buffer) -- ^ Buffer allocation function. If 'Nothing' is given, then a new first -- buffer should be allocated. If @'Just' (oldBuf, minSize)@ is given, -- then a buffer with minimal size 'minSize' must be returned. The -- strategy may reuse the 'oldBuffer', if it can guarantee that this -- referentially transparent and 'oldBuffer' is large enough. -> Int -- ^ Default buffer size. -> (Int -> Int -> Bool) -- ^ A predicate @trim used allocated@ returning 'True', if the buffer -- should be trimmed before it is returned. -> AllocationStrategy customStrategy = AllocationStrategy -- | Sanitize a buffer size; i.e., make it at least the size of an 'Int'. {-# INLINE sanitize #-} sanitize :: Int -> Int sanitize = max (sizeOf (undefined :: Int)) -- | Use this strategy for generating lazy 'L.ByteString's whose chunks are -- discarded right after they are generated. For example, if you just generate -- them to write them to a network socket. {-# INLINE untrimmedStrategy #-} untrimmedStrategy :: Int -- ^ Size of the first buffer -> Int -- ^ Size of successive buffers -> AllocationStrategy -- ^ An allocation strategy that does not trim any of the -- filled buffers before converting it to a chunk untrimmedStrategy firstSize bufSize = AllocationStrategy nextBuffer (sanitize bufSize) (\_ _ -> False) where {-# INLINE nextBuffer #-} nextBuffer Nothing = newBuffer $ sanitize firstSize nextBuffer (Just (_, minSize)) = newBuffer minSize -- | Use this strategy for generating lazy 'L.ByteString's whose chunks are -- likely to survive one garbage collection. This strategy trims buffers -- that are filled less than half in order to avoid spilling too much memory. {-# INLINE safeStrategy #-} safeStrategy :: Int -- ^ Size of first buffer -> Int -- ^ Size of successive buffers -> AllocationStrategy -- ^ An allocation strategy that guarantees that at least half -- of the allocated memory is used for live data safeStrategy firstSize bufSize = AllocationStrategy nextBuffer (sanitize bufSize) trim where trim used size = 2 * used < size {-# INLINE nextBuffer #-} nextBuffer Nothing = newBuffer $ sanitize firstSize nextBuffer (Just (_, minSize)) = newBuffer minSize -- | /Heavy inlining./ Execute a 'Builder' with custom execution parameters. -- -- This function is inlined despite its heavy code-size to allow fusing with -- the allocation strategy. For example, the default 'Builder' execution -- function 'toLazyByteString' is defined as follows. 
-- -- @ -- {-\# NOINLINE toLazyByteString \#-} -- toLazyByteString = -- toLazyByteStringWith ('safeStrategy' 'L.smallChunkSize' 'L.defaultChunkSize') L.empty -- @ -- -- where @L.empty@ is the zero-length lazy 'L.ByteString'. -- -- In most cases, the parameters used by 'toLazyByteString' give good -- performance. A sub-performing case of 'toLazyByteString' is executing short -- (<128 bytes) 'Builder's. In this case, the allocation overhead for the first -- 4kb buffer and the trimming cost dominate the cost of executing the -- 'Builder'. You can avoid this problem using -- -- >toLazyByteStringWith (safeStrategy 128 smallChunkSize) L.empty -- -- This reduces the allocation and trimming overhead, as all generated -- 'L.ByteString's fit into the first buffer and there is no trimming -- required, if more than 64 bytes and less than 128 bytes are written. -- {-# INLINE toLazyByteStringWith #-} toLazyByteStringWith :: AllocationStrategy -- ^ Buffer allocation strategy to use -> L.ByteString -- ^ Lazy 'L.ByteString' to use as the tail of the generated lazy -- 'L.ByteString' -> Builder -- ^ 'Builder' to execute -> L.ByteString -- ^ Resulting lazy 'L.ByteString' toLazyByteStringWith strategy k b = ciosUnitToLazyByteString strategy k $ unsafeDupablePerformIO $ buildStepToCIOS strategy (runBuilder b) -- | Convert a 'BuildStep' to a 'ChunkIOStream' stream by executing it on -- 'Buffer's allocated according to the given 'AllocationStrategy'. {-# INLINE buildStepToCIOS #-} buildStepToCIOS :: AllocationStrategy -- ^ Buffer allocation strategy to use -> BuildStep a -- ^ 'BuildStep' to execute -> IO (ChunkIOStream a) buildStepToCIOS !(AllocationStrategy nextBuffer bufSize trim) = \step -> nextBuffer Nothing >>= fill step where fill !step !buf@(Buffer fpbuf br@(BufferRange _ pe)) = do res <- fillWithBuildStep step doneH fullH insertChunkH br touchForeignPtr fpbuf return res where pbuf = unsafeForeignPtrToPtr fpbuf doneH op' x = return $ Finished (Buffer fpbuf (BufferRange op' pe)) x fullH op' minSize nextStep = wrapChunk op' $ const $ nextBuffer (Just (buf, max minSize bufSize)) >>= fill nextStep insertChunkH op' bs nextStep = wrapChunk op' $ \isEmpty -> yield1 bs $ -- Checking for empty case avoids allocating 'n-1' empty -- buffers for 'n' insertChunkH right after each other. if isEmpty then fill nextStep buf else do buf' <- nextBuffer (Just (buf, bufSize)) fill nextStep buf' -- Wrap and yield a chunk, trimming it if necesary {-# INLINE wrapChunk #-} wrapChunk !op' mkCIOS | chunkSize == 0 = mkCIOS True | trim chunkSize size = do bs <- S.create chunkSize $ \pbuf' -> copyBytes pbuf' pbuf chunkSize -- FIXME: We could reuse the trimmed buffer here. return $ Yield1 bs (mkCIOS False) | otherwise = return $ Yield1 (S.PS fpbuf 0 chunkSize) (mkCIOS False) where chunkSize = op' `minusPtr` pbuf size = pe `minusPtr` pbuf
repo_name: CloudI/CloudI
path: src/api/haskell/external/bytestring-0.10.10.0/Data/ByteString/Builder/Internal.hs
language: Haskell
license: mit
size: 43,928
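The module above documents the AllocationStrategy machinery (safeStrategy, untrimmedStrategy, toLazyByteStringWith) that bytestring also exposes through the public Data.ByteString.Builder.Extra module. Below is a minimal sketch of the untrimmed strategy in use, roughly the socket-output scenario the commentary describes; it assumes those public re-exports, and the module name and renderUntrimmed helper are illustrative.

module BuilderStrategyDemo where

import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Builder.Extra as BE
import qualified Data.ByteString.Lazy as L

-- Run a Builder with the untrimmed strategy described above: no chunk
-- is trimmed, which is fine when chunks are consumed immediately
-- (e.g. written to a socket) rather than retained.
renderUntrimmed :: B.Builder -> L.ByteString
renderUntrimmed =
  BE.toLazyByteStringWith
    (BE.untrimmedStrategy BE.smallChunkSize BE.defaultChunkSize)
    L.empty

main :: IO ()
main =
  print . L.length . renderUntrimmed $
    mconcat (replicate 1000 (B.string7 "hello "))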
module Main where

import Test.MiniUnitTest

main :: IO ()
main = tests
repo_name: bagl/takusen-oracle
path: Test/Main.hs
language: Haskell
license: bsd-3-clause
size: 80
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-} {-| Binary instances for the core datatypes -} module Idris.Core.Binary where import Control.Applicative ((<*>), (<$>)) import Control.Monad (liftM2) import Control.DeepSeq (($!!)) import Data.Binary import Data.Vector.Binary import qualified Data.Text as T import qualified Data.Text.Encoding as E import Idris.Core.TT instance Binary ErrorReportPart where put (TextPart msg) = do putWord8 0 ; put msg put (NamePart n) = do putWord8 1 ; put n put (TermPart t) = do putWord8 2 ; put t put (SubReport ps) = do putWord8 3 ; put ps put (RawPart r) = do putWord8 4 ; put r get = do i <- getWord8 case i of 0 -> fmap TextPart get 1 -> fmap NamePart get 2 -> fmap TermPart get 3 -> fmap SubReport get 4 -> fmap RawPart get _ -> error "Corrupted binary data for ErrorReportPart" instance Binary Provenance where put ExpectedType = putWord8 0 put (SourceTerm t) = do putWord8 1 put t put InferredVal = putWord8 2 put GivenVal = putWord8 3 put (TooManyArgs t) = do putWord8 4 put t get = do i <- getWord8 case i of 0 -> return ExpectedType 1 -> do x1 <- get return (SourceTerm x1) 2 -> return InferredVal 3 -> return GivenVal 4 -> do x1 <- get return (TooManyArgs x1) _ -> error "Corrupted binary data for Provenance" instance Binary UConstraint where put (ULT x1 x2) = putWord8 0 >> put x1 >> put x2 put (ULE x1 x2) = putWord8 1 >> put x1 >> put x2 get = do i <- getWord8 case i of 0 -> ULT <$> get <*> get 1 -> ULE <$> get <*> get _ -> error "Corrupted binary data for UConstraint" instance Binary ConstraintFC where put (ConstraintFC x1 x2) = putWord8 0 >> put x1 >> put x2 get = do i <- getWord8 case i of 0 -> liftM2 ConstraintFC get get _ -> error "Corrupted binary data for ConstraintFC" instance Binary a => Binary (Err' a) where put (Msg str) = do putWord8 0 put str put (InternalMsg str) = do putWord8 1 put str put (CantUnify x y z e ctxt i) = do putWord8 2 put x put y put z put e put ctxt put i put (InfiniteUnify n t ctxt) = do putWord8 3 put n put t put ctxt put (CantConvert x y ctxt) = do putWord8 4 put x put y put ctxt put (CantSolveGoal x ctxt) = do putWord8 5 put x put ctxt put (UnifyScope n1 n2 x ctxt) = do putWord8 6 put n1 put n2 put x put ctxt put (CantInferType str) = do putWord8 7 put str put (NonFunctionType t1 t2) = do putWord8 8 put t1 put t2 put (NotEquality t1 t2) = do putWord8 9 put t1 put t2 put (TooManyArguments n) = do putWord8 10 put n put (CantIntroduce t) = do putWord8 11 put t put (NoSuchVariable n) = do putWord8 12 put n put (NoTypeDecl n) = do putWord8 13 put n put (NotInjective x y z) = do putWord8 14 put x put y put z put (CantResolve _ t) = do putWord8 15 put t put (CantResolveAlts ns) = do putWord8 16 put ns put (IncompleteTerm t) = do putWord8 17 put t put (UniverseError x1 x2 x3 x4 x5) = do putWord8 18 put x1 put x2 put x3 put x4 put x5 put (UniqueError u n) = do putWord8 19 put u put n put (UniqueKindError u n) = do putWord8 20 put u put n put ProgramLineComment = putWord8 21 put (Inaccessible n) = do putWord8 22 put n put (NonCollapsiblePostulate n) = do putWord8 23 put n put (AlreadyDefined n) = do putWord8 24 put n put (ProofSearchFail e) = do putWord8 25 put e put (NoRewriting t) = do putWord8 26 put t put (At fc e) = do putWord8 27 put fc put e put (Elaborating str n e) = do putWord8 28 put str put n put e put (ElaboratingArg n1 n2 ns e) = do putWord8 29 put n1 put n2 put ns put e put (ProviderError str) = do putWord8 30 put str put (LoadingFailed str e) = do putWord8 31 put str put e put (ReflectionError parts e) = do putWord8 32 put 
parts put e put (ReflectionFailed str e) = do putWord8 33 put str put e put (WithFnType t) = do putWord8 34 put t put (CantMatch t) = do putWord8 35 put t put (ElabScriptDebug x1 x2 x3) = do putWord8 36 put x1 put x2 put x3 put (NoEliminator s t) = do putWord8 37 put s put t put (InvalidTCArg n t) = do putWord8 38 put n put t put (ElabScriptStuck x1) = do putWord8 39 put x1 put (UnknownImplicit n f) = do putWord8 40 put n put f put (NoValidAlts ns) = do putWord8 41 put ns get = do i <- getWord8 case i of 0 -> fmap Msg get 1 -> fmap InternalMsg get 2 -> do x <- get ; y <- get ; z <- get ; e <- get ; ctxt <- get ; i <- get return $ CantUnify x y z e ctxt i 3 -> do x <- get ; y <- get ; z <- get return $ InfiniteUnify x y z 4 -> do x <- get ; y <- get ; z <- get return $ CantConvert x y z 5 -> do x <- get ; y <- get return $ CantSolveGoal x y 6 -> do w <- get ; x <- get ; y <- get ; z <- get return $ UnifyScope w x y z 7 -> fmap CantInferType get 8 -> do x <- get ; y <- get return $ NonFunctionType x y 9 -> do x <- get ; y <- get return $ NotEquality x y 10 -> fmap TooManyArguments get 11 -> fmap CantIntroduce get 12 -> fmap NoSuchVariable get 13 -> fmap NoTypeDecl get 14 -> do x <- get ; y <- get ; z <- get return $ NotInjective x y z 15 -> fmap (CantResolve False) get 16 -> fmap CantResolveAlts get 17 -> fmap IncompleteTerm get 18 -> UniverseError <$> get <*> get <*> get <*> get <*> get 19 -> do x <- get ; y <- get return $ UniqueError x y 20 -> do x <- get ; y <- get return $ UniqueKindError x y 21 -> return ProgramLineComment 22 -> fmap Inaccessible get 23 -> fmap NonCollapsiblePostulate get 24 -> fmap AlreadyDefined get 25 -> fmap ProofSearchFail get 26 -> fmap NoRewriting get 27 -> do x <- get ; y <- get return $ At x y 28 -> do x <- get ; y <- get ; z <- get return $ Elaborating x y z 29 -> do w <- get ; x <- get ; y <- get ; z <- get return $ ElaboratingArg w x y z 30 -> fmap ProviderError get 31 -> do x <- get ; y <- get return $ LoadingFailed x y 32 -> do x <- get ; y <- get return $ ReflectionError x y 33 -> do x <- get ; y <- get return $ ReflectionFailed x y 34 -> fmap WithFnType get 35 -> fmap CantMatch get 36 -> do x1 <- get x2 <- get x3 <- get return (ElabScriptDebug x1 x2 x3) 37 -> do x1 <- get x2 <- get return (NoEliminator x1 x2) 38 -> do x1 <- get x2 <- get return (InvalidTCArg x1 x2) 39 -> do x1 <- get return (ElabScriptStuck x1) 40 -> do x <- get ; y <- get return $ UnknownImplicit x y 41 -> fmap NoValidAlts get _ -> error "Corrupted binary data for Err'" ----- Generated by 'derive' instance Binary FC where put x = case x of (FC x1 (x2, x3) (x4, x5)) -> do putWord8 0 put x1 put (x2 * 65536 + x3) put (x4 * 65536 + x5) NoFC -> putWord8 1 FileFC x1 -> do putWord8 2 put x1 get = do i <- getWord8 case i of 0 -> do x1 <- get x2x3 <- get x4x5 <- get return (FC x1 (x2x3 `div` 65536, x2x3 `mod` 65536) (x4x5 `div` 65536, x4x5 `mod` 65536)) 1 -> return NoFC 2 -> do x1 <- get return (FileFC x1) _ -> error "Corrupted binary data for FC" instance Binary Name where put x = case x of UN x1 -> do putWord8 0 put x1 NS x1 x2 -> do putWord8 1 put x1 put x2 MN x1 x2 -> do putWord8 2 put x1 put x2 NErased -> putWord8 3 SN x1 -> do putWord8 4 put x1 SymRef x1 -> do putWord8 5 put x1 get = do i <- getWord8 case i of 0 -> do x1 <- get return (UN x1) 1 -> do x1 <- get x2 <- get return (NS x1 x2) 2 -> do x1 <- get x2 <- get return (MN x1 x2) 3 -> return NErased 4 -> do x1 <- get return (SN x1) 5 -> do x1 <- get return (SymRef x1) _ -> error "Corrupted binary data for Name" instance Binary 
SpecialName where put x = case x of WhereN x1 x2 x3 -> do putWord8 0 put x1 put x2 put x3 InstanceN x1 x2 -> do putWord8 1 put x1 put x2 ParentN x1 x2 -> do putWord8 2 put x1 put x2 MethodN x1 -> do putWord8 3 put x1 CaseN x1 -> do putWord8 4; put x1 ElimN x1 -> do putWord8 5; put x1 InstanceCtorN x1 -> do putWord8 6; put x1 WithN x1 x2 -> do putWord8 7 put x1 put x2 MetaN x1 x2 -> do putWord8 8 put x1 put x2 get = do i <- getWord8 case i of 0 -> do x1 <- get x2 <- get x3 <- get return (WhereN x1 x2 x3) 1 -> do x1 <- get x2 <- get return (InstanceN x1 x2) 2 -> do x1 <- get x2 <- get return (ParentN x1 x2) 3 -> do x1 <- get return (MethodN x1) 4 -> do x1 <- get return (CaseN x1) 5 -> do x1 <- get return (ElimN x1) 6 -> do x1 <- get return (InstanceCtorN x1) 7 -> do x1 <- get x2 <- get return (WithN x1 x2) 8 -> do x1 <- get x2 <- get return (MetaN x1 x2) _ -> error "Corrupted binary data for SpecialName" instance Binary Const where put x = case x of I x1 -> do putWord8 0 put x1 BI x1 -> do putWord8 1 put x1 Fl x1 -> do putWord8 2 put x1 Ch x1 -> do putWord8 3 put x1 Str x1 -> do putWord8 4 put x1 B8 x1 -> putWord8 5 >> put x1 B16 x1 -> putWord8 6 >> put x1 B32 x1 -> putWord8 7 >> put x1 B64 x1 -> putWord8 8 >> put x1 (AType (ATInt ITNative)) -> putWord8 9 (AType (ATInt ITBig)) -> putWord8 10 (AType ATFloat) -> putWord8 11 (AType (ATInt ITChar)) -> putWord8 12 StrType -> putWord8 13 Forgot -> putWord8 15 (AType (ATInt (ITFixed ity))) -> putWord8 (fromIntegral (16 + fromEnum ity)) -- 16-19 inclusive VoidType -> putWord8 27 WorldType -> putWord8 28 TheWorld -> putWord8 29 get = do i <- getWord8 case i of 0 -> do x1 <- get return (I x1) 1 -> do x1 <- get return (BI x1) 2 -> do x1 <- get return (Fl x1) 3 -> do x1 <- get return (Ch x1) 4 -> do x1 <- get return (Str x1) 5 -> fmap B8 get 6 -> fmap B16 get 7 -> fmap B32 get 8 -> fmap B64 get 9 -> return (AType (ATInt ITNative)) 10 -> return (AType (ATInt ITBig)) 11 -> return (AType ATFloat) 12 -> return (AType (ATInt ITChar)) 13 -> return StrType 15 -> return Forgot 16 -> return (AType (ATInt (ITFixed IT8))) 17 -> return (AType (ATInt (ITFixed IT16))) 18 -> return (AType (ATInt (ITFixed IT32))) 19 -> return (AType (ATInt (ITFixed IT64))) 27 -> return VoidType 28 -> return WorldType 29 -> return TheWorld _ -> error "Corrupted binary data for Const" instance Binary Raw where put x = case x of Var x1 -> do putWord8 0 put x1 RBind x1 x2 x3 -> do putWord8 1 put x1 put x2 put x3 RApp x1 x2 -> do putWord8 2 put x1 put x2 RType -> putWord8 3 RConstant x1 -> do putWord8 4 put x1 RForce x1 -> do putWord8 5 put x1 RUType x1 -> do putWord8 6 put x1 get = do i <- getWord8 case i of 0 -> do x1 <- get return (Var x1) 1 -> do x1 <- get x2 <- get x3 <- get return (RBind x1 x2 x3) 2 -> do x1 <- get x2 <- get return (RApp x1 x2) 3 -> return RType 4 -> do x1 <- get return (RConstant x1) 5 -> do x1 <- get return (RForce x1) 6 -> do x1 <- get return (RUType x1) _ -> error "Corrupted binary data for Raw" instance Binary ImplicitInfo where put x = case x of Impl x1 -> put x1 get = do x1 <- get return (Impl x1) instance (Binary b) => Binary (Binder b) where put x = case x of Lam x1 -> do putWord8 0 put x1 Pi x1 x2 x3 -> do putWord8 1 put x1 put x2 put x3 Let x1 x2 -> do putWord8 2 put x1 put x2 NLet x1 x2 -> do putWord8 3 put x1 put x2 Hole x1 -> do putWord8 4 put x1 GHole x1 x2 x3 -> do putWord8 5 put x1 put x2 put x3 Guess x1 x2 -> do putWord8 6 put x1 put x2 PVar x1 -> do putWord8 7 put x1 PVTy x1 -> do putWord8 8 put x1 get = do i <- getWord8 case i of 0 -> do x1 <- get 
return (Lam x1) 1 -> do x1 <- get x2 <- get x3 <- get return (Pi x1 x2 x3) 2 -> do x1 <- get x2 <- get return (Let x1 x2) 3 -> do x1 <- get x2 <- get return (NLet x1 x2) 4 -> do x1 <- get return (Hole x1) 5 -> do x1 <- get x2 <- get x3 <- get return (GHole x1 x2 x3) 6 -> do x1 <- get x2 <- get return (Guess x1 x2) 7 -> do x1 <- get return (PVar x1) 8 -> do x1 <- get return (PVTy x1) _ -> error "Corrupted binary data for Binder" instance Binary Universe where put x = case x of UniqueType -> putWord8 0 AllTypes -> putWord8 1 NullType -> putWord8 2 get = do i <- getWord8 case i of 0 -> return UniqueType 1 -> return AllTypes 2 -> return NullType _ -> error "Corrupted binary data for Universe" instance Binary NameType where put x = case x of Bound -> putWord8 0 Ref -> putWord8 1 DCon x1 x2 x3 -> do putWord8 2 put (x1 * 65536 + x2) put x3 TCon x1 x2 -> do putWord8 3 put (x1 * 65536 + x2) get = do i <- getWord8 case i of 0 -> return Bound 1 -> return Ref 2 -> do x1x2 <- get x3 <- get return (DCon (x1x2 `div` 65536) (x1x2 `mod` 65536) x3) 3 -> do x1x2 <- get return (TCon (x1x2 `div` 65536) (x1x2 `mod` 65536)) _ -> error "Corrupted binary data for NameType" -- record concrete levels only, for now instance Binary UExp where put x = case x of UVar t -> do putWord8 0 put ((-1) :: Int) -- TMP HACK! UVal t -> do putWord8 1 put t get = do i <- getWord8 case i of 0 -> do x1 <- get return (UVar x1) 1 -> do x1 <- get return (UVal x1) _ -> error "Corrupted binary data for UExp" instance {- (Binary n) => -} Binary (TT Name) where put x = {-# SCC "putTT" #-} case x of P x1 x2 x3 -> do putWord8 0 put x1 put x2 -- put x3 V x1 -> if (x1 >= 0 && x1 < 256) then do putWord8 1 putWord8 (toEnum (x1 + 1)) else do putWord8 9 put x1 Bind x1 x2 x3 -> do putWord8 2 put x1 put x2 put x3 App _ x1 x2 -> do putWord8 3 put x1 put x2 Constant x1 -> do putWord8 4 put x1 Proj x1 x2 -> do putWord8 5 put x1 putWord8 (toEnum (x2 + 1)) Erased -> putWord8 6 TType x1 -> do putWord8 7 put x1 Impossible -> putWord8 8 UType x1 -> do putWord8 10 put x1 get = do i <- getWord8 case i of 0 -> do x1 <- get x2 <- get -- x3 <- get return (P x1 x2 Erased) 1 -> do x1 <- getWord8 return (V ((fromEnum x1) - 1)) 2 -> do x1 <- get x2 <- get x3 <- get return (Bind x1 x2 x3) 3 -> do x1 <- get x2 <- get return (App Complete x1 x2) 4 -> do x1 <- get return (Constant x1) 5 -> do x1 <- get x2 <- getWord8 return (Proj x1 ((fromEnum x2)-1)) 6 -> return Erased 7 -> do x1 <- get return (TType x1) 8 -> return Impossible 9 -> do x1 <- get return (V x1) 10 -> do x1 <- get return (UType x1) _ -> error "Corrupted binary data for TT"
Enamex/Idris-dev
src/Idris/Core/Binary.hs
Haskell
bsd-3-clause
25,367
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
module DB where

import Data.SafeCopy
import Servant.Server.Auth.Token.Acid.Schema as A

-- | Application global state for acid-state
data DB = DB {
  dbAuth   :: A.Model -- ^ Storage for Auth state
, dbCustom :: ()      -- ^ Demo of custom state
}

-- | Generation of initial state
newDB :: DB
newDB = DB {
    dbAuth   = A.newModel
  , dbCustom = ()
  }

instance HasModelRead DB where
  askModel = dbAuth

instance HasModelWrite DB where
  putModel db m = db { dbAuth = m }

deriveSafeCopy 0 'base ''DB
A.deriveQueries ''DB
A.makeModelAcidic ''DB
ivan-m/servant-auth-token
example/acid/src/DB.hs
Haskell
bsd-3-clause
594
{-# LANGUAGE DoRec #-}
-- | Make sure this program runs without leaking memory
import FRP.Sodium
import Control.Applicative
import Control.Exception
import System.Timeout

data Source = Source { unSource :: Reactive (Behaviour (Int, Int), Event Source) }

verbose = False

main = do
    (et, pushT) <- sync $ newEvent
    t <- sync $ hold 0 et
    let etens = (`div` 10) <$> et
    tens <- sync $ hold 0 etens
    let changeTens = filterJust $ snapshot (\new old -> if new /= old then Just new else Nothing) etens tens
    oout <- sync $ do
        let newSource = (\tens -> Source $ do
                    let out = ((,) tens) <$> t
                    return (out, newSource)
                ) <$> changeTens
            initPair = (((,) 0) <$> t, newSource)
        rec bPair <- hold initPair eSwitch
            let eSwitch = execute $ unSource <$> switchE (snd <$> bPair)
        return (fst <$> bPair)
    out <- sync $ switch oout
    kill <- sync $ listen (value out) $ \x ->
        if verbose then print x
                   else (evaluate x >> return ())
    timeout 4000000 $ mapM_ (sync . pushT) [0..]
    kill
kevintvh/sodium
haskell/examples/tests/memory-test-2.hs
Haskell
bsd-3-clause
1,162
{-# LANGUAGE BangPatterns #-}

import System.Directory
import System.FilePath
import Control.Concurrent.Async
import System.Environment
import Data.List hiding (find)
import Control.Exception (finally)
import Data.Maybe (isJust)
import Control.Concurrent.MVar
import Data.IORef
import GHC.Conc (getNumCapabilities)

-- <<main
main = do
  [n,s,d] <- getArgs
  sem <- newNBSem (read n)
  find sem s d >>= print
-- >>

-- <<find
find :: NBSem -> String -> FilePath -> IO (Maybe FilePath)
find sem s d = do
  fs <- getDirectoryContents d
  let fs' = sort $ filter (`notElem` [".",".."]) fs
  if any (== s) fs'
     then return (Just (d </> s))
     else do
       let ps = map (d </>) fs'            -- <1>
       foldr (subfind sem s) dowait ps []  -- <2>
  where
    dowait as = loop (reverse as)          -- <3>

    loop [] = return Nothing
    loop (a:as) = do                       -- <4>
      r <- wait a                          -- <5>
      case r of
        Nothing -> loop as                 -- <6>
        Just a  -> return (Just a)         -- <7>
-- >>

-- <<subfind
subfind :: NBSem -> String -> FilePath
        -> ([Async (Maybe FilePath)] -> IO (Maybe FilePath))
        -> [Async (Maybe FilePath)] -> IO (Maybe FilePath)

subfind sem s p inner asyncs = do
  isdir <- doesDirectoryExist p
  if not isdir
     then inner asyncs
     else do
       q <- tryAcquireNBSem sem            -- <1>
       if q
          then do
            let dofind = find sem s p `finally` releaseNBSem sem  -- <2>
            withAsync dofind $ \a -> inner (a:asyncs)
          else do
            r <- find sem s p              -- <3>
            case r of
              Nothing -> inner asyncs
              Just _  -> return r
-- >>

-- <<NBSem
newtype NBSem = NBSem (MVar Int)

newNBSem :: Int -> IO NBSem
newNBSem i = do
  m <- newMVar i
  return (NBSem m)

tryAcquireNBSem :: NBSem -> IO Bool
tryAcquireNBSem (NBSem m) =
  modifyMVar m $ \i ->
    if i == 0
       then return (i, False)
       else let !z = i-1 in return (z, True)

releaseNBSem :: NBSem -> IO ()
releaseNBSem (NBSem m) =
  modifyMVar m $ \i ->
    let !z = i+1 in return (z, ())
-- >>
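-- Usage sketch (illustrative invocation, not part of the original file):
--
--   $ ./findpar2 8 Setup.hs ~/projects
--
-- searches ~/projects for a file named Setup.hs. The first argument sizes
-- the NBSem: when a unit can be acquired, a subdirectory is searched in a
-- fresh Async; otherwise the search continues in the current thread.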
prt2121/haskell-practice
parconc/findpar2.hs
Haskell
apache-2.0
2,127
module PackageTests.PreProcess.Check (suite) where

import PackageTests.PackageTester
       (PackageSpec(..), SuiteConfig, assertBuildSucceeded, cabal_build)
import System.FilePath
import Test.Tasty.HUnit

suite :: SuiteConfig -> Assertion
suite config = do
    let spec = PackageSpec
            { directory = "PackageTests" </> "PreProcess"
            , distPref = Nothing
            , configOpts = ["--enable-tests", "--enable-benchmarks"]
            }
    result <- cabal_build config spec
    assertBuildSucceeded result
enolan/cabal
Cabal/tests/PackageTests/PreProcess/Check.hs
Haskell
bsd-3-clause
527
{-# LANGUAGE PolyKinds, GADTs #-}

module T7328 where

data Proxy a

class Foo a where
  foo :: a ~ f i => Proxy (Foo f)
ghc-android/ghc
testsuite/tests/polykinds/T7328.hs
Haskell
bsd-3-clause
123
{-# LANGUAGE QuasiQuotes, TemplateHaskell, CPP, GADTs, TypeFamilies, OverloadedStrings, FlexibleContexts, EmptyDataDecls, FlexibleInstances, GeneralizedNewtypeDeriving, MultiParamTypeClasses #-}
module PersistUniqueTest where

import Init

-- mpsGeneric = False is due to a bug or at least lack of a feature in mkKeyTypeDec TH.hs
#if WITH_NOSQL
mkPersist persistSettings { mpsGeneric = False } [persistUpperCase|
#else
share [mkPersist persistSettings { mpsGeneric = False }, mkMigrate "migration"] [persistLowerCase|
#endif
  Fo
    foo Int
    bar Int
    Primary foo
    UniqueBar bar
    deriving Eq Show
|]

#ifdef WITH_NOSQL
cleanDB :: (MonadIO m, PersistQuery backend, PersistEntityBackend Fo ~ backend) => ReaderT backend m ()
cleanDB = do
  deleteWhere ([] :: [Filter Fo])

db :: Action IO () -> Assertion
db = db' cleanDB
#endif

specs :: Spec
specs = describe "custom primary key" $ do
#ifdef WITH_NOSQL
  return ()
#else
  it "getBy" $ db $ do
    let b = 5
    k <- insert $ Fo 3 b
    Just vk <- get k
    Just vu <- getBy (UniqueBar b)
    vu @== Entity k vk
  it "insertUniqueEntity" $ db $ do
    let fo = Fo 3 5
    Just (Entity _ insertedFoValue) <- insertUniqueEntity fo
    Nothing <- insertUniqueEntity fo
    fo @== insertedFoValue
#endif
psibi/persistent
persistent-test/src/PersistUniqueTest.hs
Haskell
mit
1,271
{-# LANGUAGE ScopedTypeVariables, Rank2Types #-} -- -- (c) The University of Glasgow 2002-2006 -- -- Serialized values module GHCJS.Prim.TH.Serialized ( Serialized , fromSerialized , toSerialized , serializeWithData , deserializeWithData ) where import Data.Binary import Data.Bits import Data.Data import Data.Typeable.Internal -- | Represents a serialized value of a particular type. Attempts can be made to deserialize it at certain types data Serialized = Serialized TypeRep [Word8] instance Binary Serialized where put (Serialized the_type bytes) = put the_type >> put bytes get = Serialized <$> get <*> get instance Binary TyCon where put (TyCon _ p m n) = put p >> put m >> put n get = mkTyCon3 <$> get <*> get <*> get instance Binary TypeRep where put type_rep = let (ty_con, child_type_reps) = splitTyConApp type_rep in put ty_con >> put child_type_reps get = mkTyConApp <$> get <*> get -- | Put a Typeable value that we are able to actually turn into bytes into a 'Serialized' value ready for deserialization later toSerialized :: Typeable a => (a -> [Word8]) -> a -> Serialized toSerialized serialize what = Serialized (typeOf what) (serialize what) -- | If the 'Serialized' value contains something of the given type, then use the specified deserializer to return @Just@ that. -- Otherwise return @Nothing@. fromSerialized :: forall a. Typeable a => ([Word8] -> a) -> Serialized -> Maybe a fromSerialized deserialize (Serialized the_type bytes) | the_type == typeOf (undefined :: a) = Just (deserialize bytes) | otherwise = Nothing -- | Force the contents of the Serialized value so weknow it doesn't contain any bottoms seqSerialized :: Serialized -> () seqSerialized (Serialized the_type bytes) = the_type `seq` bytes `seqList` () -- | Use a 'Data' instance to implement a serialization scheme dual to that of 'deserializeWithData' serializeWithData :: Data a => a -> [Word8] serializeWithData what = serializeWithData' what [] serializeWithData' :: Data a => a -> [Word8] -> [Word8] serializeWithData' what = fst $ gfoldl (\(before, a_to_b) a -> (before . serializeWithData' a, a_to_b a)) (\x -> (serializeConstr (constrRep (toConstr what)), x)) what -- | Use a 'Data' instance to implement a deserialization scheme dual to that of 'serializeWithData' deserializeWithData :: Data a => [Word8] -> a deserializeWithData = snd . deserializeWithData' deserializeWithData' :: forall a. Data a => [Word8] -> ([Word8], a) deserializeWithData' bytes = deserializeConstr bytes $ \constr_rep bytes -> gunfold (\(bytes, b_to_r) -> let (bytes', b) = deserializeWithData' bytes in (bytes', b_to_r b)) (\x -> (bytes, x)) (repConstr (dataTypeOf (undefined :: a)) constr_rep) serializeConstr :: ConstrRep -> [Word8] -> [Word8] serializeConstr (AlgConstr ix) = serializeWord8 1 . serializeInt ix serializeConstr (IntConstr i) = serializeWord8 2 . serializeInteger i serializeConstr (FloatConstr r) = serializeWord8 3 . serializeRational r serializeConstr (CharConstr c) = serializeWord8 4 . serializeChar c deserializeConstr :: [Word8] -> (ConstrRep -> [Word8] -> a) -> a deserializeConstr bytes k = deserializeWord8 bytes $ \constr_ix bytes -> case constr_ix of 1 -> deserializeInt bytes $ \ix -> k (AlgConstr ix) 2 -> deserializeInteger bytes $ \i -> k (IntConstr i) 3 -> deserializeRational bytes $ \r -> k (FloatConstr r) 4 -> deserializeChar bytes $ \c -> k (CharConstr c) x -> error $ "deserializeConstr: unrecognised serialized constructor type " ++ show x ++ " in context " ++ show bytes serializeFixedWidthNum :: forall a. 
(Num a, Integral a, FiniteBits a) => a -> [Word8] -> [Word8] serializeFixedWidthNum what = go (finiteBitSize what) what where go :: Int -> a -> [Word8] -> [Word8] go size current rest | size <= 0 = rest | otherwise = fromIntegral (current .&. 255) : go (size - 8) (current `shiftR` 8) rest deserializeFixedWidthNum :: forall a b. (Num a, Integral a, FiniteBits a) => [Word8] -> (a -> [Word8] -> b) -> b deserializeFixedWidthNum bytes k = go (finiteBitSize (undefined :: a)) bytes k where go :: Int -> [Word8] -> (a -> [Word8] -> b) -> b go size bytes k | size <= 0 = k 0 bytes | otherwise = case bytes of (byte:bytes) -> go (size - 8) bytes (\x -> k ((x `shiftL` 8) .|. fromIntegral byte)) [] -> error "deserializeFixedWidthNum: unexpected end of stream" serializeEnum :: (Enum a) => a -> [Word8] -> [Word8] serializeEnum = serializeInt . fromEnum deserializeEnum :: Enum a => [Word8] -> (a -> [Word8] -> b) -> b deserializeEnum bytes k = deserializeInt bytes (k . toEnum) serializeWord8 :: Word8 -> [Word8] -> [Word8] serializeWord8 x = (x:) deserializeWord8 :: [Word8] -> (Word8 -> [Word8] -> a) -> a deserializeWord8 (byte:bytes) k = k byte bytes deserializeWord8 [] _ = error "deserializeWord8: unexpected end of stream" serializeInt :: Int -> [Word8] -> [Word8] serializeInt = serializeFixedWidthNum deserializeInt :: [Word8] -> (Int -> [Word8] -> a) -> a deserializeInt = deserializeFixedWidthNum serializeRational :: (Real a) => a -> [Word8] -> [Word8] serializeRational = serializeString . show . toRational deserializeRational :: (Fractional a) => [Word8] -> (a -> [Word8] -> b) -> b deserializeRational bytes k = deserializeString bytes (k . fromRational . read) serializeInteger :: Integer -> [Word8] -> [Word8] serializeInteger = serializeString . show deserializeInteger :: [Word8] -> (Integer -> [Word8] -> a) -> a deserializeInteger bytes k = deserializeString bytes (k . read) serializeChar :: Char -> [Word8] -> [Word8] serializeChar = serializeString . show deserializeChar :: [Word8] -> (Char -> [Word8] -> a) -> a deserializeChar bytes k = deserializeString bytes (k . read) serializeString :: String -> [Word8] -> [Word8] serializeString = serializeList serializeEnum deserializeString :: [Word8] -> (String -> [Word8] -> a) -> a deserializeString = deserializeList deserializeEnum serializeList :: (a -> [Word8] -> [Word8]) -> [a] -> [Word8] -> [Word8] serializeList serialize_element xs = serializeInt (length xs) . foldr (.) id (map serialize_element xs) deserializeList :: forall a b. (forall c. [Word8] -> (a -> [Word8] -> c) -> c) -> [Word8] -> ([a] -> [Word8] -> b) -> b deserializeList deserialize_element bytes k = deserializeInt bytes $ \len bytes -> go len bytes k where go :: Int -> [Word8] -> ([a] -> [Word8] -> b) -> b go len bytes k | len <= 0 = k [] bytes | otherwise = deserialize_element bytes (\elt bytes -> go (len - 1) bytes (k . (elt:))) seqList :: [a] -> b -> b seqList [] b = b seqList (x:xs) b = x `seq` seqList xs b
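-- A minimal round-trip sketch using only the functions above; the 'Point'
-- type is hypothetical and would need Data/Typeable instances
-- (e.g. via DeriveDataTypeable), which is why it is left as a comment:
--
--   data Point = Point Int Char deriving (Eq, Show, Data, Typeable)
--
--   roundTrip :: Maybe Point
--   roundTrip = fromSerialized deserializeWithData
--                 (toSerialized serializeWithData (Point 3 'x'))
--
-- Expected result: Just (Point 3 'x'), because the stored TypeRep matches
-- the requested type; asking for a different Typeable type yields Nothing.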
forked-upstream-packages-for-ghcjs/ghcjs
ghcjs-prim/src/GHCJS/Prim/TH/Serialized.hs
Haskell
mit
7,315
{-# LANGUAGE StandaloneDeriving, UndecidableInstances #-}
module Data.Term.Types where

import Data.Binding
import Data.Name
import Data.Typing
import qualified Data.Map as Map
import qualified Data.Set as Set

type Result a = Either String a

type Context term = Map.Map Name term

type Inferer term = Context term -> Result term
type Checker term = term -> Context term -> Result term

data Term f = Term { freeVariables :: Set.Set Name, typeOf :: Checker (Term f), out :: Typing (Binding f) (Term f) }

data Unification f = Unification (Set.Set Name) (Typing (Binding f) (Unification f))
                   | Conflict (Term f) (Term f)

expected :: Functor f => Unification f -> Term f
expected (Conflict expected _) = expected
expected (Unification freeVariables out) = Term freeVariables (const . const $ Left "Unification does not preserve typecheckers.\n") (expected <$> out)

actual :: Functor f => Unification f -> Term f
actual (Conflict _ actual) = actual
actual (Unification freeVariables out) = Term freeVariables (const . const $ Left "Unification does not preserve typecheckers.\n") (actual <$> out)

unified :: Traversable f => Unification f -> Maybe (Term f)
unified (Conflict _ _) = Nothing
unified (Unification freeVariables out) = do
  out <- mapM unified out
  return $ Term freeVariables (const . const $ Left "Unification does not preserve typecheckers.\n") out

into :: Functor f => Term f -> Unification f
into term = Unification (freeVariables term) $ into <$> out term

instance Eq (f (Term f)) => Eq (Term f) where
  a == b = freeVariables a == freeVariables b && out a == out b

deriving instance (Eq (Term f), Eq (f (Unification f))) => Eq (Unification f)
deriving instance (Show (Term f), Show (f (Unification f))) => Show (Unification f)
antitypical/Surface
src/Data/Term/Types.hs
Haskell
mit
1,752
-- Conduit
-- ref: https://wiki.haskell.org/Conduit
-- conduit-lib: https://www.schoolofhaskell.com/user/snoyberg/library-documentation/conduit-overview

{-
Streaming data library
    Collection of libraries that share the same underlying data structures
    Alternative to lazy I/O
    Promises: deterministic resource management (memory, file descriptors)
-}

-- example
import Conduit
import Control.Monad.Trans.Resource
import qualified Data.Conduit.Binary as CB
import Data.Word8 (toUpper)

main :: IO ()
main = runResourceT $
       CB.sourceFile "input.txt"
    $= omapCE toUpper
    $= takeCE 500
    $$ CB.sinkFile "output.txt"

{-
Core datatype: data ConduitM i o m r

Each conduit has:
    Upstream (i):     stream of incoming values
    Downstream (o):   stream of outgoing values
    Monad (m):        conduits are monad transformers
    Result value (r): just like all monads

Producer ignores its upstream
Consumer ignores its downstream

Source has no upstream
Sink has no downstream
Conduit has both upstream and downstream

Producer unifies to Source and Conduit
Consumer unifies to Conduit and Sink
-}

-- vs. Pipes
-- ref: https://twanvl.nl/blog/haskell/conduits-vs-pipes
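-- A second, purely in-memory sketch of the Source $= Conduit $$ Sink shape
-- described above (assumes conduit 1.x and the Data.Conduit.List combinators,
-- imported as CL; kept as a comment so the example above stays the only code):
--
--   import qualified Data.Conduit.List as CL
--
--   doubled :: IO [Int]
--   doubled = CL.sourceList [1..10] $= CL.map (*2) $$ CL.consume
--
-- CL.sourceList plays the Source role, CL.map the Conduit, CL.consume the Sink.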
Airtnp/Freshman_Simple_Haskell_Lib
Idioms/Conduit.hs
Haskell
mit
1,278
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings
    ( widgetFile
    , PersistConfig
    , staticDir
    ) where

import Prelude
import Language.Haskell.TH.Syntax
import Database.Persist.Postgresql (PostgresConf)
import Yesod.Default.Util
import Settings.Development
import Data.Default (def)
import Text.Hamlet

-- | Which Persistent backend this site is using.
type PersistConfig = PostgresConf

-- Static setting below. Changing these requires a recompile

-- | The location of static files on your system. This is a file system
-- path. The default value works properly with your scaffolded site.
staticDir :: FilePath
staticDir = "static"

-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
    { wfsHamletSettings = defaultHamletSettings
        { hamletNewlines = AlwaysNewlines
        }
    }

-- The rest of this file contains settings which rarely need changing by a
-- user.
widgetFile :: String -> Q Exp
widgetFile = (if development then widgetFileReload
                             else widgetFileNoReload)
             widgetFileSettings
fpco/schoolofhaskell.com
src/Settings.hs
Haskell
mit
1,468
{-# LANGUAGE DeriveDataTypeable #-}

-- | Strings normalized according to Normalization Form Compatibility
-- Decomposition.
module Data.Text.Normal.NFKD (
    Normal, fromText, toText
) where

import Control.Arrow (first)
import Control.DeepSeq
import Data.Data
import Data.Monoid
import Data.String
import Data.Text (Text)
import Data.Text.ICU.Normalize

-- | Normalized text.
newtype Normal = Normal {
    -- | Convert 'Normal' to 'Text'. This function just unwraps the newtype,
    -- so there is zero runtime cost.
    toText :: Text
} deriving (Eq, Ord, Data, Typeable)

-- | Convert 'Text' efficiently to 'Normal'.
fromText :: Text -> Normal
fromText t = Normal $ case quickCheck NFKD t of
    Nothing
        | isNormalized NFKD t -> t
        | otherwise           -> normalize NFKD t
    Just False -> normalize NFKD t
    Just True  -> t

instance Show Normal where
    show = show . toText

instance Read Normal where
    readsPrec i = map (first fromText) . readsPrec i

instance Monoid Normal where
    mappend (Normal t1) (Normal t2) = Normal $ t1 <> t2
    mempty = Normal mempty

instance IsString Normal where
    fromString = fromText . fromString

instance NFData Normal where
    rnf = rnf . toText
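-- Usage sketch (the ligature example assumes ICU's standard NFKD
-- compatibility decomposition of U+FB01 to "fi"; it is not tested here):
--
-- >>> toText (fromText "\xFB01nd")
-- "find"
-- >>> fromText "\xFB01nd" == fromText "find"
-- True
--
-- Since 'toText' only unwraps the newtype, reading a normalized value back
-- out costs nothing beyond the normalization done once in 'fromText'.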
pikajude/text-normal
src/Data/Text/Normal/NFKD.hs
Haskell
mit
1,226
{-# LANGUAGE DataKinds, FlexibleContexts, TypeOperators #-} module Geometry where import Control.Applicative import Data.Foldable (fold, foldMap) import Data.Vinyl import Graphics.GLUtil import Graphics.Rendering.OpenGL hiding (normal, normalize, light, Normal, Color) import Linear import Graphics.VinylGL import System.FilePath ((</>)) type Pos = "vertexPos" ::: V3 GLfloat type Normal = "vertexNormal" ::: V3 GLfloat type Color = "vertexColor" ::: V3 GLfloat pos :: Pos pos = Field normal :: Normal normal = Field col :: Color col = Field -- The 2D corners of a square. square :: [V2 GLfloat] square = V2 <$> [-1,1] <*> [1,-1] -- The 3D faces of a cube. front,back,left,right,top,bottom :: [V3 GLfloat] front = map (\(V2 x y) -> V3 x y 1) square back = map (\(V2 x y) -> V3 (-x) y (-1)) square left = map (\(V2 z y) -> V3 (-1) y z) square right = map (\(V2 z y) -> V3 1 y (-z)) square top = map (\(V2 x z) -> V3 x 1 (-z)) square bottom = map (\(V2 x z) -> V3 x (-1) z) square -- Cube face vertices paired with normal vectors. pts :: [PlainRec [Pos,Normal]] pts = fold [ map (setNorm z) front , map (setNorm $ -z) back , map (setNorm $ -x) left , map (setNorm x) right , map (setNorm y) top , map (setNorm $ -y) bottom ] where [x,y,z] = basis setNorm v p = (pos =: p <+> normal =: v) -- Color the front vertices a dark blue, the back a light beige. colorize :: PlainRec [Pos,Normal] -> PlainRec [Pos,Normal,Color] colorize pt = pt <+> col =: c where c | view (rLens pos._z) pt > 0 = V3 8.235294e-2 0.20392157 0.3137255 | otherwise = V3 0.95686275 0.8392157 0.7372549 -- Indices into the vertex array for each face. inds :: [Word32] inds = take 36 $ foldMap (flip map faceInds . (+)) [0,4..] where faceInds = [0,1,2,2,1,3] -- For rendering a cube, we'll need a ModelView matrix, and a -- ProjectionModelView matrix. type CamInfo = PlainRec ["cam" ::: M44 GLfloat, "proj" ::: M44 GLfloat] cube :: (i <: CamInfo) => IO (i -> IO ()) cube = do s <- simpleShaderProgram ("etc"</>"poly.vert") ("etc"</>"poly.frag") vb <- bufferVertices (map colorize pts) eb <- makeBuffer ElementArrayBuffer inds vao <- makeVAO $ do currentProgram $= Just (program s) setUniforms s (light =: normalize (V3 0 0 1)) enableVertices' s vb bindVertices vb bindBuffer ElementArrayBuffer $= Just eb let ss = setUniforms s return $ \appInfo -> withVAO vao $ do currentProgram $= Just (program s) ss (cast appInfo :: CamInfo) drawIndexedTris 12 where light :: "lightDir" ::: V3 GLfloat light = Field -- We don't use normal vectors with the ground, so we just need a -- single composite projection matrix. type ProjInfo = PlainRec '["proj" ::: M44 GLfloat] -- Ground texture from: -- http://www.texturehd.com/data/media/21/Wood_floor_boards.jpg ground :: (i <: ProjInfo) => IO (i -> IO ()) ground = do Right t <- readTexture $ "art"</>"Wood_floor_boards.png" generateMipmap' Texture2D s <- simpleShaderProgram ("etc"</>"ground.vert") ("etc"</>"ground.frag") vb <- bufferVertices . map ((pos =:) . scale3D) $ V2 <$> [-1,1] <*> [-1,1] vao <- makeVAO $ do currentProgram $= Just (program s) enableVertices' s vb bindVertices vb setUniforms s (tex =: 0) textureBinding Texture2D $= Just t textureFilter Texture2D $= ((Linear', Just Linear'), Linear') texture2DWrap $= (Repeated, Repeat) let ss = setUniforms s return $ \appInfo -> withVAO vao $ do currentProgram $= Just (program s) ss (cast appInfo :: ProjInfo) withTextures2D [t] $ drawArrays TriangleStrip 0 4 where scale3D :: V2 GLfloat -> V3 GLfloat scale3D = (\(V2 x z) -> V3 x (-1.01) z) . (3*^) tex :: "tex" ::: GLint tex = Field
spetz911/progames
vinyl-gl-master/examples/src/Geometry.hs
Haskell
mit
4,209
module Classifier ( Classifier(..), Metadata(..), StoredClassifier(..), buildClassifier, classifySequence, classifySequenceMulti, classifySequenceAll, leafOTU) where import Data.Tree import qualified Data.Map.Strict as M import qualified Data.List as L import Data.Binary (Binary, put, get, Get) import Data.Ord import Data.Tuple.Select import Data.Word import MlgscTypes import Alignment import NucModel import PepModel import PWMModel (PWMModel(..), scoreSeq, cladeName) -- When storing a Classifier to disk, we add some metadata. They may be quieried -- with mlgsc_dump. data StoredClassifier = StoredClassifier { classifier :: Classifier , metadata ::Metadata } instance Binary StoredClassifier where put storedCls = do put $ classifier storedCls put $ metadata storedCls get = do cls <- get :: Get Classifier md <- get :: Get Metadata return $ StoredClassifier cls md data Metadata = Metadata { cmdLine :: String , checksum :: Word32 } instance Binary Metadata where put md = do put $ cmdLine md put $ checksum md get = do cmdLine <- get :: Get String checksum <- get :: Get Word32 return $ Metadata cmdLine checksum data Classifier = PWMClassifier (Tree PWMModel) ScaleFactor deriving (Show, Eq) instance Binary Classifier where put (PWMClassifier modTree scaleFactor) = do put modTree put scaleFactor get = do modTree <- get :: Get (Tree PWMModel) scaleFactor <- get :: Get ScaleFactor return $ PWMClassifier modTree scaleFactor buildClassifier :: Molecule -> SmallProb -> ScaleFactor -> AlnMap -> OTUTree -> Classifier buildClassifier mol smallProb scale alnMap otuTree = case mol of DNA -> buildNucClassifier smallProb scale alnMap otuTree Prot -> buildPepClassifier smallProb scale alnMap otuTree -- TODO: these two are almost identical: refactor and pass the alnt-to-model -- function as a parameter in the case clause of buildClassifier above. buildNucClassifier :: SmallProb -> ScaleFactor -> AlnMap -> OTUTree -> Classifier buildNucClassifier smallprob scale map otuTree = PWMClassifier cladeModTree scale where cladeModTree = fmap NucPWMModel modTree modTree = fmap (\(name, aln) -> alnToNucModel smallprob scale name aln) treeOfNamedAlns treeOfNamedAlns = mergeNamedAlns treeOfLeafNamedAlns treeOfLeafNamedAlns = fmap (\k -> (k, M.findWithDefault [] k map)) otuTree buildPepClassifier :: SmallProb -> ScaleFactor -> AlnMap -> OTUTree -> Classifier buildPepClassifier smallprob scale map otuTree = PWMClassifier cladeModTree scale where cladeModTree = fmap PepPWMModel modTree modTree = fmap (\(name, aln) -> alnToPepModel smallprob scale name aln) treeOfNamedAlns treeOfNamedAlns = mergeNamedAlns treeOfLeafNamedAlns treeOfLeafNamedAlns = fmap (\k -> (k, M.findWithDefault [] k map)) otuTree -- The Int parameter is the log_10(ER) cutoff (the support value of nodes in the -- path in the default output). 
classifySequence :: Classifier -> Int -> Sequence -> Trail classifySequence (PWMClassifier modTree scale) log10ERcutoff seq = chooseSubtree modTree scale log10ERcutoff seq chooseSubtree :: Tree PWMModel -> ScaleFactor -> Int -> Sequence -> Trail chooseSubtree (Node _ []) _ _ _ = [] -- single-kid-node case - there is no meaningful ER to speak of so I just use -- 1000 - could be optimized :-) chooseSubtree (Node model [kid]) scale cutoff seq = PWMStep (cladeName $ rootLabel kid) kidScore 0 1000 : chooseSubtree kid scale cutoff seq where kidScore = scoreSeq (rootLabel kid) seq chooseSubtree (Node model kids) scale cutoff seq | diff < (round scale * cutoff) = [] | otherwise = PWMStep bestKidName bestKidScore sndBestKidScore log10ER : chooseSubtree bestKid scale cutoff seq where diff = bestKidScore - sndBestKidScore bestKidName = cladeName $ rootLabel bestKid (bestKid, Down bestKidScore) = orderedKids !! 0 (sndBestKid, Down sndBestKidScore) = orderedKids !! 1 orderedKids = L.sortBy (comparing snd) $ zip kids (map Down scores) scores = map (flip scoreSeq seq . rootLabel) kids log10ER = log10evidenceRatio (round scale) bestKidScore sndBestKidScore -- Intended mainly for debugging, as it enables to see a -- query's score at every node of the tree, and therefore allows identifying -- where the classifier chooses the wrong branch. The recursion starts at the -- root (rather than at its children), so we get rid of the Trail's head (hence -- the call to map tail). classifySequenceAll :: Classifier -> Sequence -> [Trail] classifySequenceAll (PWMClassifier modTree scale) seq = map tail $ walkSubtrees modTree scale seq bestScore where bestScore = maximum $ map (flip scoreSeq seq . rootLabel) (subForest modTree) walkSubtrees :: Tree PWMModel -> ScaleFactor -> Sequence -> Score -> [Trail] walkSubtrees (Node model []) scale seq bestScore = [[PWMStep name score (-1) log10ER]] where name = cladeName model score = scoreSeq model seq log10ER = log10evidenceRatio (round scale) bestScore score walkSubtrees (Node model kids) scale seq bestScore = map (thisstep:) $ concat $ map (\kid -> walkSubtrees kid scale seq bestKidScore) kids where thisstep = PWMStep (cladeName model) score (-1) log10ER score = scoreSeq model seq log10ER = log10evidenceRatio (round scale) bestScore score bestKidScore = maximum kidsScores kidsScores = map (flip scoreSeq seq . rootLabel) kids classifySequenceMulti :: Classifier -> Int -> Sequence -> [Trail] classifySequenceMulti (PWMClassifier modTree scale) log10ERcutoff seq = map tail $ chooseSubtrees modTree scale log10ERcutoff seq bestScore where bestScore = maximum $ map (flip scoreSeq seq . rootLabel) (subForest modTree) chooseSubtrees :: Tree PWMModel -> ScaleFactor -> Int -> Sequence -> Score -> [Trail] chooseSubtrees (Node model []) scale _ seq bestScore = [[PWMStep name score (-1) log10ER]] where name = cladeName model score = scoreSeq model seq log10ER = log10evidenceRatio (round scale) score bestScore chooseSubtrees (Node model kids) scale cutoff seq bestNonTiedScore = map (thisstep:) $ concat $ map (\kid -> chooseSubtrees kid scale cutoff seq bestNonTiedKidsScore) tiedKids where thisstep = PWMStep (cladeName model) score (-1) log10ER score = scoreSeq model seq log10ER = log10evidenceRatio (round scale) score bestNonTiedScore bestKidScore = maximum kidsScores kidsScores = map (flip scoreSeq seq . 
rootLabel) kids kidlog10ERs = map (log10evidenceRatio (round scale) bestKidScore) kidsScores tiedKids = L.map sel1 tiedKids_tpl (tiedKids_tpl, otherKids_tpl) = L.partition (\(_,_,er) -> er <= cutoff') $ zip3 kids kidsScores kidlog10ERs cutoff' = fromIntegral cutoff bestNonTiedKidsScore = case otherKids_tpl of [] -> sel2 $ L.minimumBy (comparing sel2) tiedKids_tpl otherwise -> sel2 $ L.maximumBy (comparing sel2) otherKids_tpl paths :: OTUTree -> [[OTUName]] paths (Node name []) = [[name]] paths (Node name kids) = map (name:) $ foldl1 (++) $ map paths kids -- finds the (first) object in a list that maximizes some metric m (think score -- of a sequence according to a model), returns that object and its index in -- the list, as well as the best score and second-best score themselves. Not -- efficient, but should be ok for short lists. -- TODO: if we no longer need the indices, this is way to complicated. bestByExtended :: Ord b => [a] -> (a -> b) -> (a, Int, b, b) bestByExtended objs m = (bestObj, bestNdx, bestMetricValue, secondBestMetricValue) where sorted = L.sortBy (flip compare) metricValues metricValues = map m objs bestMetricValue = sorted !! 0 secondBestMetricValue = sorted !! 1 bestNdx = head $ L.elemIndices bestMetricValue metricValues bestObj = objs !! bestNdx -- produces a new tree of which each node's data is a concatenation of its -- children node's data. Meant to be called on a Tree Alignment whose inner -- nodes are empty. To see it in action, do -- putStrLn $ drawTree $ fmap show $ mergeAlns treeOfLeafAlns -- in GHCi. mergeNamedAlns :: Tree (CladeName, Alignment) -> Tree (CladeName, Alignment) mergeNamedAlns leaf@(Node _ []) = leaf mergeNamedAlns (Node (name,_) kids) = Node (name,mergedKidAlns) mergedKids where mergedKids = L.map mergeNamedAlns kids mergedKidAlns = concatMap (snd . rootLabel) mergedKids leafOTU :: Trail -> OTUName leafOTU trail = otuName $ last trail -- Computes the base-10 log of the evidence ratio, i.e. log_10 (exp(delta-AIC / -- 2)), except that I use delta-AIC' (in which the factor 2 is dropped, so I -- avoid having to multiply by 2 only to divide by 2 again just after). log10evidenceRatio :: Int -> Int -> Int -> Double log10evidenceRatio scaleFactor bestScore secondBestScore = logBase 10 er where l_min = scoreTologLikelihood scaleFactor bestScore l_sec = scoreTologLikelihood scaleFactor secondBestScore er = exp(deltaAIC' l_min l_sec) -- Converts a model score (which is a scaled, rounded log-likelihood (log base -- 10)) to a log-likelihood (log base e, i.e. ln). To do this, we _divide_ by -- the scale factor to get an unscaled log10-likelihood, and then divide by -- log10(e) to get a ln-based likelihood. scoreTologLikelihood :: Int -> Int -> Double scoreTologLikelihood scaleFactor score = log10Likelihood / logBase 10 e where log10Likelihood = fromIntegral score / fromIntegral scaleFactor e = exp 1.0 -- Computes the difference in AIC of two log-likelihoods, taking into account -- that the number of parameters k is in our case the same in any two models, -- and this cancels out, i.e. delta AIC = AIC1 - AIC2 = 2k -2 ln (L_1) - (2k - -- 2 ln(L_2)) = -2 (ln (L_1) - ln (L_2)). Since the arguments are already _log_ -- likelihoods, the expression simplifies to -2 (l_1 - l_2), where l_1 = -- ln(L_1), etc. I also drop the constant 2, since we'd be dividing by 2 right -- away in evidenceRatio anyway. deltaAIC' :: Double -> Double -> Double deltaAIC' l1 l2 = l1 - l2
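-- Worked example, following the three functions above: with a scale factor
-- of 1000, a best score of -2000 and a second-best score of -2300,
--   scoreTologLikelihood 1000 (-2000) = (-2000 / 1000) / logBase 10 e ~ -4.605
--   scoreTologLikelihood 1000 (-2300) = (-2300 / 1000) / logBase 10 e ~ -5.296
--   deltaAIC' ~ 0.691, hence log10evidenceRatio 1000 (-2000) (-2300) ~ 0.3,
-- which is just (bestScore - secondBestScore) / scaleFactor, i.e. an
-- evidence ratio of roughly 10^0.3 ~ 2 in favour of the best-scoring clade.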
tjunier/mlgsc
src/Classifier.hs
Haskell
mit
10,879
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -- | SWF Decider logic. -- module Network.AWS.Wolf.Decide ( decide , decideMain ) where import Data.Aeson import Data.Time import Data.UUID import Data.UUID.V4 import Network.AWS.SWF import Network.AWS.Wolf.Ctx import Network.AWS.Wolf.File import Network.AWS.Wolf.Prelude import Network.AWS.Wolf.SWF import Network.AWS.Wolf.Types -- | Successful end of workflow. -- end :: MonadAmazonDecision c m => Maybe Text -> m Decision end input = do traceInfo "end" mempty pure $ completeWork input -- | Next activity in workflow to run. -- next :: MonadAmazonDecision c m => Maybe Text -> Maybe Text -> Task -> m Decision next input priority t = do uid <- liftIO $ toText <$> nextRandom traceInfo "next" [ "uid" .= uid, "task" .= t ] pure $ scheduleWork uid (t ^. tName) (t ^. tVersion) (t ^. tQueue) input priority -- | Failed activity, stop the workflow. -- failed :: MonadAmazonDecision c m => m Decision failed = do traceInfo "failed" mempty pure failWork -- | Completed activity, start the next activity. -- completed :: MonadAmazonDecision c m => HistoryEvent -> m Decision completed he = do traceInfo "completed" mempty hes <- view adcEvents (input, priority, name) <- maybeThrowIO' "No Completed Information" $ do atcea <- he ^. heActivityTaskCompletedEventAttributes he' <- flip find hes $ (== atcea ^. atceaScheduledEventId) . view heEventId atsea <- he' ^. heActivityTaskScheduledEventAttributes pure (atcea ^. atceaResult, atsea ^. atseaTaskPriority, atsea ^. atseaActivityType . atName) p <- view adcPlan maybe (end input) (next input priority) $ tailMay (flip dropWhile (p ^. pTasks) $ (/= name) . view tName) >>= headMay -- | Beginning of workflow, start the first activity. -- begin :: MonadAmazonDecision c m => HistoryEvent -> m Decision begin he = do traceInfo "begin" mempty (input, priority) <- maybeThrowIO' "No Start Information" $ do wesea <- he ^. heWorkflowExecutionStartedEventAttributes pure (wesea ^. weseaInput, wesea ^. weseaTaskPriority) p <- view adcPlan maybe (end input) (next input priority) $ headMay (p ^. pTasks) -- | Schedule workflow based on historical events. -- schedule :: MonadAmazonDecision c m => m Decision schedule = do traceInfo "schedule" mempty hes <- view adcEvents f hes >>= maybeThrowIO' "No Select Information" where f [] = pure Nothing f (he:hes) = case he ^. heEventType of WorkflowExecutionStarted -> Just <$> begin he ActivityTaskCompleted -> Just <$> completed he ActivityTaskFailed -> Just <$> failed _et -> f hes -- | Decider logic - poll for decisions, make decisions. -- decide :: MonadConf c m => Plan -> m () decide p = preConfCtx [ "label" .= LabelDecide ] $ do let queue = p ^. pStart . tQueue runAmazonWorkCtx queue $ do traceInfo "poll" mempty t0 <- liftIO getCurrentTime (token, hes) <- pollDecision t1 <- liftIO getCurrentTime statsIncrement "wolf.decide.poll.count" [ "queue" =. queue ] statsHistogram "wolf.decide.poll.elapsed" (realToFrac (diffUTCTime t1 t0) :: Double) [ "queue" =. queue ] maybe_ token $ \token' -> runAmazonDecisionCtx p hes $ do traceInfo "start" mempty t2 <- liftIO getCurrentTime schedule >>= completeDecision token' t3 <- liftIO getCurrentTime traceInfo "finish" mempty statsIncrement "wolf.decide.decision.count" [ "queue" =. queue ] statsHistogram "wolf.decide.decision.elapsed" (realToFrac (diffUTCTime t3 t2) :: Double) [ "queue" =. queue ] -- | Run decider from main with config file. 
-- decideMain :: MonadControl m => FilePath -> FilePath -> Maybe Text -> m () decideMain cf pf domain = runCtx $ runTop $ do conf <- readYaml cf let conf' = override cDomain domain conf runConfCtx conf' $ do plans <- readYaml pf runConcurrent $ forever . decide <$> plans
swift-nav/wolf
src/Network/AWS/Wolf/Decide.hs
Haskell
mit
4,107
-- Copyright (c) 2011 Alexander Poluektov (alexander.poluektov@gmail.com)
--
-- Use, modification and distribution are subject to the MIT license
-- (See accompanying file MIT-LICENSE)

import Distribution.Simple

main = defaultMain
apoluektov/domino
Setup.hs
Haskell
mit
232
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE ImplicitParams #-} {-# LANGUAGE CPP #-} module GenTM5Data ( instantiateDoc ) where import Data.Aeson import Data.Aeson.TH import Data.Text (Text) import qualified Data.Text as T import Data.HashMap.Strict (HashMap) import qualified Data.HashMap.Strict as HM import Data.Hashable (Hashable) import Data.Set (Set) import qualified Data.Set as Set import Data.List as L import GenTM5Parser (getDoc) import qualified GenTM5Parser as P import Prelude hiding (read) import qualified Prelude as Pre (read) import Control.Applicative import Control.Monad import Control.Monad.Trans (lift) import Control.Monad.Trans.State import Control.Monad.Trans.Reader import Data.Maybe import Data.Either import System.Exit (exitFailure) import System.IO.Unsafe (unsafeDupablePerformIO) import System.IO (hPutStrLn, stderr) import Text.Printf import Control.Lens -- DBG -- import System.IO.Unsafe unsafePeek :: (Show a) => a -> a unsafePeek showMe = unsafePerformIO $ print showMe >> return showMe -- DBG -- data TM5ConcreteTransition = TM5CTrans { read :: Text , to_state :: Text , write :: Text , action :: Text } deriving (Show,Eq) $(deriveJSON defaultOptions ''TM5ConcreteTransition) data TM5Machine = TM5 { name :: Text , alphabet :: [Text] , blank :: Text , states :: [Text] , initial :: Text , finals :: [Text] , transitions :: HashMap Text [TM5ConcreteTransition] } deriving (Show) $(deriveJSON defaultOptions ''TM5Machine) -- Rich transitions: embeds instantiation meta data along serializable structure. data RichCTransition = RCTrans { cTransRCT :: TM5ConcreteTransition -- toJSON , skellNameRCT :: Text -- template name, i.e. HM key to children template trans. , paramsRCT :: [Text] -- resolved params for this trans., used by children } deriving (Show,Eq) data StateInstance = SI { nameSI :: Text , paramsSI :: [Text] } deriving (Show) exitError :: String -> a exitError s = unsafeDupablePerformIO $ hPutStrLn stderr s >> exitFailure >> return undefined lookupOrDie :: (?deathMessage :: ShowS, Show k, Hashable k, Eq k) => k -> HashMap k v -> v lookupOrDie key hm = HM.lookupDefault (exitError $ ?deathMessage $ show key) key hm getPlaceHolder = getDoc ^. P.templatePatterns ^. P.inheritedNth getRCPOf = getDoc ^. P.templatePatterns ^. P.reciprocal getSameAsRead = getDoc ^. P.templatePatterns ^. P.readPat getSameAsState = getDoc ^. P.templatePatterns ^. P.currentState getGlobAny = getDoc ^. P.alphabet ^. P.globAnyInput getGlobFree = getDoc ^. P.alphabet ^. P.globFreeSymbols getFreeSyms = getDoc ^. P.alphabet ^. P.freeSymbols getRCPFree = getDoc ^. P.alphabet ^. P.freeSymbolsRCP getCollection = getDoc ^. P.alphabet ^. P.collection getExhaustiveSet = Set.fromList$ getCollection ++ getRCPFree makeState :: StateInstance -- The current concrete state and its params (for recursive transitions) -> StateInstance -- The a template state to instantiate with concrete params. -> StateInstance -- the resulting concrete name and list of concrete params. 
makeState currentState (SI templName params) = if templName == getSameAsState then currentState else let bits = T.splitOn getPlaceHolder templName :: [Text] in case bits of [single] -> SI single [] _ -> SI (T.concat$ L.concat$ L.transpose$ [bits, params]) params #define CALLJUSTORDIE(tailInfo, param) (let ?dbgInfo = ?dbgInfo ++ tailInfo in justOrDie param) justOrDie :: (?dbgInfo :: String) => Maybe a -> a justOrDie = fromMaybe (let ?dbgInfo = "justOrDie: DYING -> " ++ ?dbgInfo in exitError ?dbgInfo) -- Takes a selector -- The selector can be either (rcp to Nth sym) "~~%%N" or "%%N" (Nth sym) -- Returns the index data borne by the selector, -- as either Left iRcp or Right i. -- Non-selector or malformed selector will raise a deadly exception -- through the use of fromJust. indexFromSelector :: (?dbgInfo :: String) => Text -> Either Int Int indexFromSelector sel = let stripRcp = T.stripPrefix getRCPOf sel -- ?dbgInfo = ?dbgInfo ++ "; indexFromSelector" doRead = (Pre.read :: String -> Int) . T.unpack . CALLJUSTORDIE("; indexFromSelector",) . (T.stripPrefix getPlaceHolder) in case stripRcp of Just t -> Left$ doRead t Nothing -> Right$ doRead sel -- Provided a bare, litteral sym from the `freeSyms` set -- -- May throw, provided a non-bare, non-freeSym symbol. resolveRCP :: (?dbgInfo :: String) => Text -> Text resolveRCP t = let ?dbgInfo = ?dbgInfo ++ "; resolveRCP" in case elemIndex t getFreeSyms of Just i -> getRCPFree !! i Nothing -> getFreeSyms !! justOrDie (elemIndex t getRCPFree) -- From state instance params, a selector string: return the targeted parameter. -- A malformed selector, -- An improper (OOB index) selector, -- A bad freeSyms <=> RCP mapping -- will raise a deadly exception. paramFromSelector :: (?dbgInfo :: String) => ([Text], Text) -> Text paramFromSelector (params, sel) = let ?dbgInfo = ?dbgInfo ++ "; paramFromSelector" in case indexFromSelector sel of Right i -> params !! i Left i -> resolveRCP (params !! i) -- CAVEAT: 'action' field is %%'ed for return_* states ! -- CAVEAT²: 'action' field is %%'ed with non-symbols ! -- -- -- Draw obtained syms from the pool ! -- Update env: => accum' <- ((accum `union` gotSyms) `inter` pool) -- pool' <- (pool `diff` (pool `inter` accum')) -- => Env pool' accum' -- Starting and reference pools must include reciprocal-free-syms ! data Env = Env { availablePool :: Set Text, stateParams :: [Text], readEntry :: Text } gatherSyms :: [Text] -> State Env [Text] gatherSyms [] = return [] gatherSyms (sym:ls) = do e@(Env pool params readEnt) <- get let gotSyms = case sym of sym | sym == getGlobFree -> Set.fromList getFreeSyms -- Globbed... | sym == getGlobAny -> Set.fromList getCollection -- ...categories. _ -> let isRCP = T.isInfixOf getRCPOf sym -- Single static|template rcpStripped = spliceOut getRCPOf sym in let resolve = let ?dbgInfo = "sym:" ++ T.unpack sym in resolveSelector isRCP rcpStripped in Set.singleton$ runReader resolve e let pool' = \uSyms -> pool `Set.difference` uSyms in let updateEnv uSyms (Env lp pms r) = Env (pool' uSyms) pms r in modify (updateEnv gotSyms) let gathered = Set.toList$ pool `Set.intersection` gotSyms in return . 
(++) gathered =<< gatherSyms ls where -- -- -- -- -- -- -- Helpers -- -- -- -- -- -- -- spliceOut intron txt = T.concat$ T.splitOn intron txt resolveSelector :: (?dbgInfo :: String) => Bool -> Text -> Reader Env Text resolveSelector isRcp remainder = do readEnt <- asks readEntry params <- asks stateParams let morphRcpM dbg t = return $ if isRcp then let ?dbgInfo = ?dbgInfo ++ dbg++ "->gatherSyms:RCP: [" ++ T.unpack t ++ "]" in resolveRCP t else id t in do case remainder of rem | T.isInfixOf getPlaceHolder rem -> let dbgInfo = printf "gatherSyms:Straight: %s" (T.unpack rem) in morphRcpM dbgInfo$ paramFromSelector (params, remainder) | T.isInfixOf getSameAsRead rem -> let dbgInfo = printf "READPATTERN: readEnt: %s, rem: %s" (T.unpack readEnt) (T.unpack rem) in morphRcpM dbgInfo readEnt | otherwise -> let dbgInfo = "I AM OTHERWISE" in morphRcpM dbgInfo rem instantiateTrans :: [(Text, Text)] -- Template IO couples -> StateT (Set Text) -- Allowed Sym pool to draw from (Reader (StateInstance, P.M5Transition)) -- reference concrete state, template transition [RichCTransition] -- Resulting concrete transitions instantiateTrans [] = return [] instantiateTrans ((is,os):lio) = do (curSt@(SI parentState concreteParams) , P.M5Trans _ skToSt tpms act) <- lift ask symPool <- get let (iConcreteSyms, Env iRemPool _ _) = runState (gatherSyms [is]) (Env symPool concreteParams is) let resolveSyms = \el poo rs -> evalState (gatherSyms el) (Env poo concreteParams rs) collection = getExhaustiveSet oConcreteSyms = concat$ resolveSyms [os] collection <$> iConcreteSyms pConcretePrms = resolveSyms tpms collection <$> iConcreteSyms cActs = if T.isInfixOf getPlaceHolder act then (\ps -> let ?dbgInfo = "instantiateTrans" in paramFromSelector (ps,act)) <$> pConcretePrms else repeat act lsStates = makeState curSt <$> SI skToSt <$> pConcretePrms in let serialCTrans = zipWith4 TM5CTrans iConcreteSyms (nameSI <$> lsStates) oConcreteSyms cActs in let cTrans = zipWith3 RCTrans serialCTrans (repeat skToSt) (paramsSI <$> lsStates) in do -- return $ seq (unsafePeek curSt) () put iRemPool instantiateTrans lio >>= return . (++) (filter (\tr -> to_state (cTransRCT tr) /= parentState) cTrans) -- ForEach I:O couple -- comprehend template I:O couples -- instantiate State: -- comprehend template States -- makeTransition type WIPTransitions = HashMap Text [RichCTransition] makeTransitions :: StateInstance -- Previous concrete state, whence the transition is starting from. -> [P.M5Transition] -- Associated template transitions -> State (Set Text) -- Track consumed symbols as a State WIPTransitions -- fold resulting concrete transition maps. makeTransitions si@(SI parentState pParams) lSkellTr = foldM instantiateCondense HM.empty lSkellTr where instantiateCondense :: WIPTransitions -> P.M5Transition -> State (Set Text) WIPTransitions instantiateCondense accuHM skellTr = do pool <- get let foldingLRCTrM hm v = return$ HM.insertWith (flip (++)) parentState v hm let saneSkellTr = if skellTr ^. P.toStatePattern == getSameAsState then set P.toStatePattern parentState . 
set P.toStateParams pParams$ skellTr else skellTr let iol = P._inputOutput saneSkellTr in let (lRichTr,remPool) = flip runReader (si, saneSkellTr) $ runStateT (instantiateTrans iol) pool :: ([RichCTransition], Set Text) in do put remPool foldM foldingLRCTrM accuHM ((:[]) <$> lRichTr) type ConcreteTransitions = HashMap Text [TM5ConcreteTransition] dispatchInstantiation :: [(Text,[RichCTransition])] -> StateT WIPTransitions (State (Set Text)) -- final states instances ConcreteTransitions dispatchInstantiation [] = return . HM.map (\lrct -> cTransRCT <$> lrct) =<< get dispatchInstantiation ((cState, lRCTr):ls) = do moreTasks <- mapM stateFold lRCTr >>= return . concat dispatchInstantiation (ls ++ moreTasks) where ----------------------------------------------------------------------------- skellHM = getDoc ^. P.transitions fetchSkTr el = let ?deathMessage = (++) "dispatchInstantiation: could not find state: " in lookupOrDie skellKey skellHM where skellKey = skellNameRCT el stateFold :: RichCTransition -> StateT WIPTransitions (State (Set Text)) [(Text,[RichCTransition])] stateFold = \el -> do let skSt = skellNameRCT el let callMkTrans = \si -> \lSkTr -> makeTransitions si lSkTr `evalState` getExhaustiveSet let hmRich = callMkTrans (SI cState$ paramsRCT el) (fetchSkTr el) when (skSt `elem` P._finalStates getDoc)$ lift$ modify (Set.insert skSt) modify (HM.unionWith (\l r -> union (nub l) r) hmRich) return$ HM.toList hmRich data LocalEnv = LEnv { _curCState :: Text , _curCParams :: [Text] , _curSkellState :: Text , _curRead :: Text , _usedSyms :: [Text] , _transSyms :: [Text] } $(makeLenses ''LocalEnv) data MetaEnv = MEnv { _finals :: Set Text , _wipTrans :: WIPTransitions } $(makeLenses ''MetaEnv) resolveTemplateSym :: Text -> Reader LocalEnv [Text] resolveTemplateSym tSym = do used <- asks _usedSyms readEnt <- asks _curRead let inter l = (used \\ getExhaustiveSet) `intersection` l return$ case tSym of getGlobAny -> inter getCollection getGlobFree -> inter getFreeSyms _ | T.isInfixOf getPlaceHolder tSym -> inter$ paramFromSelector tSym : [] | case T.breakOn getSameAsRead tSym -> (a,b)| not$ T.null b -> inter$ resolveStatic (T.concat a readEnt) : [] | otherwise -> inter$ resolveStatic a : [] where resolveStatic sym = if T.isInfixOf getRCPOf sym then resolveRCP sym else sym -- depth first... rebootMakeTrans :: StateT MetaEnv (Reader LocalEnv) () rebootMakeTrans = do -- ((Initialization)) -- ForEach starting state template -- ForEach (R,W) -- log . instantiate: templateTrans -> (curState,R,W,params) -> WipConcreteTransition -- ((Recursion)) -- given WipConcreteTransitions : -- ForEach concrete state -- ForEach (curState,R,W,params,toTemplate) -- if instantiate (toTemplate,r,w,params) NotMemberOf (log:)WipConcreteTransitions: -- log . instantiate: templateTrans -> (curState,R,W,params) -> WipConcreteTransition instantiateDoc :: TM5Machine instantiateDoc = let dm = (++) "instantiateDoc: could not find state: " doc = getDoc alphaDoc = doc ^. P.alphabet iniState = doc ^. P.initialState staticFinals = Set.fromList$ doc ^. P.finalStates skellHM = doc ^. 
P.transitions iniTrans = let ?deathMessage = dm in lookupOrDie iniState skellHM --bootstrapInstance = HM.map (\lrct -> cTransRCT<$>lrct)$ evalState bootstrapInstance = evalState (makeTransitions (SI iniState []) iniTrans) getExhaustiveSet (concreteTrans, concreteFinals) = -- (,) bootstrapInstance [] ((dispatchInstantiation$ HM.toList bootstrapInstance) `evalStateT` bootstrapInstance) `runState` staticFinals in TM5 "UniversalMachine" (getCollection ++ getRCPFree) (alphaDoc ^. P.hostBlank) (HM.keys concreteTrans) iniState (Set.toList concreteFinals) concreteTrans
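-- Hedged reading of the pipeline above (added note, not author documentation):
-- `makeTransitions` instantiates the template transitions of one concrete
-- state, `dispatchInstantiation` works through a worklist of freshly created
-- states (recording final states on the side) until no new instances appear,
-- and `instantiateDoc` bootstraps the whole process from the skeleton's
-- initial state to produce the concrete "UniversalMachine" description.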
range12/there-is-no-B-side
tools/generators/GuestEncoder/GenTM5Data.hs
Haskell
mit
15,569
{-| Module : Collapse Description : Collapses all multi-clause functions into single-body ones -} module Latro.Collapse where import Control.Monad.Except import Latro.Ast import Latro.Compiler import Latro.Errors collectFunDefs :: RawId -> [RawAst Exp] -> ([RawAst FunDef], [RawAst Exp]) collectFunDefs _ [] = ([], []) collectFunDefs id (eFunDef@(ExpFunDef funDef@(FunDefFun _ fid _ _)) : es) | id == fid = let (funDefs, es') = collectFunDefs id es in (funDef : funDefs, es') | otherwise = ([], eFunDef : es) collectFunDefs _ es = ([], es) collapseBindingExp :: RawId -> [RawAst Exp] -> Collapsed (RawAst Exp, [RawAst Exp]) collapseBindingExp id (e@(ExpTopLevelAssign _ (PatExpId _ pid) _) : es) | id == pid = return (e, es) | otherwise = throwError $ ErrNoBindingAfterTyAnn id collapseBindingExp id (e@(ExpAssign _ (PatExpId _ pid) _) : es) | id == pid = return (e, es) | otherwise = throwError $ ErrNoBindingAfterTyAnn id collapseBindingExp id (e@(ExpFunDef (FunDefFun p fid _ _)) : es) | id == fid = do let (funDefs, es') = collectFunDefs fid (e : es) eFunDef <- collapse $ ExpFunDefClauses p fid funDefs case funDefs of [] -> throwError $ ErrNoBindingAfterTyAnn fid _ -> return (eFunDef, es') | otherwise = throwError $ ErrNoBindingAfterTyAnn id collapseBindingExp id _ = throwError $ ErrNoBindingAfterTyAnn id collapse :: RawAst Exp -> Collapsed (RawAst Exp) collapse (ExpAssign p patE e) = do e' <- collapse e return $ ExpAssign p patE e' collapse (ExpFunDef (FunDefFun p id argPatEs bodyE)) = do bodyE' <- collapse bodyE return $ ExpFunDef $ FunDefFun p id argPatEs bodyE' collapse (ExpModule p id bodyEs) = do bodyEs' <- collapseEs bodyEs return $ ExpModule p id bodyEs' collapse (ExpBegin p bodyEs) = do bodyEs' <- collapseEs bodyEs return $ ExpBegin p bodyEs' collapse (ExpFunDefClauses ap aid funDefs) = do funDefs' <- mapM collapseFunDef funDefs return $ ExpFunDefClauses ap aid funDefs' collapse e = return e collapseFunDef :: RawAst FunDef -> Collapsed (RawAst FunDef) collapseFunDef (FunDefFun p id patE bodyE) = do bodyE' <- collapse bodyE return $ FunDefFun p id patE bodyE' collapseEs :: [RawAst Exp] -> Collapsed [RawAst Exp] collapseEs [] = return [] collapseEs (ExpTopLevelTyAnn tyAnn@(TyAnn _ aid _ _ _) : es) = do (e, es') <- collapseBindingExp aid es es'' <- collapseEs es' return (ExpWithAnn tyAnn e : es'') collapseEs (ExpTyAnn tyAnn@(TyAnn _ aid _ _ _) : es) = do (e, es') <- collapseBindingExp aid es es'' <- collapseEs es' return (ExpWithAnn tyAnn e : es'') collapseEs (ExpFunDef (FunDefFun p fid argPatEs bodyE) : es) = do bodyE' <- collapse bodyE let (funDefs, es') = collectFunDefs fid es funDef = FunDefFun p fid argPatEs bodyE' eFunDef = ExpFunDefClauses p fid (funDef : funDefs) es'' <- collapseEs es' return (eFunDef : es'') collapseEs (ExpBegin p bodyEs : es) = do bodyEs' <- collapseEs bodyEs es' <- collapseEs es return (ExpBegin p bodyEs' : es') collapseEs (ExpProtoImp p synTy protoId straints bodyEs : es) = do bodyEs' <- collapseEs bodyEs es' <- collapseEs es return (ExpProtoImp p synTy protoId straints bodyEs' : es') collapseEs (e : es) = do e' <- collapse e es' <- collapseEs es return (e' : es') type Collapsed a = CompilerPass CompilerEnv a runCollapseFunClauses :: RawAst CompUnit -> Collapsed (RawAst CompUnit) runCollapseFunClauses (CompUnit pos exps) = do exps' <- collapseEs exps return $ CompUnit pos exps'
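-- Hedged summary (added note, not part of the Latro sources): `collapseEs` is
-- the entry point that walks a list of top-level expressions, pairing each
-- type annotation with the binding that follows it (`collapseBindingExp`) and
-- folding consecutive clauses of the same function into a single
-- `ExpFunDefClauses` node, so later passes only ever see one definition per
-- name.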
Zoetermeer/latro
src/Latro/Collapse.hs
Haskell
mit
3,533
-- Copyright (C) 2013 Jorge Aparicio

import Data.Maybe (mapMaybe)

main :: IO()
main = print . head . filter isPalindrome . mapMaybe fst $ iterate next (Nothing, nums)
  where nums  = [[x * y | y <- [x,x-1..start]] | x <- [end,end-1..start]]
        start = 100 :: Int
        end   = 999

next :: Integral a => (Maybe a, [[a]]) -> (Maybe a, [[a]])
next (_, [[]])        = (Nothing, [[]])
next (_, (x:xs):[])   = (Just x, [xs])
next (_, x@(xh:xt):y@(yh:_):zs)
  | xh > yh   = (Just xh, xt:y:zs)
  | otherwise = (h, x:zs')
  where (h, zs') = next (Nothing, y:zs)

isPalindrome :: (Integral a, Show a) => a -> Bool
isPalindrome n = s == reverse s
  where s = show n
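-- Hedged GHCi sketch (added; not part of the original solution) of the two
-- helpers above, using made-up small inputs:
--   isPalindrome (9009 :: Int)      -- True
--   next (Nothing, [[6,3],[4,2]])   -- (Just 6, [[3],[4,2]])
-- `next` pops the larger head of adjacent descending lists, so large products
-- are reached early and the first palindrome found is printed.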
japaric/eulermark
problems/0/0/4/004.hs
Haskell
mit
669
-- SYNTAX TEST "source.haskell" module Intro where -- <- keyword.other -- ^^^^^ support.other.module -- ^^^^^ keyword.other -- ^^^^^^^^^^^^^^^ meta.declaration.module import Language.Haskell.Liquid.Prelude (liquidAssert) -- ^^^^^^^^^^^^ meta.declaration.exports entity.name.function -- <- keyword.other -- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ support.other.module zero' :: Int zero' = 0 {-@ zero' :: {v: Int | 0 <= v} @-} -- ^^^^^^^^^^^^^^^^^ liquid.type -- ^^ keyword.operator -- ^ constant.numeric -- ^^^ entity.name.type -- ^^^^^ entity.name.function -- ^^^^^^^^^^^^^^^^^^^^^^^^^^ block.liquidhaskell.annotation -- <- block.liquidhaskell {-@ zero'' :: {v: Int | (0 <= v && v < 100) } @-} -- ^^^^^^ entity.name.function -- ^^^ entity.name.type -- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ liquid.type -- ^^ keyword.operator -- ^ keyword.operator -- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ block.liquidhaskell.annotation -- <- block.liquidhaskell zero'' :: Int -- <- meta.function.type-declaration zero'' = 0 -- <- identifier {-@ zero''' :: {v: Int | ((v mod 2) = 0) } @-} zero''' :: Int -- <- meta.function.type-declaration zero''' = 0 -- <- identifier {-@ zero'''' :: {v: Int | v = 0 } @-} zero'''' :: Int zero'''' = 0 {-@ zero :: {v: Int | ((0 <= v) && ((v mod 2) = 0) && (v < 100)) } @-} zero :: Int zero = 0 {-@ error' :: {v: String | false } -> a @-} error' :: String -> a error' = error {-@ lAssert :: {v:Bool | (Prop v)} -> a -> a @-} lAssert :: Bool -> a -> a lAssert True x = x lAssert False _ = error' "lAssert failure" divide :: Int -> Int -> Int divide n 0 = error' "divide by zero" divide n d = n `div` d {-@ divide :: Int -> {v: Int | v != 0 } -> Int @-} {-@ divide' :: Int -> {v:Int | v /= 0} -> Int @-} divide' :: Int -> Int -> Int divide' n 0 = error' "divide by zero" divide' n d = lAssert (d /= 0) $ n `div` d abz :: Int -> Int abz n | 0 < n = n | otherwise = 0 - n {-@ abz :: Int -> {v: Int | 0 <= v } @-} -- ^^^^^^^^^^^^^^^^^^ liquid.type -- ^^^ entity.name.type -- ^^^ entity.name.function -- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ block.liquidhaskell.annotation -- <- block.liquidhaskell {-@ truncate :: Int -> Int -> Int @-} truncate i max | i' <= max' = i | otherwise = max' * (i `divide` i') where i' = abz i max' = abz max {-@ truncate' :: Int -> Int -> Int @-} truncate' i max | i' <= max' = i | otherwise = lAssert (i' /= 0) $ max' * (i `divide` i') where i' = abz i max' = abz max {-@ truncate'' :: Int -> Int -> Int @-} truncate'' i max | i' <= max' = i | otherwise = liquidAssert (i' /= 0) $ max' * (i `divide` i') where i' = abz i max' = abz max {-@ listAssoc :: x:List a -> y:List a -> z:List a -> {(append x (append y z)) == (append (append x y) z) } @-} -- ^^^^^^ ^ ^^^^^^ ^ ^ ^^^^^^ ^^^^^^ ^ ^ ^ identifier {-@ type Something = SomethingElse @-} -- <- meta.declaration.type {-@ instance Something where -- <- meta.declaration.instance asd = instance -- <- identifier @-} -- >> =source.haskell
atom-haskell/language-haskell
spec/fixture/liquidhaskell.hs
Haskell
mit
3,345
class YesNo a where
  yesno :: a -> Bool

instance YesNo Int where
  yesno 0 = False
  yesno _ = True

instance YesNo [a] where
  yesno [] = False
  yesno _  = True

instance YesNo Bool where
  yesno = id

instance YesNo (Maybe a) where
  yesno Nothing  = False
  yesno (Just _) = True

yesnoIf :: (YesNo y) => y -> a -> a -> a
yesnoIf yesnoVal yesResult noResult = if yesno yesnoVal then yesResult else noResult

-- yesno :: [a] -> Bool
-- yesno [] = False
-- yesno _ = True

-- yesno :: Maybe a -> Bool
-- yesno (Just _) = True
-- yesno Nothing = False
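-- Hedged usage sketch (added example, not in the original file); assuming the
-- instances above, `exampleAnswers` evaluates to ["empty","just"] in GHCi.
exampleAnswers :: [String]
exampleAnswers =
  [ yesnoIf ([] :: [Int]) "non-empty" "empty"   -- the empty list counts as "no"
  , yesnoIf (Just 'x')    "just"      "nothing" -- Just _ counts as "yes"
  ]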
NickAger/LearningHaskell
CIS194/typeclassexperiments.hs
Haskell
mit
576
-- student ID 1: a00368770
-- student ID 2: a01273613

-- Problem 1: student average
-- Function that returns a student's average grade
promedio :: Integer -> [(Integer, [Char], [Double])] -> Double
promedio _ [] = 0.0 -- base case: student not found
-- compute the average
promedio mat1 ((mat2, _, parcialista) : resto) =
  if mat1 == mat2 then
    -- average of this student's partial grades
    sum parcialista / fromIntegral (length parcialista)
  else
    promedio mat1 resto -- keep searching through the rest of the list

-- Problem 2: leaves
-- Function that returns all the leaves of a binary tree
data BinTree a = Empty | Node (BinTree a) a (BinTree a) deriving (Eq, Show)

hojas :: BinTree a -> [a]
hojas Empty = [] -- base case
hojas (Node Empty current Empty) = [current] -- a node with no children is a leaf
hojas (Node left _ right) = hojas left ++ hojas right
  -- keep descending into the left and right subtrees
  -- until only the leaf values are returned

-- Problem 3: binariza
-- Function that replaces odd numbers with 0 and even numbers with 1
binariza :: Integer -> [(Char, [Integer])]
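-- Hedged sketch (added; the original file ends with the type signature above
-- and no binding). The declared signature is left untouched; going only by the
-- comment's description (odd -> 0, even -> 1), a possible version under a
-- different, hypothetical name would be:
binariza' :: [Integer] -> [Integer]
binariza' = map (\n -> if even n then 1 else 0)
-- e.g. binariza' [1,2,3,4] == [0,1,0,1]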
JorgeRubio96/LenguajesdeProgramacion
a00368770_a01273613_ejercicio13.hs
Haskell
mit
1,058
-- The Greek letter λ is replaced by the backslash "\";
-- The dot "." is replaced by ->.
-- For example, λx.x² is written in Haskell as: \x->x^2 or, alternatively, \x->x*x.

quad = \x->x*x
expr = \x->x^2+2*x+3
raiz = \x->(sqrt x)

map (f x = x*x) [1..10]
-- <interactive>:1:9: parse error on input ‘=’
map (\x->x*x) [1..10]

f x = x*x
f [1..10]

fac n = if n==1 then 1 else (n*fac(n-1))
map (\x->(fac x)) [1..10]

{- To load a module in the interpreter: ':module List' -}
deleteBy (\x y -> y*x == 48) 6 [6,8,10,12] --fail

let p x y = if x `mod` y == 0 then True else False
let remove y list = [x | x <- list, not (p x y)]
Prelude> remove 4 [4..19]
[5,6,7,9,10,11,13,14,15,17,18,19]

[ x | x <- [1..4], y <- [x..5], (x+y) `mod` 2 == 0 ]
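-- Added sketch (not part of the original lab notes): the same filtering can be
-- done directly with a lambda, mirroring the `remove 4 [4..19]` call above:
Prelude> filter (\x -> x `mod` 4 /= 0) [4..19]
[5,6,7,9,10,11,13,14,15,17,18,19]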
tonussi/freezing-dubstep
pratica-03/Intro.hs
Haskell
mit
783
{-# LANGUAGE ScopedTypeVariables #-} -- allows "forall t. Moment t" import Network.Socket import System.IO import System.Exit import Control.Concurrent import Control.Monad import Data.Maybe import Data.Word import qualified Data.Map.Strict as Map import Graphics.UI.Gtk as Gtk hiding (Event) import Graphics.UI.Gtk.Builder import Reactive.Util import Reactive.Banana import Reactive.Banana.Gtk import IrcServer as S import IrcMessage as M data MsgPart = MsgIcon String | MsgText [TextTag] String main :: IO () main = do initGUI bldWnd <- builderNew builderAddFromFile bldWnd "dirc.glade" bld <- builderNew builderAddFromFile bld "server.glade" dlg <- builderGetObject bldWnd castToWindow "main-dialog" closeBtn <- builderGetObject bldWnd castToButton "close-button" msgTabs <- builderGetObject bldWnd castToNotebook "message-tabs" msgPage <- builderGetObject bld castToBox "server-page" msgTxt <- builderGetObject bld castToTextView "message-text" notebookAppendPage msgTabs msgPage "Server" buffer <- textViewGetBuffer msgTxt tagTbl <- textBufferGetTagTable buffer fontTag <- textTagNew Nothing set fontTag [ textTagFont := "Courier 12" ] textTagTableAdd tagTbl fontTag motdTag <- textTagNew Nothing set motdTag [ textTagParagraphBackground := "yellow", textTagWeight := 700 ] textTagTableAdd tagTbl motdTag boldTag <- textTagNew Nothing set boldTag [ textTagWeight := 700 ] textTagTableAdd tagTbl boldTag ulTag <- textTagNew Nothing set ulTag [ textTagUnderline := UnderlineSingle ] textTagTableAdd tagTbl ulTag italicTag <- textTagNew Nothing set italicTag [ textTagStyle := StyleItalic ] textTagTableAdd tagTbl italicTag let colors = [ "white", "black", "navy", "green", "red", "brown" , "purple", "olive drab", "yellow", "lime green", "turquoise4" , "cyan1", "blue", "magenta", "gray55", "gray90", "white" ] let insertTag attr num name map = do tag <- textTagNew Nothing set tag [ attr := name ] textTagTableAdd tagTbl tag return $ Map.insert num tag map let mkColorMap f = foldM (\m (k, v) -> f k v m) Map.empty $ zip [0..] 
colors fgColorMap <- mkColorMap $ insertTag textTagForeground bgColorMap <- mkColorMap $ insertTag textTagBackground exit <- newEmptyMVar esmsg <- newAddHandler esquit <- newAddHandler let handler = tryEvent $ do liftIO $ fire esquit () closeBtn `on` Gtk.buttonReleaseEvent $ handler dlg `on` Gtk.deleteEvent $ handler let toMsg :: [Int] -> [TextTag] -> [IrcText] -> [MsgPart] toMsg [] tags (Text text:ms) = (MsgText tags text:toMsg [] tags ms) toMsg (fgc:bgc:[]) tags (Text text:ms) = let fgTag = maybeToList $ Map.lookup fgc fgColorMap bgTag = maybeToList $ Map.lookup bgc bgColorMap in (MsgText (concat [fgTag, bgTag, tags]) text:toMsg [fgc, bgc] tags ms) toMsg colors tags (Bold:ms) = toMsg colors (boldTag:tags) ms toMsg colors tags (Underlined:ms) = toMsg colors (ulTag:tags) ms toMsg colors tags (Italic:ms) = toMsg colors (italicTag:tags) ms toMsg colors tags (Reset:ms) = toMsg colors [fontTag] ms toMsg (_:bgc:[]) tags (Foreground c:ms) = toMsg (c:bgc:[]) tags ms toMsg (fgc:_:[]) tags (Background c:ms) = toMsg (fgc:c:[]) tags ms toMsg (fgc:bgc:[]) tags (Reverse:ms) = toMsg (bgc:fgc:[]) tags ms toMsg colors tags [] = [] let handleMsg msg = postGUIAsync $ do case msg of (Notice sender target text) -> insertMsg (MsgIcon "icon-info.svg":toMsg [1, 0] [fontTag] text) (Generic sender nickname text) -> insertMsg $ toMsg [1, 0] [fontTag] text (Welcome sender nickname text) -> insertMsg $ toMsg [1, 0] [fontTag] text (YourHost sender nickname text) -> insertMsg $ toMsg [1, 0] [fontTag] text (Created sender nickname text) -> insertMsg $ toMsg [1, 0] [fontTag] text (MotD sender nickname text) -> insertMsg $ toMsg [2, 8] [fontTag, motdTag] text (MotDStart sender nickname text) -> insertMsg $ toMsg [2, 8] [fontTag, motdTag] [Text " "] (MotDEnd sender nickname text) -> insertMsg $ toMsg [2, 8] [fontTag, motdTag] [Text " "] (Channel sender target channel _ topic) -> insertMsg $ MsgText [fontTag] (channel ++ ": "):toMsg [1, 0] [fontTag] topic msg -> putStrLn $ show msg insertMsg msg = do m <- textBufferGetInsert buffer i <- textBufferGetIterAtMark buffer m case msg of (MsgIcon icon:ms) -> do b <- pixbufNewFromFile icon textBufferInsertPixbuf buffer i b textBufferInsertAtCursor buffer " " insertMsg ms (MsgText tags text:ms) -> do o <- textIterGetOffset i textBufferInsertAtCursor buffer text i1 <- textBufferGetIterAtOffset buffer o i2 <- textBufferGetIterAtMark buffer m let applyTags (t:ts) = do textBufferApplyTag buffer t i1 i2 applyTags ts applyTags [] = do return () applyTags tags insertMsg ms [] -> textBufferInsertAtCursor buffer "\n" handleQuit = putMVar exit ExitSuccess network <- compile $ setupNetwork (esmsg, esquit) handleMsg handleQuit actuate network sChan <- newChan S.startServer "irc.freenode.net" 6665 esmsg sChan --S.startServer "irc.dal.net" 7000 esmsg sChan writeChan sChan M.Nick { nickname = "dbanerjee1979" } writeChan sChan M.User { username = "guest", modeMask = 0, realname = "Joe" } writeChan sChan M.Join { channel = "#haskell" } widgetShowAll dlg forkOS mainGUI signal <- takeMVar exit postGUIAsync mainQuit exitWith signal setupNetwork :: forall t. Frameworks t => (EventSource Message, EventSource ()) -> (Message -> IO ()) -> IO () -> Moment t () setupNetwork (esmsg, esquit) handleMsg handleQuit = do emsg <- fromAddHandler (addHandler esmsg) equit <- fromAddHandler (addHandler esquit) reactimate $ handleMsg <$> emsg reactimate $ handleQuit <$ equit
dbanerjee1979/dirc
Dirc.hs
Haskell
mit
7,122
{- see Chapter 17 of the Haskell 2010 Language Report -}

module Data.Complex where
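-- Added note (assumption, since this module is deliberately left as a stub):
-- Chapter 17 of the Report builds the library around a single data type,
-- roughly
--   data (RealFloat a) => Complex a = !a :+ !a
-- plus selectors such as realPart and imagPart; kept as a comment here so the
-- stub stays empty.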
evilcandybag/JSHC
hslib/Data/Complex.hs
Haskell
mit
86
{-# LANGUAGE OverloadedStrings #-}

module Main (main) where

import Network.JsonRpc.Server
import System.IO (BufferMode (LineBuffering), hSetBuffering, stdout)
import qualified Data.ByteString.Lazy.Char8 as B
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
import Control.Monad (forM_, when)
import Control.Monad.Trans (liftIO)
import Control.Monad.Except (throwError)
import Control.Monad.Reader (ReaderT, ask, runReaderT)
import Control.Concurrent.MVar (MVar, newMVar, modifyMVar)

main = do
  hSetBuffering stdout LineBuffering
  contents <- B.getContents
  count <- newMVar 0
  forM_ (B.lines contents) $ \request -> do
    response <- runReaderT (call methods request) count
    B.putStrLn $ fromMaybe "" response

type Server = ReaderT (MVar Integer) IO

methods :: [Method Server]
methods = [add, printSequence, increment]

add = toMethod "add" f (Required "x" :+: Required "y" :+: ())
  where f :: Double -> Double -> RpcResult Server Double
        f x y = liftIO $ return (x + y)

printSequence = toMethod "print_sequence" f params
  where params = Required "string" :+:
                 Optional "count" 1 :+:
                 Optional "separator" ',' :+: ()
        f :: String -> Int -> Char -> RpcResult Server ()
        f str count sep = do
          when (count < 0) $ throwError negativeCount
          liftIO $ print $ intercalate [sep] $ replicate count str
        negativeCount = rpcError (-32000) "negative count"

increment = toMethod "increment_and_get_count" f ()
  where f :: RpcResult Server Integer
        f = ask >>= \count -> liftIO $ modifyMVar count inc
          where inc x = return (x + 1, x + 1)
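-- Hedged usage sketch (request/response shapes follow the JSON-RPC 2.0 spec;
-- the exact field order and number formatting below are assumptions): piping
-- one request per line to the demo,
--   {"jsonrpc": "2.0", "method": "add", "params": {"x": 1, "y": 2}, "id": 1}
-- yields a response along the lines of
--   {"result": 3, "jsonrpc": "2.0", "id": 1}
-- while a notification (a request without an "id") prints an empty line,
-- because `call` returns Nothing and `fromMaybe ""` is used above.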
grayjay/json-rpc-server
demo/Demo.hs
Haskell
mit
1,690
sommeDeXaY x y = if x > y then 0 else x + sommeDeXaY (x+1) y

somme :: [Int] -> Int
somme [] = 0
somme (x:xs) = x + somme xs

last' :: [a] -> a
last' xs = head (reverse xs)

init' :: [a] -> [a]
init' xs = reverse (tail (reverse xs))

-- Function !!
(!!!) :: [a] -> Int -> a
(!!!) [] n = error "Index too large"
(!!!) (x:xs) n = if (n == 0) then x else (!!!) xs (n -1)

-- Function ++
plus' :: [a] -> [a] -> [a]
plus' [] ys = ys
plus' (x:xs) ys = x:(plus' xs ys)

-- Function concat
concate' :: [[a]] -> [a]
concate' [] = []
concate' [[]] = []
concate' (xs:xss) = xs ++ concate' xss
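-- Added GHCi sketch (not part of the original exercise file):
--   sommeDeXaY 1 4        == 10
--   somme [1,2,3]         == 6
--   [10,20,30] !!! 1      == 20
--   plus' [1,2] [3]       == [1,2,3]
--   concate' [[1],[2,3]]  == [1,2,3]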
Debaerdm/L3-MIAGE
Programmation Fonctionnel/TP/TP1/sommeDeXaY.hs
Haskell
mit
594
module Lycopene.ApplicationSpec (spec) where import Test.Hspec import Test.QuickCheck import Lycopene.SpecTool import qualified Lycopene.Core as Core spec :: Spec spec = do before localEngine $ do describe "Event" $ do it "create a project" $ \engine -> do created <- runEngine engine (Core.NewProject "new" Nothing) fetched <- runEngine engine (Core.FetchProject "new") fetched `shouldBe` created it "remove a project" $ \engine -> do _ <- runEngine engine (Core.NewProject "new" Nothing) runEngine engine (Core.RemoveProject "new") fetched <- runEngine engine Core.AllProject (mapR length fetched) `shouldBe` (Right 0) it "fetch backlog sprints created on project creation" $ \engine -> do _ <- runEngine engine (Core.NewProject "new" Nothing) fetched <- runEngine engine (Core.FetchProjectSprint "new") (mapR length fetched) `shouldBe` (Right 1) it "fetch a backlog sprint" $ \engine -> do _ <- runEngine engine (Core.NewProject "new" Nothing) fetched <- runEngine engine (Core.FetchSprint "new" "backlog") (mapR Core.sprintName fetched) `shouldBe` (Right "backlog") it "create a issue" $ \engine -> do _ <- runEngine engine (Core.NewProject "new" Nothing) created <- runEngine engine (Core.NewIssue "new" "backlog" "issue") fetched <- runEngine engine (Core.FetchIssues "new" "backlog" Core.IssueOpen) (mapR (Core.issueTitle . head) fetched) `shouldBe` (Right "issue")
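-- Added note (observation about the specs above, not project documentation):
-- each case drives the Core event API end to end through a fresh local
-- engine: NewProject/FetchProject round-trips, RemoveProject empties
-- AllProject, and a "backlog" sprint is created implicitly with every new
-- project, which is what the FetchProjectSprint and NewIssue cases rely on.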
utky/lycopene
tests/Lycopene/ApplicationSpec.hs
Haskell
apache-2.0
1,633
-- |Constraints that specify a control flow analysis for Javascript. module Ovid.Constraints ( initialize , stmt , expr , AnnotatedStatement , AnnotatedExpression , ParentNodeT , JsCFAState(..) , CFAOpts (..) ) where -- #define CONSERVATIVE_MODE -- #define DEBUG_XHR -- #define DEBUG_BUILTINS -- #define CONSERVATIVE_REFERENCES -- #define CONSERVATIVE_LVALS -- #define TRACE_APPLICATION -- #define TRACE_ASSIGNMENT -- #define PENTIUM_MMX_LOOP_OPTIMIZATIONS import Prelude hiding (catch) import qualified Data.Map as M import qualified Data.List as L import qualified Data.Foldable as F import Data.Generics import Data.Typeable import Framework import WebBits.JavaScript.JavaScript hiding (Expression,Statement) import qualified WebBits.JavaScript.JavaScript as Js -- 'buildJsEnv' is required for `on-demand JavaScript' import Ovid.Environment (Ann,AdditionalAnnotation(..),AnnotatedExpression, AnnotatedStatement) import CFA.Labels import Ovid.Abstraction import CFA import Ovid.Prelude import Data.InductiveGraph.Class import Ovid.ConstraintUtils import Ovid.Interactions -- required for `on demand JavaScript' import WebBits.Html.Html (parseHtmlFromString,parseHtmlFromFile) import WebBits.JavaScript.Crawl (getPageJavaScript) import Ovid.DOM (topLevelIds) import qualified System.IO as IO import Data.Store -- ----------------------------------------------------------------------------- -- Miscellaneous primNumeric OpAdd = Just (+) primNumeric OpMul = Just (*) primNumeric OpDiv = Just (/) primNumeric OpSub = Just (-) primNumeric _ = Nothing primLogical OpLT = Just (<) primLogical OpLEq = Just (<=) primLogical OpGT = Just (>) primLogical OpGEq = Just (>=) primLogical _ = Nothing logicalAssignOps = [OpAssignBAnd, OpAssignBXor, OpAssignBOr] numericAssignOps = [OpAssignAdd, OpAssignSub, OpAssignMul, OpAssignDiv, OpAssignMod, OpAssignLShift, OpAssignSpRShift, OpAssignZfRShift] logicalInfixOps = [OpLT .. OpLOr] arithmeticInfixOps = [OpMul .. OpBOr] lookupErr :: (Monad m, Ord k) => String -> k -> M.Map k v -> m v lookupErr errStr k t = case M.lookup k t of Just k -> return k Nothing -> fail ("Ovid.Constraints: " ++ errStr) idv :: Monad m => Id Ann -> m Label idv (Id (_,lbl,_) _) = return lbl type Expression = Js.Expression Ann type Statement = Js.Statement Ann type ParentNodeT n m = StateT n m -- |We need to be able to have multiple invocations of document.write on the stack. isRecursionAllowed _ lbl | isBuiltinLabel lbl = True isRecursionAllowed srcs lbl = case labelSource lbl of Nothing -> False Just src -> src `elem` srcs isPreciseArithmetic lbl = do JsCFAState { jscfaOpts = opts } <- lift get case labelSource lbl of Nothing -> return False Just src -> return (src `elem` cfaOptsPreciseArithmetic opts) isPreciseConditionals lbl = do JsCFAState { jscfaOpts = opts } <- lift get case labelSource lbl of Nothing -> return False Just src -> return (src `elem` cfaOptsPreciseConditionals opts) application :: (MonadIO m) => (Label,Contour) -- ^set of functions -> [(Label,Contour)] -- ^sets of arguments -> Contour -- ^dynamic call stack -> (Label,Contour) -- ^result set (out-flow) -> CfaT Value (StateT (JsCFAState Contour) m) () application svF actuals ct svCxt = do -- Mark 'svF' as a function set. After the analysis is complete, we can check if any function values flowed into this -- set. If not, it's a good sign that we missed modelling some part of the DOM. markFunctionSet svF -- Options governing the analysis. 
JsCFAState { jscfaOpts = opts } <- lift get let expSrcs = cfaOptUnlimitedRecursion opts -- Invoke callback for a piggybacked analysis. -- applicationHook <- callHook svF actuals ct svCxt let app fn@(ABuiltin{aBuiltinName=name,aBuiltinLabel=lbl, aBuiltinThis=this}) = do {- #ifdef TRACE_APPLICATION warn $ "builtin application; lbl " ++ show lbl #endif -} -- warn $ "Applying " ++ show lbl builtinDispatch name ct ((this,ct):actuals) svCxt -- dummy 'return []' or we violate let-polymorphism! -- applicationHook fn (dispatch >> return []) return () -- Check to see if this function is on the stack. If it is, we don't -- apply it, unless its label indicates that it should be permitted to -- recurse. app fn@(AFunction{aFunctionLabel=lbl}) | lbl `elem` callStack ct && (not $ isRecursionAllowed expSrcs lbl) = do warn $ "Ignoring an application of " ++ show lbl ++ " from " ++ show (fst svCxt) return () app fn@(AFunction{aFunctionLabel=lbl,aFunctionArgs=formals,aFunctionLocals=locals,aFunctionBody=(EqualOrd body), aFunctionEnv=ceClosure,aFunctionThis=thisLbl}) = do {- #ifdef TRACE_APPLICATION warn $ "Call stack of length " ++ (show $ length $ callStack ct) #endif -} size <- currentSetSize svF -- warn $ "Applying " ++ show lbl when (size > 5) (warn $ "WARNING: " ++ show size ++ " functions in a function set") let svFormals = map (\lbl -> (lbl,ct)) formals let svLocals = map (\lbl -> (lbl,ct)) locals let this = (thisLbl,ct) let ce = M.union (M.fromList (svFormals ++ svLocals)) ceClosure -- create the array of arguments argumentsLbl <- uniqueLabel let arguments = (argumentsLbl,ct) newArray arguments actuals -- flow actuals into the formals in this contour mapM_ (uncurry subsetOf) (zip (this:arguments:actuals) svFormals) -- flow undefined into the arguments that are not supplied let undefinedArgs = drop (length $ this:arguments:actuals) svFormals mapM_ (newValue UndefinedVal) undefinedArgs -- flow the results into the context of the call svResults <- {- applicationHook fn -} (stmt ce (newCall lbl ct) body) mapM_ (\set -> subsetOf set svCxt) svResults app UndefinedVal = return () app v = do warn $ "Ovid.Constraints.application: non-function in " ++ "function set : " ++ show v warn $ "function at " ++ show svF propagateTo svF svCxt app showProp (PropId _ id) = show id showProp (PropString _ s) = s showProp (PropNum _ n) = show n -- |Generates constraints for the expression. expr :: (MonadIO m) => M.Map Label Contour -> Contour -> AnnotatedExpression -> CfaT Value (StateT (JsCFAState Contour) m) (Label,Contour) expr ce ct e = {- exprHook ce ct e >>= \r -> -} case e of StringLit (_,lbl,_) s -> do -- This represents a standard problem. The object we create must have the label lbl' since its the only concrete -- label available in this expression. Suppose the outer expression is an application. In an application, we must -- prime the values sets of the argument array. We cannot prime the formal-labels, as they are shared between -- applications. So, we have to prime the arguments. However, since this is an argument, it is already primed. 
newString (lbl,ct) (SConst s) return (lbl,ct) RegexpLit (_,lbl,_) s b c -> do newValue (ARegexp s b c) (lbl,ct) return (lbl,ct) NumLit (_,lbl,_) n -> do newValue (ANum n) (lbl,ct) return ( lbl,ct) BoolLit (_,lbl,_) b -> do newValue (ABool b) (lbl,ct) return ( lbl,ct) NullLit (_,lbl,_) -> do newValue (NullVal) (lbl,ct) return ( lbl,ct) ArrayLit (_,lbl,_) es -> do elems <- mapM (expr ce ct) es newArray (lbl,ct) elems return (lbl,ct) ObjectLit (_,lbl,_) props -> do let prop (p,e) = do eSet <- expr ce ct e return (showProp p,eSet) propSets <- mapM prop props newObject (lbl,ct) propSets return (lbl,ct) ThisRef (_,lbl,_) -> do thisCt <- lookupErr ("`this' is unbound") lbl ce return (lbl,thisCt) VarRef (_,lbl,_) id -> do -- a bound identifier; not an assignment idCt <- lookupErr ("unbound identifier: " ++ show id) lbl ce subsetOf (lbl,idCt) (lbl,ct) return (lbl,ct) -- we *always* return the context DotRef (_,lbl,_) obj (Id _ propId) -> do -- obj.prop objSet <- expr ce ct obj propagateProperty objSet (lbl,ct) propId $ \propSet -> subsetOf propSet (lbl,ct) return (lbl,ct) BracketRef (_,lbl,_) obj prop -> do -- obj[prop] let stx = (lbl,ct) objStx <- expr ce ct obj propIdStx <- expr ce ct prop flow1 objStx stx (const Nothing) propagateTo propIdStx stx $ \propVal -> case propVal of AObject{aObjectX=(PString sp)} -> case unStringPat sp of Just propId -> do unsafePropagateProperty objStx stx propId $ \vals -> subsetOf vals stx Nothing -> do warnAt "indeterminate string used for property reference" (show ct) propagatePropertySetTo objStx stx $ \set -> propagate set $ \val -> case val of AProperty _ (ValueSet valSet) -> subsetOf valSet stx otherwise -> fail $ "non-property value in property set at " ++ show stx ++ "; value is " ++ show val ANum propIx -> unsafePropagateProperty objStx stx (show $ truncate propIx) $ \vals -> subsetOf vals stx AnyNum -> do -- This is far too common with arrays. -- warnAt "indeterminate number used for property reference" (show ct) propagatePropertySetTo objStx stx $ \set -> propagate set $ \val -> case val of AProperty _ (ValueSet valSet) -> subsetOf valSet stx otherwise -> fail $ "non-property value in property set at " ++ show stx ++ "; value is " ++ show val UndefinedVal -> return () otherwise -> warn $ "non-indexable value (" ++ show propVal ++ ") at " ++ show stx return stx NewExpr (_,lbl,_) constr args -> do -- We are essentially setting up a function call; the only difference is the construction of `this.' constrSet <- expr ce ct constr -- the function / constructor argSets <- mapM (expr ce ct) args -- the contour of the function call ct' <- extendContour lbl ct -- discard the return value of the function; note that the result flows -- into (lbl,ct') so that we can tell when we are applying at the edge of -- the contour. However, the result set is (lbl,ct). 
application constrSet argSets ct' (lbl,ct') propagateTo constrSet (lbl,ct) $ \val -> case (closureThis val,objectProperties val) of (Just this,Nothing) -> do -- builtin with no prototype subsetOf (this,ct') (lbl,ct) (Just this,Just propSet) -> do -- function/builtin with a prototype -- create an empty object obj@(AObject{aObjectProps=props}) <- newObject (this,ct') [] -- copy the members of the prototype over propagatePropertyOf val (lbl,ct) "prototype" $ \proto -> propagatePropertySetTo proto props $ \propSet -> do flow1 propSet props justProperty -- obj.prototype = constr newValue (AProperty "prototype" (ValueSet constrSet)) props -- flow the set of objects (thises) into the result subsetOf (this,ct') (lbl,ct) otherwise -> do warn $ "non-function value " ++ show val ++ " in " ++ show e return (lbl,ct) PostfixExpr (_,lbl,_) op e -> do -- e++ or e-- eSet <- expr ce ct e flow1 eSet eSet $ \val -> case val of ANum _ -> Just AnyNum otherwise -> Nothing subsetOf eSet (lbl,ct) return (lbl,ct) PrefixExpr (_,lbl,_) op e -> do eSet <- expr ce ct e stringPrototype <- builtinPrototype "String" flow1 eSet eSet $ \val -> case val of ANum _ -> Just AnyNum ABool _ -> Just AnyBool otherwise -> Nothing flow1 eSet (lbl,ct) $ \val -> case op of -- ++e and --e are the only operators that side-effect e PrefixInc -> Just AnyNum PrefixDec -> Just AnyNum PrefixLNot -> case val of _ | isTrueValue val -> Just (ABool False) _ | isFalseValue val -> Just (ABool True) otherwise -> Just AnyBool PrefixBNot -> Just AnyNum PrefixPlus -> Nothing -- e == +e PrefixMinus -> Just AnyNum PrefixTypeof -> case val of AFunction{} -> Just $ AObject (primeLabel lbl 1) stringPrototype (PString $ SConst "function") otherwise -> Just AnyBool -- TODO : distinction between void and undefined? check! PrefixVoid -> Just UndefinedVal PrefixDelete -> Just UndefinedVal return (lbl,ct) InfixExpr (_,lbl,_) op l r -> do let cxt = (lbl,ct) lVar <- expr ce ct l rVar <- expr ce ct r isPrecise <- isPreciseArithmetic lbl -- do you expect us to actually add? isPreciseIf <- isPreciseConditionals lbl -- do you expect us to actually branch? 
let prototype = primeSet cxt 1 -- we may use this case op of OpLOr -> flow2 lVar rVar cxt $ \lv rv -> if isTrueValue lv then Just lv else if isFalseValue lv then Just rv else Just AnyBool _ | op `elem` [OpLT,OpLEq,OpGT,OpGEq,OpIn,OpEq,OpNEq,OpStrictEq,OpStrictNEq,OpLAnd,OpIn] -> flow2 lVar rVar cxt $ \l r -> case (l,r,primLogical op) of (ANum m,ANum n,Just op) -> Just $ if m `op` n then (ABool True) else (ABool False) (AObject{aObjectX=(PString sp1)},AObject{aObjectX=(PString sp2)},Nothing) | op == OpLEq -> Just $ ABool (sp1 == sp2) otherwise -> Just AnyBool _ | op `elem` [OpMul,OpDiv,OpMod,OpSub] -> flow2 lVar rVar cxt $ \l r -> case (l,r,primNumeric op) of (ANum m,ANum n,Just op) -> Just $ ANum (m `op` n) otherwise -> Just AnyNum _ | op `elem` [OpLShift,OpSpRShift,OpZfRShift,OpBAnd,OpBXor,OpBOr] -> flow2 lVar rVar cxt $ \_ _ -> Just AnyNum OpInstanceof -> flow2 lVar rVar cxt $ \val prototype -> case (val,prototype) of (AObject{aObjectX=PString{}}, ABuiltin{aBuiltinName="String"}) -> Just (ABool True) (_ , ABuiltin{aBuiltinName="String"}) -> Just (ABool False) (AObject{aObjectX=PArray},ABuiltin{aBuiltinName="Array"}) -> Just (ABool True) (AObject{aObjectX=PArray},_) -> Just (ABool False) otherwise -> Just AnyBool OpAdd -> flow2 lVar rVar cxt $ \l r -> case (l,r) of (ANum m,ANum n) | isPrecise -> Just $ ANum (m+n) | otherwise -> Just AnyNum (ANum _,AnyNum) -> Just AnyNum (AnyNum,_) -> Just AnyNum (AObject{aObjectX=(PString s)},AObject{aObjectX=(PString t)}) -> Just $ (AObject lbl prototype (PString $ stringPatCat s t)) (AObject{aObjectX=(PString s)}, ANum n) -> Just $ (AObject lbl prototype (PString $ stringPatCat s (SConst $ show n))) (AObject{aObjectX=(PString s)},_) -> Just $ (AObject lbl prototype (PString $ stringPatCat s SAny)) otherwise -> Nothing otherwise -> fail $ "Ovid.Constraints.expr : unaccounted infix operator -- " ++ show op -- flow primitive prototypes as needed stringPrototype <- builtinPrototype "String" propagate cxt $ \val -> case val of AObject{aObjectX=(PString _),aObjectProps=props} -> subsetOf stringPrototype props otherwise -> return () return cxt CondExpr (_,lbl,_) test true false -> do expr ce ct test trueVar <- {- branchHook r -} (expr ce ct true) falseVar <- {- branchHook r -} (expr ce ct false) subsetOf trueVar (lbl,ct) subsetOf falseVar (lbl,ct) -- joinHook [trueR,falseR] r return (lbl,ct) AssignExpr (_,lbl,_) op l r | op == OpAssign -> do lVar <- lval ce ct l rVar <- expr ce ct r subsetOf rVar lVar subsetOf lVar (lbl,ct) return (lbl,ct) | op `elem` logicalAssignOps -> do lVar <- lval ce ct l rVar <- expr ce ct r flow1 rVar lVar $ \_ -> Just AnyBool subsetOf lVar (lbl,ct) return (lbl,ct) | op `elem` numericAssignOps -> do lVar <- lval ce ct l rVar <- expr ce ct r flow1 rVar lVar $ \_ -> Just AnyNum subsetOf lVar (lbl,ct) return (lbl,ct) | otherwise -> do fail $ "Program bug: the operator " ++ show op ++ " was unclassified" ParenExpr _ e -> expr ce ct e ListExpr _ [] -> fail "Ovid.Constraints.expr : empty list expression (program bug)" ListExpr (_,lbl,_) es -> do eVars <- mapM (expr ce ct ) es subsetOf (last eVars) (lbl,ct) return (lbl,ct) CallExpr (_,lbl,_) f@(DotRef (_,methodLbl,_) obj (Id _ propId)) args -> do let cxt = (lbl,ct) objSet <- expr ce ct obj -- the object whose method we are caling argSets <- mapM (expr ce ct) args -- the arguments to the method ct' <- extendContour lbl ct -- contour of the call application (methodLbl,ct) argSets ct' (lbl,ct) -- flow the method into (methodLbl,ct), for consistency propagateProperty objSet cxt propId $ \fnSet -> 
do subsetOf fnSet (methodLbl,ct) propagateTo fnSet cxt $ \fnVal -> case closureThis fnVal of -- setup `this' Just thisLbl -> subsetOf objSet (thisLbl,ct') Nothing -> return () -- `application' will handle the warning return (lbl,ct) CallExpr (_,lbl,_) f@(BracketRef (_,methodLbl,_) obj prop) args -> do let cxt = (lbl,ct) objSet <- expr ce ct obj propSet <- expr ce ct prop argSets <- mapM (expr ce ct) args ct' <- extendContour lbl ct -- contour of the call application (methodLbl,ct) argSets ct' (lbl,ct) -- the application *must* be here for the call graph propagateTo propSet cxt $ \propVal -> case asPropId propVal of -- select the value of the property Nothing -> warn $ "indeterminate index at " ++ show propSet ++ "; value is " ++ show propVal Just propId -> propagateProperty objSet cxt propId $ \fnSet -> do subsetOf fnSet (methodLbl,ct) propagateTo fnSet cxt $ \fnVal -> case closureThis fnVal of -- setup `this' Just thisLbl -> subsetOf (objSet) (thisLbl,ct') Nothing -> return () -- application will display a warning return (lbl,ct) CallExpr (_,lbl,ann) f args -> do {- #ifdef TRACE_APPLICATION warn $ "call: " ++ (show f) #endif -} (f:args) <- mapM (expr ce ct) (f:args) ct' <- extendContour lbl ct application f args ct' (lbl,ct) return (lbl,ct) FuncExpr (_,lbl,FnA{fnannEnclosing=enclosing,fnannLocals=locals,fnannThisLbl=thisLbl}) args body -> do formals <- mapM idv args -- the function value creates an implicit object let objSet = primeSet (lbl,ct) 1 -- members of Function.prototype are copied to the function object -- TODO: object.prototype = Function JsCFAState {jscfasBuiltins=builtins} <- lift get prototypeSet <- builtinPrototype "Function" subsetOf prototypeSet objSet newValue (AFunction lbl formals locals (EqualOrd body) ce objSet thisLbl) (lbl,ct) -- acceptable newValue return (lbl,ct) FuncExpr (_,_,ann) _ _ -> fail $ "(bug) Ovid.Constraints.expr: unexpected annotation on a function" ++ show ann ++ ", function was " ++ show e -- The label on e must be returned. (er. why?) lval :: (MonadIO m) => M.Map Label Contour -> Contour -> AnnotatedExpression -> CfaT Value (StateT (JsCFAState Contour) m) (Label,Contour) lval ce ct expr@(VarRef (_,lbl,_) id) = do ct' <- lookupErr ("lval : " ++ show lbl ++ " is not in ce :" ++ show id ++ "\n" ++ show expr) lbl ce return (lbl,ct') lval ce ct (DotRef (_,lbl,_) obj (Id _ propId)) = do -- obj.prop objSet <- expr ce ct obj propagateProperty objSet (lbl,ct) propId $ \valSet -> do {-flow1 (lbl,ct) valSet (const Nothing) propagate (lbl,ct) $ \val -> do clearValues valSet newValue val valSet -} removeValue UndefinedVal valSet subsetOf (lbl,ct) valSet return (lbl,ct) lval ce ct e@(BracketRef (_,lbl,_) obj prop) = do -- obj[prop] let stx = (lbl,ct) objSet <- expr ce ct obj propSet <- expr ce ct prop flow1 objSet (lbl,ct) (const Nothing) propagateTo propSet stx $ \propVal -> case asPropId propVal of Just propId -> unsafePropagateProperty objSet (lbl,ct) propId $ \valSet -> do {-flow1 (lbl,ct) valSet (const Nothing) propagate (lbl,ct) $ \val -> do clearValues valSet newValue val valSet-} removeValue UndefinedVal valSet subsetOf (lbl,ct) valSet Nothing | propVal == UndefinedVal -> return () | otherwise -> do warn $ "Indeterminate index at " ++ show stx ++ "--assigning to all values. 
The index was " ++ show propVal propagatePropertySetTo objSet stx $ \propSet -> propagateTo propSet (lbl,ct) $ \propVal -> case propVal of --TODO : overstructured AProperty _ (ValueSet valSet) -> subsetOf (lbl,ct) valSet otherwise -> warn $ "non-property at " ++ show (lbl,ct) ++ "; value is " ++ show propVal return (lbl,ct) lval _ _ e = fail $ "Invalid l-value: " ++ show e caseClause ce ct (CaseClause l e ss) = expr ce ct e >> mapM (stmt ce ct ) ss >>= return . concat caseClause ce ct (CaseDefault l ss) = mapM (stmt ce ct ) ss >>= return . concat catchClause ce ct (CatchClause l id s) = stmt ce ct s varDecl ce ct (VarDecl _ _ Nothing) = return [] varDecl ce ct e'@(VarDecl (_,idLabel,_) _ (Just e)) = do case M.lookup idLabel ce of Just idCt -> do eSet <- expr ce ct e subsetOf eSet (idLabel,idCt) return [] Nothing -> fail $ "varDecl: could not find contour for " ++ (show e') ++ "\n\n" ++ "the environment is " ++ show ce ++ "\n\n" ++ "the label is " ++ show idLabel forInit ce ct NoInit = return [] forInit ce ct (VarInit decls) = mapM (varDecl ce ct) decls >>= return . concat forInit ce ct (ExprInit e) = expr ce ct e >> return [] forInInit (ForInVar id) = id forInInit (ForInNoVar id) = id yl:: (Monad m) => (a -> m [b]) -> Maybe a -> m [b] yl f Nothing = return [] yl f (Just x) = f x stmt :: (MonadIO m) => M.Map Label Contour -- ^environment -> Contour -- ^contour -> Statement -> CfaT Value (StateT (JsCFAState Contour) m) [(Label,Contour)] stmt ce ct s = {- stmtHook ce ct s >>= \r -> -} case s of BlockStmt l ss -> do vss <- mapM (stmt ce ct) ss return (concat vss) EmptyStmt _ -> return [] ExprStmt l e -> expr ce ct e >> return [] IfStmt (_,lbl,_) e s1 s2 -> do let stx = (lbl,ct) testStx <- expr ce ct e markBranch testStx propagateTo testStx stx $ \testVal -> -- constraint identity will evaluate true/false branches at most once if isTrueValue testVal then do results <- stmt ce ct s1 mapM_ (\result -> subsetOf result stx) results else if isFalseValue testVal then do results <- stmt ce ct s2 mapM_ (\result -> subsetOf result stx) results else {- is indeterminate value -} do trueResults <- stmt ce ct s1 falseResults <- stmt ce ct s2 mapM_ (\result -> subsetOf result stx) (trueResults ++ falseResults) return [stx] {- isPrecise <- isPreciseConditionals lbl {- (s1r,s1vs) <- branchHook r (stmt ce ct s1) (s2r,s2vs) <- branchHook r (stmt ce ct s2) joinHook [s1r,s2r] r return (s1vs ++ s2vs) -} if isPrecise -- when using precise conditionals, we don't generate the branch in the -- control flow graph then do let result = primeSet (lbl,ct) 1 propagateTo test (lbl,ct) $ \testVal -> case testVal of ABool True -> do vs <- stmt ce ct s1 mapM_ (\set -> subsetOf set result) vs ABool False -> do vs <- stmt ce ct s2 mapM_ (\set -> subsetOf set result) vs otherwise -> warnAt "imprecise boolean value with precise conditionals" (show ct) return [result] else do (s1r,s1vs) <- branchHook r (stmt ce ct s1) (s2r,s2vs) <- branchHook r (stmt ce ct s2) joinHook [s1r,s2r] r return (s1vs ++ s2vs) -} IfSingleStmt (_,lbl,_) testExpr bodyStmt -> do let stx = (lbl,ct) testStx <- expr ce ct testExpr markBranch testStx propagateTo testStx stx $ \testVal -> if not (isFalseValue testVal) then do results <- stmt ce ct bodyStmt mapM_ (\result -> subsetOf result stx) results else return () return [stx] SwitchStmt l e cs -> expr ce ct e >> mapM (caseClause ce ct ) cs >>= return . 
concat WhileStmt l e s -> expr ce ct e >> stmt ce ct s DoWhileStmt l s e -> expr ce ct e >> stmt ce ct s BreakStmt l yid -> return [] ContinueStmt l yid -> return [] LabelledStmt l id s -> stmt ce ct s ForInStmt (_,lbl,_) init e body -> do -- we unroll for-in loops let stx = (lbl,ct) let (Id (_,varLbl,_) varId) = forInInit init stringPrototype <- builtinPrototype "String" eStx <- expr ce ct e propagateTo eStx stx $ \obj -> case objectProperties obj of Nothing | obj == NullVal -> return () | otherwise -> return () -- warn $ "for-in : non-object value (" ++ show obj ++ ") at " ++ show eStx Just propSet -> propagateTo propSet stx $ \property -> case property of -- propSet is unique for each object AProperty propId _ | (not $ isAbstractArray obj) || (isJust $ tryInt propId) -> do ct' <- extendContour (propLabel varLbl propId) ct newString (varLbl,ct') (SConst propId) results <- stmt (M.insert varLbl ct' ce) ct' body mapM_ (\result -> subsetOf result stx) results -- TODO: not quite right, for-in iterates over non-int indices that are not in the prototype AProperty _ _ | isAbstractArray obj -> return () otherwise -> fail $ "non-property value in property set at " ++ show eStx return [stx] -- special case: for (var id = initExpr; testExpr; id++) bodyStmt (we do postfix decrement here too) {- ForStmt (_,lbl,_) (VarInit [VarDecl (_,idLbl,_) _ (Just initExpr)]) (Just testExpr) ( (Just $ PostfixExpr _ postfixOp (VarRef (_,incrIdLbl,_) _)) bodyStmt | idLbl == incrIdLbl -> do let stx = (lbl,ct) initStx <- expr ce ct initExpr let iterate idStx ix = ct' <- extendContour (primeLabel idLbl ix) ct -- must extend the contour of the call, or the inner constraints won't get created uniquely for each run testStx <- expr (M.insert idLbl ct' ce) ct' testExpr propagate testStx $ \testVal -> case testVal of _ | isFalseValue testVal -> do return () otherwise -> do propagateTo initStx stx $ \init -> do testStx <- expr (M.insert idLbl ct' ce) ct' testExpr -} ForStmt (_,lbl,_) init ye1 ye2 s -> do forInit ce ct init ym (expr ce ct ) ye1 ym (expr ce ct ) ye2 stmt ce ct s TryStmt l s cs ys -> do sv <- stmt ce ct s cvs <- mapM (catchClause ce ct ) cs fvs <- yl (stmt ce ct ) ys return (sv ++ (concat cvs) ++ fvs) ThrowStmt l e -> expr ce ct e >> return [] ReturnStmt l Nothing -> return [] -- TODO : perhaps undefined? ReturnStmt _ (Just e) -> expr ce ct e >>= \v -> return [v] WithStmt loc e s -> expr ce ct e >> stmt ce ct s VarDeclStmt loc ds -> mapM_ (varDecl ce ct ) ds >> return [] FunctionStmt (_,_,FnA{fnannEnclosing=enclosing, fnannLocals=locals, fnannThisLbl=thisLbl}) id args body -> do lbl <- idv id -- the only difference from FuncExpr is the label formals <- mapM idv args -- the function value creates an implicit object let objSet = primeSet (lbl,ct) 1 -- members of Function.prototype are copied to the function object -- TODO: object.prototype = Function JsCFAState {jscfasBuiltins=builtins} <- lift get prototypeSet <- builtinPrototype "Function" subsetOf prototypeSet objSet newValue (AFunction lbl formals locals (EqualOrd body) ce objSet thisLbl) (lbl,ct) -- acceptable newValue return [] FunctionStmt (_,_,ann) _ _ _ -> fail $ "(bug) Ovid.Constraints.stmt: unexpected annotation on a function" ++ show ann ++ ", function was " ++ show s -- |The <unsafe> is necessary to handle leading holes. 
onDemandJavaScript :: (MonadIO m) => Contour -> StringPat -> String -> CfaT Value (StateT (JsCFAState Contour) m) () onDemandJavaScript ct sp sourceName = do let concreteHtml = "<unsafe>" ++ unStringPatForOnDemandJavaScript sp ++ "</unsafe>" -- deals with holes!! warn $ "HTML: " ++ show concreteHtml case parseHtmlFromString sourceName concreteHtml of Left err -> do warn $ "onDemandJavaScript: " ++ show err liftIO $ putStrLn $ "onDemandJavaScript: " ++ show err Right (htmlStx,_) -> do parsedStmts <- liftIO $ getPageJavaScript htmlStx dynamicJavaScriptLoader ct parsedStmts dynamicJavaScriptLoader ct parsedStmts = do globals <- getGlobals fail "dynamicJavaScriptLoader is temporarily disabled" {- (globals,stmts) <- makeDynamicEnv globals parsedStmts setGlobals globals -- (envTree,stmts) <- buildJsEnv topLevelIds parsedStmts warn $ (show $ length stmts) ++ " statements are on-demand" let ce = M.fromList $ map (\lbl -> (lbl,topContour)) globals -- This is correct. Execution is _not_ in the top contour. If it were, our beautiful recursion check -- goes out the door. Moreover, the analysis may not terminate in general, if, for example, factorial -- made its recursive call `on-demand' and we let it happen in the top contour. mapM (stmt ce ct) stmts return () -} dynamicIFrameLoader :: (MonadIO m) => Contour -> StringPat -> CfaT Value (StateT (JsCFAState Contour) m) () dynamicIFrameLoader ct sp = do let src = unStringPatForOnDemandJavaScript sp result <- liftIO $ try (parseHtmlFromFile src) case result of Left (err::IOException) -> tell $ "Error loading script at " ++ show src ++ ":\n" ++ show err Right (Left parseErr) -> tell $ "Parse error reading " ++ show src ++ "; " ++ show parseErr Right (Right (parsedHtml,_)) -> do parsedStmts <- liftIO $ getPageJavaScript parsedHtml dynamicJavaScriptLoader ct parsedStmts onDemandJavaScriptFromFile ct sp = do let src = unStringPatForOnDemandJavaScript sp -- Now, for some real input/output (and not simple mutation). Bye bye functional programming. result <- liftIO $ try (parseJavaScriptFromFile src) case result of Left (err::IOException) -> tell $ "Error loading script at " ++ show src ++ ":\n" ++ show err Right parsedStmts -> dynamicJavaScriptLoader ct parsedStmts -- |It is safe to 'newValue ... cxt'. builtinDispatch :: (MonadIO m) => [Char] -> Contour -> [(Label,Contour)] -> (Label,Contour) -> CfaT Value (StateT (JsCFAState Contour) m) () builtinDispatch "Object" ct _ cxt = do newObject cxt [] return () builtinDispatch "eval" ct _ cxt = do warn "trivial eval definition" newString cxt SAny return () builtinDispatch "Array" ct (this:_) cxt = do newArray cxt [] return () builtinDispatch "Array.concat" ct args@(this:rest) cxt = do arr@(AObject{aObjectProps=arrSet}) <- newArray cxt [] -- this implementation breaks ordering let copy src = propagateTo src cxt $ \val -> case val of AObject {aObjectX=PArray,aObjectProps=props} -> propagateTo props arrSet $ \propVal -> case propVal of AProperty id (ValueSet vals) -> propagateProperty cxt arrSet id $ \vals' -> subsetOf vals vals' otherwise -> warn $ "not a property: " ++ show propVal otherwise -> warn $ "arbitrary flattening at " ++ show cxt ++ "; possible conflation" let flatten src = propagateProperty cxt arrSet "0" $ \propSet -> flow1 src propSet $ \v -> if isAbstractArray v then Nothing else Just v mapM_ copy args mapM_ flatten args -- builtinDispatch "Array.length" ct (this .. 
builtinDispatch "Array.push" ct (this:arg:_) cxt = do propagateTo this cxt $ \val -> case val of AObject{aObjectProps=props,aObjectX=PArray} -> propagateTo arg props $ \argVal -> do -- we increment ix for each incoming value! flow1 arg props (\_ -> Nothing) -- artificial dependency creation, since we have to increment stuff ix' <- currentSetSize props let ix = ix' - 4 -- 4 things in the array prototype let set = primeSet cxt ix newValue argVal set newValue (AProperty (show ix) (ValueSet set)) props -- SAFE? otherwise -> warnAt ("Array.push applied to " ++ show val) (show ct) builtinDispatch "Array.slice" ct [this,begin] cxt = do propagateTo this cxt $ \obj -> case obj of AObject{aObjectProps=ixs,aObjectX=PArray} -> do AObject{aObjectProps=destIxs} <- newArray cxt [] -- each source array maps to a unique destination propagateTo begin cxt $ \begin_ix -> case begin_ix of ANum x -> do let ix_min = truncate x flow1 ixs destIxs $ \prop -> case prop of AProperty id (ValueSet vals) -> case tryInt id of Just ix | ix >= ix_min -> Just $ AProperty (show $ ix - ix_min) (ValueSet vals) | otherwise -> Nothing Nothing -> Nothing otherwise -> Nothing otherwise -> flow1 ixs destIxs $ \prop -> case prop of AProperty id (ValueSet vals) -> case tryInt id of Just ix -> Just prop Nothing -> Nothing otherwise -> Nothing otherwise -> warn $ "Array.slice at " ++ show cxt ++ " applied to " ++ show obj builtinDispatch "Array.slice" ct [this,begin,end] cxt = do propagateTo this cxt $ \obj -> case obj of AObject{aObjectProps=ixs,aObjectX=PArray} -> do AObject{aObjectProps=destIxs} <- newArray cxt [] -- each source array maps to a unique destination propagateTo begin cxt $ \begin_ix -> propagateTo end cxt $ \end_ix -> case (begin_ix,end_ix) of (ANum x,ANum y) -> do let ix_min = truncate x let ix_max = truncate y flow1 ixs destIxs $ \prop -> case prop of AProperty id (ValueSet vals) -> case tryInt id of Just ix | ix >= ix_min && ix < ix_max -> Just $ AProperty (show $ ix - ix_min) (ValueSet vals) | otherwise -> Nothing Nothing -> Nothing otherwise -> Nothing otherwise -> flow1 ixs destIxs $ \prop -> case prop of AProperty id (ValueSet vals) -> case tryInt id of Just ix -> Just prop Nothing -> Nothing otherwise -> Nothing otherwise -> warn $ "Array.slice at " ++ show cxt ++ " applied to " ++ show obj -- .apply with no arguments. builtinDispatch "Function.apply" ct [thisFn,thisObj] cxt = do application thisFn [thisObj] ct cxt propagateTo thisFn cxt $ \val -> case closureThis val of Just thisLbl -> subsetOf thisObj (thisLbl,ct) Nothing -> return () builtinDispatch "Function.apply" ct (thisFn:thisObj:arg:_) cxt = do JsCFAState { jscfaOpts = opts } <- lift get let expSrcs = cfaOptUnlimitedRecursion opts -- applicationHook <- callHook thisFn [] ct cxt let app formals results ce = do let formalsCount = (length formals) - 2 -- this flows into the first formal subsetOf thisObj (formals !! 0) -- construct the arguments array (second formal) arguments <- newArray cxt [] newValue arguments (formals !! 1) -- TODO: safe? -- the arguments are in an array (an object really) propagateTo arg cxt $ \argVal -> case argVal of AObject{aObjectProps=props,aObjectX=PArray} -> do propagateTo props cxt $ \elemVal -> case elemVal of AProperty id (ValueSet vals) -> do case tryInt id of Just ix -> if ix < formalsCount then do newValue elemVal (formals !! 1) -- TODO: safe? subsetOf vals (formals !! (ix + 2)) else return () -- warn $ "argument out of bound: " ++ id -- arrays have various builtins that are not copied over. 
This -- drops user-defined non-indexable properties too, but that -- should be okay. Nothing -> return () otherwise -> warn $ "non-object argument (1): " ++ show elemVal otherwise -> warn $ "illegal argument to Function.apply: " ++ show argVal mapM_ (\set -> subsetOf set cxt) results propagateTo thisFn cxt $ \fnVal -> case fnVal of AFunction{aFunctionLabel=fnLbl} | fnLbl `elem` callStack ct && (not $ isRecursionAllowed expSrcs fnLbl) -> do warn $ "ignoring .apply: " ++ show fnLbl ++ " at " ++ show (fst cxt) return () AFunction{aFunctionLabel=fnLbl,aFunctionArgs=fnArgs, aFunctionLocals=fnLocals,aFunctionEnv=ceClosure, aFunctionBody=(EqualOrd body)} -> do {- #ifdef TRACE_APPLICATION warn $ "applying " ++ show fnLbl ++ " from " ++ show (callStack ct) ++ " in " ++ show ct #endif -} let formals = map (\lbl -> (lbl,ct)) fnArgs let locals = map (\lbl -> (lbl,ct)) fnLocals let ce = M.union (M.fromList (formals ++ locals)) ceClosure results <- {-applicationHook fnVal-} (stmt ce (newCall fnLbl ct) body) app formals results ce otherwise -> warn $ "non-function value: " ++ show fnVal return () builtinDispatch "addEventListener" ct (this:evtType:listener:_) cxt = do newString cxt SAny -- cxt.1 let result = primeSet cxt 2 -- TODO: It's okay to use the contour ct since contours are abstract and don't represent any particular relationship -- between calls--for now. However, it is a total hack, and we really need different kinds of top-level contours. -- asyncHook ct this cxt (application listener [primeSet cxt 1] ct result) False -- state@(JsCFAState{jscfasFinalFlows=fs}) <- lift get -- lift $ put state{jscfasFinalFlows = (AnyString,arg):fs} return () builtinDispatch "document.write" ct [this,html] cxt@(lbl,_) = do warn $ "document.write ..." let sourceName = case labelSource lbl of { Just s -> s ; Nothing -> "<dynamic>" } propagateTo html cxt $ \html -> case html of AObject{aObjectX=(PString sp)} -> onDemandJavaScript ct sp sourceName otherwise -> warn $ "document.write applied to " ++ show html ++ " at " ++ show cxt builtinDispatch "Element" ct [this,tag] cxt@(lbl,_) = do let sourceName = case labelSource lbl of { Just s -> s ; Nothing -> "<dynamic>" } -- Programmatically, the only way to access an element is using one of the document.getElement* methods, or with -- an element object. -- If this is a <script> element, we have to load the script. Currently, we only handle scripts that reference -- external files (i.e. the src attribute). propagateTo tag cxt $ \tag -> case tag of AObject{aObjectX=(PString sp)} | lowercase (unStringPatForOnDemandJavaScript sp) == "script" -> propagateProperty this cxt "src" $ \srcSet -> propagateTo srcSet cxt $ \src -> case src of AObject{aObjectX=(PString sp)} -> onDemandJavaScriptFromFile ct sp otherwise -> warn $ "builtinDispatch [" ++ show ct ++ "] : src property of a <script> tag assigned to " ++ show src AObject{aObjectX=(PString sp)} | lowercase (unStringPatForOnDemandJavaScript sp) == "iframe" -> propagateProperty this cxt "src" $ \srcSet -> propagateTo srcSet cxt $ \src -> case src of AObject{aObjectX=(PString sp)} -> dynamicIFrameLoader ct sp otherwise -> warn $ "builtinDispatch [" ++ show ct ++ "] : src property of an <iframe> tag assigned to " ++ show src otherwise -> return () -- If the innerHTML attribute is assigned to, we parse it and load any JavaScript it contains. 
JsCFAState {jscfasBuiltins=builtins} <- lift get unsafePropagateProperty this cxt "innerHTML" $ \htmlSet -> propagateTo htmlSet cxt $ \html -> case html of AObject{aObjectX=(PString sp)} -> onDemandJavaScript ct sp sourceName UndefinedVal -> return () otherwise -> warn $ "Element.innerHTML assigned to " ++ show html ++ " at " ++ show htmlSet builtinDispatch "XMLHttpRequest" ct (this:_) cxt = do -- nothing to initialize return () builtinDispatch "XMLHttpRequest.open" ct [this,method,url] cxt = do {- #ifdef DEBUG_XHR warn $ "(XMLHttpRequest) this.open" propagateTo url cxt $ \val -> warn $ "(XMLHttpRequest) this.url = " ++ show val #endif -} propagateProperty this cxt "url" $ \urlSet -> subsetOf url urlSet return () builtinDispatch "XMLHttpRequest.open" ct [this,method,url,async] cxt = do warnAt "dropping async argument to XHR.open" (show ct) builtinDispatch "XMLHttpRequest.open" ct [this,method,url] cxt -- -- XMLHttpRequest.prototype.send -- builtinDispatch "XMLHttpRequest.send" ct (this:contentArg:_) cxt = do {- #ifdef DEBUG_XHR warn $ "(XMLHttpRequest) this.send in " ++ show ct #endif -} let result = primeSet cxt 1 let handlerSet = primeSet cxt 2 -- set XMLHttpRequest.content propagateProperty this contentArg "content" $ \contentProp -> subsetOf contentArg contentProp -- flow a server value into responseText unsafePropagateProperty this cxt "responseText" $ \responseText -> do let svrSet = primeSet responseText 1 newValue (AServerVal svrSet) responseText -- acceptable use of newValue -- the asynchronous application -- asyncHook ct this cxt (application handlerSet [] ct result) True -- populate the set of handler functions propagateProperty this handlerSet "onreadystatechange" $ \rst -> subsetOf rst handlerSet builtinDispatch "String.any" ct [this] cxt = do newString cxt SAny return () builtinDispatch "print" ct [this,val] cxt = do propagateTo val cxt $ \v -> do -- tell $ "----------------------------" tell $ show v tell $ "At: " ++ show ct {- srcs <- sources val tell $ "Sources:" mapM_ (tell.show) srcs tell $ "----------------------------" -} newValue UndefinedVal cxt -- acceptable use of newValue builtinDispatch "$A" ct [this,valStx] stx = do propagateTo valStx stx $ \val -> case objectProperties val of Nothing -> do newArray stx [] return () Just propSet -> propagatePropertyOf val stx "toArray" $ \propValSet -> propagate propValSet $ \propVal -> case propVal of -- toArray is not defined UndefinedVal -> do AObject{aObjectProps=ixs} <- newArray stx [] flow1 propSet ixs $ \property -> case property of AProperty propId _ | isJust (tryInt propId) -> Just property | otherwise -> Nothing otherwise -> error "$A: non-property value in a property set" AFunction{aFunctionThis=thisLbl} -> do subsetOf this (thisLbl,ct) application propValSet [] ct stx otherwise -> warn $ "$A: expected function or undefined in .toArray: " ++ show propVal builtinDispatch "$w" ct [this,valStx] stx = do propagateTo valStx stx $ \val -> case val of AObject{aObjectX=(PString sp)} -> case unStringPat sp of Just s -> do stringPrototype <- builtinPrototype "String" let mk (s,ix) = do let set = primeSet stx ix newValue (AObject (fst set) set (PString $ SConst s)) set return set strs <- mapM mk (zip (L.words s) [2..]) -- newArray uses 1 newArray stx strs return () Nothing -> warn $ "$w applied to strange string at " ++ show stx ++ "; argument was " ++ show val otherwise -> warn $ "$w applied to non-string at " ++ show stx ++ "; argument was " ++ show val builtinDispatch name ct args _ = do warn $ "ERROR: non-existant builtin or 
pattern-match failure `" ++ name ++ "'; " ++ show (length args)
         ++ " arguments, call from " ++ show ct

initialize :: (MonadIO m) => CfaT Value (StateT (JsCFAState Contour) m) ()
initialize = do
  JsCFAState {jscfasBuiltins=builtins} <- lift get
  ctArray <- emptyContour
  -- Whenever window.location is assigned to, create a navigate node off the
  -- page.
  case M.lookup "window" builtins of
    Just lbl -> do
      let windowSet = (lbl,topContour)
      unsafePropagateProperty windowSet windowSet "location" $ \locSet ->
        return () -- propagate locSet navigateHook
    Nothing -> fail "initialize: could not find window"
brownplt/ovid
src/Ovid/Constraints.hs
Haskell
bsd-2-clause
44,770
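An illustrative sketch, not part of the Ovid sources above: supporting one more builtin means adding another `builtinDispatch` clause ahead of the catch-all clause. The builtin name "console.log" and its body are assumptions made only for illustration; the helpers used (`propagateTo`, `warn`) are the same ones the clauses above rely on.

-- Hypothetical clause, shown only to illustrate the dispatch pattern;
-- it would have to appear before the catch-all 'builtinDispatch name ct args _'.
builtinDispatch "console.log" ct (this:val:_) cxt = do
  -- observe whatever values flow into the logged argument, as the
  -- "Element.innerHTML" and "print" clauses do
  propagateTo val cxt $ \v ->
    warn $ "console.log: " ++ show v ++ " at " ++ show ct
  return ()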
-- vim: sw=2: ts=2: set expandtab: {-# LANGUAGE CPP, TemplateHaskell, MultiParamTypeClasses, FlexibleInstances, FlexibleContexts, OverlappingInstances, IncoherentInstances, OverloadedStrings, GADTs, NoMonomorphismRestriction, ScopedTypeVariables #-} ----------------------------------------------------------------------------- -- -- Module : Unify -- Copyright : BSD -- License : AllRightsReserved -- -- Maintainer : Ki Yung Ahn -- Stability : -- Portability : -- -- | -- ----------------------------------------------------------------------------- module Unify where import Syntax import Data.List import Data.Maybe import Control.Monad.Trans import Control.Monad.Error import Control.Monad.State import Language.LBNF.Runtime hiding (printTree) import Parser (printTree) import Generics.RepLib.Unify hiding (solveUnification) import Unbound.LocallyNameless hiding (subst, Con, union) -- uapply may cause problem becuase of subst -- I had to inline this in mininax project -- Let's see how it goes #define uapply (foldl' (flip (.)) id . map (uncurry subst)) instance HasVar (Name Ki) Ki where is_var (KVar nm) = Just nm is_var _ = Nothing var = KVar instance HasVar (Name Ty) Ty where is_var (TVar nm) = Just nm is_var _ = Nothing var = TVar instance HasVar (Name Ty) Tm where instance (Eq n, Show n, Show a, HasVar n a) => Unify n a String where unifyStep _ = unifyStepEq instance (Eq n, Show n, Show a, HasVar n a) => Unify n a (Name s) where unifyStep _ = unifyStepEq instance (Alpha n, Eq n, Show n, Alpha a, HasVar n a, Rep1 (UnifySubD n a) a) => Unify n a (Bind n a) where unifyStep _ b1 b2 | b1 `aeq` b2 = return () | otherwise = do (e1,e2) <- runFreshMT $ do { (_,e1) <- unbind b1 ; (_,e2) <- unbind b2 ; return (e1,e2) } -- trace ("trace in instance Unify n a (Bind n a): " ++ show (e1,e2)) $ unifyStep undefined e1 e2 -------------------------------------------- ----- maybe we don't need this -------------------------------------------- -- instance (Eq n, Show n, HasVar n Ty) => Unify n Ty Ty where -- unifyStep (dum :: Proxy(n,Ty)) a1 a2 = -- -- trace ("trace 2 in instance Unify n PSUT PSUT): " ++ show (a1,a2)) $ -- case ((is_var a1) :: Maybe n, (is_var a2) :: Maybe n) of -- (Just n1, Just n2) -> if n1 == n2 -- then return () -- else addSub n1 (var n2); -- (Just n1, _) -> addSub n1 a2 -- (_, Just n2) -> addSub n2 a1 -- (_, _) -> unifyStepR1 rep1 dum a1 a2 -- where -- addSub n t = extendSubstitution (n, t) -- modified the Generics.Replib.Unify version to throwError rather than error -- TODO Can be even better if we pass curret stat rather than (Ustate cs [])? -- somehow this idea doesn't work ... 
[] replaced with current subst loops -- solveUnification :: (HasVar n a, Eq n, Show n, Show a, Rep1 (UnifySubD n a) a) => [(a, a)] -> Either UnifyError [(n, a)] solveUnification (eqs :: [(a, a)]) = case r of Left e -> throwError e Right _ -> return $ uSubst final where (r, final) = runState (runErrorT rwConstraints) (UState cs []) cs = [(UC dict a1 a2) | (a1, a2) <- eqs] rwConstraints :: UM n a () rwConstraints = do c <- dequeueConstraint case c of Just (UC d a1 a2) -> do unifyStepD d (undefined :: Proxy (n, a)) a1 a2 rwConstraints Nothing -> return () mgu t1 t2 = do case solveUnification [(t1, t2)] of Left e -> throwError (strMsg $ e ++ "\n\t"++ errstr) Right u -> return u where errstr = "cannot unify "++printTree t1++" and "++printTree t2 mguMany ps = do case solveUnification ps of Left e -> throwError (strMsg $ e ++ "\n\t" ++ errstr) Right u -> return u where errstr = "cannot unify \n" ++ ( concat [ "\t"++printTree t1++" and "++printTree t2++"\n" | (t1,t2)<-ps ] ) lift2 = lift . lift getSubst = do { UState _ s <- lift get; return s } extendSubst :: ( HasVar (Name a) a, Show a, Print a , Rep1 (UnifySubD (Name a) a) a) => (Name a, a) -> ErrorT UnifyError (State (UnificationState (Name a) a)) () extendSubst (x,t) | isJust my && x < y = extendSubst (y,var x) | isJust my && x== y = return () where my = is_var t y = fromJust my extendSubst (x,t) = do u <- getSubst case lookup x u of Nothing -> extendSubstitution (x,t) Just t' -> mapM_ extendSubst =<< mgu t t' unify t1 t2 = -- trace ("unify ("++show t1++") ("++show t2++")") $ do u <- getSubst mapM_ extendSubst =<< mgu (uapply u t1) (uapply u t2) unifyMany ps = do u <- getSubst mapM_ extendSubst =<< mguMany (map (uapply u) ps)
kyagrd/micronax
src/Unify.hs
Haskell
bsd-2-clause
5,110
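Editorial sketch: the `HasVar` instances above are what let the generic RepLib unifier treat `TVar`/`KVar` nodes as unification variables. A minimal property they are expected to satisfy, assuming the project's `Ty` type from `Syntax` and name construction via `s2n` from unbound, is the `is_var`/`var` round trip:

-- Round-trip check: turning a name into a variable and back yields the name.
prop_hasVarTy :: Name Ty -> Bool
prop_hasVarTy n = is_var (var n :: Ty) == Just n

-- e.g. prop_hasVarTy (s2n "a") is expected to evaluate to True,
-- because 'var = TVar' and 'is_var (TVar nm) = Just nm' above.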
{-# LANGUAGE OverloadedStrings #-} {-| Implementation of the Ganeti confd server functionality. -} {- Copyright (C) 2013 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} module Ganeti.Monitoring.Server ( main , checkMain , prepMain ) where import Control.Applicative import Control.Monad import Control.Monad.IO.Class import Data.ByteString.Char8 hiding (map, filter, find) import Data.List import qualified Data.Map as Map import Snap.Core import Snap.Http.Server import qualified Text.JSON as J import Control.Concurrent import qualified Ganeti.BasicTypes as BT import Ganeti.Daemon import qualified Ganeti.DataCollectors.CPUload as CPUload import qualified Ganeti.DataCollectors.Diskstats as Diskstats import qualified Ganeti.DataCollectors.Drbd as Drbd import qualified Ganeti.DataCollectors.InstStatus as InstStatus import qualified Ganeti.DataCollectors.Lv as Lv import Ganeti.DataCollectors.Types import qualified Ganeti.Constants as C import Ganeti.Runtime -- * Types and constants definitions -- | Type alias for checkMain results. type CheckResult = () -- | Type alias for prepMain results. type PrepResult = Config Snap () -- | Version of the latest supported http API. latestAPIVersion :: Int latestAPIVersion = C.mondLatestApiVersion -- | A report of a data collector might be stateful or stateless. data Report = StatelessR (IO DCReport) | StatefulR (Maybe CollectorData -> IO DCReport) -- | Type describing a data collector basic information data DataCollector = DataCollector { dName :: String -- ^ Name of the data collector , dCategory :: Maybe DCCategory -- ^ Category (storage, instance, ecc) -- of the collector , dKind :: DCKind -- ^ Kind (performance or status reporting) of -- the data collector , dReport :: Report -- ^ Report produced by the collector , dUpdate :: Maybe (Maybe CollectorData -> IO CollectorData) -- ^ Update operation for stateful collectors. } -- | The list of available builtin data collectors. 
collectors :: [DataCollector] collectors = [ DataCollector Diskstats.dcName Diskstats.dcCategory Diskstats.dcKind (StatelessR Diskstats.dcReport) Nothing , DataCollector Drbd.dcName Drbd.dcCategory Drbd.dcKind (StatelessR Drbd.dcReport) Nothing , DataCollector InstStatus.dcName InstStatus.dcCategory InstStatus.dcKind (StatelessR InstStatus.dcReport) Nothing , DataCollector Lv.dcName Lv.dcCategory Lv.dcKind (StatelessR Lv.dcReport) Nothing , DataCollector CPUload.dcName CPUload.dcCategory CPUload.dcKind (StatefulR CPUload.dcReport) (Just CPUload.dcUpdate) ] -- * Configuration handling -- | The default configuration for the HTTP server. defaultHttpConf :: FilePath -> FilePath -> Config Snap () defaultHttpConf accessLog errorLog = setAccessLog (ConfigFileLog accessLog) . setCompression False . setErrorLog (ConfigFileLog errorLog) $ setVerbose False emptyConfig -- * Helper functions -- | Check function for the monitoring agent. checkMain :: CheckFn CheckResult checkMain _ = return $ Right () -- | Prepare function for monitoring agent. prepMain :: PrepFn CheckResult PrepResult prepMain opts _ = do accessLog <- daemonsExtraLogFile GanetiMond AccessLog errorLog <- daemonsExtraLogFile GanetiMond ErrorLog return $ setPort (maybe C.defaultMondPort fromIntegral (optPort opts)) (defaultHttpConf accessLog errorLog) -- * Query answers -- | Reply to the supported API version numbers query. versionQ :: Snap () versionQ = writeBS . pack $ J.encode [latestAPIVersion] -- | Version 1 of the monitoring HTTP API. version1Api :: MVar CollectorMap -> Snap () version1Api mvar = let returnNull = writeBS . pack $ J.encode J.JSNull :: Snap () in ifTop returnNull <|> route [ ("list", listHandler) , ("report", reportHandler mvar) ] -- | Get the JSON representation of a data collector to be used in the collector -- list. dcListItem :: DataCollector -> J.JSValue dcListItem dc = J.JSArray [ J.showJSON $ dName dc , maybe J.JSNull J.showJSON $ dCategory dc , J.showJSON $ dKind dc ] -- | Handler for returning lists. listHandler :: Snap () listHandler = dir "collectors" . writeBS . pack . J.encode $ map dcListItem collectors -- | Handler for returning data collector reports. reportHandler :: MVar CollectorMap -> Snap () reportHandler mvar = route [ ("all", allReports mvar) , (":category/:collector", oneReport mvar) ] <|> errorReport -- | Return the report of all the available collectors. allReports :: MVar CollectorMap -> Snap () allReports mvar = do reports <- mapM (liftIO . getReport mvar) collectors writeBS . pack . J.encode $ reports -- | Takes the CollectorMap and a DataCollector and returns the report for this -- collector. getReport :: MVar CollectorMap -> DataCollector -> IO DCReport getReport mvar collector = case dReport collector of StatelessR r -> r StatefulR r -> do colData <- getColData (dName collector) mvar r colData -- | Returns the data for the corresponding collector. getColData :: String -> MVar CollectorMap -> IO (Maybe CollectorData) getColData name mvar = do m <- readMVar mvar return $ Map.lookup name m -- | Returns a category given its name. -- If "collector" is given as the name, the collector has no category, and -- Nothing will be returned. 
catFromName :: String -> BT.Result (Maybe DCCategory) catFromName "instance" = BT.Ok $ Just DCInstance catFromName "storage" = BT.Ok $ Just DCStorage catFromName "daemon" = BT.Ok $ Just DCDaemon catFromName "hypervisor" = BT.Ok $ Just DCHypervisor catFromName "default" = BT.Ok Nothing catFromName _ = BT.Bad "No such category" errorReport :: Snap () errorReport = do modifyResponse $ setResponseStatus 404 "Not found" writeBS "Unable to produce a report for the requested resource" error404 :: Snap () error404 = do modifyResponse $ setResponseStatus 404 "Not found" writeBS "Resource not found" -- | Return the report of one collector. oneReport :: MVar CollectorMap -> Snap () oneReport mvar = do categoryName <- maybe mzero unpack <$> getParam "category" collectorName <- maybe mzero unpack <$> getParam "collector" category <- case catFromName categoryName of BT.Ok cat -> return cat BT.Bad msg -> fail msg collector <- case find (\col -> collectorName == dName col) $ filter (\c -> category == dCategory c) collectors of Just col -> return col Nothing -> fail "Unable to find the requested collector" dcr <- liftIO $ getReport mvar collector writeBS . pack . J.encode $ dcr -- | The function implementing the HTTP API of the monitoring agent. monitoringApi :: MVar CollectorMap -> Snap () monitoringApi mvar = ifTop versionQ <|> dir "1" (version1Api mvar) <|> error404 -- | The function collecting data for each data collector providing a dcUpdate -- function. collect :: CollectorMap -> DataCollector -> IO CollectorMap collect m collector = case dUpdate collector of Nothing -> return m Just update -> do let name = dName collector existing = Map.lookup name m new_data <- update existing return $ Map.insert name new_data m -- | Invokes collect for each data collector. collection :: CollectorMap -> IO CollectorMap collection m = foldM collect m collectors -- | The thread responsible for the periodical collection of data for each data -- data collector. collectord :: MVar CollectorMap -> IO () collectord mvar = do m <- takeMVar mvar m' <- collection m putMVar mvar m' threadDelay $ 10^(6 :: Int) * C.mondTimeInterval collectord mvar -- | Main function. main :: MainFn CheckResult PrepResult main _ _ httpConf = do mvar <- newMVar Map.empty _ <- forkIO $ collectord mvar httpServe httpConf . method GET $ monitoringApi mvar
apyrgio/snf-ganeti
src/Ganeti/Monitoring/Server.hs
Haskell
bsd-2-clause
9,228
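Editorial sketch of how an additional stateless collector would plug into the list above; `Ganeti.DataCollectors.Example` is a hypothetical module assumed to expose the same `dcName`/`dcCategory`/`dcKind`/`dcReport` values as the real collectors do.

import qualified Ganeti.DataCollectors.Example as Example  -- hypothetical module

-- An entry like this, appended to 'collectors', is all that is needed for the
-- new collector to be served by the /1/list and /1/report handlers above.
exampleCollector :: DataCollector
exampleCollector =
  DataCollector Example.dcName Example.dcCategory Example.dcKind
                (StatelessR Example.dcReport)
                Nothing  -- stateless collectors provide no dUpdate operation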
{- This is a module for reading and parsing *.csv files. -} module DataBuilder ( getData , filterDataContainer , getMatrixFromDataContainer , getMatrixFromNumContainer , NumContainer(..) , Header , DataContainer )where import Data.Char import Data.List.Split import System.IO import Control.Exception import Exceptions import Matrix hiding (decreaseAll) -- * Types -- | Simply String containing path to a file§ type Filepath = String type Header = String -- | String that seperates data, in csv it is: "," type Separator = String -- | DataContainer is a container for headers of data and data itself newtype DataContainer = DataContainer ([Header], Matrix String) -- | NumContainer stores the same as DataContainer except non-numerical values newtype NumContainer = NumContainer ([Header], Matrix Double) -- * Instances instance Show DataContainer where show (DataContainer (xs, m)) = show xs ++ "\n" ++ show m instance Show NumContainer where show (NumContainer (xs, m)) = show xs ++ "\n" ++ show m -- * FUNCTION INDEX -- | Takes list of indexes of columns that are to be deleted, DataContainer with data and return NumContainer with only numeric values. -- | If it cannot get a number out of a field it removes this one training set. filterDataContainer :: [Int] -> DataContainer -> NumContainer -- | Takes a path to the file, seperator that seperates data and returns DataContainer getData :: Filepath -> Separator -> IO (DataContainer) -- BODIES filterDataContainer columnsToDelete (DataContainer (s, m)) = NumContainer ( deleteElements columnsToDelete s, fmap (\(Just e) -> e) . filterLinesHor (\s -> if s == Nothing then False else True) . fmap getNumber . deleteColumns columnsToDelete $ m) getData path sep = (do handle <- openFile path ReadMode contents <- hGetContents handle if isFileOK contents then return (getDataContainer sep . lines $ contents) else throwE "\n The file is not a valid csv file.") `catch` handler handler :: IOError -> IO (DataContainer) handler e = do throwE (show e ++ "Error while getting data.") getMatrixFromDataContainer :: DataContainer -> Matrix String getMatrixFromDataContainer (DataContainer (_, m)) = m getMatrixFromNumContainer :: NumContainer -> Matrix Double getMatrixFromNumContainer (NumContainer (_, m)) = m --PRIVATE FUNCTIONS isFileOK :: String -> Bool -- here we can evaluate on errors that can appear isFileOK s | length s == 0 = False -- when empty file | otherwise = True getDataContainer :: Separator -> [String] -> DataContainer getDataContainer sep contentInLines = DataContainer (map filterHeader . splitOn sep . head $ contentInLines, transposeM $ (packM inside)) where inside = map (\s -> splitOn sep $ s) . tail $ contentInLines filterHeader :: Header -> Header filterHeader s = filter (\ch -> (ch /= '\"') && (ch /= '\'')) s deleteElements :: [Int] -> [a] -> [a] deleteElements [] l = l deleteElements _ [] = [] deleteElements (a:as) xs = deleteElements (decreaseAll (as)) (deleteElement a xs) deleteElement :: Int -> [a] -> [a] deleteElement a xs = loop a xs where loop _ [] = [] loop 1 (y:ys) = loop 0 ys loop n (y:ys) = y:(loop (n-1) ys) decreaseAll :: [Int] -> [Int] decreaseAll [] = [] decreaseAll (x:xs) = (x-1):(decreaseAll xs) getNumber :: String -> Maybe Double getNumber s = case head s == '\"' || head s == '\'' || (length s == 0) ||(isLetter . head $ s) of True -> Nothing False -> Just $ (read s :: Double)
kanes115/Regressions
src/DataBuilder.hs
Haskell
bsd-3-clause
4,048
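Editorial usage sketch for the module above; "data.csv" is a made-up input file. Column index 1 refers to the first column, matching the 1-based behaviour of `deleteElement`.

main :: IO ()
main = do
  dc <- getData "data.csv" ","          -- read the file and split fields on ","
  let nc = filterDataContainer [1] dc   -- drop column 1, keep only numeric rows
  print nc                              -- uses the Show NumContainer instance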
{-
 - Hacq (c) 2013 NEC Laboratories America, Inc. All rights reserved.
 -
 - This file is part of Hacq.
 - Hacq is distributed under the 3-clause BSD license.
 - See the LICENSE file for more details.
 -}

{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}

module Control.Monad.Quantum.ApproxSequence.Class
  (MonadApproxSequence(..), applyPrepareQubitApprox, applyGlobalPhase) where

import Control.Monad.Reader (ReaderT)
import Control.Monad.Trans (MonadTrans, lift)
import Data.Complex

import Control.Monad.Quantum.Class

class MonadQuantumBase w m => MonadApproxSequence w m | m -> w where
  -- |@applyOneQubitUnitary a c d w@ applies unitary U to wire w, where U is given by the following matrix:
  --
  -- > a c*
  -- > c d
  applyOneQubitUnitary :: Complex Double -> Complex Double -> Complex Double -> w -> m ()

-- |@applyPrepareQubitApprox a b@ prepares a qubit in a state a|0>+b|1>.
applyPrepareQubitApprox :: MonadApproxSequence w m => Complex Double -> Complex Double -> m w
applyPrepareQubitApprox a b = do
  w <- ancilla
  applyOneQubitUnitary a b (-conjugate a) w
  return w
{-# INLINABLE applyPrepareQubitApprox #-}

applyGlobalPhase :: (MonadQuantum w m, MonadApproxSequence w m) => Double -> m ()
applyGlobalPhase fraction =
  handleMaybeCtrl $ \ctrl ->
    case ctrl of
      Nothing -> return ()
      Just ctrlwire -> applyOneQubitUnitary 1 0 (cis (2 * pi * fraction)) ctrlwire
{-# INLINABLE applyGlobalPhase #-}

-- Instance for ReaderT

instance MonadApproxSequence w m => MonadApproxSequence w (ReaderT r m) where
  applyOneQubitUnitary a c d w = lift $ applyOneQubitUnitary a c d w
  {-# INLINABLE applyOneQubitUnitary #-}
ti1024/hacq
src/Control/Monad/Quantum/ApproxSequence/Class.hs
Haskell
bsd-3-clause
1,776
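Editorial sketch: preparing the |+> state with the helper above. The amplitudes must satisfy |a|^2 + |b|^2 = 1; the concrete circuit-building monad instance comes from the rest of Hacq and is not shown here.

preparePlus :: MonadApproxSequence w m => m w
preparePlus = applyPrepareQubitApprox (1 / sqrt 2) (1 / sqrt 2)  -- (|0> + |1>)/sqrt 2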
{-# language CPP #-} -- | = Name -- -- VK_NV_framebuffer_mixed_samples - device extension -- -- == VK_NV_framebuffer_mixed_samples -- -- [__Name String__] -- @VK_NV_framebuffer_mixed_samples@ -- -- [__Extension Type__] -- Device extension -- -- [__Registered Extension Number__] -- 153 -- -- [__Revision__] -- 1 -- -- [__Extension and Version Dependencies__] -- -- - Requires Vulkan 1.0 -- -- [__Contact__] -- -- - Jeff Bolz -- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_NV_framebuffer_mixed_samples] @jeffbolznv%0A<<Here describe the issue or question you have about the VK_NV_framebuffer_mixed_samples extension>> > -- -- == Other Extension Metadata -- -- [__Last Modified Date__] -- 2017-06-04 -- -- [__Contributors__] -- -- - Jeff Bolz, NVIDIA -- -- == Description -- -- This extension allows multisample rendering with a raster and -- depth\/stencil sample count that is larger than the color sample count. -- Rasterization and the results of the depth and stencil tests together -- determine the portion of a pixel that is “covered”. It can be useful to -- evaluate coverage at a higher frequency than color samples are stored. -- This coverage is then “reduced” to a collection of covered color -- samples, each having an opacity value corresponding to the fraction of -- the color sample covered. The opacity can optionally be blended into -- individual color samples. -- -- Rendering with fewer color samples than depth\/stencil samples greatly -- reduces the amount of memory and bandwidth consumed by the color buffer. -- However, converting the coverage values into opacity introduces -- artifacts where triangles share edges and /may/ not be suitable for -- normal triangle mesh rendering. -- -- One expected use case for this functionality is Stencil-then-Cover path -- rendering (similar to the OpenGL GL_NV_path_rendering extension). The -- stencil step determines the coverage (in the stencil buffer) for an -- entire path at the higher sample frequency, and then the cover step -- draws the path into the lower frequency color buffer using the coverage -- information to antialias path edges. With this two-step process, -- internal edges are fully covered when antialiasing is applied and there -- is no corruption on these edges. -- -- The key features of this extension are: -- -- - It allows render pass and framebuffer objects to be created where -- the number of samples in the depth\/stencil attachment in a subpass -- is a multiple of the number of samples in the color attachments in -- the subpass. -- -- - A coverage reduction step is added to Fragment Operations which -- converts a set of covered raster\/depth\/stencil samples to a set of -- color samples that perform blending and color writes. The coverage -- reduction step also includes an optional coverage modulation step, -- multiplying color values by a fractional opacity corresponding to -- the number of associated raster\/depth\/stencil samples covered. 
-- -- == New Structures -- -- - Extending -- 'Vulkan.Core10.Pipeline.PipelineMultisampleStateCreateInfo': -- -- - 'PipelineCoverageModulationStateCreateInfoNV' -- -- == New Enums -- -- - 'CoverageModulationModeNV' -- -- == New Bitmasks -- -- - 'PipelineCoverageModulationStateCreateFlagsNV' -- -- == New Enum Constants -- -- - 'NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME' -- -- - 'NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION' -- -- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType': -- -- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV' -- -- == Version History -- -- - Revision 1, 2017-06-04 (Jeff Bolz) -- -- - Internal revisions -- -- == See Also -- -- 'CoverageModulationModeNV', -- 'PipelineCoverageModulationStateCreateFlagsNV', -- 'PipelineCoverageModulationStateCreateInfoNV' -- -- == Document Notes -- -- For more information, see the -- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_NV_framebuffer_mixed_samples Vulkan Specification> -- -- This page is a generated document. Fixes and changes should be made to -- the generator scripts, not directly. module Vulkan.Extensions.VK_NV_framebuffer_mixed_samples ( PipelineCoverageModulationStateCreateInfoNV(..) , PipelineCoverageModulationStateCreateFlagsNV(..) , CoverageModulationModeNV( COVERAGE_MODULATION_MODE_NONE_NV , COVERAGE_MODULATION_MODE_RGB_NV , COVERAGE_MODULATION_MODE_ALPHA_NV , COVERAGE_MODULATION_MODE_RGBA_NV , .. ) , NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION , pattern NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION , NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME , pattern NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME ) where import Vulkan.Internal.Utils (enumReadPrec) import Vulkan.Internal.Utils (enumShowsPrec) import Control.Monad (unless) import Foreign.Marshal.Alloc (allocaBytes) import GHC.IO (throwIO) import Foreign.Ptr (nullPtr) import Foreign.Ptr (plusPtr) import GHC.Show (showString) import GHC.Show (showsPrec) import Numeric (showHex) import Data.Coerce (coerce) import Control.Monad.Trans.Class (lift) import Control.Monad.Trans.Cont (evalContT) import Data.Vector (generateM) import qualified Data.Vector (imapM_) import qualified Data.Vector (length) import qualified Data.Vector (null) import Vulkan.CStruct (FromCStruct) import Vulkan.CStruct (FromCStruct(..)) import Vulkan.CStruct (ToCStruct) import Vulkan.CStruct (ToCStruct(..)) import Vulkan.Zero (Zero) import Vulkan.Zero (Zero(..)) import Data.Bits (Bits) import Data.Bits (FiniteBits) import Data.String (IsString) import Data.Typeable (Typeable) import Foreign.C.Types (CFloat) import Foreign.C.Types (CFloat(..)) import Foreign.C.Types (CFloat(CFloat)) import Foreign.Storable (Storable) import Foreign.Storable (Storable(peek)) import Foreign.Storable (Storable(poke)) import GHC.Generics (Generic) import GHC.IO.Exception (IOErrorType(..)) import GHC.IO.Exception (IOException(..)) import Data.Int (Int32) import Foreign.Ptr (Ptr) import GHC.Read (Read(readPrec)) import GHC.Show (Show(showsPrec)) import Data.Word (Word32) import Data.Kind (Type) import Control.Monad.Trans.Cont (ContT(..)) import Data.Vector (Vector) import Vulkan.CStruct.Utils (advancePtrBytes) import Vulkan.Core10.FundamentalTypes (bool32ToBool) import Vulkan.Core10.FundamentalTypes (boolToBool32) import Vulkan.Core10.FundamentalTypes (Bool32) import Vulkan.Core10.FundamentalTypes (Flags) import Vulkan.Core10.Enums.StructureType (StructureType) import Vulkan.Core10.Enums.StructureType 
(StructureType(STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV)) -- | VkPipelineCoverageModulationStateCreateInfoNV - Structure specifying -- parameters controlling coverage modulation -- -- = Description -- -- If @coverageModulationTableEnable@ is -- 'Vulkan.Core10.FundamentalTypes.FALSE', then for each color sample the -- associated bits of the pixel coverage are counted and divided by the -- number of associated bits to produce a modulation factor R in the range -- (0,1] (a value of zero would have been killed due to a color coverage of -- 0). Specifically: -- -- - N = value of @rasterizationSamples@ -- -- - M = value of 'Vulkan.Core10.Pass.AttachmentDescription'::@samples@ -- for any color attachments -- -- - R = popcount(associated coverage bits) \/ (N \/ M) -- -- If @coverageModulationTableEnable@ is -- 'Vulkan.Core10.FundamentalTypes.TRUE', the value R is computed using a -- programmable lookup table. The lookup table has N \/ M elements, and the -- element of the table is selected by: -- -- - R = @pCoverageModulationTable@[popcount(associated coverage bits)-1] -- -- Note that the table does not have an entry for popcount(associated -- coverage bits) = 0, because such samples would have been killed. -- -- The values of @pCoverageModulationTable@ /may/ be rounded to an -- implementation-dependent precision, which is at least as fine as 1 \/ N, -- and clamped to [0,1]. -- -- For each color attachment with a floating point or normalized color -- format, each fragment output color value is replicated to M values which -- /can/ each be modulated (multiplied) by that color sample’s associated -- value of R. Which components are modulated is controlled by -- @coverageModulationMode@. -- -- If this structure is not included in the @pNext@ chain, it is as if -- @coverageModulationMode@ is 'COVERAGE_MODULATION_MODE_NONE_NV'. -- -- If the -- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#fragops-coverage-reduction coverage reduction mode> -- is -- 'Vulkan.Extensions.VK_NV_coverage_reduction_mode.COVERAGE_REDUCTION_MODE_TRUNCATE_NV', -- each color sample is associated with only a single coverage sample. In -- this case, it is as if @coverageModulationMode@ is -- 'COVERAGE_MODULATION_MODE_NONE_NV'. 
-- -- == Valid Usage -- -- - #VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405# -- If @coverageModulationTableEnable@ is -- 'Vulkan.Core10.FundamentalTypes.TRUE', -- @coverageModulationTableCount@ /must/ be equal to the number of -- rasterization samples divided by the number of color samples in the -- subpass -- -- == Valid Usage (Implicit) -- -- - #VUID-VkPipelineCoverageModulationStateCreateInfoNV-sType-sType# -- @sType@ /must/ be -- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV' -- -- - #VUID-VkPipelineCoverageModulationStateCreateInfoNV-flags-zerobitmask# -- @flags@ /must/ be @0@ -- -- - #VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationMode-parameter# -- @coverageModulationMode@ /must/ be a valid -- 'CoverageModulationModeNV' value -- -- = See Also -- -- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_NV_framebuffer_mixed_samples VK_NV_framebuffer_mixed_samples>, -- 'Vulkan.Core10.FundamentalTypes.Bool32', 'CoverageModulationModeNV', -- 'PipelineCoverageModulationStateCreateFlagsNV', -- 'Vulkan.Core10.Enums.StructureType.StructureType' data PipelineCoverageModulationStateCreateInfoNV = PipelineCoverageModulationStateCreateInfoNV { -- | @flags@ is reserved for future use. flags :: PipelineCoverageModulationStateCreateFlagsNV , -- | @coverageModulationMode@ is a 'CoverageModulationModeNV' value -- controlling which color components are modulated. coverageModulationMode :: CoverageModulationModeNV , -- | @coverageModulationTableEnable@ controls whether the modulation factor -- is looked up from a table in @pCoverageModulationTable@. coverageModulationTableEnable :: Bool , -- | @coverageModulationTableCount@ is the number of elements in -- @pCoverageModulationTable@. coverageModulationTableCount :: Word32 , -- | @pCoverageModulationTable@ is a table of modulation factors containing a -- value for each number of covered samples. 
coverageModulationTable :: Vector Float } deriving (Typeable) #if defined(GENERIC_INSTANCES) deriving instance Generic (PipelineCoverageModulationStateCreateInfoNV) #endif deriving instance Show PipelineCoverageModulationStateCreateInfoNV instance ToCStruct PipelineCoverageModulationStateCreateInfoNV where withCStruct x f = allocaBytes 40 $ \p -> pokeCStruct p x (f p) pokeCStruct p PipelineCoverageModulationStateCreateInfoNV{..} f = evalContT $ do lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV) lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr) lift $ poke ((p `plusPtr` 16 :: Ptr PipelineCoverageModulationStateCreateFlagsNV)) (flags) lift $ poke ((p `plusPtr` 20 :: Ptr CoverageModulationModeNV)) (coverageModulationMode) lift $ poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (coverageModulationTableEnable)) let pCoverageModulationTableLength = Data.Vector.length $ (coverageModulationTable) coverageModulationTableCount'' <- lift $ if (coverageModulationTableCount) == 0 then pure $ fromIntegral pCoverageModulationTableLength else do unless (fromIntegral pCoverageModulationTableLength == (coverageModulationTableCount) || pCoverageModulationTableLength == 0) $ throwIO $ IOError Nothing InvalidArgument "" "pCoverageModulationTable must be empty or have 'coverageModulationTableCount' elements" Nothing Nothing pure (coverageModulationTableCount) lift $ poke ((p `plusPtr` 28 :: Ptr Word32)) (coverageModulationTableCount'') pCoverageModulationTable'' <- if Data.Vector.null (coverageModulationTable) then pure nullPtr else do pPCoverageModulationTable <- ContT $ allocaBytes @CFloat (((Data.Vector.length (coverageModulationTable))) * 4) lift $ Data.Vector.imapM_ (\i e -> poke (pPCoverageModulationTable `plusPtr` (4 * (i)) :: Ptr CFloat) (CFloat (e))) ((coverageModulationTable)) pure $ pPCoverageModulationTable lift $ poke ((p `plusPtr` 32 :: Ptr (Ptr CFloat))) pCoverageModulationTable'' lift $ f cStructSize = 40 cStructAlignment = 8 pokeZeroCStruct p f = do poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV) poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr) poke ((p `plusPtr` 20 :: Ptr CoverageModulationModeNV)) (zero) poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (zero)) f instance FromCStruct PipelineCoverageModulationStateCreateInfoNV where peekCStruct p = do flags <- peek @PipelineCoverageModulationStateCreateFlagsNV ((p `plusPtr` 16 :: Ptr PipelineCoverageModulationStateCreateFlagsNV)) coverageModulationMode <- peek @CoverageModulationModeNV ((p `plusPtr` 20 :: Ptr CoverageModulationModeNV)) coverageModulationTableEnable <- peek @Bool32 ((p `plusPtr` 24 :: Ptr Bool32)) coverageModulationTableCount <- peek @Word32 ((p `plusPtr` 28 :: Ptr Word32)) pCoverageModulationTable <- peek @(Ptr CFloat) ((p `plusPtr` 32 :: Ptr (Ptr CFloat))) let pCoverageModulationTableLength = if pCoverageModulationTable == nullPtr then 0 else (fromIntegral coverageModulationTableCount) pCoverageModulationTable' <- generateM pCoverageModulationTableLength (\i -> do pCoverageModulationTableElem <- peek @CFloat ((pCoverageModulationTable `advancePtrBytes` (4 * (i)) :: Ptr CFloat)) pure $ coerce @CFloat @Float pCoverageModulationTableElem) pure $ PipelineCoverageModulationStateCreateInfoNV flags coverageModulationMode (bool32ToBool coverageModulationTableEnable) coverageModulationTableCount pCoverageModulationTable' instance Zero PipelineCoverageModulationStateCreateInfoNV where zero = 
PipelineCoverageModulationStateCreateInfoNV zero zero zero zero mempty -- | VkPipelineCoverageModulationStateCreateFlagsNV - Reserved for future use -- -- = Description -- -- 'PipelineCoverageModulationStateCreateFlagsNV' is a bitmask type for -- setting a mask, but is currently reserved for future use. -- -- = See Also -- -- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_NV_framebuffer_mixed_samples VK_NV_framebuffer_mixed_samples>, -- 'PipelineCoverageModulationStateCreateInfoNV' newtype PipelineCoverageModulationStateCreateFlagsNV = PipelineCoverageModulationStateCreateFlagsNV Flags deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits) conNamePipelineCoverageModulationStateCreateFlagsNV :: String conNamePipelineCoverageModulationStateCreateFlagsNV = "PipelineCoverageModulationStateCreateFlagsNV" enumPrefixPipelineCoverageModulationStateCreateFlagsNV :: String enumPrefixPipelineCoverageModulationStateCreateFlagsNV = "" showTablePipelineCoverageModulationStateCreateFlagsNV :: [(PipelineCoverageModulationStateCreateFlagsNV, String)] showTablePipelineCoverageModulationStateCreateFlagsNV = [] instance Show PipelineCoverageModulationStateCreateFlagsNV where showsPrec = enumShowsPrec enumPrefixPipelineCoverageModulationStateCreateFlagsNV showTablePipelineCoverageModulationStateCreateFlagsNV conNamePipelineCoverageModulationStateCreateFlagsNV (\(PipelineCoverageModulationStateCreateFlagsNV x) -> x) (\x -> showString "0x" . showHex x) instance Read PipelineCoverageModulationStateCreateFlagsNV where readPrec = enumReadPrec enumPrefixPipelineCoverageModulationStateCreateFlagsNV showTablePipelineCoverageModulationStateCreateFlagsNV conNamePipelineCoverageModulationStateCreateFlagsNV PipelineCoverageModulationStateCreateFlagsNV -- | VkCoverageModulationModeNV - Specify the coverage modulation mode -- -- = See Also -- -- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_NV_framebuffer_mixed_samples VK_NV_framebuffer_mixed_samples>, -- 'PipelineCoverageModulationStateCreateInfoNV' newtype CoverageModulationModeNV = CoverageModulationModeNV Int32 deriving newtype (Eq, Ord, Storable, Zero) -- | 'COVERAGE_MODULATION_MODE_NONE_NV' specifies that no components are -- multiplied by the modulation factor. pattern COVERAGE_MODULATION_MODE_NONE_NV = CoverageModulationModeNV 0 -- | 'COVERAGE_MODULATION_MODE_RGB_NV' specifies that the red, green, and -- blue components are multiplied by the modulation factor. pattern COVERAGE_MODULATION_MODE_RGB_NV = CoverageModulationModeNV 1 -- | 'COVERAGE_MODULATION_MODE_ALPHA_NV' specifies that the alpha component -- is multiplied by the modulation factor. pattern COVERAGE_MODULATION_MODE_ALPHA_NV = CoverageModulationModeNV 2 -- | 'COVERAGE_MODULATION_MODE_RGBA_NV' specifies that all components are -- multiplied by the modulation factor. 
pattern COVERAGE_MODULATION_MODE_RGBA_NV = CoverageModulationModeNV 3 {-# complete COVERAGE_MODULATION_MODE_NONE_NV, COVERAGE_MODULATION_MODE_RGB_NV, COVERAGE_MODULATION_MODE_ALPHA_NV, COVERAGE_MODULATION_MODE_RGBA_NV :: CoverageModulationModeNV #-} conNameCoverageModulationModeNV :: String conNameCoverageModulationModeNV = "CoverageModulationModeNV" enumPrefixCoverageModulationModeNV :: String enumPrefixCoverageModulationModeNV = "COVERAGE_MODULATION_MODE_" showTableCoverageModulationModeNV :: [(CoverageModulationModeNV, String)] showTableCoverageModulationModeNV = [ (COVERAGE_MODULATION_MODE_NONE_NV , "NONE_NV") , (COVERAGE_MODULATION_MODE_RGB_NV , "RGB_NV") , (COVERAGE_MODULATION_MODE_ALPHA_NV, "ALPHA_NV") , (COVERAGE_MODULATION_MODE_RGBA_NV , "RGBA_NV") ] instance Show CoverageModulationModeNV where showsPrec = enumShowsPrec enumPrefixCoverageModulationModeNV showTableCoverageModulationModeNV conNameCoverageModulationModeNV (\(CoverageModulationModeNV x) -> x) (showsPrec 11) instance Read CoverageModulationModeNV where readPrec = enumReadPrec enumPrefixCoverageModulationModeNV showTableCoverageModulationModeNV conNameCoverageModulationModeNV CoverageModulationModeNV type NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION = 1 -- No documentation found for TopLevel "VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION" pattern NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION :: forall a . Integral a => a pattern NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION = 1 type NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME = "VK_NV_framebuffer_mixed_samples" -- No documentation found for TopLevel "VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME" pattern NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a pattern NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME = "VK_NV_framebuffer_mixed_samples"
expipiplus1/vulkan
src/Vulkan/Extensions/VK_NV_framebuffer_mixed_samples.hs
Haskell
bsd-3-clause
20,822
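Editorial sketch of filling in the structure via its 'Zero' instance. The four-entry table is only an illustration for 8 raster samples and 2 color samples (N/M = 4 entries), per the valid-usage rule quoted above; the imports and literal values are assumptions. In an application this value would then be chained into 'PipelineMultisampleStateCreateInfo' through its extension struct chain.

import qualified Data.Vector as V
import Vulkan.Extensions.VK_NV_framebuffer_mixed_samples
import Vulkan.Zero (zero)

coverageInfo :: PipelineCoverageModulationStateCreateInfoNV
coverageInfo = zero
  { coverageModulationMode        = COVERAGE_MODULATION_MODE_RGB_NV
  , coverageModulationTableEnable = True
  , coverageModulationTableCount  = 4            -- rasterization samples / color samples
  , coverageModulationTable       = V.fromList [0.25, 0.5, 0.75, 1]
  }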
{-# LANGUAGE RankNTypes #-}
module Network.Mail.Locutoria.Cli.Keymap where

import Control.Lens hiding (lens)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Default (Default, def)
import Graphics.Vty.Input (Key(..), Modifier(..))

import Network.Mail.Locutoria.Event
import Network.Mail.Locutoria.State
import Network.Mail.Locutoria.View

data KeyBindings = KeyBindings
  { _keymapGlobal           :: Keymap
  , _keymapChannelView      :: Keymap
  , _keymapConversationView :: Keymap
  }

type Keymap   = Map KeyCombo Event
type KeyCombo = (Key, [Modifier])

handleKey :: KeyBindings -> Key -> [Modifier] -> State -> Maybe Event
handleKey kb key mods st = case st^.stView of
  Root                   -> tryKeymaps [_keymapGlobal]
  ComposeReply _ _       -> tryKeymaps [_keymapGlobal]
  ShowChannel _ _        -> tryKeymaps [_keymapChannelView, _keymapGlobal]
  ShowConversation _ _ _ -> tryKeymaps [_keymapConversationView, _keymapGlobal]
  ShowQueue              -> tryKeymaps [_keymapGlobal]
  Quit                   -> tryKeymaps [_keymapGlobal]
  where
    tryKeymaps []     = Nothing
    tryKeymaps (m:ms) = case Map.lookup (key, mods) (m kb) of
      Just e  -> Just e
      Nothing -> tryKeymaps ms

instance Default KeyBindings where
  def = KeyBindings defKeymapGlobal defKeymapChannelView defKeymapConversationView

defKeymapGlobal :: Map KeyCombo Event
defKeymapGlobal = Map.fromList
  [ ((KChar 'q', []),      quit)
  , ((KChar '@', []),      refresh)
  , ((KChar 'p', [MCtrl]), prevChannel)
  , ((KChar 'n', [MCtrl]), nextChannel)
  ]

defKeymapChannelView :: Map KeyCombo Event
defKeymapChannelView = Map.fromList
  [ ((KChar 'r', []), composeReply)
  , ((KEnter,    []), showConv)
  , ((KChar 'j', []), nextConv)
  , ((KChar 'k', []), prevConv)
  , ((KChar 'g', []), setConv 0)
  , ((KChar 'G', []), setConv (-1))
  ]

defKeymapConversationView :: Map KeyCombo Event
defKeymapConversationView = Map.fromList
  [ ((KChar 'r', []), composeReply)
  , ((KChar 'j', []), nextMsg)
  , ((KChar 'k', []), prevMsg)
  , ((KChar 'g', []), setMsg 0)
  , ((KChar 'G', []), setMsg (-1))
  ]
hallettj/locutoria
Network/Mail/Locutoria/Cli/Keymap.hs
Haskell
bsd-3-clause
2,183
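Editorial sketch: deriving a custom 'KeyBindings' value from the defaults, rebinding 'x' in the channel view. Only names defined or imported in the module above are used; the choice of key and event is an assumption for illustration.

myBindings :: KeyBindings
myBindings = def
  { _keymapChannelView =
      Map.insert (KChar 'x', []) composeReply (_keymapChannelView def)
  }
-- handleKey myBindings (KChar 'x') [] st then yields the composeReply event in
-- a ShowChannel view, falling back to the global map in the other views.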
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE CPP #-} -- | -- Module : Text.Syntax.Poly.Combinators -- Copyright : 2010-11 University of Marburg, 2012 Kei Hibino -- License : BSD3 -- -- Maintainer : ex8k.hibino@gmail.com -- Stability : experimental -- Portability : unknown -- -- This module contains combinators for classes defined in "Text.Syntax.Poly.Classes". module Text.Syntax.Poly.Combinators ( -- * Lexemes this, list, -- * Repetition none, many, some, replicate, sepBy, sepBy1, chainl1, count, -- * Skipping skipMany, skipSome, -- * Sequencing (*>), (<*), between, -- * Alternation (<+>), choice, optional, bool, (<$?>), (<?$>), -- * Printing format ) where #if __GLASGOW_HASKELL__ < 710 import Prelude hiding (foldl, succ, replicate, (.)) #else import Prelude hiding (foldl, succ, replicate, (.), (<$>), (<*>), (<*), (*>)) #endif import Control.Isomorphism.Partial.Ext (nothing, just, nil, cons, left, right, foldl, (.), Iso, (<$>), inverse, element, unit, commute, ignore, mayAppend, mayPrepend, succ) import Text.Syntax.Poly.Class ((<*>), (<|>), empty, AbstractSyntax(syntax), Syntax(token)) -- | 'none' parses\/prints empty tokens stream consume\/produces a empty list. none :: AbstractSyntax delta => delta [alpha] none = nil <$> syntax () -- | The 'many' combinator is used to repeat syntax. -- @many p@ repeats the passed syntax @p@ -- zero or more than zero times. many :: AbstractSyntax delta => delta alpha -> delta [alpha] many p = some p <|> none -- | The 'some' combinator is used to repeat syntax. -- @some p@ repeats the passed syntax @p@ -- more than zero times. some :: AbstractSyntax delta => delta alpha -> delta [alpha] some p = cons <$> p <*> many p -- | The 'replicate' combinator is used to repeat syntax. -- @replicate n p@ repeats the passwd syntax @p@ -- @n@ times. replicate :: AbstractSyntax delta => Int -> delta alpha -> delta [alpha] replicate n' p = rec n' where rec n | n <= 0 = none | otherwise = cons <$> p <*> rec (n - 1) infixl 4 <+> -- | The '<+>' combinator choose one of two syntax. (<+>) :: AbstractSyntax delta => delta alpha -> delta beta -> delta (Either alpha beta) p <+> q = (left <$> p) <|> (right <$> q) -- | The 'this' combinator parses\/prints a fixed token this :: (Syntax tok delta, Eq tok) => tok -> delta () this t = inverse (element t) <$> token -- | The 'list' combinator parses\/prints a fixed token list and consumes\/produces a unit value. list :: (Syntax tok delta, Eq tok) => [tok] -> delta () list [] = syntax () list (c:cs) = inverse (element ((), ())) <$> this c <*> list cs -- list cs = foldr -- (\ c -> (inverse (element ((), ())) <$>) . (this c <*>)) -- (syntax ()) -- cs -- | This variant of '<*>' ignores its left result. -- In contrast to its counterpart derived from the `Applicative` class, the ignored -- parts have type `delta ()` rather than `delta beta` because otherwise information relevant -- for pretty-printing would be lost. (*>) :: AbstractSyntax delta => delta () -> delta alpha -> delta alpha p *> q = inverse unit . commute <$> p <*> q -- | This variant of '<*>' ignores its right result. -- In contrast to its counterpart derived from the `Applicative` class, the ignored -- parts have type `delta ()` rather than `delta beta` because otherwise information relevant -- for pretty-printing would be lost. (<*) :: AbstractSyntax delta => delta alpha -> delta () -> delta alpha p <* q = inverse unit <$> p <*> q infixl 7 *>, <* -- | The 'between' function combines '*>' and '<*' in the obvious way. 
between :: AbstractSyntax delta => delta () -> delta () -> delta alpha -> delta alpha between p q r = p *> r <* q -- | The 'chainl1' combinator is used to parse a -- left-associative chain of infix operators. chainl1 :: AbstractSyntax delta => delta alpha -> delta beta -> Iso (alpha, (beta, alpha)) alpha -> delta alpha chainl1 arg op f = foldl f <$> arg <*> many (op <*> arg) -- | The 'count' combinator counts fixed syntax. count :: (Eq beta, Enum beta, AbstractSyntax delta) => delta () -> delta beta count p = succ <$> p *> count p <|> syntax (toEnum 0) -- | The @skipMany p@ parse the passed syntax @p@ -- zero or more than zero times, and print nothing. skipMany :: AbstractSyntax delta => delta alpha -> delta () skipMany p = ignore [] <$> many p -- | The @skipSome v p@ parse the passed syntax @p@ -- more than zero times, and print @p@. skipSome :: AbstractSyntax delta => delta alpha -> delta alpha skipSome p = p <* skipMany p -- | 'choice' a syntax from list. choice :: AbstractSyntax delta => [delta alpha] -> delta alpha choice (s:ss) = s <|> choice ss choice [] = empty -- | The 'optional' combinator may parse \/ print passed syntax. optional :: AbstractSyntax delta => delta alpha -> delta (Maybe alpha) optional x = just <$> x <|> nothing <$> syntax () -- | The 'bool' combinator parse \/ print passed syntax or not. bool :: AbstractSyntax delta => delta () -> delta Bool bool x = x *> syntax True <|> syntax False -- | The 'sepBy' combinator separates syntax into delimited list. -- @sepBy p d@ is @p@ list syntax delimited by @d@ syntax. sepBy :: AbstractSyntax delta => delta alpha -> delta () -> delta [alpha] sepBy x sep = x `sepBy1` sep <|> none -- | The 'sepBy1' combinator separates syntax into delimited non-empty list. -- @sepBy p d@ is @p@ list syntax delimited by @d@ syntax. sepBy1 :: AbstractSyntax delta => delta alpha -> delta () -> delta [alpha] sepBy1 x sep = cons <$> x <*> many (sep *> x) -- | May append not to repeat prefix syntax. (<$?>) :: AbstractSyntax delta => Iso (a, b) a -> delta (a, Maybe b) -> delta a cf <$?> pair = mayAppend cf <$> pair -- | May prepend not to repeat suffix syntax. (<?$>) :: AbstractSyntax delta => Iso (a, b) b -> delta (Maybe a, b) -> delta b cf <?$> pair = mayPrepend cf <$> pair infix 5 <$?>, <?$> -- | The 'format' combinator just print passed tokens -- or may parse passed tokens. -- This is useful in cases when just formatting with indents. format :: (Syntax tok delta, Eq tok) => [tok] -> delta () format tks = ignore (Just ()) <$> optional (list tks)
schernichkin/haskell-invertible-syntax-poly
src/Text/Syntax/Poly/Combinators.hs
Haskell
bsd-3-clause
6,253
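Editorial sketch: combining the combinators above into a bracketed, comma-separated list. It assumes character tokens and relies on 'AbstractSyntax' being a superclass of 'Syntax', which the definitions above already depend on (e.g. 'list' uses 'syntax' under a plain 'Syntax' constraint).

bracketedList :: Syntax Char delta => delta alpha -> delta [alpha]
bracketedList item =
  between (this '[') (this ']') (item `sepBy` this ',')
-- Read as a parser this accepts inputs shaped like "[x,y,z]"; read as a
-- printer it produces the same shape from a list, which is the point of the
-- invertible-syntax style.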
{- (c) The AQUA Project, Glasgow University, 1994-1998 \section[TysPrim]{Wired-in knowledge about primitive types} -} {-# LANGUAGE CPP #-} -- | This module defines TyCons that can't be expressed in Haskell. -- They are all, therefore, wired-in TyCons. C.f module TysWiredIn module TysPrim( mkPrimTyConName, -- For implicit parameters in TysWiredIn only mkTemplateKindVars, mkTemplateTyVars, mkTemplateTyVarsFrom, mkTemplateKiTyVars, mkTemplateTyConBinders, mkTemplateKindTyConBinders, mkTemplateAnonTyConBinders, alphaTyVars, alphaTyVar, betaTyVar, gammaTyVar, deltaTyVar, alphaTys, alphaTy, betaTy, gammaTy, deltaTy, runtimeRep1TyVar, runtimeRep2TyVar, runtimeRep1Ty, runtimeRep2Ty, openAlphaTy, openBetaTy, openAlphaTyVar, openBetaTyVar, -- Kind constructors... tYPETyConName, unliftedTypeKindTyConName, -- Kinds tYPE, funTyCon, funTyConName, primTyCons, charPrimTyCon, charPrimTy, intPrimTyCon, intPrimTy, wordPrimTyCon, wordPrimTy, addrPrimTyCon, addrPrimTy, floatPrimTyCon, floatPrimTy, doublePrimTyCon, doublePrimTy, voidPrimTyCon, voidPrimTy, statePrimTyCon, mkStatePrimTy, realWorldTyCon, realWorldTy, realWorldStatePrimTy, proxyPrimTyCon, mkProxyPrimTy, arrayPrimTyCon, mkArrayPrimTy, byteArrayPrimTyCon, byteArrayPrimTy, arrayArrayPrimTyCon, mkArrayArrayPrimTy, smallArrayPrimTyCon, mkSmallArrayPrimTy, mutableArrayPrimTyCon, mkMutableArrayPrimTy, mutableByteArrayPrimTyCon, mkMutableByteArrayPrimTy, mutableArrayArrayPrimTyCon, mkMutableArrayArrayPrimTy, smallMutableArrayPrimTyCon, mkSmallMutableArrayPrimTy, mutVarPrimTyCon, mkMutVarPrimTy, mVarPrimTyCon, mkMVarPrimTy, tVarPrimTyCon, mkTVarPrimTy, stablePtrPrimTyCon, mkStablePtrPrimTy, stableNamePrimTyCon, mkStableNamePrimTy, compactPrimTyCon, compactPrimTy, bcoPrimTyCon, bcoPrimTy, weakPrimTyCon, mkWeakPrimTy, threadIdPrimTyCon, threadIdPrimTy, int32PrimTyCon, int32PrimTy, word32PrimTyCon, word32PrimTy, int64PrimTyCon, int64PrimTy, word64PrimTyCon, word64PrimTy, eqPrimTyCon, -- ty1 ~# ty2 eqReprPrimTyCon, -- ty1 ~R# ty2 (at role Representational) eqPhantPrimTyCon, -- ty1 ~P# ty2 (at role Phantom) -- * SIMD #include "primop-vector-tys-exports.hs-incl" ) where #include "HsVersions.h" import {-# SOURCE #-} TysWiredIn ( runtimeRepTy, liftedTypeKind , vecRepDataConTyCon, ptrRepUnliftedDataConTyCon , voidRepDataConTy, intRepDataConTy , wordRepDataConTy, int64RepDataConTy, word64RepDataConTy, addrRepDataConTy , floatRepDataConTy, doubleRepDataConTy , vec2DataConTy, vec4DataConTy, vec8DataConTy, vec16DataConTy, vec32DataConTy , vec64DataConTy , int8ElemRepDataConTy, int16ElemRepDataConTy, int32ElemRepDataConTy , int64ElemRepDataConTy, word8ElemRepDataConTy, word16ElemRepDataConTy , word32ElemRepDataConTy, word64ElemRepDataConTy, floatElemRepDataConTy , doubleElemRepDataConTy ) import Var ( TyVar, mkTyVar ) import Name import TyCon import SrcLoc import Unique import PrelNames import FastString import Outputable import TyCoRep -- Doesn't need special access, but this is easier to avoid -- import loops which show up if you import Type instead import Data.Char {- ************************************************************************ * * \subsection{Primitive type constructors} * * ************************************************************************ -} primTyCons :: [TyCon] primTyCons = [ addrPrimTyCon , arrayPrimTyCon , byteArrayPrimTyCon , arrayArrayPrimTyCon , smallArrayPrimTyCon , charPrimTyCon , doublePrimTyCon , floatPrimTyCon , intPrimTyCon , int32PrimTyCon , int64PrimTyCon , bcoPrimTyCon , weakPrimTyCon , mutableArrayPrimTyCon , 
mutableByteArrayPrimTyCon , mutableArrayArrayPrimTyCon , smallMutableArrayPrimTyCon , mVarPrimTyCon , tVarPrimTyCon , mutVarPrimTyCon , realWorldTyCon , stablePtrPrimTyCon , stableNamePrimTyCon , compactPrimTyCon , statePrimTyCon , voidPrimTyCon , proxyPrimTyCon , threadIdPrimTyCon , wordPrimTyCon , word32PrimTyCon , word64PrimTyCon , eqPrimTyCon , eqReprPrimTyCon , eqPhantPrimTyCon , unliftedTypeKindTyCon , tYPETyCon #include "primop-vector-tycons.hs-incl" ] mkPrimTc :: FastString -> Unique -> TyCon -> Name mkPrimTc fs unique tycon = mkWiredInName gHC_PRIM (mkTcOccFS fs) unique (ATyCon tycon) -- Relevant TyCon UserSyntax mkBuiltInPrimTc :: FastString -> Unique -> TyCon -> Name mkBuiltInPrimTc fs unique tycon = mkWiredInName gHC_PRIM (mkTcOccFS fs) unique (ATyCon tycon) -- Relevant TyCon BuiltInSyntax charPrimTyConName, intPrimTyConName, int32PrimTyConName, int64PrimTyConName, wordPrimTyConName, word32PrimTyConName, word64PrimTyConName, addrPrimTyConName, floatPrimTyConName, doublePrimTyConName, statePrimTyConName, proxyPrimTyConName, realWorldTyConName, arrayPrimTyConName, arrayArrayPrimTyConName, smallArrayPrimTyConName, byteArrayPrimTyConName, mutableArrayPrimTyConName, mutableByteArrayPrimTyConName, mutableArrayArrayPrimTyConName, smallMutableArrayPrimTyConName, mutVarPrimTyConName, mVarPrimTyConName, tVarPrimTyConName, stablePtrPrimTyConName, stableNamePrimTyConName, compactPrimTyConName, bcoPrimTyConName, weakPrimTyConName, threadIdPrimTyConName, eqPrimTyConName, eqReprPrimTyConName, eqPhantPrimTyConName, voidPrimTyConName :: Name charPrimTyConName = mkPrimTc (fsLit "Char#") charPrimTyConKey charPrimTyCon intPrimTyConName = mkPrimTc (fsLit "Int#") intPrimTyConKey intPrimTyCon int32PrimTyConName = mkPrimTc (fsLit "Int32#") int32PrimTyConKey int32PrimTyCon int64PrimTyConName = mkPrimTc (fsLit "Int64#") int64PrimTyConKey int64PrimTyCon wordPrimTyConName = mkPrimTc (fsLit "Word#") wordPrimTyConKey wordPrimTyCon word32PrimTyConName = mkPrimTc (fsLit "Word32#") word32PrimTyConKey word32PrimTyCon word64PrimTyConName = mkPrimTc (fsLit "Word64#") word64PrimTyConKey word64PrimTyCon addrPrimTyConName = mkPrimTc (fsLit "Addr#") addrPrimTyConKey addrPrimTyCon floatPrimTyConName = mkPrimTc (fsLit "Float#") floatPrimTyConKey floatPrimTyCon doublePrimTyConName = mkPrimTc (fsLit "Double#") doublePrimTyConKey doublePrimTyCon statePrimTyConName = mkPrimTc (fsLit "State#") statePrimTyConKey statePrimTyCon voidPrimTyConName = mkPrimTc (fsLit "Void#") voidPrimTyConKey voidPrimTyCon proxyPrimTyConName = mkPrimTc (fsLit "Proxy#") proxyPrimTyConKey proxyPrimTyCon eqPrimTyConName = mkPrimTc (fsLit "~#") eqPrimTyConKey eqPrimTyCon eqReprPrimTyConName = mkBuiltInPrimTc (fsLit "~R#") eqReprPrimTyConKey eqReprPrimTyCon eqPhantPrimTyConName = mkBuiltInPrimTc (fsLit "~P#") eqPhantPrimTyConKey eqPhantPrimTyCon realWorldTyConName = mkPrimTc (fsLit "RealWorld") realWorldTyConKey realWorldTyCon arrayPrimTyConName = mkPrimTc (fsLit "Array#") arrayPrimTyConKey arrayPrimTyCon byteArrayPrimTyConName = mkPrimTc (fsLit "ByteArray#") byteArrayPrimTyConKey byteArrayPrimTyCon arrayArrayPrimTyConName = mkPrimTc (fsLit "ArrayArray#") arrayArrayPrimTyConKey arrayArrayPrimTyCon smallArrayPrimTyConName = mkPrimTc (fsLit "SmallArray#") smallArrayPrimTyConKey smallArrayPrimTyCon mutableArrayPrimTyConName = mkPrimTc (fsLit "MutableArray#") mutableArrayPrimTyConKey mutableArrayPrimTyCon mutableByteArrayPrimTyConName = mkPrimTc (fsLit "MutableByteArray#") mutableByteArrayPrimTyConKey mutableByteArrayPrimTyCon 
mutableArrayArrayPrimTyConName= mkPrimTc (fsLit "MutableArrayArray#") mutableArrayArrayPrimTyConKey mutableArrayArrayPrimTyCon smallMutableArrayPrimTyConName= mkPrimTc (fsLit "SmallMutableArray#") smallMutableArrayPrimTyConKey smallMutableArrayPrimTyCon mutVarPrimTyConName = mkPrimTc (fsLit "MutVar#") mutVarPrimTyConKey mutVarPrimTyCon mVarPrimTyConName = mkPrimTc (fsLit "MVar#") mVarPrimTyConKey mVarPrimTyCon tVarPrimTyConName = mkPrimTc (fsLit "TVar#") tVarPrimTyConKey tVarPrimTyCon stablePtrPrimTyConName = mkPrimTc (fsLit "StablePtr#") stablePtrPrimTyConKey stablePtrPrimTyCon stableNamePrimTyConName = mkPrimTc (fsLit "StableName#") stableNamePrimTyConKey stableNamePrimTyCon compactPrimTyConName = mkPrimTc (fsLit "Compact#") compactPrimTyConKey compactPrimTyCon bcoPrimTyConName = mkPrimTc (fsLit "BCO#") bcoPrimTyConKey bcoPrimTyCon weakPrimTyConName = mkPrimTc (fsLit "Weak#") weakPrimTyConKey weakPrimTyCon threadIdPrimTyConName = mkPrimTc (fsLit "ThreadId#") threadIdPrimTyConKey threadIdPrimTyCon {- ************************************************************************ * * \subsection{Support code} * * ************************************************************************ alphaTyVars is a list of type variables for use in templates: ["a", "b", ..., "z", "t1", "t2", ... ] -} mkTemplateKindVars :: [Kind] -> [TyVar] -- k0 with unique (mkAlphaTyVarUnique 0) -- k1 with unique (mkAlphaTyVarUnique 1) -- ... etc mkTemplateKindVars kinds = [ mkTyVar name kind | (kind, u) <- kinds `zip` [0..] , let occ = mkTyVarOccFS (mkFastString ('k' : show u)) name = mkInternalName (mkAlphaTyVarUnique u) occ noSrcSpan ] mkTemplateTyVarsFrom :: Int -> [Kind] -> [TyVar] -- a with unique (mkAlphaTyVarUnique n) -- b with unique (mkAlphaTyVarUnique n+1) -- ... etc -- Typically called as -- mkTemplateTyVarsFrom (legth kv_bndrs) kinds -- where kv_bndrs are the kind-level binders of a TyCon mkTemplateTyVarsFrom n kinds = [ mkTyVar name kind | (kind, index) <- zip kinds [0..], let ch_ord = index + ord 'a' name_str | ch_ord <= ord 'z' = [chr ch_ord] | otherwise = 't':show index uniq = mkAlphaTyVarUnique (index + n) name = mkInternalName uniq occ noSrcSpan occ = mkTyVarOccFS (mkFastString name_str) ] mkTemplateTyVars :: [Kind] -> [TyVar] mkTemplateTyVars = mkTemplateTyVarsFrom 1 mkTemplateTyConBinders :: [Kind] -- [k1, .., kn] Kinds of kind-forall'd vars -> ([Kind] -> [Kind]) -- Arg is [kv1:k1, ..., kvn:kn] -- same length as first arg -- Result is anon arg kinds -> [TyConBinder] mkTemplateTyConBinders kind_var_kinds mk_anon_arg_kinds = kv_bndrs ++ tv_bndrs where kv_bndrs = mkTemplateKindTyConBinders kind_var_kinds anon_kinds = mk_anon_arg_kinds (mkTyVarTys (binderVars kv_bndrs)) tv_bndrs = mkTemplateAnonTyConBindersFrom (length kv_bndrs) anon_kinds mkTemplateKiTyVars :: [Kind] -- [k1, .., kn] Kinds of kind-forall'd vars -> ([Kind] -> [Kind]) -- Arg is [kv1:k1, ..., kvn:kn] -- same length as first arg -- Result is anon arg kinds [ak1, .., akm] -> [TyVar] -- [kv1:k1, ..., kvn:kn, av1:ak1, ..., avm:akm] -- Example: if you want the tyvars for -- forall (r:RuntimeRep) (a:TYPE r) (b:*). blah -- call mkTemplateKiTyVars [RuntimeRep] (\[r]. 
[TYPE r, *) mkTemplateKiTyVars kind_var_kinds mk_arg_kinds = kv_bndrs ++ tv_bndrs where kv_bndrs = mkTemplateKindVars kind_var_kinds anon_kinds = mk_arg_kinds (mkTyVarTys kv_bndrs) tv_bndrs = mkTemplateTyVarsFrom (length kv_bndrs) anon_kinds mkTemplateKindTyConBinders :: [Kind] -> [TyConBinder] -- Makes named, Specified binders mkTemplateKindTyConBinders kinds = [mkNamedTyConBinder Specified tv | tv <- mkTemplateKindVars kinds] mkTemplateAnonTyConBinders :: [Kind] -> [TyConBinder] mkTemplateAnonTyConBinders kinds = map mkAnonTyConBinder (mkTemplateTyVars kinds) mkTemplateAnonTyConBindersFrom :: Int -> [Kind] -> [TyConBinder] mkTemplateAnonTyConBindersFrom n kinds = map mkAnonTyConBinder (mkTemplateTyVarsFrom n kinds) alphaTyVars :: [TyVar] alphaTyVars = mkTemplateTyVars $ repeat liftedTypeKind alphaTyVar, betaTyVar, gammaTyVar, deltaTyVar :: TyVar (alphaTyVar:betaTyVar:gammaTyVar:deltaTyVar:_) = alphaTyVars alphaTys :: [Type] alphaTys = mkTyVarTys alphaTyVars alphaTy, betaTy, gammaTy, deltaTy :: Type (alphaTy:betaTy:gammaTy:deltaTy:_) = alphaTys runtimeRep1TyVar, runtimeRep2TyVar :: TyVar (runtimeRep1TyVar : runtimeRep2TyVar : _) = drop 16 (mkTemplateTyVars (repeat runtimeRepTy)) -- selects 'q','r' runtimeRep1Ty, runtimeRep2Ty :: Type runtimeRep1Ty = mkTyVarTy runtimeRep1TyVar runtimeRep2Ty = mkTyVarTy runtimeRep2TyVar openAlphaTyVar, openBetaTyVar :: TyVar [openAlphaTyVar,openBetaTyVar] = mkTemplateTyVars [tYPE runtimeRep1Ty, tYPE runtimeRep2Ty] openAlphaTy, openBetaTy :: Type openAlphaTy = mkTyVarTy openAlphaTyVar openBetaTy = mkTyVarTy openBetaTyVar {- ************************************************************************ * * FunTyCon * * ************************************************************************ -} funTyConName :: Name funTyConName = mkPrimTyConName (fsLit "(->)") funTyConKey funTyCon funTyCon :: TyCon funTyCon = mkFunTyCon funTyConName tc_bndrs tc_rep_nm where tc_bndrs = mkTemplateAnonTyConBinders [liftedTypeKind, liftedTypeKind] -- You might think that (->) should have type (?? -> ? -> *), and you'd be right -- But if we do that we get kind errors when saying -- instance Control.Arrow (->) -- because the expected kind is (*->*->*). The trouble is that the -- expected/actual stuff in the unifier does not go contra-variant, whereas -- the kind sub-typing does. Sigh. It really only matters if you use (->) in -- a prefix way, thus: (->) Int# Int#. And this is unusual. -- because they are never in scope in the source tc_rep_nm = mkPrelTyConRepName funTyConName {- ************************************************************************ * * Kinds * * ************************************************************************ Note [TYPE and RuntimeRep] ~~~~~~~~~~~~~~~~~~~~~~~~~~ All types that classify values have a kind of the form (TYPE rr), where data RuntimeRep -- Defined in ghc-prim:GHC.Types = PtrRepLifted | PtrRepUnlifted | IntRep | FloatRep .. etc .. rr :: RuntimeRep TYPE :: RuntimeRep -> TYPE 'PtrRepLifted -- Built in So for example: Int :: TYPE 'PtrRepLifted Array# Int :: TYPE 'PtrRepUnlifted Int# :: TYPE 'IntRep Float# :: TYPE 'FloatRep Maybe :: TYPE 'PtrRepLifted -> TYPE 'PtrRepLifted We abbreviate '*' specially: type * = TYPE 'PtrRepLifted The 'rr' parameter tells us how the value is represented at runime. Generally speaking, you can't be polymorphic in 'rr'. E.g f :: forall (rr:RuntimeRep) (a:TYPE rr). a -> [a] f = /\(rr:RuntimeRep) (a:rr) \(a:rr). ... 
This is no good: we could not generate code for 'f', because the calling convention for 'f' varies depending on whether the argument is an Int, Int#, or Float#. (You could imagine generating specialised code, one for each instantiation of 'rr', but we don't do that.) Certain functions CAN be runtime-rep-polymorphic, because the code generator never has to manipulate a value of type 'a :: TYPE rr'. * error :: forall (rr:RuntimeRep) (a:TYPE rr). String -> a Code generator never has to manipulate the return value. * unsafeCoerce#, defined in MkId.unsafeCoerceId: Always inlined to be a no-op unsafeCoerce# :: forall (r1 :: RuntimeRep) (r2 :: RuntimeRep) (a :: TYPE r1) (b :: TYPE r2). a -> b * Unboxed tuples, and unboxed sums, defined in TysWiredIn Always inlined, and hence specialised to the call site (#,#) :: forall (r1 :: RuntimeRep) (r2 :: RuntimeRep) (a :: TYPE r1) (b :: TYPE r2). a -> b -> TYPE 'UnboxedTupleRep See Note [Unboxed tuple kinds] Note [Unboxed tuple kinds] ~~~~~~~~~~~~~~~~~~~~~~~~~~ What kind does (# Int, Float# #) have? The "right" answer would be TYPE ('UnboxedTupleRep [PtrRepLifted, FloatRep]) Currently we do not do this. We just have (# Int, Float# #) :: TYPE 'UnboxedTupleRep which does not tell us exactly how it is represented. Note [PrimRep and kindPrimRep] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ As part of its source code, in TyCon, GHC has data PrimRep = PtrRep | IntRep | FloatRep | ...etc... Notice that * RuntimeRep is part of the syntax tree of the program being compiled (defined in a library: ghc-prim:GHC.Types) * PrimRep is part of GHC's source code. (defined in TyCon) We need to get from one to the other; that is what kindPrimRep does. Suppose we have a value (v :: t) where (t :: k) Given this kind k = TyConApp "TYPE" [rep] GHC needs to be able to figure out how 'v' is represented at runtime. It expects 'rep' to be of the form TyConApp rr_dc args where 'rr_dc' is a promoted data constructor from RuntimeRep. So now we need to go from 'dc' to the corresponding PrimRep. We store this PrimRep in the promoted data constructor itself: see TyCon.promDcRepInfo. -} tYPETyCon, unliftedTypeKindTyCon :: TyCon tYPETyConName, unliftedTypeKindTyConName :: Name tYPETyCon = mkKindTyCon tYPETyConName (mkTemplateAnonTyConBinders [runtimeRepTy]) liftedTypeKind [Nominal] (mkPrelTyConRepName tYPETyConName) -- See Note [TYPE and RuntimeRep] -- NB: unlifted is wired in because there is no way to parse it in -- Haskell. That's the only reason for wiring it in. unliftedTypeKindTyCon = mkSynonymTyCon unliftedTypeKindTyConName [] liftedTypeKind [] (tYPE (TyConApp ptrRepUnliftedDataConTyCon [])) True -- no foralls True -- family free -------------------------- -- ... and now their names -- If you edit these, you may need to update the GHC formalism -- See Note [GHC Formalism] in coreSyn/CoreLint.hs tYPETyConName = mkPrimTyConName (fsLit "TYPE") tYPETyConKey tYPETyCon unliftedTypeKindTyConName = mkPrimTyConName (fsLit "#") unliftedTypeKindTyConKey unliftedTypeKindTyCon mkPrimTyConName :: FastString -> Unique -> TyCon -> Name mkPrimTyConName = mkPrimTcName BuiltInSyntax -- All of the super kinds and kinds are defined in Prim, -- and use BuiltInSyntax, because they are never in scope in the source mkPrimTcName :: BuiltInSyntax -> FastString -> Unique -> TyCon -> Name mkPrimTcName built_in_syntax occ key tycon = mkWiredInName gHC_PRIM (mkTcOccFS occ) key (ATyCon tycon) built_in_syntax ----------------------------- -- | Given a RuntimeRep, applies TYPE to it.
-- see Note [TYPE and RuntimeRep] tYPE :: Type -> Type tYPE rr = TyConApp tYPETyCon [rr] {- ************************************************************************ * * \subsection[TysPrim-basic]{Basic primitive types (@Char#@, @Int#@, etc.)} * * ************************************************************************ -} -- only used herein pcPrimTyCon :: Name -> [Role] -> PrimRep -> TyCon pcPrimTyCon name roles rep = mkPrimTyCon name binders result_kind roles where binders = mkTemplateAnonTyConBinders (map (const liftedTypeKind) roles) result_kind = tYPE rr rr = case rep of VoidRep -> voidRepDataConTy PtrRep -> TyConApp ptrRepUnliftedDataConTyCon [] IntRep -> intRepDataConTy WordRep -> wordRepDataConTy Int64Rep -> int64RepDataConTy Word64Rep -> word64RepDataConTy AddrRep -> addrRepDataConTy FloatRep -> floatRepDataConTy DoubleRep -> doubleRepDataConTy VecRep n elem -> TyConApp vecRepDataConTyCon [n', elem'] where n' = case n of 2 -> vec2DataConTy 4 -> vec4DataConTy 8 -> vec8DataConTy 16 -> vec16DataConTy 32 -> vec32DataConTy 64 -> vec64DataConTy _ -> pprPanic "Disallowed VecCount" (ppr n) elem' = case elem of Int8ElemRep -> int8ElemRepDataConTy Int16ElemRep -> int16ElemRepDataConTy Int32ElemRep -> int32ElemRepDataConTy Int64ElemRep -> int64ElemRepDataConTy Word8ElemRep -> word8ElemRepDataConTy Word16ElemRep -> word16ElemRepDataConTy Word32ElemRep -> word32ElemRepDataConTy Word64ElemRep -> word64ElemRepDataConTy FloatElemRep -> floatElemRepDataConTy DoubleElemRep -> doubleElemRepDataConTy pcPrimTyCon0 :: Name -> PrimRep -> TyCon pcPrimTyCon0 name rep = pcPrimTyCon name [] rep charPrimTy :: Type charPrimTy = mkTyConTy charPrimTyCon charPrimTyCon :: TyCon charPrimTyCon = pcPrimTyCon0 charPrimTyConName WordRep intPrimTy :: Type intPrimTy = mkTyConTy intPrimTyCon intPrimTyCon :: TyCon intPrimTyCon = pcPrimTyCon0 intPrimTyConName IntRep int32PrimTy :: Type int32PrimTy = mkTyConTy int32PrimTyCon int32PrimTyCon :: TyCon int32PrimTyCon = pcPrimTyCon0 int32PrimTyConName IntRep int64PrimTy :: Type int64PrimTy = mkTyConTy int64PrimTyCon int64PrimTyCon :: TyCon int64PrimTyCon = pcPrimTyCon0 int64PrimTyConName Int64Rep wordPrimTy :: Type wordPrimTy = mkTyConTy wordPrimTyCon wordPrimTyCon :: TyCon wordPrimTyCon = pcPrimTyCon0 wordPrimTyConName WordRep word32PrimTy :: Type word32PrimTy = mkTyConTy word32PrimTyCon word32PrimTyCon :: TyCon word32PrimTyCon = pcPrimTyCon0 word32PrimTyConName WordRep word64PrimTy :: Type word64PrimTy = mkTyConTy word64PrimTyCon word64PrimTyCon :: TyCon word64PrimTyCon = pcPrimTyCon0 word64PrimTyConName Word64Rep addrPrimTy :: Type addrPrimTy = mkTyConTy addrPrimTyCon addrPrimTyCon :: TyCon addrPrimTyCon = pcPrimTyCon0 addrPrimTyConName AddrRep floatPrimTy :: Type floatPrimTy = mkTyConTy floatPrimTyCon floatPrimTyCon :: TyCon floatPrimTyCon = pcPrimTyCon0 floatPrimTyConName FloatRep doublePrimTy :: Type doublePrimTy = mkTyConTy doublePrimTyCon doublePrimTyCon :: TyCon doublePrimTyCon = pcPrimTyCon0 doublePrimTyConName DoubleRep {- ************************************************************************ * * \subsection[TysPrim-state]{The @State#@ type (and @_RealWorld@ types)} * * ************************************************************************ Note [The equality types story] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ GHC sports a veritable menagerie of equality types: Hetero? 
Levity Result Role Defining module ------------------------------------------------------------ ~# hetero unlifted # nominal GHC.Prim ~~ hetero lifted Constraint nominal GHC.Types ~ homo lifted Constraint nominal Data.Type.Equality :~: homo lifted * nominal Data.Type.Equality ~R# hetero unlifted # repr GHC.Prim Coercible homo lifted Constraint repr GHC.Types Coercion homo lifted * repr Data.Type.Coercion ~P# hetero unlifted phantom GHC.Prim Recall that "hetero" means the equality can related types of different kinds. Knowing that (t1 ~# t2) or (t1 ~R# t2) or even that (t1 ~P# t2) also means that (k1 ~# k2), where (t1 :: k1) and (t2 :: k2). To produce less confusion for end users, when not dumping and without -fprint-equality-relations, each of these groups is printed as the bottommost listed equality. That is, (~#) and (~~) are both rendered as (~) in error messages, and (~R#) is rendered as Coercible. Let's take these one at a time: -------------------------- (~#) :: forall k1 k2. k1 -> k2 -> # -------------------------- This is The Type Of Equality in GHC. It classifies nominal coercions. This type is used in the solver for recording equality constraints. It responds "yes" to Type.isEqPred and classifies as an EqPred in Type.classifyPredType. All wanted constraints of this type are built with coercion holes. (See Note [Coercion holes] in TyCoRep.) But see also Note [Deferred errors for coercion holes] in TcErrors to see how equality constraints are deferred. Within GHC, ~# is called eqPrimTyCon, and it is defined in TysPrim. -------------------------- (~~) :: forall k1 k2. k1 -> k2 -> Constraint -------------------------- This is (almost) an ordinary class, defined as if by class a ~# b => a ~~ b instance a ~# b => a ~~ b Here's what's unusual about it: * We can't actually declare it that way because we don't have syntax for ~#. And ~# isn't a constraint, so even if we could write it, it wouldn't kind check. * Users cannot write instances of it. * It is "naturally coherent". This means that the solver won't hesitate to solve a goal of type (a ~~ b) even if there is, say (Int ~~ c) in the context. (Normally, it waits to learn more, just in case the given influences what happens next.) This is quite like having IncoherentInstances enabled. * It always terminates. That is, in the UndecidableInstances checks, we don't worry if a (~~) constraint is too big, as we know that solving equality terminates. On the other hand, this behaves just like any class w.r.t. eager superclass unpacking in the solver. So a lifted equality given quickly becomes an unlifted equality given. This is good, because the solver knows all about unlifted equalities. There is some special-casing in TcInteract.matchClassInst to pretend that there is an instance of this class, as we can't write the instance in Haskell. Within GHC, ~~ is called heqTyCon, and it is defined in TysWiredIn. -------------------------- (~) :: forall k. k -> k -> Constraint -------------------------- This is defined in Data.Type.Equality: class a ~~ b => (a :: k) ~ (b :: k) instance a ~~ b => a ~ b This is even more so an ordinary class than (~~), with the following exceptions: * Users cannot write instances of it. * It is "naturally coherent". (See (~~).) * (~) is magical syntax, as ~ is a reserved symbol. It cannot be exported or imported. * It always terminates. Within GHC, ~ is called eqTyCon, and it is defined in PrelNames. Note that it is *not* wired in. -------------------------- (:~:) :: forall k. 
k -> k -> * -------------------------- This is a perfectly ordinary GADT, wrapping (~). It is not defined within GHC at all. -------------------------- (~R#) :: forall k1 k2. k1 -> k2 -> # -------------------------- The is the representational analogue of ~#. This is the type of representational equalities that the solver works on. All wanted constraints of this type are built with coercion holes. Within GHC, ~R# is called eqReprPrimTyCon, and it is defined in TysPrim. -------------------------- Coercible :: forall k. k -> k -> Constraint -------------------------- This is quite like (~~) in the way it's defined and treated within GHC, but it's homogeneous. Homogeneity helps with type inference (as GHC can solve one kind from the other) and, in my (Richard's) estimation, will be more intuitive for users. An alternative design included HCoercible (like (~~)) and Coercible (like (~)). One annoyance was that we want `coerce :: Coercible a b => a -> b`, and we need the type of coerce to be fully wired-in. So the HCoercible/Coercible split required that both types be fully wired-in. Instead of doing this, I just got rid of HCoercible, as I'm not sure who would use it, anyway. Within GHC, Coercible is called coercibleTyCon, and it is defined in TysWiredIn. -------------------------- Coercion :: forall k. k -> k -> * -------------------------- This is a perfectly ordinary GADT, wrapping Coercible. It is not defined within GHC at all. -------------------------- (~P#) :: forall k1 k2. k1 -> k2 -> # -------------------------- This is the phantom analogue of ~# and it is barely used at all. (The solver has no idea about this one.) Here is the motivation: data Phant a = MkPhant type role Phant phantom Phant <Int, Bool>_P :: Phant Int ~P# Phant Bool We just need to have something to put on that last line. You probably don't need to worry about it. Note [The State# TyCon] ~~~~~~~~~~~~~~~~~~~~~~~ State# is the primitive, unlifted type of states. It has one type parameter, thus State# RealWorld or State# s where s is a type variable. The only purpose of the type parameter is to keep different state threads separate. It is represented by nothing at all. The type parameter to State# is intended to keep separate threads separate. Even though this parameter is not used in the definition of State#, it is given role Nominal to enforce its intended use. -} mkStatePrimTy :: Type -> Type mkStatePrimTy ty = TyConApp statePrimTyCon [ty] statePrimTyCon :: TyCon -- See Note [The State# TyCon] statePrimTyCon = pcPrimTyCon statePrimTyConName [Nominal] VoidRep {- RealWorld is deeply magical. It is *primitive*, but it is not *unlifted* (hence ptrArg). We never manipulate values of type RealWorld; it's only used in the type system, to parameterise State#. -} realWorldTyCon :: TyCon realWorldTyCon = mkLiftedPrimTyCon realWorldTyConName [] liftedTypeKind [] realWorldTy :: Type realWorldTy = mkTyConTy realWorldTyCon realWorldStatePrimTy :: Type realWorldStatePrimTy = mkStatePrimTy realWorldTy -- State# RealWorld -- Note: the ``state-pairing'' types are not truly primitive, -- so they are defined in \tr{TysWiredIn.hs}, not here. voidPrimTy :: Type voidPrimTy = TyConApp voidPrimTyCon [] voidPrimTyCon :: TyCon voidPrimTyCon = pcPrimTyCon voidPrimTyConName [] VoidRep mkProxyPrimTy :: Type -> Type -> Type mkProxyPrimTy k ty = TyConApp proxyPrimTyCon [k, ty] proxyPrimTyCon :: TyCon proxyPrimTyCon = mkPrimTyCon proxyPrimTyConName binders res_kind [Nominal,Nominal] where -- Kind: forall k. 
k -> Void# binders = mkTemplateTyConBinders [liftedTypeKind] (\ks-> ks) res_kind = tYPE voidRepDataConTy {- ********************************************************************* * * Primitive equality constraints See Note [The equality types story] * * ********************************************************************* -} eqPrimTyCon :: TyCon -- The representation type for equality predicates -- See Note [The equality types story] eqPrimTyCon = mkPrimTyCon eqPrimTyConName binders res_kind roles where -- Kind :: forall k1 k2. k1 -> k2 -> Void# binders = mkTemplateTyConBinders [liftedTypeKind, liftedTypeKind] (\ks -> ks) res_kind = tYPE voidRepDataConTy roles = [Nominal, Nominal, Nominal, Nominal] -- like eqPrimTyCon, but the type for *Representational* coercions -- this should only ever appear as the type of a covar. Its role is -- interpreted in coercionRole eqReprPrimTyCon :: TyCon -- See Note [The equality types story] eqReprPrimTyCon = mkPrimTyCon eqReprPrimTyConName binders res_kind roles where -- Kind :: forall k1 k2. k1 -> k2 -> Void# binders = mkTemplateTyConBinders [liftedTypeKind, liftedTypeKind] (\ks -> ks) res_kind = tYPE voidRepDataConTy roles = [Nominal, Nominal, Representational, Representational] -- like eqPrimTyCon, but the type for *Phantom* coercions. -- This is only used to make higher-order equalities. Nothing -- should ever actually have this type! eqPhantPrimTyCon :: TyCon eqPhantPrimTyCon = mkPrimTyCon eqPhantPrimTyConName binders res_kind roles where -- Kind :: forall k1 k2. k1 -> k2 -> Void# binders = mkTemplateTyConBinders [liftedTypeKind, liftedTypeKind] (\ks -> ks) res_kind = tYPE voidRepDataConTy roles = [Nominal, Nominal, Phantom, Phantom] {- ********************************************************************* * * The primitive array types * * ********************************************************************* -} arrayPrimTyCon, mutableArrayPrimTyCon, mutableByteArrayPrimTyCon, byteArrayPrimTyCon, arrayArrayPrimTyCon, mutableArrayArrayPrimTyCon, smallArrayPrimTyCon, smallMutableArrayPrimTyCon :: TyCon arrayPrimTyCon = pcPrimTyCon arrayPrimTyConName [Representational] PtrRep mutableArrayPrimTyCon = pcPrimTyCon mutableArrayPrimTyConName [Nominal, Representational] PtrRep mutableByteArrayPrimTyCon = pcPrimTyCon mutableByteArrayPrimTyConName [Nominal] PtrRep byteArrayPrimTyCon = pcPrimTyCon0 byteArrayPrimTyConName PtrRep arrayArrayPrimTyCon = pcPrimTyCon0 arrayArrayPrimTyConName PtrRep mutableArrayArrayPrimTyCon = pcPrimTyCon mutableArrayArrayPrimTyConName [Nominal] PtrRep smallArrayPrimTyCon = pcPrimTyCon smallArrayPrimTyConName [Representational] PtrRep smallMutableArrayPrimTyCon = pcPrimTyCon smallMutableArrayPrimTyConName [Nominal, Representational] PtrRep mkArrayPrimTy :: Type -> Type mkArrayPrimTy elt = TyConApp arrayPrimTyCon [elt] byteArrayPrimTy :: Type byteArrayPrimTy = mkTyConTy byteArrayPrimTyCon mkArrayArrayPrimTy :: Type mkArrayArrayPrimTy = mkTyConTy arrayArrayPrimTyCon mkSmallArrayPrimTy :: Type -> Type mkSmallArrayPrimTy elt = TyConApp smallArrayPrimTyCon [elt] mkMutableArrayPrimTy :: Type -> Type -> Type mkMutableArrayPrimTy s elt = TyConApp mutableArrayPrimTyCon [s, elt] mkMutableByteArrayPrimTy :: Type -> Type mkMutableByteArrayPrimTy s = TyConApp mutableByteArrayPrimTyCon [s] mkMutableArrayArrayPrimTy :: Type -> Type mkMutableArrayArrayPrimTy s = TyConApp mutableArrayArrayPrimTyCon [s] mkSmallMutableArrayPrimTy :: Type -> Type -> Type mkSmallMutableArrayPrimTy s elt = TyConApp smallMutableArrayPrimTyCon [s, elt] {- 
********************************************************************* * * The mutable variable type * * ********************************************************************* -} mutVarPrimTyCon :: TyCon mutVarPrimTyCon = pcPrimTyCon mutVarPrimTyConName [Nominal, Representational] PtrRep mkMutVarPrimTy :: Type -> Type -> Type mkMutVarPrimTy s elt = TyConApp mutVarPrimTyCon [s, elt] {- ************************************************************************ * * \subsection[TysPrim-synch-var]{The synchronizing variable type} * * ************************************************************************ -} mVarPrimTyCon :: TyCon mVarPrimTyCon = pcPrimTyCon mVarPrimTyConName [Nominal, Representational] PtrRep mkMVarPrimTy :: Type -> Type -> Type mkMVarPrimTy s elt = TyConApp mVarPrimTyCon [s, elt] {- ************************************************************************ * * \subsection[TysPrim-stm-var]{The transactional variable type} * * ************************************************************************ -} tVarPrimTyCon :: TyCon tVarPrimTyCon = pcPrimTyCon tVarPrimTyConName [Nominal, Representational] PtrRep mkTVarPrimTy :: Type -> Type -> Type mkTVarPrimTy s elt = TyConApp tVarPrimTyCon [s, elt] {- ************************************************************************ * * \subsection[TysPrim-stable-ptrs]{The stable-pointer type} * * ************************************************************************ -} stablePtrPrimTyCon :: TyCon stablePtrPrimTyCon = pcPrimTyCon stablePtrPrimTyConName [Representational] AddrRep mkStablePtrPrimTy :: Type -> Type mkStablePtrPrimTy ty = TyConApp stablePtrPrimTyCon [ty] {- ************************************************************************ * * \subsection[TysPrim-stable-names]{The stable-name type} * * ************************************************************************ -} stableNamePrimTyCon :: TyCon stableNamePrimTyCon = pcPrimTyCon stableNamePrimTyConName [Representational] PtrRep mkStableNamePrimTy :: Type -> Type mkStableNamePrimTy ty = TyConApp stableNamePrimTyCon [ty] {- ************************************************************************ * * \subsection[TysPrim-compact-nfdata]{The Compact NFData (CNF) type} * * ************************************************************************ -} compactPrimTyCon :: TyCon compactPrimTyCon = pcPrimTyCon0 compactPrimTyConName PtrRep compactPrimTy :: Type compactPrimTy = mkTyConTy compactPrimTyCon {- ************************************************************************ * * \subsection[TysPrim-BCOs]{The ``bytecode object'' type} * * ************************************************************************ -} bcoPrimTy :: Type bcoPrimTy = mkTyConTy bcoPrimTyCon bcoPrimTyCon :: TyCon bcoPrimTyCon = pcPrimTyCon0 bcoPrimTyConName PtrRep {- ************************************************************************ * * \subsection[TysPrim-Weak]{The ``weak pointer'' type} * * ************************************************************************ -} weakPrimTyCon :: TyCon weakPrimTyCon = pcPrimTyCon weakPrimTyConName [Representational] PtrRep mkWeakPrimTy :: Type -> Type mkWeakPrimTy v = TyConApp weakPrimTyCon [v] {- ************************************************************************ * * \subsection[TysPrim-thread-ids]{The ``thread id'' type} * * ************************************************************************ A thread id is represented by a pointer to the TSO itself, to ensure that they are always unique and we can always find the TSO for a given thread id. 
However, this has the unfortunate consequence that a ThreadId# for a given thread is treated as a root by the garbage collector and can keep TSOs around for too long. Hence the programmer API for thread manipulation uses a weak pointer to the thread id internally. -} threadIdPrimTy :: Type threadIdPrimTy = mkTyConTy threadIdPrimTyCon threadIdPrimTyCon :: TyCon threadIdPrimTyCon = pcPrimTyCon0 threadIdPrimTyConName PtrRep {- ************************************************************************ * * \subsection{SIMD vector types} * * ************************************************************************ -} #include "primop-vector-tys.hs-incl"
mettekou/ghc
compiler/prelude/TysPrim.hs
Haskell
bsd-3-clause
41,385
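-- Editor's sketch (not part of TysPrim.hs): Note [TYPE and RuntimeRep] above
-- says that a type's kind records its runtime representation, e.g.
-- Int :: TYPE 'PtrRepLifted (abbreviated *) while Int# :: TYPE 'IntRep.
-- A minimal user-level illustration of that split, assuming only GHC.Exts
-- from base; the module name is made up for this example.
{-# LANGUAGE MagicHash #-}
module UnliftedKindDemo where

import GHC.Exts (Int (I#), Int#, isTrue#, (+#), (>#))

-- Int# is unlifted (kind TYPE 'IntRep): it is passed as a raw machine word,
-- never as a heap pointer, so it cannot instantiate an ordinary lifted type
-- variable -- e.g. [Int#] or Maybe Int# are kind errors.
succUnlifted :: Int# -> Int#
succUnlifted x = x +# 1#

-- Boxing the result with the I# constructor moves us back to kind *.
succBoxed :: Int# -> Int
succBoxed x = I# (x +# 1#)

isPositive :: Int# -> Bool
isPositive x = isTrue# (x ># 0#)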
----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Config -- Copyright : (c) David Himmelstrup 2005 -- License : BSD-like -- -- Maintainer : lemmih@gmail.com -- Stability : provisional -- Portability : portable -- -- Utilities for handling saved state such as known packages, known servers and -- downloaded packages. ----------------------------------------------------------------------------- module Distribution.Client.Config ( SavedConfig(..), loadConfig, showConfig, showConfigWithComments, parseConfig, defaultCabalDir, defaultConfigFile, defaultCacheDir, defaultCompiler, defaultLogsDir, defaultUserInstall, baseSavedConfig, commentSavedConfig, initialSavedConfig, configFieldDescriptions, haddockFlagsFields, installDirsFields, withProgramsFields, withProgramOptionsFields, userConfigDiff, userConfigUpdate ) where import Distribution.Client.Types ( RemoteRepo(..), Username(..), Password(..) ) import Distribution.Client.BuildReports.Types ( ReportLevel(..) ) import Distribution.Client.Setup ( GlobalFlags(..), globalCommand, defaultGlobalFlags , ConfigExFlags(..), configureExOptions, defaultConfigExFlags , InstallFlags(..), installOptions, defaultInstallFlags , UploadFlags(..), uploadCommand , ReportFlags(..), reportCommand , showRepo, parseRepo ) import Distribution.Utils.NubList ( NubList, fromNubList, toNubList) import Distribution.Simple.Compiler ( DebugInfoLevel(..), OptimisationLevel(..) ) import Distribution.Simple.Setup ( ConfigFlags(..), configureOptions, defaultConfigFlags , HaddockFlags(..), haddockOptions, defaultHaddockFlags , installDirsOptions , programConfigurationPaths', programConfigurationOptions , Flag(..), toFlag, flagToMaybe, fromFlagOrDefault ) import Distribution.Simple.InstallDirs ( InstallDirs(..), defaultInstallDirs , PathTemplate, toPathTemplate ) import Distribution.ParseUtils ( FieldDescr(..), liftField , ParseResult(..), PError(..), PWarning(..) , locatedErrorMsg, showPWarning , readFields, warning, lineNo , simpleField, listField, parseFilePathQ, parseTokenQ ) import Distribution.Client.ParseUtils ( parseFields, ppFields, ppSection ) import qualified Distribution.ParseUtils as ParseUtils ( Field(..) ) import qualified Distribution.Text as Text ( Text(..) ) import Distribution.Simple.Command ( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..) , viewAsFieldDescr ) import Distribution.Simple.Program ( defaultProgramConfiguration ) import Distribution.Simple.Utils ( die, notice, warn, lowercase, cabalVersion ) import Distribution.Compiler ( CompilerFlavor(..), defaultCompilerFlavor ) import Distribution.Verbosity ( Verbosity, normal ) import Data.List ( partition, find, foldl' ) import Data.Maybe ( fromMaybe ) import Data.Monoid ( Monoid(..) ) import Control.Monad ( unless, foldM, liftM, liftM2 ) import qualified Distribution.Compat.ReadP as Parse ( option ) import qualified Text.PrettyPrint as Disp ( render, text, empty ) import Text.PrettyPrint ( ($+$) ) import System.Directory ( createDirectoryIfMissing, getAppUserDataDirectory, renameFile ) import Network.URI ( URI(..), URIAuth(..) 
) import System.FilePath ( (<.>), (</>), takeDirectory ) import System.IO.Error ( isDoesNotExistError ) import Distribution.Compat.Environment ( getEnvironment ) import Distribution.Compat.Exception ( catchIO ) import qualified Paths_cabal_install ( version ) import Data.Version ( showVersion ) import Data.Char ( isSpace ) import qualified Data.Map as M -- -- * Configuration saved in the config file -- data SavedConfig = SavedConfig { savedGlobalFlags :: GlobalFlags, savedInstallFlags :: InstallFlags, savedConfigureFlags :: ConfigFlags, savedConfigureExFlags :: ConfigExFlags, savedUserInstallDirs :: InstallDirs (Flag PathTemplate), savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate), savedUploadFlags :: UploadFlags, savedReportFlags :: ReportFlags, savedHaddockFlags :: HaddockFlags } instance Monoid SavedConfig where mempty = SavedConfig { savedGlobalFlags = mempty, savedInstallFlags = mempty, savedConfigureFlags = mempty, savedConfigureExFlags = mempty, savedUserInstallDirs = mempty, savedGlobalInstallDirs = mempty, savedUploadFlags = mempty, savedReportFlags = mempty, savedHaddockFlags = mempty } mappend a b = SavedConfig { savedGlobalFlags = combinedSavedGlobalFlags, savedInstallFlags = combinedSavedInstallFlags, savedConfigureFlags = combinedSavedConfigureFlags, savedConfigureExFlags = combinedSavedConfigureExFlags, savedUserInstallDirs = combinedSavedUserInstallDirs, savedGlobalInstallDirs = combinedSavedGlobalInstallDirs, savedUploadFlags = combinedSavedUploadFlags, savedReportFlags = combinedSavedReportFlags, savedHaddockFlags = combinedSavedHaddockFlags } where -- This is ugly, but necessary. If we're mappending two config files, we -- want the values of the *non-empty* list fields from the second one to -- *override* the corresponding values from the first one. Default -- behaviour (concatenation) is confusing and makes some use cases (see -- #1884) impossible. -- -- However, we also want to allow specifying multiple values for a list -- field in a *single* config file. For example, we want the following to -- continue to work: -- -- remote-repo: hackage.haskell.org:http://hackage.haskell.org/ -- remote-repo: private-collection:http://hackage.local/ -- -- So we can't just wrap the list fields inside Flags; we have to do some -- special-casing just for SavedConfig. -- NB: the signature prevents us from using 'combine' on lists. combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a combine' field subfield = (subfield . field $ a) `mappend` (subfield . field $ b) lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a] lastNonEmpty' field subfield = let a' = subfield . field $ a b' = subfield . field $ b in case b' of [] -> a' _ -> b' lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a) -> NubList a lastNonEmptyNL' field subfield = let a' = subfield . field $ a b' = subfield . 
field $ b in case fromNubList b' of [] -> a' _ -> b' combinedSavedGlobalFlags = GlobalFlags { globalVersion = combine globalVersion, globalNumericVersion = combine globalNumericVersion, globalConfigFile = combine globalConfigFile, globalSandboxConfigFile = combine globalSandboxConfigFile, globalRemoteRepos = lastNonEmptyNL globalRemoteRepos, globalCacheDir = combine globalCacheDir, globalLocalRepos = lastNonEmptyNL globalLocalRepos, globalLogsDir = combine globalLogsDir, globalWorldFile = combine globalWorldFile, globalRequireSandbox = combine globalRequireSandbox, globalIgnoreSandbox = combine globalIgnoreSandbox } where combine = combine' savedGlobalFlags lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags combinedSavedInstallFlags = InstallFlags { installDocumentation = combine installDocumentation, installHaddockIndex = combine installHaddockIndex, installDryRun = combine installDryRun, installMaxBackjumps = combine installMaxBackjumps, installReorderGoals = combine installReorderGoals, installIndependentGoals = combine installIndependentGoals, installShadowPkgs = combine installShadowPkgs, installStrongFlags = combine installStrongFlags, installReinstall = combine installReinstall, installAvoidReinstalls = combine installAvoidReinstalls, installOverrideReinstall = combine installOverrideReinstall, installUpgradeDeps = combine installUpgradeDeps, installOnly = combine installOnly, installOnlyDeps = combine installOnlyDeps, installRootCmd = combine installRootCmd, installSummaryFile = lastNonEmptyNL installSummaryFile, installLogFile = combine installLogFile, installBuildReports = combine installBuildReports, installReportPlanningFailure = combine installReportPlanningFailure, installSymlinkBinDir = combine installSymlinkBinDir, installOneShot = combine installOneShot, installNumJobs = combine installNumJobs, installRunTests = combine installRunTests } where combine = combine' savedInstallFlags lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags combinedSavedConfigureFlags = ConfigFlags { configPrograms = configPrograms . savedConfigureFlags $ b, -- TODO: NubListify configProgramPaths = lastNonEmpty configProgramPaths, -- TODO: NubListify configProgramArgs = lastNonEmpty configProgramArgs, configProgramPathExtra = lastNonEmptyNL configProgramPathExtra, configBuildHc = combine configBuildHc, configBuildHcPkg = combine configBuildHcPkg, configHcFlavor = combine configHcFlavor, configHcPath = combine configHcPath, configHcPkg = combine configHcPkg, configVanillaLib = combine configVanillaLib, configProfLib = combine configProfLib, configProf = combine configProf, configSharedLib = combine configSharedLib, configDynExe = combine configDynExe, configProfExe = combine configProfExe, -- TODO: NubListify configConfigureArgs = lastNonEmpty configConfigureArgs, configOptimization = combine configOptimization, configDebugInfo = combine configDebugInfo, configProgPrefix = combine configProgPrefix, configProgSuffix = combine configProgSuffix, -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. configInstallDirs = (configInstallDirs . savedConfigureFlags $ a) `mappend` (configInstallDirs . 
savedConfigureFlags $ b), configScratchDir = combine configScratchDir, -- TODO: NubListify configExtraLibDirs = lastNonEmpty configExtraLibDirs, -- TODO: NubListify configExtraIncludeDirs = lastNonEmpty configExtraIncludeDirs, configDistPref = combine configDistPref, configVerbosity = combine configVerbosity, configUserInstall = combine configUserInstall, -- TODO: NubListify configPackageDBs = lastNonEmpty configPackageDBs, configGHCiLib = combine configGHCiLib, configSplitObjs = combine configSplitObjs, configStripExes = combine configStripExes, configStripLibs = combine configStripLibs, -- TODO: NubListify configConstraints = lastNonEmpty configConstraints, -- TODO: NubListify configDependencies = lastNonEmpty configDependencies, configInstantiateWith = lastNonEmpty configInstantiateWith, -- TODO: NubListify configConfigurationsFlags = lastNonEmpty configConfigurationsFlags, configTests = combine configTests, configBenchmarks = combine configBenchmarks, configCoverage = combine configCoverage, configLibCoverage = combine configLibCoverage, configExactConfiguration = combine configExactConfiguration, configFlagError = combine configFlagError, configRelocatable = combine configRelocatable } where combine = combine' savedConfigureFlags lastNonEmpty = lastNonEmpty' savedConfigureFlags lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags combinedSavedConfigureExFlags = ConfigExFlags { configCabalVersion = combine configCabalVersion, -- TODO: NubListify configExConstraints = lastNonEmpty configExConstraints, -- TODO: NubListify configPreferences = lastNonEmpty configPreferences, configSolver = combine configSolver, configAllowNewer = combine configAllowNewer } where combine = combine' savedConfigureExFlags lastNonEmpty = lastNonEmpty' savedConfigureExFlags -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. combinedSavedUserInstallDirs = savedUserInstallDirs a `mappend` savedUserInstallDirs b -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. 
combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a `mappend` savedGlobalInstallDirs b combinedSavedUploadFlags = UploadFlags { uploadCheck = combine uploadCheck, uploadUsername = combine uploadUsername, uploadPassword = combine uploadPassword, uploadVerbosity = combine uploadVerbosity } where combine = combine' savedUploadFlags combinedSavedReportFlags = ReportFlags { reportUsername = combine reportUsername, reportPassword = combine reportPassword, reportVerbosity = combine reportVerbosity } where combine = combine' savedReportFlags combinedSavedHaddockFlags = HaddockFlags { -- TODO: NubListify haddockProgramPaths = lastNonEmpty haddockProgramPaths, -- TODO: NubListify haddockProgramArgs = lastNonEmpty haddockProgramArgs, haddockHoogle = combine haddockHoogle, haddockHtml = combine haddockHtml, haddockHtmlLocation = combine haddockHtmlLocation, haddockExecutables = combine haddockExecutables, haddockTestSuites = combine haddockTestSuites, haddockBenchmarks = combine haddockBenchmarks, haddockInternal = combine haddockInternal, haddockCss = combine haddockCss, haddockHscolour = combine haddockHscolour, haddockHscolourCss = combine haddockHscolourCss, haddockContents = combine haddockContents, haddockDistPref = combine haddockDistPref, haddockKeepTempFiles = combine haddockKeepTempFiles, haddockVerbosity = combine haddockVerbosity } where combine = combine' savedHaddockFlags lastNonEmpty = lastNonEmpty' savedHaddockFlags updateInstallDirs :: Flag Bool -> SavedConfig -> SavedConfig updateInstallDirs userInstallFlag savedConfig@SavedConfig { savedConfigureFlags = configureFlags, savedUserInstallDirs = userInstallDirs, savedGlobalInstallDirs = globalInstallDirs } = savedConfig { savedConfigureFlags = configureFlags { configInstallDirs = installDirs } } where installDirs | userInstall = userInstallDirs | otherwise = globalInstallDirs userInstall = fromFlagOrDefault defaultUserInstall $ configUserInstall configureFlags `mappend` userInstallFlag -- -- * Default config -- -- | These are the absolute basic defaults. The fields that must be -- initialised. When we load the config from the file we layer the loaded -- values over these ones, so any missing fields in the file take their values -- from here. -- baseSavedConfig :: IO SavedConfig baseSavedConfig = do userPrefix <- defaultCabalDir logsDir <- defaultLogsDir worldFile <- defaultWorldFile return mempty { savedConfigureFlags = mempty { configHcFlavor = toFlag defaultCompiler, configUserInstall = toFlag defaultUserInstall, configVerbosity = toFlag normal }, savedUserInstallDirs = mempty { prefix = toFlag (toPathTemplate userPrefix) }, savedGlobalFlags = mempty { globalLogsDir = toFlag logsDir, globalWorldFile = toFlag worldFile } } -- | This is the initial configuration that we write out to to the config file -- if the file does not exist (or the config we use if the file cannot be read -- for some other reason). When the config gets loaded it gets layered on top -- of 'baseSavedConfig' so we do not need to include it into the initial -- values we save into the config file. 
-- initialSavedConfig :: IO SavedConfig initialSavedConfig = do cacheDir <- defaultCacheDir logsDir <- defaultLogsDir worldFile <- defaultWorldFile extraPath <- defaultExtraPath return mempty { savedGlobalFlags = mempty { globalCacheDir = toFlag cacheDir, globalRemoteRepos = toNubList [defaultRemoteRepo], globalWorldFile = toFlag worldFile }, savedConfigureFlags = mempty { configProgramPathExtra = toNubList extraPath }, savedInstallFlags = mempty { installSummaryFile = toNubList [toPathTemplate (logsDir </> "build.log")], installBuildReports= toFlag AnonymousReports, installNumJobs = toFlag Nothing } } --TODO: misleading, there's no way to override this default -- either make it possible or rename to simply getCabalDir. defaultCabalDir :: IO FilePath defaultCabalDir = getAppUserDataDirectory "cabal" defaultConfigFile :: IO FilePath defaultConfigFile = do dir <- defaultCabalDir return $ dir </> "config" defaultCacheDir :: IO FilePath defaultCacheDir = do dir <- defaultCabalDir return $ dir </> "packages" defaultLogsDir :: IO FilePath defaultLogsDir = do dir <- defaultCabalDir return $ dir </> "logs" -- | Default position of the world file defaultWorldFile :: IO FilePath defaultWorldFile = do dir <- defaultCabalDir return $ dir </> "world" defaultExtraPath :: IO [FilePath] defaultExtraPath = do dir <- defaultCabalDir return [dir </> "bin"] defaultCompiler :: CompilerFlavor defaultCompiler = fromMaybe GHC defaultCompilerFlavor defaultUserInstall :: Bool defaultUserInstall = True -- We do per-user installs by default on all platforms. We used to default to -- global installs on Windows but that no longer works on Windows Vista or 7. defaultRemoteRepo :: RemoteRepo defaultRemoteRepo = RemoteRepo name uri where name = "hackage.haskell.org" uri = URI "http:" (Just (URIAuth "" name "")) "/packages/archive" "" "" -- -- * Config file reading -- loadConfig :: Verbosity -> Flag FilePath -> Flag Bool -> IO SavedConfig loadConfig verbosity configFileFlag userInstallFlag = addBaseConf $ do let sources = [ ("commandline option", return . flagToMaybe $ configFileFlag), ("env var CABAL_CONFIG", lookup "CABAL_CONFIG" `liftM` getEnvironment), ("default config file", Just `liftM` defaultConfigFile) ] getSource [] = error "no config file path candidate found." getSource ((msg,action): xs) = action >>= maybe (getSource xs) (return . (,) msg) (source, configFile) <- getSource sources minp <- readConfigFile mempty configFile case minp of Nothing -> do notice verbosity $ "Config file path source is " ++ source ++ "." notice verbosity $ "Config file " ++ configFile ++ " not found." notice verbosity $ "Writing default configuration to " ++ configFile commentConf <- commentSavedConfig initialConf <- initialSavedConfig writeConfigFile configFile commentConf initialConf return initialConf Just (ParseOk ws conf) -> do unless (null ws) $ warn verbosity $ unlines (map (showPWarning configFile) ws) return conf Just (ParseFailed err) -> do let (line, msg) = locatedErrorMsg err die $ "Error parsing config file " ++ configFile ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg where addBaseConf body = do base <- baseSavedConfig extra <- body return (updateInstallDirs userInstallFlag (base `mappend` extra)) readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig)) readConfigFile initial file = handleNotExists $ fmap (Just . 
parseConfig initial) (readFile file) where handleNotExists action = catchIO action $ \ioe -> if isDoesNotExistError ioe then return Nothing else ioError ioe writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO () writeConfigFile file comments vals = do let tmpFile = file <.> "tmp" createDirectoryIfMissing True (takeDirectory file) writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n" renameFile tmpFile file where explanation = unlines ["-- This is the configuration file for the 'cabal' command line tool." ,"" ,"-- The available configuration options are listed below." ,"-- Some of them have default values listed." ,"" ,"-- Lines (like this one) beginning with '--' are comments." ,"-- Be careful with spaces and indentation because they are" ,"-- used to indicate layout for nested sections." ,"" ,"-- Cabal library version: " ++ showVersion cabalVersion ,"-- cabal-install version: " ++ showVersion Paths_cabal_install.version ,"","" ] -- | These are the default values that get used in Cabal if a no value is -- given. We use these here to include in comments when we write out the -- initial config file so that the user can see what default value they are -- overriding. -- commentSavedConfig :: IO SavedConfig commentSavedConfig = do userInstallDirs <- defaultInstallDirs defaultCompiler True True globalInstallDirs <- defaultInstallDirs defaultCompiler False True return SavedConfig { savedGlobalFlags = defaultGlobalFlags, savedInstallFlags = defaultInstallFlags, savedConfigureExFlags = defaultConfigExFlags, savedConfigureFlags = (defaultConfigFlags defaultProgramConfiguration) { configUserInstall = toFlag defaultUserInstall }, savedUserInstallDirs = fmap toFlag userInstallDirs, savedGlobalInstallDirs = fmap toFlag globalInstallDirs, savedUploadFlags = commandDefaultFlags uploadCommand, savedReportFlags = commandDefaultFlags reportCommand, savedHaddockFlags = defaultHaddockFlags } -- | All config file fields. -- configFieldDescriptions :: [FieldDescr SavedConfig] configFieldDescriptions = toSavedConfig liftGlobalFlag (commandOptions (globalCommand []) ParseArgs) ["version", "numeric-version", "config-file", "sandbox-config-file"] [] ++ toSavedConfig liftConfigFlag (configureOptions ParseArgs) (["builddir", "constraint", "dependency"] ++ map fieldName installDirsFields) --FIXME: this is only here because viewAsFieldDescr gives us a parser -- that only recognises 'ghc' etc, the case-sensitive flag names, not -- what the normal case-insensitive parser gives us. [simpleField "compiler" (fromFlagOrDefault Disp.empty . fmap Text.disp) (optional Text.parse) configHcFlavor (\v flags -> flags { configHcFlavor = v }) -- TODO: The following is a temporary fix. The "optimization" -- and "debug-info" fields are OptArg, and viewAsFieldDescr -- fails on that. Instead of a hand-written hackaged parser -- and printer, we should handle this case properly in the -- library. 
,liftField configOptimization (\v flags -> flags { configOptimization = v }) $ let name = "optimization" in FieldDescr name (\f -> case f of Flag NoOptimisation -> Disp.text "False" Flag NormalOptimisation -> Disp.text "True" Flag MaximumOptimisation -> Disp.text "2" _ -> Disp.empty) (\line str _ -> case () of _ | str == "False" -> ParseOk [] (Flag NoOptimisation) | str == "True" -> ParseOk [] (Flag NormalOptimisation) | str == "0" -> ParseOk [] (Flag NoOptimisation) | str == "1" -> ParseOk [] (Flag NormalOptimisation) | str == "2" -> ParseOk [] (Flag MaximumOptimisation) | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) | otherwise -> ParseFailed (NoParse name line) where lstr = lowercase str caseWarning = PWarning $ "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") ,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ let name = "debug-info" in FieldDescr name (\f -> case f of Flag NoDebugInfo -> Disp.text "False" Flag MinimalDebugInfo -> Disp.text "1" Flag NormalDebugInfo -> Disp.text "True" Flag MaximalDebugInfo -> Disp.text "3" _ -> Disp.empty) (\line str _ -> case () of _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) | str == "True" -> ParseOk [] (Flag NormalDebugInfo) | str == "0" -> ParseOk [] (Flag NoDebugInfo) | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) | str == "2" -> ParseOk [] (Flag NormalDebugInfo) | str == "3" -> ParseOk [] (Flag MaximalDebugInfo) | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) | otherwise -> ParseFailed (NoParse name line) where lstr = lowercase str caseWarning = PWarning $ "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") ] ++ toSavedConfig liftConfigExFlag (configureExOptions ParseArgs) [] [] ++ toSavedConfig liftInstallFlag (installOptions ParseArgs) ["dry-run", "only", "only-dependencies", "dependencies-only"] [] ++ toSavedConfig liftUploadFlag (commandOptions uploadCommand ParseArgs) ["verbose", "check"] [] ++ toSavedConfig liftReportFlag (commandOptions reportCommand ParseArgs) ["verbose", "username", "password"] [] --FIXME: this is a hack, hiding the user name and password. -- But otherwise it masks the upload ones. Either need to -- share the options or make then distinct. In any case -- they should probably be per-server. where toSavedConfig lift options exclusions replacements = [ lift (fromMaybe field replacement) | opt <- options , let field = viewAsFieldDescr opt name = fieldName field replacement = find ((== name) . fieldName) replacements , name `notElem` exclusions ] optional = Parse.option mempty . fmap toFlag -- TODO: next step, make the deprecated fields elicit a warning. -- deprecatedFieldDescriptions :: [FieldDescr SavedConfig] deprecatedFieldDescriptions = [ liftGlobalFlag $ listField "repos" (Disp.text . showRepo) parseRepo (fromNubList . globalRemoteRepos) (\rs cfg -> cfg { globalRemoteRepos = toNubList rs }) , liftGlobalFlag $ simpleField "cachedir" (Disp.text . fromFlagOrDefault "") (optional parseFilePathQ) globalCacheDir (\d cfg -> cfg { globalCacheDir = d }) , liftUploadFlag $ simpleField "hackage-username" (Disp.text . fromFlagOrDefault "" . fmap unUsername) (optional (fmap Username parseTokenQ)) uploadUsername (\d cfg -> cfg { uploadUsername = d }) , liftUploadFlag $ simpleField "hackage-password" (Disp.text . fromFlagOrDefault "" . 
fmap unPassword) (optional (fmap Password parseTokenQ)) uploadPassword (\d cfg -> cfg { uploadPassword = d }) ] ++ map (modifyFieldName ("user-"++) . liftUserInstallDirs) installDirsFields ++ map (modifyFieldName ("global-"++) . liftGlobalInstallDirs) installDirsFields where optional = Parse.option mempty . fmap toFlag modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a modifyFieldName f d = d { fieldName = f (fieldName d) } liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) -> FieldDescr SavedConfig liftUserInstallDirs = liftField savedUserInstallDirs (\flags conf -> conf { savedUserInstallDirs = flags }) liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) -> FieldDescr SavedConfig liftGlobalInstallDirs = liftField savedGlobalInstallDirs (\flags conf -> conf { savedGlobalInstallDirs = flags }) liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig liftGlobalFlag = liftField savedGlobalFlags (\flags conf -> conf { savedGlobalFlags = flags }) liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig liftConfigFlag = liftField savedConfigureFlags (\flags conf -> conf { savedConfigureFlags = flags }) liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig liftConfigExFlag = liftField savedConfigureExFlags (\flags conf -> conf { savedConfigureExFlags = flags }) liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig liftInstallFlag = liftField savedInstallFlags (\flags conf -> conf { savedInstallFlags = flags }) liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig liftUploadFlag = liftField savedUploadFlags (\flags conf -> conf { savedUploadFlags = flags }) liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig liftReportFlag = liftField savedReportFlags (\flags conf -> conf { savedReportFlags = flags }) parseConfig :: SavedConfig -> String -> ParseResult SavedConfig parseConfig initial = \str -> do fields <- readFields str let (knownSections, others) = partition isKnownSection fields config <- parse others let user0 = savedUserInstallDirs config global0 = savedGlobalInstallDirs config (haddockFlags, user, global, paths, args) <- foldM parseSections (savedHaddockFlags config, user0, global0, [], []) knownSections return config { savedConfigureFlags = (savedConfigureFlags config) { configProgramPaths = paths, configProgramArgs = args }, savedHaddockFlags = haddockFlags, savedUserInstallDirs = user, savedGlobalInstallDirs = global } where isKnownSection (ParseUtils.Section _ "haddock" _ _) = True isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True isKnownSection _ = False parse = parseFields (configFieldDescriptions ++ deprecatedFieldDescriptions) initial parseSections accum@(h,u,g,p,a) (ParseUtils.Section _ "haddock" name fs) | name == "" = do h' <- parseFields haddockFlagsFields h fs return (h', u, g, p, a) | otherwise = do warning "The 'haddock' section should be unnamed" return accum parseSections accum@(h,u,g,p,a) (ParseUtils.Section _ "install-dirs" name fs) | name' == "user" = do u' <- parseFields installDirsFields u fs return (h, u', g, p, a) | name' == "global" = do g' <- parseFields installDirsFields g fs return (h, u, g', p, a) | otherwise = do warning "The 'install-paths' section should be for 'user' or 'global'" return accum where name' = lowercase name parseSections accum@(h,u,g,p,a) (ParseUtils.Section 
_ "program-locations" name fs) | name == "" = do p' <- parseFields withProgramsFields p fs return (h, u, g, p', a) | otherwise = do warning "The 'program-locations' section should be unnamed" return accum parseSections accum@(h, u, g, p, a) (ParseUtils.Section _ "program-default-options" name fs) | name == "" = do a' <- parseFields withProgramOptionsFields a fs return (h, u, g, p, a') | otherwise = do warning "The 'program-default-options' section should be unnamed" return accum parseSections accum f = do warning $ "Unrecognized stanza on line " ++ show (lineNo f) return accum showConfig :: SavedConfig -> String showConfig = showConfigWithComments mempty showConfigWithComments :: SavedConfig -> SavedConfig -> String showConfigWithComments comment vals = Disp.render $ ppFields configFieldDescriptions mcomment vals $+$ Disp.text "" $+$ ppSection "haddock" "" haddockFlagsFields (fmap savedHaddockFlags mcomment) (savedHaddockFlags vals) $+$ Disp.text "" $+$ installDirsSection "user" savedUserInstallDirs $+$ Disp.text "" $+$ installDirsSection "global" savedGlobalInstallDirs $+$ Disp.text "" $+$ configFlagsSection "program-locations" withProgramsFields configProgramPaths $+$ Disp.text "" $+$ configFlagsSection "program-default-options" withProgramOptionsFields configProgramArgs where mcomment = Just comment installDirsSection name field = ppSection "install-dirs" name installDirsFields (fmap field mcomment) (field vals) configFlagsSection name fields field = ppSection name "" fields (fmap (field . savedConfigureFlags) mcomment) ((field . savedConfigureFlags) vals) -- | Fields for the 'install-dirs' sections. installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))] installDirsFields = map viewAsFieldDescr installDirsOptions -- | Fields for the 'haddock' section. haddockFlagsFields :: [FieldDescr HaddockFlags] haddockFlagsFields = [ field | opt <- haddockOptions ParseArgs , let field = viewAsFieldDescr opt name = fieldName field , name `notElem` exclusions ] where exclusions = ["verbose", "builddir"] -- | Fields for the 'program-locations' section. withProgramsFields :: [FieldDescr [(String, FilePath)]] withProgramsFields = map viewAsFieldDescr $ programConfigurationPaths' (++ "-location") defaultProgramConfiguration ParseArgs id (++) -- | Fields for the 'program-default-options' section. withProgramOptionsFields :: [FieldDescr [(String, [String])]] withProgramOptionsFields = map viewAsFieldDescr $ programConfigurationOptions defaultProgramConfiguration ParseArgs id (++) -- | Get the differences (as a pseudo code diff) between the user's -- '~/.cabal/config' and the one that cabal would generate if it didn't exist. userConfigDiff :: GlobalFlags -> IO [String] userConfigDiff globalFlags = do userConfig <- loadConfig normal (globalConfigFile globalFlags) mempty testConfig <- liftM2 mappend baseSavedConfig initialSavedConfig return $ reverse . foldl' createDiff [] . M.toList $ M.unionWith combine (M.fromList . map justFst $ filterShow testConfig) (M.fromList . 
map justSnd $ filterShow userConfig) where justFst (a, b) = (a, (Just b, Nothing)) justSnd (a, b) = (a, (Nothing, Just b)) combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b) combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b) combine x y = error $ "Can't happen : userConfigDiff " ++ show x ++ " " ++ show y createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String] createDiff acc (key, (Just a, Just b)) | a == b = acc | otherwise = ("+ " ++ key ++ ": " ++ b) : ("- " ++ key ++ ": " ++ a) : acc createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc createDiff acc (_, (Nothing, Nothing)) = acc filterShow :: SavedConfig -> [(String, String)] filterShow cfg = map keyValueSplit . filter (\s -> not (null s) && any (== ':') s) . map nonComment . lines $ showConfig cfg nonComment [] = [] nonComment ('-':'-':_) = [] nonComment (x:xs) = x : nonComment xs topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace keyValueSplit s = let (left, right) = break (== ':') s in (topAndTail left, topAndTail (drop 1 right)) -- | Update the user's ~/.cabal/config' keeping the user's customizations. userConfigUpdate :: Verbosity -> GlobalFlags -> IO () userConfigUpdate verbosity globalFlags = do userConfig <- loadConfig normal (globalConfigFile globalFlags) mempty newConfig <- liftM2 mappend baseSavedConfig initialSavedConfig commentConf <- commentSavedConfig cabalFile <- defaultConfigFile let backup = cabalFile ++ ".backup" notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "." renameFile cabalFile backup notice verbosity $ "Writing merged config to " ++ cabalFile ++ "." writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig)
plumlife/cabal
cabal-install/Distribution/Client/Config.hs
Haskell
bsd-3-clause
38,871
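-- Editor's sketch (not part of Config.hs): the Monoid instance above combines
-- two SavedConfig values so that, for list-valued fields, the *last non-empty*
-- list wins instead of the lists being concatenated, while scalar Flag-like
-- fields simply take the rightmost set value. A standalone sketch of that
-- combining rule; the Cfg record and its fields are hypothetical.
module LastNonEmptyDemo where

data Cfg = Cfg { repos :: [String], verbosity :: Maybe Int } deriving Show

-- Scalar fields: the right-hand value wins whenever it is present.
lastJust :: Maybe a -> Maybe a -> Maybe a
lastJust a Nothing = a
lastJust _ b       = b

-- List fields: keep the left list only when the right one is empty, so a
-- second config file overrides rather than appends.
lastNonEmpty :: [a] -> [a] -> [a]
lastNonEmpty a [] = a
lastNonEmpty _ b  = b

combineCfg :: Cfg -> Cfg -> Cfg
combineCfg a b = Cfg { repos     = repos a     `lastNonEmpty` repos b
                     , verbosity = verbosity a `lastJust`     verbosity b
                     }

-- ghci> combineCfg (Cfg ["hackage"] (Just 1)) (Cfg [] Nothing)
-- Cfg {repos = ["hackage"], verbosity = Just 1}
-- ghci> combineCfg (Cfg ["hackage"] (Just 1)) (Cfg ["local"] Nothing)
-- Cfg {repos = ["local"], verbosity = Just 1}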
{-# LANGUAGE BangPatterns #-} {-# OPTIONS_GHC -fno-warn-unused-binds -fno-warn-name-shadowing#-} -- This code is written by Pedro Vasconcelos for his implementation of -- Tzaar game: HsTzaar. I'm reproducing it here just to be able to -- compare the performance of his implementation with my own. -- -- Thanks to Pedro for providing his code, I've got some nice ideas -- from it. -- -- HsTzaar may be downloaded from Hackage: -- http://hackage.haskell.org/package/hstzaar module AI.Algorithms.Tzaar ( alphabeta , negascout , negamax , Gametree(..) , Valuation , Valued(..) , valued , ($+) ) where class Gametree p where children :: p -> [p] -- list of move, position is_terminal :: p -> Bool is_terminal = null . children -- default definition -- | type for valuation functions type Valuation a = a -> Int -- | a pair of something with a strict integer valuation -- supporting equality, ordering and limited arithmetic on valuation data Valued a = Valued { value :: !Int, unvalued :: a } deriving Show instance Functor Valued where fmap f (Valued v x) = Valued v (f x) -- | apply a valuation valued :: Valuation a -> a -> Valued a valued f x = Valued (f x) x -- | modify the valuation revalue :: (Int -> Int) -> Valued a -> Valued a revalue f (Valued v x) = Valued (f v) x instance Eq (Valued a) where x == y = value x==value y instance Ord (Valued a) where compare x y = compare (value x) (value y) -- some instances of numeric type class (only negate and fromInteger) instance Num (Valued a) where (+) = undefined (-) = undefined (*) = undefined negate = revalue negate fromInteger n = valued (const (fromIntegral n)) undefined abs = undefined signum = undefined -- | add a constant to a value infix 6 $+ ($+) :: Int -> Valued a -> Valued a k $+ x = revalue (+k) x -- | Naive negamax algorithm (no pruning) -- wrapper negamax :: Gametree p => Valuation p -> Int -> p -> Valued p negamax node_value depth p = negamax' depth p where -- worker negamax' d p | d==0 || is_terminal p = valued node_value p | otherwise = negate $ minimum [negamax' d' p' | p'<-children p] where d' = d-1 -- | Negamax with alpha-beta pruning -- wrapper alphabeta :: Gametree p => Valuation p -> Int -> p -> Valued p alphabeta node_value depth p = let a = fromIntegral (minBound+1 :: Int) b = fromIntegral (maxBound :: Int) in alpha_beta' depth a b p where -- worker alpha_beta' d alfa beta p | d==0 || is_terminal p = valued node_value p | otherwise = cmx alfa (children p) where d' = d-1 cmx alfa [] = alfa cmx alfa (p:ps) | a'>=beta = a' | otherwise = cmx (max a' alfa) ps where a' = negate $ alpha_beta' d' (negate beta) (negate alfa) p -- | Negascout search -- wrapper negascout :: Gametree p => Valuation p -> Int -> p -> Valued p negascout node_value depth p = let a = fromIntegral (minBound+1 :: Int) b = fromIntegral (maxBound :: Int) in negascout' node_value depth a b p where -- worker negascout' node_value d alfa beta p | d==0 || is_terminal p = valued node_value p | d==1 = valued (negate .
node_value) p0 -- short-circuit for depth 1 | b >= beta = b | otherwise = scout (max alfa b) b ps where d' = d-1 ps = children p p0 = unvalued $ minimum $ map (valued node_value) ps -- p0 = estimate_best node_value ps -- child with best static score b = negate $ negascout' node_value d' (negate beta) (negate alfa) p0 -- full search estimate scout _ !b [] = b scout !alfa !b (p:ps) | s>=beta = s | otherwise = scout alfa' b' ps where s = negate $ negascout' node_value d' (negate (1$+alfa)) (negate alfa) p s' | s>alfa = negate $ negascout' node_value d' (negate beta) (negate alfa) p | otherwise = s alfa' = max alfa s' b' = max b s'
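-- An illustrative usage sketch (not from the original HsTzaar code): 'ToyPos'
-- and 'toyValue' are made-up names for a tiny uniform game tree, just to show
-- how the searches above are invoked and compared.
data ToyPos = ToyPos { toyScore :: !Int, toyDepth :: !Int }

instance Gametree ToyPos where
  children (ToyPos s d)
    | d >= 3    = []                 -- cut the toy tree off after three plies
    | otherwise = [ToyPos (s + k) (d + 1) | k <- [-2, -1, 1, 2]]

toyValue :: Valuation ToyPos
toyValue = toyScore

-- For the same depth the three searches should agree on the root value, e.g. in GHCi:
--   value (negamax   toyValue 3 (ToyPos 0 0))
--   value (alphabeta toyValue 3 (ToyPos 0 0))
--   value (negascout toyValue 3 (ToyPos 0 0))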
sphynx/hamisado
AI/Algorithms/Tzaar.hs
Haskell
bsd-3-clause
4,094
{-# LANGUAGE ConstraintKinds, TypeFamilies #-} {- | App module defines types used by the Spock framework. -} module Guide.App where -- hvect import Data.HVect -- Spock import Web.Spock import Guide.Types.User (User) import Guide.Types.Session (GuideData) import Guide.ServerStuff (ServerState) -- | Type of connection, currently unused. (Acid-State DB stored in 'ServerState') type GuideConn = () -- | Type of user session payload. type GuideSessionData = GuideData -- | Type of server state, accessed with 'getState'. type GuideState = ServerState -- | The fully qualified type of a Spock application/route. type GuideM ctx r = SpockCtxM ctx GuideConn GuideData ServerState r -- | Type of a root application. type GuideApp ctx = GuideM ctx () -- | Type of a Guide action with a generic context. type GuideAction ctx r = ActionCtxT ctx (WebStateM GuideConn GuideData ServerState) r data IsAdmin = IsAdmin type AuthM ctx r = forall n xs. (ctx ~ HVect xs, ListContains n User xs) => GuideM ctx r type AuthAction ctx r = forall n xs. (ctx ~ HVect xs, ListContains n User xs) => GuideAction ctx r type AdminM ctx r = forall n xs. (ctx ~ HVect xs, ListContains n IsAdmin xs) => GuideM ctx r type AdminAction ctx r = forall n xs. (ctx ~ HVect xs, ListContains n IsAdmin xs) => GuideAction ctx r
aelve/hslibs
src/Guide/App.hs
Haskell
bsd-3-clause
1,314
{-# LANGUAGE OverloadedStrings #-} module ServerSpec (spec) where import Test.Hspec import Server main :: IO () main = hspec spec spec :: Spec spec = do describe "reqUri" $ do it "is Nothing for an invalid HTTP GET request" $ do reqUri "FOO" `shouldBe` Nothing it "is Nothing for an HTTP request that is not GET" $ do reqUri "POST / HTTP/1.1" `shouldBe` Nothing it "is Nothing for a non-1.1 HTTP request" $ do reqUri "GET /foo HTTP/1.0" `shouldBe` Nothing it "is Nothing for a GET request with extra spaces after the URI" $ do reqUri "GET /bar HTTP/1.1" `shouldBe` Nothing it "is the request URI for a valid GET request" $ do reqUri "GET / HTTP/1.1" `shouldBe` Just "/" it "captures several path elements" $ do reqUri "GET /foo/bar/baz/ HTTP/1.1" `shouldBe` Just "/foo/bar/baz/" it "captures query parameters" $ do reqUri "GET /x?a=1&b=2 HTTP/1.1" `shouldBe` Just "/x?a=1&b=2" describe "HTTP response" $ do it "has correct zero content-length" $ do response "200 OK" "" `shouldBe` "HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n" it "has correct non-zero content-length" $ do response "404 Not Found" "Eff off." `shouldBe` "HTTP/1.1 404 Not Found\r\nContent-Length: 8\r\n\r\nEff off."
tripped/hlog
test/ServerSpec.hs
Haskell
bsd-3-clause
1,412
module Scheme.Eval ( eval, readEvalPrint, readEvalPrint', readEvalLines, readEvalLines' , primitiveEnv, primitives) where import Control.Monad import Data.Maybe import Data.IORef import Control.Monad.Error import Scheme.Types import System.IO import Scheme.Parser import Scheme.Eval.DynamicTypes import Scheme.Eval.Primitives type Continuation = LispVal -- special primitives receive [LispVal] unevaluated, -- also get full access to continuation and the environment -- are responsible for calling eval if necessary selfRefPrimitives :: [(String, Continuation -> Env -> [LispVal] -> ThrowsErrorIO LispVal)] selfRefPrimitives = [ ("inspect", \cont env -> maxArgs 0 >=> const lispNull) , ("define", \_ env -> define env . List) , ("quote", \_ _ -> oneArgOnly) , ("if", \_ env -> onlyArgs 3 >=> \[maybecond,a,b] -> do cond <- eval env maybecond >>= expect boolType eval env $ if cond then a else b) , ("eval", \cont env -> oneArgOnly >=> \form -> case form of (List [Atom "quote",lisp]) -> eval env lisp lisp -> eval env lisp) , ("lambda", \_ env -> minArgs 2 >=> \(params:body) -> do params' <- expect (atomType `orType` listType `orType` dottedListType) params either (\atom -> makeVarargs (Atom atom) env [] body) (either (\ps -> makeNormalFunc env ps body) (\(ps, varargs) -> makeVarargs varargs env ps body)) params') , ("lambda-closure", \_ env -> oneArgOnly >=> eval env >=> expect funcType >=> \(ps, vars, bod, Env readOnly env) -> do e <- liftIO $ readIORef env liftM List $ mapM (\(n,vIO) -> liftIO (readIORef vIO) >>= \v -> return $ List [String n, v]) e) , ("set!", \_ env -> onlyArgs 2 >=> \[a,b] -> do varName <- expect atomType a setVar env varName b lispNull) , ("load", \_ env -> oneArgOnly >=> expect (stringType `orType` portType) >=> either (\fname -> safeLiftIO $ readFile fname >>= evalAll env fname) (\handle -> safeLiftIO $ hGetContents handle >>= evalAll env (show handle))) ] -- when redefining, try to free any overwritten ports define env (List [Atom v, form]) = eval env form >>= defineVar env v >> lispNull define env (List (List (Atom var : params) : body)) = makeNormalFunc env params body >>= defineVar env var >> lispNull define env (List (DottedList (Atom var : params) varargs : body)) = makeVarargs varargs env params body >>= defineVar env var >> lispNull define _ (List xs) = throwError $ TypeMismatch "first to be an atom or a dotted list or a normal list" (show xs) evalPrimitives = [ ("apply-safe", minArgs 1 >=> \(func:rest) -> apply func $ if length rest == 1 && isList (head rest) then fromJust $ getList (head rest) else rest) , ("read", maxArgs 1 >=> \l -> (case l of [] -> return stdin [p] -> expect portType p) >>= \port -> (safeLiftIO . hGetLine >=> liftIO . readExpr ("read from: " ++ show port)) port) , ("read-all", maxArgs 1 >=> \l -> (case l of [] -> return stdin [p] -> expect portType p) >>= \port -> (safeLiftIO . hGetContents >=> liftIO . 
readAllExpr ("read-all from: " ++ show port)) port) , ("read-string", oneArgOnly >=> expect stringType >=> readExpr "<string>") , ("read-string-all", oneArgOnly >=> expect stringType >=> readAllExpr "<string>") ] readOnlyPrimitives = toPrimitiveFuncs selfRefPrimitives toPrimitiveFuncs = map (\(name, _) -> (name, PrimitiveFunc name)) primitives = evalPrimitives ++ basicPrimitives ++ ioPrimitives primitiveEnv = nullEnv readOnlyPrimitives >>= flip bindVars (toPrimitiveFuncs primitives) bindVars :: Env -> [(String, LispVal)] -> IO Env bindVars (Env readOnlys env) vars = do e <- readIORef env newE <- liftM (++e) (mapM (\(n,v) -> newIORef v >>= \vRef -> return (n, vRef)) vars) liftM (Env readOnlys) $ newIORef newE makeFunc vararg env params body = do expect (listOf atomType) (List params) maybe (return ()) (void . expect atomType) vararg return $ Func (map show params) (fmap show vararg) body env makeNormalFunc :: Env -> [LispVal] -> [LispVal] -> ThrowsErrorIO LispVal makeNormalFunc = makeFunc Nothing makeVarargs :: LispVal -> Env -> [LispVal] -> [LispVal] -> ThrowsErrorIO LispVal makeVarargs = makeFunc . Just evalExpr env fname s = liftIO $ runErrorT (readLisp fname s >>= eval env) >>= return . either errToLisp id evalAll env fname s = liftIO $ runErrorT (readLisps fname s >>= mapM (eval env)) >>= return . either errToLisp List readExpr fname s = liftIO $ runErrorT (readLisp fname s) >>= return . either errToLisp id readAllExpr fname s = liftIO $ runErrorT (readLisps fname s) >>= return . either errToLisp List readEvalLines output fname s = liftIO $ do env <- primitiveEnv parseResult <- runErrorT $ readLisps fname s either print (mapM_ (showIOThrows . eval env >=> (when output . putStrLn))) parseResult readEvalLines' output env fname s = liftIO $ do parseResult <- runErrorT $ readLisps fname s either print (mapM_ (showIOThrows . eval env >=> (when output . putStrLn))) parseResult readEvalPrint fname lisp = liftIO $ primitiveEnv >>= \env -> readEvalPrint' fname env lisp readEvalPrint' fname env = showIOThrows . (readLisp fname >=> eval env) >=> putFlushLn eval :: Env -> LispVal -> ThrowsErrorIO LispVal eval _ v@(Bool _) = return v eval _ v@(Num _) = return v eval _ v@(Character _) = return v eval _ v@(String _) = return v eval env v@(Atom a) = getVar env a eval env (List (func:args)) = do f <- eval env func eitherName <- expect (atomType `orType` primType `orType` funcType) f let name' = liftM (either id id) (get (atomType `orType` primType) f) maybe (mapM (eval env) args >>= apply f) (\evalFunc -> evalFunc undefined env args) (name' >>= \name -> lookup name selfRefPrimitives) eval _ badExpr = throwError $ BadExpr "unrecognized form" badExpr apply p@(PrimitiveFunc f) args = maybe (throwError $ BadExpr "unrecognized primitive" p) ($args) $ lookup f primitives apply (Func params varargs body closure) args = if length params /= length args && isNothing varargs then throwError $ NumArgs (length params) args else liftIO (bindVars closure $ zip params args) >>= bindVarArgs varargs >>= \env -> liftM last (mapM (eval env) body) where remainingArgs = drop (length params) args bindVarArgs arg env = maybe (return env) (\argName -> liftIO $ bindVars env [(argName, List remainingArgs)]) arg apply f _ = throwError . Default $ "cannot apply " ++ show f ++ " as function" putFlushLn msg = liftIO $ putStrLn msg >> hFlush stdout
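-- An illustrative session (not part of the original module). It assumes the
-- basic primitives from Scheme.Eval.Primitives include `*`; reusing a single
-- environment lets the `define` stay visible to the later expression.
exampleSession :: IO ()
exampleSession = do
  env <- primitiveEnv
  readEvalPrint' "<example>" env "(define (square x) (* x x))"
  readEvalPrint' "<example>" env "(square 7)"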
hucal/SCMinHS
Scheme/Eval.hs
Haskell
bsd-3-clause
7,024
module Test.Juggernaut.Api where import Juggernaut.Api main :: IO () main = putStrLn "hello"
markhibberd/juggernaut
tests/Test/Juggernaut/Api.hs
Haskell
bsd-3-clause
95
-- Both these functions should successfully simplify
-- using the combine-identical-alternatives optimisation

module T7360 where

import Data.OldList as L

data Foo = Foo1 | Foo2 | Foo3 !Int

fun1 :: Foo -> ()
{-# NOINLINE fun1 #-}
fun1 x = case x of
           Foo1    -> ()
           Foo2    -> ()
           Foo3 {} -> ()

fun2 x = (fun1 Foo1,  -- Keep -ddump-simpl output
                      -- in a predictable order
          case x of
            []    -> L.length x
            (_:_) -> L.length x)
jstolarek/ghc
testsuite/tests/simplCore/should_compile/T7360.hs
Haskell
bsd-3-clause
515
module Data.TTask.Command.Update ( updateTaskStatus , updateStoryStatus , updateSprintStatus ) where import Control.Lens import Data.TTask.Types ------ -- Update status updateTaskStatus :: Id -> TStatusRecord -> Project -> Project updateTaskStatus i r pj = pj&task i%~ (\t -> t { _taskStatus = r `TStatusCons` _taskStatus t }) updateStoryStatus :: Id -> TStatusRecord -> Project -> Project updateStoryStatus i r pj = pj&story i%~ (\s -> s { _storyStatus = r `TStatusCons` _storyStatus s }) updateSprintStatus :: Id -> TStatusRecord -> Project -> Project updateSprintStatus i r pj = pj&sprint i%~ (\s -> s { _sprintStatus = r `TStatusCons` _sprintStatus s })
tokiwoousaka/ttask
src/Data/TTask/Command/Update.hs
Haskell
bsd-3-clause
686
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE EmptyDataDecls #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE NoMonomorphismRestriction #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE UndecidableInstances #-} ----------------------------------------------------------------------------- -- | -- Module : Hadron.Controller -- Copyright : Soostone Inc -- License : BSD3 -- -- Maintainer : Ozgun Ataman -- Stability : experimental -- -- High level flow-control of Hadoop programs with ability to define a -- sequence of Map-Reduce operations in a Monad, have strongly typed -- data locations. ---------------------------------------------------------------------------- module Hadron.Controller ( -- * Hadoop Program Construction Controller , connect , connect' , io , orchIO , nodeIO , setVal , getVal , runOnce , MapReduce (..) , mrOptions , Mapper , Reducer , (>.>) , (<.<) , MRKey (..) , CompositeKey , SingleKey (..) , WrapSerialize (..) , WrapSafeCopy (..) -- * Data Sources , Tap (..) , tapProto, tapLocation , tap , taps , mergeTaps , concatTaps , binaryDirTap , setupBinaryDir , fileListTap , fanOutTap, sinkFanOut, sequentialSinkFanout , readTap , readHdfsFile -- * Command Line Entry Point , hadoopMain , HadoopEnv (..) -- * Settings for MapReduce Jobs , MROptions , mroPart , mroNumMap , mroNumReduce , mroCompress , mroOutSep , mroTaskTimeout , PartitionStrategy (..) , Comparator (..) , RerunStrategy (..) -- * Hadoop Utilities , emitCounter , hsEmitCounter , emitStatus , getFileName -- * MapReduce Combinators , mapReduce , firstBy , mapMR , oneSnap , joinMR , joinStep , JoinType (..) 
, JoinKey -- * Data Serialization Utilities , module Hadron.Protocol , module Hadron.Run ) where ------------------------------------------------------------------------------- import Control.Applicative import Control.Arrow import Control.Concurrent.Async import Control.Concurrent.Chan import Control.Concurrent.QSem import Control.Error import Control.Exception.Lens import Control.Lens import Control.Monad.Catch import Control.Monad.Operational hiding (view) import qualified Control.Monad.Operational as O import Control.Monad.State import Control.Monad.Trans.Resource import Control.Retry import qualified Crypto.Hash.MD5 as Crypto import qualified Data.ByteString.Base16 as Base16 import qualified Data.ByteString.Char8 as B import Data.ByteString.Search as B import Data.Char import Data.Conduit hiding (connect) import Data.Conduit.Binary (sinkHandle, sourceHandle) import qualified Data.Conduit.List as C import Data.Conduit.Zlib import Data.CSV.Conduit import Data.Default import Data.List import qualified Data.Map.Strict as M import Data.Monoid import Data.SafeCopy import Data.Serialize import Data.String import Data.String.Conv import qualified Data.Text as T import Data.Text.Encoding import Data.Time import Data.Typeable import Network.HostName import System.Environment import System.FilePath.Lens import System.IO import System.Locale import Text.Parsec ------------------------------------------------------------------------------- import Hadron.Basic hiding (mapReduce) import Hadron.Conduit import Hadron.Join import Hadron.Logger import Hadron.Protocol import Hadron.Run import Hadron.Run.Hadoop (mrsInput, mrsJobName, mrsNumReduce, mrsOutput) import Hadron.Types import Hadron.Utils ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- echo :: (Applicative m, MonadIO m, LogItem a) => Severity -> a -> LogStr -> m () echo sev cxt msg = runLog $ logF cxt "Run.Hadoop" sev msg ------------------------------------------------------------------------------- echoInfo :: (Applicative m, MonadIO m, LogItem a) => a -> LogStr -> m () echoInfo = echo InfoS newtype SingleKey a = SingleKey { unKey :: a } deriving (Eq,Show,Read,Ord,Serialize) newtype WrapSerialize a = WrapSerialize { _getSerialized :: a } deriving (Eq,Show,Read,Ord,Serialize) newtype WrapSafeCopy a = WrapSafeCopy { _getSafeCopy :: a } deriving (Eq,Show,Read,Ord) deriveSafeCopy 1 'base ''WrapSafeCopy type Parser = Parsec [B.ByteString] () keyToken :: Parser B.ByteString keyToken = tokenPrim B.unpack (\pos _ _ -> incSourceColumn pos 1) Just fromCompKey :: MRKey a => [B.ByteString] -> Either ParseError a fromCompKey s = runParser keyParser () "Key Input" s class MRKey k where toCompKey :: k -> CompositeKey keyParser :: Parser k numKeys :: k -> Int instance MRKey () where toCompKey _ = [""] keyParser = keyToken >> return () numKeys _ = 1 instance MRKey B.ByteString where toCompKey k = [k] keyParser = keyToken numKeys _ = 1 instance MRKey CompositeKey where toCompKey ks = ks keyParser = many1 keyToken numKeys ks = length ks instance MRKey String where toCompKey = toCompKey . B.pack keyParser = B.unpack <$> keyToken numKeys _ = 1 instance MRKey T.Text where toCompKey = toCompKey . encodeUtf8 keyParser = decodeUtf8 <$> keyToken numKeys _ = 1 instance MRKey Int where toCompKey = toCompKey . B.pack . show keyParser = keyParser >>= maybe (fail "Can't read Int MRKey") return . 
readMay numKeys _ = 1 instance Serialize a => MRKey (WrapSerialize a) where toCompKey = toCompKey . (^. re pSerialize) . _getSerialized keyParser = do a <- (^? pSerialize) <$> keyParser maybe (fail "Can't decode WrapSerialize") (return . WrapSerialize) a numKeys _ = 1 instance SafeCopy a => MRKey (WrapSafeCopy a) where toCompKey = toCompKey . (^. re pSafeCopy) . _getSafeCopy keyParser = do a <- (^? pSafeCopy) <$> keyParser maybe (fail "Can't decode WrapSerialize") (return . WrapSafeCopy) a numKeys _ = 1 utcFormat :: String utcFormat = "%Y-%m-%d %H:%M:%S.%q" instance MRKey UTCTime where toCompKey = toCompKey . formatTime defaultTimeLocale utcFormat keyParser = do res <- parseTime defaultTimeLocale utcFormat <$> keyParser maybe (fail "Can't parse value as UTCTime") return res numKeys _ = 1 instance (MRKey a, MRKey b) => MRKey (a,b) where toCompKey (a,b) = toCompKey a ++ toCompKey b keyParser = (,) <$> keyParser <*> keyParser numKeys (a,b) = numKeys a + numKeys b instance (MRKey a, MRKey b, MRKey c) => MRKey (a,b,c) where toCompKey (a,b,c) = toCompKey a ++ toCompKey b ++ toCompKey c keyParser = (,,) <$> keyParser <*> keyParser <*> keyParser numKeys (a,b,c) = numKeys a + numKeys b + numKeys c instance (MRKey a, MRKey b, MRKey c, MRKey d) => MRKey (a,b,c,d) where toCompKey (a,b,c,d) = toCompKey a ++ toCompKey b ++ toCompKey c ++ toCompKey d keyParser = (,,,) <$> keyParser <*> keyParser <*> keyParser <*> keyParser numKeys (a,b,c,d) = numKeys a + numKeys b + numKeys c + numKeys d ------------------------------------------------------------------------------- -- | Do something with m-r output before writing it to a tap. (>.>) :: MapReduce a b -> Conduit b (ResourceT IO) c -> MapReduce a c (MapReduce o p m c r) >.> f = MapReduce o p m c r' where r' = case r of Left r'' -> Left $ r'' =$= f Right conv -> Right $ conv =$= f ------------------------------------------------------------------------------- -- | Do something with the m-r input before starting the map stage. (<.<) :: Conduit c (ResourceT IO) a -> MapReduce a b -> MapReduce c b f <.< (MapReduce o p m c r) = MapReduce o p (f =$= m) c r ------------------------------------------------------------------------------- -- | A packaged MapReduce step. Make one of these for each distinct -- map-reduce step in your overall 'Controller' flow. data MapReduce a b = forall k v. MRKey k => MapReduce { _mrOptions :: MROptions -- ^ Hadoop and MapReduce options affecting only this specific -- job. , _mrInPrism :: Prism' B.ByteString v -- ^ A serialization scheme for values between the map-reduce -- steps. , _mrMapper :: Mapper a k v , _mrCombiner :: Maybe (Reducer k v (k,v)) , _mrReducer :: Either (Reducer k v b) (Conduit v (ResourceT IO) b) -- ^ Either a reducer or a final value converter for a map-only -- MapReduce job. } -------------------------------------------------------------------------------- mrOptions :: Lens' (MapReduce a b) MROptions mrOptions f (MapReduce o p m c r) = (\ o' -> MapReduce o' p m c r) <$> f o -- | Tap is a data source/sink definition that *knows* how to serve -- records of type 'a'. -- -- It comes with knowledge on how to decode ByteString to target type -- and can be used both as a sink (to save data form MR output) or -- source (to feed MR programs). 
-- -- Usually, you just define the various data sources and destinations -- your MapReduce program is going to need: -- -- > customers = 'tap' "s3n://my-bucket/customers" (csvProtocol def) data Tap a = Tap { _tapLocation :: [FilePath] , _tapProto :: Protocol' a } makeLenses ''Tap -- | If two 'location's are the same, we consider two Taps equal. instance Eq (Tap a) where a == b = _tapLocation a == _tapLocation b -- | Construct a 'DataDef' tap :: FilePath -> Protocol' a -> Tap a tap fp p = Tap [fp] p taps :: [FilePath] -> Protocol' a -> Tap a taps fp p = Tap fp p ------------------------------------------------------------------------------- -- | Does given file belong to tap? belongsToTap :: Tap a -> FilePath -> Bool belongsToTap t fn = any (`isInfixOf` fn) stem where stem = map (takeWhile (/= '*')) (t ^. tapLocation) ------------------------------------------------------------------------------- concatTaps :: [Tap a] -> Tap a concatTaps ts = Tap locs newP where locs = concatMap _tapLocation ts newP = Protocol enc dec dec = do fn <- liftIO $ getFileName case find (flip belongsToTap fn) ts of Nothing -> error "Unexpected: Can't determine tap in concatTaps." Just t -> t ^. (tapProto . protoDec) enc = head ts ^. tapProto . protoEnc ------------------------------------------------------------------------------- -- | Given a tap directory, enumerate and load all files inside. -- Caution: This is meant only as a way to load small files, or else -- you'll fill up your memory. readTap :: RunContext -> Tap a -> IO [a] readTap rc t = do fs <- concat <$> forM (_tapLocation t) (hdfsLs rc) let chk fp = not (elem (fp ^. filePath . filename) [".", ".."]) && (fp ^. fileSize) > 0 let fs' = filter chk fs runResourceT $ inp (map _filePath fs') =$= (t ^. tapProto . protoDec) $$ C.consume where policy = capDelay 10000000 $ exponentialBackoff 50000 <> limitRetries 10 pullOne sem chan fp = bracket_ (waitQSem sem) (signalQSem sem) $ recoverAll policy $ const $ do a <- runResourceT $ hdfsCat rc fp $$ C.consume writeChan chan (Just (B.concat a)) inp :: [FilePath] -> Producer (ResourceT IO) B.ByteString inp fs = do sem <- liftIO $ newQSem 10 chan <- liftIO newChan a <- liftIO $ async $ do mapConcurrently (pullOne sem chan) fs writeChan chan Nothing liftIO $ link a sourceChan chan ------------------------------------------------------------------------------- -- | Combine two taps intelligently into the Either sum type. -- -- Matches on the prefix path given as part of each tap. It would -- therefore fail to work properly on self joins where the same data -- location is used in both taps. mergeTaps :: Tap a -> Tap b -> Tap (Either a b) mergeTaps ta tb = Tap (_tapLocation ta ++ _tapLocation tb) newP where newP = Protocol enc dec dec = do fn <- liftIO getFileName if belongsToTap ta fn then (ta ^. tapProto . protoDec) =$= C.map Left else (tb ^. tapProto . protoDec) =$= C.map Right as = ta ^. (tapProto . protoEnc) bs = tb ^. (tapProto . protoEnc) enc = awaitForever $ \ res -> case res of Left a -> yield a =$= as Right b -> yield b =$= bs ------------------------------------------------------------------------------ -- | Conduit that takes in hdfs filenames and outputs the file -- contents. Will unpack .gz files automatically. 
readHdfsFile :: RunContext -> Conduit B.ByteString (ResourceT IO) (FilePath, B.ByteString) readHdfsFile settings = awaitForever $ \s3Uri -> do let uriStr = B.unpack s3Uri getFile = hdfsLocalStream settings uriStr outStream = if isSuffixOf "gz" uriStr then getFile =$= ungzip else getFile outStream =$= C.map (\ s -> (uriStr, s)) ------------------------------------------------------------------------------ -- | Tap for handling file lists. Hadoop can't process raw binary data -- because it splits on newlines. This tap allows you to get around that -- limitation by instead making your input a list of file paths that contain -- binary data. Then the file names get split by hadoop and each map job -- reads from those files as its first step. fileListTap :: RunContext -> FilePath -- ^ A file containing a list of files to be used as input -> Tap (FilePath, B.ByteString) fileListTap settings loc = tap loc (Protocol enc dec) where enc = error "You should never use a fileListTap as output!" dec = linesConduit =$= readHdfsFile settings ------------------------------------------------------------------------------- -- | Sink objects into multiple output files through concurrent -- file-write processes behind the scenes. Work-around for Hadoop -- Streaming limitations in having to sink output into a single -- provided HDFS path. fanOutTap :: RunContext -> FilePath -- ^ Static location where fanout statistics will be written via -- the regular hadoop output. -> FilePath -- ^ A temporary location where in-progress files can be kept. -> (a -> FilePath) -- ^ Decision dispatch of where each object should go. Make sure -- to provide fully qualified hdfs directory paths; a unique token -- will be appended to each file based on the node producing it. -> Conduit a (ResourceT IO) B.ByteString -- ^ How to serialize each object. Make sure this conduit provides -- for all the typical requirements: One record per line, no -- newlines inside the record, etc. -> FanOutSink -- ^ How to sink the fanout, exposed here for flexibility. -> Tap a fanOutTap rc loc tmp dispatch encoder sink = tap loc (Protocol enc dec) where dec = error "fanOutTap can't be used to read input." enc = do hn <- liftIO mkUniqueHostToken let dispatch' a = dispatch a & basename %~ (<> "_" <> hn) fo <- liftIO $ hdfsFanOut rc tmp register $ liftIO $ fanCloseAll fo sink dispatch' conv fo stats <- liftIO $ fanStats fo (forM_ (M.toList stats) $ \ (fp, cnt) -> yield (map B.pack [fp, (show cnt)])) =$= fromCSV def conv a = liftM mconcat $ C.sourceList [a] =$= encoder $$ C.consume ------------------------------------------------------------------------------- mkUniqueHostToken :: IO String mkUniqueHostToken = do tk <- randomToken 64 (toS . Base16.encode . toS . Crypto.hash . toS . (++ tk)) <$> getHostName newtype AppLabel = AppLabel { unAppLabel :: T.Text } deriving (Eq,Show,Read,Ord) ------------------------------------------------------------------------------- mkAppLabel :: T.Text -> AppLabel mkAppLabel txt | all chk (toS txt) = AppLabel txt | otherwise = error "Application labels can only be lowercase alphanumeric characters" where chk c = all ($ c) [isLower, isAlphaNum, not . isSpace] instance IsString AppLabel where fromString = mkAppLabel . toS data ContState = ContState { _csApp :: AppLabel , _csMRCount :: ! Int -- ^ MR run count; one for each 'connect'. , _csMRVars :: ! (M.Map String B.ByteString) -- ^ Arbitrary key-val store that's communicated to nodes. , _csDynId :: ! 
Int -- ^ Keeps increasing count of dynamic taps in the order they are -- created in the Controller monad. Needed so we can communicate -- the tap locations to MR nodes. , _csRunOnceId :: ! Int -- ^ Increasing count of run-once cache items so we can -- communicate to remote nodes. , _csShortCircuit :: Bool -- ^ Used by the remote nodes. When they hit their primary target -- (the mapper, combiner or the reducer), they should stop -- executing. } makeLenses ''ContState instance Default ContState where def = ContState (AppLabel "_") 0 M.empty 0 0 False ------------------------------------------------------------------------------- -- | load controller varibles back up in worker nodes loadState :: (MonadState ContState m, MonadIO m) => RunContext -> FilePath -> m () loadState settings runToken = do fn <- hdfsTempFilePath settings runToken tmp <- liftIO $ hdfsGet settings fn (app, st) <- liftIO $ withLocalFile settings tmp $ \ local -> do !st <- readFile local <&> read -- removeFile local return st csMRVars %= M.union st csApp .= app ------------------------------------------------------------------------------- -- | Write state from orchestrator for later load by worker nodes writeState :: (MonadIO m, MonadState ContState m) => RunContext -> FilePath -> m () writeState settings runToken = do remote <- hdfsTempFilePath settings runToken let local = LocalFile runToken st <- use csMRVars app <- use csApp withLocalFile settings local $ \ lfp -> liftIO $ writeFile lfp (show (app, st)) -- put settings file into a file named after the -- randomly generated token. liftIO $ hdfsPut settings local remote ------------------------------------------------------------------------------- data ConI a where Connect :: forall i o. MapReduce i o -> [Tap i] -> Tap o -> Maybe String -> ConI () MakeTap :: Protocol' a -> ConI (Tap a) BinaryDirTap :: FilePath -> (FilePath -> Bool) -> ConI (Tap (FilePath, B.ByteString)) ConIO :: IO a -> ConI a -- ^ General IO action; both orchestrator and nodes perform the action OrchIO :: IO a -> ConI () -- ^ Only the orchestrator performs action NodeIO :: IO a -> ConI a -- ^ Only the nodes perform action SetVal :: String -> B.ByteString -> ConI () GetVal :: String -> ConI (Maybe B.ByteString) RunOnce :: Serialize a => IO a -> ConI a -- ^ Only run on orchestrator, then make available to all the -- nodes via HDFS. -- | All MapReduce steps are integrated in the 'Controller' monad. -- -- Warning: We do have an 'io' combinator as an escape valve for you -- to use. However, you need to be careful how you use the result of -- an IO computation. Remember that the same 'main' function will run -- on both the main orchestrator process and on each and every -- map/reduce node. newtype Controller a = Controller (Program ConI a) deriving (Functor, Applicative, Monad) ------------------------------------------------------------------------------- -- | Connect a MapReduce program to a set of inputs, returning the -- output tap that was implicity generated (on hdfs) in the process. connect' :: MapReduce a b -- ^ MapReduce step to run -> [Tap a] -- ^ Input files -> Protocol' b -- ^ Serialization protocol to be used on the output -> Maybe String -- ^ A custom name for the job -> Controller (Tap b) connect' mr inp p nm = do out <- makeTap p connect mr inp out nm return out ------------------------------------------------------------------------------- -- | Connect a typed MapReduce program you supply with a list of -- sources and a destination. 
connect :: MapReduce a b -> [Tap a] -> Tap b -> Maybe String -> Controller () connect mr inp outp nm = Controller $ singleton $ Connect mr inp outp nm ------------------------------------------------------------------------------- makeTap :: Protocol' a -> Controller (Tap a) makeTap p = Controller $ singleton $ MakeTap p ------------------------------------------------------------------------------- -- | Set a persistent variable in Controller state. This variable will -- be set during main M-R job controller loop and communicated to all -- the map and reduce nodes and will be available there. setVal :: String -> B.ByteString -> Controller () setVal k v = Controller $ singleton $ SetVal k v ------------------------------------------------------------------------------- -- | Get varible from Controller state getVal :: String -> Controller (Maybe B.ByteString) getVal k = Controller $ singleton $ GetVal k ------------------------------------------------------------------------------- -- | Creates a tap for a directory of binary files. binaryDirTap :: FilePath -- ^ A root location to list files under -> (FilePath -> Bool) -- ^ A filter condition to refine the listing -> Controller (Tap (FilePath, B.ByteString)) binaryDirTap loc filt = Controller $ singleton $ BinaryDirTap loc filt ------------------------------------------------------------------------------- -- | Perform an IO operation both on the orchestrator and on the worker nodes. io :: IO a -> Controller a io f = Controller $ singleton $ ConIO f ------------------------------------------------------------------------------- -- | Perform an IO operation only on the orchestrator orchIO :: IO a -> Controller () orchIO = Controller . singleton . OrchIO -- | Perform an IO action in orchestrator to obtain value, then cache it on HDFS and -- magically make it available to nodes during their runtime. runOnce :: Serialize a => IO a -> Controller a runOnce = Controller . singleton . RunOnce ------------------------------------------------------------------------------- -- | Perform an IO operation only on the worker nodes. nodeIO :: IO a -> Controller a nodeIO = Controller . singleton . NodeIO ------------------------------------------------------------------------------- newMRKey :: MonadState ContState m => m String newMRKey = do i <- gets _csMRCount csMRCount %= (+1) return $! show i ------------------------------------------------------------------------------- -- | Grab list of files in destination, write into a file, put file on -- HDFS so it is shared and return the (local, hdfs) paths. setupBinaryDir :: RunContext -> FilePath -> (FilePath -> Bool) -> IO (LocalFile, FilePath) setupBinaryDir settings loc chk = do localFile <- randomLocalFile hdfsFile <- randomRemoteFile settings files <- hdfsLs settings loc <&> map _filePath let files' = filter chk files withLocalFile settings localFile $ \ f -> writeFile f (unlines files') hdfsPut settings localFile hdfsFile return (localFile, hdfsFile) tapLens :: Int -> Lens' ContState (Maybe B.ByteString) tapLens curId = csMRVars.at ("tap_" <> show curId) runCacheLens :: Int -> Lens' ContState (Maybe B.ByteString) runCacheLens curId = csMRVars.at ("runOnce_" <> show curId) pickTapId :: MonadState ContState m => m Int pickTapId = pickIdWith csDynId pickRunCacheId :: MonadState ContState m => m Int pickRunCacheId = pickIdWith csRunOnceId ------------------------------------------------------------------------------- -- | Monotinically increasing counter. 
pickIdWith :: MonadState ContState m => Lens' ContState Int -> m Int pickIdWith l = do curId <- use l l %= (+1) return curId ------------------------------------------------------------------------------- -- | Interpreter for the central job control process orchestrate :: (MonadMask m, MonadIO m, Applicative m) => Controller a -> RunContext -> RerunStrategy -> ContState -> m (Either String a) orchestrate (Controller p) settings rr s = do bracket (liftIO $ openFile "hadron.log" AppendMode) (liftIO . hClose) (\ h -> do echoInfo () "Initiating orchestration..." evalStateT (runEitherT (go p)) s) where go = eval . O.view eval (Return a) = return a eval (i :>>= f) = eval' i >>= go . f eval' :: (MonadIO m) => ConI a -> EitherT String (StateT ContState m) a eval' (ConIO f) = liftIO f eval' (OrchIO f) = void $ liftIO f eval' (NodeIO _) = return (error "NodeIO can't be used in the orchestrator decision path") -- evaluate the function, write its result into HDFS for later retrieval eval' (RunOnce f) = do a <- liftIO f curId <- pickRunCacheId runCacheLens curId .= Just (encode a) -- loc <- liftIO $ randomRemoteFile settings -- curId <- pickRunCacheId -- runCacheLens curId .= Just (B.pack loc) -- tmp <- randomFileName -- liftIO $ withLocalFile settings tmp $ \ fn -> -- B.writeFile fn (encode a) -- liftIO $ hdfsPut settings tmp loc return a eval' (MakeTap tp) = do loc <- liftIO $ randomRemoteFile settings curId <- pickTapId tapLens curId .= Just (B.pack loc) return $ Tap [loc] tp eval' (BinaryDirTap loc filt) = do (_, hdfsFile) <- liftIO $ setupBinaryDir settings loc filt -- remember location of the file from the original loc -- string curId <- pickTapId tapLens curId .= Just (B.pack hdfsFile) return $ fileListTap settings hdfsFile eval' (SetVal k v) = csMRVars . at k .= Just v eval' (GetVal k) = use (csMRVars . at k) eval' (Connect (MapReduce mro _ _ _ rd) inp outp nm) = go' where go' = do mrKey <- newMRKey let info = sl "Key" mrKey <> sl "Name" nm echoInfo info "Launching MR job" chk <- liftIO $ mapM (hdfsFileExists settings) (_tapLocation outp) case any id chk of False -> do echoInfo info "Destination file does not exist. Proceeding." go'' mrKey True -> case rr of RSFail -> echo ErrorS info $ ls $ "Destination exists: " <> head (_tapLocation outp) RSSkip -> echoInfo info $ "Destination exists. Skipping " <> ls (intercalate ", " (_tapLocation outp)) RSReRun -> do echoInfo info $ ls $ "Destination file exists, will delete and rerun: " <> head (_tapLocation outp) _ <- liftIO $ mapM_ (hdfsDeletePath settings) (_tapLocation outp) go'' mrKey echoInfo info "MR job complete" go'' mrKey = do -- serialize current state to HDFS, to be read by -- individual mappers reducers of this step. runToken <- liftIO $ randomToken 64 writeState settings runToken let mrs = mrOptsToRunOpts mro launchMapReduce settings mrKey runToken $ mrs & mrsInput .~ concatMap _tapLocation inp & mrsOutput .~ head (_tapLocation outp) & mrsJobName .~ nm & (if onlyMap then mrsNumReduce .~ Just 0 else id) onlyMap = case rd of Left{} -> False Right{} -> True data Phase = Map | Combine | Reduce ------------------------------------------------------------------------------- -- | What to do when we notice that a destination file already exists. data RerunStrategy = RSFail -- ^ Fail and log the problem. | RSReRun -- ^ Delete the file and rerun the analysis | RSSkip -- ^ Consider the analaysis already done and skip. 
deriving (Eq,Show,Read,Ord) instance Default RerunStrategy where def = RSFail ------------------------------------------------------------------------------- -- | Decode key produced by the Map stage. Errors are simply raised as -- key marshalling errors are unacceptable. decodeKey :: MRKey k => (CompositeKey, v) -> (k, v) decodeKey (k,v) = (k', v) where k' = either mkErr id $ fromCompKey k mkErr e = error ("Stage could not decode Map's output: " ++ show e) encodeKey :: MRKey k => (k, v) -> (CompositeKey, v) encodeKey = first toCompKey data NodeError = NodeRunComplete -- ^ Single short circuiting in node workers; map/reduce/combine -- has been completed. deriving (Eq,Show,Read,Ord,Typeable) makePrisms ''NodeError instance Exception NodeError class AsNodeError t where _NodeError :: Prism' t NodeError instance AsNodeError NodeError where _NodeError = id instance AsNodeError SomeException where _NodeError = exception ------------------------------------------------------------------------------- -- | The main entry point. Use this function to produce a command line -- program that encapsulates everything. -- -- When run without arguments, the program will orchestrate the entire -- MapReduce job flow. The same program also doubles as the actual -- mapper/reducer executable when called with right arguments, though -- you don't have to worry about that. hadoopMain :: ( MonadThrow m, MonadMask m , MonadIO m, Functor m, Applicative m ) => [(AppLabel, Controller ())] -- ^ Hadoop streaming applications that can be run. First element -- of tuple is used to lookup the right application to run from -- the command line. -> RunContext -- ^ Hadoop environment info. -> RerunStrategy -- ^ What to do if destination files already exist. -> m () hadoopMain conts settings rr = do args <- liftIO getArgs case args of [nm] -> do let nm' = mkAppLabel (toS nm) case lookup nm' conts of Nothing -> error (show nm <> " is not a known MapReduce application") Just cont -> do res <- orchestrate cont settings rr (def { _csApp = nm' }) echoInfo () ("Completed MR application " <> ls nm) [runToken, arg] -> workNode settings conts runToken arg _ -> error "You must provide the name of the MR application to initiate orchestration." ------------------------------------------------------------------------------- mkArgs :: IsString [a] => [a] -> [(Phase, [a])] mkArgs mrKey = [ (Map, "mapper_" ++ mrKey) , (Reduce, "reducer_" ++ mrKey) , (Combine, "combiner_" ++ mrKey) ] ------------------------------------------------------------------------------- -- | Interpret the Controller in the context of a Hadoop worker node. -- In this mode, the objective is to find the mapper, combiner or the -- reducer that we are supposed to be executing as. workNode :: forall m a. (MonadIO m, MonadThrow m, MonadMask m, Functor m) => RunContext -> [(AppLabel, Controller ())] -> String -> String -> m () workNode settings conts runToken arg = do handling (exception._NodeRunComplete) (const $ return ()) $ do void $ flip evalStateT def $ do loadState settings runToken l <- use csApp case lookup l conts of Nothing -> error ("App not found in worker node: " <> show l) Just (Controller p) -> interpretWithMonad go' p where -- A short-circuiting wrapper for go. We hijack the exception -- system to implement shortcircuting here. It may be a better -- idea to use ContT. 
go' :: ConI b -> StateT ContState m b go' c = do chk <- use csShortCircuit case chk of True -> throwM NodeRunComplete False -> go c go :: ConI b -> StateT ContState m b go (ConIO f) = liftIO f go (OrchIO _) = return () go (NodeIO f) = liftIO f go (MakeTap lp) = do curId <- pickTapId dynLoc <- use $ tapLens curId case dynLoc of Nothing -> error $ "Dynamic location can't be determined for MakTap at index " <> show curId Just loc' -> return $ Tap ([B.unpack loc']) lp go (BinaryDirTap loc _) = do -- remember location of the file from the original loc -- string curId <- pickTapId dynLoc <- use $ tapLens curId case dynLoc of Nothing -> error $ "Dynamic location can't be determined for BinaryDirTap at: " <> loc Just loc' -> return $ fileListTap settings $ B.unpack loc' -- setting in map-reduce phase is a no-op... There's nobody to -- communicate it to. go (SetVal _ _) = return () go (GetVal k) = use (csMRVars . at k) go (RunOnce _) = do curId <- pickRunCacheId bs <- use (runCacheLens curId) either error return $ note "RunOnce cache missing on remote node" bs >>= decode go (Connect (MapReduce mro mrInPrism mp comb rd) inp outp nm) = do mrKey <- newMRKey let dec = do fn <- getFileName let t = find (flip belongsToTap fn) inp return $ case t of Nothing -> head inp ^. tapProto . protoDec Just t' -> t' ^. tapProto . protoDec let enc = outp ^. tapProto . protoEnc mp' = case rd of Left _ -> mapRegular Right conv -> do setLineBuffering dec' <- liftIO $ dec runResourceT $ sourceHandle stdin =$= dec' =$= mp =$= C.map snd =$= conv =$= enc $$ sinkHandle stdout mapRegular = do dec' <- liftIO dec mapperWith mrInPrism (dec' =$= mp =$= C.map encodeKey) red = case rd of Right _ -> error "Unexpected: Reducer called for a map-only job." Left f -> do setLineBuffering runResourceT $ reducer mro mrInPrism (C.map decodeKey =$= f) =$= enc $$ sinkHandle stdout comb' = case comb of Nothing -> error "Unexpected: No combiner supplied." Just c -> combiner mro mrInPrism (C.map decodeKey =$= c =$= C.map encodeKey) -- error message maker for caught exceptions mkErr :: Maybe FilePath -> String -> SomeException -> b mkErr file stage e = error $ "Exception raised during " <> stage <> " in MR Job #" <> mrKey <> maybe "" (\nm' -> " (" <> nm' <> ") ") nm <> maybe "" (" while processing file " <>) file <> ": " <> show e case find ((== arg) . snd) $ mkArgs mrKey of Just (Map, _) -> do liftIO $ do curFile <- getFileName catching exception mp' (mkErr (Just curFile) "mapper") csShortCircuit .= True Just (Reduce, _) -> do liftIO $ catching exception red (mkErr Nothing "reducer") csShortCircuit .= True Just (Combine, _) -> do liftIO $ catching exception comb' (mkErr Nothing "combiner") csShortCircuit .= True Nothing -> return () -- -- | TODO: See if this works. Objective is to increase type safety of -- -- join inputs. Notice how we have an existential on a. -- -- -- -- A join definition that ultimately produces objects of type b. -- data JoinDef b = forall a. JoinDef { -- joinTap :: Tap a -- , joinType :: JoinType -- , joinMap :: Conduit a IO (JoinKey, b) -- } ------------------------------------------------------------------------------- -- | A convenient way to express map-sde multi-way join operations -- into a single data type. All you need to supply is the map -- operation for each tap, the reduce step is assumed to be the -- Monoidal 'mconcat'. -- -- 'joinMR' is probably easier to use if you can get by with an inner -- join. joinStep :: forall k b a. 
(Show b, Monoid b, Serialize b, MRKey k) => [(Tap a, JoinType, Mapper a k b)] -- ^ Dataset definitions and how to map each dataset. -> MapReduce a b joinStep fs = MapReduce mro pSerialize mp Nothing (Left rd) where showBS = B.pack . show n = numKeys (undefined :: k) mro = joinOpts { _mroPart = Partition (n+1) n } locations :: [FilePath] locations = concatMap (view (_1 . tapLocation)) fs taps' :: [Tap a] taps' = concatMap ((\t -> replicate (length (_tapLocation t)) t) . view _1) fs locations' = map B.pack locations dataSets :: [(FilePath, DataSet)] dataSets = map (\ (loc, i) -> (loc, DataSet (showBS i))) $ zip locations ([0..] :: [Int]) dsIx :: M.Map FilePath DataSet dsIx = M.fromList dataSets tapIx :: M.Map DataSet (Tap a) tapIx = M.fromList $ zip (map snd dataSets) taps' getTapDS :: Tap a -> [DataSet] getTapDS t = mapMaybe (flip M.lookup dsIx) (_tapLocation t) fs' :: [(DataSet, JoinType)] fs' = concatMap (\ (t, jt, _) -> for (getTapDS t) $ \ ds -> (ds, jt) ) fs for = flip map -- | get dataset name from a given input filename getDS nm = fromMaybe (error "Can't identify current tap from filename.") $ do let nm' = B.pack nm curLoc <- find (\l -> length (B.indices l nm') > 0) locations' M.lookup (B.unpack curLoc) dsIx -- | get the conduit for given dataset name mkMap' ds = fromMaybe (error "Can't identify current tap in IX.") $ do t <- M.lookup ds tapIx cond <- find ((== t) . view _1) fs return $ (cond ^. _3) =$= C.map (_1 %~ toCompKey) mp = joinMapper getDS mkMap' rd = joinReducer fs' mapReduce = undefined -- ------------------------------------------------------------------------------- -- -- | A generic map-reduce function that should be good enough for most -- -- cases. -- mapReduce -- :: forall a k v b. (MRKey k, Serialize v) -- => (a -> MaybeT IO [(k, v)]) -- -- ^ Common map key -- -> (k -> b -> v -> IO b) -- -- ^ Left fold in reduce stage -- -> b -- -- ^ A starting point for fold -- -> MapReduce a (k,b) -- mapReduce mp rd a0 = MapReduce mro pSerialize m Nothing r -- where -- n = numKeys (undefined :: k) -- mro = def { _mroPart = Partition n n } -- m :: Mapper a k v -- m = awaitForever $ \ a -> runMaybeT $ hoist (lift . lift) (mp a) >>= lift . C.sourceList -- r :: Reducer k v (k,b) -- r = do -- (k, b) <- C.foldM step (Nothing, a0) -- case k of -- Nothing -> return () -- Just k' -> yield (k', b) -- step (_, acc) (k, v) = do -- !b <- liftIO $ rd k acc v -- return (Just k, b) firstBy = undefined -- ------------------------------------------------------------------------------- -- -- | Deduplicate input objects that have the same key value; the first -- -- object seen for each key will be kept. -- firstBy -- :: forall a k. (Serialize a, MRKey k) -- => (a -> MaybeT IO [k]) -- -- ^ Key making function -- -> MapReduce a a -- firstBy f = mapReduce mp rd Nothing >.> (C.map snd =$= C.catMaybes) -- where -- mp :: a -> MaybeT IO [(k, a)] -- mp a = do -- k <- f a -- return $ zip k (repeat a) -- rd :: k -> Maybe a -> a -> IO (Maybe a) -- rd _ Nothing a = return $! Just a -- rd _ acc _ = return $! acc mapMR = undefined -- ------------------------------------------------------------------------------- -- -- | A generic map-only MR step. 
-- mapMR :: (Serialize b) => (v -> IO [b]) -> MapReduce v b -- mapMR f = MapReduce def pSerialize mp Nothing rd -- where -- mp = do -- rng <- liftIO mkRNG -- awaitForever $ \ a -> do -- t <- liftIO $ randomToken 2 rng -- res <- liftIO $ f a -- mapM_ (\x -> yield (t, x)) res -- rd = C.map snd oneSnap = undefined -- ------------------------------------------------------------------------------- -- -- | Do somthing with only the first row we see, putting the result in -- -- the given HDFS destination. -- oneSnap -- :: RunContext -- -> FilePath -- -> (a -> B.ByteString) -- -> Conduit a IO a -- oneSnap settings s3fp f = do -- h <- await -- case h of -- Nothing -> return () -- Just h' -> do -- liftIO $ putHeaders (f h') -- yield h' -- awaitForever yield -- where -- putHeaders x = do -- tmp <- randomFileName -- withLocalFile settings tmp $ \ fn -> B.writeFile fn x -- chk <- hdfsFileExists settings s3fp -- when (not chk) $ void $ hdfsPut settings tmp s3fp -- withLocalFile settings tmp removeFile ------------------------------------------------------------------------------- -- | Monoidal inner (map-side) join for two types. Each type is mapped -- into the common monoid, which is then collapsed during reduce. -- -- Make sure an incoming 'Left' stays 'Left' and a 'Right' stays a -- 'Right'. -- -- TODO: Wrap around this with a better API so the user doesn't have -- to care. joinMR :: forall a b k v. (MRKey k, Monoid v, Serialize v) => Mapper (Either a b) k (Either v v) -- ^ Mapper for the input -> MapReduce (Either a b) v joinMR mp = MapReduce mro pSerialize mp' Nothing (Left red) where mro = def { _mroPart = Partition (n+1) n } n = numKeys (undefined :: k) -- add to key so we know for sure all Lefts arrive before -- Rights. mp' :: Mapper (Either a b) CompositeKey (Either v v) mp' = mp =$= C.map modMap modMap (k, Left v) = (toCompKey k ++ ["1"], Left v) modMap (k, Right v) = (toCompKey k ++ ["2"], Right v) -- cache lefts, start emitting upon seeing the first right. red = go [] where go ls = do inc <- await case inc of Nothing -> return () Just (_, Left r) -> go $! (r:ls) Just (_, Right b) -> do mapM_ yield [mappend a b | a <- ls] go ls
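-------------------------------------------------------------------------------
-- -- | An illustrative wiring sketch, not a definitive recipe: 'myStep',
-- -- 'mySettings' and the HDFS path are placeholders, and 'csvProtocol def'
-- -- simply follows the example given in the 'Tap' documentation above.
-- exampleApp myStep = do
--     let customers = tap "hdfs:///data/customers" (csvProtocol def)
--     out <- connect' myStep [customers] (csvProtocol def) (Just "example-step")
--     orchIO . putStrLn $ "Output tap: " ++ show (out ^. tapLocation)
--
-- exampleMain myStep mySettings =
--     hadoopMain [("example", exampleApp myStep)] mySettings RSReRun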
juanpaucar/hadron
src/Hadron/Controller.hs
Haskell
bsd-3-clause
44,398
module Tests
    ( levelTest
    , bassVolumeTest
    , bassTimingTest
    )
where

import Euterpea

import Elements

-- | A short piece of music for testing that each of the instruments is set up
-- correctly, and that the levels are adjusted. Each instrument is played
-- individually for a few bars, and then several repeats of playing them all at
-- once.
levelTest :: Music Pitch
levelTest = line parts :+: timesM 4 (chord parts)
  where
    parts = [coilTest, bassTest, bellTest, padTest, drumTest]

    coilTest = timesM 2 $
            onCoilLong (line coilNotes)
        :+: onCoilLong (timesM 4 $ chord coilNotes)
        :+: onCoilShort (timesM 4 $ tempo (4/1) $ line coilNotes)
    coilNotes = map (note qn) [(B,4),(E,5),(Fs,5),(Gs,5)]

    bassTest = bassMF $ bassBit :+: transpose 7 bassBit
    bassBit = line (concatMap (replicate 2) $ openStrings qn)
              :+: chord [rest wn, strum en]

    bellTest = onBells $ line $ zipWith note (cycle [qn, qn, qn, dhn])
        [ (E,5), (Gs,5), (Fs,5), (B,4)
        , (E,5), (Fs,5), (Gs,5), (E,5)
        , (Gs,5), (E,5), (Fs,5), (B,4)
        , (B,4), (Fs,5), (Gs,5), (E,5)
        ]

    drumTest = line $ map (\v -> phrase [Dyn $ StdLoudness v] drumBit) [MP, FF]
    drumBit = onDrums $
            timesM 2 (line (map (perc AcousticSnare) [qn, qn, en, en, en]) :+: rest en)
        :+: timesM 3 (perc RideCymbal2 sn :+: perc RideCymbal2 sn
                      :+: perc CrashCymbal2 en)
        :+: perc CrashCymbal2 en
        :+: perc CrashCymbal2 en

    padTest = onPad $ timesM 2 $ lowLine :+: highLine
    lowLine = line $ map (note hn) [(D,4), (B,4), (B,5), (Fs,5)]
    highLine = line $ map (note qn) [(D,5), (Gs,5), (Cs,6), (E,6)]

-- | This test piece plays three arpeggiated chords at each of the 20 different
-- loudness levels the MechBass can produce. It was used to learn the relative
-- volumes of each setting.
--
-- Before each set of chords, the loudness level being tested is counted off by
-- playing notes on the G string. This is so that one can tell what is going on
-- when listening to an audio recording of the test.
--
-- See 'bassFF', 'bassMF', 'bassP', and 'bassPP' in "Elements" for the settings
-- used in the piece.
bassVolumeTest :: Music Pitch
bassVolumeTest = line $ map atVolume volumes
  where
    atVolume v = phrase [Dyn $ Loudness 95] (countOff v)
                 :+: rest qn
                 :+: phrase [Dyn $ Loudness $ toVel v] arppeg
                 :+: rest qn

    volumes = [20,19..1]
    toVel = fromIntegral . (ceiling :: Double -> Int) . (/20) . (*128) . fromIntegral

    countOff v = onBassGString $ line $ concat $
        zipWith (:) (replicate v $ note sn (G,4)) (cycle [[], [], [rest sn]])

    arppeg = line $ map (\n -> transpose n $ strum en) [7, 5, 0]

strum :: Dur -> Music Pitch
strum = phrase [Art $ Legato 3.5 ] . line . openStrings

openStrings :: Dur -> [Music Pitch]
openStrings d =
    [ onBassGString $ note d (G,3)
    , onBassDString $ note d (D,3)
    , onBassAString $ note d (A,2)
    , onBassEString $ note d (E,2)
    ]

-- | A piece of music that tests if the fret shifter timing is correct.
--
-- For each pair of starting and ending fret position, the shifter on the G
-- string is pre-positioned to the starting position, then played at the ending
-- position. At the same time, the shifter on the D string is simply pre-
-- positioned and played at the ending fret position, and thus doesn't need to
-- move before playing.
--
-- If the allocator (see "MechBassAllocator") doesn't allow enough time for the
-- shifter motion from starting to ending, the note on the G string will play
-- late relative to the D string. If it allows enough (or too much) time, the
-- notes will sound together.
--
-- See "FindSkews.hs" for a program that can use an audio recording of this test
-- to compute adjustments to the shifter timing in 'MechBass.shifterTimes'.
bassTimingTest :: Music Pitch
bassTimingTest = line $ map timingTo [0..12]
  where
    timingTo t = rest hn :+: line (map (timingToFrom t) [0..12])
    timingToFrom t f =
            onBassGString (fretNote qn (G,3) f 1 :+: fretNote qn (G,3) t 70)
        :=: onBassDString (fretNote qn (D,3) t 1 :+: fretNote qn (D,3) t 70)
    fretNote d p t v = phrase [Dyn $ Loudness v] $ note d (trans t p)
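-- Illustrative helpers (not part of the original module) for auditioning the
-- test pieces from GHCi; Euterpea's 'play' assumes a working MIDI output device.
playLevelTest, playBassVolumeTest, playBassTimingTest :: IO ()
playLevelTest      = play levelTest
playBassVolumeTest = play bassVolumeTest
playBassTimingTest = play bassTimingTest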
mzero/PlainChanges2
src/Tests.hs
Haskell
bsd-3-clause
4,269
{-| This module provides a function for computing the topological sort of a directed acyclic graph. -} module TopologicalSort ( topologicalSort ) where import Prelude hiding (replicate) import Data.Vector.Unboxed.Mutable import Control.Monad.RWS import Control.Monad import Control.Lens import Control.Lens.TH import Control.Monad.ST import GraphUtils import VectorUtils -- | The state threaded through the topological sort algorithm. Simply -- tracks visited vertices. data TSState s = TSState { _visitedVertices :: STVector s Bool, _topologicalSorting :: [Vertex] } makeLenses ''TSState -- | Monad for the topological sort. type TopSort s = RWST Graph () (TSState s) (ST s) -- | Runs a computation in the topological sort monad. runTopSort :: Graph -> (forall s . TopSort s a) -> [Vertex] runTopSort graph tssAction = runST $ do initialVisitedVertices <- replicate (order graph) False (finalState,_) <- execRWST tssAction graph (TSState initialVisitedVertices []) return $ finalState ^. topologicalSorting -- | Computes a topological sort of a directed acyclic graph. -- Uses a DFS, runs in O(|V|+|E|). topologicalSort :: Graph -> [Vertex] topologicalSort graph = runTopSort graph topologicalSort' -- | Computes a topological sort in the topological sort monad. -- Simply the main loop of a DFS. topologicalSort' :: TopSort s () topologicalSort' = do graph <- ask forM_ (vertices graph) $ \vertex -> do visit vertex -- | Visits a vertex, visits its successors, then adds itself -- to the head of the topological sort, which puts all vertices -- in their post ordering, therefore yielding a topological sort. visit :: Vertex -> TopSort s () visit vertex = do isVisited <- readL visitedVertices vertex when (not isVisited) $ do writeL visitedVertices vertex True graph <- ask forM_ (successors graph vertex) $ \successor -> do visit successor topologicalSorting %= (vertex:)
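-- A property sketch (illustrative, not part of the original module): in any
-- valid topological sort a vertex appears before each of its successors.
-- 'elemIndex' would come from Data.List; the quadratic lookup is fine for
-- test-sized graphs.
--
-- isValidTopSort :: Graph -> Bool
-- isValidTopSort g =
--     let ordering   = topologicalSort g
--         position v = elemIndex v ordering
--     in and [ position u < position v | u <- vertices g, v <- successors g u ]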
alexisVallet/ag44-graph-algorithms
TopologicalSort.hs
Haskell
bsd-3-clause
1,957
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE RecordWildCards #-} module AuthAPI ( Username , Password , Token , Secret , Storage , LoginArgs(..) , AuthAPI , serveAuth ) where import Control.Monad (when) import Control.Monad.IO.Class (liftIO) import Control.Monad.Trans.Except (ExceptT) import Data.Aeson import Data.Aeson.Types (Options, fieldLabelModifier) import Data.Char (isUpper, toLower) import Data.IORef (IORef, readIORef, modifyIORef) import Data.Map (Map) import GHC.Generics import Servant (Server, (:>), (:<|>)(..), Get, Post, ReqBody, Capture) import Servant (throwError) import Servant.API.ContentTypes (JSON) import Servant.API.Experimental.Auth (AuthProtect) import Servant.Server (ServantErr) import Servant.Server (err403, err404) import Servant.Server.Experimental.Auth.HMAC import System.Random import qualified Data.Map as Map type Username = String type Password = String type Token = String type Secret = String type Storage = IORef (Map Username Token) type instance AuthHmacAccount = Username type instance AuthHmacToken = Token data LoginArgs = LoginArgs { laUsername :: String , laPassword :: String } deriving Generic instance FromJSON LoginArgs where parseJSON = genericParseJSON dropPrefixOptions instance ToJSON LoginArgs where toJSON = genericToJSON dropPrefixOptions dropPrefix :: String -> String dropPrefix "" = "" dropPrefix (c:t) | isUpper c = toLower c : t | otherwise = dropPrefix t dropPrefixOptions :: Options dropPrefixOptions = defaultOptions { fieldLabelModifier = dropPrefix } type AuthAPI = "login" :> ReqBody '[JSON] LoginArgs :> Post '[JSON] String :<|> "secret" :> Capture "username" Username :> AuthProtect "hmac-auth" :> Get '[JSON] String users :: [(Username, (Password, Secret))] users = [ ("mr_foo", ("password1", "War is Peace")) , ("mr_bar", ("letmein" , "Freedom is Slavery")) , ("mr_baz", ("baseball" , "Ignorance is Strength")) ] serveAuth :: Storage -> Server AuthAPI serveAuth storage = serveLogin :<|> serveSecret where serveLogin (LoginArgs {..}) = serve' where serve' = case isValidUser of True -> liftIO $ getToken False -> throwError err403 isValidUser = maybe False (\(password', _) -> password' == laPassword) (lookup laUsername users) getToken :: IO String getToken = (maybe mkToken return) =<< ((Map.lookup laUsername) <$> (readIORef storage)) mkToken :: IO String mkToken = do token <- (take 16 . randomRs ('A', 'Z')) <$> getStdGen modifyIORef storage (Map.insert laUsername token) return token serveSecret :: Username -> (Username, Token) -> ExceptT ServantErr IO String serveSecret username' (username'', _) = do when (username' /= username'') $ throwError err403 -- User can request only his own secret maybe (throwError err404) (\(_, secret') -> return secret') (lookup username' users)
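-- Quick doctest-style checks (illustrative, not part of the original module)
-- of the field-label munging used by the JSON instances above:
--
-- >>> dropPrefix "laUsername"
-- "username"
-- >>> dropPrefix "laPassword"
-- "password"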
zohl/servant-auth-hmac
example/server/AuthAPI.hs
Haskell
bsd-3-clause
3,178
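A side note on the token generator in this record: getStdGen only reads the global generator, so repeated calls to mkToken within one run draw the same character sequence. A hypothetical standalone variant using newStdGen, which does advance the global generator between calls, might look like the sketch below; freshToken is an invented name, not part of the module above.

import System.Random (newStdGen, randomRs)

-- Hypothetical helper: newStdGen splits and updates the global generator on
-- every call, so successive tokens differ within the same run.
freshToken :: IO String
freshToken = (take 16 . randomRs ('A', 'Z')) <$> newStdGen

main :: IO ()
main = freshToken >>= putStrLn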
-- | UPDATE operations on HD wallets module Cardano.Wallet.Kernel.DB.HdWallet.Update ( updateHdRoot , updateHdRootPassword , updateHdAccountName ) where import Universum import Cardano.Wallet.Kernel.DB.HdWallet import Cardano.Wallet.Kernel.DB.Util.AcidState import UTxO.Util (modifyAndGetNew) {------------------------------------------------------------------------------- UPDATE -------------------------------------------------------------------------------} -- | Updates in one gulp the Hd Wallet name and assurance level. updateHdRoot :: HdRootId -> AssuranceLevel -> WalletName -> Update' UnknownHdRoot HdWallets HdRoot updateHdRoot rootId assurance name = zoomHdRootId identity rootId $ do modifyAndGetNew $ set hdRootAssurance assurance . set hdRootName name updateHdRootPassword :: HdRootId -> HasSpendingPassword -> Update' UnknownHdRoot HdWallets HdRoot updateHdRootPassword rootId hasSpendingPassword = zoomHdRootId identity rootId $ do modifyAndGetNew $ hdRootHasPassword .~ hasSpendingPassword updateHdAccountName :: HdAccountId -> AccountName -> Update' UnknownHdAccount HdWallets HdAccount updateHdAccountName accId name = do zoomHdAccountId identity accId $ do modifyAndGetNew $ hdAccountName .~ name
input-output-hk/pos-haskell-prototype
wallet/src/Cardano/Wallet/Kernel/DB/HdWallet/Update.hs
Haskell
mit
1,433
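The Update'/zoom machinery used above is specific to the wallet kernel and is not included in this record. As a rough, self-contained illustration of the modify-and-return-the-new-value pattern, the same idea over plain Control.Monad.State (mtl) could be written as follows; modifyAndGetNew' is an illustrative stand-in, not the kernel's own helper.

import Control.Monad.State (State, get, modify, runState)

-- Apply the update, then hand back the value it produced.
modifyAndGetNew' :: (s -> s) -> State s s
modifyAndGetNew' f = modify f >> get

main :: IO ()
main = print (runState (modifyAndGetNew' (+ 1)) (41 :: Int))  -- (42,42)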
import Control.Arrow import Distribution.PackageDescription import Distribution.Simple hiding (Module) import Distribution.Simple.LocalBuildInfo import Language.Preprocessor.Cpphs import System.FilePath import Text.XkbCommon.ParseDefines import Module import Utils sourceLoc :: FilePath sourceLoc = "./" main :: IO () main = defaultMainWithHooks simpleUserHooks { buildHook = \p l h f -> generateSource sourceLoc >> buildHook simpleUserHooks p l h f , haddockHook = \p l h f -> generateSource sourceLoc >> haddockHook simpleUserHooks p l h f , sDistHook = \p ml h f -> case ml of Nothing -> fail "No local buildinfo available. configure first" Just l -> do generateSource sourceLoc sDistHook simpleUserHooks p ml h f } generateSource :: FilePath -> IO () generateSource fp = do parsedDefs <- getKeysymDefs saveModule fp (keysymsModule parsedDefs) return () keysymsModule :: [(String,Integer)] -> Module keysymsModule defs = Module "Text.XkbCommon.KeysymPatterns" [] $ Import ["Text.XkbCommon.InternalTypes"] : map (\(name,val) -> Pattern ("Keysym_" ++ name) Nothing ("= Keysym " ++ show val)) defs
tulcod/haskell-xkbcommon
Setup.hs
Haskell
mit
1,350
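The local Module and saveModule helpers that render the generated KeysymPatterns file are not part of this record, so the following is only a hand-written guess at what one generated pattern line looks like; the two keysym values are illustrative examples, not output of getKeysymDefs.

-- Hypothetical rendering of one (name, value) pair from keysymsModule above.
renderPattern :: (String, Integer) -> String
renderPattern (name, val) = "pattern Keysym_" ++ name ++ " = Keysym " ++ show val

main :: IO ()
main = mapM_ (putStrLn . renderPattern) [("a", 0x61), ("Return", 0xff0d)]
-- pattern Keysym_a = Keysym 97
-- pattern Keysym_Return = Keysym 65293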
{-# LANGUAGE ScopedTypeVariables, FlexibleContexts, TypeFamilies, TypeSynonymInstances, FlexibleInstances, GADTs, RankNTypes, UndecidableInstances, TypeOperators #-}
-- | This module provides types and functions for creating and manipulating
-- control signals (ready and ack) associated with protocols. The 'Ack' signal
-- indicates that a protocol element has received data from an upstream source,
-- and the 'Ready' signal indicates that the component is prepared to accept
-- data from an upstream source.
module Language.KansasLava.Protocols.Types where

import Language.KansasLava.Rep
import Language.KansasLava.Signal
import Language.KansasLava.Types
import Language.KansasLava.Utils

import Control.Monad

-- It is preferable to be sending a message that expects an Ack,
-- but to receive a message based on your Ready signal.

------------------------------------------------------------------------------------

-- | An Ack is always in response to an incoming packet or message.
newtype Ack = Ack { unAck :: Bool }
        deriving (Eq,Ord)

instance Show Ack where
        show (Ack True)  = "A"
        show (Ack False) = "~"

-- TODO: use $(repSynonym ''Ack ''Bool)

instance Rep Ack where
  data X Ack = XAckRep { unXAckRep :: X Bool }
  type W Ack = W Bool
  -- The template for using representations
  unX             = liftM Ack . unX . unXAckRep
  optX            = XAckRep . optX . liftM unAck
  toRep           = toRep . unXAckRep
  fromRep         = XAckRep . fromRep
  repType Witness = repType (Witness :: Witness Bool)
  showRep         = showRepDefault

-- | Convert a 'Bool' signal to an 'Ack' signal.
toAck :: (sig ~ Signal clk) => sig Bool -> sig Ack
toAck = coerce Ack

-- | Convert an 'Ack' to a 'Bool' signal.
fromAck :: (sig ~ Signal clk) => sig Ack -> sig Bool
fromAck = coerce unAck

------------------------------------------------------------------------------------

-- | A Ready is always in response to an incoming packet or message.
newtype Ready = Ready { unReady :: Bool }
        deriving (Eq,Ord)

instance Show Ready where
        show (Ready True)  = "R"
        show (Ready False) = "~"

instance Rep Ready where
  data X Ready = XReadyRep { unXReadyRep :: X Bool }
  type W Ready = W Bool
  -- The template for using representations
  unX             = liftM Ready . unX . unXReadyRep
  optX            = XReadyRep . optX . liftM unReady
  toRep           = toRep . unXReadyRep
  fromRep         = XReadyRep . fromRep
  repType Witness = repType (Witness :: Witness Bool)
  showRep         = showRepDefault

-- | Convert a Bool signal to a 'Ready' signal.
toReady :: (sig ~ Signal clk) => sig Bool -> sig Ready
toReady = coerce Ready

-- | Convert a 'Ready' signal to a Bool signal.
fromReady :: (sig ~ Signal clk) => sig Ready -> sig Bool
fromReady = coerce unReady

------------------------------------------------------------------------------------------------------------------------------------------------
andygill/kansas-lava
Language/KansasLava/Protocols/Types.hs
Haskell
bsd-3-clause
2,962
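As a rough intuition for the two flags, here is a toy, plain-Haskell step function, not KansasLava's Signal semantics: a word moves, and an Ack is returned upstream, exactly when the producer offers data and the consumer asserts Ready.

-- Toy model only; the real types above live inside the Signal/Rep machinery.
step :: Maybe a -> Bool -> (Bool, Maybe a)   -- (ack sent upstream, word delivered)
step (Just x) True = (True, Just x)
step _        _    = (False, Nothing)

main :: IO ()
main = print (zipWith step [Just 'x', Nothing, Just 'y'] [True, True, False])
-- [(True,Just 'x'),(False,Nothing),(False,Nothing)]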
{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- -- Building info tables. -- -- (c) The University of Glasgow 2004-2006 -- ----------------------------------------------------------------------------- module StgCmmLayout ( mkArgDescr, emitCall, emitReturn, adjustHpBackwards, emitClosureProcAndInfoTable, emitClosureAndInfoTable, slowCall, directCall, mkVirtHeapOffsets, mkVirtConstrOffsets, getHpRelOffset, ArgRep(..), toArgRep, argRepSizeW -- re-exported from StgCmmArgRep ) where #include "HsVersions.h" #if __GLASGOW_HASKELL__ >= 709 import Prelude hiding ((<*>)) #endif import StgCmmClosure import StgCmmEnv import StgCmmArgRep -- notably: ( slowCallPattern ) import StgCmmTicky import StgCmmMonad import StgCmmUtils import StgCmmProf (curCCS) import MkGraph import SMRep import Cmm import CmmUtils import CmmInfo import CLabel import StgSyn import Id import TyCon ( PrimRep(..) ) import BasicTypes ( RepArity ) import DynFlags import Module import Util import Data.List import Outputable import FastString import Control.Monad ------------------------------------------------------------------------ -- Call and return sequences ------------------------------------------------------------------------ -- | Return multiple values to the sequel -- -- If the sequel is @Return@ -- -- > return (x,y) -- -- If the sequel is @AssignTo [p,q]@ -- -- > p=x; q=y; -- emitReturn :: [CmmExpr] -> FCode ReturnKind emitReturn results = do { dflags <- getDynFlags ; sequel <- getSequel ; updfr_off <- getUpdFrameOff ; case sequel of Return _ -> do { adjustHpBackwards ; let e = CmmLoad (CmmStackSlot Old updfr_off) (gcWord dflags) ; emit (mkReturn dflags (entryCode dflags e) results updfr_off) } AssignTo regs adjust -> do { when adjust adjustHpBackwards ; emitMultiAssign regs results } ; return AssignedDirectly } -- | @emitCall conv fun args@ makes a call to the entry-code of @fun@, -- using the call/return convention @conv@, passing @args@, and -- returning the results to the current sequel. -- emitCall :: (Convention, Convention) -> CmmExpr -> [CmmExpr] -> FCode ReturnKind emitCall convs fun args = emitCallWithExtraStack convs fun args noExtraStack -- | @emitCallWithExtraStack conv fun args stack@ makes a call to the -- entry-code of @fun@, using the call/return convention @conv@, -- passing @args@, pushing some extra stack frames described by -- @stack@, and returning the results to the current sequel. -- emitCallWithExtraStack :: (Convention, Convention) -> CmmExpr -> [CmmExpr] -> [CmmExpr] -> FCode ReturnKind emitCallWithExtraStack (callConv, retConv) fun args extra_stack = do { dflags <- getDynFlags ; adjustHpBackwards ; sequel <- getSequel ; updfr_off <- getUpdFrameOff ; case sequel of Return _ -> do emit $ mkJumpExtra dflags callConv fun args updfr_off extra_stack return AssignedDirectly AssignTo res_regs _ -> do k <- newLabelC let area = Young k (off, _, copyin) = copyInOflow dflags retConv area res_regs [] copyout = mkCallReturnsTo dflags fun callConv args k off updfr_off extra_stack emit (copyout <*> mkLabel k <*> copyin) return (ReturnedTo k off) } adjustHpBackwards :: FCode () -- This function adjusts the heap pointer just before a tail call or -- return. At a call or return, the virtual heap pointer may be less -- than the real Hp, because the latter was advanced to deal with -- the worst-case branch of the code, and we may be in a better-case -- branch. In that case, move the real Hp *back* and retract some -- ticky allocation count. 
-- -- It *does not* deal with high-water-mark adjustment. That's done by -- functions which allocate heap. adjustHpBackwards = do { hp_usg <- getHpUsage ; let rHp = realHp hp_usg vHp = virtHp hp_usg adjust_words = vHp -rHp ; new_hp <- getHpRelOffset vHp ; emit (if adjust_words == 0 then mkNop else mkAssign hpReg new_hp) -- Generates nothing when vHp==rHp ; tickyAllocHeap False adjust_words -- ...ditto ; setRealHp vHp } ------------------------------------------------------------------------- -- Making calls: directCall and slowCall ------------------------------------------------------------------------- -- General plan is: -- - we'll make *one* fast call, either to the function itself -- (directCall) or to stg_ap_<pat>_fast (slowCall) -- Any left-over arguments will be pushed on the stack, -- -- e.g. Sp[old+8] = arg1 -- Sp[old+16] = arg2 -- Sp[old+32] = stg_ap_pp_info -- R2 = arg3 -- R3 = arg4 -- call f() return to Nothing updfr_off: 32 directCall :: Convention -> CLabel -> RepArity -> [StgArg] -> FCode ReturnKind -- (directCall f n args) -- calls f(arg1, ..., argn), and applies the result to the remaining args -- The function f has arity n, and there are guaranteed at least n args -- Both arity and args include void args directCall conv lbl arity stg_args = do { argreps <- getArgRepsAmodes stg_args ; direct_call "directCall" conv lbl arity argreps } slowCall :: CmmExpr -> [StgArg] -> FCode ReturnKind -- (slowCall fun args) applies fun to args, returning the results to Sequel slowCall fun stg_args = do dflags <- getDynFlags argsreps <- getArgRepsAmodes stg_args let (rts_fun, arity) = slowCallPattern (map fst argsreps) (r, slow_code) <- getCodeR $ do r <- direct_call "slow_call" NativeNodeCall (mkRtsApFastLabel rts_fun) arity ((P,Just fun):argsreps) emitComment $ mkFastString ("slow_call for " ++ showSDoc dflags (ppr fun) ++ " with pat " ++ unpackFS rts_fun) return r -- Note [avoid intermediate PAPs] let n_args = length stg_args if n_args > arity && optLevel dflags >= 2 then do funv <- (CmmReg . CmmLocal) `fmap` assignTemp fun fun_iptr <- (CmmReg . CmmLocal) `fmap` assignTemp (closureInfoPtr dflags (cmmUntag dflags funv)) -- ToDo: we could do slightly better here by reusing the -- continuation from the slow call, which we have in r. -- Also we'd like to push the continuation on the stack -- before the branch, so that we only get one copy of the -- code that saves all the live variables across the -- call, but that might need some improvements to the -- special case in the stack layout code to handle this -- (see Note [diamond proc point]). fast_code <- getCode $ emitCall (NativeNodeCall, NativeReturn) (entryCode dflags fun_iptr) (nonVArgs ((P,Just funv):argsreps)) slow_lbl <- newLabelC fast_lbl <- newLabelC is_tagged_lbl <- newLabelC end_lbl <- newLabelC let correct_arity = cmmEqWord dflags (funInfoArity dflags fun_iptr) (mkIntExpr dflags n_args) emit (mkCbranch (cmmIsTagged dflags funv) is_tagged_lbl slow_lbl <*> mkLabel is_tagged_lbl <*> mkCbranch correct_arity fast_lbl slow_lbl <*> mkLabel fast_lbl <*> fast_code <*> mkBranch end_lbl <*> mkLabel slow_lbl <*> slow_code <*> mkLabel end_lbl) return r else do emit slow_code return r -- Note [avoid intermediate PAPs] -- -- A slow call which needs multiple generic apply patterns will be -- almost guaranteed to create one or more intermediate PAPs when -- applied to a function that takes the correct number of arguments. 
-- We try to avoid this situation by generating code to test whether -- we are calling a function with the correct number of arguments -- first, i.e.: -- -- if (TAG(f) != 0} { // f is not a thunk -- if (f->info.arity == n) { -- ... make a fast call to f ... -- } -- } -- ... otherwise make the slow call ... -- -- We *only* do this when the call requires multiple generic apply -- functions, which requires pushing extra stack frames and probably -- results in intermediate PAPs. (I say probably, because it might be -- that we're over-applying a function, but that seems even less -- likely). -- -- This very rarely applies, but if it does happen in an inner loop it -- can have a severe impact on performance (#6084). -------------- direct_call :: String -> Convention -- e.g. NativeNodeCall or NativeDirectCall -> CLabel -> RepArity -> [(ArgRep,Maybe CmmExpr)] -> FCode ReturnKind direct_call caller call_conv lbl arity args | debugIsOn && real_arity > length args -- Too few args = do -- Caller should ensure that there enough args! pprPanic "direct_call" $ text caller <+> ppr arity <+> ppr lbl <+> ppr (length args) <+> ppr (map snd args) <+> ppr (map fst args) | null rest_args -- Precisely the right number of arguments = emitCall (call_conv, NativeReturn) target (nonVArgs args) | otherwise -- Note [over-saturated calls] = do dflags <- getDynFlags emitCallWithExtraStack (call_conv, NativeReturn) target (nonVArgs fast_args) (nonVArgs (stack_args dflags)) where target = CmmLit (CmmLabel lbl) (fast_args, rest_args) = splitAt real_arity args stack_args dflags = slowArgs dflags rest_args real_arity = case call_conv of NativeNodeCall -> arity+1 _ -> arity -- When constructing calls, it is easier to keep the ArgReps and the -- CmmExprs zipped together. However, a void argument has no -- representation, so we need to use Maybe CmmExpr (the alternative of -- using zeroCLit or even undefined would work, but would be ugly). -- getArgRepsAmodes :: [StgArg] -> FCode [(ArgRep, Maybe CmmExpr)] getArgRepsAmodes = mapM getArgRepAmode where getArgRepAmode arg | V <- rep = return (V, Nothing) | otherwise = do expr <- getArgAmode (NonVoid arg) return (rep, Just expr) where rep = toArgRep (argPrimRep arg) nonVArgs :: [(ArgRep, Maybe CmmExpr)] -> [CmmExpr] nonVArgs [] = [] nonVArgs ((_,Nothing) : args) = nonVArgs args nonVArgs ((_,Just arg) : args) = arg : nonVArgs args {- Note [over-saturated calls] The natural thing to do for an over-saturated call would be to call the function with the correct number of arguments, and then apply the remaining arguments to the value returned, e.g. f a b c d (where f has arity 2) --> r = call f(a,b) call r(c,d) but this entails - saving c and d on the stack - making a continuation info table - at the continuation, loading c and d off the stack into regs - finally, call r Note that since there are a fixed number of different r's (e.g. stg_ap_pp_fast), we can also pre-compile continuations that correspond to each of them, rather than generating a fresh one for each over-saturated call. Not only does this generate much less code, it is faster too. We will generate something like: Sp[old+16] = c Sp[old+24] = d Sp[old+32] = stg_ap_pp_info call f(a,b) -- usual calling convention For the purposes of the CmmCall node, we count this extra stack as just more arguments that we are passing on the stack (cml_args). 
-} -- | 'slowArgs' takes a list of function arguments and prepares them for -- pushing on the stack for "extra" arguments to a function which requires -- fewer arguments than we currently have. slowArgs :: DynFlags -> [(ArgRep, Maybe CmmExpr)] -> [(ArgRep, Maybe CmmExpr)] slowArgs _ [] = [] slowArgs dflags args -- careful: reps contains voids (V), but args does not | gopt Opt_SccProfilingOn dflags = save_cccs ++ this_pat ++ slowArgs dflags rest_args | otherwise = this_pat ++ slowArgs dflags rest_args where (arg_pat, n) = slowCallPattern (map fst args) (call_args, rest_args) = splitAt n args stg_ap_pat = mkCmmRetInfoLabel rtsPackageKey arg_pat this_pat = (N, Just (mkLblExpr stg_ap_pat)) : call_args save_cccs = [(N, Just (mkLblExpr save_cccs_lbl)), (N, Just curCCS)] save_cccs_lbl = mkCmmRetInfoLabel rtsPackageKey (fsLit "stg_restore_cccs") ------------------------------------------------------------------------- ---- Laying out objects on the heap and stack ------------------------------------------------------------------------- -- The heap always grows upwards, so hpRel is easy to compute hpRel :: VirtualHpOffset -- virtual offset of Hp -> VirtualHpOffset -- virtual offset of The Thing -> WordOff -- integer word offset hpRel hp off = off - hp getHpRelOffset :: VirtualHpOffset -> FCode CmmExpr -- See Note [Virtual and real heap pointers] in StgCmmMonad getHpRelOffset virtual_offset = do dflags <- getDynFlags hp_usg <- getHpUsage return (cmmRegOffW dflags hpReg (hpRel (realHp hp_usg) virtual_offset)) mkVirtHeapOffsets :: DynFlags -> Bool -- True <=> is a thunk -> [(PrimRep,a)] -- Things to make offsets for -> (WordOff, -- _Total_ number of words allocated WordOff, -- Number of words allocated for *pointers* [(NonVoid a, ByteOff)]) -- Things with their offsets from start of object in order of -- increasing offset; BUT THIS MAY BE DIFFERENT TO INPUT ORDER -- First in list gets lowest offset, which is initial offset + 1. -- -- Void arguments are removed, so output list may be shorter than -- input list -- -- mkVirtHeapOffsets always returns boxed things with smaller offsets -- than the unboxed things mkVirtHeapOffsets dflags is_thunk things = ( bytesToWordsRoundUp dflags tot_bytes , bytesToWordsRoundUp dflags bytes_of_ptrs , ptrs_w_offsets ++ non_ptrs_w_offsets ) where hdr_words | is_thunk = thunkHdrSize dflags | otherwise = fixedHdrSizeW dflags hdr_bytes = wordsToBytes dflags hdr_words non_void_things = filterOut (isVoidRep . fst) things (ptrs, non_ptrs) = partition (isGcPtrRep . fst) non_void_things (bytes_of_ptrs, ptrs_w_offsets) = mapAccumL computeOffset 0 ptrs (tot_bytes, non_ptrs_w_offsets) = mapAccumL computeOffset bytes_of_ptrs non_ptrs computeOffset bytes_so_far (rep, thing) = (bytes_so_far + wordsToBytes dflags (argRepSizeW dflags (toArgRep rep)), (NonVoid thing, hdr_bytes + bytes_so_far)) -- | Just like mkVirtHeapOffsets, but for constructors mkVirtConstrOffsets :: DynFlags -> [(PrimRep,a)] -> (WordOff, WordOff, [(NonVoid a, ByteOff)]) mkVirtConstrOffsets dflags = mkVirtHeapOffsets dflags False ------------------------------------------------------------------------- -- -- Making argument descriptors -- -- An argument descriptor describes the layout of args on the stack, -- both for * GC (stack-layout) purposes, and -- * saving/restoring registers when a heap-check fails -- -- Void arguments aren't important, therefore (contrast constructSlowCall) -- ------------------------------------------------------------------------- -- bring in ARG_P, ARG_N, etc. 
#include "../includes/rts/storage/FunTypes.h" mkArgDescr :: DynFlags -> [Id] -> ArgDescr mkArgDescr dflags args = let arg_bits = argBits dflags arg_reps arg_reps = filter isNonV (map idArgRep args) -- Getting rid of voids eases matching of standard patterns in case stdPattern arg_reps of Just spec_id -> ArgSpec spec_id Nothing -> ArgGen arg_bits argBits :: DynFlags -> [ArgRep] -> [Bool] -- True for non-ptr, False for ptr argBits _ [] = [] argBits dflags (P : args) = False : argBits dflags args argBits dflags (arg : args) = take (argRepSizeW dflags arg) (repeat True) ++ argBits dflags args ---------------------- stdPattern :: [ArgRep] -> Maybe Int stdPattern reps = case reps of [] -> Just ARG_NONE -- just void args, probably [N] -> Just ARG_N [P] -> Just ARG_P [F] -> Just ARG_F [D] -> Just ARG_D [L] -> Just ARG_L [V16] -> Just ARG_V16 [V32] -> Just ARG_V32 [V64] -> Just ARG_V64 [N,N] -> Just ARG_NN [N,P] -> Just ARG_NP [P,N] -> Just ARG_PN [P,P] -> Just ARG_PP [N,N,N] -> Just ARG_NNN [N,N,P] -> Just ARG_NNP [N,P,N] -> Just ARG_NPN [N,P,P] -> Just ARG_NPP [P,N,N] -> Just ARG_PNN [P,N,P] -> Just ARG_PNP [P,P,N] -> Just ARG_PPN [P,P,P] -> Just ARG_PPP [P,P,P,P] -> Just ARG_PPPP [P,P,P,P,P] -> Just ARG_PPPPP [P,P,P,P,P,P] -> Just ARG_PPPPPP _ -> Nothing ------------------------------------------------------------------------- -- -- Generating the info table and code for a closure -- ------------------------------------------------------------------------- -- Here we make an info table of type 'CmmInfo'. The concrete -- representation as a list of 'CmmAddr' is handled later -- in the pipeline by 'cmmToRawCmm'. -- When loading the free variables, a function closure pointer may be tagged, -- so we must take it into account. emitClosureProcAndInfoTable :: Bool -- top-level? -> Id -- name of the closure -> LambdaFormInfo -> CmmInfoTable -> [NonVoid Id] -- incoming arguments -> ((Int, LocalReg, [LocalReg]) -> FCode ()) -- function body -> FCode () emitClosureProcAndInfoTable top_lvl bndr lf_info info_tbl args body = do { dflags <- getDynFlags -- Bind the binder itself, but only if it's not a top-level -- binding. We need non-top let-bindings to refer to the -- top-level binding, which this binding would incorrectly shadow. ; node <- if top_lvl then return $ idToReg dflags (NonVoid bndr) else bindToReg (NonVoid bndr) lf_info ; let node_points = nodeMustPointToIt dflags lf_info ; arg_regs <- bindArgsToRegs args ; let args' = if node_points then (node : arg_regs) else arg_regs conv = if nodeMustPointToIt dflags lf_info then NativeNodeCall else NativeDirectCall (offset, _, _) = mkCallEntry dflags conv args' [] ; emitClosureAndInfoTable info_tbl conv args' $ body (offset, node, arg_regs) } -- Data constructors need closures, but not with all the argument handling -- needed for functions. The shared part goes here. emitClosureAndInfoTable :: CmmInfoTable -> Convention -> [LocalReg] -> FCode () -> FCode () emitClosureAndInfoTable info_tbl conv args body = do { blks <- getCode body ; let entry_lbl = toEntryLbl (cit_lbl info_tbl) ; emitProcWithConvention conv (Just info_tbl) entry_lbl args blks }
spacekitteh/smcghc
compiler/codeGen/StgCmmLayout.hs
Haskell
bsd-3-clause
19,947
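Most of this module only makes sense inside GHC, but the argBits bitmap is easy to mimic in isolation. The sketch below uses a made-up three-constructor rep type in place of GHC's ArgRep, just to show the shape of the pointer/non-pointer bitmap: False marks a pointer word, True a non-pointer word, and wide non-pointer reps expand to their width.

-- Toy stand-ins for ArgRep and its word widths; not GHC's definitions.
data Rep = P | N | D   -- pointer, one-word non-pointer, two-word non-pointer

widthW :: Rep -> Int
widthW P = 1
widthW N = 1
widthW D = 2

argBits' :: [Rep] -> [Bool]
argBits' = concatMap bits
  where
    bits P = [False]
    bits r = replicate (widthW r) True

main :: IO ()
main = print (argBits' [P, N, D, P])  -- [False,True,True,True,False]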
{-# LANGUAGE CPP,MagicHash,ScopedTypeVariables,FlexibleInstances,RankNTypes,TypeSynonymInstances,MultiParamTypeClasses,BangPatterns #-} -- | By Chris Kuklewicz, drawing heavily from binary and binary-strict, -- but all the bugs are my own. -- -- This file is under the usual BSD3 licence, copyright 2008. -- -- Modified the monad to be strict for version 2.0.0 -- -- This started out as an improvement to -- "Data.Binary.Strict.IncrementalGet" with slightly better internals. -- The simplified 'Get', 'runGet', 'Result' trio with the -- "Data.Binary.Strict.Class.BinaryParser" instance are an _untested_ -- upgrade from IncrementalGet. Especially untested are the -- strictness properties. -- -- 'Get' usefully implements Applicative and Monad, MonadError, -- Alternative and MonadPlus. Unhandled errors are reported along -- with the number of bytes successfully consumed. Effects of -- 'suspend' and 'putAvailable' are visible after -- fail/throwError/mzero. -- -- Each time the parser reaches the end of the input it will return a -- Partial wrapped continuation which requests a (Maybe -- Lazy.ByteString). Passing (Just bs) will append bs to the input so -- far and continue processing. If you pass Nothing to the -- continuation then you are declaring that there will never be more -- input and that the parser should never again return a partial -- contination; it should return failure or finished. -- -- 'suspendUntilComplete' repeatedly uses a partial continuation to -- ask for more input until 'Nothing' is passed and then it proceeds -- with parsing. -- -- The 'getAvailable' command returns the lazy byte string the parser -- has remaining before calling 'suspend'. The 'putAvailable' -- replaces this input and is a bit fancy: it also replaces the input -- at the current offset for all the potential catchError/mplus -- handlers. This change is _not_ reverted by fail/throwError/mzero. -- -- The three 'lookAhead' and 'lookAheadM' and 'lookAheadE' functions are -- very similar to the ones in binary's Data.Binary.Get. -- -- -- Add specialized high-bit-run module Text.ProtocolBuffers.Get (Get,runGet,runGetAll,Result(..) -- main primitives ,ensureBytes,getStorable,getLazyByteString,suspendUntilComplete -- parser state manipulation ,getAvailable,putAvailable -- lookAhead capabilities ,lookAhead,lookAheadM,lookAheadE -- below is for implementation of BinaryParser (for Int64 and Lazy bytestrings) ,skip,bytesRead,isEmpty,isReallyEmpty,remaining,spanOf,highBitRun ,getWord8,getByteString ,getWord16be,getWord32be,getWord64be ,getWord16le,getWord32le,getWord64le ,getWordhost,getWord16host,getWord32host,getWord64host -- -- ,scan ,decode7,decode7size,decode7unrolled ) where -- The Get monad is an instance of binary-strict's BinaryParser: -- import qualified Data.Binary.Strict.Class as P(BinaryParser(..)) -- The Get monad is an instance of all of these library classes: import Control.Applicative(Applicative(pure,(<*>)),Alternative(empty,(<|>))) import Control.Monad(MonadPlus(mzero,mplus),when) import Control.Monad.Error.Class(MonadError(throwError,catchError),Error(strMsg)) -- It can be a MonadCont, but the semantics are too broken without a ton of work. 
-- implementation imports --import Control.Monad(replicateM,(>=>)) -- XXX testing --import qualified Data.ByteString as S(unpack) -- XXX testing --import qualified Data.ByteString.Lazy as L(pack) -- XXX testing import Control.Monad(ap) -- instead of Functor.fmap; ap for Applicative import Data.Bits(Bits((.|.),(.&.)),shiftL) import qualified Data.ByteString as S(concat,length,null,splitAt,findIndex) import qualified Data.ByteString.Internal as S(ByteString(..),toForeignPtr,inlinePerformIO) import qualified Data.ByteString.Unsafe as S(unsafeIndex,unsafeDrop {-,unsafeTake-}) import qualified Data.ByteString.Lazy as L(take,drop,length,span,toChunks,fromChunks,null,findIndex) import qualified Data.ByteString.Lazy.Internal as L(ByteString(..),chunk) import qualified Data.Foldable as F(foldr,foldr1) -- used with Seq import Data.Int(Int32,Int64) -- index type for L.ByteString import Data.Monoid(Monoid(mempty,mappend)) -- Writer has a Monoid contraint import Data.Sequence(Seq,null,(|>)) -- used for future queue in handler state import Data.Word(Word,Word8,Word16,Word32,Word64) import Foreign.ForeignPtr(withForeignPtr) import Foreign.Ptr(Ptr,castPtr,plusPtr,minusPtr,nullPtr) import Foreign.Storable(Storable(peek,sizeOf)) import System.IO.Unsafe(unsafePerformIO) #if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__) import GHC.Base(Int(..),uncheckedShiftL#) import GHC.Word(Word16(..),Word32(..),Word64(..),uncheckedShiftL64#) #endif --import Debug.Trace(trace) trace :: a -> b -> b trace _ = id -- Simple external return type data Result a = Failed {-# UNPACK #-} !Int64 String | Finished !L.ByteString {-# UNPACK #-} !Int64 a | Partial (Maybe L.ByteString -> Result a) -- Internal state type, not exposed to the user. -- Invariant: (S.null _top) implies (L.null _current) data S = S { _top :: {-# UNPACK #-} !S.ByteString , _current :: !L.ByteString , consumed :: {-# UNPACK #-} !Int64 } deriving Show data T3 s = T3 !Int !s !Int --data TU s = TU'OK !s !Int | TU'DO (Get s) data TU s = TU'OK !s !Int {-# SPECIALIZE decode7unrolled :: Get Int64 #-} {-# SPECIALIZE decode7unrolled :: Get Int32 #-} {-# SPECIALIZE decode7unrolled :: Get Word64 #-} {-# SPECIALIZE decode7unrolled :: Get Word32 #-} {-# SPECIALIZE decode7unrolled :: Get Int #-} {-# SPECIALIZE decode7unrolled :: Get Integer #-} decode7unrolled :: forall s. (Num s,Integral s, Bits s) => Get s -- NOINLINE decode7unrolled removed to allow SPECIALIZE to work decode7unrolled = Get $ \ sc sIn@(S ss@(S.PS fp off len) bs n) pc -> trace ("decode7unrolled: "++show (len,n)) $ if S.null ss then trace ("decode7unrolled: S.null ss") $ unGet decode7 sc sIn pc -- decode7 will try suspend then will fail if still bad else let (TU'OK x i) = unsafePerformIO $ withForeignPtr fp $ \ptr0 -> do if ptr0 == nullPtr || len < 1 then error "Get.decode7unrolled: ByteString invariant failed" else do let ok :: s -> Int -> IO (TU s) ok x0 i0 = return (TU'OK x0 i0) more,err :: IO (TU s) more = return (TU'OK 0 0) -- decode7 err = return (TU'OK 0 (-1)) -- throwError {-# INLINE ok #-} {-# INLINE more #-} {-# INLINE err #-} -- -- Next line is segfault fix for null bytestrings from Nathan Howell <nhowell@alphaheavy.com> -- if ptr0 == nullPtr then more else do let start = ptr0 `plusPtr` off :: Ptr Word8 b'1 <- peek start if b'1 < 128 then ok (fromIntegral b'1) 1 else do let !val'1 = fromIntegral (b'1 .&. 0x7F) !end = start `plusPtr` len !ptr2 = start `plusPtr` 1 :: Ptr Word8 if ptr2 >= end then more else do b'2 <- peek ptr2 if b'2 < 128 then ok (val'1 .|. 
(fromIntegral b'2 `shiftL` 7)) 2 else do let !val'2 = (val'1 .|. (fromIntegral (b'2 .&. 0x7F) `shiftL` 7)) !ptr3 = ptr2 `plusPtr` 1 if ptr3 >= end then more else do b'3 <- peek ptr3 if b'3 < 128 then ok (val'2 .|. (fromIntegral b'3 `shiftL` 14)) 3 else do let !val'3 = (val'2 .|. (fromIntegral (b'3 .&. 0x7F) `shiftL` 14)) !ptr4 = ptr3 `plusPtr` 1 if ptr4 >= end then more else do b'4 <- peek ptr4 if b'4 < 128 then ok (val'3 .|. (fromIntegral b'4 `shiftL` 21)) 4 else do let !val'4 = (val'3 .|. (fromIntegral (b'4 .&. 0x7F) `shiftL` 21)) !ptr5 = ptr4 `plusPtr` 1 if ptr5 >= end then more else do b'5 <- peek ptr5 if b'5 < 128 then ok (val'4 .|. (fromIntegral b'5 `shiftL` 28)) 5 else do let !val'5 = (val'4 .|. (fromIntegral (b'5 .&. 0x7F) `shiftL` 28)) !ptr6 = ptr5 `plusPtr` 1 if ptr6 >= end then more else do b'6 <- peek ptr6 if b'6 < 128 then ok (val'5 .|. (fromIntegral b'6 `shiftL` 35)) 6 else do let !val'6 = (val'5 .|. (fromIntegral (b'6 .&. 0x7F) `shiftL` 35)) !ptr7 = ptr6 `plusPtr` 1 if ptr7 >= end then more else do b'7 <- peek ptr7 if b'7 < 128 then ok (val'6 .|. (fromIntegral b'7 `shiftL` 42)) 7 else do let !val'7 = (val'6 .|. (fromIntegral (b'7 .&. 0x7F) `shiftL` 42)) !ptr8 = ptr7 `plusPtr` 1 if ptr8 >= end then more else do b'8 <- peek ptr8 if b'8 < 128 then ok (val'7 .|. (fromIntegral b'8 `shiftL` 49)) 8 else do let !val'8 = (val'7 .|. (fromIntegral (b'8 .&. 0x7F) `shiftL` 49)) !ptr9 = ptr8 `plusPtr` 1 if ptr9 >= end then more else do b'9 <- peek ptr9 if b'9 < 128 then ok (val'8 .|. (fromIntegral b'9 `shiftL` 56)) 9 else do let !val'9 = (val'8 .|. (fromIntegral (b'9 .&. 0x7F) `shiftL` 56)) !ptrA = ptr9 `plusPtr` 1 if ptrA >= end then more else do b'A <- peek ptrA if b'A < 128 then ok (val'9 .|. (fromIntegral b'A `shiftL` 63)) 10 else do err in if i > 0 then let ss' = (S.unsafeDrop i ss) n' = n+fromIntegral i s'safe = make_safe (S ss' bs n') in sc x s'safe pc else if i==0 then unGet decode7 sc sIn pc else unGet (throwError $ "Text.ProtocolBuffers.Get.decode7unrolled: more than 10 bytes needed at bytes read of "++show n) sc sIn pc {-# SPECIALIZE decode7 :: Get Int64 #-} {-# SPECIALIZE decode7 :: Get Int32 #-} {-# SPECIALIZE decode7 :: Get Word64 #-} {-# SPECIALIZE decode7 :: Get Word32 #-} {-# SPECIALIZE decode7 :: Get Int #-} {-# SPECIALIZE decode7 :: Get Integer #-} decode7 :: forall s. (Integral s, Bits s) => Get s -- NOINLINE decode7 removed to allow SPECIALIZE to work decode7 = go 0 0 where go !s1 !shift1 = trace ("decode7.go: "++show (toInteger s1, shift1)) $ do let -- scanner's inner loop decodes only in current top strict bytestring, does not advance input state scanner (S.PS fp off len) = withForeignPtr fp $ \ptr0 -> do if ptr0 == nullPtr || len < 1 then error "Get.decode7: ByteString invariant failed" else do let start = ptr0 `plusPtr` off -- start is a pointer to the next valid byte end = start `plusPtr` len -- end is a pointer one byte past the last valid byte inner :: (Ptr Word8) -> s -> Int -> IO (T3 s) inner !ptr !s !shift | ptr < end = do w <- peek ptr trace ("w: " ++ show w) $ do if (128>) w then return $ T3 (succ (ptr `minusPtr` start) ) -- length of capture (s .|. ((fromIntegral w) `shiftL` shift)) -- put the last bits into high position (-1) -- negative shift indicates satisfied else inner (ptr `plusPtr` 1) -- loop on next byte (s .|. ((fromIntegral (w .&. 
0x7F)) `shiftL` shift)) -- put the new bits into high position (shift+7) -- increase high position for next loop | otherwise = return $ T3 (ptr `minusPtr` start) -- length so far (ptr past end-of-string so no succ) s -- value so far shift -- next shift to use inner start s1 shift1 (S ss bs n) <- getFull trace ("getFull says: "++ show ((S.length ss,ss),(L.length bs),n)) $ do if S.null ss then do continue <- suspend if continue then go s1 shift1 else fail "Get.decode7: Zero length input" -- XXX can be triggered! else do let (T3 i sOut shiftOut) = unsafePerformIO $ scanner ss t = S.unsafeDrop i ss -- Warning: 't' may be mempty n' = n + fromIntegral i trace ("scanner says "++show ((i,toInteger sOut,shiftOut),(S.length t,n'))) $ do if 0 <= shiftOut then do putFull_unsafe (make_state bs n') if L.null bs then do continue <- suspend if continue then go sOut shiftOut else return sOut else do go sOut shiftOut else do putFull_safe (S t bs n') -- bs from getFull is still valid return sOut data T2 = T2 !Int64 !Bool decode7size :: Get Int64 decode7size = go 0 where go !len1 = do let scanner (S.PS fp off len) = withForeignPtr fp $ \ptr0 -> do if ptr0 == nullPtr || len < 1 then error "Get.decode7size: ByteString invariant failed" else do let start = ptr0 `plusPtr` off end = start `plusPtr` len inner :: (Ptr Word8) -> IO T2 inner !ptr | ptr < end = do w <- peek ptr if (128>) w then return $ T2 (fromIntegral (ptr `minusPtr` start)) True else inner (ptr `plusPtr` 1) | otherwise = return $ T2 (fromIntegral (ptr `minusPtr` start)) False inner start (S ss bs n) <- getFull if S.null ss then do continue <- suspend if continue then go len1 else fail "Get.decode7size: zero length input" else do let (T2 i ok) = unsafePerformIO $ scanner ss t = S.unsafeDrop (fromIntegral i) ss n' = n + i len2 = len1 + i if ok then do putFull_unsafe (S t bs n') return len2 else do putFull_unsafe (make_state bs n') if L.null bs then do continue <- suspend if continue then go len2 else return len2 else go len2 -- Private Internal error handling stack type -- This must NOT be exposed by this module -- -- The ErrorFrame is the top-level error handler setup when execution begins. -- It starts with the Bool set to True: meaning suspend can ask for more input. -- Once suspend get 'Nothing' in reply the Bool is set to False, which means -- that 'suspend' should no longer ask for input -- the input is finished. -- Why store the Bool there? It was handy when I needed to add it. data FrameStack b = ErrorFrame (String -> S -> Result b) -- top level handler Bool -- True at start, False if Nothing passed to suspend continuation | HandlerFrame (Maybe ( S -> FrameStack b -> String -> Result b )) -- encapsulated handler S -- stored state to pass to handler (Seq L.ByteString) -- additional input to hass to handler (FrameStack b) -- earlier/shallower/outer handlers type Success b a = (a -> S -> FrameStack b -> Result b) -- Internal monad type newtype Get a = Get { unGet :: forall b. -- the forall hides the CPS style (and prevents use of MonadCont) Success b a -- main continuation -> S -- parser state -> FrameStack b -- error handler stack -> Result b -- operation } -- These implement the checkponting needed to store and revive the -- state for lookAhead. They are fragile because the setCheckpoint -- must preceed either useCheckpoint or clearCheckpoint but not both. -- The FutureFrame must be the most recent handler, so the commands -- must be in the same scope depth. 
Because of these constraints, the reader -- value 'r' does not need to be stored and can be taken from the Get -- parameter. -- -- IMPORTANT: Any FutureFrame at the top level(s) is discarded by throwError. setCheckpoint,useCheckpoint,clearCheckpoint :: Get () setCheckpoint = Get $ \ sc s pc -> sc () s (HandlerFrame Nothing s mempty pc) useCheckpoint = Get $ \ sc (S _ _ _) frame -> case frame of (HandlerFrame Nothing s future pc) -> sc () (collect s future) pc _ -> error "Text.ProtocolBuffers.Get: Impossible useCheckpoint frame!" clearCheckpoint = Get $ \ sc s frame -> case frame of (HandlerFrame Nothing _s _future pc) -> sc () s pc _ -> error "Text.ProtocolBuffers.Get: Impossible clearCheckpoint frame!" -- | 'lookAhead' runs the @todo@ action and then rewinds only the -- BinaryParser state. Any new input from 'suspend' or changes from -- 'putAvailable' are kept. Changes to the user state (MonadState) -- are kept. The MonadWriter output is retained. -- -- If an error is thrown then the entire monad state is reset to last -- catchError as usual. lookAhead :: Get a -> Get a lookAhead todo = do setCheckpoint a <- todo useCheckpoint return a -- | 'lookAheadM' runs the @todo@ action. If the action returns 'Nothing' then the -- BinaryParser state is rewound (as in 'lookAhead'). If the action return 'Just' then -- the BinaryParser is not rewound, and lookAheadM acts as an identity. -- -- If an error is thrown then the entire monad state is reset to last -- catchError as usual. lookAheadM :: Get (Maybe a) -> Get (Maybe a) lookAheadM todo = do setCheckpoint a <- todo maybe useCheckpoint (const clearCheckpoint) a return a -- | 'lookAheadE' runs the @todo@ action. If the action returns 'Left' then the -- BinaryParser state is rewound (as in 'lookAhead'). If the action return 'Right' then -- the BinaryParser is not rewound, and lookAheadE acts as an identity. -- -- If an error is thrown then the entire monad state is reset to last -- catchError as usual. lookAheadE :: Get (Either a b) -> Get (Either a b) lookAheadE todo = do setCheckpoint a <- todo either (const useCheckpoint) (const clearCheckpoint) a return a -- 'collect' is used by 'putCheckpoint' and 'throwError' collect :: S -> Seq L.ByteString -> S collect s@(S ss bs n) future | Data.Sequence.null future = make_safe $ s | otherwise = make_safe $ S ss (mappend bs (F.foldr1 mappend future)) n -- Put the Show instances here instance (Show a) => Show (Result a) where showsPrec _ (Failed n msg) = ("(Failed "++) . shows n . (' ':) . shows msg . (")"++) showsPrec _ (Finished bs n a) = ("(CFinished ("++) . shows bs . (") ("++) . shows n . (") ("++) . shows a . ("))"++) showsPrec _ (Partial {}) = ("(Partial <Maybe Data.ByteString.Lazy.ByteString-> Result a)"++) instance Show (FrameStack b) where showsPrec _ (ErrorFrame _ p) =(++) "(ErrorFrame <e->s->m b> " . shows p . (")"++) showsPrec _ (HandlerFrame _ s future pc) = ("(HandlerFrame <> ("++) . shows s . (") ("++) . shows future . (") ("++) . shows pc . 
(")"++) -- | 'runGet' is the simple executor runGet :: Get a -> L.ByteString -> Result a runGet (Get f) bsIn = f scIn sIn (ErrorFrame ec True) where scIn a (S ss bs n) _pc = Finished (L.chunk ss bs) n a sIn = make_state bsIn 0 ec msg sOut = Failed (consumed sOut) msg -- | 'runGetAll' is the simple executor, and will not ask for any continuation because this lazy bytestring is all the input runGetAll :: Get a -> L.ByteString -> Result a runGetAll (Get f) bsIn = f scIn sIn (ErrorFrame ec False) where scIn a (S ss bs n) _pc = Finished (L.chunk ss bs) n a sIn = make_state bsIn 0 ec msg sOut = Failed (consumed sOut) msg -- | Get the input currently available to the parser. getAvailable :: Get L.ByteString getAvailable = Get $ \ sc s@(S ss bs _) pc -> sc (L.chunk ss bs) s pc -- | 'putAvailable' replaces the bytestream past the current # of read -- bytes. This will also affect pending MonadError handler and -- MonadPlus branches. I think all pending branches have to have -- fewer bytesRead than the current one. If this is wrong then an -- error will be thrown. -- -- WARNING : 'putAvailable' is still untested. putAvailable :: L.ByteString -> Get () putAvailable !bsNew = Get $ \ sc (S _ss _bs n) pc -> let !s' = make_state bsNew n rebuild (HandlerFrame catcher (S ss1 bs1 n1) future pc') = HandlerFrame catcher sNew mempty (rebuild pc') where balance = n - n1 whole | balance < 0 = error "Impossible? Cannot rebuild HandlerFrame in MyGet.putAvailable: balance is negative!" | otherwise = L.take balance $ L.chunk ss1 bs1 `mappend` F.foldr mappend mempty future sNew | balance /= L.length whole = error "Impossible? MyGet.putAvailable.rebuild.sNew HandlerFrame assertion failed." | otherwise = make_state (mappend whole bsNew) n1 rebuild x@(ErrorFrame {}) = x in sc () s' (rebuild pc) -- Internal access to full internal state, as helper functions getFull :: Get S getFull = Get $ \ sc s pc -> sc s s pc {-# INLINE putFull_unsafe #-} putFull_unsafe :: S -> Get () putFull_unsafe !s = Get $ \ sc _s pc -> sc () s pc {-# INLINE make_safe #-} make_safe :: S -> S make_safe s@(S ss bs n) = if S.null ss then make_state bs n else s {-# INLINE make_state #-} make_state :: L.ByteString -> Int64 -> S make_state L.Empty n = S mempty mempty n make_state (L.Chunk ss bs) n = S ss bs n putFull_safe :: S -> Get () putFull_safe= putFull_unsafe . make_safe -- | Keep calling 'suspend' until Nothing is passed to the 'Partial' -- continuation. This ensures all the data has been loaded into the -- state of the parser. suspendUntilComplete :: Get () suspendUntilComplete = do continue <- suspend when continue suspendUntilComplete -- | Call suspend and throw and error with the provided @msg@ if -- Nothing has been passed to the 'Partial' continuation. Otherwise -- return (). suspendMsg :: String -> Get () suspendMsg msg = do continue <- suspend if continue then return () else throwError msg -- | check that there are at least @n@ bytes available in the input. -- This will suspend if there is to little data. ensureBytes :: Int64 -> Get () ensureBytes n = do (S ss bs _read) <- getFull if S.null ss then suspendMsg "ensureBytes failed" >> ensureBytes n else do if n < fromIntegral (S.length ss) then return () else do if n == L.length (L.take n (L.chunk ss bs)) then return () else suspendMsg "ensureBytes failed" >> ensureBytes n {-# INLINE ensureBytes #-} -- | Pull @n@ bytes from the input, as a lazy ByteString. This will -- suspend if there is too little data. 
getLazyByteString :: Int64 -> Get L.ByteString getLazyByteString n | n<=0 = return mempty | otherwise = do (S ss bs offset) <- getFull if S.null ss then do suspendMsg ("getLazyByteString S.null ss failed with "++show (n,(S.length ss,L.length bs,offset))) getLazyByteString n else do case splitAtOrDie n (L.chunk ss bs) of -- safe use of L.chunk because of S.null ss check above Just (consume,rest) -> do putFull_unsafe (make_state rest (offset+n)) return $! consume Nothing -> do suspendMsg ("getLazyByteString (Nothing from splitAtOrDie) failed with "++show (n,(S.length ss,L.length bs,offset))) getLazyByteString n {-# INLINE getLazyByteString #-} -- important -- | 'suspend' is supposed to allow the execution of the monad to be -- halted, awaiting more input. The computation is supposed to -- continue normally if this returns True, and is supposed to halt -- without calling suspend again if this returns False. All future -- calls to suspend will return False automatically and no nothing -- else. -- -- These semantics are too specialized to let this escape this module. class MonadSuspend m where suspend :: m Bool -- The instance here is fairly specific to the stack manipluation done -- by 'addFuture' to ('S' user) and to the packaging of the resumption -- function in 'IResult'('IPartial'). instance MonadSuspend Get where suspend = Get ( let checkBool (ErrorFrame _ b) = b checkBool (HandlerFrame _ _ _ pc) = checkBool pc -- addFuture puts the new data in 'future' where throwError's collect can find and use it addFuture bs (HandlerFrame catcher s future pc) = HandlerFrame catcher s (future |> bs) (addFuture bs pc) addFuture _bs x@(ErrorFrame {}) = x -- Once suspend is given Nothing, it remembers this and always returns False rememberFalse (ErrorFrame ec _) = ErrorFrame ec False rememberFalse (HandlerFrame catcher s future pc) = HandlerFrame catcher s future (rememberFalse pc) in \ sc sIn pcIn -> if checkBool pcIn -- Has Nothing ever been given to a partial continuation? then let f Nothing = let pcOut = rememberFalse pcIn in sc False sIn pcOut f (Just bs') = let sOut = appendBS sIn bs' pcOut = addFuture bs' pcIn in sc True sOut pcOut in Partial f else sc False sIn pcIn -- once Nothing has been given suspend is a no-op ) where appendBS (S ss bs n) bs' = make_safe (S ss (mappend bs bs') n) -- A unique sort of command... -- | 'discardInnerHandler' causes the most recent catchError to be -- discarded, i.e. this reduces the stack of error handlers by removing -- the top one. These are the same handlers which Alternative((<|>)) and -- MonadPlus(mplus) use. This is useful to commit to the current branch and let -- the garbage collector release the suspended handler and its hold on -- the earlier input. discardInnerHandler :: Get () discardInnerHandler = Get $ \ sc s pcIn -> let pcOut = case pcIn of ErrorFrame {} -> pcIn HandlerFrame _ _ _ pc' -> pc' in sc () s pcOut {-# INLINE discardInnerHandler #-} {- Currently unused, commented out to satisfy -Wall -- | 'discardAllHandlers' causes all catchError handler to be -- discarded, i.e. this reduces the stack of error handlers to the top -- level handler. These are the same handlers which Alternative((<|>)) -- and MonadPlus(mplus) use. This is useful to commit to the current -- branch and let the garbage collector release the suspended handlers -- and their hold on the earlier input. 
discardAllHandlers :: Get () discardAllHandlers = Get $ \ sc s pcIn -> let base pc@(ErrorFrame {}) = pc base (HandlerFrame _ _ _ pc) = base pc in sc () s (base pcIn) {-# INLINE discardAllHandlers #-} -} -- The BinaryParser instance: -- | Discard the next @m@ bytes skip :: Int64 -> Get () skip m | m <=0 = return () | otherwise = do ensureBytes m (S ss bs n) <- getFull -- Could ignore impossible S.null ss due to (ensureBytes m) and (0 < m) but be paranoid let lbs = L.chunk ss bs -- L.chunk is safe putFull_unsafe (make_state (L.drop m lbs) (n+m)) -- drop will not perform less than 'm' bytes due to ensureBytes above -- | Return the number of 'bytesRead' so far. Initially 0, never negative. bytesRead :: Get Int64 bytesRead = fmap consumed getFull -- | Return the number of bytes 'remaining' before the current input -- runs out and 'suspend' might be called. remaining :: Get Int64 remaining = do (S ss bs _) <- getFull return $ fromIntegral (S.length ss) + (L.length bs) -- | Return True if the number of bytes 'remaining' is 0. Any futher -- attempts to read an empty parser will call 'suspend' which might -- result in more input to consume. -- -- Compare with 'isReallyEmpty' isEmpty :: Get Bool isEmpty = do (S ss _bs _n) <- getFull return (S.null ss) -- && (L.null bs) -- | Return True if the input is exhausted and will never be added to. -- Returns False if there is input left to consume. -- -- Compare with 'isEmpty' isReallyEmpty :: Get Bool isReallyEmpty = isEmpty >>= loop where loop False = return False loop True = do continue <- suspend if continue then isReallyEmpty else return True -- | get the longest prefix of the input where the high bit is set as well as following byte. -- This made getVarInt slower. highBitRun :: Get Int64 {-# INLINE highBitRun #-} highBitRun = loop where loop :: Get Int64 {-# INLINE loop #-} loop = do (S ss bs _n) <- getFull -- S.null ss is okay, will lead to Nothing, Nothing, suspend below let mi = S.findIndex (128>) ss case mi of Just i -> return (succ $ fromIntegral i) Nothing -> do let mj = L.findIndex (128>) bs case mj of Just j -> return (fromIntegral (S.length ss) + succ j) Nothing -> do continue <- suspend if continue then loop else fail "highBitRun has failed" -- | get the longest prefix of the input where all the bytes satisfy the predicate. spanOf :: (Word8 -> Bool) -> Get (L.ByteString) spanOf f = do let loop = do (S ss bs n) <- getFull let (pre,post) = L.span f (L.chunk ss bs) -- L.chunk is safe putFull_unsafe (make_state post (n + L.length pre)) if L.null post then do continue <- suspend if continue then fmap ((L.toChunks pre)++) loop else return (L.toChunks pre) else return (L.toChunks pre) fmap L.fromChunks loop {-# INLINE spanOf #-} -- | Pull @n@ bytes from the input, as a strict ByteString. This will -- suspend if there is too little data. If the result spans multiple -- lazy chunks then the result occupies a freshly allocated strict -- bytestring, otherwise it fits in a single chunk and refers to the -- same immutable memory block as the whole chunk. getByteString :: Int -> Get S.ByteString getByteString nIn | nIn <= 0 = return mempty | otherwise = do (S ss bs n) <- getFull if nIn < S.length ss -- Leave at least one character of 'ss' in 'post' allowing putFull_unsafe below then do let (pre,post) = S.splitAt nIn ss putFull_unsafe (S post bs (n+fromIntegral nIn)) return $! pre -- Expect nIn to be less than S.length ss the vast majority of times -- so do not worry about doing anything fancy here. else do now <- fmap (S.concat . 
L.toChunks) (getLazyByteString (fromIntegral nIn)) return $! now {-# INLINE getByteString #-} -- important getWordhost :: Get Word getWordhost = getStorable {-# INLINE getWordhost #-} getWord8 :: Get Word8 getWord8 = getPtr 1 {-# INLINE getWord8 #-} getWord16be,getWord16le,getWord16host :: Get Word16 getWord16be = do s <- getByteString 2 return $! (fromIntegral (s `S.unsafeIndex` 0) `shiftl_w16` 8) .|. (fromIntegral (s `S.unsafeIndex` 1)) {-# INLINE getWord16be #-} getWord16le = do s <- getByteString 2 return $! (fromIntegral (s `S.unsafeIndex` 1) `shiftl_w16` 8) .|. (fromIntegral (s `S.unsafeIndex` 0) ) {-# INLINE getWord16le #-} getWord16host = getStorable {-# INLINE getWord16host #-} getWord32be,getWord32le,getWord32host :: Get Word32 getWord32be = do s <- getByteString 4 return $! (fromIntegral (s `S.unsafeIndex` 0) `shiftl_w32` 24) .|. (fromIntegral (s `S.unsafeIndex` 1) `shiftl_w32` 16) .|. (fromIntegral (s `S.unsafeIndex` 2) `shiftl_w32` 8) .|. (fromIntegral (s `S.unsafeIndex` 3) ) {-# INLINE getWord32be #-} getWord32le = do s <- getByteString 4 return $! (fromIntegral (s `S.unsafeIndex` 3) `shiftl_w32` 24) .|. (fromIntegral (s `S.unsafeIndex` 2) `shiftl_w32` 16) .|. (fromIntegral (s `S.unsafeIndex` 1) `shiftl_w32` 8) .|. (fromIntegral (s `S.unsafeIndex` 0) ) {-# INLINE getWord32le #-} getWord32host = getStorable {-# INLINE getWord32host #-} getWord64be,getWord64le,getWord64host :: Get Word64 getWord64be = do s <- getByteString 8 return $! (fromIntegral (s `S.unsafeIndex` 0) `shiftl_w64` 56) .|. (fromIntegral (s `S.unsafeIndex` 1) `shiftl_w64` 48) .|. (fromIntegral (s `S.unsafeIndex` 2) `shiftl_w64` 40) .|. (fromIntegral (s `S.unsafeIndex` 3) `shiftl_w64` 32) .|. (fromIntegral (s `S.unsafeIndex` 4) `shiftl_w64` 24) .|. (fromIntegral (s `S.unsafeIndex` 5) `shiftl_w64` 16) .|. (fromIntegral (s `S.unsafeIndex` 6) `shiftl_w64` 8) .|. (fromIntegral (s `S.unsafeIndex` 7) ) {-# INLINE getWord64be #-} getWord64le = do s <- getByteString 8 return $! (fromIntegral (s `S.unsafeIndex` 7) `shiftl_w64` 56) .|. (fromIntegral (s `S.unsafeIndex` 6) `shiftl_w64` 48) .|. (fromIntegral (s `S.unsafeIndex` 5) `shiftl_w64` 40) .|. (fromIntegral (s `S.unsafeIndex` 4) `shiftl_w64` 32) .|. (fromIntegral (s `S.unsafeIndex` 3) `shiftl_w64` 24) .|. (fromIntegral (s `S.unsafeIndex` 2) `shiftl_w64` 16) .|. (fromIntegral (s `S.unsafeIndex` 1) `shiftl_w64` 8) .|. (fromIntegral (s `S.unsafeIndex` 0) ) {-# INLINE getWord64le #-} getWord64host = getStorable {-# INLINE getWord64host #-} -- Below here are the class instances instance Functor Get where fmap f m = Get (\sc -> unGet m (sc . f)) {-# INLINE fmap #-} instance Monad Get where return a = seq a $ Get (\sc -> sc a) {-# INLINE return #-} m >>= k = Get (\sc -> unGet m (\ a -> seq a $ unGet (k a) sc)) {-# INLINE (>>=) #-} fail = throwError . 
strMsg instance MonadError String Get where throwError msg = Get $ \_sc s pcIn -> let go (ErrorFrame ec _) = ec msg s go (HandlerFrame (Just catcher) s1 future pc1) = catcher (collect s1 future) pc1 msg go (HandlerFrame Nothing _s1 _future pc1) = go pc1 in go pcIn catchError mayFail handler = Get $ \sc s pc -> let pcWithHandler = let catcher s1 pc1 e1 = unGet (handler e1) sc s1 pc1 in HandlerFrame (Just catcher) s mempty pc actionWithCleanup = mayFail >>= \a -> discardInnerHandler >> return a in unGet actionWithCleanup sc s pcWithHandler instance MonadPlus Get where mzero = throwError (strMsg "[mzero:no message]") mplus m1 m2 = catchError m1 (const m2) instance Applicative Get where pure = return (<*>) = ap instance Alternative Get where empty = mzero (<|>) = mplus -- | I use "splitAt" without tolerating too few bytes, so write a Maybe version. -- This is the only place I invoke L.Chunk as constructor instead of pattern matching. -- I claim that the first argument cannot be empty. splitAtOrDie :: Int64 -> L.ByteString -> Maybe (L.ByteString, L.ByteString) splitAtOrDie i ps | i <= 0 = Just (mempty, ps) splitAtOrDie _i L.Empty = Nothing splitAtOrDie i (L.Chunk x xs) | i < len = let (pre,post) = S.splitAt (fromIntegral i) x in Just (L.chunk pre mempty, L.chunk post xs) | otherwise = case splitAtOrDie (i-len) xs of Nothing -> Nothing Just (y1,y2) -> Just (L.chunk x y1,y2) where len = fromIntegral (S.length x) {-# INLINE splitAtOrDie #-} ------------------------------------------------------------------------ -- getPtr copied from binary's Get.hs -- helper, get a raw Ptr onto a strict ByteString copied out of the -- underlying lazy byteString. So many indirections from the raw parser -- state that my head hurts... -- Assume n>0 getPtr :: (Storable a) => Int -> Get a getPtr n = do (fp,o,_) <- fmap S.toForeignPtr (getByteString n) return . S.inlinePerformIO $ withForeignPtr fp $ \p -> peek (castPtr $ p `plusPtr` o) {-# INLINE getPtr #-} -- I pushed the sizeOf into here (uses ScopedTypeVariables) -- Assume sizeOf (undefined :: a)) > 0 getStorable :: forall a. (Storable a) => Get a getStorable = do (fp,o,_) <- fmap S.toForeignPtr (getByteString (sizeOf (undefined :: a))) return . S.inlinePerformIO $ withForeignPtr fp $ \p -> peek (castPtr $ p `plusPtr` o) {-# INLINE getStorable #-} ------------------------------------------------------------------------ ------------------------------------------------------------------------ -- Unchecked shifts copied from binary's Get.hs shiftl_w16 :: Word16 -> Int -> Word16 shiftl_w32 :: Word32 -> Int -> Word32 shiftl_w64 :: Word64 -> Int -> Word64 #if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__) shiftl_w16 (W16# w) (I# i) = W16# (w `uncheckedShiftL#` i) shiftl_w32 (W32# w) (I# i) = W32# (w `uncheckedShiftL#` i) #if WORD_SIZE_IN_BITS < 64 shiftl_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftL64#` i) #if __GLASGOW_HASKELL__ <= 606 -- Exported by GHC.Word in GHC 6.8 and higher foreign import ccall unsafe "stg_uncheckedShiftL64" uncheckedShiftL64# :: Word64# -> Int# -> Word64# #endif #else shiftl_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftL#` i) #endif #else shiftl_w16 = shiftL shiftl_w32 = shiftL shiftl_w64 = shiftL #endif
timjb/protocol-buffers
Text/ProtocolBuffers/Get.hs
Haskell
apache-2.0
38,206
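The decode7 family above is an incremental, suspension-aware base-128 varint decoder: each byte contributes its low seven bits, and a set high bit means another byte follows. As a compact reference for that encoding alone, ignoring all of the Get/continuation machinery, a plain list-based decoder can be written as below; the 0xAC 0x02 test vector decodes to 300.

import Data.Bits (shiftL, testBit, (.&.), (.|.))
import Data.Word (Word8)

-- Reference decoder over a plain byte list; no suspension, no state.
varint :: [Word8] -> Maybe (Integer, [Word8])
varint = go 0 0
  where
    go _     _   []       = Nothing      -- ran out of bytes mid-value
    go shift acc (b : bs)
      | testBit b 7 = go (shift + 7) acc' bs   -- high bit set: more bytes follow
      | otherwise   = Just (acc', bs)          -- high bit clear: value complete
      where acc' = acc .|. (fromIntegral (b .&. 0x7F) `shiftL` shift)

main :: IO ()
main = print (varint [0xAC, 0x02])  -- Just (300,[])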
-------------------------------------------------------------------------------- -- | -- Module : System.Taffybar.Widget.CPUMonitor -- Copyright : (c) José A. Romero L. -- License : BSD3-style (see LICENSE) -- -- Maintainer : José A. Romero L. <escherdragon@gmail.com> -- Stability : unstable -- Portability : unportable -- -- Simple CPU monitor that uses a PollingGraph to visualize variations in the -- user and system CPU times in one selected core, or in all cores available. -- -------------------------------------------------------------------------------- module System.Taffybar.Widget.CPUMonitor where import Control.Monad.IO.Class import Data.IORef import qualified GI.Gtk import System.Taffybar.Information.CPU2 (getCPUInfo) import System.Taffybar.Information.StreamInfo (getAccLoad) import System.Taffybar.Widget.Generic.PollingGraph -- | Creates a new CPU monitor. This is a PollingGraph fed by regular calls to -- getCPUInfo, associated to an IORef used to remember the values yielded by the -- last call to this function. cpuMonitorNew :: MonadIO m => GraphConfig -- ^ Configuration data for the Graph. -> Double -- ^ Polling period (in seconds). -> String -- ^ Name of the core to watch (e.g. \"cpu\", \"cpu0\"). -> m GI.Gtk.Widget cpuMonitorNew cfg interval cpu = liftIO $ do info <- getCPUInfo cpu sample <- newIORef info pollingGraphNew cfg interval $ probe sample cpu probe :: IORef [Int] -> String -> IO [Double] probe sample cpuName = do load <- getAccLoad sample $ getCPUInfo cpuName case load of l0:l1:l2:_ -> return [ l0 + l1, l2 ] -- user, system _ -> return []
teleshoes/taffybar
src/System/Taffybar/Widget/CPUMonitor.hs
Haskell
bsd-3-clause
1,649
{-# LANGUAGE OverloadedStrings #-}
module SearchRepos where

import qualified Github.Search as Github
import qualified Github.Data as Github
import Control.Monad (forM_)
import Data.Maybe (fromMaybe)
import Data.List (intercalate)
import System.Environment (getArgs)
import Text.Printf (printf)
import Data.Time.Clock (getCurrentTime, UTCTime(..))
import Data.Time.LocalTime (utc,utcToLocalTime,localDay,localTimeOfDay,TimeOfDay(..))
import Data.Time.Calendar (toGregorian)

main :: IO ()
main = do
  args <- getArgs
  date <- case args of
            (x:_) -> return x
            _     -> today
  let query = "q=language%3Ahaskell created%3A>" ++ date ++ "&per_page=100"
  let auth = Nothing
  result <- Github.searchRepos' auth query
  case result of
    Left e  -> putStrLn $ "Error: " ++ show e
    Right r -> do
      let n = Github.searchReposTotalCount r
      forM_ (Github.searchReposRepos r) $ \repo -> do
        putStrLn $ formatRepo repo
        putStrLn ""
      putStrLn $ "Count: " ++ show n ++ " Haskell repos created since " ++ date

-- | return today (in UTC) formatted as YYYY-MM-DD
today :: IO String
today = do
  now <- getCurrentTime
  let day = localDay $ utcToLocalTime utc now
      (y,m,d) = toGregorian day
  return $ printf "%d-%02d-%02d" y m d

formatRepo :: Github.Repo -> String
formatRepo r =
  let fields = [ ("Name", Github.repoName)
               , ("URL", Github.repoHtmlUrl)
               , ("Description", orEmpty . Github.repoDescription)
               , ("Created-At", formatMaybeDate . Github.repoCreatedAt)
               , ("Pushed-At", formatMaybeDate . Github.repoPushedAt)
               , ("Stars", show . Github.repoStargazersCount)
               ]
  in intercalate "\n" $ map fmt fields
    where fmt (s,f) = fill 12 (s ++ ":") ++ " " ++ f r
          orEmpty = fromMaybe ""
          fill n s = s ++ replicate n' ' '
            where n' = max 0 (n - length s)

formatMaybeDate = maybe "???" formatDate

formatDate = show . Github.fromGithubDate
adarqui/github
samples/Search/SearchRepos.hs
Haskell
bsd-3-clause
2,058
{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- -- Code generation for foreign calls. -- -- (c) The University of Glasgow 2004-2006 -- ----------------------------------------------------------------------------- module StgCmmForeign ( cgForeignCall, loadThreadState, saveThreadState, emitPrimCall, emitCCall, emitForeignCall, -- For CmmParse emitSaveThreadState, -- will be needed by the Cmm parser emitLoadThreadState, -- ditto emitCloseNursery, emitOpenNursery ) where #include "HsVersions.h" import StgSyn import StgCmmProf (storeCurCCS, ccsType, curCCS) import StgCmmEnv import StgCmmMonad import StgCmmUtils import StgCmmClosure import StgCmmLayout import Cmm import CmmUtils import MkGraph import Type import TysPrim import CLabel import SMRep import ForeignCall import DynFlags import Maybes import Outputable import BasicTypes import Control.Monad import Prelude hiding( succ ) ----------------------------------------------------------------------------- -- Code generation for Foreign Calls ----------------------------------------------------------------------------- -- | emit code for a foreign call, and return the results to the sequel. -- cgForeignCall :: ForeignCall -- the op -> [StgArg] -- x,y arguments -> Type -- result type -> FCode ReturnKind cgForeignCall (CCall (CCallSpec target cconv safety)) stg_args res_ty = do { dflags <- getDynFlags ; let -- in the stdcall calling convention, the symbol needs @size appended -- to it, where size is the total number of bytes of arguments. We -- attach this info to the CLabel here, and the CLabel pretty printer -- will generate the suffix when the label is printed. call_size args | StdCallConv <- cconv = Just (sum (map arg_size args)) | otherwise = Nothing -- ToDo: this might not be correct for 64-bit API arg_size (arg, _) = max (widthInBytes $ typeWidth $ cmmExprType dflags arg) (wORD_SIZE dflags) ; cmm_args <- getFCallArgs stg_args ; (res_regs, res_hints) <- newUnboxedTupleRegs res_ty ; let ((call_args, arg_hints), cmm_target) = case target of StaticTarget _ _ False -> panic "cgForeignCall: unexpected FFI value import" StaticTarget lbl mPkgId True -> let labelSource = case mPkgId of Nothing -> ForeignLabelInThisPackage Just pkgId -> ForeignLabelInPackage pkgId size = call_size cmm_args in ( unzip cmm_args , CmmLit (CmmLabel (mkForeignLabel lbl size labelSource IsFunction))) DynamicTarget -> case cmm_args of (fn,_):rest -> (unzip rest, fn) [] -> panic "cgForeignCall []" fc = ForeignConvention cconv arg_hints res_hints CmmMayReturn call_target = ForeignTarget cmm_target fc -- we want to emit code for the call, and then emitReturn. -- However, if the sequel is AssignTo, we shortcut a little -- and generate a foreign call that assigns the results -- directly. Otherwise we end up generating a bunch of -- useless "r = r" assignments, which are not merely annoying: -- they prevent the common block elimination from working correctly -- in the case of a safe foreign call. -- See Note [safe foreign call convention] -- ; sequel <- getSequel ; case sequel of AssignTo assign_to_these _ -> emitForeignCall safety assign_to_these call_target call_args _something_else -> do { _ <- emitForeignCall safety res_regs call_target call_args ; emitReturn (map (CmmReg . CmmLocal) res_regs) } } {- Note [safe foreign call convention] The simple thing to do for a safe foreign call would be the same as an unsafe one: just emitForeignCall ... emitReturn ... 
but consider what happens in this case case foo x y z of (# s, r #) -> ... The sequel is AssignTo [r]. The call to newUnboxedTupleRegs picks [r] as the result reg, and we generate r = foo(x,y,z) returns to L1 -- emitForeignCall L1: r = r -- emitReturn goto L2 L2: ... Now L1 is a proc point (by definition, it is the continuation of the safe foreign call). If L2 does a heap check, then L2 will also be a proc point. Furthermore, the stack layout algorithm has to arrange to save r somewhere between the call and the jump to L1, which is annoying: we would have to treat r differently from the other live variables, which have to be saved *before* the call. So we adopt a special convention for safe foreign calls: the results are copied out according to the NativeReturn convention by the call, and the continuation of the call should copyIn the results. (The copyOut code is actually inserted when the safe foreign call is lowered later). The result regs attached to the safe foreign call are only used temporarily to hold the results before they are copied out. We will now generate this: r = foo(x,y,z) returns to L1 L1: r = R1 -- copyIn, inserted by mkSafeCall goto L2 L2: ... r ... And when the safe foreign call is lowered later (see Note [lower safe foreign calls]) we get this: suspendThread() r = foo(x,y,z) resumeThread() R1 = r -- copyOut, inserted by lowerSafeForeignCall jump L1 L1: r = R1 -- copyIn, inserted by mkSafeCall goto L2 L2: ... r ... Now consider what happens if L2 does a heap check: the Adams optimisation kicks in and commons up L1 with the heap-check continuation, resulting in just one proc point instead of two. Yay! -} emitCCall :: [(CmmFormal,ForeignHint)] -> CmmExpr -> [(CmmActual,ForeignHint)] -> FCode () emitCCall hinted_results fn hinted_args = void $ emitForeignCall PlayRisky results target args where (args, arg_hints) = unzip hinted_args (results, result_hints) = unzip hinted_results target = ForeignTarget fn fc fc = ForeignConvention CCallConv arg_hints result_hints CmmMayReturn emitPrimCall :: [CmmFormal] -> CallishMachOp -> [CmmActual] -> FCode () emitPrimCall res op args = void $ emitForeignCall PlayRisky res (PrimTarget op) args -- alternative entry point, used by CmmParse emitForeignCall :: Safety -> [CmmFormal] -- where to put the results -> ForeignTarget -- the op -> [CmmActual] -- arguments -> FCode ReturnKind emitForeignCall safety results target args | not (playSafe safety) = do dflags <- getDynFlags let (caller_save, caller_load) = callerSaveVolatileRegs dflags emit caller_save target' <- load_target_into_temp target args' <- mapM maybe_assign_temp args emit $ mkUnsafeCall target' results args' emit caller_load return AssignedDirectly | otherwise = do dflags <- getDynFlags updfr_off <- getUpdFrameOff target' <- load_target_into_temp target args' <- mapM maybe_assign_temp args k <- newLabelC let (off, _, copyout) = copyInOflow dflags NativeReturn (Young k) results [] -- see Note [safe foreign call convention] emit $ ( mkStore (CmmStackSlot (Young k) (widthInBytes (wordWidth dflags))) (CmmLit (CmmBlock k)) <*> mkLast (CmmForeignCall { tgt = target' , res = results , args = args' , succ = k , ret_args = off , ret_off = updfr_off , intrbl = playInterruptible safety }) <*> mkLabel k <*> copyout ) return (ReturnedTo k off) load_target_into_temp :: ForeignTarget -> FCode ForeignTarget load_target_into_temp (ForeignTarget expr conv) = do tmp <- maybe_assign_temp expr return (ForeignTarget tmp conv) load_target_into_temp other_target@(PrimTarget _) = return other_target -- 
What we want to do here is create a new temporary for the foreign -- call argument if it is not safe to use the expression directly, -- because the expression mentions caller-saves GlobalRegs (see -- Note [Register Parameter Passing]). -- -- However, we can't pattern-match on the expression here, because -- this is used in a loop by CmmParse, and testing the expression -- results in a black hole. So we always create a temporary, and rely -- on CmmSink to clean it up later. (Yuck, ToDo). The generated code -- ends up being the same, at least for the RTS .cmm code. -- maybe_assign_temp :: CmmExpr -> FCode CmmExpr maybe_assign_temp e = do dflags <- getDynFlags reg <- newTemp (cmmExprType dflags e) emitAssign (CmmLocal reg) e return (CmmReg (CmmLocal reg)) -- ----------------------------------------------------------------------------- -- Save/restore the thread state in the TSO -- This stuff can't be done in suspendThread/resumeThread, because it -- refers to global registers which aren't available in the C world. saveThreadState :: DynFlags -> CmmAGraph saveThreadState dflags = -- CurrentTSO->stackobj->sp = Sp; mkStore (cmmOffset dflags (CmmLoad (cmmOffset dflags stgCurrentTSO (tso_stackobj dflags)) (bWord dflags)) (stack_SP dflags)) stgSp <*> closeNursery dflags -- and save the current cost centre stack in the TSO when profiling: <*> if gopt Opt_SccProfilingOn dflags then mkStore (cmmOffset dflags stgCurrentTSO (tso_CCCS dflags)) curCCS else mkNop emitSaveThreadState :: FCode () emitSaveThreadState = do dflags <- getDynFlags emit (saveThreadState dflags) emitCloseNursery :: FCode () emitCloseNursery = do df <- getDynFlags emit (closeNursery df) -- CurrentNursery->free = Hp+1; closeNursery :: DynFlags -> CmmAGraph closeNursery dflags = mkStore (nursery_bdescr_free dflags) (cmmOffsetW dflags stgHp 1) loadThreadState :: DynFlags -> LocalReg -> LocalReg -> CmmAGraph loadThreadState dflags tso stack = do catAGraphs [ -- tso = CurrentTSO; mkAssign (CmmLocal tso) stgCurrentTSO, -- stack = tso->stackobj; mkAssign (CmmLocal stack) (CmmLoad (cmmOffset dflags (CmmReg (CmmLocal tso)) (tso_stackobj dflags)) (bWord dflags)), -- Sp = stack->sp; mkAssign sp (CmmLoad (cmmOffset dflags (CmmReg (CmmLocal stack)) (stack_SP dflags)) (bWord dflags)), -- SpLim = stack->stack + RESERVED_STACK_WORDS; mkAssign spLim (cmmOffsetW dflags (cmmOffset dflags (CmmReg (CmmLocal stack)) (stack_STACK dflags)) (rESERVED_STACK_WORDS dflags)), -- HpAlloc = 0; -- HpAlloc is assumed to be set to non-zero only by a failed -- a heap check, see HeapStackCheck.cmm:GC_GENERIC mkAssign hpAlloc (zeroExpr dflags), openNursery dflags, -- and load the current cost centre stack from the TSO when profiling: if gopt Opt_SccProfilingOn dflags then storeCurCCS (CmmLoad (cmmOffset dflags (CmmReg (CmmLocal tso)) (tso_CCCS dflags)) (ccsType dflags)) else mkNop] emitLoadThreadState :: FCode () emitLoadThreadState = do dflags <- getDynFlags load_tso <- newTemp (gcWord dflags) load_stack <- newTemp (gcWord dflags) emit $ loadThreadState dflags load_tso load_stack emitOpenNursery :: FCode () emitOpenNursery = do df <- getDynFlags emit (openNursery df) openNursery :: DynFlags -> CmmAGraph openNursery dflags = catAGraphs [ -- Hp = CurrentNursery->free - 1; mkAssign hp (cmmOffsetW dflags (CmmLoad (nursery_bdescr_free dflags) (bWord dflags)) (-1)), -- HpLim = CurrentNursery->start + -- CurrentNursery->blocks*BLOCK_SIZE_W - 1; mkAssign hpLim (cmmOffsetExpr dflags (CmmLoad (nursery_bdescr_start dflags) (bWord dflags)) (cmmOffset dflags (CmmMachOp (mo_wordMul 
dflags) [ CmmMachOp (MO_SS_Conv W32 (wordWidth dflags)) [CmmLoad (nursery_bdescr_blocks dflags) b32], mkIntExpr dflags (bLOCK_SIZE dflags) ]) (-1) ) ) ] nursery_bdescr_free, nursery_bdescr_start, nursery_bdescr_blocks :: DynFlags -> CmmExpr nursery_bdescr_free dflags = cmmOffset dflags stgCurrentNursery (oFFSET_bdescr_free dflags) nursery_bdescr_start dflags = cmmOffset dflags stgCurrentNursery (oFFSET_bdescr_start dflags) nursery_bdescr_blocks dflags = cmmOffset dflags stgCurrentNursery (oFFSET_bdescr_blocks dflags) tso_stackobj, tso_CCCS, stack_STACK, stack_SP :: DynFlags -> ByteOff tso_stackobj dflags = closureField dflags (oFFSET_StgTSO_stackobj dflags) tso_CCCS dflags = closureField dflags (oFFSET_StgTSO_cccs dflags) stack_STACK dflags = closureField dflags (oFFSET_StgStack_stack dflags) stack_SP dflags = closureField dflags (oFFSET_StgStack_sp dflags) closureField :: DynFlags -> ByteOff -> ByteOff closureField dflags off = off + fixedHdrSize dflags stgSp, stgHp, stgCurrentTSO, stgCurrentNursery :: CmmExpr stgSp = CmmReg sp stgHp = CmmReg hp stgCurrentTSO = CmmReg currentTSO stgCurrentNursery = CmmReg currentNursery sp, spLim, hp, hpLim, currentTSO, currentNursery, hpAlloc :: CmmReg sp = CmmGlobal Sp spLim = CmmGlobal SpLim hp = CmmGlobal Hp hpLim = CmmGlobal HpLim currentTSO = CmmGlobal CurrentTSO currentNursery = CmmGlobal CurrentNursery hpAlloc = CmmGlobal HpAlloc -- ----------------------------------------------------------------------------- -- For certain types passed to foreign calls, we adjust the actual -- value passed to the call. For ByteArray#/Array# we pass the -- address of the actual array, not the address of the heap object. getFCallArgs :: [StgArg] -> FCode [(CmmExpr, ForeignHint)] -- (a) Drop void args -- (b) Add foreign-call shim code -- It's (b) that makes this differ from getNonVoidArgAmodes getFCallArgs args = do { mb_cmms <- mapM get args ; return (catMaybes mb_cmms) } where get arg | isVoidRep arg_rep = return Nothing | otherwise = do { cmm <- getArgAmode (NonVoid arg) ; dflags <- getDynFlags ; return (Just (add_shim dflags arg_ty cmm, hint)) } where arg_ty = stgArgType arg arg_rep = typePrimRep arg_ty hint = typeForeignHint arg_ty add_shim :: DynFlags -> Type -> CmmExpr -> CmmExpr add_shim dflags arg_ty expr | tycon == arrayPrimTyCon || tycon == mutableArrayPrimTyCon = cmmOffsetB dflags expr (arrPtrsHdrSize dflags) | tycon == smallArrayPrimTyCon || tycon == smallMutableArrayPrimTyCon = cmmOffsetB dflags expr (smallArrPtrsHdrSize dflags) | tycon == byteArrayPrimTyCon || tycon == mutableByteArrayPrimTyCon = cmmOffsetB dflags expr (arrWordsHdrSize dflags) | otherwise = expr where UnaryRep rep_ty = repType arg_ty tycon = tyConAppTyCon rep_ty -- should be a tycon app, since this is a foreign call
frantisekfarka/ghc-dsi
compiler/codeGen/StgCmmForeign.hs
Haskell
bsd-3-clause
15,782
module Testsuite.Utils.Test ( Test, ($?), ($$?), TestS(..), summarise, TestM, execTestM, liftIO, runTest ) where import Test.QuickCheck import Test.QuickCheck.Batch import System.IO ( hFlush, stdout ) data Test = Test String Property | Group String [Test] ($?) :: Testable a => String -> a -> Test name $? test = Test name (property test) ($$?) :: String -> [Test] -> Test ($$?) = Group data TestS = TestS { indent :: Int , passCount :: !Int , failCount :: !Int , exhaustedCount :: !Int , abortedCount :: !Int } passed :: TestS -> TestS passed t@(TestS {}) = t { passCount = passCount t + 1 } failed :: TestS -> TestS failed t@(TestS {}) = t { failCount = failCount t + 1 } exhausted :: TestS -> TestS exhausted t@(TestS {}) = t { exhaustedCount = exhaustedCount t + 1 } aborted :: TestS -> TestS aborted t@(TestS {}) = t { abortedCount = abortedCount t + 1 } summarise :: TestS -> [String] summarise s = concat [ [shows_n (passCount s) "passed"] , shows_nz (failCount s) "failed" , shows_nz (exhaustedCount s) "exhausted" , shows_nz (abortedCount s) "aborted" ] where shows_n n s = let t = show n l = length t in replicate (10 - l) ' ' ++ t ++ " " ++ s shows_nz 0 s = [] shows_nz n s = [shows_n n s] newtype TestM a = TestM { runTestM :: TestS -> IO (a, TestS) } instance Monad TestM where return x = TestM $ \s -> return (x,s) TestM f >>= g = TestM $ \s -> do (x,s') <- f s runTestM (g x) s' readTestM :: (TestS -> a) -> TestM a readTestM f = TestM $ \s -> return (f s, s) updTestM :: (TestS -> TestS) -> TestM () updTestM f = TestM $ \s -> return ((), f s) execTestM :: TestM a -> IO (a, TestS) execTestM (TestM f) = f $ TestS { indent = 0 , passCount = 0 , failCount = 0 , exhaustedCount = 0 , abortedCount = 0 } liftIO :: IO a -> TestM a liftIO p = TestM $ \s -> do x <- p return (x,s) runTest :: Test -> TestM () runTest (Group name tests) = do ind <- readTestM indent liftIO . putStrLn $ replicate (ind * 2 + 2) '*' ++ " " ++ name updTestM $ \s -> s { indent = ind + 1 } mapM_ runTest tests updTestM $ \s -> s { indent = ind } runTest (Test name prop) = do liftIO $ do putStr $ name ++ replicate (60 - length name) ' ' ++ "... " hFlush stdout res <- liftIO $ run prop defOpt let (s, ss, upd) = result res liftIO $ do putStrLn s hFlush stdout mapM_ (putStrLn . (" " ++)) ss hFlush stdout updTestM upd {- case res of TestOk _ n _ -> putStrLn $ "pass (" ++ show n ++ ")" TestExausted _ n _ -> putStrLn $ "EXHAUSTED (" ++ show n ++ ")" TestFailed s n -> do putStrLn $ "FAIL (" ++ show n ++ ")" mapM_ putStrLn $ map (" " ++) s TestAborted e -> do putStrLn $ "ABORTED" putStrLn $ " " ++ show e -} result :: TestResult -> (String, [String], TestS -> TestS) result (TestOk _ _ _) = ("pass", [], passed) result (TestExausted _ n _) = ("EXHAUSTED", [], exhausted) result (TestFailed s n) = ("FAIL", s, failed) result (TestAborted e) = ("ABORTED", [show e], aborted)
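-- A hypothetical example (not part of the original module) showing how the
-- ($?) and ($$?) combinators above are meant to be combined into a suite;
-- any QuickCheck 'Testable' value can sit on the right of ($?).
exampleSuite :: Test
exampleSuite = "arithmetic" $$?
  [ "addition commutes" $? \x y -> x + y == y + (x :: Int)
  , "zero is neutral"   $? \x -> x + 0 == (x :: Int)
  ]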
dolio/vector
old-testsuite/Testsuite/Utils/Test.hs
Haskell
bsd-3-clause
3,761
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="fa-IR"> <title>Selenium add-on</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
msrader/zap-extensions
src/org/zaproxy/zap/extension/selenium/resources/help_fa_IR/helpset_fa_IR.hs
Haskell
apache-2.0
961
{-# LANGUAGE MagicHash, UnboxedTuples #-} import GHC.Exts newtype Eval a = Eval {runEval :: State# RealWorld -> (# State# RealWorld, a #)} -- inline sequence :: [Eval a] -> Eval [a] well_sequenced :: [Eval a] -> Eval [a] well_sequenced = foldr cons nil where cons e es = Eval $ \s -> case runEval e s of (# s', a #) -> case runEval es s' of (# s'', as #) -> (# s'', a : as #) nil = Eval $ \s -> (# s, [] #) -- seemingly demonic use of spark# ill_sequenced :: [Eval a] -> Eval [a] ill_sequenced as = Eval $ spark# (case well_sequenced as of Eval f -> case f realWorld# of (# _, a' #) -> a') -- 'parallelized' version of (show >=> show >=> show >=> show >=> show) main :: IO () main = putStrLn ((layer . layer . layer . layer . layer) (:[]) 'y') where layer :: (Char -> String) -> (Char -> String) layer f = (\(Eval x) -> case x realWorld# of (# _, as #) -> concat as) . well_sequenced -- [Eval String] -> Eval [String] . map ill_sequenced -- [[Eval Char]] -> [Eval String]; -- 'map well_sequenced' is fine . map (map (\x -> Eval $ \s -> (# s, x #))) -- wrap each Char in Eval . chunk' -- String -> [String] . concatMap f . show -- add single quotes chunk' :: String -> [String] chunk' [] = [] chunk' xs = as : chunk' bs where (as,bs) = splitAt 3 xs -- this doesn't work: -- chunk (a:b:c:xs) = [a,b,c]:chunk xs -- chunk xs = [xs]
urbanslug/ghc
testsuite/tests/codeGen/should_run/T10414.hs
Haskell
bsd-3-clause
1,526
module Fast2haskell ( Complex_type, Array_type, Assoc_type, Descr_type, abortstr, delay, fix, force, iff, iffrev, seQ, pair, strcmp, entier, land_i, lnot_i, lor_i, lshift_i, rshift_i, descr, destr_update, indassoc, lowbound, tabulate, upbound, update, valassoc) where { import Data.Bits; -- import Word2; import Data.Word; import Data.Complex; -- 1.3 import Data.Array; -- 1.3 -- import Data.Int ( Num(fromInt) ); type Complex_type = Complex Double; type Array_type b = Array Int b; type Assoc_type a = (Int, a); type Descr_type = (Int,Int); abortstr str = error ("abort:"++str); -- abort (OtherError str); delay x = abortstr "delay not implemented"; fix f = fix_f where {fix_f = f fix_f}; force x = x; -- error "force not implemented"; iff b x y = if b then x else y; iffrev y x b = if b then x else y; seQ x y = x `seq` y; pair [] = False; pair x = True; strcmp :: [Char] -> [Char] -> Bool; strcmp x y = x == y; entier x = fromIntegral (floor x); land_i :: Int -> Int -> Int; land_i x y = x .&. y; lnot_i :: Int -> Int; lnot_i x = complement x; lor_i :: Int -> Int -> Int; lor_i x y = x .|. y; lshift_i :: Int -> Int -> Int; lshift_i x y = x `shiftL` y; rshift_i :: Int -> Int -> Int; rshift_i x y = x `shiftR` y; write x = abortstr "write not implemented"; descr l u = (l,u); destr_update ar i x = ar // [(i,x)]; indassoc (i,v) = i; lowbound (l,u) = l; tabulate f (l,u) = listArray (l,u) [f i | i <- [l..u]]; upbound (l,u) = u; update ar i x = ar // [(i,x)]; valassoc (i,v) = v; }
ghc-android/ghc
testsuite/tests/programs/fast2haskell/Fast2haskell.hs
Haskell
bsd-3-clause
2,658
{-# LANGUAGE BangPatterns #-} module Cmm.ActivityAnalysis ( ActivityStorage(..) , activityAnalysis ) where import Cmm.DirectedGraph import Cmm.Backend (MachineInstr(..), MachineFunction(..), MachinePrg(..)) import Cmm.LabelGenerator (Temp()) import Cmm.ControlFlowGraph (createControlFlowGraph, Unique(..)) import Data.Set (Set) import qualified Data.Set as Set import Data.Map (Map) import qualified Data.Map.Strict as Map import Text.Printf (printf) import Data.List (foldl', find) import Data.Maybe (fromJust) -- | simple data type to store the -- written temps ~ out -- accessed temps ~ in data ActivityStorage = ActivityStorage { out_a :: Set Temp , in_a :: Set Temp } deriving (Show, Eq, Ord) emptyActivityStorage :: ActivityStorage emptyActivityStorage = ActivityStorage { out_a = Set.empty , in_a = Set.empty } -- | analyses the activity of the temporaries -- by reversing the control flow graph, doing an almost correct -- depth-first search (the successors are Sets) -- and applying the proposed algorithm -- activityAnalysis :: (MachineInstr i, Ord i, Show i) => DirectedGraph (Unique i) -> Map (Unique i) ActivityStorage activityAnalysis graph = -- revGraph :: DirectedGraph (Int, i) let revGraph = reverseGraph graph lastReturn = (Set.size (nodes revGraph) + 1, ret) -- revNodes :: (Ord i) => [(Int, i)] revNodes = toList revGraph lastReturn -- livelinessMap :: Map (Int, i) ActivityStorage livelinessMap = Map.fromList $ zip revNodes (repeat emptyActivityStorage) -- runUpate :: Map (Int, i) ActivityStorage -> Map (Int, i) ActivityStorage runUpate lm = fst $ foldl' updateActivities (lm, graph) revNodes -- newMap :: Map (Int, i) ActivityStorage solvedMap = repeatUntilSame livelinessMap runUpate in solvedMap where updateActivities (lm, g) i = -- succs :: [(Int, i)] let succs = Set.toList $ successors g i -- activity_ins :: [Set i] activitiy_ins = map (\i -> in_a $ fromJust $ Map.lookup i lm) succs -- out_ :: Set Temp outs = out_a $ fromJust $ Map.lookup i lm -- in_ :: Set Temp in_ = ((use . snd) i) `Set.union` (outs `Set.difference` ((def . snd) i)) out_ = (Set.unions $ activitiy_ins) activitiy = ActivityStorage { out_a = out_ , in_a = in_ } in (Map.insert i activitiy lm, g) repeatUntilSame :: Eq s => s -> (s -> s) -> s repeatUntilSame state transform = do let newState = transform state case state == newState of True -> state False -> repeatUntilSame newState transform
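-- A small illustration (not part of the original module) of the fixpoint
-- helper above: keep applying the step function until the state stops
-- changing.  The step below saturates at 10, so the result is 10.
_repeatUntilSameExample :: Int
_repeatUntilSameExample = repeatUntilSame 0 (\x -> min 10 (x + 1))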
cirquit/hjc
src/Cmm/ActivityAnalysis.hs
Haskell
mit
2,790
{-# OPTIONS_GHC -Wall -fno-warn-unused-top-binds #-} {-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} module Okasaki.Stack ( StackF(..) , Stack , empty , push , pop , fromList , toList , isEmpty , cat , update , suffixes ) where import Prelude hiding (head, tail) import Data.Functor.Foldable as RS import Text.Show.Deriving data StackF a r = NilF | ConsF !a r deriving (Eq, Functor, Foldable, Traversable, Show) $(deriveShow1 ''StackF) type Stack a = Fix (StackF a) empty :: Stack a empty = Fix NilF push :: a -> Stack a -> Stack a push h t = Fix (ConsF h t) pop :: Stack a -> Maybe (a, Stack a) pop s = case project s of NilF -> Nothing ConsF h t -> Just (h, t) fromList :: [a] -> Stack a fromList = ana coalg where coalg = \case [] -> NilF (h : t) -> ConsF h t toList :: Stack a -> [a] toList = ana coalg where coalg s = case project s of NilF -> Nil ConsF h t -> Cons h t isEmpty :: Stack a -> Bool isEmpty s = case project s of NilF -> True _ -> False cat :: Stack a -> Stack a -> Stack a cat l r = apo coalg (project l) where coalg = \case ConsF h t -> case project t of NilF -> ConsF h (Left r) rest -> ConsF h (Right rest) NilF -> fmap Left (project r) update :: Int -> a -> Stack a -> Stack a update idx x s = apo coalg (idx, s) where coalg (j, stack) = case project stack of NilF -> NilF ConsF h t -> if j <= 0 then ConsF x (Left t) else ConsF h (Right (pred j, t)) -- exercise 2.1 suffixes :: Stack a -> Stack (Stack a) suffixes = ana coalg where coalg stack = case project stack of NilF -> NilF ConsF _ t -> ConsF t t -- test test0 :: Stack Int test0 = fromList [1..3] test1 :: Stack Int test1 = fromList [4..7] test2 :: Stack Int test2 = update 3 100 (cat test0 test1)
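-- A worked example (added here, not from the original file) for the
-- 'suffixes' exercise above: this implementation produces the proper
-- suffixes only, so for the stack [1,2,3] it yields [[2,3],[3],[]].
test3 :: [[Int]]
test3 = fmap toList (toList (suffixes (fromList [1, 2, 3])))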
jtobin/okasaki
lib/Okasaki/Stack.hs
Haskell
mit
2,028
{-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeFamilies #-} {-| Module: Capnp.Rpc.Server Description: handlers for incoming method calls. The term server in this context refers to a thread that handles method calls for a particular capability (The capnproto rpc protocol itself has no concept of clients and servers). -} module Capnp.Rpc.Server ( Server(..) , ServerOps(..) , CallInfo(..) , runServer -- * Handling methods , MethodHandler -- ** Working with untyped data , untypedHandler , toUntypedHandler , fromUntypedHandler ) where import Control.Concurrent.STM import Data.Word import Data.Typeable (Typeable) import Capnp.Message (Mutability(..)) import Capnp.Rpc.Promise (Fulfiller) import Capnp.Untyped (Ptr) import qualified Internal.TCloseQ as TCloseQ -- | a @'MethodHandler' m p r@ handles a method call with parameters @p@ -- and return type @r@, in monad @m@. -- -- The library represents method handlers via an abstract type -- 'MethodHandler', parametrized over parameter (@p@) and return (@r@) -- types, and the monadic context in which it runs (@m@). This allows us -- to provide different strategies for actually handling methods; there -- are various helper functions which construct these handlers. -- -- At some point we will likely additionally provide handlers affording: -- -- * Working directly with the low-level data types. -- * Replying to the method call asynchronously, allowing later method -- calls to be serviced before the current one is finished. newtype MethodHandler m p r = MethodHandler { handleMethod :: Maybe (Ptr 'Const) -> Fulfiller (Maybe (Ptr 'Const)) -> m () } -- | Convert a 'MethodHandler' for any parameter and return types into -- one that deals with untyped pointers. toUntypedHandler :: MethodHandler m p r -> MethodHandler m (Maybe (Ptr 'Const)) (Maybe (Ptr 'Const)) toUntypedHandler MethodHandler{..} = MethodHandler{..} -- | Inverse of 'toUntypedHandler' fromUntypedHandler :: MethodHandler m (Maybe (Ptr 'Const)) (Maybe (Ptr 'Const)) -> MethodHandler m p r fromUntypedHandler MethodHandler{..} = MethodHandler{..} -- | Construct a method handler from a function accepting an untyped -- pointer for the method's parameter, and a 'Fulfiller' which accepts -- an untyped pointer for the method's return value. untypedHandler :: (Maybe (Ptr 'Const) -> Fulfiller (Maybe (Ptr 'Const)) -> m ()) -> MethodHandler m (Maybe (Ptr 'Const)) (Maybe (Ptr 'Const)) untypedHandler = MethodHandler -- | Base class for things that can act as capnproto servers. class Monad m => Server m a | a -> m where -- | Called when the last live reference to a server is dropped. shutdown :: a -> m () shutdown _ = pure () -- | Try to extract a value of a given type. The default implementation -- always fails (returns 'Nothing'). If an instance chooses to implement -- this, it will be possible to use "reflection" on clients that point -- at local servers to dynamically unwrap the server value. A typical -- implementation will just call Typeable's @cast@ method, but this -- needn't be the case -- a server may wish to allow local peers to -- unwrap some value that is not exactly the data the server has access -- to. unwrap :: Typeable b => a -> Maybe b unwrap _ = Nothing -- | The operations necessary to receive and handle method calls, i.e. -- to implement an object. 
It is parametrized over the monadic context -- in which methods are serviced. data ServerOps m = ServerOps { handleCall :: Word64 -> Word16 -> MethodHandler m (Maybe (Ptr 'Const)) (Maybe (Ptr 'Const)) -- ^ Handle a method call; takes the interface and method id and returns -- a handler for the specific method. , handleStop :: m () -- ^ Handle shutting-down the receiver; this is called when the last -- reference to the capability is dropped. , handleCast :: forall a. Typeable a => Maybe a -- ^ used to unwrap the server when reflecting on a local client. } -- | A 'CallInfo' contains information about a method call. data CallInfo = CallInfo { interfaceId :: !Word64 -- ^ The id of the interface whose method is being called. , methodId :: !Word16 -- ^ The method id of the method being called. , arguments :: Maybe (Ptr 'Const) -- ^ The arguments to the method call. , response :: Fulfiller (Maybe (Ptr 'Const)) -- ^ A 'Fulfiller' which accepts the method's return value. } -- | Handle incoming messages for a given object. -- -- Accepts a queue of messages to handle, and 'ServerOps' used to handle them. -- returns when it receives a 'Stop' message. runServer :: TCloseQ.Q CallInfo -> ServerOps IO -> IO () runServer q ops = go where go = atomically (TCloseQ.read q) >>= \case Nothing -> pure () Just CallInfo{interfaceId, methodId, arguments, response} -> do handleMethod (handleCall ops interfaceId methodId) arguments response go
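-- A minimal sketch (not part of the original module): a 'ServerOps' whose
-- methods ignore their arguments and never fulfill a response.  A real
-- server would dispatch on the interface and method ids and write a return
-- value into the supplied 'Fulfiller'.
_noopOps :: ServerOps IO
_noopOps = ServerOps
    { handleCall = \_interfaceId _methodId ->
        untypedHandler $ \_params _response -> pure ()
    , handleStop = pure ()
    , handleCast = Nothing
    }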
zenhack/haskell-capnp
lib/Capnp/Rpc/Server.hs
Haskell
mit
5,431
{- | Values implemented named terms with explicit substitutions. -} {-# LANGUAGE FlexibleContexts, FlexibleInstances, TypeSynonymInstances, MultiParamTypeClasses, OverlappingInstances, IncoherentInstances, UndecidableInstances, PatternGuards, TupleSections #-} module NamedExplSubst where import Prelude hiding (pi,abs,mapM) import Control.Applicative import Control.Monad.Reader hiding (mapM) {- import Data.Map (Map) import qualified Data.Map as Map -} import Data.Traversable import qualified Abstract as A import qualified ListEnv as Env -- import qualified MapEnv as Env import Signature import Util import Value import Fresh -- * Values type Var = A.UID -- | Heads are identifiers excluding @A.Def@. type Head = A.Ident data Val = Ne Head Val [Val] -- ^ @x^a vs^-1 | c^a vs^-1@ | Df A.Name Val Val [Val] -- ^ @d=v^a vs^-1@ a,v are closed! | App Val [Val] -- ^ @v vs^-1@ non-canonical -- last argument first in list! | Sort Sort -- ^ @s@ | K Val -- ^ constant function | Abs A.Name Val -- ^ @\xv@ abstraction | Fun Val Val -- ^ @Pi a b@ | Clos Val Env -- ^ @v[rho]@ | DontCare -- * Environments (Values for expression (=bound) variables) type Env = Env.Env Var Val -- * Application, Substitution -- | @app f v@ computes the whnf of @f v@ without expanding definitions app :: Val -> Val -> Val app f v = case f of K w -> w Ne h t vs -> Ne h t (v:vs) Df h w t vs -> Df h w t (v:vs) App w ws -> rapps w (v:ws) -- evaluate apps Abs x w -> substFree v (A.uid x) w Clos (Abs x w) sigma -> substs (Env.update sigma (A.uid x) v) w Clos w sigma -> substs sigma w `app` v rapps :: Val -> [Val] -> Val rapps f vs = foldr (flip app) f vs -- | @substFree v x w = [v/x]w@ single substitution substFree :: Val -> Var -> Val -> Val substFree w x = substs (Env.singleton x w) -- | parallel substitution, computing whnf substs :: Env -> Val -> Val substs sigma = subst where subst (Ne h@(A.Var y) a vs) = case Env.lookup (A.uid y) sigma of Just w -> rapps w $ map subst vs Nothing -> Ne h (subst a) $ map subst vs subst (Ne h a vs) = Ne h a $ map subst vs -- a is closed subst (Df h v a vs) = Df h v a $ map subst vs -- a,v are closed subst (App v vs) = rapps (subst v) (map subst vs) -- subst (rapps v vs) -- OR: first compute application ? subst (Sort s) = Sort s subst (K v) = K $ subst v subst (Abs x v) = Clos (Abs x v) sigma subst (Clos v tau) = flip substs v $ flip Env.union sigma $ Env.map subst tau -- composing two substitutions (first tau, then sigma) : -- apply sigma to tau -- add all bindings from sigma that are not yet present -- thus, we can take sigma and overwrite it with [sigma]tau subst (Fun a b) = Fun (subst a) $ subst b -- | computing the whnf of a term, pushing a delayed substitution in whnf :: Val -> Val whnf (Clos v rho) = substs rho v whnf (App f vs) = rapps f vs whnf v = v -- * Smart Constructors for values. var :: A.Name -> Val -> Val var x t = Ne (A.Var x) t [] var_ :: A.Name -> Val var_ x = var x DontCare con :: A.Name -> Val -> Val con x t = Ne (A.Con x) t [] def :: A.Name -> Val -> Val -> Val def x v t = Df x v t [] -- non-computing application application :: Val -> Val -> Val application f v = case f of Ne h t vs -> Ne h t (v:vs) Df x w t vs -> Df x w t (v:vs) App w vs -> App w (v:vs) K w -> w -- because K comes from non-dep fun types _ -> App f [v] -- * projections boundName :: Val -> A.Name boundName (Abs n _) = n boundName _ = A.noName -- * Translation -- | @translate e rho = v@ where @rho@ is a renaming. 
translate :: (Applicative m, Monad m, Signature Val sig, MonadReader sig m, MonadFresh m) => A.Expr -> Renaming -> m Val translate e rho = case e of A.Ident (A.Con x) -> con x . symbType . sigLookup' (A.uid x) <$> ask A.Ident (A.Def x) -> do ~(SigDef t v) <- sigLookup' (A.uid x) <$> ask return $ def x v t A.Ident (A.Var x) -> return $ var_ $ Env.lookupSafe (A.uid x) rho A.App f e -> application <$> (evaluate f rho) <*> (evaluate e rho) A.Lam x mt e -> do y <- fresh x Abs y <$> translate e (Env.update rho (A.uid x) y) A.Pi mx e e' -> Fun <$> (evaluate e rho) <*> case mx of Just x -> do y <- fresh x Abs y <$> translate e (Env.update rho (A.uid x) y) Nothing -> K <$> evaluate e' rho A.Typ -> typ -- * Evaluation monad instance (Applicative m, Monad m, Signature Val sig, MonadReader sig m, MonadFresh m) => MonadEval Head Val Renaming m where typ = return $ Sort Type kind = return $ Sort Kind freeVar h t = return $ Ne h t [] valView v = return $ case (whnf v) of Fun a b -> VPi a b Sort s -> VSort s Ne h t vs -> VNe h t (reverse vs) Df x v t vs -> VDef (A.Def x) t (reverse vs) _ -> VAbs apply f v = return $ app f v evaluate e rho = error "NYI: NamedExplSubst.evaluate" -- evaluate e rho = whnf <$> translate e rho evaluate' e = whnf <$> (translate e =<< renaming) unfold v = case v of Df x f t vs -> appsR f vs _ -> return v unfolds v = case v of Df x f t vs -> unfolds =<< appsR f vs -- unfolding application _ -> return v abstractPi a (n, Ne (A.Var x) _ []) b = return $ Fun a $ Abs x b reify v = quote v -- * Reification -- quote :: Val -> A.SysNameCounter -> EvalM A.Expr quote :: (Applicative m, Monad m, MonadFresh m, MonadEval Head Val Renaming m) => Val -> m A.Expr quote v = case v of Ne h a vs -> foldr (flip A.App) (A.Ident h) <$> mapM quote vs Df x f a vs -> foldr (flip A.App) (A.Ident (A.Def x)) <$> mapM quote vs App f vs -> foldr (flip A.App) <$> quote f <*> mapM quote vs Sort Type -> return A.Typ Sort Kind -> error "cannot quote sort kind" DontCare -> error "cannot quote the dontcare value" Fun a (K b) -> A.Pi Nothing <$> quote a <*> quote b Fun a f -> do u <- quote a (x,t) <- quoteFun f return $ A.Pi (Just x) u t f -> do (x,e) <- quoteFun f return $ A.Lam x Nothing e -- | @quoteFun n v@ expects @v@ to be a function and returns and its -- body as an expression. -- quoteFun :: Val -> A.SysNameCounter -> EvalM (A.Name, A.Expr) quoteFun :: (Applicative m, Monad m, MonadFresh m, MonadEval Head Val Renaming m) => Val -> m (A.Name, A.Expr) quoteFun f = do x <- fresh $ boundName f v <- f `apply` (var_ x) (x,) <$> quote v
andreasabel/helf
src/NamedExplSubst.hs
Haskell
mit
7,012
solve :: String -> String -> String
solve ev od = solve' ev od
  where
    -- Interleave the two strings, starting with the first; any leftover
    -- characters are appended unchanged, so the helper is total.
    solve' :: String -> String -> String
    solve' [] ys = ys
    solve' xs [] = xs
    solve' (x:xs) (y:ys) = x : y : solve' xs ys

main :: IO ()
main = do
  ev <- getLine
  od <- getLine
  putStrLn $ solve ev od
pogin503/vbautil
atcoder/beg058/beg058b.hs
Haskell
mit
304
{-# LANGUAGE TypeOperators, FlexibleInstances, FlexibleContexts #-} module Data.Functor.Classes.Show.Generic ( Show1(..) , genericLiftShowsPrec , genericLiftShowList , gliftShowsPrec , gliftShowList ) where import Data.Functor.Classes import GHC.Generics import Text.Show -- | Generically-derivable lifting of the 'Show' class to unary type constructors. class GShow1 f where -- | showsPrec function for an application of the type constructor based on showsPrec and showList functions for the argument type. gliftShowsPrec :: (Int -> a -> ShowS) -> ([a] -> ShowS) -> Int -> f a -> ShowS -- | showList function for an application of the type constructor based on showsPrec and showList functions for the argument type. The default implementation using standard list syntax is correct for most types. gliftShowList :: GShow1 f => (Int -> a -> ShowS) -> ([a] -> ShowS) -> [f a] -> ShowS gliftShowList sp sl = showListWith (gliftShowsPrec sp sl 0) -- | A suitable implementation of Show1’s liftShowsPrec for Generic1 types. genericLiftShowsPrec :: (Generic1 f, GShow1 (Rep1 f)) => (Int -> a -> ShowS) -> ([a] -> ShowS) -> Int -> f a -> ShowS genericLiftShowsPrec sp sl d = gliftShowsPrec sp sl d . from1 -- | A suitable implementation of Show1’s liftShowsPrec for Generic1 types. genericLiftShowList :: (Generic1 f, GShow1 (Rep1 f)) => (Int -> a -> ShowS) -> ([a] -> ShowS) -> [f a] -> ShowS genericLiftShowList sp sl = gliftShowList sp sl . map from1 -- Show1 instances instance GShow1 [] where gliftShowsPrec = liftShowsPrec instance GShow1 Maybe where gliftShowsPrec = liftShowsPrec instance Show a => GShow1 ((,) a) where gliftShowsPrec = liftShowsPrec instance Show a => GShow1 (Either a) where gliftShowsPrec = liftShowsPrec -- Generics instance GShow1 U1 where gliftShowsPrec _ _ _ _ = id instance GShow1 Par1 where gliftShowsPrec sp _ d (Par1 a) = sp d a instance Show c => GShow1 (K1 i c) where gliftShowsPrec _ _ d (K1 a) = showsPrec d a instance Show1 f => GShow1 (Rec1 f) where gliftShowsPrec sp sl d (Rec1 a) = liftShowsPrec sp sl d a instance GShow1 f => GShow1 (M1 D c f) where gliftShowsPrec sp sl d (M1 a) = gliftShowsPrec sp sl d a instance (Constructor c, GShow1 f) => GShow1 (M1 C c f) where gliftShowsPrec sp sl d m = showsUnaryWith (gliftShowsPrec sp sl) (conName m) d (unM1 m) instance GShow1 f => GShow1 (M1 S c f) where gliftShowsPrec sp sl d (M1 a) = gliftShowsPrec sp sl d a instance (GShow1 f, GShow1 g) => GShow1 (f :+: g) where gliftShowsPrec sp sl d (L1 l) = gliftShowsPrec sp sl d l gliftShowsPrec sp sl d (R1 r) = gliftShowsPrec sp sl d r instance (GShow1 f, GShow1 g) => GShow1 (f :*: g) where gliftShowsPrec sp sl d (a :*: b) = gliftShowsPrec sp sl d a . showChar ' ' . gliftShowsPrec sp sl d b instance (Show1 f, GShow1 g) => GShow1 (f :.: g) where gliftShowsPrec sp sl d (Comp1 a) = liftShowsPrec (gliftShowsPrec sp sl) (gliftShowList sp sl) d a
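-- A hypothetical usage note (not part of the original module): at a use
-- site with DeriveGeneric enabled, a 'Show1' instance reduces to one line,
-- e.g.
--
-- > {-# LANGUAGE DeriveGeneric #-}
-- > data Pair a = Pair a a deriving (Show, Generic1)
-- >
-- > instance Show1 Pair where liftShowsPrec = genericLiftShowsPrec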
tclem/lilo
src/Data/Functor/Classes/Show/Generic.hs
Haskell
mit
2,930
module SimpleArgvParser (pairArguments) where import Prelude hiding (map) import qualified Data.Map.Strict as Map strIsOption :: String -> Bool strIsOption (a:b:_) = (a == '-') && (b == '-') strIsOption _ = False -- TODO: use either here pairArguments :: [String] -> Maybe (Map.Map String String) pairArguments args = collect args Map.empty where collect [] map = Just map collect [_] _ = Nothing collect (k:v:rst) map = if strIsOption k then collect rst (Map.insert key v map) else Nothing where (_,key) = splitAt 2 k
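-- A small illustration (added, not in the original file): arguments are
-- consumed in "--key value" pairs, so the call below yields
-- Just (Map.fromList [("n","3"),("name","foo")]).
exampleArgs :: Maybe (Map.Map String String)
exampleArgs = pairArguments ["--name", "foo", "--n", "3"]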
davidfontenot/haskell-hashtag-viewer
src/SimpleArgvParser.hs
Haskell
mit
550
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} module Client where import Control.Applicative import Control.Monad import Control.Monad.Catch (MonadThrow) import Control.Monad.IO.Class (MonadIO, liftIO) import Control.Monad.Trans.Control (MonadBaseControl) import Data.Aeson import Data.Aeson.Lens -- applyBasicAuth expects a strict ByteString import Data.ByteString.Lazy.Char8 hiding (filter, foldl) import qualified Data.ByteString.Char8 as BC import Data.Maybe import Data.Text as T hiding (foldl) import Network.HTTP.Conduit -------------------------------------------------------------------------------- -- Response -------------------------------------------------------------------------------- -- | Some hacking around....
wayofthepie/tc-rest-client
src/Client.hs
Haskell
mit
839
{-# LANGUAGE CPP #-} {-# OPTIONS_GHC -fno-warn-missing-import-lists #-} {-# OPTIONS_GHC -fno-warn-implicit-prelude #-} module Paths_list_ops ( version, getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getDataFileName, getSysconfDir ) where import qualified Control.Exception as Exception import Data.Version (Version(..)) import System.Environment (getEnv) import Prelude #if defined(VERSION_base) #if MIN_VERSION_base(4,0,0) catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a #else catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a #endif #else catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a #endif catchIO = Exception.catch version :: Version version = Version [0,1,0,2] [] bindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath bindir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/bin" libdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/lib/x86_64-osx-ghc-8.0.2/list-ops-0.1.0.2-JKI50BvNEqOLJomxL6kjuW" dynlibdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/lib/x86_64-osx-ghc-8.0.2" datadir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/share/x86_64-osx-ghc-8.0.2/list-ops-0.1.0.2" libexecdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/libexec" sysconfdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/list-ops/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/etc" getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath getBinDir = catchIO (getEnv "list_ops_bindir") (\_ -> return bindir) getLibDir = catchIO (getEnv "list_ops_libdir") (\_ -> return libdir) getDynLibDir = catchIO (getEnv "list_ops_dynlibdir") (\_ -> return dynlibdir) getDataDir = catchIO (getEnv "list_ops_datadir") (\_ -> return datadir) getLibexecDir = catchIO (getEnv "list_ops_libexecdir") (\_ -> return libexecdir) getSysconfDir = catchIO (getEnv "list_ops_sysconfdir") (\_ -> return sysconfdir) getDataFileName :: FilePath -> IO FilePath getDataFileName name = do dir <- getDataDir return (dir ++ "/" ++ name)
c19/Exercism-Haskell
list-ops/.stack-work/dist/x86_64-osx/Cabal-1.24.2.0/build/autogen/Paths_list_ops.hs
Haskell
mit
2,371
module Web.YahooPortfolioManager.App ( module Web.YahooPortfolioManager.Foundation , module Web.YahooPortfolioManager.Dispatch , module Web.YahooPortfolioManager.Handlers ) where import Web.YahooPortfolioManager.Foundation import Web.YahooPortfolioManager.Dispatch () import Web.YahooPortfolioManager.Handlers
lhoghu/intranet
Web/YahooPortfolioManager/App.hs
Haskell
mit
327
-- -- If the numbers 1 to 5 are written out in words: one, two, three, four, five, then there are 3 + 3 + 5 + 4 + 4 = 19 letters used in total. -- -- If all the numbers from 1 to 1000 (one thousand) inclusive were written out in words, how many letters would be used? -- -- NOTE: Do not count spaces or hyphens. For example, 342 (three hundred and forty-two) contains 23 letters and 115 (one hundred and fifteen) contains 20 letters. The use of "and" when writing out numbers is in compliance with British usage. -- english :: Int -> String english n | n < 0 = "minus " ++ english (negate n) | n < 20 = [ "zero", "one", "two", "three", "four" , "five", "six", "seven", "eight", "nine" , "ten", "eleven", "twelve", "thirteen", "fourteen" , "fifteen", "sixteen", "seventeen", "eighteen", "nineteen" ] !! n | n < 100 = [ "?", "?", "twenty", "thirty", "forty" , "fifty", "sixty", "seventy", "eighty", "ninety" ] !! (n `div` 10) ++ (if n `mod` 10 == 0 then "" else "-" ++ english (n `mod` 10)) | n < 1000 = (english (n `div` 100)) ++ " hundred" ++ (if n `mod` 100 == 0 then "" else " and " ++ english (n `mod` 100)) | n < 1000000 = (english (n `div` 1000)) ++ " thousand" ++ (if n `mod` 1000 == 0 then "" else " " ++ english (n `mod` 1000)) main = putStrLn $ show $ length $ filter (`elem` ['a'..'z']) $ foldr (++) [] $ map english [1..1000]
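-- Sanity checks (added here, not part of the original solution) against the
-- two spellings quoted in the problem statement above.
checks :: Bool
checks = english 342 == "three hundred and forty-two"
      && english 115 == "one hundred and fifteen"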
stu-smith/project-euler-haskell
Euler-017.hs
Haskell
mit
1,563
module Properties where main :: IO () main = return ()
nickspinale/wmonad
tests/Properties.hs
Haskell
mit
56
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} module Kubernetes.V1.EndpointSubset where import GHC.Generics import Kubernetes.V1.EndpointAddress import Kubernetes.V1.EndpointPort import qualified Data.Aeson -- | EndpointSubset is a group of addresses with a common set of ports. The expanded set of endpoints is the Cartesian product of Addresses x Ports. For example, given:\n {\n Addresses: [{\"ip\": \"10.10.1.1\"}, {\"ip\": \"10.10.2.2\"}],\n Ports: [{\"name\": \"a\", \"port\": 8675}, {\"name\": \"b\", \"port\": 309}]\n }\nThe resulting set of endpoints can be viewed as:\n a: [ 10.10.1.1:8675, 10.10.2.2:8675 ],\n b: [ 10.10.1.1:309, 10.10.2.2:309 ] data EndpointSubset = EndpointSubset { addresses :: Maybe [EndpointAddress] -- ^ IP addresses which offer the related ports that are marked as ready. These endpoints should be considered safe for load balancers and clients to utilize. , notReadyAddresses :: Maybe [EndpointAddress] -- ^ IP addresses which offer the related ports but are not currently marked as ready because they have not yet finished starting, have recently failed a readiness check, or have recently failed a liveness check. , ports :: Maybe [EndpointPort] -- ^ Port numbers available on the related IP addresses. } deriving (Show, Eq, Generic) instance Data.Aeson.FromJSON EndpointSubset instance Data.Aeson.ToJSON EndpointSubset
minhdoboi/deprecated-openshift-haskell-api
kubernetes/lib/Kubernetes/V1/EndpointSubset.hs
Haskell
apache-2.0
1,521
-- | The main Robot interface. module Test.Robot ( -- * Running your robot Robot() -- hide implementation , runRobot -- * Key and button constants , module Test.Robot.Types -- * Doing things , Pressable(press, release, hold) , moveBy , moveTo , tap -- * Miscellaneous , sleep , module Test.Robot.Connection ) where import Control.Applicative import Control.Concurrent (threadDelay) import Control.Monad.Catch import Control.Monad.IO.Class import Test.Robot.Connection import Test.Robot.Internal import Test.Robot.Types infixr 4 `hold` -- Allow e.g. xs ++ ys `hold` m -- | Represents things that can be pressed: either a single 'Switch' or -- a list of 'Switch'es. class Pressable x where -- | Press a key or button. press :: x -> Robot () -- | Release a key or button. release :: x -> Robot () -- | @hold x act@ holds down @x@ while executing @act@. It is -- equivalent to: -- -- @ -- press x >> act >> release x -- @ -- -- except @hold@ ensures that the argument is released in the event -- of an exception. -- hold :: x -> Robot a -> Robot a hold = bracket_ <$> press <*> release instance Pressable Switch where press = switch True release = switch False -- | Press items from left-to-right, but release from right-to-left. -- -- This behavior ensures the following equivalence holds: -- -- @ -- press xs >> act >> release xs -- === xs \`hold\` act -- === x1 \`hold\` x2 \`hold\` ... xn \`hold\` act -- @ -- instance Pressable x => Pressable [x] where press = mapM_ press release = mapM_ release . reverse hold = foldr (.) id . map hold --hold [] = id --hold (x:xs) = hold x . hold xs -- | Move the pointer by an offset. moveBy :: Int -> Int -> Robot () moveBy = motion True -- | Move the pointer to a point on the screen. moveTo :: Int -> Int -> Robot () moveTo = motion False -- | Press the argument, then release it. -- -- Note that the underlying events are fired very quickly; much faster -- than some applications (such as Xmonad) can handle. If this becomes -- an issue, you may introduce a delay using 'sleep': -- -- @ -- slowTap x = x \`hold\` sleep 0.1 -- @ -- tap :: Pressable x => x -> Robot () tap = (`hold` return ()) -- | Do nothing for the specified number of seconds. sleep :: Rational -> Robot () sleep = liftIO . threadDelay . round . (* 1000000)
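-- The documentation for 'tap' above suggests adding a delay for slow
-- consumers; as a concrete helper (not exported, added for illustration)
-- that advice looks like this:
slowTap :: Pressable x => x -> Robot ()
slowTap x = x `hold` sleep 0.1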
lfairy/robot
Test/Robot.hs
Haskell
apache-2.0
2,461
{-# LANGUAGE OverloadedStrings, NoImplicitPrelude, FlexibleContexts #-} module Ethereum.Analyzer.EVM.CfgAugWithTopNPassSpec ( spec ) where import Protolude hiding (show) import Ckev.In.Text import Data.Text as DT import Ethereum.Analyzer.EVM import Ethereum.Analyzer.TestData.Basic import Test.Hspec spec :: Spec spec = describe "doCfgAugWithTopNPass" $ do it "works for hexstring1" $ do let result = unWordLabelMapM $ showText <$> doCfgAugWithTopNPass hexstring1 DT.length result `shouldBe` 4876 it "works for hexstring2" $ do let result = toS $ unWordLabelMapM ((toS . showText <$> doCfgAugWithTopNPass hexstring2) :: WordLabelMapM Text) (result :: [Char]) `shouldContain` "OC: 9: JUMPI -> [L3,L5]"
zchn/ethereum-analyzer
ethereum-analyzer/test/Ethereum/Analyzer/EVM/CfgAugWithTopNPassSpec.hs
Haskell
apache-2.0
795
{-# LANGUAGE OverloadedStrings, NamedFieldPuns #-} module FormEngine.FormElement.Rendering ( ElemAction , ElemBehaviour(..) , foldElements , renderElement ) where import Prelude import Data.Monoid ((<>)) import Data.Foldable (foldlM) import Data.Maybe (fromMaybe) import Data.Char (chr) --import Debug.Trace (traceShow) --import Haste.DOM import FormEngine.JQuery as JQ import FormEngine.FormItem import FormEngine.FormElement.FormElement as Element import FormEngine.FormElement.Identifiers import FormEngine.FormElement.Updating import FormEngine.FormContext import FormEngine.Functionality import FormEngine.FormElement.AutoComplete (autoCompleteHandler) foldElements :: [FormElement] -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery foldElements elems context behaviour jq = foldlM (\jq1 e -> renderElement e context behaviour jq1) jq elems renderElement :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderElement element@SimpleGroupElem{} context behaviour jq = renderSimpleGroup element context behaviour jq renderElement element@OptionalGroupElem{} context behaviour jq = renderOptionalGroup element context behaviour jq renderElement element@MultipleGroupElem{} context behaviour jq = renderMultipleGroup element context behaviour jq renderElement element@StringElem{} context behaviour jq = renderStringElement element context behaviour jq renderElement element@TextElem{} context behaviour jq = renderTextElement element context behaviour jq renderElement element@EmailElem{} context behaviour jq = renderEmailElement element context behaviour jq renderElement element@NumberElem{} context behaviour jq = renderNumberElement element context behaviour jq renderElement element@ChoiceElem{} context behaviour jq = renderChoiceElement element context behaviour jq renderElement element@InfoElem{} context behaviour jq = renderInfoElement element context behaviour jq renderElement element@ListElem{} context behaviour jq = renderListElement element context behaviour jq renderElement element@SaveButtonElem{} context _ jq = renderSaveButtonElement element context jq renderElement element@SubmitButtonElem{} context _ jq = renderSubmitButtonElement element context jq renderElement _ _ _ jq = errorjq "renderElement did not unify" jq setLongDescription :: FormElement -> IO () setLongDescription element = do paragraphJq <- select $ "#" ++ descSubpaneParagraphId element spanJq <- findSelector "span" paragraphJq let maybeDesc = iLongDescription $ fiDescriptor $ formItem element case maybeDesc of Nothing -> return () Just desc -> do _ <- setHtml desc spanJq _ <- appearJq paragraphJq return () return () unsetLongDescription :: FormElement -> IO () unsetLongDescription element = do paragraphJq <- select $ "#" ++ descSubpaneParagraphId element _ <- disappearJq paragraphJq return () elementFocusHandler :: FormElement -> FormContext -> ElemBehaviour -> Handler elementFocusHandler element context behaviour _ = do inputFieldUpdate element context applyRules element context case focusAction behaviour of Nothing -> return () Just action -> action element context elementBlurHandler :: FormElement -> FormContext -> ElemBehaviour -> Handler elementBlurHandler element context behaviour _ = do inputFieldUpdate element context applyRules element context case blurAction behaviour of Nothing -> return () Just action -> action element context elementClickHandler :: FormElement -> FormContext -> ElemBehaviour -> Handler elementClickHandler element context behaviour _ = case clickAction behaviour of Nothing -> 
return () Just action -> action element context renderLabel :: FormElement -> JQuery -> IO JQuery renderLabel element jq = case Element.maybeLabel element of Nothing -> return jq Just label -> case Element.maybeLink element of Nothing -> appendT "<label>" jq >>= setTextInside label Just link -> appendT ("<label class=\"link\" onclick=\"" <> link <> "\">") jq >>= setTextInside label renderHeading :: Maybe String -> Int -> JQuery -> IO JQuery renderHeading Nothing _ jq = return jq renderHeading (Just label) lvl jq = appendT heading jq >>= setTextInside label where heading :: String heading = "<h" <> show lvl <> ">" renderShortDesc :: FormElement -> JQuery -> IO JQuery renderShortDesc element jq = let maybeDesc = iShortDescription $ fiDescriptor $ formItem element in case maybeDesc of Nothing -> return jq Just desc -> appendT "<span class='short-desc'>" jq >>= setTextInside desc renderInput :: IO JQuery -> FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderInput elemIOJq element context behaviour jq = appendT "<table>" jq >>= setMouseEnterHandler (\_ -> setLongDescription element) >>= setMouseLeaveHandler (\_ -> unsetLongDescription element) >>= inside >>= appendT "<tbody>" >>= inside >>= appendT "<tr>" >>= inside >>= (case detailsFunc behaviour of Nothing -> return Just functionality -> renderQuestionDetails functionality) >>= renderLabelCell >>= renderElemCell >>= renderFlagCell >>= JQ.parent >>= appendT "<tr>" >>= inside >>= appendT "<div></div>" >>= setAttrInside "id" (autoCompleteBoxId element) >>= addClassInside "autocomplete-suggestions" >>= JQ.parent >>= JQ.parent >>= JQ.parent >>= renderShortDesc element where renderQuestionDetails detFunc jq1 = appendT "<td>" jq1 >>= inside >>= addClass "more-space functionality" >>= appendT (funcImg detFunc) >>= setClickHandler (\_ -> funcAction detFunc element context) >>= JQ.parent renderLabelCell jq1 = appendT "<td class='labeltd'>" jq1 >>= inside >>= addClass "more-space" >>= renderLabel element >>= JQ.parent renderElemCell jq1 = do elemJq <- elemIOJq appendT "<td>" jq1 >>= inside >>= appendJq elemJq >>= JQ.parent renderFlagCell jq1 = appendT "<td>" jq1 >>= setAttrInside "id" (flagPlaceId element) renderStringElement :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderStringElement element context behaviour jq = let elemIOJq = select "<input type='text'>" >>= setAttr "name" (elementId element) >>= setAttr "identity" (Element.identity element) >>= setAttr "value" (seValue element) >>= onMouseEnter (elementFocusHandler element context behaviour) -- >>= onKeyup (elementFocusHandler element context behaviour) >>= onKeyup (handlerCombinator [elementFocusHandler element context behaviour, autoCompleteHandler (chr 10) element context]) >>= onBlur (elementBlurHandler element context behaviour) >>= onMouseLeave (elementBlurHandler element context behaviour) >>= onClick (elementClickHandler element context behaviour) in renderInput elemIOJq element context behaviour jq renderTextElement :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderTextElement element context behaviour jq = let elemIOJq = select "<textarea>" >>= setAttr "name" (elementId element) >>= setAttr "identity" (Element.identity element) >>= setHtml (teValue element) >>= onMouseEnter (elementFocusHandler element context behaviour) >>= onKeyup (handlerCombinator [elementFocusHandler element context behaviour, autoCompleteHandler (chr 10) element context]) >>= onBlur (elementBlurHandler element context behaviour) >>= 
onMouseLeave (elementBlurHandler element context behaviour) >>= onClick (elementClickHandler element context behaviour) in renderInput elemIOJq element context behaviour jq renderEmailElement :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderEmailElement element context behaviour jq = let elemIOJq = select "<input type='email'>" >>= setAttr "name" (elementId element) >>= setAttr "identity" (Element.identity element) >>= setAttr "value" (eeValue element) >>= onMouseEnter (elementFocusHandler element context behaviour) >>= onKeyup (elementFocusHandler element context behaviour) >>= onBlur (elementBlurHandler element context behaviour) >>= onMouseLeave (elementBlurHandler element context behaviour) >>= onClick (elementClickHandler element context behaviour) in renderInput elemIOJq element context behaviour jq renderNumberElement :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderNumberElement element context behaviour jq = let elemIOJq = select "<span></span>" >>= appendT "<input type='number' step='0.1'>" >>= setAttrInside "id" (elementId element) >>= setAttrInside "name" (elementId element) >>= setAttrInside "identity" (Element.identity element) >>= setAttrInside "value" (fromMaybe "" $ show <$> neMaybeValue element) >>= setMouseEnterHandler (elementFocusHandler element context behaviour) >>= setKeyupHandler (elementFocusHandler element context behaviour) >>= setBlurHandler (elementBlurHandler element context behaviour) >>= setMouseLeaveHandler (elementBlurHandler element context behaviour) >>= setChangeHandler (elementClickHandler element context behaviour) >>= appendT "&nbsp; " >>= case nfiUnit (formItem element) of NoUnit -> return SingleUnit u -> appendT u MultipleUnit units -> renderUnits units where renderUnits :: [String] -> JQuery -> IO JQuery renderUnits units jq1 = foldlM (flip renderUnit) jq1 units where renderUnit :: String -> JQuery -> IO JQuery renderUnit unit jq2 = appendT "<input type='radio'>" jq2 >>= setAttrInside "value" unit >>= setAttrInside "name" (nfiUnitId $ nfi element) >>= setMouseEnterHandler (elementFocusHandler element context behaviour) >>= setClickHandler (elementFocusHandler element context behaviour) >>= setMouseLeaveHandler (elementBlurHandler element context behaviour) >>= case neMaybeUnitValue element of Nothing -> return Just selectedOption -> if selectedOption == unit then setAttrInside "checked" "checked" else return >>= appendT "<label>" >>= setTextInside unit >>= appendT "&nbsp;&nbsp;" in renderInput elemIOJq element context behaviour jq renderListElement :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderListElement element context behaviour jq = let selectIOJq = select "<select>" >>= setAttr "name" (elementId element) >>= setAttr "identity" (Element.identity element) >>= onBlur (elementFocusHandler element context behaviour) >>= onChange (elementFocusHandler element context behaviour) >>= onMouseLeave (elementBlurHandler element context behaviour) >>= onClick (elementClickHandler element context behaviour) >>= renderOptions in renderInput selectIOJq element context behaviour jq where renderOptions :: JQuery -> IO JQuery renderOptions jq1 = foldlM (flip renderOption) jq1 (lfiAvailableOptions (formItem element)) where renderOption :: (String, String) -> JQuery -> IO JQuery renderOption (listVal, label) jq2 = appendT "<option>" jq2 >>= setAttrInside "value" listVal >>= setTextInside label >>= case leMaybeValue element of Nothing -> return Just selectedOption -> if listVal == 
selectedOption then setAttrInside "selected" "selected" else return choiceSwitchHandler :: FormElement -> OptionElement -> Handler choiceSwitchHandler element optionElem _ = do allPanes <- mapM selectOptionSection detailOptionElems mapM_ disappearJq allPanes case optionElem of SimpleOptionElem {} -> return () DetailedOptionElem {} -> do _ <- selectOptionSection optionElem >>= appearJq return () where selectOptionSection :: OptionElement -> IO JQuery selectOptionSection oe = select $ "#" <> optionSectionId element oe detailOptionElems = Prelude.filter justDetailed (cheOptions element) where justDetailed :: OptionElement -> Bool justDetailed SimpleOptionElem{} = False justDetailed DetailedOptionElem{} = True choiceValidateHandler :: FormElement -> FormContext -> Handler choiceValidateHandler element context _ = do isSelected <- isRadioSelected $ radioName element updateValidityFlag element context isSelected -- Now a hack, needs to get the validity from the instances renderRadio :: FormElement -> OptionElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderRadio element optionElem context behaviour jq = --dumptIO (optionElemValue optionElem) -- dumptIO (choiceIisSelected choiceI) appendT "<input type='radio'>" jq >>= setAttrInside "id" (radioId element optionElem) >>= setAttrInside "name" (radioName element) >>= setAttrInside "identity" (Element.identity element) >>= setAttrInside "value" (optionElemValue optionElem) >>= (if optionElemIsSelected optionElem then setAttrInside "checked" "checked" else return) >>= setClickHandler (handlerCombinator [ choiceSwitchHandler element optionElem , choiceValidateHandler element context , elementClickHandler element context behaviour ]) >>= setMouseLeaveHandler (choiceValidateHandler element context) >>= appendT "<label>" >>= setTextInside (optionElemValue optionElem) >>= appendT appendix where appendix :: String appendix = case optionElem of SimpleOptionElem {} -> "" DetailedOptionElem {} -> "▾" renderChoiceElement :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderChoiceElement element context behaviour jq = let elemIOJq = select "<div></div>" >>= renderButtons (cheOptions element) in renderInput elemIOJq element context behaviour jq >>= renderPanes (cheOptions element) where renderButtons :: [OptionElement] -> JQuery -> IO JQuery renderButtons optionElems jq1 = foldlM (flip renderButton) jq1 optionElems where renderButton :: OptionElement -> JQuery -> IO JQuery renderButton optionElem jq2 = renderRadio element optionElem context behaviour jq2 >>= (if optionElem == Prelude.last (cheOptions element) then return else appendT "<br>") renderPanes :: [OptionElement] -> JQuery -> IO JQuery renderPanes optionElems jq1 = foldlM (flip renderPane) jq1 optionElems where renderPane :: OptionElement -> JQuery -> IO JQuery renderPane SimpleOptionElem{} jq2 = return jq2 renderPane optionElem@DetailedOptionElem{ dcheElements } jq2 = appendT "<div>" jq2 >>= setAttrInside "id" (optionSectionId element optionElem) >>= inside >>= disappearJq >>= foldElements dcheElements context behaviour >>= JQ.parent renderInfoElement :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderInfoElement element _ _ jq = appendT "<table>" jq >>= setMouseEnterHandler (\_ -> setLongDescription element) >>= setMouseLeaveHandler (\_ -> unsetLongDescription element) >>= inside >>= appendT "<tbody>" >>= inside >>= appendT "<tr>" >>= inside >>= appendT "<td class='more-space intro' colspan='2'>" >>= setTextInside (ifiText $ formItem 
element) >>= JQ.parent >>= JQ.parent >>= JQ.parent >>= renderShortDesc element renderSimpleGroup :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderSimpleGroup element context behaviour jq = let lvl = Element.level element in --dumptIO $ fromMaybe "" (Element.maybeLabel element) appendT "<div class='simple-group'>" jq >>= setAttrInside "level" (show lvl) >>= (if lvl > 1 then addClassInside "framed" else return) >>= inside >>= renderHeading (Element.maybeLabel element) lvl >>= renderShortDesc element >>= foldElements (sgeElements element) context behaviour >>= JQ.parent renderOptionalGroup :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderOptionalGroup element context behaviour jq = let lvl = Element.level element in --dumptIO $ fromMaybe "" (Element.maybeLabel element) appendT "<div class='optional-group'>" jq >>= setAttrInside "level" (show lvl) >>= setMouseEnterHandler (\_ -> setLongDescription element) >>= setMouseLeaveHandler (\_ -> unsetLongDescription element) >>= inside >>= renderCheckbox >>= appendT (if not $ null (ogeElements element) then "▾" else "") >>= renderShortDesc element >>= renderOgContents >>= JQ.parent where renderCheckbox :: JQuery -> IO JQuery renderCheckbox jq1 = appendT "<input type='checkbox'>" jq1 >>= setAttrInside "name" (elementId element) >>= (if ogeChecked element then setAttrInside "checked" "checked" else return) >>= setClickHandler (handlerCombinator [handler, elementClickHandler element context behaviour]) >>= renderLabel element where handler ev = do sectionJq <- select $ "#" <> checkboxId element checkBox <- target ev checked <- isChecked checkBox _ <- if checked then appearJq sectionJq else disappearJq sectionJq return () renderOgContents :: JQuery -> IO JQuery renderOgContents jq1 = case ogeElements element of [] -> return jq1 _ -> appendT "<div class='optional-section'>" jq1 >>= setAttrInside "id" (checkboxId element) >>= inside >>= foldElements (ogeElements element) context behaviour >>= JQ.parent renderMultipleGroup :: FormElement -> FormContext -> ElemBehaviour -> JQuery -> IO JQuery renderMultipleGroup element context behaviour jq = let lvl = Element.level element in appendT "<div class='multiple-group'>" jq >>= addClassInside "framed" >>= setAttrInside "level" (show lvl) >>= inside >>= renderHeading (Element.maybeLabel element) lvl >>= renderShortDesc element >>= renderMgGroups (mgeGroups element) >>= renderAddButton >>= JQ.parent where renderMgGroups :: [ElemGroup] -> JQuery -> IO JQuery renderMgGroups groups jq1 = foldlM (flip renderMgGroup) jq1 groups renderMgGroup :: ElemGroup -> JQuery -> IO JQuery renderMgGroup group jq2 = --dumptIO $ show $ getGroupNo $ elementId $ head $ egElements group --dumptIO $ show $ head $ egElements group appendT "<table>" jq2 >>= inside -- MG item holder >>= appendT "<tbody>" >>= inside >>= appendT "<tr>" >>= inside >>= appendT "<td>" >>= inside >>= appendT "<div class='multiple-section'>" >>= inside >>= foldElements (egElements group) context behaviour >>= JQ.parent >>= JQ.parent >>= (if egNumber group > 0 then renderRemoveButton else return) >>= JQ.parent >>= JQ.parent >>= JQ.parent where renderRemoveButton :: JQuery -> IO JQuery renderRemoveButton jq3 = appendT "<td style='vertical-align: middle;'>" jq3 >>= inside >>= appendT (removeImg context) >>= setClickHandler removingHandler >>= JQ.parent where removingHandler :: Handler removingHandler ev = do minusButtonJq <- target ev tableJq <- JQ.parent minusButtonJq >>= JQ.parent >>= JQ.parent >>= JQ.parent -- img 
-> td -> tr -> tbody -> table _ <- removeJq tableJq return () renderAddButton :: JQuery -> IO JQuery renderAddButton jq2 = appendT (addImg context) jq2 >>= setAttrInside "count" "1" -- must be refactored after real adding of groups >>= setClickHandler addingHandler where addingHandler :: Handler addingHandler ev = do plusButtonJq <- target ev countStr <- getAttr "count" plusButtonJq let countNum = read (show countStr) :: Int _ <- setAttr "count" (show $ countNum + 1) plusButtonJq let newGroup = ElemGroup { egElements = map (setGroupOfElem $ Just newGroup) $ egElements $ Prelude.last $ mgeGroups element, egNumber = countNum } tableJq <- prev plusButtonJq _ <- renderMgGroup newGroup tableJq mapM_ (\e -> selectByName (elementId e) >>= mouseleave) $ egElements newGroup return () renderSubmitButtonElement :: FormElement -> FormContext -> JQuery -> IO JQuery renderSubmitButtonElement element _ jq = appendT "<table style='margin-top: 10px'>" jq >>= inside >>= appendT "<tbody>" >>= inside >>= appendT "<tr>" >>= inside >>= appendT "<td class='labeltd more-space' style='text-align: center'>" >>= inside >>= appendT "<input type='button' class='submit'>" -- >>= setClickHandler submitHandler >>= setAttrInside "value" (fromMaybe "Submit" (show <$> Element.maybeLabel element)) >>= JQ.parent >>= JQ.parent >>= JQ.parent >>= JQ.parent >>= renderShortDesc element renderSaveButtonElement :: FormElement -> FormContext -> JQuery -> IO JQuery renderSaveButtonElement element _ jq = appendT "<table style='margin-top: 10px'>" jq >>= inside >>= appendT "<tbody>" >>= inside >>= appendT "<tr>" >>= inside >>= appendT "<td class='labeltd more-space' style='text-align: center'>" >>= inside >>= appendT "<input type='submit'>" >>= setAttrInside "value" (fromMaybe "Submit" (show <$> Element.maybeLabel element)) >>= JQ.parent >>= JQ.parent >>= JQ.parent >>= JQ.parent >>= renderShortDesc element
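-- NOTE: the block below is an added usage sketch, not part of the original
-- file. It shows how the exported entry point `foldElements` is typically
-- driven: select a container with FormEngine.JQuery.select and fold the
-- elements into it. The selector "#form-container" and the argument names
-- are hypothetical; the element list, context, and behaviour record are
-- assumed to be built elsewhere.
--
--   renderForm :: [FormElement] -> FormContext -> ElemBehaviour -> IO ()
--   renderForm elems ctx behaviour = do
--     container <- select "#form-container"
--     _ <- foldElements elems ctx behaviour container
--     return ()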
DataStewardshipPortal/ds-form-engine
FormElement/Rendering.hs
Haskell
apache-2.0
21,774
{-# LANGUAGE NoMonomorphismRestriction #-}
module Tests.OpsTest where

import Ops
import Lambda
import Prelude ( ($), Int, (==), return, sequence, (>>=), and, (.), IO, Bool )
import qualified Control.Monad

test1 :: (LOps l) => l Int
test1 = app (lam $ \x -> lit 3 + x) (lit 2)

test2 :: (LOps l) => l Int
test2 = app (lam $ \x -> lit 3 * x) (lit 2)

test3 :: (LOps l) => l Int
test3 = app (lam $ \x -> lit 2 * x + lit 1) (lit 5 - lit 2)

test1ast :: IO Bool
test1ast = do
  t <- ast test1
  return $ t "" == "(λa.3+a) 2"

test1eval :: IO Bool
test1eval = return $ eval test1 == 5

test2ast :: IO Bool
test2ast = do
  t <- ast test2
  return $ t "" == "(λa.3*a) 2"

test2eval :: IO Bool
test2eval = return $ eval test2 == 6

test3ast :: IO Bool
test3ast = do
  t <- ast test3
  return $ t "" == "(λa.2*a+1) (5-2)"

test3eval :: IO Bool
test3eval = return $ eval test3 == 7

tests :: [IO Bool]
tests = [ test1ast
        , test1eval
        ]

runTests :: IO Bool
runTests = Control.Monad.liftM and $ sequence tests
agobi/sizechecking
Tests/OpsTest.hs
Haskell
bsd-2-clause
1,031
module HaskHOL.Lib.IndTypes.Pre2 where import HaskHOL.Core hiding (typeOf, lefts) import HaskHOL.Core.Kernel (typeOf) import qualified HaskHOL.Core.State as S (mkType) import HaskHOL.Deductive import HaskHOL.Lib.Pair import HaskHOL.Lib.Recursion import HaskHOL.Lib.Nums import HaskHOL.Lib.CalcNum import HaskHOL.Lib.WF import HaskHOL.Lib.IndTypesPre import qualified HaskHOL.Lib.IndTypes.Pre as Pre defineTypeRaw :: IndTypesPreCtxt thry => [(HOLType, [(Text, [HOLType])])] -> HOL Theory thry (HOLThm, HOLThm) defineTypeRaw def = do (ith, rth) <- Pre.defineTypeRaw def rth' <- generalizeRecursionTheorem rth return (ith, rth') generalizeRecursionTheorem :: BoolCtxt thry => HOLThm -> HOL cls thry HOLThm generalizeRecursionTheorem thm = let (_, ebod) = stripForall $ concl thm (evs, bod) = stripExists ebod n = length evs in if n == 1 then return thm else let tys = map (\ i -> mkVarType $ "Z" `append` textShow i) [0..(n-1)] in do sty <- mkSum tys inls <- mkInls sty outls <- mkOutls sty zty <- typeOf `fmap` (rand . snd . stripForall . head $ conjuncts bod) ith <- primINST_TYPE [(zty, sty)] thm let (_, ebod') = stripForall $ concl ith (evs', bod') = stripExists ebod' fns' <- map2M mkNewfun evs' outls fnalist <- zip evs' `fmap` mapM (rator <=< lhs . concl) fns' let inlalist = zip evs' inls outlalist = zip evs' outls defs <- mapM (hackClause outlalist inlalist) $ conjuncts bod' jth <- ruleBETA $ ruleSPECL (map fst defs) ith bth <- primASSUME . snd . stripExists $ concl jth cth <- foldr1M ruleCONJ =<< mapM (finishClause outlalist) =<< ruleCONJUNCTS bth dth <- ruleELIM_OUTCOMBS cth eth <- ruleGEN_REWRITE convONCE_DEPTH (map ruleSYM fns') dth fth <- foldrM ruleSIMPLE_EXISTS eth (map snd fnalist) let dtms = map (head . hyp) fns' gth <- foldrM (\ e th -> do (l, r) <- destEq e th' <- ruleDISCH e th th'' <- primINST [(l, r)] th' ruleMP th'' $ primREFL r) fth dtms hth <- rulePROVE_HYP jth $ foldrM ruleSIMPLE_CHOOSE gth evs' xvs <- mapM (fmap (fst . stripComb) . (rand . snd . stripForall)) . conjuncts $ concl eth ruleGENL xvs hth where ruleELIM_OUTCOMBS :: BoolCtxt thry => HOLThm -> HOL cls thry HOLThm ruleELIM_OUTCOMBS = ruleGEN_REWRITE convTOP_DEPTH [getRecursiveDefinition "OUTL", getRecursiveDefinition "OUTR"] mkSum :: [HOLType] -> HOL cls thry HOLType mkSum tys = let k = length tys in if k == 1 then return $! head tys else do (tys1, tys2) <- trySplitAt (k `div` 2) tys tys1' <- mkSum tys1 tys2' <- mkSum tys2 mkType "sum" [tys1', tys2'] mkInls :: HOLType -> HOL cls thry [HOLTerm] mkInls typ = do bods <- mkInlsRec typ mapM (\ t -> mkAbs (try' $ findTerm isVar t) t) bods where mkInlsRec :: HOLType -> HOL cls thry [HOLTerm] mkInlsRec ty@TyVar{} = sequence [mkVar "x" ty] mkInlsRec ty = do (_, [ty1, ty2]) <- destType ty inls1 <- mkInlsRec ty1 inls2 <- mkInlsRec ty2 inl <- mkConst "INL" [(tyA, ty1), (tyB, ty2)] inr <- mkConst "INR" [(tyA, ty1), (tyB, ty2)] insl1' <- mapM (mkComb inl) inls1 insl2' <- mapM (mkComb inr) inls2 return $! insl1' ++ insl2' mkOutls :: HOLType -> HOL cls thry [HOLTerm] mkOutls typ = let x = mkVar "x" typ in do inls <- mkOutlsRec x typ mapM (mkAbs x) inls where mkOutlsRec :: HOLTermRep tm cls thry => tm -> HOLType -> HOL cls thry [HOLTerm] mkOutlsRec sof TyVar{} = do tm <- toHTm sof return [tm] mkOutlsRec sof ty = do (_, [ty1, ty2]) <- destType ty outl <- mkConst "OUTL" [(tyA, ty1), (tyB, ty2)] outr <- mkConst "OUTR" [(tyA, ty1), (tyB, ty2)] outl' <- mkOutlsRec (mkComb outl sof) ty1 outr' <- mkOutlsRec (mkComb outr sof) ty2 return $! 
outl' ++ outr' mkNewfun :: HOLTerm -> HOLTerm -> HOL cls thry HOLThm mkNewfun fn outl = do (s, ty) <- destVar fn dty <- (head . snd) `fmap` destType ty let x = mkVar "x" dty (y, bod) <- destAbs outl fnx <- mkComb fn x r <- mkAbs x =<< varSubst [(y, fnx)] bod let l = mkVar s $ typeOf r etm <- mkEq l r ruleRIGHT_BETAS [x] $ primASSUME etm hackClause :: HOLTermEnv -> HOLTermEnv -> HOLTerm -> HOL cls thry (HOLTerm, HOLTerm) hackClause outlalist inlalist tm = let (_, bod) = stripForall tm in do (l, r) <- destEq bod let (fn, args) = stripComb r pargs <- mapM (\ a -> do g <- genVar $ typeOf a if isVar a then return (g, g) else do outl <- flip assoc outlalist =<< rator a outl' <- mkComb outl g return (outl', g)) args let (args', args'') = unzip pargs inl <- flip assoc inlalist =<< rator l rty <- (head . snd) `fmap` (destType $ typeOf inl) nty <- foldrM (mkFunTy . typeOf) rty args' (fname, _) <- destVar fn let fn' = mkVar fname nty r' <- listMkAbs args'' =<< mkComb inl =<< listMkComb fn' args' return (r', fn) finishClause :: BoolCtxt thry => HOLTermEnv -> HOLThm -> HOL cls thry HOLThm finishClause outlalist t = let (avs, bod) = stripForall $ concl t in do outl <- flip assoc outlalist =<< rator (lHand bod) th' <- ruleSPECL avs t ruleGENL avs . ruleBETA $ ruleAP_TERM outl th' proveConstructorsInjective :: PairCtxt thry => HOLThm -> HOL cls thry HOLThm proveConstructorsInjective ax = let cls = conjuncts . snd . stripExists . snd . stripForall $ concl ax in do pats <- mapM (rand <=< lHand . snd . stripForall) cls foldr1M ruleCONJ =<< mapFilterM proveDistinctness pats where ruleDEPAIR :: PairCtxt thry => HOLThm -> HOL cls thry HOLThm ruleDEPAIR = ruleGEN_REWRITE convTOP_SWEEP [thmPAIR_EQ] proveDistinctness :: PairCtxt thry => HOLTerm -> HOL cls thry HOLThm proveDistinctness pat = let (f, args) = stripComb pat in do rt <- foldr1M mkPair args ty <- mkFunTy (typeOf pat) $ typeOf rt fn <- genVar ty dtm <- mkEq (mkComb fn pat) rt eth <- proveRecursiveFunctionsExist ax =<< listMkForall args dtm let args' = variants args args atm <- mkEq pat =<< listMkComb f args' ath <- primASSUME atm bth <- ruleAP_TERM fn ath cth1 <- ruleSPECL args $ primASSUME =<< snd `fmap` (destExists $ concl eth) cth2 <- primINST (zip args args') cth1 pth <- primTRANS (primTRANS (ruleSYM cth1) bth) cth2 qth <- ruleDEPAIR pth let qtm = concl qth qths <- ruleCONJUNCTS $ primASSUME qtm fth <- primREFL f rth <- foldlM primMK_COMB fth qths tth <- ruleIMP_ANTISYM (ruleDISCH atm qth) $ ruleDISCH qtm rth uth <- ruleGENL args $ ruleGENL args' tth rulePROVE_HYP eth $ ruleSIMPLE_CHOOSE fn uth proveDistinct_pth :: ClassicCtxt thry => HOL cls thry HOLThm proveDistinct_pth = cacheProof "proveDistinct_pth" ctxtClassic $ ruleTAUT [txt| a ==> F <=> ~a |] proveConstructorsDistinct :: WFCtxt thry => HOLThm -> HOL cls thry HOLThm proveConstructorsDistinct ax = let cls = conjuncts . snd . stripExists . snd . stripForall $ concl ax in do lefts <- mapM (destComb <=< lHand . snd . stripForall) cls let fns = foldr (insert . fst) [] lefts pats = map (\ f -> map snd (filter (\ (x,_) -> x == f) lefts)) fns foldr1M ruleCONJ =<< (foldr1 (++)) `fmap` (mapFilterM proveDistinct pats) where allopairs :: Monad m => (a -> a -> m a) -> [a] -> [a] -> m [a] allopairs _ [] _ = return [] allopairs f (l:ls) (_:ms) = do xs <- mapM (f l) ms ys <- allopairs f ls ms return $! xs ++ ys allopairs _ _ _ = return [] ruleNEGATE :: (ClassicCtxt thry, HOLThmRep thm cls thry) => thm -> HOL cls thry HOLThm ruleNEGATE = ruleGEN_ALL . 
ruleCONV (convREWR proveDistinct_pth) ruleREWRITE' :: (BoolCtxt thry, HOLThmRep thm cls thry) => HOLTerm -> thm -> HOL cls thry HOLThm ruleREWRITE' bod th = do ths <- ruleCONJUNCTS $ primASSUME bod ruleGEN_REWRITE convONCE_DEPTH ths th proveDistinct :: WFCtxt thry => [HOLTerm] -> HOL cls thry [HOLThm] proveDistinct pat = do tyNum <- S.mkType "num" ([]::[HOLType]) nms <- mapM mkNumeral ([0..(length pat -1)] :: [Int]) fn <- genVar =<< mkType "fun" [typeOf $ head pat, tyNum] ls <- mapM (mkComb fn) pat defs <- map2M (\ l r -> do l' <- frees `fmap` rand l listMkForall l' =<< mkEq l r) ls nms eth <- proveRecursiveFunctionsExist ax =<< listMkConj defs (ev, bod) <- destExists $ concl eth pat' <-mapM (\ t -> let (f, args) = if isNumeral t then (t, []) else stripComb t in listMkComb f $ variants args args) pat pairs <- allopairs mkEq pat pat' nths <- mapM (ruleREWRITE' bod . ruleAP_TERM fn . primASSUME) pairs fths <- map2M (\ t th -> ruleNEGATE . ruleDISCH t $ ruleCONV convNUM_EQ th) pairs nths ruleCONJUNCTS . rulePROVE_HYP eth . ruleSIMPLE_CHOOSE ev $ foldr1M ruleCONJ fths
ecaustin/haskhol-math
src/HaskHOL/Lib/IndTypes/Pre2.hs
Haskell
bsd-2-clause
11,566
{-# LANGUAGE ForeignFunctionInterface #-}
module Grenade.Layers.Internal.Pooling (
    poolForward
  , poolBackward
  ) where

import qualified Data.Vector.Storable as U ( unsafeToForeignPtr0, unsafeFromForeignPtr0 )

import           Foreign ( mallocForeignPtrArray, withForeignPtr )
import           Foreign.Ptr ( Ptr )

import           Numeric.LinearAlgebra ( Matrix, flatten )
import qualified Numeric.LinearAlgebra.Devel as U

import           System.IO.Unsafe ( unsafePerformIO )

poolForward :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Matrix Double -> Matrix Double
poolForward channels height width kernelRows kernelColumns strideRows strideColumns dataIm =
  let vec             = flatten dataIm
      rowOut          = (height - kernelRows) `div` strideRows + 1
      colOut          = (width - kernelColumns) `div` strideColumns + 1
      numberOfPatches = rowOut * colOut
  in unsafePerformIO $ do
    outPtr <- mallocForeignPtrArray (numberOfPatches * channels)
    let (inPtr, _) = U.unsafeToForeignPtr0 vec

    withForeignPtr inPtr $ \inPtr' ->
      withForeignPtr outPtr $ \outPtr' ->
        pool_forwards_cpu inPtr' channels height width kernelRows kernelColumns strideRows strideColumns outPtr'

    let matVec = U.unsafeFromForeignPtr0 outPtr (numberOfPatches * channels)
    return $ U.matrixFromVector U.RowMajor (rowOut * channels) colOut matVec

foreign import ccall unsafe
    pool_forwards_cpu
      :: Ptr Double -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Ptr Double -> IO ()

poolBackward :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Matrix Double -> Matrix Double -> Matrix Double
poolBackward channels height width kernelRows kernelColumns strideRows strideColumns dataIm dataGrad =
  let vecIm   = flatten dataIm
      vecGrad = flatten dataGrad
  in unsafePerformIO $ do
    outPtr <- mallocForeignPtrArray (height * width * channels)
    let (imPtr, _)   = U.unsafeToForeignPtr0 vecIm
    let (gradPtr, _) = U.unsafeToForeignPtr0 vecGrad

    withForeignPtr imPtr $ \imPtr' ->
      withForeignPtr gradPtr $ \gradPtr' ->
        withForeignPtr outPtr $ \outPtr' ->
          pool_backwards_cpu imPtr' gradPtr' channels height width kernelRows kernelColumns strideRows strideColumns outPtr'

    let matVec = U.unsafeFromForeignPtr0 outPtr (height * width * channels)
    return $ U.matrixFromVector U.RowMajor (height * channels) width matVec

foreign import ccall unsafe
    pool_backwards_cpu
      :: Ptr Double -> Ptr Double -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Ptr Double -> IO ()
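-- NOTE: the block below is an added usage sketch, not part of the original
-- module. It pools a single 4x4 channel with a 2x2 kernel and stride 2;
-- (><) comes from Numeric.LinearAlgebra and the C kernels above must be
-- linked into the build. Assuming the kernels implement max pooling (as
-- Grenade's pooling layer does), the expected result is the per-window
-- maxima, i.e. a 2x2 matrix containing 6, 8, 14, 16.
--
--   ghci> import Numeric.LinearAlgebra ((><))
--   ghci> poolForward 1 4 4 2 2 2 2 ((4><4) [1..16])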
HuwCampbell/grenade
src/Grenade/Layers/Internal/Pooling.hs
Haskell
bsd-2-clause
2,565
{-# LANGUAGE TemplateHaskell #-}
module Language.Drasil.Chunk.DefinedQuantity
  (DefinedQuantityDict, dqd, dqdNoUnit, dqd', dqdQd, dqdWr) where

import Language.Drasil.Classes.Core (HasUID(uid), HasSymbol(symbol))
import Language.Drasil.Classes (NamedIdea(term), Idea(getA), Concept,
  Definition(defn), ConceptDomain(cdom), HasSpace(typ), IsUnit, Quantity)
import Language.Drasil.Chunk.Concept (ConceptChunk, cw)
import Language.Drasil.Chunk.UnitDefn (UnitDefn, unitWrapper, MayHaveUnit(getUnit))

import Language.Drasil.Space (Space)
import Language.Drasil.Stages (Stage)
import Language.Drasil.Symbol (Symbol)

import Control.Lens ((^.), makeLenses, view)

-- | DefinedQuantity = Concept + Quantity
data DefinedQuantityDict = DQD { _con   :: ConceptChunk
                               , _symb  :: Stage -> Symbol
                               , _spa   :: Space
                               , _unit' :: Maybe UnitDefn
                               }
makeLenses ''DefinedQuantityDict

instance HasUID        DefinedQuantityDict where uid = con . uid
instance Eq            DefinedQuantityDict where a == b = (a ^. uid) == (b ^. uid)
instance NamedIdea     DefinedQuantityDict where term = con . term
instance Idea          DefinedQuantityDict where getA = getA . view con
instance Definition    DefinedQuantityDict where defn = con . defn
instance ConceptDomain DefinedQuantityDict where cdom = cdom . view con
instance HasSpace      DefinedQuantityDict where typ = spa
instance HasSymbol     DefinedQuantityDict where symbol = view symb
instance Quantity      DefinedQuantityDict where
instance MayHaveUnit   DefinedQuantityDict where getUnit = view unit'

-- For when the symbol is constant through stages
dqd :: (IsUnit u) => ConceptChunk -> Symbol -> Space -> u -> DefinedQuantityDict
dqd c s sp = DQD c (const s) sp . Just . unitWrapper

dqdNoUnit :: ConceptChunk -> Symbol -> Space -> DefinedQuantityDict
dqdNoUnit c s sp = DQD c (const s) sp Nothing

-- For when the symbol changes depending on the stage
dqd' :: ConceptChunk -> (Stage -> Symbol) -> Space -> Maybe UnitDefn -> DefinedQuantityDict
dqd' = DQD

-- When the input already has all the necessary information. A 'projection' operator
dqdWr :: (Quantity c, Concept c, MayHaveUnit c) => c -> DefinedQuantityDict
dqdWr c = DQD (cw c) (symbol c) (c ^. typ) (getUnit c)

-- When we want to merge a quantity and a concept. This is suspicious.
dqdQd :: (Quantity c, MayHaveUnit c) => c -> ConceptChunk -> DefinedQuantityDict
dqdQd c cc = DQD cc (symbol c) (c ^. typ) (getUnit c)
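-- NOTE: the block below is an added usage sketch, not part of the original
-- module. `speedConcept` (a ConceptChunk) and `vSymb` (a Symbol) are
-- hypothetical names assumed to be defined elsewhere; Real is assumed to be
-- the continuous Space constructor from Language.Drasil.Space.
--
--   speedQD :: DefinedQuantityDict
--   speedQD = dqdNoUnit speedConcept vSymb Real
--
-- A chunk that is already a Quantity, a Concept, and MayHaveUnit can instead
-- be projected directly with `dqdWr`.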
JacquesCarette/literate-scientific-software
code/drasil-lang/Language/Drasil/Chunk/DefinedQuantity.hs
Haskell
bsd-2-clause
2,546
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Game.Types where

import Data.Matrix
import GHC.Generics
import Data.Aeson
import Data.Aeson.TH
import Data.Functor

data Move = Move { player :: String, outerPos :: Int, innerPos :: Int }
  deriving (Generic, Show, Read, Eq)

data Square = X | O | Empty | Both
  deriving (Generic, Show, Read, Eq)

data Game = Game { playerX   :: String
                 , playerO   :: String
                 , lastMove  :: Move
                 , board     :: [[Square]]
                 , metaBoard :: [Square]
                 , moves     :: Int
                 , gameWon   :: Square
                 } deriving (Generic, Show, Eq)

newGame :: String -> String -> Game
newGame playerX playerO =
  Game playerX playerO (Move "None" 0 0)
       (map (\_ -> (map (const Empty) [1..9])) [1..9])
       (map (const Empty) [1..9])
       0
       Empty

instance ToJSON Move
instance FromJSON Move

instance ToJSON Square
instance FromJSON Square

instance ToJSON Game
instance FromJSON Game
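-- NOTE: the block below is an added usage sketch, not part of the original
-- file. It starts a fresh game and serialises it with the generic aeson
-- instances declared above (encode comes from Data.Aeson, already imported).
--
--   ghci> let g = newGame "alice" "bob"
--   ghci> moves g
--   0
--   ghci> gameWon g
--   Empty
--   ghci> encode g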
octopuscabbage/UltimateTicTacToeServer
src/Game/Types.hs
Haskell
bsd-3-clause
890
-- | Conduit of keys pressed by xinput

module  where
chrisdone/xinput-conduit
src/.hs
Haskell
bsd-3-clause
54
{-# OPTIONS -fno-implicit-prelude #-}
-----------------------------------------------------------------------------
-- |
-- Module      :  Foreign.C.String
-- Copyright   :  (c) The FFI task force 2001
-- License     :  BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer  :  ffi@haskell.org
-- Stability   :  provisional
-- Portability :  portable
--
-- Utilities for primitive marshaling
--
-----------------------------------------------------------------------------

module Foreign.C.String (   -- representation of strings in C

  CString,           -- = Ptr CChar
  CStringLen,        -- = (CString, Int)

  -- conversion of C strings into Haskell strings
  --
  peekCString,       -- :: CString    -> IO String
  peekCStringLen,    -- :: CStringLen -> IO String

  -- conversion of Haskell strings into C strings
  --
  newCString,        -- :: String -> IO CString
  newCStringLen,     -- :: String -> IO CStringLen

  -- conversion of Haskell strings into C strings using temporary storage
  --
  withCString,       -- :: String -> (CString    -> IO a) -> IO a
  withCStringLen,    -- :: String -> (CStringLen -> IO a) -> IO a

  -- conversion between Haskell and C characters *ignoring* the encoding
  --
  castCharToCChar,   -- :: Char -> CChar
  castCCharToChar,   -- :: CChar -> Char

  ) where

import Foreign.Marshal.Array
import Foreign.C.Types
import Foreign.Ptr
import Foreign.Storable

import Data.Word
import Data.Char ( chr, ord )

-----------------------------------------------------------------------------
-- Strings

-- representation of strings in C
-- ------------------------------

type CString    = Ptr CChar       -- conventional NUL terminates strings
type CStringLen = (CString, Int)  -- strings with explicit length

-- exported functions
-- ------------------
--
-- * the following routines apply the default conversion when converting the
--   C-land character encoding into the Haskell-land character encoding
--
--   ** NOTE: The current implementation doesn't handle conversions yet! **
--
-- * the routines using an explicit length tolerate NUL characters in the
--   middle of a string
--

-- marshal a NUL terminated C string into a Haskell string
--
peekCString    :: CString -> IO String
peekCString cp  = do cs <- peekArray0 nUL cp; return (cCharsToChars cs)

-- marshal a C string with explicit length into a Haskell string
--
peekCStringLen           :: CStringLen -> IO String
peekCStringLen (cp, len)  = do cs <- peekArray len cp; return (cCharsToChars cs)

-- marshal a Haskell string into a NUL terminated C strings
--
-- * the Haskell string may *not* contain any NUL characters
--
-- * new storage is allocated for the C string and must be explicitly freed
--
newCString :: String -> IO CString
newCString  = newArray0 nUL . charsToCChars

-- marshal a Haskell string into a C string (ie, character array) with
-- explicit length information
--
-- * new storage is allocated for the C string and must be explicitly freed
--
newCStringLen     :: String -> IO CStringLen
newCStringLen str  = do a <- newArray (charsToCChars str)
                        return (pairLength str a)

-- marshal a Haskell string into a NUL terminated C strings using temporary
-- storage
--
-- * the Haskell string may *not* contain any NUL characters
--
-- * see the lifetime constraints of `MarshalAlloc.alloca'
--
withCString :: String -> (CString -> IO a) -> IO a
withCString  = withArray0 nUL . charsToCChars

-- marshal a Haskell string into a NUL terminated C strings using temporary
-- storage
--
-- * the Haskell string may *not* contain any NUL characters
--
-- * see the lifetime constraints of `MarshalAlloc.alloca'
--
withCStringLen         :: String -> (CStringLen -> IO a) -> IO a
withCStringLen str act  = withArray (charsToCChars str) $ act . pairLength str

-- auxilliary definitions
-- ----------------------

-- C's end of string character
--
nUL :: CChar
nUL  = 0

-- pair a C string with the length of the given Haskell string
--
pairLength :: String -> CString -> CStringLen
pairLength  = flip (,) . length

-- cast [CChar] to [Char]
--
cCharsToChars :: [CChar] -> [Char]
cCharsToChars xs  = map castCCharToChar xs

-- cast [Char] to [CChar]
--
charsToCChars :: [Char] -> [CChar]
charsToCChars xs  = map castCharToCChar xs

castCCharToChar :: CChar -> Char
castCCharToChar ch = chr (fromIntegral (fromIntegral ch :: Word8))

castCharToCChar :: Char -> CChar
castCharToCChar ch = fromIntegral (ord ch)
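-- NOTE: the block below is an added usage sketch, not part of the original
-- module. It passes a Haskell String to a C function through withCString;
-- the foreign import is illustrative (strlen from the C standard library)
-- and CSize comes from Foreign.C.Types.
--
--   foreign import ccall unsafe "string.h strlen"
--     c_strlen :: CString -> IO CSize
--
--   stringLength :: String -> IO Int
--   stringLength s = withCString s $ \cs -> do
--     n <- c_strlen cs
--     return (fromIntegral n)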
OS2World/DEV-UTIL-HUGS
libraries/Foreign/C/String.hs
Haskell
bsd-3-clause
4,462
{-# LANGUAGE LambdaCase, PatternGuards, ViewPatterns #-} {-# OPTIONS_GHC -fwarn-incomplete-patterns #-} module Idris.Elab.Term where import Idris.AbsSyntax import Idris.AbsSyntaxTree import Idris.DSL import Idris.Delaborate import Idris.Error import Idris.ProofSearch import Idris.Output (pshow) import Idris.Core.CaseTree (SC, SC'(STerm), findCalls, findUsedArgs) import Idris.Core.Elaborate hiding (Tactic(..)) import Idris.Core.TT import Idris.Core.Evaluate import Idris.Core.Unify import Idris.Core.ProofTerm (getProofTerm) import Idris.Core.Typecheck (check, recheck, isType) import Idris.Coverage (buildSCG, checkDeclTotality, genClauses, recoverableCoverage, validCoverageCase) import Idris.ErrReverse (errReverse) import Idris.ElabQuasiquote (extractUnquotes) import Idris.Elab.Utils import Idris.Reflection import qualified Util.Pretty as U import Control.Applicative ((<$>)) import Control.Monad import Control.Monad.State.Strict import Data.List import qualified Data.Map as M import Data.Maybe (mapMaybe, fromMaybe, catMaybes) import qualified Data.Set as S import qualified Data.Text as T import Debug.Trace data ElabMode = ETyDecl | ELHS | ERHS deriving Eq data ElabResult = ElabResult { resultTerm :: Term -- ^ The term resulting from elaboration , resultMetavars :: [(Name, (Int, Maybe Name, Type))] -- ^ Information about new metavariables , resultCaseDecls :: [PDecl] -- ^ Deferred declarations as the meaning of case blocks , resultContext :: Context -- ^ The potentially extended context from new definitions , resultTyDecls :: [RDeclInstructions] -- ^ Meta-info about the new type declarations , resultHighlighting :: [(FC, OutputAnnotation)] } -- Using the elaborator, convert a term in raw syntax to a fully -- elaborated, typechecked term. -- -- If building a pattern match, we convert undeclared variables from -- holes to pattern bindings. -- Also find deferred names in the term and their types build :: IState -> ElabInfo -> ElabMode -> FnOpts -> Name -> PTerm -> ElabD ElabResult build ist info emode opts fn tm = do elab ist info emode opts fn tm let tmIn = tm let inf = case lookupCtxt fn (idris_tyinfodata ist) of [TIPartial] -> True _ -> False when (not pattern) $ solveAutos ist fn True hs <- get_holes ivs <- get_instances ptm <- get_term -- Resolve remaining type classes. 
Two passes - first to get the -- default Num instances, second to clean up the rest when (not pattern) $ mapM_ (\n -> when (n `elem` hs) $ do focus n g <- goal try (resolveTC True False 10 g fn ist) (movelast n)) ivs ivs <- get_instances hs <- get_holes when (not pattern) $ mapM_ (\n -> when (n `elem` hs) $ do focus n g <- goal ptm <- get_term resolveTC True True 10 g fn ist) ivs tm <- get_term ctxt <- get_context probs <- get_probs u <- getUnifyLog hs <- get_holes when (not pattern) $ traceWhen u ("Remaining holes:\n" ++ show hs ++ "\n" ++ "Remaining problems:\n" ++ qshow probs) $ do unify_all; matchProblems True; unifyProblems probs <- get_probs case probs of [] -> return () ((_,_,_,_,e,_,_):es) -> traceWhen u ("Final problems:\n" ++ qshow probs ++ "\nin\n" ++ show tm) $ if inf then return () else lift (Error e) when tydecl (do mkPat update_term liftPats update_term orderPats) EState is _ impls highlights <- getAux tt <- get_term ctxt <- get_context let (tm, ds) = runState (collectDeferred (Just fn) (map fst is) ctxt tt) [] log <- getLog if log /= "" then trace log $ return (ElabResult tm ds (map snd is) ctxt impls highlights) else return (ElabResult tm ds (map snd is) ctxt impls highlights) where pattern = emode == ELHS tydecl = emode == ETyDecl mkPat = do hs <- get_holes tm <- get_term case hs of (h: hs) -> do patvar h; mkPat [] -> return () -- Build a term autogenerated as a typeclass method definition -- (Separate, so we don't go overboard resolving things that we don't -- know about yet on the LHS of a pattern def) buildTC :: IState -> ElabInfo -> ElabMode -> FnOpts -> Name -> PTerm -> ElabD ElabResult buildTC ist info emode opts fn tm = do -- set name supply to begin after highest index in tm let ns = allNamesIn tm let tmIn = tm let inf = case lookupCtxt fn (idris_tyinfodata ist) of [TIPartial] -> True _ -> False initNextNameFrom ns elab ist info emode opts fn tm probs <- get_probs tm <- get_term case probs of [] -> return () ((_,_,_,_,e,_,_):es) -> if inf then return () else lift (Error e) dots <- get_dotterm -- 'dots' are the PHidden things which have not been solved by -- unification when (not (null dots)) $ lift (Error (CantMatch (getInferTerm tm))) EState is _ impls highlights <- getAux tt <- get_term ctxt <- get_context let (tm, ds) = runState (collectDeferred (Just fn) (map fst is) ctxt tt) [] log <- getLog if (log /= "") then trace log $ return (ElabResult tm ds (map snd is) ctxt impls highlights) else return (ElabResult tm ds (map snd is) ctxt impls highlights) where pattern = emode == ELHS -- return whether arguments of the given constructor name can be -- matched on. If they're polymorphic, no, unless the type has beed made -- concrete by the time we get around to elaborating the argument. getUnmatchable :: Context -> Name -> [Bool] getUnmatchable ctxt n | isDConName n ctxt && n /= inferCon = case lookupTyExact n ctxt of Nothing -> [] Just ty -> checkArgs [] [] ty where checkArgs :: [Name] -> [[Name]] -> Type -> [Bool] checkArgs env ns (Bind n (Pi _ t _) sc) = let env' = case t of TType _ -> n : env _ -> env in checkArgs env' (intersect env (refsIn t) : ns) (instantiate (P Bound n t) sc) checkArgs env ns t = map (not . 
null) (reverse ns) getUnmatchable ctxt n = [] data ElabCtxt = ElabCtxt { e_inarg :: Bool, e_isfn :: Bool, -- ^ Function part of application e_guarded :: Bool, e_intype :: Bool, e_qq :: Bool, e_nomatching :: Bool -- ^ can't pattern match } initElabCtxt = ElabCtxt False False False False False False goal_polymorphic :: ElabD Bool goal_polymorphic = do ty <- goal case ty of P _ n _ -> do env <- get_env case lookup n env of Nothing -> return False _ -> return True _ -> return False -- | Returns the set of declarations we need to add to complete the -- definition (most likely case blocks to elaborate) as well as -- declarations resulting from user tactic scripts (%runElab) elab :: IState -> ElabInfo -> ElabMode -> FnOpts -> Name -> PTerm -> ElabD () elab ist info emode opts fn tm = do let loglvl = opt_logLevel (idris_options ist) when (loglvl > 5) $ unifyLog True compute -- expand type synonyms, etc let fc = maybe "(unknown)" elabE initElabCtxt (elabFC info) tm -- (in argument, guarded, in type, in qquote) est <- getAux sequence_ (get_delayed_elab est) end_unify ptm <- get_term when pattern -- convert remaining holes to pattern vars (do update_term orderPats unify_all matchProblems False -- only the ones we matched earlier unifyProblems mkPat) where pattern = emode == ELHS bindfree = emode == ETyDecl || emode == ELHS get_delayed_elab est = let ds = delayed_elab est in map snd $ sortBy (\(p1, _) (p2, _) -> compare p1 p2) ds tcgen = Dictionary `elem` opts reflection = Reflection `elem` opts isph arg = case getTm arg of Placeholder -> (True, priority arg) tm -> (False, priority arg) toElab ina arg = case getTm arg of Placeholder -> Nothing v -> Just (priority arg, elabE ina (elabFC info) v) toElab' ina arg = case getTm arg of Placeholder -> Nothing v -> Just (elabE ina (elabFC info) v) mkPat = do hs <- get_holes tm <- get_term case hs of (h: hs) -> do patvar h; mkPat [] -> return () -- | elabE elaborates an expression, possibly wrapping implicit coercions -- and forces/delays. If you make a recursive call in elab', it is -- normally correct to call elabE - the ones that don't are desugarings -- typically elabE :: ElabCtxt -> Maybe FC -> PTerm -> ElabD () elabE ina fc' t = do solved <- get_recents as <- get_autos hs <- get_holes -- If any of the autos use variables which have recently been solved, -- have another go at solving them now. 
mapM_ (\(a, ns) -> if any (\n -> n `elem` solved) ns && head hs /= a then solveAuto ist fn False a else return ()) as itm <- if not pattern then insertImpLam ina t else return t ct <- insertCoerce ina itm t' <- insertLazy ct g <- goal tm <- get_term ps <- get_probs hs <- get_holes --trace ("Elaborating " ++ show t' ++ " in " ++ show g -- ++ "\n" ++ show tm -- ++ "\nholes " ++ show hs -- ++ "\nproblems " ++ show ps -- ++ "\n-----------\n") $ --trace ("ELAB " ++ show t') $ let fc = fileFC "Force" env <- get_env handleError (forceErr t' env) (elab' ina fc' t') (elab' ina fc' (PApp fc (PRef fc (sUN "Force")) [pimp (sUN "t") Placeholder True, pimp (sUN "a") Placeholder True, pexp ct])) forceErr orig env (CantUnify _ (t,_) (t',_) _ _ _) | (P _ (UN ht) _, _) <- unApply (normalise (tt_ctxt ist) env t), ht == txt "Lazy'" = notDelay orig forceErr orig env (CantUnify _ (t,_) (t',_) _ _ _) | (P _ (UN ht) _, _) <- unApply (normalise (tt_ctxt ist) env t'), ht == txt "Lazy'" = notDelay orig forceErr orig env (InfiniteUnify _ t _) | (P _ (UN ht) _, _) <- unApply (normalise (tt_ctxt ist) env t), ht == txt "Lazy'" = notDelay orig forceErr orig env (Elaborating _ _ t) = forceErr orig env t forceErr orig env (ElaboratingArg _ _ _ t) = forceErr orig env t forceErr orig env (At _ t) = forceErr orig env t forceErr orig env t = False notDelay t@(PApp _ (PRef _ (UN l)) _) | l == txt "Delay" = False notDelay _ = True local f = do e <- get_env return (f `elem` map fst e) -- | Is a constant a type? constType :: Const -> Bool constType (AType _) = True constType StrType = True constType VoidType = True constType _ = False -- "guarded" means immediately under a constructor, to help find patvars elab' :: ElabCtxt -- ^ (in an argument, guarded, in a type, in a quasiquote) -> Maybe FC -- ^ The closest FC in the syntax tree, if applicable -> PTerm -- ^ The term to elaborate -> ElabD () elab' ina fc (PNoImplicits t) = elab' ina fc t -- skip elabE step elab' ina fc (PType fc') = do apply RType [] solve highlightSource fc' (AnnType "Type" "The type of types") elab' ina fc (PUniverse u) = do apply (RUType u) []; solve -- elab' (_,_,inty) (PConstant c) -- | constType c && pattern && not reflection && not inty -- = lift $ tfail (Msg "Typecase is not allowed") elab' ina fc tm@(PConstant fc' c) | pattern && not reflection && not (e_qq ina) && not (e_intype ina) && isTypeConst c = lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm) | pattern && not reflection && not (e_qq ina) && e_nomatching ina = lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm) | otherwise = do apply (RConstant c) [] solve highlightSource fc' (AnnConst c) elab' ina fc (PQuote r) = do fill r; solve elab' ina _ (PTrue fc _) = do hnf_compute g <- goal case g of TType _ -> elab' ina (Just fc) (PRef fc unitTy) UType _ -> elab' ina (Just fc) (PRef fc unitTy) _ -> elab' ina (Just fc) (PRef fc unitCon) elab' ina fc (PResolveTC (FC "HACK" _ _)) -- for chasing parent classes = do g <- goal; resolveTC False False 5 g fn ist elab' ina fc (PResolveTC fc') = do c <- getNameFrom (sMN 0 "class") instanceArg c -- Elaborate the equality type first homogeneously, then -- heterogeneously as a fallback elab' ina _ (PApp fc (PRef _ n) args) | n == eqTy, [Placeholder, Placeholder, l, r] <- map getTm args = try (do tyn <- getNameFrom (sMN 0 "aqty") claim tyn RType movelast tyn elab' ina (Just fc) (PApp fc (PRef fc eqTy) [pimp (sUN "A") (PRef NoFC tyn) True, pimp (sUN "B") (PRef NoFC tyn) False, pexp l, pexp r])) (do atyn <- 
getNameFrom (sMN 0 "aqty") btyn <- getNameFrom (sMN 0 "bqty") claim atyn RType movelast atyn claim btyn RType movelast btyn elab' ina (Just fc) (PApp fc (PRef fc eqTy) [pimp (sUN "A") (PRef NoFC atyn) True, pimp (sUN "B") (PRef NoFC btyn) False, pexp l, pexp r])) elab' ina _ (PPair fc _ l r) = do hnf_compute g <- goal let (tc, _) = unApply g case g of TType _ -> elab' ina (Just fc) (PApp fc (PRef fc pairTy) [pexp l,pexp r]) UType _ -> elab' ina (Just fc) (PApp fc (PRef fc upairTy) [pexp l,pexp r]) _ -> case tc of P _ n _ | n == upairTy -> elab' ina (Just fc) (PApp fc (PRef fc upairCon) [pimp (sUN "A") Placeholder False, pimp (sUN "B") Placeholder False, pexp l, pexp r]) _ -> elab' ina (Just fc) (PApp fc (PRef fc pairCon) [pimp (sUN "A") Placeholder False, pimp (sUN "B") Placeholder False, pexp l, pexp r]) -- _ -> try' (elab' ina (Just fc) (PApp fc (PRef fc pairCon) -- [pimp (sUN "A") Placeholder False, -- pimp (sUN "B") Placeholder False, -- pexp l, pexp r])) -- (elab' ina (Just fc) (PApp fc (PRef fc upairCon) -- [pimp (sUN "A") Placeholder False, -- pimp (sUN "B") Placeholder False, -- pexp l, pexp r])) -- True elab' ina _ (PDPair fc p l@(PRef _ n) t r) = case t of Placeholder -> do hnf_compute g <- goal case g of TType _ -> asType _ -> asValue _ -> asType where asType = elab' ina (Just fc) (PApp fc (PRef fc sigmaTy) [pexp t, -- TODO: save the FC from the dependent pair -- syntax and put it on this lambda for interactive -- semantic highlighting support. NoFC for now. pexp (PLam fc n NoFC Placeholder r)]) asValue = elab' ina (Just fc) (PApp fc (PRef fc sigmaCon) [pimp (sMN 0 "a") t False, pimp (sMN 0 "P") Placeholder True, pexp l, pexp r]) elab' ina _ (PDPair fc p l t r) = elab' ina (Just fc) (PApp fc (PRef fc sigmaCon) [pimp (sMN 0 "a") t False, pimp (sMN 0 "P") Placeholder True, pexp l, pexp r]) elab' ina fc (PAlternative (ExactlyOne delayok) as) = do hnf_compute ty <- goal ctxt <- get_context let (tc, _) = unApply ty env <- get_env let as' = pruneByType (map fst env) tc ctxt as -- trace (-- show tc ++ " " ++ show as ++ "\n ==> " ++ -- show (length as') ++ "\n" ++ -- showSep ", " (map showTmImpls as') ++ "\nEND") $ (h : hs) <- get_holes case as' of [x] -> elab' ina fc x -- If there's options, try now, and if that fails, postpone -- to later. 
_ -> handleError isAmbiguous (tryAll (zip (map (elab' ina fc) as') (map showHd as'))) (do movelast h delayElab 5 $ do focus h tryAll (zip (map (elab' ina fc) as') (map showHd as'))) where showHd (PApp _ (PRef _ n) _) = n showHd (PRef _ n) = n showHd (PApp _ h _) = showHd h showHd x = NErased -- We probably should do something better than this here isAmbiguous (CantResolveAlts _) = delayok isAmbiguous (Elaborating _ _ e) = isAmbiguous e isAmbiguous (ElaboratingArg _ _ _ e) = isAmbiguous e isAmbiguous (At _ e) = isAmbiguous e isAmbiguous _ = False elab' ina fc (PAlternative FirstSuccess as) = trySeq as where -- if none work, take the error from the first trySeq (x : xs) = let e1 = elab' ina fc x in try' e1 (trySeq' e1 xs) True trySeq [] = fail "Nothing to try in sequence" trySeq' deferr [] = proofFail deferr trySeq' deferr (x : xs) = try' (do elab' ina fc x solveAutos ist fn False) (trySeq' deferr xs) True elab' ina _ (PPatvar fc n) | bindfree = do patvar n update_term liftPats highlightSource fc (AnnBoundName n False) -- elab' (_, _, inty) (PRef fc f) -- | isTConName f (tt_ctxt ist) && pattern && not reflection && not inty -- = lift $ tfail (Msg "Typecase is not allowed") elab' ec _ tm@(PRef fc n) | pattern && not reflection && not (e_qq ec) && not (e_intype ec) && isTConName n (tt_ctxt ist) = lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm) | pattern && not reflection && not (e_qq ec) && e_nomatching ec = lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm) | (pattern || (bindfree && bindable n)) && not (inparamBlock n) && not (e_qq ec) = do let ina = e_inarg ec guarded = e_guarded ec inty = e_intype ec ctxt <- get_context let defined = case lookupTy n ctxt of [] -> False _ -> True -- this is to stop us resolve type classes recursively -- trace (show (n, guarded)) $ if (tcname n && ina) then erun fc $ do patvar n update_term liftPats highlightSource fc (AnnBoundName n False) else if (defined && not guarded) then do apply (Var n) [] annot <- findHighlight n solve highlightSource fc annot else try (do apply (Var n) [] annot <- findHighlight n solve highlightSource fc annot) (do patvar n update_term liftPats highlightSource fc (AnnBoundName n False)) where inparamBlock n = case lookupCtxtName n (inblock info) of [] -> False _ -> True bindable (NS _ _) = False bindable (UN xs) = True bindable n = implicitable n elab' ina _ f@(PInferRef fc n) = elab' ina (Just fc) (PApp NoFC f []) elab' ina fc' tm@(PRef fc n) | pattern && not reflection && not (e_qq ina) && not (e_intype ina) && isTConName n (tt_ctxt ist) = lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm) | pattern && not reflection && not (e_qq ina) && e_nomatching ina = lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm) | otherwise = do fty <- get_type (Var n) -- check for implicits ctxt <- get_context env <- get_env let a' = insertScopedImps fc (normalise ctxt env fty) [] if null a' then erun fc $ do apply (Var n) [] hl <- findHighlight n solve highlightSource fc hl else elab' ina fc' (PApp fc tm []) elab' ina _ (PLam _ _ _ _ PImpossible) = lift . tfail . Msg $ "Only pattern-matching lambdas can be impossible" elab' ina _ (PLam fc n nfc Placeholder sc) = do -- if n is a type constructor name, this makes no sense... 
ctxt <- get_context when (isTConName n ctxt) $ lift $ tfail (Msg $ "Can't use type constructor " ++ show n ++ " here") checkPiGoal n attack; intro (Just n); -- trace ("------ intro " ++ show n ++ " ---- \n" ++ show ptm) elabE (ina { e_inarg = True } ) (Just fc) sc; solve highlightSource nfc (AnnBoundName n False) elab' ec _ (PLam fc n nfc ty sc) = do tyn <- getNameFrom (sMN 0 "lamty") -- if n is a type constructor name, this makes no sense... ctxt <- get_context when (isTConName n ctxt) $ lift $ tfail (Msg $ "Can't use type constructor " ++ show n ++ " here") checkPiGoal n claim tyn RType explicit tyn attack ptm <- get_term hs <- get_holes introTy (Var tyn) (Just n) focus tyn elabE (ec { e_inarg = True, e_intype = True }) (Just fc) ty elabE (ec { e_inarg = True }) (Just fc) sc solve highlightSource nfc (AnnBoundName n False) elab' ina fc (PPi p n nfc Placeholder sc) = do attack; arg n (is_scoped p) (sMN 0 "ty") elabE (ina { e_inarg = True, e_intype = True }) fc sc solve highlightSource nfc (AnnBoundName n False) elab' ina fc (PPi p n nfc ty sc) = do attack; tyn <- getNameFrom (sMN 0 "ty") claim tyn RType n' <- case n of MN _ _ -> unique_hole n _ -> return n forall n' (is_scoped p) (Var tyn) focus tyn let ec' = ina { e_inarg = True, e_intype = True } elabE ec' fc ty elabE ec' fc sc solve highlightSource nfc (AnnBoundName n False) elab' ina _ (PLet fc n nfc ty val sc) = do attack ivs <- get_instances tyn <- getNameFrom (sMN 0 "letty") claim tyn RType valn <- getNameFrom (sMN 0 "letval") claim valn (Var tyn) explicit valn letbind n (Var tyn) (Var valn) case ty of Placeholder -> return () _ -> do focus tyn explicit tyn elabE (ina { e_inarg = True, e_intype = True }) (Just fc) ty focus valn elabE (ina { e_inarg = True, e_intype = True }) (Just fc) val ivs' <- get_instances env <- get_env elabE (ina { e_inarg = True }) (Just fc) sc when (not pattern) $ mapM_ (\n -> do focus n g <- goal hs <- get_holes if all (\n -> n == tyn || not (n `elem` hs)) (freeNames g) then try (resolveTC True False 10 g fn ist) (movelast n) else movelast n) (ivs' \\ ivs) -- HACK: If the name leaks into its type, it may leak out of -- scope outside, so substitute in the outer scope. expandLet n (case lookup n env of Just (Let t v) -> v other -> error ("Value not a let binding: " ++ show other)) solve highlightSource nfc (AnnBoundName n False) elab' ina _ (PGoal fc r n sc) = do rty <- goal attack tyn <- getNameFrom (sMN 0 "letty") claim tyn RType valn <- getNameFrom (sMN 0 "letval") claim valn (Var tyn) letbind n (Var tyn) (Var valn) focus valn elabE (ina { e_inarg = True, e_intype = True }) (Just fc) (PApp fc r [pexp (delab ist rty)]) env <- get_env computeLet n elabE (ina { e_inarg = True }) (Just fc) sc solve -- elab' ina fc (PLet n Placeholder -- (PApp fc r [pexp (delab ist rty)]) sc) elab' ina _ tm@(PApp fc (PInferRef _ f) args) = do rty <- goal ds <- get_deferred ctxt <- get_context -- make a function type a -> b -> c -> ... -> rty for the -- new function name env <- get_env argTys <- claimArgTys env args fn <- getNameFrom (sMN 0 "inf_fn") let fty = fnTy argTys rty -- trace (show (ptm, map fst argTys)) $ focus fn -- build and defer the function application attack; deferType (mkN f) fty (map fst argTys); solve -- elaborate the arguments, to unify their types. They all have to -- be explicit. 
mapM_ elabIArg (zip argTys args) where claimArgTys env [] = return [] claimArgTys env (arg : xs) | Just n <- localVar env (getTm arg) = do nty <- get_type (Var n) ans <- claimArgTys env xs return ((n, (False, forget nty)) : ans) claimArgTys env (_ : xs) = do an <- getNameFrom (sMN 0 "inf_argTy") aval <- getNameFrom (sMN 0 "inf_arg") claim an RType claim aval (Var an) ans <- claimArgTys env xs return ((aval, (True, (Var an))) : ans) fnTy [] ret = forget ret fnTy ((x, (_, xt)) : xs) ret = RBind x (Pi Nothing xt RType) (fnTy xs ret) localVar env (PRef _ x) = case lookup x env of Just _ -> Just x _ -> Nothing localVar env _ = Nothing elabIArg ((n, (True, ty)), def) = do focus n; elabE ina (Just fc) (getTm def) elabIArg _ = return () -- already done, just a name mkN n@(NS _ _) = n mkN n@(SN _) = n mkN n = case namespace info of Just xs@(_:_) -> sNS n xs _ -> n elab' ina _ (PMatchApp fc fn) = do (fn', imps) <- case lookupCtxtName fn (idris_implicits ist) of [(n, args)] -> return (n, map (const True) args) _ -> lift $ tfail (NoSuchVariable fn) ns <- match_apply (Var fn') (map (\x -> (x,0)) imps) solve -- if f is local, just do a simple_app -- FIXME: Anyone feel like refactoring this mess? - EB elab' ina topfc tm@(PApp fc (PRef ffc f) args_in) | pattern && not reflection && not (e_qq ina) && e_nomatching ina = lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm) | otherwise = implicitApp $ do env <- get_env ty <- goal fty <- get_type (Var f) ctxt <- get_context annot <- findHighlight f mapM_ checkKnownImplicit args_in let args = insertScopedImps fc (normalise ctxt env fty) args_in let unmatchableArgs = if pattern then getUnmatchable (tt_ctxt ist) f else [] -- trace ("BEFORE " ++ show f ++ ": " ++ show ty) $ when (pattern && not reflection && not (e_qq ina) && not (e_intype ina) && isTConName f (tt_ctxt ist)) $ lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm) -- trace (show (f, args_in, args)) $ if (f `elem` map fst env && length args == 1 && length args_in == 1) then -- simple app, as below do simple_app False (elabE (ina { e_isfn = True }) (Just fc) (PRef ffc f)) (elabE (ina { e_inarg = True }) (Just fc) (getTm (head args))) (show tm) solve highlightSource ffc annot return [] else do ivs <- get_instances ps <- get_probs -- HACK: we shouldn't resolve type classes if we're defining an instance -- function or default definition. 
let isinf = f == inferCon || tcname f -- if f is a type class, we need to know its arguments so that -- we can unify with them case lookupCtxt f (idris_classes ist) of [] -> return () _ -> do mapM_ setInjective (map getTm args) -- maybe more things are solvable now unifyProblems let guarded = isConName f ctxt -- trace ("args is " ++ show args) $ return () ns <- apply (Var f) (map isph args) -- trace ("ns is " ++ show ns) $ return () -- mark any type class arguments as injective mapM_ checkIfInjective (map snd ns) unifyProblems -- try again with the new information, -- to help with disambiguation ulog <- getUnifyLog annot <- findHighlight f highlightSource ffc annot elabArgs ist (ina { e_inarg = e_inarg ina || not isinf }) [] fc False f (zip ns (unmatchableArgs ++ repeat False)) (f == sUN "Force") (map (\x -> getTm x) args) -- TODO: remove this False arg imp <- if (e_isfn ina) then do guess <- get_guess env <- get_env case safeForgetEnv (map fst env) guess of Nothing -> return [] Just rguess -> do gty <- get_type rguess let ty_n = normalise ctxt env gty return $ getReqImps ty_n else return [] -- Now we find out how many implicits we needed at the -- end of the application by looking at the goal again -- - Have another go, but this time add the -- implicits (can't think of a better way than this...) case imp of rs@(_:_) | not pattern -> return rs -- quit, try again _ -> do solve hs <- get_holes ivs' <- get_instances -- Attempt to resolve any type classes which have 'complete' types, -- i.e. no holes in them when (not pattern || (e_inarg ina && not tcgen && not (e_guarded ina))) $ mapM_ (\n -> do focus n g <- goal env <- get_env hs <- get_holes if all (\n -> not (n `elem` hs)) (freeNames g) then try (resolveTC False False 10 g fn ist) (movelast n) else movelast n) (ivs' \\ ivs) return [] where -- Run the elaborator, which returns how many implicit -- args were needed, then run it again with those args. We need -- this because we have to elaborate the whole application to -- find out whether any computations have caused more implicits -- to be needed. implicitApp :: ElabD [ImplicitInfo] -> ElabD () implicitApp elab | pattern = do elab; return () | otherwise = do s <- get imps <- elab case imps of [] -> return () es -> do put s elab' ina topfc (PAppImpl tm es) checkKnownImplicit imp | UnknownImp `elem` argopts imp = lift $ tfail $ UnknownImplicit (pname imp) f checkKnownImplicit _ = return () getReqImps (Bind x (Pi (Just i) ty _) sc) = i : getReqImps sc getReqImps _ = [] checkIfInjective n = do env <- get_env case lookup n env of Nothing -> return () Just b -> case unApply (binderTy b) of (P _ c _, args) -> case lookupCtxtExact c (idris_classes ist) of Nothing -> return () Just ci -> -- type class, set as injective do mapM_ setinjArg (getDets 0 (class_determiners ci) args) -- maybe we can solve more things now... 
ulog <- getUnifyLog probs <- get_probs traceWhen ulog ("Injective now " ++ show args ++ "\n" ++ qshow probs) $ unifyProblems probs <- get_probs traceWhen ulog (qshow probs) $ return () _ -> return () setinjArg (P _ n _) = setinj n setinjArg _ = return () getDets i ds [] = [] getDets i ds (a : as) | i `elem` ds = a : getDets (i + 1) ds as | otherwise = getDets (i + 1) ds as tacTm (PTactics _) = True tacTm (PProof _) = True tacTm _ = False setInjective (PRef _ n) = setinj n setInjective (PApp _ (PRef _ n) _) = setinj n setInjective _ = return () elab' ina _ tm@(PApp fc f [arg]) = erun fc $ do simple_app (not $ headRef f) (elabE (ina { e_isfn = True }) (Just fc) f) (elabE (ina { e_inarg = True }) (Just fc) (getTm arg)) (show tm) solve where headRef (PRef _ _) = True headRef (PApp _ f _) = headRef f headRef (PAlternative _ as) = all headRef as headRef _ = False elab' ina fc (PAppImpl f es) = do appImpl (reverse es) -- not that we look... solve where appImpl [] = elab' (ina { e_isfn = False }) fc f -- e_isfn not set, so no recursive expansion of implicits appImpl (e : es) = simple_app False (appImpl es) (elab' ina fc Placeholder) (show f) elab' ina fc Placeholder = do (h : hs) <- get_holes movelast h elab' ina fc (PMetavar nfc n) = do ptm <- get_term -- When building the metavar application, leave out the unique -- names which have been used elsewhere in the term, since we -- won't be able to use them in the resulting application. let unique_used = getUniqueUsed (tt_ctxt ist) ptm let n' = mkN n attack defer unique_used n' solve highlightSource nfc (AnnName n' (Just MetavarOutput) Nothing Nothing) where mkN n@(NS _ _) = n mkN n = case namespace info of Just xs@(_:_) -> sNS n xs _ -> n elab' ina fc (PProof ts) = do compute; mapM_ (runTac True ist (elabFC info) fn) ts elab' ina fc (PTactics ts) | not pattern = do mapM_ (runTac False ist fc fn) ts | otherwise = elab' ina fc Placeholder elab' ina fc (PElabError e) = lift $ tfail e elab' ina _ (PRewrite fc r sc newg) = do attack tyn <- getNameFrom (sMN 0 "rty") claim tyn RType valn <- getNameFrom (sMN 0 "rval") claim valn (Var tyn) letn <- getNameFrom (sMN 0 "_rewrite_rule") letbind letn (Var tyn) (Var valn) focus valn elab' ina (Just fc) r compute g <- goal rewrite (Var letn) g' <- goal when (g == g') $ lift $ tfail (NoRewriting g) case newg of Nothing -> elab' ina (Just fc) sc Just t -> doEquiv t sc solve where doEquiv t sc = do attack tyn <- getNameFrom (sMN 0 "ety") claim tyn RType valn <- getNameFrom (sMN 0 "eqval") claim valn (Var tyn) letn <- getNameFrom (sMN 0 "equiv_val") letbind letn (Var tyn) (Var valn) focus tyn elab' ina (Just fc) t focus valn elab' ina (Just fc) sc elab' ina (Just fc) (PRef fc letn) solve elab' ina _ c@(PCase fc scr opts) = do attack tyn <- getNameFrom (sMN 0 "scty") claim tyn RType valn <- getNameFrom (sMN 0 "scval") scvn <- getNameFrom (sMN 0 "scvar") claim valn (Var tyn) letbind scvn (Var tyn) (Var valn) focus valn elabE (ina { e_inarg = True }) (Just fc) scr -- Solve any remaining implicits - we need to solve as many -- as possible before making the 'case' type unifyProblems matchProblems True args <- get_env envU <- mapM (getKind args) args let namesUsedInRHS = nub $ scvn : concatMap (\(_,rhs) -> allNamesIn rhs) opts -- Drop the unique arguments used in the term already -- and in the scrutinee (since it's -- not valid to use them again anyway) -- -- Also drop unique arguments which don't appear explicitly -- in either case branch so they don't count as used -- unnecessarily (can only do this for unique things, 
since we -- assume they don't appear implicitly in types) ptm <- get_term let inOpts = (filter (/= scvn) (map fst args)) \\ (concatMap (\x -> allNamesIn (snd x)) opts) let argsDropped = filter (isUnique envU) (nub $ allNamesIn scr ++ inApp ptm ++ inOpts) let args' = filter (\(n, _) -> n `notElem` argsDropped) args cname <- unique_hole' True (mkCaseName fn) let cname' = mkN cname -- elab' ina fc (PMetavar cname') attack; defer argsDropped cname'; solve -- if the scrutinee is one of the 'args' in env, we should -- inspect it directly, rather than adding it as a new argument let newdef = PClauses fc [] cname' (caseBlock fc cname' (map (isScr scr) (reverse args')) opts) -- elaborate case updateAux (\e -> e { case_decls = (cname', newdef) : case_decls e } ) -- if we haven't got the type yet, hopefully we'll get it later! movelast tyn solve where mkCaseName (NS n ns) = NS (mkCaseName n) ns mkCaseName n = SN (CaseN n) -- mkCaseName (UN x) = UN (x ++ "_case") -- mkCaseName (MN i x) = MN i (x ++ "_case") mkN n@(NS _ _) = n mkN n = case namespace info of Just xs@(_:_) -> sNS n xs _ -> n inApp (P _ n _) = [n] inApp (App _ f a) = inApp f ++ inApp a inApp (Bind n (Let _ v) sc) = inApp v ++ inApp sc inApp (Bind n (Guess _ v) sc) = inApp v ++ inApp sc inApp (Bind n b sc) = inApp sc inApp _ = [] isUnique envk n = case lookup n envk of Just u -> u _ -> False getKind env (n, _) = case lookup n env of Nothing -> return (n, False) -- can't happen, actually... Just b -> do ty <- get_type (forget (binderTy b)) case ty of UType UniqueType -> return (n, True) UType AllTypes -> return (n, True) _ -> return (n, False) tcName tm | (P _ n _, _) <- unApply tm = case lookupCtxt n (idris_classes ist) of [_] -> True _ -> False tcName _ = False usedIn ns (n, b) = n `elem` ns || any (\x -> x `elem` ns) (allTTNames (binderTy b)) elab' ina fc (PUnifyLog t) = do unifyLog True elab' ina fc t unifyLog False elab' ina fc (PQuasiquote t goalt) = do -- First extract the unquoted subterms, replacing them with fresh -- names in the quasiquoted term. Claim their reflections to be -- an inferred type (to support polytypic quasiquotes). finalTy <- goal (t, unq) <- extractUnquotes 0 t let unquoteNames = map fst unq mapM_ (\uqn -> claim uqn (forget finalTy)) unquoteNames -- Save the old state - we need a fresh proof state to avoid -- capturing lexically available variables in the quoted term. ctxt <- get_context datatypes <- get_datatypes saveState updatePS (const . newProof (sMN 0 "q") ctxt datatypes $ P Ref (reflm "TT") Erased) -- Re-add the unquotes, letting Idris infer the (fictional) -- types. Here, they represent the real type rather than the type -- of their reflection. mapM_ (\n -> do ty <- getNameFrom (sMN 0 "unqTy") claim ty RType movelast ty claim n (Var ty) movelast n) unquoteNames -- Determine whether there's an explicit goal type, and act accordingly -- Establish holes for the type and value of the term to be -- quasiquoted qTy <- getNameFrom (sMN 0 "qquoteTy") claim qTy RType movelast qTy qTm <- getNameFrom (sMN 0 "qquoteTm") claim qTm (Var qTy) -- Let-bind the result of elaborating the contained term, so that -- the hole doesn't disappear nTm <- getNameFrom (sMN 0 "quotedTerm") letbind nTm (Var qTy) (Var qTm) -- Fill out the goal type, if relevant case goalt of Nothing -> return () Just gTy -> do focus qTy elabE (ina { e_qq = True }) fc gTy -- Elaborate the quasiquoted term into the hole focus qTm elabE (ina { e_qq = True }) fc t end_unify -- We now have an elaborated term. 
Reflect it and solve the -- original goal in the original proof state, preserving highlighting env <- get_env EState _ _ _ hs <- getAux loadState updateAux (\aux -> aux { highlighting = hs }) let quoted = fmap (explicitNames . binderVal) $ lookup nTm env isRaw = case unApply (normaliseAll ctxt env finalTy) of (P _ n _, []) | n == reflm "Raw" -> True _ -> False case quoted of Just q -> do ctxt <- get_context (q', _, _) <- lift $ recheck ctxt [(uq, Lam Erased) | uq <- unquoteNames] (forget q) q if pattern then if isRaw then reflectRawQuotePattern unquoteNames (forget q') else reflectTTQuotePattern unquoteNames q' else do if isRaw then -- we forget q' instead of using q to ensure rechecking fill $ reflectRawQuote unquoteNames (forget q') else fill $ reflectTTQuote unquoteNames q' solve Nothing -> lift . tfail . Msg $ "Broken elaboration of quasiquote" -- Finally fill in the terms or patterns from the unquotes. This -- happens last so that their holes still exist while elaborating -- the main quotation. mapM_ elabUnquote unq where elabUnquote (n, tm) = do focus n elabE (ina { e_qq = False }) fc tm elab' ina fc (PUnquote t) = fail "Found unquote outside of quasiquote" elab' ina fc (PQuoteName n) = do ctxt <- get_context env <- get_env case lookup n env of Just _ -> do fill $ reflectName n ; solve Nothing -> case lookupNameDef n ctxt of [(n', _)] -> do fill $ reflectName n' solve [] -> lift . tfail . NoSuchVariable $ n more -> lift . tfail . CantResolveAlts $ map fst more elab' ina fc (PAs _ n t) = lift . tfail . Msg $ "@-pattern not allowed here" elab' ina fc (PHidden t) | reflection = elab' ina fc t | otherwise = do (h : hs) <- get_holes -- Dotting a hole means that either the hole or any outer -- hole (a hole outside any occurrence of it) -- must be solvable by unification as well as being filled -- in directly. -- Delay dotted things to the end, then when we elaborate them -- we can check the result against what was inferred movelast h delayElab 10 $ do focus h dotterm elab' ina fc t elab' ina fc (PRunElab fc' tm) = do attack n <- getNameFrom (sMN 0 "tacticScript") n' <- getNameFrom (sMN 0 "tacticExpr") let scriptTy = RApp (Var (sNS (sUN "Elab") ["Elab", "Reflection", "Language"])) (Var unitTy) claim n scriptTy movelast n letbind n' scriptTy (Var n) focus n elab' ina (Just fc') tm env <- get_env runTactical ist (maybe fc' id fc) env (P Bound n' Erased) solve elab' ina fc x = fail $ "Unelaboratable syntactic form " ++ showTmImpls x -- delay elaboration of 't', with priority 'pri' until after everything -- else is done. -- The delayed things with lower numbered priority will be elaborated -- first. (In practice, this means delayed alternatives, then PHidden -- things.) delayElab pri t = updateAux (\e -> e { delayed_elab = delayed_elab e ++ [(pri, t)] }) isScr :: PTerm -> (Name, Binder Term) -> (Name, (Bool, Binder Term)) isScr (PRef _ n) (n', b) = (n', (n == n', b)) isScr _ (n', b) = (n', (False, b)) caseBlock :: FC -> Name -> [(Name, (Bool, Binder Term))] -> [(PTerm, PTerm)] -> [PClause] caseBlock fc n env opts = let args' = findScr env args = map mkarg (map getNmScr args') in map (mkClause args) opts where -- Find the variable we want as the scrutinee and mark it as -- 'True'. If the scrutinee is in the environment, match on that -- otherwise match on the new argument we're adding. findScr ((n, (True, t)) : xs) = (n, (True, t)) : scrName n xs findScr [(n, (_, t))] = [(n, (True, t))] findScr (x : xs) = x : findScr xs -- [] can't happen since scrutinee is in the environment! 
findScr [] = error "The impossible happened - the scrutinee was not in the environment" -- To make sure top level pattern name remains in scope, put -- it at the end of the environment scrName n [] = [] scrName n [(_, t)] = [(n, t)] scrName n (x : xs) = x : scrName n xs getNmScr (n, (s, _)) = (n, s) mkarg (n, s) = (PRef fc n, s) -- may be shadowed names in the new pattern - so replace the -- old ones with an _ mkClause args (l, r) = let args' = map (shadowed (allNamesIn l)) args lhs = PApp (getFC fc l) (PRef (getFC fc l) n) (map (mkLHSarg l) args') in PClause (getFC fc l) n lhs [] r [] mkLHSarg l (tm, True) = pexp l mkLHSarg l (tm, False) = pexp tm shadowed new (PRef _ n, s) | n `elem` new = (Placeholder, s) shadowed new t = t getFC d (PApp fc _ _) = fc getFC d (PRef fc _) = fc getFC d (PAlternative _ (x:_)) = getFC d x getFC d x = d insertLazy :: PTerm -> ElabD PTerm insertLazy t@(PApp _ (PRef _ (UN l)) _) | l == txt "Delay" = return t insertLazy t@(PApp _ (PRef _ (UN l)) _) | l == txt "Force" = return t insertLazy (PCoerced t) = return t insertLazy t = do ty <- goal env <- get_env let (tyh, _) = unApply (normalise (tt_ctxt ist) env ty) let tries = if pattern then [t, mkDelay env t] else [mkDelay env t, t] case tyh of P _ (UN l) _ | l == txt "Lazy'" -> return (PAlternative FirstSuccess tries) _ -> return t where mkDelay env (PAlternative b xs) = PAlternative b (map (mkDelay env) xs) mkDelay env t = let fc = fileFC "Delay" in addImplBound ist (map fst env) (PApp fc (PRef fc (sUN "Delay")) [pexp t]) -- Don't put implicit coercions around applications which are marked -- as '%noImplicit', or around case blocks, otherwise we get exponential -- blowup especially where there are errors deep in large expressions. notImplicitable (PApp _ f _) = notImplicitable f -- TMP HACK no coercing on bind (make this configurable) notImplicitable (PRef _ n) | [opts] <- lookupCtxt n (idris_flags ist) = NoImplicit `elem` opts notImplicitable (PAlternative (ExactlyOne _) as) = any notImplicitable as -- case is tricky enough without implicit coercions! If they are needed, -- they can go in the branches separately. 
notImplicitable (PCase _ _ _) = True notImplicitable _ = False insertScopedImps fc (Bind n (Pi im@(Just i) _ _) sc) xs | tcinstance i = pimp n (PResolveTC fc) True : insertScopedImps fc sc xs | otherwise = pimp n Placeholder True : insertScopedImps fc sc xs insertScopedImps fc (Bind n (Pi _ _ _) sc) (x : xs) = x : insertScopedImps fc sc xs insertScopedImps _ _ xs = xs insertImpLam ina t = do ty <- goal env <- get_env let ty' = normalise (tt_ctxt ist) env ty addLam ty' t where -- just one level at a time addLam (Bind n (Pi (Just _) _ _) sc) t = do impn <- unique_hole n -- (sMN 0 "scoped_imp") if e_isfn ina -- apply to an implicit immediately then return (PApp emptyFC (PLam emptyFC impn NoFC Placeholder t) [pexp Placeholder]) else return (PLam emptyFC impn NoFC Placeholder t) addLam _ t = return t insertCoerce ina t@(PCase _ _ _) = return t insertCoerce ina t | notImplicitable t = return t insertCoerce ina t = do ty <- goal -- Check for possible coercions to get to the goal -- and add them as 'alternatives' env <- get_env let ty' = normalise (tt_ctxt ist) env ty let cs = getCoercionsTo ist ty' let t' = case (t, cs) of (PCoerced tm, _) -> tm (_, []) -> t (_, cs) -> PAlternative FirstSuccess [t , PAlternative (ExactlyOne False) (map (mkCoerce env t) cs)] return t' where mkCoerce env t n = let fc = maybe (fileFC "Coercion") id (highestFC t) in addImplBound ist (map fst env) (PApp fc (PRef fc n) [pexp (PCoerced t)]) -- | Elaborate the arguments to a function elabArgs :: IState -- ^ The current Idris state -> ElabCtxt -- ^ (in an argument, guarded, in a type, in a qquote) -> [Bool] -> FC -- ^ Source location -> Bool -> Name -- ^ Name of the function being applied -> [((Name, Name), Bool)] -- ^ (Argument Name, Hole Name, unmatchable) -> Bool -- ^ under a 'force' -> [PTerm] -- ^ argument -> ElabD () elabArgs ist ina failed fc retry f [] force _ = return () elabArgs ist ina failed fc r f (((argName, holeName), unm):ns) force (t : args) = do hs <- get_holes if holeName `elem` hs then do focus holeName case t of Placeholder -> do movelast holeName elabArgs ist ina failed fc r f ns force args _ -> elabArg t else elabArgs ist ina failed fc r f ns force args where elabArg t = do -- solveAutos ist fn False now_elaborating fc f argName wrapErr f argName $ do hs <- get_holes tm <- get_term -- No coercing under an explicit Force (or it can Force/Delay -- recursively!) let elab = if force then elab' else elabE failed' <- -- trace (show (n, t, hs, tm)) $ -- traceWhen (not (null cs)) (show ty ++ "\n" ++ showImp True t) $ do focus holeName; g <- goal -- Can't pattern match on polymorphic goals poly <- goal_polymorphic ulog <- getUnifyLog traceWhen ulog ("Elaborating argument " ++ show (argName, holeName, g)) $ elab (ina { e_nomatching = unm && poly }) (Just fc) t return failed done_elaborating_arg f argName elabArgs ist ina failed fc r f ns force args wrapErr f argName action = do elabState <- get while <- elaborating_app let while' = map (\(x, y, z)-> (y, z)) while (result, newState) <- case runStateT action elabState of OK (res, newState) -> return (res, newState) Error e -> do done_elaborating_arg f argName lift (tfail (elaboratingArgErr while' e)) put newState return result elabArgs _ _ _ _ _ _ (((arg, hole), _) : _) _ [] = fail $ "Can't elaborate these args: " ++ show arg ++ " " ++ show hole -- For every alternative, look at the function at the head. 
Automatically resolve -- any nested alternatives where that function is also at the head pruneAlt :: [PTerm] -> [PTerm] pruneAlt xs = map prune xs where prune (PApp fc1 (PRef fc2 f) as) = PApp fc1 (PRef fc2 f) (fmap (fmap (choose f)) as) prune t = t choose f (PAlternative a as) = let as' = fmap (choose f) as fs = filter (headIs f) as' in case fs of [a] -> a _ -> PAlternative a as' choose f (PApp fc f' as) = PApp fc (choose f f') (fmap (fmap (choose f)) as) choose f t = t headIs f (PApp _ (PRef _ f') _) = f == f' headIs f (PApp _ f' _) = headIs f f' headIs f _ = True -- keep if it's not an application -- Rule out alternatives that don't return the same type as the head of the goal -- (If there are none left as a result, do nothing) pruneByType :: [Name] -> Term -> -- head of the goal Context -> [PTerm] -> [PTerm] -- if an alternative has a locally bound name at the head, take it pruneByType env t c as | Just a <- locallyBound as = [a] where locallyBound [] = Nothing locallyBound (t:ts) | Just n <- getName t, n `elem` env = Just t | otherwise = locallyBound ts getName (PRef _ n) = Just n getName (PApp _ f _) = getName f getName (PHidden t) = getName t getName _ = Nothing pruneByType env (P _ n _) ctxt as -- if the goal type is polymorphic, keep e | Nothing <- lookupTyExact n ctxt = as | otherwise = let asV = filter (headIs True n) as as' = filter (headIs False n) as in case as' of [] -> case asV of [] -> as _ -> asV _ -> as' where headIs var f (PRef _ f') = typeHead var f f' headIs var f (PApp _ (PRef _ f') _) = typeHead var f f' headIs var f (PApp _ f' _) = headIs var f f' headIs var f (PPi _ _ _ _ sc) = headIs var f sc headIs var f (PHidden t) = headIs var f t headIs var f t = True -- keep if it's not an application typeHead var f f' = -- trace ("Trying " ++ show f' ++ " for " ++ show n) $ case lookupTyExact f' ctxt of Just ty -> case unApply (getRetTy ty) of (P _ ctyn _, _) | isConName ctyn ctxt -> ctyn == f _ -> let ty' = normalise ctxt [] ty in case unApply (getRetTy ty') of (P _ ftyn _, _) -> ftyn == f (V _, _) -> var -- keep, variable _ -> False _ -> False pruneByType _ t _ as = as -- | Use the local elab context to work out the highlighting for a name findHighlight :: Name -> ElabD OutputAnnotation findHighlight n = do ctxt <- get_context env <- get_env case lookup n env of Just _ -> return $ AnnBoundName n False Nothing -> case lookupTyExact n ctxt of Just _ -> return $ AnnName n Nothing Nothing Nothing Nothing -> lift . tfail . InternalMsg $ "Can't find name" ++ show n -- | Find the names of instances that have been designeated for -- searching (i.e. 
non-named instances or instances from Elab scripts) findInstances :: IState -> Term -> [Name] findInstances ist t | (P _ n _, _) <- unApply (getRetTy t) = case lookupCtxt n (idris_classes ist) of [CI _ _ _ _ _ ins _] -> [n | (n, True) <- ins, accessible n] _ -> [] | otherwise = [] where accessible n = case lookupDefAccExact n False (tt_ctxt ist) of Just (_, Hidden) -> False _ -> True -- Try again to solve auto implicits solveAuto :: IState -> Name -> Bool -> Name -> ElabD () solveAuto ist fn ambigok n = do hs <- get_holes tm <- get_term when (n `elem` hs) $ do focus n g <- goal isg <- is_guess -- if it's a guess, we're working on it recursively, so stop when (not isg) $ proofSearch' ist True ambigok 100 True Nothing fn [] solveAutos :: IState -> Name -> Bool -> ElabD () solveAutos ist fn ambigok = do autos <- get_autos mapM_ (solveAuto ist fn ambigok) (map fst autos) trivial' ist = trivial (elab ist toplevel ERHS [] (sMN 0 "tac")) ist trivialHoles' h ist = trivialHoles h (elab ist toplevel ERHS [] (sMN 0 "tac")) ist proofSearch' ist rec ambigok depth prv top n hints = do unifyProblems proofSearch rec prv ambigok (not prv) depth (elab ist toplevel ERHS [] (sMN 0 "tac")) top n hints ist -- | Resolve type classes. This will only pick up 'normal' instances, never -- named instances (which is enforced by 'findInstances'). resolveTC :: Bool -- ^ using default Int -> Bool -- ^ allow metavariables in the goal -> Int -- ^ depth -> Term -- ^ top level goal, for error messages -> Name -- ^ top level function name, to prevent loops -> IState -> ElabD () resolveTC def mvok depth top fn ist = do hs <- get_holes resTC' [] def hs depth top fn ist resTC' tcs def topholes 0 topg fn ist = fail $ "Can't resolve type class" resTC' tcs def topholes 1 topg fn ist = try' (trivial' ist) (resolveTC def False 0 topg fn ist) True resTC' tcs defaultOn topholes depth topg fn ist = do compute g <- goal -- Resolution can proceed only if there is something concrete in the -- determining argument positions. Keep track of the holes in the -- non-determining position, because it's okay for 'trivial' to solve -- those holes and no others. 
let (argsok, okholePos) = case tcArgsOK g topholes of Nothing -> (False, []) Just hs -> (True, hs) if not argsok -- && not mvok) then lift $ tfail $ CantResolve True topg else do ptm <- get_term ulog <- getUnifyLog hs <- get_holes env <- get_env t <- goal let (tc, ttypes) = unApply (getRetTy t) let okholes = case tc of P _ n _ -> zip (repeat n) okholePos _ -> [] traceWhen ulog ("Resolving class " ++ show g ++ "\nin" ++ show env ++ "\n" ++ show okholes) $ try' (trivialHoles' okholes ist) (do addDefault t tc ttypes let stk = map fst (filter snd $ elab_stack ist) let insts = findInstances ist t blunderbuss t depth stk (stk ++ insts)) True where -- returns Just hs if okay, where hs are holes which are okay in the -- goal, or Nothing if not okay to proceed tcArgsOK ty hs | (P _ nc _, as) <- unApply (getRetTy ty), nc == numclass && defaultOn = Just [] tcArgsOK ty hs -- if any determining arguments are metavariables, postpone = let (f, as) = unApply (getRetTy ty) in case f of P _ cn _ -> case lookupCtxtExact cn (idris_classes ist) of Just ci -> tcDetArgsOK 0 (class_determiners ci) hs as Nothing -> if any (isMeta hs) as then Nothing else Just [] _ -> if any (isMeta hs) as then Nothing else Just [] -- return the list of argument positions which can safely be a hole -- or Nothing if one of the determining arguments is a hole tcDetArgsOK i ds hs (x : xs) | i `elem` ds = if isMeta hs x then Nothing else tcDetArgsOK (i + 1) ds hs xs | otherwise = do rs <- tcDetArgsOK (i + 1) ds hs xs case x of P _ n _ -> Just (i : rs) _ -> Just rs tcDetArgsOK _ _ _ [] = Just [] isMeta :: [Name] -> Term -> Bool isMeta ns (P _ n _) = n `elem` ns isMeta _ _ = False notHole hs (P _ n _, c) | (P _ cn _, _) <- unApply (getRetTy c), n `elem` hs && isConName cn (tt_ctxt ist) = False | Constant _ <- c = not (n `elem` hs) notHole _ _ = True -- HACK! Rather than giving a special name, better to have some kind -- of flag in ClassInfo structure chaser (UN nm) | ('@':'@':_) <- str nm = True -- old way chaser (SN (ParentN _ _)) = True chaser (NS n _) = chaser n chaser _ = False numclass = sNS (sUN "Num") ["Classes","Prelude"] addDefault t num@(P _ nc _) [P Bound a _] | nc == numclass && defaultOn = do focus a fill (RConstant (AType (ATInt ITBig))) -- default Integer solve addDefault t f as | all boundVar as = return () -- True -- fail $ "Can't resolve " ++ show t addDefault t f a = return () -- trace (show t) $ return () boundVar (P Bound _ _) = True boundVar _ = False blunderbuss t d stk [] = do -- c <- get_env -- ps <- get_probs lift $ tfail $ CantResolve False topg blunderbuss t d stk (n:ns) | n /= fn -- && (n `elem` stk) = tryCatch (resolve n d) (\e -> case e of CantResolve True _ -> lift $ tfail e _ -> blunderbuss t d stk ns) | otherwise = blunderbuss t d stk ns introImps = do g <- goal case g of (Bind _ (Pi _ _ _) sc) -> do attack; intro Nothing num <- introImps return (num + 1) _ -> return 0 solven 0 = return () solven n = do solve; solven (n - 1) resolve n depth | depth == 0 = fail $ "Can't resolve type class" | otherwise = do lams <- introImps t <- goal let (tc, ttypes) = trace (show t) $ unApply (getRetTy t) -- if (all boundVar ttypes) then resolveTC (depth - 1) fn insts ist -- else do -- if there's a hole in the goal, don't even try let imps = case lookupCtxtName n (idris_implicits ist) of [] -> [] [args] -> map isImp (snd args) -- won't be overloaded! xs -> error "The impossible happened - overloading is not expected here!" 
ps <- get_probs tm <- get_term args <- map snd <$> try' (apply (Var n) imps) (match_apply (Var n) imps) True solven lams -- close any implicit lambdas we introduced ps' <- get_probs when (length ps < length ps' || unrecoverable ps') $ fail "Can't apply type class" -- traceWhen (all boundVar ttypes) ("Progress: " ++ show t ++ " with " ++ show n) $ mapM_ (\ (_,n) -> do focus n t' <- goal let (tc', ttype) = unApply (getRetTy t') let got = fst (unApply (getRetTy t)) let depth' = if tc' `elem` tcs then depth - 1 else depth resTC' (got : tcs) defaultOn topholes depth' topg fn ist) (filter (\ (x, y) -> not x) (zip (map fst imps) args)) -- if there's any arguments left, we've failed to resolve hs <- get_holes ulog <- getUnifyLog solve traceWhen ulog ("Got " ++ show n) $ return () where isImp (PImp p _ _ _ _) = (True, p) isImp arg = (False, priority arg) collectDeferred :: Maybe Name -> [Name] -> Context -> Term -> State [(Name, (Int, Maybe Name, Type))] Term collectDeferred top casenames ctxt (Bind n (GHole i t) app) = do ds <- get t' <- collectDeferred top casenames ctxt t when (not (n `elem` map fst ds)) $ put (ds ++ [(n, (i, top, tidyArg [] t'))]) collectDeferred top casenames ctxt app where -- Evaluate the top level functions in arguments, if possible, and if it's -- not a name we're immediately going to define in a case block, so that -- any immediate specialisation of the function applied to constructors -- can be done tidyArg env (Bind n b@(Pi im t k) sc) = Bind n (Pi im (tidy ctxt env t) k) (tidyArg ((n, b) : env) sc) tidyArg env t = t tidy ctxt env t | (f, args) <- unApply t, P _ specn _ <- getFn f, n `notElem` casenames = fst $ specialise ctxt env [(specn, 99999)] t tidy ctxt env t@(Bind n (Let _ _) sct) | (f, args) <- unApply sct, P _ specn _ <- getFn f, n `notElem` casenames = fst $ specialise ctxt env [(specn, 99999)] t tidy ctxt env t = t getFn (Bind n (Lam _) t) = getFn t getFn t | (f, a) <- unApply t = f collectDeferred top ns ctxt (Bind n b t) = do b' <- cdb b t' <- collectDeferred top ns ctxt t return (Bind n b' t') where cdb (Let t v) = liftM2 Let (collectDeferred top ns ctxt t) (collectDeferred top ns ctxt v) cdb (Guess t v) = liftM2 Guess (collectDeferred top ns ctxt t) (collectDeferred top ns ctxt v) cdb b = do ty' <- collectDeferred top ns ctxt (binderTy b) return (b { binderTy = ty' }) collectDeferred top ns ctxt (App s f a) = liftM2 (App s) (collectDeferred top ns ctxt f) (collectDeferred top ns ctxt a) collectDeferred top ns ctxt t = return t case_ :: Bool -> Bool -> IState -> Name -> PTerm -> ElabD () case_ ind autoSolve ist fn tm = do attack tyn <- getNameFrom (sMN 0 "ity") claim tyn RType valn <- getNameFrom (sMN 0 "ival") claim valn (Var tyn) letn <- getNameFrom (sMN 0 "irule") letbind letn (Var tyn) (Var valn) focus valn elab ist toplevel ERHS [] (sMN 0 "tac") tm env <- get_env let (Just binding) = lookup letn env let val = binderVal binding if ind then induction (forget val) else casetac (forget val) when autoSolve solveAll runTactical :: IState -> FC -> Env -> Term -> ElabD () runTactical ist fc env tm = do tm' <- eval tm runTacTm tm' return () where eval tm = do ctxt <- get_context return $ normaliseAll ctxt env (finalise tm) returnUnit = return $ P (DCon 0 0 False) unitCon (P (TCon 0 0) unitTy Erased) patvars :: [Name] -> Term -> ([Name], Term) patvars ns (Bind n (PVar t) sc) = patvars (n : ns) (instantiate (P Bound n t) sc) patvars ns tm = (ns, tm) pullVars :: (Term, Term) -> ([Name], Term, Term) pullVars (lhs, rhs) = (fst (patvars [] lhs), snd (patvars [] lhs), 
snd (patvars [] rhs)) -- TODO alpha-convert rhs defineFunction :: RFunDefn -> ElabD () defineFunction (RDefineFun n clauses) = do ctxt <- get_context ty <- maybe (fail "no type decl") return $ lookupTyExact n ctxt let info = CaseInfo True True False -- TODO document and figure out clauses' <- forM clauses (\case RMkFunClause lhs rhs -> do lhs' <- fmap fst . lift $ check ctxt [] lhs rhs' <- fmap fst . lift $ check ctxt [] rhs return $ Right (lhs', rhs') RMkImpossibleClause lhs -> do lhs' <- fmap fst . lift $ check ctxt [] lhs return $ Left lhs') let clauses'' = map (\case Right c -> pullVars c Left lhs -> let (ns, lhs') = patvars [] lhs' in (ns, lhs', Impossible)) clauses' set_context $ addCasedef n (const []) info False (STerm Erased) True False -- TODO what are these? (map snd $ getArgTys ty) [] -- TODO inaccessible types clauses' clauses'' clauses'' clauses'' clauses'' ty ctxt updateAux $ \e -> e { new_tyDecls = RClausesInstrs n clauses'' : new_tyDecls e} return () -- | Do a step in the reflected elaborator monad. The input is the -- step, the output is the (reflected) term returned. runTacTm :: Term -> ElabD Term runTacTm (unApply -> tac@(P _ n _, args)) | n == tacN "prim__Solve", [] <- args = do solve returnUnit | n == tacN "prim__Goal", [] <- args = do (h:_) <- get_holes t <- goal fmap fst . get_type_val $ rawPair (Var (reflm "TTName"), Var (reflm "TT")) (reflectName h, reflect t) | n == tacN "prim__Holes", [] <- args = do hs <- get_holes fmap fst . get_type_val $ mkList (Var $ reflm "TTName") (map reflectName hs) | n == tacN "prim__Guess", [] <- args = do ok <- is_guess if ok then do guess <- fmap forget get_guess fmap fst . get_type_val $ RApp (RApp (Var (sNS (sUN "Just") ["Maybe", "Prelude"])) (Var (reflm "TT"))) guess else fmap fst . get_type_val $ RApp (Var (sNS (sUN "Nothing") ["Maybe", "Prelude"])) (Var (reflm "TT")) | n == tacN "prim__LookupTy", [n] <- args = do n' <- reifyTTName n ctxt <- get_context let getNameTypeAndType = \case Function ty _ -> (Ref, ty) TyDecl nt ty -> (nt, ty) Operator ty _ _ -> (Ref, ty) CaseOp _ ty _ _ _ _ -> (Ref, ty) -- Idris tuples nest to the right reflectTriple (x, y, z) = raw_apply (Var pairCon) [ Var (reflm "TTName") , raw_apply (Var pairTy) [Var (reflm "NameType"), Var (reflm "TT")] , x , raw_apply (Var pairCon) [ Var (reflm "NameType"), Var (reflm "TT") , y, z]] let defs = [ reflectTriple (reflectName n, reflectNameType nt, reflect ty) | (n, def) <- lookupNameDef n' ctxt , let (nt, ty) = getNameTypeAndType def ] fmap fst . get_type_val $ rawList (raw_apply (Var pairTy) [ Var (reflm "TTName") , raw_apply (Var pairTy) [ Var (reflm "NameType") , Var (reflm "TT")]]) defs | n == tacN "prim__LookupDatatype", [name] <- args = do n' <- reifyTTName name datatypes <- get_datatypes ctxt <- get_context fmap fst . get_type_val $ rawList (Var (tacN "Datatype")) (map reflectDatatype (buildDatatypes ctxt datatypes n')) | n == tacN "prim__SourceLocation", [] <- args = fmap fst . get_type_val $ reflectFC fc | n == tacN "prim__Env", [] <- args = do env <- get_env fmap fst . get_type_val $ reflectEnv env | n == tacN "prim__Fail", [_a, errs] <- args = do errs' <- eval errs parts <- reifyReportParts errs' lift . 
tfail $ ReflectionError [parts] (Msg "") | n == tacN "prim__PureElab", [_a, tm] <- args = return tm | n == tacN "prim__BindElab", [_a, _b, first, andThen] <- args = do first' <- eval first res <- eval =<< runTacTm first' next <- eval (App Complete andThen res) runTacTm next | n == tacN "prim__Try", [_a, first, alt] <- args = do first' <- eval first alt' <- eval alt try' (runTacTm first') (runTacTm alt') True | n == tacN "prim__Fill", [raw] <- args = do raw' <- reifyRaw =<< eval raw fill raw' returnUnit | n == tacN "prim__Apply", [raw] <- args = do raw' <- reifyRaw =<< eval raw apply raw' [] returnUnit | n == tacN "prim__Gensym", [hint] <- args = do hintStr <- eval hint case hintStr of Constant (Str h) -> do n <- getNameFrom (sMN 0 h) fmap fst $ get_type_val (reflectName n) _ -> fail "no hint" | n == tacN "prim__Claim", [n, ty] <- args = do n' <- reifyTTName n ty' <- reifyRaw ty claim n' ty' returnUnit | n == tacN "prim__Forget", [tt] <- args = do tt' <- reifyTT tt fmap fst . get_type_val . reflectRaw $ forget tt' | n == tacN "prim__Attack", [] <- args = do attack returnUnit | n == tacN "prim__Rewrite", [rule] <- args = do r <- reifyRaw rule rewrite r returnUnit | n == tacN "prim__Focus", [what] <- args = do n' <- reifyTTName what focus n' returnUnit | n == tacN "prim__Unfocus", [what] <- args = do n' <- reifyTTName what movelast n' returnUnit | n == tacN "prim__Intro", [mn] <- args = do n <- case fromTTMaybe mn of Nothing -> return Nothing Just name -> fmap Just $ reifyTTName name intro n returnUnit | n == tacN "prim__Forall", [n, ty] <- args = do n' <- reifyTTName n ty' <- reifyRaw ty forall n' Nothing ty' returnUnit | n == tacN "prim__PatVar", [n] <- args = do n' <- reifyTTName n patvar n' returnUnit | n == tacN "prim__PatBind", [n] <- args = do n' <- reifyTTName n patbind n' returnUnit | n == tacN "prim__Compute", [] <- args = do compute ; returnUnit | n == tacN "prim__DeclareType", [decl] <- args = do (RDeclare n args res) <- reifyTyDecl decl ctxt <- get_context let mkPi arg res = RBind (argName arg) (Pi Nothing (argTy arg) (RUType AllTypes)) res rty = foldr mkPi res args (checked, ty') <- lift $ check ctxt [] rty case normaliseAll ctxt [] (finalise ty') of UType _ -> return () TType _ -> return () ty'' -> lift . tfail . InternalMsg $ show checked ++ " is not a type: it's " ++ show ty'' case lookupDefExact n ctxt of Just _ -> lift . tfail . InternalMsg $ show n ++ " is already defined." 
Nothing -> return () let decl = TyDecl Ref checked ctxt' = addCtxtDef n decl ctxt set_context ctxt' updateAux $ \e -> e { new_tyDecls = (RTyDeclInstrs n fc (map rArgToPArg args) checked) : new_tyDecls e } aux <- getAux returnUnit | n == tacN "prim__DefineFunction", [decl] <- args = do defn <- reifyFunDefn decl defineFunction defn returnUnit | n == tacN "prim__AddInstance", [cls, inst] <- args = do className <- reifyTTName cls instName <- reifyTTName inst updateAux $ \e -> e { new_tyDecls = RAddInstance className instName : new_tyDecls e} returnUnit | n == tacN "prim__ResolveTC", [fn] <- args = do g <- goal fn <- reifyTTName fn resolveTC False True 100 g fn ist returnUnit | n == tacN "prim__RecursiveElab", [goal, script] <- args = do goal' <- reifyRaw goal ctxt <- get_context script <- eval script (goalTT, goalTy) <- lift $ check ctxt [] goal' lift $ isType ctxt [] goalTy recH <- getNameFrom (sMN 0 "recElabHole") aux <- getAux datatypes <- get_datatypes env <- get_env (_, ES (p, aux') _ _) <- lift $ runElab aux (runTactical ist fc [] script) (newProof recH ctxt datatypes goalTT) let tm_out = getProofTerm (pterm p) updateAux $ const aux' env' <- get_env (tm, ty, _) <- lift $ recheck ctxt env (forget tm_out) tm_out let (tm', ty') = (reflect tm, reflect ty) fmap fst . get_type_val $ rawPair (Var $ reflm "TT", Var $ reflm "TT") (tm', ty') | n == tacN "prim__Debug", [ty, msg] <- args = do let msg' = fromTTMaybe msg case msg' of Nothing -> debugElaborator Nothing Just (Constant (Str m)) -> debugElaborator (Just m) Just x -> lift . tfail . InternalMsg $ "Can't reify message for debugging: " ++ show x runTacTm x = lift . tfail $ ElabScriptStuck x -- Running tactics directly -- if a tactic adds unification problems, return an error runTac :: Bool -> IState -> Maybe FC -> Name -> PTactic -> ElabD () runTac autoSolve ist perhapsFC fn tac = do env <- get_env g <- goal let tac' = fmap (addImplBound ist (map fst env)) tac if autoSolve then runT tac' else no_errors (runT tac') (Just (CantSolveGoal g (map (\(n, b) -> (n, binderTy b)) env))) where runT (Intro []) = do g <- goal attack; intro (bname g) where bname (Bind n _ _) = Just n bname _ = Nothing runT (Intro xs) = mapM_ (\x -> do attack; intro (Just x)) xs runT Intros = do g <- goal attack; intro (bname g) try' (runT Intros) (return ()) True where bname (Bind n _ _) = Just n bname _ = Nothing runT (Exact tm) = do elab ist toplevel ERHS [] (sMN 0 "tac") tm when autoSolve solveAll runT (MatchRefine fn) = do fnimps <- case lookupCtxtName fn (idris_implicits ist) of [] -> do a <- envArgs fn return [(fn, a)] ns -> return (map (\ (n, a) -> (n, map (const True) a)) ns) let tacs = map (\ (fn', imps) -> (match_apply (Var fn') (map (\x -> (x, 0)) imps), fn')) fnimps tryAll tacs when autoSolve solveAll where envArgs n = do e <- get_env case lookup n e of Just t -> return $ map (const False) (getArgTys (binderTy t)) _ -> return [] runT (Refine fn []) = do fnimps <- case lookupCtxtName fn (idris_implicits ist) of [] -> do a <- envArgs fn return [(fn, a)] ns -> return (map (\ (n, a) -> (n, map isImp a)) ns) let tacs = map (\ (fn', imps) -> (apply (Var fn') (map (\x -> (x, 0)) imps), fn')) fnimps tryAll tacs when autoSolve solveAll where isImp (PImp _ _ _ _ _) = True isImp _ = False envArgs n = do e <- get_env case lookup n e of Just t -> return $ map (const False) (getArgTys (binderTy t)) _ -> return [] runT (Refine fn imps) = do ns <- apply (Var fn) (map (\x -> (x,0)) imps) when autoSolve solveAll runT DoUnify = do unify_all when autoSolve solveAll runT (Claim n 
tm) = do tmHole <- getNameFrom (sMN 0 "newGoal") claim tmHole RType claim n (Var tmHole) focus tmHole elab ist toplevel ERHS [] (sMN 0 "tac") tm focus n runT (Equiv tm) -- let bind tm, then = do attack tyn <- getNameFrom (sMN 0 "ety") claim tyn RType valn <- getNameFrom (sMN 0 "eqval") claim valn (Var tyn) letn <- getNameFrom (sMN 0 "equiv_val") letbind letn (Var tyn) (Var valn) focus tyn elab ist toplevel ERHS [] (sMN 0 "tac") tm focus valn when autoSolve solveAll runT (Rewrite tm) -- to elaborate tm, let bind it, then rewrite by that = do attack; -- (h:_) <- get_holes tyn <- getNameFrom (sMN 0 "rty") -- start_unify h claim tyn RType valn <- getNameFrom (sMN 0 "rval") claim valn (Var tyn) letn <- getNameFrom (sMN 0 "rewrite_rule") letbind letn (Var tyn) (Var valn) focus valn elab ist toplevel ERHS [] (sMN 0 "tac") tm rewrite (Var letn) when autoSolve solveAll runT (Induction tm) -- let bind tm, similar to the others = case_ True autoSolve ist fn tm runT (CaseTac tm) = case_ False autoSolve ist fn tm runT (LetTac n tm) = do attack tyn <- getNameFrom (sMN 0 "letty") claim tyn RType valn <- getNameFrom (sMN 0 "letval") claim valn (Var tyn) letn <- unique_hole n letbind letn (Var tyn) (Var valn) focus valn elab ist toplevel ERHS [] (sMN 0 "tac") tm when autoSolve solveAll runT (LetTacTy n ty tm) = do attack tyn <- getNameFrom (sMN 0 "letty") claim tyn RType valn <- getNameFrom (sMN 0 "letval") claim valn (Var tyn) letn <- unique_hole n letbind letn (Var tyn) (Var valn) focus tyn elab ist toplevel ERHS [] (sMN 0 "tac") ty focus valn elab ist toplevel ERHS [] (sMN 0 "tac") tm when autoSolve solveAll runT Compute = compute runT Trivial = do trivial' ist; when autoSolve solveAll runT TCInstance = runT (Exact (PResolveTC emptyFC)) runT (ProofSearch rec prover depth top hints) = do proofSearch' ist rec False depth prover top fn hints when autoSolve solveAll runT (Focus n) = focus n runT Unfocus = do hs <- get_holes case hs of [] -> return () (h : _) -> movelast h runT Solve = solve runT (Try l r) = do try' (runT l) (runT r) True runT (TSeq l r) = do runT l; runT r runT (ApplyTactic tm) = do tenv <- get_env -- store the environment tgoal <- goal -- store the goal attack -- let f : List (TTName, Binder TT) -> TT -> Tactic = tm in ... 
script <- getNameFrom (sMN 0 "script") claim script scriptTy scriptvar <- getNameFrom (sMN 0 "scriptvar" ) letbind scriptvar scriptTy (Var script) focus script elab ist toplevel ERHS [] (sMN 0 "tac") tm (script', _) <- get_type_val (Var scriptvar) -- now that we have the script apply -- it to the reflected goal and context restac <- getNameFrom (sMN 0 "restac") claim restac tacticTy focus restac fill (raw_apply (forget script') [reflectEnv tenv, reflect tgoal]) restac' <- get_guess solve -- normalise the result in order to -- reify it ctxt <- get_context env <- get_env let tactic = normalise ctxt env restac' runReflected tactic where tacticTy = Var (reflm "Tactic") listTy = Var (sNS (sUN "List") ["List", "Prelude"]) scriptTy = (RBind (sMN 0 "__pi_arg") (Pi Nothing (RApp listTy envTupleType) RType) (RBind (sMN 1 "__pi_arg") (Pi Nothing (Var $ reflm "TT") RType) tacticTy)) runT (ByReflection tm) -- run the reflection function 'tm' on the -- goal, then apply the resulting reflected Tactic = do tgoal <- goal attack script <- getNameFrom (sMN 0 "script") claim script scriptTy scriptvar <- getNameFrom (sMN 0 "scriptvar" ) letbind scriptvar scriptTy (Var script) focus script ptm <- get_term elab ist toplevel ERHS [] (sMN 0 "tac") (PApp emptyFC tm [pexp (delabTy' ist [] tgoal True True)]) (script', _) <- get_type_val (Var scriptvar) -- now that we have the script apply -- it to the reflected goal restac <- getNameFrom (sMN 0 "restac") claim restac tacticTy focus restac fill (forget script') restac' <- get_guess solve -- normalise the result in order to -- reify it ctxt <- get_context env <- get_env let tactic = normalise ctxt env restac' runReflected tactic where tacticTy = Var (reflm "Tactic") scriptTy = tacticTy runT (Reflect v) = do attack -- let x = reflect v in ... tyn <- getNameFrom (sMN 0 "letty") claim tyn RType valn <- getNameFrom (sMN 0 "letval") claim valn (Var tyn) letn <- getNameFrom (sMN 0 "letvar") letbind letn (Var tyn) (Var valn) focus valn elab ist toplevel ERHS [] (sMN 0 "tac") v (value, _) <- get_type_val (Var letn) ctxt <- get_context env <- get_env let value' = hnf ctxt env value runTac autoSolve ist perhapsFC fn (Exact $ PQuote (reflect value')) runT (Fill v) = do attack -- let x = fill x in ... tyn <- getNameFrom (sMN 0 "letty") claim tyn RType valn <- getNameFrom (sMN 0 "letval") claim valn (Var tyn) letn <- getNameFrom (sMN 0 "letvar") letbind letn (Var tyn) (Var valn) focus valn elab ist toplevel ERHS [] (sMN 0 "tac") v (value, _) <- get_type_val (Var letn) ctxt <- get_context env <- get_env let value' = normalise ctxt env value rawValue <- reifyRaw value' runTac autoSolve ist perhapsFC fn (Exact $ PQuote rawValue) runT (GoalType n tac) = do g <- goal case unApply g of (P _ n' _, _) -> if nsroot n' == sUN n then runT tac else fail "Wrong goal type" _ -> fail "Wrong goal type" runT ProofState = do g <- goal return () runT Skip = return () runT (TFail err) = lift . tfail $ ReflectionError [err] (Msg "") runT SourceFC = case perhapsFC of Nothing -> lift . tfail $ Msg "There is no source location available." Just fc -> do fill $ reflectFC fc solve runT Qed = lift . 
tfail $ Msg "The qed command is only valid in the interactive prover" runT x = fail $ "Not implemented " ++ show x runReflected t = do t' <- reify ist t runTac autoSolve ist perhapsFC fn t' elaboratingArgErr :: [(Name, Name)] -> Err -> Err elaboratingArgErr [] err = err elaboratingArgErr ((f,x):during) err = fromMaybe err (rewrite err) where rewrite (ElaboratingArg _ _ _ _) = Nothing rewrite (ProofSearchFail e) = fmap ProofSearchFail (rewrite e) rewrite (At fc e) = fmap (At fc) (rewrite e) rewrite err = Just (ElaboratingArg f x during err) withErrorReflection :: Idris a -> Idris a withErrorReflection x = idrisCatch x (\ e -> handle e >>= ierror) where handle :: Err -> Idris Err handle e@(ReflectionError _ _) = do logLvl 3 "Skipping reflection of error reflection result" return e -- Don't do meta-reflection of errors handle e@(ReflectionFailed _ _) = do logLvl 3 "Skipping reflection of reflection failure" return e -- At and Elaborating are just plumbing - error reflection shouldn't rewrite them handle e@(At fc err) = do logLvl 3 "Reflecting body of At" err' <- handle err return (At fc err') handle e@(Elaborating what n err) = do logLvl 3 "Reflecting body of Elaborating" err' <- handle err return (Elaborating what n err') handle e@(ElaboratingArg f a prev err) = do logLvl 3 "Reflecting body of ElaboratingArg" hs <- getFnHandlers f a err' <- if null hs then handle err else applyHandlers err hs return (ElaboratingArg f a prev err') -- ProofSearchFail is an internal detail - so don't expose it handle (ProofSearchFail e) = handle e -- TODO: argument-specific error handlers go here for ElaboratingArg handle e = do ist <- getIState logLvl 2 "Starting error reflection" let handlers = idris_errorhandlers ist applyHandlers e handlers getFnHandlers :: Name -> Name -> Idris [Name] getFnHandlers f arg = do ist <- getIState let funHandlers = maybe M.empty id . lookupCtxtExact f . idris_function_errorhandlers $ ist return . maybe [] S.toList . M.lookup arg $ funHandlers applyHandlers e handlers = do ist <- getIState let err = fmap (errReverse ist) e logLvl 3 $ "Using reflection handlers " ++ concat (intersperse ", " (map show handlers)) let reports = map (\n -> RApp (Var n) (reflectErr err)) handlers -- Typecheck error handlers - if this fails, then something else was wrong earlier! handlers <- case mapM (check (tt_ctxt ist) []) reports of Error e -> ierror $ ReflectionFailed "Type error while constructing reflected error" e OK hs -> return hs -- Normalize error handler terms to produce the new messages ctxt <- getContext let results = map (normalise ctxt []) (map fst handlers) logLvl 3 $ "New error message info: " ++ concat (intersperse " and " (map show results)) -- For each handler term output, either discard it if it is Nothing or reify it the Haskell equivalent let errorpartsTT = mapMaybe unList (mapMaybe fromTTMaybe results) errorparts <- case mapM (mapM reifyReportPart) errorpartsTT of Left err -> ierror err Right ok -> return ok return $ case errorparts of [] -> e parts -> ReflectionError errorparts e solveAll = try (do solve; solveAll) (return ()) -- | Do the left-over work after creating declarations in reflected -- elaborator scripts processTacticDecls :: ElabInfo -> [RDeclInstructions] -> Idris () processTacticDecls info steps = -- The order of steps is important: type declarations might -- establish metavars that later function bodies resolve. 
forM_ (reverse steps) $ \case RTyDeclInstrs n fc impls ty -> do logLvl 3 $ "Declaration from tactics: " ++ show n ++ " : " ++ show ty logLvl 3 $ " It has impls " ++ show impls updateIState $ \i -> i { idris_implicits = addDef n impls (idris_implicits i) } addIBC (IBCImp n) ds <- checkDef fc (\_ e -> e) [(n, (-1, Nothing, ty))] addIBC (IBCDef n) ctxt <- getContext case lookupDef n ctxt of (TyDecl _ _ : _) -> -- If the function isn't defined at the end of the elab script, -- then it must be added as a metavariable. This needs guarding -- to prevent overwriting case defs with a metavar, if the case -- defs come after the type decl in the same script! let ds' = map (\(n, (i, top, t)) -> (n, (i, top, t, True))) ds in addDeferred ds' _ -> return () RAddInstance className instName -> do -- The type class resolution machinery relies on a special logLvl 2 $ "Adding elab script instance " ++ show instName ++ " for " ++ show className addInstance False True className instName addIBC (IBCInstance False True className instName) RClausesInstrs n cs -> do logLvl 3 $ "Pattern-matching definition from tactics: " ++ show n solveDeferred n let lhss = map (\(_, lhs, _) -> lhs) cs let fc = fileFC "elab_reflected" pmissing <- do ist <- getIState possible <- genClauses fc n lhss (map (\lhs -> delab' ist lhs True True) lhss) missing <- filterM (checkPossible n) possible return (filter (noMatch ist lhss) missing) let tot = if null pmissing then Unchecked -- still need to check recursive calls else Partial NotCovering -- missing cases implies not total setTotality n tot updateIState $ \i -> i { idris_patdefs = addDef n (cs, pmissing) $ idris_patdefs i } addIBC (IBCDef n) ctxt <- getContext case lookupDefExact n ctxt of Just (CaseOp _ _ _ _ _ cd) -> -- Here, we populate the call graph with a list of things -- we refer to, so that if they aren't total, the whole -- thing won't be. let (scargs, sc) = cases_compiletime cd (scargs', sc') = cases_runtime cd calls = findCalls sc' scargs used = findUsedArgs sc' scargs' cg = CGInfo scargs' calls [] used [] in do logLvl 2 $ "Called names in reflected elab: " ++ show cg addToCG n cg addToCalledG n (nub (map fst calls)) addIBC $ IBCCG n Just _ -> return () -- TODO throw internal error Nothing -> return () -- checkDeclTotality requires that the call graph be present -- before calling it. -- TODO: reduce code duplication with Idris.Elab.Clause buildSCG (fc, n) -- Actually run the totality checker. In the main clause -- elaborator, this is deferred until after. Here, we run it -- now to get totality information as early as possible. tot' <- checkDeclTotality (fc, n) setTotality n tot' when (tot' /= Unchecked) $ addIBC (IBCTotal n tot') where -- TODO: see if the code duplication with Idris.Elab.Clause can be -- reduced or eliminated. 
checkPossible :: Name -> PTerm -> Idris Bool checkPossible fname lhs_in = do ctxt <- getContext ist <- getIState let lhs = addImplPat ist lhs_in let fc = fileFC "elab_reflected_totality" let tcgen = False -- TODO: later we may support dictionary generation case elaborate ctxt (idris_datatypes ist) (sMN 0 "refPatLHS") infP initEState (erun fc (buildTC ist info ELHS [] fname (infTerm lhs))) of OK (ElabResult lhs' _ _ _ _ _, _) -> do -- not recursively calling here, because we don't -- want to run infinitely many times let lhs_tm = orderPats (getInferTerm lhs') case recheck ctxt [] (forget lhs_tm) lhs_tm of OK _ -> return True err -> return False -- if it's a recoverable error, the case may become possible Error err -> if tcgen then return (recoverableCoverage ctxt err) else return (validCoverageCase ctxt err || recoverableCoverage ctxt err) -- TODO: Attempt to reduce/eliminate code duplication with Idris.Elab.Clause noMatch i cs tm = all (\x -> case matchClause i (delab' i x True True) tm of Right _ -> False Left _ -> True) cs
BartAdv/Idris-dev
src/Idris/Elab/Term.hs
Haskell
bsd-3-clause
109,952
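A note on one helper from the elaborator module above (src/Idris/Elab/Term.hs): type class resolution only marks arguments in the class's "determining" positions as injective, and that selection is done by the small getDets helper. The standalone module below simply lifts that helper out, with an added type signature and a runnable example; the module name and the example values are mine, not part of Idris.

module GetDetsSketch where

-- Keep exactly those arguments whose zero-based position appears in the
-- set of determining positions ds (as getDets does in the elaborator above).
getDets :: Int -> [Int] -> [a] -> [a]
getDets _ _  []       = []
getDets i ds (a : as)
  | i `elem` ds = a : getDets (i + 1) ds as
  | otherwise   = getDets (i + 1) ds as

-- Keeping positions 0 and 2 of a four-element list:
-- getDets 0 [0, 2] "abcd" == "ac"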
{-# LANGUAGE DeriveDataTypeable #-}
module Control.Pipe.Coroutine (
  Coroutine,
  resume,
  suspend,
  coroutine,
  step,
  terminate
  ) where

import Control.Monad
import Control.Pipe
import Control.Pipe.Exception
import qualified Control.Exception as E
import Data.Typeable
import Prelude hiding (catch)

data Coroutine a b m r = Coroutine
  { resume    :: Pipe a b m r
  , finalizer :: [m ()] }

suspend :: Monad m
        => Pipe a b m r
        -> Pipe a x m (Either r (b, Coroutine a b m r))
suspend (Pure r w) = Pure (Left r) w
suspend (Throw e w) = Throw e w
suspend (Yield x p w) = return (Right (x, Coroutine p w))
suspend (M s m h) = M s (liftM suspend m) (suspend . h)
suspend (Await k h) = Await (suspend . k) (suspend . h)

coroutine :: Monad m
          => Pipe a b m r
          -> Coroutine a b m r
coroutine p = Coroutine p []

step :: Monad m
     => Coroutine a b m r
     -> Pipe a x m (Either r (b, Coroutine a b m r))
step = suspend . resume

terminate :: Monad m
          => Coroutine a b m r
          -> Pipe a b m ()
terminate p = mapM_ masked (finalizer p)
pcapriotti/pipes-extra
Control/Pipe/Coroutine.hs
Haskell
bsd-3-clause
1,087
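A possible driver for the coroutine module above, showing how coroutine, step and terminate expose one yielded value at a time. It assumes Control.Pipe provides a monadic Pipe with a yield primitive of type b -> Pipe a b m (); that assumption about the underlying pipes API is mine and is not established by the file itself.

import Control.Pipe            -- assumed to export a `yield` primitive
import Control.Pipe.Coroutine

-- A producer that yields two numbers and then finishes.
producer :: Monad m => Pipe x Int m ()
producer = do
  yield 1
  yield 2

-- Run the producer for one step: Left means it finished, Right carries the
-- yielded value plus the coroutine to resume (or terminate) later.
firstValue :: Monad m => Pipe x Int m (Maybe Int)
firstValue = do
  r <- step (coroutine producer)
  case r of
    Left ()         -> return Nothing
    Right (n, rest) -> do
      terminate rest     -- run any registered finalizers
      return (Just n)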
{-# LANGUAGE RankNTypes, ScopedTypeVariables, GADTs #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}

-- | @operational@-style programs for 'MonadPlus'.  See the
-- documentation for "Control.Applicative.Operational" and
-- "Control.Monad.Operational" for guidance on how to use this module.
module Control.MonadPlus.Operational
    ( module Control.Operational.Class
    , ProgramP(..)
    , interpretP
    , fromProgramP
    , ProgramViewP(..)
    , view
    ) where

import Control.Applicative
import Control.Monad
import Control.MonadPlus.Free
import Control.Operational.Class
import Data.Functor.Coyoneda

newtype ProgramP instr a = ProgramP
    { -- | Interpret the program as a free 'MonadPlus'.
      toFree :: Free (Coyoneda instr) a
    } deriving (Functor, Applicative, Alternative, Monad, MonadPlus)

instance Operational instr (ProgramP instr) where
    singleton = ProgramP . liftF . liftCoyoneda

interpretP :: forall m instr a. (Functor m, MonadPlus m)
           => (forall x. instr x -> m x)
           -> ProgramP instr a
           -> m a
interpretP evalI = retract . hoistFree evalF . toFree
    where evalF :: forall x. Coyoneda instr x -> m x
          evalF (Coyoneda f i) = fmap f (evalI i)

fromProgramP :: (Operational instr m, Functor m, MonadPlus m)
             => ProgramP instr a -> m a
fromProgramP = interpretP singleton

data ProgramViewP instr a where
    Return :: a -> ProgramViewP instr a
    (:>>=) :: instr a -> (a -> ProgramP instr b) -> ProgramViewP instr b
    MPlus  :: [ProgramViewP instr a] -> ProgramViewP instr a

view :: ProgramP instr a -> ProgramViewP instr a
view = eval . toFree
    where eval (Pure a) = Return a
          eval (Free (Coyoneda f i)) = i :>>= (ProgramP . f)
          eval (Plus mas) = MPlus $ map eval mas
sacundim/free-operational
Control/MonadPlus/Operational.hs
Haskell
bsd-3-clause
1,859
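A hypothetical client of the module above: the Choice instruction, the pairs program and the list-based interpreter are illustrations of the exported API (singleton, interpretP), not code shipped with free-operational.

{-# LANGUAGE GADTs #-}
module ChoiceSketch where

import Control.Monad (guard)
import Control.MonadPlus.Operational

-- One instruction: nondeterministically pick an element of a list.
data Choice a where
  Choose :: [a] -> Choice a

-- A ProgramP built from `singleton` plus the derived Monad/Alternative instances.
pairs :: ProgramP Choice (Int, Int)
pairs = do
  x <- singleton (Choose [1 .. 3])
  y <- singleton (Choose [1 .. 3])
  guard (even (x + y))
  return (x, y)

-- Interpret each instruction into the list monad, which is a MonadPlus.
evalChoice :: Choice x -> [x]
evalChoice (Choose xs) = xs

runChoice :: ProgramP Choice a -> [a]
runChoice = interpretP evalChoice

-- runChoice pairs should enumerate the pairs drawn from [1..3] whose sum is even.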
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}

module RunstaskellSpec where

import Control.Exception
import System.Exit
import System.FilePath
import System.IO
import System.IO.Silently
import System.IO.Temp
import Test.Hspec

import BootstrapSpec
import PackageSets
import Path
import Runstaskell
import Sandboxes

spec :: Spec
spec = do
  describe "getPackageNameSetFromProgramPath" $ do
    it "parses runstaskell-test" $ do
      getPackageNameSetFromProgName (Path "runstaskell-test") `shouldBe` "test"
    it "parses runstaskell-1.11" $ do
      getPackageNameSetFromProgName (Path "runstaskell-1.11") `shouldBe` "1.11"
    it "returns the default for runstaskell" $ do
      getPackageNameSetFromProgName (Path "runstaskell") `shouldBe` latest
    it "returns the default for foo" $ do
      getPackageNameSetFromProgName (Path "foo") `shouldBe` latest

  describe "runScript" $ do
    let stdoutCode = unlines $
          "import System.Exit" :
          "import System.IO" :
          "main = do" :
          " hPutStrLn stdout $ \"this goes to stdout\"" :
          []
        stderrCode = unlines $
          "import System.Exit" :
          "import System.IO" :
          "main = do" :
          " hPutStrLn stderr $ \"this goes to stderr\"" :
          " exitWith $ ExitFailure 23" :
          []
        argsCode = unlines $
          "import System.Environment" :
          "main = getArgs >>= print" :
          []
    it "inherits stdout/stderr and exitcode from running the script" $ do
      withBootstrappedScript stdoutCode $ \executable sandboxes scriptPath -> do
        output <- hCapture_ [stdout] $
          runScript executable sandboxes scriptPath []
        output `shouldBe` "this goes to stdout\n"
    it "inherits stderr and exitcode from running the script" $ do
      withBootstrappedScript stderrCode $ \executable sandboxes scriptPath -> do
        output <- hCapture_ [stderr] $
          runScript executable sandboxes scriptPath []
            `catch` \e -> e `shouldBe` ExitFailure 23
        output `shouldBe` "this goes to stderr\n"
    it "passes arguments to the running script" $ do
      withBootstrappedScript argsCode $ \executable sandboxes scriptPath -> do
        let args = ["arg1", "arg2"]
        output <- hCapture_ [stdout] $
          runScript executable sandboxes scriptPath args
        output `shouldBe` (show args ++ "\n")

withBootstrappedScript :: String
                       -> (Path ProgName -> Path Sandboxes -> Path Script -> IO ())
                       -> IO ()
withBootstrappedScript code action =
  withBootstrapped "test" $ \ binPath dataPath ->
    withSystemTempFile "Code.hs" $ \ scriptPath handle -> do
      hPutStr handle code
      hClose handle
      action
        (Path $ toPath binPath </> "runstaskell-test")
        (getSandboxes dataPath)
        (Path scriptPath)
soenkehahn/runstaskell
test/RunstaskellSpec.hs
Haskell
bsd-3-clause
3,049
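The first describe block above pins down how a program name selects a package set. Below is a standalone approximation of that rule on plain Strings; the project's real Path and PackageSets types are not shown in this file, so this is a sketch of the specified behaviour, not the project's implementation.

-- Everything after the first '-' names the package set; otherwise fall back
-- to the given default (the spec above uses `latest` as that default).
packageSetFromProgName :: String -> String -> String
packageSetFromProgName defaultSet progName =
  case break (== '-') progName of
    (_, '-' : rest) | not (null rest) -> rest
    _                                 -> defaultSet

-- packageSetFromProgName "latest" "runstaskell-1.11" == "1.11"
-- packageSetFromProgName "latest" "runstaskell"      == "latest"
-- packageSetFromProgName "latest" "foo"              == "latest"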
module Day23 (part1,part2,test1,part1Solution, part2Solution) where

import Control.Applicative
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Sequence (Seq)
import qualified Data.Sequence as S
import Text.Trifecta

type Register = Char

data Instruction = CpyI Integer Register
                 | CpyR Register Register
                 | Inc Register
                 | Dec Register
                 | JnzR Register Integer
                 | JnzR' Integer Register
                 | JnzR'' Register Register
                 | JnzI Integer Integer
                 | Tgl Register
                 | Skip Instruction
  deriving (Eq, Ord, Show)

type CurrentPosition = Int

data ProgramState = ProgramState CurrentPosition (Seq Instruction) (Map Register Integer)
  deriving (Eq, Ord, Show)

startState :: Map Register Integer -> [Instruction] -> ProgramState
startState m is = ProgramState 0 (S.fromList is) m

instructionParser :: Parser Instruction
instructionParser = try cpyIParser
                <|> try cpyRParser
                <|> try incParser
                <|> try decParser
                <|> try jnzRParser
                <|> try jnzR'Parser
                <|> try tglParser
                <|> jnzIParser
  where cpyIParser = string "cpy " *> (CpyI <$> integer <*> letter)
        cpyRParser = string "cpy " *> (CpyR <$> letter <*> (space *> letter))
        incParser = string "inc " *> (Inc <$> letter)
        decParser = string "dec " *> (Dec <$> letter)
        jnzRParser = string "jnz " *> (JnzR <$> letter <*> (space *> integer))
        jnzR'Parser = string "jnz " *> (JnzR' <$> integer <*> letter)
        jnzIParser = string "jnz " *> (JnzI <$> integer <*> integer)
        tglParser = string "tgl " *> (Tgl <$> letter)

fromSuccess :: Result x -> x
fromSuccess (Success x) = x
fromSuccess (Failure x) = error (show x)

parseInput :: String -> [Instruction]
parseInput = fromSuccess . parseString (some (instructionParser <* skipOptional windowsNewLine)) mempty
  where windowsNewLine = const () <$ skipOptional newline <*> skipOptional (char '\r')

doNextInstruction :: ProgramState -> ProgramState
doNextInstruction ps@(ProgramState i instructions rMap)
  | endOfInstruction ps = ps
  | otherwise = ProgramState (newI currInstruction) (newInstructions currInstruction) (newMap currInstruction)
  where currInstruction = S.index instructions i
        newI (JnzR' x r)
          | x > 0 = i + fromIntegral (currVal r)
          | otherwise = i+1
        newI (JnzR'' r1 r2)
          | currVal r1 > 0 = i + fromIntegral (currVal r2)
          | otherwise = i+1
        newI (JnzR r x)
          | currVal r > 0 = i + fromIntegral x
          | otherwise = i+1
        newI (JnzI x y)
          | x > 0 = i + fromIntegral y
          | otherwise = i+1
        newI _ = i+1
        currVal r = M.findWithDefault 0 r rMap
        newInstructions (Tgl r) = S.adjust tglInstruction instructionToTgl instructions
          where instructionToTgl = i + fromIntegral (currVal r)
        newInstructions _ = instructions
        newMap (CpyI x r) = M.insert r x rMap
        newMap (CpyR x y) = M.insert y (currVal x) rMap
        newMap (Inc r) = M.insert r (currVal r + 1) rMap
        newMap (Dec r) = M.insert r (currVal r - 1) rMap
        newMap _ = rMap

tglInstruction :: Instruction -> Instruction
tglInstruction (CpyI t1 t2) = JnzR' t1 t2
tglInstruction (CpyR t1 t2) = JnzR'' t1 t2
tglInstruction (Inc t) = Dec t
tglInstruction (Dec t) = Inc t
tglInstruction i@(JnzR _ _) = Skip i
tglInstruction (JnzR' t1 t2) = CpyI t1 t2
tglInstruction (JnzR'' t1 t2) = CpyR t1 t2
tglInstruction i@(JnzI _ _) = Skip i
tglInstruction (Tgl t) = Inc t
tglInstruction (Skip i) = case n of
    (Skip ins) -> Skip ins
    _ -> n
  where n = tglInstruction i

endOfInstruction :: ProgramState -> Bool
endOfInstruction (ProgramState i instructions _) = i >= S.length instructions

getRegister :: Register -> ProgramState -> Integer
getRegister r (ProgramState _ _ rMap) = M.findWithDefault 0 r rMap

part1 :: Map Register Integer -> Char -> String -> Integer
part1 m c = getRegister c . last . takeWhile (not . endOfInstruction) . iterate doNextInstruction . startState m . parseInput

part1Solution :: IO Integer
part1Solution = part1 (M.fromList [('a',7)]) 'a' <$> readFile "./data/Day23.txt"

part2 :: Map Register Integer -> Char -> String -> Integer
part2 = part1

part2Solution :: IO Integer
part2Solution = part2 (M.fromList [('a',12)]) 'a' <$> readFile "./data/Day23.txt"

test1 :: String
test1 = "cpy 2 a\ntgl a\ntgl a\ntgl a\ncpy 1 a\ndec a\ndec a"
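
-- A usage sketch added for illustration (not part of the original module).
-- Running the machine over the bundled sample program 'test1' should leave
-- register 'a' at 3, matching the Advent of Code 2016 day 23 example
-- (the initial value of 'a' is overwritten by the first "cpy" anyway):
--
-- >>> part1 (M.fromList [('a',7)]) 'a' test1
-- 3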
z0isch/aoc2016
src/Day23.hs
Haskell
bsd-3-clause
4,599
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PatternSynonyms #-}
module Pt.StateMachine where

import Ptui.Types

import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.ByteString.Lazy.UTF8 as U
import Data.Char (isDigit)
import Data.List (foldl',uncons)
import Data.Maybe (mapMaybe,fromMaybe)
import Text.Read (readMaybe)

data CX = C0 | C1 deriving Show

data Q_ = Q0 | QSS2 | QSS3 | QESC | QESCSP | QCSI | QDCS | QOSC | QOSC2
        | QSGR0
        | QSGR38 | QSGR38c | QSGR38r | QSGR38g | QSGR38b
        | QSGR48 | QSGR48c | QSGR48r | QSGR48g | QSGR48b
        deriving Show

pattern b :> bs <- (B.uncons -> Just (b,bs))
pattern Empty <- (B.uncons -> Nothing)

runFSM :: B.ByteString -> [Command]
runFSM = transduce Q0 C0 ("","",[])

runFSMC1 :: B.ByteString -> [Command]
runFSMC1 = transduce Q0 C1 ("","",[])

transduce :: Q_ -> CX -> (String,String,[String]) -> B.ByteString -> [Command]
transduce _ _ _ Empty = []
transduce Q0 cs t ('\x1b':>bs) = transduce QESC cs t bs
transduce Q0 cs t ('\x07':>bs) = BEL : transduce Q0 cs t bs
transduce Q0 cs t ('\x08':>bs) = BS : transduce Q0 cs t bs
transduce Q0 cs t ('\x09':>bs) = HT : transduce Q0 cs t bs
transduce Q0 cs t ('\x0a':>bs) = LF : transduce Q0 cs t bs
transduce Q0 cs t ('\x0b':>bs) = VT : transduce Q0 cs t bs
transduce Q0 cs t ('\x0c':>bs) = FF : transduce Q0 cs t bs
transduce Q0 cs t ('\x0d':>bs) = CR : transduce Q0 cs t bs
transduce Q0 C1 t ('\x84':>bs) = IND : transduce Q0 C1 t bs
transduce Q0 C1 t ('\x85':>bs) = NEL : transduce Q0 C1 t bs
transduce Q0 C1 t ('\x88':>bs) = HTS : transduce Q0 C1 t bs
transduce Q0 C1 t ('\x8d':>bs) = RI : transduce Q0 C1 t bs
transduce Q0 C1 t ('\x8e':>bs) = transduce QSS2 C1 t bs
transduce Q0 C1 t ('\x8f':>bs) = transduce QSS3 C1 t bs
transduce Q0 C1 t ('\x9b':>bs) = transduce QCSI C1 t bs
transduce Q0 cs t bs = maybe [] (\(b,bs') -> Output b : transduce Q0 cs t bs') (U.uncons bs)
transduce QSS2 cs t (b:>bs) = SS2 b : transduce Q0 cs t bs
transduce QSS3 cs t (b:>bs) = SS3 b : transduce Q0 cs t bs
transduce QESC cs t ('D':>bs) = IND : transduce Q0 cs t bs
transduce QESC cs t ('E':>bs) = NEL : transduce Q0 cs t bs
transduce QESC cs t ('H':>bs) = HTS : transduce Q0 cs t bs
transduce QESC cs t ('M':>bs) = RI : transduce Q0 cs t bs
transduce QESC cs t ('N':>bs) = transduce QSS2 cs t bs
transduce QESC cs t ('O':>bs) = transduce QSS3 cs t bs
transduce QESC cs _ ('[':>bs) = transduce QCSI cs ("","",[]) bs
transduce QESC cs _ ('(':>bs) = transduce QDCS cs ("(","",[]) bs
transduce QESC cs _ (']':>bs) = transduce QOSC cs ("","",[]) bs
transduce QESC cs t (' ':>bs) = transduce QESCSP cs t bs
transduce QOSC cs t@(p,x,xs) (';':>bs) = transduce QOSC2 cs (p,"",xs++[x]) bs
transduce QOSC cs t@(p,x,xs) (b:>bs)
  | isDigit b = transduce QOSC cs (p,x++[b],xs) bs
  | otherwise = transduce Q0 cs t bs
transduce QOSC2 cs t@(p,x,xs) ('\a':>bs) = makeOSC (xs++[x]) : transduce Q0 cs t bs
transduce QOSC2 C1 t@(p,x,xs) ('\x9c':>bs) = makeOSC (xs++[x]) : transduce Q0 C1 t bs
transduce QOSC2 cs t@(p,x,xs) bs = maybe [] (\(b,bs') -> transduce QOSC2 cs (p,x++[b],xs) bs') (U.uncons bs)
transduce QDCS cs t@(p,x,xs) ('0':>bs) = makeCharsetDesignation p Special : transduce Q0 cs t bs
transduce QDCS cs t@(p,x,xs) ('A':>bs) = makeCharsetDesignation p UK : transduce Q0 cs t bs
transduce QDCS cs t@(p,x,xs) ('B':>bs) = makeCharsetDesignation p USASCII : transduce Q0 cs t bs
transduce QESCSP _ t ('F':>bs) = transduce Q0 C0 t bs
transduce QESCSP _ t ('G':>bs) = transduce Q0 C1 t bs
transduce QCSI cs t@(p,x,xs) ('A':>bs) = CUU (fromMaybe 1 $ readMaybe x) : transduce Q0 cs t bs
transduce QCSI cs t@(p,x,xs) ('B':>bs) = CUD (fromMaybe 1 $ readMaybe x) : transduce Q0 cs t bs
transduce QCSI cs t@(p,x,xs) ('C':>bs) = CUF (fromMaybe 1 $ readMaybe x) : transduce Q0 cs t bs
transduce QCSI cs t@(p,x,xs) ('D':>bs) = CUB (fromMaybe 1 $ readMaybe x) : transduce Q0 cs t bs
transduce QCSI cs t@(p,x,xs) ('E':>bs) = CNL (fromMaybe 1 $ readMaybe x) : transduce Q0 cs t bs
transduce QCSI cs t@(p,x,xs) ('F':>bs) = CPL (fromMaybe 1 $ readMaybe x) : transduce Q0 cs t bs
transduce QCSI cs t@(p,x,xs) ('G':>bs) = CHA (fromMaybe 1 $ readMaybe x) : transduce Q0 cs t bs
transduce QCSI cs t@(p,x,xs) ('H':>bs) = makeCUP (xs++[x]) : transduce Q0 cs t bs
transduce QCSI cs t@(p,x,xs) ('I':>bs) = CHT (fromMaybe 1 $ readMaybe x) : transduce Q0 cs t bs
transduce QCSI cs t@(p,x,xs) ('m':>bs) = makeSGR (xs++[x]) : transduce Q0 cs t bs
transduce QCSI cs t@(p,x,xs) (';':>bs) = transduce QCSI cs (p,"",xs++[x]) bs
transduce QCSI cs t@(p,x,xs) (b:>bs)
  | isDigit b = transduce QCSI cs (p,x++[b],xs) bs
  | otherwise = transduce Q0 cs t bs
transduce _ cs t (b:>bs) = transduce Q0 cs t bs

makeCUP :: [String] -> Command
makeCUP [] = CUP 1 1
makeCUP [r] = CUP (fromMaybe 1 $ readMaybe r) 1
makeCUP (r:c:_) = CUP (fromMaybe 1 $ readMaybe r) (fromMaybe 1 $ readMaybe c)

makeOSC :: [String] -> Command
makeOSC [] = Noop
makeOSC [_] = Noop
makeOSC ("0":t:_) = SetIconTitle t
makeOSC _ = Noop

makeCharsetDesignation :: String -> CharacterSet -> Command
makeCharsetDesignation g cs = case g of
    "(" -> SetCharset G0 cs
    ")" -> SetCharset G1 cs
    "*" -> SetCharset G2 cs
    "+" -> SetCharset G3 cs
    "-" -> SetCharset G1 cs
    "." -> SetCharset G2 cs
    "/" -> SetCharset G3 cs
    _ -> Noop

makeSGR :: [String] -> Command
makeSGR = SGR . transduceSGR QSGR0 [] . mapMaybe (readMaybe :: String -> Maybe Int)
  where transduceSGR _ _ [] = []
        transduceSGR QSGR0 l (0:xs) = Reset : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (1:xs) = Bold True : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (2:xs) = Faint : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (3:xs) = Italic True : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (4:xs) = Underscore True : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (5:xs) = Blink True : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (7:xs) = Reverse True : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (8:xs) = Invisible True : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (9:xs) = Strikethrough True : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (21:xs) = DoubleUnderline : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (22:xs) = Bold False : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (23:xs) = Italic False : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (24:xs) = Underscore False : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (25:xs) = Blink False : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (27:xs) = Reverse False : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (28:xs) = Invisible False : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (29:xs) = Strikethrough False : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (30:xs) = Foreground (Color256 0) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (31:xs) = Foreground (Color256 1) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (32:xs) = Foreground (Color256 2) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (33:xs) = Foreground (Color256 3) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (34:xs) = Foreground (Color256 4) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (35:xs) = Foreground (Color256 5) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (36:xs) = Foreground (Color256 6) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (37:xs) = Foreground (Color256 7) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (38:xs) = transduceSGR QSGR38 l xs
        transduceSGR QSGR0 l (39:xs) = Foreground Default : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (40:xs) = Background (Color256 0) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (41:xs) = Background (Color256 1) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (42:xs) = Background (Color256 2) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (43:xs) = Background (Color256 3) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (44:xs) = Background (Color256 4) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (45:xs) = Background (Color256 5) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (46:xs) = Background (Color256 6) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (47:xs) = Background (Color256 7) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (48:xs) = transduceSGR QSGR48 l xs
        transduceSGR QSGR0 l (49:xs) = Background Default : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (90:xs) = Foreground (Color256 0) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (91:xs) = Foreground (Color256 1) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (92:xs) = Foreground (Color256 2) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (93:xs) = Foreground (Color256 3) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (94:xs) = Foreground (Color256 4) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (95:xs) = Foreground (Color256 5) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (96:xs) = Foreground (Color256 6) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (97:xs) = Foreground (Color256 7) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (100:xs) = Background (Color256 0) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (101:xs) = Background (Color256 1) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (102:xs) = Background (Color256 2) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (103:xs) = Background (Color256 3) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (104:xs) = Background (Color256 4) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (105:xs) = Background (Color256 5) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (106:xs) = Background (Color256 6) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (107:xs) = Background (Color256 7) : transduceSGR QSGR0 l xs
        transduceSGR QSGR0 l (_:xs) = transduceSGR QSGR0 l xs
        transduceSGR QSGR38 l (5:xs) = transduceSGR QSGR38c l xs
        transduceSGR QSGR38 l (2:xs) = transduceSGR QSGR38r l xs
        transduceSGR QSGR38c l (x:xs) = Foreground (Color256 x) : transduceSGR QSGR0 l xs
        transduceSGR QSGR38r _ (r:xs) = transduceSGR QSGR38g [r] xs
        transduceSGR QSGR38g [r] (g:xs) = transduceSGR QSGR38b [r,g] xs
        transduceSGR QSGR38b [r,g] (b:xs) = Foreground (Truecolor r g b) : transduceSGR QSGR0 [] xs
        transduceSGR QSGR48 l (5:xs) = transduceSGR QSGR48c l xs
        transduceSGR QSGR48 l (2:xs) = transduceSGR QSGR48r l xs
        transduceSGR QSGR48c l (x:xs) = Background (Color256 x) : transduceSGR QSGR0 l xs
        transduceSGR QSGR48r _ (r:xs) = transduceSGR QSGR48g [r] xs
        transduceSGR QSGR48g [r] (g:xs) = transduceSGR QSGR48b [r,g] xs
        transduceSGR QSGR48b [r,g] (b:xs) = Background (Truecolor r g b) : transduceSGR QSGR0 [] xs
        transduceSGR _ _ (_:xs) = transduceSGR QSGR0 [] xs
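
-- A usage sketch added for illustration (not in the original source); it only
-- uses constructors that already appear above, and the exact result is an
-- assumption based on reading the transitions. Feeding a short CSI sequence
-- through the C0 machine should yield an SGR command followed by plain output:
--
--   runFSM "\x1b[1;31mhi"
--     == [SGR [Bold True, Foreground (Color256 1)], Output 'h', Output 'i']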
mrak/ptui
src/Pt/StateMachine.hs
Haskell
bsd-3-clause
11,621
{-# LANGUAGE OverloadedStrings #-}
module ETA.CodeGen.Foreign where

import ETA.Main.DynFlags
import ETA.Types.Type
import ETA.Types.TyCon
import ETA.StgSyn.StgSyn
import ETA.Prelude.ForeignCall
import ETA.Utils.FastString
import ETA.Utils.Util
import ETA.Util

import ETA.CodeGen.ArgRep
import ETA.CodeGen.Env
import ETA.CodeGen.Monad
import ETA.CodeGen.Name
import ETA.CodeGen.Layout
import ETA.CodeGen.Rts
import ETA.CodeGen.Types
import ETA.Debug
import ETA.Util

import Codec.JVM

import Data.Monoid ((<>))
import Data.List (stripPrefix)
import Data.Maybe (catMaybes, fromJust, isJust, maybe)
import Data.Foldable (fold)
import Control.Monad (when)

import qualified Data.Text as T

cgForeignCall :: ForeignCall -> [StgArg] -> Type -> CodeGen ()
cgForeignCall (CCall (CCallSpec target cconv safety)) args resType
  | StaticTarget label _ _ <- target = do
      let (hasObj, isStatic, callTarget) = deserializeTarget (unpackFS label)
          shuffledArgs = if hasObj then last args : init args else args
      dflags <- getDynFlags
      argFtCodes <- getNonVoidArgFtCodes shuffledArgs
      let (argFts, callArgs') = unzip argFtCodes
          callArgs = if hasObj && isStatic then drop 1 callArgs' else callArgs'
          mbObj = if hasObj
                  then Just (expectHead "cgForeignCall: empty callArgs'" callArgs')
                  else Nothing
          mbObjFt = safeHead argFts
      sequel <- getSequel
      case sequel of
        AssignTo targetLocs ->
          emitForeignCall safety mbObj targetLocs (callTarget mbObjFt) callArgs
        _ -> do
          resLocs <- newUnboxedTupleLocs resType
          emitForeignCall safety mbObj resLocs (callTarget mbObjFt) callArgs
          emitReturn resLocs

deserializeTarget :: String -> (Bool, Bool, Maybe FieldType -> [Code] -> Code)
deserializeTarget label = (hasObj, isStatic, callTarget)
  where (hasObj':isStatic':callTargetSpec:_) = split '|' label
        hasObj = read hasObj'
        isStatic = read isStatic'
        (tag:restSpec) = split ',' callTargetSpec

        callTarget = case read tag of
          0 -> genNewTarget restSpec
          1 -> genFieldTarget restSpec
          2 -> genMethodTarget restSpec
          _ -> error $ "deserializeTarget: deserialization failed: " ++ label

        genNewTarget [clsName', methodDesc'] = \_ args ->
             new clsFt
          <> dup clsFt
          <> fold args
          <> invokespecial (mkMethodRef clsName "<init>" argFts void)
          where clsName = read clsName'
                clsFt = obj clsName
                (argFts, _) = expectJust ("deserializeTarget: bad method desc: " ++ label)
                            $ decodeMethodDesc (read methodDesc')

        genFieldTarget [clsName', fieldName', fieldDesc', instr'] = \_ args ->
             fold args
          <> instr (mkFieldRef clsName fieldName fieldFt)
          where (getInstr, putInstr) = if isStatic
                                       then (getstatic, putstatic)
                                       else (getfield, putfield)
                clsName = read clsName'
                fieldName = read fieldName'
                fieldFt = expectJust ("deserializeTarget: bad field desc: " ++ label)
                        $ decodeFieldDesc (read fieldDesc')
                instr = case read instr' of
                  0 -> putInstr
                  1 -> getInstr
                  _ -> error $ "deserializeTarget: bad instr: " ++ label

        genMethodTarget [isInterface', hasSubclass', clsName', methodName', methodDesc'] = \mbObjFt args ->
             fold args
          <> instr (mkMethodRef (clsName mbObjFt) methodName argFts resFt)
          where clsName mbObjFt = if hasSubclass && not isInterface
                                  then maybe (error "deserializeTarget: no subclass field type.") getFtClass mbObjFt
                                  else read clsName'
                methodName = read methodName'
                isInterface = read isInterface'
                hasSubclass = read hasSubclass'
                (argFts, resFt) = expectJust ("deserializeTarget: bad method desc: " ++ label)
                                $ decodeMethodDesc (read methodDesc')
                instr = if isInterface
                        then invokeinterface
                        else if isStatic
                             then invokestatic
                             else invokevirtual

emitForeignCall :: Safety -> Maybe Code -> [CgLoc] -> ([Code] -> Code) -> [Code] -> CodeGen ()
emitForeignCall safety mbObj results target args = wrapSafety $ do
  maybe (emit callCode) (flip emitAssign callCode) resLoc
  maybe (return ()) (flip emitAssign (fromJust mbObj)) objLoc
  where wrapSafety code = do
          whenSafe $ emit $ suspendThreadMethod (playInterruptible safety)
          code
          whenSafe $ emit resumeThreadMethod
          where whenSafe = when (playSafe safety)
        callCode = target args
        (resLoc, objLoc) = if isJust mbObj
                           then case results of
                                  [a]   -> (Nothing, Just a)
                                  [a,b] -> (Just b, Just a)
                           else ( case results of
                                    []  -> Nothing
                                    [a] -> Just a
                                , Nothing )
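
-- Added note (not from the original source, but only restating what the code
-- above does): 'deserializeTarget' expects a label of the form
--   "<hasObj>|<isStatic>|<tag>,<field>,..."
-- where every component after splitting is parsed with 'read'; tag 0 describes
-- a constructor call, tag 1 a field get/put, and tag 2 a static, virtual, or
-- interface method call.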
alexander-at-github/eta
compiler/ETA/CodeGen/Foreign.hs
Haskell
bsd-3-clause
5,276
module Main where

import Command

main :: IO ()
main = execCommand
ku00/meow
app/Main.hs
Haskell
bsd-3-clause
68
module Language.Modelica.Test.Expression (test) where

import qualified Language.Modelica.Parser.Expression as Expr

import Language.Modelica.Test.Utility (testFunc)

test :: IO [Bool]
test = do
  res1 <- mapM (testFunc Expr.expression) $
    "true <= (false * (5.0^(, , ))^(, \"bla\", 2.0))" :
    "3.0 : 7.0 + 8.0 : 9.0" :
    "(3.0 : 7.0) + 8.0 : 9.0" :
    "3.0 : 7.0 + (8.0 : 9.0)" :
    "(3.0 : 7.0) + (8.0 : 9.0)" :
    "3.0 : (7.0 + 8.0) : 9.0" :
    "(3.0 : 7.0 + 8.0) : 9.0" :
    "3.0 : (7.0 + 8.0 : 9.0)" :
    "(3.0 : 7.0 + 8.0 : 9.0)" :
    "(3.0) : (7.0) + (8.0 : 9.0)" :
    "[true, .bla[x]; false]" :
    "()" :
    "{x, y}" :
    "{x = 9.0, y = 9.0}" :
    "f(9.0)" :
    "f(9.0, g(), x)" :
    "if true then x else y" :
    "if true then x elseif false then z else y" :
    "if true then x elseif false then z elseif x < y then 7.0 else y" :
    "x.y" :
    "x .y" :
    "x. y" :
    "x . y" :
    "3 + 4" :
    "3 .+ 4" :
    "x + y" :
    "x .+ y" :
    "points[m:end] + (x1-x0)" :
    "end" :
    []

  res2 <- mapM (testFunc Expr.function_arguments) $
    "x, y for x in 9.0" :
    "function x()" :
    "function x(), function y()" :
    "x, function y()" :
    "function x() for bla in 1, blub in true" :
    "x, y = 7" :
    "function x.y(), 9+4, x = 8" :
    "function x.y(), 9+4 for x in 8" :
    "x, y, a for y, x in (-9.0 + (-a)), a" :
    "x, y" :
    "true, \"BlaBlub\", level = Error.level" :
    "engineFile = \"maps/engine/base.txt\"" :
    "bla = \"äüöß\"" :
    "x = 9.0, y = 9.0" :
    "x = function b()" :
    "x = function b(), y = 7" :
    []

  res3 <- mapM (testFunc Expr.for_indices) $
    "bla in 3.0, blub, x in (3.0, 3.0, (, , ), , )" :
    []

  res4 <- mapM (testFunc Expr.output_expression_list) $
    "(, (, , , ), )" :
    ",,," :
    "x,,x,," :
    []

  res5 <- mapM (testFunc Expr.named_arguments) $
    "bla = blub" :
    "x = y, y = z" :
    "x = y, y = z, z = x" :
    []

  res7 <- mapM (testFunc Expr.expression) $
    "not 5" :
    "not true :5" :
    "true : 6 :3" :
    "true or (6 : 3 and 7)" :
    "(.bla[x.x,:], x.y,,).^ 4.e-3 " :
    "(b0[1:n] - b0[na]/a[na]*a[1:n])*x + b0[na]/a[na]*u" :
    []

  res8 <- mapM (testFunc Expr.function_argument) $
    "function b.x()" :
    "function b(x = -0)" :
    "x + y" :
    []

  res9 <- mapM (testFunc Expr.expression_list) $
    "(, ( ,,), 3 , 3.0 )" :
    "a, b" :
    []

  res10 <- mapM (testFunc Expr.array_subscripts) $
    "[ 3^2, true : false : 3, 4, : , :,:, 3:3 ] " :
    []

  res11 <- mapM (testFunc Expr.component_reference) $
    ".bla.dfs[3,: ,:].ads[ :,3 : true].x" :
    []

  res12 <- mapM (testFunc Expr.named_arguments) $
    "bla = blub, x = .x[(,,)]" :
    "y = function f(x =9)" :
    "y = function .f(x= function g.x(a=7, b=-4.0)), z = -1.0" :
    []

  res13 <- mapM (testFunc Expr.name) $
    "bla" :
    ".a.b.c" :
    []

  res14 <- mapM (testFunc Expr.primary) $
    "3" :
    "\"bla\"" :
    "\"äöüß\"" :
    "false" :
    "true" :
    "der(1)" :
    "f(3)" :
    "points[m:end]" :
    "cat(1, {0}, points[m:end] .+ (x1-x0), {1})" :
    []

  return $ concat [res1, res2, res3, res4, res5, res7, res8, res9, res10, res11, res12, res13, res14]
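
-- Added note (not in the original file): from the uses above, 'testFunc' is
-- applied as 'testFunc parser :: String -> IO Bool', so each sample string is
-- fed to the corresponding parser; presumably the collected Bools record which
-- samples were accepted.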
xie-dongping/modelicaparser
test/Language/Modelica/Test/Expression.hs
Haskell
bsd-3-clause
3,163
-- | Periodic background activities that Kademlia must perform
module Network.DHT.Kademlia.Workers
  ( module Network.DHT.Kademlia.Workers.Interactive
  , module Network.DHT.Kademlia.Workers.Persistence
  , module Network.DHT.Kademlia.Workers.Reapers
  ) where

import Network.DHT.Kademlia.Workers.Interactive
import Network.DHT.Kademlia.Workers.Persistence
import Network.DHT.Kademlia.Workers.Reapers
phylake/kademlia
Network/DHT/Kademlia/Workers.hs
Haskell
bsd-3-clause
396
[ ("A", [2,2]) , ("C", [0,5]) ]
dan-t/cabal-bounds
tests/inputFiles/FromFile.hs
Haskell
bsd-3-clause
32