code
stringlengths
2
1.05M
repo_name
stringlengths
5
101
path
stringlengths
4
991
language
stringclasses
3 values
license
stringclasses
5 values
size
int64
2
1.05M
{-# LANGUAGE OverloadedStrings #-} module Network.HTTP.Nano( module Network.HTTP.Nano.Types, Network.HTTP.Conduit.Request, tlsManager, mkJSONData, http, http', httpS, httpSJSON, httpJSON, buildReq, addHeaders ) where import Network.HTTP.Nano.Types import Control.Exception (handle) import Control.Lens (review, view) import Control.Monad import Control.Monad.Except (MonadError, throwError) import Control.Monad.Reader (MonadReader) import Control.Monad.Trans (MonadIO, liftIO) import Control.Monad.Trans.Resource (MonadResource) import Data.Aeson (FromJSON, ToJSON (..), eitherDecode, encode) import Data.Bifunctor (bimap) import Data.Conduit (ResumableSource, Conduit, ($=+)) import Data.JsonStream.Parser (value, parseByteString) import Data.String (fromString) import Network.HTTP.Conduit hiding (http) import qualified Data.ByteString.Char8 as B import qualified Data.ByteString.Lazy.Char8 as BL import qualified Data.Conduit.List as CL import qualified Network.HTTP.Conduit as HC -- |Create an HTTP manager tlsManager :: IO Manager tlsManager = newManager tlsManagerSettings -- |Create a JSON request body mkJSONData :: ToJSON d => d -> RequestData mkJSONData = JSONRequestData . toJSON -- |Perform an HTTP request http :: (MonadError e m, MonadReader r m, AsHttpError e, HasHttpCfg r, MonadIO m) => Request -> m BL.ByteString http req = responseBody <$> (view httpManager >>= safeHTTP . httpLbs req) -- |Perform an HTTP request, ignoring the response http' :: (MonadError e m, MonadReader r m, AsHttpError e, HasHttpCfg r, MonadIO m) => Request -> m () http' = void . 
http httpS :: (MonadError e m, MonadReader r m, AsHttpError e, HasHttpCfg r, MonadResource m) => Request -> m (ResumableSource m B.ByteString) httpS req = responseBody <$> (view httpManager >>= HC.http req) httpSJSON :: (MonadError e m, MonadReader r m, AsHttpError e, HasHttpCfg r, MonadResource m, FromJSON a) => Request -> m (ResumableSource m a) httpSJSON req = do src <- httpS req return $ src $=+ jsonConduit jsonConduit :: (MonadError e m, MonadReader r m, AsHttpError e, HasHttpCfg r, MonadResource m, FromJSON a) => Conduit B.ByteString m a jsonConduit = CL.mapFoldable (parseByteString value) -- |Perform an HTTP request, attempting to parse the response as JSON httpJSON :: (MonadError e m, MonadReader r m, AsHttpError e, HasHttpCfg r, MonadIO m, FromJSON b) => Request -> m b httpJSON req = http req >>= asJSON asJSON :: (MonadError e m, MonadReader r m, AsHttpError e, HasHttpCfg r, MonadIO m, FromJSON b) => BL.ByteString -> m b asJSON bs = case eitherDecode bs of Left err -> throwError (review _ResponseParseError (err ++ "; original data: " ++ BL.unpack bs)) Right b -> return b -- |Build a request buildReq :: (MonadError e m, MonadReader r m, AsHttpError e, HasHttpCfg r, MonadIO m) => HttpMethod -> URL -> RequestData -> m Request buildReq mthd url dta = do breq <- safeHTTP $ parseUrlThrow url return . 
attachRequestData dta $ breq { method = B.pack $ showHttpMethod mthd } attachRequestData :: RequestData -> Request -> Request attachRequestData NoRequestData req = req attachRequestData (JSONRequestData b) req = req { requestBody = RequestBodyLBS (encode b) } attachRequestData (UrlEncodedRequestData px) req = flip urlEncodedBody req $ fmap (bimap B.pack B.pack) px attachRequestData (RawRequestData bs) req = req { requestBody = RequestBodyLBS bs } -- |Add headers to a request addHeaders :: [(String, String)] -> Request -> Request addHeaders hx req = req { requestHeaders = ehx ++ nhx } where ehx = requestHeaders req nhx = fmap toHeader hx toHeader (n, v) = (fromString n, B.pack v) safeHTTP :: (MonadError e m, MonadReader r m, AsHttpError e, HasHttpCfg r, MonadIO m) => IO a -> m a safeHTTP act = do res <- liftIO $ handle (return . Left) (Right <$> act) case res of Left ex -> throwError $ review _NetworkError ex Right r -> return r
ralphmorton/nano-http
src/Network/HTTP/Nano.hs
Haskell
bsd-3-clause
3,966
{-# LANGUAGE CPP #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE ScopedTypeVariables #-} -- | This module is intended to be imported @qualified@, for example: -- -- > import qualified Test.Tasty.Laws.Monad as Monad -- module Test.Tasty.Laws.Monad ( testUnit , test , testExhaustive ) where #if !MIN_VERSION_base(4,8,0) import Control.Applicative (Applicative) #endif import Control.Monad.Laws (associativity) import Test.DumbCheck (Series, Serial(series), uncurry3, zipA3) import Test.Tasty (TestTree, testGroup) import Test.Tasty.DumbCheck (testSeriesProperty) import qualified Test.Tasty.Laws.Applicative as Applicative -- | @tasty@ 'TestTree' for 'Monad' laws. The type signature forces the -- parameter to be '()' which, unless you are dealing with non-total -- functions, should be enough to test any 'Monad's. testUnit :: ( Applicative m, Monad m , Eq (m ()) , Show (m ()), Show (m (() -> ())) , Serial (m ()), Serial (m (() -> ())) ) => Series (m ()) -> TestTree testUnit = testExhaustive -- | @tasty@ 'TestTree' for 'Monad' laws. Monomorphic sum 'Series'. test :: forall m a . ( Applicative m, Monad m , Eq a, Eq (m a) , Show a, Show (m a), Show (m (a -> a)) , Serial a, Serial (m a), Serial (m (a -> a)) ) => Series (m a) -> TestTree test ms = testGroup "Monad laws" [ Applicative.test ms , testSeriesProperty "(m >>= f) >>= g ≡ m (f >=> g)" (uncurry3 associativity) $ zip3 ms (series :: Series (a -> m a)) (series :: Series (a -> m a)) ] -- | @tasty@ 'TestTree' for 'Monad' laws. Monomorphic product 'Series'. testExhaustive :: forall m a . ( Applicative m, Monad m , Eq a, Eq (m a) , Show a, Show (m a), Show (m (a -> a)) , Serial a, Serial (m a), Serial (m (a -> a)) ) => Series (m a) -> TestTree testExhaustive ms = testGroup "Monad laws" [ Applicative.testExhaustive ms , testSeriesProperty "(m >>= f) >>= g ≡ m (f >=> g)" (uncurry3 associativity) $ zipA3 ms (series :: Series (a -> m a)) (series :: Series (a -> m a)) ]
jdnavarro/tasty-laws
Test/Tasty/Laws/Monad.hs
Haskell
bsd-3-clause
2,144
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE CPP #-} -- | -- Module : Auth.Register -- Copyright : (C) 2015 Ricky Elrod -- License : MIT (see LICENSE file) -- Maintainer : (C) Ricky Elrod <ricky@elrod.me> -- Stability : experimental -- -- Functions and handlers for performing registration. module Auth.Register where import Application #if __GLASGOW_HASKELL__ < 710 import Control.Applicative #endif import Control.Lens import Control.Monad.IO.Class import Data.Text.Encoding import Snap.Snaplet import Snap.Snaplet.Auth import Snap.Snaplet.Heist import Text.Digestive.Heist import Text.Digestive.Snap hiding (method) import Auth.Login import Lenses import Forms.Login -- | Handle new user form submit handleNewUser :: Handler App (AuthManager App) () handleNewUser = do (view', result) <- runForm "new_user" loginForm case result of Just user -> do auth' <- createUser (user ^. username) (user ^. password . to encodeUtf8) case auth' of Left e -> liftIO (print e) >> heistLocal (bindDigestiveSplices view') (render "new_user") Right _ -> handleLoginSubmit view' "new_user" user Nothing -> heistLocal (bindDigestiveSplices view') $ render "new_user"
meoblast001/quotum-snap
src/Auth/Register.hs
Haskell
mit
1,231
{-# LANGUAGE AllowAmbiguousTypes #-} {-# LANGUAGE DeriveLift #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeApplications #-} module Database.Persist.Types.Base ( module Database.Persist.Types.Base -- * Re-exports , PersistValue(..) , fromPersistValueText , LiteralType(..) ) where import Control.Exception (Exception) import Data.Char (isSpace) import Data.List.NonEmpty (NonEmpty(..)) import qualified Data.List.NonEmpty as NEL import Data.Map (Map) import Data.Maybe (isNothing) import Data.Text (Text) import qualified Data.Text as T import Data.Word (Word32) import Language.Haskell.TH.Syntax (Lift(..)) import Web.HttpApiData ( FromHttpApiData(..) , ToHttpApiData(..) , parseBoundedTextData , showTextData ) import Web.PathPieces (PathPiece(..)) -- Bring `Lift (Map k v)` instance into scope, as well as `Lift Text` -- instance on pre-1.2.4 versions of `text` import Instances.TH.Lift () import Database.Persist.Names import Database.Persist.PersistValue -- | A 'Checkmark' should be used as a field type whenever a -- uniqueness constraint should guarantee that a certain kind of -- record may appear at most once, but other kinds of records may -- appear any number of times. -- -- /NOTE:/ You need to mark any @Checkmark@ fields as @nullable@ -- (see the following example). -- -- For example, suppose there's a @Location@ entity that -- represents where a user has lived: -- -- @ -- Location -- user UserId -- name Text -- current Checkmark nullable -- -- UniqueLocation user current -- @ -- -- The @UniqueLocation@ constraint allows any number of -- 'Inactive' @Location@s to be @current@. However, there may be -- at most one @current@ @Location@ per user (i.e., either zero -- or one per user). -- -- This data type works because of the way that SQL treats -- @NULL@able fields within uniqueness constraints. 
The SQL -- standard says that @NULL@ values should be considered -- different, so we represent 'Inactive' as SQL @NULL@, thus -- allowing any number of 'Inactive' records. On the other hand, -- we represent 'Active' as @TRUE@, so the uniqueness constraint -- will disallow more than one 'Active' record. -- -- /Note:/ There may be DBMSs that do not respect the SQL -- standard's treatment of @NULL@ values on uniqueness -- constraints, please check if this data type works before -- relying on it. -- -- The SQL @BOOLEAN@ type is used because it's the smallest data -- type available. Note that we never use @FALSE@, just @TRUE@ -- and @NULL@. Provides the same behavior @Maybe ()@ would if -- @()@ was a valid 'PersistField'. data Checkmark = Active -- ^ When used on a uniqueness constraint, there -- may be at most one 'Active' record. | Inactive -- ^ When used on a uniqueness constraint, there -- may be any number of 'Inactive' records. deriving (Eq, Ord, Read, Show, Enum, Bounded) instance ToHttpApiData Checkmark where toUrlPiece = showTextData instance FromHttpApiData Checkmark where parseUrlPiece = parseBoundedTextData instance PathPiece Checkmark where toPathPiece Active = "active" toPathPiece Inactive = "inactive" fromPathPiece "active" = Just Active fromPathPiece "inactive" = Just Inactive fromPathPiece _ = Nothing data IsNullable = Nullable !WhyNullable | NotNullable deriving (Eq, Show) fieldAttrsContainsNullable :: [FieldAttr] -> IsNullable fieldAttrsContainsNullable s | FieldAttrMaybe `elem` s = Nullable ByMaybeAttr | FieldAttrNullable `elem` s = Nullable ByNullableAttr | otherwise = NotNullable -- | The reason why a field is 'nullable' is very important. A -- field that is nullable because of a @Maybe@ tag will have its -- type changed from @A@ to @Maybe A@. OTOH, a field that is -- nullable because of a @nullable@ tag will remain with the same -- type. 
data WhyNullable = ByMaybeAttr | ByNullableAttr deriving (Eq, Show) -- | An 'EntityDef' represents the information that @persistent@ knows -- about an Entity. It uses this information to generate the Haskell -- datatype, the SQL migrations, and other relevant conversions. data EntityDef = EntityDef { entityHaskell :: !EntityNameHS -- ^ The name of the entity as Haskell understands it. , entityDB :: !EntityNameDB -- ^ The name of the database table corresponding to the entity. , entityId :: !EntityIdDef -- ^ The entity's primary key or identifier. , entityAttrs :: ![Attr] -- ^ The @persistent@ entity syntax allows you to add arbitrary 'Attr's -- to an entity using the @!@ operator. Those attributes are stored in -- this list. , entityFields :: ![FieldDef] -- ^ The fields for this entity. Note that the ID field will not be -- present in this list. To get all of the fields for an entity, use -- 'keyAndEntityFields'. , entityUniques :: ![UniqueDef] -- ^ The Uniqueness constraints for this entity. , entityForeigns:: ![ForeignDef] -- ^ The foreign key relationships that this entity has to other -- entities. , entityDerives :: ![Text] -- ^ A list of type classes that have been derived for this entity. , entityExtra :: !(Map Text [ExtraLine]) , entitySum :: !Bool -- ^ Whether or not this entity represents a sum type in the database. , entityComments :: !(Maybe Text) -- ^ Optional comments on the entity. -- -- @since 2.10.0 } deriving (Show, Eq, Read, Ord, Lift) -- | The definition for the entity's primary key ID. -- -- @since 2.13.0.0 data EntityIdDef = EntityIdField !FieldDef -- ^ The entity has a single key column, and it is a surrogate key - that -- is, you can't go from @rec -> Key rec@. -- -- @since 2.13.0.0 | EntityIdNaturalKey !CompositeDef -- ^ The entity has a natural key. This means you can write @rec -> Key rec@ -- because all the key fields are present on the datatype. -- -- A natural key can have one or more columns. 
-- -- @since 2.13.0.0 deriving (Show, Eq, Read, Ord, Lift) -- | Return the @['FieldDef']@ for the entity keys. entitiesPrimary :: EntityDef -> NonEmpty FieldDef entitiesPrimary t = case entityId t of EntityIdNaturalKey fds -> compositeFields fds EntityIdField fd -> pure fd entityPrimary :: EntityDef -> Maybe CompositeDef entityPrimary t = case entityId t of EntityIdNaturalKey c -> Just c _ -> Nothing entityKeyFields :: EntityDef -> NonEmpty FieldDef entityKeyFields = entitiesPrimary -- | Returns a 'NonEmpty' list of 'FieldDef' that correspond with the key -- columns for an 'EntityDef'. keyAndEntityFields :: EntityDef -> NonEmpty FieldDef keyAndEntityFields ent = case entityId ent of EntityIdField fd -> fd :| fields EntityIdNaturalKey _ -> case NEL.nonEmpty fields of Nothing -> error $ mconcat [ "persistent internal guarantee failed: entity is " , "defined with an entityId = EntityIdNaturalKey, " , "but somehow doesn't have any entity fields." ] Just xs -> xs where fields = filter isHaskellField $ entityFields ent type ExtraLine = [Text] type Attr = Text -- | Attributes that may be attached to fields that can affect migrations -- and serialization in backend-specific ways. -- -- While we endeavor to, we can't forsee all use cases for all backends, -- and so 'FieldAttr' is extensible through its constructor 'FieldAttrOther'. -- -- @since 2.11.0.0 data FieldAttr = FieldAttrMaybe -- ^ The 'Maybe' keyword goes after the type. This indicates that the column -- is nullable, and the generated Haskell code will have a @'Maybe'@ type -- for it. -- -- Example: -- -- @ -- User -- name Text Maybe -- @ | FieldAttrNullable -- ^ This indicates that the column is nullable, but should not have -- a 'Maybe' type. For this to work out, you need to ensure that the -- 'PersistField' instance for the type in question can support -- a 'PersistNull' value. 
-- -- @ -- data What = NoWhat | Hello Text -- -- instance PersistField What where -- fromPersistValue PersistNull = -- pure NoWhat -- fromPersistValue pv = -- Hello <$> fromPersistValue pv -- -- instance PersistFieldSql What where -- sqlType _ = SqlString -- -- User -- what What nullable -- @ | FieldAttrMigrationOnly -- ^ This tag means that the column will not be present on the Haskell code, -- but will not be removed from the database. Useful to deprecate fields in -- phases. -- -- You should set the column to be nullable in the database. Otherwise, -- inserts won't have values. -- -- @ -- User -- oldName Text MigrationOnly -- newName Text -- @ | FieldAttrSafeToRemove -- ^ A @SafeToRemove@ attribute is not present on the Haskell datatype, and -- the backend migrations should attempt to drop the column without -- triggering any unsafe migration warnings. -- -- Useful after you've used @MigrationOnly@ to remove a column from the -- database in phases. -- -- @ -- User -- oldName Text SafeToRemove -- newName Text -- @ | FieldAttrNoreference -- ^ This attribute indicates that we should create a foreign key reference -- from a column. By default, @persistent@ will try and create a foreign key -- reference for a column if it can determine that the type of the column is -- a @'Key' entity@ or an @EntityId@ and the @Entity@'s name was present in -- 'mkPersist'. -- -- This is useful if you want to use the explicit foreign key syntax. -- -- @ -- Post -- title Text -- -- Comment -- postId PostId noreference -- Foreign Post fk_comment_post postId -- @ | FieldAttrReference Text -- ^ This is set to specify precisely the database table the column refers -- to. -- -- @ -- Post -- title Text -- -- Comment -- postId PostId references="post" -- @ -- -- You should not need this - @persistent@ should be capable of correctly -- determining the target table's name. If you do need this, please file an -- issue describing why. 
| FieldAttrConstraint Text -- ^ Specify a name for the constraint on the foreign key reference for this -- table. -- -- @ -- Post -- title Text -- -- Comment -- postId PostId constraint="my_cool_constraint_name" -- @ | FieldAttrDefault Text -- ^ Specify the default value for a column. -- -- @ -- User -- createdAt UTCTime default="NOW()" -- @ -- -- Note that a @default=@ attribute does not mean you can omit the value -- while inserting. | FieldAttrSqltype Text -- ^ Specify a custom SQL type for the column. Generally, you should define -- a custom datatype with a custom 'PersistFieldSql' instance instead of -- using this. -- -- @ -- User -- uuid Text sqltype="UUID" -- @ | FieldAttrMaxlen Integer -- ^ Set a maximum length for a column. Useful for VARCHAR and indexes. -- -- @ -- User -- name Text maxlen=200 -- -- UniqueName name -- @ | FieldAttrSql Text -- ^ Specify the database name of the column. -- -- @ -- User -- blarghle Int sql="b_l_a_r_g_h_l_e" -- @ -- -- Useful for performing phased migrations, where one column is renamed to -- another column over time. | FieldAttrOther Text -- ^ A grab bag of random attributes that were unrecognized by the parser. deriving (Show, Eq, Read, Ord, Lift) -- | Parse raw field attributes into structured form. Any unrecognized -- attributes will be preserved, identically as they are encountered, -- as 'FieldAttrOther' values. 
-- -- @since 2.11.0.0 parseFieldAttrs :: [Text] -> [FieldAttr] parseFieldAttrs = fmap $ \case "Maybe" -> FieldAttrMaybe "nullable" -> FieldAttrNullable "MigrationOnly" -> FieldAttrMigrationOnly "SafeToRemove" -> FieldAttrSafeToRemove "noreference" -> FieldAttrNoreference raw | Just x <- T.stripPrefix "reference=" raw -> FieldAttrReference x | Just x <- T.stripPrefix "constraint=" raw -> FieldAttrConstraint x | Just x <- T.stripPrefix "default=" raw -> FieldAttrDefault x | Just x <- T.stripPrefix "sqltype=" raw -> FieldAttrSqltype x | Just x <- T.stripPrefix "maxlen=" raw -> case reads (T.unpack x) of [(n, s)] | all isSpace s -> FieldAttrMaxlen n _ -> error $ "Could not parse maxlen field with value " <> show raw | Just x <- T.stripPrefix "sql=" raw -> FieldAttrSql x | otherwise -> FieldAttrOther raw -- | A 'FieldType' describes a field parsed from the QuasiQuoter and is -- used to determine the Haskell type in the generated code. -- -- @name Text@ parses into @FTTypeCon Nothing "Text"@ -- -- @name T.Text@ parses into @FTTypeCon (Just "T" "Text")@ -- -- @name (Jsonb User)@ parses into: -- -- @ -- FTApp (FTTypeCon Nothing "Jsonb") (FTTypeCon Nothing "User") -- @ data FieldType = FTTypeCon (Maybe Text) Text -- ^ Optional module and name. | FTTypePromoted Text | FTApp FieldType FieldType | FTList FieldType deriving (Show, Eq, Read, Ord, Lift) isFieldNotGenerated :: FieldDef -> Bool isFieldNotGenerated = isNothing . fieldGenerated -- | There are 3 kinds of references -- 1) composite (to fields that exist in the record) -- 2) single field -- 3) embedded data ReferenceDef = NoReference | ForeignRef !EntityNameHS -- ^ A ForeignRef has a late binding to the EntityDef it references via name -- and has the Haskell type of the foreign key in the form of FieldType | EmbedRef EntityNameHS | CompositeRef CompositeDef | SelfReference -- ^ A SelfReference stops an immediate cycle which causes non-termination at compile-time (issue #311). 
deriving (Show, Eq, Read, Ord, Lift) -- | An EmbedEntityDef is the same as an EntityDef -- But it is only used for fieldReference -- so it only has data needed for embedding data EmbedEntityDef = EmbedEntityDef { embeddedHaskell :: EntityNameHS , embeddedFields :: [EmbedFieldDef] } deriving (Show, Eq, Read, Ord, Lift) -- | An EmbedFieldDef is the same as a FieldDef -- But it is only used for embeddedFields -- so it only has data needed for embedding data EmbedFieldDef = EmbedFieldDef { emFieldDB :: FieldNameDB , emFieldEmbed :: Maybe (Either SelfEmbed EntityNameHS) } deriving (Show, Eq, Read, Ord, Lift) data SelfEmbed = SelfEmbed deriving (Show, Eq, Read, Ord, Lift) -- | Returns 'True' if the 'FieldDef' does not have a 'MigrationOnly' or -- 'SafeToRemove' flag from the QuasiQuoter. -- -- @since 2.13.0.0 isHaskellField :: FieldDef -> Bool isHaskellField fd = FieldAttrMigrationOnly `notElem` fieldAttrs fd && FieldAttrSafeToRemove `notElem` fieldAttrs fd toEmbedEntityDef :: EntityDef -> EmbedEntityDef toEmbedEntityDef ent = embDef where embDef = EmbedEntityDef { embeddedHaskell = entityHaskell ent , embeddedFields = map toEmbedFieldDef $ filter isHaskellField $ entityFields ent } toEmbedFieldDef :: FieldDef -> EmbedFieldDef toEmbedFieldDef field = EmbedFieldDef { emFieldDB = fieldDB field , emFieldEmbed = case fieldReference field of EmbedRef em -> Just $ Right em SelfReference -> Just $ Left SelfEmbed _ -> Nothing } -- | Type for storing the Uniqueness constraint in the Schema. 
Assume you have -- the following schema with a uniqueness constraint: -- -- @ -- Person -- name String -- age Int -- UniqueAge age -- @ -- -- This will be represented as: -- -- @ -- UniqueDef -- { uniqueHaskell = ConstraintNameHS (packPTH "UniqueAge") -- , uniqueDBName = ConstraintNameDB (packPTH "unique_age") -- , uniqueFields = [(FieldNameHS (packPTH "age"), FieldNameDB (packPTH "age"))] -- , uniqueAttrs = [] -- } -- @ -- data UniqueDef = UniqueDef { uniqueHaskell :: !ConstraintNameHS , uniqueDBName :: !ConstraintNameDB , uniqueFields :: !(NonEmpty (FieldNameHS, FieldNameDB)) , uniqueAttrs :: ![Attr] } deriving (Show, Eq, Read, Ord, Lift) data CompositeDef = CompositeDef { compositeFields :: !(NonEmpty FieldDef) , compositeAttrs :: ![Attr] } deriving (Show, Eq, Read, Ord, Lift) -- | Used instead of FieldDef -- to generate a smaller amount of code type ForeignFieldDef = (FieldNameHS, FieldNameDB) data ForeignDef = ForeignDef { foreignRefTableHaskell :: !EntityNameHS , foreignRefTableDBName :: !EntityNameDB , foreignConstraintNameHaskell :: !ConstraintNameHS , foreignConstraintNameDBName :: !ConstraintNameDB , foreignFieldCascade :: !FieldCascade -- ^ Determine how the field will cascade on updates and deletions. -- -- @since 2.11.0 , foreignFields :: ![(ForeignFieldDef, ForeignFieldDef)] -- this entity plus the primary entity , foreignAttrs :: ![Attr] , foreignNullable :: Bool , foreignToPrimary :: Bool -- ^ Determines if the reference is towards a Primary Key or not. -- -- @since 2.11.0 } deriving (Show, Eq, Read, Ord, Lift) -- | This datatype describes how a foreign reference field cascades deletes -- or updates. -- -- This type is used in both parsing the model definitions and performing -- migrations. A 'Nothing' in either of the field values means that the -- user has not specified a 'CascadeAction'. An unspecified 'CascadeAction' -- is defaulted to 'Restrict' when doing migrations. 
-- -- @since 2.11.0 data FieldCascade = FieldCascade { fcOnUpdate :: !(Maybe CascadeAction) , fcOnDelete :: !(Maybe CascadeAction) } deriving (Show, Eq, Read, Ord, Lift) -- | A 'FieldCascade' that does nothing. -- -- @since 2.11.0 noCascade :: FieldCascade noCascade = FieldCascade Nothing Nothing -- | Renders a 'FieldCascade' value such that it can be used in SQL -- migrations. -- -- @since 2.11.0 renderFieldCascade :: FieldCascade -> Text renderFieldCascade (FieldCascade onUpdate onDelete) = T.unwords [ foldMap (mappend " ON DELETE " . renderCascadeAction) onDelete , foldMap (mappend " ON UPDATE " . renderCascadeAction) onUpdate ] -- | An action that might happen on a deletion or update on a foreign key -- change. -- -- @since 2.11.0 data CascadeAction = Cascade | Restrict | SetNull | SetDefault deriving (Show, Eq, Read, Ord, Lift) -- | Render a 'CascadeAction' to 'Text' such that it can be used in a SQL -- command. -- -- @since 2.11.0 renderCascadeAction :: CascadeAction -> Text renderCascadeAction action = case action of Cascade -> "CASCADE" Restrict -> "RESTRICT" SetNull -> "SET NULL" SetDefault -> "SET DEFAULT" data PersistException = PersistError Text -- ^ Generic Exception | PersistMarshalError Text | PersistInvalidField Text | PersistForeignConstraintUnmet Text | PersistMongoDBError Text | PersistMongoDBUnsupported Text deriving Show instance Exception PersistException -- | A SQL data type. Naming attempts to reflect the underlying Haskell -- datatypes, eg SqlString instead of SqlVarchar. Different SQL databases may -- have different translations for these types. 
data SqlType = SqlString | SqlInt32 | SqlInt64 | SqlReal | SqlNumeric Word32 Word32 | SqlBool | SqlDay | SqlTime | SqlDayTime -- ^ Always uses UTC timezone | SqlBlob | SqlOther T.Text -- ^ a backend-specific name deriving (Show, Read, Eq, Ord, Lift) data PersistFilter = Eq | Ne | Gt | Lt | Ge | Le | In | NotIn | BackendSpecificFilter T.Text deriving (Read, Show, Lift) data UpdateException = KeyNotFound String | UpsertError String instance Show UpdateException where show (KeyNotFound key) = "Key not found during updateGet: " ++ key show (UpsertError msg) = "Error during upsert: " ++ msg instance Exception UpdateException data OnlyUniqueException = OnlyUniqueException String instance Show OnlyUniqueException where show (OnlyUniqueException uniqueMsg) = "Expected only one unique key, got " ++ uniqueMsg instance Exception OnlyUniqueException data PersistUpdate = Assign | Add | Subtract | Multiply | Divide | BackendSpecificUpdate T.Text deriving (Read, Show, Lift) -- | A 'FieldDef' represents the inormation that @persistent@ knows about -- a field of a datatype. This includes information used to parse the field -- out of the database and what the field corresponds to. data FieldDef = FieldDef { fieldHaskell :: !FieldNameHS -- ^ The name of the field. Note that this does not corresponds to the -- record labels generated for the particular entity - record labels -- are generated with the type name prefixed to the field, so -- a 'FieldDef' that contains a @'FieldNameHS' "name"@ for a type -- @User@ will have a record field @userName@. , fieldDB :: !FieldNameDB -- ^ The name of the field in the database. For SQL databases, this -- corresponds to the column name. , fieldType :: !FieldType -- ^ The type of the field in Haskell. , fieldSqlType :: !SqlType -- ^ The type of the field in a SQL database. , fieldAttrs :: ![FieldAttr] -- ^ User annotations for a field. These are provided with the @!@ -- operator. 
, fieldStrict :: !Bool -- ^ If this is 'True', then the Haskell datatype will have a strict -- record field. The default value for this is 'True'. , fieldReference :: !ReferenceDef , fieldCascade :: !FieldCascade -- ^ Defines how operations on the field cascade on to the referenced -- tables. This doesn't have any meaning if the 'fieldReference' is set -- to 'NoReference' or 'SelfReference'. The cascade option here should -- be the same as the one obtained in the 'fieldReference'. -- -- @since 2.11.0 , fieldComments :: !(Maybe Text) -- ^ Optional comments for a 'Field'. There is not currently a way to -- attach comments to a field in the quasiquoter. -- -- @since 2.10.0 , fieldGenerated :: !(Maybe Text) -- ^ Whether or not the field is a @GENERATED@ column, and additionally -- the expression to use for generation. -- -- @since 2.11.0.0 , fieldIsImplicitIdColumn :: !Bool -- ^ 'True' if the field is an implicit ID column. 'False' otherwise. -- -- @since 2.13.0.0 } deriving (Show, Eq, Read, Ord, Lift)
paul-rouse/persistent
persistent/Database/Persist/Types/Base.hs
Haskell
mit
23,580
{-# LANGUAGE ExistentialQuantification,DeriveDataTypeable #-} -- | This module provides typed channels, an alternative -- approach to interprocess messaging. Typed channels -- can be used in combination with or instead of the -- the untyped channels available in the "Remote.Process" -- module via 'send'. module Remote.Channel ( -- * Basic typed channels SendPort,ReceivePort,newChannel,sendChannel,receiveChannel, -- * Combined typed channels CombinedChannelAction,combinedChannelAction, combinePortsBiased,combinePortsRR,mergePortsBiased,mergePortsRR, -- * Terminate a channel terminateChannel) where import Remote.Process (ProcessM,send,getMessageType,getMessagePayload,setDaemonic,getProcess,prNodeRef,getNewMessageLocal,localFromPid,isPidLocal,TransmitException(..),TransmitStatus(..),spawnLocalAnd,ProcessId,Node,UnknownMessageException(..)) import Remote.Encoding (Serializable) import Data.List (foldl') import Data.Binary (Binary,get,put) import Data.Typeable (Typeable) import Control.Exception (throw) import Control.Monad (when) import Control.Monad.Trans (liftIO) import Control.Concurrent.MVar (MVar,newEmptyMVar,takeMVar,readMVar,putMVar) import Control.Concurrent.STM (STM,atomically,retry,orElse) import Control.Concurrent.STM.TVar (TVar,newTVarIO,readTVar,writeTVar) ---------------------------------------------- -- * Channels ---------------------------------------------- -- | A channel is a unidirectional communication pipeline -- with two ends: a sending port, and a receiving port. -- This is the sending port. A process holding this -- value can insert messages into the channel. SendPorts -- themselves can also be sent to other processes. -- The other side of the channel is the 'ReceivePort'. newtype SendPort a = SendPort ProcessId deriving (Typeable) -- | A process holding a ReceivePort can extract messages -- from the channel, which we inserted by -- the holder(s) of the corresponding 'SendPort'. 
-- Critically, ReceivePorts, unlike SendPorts, are not serializable. -- This means that you can only receive messages through a channel -- on the node on which the channel was created. data ReceivePort a = ReceivePortSimple ProcessId (MVar ()) | ReceivePortBiased [Node -> STM a] | ReceivePortRR (TVar [Node -> STM a]) instance Binary (SendPort a) where put (SendPort pid) = put pid get = get >>= return . SendPort -- | Create a new channel, and returns both the 'SendPort' -- and 'ReceivePort' thereof. newChannel :: (Serializable a) => ProcessM (SendPort a, ReceivePort a) newChannel = do mv <- liftIO $ newEmptyMVar pid <- spawnLocalAnd (body mv) setDaemonic return (SendPort pid, ReceivePortSimple pid mv) where body mv = liftIO (takeMVar mv) -- | Inserts a new value into the channel. sendChannel :: (Serializable a) => SendPort a -> a -> ProcessM () sendChannel (SendPort pid) a = send pid a -- | Extract a value from the channel, in FIFO order. receiveChannel :: (Serializable a) => ReceivePort a -> ProcessM a receiveChannel rc = do p <- getProcess channelCheckPids [rc] node <- liftIO $ readMVar (prNodeRef p) liftIO $ atomically $ receiveChannelImpl node rc receiveChannelImpl :: (Serializable a) => Node -> ReceivePort a -> STM a receiveChannelImpl node rc = case rc of ReceivePortBiased l -> foldl' orElse retry (map (\x -> x node) l) ReceivePortRR mv -> do tv <- readTVar mv writeTVar mv (rotate tv) foldl' orElse retry (map (\x -> x node) tv) ReceivePortSimple _ _ -> receiveChannelSimple node rc where rotate [] = [] rotate (h:t) = t ++ [h] data CombinedChannelAction b = forall a. (Serializable a) => CombinedChannelAction (ReceivePort a) (a -> b) -- | Specifies a port and an adapter for combining ports via 'combinePortsBiased' and -- 'combinePortsRR'. 
combinedChannelAction :: (Serializable a) => ReceivePort a -> (a -> b) -> CombinedChannelAction b
combinedChannelAction = CombinedChannelAction

-- | This function lets us respond to messages on multiple channels
-- by combining several 'ReceivePort's into one. The resulting port
-- is the sum of the input ports, and will extract messages from all
-- of them in FIFO order. The input ports are specified by
-- 'combinedChannelAction', which also gives a converter function.
-- After combining, the underlying receive ports can still
-- be used independently, as well.
-- We provide two ways to combine ports, which differ in the bias
-- they demonstrate in returning messages when more than one
-- underlying channel is nonempty. 'combinePortsBiased' will
-- check ports in the order given by its argument, and so
-- if the first channel always has a message waiting, it will
-- starve the other channels. The alternative is 'combinePortsRR'.
combinePortsBiased :: Serializable b => [CombinedChannelAction b] -> ProcessM (ReceivePort b)
combinePortsBiased chns = do mapM_ (\(CombinedChannelAction chn _ ) -> channelCheckPids [chn]) chns
                             -- Each input port becomes a node-indexed STM read
                             -- whose result is pushed through its adapter.
                             return $ ReceivePortBiased [(\node -> receiveChannelImpl node chn >>= return . fun) | (CombinedChannelAction chn fun) <- chns]

-- | See 'combinePortsBiased'. This function differs from that one
-- in that the order that the underlying ports are checked is rotated
-- with each invocation, guaranteeing that, given enough invocations,
-- every channel will have a chance to contribute a message.
combinePortsRR :: Serializable b => [CombinedChannelAction b] -> ProcessM (ReceivePort b)
combinePortsRR chns = do mapM_ (\(CombinedChannelAction chn _ ) -> channelCheckPids [chn]) chns
                         -- The TVar holds the rotation state consulted (and
                         -- updated) by receiveChannelImpl on every read.
                         tv <- liftIO $ newTVarIO [(\node -> receiveChannelImpl node chn >>= return . fun) | (CombinedChannelAction chn fun) <- chns]
                         return $ ReceivePortRR tv

-- | Similar to 'combinePortsBiased', with the difference that the
-- underlying ports must be of the same type, and you don't
-- have the opportunity to provide an adapter function.
mergePortsBiased :: (Serializable a) => [ReceivePort a] -> ProcessM (ReceivePort a)
mergePortsBiased chns = do channelCheckPids chns
                           return $ ReceivePortBiased [(\node -> receiveChannelImpl node chn) | chn <- chns]

-- | Similar to 'combinePortsRR', with the difference that the
-- underlying ports must be of the same type, and you don't
-- have the opportunity to provide an adapter function.
mergePortsRR :: (Serializable a) => [ReceivePort a] -> ProcessM (ReceivePort a)
mergePortsRR chns = do channelCheckPids chns
                       tv <- liftIO $ newTVarIO [(\node -> receiveChannelImpl node chn) | chn <- chns]
                       return $ ReceivePortRR tv

-- Ensure that every simple port in the list is backed by a process local
-- to this node; receiving through a non-local port is an error. Combined
-- ports were checked when they were built, so they pass through.
channelCheckPids :: (Serializable a) => [ReceivePort a] -> ProcessM ()
channelCheckPids chns = mapM_ checkPid chns
   where checkPid (ReceivePortSimple pid _) = do islocal <- isPidLocal pid
                                                 when (not islocal)
                                                      (throw $ TransmitException QteUnknownPid)
         checkPid _ = return ()

-- Pull the next message destined for the channel's backing process out of
-- the local node's mailbox, blocking in STM (retry) until one arrives.
-- NOTE: only ever called with a ReceivePortSimple (see receiveChannelImpl);
-- the pattern match is intentionally partial.
receiveChannelSimple :: (Serializable a) => Node -> ReceivePort a -> STM a
receiveChannelSimple node (ReceivePortSimple chpid _) =
      do mmsg <- getNewMessageLocal (node) (localFromPid chpid)
         case mmsg of
            Nothing -> badPid
            Just msg -> case getMessagePayload msg of
                           Nothing -> throw $ UnknownMessageException (getMessageType msg)
                           Just q -> return q
   where badPid = throw $ TransmitException QteUnknownPid

-- | Terminate a channel. After calling this function, 'receiveChannel'
-- on that port (or on any combined port based on it) will either
-- fail or block indefinitely, and 'sendChannel' on the corresponding
-- 'SendPort' will fail. Any unread messages remaining in the channel
-- will be lost.
terminateChannel :: (Serializable a) => ReceivePort a -> ProcessM ()
-- Only ports produced directly by 'newChannel' can be terminated: we wake
-- the channel's backing daemonic process by filling its termination MVar,
-- whereupon it exits. Combined or merged ports have no single backing
-- process, so asking to terminate one is an error.
terminateChannel port =
   case port of
      ReceivePortSimple _ term -> liftIO (putMVar term ())
      _ -> throw (TransmitException QteUnknownPid)
jepst/CloudHaskell
Remote/Channel.hs
Haskell
bsd-3-clause
8,558
{- (c) The AQUA Project, Glasgow University, 1993-1998 \section[Simplify]{The main module of the simplifier} -} {-# LANGUAGE CPP #-} module Eta.SimplCore.Simplify ( simplTopBinds, simplExpr, simplRules ) where #include "HsVersions.h" import Eta.Main.DynFlags import Eta.SimplCore.SimplMonad import Eta.Types.Type hiding ( substTy, extendTvSubst, substTyVar ) import Eta.SimplCore.SimplEnv import Eta.SimplCore.SimplUtils import Eta.Types.FamInstEnv ( FamInstEnv ) import Eta.BasicTypes.Literal ( litIsLifted ) --, mkMachInt ) -- temporalily commented out. See #8326 import Eta.BasicTypes.Id import Eta.BasicTypes.MkId ( seqId, voidPrimId ) import Eta.Core.MkCore ( mkImpossibleExpr, castBottomExpr ) import Eta.BasicTypes.IdInfo import Eta.BasicTypes.Name ( Name, mkSystemVarName, isExternalName ) import Eta.Types.Coercion hiding ( substCo, substTy, substCoVar, extendTvSubst ) import Eta.Types.OptCoercion ( optCoercion ) import Eta.Types.FamInstEnv ( topNormaliseType_maybe ) import Eta.BasicTypes.DataCon ( DataCon, dataConWorkId, dataConRepStrictness , isMarkedStrict ) --, dataConTyCon, dataConTag, fIRST_TAG ) --import Eta.Types.TyCon ( isEnumerationTyCon ) -- temporalily commented out. See #8326 import Eta.SimplCore.CoreMonad ( Tick(..), SimplifierMode(..) ) import Eta.Core.CoreSyn import Eta.BasicTypes.Demand ( StrictSig(..), dmdTypeDepth, isStrictDmd ) import Eta.Core.PprCore ( pprCoreExpr ) import Eta.Core.CoreUnfold import Eta.Core.CoreUtils import Eta.Core.CoreArity --import Eta.Prelude.PrimOp ( tagToEnumKey ) -- temporalily commented out. See #8326 import Eta.Specialise.Rules ( mkRuleInfo, lookupRule, getRules ) import Eta.Prelude.TysPrim ( voidPrimTy ) --, intPrimTy ) -- temporalily commented out. See #8326 import Eta.BasicTypes.BasicTypes ( TopLevelFlag(..), isTopLevel, RecFlag(..) ) import Eta.Utils.MonadUtils ( foldlM, mapAccumLM, liftIO ) import Eta.Utils.Maybes ( orElse ) --import Eta.BasicTypes.Unique ( hasKey ) -- temporalily commented out. 
See #8326 import Control.Monad import Eta.Utils.Outputable import Eta.Utils.FastString import Eta.Utils.Pair import Eta.Utils.Util import Eta.Main.ErrUtils {- The guts of the simplifier is in this module, but the driver loop for the simplifier is in SimplCore.lhs. ----------------------------------------- *** IMPORTANT NOTE *** ----------------------------------------- The simplifier used to guarantee that the output had no shadowing, but it does not do so any more. (Actually, it never did!) The reason is documented with simplifyArgs. ----------------------------------------- *** IMPORTANT NOTE *** ----------------------------------------- Many parts of the simplifier return a bunch of "floats" as well as an expression. This is wrapped as a datatype SimplUtils.FloatsWith. All "floats" are let-binds, not case-binds, but some non-rec lets may be unlifted (with RHS ok-for-speculation). ----------------------------------------- ORGANISATION OF FUNCTIONS ----------------------------------------- simplTopBinds - simplify all top-level binders - for NonRec, call simplRecOrTopPair - for Rec, call simplRecBind ------------------------------ simplExpr (applied lambda) ==> simplNonRecBind simplExpr (Let (NonRec ...) ..) ==> simplNonRecBind simplExpr (Let (Rec ...) ..) 
==> simplify binders; simplRecBind

        ------------------------------
simplRecBind    [binders already simplified]
  - use simplRecOrTopPair on each pair in turn

simplRecOrTopPair [binder already simplified]
  Used for: recursive bindings (top level and nested)
            top-level non-recursive bindings
  Returns:
  - check for PreInlineUnconditionally
  - simplLazyBind

simplNonRecBind
  Used for: non-top-level non-recursive bindings
            beta reductions (which amount to the same thing)
  Because it can deal with strict args, it takes a "thing-inside"
  and returns an expression
  - check for PreInlineUnconditionally
  - simplify binder, including its IdInfo
  - if strict binding
        simplStrictArg
        mkAtomicArgs
        completeNonRecX
    else
        simplLazyBind
        addFloats

simplNonRecX:   [given a *simplified* RHS, but an *unsimplified* binder]
  Used for: binding case-binder and constr args in a known-constructor case
  - check for PreInlineUnconditionally
  - simplify binder
  - completeNonRecX

        ------------------------------
simplLazyBind:  [binder already simplified, RHS not]
  Used for: recursive bindings (top level and nested)
            top-level non-recursive bindings
            non-top-level, but *lazy* non-recursive bindings
        [must not be strict or unboxed]
  Returns floats + an augmented environment, not an expression
  - substituteIdInfo and add result to in-scope
        [so that rules are available in rec rhs]
  - simplify rhs
  - mkAtomicArgs
  - float if exposes constructor or PAP
  - completeBind

completeNonRecX:        [binder and rhs both simplified]
  - if the thing needs case binding (unlifted and not ok-for-spec)
        build a Case
    else
        completeBind
        addFloats

completeBind:   [given a simplified RHS]
        [used for both rec and non-rec bindings, top level and not]
  - try PostInlineUnconditionally
  - add unfolding [this is the only place we add an unfolding]
  - add arity

Right hand sides and arguments
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In many ways we want to treat
        (a) the right hand side of a let(rec), and
        (b) a function argument
in the same way.  But not always!
In particular, we would like to leave these arguments exactly as they are, so they will match a RULE more easily. f (g x, h x) g (+ x) It's harder to make the rule match if we ANF-ise the constructor, or eta-expand the PAP: f (let { a = g x; b = h x } in (a,b)) g (\y. + x y) On the other hand if we see the let-defns p = (g x, h x) q = + x then we *do* want to ANF-ise and eta-expand, so that p and q can be safely inlined. Even floating lets out is a bit dubious. For let RHS's we float lets out if that exposes a value, so that the value can be inlined more vigorously. For example r = let x = e in (x,x) Here, if we float the let out we'll expose a nice constructor. We did experiments that showed this to be a generally good thing. But it was a bad thing to float lets out unconditionally, because that meant they got allocated more often. For function arguments, there's less reason to expose a constructor (it won't get inlined). Just possibly it might make a rule match, but I'm pretty skeptical. So for the moment we don't float lets out of function arguments either. Eta expansion ~~~~~~~~~~~~~~ For eta expansion, we want to catch things like case e of (a,b) -> \x -> case a of (p,q) -> \y -> r If the \x was on the RHS of a let, we'd eta expand to bring the two lambdas together. And in general that's a good thing to do. Perhaps we should eta expand wherever we find a (value) lambda? Then the eta expansion at a let RHS can concentrate solely on the PAP case. ************************************************************************ * * \subsection{Bindings} * * ************************************************************************ -} simplTopBinds :: SimplEnv -> [InBind] -> SimplM SimplEnv simplTopBinds env0 binds0 = do { -- Put all the top-level binders into scope at the start -- so that if a transformation rule has unexpectedly brought -- anything into scope, then we don't get a complaint about that. -- It's rather as if the top-level binders were imported. 
-- See note [Glomming] in OccurAnal. ; env1 <- simplRecBndrs env0 (bindersOfBinds binds0) ; env2 <- simpl_binds env1 binds0 ; freeTick SimplifierDone ; return env2 } where -- We need to track the zapped top-level binders, because -- they should have their fragile IdInfo zapped (notably occurrence info) -- That's why we run down binds and bndrs' simultaneously. -- simpl_binds :: SimplEnv -> [InBind] -> SimplM SimplEnv simpl_binds env [] = return env simpl_binds env (bind:binds) = do { env' <- simpl_bind env bind ; simpl_binds env' binds } simpl_bind env (Rec pairs) = simplRecBind env TopLevel pairs simpl_bind env (NonRec b r) = do { (env', b') <- addBndrRules env b (lookupRecBndr env b) ; simplRecOrTopPair env' TopLevel NonRecursive b b' r } {- ************************************************************************ * * \subsection{Lazy bindings} * * ************************************************************************ simplRecBind is used for * recursive bindings only -} simplRecBind :: SimplEnv -> TopLevelFlag -> [(InId, InExpr)] -> SimplM SimplEnv simplRecBind env0 top_lvl pairs0 = do { (env_with_info, triples) <- mapAccumLM add_rules env0 pairs0 ; env1 <- go (zapFloats env_with_info) triples ; return (env0 `addRecFloats` env1) } -- addFloats adds the floats from env1, -- _and_ updates env0 with the in-scope set from env1 where add_rules :: SimplEnv -> (InBndr,InExpr) -> SimplM (SimplEnv, (InBndr, OutBndr, InExpr)) -- Add the (substituted) rules to the binder add_rules env (bndr, rhs) = do { (env', bndr') <- addBndrRules env bndr (lookupRecBndr env bndr) ; return (env', (bndr, bndr', rhs)) } go env [] = return env go env ((old_bndr, new_bndr, rhs) : pairs) = do { env' <- simplRecOrTopPair env top_lvl Recursive old_bndr new_bndr rhs ; go env' pairs } {- simplOrTopPair is used for * recursive bindings (whether top level or not) * top-level non-recursive bindings It assumes the binder has already been simplified, but not its IdInfo. 
-} simplRecOrTopPair :: SimplEnv -> TopLevelFlag -> RecFlag -> InId -> OutBndr -> InExpr -- Binder and rhs -> SimplM SimplEnv -- Returns an env that includes the binding simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs = do { dflags <- getDynFlags ; trace_bind dflags $ if preInlineUnconditionally dflags env top_lvl old_bndr rhs -- Check for unconditional inline then do tick (PreInlineUnconditionally old_bndr) return (extendIdSubst env old_bndr (mkContEx env rhs)) else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env } where trace_bind dflags thing_inside | not (dopt Opt_D_verbose_core2core dflags) = thing_inside | otherwise = pprTrace "SimplBind" (ppr old_bndr) thing_inside -- trace_bind emits a trace for each top-level binding, which -- helps to locate the tracing for inlining and rule firing {- simplLazyBind is used for * [simplRecOrTopPair] recursive bindings (whether top level or not) * [simplRecOrTopPair] top-level non-recursive bindings * [simplNonRecE] non-top-level *lazy* non-recursive bindings Nota bene: 1. It assumes that the binder is *already* simplified, and is in scope, and its IdInfo too, except unfolding 2. It assumes that the binder type is lifted. 3. It does not check for pre-inline-unconditionally; that should have been done already. 
-} simplLazyBind :: SimplEnv -> TopLevelFlag -> RecFlag -> InId -> OutId -- Binder, both pre-and post simpl -- The OutId has IdInfo, except arity, unfolding -> InExpr -> SimplEnv -- The RHS and its environment -> SimplM SimplEnv -- Precondition: rhs obeys the let/app invariant simplLazyBind env top_lvl is_rec bndr bndr1 rhs rhs_se = -- pprTrace "simplLazyBind" ((ppr bndr <+> ppr bndr1) $$ ppr rhs $$ ppr (seIdSubst rhs_se)) $ do { let rhs_env = rhs_se `setInScope` env (tvs, body) = case collectTyBinders rhs of (tvs, body) | not_lam body -> (tvs,body) | otherwise -> ([], rhs) not_lam (Lam _ _) = False not_lam (Tick t e) | not (tickishFloatable t) = not_lam e -- eta-reduction could float not_lam _ = True -- Do not do the "abstract tyyvar" thing if there's -- a lambda inside, because it defeats eta-reduction -- f = /\a. \x. g a x -- should eta-reduce. ; (body_env, tvs') <- simplBinders rhs_env tvs -- See Note [Floating and type abstraction] in SimplUtils -- Simplify the RHS ; let rhs_cont = mkRhsStop (substTy body_env (exprType body)) ; (body_env1, body1) <- simplExprF body_env body rhs_cont -- ANF-ise a constructor or PAP rhs ; (body_env2, body2) <- prepareRhs top_lvl body_env1 bndr1 body1 ; (env', rhs') <- if not (doFloatFromRhs top_lvl is_rec False body2 body_env2) then -- No floating, revert to body1 do { rhs' <- mkLam tvs' (wrapFloats body_env1 body1) rhs_cont ; return (env, rhs') } else if null tvs then -- Simple floating do { tick LetFloatFromLet ; return (addFloats env body_env2, body2) } else -- Do type-abstraction first do { tick LetFloatFromLet ; (poly_binds, body3) <- abstractFloats tvs' body_env2 body2 ; rhs' <- mkLam tvs' body3 rhs_cont ; env' <- foldlM (addPolyBind top_lvl) env poly_binds ; return (env', rhs') } ; completeBind env' top_lvl bndr bndr1 rhs' } {- A specialised variant of simplNonRec used when the RHS is already simplified, notably in knownCon. It uses case-binding where necessary. 
-} simplNonRecX :: SimplEnv -> InId -- Old binder -> OutExpr -- Simplified RHS -> SimplM SimplEnv -- Precondition: rhs satisfies the let/app invariant simplNonRecX env bndr new_rhs | isDeadBinder bndr -- Not uncommon; e.g. case (a,b) of c { (p,q) -> p } = return env -- Here c is dead, and we avoid creating -- the binding c = (a,b) | Coercion co <- new_rhs = return (extendCvSubst env bndr co) | otherwise = do { (env', bndr') <- simplBinder env bndr ; completeNonRecX NotTopLevel env' (isStrictId bndr) bndr bndr' new_rhs } -- simplNonRecX is only used for NotTopLevel things completeNonRecX :: TopLevelFlag -> SimplEnv -> Bool -> InId -- Old binder -> OutId -- New binder -> OutExpr -- Simplified RHS -> SimplM SimplEnv -- Precondition: rhs satisfies the let/app invariant -- See Note [CoreSyn let/app invariant] in CoreSyn completeNonRecX top_lvl env is_strict old_bndr new_bndr new_rhs = do { (env1, rhs1) <- prepareRhs top_lvl (zapFloats env) new_bndr new_rhs ; (env2, rhs2) <- if doFloatFromRhs NotTopLevel NonRecursive is_strict rhs1 env1 then do { tick LetFloatFromLet ; return (addFloats env env1, rhs1) } -- Add the floats to the main env else return (env, wrapFloats env1 rhs1) -- Wrap the floats around the RHS ; completeBind env2 NotTopLevel old_bndr new_bndr rhs2 } {- {- No, no, no! Do not try preInlineUnconditionally in completeNonRecX Doing so risks exponential behaviour, because new_rhs has been simplified once already In the cases described by the folowing commment, postInlineUnconditionally will catch many of the relevant cases. -- This happens; for example, the case_bndr during case of -- known constructor: case (a,b) of x { (p,q) -> ... } -- Here x isn't mentioned in the RHS, so we don't want to -- create the (dead) let-binding let x = (a,b) in ... -- -- Similarly, single occurrences can be inlined vigourously -- e.g. case (f x, g y) of (a,b) -> .... -- If a,b occur once we can avoid constructing the let binding for them. 
Furthermore in the case-binding case preInlineUnconditionally risks extra thunks -- Consider case I# (quotInt# x y) of -- I# v -> let w = J# v in ... -- If we gaily inline (quotInt# x y) for v, we end up building an -- extra thunk: -- let w = J# (quotInt# x y) in ... -- because quotInt# can fail. | preInlineUnconditionally env NotTopLevel bndr new_rhs = thing_inside (extendIdSubst env bndr (DoneEx new_rhs)) -} ---------------------------------- prepareRhs takes a putative RHS, checks whether it's a PAP or constructor application and, if so, converts it to ANF, so that the resulting thing can be inlined more easily. Thus x = (f a, g b) becomes t1 = f a t2 = g b x = (t1,t2) We also want to deal well cases like this v = (f e1 `cast` co) e2 Here we want to make e1,e2 trivial and get x1 = e1; x2 = e2; v = (f x1 `cast` co) v2 That's what the 'go' loop in prepareRhs does -} prepareRhs :: TopLevelFlag -> SimplEnv -> OutId -> OutExpr -> SimplM (SimplEnv, OutExpr) -- Adds new floats to the env iff that allows us to return a good RHS prepareRhs top_lvl env id (Cast rhs co) -- Note [Float coercions] | Pair ty1 _ty2 <- coercionKind co -- Do *not* do this if rhs has an unlifted type , not (isUnLiftedType ty1) -- see Note [Float coercions (unlifted)] = do { (env', rhs') <- makeTrivialWithInfo top_lvl env sanitised_info rhs ; return (env', Cast rhs' co) } where sanitised_info = vanillaIdInfo `setStrictnessInfo` strictnessInfo info `setDemandInfo` demandInfo info info = idInfo id prepareRhs top_lvl env0 _ rhs0 = do { (_is_exp, env1, rhs1) <- go 0 env0 rhs0 ; return (env1, rhs1) } where go n_val_args env (Cast rhs co) = do { (is_exp, env', rhs') <- go n_val_args env rhs ; return (is_exp, env', Cast rhs' co) } go n_val_args env (App fun (Type ty)) = do { (is_exp, env', rhs') <- go n_val_args env fun ; return (is_exp, env', App rhs' (Type ty)) } go n_val_args env (App fun arg) = do { (is_exp, env', fun') <- go (n_val_args+1) env fun ; case is_exp of True -> do { (env'', arg') <- 
makeTrivial top_lvl env' arg ; return (True, env'', App fun' arg') } False -> return (False, env, App fun arg) } go n_val_args env (Var fun) = return (is_exp, env, Var fun) where is_exp = isExpandableApp fun n_val_args -- The fun a constructor or PAP -- See Note [CONLIKE pragma] in BasicTypes -- The definition of is_exp should match that in -- OccurAnal.occAnalApp go n_val_args env (Tick t rhs) -- We want to be able to float bindings past this -- tick. Non-scoping ticks don't care. | tickishScoped t == NoScope = do { (is_exp, env', rhs') <- go n_val_args env rhs ; return (is_exp, env', Tick t rhs') } -- On the other hand, for scoping ticks we need to be able to -- copy them on the floats, which in turn is only allowed if -- we can obtain non-counting ticks. | not (tickishCounts t) || tickishCanSplit t = do { (is_exp, env', rhs') <- go n_val_args (zapFloats env) rhs ; let tickIt (id, expr) = (id, mkTick (mkNoCount t) expr) floats' = seFloats $ env `addFloats` mapFloats env' tickIt ; return (is_exp, env' { seFloats = floats' }, Tick t rhs') } go _ env other = return (False, env, other) {- Note [Float coercions] ~~~~~~~~~~~~~~~~~~~~~~ When we find the binding x = e `cast` co we'd like to transform it to x' = e x = x `cast` co -- A trivial binding There's a chance that e will be a constructor application or function, or something like that, so moving the coerion to the usage site may well cancel the coersions and lead to further optimisation. Example: data family T a :: * data instance T Int = T Int foo :: Int -> Int -> Int foo m n = ... where x = T m go 0 = 0 go n = case x of { T m -> go (n-m) } -- This case should optimise Note [Preserve strictness when floating coercions] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In the Note [Float coercions] transformation, keep the strictness info. 
Eg f = e `cast` co -- f has strictness SSL When we transform to f' = e -- f' also has strictness SSL f = f' `cast` co -- f still has strictness SSL Its not wrong to drop it on the floor, but better to keep it. Note [Float coercions (unlifted)] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BUT don't do [Float coercions] if 'e' has an unlifted type. This *can* happen: foo :: Int = (error (# Int,Int #) "urk") `cast` CoUnsafe (# Int,Int #) Int If do the makeTrivial thing to the error call, we'll get foo = case error (# Int,Int #) "urk" of v -> v `cast` ... But 'v' isn't in scope! These strange casts can happen as a result of case-of-case bar = case (case x of { T -> (# 2,3 #); F -> error "urk" }) of (# p,q #) -> p+q -} makeTrivialArg :: SimplEnv -> ArgSpec -> SimplM (SimplEnv, ArgSpec) makeTrivialArg env (ValArg e) = do { (env', e') <- makeTrivial NotTopLevel env e ; return (env', ValArg e') } makeTrivialArg env arg = return (env, arg) -- CastBy, TyArg makeTrivial :: TopLevelFlag -> SimplEnv -> OutExpr -> SimplM (SimplEnv, OutExpr) -- Binds the expression to a variable, if it's not trivial, returning the variable makeTrivial top_lvl env expr = makeTrivialWithInfo top_lvl env vanillaIdInfo expr makeTrivialWithInfo :: TopLevelFlag -> SimplEnv -> IdInfo -> OutExpr -> SimplM (SimplEnv, OutExpr) -- Propagate strictness and demand info to the new binder -- Note [Preserve strictness when floating coercions] -- Returned SimplEnv has same substitution as incoming one makeTrivialWithInfo top_lvl env info expr | exprIsTrivial expr -- Already trivial || not (bindingOk top_lvl expr expr_ty) -- Cannot trivialise -- See Note [Cannot trivialise] = return (env, expr) | otherwise -- See Note [Take care] below = do { uniq <- getUniqueM ; let name = mkSystemVarName uniq (fsLit "a") var = mkLocalIdWithInfo name expr_ty info ; env' <- completeNonRecX top_lvl env False var var expr ; expr' <- simplVar env' var ; return (env', expr') } -- The simplVar is needed becase we're constructing a new binding -- 
a = rhs -- And if rhs is of form (rhs1 |> co), then we might get -- a1 = rhs1 -- a = a1 |> co -- and now a's RHS is trivial and can be substituted out, and that -- is what completeNonRecX will do -- To put it another way, it's as if we'd simplified -- let var = e in var where expr_ty = exprType expr bindingOk :: TopLevelFlag -> CoreExpr -> Type -> Bool -- True iff we can have a binding of this expression at this level -- Precondition: the type is the type of the expression bindingOk top_lvl _ expr_ty | isTopLevel top_lvl = not (isUnLiftedType expr_ty) | otherwise = True {- Note [Cannot trivialise] ~~~~~~~~~~~~~~~~~~~~~~~~ Consider tih f :: Int -> Addr# foo :: Bar foo = Bar (f 3) Then we can't ANF-ise foo, even though we'd like to, because we can't make a top-level binding for the Addr# (f 3). And if so we don't want to turn it into foo = let x = f 3 in Bar x because we'll just end up inlining x back, and that makes the simplifier loop. Better not to ANF-ise it at all. A case in point is literal strings (a MachStr is not regarded as trivial): foo = Ptr "blob"# We don't want to ANF-ise this. ************************************************************************ * * \subsection{Completing a lazy binding} * * ************************************************************************ completeBind * deals only with Ids, not TyVars * takes an already-simplified binder and RHS * is used for both recursive and non-recursive bindings * is used for both top-level and non-top-level bindings It does the following: - tries discarding a dead binding - tries PostInlineUnconditionally - add unfolding [this is the only place we add an unfolding] - add arity It does *not* attempt to do let-to-case. Why? Because it is used for - top-level bindings (when let-to-case is impossible) - many situations where the "rhs" is known to be a WHNF (so let-to-case is inappropriate). 
Nor does it do the atomic-argument thing -} completeBind :: SimplEnv -> TopLevelFlag -- Flag stuck into unfolding -> InId -- Old binder -> OutId -> OutExpr -- New binder and RHS -> SimplM SimplEnv -- completeBind may choose to do its work -- * by extending the substitution (e.g. let x = y in ...) -- * or by adding to the floats in the envt -- -- Precondition: rhs obeys the let/app invariant completeBind env top_lvl old_bndr new_bndr new_rhs | isCoVar old_bndr = case new_rhs of Coercion co -> return (extendCvSubst env old_bndr co) _ -> return (addNonRec env new_bndr new_rhs) | otherwise = ASSERT( isId new_bndr ) do { let old_info = idInfo old_bndr old_unf = unfoldingInfo old_info occ_info = occInfo old_info -- Do eta-expansion on the RHS of the binding -- See Note [Eta-expanding at let bindings] in SimplUtils ; (new_arity, final_rhs) <- tryEtaExpandRhs env new_bndr new_rhs -- Simplify the unfolding ; new_unfolding <- simplLetUnfolding env top_lvl old_bndr final_rhs old_unf ; dflags <- getDynFlags ; if postInlineUnconditionally dflags env top_lvl new_bndr occ_info final_rhs new_unfolding -- Inline and discard the binding then do { tick (PostInlineUnconditionally old_bndr) ; return (extendIdSubst env old_bndr (DoneEx final_rhs)) } -- Use the substitution to make quite, quite sure that the -- substitution will happen, since we are going to discard the binding else do { let info1 = idInfo new_bndr `setArityInfo` new_arity -- Unfolding info: Note [Setting the new unfolding] info2 = info1 `setUnfoldingInfo` new_unfolding -- Demand info: Note [Setting the demand info] -- -- We also have to nuke demand info if for some reason -- eta-expansion *reduces* the arity of the binding to less -- than that of the strictness sig. This can happen: see Note [Arity decrease]. 
info3 | isEvaldUnfolding new_unfolding || (case strictnessInfo info2 of StrictSig dmd_ty -> new_arity < dmdTypeDepth dmd_ty) = zapDemandInfo info2 `orElse` info2 | otherwise = info2 final_id = new_bndr `setIdInfo` info3 ; -- pprTrace "Binding" (ppr final_id <+> ppr new_unfolding) $ return (addNonRec env final_id final_rhs) } } -- The addNonRec adds it to the in-scope set too ------------------------------ addPolyBind :: TopLevelFlag -> SimplEnv -> OutBind -> SimplM SimplEnv -- Add a new binding to the environment, complete with its unfolding -- but *do not* do postInlineUnconditionally, because we have already -- processed some of the scope of the binding -- We still want the unfolding though. Consider -- let -- x = /\a. let y = ... in Just y -- in body -- Then we float the y-binding out (via abstractFloats and addPolyBind) -- but 'x' may well then be inlined in 'body' in which case we'd like the -- opportunity to inline 'y' too. -- -- INVARIANT: the arity is correct on the incoming binders addPolyBind top_lvl env (NonRec poly_id rhs) = do { unfolding <- simplLetUnfolding env top_lvl poly_id rhs noUnfolding -- Assumes that poly_id did not have an INLINE prag -- which is perhaps wrong. ToDo: think about this ; let final_id = setIdInfo poly_id $ idInfo poly_id `setUnfoldingInfo` unfolding ; return (addNonRec env final_id rhs) } addPolyBind _ env bind@(Rec _) = return (extendFloats env bind) -- Hack: letrecs are more awkward, so we extend "by steam" -- without adding unfoldings etc. At worst this leads to -- more simplifier iterations {- Note [Arity decrease] ~~~~~~~~~~~~~~~~~~~~~~~~ Generally speaking the arity of a binding should not decrease. But it *can* legitimately happen because of RULES. Eg f = g Int where g has arity 2, will have arity 2. But if there's a rewrite rule g Int --> h where h has arity 1, then f's arity will decrease. Here's a real-life example, which is in the output of Specialise: Rec { $dm {Arity 2} = \d.\x. op d {-# RULES forall d. 
$dm Int d = $s$dm #-} dInt = MkD .... opInt ... opInt {Arity 1} = $dm dInt $s$dm {Arity 0} = \x. op dInt } Here opInt has arity 1; but when we apply the rule its arity drops to 0. That's why Specialise goes to a little trouble to pin the right arity on specialised functions too. Note [Setting the demand info] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If the unfolding is a value, the demand info may go pear-shaped, so we nuke it. Example: let x = (a,b) in case x of (p,q) -> h p q x Here x is certainly demanded. But after we've nuked the case, we'll get just let x = (a,b) in h a b x and now x is not demanded (I'm assuming h is lazy) This really happens. Similarly let f = \x -> e in ...f..f... After inlining f at some of its call sites the original binding may (for example) be no longer strictly demanded. The solution here is a bit ad hoc... ************************************************************************ * * \subsection[Simplify-simplExpr]{The main function: simplExpr} * * ************************************************************************ The reason for this OutExprStuff stuff is that we want to float *after* simplifying a RHS, not before. If we do so naively we get quadratic behaviour as things float out. To see why it's important to do it after, consider this (real) example: let t = f x in fst t ==> let t = let a = e1 b = e2 in (a,b) in fst t ==> let a = e1 b = e2 t = (a,b) in a -- Can't inline a this round, cos it appears twice ==> e1 Each of the ==> steps is a round of simplification. We'd save a whole round if we float first. This can cascade. Consider let f = g d in \x -> ...f... ==> let f = let d1 = ..d.. in \y -> e in \x -> ...f... ==> let d1 = ..d.. in \x -> ...(\y ->e)... Only in this second round can the \y be applied, and it might do the same again. 
-}

-- | Simplify a complete expression with a boring (empty) continuation,
-- returning the rewritten expression.
simplExpr :: SimplEnv -> CoreExpr -> SimplM CoreExpr
simplExpr env expr = simplExprC env expr (mkBoringStop expr_out_ty)
  where
    expr_out_ty :: OutType
    -- The result type, in the *output* of the current substitution.
    expr_out_ty = substTy env (exprType expr)

simplExprC :: SimplEnv -> CoreExpr -> SimplCont -> SimplM CoreExpr
        -- Simplify an expression, given a continuation
simplExprC env expr cont
  = -- pprTrace "simplExprC" (ppr expr $$ ppr cont {- $$ ppr (seIdSubst env) -} $$ ppr (seFloats env) ) $
    do  { (env', expr') <- simplExprF (zapFloats env) expr cont
        ; -- pprTrace "simplExprC ret" (ppr expr $$ ppr expr') $
          -- pprTrace "simplExprC ret3" (ppr (seInScope env')) $
          -- pprTrace "simplExprC ret4" (ppr (seFloats env')) $
          -- Floats gathered while simplifying are wrapped back around the
          -- result here; the recursive call started with zapped floats.
          return (wrapFloats env' expr') }

--------------------------------------------------
-- Entry point of the main simplifier loop; the commented-out trace is
-- handy when debugging the substitution state.
simplExprF :: SimplEnv -> InExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
simplExprF env e cont
  = {- pprTrace "simplExprF" (vcat
      [ ppr e
      , text "cont =" <+> ppr cont
      , text "inscope =" <+> ppr (seInScope env)
      , text "tvsubst =" <+> ppr (seTvSubst env)
      , text "idsubst =" <+> ppr (seIdSubst env)
      , text "cvsubst =" <+> ppr (seCvSubst env)
      {- , ppr (seFloats env) -}
      ]) $ -}
    simplExprF1 env e cont

-- Dispatch on the shape of the input expression.
simplExprF1 :: SimplEnv -> InExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
simplExprF1 env (Var v)        cont = simplIdF env v cont
simplExprF1 env (Lit lit)      cont = rebuild env (Lit lit) cont
simplExprF1 env (Tick t expr)  cont = simplTick env t expr cont
simplExprF1 env (Cast body co) cont = simplCast env body co cont
simplExprF1 env (Coercion co)  cont = simplCoercionF env co cont
simplExprF1 env (Type ty)      cont = ASSERT( contIsRhsOrArg cont )
                                      rebuild env (Type (substTy env ty)) cont

simplExprF1 env (App fun arg) cont
  = simplExprF env fun $
    -- Type and value arguments are pushed onto the continuation so the
    -- function head is simplified with its full application context.
    case arg of
      Type ty -> ApplyToTy { sc_arg_ty  = substTy env ty
                           , sc_hole_ty = substTy env (exprType fun)
                           , sc_cont    = cont }
      _       -> ApplyToVal { sc_arg = arg, sc_env = env
                            , sc_dup = NoDup, sc_cont = cont }

simplExprF1 env expr@(Lam {}) cont
  = simplLam env zapped_bndrs body cont
        -- The main issue here is under-saturated lambdas
        --   (\x1. \x2. e) arg1
        -- Here x1 might have "occurs-once" occ-info, because occ-info
        -- is computed assuming that a group of lambdas is applied
        -- all at once. If there are too few args, we must zap the
        -- occ-info, UNLESS the remaining binders are one-shot
  where
    (bndrs, body) = collectBinders expr
    zapped_bndrs | need_to_zap = map zap bndrs
                 | otherwise   = bndrs

    need_to_zap = any zappable_bndr (drop n_args bndrs)
    n_args = countArgs cont
        -- NB: countArgs counts all the args (incl type args)
        -- and likewise drop counts all binders (incl type lambdas)

    zappable_bndr b = isId b && not (isOneShotBndr b)
    zap b | isTyVar b = b
          | otherwise = zapLamIdInfo b

simplExprF1 env (Case scrut bndr _ alts) cont
  = simplExprF env scrut (Select NoDup bndr alts env cont)

simplExprF1 env (Let (Rec pairs) body) cont
  = do  { env' <- simplRecBndrs env (map fst pairs)
                -- NB: bndrs' don't have unfoldings or rules
                -- We add them as we go down
        ; env'' <- simplRecBind env' NotTopLevel pairs
        ; simplExprF env'' body cont }

simplExprF1 env (Let (NonRec bndr rhs) body) cont
  = simplNonRecE env bndr (rhs, env) ([], body) cont

---------------------------------
simplType :: SimplEnv -> InType -> SimplM OutType
        -- Kept monadic just so we can do the seqType
simplType env ty
  = -- pprTrace "simplType" (ppr ty $$ ppr (seTvSubst env)) $
    seqType new_ty `seq` return new_ty
  where
    new_ty = substTy env ty

---------------------------------
simplCoercionF :: SimplEnv -> InCoercion -> SimplCont
               -> SimplM (SimplEnv, OutExpr)
simplCoercionF env co cont
  = do { co' <- simplCoercion env co
       ; rebuild env (Coercion co') cont }

-- Optimise the coercion against the current coercion-variable
-- substitution, forcing the result (seqCo) so the unoptimised
-- coercion is not retained.
simplCoercion :: SimplEnv -> InCoercion -> SimplM OutCoercion
simplCoercion env co
  = let opt_co = optCoercion (getCvSubst env) co
    in seqCo opt_co `seq` return opt_co

-----------------------------------
-- | Push a TickIt context outwards past applications and cases, as
-- long as this is a non-scoping tick, to let case and application
-- optimisations apply.
simplTick :: SimplEnv -> Tickish Id -> InExpr -> SimplCont
          -> SimplM (SimplEnv, OutExpr)
simplTick env tickish expr cont
  -- A scoped tick turns into a continuation, so that we can spot
  -- (scc t (\x . e)) in simplLam and eliminate the scc.  If we didn't do
  -- it this way, then it would take two passes of the simplifier to
  -- reduce ((scc t (\x . e)) e').
  -- NB, don't do this with counting ticks, because if the expr is
  -- bottom, then rebuildCall will discard the continuation.

-- XXX: we cannot do this, because the simplifier assumes that
-- the context can be pushed into a case with a single branch. e.g.
--    scc<f>  case expensive of p -> e
-- becomes
--    case expensive of p -> scc<f> e
--
-- So I'm disabling this for now.  It just means we will do more
-- simplifier iterations than necessary in some cases.
--  | tickishScoped tickish && not (tickishCounts tickish)
--  = simplExprF env expr (TickIt tickish cont)

  -- For unscoped or soft-scoped ticks, we are allowed to float in new
  -- cost, so we simply push the continuation inside the tick.  This
  -- has the effect of moving the tick to the outside of a case or
  -- application context, allowing the normal case and application
  -- optimisations to fire.
  | tickish `tickishScopesLike` SoftScope
  = do { (env', expr') <- simplExprF env expr cont
       ; return (env', mkTick tickish expr')
       }

  -- Push tick inside if the context looks like this will allow us to
  -- do a case-of-case - see Note [case-of-scc-of-case]
  | Select {} <- cont, Just expr' <- push_tick_inside
  = simplExprF env expr' cont

  -- We don't want to move the tick, but we might still want to allow
  -- floats to pass through with appropriate wrapping (or not, see
  -- wrap_floats below)
  --- | not (tickishCounts tickish) || tickishCanSplit tickish
  -- = wrap_floats

  | otherwise
  = no_floating_past_tick

 where
  -- Try to push tick inside a case, see Note [case-of-scc-of-case].
  push_tick_inside =
    case expr0 of
      Case scrut bndr ty alts
             -> Just $ Case (tickScrut scrut) bndr ty (map tickAlt alts)
      _other -> Nothing
   where (ticks, expr0) = stripTicksTop movable (Tick tickish expr)
         movable t      = not (tickishCounts t) ||
                          t `tickishScopesLike` NoScope ||
                          tickishCanSplit t
         tickScrut e    = foldr mkTick e ticks
         -- Alternatives get annotated with all ticks that scope in some way,
         -- but we don't want to count entries.
         tickAlt (c,bs,e) = (c,bs, foldr mkTick e ts_scope)
         ts_scope         = map mkNoCount $
                            filter (not . (`tickishScopesLike` NoScope)) ticks

  -- Default behaviour: simplify the tick's body against the part of
  -- the continuation that may be pushed inside (type apps and casts),
  -- then rebuild with the tick wrapped around the result.
  no_floating_past_tick =
    do { let (inc,outc) = splitCont cont
       ; (env', expr') <- simplExprF (zapFloats env) expr inc
       ; let tickish' = simplTickish env tickish
       ; (env'', expr'') <- rebuild (zapFloats env')
                                    (wrapFloats env' expr')
                                    (TickIt tickish' outc)
       ; return (addFloats env env'', expr'')
       }

  -- Alternative version that wraps outgoing floats with the tick.  This
  -- results in ticks being duplicated, as we don't make any attempt to
  -- eliminate the tick if we re-inline the binding (because the tick
  -- semantics allows unrestricted inlining of HNFs), so I'm not doing
  -- this any more.  FloatOut will catch any real opportunities for
  -- floating.
  --
  -- wrap_floats =
  --   do { let (inc,outc) = splitCont cont
  --      ; (env', expr') <- simplExprF (zapFloats env) expr inc
  --      ; let tickish' = simplTickish env tickish
  --      ; let wrap_float (b,rhs) = (zapIdStrictness (setIdArity b 0),
  --                                  mkTick (mkNoCount tickish') rhs)
  --             -- when wrapping a float with mkTick, we better zap the Id's
  --             -- strictness info and arity, because it might be wrong now.
  --      ; let env'' = addFloats env (mapFloats env' wrap_float)
  --      ; rebuild env'' expr' (TickIt tickish' outc)
  --      }

  -- Apply the current substitution to the Ids captured by a
  -- breakpoint; all other ticks carry no Ids and pass through.
  simplTickish env tickish
    | Breakpoint n ids <- tickish
          = Breakpoint n (map (getDoneId . substId env) ids)
    | otherwise = tickish

  -- Push type application and coercion inside a tick
  splitCont :: SimplCont -> (SimplCont, SimplCont)
  splitCont cont@(ApplyToTy { sc_cont = tail }) = (cont { sc_cont = inc }, outc)
    where (inc,outc) = splitCont tail
  splitCont (CastIt co c) = (CastIt co inc, outc)
    where (inc,outc) = splitCont c
  splitCont other = (mkBoringStop (contHoleType other), other)

  getDoneId (DoneId id) = id
  getDoneId (DoneEx e)  = getIdFromTrivialExpr e -- Note [substTickish] in CoreSubst
  getDoneId other = pprPanic "getDoneId" (ppr other)

-- Note [case-of-scc-of-case]
-- It's pretty important to be able to transform case-of-case when
-- there's an SCC in the way.  For example, the following comes up
-- in nofib/real/compress/Encode.hs:
--
--        case scctick<code_string.r1>
--             case $wcode_string_r13s wild_XC w1_s137 w2_s138 l_aje
--             of _ { (# ww1_s13f, ww2_s13g, ww3_s13h #) ->
--             (ww1_s13f, ww2_s13g, ww3_s13h)
--             }
--        of _ { (ww_s12Y, ww1_s12Z, ww2_s130) ->
--        tick<code_string.f1>
--        (ww_s12Y,
--         ww1_s12Z,
--         PTTrees.PT
--           @ GHC.Types.Char @ GHC.Types.Int wild2_Xj ww2_s130 r_ajf)
--        }
--
-- We really want this case-of-case to fire, because then the 3-tuple
-- will go away (indeed, the CPR optimisation is relying on this
-- happening).  But the scctick is in the way - we need to push it
-- inside to expose the case-of-case.  So we perform this
-- transformation on the inner case:
--
--   scctick c (case e of { p1 -> e1; ...; pn -> en })
--    ==>
--   case (scctick c e) of { p1 -> scc c e1; ...; pn -> scc c en }
--
-- So we've moved a constant amount of work out of the scc to expose
-- the case.  We only do this when the continuation is interesting:
-- for now, it has to be another Case (maybe generalise this later).
{-
************************************************************************
*                                                                      *
\subsection{The main rebuilder}
*                                                                      *
************************************************************************
-}

rebuild :: SimplEnv -> OutExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- At this point the substitution in the SimplEnv should be irrelevant
-- only the in-scope set and floats should matter
rebuild env expr cont
  = case cont of
      Stop {}        -> return (env, expr)
      TickIt t cont  -> rebuild env (mkTick t expr) cont
      CastIt co cont -> rebuild env (mkCast expr co) cont
            -- NB: mkCast implements the (Coercion co |> g) optimisation

      Select _ bndr alts se cont   -> rebuildCase (se `setFloats` env) expr bndr alts cont
      StrictArg info _ cont        -> rebuildCall env (info `addValArgTo` expr) cont
      StrictBind b bs body se cont -> do { env' <- simplNonRecX (se `setFloats` env) b expr
                                               -- expr satisfies let/app since it started life
                                               -- in a call to simplNonRecE
                                         ; simplLam env' bs body cont }

      ApplyToTy { sc_arg_ty = ty, sc_cont = cont}
        -> rebuild env (App expr (Type ty)) cont

      ApplyToVal { sc_arg = arg, sc_env = se, sc_dup = dup_flag, sc_cont = cont}
        -- See Note [Avoid redundant simplification]
        | isSimplified dup_flag -> rebuild env (App expr arg) cont
        | otherwise             -> do { arg' <- simplExpr (se `setInScope` env) arg
                                      ; rebuild env (App expr arg') cont }

{-
************************************************************************
*                                                                      *
\subsection{Lambdas}
*                                                                      *
************************************************************************
-}

-- | Simplify (body |> co0) in context cont0, pushing the coercion
-- into the continuation where possible (the Push/PushT rules).
simplCast :: SimplEnv -> InExpr -> Coercion -> SimplCont
          -> SimplM (SimplEnv, OutExpr)
simplCast env body co0 cont0
  = do  { co1 <- simplCoercion env co0
        ; cont1 <- addCoerce co1 cont0
        ; simplExprF env body cont1 }
  where
       addCoerce co cont = add_coerce co (coercionKind co) cont

       add_coerce _co (Pair s1 k1) cont     -- co :: ty~ty
         | s1 `eqType` k1 = return cont     -- is a no-op

       add_coerce co1 (Pair s1 _k2) (CastIt co2 cont)
         | (Pair _l1 t1) <- coercionKind co2
                --      e |> (g1 :: S1~L) |> (g2 :: L~T1)
                -- ==>
                --      e,                       if S1=T1
                --      e |> (g1 . g2 :: S1~T1)  otherwise
                --
                -- For example, in the initial form of a worker
                -- we may find  (coerce T (coerce S (\x.e))) y
                -- and we'd like it to simplify to e[y/x] in one round
                -- of simplification
         , s1 `eqType` t1  = return cont            -- The coerces cancel out
         | otherwise       = return (CastIt (mkTransCo co1 co2) cont)

       add_coerce co (Pair s1s2 _t1t2) cont@(ApplyToTy { sc_arg_ty = arg_ty, sc_cont = tail })
                -- (f |> g) ty  --->   (f ty) |> (g @ ty)
                -- This implements the PushT rule from the paper
         | Just (tyvar,_) <- splitForAllTy_maybe s1s2
         = ASSERT( isTyVar tyvar )
           do { cont' <- addCoerce new_cast tail
              ; return (cont { sc_cont = cont' }) }
         where
           new_cast = mkInstCo co arg_ty

       add_coerce co (Pair s1s2 t1t2) (ApplyToVal { sc_arg = arg, sc_env = arg_se
                                                  , sc_dup = dup, sc_cont = cont })
         | isFunTy s1s2   -- This implements the Push rule from the paper
         , isFunTy t1t2   -- Check t1t2 to ensure 'arg' is a value arg
                --      (e |> (g :: s1s2 ~ t1->t2)) f
                -- ===>
                --      (e (f |> (arg g :: t1~s1))
                --      |> (res g :: s2->t2)
                --
                -- t1t2 must be a function type, t1->t2, because it's applied
                -- to something but s1s2 might conceivably not be
                --
                -- When we build the ApplyTo we can't mix the out-types
                -- with the InExpr in the argument, so we simply substitute
                -- to make it all consistent.  It's a bit messy.
                -- But it isn't a common case.
                --
                -- Example of use: Trac #995
         = do { let arg' = substExpr arg_se arg
                    -- It's important that this is lazy, because this argument
                    -- may be discarded if it turns out to be the argument of
                    -- (\_ -> e)   This can make a huge difference;
                    -- see Trac #10527
              ; cont' <- addCoerce co2 cont
              ; return (ApplyToVal { sc_arg  = mkCast arg' (mkSymCo co1)
                                   , sc_env  = zapSubstEnv arg_se
                                   , sc_dup  = dup
                                   , sc_cont = cont' }) }
         where
           -- we split coercion t1->t2 ~ s1->s2 into t1 ~ s1 and
           -- t2 ~ s2 with left and right on the curried form:
           --    (->) t1 t2 ~ (->) s1 s2
           [co1, co2] = decomposeCo 2 co

       add_coerce co _ cont = return (CastIt co cont)

-- | Simplify an argument unless it is already marked Simplified, in
-- which case it is returned unchanged (see Note [Avoid redundant
-- simplification]).
simplArg :: SimplEnv -> DupFlag -> StaticEnv -> CoreExpr
         -> SimplM (DupFlag, StaticEnv, OutExpr)
simplArg env dup_flag arg_env arg
  | isSimplified dup_flag
  = return (dup_flag, arg_env, arg)
  | otherwise
  = do { arg' <- simplExpr (arg_env `setInScope` env) arg
       ; return (Simplified, zapSubstEnv arg_env, arg') }

{-
************************************************************************
*                                                                      *
\subsection{Lambdas}
*                                                                      *
************************************************************************

Note [Zap unfolding when beta-reducing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lambda-bound variables can have stable unfoldings, such as
   $j = \x. \b{Unf=Just x}. e
See Note [Case binders and join points] below; the unfolding for lets
us optimise e better.  However when we beta-reduce it we want to
revert to using the actual value, otherwise we can end up in the
stupid situation of
  let x = blah in
  let b{Unf=Just x} = y
  in ...b...
Here it'd be far better to drop the unfolding and use the actual RHS.
-}

simplLam :: SimplEnv -> [InId] -> InExpr -> SimplCont
         -> SimplM (SimplEnv, OutExpr)

simplLam env [] body cont = simplExprF env body cont

        -- Beta reduction
simplLam env (bndr:bndrs) body (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
  = do { tick (BetaReduction bndr)
       ; simplLam (extendTvSubst env bndr arg_ty) bndrs body cont }

simplLam env (bndr:bndrs) body (ApplyToVal { sc_arg = arg, sc_env = arg_se
                                           , sc_cont = cont })
  = do  { tick (BetaReduction bndr)
        ; simplNonRecE env (zap_unfolding bndr) (arg, arg_se) (bndrs, body) cont }
  where
    zap_unfolding bndr  -- See Note [Zap unfolding when beta-reducing]
      | isId bndr, isStableUnfolding (realIdUnfolding bndr)
      = setIdUnfolding bndr NoUnfolding
      | otherwise = bndr

      -- discard a non-counting tick on a lambda.  This may change the
      -- cost attribution slightly (moving the allocation of the
      -- lambda elsewhere), but we don't care: optimisation changes
      -- cost attribution all the time.
simplLam env bndrs body (TickIt tickish cont)
  | not (tickishCounts tickish)
  = simplLam env bndrs body cont

        -- Not enough args, so there are real lambdas left to put in the result
simplLam env bndrs body cont
  = do  { (env', bndrs') <- simplLamBndrs env bndrs
        ; body' <- simplExpr env' body
        ; new_lam <- mkLam bndrs' body' cont
        ; rebuild env' new_lam cont }

simplLamBndrs :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
simplLamBndrs env bndrs = mapAccumLM simplLamBndr env bndrs

-------------
simplLamBndr :: SimplEnv -> Var -> SimplM (SimplEnv, Var)
-- Used for lambda binders.  These sometimes have unfoldings added by
-- the worker/wrapper pass that must be preserved, because they can't
-- be reconstructed from context.  For example:
--      f x = case x of (a,b) -> fw a b x
--      fw a b x{=(a,b)} = ...
-- The "{=(a,b)}" is an unfolding we can't reconstruct otherwise.
simplLamBndr env bndr
  | isId bndr && hasSomeUnfolding old_unf   -- Special case
  = do { (env1, bndr1) <- simplBinder env bndr
       ; unf' <- simplUnfolding env1 NotTopLevel bndr old_unf
       ; let bndr2 = bndr1 `setIdUnfolding` unf'
       ; return (modifyInScope env1 bndr2, bndr2) }

  | otherwise
  = simplBinder env bndr                -- Normal case
  where
    old_unf = idUnfolding bndr

------------------
simplNonRecE :: SimplEnv
             -> InBndr                  -- The binder
             -> (InExpr, SimplEnv)      -- Rhs of binding (or arg of lambda)
             -> ([InBndr], InExpr)      -- Body of the let/lambda
                                        --      \xs.e
             -> SimplCont
             -> SimplM (SimplEnv, OutExpr)

-- simplNonRecE is used for
--  * non-top-level non-recursive lets in expressions
--  * beta reduction
--
-- It deals with strict bindings, via the StrictBind continuation,
-- which may abort the whole process
--
-- Precondition: rhs satisfies the let/app invariant
--               Note [CoreSyn let/app invariant] in CoreSyn
--
-- The "body" of the binding comes as a pair of ([InId],InExpr)
-- representing a lambda; so we recurse back to simplLam
-- Why?  Because of the binder-occ-info-zapping done before
--       the call to simplLam in simplExprF (Lam ...)

        -- First deal with type applications and type lets
        --   (/\a. e) (Type ty)   and   (let a = Type ty in e)
simplNonRecE env bndr (Type ty_arg, rhs_se) (bndrs, body) cont
  = ASSERT( isTyVar bndr )
    do  { ty_arg' <- simplType (rhs_se `setInScope` env) ty_arg
        ; simplLam (extendTvSubst env bndr ty_arg') bndrs body cont }

simplNonRecE env bndr (rhs, rhs_se) (bndrs, body) cont
  = do dflags <- getDynFlags
       case () of
         _ | preInlineUnconditionally dflags env NotTopLevel bndr rhs
           -> do { tick (PreInlineUnconditionally bndr)
                 ; -- pprTrace "preInlineUncond" (ppr bndr <+> ppr rhs) $
                   simplLam (extendIdSubst env bndr (mkContEx rhs_se rhs)) bndrs body cont }

           | isStrictId bndr          -- Includes coercions
           -> simplExprF (rhs_se `setFloats` env) rhs
                         (StrictBind bndr bndrs body env cont)

           | otherwise
           -> ASSERT( not (isTyVar bndr) )
              do { (env1, bndr1) <- simplNonRecBndr env bndr
                 ; (env2, bndr2) <- addBndrRules env1 bndr bndr1
                 ; env3 <- simplLazyBind env2 NotTopLevel NonRecursive bndr bndr2 rhs rhs_se
                 ; simplLam env3 bndrs body cont }

{-
************************************************************************
*                                                                      *
                     Variables
*                                                                      *
************************************************************************
-}

simplVar :: SimplEnv -> InVar -> SimplM OutExpr
-- Look up an InVar in the environment
simplVar env var
  | isTyVar var = return (Type (substTyVar env var))
  | isCoVar var = return (Coercion (substCoVar env var))
  | otherwise
  = case substId env var of
      DoneId var1          -> return (Var var1)
      DoneEx e             -> return e
      ContEx tvs cvs ids e -> simplExpr (setSubstEnv env tvs cvs ids) e

-- | Simplify an occurrence of an Id in a continuation: either the
-- substitution yields an already-simplified or suspended expression,
-- or we have a genuine call site to deal with via 'completeCall'.
simplIdF :: SimplEnv -> InId -> SimplCont -> SimplM (SimplEnv, OutExpr)
simplIdF env var cont
  = case substId env var of
      DoneEx e             -> simplExprF (zapSubstEnv env) e cont
      ContEx tvs cvs ids e -> simplExprF (setSubstEnv env tvs cvs ids) e cont
      DoneId var1          -> completeCall env var1 cont
        -- Note [zapSubstEnv]
        -- The template is already simplified, so don't re-substitute.
        -- This is VITAL.  Consider
        --      let x = e in
        --      let y = \z -> ...x... in
        --      \ x -> ...y...
        -- We'll clone the inner \x, adding x->x' in the id_subst
        -- Then when we inline y, we must *not* replace x by x' in
        -- the inlined copy!!

---------------------------------------------------------
--      Dealing with a call site

completeCall :: SimplEnv -> OutId -> SimplCont -> SimplM (SimplEnv, OutExpr)
completeCall env var cont
  = do  {   ------------- Try inlining ----------------
          dflags <- getDynFlags
        ; let  (lone_variable, arg_infos, call_cont) = contArgs cont
               n_val_args       = length arg_infos
               interesting_cont = interestingCallContext call_cont
               unfolding        = activeUnfolding env var
               maybe_inline     = callSiteInline dflags var unfolding
                                                 lone_variable arg_infos interesting_cont
        ; case maybe_inline of {
            Just expr      -- There is an inlining!
              ->  do { checkedTick (UnfoldingDone var)
                     ; dump_inline dflags expr cont
                     ; simplExprF (zapSubstEnv env) expr cont }

            ; Nothing -> do               -- No inlining!
        { rule_base <- getSimplRules
        ; let info = mkArgInfo var (getRules rule_base var) n_val_args call_cont
        ; rebuildCall env info cont
    }}}
  where
    -- Emit -ddump-inlinings output for a fired inlining, at a level
    -- of verbosity governed by the dump flags.
    dump_inline dflags unfolding cont
      | not (dopt Opt_D_dump_inlinings dflags) = return ()
      | not (dopt Opt_D_verbose_core2core dflags)
      = when (isExternalName (idName var)) $
            liftIO $ printInfoForUser dflags alwaysQualify $
                sep [text "Inlining done:", nest 4 (ppr var)]
      | otherwise
      = liftIO $ printInfoForUser dflags alwaysQualify $
           sep [text "Inlining done: " <> ppr var,
                nest 4 (vcat [text "Inlined fn: " <+> nest 2 (ppr unfolding),
                              text "Cont:  " <+> ppr cont])]

rebuildCall :: SimplEnv
            -> ArgInfo
            -> SimplCont
            -> SimplM (SimplEnv, OutExpr)
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_strs = [] }) cont
  -- When we run out of strictness args, it means
  -- that the call is definitely bottom; see SimplUtils.mkArgInfo
  -- Then we want to discard the entire strict continuation.  E.g.
  --    * case (error "hello") of { ... }
  --    * (error "Hello") arg
  --    * f (error "Hello") where f is strict
  --    etc
  -- Then, especially in the first of these cases, we'd like to discard
  -- the continuation, leaving just the bottoming expression.  But the
  -- type might not be right, so we may have to add a coerce.
  | not (contIsTrivial cont)     -- Only do this if there is a non-trivial
  = return (env, castBottomExpr res cont_ty)  -- continuation to discard, else we do it
  where                                       -- again and again!
    res     = argInfoExpr fun rev_args
    cont_ty = contResultType cont

rebuildCall env info (CastIt co cont)
  = rebuildCall env (addCastTo info co) cont

rebuildCall env info (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
  = rebuildCall env (info `addTyArgTo` arg_ty) cont

rebuildCall env info@(ArgInfo { ai_encl = encl_rules, ai_type = fun_ty
                              , ai_strs = str:strs, ai_discs = disc:discs })
            (ApplyToVal { sc_arg = arg, sc_env = arg_se
                        , sc_dup = dup_flag, sc_cont = cont })
  | isSimplified dup_flag     -- See Note [Avoid redundant simplification]
  = rebuildCall env (addValArgTo info' arg) cont

  | str                 -- Strict argument
  = -- pprTrace "Strict Arg" (ppr arg $$ ppr (seIdSubst env) $$ ppr (seInScope env)) $
    simplExprF (arg_se `setFloats` env) arg
               (StrictArg info' cci cont)
                -- Note [Shadowing]

  | otherwise                           -- Lazy argument
        -- DO NOT float anything outside, hence simplExprC
        -- There is no benefit (unlike in a let-binding), and we'd
        -- have to be very careful about bogus strictness through
        -- floating a demanded let.
  = do  { arg' <- simplExprC (arg_se `setInScope` env) arg
                             (mkLazyArgStop (funArgTy fun_ty) cci)
        ; rebuildCall env (addValArgTo info' arg') cont }
  where
    info' = info { ai_strs = strs, ai_discs = discs }
    cci | encl_rules = RuleArgCtxt
        | disc > 0   = DiscArgCtxt  -- Be keener here
        | otherwise  = BoringCtxt   -- Nothing interesting

rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_rules = rules }) cont
  | null rules
  = rebuild env (argInfoExpr fun rev_args) cont      -- No rules, common case

  | otherwise
  = do {  -- We've accumulated a simplified call in <fun,rev_args>
          -- so try rewrite rules; see Note [RULEs apply to simplified arguments]
          -- See also Note [Rules for recursive functions]
        ; let env' = zapSubstEnv env  -- See Note [zapSubstEnv];
                                      -- and NB that 'rev_args' are all fully simplified
        ; mb_rule <- tryRules env' rules fun (reverse rev_args) cont
        ; case mb_rule of {
             Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'

                 -- Rules don't match
           ; Nothing -> rebuild env (argInfoExpr fun rev_args) cont      -- No rules
    } }

{-
Note [RULES apply to simplified arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very desirable to try RULES once the arguments have been simplified, because
doing so ensures that rule cascades work in one pass.  Consider
   {-# RULES g (h x) = k x
             f (k x) = x #-}
   ...f (g (h x))...
Then we want to rewrite (g (h x)) to (k x) and only then try f's rules. If
we match f's rules against the un-simplified RHS, it won't match.  This
makes a particularly big difference when superclass selectors are involved:
        op ($p1 ($p2 (df d)))
We want all this to unravel in one sweep.

Note [Avoid redundant simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because RULES apply to simplified arguments, there's a danger of repeatedly
simplifying already-simplified arguments.  An important example is that of
        (>>=) d e1 e2
Here e1, e2 are simplified before the rule is applied, but don't really
participate in the rule firing.
So we mark them as Simplified to avoid re-simplifying them. Note [Shadowing] ~~~~~~~~~~~~~~~~ This part of the simplifier may break the no-shadowing invariant Consider f (...(\a -> e)...) (case y of (a,b) -> e') where f is strict in its second arg If we simplify the innermost one first we get (...(\a -> e)...) Simplifying the second arg makes us float the case out, so we end up with case y of (a,b) -> f (...(\a -> e)...) e' So the output does not have the no-shadowing invariant. However, there is no danger of getting name-capture, because when the first arg was simplified we used an in-scope set that at least mentioned all the variables free in its static environment, and that is enough. We can't just do innermost first, or we'd end up with a dual problem: case x of (a,b) -> f e (...(\a -> e')...) I spent hours trying to recover the no-shadowing invariant, but I just could not think of an elegant way to do it. The simplifier is already knee-deep in continuations. We have to keep the right in-scope set around; AND we have to get the effect that finding (error "foo") in a strict arg position will discard the entire application and replace it with (error "foo"). Getting all this at once is TOO HARD! 
************************************************************************
*                                                                      *
                Rewrite rules
*                                                                      *
************************************************************************
-}

tryRules :: SimplEnv -> [CoreRule]
         -> Id -> [ArgSpec] -> SimplCont
         -> SimplM (Maybe (CoreExpr, SimplCont))
-- The SimplEnv already has zapSubstEnv applied to it
-- Returns (rule RHS, remaining continuation) if some rule fired.

tryRules env rules fn args call_cont
  | null rules
  = return Nothing
{- Disabled until we fix #8326
  | fn `hasKey` tagToEnumKey   -- See Note [Optimising tagToEnum#]
  , [_type_arg, val_arg] <- args
  , Select dup bndr ((_,[],rhs1) : rest_alts) se cont <- call_cont
  , isDeadBinder bndr
  = do { dflags <- getDynFlags
       ; let enum_to_tag :: CoreAlt -> CoreAlt
                -- Takes   K -> e  into   tagK# -> e
                -- where tagK# is the tag of constructor K
             enum_to_tag (DataAlt con, [], rhs)
               = ASSERT( isEnumerationTyCon (dataConTyCon con) )
                 (LitAlt tag, [], rhs)
              where
                 tag = mkMachInt dflags (toInteger (dataConTag con - fIRST_TAG))
             enum_to_tag alt = pprPanic "tryRules: tagToEnum" (ppr alt)

             new_alts = (DEFAULT, [], rhs1) : map enum_to_tag rest_alts
             new_bndr = setIdType bndr intPrimTy
                 -- The binder is dead, but should have the right type
      ; return (Just (val_arg, Select dup new_bndr new_alts se cont)) }
-}
  | otherwise
  = do { dflags <- getDynFlags
       ; case lookupRule dflags (getUnfoldingInRuleMatch env) (activeRule env)
                         fn (argInfoAppArgs args) rules of {
           Nothing               -> return Nothing ;   -- No rule matches
           Just (rule, rule_rhs) ->
             do { checkedTick (RuleFired (ru_name rule))
                ; let cont' = pushSimplifiedArgs env
                                                 (drop (ruleArity rule) args)
                                                 call_cont
                      -- (ruleArity rule) says how many args the rule consumed
                ; dump dflags rule rule_rhs
                ; return (Just (rule_rhs, cont')) }}}
  where
    -- Emit -ddump-rule-rewrites / -ddump-rule-firings output for a
    -- fired rule, at the verbosity the flags request.
    dump dflags rule rule_rhs
      | dopt Opt_D_dump_rule_rewrites dflags
      = log_rule dflags Opt_D_dump_rule_rewrites "Rule fired" $ vcat
          [ text "Rule:" <+> ftext (ru_name rule)
          , text "Before:" <+> hang (ppr fn) 2 (sep (map ppr args))
          , text "After: " <+> pprCoreExpr rule_rhs
          , text "Cont:  " <+> ppr call_cont ]

      | dopt Opt_D_dump_rule_firings dflags
      = log_rule dflags Opt_D_dump_rule_firings "Rule fired:" $
          ftext (ru_name rule)

      | otherwise
      = return ()

    log_rule dflags flag hdr details
      = liftIO . dumpSDoc dflags alwaysQualify flag "" $
                   sep [text hdr, nest 4 details]

{-
Note [Optimising tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an enumeration data type:

  data Foo = A | B | C

Then we want to transform

   case tagToEnum# x of   ==>    case x of
     A -> e1                       DEFAULT -> e1
     B -> e2                       1#      -> e2
     C -> e3                       2#      -> e3

thereby getting rid of the tagToEnum# altogether.  If there was a DEFAULT
alternative we retain it (remember it comes first).  If not the case must
be exhaustive, and we reflect that in the transformed version by adding
a DEFAULT.  Otherwise Lint complains that the new case is not exhaustive.
See #8317.

Note [Rules for recursive functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You might think that we shouldn't apply rules for a loop breaker:
doing so might give rise to an infinite loop, because a RULE is
rather like an extra equation for the function:
     RULE:           f (g x) y = x+y
     Eqn:            f a     y = a-y

But it's too drastic to disable rules for loop breakers.
Even the foldr/build rule would be disabled, because foldr
is recursive, and hence a loop breaker:
     foldr k z (build g) = g k z
So it's up to the programmer: rules can cause divergence

************************************************************************
*                                                                      *
                Rebuilding a case expression
*                                                                      *
************************************************************************

Note [Case elimination]
~~~~~~~~~~~~~~~~~~~~~~~
The case-elimination transformation discards redundant case expressions.
Start with a simple situation:

        case x# of      ===>   let y# = x# in e
          y# -> e

(when x#, y# are of primitive type, of course).  We can't (in general)
do this for algebraic cases, because we might turn bottom into
non-bottom!
The code in SimplUtils.prepareAlts has the effect of generalising this
idea to look for a case where we're scrutinising a variable, and we
know that only the default case can match.  For example:

        case x of
          0#      -> ...
          DEFAULT -> ...(case x of
                         0#      -> ...
                         DEFAULT -> ...) ...

Here the inner case is first trimmed to have only one alternative, the
DEFAULT, after which it's an instance of the previous case.  This
really only shows up in eliminating error-checking code.

Note that SimplUtils.mkCase combines identical RHSs.  So

        case e of       ===> case e of DEFAULT -> r
           True  -> r
           False -> r

Now again the case may be eliminated by the CaseElim transformation.
This includes things like (==# a# b#)::Bool so that we simplify
      case ==# a# b# of { True -> x; False -> x }
to just
      x
This particular example shows up in default methods for
comparison operations (e.g. in (>=) for Int.Int32)

Note [Case elimination: lifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a case over a lifted type has a single alternative, and is being used
as a strict 'let' (all isDeadBinder bndrs), we may want to do this
transformation:

    case e of r       ===>   let r = e in ...r...
      _ -> ...r...

        (a) 'e' is already evaluated (it may be so if e is a variable)
            Specifically we check (exprIsHNF e).  In this case
            we can just allocate the WHNF directly with a let.
or
        (b) 'x' is not used at all and e is ok-for-speculation
             The ok-for-spec bit checks that we don't lose any
             exceptions or divergence.

             NB: it'd be *sound* to switch from case to let if the
             scrutinee was not yet WHNF but was guaranteed to
             converge; but sticking with case means we won't build a
             thunk
or
        (c) 'x' is used strictly in the body, and 'e' is a variable
            Then we can just substitute 'e' for 'x' in the body.
            See Note [Eliminating redundant seqs]

For (b), the "not used at all" test is important.
Consider

   case (case a ># b of { True -> (p,q); False -> (q,p) }) of
     r -> blah

The scrutinee is ok-for-speculation (it looks inside cases), but we do
not want to transform to

   let r = case a ># b of { True -> (p,q); False -> (q,p) }
   in blah

because that builds an unnecessary thunk.

Note [Eliminating redundant seqs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have this:
   case x of r { _ -> ..r.. }
where 'r' is used strictly in (..r..), the case is effectively a 'seq'
on 'x', but since 'r' is used strictly anyway, we can safely transform to
   (...x...)

Note that this can change the error behaviour.  For example, we might
transform
    case x of { _ -> error "bad" }
    --> error "bad"
which might be puzzling if 'x' is currently lambda-bound, but later gets
let-bound to (error "good").

Nevertheless, the paper "A semantics for imprecise exceptions" allows
this transformation.  If you want to fix the evaluation order, use
'pseq'.  See Trac #8900 for an example where the loss of this
transformation bit us in practice.

See also Note [Empty case alternatives] in CoreSyn.

Just for reference, the original code (added Jan 13) looked like this:
     || case_bndr_evald_next rhs

    case_bndr_evald_next :: CoreExpr -> Bool
      -- See Note [Case binder next]
    case_bndr_evald_next (Var v)         = v == case_bndr
    case_bndr_evald_next (Cast e _)      = case_bndr_evald_next e
    case_bndr_evald_next (App e _)       = case_bndr_evald_next e
    case_bndr_evald_next (Case e _ _ _)  = case_bndr_evald_next e
    case_bndr_evald_next _               = False

(This came up when fixing Trac #7542. See also Note [Eta reduction of
an eval'd function] in CoreUtils.)

Note [Case elimination: unlifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
   case a +# b of r -> ...r...
Then we do case-elimination (to make a let) followed by inlining,
to get
        .....(a +# b)....
If we have
   case indexArray# a i of r -> ...r...
we might like to do the same, and inline the (indexArray# a i).
But indexArray# is not okForSpeculation, so we don't build a let in
rebuildCase (lest it get floated *out*), so the inlining doesn't happen
either.  This really isn't a big deal I think.  The let can be

Further notes about case elimination
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider:       test :: Integer -> IO ()
                test = print

Turns out that this compiles to:
    Print.test
      = \ eta :: Integer
          eta1 :: Void# ->
          case PrelNum.< eta PrelNum.zeroInteger of wild { __DEFAULT ->
          case hPutStr stdout
                 (PrelNum.jtos eta ($w[] @ Char))
                 eta1
          of wild1 { (# new_s, a4 #) -> PrelIO.lvl23 new_s }}

Notice the strange '<' which has no effect at all. This is a funny one.
It started like this:
        f x y = if x < 0 then jtos x
                else if y==0 then "" else jtos x

At a particular call site we have (f v 1).  So we inline to get
        if v < 0 then jtos x
        else if 1==0 then "" else jtos x

Now simplify the 1==0 conditional:
        if v<0 then jtos v else jtos v

Now common-up the two branches of the case:
        case (v<0) of DEFAULT -> jtos v

Why don't we drop the case?  Because it's strict in v.  It's technically
wrong to drop even unnecessary evaluations, and in practice they
may be a result of 'seq' so we *definitely* don't want to drop those.
I don't really know how to improve this situation.
-}

---------------------------------------------------------
--      Eliminate the case if possible

-- rebuildCase rebuilds a case expression around an already-simplified
-- scrutinee, first trying the eliminations below (known constructor,
-- evaluated scrutinee, seq rules) before falling back on the catch-all
-- reallyRebuildCase.
rebuildCase, reallyRebuildCase
   :: SimplEnv
   -> OutExpr          -- Scrutinee
   -> InId             -- Case binder
   -> [InAlt]          -- Alternatives (increasing order)
   -> SimplCont
   -> SimplM (SimplEnv, OutExpr)

--------------------------------------------------
--      1. Eliminate the case if there's a known constructor
--------------------------------------------------

rebuildCase env scrut case_bndr alts cont
  | Lit lit <- scrut    -- No need for same treatment as constructors
                        -- because literals are inlined more vigorously
  , not (litIsLifted lit)
  = do  { tick (KnownBranch case_bndr)
        ; case findAlt (LitAlt lit) alts of
            Nothing           -> missingAlt env case_bndr alts cont
            Just (_, bs, rhs) -> simple_rhs bs rhs }

  | Just (con, ty_args, other_args) <- exprIsConApp_maybe (getUnfoldingInRuleMatch env) scrut
        -- Works when the scrutinee is a variable with a known unfolding
        -- as well as when it's an explicit constructor application
  = do  { tick (KnownBranch case_bndr)
        ; case findAlt (DataAlt con) alts of
            Nothing                 -> missingAlt env case_bndr alts cont
            Just (DEFAULT, bs, rhs) -> simple_rhs bs rhs
            Just (_, bs, rhs)       -> knownCon env scrut con ty_args other_args
                                                case_bndr bs rhs cont }
  where
    simple_rhs bs rhs = ASSERT( null bs )
                        do { env' <- simplNonRecX env case_bndr scrut
                               -- scrut is a constructor application,
                               -- hence satisfies let/app invariant
                           ; simplExprF env' rhs cont }

--------------------------------------------------
--      2. Eliminate the case if scrutinee is evaluated
--------------------------------------------------

rebuildCase env scrut case_bndr alts@[(_, bndrs, rhs)] cont
  -- See if we can get rid of the case altogether
  -- See Note [Case elimination]
  -- mkCase made sure that if all the alternatives are equal,
  -- then there is now only one (DEFAULT) rhs

  -- 2a.  Dropping the case altogether, if
  --      a) it binds nothing (so it's really just a 'seq')
  --      b) evaluating the scrutinee has no side effects
  | is_plain_seq
  , exprOkForSideEffects scrut
          -- The entire case is dead, so we can drop it
          -- if the scrutinee converges without having imperative
          --   side effects or raising a Haskell exception
          -- See Note [PrimOp can_fail and has_side_effects] in PrimOp
  = simplExprF env rhs cont

  -- 2b.  Turn the case into a let, if
  --      a) it binds only the case-binder
  --      b) unlifted case: the scrutinee is ok-for-speculation
  --           lifted case: the scrutinee is in HNF (or will later be demanded)
  | all_dead_bndrs
  , if is_unlifted
    then exprOkForSpeculation scrut
            -- See Note [Case elimination: unlifted case]
    else exprIsHNF scrut
            -- See Note [Case elimination: lifted case]
      || scrut_is_demanded_var scrut
  = do { tick (CaseElim case_bndr)
       ; env' <- simplNonRecX env case_bndr scrut
       ; simplExprF env' rhs cont }

  -- 2c. Try the seq rules if
  --     a) it binds only the case binder
  --     b) a rule for seq applies
  -- See Note [User-defined RULES for seq] in MkId
  | is_plain_seq
  = do { let scrut_ty = exprType scrut
             rhs_ty   = substTy env (exprType rhs)
             out_args = [ TyArg { as_arg_ty  = scrut_ty
                                , as_hole_ty = seq_id_ty }
                        , TyArg { as_arg_ty  = rhs_ty
                                , as_hole_ty = applyTy seq_id_ty scrut_ty }
                        , ValArg scrut]
             rule_cont = ApplyToVal { sc_dup = NoDup, sc_arg = rhs
                                    , sc_env = env, sc_cont = cont }
             env' = zapSubstEnv env
             -- Lazily evaluated, so we don't do most of this

       ; rule_base <- getSimplRules
       ; mb_rule <- tryRules env' (getRules rule_base seqId) seqId out_args rule_cont
       ; case mb_rule of
           Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
           Nothing                -> reallyRebuildCase env scrut case_bndr alts cont }
  where
    is_unlifted    = isUnLiftedType (idType case_bndr)
    all_dead_bndrs = all isDeadBinder bndrs       -- bndrs are [InId]
    is_plain_seq   = all_dead_bndrs && isDeadBinder case_bndr
                     -- Evaluation *only* for effect
    seq_id_ty      = idType seqId

    scrut_is_demanded_var :: CoreExpr -> Bool
            -- See Note [Eliminating redundant seqs]
    scrut_is_demanded_var (Cast s _) = scrut_is_demanded_var s
    scrut_is_demanded_var (Var _)    = isStrictDmd (idDemandInfo case_bndr)
    scrut_is_demanded_var _          = False

rebuildCase env scrut case_bndr alts cont
  = reallyRebuildCase env scrut case_bndr alts cont

--------------------------------------------------
--      3. Catch-all case
--------------------------------------------------

reallyRebuildCase env scrut case_bndr alts cont
  = do  {       -- Prepare the continuation;
                -- The new subst_env is in place
          (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont

        -- Simplify the alternatives
        ; (scrut', case_bndr', alts') <- simplAlts env' scrut case_bndr alts dup_cont

        ; dflags <- getDynFlags
        ; let alts_ty' = contResultType dup_cont
        ; case_expr <- mkCase dflags scrut' case_bndr' alts_ty' alts'

        -- Notice that rebuild gets the in-scope set from env', not alt_env
        -- (which in any case is only built in simplAlts)
        -- The case binder does *not* scope over the whole returned case-expression
        ; rebuild env' case_expr nodup_cont }

{-
simplCaseBinder checks whether the scrutinee is a variable, v.  If so,
try to eliminate uses of v in the RHSs in favour of case_bndr; that way,
there's a chance that v will now only be used once, and hence inlined.

Historical note: we used to do the "case binder swap" in the Simplifier
so there were additional complications if the scrutinee was a variable.
Now the binder-swap stuff is done in the occurrence analyser; see
OccurAnal Note [Binder swap].

Note [knownCon occ info]
~~~~~~~~~~~~~~~~~~~~~~~~
If the case binder is not dead, then neither are the pattern bound
variables:
        case <any> of x { (a,b) ->
        case x of { (p,q) -> p } }
Here (a,b) both look dead, but come alive after the inner case is eliminated.
The point is that we bring into the envt a binding
        let x = (a,b)
after the outer case, and that makes (a,b) alive.  At least we do unless
the case binder is guaranteed dead.

Note [Case alternative occ info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are simply reconstructing a case (the common case), we always
zap the occurrence info on the binders in the alternatives.  Even
if the case binder is dead, the scrutinee is usually a variable, and *that*
can bring the case-alternative binders back to life.
See Note [Add unfolding for scrutinee] Note [Improving seq] ~~~~~~~~~~~~~~~~~~~ Consider type family F :: * -> * type instance F Int = Int ... case e of x { DEFAULT -> rhs } ... where x::F Int. Then we'd like to rewrite (F Int) to Int, getting case e `cast` co of x'::Int I# x# -> let x = x' `cast` sym co in rhs so that 'rhs' can take advantage of the form of x'. Notice that Note [Case of cast] (in OccurAnal) may then apply to the result. Nota Bene: We only do the [Improving seq] transformation if the case binder 'x' is actually used in the rhs; that is, if the case is *not* a *pure* seq. a) There is no point in adding the cast to a pure seq. b) There is a good reason not to: doing so would interfere with seq rules (Note [Built-in RULES for seq] in MkId). In particular, this [Improving seq] thing *adds* a cast while [Built-in RULES for seq] *removes* one, so they just flip-flop. You might worry about case v of x { __DEFAULT -> ... case (v `cast` co) of y { I# -> ... }} This is a pure seq (since x is unused), so [Improving seq] won't happen. But it's ok: the simplifier will replace 'v' by 'x' in the rhs to get case v of x { __DEFAULT -> ... case (x `cast` co) of y { I# -> ... }} Now the outer case is not a pure seq, so [Improving seq] will happen, and then the inner case will disappear. The need for [Improving seq] showed up in Roman's experiments. Example: foo :: F Int -> Int -> Int foo t n = t `seq` bar n where bar 0 = 0 bar n = bar (n - case t of TI i -> i) Here we'd like to avoid repeated evaluating t inside the loop, by taking advantage of the `seq`. At one point I did transformation in LiberateCase, but it's more robust here. (Otherwise, there's a danger that we'll simply drop the 'seq' altogether, before LiberateCase gets to see it.) 
-}

simplAlts :: SimplEnv
          -> OutExpr
          -> InId            -- Case binder
          -> [InAlt]         -- Non-empty
          -> SimplCont
          -> SimplM (OutExpr, OutId, [OutAlt])  -- Includes the continuation
-- Like simplExpr, this just returns the simplified alternatives;
-- it does not return an environment
-- The returned alternatives can be empty, none are possible

simplAlts env scrut case_bndr alts cont'
  = do  { let env0 = zapFloats env

        ; (env1, case_bndr1) <- simplBinder env0 case_bndr

        ; fam_envs <- getFamEnvs
        ; (alt_env', scrut', case_bndr') <- improveSeq fam_envs env1 scrut
                                                       case_bndr case_bndr1 alts

        ; (imposs_deflt_cons, in_alts) <- prepareAlts scrut' case_bndr' alts
          -- NB: it's possible that the returned in_alts is empty: this is handled
          -- by the caller (rebuildCase) in the missingAlt function

        ; alts' <- mapM (simplAlt alt_env' (Just scrut') imposs_deflt_cons case_bndr' cont') in_alts
        ; -- pprTrace "simplAlts" (ppr case_bndr $$ ppr alts_ty $$ ppr alts_ty' $$ ppr alts $$ ppr cont') $
          return (scrut', case_bndr', alts') }


------------------------------------
improveSeq :: (FamInstEnv, FamInstEnv) -> SimplEnv
           -> OutExpr -> InId -> OutId -> [InAlt]
           -> SimplM (SimplEnv, OutExpr, OutId)
-- Note [Improving seq]
improveSeq fam_envs env scrut case_bndr case_bndr1 [(DEFAULT,_,_)]
  | not (isDeadBinder case_bndr)  -- Not a pure seq!  See Note [Improving seq]
  , Just (co, ty2) <- topNormaliseType_maybe fam_envs (idType case_bndr1)
  = do { case_bndr2 <- newId (fsLit "nt") ty2
       ; let rhs  = DoneEx (Var case_bndr2 `Cast` mkSymCo co)
             env2 = extendIdSubst env case_bndr rhs
       ; return (env2, scrut `Cast` co, case_bndr2) }

improveSeq _ env scrut _ case_bndr1 _
  = return (env, scrut, case_bndr1)


------------------------------------
-- Simplify one case alternative, bringing the pattern binders into
-- scope and recording what the case binder (and any variable scrutinee)
-- is known to be in that branch.
simplAlt :: SimplEnv
         -> Maybe OutExpr  -- The scrutinee
         -> [AltCon]       -- These constructors can't be present when
                           -- matching the DEFAULT alternative
         -> OutId          -- The case binder
         -> SimplCont
         -> InAlt
         -> SimplM OutAlt

simplAlt env _ imposs_deflt_cons case_bndr' cont' (DEFAULT, bndrs, rhs)
  = ASSERT( null bndrs )
    do  { let env' = addBinderUnfolding env case_bndr'
                                        (mkOtherCon imposs_deflt_cons)
                -- Record the constructors that the case-binder *can't* be.
        ; rhs' <- simplExprC env' rhs cont'
        ; return (DEFAULT, [], rhs') }

simplAlt env scrut' _ case_bndr' cont' (LitAlt lit, bndrs, rhs)
  = ASSERT( null bndrs )
    do  { env' <- addAltUnfoldings env scrut' case_bndr' (Lit lit)
        ; rhs' <- simplExprC env' rhs cont'
        ; return (LitAlt lit, [], rhs') }

simplAlt env scrut' _ case_bndr' cont' (DataAlt con, vs, rhs)
  = do  {       -- Deal with the pattern-bound variables
                -- Mark the ones that are in ! positions in the
                -- data constructor as certainly-evaluated.
                -- NB: simplLamBinders preserves this eval info
        ; let vs_with_evals = add_evals (dataConRepStrictness con)
        ; (env', vs') <- simplLamBndrs env vs_with_evals

                -- Bind the case-binder to (con args)
        ; let inst_tys' = tyConAppArgs (idType case_bndr')
              con_app :: OutExpr
              con_app   = mkConApp2 con inst_tys' vs'

        ; env'' <- addAltUnfoldings env' scrut' case_bndr' con_app
        ; rhs' <- simplExprC env'' rhs cont'
        ; return (DataAlt con, vs', rhs') }
  where
        -- add_evals records the evaluated-ness of the bound variables of
        -- a case pattern.  This is *important*.  Consider
        --      data T = T !Int !Int
        --
        --      case x of { T a b -> T (a+1) b }
        --
        -- We really must record that b is already evaluated so that we don't
        -- go and re-evaluate it when constructing the result.
        -- See Note [Data-con worker strictness] in MkId.lhs
    add_evals the_strs
        = go vs the_strs
        where
          go [] [] = []
          go (v:vs') strs | isTyVar v = v : go vs' strs
          go (v:vs') (str:strs)
            | isMarkedStrict str = evald_v  : go vs' strs
            | otherwise          = zapped_v : go vs' strs
            where
              zapped_v = zapIdOccInfo v   -- See Note [Case alternative occ info]
              evald_v  = zapped_v `setIdUnfolding` evaldUnfolding
          go _ _ = pprPanic "cat_evals" (ppr con $$ ppr vs $$ ppr the_strs)


-- Record, in the in-scope set, what the case binder (and a variable or
-- cast-variable scrutinee) is known to be in this alternative.
-- See Note [Add unfolding for scrutinee]
addAltUnfoldings :: SimplEnv -> Maybe OutExpr -> OutId -> OutExpr -> SimplM SimplEnv
addAltUnfoldings env scrut case_bndr con_app
  = do { dflags <- getDynFlags
       ; let con_app_unf = mkSimpleUnfolding dflags con_app
             env1 = addBinderUnfolding env case_bndr con_app_unf

             -- See Note [Add unfolding for scrutinee]
             env2 = case scrut of
                      Just (Var v)           -> addBinderUnfolding env1 v con_app_unf
                      Just (Cast (Var v) co) -> addBinderUnfolding env1 v $
                                                mkSimpleUnfolding dflags (Cast con_app (mkSymCo co))
                      _                      -> env1

       ; traceSmpl "addAltUnf" (vcat [ppr case_bndr <+> ppr scrut, ppr con_app])
       ; return env2 }

addBinderUnfolding :: SimplEnv -> Id -> Unfolding -> SimplEnv
addBinderUnfolding env bndr unf
  | debugIsOn, Just tmpl <- maybeUnfoldingTemplate unf
  = WARN( not (eqType (idType bndr) (exprType tmpl)),
          ppr bndr $$ ppr (idType bndr) $$ ppr tmpl $$ ppr (exprType tmpl) )
    modifyInScope env (bndr `setIdUnfolding` unf)

  | otherwise
  = modifyInScope env (bndr `setIdUnfolding` unf)

zapBndrOccInfo :: Bool -> Id -> Id
-- Consider  case e of b { (a,b) -> ... }
-- Then if we bind b to (a,b) in "...", and b is not dead,
-- then we must zap the deadness info on a,b
zapBndrOccInfo keep_occ_info pat_id
  | keep_occ_info = pat_id
  | otherwise     = zapIdOccInfo pat_id

{-
Note [Add unfolding for scrutinee]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general it's unlikely that a variable scrutinee will appear
in the case alternatives   case x of { ...x unlikely to appear... }
because the binder-swap in OccAnal has got rid of all such occurrences
See Note [Binder swap] in OccAnal.

BUT it is still VERY IMPORTANT to add a suitable unfolding for a
variable scrutinee, in simplAlt.  Here's why
   case x of y
     (a,b) -> case b of c
                I# v -> ...(f y)...
There is no occurrence of 'b' in the (...(f y)...).  But y gets
the unfolding (a,b), and *that* mentions b.  If f has a RULE
    RULE f (p, I# q) = ...
we want that rule to match, so we must extend the in-scope env with a
suitable unfolding for 'y'.  It's *essential* for rule matching; but
it's also good for case-elimination -- suppose that 'f' was inlined
and did multi-level case analysis, then we'd solve it in one
simplifier sweep instead of two.

Exactly the same issue arises in SpecConstr;
see Note [Add scrutinee to ValueEnv too] in SpecConstr

HOWEVER, given
  case x of y { Just a -> r1; Nothing -> r2 }
we do not want to add the unfolding x -> y to 'x', which might seem cool,
since 'y' itself has different unfoldings in r1 and r2.  Reason: if we
did that, we'd have to zap y's deadness info and that is a very useful
piece of information.

So instead we add the unfolding x -> Just a, and x -> Nothing in the
respective RHSs.


************************************************************************
*                                                                      *
\subsection{Known constructor}
*                                                                      *
************************************************************************

We are a bit careful with occurrence info.  Here's an example

        (\x* -> case x of (a*, b) -> f a) (h v, e)

where the * means "occurs once".
This effectively becomes
        case (h v, e) of (a*, b) -> f a)
and then
        let a* = h v; b = e in f a
and then
        f (h v)

All this should happen in one sweep.
-}

-- The scrutinee matched a known constructor application: bind the
-- pattern variables to the corresponding constructor arguments (and the
-- case binder, if alive, to the whole application), then simplify the
-- chosen alternative's RHS.
knownCon :: SimplEnv
         -> OutExpr                             -- The scrutinee
         -> DataCon -> [OutType] -> [OutExpr]   -- The scrutinee (in pieces)
         -> InId -> [InBndr] -> InExpr          -- The alternative
         -> SimplCont
         -> SimplM (SimplEnv, OutExpr)

knownCon env scrut dc dc_ty_args dc_args bndr bs rhs cont
  = do  { env'  <- bind_args env bs dc_args
        ; env'' <- bind_case_bndr env'
        ; simplExprF env'' rhs cont }
  where
    zap_occ = zapBndrOccInfo (isDeadBinder bndr)    -- bndr is an InId

                  -- Ugh!
    bind_args env' [] _  = return env'

    bind_args env' (b:bs') (Type ty : args)
      = ASSERT( isTyVar b )
        bind_args (extendTvSubst env' b ty) bs' args

    bind_args env' (b:bs') (arg : args)
      = ASSERT( isId b )
        do { let b' = zap_occ b
             -- Note that the binder might be "dead", because it doesn't
             -- occur in the RHS; and simplNonRecX may therefore discard
             -- it via postInlineUnconditionally.
             -- Nevertheless we must keep it if the case-binder is alive,
             -- because it may be used in the con_app.  See Note [knownCon occ info]
           ; env'' <- simplNonRecX env' b' arg  -- arg satisfies let/app invariant
           ; bind_args env'' bs' args }

    bind_args _ _ _ =
      pprPanic "bind_args" $ ppr dc $$ ppr bs $$ ppr dc_args $$
                             text "scrut:" <+> ppr scrut

       -- It's useful to bind bndr to scrut, rather than to a fresh
       -- binding      x = Con arg1 .. argn
       -- because very often the scrut is a variable, so we avoid
       -- creating, and then subsequently eliminating, a let-binding
       -- BUT, if scrut is a not a variable, we must be careful
       -- about duplicating the arg redexes; in that case, make
       -- a new con-app from the args
    bind_case_bndr env
      | isDeadBinder bndr   = return env
      | exprIsTrivial scrut = return (extendIdSubst env bndr (DoneEx scrut))
      | otherwise           = do { dc_args <- mapM (simplVar env) bs
                                         -- dc_ty_args are already OutTypes,
                                         -- but bs are InBndrs
                                 ; let con_app = Var (dataConWorkId dc)
                                                 `mkTyApps` dc_ty_args
                                                 `mkApps`   dc_args
                                 ; simplNonRecX env bndr con_app }

-------------------
missingAlt :: SimplEnv -> Id -> [InAlt] -> SimplCont -> SimplM (SimplEnv, OutExpr)
                -- This isn't strictly an error, although it is unusual.
                -- It's possible that the simplifier might "see" that
                -- an inner case has no accessible alternatives before
                -- it "sees" that the entire branch of an outer case is
                -- inaccessible.  So we simply put an error case here instead.
missingAlt env case_bndr _ cont
  = WARN( True, ptext (sLit "missingAlt") <+> ppr case_bndr )
    return (env, mkImpossibleExpr (contResultType cont))

{-
************************************************************************
*                                                                      *
\subsection{Duplicating continuations}
*                                                                      *
************************************************************************
-}

prepareCaseCont :: SimplEnv
                -> [InAlt] -> SimplCont
                -> SimplM (SimplEnv,
                           SimplCont,   -- Dupable part
                           SimplCont)   -- Non-dupable part
-- We are considering
--     K[case _ of { p1 -> r1; ...; pn -> rn }]
-- where K is some enclosing continuation for the case
-- Goal: split K into two pieces Kdup,Knodup so that
--       a) Kdup can be duplicated
--       b) Knodup[Kdup[e]] = K[e]
-- The idea is that we'll transform thus:
--          Knodup[ (case _ of { p1 -> Kdup[r1]; ...; pn -> Kdup[rn] }
--
-- We may also return some extra bindings in SimplEnv (that scope over
-- the entire continuation)
--
-- When case-of-case is off, just make the entire continuation non-dupable

prepareCaseCont env alts cont
  | not (sm_case_case (getMode env)) = return (env, mkBoringStop (contHoleType cont), cont)
  | not (many_alts alts)             = return (env, cont, mkBoringStop (contResultType cont))
  | otherwise                        = mkDupableCont env cont
  where
    many_alts :: [InAlt] -> Bool  -- True iff strictly > 1 non-bottom alternative
    many_alts []  = False         -- See Note [Bottom alternatives]
    many_alts [_] = False
    many_alts (alt:alts)
      | is_bot_alt alt = many_alts alts
      | otherwise      = not (all is_bot_alt alts)

    is_bot_alt (_,_,rhs) = exprIsBottom rhs

{-
Note [Bottom alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have
     case (case x of { A -> error .. ; B -> e; C -> error ..)
       of alts
then we can just duplicate those alts because the A and C cases
will disappear immediately.  This is more direct than creating
join points and inlining them away; and in some cases we would
not even create the join points (see Note [Single-alternative case])
and we would keep the case-of-case which is silly.  See Trac #4930.
-}

-- Split a continuation into a part that can safely be duplicated into
-- each case alternative and a residual non-dupable part; the workhorse
-- behind prepareCaseCont.
mkDupableCont :: SimplEnv -> SimplCont
              -> SimplM (SimplEnv, SimplCont, SimplCont)

mkDupableCont env cont
  | contIsDupable cont
  = return (env, cont, mkBoringStop (contResultType cont))

mkDupableCont _ (Stop {}) = panic "mkDupableCont"     -- Handled by previous eqn

mkDupableCont env (CastIt ty cont)
  = do  { (env', dup, nodup) <- mkDupableCont env cont
        ; return (env', CastIt ty dup, nodup) }

-- Duplicating ticks for now, not sure if this is good or not
mkDupableCont env cont@(TickIt{})
  = return (env, mkBoringStop (contHoleType cont), cont)

mkDupableCont env cont@(StrictBind {})
  = return (env, mkBoringStop (contHoleType cont), cont)
        -- See Note [Duplicating StrictBind]

mkDupableCont env (StrictArg info cci cont)
        -- See Note [Duplicating StrictArg]
  = do { (env', dup, nodup) <- mkDupableCont env cont
       ; (env'', args')     <- mapAccumLM makeTrivialArg env' (ai_args info)
       ; return (env'', StrictArg (info { ai_args = args' }) cci dup, nodup) }

mkDupableCont env cont@(ApplyToTy { sc_cont = tail })
  = do  { (env', dup_cont, nodup_cont) <- mkDupableCont env tail
        ; return (env', cont { sc_cont = dup_cont }, nodup_cont ) }

mkDupableCont env (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_env = se, sc_cont = cont })
  =     -- e.g.         [...hole...] (...arg...)
        --      ==>
        --              let a = ...arg...
        --              in [...hole...] a
    do  { (env', dup_cont, nodup_cont) <- mkDupableCont env cont
        ; (_, se', arg') <- simplArg env' dup se arg
        ; (env'', arg'') <- makeTrivial NotTopLevel env' arg'
        ; let app_cont = ApplyToVal { sc_arg = arg'', sc_env = se'
                                    , sc_dup = OkToDup, sc_cont = dup_cont }
        ; return (env'', app_cont, nodup_cont) }

mkDupableCont env cont@(Select _ case_bndr [(_, bs, _rhs)] _ _)
--  See Note [Single-alternative case]
--  | not (exprIsDupable rhs && contIsDupable case_cont)
--  | not (isDeadBinder case_bndr)
  | all isDeadBinder bs  -- InIds
    && not (isUnLiftedType (idType case_bndr))
    -- Note [Single-alternative-unlifted]
  = return (env, mkBoringStop (contHoleType cont), cont)

mkDupableCont env (Select _ case_bndr alts se cont)
  =     -- e.g.         (case [...hole...] of { pi -> ei })
        --      ===>
        --              let ji = \xij -> ei
        --              in case [...hole...] of { pi -> ji xij }
    do  { tick (CaseOfCase case_bndr)
        ; (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
                -- NB: We call prepareCaseCont here.  If there is only one
                -- alternative, then dup_cont may be big, but that's ok
                -- because we push it into the single alternative, and then
                -- use mkDupableAlt to turn that simplified alternative into
                -- a join point if it's too big to duplicate.
                -- And this is important: see Note [Fusing case continuations]

        ; let alt_env = se `setInScope` env'

        ; (alt_env', case_bndr') <- simplBinder alt_env case_bndr
        ; alts' <- mapM (simplAlt alt_env' Nothing [] case_bndr' dup_cont) alts
                -- Safe to say that there are no handled-cons for the DEFAULT case
                -- NB: simplBinder does not zap deadness occ-info, so
                -- a dead case_bndr' will still advertise its deadness
                -- This is really important because in
                --      case e of b { (# p,q #) -> ... }
                -- b is always dead, and indeed we are not allowed to bind b to (# p,q #),
                -- which might happen if e was an explicit unboxed pair and b wasn't marked dead.
                -- In the new alts we build, we have the new case binder, so it must retain
                -- its deadness.
                -- NB: we don't use alt_env further; it has the substEnv for
                --     the alternatives, and we don't want that

        ; (env'', alts'') <- mkDupableAlts env' case_bndr' alts'
        ; return (env'',  -- Note [Duplicated env]
                  Select OkToDup case_bndr' alts'' (zapSubstEnv env'')
                         (mkBoringStop (contHoleType nodup_cont)),
                  nodup_cont) }


mkDupableAlts :: SimplEnv -> OutId -> [InAlt]
              -> SimplM (SimplEnv, [InAlt])
-- Absorbs the continuation into the new alternatives

mkDupableAlts env case_bndr' the_alts
  = go env the_alts
  where
    go env0 [] = return (env0, [])
    go env0 (alt:alts)
        = do { (env1, alt')  <- mkDupableAlt env0 case_bndr' alt
             ; (env2, alts') <- go env1 alts
             ; return (env2, alt' : alts' ) }

-- Make one alternative safe to duplicate: if its RHS is small, keep it
-- as-is; otherwise float the RHS out as a join point and replace the
-- RHS by a call to that join point.
mkDupableAlt :: SimplEnv -> OutId
             -> (AltCon, [CoreBndr], CoreExpr)
             -> SimplM (SimplEnv, (AltCon, [CoreBndr], CoreExpr))
mkDupableAlt env case_bndr (con, bndrs', rhs')
  = do dflags <- getDynFlags
       if exprIsDupable dflags rhs'  -- Note [Small alternative rhs]
        then return (env, (con, bndrs', rhs'))
        else
            do { let rhs_ty'  = exprType rhs'
                     scrut_ty = idType case_bndr
                     case_bndr_w_unf
                        = case con of
                              DEFAULT    -> case_bndr
                              DataAlt dc -> setIdUnfolding case_bndr unf
                                  where
                                     -- See Note [Case binders and join points]
                                     unf = mkInlineUnfolding Nothing rhs
                                     rhs = mkConApp2 dc (tyConAppArgs scrut_ty) bndrs'

                              LitAlt {} -> WARN( True, ptext (sLit "mkDupableAlt")
                                                        <+> ppr case_bndr <+> ppr con )
                                           case_bndr
                                   -- The case binder is alive but trivial, so why has
                                   -- it not been substituted away?

                     used_bndrs' | isDeadBinder case_bndr = filter abstract_over bndrs'
                                 | otherwise              = bndrs' ++ [case_bndr_w_unf]

                     abstract_over bndr
                         | isTyVar bndr = True -- Abstract over all type variables just in case
                         | otherwise    = not (isDeadBinder bndr)
                               -- The deadness info on the new Ids is preserved by simplBinders

               ; (final_bndrs', final_args)    -- Note [Join point abstraction]
                    <- if (any isId used_bndrs')
                       then return (used_bndrs', varsToCoreExprs used_bndrs')
                       else do { rw_id <- newId (fsLit "w") voidPrimTy
                               ; return ([setOneShotLambda rw_id], [Var voidPrimId]) }

               ; join_bndr <- newId (fsLit "$j") (mkPiTypes final_bndrs' rhs_ty')
                       -- Note [Funky mkPiTypes]

               ; let   -- We make the lambdas into one-shot-lambdas.  The
                       -- join point is sure to be applied at most once, and doing so
                       -- prevents the body of the join point being floated out by
                       -- the full laziness pass
                       really_final_bndrs     = map one_shot final_bndrs'
                       one_shot v | isId v    = setOneShotLambda v
                                  | otherwise = v
                       join_rhs   = mkLams really_final_bndrs rhs'
                       join_arity = exprArity join_rhs
                       join_call  = mkApps (Var join_bndr) final_args

               ; env' <- addPolyBind NotTopLevel env
                                     (NonRec (join_bndr `setIdArity` join_arity) join_rhs)
               ; return (env', (con, bndrs', join_call)) }
                       -- See Note [Duplicated env]

{-
Note [Fusing case continuations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's important to fuse two successive case continuations when the
first has one alternative.  That's why we call prepareCaseCont here.
Consider this, which arises from thunk splitting (see Note [Thunk splitting] in WorkWrap): let x* = case (case v of {pn -> rn}) of I# a -> I# a in body The simplifier will find (Var v) with continuation Select (pn -> rn) ( Select [I# a -> I# a] ( StrictBind body Stop So we'll call mkDupableCont on Select [I# a -> I# a] (StrictBind body Stop) There is just one alternative in the first Select, so we want to simplify the rhs (I# a) with continuation (StricgtBind body Stop) Supposing that body is big, we end up with let $j a = <let x = I# a in body> in case v of { pn -> case rn of I# a -> $j a } This is just what we want because the rn produces a box that the case rn cancels with. See Trac #4957 a fuller example. Note [Case binders and join points] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider this case (case .. ) of c { I# c# -> ....c.... If we make a join point with c but not c# we get $j = \c -> ....c.... But if later inlining scrutines the c, thus $j = \c -> ... case c of { I# y -> ... } ... we won't see that 'c' has already been scrutinised. This actually happens in the 'tabulate' function in wave4main, and makes a significant difference to allocation. An alternative plan is this: $j = \c# -> let c = I# c# in ...c.... but that is bad if 'c' is *not* later scrutinised. So instead we do both: we pass 'c' and 'c#' , and record in c's inlining (a stable unfolding) that it's really I# c#, thus $j = \c# -> \c[=I# c#] -> ...c.... Absence analysis may later discard 'c'. NB: take great care when doing strictness analysis; see Note [Lamba-bound unfoldings] in DmdAnal. Also note that we can still end up passing stuff that isn't used. Before strictness analysis we have let $j x y c{=(x,y)} = (h c, ...) in ... After strictness analysis we see that h is strict, we end up with let $j x y c{=(x,y)} = ($wh x y, ...) and c is unused. 
Note [Duplicated env] ~~~~~~~~~~~~~~~~~~~~~ Some of the alternatives are simplified, but have not been turned into a join point So they *must* have an zapped subst-env. So we can't use completeNonRecX to bind the join point, because it might to do PostInlineUnconditionally, and we'd lose that when zapping the subst-env. We could have a per-alt subst-env, but zapping it (as we do in mkDupableCont, the Select case) is safe, and at worst delays the join-point inlining. Note [Small alternative rhs] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ It is worth checking for a small RHS because otherwise we get extra let bindings that may cause an extra iteration of the simplifier to inline back in place. Quite often the rhs is just a variable or constructor. The Ord instance of Maybe in PrelMaybe.lhs, for example, took several extra iterations because the version with the let bindings looked big, and so wasn't inlined, but after the join points had been inlined it looked smaller, and so was inlined. NB: we have to check the size of rhs', not rhs. Duplicating a small InAlt might invalidate occurrence information However, if it *is* dupable, we return the *un* simplified alternative, because otherwise we'd need to pair it up with an empty subst-env.... but we only have one env shared between all the alts. (Remember we must zap the subst-env before re-simplifying something). Rather than do this we simply agree to re-simplify the original (small) thing later. Note [Funky mkPiTypes] ~~~~~~~~~~~~~~~~~~~~~~ Notice the funky mkPiTypes. If the contructor has existentials it's possible that the join point will be abstracted over type variables as well as term variables. Example: Suppose we have data T = forall t. C [t] Then faced with case (case e of ...) of C t xs::[t] -> rhs We get the join point let j :: forall t. [t] -> ... j = /\t \xs::[t] -> rhs in case (case e of ...) 
of C t xs::[t] -> j t xs Note [Join point abstraction] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Join points always have at least one value argument, for several reasons * If we try to lift a primitive-typed something out for let-binding-purposes, we will *caseify* it (!), with potentially-disastrous strictness results. So instead we turn it into a function: \v -> e where v::Void#. The value passed to this function is void, which generates (almost) no code. * CPR. We used to say "&& isUnLiftedType rhs_ty'" here, but now we make the join point into a function whenever used_bndrs' is empty. This makes the join-point more CPR friendly. Consider: let j = if .. then I# 3 else I# 4 in case .. of { A -> j; B -> j; C -> ... } Now CPR doesn't w/w j because it's a thunk, so that means that the enclosing function can't w/w either, which is a lose. Here's the example that happened in practice: kgmod :: Int -> Int -> Int kgmod x y = if x > 0 && y < 0 || x < 0 && y > 0 then 78 else 5 * Let-no-escape. We want a join point to turn into a let-no-escape so that it is implemented as a jump, and one of the conditions for LNE is that it's not updatable. In CoreToStg, see Note [What is a non-escaping let] * Floating. Since a join point will be entered once, no sharing is gained by floating out, but something might be lost by doing so because it might be allocated. I have seen a case alternative like this: True -> \v -> ... It's a bit silly to add the realWorld dummy arg in this case, making $j = \s v -> ... True -> $j s (the \v alone is enough to make CPR happy) but I think it's rare There's a slight infelicity here: we pass the overall case_bndr to all the join points if it's used in *any* RHS, because we don't know its usage in each RHS separately Note [Duplicating StrictArg] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The original plan had (where E is a big argument) e.g. f E [..hole..] ==> let $j = \a -> f E a in $j [..hole..] But this is terrible! 
Here's an example: && E (case x of { T -> F; F -> T }) Now, && is strict so we end up simplifying the case with an ArgOf continuation. If we let-bind it, we get let $j = \v -> && E v in simplExpr (case x of { T -> F; F -> T }) (ArgOf (\r -> $j r) And after simplifying more we get let $j = \v -> && E v in case x of { T -> $j F; F -> $j T } Which is a Very Bad Thing What we do now is this f E [..hole..] ==> let a = E in f a [..hole..] Now if the thing in the hole is a case expression (which is when we'll call mkDupableCont), we'll push the function call into the branches, which is what we want. Now RULES for f may fire, and call-pattern specialisation. Here's an example from Trac #3116 go (n+1) (case l of 1 -> bs' _ -> Chunk p fpc (o+1) (l-1) bs') If we can push the call for 'go' inside the case, we get call-pattern specialisation for 'go', which is *crucial* for this program. Here is the (&&) example: && E (case x of { T -> F; F -> T }) ==> let a = E in case x of { T -> && a F; F -> && a T } Much better! Notice that * Arguments to f *after* the strict one are handled by the ApplyToVal case of mkDupableCont. Eg f [..hole..] E * We can only do the let-binding of E because the function part of a StrictArg continuation is an explicit syntax tree. In earlier versions we represented it as a function (CoreExpr -> CoreEpxr) which we couldn't take apart. Do *not* duplicate StrictBind and StritArg continuations. We gain nothing by propagating them into the expressions, and we do lose a lot. The desire not to duplicate is the entire reason that mkDupableCont returns a pair of continuations. Note [Duplicating StrictBind] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Unlike StrictArg, there doesn't seem anything to gain from duplicating a StrictBind continuation, so we don't. Note [Single-alternative cases] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This case is just like the ArgOf case. Here's an example: data T a = MkT !a ...(MkT (abs x))... 
Then we get case (case x of I# x' -> case x' <# 0# of True -> I# (negate# x') False -> I# x') of y { DEFAULT -> MkT y Because the (case x) has only one alternative, we'll transform to case x of I# x' -> case (case x' <# 0# of True -> I# (negate# x') False -> I# x') of y { DEFAULT -> MkT y But now we do *NOT* want to make a join point etc, giving case x of I# x' -> let $j = \y -> MkT y in case x' <# 0# of True -> $j (I# (negate# x')) False -> $j (I# x') In this case the $j will inline again, but suppose there was a big strict computation enclosing the orginal call to MkT. Then, it won't "see" the MkT any more, because it's big and won't get duplicated. And, what is worse, nothing was gained by the case-of-case transform. So, in circumstances like these, we don't want to build join points and push the outer case into the branches of the inner one. Instead, don't duplicate the continuation. When should we use this strategy? We should not use it on *every* single-alternative case: e.g. case (case ....) of (a,b) -> (# a,b #) Here we must push the outer case into the inner one! Other choices: * Match [(DEFAULT,_,_)], but in the common case of Int, the alternative-filling-in code turned the outer case into case (...) of y { I# _ -> MkT y } * Match on single alternative plus (not (isDeadBinder case_bndr)) Rationale: pushing the case inwards won't eliminate the construction. But there's a risk of case (...) of y { (a,b) -> let z=(a,b) in ... } Now y looks dead, but it'll come alive again. Still, this seems like the best option at the moment. * Match on single alternative plus (all (isDeadBinder bndrs)) Rationale: this is essentially seq. * Match when the rhs is *not* duplicable, and hence would lead to a join point. This catches the disaster-case above. We can test the *un-simplified* rhs, which is fine. It might get bigger or smaller after simplification; if it gets smaller, this case might fire next time round. 
NB also that we must test contIsDupable case_cont *too, because case_cont might be big! HOWEVER: I found that this version doesn't work well, because we can get let x = case (...) of { small } in ...case x... When x is inlined into its full context, we find that it was a bad idea to have pushed the outer case inside the (...) case. Note [Single-alternative-unlifted] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Here's another single-alternative where we really want to do case-of-case: data Mk1 = Mk1 Int# | Mk2 Int# M1.f = \r [x_s74 y_s6X] case case y_s6X of tpl_s7m { M1.Mk1 ipv_s70 -> ipv_s70; M1.Mk2 ipv_s72 -> ipv_s72; } of wild_s7c { __DEFAULT -> case case x_s74 of tpl_s7n { M1.Mk1 ipv_s77 -> ipv_s77; M1.Mk2 ipv_s79 -> ipv_s79; } of wild1_s7b { __DEFAULT -> ==# [wild1_s7b wild_s7c]; }; }; So the outer case is doing *nothing at all*, other than serving as a join-point. In this case we really want to do case-of-case and decide whether to use a real join point or just duplicate the continuation: let $j s7c = case x of Mk1 ipv77 -> (==) s7c ipv77 Mk1 ipv79 -> (==) s7c ipv79 in case y of Mk1 ipv70 -> $j ipv70 Mk2 ipv72 -> $j ipv72 Hence: check whether the case binder's type is unlifted, because then the outer case is *not* a seq. ************************************************************************ * * Unfoldings * * ************************************************************************ -} simplLetUnfolding :: SimplEnv-> TopLevelFlag -> InId -> OutExpr -> Unfolding -> SimplM Unfolding simplLetUnfolding env top_lvl id new_rhs unf | isStableUnfolding unf = simplUnfolding env top_lvl id unf | otherwise = bottoming `seq` -- See Note [Force bottoming field] do { dflags <- getDynFlags ; return (mkUnfolding dflags InlineRhs (isTopLevel top_lvl) bottoming new_rhs) } -- We make an unfolding *even for loop-breakers*. 
-- Reason: (a) It might be useful to know that they are WHNF -- (b) In TidyPgm we currently assume that, if we want to -- expose the unfolding then indeed we *have* an unfolding -- to expose. (We could instead use the RHS, but currently -- we don't.) The simple thing is always to have one. where bottoming = isBottomingId id simplUnfolding :: SimplEnv-> TopLevelFlag -> InId -> Unfolding -> SimplM Unfolding -- Note [Setting the new unfolding] simplUnfolding env top_lvl id unf = case unf of NoUnfolding -> return unf OtherCon {} -> return unf DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args } -> do { (env', bndrs') <- simplBinders rule_env bndrs ; args' <- mapM (simplExpr env') args ; return (mkDFunUnfolding bndrs' con args') } CoreUnfolding { uf_tmpl = expr, uf_src = src, uf_guidance = guide } | isStableSource src -> do { expr' <- simplExpr rule_env expr ; case guide of UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok } -- Happens for INLINE things -> let guide' = UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok , ug_boring_ok = inlineBoringOk expr' } -- Refresh the boring-ok flag, in case expr' -- has got small. This happens, notably in the inlinings -- for dfuns for single-method classes; see -- Note [Single-method classes] in TcInstDcls. -- A test case is Trac #4138 in return (mkCoreUnfolding src is_top_lvl expr' guide') -- See Note [Top-level flag on inline rules] in CoreUnfold _other -- Happens for INLINABLE things -> bottoming `seq` -- See Note [Force bottoming field] do { dflags <- getDynFlags ; return (mkUnfolding dflags src is_top_lvl bottoming expr') } } -- If the guidance is UnfIfGoodArgs, this is an INLINABLE -- unfolding, and we need to make sure the guidance is kept up -- to date with respect to any changes in the unfolding. 
| otherwise -> return noUnfolding -- Discard unstable unfoldings where bottoming = isBottomingId id is_top_lvl = isTopLevel top_lvl act = idInlineActivation id rule_env = updMode (updModeForStableUnfoldings act) env -- See Note [Simplifying inside stable unfoldings] in SimplUtils {- Note [Force bottoming field] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We need to force bottoming, or the new unfolding holds on to the old unfolding (which is part of the id). Note [Setting the new unfolding] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * If there's an INLINE pragma, we simplify the RHS gently. Maybe we should do nothing at all, but simplifying gently might get rid of more crap. * If not, we make an unfolding from the new RHS. But *only* for non-loop-breakers. Making loop breakers not have an unfolding at all means that we can avoid tests in exprIsConApp, for example. This is important: if exprIsConApp says 'yes' for a recursive thing, then we can get into an infinite loop If there's an stable unfolding on a loop breaker (which happens for INLINEABLE), we hang on to the inlining. It's pretty dodgy, but the user did say 'INLINE'. May need to revisit this choice. ************************************************************************ * * Rules * * ************************************************************************ Note [Rules in a letrec] ~~~~~~~~~~~~~~~~~~~~~~~~ After creating fresh binders for the binders of a letrec, we substitute the RULES and add them back onto the binders; this is done *before* processing any of the RHSs. This is important. Manuel found cases where he really, really wanted a RULE for a recursive function to apply in that function's own right-hand side. See Note [Loop breaking and RULES] in OccAnal. 
-} addBndrRules :: SimplEnv -> InBndr -> OutBndr -> SimplM (SimplEnv, OutBndr) -- Rules are added back into the bin addBndrRules env in_id out_id | null old_rules = return (env, out_id) | otherwise = do { new_rules <- simplRules env (Just (idName out_id)) old_rules ; let final_id = out_id `setIdSpecialisation` mkRuleInfo new_rules ; return (modifyInScope env final_id, final_id) } where old_rules = ruleInfoRules (idSpecialisation in_id) simplRules :: SimplEnv -> Maybe Name -> [CoreRule] -> SimplM [CoreRule] simplRules env mb_new_nm rules = mapM simpl_rule rules where simpl_rule rule@(BuiltinRule {}) = return rule simpl_rule rule@(Rule { ru_bndrs = bndrs, ru_args = args , ru_fn = fn_name, ru_rhs = rhs }) = do { (env', bndrs') <- simplBinders env bndrs ; let rule_env = updMode updModeForRules env' ; args' <- mapM (simplExpr rule_env) args ; rhs' <- simplExpr rule_env rhs ; return (rule { ru_bndrs = bndrs' , ru_fn = mb_new_nm `orElse` fn_name , ru_args = args' , ru_rhs = rhs' }) }
rahulmutt/ghcvm
compiler/Eta/SimplCore/Simplify.hs
Haskell
bsd-3-clause
123,358
--------------------------------------------------------------------------------
-- Copyright © 2011 National Institute of Aerospace / Galois, Inc.
--------------------------------------------------------------------------------

-- | Main import module for the front-end language.
--
-- This module only re-exports "Copilot.Language"; import it to get the
-- whole front-end surface in one line.

{-# LANGUAGE Safe #-}

module Copilot
  ( module Copilot.Language
  ) where

import Copilot.Language

--------------------------------------------------------------------------------
niswegmann/copilot-language
src/Copilot.hs
Haskell
bsd-3-clause
467
{-|
Module      : Main
Copyright   : (c) Henry J. Wylde, 2015
License     : BSD3
Maintainer  : hjwylde@gmail.com
-}

module Main (
    main
) where

import Control.Monad

import Qux.Test.Integration
import Qux.Test.Steps

import System.Directory
import System.FilePath

import Test.Tasty
import Test.Tasty.Golden

-- | Runs the golden test suite discovered under @test/build/tests@.
main :: IO ()
main = defaultMain =<< tests

-- | Discovers the test directories and builds one golden test per directory.
--
-- Hidden entries (names starting with @.@, which also covers the @.@ and
-- @..@ pseudo-entries) are ignored, as are directories containing a
-- @pending@ marker file.
tests :: IO TestTree
tests = do
    testsDir <- (\dir -> dir </> "test" </> "build" </> "tests") <$> getCurrentDirectory

    -- 'take 1' is total, unlike the previous '(/= '.') . head', so an
    -- empty entry name can no longer crash the discovery pass.
    testDirs <- filter (\entry -> take 1 entry /= ".") <$> getDirectoryContents testsDir

    testTrees <- mapM test =<< filterM
        (\testDir -> not <$> doesFileExist (testDir </> "pending"))
        (map (combine testsDir) testDirs)

    return $ testGroup "Tests" testTrees

-- | Builds the golden test for a single test directory: cleans the
-- directory, then compares its expected output file against the output
-- produced by running the build step with the directory as the working
-- directory.
test :: String -> IO TestTree
test dir = do
    let name = takeFileName dir

    clean dir

    return $ goldenVsFile name (expectedOutputFilePath dir) (actualOutputFilePath dir) (withCurrentDirectory dir $ build ".")
qux-lang/qux
test/build/app/Main.hs
Haskell
bsd-3-clause
1,004
{-# LANGUAGE ScopedTypeVariables, TypeSynonymInstances, FlexibleInstances, OverlappingInstances, IncoherentInstances #-} -- | Pretty printing utils. -- Taken from BuildBox 2.1.0.1: -- http://hackage.haskell.org/packages/archive/buildbox/2.1.0.1/doc/html/BuildBox-Pretty.html module Pretty ( module Text.PrettyPrint , Pretty(..) , padRc, padR , padLc, padL , blank , pprEngDouble , pprEngInteger , pprTimestampAbs , pprTimestampEng , pprValidate , pprMap , renderLong , padLines , chunks' , chunks , trunc) where import Text.PrettyPrint import Text.Printf import Control.Monad import GHC.RTS.Events (Timestamp) import qualified Data.Map as M import Data.Maybe (fromMaybe) -- Things that can be pretty printed class Pretty a where ppr :: a -> Doc -- Basic instances instance Pretty Doc where ppr = id instance Pretty Float where ppr = text . show instance Pretty Int where ppr = int instance Pretty Integer where ppr = text . show {- instance Pretty UTCTime where ppr = text . show -} instance Pretty Timestamp where ppr = text . show instance Pretty a => Pretty [a] where ppr xx = lbrack <> (hcat $ punctuate (text ", ") (map ppr xx)) <> rbrack instance Pretty String where ppr = text -- | Right justify a doc, padding with a given character. padRc :: Int -> Char -> Doc -> Doc padRc n c str = (text $ replicate (n - length (render str)) c) <> str -- | Right justify a string with spaces. padR :: Int -> Doc -> Doc padR n str = padRc n ' ' str -- | Left justify a string, padding with a given character. padLc :: Int -> Char -> Doc -> Doc padLc n c str = str <> (text $ replicate (n - length (render str)) c) -- | Left justify a string with spaces. padL :: Int -> Doc -> Doc padL n str = padLc n ' ' str -- | Blank text. This is different different from `empty` because it comes out a a newline when used in a `vcat`. blank :: Doc blank = ppr "" -- | Like `pprEngDouble` but don't display fractional part when the value is < 1000. 
-- Good for units where fractional values might not make sense (like bytes). pprEngInteger :: String -> Integer -> Maybe Doc pprEngInteger unit k | k < 0 = liftM (text "-" <>) $ pprEngInteger unit (-k) | k > 1000 = pprEngDouble unit (fromRational $ toRational k) | otherwise = Just $ text $ printf "%5d%s " k unit -- | Pretty print an engineering value, to 4 significant figures. -- Valid range is 10^(-24) (y\/yocto) to 10^(+24) (Y\/Yotta). -- Out of range values yield Nothing. -- -- examples: -- -- @ -- liftM render $ pprEngDouble \"J\" 102400 ==> Just \"1.024MJ\" -- liftM render $ pprEngDouble \"s\" 0.0000123 ==> Just \"12.30us\" -- @ -- pprEngDouble :: String -> Double -> Maybe Doc pprEngDouble unit k | k < 0 = liftM (text "-" <>) $ pprEngDouble unit (-k) | k >= 1e+27 = Nothing | k >= 1e+24 = Just $ (k*1e-24) `with` ("Y" ++ unit) | k >= 1e+21 = Just $ (k*1e-21) `with` ("Z" ++ unit) | k >= 1e+18 = Just $ (k*1e-18) `with` ("E" ++ unit) | k >= 1e+15 = Just $ (k*1e-15) `with` ("P" ++ unit) | k >= 1e+12 = Just $ (k*1e-12) `with` ("T" ++ unit) | k >= 1e+9 = Just $ (k*1e-9) `with` ("G" ++ unit) | k >= 1e+6 = Just $ (k*1e-6) `with` ("M" ++ unit) | k >= 1e+3 = Just $ (k*1e-3) `with` ("k" ++ unit) | k >= 1 = Just $ k `with` (unit ++ " ") | k >= 1e-3 = Just $ (k*1e+3) `with` ("m" ++ unit) | k >= 1e-6 = Just $ (k*1e+6) `with` ("u" ++ unit) | k >= 1e-9 = Just $ (k*1e+9) `with` ("n" ++ unit) | k >= 1e-12 = Just $ (k*1e+12) `with` ("p" ++ unit) | k >= 1e-15 = Just $ (k*1e+15) `with` ("f" ++ unit) | k >= 1e-18 = Just $ (k*1e+18) `with` ("a" ++ unit) | k >= 1e-21 = Just $ (k*1e+21) `with` ("z" ++ unit) | k >= 1e-24 = Just $ (k*1e+24) `with` ("y" ++ unit) | k >= 1e-27 = Nothing | otherwise = Just $ text $ printf "%5.0f%s " k unit where with (t :: Double) (u :: String) | t >= 1e3 = text $ printf "%.0f%s" t u | t >= 1e2 = text $ printf "%.1f%s" t u | t >= 1e1 = text $ printf "%.2f%s" t u | otherwise = text $ printf "%.3f%s" t u -- | print an absolute time, in the format used by 
threadscope pprTimestampAbs :: Timestamp -> Doc pprTimestampAbs v = text (printf "%.9fs" v') where v' = fromIntegral v / 1e+9 :: Double pprTimestampEng :: Timestamp -> Doc pprTimestampEng v = fromMaybe (text "-") (pprEngDouble "s" v') where v' = fromIntegral v / 1e+9 pprValidate :: (s -> Doc) -> (i -> Doc) -> Either (s, i) s -> Doc pprValidate pprState pprInput (Left (state, input)) = vcat [ text "Invalid eventlog:" , text "State:" , pprState state , text "Input:" , pprInput input ] pprValidate pprState _ (Right state) = vcat [ text "Valid eventlog: ", pprState state ] pprMap :: Ord k => (k -> Doc) -> (a -> Doc) -> M.Map k a -> Doc pprMap pprKey pprValue m = vcat $ zipWith (<>) (map pprKey . M.keys $ m) (map (pprValue . (M.!) m) . M.keys $ m) renderLong :: Doc -> String renderLong = renderStyle (style { lineLength = 200 }) padLines :: Doc -> String -> Doc padLines left right = let (x:xs) = chunks' trunc_len right pad' = text $ replicate (length (render left)) ' ' in vcat $ (left <> text x) : map (\s -> pad' <> text s) xs trunc_len :: Int trunc_len = 100 trunc :: String -> String trunc l | length l > trunc_len = take (trunc_len-4) l ++ " ..." | otherwise = l chunks' :: Int -> String -> [String] chunks' len str = case chunks len str of (x:xs) -> (x:xs) [] -> [""] chunks :: Int -> String -> [String] chunks len [] = [] chunks len str = let (f,r) = splitAt len str in f : chunks len r
mainland/dph
dph-event-seer/src/Pretty.hs
Haskell
bsd-3-clause
5,727
-- | Regression test for injective closed type families (GHC #6018).
--
-- NOTE(review): this file lives under testsuite/tests/typecheck/should_fail,
-- so it is *expected* to be rejected by the compiler — do not "fix" it.
{-# LANGUAGE TypeFamilies, DataKinds, UndecidableInstances #-}

module T6018failclosed1 where

-- Id is injective...
type family IdClosed a = result | result -> a where
    IdClosed a = a

-- ...but despite that we disallow a call to Id
-- (the injectivity checker must reject establishing injectivity of
-- 'IdProxyClosed' via a call to another type family on its RHS)
type family IdProxyClosed a = r | r -> a where
    IdProxyClosed a = IdClosed a
acowley/ghc
testsuite/tests/typecheck/should_fail/T6018failclosed1.hs
Haskell
bsd-3-clause
317
{-| Module : Data.OrgMode.Parse.Types Copyright : © 2014 Parnell Springmeyer License : All Rights Reserved Maintainer : Parnell Springmeyer <parnell@digitalmentat.com> Stability : experimental Types and utility functions. -} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} module Data.OrgMode.Parse.Types ( Document (..) , Section (..) , Level (..) , Heading (..) , Priority (..) , Plannings (..) , StateKeyword (..) , Duration , PlanningKeyword (..) , Properties , Timestamp (..) , DateTime (..) , Stats (..) , Tag , TimeUnit (..) , RepeaterType (..) , Repeater (..) , DelayType (..) , Delay (..) , LevelDepth (..) , TitleMeta (..) , YearMonthDay(..) , YearMonthDay'(..) ) where import Control.Applicative import Control.Monad (mzero) import Data.Aeson ((.:), (.=)) import qualified Data.Aeson as A import Data.Hashable (Hashable (..)) import Data.HashMap.Strict (HashMap, fromList, keys, toList) import Data.Text (Text, pack) import Data.Thyme.Calendar (YearMonthDay (..)) import Data.Thyme.LocalTime (Hour, Minute) import Data.Traversable import GHC.Generics data Document = Document { documentText :: Text -- ^ Text occurring before any Org headlines , documentHeadings :: [Heading] -- ^ Toplevel Org headlines } deriving (Show, Eq, Generic) instance A.ToJSON Document where instance A.FromJSON Document where newtype LevelDepth = LevelDepth Int deriving (Eq, Show, Num) data TitleMeta = TitleMeta Text (Maybe Stats) (Maybe [Tag]) deriving (Eq, Show) data Heading = Heading { level :: Level -- ^ Org headline nesting level (1 is at the top) , keyword :: Maybe StateKeyword -- ^ State of the headline (e.g. 
TODO, DONE) , priority :: Maybe Priority -- , title :: Text -- properties , stats :: Maybe Stats -- , tags :: [Tag] -- , section :: Section -- Next-line , subHeadings :: [Heading] -- elements } deriving (Show, Eq, Generic) newtype Level = Level Int deriving (Eq, Show, Num, Generic) type Properties = HashMap Text Text type Clock = (Maybe Timestamp, Maybe Duration) data Section = Section { sectionPlannings :: Plannings , sectionClocks :: [Clock] , sectionProperties :: Properties , sectionParagraph :: Text } deriving (Show, Eq, Generic) data Timestamp = Timestamp { tsTime :: DateTime , tsActive :: Bool , tsEndTime :: Maybe DateTime } deriving (Show, Eq, Generic) instance A.ToJSON Timestamp where instance A.FromJSON Timestamp where newtype YearMonthDay' = YMD' YearMonthDay deriving (Show, Eq, Generic) instance A.ToJSON YearMonthDay' where toJSON (YMD' (YearMonthDay y m d)) = A.object ["ymdYear" .= y ,"ymdMonth" .= m ,"ymdDay" .= d] instance A.FromJSON YearMonthDay' where parseJSON (A.Object v) = do y <- v .: "ymdYear" m <- v .: "ymdMonth" d <- v .: "ymdDay" return (YMD' (YearMonthDay y m d)) parseJSON _ = mzero data DateTime = DateTime { yearMonthDay :: YearMonthDay' , dayName :: Maybe Text , hourMinute :: Maybe (Hour,Minute) , repeater :: Maybe Repeater , delay :: Maybe Delay } deriving (Show, Eq, Generic) instance A.ToJSON DateTime where instance A.FromJSON DateTime where data RepeaterType = RepeatCumulate | RepeatCatchUp | RepeatRestart deriving (Show, Eq, Generic) instance A.ToJSON RepeaterType instance A.FromJSON RepeaterType data Repeater = Repeater { repeaterType :: RepeaterType , repeaterValue :: Int , repeaterUnit :: TimeUnit } deriving (Show, Eq, Generic) instance A.ToJSON Repeater where instance A.FromJSON Repeater where data DelayType = DelayAll | DelayFirst deriving (Show, Eq, Generic) instance A.ToJSON DelayType where instance A.FromJSON DelayType where data Delay = Delay { delayType :: DelayType , delayValue :: Int , delayUnit :: TimeUnit } deriving 
(Show, Eq, Generic) instance A.ToJSON Delay where instance A.FromJSON Delay where data TimeUnit = UnitYear | UnitWeek | UnitMonth | UnitDay | UnitHour deriving (Show, Eq, Generic) instance A.ToJSON TimeUnit where instance A.FromJSON TimeUnit where --------------------------------------------------------------------------- --instance A.ToJSON Document where --instance A.FromJSON Document where instance A.ToJSON Level where instance A.FromJSON Level where newtype StateKeyword = StateKeyword {unStateKeyword :: Text} deriving (Show, Eq, Generic) instance A.ToJSON StateKeyword where instance A.FromJSON StateKeyword where data PlanningKeyword = SCHEDULED | DEADLINE | CLOSED deriving (Show, Eq, Enum, Ord, Generic) instance A.ToJSON PlanningKeyword where instance A.FromJSON PlanningKeyword where --instance (A.ToJSON k, A.ToJSON v) => A.ToJSON (HashMap k v) where -- toJSON hm = A.object hm newtype Plannings = Plns (HashMap PlanningKeyword Timestamp) deriving (Show, Eq, Generic) instance A.ToJSON Plannings where toJSON (Plns hm) = A.object $ map jPair (toList hm) where jPair (k, v) = pack (show k) .= A.toJSON v instance A.FromJSON Plannings where parseJSON (A.Object v) = Plns . fromList <$> (traverse jPair (keys v)) where jPair k = v .: k parseJSON _ = mzero instance A.ToJSON Section where instance A.FromJSON Section where instance A.ToJSON Heading where instance A.FromJSON Heading where data Priority = A | B | C deriving (Show, Read, Eq, Ord, Generic) instance A.ToJSON Priority where instance A.FromJSON Priority where type Tag = Text data Stats = StatsPct Int | StatsOf Int Int deriving (Show, Eq, Generic) instance A.ToJSON Stats where instance A.FromJSON Stats where type Duration = (Hour,Minute) instance Hashable PlanningKeyword where hashWithSalt salt k = hashWithSalt salt (fromEnum k) -- -- This might be the form to use if we were supporting <diary> timestamps -- data Timestamp = Dairy Text -- | Time TimestampTime -- deriving (Show, Eq, Generic)
imalsogreg/orgmode-parse
src/Data/OrgMode/Parse/Types.hs
Haskell
bsd-3-clause
6,430
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}

module Int where

import Language.VHDL (Mode(..))
import Language.Embedded.Hardware

import Control.Monad.Identity
import Control.Monad.Operational.Higher

import Data.ALaCarte
import Data.Int
import Data.Word

import Text.PrettyPrint

import Prelude hiding (and, or, not)

--------------------------------------------------------------------------------
-- * Example of a program that performs type casting.
--------------------------------------------------------------------------------

-- | Command set used for our programs.  Each constituent command set is
-- composed into one instruction sum with ':+:' (data types à la carte).
type CMD =
      SignalCMD
  :+: VariableCMD
  :+: ArrayCMD
  :+: VArrayCMD
  :+: LoopCMD
  :+: ConditionalCMD
  :+: ComponentCMD
  :+: ProcessCMD
  :+: VHDLCMD

-- | Hardware programs over the command set, with 'HExp' expressions and
-- 'HType' typed values.
type HProg = Program CMD (Param2 HExp HType)

-- | Hardware signatures over the same command set.
type HSig  = Sig CMD HExp HType Identity

--------------------------------------------------------------------------------

-- | Example program: fills an array with @[0..10]@, overwrites index 0,
-- reverses the array element-wise in a loop, then clamps the first two
-- elements with nested conditionals.
integers :: HProg ()
integers =
  do arr :: VArray Word8 <- initVArray [0..10]
     ref :: Variable Word8 <- initVariable 10
     setVArray arr 0 20
     -- reverse: element i receives the old element (10 - i)
     for 0 10 $ \i ->
       do v <- getVArray arr (10 - i)
          setVArray arr i v
     v0 <- getVArray arr 0
     v1 <- getVArray arr 1
     -- clamp arr[0] to 5; otherwise clamp/reset arr[1]
     iff (v0 `lte` 5)
       (setVArray arr 0 5)
       (iff (v1 `lte` 5)
         (setVArray arr 1 5)
         (setVArray arr 1 10))

--------------------------------------------------------------------------------

-- | Compile the example to VHDL and print it.
test = icompile integers

--------------------------------------------------------------------------------
markus-git/imperative-edsl-vhdl
examples/Int.hs
Haskell
bsd-3-clause
1,630
{-# LINE 1 "Database/MySQL/Base/C.hsc" #-} {-# LANGUAGE CPP, EmptyDataDecls, ForeignFunctionInterface #-} {-# LINE 2 "Database/MySQL/Base/C.hsc" #-} -- | -- Module: Database.MySQL.Base.C -- Copyright: (c) 2011 MailRank, Inc. -- License: BSD3 -- Maintainer: Bryan O'Sullivan <bos@serpentine.com> -- Stability: experimental -- Portability: portable -- -- Direct bindings to the C @mysqlclient@ API. module Database.MySQL.Base.C ( -- * Connection management mysql_init , mysql_options , mysql_ssl_set , mysql_real_connect , mysql_close , mysql_ping , mysql_autocommit , mysql_change_user , mysql_select_db , mysql_set_character_set -- ** Connection information , mysql_thread_id , mysql_get_server_info , mysql_get_host_info , mysql_get_proto_info , mysql_character_set_name , mysql_get_ssl_cipher , mysql_stat -- * Querying , mysql_real_query , mysql_insert_id -- ** Escaping , mysql_real_escape_string -- ** Results , mysql_field_count , mysql_affected_rows , mysql_store_result , mysql_use_result , mysql_fetch_lengths , mysql_fetch_lengths_nonblock , mysql_fetch_row , mysql_fetch_row_nonblock -- * Working with results , mysql_free_result , mysql_free_result_nonblock , mysql_fetch_fields , mysql_fetch_fields_nonblock , mysql_data_seek , mysql_row_seek , mysql_row_tell -- ** Multiple results , mysql_next_result -- * Transactions , mysql_commit , mysql_rollback -- * General information , mysql_get_client_info , mysql_get_client_version -- * Error handling , mysql_errno , mysql_error ) where {-# LINE 69 "Database/MySQL/Base/C.hsc" #-} {-# LINE 70 "Database/MySQL/Base/C.hsc" #-} import Data.ByteString.Unsafe (unsafeUseAsCString) import Database.MySQL.Base.Types import Foreign.C.String (CString, withCString) #if __GLASGOW_HASKELL__ >= 704 import Foreign.C.Types (CChar(..), CInt(..), CUInt(..), CULLong(..), CULong(..)) #else import Foreign.C.Types (CInt, CUInt, CULLong, CULong) #endif import Foreign.Marshal.Utils (with) import Foreign.Ptr (Ptr, nullPtr) foreign import ccall safe 
mysql_init :: Ptr MYSQL -- ^ should usually be 'nullPtr' -> IO (Ptr MYSQL) mysql_options :: Ptr MYSQL -> Option -> IO CInt mysql_options ptr opt = case opt of ConnectTimeout secs -> withIntegral secs $ go (0) {-# LINE 91 "Database/MySQL/Base/C.hsc" #-} Compress -> go (1) nullPtr {-# LINE 93 "Database/MySQL/Base/C.hsc" #-} NamedPipe -> go (2) nullPtr {-# LINE 95 "Database/MySQL/Base/C.hsc" #-} InitCommand cmd -> unsafeUseAsCString cmd $ go (3) {-# LINE 97 "Database/MySQL/Base/C.hsc" #-} ReadDefaultFile path -> withCString path $ go (4) {-# LINE 99 "Database/MySQL/Base/C.hsc" #-} ReadDefaultGroup group -> unsafeUseAsCString group $ go (5) {-# LINE 101 "Database/MySQL/Base/C.hsc" #-} CharsetDir path -> withCString path $ go (6) {-# LINE 103 "Database/MySQL/Base/C.hsc" #-} CharsetName cs -> withCString cs $ go (7) {-# LINE 105 "Database/MySQL/Base/C.hsc" #-} LocalInFile b -> withBool b $ go (8) {-# LINE 107 "Database/MySQL/Base/C.hsc" #-} Protocol proto -> withIntegral (fromEnum proto) $ go (9) {-# LINE 109 "Database/MySQL/Base/C.hsc" #-} SharedMemoryBaseName name -> unsafeUseAsCString name $ go (10) {-# LINE 111 "Database/MySQL/Base/C.hsc" #-} ReadTimeout secs -> withIntegral secs $ go (11) {-# LINE 113 "Database/MySQL/Base/C.hsc" #-} WriteTimeout secs -> withIntegral secs $ go (12) {-# LINE 115 "Database/MySQL/Base/C.hsc" #-} UseRemoteConnection -> go (14) nullPtr {-# LINE 117 "Database/MySQL/Base/C.hsc" #-} UseEmbeddedConnection -> go (15) nullPtr {-# LINE 119 "Database/MySQL/Base/C.hsc" #-} GuessConnection -> go (16) nullPtr {-# LINE 121 "Database/MySQL/Base/C.hsc" #-} ClientIP ip -> unsafeUseAsCString ip $ go (17) {-# LINE 123 "Database/MySQL/Base/C.hsc" #-} SecureAuth b -> withBool b $ go (18) {-# LINE 125 "Database/MySQL/Base/C.hsc" #-} ReportDataTruncation b -> withBool b $ go (19) {-# LINE 127 "Database/MySQL/Base/C.hsc" #-} Reconnect b -> withBool b $ go (20) {-# LINE 129 "Database/MySQL/Base/C.hsc" #-} SSLVerifyServerCert b -> withBool b $ go (21) {-# LINE 
131 "Database/MySQL/Base/C.hsc" #-} -- Other options are accepted by mysql_real_connect, so ignore them. _ -> return 0 where go = mysql_options_ ptr withBool b = with (if b then 1 else 0 :: CUInt) withIntegral i = with (fromIntegral i :: CUInt) foreign import ccall safe "mysql.h mysql_options" mysql_options_ :: Ptr MYSQL -> CInt -> Ptr a -> IO CInt foreign import ccall unsafe "mysql_signals.h _hs_mysql_real_connect" mysql_real_connect :: Ptr MYSQL -- ^ Context (from 'mysql_init'). -> CString -- ^ Host name. -> CString -- ^ User name. -> CString -- ^ Password. -> CString -- ^ Database. -> CInt -- ^ Port. -> CString -- ^ Unix socket. -> CULong -- ^ Flags. -> IO (Ptr MYSQL) foreign import ccall safe mysql_ssl_set :: Ptr MYSQL -> CString -- ^ Key. -> CString -- ^ Cert. -> CString -- ^ CA. -> CString -- ^ CA path. -> CString -- ^ Ciphers. -> IO MyBool foreign import ccall unsafe "mysql_signals.h _hs_mysql_close" mysql_close :: Ptr MYSQL -> IO () foreign import ccall unsafe "mysql_signals.h _hs_mysql_ping" mysql_ping :: Ptr MYSQL -> IO CInt foreign import ccall safe mysql_thread_id :: Ptr MYSQL -> IO CULong foreign import ccall unsafe "mysql_signals.h _hs_mysql_autocommit" mysql_autocommit :: Ptr MYSQL -> MyBool -> IO MyBool foreign import ccall unsafe "mysql_signals.h _hs_mysql_change_user" mysql_change_user :: Ptr MYSQL -> CString -- ^ user -> CString -- ^ password -> CString -- ^ database -> IO MyBool foreign import ccall unsafe "mysql_signals.h _hs_mysql_select_db" mysql_select_db :: Ptr MYSQL -> CString -> IO CInt foreign import ccall safe mysql_get_server_info :: Ptr MYSQL -> IO CString foreign import ccall safe mysql_get_host_info :: Ptr MYSQL -> IO CString foreign import ccall safe mysql_get_proto_info :: Ptr MYSQL -> IO CUInt foreign import ccall safe mysql_character_set_name :: Ptr MYSQL -> IO CString foreign import ccall safe mysql_set_character_set :: Ptr MYSQL -> CString -> IO CInt foreign import ccall safe mysql_get_ssl_cipher :: Ptr MYSQL -> IO CString 
foreign import ccall unsafe "mysql_signals.h _hs_mysql_stat" mysql_stat :: Ptr MYSQL -> IO CString foreign import ccall unsafe "mysql_signals.h _hs_mysql_real_query" mysql_real_query :: Ptr MYSQL -> CString -> CULong -> IO CInt foreign import ccall safe mysql_insert_id :: Ptr MYSQL -> IO CULLong foreign import ccall safe mysql_field_count :: Ptr MYSQL -> IO CUInt foreign import ccall safe mysql_affected_rows :: Ptr MYSQL -> IO CULLong foreign import ccall unsafe "mysql_signals.h _hs_mysql_store_result" mysql_store_result :: Ptr MYSQL -> IO (Ptr MYSQL_RES) foreign import ccall unsafe "mysql_signals.h _hs_mysql_use_result" mysql_use_result :: Ptr MYSQL -> IO (Ptr MYSQL_RES) foreign import ccall unsafe "mysql_signals.h _hs_mysql_free_result" mysql_free_result :: Ptr MYSQL_RES -> IO () foreign import ccall safe "mysql.h mysql_free_result" mysql_free_result_nonblock :: Ptr MYSQL_RES -> IO () foreign import ccall safe mysql_fetch_fields :: Ptr MYSQL_RES -> IO (Ptr Field) foreign import ccall safe "mysql.h mysql_fetch_fields" mysql_fetch_fields_nonblock :: Ptr MYSQL_RES -> IO (Ptr Field) foreign import ccall safe mysql_data_seek :: Ptr MYSQL_RES -> CULLong -> IO () foreign import ccall safe mysql_row_seek :: Ptr MYSQL_RES -> MYSQL_ROW_OFFSET -> IO MYSQL_ROW_OFFSET foreign import ccall safe mysql_row_tell :: Ptr MYSQL_RES -> IO MYSQL_ROW_OFFSET foreign import ccall unsafe "mysql_signals.h _hs_mysql_next_result" mysql_next_result :: Ptr MYSQL -> IO CInt foreign import ccall unsafe "mysql_signals.h _hs_mysql_commit" mysql_commit :: Ptr MYSQL -> IO MyBool foreign import ccall unsafe "mysql_signals.h _hs_mysql_rollback" mysql_rollback :: Ptr MYSQL -> IO MyBool foreign import ccall unsafe "mysql_signals.h _hs_mysql_fetch_row" mysql_fetch_row :: Ptr MYSQL_RES -> IO MYSQL_ROW foreign import ccall safe "mysql.h mysql_fetch_row" mysql_fetch_row_nonblock :: Ptr MYSQL_RES -> IO MYSQL_ROW foreign import ccall safe mysql_fetch_lengths :: Ptr MYSQL_RES -> IO (Ptr CULong) foreign import 
ccall safe "mysql.h mysql_fetch_lengths" mysql_fetch_lengths_nonblock :: Ptr MYSQL_RES -> IO (Ptr CULong) foreign import ccall safe mysql_real_escape_string :: Ptr MYSQL -> CString -> CString -> CULong -> IO CULong foreign import ccall safe mysql_get_client_info :: CString foreign import ccall safe mysql_get_client_version :: CULong foreign import ccall safe mysql_errno :: Ptr MYSQL -> IO CInt foreign import ccall safe mysql_error :: Ptr MYSQL -> IO CString
lhuang7/mysql
dist/dist-sandbox-bd9d9ce/build/Database/MySQL/Base/C.hs
Haskell
bsd-3-clause
9,473
module Import00003 where #if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 708 import Data.Typeable (Typeable) #else import Data.Typeable (Typeable,Typeable1,mkTyCon3,mkTyConApp,typeOf) #endif
charleso/intellij-haskforce
tests/gold/parser/Import00003.hs
Haskell
apache-2.0
203
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="si-LK"> <title>Core Language Files | ZAP Extension</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
kingthorin/zap-extensions
addOns/coreLang/src/main/javahelp/org/zaproxy/zap/extension/coreLang/resources/help_si_LK/helpset_si_LK.hs
Haskell
apache-2.0
980
module TupleIn1 where f :: (a, ([Int], c)) -> ([Int], c) f (x, y@([], m)) = case y of y@(b_1, b_2) -> y f (x, y@([], m)) = y
kmate/HaRe
old/testing/introCase/TupleIn1AST.hs
Haskell
bsd-3-clause
146
{-# LANGUAGE CPP #-} #ifndef NO_NEWTYPE_DERIVING {-# LANGUAGE GeneralizedNewtypeDeriving #-} #endif -- | Types to help with testing polymorphic properties. -- -- Types 'A', 'B' and 'C' are @newtype@ wrappers around 'Integer' that -- implement 'Eq', 'Show', 'Arbitrary' and 'CoArbitrary'. Types -- 'OrdA', 'OrdB' and 'OrdC' also implement 'Ord' and 'Num'. -- -- See also "Test.QuickCheck.All" for an automatic way of testing -- polymorphic properties. module Test.QuickCheck.Poly ( A(..), B(..), C(..) , OrdA(..), OrdB(..), OrdC(..) ) where -------------------------------------------------------------------------- -- imports import Test.QuickCheck.Arbitrary -------------------------------------------------------------------------- -- polymorphic A, B, C (in Eq) -- A newtype A = A{ unA :: Integer } deriving ( Eq ) instance Show A where showsPrec n (A x) = showsPrec n x instance Arbitrary A where arbitrary = (A . (+1) . abs) `fmap` arbitrary shrink (A x) = [ A x' | x' <- shrink x, x' > 0 ] instance CoArbitrary A where coarbitrary = coarbitrary . unA -- B newtype B = B{ unB :: Integer } deriving ( Eq ) instance Show B where showsPrec n (B x) = showsPrec n x instance Arbitrary B where arbitrary = (B . (+1) . abs) `fmap` arbitrary shrink (B x) = [ B x' | x' <- shrink x, x' > 0 ] instance CoArbitrary B where coarbitrary = coarbitrary . unB -- C newtype C = C{ unC :: Integer } deriving ( Eq ) instance Show C where showsPrec n (C x) = showsPrec n x instance Arbitrary C where arbitrary = (C . (+1) . abs) `fmap` arbitrary shrink (C x) = [ C x' | x' <- shrink x, x' > 0 ] instance CoArbitrary C where coarbitrary = coarbitrary . unC -------------------------------------------------------------------------- -- polymorphic OrdA, OrdB, OrdC (in Eq, Ord) -- OrdA newtype OrdA = OrdA{ unOrdA :: Integer } deriving ( Eq, Ord #ifndef NO_NEWTYPE_DERIVING , Num #endif ) instance Show OrdA where showsPrec n (OrdA x) = showsPrec n x instance Arbitrary OrdA where arbitrary = (OrdA . (+1) . 
abs) `fmap` arbitrary shrink (OrdA x) = [ OrdA x' | x' <- shrink x, x' > 0 ] instance CoArbitrary OrdA where coarbitrary = coarbitrary . unOrdA -- OrdB newtype OrdB = OrdB{ unOrdB :: Integer } deriving ( Eq, Ord #ifndef NO_NEWTYPE_DERIVING , Num #endif ) instance Show OrdB where showsPrec n (OrdB x) = showsPrec n x instance Arbitrary OrdB where arbitrary = (OrdB . (+1) . abs) `fmap` arbitrary shrink (OrdB x) = [ OrdB x' | x' <- shrink x, x' > 0 ] instance CoArbitrary OrdB where coarbitrary = coarbitrary . unOrdB -- OrdC newtype OrdC = OrdC{ unOrdC :: Integer } deriving ( Eq, Ord #ifndef NO_NEWTYPE_DERIVING , Num #endif ) instance Show OrdC where showsPrec n (OrdC x) = showsPrec n x instance Arbitrary OrdC where arbitrary = (OrdC . (+1) . abs) `fmap` arbitrary shrink (OrdC x) = [ OrdC x' | x' <- shrink x, x' > 0 ] instance CoArbitrary OrdC where coarbitrary = coarbitrary . unOrdC -------------------------------------------------------------------------- -- the end.
beni55/quickcheck
Test/QuickCheck/Poly.hs
Haskell
bsd-3-clause
3,165
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeInType #-} module T11732a where import GHC.Generics data Proxy k (a :: k) deriving Generic1 data family ProxyFam (a :: y) (b :: z) data instance ProxyFam k (a :: k) deriving Generic1
ezyang/ghc
testsuite/tests/deriving/should_compile/T11732a.hs
Haskell
bsd-3-clause
264
{-# LANGUAGE ScopedTypeVariables #-} module T5371 where import Language.Haskell.TH f :: a -> Name f (x :: a) = ''a
urbanslug/ghc
testsuite/tests/quotes/T5721.hs
Haskell
bsd-3-clause
117
----------------------------------------------------------------------------- -- -- Module : PhysicalQuantities.Definitions -- Copyright : -- License : MIT -- -- Maintainer : - -- Stability : -- Portability : -- -- | -- {-# LANGUAGE MultiParamTypeClasses , FlexibleContexts , ConstraintKinds , UndecidableInstances , FlexibleInstances #-} module PhysicalQuantities.Definitions ( Dimensions (Scalar, Vector, Dimensionless) , PhysicalQuantity(..), Abs(..) , QuantityDecomposition, BaseQuantity, DerivedQuantity , CmpQ, EqQ , Unit(..), Vec(..) , UnitDecomposition, BaseUnit, DerivedUnit , CmpU, EqU , UnitSystem(..), UnitPrefix(..) , (:*)(..), (:/)(..), (:^)(..) ) where import PhysicalQuantities.Combinations import PhysicalQuantities.Decomposition ( TBase(..), TDerived(..) , Decomposition(..), DecompositionType , CmpD ) import TypeNum.Rational hiding (Abs) import GHC.TypeLits (symbolVal) import Data.Maybe (fromMaybe) import Data.Type.Bool (If) import Data.Type.Equality ----------------------------------------------------------------------------- data Dimensions = Dimensionless | Scalar | Vector deriving (Show, Eq, Ord) instance TypesEq (a :: Dimensions) (b :: Dimensions) where type a ~=~ b = CompareDimensions a b == EQ instance TypesOrd (a :: Dimensions) (b :: Dimensions) where type Cmp a b = CompareDimensions a b type instance (a :: Dimensions) == (b :: Dimensions) = a ~=~ b type family CompareDimensions (a :: Dimensions) (b :: Dimensions) :: Ordering where CompareDimensions Dimensionless Dimensionless = EQ CompareDimensions Dimensionless Scalar = LT CompareDimensions Dimensionless Vector = LT CompareDimensions Scalar Dimensionless = GT CompareDimensions Vector Dimensionless = GT CompareDimensions Scalar Scalar = EQ CompareDimensions Scalar Vector = LT CompareDimensions Vector Scalar = GT CompareDimensions Vector Vector = EQ ----------------------------------------------------------------------------- class PhysicalQuantity q where type QuantityDimensions q :: Dimensions 
quantityDimensions :: q -> Dimensions quantityName :: q -> String quantityInstance :: q type BaseQuantity q = (PhysicalQuantity q, TBase q) type DerivedQuantity q = (PhysicalQuantity q, TDerived q) type QuantityDecomposition q = (PhysicalQuantity q, Decomposition q) ----------------------------------------------------------------------------- parenth s = "(" ++ s ++ ")" instance ( PhysicalQuantity a, PhysicalQuantity b ) => PhysicalQuantity (a :* b) where type QuantityDimensions (a :* b) = ResultingDimensions' a b quantityDimensions (a :* b) = resultingDimensions' a b quantityName (a :* b) = parenth $ quantityName' " * " a b quantityInstance = quantityInstance :* quantityInstance instance ( PhysicalQuantity a, PhysicalQuantity b ) => PhysicalQuantity (a :/ b) where type QuantityDimensions (a :/ b) = ResultingDimensions' a b quantityDimensions (a :/ b) = resultingDimensions' a b quantityName (a :/ b) = parenth $ quantityName' " / " a b quantityInstance = quantityInstance :/ quantityInstance instance ( PhysicalQuantity a, MaybeRational p, KnownRatio (AsRational p) ) => PhysicalQuantity (a :^ (p :: k)) where type QuantityDimensions (a :^ p) = QuantityDimensions a quantityDimensions (a :^ p) = quantityDimensions a quantityName (a :^ p) = quantityName a ++ "^" ++ show p quantityInstance = quantityInstance :^ Ratio' type family ResultingDimensions (a :: Dimensions) (b ::Dimensions) :: Dimensions where ResultingDimensions a Dimensionless = a ResultingDimensions Dimensionless b = b ResultingDimensions Scalar Scalar = Scalar ResultingDimensions a b = Vector type ResultingDimensions' a b = ResultingDimensions (QuantityDimensions a) (QuantityDimensions b) quantityName' op a b = quantityName a ++ op ++ quantityName b resultingDimensions a Dimensionless = a resultingDimensions Dimensionless b = b resultingDimensions Scalar Scalar = Scalar resultingDimensions _ _ = Vector resultingDimensions' a b = resultingDimensions (quantityDimensions a) (quantityDimensions b) 
----------------------------------------------------------------------------- -- | Scalar container for vector quantities. newtype Abs q = Abs q instance (PhysicalQuantity a, QuantityDimensions a ~ Vector) => PhysicalQuantity (Abs a) where type QuantityDimensions (Abs a) = Scalar quantityDimensions _ = Scalar quantityName (Abs a) = "|" ++ quantityName a ++ "|" quantityInstance = Abs quantityInstance type instance DecompositionType (Abs a) = DecompositionType a instance (BaseQuantity a) => TBase (Abs a) where type TSymbol (Abs a) = TSymbol a tFromSymbol = Abs . tFromSymbol instance (DerivedQuantity a) => TDerived (Abs a) where type TStructure (Abs a) = TStructure a ----------------------------------------------------------------------------- -- | Represents a unit as a symbol (combination of symbols); measures no -- specific 'PhysicalQuantity' outside of a 'UnitSystem'. class Unit u where type UnitDimensions u :: Dimensions unitDimensions :: u -> Dimensions unitName :: u -> String unitInstance :: u type BaseUnit u = (Unit u, TBase u) type DerivedUnit u = (Unit u, TDerived u) type UnitDecomposition u = (Unit u, Decomposition u) -- | Vector container for scalar units. newtype Vec u = Vec u -- | Vector container wraps any scalar unit and turns it into vector. 
instance (Unit u, UnitDimensions u ~ Scalar) => Unit (Vec u) where type UnitDimensions (Vec u) = Vector unitDimensions _ = Vector unitName u = "Vec[" ++ unitName u ++ "]" unitInstance = Vec unitInstance ----------------------------------------------------------------------------- instance ( Unit a, Unit b ) => Unit (a :* b) where type UnitDimensions (a :* b) = ResultingDimensions (UnitDimensions a) (UnitDimensions b) unitDimensions (a :* b) = resultingDimensions (unitDimensions a) (unitDimensions b) unitName (a :* b) = parenth $ unitName' " * " a b unitInstance = unitInstance :* unitInstance instance ( Unit a, Unit b ) => Unit (a :/ b) where type UnitDimensions (a :/ b) = ResultingDimensions (UnitDimensions a) (UnitDimensions b) unitDimensions (a :/ b) = resultingDimensions (unitDimensions a) (unitDimensions b) unitName (a :/ b) = parenth $ unitName' " / " a b unitInstance = unitInstance :/ unitInstance instance ( Unit a, MaybeRational p, KnownRatio (AsRational p) ) => Unit (a :^ (p :: k)) where type UnitDimensions (a :^ p) = UnitDimensions a unitDimensions (a :^ _) = unitDimensions a unitName (a :^ p) = unitName a ++ "^" ++ show p unitInstance = unitInstance :^ Ratio' unitName' op a b = unitName a ++ op ++ unitName b ----------------------------------------------------------------------------- -- | Establishes units for the physical quantities within the system. 
class (UnitPrefix (Prefix sys)) => UnitSystem sys where unitSystemName :: sys -> String type Prefix sys :: * -> * type UnitFor sys phq :: * unitFor :: (Unit (UnitFor sys phq)) => sys -> phq -> UnitFor sys phq unitFor _ _ = unitInstance class UnitPrefix p where prefixGroup :: p v -> String prefixName :: p v -> String prefixValue :: (Num v, Eq v) => p v -> v prefixFromValue :: (Num v, Eq v) => v -> Maybe (p v) convertPrefix :: p v -> p w ----------------------------------------------------------------------------- ----------------------------------------------------------------------------- -- eq and compare for quantities and units, using decompositions. class ( QuantityDecomposition q1, QuantityDecomposition q2 ) => CompareQuantities q1 q2 where type CmpQ q1 q2 :: Ordering type EqQ q1 q2 :: Bool type EqQ q1 q2 = CmpQ q1 q2 == EQ -- | Compare physical quantities by dimensions and decomposition. instance ( QuantityDecomposition q1, QuantityDecomposition q2 ) => CompareQuantities q1 q2 where type CmpQ q1 q2 = If (QuantityDimensions q1 == QuantityDimensions q2) (CmpD (TDecomposition q1) (TDecomposition q2)) (Cmp (QuantityDimensions q1) (QuantityDimensions q2)) ----------------------------------------------------------------------------- class ( UnitDecomposition u1, UnitDecomposition u2 ) => CompareUnits u1 u2 where type CmpU u1 u2 :: Ordering type EqU u1 u2 :: Bool type EqU u1 u2 = CmpU u1 u2 == EQ instance ( UnitDecomposition u1, UnitDecomposition u2 ) => CompareUnits u1 u2 where type CmpU u1 u2 = If (UnitDimensions u1 == UnitDimensions u2) (CmpD (TDecomposition u1) (TDecomposition u2)) (Cmp (UnitDimensions u1) (UnitDimensions u2)) -----------------------------------------------------------------------------
fehu/PhysicalQuantities
src/PhysicalQuantities/Definitions.hs
Haskell
mit
9,889
module GHCJS.DOM.RequestAnimationFrameCallback ( ) where
manyoo/ghcjs-dom
ghcjs-dom-webkit/src/GHCJS/DOM/RequestAnimationFrameCallback.hs
Haskell
mit
59
{-# LANGUAGE TypeFamilies #-} module Agent.PingPong.Role.SendResult where import AgentSystem.Generic import Agent.PingPong import qualified Agent.PingPong.Simple.SendResult as SendRes import Data.IORef -------------------------------------------------------------------------------- data PingRole = PingRole data PongRole = PongRole -------------------------------------------------------------------------------- instance RoleName PingRole where roleName _ = "Ping" instance AgentRole PingRole where type RoleState PingRole = (IORef Integer, IORef SomeAgentRef, IORef Bool) type RoleResult PingRole = () type RoleSysArgs PingRole = () type RoleArgs PingRole = (Integer, IORef SomeAgentRef) instance RoleName PongRole where roleName _ = "Pong" instance AgentRole PongRole where type RoleState PongRole = (IORef Integer, IORef SomeAgentRef) type RoleResult PongRole = Integer type RoleSysArgs PongRole = () type RoleArgs PongRole = IORef SomeAgentRef -------------------------------------------------------------------------------- pingRoleDescriptor = genericRoleDescriptor PingRole (const $ return . uncurry SendRes.pingDescriptor) pongRoleDescriptor = genericRoleDescriptor PongRole (const $ return . 
SendRes.pongDescriptor) -------------------------------------------------------------------------------- runPingPong nPings = do pingRef <- newIORef undefined pongRef <- newIORef undefined putStrLn "<< CreateAgentOfRole >> " let pingC = CreateAgentOfRole pingRoleDescriptor (return ()) $ return (nPings, pongRef) pongC = CreateAgentOfRole pongRoleDescriptor (return ()) $ return pingRef ping <- createAgentRef pingC pong <- createAgentRef pongC pingRef `writeIORef` someAgentRef ping pongRef `writeIORef` someAgentRef pong putStrLn "Starting PING" agentStart ping putStrLn "Starting PONG" agentStart pong putStrLn "Waiting PING termination" agentWaitTermination ping putStrLn "Waiting PONG result" res <- agentWaitResult pong putStrLn $ "PONG result: " ++ show res putStrLn "Waiting PONG termination" agentWaitTermination pong putStrLn "Finished"
fehu/h-agents
test/Agent/PingPong/Role/SendResult.hs
Haskell
mit
2,684
module Language.MSH.MethodTable where import qualified Data.Map as M import Language.Haskell.TH import Language.Haskell.TH.Syntax {- Methods -} data MethodTable = MkMethodTable { methodSigs :: M.Map String Dec, methodDefs :: M.Map String Dec } deriving Show emptyMethodTable :: MethodTable emptyMethodTable = MkMethodTable M.empty M.empty addMethodSig :: Name -> Dec -> MethodTable -> MethodTable addMethodSig name dec tbl = tbl { methodSigs = M.insert (nameBase name) dec (methodSigs tbl) } addMethodDef :: Name -> Dec -> MethodTable -> MethodTable addMethodDef name dec tbl = tbl { methodDefs = M.insert (nameBase name) dec (methodDefs tbl) } isImplemented :: Name -> MethodTable -> Bool isImplemented n tbl = M.member (nameBase n) (methodDefs tbl) -- | `preProcessMethods ds' builds a value of type `MethodTable' from a list -- of top-level declarations. preProcessMethods :: [Dec] -> MethodTable preProcessMethods ds = go emptyMethodTable ds where go tbl [] = tbl go tbl (d@(SigD name ty) : ds) = go (addMethodSig name d tbl) ds go tbl (d@(FunD name cs) : ds) = go (addMethodDef name d tbl) ds go tbl (d@(ValD (VarP name) body wh) : ds) = go (addMethodDef name d tbl) ds go tbl (d : ds) = go tbl ds
mbg/monadic-state-hierarchies
Language/MSH/MethodTable.hs
Haskell
mit
1,371
-- From http://decipheringmusictheory.com/?page_id=46 module Other.Harmonisation where import Mezzo v1 = start $ melody :| d :| g :| fs :< g :| a :^ bf :| a :| a :| a :| d' :| c' :| bf :| a :>> g v2 = start $ melody :| d :| ef :| d :| d :| d :| d :| cs :| d :| d :| ef :| d :| d :>> bf_ v3 = start $ melody :| bf_ :| g_ :| a_ :< g_ :| fs_ :^ g_ :| a_ :| a_ :| fs_ :| g_ :| g_ :| g_ :| fs_ :>> g_ v4 = start $ melody :| g__ :| c_ :| c_ :< bf__ :| a__ :^ g__ :| f__ :| a__ :| d__ :| bf__ :| c_ :| d_ :| d_ :>> g__ -- ^ The above tutorial used 'd_' which gave a concealed octave sco = score setKeySig g_min setRuleSet strict withMusic (v1 :-: v2 :-: v3 :-: v4) main = renderScore "rendered/Harmonisation.mid" "4-voice chorale" sco
DimaSamoz/mezzo
examples/src/Other/Harmonisation.hs
Haskell
mit
922
module MedicineSpec (main, spec) where import Test.Hspec import Medicine import Medicine.ParserSpec (parsedMedicine) import Text.ParserCombinators.Parsec (parse) sampleBuild = unlines [ "e => H" , "e => O" , "H => HO" , "H => OH" , "O => HH" , "" , "HOH" ] parsedBuild = case parse pMedicine "" sampleBuild of Left err -> error (show err) Right p -> p main :: IO () main = hspec spec spec :: Spec spec = do describe "getDistinctMolecules" $ do it "should return 4 for HOH" $ getDistinctMolecules parsedMedicine `shouldBe` 4 it "should return 7 for HOHOHO" $ do let med = parsedMedicine { molecule = ["H", "O", "H", "O", "H", "O"]} getDistinctMolecules med `shouldBe` 7 describe "search" $ do it "should take 3 steps to make HOH" $ search parsedBuild `shouldBe` Just 3 it "should take 6 steps to make HOHOHO" $ do let build = parsedBuild { molecule = ["H", "O", "H", "O", "H", "O"] } search build `shouldBe` Just 6
corajr/adventofcode2015
19/test/MedicineSpec.hs
Haskell
mit
1,051
module YourProject where my_func = "This is my package!"
Azabuhs/YourProject
src/YourProject.hs
Haskell
mit
57
{-# LANGUAGE CPP #-} {- Math.hs - Point of switching between MathDirect and MathHmatrix - - Timothy A. Chagnon - CS 636 - Spring 2009 -} module Math ( #ifdef USING_HMATRIX module MathHmatrix, #else module MathDirect, #endif module MatInv ) where #ifdef USING_HMATRIX import MathHmatrix #else import MathDirect #endif import MatInv
tchagnon/cs636-raytracer
a1/Math.hs
Haskell
apache-2.0
356
module Text.Pandoc.RPC where import Text.Pandoc import Text.Pandoc.Rpc.Protocol import Text.Pandoc.Rpc.Protocol.PandocRequest import Text.ProtocolBuffers.Basic import Text.ProtocolBuffers.WireMessage import qualified Text.Pandoc.Rpc.Protocol.PandocResponse as R import Data.ByteString.Lazy.Char8 hiding (putStrLn) import qualified Data.ByteString.Lazy as L import System.ZMQ3 import Control.Concurrent (forkIO) import Control.Monad (forever, forM_) pipeline :: String -> String -> Either String (ReaderOptions -> String -> IO Pandoc, Writer) pipeline inputFmt outputFmt = case getReader inputFmt of Left error -> Left error Right reader@_ -> writer reader where writer reader = case getWriter outputFmt of Left error -> Left error Right writer@_ -> Right (reader, writer) write :: Writer -> Pandoc -> IO String write writer native = case writer of PureStringWriter writer' -> return $ writer' def native IOStringWriter writer' -> writer' def native IOByteStringWriter writer' -> do out <- writer' def native return $ unpack out transform :: (ReaderOptions -> String -> IO Pandoc) -> Writer -> String -> IO String transform reader writer input = do native <- reader def input write writer native pandoc :: String -> String -> String -> IO (Either String String) pandoc inputFmt outputFmt input = do case pipeline inputFmt outputFmt of Left error -> return $ Left error Right (reader, writer) -> do out <- transform reader writer input return $ Right out pandoc' :: PandocRequest -> IO R.PandocResponse pandoc' req = do resp <- pandoc (uToString $ inputFmt req) (uToString $ outputFmt req) (uToString $ input req) return $ case resp of Left error -> R.PandocResponse { R.error = Just $ uFromString error, R.output = Nothing } Right output -> R.PandocResponse { R.error = Nothing, R.output = Just $ uFromString output } worker :: (Receiver a, Sender a) => Socket a -> IO () worker socket = do msg <- receive socket resp <- case messageGet $ fromStrict msg of Left error -> return $ R.PandocResponse { 
R.error = Just $ uFromString error, R.output = Nothing } Right (msg, _) -> pandoc' msg send socket [] $ toStrict $ messagePut resp workers :: Int -> Context -> IO () workers workerCount context = forM_ [0..workerCount] $ \_ -> forkIO $ do withSocket context Rep $ \responder -> do connect responder "inproc://pandoc" forever $ worker responder main :: String -> Int -> IO () main endpoint workerCount = do withContext $ \context -> do withSocket context Router $ \frontend -> do bind frontend endpoint withSocket context Dealer $ \backend -> do bind backend "inproc://pandoc" workers workerCount context proxy frontend backend Nothing
aolshevskiy/pandoc-rpc
Text/Pandoc/RPC.hs
Haskell
apache-2.0
2,897
{-# LANGUAGE QuasiQuotes #-} ----------------------------------------------------------------------------- -- | -- Module : Application.HXournal.GUI.Menu -- Copyright : (c) 2011, 2012 Ian-Woo Kim -- -- License : BSD3 -- Maintainer : Ian-Woo Kim <ianwookim@gmail.com> -- Stability : experimental -- Portability : GHC -- ----------------------------------------------------------------------------- module Application.HXournal.GUI.Menu where import Application.HXournal.Util.Verbatim import Application.HXournal.Coroutine.Callback import Application.HXournal.Type import Application.HXournal.Type.Clipboard import Application.HXournal.Accessor import Control.Monad.Coroutine.SuspensionFunctors import Data.IORef import Data.Maybe import Control.Category import Data.Label import Prelude hiding ((.),id) import Graphics.UI.Gtk hiding (set,get) import qualified Graphics.UI.Gtk as Gtk (set) import System.FilePath import Data.Xournal.Predefined import Paths_hxournal -- | justMenu :: MenuEvent -> Maybe MyEvent justMenu = Just . 
Menu -- | uiDeclTest :: String uiDeclTest = [verbatim|<ui> <menubar> <menu action="VMA"> <menuitem action="CONTA" /> <menuitem action="ONEPAGEA" /> <separator /> <menuitem action="FSCRA" /> <separator /> </menu> </menubar> </ui>|] -- | uiDecl :: String uiDecl = [verbatim|<ui> <menubar> <menu action="FMA"> <menuitem action="NEWA" /> <menuitem action="ANNPDFA" /> <menuitem action="OPENA" /> <menuitem action="SAVEA" /> <menuitem action="SAVEASA" /> <separator /> <menuitem action="RECENTA" /> <separator /> <menuitem action="PRINTA" /> <menuitem action="EXPORTA" /> <separator /> <menuitem action="QUITA" /> </menu> <menu action="EMA"> <menuitem action="UNDOA" /> <menuitem action="REDOA" /> <separator /> <menuitem action="CUTA" /> <menuitem action="COPYA" /> <menuitem action="PASTEA" /> <menuitem action="DELETEA" /> <separator /> <menuitem action="NETCOPYA" /> <menuitem action="NETPASTEA" /> </menu> <menu action="VMA"> <menuitem action="CONTA" /> <menuitem action="ONEPAGEA" /> <separator /> <menuitem action="FSCRA" /> <separator /> <menu action="ZOOMA" > <menuitem action="ZMINA" /> <menuitem action="ZMOUTA" /> <menuitem action="NRMSIZEA" /> <menuitem action="PGWDTHA" /> <menuitem action="PGHEIGHTA" /> <menuitem action="SETZMA" /> </menu> <separator /> <menuitem action="FSTPAGEA" /> <menuitem action="PRVPAGEA" /> <menuitem action="NXTPAGEA" /> <menuitem action="LSTPAGEA" /> <separator /> <menuitem action="SHWLAYERA" /> <menuitem action="HIDLAYERA" /> <separator /> <menuitem action="HSPLITA" /> <menuitem action="VSPLITA" /> <menuitem action="DELCVSA" /> </menu> <menu action="JMA"> <menuitem action="NEWPGBA" /> <menuitem action="NEWPGAA" /> <menuitem action="NEWPGEA" /> <menuitem action="DELPGA" /> <separator /> <menuitem action="NEWLYRA" /> <menuitem action="NEXTLAYERA" /> <menuitem action="PREVLAYERA" /> <menuitem action="GOTOLAYERA" /> <menuitem action="DELLYRA" /> <separator /> <menuitem action="PPSIZEA" /> <menuitem action="PPCLRA" /> <menuitem action="PPSTYA" /> 
<menuitem action="APALLPGA" /> <separator /> <menuitem action="LDBKGA" /> <menuitem action="BKGSCRSHTA" /> <separator /> <menuitem action="DEFPPA" /> <menuitem action="SETDEFPPA" /> </menu> <menu action="TMA"> <menuitem action="PENA" /> <menuitem action="ERASERA" /> <menuitem action="HIGHLTA" /> <menuitem action="TEXTA" /> <separator /> <menuitem action="SHPRECA" /> <menuitem action="RULERA" /> <separator /> <menuitem action="SELREGNA" /> <menuitem action="SELRECTA" /> <menuitem action="VERTSPA" /> <menuitem action="HANDA" /> <separator /> <menu action="CLRA"> <menuitem action="BLACKA" /> <menuitem action="BLUEA" /> <menuitem action="REDA" /> <menuitem action="GREENA" /> <menuitem action="GRAYA" /> <menuitem action="LIGHTBLUEA" /> <menuitem action="LIGHTGREENA" /> <menuitem action="MAGENTAA" /> <menuitem action="ORANGEA" /> <menuitem action="YELLOWA" /> <menuitem action="WHITEA" /> </menu> <menu action="PENOPTA"> <menuitem action="PENVERYFINEA" /> <menuitem action="PENFINEA" /> <menuitem action="PENMEDIUMA" /> <menuitem action="PENTHICKA" /> <menuitem action="PENVERYTHICKA" /> <menuitem action="PENULTRATHICKA" /> </menu> <menuitem action="ERASROPTA" /> <menuitem action="HILTROPTA" /> <menuitem action="TXTFNTA" /> <separator /> <menuitem action="DEFPENA" /> <menuitem action="DEFERSRA" /> <menuitem action="DEFHILTRA" /> <menuitem action="DEFTXTA" /> <menuitem action="SETDEFOPTA" /> </menu> <menu action="OMA"> <menuitem action="UXINPUTA" /> <menuitem action="DCRDCOREA" /> <menuitem action="ERSRTIPA" /> <menuitem action="PRESSRSENSA" /> <menuitem action="PGHILTA" /> <menuitem action="MLTPGVWA" /> <menuitem action="MLTPGA" /> <menuitem action="BTN2MAPA" /> <menuitem action="BTN3MAPA" /> <separator /> <menuitem action="ANTIALIASBMPA" /> <menuitem action="PRGRSBKGA" /> <menuitem action="PRNTPPRULEA" /> <menuitem action="LFTHNDSCRBRA" /> <menuitem action="SHRTNMENUA" /> <separator /> <menuitem action="AUTOSAVEPREFA" /> <menuitem action="SAVEPREFA" /> <menuitem 
action="RELAUNCHA" /> </menu> <menu action="HMA"> <menuitem action="ABOUTA" /> </menu> </menubar> <toolbar name="toolbar1" > <toolitem action="SAVEA" /> <toolitem action="NEWA" /> <toolitem action="OPENA" /> <separator /> <toolitem action="CUTA" /> <toolitem action="COPYA" /> <toolitem action="PASTEA" /> <separator /> <toolitem action="UNDOA" /> <toolitem action="REDOA" /> <separator /> <toolitem action="FSTPAGEA" /> <toolitem action="PRVPAGEA" /> <toolitem action="NXTPAGEA" /> <toolitem action="LSTPAGEA" /> <separator /> <toolitem action="ZMOUTA" /> <toolitem action="NRMSIZEA" /> <toolitem action="ZMINA" /> <toolitem action="PGWDTHA" /> <toolitem action="SETZMA" /> <toolitem action="FSCRA" /> </toolbar> <toolbar name="toolbar2" > <toolitem action="PENA" /> <toolitem action="ERASERA" /> <toolitem action="HIGHLTA" /> <toolitem action="TEXTA" /> <separator /> <toolitem action="DEFAULTA" /> <toolitem action="DEFPENA" /> <toolitem action="SHPRECA" /> <toolitem action="RULERA" /> <separator /> <toolitem action="SELREGNA" /> <toolitem action="SELRECTA" /> <toolitem action="VERTSPA" /> <toolitem action="HANDA" /> <separator /> <toolitem action="PENFINEA" /> <toolitem action="PENMEDIUMA" /> <toolitem action="PENTHICKA" /> <separator /> <toolitem action="BLACKA" /> <toolitem action="BLUEA" /> <toolitem action="REDA" /> <toolitem action="GREENA" /> <toolitem action="GRAYA" /> <toolitem action="LIGHTBLUEA" /> <toolitem action="LIGHTGREENA" /> <toolitem action="MAGENTAA" /> <toolitem action="ORANGEA" /> <toolitem action="YELLOWA" /> <toolitem action="WHITEA" /> </toolbar> </ui> |] iconList :: [ (String,String) ] iconList = [ ("fullscreen.png" , "myfullscreen") , ("pencil.png" , "mypen") , ("eraser.png" , "myeraser") , ("highlighter.png", "myhighlighter") , ("text-tool.png" , "mytext") , ("shapes.png" , "myshapes") , ("ruler.png" , "myruler") , ("lasso.png" , "mylasso") , ("rect-select.png", "myrectselect") , ("stretch.png" , "mystretch") , ("hand.png" , "myhand") , 
("recycled.png" , "mydefault") , ("default-pen.png", "mydefaultpen") , ("thin.png" , "mythin") , ("medium.png" , "mymedium") , ("thick.png" , "mythick") , ("black.png" , "myblack") , ("blue.png" , "myblue") , ("red.png" , "myred") , ("green.png" , "mygreen") , ("gray.png" , "mygray") , ("lightblue.png" , "mylightblue") , ("lightgreen.png" , "mylightgreen") , ("magenta.png" , "mymagenta") , ("orange.png" , "myorange") , ("yellow.png" , "myyellow") , ("white.png" , "mywhite") ] -- | viewmods :: [RadioActionEntry] viewmods = [ RadioActionEntry "CONTA" "Continuous" Nothing Nothing Nothing 0 , RadioActionEntry "ONEPAGEA" "One Page" Nothing Nothing Nothing 1 ] -- | pointmods :: [RadioActionEntry] pointmods = [ RadioActionEntry "PENVERYFINEA" "Very fine" Nothing Nothing Nothing 0 , RadioActionEntry "PENFINEA" "Fine" (Just "mythin") Nothing Nothing 1 , RadioActionEntry "PENTHICKA" "Thick" (Just "mythick") Nothing Nothing 3 , RadioActionEntry "PENVERYTHICKA" "Very Thick" Nothing Nothing Nothing 4 , RadioActionEntry "PENULTRATHICKA" "Ultra Thick" Nothing Nothing Nothing 5 , RadioActionEntry "PENMEDIUMA" "Medium" (Just "mymedium") Nothing Nothing 2 ] -- | penmods :: [RadioActionEntry] penmods = [ RadioActionEntry "PENA" "Pen" (Just "mypen") Nothing Nothing 0 , RadioActionEntry "ERASERA" "Eraser" (Just "myeraser") Nothing Nothing 1 , RadioActionEntry "HIGHLTA" "Highlighter" (Just "myhighlighter") Nothing Nothing 2 , RadioActionEntry "TEXTA" "Text" (Just "mytext") Nothing Nothing 3 , RadioActionEntry "SELREGNA" "Select Region" (Just "mylasso") Nothing Nothing 4 , RadioActionEntry "SELRECTA" "Select Rectangle" (Just "myrectselect") Nothing Nothing 5 , RadioActionEntry "VERTSPA" "Vertical Space" (Just "mystretch") Nothing Nothing 6 , RadioActionEntry "HANDA" "Hand Tool" (Just "myhand") Nothing Nothing 7 ] -- | colormods :: [RadioActionEntry] colormods = [ RadioActionEntry "BLUEA" "Blue" (Just "myblue") Nothing Nothing 1 , RadioActionEntry "REDA" "Red" (Just "myred") Nothing 
Nothing 2 , RadioActionEntry "GREENA" "Green" (Just "mygreen") Nothing Nothing 3 , RadioActionEntry "GRAYA" "Gray" (Just "mygray") Nothing Nothing 4 , RadioActionEntry "LIGHTBLUEA" "Lightblue" (Just "mylightblue") Nothing Nothing 5 , RadioActionEntry "LIGHTGREENA" "Lightgreen" (Just "mylightgreen") Nothing Nothing 6 , RadioActionEntry "MAGENTAA" "Magenta" (Just "mymagenta") Nothing Nothing 7 , RadioActionEntry "ORANGEA" "Orange" (Just "myorange") Nothing Nothing 8 , RadioActionEntry "YELLOWA" "Yellow" (Just "myyellow") Nothing Nothing 9 , RadioActionEntry "WHITEA" "White" (Just "mywhite") Nothing Nothing 10 , RadioActionEntry "BLACKA" "Black" (Just "myblack") Nothing Nothing 0 ] -- | iconResourceAdd :: IconFactory -> FilePath -> (FilePath, StockId) -> IO () iconResourceAdd iconfac resdir (fp,stid) = do myIconSource <- iconSourceNew iconSourceSetFilename myIconSource (resdir </> fp) iconSourceSetSize myIconSource IconSizeLargeToolbar myIconSourceSmall <- iconSourceNew iconSourceSetFilename myIconSourceSmall (resdir </> fp) iconSourceSetSize myIconSource IconSizeMenu myIconSet <- iconSetNew iconSetAddSource myIconSet myIconSource iconSetAddSource myIconSet myIconSourceSmall iconFactoryAdd iconfac stid myIconSet -- | actionNewAndRegisterRef :: IORef (Await MyEvent (Iteratee MyEvent XournalStateIO ())) -> IORef HXournalState -> String -> String -> Maybe String -> Maybe StockId -> Maybe MyEvent -> IO Action actionNewAndRegisterRef tref sref name label tooltip stockId myevent = do a <- actionNew name label tooltip stockId case myevent of Nothing -> return a Just ev -> do a `on` actionActivated $ do bouncecallback tref sref ev return a -- | getMenuUI :: IORef (Await MyEvent (Iteratee MyEvent XournalStateIO ())) -> IORef HXournalState -> IO UIManager getMenuUI tref sref = do let actionNewAndRegister = actionNewAndRegisterRef tref sref -- icons myiconfac <- iconFactoryNew iconFactoryAddDefault myiconfac resDir <- getDataDir >>= return . 
(</> "resource") mapM_ (iconResourceAdd myiconfac resDir) iconList fma <- actionNewAndRegister "FMA" "File" Nothing Nothing Nothing ema <- actionNewAndRegister "EMA" "Edit" Nothing Nothing Nothing vma <- actionNewAndRegister "VMA" "View" Nothing Nothing Nothing jma <- actionNewAndRegister "JMA" "Journal" Nothing Nothing Nothing tma <- actionNewAndRegister "TMA" "Tools" Nothing Nothing Nothing oma <- actionNewAndRegister "OMA" "Options" Nothing Nothing Nothing hma <- actionNewAndRegister "HMA" "Help" Nothing Nothing Nothing -- file menu newa <- actionNewAndRegister "NEWA" "New" (Just "Just a Stub") (Just stockNew) (justMenu MenuNew) annpdfa <- actionNewAndRegister "ANNPDFA" "Annotate PDF" (Just "Just a Stub") Nothing (justMenu MenuAnnotatePDF) opena <- actionNewAndRegister "OPENA" "Open" (Just "Just a Stub") (Just stockOpen) (justMenu MenuOpen) savea <- actionNewAndRegister "SAVEA" "Save" (Just "Just a Stub") (Just stockSave) (justMenu MenuSave) saveasa <- actionNewAndRegister "SAVEASA" "Save As" (Just "Just a Stub") (Just stockSaveAs) (justMenu MenuSaveAs) recenta <- actionNewAndRegister "RECENTA" "Recent Document" (Just "Just a Stub") Nothing (justMenu MenuRecentDocument) printa <- actionNewAndRegister "PRINTA" "Print" (Just "Just a Stub") Nothing (justMenu MenuPrint) exporta <- actionNewAndRegister "EXPORTA" "Export" (Just "Just a Stub") Nothing (justMenu MenuExport) quita <- actionNewAndRegister "QUITA" "Quit" (Just "Just a Stub") (Just stockQuit) (justMenu MenuQuit) -- edit menu undoa <- actionNewAndRegister "UNDOA" "Undo" (Just "Just a Stub") (Just stockUndo) (justMenu MenuUndo) redoa <- actionNewAndRegister "REDOA" "Redo" (Just "Just a Stub") (Just stockRedo) (justMenu MenuRedo) cuta <- actionNewAndRegister "CUTA" "Cut" (Just "Just a Stub") (Just stockCut) (justMenu MenuCut) copya <- actionNewAndRegister "COPYA" "Copy" (Just "Just a Stub") (Just stockCopy) (justMenu MenuCopy) pastea <- actionNewAndRegister "PASTEA" "Paste" (Just "Just a Stub") (Just 
stockPaste) (justMenu MenuPaste) deletea <- actionNewAndRegister "DELETEA" "Delete" (Just "Just a Stub") (Just stockDelete) (justMenu MenuDelete) -- netcopya <- actionNewAndRegister "NETCOPYA" "Copy to NetworkClipboard" (Just "Just a Stub") Nothing (justMenu MenuNetCopy) -- netpastea <- actionNewAndRegister "NETPASTEA" "Paste from NetworkClipboard" (Just "Just a Stub") Nothing (justMenu MenuNetPaste) -- view menu fscra <- actionNewAndRegister "FSCRA" "Full Screen" (Just "Just a Stub") (Just "myfullscreen") (justMenu MenuFullScreen) zooma <- actionNewAndRegister "ZOOMA" "Zoom" (Just "Just a Stub") Nothing Nothing -- (justMenu MenuZoom) zmina <- actionNewAndRegister "ZMINA" "Zoom In" (Just "Zoom In") (Just stockZoomIn) (justMenu MenuZoomIn) zmouta <- actionNewAndRegister "ZMOUTA" "Zoom Out" (Just "Zoom Out") (Just stockZoomOut) (justMenu MenuZoomOut) nrmsizea <- actionNewAndRegister "NRMSIZEA" "Normal Size" (Just "Normal Size") (Just stockZoom100) (justMenu MenuNormalSize) pgwdtha <- actionNewAndRegister "PGWDTHA" "Page Width" (Just "Page Width") (Just stockZoomFit) (justMenu MenuPageWidth) pgheighta <- actionNewAndRegister "PGHEIGHTA" "Page Height" (Just "Page Height") Nothing (justMenu MenuPageHeight) setzma <- actionNewAndRegister "SETZMA" "Set Zoom" (Just "Set Zoom") (Just stockFind) (justMenu MenuSetZoom) fstpagea <- actionNewAndRegister "FSTPAGEA" "First Page" (Just "Just a Stub") (Just stockGotoFirst) (justMenu MenuFirstPage) prvpagea <- actionNewAndRegister "PRVPAGEA" "Previous Page" (Just "Just a Stub") (Just stockGoBack) (justMenu MenuPreviousPage) nxtpagea <- actionNewAndRegister "NXTPAGEA" "Next Page" (Just "Just a Stub") (Just stockGoForward) (justMenu MenuNextPage) lstpagea <- actionNewAndRegister "LSTPAGEA" "Last Page" (Just "Just a Stub") (Just stockGotoLast) (justMenu MenuLastPage) shwlayera <- actionNewAndRegister "SHWLAYERA" "Show Layer" (Just "Just a Stub") Nothing (justMenu MenuShowLayer) hidlayera <- actionNewAndRegister "HIDLAYERA" "Hide Layer" 
(Just "Just a Stub") Nothing (justMenu MenuHideLayer) hsplita <- actionNewAndRegister "HSPLITA" "Horizontal Split" (Just "horizontal split") Nothing (justMenu MenuHSplit) vsplita <- actionNewAndRegister "VSPLITA" "Vertical Split" (Just "vertical split") Nothing (justMenu MenuVSplit) delcvsa <- actionNewAndRegister "DELCVSA" "Delete Current Canvas" (Just "delete current canvas") Nothing (justMenu MenuDelCanvas) -- journal menu newpgba <- actionNewAndRegister "NEWPGBA" "New Page Before" (Just "Just a Stub") Nothing (justMenu MenuNewPageBefore) newpgaa <- actionNewAndRegister "NEWPGAA" "New Page After" (Just "Just a Stub") Nothing (justMenu MenuNewPageAfter) newpgea <- actionNewAndRegister "NEWPGEA" "New Page At End" (Just "Just a Stub") Nothing (justMenu MenuNewPageAtEnd) delpga <- actionNewAndRegister "DELPGA" "Delete Page" (Just "Just a Stub") Nothing (justMenu MenuDeletePage) newlyra <- actionNewAndRegister "NEWLYRA" "New Layer" (Just "Just a Stub") Nothing (justMenu MenuNewLayer) nextlayera <- actionNewAndRegister "NEXTLAYERA" "Next Layer" (Just "Just a Stub") Nothing (justMenu MenuNextLayer) prevlayera <- actionNewAndRegister "PREVLAYERA" "Prev Layer" (Just "Just a Stub") Nothing (justMenu MenuPrevLayer) gotolayera <- actionNewAndRegister "GOTOLAYERA" "Goto Layer" (Just "Just a Stub") Nothing (justMenu MenuGotoLayer) dellyra <- actionNewAndRegister "DELLYRA" "Delete Layer" (Just "Just a Stub") Nothing (justMenu MenuDeleteLayer) ppsizea <- actionNewAndRegister "PPSIZEA" "Paper Size" (Just "Just a Stub") Nothing (justMenu MenuPaperSize) ppclra <- actionNewAndRegister "PPCLRA" "Paper Color" (Just "Just a Stub") Nothing (justMenu MenuPaperColor) ppstya <- actionNewAndRegister "PPSTYA" "Paper Style" (Just "Just a Stub") Nothing (justMenu MenuPaperStyle) apallpga<- actionNewAndRegister "APALLPGA" "Apply To All Pages" (Just "Just a Stub") Nothing (justMenu MenuApplyToAllPages) ldbkga <- actionNewAndRegister "LDBKGA" "Load Background" (Just "Just a Stub") Nothing 
(justMenu MenuLoadBackground) bkgscrshta <- actionNewAndRegister "BKGSCRSHTA" "Background Screenshot" (Just "Just a Stub") Nothing (justMenu MenuBackgroundScreenshot) defppa <- actionNewAndRegister "DEFPPA" "Default Paper" (Just "Just a Stub") Nothing (justMenu MenuDefaultPaper) setdefppa <- actionNewAndRegister "SETDEFPPA" "Set As Default" (Just "Just a Stub") Nothing (justMenu MenuSetAsDefaultPaper) -- tools menu shpreca <- actionNewAndRegister "SHPRECA" "Shape Recognizer" (Just "Just a Stub") (Just "myshapes") (justMenu MenuShapeRecognizer) rulera <- actionNewAndRegister "RULERA" "Ruler" (Just "Just a Stub") (Just "myruler") (justMenu MenuRuler) -- selregna <- actionNewAndRegister "SELREGNA" "Select Region" (Just "Just a Stub") (Just "mylasso") (justMenu MenuSelectRegion) -- selrecta <- actionNewAndRegister "SELRECTA" "Select Rectangle" (Just "Just a Stub") (Just "myrectselect") (justMenu MenuSelectRectangle) -- vertspa <- actionNewAndRegister "VERTSPA" "Vertical Space" (Just "Just a Stub") (Just "mystretch") (justMenu MenuVerticalSpace) -- handa <- actionNewAndRegister "HANDA" "Hand Tool" (Just "Just a Stub") (Just "myhand") (justMenu MenuHandTool) clra <- actionNewAndRegister "CLRA" "Color" (Just "Just a Stub") Nothing Nothing penopta <- actionNewAndRegister "PENOPTA" "Pen Options" (Just "Just a Stub") Nothing (justMenu MenuPenOptions) erasropta <- actionNewAndRegister "ERASROPTA" "Eraser Options" (Just "Just a Stub") Nothing (justMenu MenuEraserOptions) hiltropta <- actionNewAndRegister "HILTROPTA" "Highlighter Options" (Just "Just a Stub") Nothing (justMenu MenuHighlighterOptions) txtfnta <- actionNewAndRegister "TXTFNTA" "Text Font" (Just "Just a Stub") Nothing (justMenu MenuTextFont) defpena <- actionNewAndRegister "DEFPENA" "Default Pen" (Just "Just a Stub") (Just "mydefaultpen") (justMenu MenuDefaultPen) defersra <- actionNewAndRegister "DEFERSRA" "Default Eraser" (Just "Just a Stub") Nothing (justMenu MenuDefaultEraser) defhiltra <- actionNewAndRegister 
"DEFHILTRA" "Default Highlighter" (Just "Just a Stub") Nothing (justMenu MenuDefaultHighlighter) deftxta <- actionNewAndRegister "DEFTXTA" "Default Text" (Just "Just a Stub") Nothing (justMenu MenuDefaultText) setdefopta <- actionNewAndRegister "SETDEFOPTA" "Set As Default" (Just "Just a Stub") Nothing (justMenu MenuSetAsDefaultOption) relauncha <- actionNewAndRegister "RELAUNCHA" "Relaunch Application" (Just "Just a Stub") Nothing (justMenu MenuRelaunch) -- options menu uxinputa <- toggleActionNew "UXINPUTA" "Use XInput" (Just "Just a Stub") Nothing uxinputa `on` actionToggled $ do bouncecallback tref sref (Menu MenuUseXInput) -- AndRegister "UXINPUTA" "Use XInput" (Just "Just a Stub") Nothing (justMenu MenuUseXInput) dcrdcorea <- actionNewAndRegister "DCRDCOREA" "Discard Core Events" (Just "Just a Stub") Nothing (justMenu MenuDiscardCoreEvents) ersrtipa <- actionNewAndRegister "ERSRTIPA" "Eraser Tip" (Just "Just a Stub") Nothing (justMenu MenuEraserTip) pressrsensa <- toggleActionNew "PRESSRSENSA" "Pressure Sensitivity" (Just "Just a Stub") Nothing pressrsensa `on` actionToggled $ do bouncecallback tref sref (Menu MenuPressureSensitivity) -- AndRegister "UXINPUTA" "Use XInput" (Just "Just a Stub") Nothing (justMenu MenuUseXInput) pghilta <- actionNewAndRegister "PGHILTA" "Page Highlight" (Just "Just a Stub") Nothing (justMenu MenuPageHighlight) mltpgvwa <- actionNewAndRegister "MLTPGVWA" "Multiple Page View" (Just "Just a Stub") Nothing (justMenu MenuMultiplePageView) mltpga <- actionNewAndRegister "MLTPGA" "Multiple Pages" (Just "Just a Stub") Nothing (justMenu MenuMultiplePages) btn2mapa <- actionNewAndRegister "BTN2MAPA" "Button 2 Mapping" (Just "Just a Stub") Nothing (justMenu MenuButton2Mapping) btn3mapa <- actionNewAndRegister "BTN3MAPA" "Button 3 Mapping" (Just "Just a Stub") Nothing (justMenu MenuButton3Mapping) antialiasbmpa <- actionNewAndRegister "ANTIALIASBMPA" "Antialiased Bitmaps" (Just "Just a Stub") Nothing (justMenu MenuAntialiasedBitmaps) 
prgrsbkga <- actionNewAndRegister "PRGRSBKGA" "Progressive Backgrounds" (Just "Just a Stub") Nothing (justMenu MenuProgressiveBackgrounds) prntpprulea <- actionNewAndRegister "PRNTPPRULEA" "Print Paper Ruling" (Just "Just a Stub") Nothing (justMenu MenuPrintPaperRuling) lfthndscrbra <- actionNewAndRegister "LFTHNDSCRBRA" "Left-Handed Scrollbar" (Just "Just a Stub") Nothing (justMenu MenuLeftHandedScrollbar) shrtnmenua <- actionNewAndRegister "SHRTNMENUA" "Shorten Menus" (Just "Just a Stub") Nothing (justMenu MenuShortenMenus) autosaveprefa <- actionNewAndRegister "AUTOSAVEPREFA" "Auto-Save Preferences" (Just "Just a Stub") Nothing (justMenu MenuAutoSavePreferences) saveprefa <- actionNewAndRegister "SAVEPREFA" "Save Preferences" (Just "Just a Stub") Nothing (justMenu MenuSavePreferences) -- help menu abouta <- actionNewAndRegister "ABOUTA" "About" (Just "Just a Stub") Nothing (justMenu MenuAbout) -- others defaulta <- actionNewAndRegister "DEFAULTA" "Default" (Just "Default") (Just "mydefault") (justMenu MenuDefault) agr <- actionGroupNew "AGR" mapM_ (actionGroupAddAction agr) [fma,ema,vma,jma,tma,oma,hma] mapM_ (actionGroupAddAction agr) [ undoa, redoa, cuta, copya, pastea, deletea ] -- actionGroupAddActionWithAccel agr undoa (Just "<control>z") mapM_ (\act -> actionGroupAddActionWithAccel agr act Nothing) [ newa, annpdfa, opena, savea, saveasa, recenta, printa, exporta, quita {- , netcopya, netpastea -} , fscra, zooma, zmina, zmouta, nrmsizea, pgwdtha, pgheighta, setzma , fstpagea, prvpagea, nxtpagea, lstpagea, shwlayera, hidlayera , hsplita, vsplita, delcvsa , newpgba, newpgaa, newpgea, delpga, newlyra, nextlayera, prevlayera, gotolayera, dellyra, ppsizea, ppclra , ppstya, apallpga, ldbkga, bkgscrshta, defppa, setdefppa , shpreca, rulera, clra, penopta , erasropta, hiltropta, txtfnta, defpena, defersra, defhiltra, deftxta , setdefopta, relauncha , dcrdcorea, ersrtipa, pghilta, mltpgvwa , mltpga, btn2mapa, btn3mapa, antialiasbmpa, prgrsbkga, prntpprulea , 
lfthndscrbra, shrtnmenua, autosaveprefa, saveprefa , abouta , defaulta ] actionGroupAddAction agr uxinputa actionGroupAddAction agr pressrsensa -- actionGroupAddRadioActions agr viewmods 0 (assignViewMode tref sref) actionGroupAddRadioActions agr viewmods 0 (const (return ())) actionGroupAddRadioActions agr pointmods 0 (assignPoint sref) actionGroupAddRadioActions agr penmods 0 (assignPenMode tref sref) actionGroupAddRadioActions agr colormods 0 (assignColor sref) let disabledActions = [ recenta, printa, exporta , cuta, copya, pastea, deletea , fscra, setzma , shwlayera, hidlayera , newpgea, {- delpga, -} ppsizea, ppclra , ppstya, apallpga, ldbkga, bkgscrshta, defppa, setdefppa , shpreca, rulera , erasropta, hiltropta, txtfnta, defpena, defersra, defhiltra, deftxta , setdefopta , dcrdcorea, ersrtipa, pghilta, mltpgvwa , mltpga, btn2mapa, btn3mapa, antialiasbmpa, prgrsbkga, prntpprulea , lfthndscrbra, shrtnmenua, autosaveprefa, saveprefa , abouta , defaulta ] enabledActions = [ opena, savea, saveasa, quita, fstpagea, prvpagea, nxtpagea, lstpagea , clra, penopta, zooma, nrmsizea, pgwdtha ] -- mapM_ (\x->actionSetSensitive x True) enabledActions mapM_ (\x->actionSetSensitive x False) disabledActions -- -- -- radio actions -- ui <- uiManagerNew uiManagerAddUiFromString ui uiDecl uiManagerInsertActionGroup ui agr 0 -- Just ra1 <- actionGroupGetAction agr "ONEPAGEA" -- Gtk.set (castToRadioAction ra1) [radioActionCurrentValue := 1] Just ra2 <- actionGroupGetAction agr "PENFINEA" Gtk.set (castToRadioAction ra2) [radioActionCurrentValue := 2] Just ra3 <- actionGroupGetAction agr "SELREGNA" actionSetSensitive ra3 True Just ra4 <- actionGroupGetAction agr "VERTSPA" actionSetSensitive ra4 False Just ra5 <- actionGroupGetAction agr "HANDA" actionSetSensitive ra5 False Just ra6 <- actionGroupGetAction agr "CONTA" actionSetSensitive ra6 True Just toolbar1 <- uiManagerGetWidget ui "/ui/toolbar1" toolbarSetStyle (castToToolbar toolbar1) ToolbarIcons Just toolbar2 <- 
uiManagerGetWidget ui "/ui/toolbar2" toolbarSetStyle (castToToolbar toolbar2) ToolbarIcons return ui -- | assignViewMode :: IORef (Await MyEvent (Iteratee MyEvent XournalStateIO ())) -> IORef HXournalState -> RadioAction -> IO () assignViewMode tref sref a = do st <- readIORef sref putStrLn "in assignmViewMode" printCanvasMode (getCurrentCanvasId st) (get currentCanvasInfo st) putStrLn "still in assignViewMode" viewModeToMyEvent a >>= bouncecallback tref sref -- | assignPenMode :: IORef (Await MyEvent (Iteratee MyEvent XournalStateIO ())) -> IORef HXournalState -> RadioAction -> IO () assignPenMode tref sref a = do v <- radioActionGetCurrentValue a let t = int2PenType v st <- readIORef sref case t of Left pm -> do let stNew = set (penType.penInfo) pm st writeIORef sref stNew bouncecallback tref sref ToViewAppendMode Right sm -> do let stNew = set (selectType.selectInfo) sm st writeIORef sref stNew bouncecallback tref sref ToSelectMode -- | assignColor :: IORef HXournalState -> RadioAction -> IO () assignColor sref a = do v <- radioActionGetCurrentValue a let c = int2Color v st <- readIORef sref let callback = get callBack st let stNew = set (penColor.currentTool.penInfo) c st writeIORef sref stNew callback (PenColorChanged c) -- | assignPoint :: IORef HXournalState -> RadioAction -> IO () assignPoint sref a = do v <- radioActionGetCurrentValue a st <- readIORef sref let ptype = get (penType.penInfo) st let w = int2Point ptype v let stNew = set (penWidth.currentTool.penInfo) w st let callback = get callBack st writeIORef sref stNew callback (PenWidthChanged w) -- | int2PenType :: Int -> Either PenType SelectType int2PenType 0 = Left PenWork int2PenType 1 = Left EraserWork int2PenType 2 = Left HighlighterWork int2PenType 3 = Left TextWork int2PenType 4 = Right SelectRegionWork int2PenType 5 = Right SelectRectangleWork int2PenType 6 = Right SelectVerticalSpaceWork int2PenType 7 = Right SelectHandToolWork int2PenType _ = error "No such pentype" -- | int2Point :: 
PenType -> Int -> Double int2Point PenWork 0 = predefined_veryfine int2Point PenWork 1 = predefined_fine int2Point PenWork 2 = predefined_medium int2Point PenWork 3 = predefined_thick int2Point PenWork 4 = predefined_verythick int2Point PenWork 5 = predefined_ultrathick int2Point HighlighterWork 0 = predefined_highlighter_veryfine int2Point HighlighterWork 1 = predefined_highlighter_fine int2Point HighlighterWork 2 = predefined_highlighter_medium int2Point HighlighterWork 3 = predefined_highlighter_thick int2Point HighlighterWork 4 = predefined_highlighter_verythick int2Point HighlighterWork 5 = predefined_highlighter_ultrathick int2Point EraserWork 0 = predefined_eraser_veryfine int2Point EraserWork 1 = predefined_eraser_fine int2Point EraserWork 2 = predefined_eraser_medium int2Point EraserWork 3 = predefined_eraser_thick int2Point EraserWork 4 = predefined_eraser_verythick int2Point EraserWork 5 = predefined_eraser_ultrathick int2Point TextWork 0 = predefined_veryfine int2Point TextWork 1 = predefined_fine int2Point TextWork 2 = predefined_medium int2Point TextWork 3 = predefined_thick int2Point TextWork 4 = predefined_verythick int2Point TextWork 5 = predefined_ultrathick int2Point _ _ = error "No such point" -- | int2Color :: Int -> PenColor int2Color 0 = ColorBlack int2Color 1 = ColorBlue int2Color 2 = ColorRed int2Color 3 = ColorGreen int2Color 4 = ColorGray int2Color 5 = ColorLightBlue int2Color 6 = ColorLightGreen int2Color 7 = ColorMagenta int2Color 8 = ColorOrange int2Color 9 = ColorYellow int2Color 10 = ColorWhite int2Color _ = error "No such color"
wavewave/hxournal
lib/Application/HXournal/GUI/Menu.hs
Haskell
bsd-2-clause
34,087
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE PolyKinds #-}

module Propellor.Property (
	-- * Property combinators
	  requires
	, before
	, onChange
	, onChangeFlagOnFail
	, flagFile
	, flagFile'
	, check
	, fallback
	, revert
	-- * Property descriptions
	, describe
	, (==>)
	-- * Constructing properties
	, Propellor
	, property
	, property'
	, OuterMetaTypesWitness
	, ensureProperty
	, pickOS
	, withOS
	, unsupportedOS
	, unsupportedOS'
	, makeChange
	, noChange
	, doNothing
	, endAction
	-- * Property result checking
	, UncheckedProperty
	, unchecked
	, changesFile
	, changesFileContent
	, isNewerThan
	, checkResult
	, Checkable
	, assume
	) where

import System.FilePath
import Control.Monad
import Data.Monoid
import Control.Monad.IfElse
import "mtl" Control.Monad.RWS.Strict
import System.Posix.Files
import qualified Data.Hash.MD5 as MD5
import Data.List
import Control.Applicative
import Prelude

import Propellor.Types
import Propellor.Types.Core
import Propellor.Types.ResultCheck
import Propellor.Types.MetaTypes
import Propellor.Types.Singletons
import Propellor.Info
import Propellor.EnsureProperty
import Utility.Exception
import Utility.Monad
import Utility.Misc
import Utility.Directory

-- | Makes a perhaps non-idempotent Property be idempotent by using a flag
-- file to indicate whether it has run before.
--
-- Use with caution: if the flag file exists the property is never re-run,
-- even when the system state it established has since been undone.
flagFile :: Property i -> FilePath -> Property i
flagFile p = flagFile' p . return

-- | Variant of 'flagFile' whose flag file path is computed in IO.
flagFile' :: Property i -> IO FilePath -> Property i
flagFile' p getflagfile = adjustPropertySatisfy p $ \satisfy -> do
	flagfile <- liftIO getflagfile
	go satisfy flagfile =<< liftIO (doesFileExist flagfile)
  where
	-- Flag file already present: the property is assumed satisfied.
	go _ _ True = return NoChange
	-- No flag file yet: run the property, and create the flag file
	-- only when the property reports it made a change.
	go satisfy flagfile False = do
		r <- satisfy
		when (r == MadeChange) $ liftIO $
			unlessM (doesFileExist flagfile) $ do
				createDirectoryIfMissing True (takeDirectory flagfile)
				writeFile flagfile ""
		return r

-- | Indicates that the first property depends on the second,
-- so before the first is ensured, the second must be ensured.
--
-- The combined property uses the description of the first property.
requires :: Combines x y => x -> y -> CombinedType x y
requires = combineWith
	-- Run action of y, then x
	(flip (<>))
	-- When reverting, run in reverse order.
	(<>)

-- | Combines together two properties, resulting in one property
-- that ensures the first, and if the first succeeds, ensures the second.
--
-- The combined property uses the description of the first property.
before :: Combines x y => x -> y -> CombinedType x y
before = combineWith
	-- Run action of x, then y
	(<>)
	-- When reverting, run in reverse order.
	(flip (<>))

-- | Whenever a change has to be made for a Property, causes a hook
-- Property to also be run, but not otherwise.
onChange :: (Combines x y) => x -> y -> CombinedType x y
onChange = combineWith combiner revertcombiner
  where
	-- The hook only runs when the first property reports MadeChange;
	-- its result is merged into the overall Result.
	combiner (Just p) (Just hook) = Just $ do
		r <- p
		case r of
			MadeChange -> do
				r' <- hook
				return $ r <> r'
			_ -> return r
	combiner (Just p) Nothing = Just p
	combiner Nothing _ = Nothing
	revertcombiner = (<>)

-- | Same as `onChange` except that if property y fails, a flag file
-- is generated. On next run, if the flag file is present, property y
-- is executed even if property x doesn't change.
--
-- With `onChange`, if y fails, the property x `onChange` y returns
-- `FailedChange`. But if this property is applied again, it returns
-- `NoChange`. This behavior can cause trouble...
onChangeFlagOnFail :: (Combines x y) => FilePath -> x -> y -> CombinedType x y
onChangeFlagOnFail flagfile = combineWith combiner revertcombiner
  where
	-- Run the hook when the first property made a change, or when a
	-- previous run left the failure flag file behind.
	combiner (Just s1) s2 = Just $ do
		r1 <- s1
		case r1 of
			MadeChange -> flagFailed s2
			_ -> ifM (liftIO $ doesFileExist flagfile)
				( flagFailed s2
				, return r1
				)
	combiner Nothing _ = Nothing
	revertcombiner = (<>)
	-- Run the hook; record its failure in the flag file, or clear the
	-- flag file once it succeeds.
	flagFailed (Just s) = do
		r <- s
		liftIO $ case r of
			FailedChange -> createFlagFile
			_ -> removeFlagFile
		return r
	flagFailed Nothing = return NoChange
	createFlagFile = unlessM (doesFileExist flagfile) $ do
		createDirectoryIfMissing True (takeDirectory flagfile)
		writeFile flagfile ""
	removeFlagFile = whenM (doesFileExist flagfile) $ removeFile flagfile

-- | Changes the description of a property.
describe :: IsProp p => p -> Desc -> p
describe = setDesc

-- | Alias for @flip describe@
(==>) :: IsProp (Property i) => Desc -> Property i -> Property i
(==>) = flip describe
infixl 1 ==>

-- | Tries the first property, but if it fails to work, instead uses
-- the second.
fallback :: (Combines p1 p2) => p1 -> p2 -> CombinedType p1 p2
fallback = combineWith combiner revertcombiner
  where
	-- Only falls through to the second action on FailedChange.
	combiner (Just a1) (Just a2) = Just $ do
		r <- a1
		if r == FailedChange
			then a2
			else return r
	combiner (Just a1) Nothing = Just a1
	combiner Nothing _ = Nothing
	revertcombiner = (<>)

-- | Indicates that a Property may change a particular file. When the file
-- is modified in any way (including changing its permissions or mtime),
-- the property will return MadeChange instead of NoChange.
changesFile :: Checkable p i => p i -> FilePath -> Property i
changesFile p f = checkResult getstat comparestat p
  where
	-- lstat, so a change to a symlink itself (not its target) counts.
	getstat = catchMaybeIO $ getSymbolicLinkStatus f
	comparestat oldstat = do
		newstat <- getstat
		return $ if samestat oldstat newstat then NoChange else MadeChange
	-- Nothing means the file did not exist at that point.
	samestat Nothing Nothing = True
	samestat (Just a) (Just b) = and
		-- everything except for atime
		[ deviceID a == deviceID b
		, fileID a == fileID b
		, fileMode a == fileMode b
		, fileOwner a == fileOwner b
		, fileGroup a == fileGroup b
		, specialDeviceID a == specialDeviceID b
		, fileSize a == fileSize b
		, modificationTimeHiRes a == modificationTimeHiRes b
		, isBlockDevice a == isBlockDevice b
		, isCharacterDevice a == isCharacterDevice b
		, isNamedPipe a == isNamedPipe b
		, isRegularFile a == isRegularFile b
		, isDirectory a == isDirectory b
		, isSymbolicLink a == isSymbolicLink b
		, isSocket a == isSocket b
		]
	samestat _ _ = False

-- | Like `changesFile`, but compares the content of the file.
-- Changes to mtime etc that do not change file content are treated as
-- NoChange.
changesFileContent :: Checkable p i => p i -> FilePath -> Property i
changesFileContent p f = checkResult getmd5 comparemd5 p
  where
	-- MD5 of the whole file content; Nothing when the file is unreadable.
	getmd5 = catchMaybeIO $ MD5.md5 . MD5.Str <$> readFileStrict f
	comparemd5 oldmd5 = do
		newmd5 <- getmd5
		return $ if oldmd5 == newmd5 then NoChange else MadeChange

-- | Determines if the first file is newer than the second file.
--
-- This can be used with `check` to only run a command when a file
-- has changed.
--
-- > check ("/etc/aliases" `isNewerThan` "/etc/aliases.db")
-- > 	(cmdProperty "newaliases" [] `assume` MadeChange) -- updates aliases.db
--
-- Or it can be used with `checkResult` to test if a command made a change.
--
-- > checkResult (return ())
-- > 	(\_ -> "/etc/aliases.db" `isNewerThan` "/etc/aliases")
-- > 	(cmdProperty "newaliases" [])
--
-- (If one of the files does not exist, the file that does exist is
-- considered to be the newer of the two.)
isNewerThan :: FilePath -> FilePath -> IO Bool
isNewerThan x y = do
	mx <- mtime x
	my <- mtime y
	-- Maybe's Ord makes Nothing smaller than any Just, which gives the
	-- documented behavior when one of the files is missing.
	return (mx > my)
  where
	mtime f = catchMaybeIO $ modificationTimeHiRes <$> getFileStatus f

-- | Picks one of the two input properties to use,
-- depending on the targeted OS.
--
-- If both input properties support the targeted OS, then the
-- first will be used.
--
-- The resulting property will use the description of the first property
-- no matter which property is used in the end. So, it's often a good
-- idea to change the description to something clearer.
--
-- For example:
--
-- > upgraded :: Property (DebianLike + FreeBSD)
-- > upgraded = (Apt.upgraded `pickOS` Pkg.upgraded)
-- > 	`describe` "OS upgraded"
--
-- If neither input property supports the targeted OS, calls
-- `unsupportedOS`. Using the example above on a Fedora system would
-- fail that way.
pickOS
	::
		( SingKind ('KProxy :: KProxy ka)
		, SingKind ('KProxy :: KProxy kb)
		, DemoteRep ('KProxy :: KProxy ka) ~ [MetaType]
		, DemoteRep ('KProxy :: KProxy kb) ~ [MetaType]
		, SingI c
		-- Would be nice to have this constraint, but
		-- union will not generate metatypes lists with the same
		-- order of OS's as is used everywhere else. So,
		-- would need a type-level sort.
		--, Union a b ~ c
		)
	=> Property (MetaTypes (a :: ka))
	-> Property (MetaTypes (b :: kb))
	-> Property (MetaTypes c)
pickOS a b = c `addChildren` [toChildProperty a, toChildProperty b]
  where
	-- This use of getSatisfy is safe, because both a and b
	-- are added as children, so their info will propagate.
	c = withOS (getDesc a) $ \_ o ->
		if matching o a
			then maybe (pure NoChange) id (getSatisfy a)
			else if matching o b
				then maybe (pure NoChange) id (getSatisfy b)
				else unsupportedOS'
	-- An unknown host OS matches neither property.
	matching Nothing _ = False
	matching (Just o) p = Targeting (systemToTargetOS o)
		`elem` fromSing (proptype p)
	proptype (Property t _ _ _ _) = t

-- | Makes a property that is satisfied differently depending on specifics
-- of the host's operating system.
--
-- > myproperty :: Property Debian
-- > myproperty = withOS "foo installed" $ \w o -> case o of
-- > 	(Just (System (Debian kernel (Stable release)) arch)) -> ensureProperty w ...
-- > 	(Just (System (Debian kernel suite) arch)) -> ensureProperty w ...
-- > 	_ -> unsupportedOS'
--
-- Note that the operating system specifics may not be declared for all hosts,
-- which is where Nothing comes in.
withOS
	:: (SingI metatypes)
	=> Desc
	-> (OuterMetaTypesWitness '[] -> Maybe System -> Propellor Result)
	-> Property (MetaTypes metatypes)
withOS desc a = property desc $ a dummyoutermetatypes =<< getOS
  where
	-- Using this dummy value allows ensureProperty to be used
	-- even though the inner property probably doesn't target everything
	-- that the outer withOS property targets.
	dummyoutermetatypes :: OuterMetaTypesWitness ('[])
	dummyoutermetatypes = OuterMetaTypesWitness sing

-- | A property that always fails with an unsupported OS error.
unsupportedOS :: Property UnixLike
unsupportedOS = property "unsupportedOS" unsupportedOS'

-- | Throws an error, for use in `withOS` when a property is lacking
-- support for an OS.
unsupportedOS' :: Propellor Result
unsupportedOS' = go =<< getOS
  where
	go Nothing = error "Unknown host OS is not supported by this property."
	go (Just o) = error $ "This property is not implemented for " ++ show o

-- | Undoes the effect of a RevertableProperty.
revert :: RevertableProperty setup undo -> RevertableProperty undo setup
revert (RevertableProperty p1 p2) = RevertableProperty p2 p1

-- | Runs an IO action, and always reports that a change was made.
makeChange :: IO () -> Propellor Result
makeChange a = liftIO a >> return MadeChange

-- | Reports that no change was needed.
noChange :: Propellor Result
noChange = return NoChange

-- | A no-op property.
--
-- This is the same as `mempty` from the `Monoid` instance.
doNothing :: SingI t => Property (MetaTypes t)
doNothing = mempty

-- | Registers an action that should be run at the very end, after
-- propellor has checked all the properties of a host.
-- | Queues up an action to be run at the very end, once propellor has
-- finished checking all of a host's properties. The action receives the
-- overall Result and is recorded via the writer layer as an EndAction.
endAction :: Desc -> (Result -> Propellor Result) -> Propellor ()
endAction desc act = tell [EndAction desc act]
ArchiveTeam/glowing-computing-machine
src/Propellor/Property.hs
Haskell
bsd-2-clause
11,367
{-# LANGUAGE ScopedTypeVariables #-}
{-|
  Module:      HaskHOL.Lib.Meson
  Copyright:   (c) Evan Austin 2015
  LICENSE:     BSD3

  Maintainer:  e.c.austin@gmail.com
  Stability:   unstable
  Portability: unknown
-}
module HaskHOL.Lib.Meson
    ( tacGEN_MESON
    , tacASM_MESON
    , tacASM_MESON_NIL
    , tacMESON
    , tacMESON_NIL
    , ruleMESON
    , ruleMESON_NIL
    ) where

import HaskHOL.Core
import qualified HaskHOL.Core.Kernel as K (typeOf)
import HaskHOL.Lib.Bool
import HaskHOL.Lib.Canon
import HaskHOL.Lib.Classic
import HaskHOL.Lib.Equal
import HaskHOL.Lib.Theorems
import HaskHOL.Lib.Simp
import HaskHOL.Lib.DRule
import HaskHOL.Lib.Tactics
import HaskHOL.Lib.Trivia

-- AST for negation normal form terms.
-- Variables and function symbols are both identified by Int indices.
data FOLTerm
    = FVar Int
    | FNApp Int [FOLTerm]
    deriving (Eq, Ord)

-- A first-order atom: predicate index paired with its argument terms.
type FOLAtom = (Int, [FOLTerm])

-- Type of a MESON proof tree
data FOLGoal = Subgoal FOLAtom [FOLGoal] (Int, HOLThm) Int [(FOLTerm, Int)]
    deriving Eq

-- Substitution environment mapping variable indices to terms.
type FOLTermEnv = [(FOLTerm, Int)]
-- (environment, depth bound, variable counter) threaded through the search.
type Tup = (FOLTermEnv, Int, Int)
-- Clauses for one predicate: arity with (hypotheses, conclusion) rules
-- tagged by their source theorem.
type Rule = (Int, [(([FOLAtom], FOLAtom), (Int, HOLThm))])
-- A goal with its applicable rules, plus the current search state.
type State = ((FOLAtom, [Rule]), Tup)

-- Control-flow exceptions used to signal backtracking (Fail) and
-- pruning (Cut) during the proof search.
data MesonErr = Fail | Cut deriving (Show, Typeable)
instance Exception MesonErr

-- Cacheable continuations.
-- Continuations are reified as first-order data (DeCont1/DeCont below) so
-- they can be compared and cached; these aliases name their interpretations.
type ContMem1 cls thry = (FOLGoal, Tup) -> HOL cls thry (FOLGoal, Tup)
type ContMemMany cls thry = ([FOLGoal], Tup) -> HOL cls thry (FOLGoal, Tup)

-- Defunctionalized single-goal continuations.
data DeCont1 = Return1
             | Base DeCont
             | Goals11 DeCont [Rule] Int [(FOLAtom, [Rule])]
             | Cache1 DeCont1
    deriving Eq

-- Defunctionalized multi-goal continuations.
data DeCont = TopRec FOLTermEnv Int FOLTermEnv FOLAtom (Int, HOLThm) DeCont1 Int
            | Goals1 DeCont [Rule] Int [(FOLAtom, [Rule])] Int
            | Goals2 DeCont [FOLGoal]
            | Goals3 Int Int DeCont [Rule] Int [(FOLAtom, [Rule])]
            | Goals4 Int Int Int DeCont [FOLGoal]
            | Goals5 DeCont FOLGoal
            | CacheMany DeCont
    deriving Eq

-- Wrap a continuation so its results are memoized (see deCont1/deCont).
cachecont :: DeCont1 -> DeCont1
cachecont = Cache1

cacheconts :: DeCont -> DeCont
cacheconts = CacheMany

-- local MESON state
type MesonState = HOLRef MesonRefs

data MesonRefs = MesonRefs
    { infs :: !Int                               -- inference counter
    , cutIn :: !Int                              -- current cut-in limit
    , vstore :: ![(HOLTerm, Int)]                -- HOL var -> FOL var table
    , gstore :: ![(HOLTerm, Int)]                -- generated vars table
    , vcounter :: !Int                           -- next fresh FOL var
    , cstore :: ![(HOLTerm, Int)]                -- HOL const -> FOL const table
    , ccounter :: !Int                           -- next fresh FOL const
    , memory :: ![((Int, HOLTerm), HOLThm)]      -- contraposition cache
    , cont1 :: ![(DeCont1, [(FOLGoal, Tup)])]    -- single-goal cont cache
    , cont2 :: ![(DeCont, [([FOLGoal], Tup)])]   -- multi-goal cont cache
    }

-- Fresh state; falsity is pre-interned as constant 1 (2 is the next free id).
initializeMeson :: TriviaCtxt thry => HOL cls thry MesonRefs
initializeMeson =
    do falseTm <- serve [trivia| F |]
       return $! MesonRefs 0 1 [] [] 0 [(falseTm, 1)] 2 [] [] []

-- meson settings
mesonOffInc, mesonSkew, mesonSplitLimit :: Int
mesonOffInc = 10000
mesonSkew = 3
mesonSplitLimit = 8

-- partitioning utility
qpartition :: forall a. Eq a => (a -> Bool) -> [a] -> [a] -> ([a], [a])
qpartition p m l = tryd ([], l) $ partRec l
  where partRec :: [a] -> Catch ([a], [a])
        partRec [] = fail' "partRec"
        partRec lst@(h:t)
            -- stop (and fail) once the suffix m is reached
            | lst == m = fail' "partRec"
            | p h =
                (do (yes, no) <- partRec t
                    return (h:yes, no)) <|> return ([h], t)
            | otherwise =
                (do (yes, no) <- partRec t
                    return (yes, h:no)) <?> "partRec"

-- nnf substitution functions
folSubst :: [(FOLTerm, Int)] -> FOLTerm -> FOLTerm
folSubst theta tm@(FVar v) = revAssocd v theta tm
folSubst theta (FNApp f args) = FNApp f $ map (folSubst theta) args

folInst :: [(FOLTerm, Int)] -> FOLAtom -> FOLAtom
folInst theta (p, args) = (p, map (folSubst theta) args)

-- Substitution with variable renumbering: variables below mesonOffInc are
-- shifted by offset before lookup.
folSubstBump :: Int -> [(FOLTerm, Int)] -> FOLTerm -> FOLTerm
folSubstBump offset theta tm@(FVar v)
    | v < mesonOffInc =
        let v' = v + offset in
          revAssocd v' theta $ FVar v'
    | otherwise = revAssocd v theta tm
folSubstBump offset theta (FNApp f args) =
    FNApp f $! map (folSubstBump offset theta) args

folInstBump :: Int -> [(FOLTerm, Int)] -> FOLAtom -> FOLAtom
folInstBump offset theta (p, args) =
    let args' = map (folSubstBump offset theta) args in
      (p, args')

-- main nnf unification functions
-- isTriv: True when tm is exactly variable x (possibly through bindings);
-- fails when x occurs inside a compound term (the occurs check).
isTriv :: MonadThrow m => [(FOLTerm, Int)] -> Int -> FOLTerm -> m Bool
isTriv env x (FVar y)
    | y == x = return True
    | otherwise =
        case runCatch $ revAssoc y env of
          Left{} -> return False
          Right t' -> isTriv env x t'
isTriv env x (FNApp _ args) =
    do conds <- mapM (isTriv env x) args
       if or conds
          then fail' "isTriv: cyclic"
          else return False

folUnify :: (MonadCatch m, MonadThrow m)
         => Int -> FOLTerm -> FOLTerm -> [(FOLTerm, Int)] -> m [(FOLTerm, Int)]
folUnify offset (FNApp f fargs) (FNApp g gargs) sofar
    | f /= g = fail' "folUnify"
    | otherwise = foldr2M (folUnify offset) sofar fargs gargs
folUnify offset tm1 (FVar x) sofar =
    case runCatch $ revAssoc x' sofar of
      Right tm2' -> folUnify 0 tm1 tm2' sofar
      Left{} -> do cond <- isTriv sofar x' tm1 <?> "folUnify: cyclic"
                   return $! if cond then sofar else (tm1, x'):sofar
  where x' = x + offset
folUnify offset (FVar x) tm2 sofar =
    case runCatch $ revAssoc x sofar of
      Right tm1' -> folUnify offset tm1' tm2 sofar
      Left{} -> do cond <- isTriv sofar x tm2' <?> "folUnify: cyclic"
                   return $! if cond then sofar else (tm2', x):sofar
  where tm2' = folSubstBump offset [] tm2

-- test for nnf equality
folEq :: [(FOLTerm, Int)] -> FOLTerm -> FOLTerm -> Bool
folEq insts tm1 tm2
    | tm1 == tm2 = True
    | otherwise =
        case (tm1, tm2) of
          (FNApp f fargs, FNApp g gargs) ->
              let conds = zipWith (folEq insts) fargs gargs in
                f == g && and conds
          (_, FVar x) ->
              case runCatch $ revAssoc x insts of
                Right tm2' -> folEq insts tm1 tm2'
                Left{} -> tryd False $ isTriv insts x tm1
          (FVar x, _) ->
              case runCatch $ revAssoc x insts of
                Right tm1' -> folEq insts tm1' tm2
                Left{} -> tryd False $ isTriv insts x tm2

folAtomEq :: [(FOLTerm, Int)] -> FOLAtom -> FOLAtom -> Bool
folAtomEq insts (p1, args1) (p2, args2)
    | p1 /= p2 = False
    | otherwise = and $ zipWith (folEq insts) args1 args2

-- check ancestor list for repetition
checkan :: MonadThrow m => FOLTermEnv -> FOLAtom -> [Rule] -> m [Rule]
checkan insts (p, a) ancestors =
    let p' = negate p
        t' = (p', a) in
      case runCatch $ assoc p' ancestors of
        Left{} -> return ancestors
        Right ours ->
            if or $ map (folAtomEq insts t' . snd . fst) ours
            then fail' "checkan: loop"
            else return ancestors

-- insert new goal's negation in ancestor clause, given refinement
insertan :: BoolCtxt thry => FOLTermEnv -> FOLAtom -> [Rule] -> HOL cls thry [Rule]
insertan insts (p, a) ancestors =
    let p' = negate p
        t' = (p', a)
        (ourancp, otheranc) = tryd ((p', []), ancestors) $
                                remove (\ (pr, _) -> pr == p') ancestors
        ouranc = snd ourancp in
      if or $ map (folAtomEq insts t' . snd . fst) ouranc
      then fail "insertan: loop"
      else do th <- thmTRUTH
              return ((p', (([], t'), (0, th)):ouranc):otheranc)

-- apply a multi-level "graph" instantiation
folSubstPartial :: [(FOLTerm, Int)] -> FOLTerm -> FOLTerm
folSubstPartial insts tm@(FVar v) =
    case runCatch $ revAssoc v insts of
      Left{} -> tm
      Right t -> folSubstPartial insts t
folSubstPartial insts (FNApp f args) =
    FNApp f $ map (folSubstPartial insts) args

-- tease apart local and global instantiations.
separateInsts2 :: Int -> FOLTermEnv -> FOLTermEnv -> (FOLTermEnv, FOLTermEnv)
separateInsts2 offset old new =
    let (loc, glob) = qpartition (\ (_, v) -> offset <= v) old new in
      if glob == old
      then (map (first (folSubstPartial new)) loc, old)
      else (map (first (folSubstPartial new)) loc,
            map (first (folSubstPartial new)) glob)

mkNegated :: FOLAtom -> FOLAtom
mkNegated (p, a) = (negate p, a)

-- Build all contrapositives of a clause, numbering them from n.
mkContraposes :: Int -> HOLThm -> [FOLAtom] -> [FOLAtom]
              -> [(([FOLAtom], FOLAtom), (Int, HOLThm))]
              -> [(([FOLAtom], FOLAtom), (Int, HOLThm))]
mkContraposes _ _ _ [] sofar = sofar
mkContraposes n th used (h:t) sofar =
    let nw = ((map mkNegated (used ++ t), h), (n, th)) in
      mkContraposes (n + 1) th (used ++ [h]) t (nw:sofar)

-- optimize set of clauses: try rules with fewer hypotheses first
optimizeRules :: [Rule] -> [Rule]
optimizeRules = map (second optimizeClauseOrder)
  where optimizeClauseOrder =
            sort (\ ((l1, _), _) ((l2, _), _) -> length l1 <= length l2)

convDISJ_AC :: TheoremsCtxt thry => Conversion cls thry
convDISJ_AC = convAC thmDISJ_ACI

convImp :: TriviaCtxt thry => Conversion cls thry
convImp = convREWR convImp_pth
  where convImp_pth :: TriviaCtxt thry => HOL cls thry HOLThm
        convImp_pth = cacheProof "convImp_pth" ctxtTrivia $
            ruleTAUT [txt| a \/ b <=> ~b ==> a |]

convPush :: TriviaCtxt thry => Conversion cls thry
convPush = convGEN_REWRITE convTOP_SWEEP [convPush_pth1, convPush_pth2]
  where convPush_pth1 :: TriviaCtxt thry => HOL cls thry HOLThm
        convPush_pth1 = cacheProof "convPush_pth1" ctxtTrivia $
            ruleTAUT [txt| ~(a \/ b) <=> ~a /\ ~b |]

        convPush_pth2 :: TriviaCtxt thry => HOL cls thry HOLThm
        convPush_pth2 = cacheProof "convPush_pth2" ctxtTrivia $
            ruleTAUT [txt| ~(~a) <=> a |]

convPull :: TriviaCtxt thry => Conversion cls thry
convPull = convGEN_REWRITE convDEPTH [convPull_pth]
  where convPull_pth :: TriviaCtxt thry => HOL cls thry HOLThm
        convPull_pth = cacheProof "convPull_pth" ctxtTrivia $
            ruleTAUT [txt| ~a \/ ~b <=> ~(a /\ b) |]

convImf :: TriviaCtxt thry => Conversion cls thry
convImf = convREWR convImf_pth
  where convImf_pth :: TriviaCtxt thry => HOL cls thry HOLThm
        convImf_pth = cacheProof "convImf_pth" ctxtTrivia $
            ruleTAUT [txt| ~p <=> p ==> F |]

-- translate saved proof back to HOL
holNegate :: HOLTerm -> HOL cls thry HOLTerm
holNegate tm = (destNeg tm <|> mkNeg tm) <?> "holNegate"

mergeInst :: (FOLTerm, Int) -> [(FOLTerm, Int)] -> [(FOLTerm, Int)]
mergeInst (t, x) current =
    let t' = folSubst current t in
      (t', x) : current

finishRule :: TriviaCtxt thry => HOLThm -> HOL cls thry HOLThm
finishRule = ruleGEN_REWRITE id [finishRule_pth1, finishRule_pth2]
  where finishRule_pth1 :: TriviaCtxt thry => HOL cls thry HOLThm
        finishRule_pth1 = cacheProof "finishRule_pth1" ctxtTrivia $
            ruleTAUT [txt| (~p ==> p) <=> p |]

        finishRule_pth2 :: TriviaCtxt thry => HOL cls thry HOLThm
        finishRule_pth2 = cacheProof "finishRule_pth2" ctxtTrivia $
            ruleTAUT [txt| (p ==> ~p) <=> ~p |]

-- create equality axioms
convImpElim :: TriviaCtxt thry => Conversion cls thry
convImpElim = convREWR convImpElim_pth
  where convImpElim_pth :: TriviaCtxt thry => HOL cls thry HOLThm
        convImpElim_pth = cacheProof "convImpElim_pth" ctxtTrivia $
            ruleTAUT [txt| (a ==> b) <=> ~a \/ b |]

ruleEqElim :: TriviaCtxt thry => HOLThm -> HOL cls thry HOLThm
ruleEqElim = ruleMATCH_MP ruleEqElim_pth
  where ruleEqElim_pth :: TriviaCtxt thry => HOL cls thry HOLThm
        ruleEqElim_pth = cacheProof "ruleEqElim_pth" ctxtTrivia $
            ruleTAUT [txt| (a <=> b) ==> b \/ ~a |]

-- Reflexivity and transitivity-style axioms for =, instantiated at the
-- type of the given equality constant.
createEquivalenceAxioms :: TriviaCtxt thry => (HOLTerm, Int)
                        -> HOL cls thry [HOLThm]
createEquivalenceAxioms (eq, _) =
    (do ths@(th:_) <- sequence eqThms
        veqTm <- rator $ rator (concl th)
        tyins <- typeMatch_NIL (typeOf veqTm) (typeOf eq)
        mapM (primINST_TYPE_FULL tyins) ths)
    <?> "createEquivalenceAxioms"
  where eqThms :: TriviaCtxt thry => [HOL cls thry HOLThm]
        eqThms = cacheProofs ["eqThms1", "eqThms2"] ctxtTrivia .
            ruleCONJUNCTS $ prove
              [txt| (x:A = x) /\ (~(x:A = y) \/ ~(x = z) \/ (y = z)) |]
              (tacREWRITE_NIL `_THEN`
               tacASM_CASES [txt| x:A = y |] `_THEN`
               tacASM_REWRITE_NIL `_THEN`
               tacCONV (Conv ruleTAUT))

-- Collect function constants (with arities) occurring in a term.
tmConsts :: HOLTerm -> [(HOLTerm, Int)] -> [(HOLTerm, Int)]
tmConsts tm acc =
    let (fn, args) = stripComb tm in
      if null args then acc
      else foldr tmConsts (insert (fn, length args) acc) args

-- Collect predicate and function constants from a formula.
fmConsts :: HOLTerm -> ([(HOLTerm, Int)], [(HOLTerm, Int)])
         -> ([(HOLTerm, Int)], [(HOLTerm, Int)])
fmConsts (Forall _ x) acc = fmConsts x acc
fmConsts (Exists _ x) acc = fmConsts x acc
fmConsts (l :/\ r) acc = fmConsts l $ fmConsts r acc
fmConsts (l :\/ r) acc = fmConsts l $ fmConsts r acc
fmConsts (l :==> r) acc = fmConsts l $ fmConsts r acc
fmConsts (Neg x) acc = fmConsts x acc
fmConsts tm@(l := r) acc
    -- boolean equality is a connective; other equality is an atom
    | K.typeOf l == tyBool = fmConsts r $ fmConsts l acc
    | otherwise = fmConstsBad tm acc
fmConsts tm acc = fmConstsBad tm acc

fmConstsBad :: HOLTerm -> ([(HOLTerm, Int)], [(HOLTerm, Int)])
            -> ([(HOLTerm, Int)], [(HOLTerm, Int)])
fmConstsBad tm acc@(preds, funs) =
    let (p, args) = stripComb tm in
      if null args then acc
      else (insert (p, length args) preds, foldr tmConsts funs args)

-- Congruence axiom for a constant of the given arity; pflag selects
-- predicate (True) vs function (False) treatment.
createCongruenceAxiom :: TriviaCtxt thry => Bool -> (HOLTerm, Int)
                      -> HOL cls thry HOLThm
createCongruenceAxiom pflag (tm, len) =
    (do (atys, _) <- splitListM destFunTy =<< typeOf tm
        (ctys, _) <- trySplitAt len atys
        largs <- mapM genVar ctys
        rargs <- mapM genVar ctys
        th1 <- primREFL tm
        ths <- mapM (primASSUME <=< uncurry mkEq) $ zip largs rargs
        th2 <- foldlM primMK_COMB th1 ths
        th3@(Thm asl _) <- if pflag then ruleEqElim th2 else return th2
        foldrM (\ e th -> ruleCONV convImpElim $ ruleDISCH e th) th3 asl)
    <?> "createCongruenceAxiom"

createEqualityAxioms :: TriviaCtxt thry => [HOLTerm] -> HOL cls thry [HOLThm]
createEqualityAxioms tms = note "createEqualityAxioms" $
    let (preds, funs) = foldr fmConsts ([], []) tms
        (eqs0, noneqs) = partition eqPred preds in
      if null eqs0 then return []
      else do pcongs <- mapM (createCongruenceAxiom True) noneqs
              fcongs <- mapM (createCongruenceAxiom False) funs
              let (preds1, _) = foldr fmConsts ([], []) $
                                  map concl (pcongs ++ fcongs)
                  eqs1 = filter eqPred preds1
                  eqs = eqs0 `union` eqs1
              equivs <- foldrM (\ x ys ->
                          do xs <- createEquivalenceAxioms x
                             return $ union xs ys) [] eqs
              return $! equivs ++ pcongs ++ fcongs
  where eqPred :: (HOLTerm, Int) -> Bool
        eqPred (Const "=" _, _) = True
        eqPred _ = False

-- polymorph tactic
grabConstants :: HOLTerm -> [HOLTerm] -> [HOLTerm]
grabConstants (Forall _ bod) acc = grabConstants bod acc
grabConstants (Exists _ bod) acc = grabConstants bod acc
grabConstants (l :<=> r) acc = grabConstants r $ grabConstants l acc
grabConstants (l :==> r) acc = grabConstants r $ grabConstants l acc
grabConstants (l :/\ r) acc = grabConstants r $ grabConstants l acc
grabConstants (l :\/ r) acc = grabConstants r $ grabConstants l acc
grabConstants (Neg t) acc = grabConstants t acc
grabConstants tm acc = findTerms isConst tm `union` acc

matchConsts :: (HOLTerm, HOLTerm) -> HOL cls thry SubstTrip
matchConsts (Const s1 ty1, Const s2 ty2)
    | s1 == s2 = typeMatch_NIL ty1 ty2
    | otherwise = fail' "matchConsts: consts of different name"
matchConsts _ = fail' "matchConsts"

-- Instantiate a polymorphic lemma at the type instances suggested by the
-- constants in mconsts.
polymorph :: [HOLTerm] -> HOLThm -> HOL cls thry [HOLThm]
polymorph mconsts th =
    let tvs = typeVarsInTerm (concl th) \\
              (unions . map typeVarsInTerm $ hyp th) in
      if null tvs then return [th]
      else let pconsts = grabConstants (concl th) [] in
             do tyins <- mapFilterM matchConsts $ allpairs (,) pconsts mconsts
                tyins' <- mapM (`primINST_TYPE_FULL` th) tyins
                let ths' = setify' ((<=) `on` destThm) (==) tyins'
                if null ths'
                then printDebug "No useful-looking instantiation of lemma" $
                       return [th]
                else return ths'

polymorphAll :: [HOLTerm] -> [HOLThm] -> [HOLThm] -> HOL cls thry [HOLThm]
polymorphAll _ [] acc = return acc
polymorphAll mconsts (th:ths) acc =
    do ths' <- polymorph mconsts th
       let mconsts' = foldr (grabConstants . concl) mconsts ths'
       polymorphAll mconsts' ths (union' (==) ths' acc)

tacPOLY_ASSUME :: BoolCtxt thry => [HOLThm] -> Tactic cls thry
tacPOLY_ASSUME ths gl@(Goal asl _) =
    let mconsts = foldr (grabConstants . concl . snd) [] asl in
      do ths' <- polymorphAll mconsts ths []
         _MAP_EVERY tacASSUME ths' gl

tacCONJUNCTS_THEN' :: BoolCtxt thry => ThmTactic HOLThm cls thry
                   -> ThmTactic HOLThm cls thry
tacCONJUNCTS_THEN' ttac cth gl =
    do cthl <- ruleCONJUNCT1 cth
       cthr <- ruleCONJUNCT2 cth
       (ttac cthl `_THEN` ttac cthr) gl

-- The core MESON search tactic.  All helpers below live in the where
-- clause because they share the mutable state reference `ref`.
tacPURE_MESON :: TriviaCtxt thry => MesonState -> Int -> Int -> Int
              -> Tactic cls thry
tacPURE_MESON ref minVal maxVal inc goal =
    do resetVars
       resetConsts
       flushCaches
       (_FIRST_ASSUM tacCONTR `_ORELSE`
        (\ g@(Goal asl _) ->
             do th <- simpleMesonRefute $ map snd asl
                tacACCEPT th g)) goal
  where
    -- Refute the assumption list: add equality axioms, convert to FOL
    -- rules, search, and translate the found proof back to HOL.
    simpleMesonRefute :: TriviaCtxt thry => [HOLThm] -> HOL cls thry HOLThm
    simpleMesonRefute ths =
        do dcutin <- liftM cutIn $ readHOLRef ref
           clearContraposCache
           modifyHOLRef ref $ \ st -> st { infs = 0 }
           ths' <- liftM (ths ++) . createEqualityAxioms $ map concl ths
           rules <- liftM optimizeRules $ folOfHOLClauses ths'
           (proof, (insts, _, _)) <- solveGoal rules inc (1, [])
           modifyHOLRef ref $ \ st -> st { cutIn = dcutin }
           mesonToHOL insts proof

    -- Iterative deepening from minVal up to maxVal in steps of incsize.
    solveGoal :: BoolCtxt thry => [Rule] -> Int -> FOLAtom
              -> HOL cls thry (FOLGoal, Tup)
    solveGoal rules incsize g = solve minVal (g, [])
      where solve :: BoolCtxt thry => Int -> (FOLAtom, [Rule])
                  -> HOL cls thry (FOLGoal, Tup)
            solve n gl
                | n > maxVal = fail "solveGoal: too deep"
                | otherwise =
                    (do is <- liftM infs $ readHOLRef ref
                        printDebug (show is ++ "..") $ return ()
                        gi <- expandGoal rules gl 100000 n Return1
                        is' <- liftM infs $ readHOLRef ref
                        printDebug ("solved at " ++ show is') $ return ()
                        return gi) <|> solve (n + incsize) gl

    expandGoal :: BoolCtxt thry => [Rule] -> (FOLAtom, [Rule]) -> Int -> Int
               -> DeCont1 -> HOL cls thry (FOLGoal, Tup)
    expandGoal rules g maxdep maxinf =
        expandGoalRec rules maxdep (g, ([], 2 * mesonOffInc, maxinf))

    expandGoalRec :: BoolCtxt thry => [Rule] -> Int -> State -> DeCont1
                  -> HOL cls thry (FOLGoal, Tup)
    expandGoalRec rules depth state@((g, _), (insts, offset, size)) cont
        | depth < 0 = fail "expandGoal: too deep"
        | otherwise =
            mesonExpand rules state
              (\ apprule newstate@(_, (pinsts, _, _)) ->
                   let cont' = cacheconts $ TopRec insts offset pinsts g
                                              apprule cont size in
                     expandGoals rules (depth - 1) newstate cont')

    -- Expand a list of goals, splitting the inference budget between the
    -- two halves once it exceeds the cut-in threshold.
    expandGoals :: BoolCtxt thry => [Rule] -> Int
                -> ([(FOLAtom, [Rule])], Tup) -> DeCont
                -> HOL cls thry (FOLGoal, Tup)
    expandGoals _ _ ([], tup) cont = deCont cont ([], tup)
    expandGoals rules depth ([g], tup) cont =
        expandGoalRec rules depth (g, tup) $ Base cont
    expandGoals rules depth (gl@(g:gs), tup@(insts, offset, size)) cont =
        do dcutin <- liftM cutIn $ readHOLRef ref
           if size >= dcutin
           then let lsize = size `div` mesonSkew
                    rsize = size - lsize in
                  do (lgoals, rgoals) <- trySplitAt (length gl `div` 2) gl
                     -- NOTE(review): both branches start with lsize; the
                     -- leftover rsize is restored via Goals1/Goals3 in
                     -- deCont -- confirm against the HOL Light original.
                     (let cont' = cacheconts $ Goals1 cont rules depth
                                                 rgoals rsize in
                        expandGoals rules depth
                          (lgoals, (insts, offset, lsize)) cont') <|>
                      (let cont' = cacheconts $ Goals3 rsize lsize cont
                                                  rules depth lgoals in
                         expandGoals rules depth
                           (rgoals, (insts, offset, lsize)) cont')
           else let cont' = cachecont $ Goals11 cont rules depth gs in
                  expandGoalRec rules depth (g, tup) cont'

    -- Try to close a goal against an ancestor (unit) or by rule expansion.
    mesonExpand :: forall cls thry. BoolCtxt thry => [Rule] -> State
                -> ((Int, HOLThm) -> ([(FOLAtom, [Rule])], Tup)
                -> HOL cls thry (FOLGoal, Tup))
                -> HOL cls thry (FOLGoal, Tup)
    mesonExpand rules ((g, ancestors), tup@(insts, offset, size)) cont =
        let pr = fst g in
          do newancestors <- insertan insts g ancestors
             let newstate = ((g, newancestors), tup)
             (if pr > 0 then throwM Fail
              else case lookup pr ancestors of
                     Nothing -> throwM Fail
                     Just arules -> mesonExpandCont 0 arules newstate cont)
               `catch` errCase pr newstate
      where errCase :: Int -> State -> MesonErr -> HOL cls thry (FOLGoal, Tup)
            errCase _ _ Cut = fail "mesonExpand"
            errCase pr newstate Fail =
                do prule <- assoc pr rules
                   let crules = filter (\ ((h, _), _) ->
                                            length h <= size) prule
                   mesonExpandCont offset crules newstate cont

    mesonExpandCont :: Int -> [(([FOLAtom], FOLAtom), b)] -> State
                    -> (b -> ([(FOLAtom, [Rule])], Tup)
                    -> HOL cls thry (FOLGoal, Tup))
                    -> HOL cls thry (FOLGoal, Tup)
    mesonExpandCont loffset rules state cont =
        tryFind (\ r -> cont (snd r) =<<
                          mesonSingleExpand loffset r state) rules
        <|> throwM Fail

    -- Apply one contrapositive rule: unify its conclusion with the goal
    -- and return the instantiated hypotheses as new subgoals.
    mesonSingleExpand :: Int -> (([FOLAtom], FOLAtom), b) -> State
                      -> HOL cls thry ([(FOLAtom, [Rule])], Tup)
    mesonSingleExpand loffset rule
                      (((_, ftms), ancestors), (insts, offset, size)) =
        let ((hyps, conc), _) = rule in
          do allEnv <- foldl2M (\ c a b -> folUnify loffset a b c)
                         insts ftms $ snd conc
             let (loc, glob) = separateInsts2 offset insts allEnv
                 mkIHyp h =
                     let h' = folInstBump offset loc h in
                       do an <- checkan insts h' ancestors
                          return (h', an)
             newhyps <- mapM mkIHyp hyps
             modifyHOLRef ref $ \ st -> st { infs = 1 + infs st }
             return (newhyps, (glob, offset + mesonOffInc,
                               size - length hyps))

    flushCaches :: HOL cls thry ()
    flushCaches =
        modifyHOLRef ref $ \ st -> st { cont1 = [], cont2 = [] }

    replaceMem1 :: DeCont1 -> [(FOLGoal, Tup)] -> HOL cls thry ()
    replaceMem1 f m =
        modifyHOLRef ref $ \ st -> st { cont1 = replaceMemRec $ cont1 st }
      where replaceMemRec [] = [(f, m)]
            replaceMemRec (x:xs)
                | fst x == f = (f, m) : xs
                | otherwise = x : replaceMemRec xs

    replaceMem :: DeCont -> [([FOLGoal], Tup)] -> HOL cls thry ()
    replaceMem f m =
        modifyHOLRef ref $ \ st -> st { cont2 = replaceMemRec $ cont2 st }
      where replaceMemRec [] = [(f, m)]
            replaceMemRec (x:xs)
                | fst x == f = (f, m) : xs
                | otherwise = x : replaceMemRec xs

    -- Interpreter for single-goal continuations.
    deCont1 :: BoolCtxt thry => DeCont1 -> ContMem1 cls thry
    deCont1 Return1 x = return x
    deCont1 (Base cont) (g', stup) = deCont cont ([g'], stup)
    deCont1 (Goals11 cont rules depth gs) (g', stup) =
        let cont' = cacheconts $ Goals5 cont g' in
          expandGoals rules depth (gs, stup) cont'
    deCont1 (Cache1 cont) input@(_, (insts, _, size)) =
        do cache <- liftM cont1 $ readHOLRef ref
           case lookup cont cache of
             Nothing ->
                 do modifyHOLRef ref $ \ st ->
                        st { cont1 = [(cont, [input])] }
                    deCont1 cont input
             Just m ->
                 -- a previously seen (insts, size) pair means this branch
                 -- cannot do better: prune
                 if any (\ (_, (insts', _, size')) ->
                             insts == insts' && (size <= size')) m
                 then fail "cacheconts"
                 else do replaceMem1 cont m
                         deCont1 cont input

    -- Interpreter for multi-goal continuations.
    deCont :: BoolCtxt thry => DeCont -> ContMemMany cls thry
    deCont (TopRec insts offset pinsts g apprule cont size)
           (gs, (newinsts, newoffset, newsize)) =
        let (locin, globin) = separateInsts2 offset pinsts newinsts
            g' = Subgoal g gs apprule offset locin in
          if globin == insts && null gs
          then deCont1 cont (g', (globin, newoffset, size)) <|> throwM Cut
          else deCont1 cont (g', (globin, newoffset, newsize)) <|> throwM Fail
    deCont (Goals1 cont rules depth rgoals rsize) (lg', (i, off, n)) =
        let cont' = cacheconts $ Goals2 cont lg' in
          expandGoals rules depth (rgoals, (i, off, n + rsize)) cont'
    deCont (Goals2 cont lg') (rg', ztup) =
        deCont cont (lg' ++ rg', ztup)
    deCont (Goals3 rsize lsize cont rules depth lgoals) (rg', (i, off, n)) =
        let cont' = cacheconts $ Goals4 n rsize lsize cont rg' in
          expandGoals rules depth (lgoals, (i, off, n + rsize)) cont'
    deCont (Goals4 n rsize lsize cont rg') (lg', ztup@(_, _, fsize)) =
        if n + rsize <= lsize + fsize
        then fail "repetition of demigoal pair"
        else deCont cont (lg' ++ rg', ztup)
    deCont (Goals5 cont g') (gs', ftup) =
        deCont cont (g':gs', ftup)
    deCont (CacheMany cont) input@(_, (insts, _, size)) =
        do cache <- liftM cont2 $ readHOLRef ref
           case lookup cont cache of
             Nothing ->
                 do modifyHOLRef ref $ \ st ->
                        st { cont2 = [(cont, [input])] }
                    deCont cont input
             Just m ->
                 if any (\ (_, (insts', _, size')) ->
                             insts == insts' && (size <= size')) m
                 then fail "cacheconts"
                 else do replaceMem cont m
                         deCont cont input

    clearContraposCache :: HOL cls thry ()
    clearContraposCache =
        modifyHOLRef ref $ \ st -> st { memory = [] }

    -- Build (and memoize) the HOL theorem for contrapositive n of th.
    makeHOLContrapos :: TriviaCtxt thry => Int -> HOLThm
                     -> HOL cls thry HOLThm
    makeHOLContrapos n th =
        let tm = concl th
            key = (n, tm) in
          do m <- liftM memory $ readHOLRef ref
             (assoc key m) <|>
               (if n < 0 then ruleCONV (convPull `_THEN` convImf) th
                else let djs = disjuncts tm in
                       do acth <- if n == 0 then return th
                                  else do (ldjs, rdj:rdjs) <- trySplitAt n djs
                                          let ndjs = rdj : (ldjs ++ rdjs)
                                          th1 <- runConv convDISJ_AC .
                                                   mkEq tm $ listMkDisj ndjs
                                          primEQ_MP th1 th
                          fth <- if length djs == 1 then return acth
                                 else ruleCONV (convImp `_THEN` convPush) acth
                          modifyHOLRef ref $ \ st ->
                              st { memory = (key, fth) : m }
                          return fth)

    resetVars :: HOL cls thry ()
    resetVars = modifyHOLRef ref $ \ st ->
        st { vstore = [], gstore = [], vcounter = 0 }

    resetConsts :: TriviaCtxt thry => HOL cls thry ()
    resetConsts =
        do falseTm <- serve [trivia| F |]
           modifyHOLRef ref $ \ st ->
               st { cstore = [(falseTm, 1)], ccounter = 2 }

    incVCounter :: HOL cls thry Int
    incVCounter =
        do n <- liftM vcounter $ readHOLRef ref
           let m = n + 1
           if m >= mesonOffInc
           then fail "incVCounter: too many variables"
           else do modifyHOLRef ref $ \ st -> st { vcounter = m }
                   return n

    -- Translate the FOL proof tree back into a HOL theorem.
    mesonToHOL :: TriviaCtxt thry => [(FOLTerm, Int)] -> FOLGoal
               -> HOL cls thry HOLThm
    mesonToHOL insts (Subgoal g gs (n, th) _ locin) =
        let newInsts = foldr mergeInst insts locin
            g' = folInst newInsts g in
          do holG <- holOfLiteral g'
             ths <- mapM (mesonToHOL newInsts) gs
             truthTh <- thmTRUTH
             hth <- if th == truthTh then primASSUME holG
                    else do cth <- makeHOLContrapos n th
                            if null ths then return cth
                            else ruleMATCH_MP cth $ foldr1M ruleCONJ ths
             ith <- rulePART_MATCH return hth holG
             tm <- holNegate $ concl ith
             finishRule =<< ruleDISCH tm ith

    folOfHOLClause :: HOLThm
                   -> HOL cls thry [(([FOLAtom], FOLAtom), (Int, HOLThm))]
    folOfHOLClause th =
        let lconsts = catFrees $ hyp th
            tm = concl th
            hlits = disjuncts tm in
          do flits <- mapM (folOfLiteral [] lconsts) hlits
             let basics = mkContraposes 0 th [] flits []
             return $ if all (\ (p, _) -> p < 0) flits
                      then ((map mkNegated flits, (1, [])), (-1, th)):basics
                      else basics

    folOfHOLClauses :: [HOLThm] -> HOL cls thry [Rule]
    folOfHOLClauses thms =
        do rawrules <- foldrM (\ x ys ->
                         do xs <- folOfHOLClause x
                            return $! union xs ys) [] thms
           let prs = setify $ map (fst . snd . fst) rawrules
               prules = map (\ t -> (t, filter ((== t) . fst . snd . fst)
                                            rawrules)) prs
               srules = sort (\ (p, _) (q, _) -> abs p <= abs q) prules
           return srules

    holOfTerm :: FOLTerm -> HOL cls thry HOLTerm
    holOfTerm (FVar v) = holOfVar v
    holOfTerm (FNApp f args) =
        do f' <- holOfConst f
           args' <- mapM holOfTerm args
           listMkComb f' args'

    folOfTerm :: [HOLTerm] -> [HOLTerm] -> HOLTerm -> HOL cls thry FOLTerm
    folOfTerm env consts tm =
        if isVar tm && (tm `notElem` consts)
        then liftM FVar $ folOfVar tm
        else let (f, args) = stripComb tm in
               if f `elem` env then fail "folOfTerm: higher order"
               else do ff <- folOfConst f
                       args' <- mapM (folOfTerm env consts) args
                       return $! FNApp ff args'

    holOfAtom :: FOLAtom -> HOL cls thry HOLTerm
    holOfAtom (p, args) =
        do p' <- holOfConst p
           args' <- mapM holOfTerm args
           listMkComb p' args'

    folOfAtom :: [HOLTerm] -> [HOLTerm] -> HOLTerm -> HOL cls thry FOLAtom
    folOfAtom env consts tm =
        let (f, args) = stripComb tm in
          if f `elem` env then fail "folOfAtom: higher order"
          else do ff <- folOfConst f
                  args' <- mapM (folOfTerm env consts) args
                  return (ff, args')

    holOfLiteral :: FOLAtom -> HOL cls thry HOLTerm
    holOfLiteral fa@(p, args)
        | p < 0 = mkNeg $ holOfAtom (negate p, args)
        | otherwise = holOfAtom fa

    folOfLiteral :: [HOLTerm] -> [HOLTerm] -> HOLTerm -> HOL cls thry FOLAtom
    folOfLiteral env consts (Neg tm') =
        do (p, a) <- folOfAtom env consts tm'
           return (negate p, a)
    folOfLiteral env consts tm = folOfAtom env consts tm

    holOfConst :: Int -> HOL cls thry HOLTerm
    holOfConst c = note "holOfConst" $
        do cs <- liftM cstore $ readHOLRef ref
           revAssoc c cs

    folOfConst :: HOLTerm -> HOL cls thry Int
    folOfConst c = note "folOfConst" $
        do currentconsts <- liftM cstore $ readHOLRef ref
           (assoc c currentconsts) <|>
             (do n <- liftM ccounter $ readHOLRef ref
                 modifyHOLRef ref $ \ st ->
                     st { ccounter = n + 1, cstore = (c, n):currentconsts }
                 return n)

    holOfVar :: Int -> HOL cls thry HOLTerm
    holOfVar v = holOfVar' v <|>
        -- offset-bumped variables map back to a fresh genvar of the same type
        let v' = v `mod` mesonOffInc in
          do hv' <- holOfVar' v'
             gv <- genVar $ typeOf hv'
             modifyHOLRef ref $ \ st -> st { gstore = (gv, v) : gstore st }
             return gv

    holOfVar' :: Int -> HOL cls thry HOLTerm
    holOfVar' v = note "holOfVar" $
        do vs <- liftM vstore $ readHOLRef ref
           (revAssoc v vs) <|>
             (do gs <- liftM gstore $ readHOLRef ref
                 revAssoc v gs)

    folOfVar :: HOLTerm -> HOL cls thry Int
    folOfVar v =
        do currentvars <- liftM vstore $ readHOLRef ref
           case lookup v currentvars of
             Just x -> return x
             Nothing ->
                 do n <- incVCounter
                    modifyHOLRef ref $ \ st ->
                        st { vstore = (v, n) : currentvars }
                    return n

-- Rewrite away boolean-valued quantifiers and propositional clutter.
convQUANT_BOOL :: ClassicCtxt thry => Conversion cls thry
convQUANT_BOOL =
    convPURE_REWRITE [ thmFORALL_BOOL, thmEXISTS_BOOL, thmCOND_CLAUSES
                     , thmNOT_CLAUSES, thmIMP_CLAUSES, thmAND_CLAUSES
                     , thmOR_CLAUSES, thmEQ_CLAUSES, thmFORALL_SIMP
                     , thmEXISTS_SIMP ]

-- Split conjunctions always, disjunctions at most n times.
tacSPLIT :: BoolCtxt thry => Int -> Tactic cls thry
tacSPLIT n =
    (_FIRST_X_ASSUM (tacCONJUNCTS_THEN' tacASSUME) `_THEN` tacSPLIT n)
    `_ORELSE`
    (if n > 0 then _FIRST_X_ASSUM tacDISJ_CASES `_THEN` tacSPLIT (n-1)
     else _NO)
    `_ORELSE` _ALL

-- Full MESON pipeline: normalization passes followed by the pure search.
tacGEN_MESON :: (TriviaCtxt thry, HOLThmRep thm cls thry)
             => Int -> Int -> Int -> [thm] -> Tactic cls thry
tacGEN_MESON minVal maxVal step ths gl =
    do val <- initializeMeson
       ref <- newHOLRef val
       ths' <- mapM ruleGEN_ALL ths
       (tacREFUTE_THEN tacASSUME `_THEN`
        tacPOLY_ASSUME ths' `_THEN`
        (\ g@(Goal asl _) ->
             _MAP_EVERY (tacUNDISCH . concl . snd) asl g) `_THEN`
        tacSELECT_ELIM `_THEN`
        (\ g@(Goal _ w) ->
             _MAP_EVERY (\ v -> tacSPEC (v,v)) (frees w) g) `_THEN`
        tacCONV (convPRESIMP `_THEN`
                 convTOP_DEPTH convBETA `_THEN`
                 convLAMBDA_ELIM `_THEN`
                 convCONDS_CELIM `_THEN`
                 convQUANT_BOOL) `_THEN`
        _REPEAT (tacGEN `_ORELSE` tacDISCH) `_THEN`
        tacREFUTE_THEN tacASSUME `_THEN`
        tacRULE_ASSUM (ruleCONV (convNNF `_THEN` convSKOLEM)) `_THEN`
        _REPEAT (_FIRST_X_ASSUM tacCHOOSE) `_THEN`
        tacASM_FOL `_THEN`
        tacSPLIT mesonSplitLimit `_THEN`
        tacRULE_ASSUM (ruleCONV (convPRENEX `_THEN` convWEAK_CNF)) `_THEN`
        tacRULE_ASSUM (repeatM (\ th@(Thm _ (Forall x _)) ->
                                    do tm <- genVar $ typeOf x
                                       ruleSPEC tm th)) `_THEN`
        _REPEAT (_FIRST_X_ASSUM (tacCONJUNCTS_THEN' tacASSUME)) `_THEN`
        tacRULE_ASSUM (ruleCONV (convASSOC thmDISJ_ASSOC)) `_THEN`
        _REPEAT (_FIRST_X_ASSUM tacSUBST_VAR) `_THEN`
        tacPURE_MESON ref minVal maxVal step) gl

-- common meson tactics
tacASM_MESON :: (TriviaCtxt thry, HOLThmRep thm cls thry)
             => [thm] -> Tactic cls thry
tacASM_MESON = tacGEN_MESON 0 50 1

tacASM_MESON_NIL :: TriviaCtxt thry => Tactic cls thry
tacASM_MESON_NIL = tacASM_MESON ([] :: [HOLThm])

tacMESON :: (TriviaCtxt thry, HOLThmRep thm cls thry)
         => [thm] -> Tactic cls thry
tacMESON ths = _POP_ASSUM_LIST (const _ALL) `_THEN` tacASM_MESON ths

tacMESON_NIL :: TriviaCtxt thry => Tactic cls thry
tacMESON_NIL = tacMESON ([] :: [HOLThm])

ruleMESON :: (TriviaCtxt thry, HOLThmRep thm cls thry,
              HOLTermRep tm cls thry) => [thm] -> tm -> HOL cls thry HOLThm
ruleMESON ths tm = prove tm $ tacMESON ths

ruleMESON_NIL :: (TriviaCtxt thry, HOLTermRep tm cls thry)
              => tm -> HOL cls thry HOLThm
ruleMESON_NIL tm = prove tm tacMESON_NIL
ecaustin/haskhol-deductive
src/HaskHOL/Lib/Meson.hs
Haskell
bsd-2-clause
37,384
{-# LANGUAGE DeriveDataTypeable #-}

module Propellor.Property.Ssh (
	installed,
	restarted,
	PubKeyText,
	SshKeyType(..),
	-- * Daemon configuration
	sshdConfig,
	ConfigKeyword,
	setSshdConfigBool,
	setSshdConfig,
	RootLogin(..),
	permitRootLogin,
	passwordAuthentication,
	noPasswords,
	listenPort,
	-- * Host keys
	randomHostKeys,
	hostKeys,
	hostKey,
	hostPubKey,
	getHostPubKey,
	-- * User keys and configuration
	userKeys,
	userKeyAt,
	knownHost,
	unknownHost,
	authorizedKeysFrom,
	unauthorizedKeysFrom,
	authorizedKeys,
	authorizedKey,
	unauthorizedKey,
	hasAuthorizedKeys,
	getUserPubKeys,
) where

import Propellor.Base
import qualified Propellor.Property.File as File
import qualified Propellor.Property.Service as Service
import qualified Propellor.Property.Apt as Apt
import Propellor.Property.User
import Propellor.Types.Info
import Utility.FileMode

import System.PosixCompat
import qualified Data.Map as M
import qualified Data.Set as S
import Data.List

-- | Ensures the ssh package is installed.
installed :: Property NoInfo
installed = Apt.installed ["ssh"]

-- | Restarts the ssh service.
restarted :: Property NoInfo
restarted = Service.restarted "ssh"

-- Render a Bool in sshd_config's yes/no syntax.
sshBool :: Bool -> String
sshBool True = "yes"
sshBool False = "no"

sshdConfig :: FilePath
sshdConfig = "/etc/ssh/sshd_config"

type ConfigKeyword = String

setSshdConfigBool :: ConfigKeyword -> Bool -> Property NoInfo
setSshdConfigBool setting allowed = setSshdConfig setting (sshBool allowed)

-- | Sets a single keyword in sshd_config to the given value, removing any
-- other lines that set the same keyword, and restarts sshd on change.
setSshdConfig :: ConfigKeyword -> String -> Property NoInfo
setSshdConfig setting val = File.fileProperty desc f sshdConfig
	`onChange` restarted
  where
	desc = unwords [ "ssh config:", setting, val ]
	cfgline = setting ++ " " ++ val
	-- keep the wanted line, drop any other line setting this keyword
	wantedline s
		| s == cfgline = True
		| (setting ++ " ") `isPrefixOf` s = False
		| otherwise = True
	f ls
		| cfgline `elem` ls = filter wantedline ls
		| otherwise = filter wantedline ls ++ [cfgline]

data RootLogin
	= RootLogin Bool -- ^ allow or prevent root login
	| WithoutPassword -- ^ disable password authentication for root, while allowing other authentication methods
	| ForcedCommandsOnly -- ^ allow root login with public-key authentication, but only if a forced command has been specified for the public key

permitRootLogin :: RootLogin -> Property NoInfo
permitRootLogin (RootLogin b) = setSshdConfigBool "PermitRootLogin" b
permitRootLogin WithoutPassword = setSshdConfig "PermitRootLogin" "without-password"
permitRootLogin ForcedCommandsOnly = setSshdConfig "PermitRootLogin" "forced-commands-only"

passwordAuthentication :: Bool -> Property NoInfo
passwordAuthentication = setSshdConfigBool "PasswordAuthentication"

-- | Configure ssh to not allow password logins.
--
-- To prevent lock-out, this is done only once root's
-- authorized_keys is in place.
noPasswords :: Property NoInfo
noPasswords = check (hasAuthorizedKeys (User "root")) $
	passwordAuthentication False

-- The user's ~/.ssh directory.
dotDir :: User -> IO FilePath
dotDir user = do
	h <- homedir user
	return $ h </> ".ssh"

-- A file within the user's ~/.ssh directory.
dotFile :: FilePath -> User -> IO FilePath
dotFile f user = do
	d <- dotDir user
	return $ d </> f

-- | Makes the ssh server listen on a given port, in addition to any other
-- ports it is configured to listen on.
--
-- Revert to prevent it listening on a particular port.
listenPort :: Int -> RevertableProperty NoInfo
listenPort port = enable <!> disable
  where
	portline = "Port " ++ show port
	enable = sshdConfig `File.containsLine` portline
		`describe` ("ssh listening on " ++ portline)
		`onChange` restarted
	disable = sshdConfig `File.lacksLine` portline
		`describe` ("ssh not listening on " ++ portline)
		`onChange` restarted

hasAuthorizedKeys :: User -> IO Bool
hasAuthorizedKeys = go <=< dotFile "authorized_keys"
  where
	-- NOTE(review): lazy readFile; the whole contents are forced by
	-- null so the handle is not leaked in practice, but a strict read
	-- would be more robust.
	go f = not . null <$> catchDefaultIO "" (readFile f)

-- | Blows away existing host keys and makes new ones.
-- Useful for systems installed from an image that might reuse host keys.
-- A flag file is used to only ever do this once.
randomHostKeys :: Property NoInfo randomHostKeys = flagFile prop "/etc/ssh/.unique_host_keys" `onChange` restarted where prop = property "ssh random host keys" $ do void $ liftIO $ boolSystem "sh" [ Param "-c" , Param "rm -f /etc/ssh/ssh_host_*" ] ensureProperty $ scriptProperty [ "DPKG_MAINTSCRIPT_NAME=postinst DPKG_MAINTSCRIPT_PACKAGE=openssh-server /var/lib/dpkg/info/openssh-server.postinst configure" ] -- | The text of a ssh public key, for example, "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIB3BJ2GqZiTR2LEoDXyYFgh/BduWefjdKXAsAtzS9zeI" type PubKeyText = String -- | Installs the specified list of ssh host keys. -- -- The corresponding private keys come from the privdata. -- -- Any host keys that are not in the list are removed from the host. hostKeys :: IsContext c => c -> [(SshKeyType, PubKeyText)] -> Property HasInfo hostKeys ctx l = propertyList desc $ catMaybes $ map (\(t, pub) -> Just $ hostKey ctx t pub) l ++ [cleanup] where desc = "ssh host keys configured " ++ typelist (map fst l) typelist tl = "(" ++ unwords (map fromKeyType tl) ++ ")" alltypes = [minBound..maxBound] staletypes = let have = map fst l in filter (`notElem` have) alltypes removestale b = map (File.notPresent . flip keyFile b) staletypes cleanup | null staletypes || null l = Nothing | otherwise = Just $ toProp $ property ("any other ssh host keys removed " ++ typelist staletypes) $ ensureProperty $ combineProperties desc (removestale True ++ removestale False) `onChange` restarted -- | Installs a single ssh host key of a particular type. -- -- The public key is provided to this function; -- the private key comes from the privdata; hostKey :: IsContext c => c -> SshKeyType -> PubKeyText -> Property HasInfo hostKey context keytype pub = combineProperties desc [ hostPubKey keytype pub , toProp $ property desc $ install File.hasContent True (lines pub) , withPrivData (keysrc "" (SshPrivKey keytype "")) context $ \getkey -> property desc $ getkey $ install File.hasContentProtected False . 
privDataLines ] `onChange` restarted where desc = "ssh host key configured (" ++ fromKeyType keytype ++ ")" install writer ispub keylines = do let f = keyFile keytype ispub ensureProperty $ writer f (keyFileContent keylines) keysrc ext field = PrivDataSourceFileFromCommand field ("sshkey"++ext) ("ssh-keygen -t " ++ sshKeyTypeParam keytype ++ " -f sshkey") -- Make sure that there is a newline at the end; -- ssh requires this for some types of private keys. keyFileContent :: [String] -> [File.Line] keyFileContent keylines = keylines ++ [""] keyFile :: SshKeyType -> Bool -> FilePath keyFile keytype ispub = "/etc/ssh/ssh_host_" ++ fromKeyType keytype ++ "_key" ++ ext where ext = if ispub then ".pub" else "" -- | Indicates the host key that is used by a Host, but does not actually -- configure the host to use it. Normally this does not need to be used; -- use 'hostKey' instead. hostPubKey :: SshKeyType -> PubKeyText -> Property HasInfo hostPubKey t = pureInfoProperty "ssh pubkey known" . HostKeyInfo . M.singleton t getHostPubKey :: Propellor (M.Map SshKeyType PubKeyText) getHostPubKey = fromHostKeyInfo <$> askInfo newtype HostKeyInfo = HostKeyInfo { fromHostKeyInfo :: M.Map SshKeyType PubKeyText } deriving (Eq, Ord, Typeable, Show) instance IsInfo HostKeyInfo where propagateInfo _ = False instance Monoid HostKeyInfo where mempty = HostKeyInfo M.empty mappend (HostKeyInfo old) (HostKeyInfo new) = -- new first because union prefers values from the first -- parameter when there is a duplicate key HostKeyInfo (new `M.union` old) userPubKeys :: User -> [(SshKeyType, PubKeyText)] -> Property HasInfo userPubKeys u@(User n) l = pureInfoProperty ("ssh pubkey for " ++ n) $ UserKeyInfo (M.singleton u (S.fromList l)) getUserPubKeys :: User -> Propellor [(SshKeyType, PubKeyText)] getUserPubKeys u = maybe [] S.toList . M.lookup u . 
fromUserKeyInfo <$> askInfo newtype UserKeyInfo = UserKeyInfo { fromUserKeyInfo :: M.Map User (S.Set (SshKeyType, PubKeyText)) } deriving (Eq, Ord, Typeable, Show) instance IsInfo UserKeyInfo where propagateInfo _ = False instance Monoid UserKeyInfo where mempty = UserKeyInfo M.empty mappend (UserKeyInfo old) (UserKeyInfo new) = UserKeyInfo (M.unionWith S.union old new) -- | Sets up a user with the specified public keys, and the corresponding -- private keys from the privdata. -- -- The public keys are added to the Info, so other properties like -- `authorizedKeysFrom` can use them. userKeys :: IsContext c => User -> c -> [(SshKeyType, PubKeyText)] -> Property HasInfo userKeys user@(User name) context ks = combineProperties desc $ userPubKeys user ks : map (userKeyAt Nothing user context) ks where desc = unwords [ name , "has ssh key" , "(" ++ unwords (map (fromKeyType . fst) ks) ++ ")" ] -- | Sets up a user with the specified pubic key, and a private -- key from the privdata. -- -- A file can be specified to write the key to somewhere other than -- the default locations. Allows a user to have multiple keys for -- different roles. userKeyAt :: IsContext c => Maybe FilePath -> User -> c -> (SshKeyType, PubKeyText) -> Property HasInfo userKeyAt dest user@(User u) context (keytype, pubkeytext) = combineProperties desc $ props & pubkey & privkey where desc = unwords $ catMaybes [ Just u , Just "has ssh key" , dest , Just $ "(" ++ fromKeyType keytype ++ ")" ] pubkey = property desc $ install File.hasContent ".pub" [pubkeytext] privkey = withPrivData (SshPrivKey keytype u) context $ \getkey -> property desc $ getkey $ install File.hasContentProtected "" . 
privDataLines install writer ext key = do f <- liftIO $ keyfile ext ensureProperty $ combineProperties desc [ writer f (keyFileContent key) , File.ownerGroup f user (userGroup user) , File.ownerGroup (takeDirectory f) user (userGroup user) ] keyfile ext = case dest of Nothing -> do home <- homeDirectory <$> getUserEntryForName u return $ home </> ".ssh" </> "id_" ++ fromKeyType keytype ++ ext Just f -> return $ f ++ ext fromKeyType :: SshKeyType -> String fromKeyType SshRsa = "rsa" fromKeyType SshDsa = "dsa" fromKeyType SshEcdsa = "ecdsa" fromKeyType SshEd25519 = "ed25519" -- | Puts some host's ssh public key(s), as set using `hostPubKey` -- or `hostKey` into the known_hosts file for a user. knownHost :: [Host] -> HostName -> User -> Property NoInfo knownHost hosts hn user@(User u) = property desc $ go =<< knownHostLines hosts hn where desc = u ++ " knows ssh key for " ++ hn go [] = do warningMessage $ "no configured ssh host keys for " ++ hn return FailedChange go ls = do f <- liftIO $ dotFile "known_hosts" user modKnownHost user f $ f `File.containsLines` ls `requires` File.dirExists (takeDirectory f) -- | Reverts `knownHost` unknownHost :: [Host] -> HostName -> User -> Property NoInfo unknownHost hosts hn user@(User u) = property desc $ go =<< knownHostLines hosts hn where desc = u ++ " does not know ssh key for " ++ hn go [] = return NoChange go ls = do f <- liftIO $ dotFile "known_hosts" user ifM (liftIO $ doesFileExist f) ( modKnownHost user f $ f `File.lacksLines` ls , return NoChange ) knownHostLines :: [Host] -> HostName -> Propellor [File.Line] knownHostLines hosts hn = keylines <$> fromHost hosts hn getHostPubKey where keylines (Just m) = map (\k -> hn ++ " " ++ k) (M.elems m) keylines Nothing = [] modKnownHost :: User -> FilePath -> Property NoInfo -> Propellor Result modKnownHost user f p = ensureProperty $ p `requires` File.ownerGroup f user (userGroup user) `requires` File.ownerGroup (takeDirectory f) user (userGroup user) -- | Ensures that a local 
user's authorized_keys contains lines allowing -- logins from a remote user on the specified Host. -- -- The ssh keys of the remote user can be set using `keysImported` -- -- Any other lines in the authorized_keys file are preserved as-is. authorizedKeysFrom :: User -> (User, Host) -> Property NoInfo localuser@(User ln) `authorizedKeysFrom` (remoteuser@(User rn), remotehost) = property desc (go =<< authorizedKeyLines remoteuser remotehost) where remote = rn ++ "@" ++ hostName remotehost desc = ln ++ " authorized_keys from " ++ remote go [] = do warningMessage $ "no configured ssh user keys for " ++ remote return FailedChange go ls = ensureProperty $ combineProperties desc $ map (authorizedKey localuser) ls -- | Reverts `authorizedKeysFrom` unauthorizedKeysFrom :: User -> (User, Host) -> Property NoInfo localuser@(User ln) `unauthorizedKeysFrom` (remoteuser@(User rn), remotehost) = property desc (go =<< authorizedKeyLines remoteuser remotehost) where remote = rn ++ "@" ++ hostName remotehost desc = ln ++ " unauthorized_keys from " ++ remote go [] = return NoChange go ls = ensureProperty $ combineProperties desc $ map (unauthorizedKey localuser) ls authorizedKeyLines :: User -> Host -> Propellor [File.Line] authorizedKeyLines remoteuser remotehost = map snd <$> fromHost' remotehost (getUserPubKeys remoteuser) -- | Makes a user have authorized_keys from the PrivData -- -- This removes any other lines from the file. authorizedKeys :: IsContext c => User -> c -> Property HasInfo authorizedKeys user@(User u) context = withPrivData (SshAuthorizedKeys u) context $ \get -> property desc $ get $ \v -> do f <- liftIO $ dotFile "authorized_keys" user ensureProperty $ combineProperties desc [ File.hasContentProtected f (keyFileContent (privDataLines v)) , File.ownerGroup f user (userGroup user) , File.ownerGroup (takeDirectory f) user (userGroup user) ] where desc = u ++ " has authorized_keys" -- | Ensures that a user's authorized_keys contains a line. 
-- Any other lines in the file are preserved as-is. authorizedKey :: User -> String -> Property NoInfo authorizedKey user@(User u) l = property desc $ do f <- liftIO $ dotFile "authorized_keys" user modAuthorizedKey f user $ f `File.containsLine` l `requires` File.dirExists (takeDirectory f) where desc = u ++ " has authorized_keys" -- | Reverts `authorizedKey` unauthorizedKey :: User -> String -> Property NoInfo unauthorizedKey user@(User u) l = property desc $ do f <- liftIO $ dotFile "authorized_keys" user ifM (liftIO $ doesFileExist f) ( modAuthorizedKey f user $ f `File.lacksLine` l , return NoChange ) where desc = u ++ " lacks authorized_keys" modAuthorizedKey :: FilePath -> User -> Property NoInfo -> Propellor Result modAuthorizedKey f user p = ensureProperty $ p `requires` File.mode f (combineModes [ownerWriteMode, ownerReadMode]) `requires` File.ownerGroup f user (userGroup user) `requires` File.ownerGroup (takeDirectory f) user (userGroup user)
np/propellor
src/Propellor/Property/Ssh.hs
Haskell
bsd-2-clause
14,661
{-# LANGUAGE Haskell2010 #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}

-- | Haddock test module exercising promoted (DataKinds) types:
-- type-level lists, a promoted snoc-list, and a promoted tuple.
module PromotedTypes where

-- | A snoc list: elements are appended on the right with ':>'.
data RevList a = RNil | RevList a :> a

-- | A heterogeneous pattern indexed by a promoted type-level list.
data Pattern :: [*] -> * where
    Nil :: Pattern '[]
    Cons :: Maybe h -> Pattern t -> Pattern (h ': t)

-- Unlike (:), (:>) does not have to be quoted on type level.
data RevPattern :: RevList * -> * where
    RevNil :: RevPattern RNil
    RevCons :: Maybe h -> RevPattern t -> RevPattern (t :> h)

-- | Indexed by a promoted pair; note the quoted '(a, b) in the result.
data Tuple :: (*, *) -> * where
    Tuple :: a -> b -> Tuple '(a, b)
haskell/haddock
html-test/src/PromotedTypes.hs
Haskell
bsd-2-clause
584
{-# LANGUAGE GADTs, TypeFamilies #-}

-- | Typed AST for generating Javascript from Haskell, with phantom types
-- tracking the Javascript value each expression produces.
module JS.Types where

import Data.Monoid ((<>))
import Data.Text.Lazy.Builder (Builder, fromText)
import Yesod.Core (GWidget, Route)
import Data.Text (Text)
import Control.Monad.Trans.RWS (RWST)
import Data.Aeson (ToJSON)

-- Javascript representation, tagged via phantom types.
newtype JSVarName = JSVarName Text

-- | Renders a route plus query parameters to a URL.
type RenderUrl url = url -> [(Text, Text)] -> Text

-- | Code-generation monad: reads the URL renderer, keeps an 'Int' counter
-- in state (presumably for fresh variable names -- TODO confirm), and runs
-- over the widget monad.
type JS master = RWST (RenderUrl (Route master)) () Int (GWidget master master)

-- | A sequence of statements whose final expression has type @result@.
data JSBody result where
    -- | Evaluate an expression for effect, discarding its value.
    JSStmtNoBind :: JSExp a -> JSBody result -> JSBody result
    -- | Bind an expression's value for use in the rest of the body.
    JSStmtBind :: JSExp a -> (JSExp a -> JSBody result) -> JSBody result
    -- | Final expression of the body.
    JSStmtExp :: JSExp a -> JSBody a

-- | A Javascript expression producing a value of type @a@.
data JSExp a where
    JSLambda :: Args a => (a -> JSBody b) -> JSExp (ArgsType a -> b)
    JSText :: Text -> JSExp Text
    JSValue :: ToJSON value => value -> JSExp value
    JSVarE :: JSVarName -> JSExp a
    JSAppE :: Args a => JSExp (ArgsType a -> b) -> a -> JSExp b
    JSObject :: [JSPair] -> JSExp a
    JSProperty :: JSExp a -> Text -> JSExp b

-- | Argument tuples that can be rendered in a lambda/application.
class Args a where
    type ArgsType a
    showArgs :: a -> JS master [Builder]
    showLambda :: (a -> JSBody b) -> JS master Builder

-- | A single wrapped argument expression.
data Arg a = Arg { unArg :: JSExp a }

-- | A key/value pair of a Javascript object literal; the value's type is
-- existentially hidden.
data JSPair where
    JSPair :: JSExp Text -> JSExp a -> JSPair
snoyberg/yesod-js
samples/ajax/JS/Types.hs
Haskell
bsd-2-clause
1,277
-- | Renames 'RdrName's in parsed documentation to resolved 'Name's, using
-- the global reader environment. Identifiers that cannot be resolved are
-- rendered as monospaced text instead.
module Haddock.Interface.Rn ( rnDoc, rnHaddockModInfo ) where

import Haddock.Types

import RnEnv       ( dataTcOccs )

import RdrName     ( RdrName, gre_name, GlobalRdrEnv, lookupGRE_RdrName )
import Name        ( Name )
import Outputable  ( ppr, defaultUserStyle )

-- Needed for the Applicative instance of Id on pre-AMP compilers;
-- redundant (but harmless) on GHC >= 7.10.
import Control.Applicative ( Applicative(..) )

-- | Rename the module description inside the module info header.
rnHaddockModInfo :: GlobalRdrEnv -> HaddockModInfo RdrName -> HaddockModInfo Name
rnHaddockModInfo gre (HaddockModInfo desc port stab maint) =
  HaddockModInfo (fmap (rnDoc gre) desc) port stab maint

-- | Render a list of identifiers for display; only the first is shown.
ids2string :: [RdrName] -> String
ids2string []    = []
ids2string (x:_) = show $ ppr x defaultUserStyle

-- | A minimal identity monad, used to write 'rnDoc' in do-notation.
data Id x = Id {unId::x}

-- FIX: the original defined only a Monad instance; since GHC 7.10 (AMP),
-- Monad requires Functor and Applicative superclass instances, so the
-- module would no longer compile without these.
instance Functor Id where
  fmap f (Id v) = Id (f v)

instance Applicative Id where
  pure = Id
  Id f <*> Id v = Id (f v)

instance Monad Id where
  (Id v) >>= f = f v
  return = Id

-- | Recursively rename every identifier occurring in a doc tree.
-- Unresolvable identifiers degrade to monospaced strings rather than
-- failing.
rnDoc :: GlobalRdrEnv -> Doc RdrName -> Doc Name
rnDoc gre = unId . do_rn
  where
 do_rn doc_to_rn = case doc_to_rn of

  DocEmpty -> return DocEmpty

  DocAppend a b -> do
    a' <- do_rn a
    b' <- do_rn b
    return (DocAppend a' b')

  DocString str -> return (DocString str)

  DocParagraph doc -> do
    doc' <- do_rn doc
    return (DocParagraph doc')

  DocIdentifier ids -> do
    -- Consider both data-constructor and type-constructor occurrences,
    -- and collect every matching Name in the environment.
    let choices = concatMap dataTcOccs ids
    let gres = concatMap (\rdrName ->
                 map gre_name (lookupGRE_RdrName rdrName gre)) choices
    case gres of
      [] -> return (DocMonospaced (DocString (ids2string ids)))
      ids' -> return (DocIdentifier ids')

  DocModule str -> return (DocModule str)

  DocEmphasis doc -> do
    doc' <- do_rn doc
    return (DocEmphasis doc')

  DocMonospaced doc -> do
    doc' <- do_rn doc
    return (DocMonospaced doc')

  DocUnorderedList docs -> do
    docs' <- mapM do_rn docs
    return (DocUnorderedList docs')

  DocOrderedList docs -> do
    docs' <- mapM do_rn docs
    return (DocOrderedList docs')

  DocDefList list -> do
    list' <- mapM (\(a,b) -> do
      a' <- do_rn a
      b' <- do_rn b
      return (a', b')) list
    return (DocDefList list')

  DocCodeBlock doc -> do
    doc' <- do_rn doc
    return (DocCodeBlock doc')

  DocURL str -> return (DocURL str)

  DocPic str -> return (DocPic str)

  DocAName str -> return (DocAName str)

  DocExamples e -> return (DocExamples e)
nominolo/haddock2
src/Haddock/Interface/Rn.hs
Haskell
bsd-2-clause
2,106
{- Data/Singletons/TypeRepStar.hs

(c) Richard Eisenberg 2012
eir@cis.upenn.edu

This file contains the definitions for considering TypeRep to be the demotion
of *. This is still highly experimental, so expect unusual results!
-}

{-# LANGUAGE RankNTypes, TypeFamilies, KindSignatures, FlexibleInstances,
             GADTs, UndecidableInstances, ScopedTypeVariables #-}

module Data.Singletons.TypeRepStar where

import Data.Singletons
import Data.Typeable

-- | The singleton for a type of kind @*@: the 'Typeable' dictionary is the
-- only runtime evidence carried.
data instance Sing (a :: *) where
  STypeRep :: Typeable a => Sing a

-- | Explicit singleton constructor for a 'Typeable' type.
sTypeRep :: forall (a :: *). Typeable a => Sing a
sTypeRep = STypeRep

-- | Any 'Typeable' type has an implicit singleton.
instance Typeable a => SingI (a :: *) where
  sing = STypeRep

-- | Demoting a type of kind @*@ yields its 'TypeRep'.
instance SingE (a :: *) where
  type Demote a = TypeRep
  -- The undefined is never forced; typeOf only inspects the type.
  fromSing STypeRep = typeOf (undefined :: a)

instance SingKind (Any :: *) where
  singInstance STypeRep = SingInstance
jonsterling/singletons
Data/Singletons/TypeRepStar.hs
Haskell
bsd-3-clause
839
-- Copyright : (C) 2009 Corey O'Connor
-- License : BSD-style (see the file LICENSE)
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE MagicHash #-}
-- FIX: 'pop_bytes' and the NFData instance use bang patterns, which the
-- original did not enable in this module (it may have relied on a
-- cabal-wide default-extensions setting); declare it locally so the
-- module is self-contained.
{-# LANGUAGE BangPatterns #-}
module Bind.Marshal.Action.Base where

import Bind.Marshal.Prelude

import qualified Prelude

import Bind.Marshal.DataModel

import Control.DeepSeq

import Foreign.Ptr

import GHC.Prim

-- | All actions have a buffering requirement. In the case of an action with a static data model the
-- buffer requirement is the memory required by the marshaled data.
-- XXX: All actions? Maybe easier to just have a StaticBufferReq that is only defined for static
-- buffer actions.
type family BufferReq action

-- | All static memory actions act on a buffer region.
--
-- The tag type variable differentiates buffers being serialized to versus deserialized from.
data BufferRegion tag = BufferRegion
    { buffer_region_start :: {-# UNPACK #-} !BytePtr
    , buffer_region_size :: {-# UNPACK #-} !Size
    }

-- | returns the pointer to the end of the BufferRegion
buffer_region_end :: BufferRegion tag -> BytePtr
buffer_region_end (BufferRegion start size) = start `plusPtr` size
{-# INLINE buffer_region_end #-}

-- | Produces a new buffer region that skips the given number of bytes of the given buffer.
--
-- XXX: Really should be named drop_bytes instead of pop_bytes
pop_bytes :: BufferRegion tag -> Size -> BufferRegion tag
pop_bytes (BufferRegion start size) !to_pop =
    BufferRegion (start `plusPtr` to_pop) (size - to_pop)
{-# INLINE pop_bytes #-}

instance NFData (BufferRegion tag) where
    -- Both fields are strict and unpacked, so forcing them with bang
    -- patterns fully evaluates the region.
    rnf (BufferRegion !start !size) = ()

-- | A raw byte iterator into a buffer.
type Iter = Ptr Word8
coreyoconnor/bind-marshal
src/Bind/Marshal/Action/Base.hs
Haskell
bsd-3-clause
1,639
{-# LANGUAGE TemplateHaskell #-}

-- | Template Haskell helper that builds, at compile time, a proof term
-- witnessing the triangle inequality for three concrete side lengths.
module TH where

import Data.Type.Equality
import Language.Haskell.TH

import Type

-- | Splice a @Refl@ annotated with @Triangle a b c@, where each side
-- length is reflected to a type-level Peano numeral.
triangle :: Int -> Int -> Int -> Q Exp
triangle a b c =
    [|Refl :: Triangle $(peano a) $(peano b) $(peano c)|]
  where
    -- Reflect a non-negative Int into a chain of Succ applied to Zero.
    peano :: Int -> Q Type
    peano k
        | k == 0 = [t|Zero|]
        | otherwise = [t|Succ $(peano (k - 1))|]
nkaretnikov/triangle-inequality
src/TH.hs
Haskell
bsd-3-clause
352
-- | A parameterized parser for Brainfuck dialects: supply the token
-- strings for the eight operations and get a parser for that dialect.
module Language.Brainfuck.Internals.GenericParser where

import Prelude hiding (print, read)
import Data.Maybe (catMaybes)
import Text.ParserCombinators.Parsec
import Language.Brainfuck.Internals.Instructions

-- | Encapsulate a parser
type GenericParser = Parser [Instr]

-- | Record type containing symbols of Brainfuck language
data Symbols = Symbols {
    incr :: String,     -- Symbol for +
    decr :: String,     -- Symbol for -
    right :: String,    -- Symbol for >
    left :: String,     -- Symbol for <
    read :: String,     -- Symbol for ,
    print :: String,    -- Symbol for .
    openl :: String,    -- Symbol for [
    closel :: String,   -- Symbol for ]
    reserved :: String  -- None of this is a comment
}

-- | Used to generate a parser for a Brainfuck's dialect
--
-- Anything that is not a recognized instruction or loop delimiter, and is
-- not in 'reserved', parses as a comment and is dropped ('catMaybes').
genparser :: Symbols -> GenericParser
genparser sym = fmap catMaybes $ many $ try instr <|> try loop <|> comment
    where
        -- A comment consumes one non-reserved character, yields Nothing.
        comment = noneOf (reserved sym) >> return Nothing

        instr = choice [
            parseInstr incr (Incr 1),
            parseInstr decr (Decr 1),
            parseInstr right (MoveRight 1),
            parseInstr left (MoveLeft 1),
            parseInstr read Read,
            parseInstr print Print]

        -- A loop is a bracketed, recursively parsed body.
        loop = between
                (string $ openl sym)            -- Open loop
                (string $ closel sym)           -- Close loop
                (Just . Loop <$> genparser sym) -- Body

        -- try lets the next alternative run if the symbol only
        -- partially matches.
        parseInstr fun inst = try $ do
            _ <- string $ fun sym
            return $ Just inst
remusao/Hodor
src/Language/Brainfuck/Internals/GenericParser.hs
Haskell
bsd-3-clause
1,523
-- | The Pivotal Tracker "iteration" resource, with lenses generated for
-- each field.
module PivotalTrackerApi.Iteration
  ( Iteration
  , number
  , project_id
  , PivotalTrackerApi.Iteration.length
  , team_strength
  , stories
  , start
  , finish
  , velocity
  , kind
  ) where

import PivotalTrackerApi.Base
import PivotalTrackerApi.Story (Story)

-- Field names carry a leading underscore so makeLenses can derive
-- underscore-free lens names.
data Iteration = Iteration
  { _number        :: Int
  , _project_id    :: Int
  , _length        :: Int
  , _team_strength :: Double
  , _stories       :: [Story]
  , _start         :: Text
  , _finish        :: Text
  , _velocity      :: Maybe Double
  , _kind          :: Text
  } deriving (Show, Generic)

makeLenses ''Iteration

instance FromJSON Iteration where
  -- Drop the leading underscore so JSON keys match the API's field names.
  parseJSON = genericParseJSON defaultOptions{fieldLabelModifier = drop 1}
diogob/pivotal-tracker-api
src/PivotalTrackerApi/Iteration.hs
Haskell
bsd-3-clause
791
{-# OPTIONS_HADDOCK show-extensions #-}
-- FIX: the \case syntax used in 'whileLeader' requires LambdaCase, which
-- the original did not enable in this module (it may have relied on a
-- cabal-wide default-extensions setting); declare it locally so the
-- module is self-contained.
{-# LANGUAGE LambdaCase #-}

{-|
Module      : $Header$
Copyright   : (c) 2015 Swinburne Software Innovation Lab
License     : BSD3

Maintainer  : Rhys Adams <rhysadams@swin.edu.au>
Stability   : unstable
Portability : portable

Functions for Eclogues API instance election.
-}

module Eclogues.APIElection (
    LeadershipError (..), ManagedZK, ZKURI, whileLeader, advertisedData ) where

import Eclogues.API (zkNode)

import Control.Monad.Except (ExceptT, throwError, catchError)
import Control.Monad.Trans.Control (liftBaseOp)
import Data.Aeson (encode)
import Data.ByteString (ByteString)
import Data.ByteString.Lazy (toStrict)
import Data.Word (Word16)
import Database.Zookeeper.Election (LeadershipError (..), whenLeader)
import Database.Zookeeper.ManagedEvents (ManagedZK, ZKURI, withZookeeper)

-- | Contest Zookeeper election with the provided data, and perform some
-- action while elected. If leadership is lost, wait until re-elected and
-- perform the action again.
--
-- This function will never throw 'LeadershipLost'.
whileLeader :: ZKURI
            -> String  -- ^ API host
            -> Word16  -- ^ API port
            -> (ManagedZK -> IO a)
            -> ExceptT LeadershipError IO a
whileLeader zkUri host port act = liftBaseOp (withZookeeper zkUri) go
  where
    -- Retry on lost leadership; rethrow any other election error.
    go zk = catchError (whenLeader zk zkNode zkData $ act zk) $ \case
        LeadershipLost -> go zk
        e              -> throwError e
    zkData = advertisedData host port

-- | Create encoded (host, port) to advertise via Zookeeper.
advertisedData :: String -> Word16 -> ByteString
advertisedData host port = toStrict $ encode (host, port)
futufeld/eclogues
eclogues-impl/app/api/Eclogues/APIElection.hs
Haskell
bsd-3-clause
1,654
{-# LANGUAGE DeriveDataTypeable #-}

-- | A newtype over 'URI' that is guaranteed absolute by construction, with
-- helpers for appending encoded path segments.
module HipBot.AbsoluteURI where

import Blaze.ByteString.Builder (toLazyByteString)
import Control.Monad
import qualified Data.Aeson as A
import qualified Data.ByteString.Lazy.UTF8 as LB
import Data.List (isSuffixOf)
import Data.Maybe
import Data.Monoid
import Data.String
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable
import Network.HTTP.Types
import Network.URI (URI)
import qualified Network.URI as URI
import Prelude

newtype AbsoluteURI = AbsoluteURI URI
  deriving (Eq, Typeable)

-- | Parse a string; Nothing unless it is an absolute URI.
parseAbsoluteURI :: String -> Maybe AbsoluteURI
parseAbsoluteURI = fmap AbsoluteURI . URI.parseAbsoluteURI

-- | Append percent-encoded path segments to the URI's path, avoiding a
-- doubled slash when the existing path already ends in one.
appendPath :: AbsoluteURI -> [Text] -> AbsoluteURI
appendPath (AbsoluteURI uri) xs = AbsoluteURI uri'
  where
    uri' = uri { URI.uriPath = URI.uriPath uri <> dropSlash (relPath xs) }
    -- NOTE(review): relies on relPath always starting with "/" (from
    -- encodePathSegments), so 'tail' here cannot hit an empty string --
    -- confirm if encodePathSegments' output shape ever changes.
    dropSlash s = if "/" `isSuffixOf` URI.uriPath uri then tail s else s

-- | Encode segments as a path string (starts with "/").
relPath :: [Text] -> String
relPath = LB.toString . toLazyByteString . encodePathSegments

-- | Resolve the given segments as a relative reference against the URI.
relativeTo :: [Text] -> AbsoluteURI -> AbsoluteURI
relativeTo xs (AbsoluteURI uri) = AbsoluteURI (URI.relativeTo rel uri)
  where
    -- NOTE(review): fromJust assumes a percent-encoded path is always a
    -- parseable URI reference; appears safe for encodePathSegments
    -- output, but verify.
    rel = fromJust . URI.parseURIReference . drop 1 . relPath $ xs

instance Show AbsoluteURI where
  show (AbsoluteURI u) = show u

-- | Partial: errors on a non-absolute literal (intended for constants).
instance IsString AbsoluteURI where
  fromString s = fromMaybe
    (error $ "Not an absolute URI: " <> s)
    (parseAbsoluteURI s)

instance A.ToJSON AbsoluteURI where
  toJSON = A.toJSON . show

instance A.FromJSON AbsoluteURI where
  parseJSON = A.withText "String" $ \t ->
    maybe mzero return . parseAbsoluteURI . T.unpack $ t
purefn/hipbot
src/HipBot/AbsoluteURI.hs
Haskell
bsd-3-clause
1,604
-- | Response type for the HipChat "get all rooms" endpoint.
--
-- NOTE(review): currently a nullary placeholder carrying no fields --
-- presumably a stub awaiting the real response schema; confirm before
-- relying on it.
module HipChat.Types.Rooms.GetAllRoomsResponse
  ( GetAllRoomsResponse(..)
  ) where

data GetAllRoomsResponse = GetAllRoomsResponse
oswynb/hipchat-hs
lib/HipChat/Types/Rooms/GetAllRoomsResponse.hs
Haskell
bsd-3-clause
133
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}

-- | Versions for packages.
module Stack.Types.Version
  (Version
  ,Cabal.VersionRange -- TODO in the future should have a newtype wrapper
  ,MajorVersion (..)
  ,getMajorVersion
  ,fromMajorVersion
  ,parseMajorVersionFromString
  ,versionParser
  ,parseVersion
  ,parseVersionFromString
  ,versionString
  ,versionText
  ,toCabalVersion
  ,fromCabalVersion
  ,mkVersion
  ,versionRangeText
  ,withinRange)
  where

import Control.Applicative
import Control.DeepSeq
import Control.Monad.Catch
import Data.Aeson
import Data.Attoparsec.ByteString.Char8
import Data.Binary (Binary)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S8
import Data.Data
import Data.Hashable
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text as T
import Data.Vector.Binary ()
import Data.Vector.Unboxed (Vector)
import qualified Data.Vector.Unboxed as V
import Data.Word
import Distribution.Text (disp)
import qualified Distribution.Version as Cabal
import GHC.Generics
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Prelude -- Fix warning: Word in Prelude from base-4.8.
import Text.PrettyPrint (render)

-- | A parse fail.
data VersionParseFail =
  VersionParseFail ByteString
  | NotAMajorVersion Version
  deriving (Typeable)
instance Exception VersionParseFail
instance Show VersionParseFail where
  show (VersionParseFail bs) = "Invalid version: " ++ show bs
  show (NotAMajorVersion v) = concat
    [ "Not a major version: "
    , versionString v
    , ", expecting exactly two numbers (e.g. 7.10)"
    ]

-- | A package version.
newtype Version =
  Version {unVersion :: Vector Word}
  deriving (Eq,Ord,Typeable,Data,Generic,Binary,NFData)

-- | The first two components of a version.
data MajorVersion = MajorVersion !Word !Word
  deriving (Typeable, Eq, Ord)
instance Show MajorVersion where
  show (MajorVersion x y) = concat [show x, ".", show y]
instance ToJSON MajorVersion where
  -- Serialized via the full Version form, e.g. "7.10".
  toJSON = toJSON . fromMajorVersion

-- | Parse major version from @String@
parseMajorVersionFromString :: MonadThrow m => String -> m MajorVersion
parseMajorVersionFromString s = do
  Version v <- parseVersionFromString s
  -- Require exactly two components; "7.10.2" is not a major version.
  if V.length v == 2
    then return $ getMajorVersion (Version v)
    else throwM $ NotAMajorVersion (Version v)

instance FromJSON MajorVersion where
  parseJSON = withText "MajorVersion"
    $ either (fail . show) return
    . parseMajorVersionFromString . T.unpack

instance FromJSON a => FromJSON (Map MajorVersion a) where
  -- Parse as a String-keyed map first, then reparse each key.
  parseJSON val = do
    m <- parseJSON val
    fmap Map.fromList $ mapM go $ Map.toList m
    where
      go (k, v) = do
        k' <- either (fail . show) return $ parseMajorVersionFromString k
        return (k', v)

-- | Returns the first two components, defaulting to 0 if not present
getMajorVersion :: Version -> MajorVersion
getMajorVersion (Version v) =
  case V.length v of
    0 -> MajorVersion 0 0
    1 -> MajorVersion (V.head v) 0
    _ -> MajorVersion (V.head v) (v V.! 1)

-- | Convert a two-component version into a @Version@
fromMajorVersion :: MajorVersion -> Version
fromMajorVersion (MajorVersion x y) = Version $ V.fromList [x, y]

instance Hashable Version where
  hashWithSalt i = hashWithSalt i . V.toList . unVersion

instance Lift Version where
  -- Lifts the component list literally into a spliced expression.
  lift (Version n) =
    appE (conE 'Version)
         (appE (varE 'V.fromList)
               (listE (map (litE . IntegerL . fromIntegral)
                           (V.toList n))))

instance Show Version where
  show (Version v) =
    intercalate "."
                (map show (V.toList v))

instance ToJSON Version where
  toJSON = toJSON . versionText
instance FromJSON Version where
  parseJSON j =
    do s <- parseJSON j
       case parseVersionFromString s of
         Nothing ->
           fail ("Couldn't parse package version: " ++ s)
         Just ver -> return ver

-- | Attoparsec parser for a package version from bytestring.
versionParser :: Parser Version
versionParser =
  do ls <- ((:) <$> num <*> many num')
     let !v = V.fromList ls
     return (Version v)
  where num = decimal
        num' = point *> num
        point = satisfy (== '.')

-- | Convenient way to parse a package version from a bytestring.
parseVersion :: MonadThrow m => ByteString -> m Version
parseVersion x = go x
  where go =
          either (const (throwM (VersionParseFail x))) return .
          parseOnly (versionParser <* endOfInput)

-- | Migration function.
parseVersionFromString :: MonadThrow m => String -> m Version
parseVersionFromString =
  parseVersion . S8.pack

-- | Get a string representation of a package version.
versionString :: Version -> String
versionString (Version v) =
  intercalate "."
              (map show (V.toList v))

-- | Get a string representation of a package version.
versionText :: Version -> Text
versionText (Version v) =
  T.intercalate "."
                (map (T.pack . show)
                     (V.toList v))

-- | Convert to a Cabal version.
toCabalVersion :: Version -> Cabal.Version
toCabalVersion (Version v) =
  Cabal.Version (map fromIntegral (V.toList v)) []

-- | Convert from a Cabal version.
fromCabalVersion :: Cabal.Version -> Version
fromCabalVersion (Cabal.Version vs _) =
  let !v = V.fromList (map fromIntegral vs)
  in Version v

-- | Make a package version.
mkVersion :: String -> Q Exp
mkVersion s =
  case parseVersionFromString s of
    Nothing -> error ("Invalid package version: " ++ show s)
    Just pn -> [|pn|]

-- | Display a version range
versionRangeText :: Cabal.VersionRange -> Text
versionRangeText = T.pack . render . disp

-- | Check if a version is within a version range.
withinRange :: Version -> Cabal.VersionRange -> Bool
withinRange v r = toCabalVersion v `Cabal.withinRange` r
mietek/stack
src/Stack/Types/Version.hs
Haskell
bsd-3-clause
6,328
-----------------------------------------------------------------------------------------
-- A Haskell port of GNU's getopt library
--
-- Sven Panne <Sven.Panne@informatik.uni-muenchen.de> Oct. 1996 (small changes Dec. 1997)
--
-- Two rather obscure features are missing: The Bash 2.0 non-option hack (if you don't
-- already know it, you probably don't want to hear about it...) and the recognition of
-- long options with a single dash (e.g. '-help' is recognised as '--help', as long as
-- there is no short option 'h').
--
-- Other differences between GNU's getopt and this implementation:
--   * To enforce a coherent description of options and arguments, there are explanation
--     fields in the option/argument descriptor.
--   * Error messages are now more informative, but no longer POSIX compliant... :-(
--
-- And a final Haskell advertisement: The GNU C implementation uses well over 1100 lines,
-- we need only 195 here, including a 46 line example! :-)
-----------------------------------------------------------------------------------------

module Text.CTK.GetOpt (ArgOrder(..), OptDescr(..), ArgDescr(..), usageInfo, getOpt) where

import Data.List (isPrefixOf)

data ArgOrder a                 -- what to do with options following non-options:
   = RequireOrder               -- no option processing after first non-option
   | Permute                    -- freely intersperse options and non-options
   | ReturnInOrder (String -> a) -- wrap non-options into options

data OptDescr a =               -- description of a single option:
   Option [Char]                -- list of short option characters
          [String]              -- list of long option strings (without "--")
          (ArgDescr a)          -- argument descriptor
          String                -- explanation of option for user

data ArgDescr a                 -- description of an argument option:
   = NoArg                   a         -- no argument expected
   | ReqArg (String       -> a) String -- option requires argument
   | OptArg (Maybe String -> a) String -- optional argument

data OptKind a                  -- kind of cmd line arg (internal use only):
   = Opt       a                -- an option
   | NonOpt    String           -- a non-option
   | EndOfOpts                  -- end-of-options marker (i.e. "--")
   | OptErr    String           -- something went wrong...

usageInfo :: String       -- header
          -> [OptDescr a] -- option descriptors
          -> String       -- nicely formatted description of options
usageInfo header optDescr = unlines (header:table)
   where (ss,ls,ds)     = (unzip3 . map fmtOpt) optDescr
         table          = zipWith3 paste (sameLen ss) (sameLen ls) (sameLen ds)
         paste x y z    = " " ++ x ++ " " ++ y ++ " " ++ z
         sameLen xs     = flushLeft ((maximum . map length) xs) xs
         flushLeft n xs = [ take n (x ++ repeat ' ') | x <- xs ]

-- render one descriptor as (short-option column, long-option column, description)
fmtOpt :: OptDescr a -> (String,String,String)
fmtOpt (Option sos los ad descr) =
   (sepBy ", " (map (fmtShort ad) sos),
    sepBy ", " (map (fmtLong  ad) los),
    descr)
   where sepBy sep []     = ""
         sepBy sep [x]    = x
         sepBy sep (x:xs) = x ++ sep ++ sepBy sep xs

fmtShort :: ArgDescr a -> Char -> String
fmtShort (NoArg  _   ) so = "-" ++ [so]
fmtShort (ReqArg _ ad) so = "-" ++ [so] ++ " " ++ ad
fmtShort (OptArg _ ad) so = "-" ++ [so] ++ "[" ++ ad ++ "]"

fmtLong :: ArgDescr a -> String -> String
fmtLong (NoArg  _   ) lo = "--" ++ lo
fmtLong (ReqArg _ ad) lo = "--" ++ lo ++ "=" ++ ad
fmtLong (OptArg _ ad) lo = "--" ++ lo ++ "[=" ++ ad ++ "]"

getOpt :: ArgOrder a               -- non-option handling
       -> [OptDescr a]             -- option descriptors
       -> [String]                 -- the commandline arguments
       -> ([a],[String],[String])  -- (options,non-options,error messages)
getOpt _        _        []         = ([],[],[])
getOpt ordering optDescr (arg:args) = procNextOpt opt ordering
   where procNextOpt (Opt o)    _                 = (o:os,xs,es)
         procNextOpt (NonOpt x) RequireOrder      = ([],x:rest,[])
         procNextOpt (NonOpt x) Permute           = (os,x:xs,es)
         procNextOpt (NonOpt x) (ReturnInOrder f) = (f x :os, xs,es)
         procNextOpt EndOfOpts  RequireOrder      = ([],rest,[])
         procNextOpt EndOfOpts  Permute           = ([],rest,[])
         procNextOpt EndOfOpts  (ReturnInOrder f) = (map f rest,[],[])
         procNextOpt (OptErr e) _                 = (os,xs,e:es)

         (opt,rest) = getNext arg args optDescr
         (os,xs,es) = getOpt ordering optDescr rest

-- take a look at the next cmd line arg and decide what to do with it
getNext :: String -> [String] -> [OptDescr a] -> (OptKind a,[String])
getNext "--"         rest _        = (EndOfOpts,rest)
getNext ('-':'-':xs) rest optDescr = longOpt xs rest optDescr
getNext ('-':x:xs)   rest optDescr = shortOpt x xs rest optDescr
getNext a            rest _        = (NonOpt a,rest)

-- handle long option; prefix matching, so "--na" can match "--name"
longOpt :: String -> [String] -> [OptDescr a] -> (OptKind a,[String])
longOpt xs rest optDescr = long ads arg rest
   where (opt,arg) = break (=='=') xs
         options   = [ o | o@(Option _ ls _ _) <- optDescr, l <- ls, opt `isPrefixOf` l ]
         ads       = [ ad | Option _ _ ad _ <- options ]
         optStr    = ("--"++opt)

         long (_:_:_)      _        rest     = (errAmbig options optStr,rest)
         long [NoArg  a  ] []       rest     = (Opt a,rest)
         long [NoArg  a  ] ('=':xs) rest     = (errNoArg optStr,rest)
         long [ReqArg f d] []       []       = (errReq d optStr,[])
         long [ReqArg f _] []       (r:rest) = (Opt (f r),rest)
         long [ReqArg f _] ('=':xs) rest     = (Opt (f xs),rest)
         long [OptArg f _] []       rest     = (Opt (f Nothing),rest)
         long [OptArg f _] ('=':xs) rest     = (Opt (f (Just xs)),rest)
         long _            _        rest     = (errUnrec optStr,rest)

-- handle short option; a bundled rest ("-ofile") is pushed back with a '-'
shortOpt :: Char -> String -> [String] -> [OptDescr a] -> (OptKind a,[String])
shortOpt x xs rest optDescr = short ads xs rest
   where options = [ o | o@(Option ss _ _ _) <- optDescr, s <- ss, x == s ]
         ads     = [ ad | Option _ _ ad _ <- options ]
         optStr  = '-':[x]

         short (_:_:_)        _  rest     = (errAmbig options optStr,rest)
         short (NoArg  a  :_) [] rest     = (Opt a,rest)
         short (NoArg  a  :_) xs rest     = (Opt a,('-':xs):rest)
         short (ReqArg f d:_) [] []       = (errReq d optStr,[])
         short (ReqArg f _:_) [] (r:rest) = (Opt (f r),rest)
         short (ReqArg f _:_) xs rest     = (Opt (f xs),rest)
         short (OptArg f _:_) [] rest     = (Opt (f Nothing),rest)
         short (OptArg f _:_) xs rest     = (Opt (f (Just xs)),rest)
         short []             [] rest     = (errUnrec optStr,rest)
         short []             xs rest     = (errUnrec optStr,('-':xs):rest)

-- miscellaneous error formatting

errAmbig :: [OptDescr a] -> String -> OptKind a
errAmbig ods optStr = OptErr (usageInfo header ods)
   where header = "option `" ++ optStr ++ "' is ambiguous; could be one of:"

errReq :: String -> String -> OptKind a
errReq d optStr = OptErr ("option `" ++ optStr ++ "' requires an argument " ++ d ++ "\n")

errUnrec :: String -> OptKind a
errUnrec optStr = OptErr ("unrecognized option `" ++ optStr ++ "'\n")

errNoArg :: String -> OptKind a
errNoArg optStr = OptErr ("option `" ++ optStr ++ "' doesn't allow an argument\n")

{-
-----------------------------------------------------------------------------------------
-- and here a small and hopefully enlightening example:

data Flag = Verbose | Version | Name String | Output String | Arg String
  deriving Show

options :: [OptDescr Flag]
options =
   [Option ['v']     ["verbose"]           (NoArg Verbose)      "verbosely list files",
    Option ['V','?'] ["version","release"] (NoArg Version)      "show version info",
    Option ['o']     ["output"]            (OptArg out "FILE")  "use FILE for dump",
    Option ['n']     ["name"]              (ReqArg Name "USER") "only dump USER's files"]

out :: Maybe String -> Flag
out Nothing  = Output "stdout"
out (Just o) = Output o

test :: ArgOrder Flag -> [String] -> String
test order cmdline =
   case getOpt order options cmdline of
      (o,n,[]  ) -> "options=" ++ show o ++ " args=" ++ show n ++ "\n"
      (_,_,errs) -> concat errs ++ usageInfo header options
   where header = "Usage: foobar [OPTION...] files..."

-- example runs:
-- putStr (test RequireOrder ["foo","-v"])
--    ==> options=[] args=["foo", "-v"]
-- putStr (test Permute ["foo","-v"])
--    ==> options=[Verbose] args=["foo"]
-- putStr (test (ReturnInOrder Arg) ["foo","-v"])
--    ==> options=[Arg "foo", Verbose] args=[]
-- putStr (test Permute ["foo","--","-v"])
--    ==> options=[] args=["foo", "-v"]
-- putStr (test Permute ["-?o","--name","bar","--na=baz"])
--    ==> options=[Version, Output "stdout", Name "bar", Name "baz"] args=[]
-- putStr (test Permute ["--ver","foo"])
--    ==> option `--ver' is ambiguous; could be one of:
--          -v      --verbose             verbosely list files
--          -V, -?  --version, --release  show version info
--        Usage: foobar [OPTION...] files...
--          -v      --verbose             verbosely list files
--          -V, -?  --version, --release  show version info
--          -o[FILE]  --output[=FILE]     use FILE for dump
--          -n USER   --name=USER         only dump USER's files
-----------------------------------------------------------------------------------------
-}
mwotton/ctkl
src/Text/CTK/GetOpt.hs
Haskell
bsd-3-clause
9,896
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}

module Model where

import qualified Data.ByteString.Lazy as B
import GHC.Generics (Generic)
import Control.Lens
import Data.Aeson
import Data.Aeson.TH

import Common

--
-- Model
--

-- | A model is an ordered pipeline of convolution steps.
type Model = [Step]

-- | One convolution layer of the model.
data Step = Step
  { _nInputPlane :: Int
  , _nOutputPlane :: Int
  , _weight :: [[Kernel]] -- ^ nOutputPlane * nInputPlane * (kW*kH)
  , _bias :: [Bias]       -- ^ nOutputPlane
  , _kW :: Int
  , _kH :: Int
  } deriving (Show, Generic)

-- | A 2-D convolution kernel (rows of weights).
type Kernel = [[Float]]
type Bias = Float

-- JSON field names are the record selectors without the leading underscore.
deriveJSON defaultOptions{fieldLabelModifier = drop 1} ''Step
makeLenses ''Step

-- | Read a model from a JSON file.
--
-- Raises an informative 'IOError' naming the offending file when the JSON
-- cannot be decoded.  (The previous version bound the result with an
-- irrefutable @Just@ pattern and crashed with an opaque pattern-match
-- failure on malformed input.)
readModel :: FilePath -> IO Model
readModel path = do
  json_bytes <- B.readFile path
  case eitherDecode' json_bytes of
    Left err ->
      ioError $ userError $ "readModel: cannot decode " ++ path ++ ": " ++ err
    Right model -> return model

-- | Print a summary of the whole model to stdout via 'dump' from "Common".
dumpModel :: Model -> IO ()
dumpModel model = do
  dump "model steps" (length model)
  mapM_ dumpStep model

-- | Print the scalar parameters of a single step (not the weights).
dumpStep :: Step -> IO ()
dumpStep step = do
  dumpTitle "Step"
  dump "nInputPlane" (step ^. nInputPlane)
  dump "nOutputPlane" (step ^. nOutputPlane)
  dump "kW, kH" (step ^. kW, step ^. kH)
notae/haskell-exercise
graphics/Model.hs
Haskell
bsd-3-clause
1,133
-- | Levenshtein edit distance computed through a lazily self-referential
-- array of Peano naturals.
module Lev3 where

import Data.Array
import qualified Number.Peano.Inf as P

-- | Build the full @(length t + 1) x (length f + 1)@ distance table for
-- transforming @f@ into @t@.  Cell @(i, j)@ holds the edit distance
-- between the first @i@ characters of @t@ and the first @j@ of @f@; the
-- array is defined in terms of itself, so cells are filled on demand.
mft :: String -> String -> Array (Int, Int) P.Nat
mft f t = table
  where
    limits = ((0, 0), (length t, length f))
    table  = array limits [ (ix, cost ix) | ix <- range limits ]
    -- First row/column: pure chains of insertions/deletions.
    cost (0, 0) = 0
    cost (0, j) = succ $ table ! (0, pred j)
    cost (i, 0) = succ $ table ! (pred i, 0)
    cost (i, j)
      | f !! p j == t !! p i = table ! (p i, p j)
      | otherwise            = 1 + minimum [ table ! (p i, j)
                                           , table ! (i, p j)
                                           , table ! (p i, p j) ]

-- | Edit distance between two strings as a plain 'Int' (the bottom-right
-- corner of the table).
score :: String -> String -> Int
score f t = fromIntegral $ finished ! snd (bounds finished)
  where finished = mft f t

-- | Shorthand for 'pred', used when indexing neighbouring cells.
p = pred
sordina/mfug_levenshtein_difference_2016_07_07-
src/Lev3.hs
Haskell
bsd-3-clause
588
{-# LINE 1 "fptools\libraries\base\System\CPUTime.hsc" #-}
-----------------------------------------------------------------------------
{-# LINE 2 "fptools\libraries\base\System\CPUTime.hsc" #-}
-- |
-- Module      :  System.CPUTime
-- Copyright   :  (c) The University of Glasgow 2001
-- License     :  BSD-style (see the file libraries/core/LICENSE)
--
-- Maintainer  :  libraries@haskell.org
-- Stability   :  provisional
-- Portability :  portable
--
-- The standard CPUTime library.
--
-----------------------------------------------------------------------------

-- NOTE(review): this file is hsc2hs output (hence the LINE pragmas); edit the
-- .hsc original rather than this file.  Only the Hugs branch survived
-- preprocessing here; 'getCPUTime' comes straight from Hugs.Time.

module System.CPUTime
        (
         getCPUTime,       -- :: IO Integer
         cpuTimePrecision  -- :: Integer
        ) where

import Prelude

import Data.Ratio

{-# LINE 26 "fptools\libraries\base\System\CPUTime.hsc" #-}
import Hugs.Time ( getCPUTime, clockTicks )

{-# LINE 28 "fptools\libraries\base\System\CPUTime.hsc" #-}

{-# LINE 35 "fptools\libraries\base\System\CPUTime.hsc" #-}

{-# LINE 123 "fptools\libraries\base\System\CPUTime.hsc" #-}

-- |The 'cpuTimePrecision' constant is the smallest measurable difference
-- in CPU time that the implementation can record, and is given as an
-- integral number of picoseconds.

cpuTimePrecision :: Integer
cpuTimePrecision = round ((1000000000000::Integer) % fromIntegral (clockTicks))

{-# LINE 141 "fptools\libraries\base\System\CPUTime.hsc" #-}
OS2World/DEV-UTIL-HUGS
libraries/System/CPUTime.hs
Haskell
bsd-3-clause
1,371
{-|
Module      : Idris.Imports
Description : Code to handle import declarations.
Copyright   :
License     : BSD3
Maintainer  : The Idris Community.
-}
module Idris.Imports(
    IFileType(..), findImport, findInPath, findPkgIndex
  , ibcPathNoFallback, installedPackages, pkgIndex
  ) where

import Control.Applicative ((<$>))
import Data.List (isSuffixOf)

import Idris.AbsSyntax
import Idris.Error
import Idris.Core.TT

import IRTS.System (getIdrisLibDir)

import System.FilePath
import System.Directory
import Control.Monad.State.Strict

-- | A resolved import: plain source, literate source, or a byte-compiled
-- file paired with the source it was compiled from.
data IFileType = IDR FilePath | LIDR FilePath | IBC FilePath IFileType
  deriving (Show, Eq)

-- | Get the index file name for a package name
pkgIndex :: String -> FilePath
pkgIndex s = "00" ++ s ++ "-idx.ibc"

-- | Ensure a path carries the @.idr@ extension (idempotent).
srcPath :: FilePath -> FilePath
srcPath fp = let (n, ext) = splitExtension fp in
                 case ext of
                    ".idr" -> fp
                    _ -> fp ++ ".idr"

-- | Ensure a path carries the @.lidr@ (literate) extension (idempotent).
lsrcPath :: FilePath -> FilePath
lsrcPath fp = let (n, ext) = splitExtension fp in
                  case ext of
                     ".lidr" -> fp
                     _ -> fp ++ ".lidr"

-- Get name of byte compiled version of an import.  When @use_ibcsd@ is set
-- and an ibc directory is configured, the ibc lives under that directory
-- mirroring the source layout; otherwise it sits next to the source.
ibcPath :: FilePath -> Bool -> FilePath -> FilePath
ibcPath ibcsd use_ibcsd fp = let (d_fp, n_fp) = splitFileName fp
                                 d = if (not use_ibcsd) || ibcsd == ""
                                        then d_fp
                                        else ibcsd </> d_fp
                                 n = dropExtension n_fp
                             in d </> n <.> "ibc"

-- | Prefer the ibc under the ibc directory, falling back to the one next
-- to the source when the former does not exist.
ibcPathWithFallback :: FilePath -> FilePath -> IO FilePath
ibcPathWithFallback ibcsd fp = do let ibcp = ibcPath ibcsd True fp
                                  ibc <- doesFileExist' ibcp
                                  return (if ibc
                                             then ibcp
                                             else ibcPath ibcsd False fp)

ibcPathNoFallback :: FilePath -> FilePath -> FilePath
ibcPathNoFallback ibcsd fp = ibcPath ibcsd True fp

-- | Search the given directories for an import, preferring a byte-compiled
-- ibc (paired with its source) over literate source over plain source.
findImport :: [FilePath] -> FilePath -> FilePath -> Idris IFileType
findImport [] ibcsd fp = ierror . Msg $ "Can't find import " ++ fp
findImport (d:ds) ibcsd fp = do let fp_full = d </> fp
                                ibcp <- runIO $ ibcPathWithFallback ibcsd fp_full
                                let idrp = srcPath fp_full
                                let lidrp = lsrcPath fp_full
                                ibc <- runIO $ doesFileExist' ibcp
                                idr <- runIO $ doesFileExist' idrp
                                lidr <- runIO $ doesFileExist' lidrp
                                -- literate source wins over plain source
                                let isrc = if lidr
                                              then LIDR lidrp
                                              else IDR idrp
                                if ibc
                                   then return (IBC ibcp isrc)
                                   else if (idr || lidr)
                                           then return isrc
                                           else findImport ds ibcsd fp

-- find a specific filename somewhere in a path
findInPath :: [FilePath] -> FilePath -> IO FilePath
findInPath [] fp = fail $ "Can't find file " ++ fp
findInPath (d:ds) fp = do let p = d </> fp
                          e <- doesFileExist' p
                          if e then return p else findInPath ds fp

findPkgIndex :: String -> Idris FilePath
findPkgIndex p = do let idx = pkgIndex p
                    ids <- allImportDirs
                    runIO $ findInPath ids idx

-- | List subdirectories of the Idris library dir that contain at least one
-- @.ibc@ anywhere below them (i.e. installed packages).
installedPackages :: IO [String]
installedPackages = do
  idir <- getIdrisLibDir
  filterM (goodDir idir) =<< dirContents idir
  where
    allFilesInDir base fp = do
      let fullpath = base </> fp
      isDir <- doesDirectoryExist' fullpath
      if isDir
        then fmap concat (mapM (allFilesInDir fullpath) =<< dirContents fullpath)
        else return [fp]
    dirContents = fmap (filter (not . (`elem` [".", ".."]))) . getDirectoryContents
    goodDir idir d = any (".ibc" `isSuffixOf`) <$> allFilesInDir idir d

-- | Case sensitive file existence check for Mac OS X.
doesFileExist' :: FilePath -> IO Bool
doesFileExist' = caseSensitive doesFileExist

-- | Case sensitive directory existence check for Mac OS X.
doesDirectoryExist' :: FilePath -> IO Bool
doesDirectoryExist' = caseSensitive doesDirectoryExist

-- Wrap an (often case-insensitive) existence check and additionally require
-- an exact-case directory-listing match.
caseSensitive :: (FilePath -> IO Bool) -> FilePath -> IO Bool
caseSensitive existsCheck name =
  do exists <- existsCheck name
     if exists
        then do contents <- getDirectoryContents (takeDirectory name)
                return $ (takeFileName name) `elem` contents
        else return False
enolan/Idris-dev
src/Idris/Imports.hs
Haskell
bsd-3-clause
4,677
-- | Parse, type-check, lambda-lift and JIT-compile a source string down to
-- an 'Int' result via LLVM.
module Eval where

import System.IO.Unsafe
import Data.Either
import Data.List (nub) -- NOTE(review): 'nub' appears unused here
import qualified Data.Map as Map

import Foreign.Ptr (FunPtr, castFunPtr)

import Control.Monad.Except

import qualified LLVM.General.AST as AST
import qualified LLVM.General.AST.Type as T
import LLVM.General.Analysis
import LLVM.General.Context
import qualified LLVM.General.ExecutionEngine as EE
import LLVM.General.Module as Mod
import LLVM.General.PassManager

import Codegen
import Curry
import Emit
import Environment
import Infer
import Lift
import Parser
import Pretty ()
import Substitute
import Syntax
import Type

foreign import ccall "dynamic" haskFun :: FunPtr (IO Int) -> IO Int

-- | Invoke a JIT-compiled function pointer as @IO Int@.
run :: FunPtr a -> IO Int
run fn = haskFun (castFunPtr fn :: FunPtr (IO Int))

-- | Bracket an MCJIT execution engine at optimisation level 2.
jit :: Context -> (EE.MCJIT -> IO a) -> IO a
jit c = EE.withMCJIT c optlevel model ptrelim fastins
  where
    optlevel = Just 2
    model    = Nothing
    ptrelim  = Nothing
    fastins  = Nothing

passes :: PassSetSpec
passes = defaultCuratedPassSetSpec {optLevel = Just 3}

-- | Run the optimisation passes and execute @main@ inside the module.
--
-- NOTE(review): 'unsafePerformIO' hides JIT execution behind a pure
-- 'Either'; this relies on the caller forcing the result exactly once —
-- TODO confirm this is intentional rather than returning @IO (Either ...)@.
runJIT :: AST.Module -> Either String Int
runJIT mod' = do
  res <-
    unsafePerformIO $
    withContext $ \context ->
      jit context $ \executionEngine ->
        runExceptT $
        withModuleFromAST context mod' $ \m ->
          withPassManager passes $ \pm -> do
            _ <- runPassManager pm m
            EE.withModuleInEngine executionEngine m $ \ee -> do
              mainfn <- EE.getFunction ee (AST.Name "main")
              case mainfn of
                Just fn -> do
                  res <- run fn
                  return $ Right res
                Nothing -> return $ Left "could not find `main` function"
  case res of
    Left err   -> Left err
    Right res' -> Right res'

-- | Generate LLVM IR for a typed program: declares @malloc@, then emits
-- each top-level definition.  The verified module AST is returned.
codegen :: AST.Module -> Program Typed -> [Type] -> IO AST.Module
codegen m fns tys =
  withContext $ \context ->
    liftError $
    withModuleFromAST context newast $ \m' -> do
      liftError $ verify m'
      return newast
  where
    modn = do
      Codegen.declare (T.ptr T.i8) "malloc" [(T.i32, AST.Name "size")]
      mapM (codegenTop tys (Map.fromList $ definitions' fns)) fns
    newast = runLLVM m modn

-- | Full pipeline: parse, substitute, curry, infer, constrain,
-- lambda-lift, codegen, JIT.  Every stage's failure is surfaced as 'Left'.
eval :: String -> IO (Either String Int)
eval source =
  case parseModule "<string>" source of
    Left err -> return $ Left $ show err
    Right prog -> do
      let defs    = definitions prog
      let subbed  = substitute (Map.fromList defs) prog
      let curried = map curryTop subbed
      let defs'   = definitions curried
      case inferTop Environment.empty defs' of
        Left err -> return $ Left $ show err
        Right env -> do
          let core  = constraintsTop env curried
          let corel = lefts core
          let corer = rights core
          if not (null corel)
            then return $ Left $ concatMap show corel
            else do
              let undef = filterDefinitions corer
              let (lifted, _) = lambdaLiftProgram 0 [] undef
              mod' <- Eval.codegen (AST.defaultModule {AST.moduleName = "test"}) lifted typeSymbols
              return $ runJIT mod'
jjingram/satori
src/Eval.hs
Haskell
bsd-3-clause
3,126
import App.Models.Comments as CommentsModel
import Turbinado.Controller

-- | Persist a new comment for the post identified by the @id@ setting,
-- then redirect back to that post's page.
create :: Controller ()
create = do
  postIdStr <- getSetting_u "id" :: Controller String
  commentBody <- getParam_u "body"
  commentAuthor <- getParam_u "author"
  -- NOTE(review): 'Prelude.read' crashes on a non-numeric id — presumably
  -- the routing layer guarantees a numeric id here; confirm.
  let postId = (Prelude.read postIdStr) :: Integer
  CommentsModel.insert
    Comments { author = commentAuthor
             , body = commentBody
             , comment_id = Nothing
             , post_id = postId
             }
    False
  redirectTo ("/Posts/Show/" ++ postIdStr)
abuiles/turbinado-blog
App/Controllers/Comments.hs
Haskell
bsd-3-clause
448
{-# LANGUAGE OverloadedStrings #-}

-- | Scotty demo server backed by a throwaway SQLite database that is
-- recreated and seeded on every start.
module Main where

import Control.Exception
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Logger (LoggingT, runStderrLoggingT)
import Data.Conduit (Source, transPipe, ($$), (=$=))
import Database.Persist (insert)
import Database.Persist.Sql (ConnectionPool, SqlPersistM, runMigration, runSqlPersistMPool)
import Database.Persist.Sqlite (withSqlitePool)
import Handlers
import Model
import Network.Wai.Middleware.RequestLogger (logStdoutDev)
import System.Directory
import System.Environment (getEnv)
import System.IO.Error
import Text.Blaze.Html (Html)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import Web.Scotty (ActionM, ScottyM, get, html, middleware, param, scotty)

-- | Wipe the database file, run migrations, seed demo rows, then serve.
main :: IO ()
main = do
  removeIfExists "my.db"
  runStderrLoggingT . withSqlPool $ \pool -> liftIO $ do
    runSql pool $ do
      runMigration migrateModel
      _ <- insert $ Person "John Doe" 35
      _ <- insert $ Person "Jane Doe" 32
      _ <- insert $ Person "The Dude" 38
      _ <- insert $ Person "Me" 34
      return ()
    port <- getPort
    scotty port $ do
      middleware logStdoutDev
      router pool

-- | Route table.  Handlers come from the "Handlers" module.
router :: ConnectionPool -> ScottyM ()
router pool = do
  get "/people/:personId" $ do
    personId <- param "personId"
    blazeSql pool $ getPerson personId
  get "/people" $ blazeSql pool getPeople
  get "/test/people" $ -- broken
    -- NOTE(review): route marked broken by the author; streaming the SQL
    -- source outside the pool's scope is suspect — confirm before enabling.
    liftIO (runSqlSource pool selectAllPeople $$ peopleToHtml =$= foldSink) >>= blaze
  get "/" $ blazeSql pool getPeople
  get "/:word" $ do
    beam <- param "word"
    blaze $ getWord beam

withSqlPool :: (ConnectionPool -> LoggingT IO ()) -> LoggingT IO ()
withSqlPool = withSqlitePool "my.db" 10

-- | Read the port from @$PORT@.
-- NOTE(review): partial — 'read' throws if @$PORT@ is unset or non-numeric.
getPort :: IO Int
getPort = fmap read $ getEnv "PORT"

-- | Render blaze HTML as the scotty response.
blaze :: Html -> ActionM ()
blaze = html . renderHtml

-- | Run a SQL action against the pool and render its HTML result.
blazeSql :: ConnectionPool -> SqlPersistM Html -> ActionM ()
blazeSql pool sql = liftIO (runSql pool sql) >>= blaze

runSql :: ConnectionPool -> SqlPersistM a -> IO a
runSql = flip runSqlPersistMPool

-- | Delete a file, ignoring only does-not-exist; rethrow anything else.
removeIfExists :: FilePath -> IO ()
removeIfExists fileName = removeFile fileName `catch` handleExists
  where handleExists e
          | isDoesNotExistError e = return ()
          | otherwise = throwIO e

-- | Hoist a SQL-monad conduit source into IO by running each step on the pool.
runSqlSource :: ConnectionPool -> Source SqlPersistM a -> Source IO a
runSqlSource pool = transPipe $ runSql pool
derekjw/scotty-playground
src/Main.hs
Haskell
bsd-3-clause
3,156
-- | Connection settings for SSL-wrapped HaskellNet sessions.
module Network.HaskellNet.SSL
    ( Settings (..)
    , defaultSettingsWithPort
    ) where

import Network.Socket.Internal (PortNumber)

-- | Tunables for establishing an SSL connection.
data Settings = Settings
    { sslPort                         :: PortNumber
    , sslMaxLineLength                :: Int
    , sslLogToConsole                 :: Bool
    , sslDisableCertificateValidation :: Bool
    } deriving (Eq, Ord, Show)

-- | Defaults for the given port: 10000-byte line limit, console logging
-- off, certificate validation enabled.
defaultSettingsWithPort :: PortNumber -> Settings
defaultSettingsWithPort portNumber =
    Settings { sslPort                         = portNumber
             , sslMaxLineLength                = 10000
             , sslLogToConsole                 = False
             , sslDisableCertificateValidation = False
             }
lemol/HaskellNet-SSL
src/Network/HaskellNet/SSL.hs
Haskell
bsd-3-clause
632
-- | A store for storing and retrieving items
--
{-# LANGUAGE ExistentialQuantification, ScopedTypeVariables #-}
module Hakyll.Core.Store
    ( Store
    , StoreGet (..)
    , makeStore
    , storeSet
    , storeGet
    ) where

import Control.Applicative ((<$>))
import Control.Concurrent.MVar (MVar, newMVar, readMVar, modifyMVar_)
import System.FilePath ((</>))
import System.Directory (doesFileExist)
import Data.Maybe (fromMaybe)

import Data.Map (Map)
import qualified Data.Map as M
import Data.Binary (Binary, encodeFile, decodeFile)
import Data.Typeable (Typeable, TypeRep, cast, typeOf)

import Hakyll.Core.Identifier
import Hakyll.Core.Util.File

-- | Items we can store: any serialisable value, existentially wrapped so
-- heterogeneous values share one map.
--
data Storable = forall a. (Binary a, Typeable a) => Storable a

-- | Result when retrieving an item from the store
--
data StoreGet a = Found a
                | NotFound
                | WrongType TypeRep TypeRep
                deriving (Show, Eq)

-- | Data structure used for the store
--
data Store = Store
    { -- | All items are stored on the filesystem
      storeDirectory :: FilePath
    , -- | And some items are also kept in-memory
      storeMap :: Maybe (MVar (Map FilePath Storable))
    }

-- | Initialize the store
--
makeStore :: Bool     -- ^ Use in-memory caching
          -> FilePath -- ^ Directory to use for hard disk storage
          -> IO Store -- ^ Store
makeStore inMemory directory = do
    mvar <- if inMemory then Just <$> newMVar M.empty else return Nothing
    return Store
        { storeDirectory = directory
        , storeMap = mvar
        }

-- | Auxiliary: add an item to the map (no-op when caching is disabled)
--
cacheInsert :: (Binary a, Typeable a) => Store -> FilePath -> a -> IO ()
cacheInsert (Store _ Nothing) _ _ = return ()
cacheInsert (Store _ (Just mv)) path value =
    modifyMVar_ mv $ return . M.insert path (Storable value)

-- | Auxiliary: get an item from the cache; 'WrongType' reports the stored
-- type versus the requested one when the 'cast' fails.
--
cacheLookup :: forall a. (Binary a, Typeable a)
            => Store -> FilePath -> IO (StoreGet a)
cacheLookup (Store _ Nothing) _ = return NotFound
cacheLookup (Store _ (Just mv)) path = do
    map' <- readMVar mv
    case M.lookup path map' of
        Nothing -> return NotFound
        Just (Storable s) -> return $ case cast s of
            Nothing -> WrongType (typeOf s) $ typeOf (undefined :: a)
            Just s' -> Found s'

-- | Create a path: directory/name/group/identifier/hakyllstore
--
makePath :: Store -> String -> Identifier a -> FilePath
makePath store name identifier = storeDirectory store </> name
    </> group </> toFilePath identifier </> "hakyllstore"
  where
    group = fromMaybe "" $ identifierGroup identifier

-- | Store an item (written to disk and, when enabled, cached in memory)
--
storeSet :: (Binary a, Typeable a)
         => Store -> String -> Identifier a -> a -> IO ()
storeSet store name identifier value = do
    makeDirectories path
    encodeFile path value
    cacheInsert store path value
  where
    path = makePath store name identifier

-- | Load an item
--
storeGet :: (Binary a, Typeable a)
         => Store -> String -> Identifier a -> IO (StoreGet a)
storeGet store name identifier = do
    -- First check the in-memory map
    mv <- cacheLookup store path
    case mv of
        -- Not found in the map, try the filesystem
        NotFound -> do
            exists <- doesFileExist path
            if not exists
                -- Not found in the filesystem either
                then return NotFound
                -- Found in the filesystem; warm the cache on the way out
                else do v <- decodeFile path
                        cacheInsert store path v
                        return $ Found v
        -- Found in the in-memory map, just return
        s -> return s
  where
    path = makePath store name identifier
sol/hakyll
src/Hakyll/Core/Store.hs
Haskell
bsd-3-clause
3,676
module ResultWorthy.Tests.Util where

import ResultWorthy.Util
import Test.HUnit

-- | HUnit cases for the list helpers in "ResultWorthy.Util":
-- 'withIndeces' pairs each element with its position, and
-- 'trimLeadingWhitespace' appears to strip leading whitespace from each
-- line — TODO confirm exact semantics against the implementation.
tests = TestLabel "The Util Tests" $ TestList [
    TestLabel "withIndeces" $ TestCase $ assertEqual
      "Arrays should be equal"
      (withIndeces ["foo", "bar", "baz", "bong"])
      [(0, "foo"), (1, "bar"), (2, "baz"), (3, "bong")]
  , TestLabel "trimLeadingWhitespace" $ TestCase $ assertEqual
      "Output should make sense"
      [" foo", "bar", " baz"]
      (trimLeadingWhitespace [" foo", " bar", " baz"])
  ]
lawrencelomax/ResultWorthy
ResultWorthy/Tests/Util.hs
Haskell
bsd-3-clause
596
{-|
Description: SDL audio support.
-}
-- NOTE(review): stub module — exports nothing and contains no definitions yet.
module Graphics.UI.SDL.Audio
       (
       ) where
abbradar/MySDL
src/Graphics/UI/SDL/Audio.hs
Haskell
bsd-3-clause
94
{-# LANGUAGE TemplateHaskell #-}
{-|
    Module      :  AERN2.Poly.Basics
    Description :  Basics of unary sparse polynomials
    Copyright   :  (c) Michal Konecny
    License     :  BSD3
    Maintainer  :  mikkonecny@gmail.com
    Stability   :  experimental
    Portability :  portable

    Basics of unary sparse polynomials.  A polynomial is a degree-to-
    coefficient map; the @terms_*@ helpers wrap "Data.Map" while keeping the
    invariant that a constant (degree-0) term is always present.
-}
module AERN2.Poly.Basics
(
  PolyCoeffRing, PolyCoeffField, PolyCoeffBall
, Poly(..), Degree, Terms
, terms_empty
, terms_size
, terms_insertWith
, terms_toList, terms_toDescList
, terms_fromList, terms_fromListAddCoeffs
, terms_unionWith
, terms_map
, terms_filterKeepConst
, terms_filterMayLoseConst
, terms_degree, terms_degrees
, terms_coeffs
, terms_updateConst, terms_updateReturnConst
, terms_lookupCoeff, terms_lookupCoeffDoubleConstTerm
, formatTerms
)
where

import MixedTypesNumPrelude
import qualified Prelude as P
-- import Text.Printf

import qualified Data.Map as Map
import qualified Data.List as List

-- import Test.Hspec
-- import Test.QuickCheck

import Control.CollectErrors

-- import AERN2.MP.ErrorBound
import AERN2.MP.Ball
import AERN2.MP.Dyadic

import AERN2.Real

-- import AERN2.Interval
-- import AERN2.RealFun.Operations
-- import AERN2.RealFun.UnaryBallFun

{- types -}

{-| An aggregate sub-class for types suitable as coefficients of our
    polynomials, loose enough to permit Integer coefficients. -}
class (Ring c, HasIntegers c, HasAccuracy c, HasNorm c, Show c) => PolyCoeffRing c

instance PolyCoeffRing Integer
instance PolyCoeffRing Dyadic
instance PolyCoeffRing Rational
instance PolyCoeffRing MPBall

{-| An aggregate sub-class for types suitable as coefficients of our
    polynomials, loose enough to permit Rational coefficients. -}
class (PolyCoeffRing c, Field c, HasDyadics c, CanAddSubMulDivCNBy c Dyadic) => PolyCoeffField c

instance PolyCoeffField Rational
instance PolyCoeffField MPBall

{-| An aggregate sub-class for types suitable as coefficients of our polynomials -}
class (PolyCoeffField c, CanAddSubMulDivCNBy c CauchyReal
      , IsInterval c, CanMinMaxSameType (IntervalEndpoint c), IsBall c, CanSetPrecision c)
  => PolyCoeffBall c

instance PolyCoeffBall MPBall

-- | A unary sparse polynomial: a map from degree to coefficient.
newtype Poly c = Poly { poly_terms :: Terms c }

instance (CanBeErrors es) => CanEnsureCE es (Poly c)

instance (CanBeErrors es) => CanExtractCE es Poly
  where
  extractCE sample_es (Poly terms) = fmap Poly (extractCE sample_es terms)

type Terms c = Map.Map Degree c

type Degree = Integer

instance (CanBeErrors es) => CanExtractCE es (Map.Map Degree)

terms_empty :: Terms c
terms_empty = Map.empty

terms_size :: Terms c -> Integer
terms_size = integer . Map.size

terms_insertWith :: (c -> c -> c) -> Degree -> c -> Terms c -> Terms c
terms_insertWith = Map.insertWith

terms_toList :: Terms c -> [(Degree, c)]
terms_toList = Map.toList

terms_toDescList :: Terms c -> [(Degree, c)]
terms_toDescList = Map.toDescList

-- | Build terms from an assoc list, inserting an exact-zero constant term
-- if none is given (later duplicates are ignored by 'Map.fromList').
terms_fromList :: (HasIntegers c) => [(Degree, c)] -> Terms c
terms_fromList coeffs =
  case Map.lookup 0 ts of
    Nothing -> Map.insert 0 (convertExactly 0) ts
    _ -> ts
  where
  ts = Map.fromList coeffs

-- | Build terms from an assoc list, summing coefficients that share a
-- degree; a zero constant term is always seeded first.
terms_fromListAddCoeffs :: (CanAddSameType c, HasIntegers c) => [(Degree, c)] -> Terms c
terms_fromListAddCoeffs newTerms =
  foldl addTerm terms_empty ((0, convertExactly 0) : newTerms)
  where
  addTerm prevTerms (i,a) = terms_insertWith (+) i a prevTerms

terms_unionWith :: (c -> c -> c) -> Terms c -> Terms c -> Terms c
terms_unionWith = Map.unionWith

-- | Filter terms; may drop the constant term (callers must restore it).
terms_filterMayLoseConst :: (Degree -> c -> Bool) -> Terms c -> Terms c
terms_filterMayLoseConst = Map.filterWithKey

-- | Filter terms but unconditionally keep the constant (degree-0) term.
terms_filterKeepConst :: (Degree -> c -> Bool) -> Terms c -> Terms c
terms_filterKeepConst cond = Map.filterWithKey cond_leaveConst
  where
  cond_leaveConst k a
    | k == 0 = True
    | otherwise = cond k a

terms_degree :: Terms c -> Degree
terms_degree ts
  | null ts = error "terms_degree called with empty terms"
  | otherwise = fst $ Map.findMax ts

terms_degrees :: Terms c -> [Degree]
terms_degrees = Map.keys

terms_coeffs :: Terms c -> [c]
terms_coeffs = Map.elems

terms_map :: (c1 -> c2) -> Terms c1 -> Terms c2
terms_map = Map.map

-- | Apply a function to the constant term, creating it from exact zero if
-- it is missing.
terms_updateConst :: (HasIntegers c) => (c -> c) -> Terms c -> Terms c
terms_updateConst updateFn ts =
  case Map.lookup 0 ts of
    Nothing -> Map.insert 0 (updateFn $ convertExactly 0) ts
    Just _ -> Map.adjust updateFn 0 ts

-- | Like 'terms_updateConst' but also returns (old constant, new constant).
terms_updateReturnConst :: (HasIntegers c) => (c -> c) -> Terms c -> (Terms c,c,c)
terms_updateReturnConst updateFn ts =
  case Map.lookup 0 ts of
    Nothing -> let new = updateFn z in (Map.insert 0 new ts, z, new)
    Just old -> let new = updateFn old in (Map.insert 0 new ts, old, new)
  where
  z = convertExactly 0

-- | Look up a coefficient, doubling it when it is the constant term.
terms_lookupCoeffDoubleConstTerm ::
  (HasIntegers c, CanAddSameType c) => (Terms c) -> Degree -> c
terms_lookupCoeffDoubleConstTerm t i
  | i == 0 = c+c
  | otherwise = c
  where
  c = terms_lookupCoeff t i

-- | Look up a coefficient, defaulting to exact zero for absent degrees.
terms_lookupCoeff :: (HasIntegers c) => (Terms c) -> Degree -> c
terms_lookupCoeff t i =
  case Map.lookup i t of
    Just c -> c
    _ -> convertExactly 0

{- precision -}

instance (HasPrecision c) => HasPrecision (Poly c) where
  getPrecision (Poly ts) = foldl1 max $ map getPrecision $ terms_coeffs ts

instance (CanSetPrecision c) => CanSetPrecision (Poly c) where
  setPrecision p (Poly ts) = Poly $ terms_map (setPrecision p) ts

{- accuracy -}

instance (HasAccuracy c) => HasAccuracy (Poly c) where
  getAccuracy (Poly ts) = foldl1 min $ map getAccuracy $ terms_coeffs ts
  getFiniteAccuracy (Poly ts) = foldl1 min $ map getFiniteAccuracy $ terms_coeffs ts

{- negation -}

instance (CanNegSameType c) => CanNeg (Poly c) where
  type NegType (Poly c) = Poly c
  negate (Poly t1) = Poly $ terms_map negate t1

{- addition -}

instance (CanAddSameType c) => CanAddAsymmetric (Poly c) (Poly c) where
  type AddType (Poly c) (Poly c) = Poly c
  add (Poly t1) (Poly t2) = Poly $ terms_unionWith (+) t1 t2

-- Scalar + polynomial: the scalar folds into the constant term.
$(declForTypes
  [[t| Integer |], [t| Int |], [t| Rational |], [t| Dyadic |], [t| MPBall |], [t| CauchyReal |]]
  (\ t -> [d|

  instance (CanAddThis c $t, HasIntegers c) => CanAddAsymmetric $t (Poly c) where
    type AddType $t (Poly c) = Poly c
    add n (Poly t2) = Poly $ terms_updateConst (+ n) t2

  instance (CanAddThis c $t, HasIntegers c) => CanAddAsymmetric (Poly c) $t where
    type AddType (Poly c) $t = Poly c
    add (Poly t1) n = Poly $ terms_updateConst (+ n) t1

  |]))

{- subtraction -}

instance (CanNegSameType c, CanAddSameType c) => CanSub (Poly c) (Poly c)

$(declForTypes
  [[t| Integer |], [t| Int |], [t| Rational |], [t| Dyadic |], [t| MPBall |], [t| CauchyReal |]]
  (\ t -> [d|

  instance (CanNegSameType c, CanAddThis c $t, HasIntegers c) => CanSub $t (Poly c)

  instance (CanAddThis c $t, HasIntegers c) => CanSub (Poly c) $t

  |]))

{- scaling -}

-- Scalar * polynomial: scale every coefficient.
$(declForTypes
  [[t| Integer |], [t| Int |], [t| Rational |], [t| Dyadic |], [t| MPBall |], [t| CauchyReal |]]
  (\ t -> [d|

  instance (CanMulBy c $t) => CanMulAsymmetric $t (Poly c) where
    type MulType $t (Poly c) = Poly c
    mul n (Poly t2) = Poly $ terms_map (* n) t2

  instance (CanMulBy c $t) => CanMulAsymmetric (Poly c) $t where
    type MulType (Poly c) $t = Poly c
    mul (Poly t1) n = Poly $ terms_map (* n) t1

  |]))

$(declForTypes
  [[t| Integer |], [t| Int |], [t| Rational |], [t| Dyadic |], [t| MPBall |], [t| CauchyReal |]]
  (\ t -> [d|

  instance (CanDivCNBy c $t, CanEnsureCN (DivType c $t), EnsureNoCN (DivType c $t) ~ c) => CanDiv (Poly c) $t where
    type DivType (Poly c) $t = (Poly (EnsureCN c))
    divide (Poly t1) n = Poly $ terms_map (/ n) t1
    type DivTypeNoCN (Poly c) $t = Poly c
    divideNoCN (Poly t1) n = Poly $ terms_map (/! n) t1

  |]))

{- show -}

instance (Show c, HasIntegers c) => Show (Poly c) where
  show (Poly terms) = formatTerms showCf terms
    where
    showCf c =
      --(show (c::MPBall), (c == (convertExactly 0)) == Just True, (c == (convertExactly 1)) == Just True)
      (show c, False, False)

-- | Pretty-print terms in ascending degree.  The coefficient renderer
-- returns (text, isExactZero, isExactOne) so zero terms can be elided and
-- unit coefficients printed as bare powers.
formatTerms :: (HasIntegers c) => (c -> (String, Bool, Bool)) -> Terms c -> String
formatTerms showCf terms =
  showTerms ("", "-") $
    List.sortBy (\(a,_) (b,_) -> P.compare a b) $
      termsToShow
  where
  showTerms (connectivePos, connectiveNeg) (term : rest) =
    termS ++ (showTerms (" + ", " - ") rest)
    where
    termS =
      case s of
        '-':ss -> connectiveNeg ++ ss
        _ -> connectivePos ++ s
    s = showTerm term
  showTerms _ [] = ""
  -- when everything is exactly zero, still print a single "0" term
  termsToShow =
    if null termsToShow_pre
      then [(0, convertExactly 0)]
      else termsToShow_pre
  termsToShow_pre =
    filter coeffNotExactZero $ terms_toList terms
  coeffNotExactZero (_, cf) = not isZero
    where
    (_, isZero, _) = showCf cf
  showTerm (deg, coeff)
    | deg == 0 = coeffS
    | isOne = showPower
    | otherwise = coeffS ++ "*" ++ showPower
    where
    (coeffS, _, isOne) = showCf coeff
    showPower
      | deg == 1 = "x"
      | otherwise = "x^" ++ show deg
michalkonecny/aern2
aern2-fun-univariate/src/AERN2/Poly/Basics.hs
Haskell
bsd-3-clause
9,205
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Haskell binding for the material-ui Badge component.
module React.Flux.Mui.Badge where

import Protolude

import Data.Aeson
import Data.Aeson.Casing
import Data.String (String)
import React.Flux

import React.Flux.Mui.Util

-- | Props accepted by the Badge component.
data Badge = Badge
  { badgeClassName :: !(Maybe Text)
  , badgePrimary :: !(Maybe Bool)
  , badgeSecondary :: !(Maybe Bool)
  } deriving (Generic, Show)

-- JSON keys are the field names with the "badge" prefix dropped,
-- camel-cased (e.g. badgeClassName -> className).
instance ToJSON Badge where
  toJSON = genericToJSON $ aesonDrop (length ("Badge" :: String)) camelCase

-- | Default props: no class name, neither primary nor secondary styling.
defBadge :: Badge
defBadge =
  Badge
  { badgeClassName = Nothing
  , badgePrimary = Just False
  , badgeSecondary = Just False
  }

-- | Render a Badge with the given props, extra properties/handlers, and
-- child elements.
badge_ ::
     Badge
  -> [PropertyOrHandler handler]
  -> ReactElementM handler ()
  -> ReactElementM handler ()
badge_ args props = foreign_ "Badge" (fromMaybe [] (toProps args) ++ props)
pbogdan/react-flux-mui
react-flux-mui/src/React/Flux/Mui/Badge.hs
Haskell
bsd-3-clause
820
{-# LANGUAGE DeriveGeneric, OverloadedStrings #-}

-- | Core data types shared across the Pygmalion indexer, daemon and query
-- tools: compile commands, source locations, definitions and references.
module Pygmalion.Core
( CommandInfo (..)
, SourceFile
, SourceFileHash
, SourceFileWrapper
, unwrapSourceFile
, SourceFileHashWrapper
, unwrapSourceFileHash
, WorkingPath
, Time
, TimeHash
, Language (..)
, Inclusion (..)
, DefInfo (..)
, DefUpdate (..)
, SourceLocation (..)
, SourceRange (..)
, Identifier
, USRHash
, SourceLine
, SourceCol
, SourceKind (..)
, Override (..)
, Invocation (..)
, Reference (..)
, ReferenceUpdate (..)
, SourceReferenced (..)
, SourceReference (..)
, SourceContext
, mkSourceFile
, unSourceFile
--, unSourceFileText
, queryExecutable
, daemonExecutable
, indexExecutable
, pygmalionDir
, dbFile
, configFile
, socketFile
, compileCommandsFile
, tagsFile
, stableHash
, stableHashWithSalt
) where

import Control.Applicative
import qualified Data.ByteString.UTF8 as B
import Data.Int
import Data.Hashable
import Data.Serialize
import Database.SQLite.Simple (FromRow(..), field)
import GHC.Generics
import System.FilePath.Posix ((</>))

import Pygmalion.SourceKind

-- The information we collect about a compilation command.
data CommandInfo = CommandInfo
  { ciSourceFile :: !SourceFile
  , ciWorkingPath :: !WorkingPath
  , ciCommand :: !B.ByteString
  , ciArgs :: ![B.ByteString]
  , ciLanguage :: !Language
  } deriving (Eq, Read, Show, Generic)

{-
instance Serialize T.Text where
  put = put . TE.encodeUtf16BE
  get = liftM (TE.decodeUtf16BEWith onError) get
    where onError _ _ = Nothing
-}

instance Serialize CommandInfo

-- Args are stored newline-separated in a single DB column.
instance FromRow CommandInfo where
  fromRow = CommandInfo <$> field               -- ciSourceFile
                        <*> field               -- ciWorkingPath
                        <*> field               -- ciCommand
                        <*> (B.lines <$> field) -- ciArgs
                        <*> fromRow             -- ciLanguage

type SourceFile = B.ByteString
type SourceFileHash = Int

mkSourceFile :: FilePath -> SourceFile
mkSourceFile = B.fromString

unSourceFile :: SourceFile -> FilePath
unSourceFile = B.toString

-- Single-column row wrapper for queries returning just a source file.
newtype SourceFileWrapper = SourceFileWrapper SourceFile
unwrapSourceFile :: SourceFileWrapper -> SourceFile
unwrapSourceFile (SourceFileWrapper sf) = sf

instance FromRow SourceFileWrapper where
  fromRow = SourceFileWrapper <$> field

-- Single-column row wrapper for queries returning just a file hash.
newtype SourceFileHashWrapper = SourceFileHashWrapper SourceFileHash
unwrapSourceFileHash :: SourceFileHashWrapper -> SourceFileHash
unwrapSourceFileHash (SourceFileHashWrapper sf) = sf

instance FromRow SourceFileHashWrapper where
  fromRow = SourceFileHashWrapper <$> field

type WorkingPath = B.ByteString
type Time = Int64
type TimeHash = Int

data Language = CLanguage
              | CPPLanguage
              | UnknownLanguage
              deriving (Eq, Enum, Generic, Ord, Read, Show)

instance Serialize Language

-- Stored in the DB as the Enum index.
instance FromRow Language where
  fromRow = toEnum <$> field

-- | Inclusion metadata.
data Inclusion = Inclusion
  { icInclusion :: !SourceFile    -- ^ The included file.
  , icIncluder :: !SourceFileHash -- ^ The file which does the including.
  } deriving (Eq, Show, Generic)

instance Serialize Inclusion

-- The information we collect about definitions in source code.
data DefInfo = DefInfo
  { diIdentifier :: !Identifier
  , diUSR :: !USRHash
  , diSourceLocation :: !SourceLocation
  , diDefKind :: !SourceKind
  , diContext :: !USRHash
  } deriving (Eq, Show, Generic)

instance Serialize DefInfo

instance FromRow DefInfo where
  fromRow = DefInfo <$> field <*> field <*> fromRow <*> fromRow <*> field

-- Cheaper variant of DefInfo used for database updates.
data DefUpdate = DefUpdate
  { diuIdentifier :: !Identifier
  , diuUSR :: !USRHash
  , diuFileHash :: !SourceFileHash
  , diuLine :: !SourceLine
  , diuCol :: !SourceCol
  , diuDefKind :: !SourceKind
  , diuContext :: !USRHash
  } deriving (Eq, Show, Generic)

instance Serialize DefUpdate

data SourceLocation = SourceLocation
  { slFile :: !SourceFile
  , slLine :: !SourceLine
  , slCol :: !SourceCol
  } deriving (Eq, Show, Generic)

instance Serialize SourceLocation

instance FromRow SourceLocation where
  fromRow = SourceLocation <$> field <*> field <*> field

data SourceRange = SourceRange
  { srFile :: !SourceFile
  , srLine :: !SourceLine
  , srCol :: !SourceCol
  , srEndLine :: !SourceLine
  , srEndCol :: !SourceCol
  } deriving (Eq, Show, Generic)

instance Serialize SourceRange

instance FromRow SourceRange where
  fromRow = SourceRange <$> field <*> field <*> field <*> field <*> field

type Identifier = B.ByteString
type USRHash = Int
type RefHash = Int
type SourceLine = Int
type SourceCol = Int

-- This would be the cheaper variant of Override, but we never return these
-- directly from queries (we always return DefInfos) so we don't need the full
-- version at all.
data Override = Override
  { orDef :: !USRHash
  , orOverrided :: !USRHash
  } deriving (Eq, Show, Generic)

instance Serialize Override

data Invocation = Invocation
  { ivDefInfo :: !DefInfo
  , ivSourceLocation :: !SourceLocation
  } deriving (Eq, Show, Generic)

instance Serialize Invocation

instance FromRow Invocation where
  fromRow = Invocation <$> fromRow <*> fromRow

data Reference = Reference
  { rfRange :: !SourceRange
  , rfKind :: !SourceKind
  , rfContext :: !USRHash
  , rfUSR :: !USRHash
  } deriving (Eq, Show, Generic)

instance Serialize Reference

-- Cheaper variant of Reference used for database updates.
data ReferenceUpdate = ReferenceUpdate { rfuId :: !RefHash , rfuFileHash :: !SourceFileHash , rfuLine :: !SourceLine , rfuCol :: !SourceCol , rfuEndLine :: !SourceLine , rfuEndCol :: !SourceCol , rfuKind :: !SourceKind , rfuViaHash :: !USRHash , rfuDeclHash :: !RefHash , rfuContextHash :: !USRHash , rfuUSRHash :: !USRHash } deriving (Eq, Show, Generic) instance Serialize ReferenceUpdate data SourceReferenced = SourceReferenced { sdDef :: !DefInfo , sdRange :: !SourceRange , sdKind :: !SourceKind , sdViaHash :: !USRHash , sdDeclHash :: !RefHash } deriving (Eq, Show, Generic) instance Serialize SourceReferenced instance FromRow SourceReferenced where fromRow = SourceReferenced <$> fromRow <*> fromRow <*> fromRow <*> field <*> field data SourceReference = SourceReference { srLocation :: !SourceLocation , srKind :: !SourceKind , srContext :: !SourceContext } deriving (Eq, Show, Generic) instance Serialize SourceReference instance FromRow SourceReference where fromRow = SourceReference <$> fromRow <*> fromRow <*> field type SourceContext = B.ByteString -- Tool names. queryExecutable, daemonExecutable, indexExecutable :: String queryExecutable = "pyg" daemonExecutable = "pygd" indexExecutable = "pygindex-clang" -- Data files. pygmalionDir, dbFile, configFile, socketFile, compileCommandsFile, tagsFile :: FilePath pygmalionDir = ".pygmalion" dbFile = pygmalionDir </> "index.sqlite" configFile = pygmalionDir </> "pygmalion.yaml" socketFile = pygmalionDir </> "socket" compileCommandsFile = "compile_commands.json" tagsFile = "TAGS" -- | The value returned by hashable's 'hash' is different for every process -- because it uses a number derived from the process's start time as a salt. -- We need a stable hash, so we use 0 as a salt no matter what. stableHash :: Hashable a => a -> Int stableHash = hashWithSalt 0 -- | Like 'stableHash', but for 'hashWithSalt'. 
This is identical to -- 'hashWithSalt', but having this means that we can avoid importing -- "Data.Hashable" at all and be sure that we don't accidentally use -- 'hash' without realizing it. stableHashWithSalt :: Hashable a => Int -> a -> Int stableHashWithSalt = hashWithSalt
sethfowler/pygmalion
src/Pygmalion/Core.hs
Haskell
bsd-3-clause
7,991
-- | 'NullPoint': pointed types (usually containers) that have a
-- distinguished empty value.  This corresponds to
-- 'Data.Monoid.mempty', but without requiring a full 'Monoid'
-- instance.
module Data.NullPoint
  ( -- * Classes
    NullPoint (..)
  ) where

import qualified Data.ByteString      as B
import qualified Data.ByteString.Lazy as L

-- | Containers with a null representation.
--
-- 'empty' plays the role of 'Data.Monoid.mempty' for types that may
-- not have a sensible append operation.
class NullPoint c where
  -- | The empty container.
  empty :: c

instance NullPoint [a] where
  empty = []

instance NullPoint B.ByteString where
  empty = B.empty

instance NullPoint L.ByteString where
  empty = L.empty
iteloo/tsuru-sample
iteratee-0.8.9.6/src/Data/NullPoint.hs
Haskell
bsd-3-clause
609
-- | Set-theoretic operators for types with an 'Ord' instance.
--
-- The "Data.List" implementations of 'Data.List.intersect',
-- 'Data.List.\\' and 'Data.List.nub' only require an 'Eq' instance
-- and therefore run in quadratic time.  When an 'Ord' instance is
-- available, 'Set.Set' / 'Map.Map' lookups let us speed the
-- evaluation up significantly (to /O(n log n)/).
module Data.Extension.Ord
  ( intersect
  , nub
  , subset
  , (\\)
  ) where

import qualified Data.Map.Strict as Map
import qualified Data.Set as Set

-- | Keep the elements of the first list that also occur in the second.
-- Order and duplicates of the first list are preserved, matching the
-- behaviour of 'Data.List.intersect'.
intersect :: (Ord a) => [a] -> [a] -> [a]
intersect a b = filter (`Set.member` bset) a
  where
    bset = Set.fromList b

-- | Remove duplicate elements, keeping the first occurrence of each.
-- Unlike 'Data.List.nub' this runs in /O(n log n)/.
nub :: (Ord a) => [a] -> [a]
nub = go Set.empty
  where
    go _ [] = []
    go seen (x:xs)
      | x `Set.member` seen = go seen xs
      | otherwise           = x : go (Set.insert x seen) xs

-- | @x \`subset\` y@ is 'True' iff every element of @x@ occurs in @y@,
-- counting multiplicity (it is defined via '(\\)').
subset :: Ord a => [a] -> [a] -> Bool
x `subset` y = null (x \\ y)

infix 5 \\

-- | List difference with multiplicity: each element of the second list
-- cancels at most one matching occurrence in the first list, as with
-- 'Data.List.\\'.  A multiset of the second list is built up front so
-- each step is a map lookup rather than a list scan.
--
-- Note: the accumulator was renamed from @init@ to @counts@ to avoid
-- shadowing 'Prelude.init'.
(\\) :: (Ord a) => [a] -> [a] -> [a]
a \\ b = go counts a
  where
    counts = Map.fromListWith (+) [(x, 1 :: Int) | x <- b]
    go _ [] = []
    go hist (x:xs) =
      case Map.lookup x hist of
        Just n | n > 0 -> go (Map.insert x (n - 1) hist) xs
        _              -> x : go hist xs
shingoOKAWA/hsarg-haskell
src/Data/Extension/Ord.hs
Haskell
bsd-3-clause
1,231
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE BangPatterns #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE FlexibleContexts #-} {-# OPTIONS_GHC -Wall -fenable-rewrite-rules -ddump-rule-firings -ddump-to-file #-} import Control.Applicative import Control.Arrow (first) import qualified Control.Exception as Ex import Control.Monad.Writer import qualified Data.Foldable as Fold import Data.IORef import Data.Maybe import qualified Data.Sequence as Q import Data.Typeable import Test.Framework import Test.Framework.Providers.QuickCheck2 import Test.QuickCheck (Arbitrary, arbitrary) import qualified Test.QuickCheck as QC import qualified Test.QuickCheck.Property as QC import Text.PrettyPrint import Text.Show.Functions () import qualified NoRewrite as NR import qualified Transducers.Fold as TrFold import Transducers.FreeMonad import Transducers.Par import Transducers.Transducers as Tr main :: IO () main = defaultMain [ testProperty "yield" prop_yield , testProperty "tryAwait" prop_tryAwait , testProperty "panic" prop_panic , testProperty "return" prop_return , testProperty "bind" prop_bind , testProperty "bind_assoc" prop_bind_assoc , testProperty "comp" prop_comp -- , testProperty "comp_assoc" _prop_comp_assoc -- , testProperty "comp_left_id" _prop_comp_left_id -- , testProperty "comp_right_id" _prop_comp_right_id , testProperty "tmap" prop_tmap , testProperty "tfilter" prop_tfilter , testProperty "mapM" prop_mapM , testProperty "mapM" prop_dropWhileM , testProperty "tfold" prop_tfold , testProperty "tscanl" prop_tscanl , testProperty "feed" prop_feed , testProperty "rewrite_tmap" prop_rewrite_tmap , testProperty "rewrite_tfilter" prop_rewrite_tfilter , testProperty "rewrite_flatten" prop_rewrite_flatten , testProperty "rewrite_tscanl" prop_rewrite_tscanl , testProperty "rewrite_tfold" prop_rewrite_tfold , testProperty "rewrite_mapM" prop_rewrite_mapM , testProperty "rewrite_dropWhileM" prop_rewrite_dropWhileM , testProperty 
"flatten/tfilter" prop_flatten_tfilter , testProperty "par" prop_par ] -- | A monad where we can easily observe all side effects. type TestMonad = Writer (Q.Seq SideEffect) -- | A side effect in 'TestMonad'. type SideEffect = Int -- | Result of executing a transducer data Trace i o a = Trace [i] -- ^ remaining input [Event o] -- ^ history of events (Maybe a) -- ^ result, or Nothing if the input ends prematurely deriving (Eq, Show) -- | A thing a transducer can do. data Event o = TryE | YieldE o | PanicE TestException | TLiftE (Q.Seq SideEffect) deriving (Show, Eq) -- | An exception type for testing. We only allow this type of exception, -- because other exception types are not comparable in general. newtype TestException = TestException Int deriving (Typeable, Show, Eq) instance Ex.Exception TestException instance Arbitrary TestException where arbitrary = TestException <$> arbitrary -- | A type-restricted version of 'Transducer', together with a string -- representation. data TestTransducer = TestTransducer { testTransducerExpression :: String , unTestTransducer :: Transducer Ex.SomeException Int Int TestMonad Int } instance Show TestTransducer where show = testTransducerExpression instance Arbitrary TestTransducer where arbitrary = fmap (uncurry TestTransducer . first renderPDoc) $ arbitraryTransducerWith $ \gen -> do (doc, val) <- gen effects <- arbitrary let doc' = opl 1 ">>" (app (lit "tell") $ app (lit "Q.fromList") (lit $ show effects)) (app (lit "return") doc) return (doc', val <$ tell (Q.fromList effects)) -- | Pretty-printed expression with operator precedence. type PDoc = Int -> Doc -- | Turn a PDoc into String. renderPDoc :: PDoc -> String renderPDoc = render . 
($0) -- | Left-associative operator opl :: Int -> String -> PDoc -> PDoc -> PDoc opl prec opr x y p = parensIf (prec < p) $ fsep [ x prec , text opr , y (prec + 1) ] -- | Literal lit :: String -> PDoc lit str _ = text str -- | Function application app :: PDoc -> PDoc -> PDoc app x y p = parensIf (10 < p) $ fsep [ x 10 , y 11 ] parensIf :: Bool -> Doc -> Doc parensIf True = parens parensIf False = id arbitraryTransducerWith :: forall o a i m . (Arbitrary o, Arbitrary a, Show a, Show o) => (forall r. QC.Gen (PDoc, r) -> QC.Gen (PDoc, m r)) -> QC.Gen (PDoc, Transducer Ex.SomeException i o m a) arbitraryTransducerWith genM = fun1 "Trs" Trs <$> go where go :: QC.Gen (PDoc, FreeMonad (TransducerF Ex.SomeException i o m) a) go = fmap (fun1 "fromView" fromView) $ QC.sized $ \size -> if size == 0 then fun1 "Pure" Pure . addDoc <$> arbitrary else fmap (fun1 "Impure" Impure) $ QC.resize (size-1) $ QC.oneof [ fun1 "Try" Try . mkConst <$> go , fun2 "Yield" Yield . addDoc <$> arbitrary <*> go , fun2 "Panic" Panic <$> (fun1 "Ex.toException" Ex.toException . fun1 "TestException" TestException . addDoc <$> arbitrary) <*> go , fun1 "TLift" TLift <$> genM go ] mkConst = fun1 "const" const addDoc x = (\_ -> parens $ text (show x), x) fun1 str f (doc, val) = (app (lit str) doc, f val) fun2 str f (doc0, val0) (doc1, val1) = (app (app (lit str) doc0) doc1, f val0 val1) --arbitraryFunction :: CoArbitrary a => QC.Gen b -> QC.Gen (a -> b) --arbitraryFunction gen = QC.promote (`QC.coarbitrary` gen) -- | Run a transducer using the given input. exec :: Transducer Ex.SomeException i o TestMonad a -> [i] -> Trace i o a exec = execWith True -- | Run a transducer using the given input. Does not assume the EOF -- at the end of the input. 
_execPartial :: Transducer Ex.SomeException i o TestMonad a -> [i] -> Trace i o a _execPartial = execWith False execWith :: Bool -> Transducer Ex.SomeException i o TestMonad a -> [i] -> Trace i o a execWith terminate trans0 is0 = Trace remaining (Fold.toList events) out where ((out, remaining), events) = runWriter $ go (unTRS trans0) is0 go trans is = case toView trans of Pure x -> return (Just x, is) Impure act -> case act of Try cont -> do emit TryE case is of [] | terminate -> go (cont Nothing) [] | otherwise -> return (Nothing, is) i:rest -> go (cont (Just i)) rest Yield o cont -> do emit $ YieldE o go cont is Panic e cont -> case Ex.fromException e of Just myEx -> do emit $ PanicE myEx go cont is Nothing -> error $ "exec: unknown exception " ++ show e TLift action -> do let !(cont, sideEffects) = runWriter action emit $ TLiftE sideEffects go cont is emit :: MonadWriter (Q.Seq a) m => a -> m () emit = tell . Q.singleton -- | Run a transducer, and returns a summary of execution summary :: Transducer Ex.SomeException i o TestMonad a -> (Bool, [i]) -> ([i], [o], [SideEffect], Maybe a) summary trans (terminate, inp) = (remaining, out, effects, end) where !(Trace remaining evts end) = execWith terminate trans inp out = [o | YieldE o <- evts ] effects = Fold.toList $ mconcat [e | TLiftE e <- evts ] output' :: Transducer Ex.SomeException i o TestMonad a -> (Bool, [i]) -> (Bool, [o]) output' trans inp = (isJust end, out) where !(_, out, _, end) = summary trans inp output :: Transducer Ex.SomeException i o TestMonad a -> (Bool, [i]) -> [o] output trans inp = snd $ output' trans inp -- Primitives prop_yield :: Int -> [Int] -> Bool prop_yield x is = exec (yield x) is == Trace is [YieldE x] (Just ()) prop_tryAwait :: [Int] -> Bool prop_tryAwait is = exec tryAwait is == case is of i:rest -> Trace rest [TryE::Event ()] (Just (Just i)) [] -> Trace is [TryE] (Just Nothing) prop_panic :: TestException -> [Int] -> Bool prop_panic ex is = exec (panic (Ex.toException ex)) is == 
Trace is [PanicE ex::Event()] (Just ()) -- Transducers as a monad prop_return :: Int -> [Int] -> Bool prop_return x is = exec (return x) is == Trace is ([]::[Event()]) (Just x) prop_bind :: TestTransducer -> (Int -> TestTransducer) -> [Int] -> Bool prop_bind (TestTransducer _ x) f is = exec (x >>= unTestTransducer . f) is == expected where expected = let r0@(Trace is1 evts0 out0) = exec x is in case out0 of Nothing -> r0 Just val -> let !(Trace is2 evts1 out1) = exec (unTestTransducer $ f val) is1 in Trace is2 (evts0 ++ evts1) out1 prop_bind_assoc :: TestTransducer -> (Int -> TestTransducer) -> (Int -> TestTransducer) -> [Int] -> Bool prop_bind_assoc (TestTransducer _ x) f g is = exec (x >>= (unTestTransducer . f) >>= (unTestTransducer . g)) is == exec (x >>= (\v -> unTestTransducer (f v) >>= (unTestTransducer . g))) is -- Composition prop_comp :: TestTransducer -> TestTransducer -> [Int] -> Bool prop_comp (TestTransducer _ x) (TestTransducer _ y) is = output (x ><> y) (True, is) == output y (output' x (True, is)) -- Note: this is not a complete specification of (><>), because it -- doesn't care how other events are ordered except for YieldEs. -- does not hold, see counterexamples below. _prop_comp_assoc :: TestTransducer -> TestTransducer -> TestTransducer -> [Int] -> Bool _prop_comp_assoc (TestTransducer _ x) (TestTransducer _ y) (TestTransducer _ z) is = exec (x ><> (y ><> z)) is == exec ((x ><> y) ><> z) is -- | The identity transducer identity :: (Monad m) => Transducer Ex.SomeException i i m () identity = tryAwait >>= Fold.mapM_ (\item -> yield item >> identity) -- does not hold, because (identity ><> x) asks for input even if x does not. 
_prop_comp_left_id :: TestTransducer -> [Int] -> Bool _prop_comp_left_id (TestTransducer _ x) is = exec (identity ><> x) is == exec x is -- does not hold _prop_comp_right_id :: TestTransducer -> [Int] -> Bool _prop_comp_right_id (TestTransducer _ x) is = exec (x ><> identity) is == exec (void x) is -- Higher-level API prop_tmap :: [Int] -> Bool prop_tmap is = summary (tmap (+1)) (True, is) == ([], map (+1) is, [], Just ()) prop_tfilter :: [Int] -> Bool prop_tfilter is = summary (tfilter even) (True, is) == ([], filter even is, [], Just ()) prop_mapM :: [Int] -> Bool prop_mapM is = summary (Tr.mapM f) (True, is) == ([], map (+1) is, is, Just ()) where f x = do emit x return $ x + 1 prop_dropWhileM :: [Int] -> Bool prop_dropWhileM is = summary (Tr.dropWhileM f) (True, is) == ([], rest, dropped, Just ()) where (dropped,rest) = span even is f x = do -- dropWhileM has to run the predicate until it fails, so -- here we check the predicate before emitting the tested value when (even x) $ emit x return $ even x prop_tfold :: [Int] -> Bool prop_tfold is = summary (tfold (TrFold.foldM f 0)) (True, is) == ([], []::[()], is, Just (sum is)) where f x y = do emit y return $! x + y prop_tscanl :: [Int] -> Bool prop_tscanl is = summary (tscanl (TrFold.foldM f 0)) (True, is) == ([], scanl1 (+) is, is, Just ()) where f x y = do emit y return $! 
x + y prop_feed :: TestTransducer -> Int -> [Int] -> Bool prop_feed (TestTransducer _ x) i is = output (feed i x) (True,is) == output x (True, i:is) -- Fusion prop_rewrite_tmap :: [Int] -> Bool prop_rewrite_tmap iss = exec (tmap (+1)) iss == exec (NR.tmap (+1)) iss prop_rewrite_tfilter :: [Int] -> Bool prop_rewrite_tfilter iss = exec (tfilter even) iss == exec (NR.tfilter even) iss prop_rewrite_flatten :: [[Int]] -> Bool prop_rewrite_flatten iss = exec flatten iss == exec NR.flatten iss prop_rewrite_tscanl :: [Int] -> Bool prop_rewrite_tscanl iss = exec (tscanl (TrFold.foldM f 0)) iss == exec (NR.tscanl (TrFold.foldM f 0)) iss where f x y = do emit y return $! x + y -- does not hold, bug? prop_rewrite_tfold :: [Int] -> Bool prop_rewrite_tfold iss = exec (tfold (TrFold.foldM f 0)) iss == (exec (NR.tfold (TrFold.foldM f 0)) iss :: Trace Int () Int) where f x y = do emit y return $! x + y prop_rewrite_mapM :: [Int] -> Bool prop_rewrite_mapM iss = exec (Tr.mapM f) iss == exec (NR.mapM f) iss where f x = do emit x return $! 
x + 1 prop_rewrite_dropWhileM :: [Int] -> Bool prop_rewrite_dropWhileM iss = exec (Tr.dropWhileM f) iss == exec (NR.dropWhileM f) iss where f x = do emit x return $ even x prop_flatten_tfilter :: [[Int]] -> Bool prop_flatten_tfilter iss = exec (flatten ><> tfilter even) iss == exec (flatten ><> noFusion (tfilter even)) iss noFusion :: a -> a noFusion = id {-# NOINLINE noFusion #-} -- Parallel prop_par :: [Int] -> QC.Property prop_par is = QC.morallyDubiousIOProperty $ do historyRef <- newIORef Q.empty return $ QC.forAll (arbitraryIOTrans historyRef) $ \(IOTrans _ iot) -> QC.morallyDubiousIOProperty $ (==) <$> evalIOTrans historyRef iot is <*> evalIOTrans historyRef (parT iot) is evalIOTrans :: IORef (Q.Seq Int) -> Transducer Ex.SomeException i o IO a -> [i] -> IO ([o], [Int], Maybe a) evalIOTrans historyRef trans input = do writeIORef historyRef Q.empty outRef <- newIORef Q.empty resultRef <- newIORef Nothing runIOTrans $ yieldList input ><> (trans >>= lift . writeIORef resultRef . Just) ><> Tr.tfold (TrFold.mapM_ $ \a -> modifyIORef outRef (Q.|>a)) out <- readIORef outRef effects <- readIORef historyRef result <- readIORef resultRef return (Fold.toList out, Fold.toList effects, result) runIOTrans :: Transducer Ex.SomeException i o IO a -> IO () runIOTrans (Trs j) = loop j where loop x = case toView x of Pure _ -> return () Impure (Yield _ cont) -> loop cont Impure (Try cont) -> loop (cont Nothing) Impure (Panic _ cont) -> loop cont Impure (TLift a) -> a >>= loop arbitraryIOTrans :: IORef (Q.Seq Int) -> QC.Gen IOTrans arbitraryIOTrans historyRef = fmap make $ arbitraryTransducerWith $ \gen -> do (doc, val) <- gen effect <- arbitrary let doc' = opl 1 ">>" (app (lit "write") $ lit $ show effect) (app (lit "return") doc) return (doc', val <$ modifyIORef historyRef (Q.|>effect)) where make (doc, trans) = IOTrans (renderPDoc doc) trans data IOTrans = IOTrans String (Transducer Ex.SomeException Int Int IO Int) instance Show IOTrans where show (IOTrans s _) = s -- 
Counterexamples -- prop_comp_assoc _ce_trans30, _ce_trans31, _ce_trans32 :: Transducer Ex.SomeException Int Int TestMonad Int _ce_trans30 = Trs (fromView (Impure (Try (const (fromView (Pure (1))))))) _ce_trans31 = Trs (fromView (Pure 1)) _ce_trans32 = Trs (fromView (Impure (Try (const (fromView (Impure (TLift (tell (Q.fromList [1]) >> return (fromView (Pure (0)))))))))))
JohnLato/transducers
tests/transducers.hs
Haskell
bsd-3-clause
14,965
{-# LINE 1 "GHC.Stats.hsc" #-} {-# LANGUAGE Trustworthy #-} {-# LINE 2 "GHC.Stats.hsc" #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE RecordWildCards #-} {-# OPTIONS_GHC -funbox-strict-fields #-} ----------------------------------------------------------------------------- -- | This module provides access to internal garbage collection and -- memory usage statistics. These statistics are not available unless -- a program is run with the @-T@ RTS flag. -- -- This module is GHC-only and should not be considered portable. -- -- @since 4.5.0.0 ----------------------------------------------------------------------------- module GHC.Stats ( GCStats(..) , getGCStats , getGCStatsEnabled ) where import Control.Monad import Data.Int import GHC.Base import GHC.Read ( Read ) import GHC.Show ( Show ) import GHC.IO.Exception import Foreign.Marshal.Alloc import Foreign.Storable import Foreign.Ptr {-# LINE 32 "GHC.Stats.hsc" #-} foreign import ccall "getGCStats" getGCStats_ :: Ptr () -> IO () -- | Returns whether GC stats have been enabled (with @+RTS -T@, for example). -- -- @since 4.6.0.0 foreign import ccall "getGCStatsEnabled" getGCStatsEnabled :: IO Bool -- I'm probably violating a bucket of constraints here... oops. -- | Statistics about memory usage and the garbage collector. Apart from -- 'currentBytesUsed' and 'currentBytesSlop' all are cumulative values since -- the program started. -- -- @since 4.5.0.0 data GCStats = GCStats { -- | Total number of bytes allocated bytesAllocated :: !Int64 -- | Number of garbage collections performed (any generation, major and -- minor) , numGcs :: !Int64 -- | Maximum number of live bytes seen so far , maxBytesUsed :: !Int64 -- | Number of byte usage samples taken, or equivalently -- the number of major GCs performed. , numByteUsageSamples :: !Int64 -- | Sum of all byte usage samples, can be used with -- 'numByteUsageSamples' to calculate averages with -- arbitrary weighting (if you are sampling this record multiple -- times). 
, cumulativeBytesUsed :: !Int64 -- | Number of bytes copied during GC , bytesCopied :: !Int64 -- | Number of live bytes at the end of the last major GC , currentBytesUsed :: !Int64 -- | Current number of bytes lost to slop , currentBytesSlop :: !Int64 -- | Maximum number of bytes lost to slop at any one time so far , maxBytesSlop :: !Int64 -- | Maximum number of megabytes allocated , peakMegabytesAllocated :: !Int64 -- | CPU time spent running mutator threads. This does not include -- any profiling overhead or initialization. , mutatorCpuSeconds :: !Double -- | Wall clock time spent running mutator threads. This does not -- include initialization. , mutatorWallSeconds :: !Double -- | CPU time spent running GC , gcCpuSeconds :: !Double -- | Wall clock time spent running GC , gcWallSeconds :: !Double -- | Total CPU time elapsed since program start , cpuSeconds :: !Double -- | Total wall clock time elapsed since start , wallSeconds :: !Double -- | Number of bytes copied during GC, minus space held by mutable -- lists held by the capabilities. Can be used with -- 'parMaxBytesCopied' to determine how well parallel GC utilized -- all cores. , parTotBytesCopied :: !Int64 -- | Sum of number of bytes copied each GC by the most active GC -- thread each GC. The ratio of 'parTotBytesCopied' divided by -- 'parMaxBytesCopied' approaches 1 for a maximally sequential -- run and approaches the number of threads (set by the RTS flag -- @-N@) for a maximally parallel run. , parMaxBytesCopied :: !Int64 } deriving (Show, Read) {- , initCpuSeconds :: !Double , initWallSeconds :: !Double -} -- | Retrieves garbage collection and memory statistics as of the last -- garbage collection. If you would like your statistics as recent as -- possible, first run a 'System.Mem.performGC'. -- -- @since 4.5.0.0 getGCStats :: IO GCStats getGCStats = do statsEnabled <- getGCStatsEnabled unless statsEnabled . ioError $ IOError Nothing UnsupportedOperation "" "getGCStats: GC stats not enabled. 
Use `+RTS -T -RTS' to enable them." Nothing Nothing allocaBytes ((144)) $ \p -> do {-# LINE 123 "GHC.Stats.hsc" #-} getGCStats_ p bytesAllocated <- ((\hsc_ptr -> peekByteOff hsc_ptr 0)) p {-# LINE 125 "GHC.Stats.hsc" #-} numGcs <- ((\hsc_ptr -> peekByteOff hsc_ptr 8)) p {-# LINE 126 "GHC.Stats.hsc" #-} numByteUsageSamples <- ((\hsc_ptr -> peekByteOff hsc_ptr 16)) p {-# LINE 127 "GHC.Stats.hsc" #-} maxBytesUsed <- ((\hsc_ptr -> peekByteOff hsc_ptr 24)) p {-# LINE 128 "GHC.Stats.hsc" #-} cumulativeBytesUsed <- ((\hsc_ptr -> peekByteOff hsc_ptr 32)) p {-# LINE 129 "GHC.Stats.hsc" #-} bytesCopied <- ((\hsc_ptr -> peekByteOff hsc_ptr 40)) p {-# LINE 130 "GHC.Stats.hsc" #-} currentBytesUsed <- ((\hsc_ptr -> peekByteOff hsc_ptr 48)) p {-# LINE 131 "GHC.Stats.hsc" #-} currentBytesSlop <- ((\hsc_ptr -> peekByteOff hsc_ptr 56)) p {-# LINE 132 "GHC.Stats.hsc" #-} maxBytesSlop <- ((\hsc_ptr -> peekByteOff hsc_ptr 64)) p {-# LINE 133 "GHC.Stats.hsc" #-} peakMegabytesAllocated <- ((\hsc_ptr -> peekByteOff hsc_ptr 72)) p {-# LINE 134 "GHC.Stats.hsc" #-} {- initCpuSeconds <- (# peek GCStats, init_cpu_seconds) p initWallSeconds <- (# peek GCStats, init_wall_seconds) p -} mutatorCpuSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 96)) p {-# LINE 139 "GHC.Stats.hsc" #-} mutatorWallSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 104)) p {-# LINE 140 "GHC.Stats.hsc" #-} gcCpuSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 112)) p {-# LINE 141 "GHC.Stats.hsc" #-} gcWallSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 120)) p {-# LINE 142 "GHC.Stats.hsc" #-} cpuSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 128)) p {-# LINE 143 "GHC.Stats.hsc" #-} wallSeconds <- ((\hsc_ptr -> peekByteOff hsc_ptr 136)) p {-# LINE 144 "GHC.Stats.hsc" #-} parTotBytesCopied <- ((\hsc_ptr -> peekByteOff hsc_ptr 80)) p {-# LINE 145 "GHC.Stats.hsc" #-} parMaxBytesCopied <- ((\hsc_ptr -> peekByteOff hsc_ptr 88)) p {-# LINE 146 "GHC.Stats.hsc" #-} return GCStats { .. 
} {- -- Nontrivial to implement: TaskStats needs arbitrarily large -- amounts of memory, spark stats wants to use SparkCounters -- but that needs a new rts/ header. data TaskStats = TaskStats { taskMutCpuSeconds :: Int64 , taskMutWallSeconds :: Int64 , taskGcCpuSeconds :: Int64 , taskGcWallSeconds :: Int64 } deriving (Show, Read) data SparkStats = SparkStats { sparksCreated :: Int64 , sparksDud :: Int64 , sparksOverflowed :: Int64 , sparksConverted :: Int64 , sparksGcd :: Int64 , sparksFizzled :: Int64 } deriving (Show, Read) -- We also could get per-generation stats, which requires a -- non-constant but at runtime known about of memory. -}
phischu/fragnix
builtins/base/GHC.Stats.hs
Haskell
bsd-3-clause
7,011
{-- snippet plus --}
-- | Add two numbers.  Declared with backticks so that it reads
-- naturally when applied infix: @a `plus` b@.
plus :: Num a => a -> a -> a
plus x y = x + y

-- | A simple pair type.  Both the type and the data constructor are
-- declared infix here; the constructor may still be used either
-- prefix or infix.  (Keeping the infix declaration also preserves the
-- infix rendering produced by the derived 'Show' instance.)
data a `Pair` b = a `Pair` b
  deriving (Show)

-- Prefix use of the constructor.
foo = Pair 1 2

-- Infix use of the constructor.
bar = Pair True "quux"
{-- /snippet plus --}
binesiyu/ifl
examples/ch04/Plus.hs
Haskell
mit
220
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="el-GR"> <title>Customizable HTML Report</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
veggiespam/zap-extensions
addOns/customreport/src/main/javahelp/org/zaproxy/zap/extension/customreport/resources/help_el_GR/helpset_el_GR.hs
Haskell
apache-2.0
970
{-# LANGUAGE LambdaCase #-} {-# LANGUAGE ScopedTypeVariables #-} module Cardano.Wallet.Kernel.CoinSelection.Generic.Random ( PrivacyMode(..) , random , findRandomOutput ) where import Universum import Cardano.Wallet.Kernel.CoinSelection.Generic import qualified Cardano.Wallet.Kernel.CoinSelection.Generic.LargestFirst as LargestFirst {------------------------------------------------------------------------------- Random input selection algorithm -------------------------------------------------------------------------------} data PrivacyMode = PrivacyModeOn | PrivacyModeOff -- | Random input selection -- -- Random input selection has the advantage that is it self correcting, in the -- following sense: suppose that 90% of our UTxO consists of small outputs; -- then random selection has a 90% change of choosing those small outputs. -- -- For each output we add a change output that is between 0.5 and 2 times the -- size of the output, making it hard to identify. This has the additional -- benefit of introducing another self-correction: if there are frequent -- requests for payments around certain size, the UTxO will contain lots of -- available change outputs of around that size. random :: forall utxo m. (MonadRandom m, PickFromUtxo utxo) => PrivacyMode -- ^ Hide change addresses? -> Word64 -- ^ Maximum number of inputs -> [Output (Dom utxo)] -- ^ Outputs to include -> CoinSelT utxo CoinSelHardErr m [CoinSelResult (Dom utxo)] random privacyMode maxNumInputs outs = do balance <- gets utxoBalance mapCoinSelErr (withTotalBalance balance) $ coinSelPerGoal step maxNumInputs outs `catchError` (\_ -> LargestFirst.largestFirst maxNumInputs outs) where -- | Perform a coin selection on the next output using the remaining -- inputs. `coinSelPerGoal` reduces the UTxO (and the number of allowed) -- inputs as it maps over the outputs. 
So, in the first iteration we have: -- -- `remainingNumInputs == maxNumInputs`, and for the second one, we have -- -- `remainingNumInputs == maxNumInputs - k`, where `k` is the number of -- inputs selected during the first iteration. step :: Word64 -> Output (Dom utxo) -> CoinSelT utxo CoinSelHardErr m (CoinSelResult (Dom utxo)) step remainingNumInputs out = defCoinSelResult out <$> inRange remainingNumInputs (target privacyMode (outVal out)) -- | Because of the recursive and stateful nature of `coinSelPerGoal`, -- errors are thrown within each step using values available at the moment -- where the error gets thrown. As a result, errors reports non-sensical -- balances and UTxO state. -- As a work-around, we remap errors to what they ought to be... withTotalBalance :: Value (Dom utxo) -> CoinSelHardErr -> CoinSelHardErr withTotalBalance balance = \case e@CoinSelHardErrOutputCannotCoverFee{} -> e e@CoinSelHardErrOutputIsRedeemAddress{} -> e e@CoinSelHardErrCannotCoverFee{} -> e CoinSelHardErrMaxInputsReached _ -> CoinSelHardErrMaxInputsReached (show maxNumInputs) CoinSelHardErrUtxoExhausted _ _ -> CoinSelHardErrUtxoExhausted (pretty balance) (pretty payment) where payment = unsafeValueSum $ outVal <$> outs target :: PrivacyMode -> Value (Dom utxo) -> TargetRange (Dom utxo) target PrivacyModeOn val = fromMaybe (target PrivacyModeOff val) (idealRange val) target PrivacyModeOff val = TargetRange { targetMin = val , targetAim = val , targetMax = val } idealRange :: Value (Dom utxo) -> Maybe (TargetRange (Dom utxo)) idealRange val = do -- Minimum value: no change at all let targetMin = val -- Ideal case: change equal to the value targetAim <- valueAdjust RoundUp 2.0 val -- Terminating condition: change twice the value targetMax <- valueAdjust RoundUp 3.0 val return TargetRange{..} {------------------------------------------------------------------------------- Lower level API -------------------------------------------------------------------------------} -- | Target 
range for picking inputs data TargetRange dom = TargetRange { targetMin :: Value dom , targetAim :: Value dom , targetMax :: Value dom } -- | Select random inputs in the specified range -- -- If we exceed the maximum number of inputs whilst trying to reach the minimum -- end of the range, fallback on largest first to cover the minimum, then -- proceed as normal with random selection to try and improve the change amount. inRange :: (PickFromUtxo utxo, MonadRandom m) => Word64 -> TargetRange (Dom utxo) -> CoinSelT utxo CoinSelHardErr m (SelectedUtxo (Dom utxo)) inRange maxNumInputs TargetRange{..} = do atLeastWithFallback maxNumInputs targetMin >>= improve maxNumInputs targetAim targetMax -- | Select random inputs to cover the required minimum value. -- -- Falls back on 'LargestFirst.atLeast' if we exceed 'maxNumInputs' atLeastWithFallback :: forall utxo m. (PickFromUtxo utxo, MonadRandom m) => Word64 -> Value (Dom utxo) -> CoinSelT utxo CoinSelHardErr m (SelectedUtxo (Dom utxo)) atLeastWithFallback maxNumInputs targetMin = atLeastNoFallback maxNumInputs targetMin `catchJustSoft` \_ -> LargestFirst.atLeast maxNumInputs targetMin -- | Select random inputs to cover the required minimum value. -- -- Fails if we exceed 'maxNumInputs' atLeastNoFallback :: forall utxo m. 
(PickFromUtxo utxo, MonadRandom m) => Word64 -> Value (Dom utxo) -> CoinSelT utxo CoinSelErr m (SelectedUtxo (Dom utxo)) atLeastNoFallback maxNumInputs targetMin = do balance <- gets utxoBalance go emptySelection balance where go :: SelectedUtxo (Dom utxo) -> Value (Dom utxo) -> CoinSelT utxo CoinSelErr m (SelectedUtxo (Dom utxo)) go selected balance | sizeToWord (selectedSize selected) > maxNumInputs = throwError $ CoinSelErrSoft CoinSelSoftErr | selectedBalance selected >= targetMin = return selected | otherwise = do io <- findRandomOutput >>= maybe (throwError $ errUtxoExhausted balance) return go (select io selected) balance errUtxoExhausted :: Value (Dom utxo) -> CoinSelErr errUtxoExhausted balance = CoinSelErrHard $ CoinSelHardErrUtxoExhausted (pretty balance) (pretty targetMin) -- | Select random additional inputs with the aim of improving the change amount -- -- This never throws an error. improve :: forall utxo e m. (PickFromUtxo utxo, MonadRandom m) => Word64 -- ^ Total maximum number of inputs -> Value (Dom utxo) -- ^ Total UTxO balance to aim for -> Value (Dom utxo) -- ^ Maximum total UTxO balance -> SelectedUtxo (Dom utxo) -- ^ UTxO selected so far -> CoinSelT utxo e m (SelectedUtxo (Dom utxo)) improve maxNumInputs targetAim targetMax = go where -- Preconditions -- -- > 0 <= acc < targetAim -- -- Invariant: -- -- > acc == utxoBalance selected -- -- Relies on the following self-correcting property: if the UTxO -- has many small entries, then we should be able to reach close -- to the aim value. BUT if this is the case, then the probability -- that when we pick a random value from the UTxO that we overshoot -- the upper end of the range is low. Here we terminate early if we -- happen to pick a value from the UTxO that overshoots the upper -- of the range; this is likely to happen precisely when we have -- a low probability of finding a value close to the aim. 
go :: SelectedUtxo (Dom utxo) -> CoinSelT utxo e m (SelectedUtxo (Dom utxo)) go selected = do mIO <- tryFindRandomOutput isImprovement case mIO of Nothing -> return selected Just selected' -> if selectedBalance selected' >= targetAim then return selected' else go selected' where -- A new value is an improvement if -- -- * We don't overshoot the upper end of the range -- * We get closer to the aim -- * We don't use more than the maximum number of inputs -- -- Note that the second property is a bit subtle: it is trivially -- true if both @acc@ and @acc + val@ are smaller than @targetAim@ -- -- > value | ------|------------|----------------|------------- -- > acc (acc + val) targetAim -- -- but if @acc + val@ exceeds the aim, we are comparing (absolute) -- distance to the aim -- -- > value | ------|-----------|---------------|-------- -- > acc targetAim (acc + val) isImprovement :: UtxoEntry (Dom utxo) -> Maybe (SelectedUtxo (Dom utxo)) isImprovement io = do guard $ and [ selectedBalance selected' <= targetMax , valueDist targetAim (selectedBalance selected') < valueDist targetAim (selectedBalance selected) , sizeToWord (selectedSize selected') <= maxNumInputs ] return selected' where selected' = select io selected {------------------------------------------------------------------------------- Auxiliary: selecting random outputs -------------------------------------------------------------------------------} -- | Select a random output findRandomOutput :: (MonadRandom m, PickFromUtxo utxo) => CoinSelT utxo e m (Maybe (UtxoEntry (Dom utxo))) findRandomOutput = tryFindRandomOutput Just -- | Find a random output, and return it if it satisfies the predicate -- -- If the predicate is not satisfied, state is not changed. tryFindRandomOutput :: forall utxo e m a. 
(MonadRandom m, PickFromUtxo utxo) => (UtxoEntry (Dom utxo) -> Maybe a) -> CoinSelT utxo e m (Maybe a) tryFindRandomOutput p = do utxo <- get mIO <- (>>= p') <$> pickRandom utxo case mIO of Nothing -> return Nothing Just (a, utxo') -> do put utxo' ; return $ Just a where p' :: (UtxoEntry (Dom utxo), utxo) -> Maybe (a, utxo) p' (io, utxo) = (, utxo) <$> p io
input-output-hk/cardano-sl
wallet/src/Cardano/Wallet/Kernel/CoinSelection/Generic/Random.hs
Haskell
apache-2.0
10,737
{-# LANGUAGE BangPatterns, FlexibleInstances, UndecidableInstances, CPP #-} #include "fusion-phases.h" -- | Irregular two dimensional arrays. --- -- * TODO: The inner arrays should be unboxed so we don't get an unboxing overhead -- for every call to unsafeIndex2. This might need an extension to the GHC -- runtime if we alwo want to convert a U.Vector directly to this form. -- -- * TODO: We currently only allow primitive types to be in a Vectors, but -- in future we'll want `Vectors` of tuples etc. -- module Data.Array.Parallel.Unlifted.Vectors ( Vectors(..) , Unboxes , empty , singleton , length , index , index2 , unsafeIndex , unsafeIndex2 , unsafeIndexUnpack , append , fromVector , toVector) where import qualified Data.Array.Parallel.Base as B import qualified Data.Array.Parallel.Unlifted.ArrayArray as AA import qualified Data.Primitive.ByteArray as P import qualified Data.Primitive.Types as P import qualified Data.Primitive as P import qualified Data.Vector.Generic as G import qualified Data.Vector.Primitive as R import qualified Data.Vector.Unboxed as U import qualified Data.Vector as V import Data.Vector.Unboxed (Unbox) import Prelude hiding (length) import Data.Word import Control.Monad.ST -- | Class of element types that can be used in a `Vectors` class R.Prim a => Unboxes a instance Unboxes Int instance Unboxes Word8 instance Unboxes Float instance Unboxes Double -- | A 2-dimensional array, -- where the inner arrays can all have different lengths. data Vectors a = Vectors {-# UNPACK #-} !Int -- number of inner vectors {-# UNPACK #-} !P.ByteArray -- starting index of each vector in its chunk {-# UNPACK #-} !P.ByteArray -- lengths of each inner vector {-# UNPACK #-} !(AA.ArrayArray P.ByteArray) -- chunks instance (Unboxes a, Unbox a, Show a) => Show (Vectors a) where show = show . toVector {-# NOINLINE show #-} -- | Construct an empty `Vectors` with no arrays of no elements. 
empty :: Vectors a empty = runST $ do mba <- P.newByteArray 0 ba <- P.unsafeFreezeByteArray mba maa <- AA.newArrayArray 0 AA.writeArrayArray maa 0 ba aa <- AA.unsafeFreezeArrayArray maa return $ Vectors 0 ba ba aa {-# INLINE_U empty #-} -- | Construct a `Vectors` containing data from a single unboxed array. singleton :: (Unboxes a, Unbox a) => U.Vector a -> Vectors a singleton vec = runST $ do R.MVector start len mbaData <- R.unsafeThaw $ G.convert vec baData <- P.unsafeFreezeByteArray mbaData mbaStarts <- P.newByteArray (P.sizeOf (undefined :: Int)) P.writeByteArray mbaStarts 0 start baStarts <- P.unsafeFreezeByteArray mbaStarts mbaLengths <- P.newByteArray (P.sizeOf (undefined :: Int)) P.writeByteArray mbaLengths 0 len baLengths <- P.unsafeFreezeByteArray mbaLengths maaChunks <- AA.newArrayArray 1 AA.writeArrayArray maaChunks 0 baData aaChunks <- AA.unsafeFreezeArrayArray maaChunks return $ Vectors 1 baStarts baLengths aaChunks {-# INLINE_U singleton #-} -- | Yield the number of vectors in a `Vectors`. length :: Unboxes a => Vectors a -> Int length (Vectors len _ _ _) = len {-# INLINE_U length #-} -- | Take one of the outer vectors from a `Vectors`. unsafeIndex :: (Unboxes a, Unbox a) => Vectors a -> Int -> U.Vector a unsafeIndex (Vectors _ starts lens arrs) ix = G.convert $ runST $ do let start = P.indexByteArray starts ix let len = P.indexByteArray lens ix let arr = AA.indexArrayArray arrs ix marr <- P.unsafeThawByteArray arr let mvec = R.MVector start len marr R.unsafeFreeze mvec {-# INLINE_U unsafeIndex #-} -- | Take one of the outer vectors from a `Vectors`, with bounds checking index :: (Unboxes a, Unbox a) => String -- ^ source position -> Vectors a -> Int -> U.Vector a index here vec ix = B.check here (length vec) ix $ unsafeIndex vec ix {-# INLINE_U index #-} -- | Retrieve a single element from a `Vectors`, -- given the outer and inner indices. 
unsafeIndex2 :: Unboxes a => Vectors a -> Int -> Int -> a unsafeIndex2 (Vectors _ starts _ arrs) ix1 ix2 = (arrs `AA.indexArrayArray` ix1) `P.indexByteArray` ((starts `P.indexByteArray` ix1) + ix2) {-# INLINE_U unsafeIndex2 #-} -- | Retrieve a single element from a `Vectors`, -- given the outer and inner indices, with bounds checking. index2 :: Unboxes a => String -- ^ source position -> Vectors a -> Int -> Int -> a index2 here vec@(Vectors _ _ lens _) ix1 ix2 = B.check (here++"(index2.ix1)") (length vec) ix1 $ B.check (here++"(index2.ix2)") (lens `P.indexByteArray` ix1) ix2 $ unsafeIndex2 vec ix1 ix2 {-# INLINE_U index2 #-} -- | Retrieve an inner array from a `Vectors`, returning the array data, -- starting index in the data, and vector length. unsafeIndexUnpack :: Unboxes a => Vectors a -> Int -> (P.ByteArray, Int, Int) unsafeIndexUnpack (Vectors _ starts lens arrs) ix = ( arrs `AA.indexArrayArray` ix , starts `P.indexByteArray` ix , lens `P.indexByteArray` ix) {-# INLINE_U unsafeIndexUnpack #-} -- | Appending two `Vectors` uses work proportional to -- the length of the outer arrays. 
append :: (Unboxes a, Unbox a) => Vectors a -> Vectors a -> Vectors a append (Vectors len1 starts1 lens1 chunks1) (Vectors len2 starts2 lens2 chunks2) = runST $ do let len' = len1 + len2 -- append starts into result let lenStarts1 = P.sizeofByteArray starts1 let lenStarts2 = P.sizeofByteArray starts2 maStarts <- P.newByteArray (lenStarts1 + lenStarts2) P.copyByteArray maStarts 0 starts1 0 lenStarts1 P.copyByteArray maStarts lenStarts1 starts2 0 lenStarts2 starts' <- P.unsafeFreezeByteArray maStarts -- append lens into result let lenLens1 = P.sizeofByteArray lens1 let lenLens2 = P.sizeofByteArray lens2 maLens <- P.newByteArray (lenLens1 + lenLens2) P.copyByteArray maLens 0 lens1 0 lenLens1 P.copyByteArray maLens lenStarts1 lens2 0 lenLens2 lens' <- P.unsafeFreezeByteArray maLens -- append arrs into result maChunks <- AA.newArrayArray len' AA.copyArrayArray maChunks 0 chunks1 0 len1 AA.copyArrayArray maChunks len1 chunks2 0 len2 chunks' <- AA.unsafeFreezeArrayArray maChunks let result = Vectors len' starts' lens' chunks' return $ result {-# INLINE_U append #-} -- | Convert a boxed vector of unboxed vectors to a `Vectors`. fromVector :: (Unboxes a, Unbox a) => V.Vector (U.Vector a) -> Vectors a fromVector vecs = runST $ do let len = V.length vecs let (_, vstarts, vlens) = V.unzip3 $ V.map unpackUVector vecs let (baStarts, _, _) = unpackUVector $ V.convert vstarts let (baLens, _, _) = unpackUVector $ V.convert vlens mchunks <- AA.newArrayArray len V.zipWithM_ (\i vec -> let (ba, _, _) = unpackUVector vec in AA.writeArrayArray mchunks i ba) (V.enumFromN 0 len) vecs chunks <- AA.unsafeFreezeArrayArray mchunks return $ Vectors len baStarts baLens chunks {-# INLINE_U fromVector #-} -- | Convert a `Vectors` to a boxed vector of unboxed vectors. 
toVector :: (Unboxes a, Unbox a) => Vectors a -> V.Vector (U.Vector a) toVector vectors = V.map (unsafeIndex vectors) $ V.enumFromN 0 (length vectors) {-# INLINE_U toVector #-} -- | Unpack an unboxed vector into array data, starting index, and vector length. unpackUVector :: (Unbox a, P.Prim a) => U.Vector a -> (P.ByteArray, Int, Int) unpackUVector vec = runST $ do let pvec = V.convert vec R.MVector start len mba <- R.unsafeThaw pvec ba <- P.unsafeFreezeByteArray mba return (ba, start, len) {-# INLINE_U unpackUVector #-}
mainland/dph
dph-prim-seq/Data/Array/Parallel/Unlifted/Vectors.hs
Haskell
bsd-3-clause
8,684
-- | -- Module: Network.Riak -- Copyright: (c) 2011 MailRank, Inc. -- License: Apache -- Maintainer: Mark Hibberd <mark@hibberd.id.au>, Nathan Hunter <nhunter@janrain.com> -- Stability: experimental -- Portability: portable -- -- A client for the Riak decentralized data store. -- -- The functions in this module use JSON as the storage -- representation, and automatically perform conflict resolution -- during storage and retrieval. -- -- This library is organized to allow a tradeoff between power -- and ease of use. If you would like a different degree of -- automation with storage and conflict resolution, you may want to -- use one of the following modules (ranked from easiest to most -- tricky to use): -- -- [Network.Riak.JSON.Resolvable] JSON for storage, automatic conflict -- resolution. (This module actually re-exports its definitions.) -- This is the easiest module to work with. -- -- [Network.Riak.JSON] JSON for storage, manual conflict resolution. -- -- [Network.Riak.Value.Resolvable] More complex (but still automatic) -- storage, automatic conflict resolution. -- -- [Network.Riak.Value] More complex (but still automatic) storage, -- manual conflict resolution. -- -- [Network.Riak.Basic] manual storage, manual conflict resolution. -- This is the most demanding module to work with, as you must encode -- and decode data yourself, and handle all conflict resolution -- yourself. -- -- [Network.Riak.CRDT] CRDT operations. -- -- A short getting started guide is available at <http://docs.basho.com/riak/latest/dev/taste-of-riak/haskell/> -- module Network.Riak ( -- * Client configuration and identification ClientID , Client(..) , defaultClient , getClientID -- * Connection management , Connection(..) , connect , disconnect , ping , getServerInfo -- * Data management , Quorum(..) , Resolvable(..) 
, get , getMany , getByIndex , addIndexes , modify , modify_ , delete -- ** Low-level modification functions , put , putIndexed , putMany -- * Metadata , listBuckets , foldKeys , getBucket , setBucket -- * Map/reduce , mapReduce -- * Types , IndexQuery(..) , IndexValue(..) ) where import Network.Riak.Basic hiding (get, put, put_) import Network.Riak.JSON.Resolvable (get, getMany, modify, modify_, put, putIndexed, putMany) import Network.Riak.Resolvable (Resolvable(..)) import Network.Riak.Value (getByIndex, addIndexes) import Network.Riak.Types.Internal (IndexQuery(..), IndexValue(..))
k-bx/riak-haskell-client
src/Network/Riak.hs
Haskell
apache-2.0
2,586
{-# LANGUAGE CPP #-} #if __GLASGOW_HASKELL__ >= 702 {-# LANGUAGE Trustworthy #-} -- Imports internal modules #endif {-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} -- | -- Module : Data.Attoparsec.Text.Lazy -- Copyright : Bryan O'Sullivan 2007-2015 -- License : BSD3 -- -- Maintainer : bos@serpentine.com -- Stability : experimental -- Portability : unknown -- -- Simple, efficient combinator parsing that can consume lazy 'Text' -- strings, loosely based on the Parsec library. -- -- This is essentially the same code as in the 'Data.Attoparsec.Text' -- module, only with a 'parse' function that can consume a lazy -- 'Text' incrementally, and a 'Result' type that does not allow -- more input to be fed in. Think of this as suitable for use with a -- lazily read file, e.g. via 'L.readFile' or 'L.hGetContents'. -- -- /Note:/ The various parser functions and combinators such as -- 'string' still expect /strict/ 'T.Text' parameters, and return -- strict 'T.Text' results. Behind the scenes, strict 'T.Text' values -- are still used internally to store parser input and manipulate it -- efficiently. module Data.Attoparsec.Text.Lazy ( Result(..) , module Data.Attoparsec.Text -- * Running parsers , parse , parseTest -- ** Result conversion , maybeResult , eitherResult ) where import Control.DeepSeq (NFData(rnf)) import Data.List (intercalate) import Data.Text.Lazy.Internal (Text(..), chunk) import qualified Data.Attoparsec.Internal.Types as T import qualified Data.Attoparsec.Text as A import qualified Data.Text as T import Data.Attoparsec.Text hiding (IResult(..), Result, eitherResult, maybeResult, parse, parseWith, parseTest) -- | The result of a parse. data Result r = Fail Text [String] String -- ^ The parse failed. The 'Text' is the input -- that had not yet been consumed when the failure -- occurred. The @[@'String'@]@ is a list of contexts -- in which the error occurred. The 'String' is the -- message describing the error, if any. | Done Text r -- ^ The parse succeeded. 
The 'Text' is the -- input that had not yet been consumed (if any) when -- the parse succeeded. deriving (Show) instance NFData r => NFData (Result r) where rnf (Fail bs ctxs msg) = rnf bs `seq` rnf ctxs `seq` rnf msg rnf (Done bs r) = rnf bs `seq` rnf r {-# INLINE rnf #-} fmapR :: (a -> b) -> Result a -> Result b fmapR _ (Fail st stk msg) = Fail st stk msg fmapR f (Done bs r) = Done bs (f r) instance Functor Result where fmap = fmapR -- | Run a parser and return its result. parse :: A.Parser a -> Text -> Result a parse p s = case s of Chunk x xs -> go (A.parse p x) xs empty -> go (A.parse p T.empty) empty where go (T.Fail x stk msg) ys = Fail (chunk x ys) stk msg go (T.Done x r) ys = Done (chunk x ys) r go (T.Partial k) (Chunk y ys) = go (k y) ys go (T.Partial k) empty = go (k T.empty) empty -- | Run a parser and print its result to standard output. parseTest :: (Show a) => A.Parser a -> Text -> IO () parseTest p s = print (parse p s) -- | Convert a 'Result' value to a 'Maybe' value. maybeResult :: Result r -> Maybe r maybeResult (Done _ r) = Just r maybeResult _ = Nothing -- | Convert a 'Result' value to an 'Either' value. eitherResult :: Result r -> Either String r eitherResult (Done _ r) = Right r eitherResult (Fail _ [] msg) = Left msg eitherResult (Fail _ ctxs msg) = Left (intercalate " > " ctxs ++ ": " ++ msg)
beni55/attoparsec
Data/Attoparsec/Text/Lazy.hs
Haskell
bsd-3-clause
3,702
module MediaWiki.API.Query.Revisions.Import where import MediaWiki.API.Types import MediaWiki.API.Utils import MediaWiki.API.Query.Revisions import Text.XML.Light.Types import Text.XML.Light.Proc ( strContent ) import Control.Monad import Data.Maybe stringXml :: String -> Either (String,[{-Error msg-}String]) RevisionsResponse stringXml s = parseDoc xml s xml :: Element -> Maybe RevisionsResponse xml e = do guard (elName e == nsName "api") let es1 = children e p <- pNode "query" es1 let es = children p ps <- fmap (mapMaybe xmlPage) (fmap children $ pNode "pages" es) let cont = pNode "query-continue" es1 >>= xmlContinue "revisions" "rvstartid" return emptyRevisionsResponse { rvPages = ps , rvContinue = cont } xmlPage :: Element -> Maybe (PageTitle,[Revision]) xmlPage e = do guard (elName e == nsName "page") let ns = fromMaybe mainNamespace $ pAttr "ns" e let tit = fromMaybe "" $ pAttr "title" e let pid = pAttr "pageid" e let es = children e p <- pNode "revisions" es let pg = emptyPageTitle{pgNS = ns, pgTitle=tit, pgMbId = pid} rs <- fmap (mapMaybe (xmlRevision pg)) (fmap children $ pNode "rev" (children p)) return (pg, rs) xmlRevision :: PageTitle -> Element -> Maybe Revision xmlRevision pg e = do guard (elName e == nsName "page") let rid = fromMaybe "" $ pAttr "revid" e let mino = isJust (pAttr "minor" e) let usr = fromMaybe "" $ pAttr "user" e let anon = isJust (pAttr "anon" e) let ts = fromMaybe "" $ pAttr "timestamp" e let size = fromMaybe 0 (pAttr "size" e >>= readMb) let com = pAttr "comment" e let con = case strContent e of { "" -> Nothing ; xs -> Just xs} return (emptyRevision pg) { revRevId = rid , revIsMinor = mino , revUser = usr , revIsAnon = anon , revTimestamp = ts , revSize = size , revComment = com , revContent = con }
neobrain/neobot
mediawiki/MediaWiki/API/Query/Revisions/Import.hs
Haskell
bsd-3-clause
1,926
{-# LANGUAGE CPP #-} #if __GLASGOW_HASKELL__ < 709 import qualified HastePkg708 as Real #else import qualified HastePkg710 as Real #endif main :: IO () main = Real.main
beni55/haste-compiler
utils/haste-pkg/haste-pkg.hs
Haskell
bsd-3-clause
171
{- (c) The University of Glasgow 2006 (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 \section[NameEnv]{@NameEnv@: name environments} -} {-# LANGUAGE CPP #-} module NameEnv ( -- * Var, Id and TyVar environments (maps) NameEnv, -- ** Manipulating these environments mkNameEnv, emptyNameEnv, isEmptyNameEnv, unitNameEnv, nameEnvElts, extendNameEnv_C, extendNameEnv_Acc, extendNameEnv, extendNameEnvList, extendNameEnvList_C, filterNameEnv, anyNameEnv, plusNameEnv, plusNameEnv_C, alterNameEnv, lookupNameEnv, lookupNameEnv_NF, delFromNameEnv, delListFromNameEnv, elemNameEnv, mapNameEnv, disjointNameEnv, DNameEnv, emptyDNameEnv, lookupDNameEnv, mapDNameEnv, alterDNameEnv, -- ** Dependency analysis depAnal ) where #include "HsVersions.h" import Digraph import Name import UniqFM import UniqDFM import Maybes {- ************************************************************************ * * \subsection{Name environment} * * ************************************************************************ -} {- Note [depAnal determinism] ~~~~~~~~~~~~~~~~~~~~~~~~~~ depAnal is deterministic provided it gets the nodes in a deterministic order. The order of lists that get_defs and get_uses return doesn't matter, as these are only used to construct the edges, and stronglyConnCompFromEdgedVertices is deterministic even when the edges are not in deterministic order as explained in Note [Deterministic SCC] in Digraph. -} depAnal :: (node -> [Name]) -- Defs -> (node -> [Name]) -- Uses -> [node] -> [SCC node] -- Peform dependency analysis on a group of definitions, -- where each definition may define more than one Name -- -- The get_defs and get_uses functions are called only once per node depAnal get_defs get_uses nodes = stronglyConnCompFromEdgedVerticesUniq (map mk_node keyed_nodes) where keyed_nodes = nodes `zip` [(1::Int)..] 
mk_node (node, key) = (node, key, mapMaybe (lookupNameEnv key_map) (get_uses node)) key_map :: NameEnv Int -- Maps a Name to the key of the decl that defines it key_map = mkNameEnv [(name,key) | (node, key) <- keyed_nodes, name <- get_defs node] {- ************************************************************************ * * \subsection{Name environment} * * ************************************************************************ -} -- | Name Environment type NameEnv a = UniqFM a -- Domain is Name emptyNameEnv :: NameEnv a isEmptyNameEnv :: NameEnv a -> Bool mkNameEnv :: [(Name,a)] -> NameEnv a nameEnvElts :: NameEnv a -> [a] alterNameEnv :: (Maybe a-> Maybe a) -> NameEnv a -> Name -> NameEnv a extendNameEnv_C :: (a->a->a) -> NameEnv a -> Name -> a -> NameEnv a extendNameEnv_Acc :: (a->b->b) -> (a->b) -> NameEnv b -> Name -> a -> NameEnv b extendNameEnv :: NameEnv a -> Name -> a -> NameEnv a plusNameEnv :: NameEnv a -> NameEnv a -> NameEnv a plusNameEnv_C :: (a->a->a) -> NameEnv a -> NameEnv a -> NameEnv a extendNameEnvList :: NameEnv a -> [(Name,a)] -> NameEnv a extendNameEnvList_C :: (a->a->a) -> NameEnv a -> [(Name,a)] -> NameEnv a delFromNameEnv :: NameEnv a -> Name -> NameEnv a delListFromNameEnv :: NameEnv a -> [Name] -> NameEnv a elemNameEnv :: Name -> NameEnv a -> Bool unitNameEnv :: Name -> a -> NameEnv a lookupNameEnv :: NameEnv a -> Name -> Maybe a lookupNameEnv_NF :: NameEnv a -> Name -> a filterNameEnv :: (elt -> Bool) -> NameEnv elt -> NameEnv elt anyNameEnv :: (elt -> Bool) -> NameEnv elt -> Bool mapNameEnv :: (elt1 -> elt2) -> NameEnv elt1 -> NameEnv elt2 disjointNameEnv :: NameEnv a -> NameEnv a -> Bool nameEnvElts x = eltsUFM x emptyNameEnv = emptyUFM isEmptyNameEnv = isNullUFM unitNameEnv x y = unitUFM x y extendNameEnv x y z = addToUFM x y z extendNameEnvList x l = addListToUFM x l lookupNameEnv x y = lookupUFM x y alterNameEnv = alterUFM mkNameEnv l = listToUFM l elemNameEnv x y = elemUFM x y plusNameEnv x y = plusUFM x y plusNameEnv_C f x y = 
plusUFM_C f x y extendNameEnv_C f x y z = addToUFM_C f x y z mapNameEnv f x = mapUFM f x extendNameEnv_Acc x y z a b = addToUFM_Acc x y z a b extendNameEnvList_C x y z = addListToUFM_C x y z delFromNameEnv x y = delFromUFM x y delListFromNameEnv x y = delListFromUFM x y filterNameEnv x y = filterUFM x y anyNameEnv f x = foldUFM ((||) . f) False x disjointNameEnv x y = isNullUFM (intersectUFM x y) lookupNameEnv_NF env n = expectJust "lookupNameEnv_NF" (lookupNameEnv env n) -- | Deterministic Name Environment -- -- See Note [Deterministic UniqFM] in UniqDFM for explanation why we need -- DNameEnv. type DNameEnv a = UniqDFM a emptyDNameEnv :: DNameEnv a emptyDNameEnv = emptyUDFM lookupDNameEnv :: DNameEnv a -> Name -> Maybe a lookupDNameEnv = lookupUDFM mapDNameEnv :: (a -> b) -> DNameEnv a -> DNameEnv b mapDNameEnv = mapUDFM alterDNameEnv :: (Maybe a -> Maybe a) -> DNameEnv a -> Name -> DNameEnv a alterDNameEnv = alterUDFM
olsner/ghc
compiler/basicTypes/NameEnv.hs
Haskell
bsd-3-clause
5,502
{-# LANGUAGE DeriveGeneric #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.InstalledPackageInfo -- Copyright : (c) The University of Glasgow 2004 -- -- Maintainer : libraries@haskell.org -- Portability : portable -- -- This is the information about an /installed/ package that -- is communicated to the @ghc-pkg@ program in order to register -- a package. @ghc-pkg@ now consumes this package format (as of version -- 6.4). This is specific to GHC at the moment. -- -- The @.cabal@ file format is for describing a package that is not yet -- installed. It has a lot of flexibility, like conditionals and dependency -- ranges. As such, that format is not at all suitable for describing a package -- that has already been built and installed. By the time we get to that stage, -- we have resolved all conditionals and resolved dependency version -- constraints to exact versions of dependent packages. So, this module defines -- the 'InstalledPackageInfo' data structure that contains all the info we keep -- about an installed package. There is a parser and pretty printer. The -- textual format is rather simpler than the @.cabal@ format: there are no -- sections, for example. -- This module is meant to be local-only to Distribution... 
module Distribution.InstalledPackageInfo ( InstalledPackageInfo(..), libraryName, OriginalModule(..), ExposedModule(..), ParseResult(..), PError(..), PWarning, emptyInstalledPackageInfo, parseInstalledPackageInfo, showInstalledPackageInfo, showInstalledPackageInfoField, showSimpleInstalledPackageInfoField, fieldsInstalledPackageInfo, ) where import Distribution.ParseUtils ( FieldDescr(..), ParseResult(..), PError(..), PWarning , simpleField, listField, parseLicenseQ , showFields, showSingleNamedField, showSimpleSingleNamedField , parseFieldsFlat , parseFilePathQ, parseTokenQ, parseModuleNameQ, parsePackageNameQ , showFilePath, showToken, boolField, parseOptVersion , parseFreeText, showFreeText, parseOptCommaList ) import Distribution.License ( License(..) ) import Distribution.Package ( PackageName(..), PackageIdentifier(..) , PackageId, InstalledPackageId(..) , packageName, packageVersion, PackageKey(..) , LibraryName(..) ) import qualified Distribution.Package as Package import Distribution.ModuleName ( ModuleName ) import Distribution.Version ( Version(..) 
) import Distribution.Text ( Text(disp, parse) ) import Text.PrettyPrint as Disp import qualified Distribution.Compat.ReadP as Parse import Distribution.Compat.Binary (Binary) import Data.Maybe (fromMaybe) import GHC.Generics (Generic) -- ----------------------------------------------------------------------------- -- The InstalledPackageInfo type data InstalledPackageInfo = InstalledPackageInfo { -- these parts are exactly the same as PackageDescription installedPackageId :: InstalledPackageId, sourcePackageId :: PackageId, packageKey :: PackageKey, license :: License, copyright :: String, maintainer :: String, author :: String, stability :: String, homepage :: String, pkgUrl :: String, synopsis :: String, description :: String, category :: String, -- these parts are required by an installed package only: exposed :: Bool, exposedModules :: [ExposedModule], instantiatedWith :: [(ModuleName, OriginalModule)], hiddenModules :: [ModuleName], trusted :: Bool, importDirs :: [FilePath], libraryDirs :: [FilePath], dataDir :: FilePath, hsLibraries :: [String], extraLibraries :: [String], extraGHCiLibraries:: [String], -- overrides extraLibraries for GHCi includeDirs :: [FilePath], includes :: [String], depends :: [InstalledPackageId], ccOptions :: [String], ldOptions :: [String], frameworkDirs :: [FilePath], frameworks :: [String], haddockInterfaces :: [FilePath], haddockHTMLs :: [FilePath], pkgRoot :: Maybe FilePath } deriving (Generic, Read, Show) libraryName :: InstalledPackageInfo -> LibraryName libraryName ipi = Package.packageKeyLibraryName (sourcePackageId ipi) (packageKey ipi) instance Binary InstalledPackageInfo instance Package.Package InstalledPackageInfo where packageId = sourcePackageId instance Package.HasInstalledPackageId InstalledPackageInfo where installedPackageId = installedPackageId instance Package.PackageInstalled InstalledPackageInfo where installedDepends = depends emptyInstalledPackageInfo :: InstalledPackageInfo emptyInstalledPackageInfo = 
InstalledPackageInfo { installedPackageId = InstalledPackageId "", sourcePackageId = PackageIdentifier (PackageName "") noVersion, packageKey = OldPackageKey (PackageIdentifier (PackageName "") noVersion), license = UnspecifiedLicense, copyright = "", maintainer = "", author = "", stability = "", homepage = "", pkgUrl = "", synopsis = "", description = "", category = "", exposed = False, exposedModules = [], hiddenModules = [], instantiatedWith = [], trusted = False, importDirs = [], libraryDirs = [], dataDir = "", hsLibraries = [], extraLibraries = [], extraGHCiLibraries= [], includeDirs = [], includes = [], depends = [], ccOptions = [], ldOptions = [], frameworkDirs = [], frameworks = [], haddockInterfaces = [], haddockHTMLs = [], pkgRoot = Nothing } noVersion :: Version noVersion = Version [] [] -- ----------------------------------------------------------------------------- -- Exposed modules data OriginalModule = OriginalModule { originalPackageId :: InstalledPackageId, originalModuleName :: ModuleName } deriving (Generic, Eq, Read, Show) data ExposedModule = ExposedModule { exposedName :: ModuleName, exposedReexport :: Maybe OriginalModule, exposedSignature :: Maybe OriginalModule -- This field is unused for now. 
} deriving (Generic, Read, Show) instance Text OriginalModule where disp (OriginalModule ipi m) = disp ipi <> Disp.char ':' <> disp m parse = do ipi <- parse _ <- Parse.char ':' m <- parse return (OriginalModule ipi m) instance Text ExposedModule where disp (ExposedModule m reexport signature) = Disp.sep [ disp m , case reexport of Just m' -> Disp.sep [Disp.text "from", disp m'] Nothing -> Disp.empty , case signature of Just m' -> Disp.sep [Disp.text "is", disp m'] Nothing -> Disp.empty ] parse = do m <- parseModuleNameQ Parse.skipSpaces reexport <- Parse.option Nothing $ do _ <- Parse.string "from" Parse.skipSpaces fmap Just parse Parse.skipSpaces signature <- Parse.option Nothing $ do _ <- Parse.string "is" Parse.skipSpaces fmap Just parse return (ExposedModule m reexport signature) instance Binary OriginalModule instance Binary ExposedModule -- To maintain backwards-compatibility, we accept both comma/non-comma -- separated variants of this field. You SHOULD use the comma syntax if you -- use any new functions, although actually it's unambiguous due to a quirk -- of the fact that modules must start with capital letters. 
showExposedModules :: [ExposedModule] -> Disp.Doc showExposedModules xs | all isExposedModule xs = fsep (map disp xs) | otherwise = fsep (Disp.punctuate comma (map disp xs)) where isExposedModule (ExposedModule _ Nothing Nothing) = True isExposedModule _ = False parseExposedModules :: Parse.ReadP r [ExposedModule] parseExposedModules = parseOptCommaList parse -- ----------------------------------------------------------------------------- -- Parsing parseInstalledPackageInfo :: String -> ParseResult InstalledPackageInfo parseInstalledPackageInfo = parseFieldsFlat (fieldsInstalledPackageInfo ++ deprecatedFieldDescrs) emptyInstalledPackageInfo parseInstantiatedWith :: Parse.ReadP r (ModuleName, OriginalModule) parseInstantiatedWith = do k <- parse _ <- Parse.char '=' n <- parse _ <- Parse.char '@' p <- parse return (k, OriginalModule p n) -- ----------------------------------------------------------------------------- -- Pretty-printing showInstalledPackageInfo :: InstalledPackageInfo -> String showInstalledPackageInfo = showFields fieldsInstalledPackageInfo showInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String) showInstalledPackageInfoField = showSingleNamedField fieldsInstalledPackageInfo showSimpleInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String) showSimpleInstalledPackageInfoField = showSimpleSingleNamedField fieldsInstalledPackageInfo showInstantiatedWith :: (ModuleName, OriginalModule) -> Doc showInstantiatedWith (k, OriginalModule p m) = disp k <> text "=" <> disp m <> text "@" <> disp p -- ----------------------------------------------------------------------------- -- Description of the fields, for parsing/printing fieldsInstalledPackageInfo :: [FieldDescr InstalledPackageInfo] fieldsInstalledPackageInfo = basicFieldDescrs ++ installedFieldDescrs basicFieldDescrs :: [FieldDescr InstalledPackageInfo] basicFieldDescrs = [ simpleField "name" disp parsePackageNameQ packageName (\name pkg -> 
pkg{sourcePackageId=(sourcePackageId pkg){pkgName=name}}) , simpleField "version" disp parseOptVersion packageVersion (\ver pkg -> pkg{sourcePackageId=(sourcePackageId pkg){pkgVersion=ver}}) , simpleField "id" disp parse installedPackageId (\ipid pkg -> pkg{installedPackageId=ipid}) , simpleField "key" disp parse packageKey (\pk pkg -> pkg{packageKey=pk}) , simpleField "license" disp parseLicenseQ license (\l pkg -> pkg{license=l}) , simpleField "copyright" showFreeText parseFreeText copyright (\val pkg -> pkg{copyright=val}) , simpleField "maintainer" showFreeText parseFreeText maintainer (\val pkg -> pkg{maintainer=val}) , simpleField "stability" showFreeText parseFreeText stability (\val pkg -> pkg{stability=val}) , simpleField "homepage" showFreeText parseFreeText homepage (\val pkg -> pkg{homepage=val}) , simpleField "package-url" showFreeText parseFreeText pkgUrl (\val pkg -> pkg{pkgUrl=val}) , simpleField "synopsis" showFreeText parseFreeText synopsis (\val pkg -> pkg{synopsis=val}) , simpleField "description" showFreeText parseFreeText description (\val pkg -> pkg{description=val}) , simpleField "category" showFreeText parseFreeText category (\val pkg -> pkg{category=val}) , simpleField "author" showFreeText parseFreeText author (\val pkg -> pkg{author=val}) ] installedFieldDescrs :: [FieldDescr InstalledPackageInfo] installedFieldDescrs = [ boolField "exposed" exposed (\val pkg -> pkg{exposed=val}) , simpleField "exposed-modules" showExposedModules parseExposedModules exposedModules (\xs pkg -> pkg{exposedModules=xs}) , listField "hidden-modules" disp parseModuleNameQ hiddenModules (\xs pkg -> pkg{hiddenModules=xs}) , listField "instantiated-with" showInstantiatedWith parseInstantiatedWith instantiatedWith (\xs pkg -> pkg{instantiatedWith=xs}) , boolField "trusted" trusted (\val pkg -> pkg{trusted=val}) , listField "import-dirs" showFilePath parseFilePathQ importDirs (\xs pkg -> pkg{importDirs=xs}) , listField "library-dirs" showFilePath parseFilePathQ 
libraryDirs (\xs pkg -> pkg{libraryDirs=xs}) , simpleField "data-dir" showFilePath (parseFilePathQ Parse.<++ return "") dataDir (\val pkg -> pkg{dataDir=val}) , listField "hs-libraries" showFilePath parseTokenQ hsLibraries (\xs pkg -> pkg{hsLibraries=xs}) , listField "extra-libraries" showToken parseTokenQ extraLibraries (\xs pkg -> pkg{extraLibraries=xs}) , listField "extra-ghci-libraries" showToken parseTokenQ extraGHCiLibraries (\xs pkg -> pkg{extraGHCiLibraries=xs}) , listField "include-dirs" showFilePath parseFilePathQ includeDirs (\xs pkg -> pkg{includeDirs=xs}) , listField "includes" showFilePath parseFilePathQ includes (\xs pkg -> pkg{includes=xs}) , listField "depends" disp parse depends (\xs pkg -> pkg{depends=xs}) , listField "cc-options" showToken parseTokenQ ccOptions (\path pkg -> pkg{ccOptions=path}) , listField "ld-options" showToken parseTokenQ ldOptions (\path pkg -> pkg{ldOptions=path}) , listField "framework-dirs" showFilePath parseFilePathQ frameworkDirs (\xs pkg -> pkg{frameworkDirs=xs}) , listField "frameworks" showToken parseTokenQ frameworks (\xs pkg -> pkg{frameworks=xs}) , listField "haddock-interfaces" showFilePath parseFilePathQ haddockInterfaces (\xs pkg -> pkg{haddockInterfaces=xs}) , listField "haddock-html" showFilePath parseFilePathQ haddockHTMLs (\xs pkg -> pkg{haddockHTMLs=xs}) , simpleField "pkgroot" (const Disp.empty) parseFilePathQ (fromMaybe "" . pkgRoot) (\xs pkg -> pkg{pkgRoot=Just xs}) ] deprecatedFieldDescrs :: [FieldDescr InstalledPackageInfo] deprecatedFieldDescrs = [ listField "hugs-options" showToken parseTokenQ (const []) (const id) ]
x-y-z/cabal
Cabal/Distribution/InstalledPackageInfo.hs
Haskell
bsd-3-clause
16,058
{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Dependency.Types -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Common types for dependency resolution. ----------------------------------------------------------------------------- module Distribution.Client.Dependency.TopDown ( topDownResolver ) where import Distribution.Client.Dependency.TopDown.Types import qualified Distribution.Client.Dependency.TopDown.Constraints as Constraints import Distribution.Client.Dependency.TopDown.Constraints ( Satisfiable(..) ) import Distribution.Client.Types ( SourcePackage(..), ConfiguredPackage(..) , enableStanzas, ConfiguredId(..), fakeComponentId ) import Distribution.Client.Dependency.Types ( DependencyResolver, ResolverPackage(..) , PackageConstraint(..), unlabelPackageConstraint , PackagePreferences(..), InstalledPreference(..) , Progress(..), foldProgress ) import qualified Distribution.Client.PackageIndex as PackageIndex import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex import Distribution.Simple.PackageIndex (InstalledPackageIndex) import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo import Distribution.Client.ComponentDeps ( ComponentDeps ) import qualified Distribution.Client.ComponentDeps as CD import Distribution.Client.PackageIndex ( PackageIndex ) import Distribution.Package ( PackageName(..), PackageId, PackageIdentifier(..) , ComponentId(..) 
, Package(..), packageVersion, packageName , Dependency(Dependency), thisPackageVersion, simplifyDependency ) import Distribution.PackageDescription ( PackageDescription(buildDepends) ) import Distribution.Client.PackageUtils ( externalBuildDepends ) import Distribution.PackageDescription.Configuration ( finalizePackageDescription, flattenPackageDescription ) import Distribution.Version ( Version(..), VersionRange, withinRange, simplifyVersionRange , UpperBound(..), asVersionIntervals ) import Distribution.Compiler ( CompilerInfo ) import Distribution.System ( Platform ) import Distribution.Simple.Utils ( equating, comparing ) import Distribution.Text ( display ) import Data.List ( foldl', maximumBy, minimumBy, nub, sort, sortBy, groupBy ) import Data.Maybe ( fromJust, fromMaybe, catMaybes ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( Monoid(mempty) ) #endif import Control.Monad ( guard ) import qualified Data.Set as Set import Data.Set (Set) import qualified Data.Map as Map import qualified Data.Graph as Graph import qualified Data.Array as Array import Control.Exception ( assert ) -- ------------------------------------------------------------ -- * Search state types -- ------------------------------------------------------------ type Constraints = Constraints.Constraints InstalledPackageEx UnconfiguredPackage ExclusionReason type SelectedPackages = PackageIndex SelectedPackage -- ------------------------------------------------------------ -- * The search tree type -- ------------------------------------------------------------ data SearchSpace inherited pkg = ChoiceNode inherited [[(pkg, SearchSpace inherited pkg)]] | Failure Failure -- ------------------------------------------------------------ -- * Traverse a search tree -- ------------------------------------------------------------ explore :: (PackageName -> PackagePreferences) -> SearchSpace (SelectedPackages, Constraints, SelectionChanges) SelectablePackage -> Progress Log Failure 
(SelectedPackages, Constraints) explore _ (Failure failure) = Fail failure explore _ (ChoiceNode (s,c,_) []) = Done (s,c) explore pref (ChoiceNode _ choices) = case [ choice | [choice] <- choices ] of ((_, node'):_) -> Step (logInfo node') (explore pref node') [] -> Step (logInfo node') (explore pref node') where choice = minimumBy (comparing topSortNumber) choices pkgname = packageName . fst . head $ choice (_, node') = maximumBy (bestByPref pkgname) choice where topSortNumber choice = case fst (head choice) of InstalledOnly (InstalledPackageEx _ i _) -> i SourceOnly (UnconfiguredPackage _ i _ _) -> i InstalledAndSource _ (UnconfiguredPackage _ i _ _) -> i bestByPref pkgname = case packageInstalledPreference of PreferLatest -> comparing (\(p,_) -> ( isPreferred p, packageId p)) PreferInstalled -> comparing (\(p,_) -> (isInstalled p, isPreferred p, packageId p)) where isInstalled (SourceOnly _) = False isInstalled _ = True isPreferred p = packageVersion p `withinRange` preferredVersions (PackagePreferences preferredVersions packageInstalledPreference) = pref pkgname logInfo node = Select selected discarded where (selected, discarded) = case node of Failure _ -> ([], []) ChoiceNode (_,_,changes) _ -> changes -- ------------------------------------------------------------ -- * Generate a search tree -- ------------------------------------------------------------ type ConfigurePackage = PackageIndex SelectablePackage -> SelectablePackage -> Either [Dependency] SelectedPackage -- | (packages selected, packages discarded) type SelectionChanges = ([SelectedPackage], [PackageId]) searchSpace :: ConfigurePackage -> Constraints -> SelectedPackages -> SelectionChanges -> Set PackageName -> SearchSpace (SelectedPackages, Constraints, SelectionChanges) SelectablePackage searchSpace configure constraints selected changes next = assert (Set.null (selectedSet `Set.intersection` next)) $ assert (selectedSet `Set.isSubsetOf` Constraints.packages constraints) $ assert (next 
`Set.isSubsetOf` Constraints.packages constraints) $ ChoiceNode (selected, constraints, changes) [ [ (pkg, select name pkg) | pkg <- PackageIndex.lookupPackageName available name ] | name <- Set.elems next ] where available = Constraints.choices constraints selectedSet = Set.fromList (map packageName (PackageIndex.allPackages selected)) select name pkg = case configure available pkg of Left missing -> Failure $ ConfigureFailed pkg [ (dep, Constraints.conflicting constraints dep) | dep <- missing ] Right pkg' -> case constrainDeps pkg' newDeps (addDeps constraints newPkgs) [] of Left failure -> Failure failure Right (constraints', newDiscarded) -> searchSpace configure constraints' selected' (newSelected, newDiscarded) next' where selected' = foldl' (flip PackageIndex.insert) selected newSelected newSelected = case Constraints.isPaired constraints (packageId pkg) of Nothing -> [pkg'] Just pkgid' -> [pkg', pkg''] where Just pkg'' = fmap (\(InstalledOnly p) -> InstalledOnly p) (PackageIndex.lookupPackageId available pkgid') newPkgs = [ name' | (Dependency name' _, _) <- newDeps , null (PackageIndex.lookupPackageName selected' name') ] newDeps = concatMap packageConstraints newSelected next' = Set.delete name $ foldl' (flip Set.insert) next newPkgs packageConstraints :: SelectedPackage -> [(Dependency, Bool)] packageConstraints = either installedConstraints availableConstraints . 
preferSource where preferSource (InstalledOnly pkg) = Left pkg preferSource (SourceOnly pkg) = Right pkg preferSource (InstalledAndSource _ pkg) = Right pkg installedConstraints (InstalledPackageEx _ _ deps) = [ (thisPackageVersion dep, True) | dep <- deps ] availableConstraints (SemiConfiguredPackage _ _ _ deps) = [ (dep, False) | dep <- deps ] addDeps :: Constraints -> [PackageName] -> Constraints addDeps = foldr $ \pkgname cs -> case Constraints.addTarget pkgname cs of Satisfiable cs' () -> cs' _ -> impossible "addDeps unsatisfiable" constrainDeps :: SelectedPackage -> [(Dependency, Bool)] -> Constraints -> [PackageId] -> Either Failure (Constraints, [PackageId]) constrainDeps pkg [] cs discard = case addPackageSelectConstraint (packageId pkg) cs of Satisfiable cs' discard' -> Right (cs', discard' ++ discard) _ -> impossible "constrainDeps unsatisfiable(1)" constrainDeps pkg ((dep, installedConstraint):deps) cs discard = case addPackageDependencyConstraint (packageId pkg) dep installedConstraint cs of Satisfiable cs' discard' -> constrainDeps pkg deps cs' (discard' ++ discard) Unsatisfiable -> impossible "constrainDeps unsatisfiable(2)" ConflictsWith conflicts -> Left (DependencyConflict pkg dep installedConstraint conflicts) -- ------------------------------------------------------------ -- * The main algorithm -- ------------------------------------------------------------ search :: ConfigurePackage -> (PackageName -> PackagePreferences) -> Constraints -> Set PackageName -> Progress Log Failure (SelectedPackages, Constraints) search configure pref constraints = explore pref . searchSpace configure constraints mempty ([], []) -- ------------------------------------------------------------ -- * The top level resolver -- ------------------------------------------------------------ -- | The main exported resolver, with string logging and failure types to fit -- the standard 'DependencyResolver' interface. 
-- topDownResolver :: DependencyResolver topDownResolver platform cinfo installedPkgIndex sourcePkgIndex preferences constraints targets = mapMessages $ topDownResolver' platform cinfo (convertInstalledPackageIndex installedPkgIndex) sourcePkgIndex preferences (map unlabelPackageConstraint constraints) targets where mapMessages :: Progress Log Failure a -> Progress String String a mapMessages = foldProgress (Step . showLog) (Fail . showFailure) Done -- | The native resolver with detailed structured logging and failure types. -- topDownResolver' :: Platform -> CompilerInfo -> PackageIndex InstalledPackage -> PackageIndex SourcePackage -> (PackageName -> PackagePreferences) -> [PackageConstraint] -> [PackageName] -> Progress Log Failure [ResolverPackage] topDownResolver' platform cinfo installedPkgIndex sourcePkgIndex preferences constraints targets = fmap (uncurry finalise) . (\cs -> search configure preferences cs initialPkgNames) =<< pruneBottomUp platform cinfo =<< addTopLevelConstraints constraints =<< addTopLevelTargets targets emptyConstraintSet where configure = configurePackage platform cinfo emptyConstraintSet :: Constraints emptyConstraintSet = Constraints.empty (annotateInstalledPackages topSortNumber installedPkgIndex') (annotateSourcePackages constraints topSortNumber sourcePkgIndex') (installedPkgIndex', sourcePkgIndex') = selectNeededSubset installedPkgIndex sourcePkgIndex initialPkgNames topSortNumber = topologicalSortNumbering installedPkgIndex' sourcePkgIndex' initialPkgNames = Set.fromList targets finalise selected' constraints' = map toResolverPackage . PackageIndex.allPackages . fst . improvePlan installedPkgIndex' constraints' . 
PackageIndex.fromList $ finaliseSelectedPackages preferences selected' constraints' toResolverPackage :: FinalSelectedPackage -> ResolverPackage toResolverPackage (SelectedInstalled (InstalledPackage pkg _)) = PreExisting pkg toResolverPackage (SelectedSource pkg) = Configured pkg addTopLevelTargets :: [PackageName] -> Constraints -> Progress a Failure Constraints addTopLevelTargets [] cs = Done cs addTopLevelTargets (pkg:pkgs) cs = case Constraints.addTarget pkg cs of Satisfiable cs' () -> addTopLevelTargets pkgs cs' Unsatisfiable -> Fail (NoSuchPackage pkg) ConflictsWith _conflicts -> impossible "addTopLevelTargets conflicts" addTopLevelConstraints :: [PackageConstraint] -> Constraints -> Progress Log Failure Constraints addTopLevelConstraints [] cs = Done cs addTopLevelConstraints (PackageConstraintFlags _ _ :deps) cs = addTopLevelConstraints deps cs addTopLevelConstraints (PackageConstraintVersion pkg ver:deps) cs = case addTopLevelVersionConstraint pkg ver cs of Satisfiable cs' pkgids -> Step (AppliedVersionConstraint pkg ver pkgids) (addTopLevelConstraints deps cs') Unsatisfiable -> Fail (TopLevelVersionConstraintUnsatisfiable pkg ver) ConflictsWith conflicts -> Fail (TopLevelVersionConstraintConflict pkg ver conflicts) addTopLevelConstraints (PackageConstraintInstalled pkg:deps) cs = case addTopLevelInstalledConstraint pkg cs of Satisfiable cs' pkgids -> Step (AppliedInstalledConstraint pkg InstalledConstraint pkgids) (addTopLevelConstraints deps cs') Unsatisfiable -> Fail (TopLevelInstallConstraintUnsatisfiable pkg InstalledConstraint) ConflictsWith conflicts -> Fail (TopLevelInstallConstraintConflict pkg InstalledConstraint conflicts) addTopLevelConstraints (PackageConstraintSource pkg:deps) cs = case addTopLevelSourceConstraint pkg cs of Satisfiable cs' pkgids -> Step (AppliedInstalledConstraint pkg SourceConstraint pkgids) (addTopLevelConstraints deps cs') Unsatisfiable -> Fail (TopLevelInstallConstraintUnsatisfiable pkg SourceConstraint) ConflictsWith 
conflicts -> Fail (TopLevelInstallConstraintConflict pkg SourceConstraint conflicts) addTopLevelConstraints (PackageConstraintStanzas _ _ : deps) cs = addTopLevelConstraints deps cs -- | Add exclusion on available packages that cannot be configured. -- pruneBottomUp :: Platform -> CompilerInfo -> Constraints -> Progress Log Failure Constraints pruneBottomUp platform comp constraints = foldr prune Done (initialPackages constraints) constraints where prune pkgs rest cs = foldr addExcludeConstraint rest unconfigurable cs where unconfigurable = [ (pkg, missing) -- if necessary we could look up missing reasons | (Just pkg', pkg) <- zip (map getSourcePkg pkgs) pkgs , Left missing <- [configure cs pkg'] ] addExcludeConstraint (pkg, missing) rest cs = let reason = ExcludedByConfigureFail missing in case addPackageExcludeConstraint (packageId pkg) reason cs of Satisfiable cs' [pkgid]| packageId pkg == pkgid -> Step (ExcludeUnconfigurable pkgid) (rest cs') Satisfiable _ _ -> impossible "pruneBottomUp satisfiable" _ -> Fail $ ConfigureFailed pkg [ (dep, Constraints.conflicting cs dep) | dep <- missing ] configure cs (UnconfiguredPackage (SourcePackage _ pkg _ _) _ flags stanzas) = finalizePackageDescription flags (dependencySatisfiable cs) platform comp [] (enableStanzas stanzas pkg) dependencySatisfiable cs = not . null . PackageIndex.lookupDependency (Constraints.choices cs) -- collect each group of packages (by name) in reverse topsort order initialPackages = reverse . sortBy (comparing (topSortNumber . head)) . PackageIndex.allPackagesByName . 
Constraints.choices topSortNumber (InstalledOnly (InstalledPackageEx _ i _)) = i topSortNumber (SourceOnly (UnconfiguredPackage _ i _ _)) = i topSortNumber (InstalledAndSource _ (UnconfiguredPackage _ i _ _)) = i getSourcePkg (InstalledOnly _ ) = Nothing getSourcePkg (SourceOnly spkg) = Just spkg getSourcePkg (InstalledAndSource _ spkg) = Just spkg configurePackage :: Platform -> CompilerInfo -> ConfigurePackage configurePackage platform cinfo available spkg = case spkg of InstalledOnly ipkg -> Right (InstalledOnly ipkg) SourceOnly apkg -> fmap SourceOnly (configure apkg) InstalledAndSource ipkg apkg -> fmap (InstalledAndSource ipkg) (configure apkg) where configure (UnconfiguredPackage apkg@(SourcePackage _ p _ _) _ flags stanzas) = case finalizePackageDescription flags dependencySatisfiable platform cinfo [] (enableStanzas stanzas p) of Left missing -> Left missing Right (pkg, flags') -> Right $ SemiConfiguredPackage apkg flags' stanzas (externalBuildDepends pkg) dependencySatisfiable = not . null . PackageIndex.lookupDependency available -- | Annotate each installed packages with its set of transitive dependencies -- and its topological sort number. -- annotateInstalledPackages :: (PackageName -> TopologicalSortNumber) -> PackageIndex InstalledPackage -> PackageIndex InstalledPackageEx annotateInstalledPackages dfsNumber installed = PackageIndex.fromList [ InstalledPackageEx pkg (dfsNumber (packageName pkg)) (transitiveDepends pkg) | pkg <- PackageIndex.allPackages installed ] where transitiveDepends :: InstalledPackage -> [PackageId] transitiveDepends = map (packageId . toPkg) . tail . Graph.reachable graph . fromJust . toVertex . packageId (graph, toPkg, toVertex) = dependencyGraph installed -- | Annotate each available packages with its topological sort number and any -- user-supplied partial flag assignment. 
-- annotateSourcePackages :: [PackageConstraint] -> (PackageName -> TopologicalSortNumber) -> PackageIndex SourcePackage -> PackageIndex UnconfiguredPackage annotateSourcePackages constraints dfsNumber sourcePkgIndex = PackageIndex.fromList [ UnconfiguredPackage pkg (dfsNumber name) (flagsFor name) (stanzasFor name) | pkg <- PackageIndex.allPackages sourcePkgIndex , let name = packageName pkg ] where flagsFor = fromMaybe [] . flip Map.lookup flagsMap flagsMap = Map.fromList [ (name, flags) | PackageConstraintFlags name flags <- constraints ] stanzasFor = fromMaybe [] . flip Map.lookup stanzasMap stanzasMap = Map.fromListWith (++) [ (name, stanzas) | PackageConstraintStanzas name stanzas <- constraints ] -- | One of the heuristics we use when guessing which path to take in the -- search space is an ordering on the choices we make. It's generally better -- to make decisions about packages higer in the dep graph first since they -- place constraints on packages lower in the dep graph. -- -- To pick them in that order we annotate each package with its topological -- sort number. So if package A depends on package B then package A will have -- a lower topological sort number than B and we'll make a choice about which -- version of A to pick before we make a choice about B (unless there is only -- one possible choice for B in which case we pick that immediately). -- -- To construct these topological sort numbers we combine and flatten the -- installed and source package sets. We consider only dependencies between -- named packages, not including versions and for not-yet-configured packages -- we look at all the possible dependencies, not just those under any single -- flag assignment. This means we can actually get impossible combinations of -- edges and even cycles, but that doesn't really matter here, it's only a -- heuristic. 
-- topologicalSortNumbering :: PackageIndex InstalledPackage -> PackageIndex SourcePackage -> (PackageName -> TopologicalSortNumber) topologicalSortNumbering installedPkgIndex sourcePkgIndex = \pkgname -> let Just vertex = toVertex pkgname in topologicalSortNumbers Array.! vertex where topologicalSortNumbers = Array.array (Array.bounds graph) (zip (Graph.topSort graph) [0..]) (graph, _, toVertex) = Graph.graphFromEdges $ [ ((), packageName pkg, nub deps) | pkgs@(pkg:_) <- PackageIndex.allPackagesByName installedPkgIndex , let deps = [ packageName dep | pkg' <- pkgs , dep <- sourceDeps pkg' ] ] ++ [ ((), packageName pkg, nub deps) | pkgs@(pkg:_) <- PackageIndex.allPackagesByName sourcePkgIndex , let deps = [ depName | SourcePackage _ pkg' _ _ <- pkgs , Dependency depName _ <- buildDepends (flattenPackageDescription pkg') ] ] -- | We don't need the entire index (which is rather large and costly if we -- force it by examining the whole thing). So trace out the maximul subset of -- each index that we could possibly ever need. Do this by flattening packages -- and looking at the names of all possible dependencies. 
-- selectNeededSubset :: PackageIndex InstalledPackage -> PackageIndex SourcePackage -> Set PackageName -> (PackageIndex InstalledPackage ,PackageIndex SourcePackage) selectNeededSubset installedPkgIndex sourcePkgIndex = select mempty mempty where select :: PackageIndex InstalledPackage -> PackageIndex SourcePackage -> Set PackageName -> (PackageIndex InstalledPackage ,PackageIndex SourcePackage) select installedPkgIndex' sourcePkgIndex' remaining | Set.null remaining = (installedPkgIndex', sourcePkgIndex') | otherwise = select installedPkgIndex'' sourcePkgIndex'' remaining'' where (next, remaining') = Set.deleteFindMin remaining moreInstalled = PackageIndex.lookupPackageName installedPkgIndex next moreSource = PackageIndex.lookupPackageName sourcePkgIndex next moreRemaining = -- we filter out packages already included in the indexes -- this avoids an infinite loop if a package depends on itself -- like base-3.0.3.0 with base-4.0.0.0 filter notAlreadyIncluded $ [ packageName dep | pkg <- moreInstalled , dep <- sourceDeps pkg ] ++ [ name | SourcePackage _ pkg _ _ <- moreSource , Dependency name _ <- buildDepends (flattenPackageDescription pkg) ] installedPkgIndex'' = foldl' (flip PackageIndex.insert) installedPkgIndex' moreInstalled sourcePkgIndex'' = foldl' (flip PackageIndex.insert) sourcePkgIndex' moreSource remaining'' = foldl' (flip Set.insert) remaining' moreRemaining notAlreadyIncluded name = null (PackageIndex.lookupPackageName installedPkgIndex' name) && null (PackageIndex.lookupPackageName sourcePkgIndex' name) -- | The old top down solver assumes that installed packages are indexed by -- their source package id. But these days they're actually indexed by an -- installed package id and there can be many installed packages with the same -- source package id. This function tries to do a convertion, but it can only -- be partial. 
--
convertInstalledPackageIndex :: InstalledPackageIndex
                             -> PackageIndex InstalledPackage
convertInstalledPackageIndex index' = PackageIndex.fromList
    -- There can be multiple installed instances of each package version,
    -- like when the same package is installed in the global & user DBs.
    -- InstalledPackageIndex.allPackagesBySourcePackageId gives us the
    -- installed packages with the most preferred instances first, so by
    -- picking the first we should get the user one. This is almost but not
    -- quite the same as what ghc does.
    [ InstalledPackage ipkg (sourceDepsOf index' ipkg)
    | (_,ipkg:_) <- InstalledPackageIndex.allPackagesBySourcePackageId index' ]
  where
    -- The InstalledPackageInfo only lists dependencies by the
    -- ComponentId, which means we do not directly know the corresponding
    -- source dependency. The only way to find out is to lookup the
    -- ComponentId to get the InstalledPackageInfo and look at its
    -- source PackageId. But if the package is broken because it depends on
    -- other packages that do not exist then we have a problem: we cannot find
    -- the original source package id. Instead we make up a bogus package id.
    -- This should have the same effect since it should be a dependency on a
    -- nonexistent package.
    sourceDepsOf index ipkg =
      [ maybe (brokenPackageId depid) packageId mdep
      | let depids = InstalledPackageInfo.depends ipkg
            getpkg = InstalledPackageIndex.lookupComponentId index
      , (depid, mdep) <- zip depids (map getpkg depids) ]

    -- Fabricate a clearly-unsatisfiable package id for a broken dependency.
    brokenPackageId (ComponentId str) =
      PackageIdentifier (PackageName (str ++ "-broken")) (Version [] [])

-- ------------------------------------------------------------
-- * Post processing the solution
-- ------------------------------------------------------------

-- | Turn the solver's selections into concrete installed/configured packages.
finaliseSelectedPackages :: (PackageName -> PackagePreferences)
                         -> SelectedPackages
                         -> Constraints
                         -> [FinalSelectedPackage]
finaliseSelectedPackages pref selected constraints =
    map finaliseSelected (PackageIndex.allPackages selected)
  where
    remainingChoices = Constraints.choices constraints

    finaliseSelected (InstalledOnly      ipkg     ) = finaliseInstalled ipkg
    finaliseSelected (SourceOnly              apkg) = finaliseSource Nothing apkg
    finaliseSelected (InstalledAndSource ipkg apkg) =
      case PackageIndex.lookupPackageId remainingChoices (packageId ipkg) of
        --picked package not in constraints
        Nothing                       -> impossible "finaliseSelected no pkg"
        -- to constrain to avail only:
        Just (SourceOnly _)           -> impossible "finaliseSelected src only"
        Just (InstalledOnly _)        -> finaliseInstalled ipkg
        Just (InstalledAndSource _ _) -> finaliseSource (Just ipkg) apkg

    finaliseInstalled (InstalledPackageEx pkg _ _) = SelectedInstalled pkg

    finaliseSource mipkg (SemiConfiguredPackage pkg flags stanzas deps) =
        SelectedSource (ConfiguredPackage pkg flags stanzas deps')
      where
        -- We cheat in the cabal solver, and classify all dependencies as
        -- library dependencies.
        deps' :: ComponentDeps [ConfiguredId]
        deps' = CD.fromLibraryDeps $ map (confId . pickRemaining mipkg) deps

    -- InstalledOrSource indicates that we either have a source package
    -- available, or an installed one, or both. In the case that we have both
    -- available, we don't yet know if we can pick the installed one (the
    -- dependencies may not match up, for instance); this is verified in
    -- `improvePlan`.
    --
    -- This means that at this point we cannot construct a valid installed
    -- package ID yet for the dependencies. We therefore have two options:
    --
    -- * We could leave the installed package ID undefined here, and have a
    -- separate pass over the output of the top-down solver, fixing all
    -- dependencies so that if we depend on an already installed package we
    -- use the proper installed package ID.
    --
    -- * We can _always_ use fake installed IDs, irrespective of whether the
    -- dependency is on an already installed package or not. This is okay
    -- because (i) the top-down solver does not (and never will) support
    -- multiple package instances, and (ii) we initialize the FakeMap with
    -- fake IDs for already installed packages.
    --
    -- For now we use the second option; if however we change the implementation
    -- of these fake IDs so that we do away with the FakeMap and update a
    -- package reverse dependencies as we execute the install plan and discover
    -- real package IDs, then this is no longer possible and we have to
    -- implement the first option (see also Note [FakeMap] in Cabal).
    confId :: InstalledOrSource InstalledPackageEx UnconfiguredPackage -> ConfiguredId
    confId pkg = ConfiguredId {
        confSrcId  = packageId pkg
      , confInstId = fakeComponentId (packageId pkg)
      }

    -- Resolve a dependency to the single remaining candidate, preferring the
    -- currently-used, then preferred, then highest version.
    pickRemaining mipkg dep@(Dependency _name versionRange) =
      case PackageIndex.lookupDependency remainingChoices dep of
        []        -> impossible "pickRemaining no pkg"
        [pkg']    -> pkg'
        remaining -> assert (checkIsPaired remaining)
                   $ maximumBy bestByPref remaining
      where
        -- We order candidate packages to pick for a dependency by these
        -- three factors. The last factor is just highest version wins.
        bestByPref =
          comparing (\p -> (isCurrent p, isPreferred p, packageVersion p))
        -- Is the package already used by the installed version of this
        -- package? If so we should pick that first. This stops us from doing
        -- silly things like deciding to rebuild haskell98 against base 3.
        isCurrent = case mipkg :: Maybe InstalledPackageEx of
          Nothing   -> \_ -> False
          Just ipkg -> \p -> packageId p `elem` sourceDeps ipkg
        -- If there is no upper bound on the version range then we apply a
        -- preferred version according to the hackage or user's suggested
        -- version constraints. TODO: distinguish hacks from prefs
        bounded = boundedAbove versionRange
        isPreferred p
          | bounded   = True -- any constant will do
          | otherwise = packageVersion p `withinRange` preferredVersions
          where (PackagePreferences preferredVersions _)
                  = pref (packageName p)

        boundedAbove :: VersionRange -> Bool
        boundedAbove vr = case asVersionIntervals vr of
          []        -> True -- this is the inconsistent version range.
          intervals -> case last intervals of
            (_, UpperBound _ _) -> True
            (_, NoUpperBound  ) -> False

        -- We really only expect to find more than one choice remaining when
        -- we're finalising a dependency on a paired package.
        checkIsPaired [p1, p2] =
          case Constraints.isPaired constraints (packageId p1) of
            Just p2' -> packageId p2' == packageId p2
            Nothing  -> False
        checkIsPaired _ = False

-- | Improve an existing installation plan by, where possible, swapping
-- packages we plan to install with ones that are already installed.
-- This may add additional constraints due to the dependencies of installed
-- packages on other installed packages.
--
improvePlan :: PackageIndex InstalledPackage
            -> Constraints
            -> PackageIndex FinalSelectedPackage
            -> (PackageIndex FinalSelectedPackage, Constraints)
improvePlan installed constraints0 selected0 =
    foldl' improve (selected0, constraints0) (reverseTopologicalOrder selected0)
  where
    improve (selected, constraints) = fromMaybe (selected, constraints)
                                    . improvePkg selected constraints

    -- The idea is to improve the plan by swapping a configured package for
    -- an equivalent installed one. For a particular package the condition is
    -- that the package be in a configured state, that the same version be
    -- already installed with the exact same dependencies and all the packages
    -- in the plan that it depends on are in the installed state
    improvePkg selected constraints pkgid = do
      SelectedSource pkg <- PackageIndex.lookupPackageId selected pkgid
      ipkg <- PackageIndex.lookupPackageId installed pkgid
      guard $ all (isInstalled selected) (sourceDeps pkg)
      tryInstalled selected constraints [ipkg]

    isInstalled selected pkgid =
      case PackageIndex.lookupPackageId selected pkgid of
        Just (SelectedInstalled _) -> True
        _                          -> False

    -- Swap in each installed package in turn, pulling in any of its own
    -- installed dependencies that are not yet part of the plan.
    tryInstalled :: PackageIndex FinalSelectedPackage -> Constraints
                 -> [InstalledPackage]
                 -> Maybe (PackageIndex FinalSelectedPackage, Constraints)
    tryInstalled selected constraints [] = Just (selected, constraints)
    tryInstalled selected constraints (pkg:pkgs) =
      case constraintsOk (packageId pkg) (sourceDeps pkg) constraints of
        Nothing           -> Nothing
        Just constraints' -> tryInstalled selected' constraints' pkgs'
          where
            selected' = PackageIndex.insert (SelectedInstalled pkg) selected
            pkgs'     = catMaybes (map notSelected (sourceDeps pkg)) ++ pkgs
            notSelected pkgid =
              case (PackageIndex.lookupPackageId installed pkgid
                   ,PackageIndex.lookupPackageId selected pkgid) of
                (Just pkg', Nothing) -> Just pkg'
                _                    -> Nothing

    -- Check each exact-version dependency of the installed candidate is
    -- still satisfiable under the accumulated constraints.
    constraintsOk _     []              constraints = Just constraints
    constraintsOk pkgid (pkgid':pkgids) constraints =
      case addPackageDependencyConstraint pkgid dep True constraints of
        Satisfiable constraints' _ -> constraintsOk pkgid pkgids constraints'
        _                          -> Nothing
      where
        dep = thisPackageVersion pkgid'

    reverseTopologicalOrder :: PackageIndex FinalSelectedPackage -> [PackageId]
    reverseTopologicalOrder index = map (packageId . toPkg)
                                  . Graph.topSort
                                  . Graph.transposeG
                                  $ graph
      where (graph, toPkg, _) = dependencyGraph index

-- ------------------------------------------------------------
-- * Adding and recording constraints
-- ------------------------------------------------------------

-- | Pin a package name to exactly the given version.
addPackageSelectConstraint :: PackageId -> Constraints
                           -> Satisfiable Constraints
                                [PackageId] ExclusionReason
addPackageSelectConstraint pkgid =
    Constraints.constrain pkgname constraint reason
  where
    pkgname          = packageName pkgid
    constraint ver _ = ver == packageVersion pkgid
    reason           = SelectedOther pkgid

-- | Exclude the source version of the given package id (the installed
-- instance, if any, remains allowed).
addPackageExcludeConstraint :: PackageId -> ExclusionReason
                            -> Constraints
                            -> Satisfiable Constraints
                                 [PackageId] ExclusionReason
addPackageExcludeConstraint pkgid reason =
    Constraints.constrain pkgname constraint reason
  where
    pkgname = packageName pkgid
    constraint ver installed
      | ver == packageVersion pkgid = installed
      | otherwise                   = True

-- | Constrain a dependency to its version range; when the Bool is True, also
-- require an installed instance.
addPackageDependencyConstraint :: PackageId -> Dependency -> Bool
                               -> Constraints
                               -> Satisfiable Constraints
                                    [PackageId] ExclusionReason
addPackageDependencyConstraint pkgid dep@(Dependency pkgname verrange)
                               installedConstraint =
    Constraints.constrain pkgname constraint reason
  where
    constraint ver installed = ver `withinRange` verrange
                            && if installedConstraint then installed else True
    reason = ExcludedByPackageDependency pkgid dep installedConstraint

-- | Apply a user-supplied top-level version constraint.
addTopLevelVersionConstraint :: PackageName -> VersionRange
                             -> Constraints
                             -> Satisfiable Constraints
                                  [PackageId] ExclusionReason
addTopLevelVersionConstraint pkgname verrange =
    Constraints.constrain pkgname constraint reason
  where
    constraint ver _installed = ver `withinRange` verrange
    reason = ExcludedByTopLevelConstraintVersion pkgname verrange

-- | Restrict a package to installed-only / source-only instances
-- respectively, as requested at the top level.
addTopLevelInstalledConstraint,
  addTopLevelSourceConstraint :: PackageName
                              -> Constraints
                              -> Satisfiable Constraints
                                   [PackageId] ExclusionReason
addTopLevelInstalledConstraint pkgname =
    Constraints.constrain pkgname constraint reason
  where
    constraint _ver installed = installed
    reason = ExcludedByTopLevelConstraintInstalled pkgname

addTopLevelSourceConstraint pkgname =
    Constraints.constrain pkgname constraint reason
  where
    constraint _ver installed = not installed
    reason = ExcludedByTopLevelConstraintSource pkgname

-- ------------------------------------------------------------
-- * Reasons for constraints
-- ------------------------------------------------------------

-- | For every constraint we record we also record the reason that constraint
-- is needed. So if we end up failing due to conflicting constraints then we
-- can give an explanation as to what was conflicting and why.
--
data ExclusionReason =
     -- | We selected this other version of the package. That means we exclude
     -- all the other versions.
     SelectedOther PackageId
     -- | We excluded this version of the package because it failed to
     -- configure probably because of unsatisfiable deps.
   | ExcludedByConfigureFail [Dependency]
     -- | We excluded this version of the package because another package that
     -- we selected imposed a dependency which this package did not satisfy.
   | ExcludedByPackageDependency PackageId Dependency Bool
     -- | We excluded this version of the package because it did not satisfy
     -- a dependency given as an original top level input.
     --
   | ExcludedByTopLevelConstraintVersion   PackageName VersionRange
   | ExcludedByTopLevelConstraintInstalled PackageName
   | ExcludedByTopLevelConstraintSource    PackageName
  deriving Eq

-- | Given an excluded package and the reason it was excluded, produce a human
-- readable explanation.
--
showExclusionReason :: PackageId -> ExclusionReason -> String
showExclusionReason pkgid (SelectedOther pkgid') =
  display pkgid ++ " was excluded because " ++
  display pkgid' ++ " was selected instead"
showExclusionReason pkgid (ExcludedByConfigureFail missingDeps) =
  display pkgid ++ " was excluded because it could not be configured. "
  ++ "It requires " ++ listOf displayDep missingDeps
showExclusionReason pkgid (ExcludedByPackageDependency pkgid' dep installedConstraint)
  = display pkgid ++ " was excluded because " ++ display pkgid' ++ " requires "
 ++ (if installedConstraint then "an installed instance of " else "")
 ++ displayDep dep
showExclusionReason pkgid (ExcludedByTopLevelConstraintVersion pkgname verRange) =
  display pkgid ++ " was excluded because of the top level constraint " ++
  displayDep (Dependency pkgname verRange)
showExclusionReason pkgid (ExcludedByTopLevelConstraintInstalled pkgname) =
  display pkgid ++ " was excluded because of the top level constraint '"
  ++ display pkgname ++ " installed' which means that only installed instances "
  ++ "of the package may be selected."
showExclusionReason pkgid (ExcludedByTopLevelConstraintSource pkgname) =
  display pkgid ++ " was excluded because of the top level constraint '"
  ++ display pkgname ++ " source' which means that only source versions "
  ++ "of the package may be selected."
-- ------------------------------------------------------------
-- * Logging progress and failures
-- ------------------------------------------------------------

-- | Events emitted while the solver makes progress.
data Log = Select [SelectedPackage] [PackageId]
         | AppliedVersionConstraint   PackageName VersionRange [PackageId]
         | AppliedInstalledConstraint PackageName InstalledConstraint [PackageId]
         | ExcludeUnconfigurable PackageId

-- | The ways in which the solver can fail, with the evidence needed to
-- produce a human-readable explanation.
data Failure
   = NoSuchPackage
       PackageName
   | ConfigureFailed
       SelectablePackage
       [(Dependency, [(PackageId, [ExclusionReason])])]
   | DependencyConflict
       SelectedPackage Dependency Bool
       [(PackageId, [ExclusionReason])]
   | TopLevelVersionConstraintConflict
       PackageName VersionRange
       [(PackageId, [ExclusionReason])]
   | TopLevelVersionConstraintUnsatisfiable
       PackageName VersionRange
   | TopLevelInstallConstraintConflict
       PackageName InstalledConstraint
       [(PackageId, [ExclusionReason])]
   | TopLevelInstallConstraintUnsatisfiable
       PackageName InstalledConstraint

-- | Render a progress event for the user.
showLog :: Log -> String
showLog (Select selected discarded) = case (selectedMsg, discardedMsg) of
  ("", y) -> y
  (x, "") -> x
  (x,  y) -> x ++ " and " ++ y
  where
    selectedMsg = "selecting " ++ case selected of
      []     -> ""
      [s]    -> display (packageId s) ++ " " ++ kind s
      (s:ss) -> listOf id
              $ (display (packageId s) ++ " " ++ kind s)
              : [ display (packageVersion s') ++ " " ++ kind s'
                | s' <- ss ]

    kind (InstalledOnly _)        = "(installed)"
    kind (SourceOnly _)           = "(source)"
    kind (InstalledAndSource _ _) = "(installed or source)"

    -- Group discarded package ids by name so each name is shown once.
    discardedMsg = case discarded of
      [] -> ""
      _  -> "discarding " ++ listOf id
        [ element
        | (pkgid:pkgids) <- groupBy (equating packageName) (sort discarded)
        , element <- display pkgid : map (display . packageVersion) pkgids ]
showLog (AppliedVersionConstraint pkgname ver pkgids) =
     "applying constraint " ++ display (Dependency pkgname ver)
  ++ if null pkgids
       then ""
       else " which excludes " ++ listOf display pkgids
showLog (AppliedInstalledConstraint pkgname inst pkgids) =
     "applying constraint " ++ display pkgname ++ " '"
  ++ (case inst of InstalledConstraint -> "installed"; _ -> "source") ++ "' "
  ++ if null pkgids
       then ""
       else "which excludes " ++ listOf display pkgids
showLog (ExcludeUnconfigurable pkgid) =
     "excluding " ++ display pkgid ++ " (it cannot be configured)"

-- | Render a solver failure for the user, including exclusion reasons.
showFailure :: Failure -> String
showFailure (NoSuchPackage pkgname) =
     "The package " ++ display pkgname ++ " is unknown."
showFailure (ConfigureFailed pkg missingDeps) =
     "cannot configure " ++ displayPkg pkg ++ ". It requires "
  ++ listOf (displayDep . fst) missingDeps
  ++ '\n' : unlines (map (uncurry whyNot) missingDeps)
  where
    whyNot (Dependency name ver) [] =
         "There is no available version of " ++ display name
      ++ " that satisfies " ++ displayVer ver
    whyNot dep conflicts =
         "For the dependency on " ++ displayDep dep
      ++ " there are these packages: " ++ listOf display pkgs
      ++ ". However none of them are available.\n"
      ++ unlines [ showExclusionReason (packageId pkg') reason
                 | (pkg', reasons) <- conflicts, reason <- reasons ]
      where pkgs = map fst conflicts
showFailure (DependencyConflict pkg dep installedConstraint conflicts) =
     "dependencies conflict: " ++ displayPkg pkg ++ " requires "
  ++ (if installedConstraint then "an installed instance of " else "")
  ++ displayDep dep ++ " however:\n"
  ++ unlines [ showExclusionReason (packageId pkg') reason
             | (pkg', reasons) <- conflicts, reason <- reasons ]
showFailure (TopLevelVersionConstraintConflict name ver conflicts) =
     "constraints conflict: we have the top level constraint "
  ++ displayDep (Dependency name ver) ++ ", but\n"
  ++ unlines [ showExclusionReason (packageId pkg') reason
             | (pkg', reasons) <- conflicts, reason <- reasons ]
showFailure (TopLevelVersionConstraintUnsatisfiable name ver) =
     "There is no available version of " ++ display name
  ++ " that satisfies " ++ displayVer ver
showFailure (TopLevelInstallConstraintConflict name InstalledConstraint conflicts) =
     "constraints conflict: "
  ++ "top level constraint '" ++ display name ++ " installed' however\n"
  ++ unlines [ showExclusionReason (packageId pkg') reason
             | (pkg', reasons) <- conflicts, reason <- reasons ]
showFailure (TopLevelInstallConstraintUnsatisfiable name InstalledConstraint) =
     "There is no installed version of " ++ display name
showFailure (TopLevelInstallConstraintConflict name SourceConstraint conflicts) =
     "constraints conflict: "
  ++ "top level constraint '" ++ display name ++ " source' however\n"
  ++ unlines [ showExclusionReason (packageId pkg') reason
             | (pkg', reasons) <- conflicts, reason <- reasons ]
showFailure (TopLevelInstallConstraintUnsatisfiable name SourceConstraint) =
     "There is no available source version of " ++ display name

-- | Display a version range, simplified for readability.
displayVer :: VersionRange -> String
displayVer = display . simplifyVersionRange

-- | Display a dependency, simplified for readability.
displayDep :: Dependency -> String
displayDep = display . simplifyDependency

-- ------------------------------------------------------------
-- * Utils
-- ------------------------------------------------------------

-- | Abort with an "assertion failure" message; used for states the solver
-- believes unreachable.
impossible :: String -> a
impossible msg = internalError $ "assertion failure: " ++ msg

internalError :: String -> a
internalError msg = error $ "internal error: " ++ msg

displayPkg :: Package pkg => pkg -> String
displayPkg = display . packageId

-- | Join rendered items English-style: "a", "a and b", "a, b and c".
listOf :: (a -> String) -> [a] -> String
listOf _    []   = []
listOf disp [x0] = disp x0
listOf disp (x0:x1:xs) = disp x0 ++ go x1 xs
  where go x []       = " and " ++ disp x
        go x (x':xs') = ", " ++ disp x ++ go x' xs'

-- ------------------------------------------------------------
-- * Construct a dependency graph
-- ------------------------------------------------------------

-- | Builds a graph of the package dependencies.
--
-- Dependencies on other packages that are not in the index are discarded.
-- You can check if there are any such dependencies with 'brokenPackages'.
--
-- The top-down solver gets its own implementation, because both
-- `dependencyGraph` in `Distribution.Client.PlanIndex` (in cabal-install) and
-- `dependencyGraph` in `Distribution.Simple.PackageIndex` (in Cabal) both work
-- with `PackageIndex` from `Cabal` (that is, a package index indexed by
-- installed package IDs rather than package names).
--
-- Ideally we would switch the top-down solver over to use that too, so that
-- this duplication could be avoided, but that's a bit of work and the top-down
-- solver is legacy code anyway.
--
-- (NOTE: This is called at two types: InstalledPackage and FinalSelectedPackage.)
dependencyGraph :: PackageSourceDeps pkg
                => PackageIndex pkg
                -> (Graph.Graph, Graph.Vertex -> pkg, PackageId -> Maybe Graph.Vertex)
dependencyGraph index = (graph, vertexToPkg, pkgIdToVertex)
  where
    -- Adjacency lists: out-of-index dependencies drop out via catMaybes.
    graph = Array.listArray bounds $
              map (catMaybes . map pkgIdToVertex . sourceDeps) pkgs
    vertexToPkg vertex = pkgTable Array.! vertex
    pkgIdToVertex      = binarySearch 0 topBound

    -- Packages are sorted by id so vertex lookup can binary-search.
    pkgTable   = Array.listArray bounds pkgs
    pkgIdTable = Array.listArray bounds (map packageId pkgs)
    pkgs = sortBy (comparing packageId) (PackageIndex.allPackages index)
    topBound = length pkgs - 1
    bounds = (0, topBound)

    binarySearch a b key
      | a > b     = Nothing
      | otherwise = case compare key (pkgIdTable Array.! mid) of
          LT -> binarySearch a (mid-1) key
          EQ -> Just mid
          GT -> binarySearch (mid+1) b key
      where mid = (a + b) `div` 2
randen/cabal
cabal-install/Distribution/Client/Dependency/TopDown.hs
Haskell
bsd-3-clause
48,703
-- | GHC testsuite plugin for ticket T7702: stress CoreM's writer so that the
-- allocation attributable to the plugin dominates, letting the test detect
-- the space leak regression. Do not "simplify" the repetition below -- the
-- unrolled calls are the point of the test.
module T7702Plugin ( plugin ) where

import GhcPlugins

-- A plugin that does nothing but tickle CoreM's writer.
plugin :: Plugin
plugin = defaultPlugin { installCoreToDos = install }
  where
    install :: [CommandLineOption] -> [CoreToDo] -> CoreM [CoreToDo]
    install _ todos = do
        putMsgS "T7702Plugin"

        -- 1 million times, so the allocation in this plugin dominates allocation due
        -- to other compiler flags and the test framework can easily catch the difference
        -- can't use replicateM_ because it causes its own problems
        nothingX100000 ; nothingX100000 ; nothingX100000 ; nothingX100000 ; nothingX100000
        nothingX100000 ; nothingX100000 ; nothingX100000 ; nothingX100000 ; nothingX100000

        return todos

-- this will result in a call to plusWriter in CoreM's
-- >>= implementation, which was causing the space leak
nothing :: CoreM ()
nothing = liftIO (return ())

nothingX10 :: CoreM ()
nothingX10 = do
    nothing ; nothing ; nothing ; nothing ; nothing
    nothing ; nothing ; nothing ; nothing ; nothing

nothingX100 :: CoreM ()
nothingX100 = do
    nothingX10 ; nothingX10 ; nothingX10 ; nothingX10 ; nothingX10
    nothingX10 ; nothingX10 ; nothingX10 ; nothingX10 ; nothingX10

nothingX1000 :: CoreM ()
nothingX1000 = do
    nothingX100 ; nothingX100 ; nothingX100 ; nothingX100 ; nothingX100
    nothingX100 ; nothingX100 ; nothingX100 ; nothingX100 ; nothingX100

nothingX10000 :: CoreM ()
nothingX10000 = do
    nothingX1000 ; nothingX1000 ; nothingX1000 ; nothingX1000 ; nothingX1000
    nothingX1000 ; nothingX1000 ; nothingX1000 ; nothingX1000 ; nothingX1000

nothingX100000 :: CoreM ()
nothingX100000 = do
    nothingX10000 ; nothingX10000 ; nothingX10000 ; nothingX10000 ; nothingX10000
    nothingX10000 ; nothingX10000 ; nothingX10000 ; nothingX10000 ; nothingX10000
ezyang/ghc
testsuite/tests/simplCore/should_compile/T7702plugin/T7702Plugin.hs
Haskell
bsd-3-clause
1,886
-- !!! Conflicting re-exportation of dcon
-- NOTE(review): deliberate should-fail test -- presumably Mod144_A also
-- exports a constructor named 'Bar', so exporting both 'module Mod144_A' and
-- 'module M' re-exports it twice and must be rejected by the compiler.
-- Do not "fix" this module.
module M (module Mod144_A,module M) where

import Mod144_A

data Foo1 = Bar
urbanslug/ghc
testsuite/tests/module/mod144.hs
Haskell
bsd-3-clause
119
-- This caused 6.10.1 to segfault when run with +RTS -hb
-- trac #3001
--
-- Regression test for the biographical heap profiler: the program itself is
-- trivial; the interesting behaviour is how the RTS profiles it.
module Main (main) where

main :: IO ()
main = print $ replicate 40000 'x'
urbanslug/ghc
testsuite/tests/profiling/should_run/T3001.hs
Haskell
bsd-3-clause
148
{-# LANGUAGE OverloadedStrings #-}

-- | HTTP handler for deleting a stored MAC address from Redis.
module MacFinder.Controllers.Delete (
    deleteMac
) where

import Web.Scotty.Trans as S
import Web.Scotty.Hastache
import Control.Monad.IO.Class (liftIO)
import qualified Database.Redis as R
import qualified Data.Text.Lazy as T
import qualified Data.ByteString as B
import Control.Monad (when, unless)
import MacFinder.Util (convertTextToByteString)

-- | POST \/delete: remove the @macs:\<mac\>@ key for the submitted MAC.
--
-- Validation: rejects inputs shorter than 17 characters after stripping
-- (the length of a colon-separated MAC such as @aa:bb:cc:dd:ee:ff@).
-- Raises "Couldn't delete" if Redis reports an error or no key was removed;
-- otherwise redirects back to the index page.
deleteMac :: R.Connection -> ScottyH' ()
deleteMac redisConn = post "/delete" $ do
    textMac <- param "mac"
    when ((T.length . T.strip $ textMac) < 17) (raise "Bad MAC")
    -- Keys are stored lower-cased under the "macs:" prefix.
    let keyToDelete = B.concat ["macs:", convertTextToByteString . T.toLower $ textMac]
    -- R.del returns Either an error reply or the number of keys removed.
    value <- liftIO $ R.runRedis redisConn $ R.del [keyToDelete]
    either couldntDelete checkNumDeleted value
    redirect "/" `rescue` text
  where
    -- DEL returning 0 means the key did not exist; treat that as a failure.
    checkNumDeleted x = unless (x > 0) (couldntDelete ())
    couldntDelete _ = raise "Couldn't delete"
tlunter/MacFinder
src/MacFinder/Controllers/Delete.hs
Haskell
mit
954
-- | Placeholder module: currently exports nothing and contains no
-- definitions. Kept so the module hierarchy compiles; intended contents
-- are not evident from this file.
module System.RedProx.Chaos (
 ) where
adarqui/redprox-core
src/System/RedProx/Chaos.hs
Haskell
mit
38
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE CPP #-} {-# LANGUAGE FlexibleInstances #-} module Text.Hamlet.Parse ( Result (..) , Content (..) , Doc (..) , parseDoc , HamletSettings (..) , defaultHamletSettings , xhtmlHamletSettings , CloseStyle (..) , Binding (..) , NewlineStyle (..) , specialOrIdent , DataConstr (..) , Module (..) ) where import Text.Shakespeare.Base import Control.Applicative ((<$>), Applicative (..)) import Control.Monad import Control.Arrow import Data.Char (isUpper) import Data.Data import Text.ParserCombinators.Parsec hiding (Line) import Data.Set (Set) import qualified Data.Set as Set import Data.Maybe (mapMaybe, fromMaybe, isNothing) import Language.Haskell.TH.Syntax (Lift (..)) data Result v = Error String | Ok v deriving (Show, Eq, Read, Data, Typeable) instance Monad Result where return = Ok Error s >>= _ = Error s Ok v >>= f = f v fail = Error instance Functor Result where fmap = liftM instance Applicative Result where pure = return (<*>) = ap data Content = ContentRaw String | ContentVar Deref | ContentUrl Bool Deref -- ^ bool: does it include params? 
| ContentEmbed Deref | ContentMsg Deref | ContentAttrs Deref deriving (Show, Eq, Read, Data, Typeable) data Line = LineForall Deref Binding | LineIf Deref | LineElseIf Deref | LineElse | LineWith [(Deref, Binding)] | LineMaybe Deref Binding | LineNothing | LineCase Deref | LineOf Binding | LineTag { _lineTagName :: String , _lineAttr :: [(Maybe Deref, String, Maybe [Content])] , _lineContent :: [Content] , _lineClasses :: [(Maybe Deref, [Content])] , _lineAttrs :: [Deref] , _lineNoNewline :: Bool } | LineContent [Content] Bool -- ^ True == avoid newlines deriving (Eq, Show, Read) parseLines :: HamletSettings -> String -> Result (Maybe NewlineStyle, HamletSettings, [(Int, Line)]) parseLines set s = case parse parser s s of Left e -> Error $ show e Right x -> Ok x where parser = do mnewline <- parseNewline let set' = case mnewline of Nothing -> case hamletNewlines set of DefaultNewlineStyle -> set { hamletNewlines = AlwaysNewlines } _ -> set Just n -> set { hamletNewlines = n } res <- many (parseLine set') return (mnewline, set', res) parseNewline = (try (many eol' >> spaceTabs >> string "$newline ") >> parseNewline' >>= \nl -> eol' >> return nl) <|> return Nothing parseNewline' = (try (string "always") >> return (Just AlwaysNewlines)) <|> (try (string "never") >> return (Just NoNewlines)) <|> (try (string "text") >> return (Just NewlinesText)) eol' = (char '\n' >> return ()) <|> (string "\r\n" >> return ()) parseLine :: HamletSettings -> Parser (Int, Line) parseLine set = do ss <- fmap sum $ many ((char ' ' >> return 1) <|> (char '\t' >> fail "Tabs are not allowed in Hamlet indentation")) x <- doctype <|> doctypeDollar <|> comment <|> ssiInclude <|> htmlComment <|> doctypeRaw <|> backslash <|> controlIf <|> controlElseIf <|> (try (string "$else") >> spaceTabs >> eol >> return LineElse) <|> controlMaybe <|> (try (string "$nothing") >> spaceTabs >> eol >> return LineNothing) <|> controlForall <|> controlWith <|> controlCase <|> controlOf <|> angle <|> invalidDollar 
<|> (eol' >> return (LineContent [] True)) <|> (do (cs, avoidNewLines) <- content InContent isEof <- (eof >> return True) <|> return False if null cs && ss == 0 && isEof then fail "End of Hamlet template" else return $ LineContent cs avoidNewLines) return (ss, x) where eol' = (char '\n' >> return ()) <|> (string "\r\n" >> return ()) eol = eof <|> eol' doctype = do try $ string "!!!" >> eol return $ LineContent [ContentRaw $ hamletDoctype set ++ "\n"] True doctypeDollar = do _ <- try $ string "$doctype " name <- many $ noneOf "\r\n" eol case lookup name $ hamletDoctypeNames set of Nothing -> fail $ "Unknown doctype name: " ++ name Just val -> return $ LineContent [ContentRaw $ val ++ "\n"] True doctypeRaw = do x <- try $ string "<!" y <- many $ noneOf "\r\n" eol return $ LineContent [ContentRaw $ concat [x, y, "\n"]] True invalidDollar = do _ <- char '$' fail "Received a command I did not understand. If you wanted a literal $, start the line with a backslash." comment = do _ <- try $ string "$#" _ <- many $ noneOf "\r\n" eol return $ LineContent [] True ssiInclude = do x <- try $ string "<!--#" y <- many $ noneOf "\r\n" eol return $ LineContent [ContentRaw $ x ++ y] False htmlComment = do _ <- try $ string "<!--" _ <- manyTill anyChar $ try $ string "-->" x <- many nonComments eol return $ LineContent [ContentRaw $ concat x] False {- FIXME -} -- FIXME handle variables? 
nonComments = (many1 $ noneOf "\r\n<") <|> (do _ <- char '<' (do _ <- try $ string "!--" _ <- manyTill anyChar $ try $ string "-->" return "") <|> return "<") backslash = do _ <- char '\\' (eol >> return (LineContent [ContentRaw "\n"] True)) <|> (uncurry LineContent <$> content InContent) controlIf = do _ <- try $ string "$if" spaces x <- parseDeref _ <- spaceTabs eol return $ LineIf x controlElseIf = do _ <- try $ string "$elseif" spaces x <- parseDeref _ <- spaceTabs eol return $ LineElseIf x binding = do y <- identPattern spaces _ <- string "<-" spaces x <- parseDeref _ <- spaceTabs return (x,y) bindingSep = char ',' >> spaceTabs controlMaybe = do _ <- try $ string "$maybe" spaces (x,y) <- binding eol return $ LineMaybe x y controlForall = do _ <- try $ string "$forall" spaces (x,y) <- binding eol return $ LineForall x y controlWith = do _ <- try $ string "$with" spaces bindings <- (binding `sepBy` bindingSep) `endBy` eol return $ LineWith $ concat bindings -- concat because endBy returns a [[(Deref,Ident)]] controlCase = do _ <- try $ string "$case" spaces x <- parseDeref _ <- spaceTabs eol return $ LineCase x controlOf = do _ <- try $ string "$of" spaces x <- identPattern _ <- spaceTabs eol return $ LineOf x content cr = do x <- many $ content' cr case cr of InQuotes -> void $ char '"' NotInQuotes -> return () NotInQuotesAttr -> return () InContent -> eol return (cc $ map fst x, any snd x) where cc [] = [] cc (ContentRaw a:ContentRaw b:c) = cc $ ContentRaw (a ++ b) : c cc (a:b) = a : cc b content' cr = contentHash <|> contentAt <|> contentCaret <|> contentUnder <|> contentReg' cr contentHash = do x <- parseHash case x of Left str -> return (ContentRaw str, null str) Right deref -> return (ContentVar deref, False) contentAt = do x <- parseAt return $ case x of Left str -> (ContentRaw str, null str) Right (s, y) -> (ContentUrl y s, False) contentCaret = do x <- parseCaret case x of Left str -> return (ContentRaw str, null str) Right deref -> return (ContentEmbed 
deref, False) contentUnder = do x <- parseUnder case x of Left str -> return (ContentRaw str, null str) Right deref -> return (ContentMsg deref, False) contentReg' x = (flip (,) False) <$> contentReg x contentReg InContent = (ContentRaw . return) <$> noneOf "#@^\r\n" contentReg NotInQuotes = (ContentRaw . return) <$> noneOf "@^#. \t\n\r>" contentReg NotInQuotesAttr = (ContentRaw . return) <$> noneOf "@^ \t\n\r>" contentReg InQuotes = (ContentRaw . return) <$> noneOf "#@^\"\n\r" tagAttribValue notInQuotes = do cr <- (char '"' >> return InQuotes) <|> return notInQuotes fst <$> content cr tagIdent = char '#' >> TagIdent <$> tagAttribValue NotInQuotes tagCond = do d <- between (char ':') (char ':') parseDeref tagClass (Just d) <|> tagAttrib (Just d) tagClass x = do clazz <- char '.' >> tagAttribValue NotInQuotes let hasHash (ContentRaw s) = any (== '#') s hasHash _ = False if any hasHash clazz then fail $ "Invalid class: " ++ show clazz ++ ". Did you want a space between a class and an ID?" else return (TagClass (x, clazz)) tagAttrib cond = do s <- many1 $ noneOf " \t=\r\n><" v <- (char '=' >> Just <$> tagAttribValue NotInQuotesAttr) <|> return Nothing return $ TagAttrib (cond, s, v) tagAttrs = do _ <- char '*' d <- between (char '{') (char '}') parseDeref return $ TagAttribs d tag' = foldr tag'' ("div", [], [], []) tag'' (TagName s) (_, y, z, as) = (s, y, z, as) tag'' (TagIdent s) (x, y, z, as) = (x, (Nothing, "id", Just s) : y, z, as) tag'' (TagClass s) (x, y, z, as) = (x, y, s : z, as) tag'' (TagAttrib s) (x, y, z, as) = (x, s : y, z, as) tag'' (TagAttribs s) (x, y, z, as) = (x, y, z, s : as) ident :: Parser Ident ident = do i <- many1 (alphaNum <|> char '_' <|> char '\'') white return (Ident i) <?> "identifier" parens = between (char '(' >> white) (char ')' >> white) brackets = between (char '[' >> white) (char ']' >> white) braces = between (char '{' >> white) (char '}' >> white) comma = char ',' >> white atsign = char '@' >> white equals = char '=' >> white white 
= skipMany $ char ' ' wildDots = string ".." >> white isVariable (Ident (x:_)) = not (isUpper x) isVariable (Ident []) = error "isVariable: bad identifier" isConstructor (Ident (x:_)) = isUpper x isConstructor (Ident []) = error "isConstructor: bad identifier" identPattern :: Parser Binding identPattern = gcon True <|> apat where apat = choice [ varpat , gcon False , parens tuplepat , brackets listpat ] varpat = do v <- try $ do v <- ident guard (isVariable v) return v option (BindVar v) $ do atsign b <- apat return (BindAs v b) <?> "variable" gcon :: Bool -> Parser Binding gcon allowArgs = do c <- try $ do c <- dataConstr return c choice [ record c , fmap (BindConstr c) (guard allowArgs >> many apat) , return (BindConstr c []) ] <?> "constructor" dataConstr = do p <- dcPiece ps <- many dcPieces return $ toDataConstr p ps dcPiece = do x@(Ident y) <- ident guard $ isConstructor x return y dcPieces = do _ <- char '.' dcPiece toDataConstr x [] = DCUnqualified $ Ident x toDataConstr x (y:ys) = go (x:) y ys where go front next [] = DCQualified (Module $ front []) (Ident next) go front next (rest:rests) = go (front . 
(next:)) rest rests record c = braces $ do (fields, wild) <- option ([], False) $ go return (BindRecord c fields wild) where go = (wildDots >> return ([], True)) <|> (do x <- recordField (xs,wild) <- option ([],False) (comma >> go) return (x:xs,wild)) recordField = do field <- ident p <- option (BindVar field) -- support punning (equals >> identPattern) return (field,p) tuplepat = do xs <- identPattern `sepBy` comma return $ case xs of [x] -> x _ -> BindTuple xs listpat = BindList <$> identPattern `sepBy` comma angle = do _ <- char '<' name' <- many $ noneOf " \t.#\r\n!>" let name = if null name' then "div" else name' xs <- many $ try ((many $ oneOf " \t\r\n") >> (tagIdent <|> tagCond <|> tagClass Nothing <|> tagAttrs <|> tagAttrib Nothing)) _ <- many $ oneOf " \t\r\n" _ <- char '>' (c, avoidNewLines) <- content InContent let (tn, attr, classes, attrsd) = tag' $ TagName name : xs if '/' `elem` tn then fail "A tag name may not contain a slash. Perhaps you have a closing tag in your HTML." 
else return $ LineTag tn attr c classes attrsd avoidNewLines data TagPiece = TagName String | TagIdent [Content] | TagClass (Maybe Deref, [Content]) | TagAttrib (Maybe Deref, String, Maybe [Content]) | TagAttribs Deref deriving Show data ContentRule = InQuotes | NotInQuotes | NotInQuotesAttr | InContent data Nest = Nest Line [Nest] nestLines :: [(Int, Line)] -> [Nest] nestLines [] = [] nestLines ((i, l):rest) = let (deeper, rest') = span (\(i', _) -> i' > i) rest in Nest l (nestLines deeper) : nestLines rest' data Doc = DocForall Deref Binding [Doc] | DocWith [(Deref, Binding)] [Doc] | DocCond [(Deref, [Doc])] (Maybe [Doc]) | DocMaybe Deref Binding [Doc] (Maybe [Doc]) | DocCase Deref [(Binding, [Doc])] | DocContent Content deriving (Show, Eq, Read, Data, Typeable) nestToDoc :: HamletSettings -> [Nest] -> Result [Doc] nestToDoc _set [] = Ok [] nestToDoc set (Nest (LineForall d i) inside:rest) = do inside' <- nestToDoc set inside rest' <- nestToDoc set rest Ok $ DocForall d i inside' : rest' nestToDoc set (Nest (LineWith dis) inside:rest) = do inside' <- nestToDoc set inside rest' <- nestToDoc set rest Ok $ DocWith dis inside' : rest' nestToDoc set (Nest (LineIf d) inside:rest) = do inside' <- nestToDoc set inside (ifs, el, rest') <- parseConds set ((:) (d, inside')) rest rest'' <- nestToDoc set rest' Ok $ DocCond ifs el : rest'' nestToDoc set (Nest (LineMaybe d i) inside:rest) = do inside' <- nestToDoc set inside (nothing, rest') <- case rest of Nest LineNothing ninside:x -> do ninside' <- nestToDoc set ninside return (Just ninside', x) _ -> return (Nothing, rest) rest'' <- nestToDoc set rest' Ok $ DocMaybe d i inside' nothing : rest'' nestToDoc set (Nest (LineCase d) inside:rest) = do let getOf (Nest (LineOf x) insideC) = do insideC' <- nestToDoc set insideC Ok (x, insideC') getOf _ = Error "Inside a $case there may only be $of. Use '$of _' for a wildcard." 
cases <- mapM getOf inside rest' <- nestToDoc set rest Ok $ DocCase d cases : rest' nestToDoc set (Nest (LineTag tn attrs content classes attrsD avoidNewLine) inside:rest) = do let attrFix (x, y, z) = (x, y, [(Nothing, z)]) let takeClass (a, "class", b) = Just (a, fromMaybe [] b) takeClass _ = Nothing let clazzes = classes ++ mapMaybe takeClass attrs let notClass (_, x, _) = x /= "class" let noclass = filter notClass attrs let attrs' = case clazzes of [] -> map attrFix noclass _ -> (testIncludeClazzes clazzes, "class", map (second Just) clazzes) : map attrFix noclass let closeStyle = if not (null content) || not (null inside) then CloseSeparate else hamletCloseStyle set tn let end = case closeStyle of CloseSeparate -> DocContent $ ContentRaw $ "</" ++ tn ++ ">" _ -> DocContent $ ContentRaw "" seal = case closeStyle of CloseInside -> DocContent $ ContentRaw "/>" _ -> DocContent $ ContentRaw ">" start = DocContent $ ContentRaw $ "<" ++ tn attrs'' = concatMap attrToContent attrs' newline' = DocContent $ ContentRaw $ case hamletNewlines set of { AlwaysNewlines | not avoidNewLine -> "\n"; _ -> "" } inside' <- nestToDoc set inside rest' <- nestToDoc set rest Ok $ start : attrs'' ++ map (DocContent . 
ContentAttrs) attrsD ++ seal : map DocContent content ++ inside' ++ end : newline' : rest' nestToDoc set (Nest (LineContent content avoidNewLine) inside:rest) = do inside' <- nestToDoc set inside rest' <- nestToDoc set rest let newline' = DocContent $ ContentRaw $ case hamletNewlines set of { NoNewlines -> ""; _ -> if nextIsContent && not avoidNewLine then "\n" else "" } nextIsContent = case (inside, rest) of ([], Nest LineContent{} _:_) -> True ([], Nest LineTag{} _:_) -> True _ -> False Ok $ map DocContent content ++ newline':inside' ++ rest' nestToDoc _set (Nest (LineElseIf _) _:_) = Error "Unexpected elseif" nestToDoc _set (Nest LineElse _:_) = Error "Unexpected else" nestToDoc _set (Nest LineNothing _:_) = Error "Unexpected nothing" nestToDoc _set (Nest (LineOf _) _:_) = Error "Unexpected 'of' (did you forget a $case?)" compressDoc :: [Doc] -> [Doc] compressDoc [] = [] compressDoc (DocForall d i doc:rest) = DocForall d i (compressDoc doc) : compressDoc rest compressDoc (DocWith dis doc:rest) = DocWith dis (compressDoc doc) : compressDoc rest compressDoc (DocMaybe d i doc mnothing:rest) = DocMaybe d i (compressDoc doc) (fmap compressDoc mnothing) : compressDoc rest compressDoc (DocCond [(a, x)] Nothing:DocCond [(b, y)] Nothing:rest) | a == b = compressDoc $ DocCond [(a, x ++ y)] Nothing : rest compressDoc (DocCond x y:rest) = DocCond (map (second compressDoc) x) (compressDoc `fmap` y) : compressDoc rest compressDoc (DocCase d cs:rest) = DocCase d (map (second compressDoc) cs) : compressDoc rest compressDoc (DocContent (ContentRaw ""):rest) = compressDoc rest compressDoc ( DocContent (ContentRaw x) : DocContent (ContentRaw y) : rest ) = compressDoc $ (DocContent $ ContentRaw $ x ++ y) : rest compressDoc (DocContent x:rest) = DocContent x : compressDoc rest parseDoc :: HamletSettings -> String -> Result (Maybe NewlineStyle, [Doc]) parseDoc set s = do (mnl, set', ls) <- parseLines set s let notEmpty (_, LineContent [] _) = False notEmpty _ = True let ns = 
nestLines $ filter notEmpty ls ds <- nestToDoc set' ns return (mnl, compressDoc ds) attrToContent :: (Maybe Deref, String, [(Maybe Deref, Maybe [Content])]) -> [Doc] attrToContent (Just cond, k, v) = [DocCond [(cond, attrToContent (Nothing, k, v))] Nothing] attrToContent (Nothing, k, []) = [DocContent $ ContentRaw $ ' ' : k] attrToContent (Nothing, k, [(Nothing, Nothing)]) = [DocContent $ ContentRaw $ ' ' : k] attrToContent (Nothing, k, [(Nothing, Just v)]) = DocContent (ContentRaw (' ' : k ++ "=\"")) : map DocContent v ++ [DocContent $ ContentRaw "\""] attrToContent (Nothing, k, v) = -- only for class DocContent (ContentRaw (' ' : k ++ "=\"")) : concatMap go (init v) ++ go' (last v) ++ [DocContent $ ContentRaw "\""] where go (Nothing, x) = map DocContent (fromMaybe [] x) ++ [DocContent $ ContentRaw " "] go (Just b, x) = [ DocCond [(b, map DocContent (fromMaybe [] x) ++ [DocContent $ ContentRaw " "])] Nothing ] go' (Nothing, x) = maybe [] (map DocContent) x go' (Just b, x) = [ DocCond [(b, maybe [] (map DocContent) x)] Nothing ] -- | Settings for parsing of a hamlet document. data HamletSettings = HamletSettings { -- | The value to replace a \"!!!\" with. Do not include the trailing -- newline. hamletDoctype :: String -- | Should we add newlines to the output, making it more human-readable? -- Useful for client-side debugging but may alter browser page layout. , hamletNewlines :: NewlineStyle -- | How a tag should be closed. Use this to switch between HTML, XHTML -- or even XML output. , hamletCloseStyle :: String -> CloseStyle -- | Mapping from short names in \"$doctype\" statements to full doctype. 
, hamletDoctypeNames :: [(String, String)] } data NewlineStyle = NoNewlines -- ^ never add newlines | NewlinesText -- ^ add newlines between consecutive text lines | AlwaysNewlines -- ^ add newlines everywhere | DefaultNewlineStyle deriving Show instance Lift NewlineStyle where lift NoNewlines = [|NoNewlines|] lift NewlinesText = [|NewlinesText|] lift AlwaysNewlines = [|AlwaysNewlines|] lift DefaultNewlineStyle = [|DefaultNewlineStyle|] instance Lift (String -> CloseStyle) where lift _ = [|\s -> htmlCloseStyle s|] instance Lift HamletSettings where lift (HamletSettings a b c d) = [|HamletSettings $(lift a) $(lift b) $(lift c) $(lift d)|] -- See the html specification for a list of all void elements: -- https://www.w3.org/TR/html/syntax.html#void-elements htmlEmptyTags :: Set String htmlEmptyTags = Set.fromAscList [ "area" , "base" , "basefont" -- not html 5 , "br" , "col" , "embed" , "frame" -- not html 5 , "hr" , "img" , "input" , "isindex" -- not html 5 , "keygen" , "link" , "meta" , "param" , "source" , "track" , "wbr" ] -- | Defaults settings: HTML5 doctype and HTML-style empty tags. 
defaultHamletSettings :: HamletSettings
defaultHamletSettings =
    HamletSettings "<!DOCTYPE html>" DefaultNewlineStyle htmlCloseStyle doctypeNames

-- Settings for XHTML 1.0 Strict output: XHTML doctype and self-closing
-- void elements (see 'xhtmlCloseStyle').
xhtmlHamletSettings :: HamletSettings
xhtmlHamletSettings =
    HamletSettings doctype DefaultNewlineStyle xhtmlCloseStyle doctypeNames
  where
    doctype =
        "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" " ++
        "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">"

-- HTML: void elements get no closing tag at all; everything else gets a
-- separate closing tag.
htmlCloseStyle :: String -> CloseStyle
htmlCloseStyle s = if Set.member s htmlEmptyTags then NoClose else CloseSeparate

-- XHTML: void elements are self-closed ("/>"); everything else gets a
-- separate closing tag.
xhtmlCloseStyle :: String -> CloseStyle
xhtmlCloseStyle s = if Set.member s htmlEmptyTags then CloseInside else CloseSeparate

-- | How a tag is closed: not at all, self-closing ("/>"), or with a
-- separate closing tag.
data CloseStyle = NoClose | CloseInside | CloseSeparate

-- Collect the branches of an $if/$elseif/$else chain.  @front@ is a
-- difference list accumulating the (condition, body) pairs seen so far.
-- Returns the accumulated branches, the optional $else body, and the
-- nests that were not consumed by the chain.
parseConds :: HamletSettings
           -> ([(Deref, [Doc])] -> [(Deref, [Doc])])
           -> [Nest]
           -> Result ([(Deref, [Doc])], Maybe [Doc], [Nest])
parseConds set front (Nest LineElse inside:rest) = do
    inside' <- nestToDoc set inside
    Ok (front [], Just inside', rest)
parseConds set front (Nest (LineElseIf d) inside:rest) = do
    inside' <- nestToDoc set inside
    parseConds set (front . (:) (d, inside')) rest
parseConds _ front rest = Ok (front [], Nothing, rest)

-- Short names accepted in "$doctype" statements, mapped to the full
-- doctype declaration emitted in their place.
doctypeNames :: [(String, String)]
doctypeNames =
    [ ("5", "<!DOCTYPE html>")
    , ("html", "<!DOCTYPE html>")
    , ("1.1", "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.1//EN\" \"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd\">")
    , ("strict", "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">")
    ]

-- | A pattern binding as used by $forall, $with, $maybe and $case.
data Binding = BindVar Ident                            -- ^ plain variable
             | BindAs Ident Binding                     -- ^ as-pattern (v@pat)
             | BindConstr DataConstr [Binding]          -- ^ constructor application
             | BindTuple [Binding]
             | BindList [Binding]
             | BindRecord DataConstr [(Ident, Binding)] Bool
               -- ^ record pattern; the Bool is True when ".." (wild fields) was used
    deriving (Eq, Show, Read, Data, Typeable)

-- | A data-constructor reference, possibly qualified by a module path.
data DataConstr = DCQualified Module Ident
                | DCUnqualified Ident
    deriving (Eq, Show, Read, Data, Typeable)

-- | A module path, one component per list element.
newtype Module = Module [String]
    deriving (Eq, Show, Read, Data, Typeable)

-- Zero or more spaces and tabs.
spaceTabs :: Parser String
spaceTabs = many $ oneOf " \t"

-- | When using conditional classes, it will often be a single class, e.g.:
--
-- > <div :isHome:.homepage>
--
-- If isHome is False, we do not want any class attribute to be present.
-- However, due to combining multiple classes together, the most obvious
-- implementation would produce a class="". The purpose of this function is to
-- work around that. It does so by checking if all the classes on this tag are
-- optional. If so, it will only include the class attribute if at least one
-- conditional is true.
testIncludeClazzes :: [(Maybe Deref, [Content])] -> Maybe Deref
testIncludeClazzes cs
    | any (isNothing . fst) cs = Nothing
    | otherwise = Just $ DerefBranch (DerefIdent specialOrIdent) $ DerefList $ mapMaybe fst cs

-- | This funny hack is to allow us to refer to the 'or' function without
-- requiring the user to have it in scope. See how this function is used in
-- Text.Hamlet.
specialOrIdent :: Ident
specialOrIdent = Ident "__or__hamlet__special"
psibi/shakespeare
Text/Hamlet/Parse.hs
Haskell
mit
25,913
{-# LANGUAGE CPP #-}
-- | Backend-selection shim: re-exports the HTMLOptGroupElement API from
-- the GHCJS JavaScript-FFI backend when compiling under GHCJS with the
-- JavaScript FFI enabled (or when WebKit support is disabled), and from
-- the WebKitGTK binding otherwise.  The CPP condition in the export list
-- must stay identical to the one guarding the import below.
module GHCJS.DOM.HTMLOptGroupElement (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
  module GHCJS.DOM.JSFFI.Generated.HTMLOptGroupElement
#else
  module Graphics.UI.Gtk.WebKit.DOM.HTMLOptGroupElement
#endif
  ) where

#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.HTMLOptGroupElement
#else
import Graphics.UI.Gtk.WebKit.DOM.HTMLOptGroupElement
#endif
plow-technologies/ghcjs-dom
src/GHCJS/DOM/HTMLOptGroupElement.hs
Haskell
mit
480
-- | A binary tree holding a value at each internal node.
data Tree a = Empty | Node a (Tree a) (Tree a) deriving Show

-- | Apply a function to every value in the tree (an argument-flipped map).
treeMap :: Tree a -> (a -> b) -> Tree b
treeMap t f =
  case t of
    Empty        -> Empty
    Node v lt rt -> Node (f v) (treeMap lt f) (treeMap rt f)

-- | Build a node holding @x@ whose two children are both copies of @t@
-- with every value increased by @y@.
clone :: Num a => Tree a -> a -> a -> Tree a
clone t x y =
  let bumped = treeMap t (+ y)
  in Node x bumped bumped

-- | Infinite sequence of trees starting from a single @Node 1@; each
-- subsequent tree clones the previous one under its own root value,
-- with all cloned values bumped by one.
cloningTrees :: Num t => [Tree t]
cloningTrees = iterate step (Node 1 Empty Empty)
  where
    -- Every element produced here is a Node, so the partial match is safe.
    step t@(Node root _ _) = clone t root 1
fmi-lab/fp-elective-2017
exams/02/variant-b/task-2.hs
Haskell
mit
437
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-}

-- | Brainfuck compiler driver: parses the source file named on the
-- command line, compiles it to assembly (optimized and unoptimized),
-- writes the pretty-printed artifacts to disk, then assembles and runs
-- the optimized machine code against a freshly allocated memory tape.
module Main where

import Asm as A
import AsmOptimize
import Assembler
import Compiler
import Memory
import Parser
import Pretty ( pretty_ )
import PrettyAsm ( pretty )

import Control.Exception ( finally )
import Data.Attoparsec.ByteString ( parseOnly )
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString.Char8 as C8
import qualified Data.Text.Lazy.IO as TIO
import Foreign
import System.Environment ( getArgs )
import System.Exit ( exitFailure )

-- | Run the assembler, turning failures into a human-readable
-- diagnostic followed by program exit.
assembleIO asm = case assemble asm of
    Left e -> do
        putStr "Assembler error: "
        putStrLn $ case e of
            InvalidOpcode -> "invalid opcode"
            UnsupportedOpcode _ -> "unsupported opcode"
            UnassignedLabel -> "unassigned label"
            LabelError _ -> "label lookup failed"
        exitFailure
    Right bs -> return bs

main :: IO ()
main = do
    -- Fix: the original `arg1:_ <- getArgs` crashed with an opaque
    -- pattern-match failure when no argument was given; fail with a
    -- usage message instead.
    args <- getArgs
    inputPath <- case args of
        (p:_) -> return p
        []    -> do
            putStrLn "usage: fuckdown2 BRAINFUCK_FILE"
            exitFailure
    file <- C8.readFile inputPath
    bf <- case parseOnly brainfuck file of
        Left e -> do
            putStrLn "Parse error: "
            putStrLn e
            exitFailure
        Right x -> return x

    -- Round-trip the parsed program back to disk for inspection.
    writeFile "out.bf" (pretty_ bf)

    -- Compile once, optimize, and pretty-print both versions.
    let asm = asmFunction $ do
            mov rax rdi
            compileFuck bf
        optimizedAsm = optimizeAsm asm
    TIO.writeFile "opt.asm" (pretty optimizedAsm)
    TIO.writeFile "out.asm" (pretty asm)

    code <- assembleIO asm
    optimizedCode <- assembleIO optimizedAsm
    let b = BS.toStrict code
        bOpt = BS.toStrict optimizedCode
        nb = C8.length b
        nbOpt = C8.length bOpt
    putStrLn $ "Assembled " ++ show nb ++ " bytes (unoptimized)."
    putStrLn $ "Assembled " ++ show nbOpt ++ " bytes (optimized)."
    putStrLn $ show (fromIntegral (nb - nbOpt) / fromIntegral nb * 100)
        ++ "% improvement"

    -- Execute the optimized code against a zeroed 4 KiB tape; `finally`
    -- guarantees the tape is released even if execution throws (the
    -- original `free mem` would be skipped on an exception).
    f <- byteStringFunction bOpt
    mem <- callocBytes 4096
    f mem `finally` free mem
djeik/fuckdown2
src/Main.hs
Haskell
mit
2,041
{-|
Module      : Main
Description : Parses command line and dispatches to correct backend
Copyright   : (c) Rodrigo Setti, 2017
License     : MIT
Maintainer  : rodrigosetti@gmail.com
Stability   : experimental
Portability : POSIX
-}
{-# LANGUAGE UnicodeSyntax #-}
module Main (main) where

import Data.List (intercalate)
import qualified Data.List.NonEmpty as NE
import Data.Maybe (catMaybes, fromMaybe)
import Data.Semigroup ((<>))
import qualified Data.Text.IO as TIO
import MasterPlan.Backend.Graph
import MasterPlan.Data
import qualified MasterPlan.Parser as P
import Options.Applicative
import System.Exit (die)
import System.IO (stdin)

-- |Type output from the command line parser
data Opts = Opts { inputPath :: Maybe FilePath -- ^ plan file to read; 'Nothing' means stdin
                 , outputPath :: Maybe FilePath -- ^ image file to write; 'Nothing' defaults to \"output.pdf\"
                 , rootKey :: ProjectKey -- ^ name of the root project
                 , projFilter :: ProjFilter -- ^ filter to consider
                 , renderParsingError :: Bool -- ^ will render the parsing error instead of printing
                 , parseStrict :: Bool -- ^ every project has to be defined
                 , renderOptions :: RenderOptions -- ^ options forwarded to the graph backend
                 }

-- |Predicate deciding whether a project (sub)expression is kept.
type ProjFilter = ProjectExpr → Bool

-- |Filter that keeps every project.
noFilter ∷ ProjFilter
noFilter = const True

-- |Build an option reader from an association list of accepted names.
readEnum ∷ [(String, a)] → ReadM a
readEnum mapping = maybeReader $ flip lookup mapping

-- |The command line parser
cmdParser ∷ Parser Opts
cmdParser = Opts <$> optional (strArgument ( help "plan file to read from (default from stdin)"
                                          <> metavar "FILENAME" ))
                 <*> optional (strOption ( long "output"
                                        <> short 'o'
                                        <> help "output file name (.png, .tif, .bmp, .jpg and .pdf supported)"
                                        <> metavar "FILENAME" ))
                 <*> strOption ( long "root"
                              <> short 'r'
                              <> help "name of the root project definition"
                              <> value "root"
                              <> showDefault
                              <> metavar "NAME")
                 <*> (filterParser <|> pure noFilter)
                 <*> switch ( long "render-parse-error"
                           <> help "instead of printing parsing errors, render as an image")
                 <*> switch ( long "strict"
                           <> help "strict parsing: every project has to be defined")
                 <*> renderOptionsParser
  where
    -- Parser for the rendering options passed through to the backend.
    renderOptionsParser ∷ Parser RenderOptions
    renderOptionsParser =
        RenderOptions <$> switch ( long "color"
                                <> short 'c'
                                <> help "color each project by progress")
                      <*> option auto ( long "width"
                                     <> short 'w'
                                     <> help "width of the output image"
                                     <> value (-1)
                                     <> metavar "NUMBER")
                      <*> option auto ( long "height"
                                     <> help "height of the output image"
                                     <> value (-1)
                                     <> metavar "NUMBER")
                      <*> (invertProps <$> many (option property ( long "hide"
                                                                <> help "hide a particular property"
                                                                <> metavar (intercalate "|" $ map fst propertyNames))))

    -- Every displayable project attribute, keyed by its 'show' text.
    propertyNames = map (\p -> (show p, p)) [minBound :: ProjAttribute ..]

    property = readEnum propertyNames

    -- "--hide" lists attributes to remove; invert into the set to keep.
    invertProps ∷ [ProjAttribute] → [ProjAttribute]
    invertProps l = filter (`notElem` l) $ map snd propertyNames

-- |Parser for the "--progress-below" filter option.
filterParser ∷ Parser ProjFilter
filterParser = (mkProgressFilter . Progress) <$> option auto ( long "progress-below"
                                                            <> help "only display projects which progress is < N%"
                                                            <> metavar "N" )
  where
    -- Keep only projects whose progress, as a percentage, is below n.
    mkProgressFilter n p = progress p * 100 < n

main ∷ IO ()
main = masterPlan =<< execParser opts
  where
    opts = info (cmdParser <**> helper)
                ( fullDesc
               <> progDesc "See documentation on how to write project plan files"
               <> header "master-plan - project management tool for hackers" )

-- |Apply the filter recursively over the expression tree.  A composite
-- node is dropped when the node itself fails the filter or when all of
-- its children are filtered out (see 'filterHelper').
filterProj ∷ ProjFilter -> ProjectExpr → Maybe ProjectExpr
filterProj f p@(Sum r ps) = filterHelper p f ps (Sum r)
filterProj f p@(Product r ps) = filterHelper p f ps (Product r)
filterProj f p@(Sequence r ps) = filterHelper p f ps (Sequence r)
filterProj f p = if f p then Just p else Nothing

-- |Shared logic for the composite cases of 'filterProj': keep the node
-- only if it passes the filter itself, rebuilding it (via @c@) from the
-- children that survive; 'Nothing' when no child survives.
filterHelper :: ProjectExpr
             -> ProjFilter
             -> NE.NonEmpty ProjectExpr
             -> (NE.NonEmpty ProjectExpr -> ProjectExpr)
             -> Maybe ProjectExpr
filterHelper p f ps c = if f p then c <$> filterProjs ps else Nothing
  where
    filterProjs ps' = NE.nonEmpty (catMaybes $ NE.toList $ filterProj f <$> ps')

-- |Read, parse, filter, prioritize and render the plan described by the
-- parsed command line options.
masterPlan ∷ Opts → IO ()
masterPlan opts =
  do contents <- maybe (TIO.hGetContents stdin) TIO.readFile $ inputPath opts
     -- NOTE(review): the inner fromMaybe is redundant — when outputPath
     -- is Just, the outer fromMaybe returns it unchanged, so the net
     -- default is "output.pdf".
     let outfile = fromMaybe (fromMaybe "output" (outputPath opts) ++ ".pdf") $ outputPath opts
     case P.runParser (parseStrict opts) (fromMaybe "stdin" $ inputPath opts) contents (rootKey opts) of
        Left e -> if renderParsingError opts
                    then renderText outfile (renderOptions opts) (lines e)
                    else die e
        Right p -> do let p' = fromMaybe defaultAtomic $ prioritize <$> filterProj (projFilter opts) p
                      render outfile (renderOptions opts) p'
rodrigosetti/master-plan
app/Main.hs
Haskell
mit
6,328
-- | Mouse event sources exposed as Melchior signals.
module Melchior.EventSources.Mouse where

import Control.Applicative
import Melchior.Control
import Melchior.Data.String
import Melchior.Dom
import Melchior.Dom.Events

-- | Signal of (x, y) pointer coordinates, driven by mousemove events on
-- the given element.
position :: Element -> Signal (Int, Int)
position el = coords <$> moves
  where
    moves = createEventedSignal (Of MouseMove) el (MouseEvt MouseMove)

-- | Signal of click events fired on the given element.
click :: Element -> Signal MouseEvent
click element = createEventedSignal (Of ClickEvt) element (MouseEvt ClickEvt)

-- Bridge into the JavaScript runtime: read coordinates off a mouse event.
foreign import js "Events.coordinates(%1)"
  coords :: MouseEvent -> (Int, Int)
kjgorman/melchior
Melchior/EventSources/Mouse.hs
Haskell
mit
515
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}

-- | Type-level Peano naturals with addition, plus a class for demoting
-- a type-level 'Nat' back to its value-level representation.
module Data.Type.Nat
    ( Nat(..)
    , type (+)
    , Value(..)
    ) where

import Data.Proxy

-- | Peano naturals: zero and successor.  With DataKinds this also
-- provides the kind @Nat@ and the promoted constructors @'Z@ and @'S@.
data Nat = Z | S Nat

-- | Type-level addition, defined by recursion on the second argument:
-- each successor is peeled off @m@ and pushed onto @n@.
type family (n :: Nat) + (m :: Nat) :: Nat
type instance n + Z = n
type instance n + (S m) = (S n) + m

-- | Links the type- and value-level: 'value' recovers the runtime 'Nat'
-- corresponding to the type-level index carried by the 'Proxy'.
class Value (n :: Nat) where
    value :: Proxy n -> Nat

instance Value Z where
    value _ = Z

-- Structural recursion: demote @n@ and wrap one more 'S' around it.
instance Value n => Value (S n) where
    value _ = S $ value (Proxy :: Proxy n)
nickspinale/lambda-calculi
src/Data/Type/Nat.hs
Haskell
mit
830
{-# LANGUAGE OverloadedStrings #-}
-- | Parser for version 1 of the shipyard message schema.
module EDDA.Schema.ShipyardV1 where

import EDDA.Types
import EDDA.Schema.Util
import Data.Aeson
import Data.Aeson.Types
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS

-- | Extract the "ships" string array from a message, if present.
getShips :: Value -> ConfigT (Maybe [Str])
getShips v = return (getStrArray v "ships")

-- | Parse a shipyard message into a 'MessageInfo', yielding 'Nothing'
-- when any required field (system, station, timestamp or ships) is
-- missing from the JSON value.
parseShipyard :: Value -> ConfigT (Maybe MessageInfo)
parseShipyard v = do
    maybeShips <- getShips v
    -- The inner do-block runs in Maybe: any absent field aborts the parse.
    return $ do
        system   <- getStr v "systemName"
        station  <- getStr v "stationName"
        stamp    <- getTimestamp v "timestamp"
        shipList <- maybeShips
        Just ShipyardInfo { shipyardInfoSystemName  = system
                          , shipyardInfoStationName = station
                          , shipyardInfoTimestamp   = stamp
                          , shipyardInfoShips       = HS.fromList shipList }
troydm/edda
src/EDDA/Schema/ShipyardV1.hs
Haskell
mit
1,160
{-# Language TupleSections, ViewPatterns #-} {- | Module : Language.Egison.Core Copyright : Satoshi Egi Licence : MIT This module provides functions to evaluate various objects. -} module Language.Egison.Core ( -- * Egison code evaluation evalTopExprs , evalTopExprsTestOnly , evalTopExprsNoIO , evalTopExpr , evalExpr , evalExprDeep , evalRef , evalRefDeep , evalWHNF , applyFunc -- * Environment , recursiveBind -- * Pattern matching , patternMatch -- * Collection , isEmptyCollection , unconsCollection , unsnocCollection -- * Utiltiy functions , packStringValue ) where import Prelude hiding (mapM, mappend) import Control.Arrow import Control.Applicative import Control.Monad.Error hiding (mapM) import Control.Monad.State hiding (mapM, state) import Control.Monad.Trans.Maybe import Data.Sequence (Seq, ViewL(..), ViewR(..), (><)) import qualified Data.Sequence as Sq import Data.Ratio import Data.Foldable (toList) import Data.Traversable (mapM) import Data.IORef import Data.Maybe import Data.Array ((!)) import qualified Data.Array as Array import qualified Data.HashMap.Lazy as HL import Data.Text (Text) import qualified Data.Text as T import Language.Egison.Types import Language.Egison.Parser -- -- Evaluator -- evalTopExprs :: Env -> [EgisonTopExpr] -> EgisonM Env evalTopExprs env exprs = do (bindings, rest) <- collectDefs exprs [] [] env <- recursiveBind env bindings forM_ rest $ evalTopExpr env return env where collectDefs (expr:exprs) bindings rest = case expr of Define name expr -> collectDefs exprs ((name, expr) : bindings) rest Load file -> do exprs' <- loadLibraryFile file collectDefs (exprs' ++ exprs) bindings rest LoadFile file -> do exprs' <- loadFile file collectDefs (exprs' ++ exprs) bindings rest Execute _ -> collectDefs exprs bindings (expr : rest) _ -> collectDefs exprs bindings rest collectDefs [] bindings rest = return (bindings, reverse rest) evalTopExprsTestOnly :: Env -> [EgisonTopExpr] -> EgisonM Env evalTopExprsTestOnly env exprs = do (bindings, 
rest) <- collectDefs exprs [] [] env <- recursiveBind env bindings forM_ rest $ evalTopExpr env return env where collectDefs (expr:exprs) bindings rest = case expr of Define name expr -> collectDefs exprs ((name, expr) : bindings) rest Load file -> do exprs' <- loadLibraryFile file collectDefs (exprs' ++ exprs) bindings rest LoadFile file -> do exprs' <- loadFile file collectDefs (exprs' ++ exprs) bindings rest Test _ -> collectDefs exprs bindings (expr : rest) _ -> collectDefs exprs bindings rest collectDefs [] bindings rest = return (bindings, reverse rest) evalTopExprsNoIO :: Env -> [EgisonTopExpr] -> EgisonM Env evalTopExprsNoIO env exprs = do (bindings, rest) <- collectDefs exprs [] [] env <- recursiveBind env bindings forM_ rest $ evalTopExpr env return env where collectDefs (expr:exprs) bindings rest = case expr of Define name expr -> collectDefs exprs ((name, expr) : bindings) rest Load _ -> throwError $ strMsg "No IO support" LoadFile _ -> throwError $ strMsg "No IO support" _ -> collectDefs exprs bindings (expr : rest) collectDefs [] bindings rest = return (bindings, reverse rest) evalTopExpr :: Env -> EgisonTopExpr -> EgisonM Env evalTopExpr env topExpr = do ret <- evalTopExpr' env topExpr case fst ret of Nothing -> return () Just output -> liftIO $ putStrLn output return $ snd ret evalTopExpr' :: Env -> EgisonTopExpr -> EgisonM (Maybe String, Env) evalTopExpr' env (Define name expr) = recursiveBind env [(name, expr)] >>= return . ((,) Nothing) evalTopExpr' env (Test expr) = do val <- evalExprDeep env expr return (Just (show val), env) evalTopExpr' env (Execute expr) = do io <- evalExpr env expr case io of Value (IOFunc m) -> m >> return (Nothing, env) _ -> throwError $ TypeMismatch "io" io evalTopExpr' env (Load file) = loadLibraryFile file >>= evalTopExprs env >>= return . ((,) Nothing) evalTopExpr' env (LoadFile file) = loadFile file >>= evalTopExprs env >>= return . 
((,) Nothing) evalExpr :: Env -> EgisonExpr -> EgisonM WHNFData evalExpr _ (CharExpr c) = return . Value $ Char c evalExpr _ (StringExpr s) = return $ Value $ toEgison s evalExpr _ (BoolExpr b) = return . Value $ Bool b evalExpr _ (NumberExpr x y) = return . Value $ reduceFraction (Number x y) evalExpr _ (FloatExpr x y) = return . Value $ Float x y evalExpr env (VarExpr name) = refVar env name >>= evalRef evalExpr _ (InductiveDataExpr name []) = return . Value $ InductiveData name [] evalExpr env (InductiveDataExpr name exprs) = Intermediate . IInductiveData name <$> mapM (newObjectRef env) exprs evalExpr _ (TupleExpr []) = return . Value $ Tuple [] evalExpr env (TupleExpr [expr]) = evalExpr env expr evalExpr env (TupleExpr exprs) = Intermediate . ITuple <$> mapM (newObjectRef env) exprs evalExpr _ (CollectionExpr []) = return . Value $ Collection Sq.empty evalExpr env (CollectionExpr inners) = do inners' <- mapM fromInnerExpr inners innersSeq <- liftIO $ newIORef $ Sq.fromList inners' return $ Intermediate $ ICollection innersSeq where fromInnerExpr :: InnerExpr -> EgisonM Inner fromInnerExpr (ElementExpr expr) = IElement <$> newObjectRef env expr fromInnerExpr (SubCollectionExpr expr) = ISubCollection <$> newObjectRef env expr evalExpr env (ArrayExpr exprs) = do refs' <- mapM (newObjectRef env) exprs return . Intermediate . IArray $ Array.listArray (1, toInteger (length exprs)) refs' evalExpr env (HashExpr assocs) = do let (keyExprs, exprs) = unzip assocs keyWhnfs <- mapM (evalExpr env) keyExprs keys <- mapM makeHashKey keyWhnfs refs <- mapM (newObjectRef env) exprs case keys of [] -> do let keys' = map (\key -> case key of IntKey i -> i) keys return . Intermediate . IIntHash $ HL.fromList $ zip keys' refs _ -> case head keys of IntKey _ -> do let keys' = map (\key -> case key of IntKey i -> i) keys return . Intermediate . IIntHash $ HL.fromList $ zip keys' refs CharKey _ -> do let keys' = map (\key -> case key of CharKey c -> c) keys return . Intermediate . 
ICharHash $ HL.fromList $ zip keys' refs StrKey _ -> do let keys' = map (\key -> case key of StrKey s -> s) keys return . Intermediate . IStrHash $ HL.fromList $ zip keys' refs where makeHashKey :: WHNFData -> EgisonM EgisonHashKey makeHashKey (Value val) = case val of Number _ _ -> fromEgison val >>= (return . IntKey) Char c -> return (CharKey c) String str -> return (StrKey str) _ -> throwError $ TypeMismatch "integer or string" $ Value val makeHashKey whnf = throwError $ TypeMismatch "integer or string" $ whnf evalExpr env (IndexedExpr expr indices) = do array <- evalExpr env expr indices' <- mapM (evalExprDeep env) indices refArray array indices' evalExpr env (LambdaExpr names expr) = return . Value $ Func env names expr evalExpr env (PatternFunctionExpr names pattern) = return . Value $ PatternFunc env names pattern evalExpr env (IfExpr test expr expr') = do test <- evalExpr env test >>= fromWHNF evalExpr env $ if test then expr else expr' evalExpr env (LetExpr bindings expr) = mapM extractBindings bindings >>= flip evalExpr expr . extendEnv env . concat where extractBindings :: BindingExpr -> EgisonM [Binding] extractBindings ([name], expr) = makeBindings [name] . 
(:[]) <$> newObjectRef env expr extractBindings (names, expr) = makeBindings names <$> (evalExpr env expr >>= fromTuple) evalExpr env (LetRecExpr bindings expr) = let bindings' = evalState (concat <$> mapM extractBindings bindings) 0 in recursiveBind env bindings' >>= flip evalExpr expr where extractBindings :: BindingExpr -> State Int [(String, EgisonExpr)] extractBindings ([name], expr) = return [(name, expr)] extractBindings (names, expr) = do var <- genVar let k = length names target = VarExpr var matcher = TupleExpr $ replicate k SomethingExpr nth n = let pattern = TuplePat $ flip map [1..k] $ \i -> if i == n then PatVar "#_" else WildCard in MatchExpr target matcher [(pattern, VarExpr "#_")] return ((var, expr) : map (second nth) (zip names [1..])) genVar :: State Int String genVar = modify (1+) >> gets (('#':) . show) evalExpr env (DoExpr bindings expr) = return $ Value $ IOFunc $ do let body = foldr genLet (ApplyExpr expr $ TupleExpr [VarExpr "#1"]) bindings applyFunc (Value $ Func env ["#1"] body) $ Value World where genLet (names, expr) expr' = LetExpr [(["#1", "#2"], ApplyExpr expr $ TupleExpr [VarExpr "#1"])] $ LetExpr [(names, VarExpr "#2")] expr' evalExpr env (IoExpr expr) = do io <- evalExpr env expr case io of Value (IOFunc m) -> do val <- m >>= evalWHNF case val of Tuple [_, val'] -> return $ Value val' _ -> throwError $ TypeMismatch "io" io evalExpr env (MatchAllExpr target matcher (pattern, expr)) = do target <- newObjectRef env target matcher <- evalExpr env matcher >>= evalMatcherWHNF result <- patternMatch env pattern target matcher mmap (flip evalExpr expr . extendEnv env) result >>= fromMList where fromMList :: MList EgisonM WHNFData -> EgisonM WHNFData fromMList MNil = return . Value $ Collection Sq.empty fromMList (MCons val m) = do head <- IElement <$> newEvalutedObjectRef val tail <- ISubCollection <$> (liftIO . newIORef . Thunk $ m >>= fromMList) seqRef <- liftIO . newIORef $ Sq.fromList [head, tail] return . 
Intermediate $ ICollection $ seqRef evalExpr env (MatchExpr target matcher clauses) = do target <- newObjectRef env target matcher <- evalExpr env matcher >>= evalMatcherWHNF let tryMatchClause (pattern, expr) cont = do result <- patternMatch env pattern target matcher case result of MCons bindings _ -> evalExpr (extendEnv env bindings) expr MNil -> cont foldr tryMatchClause (throwError $ strMsg "failed pattern match") clauses evalExpr env (SeqExpr expr1 expr2) = do evalExprDeep env expr1 evalExpr env expr2 evalExpr env (ApplyExpr func arg) = do func <- evalExpr env func arg <- evalExpr env arg case func of Value (MemoizedFunc ref hashRef env names body) -> do indices <- evalWHNF arg indices' <- mapM fromEgison $ fromTupleValue indices hash <- liftIO $ readIORef hashRef case HL.lookup indices' hash of Just objRef -> do evalRef objRef Nothing -> do whnf <- applyFunc (Value (Func env names body)) arg retRef <- newEvalutedObjectRef whnf hash <- liftIO $ readIORef hashRef liftIO $ writeIORef hashRef (HL.insert indices' retRef hash) writeObjectRef ref (Value (MemoizedFunc ref hashRef env names body)) return whnf _ -> applyFunc func arg evalExpr env (MemoizeExpr memoizeFrame expr) = do mapM (\(x, y, z) -> do x' <- evalExprDeep env x case x' of (MemoizedFunc ref hashRef env' names body) -> do indices <- evalExprDeep env y indices' <- mapM fromEgison $ fromTupleValue indices hash <- liftIO $ readIORef hashRef ret <- evalExprDeep env z retRef <- newEvalutedObjectRef (Value ret) liftIO $ writeIORef hashRef (HL.insert indices' retRef hash) writeObjectRef ref (Value (MemoizedFunc ref hashRef env' names body)) _ -> throwError $ TypeMismatch "memoized-function" (Value x')) memoizeFrame evalExpr env expr evalExpr env (MatcherBFSExpr info) = return $ Value $ UserMatcher env BFSMode info evalExpr env (MatcherDFSExpr info) = return $ Value $ UserMatcher env DFSMode info evalExpr env (GenerateArrayExpr (name:[]) (TupleExpr (sizeExpr:[])) expr) = generateArray env name sizeExpr expr 
evalExpr env (GenerateArrayExpr (name:xs) (TupleExpr (sizeExpr:ys)) expr) = generateArray env name sizeExpr (GenerateArrayExpr xs (TupleExpr ys) expr) evalExpr env (GenerateArrayExpr names size expr) = evalExpr env (GenerateArrayExpr names (TupleExpr [size]) expr) evalExpr env (ArraySizeExpr expr) = evalExpr env expr >>= arraySize where arraySize :: WHNFData -> EgisonM WHNFData arraySize (Intermediate (IArray arr)) = return . Value . toEgison $ (snd (Array.bounds arr)) % 1 arraySize (Value (Array arr)) = return . Value . toEgison $ (snd (Array.bounds arr)) % 1 arraySize val = throwError $ TypeMismatch "array" val evalExpr _ SomethingExpr = return $ Value Something evalExpr _ UndefinedExpr = return $ Value Undefined evalExpr _ expr = throwError $ NotImplemented ("evalExpr for " ++ show expr) evalExprDeep :: Env -> EgisonExpr -> EgisonM EgisonValue evalExprDeep env expr = evalExpr env expr >>= evalWHNF evalRef :: ObjectRef -> EgisonM WHNFData evalRef ref = do obj <- liftIO $ readIORef ref case obj of WHNF val -> return val Thunk thunk -> do val <- thunk writeObjectRef ref val return val evalRefDeep :: ObjectRef -> EgisonM EgisonValue evalRefDeep ref = do obj <- liftIO $ readIORef ref case obj of WHNF (Value val) -> return val WHNF val -> do val <- evalWHNF val writeObjectRef ref $ Value val return val Thunk thunk -> do val <- thunk >>= evalWHNF writeObjectRef ref $ Value val return val evalWHNF :: WHNFData -> EgisonM EgisonValue evalWHNF (Value val) = return val evalWHNF (Intermediate (IInductiveData name refs)) = InductiveData name <$> mapM evalRefDeep refs evalWHNF (Intermediate (IArray refs)) = do refs' <- mapM evalRefDeep $ Array.elems refs return $ Array $ Array.listArray (Array.bounds refs) refs' evalWHNF (Intermediate (IIntHash refs)) = do refs' <- mapM evalRefDeep refs return $ IntHash refs' evalWHNF (Intermediate (ICharHash refs)) = do refs' <- mapM evalRefDeep refs return $ CharHash refs' evalWHNF (Intermediate (IStrHash refs)) = do refs' <- mapM 
evalRefDeep refs return $ StrHash refs' evalWHNF (Intermediate (ITuple [ref])) = evalRefDeep ref evalWHNF (Intermediate (ITuple refs)) = Tuple <$> mapM evalRefDeep refs evalWHNF coll = Collection <$> (fromCollection coll >>= fromMList >>= mapM evalRefDeep . Sq.fromList) applyFunc :: WHNFData -> WHNFData -> EgisonM WHNFData applyFunc (Value (Func env [name] body)) arg = do ref <- newEvalutedObjectRef arg evalExpr (extendEnv env $ makeBindings [name] [ref]) body applyFunc (Value (Func env names body)) arg = do refs <- fromTuple arg if length names == length refs then evalExpr (extendEnv env $ makeBindings names refs) body else throwError $ ArgumentsNumWithNames names (length names) (length refs) applyFunc (Value (PrimitiveFunc func)) arg = func arg applyFunc (Value (IOFunc m)) arg = do case arg of Value World -> m _ -> throwError $ TypeMismatch "world" arg applyFunc val _ = throwError $ TypeMismatch "function" val generateArray :: Env -> String -> EgisonExpr -> EgisonExpr -> EgisonM WHNFData generateArray env name sizeExpr expr = do size <- evalExpr env sizeExpr >>= fromWHNF >>= return . fromInteger elems <- mapM genElem (enumFromTo 1 size) return $ Intermediate $ IArray $ Array.listArray (1, size) elems where genElem :: Integer -> EgisonM ObjectRef genElem i = do env' <- bindEnv env name $ toInteger i newObjectRef env' expr bindEnv :: Env -> String -> Integer -> EgisonM Env bindEnv env name i = do ref <- newEvalutedObjectRef (Value (Number (i,0) (1,0))) return $ extendEnv env [(name, ref)] refArray :: WHNFData -> [EgisonValue] -> EgisonM WHNFData refArray val [] = return val refArray (Value (Array array)) (index:indices) = do i <- (liftM fromInteger . fromEgison) index if (\(a,b) -> if a <= i && i <= b then True else False) $ Array.bounds array then refArray (Value (array ! i)) indices else return $ Value Undefined refArray (Intermediate (IArray array)) (index:indices) = do i <- (liftM fromInteger . 
fromEgison) index if (\(a,b) -> if a <= i && i <= b then True else False) $ Array.bounds array then let ref = array ! i in evalRef ref >>= flip refArray indices else return $ Value Undefined refArray (Value (IntHash hash)) (index:indices) = do key <- fromEgison index case HL.lookup key hash of Just val -> refArray (Value val) indices Nothing -> return $ Value Undefined refArray (Intermediate (IIntHash hash)) (index:indices) = do key <- fromEgison index case HL.lookup key hash of Just ref -> evalRef ref >>= flip refArray indices Nothing -> return $ Value Undefined refArray (Value (CharHash hash)) (index:indices) = do key <- fromEgison index case HL.lookup key hash of Just val -> refArray (Value val) indices Nothing -> return $ Value Undefined refArray (Intermediate (ICharHash hash)) (index:indices) = do key <- fromEgison index case HL.lookup key hash of Just ref -> evalRef ref >>= flip refArray indices Nothing -> return $ Value Undefined refArray (Value (StrHash hash)) (index:indices) = do key <- fromEgison index case HL.lookup key hash of Just val -> refArray (Value val) indices Nothing -> return $ Value Undefined refArray (Intermediate (IStrHash hash)) (index:indices) = do key <- fromEgison index case HL.lookup key hash of Just ref -> evalRef ref >>= flip refArray indices Nothing -> return $ Value Undefined refArray val _ = throwError $ TypeMismatch "array or hash" val newThunk :: Env -> EgisonExpr -> Object newThunk env expr = Thunk $ evalExpr env expr newObjectRef :: Env -> EgisonExpr -> EgisonM ObjectRef newObjectRef env expr = liftIO $ newIORef $ newThunk env expr writeObjectRef :: ObjectRef -> WHNFData -> EgisonM () writeObjectRef ref val = liftIO . writeIORef ref $ WHNF val newEvalutedObjectRef :: WHNFData -> EgisonM ObjectRef newEvalutedObjectRef = liftIO . newIORef . 
WHNF makeBindings :: [String] -> [ObjectRef] -> [Binding] makeBindings = zip recursiveBind :: Env -> [(String, EgisonExpr)] -> EgisonM Env recursiveBind env bindings = do let (names, exprs) = unzip bindings refs <- replicateM (length bindings) $ newObjectRef nullEnv UndefinedExpr let env' = extendEnv env $ makeBindings names refs zipWithM_ (\ref expr -> case expr of MemoizedLambdaExpr names body -> do hashRef <- liftIO $ newIORef HL.empty liftIO . writeIORef ref . WHNF . Value $ MemoizedFunc ref hashRef env' names body _ -> liftIO . writeIORef ref . Thunk $ evalExpr env' expr) refs exprs return env' -- -- Pattern Match -- patternMatch :: Env -> EgisonPattern -> ObjectRef -> Matcher -> EgisonM (MList EgisonM Match) patternMatch env pattern target matcher = processMStates [msingleton $ MState env [] [] [MAtom pattern target matcher]] processMStates :: [MList EgisonM MatchingState] -> EgisonM (MList EgisonM Match) processMStates [] = return MNil processMStates streams = do (matches, streams') <- mapM processMStates' streams >>= extractMatches . 
concat mappend (fromList matches) $ processMStates streams' processMStates' :: MList EgisonM MatchingState -> EgisonM [MList EgisonM MatchingState] processMStates' MNil = return [] processMStates' stream@(MCons state _) = case pmMode (getMatcher (topMAtom state)) of DFSMode -> processMStatesDFS stream BFSMode -> processMStatesBFS stream gatherBindings :: MatchingState -> Maybe [Binding] gatherBindings (MState _ _ bindings []) = return bindings gatherBindings (MState _ _ bindings trees) = isResolved trees >> return bindings where isResolved :: [MatchingTree] -> Maybe () isResolved [] = return () isResolved (MAtom _ _ _ : _) = Nothing isResolved (MNode _ state : rest) = gatherBindings state >> isResolved rest extractMatches :: [MList EgisonM MatchingState] -> EgisonM ([Match], [MList EgisonM MatchingState]) extractMatches = extractMatches' ([], []) where extractMatches' :: ([Match], [MList EgisonM MatchingState]) -> [MList EgisonM MatchingState] -> EgisonM ([Match], [MList EgisonM MatchingState]) extractMatches' (xs, ys) [] = return (xs, ys) extractMatches' (xs, ys) ((MCons (gatherBindings -> Just bindings) states):rest) = do states' <- states extractMatches' (xs ++ [bindings], ys ++ [states']) rest extractMatches' (xs, ys) (stream:rest) = extractMatches' (xs, ys ++ [stream]) rest processMStatesDFS :: MList EgisonM MatchingState -> EgisonM [(MList EgisonM MatchingState)] processMStatesDFS (MCons state stream) = do stream' <- processMState state newStream <- mappend stream' stream return [newStream] processMStatesBFS :: MList EgisonM MatchingState -> EgisonM [(MList EgisonM MatchingState)] processMStatesBFS (MCons state stream) = do newStream <- processMState state newStream' <- stream return [newStream, newStream'] topMAtom :: MatchingState -> MatchingTree topMAtom (MState _ _ _ (mAtom@(MAtom _ _ _):_)) = mAtom topMAtom (MState _ _ _ ((MNode _ mstate):_)) = topMAtom mstate getMatcher :: MatchingTree -> Matcher getMatcher (MAtom _ _ matcher) = matcher processMState :: 
MatchingState -> EgisonM (MList EgisonM MatchingState) processMState state = do if isNotPat state then do let (state1, state2) = splitMState state result <- processMStates [msingleton state1] case result of MNil -> return $ msingleton state2 _ -> return MNil else processMState' state where isNotPat :: MatchingState -> Bool isNotPat state = case topMAtom state of MAtom (NotPat _) _ _ -> True _ -> False splitMState :: MatchingState -> (MatchingState, MatchingState) splitMState (MState env loops bindings ((MAtom (NotPat pattern) target matcher) : trees)) = (MState env loops bindings [MAtom pattern target matcher], MState env loops bindings trees) splitMState (MState env loops bindings ((MNode penv state') : trees)) = let (state1, state2) = splitMState state' in (MState env loops bindings [MNode penv state1], MState env loops bindings (MNode penv state2 : trees)) processMState' :: MatchingState -> EgisonM (MList EgisonM MatchingState) processMState' (MState _ _ _ []) = throwError $ EgisonBug "should not reach here (empty matching-state)" processMState' (MState _ _ _ ((MNode _ (MState _ _ _ [])):_)) = throwError $ EgisonBug "should not reach here (empty matching-node)" processMState' (MState env loops bindings (MNode penv (MState env' loops' bindings' ((MAtom (VarPat name) target matcher):trees')):trees)) = do case lookup name penv of Just pattern -> case trees' of [] -> return $ msingleton $ MState env loops bindings ((MAtom pattern target matcher):trees) _ -> return $ msingleton $ MState env loops bindings ((MAtom pattern target matcher):(MNode penv (MState env' loops' bindings' trees')):trees) Nothing -> throwError $ UnboundVariable name processMState' (MState env loops bindings (MNode penv (MState env' loops' bindings' ((MAtom (IndexedPat (VarPat name) indices) target matcher):trees')):trees)) = do case lookup name penv of Just pattern -> do let env'' = extendEnvForNonLinearPatterns env' bindings loops' indices' <- mapM (evalExpr env'' >=> liftM fromInteger . 
fromWHNF) indices let pattern' = IndexedPat pattern $ map (\i -> NumberExpr (i,0) (1,0)) indices' case trees' of [] -> return $ msingleton $ MState env loops bindings ((MAtom pattern' target matcher):trees) _ -> return $ msingleton $ MState env loops bindings ((MAtom pattern' target matcher):(MNode penv (MState env' loops' bindings' trees')):trees) Nothing -> throwError $ UnboundVariable name processMState' (MState env loops bindings ((MNode penv state):trees)) = do processMState' state >>= mmap (\state' -> case state' of MState _ _ _ [] -> return $ MState env loops bindings trees _ -> (return . MState env loops bindings . (: trees) . MNode penv) state') processMState' (MState env loops bindings ((MAtom pattern target matcher):trees)) = do let env' = extendEnvForNonLinearPatterns env bindings loops case pattern of NotPat _ -> throwError $ EgisonBug "should not reach here (not pattern)" VarPat _ -> throwError $ strMsg "cannot use variable except in pattern function" LetPat bindings' pattern' -> let extractBindings ([name], expr) = makeBindings [name] . 
(:[]) <$> newObjectRef env' expr extractBindings (names, expr) = makeBindings names <$> (evalExpr env' expr >>= fromTuple) in liftM concat (mapM extractBindings bindings') >>= (\b -> return $ msingleton $ MState env loops (b ++ bindings) ((MAtom pattern' target matcher):trees)) PredPat predicate -> do func <- evalExpr env' predicate arg <- evalRef target result <- applyFunc func arg >>= fromWHNF if result then return $ msingleton $ (MState env loops bindings trees) else return MNil ApplyPat func args -> do func' <- evalExpr env' func case func' of Value (PatternFunc env'' names expr) -> let penv = zip names args in return $ msingleton $ MState env loops bindings (MNode penv (MState env'' [] [] [MAtom expr target matcher]) : trees) _ -> throwError $ TypeMismatch "pattern constructor" func' LoopPat name (LoopRange start ends endPat) pat pat' -> do startNum <- evalExpr env' start >>= fromWHNF startNumRef <- newEvalutedObjectRef $ Value $ Number ((startNum - 1),0) (1,0) ends' <- evalExpr env' ends if isPrimitiveValue ends' then do endsRef <- newEvalutedObjectRef ends' inners <- liftIO $ newIORef $ Sq.fromList [IElement endsRef] endsRef' <- liftIO $ newIORef (WHNF (Intermediate (ICollection inners))) return $ msingleton $ MState env ((LoopPatContext (name, startNumRef) endsRef' endPat pat pat'):loops) bindings ((MAtom ContPat target matcher):trees) else do endsRef <- newEvalutedObjectRef ends' return $ msingleton $ MState env ((LoopPatContext (name, startNumRef) endsRef endPat pat pat'):loops) bindings ((MAtom ContPat target matcher):trees) ContPat -> case loops of [] -> throwError $ strMsg "cannot use cont pattern except in loop pattern" LoopPatContext (name, startNumRef) endsRef endPat pat pat' : loops' -> do startNum <- evalRef startNumRef >>= fromWHNF nextNumRef <- newEvalutedObjectRef $ Value $ Number ((startNum + 1),0) (1,0) ends <- evalRef endsRef b <- isEmptyCollection ends if b then return MNil else do (carEndsRef, cdrEndsRef) <- fromJust <$> runMaybeT 
(unconsCollection ends) carEndsNum <- evalRef carEndsRef >>= fromWHNF if startNum > carEndsNum then return MNil else if startNum == carEndsNum then return $ fromList [MState env loops' bindings ((MAtom endPat startNumRef Something):(MAtom pat' target matcher):trees), MState env ((LoopPatContext (name, nextNumRef) cdrEndsRef endPat pat pat'):loops') bindings ((MAtom pat target matcher):trees)] else return $ fromList [MState env ((LoopPatContext (name, nextNumRef) endsRef endPat pat pat'):loops') bindings ((MAtom pat target matcher):trees)] AndPat patterns -> let trees' = map (\pat -> MAtom pat target matcher) patterns ++ trees in return $ msingleton $ MState env loops bindings trees' OrPat patterns -> return $ fromList $ flip map patterns $ \pat -> MState env loops bindings (MAtom pat target matcher : trees) _ -> case matcher of UserMatcher _ _ _ -> do (patterns, targetss, matchers) <- inductiveMatch env' pattern target matcher mfor targetss $ \ref -> do targets <- evalRef ref >>= fromTuple let trees' = zipWith3 MAtom patterns targets matchers ++ trees return $ MState env loops bindings trees' Tuple matchers -> do case pattern of ValuePat _ -> return $ msingleton $ MState env loops bindings ((MAtom pattern target Something):trees) WildCard -> return $ msingleton $ MState env loops bindings ((MAtom pattern target Something):trees) PatVar _ -> return $ msingleton $ MState env loops bindings ((MAtom pattern target Something):trees) IndexedPat _ _ -> return $ msingleton $ MState env loops bindings ((MAtom pattern target Something):trees) TuplePat patterns -> do targets <- evalRef target >>= fromTuple if not (length patterns == length targets) then throwError $ ArgumentsNum (length patterns) (length targets) else return () if not (length patterns == length matchers) then throwError $ ArgumentsNum (length patterns) (length matchers) else return () let trees' = zipWith3 MAtom patterns targets matchers ++ trees return $ msingleton $ MState env loops bindings trees' _ -> 
throwError $ strMsg $ "should not reach here. matcher: " ++ show matcher ++ ", pattern: " ++ show pattern Something -> case pattern of ValuePat valExpr -> do val <- evalExprDeep env' valExpr tgtVal <- evalRefDeep target if val == tgtVal then return $ msingleton $ MState env loops bindings trees else return MNil WildCard -> return $ msingleton $ MState env loops bindings trees PatVar name -> return $ msingleton $ MState env loops ((name, target):bindings) trees IndexedPat (PatVar name) indices -> do indices <- mapM (evalExpr env' >=> liftM fromInteger . fromWHNF) indices case lookup name bindings of Just ref -> do obj <- evalRef ref >>= updateHash indices >>= newEvalutedObjectRef return $ msingleton $ MState env loops (subst name obj bindings) trees Nothing -> do obj <- updateHash indices (Intermediate . IIntHash $ HL.empty) >>= newEvalutedObjectRef return $ msingleton $ MState env loops ((name,obj):bindings) trees where updateHash :: [Integer] -> WHNFData -> EgisonM WHNFData updateHash [index] (Intermediate (IIntHash hash)) = do return . Intermediate . IIntHash $ HL.insert index target hash updateHash (index:indices) (Intermediate (IIntHash hash)) = do val <- maybe (return $ Intermediate $ IIntHash HL.empty) evalRef $ HL.lookup index hash ref <- updateHash indices val >>= newEvalutedObjectRef return . Intermediate . IIntHash $ HL.insert index ref hash updateHash indices (Value (IntHash hash)) = do keys <- return $ HL.keys hash vals <- mapM (newEvalutedObjectRef . 
Value) $ HL.elems hash updateHash indices (Intermediate $ IIntHash $ HL.fromList $ zip keys vals) updateHash _ v = throwError $ strMsg $ "expected hash value: " ++ show v subst :: (Eq a) => a -> b -> [(a, b)] -> [(a, b)] subst k nv ((k', v'):xs) | k == k' = (k', nv):(subst k nv xs) | otherwise = (k', v'):(subst k nv xs) subst _ _ [] = [] IndexedPat pattern indices -> throwError $ strMsg ("invalid indexed-pattern: " ++ show pattern) TuplePat patterns -> do targets <- evalRef target >>= fromTuple if not (length patterns == length targets) then throwError $ ArgumentsNum (length patterns) (length targets) else return () let trees' = zipWith3 MAtom patterns targets (take (length patterns) (repeat Something)) ++ trees return $ msingleton $ MState env loops bindings trees' _ -> throwError $ strMsg "something can only match with a pattern variable" _ -> throwError $ EgisonBug $ "should not reach here. matcher: " ++ show matcher ++ ", pattern: " ++ show pattern inductiveMatch :: Env -> EgisonPattern -> ObjectRef -> Matcher -> EgisonM ([EgisonPattern], MList EgisonM ObjectRef, [Matcher]) inductiveMatch env pattern target (UserMatcher matcherEnv _ clauses) = do foldr tryPPMatchClause failPPPatternMatch clauses where tryPPMatchClause (pat, matchers, clauses) cont = do result <- runMaybeT $ primitivePatPatternMatch env pat pattern case result of Just (patterns, bindings) -> do targetss <- foldr tryPDMatchClause failPDPatternMatch clauses matchers <- evalExpr matcherEnv matchers >>= evalMatcherWHNF >>= (return . 
fromTupleValue) return (patterns, targetss, matchers) where tryPDMatchClause (pat, expr) cont = do result <- runMaybeT $ primitiveDataPatternMatch pat target case result of Just bindings' -> do let env = extendEnv matcherEnv $ bindings ++ bindings' evalExpr env expr >>= fromCollection _ -> cont _ -> cont failPPPatternMatch = throwError $ strMsg "failed primitive pattern pattern match" failPDPatternMatch = throwError $ strMsg "failed primitive data pattern match" primitivePatPatternMatch :: Env -> PrimitivePatPattern -> EgisonPattern -> MatchM ([EgisonPattern], [Binding]) primitivePatPatternMatch _ PPWildCard _ = return ([], []) primitivePatPatternMatch _ PPPatVar pattern = return ([pattern], []) primitivePatPatternMatch env (PPValuePat name) (ValuePat expr) = do ref <- lift $ newObjectRef env expr return ([], [(name, ref)]) primitivePatPatternMatch env (PPInductivePat name patterns) (InductivePat name' exprs) | name == name' = (concat *** concat) . unzip <$> zipWithM (primitivePatPatternMatch env) patterns exprs | otherwise = matchFail primitivePatPatternMatch _ _ _ = matchFail primitiveDataPatternMatch :: PrimitiveDataPattern -> ObjectRef -> MatchM [Binding] primitiveDataPatternMatch PDWildCard _ = return [] primitiveDataPatternMatch (PDPatVar name) ref = return [(name, ref)] primitiveDataPatternMatch (PDInductivePat name patterns) ref = do whnf <- lift $ evalRef ref case whnf of Intermediate (IInductiveData name' refs) | name == name' -> concat <$> zipWithM primitiveDataPatternMatch patterns refs Value (InductiveData name' vals) | name == name' -> do refs <- lift $ mapM (newEvalutedObjectRef . 
Value) vals concat <$> zipWithM primitiveDataPatternMatch patterns refs _ -> matchFail primitiveDataPatternMatch PDEmptyPat ref = do whnf <- lift $ evalRef ref isEmpty <- lift $ isEmptyCollection whnf if isEmpty then return [] else matchFail primitiveDataPatternMatch (PDConsPat pattern pattern') ref = do whnf <- lift $ evalRef ref (head, tail) <- unconsCollection whnf (++) <$> primitiveDataPatternMatch pattern head <*> primitiveDataPatternMatch pattern' tail primitiveDataPatternMatch (PDSnocPat pattern pattern') ref = do whnf <- lift $ evalRef ref (init, last) <- unsnocCollection whnf (++) <$> primitiveDataPatternMatch pattern init <*> primitiveDataPatternMatch pattern' last primitiveDataPatternMatch (PDConstantPat expr) ref = do target <- lift (evalRef ref) >>= either (const matchFail) return . extractPrimitiveValue isEqual <- lift $ (==) <$> evalExprDeep nullEnv expr <*> pure target if isEqual then return [] else matchFail expandCollection :: WHNFData -> EgisonM (Seq Inner) expandCollection (Value (Collection vals)) = mapM (liftM IElement . newEvalutedObjectRef . 
Value) vals expandCollection (Intermediate (ICollection innersRef)) = liftIO $ readIORef innersRef expandCollection val = throwError $ TypeMismatch "collection" val isEmptyCollection :: WHNFData -> EgisonM Bool isEmptyCollection (Value (Collection col)) = return $ Sq.null col isEmptyCollection coll@(Intermediate (ICollection innersRef)) = do inners <- liftIO $ readIORef innersRef case Sq.viewl inners of EmptyL -> return True (ISubCollection ref') :< tInners -> do hInners <- evalRef ref' >>= expandCollection liftIO $ writeIORef innersRef (hInners >< tInners) isEmptyCollection coll _ -> return False isEmptyCollection _ = return False unconsCollection :: WHNFData -> MatchM (ObjectRef, ObjectRef) unconsCollection (Value (Collection col)) = case Sq.viewl col of EmptyL -> matchFail val :< vals -> lift $ (,) <$> newEvalutedObjectRef (Value val) <*> newEvalutedObjectRef (Value $ Collection vals) unconsCollection coll@(Intermediate (ICollection innersRef)) = do inners <- liftIO $ readIORef innersRef case Sq.viewl inners of EmptyL -> matchFail (IElement ref') :< tInners -> do tInnersRef <- liftIO $ newIORef tInners lift $ (ref', ) <$> newEvalutedObjectRef (Intermediate $ ICollection tInnersRef) (ISubCollection ref') :< tInners -> do hInners <- lift $ evalRef ref' >>= expandCollection liftIO $ writeIORef innersRef (hInners >< tInners) unconsCollection coll unconsCollection _ = matchFail unsnocCollection :: WHNFData -> MatchM (ObjectRef, ObjectRef) unsnocCollection (Value (Collection col)) = case Sq.viewr col of EmptyR -> matchFail vals :> val -> lift $ (,) <$> newEvalutedObjectRef (Value $ Collection vals) <*> newEvalutedObjectRef (Value val) unsnocCollection coll@(Intermediate (ICollection innersRef)) = do inners <- liftIO $ readIORef innersRef case Sq.viewr inners of EmptyR -> matchFail hInners :> (IElement ref') -> do hInnersRef <- liftIO $ newIORef hInners lift $ (, ref') <$> newEvalutedObjectRef (Intermediate $ ICollection hInnersRef) hInners :> (ISubCollection ref') -> 
do tInners <- lift $ evalRef ref' >>= expandCollection liftIO $ writeIORef innersRef (hInners >< tInners) unsnocCollection coll unsnocCollection _ = matchFail extendEnvForNonLinearPatterns :: Env -> [Binding] -> [LoopPatContext] -> Env extendEnvForNonLinearPatterns env bindings loops = extendEnv env $ bindings ++ map (\(LoopPatContext binding _ _ _ _) -> binding) loops evalMatcherWHNF :: WHNFData -> EgisonM Matcher evalMatcherWHNF (Value matcher@Something) = return matcher evalMatcherWHNF (Value matcher@(UserMatcher _ _ _)) = return matcher evalMatcherWHNF (Value (Tuple ms)) = Tuple <$> mapM (evalMatcherWHNF . Value) ms evalMatcherWHNF (Intermediate (ITuple refs)) = do whnfs <- mapM evalRef refs ms <- mapM evalMatcherWHNF whnfs return $ Tuple ms evalMatcherWHNF whnf = throwError $ TypeMismatch "matcher" whnf -- -- Util -- fromTuple :: WHNFData -> EgisonM [ObjectRef] fromTuple (Intermediate (ITuple refs)) = return refs fromTuple (Value (Tuple vals)) = mapM (newEvalutedObjectRef . Value) vals fromTuple whnf = return <$> newEvalutedObjectRef whnf fromTupleValue :: EgisonValue -> [EgisonValue] fromTupleValue (Tuple vals) = vals fromTupleValue val = [val] fromCollection :: WHNFData -> EgisonM (MList EgisonM ObjectRef) fromCollection (Value (Collection vals)) = if Sq.null vals then return MNil else fromSeq <$> mapM (newEvalutedObjectRef . 
Value) vals fromCollection whnf@(Intermediate (ICollection _)) = do isEmpty <- isEmptyCollection whnf if isEmpty then return MNil else do (head, tail) <- fromJust <$> runMaybeT (unconsCollection whnf) tail' <- evalRef tail return $ MCons head (fromCollection tail') fromCollection whnf = throwError $ TypeMismatch "collection" whnf -- -- String -- packStringValue :: EgisonValue -> EgisonM Text packStringValue (Collection seq) = do let ls = toList seq str <- mapM (\val -> case val of Char c -> return c _ -> throwError $ TypeMismatch "char" (Value val)) ls return $ T.pack str packStringValue (Tuple [val]) = packStringValue val packStringValue val = throwError $ TypeMismatch "string" (Value val) -- -- Util -- data EgisonHashKey = IntKey Integer | CharKey Char | StrKey Text extractPrimitiveValue :: WHNFData -> Either EgisonError EgisonValue extractPrimitiveValue (Value val@(Char _)) = return val extractPrimitiveValue (Value val@(Bool _)) = return val extractPrimitiveValue (Value val@(Number _ _)) = return val extractPrimitiveValue (Value val@(Float _ _)) = return val extractPrimitiveValue whnf = throwError $ TypeMismatch "primitive value" whnf isPrimitiveValue :: WHNFData -> Bool isPrimitiveValue (Value (Char _)) = True isPrimitiveValue (Value (Bool _)) = True isPrimitiveValue (Value (Number _ _)) = True isPrimitiveValue (Value (Float _ _)) = True isPrimitiveValue _ = False
beni55/egison
hs-src/Language/Egison/Core.hs
Haskell
mit
41,566
{-# htermination intersectFM_C :: (b1 -> b2 -> b3) -> FiniteMap Float b1 -> FiniteMap Float b2 -> FiniteMap Float b3 #-} import FiniteMap
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_intersectFM_C_6.hs
Haskell
mit
138
-- Project Euler, problem 6:
--
-- The sum of the squares of the first ten natural numbers is
--   1^2 + 2^2 + ... + 10^2 = 385.
-- The square of the sum of the first ten natural numbers is
--   (1 + 2 + ... + 10)^2 = 55^2 = 3025.
-- Hence the difference between the sum of the squares of the first ten
-- natural numbers and the square of the sum is 3025 - 385 = 2640.
--
-- Find the difference between the sum of the squares of the first one
-- hundred natural numbers and the square of the sum.

-- | Sum of squares 1^2 + 2^2 + ... + n^2, using the closed form
-- n(n+1)(2n+1)/6.  The product is always divisible by 6, so 'quot'
-- is exact here.
sumOfSqs :: Int -> Int
sumOfSqs n = n * (n + 1) * (2 * n + 1) `quot` 6

-- | Square of the sum (1 + 2 + ... + n)^2, using the triangular-number
-- closed form n(n+1)/2 and squaring it.
sqSums :: Int -> Int
sqSums n = let s = n * (n + 1) `quot` 2 in s * s

-- | Difference between the square of the sum and the sum of the squares
-- of the first n natural numbers.
sqDiff :: Int -> Int
sqDiff n = sqSums n - sumOfSqs n
sravan-s/euler
euler-0006/sumSquareDiff.hs
Haskell
mit
642
{- (**) Problem 12: Decode a run-length encoded list.

   Given a run-length code list generated as specified in problem 11,
   construct its uncompressed version.

   Example in Haskell:

   P12> decodeModified [Multiple 4 'a',Single 'b',Multiple 2 'c',
                        Multiple 2 'a',Single 'd',Multiple 4 'e']
   "aaaabccaadeeee"
-}

-- | One element of a run-length encoding: either a run of identical
-- items together with its repeat count, or a lone item.
data Item a = Multiple Int a | Single a
    deriving (Show)

-- | Expand a run-length encoded list back into the original list.
-- A 'Single' contributes one element; a 'Multiple' contributes its
-- element repeated the given number of times.
decodeModified :: [Item a] -> [a]
decodeModified [] = []
decodeModified (Single x : rest) = x : decodeModified rest
decodeModified (Multiple n x : rest) = replicate n x ++ decodeModified rest
gaoce/haskell_99
12.hs
Haskell
mit
534
{-# LANGUAGE ScopedTypeVariables #-}

-- | JSaddle "hello world": builds a small DOM UI, appends the click
-- coordinates to the page on each click, animates the colour of an
-- exit link, and quits when the user clicks that link.
module JSaddleHello ( main ) where

import Data.Monoid ((<>))
import Control.Monad (forever)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (takeMVar, putMVar, newEmptyMVar)
import Control.Lens ((^.))
import Language.Javascript.JSaddle
       (jsg, jsg3, js, js1, jss, fun, valToNumber, syncPoint,
        nextAnimationFrame, runJSM, askJSM, global)

main = do
    doc <- jsg "document"
    doc ^. js "body" ^. jss "innerHTML" "<h1>Kia ora (Hi)</h1>"

    -- Create a haskell function call back for the onclick event.
    -- NOTE(review): the [e] pattern assumes jsaddle always passes exactly
    -- one argument (the event object) to DOM event handlers -- confirm
    -- against 'fun's calling convention.
    doc ^. jss "onclick" (fun $ \ _ _ [e] -> do
        x <- e ^. js "clientX" >>= valToNumber
        y <- e ^. js "clientY" >>= valToNumber
        newParagraph <- doc ^. js1 "createElement" "p"
        newParagraph ^. js1 "appendChild" (
            doc ^. js1 "createTextNode" ("Click " ++ show (x, y)))
        doc ^. js "body" ^. js1 "appendChild" newParagraph
        return ())

    -- Make an exit button
    exitMVar <- liftIO newEmptyMVar
    exit <- doc ^. js1 "createElement" "span"
    exit ^. js1 "appendChild" (
        doc ^. js1 "createTextNode" "Click here to exit")
    doc ^. js "body" ^. js1 "appendChild" exit
    exit ^. jss "onclick" (fun $ \ _ _ _ -> liftIO $ putMVar exitMVar ())

    -- Force all of the lazy evaluation to be executed
    syncPoint

    -- Animate the color of the exit button
    ctx <- askJSM
    liftIO . forkIO . forever $ (`runJSM` ctx) . nextAnimationFrame $ \ t -> do
        -- (sin _ + 1) * 128 ranges over [0, 256].  Without the clamp,
        -- sin rounding to exactly 1.0 yields n == 256, h1 == 16, and
        -- 'hexDigits !! 16' throws an index error inside the animation
        -- loop -- so clamp the value to 255 (two hex digits).
        let n = min 255 (floor ((sin (3 * t) + 1) * 128))
            (h1, h2) = n `divMod` 16
            hexDigits = ['0'..'9'] <> ['A'..'F']
        exit ^. js "style" ^. jss "color" ("#0000" <> [hexDigits !! h1, hexDigits !! h2])
        return ()

    -- In GHC compiled version the WebSocket connection will end when this
    -- thread ends. So we will wait until the user clicks exit.
    liftIO $ takeMVar exitMVar
    doc ^. js "body" ^. jss "innerHTML" "<h1>Ka kite ano (See you later)</h1>"
    return ()
ghcjs/jsaddle-hello
src/JSaddleHello.hs
Haskell
mit
2,076
{-# LANGUAGE DeriveDataTypeable, TypeFamilies, TypeOperators, StandaloneDeriving, FlexibleContexts #-}
-- Workaround
{-# LANGUAGE CPP, UndecidableInstances #-}

-- | Functional sketch of a radio-astronomy imaging pipeline (gridding,
-- degridding, cleaning) expressed as an abstract dataflow graph over
-- the project's 'Flow' DSL.  The "Functional" section only declares the
-- *shape* of the computation; concrete kernels are bound further below.
module Main where

import Flow
import Flow.Vector
import Flow.Kernel
import Flow.Builder

import Control.Monad
import Data.Typeable

-- ----------------------------------------------------------------------------
-- --- Functional ---
-- ----------------------------------------------------------------------------

-- Data tags: empty (phantom) types used only to tag 'Flow' values with
-- the kind of data they stand for.  They are never constructed at the
-- value level.
data Tag -- ^ Initialisation (e.g. FFT plans)
data Vis -- ^ Visibilities (File name to OSKAR / raw visibilities / binned ...)
data UVGrid -- ^ UV grid
data Image -- ^ Image
data GCFs -- ^ A set of GCFs
data CleanResult -- ^ Result of cleaning (e.g. model + residual)

-- Standalone deriving is required because the tag types above have no
-- constructors to attach an ordinary deriving clause to.
deriving instance Typeable Tag
deriving instance Typeable Vis
deriving instance Typeable UVGrid
deriving instance Typeable Image
deriving instance Typeable GCFs
deriving instance Typeable CleanResult

-- Abstract kernel signatures.
--
-- TODO: The string we use here is somewhat important for keeping them
-- apart - it would be more elegant if we could enforce them to be
-- unique in some other way.
-- Abstract flow signatures: each 'flow "name"' introduces a named,
-- as-yet-unimplemented dataflow node.  The string is the node's
-- identity (see the TODO above about keeping these unique).
createGrid :: Flow UVGrid
createGrid = flow "create grid"
grid :: Flow Vis -> Flow GCFs -> Flow UVGrid -> Flow UVGrid
grid = flow "grid"
degrid :: Flow UVGrid -> Flow GCFs -> Flow Vis -> Flow Vis
degrid = flow "degrid"
idft :: Flow UVGrid -> Flow Image
idft = flow "idft"
dft :: Flow Image -> Flow UVGrid
dft = flow "dft"
gcf :: Flow Vis -> Flow GCFs
gcf = flow "gcf"
initRes :: Flow Image
initRes = flow "residual init"
psfVis :: Flow Vis -> Flow Vis
psfVis = flow "prepare vis for PSF"
clean :: Flow Image -> Flow Image -> Flow CleanResult
clean = flow "clean"
cleanModel :: Flow CleanResult -> Flow Image
cleanModel = flow "clean/model"
cleanResidual :: Flow CleanResult -> Flow Image
cleanResidual = flow "clean/residual"
imageSum :: Flow Image -> Flow Image
imageSum = flow "image sum"

-- | Compound gridder actor: grid visibilities onto a fresh UV grid,
-- then inverse-DFT to the image plane.
gridder :: Flow Vis -> Flow GCFs -> Flow Image
gridder vis gcfs = idft (grid vis gcfs createGrid)

-- | Compound degridder actor: DFT the model image to the UV plane and
-- predict visibilities from it.
degridder :: Flow Image -> Flow Vis -> Flow GCFs -> Flow Vis
degridder img vis gcfs = degrid (dft img) gcfs vis

-- | Compound PSF gridder actor: same as 'gridder' but with the
-- visibilities prepared for PSF generation first.
psfGrid :: Flow Vis -> Flow GCFs -> Flow Image
psfGrid vis gcfs = gridder (psfVis vis) gcfs

-- | Compound cleaning actor, returning (residual, model).
cleaner :: Flow Image -> Flow Image -> (Flow Image, Flow Image)
cleaner dirty psf = (cleanResidual result, cleanModel result)
  where result = clean dirty psf

-- | Compound major loop iteration actor: one grid / clean / degrid
-- round.  The previous residual is discarded; the iteration number is
-- unused (it only drives the fold in 'majorLoop').
majorIter :: Flow GCFs -> Flow Image -> (Flow Image, Flow Vis) -> Int -> (Flow Image, Flow Vis)
majorIter gcfs psf (_res, vis) _i = (res', vis')
  where img = gridder vis gcfs
        (res', mod') = cleaner img psf
        vis' = degridder mod' vis gcfs

-- | Compound major loop actor: n major-loop iterations folded over the
-- initial residual and input visibilities.  (This builds an abstract
-- graph, so the fold merely unrolls the loop n times.)
majorLoop :: Int -> Flow Vis -> (Flow Image, Flow Vis)
majorLoop n vis = foldl (majorIter gcfs psf) (initRes, vis) [1..n]
  where gcfs = gcf vis
        psf = psfGrid vis gcfs

-- | Final residual image of the major loop, summed.
majorLoopSum :: Int -> Flow Vis -> Flow Image
majorLoopSum n vis = imageSum $ fst $ majorLoop n vis

--
---------------------------------------------------------------------------- -- --- Kernels --- -- ---------------------------------------------------------------------------- data Config = Config { cfgInput :: [(FilePath, Int)] , cfgOutput :: FilePath , cfgMajorLoops :: Int , cfgGrid :: GridPar } data GridPar = GridPar -- Make data representations. Sadly, this can not be *quite* done with -- deriving yet (#8165). In the meantime, we use preprocessor #define DATAREPR_INSTANCE(NewRepr, Repr) \ instance DataRepr NewRepr where \ type ReprType NewRepr = ReprType (Repr); \ reprNop (NewRepr r) = reprNop r; \ reprAccess (NewRepr r) = reprAccess r; \ reprCompatible (NewRepr r1) (NewRepr r2) = reprCompatible r1 r2 newtype ImageRepr = ImageRepr (VectorRepr () Image) deriving (Typeable, Show) DATAREPR_INSTANCE(ImageRepr, VectorRepr () Image) newtype UVGridRepr = UVGridRepr (VectorRepr () UVGrid) deriving (Typeable, Show) DATAREPR_INSTANCE(UVGridRepr, VectorRepr () UVGrid) -- By default images and grids are always consumed by the caller, as -- they are large objects with lots of write operations, and we don't -- want to duplicate them. 
imgRepr :: ImageRepr imgRepr = ImageRepr $ VectorRepr WriteAccess uvgRepr :: UVGridRepr uvgRepr = UVGridRepr $ VectorRepr WriteAccess -- Plan representation is used by many kernels planRepr :: VectorRepr () Tag planRepr = VectorRepr ReadAccess newtype RawVisRepr = RawVisRepr (VectorRepr () Vis) deriving (Typeable, Show) DATAREPR_INSTANCE(RawVisRepr, VectorRepr () Vis) newtype SortedVisRepr = SortedVisRepr (VectorRepr () Vis) deriving (Typeable, Show) DATAREPR_INSTANCE(SortedVisRepr, VectorRepr () Vis) -- Visibilities generally remain constant rawVisRepr :: RawVisRepr rawVisRepr = RawVisRepr $ VectorRepr ReadAccess visRepr :: SortedVisRepr visRepr = SortedVisRepr $ VectorRepr ReadAccess -- GCFs too gcfsRepr :: VectorRepr () GCFs gcfsRepr = VectorRepr ReadAccess dummy :: (DataRepr r, IsReprs rs, IsReprKern (ReprType r) rs) => String -> rs -> r -> ReprKernFun (ReprType r) rs dummy name rs r = mappingKernel name rs r code where code _ _ = putStrLn name >> return nullVector halideWrapper :: (DataRepr r, IsReprs rs, IsReprKern (ReprType r) rs) => String -> rs -> r -> ReprKernFun (ReprType r) rs halideWrapper _ = dummy "halide" cWrapper :: (DataRepr r, IsReprs rs, IsReprKern (ReprType r) rs) => String -> rs -> r -> ReprKernFun (ReprType r) rs cWrapper _ = dummy "c" oskarReader :: Typeable d => Domain d -> [(FilePath, Int)] -> Kernel Vis oskarReader d _ = dummy "oskar" Z (RegionRepr d rawVisRepr) sorter :: Flow Vis -> Kernel Vis sorter = dummy "sorter" (rawVisRepr :. Z) visRepr setOnes :: Flow Vis -> Kernel Vis setOnes = dummy "ones" (visRepr :. Z) visRepr gcfKernel :: GridPar -> Flow Tag -> Flow Vis -> Kernel GCFs gcfKernel _ = halideWrapper "gcfs" (planRepr :. visRepr :. Z) gcfsRepr fftCreatePlans :: GridPar -> Kernel Tag fftCreatePlans _ = dummy "fftPlans" Z planRepr fftKern :: GridPar -> Flow Tag -> Flow Image -> Kernel UVGrid fftKern _ = dummy "fftKern" (planRepr :. imgRepr :. 
Z) uvgRepr ifftKern :: GridPar -> Flow Tag -> Flow UVGrid -> Kernel Image ifftKern _ = dummy "ifftKern" (planRepr :. uvgRepr :. Z) imgRepr gridInit :: GridPar -> Kernel UVGrid gridInit _ = dummy "gridInit" Z uvgRepr gridKernel :: GridPar -> Flow Vis -> Flow GCFs -> Flow UVGrid -> Kernel UVGrid gridKernel _ = dummy "gridKernel" (visRepr :. gcfsRepr :. uvgRepr :. Z) uvgRepr psfGridKernel :: GridPar -> Flow Vis -> Flow GCFs -> Flow UVGrid -> Kernel UVGrid psfGridKernel _ = dummy "psfGridKernel" (visRepr :. gcfsRepr :. uvgRepr :. Z) uvgRepr degridKernel :: GridPar -> Flow UVGrid -> Flow GCFs -> Flow Vis -> Kernel Vis degridKernel _ = dummy "degridKernel" (uvgRepr :. gcfsRepr :. visRepr :. Z) visRepr cleanResRepr :: VectorRepr () CleanResult cleanResRepr = VectorRepr WriteAccess cleanKernel :: Flow Image -> Flow Image -> Kernel CleanResult cleanKernel = halideWrapper "clean" (imgRepr :. imgRepr :. Z) cleanResRepr splitModel :: Flow CleanResult -> Kernel Image splitModel = dummy "splitModel" (cleanResRepr :. Z) imgRepr splitResidual :: Flow CleanResult -> Kernel Image splitResidual = dummy "splitResidual" (cleanResRepr :. Z) imgRepr imageSumKernel :: Typeable d => Domain d -> Flow Image -> Kernel Image imageSumKernel dom = dummy "image summation" (RegionRepr dom imgRepr :. Z) imgRepr imageWriter :: FilePath -> Flow Image -> Kernel Image imageWriter _ = dummy "image writer" (imgRepr :. 
Z) NoRepr -- ---------------------------------------------------------------------------- -- --- Strategy --- -- ---------------------------------------------------------------------------- scatterImaging :: Typeable d => Config -> Domain d -> Flow Tag -> Flow Vis -> Strategy () scatterImaging cfg dh tag vis = implementing (fst $ majorLoop (cfgMajorLoops cfg) vis) $ do -- Sort visibility data let addDom :: IsKernelDef kf => kf -> kf addDom = regionKernel dh rebind vis $ addDom sorter -- Initialise FFTs let gpar = cfgGrid cfg bind tag $ addDom $ fftCreatePlans gpar -- Generate GCF let gcfs = gcf vis bind gcfs $ addDom $ gcfKernel gpar tag vis -- Make rules bindRule idft (addDom $ ifftKern gpar tag) bindRule dft (addDom $ fftKern gpar tag) bindRule createGrid (addDom $ gridInit gpar) bindRule grid (addDom $ gridKernel gpar) bindRule degrid (addDom $ degridKernel gpar) bindRule clean (addDom cleanKernel) bindRule cleanResidual (addDom splitResidual) bindRule cleanModel (addDom splitModel) -- PSF. Note that we bind a kernel here that implements *two* -- abstract kernel nodes! --let psfg = grid (psfVis vis) gcfs createGrid --bind psfg (psfGridKernel gpar vis gcfs createGrid) bindRule (grid . psfVis) (addDom $ psfGridKernel gpar) calculate $ psfGrid vis gcfs -- Loop forM_ [1..cfgMajorLoops cfg-1] $ \i -> do -- Force grid calculation - we do not want to share this between -- loop iterations! 
calculate createGrid -- Generate new visibilities calculate $ snd $ majorLoop i vis -- Calculate residual of last loop iteration calculate createGrid calculate $ fst $ majorLoop (cfgMajorLoops cfg) vis -- Strategy implements imaging loop for a number of data sets scatterImagingMain :: Config -> Strategy () scatterImagingMain cfg = do -- Make data set domain let dataSets = length (cfgInput cfg) dataSetRepeats = sum $ map snd $ cfgInput cfg dom <- makeRangeDomain 0 dataSetRepeats -- Create data flow for visibilities, build abstract data flow to do -- configured number of major loops over this input data tag <- uniq (flow "tag") let vis = flow "vis" tag -- Split by datasets let result = majorLoopSum (cfgMajorLoops cfg) vis ds <- split dom dataSets distribute ds ParSchedule $ void $ do -- Split by number of runs. -- TODO: Number of runs should depend on data set! rep <- split ds 3 distribute rep SeqSchedule $ void $ do -- Read in visibilities. The domain handle passed in tells the -- kernel which of the datasets to load. bind vis $ oskarReader rep $ cfgInput cfg -- Implement this data flow scatterImaging cfg rep tag vis -- calculate $ fst $ majorLoop (cfgMajorLoops cfg) vis -- Sum up local images (TODO: accumulate?) bindRule imageSum (imageSumKernel rep) calculate result -- Sum and write out the result rebind result $ imageWriter (cfgOutput cfg) -- Strategy implements imaging loop for a number of data sets scatterSimple :: Config -> Strategy () scatterSimple cfg = do -- Create data flow for visibilities, build abstract data flow to do -- configured number of major loops over this input data vis <- uniq (flow "vis") tag <- uniq (flow "tag") -- Read in visibilities. The domain handle passed in tells the -- kernel which of the datasets to load. 
dom <- makeRangeDomain 0 1 bind vis $ oskarReader dom $ cfgInput cfg -- Implement this data flow scatterImaging cfg dom tag vis -- Sum and write out the result bindRule imageSum (imageSumKernel dom) let result = majorLoopSum (cfgMajorLoops cfg) vis rebind result (imageWriter (cfgOutput cfg)) testStrat :: Strategy () testStrat = scatterImagingMain $ Config [("input.vis", 3), ("input2.vis", 3), ("input3.vis", 3)] "output.img" 1 GridPar main :: IO () main = dumpSteps testStrat >> execStrategy testStrat
SKA-ScienceDataProcessor/RC
MS5/programs/imaging.hs
Haskell
apache-2.0
11,853
{-# LANGUAGE TemplateHaskell #-} module Session where import Control.Applicative import Control.Monad.Reader -- lens import Control.Lens -- happstack framework import Data.Acid import Data.SafeCopy import Happstack.Server import Happstack.Server.ClientSession import Text.I18n -- local import State import State.Helper import State.Users type ServerT a = ClientSessionT SessionData AcidServerT a runServerT :: AcidState BlogState -> SessionConf -> ServerT a -> ServerPart a runServerT acid sconf srvt = runReaderT (withClientSessionT sconf srvt) acid -- -- Type definition -- data SessionData = SessionData { _sessionUser :: Maybe UserID , _sessionLocale :: Maybe String } makeLenses ''SessionData deriveSafeCopy 0 'base ''SessionData instance ClientSession SessionData where emptySession = SessionData { _sessionUser = Nothing , _sessionLocale = Nothing } -- -- Requests -- getUserID :: ServerT (Maybe UserID) getUserID = liftSessionStateT $ use sessionUser setUserID :: Maybe UserID -> ServerT () setUserID mid = liftSessionStateT $ sessionUser .= mid getSessionUser :: ServerT (Maybe User) getSessionUser = do muid <- getUserID maybe (return Nothing) (runQuery . GetUserById) muid getSessionLocale :: ServerT (Maybe Locale) getSessionLocale = liftSessionStateT $ fmap Locale <$> use sessionLocale setSessionLocale :: Maybe Locale -> ServerT () setSessionLocale mloc = liftSessionStateT $ sessionLocale .= (unloc <$> mloc) where unloc (Locale s) = s
mcmaniac/blog.nils.cc
src/Session.hs
Haskell
apache-2.0
1,519
{-# LANGUAGE ExistentialQuantification, OverloadedStrings #-} module HStyle.Rule ( Rule (..) , Options (..) , FileState (..) , FileM , runFileM , runRule ) where import Control.Monad (forM_, unless) import Control.Monad.Reader (ReaderT, ask, runReaderT) import Control.Monad.State (State, get, put, runState) import Control.Monad.Writer (WriterT, runWriterT, tell) import Data.Text (Text) import qualified Data.Text as T import HStyle.Block import HStyle.Checker import HStyle.Fixer import HStyle.Parse import HStyle.Selector -- | Compose the elements of a rule. Use ExistentialQuantification so the -- internal state of a rule cannot be touched from the outside. data Rule = forall a. Rule (Selector a) (Checker a) (Fixer a) -- | Options for checking files data Options = Options { -- | Attempt to fix files optionsFix :: Bool , -- | Be quiet optionsQuiet :: Bool } deriving (Show) data FileState = FileState { -- | File we're fixing filePath :: FilePath , -- | The module in the file fileModule :: Module , -- | A block holding the file contents fileBlock :: Block , -- | Flag indicating whether or not the in-memory representation differs -- from the file on disk fileUpdated :: Bool , -- | Flag indicating that all checks were OK fileOk :: Bool } deriving (Show) -- | We prefer to keep the file checking out of the IO monad. type FileM = ReaderT Options (WriterT [Text] (State FileState)) runFileM :: FileM a -> Options -> FileState -> (a, FileState, [Text]) runFileM fm options fs = -- Peel of the monads one by one let w = runReaderT fm options s = runWriterT w ((x, ts), fs') = runState s fs in (x, fs', ts) -- | Write some text followed by a newline putLn :: Text -> FileM () putLn = tell . 
return -- | Represents fixing status data Fix = DontFix -- ^ User doesn't want to fix it | CouldntFix -- ^ Our library is unable to fix it | Fixed -- ^ Fixed, result deriving (Eq, Show) runRule :: Rule -> FileM () runRule rule@(Rule selector checker fixer) = do fs <- get check $ selector (fileModule fs) (fileBlock fs) where -- Check the files one by one. However, note that if we fixed a file, we -- need to re-run the current rule, because e.g. line number might have -- changed, so our selections will no longer be valid. check [] = return () check ((x, r) : selections) = do fix <- checkBlock checker fixer x r case fix of Fixed -> runRule rule _ -> check selections checkBlock :: Checker a -> Fixer a -> a -> Range -> FileM Fix checkBlock checker fixer x range = do -- Query monad states fs <- get options <- ask -- Determine problems, and attempt to fix (lazily) let block = fileBlock fs problems = checker x block range needFix = optionsFix options && not (null problems) (fix, block') = case (needFix, fixer x block range) of (False, _) -> (DontFix, block) (True, Nothing) -> (CouldntFix, block) (True, Just ls) -> (Fixed, updateRange range ls block) -- Output our results for this check forM_ problems $ \(line, problem) -> do -- let line = absoluteLineNumber i block putLn $ T.pack (filePath fs) `T.append` ":" `T.append` T.pack (show line) `T.append` ": " `T.append` problem unless (optionsQuiet options) $ do case fix of DontFix -> putLn " Found:" CouldntFix -> putLn " Couldn't fix:" Fixed -> putLn " Fixed:" putLn $ prettyRange 4 block range -- If we fixed anything, re-parse the module. Parsing here should really, -- not fail, because if it does, we made the code unparseable with our own -- fix... 
let (module', _) = case fix of Fixed -> either error id $ parseModule (Just $ filePath fs) (toText block') _ -> (fileModule fs, block') -- Save updated file state put fs { fileModule = module' , fileBlock = block' , fileUpdated = fileUpdated fs || fix == Fixed , fileOk = fileOk fs && null problems } -- Return fix resolution return fix
jaspervdj/hstyle
src/HStyle/Rule.hs
Haskell
bsd-3-clause
4,448
{-# LANGUAGE OverloadedStrings #-} module Main (main) where import System.Environment (lookupEnv) import System.Exit (exitFailure) import System.IO (hPutStrLn, stderr) import qualified Data.ByteString.Base64 as BS import qualified Data.ByteString.Char8 as BS import HerokuBuild main :: IO () main = withOptions $ \(Options app cmd) -> do k <- getApiKey let builds = "/apps/" ++ app ++ "/builds/" case cmd of Start u v -> p buildId =<< postHeroku k builds (newBuild u v) Status b -> p status =<< getHeroku k (builds ++ b) Release b a -> do mb <- getHeroku k (builds ++ b) case mb of Just (Build _ Success _ s) -> do postHeroku' k ("/apps/" ++ a ++ "/releases") s putStrLn "Success" _ -> err "API error or build not found or not succeeded" where p :: Show a => (Build -> a) -> Maybe Build -> IO () p acc = maybe (err "failed to parse API response") (print . acc) getApiKey :: IO ApiKey getApiKey = do mk <- lookupEnv "HEROKU_API_KEY" case mk of Just k -> return $ encode k Nothing -> err "HEROKU_API_KEY environment variable not set" where encode :: String -> ApiKey encode k = let encoded = BS.encode $ ":" `BS.append` BS.pack k in "Basic " `BS.append` encoded err :: String -> IO a err msg = do hPutStrLn stderr $ "Error: " ++ msg exitFailure
pbrisbin/heroku-build
main.hs
Haskell
bsd-3-clause
1,455
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module Orphans () where import Data.Vector (Vector) import qualified Data.Vector as V import Test.SmallCheck.Series as SC import Test.Tasty.QuickCheck as QC instance Serial m a => Serial m (Vector a) where series = fmap V.fromList series instance Arbitrary a => Arbitrary (Vector a) where arbitrary = fmap V.fromList arbitrary shrink = map V.fromList . shrink . V.toList
quchen/qa-playground
test/Testsuite/Orphans.hs
Haskell
bsd-3-clause
561
{-# LANGUAGE TypeFamilies, DeriveFunctor, DeriveDataTypeable #-} module Test.LazySmallCheck2012.FunctionalValues.Instances where import Control.Applicative import Data.Data import Data.Typeable import Test.LazySmallCheck2012.Instances import Test.LazySmallCheck2012.FunctionalValues instance Argument () where type Base () = () toBase = id fromBase = id instance (Argument a, Argument b) => Argument (a, b) where type Base (a, b) = (BaseCast a, BaseCast b) toBase (i, j) = (toBaseCast i, toBaseCast j) fromBase (i, j) = (fromBaseCast i, fromBaseCast j) instance (Argument a, Argument b, Argument c) => Argument (a, b, c) where type Base (a, b, c) = (BaseCast a, (BaseCast b, BaseCast c)) toBase (i, j, k) = (toBaseCast i, (toBaseCast j, toBaseCast k)) fromBase (i, (j, k)) = (fromBaseCast i, fromBaseCast j, fromBaseCast k) instance (Argument a, Argument b, Argument c, Argument d) => Argument (a, b, c, d) where type Base (a, b, c, d) = (BaseCast a, (BaseCast b, (BaseCast c, BaseCast d))) toBase (i, j, k, l) = (toBaseCast i, (toBaseCast j, (toBaseCast k, toBaseCast l))) fromBase (i, (j, (k, l))) = (fromBaseCast i, fromBaseCast j, fromBaseCast k, fromBaseCast l) instance (Argument a, Argument b, Argument c, Argument d, Argument e) => Argument (a, b, c, d, e) where type Base (a, b, c, d, e) = (BaseCast a, (BaseCast b, (BaseCast c, (BaseCast d, BaseCast e)))) toBase (i, j, k, l, m) = (toBaseCast i, (toBaseCast j, (toBaseCast k, (toBaseCast l, toBaseCast m)))) fromBase (i, (j, (k, (l, m)))) = (fromBaseCast i, fromBaseCast j, fromBaseCast k, fromBaseCast l, fromBaseCast m) instance Argument a => Argument [a] where type Base [a] = Either () (BaseCast a, BaseCast [a]) toBase [] = Left () toBase (x:xs) = Right (toBaseCast x, toBaseCast xs) fromBase (Left ()) = [] fromBase (Right (x, xs)) = (fromBaseCast x:fromBaseCast xs) instance Argument Bool where type Base Bool = Either () () toBase False = Left () toBase True = Right () fromBase (Left ()) = False fromBase (Right ()) = True 
instance Argument a => Argument (Maybe a) where type Base (Maybe a) = Either () (BaseCast a) toBase Nothing = Left () toBase (Just x) = Right (toBaseCast x) fromBase (Left ()) = Nothing fromBase (Right x) = Just (fromBaseCast x) instance (Argument a, Argument b) => Argument (Either a b) where type Base (Either a b) = Either (BaseCast a) (BaseCast b) toBase = either (Left . toBaseCast) (Right . toBaseCast) fromBase = either (Left . fromBaseCast) (Right . fromBaseCast) instance Argument Nat where type Base Nat = Nat toBase = id fromBase = id instance Argument Int where type Base Int = Nat toBase = fst $ isoIntNat 0 fromBase = snd $ isoIntNat 0 instance Argument Char where type Base Char = Nat toBase = (fst $ isoIntNat (fromEnum 'a')) . fromEnum fromBase = toEnum . (snd $ isoIntNat (fromEnum 'a'))
UoYCS-plasma/LazySmallCheck2012
Test/LazySmallCheck2012/FunctionalValues/Instances.hs
Haskell
bsd-3-clause
2,972
{-# LANGUAGE OverloadedStrings #-} module Data.LTSV.String ( ltsv , record , encode , decode , decodeLTSV , ToRecord(..) , FromRecord(..) , Field, FieldMap, Record, LTSV ) where import Control.Monad (liftM2) import Control.Applicative ((<*)) import Text.Parsec ( parse , Parsec , newline, sepBy, tab, char, many1, alphaNum, oneOf, many, noneOf, (<|>), try ) import Data.List (intercalate) import qualified Data.Map as Map type Field = (String, String) type FieldMap = Map.Map String String type Record = [Field] type LTSV = [Record] -- | -- LTSV format parser. -- -- >>> import Text.Parsec (parse) -- >>> parse ltsv "(stdin)" "aaa:111\tbbb:222" -- Right [[("aaa","111"),("bbb","222")]] -- >>> parse ltsv "(stdin)" "aaa:111\tbbb:222\nccc:333\tddd:444" -- Right [[("aaa","111"),("bbb","222")],[("ccc","333"),("ddd","444")]] -- >>> parse ltsv "(stdin)" "aaa:111\tbbb:222\nccc:333\tddd:444\n" -- Right [[("aaa","111"),("bbb","222")],[("ccc","333"),("ddd","444")]] -- ltsv :: Parsec String () LTSV ltsv = do rs <- many $ try recordNL r <- record return $ if null r then rs else rs ++ [r] recordNL :: Parsec String () Record recordNL = record <* newline record :: Parsec String () Record record = sepBy field tab where field = liftM2 (,) (label <* colon) value label = many1 lbyte lbyte = alphaNum <|> oneOf "_.-" value = many $ noneOf "\t\n\r" colon = char ':' class ToRecord a where toRecord :: a -> Record class FromRecord a where fromRecord :: FieldMap -> Maybe a -- | -- Serialize a record value as a String. -- -- >>> data Person = Person { name :: String, age :: Int } deriving Show -- >>> instance ToRecord Person where toRecord p = [("name", name p), ("age", show . age $ p)] -- >>> encode $ Person "krdlab" 128 -- "name:krdlab\tage:128" -- encode :: (ToRecord a) => a -> String encode = serialize . toRecord serialize :: Record -> String serialize [] = "" serialize ts = intercalate "\t" $ map s ts where s (l, v) = l ++ ":" ++ v -- | -- deserialize a record value from a String. 
-- -- >>> import Control.Applicative ((<$>)) -- >>> data Person = Person { name :: String, age :: Int } deriving Show -- >>> instance FromRecord Person where fromRecord m = liftM2 Person (Map.lookup "name" m) (read <$> Map.lookup "age" m) -- >>> decode "name:krdlab\tage:128" :: Maybe Person -- Just (Person {name = "krdlab", age = 128}) -- decode :: (FromRecord a) => String -> Maybe a decode = decodeWith record fromRecord decodeWith :: Parsec String () Record -> (FieldMap -> Maybe a) -> String -> Maybe a decodeWith p conv s = case parse p "(decode)" s of Right r -> conv $ Map.fromList r _ -> Nothing decodeLTSV :: (FromRecord a) => LTSV -> Maybe [a] decodeLTSV = mapM (fromRecord . Map.fromList)
krdlab/haskell-ltsv
Data/LTSV/String.hs
Haskell
bsd-3-clause
2,981
module Language.Java.JVM.Generator where import Language.Java.JVM.JavapParser import Language.Java.JVM.SignatureParser import Control.Monad.Identity import Control.Monad.State import Data.Char (toUpper) import Data.List (foldl') import qualified Data.Set as Set import Language.Haskell.Exts.Syntax import Text.Parsec import System.Info import System.FilePath import System.Process addExeExtension :: FilePath -> String addExeExtension fn= case os of "mingw32"->addExtension fn "exe" "cygwin32"->addExtension fn "exe" "win32"->addExtension fn "exe" _->fn runJavap :: FilePath -> String -> IO(String) runJavap javaHome className=do let javapPath=javaHome </> "bin" </> (addExeExtension "javap") s<-readProcess javapPath ["-s",className] "" let l=lines s case l of (_:xs)-> return $ unlines xs _ -> return "" parseClass :: FilePath -> String -> IO (Either ParseError TypeDecl) parseClass javaHome className=do contents<-runJavap javaHome className return $ parseTypeDecl contents generate :: TypeDecl -> (Module,FilePath) generate td=let cls=td_name td moduleName=map toMod (zip [0..] 
cls) fp=addExtension moduleName "hs" decls=concat $ runIdentity $ evalStateT (mapM (generateDecl cls) (td_decls td)) ((SrcLoc fp 6 1),Set.empty) impTypes=ImportDecl (SrcLoc fp 3 1) (ModuleName "Language.Java.JVM.Types") False False Nothing Nothing Nothing impAPI=ImportDecl (SrcLoc fp 3 1) (ModuleName "Language.Java.JVM.API") False False Nothing Nothing Nothing in (Module (SrcLoc fp 2 1) (ModuleName ("Language.Java.JVM.Bindings."++moduleName)) [LanguagePragma (SrcLoc fp 1 1) [Ident "RankNTypes"]] Nothing Nothing [impTypes,impAPI] decls,fp) where toMod (_,'/')='_' toMod (0,a)=toUpper a toMod (_,a)=a generateDecl :: String -> JDecl -> SrcLocT [Decl] generateDecl cls (JMethodDecl name signature static)=do slTyp<-srcLoc slFun<-srcLoc id<-identName name let Right (JSignature params ret)=parseSignature signature (exp,ret')=if name=="<init>" then (App (App (Var (UnQual (Ident "newObject"))) (Lit $ String cls)) (Lit $ String signature),Just "JObj") else let methoddef=App (App (App (Con (UnQual (Ident "Method"))) (Lit $ String cls)) (Lit $ String name)) (Lit $ String signature) methodInvocation=Var (UnQual (Ident $ wrapperToMethod ret)) obj=Var (UnQual (Ident "obj")) in (App (App methodInvocation obj) methoddef,ret) pats=zipWith (\_ idx->PVar $ Ident ("p"++(show idx))) params [0..] patsWithObj=if name /= "<init>" && (not static) then ((PVar $ Ident "obj") : pats) else pats parms=zipWith (\w idx->App (Var $ UnQual $ Ident w) $ cast w (Var $ UnQual $ Ident ("p"++(show idx)))) params [0..] 
rhs=UnGuardedRhs $ App exp $ List parms m0=Match slFun (Ident id) patsWithObj Nothing rhs (BDecls []) retType=TyApp (TyVar $ Ident "m") (TyVar $ Ident $ wrapperToUnwrapped ret') paramType=foldl' (\t p->(TyFun (TyVar $ Ident $ wrapperToUnwrapped $ Just p) t)) retType params objType=if name=="<init>" then paramType else (TyFun (TyVar $ Ident "JObjectPtr") paramType) typ=TyForall Nothing [ClassA (UnQual $ Ident "WithJava") [(TyVar $ Ident "m")]] objType sig=TypeSig slFun [Ident id] typ return $ [sig,FunBind [m0]] generateDecl cls (JFieldDecl name signature static)=undefined type SrcLocT=StateT GenState Identity type GenState=(SrcLoc,Set.Set String) srcLoc :: SrcLocT SrcLoc srcLoc= do sl<-gets fst modify (\((SrcLoc fp l c),g)->(SrcLoc fp (l+1) c,g)) return sl identName:: String -> SrcLocT String identName n=do names<-gets snd let n'=if n=="<init>" then "new" else n possibleNames=[n'] ++ (map (\idx->(n')++(show idx)) [1..]) okNames=filter (\pn->Set.notMember pn names) possibleNames firstOK=head okNames modify (\(s,ns)->(s,Set.insert firstOK ns)) return firstOK cast :: String -> Exp -> Exp cast "JLong"= App (Var $ UnQual $ Ident "fromIntegral") cast "JInt"= App (Var $ UnQual $ Ident "fromIntegral") cast _=id wrapperToUnwrapped :: Maybe String -> String wrapperToUnwrapped (Just "JObj")="JObjectPtr" wrapperToUnwrapped (Just "JInt")="Integer" wrapperToUnwrapped (Just "JBool")="Bool" wrapperToUnwrapped (Just "JByte")="Int" wrapperToUnwrapped (Just "JChar")="Char" wrapperToUnwrapped (Just "JShort")="Int" wrapperToUnwrapped (Just "JLong")="Integer" wrapperToUnwrapped (Just "JFloat")="Float" wrapperToUnwrapped (Just "JDouble")="Double" wrapperToUnwrapped Nothing="()" wrapperToUnwrapped (Just a)=error ("undefined wrapper"++a) wrapperToMethod :: Maybe String -> String wrapperToMethod (Just "JObj")="objectMethod" wrapperToMethod (Just "JInt")="intMethod" wrapperToMethod (Just "JBool")="booleanMethod" wrapperToMethod (Just "JByte")="byteMethod" wrapperToMethod (Just 
"JChar")="charMethod" wrapperToMethod (Just "JShort")="shortMethod" wrapperToMethod (Just "JLong")="longMethod" wrapperToMethod (Just "JFloat")="floatMethod" wrapperToMethod (Just "JDouble")="doubleMethod" wrapperToMethod Nothing="voidMethod" wrapperToMethod (Just a)=error ("undefined wrapper"++a)
JPMoresmau/HJVM
src/Language/Java/JVM/Generator.hs
Haskell
bsd-3-clause
5,998