code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
-------------------------------------------------------------------------------
import Control.Lens hiding (elements)
import Control.Monad
import Control.Monad.Trans
import qualified Data.Configurator as C
import Data.Maybe
import Data.Map.Syntax ((##))
import Heist
import Heist.Charade
import Snap
import Snap.Extras.SpliceUtils.Interpreted
import Snap.Extras.Tabs
import Snap.Util.FileServe
import Snap.Snaplet.Heist
import System.FilePath
------------------------------------------------------------------------------
-- | Application state: a single Heist snaplet.
data App = App
    { _heist :: Snaplet (Heist App)  -- ^ the Heist templating snaplet
    }

-- Generate the '_heist' lens via Template Haskell.
makeLenses ''App

-- Route all Heist operations through the snaplet's lens.
instance HasHeist App where
    heistLens = subSnaplet heist
------------------------------------------------------------------------------
-- The web app
------------------------------------------------------------------------------
-- | Initialize the charade application: read required settings from
-- @charade.cfg@, wire up Heist with the static-scripts splice, register
-- routes, and load templates from the configured directory.
--
-- NOTE(review): the three config lookups use 'error' as the missing-value
-- fallback, so a bad config crashes at first use rather than at startup.
charadeAppInit :: SnapletInit App App
charadeAppInit = makeSnaplet "charade" "A heist charade" Nothing $ do
    rootDir <- getSnapletFilePath
    cfg <- getSnapletUserConfig
    -- Template directory, relative to the snaplet root; required.
    tdir <- liftM (fromMaybe (error "Must specify tdir in charade.cfg")) $
            liftIO $ C.lookup cfg "tdir"
    staticRoute <- liftIO $ C.lookupDefault
      (error "charade: Must specify staticRoute in charade.cfg")
      cfg "staticRoute"
    staticDir <- liftIO $ C.lookupDefault
      (error "charade: Must specify staticDir in charade.cfg")
      cfg "staticDir"
    -- I didn't use the "templates" directory like we usually use.  This
    -- probably needs to be a configurable parameter.
    h <- nestSnaplet "heist" heist $
         heistInit' "" $ emptyHeistConfig
           & hcLoadTimeSplices .~ defaultLoadTimeSplices
           & hcInterpretedSplices .~
             ("staticscripts" ## scriptsSplice "static/js" "/")
    -- Heist serves everything by default; the reload endpoint is restricted
    -- to local requests.
    addRoutes [ ("", heistServe)
              , ("heist/heistReload", with heist $ failIfNotLocal heistReloader)
              , (staticRoute, serveDirectory staticDir)
              ]
    initTabs h
    addTemplatesAt h "" (rootDir </> tdir)
    charadeInit h cfg
    return $ App h
-- | Build the charade site from its snaplet initializer and serve it
-- over HTTP with default settings.
main :: IO ()
main =
    runSnaplet (Just "charade") charadeAppInit >>= \(_, site, _) ->
      quickHttpServe site
|
Soostone/charade
|
src/Charade.hs
|
bsd-3-clause
| 2,431
| 0
| 15
| 635
| 465
| 245
| 220
| 50
| 1
|
{- |
Module : TestPuffyToolsJournal
Description : Test the PuffyTools journal functions
Copyright : 2014, Peter Harpending
License : BSD3
Maintainer : Peter Harpending <pharpend2@gmail.com>
Stability : experimental
Portability : Linux
-}
module TestPuffyToolsJournal where
import Control.Applicative ((<$>), (<*>), pure)
import Control.Monad ((<=<))
import Data.Aeson
import qualified Data.Text as T
import Data.Time
import qualified Data.Vector as V
import PuffyTools.Journal
import System.IO.Unsafe
import TestPuffyToolsSlug ()
import Test.QuickCheck
-- |So, it turns out, encoding & decoding isn't an identity. However,
-- (encode . decode . encode) = encode, and (decode . encode . decode)
-- = decode
--
-- https://github.com/liyang/thyme/issues/12
--
-- First, (encode . decode . encode) = (encode)
prop_encDecEnc :: Journal -> Bool
prop_encDecEnc j = fmap encode (de j) == Just (encode j)
-- |This is (decode . encode . decode . encode) = (encode . decode)
prop_decEncDecEnc :: Journal -> Bool
prop_decEncDecEnc j = (de j >>= de) == de j
-- |For the hell of it, we'll decode/encode a bunch of times
prop_dEn
  :: Journal -- ^Journal
  -> Natural -- ^Number of times to execute decode/encode
  -> Bool
prop_dEn j n = de j == go (de j) [Natural 1 .. n]
  where
    -- Repeatedly re-run decode-after-encode, once per list element.
    go acc []       = acc
    go acc (_:rest) = go (acc >>= de) rest
-- |This is a helper function: one round-trip through Aeson.
de :: Journal -> Maybe Journal
de j = decode (encode j)
--
-- For reference
--
-- -- |A Journal is really a wrapper around a list of entries
-- data Journal = Journal { journalTitle :: Text
-- , journalLastEdited :: UTCTime
-- , journalCreated :: UTCTime
-- , journalDescription :: Maybe Text
-- , journalEntries :: Vector Entry
-- }
-- deriving (Show, Eq)
-- -- |Entries
-- data Entry = Entry { entrySummary :: Text
-- , entryCreated :: UTCTime
-- , entryLastEdited :: UTCTime
-- , entryDescription :: Maybe Text
-- }
-- deriving (Show, Eq)
-- | Text generated from an arbitrary 'String'.
instance Arbitrary T.Text where
  arbitrary = T.pack <$> arbitrary

-- NOTE(review): 'unsafePerformIO' means every generated value is the same
-- (process-start) timestamp, not a random time — confirm this is intended.
instance Arbitrary UTCTime where
  arbitrary = pure $ unsafePerformIO getCurrentTime

-- NOTE(review): the reference comment below lists four 'Entry' fields but
-- only three generators appear here — verify against the real definition.
instance Arbitrary Entry where
  arbitrary = Entry <$> arbitrary <*> arbitrary <*> arbitrary

-- | Vectors are built from arbitrary lists.
instance Arbitrary x => Arbitrary (V.Vector x) where
  arbitrary = V.fromList <$> arbitrary

-- | One generator per 'Journal' field.
instance Arbitrary Journal where
  arbitrary = Journal <$> arbitrary
                      <*> arbitrary
                      <*> arbitrary
                      <*> arbitrary
                      <*> arbitrary
                      <*> arbitrary
-- This type is NOT isomorphic!
newtype Natural = Natural { unNatural :: Int }
  deriving (Eq, Show)

-- | 'toEnum' normalises through 'mkNatural'; 'fromEnum' just unwraps,
-- so the pair is not a bijection (see the note above).
instance Enum Natural where
  toEnum = mkNatural
  fromEnum = unNatural

-- | Force any 'Int' into a strictly positive 'Natural': zero becomes one,
-- negative values are negated.
mkNatural :: Int -> Natural
mkNatural i
  | i == 0    = Natural 1
  | i < 0     = Natural (negate i)
  | otherwise = Natural i
-- | Only valid (strictly positive) naturals are generated, by normalising
-- an arbitrary 'Int' through 'mkNatural'.
instance Arbitrary Natural where
  arbitrary = mkNatural <$> arbitrary
|
pharpend/puffytools
|
test/TestPuffyToolsJournal.hs
|
bsd-3-clause
| 3,172
| 0
| 11
| 936
| 547
| 311
| 236
| 49
| 1
|
-- Copyright (c) 2006-2010
-- The President and Fellows of Harvard College.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- 3. Neither the name of the University nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
-- THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY AND CONTRIBUTORS ``AS IS'' AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-- ARE DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-- OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-- HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-- LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-- OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
--------------------------------------------------------------------------------
-- |
-- Module : Language.C.Syntax
-- Copyright : (c) Harvard University 2006-2010
-- License : BSD-style
-- Maintainer : mainland@eecs.harvard.edu
--
--------------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
module Language.C.Syntax where
import Data.Generics
import Data.Loc
-- | C dialect extensions that may be enabled.
data Extensions = Gcc
                | CUDA
                | OpenCL
  deriving (Eq, Ord, Enum, Show)
-- | A C identifier, or an antiquoted identifier splice.
data Id = Id String !SrcLoc
        | AntiId String !SrcLoc
  deriving (Eq, Ord, Data, Typeable)
data Storage = Tauto !SrcLoc
| Tregister !SrcLoc
| Tstatic !SrcLoc
| Textern !SrcLoc
| TexternL String !SrcLoc
| Ttypedef !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data TypeQual = Tconst !SrcLoc
| Tvolatile !SrcLoc
| Tinline !SrcLoc
-- C99
| Trestrict !SrcLoc
-- CUDA
| Tdevice !SrcLoc
| Tglobal !SrcLoc
| Thost !SrcLoc
| Tconstant !SrcLoc
| Tshared !SrcLoc
| Tnoinline !SrcLoc
-- OpenCL
| TCLPrivate !SrcLoc
| TCLLocal !SrcLoc
| TCLGlobal !SrcLoc
| TCLConstant !SrcLoc
| TCLReadOnly !SrcLoc
| TCLWriteOnly !SrcLoc
| TCLKernel !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Sign = Tsigned !SrcLoc
| Tunsigned !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data TypeSpec = Tvoid !SrcLoc
| Tchar (Maybe Sign) !SrcLoc
| Tshort (Maybe Sign) !SrcLoc
| Tint (Maybe Sign) !SrcLoc
| Tlong (Maybe Sign) !SrcLoc
| Tlong_long (Maybe Sign) !SrcLoc
| Tfloat !SrcLoc
| Tdouble !SrcLoc
| Tlong_double !SrcLoc
| Tstruct (Maybe Id) (Maybe [FieldGroup]) [Attr] !SrcLoc
| Tunion (Maybe Id) (Maybe [FieldGroup]) [Attr] !SrcLoc
| Tenum (Maybe Id) [CEnum] [Attr] !SrcLoc
| Tnamed Id !SrcLoc
| TtypeofExp Exp !SrcLoc
| TtypeofType Type !SrcLoc
| Tva_list !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data DeclSpec = DeclSpec [Storage] [TypeQual] TypeSpec !SrcLoc
| AntiDeclSpec String !SrcLoc
| AntiTypeDeclSpec [Storage] [TypeQual] String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
-- | There are two types of declarators in C, regular declarators and abstract
-- declarators. The former is for declaring variables, function parameters,
-- typedefs, etc. and the latter for abstract types---@typedef int
-- ({*}foo)(void)@ vs. @\tt int ({*})(void)@. The difference between the two is
-- just whether or not an identifier is attached to the declarator. We therefore
-- only define one 'Decl' type and use it for both cases.
data ArraySize = ArraySize Bool Exp !SrcLoc
| VariableArraySize !SrcLoc
| NoArraySize !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Decl = DeclRoot !SrcLoc
| Ptr [TypeQual] Decl !SrcLoc
| Array [TypeQual] ArraySize Decl !SrcLoc
| Proto Decl Params !SrcLoc
| OldProto Decl [Id] !SrcLoc
| AntiTypeDecl String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Type = Type DeclSpec Decl !SrcLoc
| AntiType String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Designator = IndexDesignator Exp !SrcLoc
| MemberDesignator Id !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Designation = Designation [Designator] !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Initializer = ExpInitializer Exp !SrcLoc
| CompoundInitializer [(Maybe Designation, Initializer)] !SrcLoc
deriving (Eq, Ord, Data, Typeable)
type AsmLabel = String
data Init = Init Id Decl (Maybe AsmLabel) (Maybe Initializer) [Attr] !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Typedef = Typedef Id Decl [Attr] !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data InitGroup = InitGroup DeclSpec [Attr] [Init] !SrcLoc
| TypedefGroup DeclSpec [Attr] [Typedef] !SrcLoc
| AntiDecl String !SrcLoc
| AntiDecls String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Field = Field (Maybe Id) (Maybe Decl) (Maybe Exp) !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data FieldGroup = FieldGroup DeclSpec [Field] !SrcLoc
| AntiSdecl String !SrcLoc
| AntiSdecls String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data CEnum = CEnum Id (Maybe Exp) !SrcLoc
| AntiEnum String !SrcLoc
| AntiEnums String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Attr = Attr Id [Exp] !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Param = Param (Maybe Id) DeclSpec Decl !SrcLoc
| AntiParam String !SrcLoc
| AntiParams String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Params = Params [Param] Bool !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Func = Func DeclSpec Id Decl Params [BlockItem] !SrcLoc
| OldFunc DeclSpec Id Decl [Id] (Maybe [InitGroup]) [BlockItem] !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Definition = FuncDef Func !SrcLoc
| DecDef InitGroup !SrcLoc
| EscDef String !SrcLoc
| AntiFunc String !SrcLoc
| AntiEsc String !SrcLoc
| AntiEdecl String !SrcLoc
| AntiEdecls String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data Stm = Label Id Stm !SrcLoc
| Case Exp Stm !SrcLoc
| Default Stm !SrcLoc
| Exp (Maybe Exp) !SrcLoc
| Block [BlockItem] !SrcLoc
| If Exp Stm (Maybe Stm) !SrcLoc
| Switch Exp Stm !SrcLoc
| While Exp Stm !SrcLoc
| DoWhile Stm Exp !SrcLoc
| For (Either InitGroup (Maybe Exp)) (Maybe Exp) (Maybe Exp) Stm
!SrcLoc
| Goto Id !SrcLoc
| Continue !SrcLoc
| Break !SrcLoc
| Return (Maybe Exp) !SrcLoc
| Asm Bool [Attr] [String] [(String, Exp)] [(String, Exp)] [String] !SrcLoc
| AntiStm String !SrcLoc
| AntiStms String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data BlockItem = BlockDecl InitGroup
| BlockStm Stm
| AntiBlockItem String !SrcLoc
| AntiBlockItems String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
-- | Build the 'InitGroup' declaring a function's prototype; every location
-- in the result is the location of the function itself.
funcProto :: Func -> InitGroup
funcProto f@(Func dspec ident decl params _ _) =
    InitGroup dspec []
      [Init ident (Proto decl params loc) Nothing Nothing [] loc] loc
  where
    loc = locOf f
funcProto f@(OldFunc dspec ident decl params _ _ _) =
    InitGroup dspec []
      [Init ident (OldProto decl params loc) Nothing Nothing [] loc] loc
  where
    loc = locOf f
-- | Does this type's declarator denote a pointer or an array?
-- Antiquoted fragments cannot be answered and raise an error.
isPtr :: Type -> Bool
isPtr (AntiType _ _)  = error "isPtr: encountered antiquoted type"
isPtr (Type _ decl _) = pointerish decl
  where
    pointerish d = case d of
      Ptr {}           -> True
      Array {}         -> True
      DeclRoot _       -> False
      Proto {}         -> False
      OldProto {}      -> False
      AntiTypeDecl _ _ -> error "isPtr: encountered antiquoted type declaration"
data Signed = Signed
| Unsigned
deriving (Eq, Ord, Data, Typeable)
data Const = IntConst String Signed Integer !SrcLoc
| LongIntConst String Signed Integer !SrcLoc
| LongLongIntConst String Signed Integer !SrcLoc
| FloatConst String Rational !SrcLoc
| DoubleConst String Rational !SrcLoc
| LongDoubleConst String Rational !SrcLoc
| CharConst String Char !SrcLoc
| StringConst [String] String !SrcLoc
| AntiInt String !SrcLoc
| AntiUInt String !SrcLoc
| AntiLInt String !SrcLoc
| AntiULInt String !SrcLoc
| AntiFloat String !SrcLoc
| AntiDouble String !SrcLoc
| AntiLongDouble String !SrcLoc
| AntiChar String !SrcLoc
| AntiString String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data ExeConfig = ExeConfig
{ exeGridDim :: Exp
, exeBlockDim :: Exp
, exeSharedSize :: Maybe Exp
, exeStream :: Maybe Exp
, exeLoc :: !SrcLoc
}
deriving (Eq, Ord, Data, Typeable)
data Exp = Var Id !SrcLoc
| Const Const !SrcLoc
| BinOp BinOp Exp Exp !SrcLoc
| Assign Exp AssignOp Exp !SrcLoc
| PreInc Exp !SrcLoc
| PostInc Exp !SrcLoc
| PreDec Exp !SrcLoc
| PostDec Exp !SrcLoc
| UnOp UnOp Exp !SrcLoc
| SizeofExp Exp !SrcLoc
| SizeofType Type !SrcLoc
| Cast Type Exp !SrcLoc
| Cond Exp Exp Exp !SrcLoc
| Member Exp Id !SrcLoc
| PtrMember Exp Id !SrcLoc
| Index Exp Exp !SrcLoc
| FnCall Exp [Exp] !SrcLoc
| CudaCall Exp ExeConfig [Exp] !SrcLoc
| Seq Exp Exp !SrcLoc
| CompoundLit Type [(Maybe Designation, Initializer)] !SrcLoc
| StmExpr [BlockItem] !SrcLoc
| BuiltinVaArg Exp Type !SrcLoc
| AntiExp String !SrcLoc
| AntiArgs String !SrcLoc
deriving (Eq, Ord, Data, Typeable)
data BinOp = Add
| Sub
| Mul
| Div
| Mod
| Eq
| Ne
| Lt
| Gt
| Le
| Ge
| Land
| Lor
| And
| Or
| Xor
| Lsh
| Rsh
deriving (Eq, Ord, Data, Typeable)
data AssignOp = JustAssign
| AddAssign
| SubAssign
| MulAssign
| DivAssign
| ModAssign
| LshAssign
| RshAssign
| AndAssign
| XorAssign
| OrAssign
deriving (Eq, Ord, Data, Typeable)
data UnOp = AddrOf
| Deref
| Positive
| Negate
| Not
| Lnot
deriving (Eq, Ord, Data, Typeable)
instance Located Id where
getLoc (Id _ loc) = getLoc loc
getLoc (AntiId _ loc) = getLoc loc
instance Located Storage where
getLoc (Tauto loc) = getLoc loc
getLoc (Tregister loc) = getLoc loc
getLoc (Tstatic loc) = getLoc loc
getLoc (Textern loc) = getLoc loc
getLoc (TexternL _ loc) = getLoc loc
getLoc (Ttypedef loc) = getLoc loc
instance Located TypeQual where
getLoc (Tconst loc) = getLoc loc
getLoc (Tvolatile loc) = getLoc loc
getLoc (Tinline loc) = getLoc loc
getLoc (Trestrict loc) = getLoc loc
getLoc (Tdevice loc) = getLoc loc
getLoc (Tglobal loc) = getLoc loc
getLoc (Thost loc) = getLoc loc
getLoc (Tconstant loc) = getLoc loc
getLoc (Tshared loc) = getLoc loc
getLoc (Tnoinline loc) = getLoc loc
getLoc (TCLPrivate loc) = getLoc loc
getLoc (TCLLocal loc) = getLoc loc
getLoc (TCLGlobal loc) = getLoc loc
getLoc (TCLConstant loc) = getLoc loc
getLoc (TCLReadOnly loc) = getLoc loc
getLoc (TCLWriteOnly loc) = getLoc loc
getLoc (TCLKernel loc) = getLoc loc
instance Located Sign where
getLoc (Tsigned loc) = getLoc loc
getLoc (Tunsigned loc) = getLoc loc
instance Located TypeSpec where
getLoc (Tvoid loc) = getLoc loc
getLoc (Tchar _ loc) = getLoc loc
getLoc (Tshort _ loc) = getLoc loc
getLoc (Tint _ loc) = getLoc loc
getLoc (Tlong _ loc) = getLoc loc
getLoc (Tlong_long _ loc) = getLoc loc
getLoc (Tfloat loc) = getLoc loc
getLoc (Tdouble loc) = getLoc loc
getLoc (Tlong_double loc) = getLoc loc
getLoc (Tstruct _ _ _ loc) = getLoc loc
getLoc (Tunion _ _ _ loc) = getLoc loc
getLoc (Tenum _ _ _ loc) = getLoc loc
getLoc (Tnamed _ loc) = getLoc loc
getLoc (TtypeofExp _ loc) = getLoc loc
getLoc (TtypeofType _ loc) = getLoc loc
getLoc (Tva_list loc) = getLoc loc
instance Located DeclSpec where
getLoc (DeclSpec _ _ _ loc) = getLoc loc
getLoc (AntiDeclSpec _ loc) = getLoc loc
getLoc (AntiTypeDeclSpec _ _ _ loc) = getLoc loc
instance Located ArraySize where
getLoc (ArraySize _ _ loc) = getLoc loc
getLoc (VariableArraySize loc) = getLoc loc
getLoc (NoArraySize loc) = getLoc loc
instance Located Decl where
getLoc (DeclRoot loc) = getLoc loc
getLoc (Ptr _ _ loc) = getLoc loc
getLoc (Array _ _ _ loc) = getLoc loc
getLoc (Proto _ _ loc) = getLoc loc
getLoc (OldProto _ _ loc) = getLoc loc
getLoc (AntiTypeDecl _ loc) = getLoc loc
instance Located Type where
getLoc (Type _ _ loc) = getLoc loc
getLoc (AntiType _ loc) = getLoc loc
instance Located Designator where
getLoc (IndexDesignator _ loc) = getLoc loc
getLoc (MemberDesignator _ loc) = getLoc loc
instance Located Designation where
getLoc (Designation _ loc) = getLoc loc
instance Located Initializer where
getLoc (ExpInitializer _ loc) = getLoc loc
getLoc (CompoundInitializer _ loc) = getLoc loc
instance Located Init where
getLoc (Init _ _ _ _ _ loc) = getLoc loc
instance Located Typedef where
getLoc (Typedef _ _ _ loc) = getLoc loc
instance Located InitGroup where
getLoc (InitGroup _ _ _ loc) = getLoc loc
getLoc (TypedefGroup _ _ _ loc) = getLoc loc
getLoc (AntiDecl _ loc) = getLoc loc
getLoc (AntiDecls _ loc) = getLoc loc
instance Located Field where
getLoc (Field _ _ _ loc) = getLoc loc
instance Located FieldGroup where
getLoc (FieldGroup _ _ loc) = getLoc loc
getLoc (AntiSdecl _ loc) = getLoc loc
getLoc (AntiSdecls _ loc) = getLoc loc
instance Located CEnum where
getLoc (CEnum _ _ loc) = getLoc loc
getLoc (AntiEnum _ loc) = getLoc loc
getLoc (AntiEnums _ loc) = getLoc loc
instance Located Attr where
getLoc (Attr _ _ loc) = getLoc loc
instance Located Param where
getLoc (Param _ _ _ loc) = getLoc loc
getLoc (AntiParam _ loc) = getLoc loc
getLoc (AntiParams _ loc) = getLoc loc
instance Located Params where
getLoc (Params _ _ loc) = getLoc loc
instance Located Func where
getLoc (Func _ _ _ _ _ loc) = getLoc loc
getLoc (OldFunc _ _ _ _ _ _ loc) = getLoc loc
instance Located Definition where
getLoc (FuncDef _ loc) = getLoc loc
getLoc (DecDef _ loc) = getLoc loc
getLoc (EscDef _ loc) = getLoc loc
getLoc (AntiFunc _ loc) = getLoc loc
getLoc (AntiEsc _ loc) = getLoc loc
getLoc (AntiEdecl _ loc) = getLoc loc
getLoc (AntiEdecls _ loc) = getLoc loc
instance Located Stm where
getLoc (Label _ _ loc) = getLoc loc
getLoc (Case _ _ loc) = getLoc loc
getLoc (Default _ loc) = getLoc loc
getLoc (Exp _ loc) = getLoc loc
getLoc (Block _ loc) = getLoc loc
getLoc (If _ _ _ loc) = getLoc loc
getLoc (Switch _ _ loc) = getLoc loc
getLoc (While _ _ loc) = getLoc loc
getLoc (DoWhile _ _ loc) = getLoc loc
getLoc (For _ _ _ _ loc) = getLoc loc
getLoc (Goto _ loc) = getLoc loc
getLoc (Continue loc) = getLoc loc
getLoc (Break loc) = getLoc loc
getLoc (Return _ loc) = getLoc loc
getLoc (Asm _ _ _ _ _ _ loc) = getLoc loc
getLoc (AntiStm _ loc) = getLoc loc
getLoc (AntiStms _ loc) = getLoc loc
instance Located BlockItem where
getLoc (BlockDecl decl) = getLoc decl
getLoc (BlockStm stm) = getLoc stm
getLoc (AntiBlockItem _ loc) = getLoc loc
getLoc (AntiBlockItems _ loc) = getLoc loc
instance Located Const where
getLoc (IntConst _ _ _ loc) = getLoc loc
getLoc (LongIntConst _ _ _ loc) = getLoc loc
getLoc (LongLongIntConst _ _ _ loc) = getLoc loc
getLoc (FloatConst _ _ loc) = getLoc loc
getLoc (DoubleConst _ _ loc) = getLoc loc
getLoc (LongDoubleConst _ _ loc) = getLoc loc
getLoc (CharConst _ _ loc) = getLoc loc
getLoc (StringConst _ _ loc) = getLoc loc
getLoc (AntiInt _ loc) = getLoc loc
getLoc (AntiUInt _ loc) = getLoc loc
getLoc (AntiLInt _ loc) = getLoc loc
getLoc (AntiULInt _ loc) = getLoc loc
getLoc (AntiFloat _ loc) = getLoc loc
getLoc (AntiDouble _ loc) = getLoc loc
getLoc (AntiLongDouble _ loc) = getLoc loc
getLoc (AntiChar _ loc) = getLoc loc
getLoc (AntiString _ loc) = getLoc loc
instance Located ExeConfig where
getLoc conf = getLoc (exeLoc conf)
instance Located Exp where
getLoc (Var _ loc) = getLoc loc
getLoc (Const _ loc) = getLoc loc
getLoc (BinOp _ _ _ loc) = getLoc loc
getLoc (Assign _ _ _ loc) = getLoc loc
getLoc (PreInc _ loc) = getLoc loc
getLoc (PostInc _ loc) = getLoc loc
getLoc (PreDec _ loc) = getLoc loc
getLoc (PostDec _ loc) = getLoc loc
getLoc (UnOp _ _ loc) = getLoc loc
getLoc (SizeofExp _ loc) = getLoc loc
getLoc (SizeofType _ loc) = getLoc loc
getLoc (Cast _ _ loc) = getLoc loc
getLoc (Cond _ _ _ loc) = getLoc loc
getLoc (Member _ _ loc) = getLoc loc
getLoc (PtrMember _ _ loc) = getLoc loc
getLoc (Index _ _ loc) = getLoc loc
getLoc (FnCall _ _ loc) = getLoc loc
getLoc (CudaCall _ _ _ loc) = getLoc loc
getLoc (Seq _ _ loc) = getLoc loc
getLoc (CompoundLit _ _ loc) = getLoc loc
getLoc (StmExpr _ loc) = getLoc loc
getLoc (BuiltinVaArg _ _ loc) = getLoc loc
getLoc (AntiExp _ loc) = getLoc loc
getLoc (AntiArgs _ loc) = getLoc loc
-- | Smart constructor for 'Typedef'; the location spans identifier,
-- declarator, and attributes.
ctypedef :: Id -> Decl -> [Attr] -> Typedef
ctypedef id decl attrs =
    Typedef id decl attrs ((id <--> decl :: Loc) <--> attrs)

-- | Smart constructor for 'DeclSpec'; the location spans storage
-- specifiers, qualifiers, and the type specifier.
cdeclSpec :: [Storage] -> [TypeQual] -> TypeSpec -> DeclSpec
cdeclSpec storage quals spec =
    DeclSpec storage quals spec ((storage <--> quals :: Loc) <--> spec)

-- | Smart constructor for 'InitGroup'; the location spans all components.
cinitGroup :: DeclSpec -> [Attr] -> [Init] -> InitGroup
cinitGroup dspec attrs inis =
    InitGroup dspec attrs inis ((dspec <--> attrs :: Loc) <--> inis)

-- | Smart constructor for 'TypedefGroup'; the location spans all components.
ctypedefGroup :: DeclSpec -> [Attr] -> [Typedef] -> InitGroup
ctypedefGroup dspec attrs typedefs =
    TypedefGroup dspec attrs typedefs ((dspec <--> attrs :: Loc) <--> typedefs)
|
HIPERFIT/language-c-quote
|
Language/C/Syntax.hs
|
bsd-3-clause
| 20,604
| 0
| 10
| 6,670
| 6,370
| 3,208
| 3,162
| 747
| 6
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.Temperature.Tests (tests) where
import Prelude
import Data.String
import Test.Tasty
import qualified Duckling.Temperature.EN.Tests as EN
import qualified Duckling.Temperature.ES.Tests as ES
import qualified Duckling.Temperature.FR.Tests as FR
import qualified Duckling.Temperature.GA.Tests as GA
import qualified Duckling.Temperature.HR.Tests as HR
import qualified Duckling.Temperature.IT.Tests as IT
import qualified Duckling.Temperature.JA.Tests as JA
import qualified Duckling.Temperature.KO.Tests as KO
import qualified Duckling.Temperature.PT.Tests as PT
import qualified Duckling.Temperature.RO.Tests as RO
import qualified Duckling.Temperature.ZH.Tests as ZH
-- | Aggregate the per-language temperature suites into a single test tree.
tests :: TestTree
tests = testGroup "Temperature Tests" languageSuites
  where
    languageSuites =
      [ EN.tests
      , ES.tests
      , FR.tests
      , GA.tests
      , HR.tests
      , IT.tests
      , JA.tests
      , KO.tests
      , PT.tests
      , RO.tests
      , ZH.tests
      ]
|
rfranek/duckling
|
tests/Duckling/Temperature/Tests.hs
|
bsd-3-clause
| 1,192
| 0
| 7
| 178
| 211
| 151
| 60
| 28
| 1
|
module Main where

import Music.Prelude.CmdLine

-- | Thin wrapper: run the shared converter driver with the MusicXML
-- backend name and the @xml@ file extension.
main = converterMain "writeMusicXml" "xml"
|
music-suite/music-preludes
|
tools/music2musicxml.hs
|
bsd-3-clause
| 93
| 0
| 5
| 13
| 20
| 12
| 8
| 3
| 1
|
module Model where
import Prelude
import Yesod
import Data.Text (Text)
import Database.Persist.Quasi
import Data.Time
import Data.Int (Int64)
-- | Opaque textual identifier, usable both in routes ('PathPiece') and as
-- a database column ('PersistField').
newtype Identifier = Identifier Text
    deriving (Read, Show, Eq, PathPiece, PersistField)

-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
share [mkPersist sqlOnlySettings, mkMigrate "migrateAll"]
    $(persistFileWith lowerCaseSettings "config/models")
|
ehamberg/rssqueue
|
Model.hs
|
bsd-3-clause
| 546
| 0
| 8
| 75
| 105
| 60
| 45
| -1
| -1
|
module Frenetic.Slices.Compile
( -- * Compilation
transform
, transformEdge
, dynTransform
, compileSlice
, edgeCompileSlice
-- * Internal tools
, modifyVlan
, setVlan
, matchesSwitch
) where
import Control.Monad
import Frenetic.Common
import qualified Data.Map as Map
import qualified Data.Set as Set
import Frenetic.NetCore.Types
import Frenetic.NetCore.Util
import Frenetic.NetCore.Short
import Frenetic.Pattern
import Frenetic.NetCore.Reduce
import Frenetic.NetCore.Pretty
import Frenetic.Slices.Slice
import Frenetic.Slices.VlanAssignment
import Frenetic.Topo
-- |Match a specific vlan tag
vlanMatch :: Vlan -> Predicate
vlanMatch = DlVlan . Just
-- TODO(astory): also take in the packet channel and handle that
-- |Compile a list of slices and dynamic policies as they change.
-- Each slice gets a vlan tag (starting at 1); one thread per slice
-- recompiles on every policy update, and a collector thread unions the
-- latest compiled policies onto the output channel.
dynTransform :: [(Slice, Chan Policy)] -> IO (Chan Policy)
dynTransform combined = do
  updateChan <- newChan :: IO (Chan (Vlan, Policy))
  outputChan <- newChan :: IO (Chan Policy)
  -- Pair each slice with its vlan tag, starting from 1.
  let tagged = zip [1..] combined
  -- Fork off threads to poll the input policy channels, compile them, and write
  -- them into the unified output channel.
  let poll (vlan, (slice, policyChan)) = do
        -- One read-compile-write step; 'forever' below repeats it.
        let loop = do
              update <- readChan policyChan
              let compiled = compileSlice slice vlan update
              writeChan updateChan (vlan, compiled)
        forkIO $ forever $ loop
  mapM_ poll tagged
  -- Poll from the unified channel, and update a map containing the most recent
  -- compiled version of each slice. After each update, take the union and send
  -- the result down the pipe.
  let loop map = do
        (vlan, compiled) <- readChan updateChan
        let map' = Map.insert vlan compiled map
        writeChan outputChan $ mconcat (Map.elems map')
        loop map'
  forkIO $ loop Map.empty
  return outputChan
-- |Produce the combined policy by compiling a list of slices and policies with
-- the vanilla compiler
transform :: [(Slice, Policy)] -> Policy
transform combined =
    mconcat
      [ compileSlice slice vlan policy
      | (vlan, (slice, policy)) <- sequential combined ]
-- |Produce the combined policy by compiling a list of slices and policies with
-- the edge compiler
transformEdge :: Graph -> [(Slice, Policy)] -> Policy
transformEdge topo combined =
    mconcat
      [ edgeCompileSlice slice assignment policy
      | (assignment, (slice, policy)) <- edge topo combined ]
-- TODO(astory): egress predicates
-- |Compile a slice with a vlan key: localize the policy to the slice,
-- isolate it to the vlan's internal ports, add vlan-tagging on ingress,
-- strip the tag on egress, and reduce the result.
compileSlice :: Slice -> Vlan -> Policy -> Policy
compileSlice slice vlan policy =
  -- Input policies may not themselves manipulate VLANs.
  if poUsesVlans policy then error "input policy uses VLANs." else
  let localPolicy = localize slice policy in
  -- A postcondition of localize is that all the forwarding actions of the
  -- policy make sense wrt the slice, and that every PoBasic matches at most one
  -- switch. This is a precondition for outport
  let safePolicy = isolate slice vlan localPolicy in
  let inportPolicy = inportPo slice vlan localPolicy in
  let safeInportPolicy = PoUnion safePolicy inportPolicy in
  reduce $ outport slice safeInportPolicy
-- | Compile a slice with an assignment of VLAN tags to ports. For this to work
-- properly, the assignment of tags to both ends of an edge must be the same
edgeCompileSlice :: Slice -> Map.Map Loc Vlan -> Policy -> Policy
edgeCompileSlice slice assignment policy = mconcat (queryPols : forwardPols)
  where
    localPolicy = localize slice policy
    -- separate out queries to avoid multiplying them during the fracturing that
    -- goes on in creating the internal and external policies.
    queryPols = queryOnly slice assignment localPolicy
    forwardPols = forwardEdges slice assignment localPolicy
-- |Produce a list of policies that together implement just the query portion of
-- the policy running on the slice
queryOnly :: Slice -> Map.Map Loc Vlan -> Policy -> Policy
queryOnly slice assignment policy = justQueries <%> (onSlice <||> inBound) where
  -- Traffic already inside the slice, on any internal port with its tag.
  onSlice = prOr . map onPort . Set.toList $ internal slice
  -- Untagged traffic entering through an ingress location.
  inBound = ingressPredicate slice <&&> (DlVlan Nothing)
  justQueries = removeForwards policy
  -- Match a packet on this internal port carrying the port's assigned vlan,
  -- further restricted by any ingress predicate at that location.
  onPort l@(Loc s p) = inport s p <&&> (DlVlan (Just vlan)) <&&>
                       Map.findWithDefault Any l (ingress slice) where
    vlan = case Map.lookup l assignment of
      Just v -> v
      Nothing -> error $
        "assignment map incomplete at " ++ show l ++
        "\nmap: " ++ show assignment ++
        "\nslice: " ++ show (internal slice)
-- |Remove forwarding actions from policy leaving only queries
removeForwards :: Policy -> Policy
removeForwards p = case p of
  PoBottom          -> PoBottom
  PoBasic pred acts -> pred ==> (filter isQuery acts)
  PoUnion p1 p2     -> PoUnion (removeForwards p1) (removeForwards p2)
  Restrict pol pred -> Restrict (removeForwards pol) pred
  SendPackets chan  -> SendPackets chan -- TODO(arjun): why?
-- |Remove queries from policy leaving only forwarding actions
removeQueries :: Policy -> Policy
removeQueries p = case p of
  PoBottom          -> PoBottom
  PoBasic pred acts -> pred ==> (filter isForward acts)
  PoUnion p1 p2     -> PoUnion (removeQueries p1) (removeQueries p2)
  Restrict pol pred -> Restrict (removeQueries pol) pred
  SendPackets chan  -> SendPackets chan -- TODO(arjun): why?
-- |Remove forwarding actions to ports other than p
justTo :: Port -> Policy -> Policy
justTo port pol = case pol of
  PoBottom -> PoBottom
  PoBasic pred acts ->
    let keep (Forward (Physical p') _) = port == p'
        keep (Forward AllPorts _)      = error "AllPorts found while compiling."
        keep _                         = True
    in pred ==> (filter keep acts)
  PoUnion p1 p2       -> PoUnion (justTo port p1) (justTo port p2)
  Restrict inner pred -> Restrict (justTo port inner) pred
  SendPackets chan    -> SendPackets chan -- TODO(arjun): needs work
-- TODO(astory): egress predicates
-- |Produce a list of policies that together instrument the edge-compiled
-- internal policy. This only considers internal -> internal and internal ->
-- external forwarding.
forwardEdges :: Slice -> Map.Map Loc Vlan -> Policy -> [Policy]
forwardEdges slice assignment policy = concatMap buildPort locs where
  int = internal slice
  ing = Map.keysSet (ingress slice)
  egr = Map.keysSet (egress slice)
  -- Every port of every switch the slice touches, keyed by switch.
  portLookup = portsOfSet (Set.union int (Set.union ing egr))
  -- Traffic can arrive on internal or ingress locations.
  locs = Set.toList (Set.union int ing)
  buildPort :: Loc -> [Policy] -- Get the policies for packets to one location
  buildPort l@(Loc s p) = map hop $ Set.toList destinations where
    destinations = case Map.lookup s portLookup of
      Just dests -> dests
      Nothing -> error "Port lookup malformed."
    -- Ingress traffic is untagged; internal traffic carries its port's vlan.
    ourVlan = if Set.member l ing then DlVlan Nothing
              else case Map.lookup l assignment of
                Just v -> DlVlan (Just v)
                Nothing -> error "Vlan assignment malformed."
    restriction = inport s p <&&>
                  ourVlan <&&>
                  Map.findWithDefault Any l (ingress slice)
    policy' = policy <%> restriction
    hop :: Port -> Policy -- Get the policies for one switch forwarding
    hop port = policy''' where
      loc = Loc s port
      -- Strip the tag when leaving the slice; otherwise retag for the
      -- destination port.
      targetVlan = if Set.member loc egr then Nothing
                   else case Map.lookup loc assignment of
                     Just v -> Just v
                     Nothing -> error "Vlan assignment malformed."
      policy'' = justTo port policy'
      -- It's safe to use modifyVlan because we only have actions on this one
      -- forwarding hop.
      policy''' = modifyVlan targetVlan policy''
-- | Group a set of locations into a map from each switch to the set of
-- its mentioned ports.
portsOfSet :: Set.Set Loc -> Map.Map Switch (Set.Set Port)
portsOfSet locs =
    Map.fromListWith Set.union
      [ (s, Set.singleton p) | Loc s p <- Set.toList locs ]
-- |Produce a policy that only considers traffic on this vlan and on internal
-- ports. Note that if the policy does not modify vlans, then it also only
-- emits traffic on this vlan.
isolate :: Slice -> Vlan -> Policy -> Policy
isolate slice vlan policy = policy <%> (vlanMatch vlan <&&> onInternal)
  where
    onInternal =
      prOr [ inport s p | Loc s p <- Set.toList (internal slice) ]
-- | Predicate matching packets arriving at exactly this location.
locToPred :: Loc -> Predicate
locToPred (Loc sw pt) = inport sw pt
-- |Produce a policy that moves packets into the vlan as defined by the slice's
-- input policy.
inportPo :: Slice -> Vlan -> Policy -> Policy
inportPo slice vlan policy = tagged <%> (incoming <&&> DlVlan Nothing)
  where
    incoming = ingressPredicate slice
    tagged   = modifyVlan (Just vlan) policy
-- |Produce a new policy the same as the old, but wherever a packet leaves an
-- outgoing edge, unset its VLAN.  Precondition: every PoBasic must match at
-- most one switch.
outport :: Slice -> Policy -> Policy
outport slice policy =
  foldr stripVlan policy (Map.keys (egress slice))
-- |Produce a predicate matching any of the inports (and their predicate)
-- specified
ingressPredicate :: Slice -> Predicate
ingressPredicate slice =
  prOr [ ingressSpecToPred spec | spec <- Map.assocs (ingress slice) ]
-- |Produce a predicate matching the ingress predicate at a particular location
ingressSpecToPred :: (Loc, Predicate) -> Predicate
ingressSpecToPred (loc, p) = And p (locToPred loc)
-- |Walk through the policy and globally set VLAN to vlan at each forwarding
-- action
modifyVlan :: Maybe Vlan -> Policy -> Policy
modifyVlan _ PoBottom = PoBottom
modifyVlan vlan (PoBasic prd acts) = prd ==> map retag acts
  where
    -- Only Forward actions carry a VLAN modification; leave others alone.
    retag (Forward p m) = Forward p (m { modifyDlVlan = Just vlan })
    retag other = other
modifyVlan vlan (PoUnion p1 p2) =
  PoUnion (modifyVlan vlan p1) (modifyVlan vlan p2)
modifyVlan vlan (Restrict pol prd) = Restrict (modifyVlan vlan pol) prd
modifyVlan _ (SendPackets chan) = SendPackets chan
-- TODO(arjun): needs work
-- |Unset vlan for packets forwarded to location (without link transfer) and
-- leave rest of policy unchanged.  Note that this assumes that each PoBasic
-- matches at most one switch.
stripVlan :: Loc -> Policy -> Policy
stripVlan loc pol = setVlan Nothing loc pol
-- |Set vlan tag for packets forwarded to location (without link transfer) and
-- leave rest of policy unchanged. Note that this assumes that each PoBasic
-- matches at most one switch.
setVlan :: Maybe Vlan -> Loc -> Policy -> Policy
setVlan _ _ PoBottom = PoBottom
setVlan vlan loc (PoUnion p1 p2) = PoUnion (setVlan vlan loc p1)
                                           (setVlan vlan loc p2)
-- Only rewrite actions of a PoBasic whose predicate can reach this switch;
-- correctness relies on the stated at-most-one-switch precondition.
setVlan vlan (Loc switch port) pol@(PoBasic pred acts) =
  if matchesSwitch switch pred then PoBasic pred m'
  else pol
  where
    m' = map setVlanOnPort acts
    -- Retag only forwards out of the target port; other ports unchanged.
    setVlanOnPort (Forward (Physical p) mod) =
      if p == port then (Forward (Physical p) mod {modifyDlVlan = Just vlan})
      else (Forward (Physical p) mod)
    -- AllPorts cannot be attributed to one port; localization is expected
    -- to have eliminated it before slice compilation.
    setVlanOnPort (Forward AllPorts mod) =
      error "AllPorts encountered in slice compilation. Did you localize?"
    setVlanOnPort act = act
setVlan vlan loc (Restrict pol pred) = Restrict (setVlan vlan loc pol) pred
setVlan vlan loc (SendPackets chan) = SendPackets chan
-- TODO(arjun): needs work--clearly just transform outputs to set VLAN
-- |Determine if a predicate can match any packets on a switch (overapproximate)
matchesSwitch :: Switch -> Predicate -> Bool
matchesSwitch sw prd = case prd of
  Frenetic.NetCore.Types.Switch s2 -> sw == s2
  Or p1 p2  -> matchesSwitch sw p1 || matchesSwitch sw p2
  And p1 p2 -> matchesSwitch sw p1 && matchesSwitch sw p2
  -- Everything else (Not, header matches, ...) may match: stay conservative.
  _ -> True
|
frenetic-lang/netcore-1.0
|
src/Frenetic/Slices/Compile.hs
|
bsd-3-clause
| 11,996
| 0
| 20
| 2,731
| 3,004
| 1,538
| 1,466
| -1
| -1
|
{- OPLSS, Summer 2013 -}
{-# LANGUAGE GADTs, NamedFieldPuns, FlexibleContexts, ViewPatterns, RankNTypes #-}
{-# OPTIONS_GHC -Wall -fno-warn-unused-matches #-}
-- | Utilities for managing a typechecking context.
module Environment
(
TcMonad, runTcMonad,
Env,Hint(..),
emptyEnv,
lookupTy, lookupTyMaybe, lookupDef, lookupHint, lookupTCon,
lookupDCon, lookupDConAll, getTys,
getCtx, getLocalCtx, extendCtx, extendCtxTele,
extendCtxs, extendCtxsGlobal,
extendCtxMods,
extendHints,
extendSourceLocation, getSourceLocation,
getDefs, substDefs,
err, warn, extendErr, D(..),Err(..),
) where
import Syntax
import PrettyPrint
import Unbound.LocallyNameless hiding (Data)
import Text.PrettyPrint.HughesPJ
import Text.ParserCombinators.Parsec.Pos(SourcePos)
import Control.Monad.Reader
import Control.Monad.Error
import Data.List
import Data.Maybe (listToMaybe, catMaybes)
-- | The type checking Monad includes a reader (for the
-- environment), freshness state (for supporting locally-nameless
-- representations), error (for error reporting), and IO
-- (for e.g. warning messages).
-- NOTE(review): 'ErrorT' is the deprecated mtl transformer; 'ExceptT'
-- is the modern replacement, but swapping it would ripple through every
-- consumer of 'TcMonad'.
type TcMonad = FreshMT (ReaderT Env (ErrorT Err IO))
-- | Entry point for the type checking monad, given an
-- initial environment, returns either an error message
-- or some result.
runTcMonad :: Env -> TcMonad a -> IO (Either Err a)
runTcMonad env m = runErrorT (runReaderT (runFreshMT m) env)
-- | Marked locations in the source code
-- The payload is existential: anything displayable can be recorded.
data SourceLocation where
  SourceLocation :: forall a. Disp a => SourcePos -> a -> SourceLocation
-- | Type declarations
data Hint = Hint TName Term
-- | Environment manipulation and accessing functions
-- The context 'gamma' is a list
data Env = Env { ctx :: [Decl],
                 -- ^ elaborated term and datatype declarations.
                 globals :: Int,
                 -- ^ how long the tail of "global" variables in the context is
                 -- (used to suppress printing those in error messages)
                 hints :: [Hint],
                 -- ^ Type declarations (signatures): it's not safe to
                 -- put these in the context until a corresponding term
                 -- has been checked.
                 sourceLocation :: [SourceLocation]
                 -- ^ what part of the file we are in (for errors/warnings)
               }
  --deriving Show
-- | The initial environment.
emptyEnv :: Env
emptyEnv = Env { ctx = [] , globals = 0, hints = [], sourceLocation = [] }
-- Display an environment as one declaration per line.
instance Disp Env where
  disp e = vcat (map disp (ctx e))
-- | Return a list of all type bindings, and their names.
getTys :: (MonadReader Env m) => m [(TName,Term)]
getTys = do
  decls <- asks ctx
  -- The pattern in the comprehension silently drops non-'Sig' entries.
  return [ (v, ty) | Sig v ty <- decls ]
-- | Find a name's user supplied type signature.
lookupHint :: (MonadReader Env m) => TName -> m (Maybe Term)
lookupHint v = do
  hs <- asks hints
  return (listToMaybe [ ty | Hint v' ty <- hs, v' == v ])
-- | Find a name's type in the context.
lookupTyMaybe :: (MonadReader Env m, MonadError Err m)
              => TName -> m (Maybe Term)
lookupTyMaybe v = do
  decls <- asks ctx
  return (listToMaybe [ ty | Sig v' ty <- decls, v' == v ])
-- | Find the type of a name specified in the context
-- throwing an error if the name doesn't exist
lookupTy :: (MonadReader Env m, MonadError Err m)
         => TName -> m Term
lookupTy v = do
  mty <- lookupTyMaybe v
  gamma <- getLocalCtx
  case mty of
    Just res -> return res
    Nothing ->
      err [ DS ("The variable " ++ show v ++ " was not found.")
          , DS "in context", DD gamma ]
-- | Find a name's def in the context.
lookupDef :: (MonadReader Env m, MonadError Err m, MonadIO m)
          => TName -> m (Maybe Term)
lookupDef v = do
  decls <- asks ctx
  return (listToMaybe [ a | Def v' a <- decls, v' == v ])
-- | Find a type constructor in the context
lookupTCon :: (MonadReader Env m, MonadError Err m)
           => TCName -> m (Telescope,Int,Maybe [ConstructorDef])
lookupTCon v = asks ctx >>= scanGamma
  where
    -- Exhausted the context: report with the full environment.
    scanGamma [] = do
      currentEnv <- asks ctx
      err [DS "The type constructor", DD v, DS "was not found.",
           DS "The current environment is", DD currentEnv]
    scanGamma (Data v' delta lev cs : g)
      | v == v'   = return (delta, lev, Just cs)
      | otherwise = scanGamma g
    scanGamma (AbsData v' delta lev : g)
      | v == v'   = return (delta, lev, Nothing)
      | otherwise = scanGamma g
    scanGamma (_ : g) = scanGamma g
-- | Find a data constructor in the context, returns a list of
-- all potential matches
lookupDConAll :: (MonadReader Env m, MonadError Err m)
              => DCName -> m [(TCName,(Telescope,ConstructorDef))]
lookupDConAll v = asks ctx >>= scanGamma
  where
    scanGamma [] = return []
    scanGamma (Data v' delta _lev cs : g) =
      case find (\(ConstructorDef _ v'' _tele) -> v'' == v) cs of
        Nothing -> scanGamma g
        Just c -> do
          more <- scanGamma g
          return ((v', (delta, c)) : more)
    -- Abstract datatypes expose no constructors.
    scanGamma (AbsData _ _ _ : g) = scanGamma g
    scanGamma (_ : g) = scanGamma g
-- | Given the name of a data constructor and the type that it should
-- construct, find the telescopes for its parameters and arguments.
-- Throws an error if the data constructor cannot be found for that type.
lookupDCon :: (MonadReader Env m, MonadError Err m)
           => DCName -> TCName -> m (Telescope,Telescope)
lookupDCon c tname = do
  matches <- lookupDConAll c
  case lookup tname matches of
    Just (delta, ConstructorDef _ _ deltai) -> return (delta, deltai)
    Nothing ->
      err ([DS "Cannot find data constructor", DS c,
            DS "for type", DD tname,
            DS "Potential matches were:"]
           ++ map (DD . fst) matches
           ++ map (DD . snd . snd) matches)
-- | Extend the context with a new binding.
extendCtx :: (MonadReader Env m) => Decl -> m a -> m a
extendCtx d = local addDecl
  where addDecl env = env { ctx = d : ctx env }
-- | Extend the context with a list of bindings
extendCtxs :: (MonadReader Env m) => [Decl] -> m a -> m a
extendCtxs ds = local (\env -> env { ctx = ds ++ ctx env })
-- | Extend the context with a list of bindings, marking them as "global"
-- (the entire resulting context counts as the global tail).
extendCtxsGlobal :: (MonadReader Env m) => [Decl] -> m a -> m a
extendCtxsGlobal ds = local $ \env ->
  let cs' = ds ++ ctx env
  in env { ctx = cs', globals = length cs' }
-- | Extend the context with a telescope
-- Walks the telescope left to right, adding each binder as a 'Sig'; the
-- view patterns ('unrebind'/'unembed') open the nested binding structure.
extendCtxTele :: (MonadReader Env m) => Telescope -> m a -> m a
extendCtxTele Empty m = m
extendCtxTele (Cons (unrebind -> ((x,unembed->ty),tele))) m =
  extendCtx (Sig x ty) $ extendCtxTele tele m
-- | Extend the context with a module
-- Note we must reverse the order.
extendCtxMod :: (MonadReader Env m) => Module -> m a -> m a
extendCtxMod m = extendCtxs (reverse (moduleEntries m))
-- | Extend the context with a list of modules
extendCtxMods :: (MonadReader Env m) => [Module] -> m a -> m a
extendCtxMods mods action = foldr extendCtxMod action mods
-- | Get the complete current context
-- (both the local prefix and the global tail; see 'getLocalCtx').
getCtx :: MonadReader Env m => m [Decl]
getCtx = asks ctx
-- | Get the prefix of the context that corresponds to local variables.
getLocalCtx :: MonadReader Env m => m [Decl]
getLocalCtx = do
  env <- ask
  -- Everything before the 'globals'-long tail is local.
  let nLocals = length (ctx env) - globals env
  return (take nLocals (ctx env))
-- | Push a new source position on the location stack.
extendSourceLocation :: (MonadReader Env m, Disp t) => SourcePos -> t -> m a -> m a
extendSourceLocation p t = local push
  where push e = e { sourceLocation = SourceLocation p t : sourceLocation e }
-- | access current source location
-- Returns the stack of marked positions, innermost first.
getSourceLocation :: MonadReader Env m => m [SourceLocation]
getSourceLocation = asks sourceLocation
-- | Add a type hint
extendHints :: (MonadReader Env m) => Hint -> m a -> m a
extendHints h = local (\env -> env { hints = h : hints env })
-- | access all definitions
getDefs :: MonadReader Env m => m [(TName,Term)]
getDefs = do
  decls <- getCtx
  -- Keep only 'Def' entries, in context order.
  return [ (x, a) | Def x a <- decls ]
-- | substitute all of the defs through a term
-- Earlier (more recent) defs may mention later ones, so each def's body is
-- first expanded with the defs that follow it before being substituted.
substDefs :: MonadReader Env m => Term -> m Term
substDefs tm = do
  ctx <- getCtx
  return $ substs (expandDefs ctx) tm
  where expandDefs ((Def x a):ctx) =
          let defs = expandDefs ctx
          in ((x, substs defs a) : defs)
        expandDefs (_:ctx) = expandDefs ctx
        expandDefs [] = []
-- | An error that should be reported to the user
-- Carries the source-location stack at the point of failure plus the
-- rendered message.
data Err = Err [SourceLocation] Doc
-- | Augment the error message with additional information
extendErr :: MonadError Err m => m a -> Doc -> m a
extendErr ma msg' = catchError ma rethrow
  where rethrow (Err ps msg) = throwError (Err ps (msg $$ msg'))
-- Needed by 'ErrorT': wrap a plain string message.
instance Error Err where
  strMsg msg = Err [] (text msg)
-- Show the innermost recorded source location (if any) above the message.
instance Disp Err where
  disp (Err [] msg) = msg
  disp (Err ((SourceLocation p term):_) msg) =
    disp p $$
    nest 2 msg $$
    nest 2 (text "In the expression" $$ nest 2 (disp term))
-- | Throw an error
err :: (Disp a, MonadError Err m, MonadReader Env m) => a -> m b
err d =
  getSourceLocation >>= \locs -> throwError (Err locs (disp d))
-- | Print a warning
warn :: (Disp a, MonadReader Env m, MonadIO m) => a -> m ()
warn e = do
  locs <- getSourceLocation
  let rendered = render (disp (Err locs (disp e)))
  liftIO (putStrLn ("warning: " ++ rendered))
|
jonsterling/ETT-Lite
|
src/Environment.hs
|
bsd-3-clause
| 9,542
| 0
| 17
| 2,424
| 3,013
| 1,585
| 1,428
| 181
| 6
|
{-# language MultiParamTypeClasses#-}
{-# language TemplateHaskell#-}
{-# language ScopedTypeVariables#-}
{-# language ViewPatterns#-}
{-# language FlexibleInstances#-}
{-# language DeriveFunctor#-}
{-# language StandaloneDeriving#-}
{-# language GADTs#-}
{-# language NoMonomorphismRestriction#-}
{-# language FlexibleContexts#-}
{-# language ConstraintKinds #-}
import Data.List
import Data.Tree hiding (Forest)
import Control.Monad.State hiding (modify)
import System.Console.Haskeline hiding (Exception)
import Control.Monad.Cont
import Data.Foldable
import Test.QuickCheck (sample')
import Data.Types.Isomorphic (to, Injective, Iso)
import Data.Proxy
import HDynTs.Lib.Tree (arbitraryForest, drawTreeU)
import HDynTs.Interface
import HDynTs.EulerTours.Forest
import Data.Char
-- | Console command language: Link, Delete (cut), Path query, New random
-- forest, Show, spanning tree (I).  'Read'-derived so input lines parse
-- directly via 'reads'.
data Lang a = L a a | D a a | P a a | N Int | S | I a deriving Read
-- | Help text printed by the console ("ouput" typo fixed to "output").
doc = "l x y: link x and y verteces\nd x y: unlink x and y verteces\np x y : compute path between x and y\nn x : new random forest of max with x\ni x: spanning tree of x\ns: output the forest\nCTRL-d: exit\n"
-- | Render a structural 'Exception' as a human-readable condition string.
parseErrors :: Show a => Exception a b -> String
parseErrors exc = case exc of
  AlreadySeparatedVerteces x y ->
    "verteces " ++ show x ++ " " ++ show y ++ " are not linked"
  AlreadyConnectedVerteces x y ->
    "verteces " ++ show x ++ " " ++ show y ++ " are in the same graph"
  VertexNotFound x ->
    "vertex " ++ show x ++ " was not found in the forest"
  OrException e1 e2 -> parseErrors e1 ++ " or " ++ parseErrors e2
  NotConnectedVerteces x y ->
    "verteces " ++ show x ++ " " ++ show y ++ " are in different graphs"
-- | Format a failed-precondition line for console output.
errore x = concat ["## CONDITION: ", x, "\n"]
-- | Format a result line for console output.
report x = concat ["## RESULT: ", x, "\n"]
-- error treatment
-- | Dispatch an 'Either' result: render exceptions through 'parseErrors'
-- into the error callback, or hand the value to the success callback.
catchErrorM :: (MonadIO m ,Show a)
    => (String -> m ()) -- error report callback
    -> (r -> m ()) -- result callback
    -> Either (Exception a b) r -- result
    -> m ()
catchErrorM onErr onOk result = case result of
    Left e  -> onErr (errore (parseErrors e))
    Right r -> onOk r
-- t constraints
-- | Everything the console needs from a forest structure @t@: the
-- 'Interpreter' operations over Int labels, and an isomorphism to rose
-- forests for generation and display.
type Env t = (Interpreter t Int, Iso [Tree Int] (t Int))
-- fresh populated structure
-- | Sample a random forest of max size n and convert it into t.
news :: (Env t, MonadIO m) => Int -> m (t Int)
news n = fmap (to . head) (liftIO (sample' (arbitraryForest 2 n)))
-- Print the command reference; the two 'lift's cross the ContT and StateT
-- layers down to the InputT console.
help = lift . lift . outputStrLn $ doc
-- Prompt filler. NOTE(review): appears unused within this file -- confirm.
sep = " ....."
-- | Print every tree of the current forest (converted back to rose trees
-- via the 'Iso' instance), followed by an extra message line.
out :: forall t. Env t => String -> StateT (t Int) (InputT IO) ()
out x = do
    let g t = do
            putStrLn . drawTreeU . fmap show $ t
    -- The type annotation picks the [Tree Int] side of the isomorphism.
    get >>= liftIO . mapM_ g . (to :: t Int -> [Tree Int])
    lift . outputStrLn $ x
-- | Main REPL loop.  Runs in ContT so 'stop' can abort the whole loop when
-- the user sends EOF (CTRL-d).
loop :: forall t . Env t => ContT () (StateT (t Int) (InputT IO)) ()
loop = callCC $ \stop -> do
    -- gl: read one input line (Nothing on EOF) and echo a blank line.
    let gl = do
            mr <- lift . lift $ getInputLine "> "
            lift . lift $ outputStrLn ""
            return mr
        -- q: run a query against the current forest; h consumes the result.
        q p h = gets (query p) >>= catchErrorM out h
        -- m: run a mutation, reporting failed preconditions via 'out'.
        m p = modify p >>= catchErrorM out return
        -- u: interpret one (upper-cased) input line; Nothing = EOF = stop.
        u Nothing = stop ()
        u (Just (map toUpper -> r)) = do
            case reads r of
                [(L x y,_)] -> lift $ m (Link x y)
                [(D x y,_)] -> lift $ m (Cut x y)
                [(P x y,_)] -> lift $ q (Path x y) $
                    lift . outputStrLn . report . show
                [(I x,_)] -> lift $ q (Spanning x) $
                    liftIO . putStrLn . drawTreeU . fmap show
                [(S,_)] -> lift $ out ""
                [(N n,_)] -> lift (news n >>= put) >> lift (out "")
                -- Unparseable input: re-print the help text.
                _ -> help
            gl >>= u
    help >> gl >>= u
-- | Boot the console for a chosen structure t: build a random 5-node
-- forest, then run the REPL stack (ContT over StateT over InputT IO).
run :: forall t . Env t => Proxy (t Int) -> IO ()
run _ = (news :: Int -> IO (t Int)) 5 >>=
    runInputT defaultSettings .
        evalStateT (runContT loop return)
-- Entry point: use the Euler-tour forest implementation.
main = run (Proxy :: Proxy (TourForest Int))
|
paolino/HDynTs
|
src/executables/Console.hs
|
bsd-3-clause
| 3,639
| 0
| 24
| 1,014
| 1,325
| 680
| 645
| 82
| 8
|
{-# LANGUAGE ScopedTypeVariables #-}
module GhcUtilsSpec (main, spec) where
import Test.Hspec
import TestUtils
import qualified GHC as GHC
import qualified Data.Generics as SYB
import qualified GHC.SYB.Utils as SYB
import Language.Haskell.GHC.ExactPrint.Utils
import Language.Haskell.Refact.Utils.Binds
import Language.Haskell.Refact.Utils.GhcUtils
import Language.Haskell.Refact.Utils.GhcVersionSpecific
import Language.Haskell.Refact.Utils.Monad
import Language.Haskell.Refact.Utils.MonadFunctions
import Language.Haskell.Refact.Utils.TypeUtils
import Language.Haskell.Refact.Utils.Utils
import Language.Haskell.Refact.Utils.Variables
-- import TestUtils
-- ---------------------------------------------------------------------
-- | Standalone runner for this spec module.
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
  describe "onelayerStaged" $ do
    it "only descends one layer into a structure" $ do
      -- Tuple whose Int fields sit at depth one; the list's elements are
      -- deeper and must NOT be visited by a one-layer traversal.
      let s' = (2,[3,4],5) :: (Int,[Int],Int)
      let
        worker' (i::Int) = [i]
      -- onelayerStaged should agree with SYB's one-layer gmapQ variants.
      let g = onelayerStaged SYB.Renamer [] ([] `SYB.mkQ` worker') s'
      let g1 = SYB.gmapQ ([] `SYB.mkQ` worker') s'
      let g2 = SYB.gmapQl (++) [] ([] `SYB.mkQ` worker') s'
      (show g) `shouldBe` "[[2],[],[5]]"
      (show g1) `shouldBe` "[[2],[],[5]]"
      (show g2) `shouldBe` "[2,5]"
      -- ---------------------------------
    it "Finds a GHC.Name at top level only" $ do
      let
        comp = do
          parseSourceFileGhc "./DupDef/Dd1.hs"
          renamed <- getRefactRenamed
          -- Names at (4,1) (the binder) and (4,10) (a use site).
          let mn = locToName (4,1) renamed
          let (Just (ln@(GHC.L _ n))) = mn
          let mx = locToName (4,10) renamed
          let (Just (lx@(GHC.L _ x))) = mx
          let declsr = hsBinds renamed
              duplicatedDecls = definingDeclsNames [n] declsr True False
              res = findEntity ln duplicatedDecls
              res2 = findEntity n duplicatedDecls
              resx = findEntity lx duplicatedDecls
              resx2 = findEntity x duplicatedDecls
              worker (nn::GHC.Name) = [showGhc nn]
              g = onelayerStaged SYB.Renamer ["-1"] (["-10"] `SYB.mkQ` worker) duplicatedDecls
              -- worker2 fires only on the FunBind for the binder itself.
              worker2 ((GHC.L _ (GHC.FunBind (GHC.L _ n') _ _ _ _ _))::GHC.Located (GHC.HsBind GHC.Name))
                | n == n' = ["found"]
              worker2 _ = []
              g2 = onelayerStaged SYB.Renamer ["-1"] (["-10"] `SYB.mkQ` worker2) duplicatedDecls
          return (res,res2,resx,resx2,duplicatedDecls,g,g2,ln,lx)
      ((r,r2,rx,rx2,d,gg,gg2,_l,_x),_s) <- ct $ runRefactGhc comp initialState testOptions
      -- (SYB.showData SYB.Renamer 0 d) `shouldBe` ""
      (showGhcQual d) `shouldBe` "[DupDef.Dd1.toplevel x = DupDef.Dd1.c GHC.Num.* x]"
      (showGhcQual _l) `shouldBe` "DupDef.Dd1.toplevel"
      (showGhc _x) `shouldBe` "x"
      (show gg) `shouldBe` "[[\"-10\"],[\"-10\"]]"
      (show gg2) `shouldBe` "[[\"found\"],[\"-10\"]]"
      r `shouldBe` True
      r2 `shouldBe` True
      rx `shouldBe` False
      rx2 `shouldBe` True
-- ---------------------------------------------------------------------
|
kmate/HaRe
|
test/GhcUtilsSpec.hs
|
bsd-3-clause
| 3,069
| 0
| 30
| 734
| 959
| 531
| 428
| 64
| 2
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section{@Vars@: Variables}
-}
{-# LANGUAGE CPP, DeriveDataTypeable, MultiWayIf #-}
-- |
-- #name_types#
-- GHC uses several kinds of name internally:
--
-- * 'OccName.OccName': see "OccName#name_types"
--
-- * 'RdrName.RdrName': see "RdrName#name_types"
--
-- * 'Name.Name': see "Name#name_types"
--
-- * 'Id.Id': see "Id#name_types"
--
-- * 'Var.Var' is a synonym for the 'Id.Id' type but it may additionally
-- potentially contain type variables, which have a 'TyCoRep.Kind'
-- rather than a 'TyCoRep.Type' and only contain some extra
-- details during typechecking.
--
-- These 'Var.Var' names may either be global or local, see "Var#globalvslocal"
--
-- #globalvslocal#
-- Global 'Id's and 'Var's are those that are imported or correspond
-- to a data constructor, primitive operation, or record selectors.
-- Local 'Id's and 'Var's are those bound within an expression
-- (e.g. by a lambda) or at the top level of the module being compiled.
module Var (
-- * The main data type and synonyms
Var, CoVar, Id, NcId, DictId, DFunId, EvVar, EqVar, EvId, IpId,
TyVar, TypeVar, KindVar, TKVar, TyCoVar,
-- ** Taking 'Var's apart
varName, varUnique, varType,
-- ** Modifying 'Var's
setVarName, setVarUnique, setVarType, updateVarType,
updateVarTypeM,
-- ** Constructing, taking apart, modifying 'Id's
mkGlobalVar, mkLocalVar, mkExportedLocalVar, mkCoVar,
idInfo, idDetails,
lazySetIdInfo, setIdDetails, globaliseId,
setIdExported, setIdNotExported,
-- ** Predicates
isId, isTKVar, isTyVar, isTcTyVar,
isLocalVar, isLocalId, isCoVar, isNonCoVarId, isTyCoVar,
isGlobalId, isExportedId,
mustHaveLocalBinding,
-- ** Constructing 'TyVar's
mkTyVar, mkTcTyVar,
-- ** Taking 'TyVar's apart
tyVarName, tyVarKind, tcTyVarDetails, setTcTyVarDetails,
-- ** Modifying 'TyVar's
setTyVarName, setTyVarUnique, setTyVarKind, updateTyVarKind,
updateTyVarKindM
) where
#include "HsVersions.h"
import {-# SOURCE #-} TyCoRep( Type, Kind, pprKind )
import {-# SOURCE #-} TcType( TcTyVarDetails, pprTcTyVarDetails, vanillaSkolemTv )
import {-# SOURCE #-} IdInfo( IdDetails, IdInfo, coVarDetails, isCoVarDetails, vanillaIdInfo, pprIdDetails )
import Name hiding (varName)
import Unique
import Util
import DynFlags
import Outputable
import Data.Data
{-
************************************************************************
* *
Synonyms
* *
************************************************************************
-- These synonyms are here and not in Id because otherwise we need a very
-- large number of SOURCE imports of Id.hs :-(
-}
-- NOTE: these synonyms document intent only; the type checker does not
-- distinguish them from 'Var'.
type Id    = Var       -- A term-level identifier
                       --   predicate: isId
type CoVar = Id        -- See Note [Evidence: EvIds and CoVars]
                       --   predicate: isCoVar
type NcId  = Id        -- A term-level (value) variable that is
                       -- /not/ an (unlifted) coercion
                       --   predicate: isNonCoVarId
type TyVar   = Var     -- Type *or* kind variable (historical)
type TKVar   = Var     -- Type *or* kind variable (historical)
type TypeVar = Var     -- Definitely a type variable
type KindVar = Var     -- Definitely a kind variable
                       -- See Note [Kind and type variables]
-- See Note [Evidence: EvIds and CoVars]
type EvId   = Id       -- Term-level evidence: DictId, IpId, or EqVar
type EvVar  = EvId     -- ...historical name for EvId
type DFunId = Id       -- A dictionary function
type DictId = EvId     -- A dictionary variable
type IpId   = EvId     -- A term-level implicit parameter
type EqVar  = EvId     -- Boxed equality evidence
type TyCoVar = Id      -- Type, kind, *or* coercion variable
                       --   predicate: isTyCoVar
{- Note [Evidence: EvIds and CoVars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* An EvId (evidence Id) is a term-level evidence variable
(dictionary, implicit parameter, or equality). Could be boxed or unboxed.
* DictId, IpId, and EqVar are synonyms when we know what kind of
evidence we are talking about. For example, an EqVar has type (t1 ~ t2).
* A CoVar is always an un-lifted coercion, of type (t1 ~# t2) or (t1 ~R# t2)
Note [Kind and type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Before kind polymorphism, TyVar were used to mean type variables. Now
they are used to mean kind *or* type variables. KindVar is used when we
know for sure that it is a kind variable. In future, we might want to
go over the whole compiler code to use:
- TKVar to mean kind or type variables
- TypeVar to mean type variables only
- KindVar to mean kind variables
************************************************************************
* *
\subsection{The main data type declarations}
* *
************************************************************************
Every @Var@ has a @Unique@, to uniquify it and for fast comparison, a
@Type@, and an @IdInfo@ (non-essential info about it, e.g.,
strictness). The essential info about different kinds of @Vars@ is
in its @VarDetails@.
-}
-- | Essentially a typed 'Name', that may also contain some additional information
-- about the 'Var' and it's use sites.
data Var
  = TyVar {  -- Type and kind variables
             -- see Note [Kind and type variables]
        varName    :: !Name,
        realUnique :: {-# UNPACK #-} !Int,
                                     -- ^ Key for fast comparison
                                     -- Identical to the Unique in the name,
                                     -- cached here for speed
        varType    :: Kind           -- ^ The type or kind of the 'Var' in question
 }

  | TcTyVar {                           -- Used only during type inference
                                        -- Used for kind variables during
                                        -- inference, as well
        varName        :: !Name,
        realUnique     :: {-# UNPACK #-} !Int,
        varType        :: Kind,
        tc_tv_details  :: TcTyVarDetails  -- extra typechecker-only state
  }

  | Id {
        varName    :: !Name,
        realUnique :: {-# UNPACK #-} !Int,
        varType    :: Type,
        idScope    :: IdScope,          -- global vs. (exported) local
        id_details :: IdDetails,        -- Stable, doesn't change
        id_info    :: IdInfo }          -- Unstable, updated by simplifier
    deriving Typeable
data IdScope    -- See Note [GlobalId/LocalId]
  = GlobalId
  | LocalId ExportFlag

data ExportFlag   -- See Note [ExportFlag on binders]
  = NotExported   -- ^ Not exported: may be discarded as dead code.
  | Exported      -- ^ Exported: kept alive
{- Note [ExportFlag on binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An ExportFlag of "Exported" on a top-level binder says "keep this
binding alive; do not drop it as dead code". This transitively
keeps alive all the other top-level bindings that this binding refers
to. This property is persisted all the way down the pipeline, so that
the binding will be compiled all the way to object code, and its
symbols will appear in the linker symbol table.
However, note that this use of "exported" is quite different to the
export list on a Haskell module. Setting the ExportFlag on an Id does
/not/ mean that if you import the module (in Haskell source code) you
will see this Id. Of course, things that appear in the export list
of the source Haskell module do indeed have their ExportFlag set.
But many other things, such as dictionary functions, are kept alive
by having their ExportFlag set, even though they are not exported
in the source-code sense.
We should probably use a different term for ExportFlag, like
KeepAlive.
Note [GlobalId/LocalId]
~~~~~~~~~~~~~~~~~~~~~~~
A GlobalId is
* always a constant (top-level)
* imported, or data constructor, or primop, or record selector
* has a Unique that is globally unique across the whole
GHC invocation (a single invocation may compile multiple modules)
* never treated as a candidate by the free-variable finder;
it's a constant!
A LocalId is
* bound within an expression (lambda, case, local let(rec))
* or defined at top level in the module being compiled
* always treated as a candidate by the free-variable finder
After CoreTidy, top-level LocalIds are turned into GlobalIds
-}
-- Pretty-print a Var; in debug style (unless suppressed) also parenthesise
-- and show its kind alongside the debug details.
instance Outputable Var where
  ppr var = sdocWithDynFlags $ \dflags ->
            getPprStyle $ \ppr_style ->
            if | debugStyle ppr_style && (not (gopt Opt_SuppressVarKinds dflags))
                 -> parens (ppr (varName var) <+> ppr_debug var ppr_style <+>
                            dcolon <+> pprKind (tyVarKind var))
               | otherwise
                 -> ppr (varName var) <> ppr_debug var ppr_style
-- Bracketed per-constructor debug annotation.  Each guarded clause falls
-- through to the final catch-all (empty) when the style is not a
-- dump/debug style.
ppr_debug :: Var -> PprStyle -> SDoc
ppr_debug (TyVar {}) sty
  | debugStyle sty = brackets (text "tv")
ppr_debug (TcTyVar {tc_tv_details = d}) sty
  | dumpStyle sty || debugStyle sty = brackets (pprTcTyVarDetails d)
ppr_debug (Id { idScope = s, id_details = d }) sty
  | debugStyle sty = brackets (ppr_id_scope s <> pprIdDetails d)
ppr_debug _ _ = empty
-- Short tag for an Id's scope: gid / lidx / lid.
ppr_id_scope :: IdScope -> SDoc
ppr_id_scope scope = case scope of
  GlobalId            -> text "gid"
  LocalId Exported    -> text "lidx"
  LocalId NotExported -> text "lid"
instance NamedThing Var where
  getName = varName

instance Uniquable Var where
  getUnique = varUnique

-- Eq/Ord compare the cached realUnique Int directly for speed; 'compare'
-- goes through varUnique, which wraps that same cached Int.
instance Eq Var where
    a == b = realUnique a == realUnique b

instance Ord Var where
    a <= b = realUnique a <= realUnique b
    a < b = realUnique a < realUnique b
    a >= b = realUnique a >= realUnique b
    a > b = realUnique a > realUnique b
    a `compare` b = varUnique a `compare` varUnique b
-- Abstract Data instance: Vars are opaque to generic traversals.
instance Data Var where
  -- don't traverse?
  toConstr _   = abstractConstr "Var"
  gunfold _ _  = error "gunfold"
  dataTypeOf _ = mkNoRepType "Var"
-- | Rebuild the 'Unique' from the cached key.
varUnique :: Var -> Unique
varUnique = mkUniqueGrimily . realUnique
-- | Replace the unique, keeping the cached key and the Name in sync.
setVarUnique :: Var -> Unique -> Var
setVarUnique var u =
  var { realUnique = getKey u
      , varName = setNameUnique (varName var) u }
-- | Replace the name, re-caching the new name's unique key.
setVarName :: Var -> Name -> Var
setVarName var nm =
  var { realUnique = getKey (getUnique nm), varName = nm }
-- | Overwrite the type of an Id.
setVarType :: Id -> Type -> Id
setVarType var ty = var { varType = ty }

-- | Apply a pure transformation to an Id's type.
updateVarType :: (Type -> Type) -> Id -> Id
updateVarType upd var = var { varType = upd (varType var) }

-- | Apply a monadic transformation to an Id's type.
updateVarTypeM :: Monad m => (Type -> m Type) -> Id -> m Id
updateVarTypeM upd var = do
  ty' <- upd (varType var)
  return (var { varType = ty' })
{-
************************************************************************
* *
\subsection{Type and kind variables}
* *
************************************************************************
-}
-- Type/kind-variable views of the generic Var operations.
tyVarName :: TyVar -> Name
tyVarName = varName

tyVarKind :: TyVar -> Kind
tyVarKind = varType

setTyVarUnique :: TyVar -> Unique -> TyVar
setTyVarUnique = setVarUnique

setTyVarName :: TyVar -> Name -> TyVar
setTyVarName = setVarName

setTyVarKind :: TyVar -> Kind -> TyVar
setTyVarKind tv k = tv { varType = k }

updateTyVarKind :: (Kind -> Kind) -> TyVar -> TyVar
updateTyVarKind upd tv = tv { varType = upd (tyVarKind tv) }

updateTyVarKindM :: (Monad m) => (Kind -> m Kind) -> TyVar -> m TyVar
updateTyVarKindM upd tv = do
  k' <- upd (tyVarKind tv)
  return (tv { varType = k' })
-- | Make an immutable type/kind variable from a name and a kind.
mkTyVar :: Name -> Kind -> TyVar
mkTyVar name kind = TyVar { varName    = name
                          , realUnique = getKey (nameUnique name)
                          , varType    = kind
                          }

-- | Make a typechecker-only type variable carrying inference details.
mkTcTyVar :: Name -> Kind -> TcTyVarDetails -> TyVar
mkTcTyVar name kind details
  = -- NB: 'kind' may be a coercion kind; cf, 'TcMType.newMetaCoVar'
    TcTyVar { varName    = name,
              realUnique = getKey (nameUnique name),
              varType  = kind,
              tc_tv_details = details
        }

-- Details of a TcTyVar; a plain TyVar is treated as a vanilla skolem,
-- and anything else (an Id) is a panic.
tcTyVarDetails :: TyVar -> TcTyVarDetails
tcTyVarDetails (TcTyVar { tc_tv_details = details }) = details
tcTyVarDetails (TyVar {}) = vanillaSkolemTv
tcTyVarDetails var = pprPanic "tcTyVarDetails" (ppr var <+> dcolon <+> pprKind (tyVarKind var))

setTcTyVarDetails :: TyVar -> TcTyVarDetails -> TyVar
setTcTyVarDetails tv details = tv { tc_tv_details = details }
{-
%************************************************************************
%* *
\subsection{Ids}
* *
************************************************************************
-}
-- | Extract the (unstable) IdInfo; panics on type variables.
idInfo :: Id -> IdInfo
idInfo (Id { id_info = info }) = info
idInfo other                   = pprPanic "idInfo" (ppr other)

-- | Extract the (stable) IdDetails; panics on type variables.
idDetails :: Id -> IdDetails
idDetails (Id { id_details = details }) = details
idDetails other                         = pprPanic "idDetails" (ppr other)

-- The next three have a 'Var' suffix even though they always build
-- Ids, because Id.hs uses 'mkGlobalId' etc with different types
mkGlobalVar :: IdDetails -> Name -> Type -> IdInfo -> Id
mkGlobalVar details name ty info
  = mk_id name ty GlobalId details info

mkLocalVar :: IdDetails -> Name -> Type -> IdInfo -> Id
mkLocalVar details name ty info
  = mk_id name ty (LocalId NotExported) details  info

mkCoVar :: Name -> Type -> CoVar
-- Coercion variables have no IdInfo
mkCoVar name ty = mk_id name ty (LocalId NotExported) coVarDetails vanillaIdInfo

-- | Exported 'Var's will not be removed as dead code
mkExportedLocalVar :: IdDetails -> Name -> Type -> IdInfo -> Id
mkExportedLocalVar details name ty info
  = mk_id name ty (LocalId Exported) details info

-- Shared worker: builds the Id record, caching the Name's unique key.
mk_id :: Name -> Type -> IdScope -> IdDetails -> IdInfo -> Id
mk_id name ty scope details info
  = Id { varName    = name,
         realUnique = getKey (nameUnique name),
         varType    = ty,
         idScope    = scope,
         id_details = details,
         id_info    = info }
-------------------

-- | Replace the IdInfo without forcing the new value.
lazySetIdInfo :: Id -> IdInfo -> Var
lazySetIdInfo id info = id { id_info = info }

setIdDetails :: Id -> IdDetails -> Id
setIdDetails id details = id { id_details = details }

globaliseId :: Id -> Id
-- ^ If it's a local, make it global
globaliseId id = id { idScope = GlobalId }

setIdExported :: Id -> Id
-- ^ Exports the given local 'Id'. Can also be called on global 'Id's, such as data constructors
-- and class operations, which are born as global 'Id's and automatically exported
setIdExported id@(Id { idScope = LocalId {} }) = id { idScope = LocalId Exported }
setIdExported id@(Id { idScope = GlobalId })   = id
setIdExported tv = pprPanic "setIdExported" (ppr tv)

setIdNotExported :: Id -> Id
-- ^ We can only do this to LocalIds
setIdNotExported id = ASSERT( isLocalId id )
                      id { idScope = LocalId NotExported }
{-
************************************************************************
* *
\subsection{Predicates over variables}
* *
************************************************************************
-}
isTyVar :: Var -> Bool          -- True of both TyVar and TcTyVar
isTyVar = isTKVar       -- Historical

isTKVar :: Var -> Bool  -- True of both type and kind variables
isTKVar (TyVar {})   = True
isTKVar (TcTyVar {}) = True
isTKVar _            = False

isTcTyVar :: Var -> Bool
isTcTyVar (TcTyVar {}) = True
isTcTyVar _            = False

-- | Type, kind, or coercion variable.
isTyCoVar :: Var -> Bool
isTyCoVar v = isTyVar v || isCoVar v

isId :: Var -> Bool
isId (Id {}) = True
isId _       = False

isCoVar :: Var -> Bool
-- A coercion variable
isCoVar (Id { id_details = details }) = isCoVarDetails details
isCoVar _                             = False

isNonCoVarId :: Var -> Bool
-- A term variable (Id) that is /not/ a coercion variable
isNonCoVarId (Id { id_details = details }) = not (isCoVarDetails details)
isNonCoVarId _                             = False

isLocalId :: Var -> Bool
isLocalId (Id { idScope = LocalId _ }) = True
isLocalId _                            = False

-- | 'isLocalVar' returns @True@ for type variables as well as local 'Id's
-- These are the variables that we need to pay attention to when finding free
-- variables, or doing dependency analysis.
isLocalVar :: Var -> Bool
isLocalVar v = not (isGlobalId v)

isGlobalId :: Var -> Bool
isGlobalId (Id { idScope = GlobalId }) = True
isGlobalId _                           = False

-- | 'mustHaveLocalBinding' returns @True@ of 'Id's and 'TyVar's
-- that must have a binding in this module.  The converse
-- is not quite right: there are some global 'Id's that must have
-- bindings, such as record selectors.  But that doesn't matter,
-- because it's only used for assertions
mustHaveLocalBinding        :: Var -> Bool
mustHaveLocalBinding var = isLocalVar var

-- | 'isExportedIdVar' means \"don't throw this away\"
isExportedId :: Var -> Bool
isExportedId (Id { idScope = GlobalId })        = True
isExportedId (Id { idScope = LocalId Exported}) = True
isExportedId _ = False
|
tjakway/ghcjvm
|
compiler/basicTypes/Var.hs
|
bsd-3-clause
| 17,675
| 0
| 19
| 4,931
| 2,872
| 1,597
| 1,275
| 230
| 1
|
module Wash.Locking (obtainLock, releaseLock) where
import Wash.Auxiliary
import Directory
import IO
import System
obtainLock :: FilePath -> IO ()
releaseLock :: FilePath -> IO ()
lockPath name = name ++ ".lockdir"
-- | Block until the lock for @name@ is acquired.  The lock is a
-- directory, whose creation is atomic; 'assertDirectoryExists' runs the
-- retry action when the directory already exists.  The retry waits one
-- second via an external @sleep@ process before trying again.
obtainLock name =
    assertDirectoryExists (lockPath name) retry
  where
    retry = system "sleep 1" >> obtainLock name
-- | Release the lock previously taken by 'obtainLock' by deleting its
-- marker directory.
releaseLock name = removeDirectory (lockPath name)
|
florianpilz/autotool
|
src/Wash/Locking.hs
|
gpl-2.0
| 394
| 0
| 8
| 83
| 117
| 61
| 56
| 13
| 1
|
module Utils (v3toGL, transMatrix) where
import Linear
import qualified Graphics.Rendering.OpenGL as GL
-- | Convert a 'Double'-valued vector into the 'GL.GLfloat' vector form
-- expected by the OpenGL API, narrowing each component.
v3toGL :: V3 Double -> V3 GL.GLfloat
v3toGL = fmap realToFrac
-- | Build a pure-translation 4x4 transformation matrix for the given
-- position.  The rotation part is the identity: an axis-angle rotation
-- with a zero angle.  (The previous code wrapped the literal angle in a
-- redundant 'realToFrac'; a numeric literal is already polymorphic.)
transMatrix :: V3 Double -> M44 GL.GLfloat
transMatrix pos =
  mkTransformation (axisAngle (V3 1 1 1) 0) (v3toGL pos)
|
sgillis/HaskHull
|
src/Utils.hs
|
gpl-3.0
| 314
| 0
| 9
| 60
| 114
| 60
| 54
| 9
| 1
|
import System.Process
import System.IO
import System.Posix
import System.Exit
-- Scratch file handed to the child process.
tmpfile = "process007.tmp"
main = do
  writeFile tmpfile "You bad pie-rats!\n"
  -- First run: the fd is inherited normally, so the child can read it.
  fd <- handleToFd =<< openFile tmpfile ReadMode
  rawSystem "./process007_fd" [show fd]
  closeFd fd
  -- Second run: rebind 'fd' (shadowing is deliberate in this test) and
  -- launch the child with close_fds = True, so the descriptor must NOT
  -- be inherited and the child is expected to fail.  Its stderr is
  -- discarded via /dev/null to keep the test output stable.
  fd <- handleToFd =<< openFile tmpfile ReadMode
  nul <- openFile "/dev/null" WriteMode
  (_,_,_,p) <- createProcess (shell ("./process007_fd " ++ show fd))
                  { close_fds = True,
                    std_err = UseHandle nul }
  e <- waitForProcess p
  case e of
    ExitSuccess -> putStrLn "eek!"
    _ -> putStrLn "failed, as expected"
  closeFd fd
|
DavidAlphaFox/ghc
|
libraries/process/tests/process007.hs
|
bsd-3-clause
| 661
| 0
| 14
| 183
| 199
| 95
| 104
| 20
| 2
|
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
-- | This module defines Locations. These are used to represent how
-- to get at values during compilation.
module IR.FlatIR.Location(
) where
-- | Locations are stored in ValMaps to indicate how a given variable
-- is represented.
data Location =
-- | A variable stored in an SSA binding
BindLoc !LLVM.ValueRef
-- | A variable stored in a memory location
| MemLoc Type !Mutability !LLVM.ValueRef
-- | A value located in a
| StructLoc !(UArray Fieldname Word)
-- | A variant, which refers to
|
emc2/chill
|
src/IR/FlatIR/Location.hs
|
bsd-3-clause
| 2,130
| 0
| 9
| 385
| 87
| 64
| 23
| 14
| 0
|
{-# language GADTSyntax #-}
{-# language KindSignatures #-}
{-# language UnliftedFFITypes #-}
{-# language ForeignFunctionInterface #-}
{-# language MagicHash #-}
{-# language UnboxedTuples #-}
-- Test for shims when passing an array of unlifted values
-- to a foreign function.
-- See test T16650a for more commentary.
import GHC.Exts
import GHC.Word
import GHC.IO
import Data.Kind (Type)
main :: IO ()
main = do
  -- Two distinct singleton byte arrays.
  mb0 <- luckySingleton
  mb1 <- luckySingleton
  mbs <- newByteArrays 2
  -- Both slots hold the same array: the C check should report
  -- a homogeneous doubleton.
  writeByteArrays mbs 0 mb0
  writeByteArrays mbs 1 mb0
  case box mbs of
    Box x -> print =<< c_is_doubleton_homogeneous (unsafeCoerce# x)
  -- Replace slot 1 with a different array and check again; this time
  -- the result should differ.
  writeByteArrays mbs 1 mb1
  case box mbs of
    Box x -> print =<< c_is_doubleton_homogeneous (unsafeCoerce# x)
foreign import ccall unsafe "is_doubleton_homogenous"
c_is_doubleton_homogeneous :: MutableArrayArray# RealWorld -> IO Word8
data Box :: Type where
Box :: (Any :: TYPE 'UnliftedRep) -> Box
-- An array of bytes
data MutableByteArray :: Type where
MutableByteArray :: MutableByteArray# RealWorld -> MutableByteArray
-- A mutable array of mutable byte arrays
data MutableByteArrays :: Type where
MutableByteArrays :: MutableArrayArray# RealWorld -> MutableByteArrays
box :: MutableByteArrays -> Box
{-# noinline box #-}
box (MutableByteArrays x) = Box (unsafeCoerce# x)
luckySingleton :: IO MutableByteArray
luckySingleton = IO $ \s0 -> case newByteArray# 1# s0 of
(# s1, marr# #) -> case writeWord8Array# marr# 0# 42## s1 of
s2 -> (# s2, MutableByteArray marr# #)
readByteArray :: MutableByteArray -> Int -> IO Word8
readByteArray (MutableByteArray b#) (I# i#) = IO $ \s0 ->
case readWord8Array# b# i# s0 of
(# s1, w #) -> (# s1, W8# w #)
-- Write a mutable byte array to the array of mutable byte arrays
-- at the given index.
writeByteArrays :: MutableByteArrays -> Int -> MutableByteArray -> IO ()
writeByteArrays (MutableByteArrays maa#) (I# i#) (MutableByteArray a) = IO $ \s0 ->
case writeMutableByteArrayArray# maa# i# a s0 of
s1 -> (# s1, () #)
-- Allocate a new array of mutable byte arrays. All elements are
-- uninitialized. Attempting to read them will cause a crash.
newByteArrays :: Int -> IO MutableByteArrays
newByteArrays (I# len#) = IO $ \s0 -> case newArrayArray# len# s0 of
(# s1, a# #) -> (# s1, MutableByteArrays a# #)
|
sdiehl/ghc
|
testsuite/tests/ffi/should_run/T16650b.hs
|
bsd-3-clause
| 2,322
| 1
| 14
| 421
| 593
| 305
| 288
| -1
| -1
|
-- | Interleave two strings character by character: first characters of
-- each, then second characters, and so on.  The original definition was
-- partial (pattern-match failure on inputs of unequal length); this
-- version is total and appends any leftover tail unchanged, which is
-- backward-compatible for equal-length inputs.
mingle :: String -> String -> String
mingle (p:ps) (q:qs) = p : q : mingle ps qs
mingle ps qs = ps ++ qs
-- | Read two lines from stdin and print their character-wise
-- interleaving.
main = putStrLn =<< (mingle <$> getLine <*> getLine)
|
EdisonAlgorithms/HackerRank
|
practice/fp/recursion/string-mingling/string-mingling.hs
|
mit
| 167
| 0
| 8
| 47
| 98
| 48
| 50
| 7
| 1
|
module Rushhour.Quiz where
import qualified Rushhour.Data as I
import qualified Rushhour.Config as C
import Rushhour.Solve
import Autolib.Util.Zufall
import Autolib.Util.Sort
import Autolib.FiniteMap
import Autolib.ToDoc
import Data.Array
import Control.Monad
import System.IO
roll0 c = do
( i, (k,zs) : _ ) <- do
i <- create c
hPutStrLn stderr $ show $ I.present i
let kzs = solutions ( C.max_solution c ) ( C.max_search_width c ) i
hPutStrLn stderr $ show $ take 5 $ map (length . snd ) kzs
return ( i, kzs )
`repeat_until` \ ( i, kzs ) ->
case kzs of
[] -> False
(k , zs) : _ -> length zs >= C.min_solution c
return ( i, zs )
roll2 c = do
i <- create_solvable c
let kzs = reachables1 ( C.max_solution c ) ( C.max_search_width c ) i
sequence_ $ do
(k, zs) <- kzs
let sols = solutions ( C.max_solution c ) ( C.max_search_width c ) k
return $ case sols of
[] -> hPutStr stderr "."
( j, ms ) : _ -> do
print $ vcat [ I.present k , toDoc ms, toDoc ( length ms ) ]
roll1 c = do
i <- create_only_target c
handler c i
create c = do
i <- create_only_target c
add_cars c i
create_solvable c = create c `repeat_until` solvable c
solvable c i = not
$ null $ solutions ( C.max_solution c ) ( C.max_search_width c ) i
names = do x <- ['A' .. ] ; return $ read [x]
handler c i = do
hPutStrLn stderr $ show $ I.present i
let zks = next i
hPutStrLn stderr $ show $ length zks
let sols = solutions ( C.max_solution c ) ( C.max_search_width c ) i
if null sols
then do
hPutStrLn stderr "unsolvable"
i <- remove_car i
handler c i
else case sols of
(k,zs) : _ -> do
let l = length zs
hPutStrLn stderr $ "solution of length " ++ show l
if l >= C.min_solution c
then return i
else adder c i
adder c i = do
let free n = not $ n `elem` keysFM ( I.cars i )
name = head $ filter free names
i <- add_car c i name
handler c i
create_only_target :: RandomC m
=> C.Config
-> m I.Instance
create_only_target c = do
o <- eins [ minBound .. maxBound ]
p <- case o of
I.Vertical -> do
x <- randomRIO ( negate $ C.height c , C.height c )
return (x, negate $ C.width c )
I.Horizontal -> do
y <- randomRIO ( negate $ C.width c , C.width c )
return (negate $ C.height c, y )
e <- randomRIO ( C.min_extension c, C.max_extension c )
let car = I.Car { I.extension = e, I.position = p, I.orientation = o }
name = read "X"
let i = I.Instance
{ I.width = C.width c
, I.height = C.height c
, I.cars = listToFM [ ( name, car ) ]
, I.target = name
}
return i
pick_target i = do
t <- eins $ keysFM $ I.cars i
return $ i { I.target = t }
add_cars c i =
let names = do x <- [ 'A' .. ] ; return $ read [ x ]
in foldM ( add_car c ) i $ take ( C.num_cars c ) names
-- | All board cells covered by a car: starting at its position, step
-- @extension@ times along the unit offset of its orientation.  The list
-- monad enumerates each step index @d@.
positions car = do
    let ( dx, dy ) = I.offset $ I.orientation car
    d <- [ 0 .. I.extension car - 1 ]
    let (x,y) = I.position car
    return (x+d*dx, y+d*dy)
remove_car i =
case filter ( not . ( == I.target i ) ) $ keysFM $ I.cars i of
[] -> return i
ns -> do
name <- eins ns
return $ i { I.cars = delFromFM ( I.cars i ) name }
add_car c i name = do
let emin = C.min_extension c
case I.spaces emin i of
[] -> return i
sp -> do
let third (x,y,z) = z
( p, o, emax ) <- vorderes $ sortBy ( negate . third) sp
e <- -- return $ min ( C.max_extension c ) emax
randomRIO ( emin, min ( C.max_extension c ) emax )
let car = I.Car
{ I.orientation = o, I.extension = e, I.position = p }
let i' = i { I.cars = addToFM ( I.cars i ) name car }
when ( not $ I.consistent i' ) $ error $ show $ vcat
[ text "before:" <+> I.present i
, text "add :" <+> toDoc (name, car)
, text "after :" <+> I.present i'
]
return $ i'
-- | Randomly pick an element, biased towards the front: at each
-- position a fair coin decides between taking the current element and
-- moving on, so element @i@ is chosen with probability 2^-(i+1) and the
-- last element absorbs the remainder.  NOTE(review): partial — there is
-- no clause for the empty list.
vorderes [x] = return x
vorderes (x : xs) = do
    i <- randomRIO ( 0 :: Int, 1 )
    if 0 == i
        then return x
        else vorderes xs
add_car0 c i name = do
let occ = I.occupied i
free car = and $ do
p <- positions car
return $ inRange ( bounds occ ) p && null ( occ ! p )
car <- do
e <- randomRIO ( C.min_extension c, C.max_extension c )
o <- eins [ I.Horizontal, I.Vertical ]
let ( dx, dy ) = I.offset o
let ( ex, ey ) = ( e * dx, e * dy )
x <- randomRIO ( negate ( I.width i ), I.width i - ex + 1 )
y <- randomRIO ( negate ( I.height i ), I.height i - ey + 1 )
return $ I.Car
{ I.orientation = o, I.extension = e, I.position = (x,y) }
`repeat_until` free
return $ i { I.cars = addToFM ( I.cars i ) name car }
|
florianpilz/autotool
|
src/Rushhour/Quiz.hs
|
gpl-2.0
| 5,212
| 2
| 21
| 1,939
| 2,236
| 1,101
| 1,135
| 142
| 3
|
module Robots.QSearch where
import Robots.Solver
import Robots.Config
import Robots.Data
import Robots.Examples
import Autolib.Util.Hide
import Autolib.ToDoc
import Control.Monad ( guard, when )
import Data.Set ( Set )
import Data.Maybe
import qualified Data.Set as S
import System.IO
-- | Best-first search over a graph of configurations.  The frontier is
-- a 'Set' of (badness, depth, configuration, hidden moves) tuples, so
-- 'S.minView' always expands the node with the smallest badness first
-- (ties broken by depth, then configuration).  The 'Hide' wrapper
-- presumably keeps the move list out of the 'Ord' comparison — see
-- Autolib.Util.Hide.  NOTE: moves are accumulated with (:), so each
-- returned move list is in reverse order of execution.
search :: Ord a
       => ( a -> [ (z, a) ] ) -- ^ ( move, neighbour reached )
       -> ( a -> Double ) -- ^ badness (0 == finished)
       -> a -- ^ start
       -> [(Double, a, [z])] -- ^ list of good move sequences
search neigh badness start =
    let helper done todo = case S.minView todo of
            Nothing -> []
            Just (t @ ( b, k, c, Hide zs ), odo ) ->
                -- All not-yet-visited neighbours of the expanded node.
                let next = S.fromList $ do
                        ( z, c' ) <- neigh c
                        guard $ not $ S.member c' done
                        return ( badness c', k+1, c', Hide $ z : zs )
                in ( b, c, zs )
                   : helper ( S.insert c done )
                            ( S.union odo next )
    in helper S.empty $ S.singleton ( badness start, 0, start, Hide [] )
-- | Heuristic cost of a configuration; 0 means solved.  (The
-- commented-out factors show earlier experiments with scaling.)
badness c = fromIntegral ( goal_distance c ) -- * area c )
    -- / ( fromIntegral ( length $ robots c ) )
-- | Sum over all robots of the Manhattan distance from the robot's
-- position to its target; robots without a target contribute 0.
goal_distance c = sum $ do
    r <- robots c
    return $ case ziel r of
        Nothing -> 0
        Just (x,y) -> let (a,b) = position r
                      in abs ( (a-x) ) + abs ( (b-y) )
-- | Keep only entries whose first component is strictly smaller than
-- that of every previously kept entry, preserving the order of first
-- occurrence.
decreasing [] = []
decreasing (best@(bound, _, _) : others) =
    best : decreasing [ t | t@(v, _, _) <- others, v < bound ]
-- | Run the best-first search from configuration @k@, looking at no
-- more than 100 expanded states and stopping at the first solved state
-- (badness <= 0).  Progress is traced to stderr; returns the last
-- (badness, configuration, moves) triple found.  'last' is safe here
-- because the search always yields at least the start state.
qsolve k = do
    let bczs = takeUntil ( \ (b,_,_) -> b <= 0 )
             -- $ decreasing
             $ take 100
             $ search znachfolger_all_onboard badness k
    -- Verbose per-state dump, currently disabled via 'when False'.
    when False $ mapM_ ( \ (b,c,zs) ->
        print $ besides [ vcat [ toDoc b, toDoc ( length zs ) ]
                        , nice c, toDoc zs
                        ]
       ) bczs
    -- Compact trace: the first four characters of each badness value.
    when True $ mapM_ ( \ (b,c,zs) -> do
        hPutStr stderr $ ( take 4 $ show b ) ++ " "
       ) bczs
    print $ text "qsolve:" <+> toDoc ( length bczs )
    return $ last bczs
-- | True iff every robot that has a target ('ziel') already sits on it.
-- Robots whose 'ziel' is 'Nothing' are skipped: a failed pattern match
-- in the list monad simply produces no element.
ist_final k = and $ do
    r @ Robot { ziel = Just z, position = p } <- robots k
    return $ z == p
-- | Prefix of the list up to and including the first element that
-- satisfies the predicate; the whole list if no element does.
takeUntil _ [] = []
takeUntil p (x : xs)
    | p x = [x]
    | otherwise = x : takeUntil p xs
|
Erdwolf/autotool-bonn
|
src/Robots/QSearch.hs
|
gpl-2.0
| 2,279
| 15
| 21
| 882
| 911
| 478
| 433
| 57
| 2
|
-------------------------------------------------------------------------------
-- |
-- Module : System.Hardware.Haskino.ShallowDeepPlugin.Utils
-- Copyright : (c) University of Kansas
-- License : BSD3
-- Stability : experimental
--
-- Shallow Deep Plugin Utility Functions
-------------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ExplicitForAll #-}
module System.Hardware.Haskino.ShallowDeepPlugin.Utils (absExpr,
absVar,
anyExprClassType,
buildDictionaryT,
buildDictionaryTyConT,
buildDictionaryTyConTs,
buildId,
collectLets,
fmapAbsExpr,
fmapRepBindReturn,
fmapRepExpr,
isExprClassType,
isBindTopLevel,
modId,
repExpr,
stringToId,
stringToId_maybe,
thNameToId,
thNameToTyCon,
thNameTyToDict,
thNameTysToDict,
thNameTyToTyConApp,
PassCoreM(..),
pattern (:$),
varString,
-- DSL specific names
eitherTyConTH,
exprClassTyConTH,
exprTyConTH,
monadCondTyConTH,
monadIterateTyConTH,
monadTyConTH,
absNameTH,
eqNameTH,
repNameTH,
ifThenElseNameTH,
ifThenElseEitherNameTH,
ifBNameTH,
iterateETH,
leftNameTH,
leftZeroNameTH,
litZeroNameTH,
rightNameTH,
-- General Haskell names
apNameTH,
andNameTH,
bindNameTH,
functTyConTH,
intTyConTH,
listTyConTH,
unitTyConTH,
unitValueTH,
bindThenNameTH,
falseNameTH,
fmapNameTH,
monadClassTyConTH,
notNameTH,
returnNameTH) where
import Control.Arrow (second)
import Data.Char
import Data.Functor
import DsBinds
import DsMonad (initDsTc)
import Encoding (zEncodeString)
import ErrUtils
import GhcPlugins
import qualified Language.Haskell.TH as TH
import MonadUtils
import OccName
import TcRnMonad
import TcSMonad
import TcSimplify
import TcEvidence
import Var
import System.Hardware.Haskino.ShallowDeepPlugin.Typechecker (initTcFromModGuts)
-- The following line contain the imports specific to the DSL language
-- being trnasformed, as well as Template Haskell definintions of the
-- DSL Monad and Expr types, names for the Worker/Wrapper abs/rep,
-- and names of the conditionals in the DSL.
import qualified System.Hardware.Haskino
eitherTyConTH :: TH.Name
eitherTyConTH = ''System.Hardware.Haskino.ExprEither
exprClassTyConTH :: TH.Name
exprClassTyConTH = ''System.Hardware.Haskino.ExprB
exprTyConTH :: TH.Name
exprTyConTH = ''System.Hardware.Haskino.Expr
monadCondTyConTH :: TH.Name
monadCondTyConTH = ''System.Hardware.Haskino.ArduinoConditional
monadTyConTH :: TH.Name
monadTyConTH = ''System.Hardware.Haskino.Arduino
monadIterateTyConTH :: TH.Name
monadIterateTyConTH = ''System.Hardware.Haskino.ArduinoIterate
absNameTH :: TH.Name
absNameTH = 'System.Hardware.Haskino.abs_
eqNameTH :: TH.Name
eqNameTH = 'System.Hardware.Haskino.eqE
repNameTH :: TH.Name
repNameTH = 'System.Hardware.Haskino.rep_
ifThenElseNameTH :: TH.Name
ifThenElseNameTH = 'System.Hardware.Haskino.ifThenElseE
ifThenElseEitherNameTH :: TH.Name
ifThenElseEitherNameTH = 'System.Hardware.Haskino.ifThenElseEither
ifBNameTH :: TH.Name
ifBNameTH = 'System.Hardware.Haskino.ifBE
iterateETH :: TH.Name
iterateETH = 'System.Hardware.Haskino.iterateE
leftNameTH :: TH.Name
leftNameTH = 'System.Hardware.Haskino.ExprLeft
leftZeroNameTH :: TH.Name
leftZeroNameTH = 'System.Hardware.Haskino.exprLeft
litZeroNameTH :: TH.Name
litZeroNameTH = 'System.Hardware.Haskino.litZero
rightNameTH :: TH.Name
rightNameTH = 'System.Hardware.Haskino.ExprRight
litUnitNameTH :: TH.Name
litUnitNameTH = 'System.Hardware.Haskino.LitUnit
litBNameTH :: TH.Name
litBNameTH = 'System.Hardware.Haskino.LitB
litW8NameTH :: TH.Name
litW8NameTH = 'System.Hardware.Haskino.LitW8
litW16NameTH :: TH.Name
litW16NameTH = 'System.Hardware.Haskino.LitW16
litW32NameTH :: TH.Name
litW32NameTH = 'System.Hardware.Haskino.LitW32
litI8NameTH :: TH.Name
litI8NameTH = 'System.Hardware.Haskino.LitI8
litI16NameTH :: TH.Name
litI16NameTH = 'System.Hardware.Haskino.LitI16
litI32NameTH :: TH.Name
litI32NameTH = 'System.Hardware.Haskino.LitI32
litINameTH :: TH.Name
litINameTH = 'System.Hardware.Haskino.LitI
litL8NameTH :: TH.Name
litL8NameTH = 'System.Hardware.Haskino.LitList8
litFloatNameTH :: TH.Name
litFloatNameTH = 'System.Hardware.Haskino.LitFloat
litPinModeNameTH :: TH.Name
litPinModeNameTH = 'System.Hardware.Haskino.LitPinMode
exprClassNames :: [TH.Name]
exprClassNames = [litUnitNameTH, litBNameTH, litW8NameTH, litW16NameTH,
litW32NameTH, litI8NameTH, litI16NameTH, litI32NameTH,
litINameTH, litL8NameTH, litFloatNameTH, litPinModeNameTH]
isExprClassType :: PassCoreM m => Type -> m Bool
isExprClassType ty = do
ectys <- exprClassTypes
return $ any (eqType ty) ectys
where
exprClassTypes :: PassCoreM m => m [Type]
exprClassTypes = mapM thNameToReturnBaseType exprClassNames
anyExprClassType :: PassCoreM m => [Type] -> m Bool
anyExprClassType tys = do
anyM isExprClassType tys
isBindTopLevel :: Var -> Bool
isBindTopLevel b =
case unfoldingInfo (idInfo b) of
cu@CoreUnfolding{ } -> uf_is_top cu
_ -> False
-- The following lines contain definitions of Template Haskell namde
-- for standard Haskell functions.
functTyConTH :: TH.Name
functTyConTH = ''Data.Functor.Functor
monadClassTyConTH :: TH.Name
monadClassTyConTH = ''Prelude.Monad
unitTyConTH :: TH.Name
unitTyConTH = ''()
unitValueTH :: TH.Name
unitValueTH = '()
listTyConTH :: TH.Name
listTyConTH = ''[]
intTyConTH :: TH.Name
intTyConTH = ''Int
bindNameTH :: TH.Name
bindNameTH = '(>>=)
bindThenNameTH :: TH.Name
bindThenNameTH = '(>>)
falseNameTH :: TH.Name
falseNameTH = 'Prelude.False
trueNameTH :: TH.Name
trueNameTH = 'Prelude.True
fmapNameTH :: TH.Name
fmapNameTH = '(<$>)
apNameTH :: TH.Name
apNameTH = '($)
returnNameTH :: TH.Name
returnNameTH = 'Prelude.return
notNameTH :: TH.Name
notNameTH = 'not
andNameTH :: TH.Name
andNameTH = '(&&)
-- An infix pattern synonym for `App` to make applications with multiple
-- arguments easier to manipulate:
infixl 0 :$
pattern (:$) :: forall t. Expr t -> Arg t -> Expr t
pattern f :$ x = App f x
class (Monad m, MonadIO m) => PassCoreM m where
-- | 'CoreM' can be lifted to this monad.
liftCoreM :: CoreM a -> m a
getModGuts :: m ModGuts
instance PassCoreM CoreM where
liftCoreM = id
getModGuts = error "Cannot get modguts from CoreM"
-- | Resolve a Template Haskell 'TH.Name' to a GHC 'Id' via
-- 'thNameToGhcName'.  Calls 'error' when the name cannot be found in
-- the current environment.
thNameToId :: PassCoreM m => TH.Name -> m Id
thNameToId n = do
  name_m <- liftCoreM $ thNameToGhcName n
  case name_m of
    (Just name) -> liftCoreM $ lookupId name
    _ -> error "Unable to Lookup ID"
stringToId_maybe :: PassCoreM m => String -> m (Maybe Id)
stringToId_maybe str = do
let lookId x = do
id' <- liftCoreM $ lookupId $ gre_name x
return $ Just id'
guts <- getModGuts
let gres = lookupGlobalRdrEnv (mg_rdr_env guts) (mkVarOcc str)
case gres of
[] -> return Nothing
[x] -> lookId x
x:_ -> lookId x -- Need to fix this, if there are multiples, need to
-- find one we are looking for.
-- | Resolve a plain string to an 'Id' via 'stringToId_maybe', calling
-- 'error' (with the offending string in the message) when the lookup
-- fails.
stringToId :: PassCoreM m => String -> m Id
stringToId str = do
  id_m <- stringToId_maybe str
  case id_m of
    (Just id') -> return id'
    _ -> error $ "Error unable to Lookup ID " ++ str ++ "."
thNameToTyCon :: PassCoreM m => TH.Name -> m TyCon
thNameToTyCon n = do
name_m <- liftCoreM $ thNameToGhcName n
case name_m of
(Just name) -> liftCoreM $ lookupTyCon name
_ -> error "Unable to Lookup TyCon"
thNameTyToDict :: PassCoreM m => TH.Name -> Type -> m CoreExpr
thNameTyToDict n ty = do
tyCon <- thNameToTyCon n
buildDictionaryTyConT tyCon ty
thNameTysToDict :: PassCoreM m => TH.Name -> [Type] -> m CoreExpr
thNameTysToDict n tys = do
tyCon <- thNameToTyCon n
buildDictionaryTyConTs tyCon tys
thNameTyToTyConApp :: PassCoreM m => TH.Name -> Type -> m Type
thNameTyToTyConApp n ty = do
tyCon <- thNameToTyCon n
return $ mkTyConApp tyCon [ty]
thNameToReturnBaseType :: PassCoreM m => TH.Name -> m Type
thNameToReturnBaseType th = do
id' <- thNameToId th
let (_, retTy) = splitFunTys $ varType id'
let (_, [baseTy]) = splitTyConApp retTy
return baseTy
-- | The unqualified textual name of an identifier.
varString :: Id -> String
varString v = occNameString (nameOccName (Var.varName v))
buildId :: PassCoreM m => String -> Type -> m Id
buildId varName' typ = do
dunique <- liftCoreM getUniqueM
let name = mkInternalName dunique (mkOccName OccName.varName varName') noSrcSpan
return $ mkLocalVar VanillaId name typ vanillaIdInfo
modId :: PassCoreM m => Id -> String -> m Id
modId v s = do
dunique <- liftCoreM getUniqueM
guts <- getModGuts
let newString = (varString v) ++ s
let newName' = mkOccName OccName.varName newString
let name = mkExternalName dunique (mg_module guts) newName' noSrcSpan
let v' = setIdUnique (setVarName v name) dunique
return v'
repExpr :: PassCoreM m => CoreExpr -> m CoreExpr
repExpr e = do
case e of
Let bind body -> do
body' <- repExpr body
return $ Let bind body'
_ -> do
let ty = exprType e
repId <- thNameToId repNameTH
repDict <- thNameTyToDict exprClassTyConTH ty
return $ mkCoreApps (Var repId) [Type ty, repDict, e]
absExpr :: PassCoreM m => CoreExpr -> m CoreExpr
absExpr e = do
case e of
Let bind body -> do
body' <- absExpr body
return $ Let bind body'
_ -> do
let (_, [ty']) = splitTyConApp $ exprType e
absId <- thNameToId absNameTH
return $ mkCoreApps (Var absId) [Type ty', e]
absVar :: PassCoreM m => Id -> m CoreExpr
absVar v = do
exprTyCon <- thNameToTyCon exprTyConTH
let exprTyConApp = mkTyConApp exprTyCon [varType v]
absId <- thNameToId absNameTH
return $ mkCoreApps (Var absId) [Type exprTyConApp, Var v]
fmapAbsExpr :: PassCoreM m => Type -> Type -> CoreExpr -> m CoreExpr
fmapAbsExpr tyConTy ty e = do
absId <- thNameToId absNameTH
exprTyConApp <- thNameTyToTyConApp exprTyConTH ty
fmapId <- thNameToId fmapNameTH
functDict <- thNameTyToDict functTyConTH tyConTy
let absApp = mkCoreApps (Var absId) [Type ty]
return $ mkCoreApps (Var fmapId) [Type tyConTy, Type exprTyConApp, Type ty,
functDict, absApp, e]
fmapRepExpr :: PassCoreM m => Type -> Type -> CoreExpr -> m CoreExpr
fmapRepExpr tyConTy ty e = do
repId <- thNameToId repNameTH
repDict <- thNameTyToDict exprClassTyConTH ty
exprTyConApp <- thNameTyToTyConApp exprTyConTH ty
fmapId <- thNameToId fmapNameTH
functDict <- thNameTyToDict functTyConTH tyConTy
let repApp = mkCoreApps (Var repId) [Type ty, repDict]
return $ mkCoreApps (Var fmapId) [Type tyConTy, Type ty, Type exprTyConApp,
functDict, repApp, e]
fmapRepBindReturn :: PassCoreM m => CoreExpr -> m CoreExpr
fmapRepBindReturn e = do
ee <- fmapRepBindReturn' e
case ee of
Right e' -> return e'
-- The following case should not happen.
Left e' -> return e'
where
fmapRepBindReturn' :: PassCoreM m => CoreExpr -> m (Either CoreExpr CoreExpr)
fmapRepBindReturn' e1 = do
let (ls, e') = collectLets e1
let (bs, e'') = collectBinders e'
let (f, args) = collectArgs e''
bindId <- thNameToId bindNameTH
thenId <- thNameToId bindThenNameTH
case f of
Var fv -> do
if fv == bindId || fv == thenId
then do
rla <- fmapRepBindReturn' $ last args
case rla of
-- If applicationg of rep_ at next was successful
Right la' -> do
let args' = init args ++ [la']
return $ Right $ mkLets ls $ mkLams bs (mkCoreApps f args')
-- Application of rep at next level was not successful
-- so apply it at this level.
Left _ -> fmapRepReturn ls bs e''
else fmapRepReturn ls bs e''
Case ec tb ty alts -> do
alts' <- fmapRepBindReturnAlts alts
return $ Right $ Case ec tb ty alts'
Let lb lbody -> do
lbody' <- fmapRepBindReturn lbody
return $ Right $ Let lb lbody'
_ -> return $ Right e1
-- Apply rep_ <$> to the end of the bind chain if possible.
-- If the end is a partially applied function, then rep_ <$>
-- will need to be applied one level
fmapRepReturn :: PassCoreM m => [CoreBind] -> [CoreBndr] -> CoreExpr -> m (Either CoreExpr CoreExpr)
fmapRepReturn ls bs e' = do
let tyCon_m = splitTyConApp_maybe $ exprType e'
case tyCon_m of
Just (tyCon,[ty]) -> do
retExpr <- fmapRepExpr (mkTyConTy tyCon) ty e'
return $ Right $ mkLets ls $ mkLams bs retExpr
_ -> return $ Left $ e'
fmapRepBindReturnAlts :: PassCoreM m => [GhcPlugins.Alt CoreBndr] -> m [GhcPlugins.Alt CoreBndr]
fmapRepBindReturnAlts [] = return []
fmapRepBindReturnAlts ((ac, b, a) : as) = do
a' <- fmapRepBindReturn a
as' <- fmapRepBindReturnAlts as
return $ (ac, b, a') : as'
-- | Peel off every outer 'Let' of an expression, returning the binders
-- in outside-in order together with the innermost body.
collectLets :: CoreExpr -> ([CoreBind], CoreExpr)
collectLets (Let b e) =
  let (rest, body) = collectLets e
  in (b : rest, body)
collectLets expr = ([], expr)
-- Adapted from HERMIT.Monad
runTcM :: PassCoreM m => TcM a -> m a
runTcM m = do
env <- liftCoreM getHscEnv
dflags <- liftCoreM getDynFlags
guts <- getModGuts
(msgs, mr) <- liftIO $ initTcFromModGuts env guts HsSrcFile False m
let showMsgs (warns, errs) = showSDoc dflags $ vcat
$ text "Errors:" : pprErrMsgBagWithLoc errs
++ text "Warnings:" : pprErrMsgBagWithLoc warns
maybe (fail $ showMsgs msgs) return mr
newCondName :: PassCoreM m => String -> m Name
newCondName nm = mkSystemVarName <$> (liftCoreM getUniqueM) <*> return (mkFastString nm)
newIdH :: PassCoreM m => String -> Type -> m Id
newIdH name ty = do name' <- newCondName name
return $ mkLocalId name' ty
-- Adapted from HERMIT
buildDictionary :: PassCoreM m => Id -> m (Id, [CoreBind])
buildDictionary evar = do
runTcM $ do
#if __GLASGOW_HASKELL__ > 710
loc <- getCtLocM (GivenOrigin UnkSkol) Nothing
#else
loc <- getCtLoc $ GivenOrigin UnkSkol
#endif
let predTy = varType evar
#if __GLASGOW_HASKELL__ > 710
nonC = mkNonCanonical $ CtWanted { ctev_pred = predTy, ctev_dest = EvVarDest evar, ctev_loc = loc }
wCs = mkSimpleWC [cc_ev nonC]
(_wCs', bnds) <- second evBindMapBinds <$> runTcS (solveWanteds wCs)
#else
nonC = mkNonCanonical $ CtWanted { ctev_pred = predTy, ctev_evar = evar, ctev_loc = loc }
wCs = mkSimpleWC [nonC]
(_wCs', bnds) <- solveWantedsTcM wCs
#endif
bnds1 <- initDsTc $ dsEvBinds bnds
return (evar, bnds1)
-- | Build a dictionary expression for the given constraint type.  A
-- fresh binder named after the (whitespace-stripped, z-encoded) pretty
-- printed type is solved by the constraint solver; when solving yields
-- a single matching non-recursive binding, its RHS is returned
-- directly, otherwise the bindings are wrapped in lets around the
-- binder.
buildDictionaryT :: PassCoreM m => Type -> m CoreExpr
buildDictionaryT ty = do
    dflags <- liftCoreM getDynFlags
    binder <- newIdH ("$d" ++ zEncodeString (filter (not . isSpace) (showPpr dflags ty))) ty
    (i,bnds) <- buildDictionary binder
    return $ case bnds of
        [NonRec v e] | i == v -> e -- the common case that we would have gotten a single non-recursive let
        _ -> mkCoreLets bnds (varToCoreExpr i)
buildDictionaryTyConT :: PassCoreM m => TyCon -> Type -> m CoreExpr
buildDictionaryTyConT tyCon ty =
buildDictionaryTyConTs tyCon [ty]
buildDictionaryTyConTs :: PassCoreM m => TyCon -> [Type] -> m CoreExpr
buildDictionaryTyConTs tyCon tys =
buildDictionaryT $ GhcPlugins.mkTyConApp tyCon tys
|
ku-fpg/kansas-amber
|
System/Hardware/Haskino/ShallowDeepPlugin/Utils.hs
|
bsd-3-clause
| 18,678
| 0
| 26
| 6,520
| 4,413
| 2,283
| 2,130
| 389
| 9
|
module Timing
( time, showTime, prettyTime
, wallTime, cpuTime, milliseconds)
where
import System.CPUTime
import System.Time
import Debug.Trace
-- Time -----------------------------------------------------------------------
-- | A point in time, sampled from both clocks at once.
data Time
        = Time
        { cpu_time :: Integer -- ^ CPU time, from 'getCPUTime' (picoseconds)
        , wall_time :: Integer -- ^ wall-clock time in picoseconds (see 'getTime')
        }
-- | Combine two 'Time's component-wise with the given operation.
zipT :: (Integer -> Integer -> Integer) -> Time -> Time -> Time
zipT op (Time c1 w1) (Time c2 w2) = Time (op c1 c2) (op w1 w2)
-- | Component-wise difference of two 'Time's.
minus :: Time -> Time -> Time
minus a b = zipT (-) a b
-- TimeUnit -------------------------------------------------------------------
-- | A conversion from picoseconds to some coarser unit.
type TimeUnit
        = Integer -> Integer

-- | Convert picoseconds to whole milliseconds (1 ms = 10^9 ps),
-- rounding towards negative infinity.
milliseconds :: TimeUnit
milliseconds = (`div` 1000000000)
-- | Read the CPU component of a 'Time' through the given unit.
cpuTime :: TimeUnit -> Time -> Integer
cpuTime unit t = unit (cpu_time t)

-- | Read the wall-clock component of a 'Time' through the given unit.
wallTime :: TimeUnit -> Time -> Integer
wallTime unit t = unit (wall_time t)
-- | Get the current time.
getTime :: IO Time
getTime =
do
cpu <- getCPUTime
TOD sec pico <- getClockTime
return $ Time cpu (pico + sec * 1000000000000)
-- | Show a time as a string, in milliseconds.
-- | Show a time as a string, in milliseconds, formatted as
-- \"wall\/cpu\".
showTime :: Time -> String
showTime t =
    show (wallTime milliseconds t) ++ "/" ++ show (cpuTime milliseconds t)
-- | Pretty print the times.
-- | Pretty print the times, one labelled millisecond figure per line.
prettyTime :: Time -> String
prettyTime t = unlines
    [ "elapsedTimeMS = " ++ show (wallTime milliseconds t)
    , "cpuTimeMS = " ++ show (cpuTime milliseconds t)
    ]
-- Timing benchmarks ----------------------------------------------------------
-- | Run an 'IO' action and measure how long it took, emitting RTS trace
-- events around it so the interval is also visible in an eventlog.
-- NOINLINE presumably keeps the measurement brackets from being
-- rearranged by inlining — confirm against the original dph sources.
time :: IO a -> IO (a, Time)
{-# NOINLINE time #-}
time p = do
    start <- getTime
    traceEventIO "dph-examples: start timing"
    x <- p
    traceEventIO "dph-examples: finished timing"
    end <- getTime
    return (x, end `minus` start)
|
agremm/Matryoshka
|
examples/lib/Timing.hs
|
bsd-3-clause
| 1,769
| 14
| 11
| 433
| 502
| 266
| 236
| 47
| 1
|
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies,
FlexibleInstances, FlexibleContexts, UndecidableInstances #-}
-- Example of improvement, due to George Russel
module Folders where
data Folder = Folder
newtype SB x = SB x
newtype SS x = SS x
data NodeArcsHidden = NodeArcsHidden
class HasSS hasS x | hasS -> x where
toSS :: hasS -> SS x
instance HasSS (SB x) x where
toSS (SB x) = (SS x)
class HMV option graph node where
modd :: option -> graph -> node value -> IO ()
instance HMV NodeArcsHidden graph node
=> HMV (Maybe NodeArcsHidden) graph node
where
modd = error "burk"
gn :: HMV NodeArcsHidden graph node
=> graph
-> SS (graph -> node Int -> IO ())
gn graph = fmapSS (\ arcsHidden -> (\ graph node -> modd arcsHidden graph node))
(toSS (error "C" :: SB (Maybe NodeArcsHidden)))
-- The call to modd gives rise to
-- HMV option graph node
-- The call to toSS gives rise to
-- HasSS (SB (Maybe NodeArcsHidden)) x
-- where (toSS (error ...)) :: SS x
-- and hence arcsHidden :: x
--
-- Then improvement should give x = Maybe NodeArcsHidden
-- and hence option=Maybe NodeArcsHidden
fmapSS :: (a->b) -> SS a -> SS b
fmapSS = error "urk"
|
oldmanmike/ghc
|
testsuite/tests/typecheck/should_compile/tc181.hs
|
bsd-3-clause
| 1,271
| 2
| 12
| 284
| 328
| 174
| 154
| 24
| 1
|
{-# LANGUAGE TemplateHaskell, TypeOperators #-}
module T7667 where
import Language.Haskell.TH
$( return [ TySynD (mkName "+") [PlainTV (mkName "a"), PlainTV (mkName "b")]
(AppT (AppT (ConT ''Either) (VarT $ mkName "a")) (VarT $ mkName "b")) ] )
|
urbanslug/ghc
|
testsuite/tests/th/T7667.hs
|
bsd-3-clause
| 266
| 0
| 16
| 57
| 105
| 54
| 51
| 5
| 0
|
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module : Network.Policy.Client
-- Copyright : (c) 2014 Stefan Bühler
-- License : MIT-style (see the file COPYING)
--
-- Maintainer : stbuehler@web.de
-- Stability : experimental
-- Portability : portable
--
-- Client implementation for policy requests.
--
-----------------------------------------------------------------------------
module Network.Policy.Client
( makeRequest
, makeRequest'
) where
import Network.Policy.Serialize
import Network.Policy.Types
import Network.Policy.Utils
import Control.Exception
import Network.Socket hiding (send, sendTo, recv, recvFrom)
import Network.Socket.ByteString
import qualified Data.Attoparsec.ByteString as A
import qualified Data.ByteString as B
{-|
Make a request to a given 'SockAddr' (creating a TCP/Unix connection to it).
-}
makeRequest :: SockAddr -> PolicyParameters -> IO PolicyAction
makeRequest addr params = do
	-- Pick the socket family (inet/inet6/unix) matching the address.
	s <- socket (sockAddrFamily addr) Stream defaultProtocol
	-- 'finally' guarantees the socket is closed even if the request throws.
	finally (makeRequest' s params) (close s)
{-|
Sends a request on an already established socket and returns the response;
doesn't close the socket. Don't pipeline requests, the parser fails if it
receives data for more than one result (and breaks the second response too by
throwing away the data).
-}
makeRequest' :: Socket -> PolicyParameters -> IO PolicyAction
makeRequest' sock params = do
	-- Serialize and send the whole request.
	raw <- formatPolicyParameters params
	sendAll sock raw
	-- Feed the parser 4096-byte chunks until it produces a result.
	result <- (recv sock 4096) >>= A.parseWith (recv sock 4096) parsePolicyAction
	case result of
		-- Any bytes after a complete response mean the caller pipelined
		-- requests, which this function does not support.
		A.Done remainder res -> do
			if not (B.null remainder)
				then error "Received too much data"
				else return res
		A.Fail _ _ msg -> error $ "Couldn't parse response: " ++ show msg
		-- The peer closed the connection mid-response.
		A.Partial _ -> error "Unexpected connection close"
|
stbuehler/haskell-mail-policy
|
src/Network/Policy/Client.hs
|
mit
| 1,863
| 8
| 16
| 293
| 339
| 183
| 156
| 28
| 4
|
module Timeline.Parser.Graph
( barParser
, lineParser
, scatterPlotParser
, stackedBarParser
, boxPlotParser
) where
import Data.Text (Text)
import qualified Data.Text as T
import Text.Megaparsec
import Text.Megaparsec.Text
import Timeline.Parser.Internal
import Timeline.Types
-- | Parse a @bar:@ chart followed by its comma-separated values.
barParser :: Parser TimeSeriesGraph
barParser = fmap BarGraph (chartTypeIntroduction "bar" *> commaDelimitedDecimals)
-- | Parse a @line:@ chart followed by its comma-separated values.
lineParser :: Parser TimeSeriesGraph
lineParser = fmap LineGraph (chartTypeIntroduction "line" *> commaDelimitedDecimals)
-- | Parse a @scatter-plot:@ chart whose points are (label, x, y) tuples.
scatterPlotParser :: Parser TimeSeriesGraph
scatterPlotParser = fmap ScatterPlotGraph (chartTypeIntroduction "scatter-plot" *> commaDelimitedThreeTuples)
-- | Parse a @box-plot:@ chart: bracketed lists that must each be empty
-- (rendered as a gap, 'Nothing') or contain exactly the five box-and-whisker
-- values low, q1, median, q3, high.
boxPlotParser :: Parser TimeSeriesGraph
boxPlotParser = do
  samples <- chartTypeIntroduction "box-plot" *> commaDelimitedLists
  if any invalidLength samples
    then fail "Box plot members do not contain the correct number of elements"
    else return (BoxGraph (map toBox samples))
  where
    -- A sample is acceptable when empty or of length five.
    invalidLength xs = not (null xs) && length xs /= 5
    toBox (low:q1:median:q3:high:_) = Just (BoxAndWhisker low q1 median q3 high)
    toBox _ = Nothing
-- | Parse a @stacked-bar:@ chart: several bracketed series that must all
-- have the same length.
stackedBarParser :: Parser TimeSeriesGraph
stackedBarParser = do
  series <- chartTypeIntroduction "stacked-bar" *> commaDelimitedLists
  if differentListLengths length series
    then fail "Stacked bar items do not have equal lengths"
    else return (StackedBarGraph series)
-- | Match the chart-type tag (e.g. @bar:@) and any following whitespace.
chartTypeIntroduction :: Text -> Parser ()
chartTypeIntroduction t = string (T.unpack t ++ ":") *> space
-- | Comma-separated doubles, each optionally preceded by whitespace.
commaDelimitedDecimals :: Parser [Double]
commaDelimitedDecimals = sepBy (space *> double) comma
-- | Comma-separated bracketed lists of doubles.
commaDelimitedLists :: Parser [[Double]]
commaDelimitedLists = sepBy (space *> brackets commaDelimitedDecimals) comma
-- | Comma-separated parenthesized @(label, x, y)@ tuples; the label runs up
-- to the first comma inside the parentheses.
commaDelimitedThreeTuples :: Parser [(Text, Double, Double)]
commaDelimitedThreeTuples = sepBy (space *> parens innerTuple) comma
  where
    innerTuple = do
      label <- T.pack <$> manyTill anyChar comma
      x <- space *> double <* comma
      y <- space *> double
      return (label, x, y)
|
joshuaclayton/timeline
|
src/Timeline/Parser/Graph.hs
|
mit
| 2,244
| 0
| 13
| 432
| 582
| 307
| 275
| 48
| 2
|
module Sequences.Geometric (
recGeoSequence,
expGeoSequence
) where
import Sequences.General
-- | Geometric sequence defined recursively: each member is the previous
-- member multiplied by the common ratio @q@, starting from @a0@.
recGeoSequence :: (Real a) => a -> a -> Sequence a
recGeoSequence a0 q = recSequence a0 (\prev -> prev * q)
-- | Geometric sequence in closed form: member @n@ is @a0 * q^n@.
expGeoSequence :: (Real a) => a -> a -> Sequence a
expGeoSequence a0 q = expSequence (\idx -> a0 * q ^ idx)
|
DevWurm/numeric-sequences-haskell
|
src/Sequences/Geometric.hs
|
mit
| 299
| 0
| 9
| 59
| 117
| 63
| 54
| 8
| 1
|
module CFDI.Types.CompanyRfc where
import CFDI.Chainable
import CFDI.Types.Type
import Data.Text (Text, pack, unpack)
import Text.Regex (mkRegex)
import Text.Regex.Posix (matchTest)
newtype CompanyRfc = CompanyRfc Text deriving (Eq, Show)
-- | Chaining yields the raw RFC text unchanged.
instance Chainable CompanyRfc where
  chain (CompanyRfc r) = r
-- | Parsing validates the whole input against the company-RFC pattern:
-- three letters, a YYMMDD date, and two alphanumerics plus a check digit.
instance Type CompanyRfc where
  parseExpr str
    | matchTest regExp str = Right . CompanyRfc $ pack str
    | otherwise = Left
                $ DoesNotMatchExpr "[A-Z&Ñ]{3}[0-9]{2}(0[1-9]|1[012])(0[1-9]|\
                                   \[12][0-9]|3[01])[A-Z0-9]{2}[0-9A]"
    where
      -- Anchored with ^...$ so the whole string must match, not a substring.
      regExp = mkRegex "^[A-Z&Ñ]{3}[0-9]{2}(0[1-9]|1[012])(0[1-9]|[12][0-9]|3\
                       \[01])[A-Z0-9]{2}[0-9A]$"
  render (CompanyRfc r) = unpack r
|
yusent/cfdis
|
src/CFDI/Types/CompanyRfc.hs
|
mit
| 774
| 0
| 9
| 188
| 177
| 94
| 83
| 16
| 0
|
{-# LANGUAGE OverloadedStrings #-}
module Qwu.Html.NewPost where
import Control.Monad
import Lucid
-- | Markup for the new-post form: a pre-filled textarea and a submit
-- button, POSTed to @/newpost@.
newPost :: Monad m => HtmlT m ()
newPost =
  form_ [action_ "/newpost", method_ "post"] $
    div_ [class_ "newPostForm"] $ do
      textarea_ [name_ "msg", rows_ "10", cols_ "50"] "Write a new post"
      div_ [class_ "button"] $
        button_ [type_ "submit"] "Submit!"
|
bryangarza/qwu
|
src/Qwu/Html/NewPost.hs
|
mit
| 434
| 0
| 16
| 131
| 134
| 69
| 65
| 14
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Data.Track.Log where
import Control.DeepSeq
import Control.Lens
import Data.Aeson
import Data.Data
import Data.Text
import Data.Time
import Data.Track.Rules
import GHC.Generics
-- | A single entry in the tracking log.
data Entry
    = Entry
    { entryTimestamp :: UTCTime -- ^ when the entry was recorded
    , entryAbout     :: Text    -- ^ the subject the entry refers to
    , entryMessage   :: Text    -- ^ free-form message body
    }
    deriving (Eq, Show, Read, Data, Typeable, Generic)
-- Generic NFData: an Entry can be fully forced.
instance NFData Entry
-- JSON codecs delegate to the shared helpers in "Data.Track.Rules"
-- (presumably applying the project-wide field-naming rules — confirm there).
instance ToJSON Entry where toJSON = trackToJSON
instance FromJSON Entry where parseJSON = trackParseJSON
|
AndrewRademacher/track
|
src/Data/Track/Log.hs
|
mit
| 658
| 0
| 8
| 203
| 137
| 80
| 57
| 20
| 0
|
{-# LANGUAGE NoImplicitPrelude #-}
{- |
Generate XML-isomorphic types from declarative descriptions.
There are two kinds of XML-isomorphic types: enumerations and records.
Enumerations are simple enum-types generated via "Data.THGen.Enum"
plus a `FromContent` instance and a `ToXML` instance which are derived
from `Read` and `Show`. Records are a bit more complicated: to define
a record you need to supply its name, a prefix for fields, and a list
of field descriptions. A field description contains the XML tag name
and repetition kind (mandatory, optional, repeated or multiplied).
The repetition kind determines both the parsing strategy and the
wrapper around the field type:
* @a@ for mandatory fields
* @Maybe a@ for optional fields
* @[a]@ for repeated fields
* @NonEmpty a@ for multiplied fields
Example 1.
> "Color" =:= enum ParserAndGenerator
> & "R"
> & "G"
> & "B"
produces
> data XmlColor
> = XmlColorR
> | XmlColorG
> | XmlColorB
> | UnknownXmlColor String
with a `FromContent` instance that expects the current element content
to be either @R@, @G@ or @B@.
Example 2.
> "Message" =:= record ParserAndGenerator
> ! "author"
> + "recipient"
> ? "message" [t|Text|]
> * "attachement"
produces
> data Message = Message
> { _mAuthor :: Author
> , _mRecipient :: NonEmpty Recipient
> , _mMessage :: Maybe Text
> , _mAttachement :: [Attachement]
> } deriving (...)
with a corresponding `FromDom` instance. Lenses are generated
automatically as well.
The examples above also demonstrate that to define the declarative
descriptions of data types we provide a terse and convenient EDSL.
To define an enumeration, use the `enum` function followed by the name
of the data type to be generated. You can optionally specify if the
enumeration is exhaustive (contains only the listed constructors) or
non-exhaustive (also contains a constructor for unknown values; this
is the default):
> "Enum1" Exhaustive =:= enum Generator
> ...
> "Enum2" NonExhaustive =:= enum Generator
> ...
To define a record, use the `record` function followed by the name of
the data type to be generated. The prefix for the record fields is
inferred automatically by taking all of the uppercase letters in the
name. You can override it manually like so:
> "Reference" "ref" =:= record Parser
> ...
To describe a record field you must supply its name as it appears
in the XML tag, prefixed by its repetition kind:
* @!@ for mandatory fields
* @?@ for optional fields
* @*@ for repeated fields
* @+@ for multiplied fields
The type of the field is inferred automatically from its name, so
if the field is called @"author"@ its type will be @Author@. You can
override the type by specifying it in quasiquotes like so:
> "Message" =:= record ParserAndGenerator
> ! "author" [t|Person|]
> ...
Both @record@ and @enum@ take a @GenType@ argument which tells it to generate
generator, parser or both. It's being used to optimize compilation time by
skipping the unnecessary TH instance generation.
-}
module Data.THGen.XML
( Exhaustiveness(..)
, PrefixName(..)
, ExhaustivenessName(..)
, GenType(..)
, record
, enum
, (!)
, (?)
, (*)
, (+)
, (!%)
, (?%)
, (&)
, (^)
, (=:=)
-- Re-exports
, Text
, P.Int
, P.Integer
, Nillable(..)
) where
import Data.THGen.Enum
import Data.THGen.Orphans ()
import Data.THGen.XML.Internal
import Data.Text
import Text.XML.Nillable
import qualified Prelude as P
|
typeable/xml-isogen
|
src/Data/THGen/XML.hs
|
mit
| 3,542
| 2
| 5
| 730
| 150
| 109
| 41
| 27
| 0
|
module Main where
import Haste
import Haste.DOM
import Haste.Events
import ProcNum
main = withElems ["fv","ty","r","result"] calculator
-- | Attach recalculation handlers to the three input elements and write the
-- present value into the result element.
-- NOTE(review): the list pattern is partial (matches exactly four elements);
-- 'main' always supplies four via 'withElems', so it holds in practice.
calculator [fv,ty,r,result] = do
  onEvent fv KeyUp $ \_ -> recalculate
  onEvent ty KeyUp $ \_ -> recalculate
  onEvent r Change $ \_ -> recalculate
  where
    recalculate = do
      -- Read the three inputs; only recompute when all of them parse as
      -- doubles, otherwise leave the result untouched.
      vfv <- getValue fv
      vty <- getValue ty
      vr <- getValue r
      case (str2dbl vfv, str2dbl vty, str2dbl vr) of
        (Just fv', Just ty', Just r') -> setProp result "innerHTML" (prtdbl2 $ calc fv' ty' r')
        _ -> return ()
-- | Present value: discount a future value @fv@ over @ty@ periods at
-- interest rate @r@, i.e. @fv / (1 + r)^ty@.
calc :: Double -> Double -> Double -> Double
calc fv ty r =
  let discount = (1.0 + r) ** ty
  in fv / discount
|
CodiePP/Hedge-o-Mat
|
hs/src/calculators/presentv/calculator.hs
|
mit
| 678
| 0
| 15
| 195
| 275
| 140
| 135
| 19
| 2
|
{-# LANGUAGE RecordWildCards #-}
module Language.PureScript.Docs.AsMarkdown (
renderModulesAsMarkdown
) where
import Control.Monad.Writer hiding (First)
import Data.Foldable (for_)
import Data.List (partition)
import qualified Language.PureScript as P
import Language.PureScript.Docs.Types
import Language.PureScript.Docs.RenderedCode
import qualified Language.PureScript.Docs.Convert as Convert
import qualified Language.PureScript.Docs.Render as Render
-- |
-- Take a list of modules and render them all in order, returning a single
-- Markdown-formatted String. Each compiler module is first converted to the
-- docs representation via 'Convert.convertModule'.
--
renderModulesAsMarkdown :: [P.Module] -> String
renderModulesAsMarkdown =
  runDocs . modulesAsMarkdown . map Convert.convertModule
-- | Render each module in turn into the shared 'Docs' writer.
modulesAsMarkdown :: [Module] -> Docs
modulesAsMarkdown = mapM_ moduleAsMarkdown
-- | Render one module: an H2 header, the module comments (if any), then
-- every declaration, with blank-line spacing between the sections.
moduleAsMarkdown :: Module -> Docs
moduleAsMarkdown Module{..} = do
  headerLevel 2 $ "Module " ++ modName
  spacer
  for_ modComments tell'
  mapM_ declAsMarkdown modDeclarations
  spacer
-- | Render a single declaration: an H4 header, a fenced code block holding
-- the declaration and its non-instance children, the fixity (if any), the
-- comments, and finally an "Instances" section when instances exist.
declAsMarkdown :: Declaration -> Docs
declAsMarkdown decl@Declaration{..} = do
  headerLevel 4 (ticks declTitle)
  spacer
  -- Instances go into their own section; all other children stay inside
  -- the main code block.
  let (instances, children) = partition (isChildInstance . cdeclInfo) declChildren
  fencedBlock $ do
    tell' (codeToString $ Render.renderDeclaration decl)
    -- Only the first child is tagged 'First' (controls "=" vs "|").
    zipWithM_ (\f c -> tell' (childToString f c)) (First : repeat NotFirst) children
  spacer
  for_ declFixity (\fixity -> fixityAsMarkdown fixity >> spacer)
  for_ declComments tell'
  unless (null instances) $ do
    headerLevel 5 "Instances"
    fencedBlock $ mapM_ (tell' . childToString NotFirst) instances
    spacer
  where
  isChildInstance (ChildInstance _ _) = True
  isChildInstance _ = False
-- | Flatten rendered code to plain text: every element contributes its
-- payload string, and 'Space' contributes a single space.
codeToString :: RenderedCode -> String
codeToString = outputWith render
  where
    render (Syntax s)  = s
    render (Ident s)   = s
    render (Ctor s _)  = s
    render (Kind s)    = s
    render (Keyword s) = s
    render Space       = " "
-- | Render an operator's fixity as an italicized Markdown line, e.g.
-- @_left-associative / precedence 5_@.
fixityAsMarkdown :: P.Fixity -> Docs
fixityAsMarkdown (P.Fixity assoc prec) =
  tell' ("_" ++ assocStr ++ " / precedence " ++ show prec ++ "_")
  where
    assocStr = case assoc of
      P.Infixl -> "left-associative"
      P.Infixr -> "right-associative"
      P.Infix -> "non-associative"
-- | Render a child declaration. Data constructors and class members are
-- indented under their parent; instances are rendered flush left.
childToString :: First -> ChildDeclaration -> String
childToString f decl@ChildDeclaration{..} =
  case cdeclInfo of
    ChildDataConstructor _ ->
      -- The first constructor is introduced with "=", the rest with "|".
      let c = if f == First then "=" else "|"
      in " " ++ c ++ " " ++ str
    ChildTypeClassMember _ ->
      " " ++ str
    ChildInstance _ _ ->
      str
  where
    str = codeToString $ Render.renderChildDeclaration decl
-- | Whether a child is the first in its declaration; decides between the
-- "=" and "|" prefix in 'childToString'.
data First
  = First
  | NotFirst
  deriving (Show, Read, Eq, Ord)
-- | Output is accumulated line by line in a Writer.
type Docs = Writer [String] ()
-- | Run the writer and join the collected lines into one String.
runDocs :: Docs -> String
runDocs = unlines . execWriter
-- | Emit a single output line.
tell' :: String -> Docs
tell' = tell . (:[])
-- | Emit a blank line.
spacer :: Docs
spacer = tell' ""
-- | Emit a Markdown header of the given level, e.g. @headerLevel 2 "Foo"@
-- emits @## Foo@.
headerLevel :: Int -> String -> Docs
headerLevel level hdr = tell' (replicate level '#' ++ " " ++ hdr)
-- | Wrap the inner docs in a fenced purescript code block.
fencedBlock :: Docs -> Docs
fencedBlock inner =
  tell' "``` purescript" >> inner >> tell' "```"
-- | Wrap a string in backticks (inline Markdown code).
ticks :: String -> String
ticks s = "`" ++ s ++ "`"
|
michaelficarra/purescript
|
src/Language/PureScript/Docs/AsMarkdown.hs
|
mit
| 3,253
| 0
| 15
| 690
| 957
| 499
| 458
| 90
| 6
|
-- | Internal helpers that provide strict atomic MutVar access.
--
-- These functions allow us to avoid the overhead of MVar as long
-- as we can factor the impure sections of code out in such a way
-- that the pure metric calculations can be executed without requiring
-- access to multiple MutVars at a time.
module Data.Metrics.Internal (
updateRef,
applyWithRef,
updateAndApplyToRef,
MV
) where
import Control.Monad.Primitive
import Data.Primitive.MutVar
-- | Perform a strict update on a MutVar. Pretty much identical to the strict variant of atomicModifyIORef.
updateRef :: PrimMonad m => MV m a -> (a -> a) -> m ()
updateRef r f = do
  -- The pair trick: the stored value is the new 'a'; the returned unit is
  -- tied to 'a' via 'seq', so forcing it below forces the new value too,
  -- preventing a thunk from accumulating inside the MutVar.
  b <- atomicModifyMutVar r (\x -> let (a, b) = (f x, ()) in (a, a `seq` b))
  b `seq` return b
{-# INLINE updateRef #-}
-- | Strictly apply a function on a MutVar while blocking other access to it.
--
-- The stored value is written back unchanged; only the function result is
-- returned (forced via the `seq` chain below).
--
-- I really think this is probably not implemented correctly in terms of being excessively strict.
applyWithRef :: PrimMonad m => MV m a -> (a -> b) -> m b
applyWithRef r f = do
  b <- atomicModifyMutVar r (\x -> let app = f x in let (a, b) = (x, app) in (a, a `seq` b))
  b `seq` return b
{-# INLINE applyWithRef #-}
-- | A function which combines the previous two, updating a value atomically
-- and then returning some value calculated with the update in a single step.
updateAndApplyToRef :: PrimMonad m => MV m a -> (a -> a) -> (a -> b) -> m b
updateAndApplyToRef r fa fb = do
  -- 'appA' becomes the new stored value; 'appB' is derived from it and is
  -- what the caller receives, with both forced through the `seq` chain.
  b <- atomicModifyMutVar r $ \x ->
    let appA = fa x in
    let appB = fb appA in
    let (a, b) = (appA, appB) in
    (a, a `seq` b)
  b `seq` return b
{-# INLINE updateAndApplyToRef #-}
-- | Shorthand for @MutVar (PrimState m)@, which is a little verbose to
-- spell out at every use site.
type MV m = MutVar (PrimState m)
|
iand675/metrics
|
src/Data/Metrics/Internal.hs
|
mit
| 1,708
| 0
| 18
| 364
| 465
| 251
| 214
| 27
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html
module Stratosphere.ResourceProperties.ECSTaskDefinitionContainerDefinition where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.ECSTaskDefinitionKeyValuePair
import Stratosphere.ResourceProperties.ECSTaskDefinitionHostEntry
import Stratosphere.ResourceProperties.ECSTaskDefinitionHealthCheck
import Stratosphere.ResourceProperties.ECSTaskDefinitionLinuxParameters
import Stratosphere.ResourceProperties.ECSTaskDefinitionLogConfiguration
import Stratosphere.ResourceProperties.ECSTaskDefinitionMountPoint
import Stratosphere.ResourceProperties.ECSTaskDefinitionPortMapping
import Stratosphere.ResourceProperties.ECSTaskDefinitionRepositoryCredentials
import Stratosphere.ResourceProperties.ECSTaskDefinitionUlimit
import Stratosphere.ResourceProperties.ECSTaskDefinitionVolumeFrom
-- | Full data type definition for ECSTaskDefinitionContainerDefinition. See
-- 'ecsTaskDefinitionContainerDefinition' for a more convenient constructor.
--
-- Machine-generated record: each field mirrors one property of the
-- CloudFormation ContainerDefinition (see the module-level link). Only
-- Image and Name are required; every other property is optional.
data ECSTaskDefinitionContainerDefinition =
  ECSTaskDefinitionContainerDefinition
  { _eCSTaskDefinitionContainerDefinitionCommand :: Maybe (ValList Text)
  , _eCSTaskDefinitionContainerDefinitionCpu :: Maybe (Val Integer)
  , _eCSTaskDefinitionContainerDefinitionDisableNetworking :: Maybe (Val Bool)
  , _eCSTaskDefinitionContainerDefinitionDnsSearchDomains :: Maybe (ValList Text)
  , _eCSTaskDefinitionContainerDefinitionDnsServers :: Maybe (ValList Text)
  , _eCSTaskDefinitionContainerDefinitionDockerLabels :: Maybe Object
  , _eCSTaskDefinitionContainerDefinitionDockerSecurityOptions :: Maybe (ValList Text)
  , _eCSTaskDefinitionContainerDefinitionEntryPoint :: Maybe (ValList Text)
  , _eCSTaskDefinitionContainerDefinitionEnvironment :: Maybe [ECSTaskDefinitionKeyValuePair]
  , _eCSTaskDefinitionContainerDefinitionEssential :: Maybe (Val Bool)
  , _eCSTaskDefinitionContainerDefinitionExtraHosts :: Maybe [ECSTaskDefinitionHostEntry]
  , _eCSTaskDefinitionContainerDefinitionHealthCheck :: Maybe ECSTaskDefinitionHealthCheck
  , _eCSTaskDefinitionContainerDefinitionHostname :: Maybe (Val Text)
  , _eCSTaskDefinitionContainerDefinitionImage :: Val Text
  , _eCSTaskDefinitionContainerDefinitionLinks :: Maybe (ValList Text)
  , _eCSTaskDefinitionContainerDefinitionLinuxParameters :: Maybe ECSTaskDefinitionLinuxParameters
  , _eCSTaskDefinitionContainerDefinitionLogConfiguration :: Maybe ECSTaskDefinitionLogConfiguration
  , _eCSTaskDefinitionContainerDefinitionMemory :: Maybe (Val Integer)
  , _eCSTaskDefinitionContainerDefinitionMemoryReservation :: Maybe (Val Integer)
  , _eCSTaskDefinitionContainerDefinitionMountPoints :: Maybe [ECSTaskDefinitionMountPoint]
  , _eCSTaskDefinitionContainerDefinitionName :: Val Text
  , _eCSTaskDefinitionContainerDefinitionPortMappings :: Maybe [ECSTaskDefinitionPortMapping]
  , _eCSTaskDefinitionContainerDefinitionPrivileged :: Maybe (Val Bool)
  , _eCSTaskDefinitionContainerDefinitionReadonlyRootFilesystem :: Maybe (Val Bool)
  , _eCSTaskDefinitionContainerDefinitionRepositoryCredentials :: Maybe ECSTaskDefinitionRepositoryCredentials
  , _eCSTaskDefinitionContainerDefinitionUlimits :: Maybe [ECSTaskDefinitionUlimit]
  , _eCSTaskDefinitionContainerDefinitionUser :: Maybe (Val Text)
  , _eCSTaskDefinitionContainerDefinitionVolumesFrom :: Maybe [ECSTaskDefinitionVolumeFrom]
  , _eCSTaskDefinitionContainerDefinitionWorkingDirectory :: Maybe (Val Text)
  } deriving (Show, Eq)
-- Serializes to a JSON object; 'Nothing' fields are dropped by 'catMaybes',
-- while the required Image and Name keys are always emitted.
instance ToJSON ECSTaskDefinitionContainerDefinition where
  toJSON ECSTaskDefinitionContainerDefinition{..} =
    object $
    catMaybes
    [ fmap (("Command",) . toJSON) _eCSTaskDefinitionContainerDefinitionCommand
    , fmap (("Cpu",) . toJSON) _eCSTaskDefinitionContainerDefinitionCpu
    , fmap (("DisableNetworking",) . toJSON) _eCSTaskDefinitionContainerDefinitionDisableNetworking
    , fmap (("DnsSearchDomains",) . toJSON) _eCSTaskDefinitionContainerDefinitionDnsSearchDomains
    , fmap (("DnsServers",) . toJSON) _eCSTaskDefinitionContainerDefinitionDnsServers
    , fmap (("DockerLabels",) . toJSON) _eCSTaskDefinitionContainerDefinitionDockerLabels
    , fmap (("DockerSecurityOptions",) . toJSON) _eCSTaskDefinitionContainerDefinitionDockerSecurityOptions
    , fmap (("EntryPoint",) . toJSON) _eCSTaskDefinitionContainerDefinitionEntryPoint
    , fmap (("Environment",) . toJSON) _eCSTaskDefinitionContainerDefinitionEnvironment
    , fmap (("Essential",) . toJSON) _eCSTaskDefinitionContainerDefinitionEssential
    , fmap (("ExtraHosts",) . toJSON) _eCSTaskDefinitionContainerDefinitionExtraHosts
    , fmap (("HealthCheck",) . toJSON) _eCSTaskDefinitionContainerDefinitionHealthCheck
    , fmap (("Hostname",) . toJSON) _eCSTaskDefinitionContainerDefinitionHostname
    , (Just . ("Image",) . toJSON) _eCSTaskDefinitionContainerDefinitionImage
    , fmap (("Links",) . toJSON) _eCSTaskDefinitionContainerDefinitionLinks
    , fmap (("LinuxParameters",) . toJSON) _eCSTaskDefinitionContainerDefinitionLinuxParameters
    , fmap (("LogConfiguration",) . toJSON) _eCSTaskDefinitionContainerDefinitionLogConfiguration
    , fmap (("Memory",) . toJSON) _eCSTaskDefinitionContainerDefinitionMemory
    , fmap (("MemoryReservation",) . toJSON) _eCSTaskDefinitionContainerDefinitionMemoryReservation
    , fmap (("MountPoints",) . toJSON) _eCSTaskDefinitionContainerDefinitionMountPoints
    , (Just . ("Name",) . toJSON) _eCSTaskDefinitionContainerDefinitionName
    , fmap (("PortMappings",) . toJSON) _eCSTaskDefinitionContainerDefinitionPortMappings
    , fmap (("Privileged",) . toJSON) _eCSTaskDefinitionContainerDefinitionPrivileged
    , fmap (("ReadonlyRootFilesystem",) . toJSON) _eCSTaskDefinitionContainerDefinitionReadonlyRootFilesystem
    , fmap (("RepositoryCredentials",) . toJSON) _eCSTaskDefinitionContainerDefinitionRepositoryCredentials
    , fmap (("Ulimits",) . toJSON) _eCSTaskDefinitionContainerDefinitionUlimits
    , fmap (("User",) . toJSON) _eCSTaskDefinitionContainerDefinitionUser
    , fmap (("VolumesFrom",) . toJSON) _eCSTaskDefinitionContainerDefinitionVolumesFrom
    , fmap (("WorkingDirectory",) . toJSON) _eCSTaskDefinitionContainerDefinitionWorkingDirectory
    ]
-- | Constructor for 'ECSTaskDefinitionContainerDefinition' containing
-- required fields as arguments. Every optional property starts as
-- 'Nothing'; use the generated lenses below to set them.
ecsTaskDefinitionContainerDefinition
  :: Val Text -- ^ 'ecstdcdImage'
  -> Val Text -- ^ 'ecstdcdName'
  -> ECSTaskDefinitionContainerDefinition
ecsTaskDefinitionContainerDefinition imagearg namearg =
  ECSTaskDefinitionContainerDefinition
  { _eCSTaskDefinitionContainerDefinitionCommand = Nothing
  , _eCSTaskDefinitionContainerDefinitionCpu = Nothing
  , _eCSTaskDefinitionContainerDefinitionDisableNetworking = Nothing
  , _eCSTaskDefinitionContainerDefinitionDnsSearchDomains = Nothing
  , _eCSTaskDefinitionContainerDefinitionDnsServers = Nothing
  , _eCSTaskDefinitionContainerDefinitionDockerLabels = Nothing
  , _eCSTaskDefinitionContainerDefinitionDockerSecurityOptions = Nothing
  , _eCSTaskDefinitionContainerDefinitionEntryPoint = Nothing
  , _eCSTaskDefinitionContainerDefinitionEnvironment = Nothing
  , _eCSTaskDefinitionContainerDefinitionEssential = Nothing
  , _eCSTaskDefinitionContainerDefinitionExtraHosts = Nothing
  , _eCSTaskDefinitionContainerDefinitionHealthCheck = Nothing
  , _eCSTaskDefinitionContainerDefinitionHostname = Nothing
  , _eCSTaskDefinitionContainerDefinitionImage = imagearg
  , _eCSTaskDefinitionContainerDefinitionLinks = Nothing
  , _eCSTaskDefinitionContainerDefinitionLinuxParameters = Nothing
  , _eCSTaskDefinitionContainerDefinitionLogConfiguration = Nothing
  , _eCSTaskDefinitionContainerDefinitionMemory = Nothing
  , _eCSTaskDefinitionContainerDefinitionMemoryReservation = Nothing
  , _eCSTaskDefinitionContainerDefinitionMountPoints = Nothing
  , _eCSTaskDefinitionContainerDefinitionName = namearg
  , _eCSTaskDefinitionContainerDefinitionPortMappings = Nothing
  , _eCSTaskDefinitionContainerDefinitionPrivileged = Nothing
  , _eCSTaskDefinitionContainerDefinitionReadonlyRootFilesystem = Nothing
  , _eCSTaskDefinitionContainerDefinitionRepositoryCredentials = Nothing
  , _eCSTaskDefinitionContainerDefinitionUlimits = Nothing
  , _eCSTaskDefinitionContainerDefinitionUser = Nothing
  , _eCSTaskDefinitionContainerDefinitionVolumesFrom = Nothing
  , _eCSTaskDefinitionContainerDefinitionWorkingDirectory = Nothing
  }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-command
ecstdcdCommand :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (ValList Text))
ecstdcdCommand = lens _eCSTaskDefinitionContainerDefinitionCommand (\s a -> s { _eCSTaskDefinitionContainerDefinitionCommand = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-cpu
ecstdcdCpu :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (Val Integer))
ecstdcdCpu = lens _eCSTaskDefinitionContainerDefinitionCpu (\s a -> s { _eCSTaskDefinitionContainerDefinitionCpu = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-disablenetworking
ecstdcdDisableNetworking :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (Val Bool))
ecstdcdDisableNetworking = lens _eCSTaskDefinitionContainerDefinitionDisableNetworking (\s a -> s { _eCSTaskDefinitionContainerDefinitionDisableNetworking = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-dnssearchdomains
ecstdcdDnsSearchDomains :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (ValList Text))
ecstdcdDnsSearchDomains = lens _eCSTaskDefinitionContainerDefinitionDnsSearchDomains (\s a -> s { _eCSTaskDefinitionContainerDefinitionDnsSearchDomains = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-dnsservers
ecstdcdDnsServers :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (ValList Text))
ecstdcdDnsServers = lens _eCSTaskDefinitionContainerDefinitionDnsServers (\s a -> s { _eCSTaskDefinitionContainerDefinitionDnsServers = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-dockerlabels
ecstdcdDockerLabels :: Lens' ECSTaskDefinitionContainerDefinition (Maybe Object)
ecstdcdDockerLabels = lens _eCSTaskDefinitionContainerDefinitionDockerLabels (\s a -> s { _eCSTaskDefinitionContainerDefinitionDockerLabels = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-dockersecurityoptions
ecstdcdDockerSecurityOptions :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (ValList Text))
ecstdcdDockerSecurityOptions = lens _eCSTaskDefinitionContainerDefinitionDockerSecurityOptions (\s a -> s { _eCSTaskDefinitionContainerDefinitionDockerSecurityOptions = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-entrypoint
ecstdcdEntryPoint :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (ValList Text))
ecstdcdEntryPoint = lens _eCSTaskDefinitionContainerDefinitionEntryPoint (\s a -> s { _eCSTaskDefinitionContainerDefinitionEntryPoint = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-environment
ecstdcdEnvironment :: Lens' ECSTaskDefinitionContainerDefinition (Maybe [ECSTaskDefinitionKeyValuePair])
ecstdcdEnvironment = lens _eCSTaskDefinitionContainerDefinitionEnvironment (\s a -> s { _eCSTaskDefinitionContainerDefinitionEnvironment = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-essential
ecstdcdEssential :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (Val Bool))
ecstdcdEssential = lens _eCSTaskDefinitionContainerDefinitionEssential (\s a -> s { _eCSTaskDefinitionContainerDefinitionEssential = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-extrahosts
ecstdcdExtraHosts :: Lens' ECSTaskDefinitionContainerDefinition (Maybe [ECSTaskDefinitionHostEntry])
ecstdcdExtraHosts = lens _eCSTaskDefinitionContainerDefinitionExtraHosts (\s a -> s { _eCSTaskDefinitionContainerDefinitionExtraHosts = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-healthcheck
ecstdcdHealthCheck :: Lens' ECSTaskDefinitionContainerDefinition (Maybe ECSTaskDefinitionHealthCheck)
ecstdcdHealthCheck = lens _eCSTaskDefinitionContainerDefinitionHealthCheck (\s a -> s { _eCSTaskDefinitionContainerDefinitionHealthCheck = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-hostname
ecstdcdHostname :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (Val Text))
ecstdcdHostname = lens _eCSTaskDefinitionContainerDefinitionHostname (\s a -> s { _eCSTaskDefinitionContainerDefinitionHostname = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-image
ecstdcdImage :: Lens' ECSTaskDefinitionContainerDefinition (Val Text)
ecstdcdImage = lens _eCSTaskDefinitionContainerDefinitionImage (\s a -> s { _eCSTaskDefinitionContainerDefinitionImage = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-links
ecstdcdLinks :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (ValList Text))
ecstdcdLinks = lens _eCSTaskDefinitionContainerDefinitionLinks (\s a -> s { _eCSTaskDefinitionContainerDefinitionLinks = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-linuxparameters
ecstdcdLinuxParameters :: Lens' ECSTaskDefinitionContainerDefinition (Maybe ECSTaskDefinitionLinuxParameters)
ecstdcdLinuxParameters = lens _eCSTaskDefinitionContainerDefinitionLinuxParameters (\s a -> s { _eCSTaskDefinitionContainerDefinitionLinuxParameters = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-logconfiguration
ecstdcdLogConfiguration :: Lens' ECSTaskDefinitionContainerDefinition (Maybe ECSTaskDefinitionLogConfiguration)
ecstdcdLogConfiguration = lens _eCSTaskDefinitionContainerDefinitionLogConfiguration (\s a -> s { _eCSTaskDefinitionContainerDefinitionLogConfiguration = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-memory
ecstdcdMemory :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (Val Integer))
ecstdcdMemory = lens _eCSTaskDefinitionContainerDefinitionMemory (\s a -> s { _eCSTaskDefinitionContainerDefinitionMemory = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-memoryreservation
ecstdcdMemoryReservation :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (Val Integer))
ecstdcdMemoryReservation = lens _eCSTaskDefinitionContainerDefinitionMemoryReservation (\s a -> s { _eCSTaskDefinitionContainerDefinitionMemoryReservation = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-mountpoints
ecstdcdMountPoints :: Lens' ECSTaskDefinitionContainerDefinition (Maybe [ECSTaskDefinitionMountPoint])
ecstdcdMountPoints = lens _eCSTaskDefinitionContainerDefinitionMountPoints (\s a -> s { _eCSTaskDefinitionContainerDefinitionMountPoints = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-name
ecstdcdName :: Lens' ECSTaskDefinitionContainerDefinition (Val Text)
ecstdcdName = lens _eCSTaskDefinitionContainerDefinitionName (\s a -> s { _eCSTaskDefinitionContainerDefinitionName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-portmappings
ecstdcdPortMappings :: Lens' ECSTaskDefinitionContainerDefinition (Maybe [ECSTaskDefinitionPortMapping])
ecstdcdPortMappings = lens _eCSTaskDefinitionContainerDefinitionPortMappings (\s a -> s { _eCSTaskDefinitionContainerDefinitionPortMappings = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-privileged
ecstdcdPrivileged :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (Val Bool))
ecstdcdPrivileged = lens _eCSTaskDefinitionContainerDefinitionPrivileged (\s a -> s { _eCSTaskDefinitionContainerDefinitionPrivileged = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-readonlyrootfilesystem
ecstdcdReadonlyRootFilesystem :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (Val Bool))
ecstdcdReadonlyRootFilesystem = lens _eCSTaskDefinitionContainerDefinitionReadonlyRootFilesystem (\s a -> s { _eCSTaskDefinitionContainerDefinitionReadonlyRootFilesystem = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-repositorycredentials
ecstdcdRepositoryCredentials :: Lens' ECSTaskDefinitionContainerDefinition (Maybe ECSTaskDefinitionRepositoryCredentials)
ecstdcdRepositoryCredentials = lens _eCSTaskDefinitionContainerDefinitionRepositoryCredentials (\s a -> s { _eCSTaskDefinitionContainerDefinitionRepositoryCredentials = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-ulimits
ecstdcdUlimits :: Lens' ECSTaskDefinitionContainerDefinition (Maybe [ECSTaskDefinitionUlimit])
ecstdcdUlimits = lens _eCSTaskDefinitionContainerDefinitionUlimits (\s a -> s { _eCSTaskDefinitionContainerDefinitionUlimits = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-user
ecstdcdUser :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (Val Text))
ecstdcdUser = lens _eCSTaskDefinitionContainerDefinitionUser (\s a -> s { _eCSTaskDefinitionContainerDefinitionUser = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-volumesfrom
ecstdcdVolumesFrom :: Lens' ECSTaskDefinitionContainerDefinition (Maybe [ECSTaskDefinitionVolumeFrom])
ecstdcdVolumesFrom = lens _eCSTaskDefinitionContainerDefinitionVolumesFrom (\s a -> s { _eCSTaskDefinitionContainerDefinitionVolumesFrom = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-containerdefinitions.html#cfn-ecs-taskdefinition-containerdefinition-workingdirectory
ecstdcdWorkingDirectory :: Lens' ECSTaskDefinitionContainerDefinition (Maybe (Val Text))
ecstdcdWorkingDirectory = lens _eCSTaskDefinitionContainerDefinitionWorkingDirectory (\s a -> s { _eCSTaskDefinitionContainerDefinitionWorkingDirectory = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/ECSTaskDefinitionContainerDefinition.hs
|
mit
| 20,747
| 0
| 13
| 1,513
| 2,686
| 1,526
| 1,160
| 174
| 1
|
-- Copyright (c) 2016-present, SoundCloud Ltd.
-- All rights reserved.
--
-- This source code is distributed under the terms of a MIT license,
-- found in the LICENSE file.
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module Kubernetes.Model.V1.ResourceRequirements
( ResourceRequirements (..)
, limits
, requests
, mkResourceRequirements
) where
import Control.Lens.TH (makeLenses)
import Data.Aeson.TH (defaultOptions, deriveJSON,
fieldLabelModifier)
import GHC.Generics (Generic)
import Kubernetes.Model.V1.Any (Any)
import Prelude hiding (drop, error, max, min)
import qualified Prelude as P
import Test.QuickCheck (Arbitrary, arbitrary)
import Test.QuickCheck.Instances ()
-- | ResourceRequirements describes the compute resource requirements.
-- Both fields are optional; the JSON keys (after the field-label
-- modifier below drops the leading underscore) are @limits@ and
-- @requests@.
data ResourceRequirements = ResourceRequirements
    { _limits :: !(Maybe Any)   -- ^ serialized as \"limits\"
    , _requests :: !(Maybe Any) -- ^ serialized as \"requests\"
    } deriving (Show, Eq, Generic)
-- Generate lenses 'limits' and 'requests' for the record fields.
makeLenses ''ResourceRequirements
-- Derive JSON instances; field labels drop their leading underscore
-- (a hypothetical \"_type_\" field would map to \"type\").
$(deriveJSON defaultOptions{fieldLabelModifier = (\n -> if n == "_type_" then "type" else P.drop 1 n)} ''ResourceRequirements)
-- | Generate the two optional fields independently.
instance Arbitrary ResourceRequirements where
    arbitrary = do
      lim <- arbitrary
      req <- arbitrary
      return (ResourceRequirements lim req)
-- | Use this method to build a ResourceRequirements with neither
-- limits nor requests set.
mkResourceRequirements :: ResourceRequirements
mkResourceRequirements =
    ResourceRequirements { _limits = Nothing, _requests = Nothing }
|
soundcloud/haskell-kubernetes
|
lib/Kubernetes/Model/V1/ResourceRequirements.hs
|
mit
| 1,643
| 0
| 14
| 428
| 275
| 166
| 109
| 31
| 1
|
-- | Re-export of the generics-based sexp-grammar helpers from
-- "Data.InvertibleGrammar.Generic"; this module defines nothing of
-- its own.
module Language.SexpGrammar.Generic
  ( -- * GHC.Generics helpers
    with
  , match
  , Coproduct (..)
  ) where

import Data.InvertibleGrammar.Generic
|
ricardopenyamari/ir2haskell
|
clir-parser-haskell-master/lib/sexp-grammar/src/Language/SexpGrammar/Generic.hs
|
gpl-2.0
| 154
| 0
| 5
| 31
| 29
| 20
| 9
| 6
| 0
|
{- |
Module : ./Static/PrintDevGraph.hs
Description : pretty printing (parts of) a LibEnv
Copyright : (c) C. Maeder, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable(DevGraph)
pretty printing (parts of) a LibEnv
-}
module Static.PrintDevGraph
( prettyLibEnv
, printTh
, prettyHistElem
, prettyHistory
, prettyGr
, prettyLEdge
, showLEdge
, dgOriginHeader
, dgOriginSpec
, showXPath
, dgLinkOriginHeader
, dgLinkOriginSpec
, dgRuleHeader
, dgRuleEdges
) where
import Syntax.Print_AS_Structured
import Syntax.AS_Structured
import Static.DgUtils
import Static.GTheory
import Static.DevGraph
import Static.History
import Common.GlobalAnnotations
import Common.LibName
import Common.Id
import Common.IRI
import Common.Consistency
import Common.Doc as Doc
import Common.DocUtils
import Common.Result
import Common.Keywords
import Common.ConvertGlobalAnnos
import Common.AnalyseAnnos
import qualified Common.Lib.SizedList as SizedList
import qualified Common.Lib.Rel as Rel
import qualified Common.Lib.Graph as Tree
import Data.Graph.Inductive.Graph as Graph
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.List
import Data.Char
-- | Pretty print a theory under the given global annotations and,
-- unless the name is 'nullIRI', a @spec <name> =@ header line.
printTh :: GlobalAnnos -> IRI -> G_theory -> Doc
printTh oga sn g =
  -- problematic list annotations are filtered first so the printed
  -- annotations match what is actually used for layout
  let ga = removeProblematicListAnnos oga in
  useGlobalAnnos ga $ pretty ga $+$ prettyGTheorySL g $+$
  sep [ if sn == nullIRI then Doc.empty else
        keyword specS <+> structIRI sn <+> equals
      , prettyGTheory (gTheorySyntax g) g]
-- | Drop list-literal annotations whose brackets clash with declared
-- precedence identifiers, then rebuild the literal map from the
-- filtered annotations.
removeProblematicListAnnos :: GlobalAnnos -> GlobalAnnos
removeProblematicListAnnos ga = let
  is = Map.keysSet $ Rel.toMap $ prec_annos ga
  la = literal_annos ga
  nla = la { list_lit = Map.filterWithKey ( \ li _ ->
    let (op, cl, cs) = getListBrackets li in
    Set.null $ Set.filter ( \ (Id ts ics _) ->
      cs == ics && isPrefixOf op ts && isSuffixOf cl ts) is)
    $ list_lit la }
  -- NOTE(review): irrefutable 'Just' match -- assumes store_literal_map
  -- cannot fail on the filtered annotations; confirm.
  Result _ (Just lm) = store_literal_map Map.empty $ convertLiteralAnnos nla
  in ga { literal_annos = nla
        , literal_map = lm }
-- * pretty instances
-- | Render a single XPath component: element name as-is, a child
-- index as @Spec[i]@.
showXPathComp :: XPathPart -> String
showXPathComp (ElemName s) = s
showXPathComp (ChildIndex i) = "Spec[" ++ show i ++ "]"

-- | Render a full XPath; the empty path prints as the root "/".
showXPath :: [XPathPart] -> String
showXPath [] = "/"
showXPath parts = concat ['/' : showXPathComp p | p <- parts]

-- | Render a graph node id as "node <i>".
showNodeId :: Node -> String
showNodeId n = "node " ++ show n
-- | A node signature prints as "node <id>:" followed by the signature.
instance Pretty NodeSig where
  pretty (NodeSig n sig) = fsep [ text (showNodeId n) <> colon, pretty sig ]

-- | Node names print via 'showName'.
instance Pretty NodeName where
  pretty n = text $ showName n
-- | The spec IRI an origin refers to, if it carries one.
dgOriginSpec :: DGOrigin -> Maybe IRI
dgOriginSpec (DGInst n) = Just n
dgOriginSpec (DGFitView n) = Just n
dgOriginSpec _ = Nothing
-- | Human-readable keyword for each development-graph node origin.
dgOriginHeader :: DGOrigin -> String
dgOriginHeader o = case o of
  DGEmpty -> "empty-spec"
  DGBasic -> "foreign-basic-spec"
  DGBasicSpec {} -> "basic-spec"
  DGExtension -> "extension"
  DGLogicCoercion -> "logic-translation"
  DGTranslation _ -> "translation"
  DGUnion -> "union"
  DGIntersect -> "intersection"
  DGExtract -> "extraction"
  DGRestriction _ _ -> "restriction"
  DGRevealTranslation -> "translation part of a revealing"
  DGFreeOrCofree v -> map toLower (show v) ++ "-spec"
  DGLocal -> "local-spec"
  DGClosed -> "closed-spec"
  DGLogicQual -> "spec with logic qualifier"
  DGData -> "data-spec"
  DGFormalParams -> "formal parameters"
  DGImports -> "arch import"
  DGInst _ -> "instantiation"
  DGFitSpec -> "fitting-spec"
  DGFitView _ -> "fitting-view"
  DGProof -> "proof-construct"
  -- 'shows n ")"' is 'show n ++ ")"'
  DGNormalForm n -> "normal-form(" ++ shows n ")"
  DGintegratedSCC -> "OWL spec with integrated strongly connected components"
  DGFlattening -> "flattening"
  DGTest -> "testing"
  DGAlignment -> "alignment"
-- | Header keyword plus optional spec IRI, followed by origin-specific
-- detail (basic-spec contents, renaming, or hidden symbols).
instance Pretty DGOrigin where
  pretty o = let prettySyms headr syms = if Set.null syms then Doc.empty
                   else text headr $+$ pretty syms
    in text (dgOriginHeader o) <+> pretty (dgOriginSpec o)
    $+$ case o of
      DGBasicSpec mgbs _ syms -> case mgbs of
          Nothing -> Doc.empty
          Just gbs -> specBraces (pretty gbs)
        $+$ prettySyms "new symbols:" syms
      DGTranslation (Renamed r) -> pretty r
      DGRestriction rst syms -> let
        prtS = prettySyms "hidden symbols:" syms
        in case rst of
          Restricted r -> pretty r $+$ prtS
          NoRestriction -> prtS
      _ -> Doc.empty
-- | Local nodes print their origin; reference nodes print the
-- referenced library and node id.
instance Pretty DGNodeInfo where
  pretty c = case c of
    DGNode {} -> pretty $ node_origin c
    DGRef {} ->
      pretty (getLibId $ ref_libname c) <+> text (showNodeId $ ref_node c)

-- | Node name followed by its info.
prettyDGNodeLab :: DGNodeLab -> Doc
prettyDGNodeLab l = sep [ text $ getDGNodeName l, pretty $ nodeInfo l]
-- | Verbose node dump: xpath, consistency status, goal/hiding flags,
-- normal form, origin, and the local theory.
instance Pretty DGNodeLab where
  pretty l = vcat
    [ text $ "xpath: " ++ showXPath (reverse $ xpath $ dgn_name l)
    , pretty $ getNodeConsStatus l
    , if hasOpenGoals l then text "has open goals" else
      if hasSenKind (const True) l then Doc.empty else text "locally empty"
    , if labelHasHiding l then text "has ingoing hiding link" else Doc.empty
    , case dgn_nf l of
        Nothing -> Doc.empty
        Just n -> text "normal form:" <+> text (showNodeId n)
    , text "origin:" $+$ pretty (nodeInfo l)
    , case dgn_sigma l of
        Nothing -> Doc.empty
        Just gm -> text "normal form inclusion:" $+$ pretty gm
    , text "local theory:"
    , pretty $ dgn_theory l]
-- | Edge identifiers print as their underlying integer.
instance Pretty EdgeId where
  pretty (EdgeId i) = text (show i)

-- | The IRI a link origin refers to, if it carries one.
dgLinkOriginSpec :: DGLinkOrigin -> Maybe IRI
dgLinkOriginSpec (DGLinkMorph n) = Just n
dgLinkOriginSpec (DGLinkInst n _) = Just n
dgLinkOriginSpec (DGLinkInstArg n) = Just n
dgLinkOriginSpec (DGLinkView n _) = Just n
dgLinkOriginSpec (DGLinkFitView n) = Just n
dgLinkOriginSpec (DGLinkFitViewImp n) = Just n
dgLinkOriginSpec (DGLinkRefinement n) = Just n
dgLinkOriginSpec _ = Nothing
-- | Fitting mappings carried by instantiation and view origins.
dgLinkMapping :: DGLinkOrigin -> [G_mapping]
dgLinkMapping (DGLinkInst _ (Fitted ms)) = ms
dgLinkMapping (DGLinkView _ (Fitted ms)) = ms
dgLinkMapping _ = []
-- | Human-readable keyword for each development-graph link origin.
dgLinkOriginHeader :: DGLinkOrigin -> String
dgLinkOriginHeader o = case o of
  SeeTarget -> "see target"
  DGLinkVerif -> "architectural verification condition"
  SeeSource -> "see source"
  DGImpliesLink -> "reversed implies link of extension"
  DGLinkExtension -> "extension"
  DGLinkTranslation -> "OMDoc translation"
  DGLinkClosedLenv -> "closed spec and local environment"
  DGLinkImports -> "OWL import"
  DGLinkIntersect -> "inclusion of intersection"
  DGLinkMorph _ -> "instantiation morphism of"
  DGLinkInst _ _ -> "instantiation of"
  DGLinkInstArg _ -> "actual parameter of"
  DGLinkView _ _ -> "view"
  DGLinkAlign _ -> "alignment"
  DGLinkFitView _ -> "fit source of"
  DGLinkFitViewImp _ -> "add import to source of"
  DGLinkProof -> "proof-link"
  DGLinkFlatteningUnion -> "flattening non-disjoint union"
  DGLinkFlatteningRename -> "flattening renaming"
  DGLinkRefinement _ -> "refinement"
  TEST -> "test"
-- | Header keyword, optional IRI, and any fitting mappings.
instance Pretty DGLinkOrigin where
  pretty o = text (dgLinkOriginHeader o) <+> pretty (dgLinkOriginSpec o)
    $+$ ppWithCommas (dgLinkMapping o)
-- | only shows the edge and node ids
showLEdge :: LEdge DGLinkLab -> String
showLEdge (s, t, l) = "edge " ++ showEdgeId (dgl_id l)
  ++ " " ++ dglName l
  -- consistency fix: the target was rendered with bare 'show t' while
  -- the source used 'showNodeId'; render both the same way
  ++ "(" ++ showNodeId s ++ " --> " ++ showNodeId t ++ ")"
-- | only print the origin and parts of the type; the default
-- 'SeeTarget' origin is suppressed as noise
prettyDGLinkLab :: (DGLinkLab -> Doc) -> DGLinkLab -> Doc
prettyDGLinkLab f l = fsep
  [ case dgl_origin l of
      SeeTarget -> Doc.empty
      o -> pretty o
  , f l ]

-- | print short edge information: ids, link type, proof status
prettyLEdge :: LEdge DGLinkLab -> Doc
prettyLEdge e@(_, _, l) = fsep
  [ text $ showLEdge e
  , prettyDGLinkLab (text . getDGLinkType) l
  , prettyThmLinkStatus $ dgl_type l ]
-- | Edge ids a proof rule refers to.
dgRuleEdges :: DGRule -> [EdgeId]
dgRuleEdges (DGRuleWithEdge _ e) = [e]
dgRuleEdges (Composition es) = es
dgRuleEdges _ = []

-- | Display name of a proof rule.
dgRuleHeader :: DGRule -> String
dgRuleHeader (DGRule str) = str
dgRuleHeader (DGRuleWithEdge str _) = str
dgRuleHeader (DGRuleLocalInference _) = "local-inference"
dgRuleHeader (Composition _) = "composition"
-- | Rule name, then either the renamed-sentence pairs of a local
-- inference or the set of involved edge ids.
instance Pretty DGRule where
  pretty r = let es = dgRuleEdges r in fsep
    [ text (dgRuleHeader r) <> if null es then Doc.empty else colon, case r of
        DGRuleLocalInference m ->
          braces $ sepByCommas $ map (\ (s, t) ->
            let d = text s in if s == t then d else pairElems d $ text t) m
        _ -> case es of
          [] -> Doc.empty
          _ -> pretty $ Set.fromList es]
-- | Open links print nothing; proven links print the rule and any
-- non-empty proof basis.
instance Pretty ThmLinkStatus where
  pretty tls = case tls of
    LeftOpen -> Doc.empty
    Proven r ls -> let s = proofBasis ls in
      fcat [parens (pretty r), if Set.null s then Doc.empty else pretty s]

-- | Thm-link status of a link type, empty when it has none.
prettyThmLinkStatus :: DGLinkType -> Doc
prettyThmLinkStatus = maybe Doc.empty pretty . thmLinkStatus
-- | Print the stronger of the required and proven consistency values;
-- 'None' prints nothing.
instance Pretty ConsStatus where
  pretty (ConsStatus cons pc thm) = case max cons pc of
    None -> Doc.empty
    c -> text (show c) <> pretty thm

-- | Free/cofree variant (if any), edge-type name, proof status and
-- consistency status of a link type.
instance Pretty DGLinkType where
  pretty t = (case t of
      FreeOrCofreeDefLink v _ -> text $ show v
      _ -> Doc.empty)
    <> text (getDGEdgeTypeModIncName $ getHomEdgeType False True t)
    <> prettyThmLinkStatus t
    $+$ pretty (getLinkConsStatus t)
-- | Verbose link dump: origin, type, pending flag, and the signature
-- morphism (with extra detail for hiding/free/cofree theorem links).
instance Pretty DGLinkLab where
  pretty l = let mor = pretty $ dgl_morphism l in vcat
    [ text "Origin:" <+> pretty (dgl_origin l)
    , text "Type:" <+> pretty (dgl_type l)
    , if dglPending l then text "proof chain incomplete" else Doc.empty
    , case dgl_type l of
        HidingFreeOrCofreeThm k n gm _ -> let nstr = showNodeId n ++ ":" in
          text ("Signature morphism from " ++ nstr)
          $+$ mor
          $+$ text ("with " ++ (case k of
                Nothing -> "hiding"
                Just v -> map toLower (show v))
              ++ " morphism:") $+$ pretty gm
        _ -> text "Signature morphism:" $+$ mor ]
-- | Pretty print a labelled node as "node <id>:" followed by the
-- rendered label.
prettyGenLNode :: (a -> Doc) -> LNode a -> Doc
prettyGenLNode render (n, lab) =
  fsep [text (showNodeId n) <> colon, render lab]

-- | Pretty print a labelled development-graph node.
prettyLNode :: LNode DGNodeLab -> Doc
prettyLNode = prettyGenLNode prettyDGNodeLab
-- | Verb describing a development-graph change.
dgChangeType :: DGChange -> String
dgChangeType (InsertNode _) = "insert"
dgChangeType (DeleteNode _) = "delete"
dgChangeType (InsertEdge _) = "insert"
dgChangeType (DeleteEdge _) = "delete"
dgChangeType (SetNodeLab _ _) = "change"
-- | Change verb followed by the affected node or edge.
instance Pretty DGChange where
  pretty c = text (dgChangeType c) <+> case c of
    InsertNode n -> prettyLNode n
    DeleteNode n -> prettyLNode n
    InsertEdge e -> prettyLEdge e
    DeleteEdge e -> prettyLEdge e
    SetNodeLab _ n -> prettyLNode n
-- | All labelled nodes of the graph followed by all labelled edges.
prettyGr :: Tree.Gr DGNodeLab DGLinkLab -> Doc
prettyGr g = vcat (map prettyLNode $ labNodes g)
  $+$ vcat (map prettyLEdge $ labEdges g)

-- | Imports print as "given <node>"; the empty import prints nothing.
prettyImport :: MaybeNode -> Doc
prettyImport imp = case imp of
  EmptyNode _ -> Doc.empty
  JustNode n -> keyword givenS <+> pretty (getNode n)

-- | The parameter node, or nothing for the empty case.
prettyAllParams :: MaybeNode -> Doc
prettyAllParams ps = case ps of
  EmptyNode _ -> Doc.empty
  JustNode n -> pretty (getNode n)
-- | Body node, parameter nodes (when present), and import.
instance Pretty ExtGenSig where
  pretty (ExtGenSig (GenSig imp params allParamSig) body) = fsep $
    pretty (getNode body) :
    (if null params then [] else
       [ pretty $ map getNode params
       , prettyAllParams allParamSig ]) ++
    [ prettyImport imp ]

-- | Source node, "to" keyword, target and morphism.
instance Pretty ExtViewSig where
  pretty (ExtViewSig src gmor ptar) = fsep
    [ pretty (getNode src) <+> text toS
    , pretty ptar
    , pretty gmor ]

-- | Parameter signatures (when present) and the unit signature.
instance Pretty UnitSig where
  pretty (UnitSig params usig _) =
    (if null params then Doc.empty else pretty $ map getSig params)
    <+> pretty (getSig usig)

instance Pretty ImpUnitSigOrSig where
  pretty iu = case iu of
    ImpUnitSig imp usig -> fsep
      [ pretty usig, prettyImport imp ]
    Sig n -> keyword specS <+> pretty (getNode n)
instance Pretty RTNodeType where
  pretty (RTPlain usig) = pretty usig
  pretty (RTRef n) = pretty n

-- | Refinement-tree node: name, diagram and type.
instance Pretty RTNodeLab where
  pretty rlab = fsep [text $ show $ rtn_name rlab,
    text $ show $ rtn_diag rlab,
    pretty $ rtn_type rlab
    ]

-- | Fallback via 'show'; a proper pretty printer is still missing.
instance Pretty RefSig where
  pretty = text . show -- missing

-- | Fallback via 'show'; a proper pretty printer is still missing.
instance Pretty AlignSig where
  pretty = text . show -- missing
-- | Each global-environment entry prints under its defining keyword
-- (spec, view/struct, unit, arch/refinement).
instance Pretty GlobalEntry where
  pretty ge = case ge of
    SpecEntry se -> topKey specS <+> pretty se
    ViewOrStructEntry b ve -> topKey (if b then viewS else structS)
      <+> pretty ve
    UnitEntry ue -> topKey unitS <+> pretty ue
    AlignEntry ae -> pretty ae
    ArchOrRefEntry b ae -> (if b then topKey archS else keyword refinementS)
      <+> pretty ae
-- | Whole development graph: body, global environment, both histories
-- (undo history is stored reversed), and the next fresh edge id.
instance Pretty DGraph where
  pretty dg = vcat
    [ prettyGr $ dgBody dg
    , text "global environment:"
    , printMap id vcat (\ k v -> fsep [k <+> mapsto, v]) $ globalEnv dg
    , text "history:"
    , prettyHistory $ reverseHistory $ proofHistory dg
    , text "redoable history:"
    , prettyHistory $ SizedList.reverse $ reverseHistory $ redoHistory dg
    , text "next edge:" <+> pretty (getNewEdgeId dg) ]
-- | A history element is a single change or a rule-named group.
prettyHistElem :: HistElem -> Doc
prettyHistElem he = case he of
  HistElem c -> pretty c
  HistGroup r l -> text "rule:" <+> pretty r $+$ space <+> prettyHistory l

-- | Print a proof history, one element per line.
prettyHistory :: ProofHistory -> Doc
prettyHistory = vcat . map prettyHistElem . SizedList.toList

-- | Print every library with its development graph.
prettyLibEnv :: LibEnv -> Doc
prettyLibEnv = printMap id vsep ($+$)
|
gnn/Hets
|
Static/PrintDevGraph.hs
|
gpl-2.0
| 13,455
| 3
| 28
| 3,325
| 4,241
| 2,096
| 2,145
| 344
| 27
|
{- |
Module : ./HolLight/HolLight2DG.hs
Description : Import data generated by hol2hets into a DG
Copyright : (c) Jonathan von Schroeder, DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : jonathan.von_schroeder@dfki.de
Stability : experimental
Portability : portable
-}
module HolLight.HolLight2DG where
import Static.GTheory
import Static.DevGraph
import Static.DgUtils
import Static.History
import Static.ComputeTheory
import Logic.Logic
import Logic.Prover
import Logic.ExtSign
import Logic.Grothendieck
import Common.LibName
import Common.Id
import Common.IRI (simpleIdToIRI)
import Common.AS_Annotation
import Common.Result
import Common.Utils
import Common.SAX
import Control.Monad
import Common.Lib.Maybe
import qualified Data.ByteString.Lazy as L
import Text.XML.Expat.SAX
import HolLight.Sign
import HolLight.Sentence
import HolLight.Term
import HolLight.Logic_HolLight
import HolLight.Helper (names)
import Driver.Options
import Data.Graph.Inductive.Graph
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import System.Exit
import System.Directory
import System.FilePath
-- | Parse a @<tuple>@ element: the first component inside a @fst@
-- tag, the second inside a @snd@ tag.
readTuple :: (Show a, Show b) => MSaxState a -> MSaxState b -> MSaxState (a, b)
readTuple parseFst parseSnd = do
  expectTag True "tuple"
  x <- readWithTag parseFst "fst"
  y <- readWithTag parseSnd "snd"
  expectTag False "tuple"
  return (x, y)
-- | Read one whitespace-trimmed word of character data, folding
-- chunks together (accumulated reversed, hence the final 'reverse').
readWord :: MSaxState String
readWord = liftM reverse $ foldCatchLeft (\ s ->
  do
    s' <- do
      dropSpaces
      d <- getD
      case d of
        -- NOTE(review): partial 'error' when input is exhausted
        [] -> error "HolLight.readWord"
        h : xs -> case h of
          CharacterData s' -> do
            putD xs
            return s'
          _ -> debugS $ "Expected character data but instead got: " ++ show h
    return $ trimLeft (reverse $ trimLeft s') ++ s) []
-- | A word wrapped in an \"s\" tag.
readStr :: MSaxState String
readStr = readWithTag readWord "s"

-- | Parse a word as 'Int'; fails hard (via 'error') on non-numeric input.
readInt :: MSaxState Int
readInt = do
  w <- readWord
  return $ fromMaybe (error "HolLight.readInt") $ readMaybe w

-- | An integer wrapped in an \"i\" tag.
readInt' :: MSaxState Int
readInt' = readWithTag readInt "i"

-- | Read an integer and resolve it through the given sharing map.
readMappedInt :: Map.Map Int a -> MSaxState a
readMappedInt m = do
  i <- readInt
  case Map.lookup i m of
    Just a -> return a
    _ -> debugS $ "readMappedInt: Integer " ++ show i ++ " not mapped"
-- | Resolve each index against the sharing map, left to right;
-- 'Nothing' as soon as any index is unmapped. This is exactly 'mapM'
-- in the 'Maybe' monad, replacing the original hand-rolled recursion.
listToTypes :: Map.Map Int HolType -> [Int] -> Maybe [HolType]
listToTypes m = mapM (`Map.lookup` m)
-- | Read one shared HOL type (TyApp or TyVar) and insert it into the
-- sharing map at the next free index; names and argument types are
-- resolved through the string map and the map built so far.
readSharedHolType :: Map.Map Int String -> Map.Map Int HolType
  -> MSaxState (Map.Map Int HolType)
readSharedHolType sl m = do
  d <- getM
  (b, t) <- tag
  case (b, t) of
    (True, "TyApp") -> do
      (i, l) <- readTuple readInt (whileM readInt')
      case (Map.lookup i sl, listToTypes m l) of
        (Just s, Just l') -> do
          expectTag False "TyApp"
          -- argument indices arrive reversed, hence the 'reverse'
          return $ Map.insert (Map.size m + 1) (TyApp s $ reverse l') m
        (r1, r2) -> debugS $ "readSharedHolType: Couldn't build TyApp"
          ++ " because the result of the lookup for "
          ++ show (i, l) ++ " was " ++ show (r1, r2)
    (True, "TyVar") -> do
      i <- readInt
      case Map.lookup i sl of
        Just s -> do
          expectTag False "TyVar"
          return $ Map.insert (Map.size m + 1) (TyVar s) m
        _ -> debugS $ "readSharedHolType: Couldn't build TyVar"
          ++ " because looking up " ++ show i
          ++ " failed"
    _ -> do
      -- not a type tag: restore the parser state before failing
      putM d
      debugS $ "readSharedHolType: Expected a hol type but"
        ++ " instead got following tag: " ++ show (b, t)
-- | Read a parse-type tag: Prefix, InfixR/InfixL (with precedence),
-- Normal or Binder.
readParseType :: MSaxState HolParseType
readParseType = do
  (b, t) <- tag
  case (b, t) of
    (True, "Prefix") -> do
      expectTag False "Prefix"
      return PrefixT
    (True, "InfixR") -> do
      i <- readInt
      expectTag False "InfixR"
      return $ InfixR i
    (True, "InfixL") -> do
      i <- readInt
      expectTag False "InfixL"
      return $ InfixL i
    (True, "Normal") -> do
      expectTag False "Normal"
      return Normal
    (True, "Binder") -> do
      expectTag False "Binder"
      return Binder
    _ -> debugS $ "readParseType: Expected a parse type but"
      ++ " instead got following tag: " ++ show (b, t)
-- | Read a parse type plus an optional (word, parse-type) pair.
-- The original code case-split on the optional pair, but both
-- branches produced @HolTermInfo (p, v)@ (in the Nothing branch
-- @v == Nothing@), so the case was redundant and is dropped here.
readTermInfo :: MSaxState HolTermInfo
readTermInfo = do
  p <- readParseType
  MaybeT $ do
    -- the pair may fail to parse; the overall computation still
    -- succeeds, recording the pair only when it was present
    v <- runMaybeT (readTuple readWord readParseType)
    return (Just (HolTermInfo (p, v)))
-- | Read one shared HOL term (Var, Const, Comb or Abs) and insert it
-- into the sharing map at the next free index; names, types and
-- subterms are resolved through the three sharing maps.
readSharedHolTerm :: Map.Map Int HolType -> Map.Map Int String
  -> Map.Map Int Term -> MSaxState (Map.Map Int Term)
readSharedHolTerm ts sl m = do
  d <- getM
  (b, tg) <- tag
  case (b, tg) of
    (True, "Var") -> do
      (n, t) <- readTuple readInt readInt
      ti <- readTermInfo
      case (Map.lookup n sl, Map.lookup t ts) of
        (Just name, Just tp) -> do
          expectTag False "Var"
          return $ Map.insert (Map.size m + 1) (Var name tp ti) m
        (r1, r2) -> debugS $ "readSharedHolTerm: Couldn't build Var"
          ++ " because the result of the lookup for "
          ++ show (n, t) ++ " was " ++ show (r1, r2)
    (True, "Const") -> do
      (n, t) <- readTuple readInt readInt
      ti <- readTermInfo
      case (Map.lookup n sl, Map.lookup t ts) of
        (Just name, Just tp) -> do
          expectTag False "Const"
          return $ Map.insert (Map.size m + 1) (Const name tp ti) m
        (r1, r2) -> debugS $ "readSharedHolTerm: Couldn't build Const"
          ++ " because the result of the lookup for "
          ++ show (n, t) ++ " was " ++ show (r1, r2)
    (True, "Comb") -> do
      (t1, t2) <- readTuple readInt readInt
      case (Map.lookup t1 m, Map.lookup t2 m) of
        (Just t1', Just t2') -> do
          expectTag False "Comb"
          return $ Map.insert (Map.size m + 1) (Comb t1' t2') m
        (r1, r2) -> debugS $ "readSharedHolTerm: Couldn't build Comb"
          ++ " because the result of the lookup for "
          ++ show (t1, t2) ++ " was " ++ show (r1, r2)
    (True, "Abs") -> do
      (t1, t2) <- readTuple readInt readInt
      case (Map.lookup t1 m, Map.lookup t2 m) of
        (Just t1', Just t2') -> do
          expectTag False "Abs"
          return $ Map.insert (Map.size m + 1) (Abs t1' t2') m
        -- fixed typo in the debug message: was "readSharedHoLTerm"
        (r1, r2) -> debugS $ "readSharedHolTerm: Couldn't build Abs"
          ++ " because the result of the lookup for "
          ++ show (t1, t2) ++ " was " ++ show (r1, r2)
    _ -> do
      -- not a term tag: restore the parser state before failing
      putM d
      debugS $ "readSharedHolTerm: Expected a hol term but"
        ++ " instead got following tag: " ++ show (b, tg)
-- | Run the dmtcp-checkpointed HOL Light image over the given file and
-- parse the exported XML back into per-library term lists plus
-- library-to-library links.
importData :: HetcatsOpts -> FilePath
  -> IO ([(String, [(String, Term)])], [(String, String)])
importData opts fp' = do
  fp <- canonicalizePath fp'
  let image = "hol_light.dmtcp"
      dmtcpBin = "dmtcp_restart"
  tmpImage <- getTempFile "" image
  imageFile <- fmap (</> image) $ getEnvDef
    "HETS_HOLLIGHT_TOOLS" "HolLight/OcamlTools/"
  -- for dmtcp we need an image owned by the current user
  copyFile imageFile tmpImage
  e2 <- doesFileExist imageFile
  unless e2 $ fail $ image ++ " not found"
  tempFile <- getTempFile "" (takeBaseName fp)
  (ex, sout, err) <- executeProcess dmtcpBin [tmpImage]
    $ "use_file " ++ show fp ++ ";;\n"
    ++ "inject_hol_include " ++ show fp ++ ";;\n"
    ++ "export_libs (get_libs()) " ++ show tempFile ++ ";;\n"
    ++ "exit 0;;\n"
  removeFile tmpImage
  s <- L.readFile tempFile
  case ex of
    ExitFailure _ -> do
      removeFile tempFile
      fail $ "HolLight.importData: " ++ err
    ExitSuccess -> do
      putIfVerbose opts 5 sout
      let e = ([], [])
      -- parse the exported XML: shared strings, then shared types and
      -- terms built on them, then libraries and library links
      (r, evl, msgs) <- return $ case runMSaxState (do
            expectTag True "HolExport"
            sl <- readL readStr "Strings"
            let strings = Map.fromList (zip [1 ..] sl)
            hol_types <- foldS (readSharedHolType strings)
              Map.empty "SharedHolTypes"
            hol_terms <- foldS (readSharedHolTerm hol_types strings)
              Map.empty "SharedHolTerms"
            libs <- readL (readTuple readWord
              (whileM (readTuple readWord
                (readMappedInt hol_terms)))) "Libs"
            liblinks <- readL (readTuple readWord readWord) "LibLinks"
            return (libs, liblinks)) (parsexml s) (verbose opts >= 6) of
          (de, msgs) -> (fromMaybe e de, "Next 5 items: "
            ++ show (take 5 $ fst msgs), fst $ snd msgs)
      case msgs of
        Just ms -> putIfVerbose opts 6 $ unlines (reverse ms) ++ evl
        Nothing -> return ()
      removeFile tempFile
      return r
-- | Collect every applied type constructor with its arity, recursing
-- through the arguments first; type variables contribute nothing.
getTypes :: Map.Map String Int -> HolType -> Map.Map String Int
getTypes acc (TyVar _) = acc
getTypes acc (TyApp name args) =
  Map.insert name (length args) (foldl getTypes acc args)
-- | Pointwise (left-biased) union of type and operation maps.
mergeTypesOps :: (Map.Map String Int, Map.Map String HolType)
  -> (Map.Map String Int, Map.Map String HolType)
  -> (Map.Map String Int, Map.Map String HolType)
mergeTypesOps (tysA, opsA) (tysB, opsB) =
  (Map.union tysA tysB, Map.union opsA opsB)
-- | Collect the type constructors and constants occurring in a term;
-- only constants contribute to the operation map.
getOps :: Term
  -> (Map.Map String Int, Map.Map String HolType)
getOps (Var _ t _) = (getTypes Map.empty t, Map.empty)
getOps (Const s t _) = (getTypes Map.empty t, Map.singleton s t)
getOps (Comb f x) = mergeTypesOps (getOps f) (getOps x)
getOps (Abs v body) = mergeTypesOps (getOps v) (getOps body)
-- | Build a signature from the types and constants of all terms.
calcSig :: [(String, Term)] -> Sign
calcSig namedTerms =
  let combine acc (_, tm) = mergeTypesOps (getOps tm) acc
      (tys, cs) = foldl combine (Map.empty, Map.empty) namedTerms
  in Sign { types = tys
          , ops = cs }
-- | Two signatures depend on each other iff they share at least one
-- type name or one operation name. Uses 'Map.null' on the
-- intersection instead of the original size comparison.
sigDepends :: Sign -> Sign -> Bool
sigDepends s1 s2 =
  not (Map.null (Map.intersection (types s1) (types s2)))
    || not (Map.null (Map.intersection (ops s1) (ops s2)))
-- | Rename type variables to pretty primed names, threading the
-- renaming map left to right through a type.
--
-- Bug fix: the original used @foldl@ with cons onto the accumulator,
-- which rebuilt @TyApp s ts@ with its argument list REVERSED,
-- silently changing the type. This version keeps the left-to-right
-- naming order (fresh names are drawn by traversal order, as before)
-- while preserving the argument order.
prettifyTypeVarsTp :: HolType -> Map.Map String String
  -> (HolType, Map.Map String String)
prettifyTypeVarsTp (TyVar s) m = case Map.lookup s m of
  Just s' -> (TyVar s', m)
  Nothing -> let s' = '\'' : (names !! Map.size m)
             in (TyVar s', Map.insert s s' m)
prettifyTypeVarsTp (TyApp s ts) m = (TyApp s ts', m')
  where
    (ts', m') = go ts m
    -- order-preserving left-to-right traversal with map threading
    go [] acc = ([], acc)
    go (t : rest) acc =
      let (t1, acc') = prettifyTypeVarsTp t acc
          (rest', acc'') = go rest acc'
      in (t1 : rest', acc'')
-- | Rename type variables inside a term for display.
prettifyTypeVarsTm :: Term -> Map.Map String String
  -> (Term, Map.Map String String)
-- NOTE(review): the incoming map is ignored here, so each constant
-- starts a fresh naming scope -- confirm this is intended.
prettifyTypeVarsTm (Const s t p) _ =
  let (t1, m1) = prettifyTypeVarsTp t Map.empty
  in (Const s t1 p, m1)
prettifyTypeVarsTm (Comb tm1 tm2) m =
  let (tm1', m1) = prettifyTypeVarsTm tm1 m
      (tm2', m2) = prettifyTypeVarsTm tm2 m1
  in (Comb tm1' tm2', m2)
prettifyTypeVarsTm (Abs tm1 tm2) m =
  let (tm1', m1) = prettifyTypeVarsTm tm1 m
      (tm2', m2) = prettifyTypeVarsTm tm2 m1
  in (Abs tm1' tm2', m2)
-- variables and anything else pass through unchanged
prettifyTypeVarsTm t m = (t, m)
-- | Apply type-variable prettification to every term of every library;
-- links are passed through unchanged.
prettifyTypeVars :: ([(String, [(String, Term)])], [(String, String)]) ->
  ([(String, [(String, Term)])], [(String, String)])
prettifyTypeVars (libs, lnks) =
  -- NOTE(review): this foldl with cons reverses the per-library term
  -- order; terms carry their names alongside, so order may not matter
  -- downstream -- verify.
  let libs' = map (\ (s, terms) ->
        let terms' = foldl (\ tms (ts, t) ->
              let (t', _) = prettifyTypeVarsTm t Map.empty
              in ((ts, t') : tms))
              [] terms
        in (s, terms')
        ) libs
  in (libs', lnks)
-- | Group import links by depth: each (importer, imported) pair is
-- assigned a level (number of times its target appears as importer),
-- producing a map from level to the links at that level.
treeLevels :: [(String, String)] -> Map.Map Int [(String, String)]
treeLevels l =
  let lk = foldr (\ (imp, t) l' ->
        case lookup t l' of
          Just (p, _) -> (imp, (p + 1, t)) : l'
          Nothing -> (imp, (1, t)) : (t, (0, "")) : l') [] l
  in foldl (\ m (imp, (p, t)) ->
       let s = Map.findWithDefault [] p m
       in Map.insert p ((imp, t) : s) m) Map.empty lk
-- | Wrap a term as a named, as-yet unproven sentence.
makeNamedSentence :: String -> Term -> Named Sentence
makeNamedSentence name tm =
  makeNamed name (Sentence { term = tm, proof = Nothing })
-- | Insert a theory node (built from the signature and sentences) into
-- the development graph, recompute its label theory, and record the
-- node under its library name in the lookup map.
_insNodeDG :: Sign -> [Named Sentence] -> String
  -> (DGraph, Map.Map String
       (String, Data.Graph.Inductive.Graph.Node, DGNodeLab))
  -> (DGraph, Map.Map String
       (String, Data.Graph.Inductive.Graph.Node, DGNodeLab))
_insNodeDG sig sens n (dg, m) =
  let gt = G_theory HolLight Nothing (makeExtSign HolLight sig) startSigId
        (toThSens sens) startThId
      -- node names use only the file name, not the full path
      n' = snd (System.FilePath.splitFileName n)
      labelK = newInfoNodeLab
        (makeName (simpleIdToIRI $ mkSimpleId n'))
        (newNodeInfo DGEmpty)
        gt
      k = getNewNodeDG dg
      m' = Map.insert n (n, k, labelK) m
      insN = [InsertNode (k, labelK)]
      newDG = changesDGH dg insN
      -- second pass: store the computed global theory on the label
      labCh = [SetNodeLab labelK (k, labelK
        { globalTheory = computeLabelTheory Map.empty newDG
            (k, labelK) })]
      newDG1 = changesDGH newDG labCh in (newDG1, m')
-- | Analyse a HOL Light file: import its libraries, compute signatures
-- and inter-library inclusion links level by level, and assemble the
-- resulting development graph into a singleton library environment.
anaHolLightFile :: HetcatsOpts -> FilePath -> IO (Maybe (LibName, LibEnv))
anaHolLightFile opts path = do
  (libs_, lnks_) <- importData opts path
  let (libs, lnks) = prettifyTypeVars (libs_, lnks_)
  let h = treeLevels lnks
  -- chain links between same-target libraries whose signatures overlap
  let fixLinks m l =
        case l of
          (l1 : l2 : l') ->
            if snd l1 == snd l2 && sigDepends
                 (Map.findWithDefault emptySig (fst l1) m)
                 (Map.findWithDefault emptySig (fst l2) m) then
              (fst l1, fst l2) : fixLinks m (l2 : l')
            else l1 : l2 : fixLinks m l'
          l' -> l'
  -- propagate signature unions along links, skipping failed unions
  let uniteSigs = foldl (\ m' (s, t) ->
        case resultToMaybe
               (sigUnion (Map.findWithDefault emptySig s m')
                  (Map.findWithDefault emptySig t m')) of
          Nothing -> m'
          Just new_tsig -> Map.insert t new_tsig m')
  let m = foldl (\ m' (s, l) -> Map.insert s (calcSig l) m') Map.empty libs
  let (m', lnks') = foldr (\ lvl (m'', lnks_loc) ->
        let lvl' = Map.findWithDefault [] lvl h
            lnks_next = fixLinks m'' (reverse lvl')
            {- we'd probably need to take care of dependencies on previously
               imported files not imported by the file imported last -}
        in (uniteSigs m'' lnks_next, lnks_next ++ lnks_loc)
        ) (m, []) [0 .. (Map.size h - 1)]
  -- one node per library, then one inclusion edge per link
  let (dg', node_m) = foldr (\ (lname, lterms) (dg, node_m') ->
        let sig = Map.findWithDefault emptySig lname m'
            sens = map (uncurry makeNamedSentence) lterms in
        _insNodeDG sig sens lname (dg, node_m')) (emptyDG, Map.empty) libs
      dg'' = foldr (\ (source, target) dg ->
        case Map.lookup source node_m of
          Just (n, k, _) ->
            case Map.lookup target node_m of
              Just (n1, k1, _) ->
                let sig = Map.findWithDefault emptySig n m'
                    sig1 = Map.findWithDefault emptySig n1 m'
                in case resultToMaybe $
                     subsig_inclusion HolLight sig sig1 of
                     Nothing -> dg
                     Just incl ->
                       let inclM = gEmbed $ mkG_morphism HolLight incl
                           insE = [InsertEdge
                             (k, k1, globDefLink inclM DGLinkImports)]
                       in changesDGH dg insE
              Nothing -> dg
          Nothing -> dg) dg' lnks'
      le = Map.insert (emptyLibName
             (System.FilePath.takeBaseName path))
             dg'' Map.empty
  return (Just (emptyLibName
                  (System.FilePath.takeBaseName path),
                computeLibEnvTheories le))
|
gnn/Hets
|
HolLight/HolLight2DG.hs
|
gpl-2.0
| 15,052
| 3
| 28
| 4,289
| 5,677
| 2,890
| 2,787
| 383
| 9
|
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
module Rewriting.Apply where
import Rewriting.Derive.Instance
import Autolib.Reporter
import Autolib.ToDoc
class Apply tag system object action | tag -> system object action where
-- | default instance
example :: tag -> Instance system object
-- | apply one action to object
apply :: tag -> system -> object -> action
-> Reporter object
-- | list of all possible actions
actions :: tag -> system -> object
-> [ action ]
|
Erdwolf/autotool-bonn
|
src/Rewriting/Apply.hs
|
gpl-2.0
| 544
| 0
| 11
| 136
| 107
| 61
| 46
| 11
| 0
|
{- | Module : $Header$
- Description : Implementation of logic instance Graded Modal Logic
- Copyright : (c) Daniel Hausmann & Georgel Calin & Lutz Schroeder, DFKI Lab Bremen,
- Rob Myers & Dirk Pattinson, Department of Computing, ICL
- License : GPLv2 or higher, see LICENSE.txt
- Maintainer : hausmann@dfki.de
- Stability : provisional
- Portability : portable
-
- Provides the implementation of the matching functions of graded modal logic.
-}
module GMP.Logics.G where
import List
import Ratio
import Maybe
import Debug.Trace
import Text.ParserCombinators.Parsec
import GMP.Logics.Generic
import GMP.Parser
import GMP.Logics.IneqSolver
--------------------------------------------------------------------------------
-- instance of feature for graded modal logic
--------------------------------------------------------------------------------
-- | Graded modality: @G k phis@ pairs a grade with its argument formulas.
data G a = G Int [Formula a] deriving (Eq,Show)

-- | Matching-function instance of graded modal logic.
instance (SigFeature b c d, Eq (b (c d)), Eq (c d)) => NonEmptyFeature G b c d where
  nefMatch flags seq =
    let poslits = keep_poslits seq
        neglits = keep_neglits seq
        -- take all combinations of positive and negative modal literals
        all_combinations = [ (pos,neg) |
          pos <- (powerList poslits), neg <- (powerList neglits)] \\ [([],[]) ]
        multiplicities xs = map (\(Mod (G k _)) -> k) xs
        strip_neg (Neg phi) = phi
        -- solve the side-condition inequalities, bounded by 'gml_bound'
        side_condition_tuples (p,n) =
          let switch l = map (\(x,y)->(y,map negate x)) l
          in switch $ ineqSolver (Coeffs (map (1+) (multiplicities (map strip_neg n))) (multiplicities p))
               (gml_bound ((multiplicities p),(multiplicities (map strip_neg n))))
        gml_match (ps,ns) = map (gml_build_matches (ps,ns))
          (gml_filter_tuples (side_condition_tuples (ps,ns)) [] )
    in map gml_match all_combinations
  nefPretty d = case d of
    G i [] -> "[G]" ++ show i ++ "empty,mann"
    G i e -> "[G]" ++ show i ++ (pretty (head e))
  -- both constructors default to grade 1 (partially applied 'G 1')
  nefFeatureFromSignature sig = G 1
  nefFeatureFromFormula phi = G 1
  nefStripFeature (G i phis) = phis
  nefDisj2Conj (Mod (G i phi)) = Mod (G i ([disj2conj (head phi)]))
  nefNegNorm (Mod (G i phi)) = Mod (G i ([negNorm (head phi)]))
  nefParser sig = do n <- natural
                     return $ G (fromInteger n)
--------------------------------------------------------------------------------
-- additional functions for the matching function of this logic
--------------------------------------------------------------------------------
-- | Build the matching sequents for one choice of positive/negative
-- modal literals and one inequality solution; only index sets whose
-- weighted positive sum is below the negative sum are relevant.
gml_build_matches :: (SigFeature a b c, Eq (a (b c))) => ([Formula (G (a (b c)))],[Formula (G (a (b c)))]) -> ([Int],[Int]) -> [Sequent]
gml_build_matches (poslits,neglits) (prs,nrs) =
  let (pos_inds,neg_inds) = (to_inds prs,to_inds nrs)
      all_inds = [(pos,neg) | pos <- (powerList pos_inds), neg <- (powerList neg_inds)]
      (sposlits,sneglits) = ([phi | Mod (G k [phi]) <- poslits],[phi | Neg (Mod (G k [phi])) <- neglits])
      relevant_inds = filter (\(pos,neg) -> (sum $ imgInt pos prs) < (sum $ imgInt neg nrs)) all_inds
  in [Sequent (map (\(ps,ns) -> (Neg (andify (((map nneg ((img (pos_inds \\ ps) sposlits) ++ (img (neg_inds \\ ns) sneglits))) ++ ((img ps sposlits) ++ (img ns sneglits)))))) ) relevant_inds)]
-- | GML bound on integer magnitude: a polynomial-plus-logarithmic
-- bound computed from the positive and negative grade lists.
gml_bound :: ([Int],[Int]) -> Int
gml_bound (kps, kns) =
  let n = length kps + length kns
      logint k x = ceiling (logBase 2 (k + x))
      logsum ls k = sum [logint k (fromIntegral x) | x <- ls]
  in 12 * n * (1 + n) + 6 * n * (logsum kps 1 + logsum kns 2)
-- | Keep only candidates not dominated by an already-kept tuple,
-- dropping kept tuples dominated by a new candidate.
-- NOTE(review): when a candidate survives, the remaining candidates
-- are discarded (no recursion in the second guard) -- preserved as-is.
gml_filter_tuples :: [([Int], [Int])] -> [([Int], [Int])] -> [([Int], [Int])]
gml_filter_tuples [] kept = kept
gml_filter_tuples (c : rest) kept
  | any (`gml_geq` c) kept = gml_filter_tuples rest kept
  | otherwise = c : filter (\k -> not (gml_leq k c)) kept

-- | Componentwise less-or-equal on both grade lists (zip truncates).
gml_leq :: ([Int],[Int]) -> ([Int],[Int]) -> Bool
gml_leq (p1, n1) (p2, n2) =
  and (zipWith (<=) p1 p2) && and (zipWith (<=) n1 n2)

-- | Componentwise greater-or-equal on both grade lists (zip truncates).
gml_geq :: ([Int],[Int]) -> ([Int],[Int]) -> Bool
gml_geq (p1, n1) (p2, n2) =
  and (zipWith (>=) p1 p2) && and (zipWith (>=) n1 n2)
--------------------------------------------------------------------------------
-- instance of sigFeature for graded modal logic
--------------------------------------------------------------------------------
-- | Hook graded modal logic into the generic signature machinery.
instance (SigFeature b c d, Eq (c d), Eq (b (c d))) => NonEmptySigFeature G b c d where
  neGoOn sig flag = genericPGoOn sig flag
|
nevrenato/Hets_Fork
|
GMP/versioning/gmp-coloss-0.0.3/GMP/Logics/G.hs
|
gpl-2.0
| 4,828
| 6
| 27
| 1,286
| 1,812
| 974
| 838
| -1
| -1
|
-- | Ask for a name and greet the user.
-- Bug fix: 'print' on a String shows it via 'Show', so the greeting
-- was emitted wrapped in quote characters; 'putStrLn' writes it raw.
main :: IO ()
main = do
  putStrLn "Your name bro?"
  name <- getLine
  putStrLn ("Hey dude " ++ name ++ ", you rock!")
|
ThePaul51/Paul-repo
|
hellodude.hs
|
gpl-3.0
| 100
| 0
| 10
| 23
| 36
| 16
| 20
| 4
| 1
|
{-
Copyright (C) 2017-2018 defanor <defanor@uberspace.net>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{- |
Module : Pancake.Rendering
Maintainer : defanor <defanor@uberspace.net>
Stability : unstable
Portability : portable
Document rendering: conversion from 'Pandoc' to 'RendererOutput'.
-}
{-# LANGUAGE OverloadedStrings #-}
module Pancake.Rendering ( Denotation(..)
, Styled(..)
, StyledLine
, RendererOutput(..)
, rLinks
, rLines
, rIdentifiers
, rBlocks
, rNotes
, renderDoc
) where
import qualified Text.Pandoc as P
import Network.URI
import Data.List
import System.Console.Terminfo.Color
import Data.String
import Control.Monad.Writer
import Control.Monad.State
import System.FilePath
import Data.Char
import Numeric
import Pancake.Configuration
-- | The type of a list item that should be rendered next.
data Listing = Bulleted
             -- ^ An unordered ("*") list item.
             | Ordered Int
             -- ^ An ordered list item carrying the number to print.
             deriving (Show, Eq)
-- | Denotations: information that can be ignored, but can also be
-- used to improve the UI.
data Denotation = Link URI
                -- ^ A hyperlink target.
                | Image URI
                -- ^ An image source.
                | Math String
                -- ^ The original math source string.
                | Heading Int
                -- ^ A heading together with its level.
                deriving (Show, Eq)
-- | A styled string: either plain text or a styling wrapper around
-- another 'Styled' value (wrappers nest).
data Styled = Plain String
            | Underline Styled
            | Bold Styled
            | Emph Styled
            | Strikethrough Styled
            | Subscript Styled
            | Superscript Styled
            | Fg Color Styled
            -- ^ Foreground color.
            | Denote Denotation Styled
            -- ^ Attaches UI metadata without changing the text.
            deriving (Show, Eq)
-- | Just for convenience: with OverloadedStrings, string literals
-- become unstyled 'Plain' text.
instance IsString Styled where
  fromString = Plain
-- | A single visual line, represented as a sequence of styled elements.
type StyledLine = [Styled]
-- | Renderer state.
data RS = RS { indentationLevel :: Int
             -- ^ Current indentation (in columns) for stored lines.
             , linkCount :: Int
             -- ^ Number of links stored so far (reference numbering).
             , noteCount :: Int
             -- ^ Number of notes stored so far.
             , lineNumber :: Int
             -- ^ Current output line counter.
             , listing :: Maybe Listing
             -- ^ Pending list-item marker, if any.
             , columns :: Int
             -- ^ Line width used for wrapping.
             , noWrap :: Bool
             -- ^ When True, wrapping is left to the UI.
             , rsConf :: Config
             -- ^ User configuration.
             } deriving (Show, Eq)
-- | This is what gets rendered.
data RendererOutput = RLink URI
                    -- ^ An URI reference.
                    | RNote [RendererOutput]
                    -- ^ A note, containing its own nested renderer
                    -- output (lines, links, identifiers).
                    | RLine StyledLine
                    -- ^ A line to render.
                    | RIdentifier String Int
                    -- ^ An identifier and the line it points at.
                    | RBlock Int Int
                    -- ^ A fixed block's position (start line and end
                    -- line). These blocks reflect semantics and don't
                    -- vary as the terminal width changes, so they are
                    -- safe to rely on for position retention.
                    deriving (Show, Eq)
-- | Show a reference number using the given digit alphabet: the base is
-- the alphabet's length and digits are looked up by index.
-- NOTE(review): assumes a non-empty alphabet and a non-negative @n@
-- ('showIntAtBase' errors on non-positive base or negative numbers).
showRef :: String -> Int -> String
showRef digits n = showIntAtBase (length digits) (digits !!) n ""
-- | Extracts the link targets, in order.
rLinks :: [RendererOutput] -> [URI]
rLinks out = [uri | RLink uri <- out]
-- | Extracts the text lines, in order.
rLines :: [RendererOutput] -> [StyledLine]
rLines out = [ln | RLine ln <- out]
-- | Extracts identifiers with their line numbers, in order.
rIdentifiers :: [RendererOutput] -> [(String, Int)]
rIdentifiers out = [(ident, ln) | RIdentifier ident ln <- out]
-- | Extracts fixed block (start, end) positions, in order.
rBlocks :: [RendererOutput] -> [(Int, Int)]
rBlocks out = [(start, end) | RBlock start end <- out]
-- | Extracts the collected notes, in order.
rNotes :: [RendererOutput] -> [[RendererOutput]]
rNotes out = [note | RNote note <- out]
-- | Used to render 'Pandoc' docs: a 'Writer' accumulating
-- 'RendererOutput' over mutable renderer state 'RS'.
type Renderer a = WriterT [RendererOutput] (State RS) a
-- | Runs a 'Renderer'.
runRenderer :: Int
            -- ^ Column count (line width).
            -> Bool
            -- ^ Leave line wrapping to UI.
            -> Int
            -- ^ Link number to start with.
            -> Int
            -- ^ Note number to start with.
            -> Int
            -- ^ Line number to start with.
            -> Config
            -- ^ Configuration.
            -> Renderer a
            -- ^ A renderer.
            -> [RendererOutput]
            -- ^ Collected links and text lines.
runRenderer cols llw ls ns ln cnf r =
  let o = snd $ evalState (runWriterT r)
          RS { indentationLevel = 0
             , linkCount = ls
             , noteCount = ns
             , lineNumber = ln
             , listing = Nothing
             , columns = cols
             , noWrap = llw
             , rsConf = cnf
             }
  -- The lines of every collected note are appended after the main
  -- output as plain 'RLine's.
  in o ++ concatMap (map RLine . rLines) (rNotes o)
-- | Stores a link and bumps the link counter, returning the link's
-- number (the counter value before the increment).
storeLink :: URI -> Renderer Int
storeLink u = do
  tell [RLink u]
  n <- gets linkCount
  modify $ \s -> s { linkCount = n + 1 }
  pure n
-- | Stores a note, increasing the counter. Returns the note's number
-- (the counter value before the increment).
storeNote :: [RendererOutput] -> Renderer Int
storeNote ro = do
  st <- get
  put $ st { noteCount = noteCount st + 1 }
  -- Links collected inside the note must also be registered with the
  -- surrounding renderer.
  mapM_ storeLink $ rLinks ro
  let cnt = noteCount st
      -- A "note N" marker is prepended to the note's first line, or
      -- placed on its own line if the note doesn't start with a line.
      mark = Superscript . Fg Red . fromString $ "note " ++ show cnt
      ro' = case ro of
        (RLine l:rest) -> RLine (mark:l):rest
        _ -> RLine [mark] : ro
  tell [RNote ro']
  pure cnt
-- | Stores text lines, advancing the line counter by their count.
storeLines :: [StyledLine] -> Renderer ()
storeLines xs = do
  modify (\s -> s { lineNumber = lineNumber s + length xs })
  tell (map RLine xs)
-- | Stores attributes: records a non-empty identifier together with
-- the current line number; empty identifiers are ignored.
storeAttr :: P.Attr -> Renderer ()
storeAttr ("", _, _) = pure ()
storeAttr (ident, _, _) = do
  ln <- gets lineNumber
  tell [RIdentifier ident ln]
-- | Runs a renderer with the indentation level one step deeper,
-- stepping it back afterwards.
withIndent :: Renderer a -> Renderer a
withIndent act = do
  bump 1
  result <- act
  bump (-1)
  pure result
  where
    bump d = modify $ \s -> s { indentationLevel = indentationLevel s + d }
-- | Runs a renderer and afterwards restores the indentation level that
-- was in effect before it ran.
keepIndent :: Renderer a -> Renderer a
keepIndent act = do
  saved <- gets indentationLevel
  result <- act
  modify $ \s -> s { indentationLevel = saved }
  pure result
-- | Renders indented (with the current indent level) lines, adding a
-- pending list marker to the first line if one is set.
indented :: [StyledLine] -> Renderer ()
indented slines = do
  st <- get
  -- The following blocks of the same list item should not be marked.
  modify $ \s -> s { listing = Nothing }
  let il = indentationLevel st
      -- List marker for the first line, if one was pending.
      prefix = case listing st of
                 Nothing -> ""
                 (Just Bulleted) -> Fg Yellow "* "
                 (Just (Ordered n)) -> Fg Yellow $ fromString $ show n ++ ". "
      prefixLen = length $ unstyled [prefix]
      indent = il + prefixLen
      -- Wrap to the available width unless wrapping is left to the UI.
      fittedLines = if noWrap st
                    then slines
                    else fitLines (columns st - indent) slines
      -- Padding for continuation lines / for the (marked) first line.
      pad = (fromString (replicate indent ' ') :)
      padFirst x = fromString (replicate il ' ') : prefix : x
  -- The following blocks of the same list item should be indented
  -- with the same level. This should be reset to the original value
  -- where the listing type is getting set.
  modify $ \s -> s { indentationLevel = indent }
  case fittedLines of
    [] -> pure ()
    (l:ls) -> storeLines $ padFirst l : map pad ls
-- This may be unreliable, especially for resulting length estimation,
-- but usually works. Maybe improve someday.
-- | Returns a string as it would be shown on a dumb terminal: all
-- styling wrappers are stripped, leaving only the plain text.
unstyled :: StyledLine -> String
unstyled = concatMap flatten
  where
    flatten styled = case styled of
      Plain s         -> s
      Underline s     -> flatten s
      Bold s          -> flatten s
      Emph s          -> flatten s
      Strikethrough s -> flatten s
      Subscript s     -> flatten s
      Superscript s   -> flatten s
      Fg _ s          -> flatten s
      Denote _ s      -> flatten s
-- | Fits words into terminal lines of a given width.
fitLines :: Int
         -- ^ Line width.
         -> [[Styled]]
         -- ^ Strings: usually words and similar short elements.
         -> [StyledLine]
         -- ^ Fitted lines.
fitLines 0 _ = []
fitLines maxLen inlineBits = concatMap (map reverse . fitWords') inlineBits
  where
    -- Splits an overlong Plain element: preferably at the last space
    -- within the first maxLen characters, otherwise hard at maxLen.
    -- Styling wrappers are re-applied to every resulting piece.
    splitStyled :: Styled -> [Styled]
    splitStyled (Plain s)
      | length s > maxLen =
        case reverse (takeWhile (<= maxLen) (findIndices isSpace s)) of
          (n:_) -> let (t, _:d) = splitAt n s
                   in Plain t : splitStyled (Plain d)
          [] -> let (t, d) = splitAt maxLen s
                in Plain t : splitStyled (Plain d)
      | otherwise = [Plain s]
    splitStyled (Underline s) = map Underline $ splitStyled s
    splitStyled (Bold s) = map Bold $ splitStyled s
    splitStyled (Emph s) = map Emph $ splitStyled s
    splitStyled (Strikethrough s) = map Strikethrough $ splitStyled s
    splitStyled (Subscript s) = map Subscript $ splitStyled s
    splitStyled (Superscript s) = map Superscript $ splitStyled s
    splitStyled (Fg c s) = map (Fg c) $ splitStyled s
    splitStyled (Denote d s) = map (Denote d) $ splitStyled s
    fitWords' :: [Styled] -> [StyledLine]
    fitWords' ws
      -- handle empty lines
      | null (unstyled ws) = [[]]
      | otherwise = fitWords [] 0 ws
    -- Accumulates words into lines; each line is built in reverse,
    -- hence the 'map reverse' above.
    fitWords :: [Styled] -> Int -> [Styled] -> [StyledLine]
    fitWords curLine curLen (w:ws)
      -- handle newline characters
      | unstyled [w] == "\n" = curLine : fitWords [] 0 ws
      -- a new line
      | curLen == 0 = if length (unstyled [w]) <= maxLen
                      then fitWords [w] (length $ unstyled [w]) ws
                      else map pure (splitStyled w) ++ fitWords [] 0 ws
      -- add a word to a line
      | otherwise = let wLen = length (unstyled [w])
                        spaceAhead = case ws of
                          (" " : _) -> True
                          _ -> False
                    in if curLen + wLen <= maxLen
                       then fitWords (w:curLine) (curLen + wLen) $
                            -- if there's an unnecessary space ahead, skip it
                            if curLen + wLen + 1 > maxLen && spaceAhead
                            then tail ws
                            else ws
                       else curLine : fitWords [] 0 (w:ws)
    -- end, no words pending
    fitWords _ 0 [] = []
    -- end, with words pending
    fitWords curLine _ [] = [curLine]
-- | A helper function to put inline elements between two strings
-- (such as parens or quotes).
wrappedInlines :: Styled
               -- ^ String on the left.
               -> Styled
               -- ^ String on the right.
               -> [P.Inline]
               -- ^ Inlines to wrap.
               -> Renderer [Styled]
               -- ^ Resulting inlines.
wrappedInlines open close inls = do
  inner <- concat <$> mapM readInline inls
  pure (open : inner ++ [close])
-- | Reads an inline element, producing styled strings. Doesn't render
-- them (i.e., using 'Writer') on its own, but collects links.
readInline :: P.Inline -> Renderer [Styled]
readInline (P.Str s)
  -- Whitespace-only strings are dropped; spacing comes from
  -- 'P.Space' / 'P.SoftBreak' elements instead.
  | all isSpace s = pure []
  | otherwise = pure [fromString s]
readInline (P.Emph s) = concatMap (map Emph) <$> mapM readInline s
readInline (P.Strong s) = concatMap (map Bold) <$> mapM readInline s
readInline (P.Strikeout s) = map Strikethrough <$> wrappedInlines "-" "-" s
readInline (P.Superscript s) = map Superscript <$> wrappedInlines "^{" "}" s
readInline (P.Subscript s) = map Subscript <$> wrappedInlines "_{" "}" s
readInline (P.SmallCaps s) = wrappedInlines "\\sc{" "}" s
readInline (P.Quoted P.SingleQuote s) = wrappedInlines "‘" "’" s
readInline (P.Quoted P.DoubleQuote s) = wrappedInlines "“" "”" s
readInline (P.Cite _ s) = concat <$> mapM readInline s
readInline (P.Code attr s) = do
  storeAttr attr
  pure . map (Fg Green . fromString) $ intersperse "\n" $ lines s
readInline P.Space = pure [" "]
readInline P.SoftBreak = pure [" "]
readInline P.LineBreak = pure ["\n"]
readInline (P.Math _ s) = pure [Denote (Math s) $ fromString s]
readInline (P.RawInline _ s) = pure [fromString s]
readInline (P.Link attr alt (url, title)) = do
  storeAttr attr
  case parseURIReference url of
    Just uri -> do
      a <- mapM readInline alt
      -- Prefer alt text, then title, then the raw URL.
      let t = case (title, concat a) of
            ("", []) -> [fromString url]
            ("", alt') -> alt'
            (title', []) -> [fromString title']
            (_, alt') -> alt'
      cnt <- storeLink uri
      -- In-document (fragment-only) links get a distinct color.
      let color = case uri of
            (URI "" Nothing "" "" ('#':_)) -> Magenta
            _ -> Cyan
      st <- get
      pure $ map (Denote (Link uri) . Fg color) t ++
        [Fg Blue $ fromString
         (concat ["[", showRef (referenceDigits $ rsConf st) cnt, "]"])]
    -- Unparseable URL: fall back to showing just the title.
    Nothing -> pure [fromString title]
readInline (P.Image attr alt (url, title)) = do
  storeAttr attr
  case parseURIReference url of
    Nothing -> pure [Fg Red "i", fromString title]
    Just uri -> do
      a <- mapM readInline alt
      let t = case (title, concat a) of
            -- No text at all: use the file name from the URI path.
            ("", []) -> [fromString $ takeFileName $ uriPath uri]
            ("", alt') -> alt'
            (title', []) -> [fromString title']
            (_, alt') -> alt'
      cnt <- storeLink uri
      st <- get
      pure $ Denote (Image uri) (Fg Red "i") :
        map (Denote (Link uri) . Fg Cyan) t ++
        [Fg Blue $ fromString
         (concat ["[", showRef (referenceDigits $ rsConf st) cnt, "]"])]
readInline (P.Note bs) = do
  -- Minor issues are quite likely with this.
  st <- get
  -- 12 is somewhat arbitrary, but narrowing the rendered notes so
  -- that "^{note xxx}" could be added without overflow.
  let ro = runRenderer (columns st - 12) (noWrap st) (linkCount st) (noteCount st) 0
           (rsConf st) (renderBlocks bs)
  cnt <- storeNote ro
  pure [Superscript . Fg Red . fromString $ "[" ++ show cnt ++ "]"]
readInline (P.Span attr i) = do
  storeAttr attr
  concat <$> mapM readInline i
-- | Reads inline elements and splits them into styled lines.
readInlines :: [P.Inline] -> Renderer [StyledLine]
readInlines inls = styledLines . concat <$> mapM readInline inls
-- | Like 'readInlines', but renders the lines at once, one-by-one,
-- making sure that identifier positions are correct.
renderInlines :: [P.Inline] -> Renderer ()
renderInlines inls = do
  bits <- concat <$> mapM readInline inls
  mapM_ (fixed . indented . (: [])) (styledLines bits)
-- | Splits a list of styled elements into styled lines, similar to
-- 'lines': a "\n" element ends the current line.
styledLines :: [Styled] -> [StyledLine]
styledLines = go []
  where
    -- The current line is accumulated in reverse for O(1) prepends and
    -- reversed on emission; the original appended with (++ [x]), which
    -- is quadratic in the line length.
    go acc [] = [reverse acc]
    go acc (x:xs)
      | unstyled [x] == "\n" = reverse acc : go [] xs
      | otherwise = go (x:acc) xs
-- | Renders a block element.
renderBlock :: P.Block -> Renderer ()
renderBlock (P.Plain i) = renderInlines i
renderBlock (P.Para i) = renderInlines i
renderBlock (P.LineBlock i) = mapM_ renderInlines i
renderBlock (P.CodeBlock attr s) = do
  storeAttr attr
  -- Each code line is rendered green and position-fixed.
  mapM_ (fixed . indented . pure . pure . Fg Green . fromString)
    (lines s)
renderBlock (P.RawBlock _ s) =
  indented $ map (pure . fromString) $ lines s
renderBlock (P.BlockQuote bs) = withIndent $ renderBlocks bs
renderBlock (P.OrderedList _ bs) = do
  st <- get
  -- Items are numbered from 1; wrapping is forced on for items and the
  -- original noWrap value is restored afterwards.
  zipWithM_ (\b n -> modify (\s -> s { listing = Just (Ordered n)
                                     , noWrap = False })
              >> fixed (keepIndent (renderBlocks b)))
    bs [1..]
  modify $ \s -> s { listing = Nothing, noWrap = noWrap st }
renderBlock (P.BulletList bs) = do
  st <- get
  mapM_ (\b -> modify (\s -> s { listing = Just Bulleted
                               , noWrap = False })
          >> fixed (keepIndent (renderBlocks b)))
    bs
  modify $ \s -> s { listing = Nothing, noWrap = noWrap st }
renderBlock (P.DefinitionList dl) =
  let renderDefinition (term, definition) = do
        -- The term is highlighted; its definitions are indented under it.
        indented =<< map (map (Fg Yellow)) <$> readInlines term
        withIndent $ mapM_ renderBlocks definition
  in mapM_ (fixed . renderDefinition) dl
renderBlock (P.Header level attr i) = do
  storeAttr attr
  -- Markdown-style "#" prefix, repeated per heading level.
  indented =<< map (map (Denote (Heading level) . Bold . Fg Green)
                    . ([fromString (replicate level '#'), " "] ++))
    <$> readInlines i
renderBlock P.HorizontalRule = do
  st <- get
  indented [[Fg Black $
             fromString $ replicate (columns st - indentationLevel st * 2) '-']]
renderBlock (P.Table caption aligns widths headers rows) = do
  indented =<< readInlines caption
  -- Use pandoc-provided widths if they are set, calculate them
  -- otherwise.
  let widthsAreSet = case widths of
        [] -> False
        w -> minimum w /= maximum w
  ws <- if widthsAreSet then pure widths else do
    -- Estimate widths from rendered cell lengths, blending a
    -- proportional share (0.7) with an equal share (0.3).
    lens <- map sum . transpose <$> mapM
      (mapM (fmap (length . unstyled . concat . rLines) . renderCell 80)) rows
    pure $ map (\l -> if sum lens == 0
                      then 0
                      else fromIntegral l / fromIntegral (sum lens) * 0.7
                           + 1 / fromIntegral (length lens) * 0.3) lens
  let withHead = if all null headers then id else (headers :)
  mapM_ (\r -> fixed (renderBlock P.HorizontalRule) >> fixed (tableRow ws r))
    (withHead rows)
  fixed $ renderBlock P.HorizontalRule
  where
    -- Renders a cell's blocks in an isolated renderer at a given width.
    renderCell :: Int -> [P.Block] -> Renderer [RendererOutput]
    renderCell w blocks = do
      st <- get
      pure $ runRenderer w False (linkCount st) (noteCount st)
        (lineNumber st) (rsConf st) $ renderBlocks blocks
    -- Renders a cell and re-registers its links, notes and identifiers
    -- in the surrounding renderer, returning the padded lines.
    tableCell :: (P.Alignment, Int, [P.Block]) -> Renderer [StyledLine]
    tableCell (a, w, blocks) = do
      l <- renderCell w blocks
      mapM_ storeLink $ rLinks l
      modify (\s -> s { noteCount = noteCount s + length (rNotes l) })
      tell $ map (uncurry RIdentifier) $ rIdentifiers l
      pure $ map (padCell a w) $ rLines l
    -- Pads a line to the cell width according to column alignment.
    padCell :: P.Alignment -> Int -> StyledLine -> StyledLine
    padCell a w x =
      let pLen = w - length (unstyled x)
          halfLen :: Rational
          halfLen = fromIntegral pLen / 2
          (lPad, rPad) = case a of
            P.AlignRight -> (pLen, 0)
            P.AlignCenter -> ( ceiling halfLen, floor halfLen )
            _ -> (0, pLen)
          mkPadding l = [fromString (replicate l ' ')]
      in concat [mkPadding lPad, x, mkPadding rPad]
    tableRow :: [Double] -> [[P.Block]] -> Renderer ()
    tableRow ws cols = do
      st <- get
      -- 3 columns per " | " separator between cells.
      let maxWidth = columns st - indentationLevel st - ((length cols - 1) * 3)
          widths' = map (\w -> floor (fromIntegral maxWidth * w)) ws
      cells <- mapM tableCell $ zip3 aligns widths' cols
      -- Pad shorter cells with blank lines so columns line up.
      let maxLines = foldr (max . length) 0 cells
          padded = zipWith (\w c -> c ++ replicate (maxLines - length c)
                             [fromString $ replicate w ' ']) widths' cells
      indented $ map (mconcat . intersperse (pure $ Fg Black " | "))
        $ transpose padded
renderBlock (P.Div attr b) = do
  storeAttr attr
  st <- get
  -- Div indentation is configurable.
  let i = if indentDivs $ rsConf st
          then withIndent
          else id
  i $ renderBlocks b
renderBlock P.Null = pure ()
-- | Checks whether a block is a list (ordered-, bullet-, or
-- definition-).
isList :: P.Block -> Bool
isList blk = case blk of
  P.OrderedList {} -> True
  P.BulletList {} -> True
  P.DefinitionList {} -> True
  _ -> False
-- | Determines whether a blank line should be skipped before a block.
skipBefore :: P.Block -> Bool
skipBefore blk = case blk of
  P.Header {} -> True
  P.Para {} -> True
  P.BlockQuote {} -> True
  -- A Div inherits the decision from its first child.
  P.Div _ (b:_) -> skipBefore b
  _ -> False
-- | Determines whether a blank line should be skipped after a block.
skipAfter :: P.Block -> Bool
skipAfter blk = case blk of
  P.Header {} -> True
  P.Para {} -> True
  P.BlockQuote {} -> True
  -- A Div inherits the decision from its last child.
  P.Div _ bs@(_:_) -> skipAfter (last bs)
  other -> isList other
-- | Runs a renderer and records the line span it produced as a fixed
-- block position.
fixed :: Renderer a -> Renderer a
fixed act = do
  startLine <- gets lineNumber
  result <- act
  endLine <- gets lineNumber
  tell [RBlock startLine endLine]
  pure result
-- | Renders block elements, inserting an empty line between a pair of
-- blocks when either side asks for one.
renderBlocks :: [P.Block] -> Renderer ()
renderBlocks [] = pure ()
renderBlocks [b] = fixed (renderBlock b)
renderBlocks (cur:rest@(next:_)) = do
  fixed (renderBlock cur)
  when (skipAfter cur || skipBefore next) $
    fixed (storeLines [[]])
  renderBlocks rest
-- | Renders a document.
renderDoc :: Int
          -- ^ Number of columns.
          -> Bool
          -- ^ Leave line wrapping to UI.
          -> Config
          -- ^ Configuration.
          -> P.Pandoc
          -- ^ Document to render.
          -> [RendererOutput]
          -- ^ Rendered document.
-- Link and note counters start at 0; line numbering starts at 1.
renderDoc cols llw cnf (P.Pandoc _ blocks) =
  runRenderer cols llw 0 0 1 cnf $ renderBlocks blocks
|
defanor/pancake
|
Pancake/Rendering.hs
|
gpl-3.0
| 21,441
| 0
| 23
| 6,342
| 6,833
| 3,459
| 3,374
| 437
| 16
|
{-# LANGUAGE OverloadedStrings, DeriveGeneric #-}
module System.DevUtils.MySQL.Helpers.Tables.JSON (
) where
import System.DevUtils.MySQL.Helpers.Tables.Include (Tables(..))
import Data.Aeson (FromJSON, ToJSON, decode, encode)
-- Empty instance bodies use aeson's default (generic) methods.
-- NOTE(review): presumably 'Tables' derives Generic in its defining
-- module — confirm, since the defaults require it.
instance FromJSON Tables
instance ToJSON Tables
|
adarqui/DevUtils-MySQL
|
src/System/DevUtils/MySQL/Helpers/Tables/JSON.hs
|
gpl-3.0
| 277
| 0
| 6
| 27
| 65
| 42
| 23
| 6
| 0
|
module Calculusbot.Simplify where
import Calculusbot.LanguageDef
-- | One recursive pass of algebraic simplification: constant folding of
-- integer literals and elimination of identity / absorbing elements
-- (0 and 1).
-- NOTE(review): literal folding only fires when both operands are
-- literals *before* simplification, so e.g. @1 + (2 + 3)@ folds the
-- inner sum but not the outer one in the same pass.
simplify :: CBExpr -> CBExpr
simplify (BinExpr Plus (Const (IntLit x)) (Const (IntLit y))) = (Const (IntLit (x + y)))
simplify (BinExpr Plus l r)
  -- x + 0 = x; 0 + x = x
  | sr == (Const (IntLit 0)) = sl
  | sl == (Const (IntLit 0)) = sr
  | otherwise = (BinExpr Plus sl sr)
  where
    sr = simplify r
    sl = simplify l
simplify (BinExpr Minus (Const (IntLit x)) (Const (IntLit y))) = (Const (IntLit (x - y)))
simplify (BinExpr Minus l r)
  -- x - 0 = x; 0 - x = -x
  | sr == (Const (IntLit 0)) = sl
  | sl == (Const (IntLit 0)) = (UnExpr Neg sr)
  | otherwise = (BinExpr Minus sl sr)
  where
    sr = simplify r
    sl = simplify l
simplify (BinExpr Times (Const (IntLit x)) (Const (IntLit y))) = (Const (IntLit (x * y)))
simplify (BinExpr Times l r)
  -- 0 * x = x * 0 = 0; 1 * x = x; x * 1 = x
  | sr == (Const (IntLit 0)) = (Const (IntLit 0))
  | sl == (Const (IntLit 0)) = (Const (IntLit 0))
  | sr == (Const (IntLit 1)) = sl
  | sl == (Const (IntLit 1)) = sr
  | otherwise = (BinExpr Times sl sr)
  where
    sr = simplify r
    sl = simplify l
-- simplify (BinExpr Divide (Const (IntLit x)) (Const (IntLit y)) = (Const (IntLit (x / y)))
simplify (BinExpr Divide l r)
  -- x / 0 is rejected; 0 / x = 0; x / 1 = x
  | sr == (Const (IntLit 0)) = error "Divide by 0 error"
  | sl == (Const (IntLit 0)) = (Const (IntLit 0))
  | sr == (Const (IntLit 1)) = sl
  | otherwise = (BinExpr Divide sl sr)
  where
    sr = simplify r
    sl = simplify l
simplify (BinExpr Power b e)
  -- x ^ 0 = 1 (checked first, so 0 ^ 0 yields 1 here);
  -- 0 ^ e = 0; 1 ^ e = 1; b ^ 1 = b
  | se == (Const (IntLit 0)) = (Const (IntLit 1))
  | sb == (Const (IntLit 0)) = (Const (IntLit 0))
  | sb == (Const (IntLit 1)) = sb
  | se == (Const (IntLit 1)) = sb
  | otherwise = (BinExpr Power sb se)
  where
    sb = simplify b
    se = simplify e
-- ln e = 1
simplify (UnExpr Log (Const E)) = (Const (IntLit 1))
-- Anything else is already in simplest handled form.
simplify e = e
|
BenedictEggers/calculusbot
|
src/Calculusbot/Simplify.hs
|
gpl-3.0
| 1,998
| 0
| 12
| 688
| 952
| 467
| 485
| 43
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetTCPProxies.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a TargetTcpProxy resource in the specified project using the
-- data included in the request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetTcpProxies.insert@.
module Network.Google.Resource.Compute.TargetTCPProxies.Insert
(
-- * REST Resource
TargetTCPProxiesInsertResource
-- * Creating a Request
, targetTCPProxiesInsert
, TargetTCPProxiesInsert
-- * Request Lenses
, ttpiRequestId
, ttpiProject
, ttpiPayload
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetTcpProxies.insert@ method which the
-- 'TargetTCPProxiesInsert' request conforms to.
-- Servant-style path:
-- POST /compute/v1/projects/{project}/global/targetTcpProxies
type TargetTCPProxiesInsertResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "global" :>
               "targetTcpProxies" :>
                 QueryParam "requestId" Text :>
                   QueryParam "alt" AltJSON :>
                     ReqBody '[JSON] TargetTCPProxy :>
                       Post '[JSON] Operation
-- | Creates a TargetTcpProxy resource in the specified project using the
-- data included in the request.
--
-- /See:/ 'targetTCPProxiesInsert' smart constructor.
data TargetTCPProxiesInsert =
  TargetTCPProxiesInsert'
    { _ttpiRequestId :: !(Maybe Text)
      -- ^ Optional idempotency key for safe retries.
    , _ttpiProject :: !Text
      -- ^ Project ID for this request.
    , _ttpiPayload :: !TargetTCPProxy
      -- ^ Request body: the proxy to insert.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TargetTCPProxiesInsert' with the minimum fields required to make a request.
--
-- The request ID defaults to 'Nothing'.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ttpiRequestId'
--
-- * 'ttpiProject'
--
-- * 'ttpiPayload'
targetTCPProxiesInsert
    :: Text -- ^ 'ttpiProject'
    -> TargetTCPProxy -- ^ 'ttpiPayload'
    -> TargetTCPProxiesInsert
targetTCPProxiesInsert pTtpiProject_ pTtpiPayload_ =
  TargetTCPProxiesInsert'
    { _ttpiRequestId = Nothing
    , _ttpiProject = pTtpiProject_
    , _ttpiPayload = pTtpiPayload_
    }
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
ttpiRequestId :: Lens' TargetTCPProxiesInsert (Maybe Text)
ttpiRequestId
  = lens _ttpiRequestId
      (\ s a -> s{_ttpiRequestId = a})
-- | Project ID for this request.
ttpiProject :: Lens' TargetTCPProxiesInsert Text
ttpiProject
  = lens _ttpiProject (\ s a -> s{_ttpiProject = a})
-- | Multipart request metadata (the 'TargetTCPProxy' request body).
ttpiPayload :: Lens' TargetTCPProxiesInsert TargetTCPProxy
ttpiPayload
  = lens _ttpiPayload (\ s a -> s{_ttpiPayload = a})
-- | Turns a 'TargetTCPProxiesInsert' into an actual request against
-- the Compute service; the response is an 'Operation'.
instance GoogleRequest TargetTCPProxiesInsert where
  type Rs TargetTCPProxiesInsert = Operation
  type Scopes TargetTCPProxiesInsert =
    '["https://www.googleapis.com/auth/cloud-platform",
      "https://www.googleapis.com/auth/compute"]
  requestClient TargetTCPProxiesInsert'{..}
    = go _ttpiProject _ttpiRequestId (Just AltJSON)
        _ttpiPayload
        computeService
    where go
            = buildClient
                (Proxy :: Proxy TargetTCPProxiesInsertResource)
                mempty
|
brendanhay/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/TargetTCPProxies/Insert.hs
|
mpl-2.0
| 4,579
| 0
| 16
| 1,006
| 484
| 292
| 192
| 77
| 1
|
module Codewars.Kata.SumString where
-- | Lexical tokens: a single symbol character, an integer run, or an
-- alphabetic run.
data Token = TChar Char
           | TInt Int
           | TStr String
           deriving (Eq, Show)
--
-- Character classes used by the tokenizer.
alpha, digit :: String
alpha = ['a'..'z'] ++ ['A'..'Z']
digit = ['0'..'9']
-- | Splits a string into tokens; characters that are neither symbols,
-- digits, nor letters are skipped.
-- NOTE(review): a leading minus sign becomes @TChar '-'@, so negative
-- numbers are never produced as single 'TInt' tokens.
tokenize :: String -> [Token]
tokenize [] = []
tokenize xxs@(c:cs)
  | c `elem` "-+*/()[]" = TChar c : tokenize cs
  | not (null i) = TInt (read i) : tokenize is
  | not (null s) = TStr s : tokenize ss
  | otherwise = tokenize cs
  where
    -- Longest digit / letter prefixes of the remaining input.
    (i, is) = span (`elem` digit) xxs
    (s, ss) = span (`elem` alpha) xxs
--
-- | Sums every integer token found in the input string; all other
-- tokens contribute nothing.
sumFromString :: String -> Integer
sumFromString input = sum [toInteger n | TInt n <- tokenize input]
--
|
ice1000/OI-codes
|
codewars/1-100/sum-up-the-random-string.hs
|
agpl-3.0
| 733
| 0
| 12
| 229
| 330
| 175
| 155
| 22
| 2
|
-- |
-- Copyright : (c) 2012 Daniël de Kok
-- License : BSD3
--
-- Maintainer : Daniël de Kok <me@danieldk.eu>
-- Stability : experimental
--
-- The more tests that are applied on a dataset, the more likely it becomes
-- that a Type I error occurs (rejecting the null hypothesis when it is
-- true). This module provides significance level corrections for repeated
-- tests.
--
-- For more information, see:
--
-- * /Bonferroni and Šidák corrections for multiple comparisons, H. Abdi, 2007, in: N.J. Salkind (ed.), Encyclopedia of Measurement and Statistics, Thousand Oaks, CA: Sage/
module Statistics.Test.Correction (
sidak,
bonferroni
) where
-- |
-- Bonferroni correction: α* = α / n, where α is the intended
-- significance level over all tests, n the number of tests, and α* the
-- corrected significance level.
bonferroni :: (Fractional f, Integral i) =>
     f -- ^ Significance level
  -> i -- ^ Number of comparisons
  -> f -- ^ The corrected significance level
bonferroni alpha n = alpha / fromIntegral n
-- |
-- Sidak correction: α* = 1 - (1 - α)^(1 / n), where α is the intended
-- significance level over all tests, n the number of tests, and α* the
-- corrected significance level.
sidak :: (Floating f, Integral i) =>
     f -- ^ Significance level
  -> i -- ^ Number of comparisons
  -> f -- ^ The corrected significance level
sidak alpha n = 1 - (1 - alpha) ** recip (fromIntegral n)
|
danieldk/approx-rand-test
|
src/Statistics/Test/Correction.hs
|
apache-2.0
| 1,431
| 0
| 9
| 288
| 155
| 97
| 58
| 15
| 1
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Spark.Core.RowToSQLSpec where
import qualified Data.Vector as V
import GHC.Generics (Generic)
import Test.Hspec
import Spark.Core.Types
import Spark.Core.Row
-- Test fixtures exercising different record shapes for the SQL
-- round-trip conversions: optional fields, list fields, nesting.
data TestStruct1 = TestStruct1 {
  ts1f1 :: Int,
  ts1f2 :: Maybe Int } deriving (Show, Eq, Generic, ToSQL, FromSQL)
-- List field; only SQLTypeable is derived here.
data TestStruct2 = TestStruct2 { ts2f1 :: [Int] } deriving (Show, Generic, SQLTypeable)
data TestStruct3 = TestStruct3 { ts3f1 :: Int } deriving (Show, Eq, Generic, ToSQL, FromSQL)
-- Nested struct.
data TestStruct4 = TestStruct4 { ts4f1 :: TestStruct3 } deriving (Show, Eq, Generic, ToSQL, FromSQL)
data TestStruct5 = TestStruct5 {
  ts5f1 :: Int,
  ts5f2 :: Int,
  ts5f3 :: TestStruct3
} deriving (Show, Eq, Generic, ToSQL, FromSQL)
newtype TestT1 = TestT1 { unTestT1 :: Int } deriving (Eq, Show, Generic, ToSQL, FromSQL)
-- | Round-trip check: the value must encode to exactly the given cell,
-- and the cell must decode back to the value.
v2c :: (Show a, ToSQL a, FromSQL a, Eq a) => a -> Cell -> IO ()
v2c !val !cell = do
  _ <- valueToCell val `shouldBe` cell
  _ <- cellToValue cell `shouldBe` Right val
  return ()
-- | Round-trip specs for value \<-\> cell conversion.
spec :: Spec
spec = do
  describe "Simple type tests" $ do
    it "int" $
      v2c (3 :: Int) (IntElement 3)
    it "int?" $
      v2c (Just 3 :: Maybe Int) (IntElement 3)
    -- Nothing corresponds to the Empty cell.
    it "int? 2" $
      v2c (Nothing :: Maybe Int) Empty
    it "TestStruct3" $
      v2c (TestStruct3 2) (rowCell [IntElement 2])
    -- Nested structs become nested row cells.
    it "TestStruct4" $
      v2c (TestStruct4 (TestStruct3 3)) $
        (rowCell [rowCell [IntElement 3]])
    it "TestStruct1 - empty" $
      v2c (TestStruct1 2 Nothing) (rowCell [IntElement 2, Empty])
    it "TestStruct1 - full" $
      v2c (TestStruct1 2 (Just 4)) (rowCell [IntElement 2, IntElement 4])
    it "TestStruct5" $
      v2c (TestStruct5 1 2 (TestStruct3 3)) $
        (rowCell [
          IntElement 1,
          IntElement 2,
          rowCell[IntElement 3]
        ])
-- describe "Simple type tests" $ do
-- it "newtype" $
-- v2c (TestT1 3) (IntElement 3)
|
tjhunter/karps
|
haskell/test/Spark/Core/RowToSQLSpec.hs
|
apache-2.0
| 1,994
| 0
| 16
| 478
| 730
| 378
| 352
| 51
| 1
|
module FreePalace.GUI.Initialize where
import FreePalace.Domain.GUI
import qualified FreePalace.Domain.Net as Net
-- | Wires up the main window and the connect dialog, then shows the
-- dialog. The handler receives the host and port entered by the user.
initializeGUI :: Components -> (Net.Hostname -> Net.PortId -> IO ()) -> IO ()
initializeGUI comps handler = do
  setUpMainWindow comps
  setUpConnectDialog comps handler
  showConnectDialog comps
-- | Makes closing the main window quit the application.
setUpMainWindow :: Components -> IO ()
setUpMainWindow comps =
  let win = mainWindow comps
  in onMainWindowClose win (quit win)
-- | Hooks up the connect dialog's buttons: OK reads the host and port
-- entries and invokes the connect handler; Cancel closes the dialog.
setUpConnectDialog :: Components -> (Net.Hostname -> Net.PortId -> IO ()) -> IO ()
setUpConnectDialog comps connectHandler = do
  onButtonClick (connectOk comps) $ do
    host <- textValue (connectHostEntry comps)
    port <- textValue (connectPortEntry comps)
    connectHandler host port
  onButtonClick (connectCancel comps) $
    closeDialog (connectDialog comps)
-- | Displays the connect dialog.
showConnectDialog :: Components -> IO ()
showConnectDialog = showDialog . connectDialog
|
psfblair/freepalace
|
src/FreePalace/GUI/Initialize.hs
|
apache-2.0
| 1,240
| 0
| 14
| 244
| 310
| 148
| 162
| 29
| 1
|
-----------------------------------------------------------------------------
-- Copyright 2012 Microsoft Corporation.
--
-- This is free software; you can redistribute it and/or modify it under the
-- terms of the Apache License, Version 2.0. A copy of the License can be
-- found in the file "license.txt" at the root of this distribution.
-----------------------------------------------------------------------------
module Backend.JavaScript.FromCore
( javascriptFromCore )
where
import Data.List ( intersperse )
import Data.Char
-- import Data.Maybe
-- import Data.Monoid ( mappend )
import qualified Data.Set as S
-- import Kind.Kind
import Type.Type
-- import Type.TypeVar
-- import Type.Kind( getKind )
-- import Type.Assumption( getArity )
import qualified Type.Pretty as Pretty
import Lib.PPrint
-- import qualified Lib.PPrint
import Common.Name
-- import Common.Range
import Common.NamePrim
import Common.Failure
import Common.Unique
import Common.Syntax
import Core.Core
import Core.Pretty ()
type CommentDoc = Doc
type ConditionDoc = Doc
-- | Global debug toggle for this backend.
debug :: Bool
debug = False
-- | Koka names that map directly to JavaScript literals instead of
-- generated bindings.
externalNames :: [(TName, Doc)]
externalNames
  = [ (conName exprTrue, text "true")
    , (conName exprFalse, text "false")
    , (TName nameOptionalNone typeOptional, text "undefined") -- ugly but has real performance benefit
    ]
--------------------------------------------------------------------------
-- Generate JavaScript code from System-F core language
--------------------------------------------------------------------------
-- | Entry point: pretty-prints a Core module as JavaScript; when a main
-- function name is given, a call to it is appended to the module.
javascriptFromCore :: Maybe (Name) -> Core -> Doc
javascriptFromCore mbMain core
  = runAsm (Env moduleName penv externalNames) (genModule mbMain core)
  where
    moduleName = coreProgName core
    penv = Pretty.defaultEnv{ Pretty.context = moduleName, Pretty.fullNames = False }
-- | Generates the whole AMD module: external includes, type and value
-- declarations, an optional main entry call, and the export object.
genModule :: Maybe (Name) -> Core -> Asm Doc
genModule mbMain core
  = do let externs = vcat (concatMap includeExternal (coreProgExternals core))
       decls1 <- genTypeDefs (coreProgTypeDefs core)
       decls2 <- genGroups (coreProgDefs core) -- (removeTypeLamApp $ coreProgDefs core)
       let imports = map importName (coreProgImports core)
           mainEntry = case mbMain of
                         Nothing -> empty
                         Just (name) -> text " " <$> text "// koka main entry:" <$>
                                        ppName (unqualify name) <> text "();"
       return $ text "// koka generated module: " <> string (showName (coreProgName core))
         <$> text "if (typeof define !== 'function') { var define = require('amdefine')(module) }"
         <$> text "define(" <> ( -- (squotes $ ppModFileName $ coreProgName core) <> comma <$>
             list ( {- (squotes $ text "_external"): -} (map squotes (map fst externalImports) ++ map moduleImport (coreProgImports core))) <> comma <+>
             text "function" <> tupled ( {- (text "_external"): -} (map snd externalImports ++ map ppModName imports)) <+> text "{" <$>
             vcat (
               [ text "\"use strict\";"
               , text " "
               , text "// koka declarations:"
               , externs
               , decls1
               , decls2
               , mainEntry
               , text " "
               , text "// koka exports:"
               , text "return" <+> encloseSep (text "{ ")
                                              (text " }")
                                              (text ", ")
                                              (map
                                                (\n-> squotes (ppName n) <> text ":" <+> ppName n)
                                                ( exportedConstrs ++ exportedValues )
                                              )
                               <> semi
               ])
             )
         <$> text "});"
  where
    -- Every value defined in the module, unqualified.
    exportedValues = let f (DefRec xs) = map defName xs
                         f (DefNonRec x) = [defName x]
                     in map unqualify $ concatMap f (coreProgDefs core)
    -- Public data constructors of the module's type definitions.
    exportedConstrs = let f (Synonym _ _) = []
                          f (Data info _ vs) = let xs = zip vs $ map conInfoName (dataInfoConstrs info)
                                               in map snd $ filter (\(v,_)-> v == Public) xs
                          u (TypeDefGroup xs) = xs
                      in map unqualify $ concatMap f $ concatMap u (coreProgTypeDefs core)
    -- (import path doc, local binder doc) pairs for external JS imports.
    externalImports :: [(Doc,Doc)]
    externalImports
      = concatMap importExternal (coreProgExternals core)
    -- The AMD path of a koka module import ("." for the local package).
    moduleImport :: Import -> Doc
    moduleImport imp
      = squotes (text (if null (importPackage imp) then "." else importPackage imp) <> text "/" <> text (moduleNameToPath (importName imp)))
includeExternal :: External -> [Doc]
includeExternal (ExternalInclude includes range)
= let content = case lookup JS includes of
Just s -> s
Nothing -> case lookup Default includes of
Just s -> s
Nothing -> failure ("javascript backend does not support external inline at " ++ show range)
in [align $ vcat $! map text (lines content)]
includeExternal _ = []
importExternal :: External -> [(Doc,Doc)]
importExternal (ExternalImport imports range)
= let (nm,s) = case lookup JS imports of
Just s -> s
Nothing -> case lookup Default imports of
Just s -> s
Nothing -> failure ("javascript backend does not support external import at " ++ show range)
in [(text s,pretty nm)]
importExternal _
= []
---------------------------------------------------------------------------------
-- Generate javascript statements for value definitions
---------------------------------------------------------------------------------
-- | Emit JavaScript statements for every definition group, stacked vertically.
genGroups :: [DefGroup] -> Asm Doc
genGroups groups
  = fmap vcat (mapM genGroup groups)
-- | Emit the definitions of a single group; each group is generated inside
-- a fresh unique-name scope.
genGroup :: DefGroup -> Asm Doc
genGroup group
  = localUnique $
    case group of
      DefNonRec def -> genDef def
      DefRec defs   -> fmap vcat (mapM genDef defs)
-- | Emit a single value definition: either a named JavaScript function
-- (when the body is a lambda, possibly under type abstraction/application,
-- handled by 'tryFunDef') or a @var@ assignment produced via 'genStat'.
-- Any source comment attached to the definition is reproduced above the
-- generated code.
genDef :: Def -> Asm Doc
genDef def@(Def name tp expr vis sort rng comm)
  = do penv <- getPrettyEnv
       -- pretty-printed type, emitted as a trailing /* .. */ comment
       let resDoc = typeComment (Pretty.ppType penv tp)
       defDoc <- do mdoc <- tryFunDef name resDoc expr
                    case mdoc of
                      Just doc -> return doc
                      -- not a function definition: assign the value to a variable
                      Nothing -> genStat (ResultAssign name Nothing) expr
       return $ vcat [ if null comm
                        then empty
                        else align (vcat (space : map text (lines (trim comm)))) {- already a valid javascript comment -}
                     , defDoc
                     ]
  where
    -- remove final newlines and whitespace
    trim s = reverse (dropWhile (`elem` " \n\r\t") (reverse s))
-- | Try to emit the expression as a named JavaScript function definition.
-- Type abstractions and applications are looked through (they have no
-- JavaScript counterpart); only a 'Lam' yields 'Just'. Returns 'Nothing'
-- for non-function expressions so the caller can fall back to a @var@.
tryFunDef :: Name -> CommentDoc -> Expr -> Asm (Maybe Doc)
tryFunDef name comment expr
  = case expr of
      TypeApp e _ -> tryFunDef name comment e
      TypeLam _ e -> tryFunDef name comment e
      Lam args body -> fmap Just $ genFunDef' name args comment body
      _ -> return Nothing
  where
    -- Emit @function name(args) /* type */ { body }@. If the body tail-calls
    -- itself, the body is wrapped in the 'tcoBlock' trampoline loop instead
    -- of a plain block.
    genFunDef' :: Name -> [TName] -> CommentDoc -> Expr -> Asm Doc
    genFunDef' name params comm body
      = do let args = map ( ppName . getName ) params
           bodyDoc <- genStat (ResultReturn (Just name) params) body
           return $ text "function" <+> ppName (unqualify name)
                      <> tupled args
                      <+> comm
                      <+> ( if body `isTailCalling` name
                             then tcoBlock bodyDoc
                             else debugComment ("genFunDef: no tail calls to " ++ showName name ++ " found")
                                  <> block bodyDoc
                          )
---------------------------------------------------------------------------------
-- Generate value constructors for each defined type
---------------------------------------------------------------------------------
-- | Emit value-constructor definitions for all type-definition groups.
genTypeDefs :: TypeDefGroups -> Asm Doc
genTypeDefs groups
  = fmap vcat (mapM genTypeDefGroup groups)
-- | Emit value-constructor definitions for a single type-definition group.
genTypeDefGroup :: TypeDefGroup -> Asm Doc
genTypeDefGroup (TypeDefGroup tds)
  = fmap vcat (mapM genTypeDef tds)
genTypeDef :: TypeDef -> Asm Doc
genTypeDef (Synonym {})
= return empty
genTypeDef (Data info _ _)
= do let (dataRepr, conReprs) = getDataRepr (-1) {- maxStructFields -} info
docs <- mapM ( \(c,repr) -> do let args = map ppName (map fst (conInfoParams c))
name <- genName (conInfoName c)
penv <- getPrettyEnv
if (conInfoName c == nameTrue)
then return (text "var" <+> name <+> text "=" <+> text "true" <> semi)
else if (conInfoName c == nameFalse)
then return (text "var" <+> name <+> text "=" <+> text "false" <> semi)
else return $ case repr of
ConEnum{}
-> text "var" <+> name <+> text "=" <+> int (conTag repr) <> semi <+> comment (Pretty.ppType penv (conInfoType c))
ConSingleton{}
-> text "var" <+> name <+> text "=" <+>
text (if conInfoName c == nameOptionalNone then "undefined" else "null")
<> semi <+> comment (Pretty.ppType penv (conInfoType c))
-- tagless
ConSingle{} -> genConstr penv c repr name args []
ConAsCons{} -> genConstr penv c repr name args []
_ -> genConstr penv c repr name args [(tagField, int (conTag repr))]
) $ zip (dataInfoConstrs $ info) conReprs
return $ debugComment ( "Value constructors for type '" ++ (show $ dataInfoName info) ++ "' (" ++ (show dataRepr) ++ ")" )
<$> vcat docs
where
genConstr penv c repr name args tagFields
= if null args
then debugWrap "genConstr: null fields"
$ text "var" <+> name <+> text "=" <+> object tagFields <> semi <+> comment (Pretty.ppType penv (conInfoType c))
else debugWrap "genConstr: with fields"
$ text "function" <+> name <> tupled args <+> comment (Pretty.ppType penv (conInfoType c))
<+> block ( text "return" <+>
(if conInfoName c == nameOptional then head args
else object (tagFields ++ map (\arg -> (arg, arg)) args)) <> semi )
---------------------------------------------------------------------------------
-- Statements
---------------------------------------------------------------------------------
-- | Applies a return context.
-- Wraps an expression doc according to the surrounding 'Result': either
-- @return expr;@ or @var name = expr;@ (optionally followed by a labeled
-- @break@ to jump out of an enclosing match block). An empty doc produces
-- no code at all; 'genExternalExpr' relies on this for the @return@ external.
-- Note: '<$>' here is the pretty-printer's vertical composition on 'Doc',
-- not 'fmap'.
getResult :: Result -> Doc -> Doc
getResult result doc
  = if isEmptyDoc doc
      then text ""
      else case result of
             ResultReturn _ _ -> text "return" <+> doc <> semi
             ResultAssign n ml -> ( if isWildcard n
                                      then doc <> semi -- discard, but keep the side effect
                                      else text "var" <+> ppName (unqualify n) <+> text "=" <+> doc <> semi
                                  ) <$> case ml of
                                          Nothing -> empty
                                          Just l -> text "break" <+> ppName l <> semi
tryTailCall :: Result -> Expr -> Asm (Maybe Doc)
tryTailCall result expr
= case expr of
-- Tailcall case 1
App (Var n _) args | ( case result of
ResultReturn (Just m) _ -> m == getName n
_ -> False
)
-> do let (ResultReturn _ params) = result
stmts <- genOverride params args
return $ Just $ block $ stmts <$> tailcall
-- Tailcall case 2
App (TypeApp (Var n _) _) args | ( case result of
ResultReturn (Just m) _ -> m == getName n
_ -> False
)
-> do let (ResultReturn _ params) = result
stmts <- genOverride params args
return $ Just $ block $ stmts <$> tailcall
_ -> return Nothing
where
-- overriding function arguments carefully
genOverride :: [TName] -> [Expr] -> Asm Doc
genOverride params args
= fmap (debugWrap "genOverride") $
do (stmts, varNames) <- fmap unzip $ mapM genVarBinding args
docs1 <- mapM genTName params
docs2 <- mapM genTName varNames
let assigns = map (\(p,a)-> if p == a
then debugComment ("genOverride: skipped overriding `" ++ (show p) ++ "` with itself")
else debugComment ("genOverride: preparing tailcall") <> p <+> text "=" <+> a <> semi
) (zip docs1 docs2)
return $ vcat stmts <$> vcat assigns
-- | Generates a statement from an expression by applying a return context (deeply) inside
genStat :: Result -> Expr -> Asm Doc
genStat result expr
= fmap (debugWrap "genStat") $
case extractExternal expr of
Just (tn,fs,es)
-> do (statDoc, exprDoc) <- genExternalExpr tn fs es
return (statDoc <$> getResult result exprDoc)
Nothing
-> do mdoc <- tryTailCall result expr
case mdoc of
Just doc
-> return doc
Nothing
-> case expr of
-- If expression is inlineable, inline it
_ | isInlineableExpr expr
-> do exprDoc <- genInline expr
return (getResult result exprDoc)
Case exprs branches
-> do (docs, scrutinees) <- fmap unzip $ mapM (\e-> if isInlineableExpr e && isTypeBool (typeOf e)
then do d <- genInline e
return (text "", d)
else do (sd,vn) <- genVarBinding e
vd <- genTName vn
return (sd, vd)
) exprs
doc <- genMatch result scrutinees branches
return (vcat docs <$> doc)
Let groups body
-> do doc1 <- genGroups groups
doc2 <- genStat result body
return (doc1 <$> doc2)
-- Handling all other cases
_ -> do (statDoc,exprDoc) <- genExpr expr
return (statDoc <$> getResult result exprDoc)
-- | Generates a statement for a match expression regarding a given return context
genMatch :: Result -> [Doc] -> [Branch] -> Asm Doc
genMatch result scrutinees branches
= fmap (debugWrap "genMatch") $ do
case branches of
[] -> fail ("Backend.JavaScript.FromCore.genMatch: no branch in match statement: " ++ show(scrutinees))
[b] -> fmap snd $ genBranch True result scrutinees b
{-- Special handling of return related cases - would be nice to get rid of it
[ Branch [p1] [Guard t1 (App (Var tn _) [r1])], Branch [p2] [Guard t2 e2] ]
| getName tn == nameReturn && isPat True p1 && isPat False p2 && isExprTrue t1 && isExprTrue t2
-> case e2 of
App (Var tn _) [r2]
| getName tn == nameReturn
-> do (stmts1, expr1) <- genExpr r1
(stmts2, expr2) <- genExpr r2
tnameDocs <- mapM genTName tnames
return $ text "if" <> parens (head tnameDocs ) <+> block (stmts1 <$> text "return" <+> expr1 <> semi)
<$> text "else" <+> block (stmts2 <$> text "return" <+> expr2 <> semi)
Con tn _
| getName tn == nameTuple 0
-> do (stmts, expr) <- genExpr r1
tnameDocs <- mapM genTName tnames
return $ text "if" <> parens (head tnameDocs ) <+> block (stmts <$> text "return" <+> expr <> semi)
_ -> fail "Backend.JavaScript.genMatch: found something different than () or return in explicit return"
-}
[Branch [p1] [Guard t1 e1], Branch [p2] [Guard t2 e2]]
| isExprTrue t1
&& isExprTrue t2
&& isInlineableExpr e1
&& isInlineableExpr e2
-> do let nameDoc = head scrutinees
let test = genTest (nameDoc, p1)
if (isExprTrue e1 && isExprFalse e2)
then return $ getResult result $ parens (conjunction test)
else do doc1 <- withNameSubstitutions (getSubstitutions nameDoc p1) (genInline e1)
doc2 <- withNameSubstitutions (getSubstitutions nameDoc p2) (genInline e2)
return $ debugWrap "genMatch: conditional expression"
$ getResult result
$ parens (conjunction test) <+> text "?" <+> doc1 <+> text ":" <+> doc2
bs
| all (\b-> length (branchGuards b) == 1) bs
&& all (\b->isExprTrue $ guardTest $ head $ branchGuards b) bs
-> do xs <- mapM (genBranch True result scrutinees) bs
return $ debugWrap "genMatch: guard-free case"
$ hcat ( map (\(conds,d)-> text "if" <+> parens (conjunction conds)
<+> block d <$> text "else "
) (init xs)
)
<> block (snd (last xs))
_ -> do (labelF, result') <- case result of
ResultReturn _ _ -> return (id, result)
ResultAssign n (Just _) -> return (id, result) -- wohoo, we can jump out from deep in!
ResultAssign n Nothing -> return ( \d-> text "match: " <> block d
, ResultAssign n (Just $ newName "match")
)
bs <- mapM (genBranch False result' scrutinees) (init branches)
b <- (genBranch True result' scrutinees) (last branches)
let ds = map (\(cds,stmts)-> if null cds
then stmts
else text "if" <+> parens (conjunction cds)
<+> block stmts
) bs
let d = snd b
return $ debugWrap "genMatch: regular case"
$ labelF (vcat ds <$> d)
where
-- | Generates a statement for a branch with given return context
genBranch :: Bool -> Result -> [Doc] -> Branch -> Asm ([ConditionDoc], Doc)
-- Regular catch-all branch generation
genBranch lastBranch result tnDocs branch@(Branch patterns guards)
= do let substs = concatMap (uncurry getSubstitutions) (zip tnDocs patterns)
let conditions = concatMap genTest (zip tnDocs patterns)
let se = withNameSubstitutions substs
gs <- mapM (se . genGuard False result) (init guards)
g <- (se . genGuard lastBranch result) (last guards)
return (conditions, debugWrap "genBranch" $ vcat gs <$> g)
getSubstitutions :: Doc -> Pattern -> [(TName, Doc)]
getSubstitutions nameDoc pat
= case pat of
PatCon tn args _ _ info
-> concatMap (\(pat',fn)-> getSubstitutions
(nameDoc <> (if (getName tn == nameOptional) then empty else (text "." <> fn)))
pat'
) (zip args (map (ppName . fst) (conInfoParams info)) )
PatVar tn pat' -> (tn, nameDoc):(getSubstitutions nameDoc pat')
PatWild -> []
genGuard :: Bool -> Result -> Guard -> Asm Doc
genGuard lastBranchLastGuard result (Guard t expr)
= do (testSt, testE) <- genExpr t
let result' = case result of
ResultAssign n _ | lastBranchLastGuard -> ResultAssign n Nothing
_ -> result
exprSt <- genStat result' expr
return $ if isExprTrue t
then exprSt
else testSt <$> text "if" <+> parens testE <> block exprSt
-- | Generates a list of boolish expression for matching the pattern
genTest :: (Doc, Pattern) -> [Doc]
genTest (scrutinee,pattern)
= case pattern of
PatWild -> []
PatVar _ pat
-> genTest (scrutinee,pat)
PatCon tn fields repr _ info
| getName tn == nameTrue
-> [scrutinee]
| getName tn == nameFalse
-> [text "!" <> scrutinee]
| otherwise
-> case repr of
ConEnum _ tag
-> [debugWrap "genTest: enum" $ scrutinee <+> text "===" <+> int tag]
ConSingleton{} -- the only constructor without fields (=== null)
-> [debugWrap "genTest: singleton" $ scrutinee <+> text "== null"] -- use == instead of === since undefined == null (for optional arguments)
ConSingle{} -- always succeeds
-> []
ConStruct{}
-> fail "Backend.JavaScript.FromCore.genTest: encountered ConStruct, which is not supposed to happen"
ConAsCons{}
| getName tn == nameOptional
-> [scrutinee <+> text "!== undefined"] ++ concatMap (\field -> genTest (scrutinee,field) ) fields
| otherwise
-> let conTest = debugWrap "genTest: asCons" $ scrutinee <+> text "!= null" -- use === instead of == since undefined == null (for optional arguments)
fieldTests = concatMap
(\(field,fieldName) -> genTest (scrutinee <> dot <> fieldName, field) )
(zip fields (map (ppName . fst) (conInfoParams info)) )
in (conTest:fieldTests)
ConNormal{}
-> let conTest = debugWrap "genTest: normal" $ scrutinee <> dot <> tagField <+> text "===" <+> int (conTag repr)
fieldTests = concatMap
(\(field,fieldName) -> genTest (scrutinee <> dot <> fieldName, field) )
( zip fields (map (ppName . fst) (conInfoParams info)) )
in (conTest:fieldTests)
{- -- | Generates assignments for the variables in the pattern
genAssign :: (TName,Pattern) -> Asm [Doc]
genAssign (TName n t,pattern)
= do docs <- f (ppName n) pattern
return $ [debugComment "<genAssign>"] ++ docs ++ [debugComment "</genAssign>"]
where
f s pattern
= case pattern of
PatWild
-> do return []
PatVar tname pat
-> do let doc = text "var" <+> ppName (getName tname) <+> text "=" <+> s <> semi
docs <- f (ppName (getName tname)) pat -- avoid mutiple a.b.c.d call
return (doc:docs)
PatCon _ fields _ _ info
-> do fmap concat $ mapM (\(field,fn) -> f (s <> text "." <> text (show fn)) field) (zip fields (map fst (conInfoParams info))) -- using ppName here writes __null0_ for _field1. WTF?
-}
-- | Takes a list of docs and concatenates them with JavaScript's
-- logical-and operator.
conjunction :: [Doc] -> Doc
conjunction
  = hcat . intersperse (text " && ")
---------------------------------------------------------------------------------
-- Expressions that produce statements on their way
---------------------------------------------------------------------------------
-- | Generates javascript statements and a javascript expression from core expression
genExpr :: Expr -> Asm (Doc,Doc)
genExpr expr
= case extractExternal expr of
Just (tn,fs,es)
-> genExternalExpr tn fs es
Nothing
-> case expr of
-- check whether the expression is pure an can be inlined
_ | isInlineableExpr expr
-> do doc <- genInline expr
return (empty,doc)
TypeApp e _ -> genExpr e
TypeLam _ e -> genExpr e
-- handle not inlineable cases
App (TypeApp (Con name info) _) [arg] | getName name == nameOptional
-> genExpr arg
App f args
-> do (decls,fdoc:docs) <- genExprs (f:args)
return (vcat decls, fdoc <> tupled docs <> debugComment "genExpr: App")
Let groups body
-> do decls1 <- genGroups groups
(decls2,doc) <- genExpr body
return (decls1 <$> decls2, doc)
Case _ _
-> do (doc, tname) <- genVarBinding expr
nameDoc <- genTName tname
return (doc, nameDoc)
_ -> failure ("JavaScript.FromCore.genExpr: invalid expression:\n" ++ show expr)
genExternalExpr :: TName -> String -> [Expr] -> Asm (Doc,Doc)
genExternalExpr tname format args
| getName tname == nameReturn
= do (statDoc,exprDoc) <- genExpr (head args)
return (statDoc <$> text "return" <+> exprDoc <> semi <> debugComment "premature return statement (2)"
, text "") -- emptyness of doc is important! no other way to tell to not generate assignment/return/whatever!
| otherwise
= do (statDocs,argDocs) <- genExprs args
doc <- genExternal tname format argDocs
return ( debugComment "<genExternalExpr.stmt>" <> vcat statDocs <> debugComment "</genExternalExpr.stmt>"
, debugComment "<genExternalExpr.expr>" <> doc <> debugComment "</genExternalExpr.expr>"
)
-- | Generate statements and expressions for a list of core expressions,
-- returning the statement docs and the expression docs as parallel lists.
genExprs :: [Expr] -> Asm ([Doc],[Doc])
genExprs
  = fmap unzip . mapM genExpr
-- | Introduces an additional let binding in core if necessary
-- The expression in the result is guaranteed to be a Var afterwards:
-- a 'Var' passes through unchanged (empty statement doc); anything else is
-- assigned to a fresh variable and that variable's 'TName' is returned.
genVarBinding :: Expr -> Asm (Doc, TName)
genVarBinding expr
  = case expr of
      Var tn _ -> return $ (empty, tn)
      _ -> do name <- newVarName "x"
              -- emit @var .xN = <expr>;@ and hand back the fresh name
              doc <- genStat (ResultAssign name Nothing) expr
              return ( doc, TName name (typeOf expr) )
---------------------------------------------------------------------------------
-- Pure expressions
---------------------------------------------------------------------------------
genPure :: Expr -> Asm Doc
genPure expr
= case extractExternal expr of
Just (tn,fs,es)
-> do vs <- genVarNames (countExternalArguments tn fs)
doc <- genExternal tn fs vs
return $ text "function" <> tupled vs <+> block ( text "return" <+> doc <> semi )
Nothing
-> case expr of
TypeApp e _ -> genPure e
TypeLam _ e -> genPure e
Var name info
-> do doc <- genTName name
return $ debugComment (show name)
<> doc
Con name repr
-> genTName name
Lit l
-> return $ ppLit l
Lam params body
-> do args <- mapM genCommentTName params
bodyDoc <- genStat (ResultReturn Nothing params) body
return (text "function" <> tupled args <+> block bodyDoc)
_ -> failure ("JavaScript.FromCore.genPure: invalid expression:\n" ++ show expr)
-- | Does the pattern (looking through variable bindings) match exactly the
-- boolean constructor selected by the flag (True -> nameTrue, False ->
-- nameFalse)? Wildcards never match.
isPat :: Bool -> Pattern -> Bool
isPat b q
  = case q of
      PatWild     -> False
      PatVar _ q' -> isPat b q'
      PatCon {}   -> getName (patConName q) == wanted
  where
    wanted = if b then nameTrue else nameFalse
-- | Generates an effect-free javasript expression
-- NOTE: Throws an error if expression is not guaranteed to be effectfree
genInline :: Expr -> Asm Doc
genInline expr
= case extractExternal expr of
Just (tn,fs,es)
-> genExternalInline tn fs es
Nothing
-> case expr of
_ | isPureExpr expr
-> genPure expr
TypeLam _ e -> genInline e
TypeApp e _ -> genInline e
App (TypeApp (Con name info) _) [arg] | getName name == nameOptional
-> genInline arg
App f args
-> do fdoc <- genInline f
argDocs <- mapM genInline args
return (fdoc <> tupled argDocs <> debugComment "genInline: App")
_ -> failure ("JavaScript.FromCore.genInline: invalid expression:\n" ++ show expr)
where
genExternalInline :: TName -> String -> [Expr] -> Asm Doc
genExternalInline tname format args
= do argDocs <- mapM genInline args
genExternal tname format argDocs
-- | Number of runtime arguments an external format string expects.
-- This is the highest argument index referenced as @#i@, matching exactly
-- how 'genExternal'/@ppExternalF@ consumes the format: escaped hashes
-- (@\\#@) are literals, @##@ marks a type argument (no runtime value), and
-- repeated references to the same index name one argument. The previous
-- implementation simply counted @'#'@ characters, overcounting in all
-- three cases and making 'genPure' build wrapper functions of wrong arity
-- (e.g. format @"#1 + #1"@ produced a two-parameter wrapper).
countExternalArguments :: TName -> String -> Int
countExternalArguments _tname format
  = scan format
  where
    -- walk the format in the same way ppExternalF does
    scan ('\\':'#':rest) = scan rest        -- escaped: a literal '#'
    scan ('#':'#':rest)  = scan rest        -- type argument marker
    scan ('#':c:rest)
      | c `elem` ['1'..'9'] = max (fromEnum c - fromEnum '0') (scan rest)
      | otherwise           = scan rest     -- '#' followed by other char: both consumed
    scan (_:rest)        = scan rest
    scan []              = 0
genExternal :: TName -> String -> [Doc] -> Asm Doc
genExternal tname format argDocs
= do let name = getName tname
return (debugComment ("<genExternal format='" ++ format ++ "'>") <> ppExternalF name format argDocs <> debugComment "</genExternal>")
where
ppExternalF :: Name -> String -> [Doc] -> Doc
ppExternalF name [] args
= empty
ppExternalF name k@('\\':'#':xs) args
= char '#' <> ppExternalF name xs args
ppExternalF name k@('#':'#':xs) args
= failure ("Backend.JavaScript.FromCore: type arguments in javascript external in: " ++ show tname)
ppExternalF name k@('#':y:xs) args
= if y `elem` ['1'..'9']
then (let n = length args
i = fromEnum y - fromEnum '1'
in assertion ("illegal index in external: " ++ show tname ++ "("++k++"): index: " ++ show i) (i < n) $
args!!i <> ppExternalF name xs args)
else char y <> ppExternalF name xs args
ppExternalF name (x:xs) args
= char x <> ppExternalF name xs args
-- | Print a definition name with its qualifier stripped.
genDefName :: TName -> Asm Doc
genDefName
  = return . ppName . unqualify . getName
-- | Print a (typed) name, honouring any active pattern-match substitution:
-- names bound via 'withNameSubstitutions' render as their replacement doc
-- (e.g. a field access on the scrutinee) instead of a plain identifier.
genTName :: TName -> Asm Doc
genTName tname
  = do env <- getEnv
       case lookup tname (substEnv env) of
          Nothing -> genName (getName tname)
          Just d -> return d
-- | Print a name, dropping the qualifier when it refers to the module
-- currently being generated; foreign qualifiers are kept.
genName :: Name -> Asm Doc
genName name
  | isQualified name
      = do modname <- getModule
           return $ ppName (if qualifier name == modname
                              then unqualify name
                              else name)
  | otherwise
      = return (ppName name)
-- | Allocate one fresh variable name and render it immediately.
genVarName :: String -> Asm Doc
genVarName
  = fmap ppName . newVarName
-- | Generates `i` fresh variables and delivers them as `Doc` right away
genVarNames :: Int -> Asm [Doc]
genVarNames
  = fmap (map ppName) . newVarNames
-- | Generate a name with its type in comments
genCommentTName :: TName -> Asm Doc
genCommentTName (TName n t)
= do env <- getPrettyEnv
return $ ppName n <+> comment (Pretty.ppType env t )
---------------------------------------------------------------------------------
-- Classification
---------------------------------------------------------------------------------
extractExternal :: Expr -> Maybe (TName, String, [Expr])
extractExternal expr
= case expr of
App (TypeApp (Var tname (InfoExternal formats)) targs) args
-> Just (tname, format tname formats, args)
App var@(Var tname (InfoExternal formats)) args
-> Just (tname, format tname formats, args)
_ -> Nothing
where
format tn fs
= case lookup JS fs of
Nothing -> case lookup Default fs of
Nothing -> failure ("backend does not support external in " ++ show tn ++ show fs)
Just s -> s
Just s -> s
-- | Is the expression (looking through type abstraction/application) a
-- literal lambda?
isFunExpr :: Expr -> Bool
isFunExpr expr
  = case expr of
      Lam _ _     -> True
      TypeApp e _ -> isFunExpr e
      TypeLam _ e -> isFunExpr e
      _           -> False
-- | Can the expression be emitted inline, without any helper statements?
-- An application qualifies only when the function and every argument are
-- pure and the function is not a literal lambda, which would otherwise
-- produce the ugly immediately-invoked form @function(){..}(a,b,c)@.
isInlineableExpr :: Expr -> Bool
isInlineableExpr expr
  = case expr of
      TypeApp e _ -> isInlineableExpr e
      TypeLam _ e -> isInlineableExpr e
      App f args  -> not (isFunExpr f) && isPureExpr f && all isPureExpr args
      _           -> isPureExpr expr
-- | Is the expression free of effects so it may safely be inlined?
-- The special name @return@ is deliberately treated as impure, which
-- guarantees it is never inlined away.
isPureExpr :: Expr -> Bool
isPureExpr expr
  = case expr of
      TypeApp e _ -> isPureExpr e
      TypeLam _ e -> isPureExpr e
      Var n _     -> getName n /= nameReturn
      Con _ _     -> True
      Lit _       -> True
      Lam _ _     -> True
      _           -> False
-- | Does the expression contain a tail call to the named function?
-- Used by 'tryFunDef' to decide whether a function body must be wrapped in
-- the 'tcoBlock' trampoline (a labeled @while(1)@ loop re-entered via
-- @continue tailcall;@) instead of a plain block.
isTailCalling :: Expr -> Name -> Bool
isTailCalling expr n
  = case expr of
      TypeApp expr _ -> expr `isTailCalling` n -- trivial
      TypeLam _ expr -> expr `isTailCalling` n -- trivial
      Lam _ _ -> False -- lambda body is a new context, can't tailcall
      Var _ _ -> False -- a variable is not a call
      Con _ _ -> False -- a constructor is not a call
      Lit _ -> False -- a literal is not a call
      App (Var tn _) _ | getName tn == n -- direct application can be a tail call
        -> True
      App (TypeApp (Var tn _) _) _ | getName tn == n -- tailcalled function might be polymorphic and is applied to types before
        -> True
      App (Var tn _) [e] | getName tn == nameReturn -- a return statement is transparent in terms of tail calling
        -> e `isTailCalling` n
      App _ _ -> False -- other applications don't apply
      Let _ e -> e `isTailCalling` n -- tail calls can only happen in the actual body
      Case _ bs -> any f1 bs -- match statement get analyzed in depth
  where
    f1 (Branch _ gs) = any f2 gs -- does any of the guards tailcall?
    f2 (Guard _ e) = e `isTailCalling` n -- does the guarded expression tailcall?
---------------------------------------------------------------------------------
-- The assembly monad
---------------------------------------------------------------------------------
newtype Asm a = Asm { unAsm :: Env -> St -> (a, St)}
instance Functor Asm where
fmap f (Asm a) = Asm (\env st -> case a env st of
(x,st') -> (f x, st'))
-- NOTE(review): since GHC 7.10 (the Applicative-Monad Proposal) every
-- Monad must also be an Applicative (and Functor, defined above); without
-- this instance the module no longer compiles on modern compilers. The
-- instance threads the environment/state exactly like '>>='.
instance Applicative Asm where
  pure x = Asm (\env st -> (x,st))
  (Asm f) <*> (Asm a) = Asm (\env st -> case f env st of
                                          (g,st1) -> case a env st1 of
                                                       (x,st2) -> (g x, st2))

instance Monad Asm where
  -- state-passing: run the action, feed its result and updated state to `f`
  return x = Asm (\env st -> (x,st))
  (Asm a) >>= f = Asm (\env st -> case a env st of
                                    (x,st1) -> case f x of
                                                 Asm b -> b env st1)
runAsm :: Env -> Asm Doc -> Doc
runAsm initEnv (Asm asm)
= case asm initEnv initSt of
(doc,st) -> doc
data St = St { uniq :: Int
}
data Env = Env { -- | current module
moduleName :: Name
-- | for printing nice types
, prettyEnv :: Pretty.Env
-- | substituting names
, substEnv :: [(TName, Doc)]
}
data Result = ResultReturn (Maybe Name) [TName] -- first field carries function name if not anonymous and second the arguments which are always known
| ResultAssign Name (Maybe Name) -- variable name and optional label to break
initSt = St 0
instance HasUnique Asm where
updateUnique f
= Asm (\env st -> (uniq st, st{ uniq = f (uniq st)}))
updateSt f
= Asm (\env st -> (st,f st))
getSt
= updateSt id
setSt st
= updateSt (const st)
getEnv
= Asm (\env st -> (env, st))
withEnv f (Asm asm)
= Asm (\env st -> asm (f env) st)
localUnique asm
= do u <- updateUnique id
x <- asm
setUnique u
return x
newVarName :: String -> Asm Name
newVarName s
= do u <- unique
return (newName ("." ++ s ++ show u))
newVarNames :: Int -> Asm [Name]
newVarNames 0 = return []
newVarNames i
= do n <- newVarName "x"
ns <- newVarNames (i - 1)
return (n:ns)
getModule :: Asm Name
getModule
= do env <- getEnv
return (moduleName env)
getPrettyEnv :: Asm Pretty.Env
getPrettyEnv
= do env <- getEnv
return (prettyEnv env)
withTypeVars :: [TypeVar] -> Asm a -> Asm a
withTypeVars vars asm
= withEnv (\env -> env{ prettyEnv = Pretty.niceEnv (prettyEnv env) vars }) asm
withNameSubstitutions :: [(TName, Doc)] -> Asm a -> Asm a
withNameSubstitutions subs asm
= withEnv (\env -> env{ substEnv = subs ++ substEnv env }) asm
---------------------------------------------------------------------------------
-- Pretty printing
---------------------------------------------------------------------------------
-- | Approved for use in JavaScript according to ECMA definition
ppLit :: Lit -> Doc
ppLit lit
= case lit of
LitInt i -> (pretty i)
LitChar c -> squotes (escape c)
LitFloat d -> (pretty d)
LitString s -> dquotes (hcat (map escape s))
where
escape c
= if (c < ' ')
then (if (c=='\n') then text "\\n"
else if (c == '\r') then text "\\r"
else if (c == '\t') then text "\\t"
else text "\\u" <> text (showHex 4 (fromEnum c)))
else if (c <= '~')
then (if (c == '\"') then text "\\\""
else if (c=='\'') then text "\\'"
else if (c=='\\') then text "\\\\"
else char c)
else if (fromEnum c <= 0xFFFF)
then text "\\u" <> text (showHex 4 (fromEnum c))
else text "\\U" <> text (showHex 8 (fromEnum c))
ppName :: Name -> Doc
ppName name
= if isQualified name
then ppModName (qualifier name) <> dot <> encode False (unqualify name)
else encode False name
-- | Print a possibly qualified name relative to a module: the qualifier is
-- dropped exactly when it names the given module.
-- NOTE(review): the previous inline comment claimed we must *always*
-- qualify to avoid clashes with local variables (e.g.
-- @fun f( x : int ) { Main.x( x ) }@), but the code does the opposite for
-- the current module -- confirm which behaviour is intended.
ppQName :: Name -> Name -> Doc
ppQName modName name
  = if (modName == qualifier name)
      then ppName (unqualify name)
      else ppName name
ppModName :: Name -> Doc
ppModName name
= text "$" <> encode True (name)
-- | Encode a Koka name as a valid JavaScript identifier: reserved words
-- get a @$@ prefix, everything else goes through 'asciiEncode'.
encode :: Bool -> Name -> Doc
encode isModule name
  = text encoded
  where
    plain = show name
    encoded
      | isReserved plain = '$' : plain
      | otherwise        = asciiEncode isModule plain
-- | Is the string unusable as a plain JavaScript identifier? True for
-- names of the shape @T<digits>@ (including bare @T@) and for every word
-- in the 'reserved' set.
isReserved :: String -> Bool
isReserved s
  = looksLikeTypeParam s || s `S.member` reserved
  where
    looksLikeTypeParam ('T':ds) = all isDigit ds
    looksLikeTypeParam _        = False
reserved :: S.Set String
reserved
= S.fromList $ -- JavaScript pseudo-keywords
[ "prototype"
, "toString"
, "arguments"
, "eval"
]
++ -- word literals
[ "null"
, "Infinity"
, "NaN"
]
++ -- JavaScript keywords
[ "break"
, "case"
, "catch"
, "continue"
, "debugger"
, "default"
, "delete"
, "do"
, "else"
, "finally"
, "for"
, "function"
, "if"
, "in"
, "instanceof"
, "new"
, "return"
, "switch"
, "this"
, "throw"
, "try"
, "typeof"
, "var"
, "void"
, "while"
, "with"
]
++ -- reserved for future use
[ "class"
, "enum"
, "export"
, "extends"
, "import"
, "super"
, "const"
]
++ -- special globals
[ "window"
, "document"
, "process"
, "exports"
, "module"
]
block :: Doc -> Doc
block doc
= text "{" <$> tab doc <$> text "}"
tcoBlock :: Doc -> Doc
tcoBlock doc
= text "{ tailcall: while(1)" <$>
text "{" <$> tab ( doc ) <$> text "}}"
tailcall :: Doc
tailcall = text "continue tailcall;"
-- | Render a JavaScript object literal from field-name/value doc pairs.
object :: [(Doc, Doc)] -> Doc
object fields
  = text "{" <+> hcat (punctuate (comma <> space) (map field fields)) <+> text "}"
  where
    field (key, val) = key <> colon <+> val
tab :: Doc -> Doc
tab doc
= indent 2 doc
typeComment = comment
comment :: Doc -> Doc
comment d
= text " /*" <+> d <+> text "*/ "
debugComment :: String -> Doc
debugComment s
= if debug
then comment (text s)
else empty
-- | In debug builds, surround a doc with XML-style open/close markers and
-- indent it; in release builds the doc passes through untouched.
debugWrap :: String -> Doc -> Doc
debugWrap label d
  | debug     = debugComment ("<" ++ label ++ ">") <$> tab d <$> debugComment ("</" ++ label ++ ">")
  | otherwise = d
tagField :: Doc
tagField = text "_tag"
|
lpeterse/koka
|
src/Backend/JavaScript/FromCore.hs
|
apache-2.0
| 42,627
| 0
| 29
| 16,378
| 10,937
| 5,428
| 5,509
| 764
| 21
|
-- | Basic low-level GL wrapper and reference.
module Graphics.GL.Low (
-- * Overview
-- | This library exposes a simplified subset of OpenGL that I hope is
-- complete enough for following tutorials and making simple games or demos.
--
-- For a whirlwind tour of the machinery behind GL see the module:
-- "Graphics.GL.Low.EntirePictureUpFront"
--
-- This library uses the `gl' package for raw bindings to OpenGL and the
-- `linear' package for matrices.
--
-- See submodules for specialized documentation of each subsystem.
--
-- @"Graphics.GL.Low.VAO"@
--
-- @"Graphics.GL.Low.BufferObject"@
--
-- @"Graphics.GL.Low.Shader"@
--
-- @"Graphics.GL.Low.VertexAttrib"@
--
-- @"Graphics.GL.Low.Texture"@
--
-- @"Graphics.GL.Low.Render"@
--
-- @"Graphics.GL.Low.Stencil"@
--
-- @"Graphics.GL.Low.Blending"@
--
-- @"Graphics.GL.Low.Framebuffer"@
-- * VAO
-- | See also "Graphics.GL.Low.VAO".
newVAO,
bindVAO,
deleteVAO,
VAO,
-- * Buffer Objects
-- | See also "Graphics.GL.Low.BufferObject".
newBufferObject,
bindVBO,
bindElementArray,
updateVBO,
updateElementArray,
deleteBufferObject,
BufferObject,
UsageHint(..),
-- * Shader Program
-- | See also "Graphics.GL.Low.Shader".
newProgram,
newProgramSafe,
useProgram,
deleteProgram,
setUniform1f,
setUniform2f,
setUniform3f,
setUniform4f,
setUniform1i,
setUniform2i,
setUniform3i,
setUniform4i,
setUniform22,
setUniform33,
setUniform44,
Program,
ProgramError(..),
-- ** Vertex Attributes
-- | See also "Graphics.GL.Low.VertexAttrib".
setVertexLayout,
LayoutElement(..),
DataType(..),
-- * Textures
-- | See also "Graphics.GL.Low.Texture".
Texture,
newTexture2D,
newCubeMap,
newEmptyTexture2D,
newEmptyCubeMap,
deleteTexture,
setActiveTextureUnit,
bindTexture2D,
bindTextureCubeMap,
setTex2DFiltering,
setCubeMapFiltering,
setTex2DWrapping,
setCubeMapWrapping,
Cube(..),
Filtering(..),
Wrapping(..),
-- * Rendering
--
-- ** Primitives
-- | See also "Graphics.GL.Low.Render".
drawPoints,
drawLines,
drawLineStrip,
drawLineLoop,
drawTriangles,
drawTriangleStrip,
drawTriangleFan,
drawIndexedPoints,
drawIndexedLines,
drawIndexedLineStrip,
drawIndexedLineLoop,
drawIndexedTriangles,
drawIndexedTriangleStrip,
drawIndexedTriangleFan,
setViewport,
enableScissorTest,
disableScissorTest,
enableCulling,
disableCulling,
Viewport(..),
Culling(..),
IndexFormat(..),
-- ** Color Buffer
enableColorWriting,
disableColorWriting,
clearColorBuffer,
-- ** Depth Test
enableDepthTest,
disableDepthTest,
clearDepthBuffer,
-- ** Stencil Test
-- | See also "Graphics.GL.Low.Stencil".
enableStencil,
disableStencil,
clearStencilBuffer,
basicStencil,
Stencil(..),
StencilFunc(..),
StencilOp(..),
-- ** Blending
-- | See also "Graphics.GL.Low.Blending".
enableBlending,
disableBlending,
basicBlending,
Blending(..),
BlendFactor(..),
BlendEquation(..),
-- * Framebuffers
-- | See also "Graphics.GL.Low.Framebuffer".
FBO,
newFBO,
bindFBO,
bindDefaultFramebuffer,
deleteFBO,
attachTex2D,
attachCubeMap,
attachRBO,
-- * Renderbuffers
RBO,
newRBO,
deleteRBO,
-- * Errors
GLError(..),
getGLError,
assertNoGLError,
-- * Image Formats
ImageFormat(..),
) where
import Graphics.GL.Low.Classes
import Graphics.GL.Low.VAO
import Graphics.GL.Low.BufferObject
import Graphics.GL.Low.Shader
import Graphics.GL.Low.VertexAttrib
import Graphics.GL.Low.Texture
import Graphics.GL.Low.Framebuffer
import Graphics.GL.Low.Blending
import Graphics.GL.Low.Stencil
import Graphics.GL.Low.Render
import Graphics.GL.Low.Cube
import Graphics.GL.Low.Error
|
evanrinehart/lowgl
|
Graphics/GL/Low.hs
|
bsd-2-clause
| 3,783
| 0
| 5
| 677
| 533
| 384
| 149
| 117
| 0
|
module Emulator.Video.BitmapModes where
import Emulator.Memory
import Emulator.Types
import Emulator.Video.Util
import Emulator.Video.Palette
import Emulator.Video.VideoController
import Data.Array.IArray
-- | Render the GBA bitmap display modes (BG modes 3-5), which draw a single
-- affine-transformed background (BG2) straight from VRAM.
bitmapModes :: AddressIO m => LCDControl -> m [ScreenObj]
bitmapModes cnt = do
  bgCNT <- recordBGControl 0x0400000C      -- BG2CNT control register
  xWord <- readAddressWord 0x04000028      -- BG2X reference point
  yWord <- readAddressWord 0x0400002C      -- BG2Y reference point
  paramMem <- readRange (0x04000020, 0x04000027)  -- BG2 affine parameters PA..PD
  vram <- readBitmapVram (bgMode cnt) (displayFrameSelect cnt)
  palette <- readRange (0x05000000, 0x050001FF)   -- BG palette RAM (512 bytes / 256 entries)
  let refPoint = (referencePoint xWord, referencePoint yWord)
  let params = affineParameters 0x04000020 0x04000022 0x04000024 0x04000026 paramMem
  case bgMode cnt of
    3 -> return [mode3n5 bgCNT refPoint params (240, 160) vram]       -- mode 3: 240x160 direct colour
    4 -> return [mode4 bgCNT refPoint params (240, 160) vram palette] -- mode 4: 240x160 paletted
    _ -> return [mode3n5 bgCNT refPoint params (160, 128) vram]       -- mode 5: 160x128 direct colour
-- | Read the block of VRAM backing the current bitmap frame.
--
-- Mode 3 is a single 240x160 16bpp frame (0x12C00 bytes). Mode 4 (240x160,
-- 8bpp, 0x9600 bytes) and mode 5 (160x128, 16bpp, 0xA000 bytes) are
-- double-buffered; the Bool is the display-frame-select bit choosing the
-- frame at 0x0600A000 (True) or 0x06000000 (False).
--
-- BUGFIX: three of the original upper bounds carried a stray extra zero
-- (0x0060095FF, 0x006013FFF, 0x006009FFF), placing the ranges outside the
-- VRAM region at 0x06000000 and giving mode-4 frame 0 a nine-digit literal.
readBitmapVram :: AddressSpace m => Byte -> Bool -> m (Array Address Byte)
readBitmapVram 3 _ = readRange (0x06000000, 0x06012BFF)
readBitmapVram 4 True = readRange (0x0600A000, 0x060135FF)
readBitmapVram 4 False = readRange (0x06000000, 0x060095FF)
readBitmapVram _ True = readRange (0x0600A000, 0x06013FFF)
readBitmapVram _ False = readRange (0x06000000, 0x06009FFF)
-- | Build the screen object for the 16bpp bitmap modes (3 and 5): one
-- affine-transformed quad textured with the raw frame contents.
mode3n5 :: BGControl -> AffineRefPoints -> AffineParameters -> (Int, Int) -> Array Address Byte -> ScreenObj
mode3n5 bgCNT (refX, refY) params (w, h) vram =
  BG [Tile bitmap corners] (bgPriority bgCNT) 2
  where
    bitmap = convToBitmap vram (fst (bounds vram)) (w * h)
    pivot = (refX + fromIntegral w, refY + fromIntegral h)
    -- NOTE(review): the untransformed quad is 8 units square regardless of
    -- w/h -- confirm this matches how affineCoords scales it.
    untransformed = ((refX, refY), (refX + 8, refY), (refX, refY + 8), (refX + 8, refY + 8))
    corners = affineCoords pivot params untransformed
-- | Build the screen object for mode 4: an 8bpp frame whose pixels are
-- resolved through the 256-colour background palette.
mode4 :: BGControl -> AffineRefPoints -> AffineParameters -> (Int, Int) -> Array Address Byte -> Palette -> ScreenObj
mode4 bgCNT (refX, refY) params (w, h) vram pal =
  BG [Tile bitmap corners] (bgPriority bgCNT) 2
  where
    bitmap = palette256 pal vram (fst (bounds vram)) (w * h)
    pivot = (refX + fromIntegral w, refY + fromIntegral h)
    untransformed = ((refX, refY), (refX + 8, refY), (refX, refY + 8), (refX + 8, refY + 8))
    corners = affineCoords pivot params untransformed
-- | Decode @nPixels@ 16-bit values from VRAM starting at the base address:
-- each pixel is two consecutive bytes combined via 'bytesToHalfWord'.
convToBitmap :: Array Address Byte -> Address -> Int -> [HalfWord]
convToBitmap vram addr remaining
  | remaining == 0 = []
  | otherwise = pixel : convToBitmap vram (addr + 0x00000002) (remaining - 1)
  where
    pixel = bytesToHalfWord (vram!addr) (vram!(addr + 0x00000001))
|
intolerable/GroupProject
|
src/Emulator/Video/BitmapModes.hs
|
bsd-2-clause
| 2,549
| 0
| 13
| 443
| 978
| 516
| 462
| 47
| 3
|
{-# LANGUAGE OverloadedStrings #-}
module KATOCB3 where
import qualified Data.ByteString as B
import Data.ByteString.Char8 ()
-- (key, iv, aad, input, out, taglen, tag)
type KATOCB3 = (B.ByteString, B.ByteString, B.ByteString, B.ByteString, B.ByteString, Int, B.ByteString)
-- Key shared by every vector below: bytes 0x00..0x0f (an AES-128 key).
key1 = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
-- Nonce shared by every vector below: bytes 0x00..0x0b (96 bits).
nonce1 = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b"
-- Known-answer vectors for AES-128 OCB3 encryption, in the KATOCB3 layout
-- (key, iv, aad, input, out, taglen, tag). The first entry exercises the
-- empty-message / empty-AAD case; later ones vary AAD and plaintext.
vectors_aes128_enc :: [KATOCB3]
vectors_aes128_enc =
    [ ( {-key = -} key1
      , {-iv = -} nonce1
      , {-aad = -}""
      , {-input = -}""
      , {-out = -}""
      , {-taglen = -} 16
      , {-tag = -} "\x19\x7b\x9c\x3c\x44\x1d\x3c\x83\xea\xfb\x2b\xef\x63\x3b\x91\x82")
    , ( key1, nonce1
      , "\x00\x01\x02\x03\x04\x05\x06\x07"
      , "\x00\x01\x02\x03\x04\x05\x06\x07"
      , "\x92\xb6\x57\x13\x0a\x74\xb8\x5a"
      , 16
      , "\x16\xdc\x76\xa4\x6d\x47\xe1\xea\xd5\x37\x20\x9e\x8a\x96\xd1\x4e")
    , ( key1, nonce1
      , "\x00\x01\x02\x03\x04\x05\x06\x07"
      , ""
      , ""
      , 16
      , "\x98\xb9\x15\x52\xc8\xc0\x09\x18\x50\x44\xe3\x0a\x6e\xb2\xfe\x21")
    , ( key1, nonce1
      , ""
      , "\x00\x01\x02\x03\x04\x05\x06\x07"
      , "\x92\xb6\x57\x13\x0a\x74\xb8\x5a"
      , 16
      , "\x97\x1e\xff\xca\xe1\x9a\xd4\x71\x6f\x88\xe8\x7b\x87\x1f\xbe\xed")
    , ( key1, nonce1
      , "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
      , "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
      , "\xbe\xa5\xe8\x79\x8d\xbe\x71\x10\x03\x1c\x14\x4d\xa0\xb2\x61\x22"
      , 16
      , "\x77\x6c\x99\x24\xd6\x72\x3a\x1f\xc4\x52\x45\x32\xac\x3e\x5b\xeb")
    ]
-- | Encryption known-answer vectors, grouped under a descriptive label.
-- Added the missing top-level type signature (best practice; avoids
-- monomorphism-restriction/defaulting surprises under OverloadedStrings).
vectors_encrypt :: [(String, [KATOCB3])]
vectors_encrypt =
    [ ("AES128 Enc", vectors_aes128_enc)
    ]

-- | No decryption-specific vectors are provided.
vectors_decrypt :: [(String, [KATOCB3])]
vectors_decrypt = []
|
vincenthz/hs-cipher-aes
|
Tests/KATOCB3.hs
|
bsd-3-clause
| 1,735
| 0
| 6
| 340
| 233
| 155
| 78
| 43
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
module Tinfoil.Data.KDF(
Credential(..)
, CredentialHash(..)
, KDF(..)
, MCFHash(..)
, MCFPrefix(..)
, NeedsRehash(..)
, Verification(..)
, packMCFHash
, parseMCFPrefix
, renderMCFPrefix
, renderMCFHash
, unpackMCFHash
) where
import Control.DeepSeq.Generics (genericRnf)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.Word (Word8)
import GHC.Generics (Generic)
import P
import System.IO
import Tinfoil.Data.Verify
-- | Output of a 'KDF'. Do not ever implement an 'Eq' instance for
-- this type.
-- (Presumably so hashes are never compared with a non-constant-time
-- equality -- TODO confirm; use the KDF's verify functions instead.)
newtype CredentialHash =
  CredentialHash {
    unCredentialHash :: ByteString
  } deriving (Show, Generic)
instance NFData CredentialHash where rnf = genericRnf
-- | Credential hash wrapped up with an MCF prefix.
newtype MCFHash =
  MCFHash {
    unMCFHash :: ByteString
  } deriving (Show, Generic)
instance NFData MCFHash where rnf = genericRnf
-- | Serialise an MCF hash; it is already stored in wire format.
renderMCFHash :: MCFHash -> ByteString
renderMCFHash = unMCFHash
-- | A secret supplied by a user (e.g. a password), prior to hashing.
newtype Credential =
  Credential {
    unCredential :: ByteString
  } deriving (Eq, Show, Generic)
instance NFData Credential where rnf = genericRnf
-- | Whether a stored hash should be regenerated with the current KDF
-- parameters.
data NeedsRehash =
    NeedsRehash
  | UpToDate
  deriving (Eq, Show, Generic)
instance NFData NeedsRehash where rnf = genericRnf
-- | Result of verifying a credential: the verification outcome plus
-- whether the stored hash is due for a rehash.
data Verification = Verification !Verified !NeedsRehash
  deriving (Eq, Show, Generic)
instance NFData Verification where rnf = genericRnf
-- | Key derivation function - put in a secret and get out a token
-- from which it is computationally infeasible to derive the secret, which
-- is suitable either for use as a cryptographic key or as a credential hash.
--
-- Properties:
-- * Uses an underlying cryptographic hash function or other pseudo-random
--   function with good collision resistance and diffusion.
-- * Salted with high-quality entropy (to make rainbow tables
--   infeasible).
-- * Slow, for naive brute-force.
-- * High memory requirements, for highly-parallel low-memory
--   processors (GPUs, mining ASICs, et cetera).
data KDF = KDF
  { kdfGenHash :: (Credential -> IO CredentialHash)
    -- ^ Hash a fresh credential for storage.
  , kdfVerifyCredential :: (CredentialHash -> Credential -> IO Verified)
    -- ^ Check a credential against a stored hash.
  , kdfVerifyNoCredential :: (Credential -> IO Verified)
    -- ^ Verification path for when no hash exists
    -- (presumably burns comparable time to resist user enumeration --
    -- TODO confirm against the implementations).
  , kdfMcfPrefix :: MCFPrefix
    -- ^ The MCF tag identifying this algorithm.
  , kdfUpToDate :: CredentialHash -> Maybe' NeedsRehash
    -- ^ Decide whether a stored hash needs regenerating.
  }
-- | Non-standardized modular crypt format string. Uniquely identifies (from
-- tinfoil's perspective) a KDF algorithm.
data MCFPrefix =
    Scrypt0
  deriving (Eq, Show, Generic, Enum, Bounded)
instance NFData MCFPrefix where rnf = genericRnf
-- | Wire representation of an algorithm prefix.
renderMCFPrefix :: MCFPrefix -> ByteString
renderMCFPrefix Scrypt0 = "scrypt0"
-- | Inverse of 'renderMCFPrefix'; unknown strings yield 'Nothing''.
parseMCFPrefix :: ByteString -> Maybe' MCFPrefix
parseMCFPrefix "scrypt0" = pure Scrypt0
parseMCFPrefix _ = Nothing'
-- | Split an MCF-formatted hash of the shape \"$prefix$hash\" into its
-- algorithm prefix and raw credential hash. Yields 'Nothing'' when the
-- leading delimiter is absent or the prefix is unrecognised. Delimiters
-- occurring inside the hash part are preserved.
unpackMCFHash :: MCFHash -> Maybe' (MCFPrefix, CredentialHash)
unpackMCFHash (MCFHash bs) =
  case BS.split mcfDelimiter bs of
    ("" : prefixPart : hashParts) -> do
      prefix <- parseMCFPrefix prefixPart
      pure (prefix, CredentialHash (BS.intercalate (BS.singleton mcfDelimiter) hashParts))
    _ -> Nothing'
-- | Prepend the MCF algorithm prefix to a credential hash, producing
-- \"$prefix$hash\". Inverse of 'unpackMCFHash'.
packMCFHash :: MCFPrefix -> CredentialHash -> MCFHash
packMCFHash prefix hash = MCFHash (BS.concat pieces)
  where
    delim = BS.singleton mcfDelimiter
    pieces = [delim, renderMCFPrefix prefix, delim, unCredentialHash hash]

-- | The MCF field delimiter: 0x24 is the ASCII \'$\' character.
mcfDelimiter :: Word8
mcfDelimiter = 0x24
|
ambiata/tinfoil
|
src/Tinfoil/Data/KDF.hs
|
bsd-3-clause
| 3,562
| 0
| 15
| 682
| 742
| 427
| 315
| 86
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeOperators #-}
module Data.Vector.Vinyl.Default.NonEmpty.Monomorphic.Join where
import Control.Monad (guard)
import Control.Monad.Primitive (PrimMonad, PrimState)
import Control.Monad.ST (ST, runST)
import Data.Constraint
import Data.Function (on)
import qualified Data.List as List
import Data.List.TypeLevel.Constraint (ListAll)
import Data.Primitive.MutVar (newMutVar, readMutVar, writeMutVar)
import Data.Proxy (Proxy (Proxy))
import qualified Data.Vector.Algorithms.Merge as Merge
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Generic.Mutable as GM
import qualified Data.Vector.Hybrid as Hybrid
import qualified Data.Vector.Hybrid.Internal as Hybrid
import qualified Data.Vector.Hybrid.Mutable as MHybrid
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as UM
import Data.Vector.Vinyl.Default.NonEmpty.Monomorphic.Implication (listAllVector)
import qualified Data.Vector.Vinyl.Default.NonEmpty.Monomorphic.Internal as Vinyl
import Data.Vector.Vinyl.Default.Types (HasDefaultVector, VectorVal)
import Data.Vinyl.Class.Implication (listAllOrd)
import Data.Vinyl.Core (Rec (..))
import Data.Vinyl.Functor (Identity)
import Data.Vinyl.TypeLevel (RecAll)
-- | Default in-place sort: merge sort from "Data.Vector.Algorithms.Merge".
defSort :: (PrimMonad m, GM.MVector v e, Ord e) => v (PrimState m) e -> m ()
defSort = Merge.sort
-- | Default in-place sort with an explicit comparison function.
defSortBy :: (PrimMonad m, GM.MVector v e) => (e -> e -> Ordering) -> v (PrimState m) e -> m ()
defSortBy = Merge.sortBy
-- | Collapse runs of adjacent equal elements to a single occurrence (like
-- Unix @uniq@). On sorted input this yields the distinct elements.
uniq :: (G.Vector v a, Eq a) => v a -> v a
uniq v = if G.length v > 0
  then runST $ do
    let initVal = v G.! 0
    -- Allocate pessimistically (as if no duplicates); the slice below
    -- trims the buffer to the number of survivors.
    m <- GM.new (G.length v)
    GM.write m 0 initVal
    mlen <- uniqHelper 1 1 initVal v m
    G.freeze $ GM.slice 0 mlen m
  else G.empty
-- | Worker for 'uniq': scan @v@ from @readIx@, writing each element that
-- differs from its predecessor into @out@ at @writeIx@. Returns the number
-- of elements written overall.
uniqHelper :: (GM.MVector u a, G.Vector v a, Eq a, PrimMonad m)
  => Int -> Int -> a -> v a -> u (PrimState m) a -> m Int
uniqHelper readIx writeIx prev v out
  | readIx >= G.length v = return writeIx
  | current == prev = uniqHelper (readIx + 1) writeIx prev v out
  | otherwise = do
      GM.write out writeIx current
      uniqHelper (readIx + 1) (writeIx + 1) current v out
  where
    current = v G.! readIx
-- | Reference implementation of 'uniq' on lists: keep the first element of
-- each run of adjacent equal values.
uniqNaive :: Eq a => [a] -> [a]
uniqNaive [] = []
uniqNaive (x:xs) = x : uniqNaive (dropWhile (== x) xs)
-- This is a poorly performing but certainly correct
-- version of the algorithm. Notice that the produced
-- ordering of the indices may differ.
fullJoinIndicesNaive :: Ord a => [a] -> [a] -> [(Int,Int)]
fullJoinIndicesNaive as bs =
  [ (ia, ib)
  | (ia, a) <- bySnd (zip [0 ..] as)
  , (ib, b) <- bySnd (zip [0 ..] bs)
  , a == b
  ]
  where
    bySnd = List.sortBy (compare `on` snd)
-- Demands that records are identical. This step must be performed
-- after a projection step. This destroys the input vectors.
--
-- Pipeline: tag both inputs with their original indices, sort each tagged
-- vector in place by key, then merge-join the two sorted runs.
fullJoinIndices ::
  ( PrimMonad m
  , s ~ PrimState m
  , GM.MVector v a
  , Ord a
  )
  => v s a
  -> v s a
  -> m (Hybrid.MVector U.MVector U.MVector s (Int,Int))
fullJoinIndices as bs = do
  ias <- pairWithIndices as
  ibs <- pairWithIndices bs
  sortWithIndices ias
  sortWithIndices ibs
  matchingIndices ias ibs
-- | Gather: slot @i@ of the result holds @xs ! (ixs ! i)@.
indexMany :: ( G.Vector v a ) => U.Vector Int -> v a -> v a
indexMany ixs xs = runST $ do
  out <- GM.new (U.length ixs)
  U.imapM_ (\slot src -> GM.write out slot (xs G.! src)) ixs
  G.freeze out
-- | Gather-and-test: slot @i@ of the result is
-- @predicate (xs ! (ixs ! i))@.
indexManyPredicate :: ( G.Vector v a )
  => (a -> Bool) -> U.Vector Int -> v a -> U.Vector Bool
indexManyPredicate p ixs xs = runST $ do
  flags <- UM.new (U.length ixs)
  U.imapM_ (\slot src -> UM.write flags slot (p (xs G.! src))) ixs
  U.freeze flags
-- | Pure wrapper around 'fullJoinIndices': thaws copies of both inputs, so
-- the immutable originals are left intact.
fullJoinIndicesImmutable ::
  ( G.Vector v a
  , Ord a
  )
  => v a
  -> v a
  -> Hybrid.Vector U.Vector U.Vector (Int,Int)
fullJoinIndicesImmutable as bs = runST $ do
  mas <- G.thaw as
  mbs <- G.thaw bs
  r <- fullJoinIndices mas mbs
  G.freeze r
-- | Join on whole vinyl records: discharges the needed 'Ord' and vector
-- dictionaries (via the constraint entailments below), then delegates to
-- 'fullJoinIndicesImmutable' on the records viewed as single vectors.
recFullJoinIndicesImmutable :: forall rs z zs.
  ( ListAll rs Ord
  , ListAll rs HasDefaultVector
  -- , ListAll rs
  , rs ~ (z ': zs)
  )
  => Rec VectorVal rs
  -> Rec VectorVal rs
  -> Hybrid.Vector U.Vector U.Vector (Int,Int)
recFullJoinIndicesImmutable as bs =
  -- Each 'Sub Dict' match brings the derived instance into scope.
  case listAllOrd (Proxy :: Proxy Identity) as (Sub Dict) of
    Sub Dict -> case listAllVector as of
      Sub Dict -> fullJoinIndicesImmutable (Vinyl.V as) (Vinyl.V bs)
-- The input vectors must already be sorted from
-- low to high. Otherwise, this may crash.
--
-- Sort-merge join over two (index, key) vectors: walks both inputs in
-- lockstep and emits every pair of original indices whose keys compare
-- equal, including all combinations within runs of duplicate keys. The
-- result buffer starts small and is grown geometrically by 'appendRes'.
matchingIndices ::
  ( GM.MVector v a
  , GM.MVector u b
  , GM.MVector w c
  , PrimMonad m
  , s ~ PrimState m
  , Ord a
  )
  => Hybrid.MVector u v s (b, a)
  -> Hybrid.MVector w v s (c, a)
  -> m (Hybrid.MVector u w s (b,c))
matchingIndices as bs = do
  -- Main cursors into as/bs, walk cursors for duplicate runs, and the
  -- count of results written so far.
  iaRef <- newMutVar 0
  ibRef <- newMutVar 0
  iaWalkRef <- newMutVar 0
  ibWalkRef <- newMutVar 0
  irRef <- newMutVar 0
  rinit <- MHybrid.new initialSize
  rref <- newMutVar rinit
  -- Append one (index, index) pair, doubling the buffer when it is full.
  let appendRes v = do
        rbefore <- readMutVar rref
        ir <- readMutVar irRef
        r <- case compare ir (MHybrid.length rbefore) of
          EQ -> do
            r' <- MHybrid.grow rbefore ir
            writeMutVar rref r'
            return r'
          LT -> return rbefore
          GT -> error "matchingIndices: invariant violated"
        writeMutVar irRef (ir + 1)
        MHybrid.write r ir v -- (traceShowId v)
  whileM_
    ( do ia <- readMutVar iaRef
         ib <- readMutVar ibRef
         return (ia < alen && ib < blen)
    )
    ( do ia <- readMutVar iaRef
         ib <- readMutVar ibRef
         (iaOriginal,arec) <- MHybrid.read as ia
         (ibOriginal,brec) <- MHybrid.read bs ib
         -- Unequal keys: advance the side with the smaller key. Equal
         -- keys: emit the pair, then sweep each side's duplicate run to
         -- emit every cross combination.
         case compare arec brec of
           LT -> writeMutVar iaRef (ia + 1)
           GT -> writeMutVar ibRef (ib + 1)
           EQ -> do
             appendRes (iaOriginal,ibOriginal)
             -- Iterate over b
             writeMutVar ibWalkRef (ib + 1)
             whileM_
               ( do ibWalk <- readMutVar ibWalkRef
                    case compare ibWalk blen of
                      LT -> do
                        (_,brecNext) <- MHybrid.read bs ibWalk
                        return (brecNext == arec)
                      EQ -> return False
                      GT -> error "ib walk: invariant violated"
               )
               ( do ibWalk <- readMutVar ibWalkRef
                    (ibOriginalNext,_brecNext) <- MHybrid.read bs ibWalk
                    appendRes (iaOriginal, ibOriginalNext)
                    writeMutVar ibWalkRef (ibWalk + 1)
               )
             -- Iterate over a
             writeMutVar iaWalkRef (ia + 1)
             whileM_
               ( do iaWalk <- readMutVar iaWalkRef
                    case compare iaWalk alen of
                      LT -> do
                        (_,arecNext) <- MHybrid.read as iaWalk
                        return (arecNext == brec)
                      EQ -> return False
                      GT -> error "ia walk: invariant violated"
               )
               ( do iaWalk <- readMutVar iaWalkRef
                    (iaOriginalNext,_arecNext) <- MHybrid.read as iaWalk
                    appendRes (iaOriginalNext, ibOriginal)
                    writeMutVar iaWalkRef (iaWalk + 1)
               )
             writeMutVar iaRef (ia + 1)
             writeMutVar ibRef (ib + 1)
    )
  -- Trim the buffer to the number of results actually written.
  r <- readMutVar rref
  ir <- readMutVar irRef
  return (MHybrid.slice 0 ir r)
  where
    initialSize = 4 -- change to 64
    alen = MHybrid.length as
    blen = MHybrid.length bs
-- | Repeat the body action as long as the condition action yields 'True'.
-- The condition is re-evaluated before every iteration; the body's result
-- is discarded.
whileM_ :: (Monad m) => m Bool -> m a -> m ()
whileM_ cond body = loop
  where
    loop = do
      continue <- cond
      if continue
        then body >> loop
        else return ()
-- gives us the freedom to use anything as indices
-- instead of just Ints
--
-- Pure wrapper around 'matchingIndices': thaws copies of both inputs
-- (which must already be sorted by key) and freezes the joined result.
matchingIndicesExtraImmutable ::
  ( G.Vector v a
  , G.Vector u b
  , G.Vector w c
  , Ord a
  )
  => Hybrid.Vector u v (b,a)
  -> Hybrid.Vector w v (c,a)
  -> Hybrid.Vector u w (b,c)
matchingIndicesExtraImmutable a b = runST $ do
  ma <- Hybrid.thaw a
  mb <- Hybrid.thaw b
  mr <- matchingIndices ma mb
  Hybrid.freeze mr
-- | Pair every element of a mutable vector with its original position,
-- producing a hybrid vector of (index, element). The element vector is
-- shared, not copied.
pairWithIndices ::
  ( PrimMonad m
  , s ~ PrimState m
  , GM.MVector v a
  )
  => v s a
  -> m (Hybrid.MVector U.MVector v s (Int, a))
pairWithIndices v = do
  let total = GM.length v
  -- Materialise [0 .. total-1] as a mutable unboxed index vector.
  mv <- U.thaw (U.fromList (enumFromTo 0 (total - 1)))
  return (Hybrid.MV mv v)
-- | In-place sort of an index/value hybrid vector, ordering solely by the
-- value component; each index travels with its value.
sortWithIndices ::
  ( Ord a
  , GM.MVector v a
  , GM.MVector u i
  , PrimMonad m
  , s ~ PrimState m
  )
  => Hybrid.MVector u v s (i,a)
  -> m ()
sortWithIndices = defSortBy (compare `on` snd)
|
andrewthad/vinyl-vectors
|
src/Data/Vector/Vinyl/Default/NonEmpty/Monomorphic/Join.hs
|
bsd-3-clause
| 9,352
| 0
| 27
| 3,336
| 3,092
| 1,567
| 1,525
| -1
| -1
|
-- Programatica Front-End Commands, level 1
module Pfe1Cmds where
import Prelude hiding (putStr,putStrLn,print)
import Pfe0Cmds(pfe0Cmds,runPFE0Cmds)
import PfeParse(moduleArg,fileArg,filename,( #@ ), (<@),kwOption)
import PFE0(pput,lex0SourceFile,preparseSourceFile,findFile)
import DefinedNames
import FreeNames
import HsTokens
import HsLexerPass1(lexerPass1Only)
import HsLexMerge(mergeLex)
import PrettyPrint
import AbstractIO
import MUtils
import Maybe(mapMaybe)
import PPModules() -- for PFE
-- Run the level-1 PFE command set.
pfe1 ext = runPFE0Cmds ext pfe1Cmds
-- The level-0 command set extended with simple, local module queries.
-- Each entry pairs a command name with (argument parser, help text).
pfe1Cmds =
  pfe0Cmds ++
  [-- Simple, local module queries
   ("defined" , (moduleArg defined,"list entities defined in the module")),
   ("free" , (moduleArg free,"list names referenced but not defined in the module")),
   ("pragmas", (moduleArg pragmas,"extract pragmas from modules")),
   ("lex", (lFileArg tstlex ,"show the result of lexical analysis")),
   ("lexl", (lFileArg tstlexl,"show the result of lexical analysis + layout preprocessing")),
   ("preparse", (fileArg preparse,"preparse and show abstract syntax"))
  ]
--- Simple module queries ------------------------------------------------------
-- Names referenced but not defined in the module.
free = simple freeNames
-- Entities defined in the module.
defined = simple definedNames
-- Preparse a source file and show its abstract syntax.
preparse = print @@ preparseSourceFile
-- Common query pipeline: find the file, preparse it, render the names
-- extracted by f. (@@ appears to be a monadic composition operator from
-- MUtils -- TODO confirm.)
simple f = pput.vcat.f @@ preparseSourceFile @@ findFile
-- Lex the source and print any pragmas found in the token stream.
pragmas = putStr.unlines.map show.lex2pragmas.snd @@ lex0SourceFile @@ findFile
-- Show plain lexer output; the Bool selects the printLex format.
tstlex one = printLex one . mergeLex @@ lex0SourceFile
-- As tstlex, but after the layout preprocessing pass.
tstlexl one = printLex one . lexerPass1Only . mergeLex @@ lex0SourceFile
-- True: one token per line. False: the whole list via print.
printLex True = putStrLn . unlines . map show
printLex False = print
-- Filename argument plus a "-1" flag feeding printLex.
lFileArg f = f #@ kwOption "-1" <@ filename
-- Extract pragma comments ({-# ... #-}) from a token stream, yielding each
-- pragma's position and inner text.
lex2pragmas = mapMaybe pragma
  where
    -- A NestedComment of the shape {-# ... #-} is a pragma; strip the
    -- three-character delimiters from both ends.
    pragma (NestedComment,(p,'{':'-':'#':s)) | last3 s=="#-}" =
      Just (p,droplast3 s)
    pragma _ = Nothing
-- Keep (at most) the final three elements of a list.
last3 s = drop (length s - 3) s
-- Remove (at most) the final three elements of a list.
droplast3 s = take (length s - 3) s
|
forste/haReFork
|
tools/pfe/Pfe1Cmds.hs
|
bsd-3-clause
| 1,904
| 0
| 12
| 327
| 520
| 293
| 227
| -1
| -1
|
module Position
( Position (getX, getY)
, position
, unPosition
, row
, rows
, positionsByRow
, column
, columns
, positionsByColumn
, axis
) where
import Data.Array (Ix)
import Axis (Axis(Column, Row))
import Coordinate (Coordinate, coordinate, coordinates, unCoordinate)
-- A Position on a 5x5 Board
data Position = Position {
    getX :: Coordinate,
    getY :: Coordinate
  } deriving (Ix, Eq, Ord, Show)
instance Bounded Position where
  minBound = position minBound minBound
  maxBound = position maxBound maxBound
-- Positions enumerate in row-major order: index = 5*y + x, for 0 <= index < 25.
instance Enum Position where
  toEnum x | x >= 0 && x < 25 = position (coordinate $ x `mod` 5) (coordinate $ x `div` 5)
           | otherwise = error "Position out of bounds"
  fromEnum p = 5 * (unCoordinate $ getY p) + (unCoordinate $ getX p)
-- Constructor for a Position (x first, then y).
position :: Coordinate -> Coordinate -> Position
position = Position
-- Deconstructor for a Position, returning its (x, y) pair.
unPosition :: Position -> (Coordinate, Coordinate)
unPosition (Position x y) = (x, y)
-- All Positions sharing the given Y Coordinate, in ascending X order.
row :: Coordinate -> [Position]
row y = map (`position` y) coordinates

-- Every row, in ascending Y order.
rows :: [[Position]]
rows = map row coordinates

-- Every Position, ordered by Y and then by X (row-major).
positionsByRow :: [Position]
positionsByRow = concatMap row coordinates
-- All Positions sharing the given X Coordinate, in ascending Y order.
column :: Coordinate -> [Position]
column x = map (position x) coordinates

-- Every column, in ascending X order.
columns :: [[Position]]
columns = map column coordinates

-- Every Position, ordered by X and then by Y (column-major).
positionsByColumn :: [Position]
positionsByColumn = concatMap column coordinates
-- Returns a list of all Positions with the given Axis:
-- a Column yields ascending-Y positions, a Row ascending-X positions.
axis :: Axis -> [Position]
axis (Column c) = column c
axis (Row c) = row c
|
jameshales/voltorb-flip
|
src/Position.hs
|
bsd-3-clause
| 2,122
| 0
| 11
| 430
| 543
| 308
| 235
| 46
| 1
|
{-# LANGUAGE QuasiQuotes #-}
module Main where
import Text.Printf.Safe (fmt, printf)
-- | Demonstrate the safe-printf 'fmt' quasiquoter: decimal (%d), Show-based
-- (%S), zero-padded binary (%010b), literal percent (%%), and an inline
-- formatter section (%{show . not}).
-- Added the missing top-level type signature for 'main'.
main :: IO ()
main = do
  putStrLn $ printf [fmt|1 + 2 = %d and 0 == 1 is %S.|] (1 + 2) (0 == 1)
  putStrLn $ printf [fmt|42 is %010b in binary.|] 42
  putStrLn $ printf [fmt|48%% of people answers that the negation of True is %{show . not}.|] True
|
konn/safe-printf
|
examples/quasi.hs
|
bsd-3-clause
| 321
| 0
| 10
| 67
| 88
| 52
| 36
| 7
| 1
|
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
-- | This module provides XML picklers that plug into the XML tree of the
-- /xml-types/ package. This module was \"inspired\" by /hexpat-pickle/.
--
-- The API differences between /hexpat-pickle/ and this module include:
--
-- * When unpickling, picklers will /consume/ matching elmements so that they
-- will be ignored by sucessive picklers. To circumvent this behaviour, use
-- @'xpPeek'@
--
-- * Wrappers like 'xpWrap' are uncurried
--
-- * There are no lazy unpicklers
--
-- * Most unpicklers will produce an error when their child unpicklers fail to
-- consume all elements. Use 'xpClean' to discard those elements
--
-- The data type @'PU' t a@ represents both a pickler (converting Haskell data
-- to XML) and an unpickler (XML to Haskell data), so your code only needs to
-- be written once for both serialization and deserialization. The 'PU'
-- primitives, such as 'xpElem' for XML elements, may be composed into complex
-- arrangements using 'xpPair' and other combinators.
--
-- Most picklers will try to find the /first match/ rather than failing when
-- the first element doesn't match. This is why the target type often is a
-- list. To prevent this behaviour and commit the pickler to the first element
-- available, use 'xpIsolate'.
--
-- The top level of the document does not follow this rule, because it is a
-- single node type. 'xpRoot' is needed to adapt this to type ['Node'] for your
-- pickler to use. You would typically define a pickler for a whole document
-- with 'xpElem', then pickle it to a single 'Node' with @'pickleTree' (xpRoot
-- myDocPickler) value@.
--
-- /NB/: Unresolved entities are considered an error and will trigger an
-- exception
--
-- When unpickling, the following invariant regarding the list of remaining
-- elements should be observed:
--
-- * The returned list should be a subset of or the initial list itself, that
-- is, no elements should be added or changed
--
-- * The relative order of elements should be preserved
--
-- * Elements may, however, be removed from anywhere in the list
--
-- Here is a simple example to get you started:
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- > import Data.Text
-- > import Data.XML.Types
-- > import Data.XML.Pickle
-- >
-- > -- Person name, age and description
-- > data Person = Person Text Int Text
-- >
-- > xpPerson :: PU [Node] Person
-- > xpPerson =
-- > -- How to wrap and unwrap a Person
-- > xpWrap (\((name, age), descr) -> Person name age descr)
-- > (\(Person name age descr) -> ((name, age), descr)) $
-- > xpElem "person"
-- > (xpPair
-- > (xpAttr "name" xpId)
-- > (xpAttr "age" xpPrim))
-- > (xpContent xpId)
-- >
-- > people = [
-- > Person "Dave" 27 "A fat thin man with long short hair",
-- > Person "Jane" 21 "Lives in a white house with green windows"]
-- >
-- > main = do
-- > print $ pickle (xpRoot $ xpElemNodes "people" $ xpAll xpPerson) people
--
-- Program output would be an xml-value equivalent to:
--
-- > <people>
-- > <person name="Dave" age="27">
-- > A fat thin man with long short hair
-- > </person>
-- > <person name="Jane" age="21">
-- > Lives in a white house with green windows
-- > </person>
-- > </people>
--
-- Functions marked with /compat/ are included for compatibility with
-- /hexpat-pickle/.
module Data.XML.Pickle (
-- * Types
PU(..)
, Attribute
, UnpickleResult(..)
-- * Pickler invocation
, pickle
, unpickle
-- * Primitive picklers
, xpUnit
, xpZero
, xpThrow
, xpIso
, xpPartial
-- * Value-preserving picklers
, xpId
, xpFst
, xpSnd
, xpTrees
, xpHead
, xpTree
, xpText0
, xpText
, xpString
, xpRoot
, xpPrim
-- * XML specific picklers
-- ** Attributes
, xpAttribute
, xpAttribute'
, xpAttribute_
, xpAttr
, xpAttrImplied
, xpAttrFixed
, xpAddFixedAttr
-- ** Elements
, xpElem
, xpElemWithName
, xpElemByNamespace
, xpElemVerbatim
, xpElemAttrs
, xpElemNodes
, xpElemText
, xpElemBlank
, xpElemExists
, xpElems
-- ** Character Content
, xpContent
, xpBool
-- * Pickler combinators
-- ** Choice
, xpOption
, xpDefault
, xpWithDefault
, xpMap
, xpAlt
, xpChoice
, xpEither
, xpTryCatch
-- ** Sequencing
-- | /NB/: The sequencing operations /do not/ enforce any order on the
-- matched elements unless stated otherwise, but you can commit individial
-- picklers to the next available element with 'xpIsolate'. Applying
-- @xpIsolate@ on all nested Picklers will in effect enforce order.
--
-- Howver, once a pickler consumes an element it will not be available to
-- following picklers. You can circumvent this behaviour with 'xpPeek'.
--
-- If you want ensure that all elements are consumed after the last pickler
-- is run you may want to use 'xpClean'.
--
-- *** Lists
-- | The List pickler combinators will pickle lists in the given order
-- without any special treatment and unpickle as stated.
, xpFindMatches
, xpFindFirst
, xpAll
, xpSubsetAll
, xpAllByNamespace
, xpList0
, xpSeqWhile
, xpList
, xpListMinLen
-- *** Tuples
-- | Tuple combinators apply their picklers from left to right. They will
-- succeed when all their constituents produce a value.
, xp2Tuple
, xpPair
, (<#>)
, xp3Tuple
, xpTriple
, xp4Tuple
, xp5Tuple
, xp6Tuple
-- ** Wrappers
-- *** Value wrappers
, xpWrap
, xpConst
, xpWrapEither
, xpWrapMaybe
, xpWrapMaybe_
, xpAssert
, xpMayFail
, xpUnliftElems
-- *** Bookkeeping
-- | Change the semantics of picklers
, xpIsolate
, xpPeek
-- *** Cleannes
-- | Picklers keep track of elements left over after unpickling, so the may
-- be [@clean@] an unpickling is considered @clean@ when it doesn't leave any
-- remainng elements.
, xpClean
-- * Error handling
, UnpickleError(..)
, ppUnpickleError
, (<++>)
, (<?+>)
, (<?>)
, (<??>)
, UnresolvedEntityException(..)
-- * Helper functions
, flattenContent
, tErr
, getRest
) where
import Control.Applicative ((<$>))
import Control.Arrow
import Control.Exception
import Control.Monad
import Data.Char (isSpace)
import Data.Either
import Data.List (partition)
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid (Monoid, mempty)
import Data.Text (Text)
import qualified Data.Text as Text
import Data.XML.Types
import Data.XML.Pickle.Tuples
import Data.XML.Pickle.Basic
-- | Pickle a tree.
pickle :: PU t a -> a -> t
pickle = pickleTree
-- | Unpickle a tree.
--
-- Collapses the three-way 'UnpickleResult' into 'Either': errors and
-- missing-entity outcomes become 'Left'; leftover elements on success are
-- discarded.
unpickle :: PU t a -> t -> Either UnpickleError a
unpickle xp x = case unpickleTree xp x of
  UnpickleError e -> Left e
  NoResult e -> Left . ErrorMessage $ "Entity not found " `Text.append` e
  Result r _ -> Right r
-- | 'map' with its arguments interchanged.
for :: [a] -> (a -> b) -> [b]
for xs f = map f xs
-- | An XML attribute: a 'Name' paired with its content fragments.
type Attribute = (Name,[Content])
-- | Isomorphic pickler: wrap a pure bijection as a 'PU'. Unpickling via
-- @f@ always succeeds and leaves no remainder; pickling uses @g@.
xpIso :: (a -> b) -> (b -> a) -> PU a b
xpIso f g = PU { unpickleTree = \t -> Result (f t) Nothing
               , pickleTree = g
               }

-- | Like 'xpIso', but the unpickling direction may fail with an error
-- message.
xpPartial :: (a -> Either Text b)
          -> (b -> a)
          -> PU a b
xpPartial f g = ("xpEither", "") <?+>
    PU { pickleTree = g
       , unpickleTree = decode
       }
  where
    decode v = case f v of
      Left e -> UnpickleError $ ErrorMessage e
      Right r -> Result r Nothing

-- | Doesn't create or consume anything, always succeeds.
xpUnit :: PU [a] ()
xpUnit = PU { unpickleTree = Result () . remList
            , pickleTree = const []
            }

-- | Returns everything (remaining), untouched.
xpId :: PU a a
xpId = xpIso id id

-- | 'xpId'. (/compat/)
xpTrees :: PU a a
xpTrees = xpId
-- | Converts Booleans to XML boolean values
--
-- * \"true\" and \"1\" are read as @True@
--
-- * \"false\" and \"0\" are read as @False@
--
-- * All other values generate an unpickle error
--
-- Will always generate \"true\" or \"false\" (not \"0\" or \"1\") when
-- pickling.
xpBool :: PU Text Bool
xpBool = ("xpBool" ,"") <?+> PU
    { unpickleTree = parseBool
    , pickleTree = renderBool
    }
  where
    parseBool v
      | v `elem` ["true", "1"] = Result True Nothing
      | v `elem` ["false", "0"] = Result False Nothing
      | otherwise =
          UnpickleError (ErrorMessage ("Not a boolean value: " `Text.append` v))
    renderBool True = "true"
    renderBool False = "false"
-- | Apply a bijection before pickling / after unpickling.
xpWrap :: (a -> b) -> (b -> a) -> PU t a -> PU t b
xpWrap to from xp = ("xpWrap","") <?+>
    PU { unpickleTree = fmap to . unpickleTree xp
       , pickleTree = pickleTree xp . from
       }

-- | Like xpWrap, but strips @Just@ (and treats @Nothing@ as a failure) during
-- unpickling.
xpWrapMaybe :: (a -> Maybe b) -> (b -> a) -> PU t a -> PU t b
xpWrapMaybe a2b b2a pua = ("xpWrapMaybe","") <?>
    xpWrapMaybe_ "xpWrapMaybe can't encode Nothing" a2b b2a pua

-- | Like xpWrap, but strips @Just@ (and treats @Nothing@ as a failure) during
-- unpickling, with specified error message for @Nothing@ value.
xpWrapMaybe_ :: String -> (a -> Maybe b) -> ( b -> a) -> PU t a -> PU t b
xpWrapMaybe_ errorMsg a2b b2a pua = ("xpWrapMaybe_","") <?+> PU
    { unpickleTree = decode
    , pickleTree = pickleTree pua . b2a
    }
  where
    decode t = case unpickleTree pua t of
      Result val rest -> case a2b val of
        Just val' -> Result val' rest
        Nothing -> UnpickleError $ upe errorMsg
      NoResult e -> NoResult e
      UnpickleError e -> UnpickleError e
-- | Lift a pickler. Nothing is returned when the given pickler doesn't return
-- a value (e.g. the element isn't found). Does not affect unpickling errors.
-- @Nothing@ is pickled to @mempty@.
--
-- A typical example is:
--
-- > xpElemAttributes "score" $ xpOption $ xpAttribute "value" xpPrim
--
-- in which @Just 5@ is encoded as @\<score value=\"5\"\/\>@ and @Nothing@
-- as @\<score\/\>@.
xpOption :: PU [t] a -> PU [t] (Maybe a)
xpOption pu = PU { unpickleTree = decode
                 , pickleTree = encode
                 }
  where
    encode (Just value) = pickleTree pu value
    encode Nothing = mempty
    decode t = case unpickleTree pu t of
      Result r t' -> Result (Just r) t'
      NoResult _e -> Result Nothing (remList t)
      UnpickleError e -> UnpickleError e
-- | Return one element, untouched. Fails with an unpickle error on an
-- empty input list; any further elements are left as the remainder.
xpHead :: PU [a] a
xpHead = PU { unpickleTree = takeOne
            , pickleTree = return
            }
  where
    takeOne [] = UnpickleError $ ("xpHead","") <++> upe "No element remaining"
    takeOne (t:ts) = Result t (if null ts then Nothing else Just ts)

-- | 'xpHead'. (/compat/)
xpTree :: PU [a] a
xpTree = xpHead
-- | Specialised version of 'xpId'. (/compat/)
xpText0 :: PU Text Text
xpText0 = xpId
-- | Convert @Text@ to/from @String@.
xpString :: PU Text String
xpString = ("xpString", "") <?> xpIso Text.unpack Text.pack
-- | Test a predicate when unpickling. Fails with the given error message
-- when the predicate returns false.
--
-- /NB/: The predicate will only be tested while /unpickling/. When pickling,
-- this is a noop.
xpAssert :: Text -> (a -> Bool) -> PU t a -> PU t a
xpAssert err p xp = ("xpAssert",err) <?+>
    PU { unpickleTree = checked
       , pickleTree = pickleTree xp
       }
  where
    checked t = do
      r <- unpickleTree xp t
      unless (p r) $ UnpickleError assertErr
      return r
    assertErr = upe ("Assertion failed; " ++ Text.unpack err)

-- | Like 'xpText0', but fails on empty input.
xpText :: PU Text Text
xpText = ("xpText","") <?> xpAssert "Input is empty" (not . Text.null) xpText0
-- | Transforms a pickler on lists to a pickler on single elements.
--
-- /NB/: Will error when the given pickler doesn't produce exactly one
-- element.
xpRoot :: PU [a] b -> PU a b
xpRoot pa = ("xpRoot","") <?+> PU
    { unpickleTree = decode
    , pickleTree = encode
    }
  where
    decode t = case unpickleTree pa [t] of
      Result x Nothing -> Result x Nothing
      Result _x (Just _) -> UnpickleError $ upe "Leftover entities"
      UnpickleError e -> UnpickleError e
      NoResult e -> NoResult e
    encode t = case pickleTree pa t of
      [t1] -> t1
      _ -> error "pickler called by xpRoot must output exactly one element"
-- | Find the first list element satisfying the predicate; return it paired
-- with the remaining elements in their original order (match removed).
-- 'Nothing' when no element matches.
getFirst :: (t -> Bool) -> [t] -> Maybe (t, [t])
getFirst _ [] = Nothing
getFirst p (x:xs)
  | p x = Just (x, xs)
  | otherwise = fmap (\(found, rest) -> (found, x : rest)) (getFirst p xs)
-- | Pickle to/from attribute.
--
-- NOTE: the trace tag is \"xpAttr\" (the compat name), not \"xpAttribute\".
xpAttribute :: Name -> PU Text a -> PU [Attribute] a
xpAttribute name pu = ("xpAttr" , Text.pack $ ppName name) <?+> PU
        { unpickleTree = doUnpickle
        , pickleTree = \value -> [(name, [ContentText $ pickleTree pu value])]
        }
  where
    -- Look up the first attribute with the wanted name; any other attributes
    -- are handed back as leftovers via 'remList'.
    doUnpickle attrs = case getFirst ((== name) . fst) attrs of
      Nothing -> NoResult $ Text.pack $ ppName name
      Just ((_, c), rem')
          -- Only plain text content can be unpickled; the inner pickler must
          -- consume the whole attribute value.
          | all isContentText c -> case unpickleTree pu (contentToText c) of
              NoResult e -> missingE $ Text.unpack e
              UnpickleError e -> UnpickleError e
              Result _ (Just e) -> leftoverE $ show e
              Result r Nothing -> Result r (remList rem')
          -- Content entities are not supported.
          | otherwise -> UnpickleError $
                     upe ("Unresolved entities in " ++ ppName name ++ ".")
    -- Concatenate the content pieces into a single Text value.
    contentToText = Text.concat . map contentToText_
    contentToText_ (ContentText t) = t
    contentToText_ (ContentEntity t) = t
    isContentText (ContentText _) = True
    isContentText (ContentEntity _) = False
-- | 'xpAttribute' (/compat/)
--
-- Backwards-compatible short name; identical to 'xpAttribute'.
xpAttr :: Name -> PU Text a -> PU [Attribute] a
xpAttr = xpAttribute
-- | Pickle attribute if @Just@ is given, on unpickling return @Just <val>@
-- when the attribute is found, @Nothing@ otherwise.
--
-- Implemented as 'xpOption' over 'xpAttr'.
xpAttribute' :: Name -> PU Text a -> PU [Attribute] (Maybe a)
xpAttribute' name pu = xpOption $ xpAttr name pu
-- | 'xpAttribute'' (/compat/)
xpAttrImplied :: Name -> PU Text a -> PU [Attribute] (Maybe a)
xpAttrImplied = xpAttribute'
-- | Pickle an attribute with the specified name and value, fail if the same
-- attribute is not present on unpickle.
--
-- The underlying 'xpAttr'/'xpId' reads the raw attribute text; the wrapper
-- then demands it equals @val@ exactly.
xpAttribute_ :: Name -> Text -> PU [Attribute] ()
xpAttribute_ name val =
    xpWrapMaybe_ ("expected fixed attribute "++ ppName name++"="++show val)
                 (\v -> if v == val then Just () else Nothing) (const val) $
                 xpAttr name xpId
-- | 'xpAttribute_' (/compat/)
--
-- Backwards-compatible short name; identical to 'xpAttribute_'.
xpAttrFixed :: Name -> Text -> PU [Attribute] ()
xpAttrFixed = xpAttribute_
-- | Merge successive @NodeContent@ values.
--
-- Right fold carrying a pair @(buf, res)@: @buf@ is the pending run of text
-- fragments to the right of the current position, @res@ the already-flattened
-- node list. Comments are dropped; each element flushes the pending text run
-- in front of itself. Runs that are entirely whitespace are discarded.
--
-- /NB/: a @ContentEntity@ node makes this 'throw' 'UnresolvedEntityException'
-- from pure code.
flattenContent :: [Node] -> [Node]
flattenContent xs = case foldr (\x (buf, res) -> case x of
                         NodeContent (ContentText txt)
                             -> (txt : buf, res)
                         NodeComment _ -> (buf, res)
                         e@(NodeElement _)
                             -> ([] , e : addConcatText buf res)
                         _ -> throw UnresolvedEntityException
                     ) ([], []) xs
               of
                 (buf, res) -> addConcatText buf res
  where
    nc = NodeContent . ContentText
    -- Prepend the concatenated text run unless it is empty or all whitespace.
    addConcatText [] = id
    addConcatText xs' = let txt = Text.concat xs' in
                        if Text.all isSpace txt then id else (nc txt :)
-- | When unpickling, tries to find the first element with the supplied name.
-- Once such an element is found, it will commit to it and /fail/ if any of the
-- picklers don't match.
xpElem :: Name -- ^ Name of the 'Element'
       -> PU [Attribute] a -- ^ Pickler for attributes
       -> PU [Node] n -- ^ Pickler for child nodes
       -> PU [Node] (a,n)
xpElem name attrP nodeP = tr <?+> PU
         { unpickleTree = doUnpickleTree
         , pickleTree = \(a,n) -> [NodeElement $ Element name
                                     (pickleTree attrP a)
                                     (pickleTree nodeP n)
                                  ]
         } where
    -- Children are run through 'flattenContent' before the child pickler;
    -- siblings other than the matched element are reported as leftover.
    doUnpickleTree nodes = case getFirst (nodeElementNameHelper name) nodes of
      Just (NodeElement (Element _ attrs children), rem') -> do
          as <- ("attrs","") <++.> child attrP attrs
          cs <- ("children","") <++.> child nodeP (flattenContent children)
          leftover $ remList rem'
          return (as, cs)
      _ -> NoResult $ Text.pack $ ppName name
    tr = ("xpElem", Text.pack $ ppName name)
    -- Select only element nodes whose name matches exactly.
    nodeElementNameHelper name' (NodeElement (Element n _ _)) = n == name'
    nodeElementNameHelper _ _ = False
-- | Handle all elements with a given name. The unpickler will fail when any of
-- the elements fails to unpickle.
xpElems :: Name -- ^ Name of the 'Element' values
-> PU [Attribute] a -- ^ Pickler for attributes
-> PU [Node] n -- ^ Pickler for child nodes
-> PU [Node] [(a, n)]
xpElems name attrs children = tr <?> xpSubsetAll isThisElem
(xpElem name attrs children)
where
isThisElem (NodeElement (Element name' _ _)) = name' == name
isThisElem _ = False
tr = ("xpElems", Text.pack $ ppName name)
-- | Tries to apply the pickler to all the remaining elements; fails if any of
-- them don't match.
xpAll :: PU [a] b -> PU [a] [b]
xpAll xp = ("xpAll", "") <?+> PU { unpickleTree = doUnpickleTree
                                 , pickleTree = concatMap (pickleTree xp)
                                 } where
    -- Feed each element to the pickler as a singleton list ('return' is the
    -- list monad's); 'child'' demands each singleton is fully consumed.
    doUnpickleTree = mapM (child' xp . return)
-- | For unpickling, apply the given pickler to a subset of the elements
-- determined by a given predicate.
--
-- Pickles like 'xpAll'.
xpSubsetAll :: (a -> Bool) -- ^ Predicate to select the subset
            -> PU [a] b -- ^ Pickler to apply on the subset
            -> PU [a] [b]
xpSubsetAll p xp = ("xpSubsetAll","") <?+> PU { unpickleTree = \t ->
                     -- Elements rejected by the predicate are not consumed;
                     -- they are reported back as leftovers.
                     let (targets, rest) = partition p t in
                     do
                         leftover $ remList rest
                         child' (xpAll xp) targets
                    , pickleTree = pickleTree $ xpAll xp
                    }
-- | Apply unpickler to all elements with the given namespace.
--
-- Pickles like 'xpAll'.
xpAllByNamespace :: Text -> PU [Node] b -> PU [Node] [b]
xpAllByNamespace namespace xp = ("xpAllByNamespace",namespace)
<?> xpSubsetAll helper xp
where
helper (NodeElement (Element (Name _local (Just ns) _pre) _ _ ))
= ns == namespace
helper _ = False
-- | Pickle 'Element' without a restriction on the name. The name as taken /
-- returned as the first element of the triple.
xpElemWithName :: PU [Attribute] a -- ^ Pickler for attributes
-> PU [Node] n -- ^ Pickler for child nodes
-> PU [Node] (Name,a,n)
xpElemWithName attrP nodeP = ("xpElemWithName", "") <?+> PU
{ unpickleTree = doUnpickleTree
, pickleTree = \(name, a,n) -> [NodeElement $ Element name
(pickleTree attrP a)
(pickleTree nodeP n)
]
} where
doUnpickleTree nodes = case getFirst nodeElementHelper nodes of
Just (NodeElement (Element name attrs children), rem') -> do
x <- child attrP attrs
y <- child nodeP $ flattenContent children
leftover $ remList rem'
return (name, x, y)
_ -> NoResult "element"
nodeElementHelper (NodeElement Element{}) = True
nodeElementHelper _ = False
-- | Find element by name space, prefixes are ignored.
xpElemByNamespace :: Text -- ^ Namespace
-> PU Text name -- ^ Pickler for the local name
-> PU [Attribute] a -- ^ Pickler for attributes
-> PU [Node] n -- ^ Pickler for child nodes
-> PU [Node] (name,a,n)
xpElemByNamespace ns nameP attrP nodeP = PU
{ unpickleTree = doUnpickleTree
, pickleTree = \(name, a,n) -> [NodeElement $ Element
(Name (pickleTree nameP name) (Just ns) Nothing)
(pickleTree attrP a)
(pickleTree nodeP n)
]
} where
doUnpickleTree nodes = case getFirst (nodeElementNSHelper ns) nodes of
Just (NodeElement (Element name attrs children), rem') -> tr name $
do
name' <- child nameP (nameLocalName name)
attrs' <- child attrP attrs
nodes' <- child nodeP children
leftover $ remList rem'
return (name', attrs', nodes')
_ -> NoResult $ "Element with namepspace " `Text.append` ns
tr a res = case res of
UnpickleError e -> UnpickleError (TraceStep
( "xpElemByNamespace"
, Text.concat [ ns
, " ; "
, nameLocalName a])
e)
x -> x
nodeElementNSHelper ns' (NodeElement (Element n _ _)) = nameNamespace n == Just ns'
nodeElementNSHelper _ns _ = False
-- | Pickler returns the first found 'Element' untouched.
--
-- Unpickler wraps element in 'NodeElement'.
xpElemVerbatim :: PU [Node] Element
xpElemVerbatim = PU
{ unpickleTree = doUnpickleTree
, pickleTree = \e -> [NodeElement e]
} where
doUnpickleTree nodes = case getFirst nodeElementHelper nodes of
Just (NodeElement e@Element{}, re) -> Result e (remList re)
_ -> NoResult "element"
nodeElementHelper (NodeElement Element{}) = True
nodeElementHelper _ = False
-- | A helper variant of 'xpElem' for elements that carry attributes but no
-- child tags.
xpElemAttrs :: Name -> PU [Attribute] b -> PU [Node] b
xpElemAttrs name puAttrs = xpWrap fst addUnit (xpElem name puAttrs xpUnit)
  where
    -- Pair the attribute value with the trivial unit child value.
    addUnit a = (a, ())
-- | A helper variant of 'xpElem' for elements that carry child nodes but no
-- attributes.
xpElemNodes :: Name -> PU [Node] b -> PU [Node] b
xpElemNodes name puChildren = xpWrap snd addUnit (xpElem name xpUnit puChildren)
  where
    -- Pair the child value with the trivial unit attribute value.
    addUnit a = ((), a)
-- | A helper variant of 'xpElem' for elements that contain only character
-- data.
xpElemText :: Name -> PU [Node] Text
xpElemText name = xpElemNodes name $ xpContent xpId
-- | Helper for 'Element' values that don't contain anything.
xpElemBlank :: Name -> PU [Node] ()
xpElemBlank name = ("xpElemBlank", "") <?> xpWrap (const () ) (const ((),())) $
xpElem name xpUnit xpUnit
-- | When pickling, creates an empty element iff parameter is @True@.
--
-- When unpickling, checks whether element exists. Generates an error when the
-- element is not empty.
xpElemExists :: Name -> PU [Node] Bool
xpElemExists name = ("xpElemExists", "") <?>
                    -- Presence of the optional empty element <=> True.
                    xpWrap (maybe False (const True))
                           (\x -> if x then Just () else Nothing) $
                           xpOption (xpElemBlank name)
-- | Get the first non-element 'NodeContent' from a node.
xpContent :: PU Text a -> PU [Node] a
xpContent xp = ("xpContent","") <?+> PU
{ unpickleTree = doUnpickle
, pickleTree = return . NodeContent . ContentText . pickleTree xp
} where
doUnpickle nodes = case getFirst nodeContentHelper
(filter (\node -> case node of
NodeComment _ -> False
_ -> True) nodes) of -- flatten
Just (NodeContent (ContentText t), _re) -> child xp t
Just (NodeContent (ContentEntity t), _) ->
UnpickleError . upe $ "Unresolved entity" ++ show t ++ "."
_ -> NoResult "node content"
nodeContentHelper (NodeContent _) = True
nodeContentHelper _ = False
-- | Unlift a pickler on 'Node' values to a pickler on 'Element' values. Nodes
-- generated during pickling that are not 'Element' values will be silently
-- discarded.
xpUnliftElems :: PU [Node] a -> PU [Element] a
xpUnliftElems xp = ("xpUnliftElems","") <?+> PU
{ unpickleTree = doUnpickle
, pickleTree = nodesToElems . pickleTree xp
}
where
doUnpickle nodes = case unpickleTree xp (map NodeElement nodes) of
UnpickleError e -> UnpickleError e
NoResult e -> NoResult e
Result a r -> let r' = case r of
Nothing -> Nothing
Just rs' -> case nodesToElems rs' of
[] -> Nothing
rs -> Just rs
in Result a r'
nodesToElems = foldr (\n es -> case n of
NodeElement e -> e : es
_ -> es) []
-- | Optional conversion with default value.
--
-- Unlike 'xpWithDefault' the default value is not encoded in the XML document,
-- during unpickling the default value is inserted if the pickler doesn't
-- return a value.
xpDefault :: (Eq a) => a -> PU [t] a -> PU [t] a
xpDefault df xp = xpWrap (fromMaybe df) toMaybe (xpOption xp)
  where
    -- On pickling, suppress the value entirely when it equals the default.
    toMaybe x
        | x == df   = Nothing
        | otherwise = Just x
-- | Attempt to use a pickler. Return a default value when the pickler doesn't
-- return anything (doesn't touch on 'UnpickleError').
--
-- Unlike 'xpDefault', the default value /is/ encoded in the XML document.
xpWithDefault :: a -> PU t a -> PU t a
xpWithDefault a pa = PU { pickleTree = pickleTree pa
, unpickleTree = \v -> case unpickleTree pa v of
Result r t -> Result r t
NoResult _ -> Result a (Just v)
UnpickleError e -> UnpickleError e
}
-- | 'xp2Tuple'. (/compat/)
xpPair :: PU [a] b1 -> PU [a] b2 -> PU [a] (b1, b2)
xpPair l r = "xpPair" <??> xp2Tuple l r
-- | 'xp2Tuple'.
(<#>) :: PU [a] b1 -> PU [a] b2 -> PU [a] (b1, b2)
(<#>) l r = "(<#>)" <??> xp2Tuple l r
-- | 'xp3Tuple'. (/compat/)
xpTriple :: PU [a] a1 -> PU [a] a2 -> PU [a] a3 -> PU [a] (a1, a2, a3)
xpTriple l m r = "xpTriple" <??> xp3Tuple l m r
-- | When unpickling, don't consume the matched element(s); noop when pickling.
xpPeek :: PU t a -> PU t a
xpPeek xp = PU { pickleTree   = pickleTree xp
               , unpickleTree = peek
               }
  where
    -- On success, hand the untouched input back as the leftover; errors and
    -- misses pass through unchanged.
    peek xs = case unpickleTree xp xs of
        Result r _ -> Result r (Just xs)
        other      -> other
-- | Noop when pickling.
--
-- When unpickling, only give access to the first element.
xpIsolate :: PU [t] a -> PU [t] a
xpIsolate xp = ("xpIsolate","") <?+>
PU { pickleTree = pickleTree xp
, unpickleTree = \xs' -> case xs' of
[] -> NoResult "entity"
(x:xs) -> case unpickleTree xp [x] of
Result r t -> Result r (remList $ mbToList t ++ xs)
NoResult e -> missingE $ Text.unpack e
y -> y
} where
mbToList Nothing = []
mbToList (Just r) = r
-- | Select a single element from the list and apply unpickler to it.
--
-- Returns no value when no element matches the predicate.
--
-- Fails when the unpickler doesn't return a value.
--
-- When pickling, this is a noop.
xpFindFirst :: (t -> Bool) -> PU [t] a -> PU [t] a
xpFindFirst p xp = ("xpFindFirst","") <?+>
PU { pickleTree = pickleTree xp
, unpickleTree = \xs -> case break p xs of
(_, []) -> NoResult "entity"
(ys,z:zs) -> do
leftover . remList $ ys ++ zs
child' xp [z]
}
-- | Ignore input/output and replace with constant values.
xpConst :: a -> PU t () -> PU t a
xpConst c xp = tr <?> xpWrap toConst fromConst xp
  where
    tr          = ("xpConst" ,"")
    -- Unpickling: discard the unit and produce the constant.
    toConst _   = c
    -- Pickling: discard the value; the inner pickler only needs ().
    fromConst _ = ()
-- | Convert text to/from any type that implements 'Read' and 'Show'. Fails on
-- unpickle if 'read' fails.
xpPrim :: (Show a, Read a) => PU Text a
xpPrim = PU { unpickleTree = \x -> case reads $ Text.unpack x of
[] -> UnpickleError $ ("xpPrim","") <++>
upe ("Couldn't read " ++ show x ++ ".")
(r,rest):_ -> Result r (Text.pack <$> remList rest)
, pickleTree = Text.pack . show
}
-- | When unpickling, tries to apply the pickler to all elements returning and
-- consuming only matched elements.
xpFindMatches :: PU [b] a -> PU [b] [a]
xpFindMatches xp = PU { unpickleTree = doUnpickleTree
, pickleTree = \xs -> pickleTree xp =<< xs
} where
doUnpickleTree xs =
let (ls, rs) = partitionEithers . for xs $ \x ->
case unpickleTree xp [x] of
NoResult _ -> Left x
Result r Nothing -> Right $ Result r Nothing
Result _r (Just _) -> Right $ leftoverE ""
UnpickleError e -> Right $ UnpickleError e
in leftover (remList ls) >> sequence rs
-- | 'xpAll'. (/compat/)
xpList0 :: PU [a] b -> PU [a] [b]
xpList0 = xpAll
-- | Like 'xpList', but only succeed during unpickling if at least a minimum
-- number of elements are unpickled.
xpListMinLen :: Int -> PU [a] b -> PU [a] [b]
xpListMinLen ml = xpWrapEither testLength id . xpList
where
testLength as
| length as < ml = Left $ "Expecting at least " ++ show ml ++ " elements"
testLength as = Right as
-- | When unpickling, successively applies pickler to single elements until it
-- doesn't return anything; returns all matched elements.
xpSeqWhile :: PU [a] b -> PU [a] [b]
xpSeqWhile pu = ("xpSeqWhile", "") <?+> PU {
unpickleTree = doUnpickle
, pickleTree = concatMap $ pickleTree pu
}
where
doUnpickle [] = Result [] Nothing
doUnpickle es@(elt:re) =
case unpickleTree pu [elt] of
Result val _ -> case doUnpickle re of
Result xs r -> Result (val:xs) r
e -> e
NoResult _ -> Result [] (Just es)
UnpickleError e -> UnpickleError e
-- | 'xpSeqWhile'. (/compat/)
xpList :: PU [a] b -> PU [a] [b]
xpList = xpSeqWhile
-- | Standard pickler for maps
--
-- This pickler converts a map into a list of pairs of the form
--
-- > <elt attr="key">value</elt>
xpMap :: Ord k =>
Name -- ^ Element name (elt)
-> Name -- ^ Attribute name (attr)
-> PU Text k -- ^ Pickler for keys (key)
-> PU [Node] a -- ^ Pickler for values (value)
-> PU [Node] (M.Map k a)
xpMap en an xpk xpv
= xpWrap M.fromList
M.toList
$
xpAll $
xpElem en
(xpAttr an xpk)
xpv
-- | Like xpWrap, except it strips @Right@ (and treats @Left@ as a failure)
-- during unpickling.
--
-- Not to be confused with 'xpEither'.
xpWrapEither :: Show e => (a -> Either e b) -> (b -> a) -> PU t a -> PU t b
xpWrapEither a2b b2a pua = ("xpWrapEither","") <?+>
PU {
unpickleTree = \t -> case unpickleTree pua t of
Result val rest -> case a2b val of
Left e -> UnpickleError . upe $ "Function returned Left "
++ show e
Right r -> Result r rest
NoResult e -> NoResult e
UnpickleError e -> UnpickleError e
,
pickleTree = pickleTree pua . b2a
}
-- | Execute one of a list of picklers. The /selector function/ is used during
-- pickling, and the integer returned is taken as a 0-based index to select a
-- pickler from /pickler options/. Unpickling is done by trying each list
-- element in order until one returns a @Result@ (the /selector/ is not used).
--
-- This is typically used to handle each constructor of a data type. However,
-- it can be used wherever multiple serialization strategies apply to a single
-- type.
--
-- /NB/: This function will ignore all errors as long as one of the branches
-- returns a result. Also, it will produce an error when all branches return
-- @NoResult@. Use 'xpChoice' for a saner version of this function.
xpAlt :: (a -> Int) -- ^ Selector function
-> [PU t a] -- ^ List of picklers
-> PU t a
xpAlt selector picklers = PU {
unpickleTree = doUnpickle,
pickleTree = \value -> pickleTree (picklers !! selector value) value
}
where
eitherResult (Result r t) = Right (Result r t)
eitherResult (UnpickleError e) = Left e
eitherResult (NoResult e) = Left . missing $ Text.unpack e
splitResults v = partitionEithers $ map (eitherResult . flip unpickleTree v)
picklers
doUnpickle v = case splitResults v of
(_, Result r t:_) -> Result r t
(es, []) -> ("xpAlt", "") <++.> UnpickleError (Variants es)
_ -> error "xpAlt: splitResults returned impossible result"
-- | Execute one of a list of picklers. The /selector function/ is used during
-- pickling, and the integer returned is taken as a 0-based index to select a
-- pickler from /pickler options/. Unpickling is done by trying each list
-- element in order until one returns a @Result@ or an @Error@.
--
-- This is typically used to handle each constructor of a data type. However,
-- it can be used wherever multiple serialization strategies apply to a single
-- type.
--
-- This function is similar to 'xpAlt' but it will stop unpickling on the first
-- error. It will return @NoResult@ iff all of the picklers return @NoResult@
-- (or the list of picklers is empty).
xpChoice :: (a -> Int) -- ^ Selector function
-> [PU t a] -- ^ List of picklers
-> PU t a
xpChoice selector picklers =
PU { unpickleTree = go picklers (1 :: Integer)
, pickleTree = \value -> pickleTree (picklers !! selector value) value
}
where
go [] _ _ = NoResult "entity"
go (p:ps) i v = case unpickleTree p v of
r@Result{} -> r
UnpickleError e -> UnpickleError $ ("xpChoice", Text.pack $ show i)
<++> e
NoResult _ -> go ps (i+1) v
-- | Try the left pickler first and if that doesn't produce anything the right
-- one. Wrapping the result in @Left@ or @Right@, respectively.
--
-- Not to be confused with 'xpWrapEither'.
xpEither :: PU n t1 -> PU n t2 -> PU n (Either t1 t2)
xpEither xpl xpr = PU {
unpickleTree = doUnpickle,
pickleTree = \v -> case v of
Left l -> pickleTree xpl l
Right r -> pickleTree xpr r
}
where
doUnpickle t = case unpickleTree xpl t of
Result r s -> Result (Left r) s
NoResult e1 -> case unpickleTree xpr t of
Result r s -> Result (Right r) s
NoResult e2 -> UnpickleError $ ("xpEither","")
<++> Variants [ missing $ Text.unpack e1
, missing $ Text.unpack e2
]
UnpickleError e -> UnpickleError $ ("xpEither","Right")
<++> e
UnpickleError e -> UnpickleError $ ("xpEither","Left")
<++> e
-- | Pickler that during pickling always uses the first pickler, and during
-- unpickling tries the first, and on failure then tries the second.
xpTryCatch :: PU t a -> PU t a -> PU t a
xpTryCatch pu1 pu2 = PU
{ unpickleTree = \t -> case unpickleTree pu1 t of
Result val1 rest -> Result val1 rest
NoResult e1 -> case unpickleTree pu2 t of
Result val2 rest -> Result val2 rest
NoResult e2 -> NoResult $ Text.concat [e1, " / ", e2]
UnpickleError e2 -> UnpickleError $ ("xpTryCatch","Right")
<++> e2
UnpickleError e1 -> case unpickleTree pu2 t of
Result val2 rest -> Result val2 rest
NoResult e2 -> UnpickleError
$ Variants [ e1
, upe $ " / not found:"
++ Text.unpack e2
]
UnpickleError e2 -> UnpickleError $ ("xpTryCatch","")
<++> Variants [e1, e2]
, pickleTree = pickleTree pu1
}
-- | The zero pickler.
--
-- Encodes nothing, always fails during unpickling. (Same as @'xpThrow'
-- \"got xpZero\"@).
xpZero :: PU [t] a
xpZero = ("xpZero","") <?> xpThrow "got xpZero"
-- | No output when pickling, always generates an error with the specified
-- message on unpickling.
xpThrow :: Monoid m
=> String -- ^ Error message
-> PU m a
xpThrow msg = PU
{ unpickleTree = \_ -> UnpickleError $ ("xpThrow",Text.pack msg) <++> upe msg
, pickleTree = const mempty
}
-- | Add an attribute with a fixed value.
xpAddFixedAttr :: Name -> Text -> PU [Attribute] b -> PU [Attribute] b
xpAddFixedAttr name val pa
= xpWrap snd ((,) ()) $
xp2Tuple (xpAttrFixed name val) pa
xpFst :: Monoid b => PU t (a, b) -> PU t a
xpFst = xpWrap fst (\x -> (x, mempty))
xpSnd :: Monoid a => PU t (a, b) -> PU t b
xpSnd = xpWrap snd (\y -> (mempty, y))
-- | Instead of failing the pickler will return no result.
xpMayFail :: PU t a -> PU t a
xpMayFail xp = PU { pickleTree = pickleTree xp
, unpickleTree = \v -> case unpickleTree xp v of
UnpickleError _ -> NoResult "failed with xpMayFail"
x -> x
}
-- | Run unpickler and consume and discard remaining elements.
--
-- When pickling, this is a noop.
xpClean :: PU t a -> PU t a
xpClean xp = PU { unpickleTree = \x -> case unpickleTree xp x of
Result r _ -> Result r Nothing
e -> e
, pickleTree = pickleTree xp
}
|
Philonous/xml-picklers
|
src/Data/XML/Pickle.hs
|
bsd-3-clause
| 38,996
| 0
| 21
| 12,660
| 9,041
| 4,827
| 4,214
| 607
| 7
|
{-# LANGUAGE CPP, ExistentialQuantification, MultiParamTypeClasses, FlexibleInstances, Rank2Types, BangPatterns, KindSignatures, GADTs, ScopedTypeVariables #-}
-- |
-- Module : Data.Vector.Fusion.Stream.Monadic
-- Copyright : (c) Roman Leshchinskiy 2008-2010
-- License : BSD-style
--
-- Maintainer : Roman Leshchinskiy <rl@cse.unsw.edu.au>
-- Stability : experimental
-- Portability : non-portable
--
-- Monadic stream combinators.
--
module Data.Vector.Fusion.Stream.Monadic (
Stream(..), Step(..), SPEC(..),
-- * Length
length, null,
-- * Construction
empty, singleton, cons, snoc, replicate, replicateM, generate, generateM, (++),
-- * Accessing elements
head, last, (!!), (!?),
-- * Substreams
slice, init, tail, take, drop,
-- * Mapping
map, mapM, mapM_, trans, unbox, concatMap, flatten,
-- * Zipping
indexed, indexedR, zipWithM_,
zipWithM, zipWith3M, zipWith4M, zipWith5M, zipWith6M,
zipWith, zipWith3, zipWith4, zipWith5, zipWith6,
zip, zip3, zip4, zip5, zip6,
-- * Comparisons
eq, cmp,
-- * Filtering
filter, filterM, takeWhile, takeWhileM, dropWhile, dropWhileM,
-- * Searching
elem, notElem, find, findM, findIndex, findIndexM,
-- * Folding
foldl, foldlM, foldl1, foldl1M, foldM, fold1M,
foldl', foldlM', foldl1', foldl1M', foldM', fold1M',
foldr, foldrM, foldr1, foldr1M,
-- * Specialised folds
and, or, concatMapM,
-- * Unfolding
unfoldr, unfoldrM,
unfoldrN, unfoldrNM,
iterateN, iterateNM,
-- * Scans
prescanl, prescanlM, prescanl', prescanlM',
postscanl, postscanlM, postscanl', postscanlM',
scanl, scanlM, scanl', scanlM',
scanl1, scanl1M, scanl1', scanl1M',
-- * Enumerations
enumFromStepN, enumFromTo, enumFromThenTo,
-- * Conversions
toList, fromList, fromListN
) where
import Data.Vector.Fusion.Util ( Box(..) )
import qualified Data.List as List
import Data.Char ( ord )
import GHC.Base ( unsafeChr )
import Control.Monad ( liftM )
import Prelude hiding ( length, null,
replicate, (++),
head, last, (!!),
init, tail, take, drop,
map, mapM, mapM_, concatMap,
zipWith, zipWith3, zip, zip3,
filter, takeWhile, dropWhile,
elem, notElem,
foldl, foldl1, foldr, foldr1,
and, or,
scanl, scanl1,
enumFromTo, enumFromThenTo )
import Data.Int ( Int8, Int16, Int32, Int64 )
import Data.Word ( Word8, Word16, Word32, Word, Word64 )
#if __GLASGOW_HASKELL__ >= 700
import GHC.Exts ( SpecConstrAnnotation(..) )
#endif
#include "vector.h"
data SPEC = SPEC | SPEC2
#if __GLASGOW_HASKELL__ >= 700
{-# ANN type SPEC ForceSpecConstr #-}
#endif
emptyStream :: String
{-# NOINLINE emptyStream #-}
emptyStream = "empty stream"
#define EMPTY_STREAM (\s -> ERROR s emptyStream)
-- | Result of taking a single step in a stream
data Step s a where
Yield :: a -> s -> Step s a
Skip :: s -> Step s a
Done :: Step s a
-- | Map over the element (if any) carried by a 'Step'; the state component
-- is left untouched. 'Skip' and 'Done' carry no element, so the function is
-- unused there (wildcards avoid -Wunused-matches noise).
instance Functor (Step s) where
  {-# INLINE fmap #-}
  fmap f (Yield x s) = Yield (f x) s
  fmap _ (Skip s)    = Skip s
  fmap _ Done        = Done
-- | Monadic streams
data Stream m a = forall s. Stream (s -> m (Step s a)) s
-- Length
-- ------
-- | Length of a 'Stream'
--
-- Implemented as a strict left fold that increments a counter per element.
length :: Monad m => Stream m a -> m Int
{-# INLINE_FUSED length #-}
length = foldl' (\n _ -> n+1) 0
-- | Check if a 'Stream' is empty
--
-- Short-circuits: returns 'False' at the first 'Yield'; 'Skip' steps are
-- followed until the stream produces an element or finishes.
null :: Monad m => Stream m a -> m Bool
{-# INLINE_FUSED null #-}
null (Stream step s) = null_loop s
  where
    null_loop s = do
      r <- step s
      case r of
        Yield _ _ -> return False
        Skip s' -> null_loop s'
        Done -> return True
-- Construction
-- ------------
-- | Empty 'Stream'
empty :: Monad m => Stream m a
{-# INLINE_FUSED empty #-}
empty = Stream (const (return Done)) ()
-- | Singleton 'Stream'
singleton :: Monad m => a -> Stream m a
{-# INLINE_FUSED singleton #-}
singleton x = Stream (return . step) True
where
{-# INLINE_INNER step #-}
step True = Yield x False
step False = Done
-- | Replicate a value to a given length
replicate :: Monad m => Int -> a -> Stream m a
{-# INLINE_FUSED replicate #-}
replicate n x = replicateM n (return x)
-- | Yield a 'Stream' of values obtained by performing the monadic action the
-- given number of times
replicateM :: Monad m => Int -> m a -> Stream m a
{-# INLINE_FUSED replicateM #-}
replicateM n p = Stream step n
where
{-# INLINE_INNER step #-}
step i | i <= 0 = return Done
| otherwise = do { x <- p; return $ Yield x (i-1) }
-- | Generate a stream from a pure function of the index; delegates to
-- 'generateM'.
generate :: Monad m => Int -> (Int -> a) -> Stream m a
{-# INLINE generate #-}
generate n f = generateM n (return . f)
-- | Generate a stream from its indices
generateM :: Monad m => Int -> (Int -> m a) -> Stream m a
{-# INLINE_FUSED generateM #-}
generateM n f = n `seq` Stream step 0
where
{-# INLINE_INNER step #-}
step i | i < n = do
x <- f i
return $ Yield x (i+1)
| otherwise = return Done
-- | Prepend an element
cons :: Monad m => a -> Stream m a -> Stream m a
{-# INLINE cons #-}
cons x s = singleton x ++ s
-- | Append an element
snoc :: Monad m => Stream m a -> a -> Stream m a
{-# INLINE snoc #-}
snoc s x = s ++ singleton x
infixr 5 ++
-- | Concatenate two 'Stream's
(++) :: Monad m => Stream m a -> Stream m a -> Stream m a
{-# INLINE_FUSED (++) #-}
Stream stepa sa ++ Stream stepb sb = Stream step (Left sa)
where
{-# INLINE_INNER step #-}
step (Left sa) = do
r <- stepa sa
case r of
Yield x sa' -> return $ Yield x (Left sa')
Skip sa' -> return $ Skip (Left sa')
Done -> return $ Skip (Right sb)
step (Right sb) = do
r <- stepb sb
case r of
Yield x sb' -> return $ Yield x (Right sb')
Skip sb' -> return $ Skip (Right sb')
Done -> return $ Done
-- Accessing elements
-- ------------------
-- | First element of the 'Stream' or error if empty
head :: Monad m => Stream m a -> m a
{-# INLINE_FUSED head #-}
head (Stream step s) = head_loop SPEC s
where
head_loop !sPEC s
= do
r <- step s
case r of
Yield x _ -> return x
Skip s' -> head_loop SPEC s'
Done -> EMPTY_STREAM "head"
-- | Last element of the 'Stream' or error if empty
last :: Monad m => Stream m a -> m a
{-# INLINE_FUSED last #-}
last (Stream step s) = last_loop0 SPEC s
where
last_loop0 !sPEC s
= do
r <- step s
case r of
Yield x s' -> last_loop1 SPEC x s'
Skip s' -> last_loop0 SPEC s'
Done -> EMPTY_STREAM "last"
last_loop1 !sPEC x s
= do
r <- step s
case r of
Yield y s' -> last_loop1 SPEC y s'
Skip s' -> last_loop1 SPEC x s'
Done -> return x
infixl 9 !!
-- | Element at the given position
(!!) :: Monad m => Stream m a -> Int -> m a
{-# INLINE (!!) #-}
Stream step s !! i | i < 0 = ERROR "!!" "negative index"
| otherwise = index_loop SPEC s i
where
index_loop !sPEC s i
= i `seq`
do
r <- step s
case r of
Yield x s' | i == 0 -> return x
| otherwise -> index_loop SPEC s' (i-1)
Skip s' -> index_loop SPEC s' i
Done -> EMPTY_STREAM "!!"
infixl 9 !?
-- | Element at the given position or 'Nothing' if out of bounds
(!?) :: Monad m => Stream m a -> Int -> m (Maybe a)
{-# INLINE (!?) #-}
Stream step s !? i = index_loop SPEC s i
where
index_loop !sPEC s i
= i `seq`
do
r <- step s
case r of
Yield x s' | i == 0 -> return (Just x)
| otherwise -> index_loop SPEC s' (i-1)
Skip s' -> index_loop SPEC s' i
Done -> return Nothing
-- Substreams
-- ----------
-- | Extract a substream of the given length starting at the given position.
slice :: Monad m => Int -- ^ starting index
-> Int -- ^ length
-> Stream m a
-> Stream m a
{-# INLINE slice #-}
slice i n s = take n (drop i s)
-- | All but the last element
init :: Monad m => Stream m a -> Stream m a
{-# INLINE_FUSED init #-}
init (Stream step s) = Stream step' (Nothing, s)
where
{-# INLINE_INNER step' #-}
step' (Nothing, s) = liftM (\r ->
case r of
Yield x s' -> Skip (Just x, s')
Skip s' -> Skip (Nothing, s')
Done -> EMPTY_STREAM "init"
) (step s)
step' (Just x, s) = liftM (\r ->
case r of
Yield y s' -> Yield x (Just y, s')
Skip s' -> Skip (Just x, s')
Done -> Done
) (step s)
-- | All but the first element
tail :: Monad m => Stream m a -> Stream m a
{-# INLINE_FUSED tail #-}
tail (Stream step s) = Stream step' (Left s)
where
{-# INLINE_INNER step' #-}
step' (Left s) = liftM (\r ->
case r of
Yield x s' -> Skip (Right s')
Skip s' -> Skip (Left s')
Done -> EMPTY_STREAM "tail"
) (step s)
step' (Right s) = liftM (\r ->
case r of
Yield x s' -> Yield x (Right s')
Skip s' -> Skip (Right s')
Done -> Done
) (step s)
-- | The first @n@ elements
--
-- @n@ is forced up front; only 'Yield' steps advance the counter, so 'Skip's
-- do not count against the limit.
take :: Monad m => Int -> Stream m a -> Stream m a
{-# INLINE_FUSED take #-}
take n (Stream step s) = n `seq` Stream step' (s, 0)
  where
    {-# INLINE_INNER step' #-}
    -- Below the cutoff: forward the element and count it.
    step' (s, i) | i < n = liftM (\r ->
                             case r of
                               Yield x s' -> Yield x (s', i+1)
                               Skip s' -> Skip (s', i)
                               Done -> Done
                           ) (step s)
    -- Cutoff reached: end the stream without stepping the source again.
    -- (Wildcards: the original bound s and i here without using them.)
    step' (_, _) = return Done
-- | All but the first @n@ elements
drop :: Monad m => Int -> Stream m a -> Stream m a
{-# INLINE_FUSED drop #-}
drop n (Stream step s) = Stream step' (s, Just n)
where
{-# INLINE_INNER step' #-}
step' (s, Just i) | i > 0 = liftM (\r ->
case r of
Yield x s' -> Skip (s', Just (i-1))
Skip s' -> Skip (s', Just i)
Done -> Done
) (step s)
| otherwise = return $ Skip (s, Nothing)
step' (s, Nothing) = liftM (\r ->
case r of
Yield x s' -> Yield x (s', Nothing)
Skip s' -> Skip (s', Nothing)
Done -> Done
) (step s)
-- Mapping
-- -------
instance Monad m => Functor (Stream m) where
{-# INLINE fmap #-}
fmap = map
-- | Map a function over a 'Stream'
map :: Monad m => (a -> b) -> Stream m a -> Stream m b
{-# INLINE map #-}
map f = mapM (return . f)
-- | Map a monadic function over a 'Stream'
mapM :: Monad m => (a -> m b) -> Stream m a -> Stream m b
{-# INLINE_FUSED mapM #-}
mapM f (Stream step s) = Stream step' s
where
{-# INLINE_INNER step' #-}
step' s = do
r <- step s
case r of
Yield x s' -> liftM (`Yield` s') (f x)
Skip s' -> return (Skip s')
Done -> return Done
consume :: Monad m => Stream m a -> m ()
{-# INLINE_FUSED consume #-}
consume (Stream step s) = consume_loop SPEC s
where
consume_loop !sPEC s
= do
r <- step s
case r of
Yield _ s' -> consume_loop SPEC s'
Skip s' -> consume_loop SPEC s'
Done -> return ()
-- | Execute a monadic action for each element of the 'Stream'
mapM_ :: Monad m => (a -> m b) -> Stream m a -> m ()
{-# INLINE_FUSED mapM_ #-}
mapM_ m = consume . mapM m
-- | Transform a 'Stream' to use a different monad
trans :: (Monad m, Monad m')
=> (forall a. m a -> m' a) -> Stream m a -> Stream m' a
{-# INLINE_FUSED trans #-}
trans f (Stream step s) = Stream (f . step) s
unbox :: Monad m => Stream m (Box a) -> Stream m a
{-# INLINE_FUSED unbox #-}
unbox (Stream step s) = Stream step' s
where
{-# INLINE_INNER step' #-}
step' s = do
r <- step s
case r of
Yield (Box x) s' -> return $ Yield x s'
Skip s' -> return $ Skip s'
Done -> return $ Done
-- Zipping
-- -------
-- | Pair each element in a 'Stream' with its index
indexed :: Monad m => Stream m a -> Stream m (Int,a)
{-# INLINE_FUSED indexed #-}
indexed (Stream step s) = Stream step' (s,0)
where
{-# INLINE_INNER step' #-}
step' (s,i) = i `seq`
do
r <- step s
case r of
Yield x s' -> return $ Yield (i,x) (s', i+1)
Skip s' -> return $ Skip (s', i)
Done -> return Done
-- | Pair each element in a 'Stream' with its index, starting from the right
-- and counting down
indexedR :: Monad m => Int -> Stream m a -> Stream m (Int,a)
{-# INLINE_FUSED indexedR #-}
indexedR m (Stream step s) = Stream step' (s,m)
where
{-# INLINE_INNER step' #-}
step' (s,i) = i `seq`
do
r <- step s
case r of
Yield x s' -> let i' = i-1
in
return $ Yield (i',x) (s', i')
Skip s' -> return $ Skip (s', i)
Done -> return Done
-- | Zip two 'Stream's with the given monadic function
zipWithM :: Monad m => (a -> b -> m c) -> Stream m a -> Stream m b -> Stream m c
{-# INLINE_FUSED zipWithM #-}
-- The state carries @Just x@ while an element of the first stream waits
-- for its partner from the second stream; the result has the length of the
-- shorter input.
zipWithM f (Stream stepa sa) (Stream stepb sb) = Stream step (sa, sb, Nothing)
  where
    {-# INLINE_INNER step #-}
    -- No pending element: advance the first stream.
    step (sa, sb, Nothing) = liftM (\r ->
                               case r of
                                 Yield x sa' -> Skip (sa', sb, Just x)
                                 Skip sa' -> Skip (sa', sb, Nothing)
                                 Done -> Done
                             ) (stepa sa)
    -- Pending element @x@: advance the second stream and combine.
    step (sa, sb, Just x) = do
      r <- stepb sb
      case r of
        Yield y sb' ->
          do
            z <- f x y
            return $ Yield z (sa, sb', Nothing)
        Skip sb' -> return $ Skip (sa, sb', Just x)
        Done -> return $ Done
-- FIXME: This might expose an opportunity for inplace execution.
{-# RULES
"zipWithM xs xs [Vector.Stream]" forall f xs.
zipWithM f xs xs = mapM (\x -> f x x) xs
#-}
-- | Zip two 'Stream's with a monadic function and discard the results.
zipWithM_ :: Monad m => (a -> b -> m c) -> Stream m a -> Stream m b -> m ()
{-# INLINE zipWithM_ #-}
zipWithM_ f xs ys = consume $ zipWithM f xs ys
-- | Zip three 'Stream's with the given monadic function
zipWith3M :: Monad m => (a -> b -> c -> m d) -> Stream m a -> Stream m b -> Stream m c -> Stream m d
{-# INLINE_FUSED zipWith3M #-}
-- The state collects pending elements left-to-right: @Just (x, Nothing)@
-- after the first stream yields, @Just (x, Just y)@ after the second.
zipWith3M f (Stream stepa sa)
            (Stream stepb sb)
            (Stream stepc sc) = Stream step (sa, sb, sc, Nothing)
  where
    {-# INLINE_INNER step #-}
    step (sa, sb, sc, Nothing) = do
        r <- stepa sa
        return $ case r of
            Yield x sa' -> Skip (sa', sb, sc, Just (x, Nothing))
            Skip sa' -> Skip (sa', sb, sc, Nothing)
            Done -> Done
    step (sa, sb, sc, Just (x, Nothing)) = do
        r <- stepb sb
        return $ case r of
            Yield y sb' -> Skip (sa, sb', sc, Just (x, Just y))
            Skip sb' -> Skip (sa, sb', sc, Just (x, Nothing))
            Done -> Done
    step (sa, sb, sc, Just (x, Just y)) = do
        r <- stepc sc
        case r of
            Yield z sc' -> f x y z >>= (\res -> return $ Yield res (sa, sb, sc', Nothing))
            Skip sc' -> return $ Skip (sa, sb, sc', Just (x, Just y))
            Done -> return $ Done

-- | Zip four 'Stream's with the given monadic function
zipWith4M :: Monad m => (a -> b -> c -> d -> m e)
                     -> Stream m a -> Stream m b -> Stream m c -> Stream m d
                     -> Stream m e
{-# INLINE zipWith4M #-}
-- Implemented by pairing the inputs and reusing 'zipWithM'.
zipWith4M f sa sb sc sd
  = zipWithM (\(a,b) (c,d) -> f a b c d) (zip sa sb) (zip sc sd)

-- | Zip five 'Stream's with the given monadic function
zipWith5M :: Monad m => (a -> b -> c -> d -> e -> m f)
                     -> Stream m a -> Stream m b -> Stream m c -> Stream m d
                     -> Stream m e -> Stream m f
{-# INLINE zipWith5M #-}
zipWith5M f sa sb sc sd se
  = zipWithM (\(a,b,c) (d,e) -> f a b c d e) (zip3 sa sb sc) (zip sd se)

-- | Zip six 'Stream's with the given monadic function
zipWith6M :: Monad m => (a -> b -> c -> d -> e -> f -> m g)
                     -> Stream m a -> Stream m b -> Stream m c -> Stream m d
                     -> Stream m e -> Stream m f -> Stream m g
{-# INLINE zipWith6M #-}
zipWith6M fn sa sb sc sd se sf
  = zipWithM (\(a,b,c) (d,e,f) -> fn a b c d e f) (zip3 sa sb sc)
                                                  (zip3 sd se sf)

-- | Zip two 'Stream's with the given pure function
zipWith :: Monad m => (a -> b -> c) -> Stream m a -> Stream m b -> Stream m c
{-# INLINE zipWith #-}
zipWith f = zipWithM (\a b -> return (f a b))

-- | Zip three 'Stream's with the given pure function
zipWith3 :: Monad m => (a -> b -> c -> d)
                    -> Stream m a -> Stream m b -> Stream m c -> Stream m d
{-# INLINE zipWith3 #-}
zipWith3 f = zipWith3M (\a b c -> return (f a b c))

-- | Zip four 'Stream's with the given pure function
zipWith4 :: Monad m => (a -> b -> c -> d -> e)
                    -> Stream m a -> Stream m b -> Stream m c -> Stream m d
                    -> Stream m e
{-# INLINE zipWith4 #-}
zipWith4 f = zipWith4M (\a b c d -> return (f a b c d))

-- | Zip five 'Stream's with the given pure function
zipWith5 :: Monad m => (a -> b -> c -> d -> e -> f)
                    -> Stream m a -> Stream m b -> Stream m c -> Stream m d
                    -> Stream m e -> Stream m f
{-# INLINE zipWith5 #-}
zipWith5 f = zipWith5M (\a b c d e -> return (f a b c d e))

-- | Zip six 'Stream's with the given pure function
zipWith6 :: Monad m => (a -> b -> c -> d -> e -> f -> g)
                    -> Stream m a -> Stream m b -> Stream m c -> Stream m d
                    -> Stream m e -> Stream m f -> Stream m g
{-# INLINE zipWith6 #-}
zipWith6 fn = zipWith6M (\a b c d e f -> return (fn a b c d e f))

-- | Pair up the elements of two 'Stream's
zip :: Monad m => Stream m a -> Stream m b -> Stream m (a,b)
{-# INLINE zip #-}
zip = zipWith (,)

-- | Triple up the elements of three 'Stream's
zip3 :: Monad m => Stream m a -> Stream m b -> Stream m c -> Stream m (a,b,c)
{-# INLINE zip3 #-}
zip3 = zipWith3 (,,)

-- | Zip four 'Stream's into a 'Stream' of 4-tuples
zip4 :: Monad m => Stream m a -> Stream m b -> Stream m c -> Stream m d
                -> Stream m (a,b,c,d)
{-# INLINE zip4 #-}
zip4 = zipWith4 (,,,)

-- | Zip five 'Stream's into a 'Stream' of 5-tuples
zip5 :: Monad m => Stream m a -> Stream m b -> Stream m c -> Stream m d
                -> Stream m e -> Stream m (a,b,c,d,e)
{-# INLINE zip5 #-}
zip5 = zipWith5 (,,,,)

-- | Zip six 'Stream's into a 'Stream' of 6-tuples
zip6 :: Monad m => Stream m a -> Stream m b -> Stream m c -> Stream m d
                -> Stream m e -> Stream m f -> Stream m (a,b,c,d,e,f)
{-# INLINE zip6 #-}
zip6 = zipWith6 (,,,,,)
-- Comparisons
-- -----------

-- | Check if two 'Stream's are equal
--
-- Streams of different lengths are never equal: 'eq_loop1' returns 'False'
-- when the second stream ends early, and 'eq_null' requires the second
-- stream to be exhausted once the first one is.
eq :: (Monad m, Eq a) => Stream m a -> Stream m a -> m Bool
{-# INLINE_FUSED eq #-}
eq (Stream step1 s1) (Stream step2 s2) = eq_loop0 SPEC s1 s2
  where
    -- Advance the first stream until it yields (or ends).
    eq_loop0 !sPEC s1 s2 = do
      r <- step1 s1
      case r of
        Yield x s1' -> eq_loop1 SPEC x s1' s2
        Skip s1' -> eq_loop0 SPEC s1' s2
        Done -> eq_null s2
    -- Advance the second stream and compare against the pending element.
    eq_loop1 !sPEC x s1 s2 = do
      r <- step2 s2
      case r of
        Yield y s2'
          | x == y -> eq_loop0 SPEC s1 s2'
          | otherwise -> return False
        Skip s2' -> eq_loop1 SPEC x s1 s2'
        Done -> return False
    -- First stream ended; equal only if the second one ends too.
    eq_null s2 = do
      r <- step2 s2
      case r of
        Yield _ _ -> return False
        Skip s2' -> eq_null s2'
        Done -> return True

-- | Lexicographically compare two 'Stream's
--
-- When one stream is a proper prefix of the other, the shorter stream is
-- considered smaller ('GT' / 'LT' in the 'Done' branches below).
cmp :: (Monad m, Ord a) => Stream m a -> Stream m a -> m Ordering
{-# INLINE_FUSED cmp #-}
cmp (Stream step1 s1) (Stream step2 s2) = cmp_loop0 SPEC s1 s2
  where
    cmp_loop0 !sPEC s1 s2 = do
      r <- step1 s1
      case r of
        Yield x s1' -> cmp_loop1 SPEC x s1' s2
        Skip s1' -> cmp_loop0 SPEC s1' s2
        Done -> cmp_null s2
    cmp_loop1 !sPEC x s1 s2 = do
      r <- step2 s2
      case r of
        Yield y s2' -> case x `compare` y of
                         EQ -> cmp_loop0 SPEC s1 s2'
                         c -> return c
        Skip s2' -> cmp_loop1 SPEC x s1 s2'
        Done -> return GT
    cmp_null s2 = do
      r <- step2 s2
      case r of
        Yield _ _ -> return LT
        Skip s2' -> cmp_null s2'
        Done -> return EQ
-- Filtering
-- ---------

-- | Drop elements which do not satisfy the predicate
filter :: Monad m => (a -> Bool) -> Stream m a -> Stream m a
{-# INLINE filter #-}
filter f = filterM (return . f)

-- | Drop elements which do not satisfy the monadic predicate
filterM :: Monad m => (a -> m Bool) -> Stream m a -> Stream m a
{-# INLINE_FUSED filterM #-}
filterM f (Stream step s) = Stream step' s
  where
    {-# INLINE_INNER step' #-}
    -- Failing elements become 'Skip's rather than being dropped eagerly,
    -- so each step of the result performs one step of the input.
    step' s = do
        r <- step s
        case r of
          Yield x s' -> do
                          b <- f x
                          return $ if b then Yield x s'
                                        else Skip s'
          Skip s' -> return $ Skip s'
          Done -> return $ Done

-- | Longest prefix of elements that satisfy the predicate
takeWhile :: Monad m => (a -> Bool) -> Stream m a -> Stream m a
{-# INLINE takeWhile #-}
takeWhile f = takeWhileM (return . f)

-- | Longest prefix of elements that satisfy the monadic predicate
takeWhileM :: Monad m => (a -> m Bool) -> Stream m a -> Stream m a
{-# INLINE_FUSED takeWhileM #-}
takeWhileM f (Stream step s) = Stream step' s
  where
    {-# INLINE_INNER step' #-}
    -- The first failing element terminates the stream.
    step' s = do
        r <- step s
        case r of
          Yield x s' -> do
                          b <- f x
                          return $ if b then Yield x s' else Done
          Skip s' -> return $ Skip s'
          Done -> return $ Done

-- | Drop the longest prefix of elements that satisfy the predicate
dropWhile :: Monad m => (a -> Bool) -> Stream m a -> Stream m a
{-# INLINE dropWhile #-}
dropWhile f = dropWhileM (return . f)

-- | Internal state of 'dropWhileM': still dropping the prefix, holding a
-- single element that must be yielded next, or passing the rest through.
data DropWhile s a = DropWhile_Drop s | DropWhile_Yield a s | DropWhile_Next s

-- | Drop the longest prefix of elements that satisfy the monadic predicate
dropWhileM :: Monad m => (a -> m Bool) -> Stream m a -> Stream m a
{-# INLINE_FUSED dropWhileM #-}
dropWhileM f (Stream step s) = Stream step' (DropWhile_Drop s)
  where
    -- NOTE: we jump through hoops here to have only one Yield; local data
    -- declarations would be nice!
    {-# INLINE_INNER step' #-}
    -- Dropping phase: test elements until one fails the predicate, then
    -- hand it over via 'DropWhile_Yield'.
    step' (DropWhile_Drop s)
      = do
          r <- step s
          case r of
            Yield x s' -> do
                            b <- f x
                            return $ if b then Skip (DropWhile_Drop s')
                                          else Skip (DropWhile_Yield x s')
            Skip s' -> return $ Skip (DropWhile_Drop s')
            Done -> return $ Done
    -- Emit the held element, then switch to pass-through mode.
    step' (DropWhile_Yield x s) = return $ Yield x (DropWhile_Next s)
    -- Pass-through phase: no more predicate tests.
    step' (DropWhile_Next s)
      = liftM (\r ->
          case r of
            Yield x s' -> Skip (DropWhile_Yield x s')
            Skip s' -> Skip (DropWhile_Next s')
            Done -> Done
        ) (step s)
-- Searching
-- ---------

infix 4 `elem`
-- | Check whether the 'Stream' contains an element
--
-- Short-circuits as soon as a matching element is found.
elem :: (Monad m, Eq a) => a -> Stream m a -> m Bool
{-# INLINE_FUSED elem #-}
elem x (Stream step s) = elem_loop SPEC s
  where
    elem_loop !sPEC s
      = do
          r <- step s
          case r of
            Yield y s' | x == y -> return True
                       | otherwise -> elem_loop SPEC s'
            Skip s' -> elem_loop SPEC s'
            Done -> return False
infix 4 `notElem`
-- | Inverse of 'elem': 'True' iff the element does not occur in the 'Stream'.
notElem :: (Monad m, Eq a) => a -> Stream m a -> m Bool
{-# INLINE notElem #-}
notElem y s = do
  found <- elem y s
  return (not found)
-- | Yield 'Just' the first element that satisfies the predicate or 'Nothing'
-- if no such element exists.
find :: Monad m => (a -> Bool) -> Stream m a -> m (Maybe a)
{-# INLINE find #-}
find f = findM (return . f)

-- | Yield 'Just' the first element that satisfies the monadic predicate or
-- 'Nothing' if no such element exists.
findM :: Monad m => (a -> m Bool) -> Stream m a -> m (Maybe a)
{-# INLINE_FUSED findM #-}
findM f (Stream step s) = find_loop SPEC s
  where
    -- Short-circuits on the first match.
    find_loop !sPEC s
      = do
          r <- step s
          case r of
            Yield x s' -> do
                            b <- f x
                            if b then return $ Just x
                                 else find_loop SPEC s'
            Skip s' -> find_loop SPEC s'
            Done -> return Nothing

-- | Yield 'Just' the index of the first element that satisfies the predicate
-- or 'Nothing' if no such element exists.
findIndex :: Monad m => (a -> Bool) -> Stream m a -> m (Maybe Int)
{-# INLINE_FUSED findIndex #-}
findIndex f = findIndexM (return . f)

-- | Yield 'Just' the index of the first element that satisfies the monadic
-- predicate or 'Nothing' if no such element exists.
findIndexM :: Monad m => (a -> m Bool) -> Stream m a -> m (Maybe Int)
{-# INLINE_FUSED findIndexM #-}
findIndexM f (Stream step s) = findIndex_loop SPEC s 0
  where
    -- The counter only advances on 'Yield', so 'Skip's do not affect the
    -- reported index.
    findIndex_loop !sPEC s i
      = do
          r <- step s
          case r of
            Yield x s' -> do
                            b <- f x
                            if b then return $ Just i
                                 else findIndex_loop SPEC s' (i+1)
            Skip s' -> findIndex_loop SPEC s' i
            Done -> return Nothing
-- Folding
-- -------

-- | Left fold
foldl :: Monad m => (a -> b -> a) -> a -> Stream m b -> m a
{-# INLINE foldl #-}
foldl f = foldlM (\a b -> return (f a b))

-- | Left fold with a monadic operator
--
-- The accumulator is not forced; see 'foldlM'' for the strict variant.
foldlM :: Monad m => (a -> b -> m a) -> a -> Stream m b -> m a
{-# INLINE_FUSED foldlM #-}
foldlM m z (Stream step s) = foldlM_loop SPEC z s
  where
    foldlM_loop !sPEC z s
      = do
          r <- step s
          case r of
            Yield x s' -> do { z' <- m z x; foldlM_loop SPEC z' s' }
            Skip s' -> foldlM_loop SPEC z s'
            Done -> return z

-- | Same as 'foldlM'
foldM :: Monad m => (a -> b -> m a) -> a -> Stream m b -> m a
{-# INLINE foldM #-}
foldM = foldlM

-- | Left fold over a non-empty 'Stream'
foldl1 :: Monad m => (a -> a -> a) -> Stream m a -> m a
{-# INLINE foldl1 #-}
foldl1 f = foldl1M (\a b -> return (f a b))

-- | Left fold over a non-empty 'Stream' with a monadic operator
--
-- Calls 'EMPTY_STREAM' (a runtime error) if the stream produces no element.
foldl1M :: Monad m => (a -> a -> m a) -> Stream m a -> m a
{-# INLINE_FUSED foldl1M #-}
foldl1M f (Stream step s) = foldl1M_loop SPEC s
  where
    -- Take the first element as the seed and delegate to 'foldlM'.
    foldl1M_loop !sPEC s
      = do
          r <- step s
          case r of
            Yield x s' -> foldlM f x (Stream step s')
            Skip s' -> foldl1M_loop SPEC s'
            Done -> EMPTY_STREAM "foldl1M"

-- | Same as 'foldl1M'
fold1M :: Monad m => (a -> a -> m a) -> Stream m a -> m a
{-# INLINE fold1M #-}
fold1M = foldl1M

-- | Left fold with a strict accumulator
foldl' :: Monad m => (a -> b -> a) -> a -> Stream m b -> m a
{-# INLINE foldl' #-}
foldl' f = foldlM' (\a b -> return (f a b))

-- | Left fold with a strict accumulator and a monadic operator
foldlM' :: Monad m => (a -> b -> m a) -> a -> Stream m b -> m a
{-# INLINE_FUSED foldlM' #-}
foldlM' m z (Stream step s) = foldlM'_loop SPEC z s
  where
    -- The accumulator is forced to WHNF on every iteration.
    foldlM'_loop !sPEC z s
      = z `seq`
        do
          r <- step s
          case r of
            Yield x s' -> do { z' <- m z x; foldlM'_loop SPEC z' s' }
            Skip s' -> foldlM'_loop SPEC z s'
            Done -> return z

-- | Same as 'foldlM''
foldM' :: Monad m => (a -> b -> m a) -> a -> Stream m b -> m a
{-# INLINE foldM' #-}
foldM' = foldlM'

-- | Left fold over a non-empty 'Stream' with a strict accumulator
foldl1' :: Monad m => (a -> a -> a) -> Stream m a -> m a
{-# INLINE foldl1' #-}
foldl1' f = foldl1M' (\a b -> return (f a b))

-- | Left fold over a non-empty 'Stream' with a strict accumulator and a
-- monadic operator
foldl1M' :: Monad m => (a -> a -> m a) -> Stream m a -> m a
{-# INLINE_FUSED foldl1M' #-}
foldl1M' f (Stream step s) = foldl1M'_loop SPEC s
  where
    foldl1M'_loop !sPEC s
      = do
          r <- step s
          case r of
            Yield x s' -> foldlM' f x (Stream step s')
            Skip s' -> foldl1M'_loop SPEC s'
            Done -> EMPTY_STREAM "foldl1M'"

-- | Same as 'foldl1M''
fold1M' :: Monad m => (a -> a -> m a) -> Stream m a -> m a
{-# INLINE fold1M' #-}
fold1M' = foldl1M'

-- | Right fold
foldr :: Monad m => (a -> b -> b) -> b -> Stream m a -> m b
{-# INLINE foldr #-}
foldr f = foldrM (\a b -> return (f a b))

-- | Right fold with a monadic operator
--
-- Note that the whole fold of the tail is performed before @f@ is applied,
-- so this is not lazy in the way list 'Prelude.foldr' is.
foldrM :: Monad m => (a -> b -> m b) -> b -> Stream m a -> m b
{-# INLINE_FUSED foldrM #-}
foldrM f z (Stream step s) = foldrM_loop SPEC s
  where
    foldrM_loop !sPEC s
      = do
          r <- step s
          case r of
            Yield x s' -> f x =<< foldrM_loop SPEC s'
            Skip s' -> foldrM_loop SPEC s'
            Done -> return z

-- | Right fold over a non-empty stream
foldr1 :: Monad m => (a -> a -> a) -> Stream m a -> m a
{-# INLINE foldr1 #-}
foldr1 f = foldr1M (\a b -> return (f a b))

-- | Right fold over a non-empty stream with a monadic operator
--
-- Calls 'EMPTY_STREAM' (a runtime error) if the stream produces no element.
foldr1M :: Monad m => (a -> a -> m a) -> Stream m a -> m a
{-# INLINE_FUSED foldr1M #-}
foldr1M f (Stream step s) = foldr1M_loop0 SPEC s
  where
    -- Find the first element.
    foldr1M_loop0 !sPEC s
      = do
          r <- step s
          case r of
            Yield x s' -> foldr1M_loop1 SPEC x s'
            Skip s' -> foldr1M_loop0 SPEC s'
            Done -> EMPTY_STREAM "foldr1M"
    -- Fold the rest; the last element is returned unchanged.
    foldr1M_loop1 !sPEC x s
      = do
          r <- step s
          case r of
            Yield y s' -> f x =<< foldr1M_loop1 SPEC y s'
            Skip s' -> foldr1M_loop1 SPEC x s'
            Done -> return x
-- Specialised folds
-- -----------------

-- | Check whether every element of the 'Stream' is 'True'; short-circuits
-- on the first 'False'.
and :: Monad m => Stream m Bool -> m Bool
{-# INLINE_FUSED and #-}
and (Stream step s) = and_loop SPEC s
  where
    and_loop !sPEC s
      = do
          r <- step s
          case r of
            Yield False _ -> return False
            Yield True s' -> and_loop SPEC s'
            Skip s' -> and_loop SPEC s'
            Done -> return True

-- | Check whether any element of the 'Stream' is 'True'; short-circuits
-- on the first 'True'.
or :: Monad m => Stream m Bool -> m Bool
{-# INLINE_FUSED or #-}
or (Stream step s) = or_loop SPEC s
  where
    or_loop !sPEC s
      = do
          r <- step s
          case r of
            Yield False s' -> or_loop SPEC s'
            Yield True _ -> return True
            Skip s' -> or_loop SPEC s'
            Done -> return False

-- | Map a stream-producing function over a 'Stream' and concatenate the
-- results.
concatMap :: Monad m => (a -> Stream m b) -> Stream m a -> Stream m b
{-# INLINE concatMap #-}
concatMap f = concatMapM (return . f)

-- | Monadic version of 'concatMap'.
concatMapM :: Monad m => (a -> m (Stream m b)) -> Stream m a -> Stream m b
{-# INLINE_FUSED concatMapM #-}
-- @Left@ states drive the outer stream, @Right@ states drive the current
-- inner stream (paired with the suspended outer state).
concatMapM f (Stream step s) = Stream concatMap_go (Left s)
  where
    concatMap_go (Left s) = do
        r <- step s
        case r of
          Yield a s' -> do
                          b_stream <- f a
                          return $ Skip (Right (b_stream, s'))
          Skip s' -> return $ Skip (Left s')
          Done -> return Done
    concatMap_go (Right (Stream inner_step inner_s, s)) = do
        r <- inner_step inner_s
        case r of
          Yield b inner_s' -> return $ Yield b (Right (Stream inner_step inner_s', s))
          Skip inner_s' -> return $ Skip (Right (Stream inner_step inner_s', s))
          Done -> return $ Skip (Left s)
-- | Create a 'Stream' of values from a 'Stream' of streamable things
--
-- @mk@ builds the initial inner state for each outer element and @istep@
-- steps that inner state; when the inner stream is exhausted the outer
-- stream is resumed.
flatten :: Monad m => (a -> m s) -> (s -> m (Step s b)) -> Stream m a -> Stream m b
{-# INLINE_FUSED flatten #-}
flatten mk istep (Stream ostep t) = Stream step (Left t)
  where
    {-# INLINE_INNER step #-}
    -- Outer phase: fetch the next element and build its inner state.
    step (Left t) = do
        r <- ostep t
        case r of
          Yield a t' -> do
                          s <- mk a
                          -- Force the inner state before storing it.
                          s `seq` return (Skip (Right (s,t')))
          Skip t' -> return $ Skip (Left t')
          Done -> return $ Done
    -- Inner phase: drain the current inner stream.
    step (Right (s,t)) = do
        r <- istep s
        case r of
          Yield x s' -> return $ Yield x (Right (s',t))
          Skip s' -> return $ Skip (Right (s',t))
          Done -> return $ Skip (Left t)
-- Unfolding
-- ---------

-- | Unfold
unfoldr :: Monad m => (s -> Maybe (a, s)) -> s -> Stream m a
{-# INLINE_FUSED unfoldr #-}
unfoldr f = unfoldrM (return . f)

-- | Unfold with a monadic function
unfoldrM :: Monad m => (s -> m (Maybe (a, s))) -> s -> Stream m a
{-# INLINE_FUSED unfoldrM #-}
unfoldrM f s = Stream step s
  where
    {-# INLINE_INNER step #-}
    step s = liftM (\r ->
               case r of
                 Just (x, s') -> Yield x s'
                 Nothing -> Done
             ) (f s)

-- | Unfold at most @n@ elements
unfoldrN :: Monad m => Int -> (s -> Maybe (a, s)) -> s -> Stream m a
{-# INLINE_FUSED unfoldrN #-}
unfoldrN n f = unfoldrNM n (return . f)

-- | Unfold at most @n@ elements with a monadic functions
unfoldrNM :: Monad m => Int -> (s -> m (Maybe (a, s))) -> s -> Stream m a
{-# INLINE_FUSED unfoldrNM #-}
unfoldrNM n f s = Stream step (s,n)
  where
    {-# INLINE_INNER step #-}
    -- The counter is checked before @f@ is invoked, so @f@ is called at
    -- most @n@ times.
    step (s,n) | n <= 0 = return Done
               | otherwise = liftM (\r ->
                   case r of
                     Just (x,s') -> Yield x (s',n-1)
                     Nothing -> Done
                 ) (f s)

-- | Apply monadic function n times to value. Zeroth element is original value.
iterateNM :: Monad m => Int -> (a -> m a) -> a -> Stream m a
{-# INLINE_FUSED iterateNM #-}
iterateNM n f x0 = Stream step (x0,n)
  where
    {-# INLINE_INNER step #-}
    -- The @i == n@ clause yields the seed itself without applying @f@.
    step (x,i) | i <= 0 = return Done
               | i == n = return $ Yield x (x,i-1)
               | otherwise = do a <- f x
                                return $ Yield a (a,i-1)

-- | Apply function n times to value. Zeroth element is original value.
iterateN :: Monad m => Int -> (a -> a) -> a -> Stream m a
{-# INLINE_FUSED iterateN #-}
iterateN n f x0 = iterateNM n (return . f) x0
-- Scans
-- -----

-- | Prefix scan
prescanl :: Monad m => (a -> b -> a) -> a -> Stream m b -> Stream m a
{-# INLINE prescanl #-}
prescanl f = prescanlM (\a b -> return (f a b))

-- | Prefix scan with a monadic operator
--
-- Each output element is the accumulator /before/ the corresponding input
-- element is combined in.
prescanlM :: Monad m => (a -> b -> m a) -> a -> Stream m b -> Stream m a
{-# INLINE_FUSED prescanlM #-}
prescanlM f z (Stream step s) = Stream step' (s,z)
  where
    {-# INLINE_INNER step' #-}
    step' (s,x) = do
                    r <- step s
                    case r of
                      -- This @z@ shadows the initial accumulator: it is the
                      -- new accumulator carried in the state.
                      Yield y s' -> do
                                      z <- f x y
                                      return $ Yield x (s', z)
                      Skip s' -> return $ Skip (s', x)
                      Done -> return Done

-- | Prefix scan with strict accumulator
prescanl' :: Monad m => (a -> b -> a) -> a -> Stream m b -> Stream m a
{-# INLINE prescanl' #-}
prescanl' f = prescanlM' (\a b -> return (f a b))

-- | Prefix scan with strict accumulator and a monadic operator
prescanlM' :: Monad m => (a -> b -> m a) -> a -> Stream m b -> Stream m a
{-# INLINE_FUSED prescanlM' #-}
prescanlM' f z (Stream step s) = Stream step' (s,z)
  where
    {-# INLINE_INNER step' #-}
    -- The carried accumulator is forced on every step.
    step' (s,x) = x `seq`
                  do
                    r <- step s
                    case r of
                      Yield y s' -> do
                                      z <- f x y
                                      return $ Yield x (s', z)
                      Skip s' -> return $ Skip (s', x)
                      Done -> return Done

-- | Suffix scan
postscanl :: Monad m => (a -> b -> a) -> a -> Stream m b -> Stream m a
{-# INLINE postscanl #-}
postscanl f = postscanlM (\a b -> return (f a b))

-- | Suffix scan with a monadic operator
--
-- Each output element is the accumulator /after/ the corresponding input
-- element has been combined in.
postscanlM :: Monad m => (a -> b -> m a) -> a -> Stream m b -> Stream m a
{-# INLINE_FUSED postscanlM #-}
postscanlM f z (Stream step s) = Stream step' (s,z)
  where
    {-# INLINE_INNER step' #-}
    step' (s,x) = do
                    r <- step s
                    case r of
                      Yield y s' -> do
                                      z <- f x y
                                      return $ Yield z (s',z)
                      Skip s' -> return $ Skip (s',x)
                      Done -> return Done

-- | Suffix scan with strict accumulator
postscanl' :: Monad m => (a -> b -> a) -> a -> Stream m b -> Stream m a
{-# INLINE postscanl' #-}
postscanl' f = postscanlM' (\a b -> return (f a b))

-- | Suffix scan with strict accumulator and a monadic operator
postscanlM' :: Monad m => (a -> b -> m a) -> a -> Stream m b -> Stream m a
{-# INLINE_FUSED postscanlM' #-}
-- The seed and every intermediate accumulator are forced.
postscanlM' f z (Stream step s) = z `seq` Stream step' (s,z)
  where
    {-# INLINE_INNER step' #-}
    step' (s,x) = x `seq`
                  do
                    r <- step s
                    case r of
                      Yield y s' -> do
                                      z <- f x y
                                      z `seq` return (Yield z (s',z))
                      Skip s' -> return $ Skip (s',x)
                      Done -> return Done
-- | Haskell-style scan
scanl :: Monad m => (a -> b -> a) -> a -> Stream m b -> Stream m a
{-# INLINE scanl #-}
scanl f = scanlM (\a b -> return (f a b))

-- | Haskell-style scan with a monadic operator
--
-- The seed is emitted first, followed by the suffix scan; the result is
-- one element longer than the input.
scanlM :: Monad m => (a -> b -> m a) -> a -> Stream m b -> Stream m a
{-# INLINE scanlM #-}
scanlM f z s = z `cons` postscanlM f z s

-- | Haskell-style scan with strict accumulator
scanl' :: Monad m => (a -> b -> a) -> a -> Stream m b -> Stream m a
{-# INLINE scanl' #-}
scanl' f = scanlM' (\a b -> return (f a b))
-- | Haskell-style scan with strict accumulator and a monadic operator
scanlM' :: Monad m => (a -> b -> m a) -> a -> Stream m b -> Stream m a
{-# INLINE scanlM' #-}
-- Delegate to the *strict* suffix scan so that every intermediate
-- accumulator is forced, not just the seed.  The previous definition used
-- the lazy 'postscanlM', which defeats the documented strictness of this
-- function (only @z@ was forced).
scanlM' f z s = z `seq` (z `cons` postscanlM' f z s)
-- | Scan over a non-empty 'Stream'
scanl1 :: Monad m => (a -> a -> a) -> Stream m a -> Stream m a
{-# INLINE scanl1 #-}
scanl1 f = scanl1M (\x y -> return (f x y))

-- | Scan over a non-empty 'Stream' with a monadic operator
--
-- Calls 'EMPTY_STREAM' (a runtime error) if the stream produces no element.
scanl1M :: Monad m => (a -> a -> m a) -> Stream m a -> Stream m a
{-# INLINE_FUSED scanl1M #-}
scanl1M f (Stream step s) = Stream step' (s, Nothing)
  where
    {-# INLINE_INNER step' #-}
    -- No accumulator yet: the first element becomes the seed.
    step' (s, Nothing) = do
        r <- step s
        case r of
          Yield x s' -> return $ Yield x (s', Just x)
          Skip s' -> return $ Skip (s', Nothing)
          Done -> EMPTY_STREAM "scanl1M"
    -- Combine the accumulator with the next element.
    step' (s, Just x) = do
        r <- step s
        case r of
          Yield y s' -> do
                          z <- f x y
                          return $ Yield z (s', Just z)
          Skip s' -> return $ Skip (s', Just x)
          Done -> return Done

-- | Scan over a non-empty 'Stream' with a strict accumulator
scanl1' :: Monad m => (a -> a -> a) -> Stream m a -> Stream m a
{-# INLINE scanl1' #-}
scanl1' f = scanl1M' (\x y -> return (f x y))
-- | Scan over a non-empty 'Stream' with a strict accumulator and a monadic
-- operator
--
-- Calls 'EMPTY_STREAM' (a runtime error) if the stream produces no element.
scanl1M' :: Monad m => (a -> a -> m a) -> Stream m a -> Stream m a
{-# INLINE_FUSED scanl1M' #-}
scanl1M' f (Stream step s) = Stream step' (s, Nothing)
  where
    {-# INLINE_INNER step' #-}
    -- No accumulator yet: the first element becomes the (forced) seed.
    step' (s, Nothing) = do
        r <- step s
        case r of
          Yield x s' -> x `seq` return (Yield x (s', Just x))
          Skip s' -> return $ Skip (s', Nothing)
          -- Fix: the error label previously said "scanl1M", misattributing
          -- the failure to the lazy variant.
          Done -> EMPTY_STREAM "scanl1M'"
    -- Combine the forced accumulator with the next element.
    step' (s, Just x) = x `seq`
                        do
                          r <- step s
                          case r of
                            Yield y s' -> do
                                            z <- f x y
                                            z `seq` return (Yield z (s', Just z))
                            Skip s' -> return $ Skip (s', Just x)
                            Done -> return Done
-- Enumerations
-- ------------

-- The Enum class is broken for this, there just doesn't seem to be a
-- way to implement this generically. We have to specialise for as many types
-- as we can but this doesn't help in polymorphic loops.

-- | Yield a 'Stream' of the given length containing the values @x@, @x+y@,
-- @x+y+y@ etc.
enumFromStepN :: (Num a, Monad m) => a -> a -> Int -> Stream m a
{-# INLINE_FUSED enumFromStepN #-}
enumFromStepN x y n = x `seq` y `seq` n `seq` Stream step (x,n)
  where
    {-# INLINE_INNER step #-}
    step (x,n) | n > 0 = return $ Yield x (x+y,n-1)
               | otherwise = return $ Done

-- | Enumerate values
--
-- /WARNING:/ This operation can be very inefficient. If at all possible, use
-- 'enumFromStepN' instead.
enumFromTo :: (Enum a, Monad m) => a -> a -> Stream m a
{-# INLINE_FUSED enumFromTo #-}
-- Generic fallback; the RULES below replace it with specialised versions
-- for known numeric types.
enumFromTo x y = fromList [x .. y]

-- NOTE: We use (x+1) instead of (succ x) below because the latter checks for
-- overflow which can't happen here.

-- FIXME: add "too large" test for Int
enumFromTo_small :: (Integral a, Monad m) => a -> a -> Stream m a
{-# INLINE_FUSED enumFromTo_small #-}
enumFromTo_small x y = x `seq` y `seq` Stream step x
  where
    {-# INLINE_INNER step #-}
    step x | x <= y = return $ Yield x (x+1)
           | otherwise = return $ Done

{-# RULES

"enumFromTo<Int8> [Stream]"
  enumFromTo = enumFromTo_small :: Monad m => Int8 -> Int8 -> Stream m Int8

"enumFromTo<Int16> [Stream]"
  enumFromTo = enumFromTo_small :: Monad m => Int16 -> Int16 -> Stream m Int16

"enumFromTo<Word8> [Stream]"
  enumFromTo = enumFromTo_small :: Monad m => Word8 -> Word8 -> Stream m Word8

"enumFromTo<Word16> [Stream]"
  enumFromTo = enumFromTo_small :: Monad m => Word16 -> Word16 -> Stream m Word16

  #-}
#if WORD_SIZE_IN_BITS > 32
{-# RULES
"enumFromTo<Int32> [Stream]"
enumFromTo = enumFromTo_small :: Monad m => Int32 -> Int32 -> Stream m Int32
"enumFromTo<Word32> [Stream]"
enumFromTo = enumFromTo_small :: Monad m => Word32 -> Word32 -> Stream m Word32
#-}
#endif
-- NOTE: We could implement a generic "too large" test:
--
-- len x y | x > y = 0
--         | n > 0 && n <= fromIntegral (maxBound :: Int) = fromIntegral n
--         | otherwise = error
--   where
--     n = y-x+1
--
-- Alas, GHC won't eliminate unnecessary comparisons (such as n >= 0 for
-- unsigned types). See http://hackage.haskell.org/trac/ghc/ticket/3744
--

enumFromTo_int :: forall m. Monad m => Int -> Int -> Stream m Int
{-# INLINE_FUSED enumFromTo_int #-}
enumFromTo_int x y = x `seq` y `seq` Stream step x
  where
    -- NOTE(review): 'len' is not referenced by 'step' or the 'Stream'
    -- constructor in this representation — presumably a leftover from a
    -- version that carried a size hint; confirm before removing.
    {-# INLINE [0] len #-}
    len :: Int -> Int -> Int
    len x y | x > y = 0
            | otherwise = BOUNDS_CHECK(check) "enumFromTo" "vector too large"
                          (n > 0)
                        $ n
      where
        n = y-x+1

    {-# INLINE_INNER step #-}
    step x | x <= y = return $ Yield x (x+1)
           | otherwise = return $ Done

enumFromTo_intlike :: (Integral a, Monad m) => a -> a -> Stream m a
{-# INLINE_FUSED enumFromTo_intlike #-}
enumFromTo_intlike x y = x `seq` y `seq` Stream step x
  where
    {-# INLINE_INNER step #-}
    step x | x <= y = return $ Yield x (x+1)
           | otherwise = return $ Done

{-# RULES

"enumFromTo<Int> [Stream]"
  enumFromTo = enumFromTo_int :: Monad m => Int -> Int -> Stream m Int

#if WORD_SIZE_IN_BITS > 32

"enumFromTo<Int64> [Stream]"
  enumFromTo = enumFromTo_intlike :: Monad m => Int64 -> Int64 -> Stream m Int64

#else

"enumFromTo<Int32> [Stream]"
  enumFromTo = enumFromTo_intlike :: Monad m => Int32 -> Int32 -> Stream m Int32

#endif

  #-}

enumFromTo_big_word :: (Integral a, Monad m) => a -> a -> Stream m a
{-# INLINE_FUSED enumFromTo_big_word #-}
enumFromTo_big_word x y = x `seq` y `seq` Stream step x
  where
    {-# INLINE_INNER step #-}
    step x | x <= y = return $ Yield x (x+1)
           | otherwise = return $ Done

{-# RULES

"enumFromTo<Word> [Stream]"
  enumFromTo = enumFromTo_big_word :: Monad m => Word -> Word -> Stream m Word

"enumFromTo<Word64> [Stream]"
  enumFromTo = enumFromTo_big_word
                        :: Monad m => Word64 -> Word64 -> Stream m Word64

#if WORD_SIZE_IN_BITS == 32

"enumFromTo<Word32> [Stream]"
  enumFromTo = enumFromTo_big_word
                        :: Monad m => Word32 -> Word32 -> Stream m Word32

#endif

"enumFromTo<Integer> [Stream]"
  enumFromTo = enumFromTo_big_word
                        :: Monad m => Integer -> Integer -> Stream m Integer

  #-}
-- FIXME: the "too large" test is totally wrong
enumFromTo_big_int :: (Integral a, Monad m) => a -> a -> Stream m a
{-# INLINE_FUSED enumFromTo_big_int #-}
enumFromTo_big_int x y = x `seq` y `seq` Stream step x
  where
    {-# INLINE_INNER step #-}
    step x | x <= y = return $ Yield x (x+1)
           | otherwise = return $ Done

-- Fix 1: the rule referenced @enumFromTo_big@, a function that does not
-- exist in this module (only 'enumFromTo_big_int' and 'enumFromTo_big_word'
-- are defined), so the rule could never compile when enabled.
-- Fix 2: the guard was @WORD_SIZE_IN_BITS > 32@, which both duplicated the
-- Int64 rule in the intlike section above and inverted the intent: the
-- big-int path is needed precisely when Int64 does not fit in Int, i.e. on
-- 32-bit platforms.
#if WORD_SIZE_IN_BITS == 32

{-# RULES

"enumFromTo<Int64> [Stream]"
  enumFromTo = enumFromTo_big_int :: Monad m => Int64 -> Int64 -> Stream m Int64

  #-}

#endif
-- | Specialised enumeration for 'Char', operating on code points.
enumFromTo_char :: Monad m => Char -> Char -> Stream m Char
{-# INLINE_FUSED enumFromTo_char #-}
enumFromTo_char x y = x `seq` y `seq` Stream step xn
  where
    xn = ord x
    yn = ord y

    {-# INLINE_INNER step #-}
    -- Iterates over 'Int' code points and converts back with 'unsafeChr';
    -- safe because every value in [ord x .. ord y] came from a valid 'Char'.
    step xn | xn <= yn = return $ Yield (unsafeChr xn) (xn+1)
            | otherwise = return $ Done

{-# RULES

"enumFromTo<Char> [Stream]"
  enumFromTo = enumFromTo_char

  #-}

------------------------------------------------------------------------

-- Specialise enumFromTo for Float and Double.
-- Also, try to do something about pairs?

enumFromTo_double :: (Monad m, Ord a, RealFrac a) => a -> a -> Stream m a
{-# INLINE_FUSED enumFromTo_double #-}
enumFromTo_double n m = n `seq` m `seq` Stream step n
  where
    -- Matches the @[n .. m]@ semantics for fractional types: the last
    -- element may exceed @m@ by up to a half step.
    lim = m + 1/2 -- important to float out

    {-# INLINE_INNER step #-}
    step x | x <= lim = return $ Yield x (x+1)
           | otherwise = return $ Done

{-# RULES

"enumFromTo<Double> [Stream]"
  enumFromTo = enumFromTo_double :: Monad m => Double -> Double -> Stream m Double

"enumFromTo<Float> [Stream]"
  enumFromTo = enumFromTo_double :: Monad m => Float -> Float -> Stream m Float

  #-}

------------------------------------------------------------------------

-- | Enumerate values with a given step.
--
-- /WARNING:/ This operation is very inefficient. If at all possible, use
-- 'enumFromStepN' instead.
enumFromThenTo :: (Enum a, Monad m) => a -> a -> a -> Stream m a
{-# INLINE_FUSED enumFromThenTo #-}
enumFromThenTo x y z = fromList [x, y .. z]

-- FIXME: Specialise enumFromThenTo.
-- Conversions
-- -----------

-- | Convert a 'Stream' to a list
toList :: Monad m => Stream m a -> m [a]
{-# INLINE toList #-}
toList = foldr (:) []

-- | Convert a list to a 'Stream'
fromList :: Monad m => [a] -> Stream m a
{-# INLINE fromList #-}
fromList xs = Stream step xs
  where
    step (x:xs) = return (Yield x xs)
    step [] = return Done

-- | Convert the first @n@ elements of a list to a 'Stream'
fromListN :: Monad m => Int -> [a] -> Stream m a
{-# INLINE_FUSED fromListN #-}
fromListN n xs = Stream step (xs,n)
  where
    {-# INLINE_INNER step #-}
    -- The count is tested before the list is examined, so at most @n@
    -- elements are forced even for an infinite list.
    step (xs,n) | n <= 0 = return Done
    step (x:xs,n) = return (Yield x (xs,n-1))
    step ([],n) = return Done
{-
fromVector :: (Monad m, Vector v a) => v a -> Stream m a
{-# INLINE_FUSED fromVector #-}
fromVector v = v `seq` n `seq` Stream (Unf step 0)
(Unf vstep True)
(Just v)
(Exact n)
where
n = basicLength v
{-# INLINE step #-}
step i | i >= n = return Done
| otherwise = case basicUnsafeIndexM v i of
Box x -> return $ Yield x (i+1)
{-# INLINE vstep #-}
vstep True = return (Yield (Chunk (basicLength v) (\mv -> basicUnsafeCopy mv v)) False)
vstep False = return Done
fromVectors :: forall m a. (Monad m, Vector v a) => [v a] -> Stream m a
{-# INLINE_FUSED fromVectors #-}
fromVectors vs = Stream (Unf pstep (Left vs))
(Unf vstep vs)
Nothing
(Exact n)
where
n = List.foldl' (\k v -> k + basicLength v) 0 vs
pstep (Left []) = return Done
pstep (Left (v:vs)) = basicLength v `seq` return (Skip (Right (v,0,vs)))
pstep (Right (v,i,vs))
| i >= basicLength v = return $ Skip (Left vs)
| otherwise = case basicUnsafeIndexM v i of
Box x -> return $ Yield x (Right (v,i+1,vs))
-- FIXME: work around bug in GHC 7.6.1
vstep :: [v a] -> m (Step [v a] (Chunk v a))
vstep [] = return Done
vstep (v:vs) = return $ Yield (Chunk (basicLength v)
(\mv -> INTERNAL_CHECK(check) "concatVectors" "length mismatch"
(M.basicLength mv == basicLength v)
$ basicUnsafeCopy mv v)) vs
concatVectors :: (Monad m, Vector v a) => Stream m (v a) -> Stream m a
{-# INLINE_FUSED concatVectors #-}
concatVectors (Stream step s)
= Stream (Unf pstep (Left s))
(Unf vstep s)
Nothing
Unknown
where
pstep (Left s) = do
r <- step s
case r of
Yield v s' -> basicLength v `seq` return (Skip (Right (v,0,s')))
Skip s' -> return (Skip (Left s'))
Done -> return Done
pstep (Right (v,i,s))
| i >= basicLength v = return (Skip (Left s))
| otherwise = case basicUnsafeIndexM v i of
Box x -> return (Yield x (Right (v,i+1,s)))
vstep s = do
r <- step s
case r of
Yield v s' -> return (Yield (Chunk (basicLength v)
(\mv -> INTERNAL_CHECK(check) "concatVectors" "length mismatch"
(M.basicLength mv == basicLength v)
$ basicUnsafeCopy mv v)) s')
Skip s' -> return (Skip s')
Done -> return Done
reVector :: Monad m => Stream m a -> Stream m a
{-# INLINE_FUSED reVector #-}
reVector (Stream step s, sSize = n} = Stream step s n
{-# RULES
"reVector [Vector]"
reVector = id
"reVector/reVector [Vector]" forall s.
reVector (reVector s) = s
#-}
-}
|
hvr/vector
|
Data/Vector/Fusion/Stream/Monadic.hs
|
bsd-3-clause
| 52,317
| 0
| 20
| 19,453
| 15,858
| 7,987
| 7,871
| 890
| 9
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UndecidableInstances #-}
-------------------------------------------------------------------------------
-- |
-- Module : Database.Bloodhound.Types
-- Copyright : (C) 2014 Chris Allen
-- License : BSD-style (see the file LICENSE)
-- Maintainer  :  Chris Allen <cma@bitemyapp.com>
-- Stability : provisional
-- Portability : DeriveGeneric, RecordWildCards
--
-- Data types for describing actions and data structures performed to interact
-- with Elasticsearch. The two main buckets your queries against Elasticsearch
-- will fall into are 'Query's and 'Filter's. 'Filter's are more like
-- traditional database constraints and often have preferable performance
-- properties. 'Query's support human-written textual queries, such as fuzzy
-- queries.
-------------------------------------------------------------------------------
module Database.Bloodhound.Types
( defaultCache
, defaultIndexSettings
, mkSort
, showText
, unpackId
, mkMatchQuery
, mkMultiMatchQuery
, mkBoolQuery
, mkRangeQuery
, mkQueryStringQuery
, mkAggregations
, mkTermsAggregation
, mkTermsScriptAggregation
, mkDateHistogram
, toTerms
, toDateHistogram
, omitNulls
, BH
, runBH
, BHEnv(..)
, MonadBH(..)
, Version(..)
, Status(..)
, Existence(..)
, NullValue(..)
, IndexSettings(..)
, Server(..)
, Reply
, EsResult(..)
, Query(..)
, Search(..)
, SearchResult(..)
, SearchHits(..)
, TrackSortScores
, From(..)
, Size(..)
, Source(..)
, PatternOrPatterns(..)
, Include(..)
, Exclude(..)
, Pattern(..)
, ShardResult(..)
, Hit(..)
, Filter(..)
, Seminearring(..)
, BoolMatch(..)
, Term(..)
, GeoPoint(..)
, GeoBoundingBoxConstraint(..)
, GeoBoundingBox(..)
, GeoFilterType(..)
, Distance(..)
, DistanceUnit(..)
, DistanceType(..)
, DistanceRange(..)
, OptimizeBbox(..)
, LatLon(..)
, RangeValue(..)
, RangeExecution(..)
, LessThan(..)
, LessThanEq(..)
, GreaterThan(..)
, GreaterThanEq(..)
, LessThanD(..)
, LessThanEqD(..)
, GreaterThanD(..)
, GreaterThanEqD(..)
, Regexp(..)
, RegexpFlags(..)
, RegexpFlag(..)
, FieldName(..)
, IndexName(..)
, MappingName(..)
, DocId(..)
, CacheName(..)
, CacheKey(..)
, BulkOperation(..)
, ReplicaCount(..)
, ShardCount(..)
, Sort
, SortMode(..)
, SortOrder(..)
, SortSpec(..)
, DefaultSort(..)
, Missing(..)
, OpenCloseIndex(..)
, Method
, Boost(..)
, MatchQuery(..)
, MultiMatchQuery(..)
, BoolQuery(..)
, BoostingQuery(..)
, CommonTermsQuery(..)
, DisMaxQuery(..)
, FilteredQuery(..)
, FuzzyLikeThisQuery(..)
, FuzzyLikeFieldQuery(..)
, FuzzyQuery(..)
, HasChildQuery(..)
, HasParentQuery(..)
, IndicesQuery(..)
, MoreLikeThisQuery(..)
, MoreLikeThisFieldQuery(..)
, NestedQuery(..)
, PrefixQuery(..)
, QueryStringQuery(..)
, SimpleQueryStringQuery(..)
, RangeQuery(..)
, RegexpQuery(..)
, QueryString(..)
, BooleanOperator(..)
, ZeroTermsQuery(..)
, CutoffFrequency(..)
, Analyzer(..)
, MaxExpansions(..)
, Lenient(..)
, MatchQueryType(..)
, MultiMatchQueryType(..)
, Tiebreaker(..)
, MinimumMatch(..)
, DisableCoord(..)
, CommonMinimumMatch(..)
, MinimumMatchHighLow(..)
, PrefixLength(..)
, Fuzziness(..)
, IgnoreTermFrequency(..)
, MaxQueryTerms(..)
, ScoreType(..)
, Score
, Cache
, TypeName(..)
, BoostTerms(..)
, MaxWordLength(..)
, MinWordLength(..)
, MaxDocFrequency(..)
, MinDocFrequency(..)
, PhraseSlop(..)
, StopWord(..)
, QueryPath(..)
, MinimumTermFrequency(..)
, PercentMatch(..)
, FieldDefinition(..)
, MappingField(..)
, Mapping(..)
, AllowLeadingWildcard(..)
, LowercaseExpanded(..)
, GeneratePhraseQueries(..)
, Locale(..)
, AnalyzeWildcard(..)
, EnablePositionIncrements(..)
, SimpleQueryFlag(..)
, FieldOrFields(..)
, Monoid(..)
, ToJSON(..)
, Interval(..)
, TimeInterval(..)
, ExecutionHint(..)
, CollectionMode(..)
, TermOrder(..)
, TermInclusion(..)
, Aggregation(..)
, Aggregations
, AggregationResults
, Bucket(..)
, BucketAggregation(..)
, TermsAggregation(..)
, DateHistogramAggregation(..)
, Highlights(..)
, FieldHighlight(..)
, HighlightSettings(..)
, PlainHighlight(..)
, PostingsHighlight(..)
, FastVectorHighlight(..)
, CommonHighlight(..)
, NonPostings(..)
, HighlightEncoder(..)
, HighlightTag(..)
, HitHighlight
, TermsResult(..)
, DateHistogramResult(..)
) where
import Control.Applicative
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer
import Data.Aeson
import Data.Aeson.Types (Pair, emptyObject, parseMaybe)
import qualified Data.ByteString.Lazy.Char8 as L
import Data.List (nub)
import Data.List.NonEmpty (NonEmpty (..), toList)
import qualified Data.Map.Strict as M
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Clock (UTCTime)
import qualified Data.Vector as V
import GHC.Generics (Generic)
import Network.HTTP.Client
import qualified Network.HTTP.Types.Method as NHTM
import Database.Bloodhound.Types.Class
-- $setup
-- >>> :set -XOverloadedStrings
-- >>> import Database.Bloodhound
-- >>> let testServer = (Server "http://localhost:9200")
-- >>> let testIndex = IndexName "twitter"
-- >>> let testMapping = MappingName "tweet"
-- >>> let defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2)
-- defaultIndexSettings is exported by Database.Bloodhound as well
-- no trailing slashes in servers, library handles building the path.
{-| Common environment for Elasticsearch calls. Connections will be
pipelined according to the provided HTTP connection manager.
-}
data BHEnv = BHEnv
  { bhServer  :: Server  -- ^ root URL of the Elasticsearch server
  , bhManager :: Manager -- ^ HTTP connection manager; requests are pipelined through it
  }
{-| All API calls to Elasticsearch operate within
MonadBH. The idea is that it can be easily embedded in your
own monad transformer stack. A default instance for a ReaderT and
alias 'BH' is provided for the simple case.
-}
class (Functor m, Applicative m, MonadIO m) => MonadBH m where
  -- | Fetch the ambient 'BHEnv' (server location plus HTTP manager)
  -- from whatever monad stack the caller is running in.
  getBHEnv :: m BHEnv
-- | 'BH' is the default concrete carrier for 'MonadBH': a thin wrapper
-- around @ReaderT BHEnv@ that threads the Elasticsearch environment and
-- passes every other capability through to the underlying monad via
-- GeneralizedNewtypeDeriving.
newtype BH m a = BH { unBH :: ReaderT BHEnv m a }
  deriving ( Functor
           , Applicative
           , Monad
           , MonadIO
           , MonadState s
           , MonadWriter w
           , MonadError e
           , Alternative
           , MonadPlus
           , MonadFix)
-- | Lift a computation from the underlying monad into 'BH'.
instance MonadTrans BH where
  lift m = BH (lift m)
-- | Pass 'ask'/'local' through to the underlying monad's own
-- 'MonadReader' instance, rather than the 'BHEnv' reader layer that
-- 'BH' itself adds.
instance (MonadReader r m) => MonadReader r (BH m) where
  ask = lift ask
  local f (BH m) = BH (mapReaderT (local f) m)
instance (Functor m, Applicative m, MonadIO m) => MonadBH (BH m) where
  -- 'BH' is a @ReaderT BHEnv@ under the hood, so the environment is
  -- simply the reader context.
  getBHEnv = BH ask
-- The trivial instance: in a @ReaderT BHEnv@ stack the environment is
-- exactly the reader context.
instance (Functor m, Applicative m, MonadIO m) => MonadBH (ReaderT BHEnv m) where
  getBHEnv = ask
-- | Run a 'BH' action against the given environment, unwrapping it
-- down to the underlying monad.
runBH :: BHEnv -> BH m a -> m a
runBH env (BH m) = runReaderT m env
{-| 'Version' is embedded in 'Status' -}
data Version = Version { number :: Text
, build_hash :: Text
, build_timestamp :: UTCTime
, build_snapshot :: Bool
, lucene_version :: Text } deriving (Eq, Show, Generic)
{-| 'Status' is a data type for describing the JSON body returned by
Elasticsearch when you query its status. This was deprecated in 1.2.0.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-status.html#indices-status>
-}
data Status = Status { ok :: Maybe Bool
, status :: Int
, name :: Text
, version :: Version
, tagline :: Text } deriving (Eq, Show)
{-| 'IndexSettings' is used to configure the shards and replicas when you create
an Elasticsearch Index.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-create-index.html>
-}
data IndexSettings =
IndexSettings { indexShards :: ShardCount
, indexReplicas :: ReplicaCount } deriving (Eq, Show)
{-| 'defaultIndexSettings' is an 'IndexSettings' with 3 shards and 2 replicas. -}
defaultIndexSettings :: IndexSettings
defaultIndexSettings =
  IndexSettings { indexShards   = ShardCount 3
                , indexReplicas = ReplicaCount 2 }
{-| 'Reply' and 'Method' are type synonyms from 'Network.HTTP.Types.Method.Method' -}
type Reply = Network.HTTP.Client.Response L.ByteString
type Method = NHTM.Method
{-| 'OpenCloseIndex' is a sum type for opening and closing indices.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-open-close.html>
-}
data OpenCloseIndex = OpenIndex | CloseIndex deriving (Eq, Show)
data FieldType = GeoPointType
| GeoShapeType
| FloatType
| IntegerType
| LongType
| ShortType
| ByteType deriving (Eq, Show)
data FieldDefinition =
FieldDefinition { fieldType :: FieldType } deriving (Eq, Show)
data MappingField =
MappingField { mappingFieldName :: FieldName
, fieldDefinition :: FieldDefinition } deriving (Eq, Show)
{-| Support for type reification of 'Mapping's is currently incomplete, for
now the mapping API verbiage expects a 'ToJSON'able blob.
Indexes have mappings, mappings are schemas for the documents contained in the
index. I'd recommend having only one mapping per index, always having a mapping,
and keeping different kinds of documents separated if possible.
-}
data Mapping = Mapping { typeName :: TypeName
, mappingFields :: [MappingField] } deriving (Eq, Show)
{-| 'BulkOperation' is a sum type for expressing the four kinds of bulk
operation index, create, delete, and update. 'BulkIndex' behaves like an
"upsert", 'BulkCreate' will fail if a document already exists at the DocId.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/docs-bulk.html#docs-bulk>
-}
data BulkOperation =
BulkIndex IndexName MappingName DocId Value
| BulkCreate IndexName MappingName DocId Value
| BulkDelete IndexName MappingName DocId
| BulkUpdate IndexName MappingName DocId Value deriving (Eq, Show)
{-| 'EsResult' describes the standard wrapper JSON document that you see in
successful Elasticsearch responses.
-}
data EsResult a = EsResult { _index :: Text
, _type :: Text
, _id :: Text
, _version :: Int
, found :: Maybe Bool
, _source :: a } deriving (Eq, Show)
{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order
dependent with later sorts acting as tie-breakers for earlier sorts.
-}
type Sort = [SortSpec]
{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and
'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and
'DistanceUnit' to express "nearness" to a single geographical point as a
sort specification.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortSpec = DefaultSortSpec DefaultSort
| GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Show)
{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a
'mkSort' convenience function for when you want to specify only the most
common parameters.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data DefaultSort =
DefaultSort { sortFieldName :: FieldName
, sortOrder :: SortOrder
-- default False
, ignoreUnmapped :: Bool
, sortMode :: Maybe SortMode
, missingSort :: Maybe Missing
, nestedFilter :: Maybe Filter } deriving (Eq, Show)
{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get
encoded into "asc" or "desc" when turned into JSON.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortOrder = Ascending
| Descending deriving (Eq, Show)
{-| 'Missing' prescribes how to handle missing fields. A missing field can be
sorted last, first, or using a custom value as a substitute.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_missing_values>
-}
data Missing = LastMissing
| FirstMissing
| CustomMissing Text deriving (Eq, Show)
{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields.
http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option
-}
data SortMode = SortMin
| SortMax
| SortSum
| SortAvg deriving (Eq, Show)
{-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so
    that you can concisely describe the usual kind of 'SortSpec's you want.
    Unmapped fields are not ignored; mode, missing-value handling, and the
    nested filter are all left unset.
-}
mkSort :: FieldName -> SortOrder -> DefaultSort
mkSort fieldName sOrder =
  DefaultSort { sortFieldName  = fieldName
              , sortOrder      = sOrder
              , ignoreUnmapped = False
              , sortMode       = Nothing
              , missingSort    = Nothing
              , nestedFilter   = Nothing }
{-| 'Cache' is for telling ES whether it should cache a 'Filter' not.
'Query's cannot be cached.
-}
type Cache = Bool -- caching on/off
-- | Filter caching is off by default.
defaultCache :: Cache
defaultCache = False
{-| 'PrefixValue' is used in 'PrefixQuery' as the main query component.
-}
type PrefixValue = Text
{-| 'BooleanOperator' is the usual And/Or operators with an ES compatible
JSON encoding baked in. Used all over the place.
-}
data BooleanOperator = And | Or deriving (Eq, Show)
{-| 'ShardCount' is part of 'IndexSettings'
-}
newtype ShardCount = ShardCount Int deriving (Eq, Show, Generic)
{-| 'ReplicaCount' is part of 'IndexSettings'
-}
newtype ReplicaCount = ReplicaCount Int deriving (Eq, Show, Generic)
{-| 'Server' is used with the client functions to point at the ES instance
-}
newtype Server = Server Text deriving (Eq, Show)
{-| 'IndexName' is used to describe which index to query/create/delete
-}
newtype IndexName = IndexName Text deriving (Eq, Generic, Show)
{-| 'MappingName' is part of mappings which are how ES describes and schematizes
the data in the indices.
-}
newtype MappingName = MappingName Text deriving (Eq, Generic, Show)
{-| 'DocId' is a generic wrapper value for expressing unique Document IDs.
Can be set by the user or created by ES itself. Often used in client
functions for poking at specific documents.
-}
newtype DocId = DocId Text deriving (Eq, Generic, Show)
{-| 'QueryString' is used to wrap query text bodies, be they human written or not.
-}
newtype QueryString = QueryString Text deriving (Eq, Generic, Show)
{-| 'FieldName' is used all over the place wherever a specific field within
a document needs to be specified, usually in 'Query's or 'Filter's.
-}
newtype FieldName = FieldName Text deriving (Eq, Show)
{-| 'CacheName' is used in 'RegexpFilter' for describing the
'CacheKey' keyed caching behavior.
-}
newtype CacheName = CacheName Text deriving (Eq, Show)
{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching.
-}
newtype CacheKey =
CacheKey Text deriving (Eq, Show)
newtype Existence =
Existence Bool deriving (Eq, Show)
newtype NullValue =
NullValue Bool deriving (Eq, Show)
newtype CutoffFrequency =
CutoffFrequency Double deriving (Eq, Show, Generic)
newtype Analyzer =
Analyzer Text deriving (Eq, Show, Generic)
newtype MaxExpansions =
MaxExpansions Int deriving (Eq, Show, Generic)
{-| 'Lenient', if set to true, will cause format based failures to be
ignored. I don't know what the bloody default is, Elasticsearch
documentation didn't say what it was. Let me know if you figure it out.
-}
newtype Lenient =
Lenient Bool deriving (Eq, Show, Generic)
newtype Tiebreaker =
Tiebreaker Double deriving (Eq, Show, Generic)
newtype Boost =
Boost Double deriving (Eq, Show, Generic)
newtype BoostTerms =
BoostTerms Double deriving (Eq, Show, Generic)
{-| 'MinimumMatch' controls how many should clauses in the bool query should
match. Can be an absolute value (2) or a percentage (30%) or a
combination of both.
-}
newtype MinimumMatch =
MinimumMatch Int deriving (Eq, Show, Generic)
newtype MinimumMatchText =
MinimumMatchText Text deriving (Eq, Show)
newtype DisableCoord =
DisableCoord Bool deriving (Eq, Show, Generic)
newtype IgnoreTermFrequency =
IgnoreTermFrequency Bool deriving (Eq, Show, Generic)
newtype MinimumTermFrequency =
MinimumTermFrequency Int deriving (Eq, Show, Generic)
newtype MaxQueryTerms =
MaxQueryTerms Int deriving (Eq, Show, Generic)
newtype Fuzziness =
Fuzziness Double deriving (Eq, Show, Generic)
{-| 'PrefixLength' is the prefix length used in queries, defaults to 0. -}
newtype PrefixLength =
PrefixLength Int deriving (Eq, Show, Generic)
newtype TypeName =
TypeName Text deriving (Eq, Show, Generic)
newtype PercentMatch =
PercentMatch Double deriving (Eq, Show, Generic)
newtype StopWord =
StopWord Text deriving (Eq, Show, Generic)
newtype QueryPath =
QueryPath Text deriving (Eq, Show, Generic)
{-| Allowing a wildcard at the beginning of a word (eg "*ing") is particularly
heavy, because all terms in the index need to be examined, just in case
they match. Leading wildcards can be disabled by setting
'AllowLeadingWildcard' to false. -}
newtype AllowLeadingWildcard =
AllowLeadingWildcard Bool deriving (Eq, Show, Generic)
newtype LowercaseExpanded =
LowercaseExpanded Bool deriving (Eq, Show, Generic)
newtype EnablePositionIncrements =
EnablePositionIncrements Bool deriving (Eq, Show, Generic)
{-| By default, wildcard terms in a query are not analyzed.
Setting 'AnalyzeWildcard' to true enables best-effort analysis.
-}
newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Show, Generic)
{-| 'GeneratePhraseQueries' defaults to false.
-}
newtype GeneratePhraseQueries =
GeneratePhraseQueries Bool deriving (Eq, Show, Generic)
{-| 'Locale' is used for string conversions - defaults to ROOT.
-}
newtype Locale = Locale Text deriving (Eq, Show, Generic)
newtype MaxWordLength = MaxWordLength Int deriving (Eq, Show, Generic)
newtype MinWordLength = MinWordLength Int deriving (Eq, Show, Generic)
{-| 'PhraseSlop' sets the default slop for phrases, 0 means exact
phrase matches. Default is 0.
-}
newtype PhraseSlop = PhraseSlop Int deriving (Eq, Show, Generic)
newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Show, Generic)
newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, Generic)
{-| 'unpackId' extracts the raw 'Text' from a 'DocId'.
-}
unpackId :: DocId -> Text
unpackId (DocId t) = t
type TrackSortScores = Bool
newtype From = From Int deriving (Eq, Show, ToJSON)
newtype Size = Size Int deriving (Eq, Show, ToJSON)
data Search = Search { queryBody :: Maybe Query
, filterBody :: Maybe Filter
, sortBody :: Maybe Sort
, aggBody :: Maybe Aggregations
, highlight :: Maybe Highlights
-- default False
, trackSortScores :: TrackSortScores
, from :: From
, size :: Size
, source :: Maybe Source } deriving (Eq, Show)
data Source =
NoSource
| SourcePatterns PatternOrPatterns
| SourceIncludeExclude Include Exclude
deriving (Show, Eq)
data PatternOrPatterns = PopPattern Pattern
| PopPatterns [Pattern] deriving (Eq, Show)
data Include = Include [Pattern] deriving (Eq, Show)
data Exclude = Exclude [Pattern] deriving (Eq, Show)
newtype Pattern = Pattern Text deriving (Eq, Show)
data Highlights = Highlights { globalsettings :: Maybe HighlightSettings
, highlightFields :: [FieldHighlight]
} deriving (Show, Eq)
data FieldHighlight = FieldHighlight FieldName (Maybe HighlightSettings)
deriving (Show, Eq)
data HighlightSettings = Plain PlainHighlight
| Postings PostingsHighlight
| FastVector FastVectorHighlight
deriving (Show, Eq)
data PlainHighlight =
PlainHighlight { plainCommon :: Maybe CommonHighlight
, plainNonPost :: Maybe NonPostings } deriving (Show, Eq)
-- This requires that index_options are set to 'offset' in the mapping.
data PostingsHighlight = PostingsHighlight (Maybe CommonHighlight) deriving (Show, Eq)
-- This requires that term_vector is set to 'with_positions_offsets' in the mapping.
data FastVectorHighlight =
FastVectorHighlight { fvCommon :: Maybe CommonHighlight
, fvNonPostSettings :: Maybe NonPostings
, boundaryChars :: Maybe Text
, boundaryMaxScan :: Maybe Int
, fragmentOffset :: Maybe Int
, matchedFields :: [Text]
, phraseLimit :: Maybe Int
} deriving (Show, Eq)
data CommonHighlight =
CommonHighlight { order :: Maybe Text
, forceSource :: Maybe Bool
, tag :: Maybe HighlightTag
, encoder :: Maybe HighlightEncoder
, noMatchSize :: Maybe Int
, highlightQuery :: Maybe Query
, requireFieldMatch :: Maybe Bool
} deriving (Show, Eq)
-- Settings that are only applicable to FastVector and Plain highlighters.
data NonPostings =
NonPostings { fragmentSize :: Maybe Int
, numberOfFragments :: Maybe Int} deriving (Show, Eq)
data HighlightEncoder = DefaultEncoder
| HTMLEncoder
deriving (Show, Eq)
-- NOTE: Should the tags use some kind of HTML type, rather than Text?
data HighlightTag = TagSchema Text
| CustomTags ([Text], [Text]) -- Only uses more than the first value in the lists if fvh
deriving (Show, Eq)
data Query =
TermQuery Term (Maybe Boost)
| TermsQuery (NonEmpty Term)
| QueryMatchQuery MatchQuery
| QueryMultiMatchQuery MultiMatchQuery
| QueryBoolQuery BoolQuery
| QueryBoostingQuery BoostingQuery
| QueryCommonTermsQuery CommonTermsQuery
| ConstantScoreFilter Filter Boost
| ConstantScoreQuery Query Boost
| QueryDisMaxQuery DisMaxQuery
| QueryFilteredQuery FilteredQuery
| QueryFuzzyLikeThisQuery FuzzyLikeThisQuery
| QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery
| QueryFuzzyQuery FuzzyQuery
| QueryHasChildQuery HasChildQuery
| QueryHasParentQuery HasParentQuery
| IdsQuery MappingName [DocId]
| QueryIndicesQuery IndicesQuery
| MatchAllQuery (Maybe Boost)
| QueryMoreLikeThisQuery MoreLikeThisQuery
| QueryMoreLikeThisFieldQuery MoreLikeThisFieldQuery
| QueryNestedQuery NestedQuery
| QueryPrefixQuery PrefixQuery
| QueryQueryStringQuery QueryStringQuery
| QuerySimpleQueryStringQuery SimpleQueryStringQuery
| QueryRangeQuery RangeQuery
| QueryRegexpQuery RegexpQuery
deriving (Eq, Show)
data RegexpQuery =
RegexpQuery { regexpQueryField :: FieldName
, regexpQuery :: Regexp
, regexpQueryFlags :: RegexpFlags
, regexpQueryBoost :: Maybe Boost
} deriving (Eq, Show)
data RangeQuery =
RangeQuery { rangeQueryField :: FieldName
, rangeQueryRange :: RangeValue
, rangeQueryBoost :: Boost } deriving (Eq, Show)
-- | 'mkRangeQuery' builds a 'RangeQuery' over the given field and
-- range, defaulting the boost to 1.0.
mkRangeQuery :: FieldName -> RangeValue -> RangeQuery
mkRangeQuery f r =
  RangeQuery { rangeQueryField = f
             , rangeQueryRange = r
             , rangeQueryBoost = Boost 1.0 }
data SimpleQueryStringQuery =
SimpleQueryStringQuery
{ simpleQueryStringQuery :: QueryString
, simpleQueryStringField :: Maybe FieldOrFields
, simpleQueryStringOperator :: Maybe BooleanOperator
, simpleQueryStringAnalyzer :: Maybe Analyzer
, simpleQueryStringFlags :: Maybe [SimpleQueryFlag]
, simpleQueryStringLowercaseExpanded :: Maybe LowercaseExpanded
, simpleQueryStringLocale :: Maybe Locale
} deriving (Eq, Show)
data SimpleQueryFlag =
SimpleQueryAll
| SimpleQueryNone
| SimpleQueryAnd
| SimpleQueryOr
| SimpleQueryPrefix
| SimpleQueryPhrase
| SimpleQueryPrecedence
| SimpleQueryEscape
| SimpleQueryWhitespace
| SimpleQueryFuzzy
| SimpleQueryNear
| SimpleQuerySlop deriving (Eq, Show)
-- use_dis_max and tie_breaker when fields are plural?
data QueryStringQuery =
QueryStringQuery
{ queryStringQuery :: QueryString
, queryStringDefaultField :: Maybe FieldName
, queryStringOperator :: Maybe BooleanOperator
, queryStringAnalyzer :: Maybe Analyzer
, queryStringAllowLeadingWildcard :: Maybe AllowLeadingWildcard
, queryStringLowercaseExpanded :: Maybe LowercaseExpanded
, queryStringEnablePositionIncrements :: Maybe EnablePositionIncrements
, queryStringFuzzyMaxExpansions :: Maybe MaxExpansions
, queryStringFuzziness :: Maybe Fuzziness
, queryStringFuzzyPrefixLength :: Maybe PrefixLength
, queryStringPhraseSlop :: Maybe PhraseSlop
, queryStringBoost :: Maybe Boost
, queryStringAnalyzeWildcard :: Maybe AnalyzeWildcard
, queryStringGeneratePhraseQueries :: Maybe GeneratePhraseQueries
, queryStringMinimumShouldMatch :: Maybe MinimumMatch
, queryStringLenient :: Maybe Lenient
, queryStringLocale :: Maybe Locale
} deriving (Eq, Show)
-- | 'mkQueryStringQuery' builds a 'QueryStringQuery' from just the
-- 'QueryString', leaving every optional parameter unset ('Nothing').
-- Record syntax makes the seventeen defaults explicit by name.
mkQueryStringQuery :: QueryString -> QueryStringQuery
mkQueryStringQuery qs =
  QueryStringQuery { queryStringQuery                    = qs
                   , queryStringDefaultField             = Nothing
                   , queryStringOperator                 = Nothing
                   , queryStringAnalyzer                 = Nothing
                   , queryStringAllowLeadingWildcard     = Nothing
                   , queryStringLowercaseExpanded        = Nothing
                   , queryStringEnablePositionIncrements = Nothing
                   , queryStringFuzzyMaxExpansions       = Nothing
                   , queryStringFuzziness                = Nothing
                   , queryStringFuzzyPrefixLength        = Nothing
                   , queryStringPhraseSlop               = Nothing
                   , queryStringBoost                    = Nothing
                   , queryStringAnalyzeWildcard          = Nothing
                   , queryStringGeneratePhraseQueries    = Nothing
                   , queryStringMinimumShouldMatch       = Nothing
                   , queryStringLenient                  = Nothing
                   , queryStringLocale                   = Nothing }
data FieldOrFields = FofField FieldName
| FofFields [FieldName] deriving (Eq, Show)
data PrefixQuery =
PrefixQuery
{ prefixQueryField :: FieldName
, prefixQueryPrefixValue :: Text
, prefixQueryBoost :: Maybe Boost } deriving (Eq, Show)
data NestedQuery =
NestedQuery
{ nestedQueryPath :: QueryPath
, nestedQueryScoreType :: ScoreType
, nestedQuery :: Query } deriving (Eq, Show)
data MoreLikeThisFieldQuery =
MoreLikeThisFieldQuery
{ moreLikeThisFieldText :: Text
, moreLikeThisFieldFields :: FieldName
-- default 0.3 (30%)
, moreLikeThisFieldPercentMatch :: Maybe PercentMatch
, moreLikeThisFieldMinimumTermFreq :: Maybe MinimumTermFrequency
, moreLikeThisFieldMaxQueryTerms :: Maybe MaxQueryTerms
, moreLikeThisFieldStopWords :: Maybe [StopWord]
, moreLikeThisFieldMinDocFrequency :: Maybe MinDocFrequency
, moreLikeThisFieldMaxDocFrequency :: Maybe MaxDocFrequency
, moreLikeThisFieldMinWordLength :: Maybe MinWordLength
, moreLikeThisFieldMaxWordLength :: Maybe MaxWordLength
, moreLikeThisFieldBoostTerms :: Maybe BoostTerms
, moreLikeThisFieldBoost :: Maybe Boost
, moreLikeThisFieldAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
data MoreLikeThisQuery =
MoreLikeThisQuery
{ moreLikeThisText :: Text
, moreLikeThisFields :: Maybe [FieldName]
-- default 0.3 (30%)
, moreLikeThisPercentMatch :: Maybe PercentMatch
, moreLikeThisMinimumTermFreq :: Maybe MinimumTermFrequency
, moreLikeThisMaxQueryTerms :: Maybe MaxQueryTerms
, moreLikeThisStopWords :: Maybe [StopWord]
, moreLikeThisMinDocFrequency :: Maybe MinDocFrequency
, moreLikeThisMaxDocFrequency :: Maybe MaxDocFrequency
, moreLikeThisMinWordLength :: Maybe MinWordLength
, moreLikeThisMaxWordLength :: Maybe MaxWordLength
, moreLikeThisBoostTerms :: Maybe BoostTerms
, moreLikeThisBoost :: Maybe Boost
, moreLikeThisAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
data IndicesQuery =
IndicesQuery
{ indicesQueryIndices :: [IndexName]
, indicesQuery :: Query
-- default "all"
, indicesQueryNoMatch :: Maybe Query } deriving (Eq, Show)
data HasParentQuery =
HasParentQuery
{ hasParentQueryType :: TypeName
, hasParentQuery :: Query
, hasParentQueryScoreType :: Maybe ScoreType } deriving (Eq, Show)
data HasChildQuery =
HasChildQuery
{ hasChildQueryType :: TypeName
, hasChildQuery :: Query
, hasChildQueryScoreType :: Maybe ScoreType } deriving (Eq, Show)
data ScoreType =
ScoreTypeMax
| ScoreTypeSum
| ScoreTypeAvg
| ScoreTypeNone deriving (Eq, Show)
data FuzzyQuery =
FuzzyQuery { fuzzyQueryField :: FieldName
, fuzzyQueryValue :: Text
, fuzzyQueryPrefixLength :: PrefixLength
, fuzzyQueryMaxExpansions :: MaxExpansions
, fuzzyQueryFuzziness :: Fuzziness
, fuzzyQueryBoost :: Maybe Boost
} deriving (Eq, Show)
data FuzzyLikeFieldQuery =
FuzzyLikeFieldQuery
{ fuzzyLikeField :: FieldName
-- anaphora is good for the soul.
, fuzzyLikeFieldText :: Text
, fuzzyLikeFieldMaxQueryTerms :: MaxQueryTerms
, fuzzyLikeFieldIgnoreTermFrequency :: IgnoreTermFrequency
, fuzzyLikeFieldFuzziness :: Fuzziness
, fuzzyLikeFieldPrefixLength :: PrefixLength
, fuzzyLikeFieldBoost :: Boost
, fuzzyLikeFieldAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
data FuzzyLikeThisQuery =
FuzzyLikeThisQuery
{ fuzzyLikeFields :: [FieldName]
, fuzzyLikeText :: Text
, fuzzyLikeMaxQueryTerms :: MaxQueryTerms
, fuzzyLikeIgnoreTermFrequency :: IgnoreTermFrequency
, fuzzyLikeFuzziness :: Fuzziness
, fuzzyLikePrefixLength :: PrefixLength
, fuzzyLikeBoost :: Boost
, fuzzyLikeAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
data FilteredQuery =
FilteredQuery
{ filteredQuery :: Query
, filteredFilter :: Filter } deriving (Eq, Show)
data DisMaxQuery =
DisMaxQuery { disMaxQueries :: [Query]
-- default 0.0
, disMaxTiebreaker :: Tiebreaker
, disMaxBoost :: Maybe Boost
} deriving (Eq, Show)
data MatchQuery =
MatchQuery { matchQueryField :: FieldName
, matchQueryQueryString :: QueryString
, matchQueryOperator :: BooleanOperator
, matchQueryZeroTerms :: ZeroTermsQuery
, matchQueryCutoffFrequency :: Maybe CutoffFrequency
, matchQueryMatchType :: Maybe MatchQueryType
, matchQueryAnalyzer :: Maybe Analyzer
, matchQueryMaxExpansions :: Maybe MaxExpansions
, matchQueryLenient :: Maybe Lenient
, matchQueryBoost :: Maybe Boost } deriving (Eq, Show)
{-| 'mkMatchQuery' is a convenience function that defaults the less common
    parameters, enabling you to provide only the 'FieldName' and
    'QueryString' to make a 'MatchQuery'. The operator defaults to 'Or'
    and zero-terms behavior to 'ZeroTermsNone'; everything else is unset.
-}
mkMatchQuery :: FieldName -> QueryString -> MatchQuery
mkMatchQuery field query =
  MatchQuery { matchQueryField           = field
             , matchQueryQueryString     = query
             , matchQueryOperator        = Or
             , matchQueryZeroTerms       = ZeroTermsNone
             , matchQueryCutoffFrequency = Nothing
             , matchQueryMatchType       = Nothing
             , matchQueryAnalyzer        = Nothing
             , matchQueryMaxExpansions   = Nothing
             , matchQueryLenient         = Nothing
             , matchQueryBoost           = Nothing }
data MatchQueryType =
MatchPhrase
| MatchPhrasePrefix deriving (Eq, Show)
data MultiMatchQuery =
MultiMatchQuery { multiMatchQueryFields :: [FieldName]
, multiMatchQueryString :: QueryString
, multiMatchQueryOperator :: BooleanOperator
, multiMatchQueryZeroTerms :: ZeroTermsQuery
, multiMatchQueryTiebreaker :: Maybe Tiebreaker
, multiMatchQueryType :: Maybe MultiMatchQueryType
, multiMatchQueryCutoffFrequency :: Maybe CutoffFrequency
, multiMatchQueryAnalyzer :: Maybe Analyzer
, multiMatchQueryMaxExpansions :: Maybe MaxExpansions
, multiMatchQueryLenient :: Maybe Lenient } deriving (Eq, Show)
{-| 'mkMultiMatchQuery' is a convenience function that defaults the less
    common parameters, enabling you to provide only the list of
    'FieldName's and 'QueryString' to make a 'MultiMatchQuery'. Like
    'mkMatchQuery' it defaults the operator to 'Or' and zero-terms
    behavior to 'ZeroTermsNone'.
-}
mkMultiMatchQuery :: [FieldName] -> QueryString -> MultiMatchQuery
mkMultiMatchQuery matchFields query =
  MultiMatchQuery { multiMatchQueryFields          = matchFields
                  , multiMatchQueryString          = query
                  , multiMatchQueryOperator        = Or
                  , multiMatchQueryZeroTerms       = ZeroTermsNone
                  , multiMatchQueryTiebreaker      = Nothing
                  , multiMatchQueryType            = Nothing
                  , multiMatchQueryCutoffFrequency = Nothing
                  , multiMatchQueryAnalyzer        = Nothing
                  , multiMatchQueryMaxExpansions   = Nothing
                  , multiMatchQueryLenient         = Nothing }
data MultiMatchQueryType =
MultiMatchBestFields
| MultiMatchMostFields
| MultiMatchCrossFields
| MultiMatchPhrase
| MultiMatchPhrasePrefix deriving (Eq, Show)
data BoolQuery =
BoolQuery { boolQueryMustMatch :: [Query]
, boolQueryMustNotMatch :: [Query]
, boolQueryShouldMatch :: [Query]
, boolQueryMinimumShouldMatch :: Maybe MinimumMatch
, boolQueryBoost :: Maybe Boost
, boolQueryDisableCoord :: Maybe DisableCoord
} deriving (Eq, Show)
-- | 'mkBoolQuery' builds a 'BoolQuery' from must / must-not / should
-- clause lists, leaving minimum-should-match, boost, and disable-coord
-- unset.
mkBoolQuery :: [Query] -> [Query] -> [Query] -> BoolQuery
mkBoolQuery must mustNot should =
  BoolQuery { boolQueryMustMatch          = must
            , boolQueryMustNotMatch       = mustNot
            , boolQueryShouldMatch        = should
            , boolQueryMinimumShouldMatch = Nothing
            , boolQueryBoost              = Nothing
            , boolQueryDisableCoord       = Nothing }
data BoostingQuery =
BoostingQuery { positiveQuery :: Query
, negativeQuery :: Query
, negativeBoost :: Boost } deriving (Eq, Show)
data CommonTermsQuery =
CommonTermsQuery { commonField :: FieldName
, commonQuery :: QueryString
, commonCutoffFrequency :: CutoffFrequency
, commonLowFreqOperator :: BooleanOperator
, commonHighFreqOperator :: BooleanOperator
, commonMinimumShouldMatch :: Maybe CommonMinimumMatch
, commonBoost :: Maybe Boost
, commonAnalyzer :: Maybe Analyzer
, commonDisableCoord :: Maybe DisableCoord
} deriving (Eq, Show)
data CommonMinimumMatch =
CommonMinimumMatchHighLow MinimumMatchHighLow
| CommonMinimumMatch MinimumMatch
deriving (Eq, Show)
data MinimumMatchHighLow =
MinimumMatchHighLow { lowFreq :: MinimumMatch
, highFreq :: MinimumMatch } deriving (Eq, Show)
data Filter = AndFilter [Filter] Cache
| OrFilter [Filter] Cache
| NotFilter Filter Cache
| IdentityFilter
| BoolFilter BoolMatch
| ExistsFilter FieldName -- always cached
| GeoBoundingBoxFilter GeoBoundingBoxConstraint
| GeoDistanceFilter GeoPoint Distance DistanceType OptimizeBbox Cache
| GeoDistanceRangeFilter GeoPoint DistanceRange
| GeoPolygonFilter FieldName [LatLon]
| IdsFilter MappingName [DocId]
| LimitFilter Int
| MissingFilter FieldName Existence NullValue
| PrefixFilter FieldName PrefixValue Cache
| RangeFilter FieldName RangeValue RangeExecution Cache
| RegexpFilter FieldName Regexp RegexpFlags CacheName Cache CacheKey
| TermFilter Term Cache
deriving (Eq, Show)
data ZeroTermsQuery = ZeroTermsNone
| ZeroTermsAll deriving (Eq, Show)
data RangeExecution = RangeExecutionIndex
| RangeExecutionFielddata deriving (Eq, Show)
newtype Regexp = Regexp Text deriving (Eq, Show)
data RegexpFlags = AllRegexpFlags
| NoRegexpFlags
| SomeRegexpFlags (NonEmpty RegexpFlag) deriving (Eq, Show)
data RegexpFlag = AnyString
| Automaton
| Complement
| Empty
| Intersection
| Interval deriving (Eq, Show)
newtype LessThan = LessThan Double deriving (Eq, Show)
newtype LessThanEq = LessThanEq Double deriving (Eq, Show)
newtype GreaterThan = GreaterThan Double deriving (Eq, Show)
newtype GreaterThanEq = GreaterThanEq Double deriving (Eq, Show)
newtype LessThanD = LessThanD UTCTime deriving (Eq, Show)
newtype LessThanEqD = LessThanEqD UTCTime deriving (Eq, Show)
newtype GreaterThanD = GreaterThanD UTCTime deriving (Eq, Show)
newtype GreaterThanEqD = GreaterThanEqD UTCTime deriving (Eq, Show)
data RangeValue = RangeDateLte LessThanEqD
| RangeDateLt LessThanD
| RangeDateGte GreaterThanEqD
| RangeDateGt GreaterThanD
| RangeDateGtLt GreaterThanD LessThanD
| RangeDateGteLte GreaterThanEqD LessThanEqD
| RangeDateGteLt GreaterThanEqD LessThanD
| RangeDateGtLte GreaterThanD LessThanEqD
| RangeDoubleLte LessThanEq
| RangeDoubleLt LessThan
| RangeDoubleGte GreaterThanEq
| RangeDoubleGt GreaterThan
| RangeDoubleGtLt GreaterThan LessThan
| RangeDoubleGteLte GreaterThanEq LessThanEq
| RangeDoubleGteLt GreaterThanEq LessThan
| RangeDoubleGtLte GreaterThan LessThanEq
deriving (Eq, Show)
-- | Render a 'RangeValue' as the JSON key/value pairs Elasticsearch
-- expects inside a range query or filter: "gt"/"gte" for the lower
-- bound and "lt"/"lte" for the upper bound.
rangeValueToPair :: RangeValue -> [Pair]
rangeValueToPair (RangeDateLte (LessThanEqD upper))    = ["lte" .= upper]
rangeValueToPair (RangeDateGte (GreaterThanEqD lower)) = ["gte" .= lower]
rangeValueToPair (RangeDateLt (LessThanD upper))       = ["lt" .= upper]
rangeValueToPair (RangeDateGt (GreaterThanD lower))    = ["gt" .= lower]
rangeValueToPair (RangeDateGteLte (GreaterThanEqD lower) (LessThanEqD upper)) = ["gte" .= lower, "lte" .= upper]
rangeValueToPair (RangeDateGtLte (GreaterThanD lower) (LessThanEqD upper))    = ["gt" .= lower, "lte" .= upper]
rangeValueToPair (RangeDateGteLt (GreaterThanEqD lower) (LessThanD upper))    = ["gte" .= lower, "lt" .= upper]
rangeValueToPair (RangeDateGtLt (GreaterThanD lower) (LessThanD upper))       = ["gt" .= lower, "lt" .= upper]
rangeValueToPair (RangeDoubleLte (LessThanEq upper))    = ["lte" .= upper]
rangeValueToPair (RangeDoubleGte (GreaterThanEq lower)) = ["gte" .= lower]
rangeValueToPair (RangeDoubleLt (LessThan upper))       = ["lt" .= upper]
rangeValueToPair (RangeDoubleGt (GreaterThan lower))    = ["gt" .= lower]
rangeValueToPair (RangeDoubleGteLte (GreaterThanEq lower) (LessThanEq upper)) = ["gte" .= lower, "lte" .= upper]
rangeValueToPair (RangeDoubleGtLte (GreaterThan lower) (LessThanEq upper))    = ["gt" .= lower, "lte" .= upper]
rangeValueToPair (RangeDoubleGteLt (GreaterThanEq lower) (LessThan upper))    = ["gte" .= lower, "lt" .= upper]
rangeValueToPair (RangeDoubleGtLt (GreaterThan lower) (LessThan upper))       = ["gt" .= lower, "lt" .= upper]
-- | A single field\/value pair, the basic unit of term-level queries
-- and filters.
data Term = Term
  { termField :: Text
  , termValue :: Text
  } deriving (Eq, Show)
data BoolMatch = MustMatch Term Cache
| MustNotMatch Term Cache
| ShouldMatch [Term] Cache deriving (Eq, Show)
-- "memory" or "indexed"
data GeoFilterType = GeoFilterMemory
| GeoFilterIndexed deriving (Eq, Show)
data LatLon = LatLon { lat :: Double
, lon :: Double } deriving (Eq, Show)
data GeoBoundingBox =
GeoBoundingBox { topLeft :: LatLon
, bottomRight :: LatLon } deriving (Eq, Show)
data GeoBoundingBoxConstraint =
GeoBoundingBoxConstraint { geoBBField :: FieldName
, constraintBox :: GeoBoundingBox
, bbConstraintcache :: Cache
, geoType :: GeoFilterType
} deriving (Eq, Show)
data GeoPoint =
GeoPoint { geoField :: FieldName
, latLon :: LatLon} deriving (Eq, Show)
data DistanceUnit = Miles
| Yards
| Feet
| Inches
| Kilometers
| Meters
| Centimeters
| Millimeters
| NauticalMiles deriving (Eq, Show)
data DistanceType = Arc
| SloppyArc -- doesn't exist <1.0
| Plane deriving (Eq, Show)
data OptimizeBbox = OptimizeGeoFilterType GeoFilterType
| NoOptimizeBbox deriving (Eq, Show)
data Distance =
Distance { coefficient :: Double
, unit :: DistanceUnit } deriving (Eq, Show)
data DistanceRange =
DistanceRange { distanceFrom :: Distance
, distanceTo :: Distance } deriving (Eq, Show)
data SearchResult a =
SearchResult { took :: Int
, timedOut :: Bool
, shards :: ShardResult
, searchHits :: SearchHits a
, aggregations :: Maybe AggregationResults } deriving (Eq, Show)
type Score = Maybe Double
data SearchHits a =
SearchHits { hitsTotal :: Int
, maxScore :: Score
, hits :: [Hit a] } deriving (Eq, Show)
instance Monoid (SearchHits a) where
mempty = SearchHits 0 Nothing mempty
mappend (SearchHits ta ma ha) (SearchHits tb mb hb) =
SearchHits (ta + tb) (max ma mb) (ha <> hb)
data Hit a =
Hit { hitIndex :: IndexName
, hitType :: MappingName
, hitDocId :: DocId
, hitScore :: Score
, hitSource :: a
, hitHighlight :: Maybe HitHighlight } deriving (Eq, Show)
data ShardResult =
ShardResult { shardTotal :: Int
, shardsSuccessful :: Int
, shardsFailed :: Int } deriving (Eq, Show, Generic)
type HitHighlight = M.Map Text [Text]
-- | Render any 'Show'-able value as strict 'Text'.
showText :: Show a => a -> Text
showText x = T.pack (show x)
-- | A named collection of aggregations to attach to a search.
type Aggregations = M.Map Text Aggregation

-- | No aggregations at all.
emptyAggregations :: Aggregations
emptyAggregations = M.empty

-- | Build a one-entry 'Aggregations' map.  Uses 'M.singleton' rather
-- than the roundabout @M.insert name agg emptyAggregations@; behavior
-- is identical.
mkAggregations :: Text -> Aggregation -> Aggregations
mkAggregations name aggregation = M.singleton name aggregation
-- | Sort directive for a terms aggregation; serialized as
-- {<termSortField>: <order>}.
data TermOrder = TermOrder{ termSortField :: Text
                          , termSortOrder :: SortOrder } deriving (Eq, Show)
-- | Include\/exclude directive: either a bare value or a pattern with
-- regex flags.
data TermInclusion = TermInclusion Text
                   | TermPattern Text Text deriving (Eq, Show)
-- | Sub-aggregation traversal order.
data CollectionMode = BreadthFirst
                    | DepthFirst deriving (Eq, Show)
-- | Execution hint for terms aggregations.
data ExecutionHint = Ordinals
                   | GlobalOrdinals
                   | GlobalOrdinalsHash
                   | GlobalOrdinalsLowCardinality
                   | Map deriving (Eq, Show)
-- | Calendar-free units for fractional intervals; Show yields the wire
-- abbreviation ("w", "d", ...), hence the hand-written instance below.
data TimeInterval = Weeks
                  | Days
                  | Hours
                  | Minutes
                  | Seconds deriving (Eq)
-- | Date-histogram bucket width: a calendar unit or a fractional span.
data Interval = Year
              | Quarter
              | Month
              | Week
              | Day
              | Hour
              | Minute
              | Second
              | FractionalInterval Float TimeInterval deriving (Eq, Show)
-- | The supported aggregation kinds.
data Aggregation = TermsAgg TermsAggregation
                 | DateHistogramAgg DateHistogramAggregation deriving (Eq, Show)
-- | A terms aggregation; 'term' is Left field-name or Right script.
data TermsAggregation = TermsAggregation { term :: Either Text Text
                                         , termInclude :: Maybe TermInclusion
                                         , termExclude :: Maybe TermInclusion
                                         , termOrder :: Maybe TermOrder
                                         , termMinDocCount :: Maybe Int
                                         , termSize :: Maybe Int
                                         , termShardSize :: Maybe Int
                                         , termCollectMode :: Maybe CollectionMode
                                         , termExecutionHint :: Maybe ExecutionHint
                                         , termAggs :: Maybe Aggregations
                                         } deriving (Eq, Show)
-- | A date-histogram aggregation over a date field.
data DateHistogramAggregation = DateHistogramAggregation { dateField :: FieldName
                                                         , dateInterval :: Interval
                                                         , dateFormat :: Maybe Text
                                                         , datePreZone :: Maybe Text
                                                         , datePostZone :: Maybe Text
                                                         , datePreOffset :: Maybe Text
                                                         , datePostOffset :: Maybe Text
                                                         , dateAggs :: Maybe Aggregations
                                                         } deriving (Eq, Show)
-- | Minimal terms aggregation over a document field; every optional
-- parameter is left unset.
mkTermsAggregation :: Text -> TermsAggregation
mkTermsAggregation t =
  TermsAggregation { term              = Left t
                   , termInclude       = Nothing
                   , termExclude       = Nothing
                   , termOrder         = Nothing
                   , termMinDocCount   = Nothing
                   , termSize          = Nothing
                   , termShardSize     = Nothing
                   , termCollectMode   = Nothing
                   , termExecutionHint = Nothing
                   , termAggs          = Nothing }

-- | Like 'mkTermsAggregation' but the terms come from a script
-- ('Right' side of 'term') instead of a field.
mkTermsScriptAggregation :: Text -> TermsAggregation
mkTermsScriptAggregation t = (mkTermsAggregation t) { term = Right t }

-- | Minimal date-histogram aggregation: just a field and an interval.
mkDateHistogram :: FieldName -> Interval -> DateHistogramAggregation
mkDateHistogram f i =
  DateHistogramAggregation { dateField      = f
                           , dateInterval   = i
                           , dateFormat     = Nothing
                           , datePreZone    = Nothing
                           , datePostZone   = Nothing
                           , datePreOffset  = Nothing
                           , datePostOffset = Nothing
                           , dateAggs       = Nothing }
-- Serialized as {<field>: <order>}, matching ES's terms-agg "order".
instance ToJSON TermOrder where
  toJSON (TermOrder termSortField termSortOrder) = object [termSortField .= termSortOrder]
-- A bare value renders as a JSON string; a pattern renders as an object.
instance ToJSON TermInclusion where
  toJSON (TermInclusion x) = toJSON x
  toJSON (TermPattern pattern flags) = omitNulls [ "pattern" .= pattern,
                                                   "flags" .= flags]
instance ToJSON CollectionMode where
  toJSON BreadthFirst = "breadth_first"
  toJSON DepthFirst = "depth_first"
instance ToJSON ExecutionHint where
  toJSON Ordinals = "ordinals"
  toJSON GlobalOrdinals = "global_ordinals"
  toJSON GlobalOrdinalsHash = "global_ordinals_hash"
  toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality"
  toJSON Map = "map"
-- Calendar units are strings; fractional intervals render as
-- e.g. "1.5w" via Show on the coefficient and TimeInterval.
instance ToJSON Interval where
  toJSON Year = "year"
  toJSON Quarter = "quarter"
  toJSON Month = "month"
  toJSON Week = "week"
  toJSON Day = "day"
  toJSON Hour = "hour"
  toJSON Minute = "minute"
  toJSON Second = "second"
  toJSON (FractionalInterval fraction interval) = toJSON $ show fraction ++ show interval
-- Show produces the one-letter wire abbreviation, not a Haskell-ish
-- rendering; this is relied on by FractionalInterval above.
instance Show TimeInterval where
  show Weeks = "w"
  show Days = "d"
  show Hours = "h"
  show Minutes = "m"
  show Seconds = "s"
-- Aggregation request bodies; omitNulls drops unset optional knobs so
-- only configured parameters reach the wire.
instance ToJSON Aggregation where
  toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) =
    omitNulls ["terms" .= omitNulls [ toJSON' term,
                                      "include" .= include,
                                      "exclude" .= exclude,
                                      "order" .= order,
                                      "min_doc_count" .= minDocCount,
                                      "size" .= size,
                                      "shard_size" .= shardSize,
                                      "collect_mode" .= collectMode,
                                      "execution_hint" .= executionHint
                                    ],
               "aggs" .= termAggs ]
    where
      -- Left = aggregate a field, Right = aggregate a script result.
      toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y }
  toJSON (DateHistogramAgg (DateHistogramAggregation field interval format preZone postZone preOffset postOffset dateHistoAggs)) =
    omitNulls ["date_histogram" .= omitNulls [ "field" .= field,
                                               "interval" .= interval,
                                               "format" .= format,
                                               "pre_zone" .= preZone,
                                               "post_zone" .= postZone,
                                               "pre_offset" .= preOffset,
                                               "post_offset" .= postOffset
                                             ],
               "aggs" .= dateHistoAggs ]
-- | Raw aggregation section of a response, keyed by aggregation name;
-- values are decoded lazily by 'toTerms' \/ 'toDateHistogram'.
type AggregationResults = M.Map Text Value
-- | Uniform access to the per-bucket fields shared by all bucket kinds.
class BucketAggregation a where
  key :: a -> Text
  docCount :: a -> Int
  aggs :: a -> Maybe AggregationResults
-- | The "buckets" array of one aggregation result.
data Bucket a = Bucket { buckets :: [a]} deriving (Show)
-- | One bucket of a terms aggregation.
data TermsResult = TermsResult { termKey :: Text
                               , termsDocCount :: Int
                               , termsAggs :: Maybe AggregationResults } deriving (Show)
-- | One bucket of a date-histogram aggregation; the numeric key comes
-- straight from the response's "key" field.
data DateHistogramResult = DateHistogramResult { dateKey :: Int
                                               , dateKeyStr :: Maybe Text
                                               , dateDocCount :: Int
                                               , dateHistogramAggs :: Maybe AggregationResults } deriving (Show)
-- | Look up the named terms aggregation in a response's aggregation
-- results and decode its bucket list; 'Nothing' on a missing name or a
-- failed parse.
toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult)
toTerms name results = parseMaybe parseJSON =<< M.lookup name results

-- | As 'toTerms', but for a date-histogram aggregation.
toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult)
toDateHistogram name results = parseMaybe parseJSON =<< M.lookup name results
-- Bucket accessors for terms results.
instance BucketAggregation TermsResult where
  key = termKey
  docCount = termsDocCount
  aggs = termsAggs
-- Bucket accessors for date-histogram results; the numeric key is
-- rendered as text via 'showText'.
instance BucketAggregation DateHistogramResult where
  key = showText . dateKey
  docCount = dateDocCount
  aggs = dateHistogramAggs
-- NOTE(review): the three failure cases below use mempty (aeson
-- Parser's Monoid) while the response decoders later in this module use
-- empty; both produce a failing parser, but one style should be picked.
instance (FromJSON a, BucketAggregation a) => FromJSON (Bucket a) where
  parseJSON (Object v) = Bucket <$>
                         v .: "buckets"
  parseJSON _ = mempty
instance FromJSON TermsResult where
  parseJSON (Object v) = TermsResult <$>
                         v .: "key" <*>
                         v .: "doc_count" <*>
                         v .:? "aggregations"
  parseJSON _ = mempty
instance FromJSON DateHistogramResult where
  parseJSON (Object v) = DateHistogramResult <$>
                         v .: "key" <*>
                         v .:? "key_as_string" <*>
                         v .: "doc_count" <*>
                         v .:? "aggregations"
  parseJSON _ = mempty
-- Filters compose with AND by default; IdentityFilter ("match_all") is
-- the identity.  NOTE(review): mappend IdentityFilter f builds
-- AndFilter [IdentityFilter, f] rather than f -- identical in ES
-- semantics but not structurally, so Eq-based law checks would fail.
instance Monoid Filter where
  mempty = IdentityFilter
  mappend a b = AndFilter [a, b] defaultCache
-- <||> is the OR composition of two filters.
instance Seminearring Filter where
  a <||> b = OrFilter [a, b] defaultCache
-- Wire format for every Filter constructor.  Key names and nesting must
-- match the Elasticsearch filter DSL exactly, so each case builds its
-- named wrapper object by hand.
instance ToJSON Filter where
  toJSON (AndFilter filters cache) =
    object ["and" .=
            object [ "filters" .= fmap toJSON filters
                   , "_cache" .= cache]]
  toJSON (OrFilter filters cache) =
    object ["or" .=
            object [ "filters" .= fmap toJSON filters
                   , "_cache" .= cache]]
  toJSON (NotFilter notFilter cache) =
    object ["not" .=
            object ["filter" .= notFilter
                   , "_cache" .= cache]]
  -- The identity filter matches everything.
  toJSON (IdentityFilter) =
    object ["match_all" .= object []]
  toJSON (TermFilter (Term termFilterField termFilterValue) cache) =
    object ["term" .= object base]
    where base = [termFilterField .= termFilterValue,
                  "_cache" .= cache]
  toJSON (ExistsFilter (FieldName fieldName)) =
    object ["exists" .= object
            ["field" .= fieldName]]
  toJSON (BoolFilter boolMatch) =
    object ["bool" .= boolMatch]
  toJSON (GeoBoundingBoxFilter bbConstraint) =
    object ["geo_bounding_box" .= bbConstraint]
  -- The geo field name itself becomes a key alongside the fixed keys.
  toJSON (GeoDistanceFilter (GeoPoint (FieldName distanceGeoField) geoDistLatLon)
          distance distanceType optimizeBbox cache) =
    object ["geo_distance" .=
            object ["distance" .= distance
                   , "distance_type" .= distanceType
                   , "optimize_bbox" .= optimizeBbox
                   , distanceGeoField .= geoDistLatLon
                   , "_cache" .= cache]]
  toJSON (GeoDistanceRangeFilter (GeoPoint (FieldName gddrField) drLatLon)
          (DistanceRange geoDistRangeDistFrom drDistanceTo)) =
    object ["geo_distance_range" .=
            object ["from" .= geoDistRangeDistFrom
                   , "to" .= drDistanceTo
                   , gddrField .= drLatLon]]
  toJSON (GeoPolygonFilter (FieldName geoPolygonFilterField) latLons) =
    object ["geo_polygon" .=
            object [geoPolygonFilterField .=
                    object ["points" .= fmap toJSON latLons]]]
  toJSON (IdsFilter (MappingName mappingName) values) =
    object ["ids" .=
            object ["type" .= mappingName
                   , "values" .= fmap unpackId values]]
  toJSON (LimitFilter limit) =
    object ["limit" .= object ["value" .= limit]]
  toJSON (MissingFilter (FieldName fieldName) (Existence existence) (NullValue nullValue)) =
    object ["missing" .=
            object ["field" .= fieldName
                   , "existence" .= existence
                   , "null_value" .= nullValue]]
  toJSON (PrefixFilter (FieldName fieldName) fieldValue cache) =
    object ["prefix" .=
            object [fieldName .= fieldValue
                   , "_cache" .= cache]]
  -- Range bounds are flattened into the field's object via
  -- rangeValueToPair (defined earlier in this module).
  toJSON (RangeFilter (FieldName fieldName) rangeValue rangeExecution cache) =
    object ["range" .=
            object [ fieldName .= object (rangeValueToPair rangeValue)
                   , "execution" .= rangeExecution
                   , "_cache" .= cache]]
  toJSON (RegexpFilter (FieldName fieldName)
          (Regexp regexText) flags (CacheName cacheName) cache (CacheKey cacheKey)) =
    object ["regexp" .=
            object [fieldName .=
                    object ["value" .= regexText
                           , "flags" .= flags]
                   , "_name" .= cacheName
                   , "_cache" .= cache
                   , "_cache_key" .= cacheKey]]
-- Serializes a point as {<field name>: <lat/lon object>}.
instance ToJSON GeoPoint where
  toJSON (GeoPoint (FieldName fName) pointLatLon) =
    object [ fName .= pointLatLon ]
-- Wire format for every Query constructor; key names match the
-- Elasticsearch query DSL.
instance ToJSON Query where
  -- "boost" is only emitted when supplied.
  toJSON (TermQuery (Term termQueryField termQueryValue) boost) =
    object [ "term" .=
             object [termQueryField .= object merged]]
    where
      base = [ "value" .= termQueryValue ]
      boosted = maybe [] (return . ("boost" .=)) boost
      merged = mappend base boosted
  -- All terms share one field (taken from the NonEmpty head).
  toJSON (TermsQuery terms) =
    object [ "terms" .= object conjoined ]
    where conjoined = [ getTermsField terms .=
                        fmap (toJSON . getTermValue) (toList terms)]
          getTermsField ((Term f _ ) :| _) = f
          getTermValue (Term _ v) = v
  toJSON (IdsQuery idsQueryMappingName docIds) =
    object [ "ids" .= object conjoined ]
    where conjoined = [ "type" .= idsQueryMappingName
                      , "values" .= fmap toJSON docIds ]
  toJSON (QueryQueryStringQuery qQueryStringQuery) =
    object [ "query_string" .= qQueryStringQuery ]
  toJSON (QueryMatchQuery matchQuery) =
    object [ "match" .= matchQuery ]
  -- multi_match wraps itself (see the MultiMatchQuery instance).
  toJSON (QueryMultiMatchQuery multiMatchQuery) =
    toJSON multiMatchQuery
  toJSON (QueryBoolQuery boolQuery) =
    object [ "bool" .= boolQuery ]
  toJSON (QueryBoostingQuery boostingQuery) =
    object [ "boosting" .= boostingQuery ]
  toJSON (QueryCommonTermsQuery commonTermsQuery) =
    object [ "common" .= commonTermsQuery ]
  -- NOTE(review): "boost" is emitted as a sibling of "constant_score"
  -- here and in the next case; the ES API documents boost inside the
  -- constant_score object -- confirm before changing the wire format.
  toJSON (ConstantScoreFilter csFilter boost) =
    object [ "constant_score" .= csFilter
           , "boost" .= boost]
  toJSON (ConstantScoreQuery query boost) =
    object [ "constant_score" .= query
           , "boost" .= boost]
  toJSON (QueryDisMaxQuery disMaxQuery) =
    object [ "dis_max" .= disMaxQuery ]
  toJSON (QueryFilteredQuery qFilteredQuery) =
    object [ "filtered" .= qFilteredQuery ]
  toJSON (QueryFuzzyLikeThisQuery fuzzyQuery) =
    object [ "fuzzy_like_this" .= fuzzyQuery ]
  toJSON (QueryFuzzyLikeFieldQuery fuzzyFieldQuery) =
    object [ "fuzzy_like_this_field" .= fuzzyFieldQuery ]
  toJSON (QueryFuzzyQuery fuzzyQuery) =
    object [ "fuzzy" .= fuzzyQuery ]
  toJSON (QueryHasChildQuery childQuery) =
    object [ "has_child" .= childQuery ]
  toJSON (QueryHasParentQuery parentQuery) =
    object [ "has_parent" .= parentQuery ]
  toJSON (QueryIndicesQuery qIndicesQuery) =
    object [ "indices" .= qIndicesQuery ]
  toJSON (MatchAllQuery boost) =
    object [ "match_all" .= omitNulls [ "boost" .= boost ] ]
  toJSON (QueryMoreLikeThisQuery query) =
    object [ "more_like_this" .= query ]
  toJSON (QueryMoreLikeThisFieldQuery query) =
    object [ "more_like_this_field" .= query ]
  toJSON (QueryNestedQuery query) =
    object [ "nested" .= query ]
  toJSON (QueryPrefixQuery query) =
    object [ "prefix" .= query ]
  toJSON (QueryRangeQuery query) =
    object [ "range" .= query ]
  toJSON (QueryRegexpQuery query) =
    object [ "regexp" .= query ]
  toJSON (QuerySimpleQueryStringQuery query) =
    object [ "simple_query_string" .= query ]
-- | Build a JSON object, dropping pairs whose value is Null or an empty
-- array so that unset optional parameters never reach the wire.
omitNulls :: [(Text, Value)] -> Value
omitNulls pairs = object (filter keep pairs)
  where
    keep (_, Null)    = False
    keep (_, Array a) = not (V.null a)
    keep _            = True
-- simple_query_string body; optional settings drop out via omitNulls.
instance ToJSON SimpleQueryStringQuery where
  toJSON SimpleQueryStringQuery {..} =
    omitNulls (base ++ maybeAdd)
    where base = [ "query" .= simpleQueryStringQuery ]
          maybeAdd = [ "fields" .= simpleQueryStringField
                     , "default_operator" .= simpleQueryStringOperator
                     , "analyzer" .= simpleQueryStringAnalyzer
                     , "flags" .= simpleQueryStringFlags
                     , "lowercase_expanded_terms" .= simpleQueryStringLowercaseExpanded
                     , "locale" .= simpleQueryStringLocale ]
-- A single field renders bare; several render as an array.
instance ToJSON FieldOrFields where
  toJSON (FofField fieldName) =
    toJSON fieldName
  toJSON (FofFields fieldNames) =
    toJSON fieldNames
instance ToJSON SimpleQueryFlag where
  toJSON SimpleQueryAll = "ALL"
  toJSON SimpleQueryNone = "NONE"
  toJSON SimpleQueryAnd = "AND"
  toJSON SimpleQueryOr = "OR"
  toJSON SimpleQueryPrefix = "PREFIX"
  toJSON SimpleQueryPhrase = "PHRASE"
  toJSON SimpleQueryPrecedence = "PRECEDENCE"
  toJSON SimpleQueryEscape = "ESCAPE"
  toJSON SimpleQueryWhitespace = "WHITESPACE"
  toJSON SimpleQueryFuzzy = "FUZZY"
  toJSON SimpleQueryNear = "NEAR"
  toJSON SimpleQuerySlop = "SLOP"
-- regexp query body, keyed by the queried field's name.
instance ToJSON RegexpQuery where
  toJSON (RegexpQuery (FieldName rqQueryField)
          (Regexp regexpQueryQuery) rqQueryFlags
          rqQueryBoost) =
    object [ rqQueryField .= omitNulls base ]
    where base = [ "value" .= regexpQueryQuery
                 , "flags" .= rqQueryFlags
                 , "boost" .= rqQueryBoost ]
-- query_string with its many optional knobs; only "query" is mandatory.
instance ToJSON QueryStringQuery where
  toJSON (QueryStringQuery qsQueryString
          qsDefaultField qsOperator
          qsAnalyzer qsAllowWildcard
          qsLowercaseExpanded qsEnablePositionIncrements
          qsFuzzyMaxExpansions qsFuzziness
          qsFuzzyPrefixLength qsPhraseSlop
          qsBoost qsAnalyzeWildcard
          qsGeneratePhraseQueries qsMinimumShouldMatch
          qsLenient qsLocale) =
    omitNulls base
    where
      base = [ "query" .= qsQueryString
             , "default_field" .= qsDefaultField
             , "default_operator" .= qsOperator
             , "analyzer" .= qsAnalyzer
             , "allow_leading_wildcard" .= qsAllowWildcard
             , "lowercase_expanded_terms" .= qsLowercaseExpanded
             , "enable_position_increments" .= qsEnablePositionIncrements
             , "fuzzy_max_expansions" .= qsFuzzyMaxExpansions
             , "fuzziness" .= qsFuzziness
             , "fuzzy_prefix_length" .= qsFuzzyPrefixLength
             , "phrase_slop" .= qsPhraseSlop
             , "boost" .= qsBoost
             , "analyze_wildcard" .= qsAnalyzeWildcard
             , "auto_generate_phrase_queries" .= qsGeneratePhraseQueries
             , "minimum_should_match" .= qsMinimumShouldMatch
             , "lenient" .= qsLenient
             , "locale" .= qsLocale ]
-- range query: the boost and the range bounds must all live inside an
-- object keyed by the field name, matching the shape RangeFilter emits.
-- BUG FIX: the previous @fieldName .= conjoined@ serialized the [Pair]
-- list via the tuple ToJSON instance, producing a JSON array of
-- [key, value] arrays instead of an object; wrap with 'object'.
instance ToJSON RangeQuery where
  toJSON (RangeQuery (FieldName fieldName) range boost) =
    object [ fieldName .= object conjoined ]
    where conjoined = ("boost" .= boost) : rangeValueToPair range
-- prefix query keyed by field name; boost dropped when unset.
instance ToJSON PrefixQuery where
  toJSON (PrefixQuery (FieldName fieldName) queryValue boost) =
    object [ fieldName .= omitNulls base ]
    where base = [ "value" .= queryValue
                 , "boost" .= boost ]
instance ToJSON NestedQuery where
  toJSON (NestedQuery nqPath nqScoreType nqQuery) =
    object [ "path" .= nqPath
           , "score_mode" .= nqScoreType
           , "query" .= nqQuery ]
-- more_like_this_field: the field name keys the settings object.
instance ToJSON MoreLikeThisFieldQuery where
  toJSON (MoreLikeThisFieldQuery text (FieldName fieldName)
          percent mtf mqt stopwords mindf maxdf
          minwl maxwl boostTerms boost analyzer) =
    object [ fieldName .= omitNulls base ]
    where base = [ "like_text" .= text
                 , "percent_terms_to_match" .= percent
                 , "min_term_freq" .= mtf
                 , "max_query_terms" .= mqt
                 , "stop_words" .= stopwords
                 , "min_doc_freq" .= mindf
                 , "max_doc_freq" .= maxdf
                 , "min_word_length" .= minwl
                 , "max_word_length" .= maxwl
                 , "boost_terms" .= boostTerms
                 , "boost" .= boost
                 , "analyzer" .= analyzer ]
-- more_like_this over an explicit field list.
instance ToJSON MoreLikeThisQuery where
  toJSON (MoreLikeThisQuery text fields percent
          mtf mqt stopwords mindf maxdf
          minwl maxwl boostTerms boost analyzer) =
    omitNulls base
    where base = [ "like_text" .= text
                 , "fields" .= fields
                 , "percent_terms_to_match" .= percent
                 , "min_term_freq" .= mtf
                 , "max_query_terms" .= mqt
                 , "stop_words" .= stopwords
                 , "min_doc_freq" .= mindf
                 , "max_doc_freq" .= maxdf
                 , "min_word_length" .= minwl
                 , "max_word_length" .= maxwl
                 , "boost_terms" .= boostTerms
                 , "boost" .= boost
                 , "analyzer" .= analyzer ]
instance ToJSON IndicesQuery where
  toJSON (IndicesQuery indices query noMatch) =
    omitNulls [ "indices" .= indices
              , "no_match_query" .= noMatch
              , "query" .= query ]
instance ToJSON HasParentQuery where
  toJSON (HasParentQuery queryType query scoreType) =
    omitNulls [ "parent_type" .= queryType
              , "score_type" .= scoreType
              , "query" .= query ]
instance ToJSON HasChildQuery where
  toJSON (HasChildQuery queryType query scoreType) =
    omitNulls [ "query" .= query
              , "score_type" .= scoreType
              , "type" .= queryType ]
-- fuzzy query keyed by field name.
instance ToJSON FuzzyQuery where
  toJSON (FuzzyQuery (FieldName fieldName) queryText
          prefixLength maxEx fuzziness boost) =
    object [ fieldName .= omitNulls base ]
    where base = [ "value" .= queryText
                 , "fuzziness" .= fuzziness
                 , "prefix_length" .= prefixLength
                 , "boost" .= boost
                 , "max_expansions" .= maxEx ]
instance ToJSON FuzzyLikeFieldQuery where
  toJSON (FuzzyLikeFieldQuery (FieldName fieldName)
          fieldText maxTerms ignoreFreq fuzziness prefixLength
          boost analyzer) =
    object [ fieldName .=
             omitNulls [ "like_text" .= fieldText
                       , "max_query_terms" .= maxTerms
                       , "ignore_tf" .= ignoreFreq
                       , "fuzziness" .= fuzziness
                       , "prefix_length" .= prefixLength
                       , "analyzer" .= analyzer
                       , "boost" .= boost ]]
instance ToJSON FuzzyLikeThisQuery where
  toJSON (FuzzyLikeThisQuery fields text maxTerms
          ignoreFreq fuzziness prefixLength boost analyzer) =
    omitNulls base
    where base = [ "fields" .= fields
                 , "like_text" .= text
                 , "max_query_terms" .= maxTerms
                 , "ignore_tf" .= ignoreFreq
                 , "fuzziness" .= fuzziness
                 , "prefix_length" .= prefixLength
                 , "analyzer" .= analyzer
                 , "boost" .= boost ]
-- filtered query: a query constrained by a filter.
instance ToJSON FilteredQuery where
  toJSON (FilteredQuery q f) =
    object [ "query"  .= q
           , "filter" .= f ]
-- dis_max query; the optional boost and tie-breaker are dropped from
-- the output when unset.
instance ToJSON DisMaxQuery where
  toJSON (DisMaxQuery queries tiebreaker boost) =
    omitNulls [ "queries"     .= queries
              , "boost"       .= boost
              , "tie_breaker" .= tiebreaker ]
-- common terms query keyed by field name.
instance ToJSON CommonTermsQuery where
  toJSON (CommonTermsQuery (FieldName fieldName)
          (QueryString query) cf lfo hfo msm
          boost analyzer disableCoord) =
    object [fieldName .= omitNulls base ]
    where base = [ "query" .= query
                 , "cutoff_frequency" .= cf
                 , "low_freq_operator" .= lfo
                 , "minimum_should_match" .= msm
                 , "boost" .= boost
                 , "analyzer" .= analyzer
                 , "disable_coord" .= disableCoord
                 , "high_freq_operator" .= hfo ]
-- A single minimum renders bare; the high/low form renders as an object.
instance ToJSON CommonMinimumMatch where
  toJSON (CommonMinimumMatch mm) = toJSON mm
  toJSON (CommonMinimumMatchHighLow (MinimumMatchHighLow lowF highF)) =
    object [ "low_freq" .= lowF
           , "high_freq" .= highF ]
instance ToJSON BoostingQuery where
  toJSON (BoostingQuery bqPositiveQuery bqNegativeQuery bqNegativeBoost) =
    object [ "positive" .= bqPositiveQuery
           , "negative" .= bqNegativeQuery
           , "negative_boost" .= bqNegativeBoost ]
instance ToJSON BoolQuery where
  toJSON (BoolQuery mustM notM shouldM bqMin boost disableCoord) =
    omitNulls base
    where base = [ "must" .= mustM
                 , "must_not" .= notM
                 , "should" .= shouldM
                 , "minimum_should_match" .= bqMin
                 , "boost" .= boost
                 , "disable_coord" .= disableCoord ]
-- match query keyed by field name; only "query" is mandatory.
instance ToJSON MatchQuery where
  toJSON (MatchQuery (FieldName fieldName)
          (QueryString mqQueryString) booleanOperator
          zeroTermsQuery cutoffFrequency matchQueryType
          analyzer maxExpansions lenient boost) =
    object [ fieldName .= omitNulls base ]
    where base = [ "query" .= mqQueryString
                 , "operator" .= booleanOperator
                 , "zero_terms_query" .= zeroTermsQuery
                 , "cutoff_frequency" .= cutoffFrequency
                 , "type" .= matchQueryType
                 , "analyzer" .= analyzer
                 , "max_expansions" .= maxExpansions
                 , "lenient" .= lenient
                 , "boost" .= boost ]
-- multi_match supplies its own wrapper key, unlike the other queries.
instance ToJSON MultiMatchQuery where
  toJSON (MultiMatchQuery fields (QueryString query) boolOp
          ztQ tb mmqt cf analyzer maxEx lenient) =
    object ["multi_match" .= omitNulls base]
    where base = [ "fields" .= fmap toJSON fields
                 , "query" .= query
                 , "operator" .= boolOp
                 , "zero_terms_query" .= ztQ
                 , "tiebreaker" .= tb
                 , "type" .= mmqt
                 , "cutoff_frequency" .= cf
                 , "analyzer" .= analyzer
                 , "max_expansions" .= maxEx
                 , "lenient" .= lenient ]
instance ToJSON MultiMatchQueryType where
  toJSON MultiMatchBestFields = "best_fields"
  toJSON MultiMatchMostFields = "most_fields"
  toJSON MultiMatchCrossFields = "cross_fields"
  toJSON MultiMatchPhrase = "phrase"
  toJSON MultiMatchPhrasePrefix = "phrase_prefix"
instance ToJSON BooleanOperator where
  toJSON And = String "and"
  toJSON Or = String "or"
instance ToJSON ZeroTermsQuery where
  toJSON ZeroTermsNone = String "none"
  toJSON ZeroTermsAll = String "all"
instance ToJSON MatchQueryType where
  toJSON MatchPhrase = "phrase"
  toJSON MatchPhrasePrefix = "phrase_prefix"
-- Field names serialize as bare strings.
instance ToJSON FieldName where
  toJSON (FieldName fieldName) = String fieldName
-- Serialization for the simple wrapper types.  No bodies: these rely on
-- the classes' default method implementations.
-- NOTE(review): this assumes the wrapped types support the defaults
-- (e.g. derive Generic) where they are defined earlier in this module
-- -- confirm there.
instance ToJSON ReplicaCount
instance ToJSON ShardCount
instance ToJSON CutoffFrequency
instance ToJSON Analyzer
instance ToJSON MaxExpansions
instance ToJSON Lenient
instance ToJSON Boost
instance ToJSON Version
instance ToJSON Tiebreaker
instance ToJSON MinimumMatch
instance ToJSON DisableCoord
instance ToJSON PrefixLength
instance ToJSON Fuzziness
instance ToJSON IgnoreTermFrequency
instance ToJSON MaxQueryTerms
instance ToJSON TypeName
instance ToJSON IndexName
instance ToJSON BoostTerms
instance ToJSON MaxWordLength
instance ToJSON MinWordLength
instance ToJSON MaxDocFrequency
instance ToJSON MinDocFrequency
instance ToJSON PhraseSlop
instance ToJSON StopWord
instance ToJSON QueryPath
instance ToJSON MinimumTermFrequency
instance ToJSON PercentMatch
instance ToJSON MappingName
instance ToJSON DocId
instance ToJSON QueryString
instance ToJSON AllowLeadingWildcard
instance ToJSON LowercaseExpanded
instance ToJSON AnalyzeWildcard
instance ToJSON GeneratePhraseQueries
instance ToJSON Locale
instance ToJSON EnablePositionIncrements
instance FromJSON Version
instance FromJSON IndexName
instance FromJSON MappingName
instance FromJSON DocId
-- Decodes the server's root-endpoint status blob; "ok" is optional.
instance FromJSON Status where
  parseJSON (Object v) = Status <$>
                         v .:? "ok" <*>
                         v .: "status" <*>
                         v .: "name" <*>
                         v .: "version" <*>
                         v .: "tagline"
  parseJSON _ = empty
-- Index creation settings: shard/replica counts under "settings".
instance ToJSON IndexSettings where
  toJSON (IndexSettings s r) = object ["settings" .= object ["shards" .= s, "replicas" .= r]]
-- Decodes a single-document GET response; "found" is parsed optionally.
instance (FromJSON a) => FromJSON (EsResult a) where
  parseJSON (Object v) = EsResult <$>
                         v .: "_index" <*>
                         v .: "_type" <*>
                         v .: "_id" <*>
                         v .: "_version" <*>
                         v .:? "found" <*>
                         v .: "_source"
  parseJSON _ = empty
-- Top-level search request body; unset optional pieces are omitted.
instance ToJSON Search where
  toJSON (Search query sFilter sort searchAggs highlight sTrackSortScores sFrom sSize sSource) =
    omitNulls [ "query" .= query
              , "filter" .= sFilter
              , "sort" .= sort
              , "aggregations" .= searchAggs
              , "highlight" .= highlight
              , "from" .= sFrom
              , "size" .= sSize
              , "track_scores" .= sTrackSortScores
              , "_source" .= sSource]
-- "_source" filtering: disabled (false), a pattern list, or explicit
-- include/exclude sets.
instance ToJSON Source where
  toJSON NoSource = toJSON False
  toJSON (SourcePatterns patterns) = toJSON patterns
  toJSON (SourceIncludeExclude incl excl) = object [ "include" .= incl, "exclude" .= excl ]
instance ToJSON PatternOrPatterns where
  toJSON (PopPattern pattern) = toJSON pattern
  toJSON (PopPatterns patterns) = toJSON patterns
instance ToJSON Include where
  toJSON (Include patterns) = toJSON patterns
instance ToJSON Exclude where
  toJSON (Exclude patterns) = toJSON patterns
instance ToJSON Pattern where
  toJSON (Pattern pattern) = toJSON pattern
-- A highlighted field; an empty object requests default highlighting.
instance ToJSON FieldHighlight where
  toJSON (FieldHighlight (FieldName fName) (Just fSettings)) =
    object [ fName .= fSettings ]
  toJSON (FieldHighlight (FieldName fName) Nothing) =
    object [ fName .= emptyObject ]
-- Global highlight settings are flattened alongside the "fields" map.
instance ToJSON Highlights where
  toJSON (Highlights global fields) =
    omitNulls (("fields" .= fields)
              : highlightSettingsPairs global)
instance ToJSON HighlightSettings where
  toJSON hs = omitNulls (highlightSettingsPairs (Just hs))
-- | Flatten optional highlighter settings into JSON pairs, dispatching
-- on which highlighter kind was configured.
highlightSettingsPairs :: Maybe HighlightSettings -> [Pair]
highlightSettingsPairs Nothing = []
highlightSettingsPairs (Just settings) =
  case settings of
    Plain plh      -> plainHighPairs (Just plh)
    Postings ph    -> postHighPairs (Just ph)
    FastVector fvh -> fastVectorHighPairs (Just fvh)
-- | Pairs for the plain highlighter: type tag plus common and
-- non-postings settings.
plainHighPairs :: Maybe PlainHighlight -> [Pair]
plainHighPairs Nothing = []
plainHighPairs (Just (PlainHighlight plCom plNonPost)) =
    [ "type" .= String "plain"]
    ++ commonHighlightPairs plCom
    ++ nonPostingsToPairs plNonPost
-- | Pairs for the postings highlighter (common settings only).
postHighPairs :: Maybe PostingsHighlight -> [Pair]
postHighPairs Nothing = []
postHighPairs (Just (PostingsHighlight pCom)) =
  ("type" .= String "postings")
    : commonHighlightPairs pCom
-- | Pairs for the fast-vector highlighter: type tag, fvh-specific knobs,
-- then common and non-postings settings.
-- BUG FIX: the phrase-limit key was emitted as "phraseLimit"; the
-- Elasticsearch parameter (and this module's convention) is snake_case
-- "phrase_limit".
fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair]
fastVectorHighPairs Nothing = []
fastVectorHighPairs (Just
                     (FastVectorHighlight fvCom fvNonPostSettings fvBoundChars
                                          fvBoundMaxScan fvFragOff fvMatchedFields
                                          fvPhraseLim)) =
    [ "type" .= String "fvh"
    , "boundary_chars" .= fvBoundChars
    , "boundary_max_scan" .= fvBoundMaxScan
    , "fragment_offset" .= fvFragOff
    , "matched_fields" .= fvMatchedFields
    , "phrase_limit" .= fvPhraseLim]
    ++ commonHighlightPairs fvCom
    ++ nonPostingsToPairs fvNonPostSettings
-- | Pairs shared by every highlighter kind, plus the tag configuration.
-- BUG FIX: the field-match key was emitted as the mixed-case
-- "require_fieldMatch"; the Elasticsearch parameter is
-- "require_field_match".
commonHighlightPairs :: Maybe CommonHighlight -> [Pair]
commonHighlightPairs Nothing = []
commonHighlightPairs (Just (CommonHighlight chScore chForceSource chTag chEncoder
                                            chNoMatchSize chHighlightQuery
                                            chRequireFieldMatch)) =
    [ "order" .= chScore
    , "force_source" .= chForceSource
    , "encoder" .= chEncoder
    , "no_match_size" .= chNoMatchSize
    , "highlight_query" .= chHighlightQuery
    , "require_field_match" .= chRequireFieldMatch]
    ++ highlightTagToPairs chTag
-- | Fragment sizing shared by the plain and fast-vector highlighters.
nonPostingsToPairs :: Maybe NonPostings -> [Pair]
nonPostingsToPairs Nothing = []
nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) =
  [ "fragment_size" .= npFragSize
  , "number_of_fragments" .= npNumOfFrags]
instance ToJSON HighlightEncoder where
    toJSON DefaultEncoder = String "default"
    toJSON HTMLEncoder = String "html"
-- | Highlight tag configuration.
-- NOTE(review): the TagSchema payload is ignored and the key emitted is
-- "scheme"; Elasticsearch documents this parameter as "tags_schema" --
-- confirm against the targeted server version before relying on it.
highlightTagToPairs :: Maybe HighlightTag -> [Pair]
highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"]
highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre
                                                      , "post_tags" .= post]
highlightTagToPairs Nothing = []
-- Sort specs: field sorts keyed by field name; geo-distance sorts carry
-- the unit, point, and order.
instance ToJSON SortSpec where
  toJSON (DefaultSortSpec
          (DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped
                       dsSortMode dsMissingSort dsNestedFilter)) =
    object [dsSortFieldName .= omitNulls base] where
      base = [ "order" .= dsSortOrder
             , "ignore_unmapped" .= dsIgnoreUnmapped
             , "mode" .= dsSortMode
             , "missing" .= dsMissingSort
             , "nested_filter" .= dsNestedFilter ]
  toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) =
    object [ "unit" .= units
           , field .= gdsLatLon
           , "order" .= gdsSortOrder ]
instance ToJSON SortOrder where
  toJSON Ascending = String "asc"
  toJSON Descending = String "desc"
instance ToJSON SortMode where
  toJSON SortMin = String "min"
  toJSON SortMax = String "max"
  toJSON SortSum = String "sum"
  toJSON SortAvg = String "avg"
instance ToJSON Missing where
  toJSON LastMissing = String "_last"
  toJSON FirstMissing = String "_first"
  toJSON (CustomMissing txt) = String txt
instance ToJSON ScoreType where
  toJSON ScoreTypeMax = "max"
  toJSON ScoreTypeAvg = "avg"
  toJSON ScoreTypeSum = "sum"
  toJSON ScoreTypeNone = "none"
-- Distances serialize as a single string, e.g. "5.0km".  The partial
-- (String unitText) binding is safe only because every DistanceUnit
-- case below yields a String.
instance ToJSON Distance where
  toJSON (Distance dCoefficient dUnit) =
    String boltedTogether where
      coefText = showText dCoefficient
      (String unitText) = toJSON dUnit
      boltedTogether = mappend coefText unitText
instance ToJSON DistanceUnit where
  toJSON Miles = String "mi"
  toJSON Yards = String "yd"
  toJSON Feet = String "ft"
  toJSON Inches = String "in"
  toJSON Kilometers = String "km"
  toJSON Meters = String "m"
  toJSON Centimeters = String "cm"
  toJSON Millimeters = String "mm"
  toJSON NauticalMiles = String "nmi"
instance ToJSON DistanceType where
  toJSON Arc = String "arc"
  toJSON SloppyArc = String "sloppy_arc"
  toJSON Plane = String "plane"
instance ToJSON OptimizeBbox where
  toJSON NoOptimizeBbox = String "none"
  toJSON (OptimizeGeoFilterType gft) = toJSON gft
-- The geo field name keys the box itself; cache and type ride alongside.
instance ToJSON GeoBoundingBoxConstraint where
  toJSON (GeoBoundingBoxConstraint
          (FieldName gbbcGeoBBField) gbbcConstraintBox cache type') =
    object [gbbcGeoBBField .= gbbcConstraintBox
           , "_cache" .= cache
           , "type" .= type']
instance ToJSON GeoFilterType where
  toJSON GeoFilterMemory = String "memory"
  toJSON GeoFilterIndexed = String "indexed"
instance ToJSON GeoBoundingBox where
  toJSON (GeoBoundingBox gbbTopLeft gbbBottomRight) =
    object ["top_left" .= gbbTopLeft
           , "bottom_right" .= gbbBottomRight]
instance ToJSON LatLon where
  toJSON (LatLon lLat lLon) =
    object ["lat" .= lLat
           , "lon" .= lLon]
-- index for smaller ranges, fielddata for longer ranges
instance ToJSON RangeExecution where
  toJSON RangeExecutionIndex = "index"
  toJSON RangeExecutionFielddata = "fielddata"
-- Flag sets are joined with '|'; nub keeps the first occurrence of each
-- flag (its O(n^2) cost is irrelevant here: at most six flags exist).
instance ToJSON RegexpFlags where
  toJSON AllRegexpFlags = String "ALL"
  toJSON NoRegexpFlags = String "NONE"
  toJSON (SomeRegexpFlags (h :| fs)) = String $ T.intercalate "|" flagStrs
    where flagStrs = map flagStr . nub $ h:fs
          flagStr AnyString = "ANYSTRING"
          flagStr Automaton = "AUTOMATON"
          flagStr Complement = "COMPLEMENT"
          flagStr Empty = "EMPTY"
          flagStr Intersection = "INTERSECTION"
          flagStr Interval = "INTERVAL"
-- A bare Term gets its own "term" wrapper when serialized standalone.
instance ToJSON Term where
  toJSON (Term field value) = object ["term" .= object
                                      [field .= value]]
instance ToJSON BoolMatch where
  toJSON (MustMatch term cache) = object ["must" .= term,
                                          "_cache" .= cache]
  toJSON (MustNotMatch term cache) = object ["must_not" .= term,
                                             "_cache" .= cache]
  toJSON (ShouldMatch terms cache) = object ["should" .= fmap toJSON terms,
                                             "_cache" .= cache]
-- Response decoders.  Non-Object input fails via empty (Parser's
-- Alternative instance).
instance (FromJSON a) => FromJSON (SearchResult a) where
  parseJSON (Object v) = SearchResult <$>
                         v .: "took" <*>
                         v .: "timed_out" <*>
                         v .: "_shards" <*>
                         v .: "hits" <*>
                         v .:? "aggregations"
  parseJSON _ = empty
instance (FromJSON a) => FromJSON (SearchHits a) where
  parseJSON (Object v) = SearchHits <$>
                         v .: "total" <*>
                         v .: "max_score" <*>
                         v .: "hits"
  parseJSON _ = empty
instance (FromJSON a) => FromJSON (Hit a) where
  parseJSON (Object v) = Hit <$>
                         v .: "_index" <*>
                         v .: "_type" <*>
                         v .: "_id" <*>
                         v .: "_score" <*>
                         v .: "_source" <*>
                         v .:? "highlight"
  parseJSON _ = empty
instance FromJSON ShardResult where
  parseJSON (Object v) = ShardResult <$>
                         v .: "total" <*>
                         v .: "successful" <*>
                         v .: "failed"
  parseJSON _ = empty
|
silkapp/bloodhound
|
src/Database/Bloodhound/Types.hs
|
bsd-3-clause
| 81,736
| 0
| 17
| 25,337
| 16,884
| 9,316
| 7,568
| 1,676
| 16
|
-----------------------------------------------------------------------------
-- |
-- Module : Generics.Pointless.Observe.Functors
-- Copyright : (c) 2008 University of Minho
-- License : BSD3
--
-- Maintainer : hpacheco@di.uminho.pt
-- Stability : experimental
-- Portability : non-portable
--
-- Pointless Haskell:
-- point-free programming with recursion patterns as hylomorphisms
--
-- This module defines generic GHood observations for user-defined data types.
--
-----------------------------------------------------------------------------
module Generics.Pointless.Observe.Functors where
import Generics.Pointless.Combinators
import Generics.Pointless.Functors
import Debug.Observe
import qualified Data.Generics as G
import Prelude hiding (Functor(..))
import Control.Monad hiding (Functor(..))
-- * Definition of generic observations
-- | Class for mapping observations over functor representations.
class FunctorO f where
   -- | Derives a type representation for a functor. This is used for showing the functor for recursion trees.
   functorOf :: Ann (Fix f) -> String
   -- | Watch values of a functor. Since the fixpoint of a functor recurses over itself, we cannot use the 'Show' instance for functor values applied to their fixpoint.
   watch :: Ann (Fix f) -> Ann x -> Rep f x -> String
   -- | Maps an observation over a functor representation.
   fmapO :: Ann (Fix f) -> (x -> ObserverM y) -> Rep f x -> ObserverM (Rep f y)
-- | Identity functor: the observation is applied directly to the value.
instance FunctorO Id where
   functorOf _ = "Id"
   watch _ _ _ = ""
   fmapO _ f = f

-- | Constant functor: the mapped function is ignored (a constant holds no
-- recursive occurrences); the constant itself is observed via 'thunk'.
instance (G.Typeable a,Observable a) => FunctorO (Const a) where
   functorOf _ = "Const " ++ show (G.typeOf (_L::a))
   watch _ _ _ = ""
   fmapO _ f = thunk

-- | Sum functor: 'watch' reports which alternative was taken; 'fmapO'
-- recurses into the chosen side.
instance (FunctorO f, FunctorO g) => FunctorO (f :+: g) where
   functorOf (_::Ann (Fix (f:+:g))) = "(" ++ functorOf (ann::Ann (Fix f)) ++ ":+:" ++ functorOf (ann::Ann (Fix g)) ++ ")"
   watch (_::Ann (Fix (f:+:g))) _ (Left _) = "Left"
   watch (_::Ann (Fix (f:+:g))) _ (Right _) = "Right"
   fmapO (_::Ann (Fix (f:+:g))) f (Left x) = liftM Left (fmapO (ann::Ann (Fix f)) f x)
   fmapO (_::Ann (Fix (f:+:g))) f (Right x) = liftM Right (fmapO (ann::Ann (Fix g)) f x)

-- | Product functor: observes both components in sequence.
instance (FunctorO f, FunctorO g) => FunctorO (f :*: g) where
   functorOf (_::Ann (Fix (f:*:g))) = "(" ++ functorOf (ann::Ann (Fix f)) ++ ":*:" ++ functorOf (ann::Ann (Fix g)) ++ ")"
   watch _ _ _ = ""
   fmapO (_::Ann (Fix (f:*:g))) f (x,y) = do
      x' <- fmapO (ann::Ann (Fix f)) f x
      y' <- fmapO (ann::Ann (Fix g)) f y
      return (x',y')

-- | Functor composition: the observation is mapped through the inner
-- functor @h@ and then through the outer functor @g@.
instance (FunctorO g, FunctorO h) => FunctorO (g :@: h) where
   functorOf (_::Ann (Fix (g:@:h))) = "(" ++ functorOf (ann::Ann (Fix g)) ++ ":@:" ++ functorOf (ann::Ann (Fix h)) ++ ")"
   watch (_::Ann (Fix (g:@:h))) (x::Ann x) = watch (ann::Ann (Fix g)) (ann::Ann (Rep h x))
   fmapO (_::Ann (Fix (g:@:h))) = fmapO (ann::Ann (Fix g)) . fmapO (ann::Ann (Fix h))
-- | Polytypic mapping of observations: maps an observation over the
-- pattern functor representation of the annotated type @a@.
omap :: FunctorO (PF a) => Ann a -> (x -> ObserverM y) -> F a x -> ObserverM (F a y)
omap (_::Ann a) = fmapO (ann::Ann (Fix (PF a)))
-- | Trivial observation for the unit type.
instance Observable One where
   observer = observeBase

-- | Fixpoint of the identity functor.
instance Observable I where
   observer FixId = send "" (fmapO (ann :: Ann (Fix Id)) thunk FixId)

-- | Fixpoint of a constant functor: observe the wrapped constant.
instance (G.Typeable a,Observable a) => Observable (K a) where
   observer (FixConst a) = send "" (liftM FixConst (fmapO (ann::Ann (Fix (Const a))) thk a))
      where thk = thunk :: a -> ObserverM a

-- | Fixpoint of the sum of the pattern functors of @a@ and @b@.
instance (FunctorO (PF a),FunctorO (PF b)) => Observable (a :+!: b) where
   observer (FixSum f) = send "" (liftM FixSum (fmapO (ann::Ann (Fix (PF a :+: PF b))) thk f))
      where thk = thunk :: a :+!: b -> ObserverM (a :+!: b)

-- | Fixpoint of the product of the pattern functors of @a@ and @b@.
instance (FunctorO (PF a), FunctorO (PF b)) => Observable (a :*!: b) where
   observer (FixProd f) = send "" (liftM FixProd (fmapO (ann::Ann (Fix (PF a :*: PF b))) thk f))
      where thk = thunk :: a :*!: b -> ObserverM (a :*!: b)

-- | Fixpoint of the composition of the pattern functors of @a@ and @b@.
instance (FunctorO (PF a), FunctorO (PF b)) => Observable (a :@!: b) where
   observer (FixComp f) = send "" (liftM FixComp (fmapO (ann::Ann (Fix (PF a :@: PF b))) thk f))
      where thk = thunk :: a :@!: b -> ObserverM (a :@!: b)

-- NOTE: The following commented instance causes overlapping problems with
-- the specific instances defined for base types (One, Int, etc.). The
-- solution is to provide a specific instance for each type when needed, or
-- to uncomment the following code and use the flag -XIncoherentInstances.
--instance (Mu a,FunctorO (PF a)) => Observable a where
--   observer x = send "" (omap (_L :: a) thk (out x) >>= return . inn)
--       where thk = thunk :: a -> ObserverM a

-- | Generic observation for the fixpoint of an observable functor:
-- unwraps one layer, observes it, and rewraps the result in 'Inn'.
instance (Functor f, FunctorO f) => Observable (Fix f) where
   observer (Inn x) = send (watch f f x) (liftM Inn (fmapO f thk x))
      where thk = thunk :: Fix f -> ObserverM (Fix f)
            f = ann::Ann (Fix f)
|
d-day/relation
|
include/pointfree-style/pointless-haskell-0.0.8/src/Generics/Pointless/Observe/Functors.hs
|
bsd-3-clause
| 4,814
| 0
| 17
| 998
| 1,949
| 1,012
| 937
| -1
| -1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module VYPe15.Internal.Lexer
where
import Data.Bool (Bool(False))
import Data.Monoid ((<>))
import Text.Parsec (alphaNum, char, letter, oneOf, (<|>))
import Text.Parsec.Language (LanguageDef, emptyDef)
import Text.Parsec.Token
( GenTokenParser(TokenParser)
, braces
, charLiteral
, commaSep
, commaSep1
, commentEnd
, commentLine
, commentStart
, identLetter
, identStart
, identifier
, integer
, makeTokenParser
, nestedComments
, opLetter
, opStart
, parens
, reserved
, reservedNames
, reservedOp
, reservedOpNames
, semi
, semiSep
, stringLiteral
, whiteSpace
)
-- | Token-level language definition for VYPe15: C-style block and line
-- comments (not nested), C-like identifiers starting with a letter or
-- underscore, and the operator/keyword sets below.
def :: LanguageDef st
def = emptyDef
    { commentStart = "/*"
    , commentEnd = "*/"
    , commentLine = "//"
    , nestedComments = False
    , identStart = letter <|> char '_'
    , identLetter = alphaNum <|> char '_'
    , opStart = oneOf "=+-*/%<>!&|"
    , opLetter = oneOf "=+-*/%<>!&|"
    , reservedOpNames = operators
    , reservedNames = names
    }
  where
    -- All operators recognised by the lexer.
    operators =
        [ "=", "+", "-", "*", "/", "%", "<", ">", "<=", ">=", "==", "!=", "&&"
        , "||", "!"
        ]
    -- Language keywords plus the additional reserved words.
    names = ["if", "else", "return", "while", "string", "char", "int", "void"]
        <> vypeReserved
    -- | These are reserved keywords according to the project specification
    -- (Chapter 3.1)
    vypeReserved = ["break", "continue", "for", "short", "unsigned"]
-- | Top-level token parsers for VYPe15, extracted from the parsec token
-- parser built from 'def'. The record pattern binds each field under an
-- @m_@-prefixed name for use by the rest of the parser.
TokenParser { parens = m_parens
            , braces = m_braces
            , identifier = m_identifier
            , commaSep1 = m_commaSep1
            , commaSep = m_commaSep
            , semi = m_semi
            , reservedOp = m_reservedOp
            , reserved = m_reserved
            , semiSep = m_semiSep
            , whiteSpace = m_whiteSpace
            , integer = m_integer
            , stringLiteral = m_stringLit
            , charLiteral = m_charLit
            } = makeTokenParser def
|
Tr1p0d/VYPe15
|
src/VYPe15/Internal/Lexer.hs
|
bsd-3-clause
| 2,030
| 0
| 8
| 614
| 452
| 290
| 162
| 65
| 1
|
-- | The number triangle from Project Euler problem 18, one row per list
-- (row n has n+1 entries).
graph :: [[Integer]]
graph =
    [ [75]
    , [95,64]
    , [17,47,82]
    , [18,35,87,10]
    , [20,04,82,47,65]
    , [19,01,23,75,03,34]
    , [88,02,77,73,07,63,67]
    , [99,65,04,28,06,16,70,92]
    , [41,41,26,56,83,40,80,70,33]
    , [41,48,72,33,47,32,37,16,94,29]
    , [53,71,44,65,25,43,91,52,97,51,14]
    , [70,11,33,28,77,73,17,78,39,68,17,57]
    , [91,71,52,38,17,14,91,43,58,50,27,29,48]
    , [63,66,04,68,89,53,67,30,73,16,69,87,40,31]
    , [04,62,98,27,23,09,70,98,73,93,38,53,60,04,23] ]
-- | Given a triangle row and the best path totals for the row above it,
-- compute the best total achievable at each position of this row. Each
-- position has up to two parents in the row above; a missing parent at
-- either edge is modelled by a 0 sentinel before taking the maximum.
calc_max_path_totals_for_row :: [Integer] -> [Integer] -> [Integer]
calc_max_path_totals_for_row row prev_row =
    zipWith max viaOneParent viaOtherParent
  where
    -- totals pairing each cell (except the last) with the parent above it
    viaOneParent :: [Integer]
    viaOneParent = zipWith (+) (init row) prev_row ++ [0]
    -- totals pairing each cell (except the first) with the other parent
    viaOtherParent :: [Integer]
    viaOtherParent = [0] ++ zipWith (+) (tail row) prev_row
-- parallel to `graph`
-- | Entry (r,i) is the best path total from the apex down to cell i of
-- row r. Built by lazy self-reference: the apex row seeds the list and
-- each further row is derived from the previous row of totals.
max_path_totals :: [[Integer]]
max_path_totals = (:) (head graph) $
    zipWith
        calc_max_path_totals_for_row
        (tail graph)
        max_path_totals
-- | Print the maximum path sum: the largest total in the bottom row.
main :: IO ()
main = print (maximum (last max_path_totals))
|
bgwines/project-euler
|
src/solved/problem18.hs
|
bsd-3-clause
| 1,007
| 15
| 11
| 150
| 633
| 396
| 237
| 32
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.STM.TSem
-- Copyright : (c) The University of Glasgow 2012
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- 'TSem': transactional semaphores.
--
-- @since 2.4.2
-----------------------------------------------------------------------------
{-# LANGUAGE DeriveDataTypeable #-}
module Control.Concurrent.STM.TSem (
TSem, newTSem, waitTSem, signalTSem
) where
import Control.Concurrent.STM
import Control.Monad
import Data.Typeable
-- | 'TSem' is a transactional semaphore. It holds a certain number
-- of units, and units may be acquired or released by 'waitTSem' and
-- 'signalTSem' respectively. When the 'TSem' is empty, 'waitTSem'
-- blocks.
--
-- Note that 'TSem' has no concept of fairness, and there is no
-- guarantee that threads blocked in `waitTSem` will be unblocked in
-- the same order; in fact they will all be unblocked at the same time
-- and will fight over the 'TSem'. Hence 'TSem' is not suitable if
-- you expect there to be a high number of threads contending for the
-- resource. However, like other STM abstractions, 'TSem' is
-- composable.
--
-- @since 2.4.2
-- Invariant: the counter never drops below zero — 'waitTSem' retries
-- while the count is <= 0 before decrementing.
newtype TSem = TSem (TVar Int)
  deriving (Eq, Typeable)
-- | Construct a 'TSem' holding the given initial number of units.
newTSem :: Int -> STM TSem
newTSem initial = do
  counter <- newTVar initial
  return (TSem counter)
-- | Acquire one unit, retrying (blocking the transaction) while no
-- units are available. The decremented count is written strictly.
waitTSem :: TSem -> STM ()
waitTSem (TSem t) = do
  units <- readTVar t
  if units <= 0
    then retry
    else writeTVar t $! units - 1
-- | Release one unit back to the semaphore, waking any transactions
-- blocked in 'waitTSem'. The incremented count is written strictly.
signalTSem :: TSem -> STM ()
signalTSem (TSem t) = do
  units <- readTVar t
  let released = units + 1
  writeTVar t $! released
|
gridaphobe/packages-stm
|
Control/Concurrent/STM/TSem.hs
|
bsd-3-clause
| 1,692
| 0
| 9
| 303
| 245
| 140
| 105
| 19
| 1
|
module Day6 where
import Test.Hspec
import Data.List (transpose, sortOn)
import Utils
-- utils
-- | The letters 'a'..'z' sorted by `count` applied to the given string —
-- presumably occurrence count, i.e. ascending frequency, so 'last' picks
-- the most common letter and 'head' the least common.
-- NOTE(review): `count` comes from Utils; confirm its signature there.
countString s = sortOn (flip count s) ['a' .. 'z']
-- FIRST problem: the last letter of each column's frequency ordering.
day rows = [ last (countString column) | column <- transpose rows ]
-- SECOND problem: the first letter of each column's frequency ordering.
day' rows = [ head (countString column) | column <- transpose rows ]
-- tests and data
-- | Regression tests pinning the puzzle answers for the stored input file.
test = hspec $ do
    describe "firstProblem" $ do
        it "works" $ do
            day <$> content `shouldReturn` "wkbvmikb"
    describe "secondProblem" $ do
        it "works" $ do
            day' <$> content `shouldReturn` "evakwaga"
-- | Raw puzzle input read from disk.
fileContent = readFile "content/day6"

-- | Puzzle input split into lines.
content = fmap lines fileContent
|
guibou/AdventOfCode2016
|
src/Day6.hs
|
bsd-3-clause
| 582
| 0
| 15
| 121
| 186
| 98
| 88
| 16
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
module ControllerPostActions where
import Control.Monad
import Control.Monad.Trans
import Data.List (isInfixOf)
import qualified Data.ByteString.Char8 as B
import Control.Monad.Error
import System.FilePath (takeFileName)
import HAppS.Server
import HAppS.Helpers
import StateStuff
import View
import Misc
import ControllerMisc
import ControllerGetActions
import FromDataInstances
-- | Login handler: checks the submitted credentials against the stored
-- (unpacked) password for the user and, on success, presumably starts a
-- session via startsess' and redirects to the referer page.
loginPage = loginPage' authUser (startsess' getLoginReferer)
    where authUser = authUser' getUserPassword
          getUserPassword name = return . maybe Nothing (Just . B.unpack . password)
                                 =<< query (GetUserInfos name)
-- After login, redirect back to the page the user came from. Falls back
-- to the application home page when the referer header cannot be parsed,
-- or when it points at the logout page (so a fresh login is not
-- immediately undone).
getLoginReferer :: Request -> String
getLoginReferer rq
    | Right ref <- etRef, not ("logout" `isInfixOf` ref) = ref
    | otherwise = homepage
  where
    homepage = getAppUrl "" rq
    etRef = getHeaderVal "referer" rq
-- | Generic login page: parses 'UserAuthInfo' from the request, checks
-- the credentials with the supplied authenticator, and on success hands
-- the user name to the supplied session action; otherwise re-renders the
-- home page with a login error message.
loginPage' :: (UserName -> B.ByteString -> WebT IO Bool)
           -> (RenderGlobals -> UserName -> WebT IO Response)
           -> RenderGlobals -> ServerPartT IO Response
loginPage' auth sessW' rglobs =
    withData $ \(UserAuthInfo user pass) ->
        [ ServerPartT $ \_ -> do
            loginOk <- auth user pass
            if loginOk
                then sessW' rglobs user
                else errW "login error: invalid username or password"
        ]
    where errW msg = return $ tutlayoutU rglobs [("loginerrormsg",msg)] "home"
{-
-- check if a username and password is valid. If it is, return the user as successful monadic value
-- otherwise fail monadically
authUser :: Monad m => UserName -> B.ByteString -> WebT IO (m UserInfos)
authUser name pass = do
mbUser <- query (GetUserInfos name)
case mbUser of
Nothing -> return . fail $ "login failed"
(Just u) -> do
p <- return . password $ u
-- scramblepass works with lazy bytestrings, maybe that's by design. meh, leave it for now
if p == scramblepass pass
then return . return $ u
else return . fail $ "login failed"
-}
-- to do: make it so keeps your current page if you login/logout
-- probably modify RenderGlobals to keep track of that bit of state
{-
startsess :: RenderGlobals -> UserName -> WebT IO Response
startsess (RenderGlobals ts _) user = do
key <- update $ NewSession (SessionData user)
addCookie (3600) (mkCookie "sid" (show key))
let newRGlobs = RenderGlobals ts (Just user)
(return . tutlayoutU newRGlobs [] ) "home"
-}
-- | Check whether the supplied username/password pair is valid: fetch the
-- stored password with the supplied lookup action and compare it against
-- the scrambled form of the given password. A missing user yields False.
-- NOTE(review): scramblepass is used without a seed here; a seeded scheme
-- was discussed on haskell-cafe and remains TODO.
authUser' :: (UserName -> WebT IO (Maybe String) ) -> UserName -> B.ByteString -> WebT IO Bool
authUser' getpwd name pass = do
    mbStored <- getpwd name
    case mbStored of
        Nothing -> return False
        Just stored -> return (stored == scramblepass (B.unpack pass))
{-
changePasswordSP rglobs = withData $ \(ChangeUserInfo oldpass newpass1 newpass2) -> [ ServerPartT $ \rq -> do
if newpass1 == newpass2
then do mbL <- liftIO $ getmbLoggedInUser rq
maybe
(errW "Not logged in" rq)
(\u -> do mbUis <- query (GetUserInfos u)
case mbUis of
Nothing -> errW ("bad username: " ++ (B.unpack . unusername $ u)) rq
Just uis -> do update $ ChangePassword u oldpass newpass1
return $ tutlayoutU rglobs [] "accountsettings-changed" )
(mbL :: Maybe UserName)
else errW "new passwords did not match" rq
]
where errW msg rq = ( return . tutlayoutU rglobs [("errormsgAccountSettings", msg)] ) "changepassword"
-}
-- | Handle the change-password form: requires a logged-in user, checks
-- that the two new passwords agree, then updates the stored password and
-- renders a confirmation page. Any error re-renders the change-password
-- page with a message.
changePasswordSP rglobs = withData $ \(ChangeUserInfo oldpass newpass1 newpass2) -> [ ServerPartT $ \rq -> do
    etRes <- runErrorT $ getLoggedInUserInfos rglobs
    case etRes of
        Left e -> errW e
        Right (u,_) -> do
            if newpass1 /= newpass2
                then errW "new passwords did not match"
                else do update $ ChangePassword u oldpass newpass1
                        return $ tutlayoutU rglobs [] "accountsettings-changed"
    ]
    where errW msg = ( return . tutlayoutU rglobs [("errormsgAccountSettings", msg)] ) "changepassword"
-- | Handle the edit-consultant-profile form: requires a logged-in user,
-- merges the submitted contact/blurb/listing fields into the existing
-- profile and, when image bytes were uploaded, writes the avatar to disk
-- and stores its path. Finally re-renders the profile edit page.
processformEditConsultantProfile rglobs =
    withData $ \fd@(EditUserProfileFormData fdContact fdBlurb fdlistAsC fdimagecontents) -> [ ServerPartT $ \rq -> do
        case (return . sesUser =<< mbSession rglobs) of
            Nothing -> errW "Not logged in"
            Just unB -> do
                mbUP <- query $ GetUserProfile unB
                case mbUP of
                    Nothing -> errW "error retrieving user infos"
                    Just (UserProfile pContact pBlurb listasC pAvatar) -> do
                        -- Keep the old avatar unless new image bytes were posted.
                        up <- if B.null (fdimagecontents)
                                then return $ UserProfile fdContact fdBlurb fdlistAsC pAvatar
                                else do
                                    let avatarpath = writeavatarpath unB
                                    -- to do: verify this handles errors, eg try writing to someplace we don't have permission,
                                    -- or a filename with spaces, whatever
                                    liftIO $ writeFileForce avatarpath fdimagecontents
                                    return $ UserProfile fdContact fdBlurb fdlistAsC (B.pack avatarpath)
                        update $ SetUserProfile unB up
                        unServerPartT ( viewEditConsultantProfile rglobs) rq
        ]
    where errW msg = ( return . tutlayoutU rglobs [("errormsg", msg)] ) "errortemplate"
-- | Handle the edit-job form: requires a logged-in user and a non-blank
-- job name, stores the updated job record, and re-renders the job view.
processformEditJob :: RenderGlobals -> ServerPartT IO Response
processformEditJob rglobs@(RenderGlobals rq ts mbSess) =
    withData $ \(EditJob jn jbud jblu) -> [ ServerPartT $ \rq -> do
        case (return . sesUser =<< mbSess) of
            Nothing -> errW "Not logged in"
            -- Just olduser@(User uname p cp js) -> do
            Just uname -> do
                if null (B.unpack . unjobname $ jn)
                    then errW "error, blank job name"
                    else do
                        update $ SetJob uname jn (Job (B.pack jbud) (B.pack jblu))
                        unServerPartT ( viewEditJob uname jn rglobs) rq
        ]
    where errW msg = ( return . tutlayoutU rglobs [("errormsg", msg)] ) "errortemplate"
-- | Handle the new-job form: requires a logged-in user and a non-blank
-- job name, then inserts the job. A "duplicate key" failure from the
-- insert is reported as a duplicate job name, any other failure as a
-- generic insert error; on success the user's job-post page is rendered.
processformNewJob rglobs@(RenderGlobals rq ts mbSess) =
    withData $ \(NewJobFormData jn newjob) -> [ ServerPartT $ \rq -> do
        case (return . sesUser =<< mbSess) of
            Nothing -> errW "Not logged in"
            Just user -> do
                if null (B.unpack . unjobname $ jn)
                    then errW "error, blank job name"
                    else do
                        res <- update (AddJob user jn newjob)
                        case res of
                            -- Fixed: the original matched the Bool result with a
                            -- case whose second alternative was the *pattern*
                            -- `otherwise` — a fresh variable binding that matches
                            -- anything, not the Prelude guard. It worked only by
                            -- accident; an if/then/else states the intent.
                            Left err -> if "duplicate key" `isInfixOf` lc err
                                then errW "duplicate job name"
                                else errW "error inserting job"
                            Right () -> unServerPartT ( pageMyJobPosts rglobs ) rq
        ]
    where errW msg = ( return . tutlayoutU rglobs [("errormsg", msg)] ) "errortemplate"
{-
newUserPage :: RenderGlobals -> ServerPartT IO Response
newUserPage rglobs =
withData $ \(NewUserInfo user pass1 pass2) ->
[ ServerPartT $ \_ ->
do userExists <- query $ IsUser user
newuser user pass1 pass2 userExists
]
where errW msg = ( return . tutlayoutU rglobs [("errormsgRegister", msg)] ) "register"
newuser :: UserName -> B.ByteString -> B.ByteString -> Bool -> WebT IO Response
newuser u@(UserName us) pass1 pass2 userExists
| pass1 /= pass2 = errW "passwords did not match"
| null . B.unpack $ pass1 = errW "bad password"
| null . B.unpack $ us = errW "bad username"
| userExists = errW "User already exists"
| otherwise = do update $ AddUser u $ scramblepass pass1
mbUis <- query $ GetUserInfos u
case mbUis of
Nothing -> errW "newUserPage, update failed"
Just uis -> startsess rglobs u
-}
-- | Handle the registration form: validates the submitted passwords and
-- username availability, creates the user, and starts a session that
-- redirects to the "registered" page. Validation failures re-render the
-- registration page with an error message.
newUserPage :: RenderGlobals -> ServerPartT IO Response
newUserPage rglobs =
    withData $ \(NewUserInfo user (pass1 :: B.ByteString) pass2) ->
        [ ServerPartT $ \rq -> do
            etRes <- runErrorT $ do
                setupNewUser (NewUserInfo user (pass1 :: B.ByteString) pass2)
            case etRes of
                Left err -> return $ tutlayoutU rglobs [("errormsgRegister", err)] "register"
                Right () -> startsess' (getAppUrl "tutorial/registered") rglobs user
        ]
    where
        -- | Validate the registration data, throwing a user-facing message
        -- in ErrorT on any problem; on success the user is created.
        setupNewUser :: NewUserInfo -> ErrorT [Char] (WebT IO) ()
        setupNewUser (NewUserInfo user (pass1 :: B.ByteString) pass2) = do
            if B.null pass1 || B.null pass2
                then throwError "blank password"
                else return ()
            if pass1 /= pass2
                -- Note: `return . Left` is not interchangeable with
                -- throwError here; throwError already wraps the Left.
                then throwError "passwords don't match"
                else return ()
            nameTakenHAppSState <- query $ IsUser user
            if nameTakenHAppSState
                then throwError "name taken"
                else return ()
            addUserVerifiedPass user pass1 pass2
-- | Create the user with a scrambled password. The password-equality
-- check here duplicates the one in the caller but is kept as a final
-- guard before the state update.
addUserVerifiedPass :: UserName -> B.ByteString -> B.ByteString -> ErrorT String (WebT IO) ()
addUserVerifiedPass user pass1 pass2 = do
    ErrorT $ newuser user pass1 pass2
    where
        newuser :: UserName -> B.ByteString -> B.ByteString -> WebT IO (Either String ())
        newuser u@(UserName us) pass1 pass2 -- userExists
            | pass1 /= pass2 = return . Left $ "passwords did not match"
            | otherwise = update $ AddUser u $ B.pack $ scramblepass (B.unpack pass1)
|
tphyahoo/happs-tutorial
|
src/ControllerPostActions.hs
|
bsd-3-clause
| 9,876
| 0
| 32
| 2,839
| 1,893
| 950
| 943
| 128
| 5
|
-- Champernowne's constant: the digits of 1, 2, 3, ... written in order.
x :: String
x = concatMap show [1 ..]

-- The 1st, 10th, ..., 1,000,000th digits (0-based indices 10^k - 1).
y :: String
y = [ x !! (10 ^ k - 1) | k <- [0 .. 6] ]

-- Product of those seven digits (Project Euler problem 40).
result :: Int
result = product [ read [d] | d <- y ]
-- | Print the digit product.
main :: IO ()
main = putStrLn (show result)
|
stulli/projectEuler
|
eu40.hs
|
bsd-3-clause
| 171
| 0
| 10
| 35
| 117
| 64
| 53
| 5
| 1
|
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 704
{-# LANGUAGE Trustworthy #-}
#endif
--------------------------------------------------------------------------------
-- |
-- Module      :  System.USB.DeviceHandling
-- Copyright : (c) 2009–2014 Bas van Dijk
-- License : BSD3 (see the file LICENSE)
-- Maintainer : Bas van Dijk <v.dijk.bas@gmail.com>
--
-- The module provides functionality for opening, closing and configuring USB
-- devices.
--
--------------------------------------------------------------------------------
module System.USB.DeviceHandling
( -- * Opening & closing devices
DeviceHandle
, openDevice
, closeDevice
, withDeviceHandle
, getDevice
-- * Getting & setting the configuration
, ConfigValue
, getConfig
, setConfig
-- * Claiming & releasing interfaces
, InterfaceNumber
, claimInterface
, releaseInterface
, withClaimedInterface
-- * Setting interface alternate settings
, InterfaceAltSetting
, setInterfaceAltSetting
-- * Clearing & Resetting devices
, clearHalt
, resetDevice
-- * USB kernel drivers
, setAutoDetachKernelDriver
, kernelDriverActive
, detachKernelDriver
, attachKernelDriver
, withDetachedKernelDriver
) where
import System.USB.Base
|
alex1818/usb
|
System/USB/DeviceHandling.hs
|
bsd-3-clause
| 1,319
| 0
| 4
| 291
| 99
| 73
| 26
| 25
| 0
|
{-# LANGUAGE MultiParamTypeClasses,
FlexibleInstances,
OverloadedStrings #-}
module Text.KIF where
import Numeric
import Text.Twine
-- | Parsed result of a whole KIF test run.
data KIFTest = KIFTest {
    noOfScenarios :: Int,      -- ^ scenarios executed
    noOfFailures :: Int,       -- ^ scenarios that failed
    testDuration :: Float,     -- ^ total duration, units as reported by KIF
    scenarios :: [KIFScenario]
  }
  deriving (Eq, Show)

-- | One scenario: its index, description, and its executed steps.
data KIFScenario = KIFScenario {
    number :: Int,
    scenarioDescription :: String,
    noOfSteps :: Int,
    scenarioDuration :: Float,
    scenarioPassed :: Bool,
    steps :: [KIFStep]
  }
  deriving (Eq, Show)

-- | One step: either a pass, or a failure carrying the failure reason.
data KIFStep = Pass {
    stepDescription :: String,
    stepDuration :: Float
  }
  | Fail {
    stepDescription :: String,
    reason :: String,
    stepDuration :: Float
  }
  deriving (Eq, Show)
-- | Whether a step succeeded ('Pass') rather than failed ('Fail').
stepPassed :: KIFStep -> Bool
stepPassed step =
  case step of
    Pass {} -> True
    Fail {} -> False
-- | Apply an escaping/translation function to every user-visible string
-- of a test report (scenario/step descriptions and failure reasons).
mapStrings :: (String -> String) -> KIFTest -> KIFTest
mapStrings escape test =
  test { scenarios = map (mapScenarioStrings escape) (scenarios test) }
-- | Apply the string function to a scenario's description and its steps.
mapScenarioStrings :: (String -> String) -> KIFScenario -> KIFScenario
mapScenarioStrings escape scenario =
  scenario
    { scenarioDescription = escape (scenarioDescription scenario)
    , steps = map (mapStepStrings escape) (steps scenario)
    }
-- | Apply the string function to a step's description (and, for failed
-- steps, to its reason as well).
mapStepStrings :: (String -> String) -> KIFStep -> KIFStep
mapStepStrings escape step =
  case step of
    Pass description duration -> Pass (escape description) duration
    Fail description why duration -> Fail (escape description) (escape why) duration
-- | Template properties exposed for a whole test run.
-- NOTE(review): there is no catch-all case; an unknown property name hits
-- a pattern-match failure — confirm the twine library guards against this.
instance TemplateInterface IO KIFTest where
  property "noOfScenarios" = return . bind . noOfScenarios
  property "noOfFailures" = return . bind . noOfFailures
  property "duration" = return . bind . testDuration
  property "scenarios" = return . bind . scenarios

-- | Template properties exposed for a scenario; "failed" is simply the
-- negation of the stored passed flag.
instance TemplateInterface IO KIFScenario where
  property "number" = return . bind . number
  property "description" = return . bind . scenarioDescription
  property "noOfSteps" = return . bind . noOfSteps
  property "duration" = return . bind . scenarioDuration
  property "passed" = return . bind . scenarioPassed
  property "failed" = return . bind . not . scenarioPassed
  property "steps" = return . bind . steps
-- | Template properties of a single step. "reason" is only meaningful for
-- failed steps; requesting it on a passed step is a programming error.
instance TemplateInterface IO KIFStep where
  property "description" = return . bind . stepDescription
  property "duration" = return . bind . stepDuration
  property "reason" =
    -- Fixed: the error message previously said "Failed step has no reason",
    -- but this branch fires for *passed* steps (which carry no reason field).
    \step -> if stepPassed step then error "Passed step has no reason"
             else return (bind (reason step))
  property "passed" = return . bind . stepPassed
  property "failed" = return . bind . not . stepPassed
-- | Floats render in fixed-point decimal notation with full precision
-- (no exponent), via 'showFFloat Nothing'.
instance TemplateInterface IO Float where
  makeString x = return (showFFloat Nothing x "")
|
plancalculus/kif-parser
|
Text/KIF.hs
|
bsd-3-clause
| 2,838
| 0
| 12
| 609
| 802
| 424
| 378
| 70
| 1
|
{-# LANGUAGE TemplateHaskell, TypeFamilies #-}
module Test.LazySmallCheck2012.TH (deriveSerial, deriveArgument, viewPretty, viewRaw) where
import Control.Applicative
import Control.Monad
import Data.Generics.Uniplate.Data
import Data.List
import Language.Haskell.TH
import Test.LazySmallCheck2012.Core
import Test.LazySmallCheck2012.FunctionalValues
-- Type-level template holes
-- | Placeholder type used inside the TH instance templates; every
-- occurrence is rewritten to the real type before declarations are returned.
data THole = THole

-- Expression-level template holes
-- | Placeholder expression; must be replaced during template rewriting,
-- hence the error if it ever survives to runtime.
thole = error "Haven't you replaced this yet?"
-- Utility functions
-- | Flatten a TH constructor into its name and the list of its field
-- types. Plain, record and infix constructors are supported.
simpleCon (NormalC name fields) = (name, map snd fields)
simpleCon (RecC name fields) = (name, [ fieldType | (_, _, fieldType) <- fields ])
simpleCon (InfixC lhs name rhs) = (name, map snd [lhs, rhs])
simpleCon _ = error "simpleCon: Unsupported datatype"
-- | Deriving a `Serial` instance: the generated `series` is the union
-- (`\/`) of one @consN@ application per constructor. Works by splicing a
-- template instance for 'THole' and rewriting its head, context and body
-- with Uniplate transformations.
deriveSerial :: Name -> DecsQ
deriveSerial tname = do
    TyConI (DataD _ _ tvars tconstrs _) <- reify tname
    when (null tconstrs) $ fail "deriveSerial: Empty datatype."
    template <- [d| instance Serial THole where
                        series = thole
                  |]
    -- Change instance name: apply the class to the fully saturated type.
    let instName (AppT serial _) = appT (return serial) $
            foldl1 appT (conT tname : [ varT tv | PlainTV tv <- tvars ])
        instName x = return x
    -- Change instance contexts: one Serial constraint per type variable.
    let instCtx (InstanceD _ name@(AppT (ConT serial) _) decls) = instanceD
            (return [ ClassP serial [VarT tv] | PlainTV tv <- tvars ])
            (return name) (map return decls)
        instCtx x = return x
    -- Change instance function body: consN applied to each constructor,
    -- where N is its arity, all joined with \/.
    let union xs ys = [| $xs \/ $ys |]
    let body = normalB $ foldr1 union
            [ appE (varE $ mkName $ "cons" ++ show (length ts)) (conE c)
            | (c, ts) <- map simpleCon tconstrs ]
    let instFunc (ValD seriesF _ _) = valD (return seriesF) body []
        instFunc x = return x
    -- Perform transformations
    transformBiM instName template >>=
        transformBiM instCtx >>=
        transformBiM instFunc
-- | Deriving an `Argument` instance: represents the datatype as a nested
-- Either/pair sum-of-products over 'BaseCast' fields and generates the
-- @toBase@/@fromBase@ conversions, again by rewriting a 'THole' template
-- instance with Uniplate transformations.
deriveArgument :: Name -> DecsQ
deriveArgument tname = do
    TyConI (DataD _ _ tvars tconstrs _) <- reify tname
    let tconstrs' = map simpleCon tconstrs
    -- Fixed: the failure message previously said "deriveSerial" — a
    -- copy-paste from deriveSerial above.
    when (null tconstrs) $ fail "deriveArgument: Empty datatype."
    template <- [d| instance Argument THole where
                        type Base THole = THole
                        toBase _ = thole
                        fromBase _ = thole
                  |]
    -- Change instance name: apply the class to the fully saturated type.
    let tfullname = foldl1 appT (conT tname : [ varT tv | PlainTV tv <- tvars ])
    let instName (AppT argument _) = appT (return argument) tfullname
        instName x = return x
    -- Change instance contexts: one Argument constraint per type variable.
    let instCtx (InstanceD _ name@(AppT (ConT argument) _) decls) = instanceD
            (return [ ClassP argument [VarT tv] | PlainTV tv <- tvars ])
            (return name) (map return decls)
        instCtx x = return x
    -- Change instance of Base: right-nested Either of right-nested pairs,
    -- each field wrapped in BaseCast, each level terminated by ().
    let unitT = [t| () |]
    let sumT t0 t1 = [t| Either $(t0) $(t1) |]
    let proT t0 t1 = [t| ($(t0), $(t1)) |]
    let instBase (TySynInstD base (TySynEqn _ _)) =
            tySynInstD base
                (tySynEqn [tfullname]
                    (foldr sumT unitT
                        [ foldr proT unitT [ [t| BaseCast $(return t) |] | t <- ts ]
                        | (c, ts) <- tconstrs' ]))
        instBase x = return x
    -- Change instance for toBase: the i-th constructor maps to i Rights
    -- around a Left of the tuple of casted fields.
    let proE x y = [| ($x, $y) |]
    let toBaseE name = [| toBaseCast $(varE name) |]
    let buildBaseE 0 vs = [| Left $(foldr (proE . toBaseE) [| () |] vs) |]
        buildBaseE n vs = [| Right $(buildBaseE (n - 1) vs) |]
    let instTo (FunD to _) | "toBase" `isSuffixOf` show to = funD to
            [ do vs <- mapM (const $ newName "x") ts
                 let lhs = ConP c $ map VarP vs
                 clause [return lhs] (normalB (buildBaseE i vs)) []
            | (i, (c, ts)) <- zip [0..] tconstrs' ]
        instTo x = return x
    -- Change instance for fromBase: the mirror image, pattern matching on
    -- the nested Either/pair structure and rebuilding the constructor.
    let proP x y = conP '(,) [x, y]
    let fromBaseE name = [| fromBaseCast $(varE name) |]
    let buildBaseP 0 vs = conP 'Left [ foldr (proP . varP) wildP vs ]
        buildBaseP n vs = conP 'Right [ buildBaseP (n - 1) vs ]
    let instFrom (FunD from _) | "fromBase" `isSuffixOf` show from = funD from
            [ do vs <- mapM (const $ newName "x") ts
                 let rhs = foldl1 appE (conE c : map fromBaseE vs)
                 clause [buildBaseP i vs] (normalB rhs) []
            | (i, (c, ts)) <- zip [0..] tconstrs' ]
        instFrom x = return x
    transformBiM instName template >>=
        transformBiM instCtx >>=
        transformBiM instFrom >>=
        transformBiM instTo >>=
        transformBiM instBase
-- | Run a deriving function on a type name and return the generated
-- declarations, rendered with 'show', as a string expression.
viewRaw :: (Name -> DecsQ) -> Name -> ExpQ
viewRaw derive name = derive name >>= stringE . show
-- | Like 'viewRaw', but pretty-prints the generated declarations.
viewPretty :: (Name -> DecsQ) -> Name -> ExpQ
viewPretty derive name = derive name >>= stringE . pprint
|
UoYCS-plasma/LazySmallCheck2012
|
Test/LazySmallCheck2012/TH.hs
|
bsd-3-clause
| 4,616
| 6
| 22
| 1,234
| 1,641
| 832
| 809
| -1
| -1
|
import System.IO (hSetEcho,stdin)
import Text.Read (readMaybe)
import Network.Transport.InMemory (createTransport)
import Control.Distributed.Process.Node (LocalNode,newLocalNode,initRemoteTable,forkProcess,runProcess)
import Control.Distributed.Process (Process,ProcessId,send,receiveWait,match,getSelfPid,expectTimeout)
import Control.Monad (forever)
import Control.Monad.Trans (liftIO)
import Control.Monad.Reader (ReaderT,runReaderT,ask)
import Data.IORef(newIORef,writeIORef,readIORef)
main :: IO ()
main = do
    -- In-memory transport: everything runs inside this single OS process.
    node <- createTransport >>= (\t -> newLocalNode t initRemoteTable)
    mpid <- forkMain node
    hSetEcho stdin False
    runReaderT clientLoop (node,mpid)
    where
        -- | Read lines forever: numeric input is sent as an Int request,
        -- anything else as a String request; the reply (or its absence,
        -- on timeout) is printed.
        clientLoop :: IO' ()
        clientLoop = do
            msg <- liftIO getLine
            case readMaybe msg :: Maybe Int of
                Nothing -> do
                    mmsg' <- askString msg
                    case mmsg' of
                        Just msg' -> liftIO $ putStrLn $ "String: " ++ msg ++ " Result: " ++ msg'
                        Nothing -> liftIO $ putStrLn $ "String: " ++ msg ++ " NoResult"
                Just n -> do
                    mn' <- askInt n
                    case mn' of
                        Just n' -> liftIO $ putStrLn $ "Int: " ++ show n ++ " Result: " ++ show n'
                        Nothing -> liftIO $ putStrLn $ "Int: " ++ show n ++ " NoResult"
            clientLoop
type IO' = ReaderT (LocalNode,ProcessId) IO
-- | Send a string to the main process and wait up to 1s (expectTimeout
-- takes microseconds) for its reply. The reply is smuggled out of the
-- Process monad through an IORef because 'runProcess' returns ().
-- Returns Nothing on timeout.
askString :: String -> IO' (Maybe String)
askString msg = do
    (node,mpid) <- ask
    liftIO $ do
        rr <- newIORef Nothing
        runProcess node $ do
            self <- getSelfPid
            send mpid (self,msg)
            r <- expectTimeout 1000000 -- must have timeout
            liftIO $ writeIORef rr r
        -- Fixed: was `r <- readIORef rr; return r`, a redundant bind.
        readIORef rr
-- | Send an Int to the main process and wait up to 1s (expectTimeout
-- takes microseconds) for its reply, via the same IORef hand-off as
-- 'askString'. Returns Nothing on timeout.
askInt :: Int -> IO' (Maybe Int)
askInt n = do
    (node,mpid) <- ask
    liftIO $ do
        rr <- newIORef Nothing
        runProcess node $ do
            self <- getSelfPid
            send mpid (self,n)
            r <- expectTimeout 1000000 -- must have timeout
            liftIO $ writeIORef rr r
        -- Fixed: was `r <- readIORef rr; return r`, a redundant bind.
        readIORef rr
-- | Spawn the answering process on the given node.
forkMain :: LocalNode -> IO ProcessId
forkMain localNode = forkProcess localNode mainProcess
-- | Server loop: forever answer either a (pid, String) request by
-- wrapping the string in asterisks, or a (pid, Int) request with 2n+1.
mainProcess :: Process ()
mainProcess = forever $ receiveWait [match pString,match pInt]
    where
        pString :: (ProcessId,String) -> Process ()
        pString (sender,msg) = send sender $ "*" ++ msg ++ "*"
        pInt :: (ProcessId,Int) -> Process ()
        pInt (sender,n) = send sender $ n * 2 + 1
|
mitsuji/exp-CloudHaskell
|
app/ConsoleAsk.hs
|
bsd-3-clause
| 2,350
| 1
| 19
| 595
| 852
| 425
| 427
| 63
| 4
|
{-# LANGUAGE FlexibleContexts #-}
module PatternRecogn.Gauss.Classify(
ClassificationParam,
calcClassificationParams,
classify,
infoStringForParam
) where
import Data.List( intercalate, maximumBy )
import Data.Ord( comparing )

import PatternRecogn.Gauss.Types
import PatternRecogn.Gauss.Utils
import PatternRecogn.Lina
import PatternRecogn.Types
import PatternRecogn.Utils
type ClassificationParam = [(Class, Label)]
-----------------------------------------------------------------
-- general gauss classification:
-----------------------------------------------------------------

-- | Estimate one Gaussian per labelled training bundle: the class centre
-- is the 'average' of the sample rows and the covariance is estimated
-- around that centre via cov_SAFE. Each bundle's label is kept alongside.
calcClassificationParams :: TrainingDataBundled -> ClassificationParam
calcClassificationParams trainingData =
    map `flip` trainingData $ mapToFst $
    \set ->
        let
            center = average $ toRows set
        in
            Class{
                class_min = center,
                class_cov = cov_SAFE center set
            }
-- | Classify every row of the input matrix: each sample is assigned the
-- label of the class whose `mahalanobis` score is largest.
classify :: ClassificationParam -> Matrix -> VectorOf Label
classify param =
    fromList
    .
    map classifySingleVec
    .
    toRows
    where
        classifySingleVec :: Vector -> Label
        classifySingleVec vec =
            snd $
            -- tidied: `comparing fst` replaces the hand-rolled comparator
            -- \x y -> fst x `compare` fst y
            maximumBy (comparing fst) $
            map `flip` param $ mapToFst $
            \Class{ class_min = center, class_cov = cov } ->
                mahalanobis center cov vec
-- | Human-readable summary of the fitted parameters: one paragraph per
-- class, listing its label and the sizes of its centre and covariance.
infoStringForParam :: ClassificationParam -> String
infoStringForParam =
    intercalate "\n"
    .
    map infoStringForSingleClass
    where
        infoStringForSingleClass (Class{ class_min = center, class_cov = cov },label) =
            (concat ["Label ", show label, ":"] ++) $
            intercalate "\n" $
            [ concat $ ["center size:", show $ size center]
            , concat $ ["covariance size:", show $ size cov]
            ]
|
EsGeh/pattern-recognition
|
src/PatternRecogn/Gauss/Classify.hs
|
bsd-3-clause
| 1,607
| 50
| 16
| 273
| 456
| 252
| 204
| 46
| 1
|
{-
foo
bar
a) foo
foo
<ESC>a b) bar
bar
baa
-}
{-
foo
bar
* @foo
* @bar
<ESC>abaa
-}
{-
foo
bar
> foo
> bar
<ESC>abaa
-}
|
itchyny/vim-haskell-indent
|
test/comment/list.in.hs
|
mit
| 130
| 0
| 2
| 39
| 5
| 4
| 1
| 1
| 0
|
{- |
Module : ./FreeCAD/HetPrinter.hs
Description : print the HasCASL representation of FreeCAD terms
Copyright : (c) Robert Savu and Uni Bremen 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Robert.Savu@dfki.de
Stability : experimental
Portability : portable
Printing of the HasCASL specification of a FreeCAD document
-}
module FreeCAD.HetPrinter where
import FreeCAD.As
import Common.DocUtils
import Common.Doc
import Common.Id
-- | Pretty printing 'Double'
instance Pretty Double where
pretty = sidDoc . mkSimpleId . show
instance Pretty Vector3 where
pretty v = hcat [text "V", lparen, sepByCommas $ map pretty [x v, y v, z v], rparen]
instance Pretty Vector4 where
pretty v = hcat [text "Q", lparen, sepByCommas $ map pretty [q0 v, q1 v, q2 v, q3 v], rparen]
instance Pretty Placement where
pretty p1 =
hcat [text "Placement", lparen, sepByCommas [pretty $ position p1, pretty $ orientation p1], rparen]
-- | Pretty-print a 'BaseObject' as a functional term, e.g. @BBox(l,w,h)@.
--   Each constructor maps to a \"B\"-prefixed tag of the same name.
printBO :: BaseObject -> Doc
printBO (Box h w l) = hcat [text "BBox", lparen, sepByCommas [pretty l, pretty w, pretty h], rparen]
printBO (Cylinder a h r) = hcat [text "BCylinder" , lparen, sepByCommas [pretty a, pretty h, pretty r], rparen]
-- BUG FIX: the Sphere case previously printed the "BCylinder" tag
-- (copy-paste error); it now prints "BSphere", consistent with the
-- naming scheme of every other case.
printBO (Sphere a1 a2 a3 r) = hcat [text "BSphere" , lparen, sepByCommas [pretty a1, pretty a2, pretty a3, pretty r], rparen]
printBO (Cone a r1 r2 h) = hcat [text "BCone" , lparen, sepByCommas [pretty a, pretty r1, pretty r2, pretty h], rparen]
printBO (Torus a1 a2 a3 r1 r2) = hcat [text "BTorus" , lparen, sepByCommas [pretty a1, pretty a2, pretty a3, pretty r1, pretty r2], rparen]
printBO (Line a) = hcat [text "BLine" , lparen, pretty a, rparen]
printBO (Circle sa ea r) = hcat [text "BCircle" , lparen, sepByCommas [pretty sa, pretty ea, pretty r], rparen]
printBO (Rectangle h w) = hcat [text "BRectangle" , lparen, sepByCommas [pretty h, pretty w], rparen]
instance Pretty BaseObject where
pretty = printBO
-- | Pretty-print an 'Object' as a term over its (extended) sub-objects.
--
-- NOTE(review): the 'Section' case is commented out below, so this
-- function is partial if the 'Object' type still has a 'Section'
-- constructor — confirm against 'FreeCAD.As'.
printObject :: Object -> Doc
printObject (BaseObject bo) = pretty bo
printObject ( Cut eo1 eo2) = text "Cut" <+> hcat [lparen, sepByCommas [pretty eo1, pretty eo2], rparen]
printObject ( Common eo1 eo2) = text "Common" <+> hcat [lparen, sepByCommas [pretty eo1, pretty eo2], rparen]
printObject ( Fusion eo1 eo2) = text "Fusion" <+> hcat [lparen, sepByCommas [pretty eo1, pretty eo2], rparen]
-- printObject ( Section eo1 eo2) = text "Section" <+> brackets $ sepByCommas [pretty eo1, pretty eo2]
printObject ( Extrusion eo d) = text "Extrusion" <+> hcat [lparen, sepByCommas [pretty eo, pretty d], rparen]
instance Pretty Object where
pretty = printObject
printEO :: ExtendedObject -> Doc
printEO (Placed po) = pretty po
printEO (Ref s) = text s
printPO :: PlacedObject -> Doc
printPO (PlacedObject plc obj) = text "PObj = " <+> hcat [text "PObj", lparen, sepByCommas [pretty obj, pretty plc], rparen, text ";"]
-- | Assemble a complete HasCASL specification for printing: a fixed
-- header (logic declaration, library import, spec name), the
-- pretty-printed document contents listed under \"ops\", and a closing
-- \"end\" keyword.
--
-- NOTE(review): @map pretty a@ treats the 'Document' as a list of
-- printable items — presumably named objects; confirm against
-- 'FreeCAD.As.Document'.
printDoc :: String -> Document -> Doc
printDoc name a = vcat [header, vcat [text " ops", hcat [text " ", vcat $ map pretty a]], end]
    where
    header = vcat [logic, imports, specname]
    logic = text "logic HasCASL"
    imports = text "from HasCASL/Real3D/FreeCAD/FreeCAD get FCObject"
    specname = hcat [text "spec ", text name, text " = FCObject ", text "then"]
    end = text "end"
instance Pretty ExtendedObject where
pretty = printEO
instance Pretty PlacedObject where
pretty = printPO
instance Pretty NamedObject where
pretty no = hcat [pretty (name no), colon, pretty $ object no]
-- $+$
-- instance GetRange NamedObject
instance Pretty Sign where
pretty = pretty . objects
|
spechub/Hets
|
FreeCAD/HetPrinter.hs
|
gpl-2.0
| 3,575
| 0
| 12
| 680
| 1,300
| 667
| 633
| 53
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CognitoSync.SubscribeToDataset
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Subscribes to receive notifications when a dataset is modified by
-- another device.
--
-- This API can only be called with temporary credentials provided by
-- Cognito Identity. You cannot call this API with developer credentials.
--
-- /See:/ <http://docs.aws.amazon.com/cognitosync/latest/APIReference/API_SubscribeToDataset.html AWS API Reference> for SubscribeToDataset.
module Network.AWS.CognitoSync.SubscribeToDataset
(
-- * Creating a Request
subscribeToDataset
, SubscribeToDataset
-- * Request Lenses
, stdIdentityPoolId
, stdIdentityId
, stdDatasetName
, stdDeviceId
-- * Destructuring the Response
, subscribeToDatasetResponse
, SubscribeToDatasetResponse
-- * Response Lenses
, stdrsResponseStatus
) where
import Network.AWS.CognitoSync.Types
import Network.AWS.CognitoSync.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | A request to SubscribeToDatasetRequest.
--
-- /See:/ 'subscribeToDataset' smart constructor.
data SubscribeToDataset = SubscribeToDataset'
{ _stdIdentityPoolId :: !Text
, _stdIdentityId :: !Text
, _stdDatasetName :: !Text
, _stdDeviceId :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'SubscribeToDataset' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'stdIdentityPoolId'
--
-- * 'stdIdentityId'
--
-- * 'stdDatasetName'
--
-- * 'stdDeviceId'
subscribeToDataset
:: Text -- ^ 'stdIdentityPoolId'
-> Text -- ^ 'stdIdentityId'
-> Text -- ^ 'stdDatasetName'
-> Text -- ^ 'stdDeviceId'
-> SubscribeToDataset
subscribeToDataset pIdentityPoolId_ pIdentityId_ pDatasetName_ pDeviceId_ =
SubscribeToDataset'
{ _stdIdentityPoolId = pIdentityPoolId_
, _stdIdentityId = pIdentityId_
, _stdDatasetName = pDatasetName_
, _stdDeviceId = pDeviceId_
}
-- | A name-spaced GUID (for example,
-- us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
-- Cognito. The ID of the pool to which the identity belongs.
stdIdentityPoolId :: Lens' SubscribeToDataset Text
stdIdentityPoolId = lens _stdIdentityPoolId (\ s a -> s{_stdIdentityPoolId = a});
-- | Unique ID for this identity.
stdIdentityId :: Lens' SubscribeToDataset Text
stdIdentityId = lens _stdIdentityId (\ s a -> s{_stdIdentityId = a});
-- | The name of the dataset to subcribe to.
stdDatasetName :: Lens' SubscribeToDataset Text
stdDatasetName = lens _stdDatasetName (\ s a -> s{_stdDatasetName = a});
-- | The unique ID generated for this device by Cognito.
stdDeviceId :: Lens' SubscribeToDataset Text
stdDeviceId = lens _stdDeviceId (\ s a -> s{_stdDeviceId = a});
instance AWSRequest SubscribeToDataset where
type Rs SubscribeToDataset =
SubscribeToDatasetResponse
request = postJSON cognitoSync
response
= receiveEmpty
(\ s h x ->
SubscribeToDatasetResponse' <$> (pure (fromEnum s)))
instance ToHeaders SubscribeToDataset where
toHeaders
= const
(mconcat
["Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON SubscribeToDataset where
toJSON = const (Object mempty)
instance ToPath SubscribeToDataset where
toPath SubscribeToDataset'{..}
= mconcat
["/identitypools/", toBS _stdIdentityPoolId,
"/identities/", toBS _stdIdentityId, "/datasets/",
toBS _stdDatasetName, "/subscriptions/",
toBS _stdDeviceId]
instance ToQuery SubscribeToDataset where
toQuery = const mempty
-- | Response to a SubscribeToDataset request.
--
-- /See:/ 'subscribeToDatasetResponse' smart constructor.
newtype SubscribeToDatasetResponse = SubscribeToDatasetResponse'
{ _stdrsResponseStatus :: Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'SubscribeToDatasetResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'stdrsResponseStatus'
subscribeToDatasetResponse
:: Int -- ^ 'stdrsResponseStatus'
-> SubscribeToDatasetResponse
subscribeToDatasetResponse pResponseStatus_ =
SubscribeToDatasetResponse'
{ _stdrsResponseStatus = pResponseStatus_
}
-- | The response status code.
stdrsResponseStatus :: Lens' SubscribeToDatasetResponse Int
stdrsResponseStatus = lens _stdrsResponseStatus (\ s a -> s{_stdrsResponseStatus = a});
|
fmapfmapfmap/amazonka
|
amazonka-cognito-sync/gen/Network/AWS/CognitoSync/SubscribeToDataset.hs
|
mpl-2.0
| 5,366
| 0
| 13
| 1,101
| 712
| 429
| 283
| 94
| 1
|
module Data.Discrimination
(
-- * Discrimination
Discriminating(..)
-- * Unordered
, Group(..)
, Grouping(..)
, Grouping1(..)
, nub
, nubWith
, group
, groupWith
, runGroup
-- , groupingBag
-- , groupingSet
, groupingEq
-- * Ordered
, Sort(..)
, Sorting(..)
, Sorting1(..)
, desc
, sort
, sortWith
, sortingBag
, sortingSet
, sortingCompare
-- * Container Construction
, toMap
, toMapWith
, toMapWithKey
, toIntMap
, toIntMapWith
, toIntMapWithKey
, toSet
, toIntSet
-- * Joins
, joining
, inner
, outer
, leftOuter
, rightOuter
) where
import Data.Discrimination.Class
import Data.Discrimination.Grouping
import Data.Discrimination.Sorting
|
markus1189/discrimination
|
src/Data/Discrimination.hs
|
bsd-2-clause
| 727
| 0
| 5
| 182
| 157
| 109
| 48
| 37
| 0
|
{-# LANGUAGE TemplateHaskell, FlexibleInstances, MultiParamTypeClasses, KindSignatures, DataKinds #-}
{-# OPTIONS -Wall #-}
module Language.Hakaru.TH (THRepr, unTHRepr, show_code) where
import Prelude hiding (Real)
import Language.Hakaru.Syntax (Hakaru(..), Number(..), Fraction(..),
Order(..), Base(..), Mochastic(..), Lambda(..))
import Language.Haskell.TH
newtype THRepr (a :: Hakaru *) = THR { unTHRepr :: ExpQ }
-- | Apply the named function to the given TH argument expressions and
--   wrap the resulting application chain as a 'THRepr'.
liftT :: Name -> [ExpQ] -> THRepr a
liftT n = THR . foldl appE (varE n)
-- | Build a TH expression for a literal list out of TH expressions of its
--   elements, folding the cons/nil constructor expressions together.
liftL :: [ExpQ] -> ExpQ
liftL = foldr (\e rest -> varE '(:) `appE` e `appE` rest) (varE '[])
-- | Reify a function on 'THRepr' values as a TH lambda expression acting
--   on the underlying representation values.
liftF :: (THRepr a -> THRepr b) -> ExpQ
liftF f = do
    arg <- newName "x"
    lamE [varP arg] (unTHRepr (f (THR (varE arg))))
-- | Binary analogue of 'liftF': reify a two-argument function on
--   'THRepr' values as a two-argument TH lambda expression.
liftF2 :: (THRepr a -> THRepr b -> THRepr c) -> ExpQ
liftF2 f = do
    lhs <- newName "x"
    rhs <- newName "y"
    lamE [varP lhs, varP rhs]
         (unTHRepr (f (THR (varE lhs)) (THR (varE rhs))))
instance (Number a) => Order THRepr a where
less (THR e) (THR e') = liftT 'less [e, e']
equal (THR e) (THR e') = liftT 'equal [e, e']
instance Base THRepr where
unit = liftT 'unit []
pair (THR e) (THR e') = liftT 'pair [e, e']
unpair (THR e) f = liftT 'unpair [e, liftF2 f]
inl (THR e) = liftT 'inl [e]
inr (THR e) = liftT 'inr [e]
uneither (THR e) f g = liftT 'uneither [e, liftF f, liftF g]
true = liftT 'true []
false = liftT 'false []
if_ (THR eb) (THR et) (THR ef) = liftT 'if_ [eb, et, ef]
unsafeProb (THR e) = liftT 'unsafeProb [e]
fromProb (THR e) = liftT 'fromProb [e]
fromInt (THR e) = liftT 'fromInt [e]
infinity = liftT 'infinity []
negativeInfinity = liftT 'negativeInfinity []
erf (THR e) = liftT 'erf [e]
erf_ (THR e) = liftT 'erf_ [e]
gammaFunc (THR e) = liftT 'gammaFunc [e]
betaFunc (THR e) (THR e') = liftT 'betaFunc [e, e']
fix f = liftT 'fix [liftF f]
instance (Number a) => Num (THRepr a) where
(THR e) + (THR e') = liftT '(+) [e, e']
(THR e) * (THR e') = liftT '(*) [e, e']
abs (THR e) = liftT 'abs [e]
signum (THR e) = liftT 'signum [e]
fromInteger n = liftT 'fromInteger [litE (IntegerL n)]
negate (THR e) = liftT 'negate [e]
(THR e) - (THR e') = liftT '(-) [e, e']
instance (Fraction a) => Fractional (THRepr a) where
fromRational r = liftT 'fromRational [litE (RationalL r)]
recip (THR e) = liftT 'recip [e]
(THR e) / (THR e') = liftT '(/) [e, e']
instance (Fraction a) => Floating (THRepr a) where
pi = liftT 'pi []
exp (THR e) = liftT 'exp [e]
log (THR e) = liftT 'log [e]
sqrt (THR e) = liftT 'sqrt [e]
(THR e) ** (THR e') = liftT '(**) [e,e']
logBase (THR e) (THR e') = liftT 'logBase [e,e']
sin (THR e) = liftT 'sin [e]
cos (THR e) = liftT 'cos [e]
tan (THR e) = liftT 'tan [e]
asin (THR e) = liftT 'asin [e]
acos (THR e) = liftT 'acos [e]
atan (THR e) = liftT 'atan [e]
sinh (THR e) = liftT 'sinh [e]
cosh (THR e) = liftT 'cosh [e]
tanh (THR e) = liftT 'tanh [e]
asinh (THR e) = liftT 'asinh [e]
acosh (THR e) = liftT 'acosh [e]
atanh (THR e) = liftT 'atanh [e]
instance Mochastic THRepr where
dirac (THR e) = liftT 'dirac [e]
bind (THR e) f = liftT 'bind [e, liftF f]
lebesgue = liftT 'lebesgue []
counting = liftT 'counting []
superpose pms = liftT 'superpose [liftL [ varE '(,) `appE` e `appE` e'
| (THR e, THR e') <- pms ]]
uniform (THR e) (THR e') = liftT 'uniform [e, e']
normal (THR e) (THR e') = liftT 'normal [e, e']
poisson (THR e) = liftT 'poisson [e]
gamma (THR e) (THR e') = liftT 'gamma [e, e']
beta (THR e) (THR e') = liftT 'beta [e, e']
instance Lambda THRepr where
lam f = liftT 'lam [liftF f]
app (THR e) (THR e') = liftT 'app [e, e']
let_ (THR e) f = liftT 'let_ [e, liftF f]
show_code :: THRepr a -> IO ()
show_code (THR cde) = runQ cde >>= putStrLn . pprint
|
suhailshergill/hakaru
|
Language/Hakaru/TH.hs
|
bsd-3-clause
| 3,931
| 3
| 15
| 960
| 2,201
| 1,165
| 1,036
| -1
| -1
|
module Lib where
-- | Helper for reading in a list: first asks for the element count, then
-- prompts once per element.  (Not part of this particular exercise, but
-- often useful in these competitions.)
--
-- Parameter names are Swedish: @frågaAntal@ is the prompt for the count,
-- @frågaVarje@ the prompt shown before each element.
getList :: Read a => String -> String -> IO [a]
getList frågaAntal frågaVarje = do
    putStr frågaAntal
    n <- readLn
    sequence $ replicate n $ do
        putStr frågaVarje
        readLn
-- Some I/O helper functions ("fråga" = "ask").
-- | Print a prompt, then read one 'Read'-able value from stdin.
fråga :: Read a => String -> IO a
fråga text = putStr text >> readLn
frågor :: Read a => Int -> String -> IO [a]
frågor n text = mapM (\i -> fråga (text ++ show i ++ " ? ")) [1..n]
-- | Print a result prefixed with "Svar: " (Swedish for "Answer: ").
svar :: Show a => a -> IO ()
svar = putStrLn . ("Svar: " ++) . show
|
patrikja/progolymp
|
skolkval/2015/Lib.hs
|
bsd-3-clause
| 629
| 7
| 12
| 145
| 244
| 119
| 125
| 16
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Sensor_msgs.JointState where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.HeaderSupport
import qualified Data.Vector.Storable as V
import qualified Ros.Std_msgs.Header as Header
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
data JointState = JointState { _header :: Header.Header
, _name :: [P.String]
, _position :: V.Vector P.Double
, _velocity :: V.Vector P.Double
, _effort :: V.Vector P.Double
} deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)
$(makeLenses ''JointState)
instance RosBinary JointState where
put obj' = put (_header obj') *> putList (_name obj') *> put (_position obj') *> put (_velocity obj') *> put (_effort obj')
get = JointState <$> get <*> getList <*> get <*> get <*> get
putMsg = putStampedMsg
instance HasHeader JointState where
getSequence = view (header . Header.seq)
getFrame = view (header . Header.frame_id)
getStamp = view (header . Header.stamp)
setSequence = set (header . Header.seq)
instance MsgInfo JointState where
sourceMD5 _ = "3066dcd76a6cfaef579bd0f34173e9fd"
msgTypeName _ = "sensor_msgs/JointState"
instance D.Default JointState
|
acowley/roshask
|
msgs/Sensor_msgs/Ros/Sensor_msgs/JointState.hs
|
bsd-3-clause
| 1,667
| 1
| 12
| 367
| 457
| 262
| 195
| 38
| 0
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
--------------------------------------------------------------------------------
-- |
-- Module : Data.Comp.Multi.HFoldable
-- Copyright : (c) 2011 Patrick Bahr
-- License : BSD3
-- Maintainer : Patrick Bahr <paba@diku.dk>
-- Stability : experimental
-- Portability : non-portable (GHC Extensions)
--
-- This module defines higher-order foldable functors.
--
--------------------------------------------------------------------------------
module Data.Comp.Multi.HFoldable
(
HFoldable (..),
kfoldr,
kfoldl,
htoList
) where
import Data.Comp.Multi.HFunctor
import Data.Maybe
import Data.Monoid
-- | Higher-order functors that can be folded.
--
-- Minimal complete definition: 'hfoldMap' or 'hfoldr'.
--
-- The default implementations mirror 'Data.Foldable': every method is
-- derivable from 'hfoldMap' (via the 'Endo' / 'Dual' monoids) or from
-- 'hfoldr', so instances need only supply one of the two.
class HFunctor h => HFoldable h where
    -- | Combine a structure of ('K'-wrapped) monoidal values.
    hfold :: Monoid m => h (K m) :=> m
    hfold = hfoldMap unK
    -- | Map every component to a monoid and combine the results.
    hfoldMap :: Monoid m => (a :=> m) -> h a :=> m
    hfoldMap f = hfoldr (mappend . f) mempty
    -- | Right fold over the components.
    hfoldr :: (a :=> b -> b) -> b -> h a :=> b
    hfoldr f z t = appEndo (hfoldMap (Endo . f) t) z
    -- | Left fold over the components ('Dual' reverses the combine order).
    hfoldl :: (b -> a :=> b) -> b -> h a :=> b
    hfoldl f z t = appEndo (getDual (hfoldMap (Dual . Endo . flip f) t)) z
    -- | Seedless right fold; partial: errors on an empty structure.
    hfoldr1 :: forall a. (a -> a -> a) -> h (K a) :=> a
    hfoldr1 f xs = fromMaybe (error "hfoldr1: empty structure")
                   (hfoldr mf Nothing xs)
        where mf :: K a :=> Maybe a -> Maybe a
              mf (K x) Nothing = Just x
              mf (K x) (Just y) = Just (f x y)
    -- | Seedless left fold; partial: errors on an empty structure.
    hfoldl1 :: forall a . (a -> a -> a) -> h (K a) :=> a
    hfoldl1 f xs = fromMaybe (error "hfoldl1: empty structure")
                   (hfoldl mf Nothing xs)
        where mf :: Maybe a -> K a :=> Maybe a
              mf Nothing (K y) = Just y
              mf (Just x) (K y) = Just (f x y)
-- | Collect the components of a higher-order foldable structure into a
--   list of existentially quantified values.
htoList :: (HFoldable f) => f a :=> [E a]
htoList = hfoldr (\ el acc -> E el : acc) []
-- | Right fold over 'K'-wrapped components, unwrapping before combining.
kfoldr :: (HFoldable f) => (a -> b -> b) -> b -> f (K a) :=> b
kfoldr g = hfoldr (\ (K el) -> g el)
-- | Left fold over 'K'-wrapped components, unwrapping before combining.
kfoldl :: (HFoldable f) => (b -> a -> b) -> b -> f (K a) :=> b
kfoldl g = hfoldl (\ acc (K el) -> g acc el)
|
spacekitteh/compdata
|
src/Data/Comp/Multi/HFoldable.hs
|
bsd-3-clause
| 2,345
| 0
| 14
| 673
| 805
| 420
| 385
| 43
| 1
|
{-| Incident failing in the maintenace daemon
This module implements the treatment of an incident, once
a job failed.
-}
{-
Copyright (C) 2015 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.MaintD.FailIncident
( failIncident
) where
import Control.Exception.Lifted (bracket)
import Control.Lens.Setter (over)
import Control.Monad (liftM, when)
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString.UTF8 as UTF8
import Data.IORef (IORef)
import System.IO.Error (tryIOError)
import Ganeti.BasicTypes (ResultT, mkResultT, GenericResult(..))
import qualified Ganeti.Constants as C
import Ganeti.JQueue (currentTimestamp)
import Ganeti.Jobs (execJobsWaitOkJid)
import Ganeti.Logging.Lifted
import qualified Ganeti.Luxi as L
import Ganeti.MaintD.MemoryState (MemoryState, getIncidents, updateIncident)
import Ganeti.MaintD.Utils (annotateOpCode)
import Ganeti.Objects.Lens (incidentJobsL)
import Ganeti.Objects.Maintenance (Incident(..), RepairStatus(..))
import Ganeti.OpCodes (OpCode(..))
import qualified Ganeti.Path as Path
import Ganeti.Types (JobId, fromJobId, TagKind(..))
-- | Mark an incident as failed.
--
-- Tags the incident's node with the maintenance-daemon failure tag (via a
-- Luxi job), then stores the incident back with status 'RSFailed', the
-- new tag, and the tagging job ids appended to its job list.
markAsFailed :: IORef MemoryState -> Incident -> ResultT String IO ()
markAsFailed memstate incident = do
  let uuid = incidentUuid incident
      newtag = C.maintdFailureTagPrefix ++ UTF8.toString uuid
  logInfo $ "Marking incident " ++ UTF8.toString uuid ++ " as failed"
  now <- liftIO currentTimestamp
  luxiSocket <- liftIO Path.defaultQuerySocket
  -- 'bracket' guarantees the Luxi client is closed even if job submission
  -- fails; 'tryIOError' lifts IO failures into the ResultT error channel.
  jids <- bracket (mkResultT . liftM (either (Bad . show) Ok)
                     . tryIOError $ L.getLuxiClient luxiSocket)
                  (liftIO . L.closeClient)
                  (mkResultT . execJobsWaitOkJid
                     [[ annotateOpCode "marking incident handling as failed" now
                        . OpTagsSet TagKindNode [ newtag ]
                        . Just $ incidentNode incident ]])
  -- Record the outcome: append the tagging job ids and flip the status.
  let incident' = over incidentJobsL (++ jids)
                  $ incident { incidentRepairStatus = RSFailed
                             , incidentTag = newtag
                             }
  liftIO $ updateIncident memstate incident'
-- | Mark the incident, if any, belonging to the given job as
-- failed after having tagged it appropriately.
--
-- Every incident whose job list contains the given job id is marked via
-- 'markAsFailed'; if none matches, only an informational note is logged.
failIncident :: IORef MemoryState -> JobId -> ResultT String IO ()
failIncident memstate jid = do
  incidents <- getIncidents memstate
  let affected = filter (elem jid . incidentJobs) incidents
  -- Log only in the "nothing to do" case; stay quiet otherwise.
  when (null affected) . logInfo
    $ "Job " ++ show (fromJobId jid) ++ " does not belong to an incident"
  mapM_ (markAsFailed memstate) affected
|
leshchevds/ganeti
|
src/Ganeti/MaintD/FailIncident.hs
|
bsd-2-clause
| 3,842
| 0
| 18
| 742
| 637
| 352
| 285
| 47
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
module Main (main) where
import Compare.BufferBuilder ()
import Compare.JsonBuilder ()
import Criterion.Main
import Data.BufferBuilder.Json
import Data.Json.Builder
import Twitter
import Twitter.Manual ()
import Typed.Common
import qualified Compare.JsonBench as JsonBench
import qualified Data.Aeson as Aeson
-- | Benchmark entry point: compares JSON encoders (aeson,
-- buffer-builder, json-builder) on the decoded twitter100 fixture.
-- The fixture is loaded once via criterion's 'env', so parsing cost is
-- excluded from the measured encoding times.
main :: IO ()
main =
  defaultMain [
      env (load "json-data/twitter100.json") $ \ ~(twtr :: Result) ->
      bgroup "twitter" [
        bench "aeson" $ nf Aeson.encode twtr
      , bench "buffer-builder" $ nf encodeJson twtr
      , bench "json-builder" $ nf toJsonLBS twtr
      ]
    , JsonBench.benchmarks
    ]
|
abbradar/aeson
|
benchmarks/Compare.hs
|
bsd-3-clause
| 669
| 0
| 13
| 132
| 179
| 101
| 78
| 21
| 1
|
-- !!! cc009 -- foreign label returning newtype of Addr
module ShouldCompile where
import Foreign
type Addr = Ptr ()
newtype NPtr a = NPtr Addr
foreign import ccall "&" foo :: NPtr Int
|
urbanslug/ghc
|
testsuite/tests/ffi/should_compile/cc009.hs
|
bsd-3-clause
| 187
| 0
| 6
| 36
| 43
| 26
| 17
| 5
| 0
|
module T7368 where
f = b (l Nothing)
l :: b a -> c b
l = undefined
b :: (a -> b) -> c
b = undefined
|
siddhanathan/ghc
|
testsuite/tests/typecheck/should_fail/T7368.hs
|
bsd-3-clause
| 105
| 0
| 7
| 33
| 59
| 32
| 27
| 6
| 1
|
{-# htermination (gtRatio :: Ratio MyInt -> Ratio MyInt -> MyBool) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
data Ordering = LT | EQ | GT ;
data Ratio a = CnPc a a;
primCmpNat :: Nat -> Nat -> Ordering;
primCmpNat Zero Zero = EQ;
primCmpNat Zero (Succ y) = LT;
primCmpNat (Succ x) Zero = GT;
primCmpNat (Succ x) (Succ y) = primCmpNat x y;
primCmpInt :: MyInt -> MyInt -> Ordering;
primCmpInt (Pos Zero) (Pos Zero) = EQ;
primCmpInt (Pos Zero) (Neg Zero) = EQ;
primCmpInt (Neg Zero) (Pos Zero) = EQ;
primCmpInt (Neg Zero) (Neg Zero) = EQ;
primCmpInt (Pos x) (Pos y) = primCmpNat x y;
primCmpInt (Pos x) (Neg y) = GT;
primCmpInt (Neg x) (Pos y) = LT;
primCmpInt (Neg x) (Neg y) = primCmpNat y x;
compareMyInt :: MyInt -> MyInt -> Ordering
compareMyInt = primCmpInt;
primPlusNat :: Nat -> Nat -> Nat;
primPlusNat Zero Zero = Zero;
primPlusNat Zero (Succ y) = Succ y;
primPlusNat (Succ x) Zero = Succ x;
primPlusNat (Succ x) (Succ y) = Succ (Succ (primPlusNat x y));
primMulNat :: Nat -> Nat -> Nat;
primMulNat Zero Zero = Zero;
primMulNat Zero (Succ y) = Zero;
primMulNat (Succ x) Zero = Zero;
primMulNat (Succ x) (Succ y) = primPlusNat (primMulNat x (Succ y)) (Succ y);
primMulInt :: MyInt -> MyInt -> MyInt;
primMulInt (Pos x) (Pos y) = Pos (primMulNat x y);
primMulInt (Pos x) (Neg y) = Neg (primMulNat x y);
primMulInt (Neg x) (Pos y) = Neg (primMulNat x y);
primMulInt (Neg x) (Neg y) = Pos (primMulNat x y);
srMyInt :: MyInt -> MyInt -> MyInt
srMyInt = primMulInt;
compareRatio :: Ratio MyInt -> Ratio MyInt -> Ordering
compareRatio (CnPc x y) (CnPc x' y') = compareMyInt (srMyInt x y') (srMyInt x' y);
esEsOrdering :: Ordering -> Ordering -> MyBool
esEsOrdering LT LT = MyTrue;
esEsOrdering LT EQ = MyFalse;
esEsOrdering LT GT = MyFalse;
esEsOrdering EQ LT = MyFalse;
esEsOrdering EQ EQ = MyTrue;
esEsOrdering EQ GT = MyFalse;
esEsOrdering GT LT = MyFalse;
esEsOrdering GT EQ = MyFalse;
esEsOrdering GT GT = MyTrue;
gtRatio :: Ratio MyInt -> Ratio MyInt -> MyBool
gtRatio x y = esEsOrdering (compareRatio x y) GT;
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/GT_2.hs
|
mit
| 2,199
| 0
| 9
| 465
| 1,009
| 533
| 476
| 54
| 1
|
module Args where
data Args = Args {}
getArgs :: IO Args
getArgs = pure Args {}
|
mrak/coreutils.hs
|
src/uniq/Args.hs
|
mit
| 82
| 3
| 4
| 19
| 27
| 19
| 8
| 4
| 1
|
-- Emirps
-- https://www.codewars.com/kata/55de8eabd9bef5205e0000ba
module Codewars.G964.Emirps (findEmirp) where
import Data.Maybe(listToMaybe, fromMaybe)
import Data.List(unfoldr, genericLength)
import Data.Set (fromDistinctAscList, member)
-- Trial-division factorisation of @n@ over an ascending divisor list,
-- unfolding one factor per step up to sqrt of the remaining cofactor.
pfactors prs n = unfoldr (\(ds,n) -> listToMaybe [(x, (dropWhile (< x) ds, div n x)) | x <- takeWhile ((<=n).(^2)) ds ++ [ n | n > 1 ], mod n x == 0]) (prs,n)
-- All primes, defined self-referentially: a candidate is prime iff its
-- smallest trial-division factor is itself.
primes = 2 : 3 : [x | x <- [5,7..], head (pfactors (tail primes) x) == x]
-- Reverse the decimal digits of a number.  @read . reverse . show@ is
-- safe here since inputs are non-negative, so the string is digits only.
reversed = read .reverse . show
-- | For an upper bound @n@, find all emirps (primes whose digit reversal
-- is a /different/ prime) up to @n@ and return
-- @[count, largest (or 0 if none), sum]@.
findEmirp :: Integer -> [Integer]
findEmirp n = [genericLength empirs, fromMaybe 0 . listToMaybe . reverse $ empirs, sum empirs]
    where -- primes <= n whose reversal is a different prime
          empirs = filter (\p -> ((/= p) . reversed $ p) && ((`member` pset) . reversed $ p) ) . takeWhile (<= n) $ plist
          -- lookup set over all primes below the next power of ten, so
          -- reversed candidates (which may exceed n) can be tested
          pset = fromDistinctAscList plist
          plist = takeWhile (< upperLimit) primes
          -- smallest power of ten with one digit more than n
          upperLimit = read . ('1':) . replicate (length . show $ n) $ '0'
|
gafiatulin/codewars
|
src/5 kyu/Emirps.hs
|
mit
| 932
| 2
| 16
| 187
| 421
| 235
| 186
| 13
| 1
|
module Network.Traffic.Object.NewlineSplitter
(splitAtNextNewline
) where
import qualified Data.ByteString.Lazy.Char8 as LBS
import GHC.Int (Int64)
-- | Split the byte string directly after the first newline found at or
-- after 'startIndex'.  If no newline follows, the split point is the end
-- of the string (second component empty).
splitAtNextNewline :: Int64
                   -> LBS.ByteString
                   -> (LBS.ByteString, LBS.ByteString)
splitAtNextNewline startIndex lbs =
    -- Scan with 'elemIndex' instead of repeated 'LBS.index' calls: each
    -- 'index' on a lazy ByteString walks the chunk list from the front,
    -- so the original per-character loop was accidentally quadratic.
    case LBS.elemIndex '\n' (LBS.drop startIndex lbs) of
      Just offset -> LBS.splitAt (startIndex + offset + 1) lbs
      Nothing     -> (lbs, LBS.empty)
|
kosmoskatten/traffic-analysis
|
src/Network/Traffic/Object/NewlineSplitter.hs
|
mit
| 642
| 0
| 12
| 193
| 188
| 100
| 88
| 14
| 1
|
{-# LANGUAGE OverloadedStrings, NoImplicitPrelude #-}
module SuccessfulRequestLogger(initDB, logRequest, getTopNUserAgents, getTopNUrls) where
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import qualified Network.Wai as Wai
import qualified Database.SQLite.Simple as SQL
import System.IO (IO)
import Data.Int (Int)
import Prelude ((.), String, error)
import Data.Function (($))
import Control.Monad
import Data.Maybe
import Data.Monoid (mappend)
import Utils (getCurrentSeconds)
import HttpUtils (lookupHeader)
unspecifiedUAId :: Int
unspecifiedUAId = 1
unspecifiedUAOriginalName :: T.Text
unspecifiedUAOriginalName = "N/A"
unspecifiedUANormalizedName :: T.Text
unspecifiedUANormalizedName = T.toLower unspecifiedUAOriginalName
initDB :: SQL.Connection -> IO ()
initDB conn = do
mapM_ (SQL.execute_ conn) [userAgentTable, pathTable, accessLogTable]
SQL.execute conn insertUnspecifiedUserAgent (unspecifiedUAId, unspecifiedUAOriginalName, unspecifiedUANormalizedName)
where
userAgentTable :: SQL.Query
userAgentTable = "CREATE TABLE IF NOT EXISTS UserAgent(\
\ id INTEGER PRIMARY KEY NOT NULL,\
\ original TEXT NOT NULL UNIQUE,\
\ normalized TEXT NOT NULL\
\ );"
pathTable :: SQL.Query
pathTable = "CREATE TABLE IF NOT EXISTS Path(\
\ id INTEGER PRIMARY KEY NOT NULL,\
\ value TEXT NOT NULL UNIQUE\
\);"
accessLogTable :: SQL.Query
accessLogTable = "CREATE TABLE IF NOT EXISTS AccessLog(\
\ id INTEGER PRIMARY KEY NOT NULL,\
\ userAgentId INTEGER NOT NULL REFERENCES UserAgent(id),\
\ pathId INTEGER NOT NULL REFERENCES Path(id),\
\ whenSeconds INTEGER NOT NULL\
\ );"
insertUnspecifiedUserAgent :: SQL.Query
insertUnspecifiedUserAgent = "INSERT OR IGNORE INTO UserAgent (id, original, normalized) VALUES (?, ?, ?);"
type UserAgentId = Int
head :: String -> [a] -> a
head message [] = error message
head _ (x:_) = x
unpackOnly :: SQL.Only a -> a
unpackOnly (SQL.Only x) = x
singleOrThrow :: String -> [SQL.Only a] -> IO a
singleOrThrow name = return . head name . fmap unpackOnly
-- subject to race conditions
findOrCreateUserAgentId :: SQL.Connection -> T.Text -> IO UserAgentId
findOrCreateUserAgentId conn originalName =
let normalizedName = T.toLower originalName in do
SQL.execute conn "INSERT OR IGNORE INTO UserAgent (original, normalized) VALUES (?,?);" [originalName, normalizedName]
result <- SQL.query conn "SELECT id FROM UserAgent WHERE original=?;" (SQL.Only originalName)
singleOrThrow "findOrCreateUserAgent" result
type PathId = Int
-- | Look up the id of a request path, inserting the row first if it does
-- not exist yet.  Subject to the same race window as
-- 'findOrCreateUserAgentId' (insert and select are separate statements).
findOrCreatePathId :: SQL.Connection -> T.Text -> IO PathId
findOrCreatePathId conn path = do
  SQL.execute conn "INSERT OR IGNORE INTO Path (value) VALUES (?);" (SQL.Only path)
  result <- SQL.query conn "SELECT id FROM Path where value = ?;" (SQL.Only path)
  -- BUG FIX: diagnostic label had a typo ("findoOrCreatePathId").
  singleOrThrow "findOrCreatePathId" result
-- | Extract the User-Agent header from a WAI request, decoded as UTF-8.
lookupUA :: Wai.Request -> Maybe T.Text
lookupUA request = E.decodeUtf8 `fmap` lookupHeader request "User-Agent"
-- | Resolve the database id for the request's user agent, falling back to
--   the sentinel "unspecified" row when no User-Agent header is present.
getUserAgentId :: SQL.Connection -> Wai.Request -> IO UserAgentId
getUserAgentId conn request =
  maybe (return unspecifiedUAId) (findOrCreateUserAgentId conn) (lookupUA request)
-- | Record one successful request: resolve (or create) the user-agent and
-- path rows, then append a row to AccessLog with the current time.
logRequest :: SQL.Connection -> Wai.Request -> IO ()
logRequest conn request = do
  userAgentId <- getUserAgentId conn request
  pathId <- findOrCreatePathId conn $ "/" `mappend` (T.intercalate "/" $ Wai.pathInfo request)
  currSeconds <- getCurrentSeconds
  -- 'SQL.execute' already has type 'IO ()', so the redundant trailing
  -- 'return ()' of the original has been dropped.
  SQL.execute conn "INSERT INTO AccessLog (userAgentId, whenSeconds, pathId) VALUES (?, ?, ?);" (userAgentId, currSeconds, pathId)
getTopNUserAgents :: SQL.Connection -> Int -> IO [(T.Text, Int)]
getTopNUserAgents conn limit = do
SQL.query conn "SELECT UserAgent.original, count(userAgentId) AS freq FROM AccessLog JOIN UserAgent ON AccessLog.userAgentId = UserAgent.id GROUP BY userAgentId ORDER BY freq DESC LIMIT ?;" (SQL.Only limit)
getTopNUrls :: SQL.Connection -> Int -> IO [(T.Text, Int)]
getTopNUrls conn limit = do
SQL.query conn "SELECT Path.value, count(pathId) AS freq FROM AccessLog JOIN Path ON AccessLog.pathId = Path.id GROUP BY pathId ORDER BY freq DESC LIMIT ?;" (SQL.Only limit)
|
itsuart/fdc_archivist
|
src/SuccessfulRequestLogger.hs
|
mit
| 4,370
| 0
| 13
| 855
| 944
| 495
| 449
| 73
| 2
|
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGRectElement
(getX, getY, getWidth, getHeight, getRx, getRy, SVGRectElement(..),
gTypeSVGRectElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGRectElement.x Mozilla SVGRectElement.x documentation>
getX :: (MonadDOM m) => SVGRectElement -> m SVGAnimatedLength
getX self = liftDOM (fromJSValUnchecked =<< (self ^. js "x"))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGRectElement.y Mozilla SVGRectElement.y documentation>
getY :: (MonadDOM m) => SVGRectElement -> m SVGAnimatedLength
getY self = liftDOM (fromJSValUnchecked =<< (self ^. js "y"))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGRectElement.width Mozilla SVGRectElement.width documentation>
getWidth :: (MonadDOM m) => SVGRectElement -> m SVGAnimatedLength
getWidth self = liftDOM (fromJSValUnchecked =<< (self ^. js "width"))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGRectElement.height Mozilla SVGRectElement.height documentation>
getHeight :: (MonadDOM m) => SVGRectElement -> m SVGAnimatedLength
getHeight self = liftDOM (fromJSValUnchecked =<< (self ^. js "height"))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGRectElement.rx Mozilla SVGRectElement.rx documentation>
getRx :: (MonadDOM m) => SVGRectElement -> m SVGAnimatedLength
getRx self = liftDOM (fromJSValUnchecked =<< (self ^. js "rx"))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGRectElement.ry Mozilla SVGRectElement.ry documentation>
getRy :: (MonadDOM m) => SVGRectElement -> m SVGAnimatedLength
getRy self = liftDOM (fromJSValUnchecked =<< (self ^. js "ry"))
|
ghcjs/jsaddle-dom
|
src/JSDOM/Generated/SVGRectElement.hs
|
mit
| 2,556
| 0
| 10
| 295
| 610
| 364
| 246
| 33
| 1
|
module Handler.Home where
import Import hiding (on, (==.), delete)
import qualified Import as I
import Helpers.Common
import Models.Readable
import Yesod.Form.Bootstrap3
import Database.Esqueleto
import qualified Database.Esqueleto as E
-- | Form for creating or editing a 'Skill'. A new skill is owned by the
-- given user; an existing skill keeps its original owner.
skillForm :: UserId -> Maybe Skill -> Form Skill
skillForm userId ms = renderBootstrap3 BootstrapBasicForm $ Skill
    <$> areq textField (bs "Name") (_skillName <$> ms)
    -- Simplified from @maybe userId id (_skillUserId <$> ms)@.
    <*> pure (maybe userId _skillUserId ms)
    <*  bootstrapSubmit (BootstrapSubmit ("Submit" :: Text) "btn-default" [])
-- | Current user's profile page: their readable readings plus entries.
-- NOTE(review): the spliced "profile" template may reference the local
-- names (userId, items, entries) — do not rename them without checking
-- the template.
getProfileR :: Handler Html
getProfileR = do
    userId <- requireAuthId
    items <- runDB $ userReadableReadings userId
    entries <- runDB $ selectList [EntryUserId I.==. userId] []
    defaultLayout $(widgetFile "profile")
-- | List the user's skills with today's progress, plus a creation form.
-- NOTE(review): the spliced "skills" template may reference the local
-- names (items, form) — do not rename them without checking it.
getSkillsR :: Handler Html
getSkillsR = do
    userId <- requireAuthId
    -- Progress is tracked per calendar day, derived from UTC time.
    day <- fmap utctDay $ liftIO getCurrentTime
    items <- runDB $ skillsWithProgress userId day
    (form, _) <- generateFormPost $ skillForm userId Nothing
    defaultLayout $(widgetFile "skills")
-- | Handle the skill-creation form, report the outcome via a flash
-- message, and bounce back to the skills list either way.
postSkillsR :: Handler Html
postSkillsR = do
    userId <- requireAuthId
    ((result, _), _) <- runFormPost (skillForm userId Nothing)
    case result of
        FormSuccess skill -> do
            void (runDB (insert skill))
            setMessage "Skill created"
        _ ->
            setMessage "Failed to create a skill"
    -- Both branches redirect to the same place, so it is hoisted here.
    redirect SkillsR
-- | Delete a skill and its progress records. Responds 404 unless the
-- skill exists and belongs to the currently logged-in user, which
-- resolves the former TODO about cross-user deletion.
deleteSkillR :: SkillId -> Handler Html
deleteSkillR key = do
    userId <- requireAuthId
    skill <- runDB $ get404 key
    -- Do not reveal whether the skill exists when it isn't ours.
    when (_skillUserId skill /= userId) notFound
    -- Remove dependent progress rows before the skill itself.
    runDB $ delete $ from $ \p -> where_ (p ^. ProgressSkillId ==. val key)
    runDB $ I.delete key
    setMessage "Skill deleted"
    redirect SkillsR
-- | Mark progress on a skill for today; at most one progress row per
-- user, skill, and calendar day is allowed.
postProgressR :: SkillId -> Handler Html
postProgressR key = do
    uid <- requireAuthId
    today <- utctDay <$> liftIO getCurrentTime
    existing <- runDB $ selectFirst
        [ ProgressSkillId I.==. key
        , ProgressUserId I.==. uid
        , ProgressCreatedAt I.==. today
        ] []
    case existing of
        Just _ ->
            setMessage "You've already marked progress on this skill today"
        Nothing -> do
            void . runDB . insert $ Progress key today uid
            setMessage "Progress marked"
    redirect SkillsR
|
darthdeus/reedink
|
Handler/Home.hs
|
mit
| 2,268
| 0
| 14
| 552
| 667
| 323
| 344
| 58
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-|
Provides combinators for currying and uncurrying functions over arbitrary vinyl
records.
-}
module Data.Vinyl.Curry where
import Data.Kind (Type)
import Data.Vinyl
import Data.Vinyl.Functor
import Data.Vinyl.XRec
-- * Currying
-- Dispatch on the type-level list: the instances below build the
-- curried function one argument at a time.
class RecordCurry ts where
  {-|
  N-ary version of 'curry' over functorial records.
  Example specialized signatures:
  @
  rcurry :: (Rec Maybe '[Int, Double] -> Bool) -> Maybe Int -> Maybe Double -> Bool
  rcurry :: (Rec (Either Int) '[Double, String, ()] -> Int) -> Either Int Double -> Either Int String -> Either Int () -> Int
  rcurry :: (Rec f '[] -> Bool) -> Bool
  @
  -}
  rcurry :: (Rec f ts -> a) -> CurriedF f ts a
-- Like 'RecordCurry', but each argument is the bare field type,
-- wrapped into 'Identity' internally.
class RecordCurry' ts where
  {-|
  N-ary version of 'curry' over pure records.
  Example specialized signatures:
  @
  rcurry' :: (Rec Identity '[Int, Double] -> Bool) -> Int -> Double -> Bool
  rcurry' :: (Rec Identity '[Double, String, ()] -> Int) -> Double -> String -> () -> Int
  rcurry' :: (Rec Identity '[] -> Bool) -> Bool
  @
  -}
  rcurry' :: (Rec Identity ts -> a) -> Curried ts a
-- Base cases: with no fields left there is nothing to curry, so the
-- "curried" function is just the original applied to the empty record.
instance RecordCurry '[] where
  rcurry f = f RNil
  {-# INLINABLE rcurry #-}
instance RecordCurry' '[] where
  rcurry' f = f RNil
  {-# INLINABLE rcurry' #-}
-- Inductive case: accept one @f t@ argument, cons it onto the record
-- being assembled, and recurse on the tail of the type-level list.
instance RecordCurry ts => RecordCurry (t ': ts) where
  rcurry f x = rcurry (f . (x :&))
  {-# INLINABLE rcurry #-}
-- Inductive case: wrap the incoming bare value in 'Identity' before
-- consing it onto the record and recursing.
instance RecordCurry' ts => RecordCurry' (t ': ts) where
  rcurry' f x = rcurry' (f . (Identity x :&))
  {-# INLINABLE rcurry' #-}
-- * Uncurrying
{-|
N-ary version of 'uncurry' over functorial records.
Example specialized signatures:
@
runcurry :: (Maybe Int -> Maybe Double -> String) -> Rec Maybe '[Int, Double] -> String
runcurry :: (IO FilePath -> String) -> Rec IO '[FilePath] -> String
runcurry :: Int -> Rec f '[] -> Int
@
-}
runcurry :: CurriedF f ts a -> Rec f ts -> a
-- Peel one field off the record and feed it to the function per step.
runcurry x RNil = x
runcurry f (x :& xs) = runcurry (f x) xs
{-# INLINABLE runcurry #-}
{-|
N-ary version of 'uncurry' over pure records.
Example specialized signatures:
@
runcurry' :: (Int -> Double -> String) -> Rec Identity '[Int, Double] -> String
runcurry' :: Int -> Rec Identity '[] -> Int
@
Example usage:
@
f :: Rec Identity '[Bool, Int, Double] -> Either Int Double
f = runcurry' $ \b x y -> if b then Left x else Right y
@
-}
runcurry' :: Curried ts a -> Rec Identity ts -> a
-- Like 'runcurry' but unwraps the 'Identity' around each field.
runcurry' x RNil = x
runcurry' f (Identity x :& xs) = runcurry' (f x) xs
{-# INLINABLE runcurry' #-}
-- | Apply an uncurried function to an 'XRec'.
xruncurry :: CurriedX f ts a -> XRec f ts -> a
-- 'unX' strips the HKD wrapper before the function is applied.
xruncurry x RNil = x
xruncurry f (x :& xs) = xruncurry (f (unX x)) xs
{-# INLINABLE xruncurry #-}
-- | Apply an uncurried function to a 'Rec' like 'runcurry' except the
-- function enjoys a type simplified by the 'XData' machinery that
-- strips away type-induced noise like 'Identity', 'Compose', and
-- 'ElField'.
runcurryX :: IsoXRec f ts => CurriedX f ts a -> Rec f ts -> a
runcurryX f = xruncurry f . toXRec
{-# INLINE runcurryX #-}
-- * Applicative Combinators
{-|
Lift an N-ary function to work over a record of 'Applicative' computations.
>>> runcurryA' (+) (Just 2 :& Just 3 :& RNil)
Just 5
>>> runcurryA' (+) (Nothing :& Just 3 :& RNil)
Nothing
-}
runcurryA' :: (Applicative f) => Curried ts a -> Rec f ts -> f a
-- Sequence the effects (wrapping each field in 'Identity'), then apply
-- the pure uncurried function underneath @f@.
runcurryA' f = fmap (runcurry' f) . rtraverse (fmap Identity)
{-# INLINE runcurryA' #-}
{-|
Lift an N-ary function over types in @g@ to work over a record of 'Compose'd
'Applicative' computations. A more general version of 'runcurryA''.
Example specialized signatures:
@
runcurryA :: (g x -> g y -> a) -> Rec (Compose Maybe g) '[x, y] -> Maybe a
@
-}
runcurryA :: (Applicative f) => CurriedF g ts a -> Rec (Compose f g) ts -> f a
runcurryA f = fmap (runcurry f) . rtraverse getCompose
{-# INLINE runcurryA #-}
-- * Curried Function Types
-- These closed type families compute, from a type-level list, the
-- shape of the corresponding curried function type.
{-|
For the list of types @ts@, @'Curried' ts a@ is a curried function type from
arguments of types in @ts@ to a result of type @a@.
>>> :kind! Curried '[Int, Bool, String] Int
Curried '[Int, Bool, String] Int :: *
= Int -> Bool -> [Char] -> Int
-}
type family Curried ts a where
  Curried '[] a = a
  Curried (t ': ts) a = t -> Curried ts a
{-|
For the type-level list @ts@, @'CurriedF' f ts a@ is a curried function type
from arguments of type @f t@ for @t@ in @ts@, to a result of type @a@.
>>> :kind! CurriedF Maybe '[Int, Bool, String] Int
CurriedF Maybe '[Int, Bool, String] Int :: *
= Maybe Int -> Maybe Bool -> Maybe [Char] -> Int
-}
type family CurriedF (f :: u -> Type) (ts :: [u]) a where
  CurriedF f '[] a = a
  CurriedF f (t ': ts) a = f t -> CurriedF f ts a
{-|
For the type-level list @ts@, @'CurriedX' f ts a@ is a curried function type
from arguments of type @HKD f t@ for @t@ in @ts@, to a result of type @a@.
>>> :set -XTypeOperators
>>> :kind! CurriedX (Maybe :. Identity) '[Int, Bool, String] Int
CurriedX (Maybe :. Identity) '[Int, Bool, String] Int :: *
= Maybe Int -> Maybe Bool -> Maybe [Char] -> Int
-}
type family CurriedX (f :: u -> Type) (ts :: [u]) a where
  CurriedX f '[] a = a
  CurriedX f (t ': ts) a = HKD f t -> CurriedX f ts a
|
VinylRecords/Vinyl
|
Data/Vinyl/Curry.hs
|
mit
| 5,327
| 0
| 12
| 1,187
| 888
| 469
| 419
| 49
| 1
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
import Data.Default (def)
import Data.Text (Text)
import Network.HTTP.Conduit (Manager, conduitManagerSettings, newManager)
import Yesod
import Yesod.Auth
import Yesod.Auth.Dummy -- just for testing, don't use in real life!!!
-- | Application state: just a shared HTTP connection manager.
data App = App
    { httpManager :: Manager
    }
-- Routes: the home page accepts GET and POST, /admin is the
-- admin-only page, and /auth hosts the authentication subsite.
mkYesod "App" [parseRoutes|
/ HomeR GET POST
/admin AdminR GET
/auth AuthR Auth getAuth
|]
-- Authorization policy: only the "admin" user may POST to the home
-- page or view /admin; every other page is public.
instance Yesod App where
    -- Where to send unauthenticated users who hit a protected route.
    authRoute _ = Just $ AuthR LoginR
    -- route name, then a boolean indicating if it's a write request
    isAuthorized HomeR True = isAdmin
    isAuthorized AdminR _ = isAdmin
    -- anyone can access other pages
    isAuthorized _ _ = return Authorized
-- Shared check for the routes above. The AuthId is the credentials
-- ident verbatim (see the YesodAuth instance), so "admin" is the sole
-- administrator.
isAdmin = do
    mu <- maybeAuthId
    return $ case mu of
        Nothing -> AuthenticationRequired
        Just "admin" -> Authorized
        Just _ -> Unauthorized "You must be an admin"
-- Dummy authentication: whatever ident the user enters becomes their
-- AuthId verbatim. NOTE(review): authDummy performs no verification —
-- for testing only, as the import comment already warns.
instance YesodAuth App where
    type AuthId App = Text
    getAuthId = return . Just . credsIdent
    loginDest _ = HomeR
    logoutDest _ = HomeR
    authPlugins _ = [authDummy]
    authHttpManager = httpManager
    -- Read the auth id straight out of the session.
    maybeAuthId = lookupSession "_ID"
instance RenderMessage App FormMessage where
    renderMessage _ _ = defaultFormMessage
-- | Home page: shows the current auth id, login/logout links, and a
-- POST form whose submission is admin-only (enforced by isAuthorized).
getHomeR :: Handler Html
getHomeR = do
    maid <- maybeAuthId
    defaultLayout
        [whamlet|
            <p>Note: Log in as "admin" to be an administrator.
            <p>Your current auth ID: #{show maid}
            $maybe _ <- maid
                <p>
                    <a href=@{AuthR LogoutR}>Logout
                <p>
                    <a href=@{AdminR}>Go to admin page
            <form method=post>
                Make a change (admins only)
                \ #
                <input type=submit>
        |]
-- | Accept the admin-only POST from the home page (authorization is
-- handled in isAuthorized), then bounce back to the home page.
postHomeR :: Handler ()
postHomeR =
    setMessage "You made some change to the page" >> redirect HomeR
-- | Admin landing page; only reachable once isAuthorized lets the
-- "admin" user through.
getAdminR :: Handler Html
getAdminR = defaultLayout
    [whamlet|
        <p>I guess you're an admin!
        <p>
            <a href=@{HomeR}>Return to homepage
    |]
-- | Build the shared HTTP manager and serve the app on port 3000.
main :: IO ()
main = do
    httpMgr <- newManager conduitManagerSettings
    warp 3000 (App httpMgr)
|
cirquit/quizlearner
|
resources/auth/Authentification3.hs
|
mit
| 2,378
| 3
| 11
| 755
| 398
| 210
| 188
| 51
| 3
|
-- | Minify JS and CSS files using no external @$PATH@ dependencies.
--
-- @
-- main :: IO ()
-- main = shakeArgs shakeOptions $ do
-- want ["\/\/*.min.js", "\/\/*.min.css"]
-- "//*.min.js" *> minifyJs
-- "//*.min.css" *> minifyCss
-- @
module Development.Shake.Minify where
import qualified Data.ByteString.Lazy as BS
import qualified Data.Text.IO as TIO
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Builder as LTB
import Development.Shake (Action, liftIO, need)
import Development.Shake.FilePath (dropExtension, (-<.>))
import qualified Text.CSS.Parse as CSS
import qualified Text.CSS.Render as CSS
import qualified Text.Jasmine as JS
-- | Given a @.min.js@ path, find the @.js@ file and minify it into the specified file name.
minifyJs :: FilePath -- ^ Desired minified JS files (ex: @"//*.min.js"@)
         -> Action ()
-- BUG FIX: this previously delegated to minifyCss', which would run the
-- CSS parser/renderer over JavaScript sources. Route through minifyJs'
-- (the Jasmine JS minifier) instead, mirroring how minifyCss delegates
-- to minifyCss'.
minifyJs = minifyJs' ((-<.> "js") . dropExtension)
-- | Same as `minifyJs` except take a function for custom file path mapping.
minifyJs' :: (FilePath -> FilePath) -- ^ Given a target minified JS file path, return the source JS file path.
          -> FilePath               -- ^ Desired minified JS file (ex: @"//*.min.js"@)
          -> Action ()
minifyJs' fromMin minJs = do
    let srcJs = fromMin minJs
    -- Register the dependency so Shake rebuilds when the source changes.
    need [srcJs]
    minified <- liftIO (JS.minifyFile srcJs)
    liftIO (BS.writeFile minJs minified)
-- | Given a @.min.css@ path, find the @.css@ file and minify it into the specified file name.
minifyCss :: FilePath -- ^ Desired minified CSS file (ex: @"//*.min.css"@)
          -> Action ()
minifyCss = minifyCss' (\minPath -> dropExtension minPath -<.> "css")
-- | Same as `minifyCss` except take a function for custom file path mapping.
minifyCss' :: (FilePath -> FilePath) -- ^ Given a target minified CSS file path, return the source CSS file path.
           -> FilePath              -- ^ Target minified CSS file (ex: @"//*.min.css"@)
           -> Action ()
minifyCss' fromMin minCss = do
    let srcCss = fromMin minCss
    need [srcCss]
    raw <- liftIO (TIO.readFile srcCss)
    -- A CSS parse failure aborts the build with the parser's message.
    let rendered = either error CSS.renderNestedBlocks (CSS.parseNestedBlocks raw)
    liftIO (TIO.writeFile minCss (LT.toStrict (LTB.toLazyText rendered)))
|
alfa07/shake-minify
|
src/Development/Shake/Minify.hs
|
mit
| 2,249
| 0
| 13
| 554
| 377
| 220
| 157
| 31
| 1
|
module Filter.Bib (bibliography) where
import Paths_ppp
import Emb (emb)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Data.Maybe (catMaybes, fromJust, fromMaybe, isJust)
import qualified Text.CSL as CSL
import Text.CSL.Pandoc (processCites)
import qualified Text.CSL.Style as Style
import Text.Pandoc.Definition
import Text.Pandoc.Walk
-- | Wrap a string as a raw TeX block.
tex :: String -> Block
tex = RawBlock (Format "tex")
-- | Wrap a string as a raw inline TeX fragment.
tex' :: String -> Inline
tex' = RawInline (Format "tex")
-- | Look up a metadata key on a whole document.
lookupMeta' :: String -> Pandoc -> Maybe MetaValue
lookupMeta' x (Pandoc meta _) = lookupMeta x meta
-- | Extract a plain string from a metadata value; Nothing for any
-- other (or absent) metadata shape.
fromMetaString :: Maybe MetaValue -> Maybe String
fromMetaString mv = case mv of
  Just (MetaString s) -> Just s
  _                   -> Nothing
-- | Interpret a metadata value as a list of strings: a single string
-- becomes a one-element list, a MetaList keeps only its MetaString
-- entries, and anything else yields the empty list.
fromMetaList :: Maybe MetaValue -> [String]
fromMetaList (Just (MetaString x)) = [x]
-- Pattern comprehension replaces @catMaybes . map (fromMetaString . Just)@;
-- non-MetaString entries fall through and are skipped, exactly as before.
fromMetaList (Just (MetaList xs))  = [x | MetaString x <- xs]
fromMetaList _ = []
-- | True when the CSL style's bibliography requests
-- "second-field-align: flush" (reference labels flush left).
secondFieldAlignFlush :: CSL.Style -> Bool
secondFieldAlignFlush style = fromMaybe False $ do
  bib <- CSL.biblio style
  sfa <- lookup "second-field-align" . CSL.bibOptions $ bib
  return $ sfa == "flush"
-- | Wrap the first element of the bibliography layout in <pppref>
-- marker tags so it can later be rewritten into a LaTeX \item label
-- (see 'replaceBibMarkers').
hackFlush :: CSL.Style -> CSL.Style
hackFlush style =
  let newbib = do
        bib <- CSL.biblio style
        let lay = CSL.bibLayout bib
            m x = Style.Const x Style.emptyFormatting
            xs = Style.elements lay
            xs' = [m "<pppref>"] ++ take 1 xs ++ [m "</pppref>"] ++ drop 1 xs
        return $ bib { CSL.bibLayout = lay { Style.elements = xs' } }
  in style { CSL.biblio = newbib }
-- | Turn the HTML marker tags injected by 'hackFlush' into the LaTeX
-- item-label delimiters; leave every other inline untouched.
replaceBibMarkers :: Inline -> Inline
replaceBibMarkers (RawInline (Format "html") "<pppref>" ) = tex' "\\item["
replaceBibMarkers (RawInline (Format "html") "</pppref>") = tex' "]"
replaceBibMarkers x = x
-- | Replace the citeproc-generated "refs" Div with a LaTeX itemize
-- environment; drop it entirely when it contains no reference entries.
wrapBibliography :: Block -> Block
wrapBibliography (Div ("refs",_,_) bs)
  | countRefs bs == 0 = Null
  | otherwise = Div ("",["references"],[]) $
      [tex "\\begin{itemize}"] ++
      walk catDivs bs ++
      [tex "\\end{itemize}"]
wrapBibliography x = x
-- | Count reference entries: Divs whose identifier starts with "ref-"
-- (the prefix pandoc-citeproc gives each bibliography item).
countRefs :: [Block] -> Int
countRefs [] = 0
countRefs ((Div ('r':'e':'f':'-':_,_,_) _):xs) = 1 + countRefs xs
-- Wildcard replaces an unused named binding (silences -Wall).
countRefs (_:xs) = countRefs xs
-- | Splice the contents of every Div up into the surrounding block
-- list, leaving all other blocks in place.
catDivs :: [Block] -> [Block]
catDivs = concatMap unwrap
  where
    unwrap (Div _ bs) = bs
    unwrap b          = [b]
-- | Run citation processing: load the CSL style (the embedded ieee.csl
-- when no "csl" metadata key is given), optionally localize it, read
-- all bibliography files, and apply 'processCites'. When the style
-- wants flush labels, additionally rewrite the generated bibliography
-- into a LaTeX itemize list.
bibliography' :: Pandoc -> IO Pandoc
bibliography' doc = do
  style <- case fromMetaString $ lookupMeta' "csl" doc of
    -- fromJust is safe only if ieee.csl is present in 'emb';
    -- NOTE(review): confirm the embedded asset list guarantees this.
    Nothing -> return
      . CSL.parseCSL'
      . LBS.fromStrict
      . fromJust
      . lookup "ieee.csl" $ emb
    Just fp -> fmap CSL.parseCSL' . LBS.readFile $ fp
  let flush = secondFieldAlignFlush style
      style' = if flush then hackFlush style else style
  style'' <- CSL.localizeCSL (fromMetaString $ lookupMeta' "csllocale" doc) style'
  bib <- fmap concat
    . mapM CSL.readBiblioFile
    . fromMetaList
    . lookupMeta' "bibliography" $ doc
  let doc' = processCites style'' bib doc
      -- Markers must be replaced before wrapping, matching hackFlush.
      doc'' = if flush
        then walk wrapBibliography . walk replaceBibMarkers $ doc'
        else doc'
  return doc''
-- | Entry point: only documents that declare a bibliography in their
-- metadata are processed; everything else passes through unchanged.
bibliography :: Pandoc -> IO Pandoc
bibliography doc =
  if isJust (lookupMeta' "bibliography" doc)
    then bibliography' doc
    else return doc
|
Thhethssmuz/ppp
|
src/Filter/Bib.hs
|
mit
| 3,441
| 0
| 18
| 938
| 1,180
| 600
| 580
| 84
| 4
|
module Network.Tremulous.Protocol (
module Network.Tremulous.NameInsensitive
, Delay(..)
, Team(..)
, GameServer(..)
, Player(..)
, MasterServer(..)
, PollResult(..)
, defaultDelay
, parseGameServer
, parseMasterServer
) where
import Prelude as P hiding (Maybe(..), maybe)
import Control.Applicative as A
import Control.Monad.State.Strict
import Data.Attoparsec.Char8 hiding (option)
import Data.Attoparsec (anyWord8)
import Data.ByteString.Char8 as B
import Network.Tremulous.StrictMaybe
import Data.Bits
import Data.Word
import Network.Socket
import Network.Tremulous.ByteStringUtils as B
import Network.Tremulous.SocketExtensions
import Network.Tremulous.NameInsensitive
import Network.Tremulous.TupleReader
-- | Timing knobs for the polling machinery. NOTE(review): the defaults
-- below suggest these are microseconds — confirm against the poller.
data Delay = Delay
    { packetTimeout
    , packetDuplication
    , throughputDelay :: !Int
    }
-- | A master server to query for the game-server list.
data MasterServer = MasterServer
    { masterAddress :: !SockAddr
    , masterProtocol :: !Int
    } deriving Eq
-- | A polled game server and everything parsed from its status reply.
data GameServer = GameServer
    { address :: !SockAddr
    , gameping
    , protocol :: !Int
    , hostname :: !TI
    , gamemod
    , version
    , mapname :: !(Maybe TI)
    , slots :: !Int
    , privslots :: !Int
    , protected
    , unlagged :: !Bool
    , timelimit
    , suddendeath :: !(Maybe Int)
    , nplayers :: !Int
    , players :: ![Player]
    }
-- | Player team as encoded (one character per slot) in the "P" cvar.
data Team = Spectators | Aliens | Humans | Unknown deriving (Eq, Show)
-- | One scoreboard entry from a status reply.
data Player = Player
    { team :: !Team
    , kills
    , ping :: !Int
    , name :: !TI
    }
-- | Outcome of polling a batch of servers.
data PollResult = PollResult
    { polled :: ![GameServer]
    , serversResponded
    , serversRequested :: !Int
    }
-- | Default timing parameters (see the units note on 'Delay').
defaultDelay :: Delay
defaultDelay = Delay
    { packetTimeout = 400 * 1000
    , packetDuplication = 2
    , throughputDelay = 1 * 1000
    }
-- | Parse one player line of the status reply: kills, ping, and a
-- quoted (color-coded) name. The team is supplied by the caller since
-- it travels separately in the "P" cvar. Uses RecordWildCards to fill
-- the 'Player' fields from the local bindings.
parsePlayer :: Team -> ByteString -> Maybe Player
parsePlayer team = parseMaybe $ do
    kills <- signed decimal
    skipSpace
    ping <- signed decimal
    skipSpace
    name <- mkColor <$> quoted
    return Player {..}
-- cvar P: one character per client slot giving that player's team;
-- '-' marks an unused slot and is dropped.
parseP :: ByteString -> [Team]
parseP = foldr' f []
    where
    f '-' xs = xs
    f a xs = readTeam a : xs
    readTeam x = case x of
        '0' -> Spectators
        '1' -> Aliens
        '2' -> Humans
        _ -> Unknown
-- | Pair raw player lines with teams from the "P" cvar when present;
-- without it (or past its end) every player's team is Unknown.
parsePlayers :: Maybe ByteString -> [ByteString] -> Maybe [Player]
parsePlayers Nothing xs = mapM (parsePlayer Unknown) xs
parsePlayers (Just p) xs = zipWithM parsePlayer (parseP p ++ repeat Unknown) xs
-- | Split a backslash-separated cvar string into key/value pairs;
-- a trailing unpaired key is discarded.
parseCVars :: ByteString -> [(ByteString, ByteString)]
parseCVars xs = f (splitfilter '\\' xs) where
    f (k:v:cs) = (k, v) : f cs
    f _ = []
-- | Parse a raw \"statusResponse\" datagram: the first line carries the
-- cvars, the remaining lines the players.
parseGameServer :: SockAddr -> ByteString -> Maybe GameServer
parseGameServer address str = do
    xs <- stripPrefix "\xFF\xFF\xFF\xFFstatusResponse" str
    case splitlines xs of
        (cvars:players) -> mkGameServer address players (parseCVars cvars)
        _ -> Nothing
-- | Assemble a 'GameServer' from cvar key/value pairs and raw player
-- lines. Yields Nothing when a required cvar is missing or unparsable.
mkGameServer :: SockAddr
    -> [ByteString]
    -> [(ByteString, ByteString)]
    -> Maybe GameServer
mkGameServer address rawplayers = tupleReader $ do
    timelimit <- optionWith maybeInt "timelimit"
    hostname <- mkColor <$> require "sv_hostname"
    protocol <- requireWith maybeInt "protocol"
    mapname <- optionWith (Just . mk) "mapname"
    version <- optionWith (Just . mk) "version"
    gamemod <- optionWith mkMod "gamename"
    p <- option "P"
    players <- lift $ parsePlayers p rawplayers
    protected <- mkBool <$> option "g_needpass"
    -- Private slots are subtracted from the advertised maximum below.
    privslots <- fromMaybe 0 <$> optionWith maybeInt
        "sv_privateClients"
    slots <- subtract privslots <$> requireWith maybeInt
        "sv_maxclients"
    suddendeath <- optionWith maybeInt "g_suddenDeathTime"
    unlagged <- mkBool <$> option "g_unlagged"
    return GameServer
        { gameping = -1  -- -1: not measured here; presumably set by the poller
        , nplayers = P.length players
        , ..
        }
    where
    mkMod "base" = Nothing  -- the stock game counts as "no mod"
    mkMod a = Just (mk a)
    mkBool = maybe False (/="0")
-- | Parse a \"getserversResponse\" datagram into socket addresses.
-- Entries with a zero IP or port are skipped (the parser retries on
-- the next entry).
parseMasterServer :: ByteString -> Maybe [SockAddr]
parseMasterServer = parseMaybe (static *> A.many addr)
    where
    static = string "\xFF\xFF\xFF\xFFgetserversResponse"
    addr = do
        char '\\'
        ip <- parseUInt32N
        port <- parseUInt16N
        if port == 0 || ip == 0
            then addr
            else return $ SockAddrInet (PortNum (htons port)) (htonl ip)
-- | Read a 32-bit word transmitted in network (big-endian) byte order.
parseUInt32N :: Parser Word32
parseUInt32N = do
    b3 <- wg
    b2 <- wg
    b1 <- wg
    b0 <- wg
    return $ (b3 << 24) .|. (b2 << 16) .|. (b1 << 8) .|. b0
    where
    wg = fromIntegral <$> anyWord8
    (<<) = unsafeShiftL
-- | Read a 16-bit word transmitted in network (big-endian) byte order.
parseUInt16N :: Parser Word16
parseUInt16N = do
    b1 <- wg
    b0 <- wg
    return $ (b1 << 8) .|. b0
    where
    wg = fromIntegral <$> anyWord8
    (<<) = unsafeShiftL
-- /// Attoparsec utils ////////////////////////////////////////////////////////
-- | A double-quoted string; stops at the first '"', so escaped quotes
-- inside the string are not handled.
quoted :: Parser ByteString
quoted = char '"' *> takeTill (=='"') <* char '"'
-- | Run a parser over a complete input, collapsing any failure to
-- Nothing (note: this is the strict Maybe from StrictMaybe).
parseMaybe :: Parser a -> ByteString -> Maybe a
parseMaybe p input = either (const Nothing) Just (parseOnly p input)
|
Cadynum/tremulous-query
|
Network/Tremulous/Protocol.hs
|
gpl-3.0
| 5,409
| 0
| 15
| 1,693
| 1,545
| 821
| 724
| -1
| -1
|
import System.Environment
import System.IO
import Data.Char
import Data.List
import Data.Ord
import Control.Arrow ((&&&))
-- | Pointwise disjunction of two predicates: @orf f g x@ holds when
-- either @f x@ or @g x@ does.
orf :: (a -> Bool) -> (a -> Bool) -> a -> Bool
-- Redundant lambda removed: bind the argument directly.
orf f g x = f x || g x
-- | Tally word frequencies: lower-case the text, keep only letters and
-- whitespace, split into words, and return (word, count) pairs sorted
-- by descending count (ties keep alphabetical order from the sort).
wordCount :: String -> [(String, Int)]
wordCount text = sortBy (flip (comparing snd)) tallies
  where
    normalized = filter (\c -> isAlpha c || isSpace c) (map toLower text)
    -- group never yields empty runs, so the pattern is total here.
    tallies = [(w, length run) | run@(w:_) <- group (sort (words normalized))]
-- | Render one (word, count) pair as "count word".
wordCountLine :: (String, Int) -> String
wordCountLine (w, n) = show n ++ " " ++ w
-- | Read the file named by the first command-line argument and write
-- its word frequencies ("count word" per line, most frequent first)
-- to freqs.txt. Prints a usage message instead of crashing on
-- 'head []' when no argument is given.
main :: IO ()
main = do
    args <- getArgs
    case args of
        (path:_) -> do
            input <- readFile path
            -- writeFile + unlines replaces the openFile/hPutStrLn/hClose
            -- dance and the result-discarding mapM; output is identical.
            writeFile "freqs.txt" (unlines (map wordCountLine (wordCount input)))
        [] -> hPutStrLn stderr "usage: word_frequency INPUT_FILE"
|
CS-CLUB/contests
|
2013/submissions/Wesley Taylor/1/haskell/word_frequency.hs
|
gpl-3.0
| 662
| 0
| 14
| 142
| 305
| 157
| 148
| 18
| 1
|
predecessor = predecessor
-- comment
successor :: a
successor = successor
|
evolutics/haskell-formatter
|
testsuite/resources/source/comments/depends_on_displacement/single_annotation/single_line/none/Input.hs
|
gpl-3.0
| 74
| 0
| 4
| 11
| 17
| 10
| 7
| 3
| 1
|
-- -----------------------------------------------------------------------------
module Main(main) where
-- -----------------------------------------------------------------------------
import System.Environment( getArgs, getProgName )
import System.Console.GetOpt(
ArgOrder(..), OptDescr(..), ArgDescr(..), getOpt, usageInfo )
-- -----------------------------------------------------------------------------
-- | Parsed command-line options; startIndex comes from the -n flag.
data Options = Options { startIndex :: !Int }
              deriving( Show )
-- | Options used when no flags are supplied: index 0.
defaultOptions :: Options
defaultOptions = Options 0
-- -----------------------------------------------------------------------------
-- | Supported flags: -n N sets the start index (validated by
-- 'setStartIndex', which may reject the value with an error string).
options :: [OptDescr (Options -> Either String Options)]
options = [ Option "n" []
            (ReqArg setStartIndex "N") "start index of date"]
-- -----------------------------------------------------------------------------
-- | Usage header built from the executable's own name.
header :: IO String
header = fmap (\prog -> "Usage: " ++ prog ++ " [Options]") getProgName
-- -----------------------------------------------------------------------------
-- | Parse arguments with GetOpt: on success fold the option setters
-- over the defaults and print the result; otherwise abort with the
-- accumulated errors plus usage info.
main :: IO ()
main = do
  h <- header
  args <- getArgs
  case getOpt Permute options args of
    -- foldl is fine here: the flag list is tiny.
    (xs, _, []) -> case foldl ebind (Right defaultOptions) xs of
      Right opts -> print opts
      Left err -> error $ err ++ usageInfo h options
    (_, _, zs) -> error $ concat zs ++ usageInfo h options
-- -----------------------------------------------------------------------------
-- | Thread an Options value through the next option setter,
-- short-circuiting on the first Left. This is exactly '(>>=)'
-- specialised to @Either String Options@, so we use it directly.
ebind :: Either String Options -> (Options -> Either String Options)
      -> Either String Options
ebind = (>>=)
-- -----------------------------------------------------------------------------
-- | Setter for the -n flag: parse a non-negative start index, or
-- return an error string (rendered later with the usage info).
setStartIndex :: String -> Options -> Either String Options
setStartIndex n opts = case readMaybe n of
  Just num
    | num < 0 -> Left "Error, index must be >= 0\n"
    | otherwise -> Right $ opts { startIndex = num }
  _ -> Left $ "Error, invalid number syntax: " ++ n ++ "\n"
-- -----------------------------------------------------------------------------
-- | Total replacement for 'read': Just the value only when the whole
-- string is consumed by a single parse, Nothing otherwise.
readMaybe :: (Read a) => String -> Maybe a
readMaybe str =
  case reads str of
    [(val, "")] -> Just val
    _           -> Nothing
-- -----------------------------------------------------------------------------
|
zhensydow/zhtoolkit
|
src/ArrangeDate.hs
|
gpl-3.0
| 2,267
| 0
| 14
| 411
| 553
| 289
| 264
| 41
| 3
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.