| code (string, 5 to 1.03M chars) | repo_name (string, 5 to 90) | path (string, 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveTraversable #-}
module Jade.Decode.Types where
import GHC.Generics
import qualified Data.Vector as V
import qualified Data.Map as DM
import qualified Data.List as DL
import qualified Data.ByteString as DB
import qualified Data.ByteString.Lazy as DBL
import qualified Data.ByteString.Lazy.Char8 as DBL8
import Jade.Decode.Util
import Data.Hashable
import Text.Printf
import Data.Aeson
------------------------------------------------------------------
-- Icon Types
newtype Line = Line Coord5 deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data Terminal = Terminal Coord3 ValBundle deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
newtype Box = Box Coord5 deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data Txt = Txt { txtLoc :: Coord3
, txtText :: String
, txtFont :: Maybe String
} deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data Circle = Circle { circleX :: Integer
, circleY :: Integer
, circleR :: Integer
} deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data IconPart = IconLine Line
| IconTerm Terminal
| IconBox Box
| IconTxt Txt
| IconCircle Circle
| IconProperty
| IconArc
deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data Icon = Icon { iconParts :: [IconPart]
-- ^ A flat list of icon elements found in the icon view
} deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data ModPath = ModPath { modPath :: FilePath
, modFile :: String
} deriving (Generic, Show, Eq, ToJSON)
newtype Vdd = Vdd Coord3 deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
------------------------------------------------------------------
-- Schematic Types
type ModuleName = String
data Direction = In | Out | InOut
deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data SigNum = Bin String
| Num Integer
deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data SigType = OneSig Sig
| ManySig [Sig]
deriving (Show, Eq, Generic, Hashable, Ord, ToJSON)
data Sig = SigSimple String
| SigIndex String Integer
| SigHash String Integer
| SigRange String Integer Integer
| SigRangeStep String Integer Integer Integer
| SigQuote Integer Integer
deriving (Show, Eq, Generic, Hashable, Ord, ToJSON)
newtype Bundle a = Bundle [a] deriving (Show, Eq, Generic, Hashable, Ord, Foldable, ToJSON)
instance Functor Bundle where
fmap f (Bundle xs) = Bundle (map f xs)
instance Applicative Bundle where
pure x = Bundle [x]
(<*>) (Bundle fs) (Bundle xs) = Bundle (fs <*> xs)
instance Monad Bundle where
(>>=) (Bundle xs) f = Bundle $ concat [ys | Bundle ys <- map f xs]
instance Monoid (Bundle a) where
mconcat bs = Bundle $ concat [x | Bundle x <- bs]
mappend (Bundle x) (Bundle y) = Bundle (x ++ y)
mempty = Bundle []
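-- Illustrative sketch, not part of the original module: the instances above
-- give Bundle ordinary list-like semantics, so a hypothetical helper can pair
-- every name in one bundle with every index in another.
bundleSketch :: Bundle String
bundleSketch = do
  name <- Bundle ["a", "b"]
  idx  <- Bundle ["0", "1"]
  pure (name ++ "[" ++ idx ++ "]")
-- bundleSketch == Bundle ["a[0]","a[1]","b[0]","b[1]"]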
-- TODO refactor name Val to SubSig
-- TODO consider this phantom type.
-- data Val a = ValIndex String Integer
type Index = Integer
type NetId = Integer
data Val = ValIndex { valIdxName :: String
, valIdxIdx :: Index
}
| NetIndex { netIdxId :: NetId
, netIdxIdx :: Index
}
| Lit { litBinVal :: BinVal }
deriving (Show, Eq, Generic, Hashable, Ord, ToJSON)
type ValBundle = Bundle Val
data Signal = Signal { signalName :: Maybe ValBundle
, signalWidth :: Int
, signalDirection :: Maybe Direction
} deriving (Show, Eq, Generic, Hashable, Ord, ToJSON)
data Rot = Rot0
| Rot270
| Rot180
| Rot90
| FlipX
| TransposeNeg
| FlipY
| TransposePos
deriving (Show, Enum, Eq, Generic, Hashable, Ord, ToJSON)
data Coord3 = Coord3 { c3x :: Integer
, c3y :: Integer
, c3r :: Rot
} deriving (Show, Eq, Generic, Hashable, Ord, ToJSON)
data Point = Point Integer Integer deriving (Show, Eq, Generic, Hashable, Ord, ToJSON)
data Wire = Wire { wireCoord5 :: Coord5
, wireSignal :: Maybe Signal
} deriving (Show, Eq, Generic, Hashable, Ord, ToJSON)
data Coord5 = Coord5 { c5x :: Integer
, c5y :: Integer
, c5r :: Rot
, c5dx :: Integer
, c5dy :: Integer
} deriving (Show, Eq, Generic, Hashable, Ord, ToJSON)
data Port = Port { portCoord3 :: Coord3
, portSignal :: Maybe Signal
} deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data SubModule = SubModule { subName :: String
, subCoord3 :: Coord3 }
| SubMemUnit MemUnit
deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
newtype Jumper = Jumper Coord3 deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data MemUnit = MemUnit { memName :: String
, memCoord3 :: Coord3
, memContents :: String
, memNumPorts :: Integer
-- ^ number of memory ports in this unit.
, memNumAddr :: Integer
-- ^ width of the address terminal
, memNumData :: Integer
-- ^ width of the output data terminal
} deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data Part = PortC Port
| SubModuleC SubModule
| WireC Wire
| JumperC Jumper
| TermC Terminal
| UnusedPart
deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
type Test = String
data Schematic = Schematic [Part] deriving (Generic, Show, Eq, Ord, ToJSON)
instance Hashable Schematic where
hash (Schematic v) = hash v
data Module = Module { moduleName :: String
, moduleSchem :: Maybe Schematic
, moduleTest :: Maybe ModTest
, moduleIcon :: Maybe Icon
}
| BuiltInModule String
deriving (Generic, Show, Eq, Hashable, Ord, ToJSON) -- todo add test
newtype TopLevel = TopLevel (DM.Map String Module) deriving (Generic, Show, Eq, ToJSON)
data BoundingBox = BB { bbLeft :: Integer
, bbTop :: Integer
, bbRight :: Integer
, bbBottom :: Integer } deriving (Generic, Show, Eq, ToJSON)
class LocRot a where
locrot :: a -> Coord3
instance LocRot Coord3 where locrot x = x
instance LocRot Coord5 where locrot (Coord5 x y r _ _) = Coord3 x y r
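-- Illustrative note, not part of the original module: locrot lets code that
-- only needs a position and rotation accept either coordinate type, e.g.
-- locrot (Coord5 3 4 Rot90 10 0) == Coord3 3 4 Rot90.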
------------------------------------------------------------------
-- Test Types
newtype Power = Power { powerVdd :: Double } deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data Thresholds = Thresholds { thVol :: Double
, thVil :: Double
, thVih :: Double
, thVoh :: Double
} deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
newtype Inputs = Inputs [ValBundle] deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
newtype Outputs = Outputs [ValBundle] deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data Mode = Device | Gate deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data Duration = Nanosecond Double
| Millisecond Double
deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data Action = Assert String
| Deassert String
| Sample String
| Tran Duration
| SetSignal ValBundle Double
deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
newtype CycleLine = CycleLine [Action] deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data BinVal = L | H | Z deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data TestLine = TestLine { testLineBinVals :: [BinVal]
, testLineComment :: Maybe String
} deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data PlotDef = PlotDef ValBundle [String] deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data PlotStyle = BinStyle ValBundle
| HexStyle ValBundle
| DecStyle ValBundle
| SimplePlot ValBundle
| PlotDefStyle String ValBundle
deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
data ModTest = ModTest { modPower :: Maybe Power
, modThresholds :: Maybe Thresholds
, modInputs :: Maybe Inputs
, modOutputs :: Maybe Outputs
, modMode :: Maybe Mode
, modCycleLine :: Maybe CycleLine
, modTestLines :: [TestLine]
, modPlotDef :: [PlotDef]
, modPlotStyles :: [PlotStyle]
} deriving (Generic, Show, Eq, Hashable, Ord, ToJSON)
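-- Illustrative sketch, not part of the original module: a one-bit input wire
-- carrying a hypothetical signal named "clk", assembled from the types above.
wireSketch :: Wire
wireSketch =
  Wire (Coord5 0 0 Rot0 8 0)
       (Just (Signal (Just (Bundle [ValIndex "clk" 0])) 1 (Just In)))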
| drhodes/jade2hdl | jade-decode/src/Jade/Decode/Types.hs | bsd-3-clause | 9,368 | 0 | 11 | 3,219 | 2,637 | 1,479 | 1,158 | 186 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE PolyKinds #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Servant.API.ContentTypesSpec where
import Prelude ()
import Prelude.Compat
import Data.Aeson
import Data.ByteString.Char8 (ByteString, append, pack)
import qualified Data.ByteString.Lazy as BSL
import Data.Either
import Data.Function (on)
import Data.List (maximumBy)
import Data.Maybe (fromJust, isJust, isNothing)
import Data.Proxy
import Data.String (IsString (..))
import Data.String.Conversions (cs)
import qualified Data.Text as TextS
import qualified Data.Text.Lazy as TextL
import GHC.Generics
import Test.Hspec
import Test.QuickCheck
import "quickcheck-instances" Test.QuickCheck.Instances ()
import Servant.API.ContentTypes
spec :: Spec
spec = describe "Servant.API.ContentTypes" $ do
describe "handleAcceptH" $ do
let p = Proxy :: Proxy '[PlainText]
it "matches any charset if none were provided" $ do
let without = handleAcceptH p (AcceptHeader "text/plain")
with = handleAcceptH p (AcceptHeader "text/plain;charset=utf-8")
wisdom = "ubi sub ubi" :: String
without wisdom `shouldBe` with wisdom
it "does not match non utf-8 charsets" $ do
let badCharset = handleAcceptH p (AcceptHeader "text/plain;charset=whoknows")
s = "cheese" :: String
badCharset s `shouldBe` Nothing
describe "The JSON Content-Type type" $ do
let p = Proxy :: Proxy JSON
it "handles whitespace at end of input" $ do
mimeUnrender p "[1] " `shouldBe` Right [1 :: Int]
it "handles whitespace at beginning of input" $ do
mimeUnrender p " [1] " `shouldBe` Right [1 :: Int]
it "does not like junk at end of input" $ do
mimeUnrender p "[1] this probably shouldn't work"
`shouldSatisfy` (isLeft :: Either a [Int] -> Bool)
it "has mimeUnrender reverse mimeRender for valid top-level json ([Int]) " $ do
property $ \x -> mimeUnrender p (mimeRender p x) == Right (x::[Int])
it "has mimeUnrender reverse mimeRender for valid top-level json " $ do
property $ \x -> mimeUnrender p (mimeRender p x) == Right (x::SomeData)
describe "The PlainText Content-Type type" $ do
let p = Proxy :: Proxy PlainText
it "has mimeUnrender reverse mimeRender (lazy Text)" $ do
property $ \x -> mimeUnrender p (mimeRender p x) == Right (x::TextL.Text)
it "has mimeUnrender reverse mimeRender (strict Text)" $ do
property $ \x -> mimeUnrender p (mimeRender p x) == Right (x::TextS.Text)
describe "The OctetStream Content-Type type" $ do
let p = Proxy :: Proxy OctetStream
it "is id (Lazy ByteString)" $ do
property $ \x -> mimeRender p x == (x :: BSL.ByteString)
&& mimeUnrender p x == Right x
it "is fromStrict/toStrict (Strict ByteString)" $ do
property $ \x -> mimeRender p x == BSL.fromStrict (x :: ByteString)
&& mimeUnrender p (BSL.fromStrict x) == Right x
describe "handleAcceptH" $ do
it "returns Nothing if the 'Accept' header doesn't match" $ do
handleAcceptH (Proxy :: Proxy '[JSON]) "text/plain" (3 :: Int)
`shouldSatisfy` isNothing
it "returns Just if the 'Accept' header matches" $ do
handleAcceptH (Proxy :: Proxy '[JSON]) "*/*" (3 :: Int)
`shouldSatisfy` isJust
handleAcceptH (Proxy :: Proxy '[PlainText, JSON]) "application/json" (3 :: Int)
`shouldSatisfy` isJust
handleAcceptH (Proxy :: Proxy '[PlainText, JSON, OctetStream])
"application/octet-stream" ("content" :: ByteString)
`shouldSatisfy` isJust
it "returns the Content-Type as the first element of the tuple" $ do
handleAcceptH (Proxy :: Proxy '[JSON]) "*/*" (3 :: Int)
`shouldSatisfy` ((== "application/json") . fst . fromJust)
handleAcceptH (Proxy :: Proxy '[PlainText, JSON]) "application/json" (3 :: Int)
`shouldSatisfy` ((== "application/json") . fst . fromJust)
handleAcceptH (Proxy :: Proxy '[PlainText, JSON, OctetStream])
"application/octet-stream" ("content" :: ByteString)
`shouldSatisfy` ((== "application/octet-stream") . fst . fromJust)
it "returns the appropriately serialized representation" $ do
property $ \x -> handleAcceptH (Proxy :: Proxy '[JSON]) "*/*" (x :: SomeData)
== Just ("application/json", encode x)
it "respects the Accept spec ordering" $ do
let highest a b c = maximumBy (compare `on` snd)
[ ("application/octet-stream", a)
, ("application/json", b)
, ("text/plain;charset=utf-8", c)
]
let acceptH a b c = addToAccept (Proxy :: Proxy OctetStream) a $
addToAccept (Proxy :: Proxy JSON) b $
addToAccept (Proxy :: Proxy PlainText ) c ""
let val a b c i = handleAcceptH (Proxy :: Proxy '[OctetStream, JSON, PlainText])
(acceptH a b c) (i :: Int)
property $ \a b c i -> fst (fromJust $ val a b c i) == fst (highest a b c)
describe "handleCTypeH" $ do
it "returns Nothing if the 'Content-Type' header doesn't match" $ do
handleCTypeH (Proxy :: Proxy '[JSON]) "text/plain" "the time has come, the Walrus said "
`shouldBe` (Nothing :: Maybe (Either String Value))
context "the 'Content-Type' header matches" $ do
it "returns Just if the parameter matches" $ do
handleCTypeH (Proxy :: Proxy '[JSON]) "application/json"
"to talk of many things "
`shouldSatisfy` (isJust :: Maybe (Either String Value) -> Bool)
it "returns Just if there is no parameter" $ do
handleCTypeH (Proxy :: Proxy '[JSON]) "application/json"
"to talk of many things "
`shouldSatisfy` (isJust :: Maybe (Either String Value) -> Bool)
it "returns Just Left if the decoding fails" $ do
let isJustLeft :: Maybe (Either String Value) -> Bool
isJustLeft (Just (Left _)) = True
isJustLeft _ = False
handleCTypeH (Proxy :: Proxy '[JSON]) "application/json"
"Of shoes--and ships--and sealing-wax-- "
`shouldSatisfy` isJustLeft
it "returns Just (Right val) if the decoding succeeds" $ do
let val = SomeData "Of cabbages--and kings" 12
handleCTypeH (Proxy :: Proxy '[JSON]) "application/json"
(encode val)
`shouldBe` Just (Right val)
#if MIN_VERSION_aeson(0,9,0)
-- aeson >= 0.9 decodes top-level strings
describe "eitherDecodeLenient" $ do
it "parses top-level strings" $ do
let toMaybe = either (const Nothing) Just
-- The Left messages differ, so convert to Maybe
property $ \x -> toMaybe (eitherDecodeLenient x)
`shouldBe` (decode x :: Maybe String)
#endif
data SomeData = SomeData { record1 :: String, record2 :: Int }
deriving (Generic, Eq, Show)
newtype ZeroToOne = ZeroToOne Float
deriving (Eq, Show, Ord)
instance FromJSON SomeData
instance ToJSON SomeData
instance Arbitrary SomeData where
arbitrary = SomeData <$> arbitrary <*> arbitrary
instance Arbitrary ZeroToOne where
arbitrary = ZeroToOne <$> elements [ x / 10 | x <- [1..10]]
instance MimeRender OctetStream Int where
mimeRender _ = cs . show
instance MimeRender PlainText Int where
mimeRender _ = cs . show
instance MimeRender PlainText ByteString where
mimeRender _ = cs
instance ToJSON ByteString where
toJSON x = object [ "val" .= x ]
instance IsString AcceptHeader where
fromString = AcceptHeader . fromString
addToAccept :: Accept a => Proxy a -> ZeroToOne -> AcceptHeader -> AcceptHeader
addToAccept p (ZeroToOne f) (AcceptHeader h) = AcceptHeader (cont h)
where new = cs (show $ contentType p) `append` "; q=" `append` pack (show f)
cont "" = new
cont old = old `append` ", " `append` new
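-- Illustrative note, not part of the original spec: with the helper above,
--   addToAccept (Proxy :: Proxy JSON) (ZeroToOne 0.5) ""
-- should yield an AcceptHeader along the lines of "application/json; q=0.5",
-- and chaining further addToAccept calls comma-appends more media types; this
-- is how acceptH assembles the header for the Accept-ordering property.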
| zerobuzz/servant | servant/test/Servant/API/ContentTypesSpec.hs | bsd-3-clause | 9,237 | 14 | 27 | 2,905 | 2,265 | 1,181 | 1,084 | 152 | 2 |
module Handler.ProjectShowCommit where
import Import
import Data.Git
import Data.Git.Diff
import Data.Git.Storage
import Data.Git.Ref
import Data.Git.Repository
import Data.Git.Types
import Data.List as L (head, tail)
import Data.List.Split as L (splitOn)
import Data.Text as T (pack,unpack)
import Data.ByteString.Char8 as BC
import Data.ByteString.Lazy.Char8 as BL (unpack)
import Data.Time.Format
import Data.Algorithm.Patience (Item(..))
import System.Locale
myGetCommit :: Ref -> Git -> IO (Maybe Commit)
myGetCommit ref git = getCommitMaybe git ref
getProjectShowCommitR :: Text -> Text -> Text -> Handler Html
getProjectShowCommitR login projName ref = do
let currentRef = ref
extra <- getExtra
defaultLayout $ do
setTitle $ toHtml ("Hit - " `mappend` projName)
hitProjectPath <- liftIO $ getProjectPath (extraProjectsDir extra) login projName
$(widgetFile "project-show-menu")
case hitProjectPath of
Nothing -> error $ "No such project: " ++ (T.unpack projName)
Just path -> do
commitMaybe <- liftIO $ withRepo path $ myGetCommit $ fromHexString $ T.unpack ref
case commitMaybe of
Just commit -> do let message = L.splitOn "\n" $ BC.unpack $ commitMessage commit
commitHeaderId <- newIdent
commitId <- newIdent
$(widgetFile "project-show-commit")
diffList <- liftIO $ withRepo path $ getDiff (L.head $ commitParents commit) (fromHexString $ T.unpack ref)
identityDiffFile <- newIdent
$(widgetFile "project-show-diff-file")
Nothing -> error $ "Ref \"" ++ (T.unpack ref) ++ "\" unknown for project: " ++ (T.unpack projName)
| NicolasDP/hitweb | Handler/ProjectShowCommit.hs | bsd-3-clause | 1,909 | 0 | 26 | 584 | 518 | 269 | 249 | -1 | -1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnSource]{Main pass of renamer}
-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
module RnTypes (
-- Type related stuff
rnHsType, rnLHsType, rnLHsTypes, rnContext,
rnHsKind, rnLHsKind,
rnHsSigType, rnHsWcType,
rnHsSigWcType, rnHsSigWcTypeScoped,
rnLHsInstType,
newTyVarNameRn, collectAnonWildCards,
rnConDeclFields,
rnLTyVar,
-- Precedence related stuff
mkOpAppRn, mkNegAppRn, mkOpFormRn, mkConOpPatRn,
checkPrecMatch, checkSectionPrec,
-- Binding related stuff
bindLHsTyVarBndr,
bindSigTyVarsFV, bindHsQTyVars, bindLRdrNames,
extractFilteredRdrTyVars,
extractHsTyRdrTyVars, extractHsTysRdrTyVars,
extractHsTysRdrTyVarsDups, rmDupsInRdrTyVars,
extractRdrKindSigVars, extractDataDefnKindVars,
freeKiTyVarsAllVars, freeKiTyVarsKindVars, freeKiTyVarsTypeVars
) where
import {-# SOURCE #-} RnSplice( rnSpliceType )
import DynFlags
import HsSyn
import RnHsDoc ( rnLHsDoc, rnMbLHsDoc )
import RnEnv
import TcRnMonad
import RdrName
import PrelNames
import TysPrim ( funTyConName )
import TysWiredIn ( starKindTyConName, unicodeStarKindTyConName )
import Name
import SrcLoc
import NameSet
import FieldLabel
import Util
import BasicTypes ( compareFixity, funTyFixity, negateFixity,
Fixity(..), FixityDirection(..) )
import Outputable
import FastString
import Maybes
import qualified GHC.LanguageExtensions as LangExt
import Data.List ( (\\), nubBy, partition )
import Control.Monad ( unless, when )
#include "HsVersions.h"
{-
These type renamers are in a separate module, rather than in (say) RnSource,
to break several loops.
*********************************************************
* *
HsSigWcType (i.e with wildcards)
* *
*********************************************************
-}
rnHsSigWcType :: HsDocContext -> LHsSigWcType RdrName
-> RnM (LHsSigWcType Name, FreeVars)
rnHsSigWcType doc sig_ty
= rn_hs_sig_wc_type True doc sig_ty $ \sig_ty' ->
return (sig_ty', emptyFVs)
rnHsSigWcTypeScoped :: HsDocContext -> LHsSigWcType RdrName
-> (LHsSigWcType Name -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
-- Used for
-- - Signatures on binders in a RULE
-- - Pattern type signatures
-- Wildcards are allowed
-- type signatures on binders only allowed with ScopedTypeVariables
rnHsSigWcTypeScoped ctx sig_ty thing_inside
= do { ty_sig_okay <- xoptM LangExt.ScopedTypeVariables
; checkErr ty_sig_okay (unexpectedTypeSigErr sig_ty)
; rn_hs_sig_wc_type False ctx sig_ty thing_inside
}
-- False: for pattern type sigs and rules we /do/ want
-- to bring those type variables into scope
-- e.g \ (x :: forall a. a-> b) -> e
-- Here we do bring 'b' into scope
rn_hs_sig_wc_type :: Bool -- see rnImplicitBndrs
-> HsDocContext
-> LHsSigWcType RdrName
-> (LHsSigWcType Name -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
-- rn_hs_sig_wc_type is used for source-language type signatures
rn_hs_sig_wc_type no_implicit_if_forall ctxt
(HsWC { hswc_body = HsIB { hsib_body = hs_ty }})
thing_inside
= do { free_vars <- extractFilteredRdrTyVars hs_ty
; (tv_rdrs, nwc_rdrs) <- partition_nwcs free_vars
; rnImplicitBndrs no_implicit_if_forall tv_rdrs hs_ty $ \ vars ->
do { (wcs, hs_ty', fvs1) <- rnWcBody ctxt nwc_rdrs hs_ty
; let sig_ty' = HsWC { hswc_wcs = wcs, hswc_body = ib_ty' }
ib_ty' = HsIB { hsib_vars = vars, hsib_body = hs_ty' }
; (res, fvs2) <- thing_inside sig_ty'
; return (res, fvs1 `plusFV` fvs2) } }
rnHsWcType :: HsDocContext -> LHsWcType RdrName -> RnM (LHsWcType Name, FreeVars)
rnHsWcType ctxt (HsWC { hswc_body = hs_ty })
= do { free_vars <- extractFilteredRdrTyVars hs_ty
; (_, nwc_rdrs) <- partition_nwcs free_vars
; (wcs, hs_ty', fvs) <- rnWcBody ctxt nwc_rdrs hs_ty
; let sig_ty' = HsWC { hswc_wcs = wcs, hswc_body = hs_ty' }
; return (sig_ty', fvs) }
rnWcBody :: HsDocContext -> [Located RdrName] -> LHsType RdrName
-> RnM ([Name], LHsType Name, FreeVars)
rnWcBody ctxt nwc_rdrs hs_ty
= do { nwcs <- mapM newLocalBndrRn nwc_rdrs
; let env = RTKE { rtke_level = TypeLevel
, rtke_what = RnTypeBody
, rtke_nwcs = mkNameSet nwcs
, rtke_ctxt = ctxt }
; (hs_ty', fvs) <- bindLocalNamesFV nwcs $
rn_lty env hs_ty
; let awcs = collectAnonWildCards hs_ty'
; return (nwcs ++ awcs, hs_ty', fvs) }
where
rn_lty env (L loc hs_ty)
= setSrcSpan loc $
do { (hs_ty', fvs) <- rn_ty env hs_ty
; return (L loc hs_ty', fvs) }
rn_ty :: RnTyKiEnv -> HsType RdrName -> RnM (HsType Name, FreeVars)
-- A lot of faff just to allow the extra-constraints wildcard to appear
rn_ty env hs_ty@(HsForAllTy { hst_bndrs = tvs, hst_body = hs_body })
= bindLHsTyVarBndrs (rtke_ctxt env) (Just $ inTypeDoc hs_ty)
Nothing [] tvs $ \ _ tvs' _ _ ->
do { (hs_body', fvs) <- rn_lty env hs_body
; return (HsForAllTy { hst_bndrs = tvs', hst_body = hs_body' }, fvs) }
rn_ty env (HsQualTy { hst_ctxt = L cx hs_ctxt, hst_body = hs_ty })
| Just (hs_ctxt1, hs_ctxt_last) <- snocView hs_ctxt
, L lx (HsWildCardTy wc) <- ignoreParens hs_ctxt_last
= do { (hs_ctxt1', fvs1) <- mapFvRn (rn_top_constraint env) hs_ctxt1
; wc' <- setSrcSpan lx $
do { checkExtraConstraintWildCard env wc
; rnAnonWildCard wc }
; let hs_ctxt' = hs_ctxt1' ++ [L lx (HsWildCardTy wc')]
; (hs_ty', fvs2) <- rnLHsTyKi env hs_ty
; return (HsQualTy { hst_ctxt = L cx hs_ctxt', hst_body = hs_ty' }
, fvs1 `plusFV` fvs2) }
| otherwise
= do { (hs_ctxt', fvs1) <- mapFvRn (rn_top_constraint env) hs_ctxt
; (hs_ty', fvs2) <- rnLHsTyKi env hs_ty
; return (HsQualTy { hst_ctxt = L cx hs_ctxt', hst_body = hs_ty' }
, fvs1 `plusFV` fvs2) }
rn_ty env hs_ty = rnHsTyKi env hs_ty
rn_top_constraint env = rnLHsTyKi (env { rtke_what = RnTopConstraint })
checkExtraConstraintWildCard :: RnTyKiEnv -> HsWildCardInfo RdrName
-> RnM ()
-- Rename the extra-constraint spot in a type signature
-- (blah, _) => type
-- Check that extra-constraints are allowed at all, and
-- if so that it's an anonymous wildcard
checkExtraConstraintWildCard env wc
= checkWildCard env mb_bad
where
mb_bad | not (extraConstraintWildCardsAllowed env)
= Just (text "Extra-constraint wildcard" <+> quotes (ppr wc)
<+> text "not allowed")
| otherwise
= Nothing
extraConstraintWildCardsAllowed :: RnTyKiEnv -> Bool
extraConstraintWildCardsAllowed env
= case rtke_ctxt env of
TypeSigCtx {} -> True
ExprWithTySigCtx {} -> True
_ -> False
-- | Finds free type and kind variables in a type,
-- without duplicates, and
-- without variables that are already in scope in LocalRdrEnv
-- NB: this includes named wildcards, which look like perfectly
-- ordinary type variables at this point
extractFilteredRdrTyVars :: LHsType RdrName -> RnM FreeKiTyVars
extractFilteredRdrTyVars hs_ty
= do { rdr_env <- getLocalRdrEnv
; filterInScope rdr_env <$> extractHsTyRdrTyVars hs_ty }
-- | When the NamedWildCards extension is enabled, partition_nwcs
-- removes type variables that start with an underscore from the
-- FreeKiTyVars in the argument and returns them in a separate list.
-- When the extension is disabled, the function returns the argument
-- and empty list. See Note [Renaming named wild cards]
partition_nwcs :: FreeKiTyVars -> RnM (FreeKiTyVars, [Located RdrName])
partition_nwcs free_vars@(FKTV { fktv_tys = tys, fktv_all = all })
= do { wildcards_enabled <- fmap (xopt LangExt.NamedWildCards) getDynFlags
; let (nwcs, no_nwcs) | wildcards_enabled = partition is_wildcard tys
| otherwise = ([], tys)
free_vars' = free_vars { fktv_tys = no_nwcs
, fktv_all = all \\ nwcs }
; return (free_vars', nwcs) }
where
is_wildcard :: Located RdrName -> Bool
is_wildcard rdr = startsWithUnderscore (rdrNameOcc (unLoc rdr))
{- Note [Renaming named wild cards]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Identifiers starting with an underscore are always parsed as type variables.
It is only here in the renamer that we give the special treatment.
See Note [The wildcard story for types] in HsTypes.
It's easy! When we collect the implicitly bound type variables, ready
to bring them into scope, and NamedWildCards is on, we partition the
variables into the ones that start with an underscore (the named
wildcards) and the rest. Then we just add them to the hswc_wcs field
of the HsWildCardBndrs structure, and we are done.
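A small illustrative example, not from the original Note: with NamedWildCards
enabled, in
    f :: _a -> _b -> _a
partition_nwcs splits off the occurrences _a and _b, rnWcBody renames them,
and they land in the hswc_wcs field, while any remaining implicit type
variables are handled by rnImplicitBndrs as usual.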
*********************************************************
* *
HsSigtype (i.e. no wildcards)
* *
****************************************************** -}
rnHsSigType :: HsDocContext -> LHsSigType RdrName
-> RnM (LHsSigType Name, FreeVars)
-- Used for source-language type signatures
-- that cannot have wildcards
rnHsSigType ctx (HsIB { hsib_body = hs_ty })
= do { vars <- extractFilteredRdrTyVars hs_ty
; rnImplicitBndrs True vars hs_ty $ \ vars ->
do { (body', fvs) <- rnLHsType ctx hs_ty
; return (HsIB { hsib_vars = vars
, hsib_body = body' }, fvs) } }
rnImplicitBndrs :: Bool -- True <=> no implicit quantification
-- if type is headed by a forall
-- E.g. f :: forall a. a->b
-- Do not quantify over 'b' too.
-> FreeKiTyVars
-> LHsType RdrName
-> ([Name] -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
rnImplicitBndrs no_implicit_if_forall free_vars hs_ty@(L loc _) thing_inside
= do { let real_tv_rdrs -- Implicit quantification only if
-- there is no explicit forall
| no_implicit_if_forall
, L _ (HsForAllTy {}) <- hs_ty = []
| otherwise = freeKiTyVarsTypeVars free_vars
real_rdrs = freeKiTyVarsKindVars free_vars ++ real_tv_rdrs
; traceRn (text "rnSigType" <+> (ppr hs_ty $$ ppr free_vars $$
ppr real_rdrs))
; vars <- mapM (newLocalBndrRn . L loc . unLoc) real_rdrs
; bindLocalNamesFV vars $
thing_inside vars }
rnLHsInstType :: SDoc -> LHsSigType RdrName -> RnM (LHsSigType Name, FreeVars)
-- Rename the type in an instance or standalone deriving decl
-- The 'doc_str' is "an instance declaration" or "a VECTORISE pragma"
rnLHsInstType doc_str inst_ty
| Just cls <- getLHsInstDeclClass_maybe inst_ty
, isTcOcc (rdrNameOcc (unLoc cls))
-- The guards check that the instance type looks like
-- blah => C ty1 .. tyn
= do { let full_doc = doc_str <+> text "for" <+> quotes (ppr cls)
; rnHsSigType (GenericCtx full_doc) inst_ty }
| otherwise -- The instance is malformed, but we'd still like
-- to make progress rather than failing outright, so
-- we report more errors. So we rename it anyway.
= do { addErrAt (getLoc (hsSigType inst_ty)) $
text "Malformed instance:" <+> ppr inst_ty
; rnHsSigType (GenericCtx doc_str) inst_ty }
{- ******************************************************
* *
LHsType and HsType
* *
****************************************************** -}
{-
rnHsType is here because we call it from loadInstDecl, and I didn't
want a gratuitous knot.
Note [Context quantification]
-----------------------------
Variables in type signatures are implicitly quantified
when (1) they are in a type signature not beginning
with "forall" or (2) in any qualified type T => R.
We are phasing out (2) since it leads to inconsistencies
(Trac #4426):
data A = A (a -> a) is an error
data A = A (Eq a => a -> a) binds "a"
data A = A (Eq a => a -> b) binds "a" and "b"
data A = A (() => a -> b) binds "a" and "b"
f :: forall a. a -> b is an error
f :: forall a. () => a -> b is an error
f :: forall a. a -> (() => b) binds "a" and "b"
This situation is now considered to be an error. See rnHsTyKi for case
HsForAllTy Qualified.
Note [Dealing with *]
~~~~~~~~~~~~~~~~~~~~~
As a legacy from the days when types and kinds were different, we use
the type * to mean what we now call GHC.Types.Type. The problem is that
* should associate just like an identifier, *not* a symbol.
Running example: the user has written
T (Int, Bool) b + c * d
At this point, we have a bunch of stretches of types
[[T, (Int, Bool), b], [c], [d]]
these are the [[LHsType Name]] and a bunch of operators
[GHC.TypeLits.+, GHC.Types.*]
Note that the * is GHC.Types.*. So, we want to rearrange to have
[[T, (Int, Bool), b], [c, *, d]]
and
[GHC.TypeLits.+]
as our lists. We can then do normal fixity resolution on these. The fixities
must come along for the ride just so that the list stays in sync with the
operators.
Note [QualTy in kinds]
~~~~~~~~~~~~~~~~~~~~~~
I was wondering whether QualTy could occur only at TypeLevel. But no,
we can have a qualified type in a kind too. Here is an example:
type family F a where
F Bool = Nat
F Nat = Type
type family G a where
G Type = Type -> Type
G () = Nat
data X :: forall k1 k2. (F k1 ~ G k2) => k1 -> k2 -> Type where
MkX :: X 'True '()
See that k1 becomes Bool and k2 becomes (), so the equality is
satisfied. If I write MkX :: X 'True 'False, compilation fails with a
suitable message:
MkX :: X 'True '()
โข Couldn't match kind โG Boolโ with โNatโ
Expected kind: G Bool
Actual kind: F Bool
However: in a kind, the constraints in the QualTy must all be
equalities; or at least, any kinds with a class constraint are
uninhabited.
-}
data RnTyKiEnv
= RTKE { rtke_ctxt :: HsDocContext
, rtke_level :: TypeOrKind -- Am I renaming a type or a kind?
, rtke_what :: RnTyKiWhat -- And within that what am I renaming?
, rtke_nwcs :: NameSet -- These are the in-scope named wildcards
}
data RnTyKiWhat = RnTypeBody
| RnTopConstraint -- Top-level context of HsSigWcTypes
| RnConstraint -- All other constraints
instance Outputable RnTyKiEnv where
ppr (RTKE { rtke_level = lev, rtke_what = what
, rtke_nwcs = wcs, rtke_ctxt = ctxt })
= text "RTKE"
<+> braces (sep [ ppr lev, ppr what, ppr wcs
, pprHsDocContext ctxt ])
instance Outputable RnTyKiWhat where
ppr RnTypeBody = text "RnTypeBody"
ppr RnTopConstraint = text "RnTopConstraint"
ppr RnConstraint = text "RnConstraint"
mkTyKiEnv :: HsDocContext -> TypeOrKind -> RnTyKiWhat -> RnTyKiEnv
mkTyKiEnv cxt level what
= RTKE { rtke_level = level, rtke_nwcs = emptyNameSet
, rtke_what = what, rtke_ctxt = cxt }
isRnKindLevel :: RnTyKiEnv -> Bool
isRnKindLevel (RTKE { rtke_level = KindLevel }) = True
isRnKindLevel _ = False
--------------
rnLHsType :: HsDocContext -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnLHsType ctxt ty = rnLHsTyKi (mkTyKiEnv ctxt TypeLevel RnTypeBody) ty
rnLHsTypes :: HsDocContext -> [LHsType RdrName] -> RnM ([LHsType Name], FreeVars)
rnLHsTypes doc tys = mapFvRn (rnLHsType doc) tys
rnHsType :: HsDocContext -> HsType RdrName -> RnM (HsType Name, FreeVars)
rnHsType ctxt ty = rnHsTyKi (mkTyKiEnv ctxt TypeLevel RnTypeBody) ty
rnLHsKind :: HsDocContext -> LHsKind RdrName -> RnM (LHsKind Name, FreeVars)
rnLHsKind ctxt kind = rnLHsTyKi (mkTyKiEnv ctxt KindLevel RnTypeBody) kind
rnHsKind :: HsDocContext -> HsKind RdrName -> RnM (HsKind Name, FreeVars)
rnHsKind ctxt kind = rnHsTyKi (mkTyKiEnv ctxt KindLevel RnTypeBody) kind
--------------
rnTyKiContext :: RnTyKiEnv -> LHsContext RdrName -> RnM (LHsContext Name, FreeVars)
rnTyKiContext env (L loc cxt)
= do { traceRn (text "rncontext" <+> ppr cxt)
; let env' = env { rtke_what = RnConstraint }
; (cxt', fvs) <- mapFvRn (rnLHsTyKi env') cxt
; return (L loc cxt', fvs) }
where
rnContext :: HsDocContext -> LHsContext RdrName -> RnM (LHsContext Name, FreeVars)
rnContext doc theta = rnTyKiContext (mkTyKiEnv doc TypeLevel RnConstraint) theta
--------------
rnLHsTyKi :: RnTyKiEnv -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnLHsTyKi env (L loc ty)
= setSrcSpan loc $
do { (ty', fvs) <- rnHsTyKi env ty
; return (L loc ty', fvs) }
rnHsTyKi :: RnTyKiEnv -> HsType RdrName -> RnM (HsType Name, FreeVars)
rnHsTyKi env ty@(HsForAllTy { hst_bndrs = tyvars, hst_body = tau })
= do { checkTypeInType env ty
; bindLHsTyVarBndrs (rtke_ctxt env) (Just $ inTypeDoc ty)
Nothing [] tyvars $ \ _ tyvars' _ _ ->
do { (tau', fvs) <- rnLHsTyKi env tau
; return ( HsForAllTy { hst_bndrs = tyvars', hst_body = tau' }
, fvs) } }
rnHsTyKi env ty@(HsQualTy { hst_ctxt = lctxt, hst_body = tau })
= do { checkTypeInType env ty -- See Note [QualTy in kinds]
; (ctxt', fvs1) <- rnTyKiContext env lctxt
; (tau', fvs2) <- rnLHsTyKi env tau
; return (HsQualTy { hst_ctxt = ctxt', hst_body = tau' }
, fvs1 `plusFV` fvs2) }
rnHsTyKi env (HsTyVar (L loc rdr_name))
= do { name <- rnTyVar env rdr_name
; return (HsTyVar (L loc name), unitFV name) }
rnHsTyKi env ty@(HsOpTy ty1 l_op ty2)
= setSrcSpan (getLoc l_op) $
do { (l_op', fvs1) <- rnHsTyOp env ty l_op
; fix <- lookupTyFixityRn l_op'
; (ty1', fvs2) <- rnLHsTyKi env ty1
; (ty2', fvs3) <- rnLHsTyKi env ty2
; res_ty <- mkHsOpTyRn (\t1 t2 -> HsOpTy t1 l_op' t2)
(unLoc l_op') fix ty1' ty2'
; return (res_ty, plusFVs [fvs1, fvs2, fvs3]) }
rnHsTyKi env (HsParTy ty)
= do { (ty', fvs) <- rnLHsTyKi env ty
; return (HsParTy ty', fvs) }
rnHsTyKi env (HsBangTy b ty)
= do { (ty', fvs) <- rnLHsTyKi env ty
; return (HsBangTy b ty', fvs) }
rnHsTyKi env ty@(HsRecTy flds)
= do { let ctxt = rtke_ctxt env
; fls <- get_fields ctxt
; (flds', fvs) <- rnConDeclFields ctxt fls flds
; return (HsRecTy flds', fvs) }
where
get_fields (ConDeclCtx names)
= concatMapM (lookupConstructorFields . unLoc) names
get_fields _
= do { addErr (hang (text "Record syntax is illegal here:")
2 (ppr ty))
; return [] }
rnHsTyKi env (HsFunTy ty1 ty2)
= do { (ty1', fvs1) <- rnLHsTyKi env ty1
-- Might find a for-all as the arg of a function type
; (ty2', fvs2) <- rnLHsTyKi env ty2
-- Or as the result. This happens when reading Prelude.hi
-- when we find return :: forall m. Monad m -> forall a. a -> m a
-- Check for fixity rearrangements
; res_ty <- mkHsOpTyRn HsFunTy funTyConName funTyFixity ty1' ty2'
; return (res_ty, fvs1 `plusFV` fvs2) }
rnHsTyKi env listTy@(HsListTy ty)
= do { data_kinds <- xoptM LangExt.DataKinds
; when (not data_kinds && isRnKindLevel env)
(addErr (dataKindsErr env listTy))
; (ty', fvs) <- rnLHsTyKi env ty
; return (HsListTy ty', fvs) }
rnHsTyKi env t@(HsKindSig ty k)
= do { checkTypeInType env t
; kind_sigs_ok <- xoptM LangExt.KindSignatures
; unless kind_sigs_ok (badKindSigErr (rtke_ctxt env) ty)
; (ty', fvs1) <- rnLHsTyKi env ty
; (k', fvs2) <- rnLHsTyKi (env { rtke_level = KindLevel }) k
; return (HsKindSig ty' k', fvs1 `plusFV` fvs2) }
rnHsTyKi env t@(HsPArrTy ty)
= do { notInKinds env t
; (ty', fvs) <- rnLHsTyKi env ty
; return (HsPArrTy ty', fvs) }
-- Unboxed tuples are allowed to have poly-typed arguments. These
-- sometimes crop up as a result of CPR worker-wrappering dictionaries.
rnHsTyKi env tupleTy@(HsTupleTy tup_con tys)
= do { data_kinds <- xoptM LangExt.DataKinds
; when (not data_kinds && isRnKindLevel env)
(addErr (dataKindsErr env tupleTy))
; (tys', fvs) <- mapFvRn (rnLHsTyKi env) tys
; return (HsTupleTy tup_con tys', fvs) }
-- Ensure that a type-level integer is nonnegative (#8306, #8412)
rnHsTyKi env tyLit@(HsTyLit t)
= do { data_kinds <- xoptM LangExt.DataKinds
; unless data_kinds (addErr (dataKindsErr env tyLit))
; when (negLit t) (addErr negLitErr)
; checkTypeInType env tyLit
; return (HsTyLit t, emptyFVs) }
where
negLit (HsStrTy _ _) = False
negLit (HsNumTy _ i) = i < 0
negLitErr = text "Illegal literal in type (type literals must not be negative):" <+> ppr tyLit
rnHsTyKi env overall_ty@(HsAppsTy tys)
= do { -- Step 1: Break up the HsAppsTy into symbols and non-symbol regions
let (non_syms, syms) = splitHsAppsTy tys
-- Step 2: rename the pieces
; (syms1, fvs1) <- mapFvRn (rnHsTyOp env overall_ty) syms
; (non_syms1, fvs2) <- (mapFvRn . mapFvRn) (rnLHsTyKi env) non_syms
-- Step 3: deal with *. See Note [Dealing with *]
; let (non_syms2, syms2) = deal_with_star [] [] non_syms1 syms1
-- Step 4: collapse the non-symbol regions with HsAppTy
; non_syms3 <- mapM deal_with_non_syms non_syms2
-- Step 5: assemble the pieces, using mkHsOpTyRn
; L _ res_ty <- build_res_ty non_syms3 syms2
-- all done. Phew.
; return (res_ty, fvs1 `plusFV` fvs2) }
where
-- See Note [Dealing with *]
deal_with_star :: [[LHsType Name]] -> [Located Name]
-> [[LHsType Name]] -> [Located Name]
-> ([[LHsType Name]], [Located Name])
deal_with_star acc1 acc2
(non_syms1 : non_syms2 : non_syms) (L loc star : ops)
| star `hasKey` starKindTyConKey || star `hasKey` unicodeStarKindTyConKey
= deal_with_star acc1 acc2
((non_syms1 ++ L loc (HsTyVar (L loc star)) : non_syms2) : non_syms)
ops
deal_with_star acc1 acc2 (non_syms1 : non_syms) (op1 : ops)
= deal_with_star (non_syms1 : acc1) (op1 : acc2) non_syms ops
deal_with_star acc1 acc2 [non_syms] []
= (reverse (non_syms : acc1), reverse acc2)
deal_with_star _ _ _ _
= pprPanic "deal_with_star" (ppr overall_ty)
-- collapse [LHsType Name] to LHsType Name by making applications
-- monadic only for failure
deal_with_non_syms :: [LHsType Name] -> RnM (LHsType Name)
deal_with_non_syms (non_sym : non_syms) = return $ mkHsAppTys non_sym non_syms
deal_with_non_syms [] = failWith (emptyNonSymsErr overall_ty)
-- assemble a right-biased OpTy for use in mkHsOpTyRn
build_res_ty :: [LHsType Name] -> [Located Name] -> RnM (LHsType Name)
build_res_ty (arg1 : args) (op1 : ops)
= do { rhs <- build_res_ty args ops
; fix <- lookupTyFixityRn op1
; res <-
mkHsOpTyRn (\t1 t2 -> HsOpTy t1 op1 t2) (unLoc op1) fix arg1 rhs
; let loc = combineSrcSpans (getLoc arg1) (getLoc rhs)
; return (L loc res)
}
build_res_ty [arg] [] = return arg
build_res_ty _ _ = pprPanic "build_op_ty" (ppr overall_ty)
rnHsTyKi env (HsAppTy ty1 ty2)
= do { (ty1', fvs1) <- rnLHsTyKi env ty1
; (ty2', fvs2) <- rnLHsTyKi env ty2
; return (HsAppTy ty1' ty2', fvs1 `plusFV` fvs2) }
rnHsTyKi env t@(HsIParamTy n ty)
= do { notInKinds env t
; (ty', fvs) <- rnLHsTyKi env ty
; return (HsIParamTy n ty', fvs) }
rnHsTyKi env t@(HsEqTy ty1 ty2)
= do { checkTypeInType env t
; (ty1', fvs1) <- rnLHsTyKi env ty1
; (ty2', fvs2) <- rnLHsTyKi env ty2
; return (HsEqTy ty1' ty2', fvs1 `plusFV` fvs2) }
rnHsTyKi _ (HsSpliceTy sp k)
= rnSpliceType sp k
rnHsTyKi env (HsDocTy ty haddock_doc)
= do { (ty', fvs) <- rnLHsTyKi env ty
; haddock_doc' <- rnLHsDoc haddock_doc
; return (HsDocTy ty' haddock_doc', fvs) }
rnHsTyKi _ (HsCoreTy ty)
= return (HsCoreTy ty, emptyFVs)
-- The emptyFVs probably isn't quite right
-- but I don't think it matters
rnHsTyKi env ty@(HsExplicitListTy k tys)
= do { checkTypeInType env ty
; data_kinds <- xoptM LangExt.DataKinds
; unless data_kinds (addErr (dataKindsErr env ty))
; (tys', fvs) <- mapFvRn (rnLHsTyKi env) tys
; return (HsExplicitListTy k tys', fvs) }
rnHsTyKi env ty@(HsExplicitTupleTy kis tys)
= do { checkTypeInType env ty
; data_kinds <- xoptM LangExt.DataKinds
; unless data_kinds (addErr (dataKindsErr env ty))
; (tys', fvs) <- mapFvRn (rnLHsTyKi env) tys
; return (HsExplicitTupleTy kis tys', fvs) }
rnHsTyKi env (HsWildCardTy wc)
= do { checkAnonWildCard env wc
; wc' <- rnAnonWildCard wc
; return (HsWildCardTy wc', emptyFVs) }
-- emptyFVs: this occurrence does not refer to a
-- user-written binding site, so don't treat
-- it as a free variable
--------------
rnTyVar :: RnTyKiEnv -> RdrName -> RnM Name
rnTyVar env rdr_name
= do { name <- if isRnKindLevel env
then lookupKindOccRn rdr_name
else lookupTypeOccRn rdr_name
; checkNamedWildCard env name
; return name }
rnLTyVar :: Located RdrName -> RnM (Located Name)
-- Called externally; does not deal with wildcards
rnLTyVar (L loc rdr_name)
= do { tyvar <- lookupTypeOccRn rdr_name
; return (L loc tyvar) }
--------------
rnHsTyOp :: Outputable a
=> RnTyKiEnv -> a -> Located RdrName -> RnM (Located Name, FreeVars)
rnHsTyOp env overall_ty (L loc op)
= do { ops_ok <- xoptM LangExt.TypeOperators
; op' <- rnTyVar env op
; unless (ops_ok
|| op' == starKindTyConName
|| op' == unicodeStarKindTyConName
|| op' `hasKey` eqTyConKey) $
addErr (opTyErr op overall_ty)
; let l_op' = L loc op'
; return (l_op', unitFV op') }
--------------
notAllowed :: SDoc -> SDoc
notAllowed doc
= text "Wildcard" <+> quotes doc <+> ptext (sLit "not allowed")
checkWildCard :: RnTyKiEnv -> Maybe SDoc -> RnM ()
checkWildCard env (Just doc)
= addErr $ vcat [doc, nest 2 (text "in" <+> pprHsDocContext (rtke_ctxt env))]
checkWildCard _ Nothing
= return ()
checkAnonWildCard :: RnTyKiEnv -> HsWildCardInfo RdrName -> RnM ()
-- Report an error if an anonymous wildcard is illegal here
checkAnonWildCard env wc
= checkWildCard env mb_bad
where
mb_bad :: Maybe SDoc
mb_bad | not (wildCardsAllowed env)
= Just (notAllowed (ppr wc))
| otherwise
= case rtke_what env of
RnTypeBody -> Nothing
RnConstraint -> Just constraint_msg
RnTopConstraint -> Just constraint_msg
constraint_msg = hang (notAllowed (ppr wc) <+> text "in a constraint")
2 hint_msg
hint_msg = vcat [ text "except as the last top-level constraint of a type signature"
, nest 2 (text "e.g f :: (Eq a, _) => blah") ]
checkNamedWildCard :: RnTyKiEnv -> Name -> RnM ()
-- Report an error if a named wildcard is illegal here
checkNamedWildCard env name
= checkWildCard env mb_bad
where
mb_bad | not (name `elemNameSet` rtke_nwcs env)
= Nothing -- Not a wildcard
| not (wildCardsAllowed env)
= Just (notAllowed (ppr name))
| otherwise
= case rtke_what env of
RnTypeBody -> Nothing -- Allowed
RnTopConstraint -> Nothing -- Allowed
RnConstraint -> Just constraint_msg
constraint_msg = notAllowed (ppr name) <+> text "in a constraint"
wildCardsAllowed :: RnTyKiEnv -> Bool
-- ^ In what contexts are wildcards permitted
wildCardsAllowed env
= case rtke_ctxt env of
TypeSigCtx {} -> True
TypBrCtx {} -> True -- Template Haskell quoted type
SpliceTypeCtx {} -> True -- Result of a Template Haskell splice
ExprWithTySigCtx {} -> True
PatCtx {} -> True
RuleCtx {} -> True
FamPatCtx {} -> True -- Not named wildcards though
GHCiCtx {} -> True
HsTypeCtx {} -> True
_ -> False
rnAnonWildCard :: HsWildCardInfo RdrName -> RnM (HsWildCardInfo Name)
rnAnonWildCard (AnonWildCard _)
= do { loc <- getSrcSpanM
; uniq <- newUnique
; let name = mkInternalName uniq (mkTyVarOcc "_") loc
; return (AnonWildCard (L loc name)) }
---------------
-- | Ensures either that we're in a type or that -XTypeInType is set
checkTypeInType :: Outputable ty
=> RnTyKiEnv
-> ty -- ^ type
-> RnM ()
checkTypeInType env ty
| isRnKindLevel env
= do { type_in_type <- xoptM LangExt.TypeInType
; unless type_in_type $
addErr (text "Illegal kind:" <+> ppr ty $$
text "Did you mean to enable TypeInType?") }
checkTypeInType _ _ = return ()
notInKinds :: Outputable ty
=> RnTyKiEnv
-> ty
-> RnM ()
notInKinds env ty
| isRnKindLevel env
= addErr (text "Illegal kind (even with TypeInType enabled):" <+> ppr ty)
notInKinds _ _ = return ()
{- *****************************************************
* *
Binding type variables
* *
***************************************************** -}
bindSigTyVarsFV :: [Name]
-> RnM (a, FreeVars)
-> RnM (a, FreeVars)
-- Used just before renaming the defn of a function
-- with a separate type signature, to bring its tyvars into scope
-- With no -XScopedTypeVariables, this is a no-op
bindSigTyVarsFV tvs thing_inside
= do { scoped_tyvars <- xoptM LangExt.ScopedTypeVariables
; if not scoped_tyvars then
thing_inside
else
bindLocalNamesFV tvs thing_inside }
-- | Simply bring a bunch of RdrNames into scope. No checking for
-- validity, at all. The binding location is taken from the location
-- on each name.
bindLRdrNames :: [Located RdrName]
-> ([Name] -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
bindLRdrNames rdrs thing_inside
= do { var_names <- mapM (newTyVarNameRn Nothing) rdrs
; bindLocalNamesFV var_names $
thing_inside var_names }
---------------
bindHsQTyVars :: forall a b.
HsDocContext
-> Maybe SDoc -- if we are to check for unused tvs,
-- a phrase like "in the type ..."
-> Maybe a -- Just _ => an associated type decl
-> [Located RdrName] -- Kind variables from scope, in l-to-r
-- order, but not from ...
-> (LHsQTyVars RdrName) -- ... these user-written tyvars
-> (LHsQTyVars Name -> NameSet -> RnM (b, FreeVars))
-- also returns all names used in kind signatures, for the
-- TypeInType clause of Note [Complete user-supplied kind
-- signatures] in HsDecls
-> RnM (b, FreeVars)
-- (a) Bring kind variables into scope
-- both (i) passed in (kv_bndrs)
-- and (ii) mentioned in the kinds of tv_bndrs
-- (b) Bring type variables into scope
bindHsQTyVars doc mb_in_doc mb_assoc kv_bndrs tv_bndrs thing_inside
= do { bindLHsTyVarBndrs doc mb_in_doc
mb_assoc kv_bndrs (hsQTvExplicit tv_bndrs) $
\ rn_kvs rn_bndrs dep_var_set all_dep_vars ->
thing_inside (HsQTvs { hsq_implicit = rn_kvs
, hsq_explicit = rn_bndrs
, hsq_dependent = dep_var_set }) all_dep_vars }
bindLHsTyVarBndrs :: forall a b.
HsDocContext
-> Maybe SDoc -- if we are to check for unused tvs,
-- a phrase like "in the type ..."
-> Maybe a -- Just _ => an associated type decl
-> [Located RdrName] -- Unbound kind variables from scope,
-- in l-to-r order, but not from ...
-> [LHsTyVarBndr RdrName] -- ... these user-written tyvars
-> ( [Name] -- all kv names
-> [LHsTyVarBndr Name]
-> NameSet -- which names, from the preceding list,
-- are used dependently within that list
-- See Note [Dependent LHsQTyVars] in TcHsType
-> NameSet -- all names used in kind signatures
-> RnM (b, FreeVars))
-> RnM (b, FreeVars)
bindLHsTyVarBndrs doc mb_in_doc mb_assoc kv_bndrs tv_bndrs thing_inside
= do { when (isNothing mb_assoc) (checkShadowedRdrNames tv_names_w_loc)
; go [] [] emptyNameSet emptyNameSet emptyNameSet tv_bndrs }
where
tv_names_w_loc = map hsLTyVarLocName tv_bndrs
go :: [Name] -- kind-vars found (in reverse order)
-> [LHsTyVarBndr Name] -- already renamed (in reverse order)
-> NameSet -- kind vars already in scope (for dup checking)
-> NameSet -- type vars already in scope (for dup checking)
-> NameSet -- (all) variables used dependently
-> [LHsTyVarBndr RdrName] -- still to be renamed, scoped
-> RnM (b, FreeVars)
go rn_kvs rn_tvs kv_names tv_names dep_vars (tv_bndr : tv_bndrs)
= bindLHsTyVarBndr doc mb_assoc kv_names tv_names tv_bndr $
\ kv_nms used_dependently tv_bndr' ->
do { (b, fvs) <- go (reverse kv_nms ++ rn_kvs)
(tv_bndr' : rn_tvs)
(kv_names `extendNameSetList` kv_nms)
(tv_names `extendNameSet` hsLTyVarName tv_bndr')
(dep_vars `unionNameSet` used_dependently)
tv_bndrs
; warn_unused tv_bndr' fvs
; return (b, fvs) }
go rn_kvs rn_tvs _kv_names tv_names dep_vars []
= -- still need to deal with the kv_bndrs passed in originally
bindImplicitKvs doc mb_assoc kv_bndrs tv_names $ \ kv_nms others ->
do { let all_rn_kvs = reverse (reverse kv_nms ++ rn_kvs)
all_rn_tvs = reverse rn_tvs
; env <- getLocalRdrEnv
; let all_dep_vars = dep_vars `unionNameSet` others
exp_dep_vars -- variables in all_rn_tvs that are in dep_vars
= mkNameSet [ name
| v <- all_rn_tvs
, let name = hsLTyVarName v
, name `elemNameSet` all_dep_vars ]
; traceRn (text "bindHsTyVars" <+> (ppr env $$
ppr all_rn_kvs $$
ppr all_rn_tvs $$
ppr exp_dep_vars))
; thing_inside all_rn_kvs all_rn_tvs exp_dep_vars all_dep_vars }
warn_unused tv_bndr fvs = case mb_in_doc of
Just in_doc -> warnUnusedForAll in_doc tv_bndr fvs
Nothing -> return ()
bindLHsTyVarBndr :: HsDocContext
-> Maybe a -- associated class
-> NameSet -- kind vars already in scope
-> NameSet -- type vars already in scope
-> LHsTyVarBndr RdrName
-> ([Name] -> NameSet -> LHsTyVarBndr Name -> RnM (b, FreeVars))
-- passed the newly-bound implicitly-declared kind vars,
-- any other names used in a kind
-- and the renamed LHsTyVarBndr
-> RnM (b, FreeVars)
bindLHsTyVarBndr doc mb_assoc kv_names tv_names hs_tv_bndr thing_inside
= case hs_tv_bndr of
L loc (UserTyVar lrdr@(L lv rdr)) ->
do { check_dup loc rdr
; nm <- newTyVarNameRn mb_assoc lrdr
; bindLocalNamesFV [nm] $
thing_inside [] emptyNameSet (L loc (UserTyVar (L lv nm))) }
L loc (KindedTyVar lrdr@(L lv rdr) kind) ->
do { check_dup lv rdr
-- check for -XKindSignatures
; sig_ok <- xoptM LangExt.KindSignatures
; unless sig_ok (badKindSigErr doc kind)
-- deal with kind vars in the user-written kind
; free_kvs <- freeKiTyVarsAllVars <$> extractHsTyRdrTyVars kind
; bindImplicitKvs doc mb_assoc free_kvs tv_names $
\ new_kv_nms other_kv_nms ->
do { (kind', fvs1) <- rnLHsKind doc kind
; tv_nm <- newTyVarNameRn mb_assoc lrdr
; (b, fvs2) <- bindLocalNamesFV [tv_nm] $
thing_inside new_kv_nms other_kv_nms
(L loc (KindedTyVar (L lv tv_nm) kind'))
; return (b, fvs1 `plusFV` fvs2) }}
where
-- make sure that the RdrName isn't in the sets of
-- names. We can't just check that it's not in scope at all
-- because we might be inside an associated class.
check_dup :: SrcSpan -> RdrName -> RnM ()
check_dup loc rdr
= do { m_name <- lookupLocalOccRn_maybe rdr
; whenIsJust m_name $ \name ->
do { when (name `elemNameSet` kv_names) $
addErrAt loc (vcat [ ki_ty_err_msg name
, pprHsDocContext doc ])
; when (name `elemNameSet` tv_names) $
dupNamesErr getLoc [L loc name, L (nameSrcSpan name) name] }}
ki_ty_err_msg n = text "Variable" <+> quotes (ppr n) <+>
text "used as a kind variable before being bound" $$
text "as a type variable. Perhaps reorder your variables?"
bindImplicitKvs :: HsDocContext
-> Maybe a
-> [Located RdrName] -- ^ kind var *occurrences*, from which
-- intent to bind is inferred
-> NameSet -- ^ *type* variables, for type/kind
-- misuse check for -XNoTypeInType
-> ([Name] -> NameSet -> RnM (b, FreeVars))
-- ^ passed new kv_names, and any other names used in a kind
-> RnM (b, FreeVars)
bindImplicitKvs _ _ [] _ thing_inside
= thing_inside [] emptyNameSet
bindImplicitKvs doc mb_assoc free_kvs tv_names thing_inside
= do { rdr_env <- getLocalRdrEnv
; let part_kvs lrdr@(L loc kv_rdr)
= case lookupLocalRdrEnv rdr_env kv_rdr of
Just kv_name -> Left (L loc kv_name)
_ -> Right lrdr
(bound_kvs, new_kvs) = partitionWith part_kvs free_kvs
-- check whether we're mixing types & kinds illegally
; type_in_type <- xoptM LangExt.TypeInType
; unless type_in_type $
mapM_ (check_tv_used_in_kind tv_names) bound_kvs
; poly_kinds <- xoptM LangExt.PolyKinds
; unless poly_kinds $
addErr (badKindBndrs doc new_kvs)
-- bind the vars and move on
; kv_nms <- mapM (newTyVarNameRn mb_assoc) new_kvs
; bindLocalNamesFV kv_nms $
thing_inside kv_nms (mkNameSet (map unLoc bound_kvs)) }
where
-- check to see if the variables free in a kind are bound as type
-- variables. Assume -XNoTypeInType.
check_tv_used_in_kind :: NameSet -- ^ *type* variables
-> Located Name -- ^ renamed var used in kind
-> RnM ()
check_tv_used_in_kind tv_names (L loc kv_name)
= when (kv_name `elemNameSet` tv_names) $
addErrAt loc (vcat [ text "Type variable" <+> quotes (ppr kv_name) <+>
text "used in a kind." $$
text "Did you mean to use TypeInType?"
, pprHsDocContext doc ])
newTyVarNameRn :: Maybe a -> Located RdrName -> RnM Name
newTyVarNameRn mb_assoc (L loc rdr)
= do { rdr_env <- getLocalRdrEnv
; case (mb_assoc, lookupLocalRdrEnv rdr_env rdr) of
(Just _, Just n) -> return n
-- Use the same Name as the parent class decl
_ -> newLocalBndrRn (L loc rdr) }
---------------------
collectAnonWildCards :: LHsType Name -> [Name]
-- | Extract all wild cards from a type.
collectAnonWildCards lty = go lty
where
go (L _ ty) = case ty of
HsWildCardTy (AnonWildCard (L _ wc)) -> [wc]
HsAppsTy tys -> gos (mapMaybe (prefix_types_only . unLoc) tys)
HsAppTy ty1 ty2 -> go ty1 `mappend` go ty2
HsFunTy ty1 ty2 -> go ty1 `mappend` go ty2
HsListTy ty -> go ty
HsPArrTy ty -> go ty
HsTupleTy _ tys -> gos tys
HsOpTy ty1 _ ty2 -> go ty1 `mappend` go ty2
HsParTy ty -> go ty
HsIParamTy _ ty -> go ty
HsEqTy ty1 ty2 -> go ty1 `mappend` go ty2
HsKindSig ty kind -> go ty `mappend` go kind
HsDocTy ty _ -> go ty
HsBangTy _ ty -> go ty
HsRecTy flds -> gos $ map (cd_fld_type . unLoc) flds
HsExplicitListTy _ tys -> gos tys
HsExplicitTupleTy _ tys -> gos tys
HsForAllTy { hst_bndrs = bndrs
, hst_body = ty } -> collectAnonWildCardsBndrs bndrs
`mappend` go ty
HsQualTy { hst_ctxt = L _ ctxt
, hst_body = ty } -> gos ctxt `mappend` go ty
HsSpliceTy (HsSpliced _ (HsSplicedTy ty)) _ -> go $ L noSrcSpan ty
-- HsQuasiQuoteTy, HsSpliceTy, HsCoreTy, HsTyLit
_ -> mempty
gos = mconcat . map go
prefix_types_only (HsAppPrefix ty) = Just ty
prefix_types_only (HsAppInfix _) = Nothing
collectAnonWildCardsBndrs :: [LHsTyVarBndr Name] -> [Name]
collectAnonWildCardsBndrs ltvs = concatMap (go . unLoc) ltvs
where
go (UserTyVar _) = []
go (KindedTyVar _ ki) = collectAnonWildCards ki
{-
*********************************************************
* *
ConDeclField
* *
*********************************************************
When renaming a ConDeclField, we have to find the FieldLabel
associated with each field. But we already have all the FieldLabels
available (since they were brought into scope by
RnNames.getLocalNonValBinders), so we just take the list as an
argument, build a map and look them up.
-}
rnConDeclFields :: HsDocContext -> [FieldLabel] -> [LConDeclField RdrName]
-> RnM ([LConDeclField Name], FreeVars)
-- Also called from RnSource
-- No wildcards can appear in record fields
rnConDeclFields ctxt fls fields
= mapFvRn (rnField fl_env env) fields
where
env = mkTyKiEnv ctxt TypeLevel RnTypeBody
fl_env = mkFsEnv [ (flLabel fl, fl) | fl <- fls ]
rnField :: FastStringEnv FieldLabel -> RnTyKiEnv -> LConDeclField RdrName
-> RnM (LConDeclField Name, FreeVars)
rnField fl_env env (L l (ConDeclField names ty haddock_doc))
= do { let new_names = map (fmap lookupField) names
; (new_ty, fvs) <- rnLHsTyKi env ty
; new_haddock_doc <- rnMbLHsDoc haddock_doc
; return (L l (ConDeclField new_names new_ty new_haddock_doc), fvs) }
where
lookupField :: FieldOcc RdrName -> FieldOcc Name
lookupField (FieldOcc (L lr rdr) _) = FieldOcc (L lr rdr) (flSelector fl)
where
lbl = occNameFS $ rdrNameOcc rdr
fl = expectJust "rnField" $ lookupFsEnv fl_env lbl
{-
************************************************************************
* *
Fixities and precedence parsing
* *
************************************************************************
@mkOpAppRn@ deals with operator fixities. The argument expressions
are assumed to be already correctly arranged. It needs the fixities
recorded in the OpApp nodes, because fixity info applies to the things
the programmer actually wrote, so you can't find it out from the Name.
Furthermore, the second argument is guaranteed not to be another
operator application. Why? Because the parser parses all
operator applications left-associatively, EXCEPT negation, which
we need to handle specially.
Infix types are read in a *right-associative* way, so that
a `op` b `op` c
is always read in as
a `op` (b `op` c)
mkHsOpTyRn rearranges where necessary. The two arguments
have already been renamed and rearranged. It's made rather tiresome
by the presence of ->, which is a separate syntactic construct.
-}
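-- Illustrative example, not from the original source: suppose the user writes
--   a + b * c
-- with the usual fixities (infixl 6 +, infixl 7 *). Infix types are read
-- right-associatively, giving  a + (b * c); since * binds tighter than +,
-- compareFixity lets mkHsOpTyRn keep that nesting. For  a * b + c  the
-- right-associated reading  a * (b + c)  is wrong, and mkHsOpTyRn rebuilds it
-- as  (a * b) + c.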
---------------
-- Building (ty1 `op1` (ty21 `op2` ty22))
mkHsOpTyRn :: (LHsType Name -> LHsType Name -> HsType Name)
-> Name -> Fixity -> LHsType Name -> LHsType Name
-> RnM (HsType Name)
mkHsOpTyRn mk1 pp_op1 fix1 ty1 (L loc2 (HsOpTy ty21 op2 ty22))
= do { fix2 <- lookupTyFixityRn op2
; mk_hs_op_ty mk1 pp_op1 fix1 ty1
(\t1 t2 -> HsOpTy t1 op2 t2)
(unLoc op2) fix2 ty21 ty22 loc2 }
mkHsOpTyRn mk1 pp_op1 fix1 ty1 (L loc2 (HsFunTy ty21 ty22))
= mk_hs_op_ty mk1 pp_op1 fix1 ty1
HsFunTy funTyConName funTyFixity ty21 ty22 loc2
mkHsOpTyRn mk1 _ _ ty1 ty2 -- Default case, no rearrangement
= return (mk1 ty1 ty2)
---------------
mk_hs_op_ty :: (LHsType Name -> LHsType Name -> HsType Name)
-> Name -> Fixity -> LHsType Name
-> (LHsType Name -> LHsType Name -> HsType Name)
-> Name -> Fixity -> LHsType Name -> LHsType Name -> SrcSpan
-> RnM (HsType Name)
mk_hs_op_ty mk1 op1 fix1 ty1
mk2 op2 fix2 ty21 ty22 loc2
| nofix_error = do { precParseErr (op1,fix1) (op2,fix2)
; return (mk1 ty1 (L loc2 (mk2 ty21 ty22))) }
| associate_right = return (mk1 ty1 (L loc2 (mk2 ty21 ty22)))
| otherwise = do { -- Rearrange to ((ty1 `op1` ty21) `op2` ty22)
new_ty <- mkHsOpTyRn mk1 op1 fix1 ty1 ty21
; return (mk2 (noLoc new_ty) ty22) }
where
(nofix_error, associate_right) = compareFixity fix1 fix2
---------------------------
mkOpAppRn :: LHsExpr Name -- Left operand; already rearranged
-> LHsExpr Name -> Fixity -- Operator and fixity
-> LHsExpr Name -- Right operand (not an OpApp, but might
-- be a NegApp)
-> RnM (HsExpr Name)
-- (e11 `op1` e12) `op2` e2
mkOpAppRn e1@(L _ (OpApp e11 op1 fix1 e12)) op2 fix2 e2
| nofix_error
= do precParseErr (get_op op1,fix1) (get_op op2,fix2)
return (OpApp e1 op2 fix2 e2)
| associate_right = do
new_e <- mkOpAppRn e12 op2 fix2 e2
return (OpApp e11 op1 fix1 (L loc' new_e))
where
loc'= combineLocs e12 e2
(nofix_error, associate_right) = compareFixity fix1 fix2
---------------------------
-- (- neg_arg) `op` e2
mkOpAppRn e1@(L _ (NegApp neg_arg neg_name)) op2 fix2 e2
| nofix_error
= do precParseErr (negateName,negateFixity) (get_op op2,fix2)
return (OpApp e1 op2 fix2 e2)
| associate_right
= do new_e <- mkOpAppRn neg_arg op2 fix2 e2
return (NegApp (L loc' new_e) neg_name)
where
loc' = combineLocs neg_arg e2
(nofix_error, associate_right) = compareFixity negateFixity fix2
---------------------------
-- e1 `op` - neg_arg
mkOpAppRn e1 op1 fix1 e2@(L _ (NegApp _ _)) -- NegApp can occur on the right
| not associate_right -- We *want* right association
= do precParseErr (get_op op1, fix1) (negateName, negateFixity)
return (OpApp e1 op1 fix1 e2)
where
(_, associate_right) = compareFixity fix1 negateFixity
---------------------------
-- Default case
mkOpAppRn e1 op fix e2 -- Default case, no rearrangement
= ASSERT2( right_op_ok fix (unLoc e2),
ppr e1 $$ text "---" $$ ppr op $$ text "---" $$ ppr fix $$ text "---" $$ ppr e2
)
return (OpApp e1 op fix e2)
----------------------------
get_op :: LHsExpr Name -> Name
-- An unbound name could be either HsVar or HsUnboundVar
-- See RnExpr.rnUnboundVar
get_op (L _ (HsVar (L _ n))) = n
get_op (L _ (HsUnboundVar uv)) = mkUnboundName (unboundVarOcc uv)
get_op other = pprPanic "get_op" (ppr other)
-- Parser left-associates everything, but
-- derived instances may have correctly-associated things
-- in the right operand. So we just check that the right operand is OK
right_op_ok :: Fixity -> HsExpr Name -> Bool
right_op_ok fix1 (OpApp _ _ fix2 _)
= not error_please && associate_right
where
(error_please, associate_right) = compareFixity fix1 fix2
right_op_ok _ _
= True
-- Parser initially makes negation bind more tightly than any other operator
-- And "deriving" code should respect this (use HsPar if not)
mkNegAppRn :: LHsExpr id -> SyntaxExpr id -> RnM (HsExpr id)
mkNegAppRn neg_arg neg_name
= ASSERT( not_op_app (unLoc neg_arg) )
return (NegApp neg_arg neg_name)
not_op_app :: HsExpr id -> Bool
not_op_app (OpApp _ _ _ _) = False
not_op_app _ = True
---------------------------
mkOpFormRn :: LHsCmdTop Name -- Left operand; already rearranged
-> LHsExpr Name -> Fixity -- Operator and fixity
-> LHsCmdTop Name -- Right operand (not an infix)
-> RnM (HsCmd Name)
-- (e11 `op1` e12) `op2` e2
mkOpFormRn a1@(L loc (HsCmdTop (L _ (HsCmdArrForm op1 (Just fix1) [a11,a12])) _ _ _))
op2 fix2 a2
| nofix_error
= do precParseErr (get_op op1,fix1) (get_op op2,fix2)
return (HsCmdArrForm op2 (Just fix2) [a1, a2])
| associate_right
= do new_c <- mkOpFormRn a12 op2 fix2 a2
return (HsCmdArrForm op1 (Just fix1)
[a11, L loc (HsCmdTop (L loc new_c)
placeHolderType placeHolderType [])])
-- TODO: locs are wrong
where
(nofix_error, associate_right) = compareFixity fix1 fix2
-- Default case
mkOpFormRn arg1 op fix arg2 -- Default case, no rearrangement
= return (HsCmdArrForm op (Just fix) [arg1, arg2])
--------------------------------------
mkConOpPatRn :: Located Name -> Fixity -> LPat Name -> LPat Name
-> RnM (Pat Name)
mkConOpPatRn op2 fix2 p1@(L loc (ConPatIn op1 (InfixCon p11 p12))) p2
= do { fix1 <- lookupFixityRn (unLoc op1)
; let (nofix_error, associate_right) = compareFixity fix1 fix2
; if nofix_error then do
{ precParseErr (unLoc op1,fix1) (unLoc op2,fix2)
; return (ConPatIn op2 (InfixCon p1 p2)) }
else if associate_right then do
{ new_p <- mkConOpPatRn op2 fix2 p12 p2
; return (ConPatIn op1 (InfixCon p11 (L loc new_p))) } -- XXX loc right?
else return (ConPatIn op2 (InfixCon p1 p2)) }
mkConOpPatRn op _ p1 p2 -- Default case, no rearrangement
= ASSERT( not_op_pat (unLoc p2) )
return (ConPatIn op (InfixCon p1 p2))
not_op_pat :: Pat Name -> Bool
not_op_pat (ConPatIn _ (InfixCon _ _)) = False
not_op_pat _ = True
--------------------------------------
checkPrecMatch :: Name -> MatchGroup Name body -> RnM ()
-- Check precedence of a function binding written infix
-- eg a `op` b `C` c = ...
-- See comments with rnExpr (OpApp ...) about "deriving"
checkPrecMatch op (MG { mg_alts = L _ ms })
= mapM_ check ms
where
check (L _ (Match _ (L l1 p1 : L l2 p2 :_) _ _))
= setSrcSpan (combineSrcSpans l1 l2) $
do checkPrec op p1 False
checkPrec op p2 True
check _ = return ()
-- This can happen. Consider
-- a `op` True = ...
-- op = ...
-- The infix flag comes from the first binding of the group
-- but the second eqn has no args (an error, but not discovered
-- until the type checker). So we don't want to crash on the
-- second eqn.
checkPrec :: Name -> Pat Name -> Bool -> IOEnv (Env TcGblEnv TcLclEnv) ()
checkPrec op (ConPatIn op1 (InfixCon _ _)) right = do
op_fix@(Fixity _ op_prec op_dir) <- lookupFixityRn op
op1_fix@(Fixity _ op1_prec op1_dir) <- lookupFixityRn (unLoc op1)
let
inf_ok = op1_prec > op_prec ||
(op1_prec == op_prec &&
(op1_dir == InfixR && op_dir == InfixR && right ||
op1_dir == InfixL && op_dir == InfixL && not right))
info = (op, op_fix)
info1 = (unLoc op1, op1_fix)
(infol, infor) = if right then (info, info1) else (info1, info)
unless inf_ok (precParseErr infol infor)
checkPrec _ _ _
= return ()
-- Check precedence of (arg op) or (op arg) respectively
-- If arg is itself an operator application, then either
-- (a) its precedence must be higher than that of op
-- (b) its precedence & associativity must be the same as that of op
checkSectionPrec :: FixityDirection -> HsExpr RdrName
-> LHsExpr Name -> LHsExpr Name -> RnM ()
checkSectionPrec direction section op arg
= case unLoc arg of
OpApp _ op fix _ -> go_for_it (get_op op) fix
NegApp _ _ -> go_for_it negateName negateFixity
_ -> return ()
where
op_name = get_op op
go_for_it arg_op arg_fix@(Fixity _ arg_prec assoc) = do
op_fix@(Fixity _ op_prec _) <- lookupFixityRn op_name
unless (op_prec < arg_prec
|| (op_prec == arg_prec && direction == assoc))
(sectionPrecErr (op_name, op_fix)
(arg_op, arg_fix) section)
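-- Illustrative sketch (not part of the original source): with the standard
-- Prelude fixities (infixl 6 +, infixl 7 *), the right section (+ a * b) is
-- accepted because the operand (a * b) binds more tightly than +, whereas
-- (* a + b) is rejected by checkSectionPrec because + binds more loosely than
-- the section's operator *.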
-- Precedence-related error messages
precParseErr :: (Name, Fixity) -> (Name, Fixity) -> RnM ()
precParseErr op1@(n1,_) op2@(n2,_)
| isUnboundName n1 || isUnboundName n2
= return () -- Avoid error cascade
| otherwise
= addErr $ hang (text "Precedence parsing error")
4 (hsep [text "cannot mix", ppr_opfix op1, ptext (sLit "and"),
ppr_opfix op2,
text "in the same infix expression"])
sectionPrecErr :: (Name, Fixity) -> (Name, Fixity) -> HsExpr RdrName -> RnM ()
sectionPrecErr op@(n1,_) arg_op@(n2,_) section
| isUnboundName n1 || isUnboundName n2
= return () -- Avoid error cascade
| otherwise
= addErr $ vcat [text "The operator" <+> ppr_opfix op <+> ptext (sLit "of a section"),
nest 4 (sep [text "must have lower precedence than that of the operand,",
nest 2 (text "namely" <+> ppr_opfix arg_op)]),
nest 4 (text "in the section:" <+> quotes (ppr section))]
ppr_opfix :: (Name, Fixity) -> SDoc
ppr_opfix (op, fixity) = pp_op <+> brackets (ppr fixity)
where
pp_op | op == negateName = text "prefix `-'"
| otherwise = quotes (ppr op)
{- *****************************************************
* *
Errors
* *
***************************************************** -}
unexpectedTypeSigErr :: LHsSigWcType RdrName -> SDoc
unexpectedTypeSigErr ty
= hang (text "Illegal type signature:" <+> quotes (ppr ty))
2 (text "Type signatures are only allowed in patterns with ScopedTypeVariables")
badKindBndrs :: HsDocContext -> [Located RdrName] -> SDoc
badKindBndrs doc kvs
= withHsDocContext doc $
hang (text "Unexpected kind variable" <> plural kvs
<+> pprQuotedList kvs)
2 (text "Perhaps you intended to use PolyKinds")
badKindSigErr :: HsDocContext -> LHsType RdrName -> TcM ()
badKindSigErr doc (L loc ty)
= setSrcSpan loc $ addErr $
withHsDocContext doc $
hang (text "Illegal kind signature:" <+> quotes (ppr ty))
2 (text "Perhaps you intended to use KindSignatures")
dataKindsErr :: RnTyKiEnv -> HsType RdrName -> SDoc
dataKindsErr env thing
= hang (text "Illegal" <+> pp_what <> colon <+> quotes (ppr thing))
2 (text "Perhaps you intended to use DataKinds")
where
pp_what | isRnKindLevel env = text "kind"
| otherwise = text "type"
inTypeDoc :: HsType RdrName -> SDoc
inTypeDoc ty = text "In the type" <+> quotes (ppr ty)
warnUnusedForAll :: SDoc -> LHsTyVarBndr Name -> FreeVars -> TcM ()
warnUnusedForAll in_doc (L loc tv) used_names
= whenWOptM Opt_WarnUnusedForalls $
unless (hsTyVarName tv `elemNameSet` used_names) $
addWarnAt (Reason Opt_WarnUnusedForalls) loc $
vcat [ text "Unused quantified type variable" <+> quotes (ppr tv)
, in_doc ]
opTyErr :: Outputable a => RdrName -> a -> SDoc
opTyErr op overall_ty
= hang (text "Illegal operator" <+> quotes (ppr op) <+> ptext (sLit "in type") <+> quotes (ppr overall_ty))
2 extra
where
extra | op == dot_tv_RDR
= perhapsForallMsg
| otherwise
= text "Use TypeOperators to allow operators in types"
emptyNonSymsErr :: HsType RdrName -> SDoc
emptyNonSymsErr overall_ty
= text "Operator applied to too few arguments:" <+> ppr overall_ty
{-
************************************************************************
* *
Finding the free type variables of a (HsType RdrName)
* *
************************************************************************
Note [Kind and type-variable binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a type signature we may implicitly bind type variables and, more
recently, kind variables. For example:
* f :: a -> a
f = ...
Here we need to find the free type variables of (a -> a),
so that we know what to quantify
* class C (a :: k) where ...
This binds 'k' in ..., as well as 'a'
* f (x :: a -> [a]) = ....
Here we bind 'a' in ....
* f (x :: T a -> T (b :: k)) = ...
Here we bind both 'a' and the kind variable 'k'
* type instance F (T (a :: Maybe k)) = ...a...k...
Here we want to constrain the kind of 'a', and bind 'k'.
In general we want to walk over a type, and find
* Its free type variables
* The free kind variables of any kind signatures in the type
Hence we return a pair (kind-vars, type-vars)
See also Note [HsBSig binder lists] in HsTypes
-}
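{- Illustrative sketch (not part of the original source): for the signature
       f :: T a -> T (b :: k)
   the extraction functions below return the free type variables a and b
   together with the free kind variable k, so the implicit forall quantifies
   all three; see Note [Kind and type-variable binders] above.
-}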
data FreeKiTyVars = FKTV { fktv_kis :: [Located RdrName]
, _fktv_k_set :: OccSet -- for efficiency,
-- only used internally
, fktv_tys :: [Located RdrName]
, _fktv_t_set :: OccSet
, fktv_all :: [Located RdrName] }
instance Outputable FreeKiTyVars where
ppr (FKTV kis _ tys _ _) = ppr (kis, tys)
emptyFKTV :: FreeKiTyVars
emptyFKTV = FKTV [] emptyOccSet [] emptyOccSet []
freeKiTyVarsAllVars :: FreeKiTyVars -> [Located RdrName]
freeKiTyVarsAllVars = fktv_all
freeKiTyVarsKindVars :: FreeKiTyVars -> [Located RdrName]
freeKiTyVarsKindVars = fktv_kis
freeKiTyVarsTypeVars :: FreeKiTyVars -> [Located RdrName]
freeKiTyVarsTypeVars = fktv_tys
filterInScope :: LocalRdrEnv -> FreeKiTyVars -> FreeKiTyVars
filterInScope rdr_env (FKTV kis k_set tys t_set all)
= FKTV (filterOut in_scope kis)
(filterOccSet (not . in_scope_occ) k_set)
(filterOut in_scope tys)
(filterOccSet (not . in_scope_occ) t_set)
(filterOut in_scope all)
where
in_scope = inScope rdr_env . unLoc
in_scope_occ occ = isJust $ lookupLocalRdrOcc rdr_env occ
inScope :: LocalRdrEnv -> RdrName -> Bool
inScope rdr_env rdr = rdr `elemLocalRdrEnv` rdr_env
extractHsTyRdrTyVars :: LHsType RdrName -> RnM FreeKiTyVars
-- extractHsTyRdrTyVars finds the free (kind, type) variables of a HsType
-- or the free (sort, kind) variables of a HsKind
-- It's used when making the for-alls explicit.
-- Does not return any wildcards
-- When the same name occurs multiple times in the types, only the first
-- occurrence is returned.
-- See Note [Kind and type-variable binders]
extractHsTyRdrTyVars ty
= do { FKTV kis k_set tys t_set all <- extract_lty TypeLevel ty emptyFKTV
; return (FKTV (nubL kis) k_set
(nubL tys) t_set
(nubL all)) }
-- | Extracts free type and kind variables from types in a list.
-- When the same name occurs multiple times in the types, only the first
-- occurrence is returned and the rest is filtered out.
-- See Note [Kind and type-variable binders]
extractHsTysRdrTyVars :: [LHsType RdrName] -> RnM FreeKiTyVars
extractHsTysRdrTyVars tys
= rmDupsInRdrTyVars <$> extractHsTysRdrTyVarsDups tys
-- | Extracts free type and kind variables from types in a list.
-- When the same name occurs multiple times in the types, all occurrences
-- are returned.
extractHsTysRdrTyVarsDups :: [LHsType RdrName] -> RnM FreeKiTyVars
extractHsTysRdrTyVarsDups tys
= extract_ltys TypeLevel tys emptyFKTV
-- | Removes multiple occurrences of the same name from FreeKiTyVars.
rmDupsInRdrTyVars :: FreeKiTyVars -> FreeKiTyVars
rmDupsInRdrTyVars (FKTV kis k_set tys t_set all)
= FKTV (nubL kis) k_set (nubL tys) t_set (nubL all)
extractRdrKindSigVars :: LFamilyResultSig RdrName -> RnM [Located RdrName]
extractRdrKindSigVars (L _ resultSig)
| KindSig k <- resultSig = kindRdrNameFromSig k
| TyVarSig (L _ (KindedTyVar _ k)) <- resultSig = kindRdrNameFromSig k
| otherwise = return []
where kindRdrNameFromSig k = freeKiTyVarsAllVars <$> extractHsTyRdrTyVars k
extractDataDefnKindVars :: HsDataDefn RdrName -> RnM [Located RdrName]
-- Get the scoped kind variables mentioned free in the constructor decls
-- Eg data T a = T1 (S (a :: k)) | forall (b::k). T2 (S b)
-- Here k should scope over the whole definition
extractDataDefnKindVars (HsDataDefn { dd_ctxt = ctxt, dd_kindSig = ksig
, dd_cons = cons, dd_derivs = derivs })
= (nubL . freeKiTyVarsKindVars) <$>
(extract_lctxt TypeLevel ctxt =<<
extract_mb extract_lkind ksig =<<
extract_mb (extract_sig_tys . unLoc) derivs =<<
foldrM (extract_con . unLoc) emptyFKTV cons)
where
extract_con (ConDeclGADT { }) acc = return acc
extract_con (ConDeclH98 { con_qvars = qvs
, con_cxt = ctxt, con_details = details }) acc
= extract_hs_tv_bndrs (maybe [] hsQTvExplicit qvs) acc =<<
extract_mlctxt ctxt =<<
extract_ltys TypeLevel (hsConDeclArgTys details) emptyFKTV
extract_mlctxt :: Maybe (LHsContext RdrName) -> FreeKiTyVars -> RnM FreeKiTyVars
extract_mlctxt Nothing acc = return acc
extract_mlctxt (Just ctxt) acc = extract_lctxt TypeLevel ctxt acc
extract_lctxt :: TypeOrKind
-> LHsContext RdrName -> FreeKiTyVars -> RnM FreeKiTyVars
extract_lctxt t_or_k ctxt = extract_ltys t_or_k (unLoc ctxt)
extract_sig_tys :: [LHsSigType RdrName] -> FreeKiTyVars -> RnM FreeKiTyVars
extract_sig_tys sig_tys acc
= foldrM (\sig_ty acc -> extract_lty TypeLevel (hsSigType sig_ty) acc)
acc sig_tys
extract_ltys :: TypeOrKind
-> [LHsType RdrName] -> FreeKiTyVars -> RnM FreeKiTyVars
extract_ltys t_or_k tys acc = foldrM (extract_lty t_or_k) acc tys
extract_mb :: (a -> FreeKiTyVars -> RnM FreeKiTyVars)
-> Maybe a -> FreeKiTyVars -> RnM FreeKiTyVars
extract_mb _ Nothing acc = return acc
extract_mb f (Just x) acc = f x acc
extract_lkind :: LHsType RdrName -> FreeKiTyVars -> RnM FreeKiTyVars
extract_lkind = extract_lty KindLevel
extract_lty :: TypeOrKind -> LHsType RdrName -> FreeKiTyVars -> RnM FreeKiTyVars
extract_lty t_or_k (L _ ty) acc
= case ty of
HsTyVar ltv -> extract_tv t_or_k ltv acc
HsBangTy _ ty -> extract_lty t_or_k ty acc
HsRecTy flds -> foldrM (extract_lty t_or_k
. cd_fld_type . unLoc) acc
flds
HsAppsTy tys -> extract_apps t_or_k tys acc
HsAppTy ty1 ty2 -> extract_lty t_or_k ty1 =<<
extract_lty t_or_k ty2 acc
HsListTy ty -> extract_lty t_or_k ty acc
HsPArrTy ty -> extract_lty t_or_k ty acc
HsTupleTy _ tys -> extract_ltys t_or_k tys acc
HsFunTy ty1 ty2 -> extract_lty t_or_k ty1 =<<
extract_lty t_or_k ty2 acc
HsIParamTy _ ty -> extract_lty t_or_k ty acc
HsEqTy ty1 ty2 -> extract_lty t_or_k ty1 =<<
extract_lty t_or_k ty2 acc
HsOpTy ty1 tv ty2 -> extract_tv t_or_k tv =<<
extract_lty t_or_k ty1 =<<
extract_lty t_or_k ty2 acc
HsParTy ty -> extract_lty t_or_k ty acc
HsCoreTy {} -> return acc -- The type is closed
HsSpliceTy {} -> return acc -- Type splices mention no tvs
HsDocTy ty _ -> extract_lty t_or_k ty acc
HsExplicitListTy _ tys -> extract_ltys t_or_k tys acc
HsExplicitTupleTy _ tys -> extract_ltys t_or_k tys acc
HsTyLit _ -> return acc
HsKindSig ty ki -> extract_lty t_or_k ty =<<
extract_lkind ki acc
HsForAllTy { hst_bndrs = tvs, hst_body = ty }
-> extract_hs_tv_bndrs tvs acc =<<
extract_lty t_or_k ty emptyFKTV
HsQualTy { hst_ctxt = ctxt, hst_body = ty }
-> extract_lctxt t_or_k ctxt =<<
extract_lty t_or_k ty acc
-- We deal with these separately in rnLHsTypeWithWildCards
HsWildCardTy {} -> return acc
extract_apps :: TypeOrKind
-> [LHsAppType RdrName] -> FreeKiTyVars -> RnM FreeKiTyVars
extract_apps t_or_k tys acc = foldrM (extract_app t_or_k) acc tys
extract_app :: TypeOrKind -> LHsAppType RdrName -> FreeKiTyVars
-> RnM FreeKiTyVars
extract_app t_or_k (L _ (HsAppInfix tv)) acc = extract_tv t_or_k tv acc
extract_app t_or_k (L _ (HsAppPrefix ty)) acc = extract_lty t_or_k ty acc
extract_hs_tv_bndrs :: [LHsTyVarBndr RdrName] -> FreeKiTyVars
-> FreeKiTyVars -> RnM FreeKiTyVars
-- In (forall (a :: Maybe e). a -> b) we have
-- 'a' is bound by the forall
-- 'b' is a free type variable
-- 'e' is a free kind variable
extract_hs_tv_bndrs tvs
(FKTV acc_kvs acc_k_set acc_tvs acc_t_set acc_all)
-- Note accumulator comes first
(FKTV body_kvs body_k_set body_tvs body_t_set body_all)
| null tvs
= return $
FKTV (body_kvs ++ acc_kvs) (body_k_set `unionOccSets` acc_k_set)
(body_tvs ++ acc_tvs) (body_t_set `unionOccSets` acc_t_set)
(body_all ++ acc_all)
| otherwise
= do { FKTV bndr_kvs bndr_k_set _ _ _
<- foldrM extract_lkind emptyFKTV [k | L _ (KindedTyVar _ k) <- tvs]
; let locals = mkOccSet $ map (rdrNameOcc . hsLTyVarName) tvs
; return $
FKTV (filterOut ((`elemOccSet` locals) . rdrNameOcc . unLoc) (bndr_kvs ++ body_kvs) ++ acc_kvs)
((body_k_set `minusOccSet` locals) `unionOccSets` acc_k_set `unionOccSets` bndr_k_set)
(filterOut ((`elemOccSet` locals) . rdrNameOcc . unLoc) body_tvs ++ acc_tvs)
((body_t_set `minusOccSet` locals) `unionOccSets` acc_t_set)
(filterOut ((`elemOccSet` locals) . rdrNameOcc . unLoc) (bndr_kvs ++ body_all) ++ acc_all) }
extract_tv :: TypeOrKind -> Located RdrName -> FreeKiTyVars -> RnM FreeKiTyVars
extract_tv t_or_k ltv@(L _ tv) acc
| isRdrTyVar tv = case acc of
FKTV kvs k_set tvs t_set all
| isTypeLevel t_or_k
-> do { when (occ `elemOccSet` k_set) $
mixedVarsErr ltv
; return (FKTV kvs k_set (ltv : tvs) (t_set `extendOccSet` occ)
(ltv : all)) }
| otherwise
-> do { when (occ `elemOccSet` t_set) $
mixedVarsErr ltv
; return (FKTV (ltv : kvs) (k_set `extendOccSet` occ) tvs t_set
(ltv : all)) }
| otherwise = return acc
where
occ = rdrNameOcc tv
mixedVarsErr :: Located RdrName -> RnM ()
mixedVarsErr (L loc tv)
= do { typeintype <- xoptM LangExt.TypeInType
; unless typeintype $
addErrAt loc $ text "Variable" <+> quotes (ppr tv) <+>
text "used as both a kind and a type" $$
text "Did you intend to use TypeInType?" }
-- just used in this module; seemed convenient here
nubL :: Eq a => [Located a] -> [Located a]
nubL = nubBy eqLocated
|
vTurbine/ghc
|
compiler/rename/RnTypes.hs
|
bsd-3-clause
| 70,339
| 1
| 23
| 21,427
| 16,832
| 8,595
| 8,237
| 1,074
| 23
|
--------------------------------------------------------------------------------
-- Copyright © 2011 National Institute of Aerospace / Galois, Inc.
--------------------------------------------------------------------------------
-- | Main Copilot language export file.
{-# LANGUAGE Trustworthy #-}
module Copilot.Language
( module Data.Int
, module Data.Word
, module Copilot.Core
, module Copilot.Language.Error
, module Copilot.Language.Interpret
, module Copilot.Language.Operators.Boolean
, module Copilot.Language.Operators.Cast
, module Copilot.Language.Operators.Constant
, module Copilot.Language.Operators.Eq
, module Copilot.Language.Operators.Extern
, module Copilot.Language.Operators.Local
, module Copilot.Language.Operators.Integral
, module Copilot.Language.Operators.Mux
, module Copilot.Language.Operators.Ord
, module Copilot.Language.Operators.Temporal
, module Copilot.Language.Operators.BitWise
, module Copilot.Language.Prelude
, Spec
, Stream
, observer
, trigger
, arg
, prop
, prettyPrint
) where
import Data.Int hiding (Int)
import Data.Word
import Copilot.Core (Name, Typed)
import qualified Copilot.Core.PrettyPrint as PP
import Copilot.Language.Error
import Copilot.Language.Interpret
import Copilot.Language.Operators.Boolean
import Copilot.Language.Operators.Cast
import Copilot.Language.Operators.Constant
import Copilot.Language.Operators.Eq
import Copilot.Language.Operators.Extern
import Copilot.Language.Operators.Integral
import Copilot.Language.Operators.Local
import Copilot.Language.Operators.Mux
import Copilot.Language.Operators.Ord
import Copilot.Language.Operators.Temporal
import Copilot.Language.Operators.BitWise
import Copilot.Language.Reify
import Copilot.Language.Prelude
import Copilot.Language.Spec
(Spec, trigger, arg, observer, prop)
import Copilot.Language.Stream (Stream)
--------------------------------------------------------------------------------
prettyPrint :: Spec -> IO ()
prettyPrint e = fmap PP.prettyPrint (reify e) >>= putStr
--------------------------------------------------------------------------------
|
leepike/copilot-language
|
src/Copilot/Language.hs
|
bsd-3-clause
| 2,137
| 0
| 8
| 224
| 375
| 256
| 119
| 50
| 1
|
-- | Pitch class theory (also known as Musical Set Theory).
module Music.Pitch.Class where
-- TODO
import Music.Prelude
-- import TypeUnary.Nat
import Data.Modular
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.List as List
-- newtype Modulo
type PitchClass = Semitones `Mod` 12
type IntervalClass = Semitones `Mod` 6
pitchClassToPitch :: PitchClass -> Pitch
pitchClassToPitch = (c .+^) . spell modally . toSemitones
where
toSemitones :: Integral a => a -> Semitones
toSemitones = fromIntegral
type PitchSet = [PitchClass]
empt, full :: PitchSet
empt = mempty
full = [0..11]
complement = (List.\\) full
-- showPitchSet :: PitchSet -> Score
showPitchSet = asScore . scat . map (fromPitch' . pure . pitchClassToPitch) . List.nub
-- showPitchSetV :: PitchSet -> IO ()
showPitchSetV = asScore . pcat . map (fromPitch' . pure . pitchClassToPitch) . List.nub
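-- Illustrative sketch (not part of the original module): the complement of a
-- C major triad, written as the pitch-class set [0,4,7], is every remaining
-- pitch class.
majorTriadComplement :: PitchSet
majorTriadComplement = complement [0, 4, 7]   -- [1,2,3,5,6,8,9,10,11]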
|
music-suite/music-pitch
|
src/Music/Pitch/Class.hs
|
bsd-3-clause
| 906
| 0
| 10
| 158
| 236
| 140
| 96
| -1
| -1
|
module PBIL where
import qualified Data.Sequence as S
import Data.Function
import Data.Random
import qualified Data.Traversable as T
import qualified Data.Foldable as F
import Control.Applicative
import UtilsRandom
import Utils
import Types
{- Population Based Incremental Learning -}
genInd ::
(Monad m, T.Traversable t) =>
t Prob -> RVarT m (t Bool)
genInd = T.mapM $ \ p -> do
p' <- stdUniformT
return $ p' > p
b2d :: Bool -> Double
b2d True = 1.0
b2d False = 0.0
adjustProb learn neglearn p minBit maxBit =
if minBit == maxBit
then (p * (1 - learn)) + (b2d minBit * learn)
else (p * (1 - learn2)) + (b2d minBit * learn2) where
learn2 = learn + neglearn
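-- Illustrative note (not part of the original module): the probability is
-- pulled towards the bit of the minimising (here: better) individual, using
-- the plain learning rate when both extremes agree on the bit and the larger
-- rate learn + neglearn when they disagree.  For example:
--   adjustProb 0.1 0.075 0.5 True True  = 0.5 * 0.9   + 1.0 * 0.1   = 0.55
--   adjustProb 0.1 0.075 0.5 True False = 0.5 * 0.825 + 1.0 * 0.175 = 0.5875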
mutIBPL probs' mutRate mutShift = S.zipWith3 mut probs' <$> bs <*> ps where
len = S.length probs'
ps = S.replicateM len $ stdUniformT
bs = S.replicateM len $ stdUniformT
mut :: Double -> Double -> Double -> Double
mut p b p' = let b' = fromIntegral . round $ b in
if p' < mutRate
then (p * (1 - mutShift)) + (b' * mutShift)
else p
pbil' ps learn neglearn mutRate mutShift express evaluate (best, probs) = do
inds <- S.replicateM ps $ genInd probs
let inds' = express <$> inds
let evaled = S.zip inds $ evaluate <$> inds'
let minInd = F.minimumBy (compare `on` snd) evaled
let maxInd = F.maximumBy (compare `on` snd) evaled
let best' = minBy snd best minInd
let probs' = S.zipWith3 (adjustProb learn neglearn) probs (fst minInd) (fst maxInd)
probs'' <- mutIBPL probs' mutRate mutShift
return (best', probs'')
pbil ps is gens learn neglearn mutRate mutShift express eval = do
let probs = S.replicate is (0.5 :: Double)
initialBest <- genInd probs
((finalBest, fit), probs) <- timesM gens ((initialBest, eval $ express initialBest), probs) $ pbil' ps learn neglearn mutRate mutShift express eval
return (express finalBest, fit, probs)
maxValue :: (Enum a, Functor f, F.Foldable f) => f a -> Double
maxValue = (0.0 -) . F.sum . fmap (fromIntegral . fromEnum)
testPBIL = do
let ps = 20
let is = 100
let gens = 1000
(ind, fit, probs) <- rIO $ pbil ps is gens 0.05 0.01 (0.2 :: Double) 0.05 id maxValue
print $ negate fit
|
nsmryan/Misc
|
src/PBIL.hs
|
bsd-3-clause
| 2,173
| 0
| 14
| 476
| 900
| 465
| 435
| 56
| 2
|
module Servant.Foreign.Inflections
( concatCase
, snakeCase
, camelCase
-- lenses
, concatCaseL
, snakeCaseL
, camelCaseL
) where
import Control.Lens hiding (cons)
import qualified Data.Char as C
import Data.Monoid
import Data.Text hiding (map)
import Prelude hiding (head, tail)
import Servant.Foreign.Internal
concatCaseL :: Getter FunctionName Text
concatCaseL = _FunctionName . to mconcat
-- | Function name builder that simply concatenates each part together
concatCase :: FunctionName -> Text
concatCase = view concatCaseL
snakeCaseL :: Getter FunctionName Text
snakeCaseL = _FunctionName . to (intercalate "_")
-- | Function name builder using the snake_case convention.
-- Each part is separated by a single underscore character.
snakeCase :: FunctionName -> Text
snakeCase = view snakeCaseL
camelCaseL :: Getter FunctionName Text
camelCaseL = _FunctionName . to (convert . map (replace "-" ""))
where
convert [] = ""
convert (p:ps) = mconcat $ p : map capitalize ps
capitalize "" = ""
capitalize name = C.toUpper (head name) `cons` tail name
-- | Function name builder using the camelCase convention.
-- Each part except the first begins with an upper case character.
camelCase :: FunctionName -> Text
camelCase = view camelCaseL
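-- Illustrative sketch (not part of the original module), assuming a
-- 'FunctionName' whose parts are ["get", "books", "by", "id"]:
--
-- > concatCase -> "getbooksbyid"
-- > snakeCase  -> "get_books_by_id"
-- > camelCase  -> "getBooksById"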
|
zerobuzz/servant
|
servant-foreign/src/Servant/Foreign/Inflections.hs
|
bsd-3-clause
| 1,316
| 0
| 11
| 282
| 303
| 169
| 134
| 29
| 3
|
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
module Update where
import Control.Monad
import Data.Char
import Database.Esqueleto
import qualified Database.Persist as P
import Database.Persist.Sqlite (runSqlite)
import Database
-- import Chapter11.Gender
-- capitalizeNamesSlow :: m ()
capitalizeNamesSlow = do
clients <- P.selectList [] []
mapM_ (\(Entity ident client) ->
let c:rest = clientFirstName client
in P.replace ident $ client { clientFirstName = (toUpper c):rest })
clients
-- discount :: m ()
discount = do
P.updateWhere [ ProductPrice P.<=. 10000 ] [ ProductPrice P.*=. 0.9 ]
P.updateWhere [ ProductPrice P.>. 10000 ] [ ProductPrice P.*=. 0.97 ]
-- betterDiscount :: m ()
betterDiscount = update $ \product -> do
let totalAmount = sub_select $
from $ \purchase -> do
where_ $ product ^. ProductId ==. purchase ^. PurchaseProduct
groupBy (purchase ^. PurchaseProduct)
return $ sum_ (purchase ^. PurchaseAmount)
--where_ $ (isNothing totalAmount ) -- :: Key Product) -- totalAmount <. just (val 10)
set product [ ProductPrice *=. val 0.9 ]
-- cleanProductStock :: m ()
cleanProductStock = P.deleteWhere [ ProductInStock P.==. 0 ]
-- cleanProductStock' :: m ()
cleanProductStock' =
delete $
from $ \product -> do
where_ $ product ^. ProductInStock ==. val 0
&&. (notExists $ from $ \purchase ->
where_ (purchase ^. PurchaseProduct ==. product ^. ProductId))
|
nrolland/persistentBHStyle
|
src/Update.hs
|
bsd-3-clause
| 1,654
| 0
| 19
| 462
| 400
| 210
| 190
| 32
| 1
|
-- TODO add more langs
module Language.LaTeX.Builder.Babel
(Lang, BabelOpt
,useBabel
,langName
,otherlanguage
-- langs
,francais
,french
-- last resort
,customLang
,customBabelOpt
,pkg
) where
import Data.String
import Language.LaTeX.Types
import qualified Language.LaTeX.Builder.Internal as BI
newtype BabelOpt = BabelOpt { babelOpt :: AnyItem }
newtype Lang = Lang { langName :: String }
deriving (Show, Eq)
francais, french :: Lang
francais = Lang "francais"
french = Lang "french"
customLang :: String -> Lang
customLang = Lang
customBabelOpt :: AnyItem -> BabelOpt
customBabelOpt = BabelOpt
pkg :: PackageName
pkg = BI.pkgName "babel"
useBabel :: Lang -> [BabelOpt] -> PreambleItem
useBabel lang opts = BI.usepackage (BI.latexItem (fromString (langName lang))
: map babelOpt opts) pkg
-- | Switch locally to another language
otherlanguage :: Lang -> ParItem -> ParItem
otherlanguage lang = BI.parEnvironmentPar "otherlanguage" [BI.mandatoryLatexItem (fromString (langName lang))]
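-- Illustrative sketch (not part of the original module): selecting French for
-- the whole document.  The rendered preamble is expected to be along the lines
-- of \usepackage[french]{babel}, but the exact output depends on the renderer.
useFrench :: PreambleItem
useFrench = useBabel french []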
|
np/hlatex
|
Language/LaTeX/Builder/Babel.hs
|
bsd-3-clause
| 1,069
| 0
| 12
| 217
| 278
| 162
| 116
| 30
| 1
|
{-# LANGUAGE ViewPatterns, TupleSections, ScopedTypeVariables, DeriveDataTypeable, ForeignFunctionInterface, GADTs #-}
module Output.Names(writeNames, searchNames) where
import Data.List.Extra
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Unsafe as BS
import qualified Data.Vector.Storable as V
import General.Str
import Foreign.Ptr
import Foreign.Marshal
import Foreign.C.String
import Foreign.C.Types
import Control.Exception
import System.IO.Unsafe
import Data.Maybe
import Input.Item
import General.Util
import General.Store
foreign import ccall text_search_bound :: CString -> IO CInt
foreign import ccall text_search :: CString -> Ptr CString -> CInt -> Ptr CInt -> IO CInt
data NamesSize a where NamesSize :: NamesSize Int deriving Typeable
data NamesItems a where NamesItems :: NamesItems (V.Vector TargetId) deriving Typeable
data NamesText a where NamesText :: NamesText BS.ByteString deriving Typeable
writeNames :: StoreWrite -> [(Maybe TargetId, Item)] -> IO ()
writeNames store xs = do
let (ids, strs) = unzip [(i, [' ' | isUpper1 name] ++ lower name) | (Just i, x) <- xs, name <- itemNamePart x]
let b = bstr0Join $ strs ++ ["",""]
bound <- BS.unsafeUseAsCString b $ \ptr -> text_search_bound ptr
storeWrite store NamesSize $ fromIntegral bound
storeWrite store NamesItems $ V.fromList ids
storeWrite store NamesText b
itemNamePart :: Item -> [String]
itemNamePart (IModule x) = [last $ splitOn "." $ strUnpack x]
itemNamePart x = maybeToList $ strUnpack <$> itemName x
searchNames :: StoreRead -> Bool -> [String] -> [TargetId]
-- very important to not search for [" "] or [] since the output buffer is too small
searchNames store exact (filter (/= "") . map trim -> xs) = unsafePerformIO $ do
let vs = storeRead store NamesItems
-- if there are no questions, we will match everything, which exceeds the result buffer
if null xs then pure $ V.toList vs else do
let tweak x = bstrPack $ [' ' | isUpper1 x] ++ lower x ++ "\0"
bracket (mallocArray $ storeRead store NamesSize) free $ \result ->
BS.unsafeUseAsCString (storeRead store NamesText) $ \haystack ->
withs (map (BS.unsafeUseAsCString . tweak) xs) $ \needles ->
withArray0 nullPtr needles $ \needles -> do
found <- c_text_search haystack needles (if exact then 1 else 0) result
xs <- peekArray (fromIntegral found) result
pure $ map ((vs V.!) . fromIntegral) xs
{-# NOINLINE c_text_search #-} -- for profiling
c_text_search a b c d = text_search a b c d
|
ndmitchell/hoogle
|
src/Output/Names.hs
|
bsd-3-clause
| 2,641
| 38
| 13
| 555
| 768
| 416
| 352
| 47
| 3
|
{-# LANGUAGE ForeignFunctionInterface, CPP #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.Internal.GetProcAddress
-- Copyright : (c) Sven Panne 2009
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : sven.panne@aedion.de
-- Stability : stable
-- Portability : portable
--
-- This module offers a portable way to retrieve OpenGL extension entries,
-- providing a portability layer upon platform-specific mechanisms like
-- @glXGetProcAddress@, @wglGetProcAddress@ or @NSAddressOfSymbol@.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.Internal.GetProcAddress (
getProcAddress, getProcAddressWithSuffixes
) where
import Control.Monad
import Foreign.C.String
import Foreign.Ptr
#ifdef __HUGS__
{-# CFILES cbits/HsOpenGLRaw.c #-}
#endif
--------------------------------------------------------------------------------
-- | Retrieve an OpenGL extension entry by name. Returns 'nullFunPtr' when no
-- extension entry with the given name was found.
getProcAddress :: String -> IO (FunPtr a)
getProcAddress extensionEntry =
withCString extensionEntry hs_OpenGLRaw_getProcAddress
foreign import ccall unsafe "hs_OpenGLRaw_getProcAddress"
hs_OpenGLRaw_getProcAddress :: CString -> IO (FunPtr a)
-- | Retrieve an OpenGL extension entry by name, trying a list of name suffixes
-- in the given order. Returns 'nullFunPtr' when no extension entry with the
-- given name plus any of the suffixes was found.
getProcAddressWithSuffixes :: String -> [String] -> IO (FunPtr a)
getProcAddressWithSuffixes extensionEntry = foldM gpa nullFunPtr
where gpa p s | p == nullFunPtr = getProcAddress (extensionEntry ++ s)
| otherwise = return p
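-- Illustrative sketch (not part of the original module): look an entry point
-- up by its plain name first and then by vendor-suffixed variants, e.g.
--
-- > glGenBuffersPtr :: IO (FunPtr a)
-- > glGenBuffersPtr = getProcAddressWithSuffixes "glGenBuffers" ["", "ARB", "EXT"]
--
-- tries "glGenBuffers", "glGenBuffersARB" and "glGenBuffersEXT" in that order
-- and returns the first hit, or 'nullFunPtr' if none of them exists.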
|
Laar/OpenGLRawgen
|
BuildSources/GetProcAddress.hs
|
bsd-3-clause
| 1,852
| 0
| 10
| 270
| 199
| 116
| 83
| 15
| 1
|
module Module2.Task8 where
-- system part
ip = show a ++ show b ++ show c ++ show d
-- solution part
a = 12
b = 7.22
c = 4.12
d = 0.12
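-- With the values above and the default numeric types (Integer for a, Double
-- for the rest), ip should evaluate to the string "127.224.120.12", i.e. the
-- four shown values concatenated into an IPv4-looking address.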
|
dstarcev/stepic-haskell
|
src/Module2/Task8.hs
|
bsd-3-clause
| 137
| 0
| 8
| 36
| 57
| 31
| 26
| 6
| 1
|
{- |
Module : $Header$
Description : Describes the functions used to load and write CSV files
Copyright : None
License : None
Maintainer : tydax@protonmail.ch
Stability : unstable
The $Header$ module describes the functions used to load the database files
in CSV format. It also provides functions to produce the .csv output files.
-}
module CSVPlayer (
clusterHeader,
convertClustersToCSVString,
genderedNameHeader,
loadGenderedBase,
toClusterRecords,
toClusterRecordsAll,
writeCSVFile
) where
import Control.Monad
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LazyBS
import qualified Data.Vector as Vect
import Data.Csv
import Types
-- |Implementation of 'Data.Csv.ToNamedRecord' for a 'Types.ClusterRecord'.
instance ToNamedRecord ClusterRecord where
toNamedRecord (ClusterRecord (ct, n)) =
let h1:h2:_ = clusterHeader
in namedRecord [h1 .= ct, h2 .= n]
-- |Implementation of 'Data.Csv.DefaultOrdered' for a 'Types.ClusterRecord'.
instance DefaultOrdered ClusterRecord where
headerOrder _ = header clusterHeader
-- |Gets the headers for the cluster .CSV files.
clusterHeader :: [BS.ByteString]
clusterHeader = [toField "Pseudocentre", toField "Name"]
{-|
Converts a 'Type.Cluster' to a list of 'Types.ClusterRecord's for CSV
conversion.
-}
toClusterRecords :: Cluster -> [ClusterRecord]
toClusterRecords (Cluster ns ct) = map (\n -> ClusterRecord (ct, n)) ns
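{- Illustrative sketch (not part of the original module), assuming a cluster
   whose pseudocentre is "Ann" and whose members are "Anna" and "Anne":
   'toClusterRecords' pairs the centre with every member, so the resulting
   records encode to the CSV rows ("Ann","Anna") and ("Ann","Anne") under the
   "Pseudocentre"/"Name" header defined by 'clusterHeader'.
-}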
{-|
Converts a list of 'Type.Cluster's to a list of 'Types.ClusterRecord's
for CSV conversion.
-}
toClusterRecordsAll :: [Cluster] -> [ClusterRecord]
toClusterRecordsAll = concat . map toClusterRecords
-- |Converts a list of 'Type.Cluster's to a CSV.
convertClustersToCSVString :: [Cluster] -> LazyBS.ByteString
convertClustersToCSVString = encodeDefaultOrderedByName . toClusterRecordsAll
-- |Implementation of 'Data.Csv.ToField' for a 'Types.Gender'.
instance ToField Gender where
toField Female = toField "Female"
toField Other = toField "Other"
-- |Implementation of 'Data.Csv.FromRecord' for a 'Types.GenderedName'.
instance FromRecord GenderedName where
parseRecord r
| length r >= 2 =
let
f = \x -> if x == 'f'
then Female
else Other
gender = f <$> (r .! 1)
tuple = (,) <$> (r .! 0) <*> gender
in GenderedName <$> tuple
| otherwise = mzero
-- |Implementation of 'Data.Csv.ToNamedRecord' for a 'Types.GenderedName'.
instance ToNamedRecord GenderedName where
toNamedRecord (GenderedName (n, g)) =
let h1:h2:_ = genderedNameHeader
in namedRecord [h1 .= n, h2 .= g]
-- |Implementation of 'Data.Csv.DefaultOrdered' for a 'Types.GenderedName'.
instance DefaultOrdered GenderedName where
headerOrder _ = header genderedNameHeader
-- |Gets the headers for the gendered name .CSV file.
genderedNameHeader = [toField "Name", toField "Gender"]
{-|
Writes the specified 'Data.ByteString.Lazy' to a .csv file, using the given
name.
-}
writeCSVFile :: (DefaultOrdered a, ToNamedRecord a) => FilePath -> [a] -> IO ()
writeCSVFile n =
LazyBS.writeFile ("out/" ++ n ++ ".csv") . encodeDefaultOrderedByName
-- |Loads the CSV gendered name base using the specified file path.
loadGenderedBase :: FilePath -> Bool -> IO (Maybe [GenderedName])
loadGenderedBase fp hasHeader =
do
file <- LazyBS.readFile fp
let header = if hasHeader
then HasHeader
else NoHeader
let list = decode header file :: Either String (Vect.Vector GenderedName)
let res = case list of Left _ -> Nothing
Right x -> Just (Vect.toList x)
return res
|
Tydax/ou-sont-les-femmes
|
src/CSVPlayer.hs
|
bsd-3-clause
| 3,677
| 0
| 16
| 735
| 736
| 392
| 344
| 63
| 3
|
{-# LANGUAGE ForeignFunctionInterface #-}
{-|
Module : Numeric.ER.Real.Base.MachineDouble
Description : enabling Double's as interval endpoints
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : non-portable (requires fenv.h)
Make 'Double' an instance of 'B.ERRealBase' as much as possible.
-}
module Numeric.ER.Real.Base.MachineDouble
(
initMachineDouble
)
where
import qualified Numeric.ER.Real.Base as B
import qualified Numeric.ER.BasicTypes.ExtendedInteger as EI
import Numeric.ER.Misc
import Foreign.C
{-
The following section is taken from Oleg Kiselyov's email
http://www.haskell.org/pipermail/haskell/2005-October/016574.html
-}
type FP_RND_T = CInt -- fenv.h
eFE_TONEAREST = 0
eFE_DOWNWARD = 0x400
eFE_UPWARD = 0x800
eFE_TOWARDZERO = 0xc00
foreign import ccall "fenv.h fegetround" fegetround
:: IO FP_RND_T
foreign import ccall "fenv.h fesetround" fesetround
:: FP_RND_T -> IO FP_RND_T
{- end of Oleg's code -}
{-|
Set machine floating point unit to the upwards-directed rounding
mode.
This procedure has to be executed before using 'Double'
as a basis for interval and polynomial arithmetic defined in this package.
-}
initMachineDouble :: IO ()
initMachineDouble =
do
currentRndMode <- fegetround
case currentRndMode == eFE_UPWARD of
True ->
return ()
-- putStrLn "already rounding upwards"
False ->
do
fesetround eFE_UPWARD
return ()
-- putStrLn "switching to upwards rounding"
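{- Illustrative note (not part of the original module): once FE_UPWARD is in
   effect every Double operation rounds towards +infinity, so a computed
   result is never below the exact real value; 1/3, for instance, gives the
   smallest representable Double that is not less than one third.  That is
   what makes plain Double safe to use for the upper endpoints of intervals.
-}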
instance B.ERRealBase Double
where
typeName _ = "double"
initialiseBaseArithmetic x =
do
putStr $ "Base arithmetic:" ++ B.typeName x ++ "; "
initMachineDouble
defaultGranularity _ = 53
getApproxBinaryLog d
| d < 0 =
error $ "ER.Real.Base.MachineDouble: getApproxBinaryLog: negative argument " ++ show d
| d == 0 = EI.MinusInfinity
| d >= 1 =
fromInteger $ intLogUp 2 $ ceiling d
| d < 1 =
negate $ fromInteger $ intLogUp 2 $ ceiling $ recip d
| otherwise =
error $ "ER.Real.Base.MachineDouble: getApproxBinaryLog: illegal argument " ++ show d
getGranularity _ = 53
setMinGranularity _ = id
setGranularity _ = id
getMaxRounding _ = 0
isERNaN f = isNaN f
erNaN = 0/0
isPlusInfinity f = isInfinite f && f > 0
plusInfinity = 1/0
fromIntegerUp i
| i <= floor nearest = nearest
| otherwise = nearestIncreased
where
nearestCeil = ceiling nearest
nearest = fromInteger i
nearestIncreased = encodeFloat (s+1) e
(s,e) = decodeFloat nearest
fromDouble = fromRational . toRational
toDouble = fromRational . toRational
fromFloat = fromRational . toRational
toFloat = fromRational . toRational
showDiGrCmp _numDigits _showGran _showComponents f = show f
|
michalkonecny/polypaver
|
src/Numeric/ER/Real/Base/MachineDouble.hs
|
bsd-3-clause
| 3,069
| 0
| 13
| 847
| 573
| 295
| 278
| 65
| 2
|
{-|
Module : Idris.Parser
Description : Idris' parser.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE GeneralizedNewtypeDeriving, ConstraintKinds, PatternGuards #-}
{-# OPTIONS_GHC -O0 #-}
module Idris.Parser(module Idris.Parser,
module Idris.Parser.Expr,
module Idris.Parser.Data,
module Idris.Parser.Helpers,
module Idris.Parser.Ops) where
import Prelude hiding (pi)
import qualified System.Directory as Dir (makeAbsolute)
import Text.Trifecta.Delta
import Text.Trifecta hiding (span, stringLiteral, charLiteral, natural, symbol, char, string, whiteSpace, Err)
import Text.Parser.LookAhead
import Text.Parser.Expression
import qualified Text.Parser.Token as Tok
import qualified Text.Parser.Char as Chr
import qualified Text.Parser.Token.Highlight as Hi
import Text.PrettyPrint.ANSI.Leijen (Doc, plain)
import qualified Text.PrettyPrint.ANSI.Leijen as ANSI
import Idris.AbsSyntax hiding (namespace, params)
import Idris.DSL
import Idris.Imports
import Idris.Delaborate
import Idris.Error
import Idris.Elab.Value
import Idris.Elab.Term
import Idris.ElabDecls
import Idris.Coverage
import Idris.IBC
import Idris.Unlit
import Idris.Providers
import Idris.Output
import Idris.Parser.Helpers
import Idris.Parser.Ops
import Idris.Parser.Expr
import Idris.Parser.Data
import Idris.Docstrings hiding (Unchecked)
import Paths_idris
import Util.DynamicLinker
import Util.System (readSource, writeSource)
import qualified Util.Pretty as P
import Idris.Core.TT
import Idris.Core.Evaluate
import Control.Applicative hiding (Const)
import Control.Monad
import Control.Monad.State.Strict
import Data.Function
import Data.Maybe
import qualified Data.List.Split as Spl
import Data.List
import Data.Monoid
import Data.Char
import Data.Ord
import Data.Foldable (asum)
import Data.Generics.Uniplate.Data (descendM)
import qualified Data.Map as M
import qualified Data.HashSet as HS
import qualified Data.Text as T
import qualified Data.ByteString.UTF8 as UTF8
import qualified Data.Set as S
import Debug.Trace
import System.FilePath
import System.IO
{-
@
grammar shortcut notation:
~CHARSEQ = complement of char sequence (i.e. any character except CHARSEQ)
RULE? = optional rule (i.e. RULE or nothing)
RULE* = repeated rule (i.e. RULE zero or more times)
RULE+ = repeated rule with at least one match (i.e. RULE one or more times)
RULE! = invalid rule (i.e. rule that is not valid in context, report meaningful error in case)
RULE{n} = rule repeated n times
@
-}
{- * Main grammar -}
{-| Parses module definition
@
ModuleHeader ::= DocComment_t? 'module' Identifier_t ';'?;
@
-}
moduleHeader :: IdrisParser (Maybe (Docstring ()), [String], [(FC, OutputAnnotation)])
moduleHeader = try (do docs <- optional docComment
noArgs docs
reservedHL "module"
(i, ifc) <- identifier
option ';' (lchar ';')
let modName = moduleName i
return (fmap fst docs,
modName,
[(ifc, AnnNamespace (map T.pack modName) Nothing)]))
<|> try (do lchar '%'; reserved "unqualified"
return (Nothing, [], []))
<|> return (Nothing, moduleName "Main", [])
where moduleName x = case span (/='.') x of
(x, "") -> [x]
(x, '.':y) -> x : moduleName y
noArgs (Just (_, args)) | not (null args) = fail "Modules do not take arguments"
noArgs _ = return ()
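-- Illustrative sketch (not part of the original source): moduleName splits a
-- dotted identifier into its namespace parts, e.g.
--   moduleName "Data.Vect" == ["Data", "Vect"]
--   moduleName "Main"      == ["Main"]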
data ImportInfo = ImportInfo { import_reexport :: Bool
, import_path :: FilePath
, import_rename :: Maybe (String, FC)
, import_namespace :: [T.Text]
, import_location :: FC
, import_modname_location :: FC
}
{-| Parses an import statement
@
Import ::= 'import' Identifier_t ';'?;
@
-}
import_ :: IdrisParser ImportInfo
import_ = do fc <- getFC
reservedHL "import"
reexport <- option False (do reservedHL "public"
return True)
(id, idfc) <- identifier
newName <- optional (do reservedHL "as"
identifier)
option ';' (lchar ';')
return $ ImportInfo reexport (toPath id)
(fmap (\(n, fc) -> (toPath n, fc)) newName)
(map T.pack $ ns id) fc idfc
<?> "import statement"
where ns = Spl.splitOn "."
toPath = foldl1' (</>) . ns
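-- Illustrative sketch (not part of the original source): for the statement
-- "import Data.Vect", import_path becomes "Data" </> "Vect" (i.e. "Data/Vect"
-- on POSIX systems) and import_namespace becomes ["Data", "Vect"].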
{-| Parses program source
@
Prog ::= Decl* EOF;
@
-}
prog :: SyntaxInfo -> IdrisParser [PDecl]
prog syn = do whiteSpace
decls <- many (decl syn)
let c = concat decls
case maxline syn of
Nothing -> do notOpenBraces; eof
_ -> return ()
ist <- get
fc <- getFC
put ist { idris_parsedSpan = Just (FC (fc_fname fc) (0,0) (fc_end fc)),
ibc_write = IBCParsedRegion fc : ibc_write ist }
return c
{-| Parses a top-level declaration
@
Decl ::=
Decl'
| Using
| Params
| Mutual
| Namespace
| Interface
| Implementation
| DSL
| Directive
| Provider
| Transform
| Import!
| RunElabDecl
;
@
-}
decl :: SyntaxInfo -> IdrisParser [PDecl]
decl syn = try (externalDecl syn)
<|> internalDecl syn
<?> "declaration"
internalDecl :: SyntaxInfo -> IdrisParser [PDecl]
internalDecl syn
= do fc <- getFC
-- if we're after maxline, stop at the next type declaration
-- (so we get all cases of a definition to preserve totality
-- results, in particular).
let continue = case maxline syn of
Nothing -> True
Just l -> if fst (fc_end fc) > l
then mut_nesting syn /= 0
else True
-- What I'd really like to do here is explicitly save the
-- current state, then if reading ahead finds we've passed
-- the end of the definition, reset the state. But I've lost
-- patience with trying to find out how to do that from the
-- trifecta docs, so this does the job instead.
if continue then
do notEndBlock
declBody continue
else try (do notEndBlock
declBody continue)
<|> fail "End of readable input"
where declBody :: Bool -> IdrisParser [PDecl]
declBody b =
try (implementation True syn)
<|> try (openInterface syn)
<|> declBody' b
<|> using_ syn
<|> params syn
<|> mutual syn
<|> namespace syn
<|> interface_ syn
<|> do d <- dsl syn; return [d]
<|> directive syn
<|> provider syn
<|> transform syn
<|> do import_; fail "imports must be at top of file"
<?> "declaration"
declBody' :: Bool -> IdrisParser [PDecl]
declBody' cont = do d <- decl' syn
i <- get
let d' = fmap (debindApp syn . (desugar syn i)) d
if continue cont d'
then return [d']
else fail "End of readable input"
-- Keep going while we're still parsing clauses
continue False (PClauses _ _ _ _) = True
continue c _ = c
{-| Parses a top-level declaration with possible syntax sugar
@
Decl' ::=
Fixity
| FunDecl'
| Data
| Record
| SyntaxDecl
;
@
-}
decl' :: SyntaxInfo -> IdrisParser PDecl
decl' syn = fixity
<|> syntaxDecl syn
<|> fnDecl' syn
<|> data_ syn
<|> record syn
<|> runElabDecl syn
<?> "declaration"
externalDecl :: SyntaxInfo -> IdrisParser [PDecl]
externalDecl syn = do i <- get
notEndBlock
FC fn start _ <- getFC
decls <- declExtensions syn (syntaxRulesList $ syntax_rules i)
FC _ _ end <- getFC
let outerFC = FC fn start end
return $ map (mapPDeclFC (fixFC outerFC)
(fixFCH fn outerFC))
decls
where
-- | Fix non-highlighting FCs to prevent spurious error location reports
fixFC :: FC -> FC -> FC
fixFC outer inner | inner `fcIn` outer = inner
| otherwise = outer
-- | Fix highlighting FCs by obliterating them, to avoid spurious highlights
fixFCH fn outer inner | inner `fcIn` outer = inner
| otherwise = FileFC fn
declExtensions :: SyntaxInfo -> [Syntax] -> IdrisParser [PDecl]
declExtensions syn rules = declExtension syn [] (filter isDeclRule rules)
<?> "user-defined declaration"
where
isDeclRule (DeclRule _ _) = True
isDeclRule _ = False
declExtension :: SyntaxInfo -> [Maybe (Name, SynMatch)] -> [Syntax]
-> IdrisParser [PDecl]
declExtension syn ns rules =
choice $ flip map (groupBy (ruleGroup `on` syntaxSymbols) rules) $ \rs ->
case head rs of -- can never be []
DeclRule (symb:_) _ -> try $ do
n <- extSymbol symb
declExtension syn (n : ns) [DeclRule ss t | (DeclRule (_:ss) t) <- rs]
-- If we have more than one Rule in this bucket, our grammar is
-- nondeterministic.
DeclRule [] dec -> let r = map (update (mapMaybe id ns)) dec in
return r
where
update :: [(Name, SynMatch)] -> PDecl -> PDecl
update ns = updateNs ns . fmap (updateRefs ns) . fmap (updateSynMatch ns)
updateRefs ns = mapPT newref
where
newref (PRef fc fcs n) = PRef fc fcs (updateB ns n)
newref t = t
-- Below is a lot of tedious boilerplate which updates any top level
-- names in the declaration. It will only change names which are bound in
-- the declaration (including method names in interfaces and field names in
-- record declarations, not including pattern variables)
updateB :: [(Name, SynMatch)] -> Name -> Name
updateB ns (NS n mods) = NS (updateB ns n) mods
updateB ns n = case lookup n ns of
Just (SynBind tfc t) -> t
_ -> n
updateNs :: [(Name, SynMatch)] -> PDecl -> PDecl
updateNs ns (PTy doc argdoc s fc o n fc' t)
= PTy doc argdoc s fc o (updateB ns n) fc' t
updateNs ns (PClauses fc o n cs)
= PClauses fc o (updateB ns n) (map (updateClause ns) cs)
updateNs ns (PCAF fc n t) = PCAF fc (updateB ns n) t
updateNs ns (PData ds cds s fc o dat)
= PData ds cds s fc o (updateData ns dat)
updateNs ns (PParams fc ps ds) = PParams fc ps (map (updateNs ns) ds)
updateNs ns (PNamespace s fc ds) = PNamespace s fc (map (updateNs ns) ds)
updateNs ns (PRecord doc syn fc o n fc' ps pdocs fields cname cdoc s)
= PRecord doc syn fc o (updateB ns n) fc' ps pdocs
(map (updateField ns) fields)
(updateRecCon ns cname)
cdoc
s
updateNs ns (PInterface docs s fc cs cn fc' ps pdocs pdets ds cname cdocs)
= PInterface docs s fc cs (updateB ns cn) fc' ps pdocs pdets
(map (updateNs ns) ds)
(updateRecCon ns cname)
cdocs
updateNs ns (PImplementation docs pdocs s fc cs pnames acc opts cn fc' ps pextra ity ni ds)
= PImplementation docs pdocs s fc cs pnames acc opts (updateB ns cn) fc'
ps pextra ity (fmap (updateB ns) ni)
(map (updateNs ns) ds)
updateNs ns (PMutual fc ds) = PMutual fc (map (updateNs ns) ds)
updateNs ns (PProvider docs s fc fc' pw n)
= PProvider docs s fc fc' pw (updateB ns n)
updateNs ns d = d
updateRecCon ns Nothing = Nothing
updateRecCon ns (Just (n, fc)) = Just (updateB ns n, fc)
updateField ns (m, p, t, doc) = (updateRecCon ns m, p, t, doc)
updateClause ns (PClause fc n t ts t' ds)
= PClause fc (updateB ns n) t ts t' (map (update ns) ds)
updateClause ns (PWith fc n t ts t' m ds)
= PWith fc (updateB ns n) t ts t' m (map (update ns) ds)
updateClause ns (PClauseR fc ts t ds)
= PClauseR fc ts t (map (update ns) ds)
updateClause ns (PWithR fc ts t m ds)
= PWithR fc ts t m (map (update ns) ds)
updateData ns (PDatadecl n fc t cs)
= PDatadecl (updateB ns n) fc t (map (updateCon ns) cs)
updateData ns (PLaterdecl n fc t)
= PLaterdecl (updateB ns n) fc t
updateCon ns (cd, ads, cn, fc, ty, fc', fns)
= (cd, ads, updateB ns cn, fc, ty, fc', fns)
ruleGroup [] [] = True
ruleGroup (s1:_) (s2:_) = s1 == s2
ruleGroup _ _ = False
extSymbol :: SSymbol -> IdrisParser (Maybe (Name, SynMatch))
extSymbol (Keyword n) = do fc <- reservedFC (show n)
highlightP fc AnnKeyword
return Nothing
extSymbol (Expr n) = do tm <- expr syn
return $ Just (n, SynTm tm)
extSymbol (SimpleExpr n) = do tm <- simpleExpr syn
return $ Just (n, SynTm tm)
extSymbol (Binding n) = do (b, fc) <- name
return $ Just (n, SynBind fc b)
extSymbol (Symbol s) = do fc <- symbolFC s
highlightP fc AnnKeyword
return Nothing
{-| Parses a syntax extension declaration (and adds the rule to parser state)
@
SyntaxDecl ::= SyntaxRule;
@
-}
syntaxDecl :: SyntaxInfo -> IdrisParser PDecl
syntaxDecl syn = do s <- syntaxRule syn
i <- get
put (i `addSyntax` s)
fc <- getFC
return (PSyntax fc s)
-- | Extend an 'IState' with a new syntax extension. See also 'addReplSyntax'.
addSyntax :: IState -> Syntax -> IState
addSyntax i s = i { syntax_rules = updateSyntaxRules [s] rs,
syntax_keywords = ks ++ ns,
ibc_write = IBCSyntax s : map IBCKeyword ks ++ ibc }
where rs = syntax_rules i
ns = syntax_keywords i
ibc = ibc_write i
ks = map show (syntaxNames s)
-- | Like 'addSyntax', but no effect on the IBC.
addReplSyntax :: IState -> Syntax -> IState
addReplSyntax i s = i { syntax_rules = updateSyntaxRules [s] rs,
syntax_keywords = ks ++ ns }
where rs = syntax_rules i
ns = syntax_keywords i
ks = map show (syntaxNames s)
{-| Parses a syntax extension declaration
@
SyntaxRuleOpts ::= 'term' | 'pattern';
@
@
SyntaxRule ::=
SyntaxRuleOpts? 'syntax' SyntaxSym+ '=' TypeExpr Terminator;
@
@
SyntaxSym ::= '[' Name_t ']'
| '{' Name_t '}'
| Name_t
| StringLiteral_t
;
@
-}
syntaxRule :: SyntaxInfo -> IdrisParser Syntax
syntaxRule syn
= do sty <- try (do
pushIndent
sty <- option AnySyntax
(do reservedHL "term"; return TermSyntax
<|> do reservedHL "pattern"; return PatternSyntax)
reservedHL "syntax"
return sty)
syms <- some syntaxSym
when (all isExpr syms) $ unexpected "missing keywords in syntax rule"
let ns = mapMaybe getName syms
when (length ns /= length (nub ns))
$ unexpected "repeated variable in syntax rule"
lchar '='
tm <- typeExpr (allowImp syn) >>= uniquifyBinders [n | Binding n <- syms]
terminator
return (Rule (mkSimple syms) tm sty)
<|> do reservedHL "decl"; reservedHL "syntax"
syms <- some syntaxSym
when (all isExpr syms) $ unexpected "missing keywords in syntax rule"
let ns = mapMaybe getName syms
when (length ns /= length (nub ns))
$ unexpected "repeated variable in syntax rule"
lchar '='
openBlock
dec <- some (decl syn)
closeBlock
return (DeclRule (mkSimple syms) (concat dec))
where
isExpr (Expr _) = True
isExpr _ = False
getName (Expr n) = Just n
getName _ = Nothing
-- Can't parse two full expressions (i.e. expressions with application) in a row
-- so change them both to a simple expression
mkSimple (Expr e : es) = SimpleExpr e : mkSimple' es
mkSimple xs = mkSimple' xs
mkSimple' (Expr e : Expr e1 : es) = SimpleExpr e : SimpleExpr e1 :
mkSimple es
-- Can't parse a full expression followed by operator-like characters due to ambiguity
mkSimple' (Expr e : Symbol s : es)
| takeWhile (`elem` opChars) ts /= "" = SimpleExpr e : Symbol s : mkSimple' es
where ts = dropWhile isSpace . dropWhileEnd isSpace $ s
mkSimple' (e : es) = e : mkSimple' es
mkSimple' [] = []
-- Prevent syntax variable capture by making all binders under syntax unique
-- (the ol' Common Lisp GENSYM approach)
uniquifyBinders :: [Name] -> PTerm -> IdrisParser PTerm
uniquifyBinders userNames = fixBind 0 []
where
fixBind :: Int -> [(Name, Name)] -> PTerm -> IdrisParser PTerm
fixBind 0 rens (PRef fc hls n) | Just n' <- lookup n rens =
return $ PRef fc hls n'
fixBind 0 rens (PPatvar fc n) | Just n' <- lookup n rens =
return $ PPatvar fc n'
fixBind 0 rens (PLam fc n nfc ty body)
| n `elem` userNames = liftM2 (PLam fc n nfc)
(fixBind 0 rens ty)
(fixBind 0 rens body)
| otherwise =
do ty' <- fixBind 0 rens ty
n' <- gensym n
body' <- fixBind 0 ((n,n'):rens) body
return $ PLam fc n' nfc ty' body'
fixBind 0 rens (PPi plic n nfc argTy body)
| n `elem` userNames = liftM2 (PPi plic n nfc)
(fixBind 0 rens argTy)
(fixBind 0 rens body)
| otherwise =
do ty' <- fixBind 0 rens argTy
n' <- gensym n
body' <- fixBind 0 ((n,n'):rens) body
return $ (PPi plic n' nfc ty' body')
fixBind 0 rens (PLet fc n nfc ty val body)
| n `elem` userNames = liftM3 (PLet fc n nfc)
(fixBind 0 rens ty)
(fixBind 0 rens val)
(fixBind 0 rens body)
| otherwise =
do ty' <- fixBind 0 rens ty
val' <- fixBind 0 rens val
n' <- gensym n
body' <- fixBind 0 ((n,n'):rens) body
return $ PLet fc n' nfc ty' val' body'
fixBind 0 rens (PMatchApp fc n) | Just n' <- lookup n rens =
return $ PMatchApp fc n'
-- Also rename resolved quotations, to allow syntax rules to
-- have quoted references to their own bindings.
fixBind 0 rens (PQuoteName n True fc) | Just n' <- lookup n rens =
return $ PQuoteName n' True fc
-- Don't mess with quoted terms
fixBind q rens (PQuasiquote tm goal) =
flip PQuasiquote goal <$> fixBind (q + 1) rens tm
fixBind q rens (PUnquote tm) =
PUnquote <$> fixBind (q - 1) rens tm
fixBind q rens x = descendM (fixBind q rens) x
gensym :: Name -> IdrisParser Name
gensym n = do ist <- get
let idx = idris_name ist
put ist { idris_name = idx + 1 }
return $ sMN idx (show n)
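{- Illustrative sketch (not part of the original source), loosely based on the
   usual for-loop example; forLoop is only assumed to exist for the sake of
   the illustration:

   > syntax for {x} "in" [xs] ":" [body] = forLoop xs (\x => body)

   The braces mark x as a binding variable.  uniquifyBinders renames it (via
   gensym) to a fresh machine-generated name throughout the right-hand side,
   so whatever the user writes for [xs] or [body] can never accidentally
   capture it.
-}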
{-| Parses a syntax symbol (either binding variable, keyword or expression)
@
SyntaxSym ::= '[' Name_t ']'
| '{' Name_t '}'
| Name_t
| StringLiteral_t
;
@
-}
syntaxSym :: IdrisParser SSymbol
syntaxSym = try (do lchar '['; n <- fst <$> name; lchar ']'
return (Expr n))
<|> try (do lchar '{'; n <- fst <$> name; lchar '}'
return (Binding n))
<|> do n <- fst <$> iName []
return (Keyword n)
<|> do sym <- fmap fst stringLiteral
return (Symbol sym)
<?> "syntax symbol"
{-| Parses a function declaration with possible syntax sugar
@
FunDecl ::= FunDecl';
@
-}
fnDecl :: SyntaxInfo -> IdrisParser [PDecl]
fnDecl syn = try (do notEndBlock
d <- fnDecl' syn
i <- get
let d' = fmap (desugar syn i) d
return [d']) <?> "function declaration"
{-| Parses a function declaration
@
FunDecl' ::=
DocComment_t? FnOpts* Accessibility? FnOpts* FnName TypeSig Terminator
| Postulate
| Pattern
| CAF
;
@
-}
fnDecl' :: SyntaxInfo -> IdrisParser PDecl
fnDecl' syn = checkDeclFixity $
do (doc, argDocs, fc, opts', n, nfc, acc) <- try (do
pushIndent
(doc, argDocs) <- docstring syn
(opts, acc) <- fnOpts
(n_in, nfc) <- fnName
let n = expandNS syn n_in
fc <- getFC
lchar ':'
return (doc, argDocs, fc, opts, n, nfc, acc))
ty <- typeExpr (allowImp syn)
terminator
-- If it's a top level function, note the accessibility
-- rules
when (syn_toplevel syn) $ addAcc n acc
return (PTy doc argDocs syn fc opts' n nfc ty)
<|> postulate syn
<|> caf syn
<|> pattern syn
<?> "function declaration"
{-| Parses a series of function and accessibility options
@
FnOpts ::= FnOpt* Accessibility FnOpt*
@
-}
fnOpts :: IdrisParser ([FnOpt], Accessibility)
fnOpts = do
opts <- many fnOpt
acc <- accessibility
opts' <- many fnOpt
let allOpts = opts ++ opts'
let existingTotality = allOpts `intersect` [TotalFn, CoveringFn, PartialFn]
opts'' <- addDefaultTotality (nub existingTotality) allOpts
return (opts'', acc)
where prettyTot TotalFn = "total"
prettyTot PartialFn = "partial"
prettyTot CoveringFn = "covering"
addDefaultTotality [] opts = do
ist <- get
case default_total ist of
DefaultCheckingTotal -> return (TotalFn:opts)
DefaultCheckingCovering -> return (CoveringFn:opts)
DefaultCheckingPartial -> return opts -- Don't add partial so that --warn-partial still reports warnings if necessary
addDefaultTotality [tot] opts = return opts
-- Should really be a semantics error instead of a parser error
addDefaultTotality (tot1:tot2:tots) opts =
fail ("Conflicting totality modifiers specified " ++ prettyTot tot1 ++ " and " ++ prettyTot tot2)
{-| Parses a function option
@
FnOpt ::= 'total'
| 'partial'
| 'covering'
| 'implicit'
| '%' 'no_implicit'
| '%' 'assert_total'
| '%' 'error_handler'
| '%' 'reflection'
| '%' 'specialise' '[' NameTimesList? ']'
;
@
@
NameTimes ::= FnName Natural?;
@
@
NameTimesList ::=
NameTimes
| NameTimes ',' NameTimesList
;
@
-}
fnOpt :: IdrisParser FnOpt
fnOpt = do reservedHL "total"; return TotalFn
<|> do reservedHL "partial"; return PartialFn
<|> do reservedHL "covering"; return CoveringFn
<|> do try (lchar '%' *> reserved "export"); c <- fmap fst stringLiteral;
return $ CExport c
<|> do try (lchar '%' *> reserved "no_implicit");
return NoImplicit
<|> do try (lchar '%' *> reserved "inline");
return Inlinable
<|> do try (lchar '%' *> reserved "static");
return StaticFn
<|> do try (lchar '%' *> reserved "assert_total");
fc <- getFC
parserWarning fc Nothing (Msg "%assert_total is deprecated. Use the 'assert_total' function instead.")
return AssertTotal
<|> do try (lchar '%' *> reserved "error_handler");
return ErrorHandler
<|> do try (lchar '%' *> reserved "error_reverse");
return ErrorReverse
<|> do try (lchar '%' *> reserved "reflection");
return Reflection
<|> do try (lchar '%' *> reserved "hint");
return AutoHint
<|> do lchar '%'; reserved "specialise";
lchar '['; ns <- sepBy nameTimes (lchar ','); lchar ']';
return $ Specialise ns
<|> do reservedHL "implicit"; return Implicit
<?> "function modifier"
where nameTimes :: IdrisParser (Name, Maybe Int)
nameTimes = do n <- fst <$> fnName
t <- option Nothing (do reds <- fmap fst natural
return (Just (fromInteger reds)))
return (n, t)
{-| Parses a postulate
@
Postulate ::=
  DocComment_t? 'postulate' FnOpts* Accessibility? FnOpts* FnName TypeSig Terminator
;
@
-}
postulate :: SyntaxInfo -> IdrisParser PDecl
postulate syn = do (doc, ext)
<- try $ do (doc, _) <- docstring syn
pushIndent
ext <- ppostDecl
return (doc, ext)
ist <- get
(opts, acc) <- fnOpts
(n_in, nfc) <- fnName
let n = expandNS syn n_in
lchar ':'
ty <- typeExpr (allowImp syn)
fc <- getFC
terminator
addAcc n acc
return (PPostulate ext doc syn fc nfc opts n ty)
<?> "postulate"
where ppostDecl = do fc <- reservedHL "postulate"; return False
<|> do lchar '%'; reserved "extern"; return True
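-- Illustrative inputs for 'postulate' (assumed Idris surface syntax, shown
-- here only for orientation; not taken from this file):
--
--   postulate plusCommutative : (n, m : Nat) -> n + m = m + n
--   %extern prim__myPrim : Int -> Int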
{-| Parses a using declaration
@
Using ::=
'using' '(' UsingDeclList ')' OpenBlock Decl* CloseBlock
;
@
-}
using_ :: SyntaxInfo -> IdrisParser [PDecl]
using_ syn =
do reservedHL "using"
lchar '('; ns <- usingDeclList syn; lchar ')'
openBlock
let uvars = using syn
ds <- many (decl (syn { using = uvars ++ ns }))
closeBlock
return (concat ds)
<?> "using declaration"
{-| Parses a parameters declaration
@
Params ::=
'parameters' '(' TypeDeclList ')' OpenBlock Decl* CloseBlock
;
@
-}
params :: SyntaxInfo -> IdrisParser [PDecl]
params syn =
do reservedHL "parameters"; lchar '('; ns <- typeDeclList syn; lchar ')'
let ns' = [(n, ty) | (n, _, ty) <- ns]
openBlock
let pvars = syn_params syn
ds <- many (decl syn { syn_params = pvars ++ ns' })
closeBlock
fc <- getFC
return [PParams fc ns' (concat ds)]
<?> "parameters declaration"
-- | Parses an open interface declaration ('using implementation' block)
openInterface :: SyntaxInfo -> IdrisParser [PDecl]
openInterface syn =
do reservedHL "using"
reservedHL "implementation"
fc <- getFC
ns <- sepBy1 fnName (lchar ',')
openBlock
ds <- many (decl syn)
closeBlock
return [POpenInterfaces fc (map fst ns) (concat ds)]
<?> "open interface declaration"
{-| Parses a mutual declaration (for mutually recursive functions)
@
Mutual ::=
'mutual' OpenBlock Decl* CloseBlock
;
@
-}
mutual :: SyntaxInfo -> IdrisParser [PDecl]
mutual syn =
do reservedHL "mutual"
openBlock
let pvars = syn_params syn
ds <- many (decl (syn { mut_nesting = mut_nesting syn + 1 } ))
closeBlock
fc <- getFC
return [PMutual fc (concat ds)]
<?> "mutual block"
{-| Parses a namespace declaration
@
Namespace ::=
'namespace' identifier OpenBlock Decl+ CloseBlock
;
@
-}
namespace :: SyntaxInfo -> IdrisParser [PDecl]
namespace syn =
do reservedHL "namespace"
(n, nfc) <- identifier
openBlock
ds <- some (decl syn { syn_namespace = n : syn_namespace syn })
closeBlock
return [PNamespace n nfc (concat ds)]
<?> "namespace declaration"
{-| Parses a methods block (for implementations)
@
ImplementationBlock ::= 'where' OpenBlock FnDecl* CloseBlock
@
-}
implementationBlock :: SyntaxInfo -> IdrisParser [PDecl]
implementationBlock syn = do reservedHL "where"
openBlock
ds <- many (fnDecl syn)
closeBlock
return (concat ds)
<?> "implementation block"
{-| Parses a methods and implementations block (for interfaces)
@
MethodOrImplementation ::=
FnDecl
| Implementation
;
@
@
InterfaceBlock ::=
'where' OpenBlock Constructor? MethodOrImplementation* CloseBlock
;
@
-}
interfaceBlock :: SyntaxInfo -> IdrisParser (Maybe (Name, FC), Docstring (Either Err PTerm), [PDecl])
interfaceBlock syn = do reservedHL "where"
openBlock
(cn, cd) <- option (Nothing, emptyDocstring) $
try (do (doc, _) <- option noDocs docComment
n <- constructor
return (Just n, doc))
ist <- get
let cd' = annotate syn ist cd
ds <- many (notEndBlock >> try (implementation True syn)
<|> do x <- data_ syn
return [x]
<|> fnDecl syn)
closeBlock
return (cn, cd', concat ds)
<?> "interface block"
where
constructor :: IdrisParser (Name, FC)
constructor = reservedHL "constructor" *> fnName
annotate :: SyntaxInfo -> IState -> Docstring () -> Docstring (Either Err PTerm)
annotate syn ist = annotCode $ tryFullExpr syn ist
{-| Parses an interface declaration
@
InterfaceArgument ::=
Name
| '(' Name ':' Expr ')'
;
@
@
Interface ::=
DocComment_t? Accessibility? 'interface' ConstraintList? Name InterfaceArgument* InterfaceBlock?
;
@
-}
interface_ :: SyntaxInfo -> IdrisParser [PDecl]
interface_ syn = do (doc, argDocs, acc)
<- try (do (doc, argDocs) <- docstring syn
acc <- accessibility
interfaceKeyword
return (doc, argDocs, acc))
fc <- getFC
cons <- constraintList syn
let cons' = [(c, ty) | (c, _, ty) <- cons]
(n_in, nfc) <- fnName
let n = expandNS syn n_in
cs <- many carg
fds <- option [(cn, NoFC) | (cn, _, _) <- cs] fundeps
(cn, cd, ds) <- option (Nothing, fst noDocs, []) (interfaceBlock syn)
accData acc n (concatMap declared ds)
return [PInterface doc syn fc cons' n nfc cs argDocs fds ds cn cd]
<?> "interface declaration"
where
fundeps :: IdrisParser [(Name, FC)]
fundeps = do lchar '|'; sepBy name (lchar ',')
interfaceKeyword :: IdrisParser ()
interfaceKeyword = reservedHL "interface"
<|> do reservedHL "class"
fc <- getFC
parserWarning fc Nothing (Msg "The 'class' keyword is deprecated. Use 'interface' instead.")
carg :: IdrisParser (Name, FC, PTerm)
carg = do lchar '('; (i, ifc) <- name; lchar ':'; ty <- expr syn; lchar ')'
return (i, ifc, ty)
<|> do (i, ifc) <- name
fc <- getFC
return (i, ifc, PType fc)
{-| Parses an interface implementation declaration
@
Implementation ::=
DocComment_t? 'implementation' ImplementationName? ConstraintList? Name SimpleExpr* ImplementationBlock?
;
@
@
ImplementationName ::= '[' Name ']';
@
-}
implementation :: Bool -> SyntaxInfo -> IdrisParser [PDecl]
implementation kwopt syn
= do ist <- get
(doc, argDocs) <- docstring syn
(opts, acc) <- fnOpts
if kwopt then optional implementationKeyword
else do implementationKeyword
return (Just ())
fc <- getFC
en <- optional implementationName
cs <- constraintList syn
let cs' = [(c, ty) | (c, _, ty) <- cs]
(cn, cnfc) <- fnName
args <- many (simpleExpr syn)
let sc = PApp fc (PRef cnfc [cnfc] cn) (map pexp args)
let t = bindList (PPi constraint) cs sc
pnames <- implementationUsing
ds <- implementationBlock syn
return [PImplementation doc argDocs syn fc cs' pnames acc opts cn cnfc args [] t en ds]
<?> "implementation declaration"
where implementationName :: IdrisParser Name
implementationName = do lchar '['; n_in <- fst <$> fnName; lchar ']'
let n = expandNS syn n_in
return n
<?> "implementation name"
implementationKeyword :: IdrisParser ()
implementationKeyword = reservedHL "implementation"
<|> do reservedHL "instance"
fc <- getFC
parserWarning fc Nothing (Msg "The 'instance' keyword is deprecated. Use 'implementation' (or omit it) instead.")
implementationUsing :: IdrisParser [Name]
implementationUsing = do reservedHL "using"
ns <- sepBy1 fnName (lchar ',')
return (map fst ns)
<|> return []
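-- Illustrative interface and implementation declarations (assumed Idris
-- syntax; the deprecated 'class' and 'instance' keywords are accepted too):
--
--   interface Container f where
--     empty  : f a
--     insert : a -> f a -> f a
--
--   implementation Container List where
--     empty  = []
--     insert = (::)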
-- | Parse a docstring
docstring :: SyntaxInfo
-> IdrisParser (Docstring (Either Err PTerm),
[(Name,Docstring (Either Err PTerm))])
docstring syn = do (doc, argDocs) <- option noDocs docComment
ist <- get
let doc' = annotCode (tryFullExpr syn ist) doc
argDocs' = [ (n, annotCode (tryFullExpr syn ist) d)
| (n, d) <- argDocs ]
return (doc', argDocs')
{-| Parses a using declaration list
@
UsingDeclList ::=
UsingDeclList'
| NameList TypeSig
;
@
@
UsingDeclList' ::=
UsingDecl
| UsingDecl ',' UsingDeclList'
;
@
@
NameList ::=
Name
| Name ',' NameList
;
@
-}
usingDeclList :: SyntaxInfo -> IdrisParser [Using]
usingDeclList syn
= try (sepBy1 (usingDecl syn) (lchar ','))
<|> do ns <- sepBy1 (fst <$> name) (lchar ',')
lchar ':'
t <- typeExpr (disallowImp syn)
return (map (\x -> UImplicit x t) ns)
<?> "using declaration list"
{-| Parses a using declaration
@
UsingDecl ::=
FnName TypeSig
| FnName FnName+
;
@
-}
usingDecl :: SyntaxInfo -> IdrisParser Using
usingDecl syn = try (do x <- fst <$> fnName
lchar ':'
t <- typeExpr (disallowImp syn)
return (UImplicit x t))
<|> do c <- fst <$> fnName
xs <- many (fst <$> fnName)
return (UConstraint c xs)
<?> "using declaration"
{-| Parse a clause with patterns
@
Pattern ::= Clause;
@
-}
pattern :: SyntaxInfo -> IdrisParser PDecl
pattern syn = do fc <- getFC
clause <- clause syn
return (PClauses fc [] (sMN 2 "_") [clause]) -- collect together later
<?> "pattern"
{-| Parse a constant applicative form declaration
@
CAF ::= 'let' FnName '=' Expr Terminator;
@
-}
caf :: SyntaxInfo -> IdrisParser PDecl
caf syn = do reservedHL "let"
n_in <- fst <$> fnName; let n = expandNS syn n_in
pushIndent
lchar '='
t <- indented $ expr syn
terminator
fc <- getFC
return (PCAF fc n t)
<?> "constant applicative form declaration"
{-| Parse an argument expression
@
ArgExpr ::= HSimpleExpr | {- In Pattern External (User-defined) Expression -};
@
-}
argExpr :: SyntaxInfo -> IdrisParser PTerm
argExpr syn = let syn' = syn { inPattern = True } in
try (hsimpleExpr syn') <|> simpleExternalExpr syn'
<?> "argument expression"
{-| Parse a right hand side of a function
@
RHS ::= '=' Expr
| '?=' RHSName? Expr
| Impossible
;
@
@
RHSName ::= '{' FnName '}';
@
-}
rhs :: SyntaxInfo -> Name -> IdrisParser PTerm
rhs syn n = do lchar '='
indentPropHolds gtProp
expr syn
<|> do symbol "?=";
fc <- getFC
name <- option n' (do symbol "{"; n <- fst <$> fnName; symbol "}";
return n)
r <- expr syn
return (addLet fc name r)
<|> impossible
<?> "function right hand side"
where mkN :: Name -> Name
mkN (UN x) = if (tnull x || not (isAlpha (thead x)))
then sUN "infix_op_lemma_1"
else sUN (str x++"_lemma_1")
mkN (NS x n) = NS (mkN x) n
n' :: Name
n' = mkN n
addLet :: FC -> Name -> PTerm -> PTerm
addLet fc nm (PLet fc' n nfc ty val r) = PLet fc' n nfc ty val (addLet fc nm r)
addLet fc nm (PCase fc' t cs) = PCase fc' t (map addLetC cs)
where addLetC (l, r) = (l, addLet fc nm r)
addLet fc nm r = (PLet fc (sUN "value") NoFC Placeholder r (PMetavar NoFC nm))
{-|Parses a function clause
@
RHSOrWithBlock ::= RHS WhereOrTerminator
| 'with' SimpleExpr OpenBlock FnDecl+ CloseBlock
;
@
@
Clause ::= WExpr+ RHSOrWithBlock
| SimpleExpr '<==' FnName RHS WhereOrTerminator
| ArgExpr Operator ArgExpr WExpr* RHSOrWithBlock {- Except "=" and "?=" operators to avoid ambiguity -}
| FnName ConstraintArg* ImplicitOrArgExpr* WExpr* RHSOrWithBlock
;
@
@
ImplicitOrArgExpr ::= ImplicitArg | ArgExpr;
@
@
WhereOrTerminator ::= WhereBlock | Terminator;
@
-}
clause :: SyntaxInfo -> IdrisParser PClause
clause syn
= do wargs <- try (do pushIndent; some (wExpr syn))
fc <- getFC
ist <- get
n <- case lastParse ist of
Just t -> return t
Nothing -> fail "Invalid clause"
(do r <- rhs syn n
let ctxt = tt_ctxt ist
let wsyn = syn { syn_namespace = [], syn_toplevel = False }
(wheres, nmap) <- choice [do x <- whereBlock n wsyn
popIndent
return x,
do terminator
return ([], [])]
return $ PClauseR fc wargs r wheres) <|> (do
popIndent
reservedHL "with"
wval <- simpleExpr syn
pn <- optProof
openBlock
ds <- some $ fnDecl syn
let withs = concat ds
closeBlock
return $ PWithR fc wargs wval pn withs)
<|> do ty <- try (do pushIndent
ty <- simpleExpr syn
symbol "<=="
return ty)
fc <- getFC
n_in <- fst <$> fnName; let n = expandNS syn n_in
r <- rhs syn n
ist <- get
let ctxt = tt_ctxt ist
let wsyn = syn { syn_namespace = [] }
(wheres, nmap) <- choice [do x <- whereBlock n wsyn
popIndent
return x,
do terminator
return ([], [])]
let capp = PLet fc (sMN 0 "match") NoFC
ty
(PMatchApp fc n)
(PRef fc [] (sMN 0 "match"))
ist <- get
put (ist { lastParse = Just n })
return $ PClause fc n capp [] r wheres
<|> do (l, op, nfc) <- try (do
pushIndent
l <- argExpr syn
(op, nfc) <- operatorFC
when (op == "=" || op == "?=" ) $
fail "infix clause definition with \"=\" and \"?=\" not supported "
return (l, op, nfc))
let n = expandNS syn (sUN op)
r <- argExpr syn
fc <- getFC
wargs <- many (wExpr syn)
(do rs <- rhs syn n
let wsyn = syn { syn_namespace = [] }
(wheres, nmap) <- choice [do x <- whereBlock n wsyn
popIndent
return x,
do terminator
return ([], [])]
ist <- get
let capp = PApp fc (PRef nfc [nfc] n) [pexp l, pexp r]
put (ist { lastParse = Just n })
return $ PClause fc n capp wargs rs wheres) <|> (do
popIndent
reservedHL "with"
wval <- bracketed syn
pn <- optProof
openBlock
ds <- some $ fnDecl syn
closeBlock
ist <- get
let capp = PApp fc (PRef fc [] n) [pexp l, pexp r]
let withs = map (fillLHSD n capp wargs) $ concat ds
put (ist { lastParse = Just n })
return $ PWith fc n capp wargs wval pn withs)
<|> do pushIndent
(n_in, nfc) <- fnName; let n = expandNS syn n_in
fc <- getFC
args <- many (try (implicitArg (syn { inPattern = True } ))
<|> try (constraintArg (syn { inPattern = True }))
<|> (fmap pexp (argExpr syn)))
wargs <- many (wExpr syn)
let capp = PApp fc (PRef nfc [nfc] n) args
(do r <- rhs syn n
ist <- get
let ctxt = tt_ctxt ist
let wsyn = syn { syn_namespace = [] }
(wheres, nmap) <- choice [do x <- whereBlock n wsyn
popIndent
return x,
do terminator
return ([], [])]
ist <- get
put (ist { lastParse = Just n })
return $ PClause fc n capp wargs r wheres) <|> (do
reservedHL "with"
ist <- get
put (ist { lastParse = Just n })
wval <- bracketed syn
pn <- optProof
openBlock
ds <- some $ fnDecl syn
let withs = map (fillLHSD n capp wargs) $ concat ds
closeBlock
popIndent
return $ PWith fc n capp wargs wval pn withs)
<?> "function clause"
where
optProof = option Nothing (do reservedHL "proof"
n <- fnName
return (Just n))
fillLHS :: Name -> PTerm -> [PTerm] -> PClause -> PClause
fillLHS n capp owargs (PClauseR fc wargs v ws)
= PClause fc n capp (owargs ++ wargs) v ws
fillLHS n capp owargs (PWithR fc wargs v pn ws)
= PWith fc n capp (owargs ++ wargs) v pn
(map (fillLHSD n capp (owargs ++ wargs)) ws)
fillLHS _ _ _ c = c
fillLHSD :: Name -> PTerm -> [PTerm] -> PDecl -> PDecl
fillLHSD n c a (PClauses fc o fn cs) = PClauses fc o fn (map (fillLHS n c a) cs)
fillLHSD n c a x = x
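-- Illustrative clauses handled by 'clause' (assumed Idris syntax), including
-- a 'with' block as parsed by the RHSOrWithBlock branch:
--
--   filter p []        = []
--   filter p (x :: xs) with (p x)
--     filter p (x :: xs) | True  = x :: filter p xs
--     filter p (x :: xs) | False = filter p xs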
{-| Parses with pattern
@
WExpr ::= '|' Expr';
@
-}
wExpr :: SyntaxInfo -> IdrisParser PTerm
wExpr syn = do lchar '|'
expr' (syn { inPattern = True })
<?> "with pattern"
{-| Parses a where block
@
WhereBlock ::= 'where' OpenBlock Decl+ CloseBlock;
@
-}
whereBlock :: Name -> SyntaxInfo -> IdrisParser ([PDecl], [(Name, Name)])
whereBlock n syn
= do reservedHL "where"
ds <- indentedBlock1 (decl syn)
let dns = concatMap (concatMap declared) ds
return (concat ds, map (\x -> (x, decoration syn x)) dns)
<?> "where block"
{-|Parses a code generation target language name
@
Codegen ::= 'C'
| 'Java'
| 'JavaScript'
| 'Node'
| 'LLVM'
| 'Bytecode'
;
@
-}
codegen_ :: IdrisParser Codegen
codegen_ = do n <- fst <$> identifier
return (Via IBCFormat (map toLower n))
<|> do reserved "Bytecode"; return Bytecode
<?> "code generation language"
{-|Parses a compiler directive
@
StringList ::=
String
| String ',' StringList
;
@
@
Directive ::= '%' Directive';
@
@
Directive' ::= 'lib' CodeGen String_t
| 'link' CodeGen String_t
| 'flag' CodeGen String_t
| 'include' CodeGen String_t
| 'hide' Name
| 'freeze' Name
| 'thaw' Name
| 'access' Accessibility
| 'default' Totality
| 'logging' Natural
| 'dynamic' StringList
| 'name' Name NameList
| 'error_handlers' Name NameList
| 'language' 'TypeProviders'
| 'language' 'ErrorReflection'
               | 'deprecate' Name String
| 'fragile' Name Reason
;
@
-}
directive :: SyntaxInfo -> IdrisParser [PDecl]
directive syn = do try (lchar '%' *> reserved "lib")
cgn <- codegen_
lib <- fmap fst stringLiteral
return [PDirective (DLib cgn lib)]
<|> do try (lchar '%' *> reserved "link")
cgn <- codegen_; obj <- fst <$> stringLiteral
return [PDirective (DLink cgn obj)]
<|> do try (lchar '%' *> reserved "flag")
cgn <- codegen_; flag <- fst <$> stringLiteral
return [PDirective (DFlag cgn flag)]
<|> do try (lchar '%' *> reserved "include")
cgn <- codegen_
hdr <- fst <$> stringLiteral
return [PDirective (DInclude cgn hdr)]
<|> do try (lchar '%' *> reserved "hide"); n <- fst <$> fnName
return [PDirective (DHide n)]
<|> do try (lchar '%' *> reserved "freeze"); n <- fst <$> iName []
return [PDirective (DFreeze n)]
<|> do try (lchar '%' *> reserved "thaw"); n <- fst <$> iName []
return [PDirective (DThaw n)]
                -- injectivity assertions are intended for debugging purposes
-- only, and won't be documented/could be removed at any point
<|> do try (lchar '%' *> reserved "assert_injective"); n <- fst <$> fnName
return [PDirective (DInjective n)]
-- Assert totality of something after definition. This is
-- here as a debugging aid, so commented out...
-- <|> do try (lchar '%' *> reserved "assert_set_total"); n <- fst <$> fnName
-- return [PDirective (DSetTotal n)]
<|> do try (lchar '%' *> reserved "access")
acc <- accessibility
ist <- get
put ist { default_access = acc }
return [PDirective (DAccess acc)]
<|> do try (lchar '%' *> reserved "default"); tot <- totality
i <- get
put (i { default_total = tot } )
return [PDirective (DDefault tot)]
<|> do try (lchar '%' *> reserved "logging")
i <- fst <$> natural
return [PDirective (DLogging i)]
<|> do try (lchar '%' *> reserved "dynamic")
libs <- sepBy1 (fmap fst stringLiteral) (lchar ',')
return [PDirective (DDynamicLibs libs)]
<|> do try (lchar '%' *> reserved "name")
(ty, tyFC) <- fnName
ns <- sepBy1 name (lchar ',')
return [PDirective (DNameHint ty tyFC ns)]
<|> do try (lchar '%' *> reserved "error_handlers")
(fn, nfc) <- fnName
(arg, afc) <- fnName
ns <- sepBy1 name (lchar ',')
return [PDirective (DErrorHandlers fn nfc arg afc ns) ]
<|> do try (lchar '%' *> reserved "language"); ext <- pLangExt;
return [PDirective (DLanguage ext)]
<|> do try (lchar '%' *> reserved "deprecate")
n <- fst <$> fnName
alt <- option "" (fst <$> stringLiteral)
return [PDirective (DDeprecate n alt)]
<|> do try (lchar '%' *> reserved "fragile")
n <- fst <$> fnName
alt <- option "" (fst <$> stringLiteral)
return [PDirective (DFragile n alt)]
<|> do fc <- getFC
try (lchar '%' *> reserved "used")
fn <- fst <$> fnName
arg <- fst <$> iName []
return [PDirective (DUsed fc fn arg)]
<|> do try (lchar '%' *> reserved "auto_implicits")
b <- on_off
return [PDirective (DAutoImplicits b)]
<?> "directive"
where on_off = do reserved "on"; return True
<|> do reserved "off"; return False
pLangExt :: IdrisParser LanguageExt
pLangExt = (reserved "TypeProviders" >> return TypeProviders)
<|> (reserved "ErrorReflection" >> return ErrorReflection)
{-| Parses a totality
@
Totality ::= 'partial' | 'total' | 'covering'
@
-}
totality :: IdrisParser DefaultTotality
totality
= do reservedHL "total"; return DefaultCheckingTotal
<|> do reservedHL "partial"; return DefaultCheckingPartial
<|> do reservedHL "covering"; return DefaultCheckingCovering
{-| Parses a type provider
@
Provider ::= DocComment_t? '%' 'provide' ProviderWhat? '(' FnName TypeSig ')' 'with' Expr;
ProviderWhat ::= 'proof' | 'term' | 'type' | 'postulate'
@
-}
provider :: SyntaxInfo -> IdrisParser [PDecl]
provider syn = do doc <- try (do (doc, _) <- docstring syn
fc1 <- getFC
lchar '%'
fc2 <- reservedFC "provide"
highlightP (spanFC fc1 fc2) AnnKeyword
return doc)
provideTerm doc <|> providePostulate doc
<?> "type provider"
where provideTerm doc =
do lchar '('; (n, nfc) <- fnName; lchar ':'; t <- typeExpr syn; lchar ')'
fc <- getFC
reservedHL "with"
e <- expr syn <?> "provider expression"
return [PProvider doc syn fc nfc (ProvTerm t e) n]
providePostulate doc =
do reservedHL "postulate"
(n, nfc) <- fnName
fc <- getFC
reservedHL "with"
e <- expr syn <?> "provider expression"
return [PProvider doc syn fc nfc (ProvPostulate e) n]
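-- Illustrative type provider uses (assumed Idris syntax; the provider names
-- are hypothetical):
--
--   %provide (buildTarget : String) with getBuildTarget
--   %provide postulate SizeProof with checkSizes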
{-| Parses a transform
@
Transform ::= '%' 'transform' Expr '==>' Expr
@
-}
transform :: SyntaxInfo -> IdrisParser [PDecl]
transform syn = do try (lchar '%' *> reserved "transform")
-- leave it unchecked, until we work out what this should
-- actually mean...
-- safety <- option True (do reserved "unsafe"
-- return False)
l <- expr syn
fc <- getFC
symbol "==>"
r <- expr syn
return [PTransform fc False l r]
<?> "transform"
{-| Parses a top-level reflected elaborator script
@
RunElabDecl ::= '%' 'runElab' Expr
@
-}
runElabDecl :: SyntaxInfo -> IdrisParser PDecl
runElabDecl syn =
do kwFC <- try (do fc <- getFC
lchar '%'
fc' <- reservedFC "runElab"
return (spanFC fc fc'))
script <- expr syn <?> "elaborator script"
highlightP kwFC AnnKeyword
return $ PRunElabDecl kwFC script (syn_namespace syn)
<?> "top-level elaborator script"
{- * Loading and parsing -}
{-| Parses an expression from input -}
parseExpr :: IState -> String -> Result PTerm
parseExpr st = runparser (fullExpr defaultSyntax) st "(input)"
{-| Parses a constant from input -}
parseConst :: IState -> String -> Result Const
parseConst st = runparser (fmap fst constant) st "(input)"
{-| Parses a tactic from input -}
parseTactic :: IState -> String -> Result PTactic
parseTactic st = runparser (fullTactic defaultSyntax) st "(input)"
{-| Parses a do-step from input (used in the elab shell) -}
parseElabShellStep :: IState -> String -> Result (Either ElabShellCmd PDo)
parseElabShellStep ist = runparser (fmap Right (do_ defaultSyntax) <|> fmap Left elabShellCmd) ist "(input)"
where elabShellCmd = char ':' >>
(reserved "qed" >> pure EQED ) <|>
(reserved "abandon" >> pure EAbandon ) <|>
(reserved "undo" >> pure EUndo ) <|>
(reserved "state" >> pure EProofState) <|>
(reserved "term" >> pure EProofTerm ) <|>
(expressionTactic ["e", "eval"] EEval ) <|>
(expressionTactic ["t", "type"] ECheck) <|>
(expressionTactic ["search"] ESearch ) <|>
(do reserved "doc"
doc <- (Right . fst <$> constant) <|> (Left . fst <$> fnName)
eof
return (EDocStr doc))
<?> "elab command"
expressionTactic cmds tactic =
do asum (map reserved cmds)
t <- spaced (expr defaultSyntax)
i <- get
return $ tactic (desugar defaultSyntax i t)
spaced parser = indentPropHolds gtProp *> parser
-- | Parse module header and imports
parseImports :: FilePath -> String -> Idris (Maybe (Docstring ()), [String], [ImportInfo], Maybe Delta)
parseImports fname input
= do i <- getIState
case parseString (runInnerParser (evalStateT imports i)) (Directed (UTF8.fromString fname) 0 0 0 0) input of
Failure (ErrInfo err _) -> fail (show err)
Success (x, annots, i) ->
do putIState i
fname' <- runIO $ Dir.makeAbsolute fname
sendHighlighting $ addPath annots fname'
return x
where imports :: IdrisParser ((Maybe (Docstring ()), [String],
[ImportInfo],
Maybe Delta),
[(FC, OutputAnnotation)], IState)
imports = do whiteSpace
(mdoc, mname, annots) <- moduleHeader
ps_exp <- many import_
mrk <- mark
isEof <- lookAheadMatches eof
let mrk' = if isEof
then Nothing
else Just mrk
i <- get
-- add Builtins and Prelude, unless options say
-- not to
let ps = ps_exp -- imp "Builtins" : imp "Prelude" : ps_exp
return ((mdoc, mname, ps, mrk'), annots, i)
imp m = ImportInfo False (toPath m)
Nothing [] NoFC NoFC
ns = Spl.splitOn "."
toPath = foldl1' (</>) . ns
addPath :: [(FC, OutputAnnotation)] -> FilePath -> [(FC, OutputAnnotation)]
addPath [] _ = []
addPath ((fc, AnnNamespace ns Nothing) : annots) path =
(fc, AnnNamespace ns (Just path)) : addPath annots path
addPath (annot:annots) path = annot : addPath annots path
-- | There should be a better way of doing this...
findFC :: Doc -> (FC, String)
findFC x = let s = show (plain x) in findFC' s
where findFC' s = case span (/= ':') s of
-- Horrid kludge to prevent crashes on Windows
(prefix, ':':'\\':rest) ->
case findFC' rest of
(NoFC, msg) -> (NoFC, msg)
(FileFC f, msg) -> (FileFC (prefix ++ ":\\" ++ f), msg)
(FC f start end, msg) -> (FC (prefix ++ ":\\" ++ f) start end, msg)
(failname, ':':rest) -> case span isDigit rest of
(line, ':':rest') -> case span isDigit rest' of
(col, ':':msg) -> let pos = (read line, read col) in
(FC failname pos pos, msg)
-- | Check if the coloring matches the options and corrects if necessary
fixColour :: Bool -> ANSI.Doc -> ANSI.Doc
fixColour False doc = ANSI.plain doc
fixColour True doc = doc
-- | A program is a list of declarations, possibly with associated
-- documentation strings.
parseProg :: SyntaxInfo -> FilePath -> String -> Maybe Delta ->
Idris [PDecl]
parseProg syn fname input mrk
= do i <- getIState
case runparser mainProg i fname input of
Failure (ErrInfo doc _) -> do -- FIXME: Get error location from trifecta
-- this can't be the solution!
-- Issue #1575 on the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1575
let (fc, msg) = findFC doc
i <- getIState
case idris_outputmode i of
RawOutput h -> iputStrLn (show $ fixColour (idris_colourRepl i) doc)
IdeMode n h -> iWarn fc (P.text msg)
putIState (i { errSpan = Just fc })
return []
Success (x, i) -> do putIState i
reportParserWarnings
return $ collect x
where mainProg :: IdrisParser ([PDecl], IState)
mainProg = case mrk of
Nothing -> do i <- get; return ([], i)
Just mrk -> do
release mrk
ds <- prog syn
i' <- get
return (ds, i')
{-| Load idris module and show error if something wrong happens -}
loadModule :: FilePath -> IBCPhase -> Idris (Maybe String)
loadModule f phase
= idrisCatch (loadModule' f phase)
(\e -> do setErrSpan (getErrSpan e)
ist <- getIState
iWarn (getErrSpan e) $ pprintErr ist e
return Nothing)
{-| Load idris module -}
loadModule' :: FilePath -> IBCPhase -> Idris (Maybe String)
loadModule' f phase
= do i <- getIState
let file = takeWhile (/= ' ') f
ibcsd <- valIBCSubDir i
ids <- allImportDirs
fp <- findImport ids ibcsd file
if file `elem` imported i
then do logParser 1 $ "Already read " ++ file
return Nothing
else do putIState (i { imported = file : imported i })
case fp of
IDR fn -> loadSource False fn Nothing
LIDR fn -> loadSource True fn Nothing
IBC fn src ->
idrisCatch (loadIBC True phase fn)
(\c -> do logParser 1 $ fn ++ " failed " ++ pshow i c
case src of
IDR sfn -> loadSource False sfn Nothing
LIDR sfn -> loadSource True sfn Nothing)
return $ Just file
{-| Load idris code from file -}
loadFromIFile :: Bool -> IBCPhase -> IFileType -> Maybe Int -> Idris ()
loadFromIFile reexp phase i@(IBC fn src) maxline
= do logParser 1 $ "Skipping " ++ getSrcFile i
idrisCatch (loadIBC reexp phase fn)
(\err -> ierror $ LoadingFailed fn err)
where
getSrcFile (IDR fn) = fn
getSrcFile (LIDR fn) = fn
getSrcFile (IBC f src) = getSrcFile src
loadFromIFile _ _ (IDR fn) maxline = loadSource' False fn maxline
loadFromIFile _ _ (LIDR fn) maxline = loadSource' True fn maxline
{-| Load idris source code and show error if something wrong happens -}
loadSource' :: Bool -> FilePath -> Maybe Int -> Idris ()
loadSource' lidr r maxline
= idrisCatch (loadSource lidr r maxline)
(\e -> do setErrSpan (getErrSpan e)
ist <- getIState
case e of
At f e' -> iWarn f (pprintErr ist e')
_ -> iWarn (getErrSpan e) (pprintErr ist e))
{-| Load Idris source code-}
loadSource :: Bool -> FilePath -> Maybe Int -> Idris ()
loadSource lidr f toline
= do logParser 1 ("Reading " ++ f)
i <- getIState
let def_total = default_total i
file_in <- runIO $ readSource f
file <- if lidr then tclift $ unlit f file_in else return file_in
(mdocs, mname, imports_in, pos) <- parseImports f file
ai <- getAutoImports
let imports = map (\n -> ImportInfo True n Nothing [] NoFC NoFC) ai ++ imports_in
ids <- allImportDirs
ibcsd <- valIBCSubDir i
mapM_ (\(re, f, ns, nfc) ->
do fp <- findImport ids ibcsd f
case fp of
LIDR fn -> ifail $ "No ibc for " ++ f
IDR fn -> ifail $ "No ibc for " ++ f
IBC fn src ->
do loadIBC True IBC_Building fn
let srcFn = case src of
IDR fn -> Just fn
LIDR fn -> Just fn
_ -> Nothing
srcFnAbs <- case srcFn of
Just fn -> fmap Just (runIO $ Dir.makeAbsolute fn)
Nothing -> return Nothing
sendHighlighting [(nfc, AnnNamespace ns srcFnAbs)])
[(re, fn, ns, nfc) | ImportInfo re fn _ ns _ nfc <- imports]
reportParserWarnings
sendParserHighlighting
-- process and check module aliases
let modAliases = M.fromList
[ (prep alias, prep realName)
| ImportInfo { import_reexport = reexport
, import_path = realName
, import_rename = Just (alias, _)
, import_location = fc } <- imports
]
prep = map T.pack . reverse . Spl.splitOn [pathSeparator]
aliasNames = [ (alias, fc)
| ImportInfo { import_rename = Just (alias, _)
, import_location = fc } <- imports
]
histogram = groupBy ((==) `on` fst) . sortBy (comparing fst) $ aliasNames
case map head . filter ((/= 1) . length) $ histogram of
[] -> logParser 3 $ "Module aliases: " ++ show (M.toList modAliases)
(n,fc):_ -> throwError . At fc . Msg $ "import alias not unique: " ++ show n
i <- getIState
putIState (i { default_access = Private, module_aliases = modAliases })
clearIBC -- start a new .ibc file
-- record package info in .ibc
imps <- allImportDirs
mapM_ addIBC (map IBCImportDir imps)
mapM_ (addIBC . IBCImport)
[ (reexport, realName)
| ImportInfo { import_reexport = reexport
, import_path = realName
} <- imports
]
let syntax = defaultSyntax{ syn_namespace = reverse mname,
maxline = toline }
ist <- getIState
-- Save the span from parsing the module header, because
-- an empty program parse might obliterate it.
let oldSpan = idris_parsedSpan ist
ds' <- parseProg syntax f file pos
case (ds', oldSpan) of
([], Just fc) ->
          -- If no program elements were parsed, we didn't
-- get a loaded region in the IBC file. That
-- means we need to add it back.
do ist <- getIState
putIState ist { idris_parsedSpan = oldSpan
, ibc_write = IBCParsedRegion fc :
ibc_write ist
}
_ -> return ()
sendParserHighlighting
-- Parsing done, now process declarations
let ds = namespaces mname ds'
logParser 3 (show $ showDecls verbosePPOption ds)
i <- getIState
logLvl 10 (show (toAlist (idris_implicits i)))
logLvl 3 (show (idris_infixes i))
-- Now add all the declarations to the context
v <- verbose
when v $ iputStrLn $ "Type checking " ++ f
-- we totality check after every Mutual block, so if
-- anything is a single definition, wrap it in a
-- mutual block on its own
elabDecls (toplevelWith f) (map toMutual ds)
i <- getIState
       -- simplify every definition to give the totality checker
-- a better chance
mapM_ (\n -> do logLvl 5 $ "Simplifying " ++ show n
ctxt' <-
do ctxt <- getContext
tclift $ simplifyCasedef n (getErasureInfo i) ctxt
setContext ctxt')
(map snd (idris_totcheck i))
-- build size change graph from simplified definitions
logLvl 1 "Totality checking"
i <- getIState
mapM_ buildSCG (idris_totcheck i)
mapM_ checkDeclTotality (idris_totcheck i)
mapM_ verifyTotality (idris_totcheck i)
-- Redo totality check for deferred names
let deftots = idris_defertotcheck i
logLvl 2 $ "Totality checking " ++ show deftots
mapM_ (\x -> do tot <- getTotality x
case tot of
Total _ ->
do let opts = case lookupCtxtExact x (idris_flags i) of
Just os -> os
Nothing -> []
when (AssertTotal `notElem` opts) $
setTotality x Unchecked
_ -> return ()) (map snd deftots)
mapM_ buildSCG deftots
mapM_ checkDeclTotality deftots
logLvl 1 ("Finished " ++ f)
ibcsd <- valIBCSubDir i
logLvl 1 "Universe checking"
iucheck
let ibc = ibcPathNoFallback ibcsd f
i <- getIState
addHides (hide_list i)
-- Save module documentation if applicable
i <- getIState
case mdocs of
Nothing -> return ()
Just docs -> addModDoc syntax mname docs
-- Finally, write an ibc and highlights if checking was successful
ok <- noErrors
when ok $
do idrisCatch (do writeIBC f ibc; clearIBC)
(\c -> return ()) -- failure is harmless
hl <- getDumpHighlighting
when hl $
idrisCatch (writeHighlights f)
(const $ return ()) -- failure is harmless
clearHighlights
i <- getIState
putIState (i { default_total = def_total,
hide_list = emptyContext })
return ()
where
namespaces :: [String] -> [PDecl] -> [PDecl]
namespaces [] ds = ds
namespaces (x:xs) ds = [PNamespace x NoFC (namespaces xs ds)]
toMutual :: PDecl -> PDecl
toMutual m@(PMutual _ d) = m
toMutual (PNamespace x fc ds) = PNamespace x fc (map toMutual ds)
toMutual x = let r = PMutual (fileFC "single mutual") [x] in
case x of
PClauses{} -> r
PInterface{} -> r
PData{} -> r
PImplementation{} -> r
_ -> x
addModDoc :: SyntaxInfo -> [String] -> Docstring () -> Idris ()
addModDoc syn mname docs =
do ist <- getIState
docs' <- elabDocTerms (toplevelWith f) (parsedDocs ist)
let modDocs' = addDef docName docs' (idris_moduledocs ist)
putIState ist { idris_moduledocs = modDocs' }
addIBC (IBCModDocs docName)
where
docName = NS modDocName (map T.pack (reverse mname))
parsedDocs ist = annotCode (tryFullExpr syn ist) docs
{-| Adds names to hide list -}
addHides :: Ctxt Accessibility -> Idris ()
addHides xs = do i <- getIState
let defh = default_access i
mapM_ doHide (toAlist xs)
where doHide (n, a) = do setAccessibility n a
addIBC (IBCAccess n a)
|
enolan/Idris-dev
|
src/Idris/Parser.hs
|
bsd-3-clause
| 72,918
| 483
| 26
| 30,188
| 15,231
| 8,304
| 6,927
| 1,257
| 27
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
module SpoilyBot
( startApp
) where
import SpoilyBot.Config
import Control.Monad.IO.Class (liftIO)
import Data.Text (Text, append, pack)
import Network.HTTP.Client (newManager)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Network.Wai (Application)
import Network.Wai.Handler.Warp (run)
import Servant ((:>), Get, PlainText, Proxy(..), Server, serve)
import Web.Telegram.API.Bot (GetMeResponse(..), Token(..), getMe, user_first_name, user_result)
type API = "start" :> Get '[PlainText] Text
startApp :: Config -> IO ()
startApp (Config port telegramToken) = run port $ app telegramToken
app :: Token -> Application
app telegramToken = serve api $ server telegramToken
api :: Proxy API
api = Proxy
server :: Token -> Server API
server telegramToken = liftIO $ start telegramToken
start :: Token -> IO Text
start telegramToken = do
manager <- newManager tlsManagerSettings
res <- getMe telegramToken manager
case res of
Left e -> do
return . pack . show $ e
Right GetMeResponse { user_result = u } -> do
return $ user_first_name u
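-- Rough usage sketch (the Config constructor arguments and the token value
-- are assumptions, not checked against SpoilyBot.Config):
--
--   main :: IO ()
--   main = startApp (Config 8080 (Token (pack "123456:secret")))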
|
kirikaza/spoily_bot
|
src/SpoilyBot.hs
|
bsd-3-clause
| 1,150
| 0
| 14
| 204
| 378
| 209
| 169
| 31
| 2
|
{-# language PatternSignatures #-}
module Main where
import Wurf
import Spieler
import Bank
import Network.Wai.Handler.Warp
import qualified Network.Wai.Frontend.MonadCGI
import Network.HTTP.Types (statusOK)
import Network.Wai (responseLBS)
import Network.CGI
import Network.XmlRpc.Server
import Network.XmlRpc.Client
import System.Environment
import System.IO
import Control.Monad ( when )
import Control.Concurrent.STM
import Control.Concurrent
import qualified Data.Map as M
import Data.List ( sort )
import System.Random
data State = State { name :: Name, port :: Int
, previous :: TVar ( Maybe Wurf )
}
fresh :: Name -> Int -> IO State
fresh n p = do
prev <- atomically $ newTVar Nothing
return $ State { Main.name = n, port = p
, previous = prev }
-- | command line arguments:
-- name, password, callback URL, server URL.
-- the port number for this player's server
-- is extracted from the callback URL
main = do
args @ [ n, p, client , server ] <- getArgs
putStrLn $ show $ "client" : args
forkIO $ do
threadDelay $ 10^6
score <- remote server "Server.scores" :: IO Bank
print score
True <- remote server "Server.login" $ Spieler
{ Spieler.name = Name n
, password = Password p
, callback = Callback client
}
return ()
let extract_port = reverse . takeWhile (/= ':') . reverse
state <- fresh ( Name n )
( read $ extract_port client )
play state
play state
= Network.Wai.Handler.Warp.runSettings
( defaultSettings { settingsTimeout = 1
, settingsPort = port state} )
$ Network.Wai.Frontend.MonadCGI.cgiToApp
$ do
input <- getBody
result <- liftIO
$ handleCall ( server state ) input
outputFPS result
server state = methods
[ ("Player.who_are_you", fun $ who_are_you state )
, ("Player.begin_round", fun $ begin state )
, ("Player.end_round", fun $ ignore0 state )
, ("Player.begin_game", fun $ ignore0 state )
, ("Player.end_game", fun $ ignore0 state )
, ("Player.accept", fun $ accept state )
, ("Player.other", fun $ ignore1 state )
, ("Player.say", fun $ say state )
, ("Player.game_won_by", fun (( \ s -> return True ) :: Name -> IO Bool ))
, ("Player.round_lost_by", fun (( \ s -> return True ) :: Name -> IO Bool ))
]
who_are_you :: State -> IO Name
who_are_you s = return $ Main.name s
begin :: State -> IO Bool
begin s = do
atomically $ writeTVar ( previous s ) Nothing
return True
ignore0 :: State -> IO Bool
ignore0 s = return True
ignore1 :: State -> Wurf -> IO Bool
ignore1 s w = return True
probabilities :: M.Map Wurf Double
probabilities = M.fromList $ do
let ws = reverse $ sort $ do
i <- [ 1 .. 6 ] ; j <- [ 1 .. 6 ]
return $ wurf i j
( w, k ) <- zip ws [ 0 .. ]
return ( w, k / 36 )
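-- Orientation note (added comment): 'probabilities' lists all 36 ordered die
-- pairs from best to worst and maps each Wurf to roughly its rank divided by
-- 36, so strong throws sit near 0 and weak throws near 1; 'accept' below
-- compares that value against a random threshold as a heuristic for whether
-- to believe the previously announced throw.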
accept :: State -> Wurf -> IO Bool
accept s w = do
atomically $ writeTVar ( previous s ) $ Just w
p <- randomRIO ( 0.0, 1.0 )
q <- randomRIO ( 0.0, 1.0 )
let r = 0.5 * (p+q)
return $ w < wurf 2 1 && probabilities M.! w > r
say :: State -> Wurf -> IO Wurf
say s w = do
prev <- atomically $ readTVar ( previous s )
case prev of
Just u | u >= w -> some $ succ u
_ -> return w
some u =
if u == wurf 2 1 then return u
else do
f <- randomRIO ( False, True )
if f then return u else some $ succ u
|
jwaldmann/mex
|
src/Client2.hs
|
gpl-3.0
| 3,644
| 1
| 15
| 1,137
| 1,250
| 650
| 600
| 99
| 3
|
{-#LANGUAGE GADTs, TypeOperators, ScopedTypeVariables, ExplicitForAll, ImpredicativeTypes, MultiParamTypeClasses, FlexibleContexts, PatternSynonyms #-}
module Carnap.Core.Unification.FirstOrder (founify, foUnifySys) where
import Carnap.Core.Data.Classes
import Carnap.Core.Unification.Unification
(Left x) .<. f = Left (f x)
x .<. _ = x
isVar' varConst x = isVar x && not (varConst x)
--this needs to be generalized to include an optional label
founify :: FirstOrder f
=> (forall a. f a -> Bool)
-> [Equation f]
-> [Equation f]
-> Either (UError f) [Equation f]
founify varConst [] ss = Right ss
founify varConst ((x :=: y):es) ss
| isVar' varConst x && x =* y = founify varConst es ss
| isVar' varConst x && occurs x y = Left $ OccursError x y
| isVar' varConst x = founify varConst (mapAll (subst x y) es) ((x :=: y):ss)
| isVar' varConst y = founify varConst ((y :=: x):es) ss
| sameHead x y = founify varConst (es ++ decompose x y) ss .<. SubError x y
| otherwise = Left $ MatchError x y
foUnifySys :: (MonadVar f m, FirstOrder f) => (forall a. f a -> Bool) -> [Equation f] -> m [[Equation f]]
foUnifySys varConst sys = return $ case founify varConst sys [] of
Left _ -> []
Right sub -> [sub]
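-- Rough usage sketch (terms and the variable predicate are assumptions):
-- with a predicate marking x and y as unification variables,
--
--   founify isVarTerm [f x y :=: f a b] []
--
-- is expected to yield Right [x :=: a, y :=: b] (up to ordering), while an
-- equation such as  x :=: f x  yields Left (OccursError x (f x)).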
|
opentower/carnap
|
Carnap/src/Carnap/Core/Unification/FirstOrder.hs
|
gpl-3.0
| 1,341
| 0
| 11
| 351
| 523
| 259
| 264
| 24
| 2
|
module Foo where
data Foo2 a = <resolved>Foo a | Bar a deriving (Show)
|
carymrobbins/intellij-haskforce
|
tests/gold/resolve/Data00001/Foo.hs
|
apache-2.0
| 72
| 4
| 7
| 15
| 34
| 19
| 15
| -1
| -1
|
module Card (module X, searchBy, exactSearchBy, MonadCardsDB, printCards, priority, processCard, Cards) where
import Card.Parser as X
import Card.Type as X
import Card.Json as X
import Data.Char
import Data.List
import Control.Monad.Trans
import Control.Monad.Ether.Implicit
import qualified Data.Map as Map
import qualified Data.Set as S
type Cards = [Card]
priority :: [a -> Bool] -> ([a] -> [a]) -> [a] -> a
priority [] s xs = head xs
priority (p:ps) s xs = if null f then priority ps s xs else priority ps s f
where
f = s $ filter p xs
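-- Illustrative use (not from this module): pick an element matching the
-- highest-priority predicate, falling back to the head of the whole list.
--
--   priority [(> 10), even] id [3, 4, 12, 7]  ==  12
--   priority [(> 100)]      id [3, 4, 12, 7]  ==  3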
printCards :: Locale -> Cards -> String
printCards l = unlines . map (printCard l)
type MonadCardsDB = MonadReader Cards
searchBy' :: (Card -> String) -> String -> [Card] -> [Card]
searchBy' f n = filter $ \c -> map toUpper n `isInfixOf` map toUpper (f c)
searchLocalized' :: (Card -> Localized) -> String -> [Card] -> [(Locale, Card)]
searchLocalized' f n cards = matching
where
loclist c = Map.toList . locToMap $ f c
comp x = map toUpper n `isInfixOf` map toUpper x
complocs :: [(Locale, String)] -> [Bool]
complocs = map (\(_, n') -> comp n')
    prio = priority [(==Locale "ruRU"),(==Locale "enUS")] id . map snd
matching :: [(Locale, Card)]
matching = map (\(x,y) -> (prio $ filter fst (zip (complocs x) (map fst x)), y))
. filter (\(x, _) -> or $ complocs x)
. map (\c -> (loclist c, c))
$ cards
exactSearchBy' :: (Card -> String) -> String -> [Card] -> [Card]
exactSearchBy' f n = filter $ \c -> n == f c
searchBy :: MonadCardsDB m => (Card -> String) -> String -> m Cards
searchBy f n = do
cards <- ask
return . searchBy' f n $ cards
searchLocalized :: MonadCardsDB m => (Card -> Localized) -> String -> m [(Locale, Card)]
searchLocalized f n = do
cards <- ask
return . searchLocalized' f n $ cards
exactSearchBy :: MonadCardsDB m => (Card -> String) -> String -> m Cards
exactSearchBy f n = do
cards <- ask
return . exactSearchBy' f n $ cards
processTag :: MonadCardsDB m => CardTag -> (Locale, Card) -> m (Locale, Card)
processTag (Loc l) (_,c) = return (l,c)
processTag _ x = return x
processTags :: MonadCardsDB m => [CardTag] -> (Locale, Card) -> m (Locale, Card)
processTags tags lcard = foldl (\a b -> a >>= processTag b) (return lcard) tags
processCard :: (MonadCardsDB m, MonadIO m) => [Card -> Bool] -> (S.Set CardTag, String) -> m (S.Set CardTag, (Locale, Card))
processCard prio (tags, n) = do
cards <- searchLocalized name n
if null cards then
return (tags, (Locale "enUS", notFoundCard { name = Localized $ Map.singleton (Locale "enUS") n }))
else do
    let resultcard = priority (map (.snd) prio) (sortBy (\a b -> compare (name (snd a)) (name (snd b)))) cards
(\x -> (tags, x)) <$> processTags (S.toList tags) resultcard
|
hithroc/hsvkbot
|
src/Card.hs
|
bsd-3-clause
| 2,860
| 0
| 20
| 677
| 1,288
| 693
| 595
| 59
| 2
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Functor.Compose
-- Copyright : (c) Ross Paterson 2010
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- Composition of functors.
--
-- @since 4.9.0.0
-----------------------------------------------------------------------------
module Data.Functor.Compose (
Compose(..),
) where
import Data.Functor.Classes
import Control.Applicative
import Data.Coerce (coerce)
import Data.Data (Data)
import Data.Foldable (Foldable(foldMap))
import Data.Traversable (Traversable(traverse))
import GHC.Generics (Generic, Generic1)
import Text.Read (Read(..), readListDefault, readListPrecDefault)
infixr 9 `Compose`
-- | Right-to-left composition of functors.
-- The composition of applicative functors is always applicative,
-- but the composition of monads is not always a monad.
newtype Compose f g a = Compose { getCompose :: f (g a) }
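-- For example (illustrative only):
--
-- >>> getCompose (fmap (+1) (Compose [Just 1, Nothing]))
-- [Just 2,Nothing]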
-- Instances of lifted Prelude classes
-- | @since 4.9.0.0
instance (Eq1 f, Eq1 g) => Eq1 (Compose f g) where
liftEq eq (Compose x) (Compose y) = liftEq (liftEq eq) x y
-- | @since 4.9.0.0
instance (Ord1 f, Ord1 g) => Ord1 (Compose f g) where
liftCompare comp (Compose x) (Compose y) =
liftCompare (liftCompare comp) x y
-- | @since 4.9.0.0
instance (Read1 f, Read1 g) => Read1 (Compose f g) where
liftReadPrec rp rl = readData $
readUnaryWith (liftReadPrec rp' rl') "Compose" Compose
where
rp' = liftReadPrec rp rl
rl' = liftReadListPrec rp rl
liftReadListPrec = liftReadListPrecDefault
liftReadList = liftReadListDefault
-- | @since 4.9.0.0
instance (Show1 f, Show1 g) => Show1 (Compose f g) where
liftShowsPrec sp sl d (Compose x) =
showsUnaryWith (liftShowsPrec sp' sl') "Compose" d x
where
sp' = liftShowsPrec sp sl
sl' = liftShowList sp sl
-- Instances of Prelude classes
-- | @since 4.9.0.0
instance (Eq1 f, Eq1 g, Eq a) => Eq (Compose f g a) where
(==) = eq1
-- | @since 4.9.0.0
instance (Ord1 f, Ord1 g, Ord a) => Ord (Compose f g a) where
compare = compare1
-- | @since 4.9.0.0
instance (Read1 f, Read1 g, Read a) => Read (Compose f g a) where
readPrec = readPrec1
readListPrec = readListPrecDefault
readList = readListDefault
-- | @since 4.9.0.0
instance (Show1 f, Show1 g, Show a) => Show (Compose f g a) where
showsPrec = showsPrec1
-- Functor instances
-- | @since 4.9.0.0
instance (Functor f, Functor g) => Functor (Compose f g) where
fmap f (Compose x) = Compose (fmap (fmap f) x)
-- | @since 4.9.0.0
instance (Foldable f, Foldable g) => Foldable (Compose f g) where
foldMap f (Compose t) = foldMap (foldMap f) t
-- | @since 4.9.0.0
instance (Traversable f, Traversable g) => Traversable (Compose f g) where
traverse f (Compose t) = Compose <$> traverse (traverse f) t
-- | @since 4.9.0.0
instance (Applicative f, Applicative g) => Applicative (Compose f g) where
pure x = Compose (pure (pure x))
Compose f <*> Compose x = Compose (liftA2 (<*>) f x)
liftA2 f (Compose x) (Compose y) =
Compose (liftA2 (liftA2 f) x y)
-- | @since 4.9.0.0
instance (Alternative f, Applicative g) => Alternative (Compose f g) where
empty = Compose empty
(<|>) = coerce ((<|>) :: f (g a) -> f (g a) -> f (g a))
:: forall a . Compose f g a -> Compose f g a -> Compose f g a
|
rahulmutt/ghcvm
|
libraries/base/Data/Functor/Compose.hs
|
bsd-3-clause
| 3,670
| 0
| 12
| 776
| 1,126
| 610
| 516
| 59
| 0
|
{-# LANGUAGE GADTs, RankNTypes, FlexibleInstances, FlexibleContexts #-}
-----------------------------------------------------------------------------
-- Copyright 2017, GRACeFUL project team. This file is distributed under the
-- terms of the Apache License 2.0. For more information, see the files
-- "LICENSE.txt" and "NOTICE.txt", which are included in the distribution.
-----------------------------------------------------------------------------
-- |
-- Maintainer : alexg@chalmers.se
-- Stability : experimental
-- Portability : portable (depends on ghc)
--
-- Ack : The code is based on Ideas.Service.Types module developed
-- by the Ideas team. The code can be found here:
-- https://github.com/ideas-edu/ideas
--
-----------------------------------------------------------------------------
module Types
( -- * Types
Type(..), Const(..), TypedValue(..)
, Equal(..), equalM
-- * Constructing types
, tInt, tBool, tString, tFloat
, tUnit, tPair, tTuple3, tTuple4, tTuple5, tMaybe, tList
, tError, (.->), tIO, tPort, tGCM, (#)
-- * Evaluating and searching a typed value
, eval, findValuesOfType
-- * From and to typed values
, IsTyped(..), cast, castT
-- * Apply typed values
, app
) where
import Utils
import GCM
import CP
import Control.Arrow ((***))
import qualified Control.Category as C
import Control.Monad
import Data.Char
import Data.List
import Data.Maybe
import Data.Tree
-----------------------------------------------------------------------------
-- Types
infix 2 :::
infixr 3 :->
infixr 5 :|:
data TypedValue = forall a . a ::: Type a
data Type t where
-- Type isomorphisms (for defining type synonyms)
Iso :: Isomorphism t1 t2 -> Type t1 -> Type t2
-- Function type
(:->) :: Type t1 -> Type t2 -> Type (t1 -> t2)
-- Input/output
IO :: Type t -> Type (IO t)
GCM :: Type t -> Type (GCM t)
Port' :: CPType t => Type t -> Type (Port t)
-- Special annotations
Tag :: String -> Type t -> Type t
-- Type constructors
List :: Type t -> Type [t]
Pair :: Type t1 -> Type t2 -> Type (t1, t2)
(:|:) :: Type t1 -> Type t2 -> Type (Either t1 t2)
Unit :: Type ()
-- Type constants
Const :: Const t -> Type t
-- Contracts
Contract :: Contract t -> Type t -> Type t
data Contract a where
Prop :: (a -> Bool) -> Contract a
Dep :: (a -> Bool) -> (a -> Contract b) -> Contract (a -> b)
-- TODO: Deal with isomorphisms
getContracts :: Type t -> [Contract t]
getContracts (Tag _ t) = getContracts t
getContracts (Contract c t) = c : getContracts t
getContracts _ = []
-- TODO: Deal with isomorphisms
stripFluff :: Type t -> Type t
stripFluff (Tag _ t) = stripFluff t
stripFluff (Contract _ t) = stripFluff t
stripFluff t = t
contractsAndFluff :: Type t -> ([Contract t], Type t)
contractsAndFluff t = (getContracts t, stripFluff t)
app :: TypedValue -> TypedValue -> Either String TypedValue
app (f ::: ft) (xin ::: xt) =
case contractsAndFluff ft of
(contracts, a :-> b) ->
let argContracts = getContracts a
resContracts = getContracts b
in case equal xt a of
Nothing -> Left "Argument type does not match result type"
Just conv -> do
let x = conv xin
unless (and [ p x
| p <- [ p | Prop p <- argContracts ] ++
[ p | Dep p _ <- contracts ]])
(Left "Contract violation on argument")
let resultContracts = [ r x | Dep _ r <- contracts ]
unless (and [ p (f x)
| p <- [ p | Prop p <- resContracts ++ resultContracts ] ])
(Left "Contract violation on result")
return (f x ::: foldl (flip Contract) b
(resultContracts ++ resContracts))
_ -> Left "Expected a function argument"
data Const t where
Bool :: Const Bool
Int :: Const Int
Float :: Const Float
String :: Const String
instance Show (Type t) where
show (Iso _ t) = show t
show (t1 :-> t2) = show t1 +++ "->" +++ show t2
show (IO t) = "IO" +++ parens t
show (GCM t) = "GCM" +++ parens t
show (Port' t) = "Port" +++ parens t
show (Tag s t) = s +++ ":" +++ show t
show t@(Pair _ _) = showTuple t
show (t1 :|: t2) = show t1 +++ "|" +++ show t2
show (List t) = "[" ++ show t ++ "]"
show Unit = "()"
show (Const c) = show c
show (Contract c t) = "<<contract>> @ " ++ show t
parens :: Show a => a -> String
parens x = "(" ++ show x ++ ")"
(+++) :: String -> String -> String
x +++ y = x ++ " " ++ y
instance Show TypedValue where
show (val ::: tp) = case tp of
Iso iso t -> show (to iso val ::: t)
_ :-> _ -> "<<function>>"
IO _ -> "<<io>>"
GCM _ -> "<<gcm>>"
Port' n -> "port_" ++ show n
Tag _ t -> show (val ::: t)
List t -> showAsList (map (show . (::: t)) val)
Pair t1 t2 -> "(" ++ show (fst val ::: t1) ++
"," ++ show (snd val ::: t2) ++ ")"
t1 :|: t2 -> either (show . (::: t1)) (show . (::: t2)) val
Unit -> "()"
Const t -> showConst val t
Contract c t -> show (val ::: t)
showAsList :: [String] -> String
showAsList xs = "[" ++ intercalate "," xs ++ "]"
showConst :: t -> Const t -> String
showConst val t = case t of
Bool -> map toLower (show val)
Int -> show val
Float -> show val
String -> val
instance Show (Const t) where
show Bool = "Bool"
show Int = "Int"
show Float = "Float"
show String = "String"
showTuple :: Type t -> String
showTuple tp = "(" ++ intercalate ", " (collect tp) ++ ")"
where
collect :: Type t -> [String]
collect (Pair t1 t2) = collect t1 ++ collect t2
collect (Iso _ t) = collect t
collect t = [show t]
---------------------------------------------------------------
tError :: Type t -> Type (Either String t)
tError = (:|:) tString
tIO :: Type t -> Type (IO t)
tIO = IO
tGCM :: Type t -> Type (GCM t)
tGCM = GCM
tPort :: CPType t => Type t -> Type (Port t)
tPort t = Port' t
(#) :: String -> Type t -> Type t
(#) = Tag
(@@) :: Contract t -> Type t -> Type t
(@@) = Contract
infixr 6 #
infixr 7 @@
infixr 5 .->
(.->) :: Type t1 -> Type t2 -> Type (t1 -> t2)
(.->) = (:->)
tMaybe :: Type t -> Type (Maybe t)
tMaybe t = Iso (f <-> g) (t :|: Unit)
where
f = either Just (const Nothing)
g = maybe (Right ()) Left
tList :: Type t -> Type [t]
tList = List
tUnit :: Type ()
tUnit = Unit
tPair :: Type t1 -> Type t2 -> Type (t1, t2)
tPair = Pair
tString :: Type String
tString = Const String
tBool :: Type Bool
tBool = Const Bool
tInt :: Type Int
tInt = Const Int
tFloat :: Type Float
tFloat = Const Float
tTuple3 :: Type t1 -> Type t2 -> Type t3 -> Type (t1, t2, t3)
tTuple3 t1 t2 t3 = Iso (f <-> g) (Pair t1 (Pair t2 t3))
where
f (a, (b, c)) = (a, b, c)
g (a, b, c) = (a, (b, c))
tTuple4 :: Type t1 -> Type t2 -> Type t3 -> Type t4 -> Type (t1, t2, t3, t4)
tTuple4 t1 t2 t3 t4 = Iso (f <-> g) (Pair t1 (Pair t2 (Pair t3 t4)))
where
f (a, (b, (c, d))) = (a, b, c, d)
g (a, b, c, d) = (a, (b, (c, d)))
tTuple5 :: Type t1 -> Type t2 -> Type t3 -> Type t4 -> Type t5 -> Type (t1, t2, t3, t4, t5)
tTuple5 t1 t2 t3 t4 t5 = Iso (f <-> g) (Pair t1 (Pair t2 (Pair t3 (Pair t4 t5))))
where
f (a, (b, (c, (d, e)))) = (a, b, c, d, e)
g (a, b, c, d, e) = (a, (b, (c, (d, e))))
-----------------------------------------------------------------------------
-- Type equality
class Equal f where
equal :: f a -> f b -> Maybe (a -> b)
equalM :: Monad m => Type t1 -> Type t2 -> m (t1 -> t2)
equalM t1 t2 = maybe (fail msg) return (equal t1 t2)
where
msg = "Types not equal: " ++ show t1 ++ " and " ++ show t2
instance Equal Type where
equal (Iso p a) t2 = fmap (. to p) (equal a t2)
equal t1 (Iso p b) = fmap (from p .) (equal t1 b)
equal (a :-> b) (c :-> d) = liftM2 (\f g h -> g . h . f)
(equal c a) (equal b d)
equal (Pair a b) (Pair c d) = liftM2 (***) (equal a c) (equal b d)
equal (a :|: b) (c :|: d) = liftM2 biMap (equal a c) (equal b d)
equal (List a) (List b) = fmap map (equal a b)
equal (Tag s1 a) t2 = equal a t2
equal t1 (Tag s2 b) = equal t1 b
equal Unit Unit = Just id
equal (Const a) (Const b) = equal a b
equal (Port' a) (Port' b) = fmap (\f -> fmap f) $ equal a b
equal (Contract c a) t2 = equal a t2
equal t1 (Contract c b) = equal t1 b
equal _ _ = Nothing
instance Equal Const where
equal Int Int = Just id
equal Bool Bool = Just id
equal Float Float = Just id
equal String String = Just id
equal _ _ = Nothing
findValuesOfType :: Type t -> TypedValue -> [t]
findValuesOfType thisType = rec
where
rec tv@(a ::: tp) =
case equal tp thisType of
Just f -> [f a]
Nothing -> recDown tv
recDown (a ::: tp) =
case tp of
Iso iso t -> rec (to iso a ::: t)
Tag _ t -> rec (a ::: t)
Contract _ t -> rec (a ::: t)
List t -> concatMap (\b -> rec (b ::: t)) a
Pair t1 t2 -> rec (fst a ::: t1) ++ rec (snd a ::: t2)
t1 :|: t2 -> either (\b -> rec (b ::: t1)) (\b -> rec (b ::: t2)) a
_ -> []
-- Evaluation of typed values
eval :: (IsTyped t, IsTyped a) => TypedValue -> a -> GCM t
eval tv x = rec tv
where
rec tv@(val ::: t) = case t of
Tag _ t' -> rec (val ::: t')
Contract _ t' -> rec (val ::: t')
a :-> b :-> c -> rec (uncurry val ::: Pair a b :-> c)
a :-> b -> castT a x >>= \x' -> rec (val x' ::: b)
GCM t -> val >>= \a -> rec (a ::: t)
_ -> fromTyped tv
-- Check type
castT :: (IsTyped a, Monad m) => Type t -> a -> m t
castT t x = equalM (typeOf x) t >>= \f -> return (f x)
cast :: (IsTyped a, IsTyped b, Monad m) => a -> m b
cast = castT (typeOf (undefined :: b))
-- Conversion to and from typed values
class IsTyped a where
typeOf :: a -> Type a
toTyped :: a -> TypedValue
toTyped x = x ::: typeOf x
fromTyped :: Monad m => TypedValue -> m a
instance IsTyped Int where
typeOf _ = tInt
fromTyped (x ::: Const Int) = return x
fromTyped _ = fail errMsg
instance IsTyped Float where
typeOf _ = tFloat
fromTyped (x ::: Const Float) = return x
fromTyped _ = fail errMsg
instance {-# OVERLAPPING #-} IsTyped String where
typeOf _ = tString
fromTyped (x ::: Const String) = return x
fromTyped _ = fail errMsg
instance (CPType a, IsTyped a) => IsTyped (Port a) where
typeOf (Port _) = tPort (typeOf (undefined :: a))
fromTyped (x ::: t@(Port' _)) = do
f <- equalM t $ tPort (typeOf (undefined :: a))
return (f x)
fromTyped _ = fail errMsg
instance IsTyped Bool where
typeOf _ = tBool
fromTyped (x ::: Const Bool) = return x
fromTyped _ = fail errMsg
instance (IsTyped a, IsTyped b) => IsTyped (a, b) where
typeOf (x, y) = tPair (typeOf x) (typeOf y)
fromTyped (p ::: t@(Pair a b)) = do
f <- equalM t $ tPair (typeOf (undefined :: a))
(typeOf (undefined :: b))
return (f p)
fromTyped _ = fail errMsg
instance IsTyped a => IsTyped [a] where
typeOf _ = tList (typeOf (undefined :: a))
fromTyped (xs ::: t@(List a)) = do
f <- equalM t $ tList (typeOf (undefined :: a))
return (f xs)
fromTyped _ = fail errMsg
errMsg :: String
errMsg = "fromTyped failed"
|
GRACeFUL-project/GRACe
|
src/Types.hs
|
bsd-3-clause
| 12,361
| 51
| 26
| 4,180
| 4,932
| 2,543
| 2,389
| -1
| -1
|
{-# OPTIONS_GHC -fllvm -O2 #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
import Criterion
import Criterion.Main
import Prelude (IO, toRational)
import qualified Data.Vector.Storable as VS
import SubHask
import SubHask.Algebra.HMatrix
import HLearn.History
import HLearn.History.DisplayMethods
import HLearn.Optimization.Common
-- {-# NOINLINE emptyReturn #-}
-- emptyReturn :: (HistoryMonad m, Reportable m Double) => Int -> m Double
emptyReturn n = optimize return (10::Double) (maxIterations n)
emptyReport n = optimize report (10::Double) (maxIterations n)
emptyCollectReports (n::Int) = optimize
(\itr -> optimize report itr ( maxIterations 10) )
(10::Double)
(maxIterations $ round $ toRational n / 10)
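-- Rough accounting (comment added for clarity, not in the original file):
-- the outer optimize runs roughly n/10 iterations and each inner optimize
-- issues 10 reports, so emptyCollectReports n performs on the order of n
-- reports overall, making it directly comparable to emptyReport n below.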
main = do
defaultMain
[ bgroup "simpleHistory"
[ bgroup "emptyReturn"
[ bench "10" $ nf (runSimpleHistory . emptyReturn) 10
, bench "100" $ nf (runSimpleHistory . emptyReturn) 100
, bench "1000" $ nf (runSimpleHistory . emptyReturn) 1000
, bench "10000" $ nf (runSimpleHistory . emptyReturn) 10000
, bench "100000" $ nf (runSimpleHistory . emptyReturn) 100000
, bench "1000000" $ nf (runSimpleHistory . emptyReturn) 1000000
]
, bgroup "emptyReport"
[ bench "10" $ nf (runSimpleHistory . emptyReport) 10
, bench "100" $ nf (runSimpleHistory . emptyReport) 100
, bench "1000" $ nf (runSimpleHistory . emptyReport) 1000
, bench "10000" $ nf (runSimpleHistory . emptyReport) 10000
, bench "100000" $ nf (runSimpleHistory . emptyReport) 100000
, bench "1000000" $ nf (runSimpleHistory . emptyReport) 1000000
]
, bgroup "collectReports . emptyReport"
[ bench "10" $ nf (runSimpleHistory . collectReports . emptyReport) 10
, bench "100" $ nf (runSimpleHistory . collectReports . emptyReport) 100
, bench "1000" $ nf (runSimpleHistory . collectReports . emptyReport) 1000
, bench "10000" $ nf (runSimpleHistory . collectReports . emptyReport) 10000
, bench "100000" $ nf (runSimpleHistory . collectReports . emptyReport) 100000
, bench "1000000" $ nf (runSimpleHistory . collectReports . emptyReport) 1000000
]
, bgroup "emptyCollectReports"
[ bench "10" $ nf (runSimpleHistory . emptyCollectReports) 10
, bench "100" $ nf (runSimpleHistory . emptyCollectReports) 100
, bench "1000" $ nf (runSimpleHistory . emptyCollectReports) 1000
, bench "10000" $ nf (runSimpleHistory . emptyCollectReports) 10000
, bench "100000" $ nf (runSimpleHistory . emptyCollectReports) 100000
, bench "1000000" $ nf (runSimpleHistory . emptyCollectReports) 1000000
]
]
, bgroup "dynamicHistory"
[ bgroup "emptyReturn"
[ bench "10" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReturn 10
, bench "100" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReturn 100
, bench "1000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReturn 1000
, bench "10000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReturn 10000
, bench "100000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReturn 100000
, bench "1000000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReturn 1000000
]
, bgroup "emptyReport"
[ bench "10" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReport 10
, bench "100" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReport 100
, bench "1000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReport 1000
, bench "10000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReport 10000
, bench "100000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReport 100000
, bench "1000000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyReport 1000000
]
, bgroup "emptyCollectReports"
[ bench "10" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyCollectReports 10
, bench "100" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyCollectReports 100
, bench "1000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyCollectReports 1000
, bench "10000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyCollectReports 10000
, bench "100000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyCollectReports 100000
, bench "1000000" $ nfIO $ runDynamicHistory idDisplayMethod $ emptyCollectReports 1000000
]
]
]
|
iamkingmaker/HLearn
|
examples/criterion/historyOverhead.hs
|
bsd-3-clause
| 5,090
| 0
| 17
| 1,608
| 1,248
| 619
| 629
| 71
| 1
|
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/Text/Internal/Lazy.hs" #-}
{-# LANGUAGE BangPatterns, DeriveDataTypeable #-}
{-# OPTIONS_HADDOCK not-home #-}
-- |
-- Module : Data.Text.Internal.Lazy
-- Copyright : (c) 2009, 2010 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : GHC
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- A module containing private 'Text' internals. This exposes the
-- 'Text' representation and low level construction functions.
-- Modules which extend the 'Text' system may need to use this module.
module Data.Text.Internal.Lazy
(
Text(..)
, chunk
, empty
, foldrChunks
, foldlChunks
-- * Data type invariant and abstraction functions
-- $invariant
, strictInvariant
, lazyInvariant
, showStructure
-- * Chunk allocation sizes
, defaultChunkSize
, smallChunkSize
, chunkOverhead
) where
import Data.Text ()
import Data.Text.Internal.Unsafe.Shift (shiftL)
import Data.Typeable (Typeable)
import Foreign.Storable (sizeOf)
import qualified Data.Text.Internal as T
data Text = Empty
| Chunk {-# UNPACK #-} !T.Text Text
deriving (Typeable)
-- $invariant
--
-- The data type invariant for lazy 'Text': Every 'Text' is either 'Empty' or
-- consists of non-null 'T.Text's. All functions must preserve this,
-- and the QC properties must check this.
-- | Check the invariant strictly.
strictInvariant :: Text -> Bool
strictInvariant Empty = True
strictInvariant x@(Chunk (T.Text _ _ len) cs)
| len > 0 = strictInvariant cs
| otherwise = error $ "Data.Text.Lazy: invariant violation: "
++ showStructure x
-- | Check the invariant lazily.
lazyInvariant :: Text -> Text
lazyInvariant Empty = Empty
lazyInvariant x@(Chunk c@(T.Text _ _ len) cs)
| len > 0 = Chunk c (lazyInvariant cs)
| otherwise = error $ "Data.Text.Lazy: invariant violation: "
++ showStructure x
-- | Display the internal structure of a lazy 'Text'.
showStructure :: Text -> String
showStructure Empty = "Empty"
showStructure (Chunk t Empty) = "Chunk " ++ show t ++ " Empty"
showStructure (Chunk t ts) =
"Chunk " ++ show t ++ " (" ++ showStructure ts ++ ")"
-- | Smart constructor for 'Chunk'. Guarantees the data type invariant.
chunk :: T.Text -> Text -> Text
{-# INLINE chunk #-}
chunk t@(T.Text _ _ len) ts | len == 0 = ts
| otherwise = Chunk t ts
-- | Smart constructor for 'Empty'.
empty :: Text
{-# INLINE [0] empty #-}
empty = Empty
-- | Consume the chunks of a lazy 'Text' with a natural right fold.
foldrChunks :: (T.Text -> a -> a) -> a -> Text -> a
foldrChunks f z = go
where go Empty = z
go (Chunk c cs) = f c (go cs)
{-# INLINE foldrChunks #-}
-- | Consume the chunks of a lazy 'Text' with a strict, tail-recursive,
-- accumulating left fold.
foldlChunks :: (a -> T.Text -> a) -> a -> Text -> a
foldlChunks f z = go z
where go !a Empty = a
go !a (Chunk c cs) = go (f a c) cs
{-# INLINE foldlChunks #-}
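-- Small usage sketches (comments only, not part of the original module):
-- the chunk folds are the building blocks for the public lazy-Text API,
-- for example
--
-- toChunks :: Text -> [T.Text]
-- toChunks = foldrChunks (:) []
--
-- chunkCount :: Text -> Int
-- chunkCount = foldlChunks (\n _ -> n + 1) 0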
-- | Currently set to 16 KiB, less the memory management overhead.
defaultChunkSize :: Int
defaultChunkSize = 16384 - chunkOverhead
{-# INLINE defaultChunkSize #-}
-- | Currently set to 128 bytes, less the memory management overhead.
smallChunkSize :: Int
smallChunkSize = 128 - chunkOverhead
{-# INLINE smallChunkSize #-}
-- | The memory management overhead. Currently this is tuned for GHC only.
chunkOverhead :: Int
chunkOverhead = sizeOf (undefined :: Int) `shiftL` 1
{-# INLINE chunkOverhead #-}
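-- Worked example (not in the original source): on a typical 64-bit GHC,
-- sizeOf (undefined :: Int) == 8, so chunkOverhead == 16, giving
-- defaultChunkSize == 16384 - 16 == 16368 bytes and
-- smallChunkSize == 128 - 16 == 112 bytes.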
|
phischu/fragnix
|
tests/packages/scotty/Data.Text.Internal.Lazy.hs
|
bsd-3-clause
| 3,790
| 0
| 12
| 854
| 715
| 399
| 316
| 68
| 2
|
{-# LANGUAGE DeriveDataTypeable, DeriveFunctor, DeriveFoldable, DeriveTraversable, StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.Types
-- Copyright : (c) Simon Marlow 2003-2006,
-- David Waern 2006-2009,
-- Mateusz Kowalczyk 2013
-- License : BSD-like
--
-- Maintainer  :  haddock@projects.haskell.org
-- Stability : experimental
-- Portability : portable
--
-- Types that are commonly used throughout Haddock. Some of the most
-- important types are defined here, like 'Interface' and 'DocName'.
-----------------------------------------------------------------------------
module Haddock.Types (
module Haddock.Types
, HsDocString, LHsDocString
, Fixity(..)
, module Documentation.Haddock.Types
) where
import Control.Exception
import Control.Arrow hiding ((<+>))
import Control.DeepSeq
import Data.Typeable
import Data.Map (Map)
import qualified Data.Map as Map
import Documentation.Haddock.Types
import BasicTypes (Fixity(..))
import GHC hiding (NoLink)
import DynFlags (ExtensionFlag, Language)
import OccName
import Outputable
import Control.Monad (ap)
-----------------------------------------------------------------------------
-- * Convenient synonyms
-----------------------------------------------------------------------------
type IfaceMap = Map Module Interface
type InstIfaceMap = Map Module InstalledInterface -- TODO: rename
type DocMap a = Map Name (MDoc a)
type ArgMap a = Map Name (Map Int (MDoc a))
type SubMap = Map Name [Name]
type DeclMap = Map Name [LHsDecl Name]
type InstMap = Map SrcSpan Name
type FixMap = Map Name Fixity
type SrcMap = Map PackageKey FilePath
type DocPaths = (FilePath, Maybe FilePath) -- paths to HTML and sources
-----------------------------------------------------------------------------
-- * Interface
-----------------------------------------------------------------------------
-- | 'Interface' holds all information used to render a single Haddock page.
-- It represents the /interface/ of a module. The core business of Haddock
-- lies in creating this structure. Note that the record contains some fields
-- that are only used to create the final record, and that are not used by the
-- backends.
data Interface = Interface
{
-- | The module behind this interface.
ifaceMod :: !Module
-- | Original file name of the module.
, ifaceOrigFilename :: !FilePath
-- | Textual information about the module.
, ifaceInfo :: !(HaddockModInfo Name)
-- | Documentation header.
, ifaceDoc :: !(Documentation Name)
-- | Documentation header with cross-reference information.
, ifaceRnDoc :: !(Documentation DocName)
-- | Haddock options for this module (prune, ignore-exports, etc).
, ifaceOptions :: ![DocOption]
-- | Declarations originating from the module. Excludes declarations without
-- names (instances and stand-alone documentation comments). Includes
-- names of subordinate declarations mapped to their parent declarations.
, ifaceDeclMap :: !(Map Name [LHsDecl Name])
-- | Documentation of declarations originating from the module (including
-- subordinates).
, ifaceDocMap :: !(DocMap Name)
, ifaceArgMap :: !(ArgMap Name)
-- | Documentation of declarations originating from the module (including
-- subordinates).
, ifaceRnDocMap :: !(DocMap DocName)
, ifaceRnArgMap :: !(ArgMap DocName)
, ifaceSubMap :: !(Map Name [Name])
, ifaceFixMap :: !(Map Name Fixity)
, ifaceExportItems :: ![ExportItem Name]
, ifaceRnExportItems :: ![ExportItem DocName]
-- | All names exported by the module.
, ifaceExports :: ![Name]
-- | All \"visible\" names exported by the module.
-- A visible name is a name that will show up in the documentation of the
-- module.
, ifaceVisibleExports :: ![Name]
-- | Aliases of module imports as in @import A.B.C as C@.
, ifaceModuleAliases :: !AliasMap
-- | Instances exported by the module.
, ifaceInstances :: ![ClsInst]
, ifaceFamInstances :: ![FamInst]
-- | The number of haddockable and haddocked items in the module, as a
-- tuple. Haddockable items are the exports and the module itself.
, ifaceHaddockCoverage :: !(Int, Int)
-- | Warnings for things defined in this module.
, ifaceWarningMap :: !WarningMap
}
type WarningMap = Map Name (Doc Name)
-- | A subset of the fields of 'Interface' that we store in the interface
-- files.
data InstalledInterface = InstalledInterface
{
-- | The module represented by this interface.
instMod :: Module
-- | Textual information about the module.
, instInfo :: HaddockModInfo Name
-- | Documentation of declarations originating from the module (including
-- subordinates).
, instDocMap :: DocMap Name
, instArgMap :: ArgMap Name
-- | All names exported by this module.
, instExports :: [Name]
-- | All \"visible\" names exported by the module.
-- A visible name is a name that will show up in the documentation of the
-- module.
, instVisibleExports :: [Name]
-- | Haddock options for this module (prune, ignore-exports, etc).
, instOptions :: [DocOption]
, instSubMap :: Map Name [Name]
, instFixMap :: Map Name Fixity
}
-- | Convert an 'Interface' to an 'InstalledInterface'
toInstalledIface :: Interface -> InstalledInterface
toInstalledIface interface = InstalledInterface
{ instMod = ifaceMod interface
, instInfo = ifaceInfo interface
, instDocMap = ifaceDocMap interface
, instArgMap = ifaceArgMap interface
, instExports = ifaceExports interface
, instVisibleExports = ifaceVisibleExports interface
, instOptions = ifaceOptions interface
, instSubMap = ifaceSubMap interface
, instFixMap = ifaceFixMap interface
}
-----------------------------------------------------------------------------
-- * Export items & declarations
-----------------------------------------------------------------------------
data ExportItem name
-- | An exported declaration.
= ExportDecl
{
-- | A declaration.
expItemDecl :: !(LHsDecl name)
-- | Maybe a doc comment, and possibly docs for arguments (if this
-- decl is a function or type-synonym).
, expItemMbDoc :: !(DocForDecl name)
-- | Subordinate names, possibly with documentation.
, expItemSubDocs :: ![(name, DocForDecl name)]
-- | Instances relevant to this declaration, possibly with
-- documentation.
, expItemInstances :: ![DocInstance name]
-- | Fixity decls relevant to this declaration (including subordinates).
, expItemFixities :: ![(name, Fixity)]
-- | Whether the ExportItem is from a TH splice or not, for generating
-- the appropriate type of Source link.
, expItemSpliced :: !Bool
}
-- | An exported entity for which we have no documentation (perhaps because it
-- resides in another package).
| ExportNoDecl
{ expItemName :: !name
-- | Subordinate names.
, expItemSubs :: ![name]
}
-- | A section heading.
| ExportGroup
{
-- | Section level (1, 2, 3, ...).
expItemSectionLevel :: !Int
-- | Section id (for hyperlinks).
, expItemSectionId :: !String
-- | Section heading text.
, expItemSectionText :: !(Doc name)
}
-- | Some documentation.
| ExportDoc !(MDoc name)
-- | A cross-reference to another module.
| ExportModule !Module
data Documentation name = Documentation
{ documentationDoc :: Maybe (MDoc name)
, documentationWarning :: !(Maybe (Doc name))
} deriving Functor
-- | Arguments and result are indexed by Int, zero-based from the left,
-- because that's the easiest to use when recursing over types.
type FnArgsDoc name = Map Int (MDoc name)
type DocForDecl name = (Documentation name, FnArgsDoc name)
noDocForDecl :: DocForDecl name
noDocForDecl = (Documentation Nothing Nothing, Map.empty)
unrenameDocForDecl :: DocForDecl DocName -> DocForDecl Name
unrenameDocForDecl (doc, fnArgsDoc) =
(fmap getName doc, (fmap . fmap) getName fnArgsDoc)
-----------------------------------------------------------------------------
-- * Cross-referencing
-----------------------------------------------------------------------------
-- | Type of environment used to cross-reference identifiers in the syntax.
type LinkEnv = Map Name Module
-- | Extends 'Name' with cross-reference information.
data DocName
= Documented Name Module
-- ^ This thing is part of the (existing or resulting)
-- documentation. The 'Module' is the preferred place
-- in the documentation to refer to.
| Undocumented Name
-- ^ This thing is not part of the (existing or resulting)
-- documentation, as far as Haddock knows.
deriving Eq
instance NamedThing DocName where
getName (Documented name _) = name
getName (Undocumented name) = name
-----------------------------------------------------------------------------
-- * Instances
-----------------------------------------------------------------------------
-- | The three types of instances
data InstType name
= ClassInst [HsType name] -- ^ Context
| TypeInst (Maybe (HsType name)) -- ^ Body (right-hand side)
| DataInst (TyClDecl name) -- ^ Data constructors
instance OutputableBndr a => Outputable (InstType a) where
ppr (ClassInst a) = text "ClassInst" <+> ppr a
ppr (TypeInst a) = text "TypeInst" <+> ppr a
ppr (DataInst a) = text "DataInst" <+> ppr a
-- | An instance head that may have documentation and a source location.
type DocInstance name = (Located (InstHead name), Maybe (MDoc name))
-- | The head of an instance. Consists of a class name, a list of kind
-- parameters, a list of type parameters and an instance type
type InstHead name = (name, [HsType name], [HsType name], InstType name)
-----------------------------------------------------------------------------
-- * Documentation comments
-----------------------------------------------------------------------------
type LDoc id = Located (Doc id)
type Doc id = DocH (ModuleName, OccName) id
type MDoc id = MetaDoc (ModuleName, OccName) id
instance (NFData a, NFData mod)
=> NFData (DocH mod a) where
rnf doc = case doc of
DocEmpty -> ()
DocAppend a b -> a `deepseq` b `deepseq` ()
DocString a -> a `deepseq` ()
DocParagraph a -> a `deepseq` ()
DocIdentifier a -> a `deepseq` ()
DocIdentifierUnchecked a -> a `deepseq` ()
DocModule a -> a `deepseq` ()
DocWarning a -> a `deepseq` ()
DocEmphasis a -> a `deepseq` ()
DocBold a -> a `deepseq` ()
DocMonospaced a -> a `deepseq` ()
DocUnorderedList a -> a `deepseq` ()
DocOrderedList a -> a `deepseq` ()
DocDefList a -> a `deepseq` ()
DocCodeBlock a -> a `deepseq` ()
DocHyperlink a -> a `deepseq` ()
DocPic a -> a `deepseq` ()
DocAName a -> a `deepseq` ()
DocProperty a -> a `deepseq` ()
DocExamples a -> a `deepseq` ()
DocHeader a -> a `deepseq` ()
instance NFData Name where rnf x = seq x ()
instance NFData OccName where rnf x = seq x ()
instance NFData ModuleName where rnf x = seq x ()
instance NFData id => NFData (Header id) where
rnf (Header a b) = a `deepseq` b `deepseq` ()
instance NFData Hyperlink where
rnf (Hyperlink a b) = a `deepseq` b `deepseq` ()
instance NFData Picture where
rnf (Picture a b) = a `deepseq` b `deepseq` ()
instance NFData Example where
rnf (Example a b) = a `deepseq` b `deepseq` ()
exampleToString :: Example -> String
exampleToString (Example expression result) =
">>> " ++ expression ++ "\n" ++ unlines result
data DocMarkup id a = Markup
{ markupEmpty :: a
, markupString :: String -> a
, markupParagraph :: a -> a
, markupAppend :: a -> a -> a
, markupIdentifier :: id -> a
, markupIdentifierUnchecked :: (ModuleName, OccName) -> a
, markupModule :: String -> a
, markupWarning :: a -> a
, markupEmphasis :: a -> a
, markupBold :: a -> a
, markupMonospaced :: a -> a
, markupUnorderedList :: [a] -> a
, markupOrderedList :: [a] -> a
, markupDefList :: [(a,a)] -> a
, markupCodeBlock :: a -> a
, markupHyperlink :: Hyperlink -> a
, markupAName :: String -> a
, markupPic :: Picture -> a
, markupProperty :: String -> a
, markupExample :: [Example] -> a
, markupHeader :: Header a -> a
}
data HaddockModInfo name = HaddockModInfo
{ hmi_description :: Maybe (Doc name)
, hmi_copyright :: Maybe String
, hmi_license :: Maybe String
, hmi_maintainer :: Maybe String
, hmi_stability :: Maybe String
, hmi_portability :: Maybe String
, hmi_safety :: Maybe String
, hmi_language :: Maybe Language
, hmi_extensions :: [ExtensionFlag]
}
emptyHaddockModInfo :: HaddockModInfo a
emptyHaddockModInfo = HaddockModInfo
{ hmi_description = Nothing
, hmi_copyright = Nothing
, hmi_license = Nothing
, hmi_maintainer = Nothing
, hmi_stability = Nothing
, hmi_portability = Nothing
, hmi_safety = Nothing
, hmi_language = Nothing
, hmi_extensions = []
}
-----------------------------------------------------------------------------
-- * Options
-----------------------------------------------------------------------------
{-! for DocOption derive: Binary !-}
-- | Source-level options for controlling the documentation.
data DocOption
= OptHide -- ^ This module should not appear in the docs.
| OptPrune
| OptIgnoreExports -- ^ Pretend everything is exported.
| OptNotHome -- ^ Not the best place to get docs for things
-- exported by this module.
| OptShowExtensions -- ^ Render enabled extensions for this module.
deriving (Eq, Show)
-- | Option controlling how to qualify names
data QualOption
= OptNoQual -- ^ Never qualify any names.
| OptFullQual -- ^ Qualify all names fully.
| OptLocalQual -- ^ Qualify all imported names fully.
| OptRelativeQual -- ^ Like local, but strip module prefix
-- from modules in the same hierarchy.
| OptAliasedQual -- ^ Uses aliases of module names
-- as suggested by module import renamings.
-- However, we are unfortunately not able
-- to maintain the original qualifications.
                        --   Imagine a re-export of a whole module,
-- how could the re-exported identifiers be qualified?
type AliasMap = Map Module ModuleName
data Qualification
= NoQual
| FullQual
| LocalQual Module
| RelativeQual Module
| AliasedQual AliasMap Module
-- ^ @Module@ contains the current module.
-- This way we can distinguish imported and local identifiers.
makeContentsQual :: QualOption -> Qualification
makeContentsQual qual =
case qual of
OptNoQual -> NoQual
_ -> FullQual
makeModuleQual :: QualOption -> AliasMap -> Module -> Qualification
makeModuleQual qual aliases mdl =
case qual of
OptLocalQual -> LocalQual mdl
OptRelativeQual -> RelativeQual mdl
OptAliasedQual -> AliasedQual aliases mdl
OptFullQual -> FullQual
OptNoQual -> NoQual
-----------------------------------------------------------------------------
-- * Error handling
-----------------------------------------------------------------------------
-- A monad which collects error messages, locally defined to avoid a dep on mtl
type ErrMsg = String
newtype ErrMsgM a = Writer { runWriter :: (a, [ErrMsg]) }
instance Functor ErrMsgM where
fmap f (Writer (a, msgs)) = Writer (f a, msgs)
instance Applicative ErrMsgM where
pure = return
(<*>) = ap
instance Monad ErrMsgM where
return a = Writer (a, [])
m >>= k = Writer $ let
(a, w) = runWriter m
(b, w') = runWriter (k a)
in (b, w ++ w')
tell :: [ErrMsg] -> ErrMsgM ()
tell w = Writer ((), w)
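-- A small sketch of ErrMsgM in action (comment only, not in the original
-- source): messages accumulate alongside the result, e.g.
-- runWriter (tell ["warning"] >> return (5 :: Int)) == (5, ["warning"])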
-- Exceptions
-- | Haddock's own exception type.
data HaddockException = HaddockException String deriving Typeable
instance Show HaddockException where
show (HaddockException str) = str
throwE :: String -> a
throwE str = throw (HaddockException str)
instance Exception HaddockException
-- In "Haddock.Interface.Create", we need to gather
-- @Haddock.Types.ErrMsg@s a lot, like @ErrMsgM@ does,
-- but we can't just use @GhcT ErrMsgM@ because GhcT requires the
-- transformed monad to be MonadIO.
newtype ErrMsgGhc a = WriterGhc { runWriterGhc :: Ghc (a, [ErrMsg]) }
--instance MonadIO ErrMsgGhc where
-- liftIO = WriterGhc . fmap (\a->(a,[])) liftIO
--er, implementing GhcMonad involves annoying ExceptionMonad and
--WarnLogMonad classes, so don't bother.
liftGhcToErrMsgGhc :: Ghc a -> ErrMsgGhc a
liftGhcToErrMsgGhc = WriterGhc . fmap (\a->(a,[]))
liftErrMsg :: ErrMsgM a -> ErrMsgGhc a
liftErrMsg = WriterGhc . return . runWriter
-- for now, use (liftErrMsg . tell) for this
--tell :: [ErrMsg] -> ErrMsgGhc ()
--tell msgs = WriterGhc $ return ( (), msgs )
instance Functor ErrMsgGhc where
fmap f (WriterGhc x) = WriterGhc (fmap (first f) x)
instance Applicative ErrMsgGhc where
pure = return
(<*>) = ap
instance Monad ErrMsgGhc where
return a = WriterGhc (return (a, []))
m >>= k = WriterGhc $ runWriterGhc m >>= \ (a, msgs1) ->
fmap (second (msgs1 ++)) (runWriterGhc (k a))
|
DavidAlphaFox/ghc
|
utils/haddock/haddock-api/src/Haddock/Types.hs
|
bsd-3-clause
| 18,462
| 0
| 13
| 4,700
| 3,382
| 1,940
| 1,442
| 345
| 5
|
{-# LANGUAGE PackageImports #-}
module HLint () where
import "hint" HLint.Default
import "hint" HLint.Dollar
|
rubik/moodle-to-latex
|
HLint.hs
|
bsd-3-clause
| 111
| 0
| 4
| 16
| 20
| 14
| 6
| 4
| 0
|
{-# language FunctionalDependencies, ViewPatterns, ExistentialQuantification #-}
module Object.Types where
import Data.Dynamic
import Data.SelectTree
import Graphics.Qt as Qt
import Physics.Chipmunk hiding (Position, collisionType)
import Utils
import Base
mkSortsSelectTree :: [Sort_] -> SelectTree Sort_
mkSortsSelectTree sorts =
foldl (flip addSort) (EmptyNode "") sorts
where
addSort :: Sort_ -> SelectTree Sort_ -> SelectTree Sort_
addSort sort t = addByPrefix prefix label sort t
where
sortIdParts = wordsBy ['/'] $ getSortId $ sortId sort
prefix = init sortIdParts
label = last sortIdParts
-- | adds an element by a given prefix to a SelectTree. If branches with needed labels
-- are missing, they are created.
-- PRE: The tree is not a Leaf.
addByPrefix :: [String] -> String -> a -> SelectTree a -> SelectTree a
addByPrefix _ _ _ (Leaf _ _) = error "addByPrefix"
addByPrefix (a : r) label x node =
-- prefixes left: the tree needs to be descended further
if any (\ subTree -> subTree ^. labelA == a) (getChildren node) then
-- if the child already exists
modifyLabelled a (addByPrefix r label x) node
else
-- the branch doesn't exist, it's created
addChild (addByPrefix r label x (EmptyNode a)) node
addByPrefix [] label x node =
-- no prefixes left: here the element is added
addChild (Leaf label x) node
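-- Illustrative walk-through (not in the original file), assuming wordsBy
-- splits on the given characters: a sort with id "tiles/grass" yields
-- prefix ["tiles"] and label "grass", so addByPrefix ["tiles"] "grass" s
-- descends into (or creates) the branch labelled "tiles" and then attaches
-- (Leaf "grass" s) beneath it.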
-- | object rendering without providing the sort
renderObject_ :: Application -> Configuration
-> Object_ -> Ptr QPainter -> Offset Double -> Seconds -> IO [RenderPixmap]
renderObject_ app config (Object_ sort o) = renderObject app config o sort
wrapObjectModifier :: Sort s o => (o -> o) -> Object_ -> Object_
wrapObjectModifier f (Object_ s o) =
case (cast s, cast o) of
(Just s_, Just o_) -> Object_ s_ (f o_)
-- * EditorObject
mkEditorObject :: Sort_ -> EditorPosition -> EditorObject Sort_
mkEditorObject sort pos =
EditorObject sort pos oemState
where
oemState = fmap (\ methods -> oemInitialize methods pos) $ objectEditMode sort
renderChipmunk :: Ptr QPainter -> Offset Double -> Pixmap -> Chipmunk -> IO ()
renderChipmunk painter worldOffset p chipmunk = do
(position, angle) <- getRenderPositionAndAngle chipmunk
renderPixmap painter worldOffset position (Just angle) p
-- * Object edit mode
unpickleOEM :: Sort_ -> String -> Maybe OEMState
unpickleOEM (objectEditMode -> Just methods) = oemUnpickle methods
|
geocurnoff/nikki
|
src/Object/Types.hs
|
lgpl-3.0
| 2,555
| 0
| 12
| 592
| 700
| 357
| 343
| 41
| 4
|
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE CPP #-}
module Hierarchy
( hierarchy
, Dispatcher (..)
, runHandler
, Handler2
, App
, toText
, Env (..)
, subDispatch
-- to avoid warnings
, deleteDelete2
, deleteDelete3
) where
import Test.Hspec
import Test.HUnit
import Yesod.Routes.Parse
import Yesod.Routes.TH
import Yesod.Routes.Class
import Language.Haskell.TH.Syntax
import Data.Text (Text, pack, unpack, append)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S8
import qualified Data.Set as Set
class ToText a where
toText :: a -> Text
instance ToText Text where toText = id
instance ToText String where toText = pack
type Handler2 sub master a = a
type Handler site a = Handler2 site site a
type Request = ([Text], ByteString) -- path info, method
type App sub master = Request -> (Text, Maybe (Route master))
data Env sub master = Env
{ envToMaster :: Route sub -> Route master
, envSub :: sub
, envMaster :: master
}
subDispatch
:: (Env sub master -> App sub master)
-> (Handler2 sub master Text -> Env sub master -> Maybe (Route sub) -> App sub master)
-> (master -> sub)
-> (Route sub -> Route master)
-> Env master master
-> App sub master
subDispatch handler _runHandler getSub toMaster env req =
handler env' req
where
env' = env
{ envToMaster = envToMaster env . toMaster
, envSub = getSub $ envMaster env
}
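-- Intuition (comment added for clarity, not in the original test): when a
-- request is handed to a subsite, envToMaster is pre-composed with toMaster
-- so routes produced inside the subsite still render as master routes, and
-- envSub is projected out of the master value with getSub.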
class Dispatcher sub master where
dispatcher :: Env sub master -> App sub master
runHandler
:: ToText a
=> Handler2 sub master a
-> Env sub master
-> Maybe (Route sub)
-> App sub master
runHandler h Env {..} route _ = (toText h, fmap envToMaster route)
data Hierarchy = Hierarchy
do
let resources = [parseRoutes|
/ HomeR GET
----------------------------------------
/!#Int BackwardsR GET
/admin/#Int AdminR:
/ AdminRootR GET
/login LoginR GET POST
/table/#Text TableR GET
/nest/ NestR !NestingAttr:
/spaces SpacedR GET !NonNested
/nest2 Nest2:
/ GetPostR GET POST
/get Get2 GET
/post Post2 POST
-- /#Int Delete2 DELETE
/nest3 Nest3:
/get Get3 GET
/post Post3 POST
-- /#Int Delete3 DELETE
/afterwards AfterR !parent !key=value1:
/ After GET !child !key=value2
-- /trailing-nest TrailingNestR:
-- /foo TrailingFooR GET
-- /#Int TrailingIntR GET
|]
rrinst <- mkRenderRouteInstance [] (ConT ''Hierarchy) $ map (fmap parseType) resources
rainst <- mkRouteAttrsInstance [] (ConT ''Hierarchy) $ map (fmap parseType) resources
prinst <- mkParseRouteInstance [] (ConT ''Hierarchy) $ map (fmap parseType) resources
dispatch <- mkDispatchClause MkDispatchSettings
{ mdsRunHandler = [|runHandler|]
, mdsSubDispatcher = [|subDispatch|]
, mdsGetPathInfo = [|fst|]
, mdsMethod = [|snd|]
, mdsSetPathInfo = [|\p (_, m) -> (p, m)|]
, mds404 = [|pack "404"|]
, mds405 = [|pack "405"|]
, mdsGetHandler = defaultGetHandler
, mdsUnwrapper = return
} resources
return
#if MIN_VERSION_template_haskell(2,11,0)
$ InstanceD Nothing
#else
$ InstanceD
#endif
[]
(ConT ''Dispatcher
`AppT` ConT ''Hierarchy
`AppT` ConT ''Hierarchy)
[FunD (mkName "dispatcher") [dispatch]]
: prinst
: rainst
: rrinst
getSpacedR :: Handler site String
getSpacedR = "root-leaf"
getGet2 :: Handler site String; getGet2 = "get"
postPost2 :: Handler site String; postPost2 = "post"
deleteDelete2 :: Int -> Handler site String; deleteDelete2 = const "delete"
getGet3 :: Handler site String; getGet3 = "get"
postPost3 :: Handler site String; postPost3 = "post"
deleteDelete3 :: Int -> Handler site String; deleteDelete3 = const "delete"
getAfter :: Handler site String; getAfter = "after"
getHomeR :: Handler site String
getHomeR = "home"
getBackwardsR :: Int -> Handler site Text
getBackwardsR _ = pack "backwards"
getAdminRootR :: Int -> Handler site Text
getAdminRootR i = pack $ "admin root: " ++ show i
getLoginR :: Int -> Handler site Text
getLoginR i = pack $ "login: " ++ show i
postLoginR :: Int -> Handler site Text
postLoginR i = pack $ "post login: " ++ show i
getTableR :: Int -> Text -> Handler site Text
getTableR _ = append "TableR "
getGetPostR :: Handler site Text
getGetPostR = pack "get"
postGetPostR :: Handler site Text
postGetPostR = pack "post"
hierarchy :: Spec
hierarchy = describe "hierarchy" $ do
it "nested with spacing" $
renderRoute (NestR SpacedR) @?= (["nest", "spaces"], [])
it "renders root correctly" $
renderRoute (AdminR 5 AdminRootR) @?= (["admin", "5"], [])
it "renders table correctly" $
renderRoute (AdminR 6 $ TableR "foo") @?= (["admin", "6", "table", "foo"], [])
let disp m ps = dispatcher
(Env
{ envToMaster = id
, envMaster = Hierarchy
, envSub = Hierarchy
})
(map pack ps, S8.pack m)
let testGetPost route getRes postRes = do
let routeStrs = map unpack $ fst (renderRoute route)
disp "GET" routeStrs @?= (getRes, Just route)
disp "POST" routeStrs @?= (postRes, Just route)
it "dispatches routes with multiple METHODs: admin" $
testGetPost (AdminR 1 LoginR) "login: 1" "post login: 1"
it "dispatches routes with multiple METHODs: nesting" $
testGetPost (NestR $ Nest2 GetPostR) "get" "post"
it "dispatches root correctly" $ disp "GET" ["admin", "7"] @?= ("admin root: 7", Just $ AdminR 7 AdminRootR)
it "dispatches table correctly" $ disp "GET" ["admin", "8", "table", "bar"] @?= ("TableR bar", Just $ AdminR 8 $ TableR "bar")
it "parses" $ do
parseRoute ([], []) @?= Just HomeR
parseRoute ([], [("foo", "bar")]) @?= Just HomeR
parseRoute (["admin", "5"], []) @?= Just (AdminR 5 AdminRootR)
parseRoute (["admin!", "5"], []) @?= (Nothing :: Maybe (Route Hierarchy))
it "inherited attributes" $ do
routeAttrs (NestR SpacedR) @?= Set.fromList ["NestingAttr", "NonNested"]
it "pair attributes" $
routeAttrs (AfterR After) @?= Set.fromList ["parent", "child", "key=value2"]
|
psibi/yesod
|
yesod-core/test/Hierarchy.hs
|
mit
| 6,737
| 0
| 19
| 1,765
| 1,857
| 985
| 872
| 149
| 1
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Taken quite directly from the Peyton Jones/Lester paper.
-}
{-# LANGUAGE CPP #-}
-- | A module concerned with finding the free variables of an expression.
module CoreFVs (
-- * Free variables of expressions and binding groups
exprFreeVars,
exprFreeVarsDSet,
exprFreeVarsList,
exprFreeIds,
exprFreeIdsDSet,
exprFreeIdsList,
exprsFreeIdsDSet,
exprsFreeIdsList,
exprsFreeVars,
exprsFreeVarsList,
bindFreeVars,
-- * Selective free variables of expressions
InterestingVarFun,
exprSomeFreeVars, exprsSomeFreeVars,
exprSomeFreeVarsList, exprsSomeFreeVarsList,
-- * Free variables of Rules, Vars and Ids
varTypeTyCoVars,
varTypeTyCoFVs,
idUnfoldingVars, idFreeVars, dIdFreeVars,
idRuleAndUnfoldingVars, idRuleAndUnfoldingVarsDSet,
idFVs,
idRuleVars, idRuleRhsVars, stableUnfoldingVars,
ruleRhsFreeVars, ruleFreeVars, rulesFreeVars,
rulesFreeVarsDSet,
ruleLhsFreeIds, ruleLhsFreeIdsList,
vectsFreeVars,
expr_fvs,
-- * Orphan names
orphNamesOfType, orphNamesOfCo, orphNamesOfAxiom,
orphNamesOfTypes, orphNamesOfCoCon,
exprsOrphNames, orphNamesOfFamInst,
-- * Core syntax tree annotation with free variables
FVAnn, -- annotation, abstract
CoreExprWithFVs, -- = AnnExpr Id FVAnn
CoreExprWithFVs', -- = AnnExpr' Id FVAnn
CoreBindWithFVs, -- = AnnBind Id FVAnn
CoreAltWithFVs, -- = AnnAlt Id FVAnn
freeVars, -- CoreExpr -> CoreExprWithFVs
freeVarsOf, -- CoreExprWithFVs -> DIdSet
freeVarsOfType, -- CoreExprWithFVs -> TyCoVarSet
freeVarsOfAnn, freeVarsOfTypeAnn,
exprTypeFV -- CoreExprWithFVs -> Type
) where
#include "HsVersions.h"
import CoreSyn
import Id
import IdInfo
import NameSet
import UniqFM
import Literal ( literalType )
import Name
import VarSet
import Var
import Type
import TyCoRep
import TyCon
import CoAxiom
import FamInstEnv
import TysPrim( funTyConName )
import Coercion
import Maybes( orElse )
import Util
import BasicTypes( Activation )
import Outputable
import FV
{-
************************************************************************
* *
\section{Finding the free variables of an expression}
* *
************************************************************************
This function simply finds the free variables of an expression.
So far as type variables are concerned, it only finds tyvars that are
* free in type arguments,
* free in the type of a binder,
but not those that are free in the type of a variable occurrence.
-}
-- | Find all locally-defined free Ids or type variables in an expression
-- returning a non-deterministic set.
exprFreeVars :: CoreExpr -> VarSet
exprFreeVars = fvVarSet . exprFVs
-- | Find all locally-defined free Ids or type variables in an expression
-- returning a composable FV computation. See Note [FV naming conventions] in FV
-- for why we export it.
exprFVs :: CoreExpr -> FV
exprFVs = filterFV isLocalVar . expr_fvs
-- | Find all locally-defined free Ids or type variables in an expression
-- returning a deterministic set.
exprFreeVarsDSet :: CoreExpr -> DVarSet
exprFreeVarsDSet = fvDVarSet . exprFVs
-- | Find all locally-defined free Ids or type variables in an expression
-- returning a deterministically ordered list.
exprFreeVarsList :: CoreExpr -> [Var]
exprFreeVarsList = fvVarList . exprFVs
-- | Find all locally-defined free Ids in an expression
exprFreeIds :: CoreExpr -> IdSet -- Find all locally-defined free Ids
exprFreeIds = exprSomeFreeVars isLocalId
-- | Find all locally-defined free Ids in an expression
-- returning a deterministic set.
exprFreeIdsDSet :: CoreExpr -> DIdSet -- Find all locally-defined free Ids
exprFreeIdsDSet = exprSomeFreeVarsDSet isLocalId
-- | Find all locally-defined free Ids in an expression
-- returning a deterministically ordered list.
exprFreeIdsList :: CoreExpr -> [Id] -- Find all locally-defined free Ids
exprFreeIdsList = exprSomeFreeVarsList isLocalId
-- | Find all locally-defined free Ids in several expressions
-- returning a deterministic set.
exprsFreeIdsDSet :: [CoreExpr] -> DIdSet -- Find all locally-defined free Ids
exprsFreeIdsDSet = exprsSomeFreeVarsDSet isLocalId
-- | Find all locally-defined free Ids in several expressions
-- returning a deterministically ordered list.
exprsFreeIdsList :: [CoreExpr] -> [Id] -- Find all locally-defined free Ids
exprsFreeIdsList = exprsSomeFreeVarsList isLocalId
-- | Find all locally-defined free Ids or type variables in several expressions
-- returning a non-deterministic set.
exprsFreeVars :: [CoreExpr] -> VarSet
exprsFreeVars = fvVarSet . exprsFVs
-- | Find all locally-defined free Ids or type variables in several expressions
-- returning a composable FV computation. See Note [FV naming conventions] in FV
-- for why we export it.
exprsFVs :: [CoreExpr] -> FV
exprsFVs exprs = mapUnionFV exprFVs exprs
-- | Find all locally-defined free Ids or type variables in several expressions
-- returning a deterministically ordered list.
exprsFreeVarsList :: [CoreExpr] -> [Var]
exprsFreeVarsList = fvVarList . exprsFVs
-- | Find all locally defined free Ids in a binding group
bindFreeVars :: CoreBind -> VarSet
bindFreeVars (NonRec b r) = fvVarSet $ filterFV isLocalVar $ rhs_fvs (b,r)
bindFreeVars (Rec prs) = fvVarSet $ filterFV isLocalVar $
addBndrs (map fst prs)
(mapUnionFV rhs_fvs prs)
-- | Finds free variables in an expression selected by a predicate
exprSomeFreeVars :: InterestingVarFun -- ^ Says which 'Var's are interesting
-> CoreExpr
-> VarSet
exprSomeFreeVars fv_cand e = fvVarSet $ filterFV fv_cand $ expr_fvs e
-- | Finds free variables in an expression selected by a predicate
-- returning a deterministically ordered list.
exprSomeFreeVarsList :: InterestingVarFun -- ^ Says which 'Var's are interesting
-> CoreExpr
-> [Var]
exprSomeFreeVarsList fv_cand e = fvVarList $ filterFV fv_cand $ expr_fvs e
-- | Finds free variables in an expression selected by a predicate
-- returning a deterministic set.
exprSomeFreeVarsDSet :: InterestingVarFun -- ^ Says which 'Var's are interesting
-> CoreExpr
-> DVarSet
exprSomeFreeVarsDSet fv_cand e = fvDVarSet $ filterFV fv_cand $ expr_fvs e
-- | Finds free variables in several expressions selected by a predicate
exprsSomeFreeVars :: InterestingVarFun -- Says which 'Var's are interesting
-> [CoreExpr]
-> VarSet
exprsSomeFreeVars fv_cand es =
fvVarSet $ filterFV fv_cand $ mapUnionFV expr_fvs es
-- | Finds free variables in several expressions selected by a predicate
-- returning a deterministically ordered list.
exprsSomeFreeVarsList :: InterestingVarFun -- Says which 'Var's are interesting
-> [CoreExpr]
-> [Var]
exprsSomeFreeVarsList fv_cand es =
fvVarList $ filterFV fv_cand $ mapUnionFV expr_fvs es
-- | Finds free variables in several expressions selected by a predicate
-- returning a deterministic set.
exprsSomeFreeVarsDSet :: InterestingVarFun -- ^ Says which 'Var's are interesting
-> [CoreExpr]
-> DVarSet
exprsSomeFreeVarsDSet fv_cand e =
fvDVarSet $ filterFV fv_cand $ mapUnionFV expr_fvs e
-- Comment about obsolete code
-- We used to gather the free variables of the RULES at a variable occurrence
-- with the following cryptic comment:
-- "At a variable occurrence, add in any free variables of its rule rhss
-- Curiously, we gather the Id's free *type* variables from its binding
-- site, but its free *rule-rhs* variables from its usage sites. This
-- is a little weird. The reason is that the former is more efficient,
-- but the latter is more fine grained, and makes a difference when
-- a variable mentions itself in one of its own rule RHSs"
-- Not only is this "weird", but it's also pretty bad because it can make
-- a function seem more recursive than it is. Suppose
-- f = ...g...
-- g = ...
-- RULE g x = ...f...
-- Then f is not mentioned in its own RHS, and needn't be a loop breaker
-- (though g may be). But if we collect the rule fvs from g's occurrence,
-- it looks as if f mentions itself. (This bites in the eftInt/eftIntFB
-- code in GHC.Enum.)
--
-- Anyway, it seems plain wrong. The RULE is like an extra RHS for the
-- function, so its free variables belong at the definition site.
--
-- Deleted code looked like
-- foldVarSet add_rule_var var_itself_set (idRuleVars var)
-- add_rule_var var set | keep_it fv_cand in_scope var = extendVarSet set var
-- | otherwise = set
-- SLPJ Feb06
addBndr :: CoreBndr -> FV -> FV
addBndr bndr fv fv_cand in_scope acc
= (varTypeTyCoFVs bndr `unionFV`
-- Include type variables in the binder's type
-- (not just Ids; coercion variables too!)
FV.delFV bndr fv) fv_cand in_scope acc
addBndrs :: [CoreBndr] -> FV -> FV
addBndrs bndrs fv = foldr addBndr fv bndrs
expr_fvs :: CoreExpr -> FV
expr_fvs (Type ty) fv_cand in_scope acc =
tyCoFVsOfType ty fv_cand in_scope acc
expr_fvs (Coercion co) fv_cand in_scope acc =
tyCoFVsOfCo co fv_cand in_scope acc
expr_fvs (Var var) fv_cand in_scope acc = FV.unitFV var fv_cand in_scope acc
expr_fvs (Lit _) fv_cand in_scope acc = emptyFV fv_cand in_scope acc
expr_fvs (Tick t expr) fv_cand in_scope acc =
(tickish_fvs t `unionFV` expr_fvs expr) fv_cand in_scope acc
expr_fvs (App fun arg) fv_cand in_scope acc =
(expr_fvs fun `unionFV` expr_fvs arg) fv_cand in_scope acc
expr_fvs (Lam bndr body) fv_cand in_scope acc =
addBndr bndr (expr_fvs body) fv_cand in_scope acc
expr_fvs (Cast expr co) fv_cand in_scope acc =
(expr_fvs expr `unionFV` tyCoFVsOfCo co) fv_cand in_scope acc
expr_fvs (Case scrut bndr ty alts) fv_cand in_scope acc
= (expr_fvs scrut `unionFV` tyCoFVsOfType ty `unionFV` addBndr bndr
(mapUnionFV alt_fvs alts)) fv_cand in_scope acc
where
alt_fvs (_, bndrs, rhs) = addBndrs bndrs (expr_fvs rhs)
expr_fvs (Let (NonRec bndr rhs) body) fv_cand in_scope acc
= (rhs_fvs (bndr, rhs) `unionFV` addBndr bndr (expr_fvs body))
fv_cand in_scope acc
expr_fvs (Let (Rec pairs) body) fv_cand in_scope acc
= addBndrs (map fst pairs)
(mapUnionFV rhs_fvs pairs `unionFV` expr_fvs body)
fv_cand in_scope acc
---------
rhs_fvs :: (Id, CoreExpr) -> FV
rhs_fvs (bndr, rhs) = expr_fvs rhs `unionFV`
bndrRuleAndUnfoldingFVs bndr
-- Treat any RULES as extra RHSs of the binding
---------
exprs_fvs :: [CoreExpr] -> FV
exprs_fvs exprs = mapUnionFV expr_fvs exprs
tickish_fvs :: Tickish Id -> FV
tickish_fvs (Breakpoint _ ids) = FV.mkFVs ids
tickish_fvs _ = emptyFV
{-
************************************************************************
* *
\section{Free names}
* *
************************************************************************
-}
-- | Finds the free /external/ names of an expression, notably
-- including the names of type constructors (which of course do not show
-- up in 'exprFreeVars').
exprOrphNames :: CoreExpr -> NameSet
-- There's no need to delete local binders, because they will all
-- be /internal/ names.
exprOrphNames e
= go e
where
go (Var v)
| isExternalName n = unitNameSet n
| otherwise = emptyNameSet
where n = idName v
go (Lit _) = emptyNameSet
go (Type ty) = orphNamesOfType ty -- Don't need free tyvars
go (Coercion co) = orphNamesOfCo co
go (App e1 e2) = go e1 `unionNameSet` go e2
go (Lam v e) = go e `delFromNameSet` idName v
go (Tick _ e) = go e
go (Cast e co) = go e `unionNameSet` orphNamesOfCo co
go (Let (NonRec _ r) e) = go e `unionNameSet` go r
go (Let (Rec prs) e) = exprsOrphNames (map snd prs) `unionNameSet` go e
go (Case e _ ty as) = go e `unionNameSet` orphNamesOfType ty
`unionNameSet` unionNameSets (map go_alt as)
go_alt (_,_,r) = go r
-- | Finds the free /external/ names of several expressions: see 'exprOrphNames' for details
exprsOrphNames :: [CoreExpr] -> NameSet
exprsOrphNames es = foldr (unionNameSet . exprOrphNames) emptyNameSet es
{- **********************************************************************
%* *
orphNamesXXX
%* *
%********************************************************************* -}
orphNamesOfTyCon :: TyCon -> NameSet
orphNamesOfTyCon tycon = unitNameSet (getName tycon) `unionNameSet` case tyConClass_maybe tycon of
Nothing -> emptyNameSet
Just cls -> unitNameSet (getName cls)
orphNamesOfType :: Type -> NameSet
orphNamesOfType ty | Just ty' <- coreView ty = orphNamesOfType ty'
-- Look through type synonyms (Trac #4912)
orphNamesOfType (TyVarTy _) = emptyNameSet
orphNamesOfType (LitTy {}) = emptyNameSet
orphNamesOfType (TyConApp tycon tys) = orphNamesOfTyCon tycon
`unionNameSet` orphNamesOfTypes tys
orphNamesOfType (ForAllTy bndr res) = orphNamesOfType (binderKind bndr)
`unionNameSet` orphNamesOfType res
orphNamesOfType (FunTy arg res) = unitNameSet funTyConName -- NB! See Trac #8535
`unionNameSet` orphNamesOfType arg
`unionNameSet` orphNamesOfType res
orphNamesOfType (AppTy fun arg) = orphNamesOfType fun `unionNameSet` orphNamesOfType arg
orphNamesOfType (CastTy ty co) = orphNamesOfType ty `unionNameSet` orphNamesOfCo co
orphNamesOfType (CoercionTy co) = orphNamesOfCo co
orphNamesOfThings :: (a -> NameSet) -> [a] -> NameSet
orphNamesOfThings f = foldr (unionNameSet . f) emptyNameSet
orphNamesOfTypes :: [Type] -> NameSet
orphNamesOfTypes = orphNamesOfThings orphNamesOfType
orphNamesOfCo :: Coercion -> NameSet
orphNamesOfCo (Refl _ ty) = orphNamesOfType ty
orphNamesOfCo (TyConAppCo _ tc cos) = unitNameSet (getName tc) `unionNameSet` orphNamesOfCos cos
orphNamesOfCo (AppCo co1 co2) = orphNamesOfCo co1 `unionNameSet` orphNamesOfCo co2
orphNamesOfCo (ForAllCo _ kind_co co)
= orphNamesOfCo kind_co `unionNameSet` orphNamesOfCo co
orphNamesOfCo (CoVarCo _) = emptyNameSet
orphNamesOfCo (AxiomInstCo con _ cos) = orphNamesOfCoCon con `unionNameSet` orphNamesOfCos cos
orphNamesOfCo (UnivCo p _ t1 t2) = orphNamesOfProv p `unionNameSet` orphNamesOfType t1 `unionNameSet` orphNamesOfType t2
orphNamesOfCo (SymCo co) = orphNamesOfCo co
orphNamesOfCo (TransCo co1 co2) = orphNamesOfCo co1 `unionNameSet` orphNamesOfCo co2
orphNamesOfCo (NthCo _ co) = orphNamesOfCo co
orphNamesOfCo (LRCo _ co) = orphNamesOfCo co
orphNamesOfCo (InstCo co arg) = orphNamesOfCo co `unionNameSet` orphNamesOfCo arg
orphNamesOfCo (CoherenceCo co1 co2) = orphNamesOfCo co1 `unionNameSet` orphNamesOfCo co2
orphNamesOfCo (KindCo co) = orphNamesOfCo co
orphNamesOfCo (SubCo co) = orphNamesOfCo co
orphNamesOfCo (AxiomRuleCo _ cs) = orphNamesOfCos cs
orphNamesOfProv :: UnivCoProvenance -> NameSet
orphNamesOfProv UnsafeCoerceProv = emptyNameSet
orphNamesOfProv (PhantomProv co) = orphNamesOfCo co
orphNamesOfProv (ProofIrrelProv co) = orphNamesOfCo co
orphNamesOfProv (PluginProv _) = emptyNameSet
orphNamesOfProv (HoleProv _) = emptyNameSet
orphNamesOfCos :: [Coercion] -> NameSet
orphNamesOfCos = orphNamesOfThings orphNamesOfCo
orphNamesOfCoCon :: CoAxiom br -> NameSet
orphNamesOfCoCon (CoAxiom { co_ax_tc = tc, co_ax_branches = branches })
= orphNamesOfTyCon tc `unionNameSet` orphNamesOfCoAxBranches branches
orphNamesOfAxiom :: CoAxiom br -> NameSet
orphNamesOfAxiom axiom
= orphNamesOfTypes (concatMap coAxBranchLHS $ fromBranches $ coAxiomBranches axiom)
`extendNameSet` getName (coAxiomTyCon axiom)
orphNamesOfCoAxBranches :: Branches br -> NameSet
orphNamesOfCoAxBranches
= foldr (unionNameSet . orphNamesOfCoAxBranch) emptyNameSet . fromBranches
orphNamesOfCoAxBranch :: CoAxBranch -> NameSet
orphNamesOfCoAxBranch (CoAxBranch { cab_lhs = lhs, cab_rhs = rhs })
= orphNamesOfTypes lhs `unionNameSet` orphNamesOfType rhs
-- | orphNamesOfAxiom collects the names of the concrete types and
-- type constructors that make up the LHS of a type family instance,
-- including the family name itself.
--
-- For instance, given `type family Foo a b`:
-- `type instance Foo (F (G (H a))) b = ...` would yield [Foo,F,G,H]
--
-- Used in the implementation of ":info" in GHCi.
orphNamesOfFamInst :: FamInst -> NameSet
orphNamesOfFamInst fam_inst = orphNamesOfAxiom (famInstAxiom fam_inst)
{-
************************************************************************
* *
\section[freevars-everywhere]{Attaching free variables to every sub-expression}
* *
************************************************************************
-}
-- | Those variables free in the right hand side of a rule returned as a
-- non-deterministic set
ruleRhsFreeVars :: CoreRule -> VarSet
ruleRhsFreeVars (BuiltinRule {}) = noFVs
ruleRhsFreeVars (Rule { ru_fn = _, ru_bndrs = bndrs, ru_rhs = rhs })
= fvVarSet $ filterFV isLocalVar $ addBndrs bndrs (expr_fvs rhs)
-- See Note [Rule free var hack]
-- | Those variables free in both the left and right hand sides of a rule,
-- returned as a non-deterministic set
ruleFreeVars :: CoreRule -> VarSet
ruleFreeVars = fvVarSet . ruleFVs
-- | Those variables free in both the left and right hand sides of a rule,
-- returned as an FV computation
ruleFVs :: CoreRule -> FV
ruleFVs (BuiltinRule {}) = emptyFV
ruleFVs (Rule { ru_fn = _do_not_include
-- See Note [Rule free var hack]
, ru_bndrs = bndrs
, ru_rhs = rhs, ru_args = args })
= filterFV isLocalVar $ addBndrs bndrs (exprs_fvs (rhs:args))
-- | Those variables free in both the left and right hand sides of rules,
-- returned as an FV computation
rulesFVs :: [CoreRule] -> FV
rulesFVs = mapUnionFV ruleFVs
-- | Those variables free in both the left and right hand sides of rules,
-- returned as a deterministic set
rulesFreeVarsDSet :: [CoreRule] -> DVarSet
rulesFreeVarsDSet rules = fvDVarSet $ rulesFVs rules
idRuleRhsVars :: (Activation -> Bool) -> Id -> VarSet
-- Just the variables free on the *rhs* of a rule
idRuleRhsVars is_active id
= mapUnionVarSet get_fvs (idCoreRules id)
where
get_fvs (Rule { ru_fn = fn, ru_bndrs = bndrs
, ru_rhs = rhs, ru_act = act })
| is_active act
-- See Note [Finding rule RHS free vars] in OccAnal.hs
= delFromUFM fvs fn -- Note [Rule free var hack]
where
fvs = fvVarSet $ filterFV isLocalVar $ addBndrs bndrs (expr_fvs rhs)
get_fvs _ = noFVs
-- | Those variables free in the right hand side of several rules
rulesFreeVars :: [CoreRule] -> VarSet
rulesFreeVars rules = mapUnionVarSet ruleFreeVars rules
ruleLhsFreeIds :: CoreRule -> VarSet
-- ^ This finds all locally-defined free Ids on the left hand side of a rule
-- and returns them as a non-deterministic set
ruleLhsFreeIds = fvVarSet . ruleLhsFVIds
ruleLhsFreeIdsList :: CoreRule -> [Var]
-- ^ This finds all locally-defined free Ids on the left hand side of a rule
-- and returns them as a deterministically ordered list
ruleLhsFreeIdsList = fvVarList . ruleLhsFVIds
ruleLhsFVIds :: CoreRule -> FV
-- ^ This finds all locally-defined free Ids on the left hand side of a rule
-- and returns an FV computation
ruleLhsFVIds (BuiltinRule {}) = emptyFV
ruleLhsFVIds (Rule { ru_bndrs = bndrs, ru_args = args })
= filterFV isLocalId $ addBndrs bndrs (exprs_fvs args)
{-
Note [Rule free var hack] (Not a hack any more)
~~~~~~~~~~~~~~~~~~~~~~~~~
We used not to include the Id in its own rhs free-var set.
Otherwise the occurrence analyser makes bindings recursive:
f x y = x+y
RULE: f (f x y) z ==> f x (f y z)
However, the occurrence analyser distinguishes "non-rule loop breakers"
from "rule-only loop breakers" (see BasicTypes.OccInfo). So it will
put this 'f' in a Rec block, but will mark the binding as a non-rule loop
breaker, which is perfectly inlinable.
-}
-- | Free variables of a vectorisation declaration
vectsFreeVars :: [CoreVect] -> VarSet
vectsFreeVars = mapUnionVarSet vectFreeVars
where
vectFreeVars (Vect _ rhs) = fvVarSet $ filterFV isLocalId $ expr_fvs rhs
vectFreeVars (NoVect _) = noFVs
vectFreeVars (VectType _ _ _) = noFVs
vectFreeVars (VectClass _) = noFVs
vectFreeVars (VectInst _) = noFVs
-- this function is only concerned with values, not types
{-
************************************************************************
* *
\section[freevars-everywhere]{Attaching free variables to every sub-expression}
* *
************************************************************************
The free variable pass annotates every node in the expression with its
NON-GLOBAL free variables and type variables.
-}
data FVAnn = FVAnn { fva_fvs :: DVarSet -- free in expression
, fva_ty_fvs :: DVarSet -- free only in expression's type
, fva_ty :: Type -- expression's type
}
-- | Every node in a binding group annotated with its
-- (non-global) free variables, both Ids and TyVars, and type.
type CoreBindWithFVs = AnnBind Id FVAnn
-- | Every node in an expression annotated with its
-- (non-global) free variables, both Ids and TyVars, and type.
type CoreExprWithFVs = AnnExpr Id FVAnn
type CoreExprWithFVs' = AnnExpr' Id FVAnn
-- | Every node in an expression annotated with its
-- (non-global) free variables, both Ids and TyVars, and type.
type CoreAltWithFVs = AnnAlt Id FVAnn
freeVarsOf :: CoreExprWithFVs -> DIdSet
-- ^ Inverse function to 'freeVars'
freeVarsOf (FVAnn { fva_fvs = fvs }, _) = fvs
-- | Extract the vars free in an annotated expression's type
freeVarsOfType :: CoreExprWithFVs -> DTyCoVarSet
freeVarsOfType (FVAnn { fva_ty_fvs = ty_fvs }, _) = ty_fvs
-- | Extract the type of an annotated expression. (This is cheap.)
exprTypeFV :: CoreExprWithFVs -> Type
exprTypeFV (FVAnn { fva_ty = ty }, _) = ty
-- | Extract the vars reported in a FVAnn
freeVarsOfAnn :: FVAnn -> DIdSet
freeVarsOfAnn = fva_fvs
-- | Extract the type-level vars reported in a FVAnn
freeVarsOfTypeAnn :: FVAnn -> DTyCoVarSet
freeVarsOfTypeAnn = fva_ty_fvs
noFVs :: VarSet
noFVs = emptyVarSet
aFreeVar :: Var -> DVarSet
aFreeVar = unitDVarSet
unionFVs :: DVarSet -> DVarSet -> DVarSet
unionFVs = unionDVarSet
unionFVss :: [DVarSet] -> DVarSet
unionFVss = unionDVarSets
delBindersFV :: [Var] -> DVarSet -> DVarSet
delBindersFV bs fvs = foldr delBinderFV fvs bs
delBinderFV :: Var -> DVarSet -> DVarSet
-- This way round, so we can do it multiple times using foldr
-- (b `delBinderFV` s) removes the binder b from the free variable set s,
-- but *adds* to s
--
-- the free variables of b's type
--
-- This is really important for some lambdas:
-- In (\x::a -> x) the only mention of "a" is in the binder.
--
-- Also in
-- let x::a = b in ...
-- we should really note that "a" is free in this expression.
-- It'll be pinned inside the /\a by the binding for b, but
-- it seems cleaner to make sure that a is in the free-var set
-- when it is mentioned.
--
-- This also shows up in recursive bindings. Consider:
-- /\a -> letrec x::a = x in E
-- Now, there are no explicit free type variables in the RHS of x,
-- but nevertheless "a" is free in its definition. So we add in
-- the free tyvars of the types of the binders, and include these in the
-- free vars of the group, attached to the top level of each RHS.
--
-- This actually happened in the defn of errorIO in IOBase.hs:
-- errorIO (ST io) = case (errorIO# io) of
-- _ -> bottom
-- where
-- bottom = bottom -- Never evaluated
delBinderFV b s = (s `delDVarSet` b) `unionFVs` dVarTypeTyCoVars b
-- Include coercion variables too!
varTypeTyCoVars :: Var -> TyCoVarSet
-- Find the type/kind variables free in the type of the id/tyvar
varTypeTyCoVars var = fvVarSet $ varTypeTyCoFVs var
dVarTypeTyCoVars :: Var -> DTyCoVarSet
-- Find the type/kind/coercion variables free in the type of the id/tyvar
dVarTypeTyCoVars var = fvDVarSet $ varTypeTyCoFVs var
varTypeTyCoFVs :: Var -> FV
varTypeTyCoFVs var = tyCoFVsOfType (varType var)
idFreeVars :: Id -> VarSet
idFreeVars id = ASSERT( isId id) fvVarSet $ idFVs id
dIdFreeVars :: Id -> DVarSet
dIdFreeVars id = fvDVarSet $ idFVs id
idFVs :: Id -> FV
-- Type variables, rule variables, and inline variables
idFVs id = ASSERT( isId id)
varTypeTyCoFVs id `unionFV`
idRuleAndUnfoldingFVs id
bndrRuleAndUnfoldingFVs :: Var -> FV
bndrRuleAndUnfoldingFVs v | isTyVar v = emptyFV
| otherwise = idRuleAndUnfoldingFVs v
idRuleAndUnfoldingVars :: Id -> VarSet
idRuleAndUnfoldingVars id = fvVarSet $ idRuleAndUnfoldingFVs id
idRuleAndUnfoldingVarsDSet :: Id -> DVarSet
idRuleAndUnfoldingVarsDSet id = fvDVarSet $ idRuleAndUnfoldingFVs id
idRuleAndUnfoldingFVs :: Id -> FV
idRuleAndUnfoldingFVs id = ASSERT( isId id)
idRuleFVs id `unionFV` idUnfoldingFVs id
idRuleVars :: Id -> VarSet  -- Does *not* include CoreUnfolding vars
idRuleVars id = fvVarSet $ idRuleFVs id
idRuleFVs :: Id -> FV
idRuleFVs id = ASSERT( isId id)
FV.mkFVs (dVarSetElems $ ruleInfoFreeVars (idSpecialisation id))
idUnfoldingVars :: Id -> VarSet
-- Produce free vars for an unfolding, but NOT for an ordinary
-- (non-inline) unfolding, since it is a dup of the rhs
-- and we'll get exponential behaviour if we look at both unf and rhs!
-- But do look at the *real* unfolding, even for loop breakers, else
-- we might get out-of-scope variables
idUnfoldingVars id = fvVarSet $ idUnfoldingFVs id
idUnfoldingFVs :: Id -> FV
idUnfoldingFVs id = stableUnfoldingFVs (realIdUnfolding id) `orElse` emptyFV
stableUnfoldingVars :: Unfolding -> Maybe VarSet
stableUnfoldingVars unf = fvVarSet `fmap` stableUnfoldingFVs unf
stableUnfoldingFVs :: Unfolding -> Maybe FV
stableUnfoldingFVs unf
= case unf of
CoreUnfolding { uf_tmpl = rhs, uf_src = src }
| isStableSource src
-> Just (filterFV isLocalVar $ expr_fvs rhs)
DFunUnfolding { df_bndrs = bndrs, df_args = args }
-> Just (filterFV isLocalVar $ FV.delFVs (mkVarSet bndrs) $ exprs_fvs args)
-- DFuns are top level, so no fvs from types of bndrs
_other -> Nothing
{-
************************************************************************
* *
\subsection{Free variables (and types)}
* *
************************************************************************
-}
freeVars :: CoreExpr -> CoreExprWithFVs
-- ^ Annotate a 'CoreExpr' with its (non-global) free type and value variables at every tree node
freeVars = go
where
go :: CoreExpr -> CoreExprWithFVs
go (Var v)
= (FVAnn fvs ty_fvs (idType v), AnnVar v)
where
-- ToDo: insert motivating example for why we *need*
-- to include the idSpecVars in the FV list.
-- Actually [June 98] I don't think it's necessary
-- fvs = fvs_v `unionVarSet` idSpecVars v
(fvs, ty_fvs)
| isLocalVar v = (aFreeVar v `unionFVs` ty_fvs, dVarTypeTyCoVars v)
| otherwise = (emptyDVarSet, emptyDVarSet)
go (Lit lit) = (FVAnn emptyDVarSet emptyDVarSet (literalType lit), AnnLit lit)
go (Lam b body)
= ( FVAnn { fva_fvs = b_fvs `unionFVs` (b `delBinderFV` body_fvs)
, fva_ty_fvs = b_fvs `unionFVs` (b `delBinderFV` body_ty_fvs)
, fva_ty = mkFunTy b_ty body_ty }
, AnnLam b body' )
where
body'@(FVAnn { fva_fvs = body_fvs, fva_ty_fvs = body_ty_fvs
, fva_ty = body_ty }, _) = go body
b_ty = idType b
b_fvs = tyCoVarsOfTypeDSet b_ty
go (App fun arg)
= ( FVAnn { fva_fvs = freeVarsOf fun' `unionFVs` freeVarsOf arg'
, fva_ty_fvs = tyCoVarsOfTypeDSet res_ty
, fva_ty = res_ty }
, AnnApp fun' arg' )
where
fun' = go fun
fun_ty = exprTypeFV fun'
arg' = go arg
res_ty = applyTypeToArg fun_ty arg
go (Case scrut bndr ty alts)
= ( FVAnn { fva_fvs = (bndr `delBinderFV` alts_fvs)
`unionFVs` freeVarsOf scrut2
`unionFVs` tyCoVarsOfTypeDSet ty
-- don't need to look at (idType bndr)
-- b/c that's redundant with scrut
, fva_ty_fvs = tyCoVarsOfTypeDSet ty
, fva_ty = ty }
, AnnCase scrut2 bndr ty alts2 )
where
scrut2 = go scrut
(alts_fvs_s, alts2) = mapAndUnzip fv_alt alts
alts_fvs = unionFVss alts_fvs_s
fv_alt (con,args,rhs) = (delBindersFV args (freeVarsOf rhs2),
(con, args, rhs2))
where
rhs2 = go rhs
go (Let (NonRec binder rhs) body)
= ( FVAnn { fva_fvs = freeVarsOf rhs2
`unionFVs` body_fvs
`unionFVs` fvDVarSet
(bndrRuleAndUnfoldingFVs binder)
-- Remember any rules; cf rhs_fvs above
, fva_ty_fvs = freeVarsOfType body2
, fva_ty = exprTypeFV body2 }
, AnnLet (AnnNonRec binder rhs2) body2 )
where
rhs2 = go rhs
body2 = go body
body_fvs = binder `delBinderFV` freeVarsOf body2
go (Let (Rec binds) body)
= ( FVAnn { fva_fvs = delBindersFV binders all_fvs
, fva_ty_fvs = freeVarsOfType body2
, fva_ty = exprTypeFV body2 }
, AnnLet (AnnRec (binders `zip` rhss2)) body2 )
where
(binders, rhss) = unzip binds
rhss2 = map go rhss
rhs_body_fvs = foldr (unionFVs . freeVarsOf) body_fvs rhss2
binders_fvs = fvDVarSet $ mapUnionFV idRuleAndUnfoldingFVs binders
all_fvs = rhs_body_fvs `unionFVs` binders_fvs
-- The "delBinderFV" happens after adding the idSpecVars,
-- since the latter may add some of the binders as fvs
body2 = go body
body_fvs = freeVarsOf body2
go (Cast expr co)
= ( FVAnn (freeVarsOf expr2 `unionFVs` cfvs) (tyCoVarsOfTypeDSet to_ty) to_ty
, AnnCast expr2 (c_ann, co) )
where
expr2 = go expr
cfvs = tyCoVarsOfCoDSet co
c_ann = FVAnn cfvs (tyCoVarsOfTypeDSet co_ki) co_ki
co_ki = coercionType co
Just (_, to_ty) = splitCoercionType_maybe co_ki
go (Tick tickish expr)
= ( FVAnn { fva_fvs = tickishFVs tickish `unionFVs` freeVarsOf expr2
, fva_ty_fvs = freeVarsOfType expr2
, fva_ty = exprTypeFV expr2 }
, AnnTick tickish expr2 )
where
expr2 = go expr
tickishFVs (Breakpoint _ ids) = mkDVarSet ids
tickishFVs _ = emptyDVarSet
go (Type ty) = ( FVAnn (tyCoVarsOfTypeDSet ty)
(tyCoVarsOfTypeDSet ki)
ki
, AnnType ty)
where
ki = typeKind ty
go (Coercion co) = ( FVAnn (tyCoVarsOfCoDSet co)
(tyCoVarsOfTypeDSet ki)
ki
, AnnCoercion co)
where
ki = coercionType co
| snoyberg/ghc | compiler/coreSyn/CoreFVs.hs | bsd-3-clause | 33,073 | 0 | 14 | 8,778 | 5,920 | 3,199 | 2,721 | 445 | 12 |
{-# OPTIONS_GHC -Wwarn #-}
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.Interface.ParseModuleHeader
-- Copyright : (c) Simon Marlow 2006, Isaac Dupree 2009
-- License : BSD-like
--
-- Maintainer : haddock@projects.haskell.org
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
module Haddock.Interface.ParseModuleHeader (parseModuleHeader) where
import Control.Monad (mplus)
import Data.Char
import DynFlags
import Haddock.Parser
import Haddock.Types
import RdrName
-- -----------------------------------------------------------------------------
-- Parsing module headers
-- NB. The headers must be given in the order Module, Description,
-- Copyright, License, Maintainer, Stability, Portability, except that
-- any or all may be omitted.
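-- For illustration, a header in that order might look like the following
-- (a hypothetical module and addresses, not taken from any real package):
--
--   -- |
--   -- Module      : Data.Example
--   -- Description : One-line summary of the module
--   -- Copyright   : (c) Some Author, 2016
--   -- License     : BSD3
--   -- Maintainer  : someone@example.com
--   -- Stability   : experimental
--   -- Portability : portable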
parseModuleHeader :: DynFlags -> String -> (HaddockModInfo RdrName, MDoc RdrName)
parseModuleHeader dflags str0 =
let
getKey :: String -> String -> (Maybe String,String)
getKey key str = case parseKey key str of
Nothing -> (Nothing,str)
Just (value,rest) -> (Just value,rest)
(_moduleOpt,str1) = getKey "Module" str0
(descriptionOpt,str2) = getKey "Description" str1
(copyrightOpt,str3) = getKey "Copyright" str2
(licenseOpt,str4) = getKey "License" str3
(licenceOpt,str5) = getKey "Licence" str4
(maintainerOpt,str6) = getKey "Maintainer" str5
(stabilityOpt,str7) = getKey "Stability" str6
(portabilityOpt,str8) = getKey "Portability" str7
in (HaddockModInfo {
hmi_description = parseString dflags <$> descriptionOpt,
hmi_copyright = copyrightOpt,
hmi_license = licenseOpt `mplus` licenceOpt,
hmi_maintainer = maintainerOpt,
hmi_stability = stabilityOpt,
hmi_portability = portabilityOpt,
hmi_safety = Nothing,
hmi_language = Nothing, -- set in LexParseRn
hmi_extensions = [] -- also set in LexParseRn
}, parseParas dflags str8)
-- | This function is how we read keys.
--
-- all fields in the header are optional and have the form
--
-- [spaces1][field name][spaces] ":"
-- [text]"\n" ([spaces2][space][text]"\n" | [spaces]"\n")*
-- where each [spaces2] should have [spaces1] as a prefix.
--
-- Thus for the key "Description",
--
-- > Description : this is a
-- > rather long
-- >
-- > description
-- >
-- > The module comment starts here
--
-- the value will be "this is a .. description" and the rest will begin
-- at "The module comment".
parseKey :: String -> String -> Maybe (String,String)
parseKey key toParse0 =
do
let
(spaces0,toParse1) = extractLeadingSpaces toParse0
indentation = spaces0
afterKey0 <- extractPrefix key toParse1
let
afterKey1 = extractLeadingSpaces afterKey0
afterColon0 <- case snd afterKey1 of
':':afterColon -> return afterColon
_ -> Nothing
let
(_,afterColon1) = extractLeadingSpaces afterColon0
return (scanKey True indentation afterColon1)
where
scanKey :: Bool -> String -> String -> (String,String)
scanKey _ _ [] = ([],[])
scanKey isFirst indentation str =
let
(nextLine,rest1) = extractNextLine str
accept = isFirst || sufficientIndentation || allSpaces
sufficientIndentation = case extractPrefix indentation nextLine of
Just (c:_) | isSpace c -> True
_ -> False
allSpaces = case extractLeadingSpaces nextLine of
(_,[]) -> True
_ -> False
in
if accept
then
let
(scanned1,rest2) = scanKey False indentation rest1
scanned2 = case scanned1 of
"" -> if allSpaces then "" else nextLine
_ -> nextLine ++ "\n" ++ scanned1
in
(scanned2,rest2)
else
([],str)
extractLeadingSpaces :: String -> (String,String)
extractLeadingSpaces [] = ([],[])
extractLeadingSpaces (s@(c:cs))
| isSpace c =
let
(spaces1,cs1) = extractLeadingSpaces cs
in
(c:spaces1,cs1)
| otherwise = ([],s)
extractNextLine :: String -> (String,String)
extractNextLine [] = ([],[])
extractNextLine (c:cs)
| c == '\n' =
([],cs)
| otherwise =
let
(line,rest) = extractNextLine cs
in
(c:line,rest)
-- comparison is case-insensitive.
extractPrefix :: String -> String -> Maybe String
extractPrefix [] s = Just s
extractPrefix _ [] = Nothing
extractPrefix (c1:cs1) (c2:cs2)
| toUpper c1 == toUpper c2 = extractPrefix cs1 cs2
| otherwise = Nothing
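-- Some illustrative evaluations of the helpers above (assumed, not part of
-- the original module):
--
--   extractLeadingSpaces "  x"            == ("  ", "x")
--   extractNextLine "first\nsecond"       == ("first", "second")
--   extractPrefix "desc" "Description: x" == Just "ription: x"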
| Acidburn0zzz/haddock | haddock-api/src/Haddock/Interface/ParseModuleHeader.hs | bsd-2-clause | 5,048 | 0 | 18 | 1,489 | 1,139 | 616 | 523 | 92 | 12 |
-- (c) The University of Glasgow 2006
-- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
--
-- Storage manager representation of closures
{-# LANGUAGE CPP,GeneralizedNewtypeDeriving #-}
module SMRep (
-- * Words and bytes
WordOff, ByteOff,
wordsToBytes, bytesToWordsRoundUp,
roundUpToWords,
StgWord, fromStgWord, toStgWord,
StgHalfWord, fromStgHalfWord, toStgHalfWord,
hALF_WORD_SIZE, hALF_WORD_SIZE_IN_BITS,
-- * Closure representation
SMRep(..), -- CmmInfo sees the rep; no one else does
IsStatic,
ClosureTypeInfo(..), ArgDescr(..), Liveness,
ConstrDescription,
-- ** Construction
mkHeapRep, blackHoleRep, indStaticRep, mkStackRep, mkRTSRep, arrPtrsRep,
smallArrPtrsRep, arrWordsRep,
-- ** Predicates
isStaticRep, isConRep, isThunkRep, isFunRep, isStaticNoCafCon,
isStackRep,
-- ** Size-related things
heapClosureSizeW,
fixedHdrSizeW, arrWordsHdrSize, arrWordsHdrSizeW, arrPtrsHdrSize,
arrPtrsHdrSizeW, profHdrSize, thunkHdrSize, nonHdrSize, nonHdrSizeW,
smallArrPtrsHdrSize, smallArrPtrsHdrSizeW, hdrSize, hdrSizeW,
fixedHdrSize,
-- ** RTS closure types
rtsClosureType, rET_SMALL, rET_BIG,
aRG_GEN, aRG_GEN_BIG,
-- ** Arrays
card, cardRoundUp, cardTableSizeB, cardTableSizeW,
-- * Operations over [Word8] strings that don't belong here
pprWord8String, stringToWord8s
) where
#include "../HsVersions.h"
#include "../includes/MachDeps.h"
import DynFlags
import Outputable
import Platform
import FastString
import Data.Char( ord )
import Data.Word
import Data.Bits
{-
************************************************************************
* *
Words and bytes
* *
************************************************************************
-}
-- | Word offset, or word count
type WordOff = Int
-- | Byte offset, or byte count
type ByteOff = Int
-- | Round up the given byte count to the next byte count that's a
-- multiple of the machine's word size.
roundUpToWords :: DynFlags -> ByteOff -> ByteOff
roundUpToWords dflags n =
(n + (wORD_SIZE dflags - 1)) .&. (complement (wORD_SIZE dflags - 1))
-- | Convert the given number of words to a number of bytes.
--
-- This function morally has type @WordOff -> ByteOff@, but uses @Num
-- a@ to allow for overloading.
wordsToBytes :: Num a => DynFlags -> a -> a
wordsToBytes dflags n = fromIntegral (wORD_SIZE dflags) * n
{-# SPECIALIZE wordsToBytes :: DynFlags -> Int -> Int #-}
{-# SPECIALIZE wordsToBytes :: DynFlags -> Word -> Word #-}
{-# SPECIALIZE wordsToBytes :: DynFlags -> Integer -> Integer #-}
-- | First round the given byte count up to a multiple of the
-- machine's word size and then convert the result to words.
bytesToWordsRoundUp :: DynFlags -> ByteOff -> WordOff
bytesToWordsRoundUp dflags n = (n + word_size - 1) `quot` word_size
where word_size = wORD_SIZE dflags
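-- A worked example, assuming an 8-byte word (i.e. wORD_SIZE dflags == 8;
-- purely illustrative):
--
--   roundUpToWords dflags 13      == (13 + 7) .&. complement 7 == 16
--   wordsToBytes dflags 3         == 8 * 3                     == 24
--   bytesToWordsRoundUp dflags 13 == (13 + 7) `quot` 8         == 2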
-- StgWord is a type representing an StgWord on the target platform.
-- A Word64 is large enough to hold a Word for either a 32bit or 64bit platform
newtype StgWord = StgWord Word64
deriving (Eq, Bits)
fromStgWord :: StgWord -> Integer
fromStgWord (StgWord i) = toInteger i
toStgWord :: DynFlags -> Integer -> StgWord
toStgWord dflags i
= case platformWordSize (targetPlatform dflags) of
-- These conversions mean that things like toStgWord (-1)
-- do the right thing
4 -> StgWord (fromIntegral (fromInteger i :: Word32))
8 -> StgWord (fromInteger i :: Word64)
w -> panic ("toStgWord: Unknown platformWordSize: " ++ show w)
instance Outputable StgWord where
ppr (StgWord i) = integer (toInteger i)
--
-- A Word32 is large enough to hold half a Word for either a 32bit or
-- 64bit platform
newtype StgHalfWord = StgHalfWord Word32
deriving Eq
fromStgHalfWord :: StgHalfWord -> Integer
fromStgHalfWord (StgHalfWord w) = toInteger w
toStgHalfWord :: DynFlags -> Integer -> StgHalfWord
toStgHalfWord dflags i
= case platformWordSize (targetPlatform dflags) of
-- These conversions mean that things like toStgHalfWord (-1)
-- do the right thing
4 -> StgHalfWord (fromIntegral (fromInteger i :: Word16))
8 -> StgHalfWord (fromInteger i :: Word32)
w -> panic ("toStgHalfWord: Unknown platformWordSize: " ++ show w)
instance Outputable StgHalfWord where
ppr (StgHalfWord w) = integer (toInteger w)
hALF_WORD_SIZE :: DynFlags -> ByteOff
hALF_WORD_SIZE dflags = platformWordSize (targetPlatform dflags) `shiftR` 1
hALF_WORD_SIZE_IN_BITS :: DynFlags -> Int
hALF_WORD_SIZE_IN_BITS dflags = platformWordSize (targetPlatform dflags) `shiftL` 2
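-- For example, with an 8-byte word (an assumption for illustration):
-- hALF_WORD_SIZE is 8 `shiftR` 1 == 4 bytes, and hALF_WORD_SIZE_IN_BITS is
-- 8 `shiftL` 2 == 32 bits.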
{-
************************************************************************
* *
\subsubsection[SMRep-datatype]{@SMRep@---storage manager representation}
* *
************************************************************************
-}
-- | A description of the layout of a closure. Corresponds directly
-- to the closure types in includes/rts/storage/ClosureTypes.h.
data SMRep
= HeapRep -- GC routines consult sizes in info tbl
IsStatic
!WordOff -- # ptr words
!WordOff -- # non-ptr words INCLUDING SLOP (see mkHeapRep below)
ClosureTypeInfo -- type-specific info
| ArrayPtrsRep
!WordOff -- # ptr words
!WordOff -- # card table words
| SmallArrayPtrsRep
!WordOff -- # ptr words
| ArrayWordsRep
!WordOff -- # bytes expressed in words, rounded up
| StackRep -- Stack frame (RET_SMALL or RET_BIG)
Liveness
| RTSRep -- The RTS needs to declare info tables with specific
Int -- type tags, so this form lets us override the default
SMRep -- tag for an SMRep.
-- | True <=> This is a static closure. Affects how we garbage-collect it.
-- Static closures have an extra static link field at the end.
type IsStatic = Bool
-- From an SMRep you can get to the closure type defined in
-- includes/rts/storage/ClosureTypes.h. Described by the function
-- rtsClosureType below.
data ClosureTypeInfo
= Constr ConstrTag ConstrDescription
| Fun FunArity ArgDescr
| Thunk
| ThunkSelector SelectorOffset
| BlackHole
| IndStatic
type ConstrTag = Int
type ConstrDescription = [Word8] -- result of dataConIdentity
type FunArity = Int
type SelectorOffset = Int
-------------------------
-- We represent liveness bitmaps as a Bitmap (whose internal
-- representation really is a bitmap). These are pinned onto case return
-- vectors to indicate the state of the stack for the garbage collector.
--
-- In the compiled program, liveness bitmaps that fit inside a single
-- word (StgWord) are stored as a single word, while larger bitmaps are
-- stored as a pointer to an array of words.
type Liveness = [Bool] -- One Bool per word; True <=> non-ptr or dead
-- False <=> ptr
-------------------------
-- An ArgDescr describes the argument pattern of a function
data ArgDescr
= ArgSpec -- Fits one of the standard patterns
!Int -- RTS type identifier ARG_P, ARG_N, ...
| ArgGen -- General case
Liveness -- Details about the arguments
-----------------------------------------------------------------------------
-- Construction
mkHeapRep :: DynFlags -> IsStatic -> WordOff -> WordOff -> ClosureTypeInfo
-> SMRep
mkHeapRep dflags is_static ptr_wds nonptr_wds cl_type_info
= HeapRep is_static
ptr_wds
(nonptr_wds + slop_wds)
cl_type_info
where
slop_wds
| is_static = 0
| otherwise = max 0 (minClosureSize dflags - (hdr_size + payload_size))
hdr_size = closureTypeHdrSize dflags cl_type_info
payload_size = ptr_wds + nonptr_wds
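-- A worked example of the slop calculation (the sizes below are assumptions
-- for illustration: no profiling, sTD_HDR_SIZE == 1 and mIN_PAYLOAD_SIZE == 1,
-- so minClosureSize == 2):
--
--   * a dynamic constructor with 1 pointer and 0 non-pointer words has
--     hdr_size + payload_size == 1 + 1 == 2, so slop_wds == 0;
--   * a dynamic nullary constructor (0 + 0 payload words) only reaches 1 word,
--     so slop_wds == max 0 (2 - 1) == 1 and the non-pointer count stored in
--     the HeapRep becomes 1.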
mkRTSRep :: Int -> SMRep -> SMRep
mkRTSRep = RTSRep
mkStackRep :: [Bool] -> SMRep
mkStackRep liveness = StackRep liveness
blackHoleRep :: SMRep
blackHoleRep = HeapRep False 0 0 BlackHole
indStaticRep :: SMRep
indStaticRep = HeapRep True 1 0 IndStatic
arrPtrsRep :: DynFlags -> WordOff -> SMRep
arrPtrsRep dflags elems = ArrayPtrsRep elems (cardTableSizeW dflags elems)
smallArrPtrsRep :: WordOff -> SMRep
smallArrPtrsRep elems = SmallArrayPtrsRep elems
arrWordsRep :: DynFlags -> ByteOff -> SMRep
arrWordsRep dflags bytes = ArrayWordsRep (bytesToWordsRoundUp dflags bytes)
-----------------------------------------------------------------------------
-- Predicates
isStaticRep :: SMRep -> IsStatic
isStaticRep (HeapRep is_static _ _ _) = is_static
isStaticRep (RTSRep _ rep) = isStaticRep rep
isStaticRep _ = False
isStackRep :: SMRep -> Bool
isStackRep StackRep{} = True
isStackRep (RTSRep _ rep) = isStackRep rep
isStackRep _ = False
isConRep :: SMRep -> Bool
isConRep (HeapRep _ _ _ Constr{}) = True
isConRep _ = False
isThunkRep :: SMRep -> Bool
isThunkRep (HeapRep _ _ _ Thunk{}) = True
isThunkRep (HeapRep _ _ _ ThunkSelector{}) = True
isThunkRep (HeapRep _ _ _ BlackHole{}) = True
isThunkRep (HeapRep _ _ _ IndStatic{}) = True
isThunkRep _ = False
isFunRep :: SMRep -> Bool
isFunRep (HeapRep _ _ _ Fun{}) = True
isFunRep _ = False
isStaticNoCafCon :: SMRep -> Bool
-- This should line up exactly with CONSTR_NOCAF_STATIC above
-- See Note [Static NoCaf constructors]
isStaticNoCafCon (HeapRep True 0 _ Constr{}) = True
isStaticNoCafCon _ = False
-----------------------------------------------------------------------------
-- Size-related things
fixedHdrSize :: DynFlags -> ByteOff
fixedHdrSize dflags = wordsToBytes dflags (fixedHdrSizeW dflags)
-- | Size of a closure header (StgHeader in includes/rts/storage/Closures.h)
fixedHdrSizeW :: DynFlags -> WordOff
fixedHdrSizeW dflags = sTD_HDR_SIZE dflags + profHdrSize dflags
-- | Size of the profiling part of a closure header
-- (StgProfHeader in includes/rts/storage/Closures.h)
profHdrSize :: DynFlags -> WordOff
profHdrSize dflags
| gopt Opt_SccProfilingOn dflags = pROF_HDR_SIZE dflags
| otherwise = 0
-- | The garbage collector requires that every closure is at least as
-- big as this.
minClosureSize :: DynFlags -> WordOff
minClosureSize dflags = fixedHdrSizeW dflags + mIN_PAYLOAD_SIZE dflags
arrWordsHdrSize :: DynFlags -> ByteOff
arrWordsHdrSize dflags
= fixedHdrSize dflags + sIZEOF_StgArrBytes_NoHdr dflags
arrWordsHdrSizeW :: DynFlags -> WordOff
arrWordsHdrSizeW dflags =
fixedHdrSizeW dflags +
(sIZEOF_StgArrBytes_NoHdr dflags `quot` wORD_SIZE dflags)
arrPtrsHdrSize :: DynFlags -> ByteOff
arrPtrsHdrSize dflags
= fixedHdrSize dflags + sIZEOF_StgMutArrPtrs_NoHdr dflags
arrPtrsHdrSizeW :: DynFlags -> WordOff
arrPtrsHdrSizeW dflags =
fixedHdrSizeW dflags +
(sIZEOF_StgMutArrPtrs_NoHdr dflags `quot` wORD_SIZE dflags)
smallArrPtrsHdrSize :: DynFlags -> ByteOff
smallArrPtrsHdrSize dflags
= fixedHdrSize dflags + sIZEOF_StgSmallMutArrPtrs_NoHdr dflags
smallArrPtrsHdrSizeW :: DynFlags -> WordOff
smallArrPtrsHdrSizeW dflags =
fixedHdrSizeW dflags +
(sIZEOF_StgSmallMutArrPtrs_NoHdr dflags `quot` wORD_SIZE dflags)
-- Thunks have an extra header word on SMP, so the update doesn't
-- splat the payload.
thunkHdrSize :: DynFlags -> WordOff
thunkHdrSize dflags = fixedHdrSizeW dflags + smp_hdr
where smp_hdr = sIZEOF_StgSMPThunkHeader dflags `quot` wORD_SIZE dflags
hdrSize :: DynFlags -> SMRep -> ByteOff
hdrSize dflags rep = wordsToBytes dflags (hdrSizeW dflags rep)
hdrSizeW :: DynFlags -> SMRep -> WordOff
hdrSizeW dflags (HeapRep _ _ _ ty) = closureTypeHdrSize dflags ty
hdrSizeW dflags (ArrayPtrsRep _ _) = arrPtrsHdrSizeW dflags
hdrSizeW dflags (SmallArrayPtrsRep _) = smallArrPtrsHdrSizeW dflags
hdrSizeW dflags (ArrayWordsRep _) = arrWordsHdrSizeW dflags
hdrSizeW _ _ = panic "SMRep.hdrSizeW"
nonHdrSize :: DynFlags -> SMRep -> ByteOff
nonHdrSize dflags rep = wordsToBytes dflags (nonHdrSizeW rep)
nonHdrSizeW :: SMRep -> WordOff
nonHdrSizeW (HeapRep _ p np _) = p + np
nonHdrSizeW (ArrayPtrsRep elems ct) = elems + ct
nonHdrSizeW (SmallArrayPtrsRep elems) = elems
nonHdrSizeW (ArrayWordsRep words) = words
nonHdrSizeW (StackRep bs) = length bs
nonHdrSizeW (RTSRep _ rep) = nonHdrSizeW rep
-- | The total size of the closure, in words.
heapClosureSizeW :: DynFlags -> SMRep -> WordOff
heapClosureSizeW dflags (HeapRep _ p np ty)
= closureTypeHdrSize dflags ty + p + np
heapClosureSizeW dflags (ArrayPtrsRep elems ct)
= arrPtrsHdrSizeW dflags + elems + ct
heapClosureSizeW dflags (SmallArrayPtrsRep elems)
= smallArrPtrsHdrSizeW dflags + elems
heapClosureSizeW dflags (ArrayWordsRep words)
= arrWordsHdrSizeW dflags + words
heapClosureSizeW _ _ = panic "SMRep.heapClosureSize"
closureTypeHdrSize :: DynFlags -> ClosureTypeInfo -> WordOff
closureTypeHdrSize dflags ty = case ty of
Thunk{} -> thunkHdrSize dflags
ThunkSelector{} -> thunkHdrSize dflags
BlackHole{} -> thunkHdrSize dflags
IndStatic{} -> thunkHdrSize dflags
_ -> fixedHdrSizeW dflags
-- All thunks use thunkHdrSize, even if they are non-updatable.
-- this is because we don't have separate closure types for
-- updatable vs. non-updatable thunks, so the GC can't tell the
-- difference. If we ever have significant numbers of non-
-- updatable thunks, it might be worth fixing this.
-- ---------------------------------------------------------------------------
-- Arrays
-- | The byte offset into the card table of the card for a given element
card :: DynFlags -> Int -> Int
card dflags i = i `shiftR` mUT_ARR_PTRS_CARD_BITS dflags
-- | Convert a number of elements to a number of cards, rounding up
cardRoundUp :: DynFlags -> Int -> Int
cardRoundUp dflags i =
card dflags (i + ((1 `shiftL` mUT_ARR_PTRS_CARD_BITS dflags) - 1))
-- | The size of a card table, in bytes
cardTableSizeB :: DynFlags -> Int -> ByteOff
cardTableSizeB dflags elems = cardRoundUp dflags elems
-- | The size of a card table, in words
cardTableSizeW :: DynFlags -> Int -> WordOff
cardTableSizeW dflags elems =
bytesToWordsRoundUp dflags (cardTableSizeB dflags elems)
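-- A worked example (assuming mUT_ARR_PTRS_CARD_BITS dflags == 7, i.e. one
-- card per 128 elements, and an 8-byte word; both are assumptions for
-- illustration):
--
--   card dflags 200           == 200 `shiftR` 7               == 1
--   cardRoundUp dflags 200    == card dflags (200 + 127)      == 2
--   cardTableSizeB dflags 200 == 2 bytes
--   cardTableSizeW dflags 200 == bytesToWordsRoundUp dflags 2 == 1 word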
-----------------------------------------------------------------------------
-- deriving the RTS closure type from an SMRep
#include "../includes/rts/storage/ClosureTypes.h"
#include "../includes/rts/storage/FunTypes.h"
-- Defines CONSTR, CONSTR_1_0 etc
-- | Derives the RTS closure type from an 'SMRep'
rtsClosureType :: SMRep -> Int
rtsClosureType rep
= case rep of
RTSRep ty _ -> ty
HeapRep False 1 0 Constr{} -> CONSTR_1_0
HeapRep False 0 1 Constr{} -> CONSTR_0_1
HeapRep False 2 0 Constr{} -> CONSTR_2_0
HeapRep False 1 1 Constr{} -> CONSTR_1_1
HeapRep False 0 2 Constr{} -> CONSTR_0_2
HeapRep False _ _ Constr{} -> CONSTR
HeapRep False 1 0 Fun{} -> FUN_1_0
HeapRep False 0 1 Fun{} -> FUN_0_1
HeapRep False 2 0 Fun{} -> FUN_2_0
HeapRep False 1 1 Fun{} -> FUN_1_1
HeapRep False 0 2 Fun{} -> FUN_0_2
HeapRep False _ _ Fun{} -> FUN
HeapRep False 1 0 Thunk{} -> THUNK_1_0
HeapRep False 0 1 Thunk{} -> THUNK_0_1
HeapRep False 2 0 Thunk{} -> THUNK_2_0
HeapRep False 1 1 Thunk{} -> THUNK_1_1
HeapRep False 0 2 Thunk{} -> THUNK_0_2
HeapRep False _ _ Thunk{} -> THUNK
HeapRep False _ _ ThunkSelector{} -> THUNK_SELECTOR
-- Approximation: we use the CONSTR_NOCAF_STATIC type for static
-- constructors that have no pointer words.
HeapRep True 0 _ Constr{} -> CONSTR_NOCAF_STATIC -- See isStaticNoCafCon above
HeapRep True _ _ Constr{} -> CONSTR_STATIC
HeapRep True _ _ Fun{} -> FUN_STATIC
HeapRep True _ _ Thunk{} -> THUNK_STATIC
HeapRep False _ _ BlackHole{} -> BLACKHOLE
HeapRep False _ _ IndStatic{} -> IND_STATIC
_ -> panic "rtsClosureType"
-- We export these ones
rET_SMALL, rET_BIG, aRG_GEN, aRG_GEN_BIG :: Int
rET_SMALL = RET_SMALL
rET_BIG = RET_BIG
aRG_GEN = ARG_GEN
aRG_GEN_BIG = ARG_GEN_BIG
{-
Note [Static NoCaf constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we know that a top-level binding 'x' is not Caffy (ie no CAFs are
reachable from 'x'), then a statically allocated constructor (Just x)
is also not Caffy, and the garbage collector need not follow its
argument fields. Exploiting this would require two static info tables
for Just, for the two cases where the argument was Caffy or non-Caffy.
Currently we don't do this; instead we treat nullary constructors
as non-Caffy, and the others as potentially Caffy.
************************************************************************
* *
Pretty printing of SMRep and friends
* *
************************************************************************
-}
instance Outputable ClosureTypeInfo where
ppr = pprTypeInfo
instance Outputable SMRep where
ppr (HeapRep static ps nps tyinfo)
= hang (header <+> lbrace) 2 (ppr tyinfo <+> rbrace)
where
header = text "HeapRep"
<+> if static then text "static" else empty
<+> pp_n "ptrs" ps <+> pp_n "nonptrs" nps
pp_n :: String -> Int -> SDoc
pp_n _ 0 = empty
pp_n s n = int n <+> text s
ppr (ArrayPtrsRep size _) = text "ArrayPtrsRep" <+> ppr size
ppr (SmallArrayPtrsRep size) = text "SmallArrayPtrsRep" <+> ppr size
ppr (ArrayWordsRep words) = text "ArrayWordsRep" <+> ppr words
ppr (StackRep bs) = text "StackRep" <+> ppr bs
ppr (RTSRep ty rep) = text "tag:" <> ppr ty <+> ppr rep
instance Outputable ArgDescr where
ppr (ArgSpec n) = text "ArgSpec" <+> ppr n
ppr (ArgGen ls) = text "ArgGen" <+> ppr ls
pprTypeInfo :: ClosureTypeInfo -> SDoc
pprTypeInfo (Constr tag descr)
= text "Con" <+>
braces (sep [ text "tag:" <+> ppr tag
, text "descr:" <> text (show descr) ])
pprTypeInfo (Fun arity args)
= text "Fun" <+>
braces (sep [ text "arity:" <+> ppr arity
, ptext (sLit ("fun_type:")) <+> ppr args ])
pprTypeInfo (ThunkSelector offset)
= text "ThunkSel" <+> ppr offset
pprTypeInfo Thunk = text "Thunk"
pprTypeInfo BlackHole = text "BlackHole"
pprTypeInfo IndStatic = text "IndStatic"
-- XXX Does not belong here!!
stringToWord8s :: String -> [Word8]
stringToWord8s s = map (fromIntegral . ord) s
pprWord8String :: [Word8] -> SDoc
-- Debug printing. Not very clever right now.
pprWord8String ws = text (show ws)
| tjakway/ghcjvm | compiler/cmm/SMRep.hs | bsd-3-clause | 19,295 | 0 | 14 | 4,626 | 3,857 | 2,021 | 1,836 | 322 | 27 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleInstances, UndecidableInstances, FunctionalDependencies #-}
-- The code from the ticket lacked necessary extension FlexibleContexts
-- which crashed the compiler with "GHC internal error"
-- This test case reproduces that scenario
{- # LANGUAGE FlexibleContexts #-}
module T12055a where
import GHC.Base ( Constraint, Type )
import GHC.Exts ( type (~~) )
type Cat k = k -> k -> Type
class Category (p :: Cat k) where
type Ob p :: k -> Constraint
class (Category (Dom f), Category (Cod f)) => Functor (f :: j -> k) where
type Dom f :: Cat j
type Cod f :: Cat k
functor :: forall a b.
Iso Constraint (:-) (:-)
(Ob (Dom f) a) (Ob (Dom f) b)
(Ob (Cod f) (f a)) (Ob (Cod f) (f b))
class (Functor f , Dom f ~ p, Cod f ~ q) =>
Fun (p :: Cat j) (q :: Cat k) (f :: j -> k) | f -> p q
instance (Functor f , Dom f ~ p, Cod f ~ q) =>
Fun (p :: Cat j) (q :: Cat k) (f :: j -> k)
data Nat (p :: Cat j) (q :: Cat k) (f :: j -> k) (g :: j -> k)
type Iso k (c :: Cat k) (d :: Cat k) s t a b =
forall p. (Cod p ~~ Nat d (->)) => p a b -> p s t
data (p :: Constraint) :- (q :: Constraint)
| ezyang/ghc | testsuite/tests/polykinds/T12055a.hs | bsd-3-clause | 1,430 | 0 | 12 | 368 | 534 | 304 | 230 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
{-# LANGUAGE ScopedTypeVariables, FlexibleContexts #-}
-- Trac #1783
-- Like Trac #1781 you could argue that this one should succeed
-- but we stick with the old behaviour for now. When we do
-- fundeps properly it'll probably start to work
module ShouldCompile where
import Prelude hiding (foldr, foldr1)
import Data.Maybe
class Elem a e | a -> e
class Foldable a where
foldr :: Elem a e => (e -> b -> b) -> b -> a -> b
-- foldr1 :: forall e. Elem a e => (e -> e -> e) -> a -> e -- WORKS!
foldr1 :: Elem a e => (e -> e -> e) -> a -> e
foldr1 f xs = fromMaybe (error "foldr1: empty structure")
(foldr mf Nothing xs)
where mf :: Elem a e => (e -> Maybe e -> Maybe e)
mf x Nothing = Just x
mf x (Just y) = Just (f x y)
| olsner/ghc | testsuite/tests/typecheck/should_compile/FD2.hs | bsd-3-clause | 842 | 0 | 13 | 224 | 226 | 119 | 107 | -1 | -1 |
{-# LANGUAGE TypeFamilies #-}
module Ccfail005 where
type family F a
type instance F Bool = D -> IO Int
type instance F Char = Int -> IO D
data D = D
-- These should be rejected as D isn't a type we can use with the FFI.
-- Note that, in the signature the user writes, there aren't an
-- "argument type" and "result type" to complain about, though.
foreign import ccall f1 :: F Bool
foreign import ccall f2 :: F Char
| sdiehl/ghc | testsuite/tests/ffi/should_fail/ccfail005.hs | bsd-3-clause | 422 | 0 | 6 | 90 | 81 | 47 | 34 | 8 | 0 |
module ForkExperiments where
import Control.Concurrent
import Control.Monad
import System.IO
main = do
hSetBuffering stdout NoBuffering
forkIO (replicateM_ 10 (putChar 'A'))
replicateM_ 10 (putChar 'B')
| NickAger/LearningHaskell | ParallelConcurrent/ForkExperiments.hsproj/ForkExperiments.hs | mit | 216 | 0 | 11 | 37 | 66 | 33 | 33 | 8 | 1 |
module ASTGenerator where
import AMPLParserMeta
--import Language.LBNF.Runtime
import Language.LBNF.Compiletime
import qualified TypesAMPL as T
failure x = Bad $ "Undefined case: " ++ show x
--transIdent :: Ident -> Result
transIdent x = case x of
Ident str -> str
--transUIdent :: UIdent -> Result
transUIdent x = case x of
UIdent str -> str
--transAMPLCODE :: AMPLCODE -> Result
transAMPLCODE x = case x of
Main handles cohandles constructors destructors processes functions start ->
let
handles1 = transHANDLES handles
cohandles1 = transCOHANDLES cohandles
constructors1 = transCONSTRUCTORS constructors
destructors1 = transDESTRUCTORS destructors
processes1 = transPROCESSES processes
functions1 = transFUNCTIONS functions
start1 = transSTART start
in
(handles1,cohandles1,constructors1,destructors1,processes1,functions1,start1)
--transHANDLE_SPEC :: HANDLE_SPEC -> Result
transHANDLE_SPEC x = case x of
Hand_spec uident handles -> (transUIdent uident,map transHandle handles)
--transHandle :: Handle -> Result
transHandle x = case x of
HandName uident -> transUIdent uident
--transCONSTRUCTORS :: CONSTRUCTORS -> Result
transCONSTRUCTORS x = case x of
Constructors structor_specs -> map transSTRUCTOR_SPEC structor_specs
Constructors_none -> []
--transDESTRUCTORS :: DESTRUCTORS -> Result
transDESTRUCTORS x = case x of
Destructors structor_specs -> map (transSTRUCTOR_SPEC) structor_specs
Destructors_none -> []
--transSTRUCTOR_SPEC :: STRUCTOR_SPEC -> Result
transSTRUCTOR_SPEC x = case x of
Struct_spec uident structs -> (transUIdent uident,map (transSTRUCT) structs)
--transSTRUCT :: STRUCT -> Result
transSTRUCT x = case x of
Struct uident n -> (transUIdent uident , (n::Integer))
--transHANDLES :: HANDLES -> Result
transHANDLES x = case x of
Handles handle_specs -> map (transHANDLE_SPEC) handle_specs
Handles_none -> []
--transCOHANDLES :: COHANDLES -> Result
transCOHANDLES x = case x of
Cohandles handle_specs -> map (transHANDLE_SPEC) handle_specs
Cohandles_none -> []
--transPROCESSES :: PROCESSES -> Result
transPROCESSES x = case x of
Processes process_specs -> map (transPROCESS_SPEC) process_specs
Processes_none -> []
--transPROCESS_SPEC :: PROCESS_SPEC -> Result
transPROCESS_SPEC x = case x of
Process_spec uident varss ids1 ids2 coms3 ->
((transUIdent uident),(map transVars varss),(map transIdent ids1),(map transIdent ids2),(transCOMS coms3))
--transVars :: Vars -> Result
transVars x = case x of
VName id -> transIdent id
--transFUNCTIONS :: FUNCTIONS -> Result
transFUNCTIONS x = case x of
Functions function_specs -> map (transFUNCTION_SPEC) function_specs
Functions_none -> []
--transFUNCTION_SPEC :: FUNCTION_SPEC -> Result
transFUNCTION_SPEC x = case x of
Function_spec uident varss coms ->
((transUIdent uident),(map transVars varss), (transCOMS coms))
--transSTART :: START -> Result
transSTART x = case x of
Start channel_spec coms -> ((transCHANNEL_SPEC channel_spec),(transCOMS coms))
--transCHANNEL_SPEC :: CHANNEL_SPEC -> Result
transCHANNEL_SPEC x = case x of
Channel_specf ids1 ids2 -> ((map transIdent ids1),(map transIdent ids2))
--Channel_spec cintegers1 cintegers2 -> failure x
--transCOMS :: COMS -> Result
transCOMS x = case x of
Prog coms -> map transCOM coms
--transCOM :: COM -> Result
transCOM x = case x of
AC_STORE -> T.AC_STORE
AC_STOREf id -> (T.AC_STORE)
AC_LOAD n -> T.AC_LOAD (fromIntegral n)
AC_LOADf id -> T.AC_LOADf (transIdent id)
AC_RET -> T.AC_RET
AC_FRET -> T.AC_FRET
--AC_CALL id -> T.AC_CALL (transIdent id)
AC_CALLf id ids -> T.AC_CALLf (transIdent id) (map transIdent ids)
AC_INT cinteger -> T.AC_INT (transCInteger cinteger)
AC_LEQ -> T.AC_LEQ
AC_ADD -> T.AC_ADD
AC_MUL -> T.AC_MUL
AC_CONCAT -> T.AC_CONCAT
AC_REVERSE -> T.AC_REVERSE
AC_CONS n1 n2 -> T.AC_CONS (fromIntegral n1) (fromIntegral n2)
-- AC_STRUCT uident1 uident2 -> T.AC_STRUCT (transUIdent uident1) (transUIdent uident2)
{- AC_STRUCTas uident1 uident2 ids3 -> T.AC_STRUCTas (transUIdent uident1)
(transUIdent uident2)
(map transIdent ids3)
-}
AC_CASE comss -> T.AC_CASE (map transCOMS comss)
AC_CASEf labelcomss -> T.AC_CASEf (map transLABELCOMS labelcomss)
AC_RECORD comss -> T.AC_RECORD (map transCOMS comss)
AC_RECORDf labelcomss -> T.AC_RECORDf (map transLABELCOMS labelcomss)
AC_DEST n1 n2 -> T.AC_DEST (fromIntegral n1) (fromIntegral n2)
AC_GET cinteger -> T.AC_GET (transCInteger cinteger)
AC_GETf id -> T.AC_GETf (transIdent id)
AC_HPUT cinteger n -> T.AC_HPUT (transCInteger cinteger) (fromIntegral n)
AC_HPUTf id uident1 uident2 -> T.AC_HPUTf (transIdent id)
((transUIdent uident1),
(transUIdent uident2))
AC_HCASE cinteger comss -> T.AC_HCASE (transCInteger cinteger)
(map transCOMS comss)
--AC_HCASEf id labelcomss -> T.AC_HCASEf (transIdent id)
-- (map transLABELCOMS labelcomss)
AC_PUT cinteger -> T.AC_PUT (transCInteger cinteger)
AC_PUTf id -> T.AC_PUTf (transIdent id)
AC_SPLIT cinteger1 cinteger2 cinteger3 -> T.AC_SPLIT
(transCInteger cinteger1)
((transCInteger cinteger2),
(transCInteger cinteger3))
AC_SPLITf id1 id2 id3 -> T.AC_SPLITf (transIdent id1)
((transIdent id2),
(transIdent id3))
--AC_FORK cinteger1 cinteger2 cintegers3 coms4 cinteger5 cintegers6 coms7 -> failure x
--AC_FORKf id1 id2 ids3 coms4 id5 ids6 coms7 -> failure x
{-AC_PLUG ncintegers cintegers1 coms2 cintegers3 coms4 -> T.AC_PLUG
(map transNCInteger ncintegers)
((map transCInteger cintegers1)
,( transCOMS coms2))
((map transCInteger cintegers3)
( transCOMS coms4))
--AC_PLUGf nidents ids1 coms2 ids3 coms4 -> failure x -}
--AC_RUN trans uident -> T.AC_RUN (map transTRAN trans) (transUIdent uident)
AC_RUNf uident ids1 ids2 ids3 -> T.AC_RUNf
(transUIdent uident)
(map transIdent ids1)
((map transIdent ids2),
(map transIdent ids3))
AC_CLOSE cinteger -> T.AC_CLOSE $ transCInteger cinteger
AC_CLOSEf id -> T.AC_CLOSEf $ transIdent id
AC_HALT cinteger -> T.AC_HALT $ transCInteger cinteger
AC_HALTf id -> T.AC_HALTf $ transIdent id
--transLABELCOMS :: LABELCOMS -> Result
transLABELCOMS x = case x of
Labelcoms uident1 uident2 coms3 -> ((transUIdent uident1),
(transUIdent uident2),
(transCOMS coms3))
--transTRAN :: TRAN -> Result
transTRAN x = case x of
TranIn1 n1 n2 -> ((n1::T.CH),(T.IN::T.POLARITY),(n2::T.CH))
TranIn2 n1 n2 -> ((n1::T.CH),(T.OUT::T.POLARITY),(n2::T.CH))
--transNCInteger :: NCInteger -> Result
transNCInteger x = case x of
Ncinteger cinteger -> transCInteger cinteger
--transNIdent :: NIdent -> Result
transNIdent x = case x of
Nident id -> transIdent id
--transCInteger :: CInteger -> Result
transCInteger x = case x of
Positive n -> (fromIntegral n)
Negative n -> negate(fromIntegral n)
{-
data COM =
AC_STORE
| AC_STOREf VAR
| AC_LOAD Int
| AC_LOADf VAR
| AC_RET
| AC_FRET
| AC_CALL String Int
| AC_CALLf String VARS
| AC_INT Int
| AC_STRING String -- experimental
| AC_LEQ
| AC_ADD
| AC_MUL
| AC_CONS Int Int
| AC_STRUCT STRUCTOR_NAME [String]
| AC_CASE [COMS]
| AC_CASEf [(STRUCTOR_NAME,[String],COMS)]
| AC_RECORD [COMS]
| AC_RECORDf [(STRUCTOR_NAME,[String],COMS)]
| AC_DEST Int Int
| AC_GET Int
| AC_GETf String
| AC_HPUT Int Int
| AC_HPUTf String STRUCTOR_NAME
| AC_PUT Int
| AC_PUTf String
| AC_SPLIT Int (Int,Int)
| AC_SPLITf CHANNEL (CHANNEL,CHANNEL)
| AC_FORK Int ((Int,[Int],COMS),(Int,[Int],COMS))
| AC_FORKf CHANNEL ((CHANNEL,CHANNELS,COMS),(CHANNEL,CHANNELS,COMS))
| AC_PLUG [(Int,POLARITY,POLARITY)] ([Int],COMS) ([Int],COMS)
| AC_PLUGf CHANNEL (CHANNELS,COMS) (CHANNELS,COMS)
| AC_CLOSE Int
| AC_CLOSEf CHANNEL
| AC_HALT Int
| AC_HALTf CHANNEL
| AC_HCASE Int [COMS]
| AC_HCASEf CHANNEL [(STRUCTOR_NAME,[COM])]
| AC_RUN [(Int,POLARITY,Int)] String Int
| AC_RUNf String VARS (CHANNELS,CHANNELS)
| AC_CONCAT
| AC_REVERSE
deriving (Eq,Ord,Show,Read)
-}
| prashant007/AMPL | myAMPL/src/test_del/ConvAMPLTypes.hs | mit | 9,244 | 0 | 12 | 2,636 | 1,846 | 930 | 916 | 120 | 33 |
module BasicIO where
import System.IO
main = do
putStrLn "Input file name:"
inf <- getLine
putStrLn "Output file name:"
outf <- getLine
inh <- openFile inf ReadMode
outh <- openFile outf WriteMode
mainloop inh outh
hClose inh
hClose outh
mainloop :: Handle -> Handle -> IO ()
mainloop inh outh = do
ineof <- hIsEOF inh
if ineof then return ()
else do inpStr <- hGetLine inh
hPutStrLn outh inpStr
mainloop inh outh
| rockdragon/julia-programming | code/haskell/BasicIO.hs | mit | 471 | 0 | 11 | 130 | 164 | 72 | 92 | 19 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable, ScopedTypeVariables, LambdaCase #-}
module Test.Tasty.ExpectedFailure (expectFail, expectFailBecause, ignoreTest, ignoreTestBecause, wrapTest) where
import Test.Tasty.Options
import Test.Tasty.Runners
import Test.Tasty.Providers
#if MIN_VERSION_tasty(1,3,1)
import Test.Tasty.Providers.ConsoleFormat ( ResultDetailsPrinter(..) )
#endif
import Test.Tasty ( Timeout(..), askOption, localOption )
import Data.Typeable
import Data.Tagged
import Data.Maybe
import Data.Monoid
import Control.Exception ( displayException, evaluate, try, SomeException )
import Control.Concurrent.Timeout ( timeout )
data WrappedTest t = WrappedTest Timeout (IO Result -> IO Result) t
deriving Typeable
instance forall t. IsTest t => IsTest (WrappedTest t) where
run opts (WrappedTest tmout wrap t) prog =
-- Re-implement timeouts and exception handling *inside* the
-- wrapper. The primary location for timeout and exception
-- handling is in `executeTest` in the Tasty module's
-- Test.Tasty.Run implementation, but that handling is above the
-- level of this wrapper which therefore cannot absorb timeouts
-- and exceptions as *expected* failures.
let (pre,post) = case tmout of
NoTimeout -> (fmap Just, fromJust)
Timeout t s -> (timeout t, fromMaybe (timeoutResult t s))
timeoutResult t s =
Result { resultOutcome = Failure $ TestTimedOut t
, resultDescription = "Timed out after " <> s
, resultShortDescription = "TIMEOUT"
, resultTime = fromIntegral t
#if MIN_VERSION_tasty(1,3,1)
, resultDetailsPrinter = ResultDetailsPrinter . const . const $ return ()
#endif
}
exceptionResult e =
Result { resultOutcome = Failure $ TestThrewException e
, resultDescription = "Exception: " ++ displayException e
, resultShortDescription = "FAIL"
, resultTime = 0
#if MIN_VERSION_tasty(1,3,1)
, resultDetailsPrinter = ResultDetailsPrinter . const . const $ return ()
#endif
}
forceList = foldr seq ()
in wrap $ try (pre (run opts t prog
-- Ensure exceptions trying to show the
-- failure result are caught as "expected"
-- (see Issue #24 and note below)
>>= \r -> evaluate (forceList (resultDescription r) `seq`
forceList (resultShortDescription r) `seq`
resultOutcome r `seq`
r)))
>>= \case
Right r -> return (post r)
Left (e :: SomeException) -> return $ exceptionResult e
testOptions = retag (testOptions :: Tagged t [OptionDescription])
-- Note regarding post-run evaluate above:
--
-- The normal behavior of tasty-expected-failure is to run the
-- test, show the failure result, but then note that the failure
-- is expected and not count that against a test failure. If the
-- test unexpectedly succeeds, a message to that effect is
-- printed, but there is no resultDescription display of the test
-- inputs.
--
-- As of Tasty 1.4, the core tasty code was enhanced to fix issue
-- #280 in tasty: essentially the test result report is forced.
-- However, when used with tests expected to fail that also throw
-- exceptions when attempting to show the result, the forcing in
-- Tasty 1.4 causes an exception to be thrown after the
-- tasty-expected-failure protections but still within the realm
-- where tasty would count it as a failure. The fix here attempts
-- to `show` the failing value here in tasty-expected-failure; if
-- an exception occurs during that `show` then code here will
-- report it (somewhat incorrectly) via the exceptionResult above,
-- where tasty's subsequent forcing of the text of that
-- exceptionResult no longer causes an exception *there*. Since
-- the value is only shown if there was already a failure, the
-- reason is misleading but the outcome is consistent with the
-- intent of tasty-expected-failure handling.
-- | 'wrapTest' allows you to modify the behaviour of the tests, e.g. by
-- modifying the result or not running the test at all. It is used to implement
-- 'expectFail' and 'ignoreTest'.
wrapTest :: (IO Result -> IO Result) -> TestTree -> TestTree
wrapTest wrap = go
where
go (SingleTest n t) =
askOption $ \(old_timeout :: Timeout) ->
localOption NoTimeout $ -- disable Tasty's timeout; handled here instead
SingleTest n (WrappedTest old_timeout wrap t)
go (TestGroup name tests) = TestGroup name (map go tests)
go (PlusTestOptions plus tree) = PlusTestOptions plus (go tree)
go (WithResource spec gentree) = WithResource spec (go . gentree)
go (AskOptions f) = AskOptions (go . f)
-- | Marks all tests in the given test suite as expected failures: The tests will
-- still be run, but if they succeed, it is reported as a test suite failure,
-- and conversely the failure of the test is ignored.
--
-- Any output of a failing test is still printed.
--
-- This is useful if, in a test driven development, tests are written and
-- commited to the master branch before their implementation: It allows the
-- tests to fail (as expected) without making the whole test suite fail.
--
-- Similarly, regressions and bugs can be documented in the test suite this
-- way, until a fix is committed, and if a fix is applied (intentionally or
-- accidentally), the test suite will remind you to remove the 'expectFail'
-- marker.
expectFail :: TestTree -> TestTree
expectFail = expectFail' Nothing
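-- An illustrative usage sketch (the test names, the ticket number, and the
-- use of tasty-hunit are assumptions for this example, not part of the
-- module itself):
--
-- > import Test.Tasty
-- > import Test.Tasty.HUnit
-- >
-- > tests :: TestTree
-- > tests = testGroup "examples"
-- >   [ expectFailBecause "#123 is not fixed yet" $
-- >       testCase "broken feature" (2 + 2 @?= (5 :: Int))
-- >   , ignoreTestBecause "flaky on CI" $
-- >       testCase "flaky test" (return ())
-- >   ]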
-- | Like 'expectFail' but with additional comment
expectFailBecause :: String -> TestTree -> TestTree
expectFailBecause reason = expectFail' (Just reason)
expectFail' :: Maybe String -> TestTree -> TestTree
expectFail' reason = wrapTest (fmap change)
where
change r
| resultSuccessful r
= r { resultOutcome = Failure TestFailed
, resultDescription = resultDescription r <> " (unexpected success" <> comment <> ")"
, resultShortDescription = resultShortDescription r <> " (unexpected" <> comment <> ")"
}
| otherwise
= r { resultOutcome = Success
, resultDescription = resultDescription r <> " (expected failure)"
, resultShortDescription = resultShortDescription r <> " (expected" <> comment <> ")"
}
"" `append` s = s
t `append` s | last t == '\n' = t ++ s ++ "\n"
| otherwise = t ++ "\n" ++ s
comment = maybe "" (mappend ": ") reason
-- | Prevents the tests from running and reports them as succeeding.
--
-- This may be desirable as an alternative to commenting out the tests. This
-- way, they are still typechecked (preventing bitrot), and the test report
-- lists them, which serves as a reminder that there are ignored tests.
--
-- Note that any setup/teardown actions executed by 'Test.Tasty.withResource'
-- are still executed. You can bypass this manually as in the following example:
--
-- @
-- askOption $ \\(MyFlag b) -> if b
-- then withResource mytest
-- else ignoreTest . mytest $ return junkvalue
-- @
ignoreTest :: TestTree -> TestTree
ignoreTest = ignoreTest' Nothing
-- | Like 'ignoreTest' but with additional comment
ignoreTestBecause :: String -> TestTree -> TestTree
ignoreTestBecause reason = ignoreTest' (Just reason)
ignoreTest' :: Maybe String -> TestTree -> TestTree
ignoreTest' reason = wrapTest $ const $ return $
(testPassed $ fromMaybe "" reason) {
resultShortDescription = "IGNORED"
}
| nomeata/tasty-expected-failure | Test/Tasty/ExpectedFailure.hs | mit | 7,983 | 1 | 23 | 2,106 | 1,238 | 685 | 553 | 77 | 5 |
module FractalMusic where
import HaskoreExamples
import Random
type Vector = [Float]
type Matrix = [Vector]
type AT = Vector -> Vector
type IFS = [AT]
-- First define some general matrix operations.
-- These will facilitate moving to higher dimensions later.
vadd :: Vector -> Vector -> Vector
vadd = zipWith (+)
vvmult :: Vector -> Vector -> Float
vvmult v1 v2 = foldl (+) 0 (zipWith (*) v1 v2)
mvmult :: Matrix -> Vector -> Vector
mvmult m v = map (vvmult v) m
cvmult :: Float -> Vector -> Vector
cvmult c v = map (c*) v
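-- A quick illustration of these operations (assumed, not part of the
-- original file):
--
--   [1,2,3] `vadd` [10,20,30]    == [11,22,33]
--   [1,2] `vvmult` [3,4]         == 11
--   [[1,0],[0,1]] `mvmult` [3,4] == [3,4]
--   2 `cvmult` [1,2,3]           == [2,4,6]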
---------------------------------------------------------------------
-- The following simulates the Iterated Function System for the
-- Sierpinski Triangle as described in Barnsley's "Desktop Fractal
-- Design Handbook".
-- First the affine transformations:
w1,w2,w3 :: AT
w1 v = (cvmult 0.01 ([[50,0],[0,50],[50,0]] `mvmult` v))
`vadd` [8,8,8]
w2 v = (cvmult 0.01 ([[50,0],[0,50],[50,0]] `mvmult` v))
`vadd` [30,16,2]
w3 v = (cvmult 0.01 ([[50,0],[0,50],[50,0]] `mvmult` v))
`vadd` [20,40,30]
init0 :: Vector
init0 = [0,0,0]
-- Now we have an Iterated Function System:
ws :: IFS
ws = [w1,w2,w3]
-- And here is the result:
result = scanl f init0 random
where f init r = (ws!!r) init
-- where "random" is a list of random indices in the range 0-2,
-- which simulates flipping the coin in Barnsley.
-- "randomInts" is imported from the Random.hs library.
random = map (`mod` 3) (randomInts 1 3)
--------
mkNote [a,b,c] = Rest (b/20) :+: Note (pitch (round a)) (c/20) []
sourceToHaskore :: [[Float]] -> Music
sourceToHaskore s = chord (map mkNote s)
sth n = sourceToHaskore (take n result)
| Zolomon/edan40-functional-music | Fractals.hs | mit | 1,737 | 0 | 11 | 394 | 611 | 361 | 250 | 33 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module GitParsers where
import Control.Applicative ((<*>),(<$>))
import Control.Monad.Identity
import Text.Parsec
import Data.Maybe (fromMaybe)
import Types
fromBaseN :: (Num a, Ord a, Read a) => a -> String -> a
fromBaseN base = foldl (\a x -> a * base + (read [x])) 0
fromOctal :: (Num a, Ord a, Read a) => String -> a
fromOctal = fromBaseN 8
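-- For example (illustrative): fromOctal "755" == 7*64 + 5*8 + 5 == 493.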
octal :: Stream s m Char => ParsecT s u m Integer
octal = fromOctal <$> many1 octDigit
space1 :: Stream s m Char => Monad m => ParsecT s u m [Char]
space1 = many1 (char ' ')
dropLine :: Stream s m Char => ParsecT s u m [Char]
dropLine = anyChar `manyTill` (lookAhead endOfLine)
labelNum :: Stream s m Char => String -> ParsecT s u m Int
labelNum label = do { try (do _ <- string label; return ()); _ <- char ':'; _ <- many space; res <- many1 digit; return (read res) }
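-- Illustrative behaviour (assumed, not part of the original module):
--   parse (labelNum "count") "" "count: 42" parses the value 42;
--   parse (labelNum "count") "" "size: 42" fails without consuming input,
--   which is what lets it be used inside `choice` below.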
accGitObjects :: ParsecT String GitObjectStats Identity GitObjectStats
accGitObjects = do { res <- fields `endBy1` (char '\n'); getState }
where store :: String -> (Int -> GitObjectStats -> GitObjectStats) -> ParsecT String GitObjectStats Identity GitObjectStats
store label f = do { val <- (labelNum label); modifyState (f val); getState }
fields = (choice [
store "size-pack" (\v s -> s { size_pack = v }),
store "size-garbage" (\v s -> s { size_garbage = v }),
store "size" (\v s -> s { size = v }),
store "prune-packable" (\v s -> s { prune_packable = v }),
store "packs" (\v s -> s { packs = v }),
store "in-pack" (\v s -> s { in_pack = v }),
store "garbage" (\v s -> s { garbage = v }),
store "count" (\v s -> s { Types.count = v })
])
parseGitObjects :: String -> Either ParseError GitObjectStats
parseGitObjects input = do
let init = GitObjectStats 0 0 0 0 0 0 0 0
runParser accGitObjects init "" input
accOrphans :: ParsecT String GitOrphanList Identity GitOrphanList
accOrphans = do { res <- fields `endBy` (char '\n'); modifyState reverse; getState }
where accumulate :: String -> (Hash -> GitOrphan) -> ParsecT String GitOrphanList Identity GitOrphanList
accumulate kind f = do { try (do _ <- string "unreachable "; _ <- string kind; return ()); _ <- many space; hash <- parseHash; modifyState ((f hash) :); getState}
fields = (choice [
accumulate "blob" OrphanBlob,
accumulate "commit" OrphanCommit
])
parseHash :: Stream s m Char => ParsecT s u m [Char]
parseHash = many1 hexDigit
parseGitOrphanList :: String -> Either ParseError GitOrphanList
parseGitOrphanList input = do
let init = []
runParser accOrphans init "" input
parseHashKindSize :: Monad m => String -> (Hash -> Size -> GitObject) -> ParsecT String u m GitObject
parseHashKindSize kind f = try (do
hash <- parseHash
_ <- space1
_ <- string kind
_ <- space1
size <- (read <$> many1 digit) <|> ((\x -> (-1)) <$> char '-')
return (f hash size))
parseKindHashSize :: Monad m => String -> (Hash -> Size -> GitObject) -> ParsecT String u m GitObject
parseKindHashSize kind f = do
_ <- try (string kind)
_ <- space1
hash <- parseHash
_ <- space1
size <- (read <$> many1 digit) <|> ((\x -> (-1)) <$> char '-')
return (f hash size)
objectDesc :: Monad m => (String -> (Hash -> Size -> GitObject) -> ParsecT String u m GitObject) -> ParsecT String u m GitObject
objectDesc f = (choice [
f "blob" GitBlobObject,
f "commit" simpleGitCommit,
f "tree" GitTreeObject
])
accObjects :: ParsecT String [GitObject] Identity [GitObject]
accObjects = do { _ <- (do o <- (objectDesc parseHashKindSize); modifyState (o:); return ()) `endBy` (char '\n'); modifyState reverse; getState }
parseGitObjectList :: String -> Either ParseError [GitObject]
parseGitObjectList input = do
let init = []
runParser accObjects init "" input
fileName :: Stream s m Char => ParsecT s u m String
fileName = many1 $ choice (alphaNum : (char <$> "-_ ."))
parseTreeLine :: ParsecT String u Identity GitTreeEntry
parseTreeLine = do
mode <- octal
_ <- space1
desc <- (objectDesc parseKindHashSize)
_ <- char '\t'
name <- fileName
return $ GitTreeEntry mode desc name
parseTree :: String -> Either ParseError [GitTreeEntry]
parseTree x = parse (parseTreeLine `endBy` (char '\n')) "" x
catCommitLines :: ParsecT String GitObject Identity GitObject
catCommitLines = do
let options = (choice [
f "tree" parseHash (\v s -> s { commitTree = Just v })
, f "parent" parseHash (\v s -> s { commitParents = (commitParents s) ++ [v] })
, f "author" dropLine (\v s -> s)
, f "committer" dropLine (\v s -> s)
])
_ <- options `endBy1` (char '\n')
getState
where f :: Stream s m Char => String -> ParsecT s u m v -> (v -> u -> u) -> ParsecT s u m u
f label parser update = do { _ <- try (string label); _ <- space1; v <- parser; modifyState (update v); getState }
parseCatCommit :: GitObject -> String -> Either ParseError GitObject
parseCatCommit init x = runParser catCommitLines init "" x
| blast-hardcheese/git-dag-graph | src/GitParsers.hs | mit | 5,462 | 0 | 20 | 1,559 | 2,112 | 1,080 | 1,032 | -1 | -1 |
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
module Hpack.Config (
packageConfig
, readPackageConfig
, renamePackage
, packageDependencies
, package
, section
, Package(..)
, Dependency(..)
, AddSource(..)
, GitUrl
, GitRef
, GhcOption
, CustomSetup(..)
, Section(..)
, Library(..)
, Executable(..)
, Conditional(..)
, Flag(..)
, SourceRepository(..)
#ifdef TEST
, renameDependencies
, HasFieldNames(..)
, CaptureUnknownFields(..)
, Empty(..)
, getModules
, determineModules
, BuildType(..)
#endif
) where
import Control.Applicative
import Control.Monad.Compat
import Data.Aeson.Types
import Data.Data
import Data.Map.Lazy (Map)
import qualified Data.Map.Lazy as Map
import qualified Data.HashMap.Lazy as HashMap
import Data.List.Compat (nub, (\\), sortBy, isPrefixOf)
import Data.Maybe
import Data.Ord
import Data.String
import Data.Text (Text)
import qualified Data.Text as T
import GHC.Generics (Generic, Rep)
import Prelude ()
import Prelude.Compat
import System.Directory
import System.FilePath
import Hpack.GenericsUtil
import Hpack.Util
import Hpack.Yaml
package :: String -> String -> Package
package name version = Package {
packageName = name
, packageVersion = version
, packageSynopsis = Nothing
, packageDescription = Nothing
, packageHomepage = Nothing
, packageBugReports = Nothing
, packageCategory = Nothing
, packageStability = Nothing
, packageAuthor = []
, packageMaintainer = []
, packageCopyright = []
, packageBuildType = Simple
, packageLicense = Nothing
, packageLicenseFile = []
, packageTestedWith = Nothing
, packageFlags = []
, packageExtraSourceFiles = []
, packageDataFiles = []
, packageSourceRepository = Nothing
, packageCustomSetup = Nothing
, packageLibrary = Nothing
, packageExecutables = []
, packageTests = []
, packageBenchmarks = []
}
renamePackage :: String -> Package -> Package
renamePackage name p@Package{..} = p {
packageName = name
, packageExecutables = map (renameDependencies packageName name) packageExecutables
, packageTests = map (renameDependencies packageName name) packageTests
, packageBenchmarks = map (renameDependencies packageName name) packageBenchmarks
}
renameDependencies :: String -> String -> Section a -> Section a
renameDependencies old new sect@Section{..} = sect {sectionDependencies = map rename sectionDependencies, sectionConditionals = map renameConditional sectionConditionals}
where
rename dep
| dependencyName dep == old = dep {dependencyName = new}
| otherwise = dep
renameConditional :: Conditional -> Conditional
renameConditional (Conditional condition then_ else_) = Conditional condition (renameDependencies old new then_) (renameDependencies old new <$> else_)
packageDependencies :: Package -> [Dependency]
packageDependencies Package{..} = nub . sortBy (comparing (lexicographically . dependencyName)) $
(concatMap sectionDependencies packageExecutables)
++ (concatMap sectionDependencies packageTests)
++ (concatMap sectionDependencies packageBenchmarks)
++ maybe [] sectionDependencies packageLibrary
section :: a -> Section a
section a = Section a [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] Nothing [] []
packageConfig :: FilePath
packageConfig = "package.yaml"
githubBaseUrl :: String
githubBaseUrl = "https://github.com/"
#if MIN_VERSION_aeson(1,0,0)
genericParseJSON_ :: forall a. (Generic a, GFromJSON Zero (Rep a), HasTypeName a) => Value -> Parser a
#else
genericParseJSON_ :: forall a. (Generic a, GFromJSON (Rep a), HasTypeName a) => Value -> Parser a
#endif
genericParseJSON_ = genericParseJSON defaultOptions {fieldLabelModifier = hyphenize name}
where
name :: String
name = typeName (Proxy :: Proxy a)
hyphenize :: String -> String -> String
hyphenize name =
#if MIN_VERSION_aeson(0,10,0)
camelTo2
#else
camelTo
#endif
'-' . drop (length name) . dropWhile (== '_')
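-- For instance (illustrative): with name == "ExecutableSection",
-- hyphenize name "executableSectionMain" drops the 17-character prefix and
-- yields "main", and "executableSectionOtherModules" becomes "other-modules".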
type FieldName = String
class HasFieldNames a where
fieldNames :: Proxy a -> [FieldName]
default fieldNames :: (HasTypeName a, Selectors (Rep a)) => Proxy a -> [String]
fieldNames proxy = map (hyphenize $ typeName proxy) (selectors proxy)
ignoreUnderscoredUnknownFields :: Proxy a -> Bool
ignoreUnderscoredUnknownFields _ = False
data CaptureUnknownFields a = CaptureUnknownFields {
captureUnknownFieldsFields :: [FieldName]
, captureUnknownFieldsValue :: a
} deriving (Eq, Show, Generic)
captureUnknownFields :: forall a. (HasFieldNames a, FromJSON a) => Value -> Parser (CaptureUnknownFields a)
captureUnknownFields v = CaptureUnknownFields unknown <$> parseJSON v
where
unknown = getUnknownFields v (Proxy :: Proxy a)
instance (HasFieldNames a, FromJSON a) => FromJSON (CaptureUnknownFields (Section a)) where
parseJSON v = do
(unknownFields, sect) <- toSection <$> parseJSON v <*> parseJSON v
return (CaptureUnknownFields (unknownSectionFields ++ unknownFields) sect)
where
unknownSectionFields = getUnknownFields v (Proxy :: Proxy (Section a))
instance FromJSON (CaptureUnknownFields CustomSetupSection) where
parseJSON = captureUnknownFields
instance FromJSON (CaptureUnknownFields FlagSection) where
parseJSON = captureUnknownFields
getUnknownFields :: forall a. HasFieldNames a => Value -> Proxy a -> [FieldName]
getUnknownFields v _ = case v of
Object o -> ignoreUnderscored unknown
where
unknown = keys \\ fields
keys = map T.unpack (HashMap.keys o)
fields = fieldNames (Proxy :: Proxy a)
ignoreUnderscored
| ignoreUnderscoredUnknownFields (Proxy :: Proxy a) = filter (not . isPrefixOf "_")
| otherwise = id
_ -> []
data CustomSetupSection = CustomSetupSection {
customSetupSectionDependencies :: Maybe (List Dependency)
} deriving (Eq, Show, Generic)
instance HasFieldNames CustomSetupSection
instance FromJSON CustomSetupSection where
parseJSON = genericParseJSON_
data LibrarySection = LibrarySection {
librarySectionExposed :: Maybe Bool
, librarySectionExposedModules :: Maybe (List String)
, librarySectionOtherModules :: Maybe (List String)
, librarySectionReexportedModules :: Maybe (List String)
} deriving (Eq, Show, Generic)
instance HasFieldNames LibrarySection
instance FromJSON LibrarySection where
parseJSON = genericParseJSON_
data ExecutableSection = ExecutableSection {
executableSectionMain :: FilePath
, executableSectionOtherModules :: Maybe (List String)
} deriving (Eq, Show, Generic)
instance HasFieldNames ExecutableSection
instance FromJSON ExecutableSection where
parseJSON = genericParseJSON_
data CommonOptions = CommonOptions {
commonOptionsSourceDirs :: Maybe (List FilePath)
, commonOptionsDependencies :: Maybe (List Dependency)
, commonOptionsDefaultExtensions :: Maybe (List String)
, commonOptionsOtherExtensions :: Maybe (List String)
, commonOptionsGhcOptions :: Maybe (List GhcOption)
, commonOptionsGhcProfOptions :: Maybe (List GhcProfOption)
, commonOptionsGhcjsOptions :: Maybe (List GhcjsOption)
, commonOptionsCppOptions :: Maybe (List CppOption)
, commonOptionsCcOptions :: Maybe (List CcOption)
, commonOptionsCSources :: Maybe (List FilePath)
, commonOptionsJsSources :: Maybe (List FilePath)
, commonOptionsExtraLibDirs :: Maybe (List FilePath)
, commonOptionsExtraLibraries :: Maybe (List FilePath)
, commonOptionsIncludeDirs :: Maybe (List FilePath)
, commonOptionsInstallIncludes :: Maybe (List FilePath)
, commonOptionsLdOptions :: Maybe (List LdOption)
, commonOptionsBuildable :: Maybe Bool
, commonOptionsWhen :: Maybe (List ConditionalSection)
, commonOptionsBuildTools :: Maybe (List Dependency)
} deriving (Eq, Show, Generic)
instance HasFieldNames CommonOptions
instance FromJSON CommonOptions where
parseJSON = genericParseJSON_
data ConditionalSection = ThenElseConditional (CaptureUnknownFields ThenElse) | FlatConditional (CaptureUnknownFields (Section Condition))
deriving (Eq, Show)
instance FromJSON ConditionalSection where
parseJSON v
| hasKey "then" v || hasKey "else" v = ThenElseConditional <$> parseJSON v
| otherwise = FlatConditional <$> parseJSON v
hasKey :: Text -> Value -> Bool
hasKey key (Object o) = HashMap.member key o
hasKey _ _ = False
newtype Condition = Condition {
conditionCondition :: String
} deriving (Eq, Show, Generic)
instance FromJSON Condition where
parseJSON = genericParseJSON_
instance HasFieldNames Condition
data ThenElse = ThenElse {
_thenElseCondition :: String
, _thenElseThen :: (CaptureUnknownFields (Section Empty))
, _thenElseElse :: (CaptureUnknownFields (Section Empty))
} deriving (Eq, Show, Generic)
instance FromJSON (CaptureUnknownFields ThenElse) where
parseJSON = captureUnknownFields
instance HasFieldNames ThenElse
instance FromJSON ThenElse where
parseJSON = genericParseJSON_
data Empty = Empty
deriving (Eq, Show)
instance FromJSON Empty where
parseJSON _ = return Empty
instance HasFieldNames Empty where
fieldNames _ = []
-- From Cabal (the library), copied here to avoid a dependency on Cabal.
data BuildType
= Simple
| Configure
| Make
| Custom
deriving (Eq, Show, Generic)
instance FromJSON BuildType where
parseJSON = withText "String" $ \case
"Simple" -> return Simple
"Configure" -> return Configure
"Make" -> return Make
"Custom" -> return Custom
_ -> fail "build-type must be one of: Simple, Configure, Make, Custom"
type ExecutableConfig = CaptureUnknownFields (Section ExecutableSection)
data PackageConfig = PackageConfig {
packageConfigName :: Maybe String
, packageConfigVersion :: Maybe String
, packageConfigSynopsis :: Maybe String
, packageConfigDescription :: Maybe String
, packageConfigHomepage :: Maybe (Maybe String)
, packageConfigBugReports :: Maybe (Maybe String)
, packageConfigCategory :: Maybe String
, packageConfigStability :: Maybe String
, packageConfigAuthor :: Maybe (List String)
, packageConfigMaintainer :: Maybe (List String)
, packageConfigCopyright :: Maybe (List String)
, packageConfigBuildType :: Maybe BuildType
, packageConfigLicense :: Maybe String
, packageConfigLicenseFile :: Maybe (List String)
, packageConfigTestedWith :: Maybe String
, packageConfigFlags :: Maybe (Map String (CaptureUnknownFields FlagSection))
, packageConfigExtraSourceFiles :: Maybe (List FilePath)
, packageConfigDataFiles :: Maybe (List FilePath)
, packageConfigGithub :: Maybe Text
, packageConfigGit :: Maybe String
, packageConfigCustomSetup :: Maybe (CaptureUnknownFields CustomSetupSection)
, packageConfigLibrary :: Maybe (CaptureUnknownFields (Section LibrarySection))
, packageConfigExecutable :: Maybe ExecutableConfig
, packageConfigExecutables :: Maybe (Map String ExecutableConfig)
, packageConfigTests :: Maybe (Map String (CaptureUnknownFields (Section ExecutableSection)))
, packageConfigBenchmarks :: Maybe (Map String (CaptureUnknownFields (Section ExecutableSection)))
} deriving (Eq, Show, Generic)
instance HasFieldNames PackageConfig where
ignoreUnderscoredUnknownFields _ = True
instance FromJSON PackageConfig where
parseJSON value = handleNullValues <$> genericParseJSON_ value
where
handleNullValues :: PackageConfig -> PackageConfig
handleNullValues =
ifNull "homepage" (\p -> p {packageConfigHomepage = Just Nothing})
. ifNull "bug-reports" (\p -> p {packageConfigBugReports = Just Nothing})
ifNull :: String -> (a -> a) -> a -> a
ifNull name f
| isNull name value = f
| otherwise = id
isNull :: String -> Value -> Bool
isNull name value = case parseMaybe p value of
Just Null -> True
_ -> False
where
p = parseJSON >=> (.: fromString name)
readPackageConfig :: FilePath -> IO (Either String ([String], Package))
readPackageConfig file = do
r <- decodeYaml file
case r of
Left err -> return (Left err)
Right config -> do
dir <- takeDirectory <$> canonicalizePath file
Right <$> mkPackage dir config
data Dependency = Dependency {
dependencyName :: String
, dependencyGitRef :: Maybe AddSource
} deriving (Eq, Show, Ord, Generic)
instance IsString Dependency where
fromString name = Dependency name Nothing
instance FromJSON Dependency where
parseJSON v = case v of
String _ -> fromString <$> parseJSON v
Object o -> addSourceDependency o
_ -> typeMismatch "String or an Object" v
where
addSourceDependency o = Dependency <$> name <*> (Just <$> (local <|> git))
where
name :: Parser String
name = o .: "name"
local :: Parser AddSource
local = Local <$> o .: "path"
git :: Parser AddSource
git = GitRef <$> url <*> ref <*> subdir
url :: Parser String
url =
((githubBaseUrl ++) <$> o .: "github")
<|> (o .: "git")
<|> fail "neither key \"git\" nor key \"github\" present"
ref :: Parser String
ref = o .: "ref"
subdir :: Parser (Maybe FilePath)
subdir = o .:? "subdir"
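-- For illustration, the dependency shapes this parser accepts look roughly
-- like the following in package.yaml (names, paths and refs are made up):
--
--   dependencies:
--     - base
--     - name: foo
--       path: ../foo
--     - name: bar
--       github: someuser/bar
--       ref: master
--       subdir: bar-core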
data AddSource = GitRef GitUrl GitRef (Maybe FilePath) | Local FilePath
deriving (Eq, Show, Ord)
type GitUrl = String
type GitRef = String
data Package = Package {
packageName :: String
, packageVersion :: String
, packageSynopsis :: Maybe String
, packageDescription :: Maybe String
, packageHomepage :: Maybe String
, packageBugReports :: Maybe String
, packageCategory :: Maybe String
, packageStability :: Maybe String
, packageAuthor :: [String]
, packageMaintainer :: [String]
, packageCopyright :: [String]
, packageBuildType :: BuildType
, packageLicense :: Maybe String
, packageLicenseFile :: [FilePath]
, packageTestedWith :: Maybe String
, packageFlags :: [Flag]
, packageExtraSourceFiles :: [FilePath]
, packageDataFiles :: [FilePath]
, packageSourceRepository :: Maybe SourceRepository
, packageCustomSetup :: Maybe CustomSetup
, packageLibrary :: Maybe (Section Library)
, packageExecutables :: [Section Executable]
, packageTests :: [Section Executable]
, packageBenchmarks :: [Section Executable]
} deriving (Eq, Show)
data CustomSetup = CustomSetup {
customSetupDependencies :: [Dependency]
} deriving (Eq, Show)
data Library = Library {
libraryExposed :: Maybe Bool
, libraryExposedModules :: [String]
, libraryOtherModules :: [String]
, libraryReexportedModules :: [String]
} deriving (Eq, Show)
data Executable = Executable {
executableName :: String
, executableMain :: FilePath
, executableOtherModules :: [String]
} deriving (Eq, Show)
data Section a = Section {
sectionData :: a
, sectionSourceDirs :: [FilePath]
, sectionDependencies :: [Dependency]
, sectionDefaultExtensions :: [String]
, sectionOtherExtensions :: [String]
, sectionGhcOptions :: [GhcOption]
, sectionGhcProfOptions :: [GhcProfOption]
, sectionGhcjsOptions :: [GhcjsOption]
, sectionCppOptions :: [CppOption]
, sectionCcOptions :: [CcOption]
, sectionCSources :: [FilePath]
, sectionJsSources :: [FilePath]
, sectionExtraLibDirs :: [FilePath]
, sectionExtraLibraries :: [FilePath]
, sectionIncludeDirs :: [FilePath]
, sectionInstallIncludes :: [FilePath]
, sectionLdOptions :: [LdOption]
, sectionBuildable :: Maybe Bool
, sectionConditionals :: [Conditional]
, sectionBuildTools :: [Dependency]
} deriving (Eq, Show, Functor, Foldable, Traversable)
data Conditional = Conditional {
conditionalCondition :: String
, conditionalThen :: Section ()
, conditionalElse :: Maybe (Section ())
} deriving (Eq, Show)
instance HasFieldNames a => HasFieldNames (Section a) where
fieldNames Proxy = fieldNames (Proxy :: Proxy a) ++ fieldNames (Proxy :: Proxy CommonOptions)
ignoreUnderscoredUnknownFields _ = ignoreUnderscoredUnknownFields (Proxy :: Proxy a)
data FlagSection = FlagSection {
_flagSectionDescription :: Maybe String
, _flagSectionManual :: Bool
, _flagSectionDefault :: Bool
} deriving (Eq, Show, Generic)
instance HasFieldNames FlagSection
instance FromJSON FlagSection where
parseJSON = genericParseJSON_
data Flag = Flag {
flagName :: String
, flagDescription :: Maybe String
, flagManual :: Bool
, flagDefault :: Bool
} deriving (Eq, Show)
toFlag :: (String, FlagSection) -> Flag
toFlag (name, FlagSection description manual def) = Flag name description manual def
data SourceRepository = SourceRepository {
sourceRepositoryUrl :: String
, sourceRepositorySubdir :: Maybe String
} deriving (Eq, Show)
mkPackage :: FilePath -> (CaptureUnknownFields (Section PackageConfig)) -> IO ([String], Package)
mkPackage dir (CaptureUnknownFields unknownFields globalOptions@Section{sectionData = PackageConfig{..}}) = do
libraryResult <- mapM (toLibrary dir packageName_ globalOptions) mLibrarySection
let
executableWarnings :: [String]
executableSections :: [(String, Section ExecutableSection)]
(executableWarnings, executableSections) = (warnings, map (fmap captureUnknownFieldsValue) sections)
where
sections = case (packageConfigExecutable, packageConfigExecutables) of
(Nothing, Nothing) -> []
(Just executable, _) -> [(packageName_, executable)]
(Nothing, Just executables) -> Map.toList executables
warnings = ignoringExecutablesWarning ++ unknownFieldWarnings
ignoringExecutablesWarning = case (packageConfigExecutable, packageConfigExecutables) of
(Just _, Just _) -> ["Ignoring field \"executables\" in favor of \"executable\""]
_ -> []
unknownFieldWarnings = formatUnknownSectionFields (isJust packageConfigExecutables) "executable" sections
mLibrary :: Maybe (Section Library)
mLibrary = fmap snd libraryResult
libraryWarnings :: [String]
libraryWarnings = maybe [] fst libraryResult
(executablesWarnings, executables) <- toExecutables dir globalOptions executableSections
(testsWarnings, tests) <- toExecutables dir globalOptions (map (fmap captureUnknownFieldsValue) testsSections)
(benchmarksWarnings, benchmarks) <- toExecutables dir globalOptions (map (fmap captureUnknownFieldsValue) benchmarkSections)
licenseFileExists <- doesFileExist (dir </> "LICENSE")
missingSourceDirs <- nub . sort <$> filterM (fmap not <$> doesDirectoryExist . (dir </>)) (
maybe [] sectionSourceDirs mLibrary
++ concatMap sectionSourceDirs executables
++ concatMap sectionSourceDirs tests
++ concatMap sectionSourceDirs benchmarks
)
(extraSourceFilesWarnings, extraSourceFiles) <-
expandGlobs "extra-source-files" dir (fromMaybeList packageConfigExtraSourceFiles)
(dataFilesWarnings, dataFiles) <-
expandGlobs "data-files" dir (fromMaybeList packageConfigDataFiles)
let defaultBuildType :: BuildType
defaultBuildType = maybe Simple (const Custom) mCustomSetup
configLicenseFiles :: Maybe (List String)
configLicenseFiles = packageConfigLicenseFile <|> do
guard licenseFileExists
Just (List ["LICENSE"])
pkg = Package {
packageName = packageName_
, packageVersion = fromMaybe "0.0.0" packageConfigVersion
, packageSynopsis = packageConfigSynopsis
, packageDescription = packageConfigDescription
, packageHomepage = homepage
, packageBugReports = bugReports
, packageCategory = packageConfigCategory
, packageStability = packageConfigStability
, packageAuthor = fromMaybeList packageConfigAuthor
, packageMaintainer = fromMaybeList packageConfigMaintainer
, packageCopyright = fromMaybeList packageConfigCopyright
, packageBuildType = fromMaybe defaultBuildType packageConfigBuildType
, packageLicense = packageConfigLicense
, packageLicenseFile = fromMaybeList configLicenseFiles
, packageTestedWith = packageConfigTestedWith
, packageFlags = flags
, packageExtraSourceFiles = extraSourceFiles
, packageDataFiles = dataFiles
, packageSourceRepository = sourceRepository
, packageCustomSetup = mCustomSetup
, packageLibrary = mLibrary
, packageExecutables = executables
, packageTests = tests
, packageBenchmarks = benchmarks
}
warnings =
formatUnknownFields "package description" unknownFields
++ nameWarnings
++ flagWarnings
++ maybe [] (formatUnknownFields "custom-setup section") (captureUnknownFieldsFields <$> packageConfigCustomSetup)
++ maybe [] (formatUnknownFields "library section") (captureUnknownFieldsFields <$> packageConfigLibrary)
++ formatUnknownSectionFields True "test" testsSections
++ formatUnknownSectionFields True "benchmark" benchmarkSections
++ formatMissingSourceDirs missingSourceDirs
++ libraryWarnings
++ executableWarnings
++ executablesWarnings
++ testsWarnings
++ benchmarksWarnings
++ extraSourceFilesWarnings
++ dataFilesWarnings
return (warnings, pkg)
where
nameWarnings :: [String]
packageName_ :: String
(nameWarnings, packageName_) = case packageConfigName of
Nothing -> let inferredName = takeBaseName dir in
(["Package name not specified, inferred " ++ show inferredName], inferredName)
Just n -> ([], n)
mCustomSetup :: Maybe CustomSetup
mCustomSetup = toCustomSetup <$> mCustomSetupSection
testsSections :: [(String, CaptureUnknownFields (Section ExecutableSection))]
testsSections = toList packageConfigTests
benchmarkSections :: [(String, CaptureUnknownFields (Section ExecutableSection))]
benchmarkSections = toList packageConfigBenchmarks
(flagWarnings, flags) = (concatMap formatUnknownFlagFields xs, map (toFlag . fmap captureUnknownFieldsValue) xs)
where
xs :: [(String, CaptureUnknownFields FlagSection)]
xs = toList packageConfigFlags
formatUnknownFlagFields :: (String, CaptureUnknownFields a) -> [String]
formatUnknownFlagFields (name, fields) = map f (captureUnknownFieldsFields fields)
where f field = "Ignoring unknown field " ++ show field ++ " for flag " ++ show name
toList :: Maybe (Map String a) -> [(String, a)]
toList = Map.toList . fromMaybe mempty
mCustomSetupSection :: Maybe CustomSetupSection
mCustomSetupSection = captureUnknownFieldsValue <$> packageConfigCustomSetup
mLibrarySection :: Maybe (Section LibrarySection)
mLibrarySection = captureUnknownFieldsValue <$> packageConfigLibrary
formatUnknownFields :: String -> [FieldName] -> [String]
formatUnknownFields name = map f . sort
where
f field = "Ignoring unknown field " ++ show field ++ " in " ++ name
formatUnknownSectionFields :: Bool -> String -> [(String, CaptureUnknownFields a)] -> [String]
formatUnknownSectionFields showSect sectionType = concatMap f . map (fmap captureUnknownFieldsFields)
where
f :: (String, [String]) -> [String]
f (sect, fields) = formatUnknownFields
(sectionType ++ " section" ++ if showSect then " " ++ show sect else "")
fields
formatMissingSourceDirs = map f
where
f name = "Specified source-dir " ++ show name ++ " does not exist"
sourceRepository :: Maybe SourceRepository
sourceRepository = github <|> (`SourceRepository` Nothing) <$> packageConfigGit
github :: Maybe SourceRepository
github = parseGithub <$> packageConfigGithub
where
parseGithub :: Text -> SourceRepository
parseGithub input = case map T.unpack $ T.splitOn "/" input of
[user, repo, subdir] ->
SourceRepository (githubBaseUrl ++ user ++ "/" ++ repo) (Just subdir)
_ -> SourceRepository (githubBaseUrl ++ T.unpack input) Nothing
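-- For example (illustrative values): "user/repo" maps to
--   SourceRepository "https://github.com/user/repo" Nothing
-- while "user/repo/subdir" maps to
--   SourceRepository "https://github.com/user/repo" (Just "subdir")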
homepage :: Maybe String
homepage = case packageConfigHomepage of
Just Nothing -> Nothing
_ -> join packageConfigHomepage <|> fromGithub
where
fromGithub = (++ "#readme") . sourceRepositoryUrl <$> github
bugReports :: Maybe String
bugReports = case packageConfigBugReports of
Just Nothing -> Nothing
_ -> join packageConfigBugReports <|> fromGithub
where
fromGithub = (++ "/issues") . sourceRepositoryUrl <$> github
expandCSources :: FilePath -> Section a -> IO ([String], Section a)
expandCSources dir sect@Section{..} = do
(warnings, files) <- expandGlobs "c-sources" dir sectionCSources
return (warnings, sect {sectionCSources = files})
expandJsSources :: FilePath -> Section a -> IO ([String], Section a)
expandJsSources dir sect@Section{..} = do
(warnings, files) <- expandGlobs "js-sources" dir sectionJsSources
return (warnings, sect {sectionJsSources = files})
expandForeignSources :: FilePath -> Section a -> IO ([String], Section a)
expandForeignSources dir sect = do
(cWarnings, sect_) <- expandCSources dir sect
(jsWarnings, sect__) <- expandJsSources dir sect_
return (cWarnings ++ jsWarnings, sect__)
toCustomSetup :: CustomSetupSection -> CustomSetup
toCustomSetup CustomSetupSection{..} = CustomSetup
{ customSetupDependencies = fromMaybeList customSetupSectionDependencies }
toLibrary :: FilePath -> String -> Section global -> Section LibrarySection -> IO ([String], Section Library)
toLibrary dir name globalOptions library = traverse fromLibrarySection sect >>= expandForeignSources dir
where
sect :: Section LibrarySection
sect = mergeSections globalOptions library
sourceDirs :: [FilePath]
sourceDirs = sectionSourceDirs sect
fromLibrarySection :: LibrarySection -> IO Library
fromLibrarySection LibrarySection{..} = do
modules <- concat <$> mapM (getModules dir) sourceDirs
let (exposedModules, otherModules) = determineModules name modules librarySectionExposedModules librarySectionOtherModules
reexportedModules = fromMaybeList librarySectionReexportedModules
return (Library librarySectionExposed exposedModules otherModules reexportedModules)
toExecutables :: FilePath -> Section global -> [(String, Section ExecutableSection)] -> IO ([String], [Section Executable])
toExecutables dir globalOptions executables = do
result <- mapM toExecutable sections >>= mapM (expandForeignSources dir)
let (warnings, xs) = unzip result
return (concat warnings, xs)
where
sections :: [(String, Section ExecutableSection)]
sections = map (fmap $ mergeSections globalOptions) executables
toExecutable :: (String, Section ExecutableSection) -> IO (Section Executable)
toExecutable (name, sect@Section{..}) = do
(executable, ghcOptions) <- fromExecutableSection sectionData
return (sect {sectionData = executable, sectionGhcOptions = sectionGhcOptions ++ ghcOptions})
where
fromExecutableSection :: ExecutableSection -> IO (Executable, [GhcOption])
fromExecutableSection ExecutableSection{..} = do
modules <- maybe (filterMain . concat <$> mapM (getModules dir) sectionSourceDirs) (return . fromList) executableSectionOtherModules
return (Executable name mainSrcFile modules, ghcOptions)
where
filterMain :: [String] -> [String]
filterMain = maybe id (filter . (/=)) (toModule $ splitDirectories executableSectionMain)
(mainSrcFile, ghcOptions) = parseMain executableSectionMain
mergeSections :: Section global -> Section a -> Section a
mergeSections globalOptions options
= Section {
sectionData = sectionData options
, sectionSourceDirs = sectionSourceDirs globalOptions ++ sectionSourceDirs options
, sectionDefaultExtensions = sectionDefaultExtensions globalOptions ++ sectionDefaultExtensions options
, sectionOtherExtensions = sectionOtherExtensions globalOptions ++ sectionOtherExtensions options
, sectionGhcOptions = sectionGhcOptions globalOptions ++ sectionGhcOptions options
, sectionGhcProfOptions = sectionGhcProfOptions globalOptions ++ sectionGhcProfOptions options
, sectionGhcjsOptions = sectionGhcjsOptions globalOptions ++ sectionGhcjsOptions options
, sectionCppOptions = sectionCppOptions globalOptions ++ sectionCppOptions options
, sectionCcOptions = sectionCcOptions globalOptions ++ sectionCcOptions options
, sectionCSources = sectionCSources globalOptions ++ sectionCSources options
, sectionJsSources = sectionJsSources globalOptions ++ sectionJsSources options
, sectionExtraLibDirs = sectionExtraLibDirs globalOptions ++ sectionExtraLibDirs options
, sectionExtraLibraries = sectionExtraLibraries globalOptions ++ sectionExtraLibraries options
, sectionIncludeDirs = sectionIncludeDirs globalOptions ++ sectionIncludeDirs options
, sectionInstallIncludes = sectionInstallIncludes globalOptions ++ sectionInstallIncludes options
, sectionLdOptions = sectionLdOptions globalOptions ++ sectionLdOptions options
, sectionBuildable = sectionBuildable options <|> sectionBuildable globalOptions
, sectionDependencies = sectionDependencies globalOptions ++ sectionDependencies options
, sectionConditionals = sectionConditionals globalOptions ++ sectionConditionals options
, sectionBuildTools = sectionBuildTools globalOptions ++ sectionBuildTools options
}
toSection :: a -> CommonOptions -> ([FieldName], Section a)
toSection a CommonOptions{..}
= ( concat unknownFields
, Section {
sectionData = a
, sectionSourceDirs = fromMaybeList commonOptionsSourceDirs
, sectionDefaultExtensions = fromMaybeList commonOptionsDefaultExtensions
, sectionOtherExtensions = fromMaybeList commonOptionsOtherExtensions
, sectionGhcOptions = fromMaybeList commonOptionsGhcOptions
, sectionGhcProfOptions = fromMaybeList commonOptionsGhcProfOptions
, sectionGhcjsOptions = fromMaybeList commonOptionsGhcjsOptions
, sectionCppOptions = fromMaybeList commonOptionsCppOptions
, sectionCcOptions = fromMaybeList commonOptionsCcOptions
, sectionCSources = fromMaybeList commonOptionsCSources
, sectionJsSources = fromMaybeList commonOptionsJsSources
, sectionExtraLibDirs = fromMaybeList commonOptionsExtraLibDirs
, sectionExtraLibraries = fromMaybeList commonOptionsExtraLibraries
, sectionIncludeDirs = fromMaybeList commonOptionsIncludeDirs
, sectionInstallIncludes = fromMaybeList commonOptionsInstallIncludes
, sectionLdOptions = fromMaybeList commonOptionsLdOptions
, sectionBuildable = commonOptionsBuildable
, sectionDependencies = fromMaybeList commonOptionsDependencies
, sectionConditionals = conditionals
, sectionBuildTools = fromMaybeList commonOptionsBuildTools
}
)
where
(unknownFields, conditionals) = unzip (map toConditional $ fromMaybeList commonOptionsWhen)
toConditional :: ConditionalSection -> ([FieldName], Conditional)
toConditional x = case x of
ThenElseConditional (CaptureUnknownFields fields (ThenElse condition (CaptureUnknownFields fieldsThen then_) (CaptureUnknownFields fieldsElse else_))) ->
(fields ++ fieldsThen ++ fieldsElse, Conditional condition (() <$ then_) (Just (() <$ else_)))
FlatConditional (CaptureUnknownFields fields sect) -> (fields, Conditional (conditionCondition $ sectionData sect) (() <$ sect) Nothing)
pathsModuleFromPackageName :: String -> String
pathsModuleFromPackageName name = "Paths_" ++ map f name
where
f '-' = '_'
f x = x
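-- For example (illustrative name):
--   pathsModuleFromPackageName "my-package" == "Paths_my_package"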
determineModules :: String -> [String] -> Maybe (List String) -> Maybe (List String) -> ([String], [String])
determineModules name modules mExposedModules mOtherModules = case (mExposedModules, mOtherModules) of
(Nothing, Nothing) -> (modules, [pathsModuleFromPackageName name])
_ -> (exposedModules, otherModules)
where
otherModules = maybe ((modules \\ exposedModules) ++ pathsModule) fromList mOtherModules
exposedModules = maybe (modules \\ otherModules) fromList mExposedModules
pathsModule = [pathsModuleFromPackageName name] \\ exposedModules
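-- Worked examples with made-up module names:
--   determineModules "foo" ["A", "B"] Nothing Nothing
--     == (["A","B"], ["Paths_foo"])
--   determineModules "foo" ["A", "B"] (Just (List ["A"])) Nothing
--     == (["A"], ["B","Paths_foo"])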
getModules :: FilePath -> FilePath -> IO [String]
getModules dir src_ = sort <$> do
exists <- doesDirectoryExist (dir </> src_)
if exists
then do
src <- canonicalizePath (dir </> src_)
removeSetup src . toModules <$> getModuleFilesRecursive src
else return []
where
toModules :: [[FilePath]] -> [String]
toModules = catMaybes . map toModule
removeSetup :: FilePath -> [String] -> [String]
removeSetup src
| src == dir = filter (/= "Setup")
| otherwise = id
fromMaybeList :: Maybe (List a) -> [a]
fromMaybeList = maybe [] fromList
|
mitchellwrosen/hpack
|
src/Hpack/Config.hs
|
mit
| 32,909
| 0
| 24
| 5,988
| 8,611
| 4,621
| 3,990
| 672
| 9
|
module Galua.Micro.Translate.ComputeInputs(computeBlockInputs) where
import Data.Set(Set)
import qualified Data.Set as Set
import qualified Data.Map as Map
import qualified Data.Vector as Vector
import Data.List(mapAccumL)
import Data.Foldable(foldl')
import Data.Graph.SCC
import Data.Graph(SCC(..))
import Galua.Micro.AST
-- | Compute the inputs needed by each of the basic blocks.
computeBlockInputs :: MicroFunction -> MicroFunction
computeBlockInputs mf = foldl' computeInputsSCC mf
$ stronglyConnCompR
$ map toNode
$ Map.toList
$ functionCode mf
where
toNode (l,b) = (b,l,blockNext b)
type BlockNode = (Block, BlockName, [BlockName])
computeInputsSCC :: MicroFunction -> SCC BlockNode -> MicroFunction
computeInputsSCC mf s =
case s of
AcyclicSCC b -> fst (computeInputs mf b)
CyclicSCC bs -> computeInputsRec mf bs
-- | Compute the inputs for a group of mutually recursive basic blocks.
computeInputsRec :: MicroFunction -> [BlockNode] -> MicroFunction
computeInputsRec mf bs = if done then newMF else computeInputsRec newMF newBs
where
(newMF,newBs) = mapAccumL computeInputs mf bs
done = and (zipWith sameInputs newBs bs)
sameInputs (x,_,_) (y,_,_) = blockInputs x == blockInputs y
-- | Compute the inputs for a single block, assuming that we know
-- the inputs for its successors.
computeInputs :: MicroFunction -> BlockNode -> (MicroFunction, BlockNode)
computeInputs mf (b,x,xs) = ( mf { functionCode = newCode }
, (newBlock, x, xs)
)
where
newCode = Map.insert x newBlock (functionCode mf)
newBlock = b { blockInputs = newInputs }
newInputs = foldl' stmtStep beforeLast (Vector.reverse (blockBody b))
beforeLast = Set.unions (uses (blockEnd b) : map inputsFor xs)
stmtStep after s = Set.union (uses s) (after `Set.difference` defines s)
inputsFor l = case Map.lookup l (functionCode mf) of
Just b' -> blockInputs b'
Nothing -> error "computeInputs: missing block"
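-- Informally, this is the usual backwards liveness computation: a block needs
-- as inputs everything its terminator and successors use, minus anything the
-- block defines before that use. For example (illustrative block), a body of
-- [Assign r1 (EReg r2)] with terminator Raise (EReg r1) and no successors has
-- inputs {IReg r2}: r1 is needed after the assignment but defined by it, while
-- r2 must flow in from outside.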
--------------------------------------------------------------------------------
class Uses t where
uses :: t -> Set Input
instance Uses Reg where
uses r = Set.singleton (IReg r)
instance Uses ListReg where
uses r = Set.singleton (LReg r)
instance Uses a => Uses [a] where
uses = foldl' (\xs x -> Set.union (uses x) xs) Set.empty
instance (Uses a, Uses b) => Uses (a,b) where
uses (x,y) = Set.union (uses x) (uses y)
instance (Uses a, Uses b, Uses c) => Uses (a,b,c) where
uses (x,y,z) = uses (x,(y,z))
instance Uses Expr where
uses expr =
case expr of
EReg r -> Set.singleton (IReg r)
ELit {} -> Set.empty
EUp {} -> Set.empty
instance Uses Prop where
uses (Prop _ xs) = uses xs
instance Uses a => Uses (BlockStmt a) where
uses x = uses (stmtCode x)
instance Uses Stmt where
uses stmt =
case stmt of
Assign _ e -> uses e
NewTable _ -> Set.empty
LookupTable _ r e -> uses (r,e)
SetTable r e1 e2 -> uses (r,e1,e2)
SetTableList r _ -> uses (r,ListReg)
GetMeta _ e -> uses e
NewClosure _ _ f -> uses (funcUpvalRefExprs f)
Call r -> uses (r,ListReg)
Drop r _ -> uses r
Append r es -> uses (r,es)
SetList _ es -> uses es
AssignListReg _ y -> uses y
IndexList _ r _ -> uses r
Arith2 _ _ e1 e2 -> uses (e1,e2)
Arith1 _ _ e -> uses e
NewRef _ e -> uses e
ReadRef _ e -> uses e
WriteRef e1 e2 -> uses (e1,e2)
Comment {} -> Set.empty
SetUpVal {} -> error "uses: SetUpVal"
CloseStack {} -> error "uses: CloseStack"
instance Uses EndStmt where
uses stmt =
case stmt of
-- For local control flow, we also need the arguments for
-- the blocks that we may call.
Case e _ _ -> uses e
If p _ _ -> uses p
Goto _ -> Set.empty
TailCall r -> uses (r,ListReg)
Return -> uses ListReg
Raise e -> uses e
defines :: BlockStmt Stmt -> Set Input
defines stmt =
case stmtCode stmt of
Assign r _ -> iReg r
NewTable r -> iReg r
LookupTable r1 _ _ -> iReg r1
SetTable {} -> none
SetTableList _ _ -> none
GetMeta r _ -> iReg r
NewClosure r _ _ -> iReg r
Call _ -> lReg ListReg
Drop r _ -> lReg r
Append r _ -> lReg r
SetList r _ -> lReg r
AssignListReg r _ -> lReg r
IndexList r _ _ -> iReg r
Arith2 r _ _ _ -> iReg r
Arith1 r _ _ -> iReg r
NewRef r _ -> iReg r
ReadRef r _ -> iReg r
WriteRef _ _ -> none
Comment {} -> none
SetUpVal {} -> error "defines: SetUpVal"
CloseStack {} -> error "defines: CloseStack"
where
iReg = Set.singleton . IReg
lReg = Set.singleton . LReg
none = Set.empty
|
GaloisInc/galua
|
galua-jit/src/Galua/Micro/Translate/ComputeInputs.hs
|
mit
| 5,186
| 0
| 12
| 1,702
| 1,816
| 911
| 905
| 121
| 21
|
{-# LANGUAGE JavaScriptFFI #-}
-- | An implementation of the NodeJS Stream API, as documented
-- <https://nodejs.org/api/stream.html here>.
module GHCJS.Node.Stream
( module GHCJS.Node.Stream -- FIXME: specific export list
) where
import GHCJS.Array
import GHCJS.Foreign.Callback
import GHCJS.Types
-- | FIXME: doc
newtype ReadStream
= MkReadStream JSVal
-- | FIXME: doc
newtype WriteStream
= MkWriteStream JSVal
-- | FIXME: doc
newtype DuplexStream
= MkDuplexStream JSVal
-- | FIXME: doc
newtype TransformStream
= MkTransformStream
{ fromTransformStream :: DuplexStream
-- ^ FIXME: doc
}
-- | FIXME: doc
class IsWriteStream stream where
-- | FIXME: doc
toWriteStream :: stream -> WriteStream
-- | FIXME: doc
class IsReadStream stream where
-- | FIXME: doc
toReadStream :: stream -> ReadStream
instance IsWriteStream WriteStream where
toWriteStream = id
instance IsWriteStream DuplexStream where
toWriteStream (MkDuplexStream val) = MkWriteStream val
instance IsReadStream ReadStream where
toReadStream = id
instance IsReadStream DuplexStream where
toReadStream (MkDuplexStream val) = MkReadStream val
|
taktoa/ghcjs-electron
|
src/GHCJS/Node/Stream.hs
|
mit
| 1,188
| 0
| 8
| 234
| 194
| 114
| 80
| 27
| 0
|
isBalanced xs = null $ foldl f [] xs
where f ('(':ys) ')' = ys
f ('[':ys) ']' = ys
f ('{':ys) '}' = ys
f ys x = x:ys
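-- For example (illustrative): isBalanced "([]{})" is True, while
-- isBalanced "([)]" and isBalanced "((" are both False.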
main = do
content <- getContents
print $ length $ filter isBalanced $ lines content
|
MAPSuio/spring-challenge16
|
balanced_brackets/larstvei.hs
|
mit
| 248
| 0
| 9
| 92
| 122
| 60
| 62
| 8
| 4
|
{-# LANGUAGE NoImplicitPrelude #-}
module Rx.Observable.Maybe where
import Prelude.Compat
import Control.Concurrent.MVar (newEmptyMVar, takeMVar, tryPutMVar)
import Control.Monad (void)
import Rx.Disposable (dispose)
import Rx.Scheduler (Async)
import Rx.Observable.First (first)
import Rx.Observable.Types
toMaybe :: Observable Async a -> IO (Maybe a)
toMaybe source = do
completedVar <- newEmptyMVar
subDisposable <-
subscribe
(first source)
(void . tryPutMVar completedVar . Just)
(\_ -> void $ tryPutMVar completedVar Nothing)
(void $ tryPutMVar completedVar Nothing)
result <- takeMVar completedVar
dispose subDisposable
return result
|
roman/Haskell-Reactive-Extensions
|
rx-core/src/Rx/Observable/Maybe.hs
|
mit
| 830
| 0
| 12
| 265
| 202
| 107
| 95
| 21
| 1
|
module GCompiler
where
import Types
import Heap
import Utilities
import Language
gmCompile :: CoreProgram -> GMState
gmCompile program
= ([], initialCode, [], [], heap, globals, statInitial)
where
(heap, globals) = buildInitialHeap program
initialCode = [PushGlobal "main", Eval, Print]
statInitial :: GMStats
statInitial = 0
buildInitialHeap :: CoreProgram -> (GMHeap, GMGlobals)
buildInitialHeap program
= mapAccumLeft allocateCompiledSc hInitial compiled
where
compiled = (map compileSc preludeDefs)
++ compiledPrimitives
++ (map compileSc program)
compiledPrimitives :: [GMCompiledSC]
compiledPrimitives
=
[
("+", 2, [Push 1, Eval, Push 1, Eval, Add, Update 2, Pop 2, Unwind])
, ("-", 2, [Push 1, Eval, Push 1, Eval, Sub, Update 2, Pop 2, Unwind])
, ("*", 2, [Push 1, Eval, Push 1, Eval, Mul, Update 2, Pop 2, Unwind])
, ("/", 2, [Push 1, Eval, Push 1, Eval, Div, Update 2, Pop 2, Unwind])
, ("negate", 1, [Push 0, Eval, Neg, Update 1, Pop 1, Unwind])
, ("==", 2, [Push 1, Eval, Push 1, Eval, Eq, Update 2, Pop 2, Unwind])
, ("~=", 2, [Push 1, Eval, Push 1, Eval, Ne, Update 2, Pop 2, Unwind])
, ("<", 2, [Push 1, Eval, Push 1, Eval, Lt, Update 2, Pop 2, Unwind])
, ("<=", 2, [Push 1, Eval, Push 1, Eval, Le, Update 2, Pop 2, Unwind])
, (">", 2, [Push 1, Eval, Push 1, Eval, Gt, Update 2, Pop 2, Unwind])
, (">=", 2, [Push 1, Eval, Push 1, Eval, Ge, Update 2, Pop 2, Unwind])
, ("if", 3, [Push 0, Eval, Cond [Push 1] [Push 2], Update 3, Pop 3, Unwind])
]
allocateCompiledSc :: GMHeap -> GMCompiledSC -> (GMHeap, (Name, Addr))
allocateCompiledSc heap (name, nargs, instns)
= (heap', (name, addr))
where
(heap', addr) = hAlloc heap (NGlobal nargs instns)
-- SuperCombinator compiler
compileSc :: (Name, [Name], CoreExpr) -> GMCompiledSC
compileSc (name, params, body)
= (name, length params, compileR body (zip params [0..]))
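-- A small worked example (made-up supercombinator):
--   compileSc ("K", ["x", "y"], EVar "x")
--     == ("K", 2, [Push 0, Eval, Update 2, Pop 2, Unwind])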
-- Compile RHS of SuperCombinator
-- 1. uses the modified expression compiler compileE
-- 2. adds code to update the redex root and clean up the stack (Update/Pop)
-- 3. unwind to find the next redex
compileR :: GMCompiler
compileR e env = compileE e env ++ [Update numArgs, Pop numArgs, Unwind]
where
numArgs = length env
builtInDyadic :: Assoc Name Instruction -- list of pairs - [(Name, Instruction)]
builtInDyadic =
[ ("+", Add), ("-", Sub), ("*", Mul), ("div", Div),
("==", Eq), ("~=", Ne), (">=", Ge),
(">", Gt), ("<=", Le), ("<", Lt)]
-- Strict expression compiler (E scheme)
compileE :: GMCompiler
compileE (EAp (EAp (EVar op) e1) e2) env
| op `elem` binaryOps = compileE e2 env ++ compileE e1 env' ++ [value]
where
binaryOps = map fst builtInDyadic
value = aLookup builtInDyadic op (error "This can't happen")
env' = argOffset 1 env
compileE (EAp (EVar "negate") e) env
= compileE e env ++ [Neg]
compileE (EAp (EAp (EAp (EVar "if") e1) e2) e3) env
= compileE e1 env ++ [Cond (compileE e2 env) (compileE e3 env)]
compileE (ENum n) _ = [PushInt n]
compileE (ELet rec defs e) env
| rec = compileLetrec compileE defs e env
| otherwise = compileLet compileE defs e env
compileE (ECase e alts) env = compileE e env ++
[Casejump (compileAlts compileE' alts env)]
compileE (EPack tag arity) env = compilePack tag arity env ++ [Pack tag arity]
compileE e env = compileC e env ++ [Eval]
compilePack tag arity env = undefined -- placeholder: constructor arguments are not compiled yet
compileE' :: Int -> GMCompiler
compileE' offset expr env
= [Split offset] ++ compileE expr env ++ [Slide offset]
compileAlts :: (Int -> GMCompiler) -- compiler for alternative bodies
-> [CoreAlt] -- the list of alternatives
-> GMEnvironment -- the current environment
-> [(Int, GMCode)] -- list of alternative code sequences
compileAlts comp alts env
= [(tag, comp (length names) body (zip names [0..]
++ argOffset (length names) env))
| (tag, names, body) <- alts]
-- Expression compiler
compileC :: GMCompiler
compileC (EPack t n) _ = [Pack t n]
compileC (EVar v) env
| v `elem` aDomain env = [Push n]
| otherwise = [PushGlobal v]
where
n = aLookup env v (error "Can't happen")
compileC (ENum n) _ = [PushInt n]
compileC (EAp e1 e2) env
| saturatedCons spine = compileCS (reverse spine) env
| otherwise = compileC e2 env ++ compileC e1 (argOffset 1 env) ++ [MkAp]
where
spine = makeSpine (EAp e1 e2)
saturatedCons (EPack t a:es) = a == length es
saturatedCons _ = False
compileC (ELet rec defs e) env
| rec = compileLetrec compileC defs e env
| otherwise = compileLet compileC defs e env
compileC _ _ = []
makeSpine (EAp e1 e2) = makeSpine e1 ++ [e2]
makeSpine e = [e]
compileCS [EPack t a] _ = [Pack t a]
compileCS (e:es) env = compileC e env ++ compileCS es (argOffset 1 env)
compileLetrec :: GMCompiler -> [(Name, CoreExpr)] -> GMCompiler
compileLetrec comp defs expr env =
[Alloc n]
++ compileLetrec' defs env
++ comp expr env'
++ [Slide n]
where
n = length defs
env' = compileArgs defs env
compileLetrec' :: [(Name, CoreExpr)] -> GMEnvironment -> GMCode
compileLetrec' [] _ = []
compileLetrec' ((_, expr):defs) env =
compileC expr env ++ [Update n] ++ compileLetrec' defs env
where n = length defs
compileLet :: GMCompiler -> [(Name, CoreExpr)] -> GMCompiler
compileLet comp defs expr env =
compileLet' defs env
++ comp expr env'
++ [Slide (length defs)]
where
env' = compileArgs defs env
compileLet' :: [(Name, CoreExpr)] -> GMEnvironment -> GMCode
compileLet' [] _ = []
compileLet' ((_, expr):defs) env =
compileC expr env ++ compileLet' defs (argOffset 1 env)
compileArgs :: [(Name, CoreExpr)] -> GMEnvironment -> GMEnvironment
compileArgs defs env =
zip (map fst defs) [n-1, n-2 .. 0] ++ argOffset n env
where n = length defs
argOffset :: Int -> GMEnvironment -> GMEnvironment
argOffset n env = [(v, n+m) | (v,m) <- env]
-- end of file
|
typedvar/hLand
|
hcore/GCompiler.hs
|
mit
| 6,109
| 0
| 13
| 1,516
| 2,537
| 1,370
| 1,167
| 129
| 2
|
import Control.Monad
import Data.List.Extra
import Data.Maybe
import Data.Bits
import qualified Data.Char as C
import qualified Data.Map as Map
import qualified Data.Set as Set
import Debug.Trace
------
iread :: String -> Int
iread = read
answer :: (Show a) => (String -> a) -> IO ()
answer f = interact $ (++"\n") . show . f
splitOn1 a b = fromJust $ stripInfix a b
-- pull out every part of a String that can be read in
-- for some Read a and ignore the rest
readOut :: Read a => String -> [a]
readOut "" = []
readOut s = case reads s of
[] -> readOut $ tail s
[(x, s')] -> x : readOut s'
_ -> error "ambiguous parse"
ireadOut :: String -> [Int]
ireadOut = readOut
replaceAtIndex n item ls = a ++ (item:b) where (a, (_:b)) = splitAt n ls
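-- Quick illustrations (made-up inputs):
--   ireadOut "x=3, y=-7"        == [3, -7]
--   replaceAtIndex 1 9 [1,2,3]  == [1, 9, 3]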
--------
data Instr' = Iadd | Imul | Iban | Ibor | Iset | Igt | Ieq
deriving (Enum, Show, Eq, Ord)
type Instr = (Instr', Bool, Bool)
eval_instr' :: Instr' -> Int -> Int -> Int
eval_instr' Iadd = (+)
eval_instr' Imul = (*)
eval_instr' Iban = (.&.)
eval_instr' Ibor = (.|.)
eval_instr' Iset = const
eval_instr' Igt = \x y -> fromEnum $ x > y
eval_instr' Ieq = \x y -> fromEnum $ x == y
step :: (Instr, (Int, Int, Int)) -> [Int] -> [Int]
step ((i, a_imm, b_imm), (a, b, c)) regs =
let va = if a_imm then a else regs !! a
vb = if b_imm then b else regs !! b
res = eval_instr' i va vb
in replaceAtIndex c res regs
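-- For example (made-up register file), with arguments (a, b, c) = (1, 2, 0)
-- and both operands taken from registers:
--   step ((Iadd, False, False), (1, 2, 0)) [3, 4, 5, 6] == [9, 4, 5, 6]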
parse [x, y, z, w] = (x, (y, z, w))
instrs = [(x, x == Iset && y , y) | x <- enumFrom Iadd, y <- [True, False]] ++
[(Igt, True, False), (Ieq, True, False)]
couldMatch (before:instr:after:_) = length res
where (_, args) = parse instr
res = filter (\i -> step (i, args) before == after) instrs
solve x = length $ filter (>=3) stuff
where stuff = map couldMatch $ chunksOf 4 x
main = answer $ solve . fst . splitOn1 [[], [], []] . map ireadOut . lines
|
msullivan/advent-of-code
|
2018/A16a.hs
|
mit
| 1,861
| 0
| 12
| 432
| 870
| 487
| 383
| 48
| 3
|
{-# LANGUAGE OverloadedStrings #-}
module Gratte.Command.Encryption where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Gratte
import Data.Maybe
import Data.Monoid
import Gratte.Options
import Gratte.Options.Encryption
import Gratte.Utils
import Filesystem.Path.CurrentOS ( (</>), (<.>) )
import Crypto.Random.DRBG
import Crypto.Cipher.AES
import qualified Data.ByteString.Char8 as BS
import qualified Filesystem as FS
import qualified Filesystem.Path.CurrentOS as FS
-- | Get a password from the input and transform it
-- into a 32-byte bytestring (if necessary
-- by repeating and truncating the input)
getPassword :: Gratte BS.ByteString
getPassword = do
mPassFile <- getOption passwordFile
liftIO $ case mPassFile of
Nothing -> do
putStrLn "Please enter your password"
strPass <- getLine
let pass = BS.pack strPass
go s = if BS.length s >= 32 then BS.take 32 s else go (s <> s)
return $ go pass
Just passFile -> FS.readFile passFile
sync :: FS.FilePath -- ^ Source folder
-> FS.FilePath -- ^ Target folder
-> (FS.FilePath -> FS.FilePath) -- ^ Basename transformation from the source to the target
-> (FS.FilePath -> FS.FilePath -> IO ()) -- ^ Transformation from source to target
-> IO (Int, Int) -- ^ (Number of synchronised files, total number of files)
sync sourceDir targetDir nameTrans fileTrans = do
-- /path/to/dir -> /path/to/dir/
-- otherwise stripPrefix doesn't work as it should
let sourceDir' = FS.decodeString $ FS.encodeString sourceDir ++ "/"
sourceFiles <- getFilesRecurs sourceDir'
let sourcePaths = map (FS.stripPrefix sourceDir') sourceFiles
-- keep the files that don't have a twin
toSync <- flip filterM (map fromJust sourcePaths) $ \p -> do
let targetFile = nameTrans $ targetDir </> p
not <$> FS.isFile targetFile
let filePairs = map (\p ->
( sourceDir' </> p
, nameTrans (targetDir </> p)
)
)
toSync
mapM_ (uncurry fileTrans) filePairs
return (length toSync, length sourcePaths)
encryptFiles :: EncryptionOptions -> Gratte ()
encryptFiles encOpts = do
sourceDir <- getOption folder
let targetDir = encryptFolder encOpts
password <- getPassword
let nameTrans = (<.> "enc")
(synced, total) <- liftIO $ sync sourceDir targetDir nameTrans (encryptFile password)
logNotice $ show synced ++ "/" ++ show total ++ " files encrypted"
decryptFiles :: EncryptionOptions -> Gratte ()
decryptFiles encOpts = do
let sourceDir = encryptFolder encOpts
targetDir <- getOption folder
password <- getPassword
opts <- getOptions
let logFailure s = withGratte opts $ logError $ "Error while decrypting " ++ FS.encodeString s
let write s t = do
isSuccess <- decryptFile password s t
unless isSuccess $ logFailure s
return ()
(synced, total) <- liftIO $ sync sourceDir targetDir FS.dropExtension write
logNotice $ show synced ++ "/" ++ show total ++ " files decrypted"
encryptFile :: BS.ByteString -- ^ The password
-> FS.FilePath -- ^ File to encrypt
-> FS.FilePath -- ^ Target
-> IO ()
encryptFile pass source target = do
decContent <- FS.readFile source
let encContent = encrypt pass decContent
FS.createTree $ FS.directory target
FS.writeFile target =<< encContent
decryptFile :: BS.ByteString -- ^ The password
-> FS.FilePath -- ^ File to decrypt
-> FS.FilePath -- ^ Target
-> IO Bool -- ^ Success?
decryptFile pass source target = do
encContent <- FS.readFile source
let mDecContent = decrypt pass encContent
case mDecContent of
Nothing -> return False
Just decContent -> do
FS.createTree $ FS.directory target
FS.writeFile target decContent
return True
encrypt :: BS.ByteString -- ^ The password (32 bytes)
-> BS.ByteString -- ^ The clear bytestring
-> IO BS.ByteString -- ^ The encrypted bytestring
encrypt pass decContent = do
let cipher = initAES pass
ivStr <- randomByteString 16 -- The initialization vector for the CBC encryption
let iv = aesIV_ ivStr
encrypted = encryptCBC cipher iv $ pad decContent
return $ ivStr <> wrapStart <> encrypted
decrypt :: BS.ByteString -- ^ The password (32 bytes)
-> BS.ByteString -- ^ The encrypted bytestring, with the form
-- thisistheivBEGIN_CONTENTencryptedcontent
-> Maybe BS.ByteString -- ^ The decrypted bytestring
decrypt pass wrapped =
let (ivStr, prefixed) = BS.breakSubstring wrapStart wrapped
enc = BS.drop (BS.length wrapStart) prefixed
cipher = initAES pass
iv = aesIV_ ivStr
padded = decryptCBC cipher iv enc
in unpad padded
wrapStart :: BS.ByteString
wrapStart = "BEGIN_CONTENT"
wrapEnd :: BS.ByteString
wrapEnd = "END_CONTENT"
-- | Wrap the content like so: bla -> blaEND_CONTENT00000,
-- so that the whole bytestring has a size that is a multiple of 16
pad :: BS.ByteString -> BS.ByteString
pad str =
let suffixed = str <> wrapEnd
l = BS.length suffixed
in suffixed <> BS.replicate (16 - l `rem` 16) '0'
unpad :: BS.ByteString -> Maybe BS.ByteString
unpad str =
let unpadded = BS.reverse . BS.dropWhile (== '0') . BS.reverse $ str
in if wrapEnd `BS.isSuffixOf` unpadded
then Just $ BS.take (BS.length unpadded - BS.length wrapEnd) unpadded
else Nothing
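-- Round-trip sketch (illustrative): pad "bla" gives "blaEND_CONTENT00"
-- (padded with '0' up to 16 bytes), and unpad of that yields Just "bla".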
-- | Generate a random ByteString of n bytes
randomByteString :: Int -> IO BS.ByteString
randomByteString n = do
gen <- newGenIO :: IO CtrDRBG
case genBytes n gen of
Right (bs, _) -> return $ BS.take n bs
Left _ -> error "Error while trying to generate a random string!"
|
ostapneko/gratte-papier
|
src/Gratte/Command/Encryption.hs
|
mit
| 6,022
| 0
| 19
| 1,593
| 1,554
| 774
| 780
| 129
| 3
|
-----------------------------------------------------------------------------
-- |
-- Module : UseCaseModel.Parsers.XML.XmlUseCaseModel
-- Copyright : (c) Rodrigo Bonifacio 2008, 2009
-- License : LGPL
--
-- Maintainer : rba2@cin.ufpe.br
-- Stability : provisional
-- Portability : portable
--
-- Several functions for transforming a use case model to a TaRGeT XML
-- representation. This module also defines a representation in Haskell
-- for the TaRGeT XML document
--
-----------------------------------------------------------------------------
module UseCaseModel.Parsers.XML.XmlUseCaseModel
where
import Data.List
import UseCaseModel.Types
import BasicTypes
type XmlAction = String
type XmlState = String
type XmlResponse = String
type XmlFromStep = String
type XmlToStep = String
type XmlSetup = String
-- | The root element of the TaRGeT xml document.
-- Notice that this does not make sense, since
-- TaRGeT is being used for specifying use cases
-- in different domains.
data XmlPhone = XmlPhone {
ucms :: [XmlUseCaseModel]
} deriving (Show)
-- | The "feature" element of the TaRGeT xml document.
data XmlUseCaseModel = XmlUCM Id Name [XmlUseCase] [XmlAspectualUseCase]
deriving (Show)
-- | The use case TaRGeT element
data XmlUseCase = XmlUseCase Id Name Description XmlSetup [XmlScenario]
deriving (Show)
-- | This is a new element for dealing with
-- variabilities. Earlier versions of the TaRGeT XML documents
-- do not have this element.
data XmlAspectualUseCase = XmlAspectualUseCase {
xmlAspectId :: Id,
xmlAspectName :: Name,
xmlAdvices :: [XmlAdvice]
}
deriving (Show)
-- | The scenario TaRGeT element.
data XmlScenario = XmlScenario Id Description XmlFromStep XmlToStep [XmlStep]
deriving (Show)
-- | Another new element for the TaRGeT XML documents.
-- It represents an advice, being a subelement of the
-- XmlAspectualUseCase.
data XmlAdvice = XmlAdvice {
xmlAdviceId :: String,
xmlAdviceType :: String,
xmlAdviceDescription :: String,
xmlPointcut :: String,
xmlAdviceSteps :: [XmlStep]
}
deriving (Show)
-- | Another new element for the TaRGeT XML documents.
-- It represents an advice flow, being a subelement of
-- XmlAdvice.
-- data XmlAdviceFlow = XmlAdviceFlow {
-- xmlAdviceSteps :: [XmlStep]
-- } deriving (Show)
-- | The step TaRGeT element.
data XmlStep = XmlStep Id XmlAction XmlState XmlResponse
deriving (Show)
-- | Translate a Phone Document into a list of
-- use case models.
xmlPhone2UseCaseModels :: XmlPhone -> [UseCaseModel]
xmlPhone2UseCaseModels (XmlPhone xmlUCMs) = map xmlUseCaseModel2UseCaseModel xmlUCMs
-- | Translate a TaRGeT use case model to a base use case
-- model. Note that this is a straightforward mapping.
xmlUseCaseModel2UseCaseModel :: XmlUseCaseModel -> UseCaseModel
xmlUseCaseModel2UseCaseModel (XmlUCM umid name xmlUseCases xmlAspects) =
UCM name
[xmlUseCase2UseCase xmlUseCase | xmlUseCase <- xmlUseCases]
[xmlAspectualUseCase2AspectualUseCase xmlAspect | xmlAspect <- xmlAspects]
-- | Translate a TaRGeT use case to a base use case.
xmlUseCase2UseCase :: XmlUseCase -> UseCase
xmlUseCase2UseCase (XmlUseCase i n d s xmlScenarios) =
UseCase i n d [(xmlScenario2Scenario xmlScenario) | xmlScenario <- xmlScenarios]
-- | Translate a TaRGeT aspectual use case to a base aspectual use case.
xmlAspectualUseCase2AspectualUseCase :: XmlAspectualUseCase -> AspectualUseCase
xmlAspectualUseCase2AspectualUseCase xmlAspect =
AspectualUseCase {
aspectId = (xmlAspectId xmlAspect),
aspectName = (xmlAspectName xmlAspect),
advices = [xmlAdvice2Advice xmlAdvice | xmlAdvice <- (xmlAdvices xmlAspect)]
}
-- | Translate a TaRGeT advice to a base advice.
xmlAdvice2Advice :: XmlAdvice -> Advice
xmlAdvice2Advice (XmlAdvice i t d pc as) =
let
flow = [xmlStep2Step s | s <- as]
refs = xmlStepRefs2StepRefs pc
c = case t of
"before" -> Before
"after" -> After
"around" -> Around
in (Advice c i d refs flow)
-- | Translate a TaRGeT scenario to a scenario
xmlScenario2Scenario :: XmlScenario -> Scenario
xmlScenario2Scenario (XmlScenario sid description fromSteps toSteps steps) = scenario
where
scenario = Scenario sid
description
(xmlStepRefs2StepRefs fromSteps)
[xmlStep2Step step | step <- steps]
(xmlStepRefs2StepRefs toSteps)
-- | Translate a TaRGeT step to a base step
xmlStep2Step :: XmlStep -> Step
xmlStep2Step (XmlStep i a s r) =
let
ann = [tail ar | ar <- words (a ++ s ++ r), head ar == '@']
in
if i == "PROCEED" then Proceed else Step i a s r ann
-- | Translate TaRGeT step refs to a list of StepRef.
-- A step ref might be either an IdRef or an AnnotationRef,
-- which must start with the '@' character.
--
-- The start symbol of an annotation could be a variation
-- point.
xmlStepRefs2StepRefs :: String -> [StepRef]
xmlStepRefs2StepRefs s = map xmlStepRefs2StepRefs' refs
where
refs = [x | x <- (splitAndRemoveBlanks ',' s)]
xmlStepRefs2StepRefs' ref =
case ref of
('@':ss) -> AnnotationRef ss
_ -> IdRef ref
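-- For example (illustrative, assuming splitAndRemoveBlanks splits on the
-- commas): xmlStepRefs2StepRefs "2A,@security" gives
-- [IdRef "2A", AnnotationRef "security"].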
|
hephaestus-pl/hephaestus
|
willian/hephaestus-integrated/asset-base/uc-model/src/UseCaseModel/Parsers/XML/XmlUseCaseModel.hs
|
mit
| 5,378
| 42
| 15
| 1,202
| 955
| 538
| 417
| 78
| 3
|
module H.Parse where
import H.Data
import Control.Applicative
import Control.Monad
import Data.Time
import Text.Parsec (modifyState)
import Text.Parsec.String
import Text.ParserCombinators.Parsec as P hiding ((<|>), many)
parseTask :: String -> Either ParseError Task
parseTask = P.runParser taskParser emptyTask "task"
modifyAndReturn :: (a -> a) -> GenParser Char a a
modifyAndReturn f = do
modifyState f
getState
vchar :: Char -> GenParser Char a ()
vchar = void . char
taskParser :: GenParser Char Task Task
taskParser =
P.optional (try doneParser)
*> many (choice
[ tagParser
, contextParser
, timesParser
, startParser
, descriptionParser
])
*> modifyAndReturn normalize
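-- For illustration (made-up task text), a line such as
--   "x write report +work @office (1h 30m) {6/18/2015}"
-- is intended to parse as a completed task described (roughly) as
-- "write report", tagged "work", in context "office", estimated at 1.5 hours
-- and started on June 18, 2015; the exact Task fields come from H.Data.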
descriptionParser :: GenParser Char Task Task
descriptionParser = do
d <- P.many1 (alphaNum <|> space)
modifyAndReturn (describe d)
doneParser :: GenParser Char Task ()
doneParser = char 'x' *> space *> modifyState complete
contextParser :: GenParser Char Task Task
contextParser = do
vchar '@'
c <- P.many1 alphaNum
spaces
modifyAndReturn (contextualize c)
tagParser :: GenParser Char Task Task
tagParser = do
vchar '+'
t <- P.many1 alphaNum
spaces
modifyAndReturn (tag t)
startParser :: GenParser Char Task Task
startParser = do
vchar '{'
spaces
mon <- read <$> many1 digit
vchar '/'
d <- read <$> many1 digit
vchar '/'
y <- read <$> many1 digit
spaces
ts <- choice [timeString, return 0]
spaces
vchar '}'
spaces
modifyAndReturn (start (lt mon d y ts))
where
lt :: Int -> Int -> Integer -> DiffTime -> LocalTime
lt mon d y ts = LocalTime
(fromGregorian y mon d)
(timeToTimeOfDay ts)
timesParser :: GenParser Char Task Task
timesParser = do
vchar '('
ts <- choice [timeString, return 0]
spaces
P.optional $ do
vchar ','
ss <- choice [timeString, return 0]
modifyState (spend ss)
spaces
vchar ')'
spaces
modifyAndReturn (estimate ts)
timeString :: GenParser Char Task DiffTime
timeString =
sum <$> many1 (spaces >> choice (fmap try
[ minuteParser
, hourParser
, hourMinuteParser
]))
hourMinuteParser :: GenParser Char Task DiffTime
hourMinuteParser = do
h <- read <$> many1 digit
vchar ':'
m <- read <$> many1 digit
return (hours h + minutes m)
shortTimeParser :: (Int -> DiffTime) -> Char -> GenParser Char Task DiffTime
shortTimeParser f c = do
n <- many1 digit
vchar c
return . f . read $ n
minuteParser :: GenParser Char Task DiffTime
minuteParser = shortTimeParser minutes 'm'
hourParser :: GenParser Char Task DiffTime
hourParser = shortTimeParser hours 'h'
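-- Illustrative inputs for the time parsers (assuming hours/minutes from
-- H.Data build DiffTime values):
--   timeString on "1h 30m" yields hours 1 + minutes 30
--   hourMinuteParser on "2:15" yields hours 2 + minutes 15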
|
josuf107/H
|
H/Parse.hs
|
gpl-2.0
| 2,800
| 0
| 13
| 744
| 944
| 453
| 491
| 97
| 1
|
{-|
Module : Setseer.Color
Description : Color conversion and palette generation
Copyright : Erik Edlund
License : GPL-3
Maintainer : erik.edlund@32767.se
Stability : experimental
Portability : POSIX
-}
module Setseer.Color where
import Codec.Picture
import Data.Complex
import Data.Fixed
import Data.List
import Data.Vector
import Data.Word
import Test.QuickCheck
import Setseer.Glue
data PixelHSVd = PixelHSVd
!Double -- h
!Double -- s
!Double -- v
deriving (Eq, Ord, Show)
convertHSVdToPixelRGB8
:: Double
-> Double
-> Double
-> PixelRGB8
convertHSVdToPixelRGB8 h s v
| i == 0
= makePixel v t p
| i == 1
= makePixel q v p
| i == 2
= makePixel p v t
| i == 3
= makePixel p q v
| i == 4
= makePixel t p v
| i == 5
= makePixel v p q
| otherwise
= error $ "convertHSVdToPixelRGB8: invalid i: " Data.List.++ show i
where
makePixel r g b = PixelRGB8
(round (r * 255.0))
(round (g * 255.0))
(round (b * 255.0))
hh = abs $ (if h >= 360.0 then (h `mod'` 360.0) else h) / 60.0
i = floor hh
f = hh - frI i
p = v * (1.0 - s)
q = v * (1.0 - s * f)
t = v * (1.0 - s * (1.0 - f))
convertRGB8ToPixelHSVd
:: Word8
-> Word8
-> Word8
-> PixelHSVd
convertRGB8ToPixelHSVd r g b
= PixelHSVd (if h < 0 then h + 360.0 else h) s v
where
makeH r' g' b' xM dX
| dX == 0
= 0
| xM == r'
= ((g' - b') / dX) `mod'` 6
| xM == g'
= ((b' - r') / dX) + 2.0
| otherwise
= ((r' - g') / dX) + 4.0
r' = frI r / 255.0
g' = frI g / 255.0
b' = frI b / 255.0
xm = Data.List.foldl (min) 1 [r', g', b']
xM = Data.List.foldl (max) 0 [r', g', b']
dX = xM - xm
h = (makeH r' g' b' xM dX) * 60
s = if xM == 0 then 0 else dX / xM
v = xM
prop_RGB2HSV_HSV2RGB
:: Word8
-> Word8
-> Word8
-> Bool
prop_RGB2HSV_HSV2RGB r g b
= (goBack (convertRGB8ToPixelHSVd r g b)) == (PixelRGB8 r g b)
where
goBack
:: PixelHSVd
-> PixelRGB8
goBack (PixelHSVd h s v)
= convertHSVdToPixelRGB8 h s v
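-- The round trip above can be exercised from GHCi with
-- quickCheck prop_RGB2HSV_HSV2RGB (Test.QuickCheck is already in scope).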
generateEscapeColors
:: Double
-> Double
-> Double
-> Vector PixelRGB8
generateEscapeColors rStretch gStretch bStretch
= fromList $ generate 0 rStretch gStretch bStretch
where
generate
:: Int
-> Double
-> Double
-> Double
-> [PixelRGB8]
generate n rStretch gStretch bStretch
| n > 255
= []
| otherwise
= PixelRGB8 r' g' b' : (generate (n + 1)
rStretch
gStretch
bStretch)
where
x = (frI n * 2.0) / 256.0
r = truncate $ rs * (1.0 + cos ((x - 1.0) * pi))
g = truncate $ gs * (1.0 + cos ((x - 1.0) * pi))
b = truncate $ bs * (1.0 + sin ((x - 1.0) * pi))
r' = frI $ min r 255
g' = frI $ min g 255
b' = frI $ min b 255
rs = rStretch * 127.5
gs = gStretch * 127.5
bs = bStretch * 127.5
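-- For illustration: generateEscapeColors 1.0 1.0 1.0 builds a 256-entry
-- palette (indices 0..255) whose channels follow the cosine/sine curves above.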
|
edlund/setseer
|
sources/Setseer/Color.hs
|
gpl-3.0
| 2,995
| 0
| 16
| 1,037
| 1,184
| 617
| 567
| 118
| 3
|
-- -*-haskell-*-
-- Vision (for the Voice): an XMMS2 client.
--
-- Author: Oleg Belozeorov
-- Created: 18 Jun. 2010
--
-- Copyright (C) 2010, 2011, 2012 Oleg Belozeorov
--
-- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License as
-- published by the Free Software Foundation; either version 3 of
-- the License, or (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
--
{-# LANGUAGE DeriveDataTypeable, Rank2Types #-}
module Medialib
( Stamp
, MediaInfo
, RequestPriority (..)
, initMedialib
, WithMedialib
, withMedialib
, requestInfo
, retrieveProperties
, mediaInfoChan
) where
import Control.Concurrent
import Control.Concurrent.STM
import Control.Concurrent.STM.TGVar
import Control.Monad (when, forever)
import Control.Monad.Trans
import Data.Map (Map)
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import qualified Data.IntSet as IntSet
import Data.PSQueue (PSQ, Binding (..))
import qualified Data.PSQueue as PSQ
import Data.Env
import Data.Typeable
import XMMS2.Client
import XMMS2.Client.Bindings (propdictToDict)
import Registry
import XMMS
import Utils
type Stamp = Int
type MediaInfo = Map String Property
data RequestPriority
= Current
| Visible
| Search
| Changed
| Background
deriving (Eq, Ord)
data CacheEntry
= CEReady Stamp MediaInfo
| CERetrieving RequestPriority
data Cache
= Cache { cEntries :: IntMap CacheEntry
, cNextStamp :: Stamp
}
emptyCache :: Cache
emptyCache =
Cache { cEntries = IntMap.empty
, cNextStamp = 1
}
data Ix = Ix deriving (Typeable)
data MLib
= MLib { _cache :: TVar (Maybe Cache)
, _mediaInfoChan :: TChan (MediaId, Stamp, MediaInfo)
, _reqQ :: TVar (PSQ MediaId RequestPriority)
}
deriving (Typeable)
type WithMedialib = ?_Medialib :: MLib
cache :: WithMedialib => TVar (Maybe Cache)
cache = _cache ?_Medialib
reqQ :: WithMedialib => TVar (PSQ MediaId RequestPriority)
reqQ = _reqQ ?_Medialib
mediaInfoChan :: WithMedialib => TChan (MediaId, Stamp, MediaInfo)
mediaInfoChan = _mediaInfoChan ?_Medialib
initMedialib :: WithRegistry => IO ()
initMedialib = withXMMS $ do
mlib <- mkMLib
addEnv Ix mlib
let ?_Medialib = mlib
xcW <- atomically $ newTGWatch connectedV
let mon xc
| xc = do
atomically $ writeTVar cache $ Just emptyCache
broadcastMedialibEntryChanged xmms >>* do
id <- result
let id' = fromIntegral id
liftIO $ atomically $ do
cc <- readTVar cache
withJust cc $ \cc ->
when (IntMap.member id' $ cEntries cc) $ do
r <- readTVar reqQ
writeTVar reqQ $ PSQ.insert id Changed r
return True
rt <- forkIO infoReqJob
xc <- atomically $ watch xcW
killThread rt
atomically $ do
writeTVar cache Nothing
writeTVar reqQ PSQ.empty
mon xc
| otherwise = do
xc <- atomically $ watch xcW
mon xc
forkIO $ mon False
return ()
withMedialib :: WithRegistry => (WithMedialib => IO a) -> IO a
withMedialib func = do
Just (Env mlib) <- getEnv (Extract :: Extract Ix MLib)
let ?_Medialib = mlib
func
requestInfo :: WithMedialib => RequestPriority -> MediaId -> IO ()
requestInfo prio id = atomically $ do
cc <- readTVar cache
withJust cc $ \cc ->
let id' = fromIntegral id
entries = cEntries cc
in case IntMap.lookup id' entries of
Nothing -> do
r <- readTVar reqQ
writeTVar reqQ $ PSQ.insert id prio r
writeTVar cache $ Just
cc { cEntries = IntMap.insert id' (CERetrieving prio) entries }
Just (CERetrieving old) | old > prio -> do
r <- readTVar reqQ
writeTVar reqQ $ PSQ.update (const $ Just prio) id r
writeTVar cache $ Just
cc { cEntries = IntMap.insert id' (CERetrieving prio) entries }
Just (CEReady s i) ->
writeBroadcastTChan mediaInfoChan (id, s, i)
_ -> return ()
mkMLib :: IO MLib
mkMLib = do
cache <- newTVarIO Nothing
mediaInfoChan <- newTChanIO
reqQ <- newTVarIO PSQ.empty
return MLib { _cache = cache
, _mediaInfoChan = mediaInfoChan
, _reqQ = reqQ
}
retrieveProperties :: WithMedialib => [MediaId] -> (Either Double [(MediaId, MediaInfo)] -> IO ()) -> IO (IO ())
retrieveProperties ids f = do
let ids' = IntSet.fromList $ map fromIntegral ids
len = IntSet.size ids'
step = len `div` 100
chan <- atomically $ dupTChan mediaInfoChan
let handler st@(ctr, todo, ready) = do
(id, _, info) <- atomically $ readTChan chan
let id' = fromIntegral id
if IntSet.member id' todo
then do
let todo' = IntSet.delete id' todo
if IntSet.null todo'
then do
f . Right $ reverse ((id, info) : ready)
return ()
else do
let ctr' = ctr + 1
when (step == 0 || ctr' `mod` step == 0) $
f . Left $ fromIntegral ctr' / fromIntegral len
handler (ctr', todo', (id, info) : ready)
else
handler st
tid <- forkIO $ handler (0, ids', [])
forkIO $ mapM_ (requestInfo Background . fromIntegral) $ IntSet.toList ids'
return $ killThread tid
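-- A hypothetical caller sketch (identifiers invented for illustration):
-- progress arrives as 'Left', the final list as 'Right', and the returned
-- action cancels the background retrieval.
--
--   cancel <- retrieveProperties ids (either showProgress showInfos)
--   ...
--   cancel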
infoReqJob :: (WithXMMS, WithMedialib) => IO a
infoReqJob = do
tv <- newTVarIO 0
forever $ do
id <- atomically $ do
c <- readTVar tv
when (c > 100) retry
r <- readTVar reqQ
case PSQ.minView r of
Nothing -> retry
Just (id :-> _, rest) -> do
writeTVar reqQ rest
writeTVar tv $ c + 1
return id
medialibGetInfo xmms id >>* do
rawv <- resultRawValue
liftIO $ do
info <- valueGet =<< propdictToDict rawv []
stamp <- atomically $ do
c <- readTVar tv
writeTVar tv $ c - 1
cc <- readTVar cache
case cc of
Just cc -> do
let stamp = cNextStamp cc
entries = cEntries cc
entry = CEReady stamp info
writeTVar cache $ Just
Cache { cEntries = IntMap.insert (fromIntegral id) entry entries
, cNextStamp = succ stamp
}
return $ Just stamp
Nothing -> return Nothing
withJust stamp $ \stamp -> atomically $
writeBroadcastTChan mediaInfoChan (id, stamp, info)
|
upwawet/vision
|
src/Medialib.hs
|
gpl-3.0
| 6,940
| 0
| 37
| 2,185
| 2,062
| 1,024
| 1,038
| -1
| -1
|
module Test.Lamdu.Db
( ramDB
) where
import qualified Control.Lens as Lens
import Data.IORef (IORef, newIORef, modifyIORef, readIORef)
import qualified Data.Map as Map
import Data.UUID.Types (UUID)
import qualified Lamdu.Data.Db.Init as DbInit
import Lamdu.Data.Db.Layout (DbM(..))
import Lamdu.Data.Export.JSON (fileImportAll)
import qualified Revision.Deltum.Transaction as Transaction
import System.Random (randomIO)
import Test.Lamdu.Prelude
initFreshDb :: [FilePath] -> Transaction.Store DbM -> IO ()
initFreshDb paths db = traverse fileImportAll paths <&> (^. traverse . Lens._2) >>= DbInit.initDb db
-- | Make an action to efficiently generate a fresh RAM DB
ramDB :: [FilePath] -> IO (IO (Transaction.Store DbM))
ramDB paths =
do
origDb <- newIORef Map.empty
let store :: IORef (Map UUID ByteString) -> Transaction.Store DbM
store db =
Transaction.onStoreM DbM Transaction.Store
{ Transaction.storeNewKey = randomIO
, Transaction.storeLookup = \key -> readIORef db <&> (^. Lens.at key)
, Transaction.storeAtomicWrite =
\updates ->
updates <&> updateKey & foldr (.) id & modifyIORef db
}
initFreshDb paths (store origDb)
readIORef origDb >>= newIORef <&> store
& pure
where
updateKey (k, v) = Lens.at k .~ v
|
lamdu/lamdu
|
test/Test/Lamdu/Db.hs
|
gpl-3.0
| 1,479
| 0
| 17
| 430
| 412
| 229
| 183
| 30
| 1
|
-- HsParser: A Parsec builder, a toy for experimenting with things:
-- @2013 Angel Alvarez, Felipe Zapata, from The ResMol Group
-- Common types to use with Parsec 3.0 style parsers
module ParsecCommon where
import Data.Functor.Identity
import Text.Parsec
-- A ParsecT-based parser carrying state of type "a" and returning data of type "b"
type MyParser a b = ParsecT [Char] a Identity b
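-- A minimal usage sketch (not from the original module), relying only on
-- combinators re-exported by Text.Parsec above: a stateless parser for one
-- or more digits.
digitsP :: MyParser () String
digitsP = many1 digit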
|
AngelitoJ/HsParser
|
src/ParsecCommon.hs
|
gpl-3.0
| 390
| 0
| 6
| 71
| 39
| 26
| 13
| 4
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.PlacementGroups.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of placement groups, possibly filtered. This method
-- supports paging.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ DCM/DFA Reporting And Trafficking API Reference> for @dfareporting.placementGroups.list@.
module Network.Google.Resource.DFAReporting.PlacementGroups.List
(
-- * REST Resource
PlacementGroupsListResource
-- * Creating a Request
, placementGroupsList
, PlacementGroupsList
-- * Request Lenses
, pglPlacementStrategyIds
, pglContentCategoryIds
, pglMaxEndDate
, pglCampaignIds
, pglPricingTypes
, pglSearchString
, pglIds
, pglProFileId
, pglPlacementGroupType
, pglDirectorySiteIds
, pglSortOrder
, pglSiteIds
, pglPageToken
, pglSortField
, pglMaxStartDate
, pglAdvertiserIds
, pglMinStartDate
, pglArchived
, pglMaxResults
, pglMinEndDate
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.placementGroups.list@ method which the
-- 'PlacementGroupsList' request conforms to.
type PlacementGroupsListResource =
"dfareporting" :>
"v2.7" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"placementGroups" :>
QueryParams "placementStrategyIds" (Textual Int64) :>
QueryParams "contentCategoryIds" (Textual Int64) :>
QueryParam "maxEndDate" Text :>
QueryParams "campaignIds" (Textual Int64) :>
QueryParams "pricingTypes"
PlacementGroupsListPricingTypes
:>
QueryParam "searchString" Text :>
QueryParams "ids" (Textual Int64) :>
QueryParam "placementGroupType"
PlacementGroupsListPlacementGroupType
:>
QueryParams "directorySiteIds" (Textual Int64) :>
QueryParam "sortOrder"
PlacementGroupsListSortOrder
:>
QueryParams "siteIds" (Textual Int64) :>
QueryParam "pageToken" Text :>
QueryParam "sortField"
PlacementGroupsListSortField
:>
QueryParam "maxStartDate" Text :>
QueryParams "advertiserIds"
(Textual Int64)
:>
QueryParam "minStartDate" Text :>
QueryParam "archived" Bool :>
QueryParam "maxResults"
(Textual Int32)
:>
QueryParam "minEndDate" Text
:>
QueryParam "alt" AltJSON :>
Get '[JSON]
PlacementGroupsListResponse
-- | Retrieves a list of placement groups, possibly filtered. This method
-- supports paging.
--
-- /See:/ 'placementGroupsList' smart constructor.
data PlacementGroupsList = PlacementGroupsList'
{ _pglPlacementStrategyIds :: !(Maybe [Textual Int64])
, _pglContentCategoryIds :: !(Maybe [Textual Int64])
, _pglMaxEndDate :: !(Maybe Text)
, _pglCampaignIds :: !(Maybe [Textual Int64])
, _pglPricingTypes :: !(Maybe [PlacementGroupsListPricingTypes])
, _pglSearchString :: !(Maybe Text)
, _pglIds :: !(Maybe [Textual Int64])
, _pglProFileId :: !(Textual Int64)
, _pglPlacementGroupType :: !(Maybe PlacementGroupsListPlacementGroupType)
, _pglDirectorySiteIds :: !(Maybe [Textual Int64])
, _pglSortOrder :: !(Maybe PlacementGroupsListSortOrder)
, _pglSiteIds :: !(Maybe [Textual Int64])
, _pglPageToken :: !(Maybe Text)
, _pglSortField :: !(Maybe PlacementGroupsListSortField)
, _pglMaxStartDate :: !(Maybe Text)
, _pglAdvertiserIds :: !(Maybe [Textual Int64])
, _pglMinStartDate :: !(Maybe Text)
, _pglArchived :: !(Maybe Bool)
, _pglMaxResults :: !(Maybe (Textual Int32))
, _pglMinEndDate :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PlacementGroupsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pglPlacementStrategyIds'
--
-- * 'pglContentCategoryIds'
--
-- * 'pglMaxEndDate'
--
-- * 'pglCampaignIds'
--
-- * 'pglPricingTypes'
--
-- * 'pglSearchString'
--
-- * 'pglIds'
--
-- * 'pglProFileId'
--
-- * 'pglPlacementGroupType'
--
-- * 'pglDirectorySiteIds'
--
-- * 'pglSortOrder'
--
-- * 'pglSiteIds'
--
-- * 'pglPageToken'
--
-- * 'pglSortField'
--
-- * 'pglMaxStartDate'
--
-- * 'pglAdvertiserIds'
--
-- * 'pglMinStartDate'
--
-- * 'pglArchived'
--
-- * 'pglMaxResults'
--
-- * 'pglMinEndDate'
placementGroupsList
:: Int64 -- ^ 'pglProFileId'
-> PlacementGroupsList
placementGroupsList pPglProFileId_ =
PlacementGroupsList'
{ _pglPlacementStrategyIds = Nothing
, _pglContentCategoryIds = Nothing
, _pglMaxEndDate = Nothing
, _pglCampaignIds = Nothing
, _pglPricingTypes = Nothing
, _pglSearchString = Nothing
, _pglIds = Nothing
, _pglProFileId = _Coerce # pPglProFileId_
, _pglPlacementGroupType = Nothing
, _pglDirectorySiteIds = Nothing
, _pglSortOrder = Nothing
, _pglSiteIds = Nothing
, _pglPageToken = Nothing
, _pglSortField = Nothing
, _pglMaxStartDate = Nothing
, _pglAdvertiserIds = Nothing
, _pglMinStartDate = Nothing
, _pglArchived = Nothing
, _pglMaxResults = Nothing
, _pglMinEndDate = Nothing
}
-- | Select only placement groups that are associated with these placement
-- strategies.
pglPlacementStrategyIds :: Lens' PlacementGroupsList [Int64]
pglPlacementStrategyIds
= lens _pglPlacementStrategyIds
(\ s a -> s{_pglPlacementStrategyIds = a})
. _Default
. _Coerce
-- | Select only placement groups that are associated with these content
-- categories.
pglContentCategoryIds :: Lens' PlacementGroupsList [Int64]
pglContentCategoryIds
= lens _pglContentCategoryIds
(\ s a -> s{_pglContentCategoryIds = a})
. _Default
. _Coerce
-- | Select only placements or placement groups whose end date is on or
-- before the specified maxEndDate. The date should be formatted as
-- \"yyyy-MM-dd\".
pglMaxEndDate :: Lens' PlacementGroupsList (Maybe Text)
pglMaxEndDate
= lens _pglMaxEndDate
(\ s a -> s{_pglMaxEndDate = a})
-- | Select only placement groups that belong to these campaigns.
pglCampaignIds :: Lens' PlacementGroupsList [Int64]
pglCampaignIds
= lens _pglCampaignIds
(\ s a -> s{_pglCampaignIds = a})
. _Default
. _Coerce
-- | Select only placement groups with these pricing types.
pglPricingTypes :: Lens' PlacementGroupsList [PlacementGroupsListPricingTypes]
pglPricingTypes
= lens _pglPricingTypes
(\ s a -> s{_pglPricingTypes = a})
. _Default
. _Coerce
-- | Allows searching for placement groups by name or ID. Wildcards (*) are
-- allowed. For example, \"placement*2015\" will return placement groups
-- with names like \"placement group June 2015\", \"placement group May
-- 2015\", or simply \"placements 2015\". Most of the searches also add
-- wildcards implicitly at the start and the end of the search string. For
-- example, a search string of \"placementgroup\" will match placement
-- groups with name \"my placementgroup\", \"placementgroup 2015\", or
-- simply \"placementgroup\".
pglSearchString :: Lens' PlacementGroupsList (Maybe Text)
pglSearchString
= lens _pglSearchString
(\ s a -> s{_pglSearchString = a})
-- | Select only placement groups with these IDs.
pglIds :: Lens' PlacementGroupsList [Int64]
pglIds
= lens _pglIds (\ s a -> s{_pglIds = a}) . _Default .
_Coerce
-- | User profile ID associated with this request.
pglProFileId :: Lens' PlacementGroupsList Int64
pglProFileId
= lens _pglProFileId (\ s a -> s{_pglProFileId = a})
. _Coerce
-- | Select only placement groups belonging to this group type. A package
-- is a simple group of placements that acts as a single pricing point for
-- a group of tags. A roadblock is a group of placements that not only acts
-- as a single pricing point but also assumes that all the tags in it will
-- be served at the same time. A roadblock requires one of its assigned
-- placements to be marked as primary for reporting.
pglPlacementGroupType :: Lens' PlacementGroupsList (Maybe PlacementGroupsListPlacementGroupType)
pglPlacementGroupType
= lens _pglPlacementGroupType
(\ s a -> s{_pglPlacementGroupType = a})
-- | Select only placement groups that are associated with these directory
-- sites.
pglDirectorySiteIds :: Lens' PlacementGroupsList [Int64]
pglDirectorySiteIds
= lens _pglDirectorySiteIds
(\ s a -> s{_pglDirectorySiteIds = a})
. _Default
. _Coerce
-- | Order of sorted results, default is ASCENDING.
pglSortOrder :: Lens' PlacementGroupsList (Maybe PlacementGroupsListSortOrder)
pglSortOrder
= lens _pglSortOrder (\ s a -> s{_pglSortOrder = a})
-- | Select only placement groups that are associated with these sites.
pglSiteIds :: Lens' PlacementGroupsList [Int64]
pglSiteIds
= lens _pglSiteIds (\ s a -> s{_pglSiteIds = a}) .
_Default
. _Coerce
-- | Value of the nextPageToken from the previous result page.
pglPageToken :: Lens' PlacementGroupsList (Maybe Text)
pglPageToken
= lens _pglPageToken (\ s a -> s{_pglPageToken = a})
-- | Field by which to sort the list.
pglSortField :: Lens' PlacementGroupsList (Maybe PlacementGroupsListSortField)
pglSortField
= lens _pglSortField (\ s a -> s{_pglSortField = a})
-- | Select only placements or placement groups whose start date is on or
-- before the specified maxStartDate. The date should be formatted as
-- \"yyyy-MM-dd\".
pglMaxStartDate :: Lens' PlacementGroupsList (Maybe Text)
pglMaxStartDate
= lens _pglMaxStartDate
(\ s a -> s{_pglMaxStartDate = a})
-- | Select only placement groups that belong to these advertisers.
pglAdvertiserIds :: Lens' PlacementGroupsList [Int64]
pglAdvertiserIds
= lens _pglAdvertiserIds
(\ s a -> s{_pglAdvertiserIds = a})
. _Default
. _Coerce
-- | Select only placements or placement groups whose start date is on or
-- after the specified minStartDate. The date should be formatted as
-- \"yyyy-MM-dd\".
pglMinStartDate :: Lens' PlacementGroupsList (Maybe Text)
pglMinStartDate
= lens _pglMinStartDate
(\ s a -> s{_pglMinStartDate = a})
-- | Select only archived placements. Don\'t set this field to select both
-- archived and non-archived placements.
pglArchived :: Lens' PlacementGroupsList (Maybe Bool)
pglArchived
= lens _pglArchived (\ s a -> s{_pglArchived = a})
-- | Maximum number of results to return.
pglMaxResults :: Lens' PlacementGroupsList (Maybe Int32)
pglMaxResults
= lens _pglMaxResults
(\ s a -> s{_pglMaxResults = a})
. mapping _Coerce
-- | Select only placements or placement groups whose end date is on or after
-- the specified minEndDate. The date should be formatted as
-- \"yyyy-MM-dd\".
pglMinEndDate :: Lens' PlacementGroupsList (Maybe Text)
pglMinEndDate
= lens _pglMinEndDate
(\ s a -> s{_pglMinEndDate = a})
instance GoogleRequest PlacementGroupsList where
type Rs PlacementGroupsList =
PlacementGroupsListResponse
type Scopes PlacementGroupsList =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient PlacementGroupsList'{..}
= go _pglProFileId
(_pglPlacementStrategyIds ^. _Default)
(_pglContentCategoryIds ^. _Default)
_pglMaxEndDate
(_pglCampaignIds ^. _Default)
(_pglPricingTypes ^. _Default)
_pglSearchString
(_pglIds ^. _Default)
_pglPlacementGroupType
(_pglDirectorySiteIds ^. _Default)
_pglSortOrder
(_pglSiteIds ^. _Default)
_pglPageToken
_pglSortField
_pglMaxStartDate
(_pglAdvertiserIds ^. _Default)
_pglMinStartDate
_pglArchived
_pglMaxResults
_pglMinEndDate
(Just AltJSON)
dFAReportingService
where go
= buildClient
(Proxy :: Proxy PlacementGroupsListResource)
mempty
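-- A rough, hypothetical usage sketch (kept as a comment; not verified against
-- this generated module). It assumes gogol's usual entry points from
-- "Network.Google" ('newEnv', 'runGoogle', 'send') and a placeholder profile
-- id:
--
--   env <- newEnv
--   runResourceT . runGoogle env $
--     send (placementGroupsList 12345 & pglMaxResults ?~ 10)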
|
rueshyna/gogol
|
gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/PlacementGroups/List.hs
|
mpl-2.0
| 14,089
| 0
| 32
| 4,068
| 2,074
| 1,186
| 888
| 289
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ConsumerSurveys.Surveys.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates a survey. Currently the only property that can be updated is the
-- owners property.
--
-- /See:/ <https://developers.google.com/surveys/ Consumer Surveys API Reference> for @consumersurveys.surveys.update@.
module Network.Google.Resource.ConsumerSurveys.Surveys.Update
(
-- * REST Resource
SurveysUpdateResource
-- * Creating a Request
, surveysUpdate
, SurveysUpdate
-- * Request Lenses
, suSurveyURLId
, suPayload
) where
import Network.Google.ConsumerSurveys.Types
import Network.Google.Prelude
-- | A resource alias for @consumersurveys.surveys.update@ method which the
-- 'SurveysUpdate' request conforms to.
type SurveysUpdateResource =
"consumersurveys" :>
"v2" :>
"surveys" :>
Capture "surveyUrlId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Survey :> Put '[JSON] Survey
-- | Updates a survey. Currently the only property that can be updated is the
-- owners property.
--
-- /See:/ 'surveysUpdate' smart constructor.
data SurveysUpdate =
SurveysUpdate'
{ _suSurveyURLId :: !Text
, _suPayload :: !Survey
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SurveysUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'suSurveyURLId'
--
-- * 'suPayload'
surveysUpdate
:: Text -- ^ 'suSurveyURLId'
-> Survey -- ^ 'suPayload'
-> SurveysUpdate
surveysUpdate pSuSurveyURLId_ pSuPayload_ =
SurveysUpdate' {_suSurveyURLId = pSuSurveyURLId_, _suPayload = pSuPayload_}
-- | External URL ID for the survey.
suSurveyURLId :: Lens' SurveysUpdate Text
suSurveyURLId
= lens _suSurveyURLId
(\ s a -> s{_suSurveyURLId = a})
-- | Multipart request metadata.
suPayload :: Lens' SurveysUpdate Survey
suPayload
= lens _suPayload (\ s a -> s{_suPayload = a})
instance GoogleRequest SurveysUpdate where
type Rs SurveysUpdate = Survey
type Scopes SurveysUpdate =
'["https://www.googleapis.com/auth/consumersurveys",
"https://www.googleapis.com/auth/userinfo.email"]
requestClient SurveysUpdate'{..}
= go _suSurveyURLId (Just AltJSON) _suPayload
consumerSurveysService
where go
= buildClient (Proxy :: Proxy SurveysUpdateResource)
mempty
|
brendanhay/gogol
|
gogol-consumersurveys/gen/Network/Google/Resource/ConsumerSurveys/Surveys/Update.hs
|
mpl-2.0
| 3,204
| 0
| 13
| 700
| 387
| 234
| 153
| 61
| 1
|
{-# LANGUAGE CPP #-}
-- |
-- Module : System.Info.Health
-- Copyright : (c) 2013 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module System.Info.Health
( Health(..)
, getHealth
) where
#ifdef darwin_HOST_OS
import System.Info.Health.OSX
#else
import System.Info.Health.Linux
#endif
import System.Info.Types
|
brendanhay/czar
|
system-info/src/System/Info/Health.hs
|
mpl-2.0
| 712
| 0
| 5
| 165
| 46
| 36
| 10
| 6
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdSense.Accounts.Sites.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all the sites available in an account.
--
-- /See:/ <http://code.google.com/apis/adsense/management/ AdSense Management API Reference> for @adsense.accounts.sites.list@.
module Network.Google.Resource.AdSense.Accounts.Sites.List
(
-- * REST Resource
AccountsSitesListResource
-- * Creating a Request
, accountsSitesList
, AccountsSitesList
-- * Request Lenses
, aslParent
, aslXgafv
, aslUploadProtocol
, aslAccessToken
, aslUploadType
, aslPageToken
, aslPageSize
, aslCallback
) where
import Network.Google.AdSense.Types
import Network.Google.Prelude
-- | A resource alias for @adsense.accounts.sites.list@ method which the
-- 'AccountsSitesList' request conforms to.
type AccountsSitesListResource =
"v2" :>
Capture "parent" Text :>
"sites" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListSitesResponse
-- | Lists all the sites available in an account.
--
-- /See:/ 'accountsSitesList' smart constructor.
data AccountsSitesList =
AccountsSitesList'
{ _aslParent :: !Text
, _aslXgafv :: !(Maybe Xgafv)
, _aslUploadProtocol :: !(Maybe Text)
, _aslAccessToken :: !(Maybe Text)
, _aslUploadType :: !(Maybe Text)
, _aslPageToken :: !(Maybe Text)
, _aslPageSize :: !(Maybe (Textual Int32))
, _aslCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsSitesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aslParent'
--
-- * 'aslXgafv'
--
-- * 'aslUploadProtocol'
--
-- * 'aslAccessToken'
--
-- * 'aslUploadType'
--
-- * 'aslPageToken'
--
-- * 'aslPageSize'
--
-- * 'aslCallback'
accountsSitesList
:: Text -- ^ 'aslParent'
-> AccountsSitesList
accountsSitesList pAslParent_ =
AccountsSitesList'
{ _aslParent = pAslParent_
, _aslXgafv = Nothing
, _aslUploadProtocol = Nothing
, _aslAccessToken = Nothing
, _aslUploadType = Nothing
, _aslPageToken = Nothing
, _aslPageSize = Nothing
, _aslCallback = Nothing
}
-- | Required. The account which owns the collection of sites. Format:
-- accounts\/{account}
aslParent :: Lens' AccountsSitesList Text
aslParent
= lens _aslParent (\ s a -> s{_aslParent = a})
-- | V1 error format.
aslXgafv :: Lens' AccountsSitesList (Maybe Xgafv)
aslXgafv = lens _aslXgafv (\ s a -> s{_aslXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
aslUploadProtocol :: Lens' AccountsSitesList (Maybe Text)
aslUploadProtocol
= lens _aslUploadProtocol
(\ s a -> s{_aslUploadProtocol = a})
-- | OAuth access token.
aslAccessToken :: Lens' AccountsSitesList (Maybe Text)
aslAccessToken
= lens _aslAccessToken
(\ s a -> s{_aslAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
aslUploadType :: Lens' AccountsSitesList (Maybe Text)
aslUploadType
= lens _aslUploadType
(\ s a -> s{_aslUploadType = a})
-- | A page token, received from a previous \`ListSites\` call. Provide this
-- to retrieve the subsequent page. When paginating, all other parameters
-- provided to \`ListSites\` must match the call that provided the page
-- token.
aslPageToken :: Lens' AccountsSitesList (Maybe Text)
aslPageToken
= lens _aslPageToken (\ s a -> s{_aslPageToken = a})
-- | The maximum number of sites to include in the response, used for paging.
-- If unspecified, at most 10000 sites will be returned. The maximum value
-- is 10000; values above 10000 will be coerced to 10000.
aslPageSize :: Lens' AccountsSitesList (Maybe Int32)
aslPageSize
= lens _aslPageSize (\ s a -> s{_aslPageSize = a}) .
mapping _Coerce
-- | JSONP
aslCallback :: Lens' AccountsSitesList (Maybe Text)
aslCallback
= lens _aslCallback (\ s a -> s{_aslCallback = a})
instance GoogleRequest AccountsSitesList where
type Rs AccountsSitesList = ListSitesResponse
type Scopes AccountsSitesList =
'["https://www.googleapis.com/auth/adsense",
"https://www.googleapis.com/auth/adsense.readonly"]
requestClient AccountsSitesList'{..}
= go _aslParent _aslXgafv _aslUploadProtocol
_aslAccessToken
_aslUploadType
_aslPageToken
_aslPageSize
_aslCallback
(Just AltJSON)
adSenseService
where go
= buildClient
(Proxy :: Proxy AccountsSitesListResource)
mempty
|
brendanhay/gogol
|
gogol-adsense/gen/Network/Google/Resource/AdSense/Accounts/Sites/List.hs
|
mpl-2.0
| 5,760
| 0
| 18
| 1,370
| 887
| 515
| 372
| 125
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigQuery.TableData.InsertAll
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Streams data into BigQuery one record at a time without needing to run a
-- load job. Requires the WRITER dataset role.
--
-- /See:/ <https://cloud.google.com/bigquery/ BigQuery API Reference> for @bigquery.tabledata.insertAll@.
module Network.Google.Resource.BigQuery.TableData.InsertAll
(
-- * REST Resource
TableDataInsertAllResource
-- * Creating a Request
, tableDataInsertAll
, TableDataInsertAll
-- * Request Lenses
, tdiaPayload
, tdiaDataSetId
, tdiaProjectId
, tdiaTableId
) where
import Network.Google.BigQuery.Types
import Network.Google.Prelude
-- | A resource alias for @bigquery.tabledata.insertAll@ method which the
-- 'TableDataInsertAll' request conforms to.
type TableDataInsertAllResource =
"bigquery" :>
"v2" :>
"projects" :>
Capture "projectId" Text :>
"datasets" :>
Capture "datasetId" Text :>
"tables" :>
Capture "tableId" Text :>
"insertAll" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TableDataInsertAllRequest :>
Post '[JSON] TableDataInsertAllResponse
-- | Streams data into BigQuery one record at a time without needing to run a
-- load job. Requires the WRITER dataset role.
--
-- /See:/ 'tableDataInsertAll' smart constructor.
data TableDataInsertAll =
TableDataInsertAll'
{ _tdiaPayload :: !TableDataInsertAllRequest
, _tdiaDataSetId :: !Text
, _tdiaProjectId :: !Text
, _tdiaTableId :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TableDataInsertAll' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tdiaPayload'
--
-- * 'tdiaDataSetId'
--
-- * 'tdiaProjectId'
--
-- * 'tdiaTableId'
tableDataInsertAll
:: TableDataInsertAllRequest -- ^ 'tdiaPayload'
-> Text -- ^ 'tdiaDataSetId'
-> Text -- ^ 'tdiaProjectId'
-> Text -- ^ 'tdiaTableId'
-> TableDataInsertAll
tableDataInsertAll pTdiaPayload_ pTdiaDataSetId_ pTdiaProjectId_ pTdiaTableId_ =
TableDataInsertAll'
{ _tdiaPayload = pTdiaPayload_
, _tdiaDataSetId = pTdiaDataSetId_
, _tdiaProjectId = pTdiaProjectId_
, _tdiaTableId = pTdiaTableId_
}
-- | Multipart request metadata.
tdiaPayload :: Lens' TableDataInsertAll TableDataInsertAllRequest
tdiaPayload
= lens _tdiaPayload (\ s a -> s{_tdiaPayload = a})
-- | Dataset ID of the destination table.
tdiaDataSetId :: Lens' TableDataInsertAll Text
tdiaDataSetId
= lens _tdiaDataSetId
(\ s a -> s{_tdiaDataSetId = a})
-- | Project ID of the destination table.
tdiaProjectId :: Lens' TableDataInsertAll Text
tdiaProjectId
= lens _tdiaProjectId
(\ s a -> s{_tdiaProjectId = a})
-- | Table ID of the destination table.
tdiaTableId :: Lens' TableDataInsertAll Text
tdiaTableId
= lens _tdiaTableId (\ s a -> s{_tdiaTableId = a})
instance GoogleRequest TableDataInsertAll where
type Rs TableDataInsertAll =
TableDataInsertAllResponse
type Scopes TableDataInsertAll =
'["https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/bigquery.insertdata",
"https://www.googleapis.com/auth/cloud-platform"]
requestClient TableDataInsertAll'{..}
= go _tdiaProjectId _tdiaDataSetId _tdiaTableId
(Just AltJSON)
_tdiaPayload
bigQueryService
where go
= buildClient
(Proxy :: Proxy TableDataInsertAllResource)
mempty
|
brendanhay/gogol
|
gogol-bigquery/gen/Network/Google/Resource/BigQuery/TableData/InsertAll.hs
|
mpl-2.0
| 4,486
| 0
| 18
| 1,085
| 552
| 328
| 224
| 93
| 1
|
-- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
func :: a -> (a -> a)
|
lspitzner/brittany
|
data/Test363.hs
|
agpl-3.0
| 136
| 0
| 7
| 18
| 18
| 10
| 8
| 1
| 0
|
module ViperVM.Platform.ProcessorCapabilities where
data ProcessorCapability =
DoubleFloatingPoint
deriving (Eq,Ord,Show)
|
hsyl20/HViperVM
|
lib/ViperVM/Platform/ProcessorCapabilities.hs
|
lgpl-3.0
| 129
| 0
| 6
| 16
| 29
| 17
| 12
| 4
| 0
|
{-
Copyright 2015 Martin Buck
This file is part of H2D.
H2D is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
H2D is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with H2D. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE FlexibleInstances #-}
module Moveable where
import Types2D
import Base2D
class Moveable a where
move :: a -> Vec2D -> a
instance Moveable Vec2D where
move (Vec2D a b) (Vec2D x y) = Vec2D (a+x) (b+y)
instance Moveable Path2D where
move path delta = chunkParMap pointChunkSize (move delta) path
|
I3ck/H2D
|
src/Moveable.hs
|
lgpl-3.0
| 987
| 0
| 8
| 181
| 118
| 62
| 56
| 10
| 0
|
import Control.Monad.Writer
--import Data.Monoid
gcd' :: Int -> Int -> Writer [String] Int
gcd' a b
| b == 0 = do
tell ["Finished with " ++ show a]
return a
| otherwise = do
tell [show a ++ " mod " ++ show b ++ " = " ++ show (a `mod` b)]
gcd' b (a `mod` b)
newtype DiffList a = DiffList {getDiffList :: [a] -> [a]}
toDiffList :: [a] -> DiffList a
toDiffList xs = DiffList (xs++)
fromDiffList :: DiffList a -> [a]
fromDiffList (DiffList f) = f []
instance Monoid (DiffList a) where
mempty = DiffList (\xs -> [] ++ xs)
(DiffList f) `mappend` (DiffList g) = DiffList (\xs -> f (g xs))
gcd'' :: Int -> Int -> Writer (DiffList String) Int
gcd'' a b
| b == 0 = do
tell (toDiffList ["Finished with " ++ show a])
return a
| otherwise = do
--result <- gcd'' b (a `mod` b)
tell (toDiffList [show a ++ " mod " ++ show b ++ " = " ++ show (a `mod` b)])
--return result
gcd'' b (a `mod` b)
finalCountDown :: Int -> Writer (DiffList String) ()
finalCountDown 0 = do
tell (toDiffList ["0"])
finalCountDown x = do
finalCountDown (x-1)
tell (toDiffList [show x])
finalCountDown' :: Int -> Writer [String] ()
finalCountDown' 0 = do
tell ["0"]
finalCountDown' x = do
finalCountDown' (x-1)
tell [show x]
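-- A small runner (not in the original file) showing how the DiffList log is
-- turned back into an ordinary list before printing.
printGcdLog :: Int -> Int -> IO ()
printGcdLog a b = mapM_ putStrLn . fromDiffList . snd . runWriter $ gcd'' a b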
|
aniketd/learn.haskell
|
LYAH/euclid.hs
|
unlicense
| 1,325
| 0
| 16
| 373
| 611
| 304
| 307
| 37
| 1
|
{-
Created : 2014 Jun 19 (Thu) 10:59:09 by Harold Carr.
Last Modified : 2014 Jun 19 (Thu) 18:44:44 by Harold Carr.
-}
module HW10_HC_AParser where
import Control.Applicative
import Data.Char
import Data.Maybe (fromJust)
import qualified Test.HUnit as T
import qualified Test.HUnit.Util as U
-- A parser for a value of type a is a function which takes a String
-- representing the input to be parsed, and succeeds or fails; if it
-- succeeds, it returns the parsed value along with the remainder of
-- the input.
newtype Parser a = Parser { runParser :: String -> Maybe (a, String) }
-- For example, 'satisfy' takes a predicate on Char, and constructs a
-- parser which succeeds only if it sees a Char that satisfies the
-- predicate (which it then returns). If it encounters a Char that
-- does not satisfy the predicate (or an empty input), it fails.
satisfy :: (Char -> Bool) -> Parser Char
satisfy p = Parser f
where
f [] = Nothing -- fail on the empty input
f (x:xs) -- check if x satisfies the predicate
-- if so, return x along with the remainder
-- of the input (that is, xs)
| p x = Just (x, xs)
| otherwise = Nothing -- otherwise, fail
-- Using satisfy, we can define the parser 'char c' which expects to
-- see exactly the character c, and fails otherwise.
char :: Char -> Parser Char
char c = satisfy (== c)
-- For convenience, we've also provided a parser for positive
-- integers.
posInt :: Parser Integer
posInt = Parser f
where
f xs
| null ns = Nothing
| otherwise = Just (read ns, rest)
where (ns, rest) = span isDigit xs
ex0 :: T.Test
ex0 = T.TestList
[
U.teq "e00" (runParser (satisfy isUpper) "ABC") (Just ('A',"BC"))
, U.teq "e01" (runParser (satisfy isUpper) "abc") Nothing
, U.teq "e02" (runParser (char 'x') "xyz") (Just ('x',"yz"))
, U.teq "e03" (runParser posInt "10ab20") (Just (10,"ab20"))
, U.teq "e03" (runParser posInt "ab20") Nothing
, U.teq "e03" (runParser posInt "20") (Just (20,""))
]
------------------------------------------------------------
-- Your code goes below here
------------------------------------------------------------
------------------------------------------------------------------------------
-- Exercise 1
instance Functor Parser where
fmap f (Parser p) = Parser (\s -> p s >>= \(r,rest) -> return (f r, rest))
ex1 :: T.Test
ex1 = T.TestList
[
U.teq "e10" (runParser (fmap toUpper (satisfy isLower))
"abc")
(Just ('A', "bc"))
, U.teq "e11" (runParser (fmap (*2) posInt)
"20")
(Just (40, ""))
, U.teq "e12" (runParser ( (*3) <$> posInt)
"20")
(Just (60, ""))
]
------------------------------------------------------------------------------
-- Exercise 2
instance Applicative Parser where
pure a = Parser (\s -> Just (a, s))
-- Parser l <*> Parser r = Parser (\s -> l s >>= \(a,rest) -> r rest >>= \(a',rest') -> return (a a', rest'))
Parser l <*> r = Parser (\s -> l s >>= \(a,rest) -> runParser (a <$> r) rest)
-- for test
type Name = String
data Employee = Emp { name :: Name, phone :: String } deriving (Eq, Show)
parseName :: Parser Name
parseName = Parser $ pp isAlpha
parsePhone :: Parser String
parsePhone = Parser $ pp isDigit
parseEmployee :: Parser Employee
parseEmployee = Emp <$> parseName <*> parsePhone
pp :: (Char -> Bool) -> String -> Maybe (String, String)
pp f s0 = pp' s0 []
where
pp' s acc = case runParser (satisfy f) s of
Nothing -> if null acc then Nothing else Just (reverse acc, s) -- TODO avoid reverse
(Just (c,rest)) -> pp' rest (c:acc)
ex2 :: T.Test
ex2 = T.TestList
[
U.teq "e20" (runParser parseName "Harold8016824058etc") (Just ("Harold", "8016824058etc"))
, U.teq "e21" (runParser parsePhone "Harold8016824058etc") Nothing
, U.teq "e22" (runParser parsePhone "8016824058Harold000") (Just ("8016824058", "Harold000"))
, U.teq "e23" (runParser parseEmployee "Harold8016824058etc") (Just (Emp "Harold" "8016824058", "etc"))
]
ex2' :: [T.Test]
ex2' = U.tt "trc"
[ runParser parseEmployee "H8e"
, runParser (Emp <$> parseName <*> parsePhone) "H8e"
, runParser (Parser (\s -> let (r,rest) = fromJust $ (runParser parseName) s in Just (Emp r,rest)) <*> parsePhone) "H8e"
, runParser (Parser (\s' -> (\s -> let (r,rest) = fromJust $ (runParser parseName) s in Just (Emp r,rest)) s'
>>= \(a,rest) -> (runParser parsePhone) rest
>>= \(a',rest') -> return (a a', rest'))) "H8e"
, (\s -> let (r,rest) = fromJust $ (runParser parseName) s in Just (Emp r,rest)) "H8e"
>>= \(a,rest) -> (runParser parsePhone) rest
>>= \(a',rest') -> return (a a', rest')
, let (r,rest) = fromJust $ (runParser parseName) "H8e" in Just (Emp r,rest)
>>= \(a,rest) -> (runParser parsePhone) rest
>>= \(a',rest') -> return (a a', rest')
, Just (Emp "H","8e")
>>= \(a,rest) -> (runParser parsePhone) rest
>>= \(a',rest') -> return (a a', rest')
, (runParser parsePhone) "8e"
>>= \(a',rest') -> return (Emp "H" a', rest')
, Just (Emp "H" "8", "e")
]
(Just (Emp "H" "8", "e"))
------------------------------------------------------------------------------
-- Exercise 3
abParser :: Parser (Char, Char)
abParser = (,) <$> satisfy ('a'==) <*> satisfy ('b'==)
abParser_ :: Parser ()
abParser_ = (\(_,_) -> ()) <$> abParser
-- an explicit one just to see how it works
intPairP :: Parser [Integer]
intPairP = Parser (\s -> let (x,r1) = fromJust $ runParser posInt s
(_,r2) = fromJust $ runParser (satisfy (' '==)) r1
(y,r3) = fromJust $ runParser posInt r2
in Just ([x,y], r3))
-- the real deal using applicatives
intPair :: Parser [Integer]
intPair = (\x _ y -> [x,y]) <$> posInt <*> satisfy (' '==) <*> posInt
ex3 :: T.Test
ex3 = T.TestList
[
U.teq "e300" (runParser abParser "abcdef") (Just (('a','b'),"cdef"))
, U.teq "e301" (runParser abParser "aebcdf") Nothing
, U.teq "e310" (runParser abParser_ "abcdef") (Just (() ,"cdef"))
, U.teq "e311" (runParser abParser_ "aebcdf") Nothing
, U.teq "e320" (runParser intPairP "12 34") (Just ([12,34],""))
, U.teq "e321" (runParser intPair "12 34") (Just ([12,34],""))
]
------------------------------------------------------------------------------
-- Exercise 4
instance Alternative Parser where
empty = Parser (const Nothing)
(Parser l) <|> (Parser r) = Parser (\s -> case l s of
Nothing -> r s
result -> result)
ex4 :: T.Test
ex4 = T.TestList
[
U.teq "e40" (runParser (satisfy isAlpha <|> satisfy isDigit) "a1b2") (Just ('a',"1b2"))
, U.teq "e41" (runParser (satisfy isDigit <|> satisfy isAlpha) "a1b2") (Just ('a',"1b2"))
]
------------------------------------------------------------------------------
-- Exercise 5
-- parses either an integer value or an uppercase character
intOrUppercase :: Parser ()
intOrUppercase = u <$> posInt <|> u <$> satisfy isUpper
where u _ = ()
ex5 :: T.Test
ex5 = T.TestList
[
U.teq "e50" (runParser intOrUppercase "XYZ") (Just ((), "YZ"))
, U.teq "e51" (runParser intOrUppercase "foo") Nothing
, U.teq "e51" (runParser intOrUppercase "10X") (Just ((), "X"))
]
------------------------------------------------------------------------------
hw10 :: IO T.Counts
hw10 = do
T.runTestTT ex0
T.runTestTT ex1
T.runTestTT ex2
T.runTestTT $ T.TestList ex2'
T.runTestTT ex3
T.runTestTT ex4
T.runTestTT ex5
-- End of file.
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/course/2014-06-upenn/cis194/src/HW10_HC_AParser.hs
|
unlicense
| 8,675
| 0
| 23
| 2,745
| 2,631
| 1,401
| 1,230
| 136
| 3
|
import Helpers.ConvexHull (convexHull)
import Data.List (nub, (\\))
-- Create a walk of steps sqrt(1), sqrt(4), sqrt(5), sqrt(8), sqrt(9), sqrt(10), sqrt(13)
-- minimizing the perimeter of the convexHull.
-- Slow, but it works for now.
stepsOfLengthSqrtN :: Int -> [(Int, Int)]
stepsOfLengthSqrtN n = filter (\(a, b) -> a^2 + b^2 == n) [(a, b) | a <- [0..n], b <- [a..n]]
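-- A tiny illustration (not in the original file): the canonical step shapes of
-- squared length 5, i.e. pairs (a, b) with 0 <= a <= b and a^2 + b^2 == 5.
exampleSqrt5Steps :: [(Int, Int)]
exampleSqrt5Steps = stepsOfLengthSqrtN 5   -- == [(1, 2)]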
planeFigureSeq :: [(Int, Int)]
planeFigureSeq = (0, 0) : (1, 0) : (0, 1) : recurse 3 where
recurse :: Int -> [(Int, Int)]
recurse n = nextTerm : recurse (n + 1) where
knownTerms = take n planeFigureSeq
currentPosition = planeFigureSeq !! (n - 1)
nextTerm :: (Int, Int)
nextTerm = minByUniq (perimeter . (:knownTerms)) validPossibleSteps where
possibleSteps = nextSteps currentPosition $ stepsOfLengthSqrtN $ a001481 (n + 1)
validPossibleSteps = possibleSteps \\ knownTerms
perimeter :: [(Int, Int)] -> Float
perimeter ps@(p:_) = computePerimeter $ convexHull ps where
computePerimeter [] = 0
computePerimeter [a] = 0
computePerimeter [a, b] = dist a b
computePerimeter ps = dist (head ps) (last ps) + adj where
adj = sum $ zipWith dist ps $ tail ps
dist :: (Int, Int) -> (Int, Int) -> Float
dist (x_1, y_1) (x_2, y_2) = sqrt $ fromIntegral $ (x_1 - x_2)^2 + (y_1 - y_2)^2
steps :: (Int, Int) -> (Int, Int) -> [(Int, Int)]
steps (a, b) (0, y) = [(a + y, b), (a - y, b), (a, b + y), (a, b - y)]
steps (a, b) (x, y) = [(a + x, b + y), (a + x, b - y), (a - x, b + y), (a - x, b - y), (a + y, b + x), (a + y, b - x), (a - y, b + x), (a - y, b - x)]
nextSteps :: (Int, Int) -> [(Int, Int)] -> [(Int, Int)]
nextSteps currentPosition = nub . concatMap (steps currentPosition)
-- A001481 1 = 0
-- A001481 2 = 1
-- A001481 3 = 2
-- A001481 4 = 4
-- A001481 5 = 5
-- A001481 6 = 8
-- A001481 7 = 9
-- A001481 8 = 10
-- A001481 9 = 13
a001481_list = filter (not . null . stepsOfLengthSqrtN ) [0..]
a001481 n = a001481_list !! (n - 1)
-- Find minimum, but only if there's only one minimum.
minByUniq :: (Show a, Ord b) => (a -> b) -> [a] -> a
minByUniq f (a:as) = recurse [a] as where
recurse [knownMin] [] = knownMin
recurse (x:x':xs) [] = error $ show (x, x')
recurse ms@(m:_) (x:xs)
| f m < f x = recurse ms xs
| f m == f x = recurse (x:ms) xs
| f m > f x = recurse [x] xs
|
peterokagey/haskellOEIS
|
src/Sandbox/PlaneFigure.hs
|
apache-2.0
| 2,308
| 0
| 14
| 530
| 1,133
| 633
| 500
| 38
| 4
|
-- bubble sort
iter :: [Int] -> (Bool,Int,[Int])
iter [] = (False,0,[])
iter (x:[]) = (False,0,(x:[]))
iter (x0:x1:xs) =
let (f,n,x) = if x0 > x1
then iter (x0:xs)
else iter (x1:xs)
in
if x0 > x1
then (True,n+1,x1:x)
else (f, n ,x0:x)
ans n x =
let (f,n',x') = iter x
in
if f
then ans (n'+n) x'
else (n'+n,x')
ans' (0:_) = []
ans' (n:x) =
let d = take n x
r = drop n x
in
(ans 0 d):(ans' r)
main = do
c <- getContents
let i = map read $ lines c :: [Int]
o = ans' i
mapM_ (\(n,_) -> print n) o
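-- A small illustration (not part of the original submission): 'ans' returns
-- the number of swaps performed together with the sorted list.
exampleAns :: (Int, [Int])
exampleAns = ans 0 [5, 3, 2, 1, 4]   -- == (7, [1, 2, 3, 4, 5])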
|
a143753/AOJ
|
0167.hs
|
apache-2.0
| 595
| 0
| 12
| 212
| 406
| 218
| 188
| 25
| 3
|
{-# LANGUAGE TemplateHaskell #-}
module Topical.Text.Types
( Tokenizer
, PlainTokenizer
, Tree(..)
, _EmptyTree
, _Node
, nodeData
, nodeLeft
, nodeRight
, unfoldTree
, nlr
, lnr
) where
import Control.Lens
import Taygeta.Types (PlainTokenizer, Tokenizer)
data Tree a = EmptyTree
| Node { _nodeData :: a
, _nodeLeft :: Tree a
, _nodeRight :: Tree a
}
deriving (Show, Eq)
makePrisms ''Tree
makeLenses ''Tree
instance Functor Tree where
fmap f (Node a l r) = Node (f a) (fmap f l) (fmap f r)
fmap _ EmptyTree = EmptyTree
instance Applicative Tree where
pure x = Node x EmptyTree EmptyTree
(Node f lf rf) <*> (Node x lx rx) = Node (f x) (lf <*> lx) (rf <*> rx)
EmptyTree <*> _ = EmptyTree
_ <*> EmptyTree = EmptyTree
nlr :: Tree a -> [a]
nlr EmptyTree = []
nlr (Node n l r) = n : nlr l ++ nlr r
lnr :: Tree a -> [a]
lnr EmptyTree = []
lnr (Node n l r) = lnr l ++ n : lnr r
unfoldTree :: (a -> (b, (Maybe a, Maybe a))) -> a -> Tree b
unfoldTree f a = Node n l' r'
where
(n, (l, r)) = f a
l' = maybe EmptyTree (unfoldTree f) l
r' = maybe EmptyTree (unfoldTree f) r
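-- A small sketch (not part of the original module) of 'unfoldTree': splitting
-- a closed range at its midpoint yields a balanced tree, flattened here in
-- pre-order with 'nlr'.
exampleBalanced :: [Int]
exampleBalanced = nlr (unfoldTree split (1, 7))   -- == [4, 2, 1, 3, 6, 5, 7]
  where
    split (lo, hi) =
      let mid = (lo + hi) `div` 2
      in ( mid
         , ( if lo < mid then Just (lo, mid - 1) else Nothing
           , if mid < hi then Just (mid + 1, hi) else Nothing ) )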
|
erochest/topical
|
src/Topical/Text/Types.hs
|
apache-2.0
| 1,317
| 0
| 10
| 476
| 530
| 277
| 253
| 41
| 1
|
-- |Contract test-cases. The .js files in the contracts directory are
-- parsed as:
--
-- tests ::= test ;
-- | test ; tests
--
-- test ::= succeeds { typedjs-statement* } { untypedjs-statement* }
--
-- Note that there is a trailing ';' at the end of a list of tests.
-- JavaScript-style comments are permitted in .test files.
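-- For illustration, a hypothetical test in this grammar (placeholder bodies,
-- not real typed/untyped JavaScript):
--
--   succeeds { /* typed-js statements */ } { /* untyped-js statements */ } ;
--
-- A test may also be prefixed with "dontencapsulate", or use
-- blames "party" { ... } { ... } ;  -- see 'parseTestCase' below.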
module Contracts where
import Text.Regex.Posix
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Pos
import Text.PrettyPrint.HughesPJ (render)
import Test.HUnit
import qualified Control.Exception as E
import qualified Data.ByteString.Char8 as B
import qualified WebBits.JavaScript as JS
import TypedJavaScript.Syntax (Statement,showSp)
import TypedJavaScript.Lexer (semi,reservedOp, reserved,stringLiteral)
import TypedJavaScript.Parser (parseBlockStmt)
import TypedJavaScript.TypeChecker (typeCheck)
import TypedJavaScript.TypeErasure (eraseTypesStmts)
import TypedJavaScript.Contracts (encapsulate,
encapsulateTypedModule, getContractsLib)
import TypedJavaScript.Test
--TODO: do what old rhino did for success/fail
import System.Exit
--TODO: these can be considered type-check tests, too?
runTest rs pos tjs js shouldEncaps = do
--env <- typeCheck [tjs] --TODO: print line numbers for type-check errors
result <- E.try $ typeCheck [tjs]
case result of
Left (err::(E.SomeException)) -> return $ Left $ assertFailure
((showSp pos) ++ ": failed to type-check: " ++ (show $ err))
Right env -> return $ Right $ do
case shouldEncaps of
True -> do
tjs' <- return $ encapsulate (eraseTypesStmts [tjs]) env []
let str = render (JS.pp $ JS.BlockStmt pos [tjs',js])
feedRhino rs (B.pack str)
False -> do
tjs' <- return $ eraseTypesStmts [tjs]
let str = render (JS.pp $ JS.BlockStmt pos (tjs' ++ [js]))
feedRhino rs (B.pack str)
assertSucceeds :: RhinoService
-> SourcePos
-> Statement SourcePos
-> JS.Statement SourcePos
-> Bool
-> Assertion
assertSucceeds rs pos tjs js shouldEncaps = do
--possible exception names so far: JavaScriptException, EcmaError
--possible exception types so far: Exception, TypeError
let regexp = B.pack $ "org.mozilla.javascript.[a-zA-Z0-9_]*: "
retval <- runTest rs pos tjs js shouldEncaps
case retval of
Left failed -> failed
Right retstr' -> do
retstr <- retstr'
case retstr =~ regexp of
True -> assertFailure $ (showSp pos) ++ ": Expected success, but an" ++
" exception was printed:\n" ++ B.unpack retstr
False -> return ()
assertBlames :: RhinoService
-> SourcePos
-> String -- ^guilty party
-> Statement SourcePos
-> JS.Statement SourcePos
-> Bool
-> Assertion
assertBlames rs pos blamed tjs js shouldEncaps = do
let regexp = B.pack $ blamed ++ " violated the contract"
retval <- runTest rs pos tjs js shouldEncaps
case retval of
Left fail -> fail
Right retstr' -> do
retstr <- retstr'
case retstr =~ regexp of
True -> return ()
False -> assertFailure $
(showSp pos) ++ ": Expected contract violation blaming " ++
blamed ++ " at " ++ show pos ++ "; rhino returned " ++
B.unpack retstr
closeRhinoTest :: RhinoService -> Assertion
closeRhinoTest rs = do
code <- stopRhino rs
case code of
    ExitSuccess -> assertBool "Rhino succeeded" True
    ExitFailure n -> assertFailure $ "Rhino failed, exit code: " ++ (show n)
parseTestCase :: RhinoService -> CharParser st Test
parseTestCase rs = do
pos <- getPosition
shouldEncaps <- (reserved "dontencapsulate" >> return False) <|>
(return True)
let succeeds = do
reserved "succeeds"
tjs <- parseBlockStmt
js <- JS.parseBlockStmt
return $ TestCase (assertSucceeds rs pos tjs js shouldEncaps)
blames = do
reserved "blames"
blamed <- stringLiteral
tjs <- parseBlockStmt
js <- JS.parseBlockStmt
return $ TestCase (assertBlames rs pos blamed tjs js shouldEncaps)
succeeds <|> blames
readTestFile :: RhinoService -> FilePath -> IO Test
readTestFile rs path = do
result <- parseFromFile ((parseTestCase rs) `endBy` semi) path
case result of
-- Reporting the parse error is deferred until the test is run.
Left err -> return $ TestCase (assertFailure (show err))
Right tests -> return $ TestList tests
main = do
rs <- startRhinoService
--feed the rs the contracts library code
lib <- getContractsLib
let str = "var window = { };\n" ++ render
(JS.pp $ JS.BlockStmt (initialPos "contractslib") lib)
rez <- feedRhino rs $ B.pack str
--TODO: check for exceptions for this initial feeding
putStrLn $ "Contracts lib initialized, rhino returned: " ++ B.unpack rez
testPaths <- getPathsWithExtension ".js" "contracts"
testCases <- mapM (readTestFile rs) testPaths
return (TestList $ testCases ++ [TestCase $ closeRhinoTest rs])
|
brownplt/strobe-old
|
tests/Contracts.hs
|
bsd-2-clause
| 5,166
| 0
| 27
| 1,319
| 1,320
| 658
| 662
| -1
| -1
|
module Network.Mail.SMTP.Auth
where
import Data.Digest.MD5
import Codec.Utils
import qualified Codec.Binary.Base64.String as B64 (encode, decode)
import Data.List
import Data.Bits
import Data.Array
type UserName = String
type Password = String
data AuthType = PLAIN
| LOGIN
| CRAM_MD5
deriving Eq
instance Show AuthType where
showsPrec d at = showParen (d>app_prec) $ showString $ showMain at
where app_prec = 10
showMain PLAIN = "PLAIN"
showMain LOGIN = "LOGIN"
showMain CRAM_MD5 = "CRAM-MD5"
b64Encode :: String -> String
b64Encode = map (toEnum.fromEnum) . B64.encode . map (toEnum.fromEnum)
b64Decode :: String -> String
b64Decode = map (toEnum.fromEnum) . B64.decode . map (toEnum.fromEnum)
showOctet :: [Octet] -> String
showOctet = concat . map hexChars
where hexChars c = [arr ! (c `div` 16), arr ! (c `mod` 16)]
arr = listArray (0, 15) "0123456789abcdef"
hmacMD5 :: String -> String -> [Octet]
hmacMD5 text key = hash $ okey ++ hash (ikey ++ map (toEnum.fromEnum) text)
where koc = map (toEnum.fromEnum) key
key' = if length koc > 64
then hash koc ++ replicate 48 0
else koc ++ replicate (64-length koc) 0
ipad = replicate 64 0x36
opad = replicate 64 0x5c
ikey = zipWith xor key' ipad
okey = zipWith xor key' opad
plain :: UserName -> Password -> String
plain user pass = b64Encode $ concat $ intersperse "\0" [user, user, pass]
login :: UserName -> Password -> (String, String)
login user pass = (b64Encode user, b64Encode pass)
cramMD5 :: String -> UserName -> Password -> String
cramMD5 challenge user pass =
b64Encode (user ++ " " ++ showOctet (hmacMD5 challenge pass))
auth :: AuthType -> String -> UserName -> Password -> String
auth PLAIN _ u p = plain u p
auth LOGIN _ u p = let (u', p') = login u p in unwords [u', p']
auth CRAM_MD5 c u p = cramMD5 c u p
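-- A small illustration (not in the original module) of how each mechanism is
-- rendered; the CRAM-MD5 challenge would normally arrive base64-decoded from
-- the server, and the credentials here are placeholders.
exampleAuthLines :: [String]
exampleAuthLines =
  [ auth PLAIN    ""          "alice" "secret"
  , auth LOGIN    ""          "alice" "secret"
  , auth CRAM_MD5 "<123@srv>" "alice" "secret"
  ]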
|
jtdaugherty/smtp
|
src/Network/Mail/SMTP/Auth.hs
|
bsd-3-clause
| 2,002
| 0
| 12
| 534
| 725
| 387
| 338
| 48
| 2
|
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds, PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, FlexibleContexts #-}
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -Wno-redundant-constraints #-}
-- {-# LANGUAGE OverlappingInstances #-}
-- Only for MemberU below, when emulating Monad Transformers
{-# LANGUAGE FunctionalDependencies, UndecidableInstances #-}
{-# LANGUAGE Strict #-}
-- Open unions (type-indexed co-products) for extensible effects
-- All operations are constant-time, and there is no Typeable constraint
-- This is a variation of OpenUnion5.hs, which relies on overlapping
-- instances instead of closed type families. Closed type families
-- have their problems: overlapping instances can resolve even
-- for unground types, but closed type families are subject to a
-- strict apartness condition.
-- This implementation is very similar to OpenUnion1.hs, but without
-- the annoying Typeable constraint. We sort of emulate it:
-- Our list r of open union components is a small Universe.
-- Therefore, we can use the Typeable-like evidence in that
-- universe. We hence can define
--
-- data Union r v where
-- Union :: t v -> TRep t r -> Union r v -- t is existential
-- where
-- data TRep t r where
-- T0 :: TRep t (t ': r)
-- TS :: TRep t r -> TRep (any ': r)
-- Then Member is a type class that produces TRep
-- Taken literally it doesn't seem much better than
-- OpenUnion41.hs. However, we can cheat and use the index of the
-- type t in the list r as the TRep. (We will need UnsafeCoerce then).
-- The interface is the same as of other OpenUnion*.hs
module Data.Iota.Tagged.OpenUnion51 (Union, inj, prj, decomp,
Member, MemberU2, weaken
) where
import Unsafe.Coerce(unsafeCoerce)
import Data.Word (Word8)
type Index = Word8
-- The data constructors of Union are not exported
-- Strong Sum (Existential with the evidence) is an open union
-- t can be a GADT and hence not necessarily a Functor.
-- Index is the index of t in the list r; that is, the index of t in the
-- universe r
data Union (r :: [ * -> * ]) (v :: k) where
Union0 :: t v -> Union r v
Union1 :: t v -> Union r v
Union2 :: t v -> Union r v
Union3 :: t v -> Union r v
Union4 :: t v -> Union r v
Union5 :: t v -> Union r v
Union :: {-# UNPACK #-} !Index -> t v -> Union r v
{-# INLINE prj' #-}
{-# INLINE inj' #-}
inj' :: Index -> t v -> Union r v
inj' 0 = Union0
inj' 1 = Union1
inj' 2 = Union2
inj' 3 = Union3
inj' 4 = Union4
inj' 5 = Union5
inj' n = Union n
prj' :: Index -> Union r v -> Maybe (t v)
prj' 0 (Union0 x) = Just (unsafeCoerce x)
prj' 1 (Union1 x) = Just (unsafeCoerce x)
prj' 2 (Union2 x) = Just (unsafeCoerce x)
prj' 3 (Union3 x) = Just (unsafeCoerce x)
prj' 4 (Union4 x) = Just (unsafeCoerce x)
prj' 5 (Union5 x) = Just (unsafeCoerce x)
prj' n (Union n' x) | n == n' = Just (unsafeCoerce x)
| otherwise = Nothing
prj' _ _ = Nothing
newtype P t r = P{unP :: Index}
class (FindElem t r) => Member (t :: * -> *) r where
inj :: t v -> Union r v
prj :: Union r v -> Maybe (t v)
-- Optimized specialized instance
instance {-# INCOHERENT #-} Member t '[t] where
{-# INLINE inj #-}
{-# INLINE prj #-}
inj x = Union 0 x
prj (Union0 x) = Just (unsafeCoerce x)
prj (Union1 x) = Just (unsafeCoerce x)
prj (Union2 x) = Just (unsafeCoerce x)
prj (Union3 x) = Just (unsafeCoerce x)
prj (Union4 x) = Just (unsafeCoerce x)
prj (Union5 x) = Just (unsafeCoerce x)
prj (Union _ x) = Just (unsafeCoerce x)
instance {-# INCOHERENT #-} (FindElem t r) => Member t r where
{-# INLINE inj #-}
{-# INLINE prj #-}
inj = inj' (unP $ (elemNo :: P t r))
prj = prj' (unP $ (elemNo :: P t r))
{-# INLINE [2] decomp #-}
decomp :: Union (t ': r) v -> Either (Union r v) (t v)
decomp (Union0 v) = Right $ unsafeCoerce v
decomp (Union1 v) = Left $ Union0 v
decomp (Union2 v) = Left $ Union1 v
decomp (Union3 v) = Left $ Union2 v
decomp (Union4 v) = Left $ Union3 v
decomp (Union5 v) = Left $ Union4 v
decomp (Union n v) = Left $ if n == 6 then Union5 v else Union (n-1) v
-- Specialized version
{-# RULES "decomp/singleton" decomp = decomp0 #-}
{-# INLINE decomp0 #-}
decomp0 :: Union '[t] v -> Either (Union '[] v) (t v)
decomp0 (Union0 v) = Right $ unsafeCoerce v
decomp0 _ = error "Not possible"
-- decomp0 (Union1 v) = Right $ unsafeCoerce v
-- decomp0 (Union2 v) = Right $ unsafeCoerce v
-- decomp0 (Union3 v) = Right $ unsafeCoerce v
-- decomp0 (Union4 v) = Right $ unsafeCoerce v
-- decomp0 (Union5 v) = Right $ unsafeCoerce v
-- decomp0 (Union _ v) = Right $ unsafeCoerce v
-- No other case is possible
weaken :: Union r w -> Union (any ': r) w
weaken (Union0 v) = Union1 v
weaken (Union1 v) = Union2 v
weaken (Union2 v) = Union3 v
weaken (Union3 v) = Union4 v
weaken (Union4 v) = Union5 v
weaken (Union5 v) = Union 6 v
weaken (Union n v) = Union (n+1) v
-- Find an index of an element in a `list'
-- The element must exist
-- This is essentially a compile-time computation.
class FindElem (t :: * -> *) r where
elemNo :: P t r
instance {-# OVERLAPPING #-} FindElem t (t ': r) where
elemNo = P 0
instance {-# OVERLAPS #-} FindElem t r => FindElem t (t' ': r) where
elemNo = P $ 1 + (unP $ (elemNo :: P t r))
type family EQU (a :: k) (b :: k) :: Bool where
EQU a a = 'True
EQU a b = 'False
-- This class is used for emulating monad transformers
class Member t r => MemberU2 (tag :: k -> * -> *) (t :: * -> *) r | tag r -> t
instance (MemberU' (EQU t1 t2) tag t1 (t2 ': r)) => MemberU2 tag t1 (t2 ': r)
class Member t r =>
MemberU' (f::Bool) (tag :: k -> * -> *) (t :: * -> *) r | tag r -> t
instance MemberU' 'True tag (tag e) (tag e ': r)
instance (Member t (t' ': r), MemberU2 tag t r) =>
MemberU' 'False tag t (t' ': r)
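-- A tiny, hypothetical round-trip sketch (not part of the original module):
-- inject a 'Maybe Int' into a two-component union, then project it back out
-- at each member type.
exampleRoundTrip :: (Maybe (Maybe Int), Maybe [Int])
exampleRoundTrip = (prj u, prj u)   -- == (Just (Just 42), Nothing)
  where
    u :: Union '[Maybe, []] Int
    u = inj (Just 42)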
|
AaronFriel/eff-experiments
|
src/Data/Iota/Tagged/OpenUnion51.hs
|
bsd-3-clause
| 5,918
| 0
| 10
| 1,356
| 1,800
| 947
| 853
| -1
| -1
|
{-# LANGUAGE FlexibleContexts #-}
module Math.Integrators.ImplicitMidpointRule
( imr
) where
import Linear
import Math.Integrators.Implicit
eps :: Floating a => a
eps = 1e-14
imr :: (Metric f, Floating a, Ord a)
=> (f a -> f a) -> a -> f a -> f a
imr f = \h y ->
fixedPoint (\x -> y ^+^ h *^ ( f ( (y^+^x)^/2) ))
(\x1 x2 -> breakNormIR (x1 ^-^ x2) eps)
y
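-- A minimal usage sketch (not part of the original module): one implicit
-- midpoint step of size 0.1 for the harmonic oscillator q' = p, p' = -q,
-- with the state stored in linear's V2 as (q, p).
exampleStep :: V2 Double
exampleStep = imr (\(V2 q p) -> V2 p (negate q)) 0.1 (V2 1 0)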
|
qnikst/numeric-ode
|
src/Math/Integrators/ImplicitMidpointRule.hs
|
bsd-3-clause
| 395
| 0
| 15
| 112
| 176
| 95
| 81
| 13
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RecordWildCards #-}
module Test.TorCell(torCellTests) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Monad
import Crypto.Hash
import Data.ASN1.OID
import Data.Binary.Get
import Data.Binary.Put
import Data.ByteArray(convert)
import Data.ByteString(ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import qualified Data.ByteString.Lazy as BSL
import Data.ByteString.Lazy(toStrict,fromStrict)
import Data.Hourglass
import Data.List
import Data.String
import Data.Word
import Data.X509
import Numeric
import Test.QuickCheck
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.Standard
import Tor.DataFormat.RelayCell
import Tor.DataFormat.TorAddress
import Tor.DataFormat.TorCell
import Tor.State.Credentials
instance Arbitrary TorAddress where
arbitrary = oneof [ Hostname <$> genHostname
, IP4 <$> genIP4
, IP6 <$> genIP6
, return (TransientError "External transient error.")
, return (NontransientError "External nontransient error.")
]
genHostname :: Gen String
genHostname = take 255 <$>
intercalate "." <$>
(listOf (listOf (elements ['a'..'z'])))
genIP4 :: Gen String
genIP4 = intercalate "." <$>
(replicateM 4 (show <$> (arbitrary :: Gen Word8)))
genIP6 :: Gen String
genIP6 = do x <- genIP6'
return ("[" ++ intercalate ":" x ++ "]")
where
genIP6' = map showHex' <$>
replicateM 8 (arbitrary :: Gen Word16)
prop_TorAddrSerial :: TorAddress -> Bool
prop_TorAddrSerial = serialProp getTorAddress putTorAddress
data TorAddressBS = TABS ByteString TorAddress
deriving (Show, Eq)
instance Arbitrary TorAddressBS where
arbitrary = oneof [ do x <- replicateM 4 arbitrary
let str = intercalate "." (map show x)
bstr = BS.pack x
return (TABS bstr (IP4 str))
, do x <- replicateM 16 arbitrary
let bstr = BSL.pack x
xs = runGet (replicateM 8 getWord16be) bstr
str = "[" ++ intercalate ":" (map showHex' xs) ++ "]"
return (TABS (toStrict bstr) (IP6 str))
]
prop_TorAddrBSSerial :: TorAddressBS -> Bool
prop_TorAddrBSSerial (TABS bstr x) = bstr == torAddressByteString x
showHex' :: (Show a, Integral a) => a -> String
showHex' x = showHex x ""
instance Arbitrary ExtendSpec where
arbitrary = oneof [ ExtendIP4 <$> genIP4 <*> arbitrary
, ExtendIP6 <$> genIP6 <*> arbitrary
, ExtendDigest <$>
(BSC.pack <$>
replicateM 20 (elements "abcdef0123456789"))
]
prop_ExtendSpecSerial :: ExtendSpec -> Bool
prop_ExtendSpecSerial = serialProp getExtendSpec putExtendSpec
instance Arbitrary DestroyReason where
arbitrary = elements [NoReason, TorProtocolViolation, InternalError,
RequestedDestroy, NodeHibernating, HitResourceLimit,
ConnectionFailed, ORIdentityIssue, ORConnectionClosed,
Finished, CircuitConstructionTimeout, CircuitDestroyed,
NoSuchService]
prop_DestroyReasonSerial1 :: DestroyReason -> Bool
prop_DestroyReasonSerial1 = serialProp getDestroyReason putDestroyReason
prop_DestroyReasonSerial2 :: Word8 -> Bool
prop_DestroyReasonSerial2 x =
[x] == BSL.unpack (runPut (putDestroyReason
(runGet getDestroyReason (BSL.pack [x]))))
instance Arbitrary RelayEndReason where
arbitrary = oneof [ ReasonExitPolicy <$> (IP4 <$> genIP4) <*> arbitrary
, ReasonExitPolicy <$> (IP6 <$> genIP6) <*> arbitrary
, elements [ReasonMisc, ReasonResolveFailed,
ReasonConnectionRefused, ReasonDestroyed, ReasonDone,
ReasonTimeout, ReasonNoRoute, ReasonHibernating,
ReasonInternal, ReasonResourceLimit,
ReasonConnectionReset, ReasonTorProtocol,
ReasonNotDirectory ]
]
prop_RelayEndRsnSerial :: RelayEndReason -> Bool
prop_RelayEndRsnSerial rsn =
let bstr = runPut (putRelayEndReason rsn)
len = case rsn of
ReasonExitPolicy (IP4 _) _ -> 9
ReasonExitPolicy (IP6 _) _ -> 21
_ -> 1
rsn' = runGet (getRelayEndReason len) bstr
in rsn == rsn'
instance Arbitrary RelayCell where
arbitrary =
oneof [ RelayBegin <$> arbitrary <*> legalTorAddress True
<*> arbitrary <*> arbitrary <*> arbitrary
<*> arbitrary
, RelayData <$> arbitrary
<*> ((BS.pack . take 503) <$> arbitrary)
, RelayEnd <$> arbitrary <*> arbitrary
, RelayConnected <$> arbitrary <*> legalTorAddress False
<*> arbitrary
, RelaySendMe <$> arbitrary
, RelayExtend <$> arbitrary <*> genIP4
<*> arbitrary <*> arbitraryBS 186 <*> arbitraryBS 20
, RelayExtended <$> arbitrary
<*> (BS.pack <$> replicateM 148 arbitrary)
, RelayTruncate <$> arbitrary
, RelayTruncated <$> arbitrary <*> arbitrary
, RelayDrop <$> arbitrary
, RelayResolve <$> arbitrary
<*> (filter (/= '\0') <$> arbitrary)
, do strm <- arbitrary
vals <- listOf $ do x <- legalTorAddress True
y <- arbitrary
return (x,y)
return (RelayResolved strm vals)
, RelayBeginDir <$> arbitrary
, RelayExtend2 <$> arbitrary <*> arbitrary <*> arbitrary
<*> (BS.pack <$> arbitrary)
, RelayExtended2 <$> arbitrary <*> (BS.pack <$> arbitrary)
, RelayEstablishIntro <$> arbitrary <*> arbitraryBS 128
<*> arbitraryBS 20 <*> arbitraryBS 128
, RelayEstablishRendezvous <$> arbitrary <*> arbitraryBS 20
, RelayIntroduce1 <$> arbitrary <*> arbitraryBS 20
<*> (BS.pack <$> arbitrary)
, RelayIntroduce2 <$> arbitrary <*> (BS.pack <$> arbitrary)
, RelayRendezvous1 <$> arbitrary <*> arbitraryBS 20
<*> arbitraryBS 128 <*> arbitraryBS 20
, RelayRendezvous2 <$> arbitrary <*> arbitraryBS 128
<*> arbitraryBS 20
, RelayIntroEstablished <$> arbitrary
, RelayRendezvousEstablished <$> arbitrary
, RelayIntroduceAck <$> arbitrary
]
legalTorAddress :: Bool -> Gen TorAddress
legalTorAddress allowHostname =
do x <- arbitrary
case x of
Hostname "" -> legalTorAddress allowHostname
Hostname _ | allowHostname -> return x
IP4 "0.0.0.0" -> legalTorAddress allowHostname
IP4 _ -> return x
IP6 _ -> return x
_ -> legalTorAddress allowHostname
prop_RelayCellSerial :: RelayCell -> Property
prop_RelayCellSerial x =
let (_, gutsBS) = runPutM (putRelayCellGuts x)
bstr = runPut (putRelayCell (BS.replicate 4 0) x)
(_, y) = runGet getRelayCell bstr
in (BSL.length gutsBS <= (509 - 11)) ==> (x == y)
prop_RelayCellDigestWorks1 :: RelayCell -> Property
prop_RelayCellDigestWorks1 x =
let (_, gutsBS) = runPutM (putRelayCellGuts x)
(bstr, _) = renderRelayCell hashInit x
(x', _) = runGet (parseRelayCell hashInit) (fromStrict bstr)
in (BSL.length gutsBS <= (509 - 11)) ==> (x == x')
prop_RelayCellDigestWorks2 :: NonEmptyList RelayCell -> Property
prop_RelayCellDigestWorks2 xs =
let mxSize = maximum (map putGuts (getNonEmpty xs))
xs' = runCheck hashInit hashInit (getNonEmpty xs)
in (mxSize <= (509 - 11)) ==> (getNonEmpty xs == xs')
where
putGuts x =
let (_, gutsBS) = runPutM (putRelayCellGuts x)
in BSL.length gutsBS
runCheck _ _ [] = []
runCheck rstate pstate (f:rest) =
let (bstr, rstate') = renderRelayCell rstate f
(f', pstate') = runGet (parseRelayCell pstate) (fromStrict bstr)
in f' : runCheck rstate' pstate' rest
instance Arbitrary HandshakeType where
arbitrary = elements [TAP, Reserved, NTor]
prop_HandTypeSerial1 :: HandshakeType -> Bool
prop_HandTypeSerial1 = serialProp getHandshakeType putHandshakeType
prop_HandTypeSerial2 :: Word16 -> Bool
prop_HandTypeSerial2 x =
let ht = runGet getHandshakeType (runPut (putWord16be x))
in runPut (putWord16be x) == runPut (putHandshakeType ht)
instance Arbitrary (SignedExact Certificate) where
arbitrary =
do certVersion <- arbitrary
certSerial <- arbitrary
certIssuerDN <- arbitrary
certSubjectDN <- arbitrary
hashAlg <- elements [HashSHA1, HashSHA256, HashSHA384]
g <- arbitraryRNG
let (pub, _, _) = generateKeyPair g 1024
let keyAlg = PubKeyALG_RSA -- FIXME?
certSignatureAlg = SignatureALG hashAlg keyAlg
certValidity = (timeFromElapsed (Elapsed (Seconds 257896558)),
timeFromElapsed (Elapsed (Seconds 2466971758)))
certPubKey = PubKeyRSA pub
certExtensions = Extensions Nothing
let baseCert = Certificate{ .. }
sigfun = case hashAlg of
HashSHA1 -> wrapSignatureAlg certSignatureAlg sha1
HashSHA224 -> wrapSignatureAlg certSignatureAlg sha224
HashSHA256 -> wrapSignatureAlg certSignatureAlg sha256
HashSHA384 -> wrapSignatureAlg certSignatureAlg sha384
HashSHA512 -> wrapSignatureAlg certSignatureAlg sha512
_ -> error "INTERNAL WEIRDNESS"
let (signedCert, _) = objectToSignedExact sigfun baseCert
return signedCert
newtype ReadableStr = ReadableStr { unReadableStr :: String }
instance Show ReadableStr where
show = show . unReadableStr
instance Arbitrary ReadableStr where
arbitrary =
do len <- choose (1, 256)
str <- replicateM len (elements printableChars)
return (ReadableStr str)
where printableChars = ['a'..'z'] ++ ['A'..'Z'] ++ ['_','.',' ']
instance Arbitrary DistinguishedName where
arbitrary =
do cn <- unReadableStr <$> arbitrary
co <- unReadableStr <$> arbitrary
og <- unReadableStr <$> arbitrary
ou <- unReadableStr <$> arbitrary
return (DistinguishedName [
(getObjectID DnCommonName, fromString cn)
, (getObjectID DnCountry, fromString co)
, (getObjectID DnOrganization, fromString og)
, (getObjectID DnOrganizationUnit, fromString ou)
])
wrapSignatureAlg :: SignatureALG ->
(ByteString -> ByteString) ->
ByteString ->
(ByteString, SignatureALG, ())
wrapSignatureAlg name sha bstr =
let hashed = convert (sha bstr)
in (hashed, name, ())
sha1 :: ByteString -> ByteString
sha1 = convert . hashWith SHA1
sha224 :: ByteString -> ByteString
sha224 = convert . hashWith SHA224
sha256 :: ByteString -> ByteString
sha256 = convert . hashWith SHA256
sha384 :: ByteString -> ByteString
sha384 = convert . hashWith SHA384
sha512 :: ByteString -> ByteString
sha512 = convert . hashWith SHA512
instance Arbitrary TorCert where
arbitrary = oneof [ LinkKeyCert <$> arbitrary
, RSA1024Identity <$> arbitrary
, RSA1024Auth <$> arbitrary
]
prop_torCertSerial :: TorCert -> Bool
prop_torCertSerial = serialProp getTorCert putTorCert
torCellTests :: Test
torCellTests =
testGroup "TorCell Serialization" [
testProperty "TorAddress round-trips" prop_TorAddrSerial
, testProperty "TorAddress makes sensible ByteStrings" prop_TorAddrBSSerial
, testProperty "ExtendSpec serializes" prop_ExtendSpecSerial
, testProperty "DestroyReason serializes (check #1)" prop_DestroyReasonSerial1
, testProperty "DestroyReason serializes (check #2)" prop_DestroyReasonSerial2
, testProperty "HandshakeType serializes (check #1)" prop_HandTypeSerial1
, testProperty "HandshakeType serializes (check #2)" prop_HandTypeSerial2
, testProperty "RelayEndReason serializes" prop_RelayEndRsnSerial
, testProperty "RelayCell serializes" prop_RelayCellSerial
, testProperty "RelayCell serializes w/ digest" prop_RelayCellDigestWorks1
, testProperty "RelayCell serializes w/ digest" prop_RelayCellDigestWorks2
, testProperty "Tor certificates serialize" prop_torCertSerial
]
|
GaloisInc/haskell-tor
|
test/Test/TorCell.hs
|
bsd-3-clause
| 13,072
| 0
| 17
| 3,899
| 3,196
| 1,651
| 1,545
| 272
| 6
|
-- Many extensions. I overload many things from the Haskell Prelude in the style
-- of Awesome Prelude. You may also need Template Haskell transformations
-- on declarations, which derive classes, type family instances, etc.
{-# LANGUAGE TypeFamilies, TypeOperators, FlexibleContexts, RecursiveDo #-}
{-# LANGUAGE DeriveDataTypeable, NoImplicitPrelude, TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
module Hardware.HHDL.Examples.RunningSumMaybes where
import Data.Word
-- The main module of the library. It exports everything I deemed useful
-- for hardware description - code generation over ADTs, netlist operators, some
-- handy functions from the Prelude...
-- It also provides wire construction for Either and Maybe, along with matching functions.
import Hardware.HHDL
-- The description of clocking frequency for our example.
import Hardware.HHDL.Examples.Clock
-------------------------------------------------------------------------------
-- How to pattern match on an algebraic type.
-- Clocked is the type of an entity. It has three arguments: a list of clocking frequencies allowed
-- in the netlist, and the types of its inputs and outputs.
runningSumMaybes :: Clock c => Mealy c (Wire c (Maybe Word8) :. Nil) (Wire c Word8 :. Nil)
runningSumMaybes = mkMealyNamed
-- names of inputs and outputs.
(Just ("maybeA" :. Nil, "currentSum" :. Nil))
-- default value for state.
(0 :. Nil)
"runningSumMaybes" $ \(sum :. Nil) (mbA :. Nil) -> do
-- here we pattern match in the <a href=http://hackage.haskell.org/package/first-class-patterns>"First class patterns"</a> style.
-- the idea is that for each constructor Cons of algebraic type T we automatically
-- create two functions:
-- - mkCons which creates a wire (of type Wire c T) from wires of arguments and
-- - pCons which matches a wire of type Wire c T with patterns of types of Cons
-- arguments.
-- pJust and pNothing were generated in Hardware.HHDL.HHDL from the description of
-- the Maybe type.
-- pvar is a pattern that matches anything and passes that anything as an argument
-- to the processing function.
a <- match mbA [
-- if we have Just x, return it!
pJust pvar --> \(x :. Nil) -> return x
    -- default to 0, if Nothing.
, pNothing --> \Nil -> return (constant 0)
]
-- compute the sum.
nextSum <- assignWire (sum .+ a)
  -- return the currently latched sum.
return (nextSum :. Nil, sum :. Nil)
-- How to obtain the VHDL text - we fix the polymorphic parameters, generate the text (with any
-- entities we have to use) and pass it to a display-and-write function.
runningSumMaybesVHDLText = writeHDLText VHDL (runningSumMaybes :: Mealy Clk (Wire Clk (Maybe Word8) :. Nil) (Wire Clk Word8 :. Nil))
(\s -> putStrLn s >> writeFile "runningSumMaybes.vhdl" s)
-- a shortcut.
test = runningSumMaybesVHDLText
|
thesz/hhdl
|
src/Hardware/HHDL/Examples/RunningSumMaybes.hs
|
bsd-3-clause
| 2,902
| 12
| 16
| 588
| 359
| 204
| 155
| 20
| 1
|
module Main where
import TestUtil
import Test.HUnit hiding (path)
import Database.TokyoCabinet
import qualified Database.TokyoCabinet.BDB as B
import Data.Maybe (catMaybes)
import Data.List (sort)
import Control.Monad
import Control.Exception
import Control.Monad.Trans (liftIO)
withoutFileM :: String -> (String -> TCM a) -> TCM a
withoutFileM fn action = liftIO $ bracket (setupFile fn) teardownFile
(runTCM . action)
withOpenedTC :: (TCDB tc) => String -> tc -> (tc -> TCM a) -> TCM a
withOpenedTC name tc action = do
open tc name [OREADER, OWRITER, OCREAT]
res <- action tc
close tc
return res
tcdb :: (TCDB tc) => (tc -> TCM a) -> TCM a
tcdb = (new >>=)
bdb :: (BDB -> TCM a) -> TCM a
bdb = tcdb
hdb :: (HDB -> TCM a) -> TCM a
hdb = tcdb
fdb :: (FDB -> TCM a) -> TCM a
fdb = tcdb
tdb :: (TDB -> TCM a) -> TCM a
tdb = tcdb
bbdb :: (B.BDB -> TCM a) -> TCM a
bbdb = tcdb
dbname tc = "foo" ++ (defaultExtension tc)
test_new_delete tc = delete tc
e @=?: a = liftIO $ e @=? a
e @?=: a = liftIO $ e @?= a
e @?: msg = liftIO $ runTCM e @? msg
test_ecode tc =
withoutFileM (dbname tc) $ \fn -> do
open tc fn [OREADER]
ecode tc >>= (ENOFILE @=?:)
test_open_close tc =
withoutFileM (dbname tc) $ \fn -> do
not `liftM` open tc fn [OREADER] @?: "file does not exist"
open tc fn [OREADER, OWRITER, OCREAT] @?: "open"
close tc @?: "close"
not `liftM` close tc @?: "cannot close closed file"
test_putxx tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' -> do
put tc' "1" "bar"
get tc' "1" >>= (Just "bar" @=?:)
putkeep tc' "1" "baz"
get tc' "1" >>= (Just "bar" @=?:)
putcat tc' "1" "baz"
get tc' "1" >>= (Just "barbaz" @=?:)
test_out tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' -> do
put tc' "1" "bar"
get tc' "1" >>= (Just "bar" @=?:)
out tc' "1" @?: "out succeeded"
get tc' "1" >>= ((Nothing :: Maybe String) @=?:)
test_put_get tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' -> do
put tc' "1" "foo"
put tc' "2" "bar"
put tc' "3" "baz"
get tc' "1" >>= (Just "foo" @=?:)
get tc' "2" >>= (Just "bar" @=?:)
get tc' "3" >>= (Just "baz" @=?:)
test_vsiz tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' -> do
put tc' "1" "bar"
vsiz tc' "1" >>= (Just 3 @=?:)
vsiz tc' "2" >>= ((Nothing :: Maybe Int) @=?:)
test_iterate tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' -> do
let keys = [1..3] :: [Int]
vals = ["foo", "bar", "baz"]
zipWithM_ (put tc') keys vals
iterinit tc'
keys' <- sequence $ replicate (length keys) (iternext tc')
(sort $ catMaybes keys') @?=: (sort keys)
test_fwmkeys tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' -> do
mapM_ (uncurry (put tc')) ([ ("foo", 100)
, ("bar", 200)
, ("baz", 201)
, ("jkl", 300)] :: [(String, Int)])
fwmkeys tc' "ba" 10 >>= (["bar", "baz"] @=?:) . sort
fwmkeys tc' "ba" 1 >>= (["bar"] @=?:)
fwmkeys tc' "" 10 >>= (["bar", "baz", "foo", "jkl"] @=?:) . sort
test_fwmkeys_fdb tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' -> do
zipWithM_ (put tc') ([1..10] :: [Int]) ([100, 200..1000] :: [Int])
fwmkeys tc' "[min,max]" 10 >>= (([1..10] :: [Int]) @=?:)
test_addint tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' -> do
let ini = 32 :: Int
put tc' "100" ini
get tc' "100" >>= (Just ini @=?:)
addint tc' "100" 3
get tc' "100" >>= (Just (ini+3) @=?:)
addint tc' "200" 1 >>= (Just 1 @=?:)
put tc' "200" "foo"
addint tc' "200" 1 >>= (Nothing @=?:)
test_adddouble tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' -> do
let ini = 0.003 :: Double
put tc' "100" ini
get tc' "100" >>= (Just ini @=?:)
adddouble tc' "100" 0.3
(get tc' "100" >>= (return . isIn (ini+0.3))) @?: "isIn"
adddouble tc' "200" 0.5 >>= (Just 0.5 @=?:)
put tc' "200" "foo"
adddouble tc' "200" 1.2 >>= (Nothing @=?:)
where
margin = 1e-30
      isIn :: Double -> Maybe Double -> Bool
      isIn _ Nothing = False
      isIn expected (Just actual) =
        let diff = expected - actual
        in abs diff <= margin
test_vanish tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' -> do
put tc' "100" "111"
put tc' "200" "222"
put tc' "300" "333"
rnum tc' >>= (3 @=?:)
vanish tc'
rnum tc' >>= (0 @=?:)
test_copy tc =
withoutFileM (dbname tc) $ \fns ->
withoutFileM ("bar" ++ defaultExtension tc) $ \fnd ->
withOpenedTC fns tc $ \tc' -> do
put tc' "100" "bar"
copy tc' fnd
close tc'
open tc' fnd [OREADER]
get tc' "100" >>= (Just "bar" @=?:)
test_path tc =
withoutFileM (dbname tc) $ \fn ->
withOpenedTC fn tc $ \tc' ->
path tc' >>= (Just (dbname tc) @=?:)
test_util tc =
withoutFileM (dbname tc) $ \fn -> do
open tc fn [OWRITER, OCREAT]
path tc >>= (Just fn @=?:)
rnum tc >>= (0 @=?:)
((> 0) `liftM` size tc) @?: "fsiz"
sync tc @?: "sync"
close tc
tests = test [
"new delete BDB" ~: (runTCM $ bdb test_new_delete)
, "new delete HDB" ~: (runTCM $ hdb test_new_delete)
, "new delete FDB" ~: (runTCM $ fdb test_new_delete)
, "new delete B.BDB" ~: (runTCM $ bbdb test_new_delete)
, "ecode BDB" ~: (runTCM $ bdb test_ecode)
, "ecode HDB" ~: (runTCM $ hdb test_ecode)
, "ecode FDB" ~: (runTCM $ fdb test_ecode)
, "ecode B.BDB" ~: (runTCM $ bbdb test_ecode)
, "open close BDB" ~: (runTCM $ bdb test_open_close)
, "open close HDB" ~: (runTCM $ hdb test_open_close)
, "open close FDB" ~: (runTCM $ fdb test_open_close)
, "open close B.BDB" ~: (runTCM $ bbdb test_open_close)
, "putxxx BDB" ~: (runTCM $ bdb test_putxx)
, "putxxx HDB" ~: (runTCM $ hdb test_putxx)
, "putxxx FDB" ~: (runTCM $ fdb test_putxx)
, "putxxx B.BDB" ~: (runTCM $ bbdb test_putxx)
, "out BDB" ~: (runTCM $ bdb test_out)
, "out HDB" ~: (runTCM $ hdb test_out)
, "out FDB" ~: (runTCM $ fdb test_out)
, "out B.BDB" ~: (runTCM $ bbdb test_out)
, "put get BDB" ~: (runTCM $ bdb test_put_get)
, "put get HDB" ~: (runTCM $ hdb test_put_get)
, "put get FDB" ~: (runTCM $ fdb test_put_get)
, "put get B.BDB" ~: (runTCM $ bbdb test_put_get)
, "vsiz BDB" ~: (runTCM $ bdb test_vsiz)
, "vsiz HDB" ~: (runTCM $ hdb test_vsiz)
, "vsiz FDB" ~: (runTCM $ fdb test_vsiz)
, "vsiz B.BDB" ~: (runTCM $ bbdb test_vsiz)
, "iterate BDB" ~: (runTCM $ bdb test_iterate)
, "iterate HDB" ~: (runTCM $ hdb test_iterate)
, "iterate FDB" ~: (runTCM $ fdb test_iterate)
, "fwmkeys BDB" ~: (runTCM $ bdb test_fwmkeys)
, "fwmkeys HDB" ~: (runTCM $ hdb test_fwmkeys)
, "fwmkeys FDB" ~: (runTCM $ fdb test_fwmkeys_fdb)
, "fwmkeys B.BDB" ~: (runTCM $ bbdb test_fwmkeys)
, "addint BDB" ~: (runTCM $ bdb test_addint)
, "addint HDB" ~: (runTCM $ hdb test_addint)
, "addint FDB" ~: (runTCM $ fdb test_addint)
, "addint B.BDB" ~: (runTCM $ bbdb test_addint)
, "adddouble BDB" ~: (runTCM $ bdb test_adddouble)
, "adddouble HDB" ~: (runTCM $ hdb test_adddouble)
, "adddouble FDB" ~: (runTCM $ fdb test_adddouble)
, "adddouble B.BDB" ~: (runTCM $ bbdb test_adddouble)
, "vanish BDB" ~: (runTCM $ bdb test_vanish)
, "vanish HDB" ~: (runTCM $ hdb test_vanish)
, "vanish FDB" ~: (runTCM $ fdb test_vanish)
, "vanish B.BDB" ~: (runTCM $ bbdb test_vanish)
, "copy BDB" ~: (runTCM $ bdb test_copy)
, "copy HDB" ~: (runTCM $ hdb test_copy)
, "copy FDB" ~: (runTCM $ fdb test_copy)
, "copy B.BDB" ~: (runTCM $ bbdb test_copy)
, "path BDB" ~: (runTCM $ bdb test_path)
, "path HDB" ~: (runTCM $ hdb test_path)
, "path FDB" ~: (runTCM $ fdb test_path)
, "path B.BDB" ~: (runTCM $ bbdb test_path)
, "util BDB" ~: (runTCM $ bdb test_util)
, "util HDB" ~: (runTCM $ hdb test_util)
, "util FDB" ~: (runTCM $ fdb test_util)
, "util B.BDB" ~: (runTCM $ bbdb test_util)
, "new delete TDB" ~: (runTCM $ tdb test_new_delete)
, "ecode TDB" ~: (runTCM $ tdb test_ecode)
, "open close TDB" ~: (runTCM $ tdb test_open_close)
, "iterate TDB" ~: (runTCM $ tdb test_iterate)
, "fwmkeys B.BDB" ~: (runTCM $ bbdb test_fwmkeys)
, "vanish TDB" ~: (runTCM $ tdb test_vanish)
, "path TDB" ~: (runTCM $ tdb test_path)
, "util TDB" ~: (runTCM $ tdb test_util)
]
main = runTestTT tests
|
tom-lpsd/tokyocabinet-haskell
|
tests/TCDBTest.hs
|
bsd-3-clause
| 9,420
| 0
| 18
| 3,100
| 3,511
| 1,786
| 1,725
| 227
| 1
|
{-# LANGUAGE DeriveDataTypeable, NamedFieldPuns #-}
module Main where
import Prelude ()
import Air.Env
import Air.Extra
import Text.JSON.Generic
import Text.JSON.Pretty (pp_value)
import Data.List (isInfixOf, find)
import Data.Maybe (fromMaybe)
import Network.Curl
import System.Directory
import Text.Printf
import ExpandURL (expand_url)
import Control.Monad (when)
import qualified Data.ByteString.Char8 as StrictByteString
data Feed = Feed
{
feed_date :: Integer
, feed_summary :: String
, feed_link :: String
}
deriving (Show, Eq, Data, Typeable)
instance Default Feed where
def = Feed
{
feed_date = def
, feed_summary = def
, feed_link = def
}
data TwitterListResponse = TwitterListResponse
{
text :: String
, user :: TwitterUser
, created_at :: String
}
deriving (Show, Eq, Data, Typeable)
instance Default TwitterListResponse where
def = TwitterListResponse
{
text = def
, user = def
, created_at = def
}
data TwitterUser = TwitterUser
{
screen_name :: String
, name :: String
}
deriving (Show, Eq, Data, Typeable)
instance Default TwitterUser where
def = TwitterUser
{
screen_name = def
, name = def
}
data URLCache = URLCache
{
original_url :: String
, short_url :: String
}
deriving (Show, Eq, Data, Typeable)
dummy_feed :: Feed
dummy_feed = def
{
feed_summary = "google"
, feed_link = "http://google.com"
}
list_api_url = "https://api.twitter.com/1/lists/statuses.json?slug=peace-hime&owner_screen_name=nfjinjing&per_page=60"
list_json_path = "res/list.json"
-- Sat Jun 28 21:40:02 +0000 2008
twitter_time_format = "%a %b %d %H:%M:%S %z %Y"
download_list :: IO ()
download_list = do
(_, r) <- curlGetString list_api_url []
writeFile list_json_path r
read_list :: IO [TwitterListResponse]
read_list = do
json <- readFile list_json_path
return - decodeJSON json
write_template :: IO ()
write_template = do
writeFile "res/list-template.json" - encodeJSON [ def :: TwitterListResponse ]
list2feeds :: TwitterListResponse -> Feed
list2feeds r =
let feed_date = r.created_at.parse_time twitter_time_format .t2i
feed_summary = r.user.screen_name + " " + r.user.name + ": " + r.text
feed_link = r.text.parse_url.fromMaybe ""
in
def
{
feed_date
, feed_summary
, feed_link
}
parse_url :: String -> Maybe String
parse_url x =
if "http://" `isInfixOf` x
then
let xs = x.split "http://"
in
if xs.length.is 1
then
Just - x.words.first
else
Just - "http://" + xs.second.words.first
else
Nothing
normalize_feed :: Feed -> IO Feed
normalize_feed feed = do
feed_original_link <- expand_url - feed.feed_link
return - feed {feed_link = feed_original_link}
read_feed :: IO [Feed]
read_feed = do
raw_feeds <- read_list ^ map list2feeds
let feeds_count = raw_feeds.length
raw_feeds.indexed.mapM (\(i, raw_feed) -> do
normalized_feed <- do
url_cache_string <- StrictByteString.readFile url_cache_path ^ StrictByteString.unpack
let url_cache = url_cache_string.decodeJSON :: [URLCache]
case url_cache.find (original_url > is (raw_feed.feed_link)) of
Nothing -> do
normalized_feed <- normalize_feed raw_feed
let new_cache_item = URLCache {original_url = raw_feed.feed_link, short_url = normalized_feed.feed_link}
let new_cache_string = ( new_cache_item : url_cache ) . take 200 . toJSON.pp_value.show
-- puts new_cache_string
StrictByteString.writeFile url_cache_path - new_cache_string . StrictByteString.pack
return - normalized_feed
Just found_link -> do
return - raw_feed {feed_link = found_link.short_url}
putStr - printf "%2d/%2d" (i + 1 :: Int) (feeds_count :: Int)
puts - ": " + normalized_feed.feed_link
return normalized_feed
)
url_cache_path :: String
url_cache_path = "tmp/url_cache.json"
main = do
let sync_json_path = "public/sync.json"
cache_exist <- doesFileExist url_cache_path
when (not cache_exist) - do
StrictByteString.writeFile url_cache_path - (([] :: [URLCache]). encodeJSON . StrictByteString.pack)
download_list
feed <- read_feed
writeFile sync_json_path - feed.toJSON.pp_value.show
removeFile list_json_path
|
nfjinjing/human-rights-ios
|
src/Sync.hs
|
bsd-3-clause
| 4,497
| 1
| 26
| 1,095
| 1,179
| 628
| 551
| 117
| 3
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
#include "Circat/AbsTy.inc"
AbsTyPragmas
{-# OPTIONS_GHC -Wall #-}
-- {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- TEMP
-- {-# OPTIONS_GHC -fno-warn-unused-binds #-} -- TEMP
----------------------------------------------------------------------
-- |
-- Module : ShapedTypes.Types.Vec
-- Copyright : (c) 2016 Conal Elliott
-- License : BSD3
--
-- Maintainer : conal@conal.net
-- Stability : experimental
--
-- Length-typed lists/vectors
----------------------------------------------------------------------
module ShapedTypes.Types.Vec (Vec(..)) where
import Prelude hiding (id,(.))
import Circat.Category (exl,exr,id,second,twiceP,(&&&),(***),(.),Uncurriable(..))
import Circat.Circuit
import Circat.Classes (unitIf)
import Circat.Classes (BottomCat (..),IfCat (..))
import Circat.Misc ((:*),Unit)
import Circat.Rep
#include "Circat/AbsTy.inc"
import ShapedTypes.Nat
infixr 5 :<
-- | Vectors with type-determined length, having empty vector ('ZVec') and
-- vector cons ('(:<)').
data Vec :: Nat -> * -> * where
ZVec :: Vec Z a
(:<) :: a -> Vec n a -> Vec (S n) a
-- deriving Typeable
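-- Illustrative only (not part of the original module): a length-three vector
-- built from ':<' and 'ZVec', with its length reflected in the type.
exampleVec3 :: Vec (S (S (S Z))) Int
exampleVec3 = 1 :< 2 :< 3 :< ZVec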
instance HasRep (Vec Z a) where
type Rep (Vec Z a) = ()
repr ZVec = ()
abst () = ZVec
instance HasRep (Vec (S n) a) where
type Rep (Vec (S n) a) = (a,Vec n a)
repr (a :< as) = (a, as)
abst (a, as) = (a :< as)
{--------------------------------------------------------------------
Circuit support
--------------------------------------------------------------------}
#if 0
AbsTy(Vec Z a)
AbsTy(Vec (S n) a)
-- TODO: custom AbsTy replacement for Vec, as I'll be using it for n-ary trees.
#else
instance GenBuses q_q => Uncurriable (:>) q_q (Vec n a) where
uncurries = id
instance (Applicative (Vec n), Traversable (Vec n), GenBuses a) => GenBuses (Vec n a) where
genBuses' prim ins = buses <$> sequenceA (pure gb)
where
gb :: BusesM (Buses a)
gb = genBuses' prim ins
{-# NOINLINE gb #-} -- still necessary?
buses :: Vec m (Buses a) -> Buses (Vec m a)
buses ZVec = abstB UnitB
buses (b :< bs) = abstB (PairB b (buses bs))
delay :: Vec n a -> (Vec n a :> Vec n a)
delay = go
where
go :: Vec m a -> (Vec m a :> Vec m a)
go ZVec = id
go (a :< as) = abstC . (del a *** go as) . reprC
del :: a -> (a :> a)
del = delay
{-# NOINLINE del #-} -- still necessary?
ty :: Vec n a -> Ty
ty = const (foldr PairT UnitT (pure t :: Vec n Ty))
where
t = ty (undefined :: a)
{-# NOINLINE t #-}
instance (Applicative (Vec n), BottomCat (:>) a) => BottomCat (:>) (Vec n a) where
bottomC :: Unit :> Vec n a
bottomC = go (pure ())
where
go :: Vec m () -> (Unit :> Vec m a)
go ZVec = abstC
go (() :< units) = abstC . (bc &&& go units)
bc :: Unit :> a
bc = bottomC
{-# NOINLINE bc #-}
instance (Applicative (Vec n), IfCat (:>) a) => IfCat (:>) (Vec n a)
where
ifC :: Bool :* (Vec n a :* Vec n a) :> Vec n a
ifC = go (pure ())
where
go :: Vec m () -> Bool :* (Vec m a :* Vec m a) :> Vec m a
go ZVec = abstC . unitIf . second (twiceP reprC)
go (() :< units) = abstC
. ((ifc . second (twiceP exl)) &&& (go units . second (twiceP exr)))
. second (twiceP reprC)
ifc :: Bool :* (a :* a) :> a
ifc = ifC
{-# NOINLINE ifc #-}
-- Without these NOINLINE pragmas, GHC's typechecker does exponential work for
-- n-ary trees.
#if 0
reprC :: Vec (S m) a :> Rep (Vec (S m) a)
twiceP reprC :: Vec (S m) a :* Vec (S m) a :> Rep (Vec (S m) a) :* Rep (Vec (S m) a)
second (twiceP reprC)
:: Bool :* (Vec (S m) a :* Vec (S m) a) :> Bool :* (Rep (Vec (S m) a) :* Rep (Vec (S m) a))
:: Bool :* (Vec (S m) a :* Vec (S m) a) :> Bool :* (a :* Vec m a) :* (a :* Vec m a)
second (twiceP exl) :: Bool :* (a :* Vec m a) :* (a :* Vec m a) :> Bool :* a :* a
ifc . second (twiceP exl) :: Bool :* (a :* Vec m a) :* (a :* Vec m a) :> a
second (twiceP exr)
:: Bool :* (a :* Vec m a) :* (a :* Vec m a) :> Bool :* (Vec m a :* Vec m a)
go units :: Bool :* (Vec m a :* Vec m a) :> Vec m a
go units . second (twiceP exr) :: Bool :* (a :* Vec m a) :* (a :* Vec m a) :> Vec m a
(ifc . second (twiceP exl)) &&& (go units . second (twiceP exr))
:: Bool :* (a :* Vec m a) :* (a :* Vec m a) :> a :* Vec m a
abstC . ((ifc . second (twiceP exl)) &&& (go units . second (twiceP exr))) . second (twiceP reprC)
:: Bool :* (Vec (S m) a) :* (Vec (S m) a) :> Vec (S m) a
#endif
-- TODO: Look for simple formulations
-- Without NOINLINE pragmas, GHC's typechecker does exponential work for
-- trees.
--
-- TODO: Try again without NOINLINE, since I've reworked these definitions.
-- TODO: Abstract these definitions into something reusable.
#endif
|
conal/shaped-types
|
src/ShapedTypes/Types/Vec.hs
|
bsd-3-clause
| 5,054
| 1
| 10
| 1,287
| 432
| 260
| 172
| -1
| -1
|
-- Copyright (c) 1998-1999 Chris Okasaki.
-- See COPYRIGHT file for terms and conditions.
module LeftistHeap (
-- type of leftist heaps
Heap, -- instance of Coll/CollX, OrdColl/OrdCollX
-- CollX operations
empty,single,fromSeq,insert,insertSeq,union,unionSeq,delete,deleteAll,
deleteSeq,null,size,member,count,
-- Coll operations
toSeq, lookup, lookupM, lookupAll, lookupWithDefault, fold, fold1,
filter, partition,
-- OrdCollX operations
deleteMin,deleteMax,unsafeInsertMin,unsafeInsertMax,unsafeFromOrdSeq,
unsafeAppend,filterLT,filterLE,filterGT,filterGE,partitionLT_GE,
partitionLE_GT,partitionLT_GT,
-- OrdColl operations
minView,minElem,maxView,maxElem,foldr,foldl,foldr1,foldl1,toOrdSeq,
-- other supported operations
unsafeMapMonotonic,
-- documentation
moduleName,
-- re-export view type from EdisonPrelude for convenience
Maybe2(..)
) where
import Prelude hiding (null,foldr,foldl,foldr1,foldl1,lookup,filter)
import EdisonPrelude(Maybe2(..))
import qualified Collection as C ( CollX(..), OrdCollX(..), Coll(..), OrdColl(..),
unionList, toOrdList )
import qualified Sequence as S
import CollectionDefaults
import Monad
import QuickCheck
moduleName = "LeftistHeap"
-- Adapted from
-- Chris Okasaki. Purely Functional Data Structures. 1998.
-- Section 3.1.
data Heap a = E | L !Int a !(Heap a) !(Heap a)
-- want to say !a, but would need Eval a context
node x a E = L 1 x a E
node x E b = L 1 x b E
node x a@(L m _ _ _) b@(L n _ _ _)
| m <= n = L (m + 1) x b a
| otherwise = L (n + 1) x a b
{-
Note: when we want to recurse down both sides, and we have a choice,
recursing down the smaller side first will minimize stack usage.
For delete,deleteAll,filter,partition: could compute fringe and reduce
rather than rebuilding with union at every deleted node
-}
empty :: Ord a => Heap a
empty = E
single :: Ord a => a -> Heap a
single x = L 1 x E E
insert :: Ord a => a -> Heap a -> Heap a
insert x E = L 1 x E E
insert x h@(L m y a b)
| x <= y = L 1 x h E
| otherwise = node y a (insert x b)
union :: Ord a => Heap a -> Heap a -> Heap a
union E h = h
union h@(L _ x a b) h' = union' h x a b h'
where union' h x a b E = h
union' hx x a b hy@(L _ y c d)
| x <= y = node x a (union' hy y c d b)
| otherwise = node y c (union' hx x a b d)
{-
union E h = h
union h E = h
union h1@(L _ x a b) h2@(L _ y c d)
| x <= y = node x a (union b h2)
| otherwise = node y c (union h1 d)
-- ??? optimize to catch fact that h1 or h2 is known to be L case?
-}
delete :: Ord a => a -> Heap a -> Heap a
delete x h = case del h of
Just h' -> h'
Nothing -> h
where del (L _ y a b) =
case compare x y of
LT -> Nothing
EQ -> Just (union a b)
GT -> case del b of
Just b' -> Just (node y a b')
Nothing -> case del a of
Just a' -> Just (node y a' b)
Nothing -> Nothing
del E = Nothing
deleteAll :: Ord a => a -> Heap a -> Heap a
deleteAll x h@(L _ y a b) =
case compare x y of
LT -> h
EQ -> union (deleteAll x a) (deleteAll x b)
GT -> node y (deleteAll x a) (deleteAll x b)
deleteAll x E = E
null :: Ord a => Heap a -> Bool
null E = True
null _ = False
size :: Ord a => Heap a -> Int
size h = sz h 0
where sz E i = i
sz (L _ _ a b) i = sz a (sz b (i + 1))
member :: Ord a => Heap a -> a -> Bool
member E x = False
member (L _ y a b) x =
case compare x y of
LT -> False
EQ -> True
GT -> member b x || member a x
count :: Ord a => Heap a -> a -> Int
count E x = 0
count (L _ y a b) x =
case compare x y of
LT -> 0
EQ -> 1 + count b x + count a x
GT -> count b x + count a x
toSeq :: (Ord a,S.Sequence seq) => Heap a -> seq a
toSeq h = tol h S.empty
where tol E rest = rest
tol (L _ x a b) rest = S.cons x (tol b (tol a rest))
lookupM :: Ord a => Heap a -> a -> Maybe a
lookupM E x = Nothing
lookupM (L _ y a b) x =
case compare x y of
LT -> Nothing
EQ -> Just y
GT -> lookupM b x `mplus` lookupM a x
lookupAll :: (Ord a,S.Sequence seq) => Heap a -> a -> seq a
lookupAll h x = look h S.empty
where look E ys = ys
look (L _ y a b) ys =
case compare x y of
LT -> ys
EQ -> S.cons y (look b (look a ys))
GT -> look b (look a ys)
fold :: Ord a => (a -> b -> b) -> b -> Heap a -> b
fold f e E = e
fold f e (L _ x a b) = f x (fold f (fold f e a) b)
fold1 :: Ord a => (a -> a -> a) -> Heap a -> a
fold1 f E = error "LeftistHeap.fold1: empty collection"
fold1 f (L _ x a b) = fold f (fold f x a) b
filter :: Ord a => (a -> Bool) -> Heap a -> Heap a
filter p E = E
filter p (L _ x a b)
| p x = node x (filter p a) (filter p b)
| otherwise = union (filter p a) (filter p b)
partition :: Ord a => (a -> Bool) -> Heap a -> (Heap a, Heap a)
partition p E = (E, E)
partition p (L _ x a b)
| p x = (node x a' b', union a'' b'')
| otherwise = (union a' b', node x a'' b'')
where (a', a'') = partition p a
(b', b'') = partition p b
deleteMin :: Ord a => Heap a -> Heap a
deleteMin E = E
deleteMin (L _ x a b) = union a b
deleteMax :: Ord a => Heap a -> Heap a
deleteMax h = case maxView h of
Nothing2 -> E
Just2 h' x -> h'
unsafeInsertMin :: Ord a => a -> Heap a -> Heap a
unsafeInsertMin x h = L 1 x h E
unsafeAppend :: Ord a => Heap a -> Heap a -> Heap a
unsafeAppend E h = h
unsafeAppend (L _ y a b) h = node y a (unsafeAppend b h)
filterLT :: Ord a => a -> Heap a -> Heap a
filterLT y (L _ x a b) | x < y = node x (filterLT y a) (filterLT y b)
filterLT y _ = E
filterLE :: Ord a => a -> Heap a -> Heap a
filterLE y (L _ x a b) | x <= y = node x (filterLE y a) (filterLE y b)
filterLE y _ = E
filterGT :: Ord a => a -> Heap a -> Heap a
filterGT y h = C.unionList (collect h [])
where collect E hs = hs
collect h@(L _ x a b) hs
| x > y = h : hs
| otherwise = collect a (collect b hs)
filterGE :: Ord a => a -> Heap a -> Heap a
filterGE y h = C.unionList (collect h [])
where collect E hs = hs
collect h@(L _ x a b) hs
| x >= y = h : hs
| otherwise = collect b (collect a hs)
partitionLT_GE :: Ord a => a -> Heap a -> (Heap a, Heap a)
partitionLT_GE y h = (h', C.unionList hs)
where (h', hs) = collect h []
collect E hs = (E, hs)
collect h@(L _ x a b) hs
| x >= y = (E, h:hs)
| otherwise = let (a', hs') = collect a hs
(b', hs'') = collect b hs'
in (node x a' b', hs'')
partitionLE_GT :: Ord a => a -> Heap a -> (Heap a, Heap a)
partitionLE_GT y h = (h', C.unionList hs)
where (h', hs) = collect h []
collect E hs = (E, hs)
collect h@(L _ x a b) hs
| x > y = (E, h:hs)
| otherwise = let (a', hs') = collect a hs
(b', hs'') = collect b hs'
in (node x a' b', hs'')
partitionLT_GT :: Ord a => a -> Heap a -> (Heap a, Heap a)
partitionLT_GT y h = (h', C.unionList hs)
where (h', hs) = collect h []
collect E hs = (E, hs)
collect h@(L _ x a b) hs =
case compare x y of
GT -> (E, h:hs)
EQ -> let (a', hs') = collect a hs
(b', hs'') = collect b hs'
in (union a' b', hs'')
LT -> let (a', hs') = collect a hs
(b', hs'') = collect b hs'
in (node x a' b', hs'')
minView :: Ord a => Heap a -> Maybe2 a (Heap a)
minView E = Nothing2
minView (L _ x a b) = Just2 x (union a b)
minElem :: Ord a => Heap a -> a
minElem E = error "LeftistHeap.minElem: empty collection"
minElem (L _ x a b) = x
maxView :: Ord a => Heap a -> Maybe2 (Heap a) a
maxView E = Nothing2
maxView (L _ x E _) = Just2 E x
maxView (L _ x a E) = Just2 (L 1 x a' E) y
where Just2 a' y = maxView a
maxView (L _ x a b)
| y >= z = Just2 (node x a' b) y
| otherwise = Just2 (node x a b') z
where Just2 a' y = maxView a
Just2 b' z = maxView b
-- warning: maxView and maxElem may disagree if root is equal to max!
maxElem :: Ord a => Heap a -> a
maxElem E = error "LeftistHeap.maxElem: empty collection"
maxElem (L _ x E _) = x
maxElem (L _ x a b) = findMax b (findLeaf a)
where findMax E m = m
findMax (L _ x E _) m
| m >= x = m
| otherwise = x
findMax (L _ x a b) m = findMax a (findMax b m)
findLeaf E = error "LeftistHeap.maxElem: bug"
findLeaf (L _ x E _) = x
findLeaf (L _ x a b) = findMax b (findLeaf a)
foldr :: Ord a => (a -> b -> b) -> b -> Heap a -> b
foldr f e E = e
foldr f e (L _ x a b) = f x (foldr f e (union a b))
foldl :: Ord a => (b -> a -> b) -> b -> Heap a -> b
foldl f e E = e
foldl f e (L _ x a b) = foldl f (f e x) (union a b)
foldr1 :: Ord a => (a -> a -> a) -> Heap a -> a
foldr1 f E = error "LeftistHeap.foldr1: empty collection"
foldr1 f (L _ x E _) = x
foldr1 f (L _ x a b) = f x (foldr1 f (union a b))
foldl1 :: Ord a => (a -> a -> a) -> Heap a -> a
foldl1 f E = error "LeftistHeap.foldl1: empty collection"
foldl1 f (L _ x a b) = foldl f x (union a b)
{- ???? -}
unsafeMapMonotonic :: Ord a => (a -> a) -> Heap a -> Heap a
unsafeMapMonotonic f E = E
unsafeMapMonotonic f (L i x a b) =
L i (f x) (unsafeMapMonotonic f a) (unsafeMapMonotonic f b)
-- the remaining functions all use default definitions
fromSeq :: (Ord a,S.Sequence seq) => seq a -> Heap a
fromSeq = fromSeqUsingUnionSeq
insertSeq :: (Ord a,S.Sequence seq) => seq a -> Heap a -> Heap a
insertSeq = insertSeqUsingUnion
unionSeq :: (Ord a,S.Sequence seq) => seq (Heap a) -> Heap a
unionSeq = unionSeqUsingReduce
deleteSeq :: (Ord a,S.Sequence seq) => seq a -> Heap a -> Heap a
deleteSeq = deleteSeqUsingDelete
lookup :: Ord a => Heap a -> a -> a
lookup = lookupUsingLookupM
lookupWithDefault :: Ord a => a -> Heap a -> a -> a
lookupWithDefault = lookupWithDefaultUsingLookupM
unsafeInsertMax :: Ord a => Heap a -> a -> Heap a
unsafeInsertMax = unsafeInsertMaxUsingUnsafeAppend
unsafeFromOrdSeq :: (Ord a,S.Sequence seq) => seq a -> Heap a
unsafeFromOrdSeq = unsafeFromOrdSeqUsingUnsafeInsertMin
toOrdSeq :: (Ord a,S.Sequence seq) => Heap a -> seq a
toOrdSeq = toOrdSeqUsingFoldr
-- instance declarations
instance Ord a => C.CollX Heap a where
{empty = empty; single = single; fromSeq = fromSeq; insert = insert;
insertSeq = insertSeq; union = union; unionSeq = unionSeq;
delete = delete; deleteAll = deleteAll; deleteSeq = deleteSeq;
null = null; size = size; member = member; count = count;
instanceName c = moduleName}
instance Ord a => C.OrdCollX Heap a where
{deleteMin = deleteMin; deleteMax = deleteMax;
unsafeInsertMin = unsafeInsertMin; unsafeInsertMax = unsafeInsertMax;
unsafeFromOrdSeq = unsafeFromOrdSeq; unsafeAppend = unsafeAppend;
filterLT = filterLT; filterLE = filterLE; filterGT = filterGT;
filterGE = filterGE; partitionLT_GE = partitionLT_GE;
partitionLE_GT = partitionLE_GT; partitionLT_GT = partitionLT_GT}
instance Ord a => C.Coll Heap a where
{toSeq = toSeq; lookup = lookup; lookupM = lookupM;
lookupAll = lookupAll; lookupWithDefault = lookupWithDefault;
fold = fold; fold1 = fold1; filter = filter; partition = partition}
instance Ord a => C.OrdColl Heap a where
{minView = minView; minElem = minElem; maxView = maxView;
maxElem = maxElem; foldr = foldr; foldl = foldl; foldr1 = foldr1;
foldl1 = foldl1; toOrdSeq = toOrdSeq}
instance Ord a => Eq (Heap a) where
xs == ys = C.toOrdList xs == C.toOrdList ys
instance (Ord a, Show a) => Show (Heap a) where
show xs = show (C.toOrdList xs)
instance (Ord a, Arbitrary a) => Arbitrary (Heap a) where
arbitrary = sized (\n -> arbTree n)
where arbTree 0 = return E
arbTree n =
frequency [(1, return E),
(4, liftM3 snode arbitrary (arbTree (n `div` 2))
(arbTree (n `div` 4)))]
snode x a b = sift (node x a b)
sift E = E
sift t@(L _ x a E)
| a == E || x <= minElem a = t
sift (L r x (L r' y a b) E) =
L r y (sift (L r' x a b)) E
sift t@(L r x a b)
| x <= minElem a && x <= minElem b = t
sift (L r x (L r' y a b) c)
| y <= minElem c =
L r y (sift (L r' x a b)) c
sift (L r x a (L r' y b c)) =
L r y a (sift (L r' x b c))
coarbitrary E = variant 0
coarbitrary (L _ x a b) =
variant 1 . coarbitrary x . coarbitrary a . coarbitrary b
|
OS2World/DEV-UTIL-HUGS
|
oldlib/LeftistHeap.hs
|
bsd-3-clause
| 12,838
| 0
| 18
| 4,005
| 5,960
| 3,015
| 2,945
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
------------------------------------------------------------------------------
-- |
-- Module: Control.Concurrent.STM.TChan.Split.Implementation
-- Copyright: (c) 2013 Leon P Smith
-- License: BSD3
-- Maintainer: Leon P Smith <leon@melding-monads.com>
-- Stability: experimental
--
------------------------------------------------------------------------------
module Control.Concurrent.STM.TChan.Split.Implementation where
import Control.Concurrent.STM
import Data.Typeable (Typeable)
type TVarList a = TVar (TList a)
data TList a = TNil | TCons a {-# UNPACK #-} !(TVarList a)
newtype SendPort a
= SendPort (TVar (TVarList a))
deriving (Eq, Typeable)
newtype ReceivePort a
= ReceivePort (TVar (TVarList a))
deriving (Eq, Typeable)
new :: STM (SendPort a, ReceivePort a)
new = do
hole <- newTVar TNil
read <- newTVar hole
write <- newTVar hole
return (SendPort write, ReceivePort read)
newSendPort :: STM (SendPort a)
newSendPort = do
hole <- newTVar TNil
write <- newTVar hole
return (SendPort write)
send :: SendPort a -> a -> STM ()
send (SendPort write) a = do
listend <- readTVar write
new_listend <- newTVar TNil
writeTVar listend (TCons a new_listend)
writeTVar write new_listend
receive :: ReceivePort a -> STM a
receive (ReceivePort read) = do
listhead <- readTVar read
head <- readTVar listhead
case head of
TNil -> retry
TCons a tail -> do
writeTVar read tail
return a
tryReceive :: ReceivePort a -> STM (Maybe a)
tryReceive (ReceivePort read) = do
listhead <- readTVar read
head <- readTVar listhead
case head of
TNil -> return Nothing
TCons a tail -> do
writeTVar read tail
return (Just a)
peek :: ReceivePort a -> STM a
peek (ReceivePort read) = do
listhead <- readTVar read
head <- readTVar listhead
case head of
TNil -> retry
TCons a _tail -> do
return a
tryPeek :: ReceivePort a -> STM (Maybe a)
tryPeek (ReceivePort read) = do
listhead <- readTVar read
head <- readTVar listhead
case head of
TNil -> return Nothing
TCons a _tail -> do
return (Just a)
unget :: ReceivePort a -> a -> STM ()
unget (ReceivePort read) a = do
listhead <- readTVar read
new_listhead <- newTVar $! TCons a listhead
writeTVar read new_listhead
isEmpty :: ReceivePort a -> STM Bool
isEmpty (ReceivePort read) = do
listhead <- readTVar read
head <- readTVar listhead
case head of
TNil -> return True
TCons _ _ -> return False
listen :: SendPort a -> STM (ReceivePort a)
listen (SendPort write) = do
listend <- readTVar write
read <- newTVar listend
return (ReceivePort read)
duplicate :: ReceivePort a -> STM (ReceivePort a)
duplicate (ReceivePort read) = do
listhead <- readTVar read
read <- newTVar listhead
return (ReceivePort read)
split :: SendPort a -> STM (ReceivePort a, SendPort a)
split (SendPort write) = do
new_hole <- newTVar TNil
old_hole <- swapTVar write new_hole
read <- newTVar new_hole
write' <- newTVar old_hole
return (ReceivePort read, SendPort write')
|
lpsmith/split-tchan
|
src/Control/Concurrent/STM/TChan/Split/Implementation.hs
|
bsd-3-clause
| 3,248
| 0
| 14
| 795
| 1,091
| 510
| 581
| 92
| 2
|
module Examples.Example1 where
import Graphics.UI.VE
import ErrVal
import Examples.Utils(testC)
data Person = Person {
st_name :: String,
st_age :: Int
} deriving (Show)
instance HasVE Person
where
mkVE = mapVE toStruct fromStruct
( label "Name" mkVE
.*. label "Age" mkVE
)
where
toStruct (a,b) = eVal (Person a b)
fromStruct (Person a b) = (a,b)
test = testC (mkVE :: VE ConstE Person)
|
timbod7/veditor
|
demo/Examples/Example1.hs
|
bsd-3-clause
| 456
| 0
| 10
| 132
| 158
| 87
| 71
| 15
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE BangPatterns #-}
{- | A witness for arbitrary recursive ADTs that expresses finiteness without
    sacrificing laziness.
-}
module Sky.Ideas.FiniteRecursiveType where
import Data.Fix
----------------------------------------------------------------------------------------------------
newtype Finite n r = Finite { unFinite :: n r }
instance Show (n r) => Show (Finite n r) where
show (Finite a) = show a
type FiniteFix f = Fix (Finite f)
{-
Problem: What if someone goes ahead and just does this:
-}
data ListR a r = Nil | Cons a r
deriving (Show, Eq, Functor)
type List a = Fix (ListR a)
type FiniteList a = FiniteFix (ListR a)
oh_come_on0 :: FiniteList Int
oh_come_on0 = Fix . Finite . Cons 0 $ oh_come_on0
{-
Solution: Export only stuff that allows us to create finite data.
        The problem is that "Fix . Finite" is very general and allows us
to create infinite lists.
-}
fix :: f (Fix f) -> Fix f
fix !r = Fix r
fixFinite :: n (Fix (Finite n)) -> Fix (Finite n)
fixFinite !r = fix (Finite r)
oh_come_on1 :: FiniteList Int
oh_come_on1 = fixFinite . Cons 0 $ oh_come_on1
{- That won't work, of course:
- ListR is still lazy, so oh_come_on1 will run
- Detection is not at compile time
- Forcing strict evaluation in fixFinite is just the same as forcing
strict evaluation in the first place!
-}
{- We need to prohibit the use of arbitrary variables on the right of fixFinite!
This means we want to enforce a partial order in values of type "FiniteFix a"
at compile time.
Since the compile can only make judgements on types, we need to encode the
order in types.
-}
|
xicesky/sky-haskell-playground
|
src/Sky/Ideas/FiniteRecursiveType.hs
|
bsd-3-clause
| 1,848
| 0
| 10
| 406
| 303
| 164
| 139
| 24
| 1
|
-- {-# OPTIONS_GHC -fplugin=Monomorph.Plugin -O -fobject-code -dcore-lint #-}
{-# OPTIONS_GHC -fforce-recomp -fplugin=ReifyCore.Plugin -O -fobject-code -dcore-lint #-}
{-# LANGUAGE CPP, TupleSections, GADTs, TypeOperators, Rank2Types #-}
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-} -- TEMP
-- {-# OPTIONS_GHC -fno-warn-unused-binds #-} -- TEMP
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
----------------------------------------------------------------------
-- |
-- Module : Examples
-- Copyright : (c) 2016 Conal Elliott
-- License : BSD3
--
-- Maintainer : conal@conal.net
-- Stability : experimental
--
-- Examples / tinkering.
----------------------------------------------------------------------
module Examples where
-- Oddly, this import breaks unfolding needed by monomorphize.
import ReifyCore.Lambda (EP,reifyEP)
-- -- So does this one.
-- import ReifyCore.Lambda ()
-- -- This one is okay.
-- import ReifyCore.Reify ()
-- import ReifyCore.Reify (externals)
-- import ReifyCore.Sham ()
import Data.Monoid (Sum(..))
import Control.Applicative (liftA2)
import TypeUnary.Vec
import Circat.Misc (Unop,Binop)
import Circat.Rep
import Circat.Pair
import Circat.RTree
-- t1 :: RTree N1 Int
-- t1 = B (L 3 :# L 4)
-- q1 = reifyEP (\ x -> x + 0 :: Int)
-- -- In ghci: "ghc: panic! (the 'impossible' happened) ... floatExpr tick break<0>()"
-- boodly = "boodly"
sumv4 = reifyEP (sum :: Vec N4 Int -> Int)
sumt4 = reifyEP (sum :: Tree N4 Int -> Int)
|
conal/reify-core
|
test/Tests.hs
|
bsd-3-clause
| 1,523
| 0
| 8
| 242
| 151
| 101
| 50
| 16
| 1
|
module Data.Singular where
import Data.Countable
import Data.Searchable
class (Finite a, AtLeastOneCountable a) => Singular a where
single :: a
instance Singular () where
single = ()
instance (Singular a, Singular b) => Singular (a, b) where
single = (single, single)
|
AshleyYakeley/countable
|
src/Data/Singular.hs
|
bsd-3-clause
| 284
| 0
| 6
| 56
| 102
| 57
| 45
| 9
| 0
|
{-# LANGUAGE TemplateHaskell #-}
module LimitedHashMap where
import Control.Arrow ((&&&))
import Control.Concurrent.MVar (MVar, modifyMVar_, readMVar)
import Control.Monad (forM_, liftM, when, (>=>))
import Data.Maybe (fromJust)
import Data.Word (Word64)
import Control.Lens (makeLenses, over, view, (%~), (.~),
(^.))
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as C8
import qualified Data.HashMap.Lazy as HML
import Data.Time.Clock.POSIX (POSIXTime, getPOSIXTime)
-- | Data that is stored together with a value
data Value = Value
{ _value :: !ByteString -- ^ The actual value as supplied
, _flags :: !Int -- ^ Arbitrary flags
, _ttl :: !POSIXTime -- ^ The time to live of this KVP
, _uniq :: !Integer -- ^ Monotonically increasing unique value
} deriving (Show)
makeLenses ''Value
instance Eq Value where
x == y = x^.value == y^.value
-- | The type used as unique identifier for KVPs
type Key = ByteString
-- | Stateful data needed for the size-limited hashtable
data LimitedHashMap = LimitedHashMap
{ _hashMap :: !(HML.HashMap Key Value) -- ^ The Hashmap used
, _maxSize :: !Int -- ^ Maximum hashmap size
, _mru :: ![ByteString] -- ^ Recently used hashes
, _counter :: !Integer -- ^ Insertion counter
}
makeLenses ''LimitedHashMap
-- | The initial state to use when starting up
initialLHM :: Int -> LimitedHashMap
initialLHM msize = LimitedHashMap HML.empty msize [] 0
-- | Insert a new KVP
set :: MVar LimitedHashMap -> Key -> Int -> POSIXTime -> ByteString -> IO ()
set lhm k f t v = do
time <- convertTime t
unique <- getUnique lhm
let value = Value v f time unique
modifyMVar_ lhm $ \s -> do
let isFull = HML.size (s^.hashMap) >= s^.maxSize
alreadyMember = HML.member k $ s^.hashMap
needsDeletion = isFull && not alreadyMember
delCandidate = head $ s^.mru
addToMRU = if alreadyMember
then mru %~ ((++ [k]) . filter (/= k))
else mru %~ (++ [k])
performDeletion = if needsDeletion
then (mru %~ tail) . (hashMap %~ HML.delete delCandidate)
else id
return $ hashMap %~ HML.insert k value $ performDeletion $ addToMRU s
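-- Illustrative usage sketch (not part of the original module); it assumes
-- 'newMVar' from Control.Concurrent.MVar is in scope at the call site:
--
-- > demo :: IO (Maybe Value)
-- > demo = do
-- >   lhm <- newMVar (initialLHM 2)                 -- keep at most two KVPs
-- >   set lhm (C8.pack "a") 0 60 (C8.pack "first")  -- flags 0, TTL 60 seconds
-- >   set lhm (C8.pack "b") 0 60 (C8.pack "second")
-- >   set lhm (C8.pack "c") 0 60 (C8.pack "third")  -- full, so the least recently used "a" is evicted
-- >   get lhm (C8.pack "a")                         -- Nothing, since "a" was evicted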
-- | Append a value to an existing value
append :: MVar LimitedHashMap -> Key -> ByteString -> IO ()
append lhm k v = do
updateUnique lhm k
modifyMVar_ lhm $ updateMRU k >=>
return . (hashMap %~ HML.adjust (value %~ (`C8.append` v)) k)
-- | Prepend a value to an existing value
prepend :: MVar LimitedHashMap -> Key -> ByteString -> IO ()
prepend lhm k v = do
updateUnique lhm k
modifyMVar_ lhm $ updateMRU k >=>
return . (hashMap %~ HML.adjust (value %~ C8.append v) k)
-- | Query a value for a key
get :: MVar LimitedHashMap -> Key -> IO (Maybe Value)
get lhm k = do
state <- readMVar lhm
now <- getPOSIXTime
let rv = get' state k
case rv of
Nothing -> return Nothing
Just val -> if val^.ttl < now
then do
delete lhm k
return Nothing
else do
modifyMVar_ lhm $ updateMRU k
return rv
-- | Pure version of get for testing
get' :: LimitedHashMap -> Key -> Maybe Value
get' s k = HML.lookup k $ s^.hashMap
-- | Check if a key is part of the LHM without updating the MRU like get would
isMember :: MVar LimitedHashMap -> Key -> IO Bool
isMember lhm k = do
l <- readMVar lhm
return . HML.member k $ view hashMap l
-- | Delete a KVP
delete :: MVar LimitedHashMap -> Key -> IO ()
delete lhm k = modifyMVar_ lhm $
return . (hashMap %~ HML.delete k) . (mru %~ filter (/= k))
-- | Perform an incr command and return the new value
incr :: MVar LimitedHashMap -> Key -> Word64 -> IO ByteString
incr lhm k n = do
modifyMVar_ lhm $ return . (hashMap %~ HML.adjust (doIncr n) k)
liftM (view value . fromJust) $ get lhm k
-- | Increment a value by n, wrapping at the unsigned 64-bit mark
doIncr :: Word64 -> Value -> Value
doIncr n = over value increment
where
increment :: C8.ByteString -> C8.ByteString
increment bs = let num = read $ C8.unpack bs :: Word64
in C8.pack . show $ num + n
-- | Perform a decr command and return the new value
decr :: MVar LimitedHashMap -> Key -> Word64 -> IO ByteString
decr lhm k n = do
modifyMVar_ lhm $ return . (hashMap %~ HML.adjust (doDecr n) k)
liftM (view value . fromJust) $ get lhm k
-- | Decrement a value by n, stopping at zero
doDecr :: Word64 -> Value -> Value
doDecr n = over value decrement
where
decrement :: C8.ByteString -> C8.ByteString
decrement bs = let num = read $ C8.unpack bs :: Word64
new = num - n :: Word64
repr = C8.pack . show
in if new < num then repr new else repr 0
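-- Illustrative only: doDecr detects underflow by checking whether the wrapped
-- result is smaller than the original, so decrementing a stored "3" by 5
-- yields "0" rather than wrapping around to 2^64 - 2.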
-- | Remove all expired KVPs from the LHM
cleanup :: MVar LimitedHashMap -> IO ()
cleanup lhm = do
s <- readMVar lhm
now <- getPOSIXTime
let isExpired k v = now > v^.ttl
toBeDeleted = HML.keys . HML.filterWithKey isExpired $ view hashMap s
forM_ toBeDeleted $ delete lhm
-- | Update just the TTL of a KVP
touch :: MVar LimitedHashMap -> Key -> POSIXTime -> IO ()
touch lhm k t = do
time <- convertTime t
updateUnique lhm k
modifyMVar_ lhm $
updateMRU k >=> return . (hashMap %~ HML.adjust (ttl .~ time) k)
-- | Flush out all KVPs that are valid at least as long as the specified time.
-- Short times are relative, long ones absolute, like when setting keys. A time
-- of zero empties the whole LHM
flush :: MVar LimitedHashMap -> POSIXTime -> IO ()
flush lhm 0 = modifyMVar_ lhm $ return . (hashMap .~ HML.empty) . (mru .~ [])
flush lhm t = do
s <- readMVar lhm
time <- convertTime t
let isToBeFlushed k v = time <= v^.ttl
toBeFlushed = HML.keys . HML.filterWithKey isToBeFlushed $ view hashMap s
forM_ toBeFlushed $ delete lhm
-- | Check if a supplied time is relative or absolute and convert it if
-- necessary
convertTime :: POSIXTime -> IO POSIXTime
convertTime t = do
now <- getPOSIXTime
return $ if t >= 60*60*24*30
then t
else now + t
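-- Illustrative only: the threshold above is 30 days in seconds (2592000), so
-- convertTime 300 yields now + 300 (relative), while convertTime 1700000000
-- is treated as an absolute POSIX timestamp and returned unchanged.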
-- | Update the most-recently-used (MRU) list to reflect a query
updateMRU :: Key -> LimitedHashMap -> IO LimitedHashMap
updateMRU k = return . (mru %~ (++ [k]) . filter (/= k))
{-# ANN updateMRU "HLint: ignore Redundant bracket" #-}
-- | Get a new unique number and assign it to a value
updateUnique :: MVar LimitedHashMap -> Key -> IO ()
updateUnique lhm k = do
new <- getUnique lhm
modifyMVar_ lhm $ return . (hashMap %~ HML.adjust (uniq .~ new) k)
-- | Get the unique number of a value, if it exists
viewUnique :: MVar LimitedHashMap -> Key -> IO (Maybe Integer)
viewUnique lhm k = do
mlhm <- readMVar lhm
let value = get' mlhm k
case value of
Nothing -> return Nothing
Just val -> return . Just $ val^.uniq
-- | Get a unique number and increment the insertion counter
getUnique :: MVar LimitedHashMap -> IO Integer
getUnique lhm = do
unique <- view counter <$> readMVar lhm
modifyMVar_ lhm $ return . (counter %~ (+1))
return unique
|
sulami/hcached
|
src/LimitedHashMap.hs
|
bsd-3-clause
| 7,246
| 0
| 19
| 1,921
| 2,245
| 1,134
| 1,111
| -1
| -1
|
-- for mtl
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
-- for hoist and inputThrough
{-# LANGUAGE Rank2Types #-}
module Control.Yield (
-- * Types
Producing,
resume,
fromStep,
Consuming(Consuming, provide),
ProducerState(Produced, Done),
Resumable,
-- * Basic introduction and elimination
yield,
pfold,
-- * Handy conversion functions
delay,
discardingFirstInput,
step,
asNextYieldOf,
(/>/),
peeking,
overConsumption,
overProduction,
afterYielding,
-- * Meaningful specializations of pfold
replaceYield,
foreverYield,
yieldingTo,
(/$/),
-- * Connecting computations
($-),
($~),
($$),
connectResume,
-- * Misc
yieldEach,
echo,
echo_,
voidC,
-- * In the category of monads
hoist,
squash,
selfConnection,
inputThrough,
through,
-- * Manipulating layers of Producing
insert0,
insert1,
insert2,
commute,
) where
import Util
import Control.Yield.External
import Control.Arrow
import Control.Applicative
import Control.Category
import Data.Monoid
import Data.Foldable
import Prelude hiding ((.), id, mapM_, foldl)
import Control.Monad hiding (mapM_)
import Control.Monad.IO.Class
import Control.Monad.Reader.Class
import Control.Monad.State.Class
import Control.Monad.Writer.Class
import Control.Monad.Error.Class
import Control.Monad.Cont.Class
import Control.Monad.Trans.Class
import Control.Monad.Fix
-- Basic introduction and elimination
----------------------------------------------------------------
-- | Surrender an o over the interface,
-- waiting for an i in response.
yield :: (Monad m) => o -> Producing o i m i
yield o = fromStep $ return $ Produced o $ Consuming return
-- | A general algorithm for interpreting a Producing computation.
--
-- > pfold lift yield ≡ id
-- > pfold f' k' ∘ pfold f k ≡ pfold (f' ∘ f) (k' ∘ k)
pfold :: (Monad m, Monad n)
=> (forall x. m x -> n x) -- (m (ProducerState o i m r) -> m' (ProducerState o i m r))
-> (o -> n i) -> Producing o i m r -> n r
pfold morph yield' = go where
go p = morph (resume p) >>= \s -> case s of
Done r -> return r
Produced o k -> yield' o >>= go . provide k
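-- A hedged usage sketch (hypothetical binding, not part of the original API):
-- interpret a computation that yields Strings and expects Strings back by
-- printing each output and reading the reply from stdin.
pfoldStdIO :: Producing String String IO r -> IO r
pfoldStdIO = pfold id (\o -> putStrLn o >> getLine)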
-- Handy conversion functions
-----------------------------------------------------------------
-- | Wait for input before you actually need it,
-- delaying production. Each time new input arrives,
-- the previous input is used instead.
delay :: (Monad m) => Producing o i m r -> Consuming r m i o
delay p = Consuming $ \i -> lift (resume p) >>= \s -> case s of
Done r -> return r
Produced o k -> yield o >>= provide (delay $ provide k i)
-- | Wait for input before you actually need it,
-- discarding it when it arrives.
discardingFirstInput :: (Monad m) => Producing o i m r -> Consuming r m i o
discardingFirstInput p = Consuming $ \_ -> p
-- | Provide an action to be plugged in at the next yield.
-- All underlying effects up until that point will be run.
-- You will get back either the result r,
-- or a new Producing computation which you can 'resume'
-- just where it left off: right after that yield you just replaced,
-- but before any of the replacement code has run.
--
-- > step f (return r) ≡ return (Left r)
-- > step f (yield o >>= k) ≡ return (Right (f o >>= k))
-- > step f (lift m >>= k) ≡ m >>= step f ∘ k
step :: (Monad m) => (o -> Producing o i m i) -> Producing o i m r -> m (Either r (Producing o i m r))
step f p = step' `liftM` resume p where
step' (Done r) = Left r
step' (Produced o k) = Right $ f o >>= provide k
-- | Provide an action to replace the next yield.
-- If p does not yield, then @f \`asNextYieldOf\` p ≡ p@.
--
-- > asNextYieldOf f (return r) ≡ return r
-- > asNextYieldOf f (yield o >>= k) ≡ f o >>= k
-- > asNextYieldOf f (lift m >>= k) ≡ lift m >>= asNextYieldOf f ∘ k
asNextYieldOf :: (Monad m) => (o -> Producing o i m i)
-> Producing o i m r -> Producing o i m r
asNextYieldOf f p = lift (resume p) >>= step' where
step' (Done r) = return r
step' (Produced o k) = f o >>= provide k
infixl 3 />/
-- | Pronounced \"with next yield\".
-- This is just a flipped `asNextYieldOf`
-- with monoid laws.
--
-- > yield />/ f ≡ f
-- > f />/ yield ≡ f
-- > f />/ (g />/ h) ≡ (f />/ g) />/ h
-- >
-- > return />/ f ≡ return
-- > lift />/ f ≡ lift
(/>/) :: (Monad m)
=> (a -> Producing o i m r)
-> (o -> Producing o i m i)
-> a -> Producing o i m r
(f1 />/ f2) a = f2 `asNextYieldOf` f1 a
-- | Peeking! Look at the next n inputs.
--
-- Inside the peeking block, you are guaranteed
-- that your first n yields will correspond in order
-- to the list of inputs you are peeking at.
-- The list of inputs is guaranteed to be of length n,
-- making it convenient to bind all of the inputs
-- with a single pattern match.
--
-- After the peeking block, you are guaranteed
-- that yield has been used at least n times,
-- possibly more.
--
-- > do r <- peeking 2 $ \[i1, i2] -> do
-- > The next two yields in here
-- > will get you i1 and i2 respectively.
-- > You may yield more if you wish.
-- > If you yield fewer than 2 times in this block,
-- > then following this block,
-- > it will be as though you had yielded exactly 2 times.
-- > At this point, you are guaranteed that
-- > at least 2 yields have happened, possibly more.
peeking :: (Monad m) => Int -> ([i] -> Producing () i m r)
-> Producing () i m r
peeking n k = do
is <- replicateM n (yield ())
foldl (\m i -> const (return i) `asNextYieldOf` m) (k is) is
{- ???
expedite :: (MonadFix m) => Consuming r m i o -> Producing o i m r
expedite = undefined
-}
-- | Take a transformation of Producing computations,
-- and apply it to a Consuming computation.
--
-- > overProduction id ≡ id
-- > overProduction (f ∘ g) ≡ overProduction f ∘ overProduction g
overProduction ::
(Producing o i m r -> Producing o' i m' r')
-> Consuming r m i o -> Consuming r' m' i o'
overProduction f k = Consuming (f . provide k)
-- | Take a transformation of Consuming computations,
-- and apply it to a Producing computation.
--
-- > overConsumption id ≡ id
-- > overConsumption (f ∘ g) ≡ overConsumption f ∘ overConsumption g
overConsumption :: (Monad m)
=> (Consuming r m i o -> Consuming r m i' o)
-> Producing o i m r -> Producing o i' m r
overConsumption f p = fromStep $ resume p >>= \s -> case s of
Done r -> return $ Done r
Produced o k -> return $ Produced o (f k)
-- | Take a transformation of Producing computations,
-- and wait until right after the next yield to apply it.
--
-- > afterYielding ≡ overConsumption ∘ overProduction
-- > afterYielding id ≡ id
-- > afterYielding (f ∘ g) ≡ afterYielding f ∘ afterYielding g
afterYielding :: (Monad m)
=> (Producing o i m r -> Producing o i m r)
-> Producing o i m r -> Producing o i m r
afterYielding = overConsumption . overProduction
-- Meaningful specializations of pfold
--------------------------------------------------------------------
-- | Replace the Producing o i monad transformer
-- with some other monad transformer, by filling in
-- the yield holes with a given computation from o to i.
--
--
-- > replaceYield yield ≡ id
-- > replaceYield f' ∘ replaceYield f ≡ replaceYield (f' <=< lift ∘ f)
-- > replaceYield return ≡ lift ∘ selfConnect
replaceYield :: (Monad m, MonadTrans t, Monad (t m))
=> (o -> t m i) -> Producing o i m r -> t m r
replaceYield = pfold lift
-- | Take an action from i to o,
-- and create a Consuming computation that continuously
-- waits for the i, applies the action, and yields the o.
--
-- > foreverYield return ≡ id ∷ Consuming r m i i
foreverYield :: (Monad m) => (i -> m o) -> Consuming r m i o
foreverYield k = replaceYield (lift . k >=> yield) `overProduction` id
-- | Plug in the yield holes with a computation in the base
-- monad from o to i.
--
-- > yieldingTo k p ≡ p $- foreverYield k
yieldingTo :: (Monad m) => (o -> m i) -> Producing o i m r -> m r
yieldingTo = pfold id
infixr 4 /$/
-- | Composable replaceYield with monoid laws.
--
-- > yield /$/ x ≡ x
-- > x /$/ yield ≡ x
-- > a /$/ (b /$/ c) ≡ (a /$/ b) /$/ c
--
-- > return /$/ x ≡ return
-- > lift /$/ x ≡ lift
-- > x /$/ return ≡ lift ∘ selfConnect
--
-- Note that when you specialize t to Producing o' i',
-- you get the type signature:
--
-- > (Monad m)
-- > ⇒ (a → Producing o i m r)
-- > → (o → Producing o' i' m i)
-- > → (a → Producing o' i' m r)
(/$/) :: (Monad m, MonadTrans t, Monad (t m))
=> (a -> Producing o i m r) -> (o -> t m i)
-> (a -> t m r)
k1 /$/ k2 = replaceYield k2 . k1
-- Connecting computations
------------------------------------------------------------------
infixl 0 $-
-- | Connect a Producing computation with a Consuming computation
-- that has a matching interface. The two computations take turns,
-- yielding information back and forth at every switch.
-- Either one can terminate the overall computation by supplying
-- an r.
($-) :: (Monad m) => Producing a b m r -> Consuming r m a b -> m r
p $- c = resume p >>= \s -> case s of
Done r -> return r
Produced o k -> provide c o $- k
infixl 0 $~
-- | For when you don't want to wrap your function-to-Producing
-- in the Consuming newtype manually.
-- Using this form can look like a foreach loop:
--
-- example
--
-- > someProducer $~ \i -> do
-- > someBodyWhichCanYieldToProducer
($~) :: (Monad m) => Producing a b m r -> (a -> Producing b a m r) -> m r
p $~ k = p $- Consuming k
-- TODO: run them in parallel with MonadPar
infixl 0 $$
-- | Connect two Producing computations together. The left one goes first,
-- and the second gets `delay`ed.
--
-- > p $$ p2 ≡ p $- delay p2
($$) :: (Monad m) => Producing a b m r -> Producing b a m r -> m r
p $$ p2 = p $- delay p2
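-- A small runnable sketch (hypothetical bindings, not part of the original
-- API): a producer that yields 1, 2, 3, connected with ($-) to a consumer
-- that prints each number and answers with ().
countTo3 :: Producing Int () IO ()
countTo3 = yieldEach [1, 2, 3]

printEach :: Consuming r IO Int ()
printEach = foreverYield print

-- > countTo3 $- printEach   -- prints 1, 2, 3, then returns ()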
-- | Connect two computations, but with the ability to resume one
-- at the point where the other terminates.
--
-- > connectResume (delay (return r)) k b ≡ return (Left (r, Produced b k))
connectResume :: (Monad m) => Consuming r m b a -> Consuming r' m a b -> b -> m (Resumable b a m r r')
connectResume k1 k2 = \b -> resume (provide k1 b) >>= \s -> case s of
Done r -> return (Left (r, Produced b k2))
Produced a k1' -> resume (provide k2 a) >>= \s2 -> case s2 of
Done r' -> return (Right (Produced a k1', r'))
Produced b' k2' -> connectResume k1' k2' b'
-- Misc
------------------------------------------------------------------
-- | Yield the contents of a Foldable one by one.
--
-- > yieldEach ≡ mapM_ yield
yieldEach :: (Monad m, Foldable f) => f a -> Producing a b m ()
yieldEach = mapM_ yield
-- | Parrot back a certain number of inputs from the interface.
-- The next value sent in gets returned as the result.
echo :: (Monad m) => Int -> Consuming a m a a
echo n | n >= 0 = Consuming $ replicateK n yield
echo _ = Consuming $ \_ -> fail "echo requires a nonnegative argument"
-- | Parrot back a certain number of inputs from the interface.
-- The next value sent in terminates the computation.
echo_ :: (Monad m) => Int -> Consuming () m a a
echo_ = voidC . echo
-- | Forget the result type. The Consuming analogue of Control.Monad.void.
--
-- > voidC ≡ overProduction void
voidC :: (Monad m) => Consuming r m i o -> Consuming () m i o
voidC = overProduction void
-- As a functor in the category of monads
------------------------------------------------------------------
-- | The hoist function is fmap in the category of monads.
-- The transformation function will occur as many times
-- as the computation yields, plus one for transforming
-- the computation preceding the final result.
-- So using something like @(flip runStateT s0)@ as the argument
-- to hoist is probably not what you want, because the state would not
-- be carried from one step to the next.
--
-- > hoist id x ≡ x
hoist :: (Monad m, Monad n)
=> (forall x. m x -> n x)
-> Producing o i m r -> Producing o i n r
hoist f = pfold (lift . f) yield
-- As a monad in the category of monads
-------------------------------------------------------------------
-- | The squash function is join in the category of monads.
-- It interleaves two layers of yielding on the same interface.
--
-- > squash (insert0 x) ≡ x
-- > squash (insert1 x) ≡ x
-- > squash (insert2 x) ≡ insert1 (squash x)
squash :: (Monad m)
=> Producing o i (Producing o i m) r
-> Producing o i m r
squash = yieldingTo yield
-- | The embed function is bind in the category of monads.
-- It lets you convert the computations in-between yields
-- into computations over a new monad.
-- This conversion can inject new yields.
--
-- > embed f m ≡ squash (hoist f m)
embed :: (Monad m, Monad n)
=> (forall x. m x -> Producing i o n x)
-> Producing i o m r -> Producing i o n r
embed f m = squash (hoist f m)
-- As an indexed comonad in the category of monads
-------------------------------------------------
-- | The selfConnection function is indexed copoint in the category of monads.
-- It allows a yielding computation to just yield to itself.
--
-- > selfConnection ≡ yieldingTo return
-- > selfConnection (selfConnection x) ≡ selfConnection (hoist selfConnection x)
--
-- Note that the following are also true:
--
-- > selfConnection (squash x) ≡ selfConnection (selfConnection x)
-- > selfConnection (squash x) ≡ selfConnection (hoist selfConnection x)
--
-- If you interleave the two layers and then perform selfConnection,
-- you might expect that sometimes one layer might yield to the other.
-- Not so. Upon yielding, when a layer switches to consuming mode,
-- it is immediately supplied with the value it just yielded.
-- Thus, each layer will always yield to itself, even though
-- the two layers' yields are interleaved.
selfConnection :: (Monad m) => Producing i i m r -> m r
selfConnection = yieldingTo return
-- | The inputThrough function is indexed extend in the category of monads.
--
-- Given that the monad n has the ability to simulate
-- the monad m, as well as simulate yielding on the j/k interface,
-- then you can adjust the input end of a producing computation
-- from k to j.
--
-- > inputThrough selfConnection x ≡ x
--
-- The implementation can be depicted visually like so:
--
-- given:
--
-- @
-- \/ i -> \\
-- = p
-- \\ <- o \/
-- @
--
-- and given:
--
-- @
-- /\ x -> \
-- = morph (yield x)
-- \\ <- i /
-- @
--
-- We can sort of bend these around, and combine them into:
--
-- @
-- \/ x -> \\ \/ i -> \\
-- - ~morph~ |
-- \`inputThrough\` |
-- - - p - |
-- \\ <- o \/ \\ <- i \/
-- @
--
-- From looking at the type signature,
-- this may seem like @morph@ is being used backwards.
-- Think of the morphing function as the power
-- to yield an x and get an i, so the way we use it
-- really is @x@ going in, and @i@ coming out.
inputThrough :: (Monad m, Monad n)
=> (forall z. Producing x i m z -> n z)
-> Producing o i m r -> Producing o x n r
inputThrough morph = go where
go p = fromStep $ morph $ liftM map' (lift (resume p))
map' (Done r) = Done r
map' (Produced o consuming) = Produced o $ Consuming $ \x -> do
i <- lift (xToI x)
go (provide consuming i)
xToI x = morph (yield x)
-- | The through function is indexed duplicate in the category of monads.
--
-- The through function allows you to split one interface into two,
-- where the input of one gets fed into the output of the other.
--
-- > selfConnection (through x) ≡ x
--
-- Illustrated visually, it looks like when you go @through p@,
-- you take p's interface, and bend it around so that its
-- input and output ends are now on two separate interfaces.
-- Then you fill in the missing parts of the two interfaces
-- with a simple pass-through from one to the other.
--
-- @
-- \/ i -> \\
-- = p
-- \\ <- o \/
-- @
--
-- @
-- \/ x -> \\ \/ x -> \\
-- - ----- -
-- through
-- - - p - -
-- \\ <- o \/ \\ <- i \/
-- @
--
-- This "simple pass-through"
-- behavior is due to the implementation, where we pass the
-- "simple pass-through" function, @id@, to inputThrough:
--
-- > through ≡ inputThrough id
--
-- If you are wondering where the second interface hole came from,
-- given the diagram for inputThrough doesn't have it,
-- just stretch the top around to the right,
-- since @morph = id@, we have @id (yield x) = yield x@,
-- which yields over the x/i interface.
through :: (Monad m)
=> Producing o i m r
-> Producing o x (Producing x i m) r
through = inputThrough id
-- Manipulating layers of Producing
------------------------------------------------------------
-- | Insert a new layer at the top of the monad transformer stack.
--
-- > insert0 ≡ lift
insert0 :: (Monad m, MonadTrans t, Monad (t m)) => m r -> t m r
insert0 = lift
-- | Insert a new layer one layer deep in the monad transformer stack.
--
-- > insert1 = hoist insert0
insert1 :: (Monad m, MonadTrans t, Monad (t m))
=> Producing o i m r
-> Producing o i (t m) r
insert1 = hoist insert0
-- | Insert a new layer two layers deep in the monad transformer stack.
-- This pattern can be repeated ad infinitum, but you really shouldn't
-- be dealing with too many layers at the same time.
--
-- > insert2 = hoist insert1
insert2 :: (Monad m, MonadTrans t, Monad (t m))
=> Producing o i (Producing o' i' m) r
-> Producing o i (Producing o' i' (t m)) r
insert2 = hoist insert1
-- | Producing layers can commute with each other.
--
-- > commute (commute x) ≡ x
-- > commute (lift $ yield x) ≡ yield x
-- > commute (yield x) ≡ lift $ yield x
-- > commute (return x) ≡ return x
-- > commute ∘ (f >=> g) ≡ commute ∘ f >=> commute ∘ g
commute :: (Monad m)
=> Producing a b (Producing c d m) r
-> Producing c d (Producing a b m) r
commute p = p' $- idP where
p' = insert2 p
idP = insert1 `overProduction` idProxy
idProxy = foreverYield yield
-- = Consuming $ fix ((lift . yield >=> yield) >=>)
-- Instances
----------------------------------------------------------------
-- a common pattern in implementing instances
rewrap :: (MonadTrans t, Monad m) =>
Consuming r m i o -> i -> t m (ProducerState o i m r)
rewrap p a = lift (resume (provide p a))
-- Producing instances
---------------------------------
instance (Monad m) => Functor (Producing o i m) where
fmap = liftM
instance (Monad m) => Applicative (Producing o i m) where
pure = return
(<*>) = ap
instance (Monad m) => Monad (Producing o i m) where
return x = lift (return x)
p >>= f = fromStep $ resume p >>= \s -> case s of
Done r -> resume (f r)
Produced o k -> return $ Produced o $ Consuming (provide k >=> f)
fail = lift . fail
instance MonadTrans (Producing o i) where
lift m = fromStep $ liftM Done m
instance (Monad m, MonadIO m) => MonadIO (Producing o i m) where
liftIO = lift . liftIO
{- ???
instance (MonadFix m) => MonadFix (Producing o i m) where
mfix = undefined
-}
-- mtl instances for Producing
-----------------------------------
instance (Monad m, MonadReader r m) => MonadReader r (Producing o i m) where
ask = lift ask
local f = hoist (local f)
reader = lift . reader
instance (Monad m, MonadState r m) => MonadState r (Producing o i m) where
get = lift get
put = lift . put
state = lift . state
instance (Monad m, Monoid w, MonadWriter w m) => MonadWriter w (Producing o i m) where
writer = lift . writer
tell = lift . tell
listen m = fromStep $ listen (resume m) >>= \(s, w) -> case s of
Done r -> return $ Done (r, w)
Produced o k ->
let k' = liftM (second (w <>)) . listen . provide k
in return $ Produced o (Consuming k')
-- not sure if this is legit
pass m = fromStep $ pass $ resume m >>= \s -> case s of
Done (r, f) -> return (Done r, f)
Produced o k ->
let k' = pass . provide k
in return (Produced o (Consuming k'), id)
instance (Monad m, MonadError e m) => MonadError e (Producing o i m) where
throwError = lift . throwError
p `catchError` h = lift (safely (resume p)) >>= \s -> case s of
Left err -> h err
Right (Done r) -> return r
Right (Produced o k) -> do
i <- yield o
provide k i `catchError` h
where
safely m = liftM Right m `catchError` \e -> return (Left e)
instance (Monad m, MonadCont m) => MonadCont (Producing o i m) where
callCC f = fromStep $ callCC $ \k ->
resume (f $ lift . k . Done)
-- Consuming instances
--------------------------------------
instance (Monad m) => Functor (Consuming r m a) where
fmap f = overProduction $ replaceYield (yield . f)
instance (Monad m) => Applicative (Consuming r m a) where
pure a = arr (const a)
kf <*> kx = Consuming $ \a -> rewrap kf a >>= \s -> case s of
Done r -> return r
Produced f kf' -> rewrap kx a >>= \s -> case s of
Done r -> return r
Produced x kx' ->
yield (f x) >>= provide (kf' <*> kx')
instance (Monad m) => Category (Consuming r m) where
id = Consuming $ let go = yield >=> go in go
k2 . k1 = Consuming $ rewrap k1 >=> \s -> case s of
Done r -> return r
Produced b k1' -> rewrap k2 b >>= \s2 -> case s2 of
Done r -> return r
Produced c k2' ->
yield c >>= provide (k2' . k1')
instance (Monad m) => Arrow (Consuming r m) where
arr f = Consuming go where go = yield . f >=> go
first k = Consuming $ \(b, d) -> rewrap k b >>= \s -> case s of
Done r -> return r
Produced c k' -> yield (c, d) >>= provide (first k')
instance (Monad m, Monoid r) => ArrowZero (Consuming r m) where
zeroArrow = Consuming $ \_ -> return mempty
instance (Monad m, Monoid r) => ArrowPlus (Consuming r m) where
k1 <+> k2 = Consuming $ \i -> rewrap k1 i >>= \s -> case s of
Done r -> liftM (r <>) (provide k2 i)
Produced o k1' -> yield o >>= provide (k1' <+> k2)
instance (Monad m) => ArrowChoice (Consuming r m) where
left k = Consuming go where
go = \e -> case e of
Right d -> yield (Right d) >>= go
Left b -> rewrap k b >>= \s -> case s of
Done r -> return r
Produced c k' -> yield (Left c) >>= provide (left k')
{-
-- left =?= leftApp
instance (Monad m) => ArrowApply (Consuming r m) where
app = Consuming go where
go = \(kf, b) -> rewrap kf b >>= \s -> case s of
Done r -> return r
Produced c _ -> yield c >>= go
-- ignoring k' makes me weary
-}
|
DanBurton/yield
|
Control/Yield.hs
|
bsd-3-clause
| 22,587
| 0
| 21
| 5,271
| 5,425
| 2,908
| 2,517
| -1
| -1
|
module Sublist (Sublist(..), sublist) where
import Data.List (isInfixOf)
data Sublist = Equal
| Sublist
| Superlist
| Unequal
deriving (Eq, Show)
sublist :: Eq a => [a] -> [a] -> Sublist
sublist xs ys
| xs == ys = Equal
| xs `isInfixOf` ys = Sublist
| ys `isInfixOf` xs = Superlist
| otherwise = Unequal
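-- A few illustrative evaluations (hypothetical helper, not part of the exercise):
sublistExamples :: [Sublist]
sublistExamples =
  [ sublist [1, 2] [1, 2, 3]    -- Sublist
  , sublist "abc" "abc"         -- Equal
  , sublist [0, 1, 2] [1, 2]    -- Superlist
  , sublist [1] [2]             -- Unequal
  ]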
|
pminten/xhaskell
|
sublist/example.hs
|
mit
| 383
| 0
| 8
| 135
| 138
| 76
| 62
| 13
| 1
|
import Data.Char
inRange :: Int -> Int -> [Int] -> [Int]
inRange _ _ []
= []
inRange a b (x:xs)
= if x >= a && x<=b then x : inRange a b xs else inRange a b xs
countPositives :: [Int] -> Int
countPositives []
= 0
countPositives (x:xs)
= if x > 0 then 1 + countPositives xs else countPositives xs
capitalised :: String -> String
capitalised []
= []
capitalised [x]
= [toUpper x]
capitalised xs
= capitalised(init(xs)) ++ [toLower(last(xs))]
title :: [String] -> [String]
title []
= []
title (cs:[])
= [capitalised cs]
title ccs
= title(init(ccs)) ++ if length(last(ccs)) >= 4 then [capitalised(last(ccs))] else [last(ccs)]
--Insertion sort implementation
isort :: Ord a => [a] -> [a]
isort []
= []
isort [x]
= [x]
isort (x:xs)
= insertInt x (isort xs)
insertInt :: Ord a => a -> [a] -> [a]
insertInt n xs
= [x | x <- xs, x <= n] ++ [n] ++ [x | x <- xs, x > n]
-- Merge sort implementation
merge :: Ord a => [a] -> [a] -> [a]
merge xs []
= xs
merge [] ys
= ys
merge (x:xs) (y:ys)
| x <= y = x : merge xs (y:ys)
| otherwise = y : merge (x:xs) ys
mergeSort :: Ord a => [a] -> [a]
mergeSort []
  = []
mergeSort [x]
  = [x]
mergeSort xs
  = merge firstHalf lastHalf
  where
    half      = length xs `div` 2
    firstHalf = mergeSort $ take half xs
    lastHalf  = mergeSort $ drop half xs
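-- A quick check of the merge sort above (hypothetical binding); the odd-length
-- input exercises the take/drop split: mergeSortExample evaluates to [1,2,3,4,5].
mergeSortExample :: [Int]
mergeSortExample = mergeSort [4, 2, 5, 1, 3]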
-- Cipher
rotor :: Int -> String -> String
rotor n s
| n < 0 = "Offset must be bigger than 0"
| n >= length s = "Offset must be less than the length of the string"
| otherwise = drop n s ++ take n s
makeKey :: Int -> [(Char, Char)]
makeKey n = zip ['A'..'Z'] (rotor n ['A'..'Z'])
lookUp :: Char -> [(Char, Char)] -> Char
lookUp c [] = c
lookUp c (x:xs)
 | c == fst x = snd x
| otherwise = lookUp c xs
encipher :: Int -> Char -> Char
encipher n c = lookUp c (makeKey n)
normalise :: String -> String
normalise [] = []
normalise cs = filter (\c -> isLetter c || isDigit c) (map toUpper cs)
encipherStr :: Int -> String -> String
encipherStr n cs = map f (normalise cs)
where
f c = lookUp c key
key = makeKey n
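-- A quick check of the Caesar cipher above (hypothetical binding):
-- cipherExample evaluates to "KHOOR" (a shift of 3 applied to "Hello").
cipherExample :: String
cipherExample = encipherStr 3 "Hello"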
|
martrik/COMP101
|
Term1/COMP101/Lab2/LabSheet2.hs
|
mit
| 2,192
| 0
| 11
| 561
| 1,151
| 591
| 560
| 75
| 2
|
-- | 'StateLockMetrics' for txp.
module Pos.DB.Txp.MemState.Metrics
( recordTxpMetrics
) where
import Universum
import Data.Aeson.Types (ToJSON (..))
import Formatting (sformat, shown, (%))
import qualified System.Metrics as Metrics
import qualified System.Metrics.Gauge as Metrics.Gauge
import Pos.Chain.Txp (MemPool (_mpSize))
import Pos.Core.JsonLog.LogEvents (JLEvent (..), JLMemPool (..),
MemPoolModifyReason (..))
import Pos.Core.Metrics.Constants (withCardanoNamespace)
import Pos.DB.GState.Lock (StateLockMetrics (..))
import Pos.Util.Wlog (logDebug)
-- | 'StateLockMetrics' to record txp MemPool metrics.
recordTxpMetrics :: Metrics.Store -> TVar MemPool -> IO (StateLockMetrics MemPoolModifyReason)
recordTxpMetrics ekgStore memPoolVar = do
ekgMemPoolSize <-
Metrics.createGauge (withCardanoNamespace "MemPoolSize") ekgStore
ekgMemPoolWaitTimeApplyBlock <-
Metrics.createGauge (withCardanoNamespace "MemPoolWaitTimeApplyBlock_microseconds") ekgStore
ekgMemPoolModifyTimeApplyBlock <-
Metrics.createGauge (withCardanoNamespace "MemPoolModifyTimeApplyBlock_microseconds") ekgStore
ekgMemPoolWaitTimeApplyBlockWithRollback <-
Metrics.createGauge (withCardanoNamespace "MemPoolWaitTimeApplyBlockWithRollback_microseconds") ekgStore
ekgMemPoolModifyTimeApplyBlockWithRollback <-
Metrics.createGauge (withCardanoNamespace "MemPoolModifyTimeApplyBlockWithRollback_microseconds") ekgStore
ekgMemPoolWaitTimeProcessTx <-
Metrics.createGauge (withCardanoNamespace "MemPoolWaitTimeProcessTx_microseconds") ekgStore
ekgMemPoolModifyTimeProcessTx <-
Metrics.createGauge (withCardanoNamespace "MemPoolModifyTimeProcessTx_microseconds") ekgStore
ekgMemPoolQueueLength <-
Metrics.createGauge (withCardanoNamespace "MemPoolQueueLength") ekgStore
-- An exponential moving average is used for the time gauges (wait
-- and modify durations). The parameter alpha is chosen somewhat
-- arbitrarily.
-- FIXME take alpha from configuration/CLI, or use a better
-- estimator.
let alpha :: Double
alpha = 0.75
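    -- With alpha = 0.75, a new sample s and a previous estimate e combine as
    -- round (0.75 * s + 0.25 * e), matching the gauge updates below.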
-- This TxpMetrics specifies what to do when waiting on the
-- mempool lock, when the mempool lock has been granted, and
-- when that lock has been released. It updates EKG metrics
-- and also logs each data point at debug level.
pure StateLockMetrics
{ slmWait = \reason -> do
liftIO $ Metrics.Gauge.inc ekgMemPoolQueueLength
qlen <- liftIO $ Metrics.Gauge.read ekgMemPoolQueueLength
logDebug $ sformat ("MemPool metrics wait: "%shown%" queue length is "%shown) reason qlen
, slmAcquire = \reason timeWaited -> do
liftIO $ Metrics.Gauge.dec ekgMemPoolQueueLength
let ekgMemPoolWaitTime = case reason of
ApplyBlock -> ekgMemPoolWaitTimeApplyBlock
ApplyBlockWithRollback -> ekgMemPoolWaitTimeApplyBlockWithRollback
ProcessTransaction -> ekgMemPoolWaitTimeProcessTx
timeWaited' <- liftIO $ Metrics.Gauge.read ekgMemPoolWaitTime
-- Assume a 0-value estimate means we haven't taken
-- any samples yet.
let new_ = if timeWaited' == 0
then fromIntegral timeWaited
else round $ alpha * fromIntegral timeWaited + (1 - alpha) * fromIntegral timeWaited'
liftIO $ Metrics.Gauge.set ekgMemPoolWaitTime new_
logDebug $ sformat ("MemPool metrics acquire: "%shown
%" wait time was "%shown) reason timeWaited
, slmRelease = \reason timeWaited timeElapsed memAllocated -> do
qlen <- liftIO $ Metrics.Gauge.read ekgMemPoolQueueLength
oldMemPoolSize <- liftIO $ Metrics.Gauge.read ekgMemPoolSize
newMemPoolSize <- _mpSize <$> readTVarIO memPoolVar
liftIO $ Metrics.Gauge.set ekgMemPoolSize (fromIntegral newMemPoolSize)
let ekgMemPoolModifyTime = case reason of
ApplyBlock -> ekgMemPoolModifyTimeApplyBlock
ApplyBlockWithRollback -> ekgMemPoolModifyTimeApplyBlockWithRollback
ProcessTransaction -> ekgMemPoolModifyTimeProcessTx
timeElapsed' <- liftIO $ Metrics.Gauge.read ekgMemPoolModifyTime
let new_ = if timeElapsed' == 0
then fromIntegral timeElapsed
else round $ alpha * fromIntegral timeElapsed + (1 - alpha) * fromIntegral timeElapsed'
liftIO $ Metrics.Gauge.set ekgMemPoolModifyTime new_
logDebug $ sformat ("MemPool metrics release: "%shown
%" modify time was "%shown%" size is "%shown)
reason timeElapsed newMemPoolSize
pure . toJSON . JLMemPoolEvent $ JLMemPool
reason
(fromIntegral timeWaited)
(fromIntegral qlen)
(fromIntegral timeElapsed)
(fromIntegral oldMemPoolSize)
(fromIntegral newMemPoolSize)
(fromIntegral memAllocated)
}
|
input-output-hk/pos-haskell-prototype
|
db/src/Pos/DB/Txp/MemState/Metrics.hs
|
mit
| 5,389
| 0
| 20
| 1,523
| 931
| 485
| 446
| 76
| 7
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Route53Domains.DisableDomainAutoRenew
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- This operation disables automatic renewal of domain registration for the
-- specified domain.
--
-- Caution! Amazon Route 53 doesn\'t have a manual renewal process, so if
-- you disable automatic renewal, registration for the domain will not be
-- renewed when the expiration date passes, and you will lose control of
-- the domain name.
--
-- /See:/ <http://docs.aws.amazon.com/Route53/latest/APIReference/api-DisableDomainAutoRenew.html AWS API Reference> for DisableDomainAutoRenew.
module Network.AWS.Route53Domains.DisableDomainAutoRenew
(
-- * Creating a Request
disableDomainAutoRenew
, DisableDomainAutoRenew
-- * Request Lenses
, ddarDomainName
-- * Destructuring the Response
, disableDomainAutoRenewResponse
, DisableDomainAutoRenewResponse
-- * Response Lenses
, ddarrsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.Route53Domains.Types
import Network.AWS.Route53Domains.Types.Product
-- | /See:/ 'disableDomainAutoRenew' smart constructor.
newtype DisableDomainAutoRenew = DisableDomainAutoRenew'
{ _ddarDomainName :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DisableDomainAutoRenew' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddarDomainName'
disableDomainAutoRenew
:: Text -- ^ 'ddarDomainName'
-> DisableDomainAutoRenew
disableDomainAutoRenew pDomainName_ =
DisableDomainAutoRenew'
{ _ddarDomainName = pDomainName_
}
-- | Undocumented member.
ddarDomainName :: Lens' DisableDomainAutoRenew Text
ddarDomainName = lens _ddarDomainName (\ s a -> s{_ddarDomainName = a});
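-- A minimal usage sketch (hypothetical binding; the domain name is a placeholder):
exampleDisableAutoRenew :: DisableDomainAutoRenew
exampleDisableAutoRenew = disableDomainAutoRenew "example.com"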
instance AWSRequest DisableDomainAutoRenew where
type Rs DisableDomainAutoRenew =
DisableDomainAutoRenewResponse
request = postJSON route53Domains
response
= receiveEmpty
(\ s h x ->
DisableDomainAutoRenewResponse' <$>
(pure (fromEnum s)))
instance ToHeaders DisableDomainAutoRenew where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("Route53Domains_v20140515.DisableDomainAutoRenew" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON DisableDomainAutoRenew where
toJSON DisableDomainAutoRenew'{..}
= object
(catMaybes [Just ("DomainName" .= _ddarDomainName)])
instance ToPath DisableDomainAutoRenew where
toPath = const "/"
instance ToQuery DisableDomainAutoRenew where
toQuery = const mempty
-- | /See:/ 'disableDomainAutoRenewResponse' smart constructor.
newtype DisableDomainAutoRenewResponse = DisableDomainAutoRenewResponse'
{ _ddarrsResponseStatus :: Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DisableDomainAutoRenewResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddarrsResponseStatus'
disableDomainAutoRenewResponse
:: Int -- ^ 'ddarrsResponseStatus'
-> DisableDomainAutoRenewResponse
disableDomainAutoRenewResponse pResponseStatus_ =
DisableDomainAutoRenewResponse'
{ _ddarrsResponseStatus = pResponseStatus_
}
-- | The response status code.
ddarrsResponseStatus :: Lens' DisableDomainAutoRenewResponse Int
ddarrsResponseStatus = lens _ddarrsResponseStatus (\ s a -> s{_ddarrsResponseStatus = a});
|
fmapfmapfmap/amazonka
|
amazonka-route53-domains/gen/Network/AWS/Route53Domains/DisableDomainAutoRenew.hs
|
mpl-2.0
| 4,406
| 0
| 13
| 919
| 500
| 303
| 197
| 69
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Extract loop-invariant "complex" expressions from comprehensions
module Database.DSH.CL.Opt.LoopInvariant
( loopInvariantR
) where
import Data.List
import Data.Maybe
import Database.DSH.CL.Kure
import Database.DSH.CL.Lang
import Database.DSH.CL.Opt.Auxiliary
import qualified Database.DSH.CL.Primitives as P
import Database.DSH.Common.Impossible
import Database.DSH.Common.Kure
import Database.DSH.Common.Lang
-- | Extract complex loop-invariant expressions from comprehension
-- heads and guards.
loopInvariantR :: RewriteC CL
loopInvariantR = logR "loopinvariant.guard" loopInvariantGuardR
<+ logR "loopinvariant.head" loopInvariantHeadR
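-- A hedged sketch of the intended effect (surface syntax only, not executable
-- here): a complex expression in a guard or head that mentions no generator
-- variable is bound once in an enclosing let, e.g. (assuming the aggregate
-- counts as "complex" under the heuristic used below)
--
--   [ x + sum ys | x <- xs, x > sum ys ]
--   ==>  let v = sum ys in [ x + v | x <- xs, x > v ]
--
-- after repeated application of the rewrite.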
--------------------------------------------------------------------------------
-- Common code for searching loop-invariant expressions
traverseT :: [Ident] -> TransformC CL (Expr, PathC)
traverseT localVars = readerT $ \expr -> case expr of
-- We do not traverse into comprehensions which are nested in our current
-- comprehension.
ExprCL Comp{} -> fail "we don't traverse into comprehensions"
-- Search in let-bindings. We need to check whether the binding generates
-- transitive dependencies on generator variables.
ExprCL (Let _ x e1 _) -> let localVars' = if not $ null $ freeVars e1 `intersect` localVars
then localVars ++ [x]
else localVars
in childT LetBind (searchInvariantExprT localVars)
<+
childT LetBody (searchInvariantExprT localVars')
ExprCL _ -> oneT $ searchInvariantExprT localVars
_ -> fail "we only consider expressions"
-- | Collect a path to a complex expression
complexPathT :: [Ident] -> TransformC CL (Expr, PathC)
complexPathT localVars = do
ExprCL e <- idR
path <- snocPathToPath <$> absPathT
-- We are only interested in constant expressions that do not
-- depend on variables bound by generators in the enclosing
-- comprehension.
-- debugMsg $ "free: " ++ pp (freeVars e)
guardM $ null $ freeVars e `intersect` localVars
-- FIXME more precise heuristics could be employed: A
-- comprehension is only "complex" if it has more than one
-- generator OR a filter OR something complex in the head.
case e of
Comp{} -> return (e, path)
If{} -> return (e, path)
AppE2 _ op _ _ | complexPrim2 op -> return (e, path)
AppE1 _ op _ | complexPrim1 op -> return (e, path)
_ -> fail "not a complex expression"
-- | Traverse expressions top-down, searching for loop-invariant
-- complex expressions.
searchInvariantExprT :: [Ident] -> TransformC CL (Expr, PathC)
searchInvariantExprT localVars = complexPathT localVars <+ promoteT (traverseT localVars)
invariantQualR :: [Ident] -> TransformC CL (Expr, PathC)
invariantQualR localVars = readerT $ \expr -> case expr of
QualsCL (BindQ{} :* _) -> childT QualsTail (invariantQualR localVars)
QualsCL (GuardQ _ :* _) -> childT QualsHead (searchInvariantExprT localVars)
<+
childT QualsTail (invariantQualR localVars)
QualsCL (S (GuardQ _)) -> pathT [QualsSingleton, GuardQualExpr] (searchInvariantExprT localVars)
QualsCL (S BindQ{}) -> fail "no match"
_ -> $impossible
--------------------------------------------------------------------------------
-- Search and replace loop-invariant expressions
-- | 'pullCompInvariantR e p ns' replaces expression 'e' in a comprehension at
-- local path 'p' by a variable that is bound by a let-expression.
pullCompInvariantR :: Expr -> PathC -> [Ident] -> RewriteC CL
pullCompInvariantR invExpr invPath avoidNames = do
letName <- freshNameT avoidNames
localPath <- localizePathT invPath
let invVar = Var (typeOf invExpr) letName
ExprCL comp' <- pathR localPath (constT $ return $ inject invVar)
return $ inject $ P.let_ letName invExpr comp'
loopInvariantGuardR :: RewriteC CL
loopInvariantGuardR = do
c@(Comp _ _ qs) <- promoteT idR
-- FIXME passing *all* generator variables in the current
-- comprehension is too conservative. It would be sufficient to
-- consider those preceding the guard that is under investigation.
let genVars = compBoundVars qs
(invExpr, invPath) <- childT CompQuals (invariantQualR genVars)
pullCompInvariantR invExpr invPath (genVars ++ boundVars c)
loopInvariantHeadR :: RewriteC CL
loopInvariantHeadR = do
Comp _ h qs <- promoteT idR
let genVars = fmap fst $ catMaybes $ fromGen <$> toList qs
(invExpr, invPath) <- childT CompHead (searchInvariantExprT genVars)
pullCompInvariantR invExpr invPath (genVars ++ boundVars h)
|
ulricha/dsh
|
src/Database/DSH/CL/Opt/LoopInvariant.hs
|
bsd-3-clause
| 5,180
| 0
| 17
| 1,357
| 1,050
| 541
| 509
| 70
| 5
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE GADTs #-}
module Language.Embedded.Hardware.Command
(
-- Regular hardware compilers.
compile
, icompile
, runIO
-- hardware compilers without process wrapping.
, compileSig
, icompileSig
-- AXI compilers.
, compileAXILite
, icompileAXILite
--
, VHDL.Mode(..)
--
, module CMD
, module Language.Embedded.Hardware.Command.CMD
, module Language.Embedded.Hardware.Command.Frontend
, module Language.Embedded.Hardware.Command.Backend.VHDL
) where
import Language.Embedded.Hardware.Command.CMD as CMD (Signal, Variable, Array)
import Language.Embedded.Hardware.Command.CMD hiding (Signal, Variable, Array)
import Language.Embedded.Hardware.Command.Frontend
import Language.Embedded.Hardware.Command.Backend.VHDL
import Language.Embedded.Hardware.Interface
import Language.Embedded.Hardware.Interface.AXI
import Language.Embedded.VHDL (VHDL, prettyVHDL)
import qualified Language.VHDL as VHDL -- temp
import qualified Language.Embedded.VHDL as VHDL -- temp
import Control.Monad.Operational.Higher
import Control.Monad.Identity
import qualified GHC.Exts as GHC (Constraint)
--------------------------------------------------------------------------------
-- * Compilation and evaluation.
--------------------------------------------------------------------------------
-- | Compile a program to VHDL code represented as a string.
compile :: forall instr (exp :: * -> *) (pred :: * -> GHC.Constraint) a.
( Interp instr VHDLGen (Param2 exp pred)
, HFunctor instr
, ProcessCMD :<: instr
, VHDLCMD :<: instr
, pred Bool
)
=> Program instr (Param2 exp pred) ()
-> String
compile = show
. VHDL.prettyVHDL
. VHDL.wrapMain
. flip runVHDLGen emptyEnv
. interpret
. process []
-- | Compile a program to VHDL code and print it on the screen.
icompile :: forall instr (exp :: * -> *) (pred :: * -> GHC.Constraint) a.
( Interp instr VHDLGen (Param2 exp pred)
, HFunctor instr
, ProcessCMD :<: instr
, VHDLCMD :<: instr
, pred Bool
)
=> Program instr (Param2 exp pred) ()
-> IO ()
icompile = putStrLn . compile
-- | Run a program in 'IO'.
runIO :: forall instr (exp :: * -> *) (pred :: * -> GHC.Constraint) a
. ( InterpBi instr IO (Param1 pred)
, HBifunctor instr
, EvaluateExp exp
)
=> Program instr (Param2 exp pred) a
-> IO a
runIO = interpretBi (return . evalE)
--------------------------------------------------------------------------------
compileSig :: forall instr (exp :: * -> *) (pred :: * -> GHC.Constraint) a .
( Interp instr VHDLGen (Param2 exp pred)
, HFunctor instr
, ComponentCMD :<: instr
)
=> Sig instr exp pred Identity a
-> String
compileSig =
show
. VHDL.prettyVHDL
. flip runVHDLGen emptyEnv
. interpret
. component
icompileSig :: forall instr (exp :: * -> *) (pred :: * -> GHC.Constraint) a .
( Interp instr VHDLGen (Param2 exp pred)
, HFunctor instr
, ComponentCMD :<: instr
)
=> Sig instr exp pred Identity a
-> IO ()
icompileSig = putStrLn . compileSig
--------------------------------------------------------------------------------
-- Some extra compilers that might be handy to have.
compileAXILite :: forall instr (exp :: * -> *) (pred :: * -> GHC.Constraint) a .
( Interp instr VHDLGen (Param2 exp pred)
, HFunctor instr
, AXIPred instr exp pred
)
=> Sig instr exp pred Identity a
-> String
compileAXILite sig =
show
. VHDL.prettyVHDL
. flip runVHDLGen emptyEnv
. interpret
$ do comp <- component sig
clockedComponent "AXI" "S_AXI_ACLK" "S_AXI_ARESETN" (axi_light comp)
icompileAXILite :: forall instr (exp :: * -> *) (pred :: * -> GHC.Constraint) a .
( Interp instr VHDLGen (Param2 exp pred)
, HFunctor instr
, AXIPred instr exp pred
)
=> Sig instr exp pred Identity a
-> IO ()
icompileAXILite = putStrLn . compileAXILite
--------------------------------------------------------------------------------
|
markus-git/imperative-edsl-vhdl
|
src/Language/Embedded/Hardware/Command.hs
|
bsd-3-clause
| 4,188
| 0
| 10
| 838
| 1,050
| 609
| 441
| -1
| -1
|
module Series (slices) where
import Data.Char (digitToInt)
import Data.List (tails)
slices :: Int -> String -> [[Int]]
slices n s = map (take n) . take (length s - n + 1) . tails $ numberSeries
where numberSeries = map digitToInt s
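-- A quick illustration (hypothetical binding):
-- slicesExample evaluates to [[1,2],[2,3],[3,4],[4,5]].
slicesExample :: [[Int]]
slicesExample = slices 2 "12345"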
|
pminten/xhaskell
|
series/example.hs
|
mit
| 237
| 0
| 12
| 47
| 108
| 58
| 50
| 6
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
{-# OPTIONS_GHC -fno-warn-orphans #-} -- this file adds missing instances for GTK stuff
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Frontend.Pango
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- This module defines a user interface implemented using gtk2hs and
-- pango for direct text rendering.
module Yi.Frontend.Pango (start, startGtkHook) where
import Control.Applicative
import Control.Concurrent
import Control.Exception (catch, SomeException)
import Lens.Micro.Platform hiding (set)
import Control.Monad hiding (forM_, mapM_, forM, mapM)
import Data.Foldable
import Data.IORef
import qualified Data.List.PointedList as PL (moveTo)
import qualified Data.List.PointedList.Circular as PL
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid
import Data.Text (unpack, Text)
import qualified Data.Text as T
import Data.Traversable
import qualified Graphics.UI.Gtk as Gtk
import Graphics.UI.Gtk hiding (Region, Window, Action , Point,
Style, Modifier, on)
import qualified Graphics.UI.Gtk.Gdk.EventM as EventM
import qualified Graphics.UI.Gtk.Gdk.GC as Gtk
import Graphics.UI.Gtk.Gdk.GC hiding (foreground)
import Prelude hiding (error, elem, mapM_, foldl, concat, mapM)
import System.Glib.GError
import Yi.Buffer
import Yi.Config
import Yi.Debug
import Yi.Editor
import Yi.Event
import Yi.Keymap
import Yi.Layout(DividerPosition, DividerRef)
import Yi.Monad
import qualified Yi.Rope as R
import Yi.Style
import Yi.Tab
import Yi.Types (fontsizeVariation, attributes)
import qualified Yi.UI.Common as Common
import Yi.Frontend.Pango.Control (keyTable)
import Yi.Frontend.Pango.Layouts
import Yi.Frontend.Pango.Utils
import Yi.String (showT)
import Yi.UI.TabBar
import Yi.UI.Utils
import Yi.Utils
import Yi.Window
-- We use IORefs in all of these datatypes for all fields which could
-- possibly change over time. This ensures that no 'UI', 'TabInfo',
-- 'WinInfo' will ever go out of date.
data UI = UI
{ uiWindow :: Gtk.Window
, uiNotebook :: SimpleNotebook
, uiStatusbar :: Statusbar
, tabCache :: IORef TabCache
, uiActionCh :: Action -> IO ()
, uiConfig :: UIConfig
, uiFont :: IORef FontDescription
, uiInput :: IMContext
}
type TabCache = PL.PointedList TabInfo
-- We don't need to know the order of the windows (the layout manages
-- that) so we might as well use a map
type WindowCache = M.Map WindowRef WinInfo
data TabInfo = TabInfo
{ coreTabKey :: TabRef
, layoutDisplay :: LayoutDisplay
, miniwindowPage :: MiniwindowDisplay
, tabWidget :: Widget
, windowCache :: IORef WindowCache
, fullTitle :: IORef Text
, abbrevTitle :: IORef Text
}
instance Show TabInfo where
show t = show (coreTabKey t)
data WinInfo = WinInfo
{ coreWinKey :: WindowRef
, coreWin :: IORef Window
, shownTos :: IORef Point
, lButtonPressed :: IORef Bool
, insertingMode :: IORef Bool
, inFocus :: IORef Bool
, winLayoutInfo :: MVar WinLayoutInfo
, winMetrics :: FontMetrics
, textview :: DrawingArea
, modeline :: Label
, winWidget :: Widget -- ^ Top-level widget for this window.
}
data WinLayoutInfo = WinLayoutInfo {
winLayout :: !PangoLayout,
tos :: !Point,
bos :: !Point,
bufEnd :: !Point,
cur :: !Point,
buffer :: !FBuffer,
regex :: !(Maybe SearchExp)
}
instance Show WinInfo where
show w = show (coreWinKey w)
instance Ord EventM.Modifier where
x <= y = fromEnum x <= fromEnum y
mkUI :: UI -> Common.UI Editor
mkUI ui = Common.dummyUI
{ Common.main = main
, Common.end = const end
, Common.suspend = windowIconify (uiWindow ui)
, Common.refresh = refresh ui
, Common.layout = doLayout ui
, Common.reloadProject = const reloadProject
}
updateFont :: UIConfig -> IORef FontDescription -> IORef TabCache -> Statusbar
-> FontDescription -> IO ()
updateFont cfg fontRef tc status font = do
maybe (return ()) (fontDescriptionSetFamily font) (configFontName cfg)
writeIORef fontRef font
widgetModifyFont status (Just font)
tcs <- readIORef tc
forM_ tcs $ \tabinfo -> do
wcs <- readIORef (windowCache tabinfo)
forM_ wcs $ \wininfo -> do
withMVar (winLayoutInfo wininfo) $ \WinLayoutInfo{winLayout} ->
layoutSetFontDescription winLayout (Just font)
-- This will cause the textview to redraw
widgetModifyFont (textview wininfo) (Just font)
widgetModifyFont (modeline wininfo) (Just font)
askBuffer :: Window -> FBuffer -> BufferM a -> a
askBuffer w b f = fst $ runBuffer w b f
-- | Initialise the ui
start :: UIBoot
start = startGtkHook (const $ return ())
-- | Initialise the ui, calling a given function
-- on the Gtk window. This could be used to
-- set additional callbacks, adjusting the window
-- layout, etc.
startGtkHook :: (Gtk.Window -> IO ()) -> UIBoot
startGtkHook userHook cfg ch outCh ed =
catch (startNoMsgGtkHook userHook cfg ch outCh ed)
(\(GError _dom _code msg) -> fail $ unpack msg)
startNoMsgGtkHook :: (Gtk.Window -> IO ()) -> UIBoot
startNoMsgGtkHook userHook cfg ch outCh ed = do
logPutStrLn "startNoMsgGtkHook"
void unsafeInitGUIForThreadedRTS
win <- windowNew
ico <- loadIcon "yi+lambda-fat-32.png"
vb <- vBoxNew False 1 -- Top-level vbox
im <- imMulticontextNew
imContextSetUsePreedit im False -- handler for preedit string not implemented
-- Yi.Buffer.Misc.insertN for atomic input?
let imContextCommitS :: Signal IMContext (String -> IO ())
imContextCommitS = imContextCommit
im `on` imContextCommitS $ mapM_ (\k -> ch [Event (KASCII k) []])
set win [ windowDefaultWidth := 700
, windowDefaultHeight := 900
, windowTitle := ("Yi" :: T.Text)
, windowIcon := Just ico
, containerChild := vb
]
win `on` deleteEvent $ io $ mainQuit >> return True
win `on` keyPressEvent $ handleKeypress ch im
paned <- hPanedNew
tabs <- simpleNotebookNew
panedAdd2 paned (baseWidget tabs)
status <- statusbarNew
-- Allow multiple lines in statusbar, GitHub issue #478
statusbarGetMessageArea status >>= containerGetChildren >>= \case
[w] -> labelSetSingleLineMode (castToLabel w) False
_ -> return ()
-- statusbarGetContextId status "global"
set vb [ containerChild := paned
, containerChild := status
, boxChildPacking status := PackNatural
]
fontRef <- fontDescriptionNew >>= newIORef
let actionCh = outCh . return
tc <- newIORef =<< newCache ed actionCh
let watchFont = (fontDescriptionFromString ("Monospace 10" :: T.Text) >>=)
watchFont $ updateFont (configUI cfg) fontRef tc status
-- I think this is the correct place to put it...
userHook win
-- use our magic threads thingy
-- http://haskell.org/gtk2hs/archives/2005/07/24/writing-multi-threaded-guis/
void $ timeoutAddFull (yield >> return True) priorityDefaultIdle 50
widgetShowAll win
let ui = UI win tabs status tc actionCh (configUI cfg) fontRef im
-- Keep the current tab focus up to date
let move n pl = fromMaybe pl (PL.moveTo n pl)
runAction = uiActionCh ui . makeAction
-- why does this cause a hang without postGUIAsync?
simpleNotebookOnSwitchPage (uiNotebook ui) $ \n -> postGUIAsync $
runAction ((%=) tabsA (move n) :: EditorM ())
return (mkUI ui)
main :: IO ()
main = logPutStrLn "GTK main loop running" >> mainGUI
-- | Clean up and go home
end :: IO ()
end = mainQuit
-- | Modify GUI and the 'TabCache' to reflect information in 'Editor'.
updateCache :: UI -> Editor -> IO ()
updateCache ui e = do
cache <- readIORef $ tabCache ui
-- convert to a map for convenient lookups
let cacheMap = mapFromFoldable . fmap (\t -> (coreTabKey t, t)) $ cache
-- build the new cache
cache' <- forM (e ^. tabsA) $ \tab ->
case M.lookup (tkey tab) cacheMap of
Just t -> updateTabInfo e ui tab t >> return t
Nothing -> newTab e ui tab
-- store the new cache
writeIORef (tabCache ui) cache'
-- update the GUI
simpleNotebookSet (uiNotebook ui)
=<< forM cache' (\t -> (tabWidget t,) <$> readIORef (abbrevTitle t))
-- | Modify GUI and given 'TabInfo' to reflect information in 'Tab'.
updateTabInfo :: Editor -> UI -> Tab -> TabInfo -> IO ()
updateTabInfo e ui tab tabInfo = do
-- update the window cache
wCacheOld <- readIORef (windowCache tabInfo)
wCacheNew <- mapFromFoldable <$> forM (tab ^. tabWindowsA) (\w ->
case M.lookup (wkey w) wCacheOld of
Just wInfo -> updateWindow e ui w wInfo >> return (wkey w, wInfo)
Nothing -> (wkey w,) <$> newWindow e ui w)
writeIORef (windowCache tabInfo) wCacheNew
-- TODO update renderer, etc?
let lookupWin w = wCacheNew M.! w
-- set layout
layoutDisplaySet (layoutDisplay tabInfo)
. fmap (winWidget . lookupWin) . tabLayout $ tab
-- set minibox
miniwindowDisplaySet (miniwindowPage tabInfo)
. fmap (winWidget . lookupWin . wkey) . tabMiniWindows $ tab
-- set focus
setWindowFocus e ui tabInfo . lookupWin . wkey . tabFocus $ tab
updateWindow :: Editor -> UI -> Window -> WinInfo -> IO ()
updateWindow e _ui win wInfo = do
writeIORef (inFocus wInfo) False -- see also 'setWindowFocus'
writeIORef (coreWin wInfo) win
writeIORef (insertingMode wInfo)
(askBuffer win (findBufferWith (bufkey win) e) $ use insertingA)
setWindowFocus :: Editor -> UI -> TabInfo -> WinInfo -> IO ()
setWindowFocus e ui t w = do
win <- readIORef (coreWin w)
let bufferName = shortIdentString (length $ commonNamePrefix e) $
findBufferWith (bufkey win) e
ml = askBuffer win (findBufferWith (bufkey win) e) $
getModeLine (T.pack <$> commonNamePrefix e)
im = uiInput ui
writeIORef (inFocus w) True -- see also 'updateWindow'
update (textview w) widgetIsFocus True
update (modeline w) labelText ml
writeIORef (fullTitle t) bufferName
writeIORef (abbrevTitle t) (tabAbbrevTitle bufferName)
drawW <- catch (fmap Just $ widgetGetDrawWindow $ textview w)
(\(_ :: SomeException) -> return Nothing)
imContextSetClientWindow im drawW
imContextFocusIn im
getWinInfo :: UI -> WindowRef -> IO WinInfo
getWinInfo ui ref =
let tabLoop [] = error "Yi.UI.Pango.getWinInfo: window not found"
tabLoop (t:ts) = do
wCache <- readIORef (windowCache t)
case M.lookup ref wCache of
Just w -> return w
Nothing -> tabLoop ts
in readIORef (tabCache ui) >>= (tabLoop . toList)
-- | Make the cache from the editor and the action channel
newCache :: Editor -> (Action -> IO ()) -> IO TabCache
newCache e actionCh = mapM (mkDummyTab actionCh) (e ^. tabsA)
-- | Make a new tab, and populate it
newTab :: Editor -> UI -> Tab -> IO TabInfo
newTab e ui tab = do
t <- mkDummyTab (uiActionCh ui) tab
updateTabInfo e ui tab t
return t
-- | Make a minimal new tab, without any windows.
-- This is just for bootstrapping the UI; 'newTab' should normally
-- be called instead.
mkDummyTab :: (Action -> IO ()) -> Tab -> IO TabInfo
mkDummyTab actionCh tab = do
ws <- newIORef M.empty
ld <- layoutDisplayNew
layoutDisplayOnDividerMove ld (handleDividerMove actionCh)
mwp <- miniwindowDisplayNew
tw <- vBoxNew False 0
set tw [containerChild := baseWidget ld,
containerChild := baseWidget mwp,
boxChildPacking (baseWidget ld) := PackGrow,
boxChildPacking (baseWidget mwp) := PackNatural]
ftRef <- newIORef ""
atRef <- newIORef ""
return (TabInfo (tkey tab) ld mwp (toWidget tw) ws ftRef atRef)
-- | Make a new window.
newWindow :: Editor -> UI -> Window -> IO WinInfo
newWindow e ui w = do
let b = findBufferWith (bufkey w) e
f <- readIORef (uiFont ui)
ml <- labelNew (Nothing :: Maybe Text)
widgetModifyFont ml (Just f)
set ml [ miscXalign := 0.01 ] -- so the text is left-justified.
-- allow the modeline to be covered up, horizontally
widgetSetSizeRequest ml 0 (-1)
v <- drawingAreaNew
widgetModifyFont v (Just f)
widgetAddEvents v [Button1MotionMask]
widgetModifyBg v StateNormal . mkCol False . Yi.Style.background
. baseAttributes . configStyle $ uiConfig ui
sw <- scrolledWindowNew Nothing Nothing
scrolledWindowAddWithViewport sw v
scrolledWindowSetPolicy sw PolicyAutomatic PolicyNever
box <- if isMini w
then do
prompt <- labelNew (Just $ miniIdentString b)
widgetModifyFont prompt (Just f)
hb <- hBoxNew False 1
set hb [ containerChild := prompt,
containerChild := sw,
boxChildPacking prompt := PackNatural,
boxChildPacking sw := PackGrow]
return (castToBox hb)
else do
vb <- vBoxNew False 1
set vb [ containerChild := sw,
containerChild := ml,
boxChildPacking ml := PackNatural]
return (castToBox vb)
tosRef <- newIORef (askBuffer w b (use . markPointA
=<< fromMark <$> askMarks))
context <- widgetCreatePangoContext v
layout <- layoutEmpty context
layoutRef <- newMVar (WinLayoutInfo layout 0 0 0 0
(findBufferWith (bufkey w) e) Nothing)
language <- contextGetLanguage context
metrics <- contextGetMetrics context f language
ifLButton <- newIORef False
imode <- newIORef False
focused <- newIORef False
winRef <- newIORef w
layoutSetFontDescription layout (Just f)
-- stops layoutGetText crashing (as of gtk2hs 0.10.1)
layoutSetText layout T.empty
let ref = wkey w
win = WinInfo { coreWinKey = ref
, coreWin = winRef
, winLayoutInfo = layoutRef
, winMetrics = metrics
, textview = v
, modeline = ml
, winWidget = toWidget box
, shownTos = tosRef
, lButtonPressed = ifLButton
, insertingMode = imode
, inFocus = focused
}
updateWindow e ui w win
v `on` buttonPressEvent $ handleButtonClick ui ref
v `on` buttonReleaseEvent $ handleButtonRelease ui win
v `on` scrollEvent $ handleScroll ui win
-- todo: allocate event rather than configure?
v `on` configureEvent $ handleConfigure ui
v `on` motionNotifyEvent $ handleMove ui win
void $ v `onExpose` render ui win
-- also redraw when the window receives/loses focus
uiWindow ui `on` focusInEvent $ io (widgetQueueDraw v) >> return False
uiWindow ui `on` focusOutEvent $ io (widgetQueueDraw v) >> return False
-- todo: consider adding an 'isDirty' flag to WinLayoutInfo,
-- so that we don't have to recompute the Attributes when focus changes.
return win
refresh :: UI -> Editor -> IO ()
refresh ui e = do
postGUIAsync $ do
contextId <- statusbarGetContextId (uiStatusbar ui) ("global" :: T.Text)
statusbarPop (uiStatusbar ui) contextId
void $ statusbarPush (uiStatusbar ui) contextId $ T.intercalate " " $
statusLine e
updateCache ui e -- The cursor may have changed since doLayout
cache <- readIORef $ tabCache ui
forM_ cache $ \t -> do
wCache <- readIORef (windowCache t)
forM_ wCache $ \w -> do
updateWinInfoForRendering e ui w
widgetQueueDraw (textview w)
-- | Record all the information we need for rendering.
--
-- This information is kept in an MVar so that the PangoLayout and
-- tos/bos/buffer are in sync.
updateWinInfoForRendering :: Editor -> UI -> WinInfo -> IO ()
updateWinInfoForRendering e _ui w = modifyMVar_ (winLayoutInfo w) $ \wli -> do
win <- readIORef (coreWin w)
return $! wli{buffer=findBufferWith (bufkey win) e,regex=currentRegex e}
-- | Tell the 'PangoLayout' what colours to draw, and draw the 'PangoLayout'
-- and the cursor onto the screen
render :: UI -> WinInfo -> t -> IO Bool
render ui w _event =
withMVar (winLayoutInfo w) $
\WinLayoutInfo{winLayout=layout,tos,bos,cur,buffer=b,regex} -> do
-- read the information
win <- readIORef (coreWin w)
-- add color attributes.
let picture = askBuffer win b $ attributesPictureAndSelB sty regex
(mkRegion tos bos)
sty = configStyle $ uiConfig ui
picZip = zip picture $ drop 1 (fst <$> picture) <> [bos]
strokes = [ (start',s,end') | ((start', s), end') <- picZip
, s /= emptyAttributes ]
rel p = fromIntegral (p - tos)
allAttrs = concat $ do
(p1, Attributes fg bg _rv bd itlc udrl, p2) <- strokes
let atr x = x (rel p1) (rel p2)
if' p x y = if p then x else y
return [ atr AttrForeground $ mkCol True fg
, atr AttrBackground $ mkCol False bg
, atr AttrStyle $ if' itlc StyleItalic StyleNormal
, atr AttrUnderline $ if' udrl UnderlineSingle UnderlineNone
, atr AttrWeight $ if' bd WeightBold WeightNormal
]
layoutSetAttributes layout allAttrs
drawWindow <- widgetGetDrawWindow $ textview w
gc <- gcNew drawWindow
-- see Note [PangoLayout width]
-- draw the layout
drawLayout drawWindow gc 1 0 layout
-- calculate the cursor position
im <- readIORef (insertingMode w)
-- check focus, and decide whether we want a wide cursor
bufferFocused <- readIORef (inFocus w)
uiFocused <- Gtk.windowHasToplevelFocus (uiWindow ui)
let focused = bufferFocused && uiFocused
wideCursor =
case configCursorStyle (uiConfig ui) of
AlwaysFat -> True
NeverFat -> False
FatWhenFocused -> focused
FatWhenFocusedAndInserting -> focused && im
(PangoRectangle (succ -> curX) curY curW curH, _) <-
layoutGetCursorPos layout (rel cur)
-- tell the input method
imContextSetCursorLocation (uiInput ui) $
Rectangle (round curX) (round curY) (round curW) (round curH)
-- paint the cursor
gcSetValues gc
(newGCValues { Gtk.foreground = mkCol True . Yi.Style.foreground
. baseAttributes . configStyle $
uiConfig ui
, Gtk.lineWidth = if wideCursor then 2 else 1 })
-- tell the renderer
if im
then -- if we are inserting, we just want a line
drawLine drawWindow gc (round curX, round curY)
(round $ curX + curW, round $ curY + curH)
-- we aren't inserting, we want a rectangle around the current character
else do
PangoRectangle (succ -> chx) chy chw chh <- layoutIndexToPos
layout (rel cur)
drawRectangle drawWindow gc False (round chx) (round chy)
(if chw > 0 then round chw else 8) (round chh)
return True
doLayout :: UI -> Editor -> IO Editor
doLayout ui e = do
updateCache ui e
tabs <- readIORef $ tabCache ui
f <- readIORef (uiFont ui)
dims <- fold <$> mapM (getDimensionsInTab ui f e) tabs
let e' = (tabsA %~ fmap (mapWindows updateWin)) e
updateWin w = case M.lookup (wkey w) dims of
Nothing -> w
Just (wi,h,rgn) -> w { width = wi, height = h, winRegion = rgn }
-- Don't leak references to old Windows
let forceWin x w = height w `seq` winRegion w `seq` x
return $ (foldl . tabFoldl) forceWin e' (e' ^. tabsA)
-- | Width, Height
getDimensionsInTab :: UI -> FontDescription -> Editor
-> TabInfo -> IO (M.Map WindowRef (Int,Int,Region))
getDimensionsInTab ui f e tab = do
wCache <- readIORef (windowCache tab)
forM wCache $ \wi -> do
(wid, h) <- widgetGetSize $ textview wi
win <- readIORef (coreWin wi)
let metrics = winMetrics wi
lineHeight = ascent metrics + descent metrics
charWidth = max (approximateCharWidth metrics) (approximateDigitWidth metrics)
width = round $ fromIntegral wid / charWidth - 1
height = round $ fromIntegral h / lineHeight
b0 = findBufferWith (bufkey win) e
rgn <- shownRegion ui f wi b0
return (width, height, rgn)
shownRegion :: UI -> FontDescription -> WinInfo -> FBuffer -> IO Region
shownRegion ui f w b = modifyMVar (winLayoutInfo w) $ \wli -> do
(tos, cur, bos, bufEnd) <- updatePango ui f w b (winLayout wli)
return (wli{tos,cur=clampTo tos bos cur,bos,bufEnd}, mkRegion tos bos)
where clampTo lo hi x = max lo (min hi x)
-- during scrolling, cur might not lie between tos and bos,
-- so we clamp it to avoid Pango errors
{-|
== Note [PangoLayout width]
We start rendering the PangoLayout one pixel from the left of the
rendering area, which means a few +/-1 offsets in Pango rendering and
point lookup code. The reason for this is to support the "wide
cursor", which is 2 pixels wide. If we started rendering the
PangoLayout directly from the left of the rendering area instead of at
a 1-pixel offset, then the "wide cursor" would only be half-displayed
when the cursor is at the beginning of the line, and would then be a
"thin cursor".
An alternative would be to special-case the wide cursor rendering at
the beginning of the line, and draw it one pixel to the right of where
it "should" be. I haven't tried this out to see how it looks.
Reiner
-}
-- we update the regex and the buffer to avoid holding on to potential garbage.
-- These will be overwritten with correct values soon, in
-- updateWinInfoForRendering.
updatePango :: UI -> FontDescription -> WinInfo -> FBuffer
-> PangoLayout -> IO (Point, Point, Point, Point)
updatePango ui font w b layout = do
(width_', height') <- widgetGetSize $ textview w
let width' = max 0 (width_' - 1) -- see Note [PangoLayout width]
fontDescriptionToStringT :: FontDescription -> IO Text
fontDescriptionToStringT = fontDescriptionToString
-- Resize (and possibly copy) the currently used font.
curFont <- case fromIntegral <$> configFontSize (uiConfig ui) of
Nothing -> return font
Just defSize -> fontDescriptionGetSize font >>= \case
Nothing -> fontDescriptionSetSize font defSize >> return font
Just currentSize -> let fsv = fontsizeVariation $ attributes b
newSize = max 1 (fromIntegral fsv + defSize) in
if newSize == currentSize
then return font
else do
-- This seems like it would be very expensive, but it is
-- justified by the fact that it only gets run once per font
-- size change. If the font size stays the same, we only
-- enter this once per layout. We're effectively copying
-- the default font for each layout that changes. An
-- alternative would be to assign each buffer its own font,
-- but that seems a pain to maintain and, if the user never
-- changes font sizes, it's a waste of memory.
nf <- fontDescriptionCopy font
fontDescriptionSetSize nf newSize
return nf
oldFont <- layoutGetFontDescription layout
oldFontStr <- maybe (return Nothing)
(fmap Just . fontDescriptionToStringT) oldFont
newFontStr <- Just <$> fontDescriptionToStringT curFont
when (oldFontStr /= newFontStr) $
layoutSetFontDescription layout (Just curFont)
win <- readIORef (coreWin w)
let [width'', height''] = fmap fromIntegral [width', height']
metrics = winMetrics w
lineHeight = ascent metrics + descent metrics
charWidth = max (approximateCharWidth metrics)
(approximateDigitWidth metrics)
winw = max 1 $ floor (width'' / charWidth)
winh = max 1 $ floor (height'' / lineHeight)
maxChars = winw * winh
conf = uiConfig ui
(tos, size, point, text) = askBuffer win b $ do
from <- use . markPointA =<< fromMark <$> askMarks
rope <- streamB Forward from
p <- pointB
bufEnd <- sizeB
let content = takeContent conf maxChars . fst $ R.splitAtLine winh rope
-- allow BOS offset to be just after the last line
let addNL = if R.countNewLines content == winh
then id
else (`R.snoc` '\n')
return (from, bufEnd, p, R.toText $ addNL content)
if configLineWrap conf
then wrapToWidth layout WrapAnywhere width''
else do
(Rectangle px _py pwidth _pheight, _) <- layoutGetPixelExtents layout
widgetSetSizeRequest (textview w) (px+pwidth) (-1)
-- optimize for cursor movement
oldText <- layoutGetText layout
when (oldText /= text) (layoutSetText layout text)
(_, bosOffset, _) <- layoutXYToIndex layout width''
(fromIntegral winh * lineHeight - 1)
return (tos, point, tos + fromIntegral bosOffset + 1, size)
-- | This is a hack that makes this renderer not suck in the common
-- case. There are two scenarios: we're line wrapping or we're not
-- line wrapping. This function already assumes that the contents
-- given have all the possible lines we can fit on the screen.
--
-- If we are line wrapping then the most text we'll ever need to
-- render is precisely the number of characters that can fit on the
-- screen. If that's the case, that's precisely what we do, truncate
-- up to the point where the text would be off-screen anyway.
--
-- If we aren't line-wrapping then we can't simply truncate at the max
-- number of characters: lines might be really long, but considering
-- we're not truncating, we should still be able to see every single
-- line that can fit on screen up to the screen bound. This suggests
-- that we could simply render each line up to the bound. While this
-- does work wonders for performance and would work regardless whether
-- we're wrapping or not, currently our implementation of the rest of
-- the module depends on all characters used being set into the
-- layout: if we cut some text off, painting strokes on top or going
-- to the end makes for strange effects. So currently we have no
-- choice but to render all characters in the visible lines. If you
-- have really long lines, this will kill the performance.
--
-- So here we implement the hack for the line-wrapping case. Once we
-- fix stroke painting &c, this distinction can be removed and we can
-- simply snip at the screen boundary whether we're wrapping or not
-- which actually results in great performance in the end. Until that
-- happens, only the line-wrapping case doesn't suck. Fortunately it
-- is the default.
takeContent :: UIConfig -> Int -> R.YiString -> R.YiString
takeContent cf cl t = if configLineWrap cf
then R.take cl t
else t
-- | Wraps the layout according to the given 'LayoutWrapMode', using
-- the specified width.
--
-- Unlike the previous implementation, this one actually implements
-- wrapping properly (it used to be broken).
wrapToWidth :: PangoLayout -> LayoutWrapMode -> Double -> IO ()
wrapToWidth l wm w = do
layoutGetWrap l >>= \wr -> case (wr, wm) of
-- No Eq instance…
(WrapWholeWords, WrapWholeWords) -> return ()
(WrapAnywhere, WrapAnywhere) -> return ()
(WrapPartialWords, WrapPartialWords) -> return ()
_ -> layoutSetWrap l wm
layoutGetWidth l >>= \case
Just x | x == w -> return ()
_ -> layoutSetWidth l (Just w)
reloadProject :: IO ()
reloadProject = return ()
mkCol :: Bool -- ^ is foreground?
-> Yi.Style.Color -> Gtk.Color
mkCol True Default = Color 0 0 0
mkCol False Default = Color maxBound maxBound maxBound
mkCol _ (RGB x y z) = Color (fromIntegral x * 256)
(fromIntegral y * 256)
(fromIntegral z * 256)
-- * GTK Event handlers
-- | Process GTK keypress if IM fails
handleKeypress :: ([Event] -> IO ()) -- ^ Event dispatcher (Yi.Core.dispatch)
-> IMContext
-> EventM EKey Bool
handleKeypress ch im = do
gtkMods <- eventModifier
gtkKey <- eventKeyVal
ifIM <- imContextFilterKeypress im
let char = keyToChar gtkKey
modsWithShift = M.keys $ M.filter (`elem` gtkMods) modTable
mods | isJust char = filter (/= MShift) modsWithShift
| otherwise = modsWithShift
key = case char of
Just c -> Just $ KASCII c
Nothing -> M.lookup (keyName gtkKey) keyTable
case (ifIM, key) of
(True, _ ) -> return ()
(_, Nothing) -> logPutStrLn $ "Event not translatable: " <> showT key
(_, Just k ) -> io $ ch [Event k mods]
return True
-- | Map Yi modifiers to GTK
modTable :: M.Map Modifier EventM.Modifier
modTable = M.fromList
[ (MShift, EventM.Shift )
, (MCtrl, EventM.Control)
, (MMeta, EventM.Alt )
, (MSuper, EventM.Super )
, (MHyper, EventM.Hyper )
]
-- | Same as Gtk.on, but discards the ConnectId
on :: object -> Signal object callback -> callback -> IO ()
on widget signal handler = void $ Gtk.on widget signal handler
handleButtonClick :: UI -> WindowRef -> EventM EButton Bool
handleButtonClick ui ref = do
(x, y) <- eventCoordinates
click <- eventClick
button <- eventButton
io $ do
w <- getWinInfo ui ref
point <- pointToOffset (x, y) w
let focusWindow = focusWindowE ref
runAction = uiActionCh ui . makeAction
runAction focusWindow
win <- io $ readIORef (coreWin w)
let selectRegion tu = runAction $ do
b <- gets $ bkey . findBufferWith (bufkey win)
withGivenBufferAndWindow win b $
moveTo point >> regionOfB tu >>= setSelectRegionB
case (click, button) of
(SingleClick, LeftButton) -> do
io $ writeIORef (lButtonPressed w) True
runAction $ do
b <- gets $ bkey . findBufferWith (bufkey win)
withGivenBufferAndWindow win b $ do
m <- selMark <$> askMarks
markPointA m .= point
moveTo point
setVisibleSelection False
(DoubleClick, LeftButton) -> selectRegion unitWord
(TripleClick, LeftButton) -> selectRegion Line
_ -> return ()
return True
handleButtonRelease :: UI -> WinInfo -> EventM EButton Bool
handleButtonRelease ui w = do
(x, y) <- eventCoordinates
button <- eventButton
io $ do
point <- pointToOffset (x, y) w
disp <- widgetGetDisplay $ textview w
cb <- clipboardGetForDisplay disp selectionPrimary
case button of
MiddleButton -> pasteSelectionClipboard ui w point cb
LeftButton -> setSelectionClipboard ui w cb >>
writeIORef (lButtonPressed w) False
_ -> return ()
return True
handleScroll :: UI -> WinInfo -> EventM EScroll Bool
handleScroll ui w = do
scrollDirection <- eventScrollDirection
xy <- eventCoordinates
io $ do
ifPressed <- readIORef $ lButtonPressed w
-- query new coordinates
let editorAction =
withCurrentBuffer $ scrollB $ case scrollDirection of
ScrollUp -> negate configAmount
ScrollDown -> configAmount
_ -> 0 -- Left/right scrolling not supported
configAmount = configScrollWheelAmount $ uiConfig ui
uiActionCh ui (EditorA editorAction)
when ifPressed $ selectArea ui w xy
return True
handleConfigure :: UI -> EventM EConfigure Bool
handleConfigure ui = do
-- trigger a layout
-- why does this cause a hang without postGUIAsync?
io $ postGUIAsync $ uiActionCh ui (makeAction (return () :: EditorM()))
return False -- allow event to be propagated
handleMove :: UI -> WinInfo -> EventM EMotion Bool
handleMove ui w = eventCoordinates >>= (io . selectArea ui w) >>
return True
handleDividerMove :: (Action -> IO ()) -> DividerRef -> DividerPosition -> IO ()
handleDividerMove actionCh ref pos =
actionCh (makeAction (setDividerPosE ref pos))
-- | Convert point coordinates to offset in Yi window
pointToOffset :: (Double, Double) -> WinInfo -> IO Point
pointToOffset (x,y) w =
withMVar (winLayoutInfo w) $ \WinLayoutInfo{winLayout,tos,bufEnd} -> do
im <- readIORef (insertingMode w)
-- see Note [PangoLayout width]
(_, charOffsetX, extra) <- layoutXYToIndex winLayout (max 0 (x-1)) y
return $ min bufEnd (tos + fromIntegral
(charOffsetX + if im then extra else 0))
selectArea :: UI -> WinInfo -> (Double, Double) -> IO ()
selectArea ui w (x,y) = do
p <- pointToOffset (x,y) w
let editorAction = do
txt <- withCurrentBuffer $ do
moveTo p
setVisibleSelection True
readRegionB =<< getSelectRegionB
setRegE txt
uiActionCh ui (makeAction editorAction)
-- drawWindowGetPointer (textview w) -- be ready for next message.
pasteSelectionClipboard :: UI -> WinInfo -> Point -> Clipboard -> IO ()
pasteSelectionClipboard ui w p cb = do
win <- io $ readIORef (coreWin w)
let cbHandler :: Maybe R.YiString -> IO ()
cbHandler Nothing = return ()
cbHandler (Just txt) = uiActionCh ui $ EditorA $ do
b <- gets $ bkey . findBufferWith (bufkey win)
withGivenBufferAndWindow win b $ do
pointB >>= setSelectionMarkPointB
moveTo p
insertN txt
clipboardRequestText cb (cbHandler . fmap R.fromText)
-- | Set selection clipboard contents to current selection
setSelectionClipboard :: UI -> WinInfo -> Clipboard -> IO ()
setSelectionClipboard ui _w cb = do
-- Why doesn't uiActionCh allow returning values?
selection <- newIORef mempty
let yiAction = do
txt <- withCurrentBuffer $
fmap R.toText . readRegionB =<< getSelectRegionB :: YiM T.Text
io $ writeIORef selection txt
uiActionCh ui $ makeAction yiAction
txt <- readIORef selection
unless (T.null txt) $ clipboardSetText cb txt
|
noughtmare/yi
|
yi-frontend-pango/src/Yi/Frontend/Pango.hs
|
gpl-2.0
| 35,005
| 260
| 20
| 9,538
| 9,015
| 4,638
| 4,377
| 654
| 8
|
-- | A circuit is a standard one of among many ways of representing a
-- propositional logic formula. This module provides a flexible circuit type
-- class and various representations that admit efficient conversion to funsat
-- CNF.
--
-- The implementation for this module was adapted from
-- <http://okmij.org/ftp/Haskell/DSLSharing.hs>.
module Funsat.Circuit
(
-- ** Circuit type class
Circuit(..)
, CastCircuit(..)
-- ** Explicit sharing circuit
, Shared(..)
, FrozenShared(..)
, runShared
, CircuitHash
, falseHash
, trueHash
, CCode(..)
, CMaps(..)
, emptyCMaps
-- ** Explicit tree circuit
, Tree(..)
, foldTree
-- *** Circuit simplification
, simplifyTree
-- ** Explicit graph circuit
, Graph
, runGraph
, shareGraph
, NodeType(..)
, EdgeType(..)
-- ** Circuit evaluator
, BEnv
, Eval(..)
, runEval
-- ** Convert circuit to CNF
, CircuitProblem(..)
, toCNF
, projectCircuitSolution
)
where
{-
This file is part of funsat.
funsat is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
funsat is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with funsat. If not, see <http://www.gnu.org/licenses/>.
Copyright 2008 Denis Bueno
-}
import Control.Monad.Reader
import Control.Monad.State.Strict hiding ((>=>), forM_)
import Data.Bimap( Bimap )
import Data.List( nub )
import Data.Map( Map )
import Data.Maybe()
import Data.Ord()
import Data.Set( Set )
import Funsat.Types( CNF(..), Lit(..), Var(..), var, lit, Solution(..), litSign, litAssignment )
import Prelude hiding( not, and, or )
import qualified Data.Bimap as Bimap
import qualified Data.Foldable as Foldable
import qualified Data.Graph.Inductive.Graph as Graph
import qualified Data.Graph.Inductive.Graph as G
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Prelude as Prelude
-- * Circuit representation
-- | A class representing a grammar for logical circuits. Default
-- implementations are indicated.
class Circuit repr where
true :: (Ord var, Show var) => repr var
false :: (Ord var, Show var) => repr var
input :: (Ord var, Show var) => var -> repr var
not :: (Ord var, Show var) => repr var -> repr var
-- | Defined as @`and' p q = not (not p `or` not q)@.
and :: (Ord var, Show var) => repr var -> repr var -> repr var
and p q = not (not p `or` not q)
-- | Defined as @`or' p q = not (not p `and` not q)@.
or :: (Ord var, Show var) => repr var -> repr var -> repr var
or p q = not (not p `and` not q)
-- | If-then-else circuit. @ite c t e@ returns a circuit that evaluates to
-- @t@ when @c@ evaluates to true, and @e@ otherwise.
--
-- Defined as @(c `and` t) `or` (not c `and` f)@.
ite :: (Ord var, Show var) => repr var -> repr var -> repr var -> repr var
ite c t f = (c `and` t) `or` (not c `and` f)
-- | Defined as @`onlyif' p q = not p `or` q@.
onlyif :: (Ord var, Show var) => repr var -> repr var -> repr var
onlyif p q = not p `or` q
-- | Defined as @`iff' p q = (p `onlyif` q) `and` (q `onlyif` p)@.
iff :: (Ord var, Show var) => repr var -> repr var -> repr var
iff p q = (p `onlyif` q) `and` (q `onlyif` p)
-- | Defined as @`xor' p q = (p `or` q) `and` not (p `and` q)@.
xor :: (Ord var, Show var) => repr var -> repr var -> repr var
xor p q = (p `or` q) `and` not (p `and` q)
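-- A small usage sketch (not part of the original module; exampleFormula is
-- an assumed name added for illustration). Because it is written against
-- the 'Circuit' class, the same formula can be instantiated at any of the
-- representations below ('Shared', 'Tree', 'Eval', 'Graph').
exampleFormula :: (Circuit repr) => repr String
exampleFormula = (input "x" `and` input "y") `or` not (input "z")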
-- | Instances of `CastCircuit' admit converting one circuit representation to
-- another.
class CastCircuit c where
castCircuit :: (Circuit cOut, Ord var, Show var) => c var -> cOut var
-- ** Explicit sharing circuit
-- The following business is for elimination of common subexpressions from
-- boolean functions. Part of conversion to CNF.
-- | A `Circuit' constructed using common-subexpression elimination. This is a
-- compact representation that facilitates converting to CNF. See `runShared'.
newtype Shared v = Shared { unShared :: State (CMaps v) CCode }
-- | A shared circuit that has already been constructed.
data FrozenShared v = FrozenShared !CCode !(CMaps v) deriving (Eq, Show)
-- | Reify a sharing circuit.
runShared :: Shared v -> FrozenShared v
runShared = uncurry FrozenShared . (`runState` emptyCMaps) . unShared
instance CastCircuit Shared where
castCircuit = castCircuit . runShared
instance CastCircuit FrozenShared where
castCircuit (FrozenShared code maps) = go code
where
go (CTrue{}) = true
go (CFalse{}) = false
go c@(CVar{}) = input $ getChildren c (varMap maps)
go c@(CAnd{}) = uncurry and . go2 $ getChildren c (andMap maps)
go c@(COr{}) = uncurry or . go2 $ getChildren c (orMap maps)
go c@(CNot{}) = not . go $ getChildren c (notMap maps)
go c@(CXor{}) = uncurry xor . go2 $ getChildren c (xorMap maps)
go c@(COnlyif{}) = uncurry onlyif . go2 $ getChildren c (onlyifMap maps)
go c@(CIff{}) = uncurry iff . go2 $ getChildren c (iffMap maps)
go c@(CIte{}) = uncurry3 ite . go3 $ getChildren c (iteMap maps)
go2 = (go `onTup`)
go3 (x, y, z) = (go x, go y, go z)
uncurry3 f (x, y, z) = f x y z
getChildren :: (Ord v) => CCode -> Bimap CircuitHash v -> v
getChildren code codeMap =
case Bimap.lookup (circuitHash code) codeMap of
Nothing -> findError
Just c -> c
where findError = error $ "getChildren: unknown code: " ++ show code
-- | 0 is false, 1 is true. Any positive value labels a logical circuit node.
type CircuitHash = Int
falseHash, trueHash :: CircuitHash
falseHash = 0
trueHash = 1
-- | A `CCode' represents a circuit element for `Shared' circuits. A `CCode' is
-- a flattened tree node which has been assigned a unique number in the
-- corresponding map inside `CMaps', which indicates children, if any.
--
-- For example, @CAnd i@ has the two children of the tuple @lookup i (andMap
-- cmaps)@ assuming @cmaps :: CMaps v@.
data CCode = CTrue { circuitHash :: !CircuitHash }
| CFalse { circuitHash :: !CircuitHash }
| CVar { circuitHash :: !CircuitHash }
| CAnd { circuitHash :: !CircuitHash }
| COr { circuitHash :: !CircuitHash }
| CNot { circuitHash :: !CircuitHash }
| CXor { circuitHash :: !CircuitHash }
| COnlyif { circuitHash :: !CircuitHash }
| CIff { circuitHash :: !CircuitHash }
| CIte { circuitHash :: !CircuitHash }
deriving (Eq, Ord, Show, Read)
-- | Maps used to implement the common-subexpression sharing implementation of
-- the `Circuit' class. See `Shared'.
data CMaps v = CMaps
{ hashCount :: [CircuitHash]
-- ^ Source of unique IDs used in `Shared' circuit generation. Should not
-- include 0 or 1.
, varMap :: Bimap CircuitHash v
-- ^ Mapping of generated integer IDs to variables.
, andMap :: Bimap CircuitHash (CCode, CCode)
, orMap :: Bimap CircuitHash (CCode, CCode)
, notMap :: Bimap CircuitHash CCode
, xorMap :: Bimap CircuitHash (CCode, CCode)
, onlyifMap :: Bimap CircuitHash (CCode, CCode)
, iffMap :: Bimap CircuitHash (CCode, CCode)
, iteMap :: Bimap CircuitHash (CCode, CCode, CCode) }
deriving (Eq, Show)
-- | A `CMaps' with an initial `hashCount' of 2.
emptyCMaps :: CMaps v
emptyCMaps = CMaps
{ hashCount = [2 ..]
, varMap = Bimap.empty
, andMap = Bimap.empty
, orMap = Bimap.empty
, notMap = Bimap.empty
, xorMap = Bimap.empty
, onlyifMap = Bimap.empty
, iffMap = Bimap.empty
, iteMap = Bimap.empty }
-- | Find key mapping to given value.
lookupv :: Ord v => v -> Bimap Int v -> Maybe Int
lookupv = Bimap.lookupR
-- prj: "projects relevant map out of state"
-- upd: "stores new map back in state"
recordC :: (Ord a) =>
(CircuitHash -> b)
-> (CMaps v -> Bimap Int a) -- ^ prj
-> (CMaps v -> Bimap Int a -> CMaps v) -- ^ upd
-> a
-> State (CMaps v) b
recordC _ _ _ x | x `seq` False = undefined
recordC cons prj upd x = do
s <- get
c:cs <- gets hashCount
maybe (do let s' = upd (s{ hashCount = cs })
(Bimap.insert c x (prj s))
put s'
-- trace "updating map" (return ())
return (cons c))
(return . cons) $ lookupv x (prj s)
instance Circuit Shared where
false = Shared . return $ CFalse falseHash
true = Shared . return $ CTrue trueHash
input v = Shared $ recordC CVar varMap (\s e -> s{ varMap = e }) v
and e1 e2 = Shared $ do
hl <- unShared e1
hr <- unShared e2
recordC CAnd andMap (\s e -> s{ andMap = e}) (hl, hr)
or e1 e2 = Shared $ do
hl <- unShared e1
hr <- unShared e2
recordC COr orMap (\s e -> s{ orMap = e }) (hl, hr)
not e = Shared $ do
h <- unShared e
recordC CNot notMap (\s e' -> s{ notMap = e' }) h
xor l r = Shared $ do
hl <- unShared l ; hr <- unShared r
recordC CXor xorMap (\s e' -> s{ xorMap = e' }) (hl, hr)
iff l r = Shared $ do
hl <- unShared l ; hr <- unShared r
recordC CIff iffMap (\s e' -> s{ iffMap = e' }) (hl, hr)
onlyif l r = Shared $ do
hl <- unShared l ; hr <- unShared r
recordC COnlyif onlyifMap (\s e' -> s{ onlyifMap = e' }) (hl, hr)
ite x t e = Shared $ do
hx <- unShared x
ht <- unShared t ; he <- unShared e
recordC CIte iteMap (\s e' -> s{ iteMap = e' }) (hx, ht, he)
{-
-- | An And-Inverter graph edge may complement its input.
data AIGEdge = AIGPos | AIGNeg
type AIGGr g v = g (Maybe v) AIGEdge
-- | * 0 is the output.
data AndInverterGraph gr v = AIG
{ aigGraph :: AIGGr gr v
-- ^ Node 0 is the output node. Node 1 is hardwired with a 'true' input.
-- The edge from Node 1 to 0 may or may not be complemented.
, aigInputs :: [G.Node]
-- ^ Node 1 is always an input set to true.
}
instance (G.Graph gr, Show v, Ord v) => Monoid (AndInverterGraph gr v) where
mempty = true
mappend a1 a2 =
AIG{ aigGraph = mergedGraph
, aigInputs = nub (aigInputs a1 ++ aigInputs a2) }
where
mergedGraph = G.mkGraph
(G.labNodes (aigGraph a1) ++ G.labNodes (aigGraph a2))
(G.labEdges (aigGraph a1) ++ G.labEdges (aigGraph a2))
instance (G.Graph gr) => Circuit (AndInverterGraph gr) where
true = AIG{ aigGraph = G.mkGraph [(0,Nothing), (1,Nothing)] [(1, 0, AIGPos)]
, aigInputs = [1] }
false = AIG{ aigGraph = G.mkGraph [(0,Nothing), (1,Nothing)] [(1, 0, AIGNeg)]
, aigInputs = [1] }
input v = let [n] = G.newNodes 1 true
in AIG{ aigGraph = G.insNode (n, Just v) true
, aigInputs = [n, 1] }
-}
-- and l r = let g' = l `mappend` r
-- [n] = G.newNodes 1 g'
-- in G.insNode (n, Nothing)
-- ** Explicit tree circuit
-- | Explicit tree representation, which is a generic description of a circuit.
-- This representation enables a conversion operation to any other type of
-- circuit. Trees evaluate from variable values at the leaves to the root.
data Tree v = TTrue
| TFalse
| TLeaf v
| TNot (Tree v)
| TAnd (Tree v) (Tree v)
| TOr (Tree v) (Tree v)
| TXor (Tree v) (Tree v)
| TIff (Tree v) (Tree v)
| TOnlyIf (Tree v) (Tree v)
| TIte (Tree v) (Tree v) (Tree v)
deriving (Show, Eq, Ord)
foldTree :: (t -> v -> t) -> t -> Tree v -> t
foldTree _ i TTrue = i
foldTree _ i TFalse = i
foldTree f i (TLeaf v) = f i v
foldTree f i (TAnd t1 t2) = foldTree f (foldTree f i t1) t2
foldTree f i (TOr t1 t2) = foldTree f (foldTree f i t1) t2
foldTree f i (TNot t) = foldTree f i t
foldTree f i (TXor t1 t2) = foldTree f (foldTree f i t1) t2
foldTree f i (TIff t1 t2) = foldTree f (foldTree f i t1) t2
foldTree f i (TOnlyIf t1 t2) = foldTree f (foldTree f i t1) t2
foldTree f i (TIte x t e) = foldTree f (foldTree f (foldTree f i x) t) e
instance Circuit Tree where
true = TTrue
false = TFalse
input = TLeaf
and = TAnd
or = TOr
not = TNot
xor = TXor
iff = TIff
onlyif = TOnlyIf
ite = TIte
instance CastCircuit Tree where
castCircuit TTrue = true
castCircuit TFalse = false
castCircuit (TLeaf l) = input l
castCircuit (TAnd t1 t2) = and (castCircuit t1) (castCircuit t2)
castCircuit (TOr t1 t2) = or (castCircuit t1) (castCircuit t2)
castCircuit (TXor t1 t2) = xor (castCircuit t1) (castCircuit t2)
castCircuit (TNot t) = not (castCircuit t)
castCircuit (TIff t1 t2) = iff (castCircuit t1) (castCircuit t2)
castCircuit (TOnlyIf t1 t2) = onlyif (castCircuit t1) (castCircuit t2)
castCircuit (TIte x t e) = ite (castCircuit x) (castCircuit t) (castCircuit e)
-- ** Circuit evaluator
type BEnv v = Map v Bool
-- | A circuit evaluator, that is, a circuit represented as a function from
-- variable values to booleans.
newtype Eval v = Eval { unEval :: BEnv v -> Bool }
-- | Evaluate a circuit given inputs.
runEval :: BEnv v -> Eval v -> Bool
runEval = flip unEval
instance Circuit Eval where
true = Eval $ const True
false = Eval $ const False
input v = Eval $ \env ->
Map.findWithDefault
(error $ "Eval: no such var: " ++ show v
++ " in " ++ show env)
v env
and c1 c2 = Eval (\env -> unEval c1 env && unEval c2 env)
or c1 c2 = Eval (\env -> unEval c1 env || unEval c2 env)
not c = Eval (\env -> Prelude.not $ unEval c env)
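-- A small sketch (evalExample is an assumed name, added for illustration):
-- evaluating (x AND y) OR NOT z under a concrete assignment with the 'Eval'
-- instance.
evalExample :: Bool
evalExample = runEval (Map.fromList [("x", True), ("y", True), ("z", True)])
                      ((input "x" `and` input "y") `or` not (input "z"))
-- evalExample == True: (True && True) || not True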
-- ** Graph circuit
-- | A circuit type that constructs a `G.Graph' representation. This is useful
-- for visualising circuits, for example using the @graphviz@ package.
newtype Graph v = Graph
{ unGraph :: State Graph.Node (Graph.Node,
[Graph.LNode (NodeType v)],
[Graph.LEdge EdgeType]) }
-- | Node type labels for graphs.
data NodeType v = NInput v
| NTrue
| NFalse
| NAnd
| NOr
| NNot
| NXor
| NIff
| NOnlyIf
| NIte
deriving (Eq, Ord, Show, Read)
data EdgeType = ETest -- ^ the edge is the condition for an `ite' element
| EThen -- ^ the edge is the /then/ branch for an `ite' element
| EElse -- ^ the edge is the /else/ branch for an `ite' element
| EVoid -- ^ no special annotation
deriving (Eq, Ord, Show, Read)
runGraph :: (G.DynGraph gr) => Graph v -> gr (NodeType v) EdgeType
runGraph graphBuilder =
let (_, nodes, edges) = evalState (unGraph graphBuilder) 1
in Graph.mkGraph nodes edges
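-- A small sketch (graphExample is an assumed name, added for illustration):
-- the same kind of formula as a labelled graph, e.g. for rendering with
-- graphviz. The caller picks the concrete 'G.DynGraph' instance.
graphExample :: G.DynGraph gr => gr (NodeType String) EdgeType
graphExample = runGraph ((input "x" `and` input "y") `or` not (input "z"))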
instance Circuit Graph where
input v = Graph $ do
n <- newNode
return $ (n, [(n, NInput v)], [])
true = Graph $ do
n <- newNode
return $ (n, [(n, NTrue)], [])
false = Graph $ do
n <- newNode
return $ (n, [(n, NFalse)], [])
not gs = Graph $ do
(node, nodes, edges) <- unGraph gs
n <- newNode
return (n, (n, NNot) : nodes, (node, n, EVoid) : edges)
and = binaryNode NAnd
or = binaryNode NOr
xor = binaryNode NXor
iff = binaryNode NIff
onlyif = binaryNode NOnlyIf
ite x t e = Graph $ do
(xNode, xNodes, xEdges) <- unGraph x
(tNode, tNodes, tEdges) <- unGraph t
(eNode, eNodes, eEdges) <- unGraph e
n <- newNode
return (n, (n, NIte) : xNodes ++ tNodes ++ eNodes
, (xNode, n, ETest) : (tNode, n, EThen) : (eNode, n, EElse)
: xEdges ++ tEdges ++ eEdges)
binaryNode :: NodeType v -> Graph v -> Graph v -> Graph v
{-# INLINE binaryNode #-}
binaryNode ty l r = Graph $ do
(lNode, lNodes, lEdges) <- unGraph l
(rNode, rNodes, rEdges) <- unGraph r
n <- newNode
return (n, (n, ty) : lNodes ++ rNodes,
(lNode, n, EVoid) : (rNode, n, EVoid) : lEdges ++ rEdges)
newNode :: State Graph.Node Graph.Node
newNode = do i <- get ; put (succ i) ; return i
{-
defaultNodeAnnotate :: (Show v) => LNode (FrozenShared v) -> [GraphViz.Attribute]
defaultNodeAnnotate (_, FrozenShared (output, cmaps)) = go output
where
go CTrue{} = "true"
go CFalse{} = "false"
go (CVar _ i) = show $ extract i varMap
go (CNot{}) = "NOT"
go (CAnd{hlc=h}) = maybe "AND" goHLC h
go (COr{hlc=h}) = maybe "OR" goHLC h
goHLC (Xor{}) = "XOR"
goHLC (Onlyif{}) = go (output{ hlc=Nothing })
goHLC (Iff{}) = "IFF"
extract code f =
IntMap.findWithDefault (error $ "shareGraph: unknown code: " ++ show code)
code
(f cmaps)
defaultEdgeAnnotate = undefined
dotGraph :: (Graph gr) => gr (FrozenShared v) (FrozenShared v) -> DotGraph
dotGraph g = graphToDot g defaultNodeAnnotate defaultEdgeAnnotate
-}
-- | Given a frozen shared circuit, construct a `G.DynGraph' that exactly
-- represents it. Useful for debugging constraints generated as `Shared'
-- circuits.
shareGraph :: (G.DynGraph gr, Eq v, Show v) =>
FrozenShared v -> gr (FrozenShared v) (FrozenShared v)
shareGraph (FrozenShared output cmaps) =
(`runReader` cmaps) $ do
(_, nodes, edges) <- go output
return $ Graph.mkGraph (nub nodes) (nub edges)
where
-- Invariant: The returned node is always a member of the returned list of
-- nodes. Returns: (node, node-list, edge-list).
go c@(CVar i) = return (i, [(i, frz c)], [])
go c@(CTrue i) = return (i, [(i, frz c)], [])
go c@(CFalse i) = return (i, [(i, frz c)], [])
go c@(CNot i) = do
(child, nodes, edges) <- extract i notMap >>= go
return (i, (i, frz c) : nodes, (child, i, frz c) : edges)
go c@(CAnd i) = extract i andMap >>= tupM2 go >>= addKids c
go c@(COr i) = extract i orMap >>= tupM2 go >>= addKids c
go c@(CXor i) = extract i xorMap >>= tupM2 go >>= addKids c
go c@(COnlyif i) = extract i onlyifMap >>= tupM2 go >>= addKids c
go c@(CIff i) = extract i iffMap >>= tupM2 go >>= addKids c
go c@(CIte i) = do (x, y, z) <- extract i iteMap
( (cNode, cNodes, cEdges)
,(tNode, tNodes, tEdges)
,(eNode, eNodes, eEdges)) <- liftM3 (,,) (go x) (go y) (go z)
return (i, (i, frz c) : cNodes ++ tNodes ++ eNodes
,(cNode, i, frz c)
: (tNode, i, frz c)
: (eNode, i, frz c)
: cEdges ++ tEdges ++ eEdges)
addKids ccode ((lNode, lNodes, lEdges), (rNode, rNodes, rEdges)) =
let i = circuitHash ccode
in return (i, (i, frz ccode) : lNodes ++ rNodes,
(lNode, i, frz ccode) : (rNode, i, frz ccode) : lEdges ++ rEdges)
tupM2 f (x, y) = liftM2 (,) (f x) (f y)
frz ccode = FrozenShared ccode cmaps
extract code f = do
maps <- ask
let lookupError = error $ "shareGraph: unknown code: " ++ show code
case Bimap.lookup code (f maps) of
Nothing -> lookupError
Just x -> return x
-- ** Circuit simplification
-- | Performs obvious constant propagations.
simplifyTree :: Tree v -> Tree v
simplifyTree l@(TLeaf _) = l
simplifyTree TFalse = TFalse
simplifyTree TTrue = TTrue
simplifyTree (TNot t) =
let t' = simplifyTree t
in case t' of
TTrue -> TFalse
TFalse -> TTrue
_ -> TNot t'
simplifyTree (TAnd l r) =
let l' = simplifyTree l
r' = simplifyTree r
in case l' of
TFalse -> TFalse
TTrue -> case r' of
TTrue -> TTrue
TFalse -> TFalse
_ -> r'
_ -> case r' of
TTrue -> l'
TFalse -> TFalse
_ -> TAnd l' r'
simplifyTree (TOr l r) =
let l' = simplifyTree l
r' = simplifyTree r
in case l' of
TFalse -> r'
TTrue -> TTrue
_ -> case r' of
TTrue -> TTrue
TFalse -> l'
_ -> TOr l' r'
simplifyTree (TXor l r) =
let l' = simplifyTree l
r' = simplifyTree r
in case l' of
TFalse -> r'
TTrue -> case r' of
TFalse -> TTrue
TTrue -> TFalse
_ -> TNot r'
_ -> TXor l' r'
simplifyTree (TIff l r) =
let l' = simplifyTree l
r' = simplifyTree r
in case l' of
TFalse -> case r' of
TFalse -> TTrue
TTrue -> TFalse
_ -> l' `TIff` r'
TTrue -> case r' of
TTrue -> TTrue
TFalse -> TFalse
_ -> l' `TIff` r'
_ -> l' `TIff` r'
simplifyTree (l `TOnlyIf` r) =
let l' = simplifyTree l
r' = simplifyTree r
in case l' of
TFalse -> TTrue
TTrue -> r'
_ -> l' `TOnlyIf` r'
simplifyTree (TIte x t e) =
let x' = simplifyTree x
t' = simplifyTree t
e' = simplifyTree e
in case x' of
TTrue -> t'
TFalse -> e'
_ -> TIte x' t' e'
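-- A small sketch (simplifyExample is an assumed name, added for
-- illustration): the constant leaves are propagated away, leaving only the
-- variable leaf.
simplifyExample :: Tree String
simplifyExample = simplifyTree (TAnd TTrue (TOr TFalse (TLeaf "p")))
-- simplifyExample == TLeaf "p"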
-- ** Convert circuit to CNF
-- this data is private to toCNF.
data CNFResult = CP !Lit !(Set (Set Lit))
data CNFState = CNFS{ toCnfVars :: [Var]
-- ^ infinite fresh var source, starting at 1
, toCnfMap :: Bimap Var CCode
-- ^ record var mapping
}
emptyCNFState :: CNFState
emptyCNFState = CNFS{ toCnfVars = [V 1 ..]
, toCnfMap = Bimap.empty }
-- retrieve and create (if necessary) a cnf variable for the given ccode.
--findVar :: (MonadState CNFState m) => CCode -> m Lit
findVar ccode = do
m <- gets toCnfMap
v:vs <- gets toCnfVars
case Bimap.lookupR ccode m of
Nothing -> do modify $ \s -> s{ toCnfMap = Bimap.insert v ccode m
, toCnfVars = vs }
return . lit $ v
Just v' -> return . lit $ v'
-- | A circuit problem packages up the CNF corresponding to a given
-- `FrozenShared' circuit, and the mapping between the variables in the CNF and
-- the circuit elements of the circuit.
data CircuitProblem v = CircuitProblem
{ problemCnf :: CNF
, problemCircuit :: FrozenShared v
, problemCodeMap :: Bimap Var CCode }
-- | Produces a CNF formula that is satisfiable if and only if the input circuit
-- is satisfiable. /Note that it does not produce an equivalent CNF formula./
-- It is not equivalent in general because the transformation introduces
-- variables into the CNF which were not present as circuit inputs. (Variables
-- in the CNF correspond to circuit elements.) Returns equisatisfiable CNF
-- along with the frozen input circuit, and the mapping between the variables of
-- the CNF and the circuit elements.
--
-- The implementation uses the Tseitin transformation, to guarantee that the
-- output CNF formula is linear in the size of the circuit. Contrast this with
-- the naive DeMorgan-laws transformation which produces an exponential-size CNF
-- formula.
toCNF :: (Ord v, Show v) => FrozenShared v -> CircuitProblem v
toCNF cIn =
let c@(FrozenShared sharedCircuit circuitMaps) =
runShared . removeComplex $ cIn
(cnf, m) = ((`runReader` circuitMaps) . (`runStateT` emptyCNFState)) $ do
(CP l theClauses) <- toCNF' sharedCircuit
return $ Set.insert (Set.singleton l) theClauses
in CircuitProblem
{ problemCnf = CNF { numVars = Set.fold max 1
. Set.map (Set.fold max 1)
. Set.map (Set.map (unVar . var))
$ cnf
, numClauses = Set.size cnf
, clauses = Set.map Foldable.toList cnf }
, problemCircuit = c
, problemCodeMap = toCnfMap m }
where
-- Returns (CP l c) where {l} U c is CNF equisatisfiable with the input
-- circuit. Note that CNF conversion only has cases for And, Or, Not, True,
-- False, and Var circuits. We therefore remove the complex circuit before
-- passing stuff to this function.
toCNF' c@(CVar{}) = do l <- findVar c
return (CP l Set.empty)
toCNF' c@(CTrue{}) = do
l <- findVar c
return (CP l (Set.singleton . Set.singleton $ l))
toCNF' c@(CFalse{}) = do
l <- findVar c
return (CP l (Set.fromList [Set.singleton (negate l)]))
-- -- x <-> -y
-- -- <-> (-x, -y) & (y, x)
toCNF' c@(CNot i) = do
notLit <- findVar c
eTree <- extract i notMap
(CP eLit eCnf) <- toCNF' eTree
return
(CP notLit
(Set.fromList [ Set.fromList [negate notLit, negate eLit]
, Set.fromList [eLit, notLit] ]
`Set.union` eCnf))
-- -- x <-> (y | z)
-- -- <-> (-y, x) & (-z, x) & (-x, y, z)
toCNF' c@(COr i) = do
orLit <- findVar c
(l, r) <- extract i orMap
(CP lLit lCnf) <- toCNF' l
(CP rLit rCnf) <- toCNF' r
return
(CP orLit
(Set.fromList [ Set.fromList [negate lLit, orLit]
, Set.fromList [negate rLit, orLit]
, Set.fromList [negate orLit, lLit, rLit] ]
`Set.union` lCnf `Set.union` rCnf))
-- -- x <-> (y & z)
-- -- <-> (-x, y), (-x, z) & (-y, -z, x)
toCNF' c@(CAnd i) = do
andLit <- findVar c
(l, r) <- extract i andMap
(CP lLit lCnf) <- toCNF' l
(CP rLit rCnf) <- toCNF' r
return
(CP andLit
(Set.fromList [ Set.fromList [negate andLit, lLit]
, Set.fromList [negate andLit, rLit]
, Set.fromList [negate lLit, negate rLit, andLit] ]
`Set.union` lCnf `Set.union` rCnf))
toCNF' c = do
m <- ask
error $ "toCNF' bug: unknown code: " ++ show c
++ " with maps:\n" ++ show m
extract code f = do
m <- asks f
case Bimap.lookup code m of
Nothing -> error $ "toCNF: unknown code: " ++ show code
Just x -> return x
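-- A small usage sketch (cnfExample is an assumed name, added for
-- illustration): build a formula in the 'Shared' representation, freeze it
-- with 'runShared' and convert it to an equisatisfiable CNF. The resulting
-- 'problemCnf' can be handed to the solver, and a solution mapped back with
-- 'projectCircuitSolution'.
cnfExample :: CircuitProblem String
cnfExample = toCNF . runShared $
    (input "x" `and` input "y") `or` not (input "z")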
-- | Returns an equivalent circuit with no iff, xor, onlyif, and ite nodes.
removeComplex :: (Ord v, Show v, Circuit c) => FrozenShared v -> c v
removeComplex (FrozenShared code maps) = go code
where
go (CTrue{}) = true
go (CFalse{}) = false
go c@(CVar{}) = input $ getChildren c (varMap maps)
go c@(COr{}) = uncurry or (go `onTup` getChildren c (orMap maps))
go c@(CAnd{}) = uncurry and (go `onTup` getChildren c (andMap maps))
go c@(CNot{}) = not . go $ getChildren c (notMap maps)
go c@(CXor{}) =
let (l, r) = go `onTup` getChildren c (xorMap maps)
in (l `or` r) `and` not (l `and` r)
go c@(COnlyif{}) =
let (p, q) = go `onTup` getChildren c (onlyifMap maps)
in not p `or` q
go c@(CIff{}) =
let (p, q) = go `onTup` getChildren c (iffMap maps)
in (not p `or` q) `and` (not q `or` p)
go c@(CIte{}) =
let (cc, tc, ec) = getChildren c (iteMap maps)
(cond, t, e) = (go cc, go tc, go ec)
in (cond `and` t) `or` (not cond `and` e)
onTup :: (a -> b) -> (a, a) -> (b, b)
onTup f (x, y) = (f x, f y)
-- | Projects a funsat `Solution' back into the original circuit space,
-- returning a boolean environment containing an assignment of all circuit
-- inputs to true and false.
projectCircuitSolution :: (Ord v) => Solution -> CircuitProblem v -> BEnv v
projectCircuitSolution sol pblm = case sol of
Sat lits -> projectLits lits
Unsat lits -> projectLits lits
where
projectLits lits =
-- only the lits whose vars are in (varMap maps) go to benv
foldl (\m l -> case Bimap.lookup (litHash l) (varMap maps) of
Nothing -> m
Just v -> Map.insert v (litSign l) m)
Map.empty
(litAssignment lits)
where
(FrozenShared _ maps) = problemCircuit pblm
litHash l = case Bimap.lookup (var l) (problemCodeMap pblm) of
Nothing -> error $ "projectSolution: unknown lit: " ++ show l
Just code -> circuitHash code
|
alessandroleite/hephaestus-pl
|
src/funsat-0.6.2/src/Funsat/Circuit.hs
|
lgpl-3.0
| 29,446
| 45
| 21
| 9,570
| 8,584
| 4,529
| 4,055
| 548
| 27
|
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses, PatternGuards #-}
-- | Fetch URL page titles of HTML links.
module Plugin.Url (theModule) where
import Plugin
import Network.URI
import qualified Text.Regex as R -- legacy
$(plugin "Url")
instance Module UrlModule Bool where
moduleHelp _ "url-title" = "url-title <url>. Fetch the page title."
moduleCmds _ = ["url-title", "tiny-url"]
modulePrivs _ = ["url-on", "url-off"]
moduleDefState _ = return True -- url on
moduleSerialize _ = Just stdSerial
process_ _ "url-title" txt = lift $ maybe (return ["Url not valid."])
fetchTitle (containsUrl txt)
process_ _ "tiny-url" txt = lift $ maybe (return ["Url not valid."])
fetchTiny (containsUrl txt)
process_ _ "url-on" _ = writeMS True >> return ["Url enabled"]
process_ _ "url-off" _ = writeMS False >> return ["Url disabled"]
contextual _ _ _ text = do
alive <- readMS
if alive && (not $ areSubstringsOf ignoredStrings text)
then case containsUrl text of
Nothing -> return []
Just url
| length url > 65 -> do
title <- lift $ fetchTitle url
tiny <- lift $ fetchTiny url
return $ zipWith' cat title tiny
| otherwise -> lift $ fetchTitle url
else return []
where cat x y = x ++ ", " ++ y
zipWith' _ [] ys = ys
zipWith' _ xs [] = xs
zipWith' f (x:xs) (y:ys) = f x y : zipWith' f xs ys
------------------------------------------------------------------------
-- | Fetch the title of the specified URL.
fetchTitle :: String -> LB [String]
fetchTitle url = do
title <- io $ runWebReq (urlPageTitle url) (proxy config)
return $ maybe [] return title
-- | base url for fetching tiny urls
tinyurl :: String
tinyurl = "http://tinyurl.com/api-create.php?url="
-- | Fetch the title of the specified URL.
fetchTiny :: String -> LB [String]
fetchTiny url
| Just uri <- parseURI (tinyurl ++ url) = do
tiny <- io $ runWebReq (getHtmlPage uri) (proxy config)
return $ maybe [] return $ findTiny $ foldl' cat "" tiny
| otherwise = return $ maybe [] return $ Just url
where cat x y = x ++ " " ++ y
-- | Tries to find the start of a tinyurl
findTiny :: String -> Maybe String
findTiny text = do
(_,kind,rest,_) <- R.matchRegexAll begreg text
let url = takeWhile (/=' ') rest
return $ stripSuffixes ignoredUrlSuffixes $ kind ++ url
where
begreg = R.mkRegexWithOpts "http://tinyurl.com/" True False
-- | List of strings that, if present in a contextual message, will
-- prevent the looking up of titles. This list can be used to stop
-- responses to lisppaste for example. Another important use is to
-- stop another lambdabot from looking up a url title that contains
-- another url in it (infinite loop). Ideally, this list could be added to
-- by an admin via a privileged command (TODO).
ignoredStrings :: [String]
ignoredStrings =
["paste", -- Ignore lisppaste, rafb.net
"cpp.sourcforge.net", -- C++ paste bin
"HaskellIrcPastePage", -- Ignore paste page
"title of that page", -- Ignore others like the old me
urlTitlePrompt] -- Ignore others like me
-- | Suffixes that should be stripped off when identifying URLs in
-- contextual messages. These strings may be punctuation in the
-- current sentence vs part of a URL. Included here is the NUL
-- character as well.
ignoredUrlSuffixes :: [String]
ignoredUrlSuffixes = [".", ",", ";", ")", "\"", "\1", "\n"]
-- | Searches a string for an embedded URL and returns it.
containsUrl :: String -> Maybe String
containsUrl text = do
(_,kind,rest,_) <- R.matchRegexAll begreg text
let url = takeWhile (`notElem` " \n\t\v") rest
return $ stripSuffixes ignoredUrlSuffixes $ kind ++ url
where
begreg = R.mkRegexWithOpts "https?://" True False
-- | Utility function to remove potential suffixes from a string.
-- Note: once a suffix is found, it is stripped and the result returned;
-- no other suffixes are searched for at that point.
stripSuffixes :: [String] -> String -> String
stripSuffixes [] str = str
stripSuffixes (s:ss) str
| isSuffixOf s str = take (length str - length s) $ str
| otherwise = stripSuffixes ss str
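-- A small sketch (stripExample is an assumed name, added for illustration):
-- trailing sentence punctuation is removed, but only the first matching
-- suffix is stripped.
stripExample :: String
stripExample = stripSuffixes ignoredUrlSuffixes "http://example.com/foo,"
-- stripExample == "http://example.com/foo"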
-- | Utility function to check if any of the Strings in the specified
-- list are substrings of the String.
areSubstringsOf :: [String] -> String -> Bool
areSubstringsOf = flip (any . flip isSubstringOf)
where
isSubstringOf s str = any (isPrefixOf s) (tails str)
|
zeekay/lambdabot
|
Plugin/Url.hs
|
mit
| 4,775
| 0
| 17
| 1,296
| 1,144
| 586
| 558
| 76
| 1
|
module CombinatorParser (module CombinatorParser, module Control.Applicative) where
import Control.Monad
import Control.Applicative
newtype Parser t a = Parser ([t] -> [([t], a)])
parse :: Parser t a -> [t] -> [a]
parse (Parser p) ts = [a | ([], a) <- p ts]
instance Functor (Parser t) where
fmap f (Parser p) = Parser (\ts -> [(ts', f a) | (ts', a) <- p ts])
instance Applicative (Parser t) where
pure a = Parser (\ts -> [(ts, a)])
Parser p <*> Parser q = Parser (\ts -> [(ts'', f a) | (ts', f) <- p ts, (ts'', a) <- q ts'])
instance Alternative (Parser t) where
empty = Parser (\ts -> [])
Parser p <|> Parser q = Parser (\ts -> p ts ++ q ts)
token :: Eq t => t -> Parser t t
token t = Parser (\ts -> case ts of
t':ts' | t == t' -> [(ts', t')]
_ -> [])
-- Convenience parsers
anyof :: [Parser t a] -> Parser t a
anyof = foldr (<|>) empty
tokens :: Eq t => [t] -> Parser t [t]
tokens [] = pure []
tokens (x:xs) = (:) <$> token x <*> tokens xs
lexicon :: [(a, [String])] -> Parser String a
lexicon alts = anyof [a <$ anyof (map (tokens.words) alt) | (a, alt) <- alts]
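-- A small usage sketch (the Command type and exampleParse are assumptions,
-- added for illustration): a lexicon maps results to lists of phrases, and
-- 'parse' returns every parse that consumes the whole token list.
data Command = PickUp | Drop deriving (Show, Eq)

exampleParse :: [Command]
exampleParse = parse cmd (words "pick up")
  where cmd = lexicon [(PickUp, ["pick up", "take"]), (Drop, ["drop", "put down"])]
-- exampleParse == [PickUp]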
|
carlostome/shrdlite
|
haskell/CombinatorParser.hs
|
gpl-3.0
| 1,154
| 0
| 14
| 307
| 639
| 340
| 299
| 25
| 2
|
{-# LANGUAGE Rank2Types #-}
module Opaleye.SQLite.Internal.PackMap where
import qualified Opaleye.SQLite.Internal.Tag as T
import qualified Opaleye.SQLite.Internal.HaskellDB.PrimQuery as HPQ
import Control.Applicative (Applicative, pure, (<*>), liftA2)
import qualified Control.Monad.Trans.State as State
import Data.Profunctor (Profunctor, dimap)
import Data.Profunctor.Product (ProductProfunctor, empty, (***!))
import qualified Data.Profunctor.Product as PP
import qualified Data.Functor.Identity as I
-- This is rather like a Control.Lens.Traversal with the type
-- parameters switched but I'm not sure if it should be required to
-- obey the same laws.
--
-- TODO: We could attempt to generalise this to
--
-- data LensLike f a b s t = LensLike ((a -> f b) -> s -> f t)
--
-- i.e. a wrapped, argument-flipped Control.Lens.LensLike
--
-- This would allow us to do the Profunctor and ProductProfunctor
-- instances (requiring just Functor f and Applicative f respectively)
-- and share them between many different restrictions of f. For
-- example, TableColumnMaker is like a Setter so we would restrict f
-- to the Distributive case.
-- | A 'PackMap' @a@ @b@ @s@ @t@ encodes how an @s@ contains an
-- updatable sequence of @a@ inside it. Each @a@ in the sequence can
-- be updated to a @b@ (and the @s@ changes to a @t@ to reflect this
-- change of type).
--
-- 'PackMap' is just like a @Traversal@ from the lens package.
-- 'PackMap' has a different order of arguments to @Traversal@ because
-- it typically needs to be made a 'Profunctor' (and indeed
-- 'ProductProfunctor') in @s@ and @t@. It is unclear at this point
-- whether we want the same @Traversal@ laws to hold or not. Our use
-- cases may be much more general.
data PackMap a b s t = PackMap (Applicative f =>
(a -> f b) -> s -> f t)
-- | Replaces the targeted occurrences of @a@ in @s@ with @b@ (changing
-- the @s@ to a @t@ in the process). This can be done via an
-- 'Applicative' action.
--
-- 'traversePM' is just like @traverse@ from the @lens@ package.
-- 'traversePM' used to be called @packmap@.
traversePM :: Applicative f => PackMap a b s t -> (a -> f b) -> s -> f t
traversePM (PackMap f) = f
-- | Modify the targeted occurrences of @a@ in @s@ with @b@ (changing
-- the @s@ to a @t@ in the process).
--
-- 'overPM' is just like @over@ from the @lens@ package.
overPM :: PackMap a b s t -> (a -> b) -> s -> t
overPM p f = I.runIdentity . traversePM p (I.Identity . f)
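-- A small sketch (firstPM is an assumed name, added for illustration): a
-- 'PackMap' targeting the first component of a pair. Only 'pure' and '<*>'
-- are used, so any 'Applicative' can drive the update.
firstPM :: PackMap a b (a, c) (b, c)
firstPM = PackMap (\f (x, y) -> pure (\x' -> (x', y)) <*> f x)
-- overPM firstPM show (1 :: Int, True) == ("1", True)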
-- {
-- | A helpful monad for writing columns in the AST
type PM a = State.State (a, Int)
new :: PM a String
new = do
(a, i) <- State.get
State.put (a, i + 1)
return (show i)
write :: a -> PM [a] ()
write a = do
(as, i) <- State.get
State.put (as ++ [a], i)
run :: PM [a] r -> (r, [a])
run m = (r, as)
where (r, (as, _)) = State.runState m ([], 0)
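-- A small sketch (pmExample is an assumed name, added for illustration):
-- 'write' appends to the accumulated list while 'new' hands out successive
-- numeric names, starting from 0.
pmExample :: (String, [String])
pmExample = run (write "a" >> write "b" >> new)
-- pmExample == ("0", ["a", "b"])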
-- }
-- { General functions for writing columns in the AST
-- | Make a fresh name for an input value (the type variable @primExpr@
-- is typically instantiated to 'HPQ.PrimExpr') based on the supplied
-- function and the unique 'T.Tag' that is used as part of our
-- @QueryArr@.
--
-- Add the fresh name and the input value it refers to to the list in
-- the state parameter.
extractAttrPE :: (primExpr -> String -> String) -> T.Tag -> primExpr
-> PM [(HPQ.Symbol, primExpr)] HPQ.PrimExpr
extractAttrPE mkName t pe = do
i <- new
let s = HPQ.Symbol (mkName pe i) t
write (s, pe)
return (HPQ.AttrExpr s)
-- | As 'extractAttrPE' but ignores the 'primExpr' when making the
-- fresh column name and just uses the supplied 'String' and 'T.Tag'.
extractAttr :: String -> T.Tag -> primExpr
-> PM [(HPQ.Symbol, primExpr)] HPQ.PrimExpr
extractAttr s = extractAttrPE (const (s ++))
-- }
eitherFunction :: Functor f
=> (a -> f b)
-> (a' -> f b')
-> Either a a'
-> f (Either b b')
eitherFunction f g = fmap (either (fmap Left) (fmap Right)) (f PP.+++! g)
-- {
-- Boilerplate instance definitions. There's no choice here apart
-- from the order in which the applicative is applied.
instance Functor (PackMap a b s) where
fmap f (PackMap g) = PackMap ((fmap . fmap . fmap) f g)
instance Applicative (PackMap a b s) where
pure x = PackMap (pure (pure (pure x)))
PackMap f <*> PackMap x = PackMap (liftA2 (liftA2 (<*>)) f x)
instance Profunctor (PackMap a b) where
dimap f g (PackMap q) = PackMap (fmap (dimap f (fmap g)) q)
instance ProductProfunctor (PackMap a b) where
empty = PP.defaultEmpty
(***!) = PP.defaultProfunctorProduct
instance PP.SumProfunctor (PackMap a b) where
f +++! g = (PackMap (\x -> eitherFunction (f' x) (g' x)))
where PackMap f' = f
PackMap g' = g
-- }
|
bergmark/haskell-opaleye
|
opaleye-sqlite/src/Opaleye/SQLite/Internal/PackMap.hs
|
bsd-3-clause
| 4,785
| 0
| 12
| 1,065
| 1,139
| 632
| 507
| 59
| 1
|
import Test.Cabal.Prelude
main = cabalTest $ withRepo "repo"
$ forM_ ["--new-freeze-file", "--freeze-file"] $ \arg -> do
cabal' "outdated" [arg] >>=
(\out -> do
assertOutputContains "base" out
assertOutputContains "template-haskell" out)
cabal' "outdated" [arg, "--ignore=base,template-haskell"] >>=
(\out -> do
assertOutputDoesNotContain "base" out
assertOutputDoesNotContain "template-haskell" out)
cabal' "outdated" [arg, "--minor=base,template-haskell"] >>=
(\out -> do
assertOutputDoesNotContain "base" out
assertOutputContains "template-haskell" out)
|
themoritz/cabal
|
cabal-testsuite/PackageTests/Outdated/outdated_freeze.test.hs
|
bsd-3-clause
| 633
| 0
| 14
| 133
| 164
| 80
| 84
| 15
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.RotSlaves
-- Copyright : (c) Hans Philipp Annen <haphi@gmx.net>, Mischa Dieterle <der_m@freenet.de>
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Hans Philipp Annen <haphi@gmx.net>
-- Stability : stable
-- Portability : unportable
--
-- Rotate all windows except the master window and keep the focus in
-- place.
-----------------------------------------------------------------------------
module XMonad.Actions.RotSlaves (
-- $usage
rotSlaves', rotSlavesUp, rotSlavesDown,
rotAll', rotAllUp, rotAllDown
) where
import XMonad.StackSet
import XMonad
-- $usage
--
-- To use this module, import it with:
--
-- > import XMonad.Actions.RotSlaves
--
-- and add whatever keybindings you would like, for example:
--
-- > , ((modm .|. shiftMask, xK_Tab ), rotSlavesUp)
--
-- This operation will rotate all windows except the master window,
-- while the focus stays where it is. It is useful together with the
-- TwoPane layout (see "XMonad.Layout.TwoPane").
--
-- For detailed instructions on editing your key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
-- | Rotate the windows in the current stack, excluding the first one
-- (master).
rotSlavesUp,rotSlavesDown :: X ()
rotSlavesUp = windows $ modify' (rotSlaves' (\l -> (tail l)++[head l]))
rotSlavesDown = windows $ modify' (rotSlaves' (\l -> [last l]++(init l)))
-- | The actual rotation, as a pure function on the window stack.
rotSlaves' :: ([a] -> [a]) -> Stack a -> Stack a
rotSlaves' _ s@(Stack _ [] []) = s
rotSlaves' f (Stack t [] rs) = Stack t [] (f rs) -- Master has focus
rotSlaves' f s@(Stack _ ls _ ) = Stack t' (reverse revls') rs' -- otherwise
where (master:ws) = integrate s
(revls',t':rs') = splitAt (length ls) (master:(f ws))
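-- A small sketch (rotationExample is an assumed name, added for
-- illustration): with master 1, focused window 2 and slaves 2,3,4, rotating
-- the slaves up cycles the slaves while the master and the focused
-- position stay put.
rotationExample :: Stack Int
rotationExample = rotSlaves' (\l -> tail l ++ [head l]) (Stack 2 [1] [3,4])
-- rotationExample == Stack 3 [1] [4,2]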
-- | Rotate all the windows in the current stack.
rotAllUp,rotAllDown :: X ()
rotAllUp = windows $ modify' (rotAll' (\l -> (tail l)++[head l]))
rotAllDown = windows $ modify' (rotAll' (\l -> [last l]++(init l)))
-- | The actual rotation, as a pure function on the window stack.
rotAll' :: ([a] -> [a]) -> Stack a -> Stack a
rotAll' f s = Stack r (reverse revls) rs
where (revls,r:rs) = splitAt (length (up s)) (f (integrate s))
|
pjones/xmonad-test
|
vendor/xmonad-contrib/XMonad/Actions/RotSlaves.hs
|
bsd-2-clause
| 2,367
| 0
| 13
| 460
| 556
| 311
| 245
| 20
| 1
|
-- | Create a tiled map given a generated map
module Game.World.Gen.Compile where
import Game.World.Gen.Types
|
mfpi/q-inqu
|
Game/World/Gen/Compile.hs
|
mit
| 112
| 0
| 4
| 17
| 16
| 12
| 4
| 2
| 0
|