| code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cyclomatic_complexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE ScopedTypeVariables #-}
module Test.RSCoin.Local.MessagePackSpec
( spec
) where
import Data.MessagePack (MessagePack (..), pack, unpack)
import Data.Proxy (Proxy (Proxy))
import Test.Hspec (Spec, describe)
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck (Arbitrary, (===))
import qualified RSCoin.Core as C
import RSCoin.Mintette.Error (MintetteError)
import RSCoin.Notary.Error (NotaryError)
import Test.RSCoin.Local.Arbitrary ()
spec :: Spec
spec =
describe "MessagePack" $
describe "Identity Properties" $ do
makeMsgPackProp "Either MintetteError CheckConfirmation"
(Proxy :: Proxy (Either MintetteError C.CheckConfirmation))
makeMsgPackProp "MintetteError" (Proxy :: Proxy MintetteError)
makeMsgPackProp "NotaryError" (Proxy :: Proxy NotaryError)
makeMsgPackProp
:: forall a.
(Show a, Eq a, MessagePack a, Arbitrary a)
=> String -> Proxy a -> Spec
makeMsgPackProp s Proxy = prop s $ \(x :: a) -> x === mid x
mid :: MessagePack a => a -> a
mid = maybe err id . unpack . pack
where
err = error "[MessagePackSpec] Failed MessagePack decoding"
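-- A minimal sketch of the round-trip identity each property above checks,
-- using 'Int', which also has a 'MessagePack' instance:
--
-- >>> mid (42 :: Int)
-- 42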
| input-output-hk/rscoin-haskell | test/Test/RSCoin/Local/MessagePackSpec.hs | gpl-3.0 | 1,347 | 0 | 13 | 418 | 340 | 190 | 150 | 28 | 1 |
-- | > ghc -threaded windows.hs && windows.exe
-- Plain red screen will appear.
module Main where
import System.Win32.DLL (getModuleHandle)
import Graphics.Win32
import Graphics.Win32.Message
import Graphics.Win32.Window
import Data.Int
import Data.Maybe
import Control.Monad
import Foreign.C.String
import Graphics.EGL
import Graphics.OpenGLES
import Graphics.OpenGLES.Base
import Foreign
import Foreign.C
foreign import stdcall "PostQuitMessage" postQuitMessage
:: Int32 -> IO ()
main = do
hwnd <- platformCreateWindow
hdc <- getDC (Just hwnd)
egl <- eglInitializeOn (Just hdc) -- Nothing also works
[[(egl_RenderableType, egl_OpenGLES2)]]
[(egl_ClientVersion, 2)]
forkGL (eglResume egl hwnd >> return False) (eglSuspend egl) (eglPostFrame egl)
putStrLn "hello!"
putStrLn.show =<< glReadLogs
putStrLn.show =<< eglScreenDims egl
--updateWindow hwnd
allocaMessage pump
--stopGL
--unregisterClass clsName hinst
platformCreateWindow = do
let clsName = mkClassName "My Window Class"
hinst <- getModuleHandle Nothing
whiteBrush <- getStockBrush wHITE_BRUSH
curArrow <- loadCursor Nothing iDC_ARROW
mAtom <- registerClass (
cS_OWNDC, --cS_DBLCLKS,
hinst, -- HINSTANCE
Nothing, -- Maybe HICON
Just curArrow, -- Maybe HCURSOR
Just whiteBrush,-- Maybe HBRUSH
Nothing, -- Maybe LPCTSTR
clsName)
-- adjustWindowRect V4 wStyle False
--when (isJust mAtom) $ do
hwnd <- createWindow
clsName
"Redbox"
(wS_THICKFRAME + wS_CAPTION + wS_SYSMENU)
Nothing
Nothing
(Just 600)
(Just 600)
Nothing
Nothing
hinst
wndProc
showWindow hwnd sW_SHOWNORMAL
return hwnd
pump lpmsg = do
fContinue <- getMessage lpmsg Nothing
when fContinue $ do
translateMessage lpmsg
dispatchMessage lpmsg
pump lpmsg
wndProc
:: HWND
-> WindowMessage
-> WPARAM
-> LPARAM
-> IO LRESULT
wndProc hwnd wm wp lp
| wm == wM_KEYDOWN = doFinish
| wm == wM_LBUTTONDOWN = doFinish
| wm == wM_DESTROY = postQuitMessage 0 >> return 0
| wm == wM_PAINT = onPaint
| otherwise = defWindowProc (Just hwnd) wm wp lp
where
doFinish = sendMessage hwnd wM_CLOSE 1 0 >> return 0
onPaint = allocaPAINTSTRUCT $ \ lpps -> do
-- OpenGL ES Rendering
runGL $ do
putStrLn . show $ [glVersion, glRenderer, glVendor, glShadingLanguageVersion, show glExtensions]
clear [clearColor 1 0 0 1] (colorBuffer)
endFrameGL
-- GDI Rendering
hdc <- beginPaint hwnd lpps
render hwnd hdc
endPaint hwnd lpps
--invalidateRect (Just ) (const (return ()))
return 0
render :: HWND -> HDC -> IO ()
render hwnd hdc = do
setBkMode hdc tRANSPARENT
setTextColor hdc $ rgb 0 0 0
textOut hdc 5 5 "hello world!"
| capsjac/opengles | examples/windows.hs | lgpl-3.0 | 2,889 | 1 | 16 | 740 | 777 | 382 | 395 | 87 | 1 |
-- | ECDSA Signatures
module Network.Haskoin.Crypto.ECDSA
( SecretT
, Signature(..)
, withSource
, getEntropy
, signMsg
, verifySig
, genPrvKey
, isCanonicalHalfOrder
, decodeDerSig
, decodeStrictSig
) where
import Numeric (showHex)
import Control.DeepSeq (NFData, rnf)
import Control.Monad (when, unless, guard)
import Control.Monad.Trans (lift)
import qualified Control.Monad.State as S
( StateT
, evalStateT
, get, put
)
import Data.Maybe (fromMaybe)
import Data.Binary (Binary, get, put)
import Data.Binary.Put (putByteString, putByteString)
import Data.Binary.Get (getWord8, lookAhead, getByteString)
import Data.ByteString (ByteString)
import System.Entropy (getEntropy)
import qualified Crypto.Secp256k1 as EC
import Network.Haskoin.Constants
import Network.Haskoin.Crypto.Hash
import Network.Haskoin.Crypto.Keys
-- | Internal state of the 'SecretT' monad
type SecretState m = (WorkingState, Int -> m ByteString)
-- | StateT monad stack tracking the internal state of HMAC DRBG
-- pseudo random number generator using SHA-256. The 'SecretT' monad is
-- run with the 'withSource' function by providing it a source of entropy.
type SecretT m = S.StateT (SecretState m) m
-- | Run a 'SecretT' monad by providing it a source of entropy. You can
-- use 'getEntropy' or provide your own entropy source function.
withSource :: Monad m => (Int -> m ByteString) -> SecretT m a -> m a
withSource f m = do
seed <- f 32 -- Read 256 bits from the random source
nonce <- f 16 -- Read 128 bits from the random source
let ws = hmacDRBGNew seed nonce haskoinUserAgent
S.evalStateT m (ws,f)
-- | Generate a new random 'EC.SecKey' value from the 'SecretT' monad. This
-- will invoke the HMAC DRBG routine. If the internal entropy pool of the HMAC
-- DRBG has been stretched too much, this function will reseed it.
nextSecret :: Monad m => SecretT m EC.SecKey
nextSecret = do
(ws, f) <- S.get
let (ws', randM) = hmacDRBGGen ws 32 haskoinUserAgent
case randM of
(Just rand) -> do
S.put (ws', f)
case EC.secKey rand of
Just key -> return key
Nothing -> nextSecret
Nothing -> do
seed <- lift $ f 32 -- Read 256 bits to re-seed the PRNG
let ws0 = hmacDRBGRsd ws' seed haskoinUserAgent
S.put (ws0, f)
nextSecret
-- | Produce a new 'PrvKey' randomly from the 'SecretT' monad.
genPrvKey :: Monad m => SecretT m PrvKey
genPrvKey = makePrvKey <$> nextSecret
-- | Data type representing an ECDSA signature.
newtype Signature = Signature { getSignature :: EC.Sig }
deriving (Read, Show, Eq)
instance NFData Signature where
rnf (Signature s) = s `seq` ()
hashToMsg :: Hash256 -> EC.Msg
hashToMsg =
fromMaybe e . EC.msg . getHash256
where
e = error "Could not convert 32-byte hash to secp256k1 message"
-- <http://www.secg.org/sec1-v2.pdf Section 4.1.3>
-- | Sign a message
signMsg :: Hash256 -> PrvKey -> Signature
signMsg h d = Signature $ EC.signMsg (prvKeySecKey d) (hashToMsg h)
-- | Verify an ECDSA signature
verifySig :: Hash256 -> Signature -> PubKey -> Bool
verifySig h s q =
EC.verifySig p g m
where
(g, _) = EC.normalizeSig $ getSignature s
m = hashToMsg h
p = pubKeyPoint q
instance Binary Signature where
get = do
l <- lookAhead $ do
t <- getWord8
-- 0x30 is DER sequence type
unless (t == 0x30) $ fail $
"Bad DER identifier byte 0x" ++ showHex t ". Expecting 0x30"
l <- getWord8
when (l == 0x00) $ fail "Indeterminate form unsupported"
when (l >= 0x80) $ fail "Multi-octect length not supported"
return $ fromIntegral l
bs <- getByteString $ l + 2
case decodeDerSig bs of
Just s -> return s
Nothing -> fail "Invalid signature"
put (Signature s) = putByteString $ EC.exportSig s
isCanonicalHalfOrder :: Signature -> Bool
isCanonicalHalfOrder = not . snd . EC.normalizeSig . getSignature
decodeDerSig :: ByteString -> Maybe Signature
decodeDerSig bs = Signature <$> EC.laxImportSig bs
decodeStrictSig :: ByteString -> Maybe Signature
decodeStrictSig bs = do
g <- EC.importSig bs
let compact = EC.exportCompactSig g
-- <http://www.secg.org/sec1-v2.pdf Section 4.1.4>
-- 4.1.4.1 (r and s can not be zero)
guard $ EC.getCompactSigR compact /= 0
guard $ EC.getCompactSigS compact /= 0
return $ Signature g
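-- A minimal usage sketch of the pieces above, assuming 'derivePubKey' from the
-- imported Keys module; the 'Hash256' to sign is taken as an argument, since
-- producing it is outside the scope of this module:
signAndVerifyExample :: Hash256 -> IO Bool
signAndVerifyExample h = do
    prv <- withSource getEntropy genPrvKey      -- fresh key from system entropy
    let sig = signMsg h prv                     -- deterministic ECDSA signature
    return $ verifySig h sig (derivePubKey prv) -- expected to be True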
| tphyahoo/haskoin | haskoin-core/Network/Haskoin/Crypto/ECDSA.hs | unlicense | 4,491 | 0 | 17 | 1,068 | 1,139 | 600 | 539 | 96 | 3 |
module Main where
import Test.Framework (defaultMain)
-- Util tests
import qualified Network.Haskoin.Util.Tests (tests)
-- Crypto tests
import qualified Network.Haskoin.Crypto.ECDSA.Tests (tests)
import qualified Network.Haskoin.Crypto.Base58.Tests (tests)
import qualified Network.Haskoin.Crypto.Base58.Units (tests)
import qualified Network.Haskoin.Crypto.Keys.Tests (tests)
import qualified Network.Haskoin.Crypto.ExtendedKeys.Tests (tests)
import qualified Network.Haskoin.Crypto.ExtendedKeys.Units (tests)
import qualified Network.Haskoin.Crypto.Hash.Tests (tests)
import qualified Network.Haskoin.Crypto.Hash.Units (tests)
import qualified Network.Haskoin.Crypto.Mnemonic.Tests (tests)
import qualified Network.Haskoin.Crypto.Mnemonic.Units (tests)
import qualified Network.Haskoin.Crypto.Units (tests)
-- Node tests
import qualified Network.Haskoin.Node.Units (tests)
-- Script tests
import qualified Network.Haskoin.Script.Tests (tests)
import qualified Network.Haskoin.Script.Units (tests)
-- Transaction tests
import qualified Network.Haskoin.Transaction.Tests (tests)
import qualified Network.Haskoin.Transaction.Units (tests)
-- Block tests
import qualified Network.Haskoin.Block.Tests (tests)
import qualified Network.Haskoin.Block.Units (tests)
-- Json tests
import qualified Network.Haskoin.Json.Tests (tests)
-- Binary tests
import qualified Network.Haskoin.Binary.Tests (tests)
main :: IO ()
main = defaultMain
( Network.Haskoin.Json.Tests.tests
++ Network.Haskoin.Binary.Tests.tests
++ Network.Haskoin.Util.Tests.tests
++ Network.Haskoin.Crypto.ECDSA.Tests.tests
++ Network.Haskoin.Crypto.Base58.Tests.tests
++ Network.Haskoin.Crypto.Base58.Units.tests
++ Network.Haskoin.Crypto.Hash.Tests.tests
++ Network.Haskoin.Crypto.Hash.Units.tests
++ Network.Haskoin.Crypto.Keys.Tests.tests
++ Network.Haskoin.Crypto.ExtendedKeys.Tests.tests
++ Network.Haskoin.Crypto.ExtendedKeys.Units.tests
++ Network.Haskoin.Crypto.Mnemonic.Tests.tests
++ Network.Haskoin.Crypto.Mnemonic.Units.tests
++ Network.Haskoin.Crypto.Units.tests
++ Network.Haskoin.Node.Units.tests
++ Network.Haskoin.Script.Tests.tests
++ Network.Haskoin.Script.Units.tests
++ Network.Haskoin.Transaction.Tests.tests
++ Network.Haskoin.Transaction.Units.tests
++ Network.Haskoin.Block.Tests.tests
++ Network.Haskoin.Block.Units.tests
)
| tphyahoo/haskoin | haskoin-core/tests/Main.hs | unlicense | 2,408 | 0 | 27 | 264 | 521 | 361 | 160 | 46 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module FormEngine.FormContext where
import FormEngine.FormElement.FormElement (FormElement)
data FormContext =
FormContext
{ allElems :: [FormElement]
, validImg :: String
, invalidImg :: String
, addImg :: String
, removeImg :: String
}
| DataStewardshipPortal/ds-form-engine | FormContext.hs | apache-2.0 | 337 | 0 | 9 | 99 | 60 | 39 | 21 | 10 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
-- | Expressions in K3.
module Language.K3.Core.Expression (
Expression(..),
ImperativeExpression(..),
Constant(..),
Operator(..),
Binder(..),
Annotation(..),
PropertyE
, onEProperty
, ePropertyName
, ePropertyValue
, ePropertyV
, isEQualified
, isESpan
, isEUID
, isEUIDSpan
, isEAnnotation
, isEProperty
, isEInferredProperty
, isEUserProperty
, isESyntax
, isEApplyGen
, isEType
, isETypeOrBound
, isEQType
, isEPType
, isEAnyType
, isEProvenance
, isESEffect
, isEFStructure
, isEMaterialization
, isAnyETypeAnn
, isAnyEEffectAnn
, isAnyETypeOrEffectAnn
, namedEAnnotations
) where
import Control.DeepSeq
import Data.List
import Data.Tree
import Data.Typeable
import Data.Word (Word8)
import qualified Data.Map as M
import GHC.Generics (Generic)
import Language.K3.Core.Annotation
import Language.K3.Core.Annotation.Analysis
import Language.K3.Core.Annotation.Codegen
import Language.K3.Core.Annotation.Syntax
import Language.K3.Core.Common
import Language.K3.Core.Type
import Language.K3.Core.Literal
import Language.K3.Analysis.HMTypes.DataTypes
import Language.K3.Analysis.Provenance.Core
import qualified Language.K3.Analysis.SEffects.Core as S
import Language.K3.Transform.Hints
import qualified Language.K3.Codegen.CPP.Materialization.Hints as Z
import Language.K3.Utils.Pretty
import Data.Text ( Text )
import qualified Data.Text as T
import qualified Language.K3.Utils.PrettyText as PT
-- | Cycle-breaking import for metaprogramming
import {-# SOURCE #-} Language.K3.Core.Metaprogram ( SpliceEnv )
-- | Expression tags. Every expression can be qualified with a mutability annotation.
data Expression
= EConstant Constant
| EVariable Identifier
| ESome
| EIndirect
| ETuple
| ERecord [Identifier]
| ELambda Identifier
| EOperate Operator
| EProject Identifier
| ELetIn Identifier
| EAssign Identifier
| ECaseOf Identifier
| EBindAs Binder
| EIfThenElse
| EAddress
| ESelf
| EImperative ImperativeExpression
deriving (Eq, Ord, Read, Show, Typeable, Generic)
data ImperativeExpression
= EWhile
deriving (Eq, Ord, Read, Show, Typeable, Generic)
-- | Constant expression values.
data Constant
= CBool Bool
| CByte Word8
| CInt Int
| CReal Double
| CString String
| CNone NoneMutability
| CEmpty (K3 Type)
deriving (Eq, Ord, Read, Show, Typeable, Generic)
-- | Operators (unary and binary).
data Operator
= OAdd
| OSub
| OMul
| ODiv
| OMod
| ONeg
| OEqu
| ONeq
| OLth
| OLeq
| OGth
| OGeq
| OAnd
| OOr
| ONot
| OConcat
| OSeq
| OApp
| OSnd
deriving (Eq, Ord, Read, Show, Typeable, Generic)
-- | Binding Forms.
data Binder
= BIndirection Identifier
| BTuple [Identifier]
| BRecord [(Identifier, Identifier)]
deriving (Eq, Ord, Read, Show, Typeable, Generic)
-- | Annotations on expressions.
data instance Annotation Expression
= ESpan Span
| EUID UID
| EMutable
| EImmutable
| EAnnotation Identifier
| EProperty PropertyE
-- ^ Properties are 'Either' values: a 'Left' variant indicates a
-- user-defined property, while a 'Right' variant represents an inferred property.
| EApplyGen Bool Identifier SpliceEnv
-- ^ Apply a K3 generator, with a bool indicating a control annotation generator (vs a data annotation),
-- a generator name, and a splice environment.
| ESyntax SyntaxAnnotation
| EAnalysis AnalysisAnnotation
-- TODO: the remainder of these should be pushed into
-- an annotation category (e.g., EType, EAnalysis, etc)
| EProvenance (K3 Provenance)
| ESEffect (K3 S.Effect)
| EFStructure (K3 S.Effect)
| EOpt OptHint
| EMaterialization (M.Map Identifier Z.Decision)
| EType (K3 Type)
| EQType (K3 QType)
| ETypeLB (K3 Type)
| ETypeUB (K3 Type)
| EPType (K3 Type) -- Annotation embedding for pattern types
| EEmbedding EmbeddingAnnotation
deriving (Eq, Ord, Read, Show, Generic)
-- | Data Conflicts
-- TODO: move to Language.K3.Core.Annotation.Analysis
data Conflict
= RW [(Annotation Expression)] (Annotation Expression)
| WR (Annotation Expression) [(Annotation Expression)]
| WW (Annotation Expression) (Annotation Expression)
deriving (Eq, Ord, Read, Show, Generic)
{- NFData instances for expressions. -}
instance NFData Expression
instance NFData ImperativeExpression
instance NFData Constant
instance NFData Operator
instance NFData Binder
instance NFData (Annotation Expression)
instance NFData Conflict
{- HasUID instances. -}
instance HasUID (Annotation Expression) where
getUID (EUID u) = Just u
getUID _ = Nothing
instance HasSpan (Annotation Expression) where
getSpan (ESpan s) = Just s
getSpan _ = Nothing
-- | Property helpers
type PropertyV = (Identifier, Maybe (K3 Literal))
type PropertyE = Either PropertyV PropertyV
onEProperty :: (PropertyV -> a) -> PropertyE -> a
onEProperty f (Left (n, lopt)) = f (n, lopt)
onEProperty f (Right (n, lopt)) = f (n, lopt)
ePropertyName :: PropertyE -> String
ePropertyName (Left (n,_)) = n
ePropertyName (Right (n,_)) = n
ePropertyValue :: PropertyE -> Maybe (K3 Literal)
ePropertyValue (Left (_,v)) = v
ePropertyValue (Right (_,v)) = v
ePropertyV :: PropertyE -> PropertyV
ePropertyV (Left pv) = pv
ePropertyV (Right pv) = pv
{- Expression annotation predicates -}
isEQualified :: Annotation Expression -> Bool
isEQualified EImmutable = True
isEQualified EMutable = True
isEQualified _ = False
isESpan :: Annotation Expression -> Bool
isESpan (ESpan _) = True
isESpan _ = False
isEUID :: Annotation Expression -> Bool
isEUID (EUID _) = True
isEUID _ = False
isEUIDSpan :: Annotation Expression -> Bool
isEUIDSpan a = isESpan a || isEUID a
isEAnnotation :: Annotation Expression -> Bool
isEAnnotation (EAnnotation _) = True
isEAnnotation _ = False
isEProperty :: Annotation Expression -> Bool
isEProperty (EProperty _) = True
isEProperty _ = False
isEInferredProperty :: Annotation Expression -> Bool
isEInferredProperty (EProperty (Right _)) = True
isEInferredProperty _ = False
isEUserProperty :: Annotation Expression -> Bool
isEUserProperty (EProperty (Left _)) = True
isEUserProperty _ = False
isESyntax :: Annotation Expression -> Bool
isESyntax (ESyntax _) = True
isESyntax _ = False
isEApplyGen :: Annotation Expression -> Bool
isEApplyGen (EApplyGen _ _ _) = True
isEApplyGen _ = False
isEType :: Annotation Expression -> Bool
isEType (EType _) = True
isEType _ = False
isETypeOrBound :: Annotation Expression -> Bool
isETypeOrBound (EType _) = True
isETypeOrBound (ETypeLB _) = True
isETypeOrBound (ETypeUB _) = True
isETypeOrBound _ = False
isEQType :: Annotation Expression -> Bool
isEQType (EQType _) = True
isEQType _ = False
isEPType :: Annotation Expression -> Bool
isEPType (EPType _) = True
isEPType _ = False
isEAnyType :: Annotation Expression -> Bool
isEAnyType (EType _) = True
isEAnyType (ETypeLB _) = True
isEAnyType (ETypeUB _) = True
isEAnyType (EQType _) = True
isEAnyType (EPType _) = True
isEAnyType _ = False
isEProvenance :: Annotation Expression -> Bool
isEProvenance (EProvenance _) = True
isEProvenance _ = False
isESEffect :: Annotation Expression -> Bool
isESEffect (ESEffect _) = True
isESEffect _ = False
isEFStructure :: Annotation Expression -> Bool
isEFStructure (EFStructure _) = True
isEFStructure _ = False
isAnyETypeAnn :: Annotation Expression -> Bool
isAnyETypeAnn a = isETypeOrBound a || isEQType a
isAnyEEffectAnn :: Annotation Expression -> Bool
isAnyEEffectAnn a = isEProvenance a || isESEffect a || isEFStructure a
isAnyETypeOrEffectAnn :: Annotation Expression -> Bool
isAnyETypeOrEffectAnn a = isAnyETypeAnn a || isAnyEEffectAnn a
namedEAnnotations :: [Annotation Expression] -> [Identifier]
namedEAnnotations anns = map extractId $ filter isEAnnotation anns
where extractId (EAnnotation n) = n
extractId _ = error "Invalid named annotation"
isEMaterialization :: Annotation Expression -> Bool
isEMaterialization (EMaterialization _) = True
isEMaterialization _ = False
{- Pretty instances -}
instance Pretty (K3 Expression) where
prettyLines (Node (ETuple :@: as) []) =
let (annStr, pAnnStrs) = drawExprAnnotations as
in ["EUnit" ++ annStr] ++ (shift "`- " " " pAnnStrs)
prettyLines (Node (EConstant (CEmpty t) :@: as) []) =
let (annStr, pAnnStrs) = drawExprAnnotations as
in ["EConstant CEmpty" ++ annStr] ++ (shift "+- " "| " pAnnStrs) ++ ["|"] ++ terminalShift t
prettyLines (Node (t :@: as) es) =
let (annStr, pAnnStrs) = drawExprAnnotations as
shiftedTAnns = if null es then (shift "`- " " " pAnnStrs)
else (shift "+- " "| " pAnnStrs)
in
[show t ++ annStr] ++ shiftedTAnns ++ drawSubTrees es
drawExprAnnotations :: [Annotation Expression] -> (String, [String])
drawExprAnnotations as =
let (typeAnns, anns) = partition isTypeAnn as
(effectAnns, anns') = partition isAnyEEffectAnn anns
(restPAnns, anns'') = partition isEApplyGen anns'
prettyTypeAnns = case typeAnns of
[] -> []
[EType t] -> drawETypeAnnotation $ EType t
[EQType t] -> drawETypeAnnotation $ EQType t
[EPType t] -> drawETypeAnnotation $ EPType t
[t, l, u] -> drawETypeAnnotation t
%+ indent 2 (drawETypeAnnotation l
%+ indent 2 (drawETypeAnnotation u))
_ -> error "Invalid type bound annotations"
prettyAnns = drawGroup $ [prettyTypeAnns] ++ map drawEEffectAnnotations effectAnns
++ map drawPrettyAnns restPAnns
in (drawAnnotations anns'', prettyAnns)
where drawETypeAnnotation (ETypeLB t) = ["ETypeLB "] %+ prettyLines t
drawETypeAnnotation (ETypeUB t) = ["ETypeUB "] %+ prettyLines t
drawETypeAnnotation (EType t) = ["EType "] %+ prettyLines t
drawETypeAnnotation (EQType t) = ["EQType "] %+ prettyLines t
drawETypeAnnotation (EPType t) = ["EPType "] %+ prettyLines t
drawETypeAnnotation _ = error "Invalid argument to drawETypeAnnotation"
drawEEffectAnnotations (EProvenance p) = ["EProvenance "] %+ prettyLines p
drawEEffectAnnotations (ESEffect e) = ["ESEffect "] %+ prettyLines e
drawEEffectAnnotations (EFStructure e) = ["EFStructure "] %+ prettyLines e
drawEEffectAnnotations _ = error "Invalid effect annotation"
drawPrettyAnns (EApplyGen _ genId sEnv) = ["EApplyGen " ++ genId ++ " "] %+ prettyLines sEnv
drawPrettyAnns _ = []
isTypeAnn a = isETypeOrBound a || isEQType a || isEPType a
{- PrettyText instance -}
tPipe :: Text
tPipe = T.pack "|"
aPipe :: [Text] -> [Text]
aPipe t = t ++ [tPipe]
ntShift :: [Text] -> [Text]
ntShift = PT.shift (T.pack "+- ") (T.pack "| ")
tShift :: [Text] -> [Text]
tShift = PT.shift (T.pack "`- ") (T.pack " ")
tTA :: Bool -> String -> [Annotation Expression] -> [Text]
tTA asTerm s as =
let (annTxt, pAnnTxt) = drawExprAnnotationsT as in
let suffix = if null pAnnTxt then id else aPipe in
(suffix [T.append (T.pack s) annTxt])
++ (if null pAnnTxt then []
else if asTerm then tShift pAnnTxt else (aPipe $ tShift pAnnTxt))
instance PT.Pretty (K3 Expression) where
prettyLines (Node (ETuple :@: as) []) = tTA True "EUnit" as
prettyLines (Node (EConstant (CEmpty t) :@: as) []) =
tTA False ("EConstant CEmpty") as ++ PT.terminalShift t
prettyLines (Node (t :@: as) es) =
let (annTxt, pAnnTxt) = drawExprAnnotationsT as
shiftedTAnns = if null es then tShift pAnnTxt else ntShift pAnnTxt
in
[T.append (T.pack $ show t) annTxt] ++ shiftedTAnns ++ PT.drawSubTrees es
drawExprAnnotationsT :: [Annotation Expression] -> (Text, [Text])
drawExprAnnotationsT as =
let (typeAnns, anns) = partition (\a -> isETypeOrBound a || isEQType a || isEPType a) as
(effectAnns, anns') = partition isAnyEEffectAnn anns
prettyTypeAnns = case typeAnns of
[] -> []
[EType t] -> drawETypeAnnotationT $ EType t
[EQType t] -> drawETypeAnnotationT $ EQType t
[EPType t] -> drawETypeAnnotationT $ EPType t
[t, l, u] -> drawETypeAnnotationT t
PT.%+ PT.indent 2 (drawETypeAnnotationT l
PT.%+ PT.indent 2 (drawETypeAnnotationT u))
_ -> error "Invalid type bound annotations"
prettyAnns = PT.drawGroup $ [prettyTypeAnns] ++ map drawEEffectAnnotationsT effectAnns
in (PT.drawAnnotations anns', prettyAnns)
-- TODO: PT.Pretty instances for K3 Type, K3 Effect, K3 Symbol
where drawETypeAnnotationT (ETypeLB t) = [T.pack "ETypeLB "] PT.%+ PT.prettyLines t
drawETypeAnnotationT (ETypeUB t) = [T.pack "ETypeUB "] PT.%+ PT.prettyLines t
drawETypeAnnotationT (EType t) = [T.pack "EType "] PT.%+ PT.prettyLines t
drawETypeAnnotationT (EQType t) = [T.pack "EQType "] PT.%+ (map T.pack $ prettyLines t)
drawETypeAnnotationT (EPType t) = [T.pack "EPType "] PT.%+ PT.prettyLines t
drawETypeAnnotationT _ = error "Invalid argument to drawETypeAnnotation"
drawEEffectAnnotationsT (EProvenance p) = [T.pack "EProvenance "] PT.%+ PT.prettyLines p
drawEEffectAnnotationsT (ESEffect e) = [T.pack "ESEffect "] PT.%+ PT.prettyLines e
drawEEffectAnnotationsT (EFStructure e) = [T.pack "EFStructure "] PT.%+ PT.prettyLines e
drawEEffectAnnotationsT _ = error "Invalid effect annotation"
| yliu120/K3 | src/Language/K3/Core/Expression.hs | apache-2.0 | 14,424 | 0 | 18 | 3,611 | 4,169 | 2,210 | 1,959 | 338 | 15 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module DecisionProcedure.Opra4
( module DecisionProcedure.Opra
) where
-- standard modules
-- local modules
import Basics
import Calculus.Opra4
import DecisionProcedure
import DecisionProcedure.AlgebraicClosure
--import DecisionProcedure.AlgebraicGeometric
import DecisionProcedure.Opra
instance HasBinAClosureGqr ARel Opra4
instance HasBinAClosureGqr GRel Opra4
instance HasBinAClosureSparq ARel Opra4
instance HasBinAClosureSparq GRel Opra4
--instance HasAReasoning ARel Opra4
--instance HasAReasoning GRel Opra4
instance HasDecisionProcedure (ARel Opra4) where
procedures _ =
[ algebraicClosureGQR
, algebraicClosure
-- , algebraicClosureSpS
-- , algebraicReasoning
] ++ map (firstApply opramNetToOpraNetAtomic)
(procedures (undefined :: ARel Opra))
instance HasDecisionProcedure (GRel Opra4) where
procedures _ =
[ algebraicClosure
-- , algebraicReasoning
]
| spatial-reasoning/zeno | src/DecisionProcedure/Opra4.hs | bsd-2-clause | 1,035 | 0 | 11 | 211 | 163 | 90 | 73 | 21 | 0 |
{-# LANGUAGE Haskell2010 #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeOperators #-}
module Bug825 where
data a :~: b
data (:~~:) a b
| haskell/haddock | hoogle-test/src/Bug825/Bug825.hs | bsd-2-clause | 149 | 0 | 4 | 24 | 20 | 16 | 4 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Application.HXournal.Util.Verbatim
-- Copyright : (c) 2011, 2012 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <ianwookim@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
module Application.HXournal.Util.Verbatim where
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Lib
verbatim :: QuasiQuoter
verbatim = QuasiQuoter { quoteExp = litE . stringL
, quotePat = undefined
, quoteType = undefined
, quoteDec = undefined
}
-- , quotePat = litP . stringP
-- }
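-- Usage sketch: with QuasiQuotes enabled, [verbatim|hello "world"|] expands to
-- the string literal "hello \"world\"", i.e. the quoted text is embedded
-- verbatim as an expression of type String.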
| wavewave/hxournal | lib/Application/HXournal/Util/Verbatim.hs | bsd-2-clause | 724 | 0 | 7 | 209 | 74 | 53 | 21 | 8 | 1 |
{-# LANGUAGE GADTs, TemplateHaskell #-}
module Database.Drasil.SystemInformation(SystemInformation(..), Block(..),
citeDB, ReferenceDB, citationsFromBibMap, citationDB, rdb, RefMap, simpleMap,
conceptDB
) where
import Language.Drasil
import Theory.Drasil (DataDefinition)
import Database.Drasil.ChunkDB (ChunkDB)
import Control.Lens ((^.), makeLenses)
import Data.Function (on)
import Data.List (concatMap, find, groupBy, sortBy)
import qualified Data.Map as Map
-- | Data structure for holding all of the requisite information about a system
-- to be used in artefact generation
data SystemInformation where
--FIXME:
--There should be a way to remove redundant "Quantity" constraint.
-- I'm thinking for getting concepts that are also quantities, we could
-- use a lookup of some sort from their internal (Drasil) ids.
SI :: (CommonIdea a, Idea a, Idea b, HasName c,
Quantity e, Eq e, MayHaveUnit e, Quantity f, MayHaveUnit f, Concept f, Eq f,
Quantity h, MayHaveUnit h, Quantity i, MayHaveUnit i,
HasUID j, Constrained j) =>
{ _sys :: a
, _kind :: b
, _authors :: [c]
, _quants :: [e]
, _concepts :: [f]
, _definitions :: [QDefinition] --FIXME: will be removed upon migration to use of [DataDefinition] below
, _datadefs :: [DataDefinition]
, _inputs :: [h]
, _outputs :: [i]
, _defSequence :: [Block QDefinition]
, _constraints :: [j] --TODO: Add SymbolMap OR enough info to gen SymbolMap
, _constants :: [QDefinition]
, _sysinfodb :: ChunkDB
, _usedinfodb :: ChunkDB
, refdb :: ReferenceDB
, sampleData :: FilePath
} -> SystemInformation
-- | for listing QDefs in SystemInformation
data Block a = Coupled a a [a] | Parallel a [a]
-- | Helper for extracting bibliography
citeDB :: SystemInformation -> BibRef
citeDB si = citationsFromBibMap (_citationDB (refdb si))
citationsFromBibMap :: BibMap -> [Citation]
citationsFromBibMap bm = sortBy compareAuthYearTitle citations
where citations :: [Citation]
citations = map fst (Map.elems bm)
compareAuthYearTitle :: (HasFields c) => c -> c -> Ordering
compareAuthYearTitle c1 c2
| cp /= EQ = cp
| y1 /= y2 = y1 `compare` y2
| t1 /= t2 = t1 `compare` t2
| otherwise = error "Couldn't sort authors"
where
cp = comparePeople (getAuthor c1) (getAuthor c2)
y1 = getYear c1
y2 = getYear c2
t1 = getTitle c1
t2 = getTitle c2
getAuthor :: (HasFields c) => c -> People
getAuthor c = maybe (error "No author found") (\(Author x) -> x) (find isAuthor (c ^. getFields))
where isAuthor :: CiteField -> Bool
isAuthor (Author _) = True
isAuthor _ = False
getYear :: (HasFields c) => c -> Int
getYear c = maybe (error "No year found") (\(Year x) -> x) (find isYear (c ^. getFields))
where isYear :: CiteField -> Bool
isYear (Year _) = True
isYear _ = False
getTitle :: (HasFields c) => c -> String
getTitle c = maybe (error "No title found") (\(Title x) -> x) (find isTitle (c ^. getFields))
where isTitle :: CiteField -> Bool
isTitle (Title _) = True
isTitle _ = False
-- | Database for maintaining references.
-- The Int is that reference's number.
-- Maintains access to both num and chunk for easy reference swapping
-- between number and shortname/refname when necessary (or use of number
-- if no shortname exists)
type RefMap a = Map.Map UID (a, Int)
-- | Citation Database (bibliography information)
type BibMap = RefMap Citation
-- | ConceptInstance Database
type ConceptMap = RefMap ConceptInstance
-- | Database for internal references.
data ReferenceDB = RDB -- organized in order of appearance in SmithEtAl template
{ _citationDB :: BibMap
, _conceptDB :: ConceptMap
}
makeLenses ''ReferenceDB
rdb :: BibRef -> [ConceptInstance] -> ReferenceDB
rdb citations con = RDB (bibMap citations) (conceptMap con)
simpleMap :: HasUID a => [a] -> RefMap a
simpleMap xs = Map.fromList $ zip (map (^. uid) xs) (zip xs [1..])
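-- Sketch: 'simpleMap' numbers the given chunks in order, e.g. for chunks c1 and c2
-- it produces Map.fromList [(c1 ^. uid, (c1, 1)), (c2 ^. uid, (c2, 2))].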
bibMap :: [Citation] -> BibMap
bibMap cs = Map.fromList $ zip (map (^. uid) scs) (zip scs [1..])
where scs :: [Citation]
scs = sortBy compareAuthYearTitle cs
-- Sorting is necessary if using elems to pull all the citations
-- (as it sorts them and would change the order).
-- We can always change the sorting to whatever makes most sense
conGrp :: ConceptInstance -> ConceptInstance -> Bool
conGrp a b = cdl a == cdl b where
cdl :: ConceptInstance -> UID
cdl = sDom . cdom
conceptMap :: [ConceptInstance] -> ConceptMap
conceptMap cs = Map.fromList $ zip (map (^. uid) (concat grp)) $ concatMap
(\x -> zip x [1..]) grp
where grp :: [[ConceptInstance]]
grp = groupBy conGrp $ sortBy uidSort cs
uidSort :: HasUID c => c -> c -> Ordering
uidSort = compare `on` (^. uid)
| JacquesCarette/literate-scientific-software | code/drasil-database/Database/Drasil/SystemInformation.hs | bsd-2-clause | 4,769 | 0 | 11 | 994 | 1,395 | 778 | 617 | -1 | -1 |
module PureFunctionComposition where
pureFunctionComponent1 :: Int -> Int
pureFunctionComponent1 x = x * x
pureFunctionComponent2 :: Int -> Int
pureFunctionComponent2 x = x `mod` 5
pureFunctionCompositionPointStyle :: Int -> Int
pureFunctionCompositionPointStyle = pureFunctionComponent2 . pureFunctionComponent1
pureFunctionComposition :: Int -> Int
pureFunctionComposition x = pureFunctionComponent2(pureFunctionComponent1 x)
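-- Worked example: both styles compute the same result. For x = 7:
-- pureFunctionComponent1 7 = 49, and 49 `mod` 5 = 4, so
-- pureFunctionCompositionPointStyle 7 == pureFunctionComposition 7 == 4.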
| benkio/ThesisExamples | src/PureFunctionComposition.hs | bsd-3-clause | 432 | 0 | 7 | 49 | 92 | 50 | 42 | 9 | 1 |
----------------------------------------------------------------------------
--
-- Module : AssocList.hs
-- Copyright : (C) 2000-2002 Joe English. Freely redistributable.
-- License : "MIT-style"
--
-- Author : Joe English <jenglish@flightlab.com>
-- Stability : provisional
-- Portability : portable
--
-- CVS : $Id: AssocList.hs,v 1.5 2002/10/12 01:58:56 joe Exp $
--
----------------------------------------------------------------------------
--
-- Quick hack; need a stub FiniteMap implementation
--
module AssocList
( FM, unsafeLookup, lookupM, lookupWithDefault, empty
, insert , insertWith
) where
import Prelude -- hiding (null,map,foldr,foldl,foldr1,foldl1,filter)
type FM k a = [(k,a)]
lookupM :: (Eq k) => FM k a -> k -> Maybe a
lookupM = flip Prelude.lookup
lookupWithDefault :: (Eq key) => FM key elt -> elt -> key -> elt
lookupWithDefault m d = maybe d id . lookupM m
unsafeLookup :: (Eq a) => FM a b -> a -> b
unsafeLookup m = maybe (error "Error: Not found") id . lookupM m
insertWith :: (Eq k) => (a -> a -> a) -> k -> a -> FM k a -> FM k a
insertWith _ key elt [] = [(key,elt)]
insertWith c key elt ((k,e):l)
| k == key = (k,c e elt):l
| otherwise = (k,e):insertWith c key elt l
insert :: (Eq k) => k -> a -> FM k a -> FM k a
insert = insertWith (\_old new -> new)
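-- A small usage sketch of the API above: plain 'insert' overwrites an existing
-- binding, while 'insertWith' combines the old and new values.
exampleOverwrite, exampleCombine :: Bool
exampleOverwrite = insert 'a' (1 :: Int) (insert 'a' 0 empty)         == [('a', 1)]
exampleCombine   = insertWith (+) 'a' (1 :: Int) (insert 'a' 0 empty) == [('a', 1)]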
{-
-- GHC 'data' library convention:
addToFM_C :: (elt -> elt -> elt) -> FM key elt -> key -> elt -> FM key elt
addToFM :: FM key elt -> key -> elt -> FM key elt
lookupFM :: FM key elt -> key -> Maybe elt
lookupWithDefaultFM :: FM key elt -> elt -> key -> elt
-}
empty :: FM a b
empty = []
-- EOF --
| pepeiborra/xml-bench | hxml-0.2/AssocList.hs | bsd-3-clause | 1,614 | 4 | 10 | 322 | 443 | 243 | 200 | 20 | 1 |
--------------------------------------------------------------------------------
-- | Implementation of Hakyll commands: build, preview...
{-# LANGUAGE CPP #-}
module Hakyll.Commands
( build
, check
, clean
, preview
, rebuild
, server
, deploy
, watch
) where
--------------------------------------------------------------------------------
import System.Exit (exitWith, ExitCode)
import Control.Applicative
import Control.Concurrent
--------------------------------------------------------------------------------
import qualified Hakyll.Check as Check
import Hakyll.Core.Configuration
import Hakyll.Core.Logger (Verbosity)
import Hakyll.Core.Rules
import Hakyll.Core.Rules.Internal
import Hakyll.Core.Runtime
import Hakyll.Core.Util.File
--------------------------------------------------------------------------------
#ifdef WATCH_SERVER
import Hakyll.Preview.Poll (watchUpdates)
#endif
#ifdef PREVIEW_SERVER
import Hakyll.Preview.Server
#endif
--------------------------------------------------------------------------------
-- | Build the site
build :: Configuration -> Verbosity -> Rules a -> IO ExitCode
build conf verbosity rules = fst <$> run conf verbosity rules
--------------------------------------------------------------------------------
-- | Run the checker and exit
check :: Configuration -> Verbosity -> Check.Check -> IO ()
check config verbosity check' = Check.check config verbosity check' >>= exitWith
--------------------------------------------------------------------------------
-- | Remove the output directories
clean :: Configuration -> IO ()
clean conf = do
remove $ destinationDirectory conf
remove $ storeDirectory conf
remove $ tmpDirectory conf
where
remove dir = do
putStrLn $ "Removing " ++ dir ++ "..."
removeDirectory dir
--------------------------------------------------------------------------------
-- | Preview the site
preview :: Configuration -> Verbosity -> Rules a -> Int -> IO ()
#ifdef PREVIEW_SERVER
preview conf verbosity rules port = do
deprecatedMessage
watch conf verbosity port True rules
where
deprecatedMessage = mapM_ putStrLn [ "The preview command has been deprecated."
, "Use the watch command for recompilation and serving."
]
#else
preview _ _ _ _ = previewServerDisabled
#endif
--------------------------------------------------------------------------------
-- | Watch and recompile for changes
watch :: Configuration -> Verbosity -> Int -> Bool -> Rules a -> IO ()
#ifdef WATCH_SERVER
watch conf verbosity port runServer rules = do
watchUpdates conf update
_ <- forkIO (server')
loop
where
update = do
(_, ruleSet) <- run conf verbosity rules
return $ rulesPattern ruleSet
loop = threadDelay 100000 >> loop
server' = if runServer then server conf port else return ()
#else
watch _ _ _ _ _ = watchServerDisabled
#endif
--------------------------------------------------------------------------------
-- | Rebuild the site
rebuild :: Configuration -> Verbosity -> Rules a -> IO ExitCode
rebuild conf verbosity rules =
clean conf >> build conf verbosity rules
--------------------------------------------------------------------------------
-- | Start a server
server :: Configuration -> Int -> IO ()
#ifdef PREVIEW_SERVER
server conf port = do
let destination = destinationDirectory conf
staticServer destination preServeHook port
where
preServeHook _ = return ()
#else
server _ _ = previewServerDisabled
#endif
--------------------------------------------------------------------------------
-- | Upload the site
deploy :: Configuration -> IO ExitCode
deploy conf = deploySite conf conf
--------------------------------------------------------------------------------
-- | Print a warning message about the preview serving not being enabled
#ifndef PREVIEW_SERVER
previewServerDisabled :: IO ()
previewServerDisabled =
mapM_ putStrLn
[ "PREVIEW SERVER"
, ""
, "The preview server is not enabled in the version of Hakyll. To"
, "enable it, set the flag to True and recompile Hakyll."
, "Alternatively, use an external tool to serve your site directory."
]
#endif
#ifndef WATCH_SERVER
watchServerDisabled :: IO ()
watchServerDisabled =
mapM_ putStrLn
[ "WATCH SERVER"
, ""
, "The watch server is not enabled in the version of Hakyll. To"
, "enable it, set the flag to True and recompile Hakyll."
, "Alternatively, use an external tool to serve your site directory."
]
#endif
| freizl/freizl.github.com-old | src/Hakyll/Commands.hs | bsd-3-clause | 4,829 | 0 | 11 | 1,019 | 774 | 418 | 356 | 59 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleContexts, ConstraintKinds, FlexibleInstances #-}
module System.Hermite.Keys (
Keybinding(..)
, Runnable(..)
, HermiteState(..)
, NoState
, swap
, runKeys
, setKey
, bindkeys
, keyPressed
) where
import Control.Monad.IO.Class
import Control.Monad.State
import Graphics.UI.Gtk.Gdk.EventM
import Graphics.UI.Gtk hiding (get)
import Data.IORef
-- A keybinding type that can be matched against a pressed key
-- and a state. If it matches, the action mapped to that key is run.
class (Eq a, Runnable a) => Keybinding a b where
match :: b -> [Modifier] -> String -> a -> Bool
class Runnable a where
run :: a -> IO ()
-- run :: a -> IO b -> IO ()
class HermiteState s where
stateNew :: a -> IO (s a)
stateGet :: (s a) -> IO a
stateWrite :: (s a) -> a -> IO ()
-- A bogus state, to use when no state is needed
instance HermiteState ((->) ()) where
stateNew a = return $ const a
stateGet sa = return $ sa ()
stateWrite _ _ = return ()
instance HermiteState IORef where
stateNew = newIORef
stateGet = readIORef
stateWrite = writeIORef
-- A dummy state, for when you don't want a state.
type NoState = ((->) () ())
swap :: Eq a => a -> a -> a
swap k1 k2 = if k1 == k2 then k2 else k1
runKeys :: (Keybinding a b) => [a] -> State [a] () -> [a]
runKeys = flip execState
-- change the keys in a monadic way.
setKey :: (Eq a, MonadState [a] m) => (a -> a -> a) -> a -> m ()
setKey f key' = do
vec <- get
let matched = filter ((==) $ key') vec
case matched of
[] -> put $ key' : vec
_ -> put $ map (f key') vec
-- bindkeys takes a set of bindings, a widget for the bindings and
-- a starting state.
bindkeys :: (WidgetClass object, HermiteState m, Keybinding a b) =>
[a] -> object -> m b -> IO (ConnectId object)
bindkeys bindings vte state' = on vte keyPressEvent $ keyPressed state' bindings
keyPressed :: (HermiteState m, Keybinding a b) => (m b) -> [a] -> EventM EKey Bool
keyPressed b bindings = do
m <- eventModifier
key' <- eventKeyName
b' <- liftIO $ stateGet b
let matched = filter (match b' m key') bindings
case matched of
[] -> return False
(a:_) -> liftIO (run a) >> return True
| dagle/hermite | src/System/Hermite/Keys.hs | bsd-3-clause | 2,305 | 0 | 13 | 597 | 804 | 424 | 380 | 56 | 2 |
-- !!! local aliases
module M where
import qualified Maybe as M
import qualified List as M
x = M.length
b = M.isJust
| FranklinChen/Hugs | tests/static/mod106.hs | bsd-3-clause | 119 | 0 | 5 | 25 | 31 | 22 | 9 | 5 | 1 |
module Data.RDF.Graph.MapSP_Test (triplesOf',uniqTriplesOf',empty',mkRdf') where
import Data.RDF.Types
import Data.RDF.Graph.MapSP (MapSP)
import Data.RDF.GraphTestUtils
import qualified Data.Map as Map
import Control.Monad
import Test.QuickCheck
instance Arbitrary MapSP where
arbitrary = liftM3 mkRdf arbitraryTs (return Nothing) (return $ PrefixMappings Map.empty)
--coarbitrary = undefined
empty' :: MapSP
empty' = empty
mkRdf' :: Triples -> Maybe BaseUrl -> PrefixMappings -> MapSP
mkRdf' = mkRdf
triplesOf' :: MapSP -> Triples
triplesOf' = triplesOf
uniqTriplesOf' :: MapSP -> Triples
uniqTriplesOf' = uniqTriplesOf
| jutaro/rdf4h | testsuite/tests/Data/RDF/Graph/MapSP_Test.hs | bsd-3-clause | 633 | 0 | 10 | 84 | 168 | 99 | 69 | 17 | 1 |
module LAuREL.Parser (parseLAuREL) where
import LAuREL.Types
import System.IO
import Control.Monad
import Control.Applicative ((<$>), (<*), (*>))
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import Text.ParserCombinators.Parsec.Language
import qualified Text.ParserCombinators.Parsec.Token as Token
_prefix_opers = ["not", "¬"]
_infix_opers = ["+", "-", "*", "/", ":=", ">", "<", "<=", "≤", "⩽", ">=", "≥", "⩾", "eq", "==", "=", "neq", "/=", "≠", "and", "&&", "∧", "⋀", "or", "||", "∨", "⋁", "⊕", "⊗", "@", "!", ">>"]
_rassoc_opers = ["$"]
languageDef =
emptyDef { Token.commentStart
= "{%",
Token.commentEnd
= "%}",
Token.commentLine
= "%",
Token.identStart
= letter,
Token.identLetter
= alphaNum <|> oneOf (['_']),
Token.reservedNames
= [ "if", "else", "end",
"let", "in",
"true", "false",
"->", "→", "λ", "\\", "=", ":" ],
Token.reservedOpNames
= _infix_opers ++ _prefix_opers ++ _rassoc_opers,
Token.caseSensitive
= True
}
lexer
= Token.makeTokenParser languageDef
identifier
= Token.identifier lexer
reserved
= Token.reserved lexer
comma
= Token.comma lexer
reservedOp
= Token.reservedOp lexer
parens
= Token.parens lexer
brackets
= Token.brackets lexer
integer
= Token.integer lexer
floating
= Token.float lexer
stringLiteral
= Token.stringLiteral lexer
whiteSpace
= Token.whiteSpace lexer
type_
= whiteSpace
>> ((brackets type_ >>= \a -> return $ "["++a++"]")
<|> do c <- upper
cs <- many alphaNum
return (c:cs))
types
= whiteSpace
>> sepBy type_ ((try $ whiteSpace *> reserved "->" <* whiteSpace) <|> (try $ whiteSpace *> reserved "→" <* whiteSpace))
types'
= whiteSpace
>> sepBy1 type_ ((try $ whiteSpace *> reserved "->" <* whiteSpace) <|> (try $ whiteSpace *> reserved "→" <* whiteSpace))
arguments
= whiteSpace
>> sepBy identifier whiteSpace
arguments'
= whiteSpace
>> sepBy1 identifier whiteSpace
parseLAuREL ::
String
-> Expr
parseLAuREL input
= case parse laurelParser "" input of
Right parsed -> Root parsed
Left err -> error $ show err
laurelParser ::
Parser Exprs
laurelParser
= whiteSpace
>> many1 functionDefinition
functionDefinition ::
Parser Expr
functionDefinition
= do name <- identifier
reserved ":"
types <- types'
newline
string name
args <- arguments
reservedOp ":="
body <- statement
reserved "."
return Fun { funId = name,
funDoc = Nothing,
funType = types,
funArgs = args,
funMain = body }
statement ::
Parser Expr
statement
= exprStatement
<|> ifStatement
<|> lambdaStatement
<|> letStatement
<|> callStatement
ifStatement ::
Parser Expr
ifStatement
= do reserved "if"
condition <- exprStatement
reserved "->" <|> reserved "→"
ifpos <- statement
reserved "else"
ifneg <- statement
return If { ifCond = condition,
ifTrue = ifpos,
ifFalse = ifneg }
lambdaStatement ::
Parser Expr
lambdaStatement
= do reserved "λ" <|> reserved "\\"
args <- arguments'
reserved "->" <|> reserved "→"
body <- statement
return Lambda { lambdaArgs = args,
lambdaMain = body }
letStatement ::
Parser Expr
letStatement
= do reserved "let"
name <- identifier
reservedOp ":="
value <- statement
reserved "in"
body <- statement
return Let { letName = name,
letValue = value,
letMain = body }
callStatement ::
Parser Expr
callStatement
= do name <- identifier
args <- many exprStatement
return Call { callId = name,
callArgs = args }
exprStatement ::
Parser Expr
exprStatement
= buildExpressionParser oOperators oTerms
where
oTerms
= parens statement
<|> callStatement
<|> Type <$> dataP
dataP
= (do reserved "true"
return $ Bool $ True)
<|> (do reserved "false"
return $ Bool $ False)
<|> (do reservedOp "-"
i <- integer
return $ Integer $ (-) 0 $ fromInteger i)
<|> (do i <- integer
return $ Integer $ fromInteger i)
<|> (do reservedOp "-"
f <- floating
return $ Float $ 0 - f)
<|> (do f <- floating
return $ Float f)
<|> (do l <- brackets (sepBy1 dataP comma)
return $ List l)
<|> (do s <- stringLiteral
return $ String s)
oOperators
= [ [Infix (reservedOp op >> return (Op op)) AssocLeft ] | op <- _infix_opers ]
++ [ [Infix (reservedOp op >> return (Op op)) AssocRight ] | op <- _rassoc_opers ]
| davbaumgartner/LAuREL | LAuREL/Parser.hs | bsd-3-clause | 5,661 | 1 | 19 | 2,203 | 1,516 | 788 | 728 | 179 | 2 |
{-# LANGUAGE GADTs, OverloadedStrings #-}
module WebLog where
import Control.Monad.Reader
import Data.Aeson.Types
import Data.Aeson.Encode as E
import Data.Aeson.Parser
import qualified Data.Attoparsec as A
import qualified Data.ByteString.Lazy.Char8 as C
import qualified Data.ByteString.Char8 as SC
import Network.HTTP.Types
import Network.HTTP.Types.Status
import Network.HTTP.Conduit
import System.FilePath
-- from other hep-platform package
import Application.WebLogger.Type
-- from this package
import Control.Monad.Coroutine
import Control.Monad.Coroutine.Logger
import Control.Monad.Coroutine.Object
type Url = String
-- |
weblogger :: (MonadIO m) => Url -> LogServer m ()
weblogger url = webloggerW url 0
-- |
webloggerW :: (MonadIO m) => Url -> Int -> LogServer m ()
webloggerW url num = ReaderT (f num)
where
f n req =
case req of
Input WriteLog msg -> do
let logmsg = WebLoggerInfo ("log number " ++ show n ++ " : " ++ msg)
liftIO $ comm url "upload" methodPost logmsg
req' <- request (Output WriteLog ())
f (n+1) req'
-- |
comm :: Url
-> String
-> Method
-> WebLoggerInfo
-> IO (Either String (Result Value))
comm url api mthd mi = do
request <- parseUrl (url </> api)
withManager $ \manager -> do
let mijson = E.encode (toJSON mi)
myrequestbody = RequestBodyLBS mijson
let requestjson = request
{ method = mthd
, requestHeaders = [ ("Accept", "application/json; charset=utf-8") ]
, requestBody = myrequestbody }
r <- httpLbs requestjson manager
if statusCode (responseStatus r) == 200
then return . parseJson . SC.concat . C.toChunks . responseBody $ r
else return (Left $ "status code : " ++ show (statusCode (responseStatus r)))
-- |
parseJson :: (FromJSON a) => SC.ByteString -> Either String (Result a)
parseJson bs =
let resultjson = A.parse json bs
in case resultjson of
(A.Done rest rjson) -> return (parse parseJSON rjson)
_ -> Left "parseJson"
| wavewave/cmdmanager | src/WebLog.hs | bsd-3-clause | 2,210 | 4 | 21 | 629 | 650 | 346 | 304 | 53 | 2 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
import Language.Haskell.Liquid.Prelude
goo x = let z = [x] in z
z0 _ = True
z1 _ = False
poo (x:_) = True
poo ([]) = liquidAssertB False
xs = goo (choose 0)
prop0 = liquidAssertB True
prop1 = liquidAssertB (poo xs)
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/meas0a.hs | bsd-3-clause | 281 | 0 | 9 | 64 | 117 | 61 | 56 | 11 | 1 |
module Music.Time.Reactive (
-- * Reactive type
Reactive,
-- * Query
initial,
final,
intermediate,
discrete,
updates,
occs,
atTime,
-- * Construction
-- * Combine
switchR,
trimR,
-- * Split
splitReactive,
-- * Sampling
Segment,
continous,
continousWith,
sample,
-- TODO
-- window,
-- windowed,
) where
-- Reactive values, or piecewise functions of time.
--
-- Similar to Conal's definition in <http://conal.net/blog/posts/reactive-normal-form>,
-- but defined in negative time as well. Its semantics function is either 'occs' @&&&@ '?'
-- /or/ 'initial' @&&&@ 'updates', where 'initial' is the value from negative infinity
-- to the first update.
--
-- TODO integrate better in the library
--
import Control.Applicative
import Control.Lens hiding (Indexable, Level, above, below,
index, inside, parts, reversed,
transform, (<|), (|>))
import Control.Monad
import Control.Monad.Plus
import Data.Distributive
import Data.Maybe (fromJust)
import Data.Functor.Rep
import Data.Functor.Rep.Lens
import qualified Data.List as List
import Data.Semigroup hiding ()
import Data.Typeable
import Music.Dynamics.Literal
import Music.Pitch.Alterable
import Music.Pitch.Augmentable
import Music.Pitch.Literal
import Music.Pitch.Literal
import Music.Time.Behavior
import Music.Time.Event
import Music.Time.Score
import Music.Time.Juxtapose
-- |
-- Forms an applicative as per 'Behavior', but only switches at discrete points.
--
newtype Reactive a = Reactive { getReactive :: ([Time], Behavior a) }
deriving (Functor, Semigroup, Monoid, Typeable)
{-
TODO Semantic fuzz
Reactive implies that values change at switchpoints, but should not make assumptions about what the value is *at* the
switchpoint.
Behavior represents continuous values, so it knows the value at each switchpoint (semantically: Time -> a).
Hence the combinator (switch' :: Time -> B a -> B a -> B a -> B a) makes sense.
Reactive can do this as well (i.e. with the current semantics: ([Time], Behavior a)), however this is not necessarily
desirable.
Bad:
updates - Promotes association of Time with value (though it makes no assumption that the Reactive *is* this value at the given time).
discrete/atTime/continous - Forces implementation to choose arbitrary value at switchpoint
-}
instance Transformable (Reactive a) where
transform s (Reactive (t,r)) = Reactive (transform s t, transform s r)
instance Reversible (Reactive a) where
rev = stretch (-1)
instance Wrapped (Reactive a) where
type Unwrapped (Reactive a) = ([Time], Behavior a)
_Wrapped' = iso getReactive Reactive
instance Rewrapped (Reactive a) (Reactive b)
instance Applicative Reactive where
pure = pureDefault
where
pureDefault = view _Unwrapped . pure . pure
(<*>) = apDefault
where
(view _Wrapped -> (tf, rf)) `apDefault` (view _Wrapped -> (tx, rx)) = view _Unwrapped (tf <> tx, rf <*> rx)
instance IsPitch a => IsPitch (Reactive a) where
fromPitch = pure . fromPitch
instance IsInterval a => IsInterval (Reactive a) where
fromInterval = pure . fromInterval
instance IsDynamics a => IsDynamics (Reactive a) where
fromDynamics = pure . fromDynamics
instance Alterable a => Alterable (Reactive a) where
sharpen = fmap sharpen
flatten = fmap flatten
instance Augmentable a => Augmentable (Reactive a) where
augment = fmap augment
diminish = fmap diminish
-- |
-- Get the initial value.
--
initial :: Reactive a -> a
initial r = r `atTime` minB (occs r)
where
-- If there are no updates, just use value at time 0
-- Otherwise pick an arbitrary time /before/ the first value
-- It looks strange but it works
minB [] = 0
minB (x:_) = x - 1
-- | Get the time of all updates and the value switched to at this point.
updates :: Reactive a -> [(Time, a)]
updates r = (\t -> (t, r `atTime` t)) <$> (List.sort . List.nub) (occs r)
occs :: Reactive a -> [Time]
occs = fst . (^. _Wrapped')
-- | Split a reactive into events, as well as the values before and after the first/last update
splitReactive :: Reactive a -> Either a ((a, Time), [Event a], (Time, a))
splitReactive r = case updates r of
[] -> Left (initial r)
(t,x):[] -> Right ((initial r, t), [], (t, x))
(t,x):xs -> Right ((initial r, t), fmap mkEvent $ mrights (res $ (t,x):xs), head $ mlefts (res $ (t,x):xs))
where
mkEvent (t,u,x) = (t <-> u, x)^.event
-- Always returns a 0 or more Right followed by one left
res :: [(Time, a)] -> [Either (Time, a) (Time, Time, a)]
res rs = let (ts,xs) = unzip rs in
flip fmap (withNext ts `zip` xs) $
\ ((t, mu), x) -> case mu of
Nothing -> Left (t, x)
Just u -> Right (t, u, x)
-- length xs == length (withNext xs)
withNext :: [a] -> [(a, Maybe a)]
withNext = go
where
go [] = []
go [x] = [(x, Nothing)]
go (x:y:rs) = (x, Just y) : withNext (y : rs)
atTime :: Reactive a -> Time -> a
atTime = (!) . snd . (^. _Wrapped')
-- |
-- Get the final value.
--
final :: Reactive a -> a
final x = case (initial x, updates x) of
(i,[]) -> i
(i,xs) -> snd $ last xs
-- | @switch t a b@ behaves as @a@ before time @t@, then as @b@.
switchR :: Time -> Reactive a -> Reactive a -> Reactive a
switchR t (Reactive (tx, bx)) (Reactive (ty, by)) = Reactive $ (,)
(filter (< t) tx <> [t] <> filter (> t) ty) (switch t bx by)
trimR :: Monoid a => Span -> Reactive a -> Reactive a
trimR (view onsetAndOffset -> (t, u)) x = switchR t mempty (switchR u x mempty)
-- |
-- Get all intermediate values.
--
intermediate :: Transformable a => Reactive a -> [Event a]
intermediate (updates -> []) = []
intermediate (updates -> xs) = fmap (\((t1, x), (t2, _)) -> (t1 <-> t2, x)^.event) $ withNext $ xs
where
withNext xs = zip xs (tail xs)
-- |
-- Realize a 'Reactive' value as a discretely changing behavior.
--
discrete :: Reactive a -> Behavior a
discrete = continous . fmap pure
type Segment a = Behavior a
-- | Realize a 'Reactive' value as a continuous behavior.
continous :: Reactive (Segment a) -> Behavior a
continous = join . reactiveToBehavior
where
reactiveToBehavior (Reactive (_,b)) = b
-- TODO this is actually wrong! (Reactive vs Segment!)
-- Fine in the span where 'intermediate' is defined:
-- continous = g
-- where
-- g = \x -> fmap (fromJust.fmap getFirst.getOption) . continousM . fmap (fmap (Option . Just . First)) $ x
--
-- continousM :: Monoid a => Reactive (Behavior a) -> Behavior a
-- continousM = concatB . view score . intermediate
-- | Realize a 'Reactive' value as a continuous behavior.
continousWith :: Segment (a -> b) -> Reactive a -> Behavior b
continousWith f x = continous $ liftA2 (<*>) (pure f) (fmap pure x)
-- | Sample a 'Behavior' into a reactive.
sample :: [Time] -> Behavior a -> Reactive a
sample ts b = Reactive (ts,b)
| FranklinChen/music-score | src/Music/Time/Reactive.hs | bsd-3-clause | 7,512 | 0 | 15 | 2,092 | 2,020 | 1,130 | 890 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
module Stack.Setup.Installed
( getCompilerVersion
, markInstalled
, unmarkInstalled
, listInstalled
, Tool (..)
, toolString
, toolNameString
, parseToolText
, ExtraDirs (..)
, extraDirs
, installDir
) where
import Control.Applicative
import Control.Monad.Catch
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control
import qualified Data.ByteString.Char8 as S8
import Data.List hiding (concat, elem, maximumBy)
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Distribution.System (Platform (..))
import qualified Distribution.System as Cabal
import Path
import Path.IO
import Prelude hiding (concat, elem) -- Fix AMP warning
import Stack.Types
import qualified System.FilePath as FP
import System.Process.Read
data Tool
= Tool PackageIdentifier -- ^ e.g. ghc-7.8.4, msys2-20150512
| ToolGhcjs CompilerVersion -- ^ e.g. ghcjs-0.1.0_ghc-7.10.2
toolString :: Tool -> String
toolString (Tool ident) = packageIdentifierString ident
toolString (ToolGhcjs cv) = compilerVersionString cv
toolNameString :: Tool -> String
toolNameString (Tool ident) = packageNameString $ packageIdentifierName ident
toolNameString ToolGhcjs{} = "ghcjs"
parseToolText :: Text -> Maybe Tool
parseToolText (parseCompilerVersion -> Just (cv@GhcjsVersion{})) = Just (ToolGhcjs cv)
parseToolText (parsePackageIdentifierFromString . T.unpack -> Just pkgId) = Just (Tool pkgId)
parseToolText _ = Nothing
markInstalled :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m)
=> Tool
-> m ()
markInstalled tool = do
dir <- asks $ configLocalPrograms . getConfig
fpRel <- parseRelFile $ toolString tool ++ ".installed"
liftIO $ writeFile (toFilePath $ dir </> fpRel) "installed"
unmarkInstalled :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m)
=> Tool
-> m ()
unmarkInstalled tool = do
dir <- asks $ configLocalPrograms . getConfig
fpRel <- parseRelFile $ toolString tool ++ ".installed"
removeFileIfExists $ dir </> fpRel
listInstalled :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m)
=> m [Tool]
listInstalled = do
dir <- asks $ configLocalPrograms . getConfig
createTree dir
(_, files) <- listDirectory dir
return $ mapMaybe toTool files
where
toTool fp = do
x <- T.stripSuffix ".installed" $ T.pack $ toFilePath $ filename fp
parseToolText x
getCompilerVersion :: (MonadLogger m, MonadCatch m, MonadBaseControl IO m, MonadIO m)
=> EnvOverride -> WhichCompiler -> m CompilerVersion
getCompilerVersion menv wc =
case wc of
Ghc -> do
bs <- readProcessStdout Nothing menv "ghc" ["--numeric-version"]
let (_, ghcVersion) = versionFromEnd bs
GhcVersion <$> parseVersion ghcVersion
Ghcjs -> do
-- Output looks like
--
-- The Glorious Glasgow Haskell Compilation System for JavaScript, version 0.1.0 (GHC 7.10.2)
bs <- readProcessStdout Nothing menv "ghcjs" ["--version"]
let (rest, ghcVersion) = versionFromEnd bs
(_, ghcjsVersion) = versionFromEnd rest
GhcjsVersion <$> parseVersion ghcjsVersion <*> parseVersion ghcVersion
where
versionFromEnd = S8.spanEnd isValid . fst . S8.breakEnd isValid
isValid c = c == '.' || ('0' <= c && c <= '9')
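-- A sketch of how versionFromEnd above peels a version off the end of the
-- compiler output (illustrative strings, not real compiler runs):
--
--   versionFromEnd "version 0.1.0 (GHC 7.10.2)"  gives  ("version 0.1.0 (GHC ", "7.10.2")
--   versionFromEnd "version 0.1.0 (GHC "         gives  ("version ", "0.1.0")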
-- | Binary directories for the given installed package
extraDirs :: (MonadReader env m, HasConfig env, MonadThrow m, MonadLogger m)
=> Tool
-> m ExtraDirs
extraDirs tool = do
platform <- asks getPlatform
dir <- installDir tool
case (platform, toolNameString tool) of
(Platform _ Cabal.Windows, isGHC -> True) -> return mempty
{ edBins = goList
[ dir </> $(mkRelDir "bin")
, dir </> $(mkRelDir "mingw") </> $(mkRelDir "bin")
]
}
(Platform _ Cabal.Windows, "msys2") -> return mempty
{ edBins = goList
[ dir </> $(mkRelDir "usr") </> $(mkRelDir "bin")
]
, edInclude = goList
[ dir </> $(mkRelDir "mingw64") </> $(mkRelDir "include")
, dir </> $(mkRelDir "mingw32") </> $(mkRelDir "include")
]
, edLib = goList
[ dir </> $(mkRelDir "mingw64") </> $(mkRelDir "lib")
, dir </> $(mkRelDir "mingw32") </> $(mkRelDir "lib")
]
}
(_, isGHC -> True) -> return mempty
{ edBins = goList
[ dir </> $(mkRelDir "bin")
]
}
(_, isGHCJS -> True) -> return mempty
{ edBins = goList
[ dir </> $(mkRelDir "bin")
]
}
(Platform _ x, toolName) -> do
$logWarn $ "binDirs: unexpected OS/tool combo: " <> T.pack (show (x, toolName))
return mempty
where
goList = map toFilePathNoTrailingSlash
isGHC n = "ghc" == n || "ghc-" `isPrefixOf` n
isGHCJS n = "ghcjs" == n
data ExtraDirs = ExtraDirs
{ edBins :: ![FilePath]
, edInclude :: ![FilePath]
, edLib :: ![FilePath]
}
instance Monoid ExtraDirs where
mempty = ExtraDirs [] [] []
mappend (ExtraDirs a b c) (ExtraDirs x y z) = ExtraDirs
(a ++ x)
(b ++ y)
(c ++ z)
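-- Combining two values concatenates each field, e.g. (illustrative paths)
--
--   ExtraDirs ["/a/bin"] [] [] <> ExtraDirs ["/b/bin"] ["/b/include"] []
--
-- gives ExtraDirs ["/a/bin","/b/bin"] ["/b/include"] [].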
installDir :: (MonadReader env m, HasConfig env, MonadThrow m, MonadLogger m)
=> Tool
-> m (Path Abs Dir)
installDir tool = do
config <- asks getConfig
reldir <- parseRelDir $ toolString tool
return $ configLocalPrograms config </> reldir
toFilePathNoTrailingSlash :: Path loc Dir -> FilePath
toFilePathNoTrailingSlash = FP.dropTrailingPathSeparator . toFilePath
|
rrnewton/stack
|
src/Stack/Setup/Installed.hs
|
bsd-3-clause
| 6,255
| 0
| 19
| 1,840
| 1,768
| 919
| 849
| 145
| 5
|
{-# LANGUAGE KindSignatures, RankNTypes #-}
module Data.Sequence.Strategies where
import Control.Monad
import Control.Parallel.Strategies
import qualified Data.Sequence as S
import qualified Data.Foldable as F
{-# INLINE parSequence #-}
parSequence :: forall (t :: * -> *) a.
(F.Foldable t, NFData a) =>
Int
-> t a
-> Eval (S.Seq a)
parSequence n = liftM S.fromList . parListChunk n rdeepseq . F.toList
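-- A usage sketch (illustrative): force a list in chunks of 100 elements in
-- parallel and collect the results into a Seq.
--
--   runEval (parSequence 100 [1 .. 10000 :: Int])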
|
Smurf/Tempoist
|
src/Data/Sequence/Strategies.hs
|
bsd-3-clause
| 457
| 0
| 12
| 113
| 124
| 71
| 53
| 13
| 1
|
module Sexy.Instances.Show.Char () where
import Sexy.Classes (Show(..))
import Sexy.Data (Char)
import qualified Prelude as P
instance Show Char where
show = P.show
|
DanBurton/sexy
|
src/Sexy/Instances/Show/Char.hs
|
bsd-3-clause
| 169
| 0
| 6
| 26
| 56
| 36
| 20
| 6
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeOperators #-}
module UseHaskellAPIClient where
import Data.Proxy
import Servant.API
import Servant.Client
import UseHaskellAPI
restLoginAPI :: Proxy LoginAPI
restLoginAPI = Proxy
restLockingAPI :: Proxy LockingAPI
restLockingAPI = Proxy
restFileAPI :: Proxy FileAPI
restFileAPI = Proxy
restDirAPI :: Proxy DirAPI
restDirAPI = Proxy
restTAPI :: Proxy TAPI
restTAPI = Proxy
login :: Message -> ClientM [ResponseData]
signup :: Message -> ClientM ResponseData
lock :: Message -> ClientM Bool
unlock :: Message -> ClientM Bool
islocked :: Message -> ClientM Bool
download :: Message -> ClientM [Message]
upload :: StrWrap3 -> ClientM Bool
pushFile :: StrWrap3 -> ClientM [FileMapping]
pullFile :: StrWrap3 -> ClientM [FileMapping]
directories :: StrWrap1 -> ClientM [DirFiles]
dirFiles :: Message -> ClientM [DirFiles]
commitConfirmation :: Message -> ClientM Bool
newTransaction :: StrWrap1 -> ClientM ResponseData
addFile :: Tref -> ClientM Bool
abortTransaction :: Message -> ClientM Bool
commitTransaction :: Message -> ClientM Bool
-- | The following provides the implementations of the client functions declared above
-- Note that the order of the functions must match the endpoints in the type API from UseHaskell.hs
(login :<|> signup) = client restLoginAPI
(lock :<|> unlock :<|> islocked) = client restLockingAPI
(download :<|> upload) = client restFileAPI
(pushFile :<|> pullFile :<|> directories :<|> dirFiles) = client restDirAPI
(commitConfirmation :<|> newTransaction :<|> addFile :<|> abortTransaction :<|> commitTransaction) = client restTAPI
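-- A usage sketch (hypothetical message and client environment):
--
--   result <- runClientM (login someMessage) clientEnv
--
-- where someMessage :: Message and clientEnv :: ClientEnv are supplied by the caller.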
|
EEviston/distro_api
|
src/UseHaskellAPIClient.hs
|
bsd-3-clause
| 1,789
| 0
| 9
| 315
| 409
| 217
| 192
| 42
| 1
|
module Wikirick.Backends.Repository
( module Wikirick.Repository
, initRepository
, makeRepository
) where
import qualified Control.Concurrent.ReadWriteLock as RWL
import Control.Monad.CatchIO
import qualified Data.Attoparsec as A
import qualified Data.ByteString as BS
import qualified Data.Char as C
import qualified Data.Text.Encoding as TE
import qualified Data.Time as Time
import Data.Word
import Snap
import System.Exit
import System.FilePath
import qualified System.IO.Streams as S
import qualified System.IO.Streams.Attoparsec as SA
import qualified System.Locale as L
import Wikirick.Import
import Wikirick.Repository
import Wikirick.Util
initRepository :: Repository -> SnapletInit b Repository
initRepository = makeSnaplet "repo" "Serves Wiki articles" Nothing . return
makeRepository :: FilePath -> IO Repository
makeRepository dbDir = do
lock <- RWL.new
return Repository
{ _fetchArticle = \title -> liftIO $
RWL.withRead lock $
fetchArticle' title ["-p", title ^. unpacked]
, _fetchRevision = \title rev -> liftIO $
RWL.withRead lock $ do
when (rev < 1) $ throw InvalidRevision
fetchArticle' title ["-r1." <> show rev, "-p", title ^. unpacked]
, _postArticle = \a -> liftIO $
RWL.withWrite lock $ do
checkOutRCSFile a
S.withFileAsOutput (articlePath a) $ \out -> do
textOut <- S.encodeUtf8 out
S.write (Just $ a ^. articleSource) textOut
(in_, _, err, p) <- runInteractiveProcess "ci" [a ^. articleTitle . unpacked]
S.write Nothing in_
S.waitForProcess p >>= \case
ExitSuccess -> return ()
_ -> throwFromRCSError err
, _fetchAllArticleTitles = undefined
}
where
fetchArticle' title coOptions = do
(_, out, err, p) <- runInteractiveProcess "co" coOptions
source <- consumeText =<< S.decodeUtf8 out
rev <- S.waitForProcess p >>= \case
ExitSuccess -> SA.parseFromStream revParser err
_ -> throwFromRCSError err
(_, out', err', p') <- runInteractiveProcess "rlog" ["-r1." <> show rev, title ^. unpacked]
log' <- S.waitForProcess p' >>= \case
ExitSuccess -> SA.parseFromStream rlogParser out'
_ -> throwFromRCSError err'
return $ def
& articleTitle .~ title
& articleSource .~ source
& articleRevision .~ Just rev
& editLog .~ Just log'
checkOutRCSFile article = do
(_, _, err, p) <- runInteractiveProcess "co" ["-l", article ^. articleTitle . unpacked]
S.waitForProcess p >>= \case
ExitSuccess -> return ()
_ -> throwFromRCSError err `catch` \case
ArticleNotFound -> return ()
e -> throw e
articlePath a = dbDir </> a ^. articleTitle . unpacked
runInteractiveProcess cmd opts = S.runInteractiveProcess cmd opts (Just dbDir) Nothing
throwFromRCSError :: S.InputStream BS.ByteString -> IO a
throwFromRCSError = throw <=< SA.parseFromStream errorParser
errorParser :: A.Parser RepositoryException
errorParser
= ArticleNotFound <$ A.try (skipToAfterColon *> skipToAfterColon *> A.string " No such file or directory\n")
<|> RepositoryException <$> consumeAll where
consumeAll = BS.pack <$> A.manyTill A.anyWord8 A.endOfInput
skipToAfterColon = skipTill $ A.word8 $ c2w ':'
revParser :: A.Parser Integer
revParser = do
skipTill $ A.word8 $ c2w '\n'
skipTill $ A.word8 $ c2w '.'
rev <- A.manyTill A.anyWord8 (A.word8 $ c2w '\n')
return $ read $ w2c <$> rev
w2c :: Word8 -> Char
w2c = C.chr . fromIntegral
c2w :: Char -> Word8
c2w = fromIntegral . C.ord
skipTill :: A.Parser a -> A.Parser ()
skipTill = void . A.manyTill A.anyWord8
rlogParser :: A.Parser EditLog
rlogParser = do
skipTill $ A.string "\n----------------------------\n"
skipALine
void $ A.string "date: "
d <- A.manyTill A.anyWord8 $ A.word8 $ c2w ';'
skipALine
c <- A.manyTill A.anyWord8 (A.string "\n=============================================================================")
maybe (fail "parsing editDate") (makeEditLog $ packToText c) $ parseDate d
where
makeEditLog comment d = pure $ EditLog d comment
parseDate ws = Time.parseTime L.defaultTimeLocale "%Y/%m/%d %T" $ w2c <$> ws
packToText = TE.decodeUtf8 . BS.pack
skipALine = skipTill $ A.word8 $ c2w '\n'
|
keitax/wikirick
|
src/Wikirick/Backends/Repository.hs
|
bsd-3-clause
| 4,356
| 0
| 21
| 975
| 1,355
| 690
| 665
| -1
| -1
|
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.Puzzles.NQueens
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Test suite for Data.SBV.Examples.Puzzles.NQueens
-----------------------------------------------------------------------------
module TestSuite.Puzzles.NQueens(tests) where
import Data.SBV.Examples.Puzzles.NQueens
import Utils.SBVTestFramework
tests :: TestTree
tests =
testGroup "Puzzles.NQueens"
-- number of *distinct* solutions is given in http://en.wikipedia.org/wiki/Eight_queens_puzzle
[ testCase "nQueens 1" (assert $ (== 1) `fmap` numberOfModels (mkQueens 1))
, testCase "nQueens 2" (assert $ (== 0) `fmap` numberOfModels (mkQueens 2))
, testCase "nQueens 3" (assert $ (== 0) `fmap` numberOfModels (mkQueens 3))
, testCase "nQueens 4" (assert $ (== 2) `fmap` numberOfModels (mkQueens 4))
, testCase "nQueens 5" (assert $ (== 10) `fmap` numberOfModels (mkQueens 5))
, testCase "nQueens 6" (assert $ (== 4) `fmap` numberOfModels (mkQueens 6))
, testCase "nQueens 7" (assert $ (== 40) `fmap` numberOfModels (mkQueens 7))
, testCase "nQueens 8" (assert $ (== 92) `fmap` numberOfModels (mkQueens 8))
]
mkQueens :: Int -> Symbolic SBool
mkQueens n = isValid n `fmap` mkExistVars n
|
josefs/sbv
|
SBVTestSuite/TestSuite/Puzzles/NQueens.hs
|
bsd-3-clause
| 1,384
| 0
| 12
| 228
| 355
| 204
| 151
| 16
| 1
|
{-|
Module : Parsing
Description : Parsing the arguments
Copyright : (c) Thomas Lang, 2014
License : BSD-3
Stability : stable
Portability : Imports module "CalcStats" and Text.ParserCombinators.Parsec
This module parses the command line arguments.
As implemented here, if only the filename is
passed (no other arguments), all statistical
computations will be performed.
It also reacts if parameters are not legal or
too few/many arguments were passed. Furthermore
it holds functions for printing a help and a
version message.
-}
module Parsing ( parseArgs ) where
import Text.ParserCombinators.Parsec
import System.Directory ( doesFileExist )
import Data.List
import CalcStats
import Plot
------------------------ [ARGUMENT PARSING SECTION] ----------------------
-- |Checks whether the correct number of parameters was passed, or too many or too few.
-- Special check for the "--help" flag: if it is passed,
-- the help message is displayed and no further processing takes place.
parseArgs :: [String] -> String -> IO ()
parseArgs args fname | "--help" `elem` args = printHelp
| "--version" `elem` args = printVersion
| length args < 1 = error "Too few parameters."
| length args > length params - 2 = error "Too many parameters."
| not (correctArgs $ init args) = error "Incorrect parameter(s), use --help for more information."
| otherwise = procArgs (init args) fname
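-- For example (hypothetical invocation):
--   parseArgs ["--am", "--me", "data.csv"] "data.csv"
-- computes the arithmetic mean and the median of the values in data.csv.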
-- Helper function to determine whether the
-- passed arguments are valid
correctArgs :: [String] -> Bool
correctArgs x = all correct x || x == []
where correct x = x `elem` params
-- |List of all available parameters
params :: [String]
params = ["--help","--version","--am","--gm","--hm","--me","--ra","--ev","--es", "--plot"]
-- |Parses the passed CSV file, removes all spaces (because they
-- raise errors during conversion), converts the strings to numbers,
-- and calls the calculating function
procArgs :: [String] -> String -> IO ()
procArgs args fname = do
okFile <- doesFileExist fname
case okFile of
False -> error "File does not exist, check for correct file name."
True -> do file <- readFile fname
case parseFile file of
Left err -> putStrLn $ show err
Right list -> let l = map (\x -> read x :: Double) (removeSpaces $ concat list)
in case "--plot" `elem` args of
False -> putStrLn "*** INFO: No LaTex output generated ... ok.\n" >>
case args \\ ["--plot"] of
[] -> calculate params l
_ -> calculate args l
True -> plot l >>= (\_ -> putStrLn "*** INFO: LaTex plot generated.\n") >>
case args \\ ["--plot"] of
[] -> calculate params l
_ -> calculate args l
-- Helper function for removing all spaces and empty strings from the list
removeSpaces = filter (\x -> (not (" " == x)) && (not ("" == x)))
------------------- [FILE PARSING SECTION] ----------------
-- Notes:
--
-- Here a CSV file has lines, that are separated by eol.
-- Each line has cells, that are comma separated.
--
-- This means for the structure of the passed file, that
-- it has to have at least one newline at the very end.
-- If not, the parser will fail.
--
-- Thanks to the authors of the book "Real World Haskell"
-- who created this minimalistic CSV parser.
--
-- Internal Notes:
--
-- noneOf -> succeeds if the one of the passed character
-- are NOT the read ones, returns the parsed char
-- many -> applies the passed parser 0 or more times
-- |Parses the text stored in "file"
parseFile :: String -> Either ParseError [[String]]
parseFile file = parse csvFile "(unknown)" file
-- |Definitions of the structure of a valid CSV file
csvFile = endBy line eol
line = sepBy cell (char ',')
cell = many (noneOf ",\n\r")
eol = try (string "\n\r")
<|> try (string "\r\n")
<|> string "\n"
<|> string "\r"
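-- For example (illustrative input; note the required trailing newline):
--
--   parseFile "1,2,3\n4,5,6\n" == Right [["1","2","3"],["4","5","6"]]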
-------------------- [HELP SECTION] ------------------------
-- |Prints the help message on the screen
printHelp :: IO ()
printHelp = putStrLn helpMsg
-- |The help message
helpMsg :: String
helpMsg = "usage: stats [OPTIONS] FILENAME\n\n\
\ available Options:\n \
\ --help\t\tPrints this message\n \
\ --version\t\tPrints out the version\n \
\ --am\t\t\tArithmetic Mean \n \
\ --gm\t\t\tGeometric Mean \n \
\ --hm\t\t\tHarmonic Mean \n \
\ --me\t\t\tMedian \n \
\ --ra\t\t\tRange \n \
\ --ev\t\t\tEmpirical Standard Variance\n \
\ --es\t\t\tEmpirical Standard deviation\n \
\ --plot\t\tCreates a LaTex file that contains the plot\n \
\ \t\t\tof the passed list as Tikz picture.\n"
-- |Prints the version message on the screen
printVersion :: IO ()
printVersion = putStrLn versionMsg
-- |The version message
versionMsg :: String
versionMsg = "Stats - The Statistical Command Line Tool written in Haskell only.\n \
\ Author : Thomas Lang\n \
\ Version: 1.2 2014/10/06\n"
|
langthom/stats
|
Parsing.hs
|
bsd-3-clause
| 5,862
| 10
| 24
| 2,046
| 828
| 425
| 403
| 54
| 6
|
module Problem11 where
import Data.List (transpose)
--
-- Problem 11: Largest product in a grid
--
-- In the 20×20 grid below, four numbers along a diagonal line have been marked
-- in red. The product of these numbers is 26 × 63 × 78 × 14 = 1788696.
--
-- What is the greatest product of four adjacent numbers in the same direction
-- (up, down, left, right, or diagonally) in the 20×20 grid?
problem11 = maximum $ map product allPerms where
len = 4
allPerms = concat
[ concatMap (perms len) horz
, concatMap (perms len) vert
, concatMap (perms len) diagLeft
, concatMap (perms len) diagRight
]
perms len xs@(y:ys) = let perm = take len xs in
if length perm < len then []
else perm : perms len ys
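  -- For example, perms 2 [1,2,3] yields [[1,2],[2,3]]: every contiguous run of
  -- length 2, dropping the shorter tail.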
horz = grid
vert = transpose grid
height = length horz
width = length vert
diag grid = grid' where
cell x y = (grid !! y) !! x
grid' =
[[ cell (col + y) y | y <- [0..(height-col-1)]]
| col <- [0..(width-1)]
] ++
[[ cell x (row + x) | x <- [0..(width-row-1)]]
| row <- [1..(height-1)]
]
diagLeft = diag horz
diagRight = diag (map reverse horz)
grid :: [[Int]]
grid = map (map read . words)
[ "08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08"
, "49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00"
, "81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65"
, "52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91"
, "22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80"
, "24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50"
, "32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70"
, "67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21"
, "24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72"
, "21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95"
, "78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92"
, "16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57"
, "86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58"
, "19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40"
, "04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66"
, "88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69"
, "04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36"
, "20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16"
, "20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54"
, "01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48"
]
|
c0deaddict/project-euler
|
src/Part1/Problem11.hs
|
bsd-3-clause
| 2,574
| 0
| 17
| 837
| 477
| 264
| 213
| 48
| 2
|
module Network.Punch.Peer.Types
( Peer (..)
, RawPeer
, mkRawPeer
, fromPeer
, toPeer
) where
import Control.Monad (when)
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString as B
import Network.Socket (Socket, SockAddr, sClose)
import qualified Network.Socket.ByteString as B
import qualified Pipes.Concurrent as P
import Data.Typeable (Typeable)
import Pipes (Producer, Consumer, await, yield)
class Peer a where
sendPeer :: a -> B.ByteString -> IO Bool
recvPeer :: a -> IO (Maybe B.ByteString)
closePeer :: a -> IO ()
-- | Represents a UDP connection
data RawPeer = RawPeer
{ rawSock :: Socket
, rawAddr :: SockAddr
, rawRecvSize :: Int
}
fromPeer :: Peer a => a -> Producer B.ByteString IO ()
fromPeer p = go
where
go = maybe (return ()) ((>> go) . yield) =<< (liftIO $ recvPeer p)
toPeer :: Peer a => a -> Consumer B.ByteString IO ()
toPeer p = go
where
go = do
bs <- await
ok <- liftIO $ sendPeer p bs
when ok go
-- Returns in IO in case we need to add IO-related refs in the future.
mkRawPeer :: Socket -> SockAddr -> Int -> IO RawPeer
mkRawPeer sock addr size = return $ RawPeer sock addr size
instance Peer RawPeer where
sendPeer (RawPeer {..}) bs = do
B.sendAllTo rawSock bs rawAddr >> return True
recvPeer RawPeer {..} = go
where
go = do
(bs, fromAddr) <- B.recvFrom rawSock rawRecvSize
      -- Ignore data sent from unknown hosts
if rawAddr /= fromAddr
then go
else return (Just bs)
closePeer RawPeer {..} = sClose rawSock
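-- A usage sketch (hypothetical socket, address and downstream consumer):
--
--   peer <- mkRawPeer sock addr 4096
--   runEffect $ fromPeer peer >-> someConsumer
--
-- runEffect and (>->) come from the pipes package.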
|
overminder/punch-forward
|
src/Network/Punch/Peer/Types.hs
|
bsd-3-clause
| 1,542
| 0
| 13
| 354
| 534
| 289
| 245
| -1
| -1
|
{-# LANGUAGE RebindableSyntax #-}
-- Copyright : (C) 2009 Corey O'Connor
-- License : BSD-style (see the file LICENSE)
import Bind.Marshal.Prelude
import Bind.Marshal.Verify
import Bind.Marshal.SerAction.Base
import Bind.Marshal.SerAction.Storable
main = run_test $ do
returnM () :: Test ()
|
coreyoconnor/bind-marshal
|
test/verify_seraction_storable.hs
|
bsd-3-clause
| 307
| 0
| 9
| 53
| 55
| 33
| 22
| 7
| 1
|
{-# LANGUAGE LambdaCase, NamedFieldPuns #-}
module Game.Ur where
import qualified Data.IntSet as IS
import Control.Applicative
import Control.Monad
import Data.Maybe
import System.Random
( getStdRandom, randomR )
data Board = Board
{ black :: Side
, white :: Side
, dice :: Int
, turn :: Turn
} deriving (Eq, Show)
initialBoard :: Int -> Turn -> Board
initialBoard dice turn = Board initialSide initialSide dice turn
newBoard :: IO Board
newBoard = pure initialBoard <*> newDiceRoll <*> fmap ([Black, White] !! ) (getStdRandom $ randomR (0, 1))
-- | A side represents a player's total pieces, the current positions of pieces
-- on the board, and how many pieces have been scored.
data Side = Side
{ pieces :: Int
  -- ^ The number of pieces a player can place on the board
, path :: IS.IntSet
-- ^ The positions of the pieces currently on the board
, scored :: Int
-- ^ How many pieces have made it through the board
} deriving (Eq, Show)
initialSide :: Side
initialSide = Side 7 IS.empty 0
-- | A return type to tell what the next game state should be.
data Ur
= AnotherTurn
| PlaceSucceed
deriving Show
data Turn
= White
| Black
deriving (Show, Eq, Ord)
nextTurn :: Turn -> Turn
nextTurn = \case
Black -> White
White -> Black
-- getPlayerLane :: Turn -> Lanes -> V.Vector Piece
-- getPlayerLane BlackTurn = blackLane
-- getPlayerLane WhiteTurn = whiteLane
nextBoardState :: (Ur, Board) -> IO Board
nextBoardState ur = do
newDice <- newDiceRoll
pure $ nextBoard newDice ur
nextBoard :: Int -> (Ur, Board) -> Board
nextBoard newDice = \case
(AnotherTurn, board) -> do
board { dice = newDice }
(PlaceSucceed, board@Board{ turn }) -> do
board { dice = newDice, turn = nextTurn turn }
-- | Helper function for generating dice rolls.
newDiceRoll :: IO Int
newDiceRoll = fmap sum . replicateM 4 $ getStdRandom (randomR (0, 1))
move :: Int -> Board -> Maybe (Ur, Board)
move i b@Board{ black = black@Side{ path = blackPath }, white = white@Side{ path = whitePath }, dice, turn } =
case turn of
Black -> findMove black
White -> findMove white
where
findMove side = placePiece side <|> movePiece side <|> takePiece side
placePiece side@Side{ pieces, path } =
case (i == dice && not (IS.member i path), turn) of
(True, Black) ->
if pieces > 0
then if i == 4
then
Just $ (AnotherTurn, b { black = side { pieces = pieces - 1, path = IS.insert i path } })
else
Just $ (PlaceSucceed, b { black = side { pieces = pieces - 1, path = IS.insert i path } })
else Nothing
(True, White) ->
if pieces > 0
then if i == 4
then
Just $ (AnotherTurn, b { white = side { pieces = pieces - 1, path = IS.insert i path } })
else
Just $ (PlaceSucceed, b { white = side { pieces = pieces - 1, path = IS.insert i path } })
else Nothing
_ ->
Nothing
movePiece side@Side{ path } =
case (IS.member i path && i + dice < 15 && not (IS.member (i + dice) path), turn) of
(True, Black) ->
if not (i + dice == 8 && IS.member 8 whitePath)
then case (i + dice > 4 && i + dice < 13, IS.member (i + dice) whitePath) of
(True, True) ->
Just $ (PlaceSucceed, b { black = side { path = IS.insert (i + dice) $ IS.delete i path }
, white = white { path = IS.delete (i + dice) whitePath
, pieces = pieces white + 1 }
})
(True, False) ->
if i + dice == 8
then Just $ (AnotherTurn, b { black = side { path = IS.insert (i + dice) $ IS.delete i path } })
else Just $ (PlaceSucceed, b { black = side { path = IS.insert (i + dice) $ IS.delete i path } })
_ ->
if i + dice == 4 || i + dice == 14
then Just $ (AnotherTurn, b { black = side { path = IS.insert (i + dice) $ IS.delete i path } })
else Just $ (PlaceSucceed, b { black = side { path = IS.insert (i + dice) $ IS.delete i path } })
else Nothing
(True, White) ->
if not (i + dice == 8 && IS.member 8 blackPath)
then case (i + dice > 4 && i + dice < 13, IS.member (i + dice) blackPath) of
(True, True) ->
Just $ (PlaceSucceed, b { white = side { path = IS.insert (i + dice) $ IS.delete i path }
, black = black { path = IS.delete (i + dice) blackPath
, pieces = pieces black + 1 }
})
(True, False) ->
if i + dice == 8
then Just $ (AnotherTurn, b { white = side { path = IS.insert (i + dice) $ IS.delete i path } })
else Just $ (PlaceSucceed, b { white = side { path = IS.insert (i + dice) $ IS.delete i path } })
_ ->
if i + dice == 4 || i + dice == 14
then Just $ (AnotherTurn, b { white = side { path = IS.insert (i + dice) $ IS.delete i path } })
else Just $ (PlaceSucceed, b { white = side { path = IS.insert (i + dice) $ IS.delete i path } })
else Nothing
_ ->
Nothing
takePiece side@Side{ path, scored } =
case (IS.member i path && i + dice == 15, turn) of
(True, Black) ->
Just $ (PlaceSucceed, b { black = side { path = IS.delete i path, scored = scored + 1 } })
(True, White) ->
Just $ (PlaceSucceed, b { white = side { path = IS.delete i path, scored = scored + 1 } })
_ ->
Nothing
piecesOnBoard :: Board -> Int
piecesOnBoard Board{ black, white, turn } =
case turn of
Black -> IS.size (path black)
White -> IS.size (path white)
availableMoves :: Board -> [(Ur, Board)]
availableMoves board =
mapMaybe (`move` board) [1..14]
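-- A usage sketch (illustrative): roll a fresh board and count the moves the
-- current player could make.
--
--   do b <- newBoard
--      print (length (availableMoves b))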
|
taksuyu/game-of-ur
|
src/Game/Ur.hs
|
bsd-3-clause
| 6,048
| 0
| 24
| 2,042
| 2,167
| 1,200
| 967
| 123
| 22
|
-- |
-- Module : Data.Ini.Config.Raw
-- Copyright : (c) Getty Ritter, 2017
-- License : BSD
-- Maintainer : Getty Ritter <config-ini@infinitenegativeutility.com>
-- Stability : experimental
--
-- __Warning!__ This module is subject to change in the future, and therefore should
-- not be relied upon to have a consistent API.
module Data.Ini.Config.Raw
( -- * INI types
RawIni (..),
IniSection (..),
IniValue (..),
BlankLine (..),
NormalizedText (..),
normalize,
-- * serializing and deserializing
parseRawIni,
printRawIni,
-- * inspection
lookupInSection,
lookupSection,
lookupValue,
)
where
import Control.Monad (void)
import qualified Data.Foldable as F
import Data.Monoid ((<>))
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy as LazyText
import qualified Data.Text.Lazy.Builder as Builder
import Data.Void (Void)
import Text.Megaparsec
import Text.Megaparsec.Char
type Parser = Parsec Void Text
-- | The 'NormalizedText' type is an abstract representation of text
-- which has had leading and trailing whitespace removed and been
-- normalized to lower-case, but from which we can still extract the
-- original, non-normalized version. This acts like the normalized
-- text for the purposes of 'Eq' and 'Ord' operations, so
--
-- @
-- 'normalize' " x " == 'normalize' \"X\"
-- @
--
-- This type is used to store section and key names in the parsed INI structure.
data NormalizedText = NormalizedText
{ actualText :: Text,
normalizedText :: Text
}
deriving (Show)
-- | The constructor function to build a 'NormalizedText' value. You
-- probably shouldn't be using this module directly, but if for some
-- reason you are using it, then you should be using this function to
-- create 'NormalizedText' values.
normalize :: Text -> NormalizedText
normalize t = NormalizedText t (T.toLower (T.strip t))
instance Eq NormalizedText where
NormalizedText _ x == NormalizedText _ y =
x == y
instance Ord NormalizedText where
NormalizedText _ x `compare` NormalizedText _ y =
x `compare` y
-- | An 'Ini' value is a mapping from section names to
-- 'IniSection' values. The section names in this mapping are
-- normalized to lower-case and stripped of whitespace. This
-- sequence retains the ordering of the original source file.
newtype RawIni = RawIni
{ fromRawIni :: Seq (NormalizedText, IniSection)
}
deriving (Eq, Show)
-- | An 'IniSection' consists of a name, a mapping of key-value pairs,
-- and metadata about where the section starts and ends in the
-- file. The section names found in 'isName' are __not__ normalized
-- to lower-case or stripped of whitespace, and thus should appear
-- exactly as they appear in the original source file.
data IniSection = IniSection
{ -- | The name of the section, as it appears in the
-- original INI source
isName :: Text,
-- | The key-value mapping within that section. Key
-- names here are normalized to lower-case and
-- stripped of whitespace. This sequence retains
-- the ordering of the original source file.
isVals :: Seq (NormalizedText, IniValue),
-- | The line on which the section begins. This
-- field is ignored when serializing, and is only
-- used for error messages produced when parsing
-- and deserializing an INI structure.
isStartLine :: Int,
-- | The line on which the section ends. This field
-- is ignored when serializing, and is only used
-- for error messages produced when parsing and
-- deserializing an INI structure.
isEndLine :: Int,
-- | The blank lines and comments that appear prior
-- to the section head declaration, retained for
-- pretty-printing identical INI files.
isComments :: Seq BlankLine
}
deriving (Eq, Show)
-- | An 'IniValue' represents a key-value mapping, and also stores the
-- line number where it appears. The key names and values found in
-- 'vName' and 'vValue' respectively are _not_ normalized to
-- lower-case or stripped of whitespace, and thus should appear
-- exactly as they appear in the original source file.
data IniValue = IniValue
{ -- | The line on which the key/value mapping
-- appears. This field is ignored when
-- serializing, and is only used for error
-- messages produced when parsing and
-- deserializing an INI structure.
vLineNo :: Int,
-- | The name of the key, as it appears in the INI source.
vName :: Text,
-- | The value of the key
vValue :: Text,
vComments :: Seq BlankLine,
-- | Right now, this will never show up in a parsed INI file, but
-- it's used when emitting a default INI file: it causes the
-- key-value line to include a leading comment as well.
vCommentedOut :: Bool,
vDelimiter :: Char
}
deriving (Eq, Show)
-- | We want to keep track of the whitespace/comments in between KV
-- lines, so this allows us to track those lines in a reproducible
-- way.
data BlankLine
= CommentLine Char Text
| BlankLine
deriving (Eq, Show)
-- | Parse a 'Text' value into an 'Ini' value, retaining a maximal
-- amount of structure as needed to reconstruct the original INI file.
parseRawIni :: Text -> Either String RawIni
parseRawIni t = case runParser pIni "ini file" t of
Left err -> Left (errorBundlePretty err)
Right v -> Right v
pIni :: Parser RawIni
pIni = do
leading <- sBlanks
pSections leading Seq.empty
sBlanks :: Parser (Seq BlankLine)
sBlanks = Seq.fromList <$> many ((BlankLine <$ void eol) <|> sComment)
sComment :: Parser BlankLine
sComment = do
c <- oneOf ";#"
txt <- T.pack `fmap` manyTill anySingle eol
return (CommentLine c txt)
pSections :: Seq BlankLine -> Seq (NormalizedText, IniSection) -> Parser RawIni
pSections leading prevs =
pSection leading prevs <|> (RawIni prevs <$ void eof)
pSection :: Seq BlankLine -> Seq (NormalizedText, IniSection) -> Parser RawIni
pSection leading prevs = do
start <- getCurrentLine
void (char '[')
name <- T.pack `fmap` some (noneOf "[]")
void (char ']')
void eol
comments <- sBlanks
pPairs (T.strip name) start leading prevs comments Seq.empty
pPairs ::
Text ->
Int ->
Seq BlankLine ->
Seq (NormalizedText, IniSection) ->
Seq BlankLine ->
Seq (NormalizedText, IniValue) ->
Parser RawIni
pPairs name start leading prevs comments pairs = newPair <|> finishedSection
where
newPair = do
(n, pair) <- pPair comments
rs <- sBlanks
pPairs name start leading prevs rs (pairs Seq.|> (n, pair))
finishedSection = do
end <- getCurrentLine
let newSection =
IniSection
{ isName = name,
isVals = pairs,
isStartLine = start,
isEndLine = end,
isComments = leading
}
pSections comments (prevs Seq.|> (normalize name, newSection))
pPair :: Seq BlankLine -> Parser (NormalizedText, IniValue)
pPair leading = do
pos <- getCurrentLine
key <- T.pack `fmap` some (noneOf "[]=:")
delim <- oneOf ":="
val <- T.pack `fmap` manyTill anySingle eol
return
( normalize key,
IniValue
{ vLineNo = pos,
vName = key,
vValue = val,
vComments = leading,
vCommentedOut = False,
vDelimiter = delim
}
)
getCurrentLine :: Parser Int
getCurrentLine = (fromIntegral . unPos . sourceLine) `fmap` getSourcePos
-- | Serialize an INI file to text, complete with any comments which
-- appear in the INI structure, and retaining the aesthetic details
-- which are present in the INI file.
printRawIni :: RawIni -> Text
printRawIni = LazyText.toStrict . Builder.toLazyText . F.foldMap build . fromRawIni
where
build (_, ini) =
F.foldMap buildComment (isComments ini)
<> Builder.singleton '['
<> Builder.fromText (isName ini)
<> Builder.fromString "]\n"
<> F.foldMap buildKV (isVals ini)
buildComment BlankLine = Builder.singleton '\n'
buildComment (CommentLine c txt) =
Builder.singleton c <> Builder.fromText txt <> Builder.singleton '\n'
buildKV (_, val) =
F.foldMap buildComment (vComments val)
<> (if vCommentedOut val then Builder.fromString "# " else mempty)
<> Builder.fromText (vName val)
<> Builder.singleton (vDelimiter val)
<> Builder.fromText (vValue val)
<> Builder.singleton '\n'
-- | Look up an Ini value by section name and key. Returns the sequence
-- of matches.
lookupInSection ::
-- | The section name. Will be normalized prior to
-- comparison.
Text ->
-- | The key. Will be normalized prior to comparison.
Text ->
-- | The Ini to search.
RawIni ->
Seq.Seq Text
lookupInSection sec opt ini =
vValue <$> F.asum (lookupValue opt <$> lookupSection sec ini)
-- | Look up an Ini section by name. Returns a sequence of all matching
-- section records.
lookupSection ::
-- | The section name. Will be normalized prior to
-- comparison.
Text ->
-- | The Ini to search.
RawIni ->
Seq.Seq IniSection
lookupSection name ini =
snd <$> Seq.filter ((== normalize name) . fst) (fromRawIni ini)
-- | Look up an Ini key's value in a given section by the key. Returns
-- the sequence of matches.
lookupValue ::
-- | The key. Will be normalized prior to comparison.
Text ->
-- | The section to search.
IniSection ->
Seq.Seq IniValue
lookupValue name section =
snd <$> Seq.filter ((== normalize name) . fst) (isVals section)
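-- A usage sketch (illustrative INI text):
--
--   case parseRawIni "[server]\nport=8080\n" of
--     Right ini -> lookupInSection "server" "port" ini   -- fromList ["8080"]
--     Left err  -> error err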
|
aisamanra/config-ini
|
src/Data/Ini/Config/Raw.hs
|
bsd-3-clause
| 9,591
| 0
| 15
| 2,197
| 1,811
| 1,000
| 811
| 164
| 3
|
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE CPP #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <rx@a-rx.info>
-- Stability : experimental
-- Portability: non-portable
--
-- This module provides Pire's syntax,
-- i.e. Pire's flavour of Pi-forall syntax: telescopes
--------------------------------------------------------------------
module Pire.Syntax.Telescope where
#ifdef MIN_VERSION_GLASGOW_HASKELL
#if MIN_VERSION_GLASGOW_HASKELL(7,10,3,0)
-- ghc >= 7.10.3
#else
-- older ghc versions, but MIN_VERSION_GLASGOW_HASKELL defined
#endif
#else
-- MIN_VERSION_GLASGOW_HASKELL not even defined yet (ghc <= 7.8.x)
import Data.Foldable
import Data.Traversable
import Control.Applicative
#endif
import Pire.Syntax.Token
import Pire.Syntax.Nm
import Pire.Syntax.Expr
import Pire.Syntax.Eps
import Data.Bifoldable
import Data.Bifunctor
import Data.Bitraversable
import Pire.Syntax.Binder
-- -------------
-- -- * Telescopes
-- -------------
-- -- | A telescope is like a first class context. It binds each name
-- -- in the rest of the telescope. For example
-- -- Delta = x:* , y:x, y = w, empty
data Telescope t a =
EmptyTele
| Cons Eps t (Expr t a) (Telescope t a)
| ConsInParens_ Eps (Token 'ParenOpenTy t) (Nm1 t) (Token 'ColonTy t) (Expr t a) (Token 'ParenCloseTy t) (Telescope t a)
-- no colon (and no ws to keep)
-- | ConsWildInParens_ Eps (Token ParenOpenTy t) t (Expr t a) (Token ParenCloseTy t) (Telescope t a)
| ConsWildInParens_ Eps (Token 'ParenOpenTy t) (Binder t) (Expr t a) (Token 'ParenCloseTy t) (Telescope t a)
| ConsInBrackets_ Eps (Token 'BracketOpenTy t) (Nm1 t) (Token 'ColonTy t) (Expr t a) (Token 'BracketCloseTy t) (Telescope t a)
| Constraint (Expr t a) (Expr t a) (Telescope t a)
-- need this as well - cf. equal_
-- should keep = as well
| Constraint_ (Token 'BracketOpenTy t) (Expr t a) (Token 'EqualTy t) (Expr t a) (Token 'BracketCloseTy t) (Telescope t a)
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
instance Bifunctor Telescope where
bimap = bimapDefault
instance Bifoldable Telescope where
bifoldMap = bifoldMapDefault
-- (silence $ runExceptT $ getModules_ ["samples"] "M") >>= return . (\m -> fromRight' $ (ezipper $ Mod m) >>= lineColumn 6 5 >>= focus) . last . fromRight'
-- _decls mm
instance Bitraversable Telescope where
{-# INLINE bitraverse #-}
bitraverse f g = bt where
bt EmptyTele = pure EmptyTele
bt (Cons eps tt ex tele) = Cons <$> pure eps <*> f tt <*> bitraverseExpr f g ex <*> bt tele
bt (ConsInParens_ eps po nm colon ex pc tele) =
ConsInParens_ <$> pure eps <*> traverse f po <*> traverse f nm <*> traverse f colon <*> bitraverseExpr f g ex <*> traverse f pc <*> bt tele
-- bt (ConsWildInParens_ eps po tt ex pc tele) =
-- ConsWildInParens_ <$> pure eps <*> traverse f po <*> f tt <*> bitraverseExpr f g ex <*> traverse f pc <*> bt tele
bt (ConsWildInParens_ eps po tt ex pc tele) =
ConsWildInParens_ <$> pure eps <*> traverse f po <*> traverse f tt <*> bitraverseExpr f g ex <*> traverse f pc <*> bt tele
bt (ConsInBrackets_ eps bo nm colon ex bc tele) =
ConsInBrackets_ <$> pure eps <*> traverse f bo <*> traverse f nm <*> traverse f colon <*> bitraverseExpr f g ex <*> traverse f bc <*> bt tele
bt (Constraint ex ty tele) =
Constraint <$> bitraverseExpr f g ex <*> bitraverseExpr f g ty <*> bt tele
bt (Constraint_ bo ex eq ty bc tele) =
Constraint_ <$> traverse f bo <*> bitraverseExpr f g ex <*> traverse f eq <*> bitraverseExpr f g ty <*> traverse f bc <*> bt tele
bitraverseTele :: Applicative f => (a -> f c) -> (b -> f d) -> Telescope a b -> f (Telescope c d)
bitraverseTele = bitraverse
|
reuleaux/pire
|
src/Pire/Syntax/Telescope.hs
|
bsd-3-clause
| 3,987
| 0
| 15
| 834
| 1,001
| 516
| 485
| 47
| 1
|
--
--
--
----------------
-- Exercise 3.4.
----------------
--
--
--
module E'3''4 where
import Prelude hiding ( (&&) , (||) )
(&&) :: Bool -> Bool -> Bool
(&&) True True = True
(&&) True False = False
(&&) False True = False
(&&) False False = False
(||) :: Bool -> Bool -> Bool
(||) True True = True
(||) True False = True
(||) False True = True
(||) False False = False
|
pascal-knodel/haskell-craft
|
_/links/E'3''4.hs
|
mit
| 387
| 0
| 6
| 90
| 155
| 98
| 57
| 12
| 1
|
-- Compiler Toolkit: operations on file
--
-- Author : Manuel M T Chakravarty
-- Created: 6 November 1999
--
-- Version $Revision: 1.1.1.1 $ from $Date: 2004/11/13 16:42:49 $
--
-- Copyright (c) [1999..2003] Manuel M T Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- Typical operations needed when manipulating file names.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
--- TODO ----------------------------------------------------------------------
--
module FileOps (fileFindIn, mktemp)
where
import Prelude hiding (catch)
-- standard libs
import Data.Char (chr, ord)
import System.Directory (doesFileExist)
import System.IO (Handle, IOMode(..), openFile)
import Control.Monad (liftM)
import Control.Exception (catch, SomeException)
import System.Random (newStdGen, randomRs)
import FNameOps (dirname, stripDirname, addPath)
-- search for the given file in the given list of directories (EXPORTED)
--
-- * if the file does not exist, an exception is raised
--
-- * if the given file name is absolute, it is first tried whether this file
-- exists, afterwards the path component is stripped and the given
-- directories are searched; otherwise, if the file name is not absolute,
-- the path component is retained while searching the directories
--
fileFindIn :: FilePath -> [FilePath] -> IO FilePath
"" `fileFindIn` paths = fail "Empty file name"
file `fileFindIn` paths =
do
let (paths', file') = if head file == '/'
then (dirname file : paths, stripDirname file)
else (paths, file)
files = map (`addPath` file') paths'
existsFlags <- mapM doesFileExist files
let existingFiles = [file | (file, flag) <- zip files existsFlags, flag]
if null existingFiles
then fail (file ++ ": File does not exist")
else return $ head existingFiles
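-- A usage sketch (hypothetical file and search path):
--
--   header <- "config.h" `fileFindIn` ["include", "/usr/local/include"]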
-- |Create a temporary file with a unique name.
--
-- * A unique sequence of at least six characters and digits is added
-- inbetween the two given components (the latter of which must include the
-- file suffix if any is needed)
--
-- * Default permissions are used, which might not be optimal, but
-- unfortunately the Haskell standard libs don't support proper permission
-- management.
--
-- * We make 100 attempts on getting a unique filename before giving up.
--
mktemp :: FilePath -> FilePath -> IO (Handle, FilePath)
mktemp pre post =
do
rs <- liftM (randomRs (0, 61)) newStdGen
-- range for lower and upper case letters plus digits
createLoop 100 rs
where
createLoop 0 _ = fail "mktemp: failed 100 times"
createLoop attempts rs = let
(rs', fname) = nextName rs
in do
h <- openFile fname ReadWriteMode
return (h, fname)
`catch` handler attempts rs'
--
handler :: Int -> [Int] -> SomeException -> IO (Handle,FilePath)
handler attempts rs' _ = createLoop (attempts - 1) rs'
sixChars :: [Int] -> ([Int], String)
sixChars is =
let
(sixInts, is') = splitAt 6 is
--
toChar i | i < 10 = chr . (ord '0' +) $ i
| i < 36 = chr . (ord 'A' +) . (subtract 10) $ i
| otherwise = chr . (ord 'a' +) . (subtract 36) $ i
in
(is', map toChar sixInts)
--
nextName :: [Int] -> ([Int], String)
nextName is = let
(is', rndChars) = sixChars is
in
(is', pre ++ rndChars ++ post)
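-- A usage sketch (hypothetical prefix and suffix):
--
--   (handle, path) <- mktemp "/tmp/c2hs-" ".i"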
|
k0001/gtk2hs
|
tools/c2hs/base/general/FileOps.hs
|
gpl-3.0
| 4,272
| 0
| 16
| 1,173
| 794
| 448
| 346
| 48
| 3
|
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Main
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Main (main) where
import Test.Tasty
import Test.AWS.SWF
import Test.AWS.SWF.Internal
main :: IO ()
main = defaultMain $ testGroup "SWF"
[ testGroup "tests" tests
, testGroup "fixtures" fixtures
]
|
olorin/amazonka
|
amazonka-swf/test/Main.hs
|
mpl-2.0
| 522
| 0
| 8
| 103
| 76
| 47
| 29
| 9
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CodeDeploy.DeleteDeploymentConfig
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a deployment configuration.
--
-- A deployment configuration cannot be deleted if it is currently in use.
-- Also, predefined configurations cannot be deleted.
--
-- /See:/ <http://docs.aws.amazon.com/codedeploy/latest/APIReference/API_DeleteDeploymentConfig.html AWS API Reference> for DeleteDeploymentConfig.
module Network.AWS.CodeDeploy.DeleteDeploymentConfig
(
-- * Creating a Request
deleteDeploymentConfig
, DeleteDeploymentConfig
-- * Request Lenses
, ddcDeploymentConfigName
-- * Destructuring the Response
, deleteDeploymentConfigResponse
, DeleteDeploymentConfigResponse
) where
import Network.AWS.CodeDeploy.Types
import Network.AWS.CodeDeploy.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Represents the input of a delete deployment configuration operation.
--
-- /See:/ 'deleteDeploymentConfig' smart constructor.
newtype DeleteDeploymentConfig = DeleteDeploymentConfig'
{ _ddcDeploymentConfigName :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteDeploymentConfig' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddcDeploymentConfigName'
deleteDeploymentConfig
:: Text -- ^ 'ddcDeploymentConfigName'
-> DeleteDeploymentConfig
deleteDeploymentConfig pDeploymentConfigName_ =
DeleteDeploymentConfig'
{ _ddcDeploymentConfigName = pDeploymentConfigName_
}
-- | The name of an existing deployment configuration associated with the
-- applicable IAM user or AWS account.
ddcDeploymentConfigName :: Lens' DeleteDeploymentConfig Text
ddcDeploymentConfigName = lens _ddcDeploymentConfigName (\ s a -> s{_ddcDeploymentConfigName = a});
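-- For example (hypothetical configuration name), a request value can be built
-- with the smart constructor alone:
--
--   deleteDeploymentConfig "my-deployment-config"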
instance AWSRequest DeleteDeploymentConfig where
type Rs DeleteDeploymentConfig =
DeleteDeploymentConfigResponse
request = postJSON codeDeploy
response
= receiveNull DeleteDeploymentConfigResponse'
instance ToHeaders DeleteDeploymentConfig where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("CodeDeploy_20141006.DeleteDeploymentConfig" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON DeleteDeploymentConfig where
toJSON DeleteDeploymentConfig'{..}
= object
(catMaybes
[Just
("deploymentConfigName" .=
_ddcDeploymentConfigName)])
instance ToPath DeleteDeploymentConfig where
toPath = const "/"
instance ToQuery DeleteDeploymentConfig where
toQuery = const mempty
-- | /See:/ 'deleteDeploymentConfigResponse' smart constructor.
data DeleteDeploymentConfigResponse =
DeleteDeploymentConfigResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteDeploymentConfigResponse' with the minimum fields required to make a request.
--
deleteDeploymentConfigResponse
:: DeleteDeploymentConfigResponse
deleteDeploymentConfigResponse = DeleteDeploymentConfigResponse'
|
olorin/amazonka
|
amazonka-codedeploy/gen/Network/AWS/CodeDeploy/DeleteDeploymentConfig.hs
|
mpl-2.0
| 3,978
| 0
| 12
| 829
| 403
| 245
| 158
| 63
| 1
|
{-# LANGUAGE LambdaCase #-}
module OpenCog.Lojban.Syntax.AtomUtil where
import Prelude hiding (id,(.),(<*>),(<$>),pure,(*>),(<*),foldl)
import Control.Category (id,(.))
import Control.Isomorphism.Partial
import Control.Isomorphism.Partial.Derived
import Control.Isomorphism.Partial.Unsafe
import Text.Syntax
import qualified Data.Map as M
import qualified Data.Foldable as F
import Data.Maybe (fromJust,isJust)
import Data.List (isSuffixOf,nub)
import Data.Hashable
import Data.Char (chr)
import System.Random
import OpenCog.AtomSpace (Atom(..),noTv,TruthVal(..),stv,atomType
,atomGetAllNodes,atomElem,nodeName)
import OpenCog.Lojban.Syntax.Types
import OpenCog.Lojban.Syntax.Util
import OpenCog.Lojban.Util
--Various semi-isos to easily transform certain Atom types
_eval :: Iso (Atom,[Atom]) Atom
_eval = eval . tolist2 . second list
_evalTv :: Iso (TruthVal,(Atom,[Atom])) Atom
_evalTv = evalTv . second (tolist2 . second list)
_ctx :: Iso (Maybe Atom,Atom) Atom
_ctx = ((ctx . tolist2) ||| id) . ifJustA
_ctxold :: Iso (Atom,(Atom,[Atom])) Atom
_ctxold = ctx . tolist2 . second _eval
_ssl :: Iso Atom Atom
_ssl = ssl . tolist2 . addVarNode
addVarNode :: Iso Atom (Atom,Atom)
addVarNode = Iso (\a -> Just (Node "VariableNode" "$var" noTv,a))
(\(_,a) -> Just a)
_satl :: Iso ((String,Atom),[Atom]) Atom
_satl = Iso (\((_,a),s) -> let all = Link "ListLink" (a:s) noTv
in Just $ Link "SatisfactionLink" [all] noTv)
(\case {(Link "SatisfactionLink" (a:s) _) -> Just (("xu",a),s)
;_ -> Nothing})
_iil :: Iso (Atom,Atom) Atom
_iil = iil . tolist2
ctx :: Iso [Atom] Atom
ctx = linkIso "ContextLink" noTv
eval :: Iso [Atom] Atom
eval = linkIso "EvaluationLink" noTv
--Iso Atom Atom
--eval . node x . listl .a pred . addAsnd arg
--addAsnd :: Iso c b -> c -> Iso a (a,b)
--addAsnd iso c = iso >. addsnd c
--addAfst :: Iso c b -> c -> Iso a (b,a)
--addAfst iso c = iso <. addfst c
--(.a) :: Iso [a] a -> Iso b (a,a) -> Iso b a
--(.a) iso1 iso2 = iso1 . tolist2 iso2
evalTv :: Iso (TruthVal,[Atom]) Atom
evalTv = linkIso2 "EvaluationLink"
ssl :: Iso [Atom] Atom
ssl = linkIso "SatisfyingSetLink" noTv
setTypeL :: Iso [Atom] Atom
setTypeL = linkIso "SetTypeLink" noTv
subsetL :: Iso (Atom,Atom) Atom
subsetL = linkIso "SubsetLink" noTv . tolist2
sizeL :: Iso [Atom] Atom
sizeL = linkIso "SetSizeLink" noTv
iil :: Iso [Atom] Atom
iil = linkIso "IntensionalImplicationLink" noTv
list :: Iso [Atom] Atom
list = linkIso "ListLink" noTv
--varl :: Iso [Atom] Atom
--varl = linkIso "VariableLink" noTv
notl :: Iso [Atom] Atom
notl = linkIso "NotLink" noTv
andl :: Iso [Atom] Atom
andl = linkIso "AndLink" noTv
orl :: Iso [Atom] Atom
orl = linkIso "OrLink" noTv
iffl :: Iso [Atom] Atom
iffl = orl . tolist2 . (andl *** andl) . reorder
where reorder = Iso (Just . f) (Just . g)
f ls = (ls,map (fromJust . apply (notl . tolist1)) ls)
g (ls,_) = ls
u_l :: Iso [Atom] Atom
u_l = orl . tolist2 . (andl *** andl) . reorder
where reorder = Iso (Just . f) (Just . g)
f [a,b] = let nb = fromJust $ apply notl [b]
in ([a,b],[a,nb])
f a = error $ show a ++ " is not a accepted value for limpl"
g (ls,_) = ls
anotbl :: Iso [Atom] Atom
anotbl = andl . tolist2 . second (notl . tolist1) . inverse tolist2
onlyif :: Iso [Atom] Atom
onlyif = orl . tolist2 . (andl *** notl) . reorder
where reorder = Iso (Just . f) (Just . g)
f [a,b] = ([a,b],[a])
g (a,_) = a
xorl :: Iso [Atom] Atom
xorl = orl . tolist2
. (myand . first mynot *** myand . second mynot)
. reorder
where reorder = Iso (Just . f) (Just . g)
f [a,b] = ((a,b),(a,b))
g ((a,b),_) = [a,b]
myand = andl .tolist2
mynot = notl . tolist1
handleConNeg :: Iso (LCON,[Atom]) (String,[Atom])
handleConNeg = Iso (Just . f) (Just . g)
where f ((mna,(s,mnai)),[a1,a2]) = let na1 = if isJust mna
then cNL noTv a1
else a1
na2 = if isJust mnai
then cNL noTv a2
else a2
in (s,[na1,na2])
g (s,[na1,na2]) = let (mna,a1) = case na1 of
(NL [a1]) -> (Just "na",a1)
_ -> (Nothing,na1)
(mnai,a2) = case na2 of
(NL [a2]) -> (Just "nai",a2)
_ -> (Nothing,na2)
in ((mna,(s,mnai)),[a1,a2])
conLink :: Iso (LCON,[Atom]) Atom
conLink = conLink' . handleConNeg
conLink' :: Iso (String,[Atom]) Atom
conLink' = Iso (\(s,args) -> case s of
"e" -> apply andl args
"a" -> apply orl args
"o" -> apply iffl args
"u" -> apply u_l args
--FIXME: "ji" -> apply varl args
"enai" -> apply anotbl args
"onai" -> apply xorl args
"na.a" -> apply onlyif args
_ -> error $ "Can't handle conLink: " ++ show s)
(\a -> case a of
Link "OrLink"
[Link "AndLink" args _
,Link "AndLink"
[Link "NotLink" _arg1 _
,Link "NotLink" _arg2 _
]_
] _ -> Just ("o",args)
Link "OrLink"
[Link "AndLink" args _
,Link "AndLink"
[arg1
,Link "NotLink" _arg2 _
]_
] _ -> Just ("u",args)
Link "AndLink" args _ -> Just ("e",args)
Link "OrLink" args _ -> Just ("a",args)
Link "VariableLink" args _ -> Just ("ji",args)
_ -> Nothing)
_JAtoA :: Iso String String
_JAtoA = mkSynonymIso [("je","e")
,("ja","a")
,("jo","o")
,("ju","u")
,("jonai","onai")
,("jenai","enai")
,("naja","na.a")
,("je'i","ji")]
_GIhAtoA :: Iso String String
_GIhAtoA = mkSynonymIso [("gi'e","e")
,("gi'a","a")
,("gi'o","o")
,("gi'u","u")
,("gi'enai","enai")
,("gi'onai","onai")
,("nagi'a","na.a")
,("gi'i","ji")]
_GAtoA :: Iso String String
_GAtoA = mkSynonymIso [("ge","e")
,("ga","a")
,("go","o")
,("gu","u")
,("ganai","na.a")
,("gonai","onai")
,("ge'i","ji")]
linkIso :: String -> TruthVal -> Iso [Atom] Atom
linkIso n t = link . Iso (\l -> Just (n,(l,t)))
(\(an,(l,at)) -> if an == n
then Just l
else Nothing)
linkIso2 :: String -> Iso (TruthVal,[Atom]) Atom
linkIso2 n = link . Iso (\(t,l) -> Just (n,(l,t)))
(\(an,(l,t)) -> if an == n
then Just (t,l)
else Nothing)
nodeIso :: String -> TruthVal -> Iso String Atom
nodeIso n t = node . Iso (\l -> Just (n,(l,t)))
(\(an,(l,at)) -> if an == n
then Just l
else Nothing)
concept :: Iso String Atom
concept = nodeIso "ConceptNode" noTv
wordNode :: Iso String Atom
wordNode = nodeIso "WordNode" noTv
predicate :: Iso String Atom
predicate = nodeIso "PredicateNode" noTv
varnode :: Iso String Atom
varnode = nodeIso "VariableNode" noTv
number :: Iso String Atom
number = nodeIso "VariableNode" noTv
_frames :: Iso (Tagged Selbri,[Sumti]) Atom
_frames = (id ||| andl) . isSingle . mapIso (handleDA . _frame) . isoDistribute . handleTAG
where isSingle = Iso (Just . f) (Just . g)
f [a] = Left a
f as = Right as
g (Left a) = [a]
g (Right as) = as
handleDA :: Iso Atom Atom
handleDA = Iso (Just . f) (Just . g) where
f (EvalL tv ps (LL [p1,CN n]))
| n == "da" || n == "de" || n == "di"
= let i = cVN ((randName 0 (show p1)) ++ "___" ++ n)
in cExL tv i (cEvalL tv ps (cLL [p1,i]))
f a = a
g (ExL _ _ (EvalL tv ps (LL [p1,VN name])))
= let n = drop 23 name
da = cCN n lowTv
in cEvalL tv ps (cLL [p1,da])
g a = a
handleTAG :: Iso (Tagged Selbri,[Sumti]) (Selbri,[(Atom,Tag)])
handleTAG = handleTAGupdater . second tagger
where handleTAGupdater = Iso (Just . f) (Just . g)
f ((s,Nothing),args) = (s,args)
f ((s,Just u) ,args) = (s,map (mapf u) args)
g (s,args) = ((s,Nothing),args)
mapf u = mapSnd $ fromJust . apply (tagUpdater u)
tagUpdater :: String -> Iso Tag Tag
tagUpdater "se" = try $ mkSynonymIso [("1","2"),("2","1")]
tagUpdater "te" = try $ mkSynonymIso [("1","3"),("3","1")]
tagUpdater "ve" = try $ mkSynonymIso [("1","4"),("4","1")]
tagUpdater "xe" = try $ mkSynonymIso [("1","5"),("5","1")]
--Get the argument location of all Sumtis
tagger :: Iso [(Atom,Maybe String)] [(Atom,String)]
tagger = post . foldl tagOne . init
where init = Iso (\a -> Just (([],("0",startMap)),a))
(\(_,a) -> Just a)
startMap = M.fromList [("1",True),("2",True),("3",True),("4",True),("5",True)]
post = Iso (\(l,(_,_)) -> Just l)
(\l -> Just (l,(show $ length l,M.empty)))
tagOne = Iso (Just . f) (Just . g)
f ((r,(p,u)),(a,Just s))
| length s > 1 = ((a,s):r,(p,u))
| length s == 1 = ((a,s):r,(s,M.update (\_ -> Just False) s u))
f ((r,(p,u)),(a,Nothing)) =
((a,t):r,(t,M.update (\_ -> Just False) t u))
where next s = show (read s + 1)
t = findNext p
findNext s = let t = next s
in if u M.! t then t else findNext t
g ((a,s):r,(p,u))
| length s > 1 = ((r,(p ,u)), (a,Just s ))
| s == p = ((r,(prev p,u)), (a,Nothing))
| otherwise = ((r,(prev p,u)), (a,Just s ))
where prev s = show (read s - 1 )
-- Iso Selbri Stumti Atom
_frame :: Iso ((TruthVal,Atom),(Atom,Tag)) Atom
_frame = _evalTv . (id *** (_framePred *** tolist2)) . reorder
where reorder = Iso f g
f ((tv,s),(a,t)) = Just (tv,((s,t),(s,a)))
g (tv,((_,t),(s,a))) = Just ((tv,s),(a,t))
_framePred :: Iso (Atom,Tag) Atom
_framePred = handleVar $ node . second (first (isoConcat "_sumti". tolist2 .< isoDrop 23)) . reorder .< (inverse node)
where reorder = Iso (Just . f) (Just . g) where
f ((t,(n,tv)),tag) = (t,((n,tag),tv))
g (t,((n,tag),tv)) = ((t,(n,tv)),tag)
handleVar iso = Iso f g where
f (n,"?") = Just $ cVN (nodeName n)
f a = apply iso a
g (VN name) = Just (cPN name noTv,"$var")
g a = unapply iso a
randName :: Int -> String -> String
randName = take 20 . map chr . filter pred . randomRs (48,122) . mkStdGen ... hashWithSalt
where pred i = (i >= 48 && i <= 57)
|| (i >= 65 && i <= 90)
|| (i >= 97 && i <= 122)
--Most pronouns are instances of a more general concept
--This will create the inheritance link to show this relation
instanceOf :: Iso (Atom,Int) (State Atom)
instanceOf = genInstance "InheritanceLink"
iInstanceOf :: Iso (Atom,Int) (State Atom)
iInstanceOf = genInstance "IntensionalInheritanceLink"
implicationOf :: Iso (Atom,Int) (State Atom)
implicationOf = genInstance "ImplicationLink"
genInstance :: String -> Iso (Atom,Int) (State Atom)
genInstance typeL = Iso f g where
f (e,seed) = let salt = show e
(t,name) = if "Link" `isSuffixOf` atomType e
then ("ConceptNode","")
else (atomType e,nodeName e)
fullname = (randName seed salt) ++ "___"++ name
i = Node t fullname noTv
l = Link typeL [i,e] highTv
in Just (i,[l])
g (n,ls) = (\(Link _ [_,i] _) -> (i,0)) `fmap` F.find (ff n) ls
ff n (Link "InheritanceLink" [b,_] _) = n == b
ff n a = False
filterState :: Iso (State Sumti) (State Sumti)
filterState = Iso f g where
f = apply id
g ((a,t),s) = Just ((a,t),getDefinitons [a] s)
getDefinitons :: [Atom] -> [Atom] -> [Atom]
getDefinitons ns ls = if ns == nns then links else getDefinitons nns ls
where links = filter ff ls --Get all links that contain a node from ns
ff l = any (`atomElem` l) ns --Check if the link contains a node from ns
nodes = concatMap atomGetAllNodes links --Get all Nodes from the links
nns = nub $ ns ++ nodes --Remove duplicates
findSetType :: Atom -> [Atom] -> Maybe Atom
findSetType a = fmap getType . F.find f
where f (Link "SetTypeLink" [s,t] _) = s == a
f _ = False
getType (Link "SetTypeLink" [s,t] _) = t
|
ruiting/opencog
|
opencog/nlp/lojban/HaskellLib/src/OpenCog/Lojban/Syntax/AtomUtil.hs
|
agpl-3.0
| 13,970
| 1
| 19
| 5,226
| 5,520
| 3,071
| 2,449
| -1
| -1
|
{-# LANGUAGE CPP, NoImplicitPrelude #-}
module Data.Bool.Compat (
module Base
, bool
) where
import Data.Bool as Base
#if !(MIN_VERSION_base(4,7,0))
-- | Case analysis for the 'Bool' type.
-- @bool a b p@ evaluates to @a@ when @p@ is @False@, and evaluates to @b@
-- when @p@ is @True@.
--
-- /Since: 4.7.0.0/
bool :: a -> a -> Bool -> a
bool f _ False = f
bool _ t True = t
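-- A usage sketch (illustrative only):
--
-- > bool "falsy" "truthy" True  == "truthy"
-- > bool 0 1 False              == 0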
#endif
|
beni55/base-compat
|
src/Data/Bool/Compat.hs
|
mit
| 387
| 0
| 7
| 80
| 72
| 45
| 27
| 8
| 1
|
module Expression.Instances where
import Expression.Type
import Expression.Class
----------------------------------------------------------------------------
instance Expressive Int where
toExp i = Num (fromIntegral i)
fromExp (Num e) = fromIntegral e
instance Expressive Integer where
toExp i = Num (fromIntegral i)
fromExp (Num e) = fromIntegral e
instance Expressive String where
toExp s = Strg s
fromExp (Strg s) = s
instance Expressive a => Expressive [ a ] where
toExp = List . map toExp
fromExp ( List es ) = map fromExp es
instance Expressive () where
toExp () = Tuple []
fromExp ( Tuple [] ) = ()
instance ( Expressive a, Expressive b ) => Expressive ( a, b ) where
toExp ( x, y ) = Tuple [ toExp x, toExp y ]
fromExp ( Tuple [ x, y ] ) = ( fromExp x, fromExp y )
instance ( Expressive a, Expressive b, Expressive c )
=> Expressive ( a, b, c ) where
toExp ( x, y, z ) = Tuple [ toExp x, toExp y, toExp z ]
fromExp ( Tuple [ x, y, z ] ) = ( fromExp x, fromExp y, fromExp z )
instance Expressive a => Expressive (Maybe a) where
toExp Nothing = Id "Nothing"
toExp (Just x) = Positional "Just" [ toExp x ]
fromExp (Id "Nothing") = Nothing
fromExp ( Positional "Just" [ x ] ) = Just (fromExp x)
|
florianpilz/autotool
|
src/Expression/Instances.hs
|
gpl-2.0
| 1,292
| 4
| 9
| 320
| 536
| 273
| 263
| 30
| 0
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.BuildPaths
-- Copyright : Isaac Jones 2003-2004,
-- Duncan Coutts 2008
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- A bunch of dirs, paths and file names used for intermediate build steps.
--
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.BuildPaths (
defaultDistPref, srcPref,
hscolourPref, haddockPref,
autogenModulesDir,
autogenModuleName,
cppHeaderName,
haddockName,
mkLibName,
mkProfLibName,
mkSharedLibName,
exeExtension,
objExtension,
dllExtension,
) where
import System.FilePath ((</>), (<.>))
import Distribution.Package
( packageName )
import Distribution.ModuleName (ModuleName)
import qualified Distribution.ModuleName as ModuleName
import Distribution.Compiler
( CompilerId(..) )
import Distribution.PackageDescription (PackageDescription)
import Distribution.Simple.LocalBuildInfo
( LocalBuildInfo(buildDir), LibraryName(..) )
import Distribution.Simple.Setup (defaultDistPref)
import Distribution.Text
( display )
import Distribution.System (OS(..), buildOS)
-- ---------------------------------------------------------------------------
-- Build directories and files
srcPref :: FilePath -> FilePath
srcPref distPref = distPref </> "src"
hscolourPref :: FilePath -> PackageDescription -> FilePath
hscolourPref = haddockPref
haddockPref :: FilePath -> PackageDescription -> FilePath
haddockPref distPref pkg_descr
= distPref </> "doc" </> "html" </> display (packageName pkg_descr)
-- |The directory in which we put auto-generated modules
autogenModulesDir :: LocalBuildInfo -> String
autogenModulesDir lbi = buildDir lbi </> "autogen"
cppHeaderName :: String
cppHeaderName = "cabal_macros.h"
-- |The name of the auto-generated module associated with a package
autogenModuleName :: PackageDescription -> ModuleName
autogenModuleName pkg_descr =
ModuleName.fromString $
"Paths_" ++ map fixchar (display (packageName pkg_descr))
where fixchar '-' = '_'
fixchar c = c
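-- For illustration (hypothetical package name): a package called
-- "my-example-pkg" gets the autogenerated module "Paths_my_example_pkg",
-- since every '-' in the displayed package name is rewritten to '_'.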
haddockName :: PackageDescription -> FilePath
haddockName pkg_descr = display (packageName pkg_descr) <.> "haddock"
-- ---------------------------------------------------------------------------
-- Library file names
mkLibName :: LibraryName -> String
mkLibName (LibraryName lib) = "lib" ++ lib <.> "a"
mkProfLibName :: LibraryName -> String
mkProfLibName (LibraryName lib) = "lib" ++ lib ++ "_p" <.> "a"
-- Implement proper name mangling for dynamical shared objects
-- libHS<packagename>-<compilerFlavour><compilerVersion>
-- e.g. libHSbase-2.1-ghc6.6.1.so
mkSharedLibName :: CompilerId -> LibraryName -> String
mkSharedLibName (CompilerId compilerFlavor compilerVersion) (LibraryName lib)
= "lib" ++ lib ++ "-" ++ comp <.> dllExtension
where comp = display compilerFlavor ++ display compilerVersion
-- ------------------------------------------------------------
-- * Platform file extensions
-- ------------------------------------------------------------
-- ToDo: This should be determined via autoconf (AC_EXEEXT)
-- | Extension for executable files
-- (typically @\"\"@ on Unix and @\"exe\"@ on Windows or OS\/2)
exeExtension :: String
exeExtension = case buildOS of
Windows -> "exe"
_ -> ""
-- ToDo: This should be determined via autoconf (AC_OBJEXT)
-- | Extension for object files. For GHC and NHC the extension is @\"o\"@.
-- Hugs uses either @\"o\"@ or @\"obj\"@ depending on the used C compiler.
objExtension :: String
objExtension = "o"
-- | Extension for dynamically linked (or shared) libraries
-- (typically @\"so\"@ on Unix and @\"dll\"@ on Windows)
dllExtension :: String
dllExtension = case buildOS of
Windows -> "dll"
OSX -> "dylib"
_ -> "so"
|
jwiegley/ghc-release
|
libraries/Cabal/cabal/Distribution/Simple/BuildPaths.hs
|
gpl-3.0
| 5,438
| 0
| 10
| 985
| 626
| 364
| 262
| 65
| 3
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
-- Module : Network.AWS.Signing.Internal.V4
-- Copyright : (c) 2013-2015 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Network.AWS.Signing.Internal.V4
( V4
) where
import Control.Applicative
import Control.Lens
import qualified Crypto.Hash.SHA256 as SHA256
import Data.ByteString (ByteString)
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Char8 as BS
import qualified Data.CaseInsensitive as CI
import qualified Data.Foldable as Fold
import Data.Function hiding ((&))
import Data.List (groupBy, intersperse, sortBy, sort)
import Data.Maybe
import Data.Monoid
import Data.Ord
import Data.Time
import Network.AWS.Data
import Network.AWS.Request.Internal
import Network.AWS.Signing.Internal
import Network.AWS.Types
import Network.HTTP.Types.Header
data V4
data instance Meta V4 = Meta
{ _mAlgorithm :: ByteString
, _mScope :: ByteString
, _mSigned :: ByteString
, _mCReq :: ByteString
, _mSTS :: ByteString
, _mSignature :: ByteString
, _mTime :: UTCTime
}
instance ToBuilder (Meta V4) where
build Meta{..} = mconcat $ intersperse "\n"
[ "[Version 4 Metadata] {"
, " algorithm = " <> build _mAlgorithm
, " credential scope = " <> build _mScope
, " signed headers = " <> build _mSigned
, " canonical request = {"
, build _mCReq
, " }"
, " string to sign = " <> build _mSTS
, " signature = " <> build _mSignature
, " time = " <> build _mTime
, "}"
]
instance AWSPresigner V4 where
presigned a r rq t ex = out & sgRequest
. queryString <>~ auth (out ^. sgMeta)
where
out = finalise qry hash r service a inp t
qry cs sh =
pair (CI.original hAMZAlgorithm) algorithm
. pair (CI.original hAMZCredential) cs
. pair (CI.original hAMZDate) (Time t :: AWSTime)
. pair (CI.original hAMZExpires) ex
. pair (CI.original hAMZSignedHeaders) sh
. pair (CI.original hAMZToken) (toBS <$> _authToken a)
inp = rq & rqHeaders .~ []
auth = mappend "&X-Amz-Signature=" . _mSignature
hash = "UNSIGNED-PAYLOAD"
instance AWSSigner V4 where
signed a r rq t = out & sgRequest
%~ requestHeaders
%~ hdr hAuthorization (authorisation $ out ^. sgMeta)
where
out = finalise (\_ _ -> id) hash r service a inp t
inp = rq & rqHeaders %~ hdr hAMZDate date . hdrs (maybeToList tok)
date = toBS (Time t :: AWSTime)
tok = (hAMZToken,) . toBS <$> _authToken a
hash = bodyHash (_rqBody rq)
authorisation :: Meta V4 -> ByteString
authorisation Meta{..} = BS.concat
[ _mAlgorithm
, " Credential="
, _mScope
, ", SignedHeaders="
, _mSigned
, ", Signature="
, _mSignature
]
algorithm :: ByteString
algorithm = "AWS4-HMAC-SHA256"
finalise :: (ByteString -> ByteString -> Query -> Query)
-> ByteString
-> Region
-> Service (Sv a)
-> AuthEnv
-> Request a
-> UTCTime
-> Signed a V4
finalise qry hash r s@Service{..} AuthEnv{..} Request{..} t =
Signed meta rq
where
meta = Meta
{ _mAlgorithm = algorithm
, _mCReq = canonicalRequest
, _mScope = accessScope
, _mSigned = signedHeaders
, _mSTS = stringToSign
, _mSignature = signature
, _mTime = t
}
rq = clientRequest
& method .~ meth
& host .~ _endpointHost
& path .~ _rqPath
& queryString .~ BS.cons '?' (toBS query)
& requestHeaders .~ headers
& requestBody .~ _bdyBody _rqBody
meth = toBS _rqMethod
query = qry accessScope signedHeaders _rqQuery
Endpoint{..} = endpoint s r
canonicalQuery = toBS (query & valuesOf %~ Just . fromMaybe "")
headers = sortBy (comparing fst) (hdr hHost _endpointHost _rqHeaders)
joinedHeaders = map f $ groupBy ((==) `on` fst) headers
where
f [] = ("", "")
f (h:hs) = (fst h, g (h : hs))
g = BS.intercalate "," . sort . map snd
signedHeaders = mconcat
. intersperse ";"
. map (CI.foldedCase . fst)
$ joinedHeaders
canonicalHeaders = Fold.foldMap f joinedHeaders
where
f (k, v) = CI.foldedCase k
<> ":"
<> stripBS v
<> "\n"
canonicalRequest = mconcat $ intersperse "\n"
[ meth
, collapsePath _rqPath
, canonicalQuery
, canonicalHeaders
, signedHeaders
, hash
]
scope =
[ toBS (Time t :: BasicTime)
, toBS _endpointScope
, toBS _svcPrefix
, "aws4_request"
]
credentialScope = BS.intercalate "/" scope
accessScope = toBS _authAccess <> "/" <> credentialScope
signingKey = Fold.foldl1 hmacSHA256 $ ("AWS4" <> toBS _authSecret) : scope
stringToSign = BS.intercalate "\n"
[ algorithm
, toBS (Time t :: AWSTime)
, credentialScope
, Base16.encode (SHA256.hash canonicalRequest)
]
signature = Base16.encode (hmacSHA256 signingKey stringToSign)
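-- Added summary (descriptive only, restating the bindings above): the signing
-- key is an HMAC-SHA256 chain seeded with "AWS4" <> secret key and folded over
-- the credential-scope parts (date, endpoint scope, service prefix,
-- "aws4_request"); the signature is then the hex-encoded HMAC-SHA256 of the
-- string-to-sign under that derived key.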
|
dysinger/amazonka
|
core/src/Network/AWS/Signing/Internal/V4.hs
|
mpl-2.0
| 6,147
| 0
| 18
| 2,103
| 1,497
| 814
| 683
| -1
| -1
|
{-# LANGUAGE TemplateHaskell, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for the SlotMap.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.SlotMap
( testSlotMap
, genSlotLimit
, genTestKey
, overfullKeys
) where
import Prelude hiding (all)
import Control.Applicative
import Control.Monad
import Data.Foldable (all)
import qualified Data.Map as Map
import Data.Map (Map, member, keys, keysSet)
import Data.Set (Set, size, union)
import qualified Data.Set as Set
import Data.Traversable (traverse)
import Test.HUnit
import Test.QuickCheck
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHelper
import Test.Ganeti.Types ()
import Ganeti.SlotMap
{-# ANN module "HLint: ignore Use camelCase" #-}
-- | Generates a number typical for the limit of a `Slot`.
-- Useful for constructing resource bounds when not directly constructing
-- the relevant `Slot`s.
genSlotLimit :: Gen Int
genSlotLimit = frequency [ (9, choose (1, 5))
, (1, choose (1, 100))
] -- Don't create huge slot limits.
instance Arbitrary Slot where
arbitrary = do
limit <- genSlotLimit
occ <- choose (0, limit * 2)
return $ Slot occ limit
-- | Generates a number typical for the occupied count of a `Slot`.
-- Useful for constructing `CountMap`s.
genSlotCount :: Gen Int
genSlotCount = slotOccupied <$> arbitrary
-- | Takes a slot and resamples its `slotOccupied` count to fit the limit.
resampleFittingSlot :: Slot -> Gen Slot
resampleFittingSlot (Slot _ limit) = do
occ <- choose (0, limit)
return $ Slot occ limit
-- | What we use as key for testing `SlotMap`s.
type TestKey = String
-- | Generates short strings used as `SlotMap` keys.
--
-- We limit ourselves to a small set of key strings with high probability to
-- increase the chance that `SlotMap`s actually have more than one slot taken.
genTestKey :: Gen TestKey
genTestKey = frequency [ (9, elements ["a", "b", "c", "d", "e"])
, (1, genPrintableAsciiString)
]
-- | Generates small lists.
listSizeGen :: Gen Int
listSizeGen = frequency [ (9, choose (1, 5))
, (1, choose (1, 100))
]
-- | Generates a `SlotMap` given a generator for the keys (see `genTestKey`).
genSlotMap :: (Ord a) => Gen a -> Gen (SlotMap a)
genSlotMap keyGen = do
n <- listSizeGen -- don't create huge `SlotMap`s
Map.fromList <$> vectorOf n ((,) <$> keyGen <*> arbitrary)
-- | Generates a `CountMap` given a generator for the keys (see `genTestKey`).
genCountMap :: (Ord a) => Gen a -> Gen (CountMap a)
genCountMap keyGen = do
n <- listSizeGen -- don't create huge `CountMap`s
Map.fromList <$> vectorOf n ((,) <$> keyGen <*> genSlotCount)
-- | Tells which keys of a `SlotMap` are overfull.
overfullKeys :: (Ord a) => SlotMap a -> Set a
overfullKeys sm =
Set.fromList [ a | (a, Slot occ limit) <- Map.toList sm, occ > limit ]
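-- Illustrative example (hypothetical values):
--   overfullKeys (Map.fromList [("a", Slot 3 2), ("b", Slot 1 4)])
--     == Set.fromList ["a"]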
-- | Generates a `SlotMap` for which all slots are within their limits.
genFittingSlotMap :: (Ord a) => Gen a -> Gen (SlotMap a)
genFittingSlotMap keyGen = do
-- Generate a SlotMap, then resample all slots to be fitting.
slotMap <- traverse resampleFittingSlot =<< genSlotMap keyGen
when (isOverfull slotMap) $ error "BUG: FittingSlotMap Gen is wrong"
return slotMap
-- * Test cases
case_isOverfull :: Assertion
case_isOverfull = do
assertBool "overfull"
. isOverfull $ Map.fromList [("buck", Slot 3 2)]
assertBool "not overfull"
. not . isOverfull $ Map.fromList [("buck", Slot 2 2)]
assertBool "empty"
. not . isOverfull $ (Map.fromList [] :: SlotMap TestKey)
case_occupySlots_examples :: Assertion
case_occupySlots_examples = do
let a n = ("a", Slot n 2)
let b n = ("b", Slot n 4)
let sm = Map.fromList [a 1, b 2]
cm = Map.fromList [("a", 1), ("b", 1), ("c", 5)]
assertEqual "fitting occupySlots"
(sm `occupySlots` cm)
(Map.fromList [a 2, b 3, ("c", Slot 5 0)])
-- | Union of the keys of two maps.
keyUnion :: (Ord a) => Map a b -> Map a c -> Set a
keyUnion a b = keysSet a `union` keysSet b
-- | Tests properties of `SlotMap`s being filled up.
prop_occupySlots :: Property
prop_occupySlots =
forAll arbitrary $ \(sm :: SlotMap Int, cm :: CountMap Int) ->
let smOcc = sm `occupySlots` cm
in conjoin
[ counterexample "input keys are preserved" $
all (`member` smOcc) (keyUnion sm cm)
, counterexample "all keys must come from the input keys" $
all (`Set.member` keyUnion sm cm) (keys smOcc)
]
-- | Tests for whether there's still space for a job given its rate
-- limits.
case_hasSlotsFor_examples :: Assertion
case_hasSlotsFor_examples = do
let a n = ("a", Slot n 2)
let b n = ("b", Slot n 4)
let c n = ("c", Slot n 8)
let sm = Map.fromList [a 1, b 2]
assertBool "fits" $
sm `hasSlotsFor` Map.fromList [("a", 1), ("b", 1)]
assertBool "doesn't fit"
. not $ sm `hasSlotsFor` Map.fromList [("a", 1), ("b", 3)]
let smOverfull = Map.fromList [a 1, b 2, c 10]
assertBool "fits (untouched keys overfull)" $
isOverfull smOverfull
&& smOverfull `hasSlotsFor` Map.fromList [("a", 1), ("b", 1)]
assertBool "empty fitting" $
Map.empty `hasSlotsFor` (Map.empty :: CountMap TestKey)
assertBool "empty not fitting"
. not $ Map.empty `hasSlotsFor` Map.fromList [("a", 1), ("b", 100)]
assertBool "empty not fitting"
. not $ Map.empty `hasSlotsFor` Map.fromList [("a", 1)]
-- | Tests properties of `hasSlotsFor` on `SlotMap`s that are known to
-- respect their limits.
prop_hasSlotsFor_fitting :: Property
prop_hasSlotsFor_fitting =
forAll (genFittingSlotMap genTestKey) $ \sm ->
forAll (genCountMap genTestKey) $ \cm ->
sm `hasSlotsFor` cm ==? not (isOverfull $ sm `occupySlots` cm)
-- | Tests properties of `hasSlotsFor`, irrespective of whether the
-- input `SlotMap`s respect their limits or not.
prop_hasSlotsFor :: Property
prop_hasSlotsFor =
let -- Generates `SlotMap`s for combining.
genMaps = resize 10 $ do -- We don't need very large SlotMaps.
sm1 <- genSlotMap genTestKey
-- We need to make sm2 smaller to make `hasSlots` below more
-- likely (otherwise the LHS of ==> is always false).
sm2 <- sized $ \n -> resize (n `div` 3) (genSlotMap genTestKey)
-- We also want to test (sm1, sm1); we have to make it more
-- likely for it to ever happen.
frequency [ (1, return (sm1, sm1))
, (9, return (sm1, sm2)) ]
in forAll genMaps $ \(sm1, sm2) ->
let fits = sm1 `hasSlotsFor` toCountMap sm2
smOcc = sm1 `occupySlots` toCountMap sm2
oldOverfullBucks = overfullKeys sm1
newOverfullBucks = overfullKeys smOcc
in conjoin
[ counterexample "if there's enough extra space, then the new\
\ overfull keys must be as before" $
fits ==> (newOverfullBucks ==? oldOverfullBucks)
-- Note that the other way around does not hold:
-- (newOverfullBucks == oldOverfullBucks) ==> fits
, counterexample "joining SlotMaps must not change the number of\
\ overfull keys (but may change their slot\
                           \ counts)"
. property $ size newOverfullBucks >= size oldOverfullBucks
]
testSuite "SlotMap"
[ 'case_isOverfull
, 'case_occupySlots_examples
, 'prop_occupySlots
, 'case_hasSlotsFor_examples
, 'prop_hasSlotsFor_fitting
, 'prop_hasSlotsFor
]
|
apyrgio/ganeti
|
test/hs/Test/Ganeti/SlotMap.hs
|
bsd-2-clause
| 8,949
| 0
| 17
| 2,125
| 1,953
| 1,054
| 899
| 139
| 1
|
{-# LANGUAGE CPP, FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module QC.Common
(
ASCII(..)
, parseBS
, parseT
, toLazyBS
, toStrictBS
, Repack
, repackBS
, repackBS_
, repackT
, repackT_
, liftOp
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<*>), (<$>))
#endif
import Data.Char (isAlpha)
import Test.QuickCheck
import Test.QuickCheck.Unicode (shrinkChar, string)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Attoparsec.ByteString.Lazy as BL
import qualified Data.Attoparsec.Text.Lazy as TL
#if !MIN_VERSION_base(4,4,0)
-- This should really be a dependency on the random package :-(
instance Random Word8 where
randomR = integralRandomR
random = randomR (minBound,maxBound)
instance Arbitrary Word8 where
arbitrary = choose (minBound, maxBound)
#endif
parseBS :: BL.Parser r -> BL.ByteString -> Maybe r
parseBS p = BL.maybeResult . BL.parse p
parseT :: TL.Parser r -> TL.Text -> Maybe r
parseT p = TL.maybeResult . TL.parse p
toStrictBS :: BL.ByteString -> B.ByteString
toStrictBS = B.concat . BL.toChunks
toLazyBS :: B.ByteString -> BL.ByteString
toLazyBS = BL.fromChunks . (:[])
instance Arbitrary B.ByteString where
arbitrary = B.pack <$> arbitrary
shrink = map B.pack . shrink . B.unpack
instance Arbitrary BL.ByteString where
arbitrary = repackBS <$> arbitrary <*> arbitrary
shrink = map BL.pack . shrink . BL.unpack
newtype ASCII a = ASCII { fromASCII :: a }
deriving (Eq, Ord, Show)
instance Arbitrary (ASCII B.ByteString) where
arbitrary = (ASCII . B.pack) <$> listOf (choose (0,127))
shrink = map (ASCII . B.pack) . shrink . B.unpack . fromASCII
instance Arbitrary (ASCII BL.ByteString) where
arbitrary = ASCII <$> (repackBS <$> arbitrary <*> (fromASCII <$> arbitrary))
shrink = map (ASCII . BL.pack) . shrink . BL.unpack . fromASCII
type Repack = NonEmptyList (Positive (Small Int))
repackBS :: Repack -> B.ByteString -> BL.ByteString
repackBS (NonEmpty bs) =
BL.fromChunks . repackBS_ (map (getSmall . getPositive) bs)
repackBS_ :: [Int] -> B.ByteString -> [B.ByteString]
repackBS_ = go . cycle
where go (b:bs) s
| B.null s = []
| otherwise = let (h,t) = B.splitAt b s
in h : go bs t
go _ _ = error "unpossible"
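-- Illustrative example (chunk sizes cycle over the given list; the literal
-- assumes OverloadedStrings):
--   repackBS_ [1,2] "abcdef" == ["a","bc","d","ef"]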
instance Arbitrary T.Text where
arbitrary = T.pack <$> string
shrink = map T.pack . shrinkList shrinkChar . T.unpack
instance Arbitrary TL.Text where
arbitrary = TL.pack <$> string
shrink = map TL.pack . shrinkList shrinkChar . TL.unpack
repackT :: Repack -> T.Text -> TL.Text
repackT (NonEmpty bs) =
TL.fromChunks . repackT_ (map (getSmall . getPositive) bs)
repackT_ :: [Int] -> T.Text -> [T.Text]
repackT_ = go . cycle
where go (b:bs) s
| T.null s = []
| otherwise = let (h,t) = T.splitAt b s
in h : go bs t
go _ _ = error "unpossible"
liftOp :: (Show a, Testable prop) =>
String -> (a -> a -> prop) -> a -> a -> Property
liftOp name f x y = counterexample desc (f x y)
where op = case name of
(c:_) | isAlpha c -> " `" ++ name ++ "` "
| otherwise -> " " ++ name ++ " "
_ -> " ??? "
desc = "not (" ++ show x ++ op ++ show y ++ ")"
|
beni55/attoparsec
|
tests/QC/Common.hs
|
bsd-3-clause
| 3,464
| 0
| 13
| 856
| 1,232
| 660
| 572
| 87
| 2
|
module Offset where
-- Test getOffset in various indented cases
bob a b = x
where x = 3
bib a b = x
where
x = 3
bab a b =
let bar = 3
in b + bar
foo x y =
do c <- getChar
return c
fud a b = let bar = 3
in b + bar
|
RefactoringTools/HaRe
|
test/testdata/Offset.hs
|
bsd-3-clause
| 258
| 0
| 8
| 107
| 112
| 56
| 56
| 13
| 1
|
{-# LANGUAGE ParallelListComp #-}
module Main where
import Text.PrettyPrint
import System.Environment ( getArgs )
main = do
[s] <- getArgs
let n = read s
mapM_ (putStrLn . render . generate) [2..n]
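-- Usage sketch (illustrative, assuming the file is run directly):
--   runghc GenUnboxTuple.hs 6
-- prints the generated instance boilerplate for tuple arities 2 through 6.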
generate :: Int -> Doc
generate n =
vcat [ text "#ifdef DEFINE_INSTANCES"
, data_instance "MVector s" "MV"
, data_instance "Vector" "V"
, class_instance "Unbox"
, class_instance "M.MVector MVector" <+> text "where"
, nest 2 $ vcat $ map method methods_MVector
, class_instance "G.Vector Vector" <+> text "where"
, nest 2 $ vcat $ map method methods_Vector
, text "#endif"
, text "#ifdef DEFINE_MUTABLE"
, define_zip "MVector s" "MV"
, define_unzip "MVector s" "MV"
, text "#endif"
, text "#ifdef DEFINE_IMMUTABLE"
, define_zip "Vector" "V"
, define_zip_rule
, define_unzip "Vector" "V"
, text "#endif"
]
where
vars = map char $ take n ['a'..]
varss = map (<> char 's') vars
tuple xs = parens $ hsep $ punctuate comma xs
vtuple xs = parens $ sep $ punctuate comma xs
con s = text s <> char '_' <> int n
var c = text (c : "_")
data_instance ty c
= hang (hsep [text "data instance", text ty, tuple vars])
4
(hsep [char '=', con c, text "{-# UNPACK #-} !Int"
, vcat $ map (\v -> char '!' <> parens (text ty <+> v)) vars])
class_instance cls
= text "instance" <+> vtuple [text "Unbox" <+> v | v <- vars]
<+> text "=>" <+> text cls <+> tuple vars
define_zip ty c
= sep [text "-- | /O(1)/ Zip" <+> int n <+> text "vectors"
,name <+> text "::"
<+> vtuple [text "Unbox" <+> v | v <- vars]
<+> text "=>"
<+> sep (punctuate (text " ->") [text ty <+> v | v <- vars])
<+> text "->"
<+> text ty <+> tuple vars
,text "{-# INLINE_STREAM" <+> name <+> text "#-}"
,name <+> sep varss
<+> text "="
<+> con c
<+> text "len"
<+> sep [parens $ text "unsafeSlice"
<+> char '0'
<+> text "len"
<+> vs | vs <- varss]
,nest 2 $ hang (text "where")
2
$ text "len ="
<+> sep (punctuate (text " `delayed_min`")
[text "length" <+> vs | vs <- varss])
]
where
name | n == 2 = text "zip"
| otherwise = text "zip" <> int n
define_zip_rule
= hang (text "{-# RULES" <+> text "\"stream/" <> name "zip"
<> text " [Vector.Unboxed]\" forall" <+> sep varss <+> char '.')
2 $
text "G.stream" <+> parens (name "zip" <+> sep varss)
<+> char '='
<+> text "Stream." <> name "zipWith" <+> tuple (replicate n empty)
<+> sep [parens $ text "G.stream" <+> vs | vs <- varss]
$$ text "#-}"
where
name s | n == 2 = text s
| otherwise = text s <> int n
define_unzip ty c
= sep [text "-- | /O(1)/ Unzip" <+> int n <+> text "vectors"
,name <+> text "::"
<+> vtuple [text "Unbox" <+> v | v <- vars]
<+> text "=>"
<+> text ty <+> tuple vars
<+> text "->" <+> vtuple [text ty <+> v | v <- vars]
,text "{-# INLINE" <+> name <+> text "#-}"
,name <+> pat c <+> text "="
<+> vtuple varss
]
where
name | n == 2 = text "unzip"
| otherwise = text "unzip" <> int n
pat c = parens $ con c <+> var 'n' <+> sep varss
patn c n = parens $ con c <+> (var 'n' <> int n)
<+> sep [v <> int n | v <- varss]
qM s = text "M." <> text s
qG s = text "G." <> text s
gen_length c _ = (pat c, var 'n')
gen_unsafeSlice mod c rec
= (var 'i' <+> var 'm' <+> pat c,
con c <+> var 'm'
<+> vcat [parens
$ text mod <> char '.' <> text rec
<+> var 'i' <+> var 'm' <+> vs
| vs <- varss])
gen_overlaps rec = (patn "MV" 1 <+> patn "MV" 2,
vcat $ r : [text "||" <+> r | r <- rs])
where
r : rs = [qM rec <+> v <> char '1' <+> v <> char '2' | v <- varss]
gen_unsafeNew rec
= (var 'n',
mk_do [v <+> text "<-" <+> qM rec <+> var 'n' | v <- varss]
$ text "return $" <+> con "MV" <+> var 'n' <+> sep varss)
gen_unsafeReplicate rec
= (var 'n' <+> tuple vars,
mk_do [vs <+> text "<-" <+> qM rec <+> var 'n' <+> v
| v <- vars | vs <- varss]
$ text "return $" <+> con "MV" <+> var 'n' <+> sep varss)
gen_unsafeRead rec
= (pat "MV" <+> var 'i',
mk_do [v <+> text "<-" <+> qM rec <+> vs <+> var 'i' | v <- vars
| vs <- varss]
$ text "return" <+> tuple vars)
gen_unsafeWrite rec
= (pat "MV" <+> var 'i' <+> tuple vars,
mk_do [qM rec <+> vs <+> var 'i' <+> v | v <- vars | vs <- varss]
empty)
gen_clear rec
= (pat "MV", mk_do [qM rec <+> vs | vs <- varss] empty)
gen_set rec
= (pat "MV" <+> tuple vars,
mk_do [qM rec <+> vs <+> v | vs <- varss | v <- vars] empty)
gen_unsafeCopy c q rec
= (patn "MV" 1 <+> patn c 2,
mk_do [q rec <+> vs <> char '1' <+> vs <> char '2' | vs <- varss]
empty)
gen_unsafeMove rec
= (patn "MV" 1 <+> patn "MV" 2,
mk_do [qM rec <+> vs <> char '1' <+> vs <> char '2' | vs <- varss]
empty)
gen_unsafeGrow rec
= (pat "MV" <+> var 'm',
mk_do [vs <> char '\'' <+> text "<-"
<+> qM rec <+> vs <+> var 'm' | vs <- varss]
$ text "return $" <+> con "MV"
<+> parens (var 'm' <> char '+' <> var 'n')
<+> sep (map (<> char '\'') varss))
gen_unsafeFreeze rec
= (pat "MV",
mk_do [vs <> char '\'' <+> text "<-" <+> qG rec <+> vs | vs <- varss]
$ text "return $" <+> con "V" <+> var 'n'
<+> sep [vs <> char '\'' | vs <- varss])
gen_unsafeThaw rec
= (pat "V",
mk_do [vs <> char '\'' <+> text "<-" <+> qG rec <+> vs | vs <- varss]
$ text "return $" <+> con "MV" <+> var 'n'
<+> sep [vs <> char '\'' | vs <- varss])
gen_basicUnsafeIndexM rec
= (pat "V" <+> var 'i',
mk_do [v <+> text "<-" <+> qG rec <+> vs <+> var 'i'
| vs <- varss | v <- vars]
$ text "return" <+> tuple vars)
gen_elemseq rec
= (char '_' <+> tuple vars,
vcat $ r : [char '.' <+> r | r <- rs])
where
r : rs = [qG rec <+> parens (text "undefined :: Vector" <+> v)
<+> v | v <- vars]
mk_do cmds ret = hang (text "do")
2
$ vcat $ cmds ++ [ret]
method (s, f) = case f s of
(p,e) -> text "{-# INLINE" <+> text s <+> text " #-}"
$$ hang (text s <+> p)
4
(char '=' <+> e)
methods_MVector = [("basicLength", gen_length "MV")
,("basicUnsafeSlice", gen_unsafeSlice "M" "MV")
,("basicOverlaps", gen_overlaps)
,("basicUnsafeNew", gen_unsafeNew)
,("basicUnsafeReplicate", gen_unsafeReplicate)
,("basicUnsafeRead", gen_unsafeRead)
,("basicUnsafeWrite", gen_unsafeWrite)
,("basicClear", gen_clear)
,("basicSet", gen_set)
,("basicUnsafeCopy", gen_unsafeCopy "MV" qM)
,("basicUnsafeMove", gen_unsafeMove)
,("basicUnsafeGrow", gen_unsafeGrow)]
methods_Vector = [("basicUnsafeFreeze", gen_unsafeFreeze)
,("basicUnsafeThaw", gen_unsafeThaw)
,("basicLength", gen_length "V")
,("basicUnsafeSlice", gen_unsafeSlice "G" "V")
,("basicUnsafeIndexM", gen_basicUnsafeIndexM)
,("basicUnsafeCopy", gen_unsafeCopy "V" qG)
,("elemseq", gen_elemseq)]
|
moonKimura/vector-0.10.9.1
|
internal/GenUnboxTuple.hs
|
bsd-3-clause
| 9,078
| 5
| 22
| 4,042
| 3,030
| 1,469
| 1,561
| 190
| 1
|
{-# Language OverloadedStrings #-}
-- from https://ocharles.org.uk/blog/posts/2014-12-17-overloaded-strings.html
import Data.String
n :: Num a => a
n = 43
f :: Fractional a => a
f = 03.1420
-- foo :: Text
foo :: Data.String.IsString a => a
foo = "hello\n there"
|
ezyang/ghc
|
testsuite/tests/printer/Ppr026.hs
|
bsd-3-clause
| 267
| 0
| 8
| 46
| 81
| 38
| 43
| 8
| 1
|
{--
This is a script to generate the necessary tables to support Windows code page
encoding/decoding.
License: see libraries/base/LICENSE
The code page tables are available from :
http://www.unicode.org/Public/MAPPINGS/
To run this script, use e.g.
runghc MakeTable.hs <module-name> <output-file> <codepage-dir>/*.TXT
Currently, this script only supports single-byte encodings, since the lookup
tables required for the CJK double-byte codepages are too large to be
statically linked into every executable. We plan to add support for them once
GHC is able to produce Windows DLLs.
--}
module Main where
import System.FilePath
import qualified Data.Map as Map
import System.IO
import Data.Maybe (mapMaybe)
import Data.List (intersperse)
import Data.Word
import Numeric
import Control.Monad.State
import System.Environment
import Control.Exception(evaluate)
main :: IO ()
main = do
moduleName:outFile:files <- getArgs
let badFiles = -- These fail with an error like
-- MakeTable: Enum.toEnum{Word8}: tag (33088) is outside of bounds (0,255)
-- I have no idea what's going on, so for now we just
-- skip them.
["CPs/CP932.TXT",
"CPs/CP936.TXT",
"CPs/CP949.TXT",
"CPs/CP950.TXT"]
let files' = filter (`notElem` badFiles) files
sbes <- mapM readMapAndIx files'
putStrLn "Writing output"
withBinaryFile outFile WriteMode $ flip hPutStr
$ unlines $ makeTableFile moduleName files' sbes
where
readMapAndIx f = do
putStrLn ("Reading " ++ f)
m <- readMap f
return (codePageNum f, m)
-- filenames are assumed to be of the form "CP1250.TXT"
codePageNum :: FilePath -> Int
codePageNum = read . drop 2 . takeBaseName
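-- Illustrative example: codePageNum "CPs/CP1250.TXT" == 1250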
readMap :: (Ord a, Enum a) => FilePath -> IO (Map.Map a Char)
readMap f = withBinaryFile f ReadMode $ \h -> do
contents <- hGetContents h
let ms = Map.fromList $ mapMaybe parseLine $ lines contents
evaluate $ Map.size ms
return ms
parseLine :: Enum a => String -> Maybe (a,Char)
parseLine s = case words s of
('#':_):_ -> Nothing
bs:"#DBCS":_ -> Just (readHex' bs, toEnum 0xDC00)
bs:"#UNDEFINED":_ -> Just (readHex' bs, toEnum 0)
bs:cs:('#':_):_ -> Just (readHex' bs, readCharHex cs)
_ -> Nothing
readHex' :: Enum a => String -> a
readHex' ('0':'x':s) = case readHex s of
[(n,"")] -> toEnum n -- explicitly call toEnum to catch overflow errors.
_ -> errorWithoutStackTrace $ "Can't read hex: " ++ show s
readHex' s = errorWithoutStackTrace $ "Can't read hex: " ++ show s
readCharHex :: String -> Char
readCharHex s = if c > fromEnum (maxBound :: Word16)
then errorWithoutStackTrace "Can't handle non-BMP character."
else toEnum c
where c = readHex' s
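-- Illustrative examples (not from the original source):
--   readHex' "0x41"      == (65 :: Int)
--   readCharHex "0x0041" == 'A'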
-------------------------------------------
-- Writing out the main data values.
makeTableFile :: String -> [FilePath] -> [(Int,Map.Map Word8 Char)] -> [String]
makeTableFile moduleName files maps = concat
[ languageDirectives, firstComment files, header,
theImports, theTypes, blockSizeText, tablePart]
where
header = [ "module " ++ moduleName ++ " where"
, ""
]
tablePart = [ "codePageMap :: [(Word32, CodePageArrays)]"
, "codePageMap = ["
] ++ (intersperse "\n ," $ map mkTableEntry maps)
++ [" ]"]
mkTableEntry (i,m) = " (" ++ show i ++ ", " ++ makeSBE m ++ " )"
blockSizeText = ["blockBitSize :: Int", "blockBitSize = " ++ show blockBitSize]
makeSBE :: Map.Map Word8 Char -> String
makeSBE m = unlines
[ "SingleByteCP {"
, " decoderArray = " ++ mkConvArray es
, " , encoderArray = " ++ mkCompactArray (swapMap m)
, " }"
]
where
es = [Map.findWithDefault '\0' x m | x <- [minBound..maxBound]]
swapMap :: (Ord a, Ord b, Enum a, Enum b) => Map.Map a b -> Map.Map b a
swapMap = Map.insert (toEnum 0) (toEnum 0) . Map.fromList . map swap . Map.toList
where
swap (x,y) = (y,x)
mkConvArray :: Embed a => [a] -> String
mkConvArray xs = "ConvArray \"" ++ concatMap mkHex xs ++ "\"#"
-------------------------------------------
-- Compact arrays
--
-- The decoding map (from Word8 to Char) can be implemented with a simple array
-- of 256 Word16's. Bytes which do not belong to the code page are mapped to
-- '\0'.
--
-- However, a naive table mapping Char to Word8 would require 2^16 Word8's. We
-- can use much less space with the right data structure, since at most 256 of
-- those entries are nonzero.
--
-- We use "compact arrays", as described in "Unicode Demystified" by Richard
-- Gillam.
--
-- Fix a block size S which is a power of two. We compress an array of N
-- entries (where N>>S) as follows. First, split the array into blocks of size
-- S, then remove all repeated blocks to form the "value" array.  Then construct
-- a separate "index" array which maps the position of blocks in the old array
-- to a position in the value array.
--
-- For example, assume that S=32 and we have six blocks ABABCA, each with 32
-- elements.
--
-- Then the compressed table consists of two arrays:
-- 1) An array "values", concatenating the unique blocks ABC
-- 2) An array "indices" which equals [0,1,0,1,2,0].
--
-- To look up '\100', first calculate divMod 100 32 = (3,4). Since
-- indices[3]=1, we look at the second unique block B; thus the encoded byte is
-- B[4].
--
-- The upshot of this representation is that the lookup is very quick as it only
-- requires two array accesses plus some bit masking/shifting.
-- From testing, this is an optimal size.
blockBitSize :: Int
blockBitSize = 6
mkCompactArray :: (Embed a, Embed b) => Map.Map a b -> String
mkCompactArray m = unlines [
""
, " CompactArray {"
, " encoderIndices = " ++ mkConvArray is'
, " , encoderValues = "
++ mkConvArray (concat $ Map.elems vs)
, " , encoderMax = " ++ show (fst $ Map.findMax m)
, " }"
]
where
blockSize = 2 ^ blockBitSize
(is,(vs,_)) = compress blockSize $ m
is' = map (* blockSize) is
type CompressState b = (Map.Map Int [b], Map.Map [b] Int)
-- each entry in the list corresponds to a block of size n.
compress :: (Bounded a, Enum a, Ord a, Bounded b, Ord b) => Int -> Map.Map a b
-> ([Int], CompressState b)
compress n ms = runState (mapM lookupOrAdd chunks) (Map.empty, Map.empty)
where
chunks = mkChunks $ map (\i -> Map.findWithDefault minBound i ms)
$ [minBound..fst (Map.findMax ms)]
mkChunks [] = []
mkChunks xs = take n xs : mkChunks (drop n xs)
lookupOrAdd xs = do
(m,rm) <- get
case Map.lookup xs rm of
Just i -> return i
Nothing -> do
let i = if Map.null m
then 0
else 1 + fst (Map.findMax m)
put (Map.insert i xs m, Map.insert xs i rm)
return i
-------------------------------------------
-- Static parts of the generated module.
languageDirectives :: [String]
languageDirectives = ["{-# LANGUAGE MagicHash, NoImplicitPrelude #-}"]
firstComment :: [FilePath] -> [String]
firstComment files = map ("-- " ++) $
[ "Do not edit this file directly!"
, "It was generated by the MakeTable.hs script using the files below."
, "To regenerate it, run \"make\" in ../../../../codepages/"
, ""
, "Files:"
] ++ map takeFileName files
theImports :: [String]
theImports = map ("import " ++ )
["GHC.Prim", "GHC.Base", "GHC.Word"]
theTypes :: [String]
theTypes = [ "data ConvArray a = ConvArray Addr#"
, "data CompactArray a b = CompactArray {"
, " encoderMax :: !a,"
, " encoderIndices :: !(ConvArray Int),"
, " encoderValues :: !(ConvArray b)"
, " }"
, ""
, "data CodePageArrays = SingleByteCP {"
, " decoderArray :: !(ConvArray Char),"
, " encoderArray :: !(CompactArray Char Word8)"
, " }"
, ""
]
-------------------------------------------
-- Embed class and associated functions
class (Ord a, Enum a, Bounded a, Show a) => Embed a where
mkHex :: a -> String
-- | @since 4.2.0.0
instance Embed Word8 where
mkHex = showHex'
-- | @since 4.2.0.0
instance Embed Word16 where
mkHex = repDualByte
-- | @since 4.2.0.0
instance Embed Char where
mkHex = repDualByte
-- this is used for the indices of the compressed array.
-- | @since 4.2.0.0
instance Embed Int where
mkHex = repDualByte
showHex' :: Integral a => a -> String
showHex' s = "\\x" ++ showHex s ""
repDualByte :: Enum c => c -> String
repDualByte c
| n >= 2^(16::Int) = errorWithoutStackTrace "value is too high!"
-- NOTE : this assumes little-endian architecture. But we're only using this on Windows,
-- so it's probably OK.
| otherwise = showHex' (n `mod` 256) ++ showHex' (n `div` 256)
where
n = fromEnum c
|
ezyang/ghc
|
libraries/base/codepages/MakeTable.hs
|
bsd-3-clause
| 9,306
| 0
| 22
| 2,585
| 2,068
| 1,102
| 966
| 152
| 5
|
module RayAndQ where
import qualified Matrix4f as M4x4
import qualified Vector4f as V4
-- | Calculate @(ray, q)@ for the given inverse projection matrix and frustum corners
ray_and_q :: M4x4.T -> (V4.T, V4.T) -> (V4.T, V4.T)
ray_and_q inverse_m (near, far) =
let
-- Unproject the NDC coordinates to eye-space
near_hom = M4x4.mult_v inverse_m near
near_eye = V4.div_s near_hom (V4.w near_hom)
far_hom = M4x4.mult_v inverse_m far
far_eye = V4.div_s far_hom (V4.w far_hom)
-- Calculate a ray with ray.z == 1.0
ray_initial = V4.sub4 far_eye near_eye
ray = V4.div_s ray_initial (V4.z ray_initial)
-- Subtract the scaled ray from the near corner to calculate q
q = V4.sub4 near_eye (V4.scale ray (V4.z near_eye))
in
(ray, q)
|
io7m/r2
|
com.io7m.r2.documentation/src/main/resources/com/io7m/r2/documentation/haskell/RayAndQ.hs
|
isc
| 793
| 0
| 14
| 187
| 209
| 115
| 94
| 14
| 1
|
module Test.Helpers where
import Prelude hiding (fail)
import Test.HUnit
import Data.SyntaxIR
assertTrue description expression = assertEqual description True expression
assertFalse description expression = assertEqual description False expression
fail description = assertEqual description False True
assertIsErr (Err _) = return ()
assertIsErr other = fail $ (show other) ++ " is not an error"
|
AKST/lisp.hs
|
test/Test/Helpers.hs
|
mit
| 409
| 0
| 8
| 66
| 115
| 59
| 56
| 9
| 1
|
{-# LANGUAGE NamedFieldPuns, FlexibleInstances, FlexibleContexts, GeneralizedNewtypeDeriving #-}
{-
Copyright (C) 2012-2017 Jimmy Liang, Kacper Bak, Michal Antkiewicz <http://gsd.uwaterloo.ca>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
module Language.Clafer.Intermediate.ResolverType (resolveTModule) where
import Language.ClaferT
import Language.Clafer.Common
import Language.Clafer.Intermediate.Intclafer hiding (uid)
import Language.Clafer.Intermediate.Desugarer
import Language.Clafer.Intermediate.TypeSystem
import Language.Clafer.Front.PrintClafer
import Control.Applicative
import Control.Exception (assert)
import Control.Lens ((&), (%~), traversed)
import Control.Monad.Except
import Control.Monad.List
import Control.Monad.Reader
import Data.Either
import Data.List
import Data.Maybe
import Prelude hiding (exp)
type TypeDecls = [(String, IType)]
data TypeInfo = TypeInfo {iTypeDecls::TypeDecls, iUIDIClaferMap::UIDIClaferMap, iCurThis::IClafer, iCurPath::Maybe IType}
newtype TypeAnalysis a = TypeAnalysis (ReaderT TypeInfo (Either ClaferSErr) a)
deriving (MonadError ClaferSErr, Monad, Functor, MonadReader TypeInfo, Applicative)
-- return the type of a UID but give preference to local declarations in quantified expressions, which shadow global names
typeOfUid :: MonadTypeAnalysis m => UID -> m IType
typeOfUid uid = (fromMaybe (TClafer [uid]) . lookup uid) <$> typeDecls
class (Functor m, Monad m) => MonadTypeAnalysis m where
-- What "this" refers to
curThis :: m IClafer
localCurThis :: IClafer -> m a -> m a
-- The next path is a child of curPath (or Nothing)
curPath :: m (Maybe IType)
localCurPath :: IType -> m a -> m a
-- Extra declarations
typeDecls :: m TypeDecls
localDecls :: TypeDecls -> m a -> m a
instance MonadTypeAnalysis TypeAnalysis where
curThis = TypeAnalysis $ asks iCurThis
localCurThis newThis (TypeAnalysis d) =
TypeAnalysis $ local setCurThis d
where
setCurThis t = t{iCurThis = newThis}
curPath = TypeAnalysis $ asks iCurPath
localCurPath newPath (TypeAnalysis d) =
TypeAnalysis $ local setCurPath d
where
setCurPath t = t{iCurPath = Just newPath}
typeDecls = TypeAnalysis $ asks iTypeDecls
localDecls extra (TypeAnalysis d) =
TypeAnalysis $ local addTypeDecls d
where
addTypeDecls t@TypeInfo{iTypeDecls = c} = t{iTypeDecls = extra ++ c}
instance MonadTypeAnalysis m => MonadTypeAnalysis (ListT m) where
curThis = lift curThis
localCurThis = mapListT . localCurThis
curPath = lift curPath
localCurPath = mapListT . localCurPath
typeDecls = lift typeDecls
localDecls = mapListT . localDecls
instance MonadTypeAnalysis m => MonadTypeAnalysis (ExceptT ClaferSErr m) where
curThis = lift curThis
localCurThis = mapExceptT . localCurThis
curPath = lift curPath
localCurPath = mapExceptT . localCurPath
typeDecls = lift typeDecls
localDecls = mapExceptT . localDecls
-- | Type inference and checking
runTypeAnalysis :: TypeAnalysis a -> IModule -> Either ClaferSErr a
runTypeAnalysis (TypeAnalysis tc) imodule = runReaderT tc $ TypeInfo [] (createUidIClaferMap imodule) undefined Nothing
claferWithUid :: (Monad m) => UIDIClaferMap -> String -> m IClafer
claferWithUid uidIClaferMap' u = case findIClafer uidIClaferMap' u of
Just c -> return c
Nothing -> fail $ "ResolverType.claferWithUid: " ++ u ++ " not found!"
parentOf :: (Monad m) => UIDIClaferMap -> UID -> m UID
parentOf uidIClaferMap' c = case _parentUID <$> findIClafer uidIClaferMap' c of
Just u -> return u
Nothing -> fail $ "ResolverType.parentOf: " ++ c ++ " not found!"
{-
- C is an direct child of B.
-
- abstract A
- C // C - child
- B : A // B - parent
-}
isIndirectChild :: (Monad m) => UIDIClaferMap -> UID -> UID -> m Bool
isIndirectChild uidIClaferMap' child parent = do
(_:allSupers) <- hierarchy uidIClaferMap' parent
childOfSupers <- mapM ((isChild uidIClaferMap' child)._uid) allSupers
return $ or childOfSupers
isChild :: (Monad m) => UIDIClaferMap -> UID -> UID -> m Bool
isChild uidIClaferMap' child parent =
case findIClafer uidIClaferMap' child of
Nothing -> return False
Just childIClafer -> do
let directChild = (parent == _parentUID childIClafer)
indirectChild <- isIndirectChild uidIClaferMap' child parent
return $ directChild || indirectChild
str :: IType -> String
str t =
case unionType t of
[t'] -> t'
ts -> "[" ++ intercalate "," ts ++ "]"
showType :: PExp -> String
showType PExp{ _iType=Nothing } = "unknown type"
showType PExp{ _iType=(Just t) } = show t
data TAMode
= TAReferences -- ^ Phase one: only process references
| TAExpressions -- ^ Phase two: only process constraints and goals
resolveTModule :: (IModule, GEnv) -> Either ClaferSErr IModule
resolveTModule (imodule, _) =
case runTypeAnalysis (analysisReferences $ _mDecls imodule) imodule of
Right mDecls' -> case runTypeAnalysis (analysisExpressions $ mDecls') imodule{_mDecls = mDecls'} of
Right mDecls'' -> return imodule{_mDecls = mDecls''}
Left err -> throwError err
Left err -> throwError err
where
analysisReferences = mapM (resolveTElement TAReferences rootIdent)
analysisExpressions = mapM (resolveTElement TAExpressions rootIdent)
-- Phase one: only process references
resolveTElement :: TAMode -> String -> IElement -> TypeAnalysis IElement
resolveTElement TAReferences _ (IEClafer iclafer) =
do
uidIClaferMap' <- asks iUIDIClaferMap
reference' <- case _reference iclafer of
Nothing -> return Nothing
Just originalReference -> do
refs' <- resolveTPExp $ _ref originalReference
case refs' of
[] -> return Nothing
[ref'] -> return $ refWithNewType uidIClaferMap' originalReference ref'
(ref':_) -> return $ refWithNewType uidIClaferMap' originalReference ref'
elements' <- mapM (resolveTElement TAReferences (_uid iclafer)) (_elements iclafer)
return $ IEClafer iclafer{_elements = elements', _reference=reference'}
where
refWithNewType uMap oRef r = let
r' = r & iType.traversed %~ (addHierarchy uMap)
in case _iType r' of
Nothing -> Nothing
Just t -> if isTBoolean t
then Nothing
else Just $ oRef{_ref=r'}
resolveTElement TAReferences _ iec@IEConstraint{} = return iec
resolveTElement TAReferences _ ieg@IEGoal{} = return ieg
-- Phase two: only process constraints and goals
resolveTElement TAExpressions _ (IEClafer iclafer) =
do
elements' <- mapM (resolveTElement TAExpressions (_uid iclafer)) (_elements iclafer)
return $ IEClafer iclafer{_elements = elements'}
resolveTElement TAExpressions parent' (IEConstraint _isHard _pexp) =
IEConstraint _isHard <$> (testBoolean =<< resolveTConstraint parent' _pexp)
where
testBoolean pexp' =
do
unless (isTBoolean $ typeOf pexp') $
throwError $ SemanticErr (_inPos pexp') ("A constraint requires an expression of type 'TBoolean' but got '" ++ showType pexp' ++ "'")
return pexp'
resolveTElement TAExpressions parent' (IEGoal isMaximize' pexp') =
IEGoal isMaximize' <$> resolveTConstraint parent' pexp'
resolveTConstraint :: String -> PExp -> TypeAnalysis PExp
resolveTConstraint curThis' constraint =
do
uidIClaferMap' <- asks iUIDIClaferMap
curThis'' <- claferWithUid uidIClaferMap' curThis'
head <$> localCurThis curThis'' (resolveTPExp constraint :: TypeAnalysis [PExp])
resolveTPExp :: PExp -> TypeAnalysis [PExp]
resolveTPExp p =
do
x <- resolveTPExp' p
case partitionEithers x of
(f:_, []) -> throwError f -- Case 1: Only fails. Complain about the first one.
([], []) -> throwError $ SemanticErr (_inPos p) ("No results but no errors for " ++ show p) -- Case 2: No success and no error message. Bug.
(_, xs) -> return xs -- Case 3: At least one success.
resolveTPExp' :: PExp -> TypeAnalysis [Either ClaferSErr PExp]
resolveTPExp' p@PExp{_inPos, _exp = IClaferId{_sident = "dref"}} = do
uidIClaferMap' <- asks iUIDIClaferMap
runListT $ runExceptT $ do
curPath' <- curPath
case curPath' of
Just curPath'' -> do
case concatMap (getTMaps uidIClaferMap') $ getTClafers uidIClaferMap' curPath'' of
[t'] -> return $ p `withType` t'
(t':_) -> return $ p `withType` t'
[] -> throwError $ SemanticErr _inPos ("Cannot deref from type '" ++ str curPath'' ++ "'")
Nothing -> throwError $ SemanticErr _inPos ("Cannot deref at the start of a path")
resolveTPExp' p@PExp{_inPos, _exp = IClaferId{_sident = "parent"}} = do
uidIClaferMap' <- asks iUIDIClaferMap
runListT $ runExceptT $ do
curPath' <- curPath
case curPath' of
Just curPath'' -> do
parent' <- fromUnionType <$> runListT (parentOf uidIClaferMap' =<< liftList (unionType curPath''))
when (isNothing parent') $
throwError $ SemanticErr _inPos "Cannot parent from root"
let result = p `withType` fromJust parent'
return result
Nothing -> throwError $ SemanticErr _inPos "Cannot parent at the start of a path"
resolveTPExp' p@PExp{_exp = IClaferId{_sident = "integer"}} = runListT $ runExceptT $ return $ p `withType` TInteger
resolveTPExp' p@PExp{_exp = IClaferId{_sident = "int"}} = runListT $ runExceptT $ return $ p `withType` TInteger
resolveTPExp' p@PExp{_exp = IClaferId{_sident = "string"}} = runListT $ runExceptT $ return $ p `withType` TString
resolveTPExp' p@PExp{_exp = IClaferId{_sident = "double"}} = runListT $ runExceptT $ return $ p `withType` TDouble
resolveTPExp' p@PExp{_exp = IClaferId{_sident = "real"}} = runListT $ runExceptT $ return $ p `withType` TReal
resolveTPExp' p@PExp{_inPos, _exp = IClaferId{_sident="this"}} =
runListT $ runExceptT $ do
sident' <- _uid <$> curThis
result <- (p `withType`) <$> typeOfUid sident'
return result
<++>
addDref result -- Case 2: Dereference the sident 1..* times
resolveTPExp' p@PExp{_inPos, _exp = IClaferId{_sident, _isTop}} = do
uidIClaferMap' <- asks iUIDIClaferMap
runListT $ runExceptT $ do
curPath' <- curPath
sident' <- if _sident == "this" then _uid <$> curThis else return _sident
when (isJust curPath') $ do
c <- mapM (isChild uidIClaferMap' sident') $ unionType $ fromJust curPath'
let parentId' = str (fromJust curPath')
unless (or c || parentId' == "root") $ throwError $ SemanticErr _inPos ("'" ++ sident' ++ "' is not a child of type '" ++ parentId' ++ "'")
result <- (p `withType`) <$> typeOfUid sident'
if _isTop
then return result -- Case 1: Use the sident
<++>
addDref result -- Case 2: Dereference the sident 1..* times
<++>
addSome result
else return result -- all not top-level identifiers must be in a path
resolveTPExp' p@PExp{_inPos, _exp} =
runListT $ runExceptT $ (case _exp of
e@IFunExp {_op = ".", _exps = [arg1, arg2]} -> do
(iType', exp') <- do
arg1' <- lift $ ListT $ resolveTPExp arg1
localCurPath (typeOf arg1') $ do
arg2' <- liftError $ lift $ ListT $ resolveTPExp arg2
(case _iType arg2' of
Just (t'@TClafer{}) -> return (t', e{_exps = [arg1', arg2']})
Just (TMap{_ta=t'}) -> return (t', e{_exps = [arg1', arg2']})
_ -> fail $ "Function '.' cannot be performed on " ++ showType arg1' ++ "\n.\n " ++ showType arg2')
let result = p{_iType = Just iType', _exp = exp'}
return result -- Case 1: Use the sident
<++>
addDref result -- Case 2: Dereference the sident 1..* times
<++>
addSome result
_ -> do
(iType', exp') <- ExceptT $ ListT $ resolveTExp _exp
return p{_iType = Just iType', _exp = exp'})
where
resolveTExp :: IExp -> TypeAnalysis [Either ClaferSErr (IType, IExp)]
resolveTExp e@(IInt _) = runListT $ runExceptT $ return (TInteger, e)
resolveTExp e@(IDouble _) = runListT $ runExceptT $ return (TDouble, e)
resolveTExp e@(IReal _) = runListT $ runExceptT $ return (TReal, e)
resolveTExp e@(IStr _) = runListT $ runExceptT $ return (TString, e)
resolveTExp e@IFunExp {_op, _exps = [arg]} =
runListT $ runExceptT $ do
arg' <- lift $ ListT $ resolveTPExp arg
let t = typeOf arg'
let
test c =
unless c $
throwError $ SemanticErr _inPos ("Function '" ++ _op ++ "' cannot be performed on " ++ _op ++ " '" ++ showType arg' ++ "'")
let result
| _op == iNot = test (isTBoolean t) >> return TBoolean
| _op `elem` ltlUnOps = test (isTBoolean t) >> return TBoolean
| _op == iCSet = return TInteger
| _op == iSumSet = test (isTInteger t) >> return TInteger
| _op == iProdSet = test (isTInteger t) >> return TInteger
| _op `elem` [iMin, iMinimum, iMaximum, iMinimize, iMaximize] = test (numeric t) >> return t
| otherwise = assert False $ error $ "Unknown op '" ++ _op ++ "'"
result' <- result
return (result', e{_exps = [arg']})
resolveTExp e@IFunExp {_op = "++", _exps = [arg1, arg2]} = do
-- arg1s' <- resolveTPExp arg1
-- arg2s' <- resolveTPExp arg2
-- let union' a b = typeOf a +++ typeOf b
-- return [ return (union' arg1' arg2', e{_exps = [arg1', arg2']})
-- | (arg1', arg2') <- sortBy (comparing $ length . unionType . uncurry union') $ liftM2 (,) arg1s' arg2s'
-- , not (isTBoolean $ typeOf arg1') && not (isTBoolean $ typeOf arg2') ]
runListT $ runExceptT $ do
arg1' <- lift $ ListT $ resolveTPExp arg1
arg2' <- lift $ ListT $ resolveTPExp arg2
let t1 = typeOf arg1'
let t2 = typeOf arg2'
return (t1 +++ t2, e{_exps = [arg1', arg2']})
resolveTExp e@IFunExp {_op, _exps = [arg1, arg2]} = do
uidIClaferMap' <- asks iUIDIClaferMap
runListT $ runExceptT $ do
arg1' <- lift $ ListT $ resolveTPExp arg1
arg2' <- lift $ ListT $ resolveTPExp arg2
let t1 = typeOf arg1'
let t2 = typeOf arg2'
let testIntersect e1 e2 =
do
it <- intersection uidIClaferMap' e1 e2
case it of
Just it' -> if isTBoolean it'
then throwError $ SemanticErr _inPos ("Function '" ++ _op ++ "' cannot be performed on\n" ++ showType arg1' ++ "\n" ++ _op ++ "\n" ++ showType arg2')
else return it'
Nothing -> throwError $ SemanticErr _inPos ("Function '" ++ _op ++ "' cannot be performed on\n" ++ showType arg1' ++ "\n" ++ _op ++ "\n" ++ showType arg2')
let testNotSame e1 e2 =
when (e1 `sameAs` e2) $
throwError $ SemanticErr _inPos ("Function '" ++ _op ++ "' is redundant because the two subexpressions are always equivalent")
let test c =
unless c $
throwError $ SemanticErr _inPos ("Function '" ++ _op ++ "' cannot be performed on\n" ++ showType arg1' ++ "\n" ++ _op ++ "\n" ++ showType arg2')
let result
| _op `elem` logBinOps = test (isTBoolean t1 && isTBoolean t2) >> return TBoolean
| _op `elem` ltlBinOps = test (isTBoolean t1 && isTBoolean t2) >> return TBoolean
| _op `elem` [iLt, iGt, iLte, iGte] = test (numeric t1 && numeric t2) >> return TBoolean
| _op `elem` [iEq, iNeq] = testNotSame arg1' arg2' >> testIntersect t1 t2 >> return TBoolean
| _op == iDifference = testNotSame arg1' arg2' >> testIntersect t1 t2 >> return t1
| _op == iIntersection = testNotSame arg1' arg2' >> testIntersect t1 t2
| _op `elem` [iDomain, iRange] = testIntersect t1 t2
| _op `elem` relSetBinOps = testIntersect t1 t2 >> return TBoolean
| _op `elem` [iSub, iMul, iDiv, iRem] = test (numeric t1 && numeric t2) >> return (coerce t1 t2)
| _op == iPlus =
(test (isTString t1 && isTString t2) >> return TString) -- Case 1: String concatenation
`catchError`
const (test (numeric t1 && numeric t2) >> return (coerce t1 t2)) -- Case 2: Addition
| otherwise = error $ "ResolverType: Unknown op: " ++ show e
result' <- result
return (result', e{_exps = [arg1', arg2']})
resolveTExp e@(IFunExp "ifthenelse" [arg1, arg2, arg3]) = do
uidIClaferMap' <- asks iUIDIClaferMap
runListT $ runExceptT $ do
arg1' <- lift $ ListT $ resolveTPExp arg1
arg2' <- lift $ ListT $ resolveTPExp arg2
arg3' <- lift $ ListT $ resolveTPExp arg3
let t1 = typeOf arg1'
let t2 = typeOf arg2'
let t3 = typeOf arg3'
unless (isTBoolean t1) $
        throwError $ SemanticErr _inPos ("The type of condition in 'if/then/else' must be 'TBoolean', instead it is " ++ showType arg1')
it <- getIfThenElseType uidIClaferMap' t2 t3
t <- case it of
Just it' -> return it'
Nothing -> throwError $ SemanticErr _inPos ("Function 'if/then/else' cannot be performed on \nif\n" ++ showType arg1' ++ "\nthen\n" ++ showType arg2' ++ "\nelse\n" ++ showType arg3')
return (t, e{_exps = [arg1', arg2', arg3']})
-- some P, no P, one P
-- P must not be TBoolean
resolveTExp e@IDeclPExp{_oDecls=[], _bpexp} =
runListT $ runExceptT $ do
bpexp' <- liftError $ lift $ ListT $ resolveTPExp _bpexp
case _iType bpexp' of
Nothing -> fail $ "resolveTExp@IDeclPExp: No type computed for body\n" ++ show bpexp'
Just t' -> if isTBoolean t'
then throwError $ SemanticErr _inPos "The type of body of a quantified expression without local declarations must not be 'TBoolean'"
else return $ (TBoolean, e{_bpexp = bpexp'})
-- some x : X | P, no x : X | P, one x : X | P
-- X must not be TBoolean, P must be TBoolean
resolveTExp e@IDeclPExp{_oDecls, _bpexp} =
runListT $ runExceptT $ do
oDecls' <- mapM resolveTDecl _oDecls
let extraDecls = [(decl, typeOf $ _body oDecl) | oDecl <- oDecls', decl <- _decls oDecl]
localDecls extraDecls $ do
bpexp' <- liftError $ lift $ ListT $ resolveTPExp _bpexp
case _iType bpexp' of
Nothing -> fail $ "resolveTExp@IDeclPExp: No type computed for body\n" ++ show bpexp'
Just t' -> if isTBoolean t'
then return $ (TBoolean, e{_oDecls = oDecls', _bpexp = bpexp'})
else throwError $ SemanticErr _inPos $ "The type of body of a quantified expression with local declarations must be 'TBoolean', instead it is\n" ++ showType bpexp'
where
resolveTDecl d@IDecl{_body} =
do
body' <- lift $ ListT $ resolveTPExp _body
case _iType body' of
Nothing -> fail $ "resolveTExp@IDeclPExp: No type computed for local declaration\n" ++ show body'
Just t' -> if isTBoolean t'
then throwError $ SemanticErr _inPos "The type of declaration of a quantified expression must not be 'TBoolean'"
else return $ d{_body = body'}
resolveTExp e = error $ "Unknown iexp: " ++ show e
-- Adds "dref"s at the end, effectively dereferencing Clafers when needed.
addDref :: PExp -> ExceptT ClaferSErr (ListT TypeAnalysis) PExp
addDref pexp =
do
localCurPath (typeOf pexp) $ do
deref <- (ExceptT $ ListT $ resolveTPExp' $ newPExp $ IClaferId "" "dref" False NoBind) `catchError` const (lift mzero)
let result = (newPExp $ IFunExp "." [pexp, deref]) `withType` typeOf deref
return result <++> addDref result
where
newPExp = PExp Nothing "" $ _inPos pexp
-- Adds a quantifier "some" at the beginning, effectively turning an identifier into a TBoolean expression
addSome :: PExp -> ExceptT ClaferSErr (ListT TypeAnalysis) PExp
addSome pexp =
do
localCurPath (typeOf pexp) $ return $ (newPExp $ IDeclPExp ISome [] pexp) `withType` TBoolean
where
newPExp = PExp Nothing "" $ _inPos pexp
typeOf :: PExp -> IType
typeOf pexp = fromMaybe (error "No type") $ _iType pexp
withType :: PExp -> IType -> PExp
withType p t = p{_iType = Just t}
(<++>) :: MonadPlus m => ExceptT e m a -> ExceptT e m a -> ExceptT e m a
(ExceptT a) <++> (ExceptT b) = ExceptT $ a `mplus` b
liftError :: MonadError e m => ExceptT e m a -> ExceptT e m a
liftError e =
liftCatch catchError e throwError
where
liftCatch catchError' m h = ExceptT $ runExceptT m `catchError'` (runExceptT . h)
{-
-
- Utility functions
-
-}
liftList :: Monad m => [a] -> ListT m a
liftList = ListT . return
syntaxOf :: PExp -> String
syntaxOf = printTree . sugarExp
-- Returns true iff the left and right expressions are syntactically identical
sameAs :: PExp -> PExp -> Bool
sameAs e1 e2 = syntaxOf e1 == syntaxOf e2 -- Not very efficient but hopefully correct
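-- For example, 'testNotSame' in 'resolveTExp' relies on this check: an
-- equality, set difference or intersection whose two operands print
-- identically via 'syntaxOf' is reported as redundant.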
|
gsdlab/clafer
|
src/Language/Clafer/Intermediate/ResolverType.hs
|
mit
| 22,039
| 0
| 30
| 5,422
| 6,366
| 3,195
| 3,171
| -1
| -1
|
module Labyrinth.Reachability where
import Control.Lens
import Control.Monad.Reader
import Data.Function
import Data.List
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid
import Labyrinth.Map
type PositionMap a = M.Map Position a
type Connectivity = PositionMap [Position]
type Distribution = PositionMap Double
type Reachability = PositionMap Bool
nextCell :: Position -> Reader Labyrinth Position
nextCell pos = do
ct <- view $ cell pos . ctype
case ct of
River d -> return $ advance pos d
Pit i -> do
npits <- asks pitCount
let i' = (i + 1) `mod` npits
asks (pit i')
_ -> return pos
-- A list of positions player can go from a given cell
reachable :: Position -> Reader Labyrinth [Position]
reachable pos = do
dirs <- filterM (liftM (NoWall ==) . view . wall pos) allDirections
let npos = pos : map (advance pos) dirs
npos' <- filterM (asks . isInside) npos
npos'' <- forM npos' nextCell
return $ nub npos''
connectivity :: Labyrinth -> Connectivity
connectivity = runReader $ do
pos <- asks allPositions
posReach <- mapM reachable pos
return $ M.fromList $ zip pos posReach
insertAppend :: (Ord k) => k -> v -> M.Map k [v] -> M.Map k [v]
insertAppend k v = M.alter (addToList v) k
where addToList v = Just . (v :) . fromMaybe []
inverse :: (Ord a, Ord b) => M.Map a [b] -> M.Map b [a]
inverse = M.foldWithKey insertAll M.empty
where insertAll k vs m = foldr (`insertAppend` k) m vs
foldConcat :: (Monoid v) => M.Map k [v] -> M.Map k v
foldConcat = M.map mconcat
distribute :: (Ord k, Monoid v) => M.Map k [k] -> M.Map k v -> M.Map k v
distribute dist = foldConcat . M.foldWithKey insertAll M.empty
where insertAll k v m = foldr (`insertAppend` v) m k2s
where k2s = M.findWithDefault [] k dist
distributeN :: (Ord k, Monoid v) => Int -> M.Map k [k] -> M.Map k v -> M.Map k v
distributeN n dist init = foldr distribute init $ replicate n dist
distributeU :: (Ord k, Monoid v, Eq v) => M.Map k [k] -> M.Map k v -> M.Map k v
distributeU dist init =
if next == init then init else distributeU dist next
where next = distribute dist init
normalize :: (Fractional v) => M.Map k v -> M.Map k v
normalize m = M.map norm m
where norm = (/ s)
s = sum $ M.elems m
converge :: Int -> Labyrinth -> Distribution
converge n l = normalize $ M.map getSum $ distributeN n conn init
where conn = connectivity l
pos = allPositions l
init = uniformBetween (Sum 1) pos
reachConverge :: Int -> Labyrinth -> Reachability
reachConverge n l = M.map getAny $ distributeN n conn init
where conn = inverse $ connectivity l
init = armoriesDist l
reachConvergeU :: Labyrinth -> Reachability
reachConvergeU l = M.map getAny $ distributeU conn init
where conn = inverse $ connectivity l
init = armoriesDist l
uniformBetween :: a -> [Position] -> PositionMap a
uniformBetween x pos = M.fromList $ zip pos $ repeat x
armoriesDist :: Labyrinth -> PositionMap Any
armoriesDist = uniformBetween (Any True) . armories
maxKeyBy :: (Ord n) => (k -> n) -> M.Map k a -> n
maxKeyBy prop = maximum . M.keys . M.mapKeys prop
showReach :: Reachability -> String
showReach = showGrid showReachValue
where showReachValue = pad 2 ' ' . showR . fromMaybe False
showR True = "*"
showR False = "."
showDist :: Distribution -> String
showDist = showGrid showDistValue
where showDistValue = pad 2 ' ' . show . round . (100 *) . fromMaybe 0
showGrid :: (Maybe a -> String) -> PositionMap a -> String
showGrid s g = intercalate "\n" $ flip map [0..maxY] $ showGridLine s g
where maxY = maxKeyBy pY g
showGridLine :: (Maybe a -> String) -> PositionMap a -> Int -> String
showGridLine s g y = unwords $ flip map [0..maxX] $ showGridPos s g y
where maxX = maxKeyBy pX g
showGridPos :: (Maybe a -> String) -> PositionMap a -> Int -> Int -> String
showGridPos s g y x = s $ M.lookup (Pos x y) g
pad :: Int -> a -> [a] -> [a]
pad n c l = replicate d c ++ l where d = max 0 $ n - length l
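-- Usage sketch (not part of the original module): given a 'Labyrinth' built
-- with 'Labyrinth.Map', print which cells can reach an armory and the
-- converged position distribution after a given number of steps.
printAnalysis :: Int -> Labyrinth -> IO ()
printAnalysis steps lab = do
  putStrLn $ showReach $ reachConverge steps lab
  putStrLn $ showDist $ converge steps lab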
|
koterpillar/labyrinth
|
src/Labyrinth/Reachability.hs
|
mit
| 4,154
| 0
| 17
| 1,037
| 1,716
| 862
| 854
| 94
| 3
|
module Handler.ContentProxy where
import Import
import Network.Wai.Conduit (responseSource)
import qualified Network.Wai as Wai
import Blaze.ByteString.Builder (fromByteString)
import Data.CaseInsensitive (CI)
import qualified Data.ByteString as S
import FP.EnvSettings (learningSiteApproot)
-- | Creates a proxy for user content, allowing us to avoid insecure content on
-- https pages.
getContentProxyR :: Handler Html
getContentProxyR = do
-- The purpose of this line is to discourage usage of our proxy feature for
-- hotlinking from other sites.
wr <- waiRequest
case lookup "referer" $ Wai.requestHeaders wr of
Nothing -> return ()
Just referer ->
unless (encodeUtf8 learningSiteApproot `S.isPrefixOf` referer) notFound
msrc <- lookupGetParam "src"
case msrc of
Nothing -> notFound
Just url -> do
req <- liftIO $ parseUrl $ unpack url
(_, res) <- acquireResponse req >>= allocateAcquire
-- Protect against an XSS attack #3989
if (maybe True (not . isImage) $ lookup "content-type" $ responseHeaders res)
then defaultLayout $ do
let content = "Content proxy for non-images disallowed" :: Html
setTitle content
[whamlet|<p>#{content}|]
else sendWaiResponse $ responseSource
(responseStatus res)
(filter ((`member` safeResponseHeaders) . fst) $ responseHeaders res)
(mapOutput (Chunk . fromByteString) $ responseBody res)
where
isImage = isPrefixOf "image/"
safeResponseHeaders :: HashSet (CI ByteString)
safeResponseHeaders = setFromList
[ "content-type"
, "content-length"
, "etag"
, "expires"
, "last-modified"
]
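-- Example of the intended request shape (route path shown is hypothetical):
--   GET <approot>/content-proxy?src=http%3A%2F%2Fexample.com%2Fpic.png
-- issued from a page under 'learningSiteApproot'. Only image/* responses are
-- streamed back, and only the headers listed above are forwarded.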
|
fpco/schoolofhaskell.com
|
src/Handler/ContentProxy.hs
|
mit
| 1,805
| 0
| 20
| 491
| 399
| 212
| 187
| -1
| -1
|
module Optimize where
import ObjInfo
import IntermedSyntax
type FlowGraph = [(Label, Node)]
data Node = Node { icode :: [ICode],
next :: Label } deriving(Eq, Show)
makeFlowGraph :: IProgram -> IProgram
-- Placeholder: the real pass should build the flow graph and use it to rewrite
-- the program; a stub keeps the signature compilable for now.
makeFlowGraph = error "Optimize.makeFlowGraph: not implemented"
analyzeBody :: [ICode] -> FlowGraph
-- Placeholder stub for the control-flow analysis of a function body.
analyzeBody = error "Optimize.analyzeBody: not implemented"
|
yu-i9/HaSC
|
src/HaSC/Prim/Optimize.hs
|
mit
| 265
| 0
| 9
| 63
| 81
| 50
| 31
| 8
| 0
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.HTMLTextAreaElement
(js_checkValidity, checkValidity, js_setCustomValidity,
setCustomValidity, js_select, select, js_setRangeText,
setRangeText, js_setRangeText4, setRangeText4,
js_setSelectionRange, setSelectionRange, js_setAutofocus,
setAutofocus, js_getAutofocus, getAutofocus, js_setCols, setCols,
js_getCols, getCols, js_setDirName, setDirName, js_getDirName,
getDirName, js_setDisabled, setDisabled, js_getDisabled,
getDisabled, js_getForm, getForm, js_setMaxLength, setMaxLength,
js_getMaxLength, getMaxLength, js_setName, setName, js_getName,
getName, js_setPlaceholder, setPlaceholder, js_getPlaceholder,
getPlaceholder, js_setReadOnly, setReadOnly, js_getReadOnly,
getReadOnly, js_setRequired, setRequired, js_getRequired,
getRequired, js_setRows, setRows, js_getRows, getRows, js_setWrap,
setWrap, js_getWrap, getWrap, js_getType, getType,
js_setDefaultValue, setDefaultValue, js_getDefaultValue,
getDefaultValue, js_setValue, setValue, js_getValue, getValue,
js_getTextLength, getTextLength, js_getWillValidate,
getWillValidate, js_getValidity, getValidity,
js_getValidationMessage, getValidationMessage, js_getLabels,
getLabels, js_setSelectionStart, setSelectionStart,
js_getSelectionStart, getSelectionStart, js_setSelectionEnd,
setSelectionEnd, js_getSelectionEnd, getSelectionEnd,
js_setSelectionDirection, setSelectionDirection,
js_getSelectionDirection, getSelectionDirection, js_setAutocorrect,
setAutocorrect, js_getAutocorrect, getAutocorrect,
js_setAutocapitalize, setAutocapitalize, js_getAutocapitalize,
getAutocapitalize, HTMLTextAreaElement, castToHTMLTextAreaElement,
gTypeHTMLTextAreaElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe
"($1[\"checkValidity\"]() ? 1 : 0)" js_checkValidity ::
JSRef HTMLTextAreaElement -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.checkValidity Mozilla HTMLTextAreaElement.checkValidity documentation>
checkValidity :: (MonadIO m) => HTMLTextAreaElement -> m Bool
checkValidity self
= liftIO (js_checkValidity (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"setCustomValidity\"]($2)"
js_setCustomValidity ::
JSRef HTMLTextAreaElement -> JSRef (Maybe JSString) -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.setCustomValidity Mozilla HTMLTextAreaElement.setCustomValidity documentation>
setCustomValidity ::
(MonadIO m, ToJSString error) =>
HTMLTextAreaElement -> Maybe error -> m ()
setCustomValidity self error
= liftIO
(js_setCustomValidity (unHTMLTextAreaElement self)
(toMaybeJSString error))
foreign import javascript unsafe "$1[\"select\"]()" js_select ::
JSRef HTMLTextAreaElement -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.select Mozilla HTMLTextAreaElement.select documentation>
select :: (MonadIO m) => HTMLTextAreaElement -> m ()
select self = liftIO (js_select (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"setRangeText\"]($2)"
js_setRangeText :: JSRef HTMLTextAreaElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.setRangeText Mozilla HTMLTextAreaElement.setRangeText documentation>
setRangeText ::
(MonadIO m, ToJSString replacement) =>
HTMLTextAreaElement -> replacement -> m ()
setRangeText self replacement
= liftIO
(js_setRangeText (unHTMLTextAreaElement self)
(toJSString replacement))
foreign import javascript unsafe
"$1[\"setRangeText\"]($2, $3, $4,\n$5)" js_setRangeText4 ::
JSRef HTMLTextAreaElement ->
JSString -> Word -> Word -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.setRangeText Mozilla HTMLTextAreaElement.setRangeText documentation>
setRangeText4 ::
(MonadIO m, ToJSString replacement, ToJSString selectionMode) =>
HTMLTextAreaElement ->
replacement -> Word -> Word -> selectionMode -> m ()
setRangeText4 self replacement start end selectionMode
= liftIO
(js_setRangeText4 (unHTMLTextAreaElement self)
(toJSString replacement)
start
end
(toJSString selectionMode))
foreign import javascript unsafe
"$1[\"setSelectionRange\"]($2, $3,\n$4)" js_setSelectionRange ::
JSRef HTMLTextAreaElement -> Int -> Int -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.setSelectionRange Mozilla HTMLTextAreaElement.setSelectionRange documentation>
setSelectionRange ::
(MonadIO m, ToJSString direction) =>
HTMLTextAreaElement -> Int -> Int -> direction -> m ()
setSelectionRange self start end direction
= liftIO
(js_setSelectionRange (unHTMLTextAreaElement self) start end
(toJSString direction))
foreign import javascript unsafe "$1[\"autofocus\"] = $2;"
js_setAutofocus :: JSRef HTMLTextAreaElement -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.autofocus Mozilla HTMLTextAreaElement.autofocus documentation>
setAutofocus :: (MonadIO m) => HTMLTextAreaElement -> Bool -> m ()
setAutofocus self val
= liftIO (js_setAutofocus (unHTMLTextAreaElement self) val)
foreign import javascript unsafe "($1[\"autofocus\"] ? 1 : 0)"
js_getAutofocus :: JSRef HTMLTextAreaElement -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.autofocus Mozilla HTMLTextAreaElement.autofocus documentation>
getAutofocus :: (MonadIO m) => HTMLTextAreaElement -> m Bool
getAutofocus self
= liftIO (js_getAutofocus (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"cols\"] = $2;" js_setCols ::
JSRef HTMLTextAreaElement -> Int -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.cols Mozilla HTMLTextAreaElement.cols documentation>
setCols :: (MonadIO m) => HTMLTextAreaElement -> Int -> m ()
setCols self val
= liftIO (js_setCols (unHTMLTextAreaElement self) val)
foreign import javascript unsafe "$1[\"cols\"]" js_getCols ::
JSRef HTMLTextAreaElement -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.cols Mozilla HTMLTextAreaElement.cols documentation>
getCols :: (MonadIO m) => HTMLTextAreaElement -> m Int
getCols self = liftIO (js_getCols (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"dirName\"] = $2;"
js_setDirName :: JSRef HTMLTextAreaElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.dirName Mozilla HTMLTextAreaElement.dirName documentation>
setDirName ::
(MonadIO m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setDirName self val
= liftIO
(js_setDirName (unHTMLTextAreaElement self) (toJSString val))
foreign import javascript unsafe "$1[\"dirName\"]" js_getDirName ::
JSRef HTMLTextAreaElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.dirName Mozilla HTMLTextAreaElement.dirName documentation>
getDirName ::
(MonadIO m, FromJSString result) => HTMLTextAreaElement -> m result
getDirName self
= liftIO
(fromJSString <$> (js_getDirName (unHTMLTextAreaElement self)))
foreign import javascript unsafe "$1[\"disabled\"] = $2;"
js_setDisabled :: JSRef HTMLTextAreaElement -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.disabled Mozilla HTMLTextAreaElement.disabled documentation>
setDisabled :: (MonadIO m) => HTMLTextAreaElement -> Bool -> m ()
setDisabled self val
= liftIO (js_setDisabled (unHTMLTextAreaElement self) val)
foreign import javascript unsafe "($1[\"disabled\"] ? 1 : 0)"
js_getDisabled :: JSRef HTMLTextAreaElement -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.disabled Mozilla HTMLTextAreaElement.disabled documentation>
getDisabled :: (MonadIO m) => HTMLTextAreaElement -> m Bool
getDisabled self
= liftIO (js_getDisabled (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"form\"]" js_getForm ::
JSRef HTMLTextAreaElement -> IO (JSRef HTMLFormElement)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.form Mozilla HTMLTextAreaElement.form documentation>
getForm ::
(MonadIO m) => HTMLTextAreaElement -> m (Maybe HTMLFormElement)
getForm self
= liftIO ((js_getForm (unHTMLTextAreaElement self)) >>= fromJSRef)
foreign import javascript unsafe "$1[\"maxLength\"] = $2;"
js_setMaxLength :: JSRef HTMLTextAreaElement -> Int -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.maxLength Mozilla HTMLTextAreaElement.maxLength documentation>
setMaxLength :: (MonadIO m) => HTMLTextAreaElement -> Int -> m ()
setMaxLength self val
= liftIO (js_setMaxLength (unHTMLTextAreaElement self) val)
foreign import javascript unsafe "$1[\"maxLength\"]"
js_getMaxLength :: JSRef HTMLTextAreaElement -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.maxLength Mozilla HTMLTextAreaElement.maxLength documentation>
getMaxLength :: (MonadIO m) => HTMLTextAreaElement -> m Int
getMaxLength self
= liftIO (js_getMaxLength (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"name\"] = $2;" js_setName ::
JSRef HTMLTextAreaElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.name Mozilla HTMLTextAreaElement.name documentation>
setName ::
(MonadIO m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setName self val
= liftIO (js_setName (unHTMLTextAreaElement self) (toJSString val))
foreign import javascript unsafe "$1[\"name\"]" js_getName ::
JSRef HTMLTextAreaElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.name Mozilla HTMLTextAreaElement.name documentation>
getName ::
(MonadIO m, FromJSString result) => HTMLTextAreaElement -> m result
getName self
= liftIO
(fromJSString <$> (js_getName (unHTMLTextAreaElement self)))
foreign import javascript unsafe "$1[\"placeholder\"] = $2;"
js_setPlaceholder :: JSRef HTMLTextAreaElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.placeholder Mozilla HTMLTextAreaElement.placeholder documentation>
setPlaceholder ::
(MonadIO m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setPlaceholder self val
= liftIO
(js_setPlaceholder (unHTMLTextAreaElement self) (toJSString val))
foreign import javascript unsafe "$1[\"placeholder\"]"
js_getPlaceholder :: JSRef HTMLTextAreaElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.placeholder Mozilla HTMLTextAreaElement.placeholder documentation>
getPlaceholder ::
(MonadIO m, FromJSString result) => HTMLTextAreaElement -> m result
getPlaceholder self
= liftIO
(fromJSString <$> (js_getPlaceholder (unHTMLTextAreaElement self)))
foreign import javascript unsafe "$1[\"readOnly\"] = $2;"
js_setReadOnly :: JSRef HTMLTextAreaElement -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.readOnly Mozilla HTMLTextAreaElement.readOnly documentation>
setReadOnly :: (MonadIO m) => HTMLTextAreaElement -> Bool -> m ()
setReadOnly self val
= liftIO (js_setReadOnly (unHTMLTextAreaElement self) val)
foreign import javascript unsafe "($1[\"readOnly\"] ? 1 : 0)"
js_getReadOnly :: JSRef HTMLTextAreaElement -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.readOnly Mozilla HTMLTextAreaElement.readOnly documentation>
getReadOnly :: (MonadIO m) => HTMLTextAreaElement -> m Bool
getReadOnly self
= liftIO (js_getReadOnly (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"required\"] = $2;"
js_setRequired :: JSRef HTMLTextAreaElement -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.required Mozilla HTMLTextAreaElement.required documentation>
setRequired :: (MonadIO m) => HTMLTextAreaElement -> Bool -> m ()
setRequired self val
= liftIO (js_setRequired (unHTMLTextAreaElement self) val)
foreign import javascript unsafe "($1[\"required\"] ? 1 : 0)"
js_getRequired :: JSRef HTMLTextAreaElement -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.required Mozilla HTMLTextAreaElement.required documentation>
getRequired :: (MonadIO m) => HTMLTextAreaElement -> m Bool
getRequired self
= liftIO (js_getRequired (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"rows\"] = $2;" js_setRows ::
JSRef HTMLTextAreaElement -> Int -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.rows Mozilla HTMLTextAreaElement.rows documentation>
setRows :: (MonadIO m) => HTMLTextAreaElement -> Int -> m ()
setRows self val
= liftIO (js_setRows (unHTMLTextAreaElement self) val)
foreign import javascript unsafe "$1[\"rows\"]" js_getRows ::
JSRef HTMLTextAreaElement -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.rows Mozilla HTMLTextAreaElement.rows documentation>
getRows :: (MonadIO m) => HTMLTextAreaElement -> m Int
getRows self = liftIO (js_getRows (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"wrap\"] = $2;" js_setWrap ::
JSRef HTMLTextAreaElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.wrap Mozilla HTMLTextAreaElement.wrap documentation>
setWrap ::
(MonadIO m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setWrap self val
= liftIO (js_setWrap (unHTMLTextAreaElement self) (toJSString val))
foreign import javascript unsafe "$1[\"wrap\"]" js_getWrap ::
JSRef HTMLTextAreaElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.wrap Mozilla HTMLTextAreaElement.wrap documentation>
getWrap ::
(MonadIO m, FromJSString result) => HTMLTextAreaElement -> m result
getWrap self
= liftIO
(fromJSString <$> (js_getWrap (unHTMLTextAreaElement self)))
foreign import javascript unsafe "$1[\"type\"]" js_getType ::
JSRef HTMLTextAreaElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.type Mozilla HTMLTextAreaElement.type documentation>
getType ::
(MonadIO m, FromJSString result) => HTMLTextAreaElement -> m result
getType self
= liftIO
(fromJSString <$> (js_getType (unHTMLTextAreaElement self)))
foreign import javascript unsafe "$1[\"defaultValue\"] = $2;"
js_setDefaultValue ::
JSRef HTMLTextAreaElement -> JSRef (Maybe JSString) -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.defaultValue Mozilla HTMLTextAreaElement.defaultValue documentation>
setDefaultValue ::
(MonadIO m, ToJSString val) =>
HTMLTextAreaElement -> Maybe val -> m ()
setDefaultValue self val
= liftIO
(js_setDefaultValue (unHTMLTextAreaElement self)
(toMaybeJSString val))
foreign import javascript unsafe "$1[\"defaultValue\"]"
js_getDefaultValue ::
JSRef HTMLTextAreaElement -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.defaultValue Mozilla HTMLTextAreaElement.defaultValue documentation>
getDefaultValue ::
(MonadIO m, FromJSString result) =>
HTMLTextAreaElement -> m (Maybe result)
getDefaultValue self
= liftIO
(fromMaybeJSString <$>
(js_getDefaultValue (unHTMLTextAreaElement self)))
foreign import javascript unsafe "$1[\"value\"] = $2;" js_setValue
:: JSRef HTMLTextAreaElement -> JSRef (Maybe JSString) -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.value Mozilla HTMLTextAreaElement.value documentation>
setValue ::
(MonadIO m, ToJSString val) =>
HTMLTextAreaElement -> Maybe val -> m ()
setValue self val
= liftIO
(js_setValue (unHTMLTextAreaElement self) (toMaybeJSString val))
foreign import javascript unsafe "$1[\"value\"]" js_getValue ::
JSRef HTMLTextAreaElement -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.value Mozilla HTMLTextAreaElement.value documentation>
getValue ::
(MonadIO m, FromJSString result) =>
HTMLTextAreaElement -> m (Maybe result)
getValue self
= liftIO
(fromMaybeJSString <$> (js_getValue (unHTMLTextAreaElement self)))
foreign import javascript unsafe "$1[\"textLength\"]"
js_getTextLength :: JSRef HTMLTextAreaElement -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.textLength Mozilla HTMLTextAreaElement.textLength documentation>
getTextLength :: (MonadIO m) => HTMLTextAreaElement -> m Word
getTextLength self
= liftIO (js_getTextLength (unHTMLTextAreaElement self))
foreign import javascript unsafe "($1[\"willValidate\"] ? 1 : 0)"
js_getWillValidate :: JSRef HTMLTextAreaElement -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.willValidate Mozilla HTMLTextAreaElement.willValidate documentation>
getWillValidate :: (MonadIO m) => HTMLTextAreaElement -> m Bool
getWillValidate self
= liftIO (js_getWillValidate (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"validity\"]" js_getValidity
:: JSRef HTMLTextAreaElement -> IO (JSRef ValidityState)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.validity Mozilla HTMLTextAreaElement.validity documentation>
getValidity ::
(MonadIO m) => HTMLTextAreaElement -> m (Maybe ValidityState)
getValidity self
= liftIO
((js_getValidity (unHTMLTextAreaElement self)) >>= fromJSRef)
foreign import javascript unsafe "$1[\"validationMessage\"]"
js_getValidationMessage :: JSRef HTMLTextAreaElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.validationMessage Mozilla HTMLTextAreaElement.validationMessage documentation>
getValidationMessage ::
(MonadIO m, FromJSString result) => HTMLTextAreaElement -> m result
getValidationMessage self
= liftIO
(fromJSString <$>
(js_getValidationMessage (unHTMLTextAreaElement self)))
foreign import javascript unsafe "$1[\"labels\"]" js_getLabels ::
JSRef HTMLTextAreaElement -> IO (JSRef NodeList)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.labels Mozilla HTMLTextAreaElement.labels documentation>
getLabels ::
(MonadIO m) => HTMLTextAreaElement -> m (Maybe NodeList)
getLabels self
= liftIO
((js_getLabels (unHTMLTextAreaElement self)) >>= fromJSRef)
foreign import javascript unsafe "$1[\"selectionStart\"] = $2;"
js_setSelectionStart :: JSRef HTMLTextAreaElement -> Int -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionStart Mozilla HTMLTextAreaElement.selectionStart documentation>
setSelectionStart ::
(MonadIO m) => HTMLTextAreaElement -> Int -> m ()
setSelectionStart self val
= liftIO (js_setSelectionStart (unHTMLTextAreaElement self) val)
foreign import javascript unsafe "$1[\"selectionStart\"]"
js_getSelectionStart :: JSRef HTMLTextAreaElement -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionStart Mozilla HTMLTextAreaElement.selectionStart documentation>
getSelectionStart :: (MonadIO m) => HTMLTextAreaElement -> m Int
getSelectionStart self
= liftIO (js_getSelectionStart (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"selectionEnd\"] = $2;"
js_setSelectionEnd :: JSRef HTMLTextAreaElement -> Int -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionEnd Mozilla HTMLTextAreaElement.selectionEnd documentation>
setSelectionEnd ::
(MonadIO m) => HTMLTextAreaElement -> Int -> m ()
setSelectionEnd self val
= liftIO (js_setSelectionEnd (unHTMLTextAreaElement self) val)
foreign import javascript unsafe "$1[\"selectionEnd\"]"
js_getSelectionEnd :: JSRef HTMLTextAreaElement -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionEnd Mozilla HTMLTextAreaElement.selectionEnd documentation>
getSelectionEnd :: (MonadIO m) => HTMLTextAreaElement -> m Int
getSelectionEnd self
= liftIO (js_getSelectionEnd (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"selectionDirection\"] = $2;"
js_setSelectionDirection ::
JSRef HTMLTextAreaElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionDirection Mozilla HTMLTextAreaElement.selectionDirection documentation>
setSelectionDirection ::
(MonadIO m, ToJSString val) => HTMLTextAreaElement -> val -> m ()
setSelectionDirection self val
= liftIO
(js_setSelectionDirection (unHTMLTextAreaElement self)
(toJSString val))
foreign import javascript unsafe "$1[\"selectionDirection\"]"
js_getSelectionDirection ::
JSRef HTMLTextAreaElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.selectionDirection Mozilla HTMLTextAreaElement.selectionDirection documentation>
getSelectionDirection ::
(MonadIO m, FromJSString result) => HTMLTextAreaElement -> m result
getSelectionDirection self
= liftIO
(fromJSString <$>
(js_getSelectionDirection (unHTMLTextAreaElement self)))
foreign import javascript unsafe "$1[\"autocorrect\"] = $2;"
js_setAutocorrect :: JSRef HTMLTextAreaElement -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.autocorrect Mozilla HTMLTextAreaElement.autocorrect documentation>
setAutocorrect ::
(MonadIO m) => HTMLTextAreaElement -> Bool -> m ()
setAutocorrect self val
= liftIO (js_setAutocorrect (unHTMLTextAreaElement self) val)
foreign import javascript unsafe "($1[\"autocorrect\"] ? 1 : 0)"
js_getAutocorrect :: JSRef HTMLTextAreaElement -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.autocorrect Mozilla HTMLTextAreaElement.autocorrect documentation>
getAutocorrect :: (MonadIO m) => HTMLTextAreaElement -> m Bool
getAutocorrect self
= liftIO (js_getAutocorrect (unHTMLTextAreaElement self))
foreign import javascript unsafe "$1[\"autocapitalize\"] = $2;"
js_setAutocapitalize ::
JSRef HTMLTextAreaElement -> JSRef (Maybe JSString) -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.autocapitalize Mozilla HTMLTextAreaElement.autocapitalize documentation>
setAutocapitalize ::
(MonadIO m, ToJSString val) =>
HTMLTextAreaElement -> Maybe val -> m ()
setAutocapitalize self val
= liftIO
(js_setAutocapitalize (unHTMLTextAreaElement self)
(toMaybeJSString val))
foreign import javascript unsafe "$1[\"autocapitalize\"]"
js_getAutocapitalize ::
JSRef HTMLTextAreaElement -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTextAreaElement.autocapitalize Mozilla HTMLTextAreaElement.autocapitalize documentation>
getAutocapitalize ::
(MonadIO m, FromJSString result) =>
HTMLTextAreaElement -> m (Maybe result)
getAutocapitalize self
= liftIO
(fromMaybeJSString <$>
(js_getAutocapitalize (unHTMLTextAreaElement self)))
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/HTMLTextAreaElement.hs
|
mit
| 25,205
| 348
| 11
| 4,015
| 4,811
| 2,533
| 2,278
| 372
| 1
|
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
import qualified Web.Scotty as S
import qualified Text.Blaze.Html.Renderer.Text as B
import Text.Blaze.Html5 (html, (!), input, p, form)
import qualified Text.Blaze.Html5.Attributes as A
import Database.Persist
import Database.Persist.TH
import Database.Persist.Sqlite
import Data.Text (Text)
import Data.Time (getCurrentTime, UTCTime)
import Control.Monad.IO.Class (liftIO)
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Link
email Text
list Text
createdAt UTCTime
deriving Show
|]
main = S.scotty 3000 $ do
S.get "/" $ do
blaze renderRoot
S.post "/signup" $ do
email <- S.param "Email"
list <- S.param "List"
runSqlite "smallEmail.sqlite3" $ do
runMigration migrateAll
now <- liftIO getCurrentTime
insert $ Link email list now
S.html "Thank you for the submission!"
blaze = S.html . B.renderHtml
renderRoot = html $ do
p "Submit your email address and a mailman list to never have to worry \
\about large attachments again!"
form ! A.action "/signup" ! A.method "post" $ do
input ! A.type_ "email" ! A.name "Email" ! A.placeholder "Email"
input ! A.type_ "email" ! A.name "List" ! A.placeholder "List"
|
MattWis/smallEmail
|
smallEmail/Main.hs
|
mit
| 1,651
| 0
| 17
| 410
| 364
| 192
| 172
| 36
| 1
|
module Grammar.IO.RandomSample where
import Data.Array
import Data.Random.RVar
import Data.Random.Distribution.Uniform
import Data.Random.Source.DevRandom
import Data.List
import Data.Maybe
import qualified Data.Sequence as Seq
import Data.Sequence ((><), ViewL((:<)))
randomSample :: Int -> [a] -> IO [a]
randomSample n xs = if len == 0 || n == 0 then return [] else do
ixs <- runRVar (sample ct [0..len - 1]) DevRandom
let sorted = sort ixs
return $ fmap (\i -> arr ! i) sorted
where
arr = listArray (0, len - 1) xs
ct = max 0 $ min len n
len = length xs
-- https://github.com/aristidb/random-extras/blob/master/Data/Random/Extras.hs
sample :: Int -> [a] -> RVar [a]
sample m = sampleSeq m . Seq.fromList
(.:) :: (c -> c') -> (a -> b -> c) -> (a -> b -> c')
(.:) = (.).(.)
extractSeq :: Seq.Seq a -> Int -> Maybe (Seq.Seq a, a)
extractSeq s i | Seq.null r = Nothing
| otherwise = Just (a >< c, b)
where (a, r) = Seq.splitAt i s
(b :< c) = Seq.viewl r
backsaw :: Int -> [Int]
backsaw n = [n - 1, n - 2 .. 0]
shuffleSeq' :: Seq.Seq a -> [Int] -> [a]
shuffleSeq' = snd .: mapAccumL (fromJust .: extractSeq)
sampleSeq :: Int -> Seq.Seq a -> RVar [a]
sampleSeq m s = do
samples <- mapM (uniform 0) . take m . backsaw $ Seq.length s
return (shuffleSeq' s samples)
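-- Usage sketch (added): draw two elements uniformly at random while keeping
-- their original relative order; results vary per run because randomness is
-- read from /dev/random.
-- >>> randomSample 2 "abcdef"
-- "af"   -- for example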
|
ancientlanguage/haskell-analysis
|
grammar/src/Grammar/IO/RandomSample.hs
|
mit
| 1,317
| 1
| 13
| 290
| 617
| 330
| 287
| 34
| 2
|
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.Optimizer.Unused
-- Copyright : (c) Phil Freeman 2013-14
-- License : MIT
--
-- Maintainer : Phil Freeman <paf31@cantab.net>
-- Stability : experimental
-- Portability :
--
-- |
-- Removes unused variables
--
-----------------------------------------------------------------------------
module Language.PureScript.Optimizer.Unused (
removeUnusedVariables,
removeCodeAfterReturnStatements
) where
import Language.PureScript.CodeGen.JS.AST
import Language.PureScript.Optimizer.Common
removeUnusedVariables :: JS -> JS
removeUnusedVariables = everywhereOnJS (removeFromBlock withBlock)
where
withBlock :: [JS] -> [JS]
withBlock sts = go sts sts
go :: [JS] -> [JS] -> [JS]
go _ [] = []
go sts (JSVariableIntroduction var _ : rest) | not (any (isUsed var) sts) = go sts rest
go sts (s : rest) = s : go sts rest
removeCodeAfterReturnStatements :: JS -> JS
removeCodeAfterReturnStatements = everywhereOnJS (removeFromBlock go)
where
go :: [JS] -> [JS]
go jss | not (any isJSReturn jss) = jss
| otherwise = let (body, ret : _) = span (not . isJSReturn) jss in body ++ [ret]
isJSReturn (JSReturn _) = True
isJSReturn _ = False
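-- Informal example of the intended effect on a generated block:
--   var f = function () { var unused = 1; return 0; var late = 2; };
-- 'removeUnusedVariables' drops the introduction of 'unused' (never read in
-- the block) and 'removeCodeAfterReturnStatements' drops everything after
-- 'return 0;'.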
|
bergmark/purescript
|
src/Language/PureScript/Optimizer/Unused.hs
|
mit
| 1,291
| 0
| 14
| 225
| 347
| 189
| 158
| 20
| 3
|
-- https://www.reddit.com/r/dailyprogrammer/comments/pwox3/2192012_challenge_11_intermediate/
module Main where
rotate :: [Int] -> [Maybe Int]
rotate = reverse . fmap (flip lookup helper) where
helper :: [(Int,Int)]
helper = [ (0, 0), (1, 1), (2, 2), (5, 5)
, (6, 9), (8, 8), (9, 6)]
isUpsideUp :: [Int] -> Bool
isUpsideUp ns = fmap Just ns == rotate ns
upsideUps :: Int -> Int -> [Int]
upsideUps m n =
fmap (read . concat . fmap show)
$ filter isUpsideUp
$ fmap (fmap (read . (:[])) . show)
$ [m..n]
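-- For example, under the digit mapping in 'helper', 11, 69 and 96 are
-- upside-up, while any number containing 3, 4 or 7 never is, because those
-- digits have no entry in 'helper'.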
main :: IO ()
main = do
let numbers = upsideUps 0 10000
print numbers
print $ length numbers
|
gallais/dailyprogrammer
|
intermediate/011/Main.hs
|
mit
| 642
| 0
| 14
| 153
| 293
| 161
| 132
| 19
| 1
|
module Javascript where
import Text.Blaze.Html5 hiding (head,map)
import Text.Blaze.Html5.Attributes
import Text.Blaze.Html5 as H hiding (head,map)
import Text.Blaze.Html5.Attributes as A
import Text.Blaze.Html.Renderer.Pretty
import Data.Monoid
import Models
import Utils
import Components
modal headerHtml bodyHtml footerHtml =
let
header = H.div (closeButton >> headerHtml) ! class_ (toValue "modal-header")
body = H.div bodyHtml ! class_ (toValue "modal-body")
footer = H.div footerHtml ! class_ (toValue "modal-footer")
content = H.div (header >> body >> footer) ! class_ (toValue "modal-content")
dialog = H.div content ! class_ (toValue "modal-dialog")
in
dialog
carousel :: String -> [String] -> Html
carousel carouselId imgLinks =
let
carouselLink = (toValue ("#" ++ carouselId))
leftControl = a (glyphicon "chevron-left") ! class_ (toValue "left carousel-control") ! href carouselLink ! dataSlide (toValue "prev")
rightControl = a (glyphicon "chevron-right") ! class_ (toValue "right carousel-control") ! href carouselLink ! dataSlide (toValue "next")
controls = leftControl >> rightControl
lis = mconcat [li noHtml ! dataTarget carouselLink ! dataSlideTo (toValue (show i)) ! if i == 0 then class_ (toValue "active") else class_ (toValue "")| i <- [0 .. (length imgLinks)-1]]
indicators = ol lis ! class_ (toValue "carousel-indicators")
items = mconcat [H.div (img ! src (toValue imLink)) ! if imLink == (head imgLinks) then class_ (toValue "item active") else class_ (toValue "item")| imLink <- imgLinks]
innerCarousel = H.div items ! class_ (toValue "carousel-inner")
in
H.div (indicators >> innerCarousel >> controls) ! A.id (toValue carouselId) ! class_ (toValue "carousel slide") ! dataRide (toValue "carousel")
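-- For example (image paths hypothetical):
--   carousel "shots" ["/img/a.png", "/img/b.png"]
-- renders a two-slide Bootstrap 3 carousel with id "shots", indicator dots and
-- left/right chevron controls.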
|
lnunno/blaze-bootstrap3
|
Bootstrap3/Javascript.hs
|
mit
| 2,042
| 0
| 17
| 549
| 657
| 337
| 320
| 30
| 3
|
{-# LANGUAGE OverloadedStrings, NoImplicitPrelude #-}
module Network.Images.Search
( Gapi(..)
, config
, linksOfQuery
, luckyLinkOfQuery
) where
import BasePrelude hiding ((&))
import Control.Lens
import Data.Aeson.Lens
import Data.Text (Text)
import qualified Network.Wreq as Wreq
import System.Random
data Gapi = Gapi { apiKey :: Text, cx :: Text }
type PartialQuery = Wreq.Options -> Wreq.Options
imgApiRoot :: String
imgApiRoot = "https://www.googleapis.com/customsearch/v1"
-- https://developers.google.com/custom-search/json-api/v1/reference/cse/list
imgApiQuery :: Gapi -> PartialQuery
imgApiQuery s = (Wreq.param "key" .~ [apiKey s])
. (Wreq.param "cx" .~ [cx s])
. (Wreq.param "searchType" .~ ["image"])
. (Wreq.param "safe" .~ ["high"])
. (Wreq.param "imgSize" .~ ["large"])
config :: Text -> Text -> Gapi
config = Gapi
linksOfQuery :: Gapi -> Text -> IO [Text]
linksOfQuery gapi query = do
let opts = Wreq.defaults
& imgApiQuery gapi
& (Wreq.param "q" .~ [query])
r <- Wreq.getWith opts imgApiRoot
return (r ^.. links)
where
links = Wreq.responseBody . key "items" . values . key "link" . _String
luckyLinkOfQuery :: Gapi -> Text -> IO (Maybe Text)
luckyLinkOfQuery gapi query = linksOfQuery gapi query >>= sample
sample :: [a] -> IO (Maybe a)
sample [] = return Nothing
sample xs = do
rand <- randomIO :: IO Int
return . Just $ xs !! (rand `mod` length xs)
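-- Usage sketch (credentials below are placeholders, not real values):
-- main :: IO ()
-- main = do
--   let gapi = config "YOUR_API_KEY" "YOUR_SEARCH_ENGINE_ID"
--   luckyLinkOfQuery gapi "haskell logo" >>= print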
|
dpatti/jpg-to
|
src/Network/Images/Search.hs
|
mit
| 1,523
| 0
| 14
| 357
| 496
| 266
| 230
| 39
| 1
|
import Data.Char (intToDigit)
import Test.Hspec (Spec, it)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import Test.QuickCheck (Positive(Positive), (==>), property)
import qualified Numeric as Num (showIntAtBase)
import Trinary (readTri, showTri)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = do
let refShowTri n = Num.showIntAtBase 3 intToDigit n ""
it "can show Int trinary" $
property $ \(Positive n) -> refShowTri n == showTri (n :: Int)
it "can show Integer trinary" $
property $ \(Positive n) -> refShowTri n == showTri (n :: Integer)
it "can read Int trinary" $
property $ \(Positive n) -> n == readTri (refShowTri (n :: Int))
it "can read Integer trinary" $
property $ \(Positive n) -> n == readTri (refShowTri (n :: Integer))
it "can read invalid trinary" $
\n -> any (`notElem` ['0'..'2']) (show n) ==> (readTri . show $ n) == (0 :: Int)
|
exercism/xhaskell
|
exercises/practice/trinary/test/Tests.hs
|
mit
| 1,003
| 0
| 13
| 230
| 378
| 204
| 174
| 21
| 1
|
{-
- Extract a slice from a list.
- Given two indices, i and k, the slice is the list containing the elements
- between the i'th and the k'th element of the original list (both limits
- included). Start counting the elements with 1.
-
- Example:
- > slice ['a','b','c','d','e','f','g','h','i','k'] 3 7
- "cdefg"
-}
slice :: [a] -> Int -> Int -> [a]
slice xs start end
| (start < 0) || (end < 0) = error "Can't have negative start or end."
| otherwise =
-- Because we want to include the starting and ending elements, we
-- split before the start.
let (_,rst) = splitAt (start-1) xs
(slice',_) = splitAt (end - (start-1)) rst
in slice'
slice' :: [a] -> Int -> Int -> [a]
slice' xs start end =
let slice'' (piece,_) [] = piece
slice'' (piece,counter) (y:ys)
| counter < start = slice'' (piece,counter+1) ys
| counter > end = piece
| otherwise = slice'' (piece ++ [y],counter+1) ys
in slice'' ([],1) xs
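-- Both versions agree on the example from the problem statement:
-- slice "abcdefghik" 3 7 == "cdefg" and slice' "abcdefghik" 3 7 == "cdefg".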
|
LucianU/99problems
|
P18.hs
|
mit
| 1,010
| 0
| 14
| 280
| 300
| 156
| 144
| 15
| 2
|
locationsWithin :: Int -> (Int, Int) -> Int -> [(Int, Int)]
locationsWithin num loc steps = snd (until done update ([[loc]], []))
where done = (null . fst)
update ((p:ps), seen) = (insertAll ps (map (\loc -> loc : p) (nextLocs num p)) seen, (head p) : seen)
insertAll ps [] _ = ps
insertAll ps (x:xs) seen = insertAll (insert ps x seen) xs seen
insert [] x seen
| (length x) - 1 > steps = []
| elem (head x) seen = []
| otherwise = [x]
insert (p:ps) x seen
| elem (head x) seen = p : ps
| (head x) == (head p) = p : ps
| (length x) - 1 > steps = p : ps
| length p <= length x = p : (insert ps x seen)
| otherwise = x : p : ps
shortestPath :: Int -> (Int, Int) -> (Int, Int) -> [(Int, Int)]
shortestPath num loc target = head (until done update [[loc]])
where done = (==target) . head . head
update (p:ps) = insertAll ps (map (\loc -> loc : p) (nextLocs num p))
insertAll ps [] = ps
insertAll ps (x:xs) = insertAll (insert ps x) xs
insert [] x = [x]
insert (p:ps) x
| (head x) == (head p) = p : ps
| length p <= length x = p : (insert ps x)
| otherwise = x : p : ps
nextLocs :: Int -> [(Int, Int)] -> [(Int, Int)]
nextLocs num path = ((filter (openSpace num)). (filter legalMove) . allNextLocs . head) path
allNextLocs :: (Int, Int) -> [(Int, Int)]
allNextLocs (x, y) = [(x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)]
legalMove :: (Int, Int) -> Bool
legalMove (x, y) = all (>=0) [x, y]
openSpace :: Int -> (Int, Int) -> Bool
openSpace num (x, y) = (even . length . (filter (=='1')) . toBinary) (t1 + num)
where t1 = (x * x) + (3 * x) + (2 * x * y) + y + (y * y)
toBinary :: Int -> String
toBinary 0 = "0"
toBinary 1 = "1"
toBinary n = (toBinary n') ++ (show r')
where (n', r') = divMod n 2
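-- e.g. toBinary 10 == "1010" and toBinary 4 == "100"; 'openSpace' above treats
-- a cell as open when the binary expansion of x*x + 3*x + 2*x*y + y + y*y + num
-- contains an even number of '1' digits.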
main = do
let (favoriteNumber, target) = (1358, (31, 39))
print $
(\path -> (length path) - 1) $
shortestPath favoriteNumber (1, 1) target
print $
length $
locationsWithin favoriteNumber (1, 1) 50
|
ajm188/advent_of_code
|
2016/13/Main.hs
|
mit
| 2,188
| 0
| 13
| 713
| 1,211
| 641
| 570
| 49
| 3
|
-- | Tests some properties against Language.TaPL.Boolean.
module Language.TaPL.Boolean.Tests where
import Test.QuickCheck (quickCheck)
import Text.Printf (printf)
import Language.TaPL.ShowPretty (showp)
import Language.TaPL.Boolean (Term, parseString, eval, eval')
-- | A test runner.
main = mapM_ (\(s,a) -> printf "%-25s: " s >> a) tests
-- | Both eval functions yield the same result.
prop_evaluates_the_same :: Term -> Bool
prop_evaluates_the_same t = eval t == eval' t
-- | parse . showp is an identity function.
prop_showp_parse_id :: Term -> Bool
prop_showp_parse_id t = t == parseString (showp t)
-- | eval . parse . showp evaluates the same as eval.
prop_showp_parse_evaluates_the_same :: Term -> Bool
prop_showp_parse_evaluates_the_same t = eval (parseString (showp t)) == eval t
-- | eval' . parse . showp evaluates the same as eval'.
prop_showp_parse_evaluates_the_same' :: Term -> Bool
prop_showp_parse_evaluates_the_same' t = eval' (parseString (showp t)) == eval' t
-- | List of tests and their names.
tests = [("evaluates_the_same", quickCheck prop_evaluates_the_same)
,("showp_parse_id", quickCheck prop_showp_parse_id)
,("showp_parse_evaluates_the_same", quickCheck prop_showp_parse_evaluates_the_same)
,("showp_parse_evaluates_the_same'", quickCheck prop_showp_parse_evaluates_the_same')
]
|
zeckalpha/TaPL
|
src/Language/TaPL/Boolean/Tests.hs
|
mit
| 1,361
| 0
| 10
| 215
| 287
| 160
| 127
| 18
| 1
|
#!/usr/bin/runhugs
module Main where
import Data.List
main = putStrLn(unlines p++" ["++s p++"]") where
s = concat . intersperse ",\n " . map show
p =
["#!/usr/bin/runhugs",
"module Main where",
"import Data.List",
"",
"main = putStrLn(unlines p++\" [\"++s p++\"]\") where",
" s = concat . intersperse \",\\n \" . map show",
" p ="]
|
google-code/bkil-open
|
volatile/edu/quine1.hs
|
gpl-2.0
| 353
| 0
| 10
| 74
| 86
| 48
| 38
| 12
| 1
|
module Cryptography.KeyEncapsulation where
import Notes
import Functions.Basics.Macro
import Probability.ProbabilityMeasure.Macro
import Probability.ProbabilityMeasure.Terms
import Probability.RandomVariable.Terms
import Cryptography.PublicKeyEncryption.Macro
import Cryptography.PublicKeyEncryption.Terms
import Cryptography.SymmetricCryptography.Macro
import Cryptography.SymmetricCryptography.Terms
import Cryptography.KeyEncapsulation.Macro
import Cryptography.KeyEncapsulation.Terms
keyEncapsulationS :: Note
keyEncapsulationS = section "Key encapsulation" $ do
keyEncapsulationMechanismDefinition
keyEncapsulationINDCCAGameDefinition
keyEncapsulationMechanismDefinition :: Note
keyEncapsulationMechanismDefinition = do
de $ do
s ["Let", m pksp_, "be a", publicKeySpace <> ",", m sksp_, "a", secretKeySpace <> ",", m ksp_, "a symmetric", keySpace, and, m csp_, "a", ciphertextSpace]
s ["A", keyEncapsulationMechanism', or, kEM', m kem_, "consists of..."]
itemize $ do
item $ s [m kpdist_ <> ": a" , probabilityDistribution, "on", m $ pksp_ ⨯ sksp_, "called the", keyPairDistribution']
item $ s [m encapf_ <> ": a randomized", encapsulationFunction', m $ fun encapf_ pksp_ $ csp_ ⨯ ksp_]
item $ s [m decapf_ <> ": a", decapsulationFunction', m $ fun decapf_ (csp_ ⨯ ksp_) $ ksp_ ⨯ setof undef]
let p_ = "p"
s_ = "s"
c_ = "c"
k_ = "k"
s ["... such that for every", publicKey, "/", secretKey, "pair, sampled from", m kpdist_ <> ",", m $ tuple p_ s_, and, m $ tuple c_ k_ =: encap p_, "it holds that", m $ decap c_ s_ =: k_]
nte $ do
s ["Note that this equality does not have to hald for every", publicKey, "/", secretKey, "pair"]
s ["Indeed, most", kEMs, "will have a", keyPairDistribution, "in which some", keyPairs, "have", probability, "mass", m 0]
keyEncapsulationINDCCAGameDefinition :: Note
keyEncapsulationINDCCAGameDefinition = de $ do
s ["Let", m pksp_, "be a", publicKeySpace <> ",", m sksp_, "a", secretKeySpace <> ",", m ksp_, "a symmetric", keySpace, and, m csp_, "a", ciphertextSpace]
s [the, iNDCCA, "game for a", keyEncapsulationMechanism, m kem_, "proceeds as follows"]
let p_ = "p"
s_ = "s"
c_ = "c"
k_ = "k"
b = "b"
b' = b <> "'"
r = "r"
enumerate $ do
item $ do
s [the, challenger, "samples a", keyPair, m $ (tuple p_ s_) ∈ (pksp_ ⨯ sksp_), "of a", publicKey, anda, secretKey, "according to the", keyPairDistribution, m kpdist_, "and computes a", ciphertext, "/", symmetricKey, "pair", m $ tuple c_ k_ =: encap p_]
s ["He then samples a uniform bit", m b, "and sends", m $ triple p_ c_ k_, "to the adversary if", m b, "is not set and otherwise", m $ triple p_ c_ r, "for an independently and uniformly sampled", m $ r ∈ ksp_, "if", m b, "is set"]
let c_' = c_ <> "'"
k_' = k_ <> "'"
item $ s [the, adversary, "can choose", ciphertexts, m $ c_' ∈ csp_, "different from", m c_, "and receive their decapsulations", m $ k_' =: decap c_' s_, "from the", challenger]
item $ s [the, adversary, "guesses", m b, "by outputting a bit", m b']
s [the, adversary, "wins the game if he guesses", m b, "correctly and his", advantage, "is defined as follows"]
ma $ 2 * (pars $ prob (b' =: b) - 1 /: 2)
|
NorfairKing/the-notes
|
src/Cryptography/KeyEncapsulation.hs
|
gpl-2.0
| 3,527
| 0
| 21
| 889
| 1,066
| 582
| 484
| 54
| 1
|
{- |
Module : ./Driver/WriteLibDefn.hs
Description : Writing out a DOL library
Copyright : (c) Klaus Luettich, C.Maeder, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable(DevGraph)
Writing out DOL env files as much as is needed for
the static analysis
-}
module Driver.WriteLibDefn
( getFilePrefix
, getFilePrefixGeneric
, writeLibDefn
, writeLibDefnLatex
, toShATermString
, writeShATermFile
, writeFileInfo
) where
import Common.Utils
import Common.Doc
import Common.LibName
import Common.PrintLaTeX
import Common.GlobalAnnotations (GlobalAnnos)
import Common.ConvertGlobalAnnos ()
import Common.IO
import Control.Exception as Exception
import ATerm.AbstractSyntax
import qualified ATerm.ReadWrite as AT
import ATC.AS_Library ()
import ATC.DevGraph ()
import ATC.Grothendieck
import Logic.Grothendieck
import Syntax.AS_Library (LIB_DEFN ())
import Syntax.Print_AS_Library ()
import Syntax.Print_AS_Structured
import Syntax.ToXml
import Text.XML.Light (ppTopElement)
import Driver.Options
import Driver.Version
import System.FilePath
-- | Compute the prefix for files to be written out
getFilePrefix :: HetcatsOpts -> FilePath -> (FilePath, FilePath)
getFilePrefix opts = getFilePrefixGeneric (envSuffix : downloadExtensions)
$ outdir opts
-- | Version of getFilePrefix with explicit parameters
getFilePrefixGeneric :: [String] -- ^ list of suffixes
-> FilePath -- ^ the outdir
-> FilePath -> (FilePath, FilePath)
getFilePrefixGeneric suffs odir' file =
let (base, path, _) = fileparse suffs file
odir = if null odir' then path else odir'
in (odir, odir </> base)
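-- For example, with no explicit outdir the prefix is derived from the input
-- file's own directory and base name, so an input like "Specs/Foo.dol" yields
-- output names of the form "Specs/Foo.<ext>" next to the source.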
{- |
Write the given LIB_DEFN in every format that HetcatsOpts includes.
Filenames are determined by the output formats.
-}
writeLibDefn :: LogicGraph -> GlobalAnnos -> FilePath -> HetcatsOpts
-> LIB_DEFN -> IO ()
writeLibDefn lg ga fullFileName opts ld = do
let file = tryToStripPrefix "file://" fullFileName
(odir, filePrefix) = getFilePrefix opts file
printXml fn = writeFile fn $ ppTopElement (xmlLibDefn lg ga ld)
printAscii b fn = writeEncFile (ioEncoding opts) fn
$ renderExtText (StripComment b) ga (prettyLG lg ld) ++ "\n"
printHtml fn = writeEncFile (ioEncoding opts) fn
$ renderHtml ga $ prettyLG lg ld
write_type :: OutType -> IO ()
write_type ty = case ty of
PrettyOut pty -> do
let fn = filePrefix ++ "." ++ show ty
putIfVerbose opts 2 $ "Writing file: " ++ fn
case pty of
PrettyXml -> printXml fn
PrettyAscii b -> printAscii b fn
PrettyHtml -> printHtml fn
PrettyLatex b -> writeLibDefnLatex lg b ga fn ld
_ -> return () -- implemented elsewhere
putIfVerbose opts 3 ("Current OutDir: " ++ odir)
mapM_ write_type $ outtypes opts
writeLibDefnLatex :: LogicGraph -> Bool -> GlobalAnnos -> FilePath -> LIB_DEFN
-> IO ()
writeLibDefnLatex lg lbl ga oup = writeFile oup . renderLatex Nothing
. toLatexAux (StripComment False) (MkLabel lbl) ga . prettyLG lg
toShATermString :: ShATermLG a => a -> IO String
toShATermString = fmap AT.writeSharedATerm . versionedATermTable
writeShATermFile :: ShATermLG a => FilePath -> a -> IO ()
writeShATermFile fp atcon = toShATermString atcon >>= writeFile fp
versionedATermTable :: ShATermLG a => a -> IO ATermTable
versionedATermTable atcon = do
(att1, versionno) <- toShATermLG emptyATermTable hetsVersionNumeric
(att2, aterm) <- toShATermLG att1 atcon
return $ fst $ addATerm (ShAAppl "hets" [versionno, aterm] []) att2
writeShATermFileSDoc :: ShATermLG a => FilePath -> a -> IO ()
writeShATermFileSDoc fp atcon =
versionedATermTable atcon >>= AT.writeSharedATermFile fp
writeFileInfo :: ShATermLG a => HetcatsOpts -> LibName
-> FilePath -> LIB_DEFN -> a -> IO ()
writeFileInfo opts ln fullFileName ld gctx =
let file = tryToStripPrefix "file://" fullFileName
envFile = snd (getFilePrefix opts file) ++ envSuffix in
case analysis opts of
Basic -> do
putIfVerbose opts 2 ("Writing file: " ++ envFile)
Exception.catch (writeShATermFileSDoc envFile (ln, (ld, gctx)))
$ \ err -> do
putIfVerbose opts 2 (envFile ++ " not written")
putIfVerbose opts 3 ("see following error description:\n"
++ shows (err :: SomeException) "\n")
_ -> putIfVerbose opts 2 ("Not writing " ++ envFile)
|
spechub/Hets
|
Driver/WriteLibDefn.hs
|
gpl-2.0
| 4,678
| 0
| 20
| 1,095
| 1,204
| 613
| 591
| 93
| 5
|
{- |
Module : $Header$
Description : datastructures for annotations of (Het)CASL.
Copyright : (c) Klaus Luettich, Christian Maeder, and Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
Datastructures for annotations of (Het)CASL.
There is also a parameterized data type for an 'Annoted' 'item'.
See also chapter II.5 of the CASL Reference Manual.
-}
module Common.AS_Annotation where
import Common.Id
import Common.IRI (IRI)
import Data.Maybe
import qualified Data.Map as Map
-- DrIFT command
{-! global: GetRange !-}
-- | start of an annote with its WORD or a comment
data Annote_word = Annote_word String | Comment_start deriving (Show, Eq, Ord)
-- | line or group for 'Unparsed_anno'
data Annote_text = Line_anno String | Group_anno [String]
deriving (Show, Eq, Ord)
{- | formats to be displayed (may be extended in the future).
Drop 3 from the show result to get the string for parsing and printing -}
data Display_format = DF_HTML | DF_LATEX | DF_RTF deriving (Show, Eq, Ord)
-- | swap the entries of a lookup table
swapTable :: [(a, b)] -> [(b, a)]
swapTable = map $ \ (a, b) -> (b, a)
-- | drop the first 3 characters from the show result
toTable :: (Show a) => [a] -> [(a, String)]
toTable = map $ \ a -> (a, drop 3 $ show a)
-- | a lookup table for the textual representation of display formats
display_format_table :: [(Display_format, String)]
display_format_table = toTable [ DF_HTML, DF_LATEX, DF_RTF ]
{- | lookup the textual representation of a display format
in 'display_format_table' -}
lookupDisplayFormat :: Display_format -> String
lookupDisplayFormat df =
fromMaybe (error "lookupDisplayFormat: unknown display format")
$ lookup df display_format_table
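-- e.g. lookupDisplayFormat DF_LATEX == "LATEX"; the three dropped characters
-- are the "DF_" prefix of the constructor name.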
{- | precedence 'Lower' means less and 'BothDirections' means less and greater.
'Higher' means greater but this is syntactically not allowed in 'Prec_anno'.
'NoDirection' can also not be specified explicitly,
but covers those ids that are not mentioned in precedences. -}
data PrecRel = Higher | Lower | BothDirections | NoDirection
deriving (Show, Eq, Ord)
-- | either left or right associative
data AssocEither = ALeft | ARight deriving (Show, Eq, Ord)
{- | semantic (line) annotations without further information.
Use the same drop-3-trick as for the 'Display_format'. -}
data Semantic_anno = SA_cons | SA_def | SA_implies | SA_mono | SA_implied
| SA_mcons | SA_ccons
deriving (Show, Eq, Ord)
-- | a lookup table for the textual representation of semantic annos
semantic_anno_table :: [(Semantic_anno, String)]
semantic_anno_table =
toTable [SA_cons, SA_def, SA_implies, SA_mono, SA_implied, SA_mcons, SA_ccons]
{- | lookup the textual representation of a semantic anno
in 'semantic_anno_table' -}
lookupSemanticAnno :: Semantic_anno -> String
lookupSemanticAnno sa =
fromMaybe (error "lookupSemanticAnno: no semantic anno")
$ lookup sa semantic_anno_table
-- | all possible annotations (without comment-outs)
data Annotation = -- | constructor for comments or unparsed annotes
Unparsed_anno Annote_word Annote_text Range
-- | known annotes
| Display_anno Id [(Display_format, String)] Range
-- position of anno start, keywords and anno end
| List_anno Id Id Id Range
-- position of anno start, commas and anno end
| Number_anno Id Range
-- position of anno start, commas and anno end
| Float_anno Id Id Range
-- position of anno start, commas and anno end
| String_anno Id Id Range
-- position of anno start, commas and anno end
| Prec_anno PrecRel [Id] [Id] Range
{- positions: "{",commas,"}", PrecRel, "{",commas,"}"
Lower = "< " BothDirections = "<>" -}
| Assoc_anno AssocEither [Id] Range -- position of commas
| Label [String] Range
-- position of anno start and anno end
| Prefix_anno [(String, IRI)] Range
-- position of anno start and anno end
-- All annotations below are only as annote line allowed
| Semantic_anno Semantic_anno Range
{- position information for annotations is provided
by every annotation -}
deriving (Show, Eq, Ord)
{- | 'isLabel' tests if the given 'Annotation' is a label
(a 'Label' typically follows a formula) -}
isLabel :: Annotation -> Bool
isLabel a = case a of
Label _ _ -> True
_ -> False
isImplies :: Annotation -> Bool
isImplies a = case a of
Semantic_anno SA_implies _ -> True
_ -> False
isImplied :: Annotation -> Bool
isImplied a = case a of
Semantic_anno SA_implied _ -> True
_ -> False
-- | 'isSemanticAnno' tests if the given 'Annotation' is a semantic one
isSemanticAnno :: Annotation -> Bool
isSemanticAnno a = case a of
Semantic_anno _ _ -> True
_ -> False
{- | 'isComment' tests if the given 'Annotation' is a comment line or a
comment group -}
isComment :: Annotation -> Bool
isComment c = case c of
Unparsed_anno Comment_start _ _ -> True
_ -> False
-- | 'isAnnote' is the negation of 'isComment'
isAnnote :: Annotation -> Bool
isAnnote = not . isComment
-- | separate prefix annotations and put them into a map
partPrefixes :: [Annotation] -> (Map.Map String IRI, [Annotation])
partPrefixes = foldr (\ a (m, l) -> case a of
Prefix_anno p _ -> (Map.union m $ Map.fromList p, l)
_ -> (m, a : l)) (Map.empty, [])
{- | an item wrapped in preceding (left 'l_annos')
and following (right 'r_annos') annotations.
'opt_pos' should carry the position of an optional semicolon
following a formula (but is currently unused). -}
data Annoted a = Annoted
{ item :: a
, opt_pos :: Range
, l_annos :: [Annotation]
, r_annos :: [Annotation] } deriving (Show, Ord, Eq)
annoRange :: (a -> [Pos]) -> Annoted a -> [Pos]
annoRange f a =
joinRanges $ map (rangeToList . getRange) (l_annos a) ++ [f $ item a]
++ [rangeToList (opt_pos a)] ++ map (rangeToList . getRange) (r_annos a)
notImplied :: Annoted a -> Bool
notImplied = not . any isImplied . r_annos
-- | naming or labelling sentences
data SenAttr s a = SenAttr
{ senAttr :: a
, isAxiom :: Bool
, isDef :: Bool
, wasTheorem :: Bool
{- will be set to True when status of isAxiom
changes from False to True -}
, simpAnno :: Maybe Bool -- for %simp or %nosimp annotations
, attrOrigin :: Maybe Id
, senMark :: String -- a marker for theoroidal comorphisms
, sentence :: s } deriving (Eq, Ord, Show)
-- | equip a sentence with a name
makeNamed :: a -> s -> SenAttr s a
makeNamed a s = SenAttr
{ senAttr = a
, isAxiom = True
, isDef = False
, wasTheorem = False
, simpAnno = Nothing
, attrOrigin = Nothing
, senMark = ""
, sentence = s }
type Named s = SenAttr s String
markSen :: String -> Named s -> Named s
markSen m n = n { senMark = m }
unmark :: Named s -> Named s
unmark = markSen ""
reName :: (a -> b) -> SenAttr s a -> SenAttr s b
reName f x = x { senAttr = f $ senAttr x }
-- | extending sentence maps to maps on labelled sentences
mapNamed :: (s -> t) -> SenAttr s a -> SenAttr t a
mapNamed f x = x { sentence = f $ sentence x }
-- | extending sentence maybe-maps to maps on labelled sentences
mapNamedM :: Monad m => (s -> m t) -> Named s -> m (Named t)
mapNamedM f x = do
y <- f $ sentence x
return x {sentence = y}
-- | process all items and wrap matching annotations around the results
mapAnM :: (Monad m) => (a -> m b) -> [Annoted a] -> m [Annoted b]
mapAnM f al =
do il <- mapM (f . item) al
return $ zipWith (flip replaceAnnoted) al il
instance Functor Annoted where
fmap f (Annoted x o l r) = Annoted (f x) o l r
-- | replace the 'item'
replaceAnnoted :: b -> Annoted a -> Annoted b
replaceAnnoted x (Annoted _ o l r) = Annoted x o l r
{- one could use this fmap variant instead (less efficient)
replaceAnnoted x = fmap (const x)
or even:
replaceAnnoted = (<$) -}
-- | add further following annotations
appendAnno :: Annoted a -> [Annotation] -> Annoted a
appendAnno (Annoted x p l r) = Annoted x p l . (r ++)
-- | put together preceding annotations and an item
addLeftAnno :: [Annotation] -> a -> Annoted a
addLeftAnno l i = Annoted i nullRange l []
-- | decorate with no annotations
emptyAnno :: a -> Annoted a
emptyAnno = addLeftAnno []
-- | get the label following (or to the right of) an 'item'
getRLabel :: Annoted a -> String
getRLabel a =
case filter isLabel (r_annos a) of
Label l _ : _ -> unwords $ concatMap words l
_ -> ""
{- | check for an annotation starting with % and the input str
(does not work for known annotation words) -}
identAnno :: String -> Annotation -> Bool
identAnno str an = case an of
Unparsed_anno (Annote_word wrd) _ _ -> wrd == str
_ -> False
-- | test all annotations for an item
hasIdentAnno :: String -> Annoted a -> Bool
hasIdentAnno str a = any (identAnno str) $ l_annos a ++ r_annos a
makeNamedSen :: Annoted a -> Named a
makeNamedSen a = (makeNamed (getRLabel a) $ item a)
{ isAxiom = notImplied a
, simpAnno = case (hasIdentAnno "simp" a, hasIdentAnno "nosimp" a) of
(True, False) -> Just True
(False, True) -> Just False
_ -> Nothing }
annoArg :: Annote_text -> String
annoArg txt = case txt of
Line_anno str -> str
Group_anno ls -> unlines ls
newtype Name = Name String
instance Show Name where
show (Name s) = s
getAnnoName :: Annoted a -> Name
getAnnoName = Name . foldr (\ an -> case an of
Unparsed_anno (Annote_word wrd) txt _ | wrd == "name" ->
(annoArg txt ++)
_ -> id) "" . l_annos
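-- Illustrative sketch, not part of the original module: how 'makeNamedSen'
-- turns the right annotations of an 'Annoted' item into the fields of a
-- 'Named' sentence. The sentence type 'String' and the annotation values
-- below are made up for the example.
exampleNamedSen :: Named String
exampleNamedSen =
  let an = (emptyAnno "x = y")
             { r_annos = [ Label ["ax1"] nullRange
                         , Semantic_anno SA_implied nullRange ] }
  in makeNamedSen an
  -- senAttr exampleNamedSen == "ax1"   (taken from the %label)
  -- isAxiom exampleNamedSen == False   (because of the %implied annotation)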
|
nevrenato/HetsAlloy
|
Common/AS_Annotation.der.hs
|
gpl-2.0
| 9,853
| 4
| 17
| 2,382
| 2,298
| 1,257
| 1,041
| 158
| 3
|
-- | Library with auxiliary functions needed in multiple other modules.
module Bio.ViennaRNAParserLibrary (
parseNucleotideSequence,
parseNucleotideAlignmentEntry,
parseProteinSequence,
parseProteinAlignmentEntry,
readInt,
readDouble
) where
import Text.ParserCombinators.Parsec
readDouble :: String -> Double
readDouble = read
readInt :: String -> Int
readInt = read
-- | Parse nucleotide sequence. Allowed letters according to IUPAC
parseNucleotideSequence :: GenParser Char st String
parseNucleotideSequence = do
nucleotideSequence <- many1 (oneOf "RYSWKMBDHVNATUGCryswkmbdhvnatugc")
return $ nucleotideSequence
-- | Parse nucleotide alignment entry. Allowed letters according to IUPAC and commonly used gap characters
parseNucleotideAlignmentEntry :: GenParser Char st String
parseNucleotideAlignmentEntry = do
entry <- many1 (oneOf "~_-.RYSWKMBDHVNATUGCryswkmbdhvnatugc")
return $ entry
-- | Parse protein amino acid code sequence. Allowed letters according to IUPAC
parseProteinSequence :: GenParser Char st String
parseProteinSequence = do
proteinSequence <- many1 (oneOf "ABCDEFGHIKLMNPQRSTVWXYZabcdefghiklmnpqrstvwxyz")
return $ proteinSequence
-- | Parse protein amino acid code alignment entry. Allowed letters according to IUPAC and commonly used gap characters
parseProteinAlignmentEntry :: GenParser Char st String
parseProteinAlignmentEntry = do
entry <- many1 (oneOf "~_-.ABCDEFGHIKLMNPQRSTVWXYZabcdefghiklmnpqrstvwxyz")
return $ entry
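-- Illustrative sketch, not part of the original module: running one of the
-- parsers on a plain input string with Parsec's 'parse'. The input and the
-- source name "example" are made up for the demonstration.
exampleNucleotideParse :: Either ParseError String
exampleNucleotideParse = parse parseNucleotideSequence "example" "ACGUacgu"
-- expected: Right "ACGUacgu"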
|
eggzilla/ViennaRNAParser
|
src/Bio/ViennaRNAParserLibrary.hs
|
gpl-3.0
| 1,647
| 0
| 10
| 361
| 225
| 118
| 107
| 28
| 1
|
-- | Implementation of diff, match, and patch.
--
-- Based on Neil Fraser's work, as found at
-- https://code.google.com/archive/p/google-diff-match-patch/
{-# LANGUAGE ViewPatterns #-}
module Data.DiffMatchPatch
( DiffChange(..)
, calculateDiff
) where
import BasicPrelude hiding (insert, delete)
import qualified Data.Text as Text
import Data.DiffMatchPatch.Internal ( TextPair
, makeTextPair
, commonPrefix
, commonSuffix
, getTextCores
)
-- | A single change in a diff. A full diff is a sequence of these.
data DiffChange a = Delete a | Insert a | Equal a deriving (Eq, Show)
change :: (Text -> DiffChange Text) -> Text -> Maybe (DiffChange Text)
change f x
| Text.null x = Nothing
| otherwise = Just (f x)
insert' :: Text -> Maybe (DiffChange Text)
insert' = change Insert
insert :: Text -> [DiffChange Text]
insert = maybeToList . insert'
delete' :: Text -> Maybe (DiffChange Text)
delete' = change Delete
delete :: Text -> [DiffChange Text]
delete = maybeToList . delete'
equal' :: Text -> Maybe (DiffChange Text)
equal' = change Equal
equal :: Text -> [DiffChange Text]
equal = maybeToList . equal'
-- | Calculate the difference between two texts.
--
-- XXX: The only reason this is 'Text' is because we need a null check. Could
-- maybe use AsEmpty prism from Control.Lens.
calculateDiff :: Text -> Text -> [DiffChange Text]
calculateDiff x y
| x == y = equal x
| otherwise =
let pair = makeTextPair x y
in equal (commonPrefix pair) <> computeDiff pair <> equal (commonSuffix pair)
computeDiff :: TextPair -> [DiffChange Text]
computeDiff (getTextCores -> (x, y))
| Text.null x = insert y
| Text.null y = delete x
| otherwise =
let (swapped, (shorter, longer)) = ordered Text.length (x, y)
in
case extractInfix shorter longer of
-- 'shorter' is inside 'longer'
Just (prefix, suffix) ->
case swapped of
-- x is shorter, and thus inside y
NotSwapped -> [Insert prefix, Equal shorter, Insert suffix]
-- y is shorter, and thus inside x
Swapped -> [Delete prefix, Equal shorter, Delete suffix]
Nothing ->
if Text.length shorter == 1
then
-- TextPair guarantees that x and y are either both empty or never
-- equal
[Delete x, Insert y]
else
error "TODO: half-match; line mode; bisect"
-- | If `needle` is inside `haystack`, return the bit before `needle` and the
-- bit after it. Otherwise, return 'Nothing'.
--
-- If `needle` occurs multiple times inside `haystack`, just break on the
-- first occurrence.
--
-- Laws:
--
-- If `extractInfix x y` returns `Just (a, b)`, then a <> x <> b == y
-- If `extractInfix x y` returns `Nothing`, then there is no `a` and `b`
-- such that a <> x <> b == y
--
-- Examples:
--
-- > extractInfix "foo" "bananafooapple" == Just ("banana", "apple")
-- > extractInfix "foo" "bananaapple" == Nothing
extractInfix :: Text -> Text -> Maybe (Text, Text)
extractInfix needle haystack =
case Text.breakOn needle haystack of
(_, "") -> Nothing
(prefix, suffix) -> Just (prefix, Text.drop (Text.length needle) suffix)
data Swap = NotSwapped | Swapped
ordered :: Ord b => (a -> b) -> (a, a) -> (Swap, (a, a))
ordered f (x, y) = if f x < f y then (NotSwapped, (x, y)) else (Swapped, (y, x))
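-- Illustrative sketch, not part of the original module: the common prefix and
-- suffix come back as 'Equal' chunks around the diff of the remaining cores.
-- The concrete inputs are made up; the expected value assumes 'commonPrefix'
-- and 'commonSuffix' behave as their names suggest.
exampleDiff :: [DiffChange Text]
exampleDiff = calculateDiff (Text.pack "banana") (Text.pack "bandana")
-- expected: [Equal "ban", Insert "d", Equal "ana"]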
|
jml/diff-match-patch
|
src/Data/DiffMatchPatch.hs
|
gpl-3.0
| 3,563
| 0
| 15
| 986
| 871
| 472
| 399
| 57
| 4
|
-- (c) The University of Glasgow 2006
{-# LANGUAGE CPP #-}
-- | Highly random utility functions
--
module Language.Haskell.Utility.Util (abstractConstr,
snocView,
thenCmp,
isSingleton,
partitionWith,
dropWhileEndLE,
debugIsOn,
dropTail,
readRational,
fuzzyLookup,
mapFst,
takeList,
looksLikePackageName,
expectJust,
orElse,
concatMapM,
foldrM,
mapAccumLM,
anyM) where
-- import U.Panic
import Control.Monad
import Data.Data
import Data.List hiding (group)
import Control.Applicative ( liftA2 )
import Data.Char ( isAlphaNum, ord, isDigit )
import Data.Ratio ( (%) )
import Data.Ord ( comparing )
import Data.Bits
import Data.Word
import qualified Data.IntMap as IM
import Data.Maybe
#if __GLASGOW_HASKELL__ >= 800
import GHC.Stack
#else
import GHC.Exts (Constraint)
type HasCallStack = (() :: Constraint)
#endif
panic = error "SHAYAN HACK!"
infixr 9 `thenCmp`
{-
************************************************************************
* *
\subsection{Is DEBUG on, are we on Windows, etc?}
* *
************************************************************************
These booleans are global constants, set by CPP flags. They allow us to
recompile a single module (this one) to change whether or not debug output
appears. They sometimes let us avoid even running CPP elsewhere.
It's important that the flags are literal constants (True/False). Then,
with -O, tests of the flags in other modules will simplify to the correct
branch of the conditional, thereby dropping debug code altogether when
the flags are off.
-}
debugIsOn :: Bool
#ifdef DEBUG
debugIsOn = True
#else
debugIsOn = False
#endif
partitionWith :: (a -> Either b c) -> [a] -> ([b], [c])
-- ^ Uses a function to determine which of two output lists an input element should join
partitionWith _ [] = ([],[])
partitionWith f (x:xs) = case f x of
Left b -> (b:bs, cs)
Right c -> (bs, c:cs)
where (bs,cs) = partitionWith f xs
mapFst :: (a->c) -> [(a,b)] -> [(c,b)]
mapFst f xys = [(f x, y) | (x,y) <- xys]
isSingleton :: [a] -> Bool
isSingleton [_] = True
isSingleton _ = False
{-
************************************************************************
* *
\subsection[Utils-accum]{Accumulating}
* *
************************************************************************
A combination of foldl with zip. It works with equal length lists.
-}
{-
@splitAt@, @take@, and @drop@ but with length of another
list giving the break-off point:
-}
takeList :: [b] -> [a] -> [a]
-- (takeList as bs) trims bs to be the same length
-- as as, unless as is longer in which case it's a no-op
takeList [] _ = []
takeList (_:xs) ls =
case ls of
[] -> []
(y:ys) -> y : takeList xs ys
-- drop from the end of a list
dropTail :: Int -> [a] -> [a]
-- Specification: dropTail n = reverse . drop n . reverse
-- Better implementation due to Joachim Breitner
-- http://www.joachim-breitner.de/blog/archives/600-On-taking-the-last-n-elements-of-a-list.html
dropTail n xs
= go (drop n xs) xs
where
go (_:ys) (x:xs) = x : go ys xs
go _ _ = [] -- Stop when ys runs out
-- It'll always run out before xs does
-- dropWhile from the end of a list. This is similar to Data.List.dropWhileEnd,
-- but is lazy in the elements and strict in the spine. For reasonably short lists,
-- such as path names and typical lines of text, dropWhileEndLE is generally
-- faster than dropWhileEnd. Its advantage is magnified when the predicate is
-- expensive--using dropWhileEndLE isSpace to strip the space off a line of text
-- is generally much faster than using dropWhileEnd isSpace for that purpose.
-- Specification: dropWhileEndLE p = reverse . dropWhile p . reverse
-- Pay attention to the short-circuit (&&)! The order of its arguments is the only
-- difference between dropWhileEnd and dropWhileEndLE.
dropWhileEndLE :: (a -> Bool) -> [a] -> [a]
dropWhileEndLE p = foldr (\x r -> if null r && p x then [] else x:r) []
snocView :: [a] -> Maybe ([a],a)
-- Split off the last element
snocView [] = Nothing
snocView xs = go [] xs
where
-- Invariant: second arg is non-empty
go acc [x] = Just (reverse acc, x)
go acc (x:xs) = go (x:acc) xs
go _ [] = panic "Util: snocView"
split :: Char -> String -> [String]
split c s = case rest of
[] -> [chunk]
_:rest -> chunk : split c rest
where (chunk, rest) = break (==c) s
thenCmp :: Ordering -> Ordering -> Ordering
{-# INLINE thenCmp #-}
thenCmp EQ ordering = ordering
thenCmp ordering _ = ordering
-- Boolean operators lifted to Applicative
(<&&>) :: Applicative f => f Bool -> f Bool -> f Bool
(<&&>) = liftA2 (&&)
infixr 3 <&&> -- same as (&&)
{-
************************************************************************
* *
\subsection{Edit distance}
* *
************************************************************************
-}
-- | Find the "restricted" Damerau-Levenshtein edit distance between two strings.
-- See: <http://en.wikipedia.org/wiki/Damerau-Levenshtein_distance>.
-- Based on the algorithm presented in "A Bit-Vector Algorithm for Computing
-- Levenshtein and Damerau Edit Distances" in PSC'02 (Heikki Hyyro).
-- See http://www.cs.uta.fi/~helmu/pubs/psc02.pdf and
-- http://www.cs.uta.fi/~helmu/pubs/PSCerr.html for an explanation
restrictedDamerauLevenshteinDistance :: String -> String -> Int
restrictedDamerauLevenshteinDistance str1 str2
= restrictedDamerauLevenshteinDistanceWithLengths m n str1 str2
where
m = length str1
n = length str2
restrictedDamerauLevenshteinDistanceWithLengths
:: Int -> Int -> String -> String -> Int
restrictedDamerauLevenshteinDistanceWithLengths m n str1 str2
| m <= n
= if n <= 32 -- n must be larger so this check is sufficient
then restrictedDamerauLevenshteinDistance' (undefined :: Word32) m n str1 str2
else restrictedDamerauLevenshteinDistance' (undefined :: Integer) m n str1 str2
| otherwise
= if m <= 32 -- m must be larger so this check is sufficient
then restrictedDamerauLevenshteinDistance' (undefined :: Word32) n m str2 str1
else restrictedDamerauLevenshteinDistance' (undefined :: Integer) n m str2 str1
restrictedDamerauLevenshteinDistance'
:: (Bits bv, Num bv) => bv -> Int -> Int -> String -> String -> Int
restrictedDamerauLevenshteinDistance' _bv_dummy m n str1 str2
| [] <- str1 = n
| otherwise = extractAnswer $
foldl' (restrictedDamerauLevenshteinDistanceWorker
(matchVectors str1) top_bit_mask vector_mask)
(0, 0, m_ones, 0, m) str2
where
m_ones@vector_mask = (2 ^ m) - 1
top_bit_mask = (1 `shiftL` (m - 1)) `asTypeOf` _bv_dummy
extractAnswer (_, _, _, _, distance) = distance
restrictedDamerauLevenshteinDistanceWorker
:: (Bits bv, Num bv) => IM.IntMap bv -> bv -> bv
-> (bv, bv, bv, bv, Int) -> Char -> (bv, bv, bv, bv, Int)
restrictedDamerauLevenshteinDistanceWorker str1_mvs top_bit_mask vector_mask
(pm, d0, vp, vn, distance) char2
= seq str1_mvs $ seq top_bit_mask $ seq vector_mask $
seq pm' $ seq d0' $ seq vp' $ seq vn' $
seq distance'' $ seq char2 $
(pm', d0', vp', vn', distance'')
where
pm' = IM.findWithDefault 0 (ord char2) str1_mvs
d0' = ((((sizedComplement vector_mask d0) .&. pm') `shiftL` 1) .&. pm)
.|. ((((pm' .&. vp) + vp) .&. vector_mask) `xor` vp) .|. pm' .|. vn
-- No need to mask the shiftL because of the restricted range of pm
hp' = vn .|. sizedComplement vector_mask (d0' .|. vp)
hn' = d0' .&. vp
hp'_shift = ((hp' `shiftL` 1) .|. 1) .&. vector_mask
hn'_shift = (hn' `shiftL` 1) .&. vector_mask
vp' = hn'_shift .|. sizedComplement vector_mask (d0' .|. hp'_shift)
vn' = d0' .&. hp'_shift
distance' = if hp' .&. top_bit_mask /= 0 then distance + 1 else distance
distance'' = if hn' .&. top_bit_mask /= 0 then distance' - 1 else distance'
sizedComplement :: Bits bv => bv -> bv -> bv
sizedComplement vector_mask vect = vector_mask `xor` vect
matchVectors :: (Bits bv, Num bv) => String -> IM.IntMap bv
matchVectors = snd . foldl' go (0 :: Int, IM.empty)
where
go (ix, im) char = let ix' = ix + 1
im' = IM.insertWith (.|.) (ord char) (2 ^ ix) im
in seq ix' $ seq im' $ (ix', im')
{-# SPECIALIZE INLINE restrictedDamerauLevenshteinDistance'
:: Word32 -> Int -> Int -> String -> String -> Int #-}
{-# SPECIALIZE INLINE restrictedDamerauLevenshteinDistance'
:: Integer -> Int -> Int -> String -> String -> Int #-}
{-# SPECIALIZE restrictedDamerauLevenshteinDistanceWorker
:: IM.IntMap Word32 -> Word32 -> Word32
-> (Word32, Word32, Word32, Word32, Int)
-> Char -> (Word32, Word32, Word32, Word32, Int) #-}
{-# SPECIALIZE restrictedDamerauLevenshteinDistanceWorker
:: IM.IntMap Integer -> Integer -> Integer
-> (Integer, Integer, Integer, Integer, Int)
-> Char -> (Integer, Integer, Integer, Integer, Int) #-}
{-# SPECIALIZE INLINE sizedComplement :: Word32 -> Word32 -> Word32 #-}
{-# SPECIALIZE INLINE sizedComplement :: Integer -> Integer -> Integer #-}
{-# SPECIALIZE matchVectors :: String -> IM.IntMap Word32 #-}
{-# SPECIALIZE matchVectors :: String -> IM.IntMap Integer #-}
-- | Search for possible matches to the users input in the given list,
-- returning a small number of ranked results
fuzzyLookup :: String -> [(String,a)] -> [a]
fuzzyLookup user_entered possibilites
= map fst $ take mAX_RESULTS $ sortBy (comparing snd)
[ (poss_val, distance) | (poss_str, poss_val) <- possibilites
, let distance = restrictedDamerauLevenshteinDistance
poss_str user_entered
, distance <= fuzzy_threshold ]
where
-- Work out an appropriate match threshold:
-- We report a candidate if its edit distance is <= the threshold,
-- The threshold is set to about a quarter of the # of characters the user entered
-- Length Threshold
-- 1 0 -- Don't suggest *any* candidates
-- 2 1 -- for single-char identifiers
-- 3 1
-- 4 1
-- 5 1
-- 6 2
--
fuzzy_threshold = truncate $ fromIntegral (length user_entered + 2) / (4 :: Rational)
mAX_RESULTS = 3
-- Global variables:
-- Module names:
-- Similar to 'parse' for Distribution.Package.PackageName,
-- but we don't want to depend on Cabal.
looksLikePackageName :: String -> Bool
looksLikePackageName = all (all isAlphaNum <&&> not . (all isDigit)) . split '-'
{-
-- -----------------------------------------------------------------------------
-- Floats
-}
readRational__ :: ReadS Rational -- NB: doesn't handle leading "-"
readRational__ r = do
(n,d,s) <- readFix r
(k,t) <- readExp s
return ((n%1)*10^^(k-d), t)
where
readFix r = do
(ds,s) <- lexDecDigits r
(ds',t) <- lexDotDigits s
return (read (ds++ds'), length ds', t)
readExp (e:s) | e `elem` "eE" = readExp' s
readExp s = return (0,s)
readExp' ('+':s) = readDec s
readExp' ('-':s) = do (k,t) <- readDec s
return (-k,t)
readExp' s = readDec s
readDec s = do
(ds,r) <- nonnull isDigit s
return (foldl1 (\n d -> n * 10 + d) [ ord d - ord '0' | d <- ds ],
r)
lexDecDigits = nonnull isDigit
lexDotDigits ('.':s) = return (span isDigit s)
lexDotDigits s = return ("",s)
nonnull p s = do (cs@(_:_),t) <- return (span p s)
return (cs,t)
readRational :: String -> Rational -- NB: *does* handle a leading "-"
readRational top_s
= case top_s of
'-' : xs -> - (read_me xs)
xs -> read_me xs
where
read_me s
= case (do { (x,"") <- readRational__ s ; return x }) of
[x] -> x
[] -> error ("readRational: no parse:" ++ top_s)
_ -> error ("readRational: ambiguous parse:" ++ top_s)
{-
************************************************************************
* *
\subsection[Utils-Data]{Utils for defining Data instances}
* *
************************************************************************
These functions helps us to define Data instances for abstract types.
-}
abstractConstr :: String -> Constr
abstractConstr n = mkConstr (abstractDataType n) ("{abstract:"++n++"}") [] Prefix
abstractDataType :: String -> DataType
abstractDataType n = mkDataType n [abstractConstr n]
expectJust :: HasCallStack => String -> Maybe a -> a
{-# INLINE expectJust #-}
expectJust _ (Just x) = x
expectJust err Nothing = error ("expectJust " ++ err)
infixr 4 `orElse`
-- | Flipped version of @fromMaybe@, useful for chaining.
orElse :: Maybe a -> a -> a
orElse = flip fromMaybe
-- | Monadic version of mapAccumL
mapAccumLM :: Monad m
=> (acc -> x -> m (acc, y)) -- ^ combining function
-> acc -- ^ initial state
-> [x] -- ^ inputs
-> m (acc, [y]) -- ^ final state, outputs
mapAccumLM _ s [] = return (s, [])
mapAccumLM f s (x:xs) = do
(s1, x') <- f s x
(s2, xs') <- mapAccumLM f s1 xs
return (s2, x' : xs')
-- | Monadic version of concatMap
concatMapM :: Monad m => (a -> m [b]) -> [a] -> m [b]
concatMapM f xs = liftM concat (mapM f xs)
-- | Monadic version of 'any', aborts the computation at the first @True@ value
anyM :: Monad m => (a -> m Bool) -> [a] -> m Bool
anyM _ [] = return False
anyM f (x:xs) = do b <- f x
if b then return True
else anyM f xs
-- | Monadic version of foldr
foldrM :: (Monad m) => (b -> a -> m a) -> a -> [b] -> m a
foldrM _ z [] = return z
foldrM k z (x:xs) = do { r <- foldrM k z xs; k x r }
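-- Illustrative sketch, not part of the original module: 'fuzzyLookup' ranks
-- candidates by restricted Damerau-Levenshtein distance from the user's input
-- and keeps only those within the size-dependent threshold. The candidate list
-- below is made up for the example.
exampleFuzzy :: [Int]
exampleFuzzy = fuzzyLookup "lenght" [("length", 1), ("filter", 2), ("lines", 3)]
-- expected: [1], since only "length" is within edit distance 2 of "lenght"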
|
shayan-najd/HsParser
|
Language/Haskell/Utility/Util.hs
|
gpl-3.0
| 14,989
| 0
| 17
| 4,393
| 3,432
| 1,864
| 1,568
| -1
| -1
|
{-
By Denis Krjuchkov
http://stackoverflow.com/questions/2354707/in-haskell-is-there-num-a-infinity-a
-}
module Infinitable where
data Infinitable a = NegativeInfinity | Regular a | PositiveInfinity
deriving (Eq, Show)
instance Ord a => Ord (Infinitable a) where
compare NegativeInfinity NegativeInfinity = EQ
compare PositiveInfinity PositiveInfinity = EQ
compare NegativeInfinity _ = LT
compare PositiveInfinity _ = GT
compare _ PositiveInfinity = LT
compare _ NegativeInfinity = GT
compare (Regular x) (Regular y) = compare x y
main =
let five = Regular 5
pinf = PositiveInfinity::Infinitable Integer
ninf = NegativeInfinity::Infinitable Integer
results = [(pinf > five), (ninf < pinf), (five > ninf)]
in
do putStrLn (show results)
|
graninas/Haskell-Algorithms
|
Data/Infinitable.hs
|
gpl-3.0
| 834
| 2
| 11
| 199
| 230
| 121
| 109
| 17
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.SQL.Instances.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a Cloud SQL instance.
--
-- /See:/ <https://developers.google.com/cloud-sql/ Cloud SQL Admin API Reference> for @sql.instances.delete@.
module Network.Google.Resource.SQL.Instances.Delete
(
-- * REST Resource
InstancesDeleteResource
-- * Creating a Request
, instancesDelete
, InstancesDelete
-- * Request Lenses
, idXgafv
, idUploadProtocol
, idProject
, idAccessToken
, idUploadType
, idCallback
, idInstance
) where
import Network.Google.Prelude
import Network.Google.SQLAdmin.Types
-- | A resource alias for @sql.instances.delete@ method which the
-- 'InstancesDelete' request conforms to.
type InstancesDeleteResource =
"v1" :>
"projects" :>
Capture "project" Text :>
"instances" :>
Capture "instance" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Operation
-- | Deletes a Cloud SQL instance.
--
-- /See:/ 'instancesDelete' smart constructor.
data InstancesDelete =
InstancesDelete'
{ _idXgafv :: !(Maybe Xgafv)
, _idUploadProtocol :: !(Maybe Text)
, _idProject :: !Text
, _idAccessToken :: !(Maybe Text)
, _idUploadType :: !(Maybe Text)
, _idCallback :: !(Maybe Text)
, _idInstance :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'InstancesDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'idXgafv'
--
-- * 'idUploadProtocol'
--
-- * 'idProject'
--
-- * 'idAccessToken'
--
-- * 'idUploadType'
--
-- * 'idCallback'
--
-- * 'idInstance'
instancesDelete
:: Text -- ^ 'idProject'
-> Text -- ^ 'idInstance'
-> InstancesDelete
instancesDelete pIdProject_ pIdInstance_ =
InstancesDelete'
{ _idXgafv = Nothing
, _idUploadProtocol = Nothing
, _idProject = pIdProject_
, _idAccessToken = Nothing
, _idUploadType = Nothing
, _idCallback = Nothing
, _idInstance = pIdInstance_
}
-- | V1 error format.
idXgafv :: Lens' InstancesDelete (Maybe Xgafv)
idXgafv = lens _idXgafv (\ s a -> s{_idXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
idUploadProtocol :: Lens' InstancesDelete (Maybe Text)
idUploadProtocol
= lens _idUploadProtocol
(\ s a -> s{_idUploadProtocol = a})
-- | Project ID of the project that contains the instance to be deleted.
idProject :: Lens' InstancesDelete Text
idProject
= lens _idProject (\ s a -> s{_idProject = a})
-- | OAuth access token.
idAccessToken :: Lens' InstancesDelete (Maybe Text)
idAccessToken
= lens _idAccessToken
(\ s a -> s{_idAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
idUploadType :: Lens' InstancesDelete (Maybe Text)
idUploadType
= lens _idUploadType (\ s a -> s{_idUploadType = a})
-- | JSONP
idCallback :: Lens' InstancesDelete (Maybe Text)
idCallback
= lens _idCallback (\ s a -> s{_idCallback = a})
-- | Cloud SQL instance ID. This does not include the project ID.
idInstance :: Lens' InstancesDelete Text
idInstance
= lens _idInstance (\ s a -> s{_idInstance = a})
instance GoogleRequest InstancesDelete where
type Rs InstancesDelete = Operation
type Scopes InstancesDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/sqlservice.admin"]
requestClient InstancesDelete'{..}
= go _idProject _idInstance _idXgafv
_idUploadProtocol
_idAccessToken
_idUploadType
_idCallback
(Just AltJSON)
sQLAdminService
where go
= buildClient
(Proxy :: Proxy InstancesDeleteResource)
mempty
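-- Illustrative sketch, not part of the generated module: building a request
-- value with the smart constructor; optional fields could then be overridden
-- through the exported lenses. The project and instance names are
-- placeholders.
exampleInstancesDelete :: InstancesDelete
exampleInstancesDelete = instancesDelete "my-project" "my-instance"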
|
brendanhay/gogol
|
gogol-sqladmin/gen/Network/Google/Resource/SQL/Instances/Delete.hs
|
mpl-2.0
| 4,863
| 0
| 18
| 1,201
| 780
| 454
| 326
| 114
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.CreativeFields.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets one creative field by ID.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.creativeFields.get@.
module Network.Google.Resource.DFAReporting.CreativeFields.Get
(
-- * REST Resource
CreativeFieldsGetResource
-- * Creating a Request
, creativeFieldsGet
, CreativeFieldsGet
-- * Request Lenses
, cfgXgafv
, cfgUploadProtocol
, cfgAccessToken
, cfgUploadType
, cfgProFileId
, cfgId
, cfgCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.creativeFields.get@ method which the
-- 'CreativeFieldsGet' request conforms to.
type CreativeFieldsGetResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"creativeFields" :>
Capture "id" (Textual Int64) :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] CreativeField
-- | Gets one creative field by ID.
--
-- /See:/ 'creativeFieldsGet' smart constructor.
data CreativeFieldsGet =
CreativeFieldsGet'
{ _cfgXgafv :: !(Maybe Xgafv)
, _cfgUploadProtocol :: !(Maybe Text)
, _cfgAccessToken :: !(Maybe Text)
, _cfgUploadType :: !(Maybe Text)
, _cfgProFileId :: !(Textual Int64)
, _cfgId :: !(Textual Int64)
, _cfgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CreativeFieldsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cfgXgafv'
--
-- * 'cfgUploadProtocol'
--
-- * 'cfgAccessToken'
--
-- * 'cfgUploadType'
--
-- * 'cfgProFileId'
--
-- * 'cfgId'
--
-- * 'cfgCallback'
creativeFieldsGet
:: Int64 -- ^ 'cfgProFileId'
-> Int64 -- ^ 'cfgId'
-> CreativeFieldsGet
creativeFieldsGet pCfgProFileId_ pCfgId_ =
CreativeFieldsGet'
{ _cfgXgafv = Nothing
, _cfgUploadProtocol = Nothing
, _cfgAccessToken = Nothing
, _cfgUploadType = Nothing
, _cfgProFileId = _Coerce # pCfgProFileId_
, _cfgId = _Coerce # pCfgId_
, _cfgCallback = Nothing
}
-- | V1 error format.
cfgXgafv :: Lens' CreativeFieldsGet (Maybe Xgafv)
cfgXgafv = lens _cfgXgafv (\ s a -> s{_cfgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cfgUploadProtocol :: Lens' CreativeFieldsGet (Maybe Text)
cfgUploadProtocol
= lens _cfgUploadProtocol
(\ s a -> s{_cfgUploadProtocol = a})
-- | OAuth access token.
cfgAccessToken :: Lens' CreativeFieldsGet (Maybe Text)
cfgAccessToken
= lens _cfgAccessToken
(\ s a -> s{_cfgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cfgUploadType :: Lens' CreativeFieldsGet (Maybe Text)
cfgUploadType
= lens _cfgUploadType
(\ s a -> s{_cfgUploadType = a})
-- | User profile ID associated with this request.
cfgProFileId :: Lens' CreativeFieldsGet Int64
cfgProFileId
= lens _cfgProFileId (\ s a -> s{_cfgProFileId = a})
. _Coerce
-- | Creative Field ID
cfgId :: Lens' CreativeFieldsGet Int64
cfgId
= lens _cfgId (\ s a -> s{_cfgId = a}) . _Coerce
-- | JSONP
cfgCallback :: Lens' CreativeFieldsGet (Maybe Text)
cfgCallback
= lens _cfgCallback (\ s a -> s{_cfgCallback = a})
instance GoogleRequest CreativeFieldsGet where
type Rs CreativeFieldsGet = CreativeField
type Scopes CreativeFieldsGet =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient CreativeFieldsGet'{..}
= go _cfgProFileId _cfgId _cfgXgafv
_cfgUploadProtocol
_cfgAccessToken
_cfgUploadType
_cfgCallback
(Just AltJSON)
dFAReportingService
where go
= buildClient
(Proxy :: Proxy CreativeFieldsGetResource)
mempty
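-- Illustrative sketch, not part of the generated module: the smart constructor
-- only needs the two required numeric identifiers; both values below are
-- placeholders.
exampleCreativeFieldsGet :: CreativeFieldsGet
exampleCreativeFieldsGet = creativeFieldsGet 1234567 42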
|
brendanhay/gogol
|
gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/CreativeFields/Get.hs
|
mpl-2.0
| 5,032
| 0
| 19
| 1,227
| 821
| 474
| 347
| 116
| 1
|
{-|
Module : Async
Description : hnfs-tester - Nfs (client library) test tool
Copyright : (c) 2014 Arne Redlich <arne.redlich@googlemail.com>
License : LGPL v2.1
Maintainer : Arne Redlich <arne.redlich@googlemail.com>
Stability : experimental
Portability : POSIX
hnfs-tester tests using hnfs' async interface.
-}
module Async ( nfs ) where
import Base
import Control.Concurrent.MVar
import Control.Exception (bracket)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Either
import Data.Maybe (fromJust)
import Data.Monoid
import qualified System.Linux.Epoll as Ep
import qualified System.Nfs as Nfs
-- epoll and poll events are (mostly) the same. However, we don't have any
-- means to get to the underlying event codes. The below should hopefully get
-- us far enough.
nfs_to_ep_event :: Nfs.Event -> Ep.EventType
nfs_to_ep_event ev
| ev == Nfs.eventRead = Ep.inEvent
| ev == Nfs.eventWrite = Ep.outEvent
| ev == Nfs.eventRead `mappend` Nfs.eventWrite = Ep.combineEvents [ Ep.inEvent
, Ep.outEvent ]
| otherwise = undefined
ep_to_nfs_event :: Ep.EventType -> Nfs.Event
ep_to_nfs_event ev
| ev Ep.=~ Ep.inEvent && ev Ep.=~ Ep.outEvent =
Nfs.eventRead `mappend` Nfs.eventWrite
| ev Ep.=~ Ep.inEvent = Nfs.eventRead
| ev Ep.=~ Ep.outEvent = Nfs.eventWrite
| ev Ep.=~ Ep.hangupEvent = Nfs.eventHup
| ev Ep.=~ Ep.urgentEvent = Nfs.eventPri
| ev Ep.=~ Ep.errorEvent = Nfs.eventErr
| otherwise = undefined
event_loop :: Nfs.Context -> MVar (Either String a) -> IO (Either String a)
event_loop ctx mv = do
mret <- tryTakeMVar mv
case mret of
Just ret -> return ret
Nothing -> do
bracket open Ep.close poll
event_loop ctx mv
where
-- Arbitrarily chosen size.
open = Ep.create (fromJust $ Ep.toSize 32)
-- The Context's fd can (and does!) change, so we need to get it out each time.
poll dev = do
fd <- Nfs.getFd ctx
evts <- Nfs.whichEvents ctx
bracket
(Ep.add dev () [ nfs_to_ep_event evts, Ep.oneShotEvent ] fd)
Ep.freeDesc
$ \_ -> do
-- Arbitrarily chosen timeout. epoll_wait allows -1 to wait indefinitely
-- but that does not seem to be supported by the haskell bindings.
evts' <- Ep.wait (fromJust $ Ep.toDuration 10) dev
let etypes = map Ep.eventType evts'
etype = Ep.combineEvents etypes
Nfs.service ctx $ ep_to_nfs_event etype
sync_wrap :: Nfs.Context ->
(Nfs.Callback a -> IO (Either String ())) ->
IO (Either String a)
sync_wrap ctx async_action = runEitherT $ do
mv <- liftIO $ newEmptyMVar
ret <- liftIO $ async_action $ putMVar mv
case ret of
Left s -> left $ "failed to invoke async action: " ++ s
Right () -> (liftIO $ event_loop ctx mv) >>= hoistEither
nfs :: SyncNfs
nfs = SyncNfs { syncMount = \ctx addr xprt ->
sync_wrap ctx $ Nfs.mountAsync ctx addr xprt
, syncOpenDir = \ctx path ->
sync_wrap ctx $ Nfs.openDirAsync ctx path
, syncMkDir = \ctx path ->
sync_wrap ctx $ Nfs.mkDirAsync ctx path
, syncRmDir = \ctx path ->
sync_wrap ctx $ Nfs.rmDirAsync ctx path
, syncStat = \ctx path ->
sync_wrap ctx $ Nfs.statAsync ctx path
, syncCreat = \ctx path mode ->
sync_wrap ctx $ Nfs.creatAsync ctx path mode
, syncUnlink = \ctx path ->
sync_wrap ctx $ Nfs.unlinkAsync ctx path
, syncOpen = \ctx path mode ->
sync_wrap ctx $ Nfs.openAsync ctx path mode
, syncRead = \ctx fh size ->
sync_wrap ctx $ Nfs.readAsync fh size
, syncPRead = \ctx fh size off ->
sync_wrap ctx $ Nfs.preadAsync fh size off
, syncWrite = \ctx fh bs ->
sync_wrap ctx $ Nfs.writeAsync fh bs
, syncPWrite = \ctx fh bs off ->
sync_wrap ctx $ Nfs.pwriteAsync fh bs off
, syncTruncate = \ctx path off ->
sync_wrap ctx $ Nfs.truncateAsync ctx path off
, syncFTruncate = \ctx fh off ->
sync_wrap ctx $ Nfs.ftruncateAsync fh off
, syncFStat = \ctx fh ->
sync_wrap ctx $ Nfs.fstatAsync fh
, syncLSeek = \ctx fh off mode ->
sync_wrap ctx $ Nfs.lseekAsync fh off mode }
|
aredlich/hnfs
|
hnfs-tester/Async.hs
|
lgpl-2.1
| 4,543
| 0
| 18
| 1,408
| 1,245
| 632
| 613
| 89
| 2
|
module Data.GI.CodeGen.Code
( Code(..)
, ModuleInfo(..)
, ModuleFlag(..)
, BaseCodeGen
, CodeGen
, ExcCodeGen
, CGError(..)
, genCode
, evalCodeGen
, writeModuleTree
, listModuleTree
, codeToText
, transitiveModuleDeps
, minBaseVersion
, BaseVersion(..)
, showBaseVersion
, ModuleName
, registerNSDependency
, qualified
, getDeps
, recurseWithAPIs
, handleCGExc
, describeCGError
, notImplementedError
, badIntroError
, missingInfoError
, indent
, bline
, line
, blank
, group
, hsBoot
, submodule
, setLanguagePragmas
, setGHCOptions
, setModuleFlags
, setModuleMinBase
, addModuleDocumentation
, exportToplevel
, exportModule
, exportDecl
, exportMethod
, exportProperty
, exportSignal
, findAPI
, getAPI
, findAPIByName
, getAPIs
, config
, currentModule
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
import Data.Monoid (Monoid(..))
#endif
import Control.Monad.Reader
import Control.Monad.State.Strict
import Control.Monad.Except
import qualified Data.Foldable as F
import Data.Maybe (fromMaybe, catMaybes)
import Data.Monoid ((<>))
import Data.Sequence (Seq, ViewL ((:<)), (><), (|>), (<|))
import qualified Data.Map.Strict as M
import qualified Data.Sequence as S
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import System.Directory (createDirectoryIfMissing)
import System.FilePath (joinPath, takeDirectory)
import Data.GI.CodeGen.API (API, Name(..))
import Data.GI.CodeGen.Config (Config(..))
import Data.GI.CodeGen.Type (Type(..))
import Data.GI.CodeGen.Util (tshow, terror, padTo)
import Data.GI.CodeGen.ProjectInfo (authors, license, maintainers)
import Data.GI.GIR.Documentation (Documentation(..))
data Code
= NoCode -- ^ No code
| Line Text -- ^ A single line, indented to current indentation
| Indent Code -- ^ Indented region
| Sequence (Seq Code) -- ^ The basic sequence of code
| Group Code -- ^ A grouped set of lines
deriving (Eq, Show)
instance Monoid Code where
mempty = NoCode
NoCode `mappend` NoCode = NoCode
x `mappend` NoCode = x
NoCode `mappend` x = x
(Sequence a) `mappend` (Sequence b) = Sequence (a >< b)
(Sequence a) `mappend` b = Sequence (a |> b)
a `mappend` (Sequence b) = Sequence (a <| b)
a `mappend` b = Sequence (a <| b <| S.empty)
type Deps = Set.Set Text
type ModuleName = [Text]
-- | Subsection of the haddock documentation where the export should
-- be located.
type HaddockSection = Text
-- | Symbol to export.
type SymbolName = Text
-- | Possible exports for a given module. Every export type
-- constructor has two parameters: the section of the haddocks where
-- it should appear, and the symbol name to export in the export list
-- of the module.
data Export = Export {
exportType :: ExportType -- ^ Which kind of export.
, exportSymbol :: SymbolName -- ^ Actual symbol to export.
} deriving (Show, Eq, Ord)
-- | Possible types of exports.
data ExportType = ExportTypeDecl -- ^ A type declaration.
| ExportToplevel -- ^ An export in no specific section.
| ExportMethod HaddockSection -- ^ A method for a struct/union, etc.
| ExportProperty HaddockSection -- ^ A property for an object/interface.
| ExportSignal HaddockSection -- ^ A signal for an object/interface.
| ExportModule -- ^ Reexport of a whole module.
deriving (Show, Eq, Ord)
-- | Information on a generated module.
data ModuleInfo = ModuleInfo {
moduleName :: ModuleName -- ^ Full module name: ["GI", "Gtk", "Label"].
, moduleCode :: Code -- ^ Generated code for the module.
, bootCode :: Code -- ^ Interface going into the .hs-boot file.
, submodules :: M.Map Text ModuleInfo -- ^ Indexed by the relative
-- module name.
, moduleDeps :: Deps -- ^ Set of dependencies for this module.
, moduleExports :: Seq Export -- ^ Exports for the module.
, qualifiedImports :: Set.Set ModuleName -- ^ Qualified (source) imports
, modulePragmas :: Set.Set Text -- ^ Set of language pragmas for the module.
, moduleGHCOpts :: Set.Set Text -- ^ GHC options for compiling the module.
, moduleFlags :: Set.Set ModuleFlag -- ^ Flags for the module.
, moduleDoc :: Maybe Text -- ^ Documentation for the module.
, moduleMinBase :: BaseVersion -- ^ Minimal version of base the
-- module will work on.
}
-- | Flags for module code generation.
data ModuleFlag = ImplicitPrelude -- ^ Use the standard prelude,
-- instead of the haskell-gi-base short one.
deriving (Show, Eq, Ord)
-- | Minimal version of base supported by a given module.
data BaseVersion = Base47 -- ^ 4.7.0
| Base48 -- ^ 4.8.0
deriving (Show, Eq, Ord)
-- | A `Text` representation of the given base version bound.
showBaseVersion :: BaseVersion -> Text
showBaseVersion Base47 = "4.7"
showBaseVersion Base48 = "4.8"
-- | Generate the empty module.
emptyModule :: ModuleName -> ModuleInfo
emptyModule m = ModuleInfo { moduleName = m
, moduleCode = NoCode
, bootCode = NoCode
, submodules = M.empty
, moduleDeps = Set.empty
, moduleExports = S.empty
, qualifiedImports = Set.empty
, modulePragmas = Set.empty
, moduleGHCOpts = Set.empty
, moduleFlags = Set.empty
, moduleDoc = Nothing
, moduleMinBase = Base47
}
-- | Information for the code generator.
data CodeGenConfig = CodeGenConfig {
hConfig :: Config -- ^ Ambient config.
, loadedAPIs :: M.Map Name API -- ^ APIs available to the generator.
}
data CGError = CGErrorNotImplemented Text
| CGErrorBadIntrospectionInfo Text
| CGErrorMissingInfo Text
deriving (Show)
type BaseCodeGen excType a =
ReaderT CodeGenConfig (StateT ModuleInfo (ExceptT excType IO)) a
-- | The code generator monad, for generators that cannot throw
-- errors. The fact that they cannot throw errors is encoded in the
-- forall, which disallows any operation on the error, except
-- discarding it or passing it along without inspecting. This last
-- operation is useful in order to allow embedding `CodeGen`
-- computations inside `ExcCodeGen` computations, while disallowing
-- the opposite embedding without explicit error handling.
type CodeGen a = forall e. BaseCodeGen e a
-- | Code generators that can throw errors.
type ExcCodeGen a = BaseCodeGen CGError a
-- | Run a `CodeGen` with given `Config` and initial `ModuleInfo`,
-- returning either the resulting exception, or the result and final
-- state of the codegen.
runCodeGen :: BaseCodeGen e a -> CodeGenConfig -> ModuleInfo ->
IO (Either e (a, ModuleInfo))
runCodeGen cg cfg state = runExceptT (runStateT (runReaderT cg cfg) state)
-- | This is useful when we plan to run a subgenerator, and `mconcat` the
-- result to the original structure later.
cleanInfo :: ModuleInfo -> ModuleInfo
cleanInfo info = info { moduleCode = NoCode, submodules = M.empty,
bootCode = NoCode, moduleExports = S.empty,
qualifiedImports = Set.empty,
moduleDoc = Nothing, moduleMinBase = Base47 }
-- | Run the given code generator using the state and config of an
-- ambient CodeGen, but without adding the generated code to
-- `moduleCode`, instead returning it explicitly.
recurseCG :: BaseCodeGen e a -> BaseCodeGen e (a, Code)
recurseCG cg = do
cfg <- ask
oldInfo <- get
-- Start the subgenerator with no code and no submodules.
let info = cleanInfo oldInfo
liftIO (runCodeGen cg cfg info) >>= \case
Left e -> throwError e
Right (r, new) -> put (mergeInfoState oldInfo new) >>
return (r, moduleCode new)
-- | Like `recurseCG`, giving explicitly the set of loaded APIs for the
-- subgenerator.
recurseWithAPIs :: M.Map Name API -> CodeGen () -> CodeGen ()
recurseWithAPIs apis cg = do
cfg <- ask
oldInfo <- get
-- Start the subgenerator with no code and no submodules.
let info = cleanInfo oldInfo
cfg' = cfg {loadedAPIs = apis}
liftIO (runCodeGen cg cfg' info) >>= \case
Left e -> throwError e
Right (_, new) -> put (mergeInfo oldInfo new)
-- | Merge everything but the generated code for the two given `ModuleInfo`.
mergeInfoState :: ModuleInfo -> ModuleInfo -> ModuleInfo
mergeInfoState oldState newState =
let newDeps = Set.union (moduleDeps oldState) (moduleDeps newState)
newSubmodules = M.unionWith mergeInfo (submodules oldState) (submodules newState)
newExports = moduleExports oldState <> moduleExports newState
newImports = qualifiedImports oldState <> qualifiedImports newState
newPragmas = Set.union (modulePragmas oldState) (modulePragmas newState)
newGHCOpts = Set.union (moduleGHCOpts oldState) (moduleGHCOpts newState)
newFlags = Set.union (moduleFlags oldState) (moduleFlags newState)
newBoot = bootCode oldState <> bootCode newState
newDoc = moduleDoc oldState <> moduleDoc newState
newMinBase = max (moduleMinBase oldState) (moduleMinBase newState)
in oldState {moduleDeps = newDeps, submodules = newSubmodules,
moduleExports = newExports, qualifiedImports = newImports,
modulePragmas = newPragmas,
moduleGHCOpts = newGHCOpts, moduleFlags = newFlags,
bootCode = newBoot, moduleDoc = newDoc,
moduleMinBase = newMinBase }
-- | Merge the infos, including code too.
mergeInfo :: ModuleInfo -> ModuleInfo -> ModuleInfo
mergeInfo oldInfo newInfo =
let info = mergeInfoState oldInfo newInfo
in info { moduleCode = moduleCode oldInfo <> moduleCode newInfo }
-- | Add the given submodule to the list of submodules of the current
-- module.
addSubmodule :: Text -> ModuleInfo -> ModuleInfo -> ModuleInfo
addSubmodule modName submodule current = current { submodules = M.insertWith mergeInfo modName submodule (submodules current)}
-- | Run the given CodeGen in order to generate a single submodule of the
-- current module. Note that we do not generate the submodule if the
-- code generator generated no code and the module does not have
-- submodules.
submodule' :: Text -> BaseCodeGen e () -> BaseCodeGen e ()
submodule' modName cg = do
cfg <- ask
oldInfo <- get
let info = emptyModule (moduleName oldInfo ++ [modName])
liftIO (runCodeGen cg cfg info) >>= \case
Left e -> throwError e
Right (_, smInfo) -> if moduleCode smInfo == NoCode &&
M.null (submodules smInfo)
then return ()
else modify' (addSubmodule modName smInfo)
-- | Run the given CodeGen in order to generate a submodule (specified
-- as an ordered list) of the current module.
submodule :: [Text] -> BaseCodeGen e () -> BaseCodeGen e ()
submodule [] cg = cg
submodule (m:ms) cg = submodule' m (submodule ms cg)
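-- Illustrative sketch, not part of the original module: generating a line of
-- code into a nested submodule. The module path and the generated text are
-- hypothetical; Text literals assume the package's OverloadedStrings setting.
exampleSubmodule :: CodeGen ()
exampleSubmodule = submodule ["Objects", "Label"] $
  line "-- generated code for the Label submodule goes here"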
-- | Try running the given `action`, and if it fails run `fallback`
-- instead.
handleCGExc :: (CGError -> CodeGen a) -> ExcCodeGen a -> CodeGen a
handleCGExc fallback
action = do
cfg <- ask
oldInfo <- get
let info = cleanInfo oldInfo
liftIO (runCodeGen action cfg info) >>= \case
Left e -> fallback e
Right (r, newInfo) -> do
put (mergeInfo oldInfo newInfo)
return r
-- | Return the currently loaded set of dependencies.
getDeps :: CodeGen Deps
getDeps = moduleDeps <$> get
-- | Return the ambient configuration for the code generator.
config :: CodeGen Config
config = hConfig <$> ask
-- | Return the name of the current module.
currentModule :: CodeGen Text
currentModule = do
s <- get
return (T.intercalate "." (moduleName s))
-- | Return the list of APIs available to the generator.
getAPIs :: CodeGen (M.Map Name API)
getAPIs = loadedAPIs <$> ask
-- | Due to the `forall` in the definition of `CodeGen`, if we want to
-- run the monad transformer stack until we get an `IO` action, our
-- only option is ignoring the possible error code from
-- `runExceptT`. This is perfectly safe, since there is no way to
-- construct a computation in the `CodeGen` monad that throws an
-- exception, due to the higher rank type.
unwrapCodeGen :: CodeGen a -> CodeGenConfig -> ModuleInfo ->
IO (a, ModuleInfo)
unwrapCodeGen cg cfg info =
runCodeGen cg cfg info >>= \case
Left _ -> error "unwrapCodeGen:: The impossible happened!"
Right (r, newInfo) -> return (r, newInfo)
-- | Like `evalCodeGen`, but discard the resulting output value.
genCode :: Config -> M.Map Name API -> ModuleName -> CodeGen () ->
IO ModuleInfo
genCode cfg apis mName cg = snd <$> evalCodeGen cfg apis mName cg
-- | Run a code generator, and return the information for the
-- generated module together with the return value of the generator.
evalCodeGen :: Config -> M.Map Name API -> ModuleName -> CodeGen a ->
IO (a, ModuleInfo)
evalCodeGen cfg apis mName cg = do
let initialInfo = emptyModule mName
cfg' = CodeGenConfig {hConfig = cfg, loadedAPIs = apis}
unwrapCodeGen cg cfg' initialInfo
-- | Mark the given dependency as used by the module.
registerNSDependency :: Text -> CodeGen ()
registerNSDependency name = do
deps <- getDeps
unless (Set.member name deps) $ do
let newDeps = Set.insert name deps
modify' $ \s -> s {moduleDeps = newDeps}
-- | Return the transitive set of dependencies, i.e. the union of
-- those of the module and (transitively) its submodules.
transitiveModuleDeps :: ModuleInfo -> Deps
transitiveModuleDeps minfo =
Set.unions (moduleDeps minfo
: map transitiveModuleDeps (M.elems $ submodules minfo))
-- | Given a module name and a symbol in the module (including a
-- proper namespace), return a qualified name for the symbol.
qualified :: ModuleName -> Name -> CodeGen Text
qualified mn (Name ns s) = do
cfg <- config
-- Make sure the module is listed as a dependency.
when (modName cfg /= Just ns) $
registerNSDependency ns
minfo <- get
if mn == moduleName minfo
then return s
else do
qm <- qualifiedImport mn
return (qm <> "." <> s)
-- | Import the given module name qualified (as a source import if the
-- namespace is the same as the current one), and return the name
-- under which the module was imported.
qualifiedImport :: ModuleName -> CodeGen Text
qualifiedImport mn = do
modify' $ \s -> s {qualifiedImports = Set.insert mn (qualifiedImports s)}
return (qualifiedModuleName mn)
-- | Construct a simplified version of the module name, suitable for a
-- qualified import.
qualifiedModuleName :: ModuleName -> Text
qualifiedModuleName ["GI", ns, "Objects", o] = ns <> "." <> o
qualifiedModuleName ["GI", ns, "Interfaces", i] = ns <> "." <> i
qualifiedModuleName ["GI", ns, "Structs", s] = ns <> "." <> s
qualifiedModuleName ["GI", ns, "Unions", u] = ns <> "." <> u
qualifiedModuleName ("GI" : rest) = dotModuleName rest
qualifiedModuleName mn = dotModuleName mn
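-- Illustrative sketch, not part of the original module: the shortened name
-- produced for a qualified import of an object module. Text literals assume
-- the package's OverloadedStrings setting.
exampleQualifiedName :: Text
exampleQualifiedName = qualifiedModuleName ["GI", "Gtk", "Objects", "Label"]
-- "Gtk.Label"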
-- | Return the minimal base version supported by the module and all
-- its submodules.
minBaseVersion :: ModuleInfo -> BaseVersion
minBaseVersion minfo =
maximum (moduleMinBase minfo
: map minBaseVersion (M.elems $ submodules minfo))
-- | Give a friendly textual description of the error for presenting
-- to the user.
describeCGError :: CGError -> Text
describeCGError (CGErrorNotImplemented e) = "Not implemented: " <> tshow e
describeCGError (CGErrorBadIntrospectionInfo e) = "Bad introspection data: " <> tshow e
describeCGError (CGErrorMissingInfo e) = "Missing info: " <> tshow e
notImplementedError :: Text -> ExcCodeGen a
notImplementedError s = throwError $ CGErrorNotImplemented s
badIntroError :: Text -> ExcCodeGen a
badIntroError s = throwError $ CGErrorBadIntrospectionInfo s
missingInfoError :: Text -> ExcCodeGen a
missingInfoError s = throwError $ CGErrorMissingInfo s
findAPI :: Type -> CodeGen (Maybe API)
findAPI TError = Just <$> findAPIByName (Name "GLib" "Error")
findAPI (TInterface ns n) = Just <$> findAPIByName (Name ns n)
findAPI _ = return Nothing
-- | Find the API associated with a given type. If the API cannot be
-- found this raises an `error`.
getAPI :: Type -> CodeGen API
getAPI t = findAPI t >>= \case
Just a -> return a
Nothing -> terror ("Could not resolve type \"" <> tshow t <> "\".")
findAPIByName :: Name -> CodeGen API
findAPIByName n@(Name ns _) = do
apis <- getAPIs
case M.lookup n apis of
Just api -> return api
Nothing ->
terror $ "couldn't find API description for " <> ns <> "." <> name n
-- | Add some code to the current generator.
tellCode :: Code -> CodeGen ()
tellCode c = modify' (\s -> s {moduleCode = moduleCode s <> c})
-- | Print out a (newline-terminated) line.
line :: Text -> CodeGen ()
line = tellCode . Line
-- | Print out the given line both to the normal module, and to the
-- HsBoot file.
bline :: Text -> CodeGen ()
bline l = hsBoot (line l) >> line l
-- | A blank line
blank :: CodeGen ()
blank = line ""
-- | Increase the indent level for code generation.
indent :: BaseCodeGen e a -> BaseCodeGen e a
indent cg = do
(x, code) <- recurseCG cg
tellCode (Indent code)
return x
-- | Group a set of related code.
group :: BaseCodeGen e a -> BaseCodeGen e a
group cg = do
(x, code) <- recurseCG cg
tellCode (Group code)
blank
return x
-- | Write the given code into the .hs-boot file for the current module.
hsBoot :: BaseCodeGen e a -> BaseCodeGen e a
hsBoot cg = do
(x, code) <- recurseCG cg
modify' (\s -> s{bootCode = bootCode s <> code})
return x
-- | Add an export to the current module.
export :: Export -> CodeGen ()
export e =
modify' $ \s -> s{moduleExports = moduleExports s |> e}
-- | Reexport a whole module.
exportModule :: SymbolName -> CodeGen ()
exportModule m = export (Export ExportModule m)
-- | Export a toplevel (i.e. belonging to no section) symbol.
exportToplevel :: SymbolName -> CodeGen ()
exportToplevel t = export (Export ExportToplevel t)
-- | Add a type declaration-related export.
exportDecl :: SymbolName -> CodeGen ()
exportDecl d = export (Export ExportTypeDecl d)
-- | Add a method export under the given section.
exportMethod :: HaddockSection -> SymbolName -> CodeGen ()
exportMethod s n = export (Export (ExportMethod s) n)
-- | Add a property-related export under the given section.
exportProperty :: HaddockSection -> SymbolName -> CodeGen ()
exportProperty s n = export (Export (ExportProperty s) n)
-- | Add a signal-related export under the given section.
exportSignal :: HaddockSection -> SymbolName -> CodeGen ()
exportSignal s n = export (Export (ExportSignal s) n)
-- | Set the language pragmas for the current module.
setLanguagePragmas :: [Text] -> CodeGen ()
setLanguagePragmas ps =
modify' $ \s -> s{modulePragmas = Set.fromList ps}
-- | Set the GHC options for compiling this module (in a OPTIONS_GHC pragma).
setGHCOptions :: [Text] -> CodeGen ()
setGHCOptions opts =
modify' $ \s -> s{moduleGHCOpts = Set.fromList opts}
-- | Set the given flags for the module.
setModuleFlags :: [ModuleFlag] -> CodeGen ()
setModuleFlags flags =
modify' $ \s -> s{moduleFlags = Set.fromList flags}
-- | Set the minimum base version supported by the current module.
setModuleMinBase :: BaseVersion -> CodeGen ()
setModuleMinBase v =
modify' $ \s -> s{moduleMinBase = max v (moduleMinBase s)}
-- | Add the given text to the module-level documentation for the
-- module being generated.
addModuleDocumentation :: Maybe Documentation -> CodeGen ()
addModuleDocumentation Nothing = return ()
addModuleDocumentation (Just doc) =
modify' $ \s -> s{moduleDoc = moduleDoc s <> Just (docText doc)}
-- | Return a text representation of the `Code`.
codeToText :: Code -> Text
codeToText c = T.concat $ str 0 c []
where
str :: Int -> Code -> [Text] -> [Text]
str _ NoCode cont = cont
str n (Line s) cont = paddedLine n s : cont
str n (Indent c) cont = str (n + 1) c cont
str n (Sequence s) cont = deseq n (S.viewl s) cont
str n (Group c) cont = str n c cont
deseq _ S.EmptyL cont = cont
deseq n (c :< cs) cont = str n c (deseq n (S.viewl cs) cont)
-- | Pad a line to the given number of leading spaces, and add a
-- newline at the end.
paddedLine :: Int -> Text -> Text
paddedLine n s = T.replicate (n * 4) " " <> s <> "\n"
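-- Illustrative sketch, not part of the original module: how a small nested
-- 'Code' value renders through 'codeToText' and the 4-space indentation of
-- 'paddedLine'. Text literals assume the package's OverloadedStrings setting.
exampleRendered :: Text
exampleRendered = codeToText (Line "foo" <> Indent (Line "bar"))
-- "foo\n    bar\n"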
-- | Put a (padded) comma at the end of the text.
comma :: Text -> Text
comma s = padTo 40 s <> ","
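-- 'padTo' is defined elsewhere in this module and is assumed here to
-- right-pad with spaces, so export-list entries line up in a 40-wide column
-- before the trailing comma.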
-- | Format the list of exported modules.
formatExportedModules :: [Export] -> Maybe Text
formatExportedModules [] = Nothing
formatExportedModules exports =
Just . T.concat . map ( paddedLine 1
. comma
. ("module " <>)
. exportSymbol)
. filter ((== ExportModule) . exportType) $ exports
-- | Format the toplevel exported symbols.
formatToplevel :: [Export] -> Maybe Text
formatToplevel [] = Nothing
formatToplevel exports =
Just . T.concat . map (paddedLine 1 . comma . exportSymbol)
. filter ((== ExportToplevel) . exportType) $ exports
-- | Format the type declarations section.
formatTypeDecls :: [Export] -> Maybe Text
formatTypeDecls exports =
let exportedTypes = filter ((== ExportTypeDecl) . exportType) exports
in if exportedTypes == []
then Nothing
else Just . T.unlines $ [ "-- * Exported types"
, T.concat . map ( paddedLine 1
. comma
. exportSymbol )
$ exportedTypes ]
-- | Format a given section made of subsections.
formatSection :: Text -> (Export -> Maybe (HaddockSection, SymbolName)) ->
[Export] -> Maybe Text
formatSection section filter exports =
if M.null exportedSubsections
then Nothing
else Just . T.unlines $ [" -- * " <> section
, ( T.unlines
. map formatSubsection
. M.toList ) exportedSubsections]
where
filteredExports :: [(HaddockSection, SymbolName)]
filteredExports = catMaybes (map filter exports)
exportedSubsections :: M.Map HaddockSection (Set.Set SymbolName)
exportedSubsections = foldr extract M.empty filteredExports
extract :: (HaddockSection, SymbolName) ->
M.Map Text (Set.Set Text) -> M.Map Text (Set.Set Text)
extract (subsec, m) secs =
M.insertWith Set.union subsec (Set.singleton m) secs
formatSubsection :: (HaddockSection, Set.Set SymbolName) -> Text
formatSubsection (subsec, symbols) =
T.unlines [ "-- ** " <> subsec
, ( T.concat
. map (paddedLine 1 . comma)
. Set.toList ) symbols]
-- | Format the list of methods.
formatMethods :: [Export] -> Maybe Text
formatMethods = formatSection "Methods" toMethod
where toMethod :: Export -> Maybe (HaddockSection, SymbolName)
toMethod (Export (ExportMethod s) m) = Just (s, m)
toMethod _ = Nothing
-- | Format the list of properties.
formatProperties :: [Export] -> Maybe Text
formatProperties = formatSection "Properties" toProperty
where toProperty :: Export -> Maybe (HaddockSection, SymbolName)
toProperty (Export (ExportProperty s) m) = Just (s, m)
toProperty _ = Nothing
-- | Format the list of signals.
formatSignals :: [Export] -> Maybe Text
formatSignals = formatSection "Signals" toSignal
where toSignal :: Export -> Maybe (HaddockSection, SymbolName)
toSignal (Export (ExportSignal s) m) = Just (s, m)
toSignal _ = Nothing
-- | Format the given export list. This is just the inside of the
-- parenthesis.
formatExportList :: [Export] -> Text
formatExportList exports =
T.unlines . catMaybes $ [ formatExportedModules exports
, formatToplevel exports
, formatTypeDecls exports
, formatMethods exports
, formatProperties exports
, formatSignals exports ]
-- | Write down the list of language pragmas.
languagePragmas :: [Text] -> Text
languagePragmas [] = ""
languagePragmas ps = "{-# LANGUAGE " <> T.intercalate ", " ps <> " #-}\n"
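-- For example (illustrative):
--
-- > languagePragmas ["ScopedTypeVariables", "LambdaCase"]
-- >   == "{-# LANGUAGE ScopedTypeVariables, LambdaCase #-}\n"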
-- | Write down the list of GHC options.
ghcOptions :: [Text] -> Text
ghcOptions [] = ""
ghcOptions opts = "{-# OPTIONS_GHC " <> T.intercalate ", " opts <> " #-}\n"
-- | Standard fields for every module.
standardFields :: Text
standardFields = T.unlines [ "Copyright : " <> authors
, "License : " <> license
, "Maintainer : " <> maintainers ]
-- | The haddock header for the module, including optionally a description.
moduleHaddock :: Maybe Text -> Text
moduleHaddock Nothing = T.unlines ["{- |", standardFields <> "-}"]
moduleHaddock (Just description) = T.unlines ["{- |", standardFields,
description, "-}"]
-- | Generic module prelude. We reexport all of the submodules.
modulePrelude :: Text -> [Export] -> [Text] -> Text
modulePrelude name [] [] = "module " <> name <> " () where\n"
modulePrelude name exports [] =
"module " <> name <> "\n ( "
<> formatExportList exports
<> " ) where\n"
modulePrelude name [] reexportedModules =
"module " <> name <> "\n ( "
<> formatExportList (map (Export ExportModule) reexportedModules)
<> " ) where\n\n"
<> T.unlines (map ("import " <>) reexportedModules)
modulePrelude name exports reexportedModules =
"module " <> name <> "\n ( "
<> formatExportList (map (Export ExportModule) reexportedModules)
<> "\n"
<> formatExportList exports
<> " ) where\n\n"
<> T.unlines (map ("import " <>) reexportedModules)
-- | Code for loading the needed dependencies. One needs to give the
-- prefix for the namespace being currently generated, modules with
-- this prefix will be imported as {-# SOURCE #-}, and otherwise will
-- be imported normally.
importDeps :: ModuleName -> [ModuleName] -> Text
importDeps _ [] = ""
importDeps prefix deps = T.unlines . map toImport $ deps
where toImport :: ModuleName -> Text
toImport dep = let impSt = if importSource dep
then "import {-# SOURCE #-} qualified "
else "import qualified "
in impSt <> dotModuleName dep <>
" as " <> qualifiedModuleName dep
importSource :: ModuleName -> Bool
importSource ["GI", _, "Callbacks"] = False
importSource mn = take (length prefix) mn == prefix
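-- As an illustration (module names here are hypothetical): with
-- @prefix = ["GI", "Gtk"]@, the dependency @["GI", "Gtk", "Enums"]@ is
-- imported with {-# SOURCE #-}, @["GI", "GLib", "Structs"]@ gets a plain
-- qualified import, and any @["GI", _, "Callbacks"]@ module is always
-- imported normally.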
-- | Standard imports.
moduleImports :: Text
moduleImports = T.unlines [ "import Data.GI.Base.ShortPrelude"
, "import qualified Data.GI.Base.Overloading as O"
, "import qualified Prelude as P"
, ""
, "import qualified Data.GI.Base.Attributes as GI.Attributes"
, "import qualified Data.Text as T"
, "import qualified Data.ByteString.Char8 as B"
, "import qualified Data.Map as Map" ]
-- | Write to disk the code for a module, under the given base
-- directory. Does not write submodules recursively; for that, use
-- `writeModuleTree`.
writeModuleInfo :: Bool -> Maybe FilePath -> ModuleInfo -> IO ()
writeModuleInfo verbose dirPrefix minfo = do
let submoduleNames = map (moduleName) (M.elems (submodules minfo))
-- We reexport any submodules.
submoduleExports = map dotModuleName submoduleNames
fname = moduleNameToPath dirPrefix (moduleName minfo) ".hs"
dirname = takeDirectory fname
code = codeToText (moduleCode minfo)
pragmas = languagePragmas (Set.toList $ modulePragmas minfo)
optionsGHC = ghcOptions (Set.toList $ moduleGHCOpts minfo)
prelude = modulePrelude (dotModuleName $ moduleName minfo)
(F.toList (moduleExports minfo))
submoduleExports
imports = if ImplicitPrelude `Set.member` moduleFlags minfo
then ""
else moduleImports
pkgRoot = take 2 (moduleName minfo)
deps = importDeps pkgRoot (Set.toList $ qualifiedImports minfo)
haddock = moduleHaddock (moduleDoc minfo)
when verbose $ putStrLn ((T.unpack . dotModuleName . moduleName) minfo
++ " -> " ++ fname)
createDirectoryIfMissing True dirname
TIO.writeFile fname (T.unlines [pragmas, optionsGHC, haddock, prelude,
imports, deps, code])
when (bootCode minfo /= NoCode) $ do
let bootFName = moduleNameToPath dirPrefix (moduleName minfo) ".hs-boot"
TIO.writeFile bootFName (genHsBoot minfo)
-- | Generate the .hs-boot file for the given module.
genHsBoot :: ModuleInfo -> Text
genHsBoot minfo =
"module " <> (dotModuleName . moduleName) minfo <> " where\n\n" <>
moduleImports <> "\n" <>
codeToText (bootCode minfo)
-- | Construct the filename corresponding to the given module.
moduleNameToPath :: Maybe FilePath -> ModuleName -> FilePath -> FilePath
moduleNameToPath dirPrefix mn ext =
joinPath (fromMaybe "" dirPrefix : map T.unpack mn) ++ ext
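-- For instance (illustrative, on a POSIX-style filesystem):
--
-- > moduleNameToPath (Just "src") ["GI", "Gtk"] ".hs" == "src/GI/Gtk.hs"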
-- | Turn an abstract module name into its dotted representation. For
-- instance, ["GI", "Gtk", "Types"] -> GI.Gtk.Types.
dotModuleName :: ModuleName -> Text
dotModuleName mn = T.intercalate "." mn
-- | Write down the code for a module and its submodules to disk under
-- the given base directory. It returns the list of written modules.
writeModuleTree :: Bool -> Maybe FilePath -> ModuleInfo -> IO [Text]
writeModuleTree verbose dirPrefix minfo = do
submoduleNames <- concat <$> forM (M.elems (submodules minfo))
(writeModuleTree verbose dirPrefix)
writeModuleInfo verbose dirPrefix minfo
return $ (dotModuleName (moduleName minfo) : submoduleNames)
-- | Return the list of modules `writeModuleTree` would write, without
-- actually writing anything to disk.
listModuleTree :: ModuleInfo -> [Text]
listModuleTree minfo =
let submoduleNames = concatMap listModuleTree (M.elems (submodules minfo))
in dotModuleName (moduleName minfo) : submoduleNames
|
hamishmack/haskell-gi
|
lib/Data/GI/CodeGen/Code.hs
|
lgpl-2.1
| 31,231
| 0
| 16
| 8,020
| 7,290
| 3,877
| 3,413
| -1
| -1
|
-- | Example of NlpSolver
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
module Main where
import GHC.Generics ( Generic1 )
import Text.Printf ( printf )
import Casadi.MX ( MX )
import Dyno.Vectorize ( Vectorize, Id(..), None(..), vpure, vapply )
import Dyno.View.View
import Dyno.View.M ( vcat, vsplit )
import Dyno.Nlp
import Dyno.NlpSolver
import Dyno.NlpUtils
import Dyno.Solvers
data X a = X a a deriving (Functor, Generic1, Show)
data G a = G a deriving (Functor, Generic1, Show)
instance Applicative X where {pure = vpure; (<*>) = vapply}
instance Applicative G where {pure = vpure; (<*>) = vapply}
instance Vectorize X
instance Vectorize G
myNlp :: Nlp (JV X) (JV None) (JV G) MX
myNlp = Nlp { nlpFG = fg
, nlpIn =
NlpIn
{ nlpBX = catJV bx
, nlpBG = catJV bg
, nlpX0 = catJV x0
, nlpP = catJV None
, nlpLamX0 = Nothing
, nlpLamG0 = Nothing
}
, nlpScaleF = Just 9.86
, nlpScaleX = Just $ catJV $ (X (4.7e-3) (4.7e4))
, nlpScaleG = Just $ catJV $ (G 4.7)
-- , nlpScaleF = Just 1
-- , nlpScaleX = Just $ catJV (X 1 1)
-- , nlpScaleG = Just $ catJV (G 1) -- 1)
}
where
x0 :: X Double
x0 = X 0 0
bx :: X Bounds
bx = pure (Nothing, Nothing)
bg :: G Bounds
bg = G (Just 2, Nothing)
fg :: J (JV X) MX -> J (JV None) MX -> (S MX, J (JV G) MX)
fg xy _ = (f, vcat g)
where
X x y = vsplit xy
x' = 1e3*x
y' = 1e-4*y
f = x'**2 + y'**2 + 0.1*x' * y'
g = G (x' + y')
solver :: Solver
solver = ipoptSolver { options = [ ("print_time", GBool False)
, ("ipopt.linear_solver", GString "ma86")
--, ("print_level", GInt 0)
] }
main :: IO ()
main = do
(_, eopt) <- solveNlp "nlp_solver_ex" solver myNlp Nothing
let opt = case eopt of
Left msg -> error msg
Right r -> r
Id obj = splitJV (fOpt opt)
x = splitJV (xOpt opt)
g = splitJV (gOpt opt)
-- Sdv obj' x' g' = split (fmapJ exp xopt)
-- Id obj = splitJV obj'
-- x = splitJV x'
-- g = splitJV g'
putStrLn "***********************************************************"
putStrLn "solution:"
print opt
putStrLn "***********************************************************"
putStrLn "scaling:"
putStrLn $ "f: " ++ (printf "%.2e" obj)
putStrLn $ "x: " ++ show (fmap (printf "%.2e" :: Double -> String) x)
putStrLn $ "g: " ++ show (fmap (printf "%.2e" :: Double -> String) g)
putStrLn "***********************************************************"
|
ghorn/dynobud
|
dynobud/examples/NlpSolverEx.hs
|
lgpl-3.0
| 2,792
| 0
| 14
| 909
| 889
| 480
| 409
| 67
| 2
|
module Polymorphism where
import Data.List (sort)
data Person = Person Bool deriving Show
printPerson :: Person -> IO ()
printPerson person = putStrLn (show person)
data Mood = Blah | Woot deriving (Show, Eq)
-- settleDown :: Mood -> Mood
settleDown x = if x == Woot
then Blah
else x
type Subject = String
type Verb = String
type Object = String
data Sentence = Sentence Subject Verb Object deriving (Eq, Show)
s1 = Sentence "dogs" "drool" "allover"
s2 = Sentence "Julie" "loves" "dogs"
data Rocks = Rocks String deriving (Eq, Show)
data Yeah = Yeah Bool deriving (Eq, Show)
data Papu = Papu Rocks Yeah deriving (Eq, Show)
equalityForall :: Papu -> Papu -> Bool
equalityForall p p' = p == p'
-- comparePapus :: Papu -> Papu -> Bool
-- comparePapus p p' = p > p'
-- f :: Float
f :: RealFrac a => a
f = 1.0
freud :: Ord a => a -> a
freud x = x
freud' :: Int -> Int
freud' x = x
myX = 1 :: Int
sigmund :: a -> Int
sigmund x = myX
-- sigmund' :: Num a => a -> a
-- sigmund' x = myX
--jung :: Ord a => [a] -> a
jung :: [Int] -> Int
jung xs = head (sort xs)
-- young :: [Char] -> Char
young :: Ord a => [a] -> a
young xs = head (sort xs)
chk :: Eq b => (a -> b) -> a -> b -> Bool
chk fnc x y = fnc x == y
|
m3mitsuppe/haskell
|
exercises/Programming Haskell Book Exercises.hsproj/Polymorphism.hs
|
unlicense
| 1,247
| 1
| 10
| 323
| 449
| 244
| 205
| -1
| -1
|
module Main where
import System.IO
import Control.Monad.State
import Control.Monad (forever)
import System.Environment ( getArgs )
import PGF
import PGF.Binary hiding (version)
import PGF.Printer
import Data.Binary
import Text.Show.Pretty
import Language.Porter
import qualified Data.Map as Map
import Transform
import TransformInternal
import Config
import Paths_pgftransform ( version )
-- synonyms :: [(String, String)]
-- synonyms = [ ("wine", "vino")
-- , ("cheese", "cheddar")
-- , ("cheese", "brie")
-- , ("that", "thar")
-- , ("that", "the")
-- , ("is", "be")
-- , ("is", "are")
-- , ("is", "iss")
-- ]
-- transform :: State Parser ()
-- transform = do
-- trToParser $ do
-- addSynonyms synonyms
-- matchStems
languageStr = "Eng"
transform :: Transform
transform = do
trToParser $ do
addSynonyms [ ("wine", "vino") ]
-- , ("that", "thar")
-- , ("that", "the")
-- , ("is", "be")
-- , ("is", "are")
-- , ("warm", "toasty")
-- ]
addHyponyms [ ("cheese", "cheddar")
, ("cheese", "brie") ]
-- addHypernyms [ ("cheese", "food")
-- , ("cheese", "snack")
-- , ("wine", "drink")
-- , ("wine", "beverage") ]
rewriteInput [ ("I'd", "I would")
, ("would like", "want")
, ("pizza", "a pizza")
, ("a a", "a")
]
matchStems
main :: IO ()
main = do
cfg <- getConfig version
pgf <- readPGF (cfgPgfFile cfg)
let
langStr = computeLangStr (cfgPgfFile cfg) languageStr
mbLang = readLanguage langStr
input = cfgInput cfg
putStrLn ("Lang: "++langStr)
putStrLn ("Parsing: \""++input++"\"")
case mbLang of
Nothing -> fail ("Could not parse language: "++langStr)
Just lang -> do
let
(_, st) = runState transform $ Parser pgf lang (\x->[x])
case cfgOutputFile cfg of
Nothing -> do
putStrLn "No output file specified"
Just outFile -> do
putStrLn ("Writing new PGF file to: "++outFile)
encodeFile outFile $ pPgf st
putStrLn ("PGF file saved to: "++outFile)
forever $ do
putStr "\n> "
hFlush stdout
inStr <- getLine
mapM_ (\s->putStrLn ("\n "++s)) $ runParser st inStr
-- let (lex, _) = runState (do getLexicon)
-- (Parser pgf lang (\x->[x]))
-- putStrLn "Lexicon"
-- mapM_ (\l->print (l, stem l)) lex
computeLangStr :: FilePath -> String -> String
computeLangStr pgfPath lang = let
-- TODO not system-independent (file path separator is /)
base = reverse $ takeWhile (/= '/') (drop 4 $ reverse pgfPath)
in base ++ lang
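-- A more portable sketch (illustrative, not wired in; it assumes the PGF path
-- always ends in a single ".pgf" extension, matching the drop-4 above):
--
-- > import System.FilePath (takeBaseName)
-- > computeLangStr pgfPath lang = takeBaseName pgfPath ++ lang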
|
creswick/pgftransform
|
app/haskell/Main.hs
|
apache-2.0
| 2,858
| 0
| 21
| 926
| 604
| 326
| 278
| 59
| 3
|
{-
Copyrights (c) 2016. Samsung Electronics Ltd. All right reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
import Distribution.Simple
main = defaultMain
|
ryzhyk/cocoon
|
cocoon/Setup.hs
|
apache-2.0
| 644
| 0
| 4
| 102
| 12
| 7
| 5
| 2
| 1
|
{-
Copyright 2015 Raghu Kaippully
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE OverloadedStrings #-}
module Codec.Laydown.Specs where
import Codec.Laydown
import Data.ByteString.Builder
import Test.Hspec
blocks :: SpecWith ()
blocks = do
describe "hrule" $ do
it "parses hrule with '-'" $
laydown " -----" >>= (`shouldBe` Right (Document [HRule]))
it "parses hrule with '='" $
laydown " =====" >>= (`shouldBe` Right (Document [HRule]))
describe "header" $ do
it "parses h1" $
laydown " # header" >>= (`shouldBe` Right (Document [Header 1 "header"]))
it "parses h2" $
laydown " ## header" >>= (`shouldBe` Right (Document [Header 2 "header"]))
it "parses h3" $
laydown " ### header" >>=
(`shouldBe` Right (Document [Header 3 "header"]))
it "parses h4" $
laydown " #### header" >>=
(`shouldBe` Right (Document [Header 4 "header"]))
it "parses h5" $
laydown " ##### header" >>=
(`shouldBe` Right (Document [Header 5 "header"]))
it "parses h6" $
laydown " ###### header" >>=
(`shouldBe` Right (Document [Header 6 "header"]))
describe "code block" $ do
it "parses code block with '~'" $
laydown "~~~~ haskell\nmain = return ()\n~~~~" >>=
(`shouldBe` Right (Document [CodeBlock "haskell" "main = return ()\n"]))
it "parses code block with '`'" $
laydown "```` haskell\nmain = return ()\n````" >>=
(`shouldBe` Right (Document [CodeBlock "haskell" "main = return ()\n"]))
describe "html block" $
it "parses html block" $
laydown " !!! \n<em>lay down!</em>\n !!!" >>=
(`shouldBe` Right (Document [HtmlBlock "<em>lay down!</em>\n"]))
describe "paragraph" $
it "parses paragraph" $ do
let txt = toLazyByteString . stringUtf8 $
"Many were increasingly of the opinion that they'd all \n" ++
"made a big mistake in coming down from the trees in the \n" ++
"first place. And some said that even the trees had been \n" ++
"a bad move, and that no one should ever have left the \n" ++
"oceans.\n"
laydown txt >>= (`shouldBe` Right (Document [Paragraph txt]))
|
rkaippully/laydown
|
src/test/Codec/Laydown/Specs.hs
|
apache-2.0
| 2,745
| 0
| 18
| 694
| 586
| 294
| 292
| 49
| 1
|
module Main where
import ECC
import Codes.ICFP_Paper
import Haskell.ArraySig (fromListMatrix)
import Manifold.Repa (ecc_repa)
-- ecc_repa currently cannot handle the 4K/7K code
--
-- it takes about 3ms per decode for the 7/20 code
main :: IO ()
main = do
putStrLn "Start"
let h = fromListMatrix Codes.ICFP_Paper.h_7_20
let g = fromListMatrix Codes.ICFP_Paper.g_7_20
ecc <- ecc_repa 30 h g (Just 3) 0
mainWith (\i x -> i + toInteger x) (0 :: Integer) 1000 $ ecc{debug=noDebug}
|
ku-fpg/ldpc
|
mains/Haskell_Repa.hs
|
bsd-2-clause
| 488
| 0
| 12
| 87
| 155
| 82
| 73
| 12
| 1
|
module Drasil.Projectile.Assumptions (accelYGravity, accelXZero, cartSyst,
assumptions, constAccel, gravAccelValue, launchOrigin, pointMass,
posXDirection, targetXAxis, timeStartZero, twoDMotion, yAxisGravity) where
import Language.Drasil
import Utils.Drasil
import qualified Drasil.DocLang.SRS as SRS (valsOfAuxCons)
import Data.Drasil.Concepts.Documentation (assumpDom, value)
import Data.Drasil.Concepts.Math (cartesian, xAxis, xDir, yAxis, yDir)
import Data.Drasil.Concepts.PhysicalProperties (mass)
import Data.Drasil.Concepts.Physics (acceleration, collision, distance, gravity, time, twoD)
import Drasil.Projectile.Concepts (launcher, projectile, target)
assumptions :: [ConceptInstance]
assumptions = [twoDMotion, cartSyst, yAxisGravity, launchOrigin, targetXAxis,
posXDirection, constAccel, accelXZero, accelYGravity, neglectDrag, pointMass,
freeFlight, neglectCurv, timeStartZero, gravAccelValue]
twoDMotion, cartSyst, yAxisGravity, launchOrigin, targetXAxis,
posXDirection, constAccel, accelXZero, accelYGravity, neglectDrag,
pointMass, freeFlight, neglectCurv, timeStartZero,
gravAccelValue :: ConceptInstance
twoDMotion = cic "twoDMotion" twoDMotionDesc "twoDMotion" assumpDom
cartSyst = cic "cartSyst" cartSystDesc "cartSyst" assumpDom
yAxisGravity = cic "yAxisGravity" yAxisGravityDesc "yAxisGravity" assumpDom
launchOrigin = cic "launchOrigin" launchOriginDesc "launchOrigin" assumpDom
targetXAxis = cic "targetXAxis" targetXAxisDesc "targetXAxis" assumpDom
posXDirection = cic "posXDirection" posXDirectionDesc "posXDirection" assumpDom
constAccel = cic "constAccel" constAccelDesc "constAccel" assumpDom
accelXZero = cic "accelXZero" accelXZeroDesc "accelXZero" assumpDom
accelYGravity = cic "accelYGravity" accelYGravityDesc "accelYGravity" assumpDom
neglectDrag = cic "neglectDrag" neglectDragDesc "neglectDrag" assumpDom
pointMass = cic "pointMass" pointMassDesc "pointMass" assumpDom
freeFlight = cic "freeFlight" freeFlightDesc "freeFlight" assumpDom
neglectCurv = cic "neglectCurv" neglectCurvDesc "neglectCurv" assumpDom
timeStartZero = cic "timeStartZero" timeStartZeroDesc "timeStartZero" assumpDom
gravAccelValue = cic "gravAccelValue" gravAccelValueDesc "gravAccelValue" assumpDom
twoDMotionDesc :: Sentence
twoDMotionDesc = S "The" +:+ phrase projectile +:+ S "motion" `sIs` phrase twoD +:+. sParen (getAcc twoD)
cartSystDesc :: Sentence
cartSystDesc = S "A" +:+ (phrase cartesian `sIs` S "used") +:+. sParen (S "from" +:+ makeRef2S neglectCurv)
yAxisGravityDesc :: Sentence
yAxisGravityDesc = S "direction" `ofThe'` phrase yAxis `sIs` S "directed opposite to" +:+. phrase gravity
launchOriginDesc :: Sentence
launchOriginDesc = S "The" +:+. (phrase launcher `sIs` S "coincident with the origin")
targetXAxisDesc :: Sentence
targetXAxisDesc = S "The" +:+ phrase target +:+ S "lies on the" +:+ phrase xAxis +:+. sParen (S "from" +:+ makeRef2S neglectCurv)
posXDirectionDesc :: Sentence
posXDirectionDesc = S "The positive" +:+ phrase xDir `sIs` S "from the" +:+. (phrase launcher `toThe` phrase target)
constAccelDesc :: Sentence
constAccelDesc = S "The" +:+ (phrase acceleration `sIs` S "constant") +:+.
sParen (S "from" +:+ foldlList Comma List (map makeRef2S [accelXZero, accelYGravity, neglectDrag, freeFlight]))
accelXZeroDesc :: Sentence
accelXZeroDesc = S "The" +:+ phrase acceleration +:+. (S "in the" +:+ phrase xDir `sIs` S "zero")
accelYGravityDesc :: Sentence
accelYGravityDesc = S "The" +:+ phrase acceleration +:+ S "in the" +:+ phrase yDir `isThe` phrase acceleration +:+
S "due to" +:+ phrase gravity +:+. sParen (S "from" +:+ makeRef2S yAxisGravity)
neglectDragDesc :: Sentence
neglectDragDesc = S "Air drag" `sIs` S "neglected."
pointMassDesc :: Sentence
pointMassDesc = (S "size" `sAnd` S "shape") `ofThe'` phrase projectile `sAre`
S "negligible" `sC` S "so that it can be modelled as a point" +:+. phrase mass
freeFlightDesc :: Sentence
freeFlightDesc = S "The flight" `sIs` S "free; there" `sAre` S "no" +:+ plural collision +:+
S "during" +:+. (S "trajectory" `ofThe` phrase projectile)
neglectCurvDesc :: Sentence
neglectCurvDesc = S "The" +:+ phrase distance `sIs` S "small enough that" +:+.
(S "curvature" `ofThe` S "Earth can be neglected")
timeStartZeroDesc :: Sentence
timeStartZeroDesc = atStart time +:+. S "starts at zero"
gravAccelValueDesc :: Sentence
gravAccelValueDesc = S "The" +:+ phrase acceleration +:+ S "due to" +:+
phrase gravity +:+ S "is assumed to have the" +:+ phrase value +:+
S "provided in" +:+. makeRef2S (SRS.valsOfAuxCons ([]::[Contents])
([]::[Section]))
|
JacquesCarette/literate-scientific-software
|
code/drasil-example/Drasil/Projectile/Assumptions.hs
|
bsd-2-clause
| 4,905
| 0
| 12
| 904
| 1,247
| 680
| 567
| 72
| 1
|
module Exercises.ChapterTwelve
(
) where
data Tree a
= Leaf
| Node (Tree a)
a
(Tree a)
deriving (Show)
instance Functor Tree
-- fmap g Tree :: (a -> b) -> Tree a -> Tree b
where
fmap _ Leaf = Leaf
fmap g (Node l a r) = Node (fmap g l) (g a) (fmap g r)
-- instance Functor ((->) a)
-- -- fmap :: (b -> c) -> (a -> b) -> (a -> c)
-- where
-- fmap = (.)
-- instance Applicative ((->) a) where
-- pure = const -- K combinator
-- (<*>) f g x = f x (g x) -- S combinator
-- -- (<*>) = (a -> b -> c) -> (a -> b) -> (a -> c)
-- instance Monad ((->) a)
-- -- (>>=) :: (a -> b) -> (b -> (a -> c))) -> (a -> c)
-- where
-- (>>=) f g x = g (f x) x
data Expr a
= Var a
| Val Int
| Add (Expr a)
(Expr a)
deriving (Show)
instance Functor Expr
-- fmap g Expr :: (a -> b) -> Expr a -> Expr b
where
fmap g (Var a) = Var (g a)
fmap _ (Val i) = Val i
fmap g (Add l r) = Add (fmap g l) (fmap g r)
-- instance Applicative Expr
-- pure x = \env -> Var x
-- -- (<*>) = f (a -> b) -> f a -> f b
-- (<*>) g (Var a) = Var (g a)
-- (<*>) _ (Val i) = Val i
-- (<*>) g (Add l r) = Add ()
type State = Int
newtype ST a =
S (State -> (a, State))
app :: ST a -> State -> (a, State)
app (S st) x = st x
instance Functor ST
-- fmap :: (a -> b) -> ST a -> ST b
where
fmap g st = do
x <- st
return (g x)
instance Applicative ST
-- pure :: a -> ST a
where
pure x = S (\s -> (x, s))
-- (<*>) :: ST (a -> b) -> ST a -> ST b
stf <*> stx = do
f <- stf
x <- stx
return (f x)
instance Monad ST
-- (>>=) :: ST a -> (a -> ST b) -> ST b
where
st >>= f =
S
(\s ->
let (x, s') = app st s
in app (f x) s')
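-- Illustrative check (not part of the original exercises): a tiny counter
-- shows how these instances thread state left to right.
--
-- > next :: ST Int
-- > next = S (\n -> (n, n + 1))
-- >
-- > -- app (do a <- next; b <- next; return (a, b)) 0 == ((0, 1), 2)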
|
philipcraig/hutton
|
src/Exercises/ChapterTwelve.hs
|
bsd-3-clause
| 2,014
| 0
| 13
| 872
| 508
| 271
| 237
| 42
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Types.Torrent (
fromJson,
QueryResult(..),
Torrent(..)
) where
import Data.Aeson((.:), (.:?))
import qualified Data.Aeson as Json
import qualified Data.ByteString.Lazy.Char8 as C
import qualified Data.DateTime as Date
import qualified Data.Map.Strict as Map
import Control.Monad (mzero)
data QueryResult = QueryResult
{ query :: String
, total :: Int
, offset :: Int
, limit :: Int
, torrents :: [Torrent]
} deriving (Show)
data IntermediateQueryResult = IntermediateQueryResult
{ query_ :: String
, total_ :: String
, offset_ :: String
, limit_ :: String
, torrents_ :: [IntermediateTorrent]
}
instance Json.FromJSON IntermediateQueryResult where
parseJSON (Json.Object v) =
IntermediateQueryResult <$> v .: "query"
<*> v .: "total"
<*> v .: "offset"
<*> v .: "limit"
<*> v .: "torrents"
parseJSON _ = mzero
data Torrent = Torrent
{ id :: Int
, name :: String
, category :: Int
, seeders :: Int
, leechers :: Int
, comments :: Int
, isVerified :: Bool
, added :: Date.DateTime
, size :: Int
, times_completed :: Int
, owner :: Int
, categoryName :: String
, categoryImage :: String
, username :: String
, privacy :: String
} deriving (Show)
data IntermediateTorrent = IntermediateTorrent
{ id_ :: String
, name_ :: String
, category_ :: String
, seeders_ :: String
, leechers_ :: String
, comments_ :: String
, isVerified_ :: String
, added_ :: String
, size_ :: String
, times_completed_ :: String
, owner_ :: String
, categoryName_ :: String
, categoryImage_ :: String
, username_ :: String
, privacy_ :: String
}
instance Json.FromJSON IntermediateTorrent where
parseJSON (Json.Object v) =
IntermediateTorrent <$> v .: "id"
<*> v .: "name"
<*> v .: "category"
<*> v .: "seeders"
<*> v .: "leechers"
<*> v .: "comments"
<*> v .: "isVerified"
<*> v .: "added"
<*> v .: "size"
<*> v .: "times_completed"
<*> v .: "owner"
<*> v .: "categoryname"
<*> v .: "categoryimage"
<*> v .: "username"
<*> v .: "privacy"
parseJSON _ = mzero
fromJson :: String -> Either String QueryResult
fromJson string =
case Json.eitherDecode . C.pack $ string :: Either String IntermediateQueryResult of
Left error -> Left error
Right result -> Right $ queryFromIntermediate result
queryFromIntermediate :: IntermediateQueryResult -> QueryResult
queryFromIntermediate intermediate =
let makeTorrent i =
Torrent (read $ id_ i)
(name_ i)
(read $ category_ i)
(read $ seeders_ i)
(read $ leechers_ i)
(read $ comments_ i)
                (isVerified_ i /= "0")
(read $ added_ i)
(read $ size_ i)
(read $ times_completed_ i)
(read $ owner_ i)
(categoryName_ i)
(categoryImage_ i)
(username_ i)
(privacy_ i)
in QueryResult (query_ intermediate)
(read $ total_ intermediate)
(read $ offset_ intermediate)
(read $ limit_ intermediate)
(map makeTorrent $ torrents_ intermediate)
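-- Note on the wire format this decoder expects (as implied by the
-- Intermediate* types above): every numeric and boolean field, e.g. "total"
-- or "isVerified", arrives as a JSON string and is only converted with
-- 'read' (or compared against "0") here in 'queryFromIntermediate'.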
|
ibizaman/t411-hs
|
src/Types/Torrent.hs
|
bsd-3-clause
| 3,961
| 0
| 35
| 1,613
| 948
| 534
| 414
| 111
| 2
|
{-# OPTIONS_GHC -Wall #-}
module SourceSyntax.Pattern where
import qualified SourceSyntax.Helpers as Help
import SourceSyntax.PrettyPrint
import Text.PrettyPrint as PP
import qualified Data.Set as Set
import SourceSyntax.Literal as Literal
data Pattern = PData String [Pattern]
| PRecord [String]
| PAlias String Pattern
| PVar String
| PAnything
| PLiteral Literal.Literal
deriving (Eq, Ord, Show)
cons :: Pattern -> Pattern -> Pattern
cons h t = PData "::" [h,t]
nil :: Pattern
nil = PData "[]" []
list :: [Pattern] -> Pattern
list = foldr cons nil
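-- For example (illustrative): @list [PVar "x", PAnything]@ builds
-- @PData "::" [PVar "x", PData "::" [PAnything, PData "[]" []]]@,
-- i.e. the usual right-nested cons cells ending in nil.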
tuple :: [Pattern] -> Pattern
tuple es = PData ("_Tuple" ++ show (length es)) es
boundVars :: Pattern -> Set.Set String
boundVars pattern =
case pattern of
PVar x -> Set.singleton x
PAlias x p -> Set.insert x (boundVars p)
PData _ ps -> Set.unions (map boundVars ps)
PRecord fields -> Set.fromList fields
PAnything -> Set.empty
PLiteral _ -> Set.empty
instance Pretty Pattern where
pretty pattern =
case pattern of
PVar x -> variable x
PLiteral lit -> pretty lit
PRecord fs -> PP.braces (commaCat $ map variable fs)
PAlias x p -> prettyParens p <+> PP.text "as" <+> variable x
PAnything -> PP.text "_"
PData "::" [hd,tl] -> parensIf isCons (pretty hd) <+> PP.text "::" <+> pretty tl
where isCons = case hd of
PData "::" _ -> True
_ -> False
PData name ps ->
if Help.isTuple name then
PP.parens . commaCat $ map pretty ps
else hsep (PP.text name : map prettyParens ps)
prettyParens :: Pattern -> Doc
prettyParens pattern = parensIf needsThem (pretty pattern)
where
needsThem =
case pattern of
PData name (_:_) | not (Help.isTuple name) -> True
PAlias _ _ -> True
_ -> False
|
JoeyEremondi/haskelm-old
|
src/SourceSyntax/Pattern.hs
|
bsd-3-clause
| 1,917
| 0
| 16
| 568
| 669
| 337
| 332
| 54
| 6
|
{-# LANGUAGE OverloadedStrings #-}
module Spec.IO (tests) where
import Data.Default (def)
import Hakyll.Convert.Common (DistilledPost (..))
import Hakyll.Convert.IO (savePost)
import Spec.SpecHelpers
import System.Directory (doesFileExist)
import System.FilePath ((</>))
import System.IO.Temp (withSystemTempDirectory)
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit
tests :: TestTree
tests =
testGroup
"IO.savePost"
[ namesTheFileAccordingToFormat
]
namesTheFileAccordingToFormat :: TestTree
namesTheFileAccordingToFormat =
testCase
"Names the output file according to the given filename format"
( withSystemTempDirectory "hakyll-convert" $ \tempDir -> do
let output_format = "%o-%Y^%y%%_%s%dd & %m—%S%H%M"
let file_extension = "xyz"
let post =
def
{ -- The slug, %s, is going to be "yet-another"
dpUri = "https://example.com/2020/yet-another.post.html",
dpDate = fromGregorian 2020 11 6 11 33 46
}
let expectedFilename = tempDir </> "2020/yet-another-2020^20%_yet-another06d & 11—461133.xyz"
filename <- savePost tempDir output_format file_extension post
expectedFilename @=? filename
exists <- doesFileExist expectedFilename
assertBool "The file with expected name doesn't exist" exists
)
|
kowey/hakyll-convert
|
test/spec/Spec/IO.hs
|
bsd-3-clause
| 1,393
| 0
| 16
| 318
| 262
| 144
| 118
| 32
| 1
|
{-# LANGUAGE Arrows, NoMonomorphismRestriction #-}
module Web.HRSS.Data.Atom where
import Text.XML.HXT.Core
import Data.Tree.NTree.TypeDefs(NTree)
data Atom = Atom
{ atomTitle :: String
, atomEntries :: [Entry]
, atomLinks :: [Link]
}
deriving (Show, Read, Eq)
data Link = Link
{ linkRel :: String
, linkType :: String
, linkHref :: String
}
deriving (Show, Read, Eq)
data Entry = Entry
{ entryTitle :: String
, entryLinks :: [Link]
, entryUpdated :: String
, entryPublished :: String
, entryId :: String
}
deriving (Show, Read, Eq)
getAtom :: ArrowXml cat => cat XmlTree Atom
getAtom = atElem "feed" >>> parseAtom
where
parseAtom :: ArrowXml cat => cat XmlTree Atom
parseAtom = proc x -> do
t <- ( (atElem "title" >>> parse )) -< x
e <- (listA (atElem "entry" >>> parseEntry )) -< x
l <- (listA (atElem "link" >>> parseLink )) -< x
returnA -< Atom t e l
parseLink :: ArrowXml cat => cat XmlTree Link
parseLink = proc x -> do
r <- atAttr "rel" -< x
t <- atAttr "type" -< x
h <- atAttr "href" -< x
returnA -< Link r t h
parseEntry :: ArrowXml cat => cat XmlTree Entry
parseEntry = proc x -> do
i <- ( (atElem "id" >>> parse )) -< x
t <- ( (atElem "title" >>> parse )) -< x
l <- (listA (atElem "link" >>> parseLink )) -< x
u <- ( (atElem "updated" >>> parse )) -< x
p <- ( (atElem "published" >>> parse )) -< x
returnA -< Entry t l u p i
parse :: ArrowXml cat => cat XmlTree String
parse = getChildren >>> getText
atAttr :: ArrowXml cat => String -> cat XmlTree String
atAttr name = hasAttr name >>> getAttrValue name
atElem :: ArrowXml cat => String -> cat (NTree XNode) XmlTree
atElem name = getChildren >>> isElem >>> hasName name
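-- Typical use (illustrative sketch; 'runX', 'readString', 'withValidate' and
-- 'no' are re-exported by Text.XML.HXT.Core):
--
-- > feeds <- runX (readString [withValidate no] feedXml >>> getAtom)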
|
kdridi/hrss
|
src/lib/Web/HRSS/Data/Atom.hs
|
bsd-3-clause
| 1,883
| 3
| 16
| 560
| 693
| 356
| 337
| 49
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
module Cataskell.GameData.PlayerSpec (main, spec) where
import Test.Hspec
import Test.QuickCheck
import Data.Monoid
import Cataskell.GameData.Basics
import Cataskell.GameData.Player
import Cataskell.GameData.Resources
import Control.Arrow ((&&&))
import Data.Maybe (isNothing)
import Data.Map (Map)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Control.Lens hiding (elements)
import Cataskell.GameData.BasicsSpec() -- get Arbitrary ResourceCount
import Cataskell.GameData.ResourcesSpec() -- get Arbitrary ResourceCount
instance Arbitrary Player where
arbitrary = do
i <- elements [0..3]
name <- elements ["1", "2", "3", "4"]
color' <- elements [Red, Blue, Orange, White]
let p = mkPlayer (i, color', name)
r <- arbitrary
return $ resources .~ r $ p
shrink p = tail $ Player <$> [_playerName p, ""]
<*> [_playerColor p]
<*> [_playerIndex p]
<*> [_resources p]
<*> [filter (isNothing . preview itemType) $ _constructed p]
<*> [_newCards p]
<*> [_knights p]
<*> [_bonuses p]
instance Arbitrary PlayerIndex where
arbitrary = toPlayerIndex `fmap` elements [0..3]
newtype PlayerMap = PlayerMap (Map PlayerIndex Player)
deriving (Eq, Show, Ord)
instance Arbitrary PlayerMap where
arbitrary = do
(xs :: [Player]) <- arbitrary
let xs' = map (_playerIndex &&& id) xs
pure . PlayerMap $ Map.fromList xs'
main :: IO ()
main = hspec spec
spec :: Spec
spec = parallel $ do
describe "A Player" $ do
let p = mkPlayer (0, Blue, "Nobody")
it "has a name" $ do
view playerName p `shouldBe` "Nobody"
it "has a player index" $ do
view playerIndex p `shouldBe` toPlayerIndex 0
it "should begin with 0 resources" $ do
(totalResources $ view resources p) `shouldBe` 0
it "can add resources" $ property $
\p -> let resCountNow = totalResources $ view resources (p :: Player)
oneOre = mempty { ore = 1 }
resAfter = (view resources p) <> oneOre
resCountAfter = totalResources resAfter
in (resCountNow + 1) == resCountAfter
let c' = [ Card VictoryPoint
, settlement $ Just (undefined, White)
, settlement $ Just (undefined, White)]
let p2 = constructed .~ c' $ (mkPlayer (2, White, "No-One"))
it "should have a score" $ do
view score p2 `shouldBe` 3
it "should have a display score" $ do
view displayScore p2 `shouldBe` 2
it "can have development cards" $ do
view devCards p2 `shouldBe` [VictoryPoint]
it "must have only non-negative resources" $ property $ do
\player -> nonNegative $ view resources (player :: Player)
describe "A Map PlayerIndex Player" $
it "should have keys matching the PlayerIndex stored in the player object" $ property $
\(PlayerMap pmap)-> all (\(pI, p) -> pI == (p^.playerIndex)) $ Map.toList pmap
|
corajr/cataskell
|
test/Cataskell/GameData/PlayerSpec.hs
|
bsd-3-clause
| 3,108
| 0
| 19
| 851
| 980
| 515
| 465
| 74
| 1
|
{-|
Module : Idris.CmdOptions
Description : A parser for the CmdOptions for the Idris executable.
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE Arrows #-}
module Idris.CmdOptions
(
module Idris.CmdOptions
, opt
, getClient, getPkg, getPkgCheck, getPkgClean, getPkgMkDoc
, getPkgREPL, getPkgTest, getPort, getIBCSubDir
) where
import Idris.AbsSyntaxTree
import Idris.AbsSyntax (opt, getClient, getPkg, getPkgCheck, getPkgClean, getPkgMkDoc
, getPkgREPL, getPkgTest, getPort, getIBCSubDir)
-- import Idris.REPL
import Idris.Info (getIdrisVersion)
import IRTS.CodegenCommon
import Options.Applicative
import Options.Applicative.Arrows
import Options.Applicative.Types (ReadM(..))
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Reader (ask)
import Control.Monad.Trans.Except (throwE)
import Data.Char
import Data.Maybe
import Text.ParserCombinators.ReadP hiding (many, option)
import Safe (lastMay)
import qualified Text.PrettyPrint.ANSI.Leijen as PP
runArgParser :: IO [Opt]
runArgParser = do opts <- execParser $ info parser
(fullDesc
<> headerDoc (Just idrisHeader)
<> progDescDoc (Just idrisProgDesc)
<> footerDoc (Just idrisFooter)
)
return $ preProcOpts opts
where
idrisHeader = PP.hsep [PP.text "Idris version", PP.text getIdrisVersion, PP.text ", (C) The Idris Community 2016"]
idrisProgDesc = PP.vsep [PP.empty,
PP.text "Idris is a general purpose pure functional programming language with dependent",
PP.text "types. Dependent types allow types to be predicated on values, meaning that",
PP.text "some aspects of a program's behaviour can be specified precisely in the type.",
PP.text "It is compiled, with eager evaluation. Its features are influenced by Haskell",
PP.text "and ML.",
PP.empty,
PP.vsep $ map (PP.indent 4 . PP.text) [
"+ Full dependent types with dependent pattern matching",
"+ Simple case expressions, where-clauses, with-rule",
"+ Pattern matching let- and lambda-bindings",
"+ Overloading via Interfaces (Type class-like), Monad comprehensions",
"+ do-notation, idiom brackets",
"+ Syntactic conveniences for lists, tuples, dependent pairs",
"+ Totality checking",
"+ Coinductive types",
"+ Indentation significant syntax, Extensible syntax",
"+ Tactic based theorem proving (influenced by Coq)",
"+ Cumulative universes",
"+ Simple Foreign Function Interface",
"+ Hugs style interactive environment"
]]
idrisFooter = PP.vsep [PP.text "It is important to note that Idris is first and foremost a research tool",
PP.text "and project. Thus the tooling provided and resulting programs created",
PP.text "should not necessarily be seen as production ready nor for industrial use.",
PP.empty,
PP.text "More details over Idris can be found online here:",
PP.empty,
PP.indent 4 (PP.text "http://www.idris-lang.org/")]
pureArgParser :: [String] -> [Opt]
pureArgParser args = case getParseResult $ execParserPure (prefs idm) (info parser idm) args of
Just opts -> preProcOpts opts
Nothing -> []
parser :: Parser [Opt]
parser = runA $ proc () -> do
flags <- asA parseFlags -< ()
files <- asA (many $ argument (fmap Filename str) (metavar "FILES")) -< ()
A parseVersion >>> A helper -< (flags ++ files)
parseFlags :: Parser [Opt]
parseFlags = many $
flag' NoBanner (long "nobanner" <> help "Suppress the banner")
<|> flag' Quiet (short 'q' <> long "quiet" <> help "Quiet verbosity")
-- IDE Mode Specific Flags
<|> flag' Idemode (long "ide-mode" <> help "Run the Idris REPL with machine-readable syntax")
<|> flag' IdemodeSocket (long "ide-mode-socket" <> help "Choose a socket for IDE mode to listen on")
<|> (Client <$> strOption (long "client"))
-- Logging Flags
<|> (OLogging <$> option auto (long "log" <> metavar "LEVEL" <> help "Debugging log level"))
<|> (OLogCats <$> option (str >>= parseLogCats)
(long "logging-categories"
<> metavar "CATS"
<> help "Colon separated logging categories. Use --listlogcats to see list."))
-- Turn off things
<|> flag' NoBasePkgs (long "nobasepkgs" <> help "Do not use the given base package")
<|> flag' NoPrelude (long "noprelude" <> help "Do not use the given prelude")
<|> flag' NoBuiltins (long "nobuiltins" <> help "Do not use the builtin functions")
<|> flag' NoREPL (long "check" <> help "Typecheck only, don't start the REPL")
<|> (Output <$> strOption (short 'o' <> long "output" <> metavar "FILE" <> help "Specify output file"))
-- <|> flag' TypeCase (long "typecase")
<|> flag' Interface (long "interface" <> help "Generate interface files from ExportLists")
<|> flag' TypeInType (long "typeintype" <> help "Turn off Universe checking")
<|> flag' DefaultTotal (long "total" <> help "Require functions to be total by default")
<|> flag' DefaultPartial (long "partial")
<|> flag' WarnPartial (long "warnpartial" <> help "Warn about undeclared partial functions")
<|> flag' WarnReach (long "warnreach" <> help "Warn about reachable but inaccessible arguments")
<|> flag' NoCoverage (long "nocoverage")
<|> flag' ErrContext (long "errorcontext")
-- Show things
<|> flag' ShowAll (long "info" <> help "Display information about installation.")
<|> flag' ShowLoggingCats (long "listlogcats" <> help "Display logging categories")
<|> flag' ShowLibs (long "link" <> help "Display link flags")
<|> flag' ShowPkgs (long "listlibs" <> help "Display installed libraries")
<|> flag' ShowLibdir (long "libdir" <> help "Display library directory")
<|> flag' ShowIncs (long "include" <> help "Display the includes flags")
<|> flag' Verbose (short 'V' <> long "verbose" <> help "Loud verbosity")
<|> (IBCSubDir <$> strOption (long "ibcsubdir" <> metavar "FILE" <> help "Write IBC files into sub directory"))
<|> (ImportDir <$> strOption (short 'i' <> long "idrispath" <> help "Add directory to the list of import paths"))
<|> (SourceDir <$> strOption (long "sourcepath" <> help "Add directory to the list of source search paths"))
<|> flag' WarnOnly (long "warn")
<|> (Pkg <$> strOption (short 'p' <> long "package" <> help "Add package as a dependency"))
<|> (Port <$> option portReader (long "port" <> metavar "PORT" <> help "REPL TCP port - pass \"none\" to not bind any port"))
-- Package commands
<|> (PkgBuild <$> strOption (long "build" <> metavar "IPKG" <> help "Build package"))
<|> (PkgInstall <$> strOption (long "install" <> metavar "IPKG" <> help "Install package"))
<|> (PkgREPL <$> strOption (long "repl" <> metavar "IPKG" <> help "Launch REPL, only for executables"))
<|> (PkgClean <$> strOption (long "clean" <> metavar "IPKG" <> help "Clean package"))
<|> (PkgMkDoc <$> strOption (long "mkdoc" <> metavar "IPKG" <> help "Generate IdrisDoc for package"))
<|> (PkgCheck <$> strOption (long "checkpkg" <> metavar "IPKG" <> help "Check package only"))
<|> (PkgTest <$> strOption (long "testpkg" <> metavar "IPKG" <> help "Run tests for package"))
-- Misc options
<|> (BCAsm <$> strOption (long "bytecode"))
<|> flag' (OutputTy Raw) (short 'S' <> long "codegenonly" <> help "Do no further compilation of code generator output")
<|> flag' (OutputTy Object) (short 'c' <> long "compileonly" <> help "Compile to object files rather than an executable")
<|> (DumpDefun <$> strOption (long "dumpdefuns"))
<|> (DumpCases <$> strOption (long "dumpcases"))
<|> (UseCodegen . parseCodegen) <$> strOption (long "codegen"
<> metavar "TARGET"
<> help "Select code generator: C, Javascript, Node and bytecode are bundled with Idris")
<|> ((UseCodegen . Via JSONFormat) <$> strOption (long "portable-codegen"
<> metavar "TARGET"
<> help "Pass the name of the code generator. This option is for codegens that take JSON formatted IR."))
<|> (CodegenArgs <$> strOption (long "cg-opt"
<> metavar "ARG"
<> help "Arguments to pass to code generator"))
<|> (EvalExpr <$> strOption (long "eval" <> short 'e' <> metavar "EXPR" <> help "Evaluate an expression without loading the REPL"))
<|> flag' (InterpretScript "Main.main") (long "execute" <> help "Execute as idris")
<|> (InterpretScript <$> strOption (long "exec" <> metavar "EXPR" <> help "Execute as idris"))
<|> ((Extension . getExt) <$> strOption (long "extension"
<> short 'X'
<> metavar "EXT"
<> help "Turn on language extension (TypeProviders or ErrorReflection)"))
-- Optimisation Levels
<|> flag' (OptLevel 3) (long "O3")
<|> flag' (OptLevel 2) (long "O2")
<|> flag' (OptLevel 1) (long "O1")
<|> flag' (OptLevel 0) (long "O0")
<|> flag' (AddOpt PETransform) (long "partial-eval")
<|> flag' (RemoveOpt PETransform) (long "no-partial-eval" <> help "Switch off partial evaluation, mainly for debugging purposes")
<|> (OptLevel <$> option auto (short 'O' <> long "level"))
<|> (TargetTriple <$> strOption (long "target" <> metavar "TRIPLE" <> help "If supported the codegen will target the named triple."))
<|> (TargetCPU <$> strOption (long "cpu" <> metavar "CPU" <> help "If supported the codegen will target the named CPU e.g. corei7 or cortex-m3"))
-- Colour Options
<|> flag' (ColourREPL True) (long "colour" <> long "color" <> help "Force coloured output")
<|> flag' (ColourREPL False) (long "nocolour" <> long "nocolor" <> help "Disable coloured output")
<|> (UseConsoleWidth <$> option (str >>= parseConsoleWidth) (long "consolewidth" <> metavar "WIDTH" <> help "Select console width: auto, infinite, nat"))
<|> flag' DumpHighlights (long "highlight" <> help "Emit source code highlighting")
<|> flag' NoElimDeprecationWarnings (long "no-elim-deprecation-warnings" <> help "Disable deprecation warnings for %elim")
<|> flag' NoOldTacticDeprecationWarnings (long "no-tactic-deprecation-warnings" <> help "Disable deprecation warnings for the old tactic sublanguage")
where
getExt :: String -> LanguageExt
getExt s = fromMaybe (error ("Unknown extension " ++ s)) (maybeRead s)
maybeRead :: String -> Maybe LanguageExt
maybeRead = fmap fst . listToMaybe . reads
portReader :: ReadM REPLPort
portReader =
((ListenPort . fromIntegral) <$> auto) <|>
(ReadM $ do opt <- ask
if map toLower opt == "none"
then return $ DontListen
else lift $ throwE $ ErrorMsg $
"got " <> opt <> " expected port number or \"none\"")
parseVersion :: Parser (a -> a)
parseVersion = infoOption getIdrisVersion (short 'v' <> long "version" <> help "Print version information")
preProcOpts :: [Opt] -> [Opt]
preProcOpts (NoBuiltins : xs) = NoBuiltins : NoPrelude : preProcOpts xs
preProcOpts (Output s : xs) = Output s : NoREPL : preProcOpts xs
preProcOpts (BCAsm s : xs) = BCAsm s : NoREPL : preProcOpts xs
preProcOpts (x:xs) = x : preProcOpts xs
preProcOpts [] = []
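-- For instance: @preProcOpts [Output "hello"]@ yields
-- @[Output "hello", NoREPL]@, i.e. asking for an output file implies
-- skipping the REPL.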
parseCodegen :: String -> Codegen
parseCodegen "bytecode" = Bytecode
parseCodegen cg = Via IBCFormat (map toLower cg)
parseLogCats :: Monad m => String -> m [LogCat]
parseLogCats s =
case lastMay (readP_to_S doParse s) of
Just (xs, _) -> return xs
_ -> fail "Incorrect categories specified"
where
doParse :: ReadP [LogCat]
doParse = do
cs <- sepBy1 parseLogCat (char ':')
eof
return (concat cs)
parseLogCat :: ReadP [LogCat]
parseLogCat = (string (strLogCat IParse) *> return parserCats)
<|> (string (strLogCat IElab) *> return elabCats)
<|> (string (strLogCat ICodeGen) *> return codegenCats)
<|> (string (strLogCat ICoverage) *> return [ICoverage])
<|> (string (strLogCat IIBC) *> return [IIBC])
<|> (string (strLogCat IErasure) *> return [IErasure])
<|> parseLogCatBad
parseLogCatBad :: ReadP [LogCat]
parseLogCatBad = do
s <- look
fail $ "Category: " ++ s ++ " is not recognised."
parseConsoleWidth :: Monad m => String -> m ConsoleWidth
parseConsoleWidth "auto" = return AutomaticWidth
parseConsoleWidth "infinite" = return InfinitelyWide
parseConsoleWidth s =
case lastMay (readP_to_S integerReader s) of
Just (r, _) -> return $ ColsWide r
_ -> fail $ "Cannot parse: " ++ s
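-- e.g. "auto" selects 'AutomaticWidth', "infinite" selects 'InfinitelyWide',
-- and a plain number such as "80" becomes @ColsWide 80@; anything else fails.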
integerReader :: ReadP Int
integerReader = do
digits <- many1 $ satisfy isDigit
return $ read digits
|
enolan/Idris-dev
|
src/Idris/CmdOptions.hs
|
bsd-3-clause
| 14,346
| 1
| 76
| 4,390
| 3,439
| 1,696
| 1,743
| 205
| 2
|
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.InstallDirs
-- Copyright : Isaac Jones 2003-2004
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This manages everything to do with where files get installed (though does
-- not get involved with actually doing any installation). It provides an
-- 'InstallDirs' type which is a set of directories for where to install
-- things. It also handles the fact that we use templates in these install
-- dirs. For example most install dirs are relative to some @$prefix@ and by
-- changing the prefix all other dirs still end up changed appropriately. So it
-- provides a 'PathTemplate' type and functions for substituting for these
-- templates.
module Distribution.Simple.InstallDirs (
InstallDirs(..),
InstallDirTemplates,
defaultInstallDirs,
combineInstallDirs,
absoluteInstallDirs,
CopyDest(..),
prefixRelativeInstallDirs,
substituteInstallDirTemplates,
PathTemplate,
PathTemplateVariable(..),
PathTemplateEnv,
toPathTemplate,
fromPathTemplate,
substPathTemplate,
initialPathTemplateEnv,
platformTemplateEnv,
compilerTemplateEnv,
packageTemplateEnv,
abiTemplateEnv,
installDirsTemplateEnv,
) where
import Distribution.Compat.Binary (Binary)
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
import Data.Monoid (Monoid(..))
import GHC.Generics (Generic)
import System.Directory (getAppUserDataDirectory)
import System.FilePath ((</>), isPathSeparator, pathSeparator)
import System.FilePath (dropDrive)
import Distribution.Package
( PackageIdentifier, PackageKey, packageName, packageVersion )
import Distribution.System
( OS(..), buildOS, Platform(..) )
import Distribution.Compiler
( AbiTag(..), abiTagString, CompilerInfo(..), CompilerFlavor(..) )
import Distribution.Text
( display )
#if mingw32_HOST_OS
import Foreign
import Foreign.C
#endif
-- ---------------------------------------------------------------------------
-- Installation directories
-- | The directories where we will install files for packages.
--
-- We have several different directories for different types of files since
-- many systems have conventions whereby different types of files in a package
-- are installed in different directories. This is particularly the case on
-- Unix style systems.
--
data InstallDirs dir = InstallDirs {
prefix :: dir,
bindir :: dir,
libdir :: dir,
libsubdir :: dir,
dynlibdir :: dir,
libexecdir :: dir,
includedir :: dir,
datadir :: dir,
datasubdir :: dir,
docdir :: dir,
mandir :: dir,
htmldir :: dir,
haddockdir :: dir,
sysconfdir :: dir
} deriving (Generic, Read, Show)
instance Binary dir => Binary (InstallDirs dir)
instance Functor InstallDirs where
fmap f dirs = InstallDirs {
prefix = f (prefix dirs),
bindir = f (bindir dirs),
libdir = f (libdir dirs),
libsubdir = f (libsubdir dirs),
dynlibdir = f (dynlibdir dirs),
libexecdir = f (libexecdir dirs),
includedir = f (includedir dirs),
datadir = f (datadir dirs),
datasubdir = f (datasubdir dirs),
docdir = f (docdir dirs),
mandir = f (mandir dirs),
htmldir = f (htmldir dirs),
haddockdir = f (haddockdir dirs),
sysconfdir = f (sysconfdir dirs)
}
instance Monoid dir => Monoid (InstallDirs dir) where
mempty = InstallDirs {
prefix = mempty,
bindir = mempty,
libdir = mempty,
libsubdir = mempty,
dynlibdir = mempty,
libexecdir = mempty,
includedir = mempty,
datadir = mempty,
datasubdir = mempty,
docdir = mempty,
mandir = mempty,
htmldir = mempty,
haddockdir = mempty,
sysconfdir = mempty
}
mappend = combineInstallDirs mappend
combineInstallDirs :: (a -> b -> c)
-> InstallDirs a
-> InstallDirs b
-> InstallDirs c
combineInstallDirs combine a b = InstallDirs {
prefix = prefix a `combine` prefix b,
bindir = bindir a `combine` bindir b,
libdir = libdir a `combine` libdir b,
libsubdir = libsubdir a `combine` libsubdir b,
dynlibdir = dynlibdir a `combine` dynlibdir b,
libexecdir = libexecdir a `combine` libexecdir b,
includedir = includedir a `combine` includedir b,
datadir = datadir a `combine` datadir b,
datasubdir = datasubdir a `combine` datasubdir b,
docdir = docdir a `combine` docdir b,
mandir = mandir a `combine` mandir b,
htmldir = htmldir a `combine` htmldir b,
haddockdir = haddockdir a `combine` haddockdir b,
sysconfdir = sysconfdir a `combine` sysconfdir b
}
appendSubdirs :: (a -> a -> a) -> InstallDirs a -> InstallDirs a
appendSubdirs append dirs = dirs {
libdir = libdir dirs `append` libsubdir dirs,
datadir = datadir dirs `append` datasubdir dirs,
libsubdir = error "internal error InstallDirs.libsubdir",
datasubdir = error "internal error InstallDirs.datasubdir"
}
-- | The installation directories in terms of 'PathTemplate's that contain
-- variables.
--
-- The defaults for most of the directories are relative to each other, in
-- particular they are all relative to a single prefix. This makes it
-- convenient for the user to override the default installation directory
-- by only having to specify --prefix=... rather than overriding each
-- individually. This is done by allowing $-style variables in the dirs.
-- These are expanded by textual substitution (see 'substPathTemplate').
--
-- A few of these installation directories are split into two components, the
-- dir and subdir. The full installation path is formed by combining the two
-- together with @\/@. The reason for this is compatibility with other Unix
-- build systems which also support @--libdir@ and @--datadir@. We would like
-- users to be able to configure @--libdir=\/usr\/lib64@ for example but
-- because by default we want to support installing multiple versions of
-- packages and building the same package for multiple compilers we append the
-- libsubdir to get: @\/usr\/lib64\/$pkgkey\/$compiler@.
--
-- An additional complication is the need to support relocatable packages on
-- systems which support such things, like Windows.
--
type InstallDirTemplates = InstallDirs PathTemplate
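-- A concrete illustration (all values hypothetical): with
-- @libdir = "$prefix/lib"@ and @libsubdir = "$abi/$pkgkey"@, substituting the
-- variables and then appending the subdir yields a library path of roughly
-- @/usr/local/lib/x86_64-linux-ghc-7.10/foo-1.0-XYZ@.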
-- ---------------------------------------------------------------------------
-- Default installation directories
defaultInstallDirs :: CompilerFlavor -> Bool -> Bool -> IO InstallDirTemplates
defaultInstallDirs comp userInstall _hasLibs = do
installPrefix <-
if userInstall
then getAppUserDataDirectory "cabal"
else case buildOS of
Windows -> do windowsProgramFilesDir <- getWindowsProgramFilesDir
return (windowsProgramFilesDir </> "Haskell")
_ -> return "/usr/local"
installLibDir <-
case buildOS of
Windows -> return "$prefix"
_ -> case comp of
LHC | userInstall -> getAppUserDataDirectory "lhc"
_ -> return ("$prefix" </> "lib")
return $ fmap toPathTemplate $ InstallDirs {
prefix = installPrefix,
bindir = "$prefix" </> "bin",
libdir = installLibDir,
libsubdir = case comp of
JHC -> "$compiler"
LHC -> "$compiler"
UHC -> "$pkgid"
_other -> "$abi" </> "$pkgkey",
dynlibdir = "$libdir",
libexecdir = case buildOS of
Windows -> "$prefix" </> "$pkgkey"
_other -> "$prefix" </> "libexec",
includedir = "$libdir" </> "$libsubdir" </> "include",
datadir = case buildOS of
Windows -> "$prefix"
_other -> "$prefix" </> "share",
datasubdir = "$abi" </> "$pkgid",
docdir = "$datadir" </> "doc" </> "$abi" </> "$pkgid",
mandir = "$datadir" </> "man",
htmldir = "$docdir" </> "html",
haddockdir = "$htmldir",
sysconfdir = "$prefix" </> "etc"
}
-- ---------------------------------------------------------------------------
-- Converting directories, absolute or prefix-relative
-- | Substitute the install dir templates into each other.
--
-- To prevent cyclic substitutions, only some variables are allowed in
-- particular dir templates. If out of scope vars are present, they are not
-- substituted for. Checking for any remaining unsubstituted vars can be done
-- as a subsequent operation.
--
-- The reason it is done this way is so that in 'prefixRelativeInstallDirs' we
-- can replace 'prefix' with the 'PrefixVar' and get resulting
-- 'PathTemplate's that still have the 'PrefixVar' in them. Doing this makes it
-- each to check which paths are relative to the $prefix.
--
substituteInstallDirTemplates :: PathTemplateEnv
-> InstallDirTemplates -> InstallDirTemplates
substituteInstallDirTemplates env dirs = dirs'
where
dirs' = InstallDirs {
-- So this specifies exactly which vars are allowed in each template
prefix = subst prefix [],
bindir = subst bindir [prefixVar],
libdir = subst libdir [prefixVar, bindirVar],
libsubdir = subst libsubdir [],
dynlibdir = subst dynlibdir [prefixVar, bindirVar, libdirVar],
libexecdir = subst libexecdir prefixBinLibVars,
includedir = subst includedir prefixBinLibVars,
datadir = subst datadir prefixBinLibVars,
datasubdir = subst datasubdir [],
docdir = subst docdir prefixBinLibDataVars,
mandir = subst mandir (prefixBinLibDataVars ++ [docdirVar]),
htmldir = subst htmldir (prefixBinLibDataVars ++ [docdirVar]),
haddockdir = subst haddockdir (prefixBinLibDataVars ++
[docdirVar, htmldirVar]),
sysconfdir = subst sysconfdir prefixBinLibVars
}
subst dir env' = substPathTemplate (env'++env) (dir dirs)
prefixVar = (PrefixVar, prefix dirs')
bindirVar = (BindirVar, bindir dirs')
libdirVar = (LibdirVar, libdir dirs')
libsubdirVar = (LibsubdirVar, libsubdir dirs')
datadirVar = (DatadirVar, datadir dirs')
datasubdirVar = (DatasubdirVar, datasubdir dirs')
docdirVar = (DocdirVar, docdir dirs')
htmldirVar = (HtmldirVar, htmldir dirs')
prefixBinLibVars = [prefixVar, bindirVar, libdirVar, libsubdirVar]
prefixBinLibDataVars = prefixBinLibVars ++ [datadirVar, datasubdirVar]
-- | Convert from abstract install directories to actual absolute ones by
-- substituting for all the variables in the abstract paths, to get real
-- absolute paths.
absoluteInstallDirs :: PackageIdentifier
-> PackageKey
-> CompilerInfo
-> CopyDest
-> Platform
-> InstallDirs PathTemplate
-> InstallDirs FilePath
absoluteInstallDirs pkgId pkg_key compilerId copydest platform dirs =
(case copydest of
CopyTo destdir -> fmap ((destdir </>) . dropDrive)
_ -> id)
. appendSubdirs (</>)
. fmap fromPathTemplate
$ substituteInstallDirTemplates env dirs
where
env = initialPathTemplateEnv pkgId pkg_key compilerId platform
-- |The location prefix for the /copy/ command.
data CopyDest
= NoCopyDest
| CopyTo FilePath
deriving (Eq, Show)
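-- For example (hedged; the paths are purely illustrative): with
-- @CopyTo "/tmp/image"@, an already-absolute @bindir@ of @"/usr/local/bin"@
-- comes out of 'absoluteInstallDirs' as @"/tmp/image/usr/local/bin"@,
-- because 'dropDrive' strips the leading separator (or the drive, on
-- Windows) before the destination directory is prepended.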
-- | Check which of the paths are relative to the installation $prefix.
--
-- If any of the paths are not relative, ie they are absolute paths, then it
-- prevents us from making a relocatable package (also known as a \"prefix
-- independent\" package).
--
prefixRelativeInstallDirs :: PackageIdentifier
-> PackageKey
-> CompilerInfo
-> Platform
-> InstallDirTemplates
-> InstallDirs (Maybe FilePath)
prefixRelativeInstallDirs pkgId pkg_key compilerId platform dirs =
fmap relative
. appendSubdirs combinePathTemplate
  $ -- substitute the path templates into each other, except that we map
    -- $prefix back to itself (the bare PrefixVar). We're trying to end up
    -- with templates that mention no vars except $prefix.
substituteInstallDirTemplates env dirs {
prefix = PathTemplate [Variable PrefixVar]
}
where
env = initialPathTemplateEnv pkgId pkg_key compilerId platform
    -- If it starts with $prefix then it's relative, and we produce the relative
-- path by stripping off $prefix/ or $prefix
relative dir = case dir of
PathTemplate cs -> fmap (fromPathTemplate . PathTemplate) (relative' cs)
relative' (Variable PrefixVar : Ordinary (s:rest) : rest')
| isPathSeparator s = Just (Ordinary rest : rest')
relative' (Variable PrefixVar : rest) = Just rest
relative' _ = Nothing
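-- A hedged illustration of 'relative' (each template is shown as its
-- rendered string):
--
-- >  "$prefix/bin"   ->  Just "bin"
-- >  "$prefix"       ->  Just ""
-- >  "/etc/foo"      ->  Nothing    -- absolute, so not prefix-relative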
-- ---------------------------------------------------------------------------
-- Path templates
-- | An abstract path, possibly containing variables that need to be
-- substituted for to get a real 'FilePath'.
--
newtype PathTemplate = PathTemplate [PathComponent] deriving (Eq, Generic, Ord)
instance Binary PathTemplate
data PathComponent =
Ordinary FilePath
| Variable PathTemplateVariable
deriving (Eq, Ord, Generic)
instance Binary PathComponent
data PathTemplateVariable =
PrefixVar -- ^ The @$prefix@ path variable
| BindirVar -- ^ The @$bindir@ path variable
| LibdirVar -- ^ The @$libdir@ path variable
| LibsubdirVar -- ^ The @$libsubdir@ path variable
| DatadirVar -- ^ The @$datadir@ path variable
| DatasubdirVar -- ^ The @$datasubdir@ path variable
| DocdirVar -- ^ The @$docdir@ path variable
| HtmldirVar -- ^ The @$htmldir@ path variable
| PkgNameVar -- ^ The @$pkg@ package name path variable
| PkgVerVar -- ^ The @$version@ package version path variable
| PkgIdVar -- ^ The @$pkgid@ package Id path variable, eg @foo-1.0@
| PkgKeyVar -- ^ The @$pkgkey@ package key path variable
| CompilerVar -- ^ The compiler name and version, eg @ghc-6.6.1@
| OSVar -- ^ The operating system name, eg @windows@ or @linux@
| ArchVar -- ^ The CPU architecture name, eg @i386@ or @x86_64@
| AbiVar -- ^ The Compiler's ABI identifier, $arch-$os-$compiler-$abitag
| AbiTagVar -- ^ The optional ABI tag for the compiler
| ExecutableNameVar -- ^ The executable name; used in shell wrappers
| TestSuiteNameVar -- ^ The name of the test suite being run
| TestSuiteResultVar -- ^ The result of the test suite being run, eg
-- @pass@, @fail@, or @error@.
| BenchmarkNameVar -- ^ The name of the benchmark being run
deriving (Eq, Ord, Generic)
instance Binary PathTemplateVariable
type PathTemplateEnv = [(PathTemplateVariable, PathTemplate)]
-- | Convert a 'FilePath' to a 'PathTemplate' including any template vars.
--
toPathTemplate :: FilePath -> PathTemplate
toPathTemplate = PathTemplate . read
-- | Convert back to a path; any remaining vars are included
--
fromPathTemplate :: PathTemplate -> FilePath
fromPathTemplate (PathTemplate template) = show template
combinePathTemplate :: PathTemplate -> PathTemplate -> PathTemplate
combinePathTemplate (PathTemplate t1) (PathTemplate t2) =
PathTemplate (t1 ++ [Ordinary [pathSeparator]] ++ t2)
substPathTemplate :: PathTemplateEnv -> PathTemplate -> PathTemplate
substPathTemplate environment (PathTemplate template) =
PathTemplate (concatMap subst template)
where subst component@(Ordinary _) = [component]
subst component@(Variable variable) =
case lookup variable environment of
Just (PathTemplate components) -> components
Nothing -> [component]
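-- A hedged usage sketch of the three helpers above (the environment is
-- made up for illustration):
--
-- > let t   = toPathTemplate "$prefix/share/$pkgid"
-- >     env = [(PrefixVar, toPathTemplate "/usr/local")]
-- > in  fromPathTemplate (substPathTemplate env t)
--
-- evaluates to @"/usr/local/share/$pkgid"@: the variable present in the
-- environment is expanded, while $pkgid is left in place, matching the
-- 'Nothing' branch of 'subst' above.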
-- | The initial environment has all the static stuff but no paths
initialPathTemplateEnv :: PackageIdentifier
-> PackageKey
-> CompilerInfo
-> Platform
-> PathTemplateEnv
initialPathTemplateEnv pkgId pkg_key compiler platform =
packageTemplateEnv pkgId pkg_key
++ compilerTemplateEnv compiler
++ platformTemplateEnv platform
++ abiTemplateEnv compiler platform
packageTemplateEnv :: PackageIdentifier -> PackageKey -> PathTemplateEnv
packageTemplateEnv pkgId pkg_key =
[(PkgNameVar, PathTemplate [Ordinary $ display (packageName pkgId)])
,(PkgVerVar, PathTemplate [Ordinary $ display (packageVersion pkgId)])
,(PkgKeyVar, PathTemplate [Ordinary $ display pkg_key])
,(PkgIdVar, PathTemplate [Ordinary $ display pkgId])
]
compilerTemplateEnv :: CompilerInfo -> PathTemplateEnv
compilerTemplateEnv compiler =
[(CompilerVar, PathTemplate [Ordinary $ display (compilerInfoId compiler)])
]
platformTemplateEnv :: Platform -> PathTemplateEnv
platformTemplateEnv (Platform arch os) =
[(OSVar, PathTemplate [Ordinary $ display os])
,(ArchVar, PathTemplate [Ordinary $ display arch])
]
abiTemplateEnv :: CompilerInfo -> Platform -> PathTemplateEnv
abiTemplateEnv compiler (Platform arch os) =
[(AbiVar, PathTemplate [Ordinary $ display arch ++ '-':display os ++
'-':display (compilerInfoId compiler) ++
case compilerInfoAbiTag compiler of
NoAbiTag -> ""
AbiTag tag -> '-':tag])
,(AbiTagVar, PathTemplate [Ordinary $ abiTagString (compilerInfoAbiTag compiler)])
]
installDirsTemplateEnv :: InstallDirs PathTemplate -> PathTemplateEnv
installDirsTemplateEnv dirs =
[(PrefixVar, prefix dirs)
,(BindirVar, bindir dirs)
,(LibdirVar, libdir dirs)
,(LibsubdirVar, libsubdir dirs)
,(DatadirVar, datadir dirs)
,(DatasubdirVar, datasubdir dirs)
,(DocdirVar, docdir dirs)
,(HtmldirVar, htmldir dirs)
]
-- ---------------------------------------------------------------------------
-- Parsing and showing path templates:
-- The textual format is that of an ordinary Haskell String, eg
-- "$prefix/bin"
-- and this gets parsed to the internal representation as a sequence of path
-- spans which are either strings or variables, eg:
-- PathTemplate [Variable PrefixVar, Ordinary "/bin" ]
instance Show PathTemplateVariable where
show PrefixVar = "prefix"
show PkgKeyVar = "pkgkey"
show BindirVar = "bindir"
show LibdirVar = "libdir"
show LibsubdirVar = "libsubdir"
show DatadirVar = "datadir"
show DatasubdirVar = "datasubdir"
show DocdirVar = "docdir"
show HtmldirVar = "htmldir"
show PkgNameVar = "pkg"
show PkgVerVar = "version"
show PkgIdVar = "pkgid"
show CompilerVar = "compiler"
show OSVar = "os"
show ArchVar = "arch"
show AbiTagVar = "abitag"
show AbiVar = "abi"
show ExecutableNameVar = "executablename"
show TestSuiteNameVar = "test-suite"
show TestSuiteResultVar = "result"
show BenchmarkNameVar = "benchmark"
instance Read PathTemplateVariable where
readsPrec _ s =
take 1
[ (var, drop (length varStr) s)
| (varStr, var) <- vars
, varStr `isPrefixOf` s ]
-- NB: order matters! Longer strings first
where vars = [("prefix", PrefixVar)
,("bindir", BindirVar)
,("libdir", LibdirVar)
,("libsubdir", LibsubdirVar)
,("datadir", DatadirVar)
,("datasubdir", DatasubdirVar)
,("docdir", DocdirVar)
,("htmldir", HtmldirVar)
,("pkgid", PkgIdVar)
,("pkgkey", PkgKeyVar)
,("pkg", PkgNameVar)
,("version", PkgVerVar)
,("compiler", CompilerVar)
,("os", OSVar)
,("arch", ArchVar)
,("abitag", AbiTagVar)
,("abi", AbiVar)
,("executablename", ExecutableNameVar)
,("test-suite", TestSuiteNameVar)
,("result", TestSuiteResultVar)
,("benchmark", BenchmarkNameVar)]
instance Show PathComponent where
show (Ordinary path) = path
show (Variable var) = '$':show var
showList = foldr (\x -> (shows x .)) id
instance Read PathComponent where
-- for some reason we collapse multiple $ symbols here
readsPrec _ = lex0
where lex0 [] = []
lex0 ('$':'$':s') = lex0 ('$':s')
lex0 ('$':s') = case [ (Variable var, s'')
| (var, s'') <- reads s' ] of
[] -> lex1 "$" s'
ok -> ok
lex0 s' = lex1 [] s'
lex1 "" "" = []
lex1 acc "" = [(Ordinary (reverse acc), "")]
lex1 acc ('$':'$':s) = lex1 acc ('$':s)
lex1 acc ('$':s) = [(Ordinary (reverse acc), '$':s)]
lex1 acc (c:s) = lex1 (c:acc) s
readList [] = [([],"")]
readList s = [ (component:components, s'')
| (component, s') <- reads s
, (components, s'') <- readList s' ]
instance Show PathTemplate where
show (PathTemplate template) = show (show template)
instance Read PathTemplate where
readsPrec p s = [ (PathTemplate template, s')
| (path, s') <- readsPrec p s
, (template, "") <- reads path ]
-- ---------------------------------------------------------------------------
-- Internal utilities
getWindowsProgramFilesDir :: IO FilePath
getWindowsProgramFilesDir = do
#if mingw32_HOST_OS
m <- shGetFolderPath csidl_PROGRAM_FILES
#else
let m = Nothing
#endif
return (fromMaybe "C:\\Program Files" m)
#if mingw32_HOST_OS
shGetFolderPath :: CInt -> IO (Maybe FilePath)
shGetFolderPath n =
allocaArray long_path_size $ \pPath -> do
r <- c_SHGetFolderPath nullPtr n nullPtr 0 pPath
if (r /= 0)
then return Nothing
else do s <- peekCWString pPath; return (Just s)
where
long_path_size = 1024 -- MAX_PATH is 260, this should be plenty
csidl_PROGRAM_FILES :: CInt
csidl_PROGRAM_FILES = 0x0026
-- csidl_PROGRAM_FILES_COMMON :: CInt
-- csidl_PROGRAM_FILES_COMMON = 0x002b
#ifdef x86_64_HOST_ARCH
#define CALLCONV ccall
#else
#define CALLCONV stdcall
#endif
foreign import CALLCONV unsafe "shlobj.h SHGetFolderPathW"
c_SHGetFolderPath :: Ptr ()
-> CInt
-> Ptr ()
-> CInt
-> CWString
-> IO CInt
#endif
|
plumlife/cabal
|
Cabal/Distribution/Simple/InstallDirs.hs
|
bsd-3-clause
| 23,529
| 2
| 16
| 6,547
| 4,551
| 2,551
| 2,000
| 390
| 10
|
{-|
Description: Advancement of SDL state.
-}
module Graphics.UI.SDL.State.Advance
( nextState
) where
import Control.Monad (liftM, foldM)
import Control.Applicative ((<$>))
import qualified Data.Set as S
import qualified Data.BitSet.Word as BW
import qualified Data.Map.Strict as M
import Control.Lens
import Data.Maybe (fromJust)
import Control.Monad.IO.ExClass
import Graphics.UI.SDL.State.Types
import Graphics.UI.SDL.Events.Types
import Graphics.UI.SDL.Video.Window
import Graphics.UI.SDL.Video.Mouse
import Graphics.UI.SDL.Video.Keyboard.Types
import Control.Lens.Instances ()
-- | Advance an SDL state.
-- During advancement it queries whatever data is needed to keep the state
-- consistent and up to date, provided that every event received from SDL
-- has been fed into it.
nextState :: forall m. MonadIO' m => StateData -> [EventData] -> m StateData
nextState s0 es = foldM (flip upd) s0 { _rawEvents = reverse es } es
where upd :: EventData -> StateData -> m StateData
upd (Window i Shown) = \s -> (\r -> s & windowState.at i ?~ r) <$> def
where def :: m WindowState
def = do
-- May fail if there are no other references to Window.
Just w <- getWindowFromID i
_wpos <- getWindowPosition w
_wsize <- getWindowSize w
return WindowState { _keysPressed = S.empty
, _scansPressed = S.empty
, _modsPressed = BW.empty
, _mouseState = M.empty
, _wshown = True
, _mouseFocus = False
, _kbdFocus = False
, ..
}
upd (Window i e) = windowState.at i %%~ liftM Just . winUpd e . fromJust
upd e = return . case e of
Window i Hidden -> rm i
Window i Closed -> rm i
_ -> id
where rm i = windowState.at i .~ Nothing
winUpd (Mouse i e) = mouseState.at i %%~ liftM (Just . mouseUpd e) . maybe def return
where def = do
(_mousePos, _mousePressed) <- getRelativeMouseState
return MouseState { .. }
winUpd e = return . case e of
Moved wp -> wpos .~ wp
SizeChanged ws -> wsize .~ ws
WinEntered -> mouseFocus .~ True
WinLeft -> mouseFocus .~ False
FocusGained -> kbdFocus .~ True
FocusLost -> kbdFocus .~ False
Keyboard KeyboardEvent { _kstate, _keySym = KeySym { .. } } ->
case _kstate of
Pressed -> (keysPressed.at _keyCode ?~ ())
. (scansPressed.at _scanCode ?~ ())
. (modsPressed .~ _keyMod)
Released -> (keysPressed.at _keyCode .~ Nothing)
. (scansPressed.at _scanCode .~ Nothing)
. (modsPressed .~ _keyMod)
_ -> id
mouseUpd (MMotion MouseMotionEvent { .. }) = (mousePos .~ _mmpos)
mouseUpd (MButton MouseButtonEvent { .. }) =
case _mstate of
Pressed -> (mousePressed.at _mbutton ?~ ()) . (mousePos .~ _mbpos)
Released -> (mousePressed.at _mbutton .~ Nothing) . (mousePos .~ _mbpos)
mouseUpd _ = id
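-- A hedged usage sketch (the event-polling function below is hypothetical,
-- not part of this module):
--
-- > loop :: MonadIO' m => StateData -> m ()
-- > loop st = do
-- >   evs <- pollPendingEvents        -- assumed to return [EventData]
-- >   st' <- nextState st evs
-- >   -- ... render or react using st' here ...
-- >   loop st'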
|
abbradar/MySDL
|
src/Graphics/UI/SDL/State/Advance.hs
|
bsd-3-clause
| 3,447
| 0
| 19
| 1,278
| 899
| 483
| 416
| -1
| -1
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ordinal.ID.Rules
( rules ) where
import qualified Data.Text as Text
import Prelude
import Data.String
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Ordinal.Helpers
import Duckling.Regex.Types
import Duckling.Types
ruleOrdinals :: Rule
ruleOrdinals = Rule
{ name = "ordinals"
, pattern =
[ regex "(pertama|kedua|ketiga|keempat|kelima|keenam|ketujuh|kedelapan|kesembilan|kesepuluh)"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (match:_)):_) -> case Text.toLower match of
"pertama" -> Just $ ordinal 1
"kedua" -> Just $ ordinal 2
"ketiga" -> Just $ ordinal 3
"keempat" -> Just $ ordinal 4
"kelima" -> Just $ ordinal 5
"keenam" -> Just $ ordinal 6
"ketujuh" -> Just $ ordinal 7
"kedelapan" -> Just $ ordinal 8
"kesembilan" -> Just $ ordinal 9
"kesepuluh" -> Just $ ordinal 10
_ -> Nothing
_ -> Nothing
}
ruleOrdinalsDigits :: Rule
ruleOrdinalsDigits = Rule
{ name = "ordinals (digits)"
, pattern =
[ regex "ke-0*(\\d+)"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (match:_)):_) -> ordinal <$> parseInt match
_ -> Nothing
}
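-- A hedged illustration (not taken from the Duckling test corpus): the word
-- rule above resolves "ketiga" to @ordinal 3@, while the digits rule strips
-- the "ke-" prefix and any leading zeros, so "ke-3" and "ke-003" both
-- resolve to @ordinal 3@ via 'parseInt'.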
rules :: [Rule]
rules =
[ ruleOrdinals
, ruleOrdinalsDigits
]
|
rfranek/duckling
|
Duckling/Ordinal/ID/Rules.hs
|
bsd-3-clause
| 1,708
| 0
| 17
| 397
| 406
| 224
| 182
| 43
| 12
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
module HSGen.Recompiler.Class where
import Control.Monad (ap, liftM2)
import Control.Spoon.Prim
import Data.Default.Aux
import Data.Fixable (Fix(..))
import Data.Wrapped (Wrapped(..), unwrapF, wrappedAp, defWrap)
infixl 1 $$
infixr 2 $$$
infixr 2 ###
-- | This class covers functions of type `a` whose final return type is `r`.
-- Additionally, `r` should be of the form: `Wrapped` a b
class Resolvable a r | a -> r where
-- | `resolve` resolves a "`Resolvable` a r" to `r`
resolve :: a -> r
instance Resolvable (Wrapped a b) (Wrapped a b) where
resolve :: Wrapped a b -> Wrapped a b
resolve w = w
instance (Resolvable a r) => Resolvable (t -> a) r where
resolve :: (t -> a) -> r
resolve w = resolve $ w undefined
class Compilable a r b s | a -> r, b -> s, a s -> b, r b -> a where
-- | `($$$) f wrappedFunction` essentially applies `f` directly to the
-- `Wrapped` part of `wrappedFunction`.
($$$) :: (r -> s) -> a -> b
instance Compilable (Wrapped a b) (Wrapped a b) (Wrapped c d) (Wrapped c d) where
($$$) :: (Wrapped a b -> Wrapped c d) -> Wrapped a b -> Wrapped c d
($$$) f = f
instance (Compilable a r b s) => Compilable (t -> a) r (t -> b) s where
($$$) :: (r -> s) -> (t -> a) -> t -> b
(f $$$ w) x = f $$$ w x
-- | Given a simple `Compilable` function:
--
-- @
-- plus :: Int -> Int -> Wrapped (Int -> Int -> Int) Int
-- @
--
-- You can use `($$)` to apply `plus` to both the outer (`Int` -> `Int` -> ..)
-- and inner (`Wrapped` (`Int` -> ..)) functions of `plus`. For example:
--
-- >>> plus $$ 1 $$ 2
-- Wrap 3 3
--
($$) :: Compilable a1 (Wrapped (t -> a) b) t1 (Wrapped a b) => (t -> a1) -> t -> t1
($$) w x = flip wrappedAp x $$$ w $ x
-- | See `Resolvable`
class FixResolvable a r | a -> r where
fixResolve :: a -> r
instance FixResolvable (Wrapped a b) (Wrapped a b) where
fixResolve :: Wrapped a b -> Wrapped a b
fixResolve w = w
instance (FixResolvable a r) => FixResolvable (Fix t -> a) r where
fixResolve :: (Fix t -> a) -> r
fixResolve w = fixResolve $ w Unfixed
-- | See `Compilable`
class FixCompilable a r b s | a -> r, b -> s, a s -> b, r b -> a where
(###) :: (r -> s) -> a -> b
instance FixCompilable (Wrapped a b) (Wrapped a b) (Wrapped c d) (Wrapped c d) where
(###) :: (Wrapped a b -> Wrapped c d) -> Wrapped a b -> Wrapped c d
(###) f = f
instance (FixCompilable a r b s) => FixCompilable (Fix t -> a) r (Fix t -> b) s where
(###) :: (r -> s) -> (Fix t -> a) -> Fix t -> b
(f ### w) x = f ### w x
-- | See `($$)`
(##) :: FixCompilable
a (Wrapped (r -> a1) b) (r -> r1) (Wrapped a1 b) =>
a -> r -> r1
(##) w x = flip wrappedAp x ### w $ x
-- | Apply a function to a `Compilable` from the outside
-- (just regular application) and also inside (`($$$)`)
apInOut :: Compilable a b b c => (b -> c) -> a -> c
apInOut = ap (.) ($$$)
-- | See `apInOut`
fixApInOut :: FixCompilable a b b c => (b -> c) -> a -> c
fixApInOut = ap (.) (###)
-- | Flip a compilable function
flipC :: Compilable a1 (a -> b -> c) c (b -> a -> c) =>
(a -> b -> a1) -> b -> a -> c
flipC = apInOut flip
-- | See `flipC`
fixFlipC :: FixCompilable a (a1 -> b -> c) (a1 -> b -> c) (b -> a1 -> c) =>
a -> b -> a1 -> c
fixFlipC = fixApInOut flip
replaceFW :: a -> Wrapped a b -> Wrapped a b
replaceFW f (Wrap _ r) = Wrap f r
-- | Compile a compilable function, essentially by resolving to the wrapped
-- function and return value, unwrapping the non-compilable function (i.e.
-- getting the compiled version), and replacing the original non-compilable
-- function.
compile :: (Compilable a1 (Wrapped a b) r (Wrapped a b), Resolvable a1 (Wrapped a b1)) => a1 -> r
compile = liftM2 ($$$) (replaceFW . unwrapF . resolve) id
-- | See `compile`
fixCompile :: (Compilable a1 (Wrapped a b) r (Wrapped a b), Resolvable a1 (Wrapped a b1)) => a1 -> r
fixCompile = liftM2 ($$$) (replaceFW . unwrapF . resolve) id
|
michaeljklein/CPlug
|
src/HSGen/Recompiler/Class.hs
|
bsd-3-clause
| 4,108
| 66
| 9
| 946
| 1,530
| 841
| 689
| 69
| 1
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnSource]{Main pass of renamer}
-}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module RnSource (
rnSrcDecls, addTcgDUs, findSplice
) where
#include "HsVersions.h"
import {-# SOURCE #-} RnExpr( rnLExpr )
import {-# SOURCE #-} RnSplice ( rnSpliceDecl, rnTopSpliceDecls )
import HsSyn
import FieldLabel
import RdrName
import RnTypes
import RnBinds
import RnEnv
import RnNames
import RnHsDoc ( rnHsDoc, rnMbLHsDoc )
import TcAnnotations ( annCtxt )
import TcRnMonad
import ForeignCall ( CCallTarget(..) )
import Module
import HscTypes ( Warnings(..), plusWarns )
import Class ( FunDep )
import PrelNames ( applicativeClassName, pureAName, thenAName
, monadClassName, returnMName, thenMName
, monadFailClassName, failMName, failMName_preMFP
, semigroupClassName, sappendName
, monoidClassName, mappendName
)
import Name
import NameSet
import NameEnv
import Avail
import Outputable
import Bag
import BasicTypes ( DerivStrategy, RuleName, pprRuleName )
import FastString
import SrcLoc
import DynFlags
import Util ( debugIsOn, lengthExceeds, partitionWith )
import HscTypes ( HscEnv, hsc_dflags )
import ListSetOps ( findDupsEq, removeDups, equivClasses )
import Digraph ( SCC, flattenSCC, flattenSCCs
, stronglyConnCompFromEdgedVerticesUniq )
import UniqFM
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
import Control.Arrow ( first )
import Data.List ( sortBy, mapAccumL )
import Data.Maybe ( isJust )
import qualified Data.Set as Set ( difference, fromList, toList, null )
{-
@rnSourceDecl@ `renames' declarations.
It simultaneously performs dependency analysis and precedence parsing.
It also does the following error checks:
\begin{enumerate}
\item
Checks that tyvars are used properly. This includes checking
for undefined tyvars, and tyvars in contexts that are ambiguous.
(Some of this checking has now been moved to module @TcMonoType@,
since we don't have functional dependency information at this point.)
\item
Checks that all variable occurrences are defined.
\item
Checks the @(..)@ etc constraints in the export list.
\end{enumerate}
-}
-- Brings the binders of the group into scope in the appropriate places;
-- does NOT assume that anything is in scope already
rnSrcDecls :: HsGroup RdrName -> RnM (TcGblEnv, HsGroup Name)
-- Rename a top-level HsGroup; used for normal source files *and* hs-boot files
rnSrcDecls group@(HsGroup { hs_valds = val_decls,
hs_splcds = splice_decls,
hs_tyclds = tycl_decls,
hs_derivds = deriv_decls,
hs_fixds = fix_decls,
hs_warnds = warn_decls,
hs_annds = ann_decls,
hs_fords = foreign_decls,
hs_defds = default_decls,
hs_ruleds = rule_decls,
hs_vects = vect_decls,
hs_docs = docs })
= do {
-- (A) Process the fixity declarations, creating a mapping from
-- FastStrings to FixItems.
-- Also checks for duplicates.
local_fix_env <- makeMiniFixityEnv fix_decls ;
-- (B) Bring top level binders (and their fixities) into scope,
-- *except* for the value bindings, which get done in step (D)
-- with collectHsIdBinders. However *do* include
--
-- * Class ops, data constructors, and record fields,
-- because they do not have value declarations.
          --          Also step (C) depends on datacons and record fields
--
-- * For hs-boot files, include the value signatures
-- Again, they have no value declarations
--
(tc_envs, tc_bndrs) <- getLocalNonValBinders local_fix_env group ;
setEnvs tc_envs $ do {
failIfErrsM ; -- No point in continuing if (say) we have duplicate declarations
-- (D1) Bring pattern synonyms into scope.
-- Need to do this before (D2) because rnTopBindsLHS
-- looks up those pattern synonyms (Trac #9889)
extendPatSynEnv val_decls local_fix_env $ \pat_syn_bndrs -> do {
-- (D2) Rename the left-hand sides of the value bindings.
-- This depends on everything from (B) being in scope,
-- and on (C) for resolving record wild cards.
-- It uses the fixity env from (A) to bind fixities for view patterns.
new_lhs <- rnTopBindsLHS local_fix_env val_decls ;
-- Bind the LHSes (and their fixities) in the global rdr environment
let { id_bndrs = collectHsIdBinders new_lhs } ; -- Excludes pattern-synonym binders
-- They are already in scope
traceRn "rnSrcDecls" (ppr id_bndrs) ;
tc_envs <- extendGlobalRdrEnvRn (map avail id_bndrs) local_fix_env ;
traceRn "D2" (ppr (tcg_rdr_env (fst tc_envs)));
setEnvs tc_envs $ do {
-- Now everything is in scope, as the remaining renaming assumes.
-- (E) Rename type and class decls
-- (note that value LHSes need to be in scope for default methods)
--
-- You might think that we could build proper def/use information
-- for type and class declarations, but they can be involved
-- in mutual recursion across modules, and we only do the SCC
-- analysis for them in the type checker.
-- So we content ourselves with gathering uses only; that
-- means we'll only report a declaration as unused if it isn't
-- mentioned at all. Ah well.
traceRn "Start rnTyClDecls" (ppr tycl_decls) ;
(rn_tycl_decls, src_fvs1) <- rnTyClDecls tycl_decls ;
-- (F) Rename Value declarations right-hand sides
traceRn "Start rnmono" empty ;
let { val_bndr_set = mkNameSet id_bndrs `unionNameSet` mkNameSet pat_syn_bndrs } ;
is_boot <- tcIsHsBootOrSig ;
(rn_val_decls, bind_dus) <- if is_boot
          -- For an hs-boot, use tc_bndrs (which collects the renamed
          -- signatures), since val_bndr_set is empty (there are no x = ...
-- bindings in an hs-boot.)
then rnTopBindsBoot tc_bndrs new_lhs
else rnValBindsRHS (TopSigCtxt val_bndr_set) new_lhs ;
traceRn "finish rnmono" (ppr rn_val_decls) ;
-- (G) Rename Fixity and deprecations
-- Rename fixity declarations and error if we try to
-- fix something from another module (duplicates were checked in (A))
let { all_bndrs = tc_bndrs `unionNameSet` val_bndr_set } ;
rn_fix_decls <- rnSrcFixityDecls all_bndrs fix_decls ;
-- Rename deprec decls;
-- check for duplicates and ensure that deprecated things are defined locally
-- at the moment, we don't keep these around past renaming
rn_warns <- rnSrcWarnDecls all_bndrs warn_decls ;
-- (H) Rename Everything else
(rn_rule_decls, src_fvs2) <- setXOptM LangExt.ScopedTypeVariables $
rnList rnHsRuleDecls rule_decls ;
-- Inside RULES, scoped type variables are on
(rn_vect_decls, src_fvs3) <- rnList rnHsVectDecl vect_decls ;
(rn_foreign_decls, src_fvs4) <- rnList rnHsForeignDecl foreign_decls ;
(rn_ann_decls, src_fvs5) <- rnList rnAnnDecl ann_decls ;
(rn_default_decls, src_fvs6) <- rnList rnDefaultDecl default_decls ;
(rn_deriv_decls, src_fvs7) <- rnList rnSrcDerivDecl deriv_decls ;
(rn_splice_decls, src_fvs8) <- rnList rnSpliceDecl splice_decls ;
-- Haddock docs; no free vars
rn_docs <- mapM (wrapLocM rnDocDecl) docs ;
last_tcg_env <- getGblEnv ;
-- (I) Compute the results and return
let {rn_group = HsGroup { hs_valds = rn_val_decls,
hs_splcds = rn_splice_decls,
hs_tyclds = rn_tycl_decls,
hs_derivds = rn_deriv_decls,
hs_fixds = rn_fix_decls,
hs_warnds = [], -- warns are returned in the tcg_env
-- (see below) not in the HsGroup
hs_fords = rn_foreign_decls,
hs_annds = rn_ann_decls,
hs_defds = rn_default_decls,
hs_ruleds = rn_rule_decls,
hs_vects = rn_vect_decls,
hs_docs = rn_docs } ;
tcf_bndrs = hsTyClForeignBinders rn_tycl_decls rn_foreign_decls ;
other_def = (Just (mkNameSet tcf_bndrs), emptyNameSet) ;
other_fvs = plusFVs [src_fvs1, src_fvs2, src_fvs3, src_fvs4, src_fvs5,
src_fvs6, src_fvs7, src_fvs8] ;
-- It is tiresome to gather the binders from type and class decls
src_dus = [other_def] `plusDU` bind_dus `plusDU` usesOnly other_fvs ;
-- Instance decls may have occurrences of things bound in bind_dus
-- so we must put other_fvs last
final_tcg_env = let tcg_env' = (last_tcg_env `addTcgDUs` src_dus)
in -- we return the deprecs in the env, not in the HsGroup above
tcg_env' { tcg_warns = tcg_warns tcg_env' `plusWarns` rn_warns };
} ;
traceRn "last" (ppr (tcg_rdr_env final_tcg_env)) ;
traceRn "finish rnSrc" (ppr rn_group) ;
traceRn "finish Dus" (ppr src_dus ) ;
return (final_tcg_env, rn_group)
}}}}
addTcgDUs :: TcGblEnv -> DefUses -> TcGblEnv
-- This function could be defined lower down in the module hierarchy,
-- but there doesn't seem to be anywhere very logical to put it.
addTcgDUs tcg_env dus = tcg_env { tcg_dus = tcg_dus tcg_env `plusDU` dus }
rnList :: (a -> RnM (b, FreeVars)) -> [Located a] -> RnM ([Located b], FreeVars)
rnList f xs = mapFvRn (wrapLocFstM f) xs
{-
*********************************************************
* *
HsDoc stuff
* *
*********************************************************
-}
rnDocDecl :: DocDecl -> RnM DocDecl
rnDocDecl (DocCommentNext doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentNext rn_doc)
rnDocDecl (DocCommentPrev doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentPrev rn_doc)
rnDocDecl (DocCommentNamed str doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentNamed str rn_doc)
rnDocDecl (DocGroup lev doc) = do
rn_doc <- rnHsDoc doc
return (DocGroup lev rn_doc)
{-
*********************************************************
* *
Source-code fixity declarations
* *
*********************************************************
-}
rnSrcFixityDecls :: NameSet -> [LFixitySig RdrName] -> RnM [LFixitySig Name]
-- Rename the fixity decls, so we can put
-- the renamed decls in the renamed syntax tree
-- Errors if the thing being fixed is not defined locally.
--
-- The returned FixitySigs are not actually used for anything,
-- except perhaps the GHCi API
rnSrcFixityDecls bndr_set fix_decls
= do fix_decls <- mapM rn_decl fix_decls
return (concat fix_decls)
where
sig_ctxt = TopSigCtxt bndr_set
rn_decl :: LFixitySig RdrName -> RnM [LFixitySig Name]
-- GHC extension: look up both the tycon and data con
-- for con-like things; hence returning a list
-- If neither are in scope, report an error; otherwise
-- return a fixity sig for each (slightly odd)
rn_decl (L loc (FixitySig fnames fixity))
= do names <- mapM lookup_one fnames
return [ L loc (FixitySig name fixity)
| name <- names ]
lookup_one :: Located RdrName -> RnM [Located Name]
lookup_one (L name_loc rdr_name)
= setSrcSpan name_loc $
-- this lookup will fail if the definition isn't local
do names <- lookupLocalTcNames sig_ctxt what rdr_name
return [ L name_loc name | (_, name) <- names ]
what = text "fixity signature"
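-- A hedged example of the "fixity sig for each" behaviour (declarations
-- made up for illustration): given
--
--    data T = Int :+: Int
--    type family (:+:) a b
--    infixr 6 :+:
--
-- the single fixity declaration is renamed to one FixitySig for the data
-- constructor (:+:) and one for the type constructor (:+:), because
-- lookup_one returns every locally defined Name with that occurrence name.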
{-
*********************************************************
* *
Source-code deprecations declarations
* *
*********************************************************
Check that the deprecated names are defined, are defined locally, and
that there are no duplicate deprecations.
It's only for imported deprecations, dealt with in RnIfaces, that we
gather them together.
-}
-- checks that the deprecations are defined locally, and that there are no duplicates
rnSrcWarnDecls :: NameSet -> [LWarnDecls RdrName] -> RnM Warnings
rnSrcWarnDecls _ []
= return NoWarnings
rnSrcWarnDecls bndr_set decls'
= do { -- check for duplicates
; mapM_ (\ dups -> let (L loc rdr:lrdr':_) = dups
in addErrAt loc (dupWarnDecl lrdr' rdr))
warn_rdr_dups
; pairs_s <- mapM (addLocM rn_deprec) decls
; return (WarnSome ((concat pairs_s))) }
where
decls = concatMap (\(L _ d) -> wd_warnings d) decls'
sig_ctxt = TopSigCtxt bndr_set
rn_deprec (Warning rdr_names txt)
-- ensures that the names are defined locally
= do { names <- concatMapM (lookupLocalTcNames sig_ctxt what . unLoc)
rdr_names
; return [(rdrNameOcc rdr, txt) | (rdr, _) <- names] }
what = text "deprecation"
warn_rdr_dups = findDupRdrNames $ concatMap (\(L _ (Warning ns _)) -> ns)
decls
findDupRdrNames :: [Located RdrName] -> [[Located RdrName]]
findDupRdrNames = findDupsEq (\ x -> \ y -> rdrNameOcc (unLoc x) == rdrNameOcc (unLoc y))
-- look for duplicates among the OccNames;
-- we check that the names are defined above
-- invariant: the lists returned by findDupsEq always have at least two elements
dupWarnDecl :: Located RdrName -> RdrName -> SDoc
-- Located RdrName -> DeprecDecl RdrName -> SDoc
dupWarnDecl (L loc _) rdr_name
= vcat [text "Multiple warning declarations for" <+> quotes (ppr rdr_name),
text "also at " <+> ppr loc]
{-
*********************************************************
* *
\subsection{Annotation declarations}
* *
*********************************************************
-}
rnAnnDecl :: AnnDecl RdrName -> RnM (AnnDecl Name, FreeVars)
rnAnnDecl ann@(HsAnnotation s provenance expr)
= addErrCtxt (annCtxt ann) $
do { (provenance', provenance_fvs) <- rnAnnProvenance provenance
; (expr', expr_fvs) <- setStage (Splice Untyped) $
rnLExpr expr
; return (HsAnnotation s provenance' expr',
provenance_fvs `plusFV` expr_fvs) }
rnAnnProvenance :: AnnProvenance RdrName -> RnM (AnnProvenance Name, FreeVars)
rnAnnProvenance provenance = do
provenance' <- traverse lookupTopBndrRn provenance
return (provenance', maybe emptyFVs unitFV (annProvenanceName_maybe provenance'))
{-
*********************************************************
* *
\subsection{Default declarations}
* *
*********************************************************
-}
rnDefaultDecl :: DefaultDecl RdrName -> RnM (DefaultDecl Name, FreeVars)
rnDefaultDecl (DefaultDecl tys)
= do { (tys', fvs) <- rnLHsTypes doc_str tys
; return (DefaultDecl tys', fvs) }
where
doc_str = DefaultDeclCtx
{-
*********************************************************
* *
\subsection{Foreign declarations}
* *
*********************************************************
-}
rnHsForeignDecl :: ForeignDecl RdrName -> RnM (ForeignDecl Name, FreeVars)
rnHsForeignDecl (ForeignImport { fd_name = name, fd_sig_ty = ty, fd_fi = spec })
= do { topEnv :: HscEnv <- getTopEnv
; name' <- lookupLocatedTopBndrRn name
; (ty', fvs) <- rnHsSigType (ForeignDeclCtx name) ty
-- Mark any PackageTarget style imports as coming from the current package
; let unitId = thisPackage $ hsc_dflags topEnv
spec' = patchForeignImport unitId spec
; return (ForeignImport { fd_name = name', fd_sig_ty = ty'
, fd_co = noForeignImportCoercionYet
, fd_fi = spec' }, fvs) }
rnHsForeignDecl (ForeignExport { fd_name = name, fd_sig_ty = ty, fd_fe = spec })
= do { name' <- lookupLocatedOccRn name
; (ty', fvs) <- rnHsSigType (ForeignDeclCtx name) ty
; return (ForeignExport { fd_name = name', fd_sig_ty = ty'
, fd_co = noForeignExportCoercionYet
, fd_fe = spec }
, fvs `addOneFV` unLoc name') }
-- NB: a foreign export is an *occurrence site* for name, so
-- we add it to the free-variable list. It might, for example,
-- be imported from another module
-- | For Windows DLLs we need to know what packages imported symbols are from
-- to generate correct calls. Imported symbols are tagged with the current
-- package, so if they get inlined across a package boundary we'll still
-- know where they're from.
--
patchForeignImport :: UnitId -> ForeignImport -> ForeignImport
patchForeignImport unitId (CImport cconv safety fs spec src)
= CImport cconv safety fs (patchCImportSpec unitId spec) src
patchCImportSpec :: UnitId -> CImportSpec -> CImportSpec
patchCImportSpec unitId spec
= case spec of
CFunction callTarget -> CFunction $ patchCCallTarget unitId callTarget
_ -> spec
patchCCallTarget :: UnitId -> CCallTarget -> CCallTarget
patchCCallTarget unitId callTarget =
case callTarget of
StaticTarget src label Nothing isFun
-> StaticTarget src label (Just unitId) isFun
_ -> callTarget
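-- A hedged illustration (field names schematic): a call target parsed as
-- @StaticTarget src "foo" Nothing isFun@ is rewritten here to
-- @StaticTarget src "foo" (Just unitId) isFun@, so that if the foreign call
-- is later inlined into another package we still know which DLL "foo"
-- lives in.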
{-
*********************************************************
* *
\subsection{Instance declarations}
* *
*********************************************************
-}
rnSrcInstDecl :: InstDecl RdrName -> RnM (InstDecl Name, FreeVars)
rnSrcInstDecl (TyFamInstD { tfid_inst = tfi })
= do { (tfi', fvs) <- rnTyFamInstDecl Nothing tfi
; return (TyFamInstD { tfid_inst = tfi' }, fvs) }
rnSrcInstDecl (DataFamInstD { dfid_inst = dfi })
= do { (dfi', fvs) <- rnDataFamInstDecl Nothing dfi
; return (DataFamInstD { dfid_inst = dfi' }, fvs) }
rnSrcInstDecl (ClsInstD { cid_inst = cid })
= do { (cid', fvs) <- rnClsInstDecl cid
; return (ClsInstD { cid_inst = cid' }, fvs) }
-- | Warn about non-canonical typeclass instance declarations
--
-- A "non-canonical" instance definition can occur for instances of a
-- class which redundantly defines an operation its superclass
-- provides as well (c.f. `return`/`pure`). In such cases, a canonical
-- instance is one where the subclass inherits its method
-- implementation from its superclass instance (usually the subclass
-- has a default method implementation to that effect). Consequently,
-- a non-canonical instance occurs when this is not the case.
--
-- See also descriptions of 'checkCanonicalMonadInstances' and
-- 'checkCanonicalMonoidInstances'
checkCanonicalInstances :: Name -> LHsSigType Name -> LHsBinds Name -> RnM ()
checkCanonicalInstances cls poly_ty mbinds = do
whenWOptM Opt_WarnNonCanonicalMonadInstances
checkCanonicalMonadInstances
whenWOptM Opt_WarnNonCanonicalMonadFailInstances
checkCanonicalMonadFailInstances
whenWOptM Opt_WarnNonCanonicalMonoidInstances
checkCanonicalMonoidInstances
where
-- | Warn about unsound/non-canonical 'Applicative'/'Monad' instance
-- declarations. Specifically, the following conditions are verified:
--
-- In 'Monad' instances declarations:
--
-- * If 'return' is overridden it must be canonical (i.e. @return = pure@)
-- * If '(>>)' is overridden it must be canonical (i.e. @(>>) = (*>)@)
--
-- In 'Applicative' instance declarations:
--
-- * Warn if 'pure' is defined backwards (i.e. @pure = return@).
-- * Warn if '(*>)' is defined backwards (i.e. @(*>) = (>>)@).
--
checkCanonicalMonadInstances
| cls == applicativeClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == pureAName, isAliasMG mg == Just returnMName
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonadInstances "pure" "return"
| name == thenAName, isAliasMG mg == Just thenMName
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonadInstances "(*>)" "(>>)"
_ -> return ()
| cls == monadClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == returnMName, isAliasMG mg /= Just pureAName
-> addWarnNonCanonicalMethod2
Opt_WarnNonCanonicalMonadInstances "return" "pure"
| name == thenMName, isAliasMG mg /= Just thenAName
-> addWarnNonCanonicalMethod2
Opt_WarnNonCanonicalMonadInstances "(>>)" "(*>)"
_ -> return ()
| otherwise = return ()
-- | Warn about unsound/non-canonical 'Monad'/'MonadFail' instance
-- declarations. Specifically, the following conditions are verified:
--
-- In 'Monad' instances declarations:
--
-- * If 'fail' is overridden it must be canonical
-- (i.e. @fail = Control.Monad.Fail.fail@)
--
-- In 'MonadFail' instance declarations:
--
-- * Warn if 'fail' is defined backwards
-- (i.e. @fail = Control.Monad.fail@).
--
checkCanonicalMonadFailInstances
| cls == monadFailClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == failMName, isAliasMG mg == Just failMName_preMFP
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonadFailInstances "fail"
"Control.Monad.fail"
_ -> return ()
| cls == monadClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == failMName_preMFP, isAliasMG mg /= Just failMName
-> addWarnNonCanonicalMethod2
Opt_WarnNonCanonicalMonadFailInstances "fail"
"Control.Monad.Fail.fail"
_ -> return ()
| otherwise = return ()
-- | Check whether Monoid(mappend) is defined in terms of
-- Semigroup((<>)) (and not the other way round). Specifically,
-- the following conditions are verified:
--
-- In 'Monoid' instances declarations:
--
-- * If 'mappend' is overridden it must be canonical
-- (i.e. @mappend = (<>)@)
--
-- In 'Semigroup' instance declarations:
--
-- * Warn if '(<>)' is defined backwards (i.e. @(<>) = mappend@).
--
checkCanonicalMonoidInstances
| cls == semigroupClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == sappendName, isAliasMG mg == Just mappendName
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonoidInstances "(<>)" "mappend"
_ -> return ()
| cls == monoidClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == mappendName, isAliasMG mg /= Just sappendName
-> addWarnNonCanonicalMethod2NoDefault
Opt_WarnNonCanonicalMonoidInstances "mappend" "(<>)"
_ -> return ()
| otherwise = return ()
-- | test whether MatchGroup represents a trivial \"lhsName = rhsName\"
-- binding, and return @Just rhsName@ if this is the case
isAliasMG :: MatchGroup Name (LHsExpr Name) -> Maybe Name
isAliasMG MG {mg_alts = L _ [L _ (Match { m_pats = [], m_grhss = grhss })]}
| GRHSs [L _ (GRHS [] body)] lbinds <- grhss
, L _ EmptyLocalBinds <- lbinds
, L _ (HsVar (L _ rhsName)) <- body = Just rhsName
isAliasMG _ = Nothing
-- got "lhs = rhs" but expected something different
addWarnNonCanonicalMethod1 flag lhs rhs = do
addWarn (Reason flag) $ vcat
[ text "Noncanonical" <+>
quotes (text (lhs ++ " = " ++ rhs)) <+>
text "definition detected"
, instDeclCtxt1 poly_ty
, text "Move definition from" <+>
quotes (text rhs) <+>
text "to" <+> quotes (text lhs)
]
-- expected "lhs = rhs" but got something else
addWarnNonCanonicalMethod2 flag lhs rhs = do
addWarn (Reason flag) $ vcat
[ text "Noncanonical" <+>
quotes (text lhs) <+>
text "definition detected"
, instDeclCtxt1 poly_ty
, text "Either remove definition for" <+>
quotes (text lhs) <+> text "or define as" <+>
quotes (text (lhs ++ " = " ++ rhs))
]
-- like above, but method has no default impl
addWarnNonCanonicalMethod2NoDefault flag lhs rhs = do
addWarn (Reason flag) $ vcat
[ text "Noncanonical" <+>
quotes (text lhs) <+>
text "definition detected"
, instDeclCtxt1 poly_ty
, text "Define as" <+>
quotes (text (lhs ++ " = " ++ rhs))
]
-- stolen from TcInstDcls
instDeclCtxt1 :: LHsSigType Name -> SDoc
instDeclCtxt1 hs_inst_ty
= inst_decl_ctxt (ppr (getLHsInstDeclHead hs_inst_ty))
inst_decl_ctxt :: SDoc -> SDoc
inst_decl_ctxt doc = hang (text "in the instance declaration for")
2 (quotes doc <> text ".")
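-- A hedged example of what the check above warns about (the instance heads
-- are illustrative only):
--
--    instance Applicative M where
--      pure  = return          -- warned: 'pure' defined backwards
--      (<*>) = ...
--
--    instance Monad M where
--      return  = pure          -- canonical: no warning
--      m >>= k = ...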
rnClsInstDecl :: ClsInstDecl RdrName -> RnM (ClsInstDecl Name, FreeVars)
rnClsInstDecl (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = mbinds
, cid_sigs = uprags, cid_tyfam_insts = ats
, cid_overlap_mode = oflag
, cid_datafam_insts = adts })
= do { (inst_ty', inst_fvs) <- rnLHsInstType (text "an instance declaration") inst_ty
; let (ktv_names, _, head_ty') = splitLHsInstDeclTy inst_ty'
; let cls = case hsTyGetAppHead_maybe head_ty' of
Nothing -> mkUnboundName (mkTcOccFS (fsLit "<class>"))
Just (L _ cls, _) -> cls
-- rnLHsInstType has added an error message
-- if hsTyGetAppHead_maybe fails
-- Rename the bindings
-- The typechecker (not the renamer) checks that all
-- the bindings are for the right class
-- (Slightly strangely) when scoped type variables are on, the
-- forall-d tyvars scope over the method bindings too
; (mbinds', uprags', meth_fvs) <- rnMethodBinds False cls ktv_names mbinds uprags
; checkCanonicalInstances cls inst_ty' mbinds'
-- Rename the associated types, and type signatures
-- Both need to have the instance type variables in scope
; traceRn "rnSrcInstDecl" (ppr inst_ty' $$ ppr ktv_names)
; ((ats', adts'), more_fvs)
<- extendTyVarEnvFVRn ktv_names $
do { (ats', at_fvs) <- rnATInstDecls rnTyFamInstDecl cls ktv_names ats
; (adts', adt_fvs) <- rnATInstDecls rnDataFamInstDecl cls ktv_names adts
; return ( (ats', adts'), at_fvs `plusFV` adt_fvs) }
; let all_fvs = meth_fvs `plusFV` more_fvs
`plusFV` inst_fvs
; return (ClsInstDecl { cid_poly_ty = inst_ty', cid_binds = mbinds'
, cid_sigs = uprags', cid_tyfam_insts = ats'
, cid_overlap_mode = oflag
, cid_datafam_insts = adts' },
all_fvs) }
-- We return the renamed associated data type declarations so
-- that they can be entered into the list of type declarations
-- for the binding group, but we also keep a copy in the instance.
-- The latter is needed for well-formedness checks in the type
-- checker (eg, to ensure that all ATs of the instance actually
-- receive a declaration).
-- NB: Even the copies in the instance declaration carry copies of
-- the instance context after renaming. This is a bit
-- strange, but should not matter (and it would be more work
-- to remove the context).
rnFamInstDecl :: HsDocContext
-> Maybe (Name, [Name]) -- Nothing => not associated
-- Just (cls,tvs) => associated,
-- and gives class and tyvars of the
                                        -- parent instance decl
-> Located RdrName
-> HsTyPats RdrName
-> rhs
-> (HsDocContext -> rhs -> RnM (rhs', FreeVars))
-> RnM (Located Name, HsTyPats Name, rhs', FreeVars)
rnFamInstDecl doc mb_cls tycon (HsIB { hsib_body = pats }) payload rnPayload
= do { tycon' <- lookupFamInstName (fmap fst mb_cls) tycon
; let loc = case pats of
[] -> pprPanic "rnFamInstDecl" (ppr tycon)
(L loc _ : []) -> loc
(L loc _ : ps) -> combineSrcSpans loc (getLoc (last ps))
; pat_kity_vars_with_dups <- extractHsTysRdrTyVarsDups pats
-- Use the "...Dups" form because it's needed
             -- below to report unused binders on the LHS
; var_names <- mapM (newTyVarNameRn mb_cls . L loc . unLoc) $
freeKiTyVarsAllVars $
rmDupsInRdrTyVars pat_kity_vars_with_dups
-- All the free vars of the family patterns
-- with a sensible binding location
; ((pats', payload'), fvs)
<- bindLocalNamesFV var_names $
do { (pats', pat_fvs) <- rnLHsTypes (FamPatCtx tycon) pats
; (payload', rhs_fvs) <- rnPayload doc payload
-- Report unused binders on the LHS
-- See Note [Unused type variables in family instances]
; let groups :: [[Located RdrName]]
groups = equivClasses cmpLocated $
freeKiTyVarsAllVars pat_kity_vars_with_dups
; tv_nms_dups <- mapM (lookupOccRn . unLoc) $
[ tv | (tv:_:_) <- groups ]
-- Add to the used variables
-- a) any variables that appear *more than once* on the LHS
-- e.g. F a Int a = Bool
-- b) for associated instances, the variables
-- of the instance decl. See
-- Note [Unused type variables in family instances]
; let tv_nms_used = extendNameSetList rhs_fvs $
inst_tvs ++ tv_nms_dups
inst_tvs = case mb_cls of
Nothing -> []
Just (_, inst_tvs) -> inst_tvs
; warnUnusedTypePatterns var_names tv_nms_used
-- See Note [Renaming associated types]
; let bad_tvs = case mb_cls of
Nothing -> []
Just (_,cls_tkvs) -> filter is_bad cls_tkvs
var_name_set = mkNameSet var_names
is_bad cls_tkv = cls_tkv `elemNameSet` rhs_fvs
&& not (cls_tkv `elemNameSet` var_name_set)
; unless (null bad_tvs) (badAssocRhs bad_tvs)
; return ((pats', payload'), rhs_fvs `plusFV` pat_fvs) }
; let anon_wcs = concatMap collectAnonWildCards pats'
all_ibs = anon_wcs ++ var_names
-- all_ibs: include anonymous wildcards in the implicit
                 -- binders. In a type pattern they behave just like any
                 -- other type variable except for being anonymous. See
-- Note [Wildcards in family instances]
all_fvs = fvs `addOneFV` unLoc tycon'
; return (tycon',
HsIB { hsib_body = pats'
, hsib_vars = all_ibs },
payload',
all_fvs) }
-- type instance => use, hence addOneFV
rnTyFamInstDecl :: Maybe (Name, [Name])
-> TyFamInstDecl RdrName
-> RnM (TyFamInstDecl Name, FreeVars)
rnTyFamInstDecl mb_cls (TyFamInstDecl { tfid_eqn = L loc eqn })
= do { (eqn', fvs) <- rnTyFamInstEqn mb_cls eqn
; return (TyFamInstDecl { tfid_eqn = L loc eqn'
, tfid_fvs = fvs }, fvs) }
rnTyFamInstEqn :: Maybe (Name, [Name])
-> TyFamInstEqn RdrName
-> RnM (TyFamInstEqn Name, FreeVars)
rnTyFamInstEqn mb_cls (TyFamEqn { tfe_tycon = tycon
, tfe_pats = pats
, tfe_rhs = rhs })
= do { (tycon', pats', rhs', fvs) <-
rnFamInstDecl (TySynCtx tycon) mb_cls tycon pats rhs rnTySyn
; return (TyFamEqn { tfe_tycon = tycon'
, tfe_pats = pats'
, tfe_rhs = rhs' }, fvs) }
rnTyFamDefltEqn :: Name
-> TyFamDefltEqn RdrName
-> RnM (TyFamDefltEqn Name, FreeVars)
rnTyFamDefltEqn cls (TyFamEqn { tfe_tycon = tycon
, tfe_pats = tyvars
, tfe_rhs = rhs })
= bindHsQTyVars ctx Nothing (Just cls) [] tyvars $ \ tyvars' _ ->
do { tycon' <- lookupFamInstName (Just cls) tycon
; (rhs', fvs) <- rnLHsType ctx rhs
; return (TyFamEqn { tfe_tycon = tycon'
, tfe_pats = tyvars'
, tfe_rhs = rhs' }, fvs) }
where
ctx = TyFamilyCtx tycon
rnDataFamInstDecl :: Maybe (Name, [Name])
-> DataFamInstDecl RdrName
-> RnM (DataFamInstDecl Name, FreeVars)
rnDataFamInstDecl mb_cls (DataFamInstDecl { dfid_tycon = tycon
, dfid_pats = pats
, dfid_defn = defn })
= do { (tycon', pats', (defn', _), fvs) <-
rnFamInstDecl (TyDataCtx tycon) mb_cls tycon pats defn rnDataDefn
; return (DataFamInstDecl { dfid_tycon = tycon'
, dfid_pats = pats'
, dfid_defn = defn'
, dfid_fvs = fvs }, fvs) }
-- Renaming of the associated types in instances.
-- Rename associated type family decl in class
rnATDecls :: Name -- Class
-> [LFamilyDecl RdrName]
-> RnM ([LFamilyDecl Name], FreeVars)
rnATDecls cls at_decls
= rnList (rnFamDecl (Just cls)) at_decls
rnATInstDecls :: (Maybe (Name, [Name]) -> -- The function that renames
decl RdrName -> -- an instance. rnTyFamInstDecl
RnM (decl Name, FreeVars)) -- or rnDataFamInstDecl
-> Name -- Class
-> [Name]
-> [Located (decl RdrName)]
-> RnM ([Located (decl Name)], FreeVars)
-- Used for data and type family defaults in a class decl
-- and the family instance declarations in an instance
--
-- NB: We allow duplicate associated-type decls;
-- See Note [Associated type instances] in TcInstDcls
rnATInstDecls rnFun cls tv_ns at_insts
= rnList (rnFun (Just (cls, tv_ns))) at_insts
-- See Note [Renaming associated types]
{- Note [Wildcards in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Wild cards can be used in type/data family instance declarations to indicate
that the name of a type variable doesn't matter. Each wild card will be
replaced with a new unique type variable. For instance:
type family F a b :: *
type instance F Int _ = Int
is the same as
type family F a b :: *
type instance F Int b = Int
This is implemented as follows: during renaming anonymous wild cards
'_' are given freshly generated names. These names are collected after
renaming (rnFamInstDecl) and used to make new type variables during
type checking (tc_fam_ty_pats). One should not confuse these wild
cards with the ones from partial type signatures. The latter generate
fresh meta-variables whereas the former generate fresh skolems.
Note [Unused type variables in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the flag -fwarn-unused-type-patterns is on, the compiler reports
warnings about unused type variables in type-family instances. A
type variable is considered used (i.e. cannot be turned into a wildcard)
when
* it occurs on the RHS of the family instance
e.g. type instance F a b = a -- a is used on the RHS
* it occurs multiple times in the patterns on the LHS
e.g. type instance F a a = Int -- a appears more than once on LHS
* it is one of the instance-decl variables, for associated types
e.g. instance C (a,b) where
type T (a,b) = a
Here the type pattern in the type instance must be the same as that
for the class instance, so
type T (a,_) = a
would be rejected. So we should not complain about an unused variable b
As usual, the warnings are not reported for type variables with names
beginning with an underscore.
Extra-constraints wild cards are not supported in type/data family
instance declarations.
Relevant tickets: #3699, #10586, #10982 and #11451.
Note [Renaming associated types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Check that the RHS of the decl mentions only type variables
bound on the LHS. For example, this is not ok
class C a b where
type F a x :: *
instance C (p,q) r where
type F (p,q) x = (x, r) -- BAD: mentions 'r'
c.f. Trac #5515
The same thing applies to kind variables, of course (Trac #7938, #9574):
class Funct f where
type Codomain f :: *
instance Funct ('KProxy :: KProxy o) where
type Codomain 'KProxy = NatTr (Proxy :: o -> *)
Here 'o' is mentioned on the RHS of the Codomain function, but
not on the LHS.
All this applies only for *instance* declarations. In *class*
declarations there is no RHS to worry about, and the class variables
can all be in scope (Trac #5862):
class Category (x :: k -> k -> *) where
type Ob x :: k -> Constraint
id :: Ob x a => x a a
(.) :: (Ob x a, Ob x b, Ob x c) => x b c -> x a b -> x a c
Here 'k' is in scope in the kind signature, just like 'x'.
-}
{-
*********************************************************
* *
\subsection{Stand-alone deriving declarations}
* *
*********************************************************
-}
rnSrcDerivDecl :: DerivDecl RdrName -> RnM (DerivDecl Name, FreeVars)
rnSrcDerivDecl (DerivDecl ty deriv_strat overlap)
= do { standalone_deriv_ok <- xoptM LangExt.StandaloneDeriving
; deriv_strats_ok <- xoptM LangExt.DerivingStrategies
; unless standalone_deriv_ok (addErr standaloneDerivErr)
; failIfTc (isJust deriv_strat && not deriv_strats_ok) $
illegalDerivStrategyErr $ fmap unLoc deriv_strat
; (ty', fvs) <- rnLHsInstType (text "In a deriving declaration") ty
; return (DerivDecl ty' deriv_strat overlap, fvs) }
standaloneDerivErr :: SDoc
standaloneDerivErr
= hang (text "Illegal standalone deriving declaration")
2 (text "Use StandaloneDeriving to enable this extension")
{-
*********************************************************
* *
\subsection{Rules}
* *
*********************************************************
-}
rnHsRuleDecls :: RuleDecls RdrName -> RnM (RuleDecls Name, FreeVars)
rnHsRuleDecls (HsRules src rules)
= do { (rn_rules,fvs) <- rnList rnHsRuleDecl rules
; return (HsRules src rn_rules,fvs) }
rnHsRuleDecl :: RuleDecl RdrName -> RnM (RuleDecl Name, FreeVars)
rnHsRuleDecl (HsRule rule_name act vars lhs _fv_lhs rhs _fv_rhs)
= do { let rdr_names_w_loc = map get_var vars
; checkDupRdrNames rdr_names_w_loc
; checkShadowedRdrNames rdr_names_w_loc
; names <- newLocalBndrsRn rdr_names_w_loc
; bindHsRuleVars (snd $ unLoc rule_name) vars names $ \ vars' ->
do { (lhs', fv_lhs') <- rnLExpr lhs
; (rhs', fv_rhs') <- rnLExpr rhs
; checkValidRule (snd $ unLoc rule_name) names lhs' fv_lhs'
; return (HsRule rule_name act vars' lhs' fv_lhs' rhs' fv_rhs',
fv_lhs' `plusFV` fv_rhs') } }
where
get_var (L _ (RuleBndrSig v _)) = v
get_var (L _ (RuleBndr v)) = v
bindHsRuleVars :: RuleName -> [LRuleBndr RdrName] -> [Name]
-> ([LRuleBndr Name] -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
bindHsRuleVars rule_name vars names thing_inside
= go vars names $ \ vars' ->
bindLocalNamesFV names (thing_inside vars')
where
doc = RuleCtx rule_name
go (L l (RuleBndr (L loc _)) : vars) (n : ns) thing_inside
= go vars ns $ \ vars' ->
thing_inside (L l (RuleBndr (L loc n)) : vars')
go (L l (RuleBndrSig (L loc _) bsig) : vars) (n : ns) thing_inside
= rnHsSigWcTypeScoped doc bsig $ \ bsig' ->
go vars ns $ \ vars' ->
thing_inside (L l (RuleBndrSig (L loc n) bsig') : vars')
go [] [] thing_inside = thing_inside []
go vars names _ = pprPanic "bindRuleVars" (ppr vars $$ ppr names)
{-
Note [Rule LHS validity checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Check the shape of a transformation rule LHS. Currently we only allow
LHSs of the form @(f e1 .. en)@, where @f@ is not one of the
@forall@'d variables.
We used to restrict the form of the 'ei' to prevent you writing rules
with LHSs with a complicated desugaring (and hence unlikely to match);
(e.g. a case expression is not allowed: too elaborate.)
But there are legitimate non-trivial args ei, like sections and
lambdas. So it seems simpler not to check at all, and that is why
check_e is commented out.
-}
checkValidRule :: FastString -> [Name] -> LHsExpr Name -> NameSet -> RnM ()
checkValidRule rule_name ids lhs' fv_lhs'
= do { -- Check for the form of the LHS
case (validRuleLhs ids lhs') of
Nothing -> return ()
Just bad -> failWithTc (badRuleLhsErr rule_name lhs' bad)
-- Check that LHS vars are all bound
; let bad_vars = [var | var <- ids, not (var `elemNameSet` fv_lhs')]
; mapM_ (addErr . badRuleVar rule_name) bad_vars }
validRuleLhs :: [Name] -> LHsExpr Name -> Maybe (HsExpr Name)
-- Nothing => OK
-- Just e => Not ok, and e is the offending sub-expression
validRuleLhs foralls lhs
= checkl lhs
where
checkl (L _ e) = check e
check (OpApp e1 op _ e2) = checkl op `mplus` checkl_e e1 `mplus` checkl_e e2
check (HsApp e1 e2) = checkl e1 `mplus` checkl_e e2
check (HsAppType e _) = checkl e
check (HsVar (L _ v)) | v `notElem` foralls = Nothing
check other = Just other -- Failure
-- Check an argument
checkl_e (L _ _e) = Nothing -- Was (check_e e); see Note [Rule LHS validity checking]
{- Commented out; see Note [Rule LHS validity checking] above
check_e (HsVar v) = Nothing
check_e (HsPar e) = checkl_e e
check_e (HsLit e) = Nothing
check_e (HsOverLit e) = Nothing
check_e (OpApp e1 op _ e2) = checkl_e e1 `mplus` checkl_e op `mplus` checkl_e e2
check_e (HsApp e1 e2) = checkl_e e1 `mplus` checkl_e e2
check_e (NegApp e _) = checkl_e e
check_e (ExplicitList _ es) = checkl_es es
check_e other = Just other -- Fails
checkl_es es = foldr (mplus . checkl_e) Nothing es
-}
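-- A hedged illustration of the LHS shape check (the rules are made up for
-- this note, not taken from the GHC test suite):
--
--    {-# RULES "map/map" forall f g xs. map f (map g xs) = map (f . g) xs #-}
--      -- accepted: the head of the LHS is 'map', which is not forall'd
--
--    {-# RULES "bad" forall f x. f x = x #-}
--      -- rejected by validRuleLhs: the LHS head 'f' is a forall'd variable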
badRuleVar :: FastString -> Name -> SDoc
badRuleVar name var
= sep [text "Rule" <+> doubleQuotes (ftext name) <> colon,
text "Forall'd variable" <+> quotes (ppr var) <+>
text "does not appear on left hand side"]
badRuleLhsErr :: FastString -> LHsExpr Name -> HsExpr Name -> SDoc
badRuleLhsErr name lhs bad_e
= sep [text "Rule" <+> pprRuleName name <> colon,
nest 4 (vcat [err,
text "in left-hand side:" <+> ppr lhs])]
$$
text "LHS must be of form (f e1 .. en) where f is not forall'd"
where
err = case bad_e of
HsUnboundVar uv -> text "Not in scope:" <+> ppr uv
_ -> text "Illegal expression:" <+> ppr bad_e
{-
*********************************************************
* *
\subsection{Vectorisation declarations}
* *
*********************************************************
-}
rnHsVectDecl :: VectDecl RdrName -> RnM (VectDecl Name, FreeVars)
-- FIXME: For the moment, the right-hand side is restricted to be a variable as we cannot properly
-- typecheck a complex right-hand side without invoking 'vectType' from the vectoriser.
rnHsVectDecl (HsVect s var rhs@(L _ (HsVar _)))
= do { var' <- lookupLocatedOccRn var
; (rhs', fv_rhs) <- rnLExpr rhs
; return (HsVect s var' rhs', fv_rhs `addOneFV` unLoc var')
}
rnHsVectDecl (HsVect _ _var _rhs)
= failWith $ vcat
[ text "IMPLEMENTATION RESTRICTION: right-hand side of a VECTORISE pragma"
, text "must be an identifier"
]
rnHsVectDecl (HsNoVect s var)
= do { var' <- lookupLocatedTopBndrRn var -- only applies to local (not imported) names
; return (HsNoVect s var', unitFV (unLoc var'))
}
rnHsVectDecl (HsVectTypeIn s isScalar tycon Nothing)
= do { tycon' <- lookupLocatedOccRn tycon
; return (HsVectTypeIn s isScalar tycon' Nothing, unitFV (unLoc tycon'))
}
rnHsVectDecl (HsVectTypeIn s isScalar tycon (Just rhs_tycon))
= do { tycon' <- lookupLocatedOccRn tycon
; rhs_tycon' <- lookupLocatedOccRn rhs_tycon
; return ( HsVectTypeIn s isScalar tycon' (Just rhs_tycon')
, mkFVs [unLoc tycon', unLoc rhs_tycon'])
}
rnHsVectDecl (HsVectTypeOut _ _ _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectTypeOut'"
rnHsVectDecl (HsVectClassIn s cls)
= do { cls' <- lookupLocatedOccRn cls
; return (HsVectClassIn s cls', unitFV (unLoc cls'))
}
rnHsVectDecl (HsVectClassOut _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectClassOut'"
rnHsVectDecl (HsVectInstIn instTy)
= do { (instTy', fvs) <- rnLHsInstType (text "a VECTORISE pragma") instTy
; return (HsVectInstIn instTy', fvs)
}
rnHsVectDecl (HsVectInstOut _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectInstOut'"
{- **************************************************************
* *
Renaming type, class, instance and role declarations
* *
*****************************************************************
@rnTyClDecl@ uses the `global name function' to create a new type
declaration in which local names have been replaced by their original
names, reporting any unknown names.
Renaming type variables is a pain. Because they now contain uniques,
it is necessary to pass in an association list which maps a parsed
tyvar to its @Name@ representation.
In some cases (type signatures of values),
it is even necessary to go over the type first
in order to get the set of tyvars used by it, make an assoc list,
and then go over it again to rename the tyvars!
However, we can also do some scoping checks at the same time.
Note [Dependency analysis of type, class, and instance decls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A TyClGroup represents a strongly connected component of
type/class/instance decls, together with the role annotations for the
type/class declarations. The renamer uses strongly connected
component analysis to build these groups. We do this for a number of
reasons:
* Improve kind error messages. Consider
data T f a = MkT f a
data S f a = MkS f (T f a)
This has a kind error, but the error message is better if you
check T first, (fixing its kind) and *then* S. If you do kind
inference together, you might get an error reported in S, which
is jolly confusing. See Trac #4875
* Increase kind polymorphism. See TcTyClsDecls
Note [Grouping of type and class declarations]
Why do the instance declarations participate? At least two reasons
* Consider (Trac #11348)
type family F a
type instance F Int = Bool
data R = MkR (F Int)
type Foo = 'MkR 'True
For Foo to kind-check we need to know that (F Int) ~ Bool. But we won't
know that unless we've looked at the type instance declaration for F
before kind-checking Foo.
* Another example is this (Trac #3990).
data family Complex a
data instance Complex Double = CD {-# UNPACK #-} !Double
{-# UNPACK #-} !Double
data T = T {-# UNPACK #-} !(Complex Double)
Here, to generate the right kind of unpacked implementation for T,
we must have access to the 'data instance' declaration.
* Things become more complicated when we introduce transitive
dependencies through imported definitions, like in this scenario:
A.hs
type family Closed (t :: Type) :: Type where
Closed t = Open t
type family Open (t :: Type) :: Type
B.hs
data Q where
Q :: Closed Bool -> Q
type instance Open Int = Bool
type S = 'Q 'True
Somehow, we must ensure that the instance Open Int = Bool is checked before
the type synonym S. While we know that S depends upon 'Q depends upon Closed,
we have no idea that Closed depends upon Open!
To accommodate these situations, we ensure that an instance is checked
before every @TyClDecl@ on which it does not depend. That's to say, instances
are checked as early as possible in @tcTyAndClassDecls@.
------------------------------------
So much for WHY. What about HOW? It's pretty easy:
(1) Rename the type/class, instance, and role declarations
individually
(2) Do strongly-connected component analysis of the type/class decls,
We'll make a TyClGroup for each SCC
In this step we treat a reference to a (promoted) data constructor
K as a dependency on its parent type. Thus
data T = K1 | K2
data S = MkS (Proxy 'K1)
Here S depends on 'K1 and hence on its parent T.
In this step we ignore instances; see
Note [No dependencies on data instances]
(3) Attach roles to the appropriate SCC
(4) Attach instances to the appropriate SCC.
We add an instance decl to SCC when:
all its free types/classes are bound in this SCC or earlier ones
(5) We make an initial TyClGroup, with empty group_tyclds, for any
(orphan) instances that affect only imported types/classes
Steps (3) and (4) are done by the (mapAccumL mk_group) call.
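For illustration only (a sketch, not part of the original commentary),
consider
    class C a where { op :: a -> a }     -- one SCC
    data T = MkT                         -- another SCC
    instance C T where { op = id }       -- free tycons/classes: {C, T}
The instance is attached (step 4) to whichever of the two SCCs comes later,
since only then are all of its free types/classes bound.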
Note [No dependencies on data instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
data family D a
data instance D Int = D1
data S = MkS (Proxy 'D1)
Here the declaration of S depends on the /data instance/ declaration
for 'D Int'. That makes things a lot more complicated, especially
if the data instance is an associated type of an enclosing class instance.
(And the class instance might have several associated type instances
with different dependency structure!)
Ugh. For now we simply don't allow promotion of data constructors for
data instances. See Note [AFamDataCon: not promoting data family
constructors] in TcEnv
-}
rnTyClDecls :: [TyClGroup RdrName]
-> RnM ([TyClGroup Name], FreeVars)
-- Rename the declarations and do dependency analysis on them
rnTyClDecls tycl_ds
  = do { -- Rename the type/class, instance, and role declarations
tycls_w_fvs <- mapM (wrapLocFstM rnTyClDecl)
(tyClGroupTyClDecls tycl_ds)
; let tc_names = mkNameSet (map (tcdName . unLoc . fst) tycls_w_fvs)
; instds_w_fvs <- mapM (wrapLocFstM rnSrcInstDecl) (tyClGroupInstDecls tycl_ds)
; role_annots <- rnRoleAnnots tc_names (tyClGroupRoleDecls tycl_ds)
; tycls_w_fvs <- addBootDeps tycls_w_fvs
-- TBD must add_boot_deps to instds_w_fvs?
-- Do SCC analysis on the type/class decls
; rdr_env <- getGlobalRdrEnv
; let tycl_sccs = depAnalTyClDecls rdr_env tycls_w_fvs
role_annot_env = mkRoleAnnotEnv role_annots
inst_ds_map = mkInstDeclFreeVarsMap rdr_env tc_names instds_w_fvs
(init_inst_ds, rest_inst_ds) = getInsts [] inst_ds_map
first_group
| null init_inst_ds = []
| otherwise = [TyClGroup { group_tyclds = []
, group_roles = []
, group_instds = init_inst_ds }]
((final_inst_ds, orphan_roles), groups)
= mapAccumL mk_group (rest_inst_ds, role_annot_env) tycl_sccs
all_fvs = plusFV (foldr (plusFV . snd) emptyFVs tycls_w_fvs)
(foldr (plusFV . snd) emptyFVs instds_w_fvs)
all_groups = first_group ++ groups
; ASSERT2( null final_inst_ds, ppr instds_w_fvs $$ ppr inst_ds_map
$$ ppr (flattenSCCs tycl_sccs) $$ ppr final_inst_ds )
mapM_ orphanRoleAnnotErr (nameEnvElts orphan_roles)
; traceRn "rnTycl dependency analysis made groups" (ppr all_groups)
; return (all_groups, all_fvs) }
where
mk_group :: (InstDeclFreeVarsMap, RoleAnnotEnv)
-> SCC (LTyClDecl Name)
-> ( (InstDeclFreeVarsMap, RoleAnnotEnv)
, TyClGroup Name )
mk_group (inst_map, role_env) scc
= ((inst_map', role_env'), group)
where
tycl_ds = flattenSCC scc
bndrs = map (tcdName . unLoc) tycl_ds
(inst_ds, inst_map') = getInsts bndrs inst_map
(roles, role_env') = getRoleAnnots bndrs role_env
group = TyClGroup { group_tyclds = tycl_ds
, group_roles = roles
, group_instds = inst_ds }
depAnalTyClDecls :: GlobalRdrEnv
-> [(LTyClDecl Name, FreeVars)]
-> [SCC (LTyClDecl Name)]
-- See Note [Dependency analysis of type, class, and instance decls]
depAnalTyClDecls rdr_env ds_w_fvs
= stronglyConnCompFromEdgedVerticesUniq edges
where
edges = [ (d, tcdName (unLoc d), map (getParent rdr_env) (nonDetEltsUFM fvs))
| (d, fvs) <- ds_w_fvs ]
-- It's OK to use nonDetEltsUFM here as
-- stronglyConnCompFromEdgedVertices is still deterministic
-- even if the edges are in nondeterministic order as explained
-- in Note [Deterministic SCC] in Digraph.
toParents :: GlobalRdrEnv -> NameSet -> NameSet
toParents rdr_env ns
= nonDetFoldUFM add emptyNameSet ns
-- It's OK to use nonDetFoldUFM because we immediately forget the
-- ordering by creating a set
where
add n s = extendNameSet s (getParent rdr_env n)
getParent :: GlobalRdrEnv -> Name -> Name
getParent rdr_env n
= case lookupGRE_Name rdr_env n of
Just gre -> case gre_par gre of
ParentIs { par_is = p } -> p
FldParent { par_is = p } -> p
_ -> n
Nothing -> n
{- Note [Extra dependencies from .hs-boot files]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a long story, so buckle in.
**Dependencies via hs-boot files are not obvious.** Consider the following case:
A.hs-boot
module A where
data A1
B.hs
module B where
import {-# SOURCE #-} A
type B1 = A1
A.hs
module A where
import B
data A2 = MkA2 B1
data A1 = MkA1 A2
Here A2 is really recursive (via B1), but we won't see that easily when
doing dependency analysis when compiling A.hs. When we look at A2,
we see that its free variables are simply B1, but only by (recursively) digging
into the definition of B1 will we see that it actually refers to A1 via an
hs-boot file.
**Recursive declarations, even those broken by an hs-boot file, need to
be type-checked together.** Whenever we refer to a declaration via
an hs-boot file, we must be careful not to force the TyThing too early:
ala Note [Tying the knot] if we force the TyThing before we have
defined it ourselves in the local type environment, GHC will error.
Conservatively, then, it would make sense for us to typecheck A1
and A2 from the previous example together, because the two types are
truly mutually recursive through B1.
If we are being clever, we might observe that while kind-checking
A2, we don't actually need to force the TyThing for A1: B1
independently records its kind, so there is no need to go "deeper".
But then we are in an uncomfortable situation where we have
constructed a TyThing for A2 before we have checked A1, and we
have to be absolutely certain we don't force it too deeply until
we get around to kind checking A1, which could be for a very long
time.
Indeed, with datatype promotion, we may very well need to look
at the type of MkA2 before we have kind-checked A1: consider,
data T = MkT (Proxy 'MkA2)
To promote MkA2, we need to lift its type to the kind level.
We never tested this, but it seems likely A1 would get poked
at this point.
**Here's what we do instead.** So it is expedient for us to
make sure A1 and A2 are kind checked together in a loop.
To ensure that our dependency analysis can catch this,
we add a dependency:
- from every local declaration
- to everything that comes from this module's .hs-boot file
(this is gotten from sb_tcs in the SelfBootInfo).
In this case, we'll add edges
- from A1 to A2 (but that edge is there already)
- from A2 to A1 (which is new)
Well, not quite *every* declaration. Imagine module A
above had another datatype declaration:
data A3 = A3 Int
Even though A3 has a dependency (on Int), all its dependencies are from things
that live on other packages. Since we don't have mutual dependencies across
packages, it is safe not to add the dependencies on the .hs-boot stuff to A2.
Hence function nameIsHomePackageImport.
Note that this is fairly conservative: it essentially implies that
EVERY type declaration in this module's hs-boot file will be kind-checked
together in one giant loop (and furthermore makes every other type
in the module depend on this loop). This is perhaps less than ideal, because
the larger a recursive group, the less polymorphism available (we
cannot infer a type to be polymorphically instantiated while we
are inferring its kind), but no one has hollered about this (yet!)
-}
addBootDeps :: [(LTyClDecl Name, FreeVars)] -> RnM [(LTyClDecl Name, FreeVars)]
-- See Note [Extra dependencies from .hs-boot files]
addBootDeps ds_w_fvs
= do { tcg_env <- getGblEnv
; let this_mod = tcg_mod tcg_env
boot_info = tcg_self_boot tcg_env
add_boot_deps :: [(LTyClDecl Name, FreeVars)] -> [(LTyClDecl Name, FreeVars)]
add_boot_deps ds_w_fvs
= case boot_info of
SelfBoot { sb_tcs = tcs } | not (isEmptyNameSet tcs)
-> map (add_one tcs) ds_w_fvs
_ -> ds_w_fvs
add_one :: NameSet -> (LTyClDecl Name, FreeVars) -> (LTyClDecl Name, FreeVars)
add_one tcs pr@(decl,fvs)
| has_local_imports fvs = (decl, fvs `plusFV` tcs)
| otherwise = pr
has_local_imports fvs
= nameSetAny (nameIsHomePackageImport this_mod) fvs
; return (add_boot_deps ds_w_fvs) }
{- ******************************************************
* *
Role annotations
* *
****************************************************** -}
-- | Renames role annotations and checks for duplicate role annotations.
-- (The renamed annotations are gathered into a NameEnv by mkRoleAnnotEnv
-- in rnTyClDecls.)
-- It is quite convenient to do both of these in the same place.
-- See also Note [Role annotations in the renamer]
rnRoleAnnots :: NameSet
-> [LRoleAnnotDecl RdrName]
-> RnM [LRoleAnnotDecl Name]
rnRoleAnnots tc_names role_annots
= do { -- Check for duplicates *before* renaming, to avoid
-- lumping together all the unboundNames
let (no_dups, dup_annots) = removeDups role_annots_cmp role_annots
role_annots_cmp (L _ annot1) (L _ annot2)
= roleAnnotDeclName annot1 `compare` roleAnnotDeclName annot2
; mapM_ dupRoleAnnotErr dup_annots
; mapM (wrapLocM rn_role_annot1) no_dups }
where
rn_role_annot1 (RoleAnnotDecl tycon roles)
= do { -- the name is an *occurrence*, but look it up only in the
-- decls defined in this group (see #10263)
tycon' <- lookupSigCtxtOccRn (RoleAnnotCtxt tc_names)
(text "role annotation")
tycon
; return $ RoleAnnotDecl tycon' roles }
dupRoleAnnotErr :: [LRoleAnnotDecl RdrName] -> RnM ()
dupRoleAnnotErr [] = panic "dupRoleAnnotErr"
dupRoleAnnotErr list
= addErrAt loc $
hang (text "Duplicate role annotations for" <+>
quotes (ppr $ roleAnnotDeclName first_decl) <> colon)
2 (vcat $ map pp_role_annot sorted_list)
where
sorted_list = sortBy cmp_annot list
(L loc first_decl : _) = sorted_list
pp_role_annot (L loc decl) = hang (ppr decl)
4 (text "-- written at" <+> ppr loc)
cmp_annot (L loc1 _) (L loc2 _) = loc1 `compare` loc2
orphanRoleAnnotErr :: LRoleAnnotDecl Name -> RnM ()
orphanRoleAnnotErr (L loc decl)
= addErrAt loc $
hang (text "Role annotation for a type previously declared:")
2 (ppr decl) $$
parens (text "The role annotation must be given where" <+>
quotes (ppr $ roleAnnotDeclName decl) <+>
text "is declared.")
{- Note [Role annotations in the renamer]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must ensure that a type's role annotation is put in the same group as the
proper type declaration. This is because role annotations are needed during
type-checking when creating the type's TyCon. So, rnRoleAnnots renames the
role annotations, and mkRoleAnnotEnv builds a NameEnv (LRoleAnnotDecl Name)
that maps a name to the role annotation for that type, if any. Then, this
map can be used to add the role annotations to the
groups after dependency analysis.
This process checks for duplicate role annotations, where we must be careful
to do the check *before* renaming to avoid calling all unbound names duplicates
of one another.
The renaming process, as usual, might identify and report errors for unbound
names. We exclude the annotations for unbound names in the annotation
environment to avoid spurious errors for orphaned annotations.
We then (in rnTyClDecls) do a check for orphan role annotations (role
annotations without an accompanying type decl). The check works by folding
over components (of type [[Either (TyClDecl Name) (InstDecl Name)]]), selecting
out the relevant role declarations for each group, as well as diminishing the
annotation environment. After the fold is complete, anything left over in the
name environment must be an orphan, and errors are generated.
An earlier version of this algorithm short-cut the orphan check by renaming
only with names declared in this module. But, this check is insufficient in
the case of staged module compilation (Template Haskell, GHCi).
See #8485. With the new lookup process (which includes types declared in other
modules), we get better error messages, too.
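
A small illustrative example (not from the original note):

    type role T nominal
    data T a = MkT a        -- the annotation is attached to T's TyClGroup

    type role S nominal     -- an orphan if no S is declared in this module;
                            -- reported by orphanRoleAnnotErr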
-}
{- ******************************************************
* *
Dependency info for instances
* *
****************************************************** -}
----------------------------------------------------------
-- | An 'InstDeclFreeVarsMap' is an association of an
-- @InstDecl@ with @FreeVars@. The @FreeVars@ are
-- the tycon names that are both
-- a) free in the instance declaration
-- b) bound by this group of type/class/instance decls
type InstDeclFreeVarsMap = [(LInstDecl Name, FreeVars)]
-- | Construct an @InstDeclFreeVarsMap@ by eliminating any @Name@s from the
-- @FreeVars@ which are *not* the binders of a @TyClDecl@.
mkInstDeclFreeVarsMap :: GlobalRdrEnv
-> NameSet
-> [(LInstDecl Name, FreeVars)]
-> InstDeclFreeVarsMap
mkInstDeclFreeVarsMap rdr_env tycl_bndrs inst_ds_fvs
= [ (inst_decl, toParents rdr_env fvs `intersectFVs` tycl_bndrs)
| (inst_decl, fvs) <- inst_ds_fvs ]
-- | Get the @LInstDecl@s which have empty @FreeVars@ sets, and the
-- @InstDeclFreeVarsMap@ with these entries removed.
-- We call (getInsts tcs instd_map) when we've completed the declarations
-- for 'tcs'. The call returns (inst_decls, instd_map'), where
-- inst_decls are the instance declarations all of
-- whose free vars are now defined
-- instd_map' is the inst-decl map with 'tcs' removed from
-- the free-var set
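--    A worked sketch (illustrative only; inst1, inst2, T and S are made-up):
--        instd_map            = [ (inst1, {T}), (inst2, {T, S}) ]
--        getInsts [T] instd_map
--                             = ( [inst1]             -- all free vars now defined
--                               , [ (inst2, {S}) ] )  -- still waiting for S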
getInsts :: [Name] -> InstDeclFreeVarsMap -> ([LInstDecl Name], InstDeclFreeVarsMap)
getInsts bndrs inst_decl_map
= partitionWith pick_me inst_decl_map
where
pick_me :: (LInstDecl Name, FreeVars)
-> Either (LInstDecl Name) (LInstDecl Name, FreeVars)
pick_me (decl, fvs)
| isEmptyNameSet depleted_fvs = Left decl
| otherwise = Right (decl, depleted_fvs)
where
depleted_fvs = delFVs bndrs fvs
{- ******************************************************
* *
Renaming a type or class declaration
* *
****************************************************** -}
rnTyClDecl :: TyClDecl RdrName
-> RnM (TyClDecl Name, FreeVars)
-- All flavours of type family declarations ("type family", "newtype family",
-- and "data family"), both top level and (for an associated type)
-- in a class decl
rnTyClDecl (FamDecl { tcdFam = decl })
= do { (decl', fvs) <- rnFamDecl Nothing decl
; return (FamDecl decl', fvs) }
rnTyClDecl (SynDecl { tcdLName = tycon, tcdTyVars = tyvars, tcdRhs = rhs })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; kvs <- freeKiTyVarsKindVars <$> extractHsTyRdrTyVars rhs
; let doc = TySynCtx tycon
; traceRn "rntycl-ty" (ppr tycon <+> ppr kvs)
; ((tyvars', rhs'), fvs) <- bindHsQTyVars doc Nothing Nothing kvs tyvars $
\ tyvars' _ ->
do { (rhs', fvs) <- rnTySyn doc rhs
; return ((tyvars', rhs'), fvs) }
; return (SynDecl { tcdLName = tycon', tcdTyVars = tyvars'
, tcdRhs = rhs', tcdFVs = fvs }, fvs) }
-- "data", "newtype" declarations
-- both top level and (for an associated type) in an instance decl
rnTyClDecl (DataDecl { tcdLName = tycon, tcdTyVars = tyvars, tcdDataDefn = defn })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; kvs <- extractDataDefnKindVars defn
; let doc = TyDataCtx tycon
; traceRn "rntycl-data" (ppr tycon <+> ppr kvs)
; ((tyvars', defn', no_kvs), fvs)
<- bindHsQTyVars doc Nothing Nothing kvs tyvars $ \ tyvars' dep_vars ->
do { ((defn', kind_sig_fvs), fvs) <- rnDataDefn doc defn
; let sig_tvs = filterNameSet isTyVarName kind_sig_fvs
unbound_sig_tvs = sig_tvs `minusNameSet` dep_vars
; return ((tyvars', defn', isEmptyNameSet unbound_sig_tvs), fvs) }
-- See Note [Complete user-supplied kind signatures] in HsDecls
; typeintype <- xoptM LangExt.TypeInType
; let cusk = hsTvbAllKinded tyvars' &&
(not typeintype || no_kvs)
; return (DataDecl { tcdLName = tycon', tcdTyVars = tyvars'
, tcdDataDefn = defn', tcdDataCusk = cusk
, tcdFVs = fvs }, fvs) }
rnTyClDecl (ClassDecl { tcdCtxt = context, tcdLName = lcls,
tcdTyVars = tyvars, tcdFDs = fds, tcdSigs = sigs,
tcdMeths = mbinds, tcdATs = ats, tcdATDefs = at_defs,
tcdDocs = docs})
= do { lcls' <- lookupLocatedTopBndrRn lcls
; let cls' = unLoc lcls'
kvs = [] -- No scoped kind vars except those in
-- kind signatures on the tyvars
-- Tyvars scope over superclass context and method signatures
; ((tyvars', context', fds', ats'), stuff_fvs)
<- bindHsQTyVars cls_doc Nothing Nothing kvs tyvars $ \ tyvars' _ -> do
-- Checks for distinct tyvars
{ (context', cxt_fvs) <- rnContext cls_doc context
; fds' <- rnFds fds
-- The fundeps have no free variables
; (ats', fv_ats) <- rnATDecls cls' ats
; let fvs = cxt_fvs `plusFV`
fv_ats
; return ((tyvars', context', fds', ats'), fvs) }
; (at_defs', fv_at_defs) <- rnList (rnTyFamDefltEqn cls') at_defs
-- No need to check for duplicate associated type decls
-- since that is done by RnNames.extendGlobalRdrEnvRn
-- Check the signatures
-- First process the class op sigs (op_sigs), then the fixity sigs (non_op_sigs).
; let sig_rdr_names_w_locs = [op | L _ (ClassOpSig False ops _) <- sigs
, op <- ops]
; checkDupRdrNames sig_rdr_names_w_locs
-- Typechecker is responsible for checking that we only
-- give default-method bindings for things in this class.
-- The renamer *could* check this for class decls, but can't
-- for instance decls.
-- The newLocals call is tiresome: given a generic class decl
-- class C a where
-- op :: a -> a
-- op {| x+y |} (Inl a) = ...
-- op {| x+y |} (Inr b) = ...
-- op {| a*b |} (a*b) = ...
-- we want to name both "x" tyvars with the same unique, so that they are
-- easy to group together in the typechecker.
; (mbinds', sigs', meth_fvs)
<- rnMethodBinds True cls' (hsAllLTyVarNames tyvars') mbinds sigs
-- No need to check for duplicate method signatures
-- since that is done by RnNames.extendGlobalRdrEnvRn
-- and the methods are already in scope
-- Haddock docs
; docs' <- mapM (wrapLocM rnDocDecl) docs
; let all_fvs = meth_fvs `plusFV` stuff_fvs `plusFV` fv_at_defs
; return (ClassDecl { tcdCtxt = context', tcdLName = lcls',
tcdTyVars = tyvars', tcdFDs = fds', tcdSigs = sigs',
tcdMeths = mbinds', tcdATs = ats', tcdATDefs = at_defs',
tcdDocs = docs', tcdFVs = all_fvs },
all_fvs ) }
where
cls_doc = ClassDeclCtx lcls
-- "type" and "type instance" declarations
rnTySyn :: HsDocContext -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnTySyn doc rhs = rnLHsType doc rhs
rnDataDefn :: HsDocContext -> HsDataDefn RdrName
-> RnM ((HsDataDefn Name, NameSet), FreeVars)
-- the NameSet includes all Names free in the kind signature
-- See Note [Complete user-supplied kind signatures]
rnDataDefn doc (HsDataDefn { dd_ND = new_or_data, dd_cType = cType
, dd_ctxt = context, dd_cons = condecls
, dd_kindSig = m_sig, dd_derivs = derivs })
= do { checkTc (h98_style || null (unLoc context))
(badGadtStupidTheta doc)
; (m_sig', sig_fvs) <- case m_sig of
Just sig -> first Just <$> rnLHsKind doc sig
Nothing -> return (Nothing, emptyFVs)
; (context', fvs1) <- rnContext doc context
; (derivs', fvs3) <- rn_derivs derivs
-- For the constructor declarations, drop the LocalRdrEnv
-- in the GADT case, where the type variables in the declaration
-- do not scope over the constructor signatures
-- data T a where { T1 :: forall b. b-> b }
; let { zap_lcl_env | h98_style = \ thing -> thing
| otherwise = setLocalRdrEnv emptyLocalRdrEnv }
; (condecls', con_fvs) <- zap_lcl_env $ rnConDecls condecls
-- No need to check for duplicate constructor decls
-- since that is done by RnNames.extendGlobalRdrEnvRn
; let all_fvs = fvs1 `plusFV` fvs3 `plusFV`
con_fvs `plusFV` sig_fvs
; return (( HsDataDefn { dd_ND = new_or_data, dd_cType = cType
, dd_ctxt = context', dd_kindSig = m_sig'
, dd_cons = condecls'
, dd_derivs = derivs' }
, sig_fvs )
, all_fvs )
}
where
h98_style = case condecls of -- Note [Stupid theta]
L _ (ConDeclGADT {}) : _ -> False
_ -> True
rn_derivs (L loc ds)
= do { deriv_strats_ok <- xoptM LangExt.DerivingStrategies
; failIfTc (lengthExceeds ds 1 && not deriv_strats_ok)
multipleDerivClausesErr
; (ds', fvs) <- mapFvRn (rnLHsDerivingClause deriv_strats_ok doc) ds
; return (L loc ds', fvs) }
rnLHsDerivingClause :: Bool -> HsDocContext -> LHsDerivingClause RdrName
-> RnM (LHsDerivingClause Name, FreeVars)
rnLHsDerivingClause deriv_strats_ok doc
(L loc (HsDerivingClause { deriv_clause_strategy = dcs
, deriv_clause_tys = L loc' dct }))
= do { failIfTc (isJust dcs && not deriv_strats_ok) $
illegalDerivStrategyErr $ fmap unLoc dcs
; (dct', fvs) <- mapFvRn (rnHsSigType doc) dct
; return ( L loc (HsDerivingClause { deriv_clause_strategy = dcs
, deriv_clause_tys = L loc' dct' })
, fvs ) }
badGadtStupidTheta :: HsDocContext -> SDoc
badGadtStupidTheta _
= vcat [text "No context is allowed on a GADT-style data declaration",
text "(You can put a context on each constructor, though.)"]
illegalDerivStrategyErr :: Maybe DerivStrategy -> SDoc
illegalDerivStrategyErr ds
= vcat [ text "Illegal deriving strategy" <> colon <+> maybe empty ppr ds
, text "Use DerivingStrategies to enable this extension" ]
multipleDerivClausesErr :: SDoc
multipleDerivClausesErr
= vcat [ text "Illegal use of multiple, consecutive deriving clauses"
, text "Use DerivingStrategies to allow this" ]
rnFamDecl :: Maybe Name -- Just cls => this FamilyDecl is nested
-- inside an *class decl* for cls
-- used for associated types
-> FamilyDecl RdrName
-> RnM (FamilyDecl Name, FreeVars)
rnFamDecl mb_cls (FamilyDecl { fdLName = tycon, fdTyVars = tyvars
, fdInfo = info, fdResultSig = res_sig
, fdInjectivityAnn = injectivity })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; kvs <- extractRdrKindSigVars res_sig
; ((tyvars', res_sig', injectivity'), fv1) <-
bindHsQTyVars doc Nothing mb_cls kvs tyvars $
\ tyvars'@(HsQTvs { hsq_implicit = rn_kvs }) _ ->
do { let rn_sig = rnFamResultSig doc rn_kvs
; (res_sig', fv_kind) <- wrapLocFstM rn_sig res_sig
; injectivity' <- traverse (rnInjectivityAnn tyvars' res_sig')
injectivity
; return ( (tyvars', res_sig', injectivity') , fv_kind ) }
; (info', fv2) <- rn_info info
; return (FamilyDecl { fdLName = tycon', fdTyVars = tyvars'
, fdInfo = info', fdResultSig = res_sig'
, fdInjectivityAnn = injectivity' }
, fv1 `plusFV` fv2) }
where
doc = TyFamilyCtx tycon
----------------------
rn_info (ClosedTypeFamily (Just eqns))
= do { (eqns', fvs) <- rnList (rnTyFamInstEqn Nothing) eqns
-- no class context,
; return (ClosedTypeFamily (Just eqns'), fvs) }
rn_info (ClosedTypeFamily Nothing)
= return (ClosedTypeFamily Nothing, emptyFVs)
rn_info OpenTypeFamily = return (OpenTypeFamily, emptyFVs)
rn_info DataFamily = return (DataFamily, emptyFVs)
rnFamResultSig :: HsDocContext
-> [Name] -- kind variables already in scope
-> FamilyResultSig RdrName
-> RnM (FamilyResultSig Name, FreeVars)
rnFamResultSig _ _ NoSig
= return (NoSig, emptyFVs)
rnFamResultSig doc _ (KindSig kind)
= do { (rndKind, ftvs) <- rnLHsKind doc kind
; return (KindSig rndKind, ftvs) }
rnFamResultSig doc kv_names (TyVarSig tvbndr)
= do { -- `TyVarSig` tells us that user named the result of a type family by
-- writing `= tyvar` or `= (tyvar :: kind)`. In such case we want to
-- be sure that the supplied result name is not identical to an
-- already in-scope type variable from an enclosing class.
--
-- Example of disallowed declaration:
-- class C a b where
-- type F b = a | a -> b
rdr_env <- getLocalRdrEnv
; let resName = hsLTyVarName tvbndr
; when (resName `elemLocalRdrEnv` rdr_env) $
addErrAt (getLoc tvbndr) $
(hsep [ text "Type variable", quotes (ppr resName) <> comma
, text "naming a type family result,"
] $$
text "shadows an already bound type variable")
; bindLHsTyVarBndr doc Nothing -- this might be a lie, but it's used for
-- scoping checks that are irrelevant here
(mkNameSet kv_names) emptyNameSet
-- use of emptyNameSet here avoids
-- redundant duplicate errors
tvbndr $ \ _ _ tvbndr' ->
return (TyVarSig tvbndr', unitFV (hsLTyVarName tvbndr')) }
-- Note [Renaming injectivity annotation]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- During renaming of an injectivity annotation we have to make several checks to
-- make sure that it is well-formed. At the moment an injectivity annotation
-- consists of a single injectivity condition, so the terms "injectivity
-- annotation" and "injectivity condition" might be used interchangeably. See
-- Note [Injectivity annotation] for a detailed discussion of currently allowed
-- injectivity annotations.
--
-- Checking the LHS is simple because the only type variable allowed on the LHS of
-- an injectivity condition is the variable naming the result in the type family head.
-- Example of disallowed annotation:
--
-- type family Foo a b = r | b -> a
--
-- Verifying RHS of injectivity consists of checking that:
--
-- 1. only variables defined in the type family head appear on the RHS (kind
-- variables are also allowed). Example of disallowed annotation:
--
-- type family Foo a = r | r -> b
--
-- 2. for associated types the result variable does not shadow any of type
-- class variables. Example of disallowed annotation:
--
-- class Foo a b where
-- type F a = b | b -> a
--
-- Breaking any of these assumptions results in an error.
-- | Rename an injectivity annotation. Note that the injectivity annotation is
-- just the part after the "|". Everything that appears before it is renamed
-- in rnFamDecl.
rnInjectivityAnn :: LHsQTyVars Name -- ^ Type variables declared in
-- type family head
-> LFamilyResultSig Name -- ^ Result signature
-> LInjectivityAnn RdrName -- ^ Injectivity annotation
-> RnM (LInjectivityAnn Name)
rnInjectivityAnn tvBndrs (L _ (TyVarSig resTv))
(L srcSpan (InjectivityAnn injFrom injTo))
= do
{ (injDecl'@(L _ (InjectivityAnn injFrom' injTo')), noRnErrors)
<- askNoErrs $
bindLocalNames [hsLTyVarName resTv] $
-- The return type variable scopes over the injectivity annotation
-- e.g. type family F a = (r::*) | r -> a
do { injFrom' <- rnLTyVar injFrom
; injTo' <- mapM rnLTyVar injTo
; return $ L srcSpan (InjectivityAnn injFrom' injTo') }
; let tvNames = Set.fromList $ hsAllLTyVarNames tvBndrs
resName = hsLTyVarName resTv
-- See Note [Renaming injectivity annotation]
lhsValid = EQ == (stableNameCmp resName (unLoc injFrom'))
rhsValid = Set.fromList (map unLoc injTo') `Set.difference` tvNames
         -- if renaming of type variables ended with errors (e.g. there were
         -- not-in-scope variables) don't check the validity of the injectivity
         -- annotation. This gives better error messages.
; when (noRnErrors && not lhsValid) $
addErrAt (getLoc injFrom)
( vcat [ text $ "Incorrect type variable on the LHS of "
++ "injectivity condition"
, nest 5
( vcat [ text "Expected :" <+> ppr resName
, text "Actual :" <+> ppr injFrom ])])
; when (noRnErrors && not (Set.null rhsValid)) $
do { let errorVars = Set.toList rhsValid
; addErrAt srcSpan $ ( hsep
[ text "Unknown type variable" <> plural errorVars
, text "on the RHS of injectivity condition:"
, interpp'SP errorVars ] ) }
; return injDecl' }
-- We can only hit this case when the user writes an injectivity annotation without
-- naming the result:
--
-- type family F a | result -> a
-- type family F a :: * | result -> a
--
-- So we rename injectivity annotation like we normally would except that
-- this time we expect "result" to be reported not in scope by rnLTyVar.
rnInjectivityAnn _ _ (L srcSpan (InjectivityAnn injFrom injTo)) =
setSrcSpan srcSpan $ do
(injDecl', _) <- askNoErrs $ do
injFrom' <- rnLTyVar injFrom
injTo' <- mapM rnLTyVar injTo
return $ L srcSpan (InjectivityAnn injFrom' injTo')
return $ injDecl'
{-
Note [Stupid theta]
~~~~~~~~~~~~~~~~~~~
Trac #3850 complains about a regression wrt 6.10 for
data Show a => T a
There is no reason not to allow the stupid theta if there are no data
constructors. It's still stupid, but does no harm, and I don't want
to cause programs to break unnecessarily (notably HList). So if there
are no data constructors we allow h98_style = True
-}
{- *****************************************************
* *
Support code for type/data declarations
* *
***************************************************** -}
---------------
badAssocRhs :: [Name] -> RnM ()
badAssocRhs ns
= addErr (hang (text "The RHS of an associated type declaration mentions"
<+> pprWithCommas (quotes . ppr) ns)
2 (text "All such variables must be bound on the LHS"))
-----------------
rnConDecls :: [LConDecl RdrName] -> RnM ([LConDecl Name], FreeVars)
rnConDecls = mapFvRn (wrapLocFstM rnConDecl)
rnConDecl :: ConDecl RdrName -> RnM (ConDecl Name, FreeVars)
rnConDecl decl@(ConDeclH98 { con_name = name, con_qvars = qtvs
, con_cxt = mcxt, con_details = details
, con_doc = mb_doc })
= do { _ <- addLocM checkConName name
; new_name <- lookupLocatedTopBndrRn name
; let doc = ConDeclCtx [new_name]
; mb_doc' <- rnMbLHsDoc mb_doc
; (kvs, qtvs') <- get_con_qtvs (hsConDeclArgTys details)
; bindHsQTyVars doc (Just $ inHsDocContext doc) Nothing kvs qtvs' $
\new_tyvars _ -> do
{ (new_context, fvs1) <- case mcxt of
Nothing -> return (Nothing,emptyFVs)
Just lcxt -> do { (lctx',fvs) <- rnContext doc lcxt
; return (Just lctx',fvs) }
; (new_details, fvs2) <- rnConDeclDetails (unLoc new_name) doc details
; let (new_details',fvs3) = (new_details,emptyFVs)
; traceRn "rnConDecl" (ppr name <+> vcat
[ text "free_kvs:" <+> ppr kvs
, text "qtvs:" <+> ppr qtvs
, text "qtvs':" <+> ppr qtvs' ])
; let all_fvs = fvs1 `plusFV` fvs2 `plusFV` fvs3
new_tyvars' = case qtvs of
Nothing -> Nothing
Just _ -> Just new_tyvars
; return (decl { con_name = new_name, con_qvars = new_tyvars'
, con_cxt = new_context, con_details = new_details'
, con_doc = mb_doc' },
all_fvs) }}
where
cxt = maybe [] unLoc mcxt
get_rdr_tvs tys = extractHsTysRdrTyVars (cxt ++ tys)
get_con_qtvs :: [LHsType RdrName]
-> RnM ([Located RdrName], LHsQTyVars RdrName)
get_con_qtvs arg_tys
| Just tvs <- qtvs -- data T = forall a. MkT (a -> a)
= do { free_vars <- get_rdr_tvs arg_tys
; return (freeKiTyVarsKindVars free_vars, tvs) }
| otherwise -- data T = MkT (a -> a)
= return ([], mkHsQTvs [])
rnConDecl decl@(ConDeclGADT { con_names = names, con_type = ty
, con_doc = mb_doc })
= do { mapM_ (addLocM checkConName) names
; new_names <- mapM lookupLocatedTopBndrRn names
; let doc = ConDeclCtx new_names
; mb_doc' <- rnMbLHsDoc mb_doc
; (ty', fvs) <- rnHsSigType doc ty
; traceRn "rnConDecl" (ppr names <+> vcat
[ text "fvs:" <+> ppr fvs ])
; return (decl { con_names = new_names, con_type = ty'
, con_doc = mb_doc' },
fvs) }
rnConDeclDetails
:: Name
-> HsDocContext
-> HsConDetails (LHsType RdrName) (Located [LConDeclField RdrName])
-> RnM (HsConDetails (LHsType Name) (Located [LConDeclField Name]), FreeVars)
rnConDeclDetails _ doc (PrefixCon tys)
= do { (new_tys, fvs) <- rnLHsTypes doc tys
; return (PrefixCon new_tys, fvs) }
rnConDeclDetails _ doc (InfixCon ty1 ty2)
= do { (new_ty1, fvs1) <- rnLHsType doc ty1
; (new_ty2, fvs2) <- rnLHsType doc ty2
; return (InfixCon new_ty1 new_ty2, fvs1 `plusFV` fvs2) }
rnConDeclDetails con doc (RecCon (L l fields))
= do { fls <- lookupConstructorFields con
; (new_fields, fvs) <- rnConDeclFields doc fls fields
-- No need to check for duplicate fields
-- since that is done by RnNames.extendGlobalRdrEnvRn
; return (RecCon (L l new_fields), fvs) }
-------------------------------------------------
-- | Brings pattern synonym names and also pattern synonym selectors
-- from record pattern synonyms into scope.
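-- An illustrative sketch (not from the original comment; 'P', 'fld' and 'MkT'
-- are hypothetical): a record pattern synonym such as
--
--   pattern P{fld} = MkT fld
--
-- brings both the pattern synonym name 'P' and its record selector 'fld'
-- into scope (see the RecordPatSyn case of new_ps' below).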
extendPatSynEnv :: HsValBinds RdrName -> MiniFixityEnv
-> ([Name] -> TcRnIf TcGblEnv TcLclEnv a) -> TcM a
extendPatSynEnv val_decls local_fix_env thing = do {
names_with_fls <- new_ps val_decls
; let pat_syn_bndrs = concat [ name: map flSelector fields
| (name, fields) <- names_with_fls ]
; let avails = map avail pat_syn_bndrs
; (gbl_env, lcl_env) <- extendGlobalRdrEnvRn avails local_fix_env
; let field_env' = extendNameEnvList (tcg_field_env gbl_env) names_with_fls
final_gbl_env = gbl_env { tcg_field_env = field_env' }
; setEnvs (final_gbl_env, lcl_env) (thing pat_syn_bndrs) }
where
new_ps :: HsValBinds RdrName -> TcM [(Name, [FieldLabel])]
new_ps (ValBindsIn binds _) = foldrBagM new_ps' [] binds
new_ps _ = panic "new_ps"
new_ps' :: LHsBindLR RdrName RdrName
-> [(Name, [FieldLabel])]
-> TcM [(Name, [FieldLabel])]
new_ps' bind names
| L bind_loc (PatSynBind (PSB { psb_id = L _ n
, psb_args = RecordPatSyn as })) <- bind
= do
bnd_name <- newTopSrcBinder (L bind_loc n)
let rnames = map recordPatSynSelectorId as
mkFieldOcc :: Located RdrName -> LFieldOcc RdrName
mkFieldOcc (L l name) = L l (FieldOcc (L l name) PlaceHolder)
field_occs = map mkFieldOcc rnames
flds <- mapM (newRecordSelector False [bnd_name]) field_occs
return ((bnd_name, flds): names)
| L bind_loc (PatSynBind (PSB { psb_id = L _ n})) <- bind
= do
bnd_name <- newTopSrcBinder (L bind_loc n)
return ((bnd_name, []): names)
| otherwise
= return names
{-
*********************************************************
* *
\subsection{Support code to rename types}
* *
*********************************************************
-}
rnFds :: [Located (FunDep (Located RdrName))]
-> RnM [Located (FunDep (Located Name))]
rnFds fds
= mapM (wrapLocM rn_fds) fds
where
rn_fds (tys1, tys2)
= do { tys1' <- rnHsTyVars tys1
; tys2' <- rnHsTyVars tys2
; return (tys1', tys2') }
rnHsTyVars :: [Located RdrName] -> RnM [Located Name]
rnHsTyVars tvs = mapM rnHsTyVar tvs
rnHsTyVar :: Located RdrName -> RnM (Located Name)
rnHsTyVar (L l tyvar) = do
tyvar' <- lookupOccRn tyvar
return (L l tyvar')
{-
*********************************************************
* *
findSplice
* *
*********************************************************
This code marches down the declarations, looking for the first
Template Haskell splice. As it does so it
a) groups the declarations into a HsGroup
b) runs any top-level quasi-quotes
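
For example (an illustrative sketch; the names are made up):

    f x = x
    $(genDecls)        -- first top-level splice: stop here
    g y = y

findSplice returns the HsGroup built from the declarations before the
splice (just 'f'), together with Just (the splice declaration, and the
remaining declarations, here just 'g').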
-}
findSplice :: [LHsDecl RdrName] -> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
findSplice ds = addl emptyRdrGroup ds
addl :: HsGroup RdrName -> [LHsDecl RdrName]
-> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
-- This stuff reverses the declarations (again) but it doesn't matter
addl gp [] = return (gp, Nothing)
addl gp (L l d : ds) = add gp l d ds
add :: HsGroup RdrName -> SrcSpan -> HsDecl RdrName -> [LHsDecl RdrName]
-> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
-- #10047: Declaration QuasiQuoters are expanded immediately, without
-- causing a group split
add gp _ (SpliceD (SpliceDecl (L _ qq@HsQuasiQuote{}) _)) ds
= do { (ds', _) <- rnTopSpliceDecls qq
; addl gp (ds' ++ ds)
}
add gp loc (SpliceD splice@(SpliceDecl _ flag)) ds
= do { -- We've found a top-level splice. If it is an *implicit* one
-- (i.e. a naked top level expression)
case flag of
ExplicitSplice -> return ()
ImplicitSplice -> do { th_on <- xoptM LangExt.TemplateHaskell
; unless th_on $ setSrcSpan loc $
failWith badImplicitSplice }
; return (gp, Just (splice, ds)) }
where
badImplicitSplice = text "Parse error: module header, import declaration"
$$ text "or top-level declaration expected."
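-- An illustrative sketch of the two splice cases handled above (not from the
-- original comments; 'genDecls' is a hypothetical splice):
--   $(genDecls)   -- ExplicitSplice: splits the group here unconditionally
--   genDecls      -- ImplicitSplice (a naked top-level expression): also splits,
--                 -- but only accepted with TemplateHaskell; otherwise we fail
--                 -- with badImplicitSplice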
-- Class declarations: pull out the fixity signatures to the top
add gp@(HsGroup {hs_tyclds = ts, hs_fixds = fs}) l (TyClD d) ds
| isClassDecl d
= let fsigs = [ L l f | L l (FixSig f) <- tcdSigs d ] in
addl (gp { hs_tyclds = add_tycld (L l d) ts, hs_fixds = fsigs ++ fs}) ds
| otherwise
= addl (gp { hs_tyclds = add_tycld (L l d) ts }) ds
-- Signatures: fixity sigs go a different place than all others
add gp@(HsGroup {hs_fixds = ts}) l (SigD (FixSig f)) ds
= addl (gp {hs_fixds = L l f : ts}) ds
add gp@(HsGroup {hs_valds = ts}) l (SigD d) ds
= addl (gp {hs_valds = add_sig (L l d) ts}) ds
-- Value declarations: use add_bind
add gp@(HsGroup {hs_valds = ts}) l (ValD d) ds
= addl (gp { hs_valds = add_bind (L l d) ts }) ds
-- Role annotations: added to the TyClGroup
add gp@(HsGroup {hs_tyclds = ts}) l (RoleAnnotD d) ds
= addl (gp { hs_tyclds = add_role_annot (L l d) ts }) ds
-- NB instance declarations go into TyClGroups. We throw them into the first
-- group, just as we do for the TyClD case. The renamer will go on to group
-- and order them later.
add gp@(HsGroup {hs_tyclds = ts}) l (InstD d) ds
= addl (gp { hs_tyclds = add_instd (L l d) ts }) ds
-- The rest are routine
add gp@(HsGroup {hs_derivds = ts}) l (DerivD d) ds
= addl (gp { hs_derivds = L l d : ts }) ds
add gp@(HsGroup {hs_defds = ts}) l (DefD d) ds
= addl (gp { hs_defds = L l d : ts }) ds
add gp@(HsGroup {hs_fords = ts}) l (ForD d) ds
= addl (gp { hs_fords = L l d : ts }) ds
add gp@(HsGroup {hs_warnds = ts}) l (WarningD d) ds
= addl (gp { hs_warnds = L l d : ts }) ds
add gp@(HsGroup {hs_annds = ts}) l (AnnD d) ds
= addl (gp { hs_annds = L l d : ts }) ds
add gp@(HsGroup {hs_ruleds = ts}) l (RuleD d) ds
= addl (gp { hs_ruleds = L l d : ts }) ds
add gp@(HsGroup {hs_vects = ts}) l (VectD d) ds
= addl (gp { hs_vects = L l d : ts }) ds
add gp l (DocD d) ds
= addl (gp { hs_docs = (L l d) : (hs_docs gp) }) ds
add_tycld :: LTyClDecl a -> [TyClGroup a] -> [TyClGroup a]
add_tycld d [] = [TyClGroup { group_tyclds = [d]
, group_roles = []
, group_instds = []
}
]
add_tycld d (ds@(TyClGroup { group_tyclds = tyclds }):dss)
= ds { group_tyclds = d : tyclds } : dss
add_instd :: LInstDecl a -> [TyClGroup a] -> [TyClGroup a]
add_instd d [] = [TyClGroup { group_tyclds = []
, group_roles = []
, group_instds = [d]
}
]
add_instd d (ds@(TyClGroup { group_instds = instds }):dss)
= ds { group_instds = d : instds } : dss
add_role_annot :: LRoleAnnotDecl a -> [TyClGroup a] -> [TyClGroup a]
add_role_annot d [] = [TyClGroup { group_tyclds = []
, group_roles = [d]
, group_instds = []
}
]
add_role_annot d (tycls@(TyClGroup { group_roles = roles }) : rest)
= tycls { group_roles = d : roles } : rest
add_bind :: LHsBind a -> HsValBinds a -> HsValBinds a
add_bind b (ValBindsIn bs sigs) = ValBindsIn (bs `snocBag` b) sigs
add_bind _ (ValBindsOut {}) = panic "RdrHsSyn:add_bind"
add_sig :: LSig a -> HsValBinds a -> HsValBinds a
add_sig s (ValBindsIn bs sigs) = ValBindsIn bs (s:sigs)
add_sig _ (ValBindsOut {}) = panic "RdrHsSyn:add_sig"
|
mettekou/ghc
|
compiler/rename/RnSource.hs
|
bsd-3-clause
| 97,908
| 1
| 25
| 30,410
| 17,774
| 9,391
| 8,383
| 1,123
| 8
|
{-# LANGUAGE PolyKinds #-}
module Data.IxFunctor.Copointed where
import Data.IxFunctor
class IxFunctor w => IxCopointed w where
iextract :: w i i a -> a
|
kylcarte/ixfunctors
|
src/Data/IxFunctor/Copointed.hs
|
bsd-3-clause
| 159
| 0
| 8
| 30
| 45
| 24
| 21
| 5
| 0
|
module Data.Model (
-- |Check the <https://github.com/tittoassini/model tutorial and github repo>.
module Data.Model.Class
,module Data.Model.Types
,module Data.Model.Pretty
,module Data.Model.Util
) where
import Data.Model.Class
import Data.Model.Instances ()
import Data.Model.Pretty
import Data.Model.Types
import Data.Model.Util
|
Quid2/model
|
src/Data/Model.hs
|
bsd-3-clause
| 396
| 0
| 5
| 95
| 70
| 48
| 22
| 10
| 0
|
module Main where
import Codec.Picture as Juicy
import Control.Lens
import Control.Monad.Catch
import Control.Monad.Except
import Data.Int
import Data.Proxy
import Matrix
import qualified Data.Map.Strict as M
import qualified Data.Vector as V
import LambdaCube.GL as LC
import LambdaCube.GL.Mesh as LC
import Game.GoreAndAsh.Core
import Game.GoreAndAsh.LambdaCube
import Game.GoreAndAsh.Logging
import Game.GoreAndAsh.SDL as SDL
import Game.GoreAndAsh.Time
type AppStack t = LambdaCubeT t (SDLT t (LoggingT t (TimerT t (GameMonad t))))
newtype AppMonad t a = AppMonad { runAppMonad :: AppStack t a}
deriving (Functor, Applicative, Monad, MonadFix)
-- | Single application rendering pipeline
mainPipeline :: PipelineId
mainPipeline = PipelineId "mainPipeline"
-- | Load and compile LambdaCube pipeline
initPipe :: forall t m . (MonadLambdaCube t m) => m ()
initPipe = do
lambdacubeAddPipeline [".", "../shared"] "example04.lc" mainPipeline $ do
defObjectArray "objects" Triangles $ do
"position" @: Attribute_V3F
"normal" @: Attribute_V3F
"uv" @: Attribute_V2F
defUniforms $ do
"modelMat" @: M44F
"viewMat" @: M44F
"projMat" @: M44F
"depthMVP" @: M44F
"diffuseTexture" @: FTexture2D
"lightDir" @: V3F
"windowWidth" @: Int
"windowHeight" @: Int
-- | Draw single frame with LambdaCube on SDL context
drawFrame :: forall t . (ReflexHost t, MonadIO (HostFrame t))
=> (Word -> Word -> IO ()) -- ^ Updates width and height of context for LambdaCube
-> IO () -- ^ Action that render LambdaCube scene
-> Window -- ^ Window we render on
-> Renderer -- ^ Renderer of the window
-> HostFrame t ()
drawFrame updateLambdaCubeSize renderLambdaCube win _ = do
-- rendererDrawColor r $= SDL.V4 0 0 0 0
-- clear r
SDL.V2 w h <- glGetDrawableSize win
liftIO $ do
updateLambdaCubeSize (fromIntegral w) (fromIntegral h)
renderLambdaCube
glSwapWindow win
-- | Initialise window and set up render pipeline
app :: forall t m . (MonadLambdaCube t m, MonadSDL t m, TimerMonad t m) => m ()
app = do
SDL.initializeAll
sizeUpdater <- lambdacubeGetSizeUpdater
renderer <- lambdacubeGetRenderer
rec
win <- createMainWindow tickE (drawFrame sizeUpdater renderer) $ defaultWindowCfg
& windowCfgTitle .~ pure "Gore&Ash LambdaCube example 04"
& windowCfgConfig .~ defaultWindow {
windowOpenGL = Just defaultOpenGL {
glProfile = Core Normal 3 3
}
, windowInitialSize = SDL.V2 640 640
}
glMakeCurrent (win ^. windowWindow) (win ^. windowContext)
initPipe
tickE <- uncurry (simulateStorage win) =<< initStorage
return ()
-- | Initialise LambaCube storages
initStorage :: forall t m . (MonadLambdaCube t m) => m (GLStorage, GPUMesh)
initStorage = do
(sid, storage) <- lambdacubeCreateStorage mainPipeline
gpuMesh <- liftIO $ LC.uploadMeshToGPU cubeMesh
lambdacubeRenderStorageFirst sid
return (storage, gpuMesh)
-- | Get dynamic aspect ratio of window
windowAspect :: Reflex t => WindowWidget t -> Dynamic t Float
windowAspect win = ffor (win ^. windowSizeDyn) $ \(SDL.V2 w h) ->
fromIntegral w / fromIntegral h
-- | Constantly update LambdaCube storage
simulateStorage :: forall t m . (MonadLambdaCube t m, TimerMonad t m)
=> WindowWidget t -> GLStorage -> GPUMesh -> m (Event t ())
simulateStorage win storage gpuMesh = do
let dt = 1 / 60 :: Float
tickE <- tickEvery (realToFrac dt)
tD <- foldDyn (const (+ dt)) 0 tickE
tickedE <- performEvent $ ffor tickE $ const $ do
t <- sample . current $ tD
aspect <- sample . current $ windowAspect win
SDL.V2 w h <- sample . current $ win ^. windowSizeDyn
liftIO $ LC.updateUniforms storage $ do
"viewMat" @= return (cameraMatrix t)
"projMat" @= return (projMatrix aspect)
"lightDir" @= return lightDirection
"windowWidth" @= return (fromIntegral w :: Int32)
"windowHeight" @= return (fromIntegral h :: Int32)
simulateCube tD storage gpuMesh
simulateWall tD storage gpuMesh
return tickedE
-- | Render cube object
simulateCube :: forall t m . (MonadLambdaCube t m)
=> Dynamic t Float -> GLStorage -> GPUMesh -> m ()
simulateCube tD storage gpuMesh = do
-- upload geometry to GPU and add to pipeline input
obj <- liftIO $ LC.addMeshToObjectArray storage "objects" [
"modelMat"
, "diffuseTexture"
, "depthMVP"] gpuMesh
-- load image and upload texture
textureData <- liftIO $ do
Right img <- Juicy.readImage "../shared/logo.png"
LC.uploadTexture2DToGPU img
performEvent_ $ ffor (updated tD) $ \t -> liftIO $ do
let setter = LC.objectUniformSetter obj
uniformM44F "modelMat" setter $ modelMatrixCube t
uniformM44F "depthMVP" setter $ depthMVPCube t
uniformFTexture2D "diffuseTexture" setter textureData
-- | Render wall object
simulateWall :: forall t m . (MonadLambdaCube t m)
=> Dynamic t Float -> GLStorage -> GPUMesh -> m ()
simulateWall tD storage gpuMesh = do
-- upload geometry to GPU and add to pipeline input
obj <- liftIO $ LC.addMeshToObjectArray storage "objects" [
"modelMat"
, "diffuseTexture"
, "depthMVP"] gpuMesh
-- load image and upload texture
textureData <- liftIO $ do
Right img <- Juicy.readImage "../shared/brick.jpg"
LC.uploadTexture2DToGPU img
performEvent_ $ ffor (updated tD) $ const $ liftIO $ do
let setter = LC.objectUniformSetter obj
uniformM44F "modelMat" setter modelMatrixWall
uniformM44F "depthMVP" setter depthMVPWall
uniformFTexture2D "diffuseTexture" setter textureData
main :: IO ()
main = runSpiderHost $ hostApp $ runModule opts (app :: AppMonad Spider ())
opts :: LambdaCubeOptions ()
opts = LambdaCubeOptions {
lambdaOptsNext = ()
}
-- geometry data: triangles
cubeMesh :: LC.Mesh
cubeMesh = Mesh
{ mAttributes = M.fromList
[ ("position", A_V3F $ V.fromList vertecies)
, ("normal", A_V3F $ V.fromList normals)
, ("uv", A_V2F $ V.fromList uvs)
]
, mPrimitive = P_Triangles
}
where
vertecies = [
v3, v2, v1, v3, v1, v0
, v4, v7, v6, v4, v6, v5
, v0, v1, v7, v0, v7, v4
, v5, v6, v2, v5, v2, v3
, v2, v6, v7, v2, v7, v1
, v5, v3, v0, v5, v0, v4
]
normals = concat [
replicate 6 n0
, replicate 6 n1
, replicate 6 n2
, replicate 6 n3
, replicate 6 n4
, replicate 6 n5
]
uvs = concat $ replicate 6 [u1, u2, u3, u1, u3, u0]
v0 = LC.V3 (-1) (-1) (-1)
v1 = LC.V3 (-1) 1 (-1)
v2 = LC.V3 1 1 (-1)
v3 = LC.V3 1 (-1) (-1)
v4 = LC.V3 (-1) (-1) 1
v5 = LC.V3 1 (-1) 1
v6 = LC.V3 1 1 1
v7 = LC.V3 (-1) 1 1
n0 = LC.V3 0 0 (-1)
n1 = LC.V3 0 0 1
n2 = LC.V3 (-1) 0 0
n3 = LC.V3 1 0 0
n4 = LC.V3 0 1 0
n5 = LC.V3 0 (-1) 0
u0 = LC.V2 0 0
u1 = LC.V2 1 0
u2 = LC.V2 1 1
u3 = LC.V2 0 1
-- Boilerplate below
deriving instance (ReflexHost t, MonadCatch (HostFrame t)) => MonadCatch (AppMonad t)
deriving instance (ReflexHost t, MonadThrow (HostFrame t)) => MonadThrow (AppMonad t)
deriving instance (ReflexHost t, MonadMask (HostFrame t)) => MonadMask (AppMonad t)
deriving instance (ReflexHost t, MonadIO (HostFrame t)) => MonadIO (AppMonad t)
deriving instance (ReflexHost t, MonadIO (HostFrame t), MonadThrow (HostFrame t)) => MonadLambdaCube t (AppMonad t)
deriving instance (ReflexHost t, MonadIO (HostFrame t)) => TimerMonad t (AppMonad t)
deriving instance (ReflexHost t, MonadIO (HostFrame t)) => LoggingMonad t (AppMonad t)
deriving instance (ReflexHost t, MonadIO (HostFrame t), MonadCatch (HostFrame t)) => MonadSDL t (AppMonad t)
deriving instance (ReflexHost t) => MonadSample t (AppMonad t)
deriving instance (ReflexHost t) => MonadHold t (AppMonad t)
deriving instance (ReflexHost t) => MonadSubscribeEvent t (AppMonad t)
instance ReflexHost t => MonadReflexCreateTrigger t (AppMonad t) where
newEventWithTrigger = AppMonad . newEventWithTrigger
newFanEventWithTrigger trigger = AppMonad $ newFanEventWithTrigger trigger
instance (ReflexHost t, MonadIO (HostFrame t)) => MonadAppHost t (AppMonad t) where
getFireAsync = AppMonad getFireAsync
getRunAppHost = do
runner <- AppMonad getRunAppHost
return $ \m -> runner $ runAppMonad m
performPostBuild_ = AppMonad . performPostBuild_
liftHostFrame = AppMonad . liftHostFrame
instance (ReflexHost t, MonadIO (HostFrame t)) => GameModule t (AppMonad t) where
type ModuleOptions t (AppMonad t) = ModuleOptions t (AppStack t)
runModule os m = runModule os $ runAppMonad m
withModule t _ = withModule t (Proxy :: Proxy (AppStack t))
|
Teaspot-Studio/gore-and-ash-lambdacube
|
examples/04/Example04.hs
|
bsd-3-clause
| 8,721
| 0
| 17
| 1,983
| 2,917
| 1,499
| 1,418
| -1
| -1
|
-------------------------------------------------------------------------------
-- |
-- Module : System.Hardware.Arduino.SamplePrograms.Pulse
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Demonstrates 'pulseIn_hostTiming' and 'pulseOut_hostTiming' functions, sending
-- and receiving pulses to/from the board.
-------------------------------------------------------------------------------
module System.Hardware.Arduino.SamplePrograms.Pulse where
import Control.Monad (forever)
import Control.Monad.Trans (liftIO)
import System.Hardware.Arduino
-------------------------------------------------------------------------------
-- * Detecting pulses
-------------------------------------------------------------------------------
-- | Computes the amount of time a push-button is connected to
-- input pin 2 on the Arduino. We will wait for at most 5 seconds,
-- as a further demonstration of the time-out facility. Note that the
-- timing is done on the host side, so this measurement is inherently
-- inaccurate.
--
-- The wiring is straightforward: Simply put a push-button between
-- digital input 2 and +5V, guarded by a 10K resistor:
--
-- <<http://github.com/LeventErkok/hArduino/raw/master/System/Hardware/Arduino/SamplePrograms/Schematics/PulseIn.png>>
pulseInDemo :: IO ()
pulseInDemo = withArduino False "/dev/cu.usbmodemFD131" $ do
setPinMode pb INPUT
go
where pb = digital 2
go = forever $ do
liftIO $ putStr "Ready, push-and-hold for less than 5 seconds: "
mbDur <- pulseIn_hostTiming pb True (Just 5000000)
liftIO $ putStrLn $ case mbDur of
Nothing -> "Time out!"
Just d -> "Button stayed high for: " ++ show d ++ " micro-seconds"
-------------------------------------------------------------------------------
-- * Sending pulses
-------------------------------------------------------------------------------
-- | Send pulses on a led as requested by the user. Note that the timing is computed
-- on the host side, thus the duration of the pulse is subject to some error due to
-- the Firmata communication overhead.
--
-- Wiring: Simply a led on pin 13:
--
-- <<http://github.com/LeventErkok/hArduino/raw/master/System/Hardware/Arduino/SamplePrograms/Schematics/Blink.png>>
pulseOutDemo :: IO ()
pulseOutDemo = withArduino False "/dev/cu.usbmodemFD131" $ do
setPinMode led OUTPUT
digitalWrite led False
forever trigger
where led = digital 13
trigger = do liftIO $ putStr "Pulse duration? (microseconds) "
d <- liftIO getLine
case reads d of
[(v, "")] -> pulseOut_hostTiming led True 0 v
_ -> liftIO $ putStrLn "Please enter a number."
|
LeventErkok/hArduino
|
System/Hardware/Arduino/SamplePrograms/Pulse.hs
|
bsd-3-clause
| 2,911
| 0
| 16
| 612
| 323
| 175
| 148
| 26
| 2
|