| code (string, length 5..1.03M) | repo_name (string, length 5..90) | path (string, length 4..158) | license (string, 15 distinct values) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
<?xml version='1.0' encoding='UTF-8' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"../dtd/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ja">
<!-- title -->
<title>LightZone - ヘルプ</title>
<!-- maps -->
<maps>
<homeID>top</homeID>
<mapref location="Japanese/Map.jhm"/>
</maps>
<!-- views -->
<view>
<name>TOC</name>
<label>目次</label>
<type>javax.help.TOCView</type>
<data>Japanese/LightZoneTOC.xml</data>
</view>
<view>
<name>Search</name>
<label>検索</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
Japanese/JavaHelpSearch
</data>
</view>
<presentation default="true" displayviewimages="false">
<name>main window</name>
<size width="700" height="400" />
<location x="200" y="200" />
<title>LightZone - オンライン ヘルプ</title>
<toolbar>
<helpaction>javax.help.BackAction</helpaction>
<helpaction>javax.help.ForwardAction</helpaction>
<helpaction>javax.help.SeparatorAction</helpaction>
<helpaction>javax.help.HomeAction</helpaction>
<helpaction>javax.help.ReloadAction</helpaction>
<helpaction>javax.help.SeparatorAction</helpaction>
<helpaction>javax.help.PrintAction</helpaction>
<helpaction>javax.help.PrintSetupAction</helpaction>
</toolbar>
</presentation>
<presentation>
<name>main</name>
<size width="400" height="400" />
<location x="200" y="200" />
<title>LightZone - オンライン ヘルプ</title>
</presentation>
</helpset>
| repo_name: MarinnaCole/LightZone | path: linux/help/LightZone_ja.hs | license: bsd-3-clause | size: 1,678 | n_ast_errors: 132 | ast_max_depth: 91 | n_whitespaces: 305 | n_ast_nodes: 641 | n_ast_terminals: 321 | n_ast_nonterminals: 320 | loc: -1 | cycloplexity: -1 |
module Pats where
main :: Fay ()
main = do
case [1,2] of
[] -> putStrLn "got []"
[a] -> putStrLn "got one value."
[a,b] -> putStrLn "got two values."
case [1,2] of
(([1,2])) -> putStrLn "parens"
| repo_name: beni55/fay | path: tests/pats.hs | license: bsd-3-clause | size: 221 | n_ast_errors: 0 | ast_max_depth: 12 | n_whitespaces: 63 | n_ast_nodes: 104 | n_ast_terminals: 56 | n_ast_nonterminals: 48 | loc: 9 | cycloplexity: 3 |
--------------------------------------------------------------------
-- |
-- Module : MediaWiki.API.Types
-- Description : Basic MediaWiki API types
-- Copyright : (c) Sigbjorn Finne, 2008
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <sof@forkIO.com>
-- Stability : provisional
-- Portability: portable
--
-- Basic MediaWiki API types
--
--------------------------------------------------------------------
module MediaWiki.API.Types where
-- base types
type UserName = String
type NamespaceID = String
type TimeString = String
data Direction = Up | Down deriving ( Eq )
type RevID = String
type PageName = String
type UserID = String
data TimeArrow = Earlier | Later deriving ( Eq )
type Timestamp = String
type Redirect = String
type SortKind = String
type CatName = String
type GroupName = String
type FilterLang = String
type WithRedirects = String
type URLString = String
type Token = String
type LangName = String
nullRevId :: RevID
nullRevId = "0"
nullTimestamp :: Timestamp
nullTimestamp = ""
nullUser :: UserName
nullUser = ""
data PageTitle
= PageTitle { pgNS :: NamespaceID
, pgTitle :: Title
, pgMbId :: Maybe PageID
, pgMissing :: Bool
}
emptyPageTitle :: PageTitle
emptyPageTitle
= PageTitle { pgNS = mainNamespace
, pgTitle = ""
, pgMbId = Nothing
, pgMissing = False
}
mainNamespace :: NamespaceID
mainNamespace = "0"
ns_MAIN :: NamespaceID
ns_MAIN = mainNamespace
ns_IMAGE :: NamespaceID
ns_IMAGE = "6"
data Format
= Format
{ formatKind :: FormatKind
, formatFormatted :: Bool
}
emptyFormat :: Format
emptyFormat = Format{formatKind=FormatXML, formatFormatted=True}
xmlFormat :: Format
xmlFormat = emptyFormat{formatKind=FormatXML, formatFormatted=False}
data FormatKind
= FormatJSON
| FormatPHP
| FormatWDDX
| FormatXML
| FormatYAML
| FormatTxt
| FormatDbg
type User = String
type Password = String
type LoginToken = String
type SessionID = String
type SessionToken = String
type ValueName = (String,String)
data UserInfo
= UserInfo
{ uiName :: UserName
, uiId :: UserID
, uiIsAnon :: Bool
, uiHasMessage :: Bool
, uiBlocked :: Maybe (UserName,String)
, uiGroups :: [String]
, uiRights :: [String]
, uiOptions :: [(String,String)]
, uiRateLimits :: [RateLimit]
, uiEditCount :: Maybe Int
}
emptyUserInfo :: UserInfo
emptyUserInfo = UserInfo
{ uiName = nullUser
, uiId = "0"
, uiIsAnon = True
, uiHasMessage = False
, uiBlocked = Nothing
, uiGroups = []
, uiRights = []
, uiOptions = []
, uiRateLimits = []
, uiEditCount = Nothing
}
data RateLimit
= RateLimit
{ rlName :: String
, rlHits :: Int
, rlSeconds :: Int
}
data NamespaceInfo
= NamespaceInfo
{ nsId :: String
, nsTitle :: String
, nsSubpages :: Bool
}
data InterwikiEntry
= InterwikiEntry
{ iwPrefix :: String
, iwLocal :: Bool
, iwTranscludable :: Maybe Bool
, iwUrl :: String
, iwLanguage :: Maybe String
}
data UserSession
= UserSession
{ sessUserId :: UserID
, sessUserName :: UserName
, sessPassword :: Maybe Password -- not sure; could leave out.
, sessCookiePrefix :: Maybe String
, sessSessionId :: Maybe SessionID
, sessToken :: LoginToken
}
data HelpRequest
= HelpRequest
{ helpVersion :: Maybe Bool
}
type Title = String -- Q: what kind of encoding/escaping can be assumed here?
type PageID = String -- numeric ID, so arguably wrong Haskell type.
type RevisionID = String -- ditto.
newtype PropKind
= PropKind { prKind :: String }
{- Not deemed worthy to try to enumerate them all.
-- Three major reasons:
-- - supported properties are likely to evolve with MW API.
-- - fields support subsets of the type, so using a union type
-- for these is imprecise.
-- - development of queries is driven by reading the API documentation
-- from the MW API help page, so transliterations ought to be
-- accommodated.
-- - being too lazy to write them out is not a reason; did
-- have such an enum type defined at one point :-)
-}
newtype MetaKind
= MetaKind { meKind :: String } -- likely values: siteinfo, userinfo, allmessages
newtype ListKind
= ListKind { liKind :: String }
newtype GeneratorKind
= GeneratorKind { genKind :: String }
class APIRequest a where
showReq :: a -> [Maybe (String,String)]
isPostable :: a -> Bool
isPostable _ = False
queryKind :: a -> QueryKind
queryKind _ = QProp ""
data QueryKind
= QProp String | QMeta String | QList String | QGen String
deriving ( Eq )
data QueryRequest
= QueryRequest
{ quTitles :: [Title]
, quPageIds :: [PageID]
, quRevIds :: [RevisionID]
, quProps :: [PropKind]
, quLists :: [ListKind]
, quMetas :: [MetaKind]
, quGenerator :: Maybe GeneratorKind
, quFollowRedirects :: Maybe Bool
, quIndexPageIds :: Maybe Bool
}
emptyQuery :: QueryRequest
emptyQuery = QueryRequest
{ quTitles = []
, quPageIds = []
, quRevIds = []
, quProps = []
, quLists = []
, quMetas = []
, quGenerator = Nothing
, quFollowRedirects = Nothing
, quIndexPageIds = Nothing
}
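-- Added illustration (editor's note, not part of the original module): a
-- concrete query is normally built by record update on 'emptyQuery'; the
-- page title and property name below are made-up values.
--
-- > exampleQuery :: QueryRequest
-- > exampleQuery = emptyQuery{ quTitles = ["Haskell (programming language)"]
-- >                          , quProps  = [PropKind "info"]
-- >                          }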
{-
data MetaKindProp
= SiteInfoProp
| UserInfoProp
| AllMessagesProp
| ExpandTemplatesProp
| ParseProp
| OpenSearchProp
| FeedWatchlistProp
| HelpProp
| ParamInfoProp
-}
| repo_name: neobrain/neobot | path: mediawiki/MediaWiki/API/Types.hs | license: bsd-3-clause | size: 5,768 | n_ast_errors: 4 | ast_max_depth: 10 | n_whitespaces: 1,627 | n_ast_nodes: 1,117 | n_ast_terminals: 704 | n_ast_nonterminals: 413 | loc: 158 | cycloplexity: 1 |
{-# LANGUAGE PatternGuards, ScopedTypeVariables #-}
{-
Find bindings within a let, and lists of statements
If you have n the same, error out
<TEST>
main = do a; a; a; a
main = do a; a; a; a; a; a -- ???
main = do a; a; a; a; a; a; a -- ???
main = do (do b; a; a; a); do (do c; a; a; a) -- ???
main = do a; a; a; b; a; a; a -- ???
main = do a; a; a; b; a; a
foo = a where {a = 1; b = 2; c = 3}; bar = a where {a = 1; b = 2; c = 3} -- ???
</TEST>
-}
module Hint.Duplicate(duplicateHint) where
import Hint.Type
import Data.Tuple.Extra
import Data.List hiding (find)
import qualified Data.Map as Map
duplicateHint :: CrossHint
duplicateHint ms =
dupes [y | Do _ y :: Exp S <- universeBi modu] ++
dupes [y | BDecls l y :: Binds S <- universeBi modu]
where modu = map snd ms
dupes ys =
[rawIdeaN
(if length xs >= 5 then Error else Warning)
"Reduce duplication" p1
(unlines $ map (prettyPrint . fmap (const p1)) xs)
(Just $ "Combine with " ++ showSrcLoc (getPointLoc p2)) []
| (p1,p2,xs) <- duplicateOrdered 3 $ map (map (toSrcSpan . ann &&& dropAnn)) ys]
---------------------------------------------------------------------
-- DUPLICATE FINDING
-- | The position to return if we match at this point, and the map of where to go next
-- If two runs have the same vals, always use the first pos you find
data Dupe pos val = Dupe pos (Map.Map val (Dupe pos val))
find :: Ord val => [val] -> Dupe pos val -> (pos, Int)
find (v:vs) (Dupe p mp) | Just d <- Map.lookup v mp = second (+1) $ find vs d
find _ (Dupe p mp) = (p, 0)
add :: Ord val => pos -> [val] -> Dupe pos val -> Dupe pos val
add pos [] d = d
add pos (v:vs) (Dupe p mp) = Dupe p $ Map.insertWith f v (add pos vs $ Dupe pos Map.empty) mp
where f new = add pos vs
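-- Added worked example (editor's sketch, not in the original source):
-- starting from an empty trie, @add p1 ["a","b"]@ records the run ["a","b"]
-- at position p1; a later @find ["a","b","c"]@ then returns (p1, 2), i.e.
-- the position of the earlier run together with the length of the shared
-- prefix, which 'duplicateOrdered' below compares against the threshold.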
duplicateOrdered :: Ord val => Int -> [[(SrcSpan,val)]] -> [(SrcSpan,SrcSpan,[val])]
duplicateOrdered threshold xs = concat $ concat $ snd $ mapAccumL f (Dupe nullSrcSpan Map.empty) xs
where
f d xs = second overlaps $ mapAccumL (g pos) d $ takeWhile ((>= threshold) . length) $ tails xs
where pos = Map.fromList $ zip (map fst xs) [0..]
g pos d xs = (d2, res)
where
res = [(p,pme,take mx vs) | i >= threshold
,let mx = maybe i (\x -> min i $ (pos Map.! pme) - x) $ Map.lookup p pos
,mx >= threshold]
vs = map snd xs
(p,i) = find vs d
pme = fst $ head xs
d2 = add pme vs d
overlaps (x@((_,_,n):_):xs) = x : overlaps (drop (length n - 1) xs)
overlaps (x:xs) = x : overlaps xs
overlaps [] = []
| repo_name: eigengrau/hlint | path: src/Hint/Duplicate.hs | license: bsd-3-clause | size: 2,675 | n_ast_errors: 0 | ast_max_depth: 21 | n_whitespaces: 775 | n_ast_nodes: 978 | n_ast_terminals: 508 | n_ast_nonterminals: 470 | loc: -1 | cycloplexity: -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module T14172a where
import Data.Kind
import Data.Coerce
import Data.Functor.Compose
import Data.Functor.Identity
class Profunctor p where
dimap :: (a -> b) -> (c -> d) -> p b c -> p a d
(#.) :: Coercible c b => (b -> c) -> p a b -> p a c
instance Profunctor (->) where
dimap ab cd bc = cd . bc . ab
{-# INLINE dimap #-}
(#.) _ = coerce (\x -> x :: b) :: forall a b. Coercible b a => a -> b
{-# INLINE (#.) #-}
type Iso s t a b = forall p f. (Profunctor p, Functor f) => p a (f b) -> p s (f t)
type Iso' s a = Iso s s a a
iso :: (s -> a) -> (b -> t) -> Iso s t a b
iso sa bt = dimap sa (fmap bt)
{-# INLINE iso #-}
type AnIso s t a b = Exchange a b a (Identity b) -> Exchange a b s (Identity t)
data Exchange a b s t = Exchange (s -> a) (b -> t)
instance Profunctor (Exchange a b) where
dimap f g (Exchange sa bt) = Exchange (sa . f) (g . bt)
{-# INLINE dimap #-}
(#.) _ = coerce
{-# INLINE ( #. ) #-}
withIso :: AnIso s t a b -> ((s -> a) -> (b -> t) -> r) -> r
withIso ai k = case ai (Exchange id Identity) of
Exchange sa bt -> k sa (runIdentity #. bt)
{-# INLINE withIso #-}
class Wrapped s where
type Unwrapped s :: Type
_Wrapped' :: Iso' s (Unwrapped s)
class Wrapped s => Rewrapped (s :: Type) (t :: Type)
class (Rewrapped s t, Rewrapped t s) => Rewrapping s t
instance (Rewrapped s t, Rewrapped t s) => Rewrapping s t
instance (t ~ Compose f' g' a') => Rewrapped (Compose f g a) t
instance Wrapped (Compose f g a) where
type Unwrapped (Compose f g a) = f (g a)
_Wrapped' = iso getCompose Compose
_Wrapping :: Rewrapping s t => (Unwrapped s -> s) -> Iso s t (Unwrapped s) (Unwrapped t)
_Wrapping _ = _Wrapped
{-# INLINE _Wrapping #-}
_Wrapped :: Rewrapping s t => Iso s t (Unwrapped s) (Unwrapped t)
_Wrapped = withIso _Wrapped' $ \ sa _ -> withIso _Wrapped' $ \ _ bt -> iso sa bt
{-# INLINE _Wrapped #-}
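-- Added note (editor's illustration, not part of the original test case):
-- with the instances above,
-- withIso _Wrapped' (\sa _ -> sa (Compose (Just [True])))
-- evaluates to Just [True], i.e. '_Wrapped'' on 'Compose' is just 'getCompose'.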
| repo_name: sdiehl/ghc | path: testsuite/tests/polykinds/T14172a.hs | license: bsd-3-clause | size: 2,125 | n_ast_errors: 0 | ast_max_depth: 11 | n_whitespaces: 488 | n_ast_nodes: 913 | n_ast_terminals: 486 | n_ast_nonterminals: 427 | loc: -1 | cycloplexity: -1 |
{-# LANGUAGE CPP #-}
#if MIN_VERSION_base(4,5,0)
import Data.Bits ((.&.), popCount)
import Data.Word (Word)
#else
import Data.Bits ((.&.))
#endif
import Data.IntSet
import Data.List (nub,sort)
import qualified Data.List as List
import Data.Monoid (mempty)
import qualified Data.Set as Set
import Prelude hiding (lookup, null, map, filter, foldr, foldl)
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit hiding (Test, Testable)
import Test.QuickCheck hiding ((.&.))
main :: IO ()
main = defaultMain [ testCase "lookupLT" test_lookupLT
, testCase "lookupGT" test_lookupGT
, testCase "lookupLE" test_lookupLE
, testCase "lookupGE" test_lookupGE
, testCase "split" test_split
, testProperty "prop_Single" prop_Single
, testProperty "prop_Member" prop_Member
, testProperty "prop_NotMember" prop_NotMember
, testProperty "prop_LookupLT" prop_LookupLT
, testProperty "prop_LookupGT" prop_LookupGT
, testProperty "prop_LookupLE" prop_LookupLE
, testProperty "prop_LookupGE" prop_LookupGE
, testProperty "prop_InsertDelete" prop_InsertDelete
, testProperty "prop_MemberFromList" prop_MemberFromList
, testProperty "prop_UnionInsert" prop_UnionInsert
, testProperty "prop_UnionAssoc" prop_UnionAssoc
, testProperty "prop_UnionComm" prop_UnionComm
, testProperty "prop_Diff" prop_Diff
, testProperty "prop_Int" prop_Int
, testProperty "prop_Ordered" prop_Ordered
, testProperty "prop_List" prop_List
, testProperty "prop_DescList" prop_DescList
, testProperty "prop_AscDescList" prop_AscDescList
, testProperty "prop_fromList" prop_fromList
, testProperty "prop_MaskPow2" prop_MaskPow2
, testProperty "prop_Prefix" prop_Prefix
, testProperty "prop_LeftRight" prop_LeftRight
, testProperty "prop_isProperSubsetOf" prop_isProperSubsetOf
, testProperty "prop_isProperSubsetOf2" prop_isProperSubsetOf2
, testProperty "prop_isSubsetOf" prop_isSubsetOf
, testProperty "prop_isSubsetOf2" prop_isSubsetOf2
, testProperty "prop_size" prop_size
, testProperty "prop_findMax" prop_findMax
, testProperty "prop_findMin" prop_findMin
, testProperty "prop_ord" prop_ord
, testProperty "prop_readShow" prop_readShow
, testProperty "prop_foldR" prop_foldR
, testProperty "prop_foldR'" prop_foldR'
, testProperty "prop_foldL" prop_foldL
, testProperty "prop_foldL'" prop_foldL'
, testProperty "prop_map" prop_map
, testProperty "prop_maxView" prop_maxView
, testProperty "prop_minView" prop_minView
, testProperty "prop_split" prop_split
, testProperty "prop_splitMember" prop_splitMember
, testProperty "prop_splitRoot" prop_splitRoot
, testProperty "prop_partition" prop_partition
, testProperty "prop_filter" prop_filter
#if MIN_VERSION_base(4,5,0)
, testProperty "prop_bitcount" prop_bitcount
#endif
]
----------------------------------------------------------------
-- Unit tests
----------------------------------------------------------------
test_lookupLT :: Assertion
test_lookupLT = do
lookupLT 3 (fromList [3, 5]) @?= Nothing
lookupLT 5 (fromList [3, 5]) @?= Just 3
test_lookupGT :: Assertion
test_lookupGT = do
lookupGT 4 (fromList [3, 5]) @?= Just 5
lookupGT 5 (fromList [3, 5]) @?= Nothing
test_lookupLE :: Assertion
test_lookupLE = do
lookupLE 2 (fromList [3, 5]) @?= Nothing
lookupLE 4 (fromList [3, 5]) @?= Just 3
lookupLE 5 (fromList [3, 5]) @?= Just 5
test_lookupGE :: Assertion
test_lookupGE = do
lookupGE 3 (fromList [3, 5]) @?= Just 3
lookupGE 4 (fromList [3, 5]) @?= Just 5
lookupGE 6 (fromList [3, 5]) @?= Nothing
test_split :: Assertion
test_split = do
split 3 (fromList [1..5]) @?= (fromList [1,2], fromList [4,5])
{--------------------------------------------------------------------
Arbitrary, reasonably balanced trees
--------------------------------------------------------------------}
instance Arbitrary IntSet where
arbitrary = do{ xs <- arbitrary
; return (fromList xs)
}
{--------------------------------------------------------------------
Single, Member, Insert, Delete, Member, FromList
--------------------------------------------------------------------}
prop_Single :: Int -> Bool
prop_Single x
= (insert x empty == singleton x)
prop_Member :: [Int] -> Int -> Bool
prop_Member xs n =
let m = fromList xs
in all (\k -> k `member` m == (k `elem` xs)) (n : xs)
prop_NotMember :: [Int] -> Int -> Bool
prop_NotMember xs n =
let m = fromList xs
in all (\k -> k `notMember` m == (k `notElem` xs)) (n : xs)
test_LookupSomething :: (Int -> IntSet -> Maybe Int) -> (Int -> Int -> Bool) -> [Int] -> Bool
test_LookupSomething lookup' cmp xs =
let odd_sorted_xs = filter_odd $ nub $ sort xs
t = fromList odd_sorted_xs
test x = case List.filter (`cmp` x) odd_sorted_xs of
[] -> lookup' x t == Nothing
cs | 0 `cmp` 1 -> lookup' x t == Just (last cs) -- we want largest such element
| otherwise -> lookup' x t == Just (head cs) -- we want smallest such element
in all test xs
where filter_odd [] = []
filter_odd [_] = []
filter_odd (_ : o : xs) = o : filter_odd xs
prop_LookupLT :: [Int] -> Bool
prop_LookupLT = test_LookupSomething lookupLT (<)
prop_LookupGT :: [Int] -> Bool
prop_LookupGT = test_LookupSomething lookupGT (>)
prop_LookupLE :: [Int] -> Bool
prop_LookupLE = test_LookupSomething lookupLE (<=)
prop_LookupGE :: [Int] -> Bool
prop_LookupGE = test_LookupSomething lookupGE (>=)
prop_InsertDelete :: Int -> IntSet -> Property
prop_InsertDelete k t
= not (member k t) ==> delete k (insert k t) == t
prop_MemberFromList :: [Int] -> Bool
prop_MemberFromList xs
= all (`member` t) abs_xs && all ((`notMember` t) . negate) abs_xs
where abs_xs = [abs x | x <- xs, x /= 0]
t = fromList abs_xs
{--------------------------------------------------------------------
Union
--------------------------------------------------------------------}
prop_UnionInsert :: Int -> IntSet -> Bool
prop_UnionInsert x t
= union t (singleton x) == insert x t
prop_UnionAssoc :: IntSet -> IntSet -> IntSet -> Bool
prop_UnionAssoc t1 t2 t3
= union t1 (union t2 t3) == union (union t1 t2) t3
prop_UnionComm :: IntSet -> IntSet -> Bool
prop_UnionComm t1 t2
= (union t1 t2 == union t2 t1)
prop_Diff :: [Int] -> [Int] -> Bool
prop_Diff xs ys
= toAscList (difference (fromList xs) (fromList ys))
== List.sort ((List.\\) (nub xs) (nub ys))
prop_Int :: [Int] -> [Int] -> Bool
prop_Int xs ys
= toAscList (intersection (fromList xs) (fromList ys))
== List.sort (nub ((List.intersect) (xs) (ys)))
{--------------------------------------------------------------------
Lists
--------------------------------------------------------------------}
prop_Ordered
= forAll (choose (5,100)) $ \n ->
let xs = concat [[i-n,i-n]|i<-[0..2*n :: Int]]
in fromAscList xs == fromList xs
prop_List :: [Int] -> Bool
prop_List xs
= (sort (nub xs) == toAscList (fromList xs))
prop_DescList :: [Int] -> Bool
prop_DescList xs = (reverse (sort (nub xs)) == toDescList (fromList xs))
prop_AscDescList :: [Int] -> Bool
prop_AscDescList xs = toAscList s == reverse (toDescList s)
where s = fromList xs
prop_fromList :: [Int] -> Bool
prop_fromList xs
= case fromList xs of
t -> t == fromAscList sort_xs &&
t == fromDistinctAscList nub_sort_xs &&
t == List.foldr insert empty xs
where sort_xs = sort xs
nub_sort_xs = List.map List.head $ List.group sort_xs
{--------------------------------------------------------------------
Bin invariants
--------------------------------------------------------------------}
powersOf2 :: IntSet
powersOf2 = fromList [2^i | i <- [0..63]]
-- Check the invariant that the mask is a power of 2.
prop_MaskPow2 :: IntSet -> Bool
prop_MaskPow2 (Bin _ msk left right) = member msk powersOf2 && prop_MaskPow2 left && prop_MaskPow2 right
prop_MaskPow2 _ = True
-- Check that the prefix satisfies its invariant.
prop_Prefix :: IntSet -> Bool
prop_Prefix s@(Bin prefix msk left right) = all (\elem -> match elem prefix msk) (toList s) && prop_Prefix left && prop_Prefix right
prop_Prefix _ = True
-- Check that the left elements don't have the mask bit set, and the right
-- ones do.
prop_LeftRight :: IntSet -> Bool
prop_LeftRight (Bin _ msk left right) = and [x .&. msk == 0 | x <- toList left] && and [x .&. msk == msk | x <- toList right]
prop_LeftRight _ = True
{--------------------------------------------------------------------
IntSet operations are like Set operations
--------------------------------------------------------------------}
toSet :: IntSet -> Set.Set Int
toSet = Set.fromList . toList
-- Check that IntSet.isProperSubsetOf is the same as Set.isProperSubsetOf.
prop_isProperSubsetOf :: IntSet -> IntSet -> Bool
prop_isProperSubsetOf a b = isProperSubsetOf a b == Set.isProperSubsetOf (toSet a) (toSet b)
-- In the above test, isProperSubsetOf almost always returns False (since a
-- random set is almost never a subset of another random set). So this second
-- test checks the True case.
prop_isProperSubsetOf2 :: IntSet -> IntSet -> Bool
prop_isProperSubsetOf2 a b = isProperSubsetOf a c == (a /= c) where
c = union a b
prop_isSubsetOf :: IntSet -> IntSet -> Bool
prop_isSubsetOf a b = isSubsetOf a b == Set.isSubsetOf (toSet a) (toSet b)
prop_isSubsetOf2 :: IntSet -> IntSet -> Bool
prop_isSubsetOf2 a b = isSubsetOf a (union a b)
prop_size :: IntSet -> Bool
prop_size s = size s == List.length (toList s)
prop_findMax :: IntSet -> Property
prop_findMax s = not (null s) ==> findMax s == maximum (toList s)
prop_findMin :: IntSet -> Property
prop_findMin s = not (null s) ==> findMin s == minimum (toList s)
prop_ord :: IntSet -> IntSet -> Bool
prop_ord s1 s2 = s1 `compare` s2 == toList s1 `compare` toList s2
prop_readShow :: IntSet -> Bool
prop_readShow s = s == read (show s)
prop_foldR :: IntSet -> Bool
prop_foldR s = foldr (:) [] s == toList s
prop_foldR' :: IntSet -> Bool
prop_foldR' s = foldr' (:) [] s == toList s
prop_foldL :: IntSet -> Bool
prop_foldL s = foldl (flip (:)) [] s == List.foldl (flip (:)) [] (toList s)
prop_foldL' :: IntSet -> Bool
prop_foldL' s = foldl' (flip (:)) [] s == List.foldl' (flip (:)) [] (toList s)
prop_map :: IntSet -> Bool
prop_map s = map id s == s
prop_maxView :: IntSet -> Bool
prop_maxView s = case maxView s of
Nothing -> null s
Just (m,s') -> m == maximum (toList s) && s == insert m s' && m `notMember` s'
prop_minView :: IntSet -> Bool
prop_minView s = case minView s of
Nothing -> null s
Just (m,s') -> m == minimum (toList s) && s == insert m s' && m `notMember` s'
prop_split :: IntSet -> Int -> Bool
prop_split s i = case split i s of
(s1,s2) -> all (<i) (toList s1) && all (>i) (toList s2) && i `delete` s == union s1 s2
prop_splitMember :: IntSet -> Int -> Bool
prop_splitMember s i = case splitMember i s of
(s1,t,s2) -> all (<i) (toList s1) && all (>i) (toList s2) && t == i `member` s && i `delete` s == union s1 s2
prop_splitRoot :: IntSet -> Bool
prop_splitRoot s = loop ls && (s == unions ls)
where
ls = splitRoot s
loop [] = True
loop (s1:rst) = List.null
[ (x,y) | x <- toList s1
, y <- toList (unions rst)
, x > y ]
prop_partition :: IntSet -> Int -> Bool
prop_partition s i = case partition odd s of
(s1,s2) -> all odd (toList s1) && all even (toList s2) && s == s1 `union` s2
prop_filter :: IntSet -> Int -> Bool
prop_filter s i = partition odd s == (filter odd s, filter even s)
#if MIN_VERSION_base(4,5,0)
prop_bitcount :: Int -> Word -> Bool
prop_bitcount a w = bitcount_orig a w == bitcount_new a w
where
bitcount_orig a0 x0 = go a0 x0
where go a 0 = a
go a x = go (a + 1) (x .&. (x-1))
bitcount_new a x = a + popCount x
#endif
| repo_name: shockkolate/containers | path: tests/intset-properties.hs | license: bsd-3-clause | size: 12,774 | n_ast_errors: 0 | ast_max_depth: 17 | n_whitespaces: 3,087 | n_ast_nodes: 3,939 | n_ast_terminals: 2,042 | n_ast_nonterminals: 1,897 | loc: 231 | cycloplexity: 4 |
{-# LANGUAGE DataKinds, GADTs, KindSignatures #-}
module T15499 ()
where
data ADT (p :: Integer) where
ADT ::
{ a :: a
, b :: Integer
} -> ADT p
foo = undefined {b=undefined}
| repo_name: sdiehl/ghc | path: testsuite/tests/typecheck/should_compile/T15499.hs | license: bsd-3-clause | size: 191 | n_ast_errors: 0 | ast_max_depth: 7 | n_whitespaces: 50 | n_ast_nodes: 55 | n_ast_terminals: 36 | n_ast_nonterminals: 19 | loc: 8 | cycloplexity: 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.Chan
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (concurrency)
--
-- Unbounded channels.
--
-- The channels are implemented with @MVar@s and therefore inherit all the
-- caveats that apply to @MVar@s (possibility of races, deadlocks etc). The
-- stm (software transactional memory) library has a more robust implementation
-- of channels called @TChan@s.
--
-----------------------------------------------------------------------------
module Control.Concurrent.Chan
(
-- * The 'Chan' type
Chan, -- abstract
-- * Operations
newChan,
writeChan,
readChan,
dupChan,
unGetChan,
isEmptyChan,
-- * Stream interface
getChanContents,
writeList2Chan,
) where
import System.IO.Unsafe ( unsafeInterleaveIO )
import Control.Concurrent.MVar
import Control.Exception (mask_)
#define _UPK_(x) {-# UNPACK #-} !(x)
-- A channel is represented by two @MVar@s keeping track of the two ends
-- of the channel contents, i.e., the read and write ends. Empty @MVar@s
-- are used to handle consumers trying to read from an empty channel.
-- |'Chan' is an abstract type representing an unbounded FIFO channel.
data Chan a
= Chan _UPK_(MVar (Stream a))
_UPK_(MVar (Stream a)) -- Invariant: the Stream a is always an empty MVar
deriving (Eq)
type Stream a = MVar (ChItem a)
data ChItem a = ChItem a _UPK_(Stream a)
-- benchmarks show that unboxing the MVar here is worthwhile, because
-- although it leads to higher allocation, the channel data takes up
-- less space and is therefore quicker to GC.
-- See the Concurrent Haskell paper for a diagram explaining how the
-- different channel operations proceed.
-- @newChan@ sets up the read and write end of a channel by initialising
-- these two @MVar@s with an empty @MVar@.
-- |Build and return a new instance of 'Chan'.
newChan :: IO (Chan a)
newChan = do
hole <- newEmptyMVar
readVar <- newMVar hole
writeVar <- newMVar hole
return (Chan readVar writeVar)
-- To put an element on a channel, a new hole at the write end is created.
-- What was previously the empty @MVar@ at the back of the channel is then
-- filled in with a new stream element holding the entered value and the
-- new hole.
-- |Write a value to a 'Chan'.
writeChan :: Chan a -> a -> IO ()
writeChan (Chan _ writeVar) val = do
new_hole <- newEmptyMVar
mask_ $ do
old_hole <- takeMVar writeVar
putMVar old_hole (ChItem val new_hole)
putMVar writeVar new_hole
-- The reason we don't simply do this:
--
-- modifyMVar_ writeVar $ \old_hole -> do
-- putMVar old_hole (ChItem val new_hole)
-- return new_hole
--
-- is because if an asynchronous exception is received after the 'putMVar'
-- completes and before modifyMVar_ installs the new value, it will set the
-- Chan's write end to a filled hole.
-- |Read the next value from the 'Chan'.
readChan :: Chan a -> IO a
readChan (Chan readVar _) = do
modifyMVarMasked readVar $ \read_end -> do -- Note [modifyMVarMasked]
(ChItem val new_read_end) <- readMVar read_end
-- Use readMVar here, not takeMVar,
-- else dupChan doesn't work
return (new_read_end, val)
-- Note [modifyMVarMasked]
-- This prevents a theoretical deadlock if an asynchronous exception
-- happens during the readMVar while the MVar is empty. In that case
-- the read_end MVar will be left empty, and subsequent readers will
-- deadlock. Using modifyMVarMasked prevents this. The deadlock can
-- be reproduced, but only by expanding readMVar and inserting an
-- artificial yield between its takeMVar and putMVar operations.
-- |Duplicate a 'Chan': the duplicate channel begins empty, but data written to
-- either channel from then on will be available from both. Hence this creates
-- a kind of broadcast channel, where data written by anyone is seen by
-- everyone else.
--
-- (Note that a duplicated channel is not equal to its original.
-- So: @fmap (c /=) $ dupChan c@ returns @True@ for all @c@.)
dupChan :: Chan a -> IO (Chan a)
dupChan (Chan _ writeVar) = do
hole <- readMVar writeVar
newReadVar <- newMVar hole
return (Chan newReadVar writeVar)
-- |Put a data item back onto a channel, where it will be the next item read.
unGetChan :: Chan a -> a -> IO ()
unGetChan (Chan readVar _) val = do
new_read_end <- newEmptyMVar
modifyMVar_ readVar $ \read_end -> do
putMVar new_read_end (ChItem val read_end)
return new_read_end
{-# DEPRECATED unGetChan "if you need this operation, use Control.Concurrent.STM.TChan instead. See <http://ghc.haskell.org/trac/ghc/ticket/4154> for details" #-} -- deprecated in 7.0
-- |Returns 'True' if the supplied 'Chan' is empty.
isEmptyChan :: Chan a -> IO Bool
isEmptyChan (Chan readVar writeVar) = do
withMVar readVar $ \r -> do
w <- readMVar writeVar
let eq = r == w
eq `seq` return eq
{-# DEPRECATED isEmptyChan "if you need this operation, use Control.Concurrent.STM.TChan instead. See <http://ghc.haskell.org/trac/ghc/ticket/4154> for details" #-} -- deprecated in 7.0
-- Operators for interfacing with functional streams.
-- |Return a lazy list representing the contents of the supplied
-- 'Chan', much like 'System.IO.hGetContents'.
getChanContents :: Chan a -> IO [a]
getChanContents ch
= unsafeInterleaveIO (do
x <- readChan ch
xs <- getChanContents ch
return (x:xs)
)
-- |Write an entire list of items to a 'Chan'.
writeList2Chan :: Chan a -> [a] -> IO ()
writeList2Chan ch ls = sequence_ (map (writeChan ch) ls)
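{- Added usage sketch (editor's example, not part of this module; it also
   needs 'forkIO' from Control.Concurrent, which is not imported here):

     main = do ch <- newChan
               _ <- forkIO (writeList2Chan ch [1..10 :: Int])
               xs <- getChanContents ch
               print (take 10 xs)

   A channel obtained with 'dupChan' before the writer starts would let a
   second reader see the same ten items.
-}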
| repo_name: tolysz/prepare-ghcjs | path: spec-lts8/base/Control/Concurrent/Chan.hs | license: bsd-3-clause | size: 5,994 | n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 1,254 | n_ast_nodes: 802 | n_ast_terminals: 433 | n_ast_nonterminals: 369 | loc: 68 | cycloplexity: 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main where
import Import
import Yesod.Default.Config
import Yesod.Test
import Application (makeFoundation)
import HomeTest
main :: IO ()
main = do
conf <- loadConfig $ (configSettings Testing) { csParseExtra = parseExtra }
foundation <- makeFoundation conf
app <- toWaiAppPlain foundation
runTests app (connPool foundation) homeSpecs
| repo_name: snoyberg/photosorter | path: tests/main.hs | license: bsd-2-clause | size: 477 | n_ast_errors: 0 | ast_max_depth: 11 | n_whitespaces: 77 | n_ast_nodes: 108 | n_ast_terminals: 58 | n_ast_nonterminals: 50 | loc: 15 | cycloplexity: 1 |
{-# LANGUAGE TemplateHaskell #-}
module Tests.DBTree where
import Control.Monad
import Test.QuickCheck.All
import Test.QuickCheck.Arbitrary
import System.Exit
import Database.Siege.DBNode as N
import Database.Siege.DBTree as T
import Database.Siege.Memory
import qualified Data.ByteString as B
-- TODO: pull these out into a new file
instance Arbitrary B.ByteString where
arbitrary = fmap B.pack arbitrary
instance Arbitrary Node where
arbitrary = undefined
instance Arbitrary MemoryRef where
-- either Nothing or fmap Just arbitrary
arbitrary = undefined
prop_insert_lookup (ref, key, value) = testRawDBOperation $ do
ref' <- T.insert ref key value
value' <- T.lookup ref' key
return $ value == value'
prop_delete_lookup (ref, key) = testRawDBOperation $ do
ref' <- T.delete ref key
value <- T.lookup ref' key
return $ null value
runTests = do
succeed <- $quickCheckAll
when (not succeed) exitFailure
| repo_name: DanielWaterworth/siege | path: testsuite/Tests/DBTree.hs | license: mit | size: 936 | n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 158 | n_ast_nodes: 254 | n_ast_terminals: 136 | n_ast_nonterminals: 118 | loc: 27 | cycloplexity: 1 |
{-# LANGUAGE ViewPatterns #-}
module BeforeFix.Logic where
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Logic.Class
import Control.Monad.IO.Class
newtype ML m a = ML { toView :: m (Maybe (a, ML m a)) }
fromView = ML
single a = return (Just (a,mzero))
instance Monad m => Monad (ML m) where
return = fromView . single
(toView -> m) >>= f = fromView $ m >>= \x -> case x of
Nothing -> return Nothing
Just (h,t) -> toView (f h `mplus` (t >>= f))
fail _ = mzero
instance Monad m => MonadPlus (ML m) where
mzero = fromView (return Nothing)
mplus (toView -> a) b = fromView $ a >>= \x -> case x of
Nothing -> toView b
Just (h,t) -> return (Just (h,t `mplus` b))
instance MonadTrans ML where
lift m = fromView (m >>= single)
instance Monad m => MonadLogic (ML m) where
msplit (toView -> m) = lift m
observeAllT :: Monad m => ML m a -> m [a]
observeAllT (toView -> m) = m >>= get where
get (Just (a,t)) = liftM (a :) (observeAllT t)
get _ = return []
observeT :: Monad m => ML m a -> m a
observeT (toView -> m) = m >>= get where
get (Just (a,t)) = return a
get _ = fail "No results"
instance (MonadIO m) => MonadIO (ML m) where
liftIO = lift . liftIO
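-- Added example (editor's sketch, not in the original file):
-- observeAllT (return 1 `mplus` return 2 :: ML IO Int) yields [1,2],
-- while observeT mzero falls through to fail "No results".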
| repo_name: atzeus/reflectionwithoutremorse | path: BeforeFix/Logic.hs | license: mit | size: 1,272 | n_ast_errors: 0 | ast_max_depth: 15 | n_whitespaces: 336 | n_ast_nodes: 600 | n_ast_terminals: 315 | n_ast_nonterminals: 285 | loc: 34 | cycloplexity: 2 |
{-# LANGUAGE TemplateHaskell, RankNTypes, ScopedTypeVariables, LambdaCase #-}
module Commands.Mixins.DNS13OSX9.Derived where
import Commands.Frontends.Dragon13.Extra hiding (fromGlobalName)
import Commands.Munging
import Commands.Mixins.DNS13OSX9.Types
import Commands.Frontends.Dragon13
import Commands.RHS
import Commands.Parsers.Earley
import Data.GUI
import Control.Lens hiding (snoc)
import Data.Void
import Data.Char
import Data.Typeable
import GHC.Exts (IsString(..))
import Language.Haskell.TH.Syntax (Name)
import qualified Data.List as List
import Data.Foldable (asum)
infix 2 <=>
-- | reach into the func (mutually recursive with the rhs).
getTerminalsDNSEarley
:: forall t n a. (Eq t)
=> (RHS n t (DNSEarleyFunc n t) a)
-> [t]
getTerminalsDNSEarley = getTerminals' (const id) getTerminalsFromDNSEarleyFunc
where -- TODO explicit signatures
getTerminalsFromDNSEarleyFunc :: (forall x. DNSEarleyFunc n t x -> [t])
getTerminalsFromDNSEarleyFunc = (maybe [] getTerminalsFromBoth . projectDNSEarleyFunc)
getTerminalsFromBoth :: (forall x. ((RHS n t (DNSEarleyFunc n t) x), (RHS n t (DNSEarleyFunc n t) x)) -> [t])
getTerminalsFromBoth (pRHS,gRHS) = getTerminalsDNSEarley pRHS ++ getTerminalsDNSEarley gRHS
{-| @(<=>) = 'genericGrammar'@
-}
(<=>) :: Name -> DNSEarleyRHS a -> DNSEarleyRHS a
(<=>) = genericGrammar
-- NOTE specialized type signature is for type inference, disambiguates:
-- "No instance for (Data.String.IsString _)" and "No instance for (Functor _)"
{-| construct a 'NonTerminal'.
-}
nonterminalGrammar :: String -> DNSEarleyRHS a -> DNSEarleyRHS a
nonterminalGrammar l r = NonTerminal (ConstName (defaultDNSInfo, l)) r
{-| construct a 'NonTerminal'.
-}
genericGrammar :: Name -> DNSEarleyRHS a -> DNSEarleyRHS a
genericGrammar name r = nonterminalGrammar (gui^.(guiIdentifier._Identifier)) r
where
Just gui = fromGlobalName name -- TODO GHC 7.10.2 https://downloads.haskell.org/~ghc/7.10.2/docs/html/users_guide/other-type-extensions.html#special-implicit-params
{-| manually construct a special rule, with primitives.
-}
simpleGrammar :: Name -> (DNSEarleyProd a) -> (DNSRHS Text Void) -> DNSEarleyRHS a
simpleGrammar n p r = genericGrammar n $ liftLeaf p r
{-| manually construct a special rule, with separate recognizer and parser.
it has two independent right-hand sides: one for recognizing and one for parsing.
-}
complexGrammar :: Name -> DNSEarleyRHS a -> DNSEarleyRHS a -> DNSEarleyRHS a
complexGrammar n p r = genericGrammar n $ liftTree p r
{-| automatically generate a grammar from a type.
the left-hand side comes from the type,
and the right-hand side comes from the 'Show'n and transformed 'constructors'.
-}
transformedGrammar :: forall a. (Typeable a, Enum a, Show a) => (String -> String) -> DNSEarleyRHS a
transformedGrammar f = nonterminalGrammar
(guiOf(Proxy :: Proxy a) ^. (guiIdentifier._Identifier)) -- TODO Haskell type sections, whenever
(asum . fmap (transformedCon f) $ constructors)
{-| helper function for conveniently using Dragon NaturallySpeaking built-ins.
sets 'dnsInline' to true.
-}
dragonGrammar :: Name -> (DNSEarleyProd a) -> DNSBuiltinRule -> DNSEarleyRHS a
dragonGrammar name p r = simpleGrammar name p (SomeDNSNonTerminal (DNSBuiltinRule r))
& set (_DNSEarleyRHSInfo.dnsInline) True
{-| a default 'Grammar' for 'Enum's.
with 'Enum's, we can get the "edit only once" property: edit the @data@ definition, then 'terminal' builds the 'Rule', and then the functions on 'Rule's build the 'Parser's and 'DNSGrammar's. with 'Typeable', but without TemplateHaskell.
the 'LHS' comes from the type, not the term (avoiding TemplateHaskell). other 'Grammar's can always be defined with an LHS that comes from the term, e.g. with '<=>' (as Haskell values' names are disjoint from Haskell types').
-}
enumGrammar :: (Typeable a, Enum a, Show a) => DNSEarleyRHS a
enumGrammar = transformedGrammar (overCamelCase id)
{-| a default 'Grammar' for simple ADTs.
detects the type name in a constructor name (as a
prefix/infix/suffix) and elides the affix.
useful when you want your @grammars@-DSL terminals to be
unqualified (for convenience), but you want your Haskell
identifiers to be qualified (to avoid conflicts), e.g. to avoid
naming conflicts with @Either@:
>>> :set -XDeriveDataTypeable
>>> data Button = LeftButton | ButtonMiddleButton | ButtonRight deriving (Show,Eq,Enum,Typeable)
>>> let button = qualifiedGrammar :: Grammar Button
>>> getWords . view gramGrammar $ button
["left","middle","right"]
(the qualification is exaggerated to show the filtering behavior:
it's consistent in idiomatic declarations).
we didn't define @data Button = Left | Middle | Right@ because it
conflicts with 'Either', but the derived grammar is identical.
-}
qualifiedGrammar :: forall a. (Typeable a, Enum a, Show a) => DNSEarleyRHS a
qualifiedGrammar = qualifiedGrammarWith occ
where
occ = guiOf(Proxy :: Proxy a) ^. (guiIdentifier._Identifier)
{-| a default 'Grammar' for simple ADTs.
elides the given <http://en.wikipedia.org/wiki/Affix affix> from any part of any constructor.
e.g. avoids naming conflicts with @Either@. without making either the data type name too short, or the data constructor names too long:
>>> :set -XDeriveDataTypeable
>>> data Direction = UpD | DownD | LeftD | RightD deriving (Show,Eq,Enum,Typeable)
>>> qualifiedGrammarWith "D" :: Grammar Direction
["up","down","left","right"]
-}
qualifiedGrammarWith :: (Typeable a, Enum a, Show a) => String -> DNSEarleyRHS a
qualifiedGrammarWith affix = transformedGrammar (overCamelCase (filter (/= fmap toLower affix)))
{-| strips out the data type name, like 'qualifiedGrammar', along with @_@'s and numbers.
makes it easy to generate generic terminals (like @"left"@),
without conflicting with common symbols (like 'Left').
-}
tidyGrammar :: forall a. (Typeable a, Enum a, Show a) => DNSEarleyRHS a
tidyGrammar = transformedGrammar f
where
f = filter (/= '_') >>> overCamelCase (filter (/= fmap toLower occ))
occ = guiOf(Proxy :: Proxy a) ^. (guiIdentifier._Identifier)
{-| a default 'Grammar' for 'String' @newtype@s.
the user might want to parse/recognize an arbitrary but dynamic/large subset of all possible strings.
For example:
* a mutable grammar whose contents depend on some context,
like the current buffer, or the previous recognition.
* a huge list of custom words, that sound like more common words,
that aren't being recognized, even after using Dragon's Vocabulary Builder.
* even a few static words, which don't need to be a sum type,
to not increase boilerplate, while still increasing type safety.
e.g.
@
newtype Place = Place String deriving (Show,Eq)
instance IsString Place where fromString = Place
@
-}
vocabularyGrammar :: [String] -> DNSEarleyRHS Text
vocabularyGrammar = tokens
{-| the empty grammar.
See 'UnitDNSRHS' (which always matches, recognizing nothing)
and 'unitEarleyParser' (which always succeeds, parsing nothing).
-}
epsilon :: DNSEarleyRHS ()
epsilon = simpleGrammar 'epsilon (UnsafeEarleyProduction unitEarleyParser) UnitDNSRHS
--TODO generalize these introducers to any RHS, and use Text
{-| inject a string.
-}
token :: (IsString t, Show t) => String -> RHS n t f t -- TODO mv these to .RHS.Derived
token = fromString
{-| @str = 'token'@
specialized for type inference.
(with @OverloadedStrings@, string literals are 'IsString'-constrained polymorphic types)
-}
str :: String -> DNSEarleyRHS Text
str = token
{-| inject a character.
-}
chr :: Char -> DNSEarleyRHS Char
chr c = c <$ token [c]
{-| @int = 'con'@
specialized for type inference.
(integer literals are 'Num'-constrained polymorphic types).
-}
int :: Int -> DNSEarleyRHS Int
int = con
{-| inject a constructor.
-}
con :: (Show a) => a -> DNSEarleyRHS a
con = transformedCon (List.intercalate " " . unCamelCase)
{-| make a 'Terminal' from the @transformed@ 'Show'n constructor, returning the constructor.
-}
transformedCon :: (Show a) => (String -> String) -> a -> DNSEarleyRHS a
transformedCon f x = x <$ (token . f . show $ x)
-- | @= 'optionRHS' 'enumDefault' ...@
optionalEnum :: (Enum a) => DNSEarleyRHS a -> DNSEarleyRHS a
optionalEnum = optionRHS enumDefault
{-| inject a mapping.
in each pair, the first is the string to recognize, the second is the value to parse and return.
-}
vocab :: (IsString t, Show t, Functor'RHS n t f) => [(String, a)] -> RHS n t f a
vocab
= foldMap (\(s,x) -> x <$ token s)
. filterBlanks
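-- Added example (editor's note, not in the original source; the value type
-- is made up): @vocab [("left", -1), ("right", 1)] :: DNSEarleyRHS Int@
-- recognizes either word and parses it to the paired number; pairs whose
-- string is blank are presumably dropped by 'filterBlanks'.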
{-| inject a set of strings.
-}
tokens :: (IsString t, Show t, Functor'RHS n t f) => [String] -> RHS n t f t
tokens = foldMap token
getRhsName :: DNSEarleyRHS a -> Maybe String
getRhsName r = r ^? _DNSEarleyRHSName
| repo_name: sboosali/commands | path: commands-frontend-DragonNaturallySpeaking/sources/Commands/Mixins/DNS13OSX9/Derived.hs | license: mit | size: 8,732 | n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 1,420 | n_ast_nodes: 1,511 | n_ast_terminals: 814 | n_ast_nonterminals: 697 | loc: 82 | cycloplexity: 1 |
{-# LANGUAGE GADTs #-}
module Pladen.Beets.Queries
(
get
, getAll
, getAlbums, getAlbums'
, getAlbumTrack
, getAlbumTracks
, getTrack
, getSingletonTracks
, getArtists
) where
import Control.Applicative
import qualified Data.List as L
import qualified Data.Function as F
import Database.Persist.Class
import Database.Persist.Types
import Database.Esqueleto
import Pladen.Beets.Model
getAll :: ( PersistEntity val
, PersistQuery m
, PersistEntityBackend val ~ PersistMonadBackend m )
=> m [Entity val]
getAll = selectList [] []
getAlbums :: SqlPersistM [Entity Album]
getAlbums = selectList [] [Desc AlbumAdded]
getAlbums' :: SqlPersistM [Album]
getAlbums' = map entityVal <$> getAlbums
----------------------------------------------------------------------------
getAlbumTrack :: Key Album -> SqlPersistM (Maybe EntityAlbumTracks)
getAlbumTrack key = do
tracks <- select $
from $ \(t `InnerJoin` a) -> do
on $ a ^. AlbumId ==. t ^. TrackAlbumId
where_ $ a ^. AlbumId ==. val key
orderBy [asc $ t ^. TrackNo]
return (a,t)
return $ case tracks of
[] -> Nothing
_ -> Just $ entityAlbumTracks tracks
getAlbumTracks :: SqlPersistM [EntityAlbumTracks]
getAlbumTracks = collectAlbumTracks <$> joinAlbumTracks
joinAlbumTracks :: SqlPersistM [(Entity Album, Entity Track)]
joinAlbumTracks = select $
from $ \(t `InnerJoin` a) -> do
on (a ^. AlbumId ==. t ^. TrackAlbumId)
orderBy [asc (t ^. TrackAlbumId)]
return (a,t)
collectAlbumTracks :: [(Entity Album, Entity Track)] -> [EntityAlbumTracks]
collectAlbumTracks = (map entityAlbumTracks) . group
where
group = L.groupBy ((==) `F.on` (entityKey .fst))
entityAlbumTracks :: [(Entity Album, Entity Track)] -> EntityAlbumTracks
entityAlbumTracks es = EntityAlbumTracks (fst $ head es) (map snd es)
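-- Added note (editor's illustration, not in the original module): given join
-- rows [(a1,t1),(a1,t2),(a2,t3)] ordered by album, 'collectAlbumTracks'
-- groups them into [EntityAlbumTracks a1 [t1,t2], EntityAlbumTracks a2 [t3]].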
----------------------------------------------------------------------------
getTrack :: Key Track -> SqlPersistM (Maybe (Entity Track))
getTrack k = (fmap.fmap) (Entity k) $ get k
getSingletonTracks :: SqlPersistM [Entity Track]
getSingletonTracks = selectList [filterSingletons] []
where filterSingletons = Filter TrackAlbumId (Left $ Key PersistNull) Eq
----------------------------------------------------------------------------
getArtists :: SqlPersistM [EntityArtist]
getArtists = collectArtists <$> selectList [] [Asc AlbumArtist, Desc AlbumYear]
collectArtists :: [Entity Album] -> [EntityArtist]
collectArtists albums = map artistFromAlbums grouped
where
artistFromAlbums as = EntityArtist (albumArtist $ entityVal $ head as)
(map entityKey as)
grouped :: [[Entity Album]]
grouped = L.groupBy ((==) `F.on` (albumArtist . entityVal)) albums
| repo_name: geigerzaehler/pladen | path: Pladen/Beets/Queries.hs | license: mit | size: 2,861 | n_ast_errors: 1 | ast_max_depth: 15 | n_whitespaces: 603 | n_ast_nodes: 866 | n_ast_terminals: 459 | n_ast_nonterminals: 407 | loc: 64 | cycloplexity: 2 |
import Control.Monad -- MonadPlus
pairs :: Int -> [(Int, Int)]
pairs n = [(i,j) | i <- [1..n], j <- [(i+1)..n]]
-- - Equivalently
pairsWithDo :: Int -> [(Int, Int)]
pairsWithDo n = do {
i <- [1..n];
j <- [(i+1)..n];
return (i,j)
}
-- -- - With Guard
pairsGuard :: Int -> [(Int, Int)]
pairsGuard n = [(i,j) | i <- [1..n], j <- [1..n], i < j]
-- MonadPlus is a monad with extra structure
{-
-- MonadPlus
class Monad m => MonadPlus m where
mzero :: m a
mplus :: m a -> m a -> m a
instance MonadPlus [] where
mzero = []
mplus = (++)
guard :: MonadPlus m => Bool -> m ()
guard False = mzero -- non-deterministic result; 'mzero = [] = no possible result'
guard True = return () -- 'return () = [()] = one possible result'
msum :: MonadPlus m => [m a] -> m a
msum = foldr mplus mzero -- generalization of concat
-}
-- - Equivalently
pairsWithDoGuard :: Int -> [(Int, Int)]
pairsWithDoGuard n = do {
i <- [1..n];
j <- [1..n];
guard (i < j);
return (i,j)
}
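-- Added variant (editor's sketch, not in the original lecture notes): the
-- same pairs written with msum and guard directly, using the Control.Monad
-- import above.
pairsWithMsum :: Int -> [(Int, Int)]
pairsWithMsum n = msum [ guard (i < j) >> return (i, j) | i <- [1..n], j <- [1..n] ]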
| repo_name: abhishekkr/tutorials_as_code | path: talks-articles/languages-n-runtimes/haskell/PhilipWadler-UniversityOfEdinburgh-2011/lecture18.example00.hs | license: mit | size: 1,178 | n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 425 | n_ast_nodes: 302 | n_ast_terminals: 172 | n_ast_nonterminals: 130 | loc: 16 | cycloplexity: 1 |
module Paths_ListFry (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
version :: Version
version = Version {versionBranch = [0,1,0,0], versionTags = []}
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/home/longqm/.cabal/bin"
libdir = "/home/longqm/.cabal/lib/x86_64-linux-ghc-7.4.1/ListFry-0.1.0.0"
datadir = "/home/longqm/.cabal/share/x86_64-linux-ghc-7.4.1/ListFry-0.1.0.0"
libexecdir = "/home/longqm/.cabal/libexec"
sysconfdir = "/home/longqm/.cabal/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "ListFry_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "ListFry_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "ListFry_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "ListFry_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "ListFry_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| repo_name: longqm/ListFry | path: dist/build/autogen/Paths_ListFry.hs | license: mit | size: 1,344 | n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 182 | n_ast_nodes: 371 | n_ast_terminals: 213 | n_ast_nonterminals: 158 | loc: 28 | cycloplexity: 1 |
--data AVL a = Nil
-- | L (AVL a) a (AVL a)
-- | B (AVL a) a (AVL a)
-- | R (AVL a) a (AVL a)
-- deriving (Eq, Ord, Show)
data AVL a = Nil
| Node (AVL a) a (AVL a) Balance
deriving (Show, Eq)
data Balance = N
| Z
| P
deriving (Show, Eq, Ord)
singleton :: Ord a => a -> AVL a
singleton a = Node Nil a Nil Z

-- 'insert' and 'restoreAVL' were left unfinished in the original file; the
-- bodies below are minimal placeholders (no rebalancing) so the module loads.
insert :: Ord a => a -> AVL a -> AVL a
insert a Nil = singleton a
insert a t@(Node l n r bal)
  | a < n     = Node (insert a l) n r bal
  | a > n     = Node l n (insert a r) bal
  | otherwise = t

restoreAVL :: AVL a -> AVL a
restoreAVL Nil = Nil
restoreAVL t   = t
| repo_name: ThreeFx/ETH | path: 1semester/da/avl/AVL.hs | license: mit | size: 532 | n_ast_errors: 0 | ast_max_depth: 8 | n_whitespaces: 217 | n_ast_nodes: 180 | n_ast_terminals: 92 | n_ast_nonterminals: 88 | loc: -1 | cycloplexity: -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module State where
import Reflex.Cocos2d.Prelude
import GHC.Generics
import Data.Enumerate
import qualified Data.Array as A
import qualified Data.IntMap.Strict as IM
import qualified Data.Map as M
import qualified Data.IntSet as IS
data BoxStatus = BoxControlled | BoxFreeForm | BoxBroken | BoxRemoved deriving (Enum, Generic, Enumerable, Eq, Ord, Show, Read)
data BoxState = BoxState
{ _boxSetPos :: P2 Double
, _boxSetRot :: Direction V2 Double
, _boxStatus :: BoxStatus
} deriving (Eq, Ord, Show, Read)
makeLenses ''BoxState
data BoxThrowerSceneState = BoxThrowerSceneState
{ _boxes :: IM.IntMap BoxState -- ids are just simple indexes
, _idleBoxes :: IS.IntSet
, _controlledBox :: Maybe Int
} deriving (Eq, Ord, Show, Read)
makeLenses ''BoxThrowerSceneState
-- collision types
data EnemyStatus = EnemyAlive | EnemyDead deriving (Enum, Generic, Enumerable, Eq, Ord, Show, Read)
data CollisionType = CTGround | CTBox BoxStatus | Enemy EnemyStatus deriving (Generic, Enumerable, Eq, Ord, Show, Read)
array_CollisionType :: A.Array Int CollisionType
array_CollisionType = array_enumerable
table_CollisionType :: M.Map CollisionType Int
table_CollisionType = table_enumerable
instance Enum CollisionType where
toEnum = toEnum_enumerable array_CollisionType
fromEnum = fromEnum_enumerable table_CollisionType
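-- Added note (editor's comment, not in the original source): the helpers
-- imported from Data.Enumerate presumably enumerate every constructor
-- generically, so 'toEnum'/'fromEnum' here index into 'array_CollisionType'
-- and 'table_CollisionType' rather than relying on a derived instance
-- (CollisionType is not trivially enumerable because of its fields).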
data SceneState
= StartScene
| BoxThrowerScene BoxThrowerSceneState
deriving (Eq, Ord, Show, Read)
makePrisms ''SceneState
type SceneStateType = Int
sceneStateType :: SceneState -> SceneStateType
sceneStateType StartScene = 0
sceneStateType (BoxThrowerScene _) = 1
data AppState = AppState
{ -- some shared app state
-- ...
_sceneState :: SceneState
} deriving (Eq, Ord, Show, Read)
makeLenses ''AppState
| repo_name: lynnard/cocos2d-proj | path: src/State.hs | license: mit | size: 2,047 | n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 379 | n_ast_nodes: 497 | n_ast_terminals: 279 | n_ast_nonterminals: 218 | loc: 49 | cycloplexity: 1 |
{-# htermination (and :: (List MyBool) -> MyBool) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
asAs :: MyBool -> MyBool -> MyBool;
asAs MyFalse x = MyFalse;
asAs MyTrue x = x;
foldr :: (b -> a -> a) -> a -> (List b) -> a;
foldr f z Nil = z;
foldr f z (Cons x xs) = f x (foldr f z xs);
and :: (List MyBool) -> MyBool;
and = foldr asAs MyTrue;
| repo_name: ComputationWithBoundedResources/ara-inference | path: doc/tpdb_trs/Haskell/basic_haskell/and_1.hs | license: mit | size: 418 | n_ast_errors: 0 | ast_max_depth: 9 | n_whitespaces: 114 | n_ast_nodes: 184 | n_ast_terminals: 101 | n_ast_nonterminals: 83 | loc: 11 | cycloplexity: 1 |
{-# htermination putChar :: Char -> IO () #-}
| repo_name: ComputationWithBoundedResources/ara-inference | path: doc/tpdb_trs/Haskell/full_haskell/Prelude_putChar_1.hs | license: mit | size: 46 | n_ast_errors: 0 | ast_max_depth: 2 | n_whitespaces: 9 | n_ast_nodes: 3 | n_ast_terminals: 2 | n_ast_nonterminals: 1 | loc: 1 | cycloplexity: 0 |
{-# LANGUAGE FlexibleContexts #-}
module Source (simpleSource,producer) where
import Neutron (Neutron(Neutron))
import Linear (normalize,V3,Epsilon)
import Momentum (Momentum(getSpeed))
import Pipes
import Control.Monad (forever)
import Data.Random
{-# INLINABLE simpleSource #-}
simpleSource :: (Epsilon a, Momentum m, Floating a) => V3 a -> V3 a -> m a -> Neutron a
simpleSource start target momentum =
Neutron start 1 (normalize (target - start)) (getSpeed momentum)
producer :: (RandomSource IO s) => s -> RVar (Maybe a) -> Producer a IO ()
producer src beam = forever $ do
n <- lift $ runRVar beam src
case n of
Just neutron -> yield neutron
Nothing -> discard n
| repo_name: rprospero/NeutronPipe | path: Source.hs | license: mit | size: 688 | n_ast_errors: 0 | ast_max_depth: 11 | n_whitespaces: 124 | n_ast_nodes: 260 | n_ast_terminals: 137 | n_ast_nonterminals: 123 | loc: 17 | cycloplexity: 2 |
data ArbolG a = AGV | AG a [ ArbolG a ]
| repo_name: josegury/HaskellFuntions | path: Arboles/DeclaracionArbol.hs | license: mit | size: 40 | n_ast_errors: 0 | ast_max_depth: 8 | n_whitespaces: 12 | n_ast_nodes: 22 | n_ast_terminals: 12 | n_ast_nonterminals: 10 | loc: 1 | cycloplexity: 0 |
import Memory
import Thought
import Control.Arrow
import Data.Monoid
import Test.QuickCheck
addAnyNum :: Integer -> Integer -> Bool
addAnyNum n s = appEndo (runMemory (update (n +)) s ()) s == s+n
thinkHoldsState :: Integer -> Integer -> Bool
thinkHoldsState n s = fst (stepThought (arr (+ n)) s) == s+n
main :: IO ()
main = do
quickCheck addAnyNum
quickCheck thinkHoldsState
| repo_name: edwardwas/memory | path: tests/main.hs | license: mit | size: 388 | n_ast_errors: 0 | ast_max_depth: 12 | n_whitespaces: 73 | n_ast_nodes: 159 | n_ast_terminals: 82 | n_ast_nonterminals: 77 | loc: 13 | cycloplexity: 1 |
{- draw sprite with image clipping -}
{-# LANGUAGE OverloadedStrings #-}
module Lesson11 where
--
import qualified SDL
--
import Data.Word (Word8(..))
import Linear.Affine (Point(..))
import Linear.V2 (V2(..))
import Linear.V4 (V4(..))
import Foreign.C.Types (CInt)
--
import Control.Monad (unless)
--
import qualified Config
--
-- setup xywh for all clips
gSpriteClips :: [SDL.Rectangle CInt]
gSpriteClips =
[ rect 0 0 100 100 -- LU
, rect 100 0 100 100 -- RU
, rect 0 100 100 100 -- LD
, rect 100 100 100 100] -- RD
where rect x y w h = SDL.Rectangle (P$V2 x y) (V2 w h)
getClipsW :: [SDL.Rectangle CInt] -> Int -> CInt
getClipsW xs i = let (SDL.Rectangle _ (V2 w _)) = xs !! i in w
getClipsH :: [SDL.Rectangle CInt] -> Int -> CInt
getClipsH xs i = let (SDL.Rectangle _ (V2 _ h)) = xs !! i in h
-- definition of LTexture
data LTexture = LTexture {getTx :: SDL.Texture, getWH :: (V2 CInt)}
--
class Renderable a where
render :: SDL.Renderer -> a -> SDL.Rectangle CInt -> V2 CInt -> IO ()
free :: a -> IO ()
--
instance Renderable LTexture where
render rdr ltx xywh@(SDL.Rectangle _ (V2 w h)) xy = do
SDL.copy rdr (getTx ltx) (Just xywh) (Just $ SDL.Rectangle (P xy) (V2 w h))
free ltx = SDL.destroyTexture (getTx ltx)
-- definition of loading function
loadFromFile :: SDL.Renderer -> FilePath -> IO LTexture
loadFromFile rdr path = do
tempSf <- SDL.loadBMP path
wh <- SDL.surfaceDimensions tempSf
-- ************ --
SDL.surfaceColorKey tempSf SDL.$= (Just (V4 223 113 38 maxBound))
tx <- SDL.createTextureFromSurface rdr tempSf
SDL.freeSurface tempSf
return (LTexture tx wh)
--
lesson11 :: IO ()
lesson11 = do
-- initialize SDL
SDL.initialize [SDL.InitVideo]
-- create window
window <- SDL.createWindow "Lesson11" Config.winConfig
renderer <- SDL.createRenderer window (-1) Config.rdrConfig
SDL.HintRenderScaleQuality SDL.$= SDL.ScaleLinear
SDL.rendererDrawColor renderer SDL.$=
V4 maxBound maxBound minBound maxBound
gSpriteSheetTexture <- loadFromFile renderer "./img/11/sprite.bmp"
let
loop = do
events <- SDL.pollEvents
let quit = any (== SDL.QuitEvent) $ map SDL.eventPayload events
-- *** beginning of drawing region ***
SDL.rendererDrawColor renderer SDL.$=
V4 minBound minBound maxBound maxBound
SDL.clear renderer
-- render with our own function
render renderer gSpriteSheetTexture (gSpriteClips !! 0) -- LU
$ V2 0 0
render renderer gSpriteSheetTexture (gSpriteClips !! 1) -- RU
$ V2 (Config.winW - (getClipsW gSpriteClips 1) ) 0
-- $ V2 (Config.winW - (getClipsW gSpriteClips 1) ) 0
render renderer gSpriteSheetTexture (gSpriteClips !! 2) -- LD
$ V2 0 (Config.winH - (getClipsH gSpriteClips 2))
render renderer gSpriteSheetTexture (gSpriteClips !! 3) -- RD
$ V2 (Config.winW - (getClipsW gSpriteClips 3))
(Config.winH - (getClipsH gSpriteClips 3))
--
SDL.present renderer
-- *** end of drawing region ***
unless quit loop
loop
free gSpriteSheetTexture
SDL.destroyRenderer renderer
SDL.destroyWindow window
SDL.quit
| repo_name: rueshyna/sdl2-examples | path: src/Lesson11.hs | license: mit | size: 3,269 | n_ast_errors: 0 | ast_max_depth: 18 | n_whitespaces: 796 | n_ast_nodes: 1,092 | n_ast_terminals: 551 | n_ast_nonterminals: 541 | loc: 69 | cycloplexity: 1 |
module Compiler.JMacro.Lens where
import Prelude
import Control.Lens
import Compiler.JMacro.Base
import Data.Map (Map)
import Data.Text (Text)
_DeclStat :: Prism' JStat Ident
_DeclStat = prism DeclStat
(\x -> case x of DeclStat y -> Right y
_ -> Left x)
_ReturnStat :: Prism' JStat JExpr
_ReturnStat = prism ReturnStat
(\x -> case x of ReturnStat y -> Right y
_ -> Left x)
_IfStat :: Prism' JStat (JExpr, JStat, JStat)
_IfStat = prism (\(x1, x2, x3) -> IfStat x1 x2 x3)
(\x -> case x of IfStat y1 y2 y3 -> Right (y1, y2, y3)
_ -> Left x)
_WhileStat :: Prism' JStat (Bool, JExpr, JStat)
_WhileStat = prism (\(x1, x2, x3) -> WhileStat x1 x2 x3)
(\x -> case x of WhileStat y1 y2 y3 -> Right (y1, y2, y3)
_ -> Left x)
_ForInStat :: Prism' JStat (Bool, Ident, JExpr, JStat)
_ForInStat =
prism (\(x1, x2, x3, x4) -> ForInStat x1 x2 x3 x4)
(\x -> case x of ForInStat y1 y2 y3 y4 -> Right (y1, y2, y3, y4)
_ -> Left x)
_SwitchStat :: Prism' JStat (JExpr, [(JExpr, JStat)], JStat)
_SwitchStat =
prism (\(x1, x2, x3) -> SwitchStat x1 x2 x3)
(\x -> case x of SwitchStat y1 y2 y3 -> Right (y1, y2, y3)
_ -> Left x)
_TryStat :: Prism' JStat (JStat, Ident, JStat, JStat)
_TryStat =
prism (\(x1, x2, x3, x4) -> TryStat x1 x2 x3 x4)
(\x -> case x of TryStat y1 y2 y3 y4 -> Right (y1, y2, y3, y4)
_ -> Left x)
_BlockStat :: Prism' JStat [JStat]
_BlockStat =
prism BlockStat
(\x -> case x of BlockStat y -> Right y
_ -> Left x)
_ApplStat :: Prism' JStat (JExpr, [JExpr])
_ApplStat =
prism (uncurry ApplStat)
(\x -> case x of ApplStat y1 y2 -> Right (y1, y2)
_ -> Left x)
_UOpStat :: Prism' JStat (JUOp, JExpr)
_UOpStat =
prism (uncurry UOpStat)
(\x -> case x of UOpStat y1 y2 -> Right (y1, y2)
_ -> Left x)
_AssignStat :: Prism' JStat (JExpr, JExpr)
_AssignStat =
prism (uncurry AssignStat)
(\x -> case x of AssignStat y1 y2 -> Right (y1, y2)
_ -> Left x)
_UnsatBlock :: Prism' JStat (IdentSupply JStat)
_UnsatBlock =
prism UnsatBlock
(\x -> case x of UnsatBlock y -> Right y
_ -> Left x)
_LabelStat :: Prism' JStat (JsLabel, JStat)
_LabelStat =
prism (uncurry LabelStat)
(\x -> case x of LabelStat y1 y2 -> Right (y1, y2)
_ -> Left x)
_BreakStat :: Prism' JStat (Maybe JsLabel)
_BreakStat =
prism BreakStat
(\x -> case x of BreakStat y -> Right y
_ -> Left x)
_ContinueStat :: Prism' JStat (Maybe JsLabel)
_ContinueStat =
prism ContinueStat
(\x -> case x of ContinueStat y -> Right y
_ -> Left x)
-- JExpr
_ValExpr :: Prism' JExpr JVal
_ValExpr =
prism ValExpr
(\x -> case x of ValExpr y -> Right y
_ -> Left x)
_SelExpr :: Prism' JExpr (JExpr, Ident)
_SelExpr =
prism (uncurry SelExpr)
(\x -> case x of SelExpr y1 y2 -> Right (y1, y2)
_ -> Left x)
_IdxExpr :: Prism' JExpr (JExpr, JExpr)
_IdxExpr =
prism (uncurry IdxExpr)
(\x -> case x of IdxExpr y1 y2 -> Right (y1, y2)
_ -> Left x)
_InfixExpr :: Prism' JExpr (JOp, JExpr, JExpr)
_InfixExpr =
prism (\(x1, x2, x3) -> InfixExpr x1 x2 x3)
(\x -> case x of InfixExpr y1 y2 y3 -> Right (y1, y2, y3)
_ -> Left x)
_UOpExpr :: Prism' JExpr (JUOp, JExpr)
_UOpExpr =
prism (uncurry UOpExpr)
(\x -> case x of UOpExpr y1 y2 -> Right (y1, y2)
_ -> Left x)
_IfExpr :: Prism' JExpr (JExpr, JExpr, JExpr)
_IfExpr =
prism (\(x1, x2, x3) -> IfExpr x1 x2 x3)
(\x -> case x of IfExpr y1 y2 y3 -> Right (y1, y2, y3)
_ -> Left x)
_ApplExpr :: Prism' JExpr (JExpr, [JExpr])
_ApplExpr =
prism (uncurry ApplExpr)
(\x -> case x of ApplExpr y1 y2 -> Right (y1, y2)
_ -> Left x)
_UnsatExpr :: Prism' JExpr (IdentSupply JExpr)
_UnsatExpr
= prism UnsatExpr
(\x -> case x of UnsatExpr y -> Right y
_ -> Left x)
-- JVal
_JVar :: Prism' JVal Ident
_JVar =
prism JVar
(\x -> case x of JVar y -> Right y
_ -> Left x)
_JList :: Prism' JVal [JExpr]
_JList =
prism JList
( \x -> case x of JList y -> Right y
_ -> Left x)
_JDouble :: Prism' JVal SaneDouble
_JDouble =
prism JDouble
(\x -> case x of JDouble y -> Right y
_ -> Left x)
_JInt :: Prism' JVal Integer
_JInt =
prism JInt
(\x -> case x of JInt y -> Right y
_ -> Left x)
_JStr :: Prism' JVal Text
_JStr =
prism JStr
(\x -> case x of JStr y -> Right y
_ -> Left x)
_JRegEx :: Prism' JVal Text
_JRegEx
= prism JRegEx
(\x -> case x of JRegEx y -> Right y
_ -> Left x)
_JHash :: Prism' JVal (Map Text JExpr)
_JHash =
prism JHash
(\x -> case x of JHash y -> Right y
_ -> Left x)
_JFunc :: Prism' JVal ([Ident], JStat)
_JFunc =
prism (uncurry JFunc)
(\x -> case x of JFunc y1 y2 -> Right (y1, y2)
_ -> Left x)
_UnsatVal :: Prism' JVal (IdentSupply JVal)
_UnsatVal =
prism UnsatVal
(\x -> case x of UnsatVal y -> Right y
_ -> Left x)
-- Ident
_TxtI :: Iso' Ident Text
_TxtI = iso (\(TxtI x) -> x) TxtI
|
ghcjs/ghcjs
|
src/Compiler/JMacro/Lens.hs
|
mit
| 6,157
| 0
| 12
| 2,522
| 2,364
| 1,234
| 1,130
| 164
| 2
|
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./CASL/Sublogic.hs
Description : sublogic analysis for CASL
Copyright : (c) Pascal Schmidt, C. Maeder, and Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : experimental
Portability : portable
Sublogic analysis for CASL
This module provides the sublogic functions (as required by Logic.hs)
for CASL. The functions make it possible to compute the minimal sublogics needed
by a given element, to check whether an item is part of a given
sublogic, and to project an element into a given sublogic.
-}
module CASL.Sublogic
( -- * types
CASL_Sublogics
, CASL_SL (..)
, CASL_Formulas (..)
, SubsortingFeatures (..)
, SortGenerationFeatures (..)
-- * class
, Lattice (..)
-- * predicates on CASL_SL
, has_sub
, has_cons
-- * functions for SemiLatticeWithTop instance
, mkTop
, top
, caslTop
, cFol
, cPrenex
, sublogics_max
, comp_list
-- * functions for the creation of minimal sublogics
, bottom
, mkBot
, emptyMapConsFeature
, need_sub
, need_pred
, need_horn
, need_fol
, updExtFeature
-- * functions for Logic instance sublogic to string conversion
, sublogics_name
, parseSL
, parseBool
-- ** list of all sublogics
, sublogics_all
, sDims
-- * computes the sublogic of a given element
, sl_sig_items
, sl_basic_spec
, sl_opkind
, sl_op_type
, sl_op_item
, sl_pred_item
, sl_sentence
, sl_term
, sl_symb_items
, sl_symb_map_items
, sl_sign
, sl_morphism
, sl_symbol
-- * projects an element into a given sublogic
, pr_basic_spec
, pr_symb_items
, pr_symb_map_items
, pr_sign
, pr_morphism
, pr_epsilon
, pr_symbol
) where
import Data.Data
import Data.List
import Data.Maybe
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Rel as Rel
import Common.Id
import Common.AS_Annotation
import Common.Lattice
import Control.Monad
import CASL.AS_Basic_CASL
import CASL.Sign
import CASL.Morphism
import CASL.Fold
{- ----------------------------------------------------------------------------
datatypes for CASL sublogics
---------------------------------------------------------------------------- -}
data CASL_Formulas = Atomic -- ^ atomic logic
| Horn -- ^ positive conditional logic
| GHorn -- ^ generalized positive conditional logic
| Prenex -- ^ formulas in prenex normal form
| FOL -- ^ first-order logic
| SOL -- ^ second-order logic
deriving (Show, Eq, Ord, Typeable, Data)
data SubsortingFeatures = NoSub
| LocFilSub
| Sub
deriving (Show, Eq, Ord, Typeable, Data)
data SortGenerationFeatures =
NoSortGen
| SortGen { emptyMapping :: Bool
-- ^ Mapping of indexed sorts is empty
, onlyInjConstrs :: Bool
-- ^ only constructors that are subsort injections
} deriving (Show, Eq, Ord, Typeable, Data)
joinSortGenFeature :: (Bool -> Bool -> Bool)
-> SortGenerationFeatures -> SortGenerationFeatures
-> SortGenerationFeatures
joinSortGenFeature f x y =
case x of
NoSortGen -> y
SortGen em_x ojc_x -> case y of
NoSortGen -> x
SortGen em_y ojc_y -> SortGen (f em_x em_y) (f ojc_x ojc_y)
data CASL_SL a = CASL_SL
{ sub_features :: SubsortingFeatures, -- ^ subsorting
has_part :: Bool, -- ^ partiality
cons_features :: SortGenerationFeatures, -- ^ sort generation constraints
has_eq :: Bool, -- ^ equality
has_pred :: Bool, -- ^ predicates
which_logic :: CASL_Formulas, -- ^ first order sublogics
has_empty_sorts :: Bool, -- ^ may sorts be empty
ext_features :: a -- ^ features of extension
} deriving (Show, Eq, Ord, Typeable, Data)
updExtFeature :: (a -> a) -> CASL_SL a -> CASL_SL a
updExtFeature f s = s { ext_features = f $ ext_features s }
type CASL_Sublogics = CASL_SL ()
{- -----------------------
old selector functions
----------------------- -}
has_sub :: CASL_SL a -> Bool
has_sub sl = case sub_features sl of
NoSub -> False
_ -> True
has_cons :: CASL_SL a -> Bool
has_cons sl = case cons_features sl of
NoSortGen -> False
_ -> True
{- ---------------------------------------------------------------------------
Special sublogics elements
--------------------------------------------------------------------------- -}
-- top element
mkTop :: a -> CASL_SL a
mkTop = CASL_SL Sub True (SortGen False False) True True SOL True
top :: Lattice a => CASL_SL a
top = mkTop ctop
caslTop :: Lattice a => CASL_SL a
caslTop = top
{ has_empty_sorts = False
, which_logic = FOL
}
cFol :: Lattice a => CASL_SL a
cFol = caslTop
{ sub_features = NoSub -- no subsorting
, has_part = False -- no partiality
}
cPrenex :: Lattice a => CASL_SL a
cPrenex = cFol {which_logic = Prenex}
mkBot :: a -> CASL_SL a
mkBot = CASL_SL NoSub False NoSortGen False False Atomic False
-- bottom element
bottom :: Lattice a => CASL_SL a
bottom = mkBot bot
need_empty_sorts :: Lattice a => CASL_SL a
need_empty_sorts = bottom { has_empty_sorts = True }
{- the following are used to add a needed feature to a given
sublogic via sublogics_max, i.e. (sublogics_max given needs_part)
will force partiality in addition to what features given already
has included -}
-- minimal sublogics with subsorting
need_sub :: Lattice a => CASL_SL a
need_sub = need_horn { sub_features = Sub }
need_sul :: Lattice a => CASL_SL a
need_sul = need_horn { sub_features = LocFilSub }
-- minimal sublogic with partiality
need_part :: Lattice a => CASL_SL a
need_part = bottom { has_part = True }
emptyMapConsFeature :: SortGenerationFeatures
emptyMapConsFeature = SortGen
{ emptyMapping = True
, onlyInjConstrs = False }
-- minimal sublogics with sort generation constraints
need_cons :: Lattice a => CASL_SL a
need_cons = bottom
{ cons_features = SortGen { emptyMapping = False
, onlyInjConstrs = False} }
need_e_cons :: Lattice a => CASL_SL a
need_e_cons = bottom
{ cons_features = emptyMapConsFeature }
need_s_cons :: Lattice a => CASL_SL a
need_s_cons = bottom
{ cons_features = SortGen { emptyMapping = False
, onlyInjConstrs = True} }
need_se_cons :: Lattice a => CASL_SL a
need_se_cons = bottom
{ cons_features = SortGen { emptyMapping = True
, onlyInjConstrs = True} }
-- minimal sublogic with equality
need_eq :: Lattice a => CASL_SL a
need_eq = bottom { has_eq = True }
-- minimal sublogic with predicates
need_pred :: Lattice a => CASL_SL a
need_pred = bottom { has_pred = True }
need_horn :: Lattice a => CASL_SL a
need_horn = bottom { which_logic = Horn }
need_fol :: Lattice a => CASL_SL a
need_fol = bottom { which_logic = FOL }
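-- A minimal illustrative sketch (not part of the original module): the
-- need_* values above are meant to be merged into a given sublogic via
-- 'sublogics_max', here forcing both partiality and predicates on top of
-- whatever features the given sublogic already has.
_exampleNeedPartAndPred :: Lattice a => CASL_SL a -> CASL_SL a
_exampleNeedPartAndPred given = sublogics_max (sublogics_max given need_part) need_pred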
{- ---------------------------------------------------------------------------
Functions to generate a list of all sublogics for CASL
--------------------------------------------------------------------------- -}
{- all elements
create a list of all CASL sublogics by generating all possible
feature combinations and then filtering illegal ones out -}
sublogics_all :: Lattice a => [a] -> [CASL_SL a]
sublogics_all l = bottom : map mkBot l ++ concat (sDims [])
++ let subPAtom = (sublogics_max need_part need_pred) { sub_features = Sub } in
[ sublogics_max need_fol need_eq
, comp_list [subPAtom, need_horn, need_eq]
, subPAtom
, sublogics_max subPAtom need_cons
, cFol, caslTop, top]
sDims :: Lattice a => [[a]] -> [[CASL_SL a]]
sDims l = let
t = True
b = bottom
bools = [True, False]
in
map (map mkBot) l ++
[ [ b { sub_features = s_f } | s_f <- [LocFilSub, Sub]]
, [b { has_part = t } ]
, [b { cons_features = c_f } | c_f <- [ SortGen m s | m <- bools, s <- bools]]
, [b { has_eq = t } ]
, [b { has_pred = t } ]
, [b { has_empty_sorts = t } ]
, [b { which_logic = fo } | fo <- reverse [SOL, FOL, Prenex, GHorn, Horn]]]
{- ----------------------------------------------------------------------------
Conversion functions (to String)
---------------------------------------------------------------------------- -}
formulas_name :: Bool -> CASL_Formulas -> String
formulas_name b f = let Just s = lookup (b, f) nameList in s
nameList :: [((Bool, CASL_Formulas), String)]
nameList =
[ ((True, SOL), "SOL")
, ((False, SOL), "SOAlg")
, ((True, FOL), "FOL")
, ((False, FOL), "FOAlg")
, ((True, Prenex), "Prenex")
, ((False,Prenex), "PrenexAlg")
, ((True, GHorn), "GHorn")
, ((False, GHorn), "GCond")
, ((True, Horn), "Horn")
, ((False, Horn), "Cond")
, ((True, Atomic), "Atom")
, ((False, Atomic), "Eq")]
sublogics_name :: (a -> String) -> CASL_SL a -> String
sublogics_name f x = f (ext_features x)
++ (case sub_features x of
NoSub -> ""
LocFilSub -> "Sul"
Sub -> "Sub")
++ (if has_part x then "P" else "")
++ (if has_cons x
then (if onlyInjConstrs (cons_features x)
then "s" else "") ++
(if emptyMapping (cons_features x)
then "e" else "") ++ "C"
else "")
++ formulas_name (has_pred x) (which_logic x)
++ (if has_eq x then "=" else "")
++ if has_empty_sorts x then "E" else ""
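-- A small illustrative sketch (not part of the original module): rendering
-- the standard CASL sublogic with the scheme above yields the familiar
-- name "SubPCFOL=" (subsorting, partiality, sort generation constraints,
-- first-order logic with predicates and equality).
_caslTopName :: String
_caslTopName = sublogics_name (const "") (caslTop :: CASL_Sublogics)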
parseBool :: String -> String -> (Bool, String)
parseBool p s = case stripPrefix p s of
Just r -> (True, r)
Nothing -> (False, s)
parseSL :: (String -> Maybe (a, String)) -> String -> Maybe (CASL_SL a)
parseSL f s0 = do
(a, s1) <- f s0
(sub, s2) <- case stripPrefix "Su" s1 of
Just r -> case r of
c : t -> case c of
'l' -> Just (LocFilSub, t)
'b' -> Just (Sub, t)
_ -> Nothing
"" -> Nothing
Nothing -> Just (NoSub, s1)
let (pa, s3) = parseBool "P" s2
(c, s4) = parseCons s3
((pr, l), s5) <- parseForm s4
let (eq, s6) = parseBool "=" s5
(es, s7) = parseBool "E" s6
unless (null s7) Nothing
return (mkBot a)
{ sub_features = sub
, has_part = pa
, cons_features = c
, has_pred = pr
, which_logic = l
, has_eq = eq
, has_empty_sorts = es }
parseForm :: String -> Maybe ((Bool, CASL_Formulas), String)
parseForm s = foldr (\ (q, p) m -> case m of
Just _ -> m
Nothing -> case stripPrefix p s of
Just r -> Just (q, r)
Nothing -> m) Nothing nameList
parseCons :: String -> (SortGenerationFeatures, String)
parseCons s = case stripPrefix "seC" s of
Just r -> (SortGen True True, r)
Nothing -> case stripPrefix "sC" s of
Just r -> (SortGen False True, r)
Nothing -> case stripPrefix "eC" s of
Just r -> (SortGen True False, r)
Nothing -> case stripPrefix "C" s of
Just r | not $ isPrefixOf "ond" r -> (SortGen False False, r)
_ -> (NoSortGen, s)
{- ----------------------------------------------------------------------------
join or max functions
---------------------------------------------------------------------------- -}
sublogics_join :: (Bool -> Bool -> Bool)
-> (SubsortingFeatures -> SubsortingFeatures
-> SubsortingFeatures)
-> (SortGenerationFeatures -> SortGenerationFeatures
-> SortGenerationFeatures)
-> (CASL_Formulas -> CASL_Formulas -> CASL_Formulas)
-> (a -> a -> a)
-> CASL_SL a -> CASL_SL a -> CASL_SL a
sublogics_join jB jS jC jF jE a b = CASL_SL
{ sub_features = jS (sub_features a) (sub_features b)
, ext_features = jE (ext_features a) (ext_features b)
, has_part = jB (has_part a) $ has_part b
, cons_features = jC (cons_features a) (cons_features b)
, has_eq = jB (has_eq a) $ has_eq b
, has_pred = jB (has_pred a) $ has_pred b
, has_empty_sorts = jB (has_empty_sorts a) $ has_empty_sorts b
, which_logic = jF (which_logic a) (which_logic b)
}
sublogics_max :: Lattice a => CASL_SL a -> CASL_SL a
-> CASL_SL a
sublogics_max = sublogics_join max max (joinSortGenFeature min) max cjoin
{- ----------------------------------------------------------------------------
Helper functions
---------------------------------------------------------------------------- -}
-- compute sublogics from a list of sublogics
comp_list :: Lattice a => [CASL_SL a] -> CASL_SL a
comp_list = foldl sublogics_max bottom
{- map a function returning Maybe over a list of arguments
. a list of Pos is maintained by removing an element if the
function f returns Nothing on the corresponding element of
the argument list
. leftover elements in the Pos list after the argument
list is exhausted are appended at the end with Nothing
as a substitute for f's result -}
mapMaybePos :: [Pos] -> (a -> Maybe b) -> [a] -> [(Maybe b, Pos)]
mapMaybePos [] _ _ = []
mapMaybePos (p1 : pl) f [] = (Nothing, p1) : mapMaybePos pl f []
mapMaybePos (p1 : pl) f (h : t) = let res = f h in
(if isJust res then ((res, p1) :) else id) $ mapMaybePos pl f t
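-- A minimal sketch (not part of the original module) of the behaviour
-- described above: positions whose argument is mapped to Nothing are
-- dropped, and surplus positions are padded with Nothing at the end.
_mapMaybePosExample :: [Pos] -> [(Maybe Int, Pos)]
_mapMaybePosExample ps =
  mapMaybePos ps (\ i -> if even i then Just i else Nothing) [2, 3]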
{- map with partial function f on Maybe type
will remove elements from given Pos list for elements of [a]
where f returns Nothing
given number of elements from the beginning of Range are always
kept -}
mapPos :: Int -> Range -> (a -> Maybe b) -> [a] -> ([b], Range)
mapPos c (Range p) f l = let
(res, pos) = unzip $ mapMaybePos (drop c p) f l
in
(catMaybes res, Range (take c p ++ pos))
{- ----------------------------------------------------------------------------
Functions to analyse formulae
---------------------------------------------------------------------------- -}
{- ---------------------------------------------------------------------------
These functions are based on Till Mossakowski's paper "Sublanguages of
CASL", which is CoFI Note L-7. The functions implement an adaption of
the reduced grammar given there for formulae in a specific expression
logic by, checking whether a formula would match the productions from the
grammar.
--------------------------------------------------------------------------- -}
sl_form_level :: (f -> CASL_Formulas)
-> (Bool, Bool) -> FORMULA f -> CASL_Formulas
sl_form_level ff (isCompound, leftImp) phi = let
subl = sl_form_level_aux ff (isCompound, leftImp) phi
in if subl == FOL
then if testPrenex True ff phi then Prenex
else FOL
else subl
sl_form_level_aux :: (f -> CASL_Formulas)
-> (Bool, Bool) -> FORMULA f -> CASL_Formulas
sl_form_level_aux ff (isCompound, leftImp) phi =
case phi of
Quantification q _ f _ ->
let ql = sl_form_level_aux ff (isCompound, leftImp) f
in if is_atomic_q q then ql else max FOL ql
Junction j l _ -> maximum $ case j of
Con -> FOL : map (sl_form_level_aux ff (True, leftImp)) l
Dis -> FOL : map (sl_form_level_aux ff (False, False)) l
Relation l1 c l2 _ -> maximum $ sl_form_level_aux ff (True, True) l1
: case c of
Equivalence -> [ sl_form_level_aux ff (True, True) l2
, if leftImp then FOL else GHorn ]
_ -> [ sl_form_level_aux ff (True, False) l2
, if leftImp then FOL else
if isCompound then GHorn else Horn ]
Negation f _ -> max FOL $ sl_form_level_aux ff (False, False) f
Atom b _ -> if b then Atomic else FOL
Equation _ e _ _
| e == Existl -> Atomic
| leftImp -> FOL
| otherwise -> Horn
QuantOp {} -> SOL -- it can't get worse
QuantPred {} -> SOL
ExtFORMULA f -> ff f
_ -> Atomic
testPrenex :: Bool -> (f -> CASL_Formulas) -> FORMULA f -> Bool
testPrenex topQ ff phi =
case phi of
Quantification _ _ phi' _ -> if topQ then testPrenex True ff phi' else False
Junction _ l _ -> foldl (\b x -> b && testPrenex False ff x) True l
Relation l1 _ l2 _ -> testPrenex False ff l1 && testPrenex False ff l2
Negation f _ -> testPrenex False ff f
Atom _ _ -> True
Equation _ _ _ _ -> True
QuantOp {} -> error "should not get quant ops in FOL"
QuantPred {} -> error "should not get quant preds in FOL"
ExtFORMULA f -> if ff f == Prenex then True else False
_ -> True
-- QUANTIFIER
is_atomic_q :: QUANTIFIER -> Bool
is_atomic_q Universal = True
is_atomic_q _ = False
-- compute logic of a formula by checking all logics in turn
get_logic :: Lattice a => (f -> CASL_SL a)
-> FORMULA f -> CASL_SL a
get_logic ff f = bottom
{ which_logic = sl_form_level (which_logic . ff) (False, False) f }
-- for the formula inside a subsort-defn
get_logic_sd :: Lattice a => (f -> CASL_SL a)
-> FORMULA f -> CASL_SL a
get_logic_sd ff f = bottom
{ which_logic =
max Horn $ sl_form_level (which_logic . ff) (False, False) f }
{- ----------------------------------------------------------------------------
Functions to compute minimal sublogic for a given element, these work
by recursing into all subelements
---------------------------------------------------------------------------- -}
sl_basic_spec :: Lattice a => (b -> CASL_SL a)
-> (s -> CASL_SL a)
-> (f -> CASL_SL a)
-> BASIC_SPEC b s f -> CASL_SL a
sl_basic_spec bf sf ff (Basic_spec l) =
comp_list $ map (sl_basic_items bf sf ff . item) l
sl_basic_items :: Lattice a => (b -> CASL_SL a)
-> (s -> CASL_SL a)
-> (f -> CASL_SL a)
-> BASIC_ITEMS b s f -> CASL_SL a
sl_basic_items bf sf ff bi = case bi of
Sig_items i -> sl_sig_items sf ff i
Free_datatype sk l _ -> needsEmptySorts sk
$ comp_list $ map (sl_datatype_decl . item) l
Sort_gen l _ -> sublogics_max need_se_cons
$ comp_list $ map (sl_sig_items sf ff . item) l
Var_items l _ -> comp_list $ map sl_var_decl l
Local_var_axioms d l _ -> comp_list
$ map sl_var_decl d ++ map (sl_formula ff . item) l
Axiom_items l _ -> comp_list $ map (sl_formula ff . item) l
Ext_BASIC_ITEMS b -> bf b
needsEmptySorts :: Lattice a => SortsKind -> CASL_SL a -> CASL_SL a
needsEmptySorts sk = case sk of
NonEmptySorts -> id
PossiblyEmptySorts -> sublogics_max need_empty_sorts
sl_sig_items :: Lattice a => (s -> CASL_SL a)
-> (f -> CASL_SL a)
-> SIG_ITEMS s f -> CASL_SL a
sl_sig_items sf ff si = case si of
Sort_items sk l _ -> needsEmptySorts sk
$ comp_list $ map (sl_sort_item ff . item) l
Op_items l _ -> comp_list $ map (sl_op_item ff . item) l
Pred_items l _ -> comp_list $ map (sl_pred_item ff . item) l
Datatype_items sk l _ -> needsEmptySorts sk
$ comp_list $ map (sl_datatype_decl . item) l
Ext_SIG_ITEMS s -> sf s
{- Subsort_defn needs to compute the expression logic needed separately
because the expressiveness allowed in the formula may be different
from more general formulae in the same expression logic -}
sl_sort_item :: Lattice a => (f -> CASL_SL a)
-> SORT_ITEM f -> CASL_SL a
sl_sort_item ff si = case si of
Subsort_decl {} -> need_sul
Subsort_defn _ _ _ f _ -> sublogics_max
(get_logic_sd ff $ item f)
(sublogics_max need_sul
(sl_formula ff $ item f))
Iso_decl _ _ -> need_sul
_ -> bottom
sl_op_item :: Lattice a => (f -> CASL_SL a)
-> OP_ITEM f -> CASL_SL a
sl_op_item ff oi = case oi of
Op_decl _ t l _ -> sublogics_max (sl_op_type t)
(comp_list $ map (sl_op_attr ff) l)
Op_defn _ h t _ -> sublogics_max (sl_op_head h)
(sl_term ff $ item t)
sl_op_attr :: Lattice a => (f -> CASL_SL a)
-> OP_ATTR f -> CASL_SL a
sl_op_attr ff oa = case oa of
Unit_op_attr t -> sl_term ff t
_ -> need_eq
sl_op_type :: Lattice a => OP_TYPE -> CASL_SL a
sl_op_type ot = case ot of
Op_type Partial _ _ _ -> need_part
_ -> bottom
sl_op_head :: Lattice a => OP_HEAD -> CASL_SL a
sl_op_head oh = case oh of
Op_head Partial _ _ _ -> need_part
_ -> bottom
sl_pred_item :: Lattice a => (f -> CASL_SL a)
-> PRED_ITEM f -> CASL_SL a
sl_pred_item ff i = case i of
Pred_decl {} -> need_pred
Pred_defn _ _ f _ -> sublogics_max need_pred (sl_formula ff $ item f)
sl_datatype_decl :: Lattice a => DATATYPE_DECL -> CASL_SL a
sl_datatype_decl (Datatype_decl _ l _) =
comp_list $ map (sl_alternative . item) l
sl_alternative :: Lattice a => ALTERNATIVE -> CASL_SL a
sl_alternative a = case a of
Alt_construct Total _ l _ -> comp_list $ map sl_components l
Alt_construct Partial _ _ _ -> need_part
Subsorts _ _ -> need_sul
sl_components :: Lattice a => COMPONENTS -> CASL_SL a
sl_components c = case c of
Cons_select Partial _ _ _ -> need_part
_ -> bottom
sl_var_decl :: Lattice a => VAR_DECL -> CASL_SL a
sl_var_decl _ = bottom
{- without subsorts casts are trivial and would not even require
need_part, but testing sortOfTerm is not safe for formulas in basic specs
that are only parsed (and resolved) but not enriched with sorts -}
slRecord :: Lattice a => (f -> CASL_SL a) -> Record f (CASL_SL a) (CASL_SL a)
slRecord ff = (constRecord ff comp_list bottom)
{ foldPredication = \ _ _ l _ -> comp_list $ need_pred : l
, foldEquation = \ _ t _ u _ -> comp_list [need_eq, t, u]
, foldSort_gen_ax = \ _ constraints _ ->
case recover_Sort_gen_ax constraints of
(_, ops, m) -> case (m, filter (\ o -> case o of
Op_name _ -> True
Qual_op_name n _ _ ->
not (isInjName n)) ops) of
([], []) -> need_se_cons
([], _) -> need_e_cons
(_, []) -> need_s_cons
_ -> need_cons
, foldQuantPred = \ _ _ _ f -> sublogics_max need_pred f
, foldCast = \ _ t _ _ -> sublogics_max need_part t
}
sl_term :: Lattice a => (f -> CASL_SL a) -> TERM f -> CASL_SL a
sl_term = foldTerm . slRecord
sl_formula :: Lattice a => (f -> CASL_SL a)
-> FORMULA f -> CASL_SL a
sl_formula ff f = sublogics_max (get_logic ff f) (sl_form ff f)
sl_form :: Lattice a => (f -> CASL_SL a)
-> FORMULA f -> CASL_SL a
sl_form = foldFormula . slRecord
sl_symb_items :: Lattice a => SYMB_ITEMS -> CASL_SL a
sl_symb_items (Symb_items k l _) = sublogics_max (sl_symb_kind k)
(comp_list $ map sl_symb l)
sl_symb_kind :: Lattice a => SYMB_KIND -> CASL_SL a
sl_symb_kind pk = case pk of
Preds_kind -> need_pred
_ -> bottom
sl_symb :: Lattice a => SYMB -> CASL_SL a
sl_symb s = case s of
Symb_id _ -> bottom
Qual_id _ t _ -> sl_type t
sl_type :: Lattice a => TYPE -> CASL_SL a
sl_type ty = case ty of
O_type t -> sl_op_type t
P_type _ -> need_pred
_ -> bottom
sl_symb_map_items :: Lattice a => SYMB_MAP_ITEMS -> CASL_SL a
sl_symb_map_items (Symb_map_items k l _) = sublogics_max (sl_symb_kind k)
(comp_list $ map sl_symb_or_map l)
sl_symb_or_map :: Lattice a => SYMB_OR_MAP -> CASL_SL a
sl_symb_or_map syms = case syms of
Symb s -> sl_symb s
Symb_map s t _ -> sublogics_max (sl_symb s) (sl_symb t)
{- the maps have no influence since all sorts, ops, preds in them
must also appear in the signatures, so any features needed by
them will be included by just checking the signatures -}
sl_sign :: Lattice a => (e -> CASL_SL a) -> Sign f e -> CASL_SL a
sl_sign f s =
let rel = sortRel s
subs | Rel.noPairs rel = bottom
| Rel.locallyFiltered rel = need_sul
| otherwise = need_sub
esorts = if Set.null $ emptySortSet s then bottom
else need_empty_sorts
preds = if MapSet.null $ predMap s then bottom else need_pred
partial = if any isPartial $ Set.toList
$ MapSet.elems $ opMap s then need_part else bottom
in comp_list [subs, esorts, preds, partial, f $ extendedInfo s]
sl_sentence :: Lattice a => (f -> CASL_SL a) -> FORMULA f -> CASL_SL a
sl_sentence = sl_formula
sl_morphism :: Lattice a => (e -> CASL_SL a) -> Morphism f e m -> CASL_SL a
sl_morphism f m = sublogics_max (sl_sign f $ msource m) (sl_sign f $ mtarget m)
sl_symbol :: Lattice a => Symbol -> CASL_SL a
sl_symbol (Symbol _ t) = sl_symbtype t
sl_symbtype :: Lattice a => SymbType -> CASL_SL a
sl_symbtype st = case st of
OpAsItemType t -> sl_optype t
PredAsItemType _ -> need_pred
_ -> bottom
sl_optype :: Lattice a => OpType -> CASL_SL a
sl_optype = sl_opkind . opKind
sl_opkind :: Lattice a => OpKind -> CASL_SL a
sl_opkind fk = case fk of
Partial -> need_part
_ -> bottom
{- ----------------------------------------------------------------------------
projection functions
---------------------------------------------------------------------------- -}
sl_in :: Lattice a => CASL_SL a -> CASL_SL a -> Bool
sl_in given new = sublogics_max given new == given
in_x :: Lattice a => CASL_SL a -> b -> (b -> CASL_SL a) -> Bool
in_x l x f = sl_in l (f x)
-- process Annoted type like simple type, simply keep all annos
pr_annoted :: CASL_SL s -> (CASL_SL s -> a -> Maybe a)
-> Annoted a -> Maybe (Annoted a)
pr_annoted sl f a =
fmap (`replaceAnnoted` a) $ f sl (item a)
{- project an Annoted value, producing a [SORT] as a by-product;
used for projecting datatypes: sometimes it is necessary to
introduce a SORT_DEFN for a datatype that was erased
completely, for example by only having partial constructors
and partiality forbidden in the desired sublogic - the sort
name may however still be needed for formulas because it can
appear there like in (forall x,y:Datatype . x=x), a formula
that does not use partiality (does not use any constructor
or selector) -}
pr_annoted_dt :: CASL_SL s
-> (CASL_SL s -> a -> (Maybe a, [SORT]))
-> Annoted a -> (Maybe (Annoted a), [SORT])
pr_annoted_dt sl f a =
let (res, lst) = f sl (item a)
in (fmap (`replaceAnnoted` a) res
, lst)
-- keep an element if its computed sublogic is in the given sublogic
pr_check :: Lattice a => CASL_SL a -> (b -> CASL_SL a)
-> b -> Maybe b
pr_check l f e = if in_x l e f then Just e else Nothing
checkRecord :: CASL_SL a -> (CASL_SL a -> f -> Maybe (FORMULA f))
-> Record f (FORMULA f) (TERM f)
checkRecord l ff = (mapRecord id)
{ foldExtFORMULA = \ o _ -> case o of
ExtFORMULA f -> fromMaybe (error "checkRecord") $ ff l f
_ -> error "checkRecord.foldExtFORMULA" }
toCheck :: Lattice a => CASL_SL a
-> (CASL_SL a -> f -> Maybe (FORMULA f))
-> f -> CASL_SL a
toCheck l ff = maybe top (const l) . ff l
pr_formula :: Lattice a => (CASL_SL a -> f -> Maybe (FORMULA f))
-> CASL_SL a -> FORMULA f -> Maybe (FORMULA f)
pr_formula ff l =
fmap (foldFormula $ checkRecord l ff)
. pr_check l (sl_formula $ toCheck l ff)
pr_term :: Lattice a => (CASL_SL a -> f -> Maybe (FORMULA f))
-> CASL_SL a -> TERM f -> Maybe (TERM f)
pr_term ff l =
fmap (foldTerm $ checkRecord l ff)
. pr_check l (sl_term $ toCheck l ff)
-- make full Annoted Sig_items out of a SORT list
pr_make_sorts :: [SORT] -> Annoted (BASIC_ITEMS b s f)
pr_make_sorts s =
Annoted (Sig_items (Sort_items NonEmptySorts
[Annoted (Sort_decl s nullRange) nullRange [] []]
nullRange))
nullRange [] []
{- when processing BASIC_SPEC, add a Sort_decl in front for sorts
defined by DATATYPE_DECLs that had to be removed completely,
otherwise formulas might be broken by the missing sorts, thus
breaking the projection -}
pr_basic_spec :: Lattice a =>
(CASL_SL a -> b -> (Maybe (BASIC_ITEMS b s f), [SORT]))
-> (CASL_SL a -> s -> (Maybe (SIG_ITEMS s f), [SORT]))
-> (CASL_SL a -> f -> Maybe (FORMULA f))
-> CASL_SL a -> BASIC_SPEC b s f -> BASIC_SPEC b s f
pr_basic_spec fb fs ff l (Basic_spec s) =
let
res = map (pr_annoted_dt l $ pr_basic_items fb fs ff) s
items = mapMaybe fst res
toAdd = concatMap snd res
ret = if null toAdd then
items
else
pr_make_sorts toAdd : items
in
Basic_spec ret
{- returns a non-empty list of [SORT] if datatypes had to be removed
completely -}
pr_basic_items :: Lattice a =>
(CASL_SL a -> b -> (Maybe (BASIC_ITEMS b s f), [SORT]))
-> (CASL_SL a -> s -> (Maybe (SIG_ITEMS s f), [SORT]))
-> (CASL_SL a -> f -> Maybe (FORMULA f))
-> CASL_SL a -> BASIC_ITEMS b s f
-> (Maybe (BASIC_ITEMS b s f), [SORT])
pr_basic_items fb fs ff l bi = case bi of
Sig_items s ->
let
(res, lst) = pr_sig_items fs ff l s
in
if isNothing res then
(Nothing, lst)
else
(Just (Sig_items (fromJust res)), lst)
Free_datatype sk d p ->
let
(res, pos) = mapPos 2 p (pr_annoted l pr_datatype_decl) d
lst = pr_lost_dt l (map item d)
in
if null res then
(Nothing, lst)
else
(Just (Free_datatype sk res pos), lst)
Sort_gen s p ->
if has_cons l then
let
tmp = map (pr_annoted_dt l $ pr_sig_items fs ff) s
res = mapMaybe fst tmp
lst = concatMap snd tmp
in
if null res then
(Nothing, lst)
else
(Just (Sort_gen res p), lst)
else
(Nothing, [])
Var_items v p -> (Just (Var_items v p), [])
Local_var_axioms v f p ->
let
(res, pos) = mapPos (length v) p
(pr_annoted l $ pr_formula ff) f
in
if null res then
(Nothing, [])
else
(Just (Local_var_axioms v res pos), [])
Axiom_items f p ->
let
(res, pos) = mapPos 0 p (pr_annoted l $ pr_formula ff) f
in
if null res then
(Nothing, [])
else
(Just (Axiom_items res pos), [])
Ext_BASIC_ITEMS b -> fb l b
pr_datatype_decl :: CASL_SL a -> DATATYPE_DECL -> Maybe DATATYPE_DECL
pr_datatype_decl l (Datatype_decl s a p) =
let
(res, pos) = mapPos 1 p (pr_annoted l pr_alternative) a
in
if null res then
Nothing
else
Just (Datatype_decl s res pos)
pr_alternative :: CASL_SL a -> ALTERNATIVE -> Maybe ALTERNATIVE
pr_alternative l alt = case alt of
Alt_construct Total n c p ->
let
(res, pos) = mapPos 1 p (pr_components l) c
in
if null res then
Nothing
else
Just (Alt_construct Total n res pos)
Alt_construct Partial _ _ _ ->
if has_part l then
Just alt
else
Nothing
Subsorts s p ->
if has_sub l then
Just (Subsorts s p)
else
Nothing
pr_components :: CASL_SL a -> COMPONENTS -> Maybe COMPONENTS
pr_components l sel = case sel of
Cons_select Partial _ _ _ ->
if has_part l then
Just sel
else
Nothing
_ -> Just sel
{- takes a list of datatype declarations and checks whether a
whole declaration is invalid in the given sublogic - if this
is the case, the sort that would be declared by the type is
added to a list of SORT that is emitted -}
pr_lost_dt :: CASL_SL a -> [DATATYPE_DECL] -> [SORT]
pr_lost_dt sl = concatMap (\ dt@(Datatype_decl s _ _) ->
case pr_datatype_decl sl dt of
Nothing -> [s]
_ -> [])
pr_symbol :: Lattice a => CASL_SL a -> Symbol -> Maybe Symbol
pr_symbol l = pr_check l sl_symbol
{- returns a non-empty list of [SORT] if datatypes had to be removed
completely -}
pr_sig_items :: Lattice a =>
(CASL_SL a -> s -> (Maybe (SIG_ITEMS s f), [SORT]))
-> (CASL_SL a -> f -> Maybe (FORMULA f))
-> CASL_SL a -> SIG_ITEMS s f -> (Maybe (SIG_ITEMS s f), [SORT])
pr_sig_items sf ff l si = case si of
Sort_items sk s p ->
let
(res, pos) = mapPos 1 p (pr_annoted l pr_sort_item) s
in
if null res then
(Nothing, [])
else
(Just (Sort_items sk res pos), [])
Op_items o p ->
let
(res, pos) = mapPos 1 p (pr_annoted l $ pr_op_item ff) o
in
if null res then
(Nothing, [])
else
(Just (Op_items res pos), [])
Pred_items i p ->
if has_pred l then
(Just (Pred_items i p), [])
else
(Nothing, [])
Datatype_items sk d p ->
let
(res, pos) = mapPos 1 p (pr_annoted l pr_datatype_decl) d
lst = pr_lost_dt l (map item d)
in
if null res then
(Nothing, lst)
else
(Just (Datatype_items sk res pos), lst)
Ext_SIG_ITEMS s -> sf l s
pr_op_item :: Lattice a => (CASL_SL a -> f -> Maybe (FORMULA f))
-> CASL_SL a -> OP_ITEM f -> Maybe (OP_ITEM f)
pr_op_item ff l oi = case oi of
Op_defn o h f r -> do
g <- pr_annoted l (pr_term ff) f
return $ Op_defn o h g r
_ -> Just oi
{- subsort declarations and definitions are reduced to simple
sort declarations if the sublogic disallows subsorting to
avoid losing sorts in the projection -}
pr_sort_item :: CASL_SL a -> SORT_ITEM f -> Maybe (SORT_ITEM f)
pr_sort_item _ (Sort_decl s p) = Just (Sort_decl s p)
pr_sort_item l (Subsort_decl sl s p) =
Just $ if has_sub l then Subsort_decl sl s p
else Sort_decl (s : sl) nullRange
pr_sort_item l (Subsort_defn s1 v s2 f p) =
Just $ if has_sub l then Subsort_defn s1 v s2 f p
else Sort_decl [s1] nullRange
pr_sort_item _ (Iso_decl s p) = Just (Iso_decl s p)
pr_symb_items :: Lattice a => CASL_SL a -> SYMB_ITEMS
-> Maybe SYMB_ITEMS
pr_symb_items l (Symb_items k s p) =
if in_x l k sl_symb_kind then
let
(res, pos) = mapPos 1 p (pr_symb l) s
in
if null res then
Nothing
else
Just (Symb_items k res pos)
else
Nothing
pr_symb_map_items :: Lattice a => CASL_SL a -> SYMB_MAP_ITEMS
-> Maybe SYMB_MAP_ITEMS
pr_symb_map_items l (Symb_map_items k s p) =
if in_x l k sl_symb_kind then
let
(res, pos) = mapPos 1 p (pr_symb_or_map l) s
in
if null res then
Nothing
else
Just (Symb_map_items k res pos)
else
Nothing
pr_symb_or_map :: Lattice a => CASL_SL a -> SYMB_OR_MAP
-> Maybe SYMB_OR_MAP
pr_symb_or_map l (Symb s) =
let
res = pr_symb l s
in
if isNothing res then
Nothing
else
Just (Symb (fromJust res))
pr_symb_or_map l (Symb_map s t p) =
let
a = pr_symb l s
b = pr_symb l t
in
if isJust a && isJust b then
Just (Symb_map s t p)
else
Nothing
pr_symb :: Lattice a => CASL_SL a -> SYMB -> Maybe SYMB
pr_symb _ (Symb_id i) = Just (Symb_id i)
pr_symb l (Qual_id i t p) =
if in_x l t sl_type then
Just (Qual_id i t p)
else
Nothing
pr_sign :: CASL_SL a -> Sign f e -> Sign f e
pr_sign _sl s = s -- do something here
pr_morphism :: Lattice a => CASL_SL a -> Morphism f e m
-> Morphism f e m
pr_morphism l m =
m { msource = pr_sign l $ msource m
, mtarget = pr_sign l $ mtarget m
, op_map = pr_op_map l $ op_map m
, pred_map = pr_pred_map l $ pred_map m }
{- predicates only rely on the has_pred feature, so the map
can be kept or removed as a whole -}
pr_pred_map :: CASL_SL a -> Pred_map -> Pred_map
pr_pred_map l x = if has_pred l then x else Map.empty
pr_op_map :: Lattice a => CASL_SL a -> Op_map -> Op_map
pr_op_map = Map.filterWithKey . pr_op_map_entry
pr_op_map_entry :: Lattice a => CASL_SL a -> (Id, OpType) -> (Id, OpKind)
-> Bool
pr_op_map_entry l (_, t) (_, b) =
has_part l || in_x l t sl_optype && b == Partial
{- compute a morphism that consists of the original signature
and the projected signature -}
pr_epsilon :: m -> CASL_SL a -> Sign f e -> Morphism f e m
pr_epsilon extEm l s = embedMorphism extEm s $ pr_sign l s
|
gnn/Hets
|
CASL/Sublogic.hs
|
gpl-2.0
| 37,877
| 138
| 37
| 11,625
| 10,802
| 5,646
| 5,156
| 760
| 17
|
module Data.Tuple6 where
import Data.List ((!!), length)
import Data.Maybe (Maybe (Just, Nothing))
import Text.Show (Show)
data Tuple6 a b c d e f =
Tuple6_0 |
Tuple6_1 a |
Tuple6_2 (a, b) |
Tuple6_3 (a, b, c) |
Tuple6_4 (a, b, c, d) |
Tuple6_5 (a, b, c, d, e) |
Tuple6_6 (a, b, c, d, e, f) deriving Show
toTuple6 :: (Maybe a, Maybe b, Maybe c, Maybe d, Maybe e, Maybe f) -> Tuple6 a b c d e f
toTuple6 t = case t of
(Nothing, _, _, _, _, _) -> Tuple6_0
(Just a, Nothing, _, _, _, _) -> Tuple6_1 a
(Just a, Just b, Nothing, _, _, _) -> Tuple6_2 (a, b)
(Just a, Just b, Just c, Nothing, _, _) -> Tuple6_3 (a, b, c)
(Just a, Just b, Just c, Just d, Nothing, _) -> Tuple6_4 (a, b, c, d)
(Just a, Just b, Just c, Just d, Just e, Nothing) -> Tuple6_5 (a, b, c, d, e)
(Just a, Just b, Just c, Just d, Just e, Just f) -> Tuple6_6 (a, b, c, d, e, f)
fromTuple6 :: Tuple6 a b c d e f -> (Maybe a, Maybe b, Maybe c, Maybe d, Maybe e, Maybe f)
fromTuple6 t = case t of
Tuple6_0 -> (Nothing, Nothing, Nothing, Nothing, Nothing, Nothing)
Tuple6_1 a -> (Just a, Nothing, Nothing, Nothing, Nothing, Nothing)
Tuple6_2 (a, b) -> (Just a, Just b, Nothing, Nothing, Nothing, Nothing)
Tuple6_3 (a, b, c) -> (Just a, Just b, Just c, Nothing, Nothing, Nothing)
Tuple6_4 (a, b, c, d) -> (Just a, Just b, Just c, Just d, Nothing, Nothing)
Tuple6_5 (a, b, c, d, e) -> (Just a, Just b, Just c, Just d, Just e, Nothing)
Tuple6_6 (a, b, c, d, e, f) -> (Just a, Just b, Just c, Just d, Just e, Just f)
listToTuple6 :: [a] -> Tuple6 a a a a a a
listToTuple6 l = case length l of
0 -> Tuple6_0
1 -> Tuple6_1 (l !! 0)
2 -> Tuple6_2 ((l !! 0), (l !! 1))
3 -> Tuple6_3 ((l !! 0), (l !! 1), (l !! 2))
4 -> Tuple6_4 ((l !! 0), (l !! 1), (l !! 2), (l !! 3))
5 -> Tuple6_5 ((l !! 0), (l !! 1), (l !! 2), (l !! 3), (l !! 4))
_ -> Tuple6_6 ((l !! 0), (l !! 1), (l !! 2), (l !! 3), (l !! 4), (l !! 5))
|
marcellussiegburg/autotool
|
yesod/Data/Tuple6.hs
|
gpl-2.0
| 1,937
| 0
| 11
| 490
| 1,218
| 686
| 532
| 39
| 7
|
{-# LANGUAGE
FlexibleContexts
, FlexibleInstances
, GeneralizedNewtypeDeriving
, MultiParamTypeClasses
, StandaloneDeriving
, UndecidableInstances
#-}
module HFlint.NMod.Algebra
where
import Prelude hiding ( (+), (-), negate, subtract
, (*), (/), recip, (^), (^^)
, gcd
, quotRem, quot, rem
)
import qualified Prelude as P
import Math.Structure.Additive
import Math.Structure.Multiplicative
import Math.Structure.Ring
import HFlint.Internal.Context
import HFlint.NMod.Arithmetic ()
import HFlint.NMod.FFI
instance ReifiesFlintContext NModCtx ctxProxy
=> AdditiveMagma (NMod ctxProxy)
where
(+) = (P.+)
instance ReifiesFlintContext NModCtx ctxProxy
=> Abelian (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> AdditiveSemigroup (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> AdditiveMonoid (NMod ctxProxy)
where
zero = NMod 0
instance ReifiesFlintContext NModCtx ctxProxy
=> DecidableZero (NMod ctxProxy)
where
isZero = (==0) . unNMod
instance ReifiesFlintContext NModCtx ctxProxy
=> AdditiveGroup (NMod ctxProxy)
where
negate = P.negate
(-) = (P.-)
instance ReifiesFlintContext NModCtx ctxProxy
=> MultiplicativeMagma (NMod ctxProxy)
where
(*) = (P.*)
instance ReifiesFlintContext NModCtx ctxProxy
=> Commutative (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> MultiplicativeSemigroup (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> MultiplicativeMonoid (NMod ctxProxy)
where
one = NMod 1
instance ReifiesFlintContext NModCtx ctxProxy
=> DecidableOne (NMod ctxProxy)
where
isOne = (==1) . unNMod
deriving instance ReifiesFlintContext NModCtx ctxProxy
=> MultiplicativeMagma (NonZero (NMod ctxProxy))
instance ReifiesFlintContext NModCtx ctxProxy
=> Commutative (NonZero (NMod ctxProxy))
instance ReifiesFlintContext NModCtx ctxProxy
=> MultiplicativeSemigroup (NonZero (NMod ctxProxy))
instance ReifiesFlintContext NModCtx ctxProxy
=> MultiplicativeMonoid (NonZero (NMod ctxProxy))
where
one = NonZero one
instance ReifiesFlintContext NModCtx ctxProxy
=> DecidableOne (NonZero (NMod ctxProxy))
where
isOne (NonZero a) = isOne a
instance ReifiesFlintContext NModCtx ctxProxy
=> MultiplicativeGroup (NonZero (NMod ctxProxy))
where
  recip (NonZero a) = NonZero (P.recip a)
(NonZero a) / (NonZero b) = NonZero (a P./ b)
instance ReifiesFlintContext NModCtx ctxProxy
=> DecidableUnit (NMod ctxProxy)
where
isUnit = not . isZero
toUnit = Unit
instance
ReifiesFlintContext NModCtx ctxProxy
=> DecidableOne (Unit (NMod ctxProxy))
where
isOne = isOne . fromUnit
instance
ReifiesFlintContext NModCtx ctxProxy
=> MultiplicativeGroup (Unit (NMod ctxProxy))
where
recip = Unit . fromNonZero . recip . NonZero . fromUnit
instance ReifiesFlintContext NModCtx ctxProxy
=> Distributive (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> Semiring (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> Rng (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> Rig (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> Ring (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> IntegralDomain (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> DivisionRing (NMod ctxProxy)
instance ReifiesFlintContext NModCtx ctxProxy
=> Field (NMod ctxProxy)
|
martinra/hflint
|
src/HFlint/NMod/Algebra.hs
|
gpl-3.0
| 3,780
| 2
| 9
| 901
| 999
| 514
| 485
| 93
| 0
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
module Sara.Z3.CondAst ( CondAst
, W.FailureTrackableAst
, ast
, runCondAst
, trivial
, conditionOn
, conditionOnNot
, combine2
, combine3
, combineN
, liftCond
, addProofObligation
, addAssumption
, W.findFailure ) where
import qualified Sara.Z3.Ast as A
import qualified Sara.Z3.AstWrapper as W
import Sara.Errors (VerifierFailureType)
import Text.Parsec.Pos (SourcePos)
-- | An A.Ast together with its proof obligations and assumptions.
data CondAst
= CondAst { obl :: W.ProofObligation
, ass :: W.Assumption
, ast :: A.Ast }
deriving (Eq, Ord, Show)
-- | A trivial expression with no proof obligation.
trivial :: A.Ast -> CondAst
trivial ast = CondAst W.empty W.empty ast
runCondAst :: CondAst -> (W.ProofObligation, W.Assumption, A.Ast)
runCondAst (CondAst obl ass ast) = (obl, ass, ast)
-- | Transforms the proof obligations and assumptions to be conditioned on the given condition.
conditionOn :: A.Ast -> CondAst -> CondAst
conditionOn cond (CondAst obl ass ast) = CondAst (W.conditionOn cond obl) (W.conditionOn cond ass) ast
-- | Transforms the proof obligations and assumptions to be conditioned on NOT the given condition.
conditionOnNot :: A.Ast -> CondAst -> CondAst
conditionOnNot cond condAst = conditionOn (A.UnOp A.Not cond) condAst
-- | Combine the ast using the given function and conjunct the proof obligations and assumptions.
combine2 :: (A.Ast -> A.Ast -> A.Ast) -> CondAst -> CondAst -> CondAst
combine2 f (CondAst oblA assA a) (CondAst oblB assB b) =
CondAst (W.conjunct [oblA, oblB]) (W.conjunct [assA, assB]) $ f a b
-- | Combine the ast using the given function and conjunct the proof obligations and assumptions.
combine3 :: (A.Ast -> A.Ast -> A.Ast -> A.Ast) -> CondAst -> CondAst -> CondAst -> CondAst
combine3 f (CondAst oblA assA a) (CondAst oblB assB b) (CondAst oblC assC c) =
CondAst (W.conjunct [oblA, oblB, oblC]) (W.conjunct [assA, assB, assC]) $ f a b c
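-- A minimal sketch (not part of the original module): given some ternary
-- if-then-else constructor on plain asts (passed in as a parameter here,
-- since no particular 'A.Ast' constructor is assumed to exist), a
-- conditional can be assembled so that each branch's obligations and
-- assumptions are only required under the corresponding branch condition.
_condExample :: (A.Ast -> A.Ast -> A.Ast -> A.Ast) -- ^ hypothetical ite constructor
             -> A.Ast -> CondAst -> CondAst -> CondAst
_condExample ite cond t e =
  combine3 ite (trivial cond) (conditionOn cond t) (conditionOnNot cond e)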
-- | Combine the ast using the given function and conjunct the proof obligations and assumptions.
combineN :: ([A.Ast] -> A.Ast) -> [CondAst] -> CondAst
combineN f conds = CondAst (W.conjunct (map obl conds)) (W.conjunct (map ass conds)) $ f (map ast conds)
-- | Map the expression, leave the proof obligations and assumptions untouched.
liftCond :: (A.Ast -> A.Ast) -> CondAst -> CondAst
liftCond f (CondAst obl ass ast) = CondAst obl ass $ f ast
-- | Adds the first argument as the proof obligation to the second argument.
addProofObligation :: A.Ast -> VerifierFailureType -> SourcePos -> CondAst -> CondAst
addProofObligation newObl failureType pos (CondAst obl ass ast) = CondAst (W.conjunct [newObl', obl]) ass ast
where newObl' = W.singleton newObl (failureType, pos)
-- | Adds the first argument as the assumption to the second argument.
addAssumption :: A.Ast -> CondAst -> CondAst
addAssumption newAss (CondAst obl ass ast) = CondAst obl (W.conjunct [newAss', ass]) ast
where newAss' = W.singleton newAss ()
|
Lykos/Sara
|
src/lib/Sara/Z3/CondAst.hs
|
gpl-3.0
| 3,364
| 0
| 10
| 821
| 894
| 486
| 408
| 50
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeSynonymInstances #-}
--------------------------------------------------------------------------------
-- |
-- Module : Tct.Method.DP.PathAnalysis
-- Copyright : (c) Martin Avanzini <martin.avanzini@uibk.ac.at>,
-- Georg Moser <georg.moser@uibk.ac.at>,
-- Andreas Schnabl <andreas.schnabl@uibk.ac.at>,
-- License : LGPL (see COPYING)
--
-- Maintainer : Martin Avanzini <martin.avanzini@uibk.ac.at>
-- Stability : unstable
-- Portability : unportable
--
-- This module provides the path analysis with respect to dependency
-- graphs.
--------------------------------------------------------------------------------
module Tct.Method.DP.PathAnalysis
(
pathAnalysis
-- * Proof Object
, PathProof
-- * Processor
, pathAnalysisProcessor
, PathAnalysis
)
where
import qualified Data.List as List
import qualified Data.Set as Set
import Control.Monad (liftM)
import Control.Applicative ((<|>))
-- import Control.Monad.Trans (liftIO)
import Data.Maybe (fromMaybe)
import Data.Either (partitionEithers)
import qualified Text.PrettyPrint.HughesPJ as PP
import Text.PrettyPrint.HughesPJ hiding (empty)
import qualified Termlib.FunctionSymbol as F
import qualified Termlib.Problem as Prob
import qualified Termlib.Trs as Trs
import qualified Termlib.Variable as V
import Termlib.Utils
import Tct.Certificate
import Tct.Method.DP.DependencyGraph as DG
import qualified Tct.Processor.Transformations as T
import qualified Tct.Processor as P
import Tct.Processor.Args as A
import Tct.Utils.PPrint
import Tct.Utils.Enum
import qualified Tct.Utils.Xml as Xml
import Tct.Processor.Args.Instances ()
import Tct.Method.DP.Utils
----------------------------------------------------------------------
-- Proof objects
data Path = Path { thePath :: [NodeId] } deriving (Eq, Show)
data PathProof = PathProof { computedPaths :: [Path]
, computedCongrDG :: CDG
, computedDG :: DG
, subsumedBy :: [(Path,[Path])]
, variables :: V.Variables
, signature :: F.Signature
, isLinearProof :: Bool}
| Error DPError
data PathAnalysis = PathAnalysis
instance T.Transformer PathAnalysis where
name PathAnalysis = "pathanalysis"
description PathAnalysis = ["This processor implements path-analysis as described in the dependency pair paper."]
type ArgumentsOf PathAnalysis = Arg Bool
type ProofOf PathAnalysis = PathProof
arguments PathAnalysis = opt { A.name = "linear"
, A.description = unlines [ "If this flag is set, linear path analysis is employed."]
, A.defaultValue = False }
transform inst prob
| not $ Prob.isDPProblem prob = return $ T.NoProgress $ Error NonDPProblemGiven
| otherwise = return $ res
where lin = T.transformationArgs inst
res | progressed = T.Progress p (enumeration [(thePath pth, prob') | (pth,prob') <- pathsToProbs ])
| otherwise = T.NoProgress p
edg = estimatedDependencyGraph defaultApproximation prob
cedg = toCongruenceGraph edg
rts = roots cedg
-- lfs = leafs cedg
p = PathProof { computedPaths = paths
, computedCongrDG = cedg
, computedDG = edg
, subsumedBy = subsume
, variables = Prob.variables prob
, signature = Prob.signature prob
, isLinearProof = lin}
(subsume, pathsToProbs) = partitionEithers $ concatMap walk rts
where walk | lin = walkFromL
| otherwise = walkFromQ
paths = [pth | (pth, _) <- subsume] ++ [pth | (pth,_) <- pathsToProbs]
walkFromL n = [ toSubsumed r | r <- walked]
where walked = walkFromL' prob { Prob.strictDPs = Trs.empty, Prob.weakDPs = Trs.empty} ([],Set.empty) n
toSubsumed (path, nds,pprob) =
case [ Path path2 | (path2, nds2, _) <- walked , nds `Set.isProperSubsetOf` nds2 ] of
[] -> Right (Path path, pprob)
pths -> Left (Path path, pths)
walkFromL' pprob (prefix, nds) n = new ++ concatMap (walkFromL' pprob' (prefix', nds')) sucs
where sucs = List.nub $ successors cedg n
prefix' = prefix ++ [n]
nds' = n `Set.insert` nds
pprob' = pprob { Prob.strictDPs = Prob.strictDPs pprob `Trs.union` stricts
, Prob.weakDPs = Prob.weakDPs pprob `Trs.union` weaks }
new | null sucs = [ ( prefix', nds', pprob') ]
| otherwise = []
rs = allRulesFromNodes cedg [n]
stricts = Trs.fromRules [ r | (StrictDP, r) <- rs]
weaks = Trs.fromRules [ r | (WeakDP, r) <- rs]
walkFromQ = walkFromQ' Trs.empty []
walkFromQ' weaks prefix n = new ++ concatMap (walkFromQ' weaks' path) sucs
where path = prefix ++ [n]
sucs = List.nub $ successors cedg n
rs = allRulesFromNodes cedg [n]
strict_n = Trs.fromRules [ r | (StrictDP, r) <- rs]
weak_n = Trs.fromRules [ r | (WeakDP, r) <- rs]
weaks' = strict_n `Trs.union` weak_n `Trs.union` weaks
new | subsumed = [Left ( Path path, [Path $ path ++ [n'] | n' <- sucs ] )]
| otherwise = [Right ( Path path
, prob { Prob.strictDPs = strict_n, Prob.weakDPs = weaks} )]
where subsumed = not (null sucs) && Trs.isEmpty strict_n
progressed =
case paths of
[pth] -> length spath < length sprob
where spath = [ r | (StrictDP, r) <- allRulesFromNodes cedg (thePath pth) ]
sprob = Trs.toRules $ Prob.strictDPs prob
_ -> True
-- progressed | lin = length (rts ++ lfs) > 2
-- | otherwise =
-- case paths of
-- [pth] -> length spath < length sprob
-- where spath = [ r | (StrictDP, r) <- allRulesFromNodes cedg (thePath pth) ]
-- sprob = Trs.toRules $ Prob.strictDPs prob
-- _ -> True
printPathName :: CDG -> F.Signature -> V.Variables -> Path -> Doc
printPathName cwdg sig vars (Path ns) = hcat $ punctuate (text "->") [printNodeId n | n <- ns]
where printNodeId = pprintCWDGNode cwdg sig vars
instance T.TransformationProof PathAnalysis where
answer proof = case T.transformationResult proof of
T.NoProgress _ -> T.answerFromSubProof proof
T.Progress _ subprobs ->
case mproofs of
Just proofs -> if all P.succeeded proofs
then P.CertAnswer $ certified (unknown, mkUb proofs)
else P.MaybeAnswer
Nothing -> P.MaybeAnswer
where mproofs = sequence [ T.findProof e proof | (SN e,_) <- subprobs]
mkUb proofs = maximum $ (Poly $ Just 1) : [upperBound $ P.certificate p | p <- proofs]
tproofToXml _ _ (Error e) = ("pathanalysis", [errorToXml e])
tproofToXml _ _ p =
( "pathanalysis"
, [ DG.toXml (dg, sig, vs)
, Xml.elt "kind" [] [ Xml.text kind ]
, Xml.elt "paths" [] [ pToXml path | path <- computedPaths p ]
])
where
sig = signature p
vs = variables p
dg = computedDG p
cwdg = computedCongrDG p
kind | isLinearProof p = "linear"
| otherwise = "quadratic"
pToXml path = Xml.elt "path" [] [ Xml.elt "congruence" [] [ Xml.elt "elt" [] [Xml.text $ show m] | m <- congruence cwdg n]
| n <- thePath path]
pprintProof proof mde =
case T.transformationProof proof of
Error e -> pprint e
tproof -> paragraph ("We employ '" ++ nm ++ "' using the following approximated dependency graph:")
$+$ pprintCWDG cwdg sig vars ppLabel
$+$ text ""
$+$ ppDetails
where cwdg = computedCongrDG tproof
sig = signature tproof
vars = variables tproof
nm | isLinearProof tproof = "linear path analysis"
| otherwise = "path analysis"
ppLabel pth _ = PP.brackets $ centering 20 $ ppMaybeAnswerOf (Path pth)
where centering n d = text $ take pre ss ++ s ++ take post ss
where s = show d
l = length s
ss = repeat ' '
pre = floor $ (fromIntegral (n - l) / 2.0 :: Double)
post = n - l - pre
ppMaybeAnswerOf pth = fromMaybe (text "?") (ppSpAns <|> ppSubsumed)
where ppSpAns = pprint `liftM` P.answer `liftM` findSubProof pth
ppSubsumed = const (text "subsumed") `liftM` List.lookup pth (subsumedBy tproof)
findSubProof pth = T.findProof (thePath pth) proof
ppPathName path = printPathName cwdg sig vars path
ppDetails = vcat $ List.intersperse (text "")
[ (text "*" <+> (underline (text "Path" <+> ppPathName path <> text ":" <+> ppMaybeAnswerOf path)
$+$ text ""
$+$ ppDetail path))
| path <- List.sortBy comparePath $ computedPaths tproof]
where comparePath p1 p2 = mkpath p1 `compare` mkpath p2
mkpath p = [congruence cwdg n | n <- thePath p]
ppDetail path = fromMaybe errMsg (ppsubsumed <|> ppsubproof)
where errMsg = text "CANNOT find proof of path" <+> ppPathName path <> text "."
<+> text "Propably computation has been aborted since some other path cannot be solved."
ppsubsumed = do paths <- List.lookup path (subsumedBy tproof)
return $ (text "This path is subsumed by the proof of paths"
<+> sep (punctuate (text ",") [ppPathName pth | pth <- paths])
<> text ".")
ppsubproof = do subproof <- findSubProof path
return $ P.pprintProof subproof mde
pprintTProof _ _ (Error e) _ = pprint e
pprintTProof _ _ tproof _ = block' "Transformation Details" [ppTrans]
where ppTrans = paragraph "Following congruence DG was used:"
$+$ text ""
$+$ pprintCWDG cwdg (signature tproof) (variables tproof) (\ _ _ -> text "")
cwdg = computedCongrDG tproof
pathAnalysisProcessor :: T.Transformation PathAnalysis P.AnyProcessor
pathAnalysisProcessor = T.Transformation PathAnalysis
-- | Implements path analysis. If the given argument is 'True', then
-- linear path analysis is employed.
pathAnalysis :: Bool -> T.TheTransformer PathAnalysis
pathAnalysis lin = T.Transformation PathAnalysis `T.withArgs` lin
|
mzini/TcT
|
source/Tct/Method/DP/PathAnalysis.hs
|
gpl-3.0
| 12,910
| 0
| 26
| 5,286
| 2,911
| 1,558
| 1,353
| 187
| 1
|
{-# LANGUAGE DeriveGeneric, TemplateHaskell #-}
{-|
Module : Database.DataType
Description : Contains small enumeration types used by the database layer.
This is a small module that contains simple enumeration types (ShapeType and PostType).
Note that we can't include this in "Database.Tables" because of
a restriction on Template Haskell.
-}
module Database.DataType where
import Data.Aeson
import Database.Persist.TH
import GHC.Generics
-- | Defines the datatype of a shape, used in the Shape JSON table.
data ShapeType = BoolNode | Node | Hybrid | Region
deriving (Show, Read, Eq, Generic)
-- | The call to [derivePersistField](https://hackage.haskell.org/package/persistent-template-2.5.1.6/docs/Database-Persist-TH.html#v:derivePersistField)
-- generates a PersistField instance; most importantly, it allows the data type
-- to be used as a column in the database.
derivePersistField "ShapeType"
-- | Results from call of [ToJSON](https://hackage.haskell.org/package/aeson-1.1.0.0/docs/Data-Aeson.html#t:ToJSON)
-- .
instance ToJSON ShapeType
-- | Results from call of [FromJSON](https://hackage.haskell.org/package/aeson-1.1.0.0/docs/Data-Aeson.html#t:FromJSON)
-- .
instance FromJSON ShapeType
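-- A small illustrative sketch (not part of the original module): with
-- aeson's default generic encoding for types whose constructors are all
-- nullary, a ShapeType is encoded as a plain JSON string of its
-- constructor name, e.g. @toJSON Node@ is the JSON string @"Node"@.
_nodeAsJson :: Value
_nodeAsJson = toJSON Node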
data PostType = Specialist | Major | Minor | Other
deriving (Show, Read, Eq, Generic)
derivePersistField "PostType"
instance ToJSON PostType
instance FromJSON PostType
|
Courseography/courseography
|
app/Database/DataType.hs
|
gpl-3.0
| 1,336
| 0
| 6
| 179
| 142
| 80
| 62
| 15
| 0
|
<?xml version='1.0' encoding='ISO-8859-1'?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0">
<title>S2P Help</title>
<maps>
<homeID>top</homeID>
<mapref location="map.xml"/>
</maps>
<view mergetype="javax.help.AppendMerge">
<name>TOC</name>
<label>Table of Contents</label>
<type>javax.help.TOCView</type>
<data>toc.xml</data>
</view>
<view xml:lang="en">
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<presentation default="true">
<name>main window</name>
<size width="1200" height="800" />
<location x="100" y="100"/>
<title>S2P Help</title>
<toolbar>
<helpaction>javax.help.BackAction</helpaction>
<helpaction>javax.help.ForwardAction</helpaction>
<helpaction image="homeicon">javax.help.HomeAction</helpaction>
</toolbar>
</presentation>
</helpset>
|
sing-group/S2P
|
s2p-aibench/src/main/resources/help/helpset.hs
|
gpl-3.0
| 1,134
| 89
| 66
| 199
| 471
| 233
| 238
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
module Sound.Tidal.Pattern where
import Control.Applicative
import Data.Monoid
import Data.Fixed
import Data.List
import Data.Maybe
import Data.Ratio
import Debug.Trace
import Data.Typeable
import Data.Function
import System.Random.Mersenne.Pure64
import Music.Theory.Bjorklund
import Sound.Tidal.Time
import Sound.Tidal.Utils
-- | The pattern datatype, a function from a time @Arc@ to @Event@
-- values. For discrete patterns, this returns the events which are
-- active during that time. For continuous patterns, events with
-- values for the midpoint of the given @Arc@ are returned.
data Pattern a = Pattern {arc :: Arc -> [Event a]}
-- | @show (p :: Pattern)@ returns a text string representing the
-- event values active during the first cycle of the given pattern.
instance (Show a) => Show (Pattern a) where
show p@(Pattern _) = intercalate " " $ map showEvent $ arc p (0, 1)
showTime t | denominator t == 1 = show (numerator t)
           | otherwise = show (numerator t) ++ ('/':show (denominator t))
showArc a = concat [showTime $ fst a, (' ':showTime (snd a))]
showEvent (a, b, v) | a == b = concat ["(", show v,
                                       (' ':showArc a),
                                       ")"
                                      ]
                    | otherwise = show v
instance Functor Pattern where
fmap f (Pattern a) = Pattern $ fmap (fmap (mapThd' f)) a
-- | @pure a@ returns a pattern with an event with value @a@, which
-- has a duration of one cycle, and repeats every cycle.
instance Applicative Pattern where
pure x = Pattern $ \(s, e) -> map
(\t -> ((t%1, (t+1)%1),
(t%1, (t+1)%1),
x
)
)
[floor s .. ((ceiling e) - 1)]
(Pattern fs) <*> (Pattern xs) =
Pattern $ \a -> concatMap applyX (fs a)
where applyX ((s,e), (s', e'), f) =
map (\(_, _, x) -> ((s,e), (s', e'), f x))
(filter
(\(_, a', _) -> isIn a' s)
(xs (s',e'))
)
-- | @mempty@ is a synonym for @silence@.
-- | @mappend@ is a synonym for @overlay@.
instance Monoid (Pattern a) where
mempty = silence
mappend = overlay
instance Monad Pattern where
return = pure
-- Pattern a -> (a -> Pattern b) -> Pattern b
-- Pattern Char -> (Char -> Pattern String) -> Pattern String
p >>= f = -- unwrap (f <$> p)
Pattern (\a -> concatMap
(\((s,e), (s',e'), x) -> map (\ev -> ((s,e), (s',e'), thd' ev)) $
filter
(\(a', _, _) -> isIn a' s)
(arc (f x) (s,e))
)
(arc p a)
)
-- join x = x >>= id
-- Take a pattern, and function from elements in the pattern to another pattern,
-- and then return that pattern
--bind :: Pattern a -> (a -> Pattern b) -> Pattern b
--bind p f =
-- this is actually join
unwrap :: Pattern (Pattern a) -> Pattern a
unwrap p = Pattern $ \a -> concatMap ((\p' -> arc p' a) . thd') (arc p a)
-- | @atom@ is a synonym for @pure@.
atom :: a -> Pattern a
atom = pure
-- | @silence@ returns a pattern with no events.
silence :: Pattern a
silence = Pattern $ const []
-- | @withQueryArc f p@ returns a new @Pattern@ with function @f@
-- applied to the @Arc@ values passed to the original @Pattern@ @p@.
withQueryArc :: (Arc -> Arc) -> Pattern a -> Pattern a
withQueryArc f p = Pattern $ \a -> arc p (f a)
-- | @withQueryTime f p@ returns a new @Pattern@ with function @f@
-- applied to the both the start and end @Time@ of the @Arc@ passed to
-- @Pattern@ @p@.
withQueryTime :: (Time -> Time) -> Pattern a -> Pattern a
withQueryTime = withQueryArc . mapArc
-- | @withResultArc f p@ returns a new @Pattern@ with function @f@
-- applied to the @Arc@ values in the events returned from the
-- original @Pattern@ @p@.
withResultArc :: (Arc -> Arc) -> Pattern a -> Pattern a
withResultArc f p = Pattern $ \a -> mapArcs f $ arc p a
-- | @withResultTime f p@ returns a new @Pattern@ with function @f@
-- applied to the both the start and end @Time@ of the @Arc@ values in
-- the events returned from the original @Pattern@ @p@.
withResultTime :: (Time -> Time) -> Pattern a -> Pattern a
withResultTime = withResultArc . mapArc
-- | @overlay@ combines two @Pattern@s into a new pattern, so that
-- their events are combined over time. This is the same as the infix
-- operator `<>`.
overlay :: Pattern a -> Pattern a -> Pattern a
overlay p p' = Pattern $ \a -> (arc p a) ++ (arc p' a)
-- | @stack@ combines a list of @Pattern@s into a new pattern, so that
-- their events are combined over time.
stack :: [Pattern a] -> Pattern a
stack ps = foldr overlay silence ps
-- | @append@ combines two patterns @Pattern@s into a new pattern, so
-- that the events of the second pattern are appended to those of the
-- first pattern, within a single cycle
append :: Pattern a -> Pattern a -> Pattern a
append a b = cat [a,b]
-- | @append'@ does the same as @append@, but over two cycles, so that
-- the cycles alternate between the two patterns.
append' :: Pattern a -> Pattern a -> Pattern a
append' a b = slow 2 $ cat [a,b]
-- | @cat@ returns a new pattern which interlaces the cycles of the
-- given patterns, within a single cycle. It's the equivalent of
-- @append@, but with a list of patterns.
cat :: [Pattern a] -> Pattern a
cat ps = density (fromIntegral $ length ps) $ slowcat ps
splitAtSam :: Pattern a -> Pattern a
splitAtSam p =
splitQueries $ Pattern $ \(s,e) -> mapSnds' (trimArc (sam s)) $ arc p (s,e)
where trimArc s' (s,e) = (max (s') s, min (s'+1) e)
-- | @slowcat@ does the same as @cat@, but maintaining the duration of
-- the original patterns. It is the equivalent of @append'@, but with
-- a list of patterns.
slowcat :: [Pattern a] -> Pattern a
slowcat [] = silence
slowcat ps = splitQueries $ Pattern f
where ps' = map splitAtSam ps
l = length ps'
f (s,e) = arc (withResultTime (+offset) p) (s',e')
where p = ps' !! n
r = (floor s) :: Int
n = (r `mod` l) :: Int
offset = (fromIntegral $ r - ((r - n) `div` l)) :: Time
(s', e') = (s-offset, e-offset)
-- | @listToPat@ turns the given list of values to a Pattern, which
-- cycles through the list.
listToPat :: [a] -> Pattern a
listToPat = cat . map atom
-- | @maybeListToPat@ is similar to @listToPat@, but allows values to
-- be optional using the @Maybe@ type, so that @Nothing@ results in
-- gaps in the pattern.
maybeListToPat :: [Maybe a] -> Pattern a
maybeListToPat = cat . map f
where f Nothing = silence
f (Just x) = atom x
-- | @run@ @n@ returns a pattern representing a cycle of numbers from @0@ to @n-1@.
run n = listToPat [0 .. n-1]
scan n = cat $ map run [1 .. n]
-- | @density@ returns the given pattern with density increased by the
-- given @Time@ factor. Therefore @density 2 p@ will return a pattern
-- that is twice as fast, and @density (1%3) p@ will return one three
-- times as slow.
density :: Time -> Pattern a -> Pattern a
density 0 p = silence
density 1 p = p
density r p = withResultTime (/ r) $ withQueryTime (* r) p
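-- Illustrative usage sketch (not part of the original module): doubling the
-- density of a single-event pattern yields two events per cycle.
_densityExample :: [Event String]
_densityExample = arc (density 2 $ atom "x") (0, 1)
-- the two events cover (0,1/2) and (1/2,1), both carrying the value "x"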
-- | @densityGap@ is similar to @density@ but maintains its cyclic
-- alignment. For example, @densityGap 2 p@ would squash the events in
-- pattern @p@ into the first half of each cycle (and the second
-- halves would be empty).
densityGap :: Time -> Pattern a -> Pattern a
densityGap 0 p = silence
densityGap r p = splitQueries $ withResultArc (\(s,e) -> (sam s + ((s - sam s)/r), (sam s + ((e - sam s)/r)))) $ Pattern (\a -> arc p $ mapArc (\t -> sam t + (min 1 (r * cyclePos t))) a)
-- | @slow@ does the opposite of @density@, i.e. @slow 2 p@ will
-- return a pattern that is half the speed.
slow :: Time -> Pattern a -> Pattern a
slow 0 = id
slow t = density (1/t)
-- | The @<~@ operator shifts (or rotates) a pattern to the left (or
-- counter-clockwise) by the given @Time@ value. For example
-- @(1%16) <~ p@ will return a pattern with all the events moved
-- one 16th of a cycle to the left.
(<~) :: Time -> Pattern a -> Pattern a
(<~) t p = withResultTime (subtract t) $ withQueryTime (+ t) p
-- | The @~>@ operator does the same as @<~@ but shifts events to the
-- right (or clockwise) rather than to the left.
(~>) :: Time -> Pattern a -> Pattern a
(~>) = (<~) . (0-)
brak :: Pattern a -> Pattern a
brak = when ((== 1) . (`mod` 2)) (((1%4) ~>) . (\x -> cat [x, silence]))
iter :: Int -> Pattern a -> Pattern a
iter n p = slowcat $ map (\i -> ((fromIntegral i)%(fromIntegral n)) <~ p) [0 .. n]
-- | @rev p@ returns @p@ with the event positions in each cycle
-- reversed (or mirrored).
rev :: Pattern a -> Pattern a
rev p = splitQueries $ Pattern $ \a -> mapArcs mirrorArc (arc p (mirrorArc a))
-- | @palindrome p@ applies @rev@ to @p@ every other cycle, so that
-- the pattern alternates between forwards and backwards.
palindrome p = append' p (rev p)
-- | @when test f p@ applies the function @f@ to @p@, but in a way
-- which only affects cycles where the @test@ function applied to the
-- cycle number returns @True@.
when :: (Int -> Bool) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
when test f p = splitQueries $ Pattern apply
where apply a | test (floor $ fst a) = (arc $ f p) a
| otherwise = (arc p) a
whenT :: (Time -> Bool) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
whenT test f p = splitQueries $ Pattern apply
where apply a | test (fst a) = (arc $ f p) a
| otherwise = (arc p) a
playWhen :: (Time -> Bool) -> Pattern a -> Pattern a
playWhen test (Pattern f) = Pattern $ (filter (\e -> test (eventOnset e))) . f
playFor :: Time -> Time -> Pattern a -> Pattern a
playFor s e = playWhen (\t -> and [t >= s, t < e])
seqP :: [(Time, Time, Pattern a)] -> Pattern a
seqP = stack . (map (\(s, e, p) -> playFor s e ((sam s) ~> p)))
-- | @every n f p@ applies the function @f@ to @p@, but only affects
-- every @n@ cycles.
every :: Int -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
every 0 f p = p
every n f p = when ((== 0) . (`mod` n)) f p
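-- Illustrative usage sketch (not part of the original module): reverse the
-- pattern on cycles 0, 2, 4, ... and leave the odd cycles untouched.
_everyExample :: Pattern String
_everyExample = every 2 rev (cat [atom "a", atom "b"])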
-- | @foldEvery ns f p@ applies @every n f@ to @p@ for each @n@ in
-- @ns@.
foldEvery :: [Int] -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
foldEvery ns f p = foldr ($) p (map (\x -> every x f) ns)
-- | @sig f@ takes a function from time to values, and turns it into a
-- @Pattern@.
sig :: (Time -> a) -> Pattern a
sig f = Pattern f'
where f' (s,e) | s > e = []
| otherwise = [((s,e), (s,e), f s)]
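-- Illustrative usage sketch (not part of the original module): a continuous
-- pattern built with @sig@ reports one value per query, sampled at the start
-- of the queried arc.
_sigExample :: [Event Double]
_sigExample = arc (sig fromRational) (1%4, 1%2)
-- yields a single event whose value is 0.25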
-- | @sinewave@ returns a @Pattern@ of continuous @Double@ values following a
-- sinewave with frequency of one cycle, and amplitude from -1 to 1.
sinewave :: Pattern Double
sinewave = sig $ \t -> sin $ pi * 2 * (fromRational t)
-- | @sine@ is a synonym for @sinewave@.
sine = sinewave
-- | @sinerat@ is equivalent to @sinewave@ for @Rational@ values,
-- suitable for use as @Time@ offsets.
sinerat = fmap toRational sine
ratsine = sinerat
-- | @sinewave1@ is equivalent to @sinewave@, but with amplitude from 0 to 1.
sinewave1 :: Pattern Double
sinewave1 = fmap ((/ 2) . (+ 1)) sinewave
-- | @sine1@ is a synonym for @sinewave1@.
sine1 = sinewave1
-- | @sinerat1@ is equivalent to @sinerat@, but with amplitude from 0 to 1.
sinerat1 = fmap toRational sine1
-- | @sineAmp1 d@ returns @sinewave1@ with its amplitude offset by @d@.
sineAmp1 :: Double -> Pattern Double
sineAmp1 offset = (+ offset) <$> sinewave1
-- | @sawwave@ is the equivalent of @sinewave@ for sawtooth waves.
sawwave :: Pattern Double
sawwave = ((subtract 1) . (* 2)) <$> sawwave1
-- | @saw@ is a synonym for @sawwave@.
saw = sawwave
-- | @sawrat@ is the same as @sawwave@ but returns @Rational@ values
-- suitable for use as @Time@ offsets.
sawrat = fmap toRational saw
sawwave1 :: Pattern Double
sawwave1 = sig $ \t -> mod' (fromRational t) 1
saw1 = sawwave1
sawrat1 = fmap toRational saw1
-- | @triwave@ is the equivalent of @sinewave@ for triangular waves.
triwave :: Pattern Double
triwave = ((subtract 1) . (* 2)) <$> triwave1
-- | @tri@ is a synonym for @triwave@.
tri = triwave
-- | @trirat@ is the same as @triwave@ but returns @Rational@ values
-- suitable for use as @Time@ offsets.
trirat = fmap toRational tri
triwave1 :: Pattern Double
triwave1 = append sawwave1 (rev sawwave1)
tri1 = triwave1
trirat1 = fmap toRational tri1
-- todo - triangular waves again
squarewave1 :: Pattern Double
squarewave1 = sig $
\t -> fromIntegral $ floor $ (mod' (fromRational t) 1) * 2
square1 = squarewave1
squarewave :: Pattern Double
squarewave = ((subtract 1) . (* 2)) <$> squarewave1
square = squarewave
-- | @envL@ is a @Pattern@ of continuous @Double@ values, representing
-- a linear interpolation between 0 and 1 during the first cycle, then
-- staying constant at 1 for all following cycles. Possibly only
-- useful if you're using something like the retrig function defined
-- in tidal.el.
envL :: Pattern Double
envL = sig $ \t -> max 0 $ min (fromRational t) 1
-- like envL but reversed.
envLR :: Pattern Double
envLR = (1-) <$> envL
-- 'Equal power' for gain-based transitions
envEq :: Pattern Double
envEq = sig $ \t -> sqrt (sin (pi/2 * (max 0 $ min (fromRational (1-t)) 1)))
-- Equal power reversed
envEqR = sig $ \t -> sqrt (cos (pi/2 * (max 0 $ min (fromRational (1-t)) 1)))
fadeOut :: Time -> Pattern a -> Pattern a
fadeOut n = spread' (degradeBy) (slow n $ envL)
-- Alternate versions where you can provide the time from which the fade starts
fadeOut' :: Time -> Time -> Pattern a -> Pattern a
fadeOut' from dur p = spread' (degradeBy) (from ~> slow dur envL) p
-- The 1 <~ is so fade ins and outs have different degradations
fadeIn' :: Time -> Time -> Pattern a -> Pattern a
fadeIn' from dur p = spread' (\n p -> 1 <~ degradeBy n p) (from ~> slow dur ((1-) <$> envL)) p
fadeIn :: Time -> Pattern a -> Pattern a
fadeIn n = spread' (degradeBy) (slow n $ (1-) <$> envL)
spread :: (a -> t -> Pattern b) -> [a] -> t -> Pattern b
spread f xs p = cat $ map (\x -> f x p) xs
slowspread :: (a -> t -> Pattern b) -> [a] -> t -> Pattern b
slowspread f xs p = slowcat $ map (\x -> f x p) xs
spread' :: (a -> Pattern b -> Pattern c) -> Pattern a -> Pattern b -> Pattern c
spread' f timepat pat =
Pattern $ \r -> concatMap (\(_,r', x) -> (arc (f x pat) r')) (rs r)
where rs r = arc (filterOnsetsInRange timepat) r
filterValues :: (a -> Bool) -> Pattern a -> Pattern a
filterValues f (Pattern x) = Pattern $ (filter (f . thd')) . x
-- Filter out events that have had their onsets cut off
filterOnsets :: Pattern a -> Pattern a
filterOnsets (Pattern f) =
Pattern $ (filter (\e -> eventOnset e >= eventStart e)) . f
-- Filter events which have onsets, which are within the given range
filterStartInRange :: Pattern a -> Pattern a
filterStartInRange (Pattern f) =
Pattern $ \(s,e) -> filter ((>= s) . eventOnset) $ f (s,e)
filterOnsetsInRange = filterOnsets . filterStartInRange
seqToRelOnsets :: Arc -> Pattern a -> [(Double, a)]
seqToRelOnsets (s, e) p = map (\((s', _), _, x) -> (fromRational $ (s'-s) / (e-s), x)) $ arc (filterOnsetsInRange p) (s, e)
segment :: Pattern a -> Pattern [a]
segment p = Pattern $ \(s,e) -> filter (\(_,(s',e'),_) -> s' < e && e' > s) $ groupByTime (segment' (arc p (s,e)))
segment' :: [Event a] -> [Event a]
segment' es = foldr split es pts
where pts = nub $ points es
split :: Time -> [Event a] -> [Event a]
split _ [] = []
split t ((ev@(a,(s,e), v)):es) | t > s && t < e = (a,(s,t),v):(a,(t,e),v):(split t es)
| otherwise = ev:split t es
points :: [Event a] -> [Time]
points [] = []
points ((_,(s,e), _):es) = s:e:(points es)
groupByTime :: [Event a] -> [Event [a]]
groupByTime es = map mrg $ groupBy ((==) `on` snd') $ sortBy (compare `on` snd') es
where mrg es@((a, a', _):_) = (a, a', map thd' es)
ifp :: (Int -> Bool) -> (Pattern a -> Pattern a) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
ifp test f1 f2 p = splitQueries $ Pattern apply
where apply a | test (floor $ fst a) = (arc $ f1 p) a
| otherwise = (arc $ f2 p) a
rand :: Pattern Double
rand = Pattern $ \a -> [(a, a, timeToRand $ (midPoint a))]
timeToRand t = fst $ randomDouble $ pureMT $ floor $ (*1000000) t
irand :: Double -> Pattern Int
irand i = (floor . (*i)) <$> rand
degradeBy :: Double -> Pattern a -> Pattern a
degradeBy x p = unMaybe $ (\a f -> toMaybe (f > x) a) <$> p <*> rand
where toMaybe False _ = Nothing
toMaybe True a = Just a
unMaybe = (fromJust <$>) . filterValues isJust
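-- Illustrative usage sketch (not part of the original module): randomly drop
-- roughly a quarter of the events of a four-step pattern.
_degradeExample :: Pattern String
_degradeExample = degradeBy 0.25 (cat [atom "a", atom "b", atom "c", atom "d"])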
unDegradeBy :: Double -> Pattern a -> Pattern a
unDegradeBy x p = unMaybe $ (\a f -> toMaybe (f <= x) a) <$> p <*> rand
where toMaybe False _ = Nothing
toMaybe True a = Just a
unMaybe = (fromJust <$>) . filterValues isJust
sometimesBy :: Double -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
sometimesBy x f p = overlay (degradeBy x p) (f $ unDegradeBy x p)
sometimes = sometimesBy 0.5
often = sometimesBy 0.75
rarely = sometimesBy 0.25
almostNever = sometimesBy 0.1
almostAlways = sometimesBy 0.9
degrade :: Pattern a -> Pattern a
degrade = degradeBy 0.5
-- | @wedge t p p'@ combines patterns @p@ and @p'@ by squashing the
-- @p@ into the portion of each cycle given by @t@, and @p'@ into the
-- remainder of each cycle.
wedge :: Time -> Pattern a -> Pattern a -> Pattern a
wedge t p p' = overlay (densityGap (1/t) p) (t ~> densityGap (1/(1-t)) p')
whenmod :: Int -> Int -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
whenmod a b = Sound.Tidal.Pattern.when ((\t -> (t `mod` a) >= b ))
superimpose f p = stack [p, f p]
-- | @splitQueries p@ wraps `p` to ensure that it does not get
-- queries that span arcs. For example `arc p (0.5, 1.5)` would be
-- turned into two queries, `(0.5,1)` and `(1,1.5)`, and the results
-- combined. Being able to assume queries don't span cycles often
-- makes transformations easier to specify.
splitQueries :: Pattern a -> Pattern a
splitQueries p = Pattern $ \a -> concatMap (arc p) $ arcCycles a
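-- Illustrative usage sketch (not part of the original module): a query that
-- spans a cycle boundary is split at the boundary before being passed on.
_splitExample :: [Event Int]
_splitExample = arc (splitQueries $ atom 0) (1%2, 3%2)
-- equivalent to arc (atom 0) (1%2, 1) ++ arc (atom 0) (1, 3%2)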
trunc :: Time -> Pattern a -> Pattern a
trunc t p = slow t $ splitQueries $ p'
where p' = Pattern $ \a -> mapArcs (stretch . trunc') $ arc p (trunc' a)
trunc' (s,e) = (min s ((sam s) + t), min e ((sam s) + t))
stretch (s,e) = (sam s + ((s - sam s) / t), sam s + ((e - sam s) / t))
zoom :: Arc -> Pattern a -> Pattern a
zoom a@(s,e) p = splitQueries $ withResultArc (mapCycle ((/d) . (subtract s))) $ withQueryArc (mapCycle ((+s) . (*d))) p
where d = e-s
compress :: Arc -> Pattern a -> Pattern a
compress a@(s,e) p | s >= e = silence
| otherwise = s ~> densityGap (1/(e-s)) p
sliceArc :: Arc -> Pattern a -> Pattern a
sliceArc a@(s,e) p | s >= e = silence
| otherwise = compress a $ zoom a p
-- @within@ uses @compress@ and @zoom@ to apply @f@ to only part of pattern @p@;
-- for example, @within (1%2, 3%4) ((1%8) <~) "bd sn bd cp"@ would shift only
-- the second @bd@
within :: Arc -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
within (s,e) f p = stack [sliceArc (0,s) p,
compress (s,e) $ f $ zoom (s,e) p,
sliceArc (e,1) p
]
revArc a = within a rev
e :: Int -> Int -> Pattern a -> Pattern a
e n k p = (flip const) <$> (filterValues (== True) $ listToPat $ bjorklund (n,k)) <*> p
e' :: Int -> Int -> Pattern a -> Pattern a
e' n k p = cat $ map (\x -> if x then p else silence) (bjorklund (n,k))
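-- Illustrative usage sketch (not part of the original module): distribute
-- three events over eight steps of a cycle, Euclidean-rhythm style.
_euclidExample :: Pattern String
_euclidExample = e 3 8 (atom "x")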
index :: Real b => b -> Pattern b -> Pattern c -> Pattern c
index sz indexpat pat = spread' (zoom' $ toRational sz) (toRational . (*(1-sz)) <$> indexpat) pat
where zoom' sz start = zoom (start, start+sz)
-- | @prrw f rot (blen, vlen) beatPattern valuePattern@: pattern rotate/replace, combining values with @f@.
prrw :: (a -> b -> c) -> Int -> (Time, Time) -> Pattern a -> Pattern b -> Pattern c
prrw f rot (blen, vlen) beatPattern valuePattern =
let
ecompare (_,e1,_) (_,e2,_) = compare (fst e1) (fst e2)
beats = sortBy ecompare $ arc beatPattern (0, blen)
values = fmap thd' . sortBy ecompare $ arc valuePattern (0, vlen)
cycles = blen * (fromIntegral $ lcm (length beats) (length values) `div` (length beats))
in
slow cycles $ stack $ zipWith
(\( _, (start, end), v') v -> (start ~>) $ densityGap (1 / (end - start)) $ pure (f v' v))
(sortBy ecompare $ arc (density cycles $ beatPattern) (0, blen))
(drop (rot `mod` length values) $ cycle values)
-- | @prr rot (blen, vlen) beatPattern valuePattern@: pattern rotate/replace.
prr :: Int -> (Time, Time) -> Pattern a -> Pattern a -> Pattern a
prr = prrw $ flip const
{-|
@preplace (blen, plen) beats values@ combines the timing of @beats@ with the values
of @values@. Other ways of saying this are:
* sequential convolution
* @values@ quantized to @beats@.
Examples:
@
d1 $ sound $ preplace (1,1) "x [~ x] x x" "bd sn"
d1 $ sound $ preplace (1,1) "x(3,8)" "bd sn"
d1 $ sound $ "x(3,8)" <~> "bd sn"
d1 $ sound "[jvbass jvbass:5]*3" |+| (shape $ "1 1 1 1 1" <~> "0.2 0.9")
@
It is assumed the pattern fits into a single cycle. This works well with
pattern literals, but not always with patterns defined elsewhere. In those cases
use @prr@ and provide desired pattern lengths:
@
let p = slow 2 $ "x x x"
d1 $ sound $ prr 0 (2,1) p "bd sn"
@
-}
preplace :: (Time, Time) -> Pattern a -> Pattern a -> Pattern a
preplace = preplaceWith $ flip const
prep = preplace
preplace1 :: Pattern a -> Pattern a -> Pattern a
preplace1 = prr 0 (1, 1)
preplaceWith :: (a -> b -> c) -> (Time, Time) -> Pattern a -> Pattern b -> Pattern c
preplaceWith f (blen, plen) = prrw f 0 (blen, plen)
prw = preplaceWith
preplaceWith1 :: (a -> b -> c) -> Pattern a -> Pattern b -> Pattern c
preplaceWith1 f = prrw f 0 (1, 1)
prw1 = preplaceWith1
(<~>) :: Pattern a -> Pattern a -> Pattern a
(<~>) = preplace (1, 1)
-- | @protate len rot p@ rotates pattern @p@ by @rot@ beats to the left.
-- @len@: length of the pattern, in cycles.
-- Example: @d1 $ every 4 (protate 2 (-1)) $ slow 2 $ sound "bd hh hh hh"@
protate :: Time -> Int -> Pattern a -> Pattern a
protate len rot p = prr rot (len, len) p p
prot = protate
prot1 = protate 1
{-| The @<<~@ operator rotates a unit pattern to the left, similar to @<~@,
but by events rather than linear time. The timing of the pattern remains constant:
@
d1 $ (1 <<~) $ sound "bd ~ sn hh"
-- will become
d1 $ sound "sn ~ hh bd"
@ -}
(<<~) :: Int -> Pattern a -> Pattern a
(<<~) = protate 1
(~>>) :: Int -> Pattern a -> Pattern a
(~>>) = (<<~) . (0-)
-- | @pequal cycles p1 p2@: quickly test if @p1@ and @p2@ are the same.
pequal :: Ord a => Time -> Pattern a -> Pattern a -> Bool
pequal cycles p1 p2 = (sort $ arc p1 (0, cycles)) == (sort $ arc p2 (0, cycles))
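-- Illustrative usage sketch (not part of the original module): @append a b@
-- and @cat [a,b]@ agree when compared over two whole cycles.
_pequalExample :: Bool
_pequalExample = pequal 2 (append (atom 'a') (atom 'b')) (cat [atom 'a', atom 'b'])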
-- | @discretise n p@: 'samples' the pattern @p@ at a rate of @n@
-- events per cycle. Useful for turning a continuous pattern into a
-- discrete one.
discretise :: Time -> Pattern a -> Pattern a
discretise n p = density n $ (atom (id)) <*> p
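-- Illustrative usage sketch (not part of the original module): sample the
-- continuous sine pattern four times per cycle, making it discrete.
_discretiseExample :: Pattern Double
_discretiseExample = discretise 4 sinewave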
-- | @randcat ps@: does a @slowcat@ on the list of patterns @ps@ but
-- randomises the order in which they are played.
randcat :: [Pattern a] -> Pattern a
randcat ps = spread' (<~) (discretise 1 $ ((%1) . fromIntegral) <$> irand (fromIntegral $ length ps)) (slowcat ps)
toMIDI :: Pattern String -> Pattern Int
toMIDI p = fromJust <$> (filterValues (isJust) (noteLookup <$> p))
where
noteLookup [] = Nothing
noteLookup s | last s `elem` ['0' .. '9'] = elemIndex s names
| otherwise = noteLookup (s ++ "5")
names = take 128 [(n ++ show o)
| o <- octaves,
n <- notes
]
notes = ["c","cs","d","ds","e","f","fs","g","gs","a","as","b"]
octaves = [0 .. 10]
tom = toMIDI
fit :: Int -> [a] -> Pattern Int -> Pattern a
fit perCycle xs p = (xs !!!) <$> (Pattern $ \a -> map ((\e -> (mapThd' (+ (cyclePos perCycle e)) e))) (arc p a))
where cyclePos perCycle e = perCycle * (floor $ eventStart e)
|
kindohm/Tidal
|
Sound/Tidal/Pattern.hs
|
gpl-3.0
| 24,226
| 0
| 18
| 5,917
| 8,261
| 4,378
| 3,883
| 341
| 2
|
-- Work in progress
{-# LANGUAGE GeneralizedNewtypeDeriving, TypeFamilies #-}
module Tests.Names where
import Control.Monad.Trans.FastWriter (Writer, runWriter)
import Control.Monad.Unit (Unit(..))
import Control.Monad.Writer (MonadWriter(..))
import Lamdu.Data.Anchors (anonTag)
import qualified Lamdu.I18N.Name as Texts
import Lamdu.Name (Name)
import qualified Lamdu.Name as Name
import Lamdu.Sugar.Names.Add (InternalName(..), addToWorkAreaTest)
import Lamdu.Sugar.Names.CPS (liftCPS)
import qualified Lamdu.Sugar.Names.Walk as Walk
import qualified Lamdu.Sugar.Types as Sugar
import qualified Test.Lamdu.Env as Env
import Test.Lamdu.SugarStubs ((~>))
import qualified Test.Lamdu.SugarStubs as Stub
import Test.Lamdu.Prelude
newtype CollectNames name a = CollectNames { runCollectNames :: Writer [name] a }
deriving newtype (Functor, Applicative, Monad, MonadWriter [name])
instance Walk.MonadNaming (CollectNames name) where
type OldName (CollectNames name) = name
type NewName (CollectNames name) = name
type IM (CollectNames name) = Identity
opGetName _ _ _ x = x <$ tell [x]
opWithName _ _ x = x <$ liftCPS (tell [x])
opRun = pure (pure . fst . runWriter . runCollectNames)
opWithNewTag _ _ = id
test :: Test
test =
testGroup "Disambiguation"
[ testCase "disambiguation(#396)" workArea396
, testCase "globals collide" workAreaGlobals
, testCase "anonymous globals" anonGlobals
]
nameTexts :: Texts.Name Text
nameTexts =
Texts.Name
{ Texts._unnamed = "Unnamed"
, Texts._emptyName = "empty"
}
assertNoCollisions :: Name -> IO ()
assertNoCollisions name =
case Name.visible name nameTexts of
(Name.TagText _ Name.NoCollision, Name.NoCollision) -> pure ()
(Name.TagText text textCollision, tagCollision) ->
unwords
[ "Unexpected collision for name", show text
, show textCollision, show tagCollision
] & assertString
testWorkArea ::
(Name -> IO b) ->
Sugar.WorkArea
(Sugar.Annotation (Sugar.EvaluationScopes InternalName Identity) InternalName)
InternalName Identity Unit
(Sugar.Payload (Sugar.Annotation (Sugar.EvaluationScopes InternalName Identity) InternalName) Unit) ->
IO ()
testWorkArea verifyName inputWorkArea =
do
lang <- Env.makeLang
addToWorkAreaTest lang Stub.getName inputWorkArea
& runIdentity
& getNames
& traverse_ verifyName
getNames ::
Sugar.WorkArea (Sugar.Annotation (Sugar.EvaluationScopes name Identity) name) name Identity o
(Sugar.Payload (Sugar.Annotation (Sugar.EvaluationScopes name Identity) name) o) ->
[name]
getNames workArea =
Walk.toWorkAreaTest workArea
& runCollectNames
& runWriter
& snd
--- test inputs:
workArea396 :: IO ()
workArea396 =
Sugar.WorkArea
{ Sugar._waRepl = Stub.repl lamExpr
, Sugar._waPanes =
[ Stub.funcExpr "paneVar" "num" Stub.hole & Sugar.BodyFunction & Stub.node
& Stub.def lamType "def" "def"
& Stub.pane
]
, Sugar._waGlobals = Sugar.Globals (pure []) (pure []) (pure [])
} & testWorkArea assertNoCollisions
where
lamType = Stub.numType ~> Stub.numType
lamExpr =
Sugar.BodyLam Sugar.Lambda
{ Sugar._lamLightweight = False
, Sugar._lamApplyLimit = Sugar.UnlimitedFuncApply
, Sugar._lamFunc = Stub.funcExpr "lamVar" "num" Stub.hole
} & Stub.expr
workAreaGlobals :: IO ()
workAreaGlobals =
Sugar.WorkArea
{ Sugar._waRepl = Stub.repl Stub.hole
, Sugar._waPanes =
-- 2 defs sharing the same tag with different Vars/UUIDs,
-- should collide with ordinary suffixes
[ Stub.def Stub.numType "def1" "def" trivialBinder & Stub.pane
, Stub.def Stub.numType "def2" "def" trivialBinder & Stub.pane
]
, Sugar._waGlobals = Sugar.Globals (pure []) (pure []) (pure [])
} & testWorkArea verifyName
where
verifyName name =
case Name.visible name nameTexts of
(Name.TagText _ Name.NoCollision, Name.NoCollision) -> pure ()
(Name.TagText _ Name.NoCollision, Name.Collision _) -> pure ()
(Name.TagText text textCollision, tagCollision) ->
unwords
[ "Unexpected/bad collision for name", show text
, show textCollision, show tagCollision
] & assertString
trivialBinder ::
Annotated (Sugar.Payload (Sugar.Annotation v InternalName) Unit) #
Sugar.Assignment
(Sugar.Annotation (Sugar.EvaluationScopes InternalName Identity) InternalName)
InternalName Identity Unit
trivialBinder =
Sugar.Hole mempty mempty & Sugar.LeafHole & Sugar.BodyLeaf & Sugar.BinderTerm
& Sugar.Binder Unit & Sugar.AssignPlain Unit
& Sugar.BodyPlain
& Ann (Const Stub.payload)
anonGlobals :: IO ()
anonGlobals =
Sugar.WorkArea
{ Sugar._waRepl = Stub.repl Stub.hole
, Sugar._waPanes =
-- 2 defs sharing the same tag with different Vars/UUIDs,
-- should collide with ordinary suffixes
[ Stub.def Stub.numType "def1" anonTag trivialBinder & Stub.pane
, Stub.def Stub.numType "def2" anonTag trivialBinder & Stub.pane
]
, Sugar._waGlobals = Sugar.Globals (pure []) (pure []) (pure [])
} & testWorkArea (\x -> length (show x) `seq` pure ())
|
lamdu/lamdu
|
test/Tests/Names.hs
|
gpl-3.0
| 5,509
| 0
| 14
| 1,364
| 1,557
| 831
| 726
| -1
| -1
|
module Language.Untyped.PrettyPrinting
( showTerm
, toPP
) where
import Language.Untyped.Context
import Language.Untyped.Syntax
import Printcess.PrettyPrinting
toPP :: Context -> NamelessTerm -> Term
toPP ctx (NmVar index)
= case fromIndex ctx index of
Left e -> error (show e) --BadTerm e
Right name -> Var name
toPP ctx (NmAbs name term)
= let (ctx', name') = pickFreshName ctx name
in Abs name' (toPP ctx' term)
toPP ctx (NmApp t1 t2)
= App (toPP ctx t1) (toPP ctx t2)
instance Pretty Term where
pp (Var name) = pp name
pp (Abs name term) = assocR 0 $ "λ" +> I name +> "." ~> R term
pp (App t1 t2) = assocL 9 $ L t1 ~> R t2
-- pp (BadTerm (Unbound name)) = pp $ "Unbound: " ++ name
-- pp (BadTerm NotFound) = pp "Identifier not found at: "
showTerm :: Context -> NamelessTerm -> PrettyM ()
showTerm ctx = pp . toPP ctx
|
juanbono/tapl-haskell
|
untyped/src/Language/Untyped/PrettyPrinting.hs
|
gpl-3.0
| 913
| 0
| 10
| 244
| 328
| 163
| 165
| 22
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.GAN.Advertisers.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves data about all advertisers that the requesting
-- advertiser\/publisher has access to.
--
-- /See:/ <https://developers.google.com/affiliate-network/ Google Affiliate Network API Reference> for @gan.advertisers.list@.
module Network.Google.Resource.GAN.Advertisers.List
(
-- * REST Resource
AdvertisersListResource
-- * Creating a Request
, advertisersList
, AdvertisersList
-- * Request Lenses
, alRelationshipStatus
, alMinSevenDayEpc
, alRoleId
, alMinNinetyDayEpc
, alRole
, alMinPayoutRank
, alAdvertiserCategory
, alPageToken
, alMaxResults
) where
import Network.Google.Affiliates.Types
import Network.Google.Prelude
-- | A resource alias for @gan.advertisers.list@ method which the
-- 'AdvertisersList' request conforms to.
type AdvertisersListResource =
"gan" :>
"v1beta1" :>
Capture "role" AdvertisersListRole :>
Capture "roleId" Text :>
"advertisers" :>
QueryParam "relationshipStatus"
AdvertisersListRelationshipStatus
:>
QueryParam "minSevenDayEpc" (Textual Double) :>
QueryParam "minNinetyDayEpc" (Textual Double) :>
QueryParam "minPayoutRank" (Textual Int32) :>
QueryParam "advertiserCategory" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :> Get '[JSON] Advertisers
-- | Retrieves data about all advertisers that the requesting
-- advertiser\/publisher has access to.
--
-- /See:/ 'advertisersList' smart constructor.
data AdvertisersList = AdvertisersList'
{ _alRelationshipStatus :: !(Maybe AdvertisersListRelationshipStatus)
, _alMinSevenDayEpc :: !(Maybe (Textual Double))
, _alRoleId :: !Text
, _alMinNinetyDayEpc :: !(Maybe (Textual Double))
, _alRole :: !AdvertisersListRole
, _alMinPayoutRank :: !(Maybe (Textual Int32))
, _alAdvertiserCategory :: !(Maybe Text)
, _alPageToken :: !(Maybe Text)
, _alMaxResults :: !(Maybe (Textual Word32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AdvertisersList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'alRelationshipStatus'
--
-- * 'alMinSevenDayEpc'
--
-- * 'alRoleId'
--
-- * 'alMinNinetyDayEpc'
--
-- * 'alRole'
--
-- * 'alMinPayoutRank'
--
-- * 'alAdvertiserCategory'
--
-- * 'alPageToken'
--
-- * 'alMaxResults'
advertisersList
:: Text -- ^ 'alRoleId'
-> AdvertisersListRole -- ^ 'alRole'
-> AdvertisersList
advertisersList pAlRoleId_ pAlRole_ =
AdvertisersList'
{ _alRelationshipStatus = Nothing
, _alMinSevenDayEpc = Nothing
, _alRoleId = pAlRoleId_
, _alMinNinetyDayEpc = Nothing
, _alRole = pAlRole_
, _alMinPayoutRank = Nothing
, _alAdvertiserCategory = Nothing
, _alPageToken = Nothing
, _alMaxResults = Nothing
}
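-- A hypothetical usage sketch (illustrative only, not generated code). Only
-- 'alRoleId' and 'alRole' are required; the optional fields default to
-- 'Nothing' and can be set afterwards with the lenses defined below, assuming
-- the usual lens operators ('&', '?~') are in scope, e.g.
--
--   advertisersList "12345" role
--     & alMaxResults ?~ 10
--     & alMinPayoutRank ?~ 2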
-- | Filters out all advertisers that do not have the given relationship
-- status with the requesting publisher.
alRelationshipStatus :: Lens' AdvertisersList (Maybe AdvertisersListRelationshipStatus)
alRelationshipStatus
= lens _alRelationshipStatus
(\ s a -> s{_alRelationshipStatus = a})
-- | Filters out all advertisers that have a seven day EPC average lower than
-- the given value (inclusive). Min value: 0.0. Optional.
alMinSevenDayEpc :: Lens' AdvertisersList (Maybe Double)
alMinSevenDayEpc
= lens _alMinSevenDayEpc
(\ s a -> s{_alMinSevenDayEpc = a})
. mapping _Coerce
-- | The ID of the requesting advertiser or publisher.
alRoleId :: Lens' AdvertisersList Text
alRoleId = lens _alRoleId (\ s a -> s{_alRoleId = a})
-- | Filters out all advertisers that have a ninety day EPC average lower
-- than the given value (inclusive). Min value: 0.0. Optional.
alMinNinetyDayEpc :: Lens' AdvertisersList (Maybe Double)
alMinNinetyDayEpc
= lens _alMinNinetyDayEpc
(\ s a -> s{_alMinNinetyDayEpc = a})
. mapping _Coerce
-- | The role of the requester. Valid values: \'advertisers\' or
-- \'publishers\'.
alRole :: Lens' AdvertisersList AdvertisersListRole
alRole = lens _alRole (\ s a -> s{_alRole = a})
-- | A value between 1 and 4, where 1 represents the quartile of advertisers
-- with the lowest ranks and 4 represents the quartile of advertisers with
-- the highest ranks. Filters out all advertisers with a lower rank than
-- the given quartile. For example if a 2 was given only advertisers with a
-- payout rank of 25 or higher would be included. Optional.
alMinPayoutRank :: Lens' AdvertisersList (Maybe Int32)
alMinPayoutRank
= lens _alMinPayoutRank
(\ s a -> s{_alMinPayoutRank = a})
. mapping _Coerce
-- | Caret (^) delimited list of advertiser categories. Valid categories are
-- defined here:
-- http:\/\/www.google.com\/support\/affiliatenetwork\/advertiser\/bin\/answer.py?hl=en&answer=107581.
-- Filters out all advertisers not in one of the given advertiser
-- categories. Optional.
alAdvertiserCategory :: Lens' AdvertisersList (Maybe Text)
alAdvertiserCategory
= lens _alAdvertiserCategory
(\ s a -> s{_alAdvertiserCategory = a})
-- | The value of \'nextPageToken\' from the previous page. Optional.
alPageToken :: Lens' AdvertisersList (Maybe Text)
alPageToken
= lens _alPageToken (\ s a -> s{_alPageToken = a})
-- | Max number of items to return in this page. Optional. Defaults to 20.
alMaxResults :: Lens' AdvertisersList (Maybe Word32)
alMaxResults
= lens _alMaxResults (\ s a -> s{_alMaxResults = a})
. mapping _Coerce
instance GoogleRequest AdvertisersList where
type Rs AdvertisersList = Advertisers
type Scopes AdvertisersList = '[]
requestClient AdvertisersList'{..}
= go _alRole _alRoleId _alRelationshipStatus
_alMinSevenDayEpc
_alMinNinetyDayEpc
_alMinPayoutRank
_alAdvertiserCategory
_alPageToken
_alMaxResults
(Just AltJSON)
affiliatesService
where go
= buildClient
(Proxy :: Proxy AdvertisersListResource)
mempty
|
rueshyna/gogol
|
gogol-affiliates/gen/Network/Google/Resource/GAN/Advertisers/List.hs
|
mpl-2.0
| 7,199
| 0
| 20
| 1,693
| 1,026
| 591
| 435
| 139
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DLP.Organizations.StoredInfoTypes.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a pre-built stored infoType to be used for inspection. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-stored-infotypes to
-- learn more.
--
-- /See:/ <https://cloud.google.com/dlp/docs/ Cloud Data Loss Prevention (DLP) API Reference> for @dlp.organizations.storedInfoTypes.create@.
module Network.Google.Resource.DLP.Organizations.StoredInfoTypes.Create
(
-- * REST Resource
OrganizationsStoredInfoTypesCreateResource
-- * Creating a Request
, organizationsStoredInfoTypesCreate
, OrganizationsStoredInfoTypesCreate
-- * Request Lenses
, ositcParent
, ositcXgafv
, ositcUploadProtocol
, ositcAccessToken
, ositcUploadType
, ositcPayload
, ositcCallback
) where
import Network.Google.DLP.Types
import Network.Google.Prelude
-- | A resource alias for @dlp.organizations.storedInfoTypes.create@ method which the
-- 'OrganizationsStoredInfoTypesCreate' request conforms to.
type OrganizationsStoredInfoTypesCreateResource =
"v2" :>
Capture "parent" Text :>
"storedInfoTypes" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GooglePrivacyDlpV2CreateStoredInfoTypeRequest
:> Post '[JSON] GooglePrivacyDlpV2StoredInfoType
-- | Creates a pre-built stored infoType to be used for inspection. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-stored-infotypes to
-- learn more.
--
-- /See:/ 'organizationsStoredInfoTypesCreate' smart constructor.
data OrganizationsStoredInfoTypesCreate =
OrganizationsStoredInfoTypesCreate'
{ _ositcParent :: !Text
, _ositcXgafv :: !(Maybe Xgafv)
, _ositcUploadProtocol :: !(Maybe Text)
, _ositcAccessToken :: !(Maybe Text)
, _ositcUploadType :: !(Maybe Text)
, _ositcPayload :: !GooglePrivacyDlpV2CreateStoredInfoTypeRequest
, _ositcCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsStoredInfoTypesCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ositcParent'
--
-- * 'ositcXgafv'
--
-- * 'ositcUploadProtocol'
--
-- * 'ositcAccessToken'
--
-- * 'ositcUploadType'
--
-- * 'ositcPayload'
--
-- * 'ositcCallback'
organizationsStoredInfoTypesCreate
:: Text -- ^ 'ositcParent'
-> GooglePrivacyDlpV2CreateStoredInfoTypeRequest -- ^ 'ositcPayload'
-> OrganizationsStoredInfoTypesCreate
organizationsStoredInfoTypesCreate pOsitcParent_ pOsitcPayload_ =
OrganizationsStoredInfoTypesCreate'
{ _ositcParent = pOsitcParent_
, _ositcXgafv = Nothing
, _ositcUploadProtocol = Nothing
, _ositcAccessToken = Nothing
, _ositcUploadType = Nothing
, _ositcPayload = pOsitcPayload_
, _ositcCallback = Nothing
}
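-- A hypothetical usage sketch (illustrative only, not generated code): given
-- a request body 'body' of type 'GooglePrivacyDlpV2CreateStoredInfoTypeRequest',
-- a request value is built from the required parent resource name and payload:
--
--   organizationsStoredInfoTypesCreate "organizations/12345" body
--
-- The optional query parameters default to 'Nothing' and can be adjusted via
-- the lenses defined below.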
-- | Required. Parent resource name. The format of this value varies
-- depending on the scope of the request (project or organization) and
-- whether you have [specified a processing
-- location](https:\/\/cloud.google.com\/dlp\/docs\/specifying-location): +
-- Projects scope, location specified:
-- \`projects\/\`PROJECT_ID\`\/locations\/\`LOCATION_ID + Projects scope,
-- no location specified (defaults to global): \`projects\/\`PROJECT_ID +
-- Organizations scope, location specified:
-- \`organizations\/\`ORG_ID\`\/locations\/\`LOCATION_ID + Organizations
-- scope, no location specified (defaults to global):
-- \`organizations\/\`ORG_ID The following example \`parent\` string
-- specifies a parent project with the identifier \`example-project\`, and
-- specifies the \`europe-west3\` location for processing data:
-- parent=projects\/example-project\/locations\/europe-west3
ositcParent :: Lens' OrganizationsStoredInfoTypesCreate Text
ositcParent
= lens _ositcParent (\ s a -> s{_ositcParent = a})
-- | V1 error format.
ositcXgafv :: Lens' OrganizationsStoredInfoTypesCreate (Maybe Xgafv)
ositcXgafv
= lens _ositcXgafv (\ s a -> s{_ositcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ositcUploadProtocol :: Lens' OrganizationsStoredInfoTypesCreate (Maybe Text)
ositcUploadProtocol
= lens _ositcUploadProtocol
(\ s a -> s{_ositcUploadProtocol = a})
-- | OAuth access token.
ositcAccessToken :: Lens' OrganizationsStoredInfoTypesCreate (Maybe Text)
ositcAccessToken
= lens _ositcAccessToken
(\ s a -> s{_ositcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ositcUploadType :: Lens' OrganizationsStoredInfoTypesCreate (Maybe Text)
ositcUploadType
= lens _ositcUploadType
(\ s a -> s{_ositcUploadType = a})
-- | Multipart request metadata.
ositcPayload :: Lens' OrganizationsStoredInfoTypesCreate GooglePrivacyDlpV2CreateStoredInfoTypeRequest
ositcPayload
= lens _ositcPayload (\ s a -> s{_ositcPayload = a})
-- | JSONP
ositcCallback :: Lens' OrganizationsStoredInfoTypesCreate (Maybe Text)
ositcCallback
= lens _ositcCallback
(\ s a -> s{_ositcCallback = a})
instance GoogleRequest
OrganizationsStoredInfoTypesCreate
where
type Rs OrganizationsStoredInfoTypesCreate =
GooglePrivacyDlpV2StoredInfoType
type Scopes OrganizationsStoredInfoTypesCreate =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient OrganizationsStoredInfoTypesCreate'{..}
= go _ositcParent _ositcXgafv _ositcUploadProtocol
_ositcAccessToken
_ositcUploadType
_ositcCallback
(Just AltJSON)
_ositcPayload
dLPService
where go
= buildClient
(Proxy ::
Proxy OrganizationsStoredInfoTypesCreateResource)
mempty
|
brendanhay/gogol
|
gogol-dlp/gen/Network/Google/Resource/DLP/Organizations/StoredInfoTypes/Create.hs
|
mpl-2.0
| 6,855
| 0
| 17
| 1,381
| 796
| 471
| 325
| 120
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Admin.Customer.Devices.Chromeos.IssueCommand
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Issues a command for the device to execute.
--
-- /See:/ <https://developers.google.com/admin-sdk/ Admin SDK API Reference> for @admin.customer.devices.chromeos.issueCommand@.
module Network.Google.Resource.Admin.Customer.Devices.Chromeos.IssueCommand
(
-- * REST Resource
CustomerDevicesChromeosIssueCommandResource
-- * Creating a Request
, customerDevicesChromeosIssueCommand
, CustomerDevicesChromeosIssueCommand
-- * Request Lenses
, cdcicXgafv
, cdcicUploadProtocol
, cdcicAccessToken
, cdcicUploadType
, cdcicPayload
, cdcicCustomerId
, cdcicDeviceId
, cdcicCallback
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @admin.customer.devices.chromeos.issueCommand@ method which the
-- 'CustomerDevicesChromeosIssueCommand' request conforms to.
type CustomerDevicesChromeosIssueCommandResource =
"admin" :>
"directory" :>
"v1" :>
"customer" :>
Capture "customerId" Text :>
"devices" :>
"chromeos" :>
CaptureMode "deviceId" "issueCommand" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
DirectoryChromeosDevicesIssueCommandRequest
:>
Post '[JSON]
DirectoryChromeosDevicesIssueCommandResponse
-- | Issues a command for the device to execute.
--
-- /See:/ 'customerDevicesChromeosIssueCommand' smart constructor.
data CustomerDevicesChromeosIssueCommand =
CustomerDevicesChromeosIssueCommand'
{ _cdcicXgafv :: !(Maybe Xgafv)
, _cdcicUploadProtocol :: !(Maybe Text)
, _cdcicAccessToken :: !(Maybe Text)
, _cdcicUploadType :: !(Maybe Text)
, _cdcicPayload :: !DirectoryChromeosDevicesIssueCommandRequest
, _cdcicCustomerId :: !Text
, _cdcicDeviceId :: !Text
, _cdcicCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CustomerDevicesChromeosIssueCommand' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cdcicXgafv'
--
-- * 'cdcicUploadProtocol'
--
-- * 'cdcicAccessToken'
--
-- * 'cdcicUploadType'
--
-- * 'cdcicPayload'
--
-- * 'cdcicCustomerId'
--
-- * 'cdcicDeviceId'
--
-- * 'cdcicCallback'
customerDevicesChromeosIssueCommand
:: DirectoryChromeosDevicesIssueCommandRequest -- ^ 'cdcicPayload'
-> Text -- ^ 'cdcicCustomerId'
-> Text -- ^ 'cdcicDeviceId'
-> CustomerDevicesChromeosIssueCommand
customerDevicesChromeosIssueCommand pCdcicPayload_ pCdcicCustomerId_ pCdcicDeviceId_ =
CustomerDevicesChromeosIssueCommand'
{ _cdcicXgafv = Nothing
, _cdcicUploadProtocol = Nothing
, _cdcicAccessToken = Nothing
, _cdcicUploadType = Nothing
, _cdcicPayload = pCdcicPayload_
, _cdcicCustomerId = pCdcicCustomerId_
, _cdcicDeviceId = pCdcicDeviceId_
, _cdcicCallback = Nothing
}
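-- A hypothetical usage sketch (illustrative only, not generated code):
--
--   customerDevicesChromeosIssueCommand cmdBody "my_customer" deviceId
--
-- where 'cmdBody' is a 'DirectoryChromeosDevicesIssueCommandRequest' and
-- 'deviceId' identifies the Chrome OS device; optional query parameters can
-- be set afterwards through the lenses defined below.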
-- | V1 error format.
cdcicXgafv :: Lens' CustomerDevicesChromeosIssueCommand (Maybe Xgafv)
cdcicXgafv
= lens _cdcicXgafv (\ s a -> s{_cdcicXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cdcicUploadProtocol :: Lens' CustomerDevicesChromeosIssueCommand (Maybe Text)
cdcicUploadProtocol
= lens _cdcicUploadProtocol
(\ s a -> s{_cdcicUploadProtocol = a})
-- | OAuth access token.
cdcicAccessToken :: Lens' CustomerDevicesChromeosIssueCommand (Maybe Text)
cdcicAccessToken
= lens _cdcicAccessToken
(\ s a -> s{_cdcicAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cdcicUploadType :: Lens' CustomerDevicesChromeosIssueCommand (Maybe Text)
cdcicUploadType
= lens _cdcicUploadType
(\ s a -> s{_cdcicUploadType = a})
-- | Multipart request metadata.
cdcicPayload :: Lens' CustomerDevicesChromeosIssueCommand DirectoryChromeosDevicesIssueCommandRequest
cdcicPayload
= lens _cdcicPayload (\ s a -> s{_cdcicPayload = a})
-- | Immutable. Immutable ID of the Google Workspace account.
cdcicCustomerId :: Lens' CustomerDevicesChromeosIssueCommand Text
cdcicCustomerId
= lens _cdcicCustomerId
(\ s a -> s{_cdcicCustomerId = a})
-- | Immutable. Immutable ID of Chrome OS Device.
cdcicDeviceId :: Lens' CustomerDevicesChromeosIssueCommand Text
cdcicDeviceId
= lens _cdcicDeviceId
(\ s a -> s{_cdcicDeviceId = a})
-- | JSONP
cdcicCallback :: Lens' CustomerDevicesChromeosIssueCommand (Maybe Text)
cdcicCallback
= lens _cdcicCallback
(\ s a -> s{_cdcicCallback = a})
instance GoogleRequest
CustomerDevicesChromeosIssueCommand
where
type Rs CustomerDevicesChromeosIssueCommand =
DirectoryChromeosDevicesIssueCommandResponse
type Scopes CustomerDevicesChromeosIssueCommand =
'["https://www.googleapis.com/auth/admin.directory.device.chromeos"]
requestClient
CustomerDevicesChromeosIssueCommand'{..}
= go _cdcicCustomerId _cdcicDeviceId _cdcicXgafv
_cdcicUploadProtocol
_cdcicAccessToken
_cdcicUploadType
_cdcicCallback
(Just AltJSON)
_cdcicPayload
directoryService
where go
= buildClient
(Proxy ::
Proxy CustomerDevicesChromeosIssueCommandResource)
mempty
|
brendanhay/gogol
|
gogol-admin-directory/gen/Network/Google/Resource/Admin/Customer/Devices/Chromeos/IssueCommand.hs
|
mpl-2.0
| 6,661
| 0
| 22
| 1,624
| 873
| 507
| 366
| 140
| 1
|
{-
This file is part of Tractor.
Tractor is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Tractor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Tractor. If not, see <http://www.gnu.org/licenses/>.
-}
{- |
Module : KabuCom.Broker
Description : broker
Copyright : (c) 2016 Akihiro Yamamoto
License : AGPLv3
Maintainer : https://github.com/ak1211
Stability : unstable
Portability : POSIX
A module for talking to the brokerage (securities company) site.
-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TypeFamilies #-}
module KabuCom.Broker
( siteConn
, noticeOfBrokerageAnnouncement
, noticeOfCurrentAssets
, fetchUpdatedPriceAndStore
) where
import qualified Control.Arrow as A
import Control.Exception.Safe
import qualified Control.Monad.IO.Class as M
import qualified Control.Monad.Logger as Logger
import qualified Control.Monad.Reader as M
import qualified Control.Monad.Trans.Resource as MR
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Maybe as Maybe
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Data.Time (UTCTime)
import qualified Data.Time as Time
import Database.Persist ((<.), (==.))
import qualified Database.Persist as DB
import qualified Database.Persist.MySQL as MySQL
import qualified Database.Persist.Sql as DB
import qualified Network.HTTP.Conduit as N
import Network.URI (URI)
import qualified Network.URI as URI
import qualified BrokerBackend as BB
import qualified Conf
import qualified GenScraper as GS
import KabuCom.Model
import qualified KabuCom.Scraper as S
import Lib (tzAsiaTokyo)
import qualified Lib
import Scheduling (AsiaTokyoDay (..))
import qualified SinkSlack as Slack
-- |
-- Log in to / log out of the brokerage site, in combination with runResourceT
siteConn :: (Monad m, M.MonadTrans t, MR.MonadResource (t m))
=> Conf.InfoKabuCom
-> Conf.UserAgent
-> (BB.HTTPSession -> m b)
-> t m b
siteConn conf userAgent f =
MR.allocate (login conf userAgent url) logout
>>= (\(_,session) -> M.lift $ f session)
where
url = "https://s10.kabu.co.jp/_mem_bin/light/login.asp?/light"
-- |
-- Send announcements to Slack and, while at it, store the current asset valuation in the DB
noticeOfBrokerageAnnouncement :: M.MonadIO m
=> Conf.InfoKabuCom
-> MySQL.ConnectInfo
-> Conf.UserAgent
-> C.ConduitT () TL.Text m ()
noticeOfBrokerageAnnouncement _ _ _ =
return ()
-- |
-- Take the latest asset valuation from the DB and send a report to Slack
noticeOfCurrentAssets :: M.MonadIO m
=> MySQL.ConnectInfo
-> C.ConduitT () Slack.Report m ()
noticeOfCurrentAssets connInfo = do
    -- Opening time of today's morning session
openingTime <- todayOpeningTime <$> M.liftIO Time.getCurrentTime
    -- Build a report from the database contents
rpt <- M.liftIO
. Logger.runNoLoggingT . MR.runResourceT
. MySQL.withMySQLConn connInfo . MySQL.runSqlConn $ do
DB.runMigration migrateKabuCom
--
yesterday <- takeBeforeAsset openingTime
latest <- takeLatestAsset
report <- M.mapM
(makeReport yesterday)
(latest :: Maybe (DB.Entity KabucomAsset))
return $ Maybe.maybeToList (report :: Maybe Slack.Report)
    -- Send the report to Slack
CL.sourceList rpt
where
-- |
    -- Opening time of today's morning session
todayOpeningTime :: UTCTime -> UTCTime
todayOpeningTime =
Time.zonedTimeToUTC
. (\(Time.ZonedTime t z) -> Time.ZonedTime
(t { Time.localTimeOfDay = Time.TimeOfDay 9 00 00}) z)
. Time.utcToZonedTime Lib.tzAsiaTokyo
-- |
    -- Function that builds the report
makeReport yesterday (DB.Entity key asset) = do
        -- Take out the held stocks
stocks <- takeStocks key
--
return Slack.Report
{ Slack.reportAt = kabucomAssetAt asset
, Slack.reportAllAsset = allAsset asset
        -- current value minus previous trading day's value
, Slack.reportGrowthToday = (\y -> allAsset asset - allAsset y) <$> yesterday
, Slack.reportAllProfit = kabucomAssetProfit asset
, Slack.reportStockDigests =
[Slack.StockDigest (kabucomStockAt s) (kabucomStockGain s) (kabucomStockDigest s) | s<-stocks]
}
-- |
    -- Take the latest asset valuation from the DB
takeLatestAsset =
DB.selectFirst [] [DB.Desc KabucomAssetAt]
-- |
    -- Take the held stocks from the DB
takeStocks key =
fmap DB.entityVal
<$>
DB.selectList
[KabucomStockAsset ==. key]
[DB.Asc KabucomStockTicker]
-- |
    -- Take the asset valuation from just before the morning session opened from the DB
takeBeforeAsset openingTime =
fmap DB.entityVal
<$>
DB.selectFirst
[KabucomAssetAt <. openingTime]
[DB.Desc KabucomAssetAt]
-- |
    -- Function returning the total assets (in cash terms):
    -- total stock valuation + available cash
allAsset :: KabucomAsset -> Double
allAsset a =
kabucomAssetEvaluation a + realToFrac (kabucomAssetCashBalance a)
-- |
-- Fetch the current asset valuation from the brokerage site and store it in the DB
fetchUpdatedPriceAndStore :: MySQL.ConnectInfo
-> BB.HTTPSession
-> IO ()
fetchUpdatedPriceAndStore connInfo sess@BB.HTTPSession{..} = do
  -- Visit Top -> 買付出金可能額 (purchase margin / withdrawable amount)
pmPage <- goPurchaseMarginPage
--
  -- Wait a little so as not to hit the server too frequently
BB.waitMS 600
  -- Visit Top -> 残高照会 (balance inquiry)
splPage <- goStockPositionListPage
  -- Visit Top -> 残高照会 -> the per-stock detail pages
stocks <- goStockDetailPage splPage
  -- Time the data was received
tm <- Time.getCurrentTime
  -- Put everything into the database
Logger.runStderrLoggingT . MR.runResourceT . MySQL.withMySQLConn connInfo . MySQL.runSqlConn $ do
DB.runMigration migrateKabuCom
    -- Store into the asset table
key <- DB.insert $ asset tm splPage pmPage
    -- Store into the held-stock table (only for stocks that have already opened)
M.mapM_ DB.insert . Maybe.mapMaybe (stock key) $ stocks
where
-- |
    -- Visit Top -> 買付出金可能額 (purchase margin / withdrawable amount)
goPurchaseMarginPage :: (M.MonadIO m, MonadThrow m) => m S.PurchaseMarginPage
goPurchaseMarginPage =
S.purchaseMarginPage =<< fetchLinkPage slowlyFetch sess "買付出金可能額"
-- |
    -- Function that visits Top -> 残高照会 (balance inquiry)
goStockPositionListPage :: (M.MonadIO m, MonadThrow m) => m S.StockPositionListPage
goStockPositionListPage =
S.stockPositionListPage =<< fetchLinkPage slowlyFetch sess "残高照会"
-- |
    -- Function that visits Top -> 残高照会 -> the per-stock detail pages
goStockDetailPage :: (M.MonadIO m, MonadThrow m)
=> S.StockPositionListPage
-> m [(S.StockPositionItem, S.StockDetailPage)]
goStockDetailPage page =
let ps = S.splPositions page
in
M.mapM (\i -> (,) <$> pure i <*> go i) ps
where
go :: (M.MonadIO m, MonadThrow m) => S.StockPositionItem -> m S.StockDetailPage
go spi =
let href = T.unpack . GS.aHref $ S.spCaptionAnchor spi
in
case BB.toAbsoluteURI sLoginPageURI href of
Nothing -> throwString $ href ++" の絶対リンクを取得できませんでした"
Just uri ->
          -- Access the detail page and scrape it
S.stockDetailPage =<< slowlyFetch sess uri
-- |
    -- Assemble the asset table record
asset :: UTCTime -> S.StockPositionListPage -> S.PurchaseMarginPage -> KabucomAsset
asset at splp pmp = KabucomAsset
{ kabucomAssetAt = at
, kabucomAssetEvaluation = S.splEvaluation splp
, kabucomAssetProfit = S.splProfit splp
--
, kabucomAssetMoneySpare = S.pmMoneyToSpare pmp
, kabucomAssetCashBalance= S.pmCashBalance pmp
}
-- |
    -- Assemble the held-stock table record.
    -- Do not build it from prices of stocks that have not opened yet.
stock :: DB.Key KabucomAsset
-> (S.StockPositionItem, S.StockDetailPage)
-> Maybe KabucomStock
stock key (sp, sdp) = do
(pr, (h,m)) <- A.second S.unHourMinute <$> S.sdpPrice sdp
t <- Time.makeTimeOfDayValid h m 0
let lt = Time.LocalTime { Time.localDay = unAsiaTokyoDay (S.sdpDay sdp)
, Time.localTimeOfDay = t }
Just KabucomStock
{ kabucomStockAsset = key
, kabucomStockAt = Time.localTimeToUTC tzAsiaTokyo lt
, kabucomStockTicker = S.sdpTicker sdp
, kabucomStockCaption = T.unpack $ S.sdpCaption sdp
, kabucomStockCount = S.spCount sp
, kabucomStockPurchase = S.spPurchasePrice sp
, kabucomStockPrice = pr
}
-- |
-- Function that accesses the page behind a link
fetchLinkPage :: MonadThrow m
=> (BB.HTTPSession -> URI -> m TL.Text)
-> BB.HTTPSession
-> T.Text
-> m TL.Text
fetchLinkPage fetcher sess t =
fetcher sess =<< lookupLinkOnTopPage sess =<< pure t
-- |
-- Return the URI corresponding to the given link text on the top page
lookupLinkOnTopPage :: MonadThrow m => BB.HTTPSession -> T.Text -> m URI
lookupLinkOnTopPage BB.HTTPSession{..} linktext =
maybe failure pure go
where
--
--
go =
BB.toAbsoluteURI sLoginPageURI . T.unpack
=<< lookup linktext [(GS.aText a, GS.aHref a) | a<-topPage]
--
--
topPage =
S.unTopPage $ S.topPage sTopPageHTML
--
--
failure =
throwString $ "no link \"" ++ T.unpack linktext ++ "\""
-- |
-- Plain fetch
noWaitFetch :: M.MonadIO m => BB.HTTPSession -> URI -> m TL.Text
noWaitFetch =
BB.fetchPageWithSession
-- |
-- Fetch with a delay
slowlyFetch :: M.MonadIO m => BB.HTTPSession -> URI -> m TL.Text
slowlyFetch x y = noWaitFetch x y <* M.liftIO (BB.waitMS 300)
-- |
-- Function that logs in from the login page and returns the HTTP session information
login :: Conf.InfoKabuCom -> Conf.UserAgent -> String -> IO BB.HTTPSession
login conf userAgent loginPageURL = do
loginURI <- maybe errInvalidUrl return (URI.parseURI loginPageURL)
  -- This is an HTTPS connection
manager <- N.newManager N.tlsManagerSettings
  -- Access the login page
loginPage <- BB.takeBodyFromResponse <$>
BB.fetchHTTP manager reqHeader Nothing [] loginURI
  -- Scrape the login page
loginForm <- S.formLoginPage loginPage
  -- Fill in the ID and password
let postMsg = BB.mkCustomPostReq
(map GS.toPairNV $ GS.formInputTag loginForm)
[ ("SsLogonUser", Conf.loginID $ Conf.unInfoKabuCom conf)
, ("SsLogonPassword", Conf.loginPassword $ Conf.unInfoKabuCom conf)
]
  -- The page given by the form's action attribute
let formAction = T.unpack $ GS.formAction loginForm
postto <- maybe loginFail return $ BB.toAbsoluteURI loginURI formAction
  -- Submit
resp <- BB.fetchHTTP manager reqHeader Nothing postMsg postto
  -- Return the received session cookies and the top page
return BB.HTTPSession
{ BB.sLoginPageURI = loginURI
, BB.sManager = manager
, BB.sReqHeaders = reqHeader
, BB.sRespCookies = N.responseCookieJar resp
, BB.sTopPageHTML = BB.takeBodyFromResponse resp
}
where
-- |
    -- HTTP request header
reqHeader = Lib.httpRequestHeader userAgent
--
--
errInvalidUrl =
throwString $ loginPageURL ++ " は有効なURLではありません"
--
--
loginFail =
throwString $ loginPageURL ++ " にログインできませんでした"
-- |
-- Function that logs out
logout :: BB.HTTPSession -> IO ()
logout sess@BB.HTTPSession{..} =
let topPageLinks = S.topPage sTopPageHTML
logoutLink = lookup "LOG OUT" [(GS.aText a, GS.aHref a) | a<-S.unTopPage topPageLinks]
toLogoutURI = BB.toAbsoluteURI sLoginPageURI . T.unpack
in
case toLogoutURI =<< logoutLink of
Nothing -> logoutFail
Just uri ->
        -- Access the logout page
M.void $ BB.fetchPageWithSession sess uri
where
--
--
logoutFail =
throwString "ログアウトリンクがわかりませんでした"
|
ak1211/tractor
|
src/KabuCom/Broker.hs
|
agpl-3.0
| 14,339
| 0
| 17
| 3,943
| 2,721
| 1,457
| 1,264
| 220
| 2
|
--Zaoqilc
--Copyright (C) 2017 Zaoqi
--This program is free software: you can redistribute it and/or modify
--it under the terms of the GNU Affero General Public License as published
--by the Free Software Foundation, either version 3 of the License, or
--(at your option) any later version.
--This program is distributed in the hope that it will be useful,
--but WITHOUT ANY WARRANTY; without even the implied warranty of
--MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
--GNU Affero General Public License for more details.
--You should have received a copy of the GNU Affero General Public License
--along with this program. If not, see <http://www.gnu.org/licenses/>.
import Data.Ratio
n = [1..]
q = 0 : (concat $ map (\x -> do
y <- [1..x]
a <- [y%x, x%y]
[a, (-a)]) [1..])
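-- Illustrative note (not part of the original source): q starts with 0, then
-- for x = 1 produces 1, -1, 1, -1, and for x = 2 produces
-- 1/2, -1/2, 2, -2, 1, -1, 1, -1, and so on; every rational number appears
-- eventually (with repetitions).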
|
zaoqi/zaoqilc
|
featuring/n.hs
|
agpl-3.0
| 810
| 0
| 15
| 150
| 105
| 63
| 42
| 6
| 1
|
module Poset.A334111 (a334111) where
import Poset.A064097 (a064097)
import Data.Set (Set)
import qualified Data.Set as Set
a334111 :: Int -> Integer
a334111 = (!!) a334111_list
a334111_list :: [Integer]
a334111_list = concat a334111_rows
a334111_rows :: [[Integer]]
a334111_rows = zipWith takeUntil powersOfTwo a334111_rows' where
a334111_rows' = partition' (fromIntegral . a064097) [1..]
powersOfTwo = map (2^) [0..]
partition' :: (a -> Int) -> [a] -> [[a]]
partition' p = foldr (select p) $ repeat []
select :: (a -> Int) -> a -> [[a]] -> [[a]]
select index x = prependTo (index x) x
prependTo :: Int -> a -> [[a]] -> [[a]]
prependTo 0 b ~(bs:bss) = (b:bs):bss
prependTo n b ~(bs:bss) = bs : prependTo (n-1) b bss
takeUntil :: Ord a => a -> [a] -> [a]
takeUntil _ [] = []
takeUntil n (x:xs)
| x < n = x : takeUntil n xs
| x == n = [x]
| otherwise = []
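-- Illustrative examples (not part of the original source):
-- takeUntil 3 [1,2,3,4,5] == [1,2,3]
-- takeUntil 3 [1,2,4,5]   == [1,2]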
|
peterokagey/haskellOEIS
|
src/Poset/A334111.hs
|
apache-2.0
| 879
| 0
| 9
| 179
| 449
| 243
| 206
| 25
| 1
|
module P32 where
gcd' :: Int -> Int -> Int
gcd' a 0 = a
gcd' a b = gcd' b (a `mod` b)
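-- Illustrative example (not part of the original source): gcd' 36 63 == 9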
|
plilja/h99
|
p32.hs
|
apache-2.0
| 86
| 0
| 7
| 25
| 51
| 28
| 23
| 4
| 1
|
{- | This Module is the internal implementation behind the module 'Muste.Tree' -}
module Muste.Tree.Internal where
import PGF hiding (showType,checkType,parse)
import PGF.Internal hiding (showType)
import Data.Maybe
import Muste.Grammar
import Muste.Feat
-- | Generic class for trees
class TreeC t where
showTree :: t -> String
  -- | The function 'selectNode' returns the subtree at a given 'Path' if it exists
selectNode :: t -> Path -> Maybe t
  -- | The function 'selectBranch' returns the subtree at a given branch index if it exists
selectBranch :: t -> Int -> Maybe t
-- | Position in a path
type Pos = Int
-- | Path in a tree
type Path = [Pos]
-- | A renaming of the GF abstract syntax tree type (from PGF)
type GFAbsTree = Tree
-- | A labeled tree - just a template to match labels to paths
data LTree = LNode CId Int [LTree] | LLeaf deriving (Show,Eq)
-- | 'TTree', a generic tree with types, is an instance of the 'TreeC' class
instance TreeC TTree where
showTree = show
selectNode t [] = Just t
selectNode t [b] = selectBranch t b
selectNode t (hd:tl) =
let
branch = selectBranch t hd
in
case branch of {
Just b -> selectNode b tl ;
Nothing -> Nothing
}
selectBranch (TMeta _) _ = Nothing
selectBranch (TNode _ _ [] ) _ = Nothing
selectBranch (TNode _ _ trees) i
| i < 0 || i >= length trees = Nothing
| otherwise = Just (trees !! i)
-- | 'LTree', a labeled tree, is an instance of the 'TreeC' class
instance TreeC LTree where
showTree = show
selectNode t [] = Just t
selectNode t [b] = selectBranch t b
selectNode t (hd:tl) =
let
branch = selectBranch t hd
in
case branch of {
Just b -> selectNode b tl ;
Nothing -> Nothing
}
selectBranch (LLeaf) _ = Nothing
selectBranch (LNode _ _ [] ) _ = Nothing
selectBranch (LNode _ _ trees) i
| i < 0 || i >= length trees = Nothing
| otherwise = Just (trees !! i)
-- List-related functions
-- | The function 'listReplace' replaces an element in a list if the position exists
listReplace :: [a] -> Pos -> a -> [a]
listReplace list pos elem
| 0 <= pos && pos < length list = -- pos in list -> replace it
let
(pre,_:post) = splitAt pos list
in
pre ++ (elem:post)
| otherwise = list -- Element not in the list -> return the same list instead
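-- For illustration (doctest style; out-of-range positions leave the list
-- unchanged):
--
-- >>> listReplace "abc" 1 'x'
-- "axc"
-- >>> listReplace "abc" 5 'x'
-- "abc"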
-- | The function 'isValid' "type-checks" a 'TTree'
isValid :: TTree -> (Bool,Maybe Path)
isValid t =
let
check :: TTree -> Path -> (Bool,Maybe Path)
check (TMeta _) _ = (True,Nothing)
check (TNode _ NoType []) _ = (True,Nothing)
check (TNode _ (Fun _ t) c) path =
let
ccats = map getTreeCat c
vs = map (\(p,t) -> check t (p:path)) $ zip [0..] c
brokenPath = filter (not . fst) vs
in
      if (t == ccats) && and (map fst vs) then (True, Nothing)
      else if null brokenPath then (False, Just $ reverse path)
      else (False, Just $ reverse $ fromJust $ snd $ head brokenPath)
check _ path = (False, Just $ reverse path)
in
check t []
-- | The function 'getTreeCat' gives the root category of a 'TTree', returning 'wildCard' when the type is missing
getTreeCat :: TTree -> String
getTreeCat (TNode id typ _) =
case typ of {
(Fun cat _) -> cat ;
NoType -> wildCard
}
getTreeCat (TMeta cat) = cat
-- | The function 'gfAbsTreeToTTree' creates a 'TTree' from a GF abstract syntax 'Tree' and a 'Grammar'. Otherwise similar to gfAbsTreeToTTreeWithPGF
gfAbsTreeToTTree :: Grammar -> GFAbsTree -> TTree
gfAbsTreeToTTree g (EFun f) =
let
typ = getFunType g (showCId f)
in
TNode (showCId f) typ []
gfAbsTreeToTTree g (EApp e1 e2) =
let
(TNode name typ sts) = gfAbsTreeToTTree g e1
st2 = gfAbsTreeToTTree g e2
in
TNode name typ (sts ++ [st2])
gfAbsTreeToTTree _ _ = TMeta wildCard
-- | Creates a GF abstract syntax Tree from a generic tree
ttreeToGFAbsTree :: TTree -> GFAbsTree
ttreeToGFAbsTree tree =
let
loop :: [TTree] -> Int -> (Int,[GFAbsTree])
loop [] id = (id,[])
loop (t:ts) id =
let
(nid,nt) = convert t id
(fid,nts) = loop ts nid
in
(fid,nt:nts)
convert :: TTree -> Int -> (Int,GFAbsTree)
convert (TMeta _) id = (id + 1, mkMeta id)
convert (TNode name _ ns) id =
let
(nid,nts) = loop ns id
in
if name == wildCard then (nid,mkApp wildCId nts) else (nid,mkApp (mkCId name) nts)
in
snd $ convert tree 0
-- | Creates a labeled LTree from a TTree
ttreeToLTree :: TTree -> LTree
ttreeToLTree tree =
let
-- Convert structure without caring about labels
convert (TMeta cat) = LNode (mkCId cat) (-1) [LNode (mkCId "_") (-1) [LLeaf]]
convert (TNode _ (Fun cat _) []) = LNode (mkCId cat) (-1) []
convert (TNode _ (Fun cat _) ts) = LNode (mkCId cat) (-1) (map convert ts)
convert rest = error $ "Could not convert tree due to lack of types" ++ show rest
-- Update the labels in a tree
update :: Int -> LTree -> (Int, LTree)
update pos LLeaf = (pos, LLeaf)
update pos (LNode cat id []) = (pos + 1, LNode cat pos [])
update pos (LNode cat id ns) =
let
(npos,ults) = updates pos ns
in
(npos + 1, LNode cat npos ults)
-- Update a list of trees
updates :: Int -> [LTree] -> (Int, [LTree])
updates pos [] = (pos, [])
updates pos (lt:lts) =
let
(npos1,ult) = update pos lt
(npos,ults) = updates npos1 lts
in
(npos, ult:ults)
in
snd $ update 0 $ convert tree
-- | The function 'getPath' finds a path to a node with a given label in a labeled tree
getPath :: LTree -> Int -> Path
getPath ltree id =
let
deep :: LTree -> Int -> Path -> Path
deep LLeaf _ _ = []
deep (LNode cid fid ns) id path = if fid == id then path else broad ns id path 0
broad :: [LTree] -> Int -> Path -> Pos -> Path
broad [] _ _ _ = []
broad (n:ns) id path pos =
let
d = deep n id (pos:path)
b = broad ns id path (pos + 1)
in
if not $ null d then d else b
in
reverse $ deep ltree id []
-- | The function 'maxDepth' gets the length of the maximum path between root and a leaf (incl. meta nodes) of a 'TTree'
maxDepth :: TTree -> Int
maxDepth (TMeta _) = 1
maxDepth (TNode _ _ []) = 1
maxDepth (TNode _ _ trees) =
1 + maximum ( map maxDepth trees )
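-- For illustration (doctest style, assuming the constructor shapes used in
-- this module: 'TMeta' takes a category string, 'TNode' takes a name, a type
-- (NoType / Fun) and a list of children):
--
-- >>> maxDepth (TNode "f" NoType [TMeta "A", TMeta "B"])
-- 2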
-- | The function 'getPathes' returns all paths in a 'TTree'
getPathes :: TTree -> [Path]
getPathes t =
let
pathes (TMeta _) = []
pathes (TNode _ _ []) = []
pathes (TNode _ _ cs) =
let zips = zip [0..] cs in
[[c]|(c,_) <- zips] ++ (concat $ map (\(p,c) -> map (p:) $ pathes c) $ zips)
in
[]:pathes t
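-- Under the same constructor assumptions as above, a hedged example:
--
-- >>> getPathes (TNode "f" NoType [TMeta "A", TMeta "B"])
-- [[],[0],[1]]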
-- | The function 'replaceBranch' replaces a branch in a 'TTree' by a new 'TTree' if a subtree at the position exists
replaceBranch :: TTree -> Pos -> TTree -> TTree
replaceBranch (TNode id typ trees) pos newTree =
let
newSubtrees = listReplace trees pos newTree -- listReplace takes care of out-of-range positions
in
TNode id typ newSubtrees
replaceBranch tree _ _ = tree
-- | The function 'replaceNode' replaces a subtree given by 'Path' in a 'TTree'
replaceNode :: TTree -> Path -> TTree -> TTree
replaceNode oldTree@(TNode _ _ trees) path@(pos:ps) newTree
| pos >= 0 && pos < length trees = -- subtree must exist
let
branch = fromJust $ selectBranch oldTree pos
in
replaceBranch oldTree pos (replaceNode branch ps newTree)
| otherwise = oldTree -- if branch does not exist just do nothing
replaceNode oldTree [] newTree =
newTree -- at the end of the path just give the new tree to be inserted
replaceNode oldTree _ _ =
oldTree -- No more subtrees, cancel search
-- | The function 'generateTrees' generates a list of 'TTree's up to a certain depth given a grammar. Powered by the magic of feat
generateTrees :: Grammar -> String -> Int -> [TTree]
generateTrees grammar cat size =
let
feats = map (\d -> let f = feat grammar in (featCard f cat d,featIth f cat d)) [0..size]
in
concatMap (\(max,fs) -> map fs [0..max-1]) feats
-- | Show the tree in a simpler form
showTTree = showExpr [] . ttreeToGFAbsTree
countNodes :: TTree -> Int
countNodes (TMeta _) = 1
countNodes (TNode _ _ []) = 1
countNodes (TNode _ _ ts) = 1 + (sum $ map countNodes ts)
countMatchedNodes :: TTree -> TTree -> Int
countMatchedNodes tree1 tree2 =
let
pathes = getPathes tree1
in
length $ filter (\p -> selectNode tree1 p == selectNode tree2 p) pathes
|
daherb/Haskell-Muste
|
muste-lib/Muste/Tree/Internal.hs
|
artistic-2.0
| 8,449
| 0
| 20
| 2,236
| 2,973
| 1,539
| 1,434
| 182
| 7
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE QuasiQuotes #-}
module Utils.Vigilance.Client.Client ( getList
, getInfo
, pause
, unPause
, checkIn
, test
, displayList
, displayWatch
, displayWatchInfo
, displayFailedNotifications
-- for testing
, renderList
, renderWatch
, renderWatchInfo
, renderFailedNotifications
, VError(..) ) where
import ClassyPrelude
import Control.Lens
import Control.Monad.Trans.Reader (asks)
import Data.Aeson ( FromJSON
, json
, Result(..)
, fromJSON )
import Blaze.ByteString.Builder (Builder)
import Data.Ix (inRange)
import Network.Http.Client ( Method(GET, POST)
, Response
, emptyBody
, withConnection
, openConnection
, http
, setAccept
, setHeader
, buildRequest
, sendRequest
, receiveResponse
, RequestBuilder
, getStatusCode
, StatusCode )
import System.IO.Streams.Attoparsec (parseFromStream)
import qualified System.IO.Streams as S
import Text.InterpolatedString.Perl6 (qc)
import Utils.Vigilance.Client.Config
import Utils.Vigilance.Types
displayList :: [EWatch] -> IO ()
displayList = putStrLn . renderList
renderList :: [EWatch] -> Text
renderList = unlines' . map renderWatch
displayWatch :: EWatch -> IO ()
displayWatch = putStrLn . renderWatch
renderWatch :: EWatch -> Text
renderWatch w = [qc|{name} ({i}) - {interval} - {state}|]
where name = w ^. watchName . unWatchName
i = w ^. watchId . unID
interval = w ^. watchInterval
state = w ^. watchWState . to renderState
displayWatchInfo :: EWatch -> IO ()
displayWatchInfo = putStrLn . renderWatchInfo
displayFailedNotifications :: [FailedNotification] -> IO ()
displayFailedNotifications = putStrLn . renderFailedNotifications
renderFailedNotifications :: [FailedNotification] -> Text
renderFailedNotifications [] = "All notifications sent successfully."
renderFailedNotifications fns = unlines' . (header:) . map render $ fns
where header = "The following errors were encountered when testing:"
render fn = [qc|- {pref} for {wn} ({wid}): {err}|]
where pref = fn ^. failedPref . to renderNotificationPref
wn = fn ^. failedWatch . watchName . unWatchName
wid = fn ^. failedWatch . watchId . unID
err = fn ^. failedLastError . to renderNotificationError
renderNotificationPref :: NotificationPreference -> Text
renderNotificationPref (HTTPNotification u) = [qc|HTTP Notification ({u})|]
renderNotificationPref (EmailNotification (EmailAddress a)) = [qc|Email Notification ({a})|]
renderNotificationError :: NotificationError -> Text
renderNotificationError (FailedByCode c) = [qc|Failed with status code {c}|]
renderNotificationError (FailedByException e) = [qc|Failed with exception "{e}"|]
renderWatchInfo :: EWatch -> Text
renderWatchInfo w = [qc|{renderedWatch}
Notifications:
{renderedNotifications}|]
where renderedWatch = renderWatch w
notifications = w ^. watchNotifications
renderedNotifications
| null notifications = bullet "none"
| otherwise = unlines' . map (bullet . renderNotification) $ notifications
renderNotification :: NotificationPreference -> Text
renderNotification (EmailNotification (EmailAddress a)) = [qc|Email: {a}|]
renderNotification (HTTPNotification u) = [qc|HTTP: {u}|]
renderState :: WatchState -> Text
renderState (Active t) = [qc|Active {t}|]
renderState x = show x
bullet :: Text -> Text
bullet x = [qc| - {x}|]
getList :: ClientCtxT IO (VigilanceResponse [EWatch])
getList = makeRequest GET "/watches" emptyBody
getInfo :: WatchName -> ClientCtxT IO (VigilanceResponse EWatch)
getInfo n = makeRequest GET (watchRoute n) emptyBody
pause :: WatchName -> ClientCtxT IO (VigilanceResponse ())
pause n = makeRequest_ POST (watchRoute n <> "/pause") emptyBody
unPause :: WatchName -> ClientCtxT IO (VigilanceResponse ())
unPause n = makeRequest_ POST (watchRoute n <> "/unpause") emptyBody
checkIn :: WatchName -> ClientCtxT IO (VigilanceResponse ())
checkIn n = makeRequest_ POST (watchRoute n <> "/checkin") emptyBody
test :: WatchName -> ClientCtxT IO (VigilanceResponse [FailedNotification])
test n = makeRequest POST (watchRoute n <> "/test") emptyBody
watchRoute :: WatchName -> ByteString
watchRoute (WatchName n) = "/watches/" <> encodeUtf8 n
makeRequest_ :: Method
-> ByteString
-> (S.OutputStream Builder -> IO b)
-> ClientCtxT IO (VigilanceResponse ())
makeRequest_ = makeRequest' unitResponseHandler
makeRequest :: FromJSON a
=> Method
-> ByteString
-> (S.OutputStream Builder -> IO b)
-> ClientCtxT IO (VigilanceResponse a)
makeRequest = makeRequest' jsonResponseHandler
makeRequest' :: (Response -> S.InputStream ByteString -> IO (VigilanceResponse a))
-> Method
-> ByteString
-> (S.OutputStream Builder -> IO b)
-> ClientCtxT IO (VigilanceResponse a)
makeRequest' handler m p body = do
host <- asks serverHost
port <- asks serverPort
lift $ withConnection (openConnection host port) $ \c -> do
req <- buildRequest $ do
http m p
setAccept "application/json"
setUserAgent defaultUserAgent
void $ sendRequest c req body
receiveResponse c handler
setUserAgent :: ByteString -> RequestBuilder ()
setUserAgent = setHeader "User-Agent"
defaultUserAgent :: ByteString
defaultUserAgent = "vigilance client"
unitResponseHandler :: Response
-> S.InputStream ByteString
-> IO (VigilanceResponse ())
unitResponseHandler = responseHandler (const $ return $ Right ())
jsonResponseHandler :: FromJSON a
=> Response
-> S.InputStream ByteString
-> IO (VigilanceResponse a)
jsonResponseHandler = responseHandler handleJSONBody
responseHandler :: (S.InputStream ByteString -> IO (VigilanceResponse a))
-> Response
-> S.InputStream ByteString
-> IO (VigilanceResponse a)
responseHandler successHandler resp stream
| responseOk = successHandler stream
| notFound = return . Left $ NotFound
| otherwise = return . Left $ StatusError statusCode
where statusCode = getStatusCode resp
responseOk = inRange (200, 299) statusCode
notFound = statusCode == 404
handleJSONBody :: FromJSON a => S.InputStream ByteString -> IO (VigilanceResponse a)
handleJSONBody stream = coerceParsed <$> parseJSONBody stream
data VError = NotFound |
ParseError Text |
StatusError StatusCode deriving (Show, Eq)
parseJSONBody :: FromJSON a => S.InputStream ByteString -> IO (Result a)
parseJSONBody = parseFromStream parser
where parser = fmap fromJSON json
type VigilanceResponse a = Either VError a
coerceParsed :: Result a -> VigilanceResponse a
coerceParsed (Success a) = Right a
coerceParsed (Error e) = Left $ ParseError $ pack e
unlines' :: [Text] -> Text
unlines' = intercalate "\n"
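-- For illustration (assumed behaviour): unlike Prelude 'unlines', this puts a
-- newline only between elements, e.g. unlines' ["a", "b"] == "a\nb".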
|
MichaelXavier/vigilance
|
src/Utils/Vigilance/Client/Client.hs
|
bsd-2-clause
| 8,156
| 0
| 15
| 2,581
| 1,922
| 1,018
| 904
| -1
| -1
|
-- | This module provides the type of a JBC program state.
module Jat.PState.Data
(
PState (..)
, PException (..)
--, Var (..)
, frames
, frame
, heap
, annotations
, Path
, Root (..)
, RPath(..)
)
where
import Jat.PState.Frame
import Jat.PState.Heap
import Jat.Utils.Pretty as PP
import qualified Jinja.Program as P
-- | The abstract State.
data PState i a =
PState (Heap i) [Frame i] a
| EState PException
-- | The program exception.
data PException = NullPointerException | IllegalStateException deriving Show
-- | Returns the list of frames.
frames :: PState i a -> [Frame i]
frames (PState _ frms _) = frms
frames (EState _) = []
-- | Returns the top frame.
frame :: PState i a -> Frame i
frame (PState _ frms _)
| null frms = error "Jat.PState.Data.frame: assertion error: empty stack."
| otherwise = head frms
frame (EState _) = error "Jat.PState.Data.frame assertion error: exceptional state"
-- | Returns Frame indices.
--vars :: PState i a -> [P.Var]
--vars s = foldr k [] (zip [0..] $ frames s)
--where k (i,frm) xs = xs ++ ([P.StkVar i j | (j, _) <- zip [0..] (opstk frm)]
-- ++ [P.LocVar i j | (j, _) <- zip [0..] (locals frm)])
-- | Returns the heap.
heap :: PState i a -> Heap i
heap (PState hp _ _) = hp
heap (EState _) = error "Jat.PState.Data.heap: assertion error: exceptional state"
-- | Returns the annotations.
annotations :: PState i a -> a
annotations (PState _ _ ann) = ann
annotations (EState _) = error "Jat.PState.Data.annotations: assertion error: exceptional state"
-- | A path in the heap represented by pairs of class and field identifiers.
type Path = [(P.ClassId, P.FieldId)]
-- | A Root origin from the frame.
data Root = RStk Int Int | RLoc Int Int deriving (Eq,Show)
-- | A rooted path.
data RPath = RPath Root Path deriving (Eq)
instance Show RPath where
show (RPath root path) = show root ++ show (map prettyEdge path)
where prettyEdge (cn,fn) = pretty cn <> char '.' <> pretty fn <> char '>'
instance (Pretty i, Pretty a) => Show (PState i a) where
show = show . pretty
instance Pretty PException where
pretty = text . show
instance (Pretty i, Pretty a) => Pretty (PState i a) where
pretty (EState ex) = pretty ex
pretty (PState hp frms ann) =
vsep (map pretty frms) PP.<$> pretty hp PP.<$> pretty ann
|
ComputationWithBoundedResources/jat
|
src/Jat/PState/Data.hs
|
bsd-3-clause
| 2,368
| 0
| 11
| 537
| 672
| 361
| 311
| 47
| 1
|
-- Author: Lee Ehudin
-- Runs a text editor
module Main (main) where
import Controller.Runner (hasked)
main :: IO ()
main = hasked
|
percivalgambit/hasked
|
Main.hs
|
bsd-3-clause
| 134
| 0
| 6
| 26
| 36
| 22
| 14
| 4
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Secrets
Description : Routines to safely store passwords or tokens.
Copyright : (c) Frédéric Menou, 2016
License : MIT License
Maintainer : Frédéric Menou <frederic.menou@gmail.com>
Stability : experimental
Portability : X11/Freedesktop
Store a password:
> password <- getLine -- read from stdin, popup, ...
> let label = "Fancy label to be displayed in Keyring"
> let attributes = [ ("application", "my-application") ]
> success <- storePassword label password attributes
> if success
> then putStrLn "Password saved!"
> else putStrLn "Failure"
Retrieve passwords:
> let filters = [ ("application", "my-application") ]
> Just password <- findPassword filters
-}
module GoogleAPIsClient.Secrets
(
-- * Types
Attributes
, Filter
, Label
, Password
-- * API
, findPassword
, findPasswords
, storePassword
) where
import Control.Concurrent.MVar (MVar, newEmptyMVar, takeMVar, putMVar)
import Control.Monad (void)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as C (pack)
import Data.List (uncons)
import Data.Maybe (fromJust)
import qualified Data.Map.Lazy as M (Map, elems, fromList)
import DBus
import DBus.Client
type Label = String
type Password = ByteString
type Attributes = [(String, String)]
type Filter = [(String, String)]
type Secret = (ObjectPath, ByteString, ByteString, String)
data Success = Success Variant
| PromptNeeded ObjectPath
deriving (Show)
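-- | Presumed flow (from the calls below): unlock the default collection,
-- search for items matching the filter, open a session and fetch the
-- matching secrets; an unresolved prompt yields an empty result.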
findSecrets :: Filter -> IO [Secret]
findSecrets filters = do
client <- connectSession
unlockSuccess <- unlockPasswords client defaultCollection
case unlockSuccess of
Success _ -> do
paths <- searchItems client filters
session <- openSession client
getSecrets client paths session
_ -> return []
findPassword :: Filter -> IO (Maybe ByteString)
findPassword filters = fmap fst . uncons <$> findPasswords filters
findPasswords :: Filter -> IO [ByteString]
findPasswords filters = map readPassword <$> findSecrets filters
storePassword :: Label -> Password -> Attributes -> IO Bool
storePassword label password attributes = do
client <- connectSession
session <- openSession client
unlockSuccess <- unlockPasswords client defaultCollection
case unlockSuccess of
Success _ -> do
let properties = mkProperties label attributes
let secret = mkPlainSecret session password
reply <- call_ client (methodCall defaultCollection "org.freedesktop.Secret.Collection" "CreateItem")
{ methodCallDestination = Just "org.freedesktop.secrets"
, methodCallBody = [ toVariant properties
, toVariant secret
, toVariant True
]
}
result <- promptIfRequired client reply
case result of
Success _ -> return True
_ -> return False
_ -> return False
defaultCollection :: ObjectPath
defaultCollection = "/org/freedesktop/secrets/aliases/default"
readPassword :: Secret -> ByteString
readPassword (_, _, password, _) = password
mkProperties :: String -> Attributes -> M.Map String Variant
mkProperties label attributes =
M.fromList [ ("org.freedesktop.Secret.Item.Label", toVariant label)
, ("org.freedesktop.Secret.Item.Attributes", toVariant (M.fromList attributes))
]
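-- | Presumably the (session, parameters, value, content-type) tuple expected
-- by the Secret Service "CreateItem" call; the parameters field is left empty
-- for a plain-text secret.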
mkPlainSecret :: ObjectPath -> ByteString -> Secret
mkPlainSecret session password = (session, C.pack "", password, "text/plain")
promptIfRequired :: Client -> MethodReturn -> IO Success
promptIfRequired client reply = do
let result = methodReturnBody reply
let promptPath = fromJust . fromVariant $ result !! 1
askPrompt client $ analyze (head result) promptPath
unlockPasswords :: Client -> ObjectPath -> IO Success
unlockPasswords client collection = do
reply <- call_ client (methodCall "/org/freedesktop/secrets" "org.freedesktop.Secret.Service" "Unlock")
{ methodCallDestination = Just "org.freedesktop.secrets"
, methodCallBody = [ toVariant [collection] ]
}
promptIfRequired client reply
askPrompt :: Client -> Success -> IO Success
askPrompt _ s@(Success _) = return s
askPrompt client pn@(PromptNeeded path) = do
openPrompt client path
callback <- waitPrompt client path
if fromJust . fromVariant $ head callback
then putStrLn "Dismissed, aborting" >> return pn
else putStrLn "Accepted" >> return (Success $ callback !! 1)
openPrompt :: Client -> ObjectPath -> IO ()
openPrompt client path =
void $ call_ client (methodCall path "org.freedesktop.Secret.Prompt" "Prompt")
{ methodCallDestination = Just "org.freedesktop.secrets"
, methodCallBody = [ toVariant ("0" :: String) ]
}
waitPrompt :: Client -> ObjectPath -> IO [Variant]
waitPrompt client path = do
let matchRule = matchAny { matchPath = Just path }
resultHolder <- newEmptyMVar
void $ addMatch client matchRule (promptCompletedHandler resultHolder)
takeMVar resultHolder
promptCompletedHandler :: MVar [Variant] -> Signal -> IO ()
promptCompletedHandler barrier = putMVar barrier . signalBody
analyze :: Variant -> ObjectPath -> Success
analyze v "/" = Success v
analyze _ prompt = PromptNeeded prompt
searchItems :: Client -> Filter -> IO [ObjectPath]
searchItems client filters = do
reply <- call_ client (methodCall "/org/freedesktop/secrets" "org.freedesktop.Secret.Service" "SearchItems")
{ methodCallDestination = Just "org.freedesktop.secrets"
, methodCallBody = [toVariant (M.fromList filters)]
}
return $ fromJust $ fromVariant $ head (methodReturnBody reply)
openSession :: Client -> IO ObjectPath
openSession client = do
reply <- call_ client (methodCall "/org/freedesktop/secrets" "org.freedesktop.Secret.Service" "OpenSession")
{ methodCallDestination = Just "org.freedesktop.secrets"
, methodCallBody = [toVariant ("plain" :: String), toVariant (toVariant ("" :: String))]
}
return $ fromJust $ fromVariant $ methodReturnBody reply !! 1
getSecrets :: Client -> [ObjectPath] -> ObjectPath -> IO [Secret]
getSecrets client paths session = do
reply <- call_ client (methodCall "/org/freedesktop/secrets" "org.freedesktop.Secret.Service" "GetSecrets")
{ methodCallDestination = Just "org.freedesktop.secrets"
, methodCallBody = [toVariant paths, toVariant session]
}
return $ M.elems $ fromJust (fromVariant (head (methodReturnBody reply)) :: Maybe (M.Map ObjectPath Secret))
|
ptitfred/ftv-vods
|
src/GoogleAPIsClient/Secrets.hs
|
bsd-3-clause
| 6,709
| 0
| 17
| 1,464
| 1,631
| 835
| 796
| 123
| 3
|
module JavaScript.AceAjax.Raw.Types where
import qualified GHCJS.Types as GHCJS
import qualified GHCJS.Marshal as GHCJS
import qualified Data.Typeable
import GHCJS.FFI.TypeScript
import GHCJS.DOM.Types (HTMLElement)
newtype Delta = Delta (GHCJS.JSRef Delta)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype EditorCommand = EditorCommand (GHCJS.JSRef EditorCommand)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype CommandManager = CommandManager (GHCJS.JSRef CommandManager)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Annotation = Annotation (GHCJS.JSRef Annotation)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype TokenInfo = TokenInfo (GHCJS.JSRef TokenInfo)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Position = Position (GHCJS.JSRef Position)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype KeyBinding = KeyBinding (GHCJS.JSRef KeyBinding)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype TextMode = TextMode (GHCJS.JSRef TextMode)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Ace = Ace (GHCJS.JSRef Ace)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Anchor = Anchor (GHCJS.JSRef Anchor)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype BackgroundTokenizer = BackgroundTokenizer (GHCJS.JSRef BackgroundTokenizer)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Document = Document (GHCJS.JSRef Document)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype IEditSession = IEditSession (GHCJS.JSRef IEditSession)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Editor = Editor (GHCJS.JSRef Editor)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype PlaceHolder = PlaceHolder (GHCJS.JSRef PlaceHolder)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype IRangeList = IRangeList (GHCJS.JSRef IRangeList)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Range = Range (GHCJS.JSRef Range)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype RenderLoop = RenderLoop (GHCJS.JSRef RenderLoop)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype ScrollBar = ScrollBar (GHCJS.JSRef ScrollBar)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Search = Search (GHCJS.JSRef Search)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Selection = Selection (GHCJS.JSRef Selection)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Split = Split (GHCJS.JSRef Split)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype TokenIterator = TokenIterator (GHCJS.JSRef TokenIterator)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype Tokenizer = Tokenizer (GHCJS.JSRef Tokenizer)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype UndoManager = UndoManager (GHCJS.JSRef UndoManager)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
newtype VirtualRenderer = VirtualRenderer (GHCJS.JSRef VirtualRenderer)
deriving (Data.Typeable.Typeable, GHCJS.ToJSRef, GHCJS.FromJSRef)
|
fpco/ghcjs-from-typescript
|
ghcjs-ace/JavaScript/AceAjax/Raw/Types.hs
|
bsd-3-clause
| 3,418
| 0
| 8
| 333
| 1,035
| 579
| 456
| 58
| 0
|
module NejlaCommon.Test
( module NejlaCommon.Test.Json
, module NejlaCommon.Test.Postgres
, module NejlaCommon.Test.Expectation
, module NejlaCommon.Test.Logging
) where
import NejlaCommon.Test.Expectation
import NejlaCommon.Test.Json
import NejlaCommon.Test.Logging
import NejlaCommon.Test.Postgres
|
nejla/nejla-common
|
src/NejlaCommon/Test.hs
|
bsd-3-clause
| 351
| 0
| 5
| 75
| 60
| 41
| 19
| 9
| 0
|
{-# LANGUAGE OverloadedStrings #-}
import Text.Regex.PCRE.Light (compile,compileM,match)
import qualified Text.Regex.PCRE.Light.Char8 as String (compile,compileM,match)
import Text.Regex.PCRE.Light.Base
import qualified Data.ByteString.Char8 as S
import System.IO
import Test.HUnit
import System.Exit (exitFailure)
import Control.Monad (when)
import Data.Either
import qualified Data.Map as M
import System.IO.Unsafe
import Control.OldException
import Control.Monad.Error
assertBool' s = assertBool (S.unpack s)
assertEqual' s = assertEqual (S.unpack s)
testLabel s = TestLabel (S.unpack s)
instance Error S.ByteString where
noMsg = S.empty
strMsg = S.pack
testRegex :: S.ByteString
-> [PCREOption]
-> [S.ByteString]
-> [Maybe [S.ByteString]]
-> Test
testRegex regex options inputs outputs = testLabel regex $
TestCase $ do
assertEqual' "Input/Output Length Check" (length inputs) (length outputs)
assertBool' "ByteString regex compile" =<<
case compile regex options of
r -> return $
and [ match r i [] == o
| (i,o) <- zip inputs outputs ]
assertBool' "ByteString regex compileM" =<<
case compileM regex options of
Left s -> do S.hPutStrLn stderr ("ERROR in ByteString in compileM " `S.append` (S.pack s))
return False
Right r -> return $
and [ match r i [] == o
| (i,o) <- zip inputs outputs ]
assertBool' "String regex" =<<
case String.compile (S.unpack regex) options of
r -> return $
and [ String.match r i [] == o
| (i,o) <- zip (map (S.unpack) inputs)
(map (fmap (map S.unpack)) outputs) ]
assertBool' "String regex" =<<
case String.compileM (S.unpack regex) options of
Left s -> do S.hPutStrLn stderr ("ERROR in String compileM: " `S.append` (S.pack s))
return False
Right r -> return $
and [ String.match r i [] == o
| (i,o) <- zip (map (S.unpack) inputs)
(map (fmap (map S.unpack)) outputs) ]
main = do counts <- runTestTT tests
when (errors counts > 0 || failures counts > 0) exitFailure
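-- Each 'testRegex' entry below pairs a pattern and its compile options with a
-- list of inputs and the expected match results. For instance, the first
-- entry expects "the quick brown fox" to match the first and third inputs
-- (returning the full match as the only group) and to fail on the two
-- capitalised variants.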
tests = TestList
[ testRegex "the quick brown fox" []
[ "the quick brown fox"
, "The quick brown FOX"
, "What do you know about the quick brown fox?"
, "What do you know about THE QUICK BROWN FOX?"
]
[ Just ["the quick brown fox"]
, Nothing
, Just ["the quick brown fox"]
, Nothing
]
, testLabel "compile failure" $
TestCase $ (assertBool' "compile failure" $
Left ("nothing to repeat" ) == compileM "*" [])
, testLabel "compile failure" $
TestCase $ (assertBool' "compile failure" =<< (return $
(Just ("Text.Regex.PCRE.Light: Error in regex: nothing to repeat"))
==
(unsafePerformIO $ do
handle (\e -> return (Just (S.pack $ show e)))
(compile "*" [] `seq` return Nothing))))
-- , testRegex "\0*" [] -- the embedded null in the pattern seems to be a problem
-- ["\0\0\0\0"]
-- [Just ["\0\0\0\0"]]
, testRegex "\1*" [] -- the embedded null in the pattern seems to be a problem
["\1\1\1\1"]
[Just ["\1\1\1\1"]]
, testRegex "The quick brown fox" [caseless]
["the quick brown fox"
,"The quick brown FOX"
,"What do you know about the quick brown fox?"
,"What do you know about THE QUICK BROWN FOX?"
]
[ Just ["the quick brown fox"]
, Just ["The quick brown FOX"]
, Just ["the quick brown fox"]
, Just ["THE QUICK BROWN FOX"]
]
, testRegex "a*abc?xyz+pqr{3}ab{2,}xy{4,5}pq{0,6}AB{0,}zz" []
["abxyzpqrrrabbxyyyypqAzz"
,"abxyzpqrrrabbxyyyypqAzz"
,"aabxyzpqrrrabbxyyyypqAzz"
,"aaabxyzpqrrrabbxyyyypqAzz"
,"aaaabxyzpqrrrabbxyyyypqAzz"
,"abcxyzpqrrrabbxyyyypqAzz"
,"aabcxyzpqrrrabbxyyyypqAzz"
,"aaabcxyzpqrrrabbxyyyypAzz"
,"aaabcxyzpqrrrabbxyyyypqAzz"
,"aaabcxyzpqrrrabbxyyyypqqAzz"
,"aaabcxyzpqrrrabbxyyyypqqqAzz"
,"aaabcxyzpqrrrabbxyyyypqqqqAzz"
,"aaabcxyzpqrrrabbxyyyypqqqqqAzz"
,"aaabcxyzpqrrrabbxyyyypqqqqqqAzz"
,"aaaabcxyzpqrrrabbxyyyypqAzz"
,"abxyzzpqrrrabbxyyyypqAzz"
,"aabxyzzzpqrrrabbxyyyypqAzz"
,"aaabxyzzzzpqrrrabbxyyyypqAzz"
,"aaaabxyzzzzpqrrrabbxyyyypqAzz"
,"abcxyzzpqrrrabbxyyyypqAzz"
,"aabcxyzzzpqrrrabbxyyyypqAzz"
,"aaabcxyzzzzpqrrrabbxyyyypqAzz"
,"aaaabcxyzzzzpqrrrabbxyyyypqAzz"
,"aaaabcxyzzzzpqrrrabbbxyyyypqAzz"
,"aaaabcxyzzzzpqrrrabbbxyyyyypqAzz"
,"aaabcxyzpqrrrabbxyyyypABzz"
,"aaabcxyzpqrrrabbxyyyypABBzz"
,">>>aaabxyzpqrrrabbxyyyypqAzz"
,">aaaabxyzpqrrrabbxyyyypqAzz"
,">>>>abcxyzpqrrrabbxyyyypqAzz"
,"abxyzpqrrabbxyyyypqAzz"
,"abxyzpqrrrrabbxyyyypqAzz"
,"abxyzpqrrrabxyyyypqAzz"
,"aaaabcxyzzzzpqrrrabbbxyyyyyypqAzz"
,"aaaabcxyzzzzpqrrrabbbxyyypqAzz"
,"aaabcxyzpqrrrabbxyyyypqqqqqqqAzz"
]
[ Just ["abxyzpqrrrabbxyyyypqAzz"]
, Just ["abxyzpqrrrabbxyyyypqAzz"]
, Just ["aabxyzpqrrrabbxyyyypqAzz"]
, Just ["aaabxyzpqrrrabbxyyyypqAzz"]
, Just ["aaaabxyzpqrrrabbxyyyypqAzz"]
, Just ["abcxyzpqrrrabbxyyyypqAzz"]
, Just ["aabcxyzpqrrrabbxyyyypqAzz"]
, Just ["aaabcxyzpqrrrabbxyyyypAzz"]
, Just ["aaabcxyzpqrrrabbxyyyypqAzz"]
, Just ["aaabcxyzpqrrrabbxyyyypqqAzz"]
, Just ["aaabcxyzpqrrrabbxyyyypqqqAzz"]
, Just ["aaabcxyzpqrrrabbxyyyypqqqqAzz"]
, Just ["aaabcxyzpqrrrabbxyyyypqqqqqAzz"]
, Just ["aaabcxyzpqrrrabbxyyyypqqqqqqAzz"]
, Just ["aaaabcxyzpqrrrabbxyyyypqAzz"]
, Just ["abxyzzpqrrrabbxyyyypqAzz"]
, Just ["aabxyzzzpqrrrabbxyyyypqAzz"]
, Just ["aaabxyzzzzpqrrrabbxyyyypqAzz"]
, Just ["aaaabxyzzzzpqrrrabbxyyyypqAzz"]
, Just ["abcxyzzpqrrrabbxyyyypqAzz"]
, Just ["aabcxyzzzpqrrrabbxyyyypqAzz"]
, Just ["aaabcxyzzzzpqrrrabbxyyyypqAzz"]
, Just ["aaaabcxyzzzzpqrrrabbxyyyypqAzz"]
, Just ["aaaabcxyzzzzpqrrrabbbxyyyypqAzz"]
, Just ["aaaabcxyzzzzpqrrrabbbxyyyyypqAzz"]
, Just ["aaabcxyzpqrrrabbxyyyypABzz"]
, Just ["aaabcxyzpqrrrabbxyyyypABBzz"]
, Just ["aaabxyzpqrrrabbxyyyypqAzz"]
, Just ["aaaabxyzpqrrrabbxyyyypqAzz"]
, Just ["abcxyzpqrrrabbxyyyypqAzz"]
, Nothing
, Nothing
, Nothing
, Nothing
, Nothing
, Nothing
]
, testRegex "^(abc){1,2}zz" []
["abczz"
,"abcabczz"
,"zz"
,"abcabcabczz"
,">>abczz"]
[ Just ["abczz","abc"]
, Just ["abcabczz", "abc"]
, Nothing
, Nothing
, Nothing ]
, testRegex "^(b+?|a){1,2}?c" []
["bc",
"bbc",
"bbbc",
"bac",
"bbac",
"aac",
"abbbbbbbbbbbc",
"bbbbbbbbbbbac",
"aaac",
"abbbbbbbbbbbac"]
[Just ["bc", "b"],
Just ["bbc", "b"],
Just ["bbbc", "bb"],
Just ["bac", "a"],
Just ["bbac", "a"],
Just ["aac", "a"],
Just ["abbbbbbbbbbbc", "bbbbbbbbbbb"],
Just ["bbbbbbbbbbbac", "a"],
Nothing,
Nothing]
, testRegex "^(b+|a){1,2}c" []
["bc",
"bbc",
"bbbc",
"bac",
"bbac",
"aac",
"abbbbbbbbbbbc",
"bbbbbbbbbbbac",
"aaac",
"abbbbbbbbbbbac"]
[Just ["bc", "b"],
Just ["bbc", "bb"],
Just ["bbbc", "bbb"],
Just ["bac", "a"],
Just ["bbac", "a"],
Just ["aac", "a"],
Just ["abbbbbbbbbbbc", "bbbbbbbbbbb"],
Just ["bbbbbbbbbbbac", "a"],
Nothing,
Nothing]
, testRegex "^(b+|a){1,2}?bc" []
["bbc"]
[Just ["bbc", "b"]]
, testRegex "^(b*|ba){1,2}?bc" []
["babc",
"bbabc",
"bababc",
"bababbc",
"babababc"]
[Just ["babc","ba"],
Just ["bbabc","ba"],
Just ["bababc","ba"],
Nothing,
Nothing]
, testRegex "^(ba|b*){1,2}?bc" []
["babc",
"bbabc",
"bababc",
"bababbc",
"babababc"]
[Just ["babc","ba"],
Just ["bbabc","ba"],
Just ["bababc","ba"],
Nothing,
Nothing]
, testRegex "^[ab\\]cde]" []
["athing",
"bthing",
"]thing",
"cthing",
"dthing",
"ething",
"fthing",
"[thing",
"\\\\thing"]
[Just ["a"],
Just ["b"],
Just ["]"],
Just ["c"],
Just ["d"],
Just ["e"],
Nothing,
Nothing,
Nothing]
, testRegex "^[]cde]" []
["]thing",
"cthing",
"dthing",
"ething",
"athing",
"fthing"]
[Just ["]"],
Just ["c"],
Just ["d"],
Just ["e"],
Nothing,
Nothing]
, testRegex "^[^ab\\]cde]" []
["fthing",
"[thing",
"\\\\thing",
"athing",
"bthing",
"]thing",
"cthing",
"dthing",
"ething"]
[Just ["f"],
Just ["["],
Just ["\\"],
Nothing,
Nothing,
Nothing,
Nothing,
Nothing,
Nothing]
, testRegex "^\129" []
["\129"]
[Just ["\x81"]]
, testRegex "^\255" []
["\255"]
[Just ["\xff"]]
, testRegex "^[0-9]+$" []
["0",
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
"10",
"100",
"abc"]
[Just ["0"],
Just ["1"],
Just ["2"],
Just ["3"],
Just ["4"],
Just ["5"],
Just ["6"],
Just ["7"],
Just ["8"],
Just ["9"],
Just ["10"],
Just ["100"],
Nothing]
, testRegex "^.*nter" []
["enter",
"inter",
"uponter"]
[Just ["enter"],
Just ["inter"],
Just ["uponter"]]
, testRegex "^xxx[0-9]+$" []
["xxx0",
"xxx1234",
"xxx"]
[Just ["xxx0"],
Just ["xxx1234"],
Nothing]
, testRegex "^.+[0-9][0-9][0-9]$" []
["x123",
"xx123",
"123456",
"123",
"x1234"]
[Just ["x123"],
Just ["xx123"],
Just ["123456"],
Nothing,
Just ["x1234"]]
, testRegex "^.+?[0-9][0-9][0-9]$" []
["x123",
"xx123",
"123456",
"123",
"x1234"]
[Just ["x123"],
Just ["xx123"],
Just ["123456"],
Nothing,
Just ["x1234"]]
-- test matching more than 1 subpattern
, testRegex "^([^!]+)!(.+)=apquxz\\.ixr\\.zzz\\.ac\\.uk$" []
["abc!pqr=apquxz.ixr.zzz.ac.uk",
"!pqr=apquxz.ixr.zzz.ac.uk",
"abc!=apquxz.ixr.zzz.ac.uk",
"abc!pqr=apquxz:ixr.zzz.ac.uk",
"abc!pqr=apquxz.ixr.zzz.ac.ukk"]
[Just ["abc!pqr=apquxz.ixr.zzz.ac.uk", "abc", "pqr"],
Nothing,
Nothing,
Nothing,
Nothing]
, testRegex ":" []
["Well, we need a colon: somewhere",
"*** Fail if we don't"]
[Just [":"],
Nothing]
, testRegex "([\\da-f:]+)$" [caseless]
["0abc",
"abc",
"fed",
"E",
"::",
"5f03:12C0::932e",
"fed def",
"Any old stuff",
"*** Failers",
"0zzz",
"gzzz",
"fed\x20",
"Any old rubbish"]
[Just ["0abc", "0abc"],
Just ["abc", "abc"],
Just ["fed", "fed"],
Just ["E", "E"],
Just ["::", "::"],
Just ["5f03:12C0::932e", "5f03:12C0::932e"],
Just ["def", "def"],
Just ["ff", "ff"],
Nothing,
Nothing,
Nothing,
Nothing,
Nothing]
, testRegex "^.*\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})$" []
[".1.2.3",
"A.12.123.0",
".1.2.3333",
"1.2.3",
"1234.2.3"]
[Just [".1.2.3", "1", "2", "3"],
Just ["A.12.123.0", "12", "123", "0"],
Nothing,
Nothing,
Nothing]
, testRegex "^(\\d+)\\s+IN\\s+SOA\\s+(\\S+)\\s+(\\S+)\\s*\\(\\s*$" []
["1 IN SOA non-sp1 non-sp2(",
"1 IN SOA non-sp1 non-sp2 (",
"1IN SOA non-sp1 non-sp2("]
[Just ["1 IN SOA non-sp1 non-sp2(", "1", "non-sp1", "non-sp2"],
Just ["1 IN SOA non-sp1 non-sp2 (", "1", "non-sp1", "non-sp2"],
Nothing]
, testRegex "^[a-zA-Z\\d][a-zA-Z\\d\\-]*(\\.[a-zA-Z\\d][a-zA-z\\d\\-]*)*\\.$" []
["a.",
"Z.",
"2.",
"ab-c.pq-r.",
"sxk.zzz.ac.uk.",
"x-.y-.",
"*** Failers",
"-abc.peq."]
[Just ["a."],
Just ["Z."],
Just ["2."],
Just ["ab-c.pq-r.", ".pq-r"],
Just ["sxk.zzz.ac.uk.", ".uk"],
Just ["x-.y-.", ".y-"],
Nothing,
Nothing]
, testRegex "^\\*\\.[a-z]([a-z\\-\\d]*[a-z\\d]+)?(\\.[a-z]([a-z\\-\\d]*[a-z\\d]+)?)*$" []
["*.a",
"*.b0-a",
"*.c3-b.c",
"*.c-a.b-c",
"*** Failers",
"*.0",
"*.a-",
"*.a-b.c-",
"*.c-a.0-c"]
[Just ["*.a"],
Just ["*.b0-a", "0-a"],
Just ["*.c3-b.c", "3-b", ".c"],
Just ["*.c-a.b-c", "-a", ".b-c", "-c"],
Nothing,
Nothing,
Nothing,
Nothing,
Nothing]
, testRegex "^(?=ab(de))(abd)(e)" []
["abde"]
[Just ["abde", "de", "abd", "e"]]
, testRegex "^(?!(ab)de|x)(abd)(f)" []
["abdf"]
[Just ["abdf", "", "abd", "f"]]
, testRegex "^(?=(ab(cd)))(ab)" []
["abcd"]
[Just ["ab", "abcd", "cd", "ab"]]
, testRegex "^[\\da-f](\\.[\\da-f])*$" [caseless]
["a.b.c.d",
"A.B.C.D",
"a.b.c.1.2.3.C"]
[Just ["a.b.c.d", ".d"],
Just ["A.B.C.D", ".D"],
Just ["a.b.c.1.2.3.C", ".C"]]
, testRegex "^\".*\"\\s*(;.*)?$" []
["\"1234\"",
"\"abcd\" ;",
"\"\" ; rhubarb",
"*** Failers",
"\\\"1234\\\" : things"]
[Just ["\"1234\""],
Just ["\"abcd\" ;", ";"],
Just ["\"\" ; rhubarb", "; rhubarb"],
Nothing,
Nothing]
, testRegex "^$" []
["",
"*** Failers"]
[Just [""],
Nothing]
, testRegex " ^ a (?# begins with a) b\\sc (?# then b c) $ (?# then end)" [extended]
["ab c",
"*** Failers",
"abc",
"ab cde"]
[Just ["ab c"],
Nothing,
Nothing,
Nothing]
, testRegex "(?x) ^ a (?# begins with a) b\\sc (?# then b c) $ (?# then end)" []
["ab c",
"*** Failers",
"abc",
"ab cde"]
[Just ["ab c"],
Nothing,
Nothing,
Nothing]
, testRegex "^ a\\ b[c ]d $" [extended]
["a bcd",
"a b d",
"*** Failers",
"abcd",
"ab d"]
[Just ["a bcd"],
Just ["a b d"],
Nothing,
Nothing,
Nothing]
, testRegex "^(a(b(c)))(d(e(f)))(h(i(j)))(k(l(m)))$" []
["abcdefhijklm"]
[Just ["abcdefhijklm",
"abc", "bc",
"c", "def",
"ef", "f",
"hij", "ij",
"j", "klm",
"lm", "m"]]
, testRegex "^(?:a(b(c)))(?:d(e(f)))(?:h(i(j)))(?:k(l(m)))$" []
["abcdefhijklm"]
[Just ["abcdefhijklm",
"bc", "c", "ef", "f", "ij", "j", "lm", "m"]]
, testRegex "^[.^$|()*+?{,}]+" []
[".^$(*+)|{?,?}"]
[Just [".^$(*+)|{?,?}"]]
, testRegex "^a*\\w" []
["z",
"az",
"aaaz",
"a",
"aa",
"aaaa",
"a+",
"aa+"]
[Just ["z"],
Just ["az"],
Just ["aaaz"],
Just ["a"],
Just ["aa"],
Just ["aaaa"],
Just ["a"],
Just ["aa"]]
, testRegex "^a*?\\w" []
["z",
"az",
"aaaz",
"a",
"aa",
"aaaa",
"a+",
"aa+"]
[Just ["z"],
Just ["a"],
Just ["a"],
Just ["a"],
Just ["a"],
Just ["a"],
Just ["a"],
Just ["a"]]
, testRegex "^a+\\w" []
["az",
"aaaz",
"aa",
"aaaa",
"aa+"]
[Just ["az"],
Just ["aaaz"],
Just ["aa"],
Just ["aaaa"],
Just ["aa"]]
, testRegex "^a+?\\w" []
["az",
"aaaz",
"aa",
"aaaa",
"aa+"]
[Just ["az"],
Just ["aa"],
Just ["aa"],
Just ["aa"],
Just ["aa"]]
, testRegex "^\\d{8}\\w{2,}" []
["1234567890",
"12345678ab",
"12345678__",
"*** Failers",
"1234567"]
[Just ["1234567890"],
Just ["12345678ab"],
Just ["12345678__"],
Nothing,
Nothing]
, testRegex "^[aeiou\\d]{4,5}$" []
["uoie",
"1234",
"12345",
"aaaaa",
"*** Failers",
"123456"]
[Just ["uoie"],
Just ["1234"],
Just ["12345"],
Just ["aaaaa"],
Nothing,
Nothing]
, testRegex "^[aeiou\\d]{4,5}?" []
["uoie",
"1234",
"12345",
"aaaaa",
"123456"]
[Just ["uoie"],
Just ["1234"],
Just ["1234"],
Just ["aaaa"],
Just ["1234"]]
, testRegex "\\A(abc|def)=(\\1){2,3}\\Z" []
["abc=abcabc",
"def=defdefdef",
"*** Failers",
"abc=defdef"]
[Just ["abc=abcabc", "abc", "abc"],
Just ["def=defdefdef", "def", "def"],
Nothing,
Nothing]
, testRegex "^(a)(b)(c)(d)(e)(f)(g)(h)(i)(j)(k)\\11*(\\3\\4)\\1(?#)2$" []
["abcdefghijkcda2",
"abcdefghijkkkkcda2"]
[Just ["abcdefghijkcda2", "a", "b",
"c", "d", "e", "f", "g", "h", "i", "j", "k", "cd"],
Just ["abcdefghijkkkkcda2", "a", "b", "c", "d",
"e", "f", "g", "h", "i", "j", "k", "cd"]]
, testRegex "(cat(a(ract|tonic)|erpillar)) \\1()2(3)" []
["cataract cataract23",
"catatonic catatonic23",
"caterpillar caterpillar23"]
[Just ["cataract cataract23", "cataract", "aract", "ract", "", "3"],
Just ["catatonic catatonic23", "catatonic", "atonic", "tonic", "", "3"],
Just ["caterpillar caterpillar23", "caterpillar", "erpillar", "", "", "3"]]
, testRegex "^From +([^ ]+) +[a-zA-Z][a-zA-Z][a-zA-Z] +[a-zA-Z][a-zA-Z][a-zA-Z] +[0-9]?[0-9] +[0-9][0-9]:[0-9][0-9]" []
["From abcd Mon Sep 01 12:33:02 1997"]
[Just ["From abcd Mon Sep 01 12:33", "abcd"]]
, testRegex "^From\\s+\\S+\\s+([a-zA-Z]{3}\\s+){2}\\d{1,2}\\s+\\d\\d:\\d\\d" []
["From abcd Mon Sep 01 12:33:02 1997",
"From abcd Mon Sep 1 12:33:02 1997",
"*** Failers",
"From abcd Sep 01 12:33:02 1997"]
[Just ["From abcd Mon Sep 01 12:33", "Sep "],
Just ["From abcd Mon Sep 1 12:33", "Sep "],
Nothing,
Nothing]
, testRegex "\\w+(?=\t)" []
["the quick brown\t fox"]
[Just ["brown"]]
, testRegex "foo(?!bar)(.*)" []
["foobar is foolish see?"]
[Just ["foolish see?", "lish see?"]]
, testRegex "(?:(?!foo)...|^.{0,2})bar(.*)" []
["foobar crowbar etc",
"barrel",
"2barrel",
"A barrel"]
[Just ["rowbar etc", " etc"],
Just ["barrel", "rel"],
Just ["2barrel", "rel"],
Just ["A barrel", "rel"]]
, testRegex "^(\\D*)(?=\\d)(?!123)" []
["abc456",
"*** Failers",
"abc123"]
[Just ["abc", "abc"],
Nothing,
Nothing]
, testRegex "^(a)\\1{2,3}(.)" []
["aaab",
"aaaab",
"aaaaab",
"aaaaaab"]
[Just ["aaab", "a","b"],
Just ["aaaab","a","b"],
Just ["aaaaa","a","a"],
Just ["aaaaa","a","a"]]
, testRegex "(?!^)abc" []
["the abc",
"*** Failers",
"abc"]
[Just ["abc"],
Nothing,
Nothing]
, testRegex "(?=^)abc" []
["abc",
"*** Failers",
"the abc"]
[Just ["abc"],
Nothing,
Nothing]
, testRegex "^[ab]{1,3}(ab*|b)" []
["aabbbbb"]
[Just ["aabb", "b"]]
, testRegex "^[ab]{1,3}?(ab*|b)" []
["aabbbbb"]
[Just ["aabbbbb", "abbbbb"]]
, testRegex "^[ab]{1,3}?(ab*?|b)" []
["aabbbbb"]
[Just ["aa", "a"]]
, testRegex "^[ab]{1,3}(ab*?|b)" []
["aabbbbb"]
[Just ["aabb", "b"]]
, testRegex "^(cow|)\\1(bell)" []
["cowcowbell",
"bell",
"*** Failers",
"cowbell"]
[Just ["cowcowbell", "cow", "bell"],
Just ["bell", "", "bell"],
Nothing,
Nothing]
, testRegex "^\\s" []
["\o40abc",
"\nabc",
"\rabc",
"\tabc",
"abc"]
[Just [" "],
Just ["\x0a"],
Just ["\x0d"],
Just ["\x09"],
Nothing]
, testRegex "^(a|)\\1*b" []
["ab",
"aaaab",
"b",
"acb"]
[Just ["ab", "a"],
Just ["aaaab", "a"],
Just ["b", ""],
Nothing]
, testRegex "^(a|)\\1+b" []
["aab",
"aaaab",
"b",
"*** Failers",
"ab"]
[Just ["aab", "a"],
Just ["aaaab", "a"],
Just ["b", ""],
Nothing,
Nothing]
, testRegex "^(a|)\\1?b" []
["ab",
"aab",
"b",
"acb"]
[Just ["ab", "a"],
Just ["aab", "a"],
Just ["b", ""],
Nothing]
, testRegex "^(a|)\\1{2}b" []
["aaab",
"b",
"ab",
"aab",
"aaaab"]
[Just ["aaab", "a"],
Just ["b", ""],
Nothing,
Nothing,
Nothing]
, testRegex "^(a|)\\1{2,3}b" []
["aaab",
"aaaab",
"b",
"ab",
"aab",
"aaaaab"]
[Just ["aaab", "a"],
Just ["aaaab", "a"],
Just ["b", ""],
Nothing,
Nothing,
Nothing]
, testRegex "ab{1,3}bc" []
["abbbbc",
"abbbc",
"abbc",
"abc",
"abbbbbc"]
[Just ["abbbbc"],
Just ["abbbc"],
Just ["abbc"],
Nothing,
Nothing]
, testRegex "([^.]*)\\.([^:]*):[T ]+(.*)" []
["track1.title:TBlah blah blah"]
[Just ["track1.title:TBlah blah blah", "track1", "title", "Blah blah blah"]]
, testRegex "([^.]*)\\.([^:]*):[T ]+(.*)" [caseless]
["track1.title:TBlah blah blah"]
[Just ["track1.title:TBlah blah blah", "track1", "title", "Blah blah blah"]]
, testRegex "([^.]*)\\.([^:]*):[t ]+(.*)" [caseless]
["track1.title:TBlah blah blah"]
[Just ["track1.title:TBlah blah blah", "track1", "title", "Blah blah blah"]]
, testRegex "^[W-c]+$" []
["WXY_^abc",
"wxy"]
[Just ["WXY_^abc"],
Nothing]
, testRegex "^[W-c]+$" [caseless]
["WXY_^abc",
"wxy_^ABC"]
[Just ["WXY_^abc"],
Just ["wxy_^ABC"]]
, testRegex "^[\\x3f-\\x5F]+$" [caseless]
["WXY_^abc",
"wxy_^ABC"]
[Just ["WXY_^abc"],
Just ["wxy_^ABC"]]
, testRegex "^abc$" []
["abc",
"qqq\\nabc",
"abc\\nzzz",
"qqq\\nabc\\nzzz"]
[Just ["abc"],
Nothing,
Nothing,
Nothing]
, testRegex "(?:b)|(?::+)" []
["b::c",
"c::b"]
[Just ["b"],
Just ["::"]]
, testRegex "[-az]+" []
["az-",
"*** Failers",
"b"]
[Just ["az-"],
Just ["a"],
Nothing]
, testRegex "[az-]+" []
["za-",
"*** Failers",
"b"]
[Just ["za-"],
Just ["a"],
Nothing]
, testRegex "[a\\-z]+" []
["a-z",
"*** Failers",
"b"]
[Just ["a-z"],
Just ["a"],
Nothing]
, testRegex "[a-z]+" []
["abcdxyz"]
[Just ["abcdxyz"]]
, testRegex "[\\d-]+" []
["12-34",
"aaa"]
[Just ["12-34"],
Nothing]
, testRegex "[\\d-z]+" []
["12-34z",
"aaa"]
[Just ["12-34z"],
Nothing]
, testRegex "\\x20Z" []
["the Zoo",
"*** Failers",
"Zulu"]
[Just [" Z"],
Nothing,
Nothing]
, testRegex "(abc)\\1" [caseless]
["abcabc",
"ABCabc",
"abcABC"]
[Just ["abcabc", "abc"],
Just ["ABCabc", "ABC"],
Just ["abcABC", "abc"]]
, testRegex "ab{3cd" []
["ab{3cd"]
[Just ["ab{3cd"]]
, testRegex "ab{3,cd" []
["ab{3,cd"]
[Just ["ab{3,cd"]]
, testRegex "ab{3,4a}cd" []
["ab{3,4a}cd"]
[Just ["ab{3,4a}cd"]]
, testRegex "{4,5a}bc" []
["{4,5a}bc"]
[Just ["{4,5a}bc"]]
, testRegex "abc$" []
["abc",
"abc\n",
"*** Failers",
"abc\ndef"]
[Just ["abc"],
Just ["abc"],
Nothing,
Nothing]
, testRegex "(abc)\\123" []
["abc\x53"]
[Just ["abcS", "abc"]]
, testRegex "(abc)\\223" []
["abc\x93"]
[Just ["abc\x93", "abc"]]
, testRegex "(abc)\\323" []
["abc\xd3"]
[Just ["abc\xd3", "abc"]]
, testRegex "(abc)\\100" []
["abc\x40",
"abc\o100"]
[Just ["abc@", "abc"],
Just ["abc@", "abc"]]
, testRegex "(a)(b)(c)(d)(e)(f)(g)(h)(i)(j)(k)(l)\\12\\123" []
["abcdefghijkllS"]
[Just ["abcdefghijkllS",
"a",
"b",
"c",
"d",
"e",
"f",
"g",
"h",
"i",
"j",
"k",
"l"]]
, testRegex "(a)(b)(c)(d)(e)(f)(g)(h)(i)(j)(k)\\12\\123" []
["abcdefghijk\o12S"]
[Just ["abcdefghijk\x0aS",
"a",
"b",
"c",
"d",
"e",
"f",
"g",
"h",
"i",
"j",
"k"]]
, testRegex "ab\\idef" []
["abidef"]
[Just ["abidef"]]
, testRegex "a{0}bc" []
["bc"]
[Just ["bc"]]
, testRegex "(a|(bc)){0,0}?xyz" []
["xyz"]
[Just ["xyz"]]
, testRegex "(?s)a.b" []
["a\nb"]
[Just ["a\nb"]]
, testRegex "^([^a])([^\\b])([^c]*)([^d]{3,4})" []
["baNOTccccd",
"baNOTcccd",
"baNOTccd",
"bacccd",
"anything",
"b\bc ",
"baccd"]
[Just ["baNOTcccc", "b", "a", "NOT", "cccc"],
Just ["baNOTccc", "b", "a", "NOT", "ccc"],
Just ["baNOTcc", "b", "a", "NO", "Tcc"],
Just ["baccc", "b", "a", "", "ccc"],
Nothing,
Nothing,
Nothing]
, testRegex "^\\d{8,}\\@.+[^k]$" []
["12345678@a.b.c.d",
"123456789@x.y.z",
"*** Failers",
"12345678@x.y.uk",
"1234567@a.b.c.d "]
[Just ["12345678@a.b.c.d"],
Just ["123456789@x.y.z"],
Nothing,
Nothing,
Nothing]
, testRegex "(a)\\1{8,}" []
["aaaaaaaaa",
"aaaaaaaaaa",
"*** Failers",
"aaaaaaa "]
[Just ["aaaaaaaaa", "a"],
Just ["aaaaaaaaaa", "a"],
Nothing,
Nothing]
, testRegex "[^a]" []
["aaaabcd",
"aaAabcd "]
[Just ["b"],
Just ["A"]]
, testRegex "[^a]" [caseless]
["aaaabcd",
"aaAabcd "]
[Just ["b"],
Just ["b"]]
, testRegex "[^az]" []
["aaaabcd",
"aaAabcd "]
[Just ["b"],
Just ["A"]]
, testRegex "[^az]" [caseless]
["aaaabcd",
"aaAabcd "]
[Just ["b"],
Just ["b"]]
, testRegex "P[^*]TAIRE[^*]{1,6}?LL" []
["xxxxxxxxxxxPSTAIREISLLxxxxxxxxx"]
[Just ["PSTAIREISLL"]]
, testRegex "P[^*]TAIRE[^*]{1,}?LL" []
["xxxxxxxxxxxPSTAIREISLLxxxxxxxxx"]
[Just ["PSTAIREISLL"]]
, testRegex "(.*?)(\\d+)" []
["I have 2 numbers: 53147"]
[Just ["I have 2", "I have ", "2"]]
, testRegex "(.*)(\\d+)$" []
["I have 2 numbers: 53147"]
[Just ["I have 2 numbers: 53147", "I have 2 numbers: 5314", "7"]]
, testRegex "(.*?)(\\d+)$" []
["I have 2 numbers: 53147"]
[Just ["I have 2 numbers: 53147", "I have 2 numbers: ", "53147"]]
, testRegex "(.*)\\b(\\d+)$" []
["I have 2 numbers: 53147"]
[Just ["I have 2 numbers: 53147", "I have 2 numbers: ", "53147"]]
, testRegex "(.*\\D)(\\d+)$" []
["I have 2 numbers: 53147"]
[Just ["I have 2 numbers: 53147", "I have 2 numbers: ", "53147"]]
, testRegex "word (?:[a-zA-Z0-9]+ ){0,10}otherword" []
["word cat dog elephant mussel cow horse canary baboon snake shark otherword",
"word cat dog elephant mussel cow horse canary baboon snake shark"]
[Just ["word cat dog elephant mussel cow horse canary baboon snake shark otherword"],
Nothing]
, testRegex "word (?:[a-zA-Z0-9]+ ){0,300}otherword" []
["word cat dog elephant mussel cow horse canary baboon snake shark the quick brown fox and the lazy dog and several other words getting close to thirty by now I hope"]
[Nothing]
, testRegex "^(a){0,0}" []
["bcd",
"abc",
"aab "]
[Just [""],
Just [""],
Just [""]]
, testRegex "^(a){0,1}" []
["bcd",
"abc",
"aab "]
[Just [""],
Just ["a", "a"],
Just ["a", "a"]]
, testRegex "^(a){0,2}" []
["bcd",
"abc",
"aab "]
[Just [""],
Just ["a", "a"],
Just ["aa", "a"]]
, testRegex "^(a){0,3}" []
["bcd",
"abc",
"aab",
"aaa "]
[Just [""],
Just ["a", "a"],
Just ["aa", "a"],
Just ["aaa", "a"]]
, testRegex "^(a){0,3}" []
["bcd",
"abc",
"aab",
"aaa "]
[Just [""],
Just ["a", "a"],
Just ["aa", "a"],
Just ["aaa", "a"]]
, testRegex "^(a){0,}" []
["bcd",
"abc",
"aab",
"aaa",
"aaaaaaaa "]
[Just [""],
Just ["a", "a"],
Just ["aa", "a"],
Just ["aaa", "a"],
Just ["aaaaaaaa", "a"]]
, testRegex "^(a){1,1}" []
["bcd",
"abc",
"aab "]
[Nothing,
Just ["a", "a"],
Just ["a", "a"]]
, testRegex "^(a){1,2}" []
["bcd",
"abc",
"aab "]
[Nothing,
Just ["a", "a"],
Just ["aa", "a"]]
, testRegex "^(a){1,3}" []
["bcd",
"abc",
"aab",
"aaa "]
[Nothing,
Just ["a", "a"],
Just ["aa", "a"],
Just ["aaa", "a"]]
, testRegex ".*\\.gif" []
["borfle\nbib.gif\nno"]
[Just ["bib.gif"]]
, testRegex ".{0,}\\.gif" []
["borfle\nbib.gif\nno"]
[Just ["bib.gif"]]
, testRegex ".*\\.gif" [multiline]
["borfle\nbib.gif\nno"]
[Just ["bib.gif"]]
, testRegex ".*\\.gif" [dotall]
["borfle\nbib.gif\nno"]
[Just ["borfle\nbib.gif"]]
, testRegex ".*$" [multiline]
["borfle\nbib.gif\nno"]
[Just ["borfle"]]
, testRegex ".*$" [dotall]
["borfle\nbib.gif\nno"]
[Just ["borfle\nbib.gif\nno"]]
, testRegex ".*$" [multiline]
["borfle\nbib.gif\nno\\n"]
[Just ["borfle"]]
, testRegex "(?ms)^B" []
["abc\nB"]
[Just ["B"]]
, testRegex "(?s)B$" []
["B\n"]
[Just ["B"]]
, testRegex "^[abcdefghijklmnopqrstuvwxy0123456789]" []
["n",
"z "]
[Just ["n"],
Nothing]
, testRegex "abcde{0,0}" []
["abcd",
"abce "]
[Just ["abcd"],
Nothing]
, testRegex "^(b+?|a){1,2}?c" []
["bac",
"bbac",
"bbbac",
"bbbbac",
"bbbbbac "]
[Just ["bac","a"],
Just ["bbac","a"],
Just ["bbbac","a"],
Just ["bbbbac","a"],
Just ["bbbbbac","a"]]
, testRegex "(AB)*?\\1" []
["ABABAB"]
[Just ["ABAB", "AB"]]
, testRegex "(.*(.)?)*" []
["abcd"]
[Just ["abcd", ""]]
{-
, testRegex "(?:a|)*\\d" []
["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4"]
[Nothing,
Just ["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4"]]
-}
, testRegex "^(?:a(?:(?:))+)+" []
["aaaa"]
[Just ["aaaa"]]
, testRegex "^(a()+)+" []
["aaaa"]
[Just ["aaaa", "a", ""]]
, testRegex "^(?:a(?:(?:))*)*" []
["aaaa"]
[Just ["aaaa"]]
, testRegex "^(a()*)*" []
["aaaa"]
[Just ["aaaa", "a", ""]]
, testRegex "^(a){1,}" []
["bcd",
"abc",
"aab",
"aaa",
"aaaaaaaa "]
[Nothing,
Just ["a", "a"],
Just ["aa", "a"],
Just ["aaa", "a"],
Just ["aaaaaaaa", "a"]]
, testRegex "(?s)(.*X|^B)" []
["abcde\n1234Xyz",
"BarFoo ",
"*** Failers ",
"abcde\nBar "]
[Just ["abcde\n1234X", "abcde\n1234X"],
Just ["B", "B"],
Nothing,
Nothing]
, testRegex "(?s:.*X|^B)" []
["abcde\n1234Xyz",
"BarFoo ",
"*** Failers ",
"abcde\nBar "]
[Just ["abcde\n1234X"],
Just ["B"],
Nothing,
Nothing]
, testRegex "\\w{3}(?<!bar)foo" []
["catfood",
"*** Failers",
"foo",
"barfoo",
"towbarfoo"]
[Just ["catfoo"],
Nothing,
Nothing,
Nothing,
Nothing]
, testRegex "(?>(\\.\\d\\d[1-9]?))\\d+" []
["1.230003938",
"1.875000282",
"1.235 "]
[Just [".230003938", ".23"],
Just [".875000282", ".875"],
Nothing]
, testRegex "^((?>\\w+)|(?>\\s+))*$" []
["now is the time for all good men to come to the aid of the party",
"this is not a line with only words and spaces!"]
[Just ["now is the time for all good men to come to the aid of the party", "party"],
Nothing]
, testRegex "((?>\\d+))(\\w)" []
["12345a",
"12345+ "]
[Just ["12345a", "12345", "a"],
Nothing]
, testRegex "(?>a+)b" []
["aaab"]
[Just ["aaab"]]
, testRegex "((?>a+)b)" []
["aaab"]
[Just ["aaab", "aaab"]]
, testRegex "(?>(a+))b" []
["aaab"]
[Just ["aaab", "aaa"]]
, testRegex "(?>b)+" []
["aaabbbccc"]
[Just ["bbb"]]
, testRegex "(?>a+|b+|c+)*c" []
["aaabbbbccccd"]
[Just ["aaabbbbc"]]
, testRegex "(?:(a)|b)(?(1)A|B)" []
["aA",
"bB",
"aB",
"bA "]
[Just ["aA", "a"],
Just ["bB"],
Nothing,
Nothing]
, testRegex "^(a)?(?(1)a|b)+$" []
["aa",
"b",
-- "bb ", -- ?
"ab "]
[Just ["aa", "a"],
Just ["b"],
-- Just ["bb"],
Nothing]
, testRegex "^(?(?=abc)\\w{3}:|\\d\\d)$" []
["abc:",
"12",
"123",
"xyz "]
[Just ["abc:"],
Just ["12"],
Nothing,
Nothing]
, testRegex "(?(?<!foo)cat|bar)" []
["foobar",
"cat",
"fcat",
"focat ",
"foocat "]
[Just ["bar"],
Just ["cat"],
Just ["cat"],
Just ["cat"],
Nothing]
, testRegex "^(?(2)a|(1)(2))+$" []
["12",
"12a",
"12aa",
"*** Failers",
"1234 "]
[Just ["12", "1", "2"],
Just ["12a", "1", "2"],
Just ["12aa", "1", "2"],
Nothing,
Nothing]
, testRegex "(?<=foo\\n)^bar" [multiline]
["foo\nbar",
"*** Failers",
"bar",
"baz\nbar "]
[Just ["bar"],
Nothing,
Nothing,
Nothing]
, testRegex "(?<=(?<!foo)bar)baz" []
["barbaz",
"barbarbaz ",
"koobarbaz ",
"*** Failers",
"baz",
"foobarbaz "]
[Just ["baz"],
Just ["baz"],
Just ["baz"],
Nothing,
Nothing,
Nothing]
{-
, testRegex "^(a\\1?){4}$" []
["a",
"aa",
"aaa",
"aaaa",
"aaaaa",
"aaaaaaa",
"aaaaaaaa",
"aaaaaaaaa",
"aaaaaaaaaa",
"aaaaaaaaaaa",
"aaaaaaaaaaaa",
"aaaaaaaaaaaaa",
"aaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaa "]
[Nothing,
Nothing,
Nothing,
Just ["aaaa", "a"],
Just ["aaaaa", "a"],
Just ["aaaaaaa", "a"],
Nothing,
Nothing,
Just ["aaaaaaaaaa", "aaaa"],
Nothing,
Nothing,
Nothing,
Nothing,
Nothing,
Nothing]
-}
, testRegex "abc" []
["abc",
"xabcy",
"ababc",
"*** Failers",
"xbc",
"axc",
"abx"]
[Just ["abc"],
Just ["abc"],
Just ["abc"],
Nothing,
Nothing,
Nothing,
Nothing]
, testRegex "ab*c" []
["abc"]
[Just ["abc"]]
, testRegex "ab*bc" []
["abc",
"abbc",
"abbbbc"]
[Just ["abc"],
Just ["abbc"],
Just ["abbbbc"]]
, testRegex ".{1}" []
["abbbbc"]
[Just ["a"]]
, testRegex ".{3,4}" []
["abbbbc"]
[Just ["abbb"]]
, testRegex "ab{0,}bc" []
["abbbbc"]
[Just ["abbbbc"]]
, testRegex "ab+bc" []
["abbc",
"abc",
"abq"]
[Just ["abbc"],
Nothing,
Nothing]
, testRegex "ab{1,}bc" []
[]
[]
, testRegex "ab+bc" []
["abbbbc"]
[Just ["abbbbc"]]
, testRegex "ab{1,}bc" []
["abbbbc"]
[Just ["abbbbc"]]
, testRegex "ab{1,3}bc" []
["abbbbc"]
[Just ["abbbbc"]]
, testRegex "ab{3,4}bc" []
["abbbbc"]
[Just ["abbbbc"]]
, testRegex "ab{4,5}bc" []
["*** Failers",
"abq",
"abbbbc"]
[Nothing,
Nothing,
Nothing]
, testRegex "ab?bc" []
["abbc",
"abc"]
[Just ["abbc"],
Just ["abc"]]
, testRegex "ab{0,1}bc" []
["abc"]
[Just ["abc"]]
, testRegex "ab?bc" []
[]
[]
, testRegex "ab?c" []
["abc"]
[Just ["abc"]]
, testRegex "ab{0,1}c" []
["abc"]
[Just ["abc"]]
, testRegex "^abc$" []
["abc",
"abbbbc",
"abcc"]
[Just ["abc"],
Nothing,
Nothing]
, testRegex "^abc" []
["abcc"]
[Just ["abc"]]
, testRegex "^abc$" []
[]
[]
, testRegex "abc$" []
["aabc",
"*** Failers",
"aabc",
"aabcd"]
[Just ["abc"],
Nothing,
Just ["abc"],
Nothing]
, testRegex "^" []
["abc"]
[Just [""]]
,
testRegex "$" []
["abc"]
[Just [""]]
,
testRegex "a.c" []
["abc",
"axc"]
[Just ["abc"],
Just ["axc"]]
,
testRegex "a.*c" []
["axyzc"]
[Just ["axyzc"]]
,
testRegex "a[bc]d" []
["abd",
"*** Failers",
"axyzd",
"abc"]
[Just ["abd"],
Nothing,
Nothing,
Nothing]
,
testRegex "a[b-d]e" []
["ace"]
[Just ["ace"]]
,
testRegex "a[b-d]" []
["aac"]
[Just ["ac"]]
,
testRegex "a[-b]" []
["a-"]
[Just ["a-"]]
,
testRegex "a[b-]" []
["a-"]
[Just ["a-"]]
,
testRegex "a]" []
["a]"]
[Just ["a]"]]
,
testRegex "a[]]b" []
["a]b"]
[Just ["a]b"]]
,
testRegex "a[^bc]d" []
["aed",
"*** Failers",
"abd",
"abd"]
[Just ["aed"],
Nothing,
Nothing,
Nothing]
,
testRegex "a[^-b]c" []
["adc"]
[Just ["adc"]]
,
testRegex "a[^]b]c" []
["adc",
"*** Failers",
"a-c",
"a]c"]
[Just ["adc"],
Nothing,
Just ["a-c"],
Nothing]
,
testRegex "\\ba\\b" []
["a-",
"-a",
"-a-"]
[Just ["a"],
Just ["a"],
Just ["a"]]
,
testRegex "\\by\\b" []
["*** Failers",
"xy",
"yz",
"xyz"]
[Nothing,
Nothing,
Nothing,
Nothing]
,
testRegex "\\Ba\\B" []
["*** Failers",
"a-",
"-a",
"-a-"]
[Just ["a"],
Nothing,
Nothing,
Nothing]
,
testRegex "\\By\\b" []
["xy"]
[Just ["y"]]
,
testRegex "\\by\\B" []
["yz"]
[Just ["y"]]
,
testRegex "\\By\\B" []
["xyz"]
[Just ["y"]]
,
testRegex "\\w" []
["a"]
[Just ["a"]]
,
testRegex "\\W" []
["-",
"*** Failers",
"-",
"a"]
[Just ["-"],
Just ["*"],
Just ["-"],
Nothing]
,
testRegex "a\\sb" []
["a b"]
[Just ["a b"]]
,
testRegex "a\\Sb" []
["a-b",
"*** Failers",
"a-b",
"a b"]
[Just ["a-b"],
Nothing,
Just ["a-b"],
Nothing]
,
testRegex "\\d" []
["1"]
[Just ["1"]]
,
testRegex "\\D" []
["-",
"*** Failers",
"-",
"1"]
[Just ["-"],
Just ["*"],
Just ["-"],
Nothing]
,
testRegex "[\\w]" []
["a"]
[Just ["a"]]
,
testRegex "[\\W]" []
["-",
"*** Failers",
"-",
"a"]
[Just ["-"],
Just ["*"],
Just ["-"],
Nothing]
,
testRegex "a[\\s]b" []
["a b"]
[Just ["a b"]]
,
testRegex "a[\\S]b" []
["a-b",
"*** Failers",
"a-b",
"a b"]
[Just ["a-b"],
Nothing,
Just ["a-b"],
Nothing]
,
testRegex "[\\d]" []
["1"]
[Just ["1"]]
,
testRegex "[\\D]" []
["-",
"*** Failers",
"-",
"1"]
[Just ["-"],
Just ["*"],
Just ["-"],
Nothing]
,
testRegex "ab|cd" []
["abc",
"abcd"]
[Just ["ab"],
Just ["ab"]]
,
testRegex "$b" []
[]
[]
,
testRegex "a\\(b" []
["a(b"]
[Just ["a(b"]]
,
testRegex "a\\(*b" []
["ab",
"a((b"]
[Just ["ab"],
Just ["a((b"]]
,
testRegex "((a))" []
["abc"]
[Just ["a", "a", "a"]]
,
testRegex "(a)b(c)" []
["abc"]
[Just ["abc", "a", "c"]]
,
testRegex "a+b+c" []
["aabbabc"]
[Just ["abc"]]
,
testRegex "a{1,}b{1,}c" []
["aabbabc"]
[Just ["abc"]]
,
testRegex "a.+?c" []
["abcabc"]
[Just ["abc"]]
,
testRegex "(a+|b)*" []
["ab"]
[Just ["ab", "b"]]
,
testRegex "(a+|b){0,}" []
["ab"]
[Just ["ab", "b"]]
,
testRegex "(a+|b)+" []
["ab"]
[Just ["ab","b"]]
,
testRegex "(a+|b){1,}" []
["ab"]
[Just ["ab", "b"]]
,
testRegex "(a+|b)?" []
["ab"]
[Just ["a", "a"]]
,
testRegex "(a+|b){0,1}" []
["ab"]
[Just ["a", "a"]]
,
testRegex "[^ab]*" []
["cde"]
[Just ["cde"]]
,
testRegex "abc" []
["b"]
[Nothing]
,
testRegex "a*" []
[""]
[Just [""]]
,
testRegex "([abc])*d" []
["abbbcd"]
[Just ["abbbcd", "c"]]
,
testRegex "([abc])*bcd" []
["abcd"]
[Just ["abcd", "a"]]
,
testRegex "a|b|c|d|e" []
["e"]
[Just ["e"]]
,
testRegex "(a|b|c|d|e)f" []
["ef"]
[Just ["ef", "e"]]
,
testRegex "abcd*efg" []
["abcdefg"]
[Just ["abcdefg"]]
,
testRegex "ab*" []
["xabyabbbz",
"xayabbbz"]
[Just ["ab"],
Just ["a"]]
,
testRegex "(ab|cd)e" []
["abcde"]
[Just ["cde", "cd"]]
,
testRegex "[abhgefdc]ij" []
["hij"]
[Just ["hij"]]
, testRegex "^(ab|cd)e" []
[]
[]
,
testRegex "(abc|)ef" []
["abcdef"]
[Just ["ef", ""]]
,
testRegex "(a|b)c*d" []
["abcd"]
[Just ["bcd", "b"]]
,
testRegex "(ab|ab*)bc" []
["abc"]
[Just ["abc", "a"]]
,
testRegex "a([bc]*)c*" []
["abc"]
[Just ["abc", "bc"]]
,
testRegex "a([bc]*)(c*d)" []
["abcd"]
[Just ["abcd", "bc", "d"]]
,
testRegex "a([bc]+)(c*d)" []
["abcd"]
[Just ["abcd", "bc", "d"]]
,
testRegex "a([bc]*)(c+d)" []
["abcd"]
[Just ["abcd", "b", "cd"]]
,
testRegex "a[bcd]*dcdcde" []
["adcdcde"]
[Just ["adcdcde"]]
,
testRegex "a[bcd]+dcdcde" []
["*** Failers",
"abcde",
"adcdcde"]
[Nothing,
Nothing,
Nothing]
,
testRegex "(ab|a)b*c" []
["abc"]
[Just ["abc", "ab"]]
,
testRegex "((a)(b)c)(d)" []
["abcd"]
[Just ["abcd", "abc", "a", "b", "d"]]
,
testRegex "[a-zA-Z_][a-zA-Z0-9_]*" []
["alpha"]
[Just ["alpha"]]
,
testRegex "^a(bc+|b[eh])g|.h$" []
["abh"]
[Just ["bh"]]
,
testRegex "(bc+d$|ef*g.|h?i(j|k))" []
["effgz",
"ij",
"reffgz",
"*** Failers",
"effg",
"bcdd"]
[Just ["effgz", "effgz"],
Just ["ij", "ij", "j"],
Just ["effgz", "effgz"],
Nothing,
Nothing,
Nothing]
,
testRegex "((((((((((a))))))))))" []
["a"]
[Just ["a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a"]]
,
testRegex "((((((((((a))))))))))\\10" []
["aa"]
[Just ["aa", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a"]]
,
testRegex "(((((((((a)))))))))" []
["a"]
[Just ["a", "a", "a", "a", "a", "a", "a", "a", "a", "a"]]
,
testRegex "multiple words of text" []
["*** Failers",
"aa",
"uh-uh"]
[Nothing,
Nothing,
Nothing]
,
testRegex "multiple words" []
["multiple words, yeah"]
[Just ["multiple words"]]
,
testRegex "(.*)c(.*)" []
["abcde"]
[Just ["abcde", "ab", "de"]]
,
testRegex "\\((.*), (.*)\\)" []
["(a, b)"]
[Just ["(a, b)", "a", "b"]]
,
testRegex "[k]" []
[]
[]
,
testRegex "abcd" []
["abcd"]
[Just ["abcd"]]
,
testRegex "a(bc)d" []
["abcd"]
[Just ["abcd", "bc"]]
,
testRegex "a[-]?c" []
["ac"]
[Just ["ac"]]
,
testRegex "(abc)\\1" []
["abcabc"]
[Just ["abcabc", "abc"]]
,
testRegex "([a-c]*)\\1" []
["abcabc"]
[Just ["abcabc", "abc"]]
,
testRegex "(a)|\\1" []
["a",
"*** Failers",
"ab",
"x"]
[Just ["a", "a"],
Just ["a", "a"],
Just ["a", "a"],
Nothing]
,
testRegex "(([a-c])b*?\\2)*" []
["ababbbcbc"]
[Just ["ababb", "bb", "b"]]
,
testRegex "(([a-c])b*?\\2){3}" []
["ababbbcbc"]
[Just ["ababbbcbc", "cbc", "c"]]
,
testRegex "((\\3|b)\\2(a)x)+" []
["aaaxabaxbaaxbbax"]
[Just ["bbax", "bbax", "b", "a"]]
,
testRegex "((\\3|b)\\2(a)){2,}" []
["bbaababbabaaaaabbaaaabba"]
[Just ["bbaaaabba", "bba", "b", "a"]]
,
testRegex "abc" [caseless]
["ABC",
"XABCY",
"ABABC",
"*** Failers",
"aaxabxbaxbbx",
"XBC",
"AXC",
"ABX"]
[Just ["ABC"],
Just ["ABC"],
Just ["ABC"],
Nothing,
Nothing,
Nothing,
Nothing,
Nothing]
,
testRegex "ab*c" [caseless]
["ABC"]
[Just ["ABC"]]
,
testRegex "ab*bc" [caseless]
["ABC",
"ABBC"]
[Just ["ABC"],
Just ["ABBC"]]
,
testRegex "ab*?bc" [caseless]
["ABBBBC"]
[Just ["ABBBBC"]]
,
testRegex "ab{0,}?bc" [caseless]
["ABBBBC"]
[Just ["ABBBBC"]]
,
testRegex "ab+?bc" [caseless]
["ABBC"]
[Just ["ABBC"]]
,
testRegex "ab+bc" [caseless]
["*** Failers",
"ABC",
"ABQ"]
[Nothing,
Nothing,
Nothing]
,
testRegex "ab{1,}bc" [caseless]
[]
[]
,
testRegex "ab+bc" [caseless]
["ABBBBC"]
[Just ["ABBBBC"]]
,
testRegex "ab{1,}?bc" [caseless]
["ABBBBC"]
[Just ["ABBBBC"]]
,
testRegex "ab{1,3}?bc" [caseless]
["ABBBBC"]
[Just ["ABBBBC"]]
,
testRegex "ab{3,4}?bc" [caseless]
["ABBBBC"]
[Just ["ABBBBC"]]
,
testRegex "ab{4,5}?bc" [caseless]
["*** Failers",
"ABQ",
"ABBBBC"]
[Nothing,
Nothing,
Nothing]
,
testRegex "ab??bc" [caseless]
["ABBC",
"ABC"]
[Just ["ABBC"],
Just ["ABC"]]
,
testRegex "ab{0,1}?bc" [caseless]
["ABC"]
[Just ["ABC"]]
,
testRegex "ab??bc" [caseless]
[]
[]
,
testRegex "ab??c" [caseless]
["ABC"]
[Just ["ABC"]]
,
testRegex "ab{0,1}?c" [caseless]
["ABC"]
[Just ["ABC"]]
,
testRegex "^abc$" [caseless]
["ABC",
"*** Failers",
"ABBBBC",
"ABCC"]
[Just ["ABC"],
Nothing,
Nothing,
Nothing]
,
testRegex "^abc" [caseless]
["ABCC"]
[Just ["ABC"]]
,
testRegex "^abc$" [caseless]
[]
[]
,
testRegex "abc$" [caseless]
["AABC"]
[Just ["ABC"]]
,
testRegex "^" [caseless]
["ABC"]
[Just [""]]
,
testRegex "$" [caseless]
["ABC"]
[Just [""]]
,
testRegex "a.c" [caseless]
["ABC",
"AXC"]
[Just ["ABC"],
Just ["AXC"]]
,
testRegex "a.*?c" [caseless]
["AXYZC"]
[Just ["AXYZC"]]
,
testRegex "a.*c" [caseless]
["*** Failers",
"AABC",
"AXYZD"]
[Nothing,
Just ["AABC"],
Nothing]
,
testRegex "a[bc]d" [caseless]
["ABD"]
[Just ["ABD"]]
,
testRegex "a[b-d]e" [caseless]
["ACE",
"ABC",
"ABD"]
[Just ["ACE"],
Nothing,
Nothing]
,
testRegex "a[b-d]" [caseless]
["AAC"]
[Just ["AC"]]
,
testRegex "a[-b]" [caseless]
["A-"]
[Just ["A-"]]
,
testRegex "a[b-]" [caseless]
["A-"]
[Just ["A-"]]
,
testRegex "a]" [caseless]
["A]"]
[Just ["A]"]]
,
testRegex "a[]]b" [caseless]
["A]B"]
[Just ["A]B"]]
,
testRegex "a[^bc]d" [caseless]
["AED"]
[Just ["AED"]]
,
testRegex "a[^-b]c" [caseless]
["ADC",
"ABD",
"A-C"]
[Just ["ADC"],
Nothing,
Nothing]
,
testRegex "a[^]b]c" [caseless]
["ADC"]
[Just ["ADC"]]
,
testRegex "ab|cd" [caseless]
["ABC",
"ABCD"]
[Just ["AB"],
Just ["AB"]]
,
testRegex "()ef" [caseless]
["DEF"]
[Just ["EF", ""]]
,
testRegex "$b" [caseless]
["A]C",
"B"]
[Nothing,
Nothing]
,
testRegex "a\\(b" [caseless]
["A(B"]
[Just ["A(B"]]
,
testRegex "a\\(*b" [caseless]
["AB",
"A((B"]
[Just ["AB"],
Just ["A((B"]]
,
testRegex "((a))" [caseless]
["ABC"]
[Just ["A", "A", "A"]]
,
testRegex "(a)b(c)" [caseless]
["ABC"]
[Just ["ABC", "A", "C"]]
,
testRegex "a+b+c" [caseless]
["AABBABC"]
[Just ["ABC"]]
,
testRegex "a{1,}b{1,}c" [caseless]
["AABBABC"]
[Just ["ABC"]]
,
testRegex "a.+?c" [caseless]
["ABCABC"]
[Just ["ABC"]]
,
testRegex "a.*?c" [caseless]
["ABCABC"]
[Just ["ABC"]]
,
testRegex "a.{0,5}?c" [caseless]
["ABCABC"]
[Just ["ABC"]]
,
testRegex "(a+|b)*" [caseless]
["AB"]
[Just ["AB", "B"]]
,
testRegex "(a+|b){0,}" [caseless]
["AB"]
[Just ["AB", "B"]]
,
testRegex "(a+|b)+" [caseless]
["AB"]
[Just ["AB","B"]]
,
testRegex "(a+|b){1,}" [caseless]
["AB"]
[Just ["AB", "B"]]
,
testRegex "(a+|b)?" [caseless]
["AB"]
[Just ["A", "A"]]
,
testRegex "(a+|b){0,1}" [caseless]
["AB"]
[Just ["A", "A"]]
,
testRegex "(a+|b){0,1}?" [caseless]
["AB"]
[Just [""]]
,
testRegex "[^ab]*" [caseless]
["CDE"]
[Just ["CDE"]]
,
testRegex "abc" [caseless]
[]
[]
,
testRegex "a*" [caseless]
[""]
[Just [""]]
,
testRegex "([abc])*d" [caseless]
["ABBBCD"]
[Just ["ABBBCD", "C"]]
,
testRegex "([abc])*bcd" [caseless]
["ABCD"]
[Just ["ABCD","A"]]
,
testRegex "a|b|c|d|e" [caseless]
["E"]
[Just ["E"]]
,
testRegex "(a|b|c|d|e)f" [caseless]
["EF"]
[Just ["EF", "E"]]
,
testRegex "abcd*efg" [caseless]
["ABCDEFG"]
[Just ["ABCDEFG"]]
,
testRegex "ab*" [caseless]
["XABYABBBZ",
"XAYABBBZ"]
[Just ["AB"],
Just ["A"]]
,
testRegex "(ab|cd)e" [caseless]
["ABCDE"]
[Just ["CDE", "CD"]]
,
testRegex "[abhgefdc]ij" [caseless]
["HIJ"]
[Just ["HIJ"]]
,
testRegex "^(ab|cd)e" [caseless]
["ABCDE"]
[Nothing]
,
testRegex "(abc|)ef" [caseless]
["ABCDEF"]
[Just ["EF", ""]]
,
testRegex "(a|b)c*d" [caseless]
["ABCD"]
[Just ["BCD", "B"]]
,
testRegex "(ab|ab*)bc" [caseless]
["ABC"]
[Just ["ABC", "A"]]
,
testRegex "a([bc]*)c*" [caseless]
["ABC"]
[Just ["ABC", "BC"]]
,
testRegex "a([bc]*)(c*d)" [caseless]
["ABCD"]
[Just ["ABCD","BC","D"]]
,
testRegex "a([bc]+)(c*d)" [caseless]
["ABCD"]
[Just ["ABCD","BC","D"]]
,
testRegex "a([bc]*)(c+d)" [caseless]
["ABCD"]
[Just ["ABCD","B","CD"]]
,
testRegex "a[bcd]*dcdcde" [caseless]
["ADCDCDE"]
[Just ["ADCDCDE"]]
,
testRegex "a[bcd]+dcdcde" [caseless]
[]
[]
,
testRegex "(ab|a)b*c" [caseless]
["ABC"]
[Just ["ABC", "AB"]]
,
testRegex "((a)(b)c)(d)" [caseless]
["ABCD"]
[Just ["ABCD", "ABC", "A", "B", "D"]]
,
testRegex "[a-zA-Z_][a-zA-Z0-9_]*" [caseless]
["ALPHA"]
[Just ["ALPHA"]]
,
testRegex "^a(bc+|b[eh])g|.h$" [caseless]
["ABH"]
[Just ["BH"]]
,
testRegex "(bc+d$|ef*g.|h?i(j|k))" [caseless]
["EFFGZ",
"IJ",
"REFFGZ",
"*** Failers",
"ADCDCDE",
"EFFG",
"BCDD"]
[Just ["EFFGZ", "EFFGZ"],
Just ["IJ", "IJ", "J"],
Just ["EFFGZ", "EFFGZ"],
Nothing,
Nothing,
Nothing,
Nothing]
,
testRegex "((((((((((a))))))))))" [caseless]
["A"]
[Just ["A", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A"]]
,
testRegex "((((((((((a))))))))))\\10" [caseless]
["AA"]
[Just ["AA", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A"]]
,
testRegex "(((((((((a)))))))))" [caseless]
["A"]
[Just ["A", "A", "A", "A", "A", "A", "A", "A", "A", "A"]]
,
testRegex "(?:(?:(?:(?:(?:(?:(?:(?:(?:(a))))))))))" [caseless]
["A"]
[Just ["A", "A"]]
,
testRegex "(?:(?:(?:(?:(?:(?:(?:(?:(?:(a|b|c))))))))))" [caseless]
["C"]
[Just ["C", "C"]]
,
testRegex "multiple words of text" [caseless]
["AA",
"UH-UH"]
[Nothing,
Nothing]
,
testRegex "multiple words" [caseless]
["MULTIPLE WORDS, YEAH"]
[Just ["MULTIPLE WORDS"]]
,
testRegex "(.*)c(.*)" [caseless]
["ABCDE"]
[Just ["ABCDE", "AB", "DE"]]
,
testRegex "\\((.*), (.*)\\)" [caseless]
["(A, B)"]
[Just ["(A, B)", "A", "B"]]
,
testRegex "[k]" [caseless]
[]
[]
,
testRegex "abcd" [caseless]
["ABCD"]
[Just ["ABCD"]]
,
testRegex "a(bc)d" [caseless]
["ABCD"]
[Just ["ABCD", "BC"]]
,
testRegex "a[-]?c" [caseless]
["AC"]
[Just ["AC"]]
,
testRegex "(abc)\\1" [caseless]
["ABCABC"]
[Just ["ABCABC", "ABC"]]
,
testRegex "([a-c]*)\\1" [caseless]
["ABCABC"]
[Just ["ABCABC", "ABC"]]
,
testRegex "a(?!b)." []
["abad"]
[Just ["ad"]]
,
testRegex "a(?=d)." []
["abad"]
[Just ["ad"]]
,
testRegex "a(?=c|d)." []
["abad"]
[Just ["ad"]]
,
testRegex "a(?:b|c|d)(.)" []
["ace"]
[Just ["ace", "e"]]
,
testRegex "a(?:b|c|d)*(.)" []
["ace"]
[Just ["ace", "e"]]
,
testRegex "a(?:b|c|d)+?(.)" []
["ace",
"acdbcdbe"]
[Just ["ace", "e"],
Just ["acd", "d"]]
,
testRegex "a(?:b|c|d)+(.)" []
["acdbcdbe"]
[Just ["acdbcdbe", "e"]]
,
testRegex "a(?:b|c|d){2}(.)" []
["acdbcdbe"]
[Just ["acdb", "b"]]
,
testRegex "a(?:b|c|d){4,5}(.)" []
["acdbcdbe"]
[Just ["acdbcdb", "b"]]
,
testRegex "a(?:b|c|d){4,5}?(.)" []
["acdbcdbe"]
[Just ["acdbcd", "d"]]
,
testRegex "((foo)|(bar))*" []
["foobar"]
[Just ["foobar", "bar", "foo", "bar"]]
,
testRegex "a(?:b|c|d){6,7}(.)" []
["acdbcdbe"]
[Just ["acdbcdbe", "e"]]
,
testRegex "a(?:b|c|d){6,7}?(.)" []
["acdbcdbe"]
[Just ["acdbcdbe", "e"]]
,
testRegex "a(?:b|c|d){5,6}(.)" []
["acdbcdbe"]
[Just ["acdbcdbe", "e"]]
,
testRegex "a(?:b|c|d){5,6}?(.)" []
["acdbcdbe"]
[Just ["acdbcdb", "b"]]
,
testRegex "a(?:b|c|d){5,7}(.)" []
["acdbcdbe"]
[Just ["acdbcdbe", "e"]]
,
testRegex "a(?:b|c|d){5,7}?(.)" []
["acdbcdbe"]
[Just ["acdbcdb", "b"]]
,
testRegex "a(?:b|(c|e){1,2}?|d)+?(.)" []
["ace"]
[Just ["ace", "c", "e"]]
,
testRegex "^(.+)?B" []
["AB"]
[Just ["AB", "A"]]
,
testRegex "^([^a-z])|(\\^)$" []
["."]
[Just [".", "."]]
,
testRegex "^[<>]&" []
["<&OUT"]
[Just ["<&"]]
,
testRegex "^(a\\1?){4}$" []
["aaaaaaaaaa",
"*** Failers",
"AB",
"aaaaaaaaa",
"aaaaaaaaaaa"]
[Just ["aaaaaaaaaa", "aaaa"],
Nothing,
Nothing,
Nothing,
Nothing]
,
testRegex "^(a(?(1)\\1)){4}$" []
["aaaaaaaaaa",
"*** Failers",
"aaaaaaaaa",
"aaaaaaaaaaa"]
[Just ["aaaaaaaaaa", "aaaa"],
Nothing,
Nothing,
Nothing]
,
testRegex "(?:(f)(o)(o)|(b)(a)(r))*" []
["foobar"]
[Just ["foobar", "f", "o", "o", "b", "a", "r"]]
,
testRegex "(?<=a)b" []
["ab",
"*** Failers",
"cb",
"b"]
[Just ["b"],
Nothing,
Nothing,
Nothing]
,
testRegex "(?<!c)b" []
["ab",
"b",
"b"]
[Just ["b"],
Just ["b"],
Just ["b"]]
,
testRegex "(?:..)*a" []
["aba"]
[Just ["aba"]]
,
testRegex "(?:..)*?a" []
["aba"]
[Just ["a"]]
,
testRegex "^(?:b|a(?=(.)))*\\1" []
["abc"]
[Just ["ab", "b"]]
,
testRegex "^(){3,5}" []
["abc"]
[Just ["", ""]]
,
testRegex "^(a+)*ax" []
["aax"]
[Just ["aax", "a"]]
,
testRegex "^((a|b)+)*ax" []
["aax"]
[Just ["aax", "a", "a"]]
,
testRegex "^((a|bc)+)*ax" []
["aax"]
[Just ["aax", "a", "a"]]
,
testRegex "(a|x)*ab" []
["cab"]
[Just ["ab"]]
,
testRegex "(a)*ab" []
["cab"]
[Just ["ab"]]
,
testRegex "(?:(?i)a)b" []
["ab"]
[Just ["ab"]]
,
testRegex "((?i)a)b" []
["ab"]
[Just ["ab", "a"]]
,
testRegex "(?:(?i)a)b" []
["Ab"]
[Just ["Ab"]]
,
testRegex "((?i)a)b" []
["Ab"]
[Just ["Ab", "A"]]
,
testRegex "(?:(?i)a)b" []
["*** Failers",
"cb",
"aB"]
[Nothing,
Nothing,
Nothing]
,
testRegex "((?i)a)b" []
[]
[]
,
testRegex "(?i:a)b" []
["ab"]
[Just ["ab"]]
,
testRegex "((?i:a))b" []
["ab"]
[Just ["ab", "a"]]
,
testRegex "(?i:a)b" []
["Ab"]
[Just ["Ab"]]
,
testRegex "((?i:a))b" []
["Ab"]
[Just ["Ab", "A"]]
,
testRegex "(?i:a)b" []
["*** Failers",
"aB",
"aB"]
[Nothing,
Nothing,
Nothing]
,
testRegex "((?i:a))b" []
[]
[]
,
testRegex "(?:(?-i)a)b" [caseless]
["ab"]
[Just ["ab"]]
,
testRegex "((?-i)a)b" [caseless]
["ab"]
[Just ["ab", "a"]]
,
testRegex "(?:(?-i)a)b" [caseless]
["aB"]
[Just ["aB"]]
,
testRegex "((?-i)a)b" [caseless]
["aB"]
[Just ["aB", "a"]]
,
testRegex "(?:(?-i)a)b" [caseless]
["*** Failers",
"aB",
"Ab"]
[Nothing,
Just ["aB"],
Nothing]
,
testRegex "((?-i)a)b" [caseless]
[]
[]
,
testRegex "(?:(?-i)a)b" [caseless]
["aB"]
[Just ["aB"]]
,
testRegex "((?-i)a)b" [caseless]
["aB"]
[Just ["aB", "a"]]
,
testRegex "(?:(?-i)a)b" [caseless]
["*** Failers",
"Ab",
"AB"]
[Nothing,
Nothing,
Nothing]
,
testRegex "((?-i)a)b" [caseless]
[]
[]
,
testRegex "(?-i:a)b" [caseless]
["ab"]
[Just ["ab"]]
,
testRegex "((?-i:a))b" [caseless]
["ab"]
[Just ["ab", "a"]]
,
testRegex "(?-i:a)b" [caseless]
["aB"]
[Just ["aB"]]
,
testRegex "((?-i:a))b" [caseless]
["aB"]
[Just ["aB", "a"]]
,
testRegex "(?-i:a)b" [caseless]
["*** Failers",
"AB",
"Ab"]
[Nothing,
Nothing,
Nothing]
,
testRegex "((?-i:a))b" [caseless]
[]
[]
,
testRegex "(?-i:a)b" [caseless]
["aB"]
[Just ["aB"]]
,
testRegex "((?-i:a))b" [caseless]
["aB"]
[Just ["aB", "a"]]
,
testRegex "(?-i:a)b" [caseless]
["*** Failers",
"Ab",
"AB"]
[Nothing,
Nothing,
Nothing]
,
testRegex "((?-i:a))b" [caseless]
[]
[]
,
testRegex "((?-i:a.))b" [caseless]
["*** Failers",
"AB",
"a\nB"]
[Nothing,
Nothing,
Nothing]
,
testRegex "((?s-i:a.))b" [caseless]
["a\nB"]
[Just ["a\nB", "a\n"]]
,
testRegex "(?:c|d)(?:)(?:a(?:)(?:b)(?:b(?:))(?:b(?:)(?:b)))" []
["cabbbb"]
[Just ["cabbbb"]]
,
testRegex "(?:c|d)(?:)(?:aaaaaaaa(?:)(?:bbbbbbbb)(?:bbbbbbbb(?:))(?:bbbbbbbb(?:)(?:bbbbbbbb)))" []
["caaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"]
[Just ["caaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"]]
,
testRegex "(ab)\\d\\1" [caseless]
["Ab4ab",
"ab4Ab"]
[Just ["Ab4ab", "Ab"],
Just ["ab4Ab", "ab"]]
,
testRegex "foo\\w*\\d{4}baz" []
["foobar1234baz"]
[Just ["foobar1234baz"]]
,
testRegex "x(~~)*(?:(?:F)?)?" []
["x~~"]
[Just ["x~~", "~~"]]
,
testRegex "^a(?#xxx){3}c" []
["aaac"]
[Just ["aaac"]]
,
testRegex "^a (?#xxx) (?#yyy) {3}c" [extended]
["aaac"]
[Just ["aaac"]]
,
testRegex "(?<![cd])b" []
["*** Failers",
"B\nB",
"dbcb"]
[Nothing,
Nothing,
Nothing]
,
testRegex "(?<![cd])[ab]" []
["dbaacb"]
[Just ["a"]]
,
testRegex "(?<!(c|d))b" []
[]
[]
,
testRegex "(?<!(c|d))[ab]" []
["dbaacb"]
[Just ["a"]]
,
testRegex "(?<!cd)[ab]" []
["cdaccb"]
[Just ["b"]]
,
{-
testRegex "^(?:a?b?)*$" []
["a",
"ab",
"aaa ",
"*** Failers",
"dbcb",
"a--",
"aa-- "]
[Just ["a"],
Just ["ab"],
Just ["aaa"],
Nothing,
Nothing,
Nothing,
Nothing]
,
-}
testRegex "((?s)^a(.))((?m)^b$)" []
["a\nb\nc\n"]
[Just ["a\nb", "a\n", "\n", "b"]]
,
testRegex "((?m)^b$)" []
["a\nb\nc\n"]
[Just ["b", "b"]]
,
testRegex "(?m)^b" []
["a\nb\n"]
[Just ["b"]]
,
testRegex "(?m)^(b)" []
["a\nb\n"]
[Just ["b", "b"]]
,
testRegex "((?m)^b)" []
["a\nb\n"]
[Just ["b", "b"]]
,
testRegex "\\n((?m)^b)" []
["a\nb\n"]
[Just ["\nb", "b"]]
,
testRegex "((?s).)c(?!.)" []
["a\nb\nc\n",
"a\nb\nc\n"]
[Just ["\nc", "\n"],
Just ["\nc", "\n"]]
,
testRegex "((?s)b.)c(?!.)" []
["a\nb\nc\n",
"a\nb\nc\n"]
[Just ["b\nc", "b\n"],
Just ["b\nc", "b\n"]]
,
testRegex "^b" []
[]
[]
,
testRegex "()^b" []
["*** Failers",
"a\nb\nc\n",
"a\nb\nc\n"]
[Nothing,
Nothing,
Nothing]
,
testRegex "((?m)^b)" []
["a\nb\nc\n"]
[Just ["b", "b"]]
,
{-
testRegex "(?(1)a|b)" []
[]
[]
,
testRegex "(?(1)b|a)" []
["a"]
[Just ["a"]]
,
-}
testRegex "(x)?(?(1)a|b)" []
["*** Failers",
"a",
"a"]
[Nothing,
Nothing,
Nothing]
,
testRegex "(x)?(?(1)b|a)" []
["a"]
[Just ["a"]]
,
testRegex "()?(?(1)b|a)" []
["a"]
[Just ["a"]]
,
testRegex "()(?(1)b|a)" []
[]
[]
,
testRegex "()?(?(1)a|b)" []
["a"]
[Just ["a", ""]]
,
testRegex "^(\\()?blah(?(1)(\\)))$" []
["(blah)",
"blah",
"a",
"blah)",
"(blah"]
[Just ["(blah)", "(", ")"],
Just ["blah"],
Nothing,
Nothing,
Nothing]
,
testRegex "^(\\(+)?blah(?(1)(\\)))$" []
["(blah)",
"blah",
"*** Failers",
"blah)",
"(blah"]
[Just ["(blah)", "(", ")"],
Just ["blah"],
Nothing,
Nothing,
Nothing]
,
testRegex "(?(?!a)a|b)" []
[]
[]
,
testRegex "(?(?!a)b|a)" []
["a"]
[Just ["a"]]
,
testRegex "(?(?=a)b|a)" []
["*** Failers",
"a",
"a"]
[Nothing,
Nothing,
Nothing]
,
testRegex "(?(?=a)a|b)" []
["a"]
[Just ["a"]]
,
testRegex "(?=(a+?))(\\1ab)" []
["aaab"]
[Just ["aab", "a", "aab"]]
,
testRegex "^(?=(a+?))\\1ab" []
[]
[]
,
testRegex "(\\w+:)+" []
["one:"]
[Just ["one:", "one:"]]
,
testRegex "$(?<=^(a))" []
["a"]
[Just ["", "a"]]
,
testRegex "(?=(a+?))(\\1ab)" []
["aaab"]
[Just ["aab", "a", "aab"]]
,
testRegex "^(?=(a+?))\\1ab" []
["*** Failers",
"aaab",
"aaab"]
[Nothing,
Nothing,
Nothing]
,
testRegex "([\\w:]+::)?(\\w+)$" []
["abcd",
"xy:z:::abcd"]
[Just ["abcd", "", "abcd"],
Just ["xy:z:::abcd", "xy:z:::", "abcd"]]
,
testRegex "^[^bcd]*(c+)" []
["aexycd"]
[Just ["aexyc", "c"]]
,
testRegex "(a*)b+" []
["caab"]
[Just ["aab", "aa"]]
,
testRegex "([\\w:]+::)?(\\w+)$" []
["abcd",
"xy:z:::abcd",
"*** Failers",
"abcd:",
"abcd:"]
[Just ["abcd", "", "abcd"],
Just ["xy:z:::abcd", "xy:z:::", "abcd"],
Just ["Failers", "", "Failers"],
Nothing,
Nothing]
,
testRegex "^[^bcd]*(c+)" []
["aexycd"]
[Just ["aexyc", "c"]]
,
testRegex "(>a+)ab" []
[]
[]
,
testRegex "(?>a+)b" []
["aaab"]
[Just ["aaab"]]
,
testRegex "([[:]+)" []
["a:[b]:"]
[Just [":[", ":["]]
,
testRegex "([[=]+)" []
["a=[b]="]
[Just ["=[", "=["]]
,
testRegex "([[.]+)" []
["a.[b]."]
[Just [".[", ".["]]
,
testRegex "((?>a+)b)" []
["aaab"]
[Just ["aaab", "aaab"]]
,
testRegex "(?>(a+))b" []
["aaab"]
[Just ["aaab", "aaa"]]
,
testRegex "((?>[^()]+)|\\([^()]*\\))+" []
["((abc(ade)ufh()()x"]
[Just ["abc(ade)ufh()()x", "x"]]
,
testRegex "a\\Z" []
["aaab",
"a\nb\n"]
[Nothing,
Nothing]
,
testRegex "b\\Z" []
["a\nb\n"]
[Just ["b"]]
,
testRegex "b\\z" []
[]
[]
,
testRegex "b\\Z" []
["a\\nb"]
[Just ["b"]]
,
testRegex "(?>.*)(?<=(abcd|wxyz))" []
["alphabetabcd",
"endingwxyz",
"*** Failers",
"a rather long string that doesn't end with one of them"]
[Just ["alphabetabcd", "abcd"],
Just ["endingwxyz", "wxyz"],
Nothing,
Nothing]
,
testRegex "word (?>(?:(?!otherword)[a-zA-Z0-9]+ ){0,30})otherword" []
["word cat dog elephant mussel cow horse canary baboon snake shark otherword",
"word cat dog elephant mussel cow horse canary baboon snake shark"
]
[Just ["word cat dog elephant mussel cow horse canary baboon snake shark otherword"],
Nothing]
,
testRegex "((Z)+|A)*" []
["ZABCDEFG"]
[Just ["ZA", "A", "Z"]]
,
testRegex "(Z()|A)*" []
["ZABCDEFG"]
[Just ["ZA", "A", ""]]
,
testRegex "(Z(())|A)*" []
["ZABCDEFG"]
[Just ["ZA", "A", "", ""]]
,
testRegex "((?>Z)+|A)*" []
["ZABCDEFG"]
[Just ["ZA", "A"]]
,
testRegex "((?>)+|A)*" []
["ZABCDEFG"]
[Just ["", ""]]
,
testRegex "^[a-\\d]" []
["abcde",
"-things",
"0digit",
"*** Failers",
"bcdef "]
[Just ["a"],
Just ["-"],
Just ["0"],
Nothing,
Nothing]
,
testRegex "\\Qabc\\$xyz\\E" []
["abc\\$xyz"]
[Just ["abc\\$xyz"]]
,
testRegex "\\Qabc\\E\\$\\Qxyz\\E" []
["abc$xyz"]
[Just ["abc$xyz"]]
,
testRegex "\\Gabc" []
["abc",
"*** Failers",
"xyzabc "]
[Just ["abc"],
Nothing,
Nothing]
,
testRegex "a(?x: b c )d" []
["XabcdY",
"*** Failers ",
"Xa b c d Y "]
[Just ["abcd"],
Nothing,
Nothing]
,
testRegex "((?x)x y z | a b c)" []
["XabcY",
"AxyzB "]
[Just ["abc", "abc"],
Just ["xyz", "xyz"]]
,
testRegex "(?i)AB(?-i)C" []
["XabCY",
"*** Failers",
"XabcY "]
[Just ["abC"],
Nothing,
Nothing]
,
testRegex "((?i)AB(?-i)C|D)E" []
["abCE",
"DE",
"*** Failers",
"abcE",
"abCe ",
"dE",
"De "]
[Just ["abCE", "abC"],
Just ["DE", "D"],
Nothing,
Nothing,
Nothing,
Nothing,
Nothing]
,
testRegex "(.*)\\d+\\1" []
["abc123abc",
"abc123bc "]
[Just ["abc123abc", "abc"],
Just ["bc123bc", "bc"]]
,
testRegex "[z\\Qa-d]\\E]" []
["z",
"a",
"-",
"d",
"] ",
"*** Failers",
"b "]
[Just ["z"],
Just ["a"],
Just ["-"],
Just ["d"],
Just ["]"],
Just ["a"],
Nothing]
, testRegex "(?<=Z)X." []
["\\x84XAZXB"]
[Just ["XB"]]
,
testRegex "ab cd (?x) de fg" []
["ab cd defg"]
[Just ["ab cd defg"]]
,
testRegex "ab cd(?x) de fg" []
["ab cddefg",
"** Failers ",
"abcddefg"]
[Just ["ab cddefg"],
Nothing,
Nothing]
,
testRegex "(?<![^f]oo)(bar)" []
["foobarX ",
"** Failers ",
"boobarX"]
[Just ["bar", "bar"],
Nothing,
Nothing]
,
testRegex "(?<![^f])X" []
["offX",
"** Failers",
"onyX "]
[Just ["X"],
Nothing,
Nothing]
,
testRegex "(?<=[^f])X" []
["onyX",
"** Failers",
"offX "]
[Just ["X"],
Nothing,
Nothing]
,
testRegex "(?:(?(1)a|b)(X))+" []
["bXaX"]
[Just ["bXaX", "X"]]
,
testRegex "(?:(?(1)\\1a|b)(X|Y))+" []
["bXXaYYaY",
"bXYaXXaX "]
[Just ["bXXaYYaY", "Y"],
Just ["bX", "X"]]
,
testRegex "()()()()()()()()()(?:(?(10)\\10a|b)(X|Y))+" []
["bXXaYYaY"]
[Just ["bX", "", "", "", "", "", "", "", "", "", "X"]]
,
testRegex "[[,abc,]+]" []
["abc]",
"a,b]",
"[a,b,c] "]
[Just ["abc]"],
Just ["a,b]"],
Just ["[a,b,c]"]]
,
testRegex "a*b*\\w" []
["aaabbbb",
"aaaa",
"a"]
[Just ["aaabbbb"],
Just ["aaaa"],
Just ["a"]]
,
testRegex "a*b?\\w" []
["aaabbbb",
"aaaa",
"a"]
[Just ["aaabb"],
Just ["aaaa"],
Just ["a"]]
,
testRegex "a*b{0,4}\\w" []
["aaabbbb",
"aaaa",
"a"]
[Just ["aaabbbb"],
Just ["aaaa"],
Just ["a"]]
,
testRegex "(?=(\\w+))\\1:" []
["abcd:"]
[Just ["abcd:", "abcd"]]
,
testRegex "^(?=(\\w+))\\1:" []
["abcd:"]
[Just ["abcd:", "abcd"]]
,
testRegex "^[a\\E\\E-\\Ec]" []
["b",
"** Failers",
"-",
"E "]
[Just ["b"],
Nothing,
Nothing,
Nothing]
,
testRegex "(a){0,3}(?(1)b|(c|))*D" []
["abbD",
"ccccD",
"D "]
[Just ["abbD", "a"],
Just ["ccccD", "", ""],
Just ["D", "", ""]]
,
-- WARNING: at around this point, the file starts segfaulting ghci....
testRegex "(a|)*\\d" []
["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4"]
[Nothing,
Just ["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4", ""]]
,
testRegex "(?>a|)*\\d" []
["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4"]
[Nothing,
Just ["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4"]]
,
testRegex "(?=(\\w+))\\1:" []
["abcd:"]
[Just ["abcd:", "abcd"]]
,
testRegex "^(?=(\\w+))\\1:" []
["abcd:"]
[Just ["abcd:", "abcd"]]
,
testRegex "^[a\\E\\E-\\Ec]" []
["b",
"** Failers",
"-",
"E "]
[Just ["b"],
Nothing,
Nothing,
Nothing]
,
testRegex "(a){0,3}(?(1)b|(c|))*D" []
["abbD",
"ccccD",
"D "]
[Just ["abbD", "a"],
Just ["ccccD", "", ""],
Just ["D", "", ""]]
,
testRegex "(a|)*\\d" []
["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4"]
[Nothing,
Just ["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4", ""]]
,
testRegex "(?>a|)*\\d" []
["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4"]
[Nothing,
Just ["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4"]]
,
testRegex "( (A | (?(1)0|) )* )" [extended]
["abcd"]
[Just ["", "", ""]]
,
testRegex "^[\\w]+" []
["*** Failers",
"\201cole"]
[Nothing,
Nothing]
,
testRegex "^[\\w]+" []
["*** Failers",
"\201cole"]
[Nothing,
Nothing]
{-
,
testRegex "(a.b(?s)c.d|x.y)p.q" []
["a+bc+dp+q",
"a+bc\ndp+q",
"x\nyp+q ",
"a\nbc\ndp+q",
"a+bc\ndp\nq",
"x\nyp\nq "
]
[Just ["a+bc+dp+q"],
Just ["a+bc\ndp+q"],
Just ["x\nyp+q"],
Nothing,
Nothing,
Nothing
]
-}
,
testRegex "a\\d\\z" []
["ba0",
"*** Failers",
"ba0\n",
"ba0\ncd "]
[Just ["a0"],
Nothing,
Nothing,
Nothing]
,
testRegex "a\\d\\Z" []
["ba0",
"ba0\n",
"ba0\ncd "]
[Just ["a0"],
Just ["a0"],
Nothing]
,
testRegex "a\\d$" []
["ba0",
"ba0\n",
"*** Failers",
"ba0\ncd "]
[Just ["a0"],
Just ["a0"],
Nothing,
Nothing]
,
testRegex "a+" []
["aaaa"]
[Just ["aaaa"]]
,
testRegex "^\\d{2,3}X" []
["12X",
"123X",
"*** Failers",
"X",
"1X",
"1234X "]
[Just ["12X"],
Just ["123X"],
Nothing,
Nothing,
Nothing,
Nothing]
,
testRegex "^[abcd]\\d" []
["a45",
"b93",
"c99z",
"d04",
"*** Failers",
"e45",
"abcd ",
"abcd1234",
"1234 "]
[Just ["a4"],
Just ["b9"],
Just ["c9"],
Just ["d0"],
Nothing,
Nothing,
Nothing,
Nothing,
Nothing]
{-
,
testRegex "^(a*\\w|ab)=(a*\\w|ab)" []
["ab=ab"]
[Just ["ab=ab", "ab"]]
-}
,
testRegex "^(a*\\w|ab)=(?1)" []
["ab=ab"]
[Just ["ab=ab", "ab"]]
{-
,
testRegex "^([^()]|\\((?1)*\\))*$" []
["abc",
"a(b)c",
"a(b(c))d ",
"*** Failers)",
"a(b(c)d "]
[Just ["abc"],
Just ["a(b)c"],
Just ["a(b(c))d"],
Nothing,
Nothing]
,
testRegex "^>abc>([^()]|\\((?1)*\\))*<xyz<$" []
[">abc>123<xyz<",
">abc>1(2)3<xyz<",
">abc>(1(2)3)<xyz<"]
[Just [">abc>123<xyz<"],
Just [">abc>1(2)3<xyz<"],
Just [">abc>(1(2)3)<xyz<"]]
-}
,
testRegex "^(?>a*)\\d" []
["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa9876",
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]
[Just ["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa9"],
Nothing]
,
testRegex "(?<=abc|xy)123" []
["abc12345",
"wxy123z",
"*** Failers",
"123abc"]
[Just ["123"],
Just ["123"],
Nothing,
Nothing]
,
testRegex "(?<!abc|xy)123" []
["123abc",
"mno123456 ",
"*** Failers",
"abc12345",
"wxy123z"]
[Just ["123"],
Just ["123"],
Nothing,
Nothing,
Nothing]
,
testRegex "abc(?C1)xyz" []
["abcxyz",
"123abcxyz999 "]
[ Just ["abcxyz"],
Just ["abcxyz"]]
{-
,
testRegex "\\Gabc" []
["abcdef",
"defabcxyz\\>3 ",
"defabcxyz"]
[Just ["abc"],
Just ["abc"],
Nothing]
-}
,
testRegex "[\\xFF]" []
[">\xff<"]
[Just ["\xff"]]
,
testRegex "[^\\xFF]" []
["XYZ"]
[Just ["X"]]
,
testRegex "^\\pN{2,3}X" []
["12X",
"123X",
"*** Failers",
"X",
"1X",
"1234X "]
[Just ["12X"],
Just ["123X"],
Nothing,
Nothing,
Nothing,
Nothing]
]
|
mitchty/pcre-light
|
tests/Unit.hs
|
bsd-3-clause
| 84,561
| 0
| 24
| 34,367
| 22,575
| 12,827
| 9,748
| 3,018
| 3
|
{-# LANGUAGE TupleSections #-}
module Parser
( Parser
, SourcePos
, Error
, parse
, getPosition
, anyChar
, satisfy
, eof
, char
, newline
, upper
, lower
, space
, spaces
, alphaNum
, string
, number
, try
, anyOf
, noneOf
, sepBy
, between
, (<&>)
, strOf
, chainl1
, chainr1
, trace
) where
import Data.Char
import Data.Monoid
import Control.Monad
import Control.Arrow
import Control.Applicative
import qualified Debug.Trace as T
type SourcePos = (Int, Int)
type Error = (SourcePos, String)
data State = State { remaining :: !String
, sourcePos :: !SourcePos
}
deriving (Show, Eq)
newtype Parser a = Parser { runParser :: State -> Either Error (a, State) }
instance Functor Parser where
fmap f p = Parser $ \s -> first f <$> runParser p s
instance Applicative Parser where
pure a = Parser $ pure . (a, )
a <*> b = do
f <- a
r <- b
return $ f r
instance Alternative Parser where
empty = fail "empty: no suitable parser"
a <|> b = Parser $ \s -> case runParser a s of
Left _ -> runParser b s
r -> r
instance Monad Parser where
return = pure
a >>= b = Parser $ \s -> do
(r, s') <- runParser a s
runParser (b r) s'
fail e = Parser $ \s -> Left (sourcePos s, e)
instance MonadPlus Parser where
mzero = empty
mplus = (<|>)
parse :: Parser a -> String -> Either Error a
parse p s = fst <$> runParser p State { remaining = s, sourcePos = (1, 1) }
anyChar :: Parser Char
anyChar = Parser $ \s -> case remaining s of
[] -> Left (sourcePos s, "anyChar: empty input")
(h:t) -> let sourcePos' = if h == '\n'
then (fst (sourcePos s) + 1, 1)
else second (+1) $ sourcePos s
in pure (h, State { remaining = t
, sourcePos = sourcePos'
}
)
getPosition :: Parser SourcePos
getPosition = Parser $ \s -> pure (sourcePos s, s)
satisfy :: (a -> Bool) -> Parser a -> Parser a
satisfy f p = Parser $ \s -> let check o@(r, _)
| not $ f r = Left (sourcePos s, "satisfy: failed")
| otherwise = pure o
in runParser p s >>= check
-- Derived functions
eof :: Parser ()
eof = try anyChar >>= maybe (pure ()) (const $ fail "eof: non-empty input")
char :: Char -> Parser Char
char c = satisfy (== c) anyChar
newline :: Parser Char
newline = char '\n'
upper :: Parser Char
upper = satisfy isUpper anyChar
lower :: Parser Char
lower = satisfy isLower anyChar
space :: Parser Char
space = satisfy (== ' ') anyChar
spaces :: Parser String
spaces = many space
alphaNum :: Parser Char
alphaNum = satisfy isAlphaNum anyChar
number :: Parser Integer
number = read <$> some (satisfy isDigit anyChar)
string :: String -> Parser String
string [] = pure []
string (h:t) = (:) <$> char h <*> string t
try :: Parser a -> Parser (Maybe a)
try p = Just <$> p <|> pure Nothing
anyOf :: [Char] -> Parser Char
anyOf cs = satisfy (`elem` cs) anyChar
noneOf :: [Char] -> Parser Char
noneOf cs = satisfy (not . flip elem cs) anyChar
sepBy :: Parser a -> Parser sep -> Parser [a]
sepBy p s = do
h <- p
t <- (s >> sepBy p s) <|> pure []
return $ h:t
between :: Parser sep1 -> Parser sep2 -> Parser a -> Parser a
between o c p = o *> p <* c
(<&>) :: Monoid a => Parser a -> Parser a -> Parser a
a <&> b = (<>) <$> a <*> b
infixl 4 <&>
strOf :: [String] -> Parser String
strOf = foldr1 (<|>) . map string
chainl1 :: Parser a -> Parser (a -> a -> a) -> Parser a
chainl1 p op = p >>= rest
where rest x = do
f <- op
y <- p
rest (f x y)
<|> return x
chainr1 :: Parser a -> Parser (a -> a -> a) -> Parser a
chainr1 p op = expr
where expr = first <*> expr <|> p
first = do
x <- p
f <- op
return (f x)
trace :: String -> Parser a -> Parser a
trace str exp = do
--r <- T.trace ("entering " ++ str) exp
--return $ T.trace ("exiting " ++ str) r
exp
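-- Editor's sketch (hypothetical binding, not part of the original module):
-- a minimal usage example of the combinators above, assuming only the
-- definitions in this file. It parses a comma-separated list of integers
-- and requires the whole input to be consumed.
exampleNumbers :: Either Error [Integer]
exampleNumbers = parse (number `sepBy` char ',' <* eof) "1,2,3"
-- exampleNumbers == Right [1,2,3]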
|
abbradar/comps
|
src/Parser.hs
|
bsd-3-clause
| 4,252
| 0
| 19
| 1,428
| 1,657
| 858
| 799
| 140
| 3
|
module Waldo.Waldo (
WaldoData(..)
, loadWaldo
) where
import Control.Monad
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import Waldo.Stalk
import Waldo.Script
data WaldoData =
WaldoData {
wdStalkDB :: StalkDB
, wdGenScript :: HashMap T.Text (PersonalData -> IO Script)
}
loadWaldo :: [(T.Text, IO (PersonalData -> IO Script))] -> IO WaldoData
loadWaldo storyGenLoaders = do
sdb <- loadStalkDB
storyGens <- forM storyGenLoaders $ \(nm, ldr) -> do
sgen <- ldr
return (nm, sgen)
let wdata = WaldoData { wdStalkDB = sdb
, wdGenScript = Map.fromList storyGens
}
putStrLn "Loading completed!"
return wdata
|
davean/waldo
|
Waldo/Waldo.hs
|
bsd-3-clause
| 780
| 0
| 13
| 208
| 234
| 129
| 105
| 23
| 1
|
{-# LANGUAGE PolyKinds, DataKinds, TemplateHaskell, TypeFamilies,
GADTs, TypeOperators, RankNTypes, FlexibleContexts, UndecidableInstances,
FlexibleInstances, ScopedTypeVariables, MultiParamTypeClasses,
OverlappingInstances, TemplateHaskell #-}
module Oxymoron.Regions.Image where
import qualified Graphics.Rendering.OpenGL.Raw as GL
import Graphics.Rendering.OpenGL.Raw (GLint, GLenum, GLsizei, GLchar, GLint,
gl_TEXTURE_CUBE_MAP_POSITIVE_X, gl_TEXTURE_2D)
import Control.Monad.Trans.Region
import Control.Monad.Trans.Region.OnExit
import Oxymoron.Regions.Resource
import Control.Monad.IO.Class (MonadIO, liftIO)
import Oxymoron.Regions.Shader
import Control.Lens
import Data.Singletons
import Control.Monad.State
import Oxymoron.Class
singletons [d|
data ImageTarget = GL_TEXTURE_2D
| GL_TEXTURE_CUBE_MAP_POSITIVE_X
| GL_TEXTURE_CUBE_MAP_NEGATIVE_X
| GL_TEXTURE_CUBE_MAP_POSITIVE_Y
| GL_TEXTURE_CUBE_MAP_NEGATIVE_Y
| GL_TEXTURE_CUBE_MAP_POSITIVE_Z
| GL_TEXTURE_CUBE_MAP_NEGATIVE_Z
|]
instance ToGLenum (Sing (a :: ImageTarget)) where
toGLenum SGL_TEXTURE_2D = gl_TEXTURE_2D
toGLenum SGL_TEXTURE_CUBE_MAP_POSITIVE_X = gl_TEXTURE_CUBE_MAP_POSITIVE_X
glGenTextures :: (MonadIO pr, ImageResource imageResource)
=> GLsizei
-> RegionT s pr [imageResource (RegionT s pr)]
glGenTextures count = undefined
glBindTexture :: (MonadIO cr,
AncestorRegion pr cr,
ImageResource imageResource)
=> Sing (a :: ImageTarget) -> imageResource pr -> cr ()
glBindTexture target handle = liftIO $
GL.glBindTexture (toGLenum target) (_resourceId . unimageResource $ handle)
|
jfischoff/oxymoron
|
src/Oxymoron/Regions/Image.hs
|
bsd-3-clause
| 1,920
| 0
| 11
| 511
| 301
| 174
| 127
| 38
| 1
|
import Graphics.UI.GLUT
myPoints :: [(GLfloat,GLfloat,GLfloat)]
myPoints = [ (sin (2*pi*k/12), cos (2*pi*k/12), 0) | k <- [1..12] ]
main :: IO ()
main = do
(_progName, _args) <- getArgsAndInitialize
_window <- createWindow _progName
displayCallback $= display
reshapeCallback $= Just reshape
mainLoop
display :: DisplayCallback
display = do
clear [ ColorBuffer ]
renderPrimitive Triangles $
mapM_ (\(x, y, z) -> vertex $ Vertex3 x y z) myPoints
flush
reshape :: ReshapeCallback
reshape size = do
viewport $= (Position 0 0, size)
postRedisplay Nothing
|
rumblesan/hask3d
|
src/main.hs
|
bsd-3-clause
| 580
| 0
| 12
| 111
| 250
| 129
| 121
| 20
| 1
|
module Main(main) where
import Test.HUnit
import System.Exit
import Data.Ratio
import Data.Map (Map)
import qualified Data.Map as M
import Raven.Data.Stat
main :: IO ()
main = do
cs@(Counts _ _ errs fails) <- runTestTT allTests
putStrLn (showCounts cs)
if errs > 0 || fails > 0
then exitFailure
else exitSuccess
allTests :: Test
allTests = TestList $ runtests mean meanData
++ runtests intMean intMeanData
++ runtests median medianData
++ runtests intMedian intMedianData
++ runtests countInstances countInstancesData
++ runtests mode modeData
++ runtests variance varianceData
++ runtests intVariance intVarianceData
++ runtests ratioVariance ratioVarianceData
--run tests with 1 input
runtests :: (Eq b,Show b) => (a -> b) -> [(String,a,b)] -> [Test]
runtests f ls = map (uncurry (~:)) (createtest f ls)
--create tests with 1 input
createtest :: (Eq b,Show b) => (a -> b) -> [(String,a,b)] -> [(String,Test)]
createtest f = map (\(s,x,y) -> (s,TestCase $ y @=? f x))
meanData :: [(String,[Double],Double)]
meanData =
[ ("mean: empty",[],0)
, ("mean: single",[1],1)
, ("mean: norm",[1,2,3],2)
, ("mean: norm 1",[1,3,4],8/3)
, ("mean: norm 2",[-1,2.5],1.5/2)
]
intMeanData :: [(String,[Int],Ratio Int)]
intMeanData =
[ ("intMean: empty",[],0)
, ("intMean: single",[2],2)
, ("intMean: norm",[1,2,3],2)
, ("intMean: norm 1",[1,2],3%2)
, ("intMean: norm 2",[1,3,4],8%3)
, ("intMean: norm 3",[-1,2,4],5%3)
]
ratioMeanData :: [(String,[Ratio Int],Ratio Int)]
ratioMeanData =
[ ("ratioMean: empty",[],0)
, ("ratioMean: single",[1],1)
, ("ratioMean: norm",[1,2],3%2)
, ("ratioMean: norm 1",[1%2,3%2],1)
, ("ratioMean: norm 2",[-1%4,3%4],1%4)
]
medianData :: [(String,[Double],Double)]
medianData =
[ ("median: empty",[],0)
, ("median: single",[1],1)
, ("median: double",[2,1],3/2)
, ("median: norm",[3,1,2],2)
, ("median: norm 1",[1.5,-0.5],0.5)
, ("median: norm 2",[4,2,6,8,9,1,5,3,7],5)
, ("median: norm 3",[5,10,1,4,3,7,6,8,2,9],5.5)
, ("median: norm 4",[9,4,6],6)
]
intMedianData :: [(String,[Int],Ratio Int)]
intMedianData =
[ ("intMedian: empty",[],0)
, ("intMedian: single",[4],4)
, ("intMedian: double",[2,3],5%2)
, ("intMedian: norm",[3,1,2],2)
, ("intMedian: norm 1",[-1,5,0],0)
, ("intMedian: norm 2",[5,10,1,4,3,7,6,8,2,9],11%2)
, ("intMedian: norm 3",[4,2,6,8,9,1,5,3,7],5)
, ("intMedian: norm 4",[3,-1,6,7],9%2)
]
ratioMedianData :: [(String,[Ratio Int],Ratio Int)]
ratioMedianData =
[ ("ratioMedian: empty",[],0)
, ("ratioMedian: single",[-1],-1)
, ("ratioMedian: double",[4%3,2%3],1)
, ("ratioMedian: norm",[4%3,-5%2,7],4%3)
, ("ratioMedian: norm 1",[2%3,1%2,-6,5],7%12)
]
countInstancesData :: [(String,[Int],Map Int Int)]
countInstancesData =
[ ("countInstances: empty",[],M.empty)
, ("countInstances: single",[2],M.fromList [(2,1)])
, ("countInstances: no repeats",[1..4],
M.fromList [(1,1),(2,1),(3,1),(4,1)])
, ("countInstances: all repeat",[2,2,2],M.fromList [(2,3)])
, ("countInstances: norm",[1,2,3,2],
M.fromList [(1,1),(2,2),(3,1)])
]
modeData :: [(String,[Int],[(Int,Int)])]
modeData =
[ ("mode: empty",[],[])
, ("mode: single",[5],[(5,1)])
, ("mode: norm",[1,2,3],[(3,1),(2,1),(1,1)])
, ("mode: norm 1",[1,2,3,2],[(2,2)])
]
varianceData :: [(String,[Float],Float)]
varianceData =
[ ("variance: empty",[],0)
, ("variance: single",[3.43],0)
, ("variance: zero",[7.1,7.1,7.1,7.1],0)
, ("variance: norm",[1,2,3],2/3)
, ("variance: norm 1",[1/2,3/2,4],13/6)
]
intVarianceData :: [(String,[Int],Ratio Int)]
intVarianceData =
[ ("intVariance: empty",[],0)
, ("intVariance: single",[5],0)
, ("intVariance: zero",[2,2,2,2],0)
, ("intVariance: norm",[1,2,3],2%3)
, ("intVariance: norm 1",[0,3,4,5],7%2)
]
ratioVarianceData :: [(String,[Ratio Int],Ratio Int)]
ratioVarianceData =
[ ("ratioVariance: empty",[],0)
, ("ratioVariance: single",[4%7],0)
, ("ratioVariance: zero",[2%7,2%7,2%7,2%7],0)
, ("ratioVariance: norm",[1,2,3],2%3)
]
|
denumerate/raven
|
src/Raven/Data/Stat.Test.hs
|
bsd-3-clause
| 4,113
| 0
| 14
| 702
| 2,212
| 1,370
| 842
| 112
| 2
|
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
module Budget.Database.Schema where
import Budget.Database.TH (str)
import Data.Time (Day, LocalTime, UTCTime)
import Database.HDBC (runRaw)
import Database.HDBC.Query.TH (defineTableFromDB)
import Database.HDBC.Schema.Driver (typeMap)
import Database.HDBC.Schema.SQLite3 (driverSQLite3)
import Database.HDBC.Sqlite3 (connectSqlite3)
import Language.Haskell.TH (Q, Dec, TypeQ)
-- FIXME: the word "revenue" might also work instead of "income"
schema :: String
schema = [str|
CREATE TABLE cost_type (
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL
);
INSERT INTO cost_type VALUES (1, 'fixed');
INSERT INTO cost_type VALUES (2, 'variable');
CREATE TABLE account_type (
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL
);
INSERT INTO account_type VALUES (1, 'asset');
INSERT INTO account_type VALUES (2, 'liability');
CREATE TABLE account (
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
account_type INTEGER NOT NULL,
FOREIGN KEY(account_type) REFERENCES account_type(id)
);
CREATE TABLE item_type (
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL
);
INSERT INTO item_type VALUES (1, 'income');
INSERT INTO item_type VALUES (2, 'expense');
CREATE TABLE item_category (
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
item_type INTEGER NOT NULL,
FOREIGN KEY(item_type) REFERENCES item_type(id)
);
CREATE TABLE item (
id VARCHAR NOT NULL PRIMARY KEY,
item_category INTEGER NOT NULL,
date DATE NOT NULL,
name VARCHAR NOT NULL,
amount INTEGER NOT NULL,
note VARCHAR,
create_on TIMESTAMP NOT NULL,
FOREIGN KEY(item_category) REFERENCES item_category(id)
);
CREATE TABLE geometry (
id INTEGER NOT NULL PRIMARY KEY,
lat REAL NOT NULL,
lng REAL NOT NULL
);
CREATE TABLE location (
item_id VARCHAR NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
geometry INTEGER,
FOREIGN KEY(item_id) REFERENCES item(id),
FOREIGN KEY(geometry) REFERENCES geometry(id)
);
CREATE TABLE item_template (
name VARCHAR NOT NULL,
item_category INTEGER NOT NULL,
PRIMARY KEY (name, item_category),
FOREIGN KEY(item_category) REFERENCES item_category(id)
);
|]
convTypes :: [(String, TypeQ)]
convTypes =
[ ("float", [t|Double|])
, ("date", [t|Day|])
, ("timestamp", [t|LocalTime|])
, ("double", [t|Double|])
, ("varchar", [t|String|])
, ("integer", [t|Int|])
]
defineTable' :: String -> String -> Q [Dec]
defineTable' source tableName =
defineTableFromDB
connWithSchema
(driverSQLite3 { typeMap = convTypes })
"main"
tableName
[''Show]
where
conn = connectSqlite3 ":memory:"
connWithSchema = conn >>= (\c -> runRaw c source >> (return c))
defineTable :: String -> Q [Dec]
defineTable = defineTable' schema
|
utky/budget
|
src/Budget/Database/Schema.hs
|
bsd-3-clause
| 2,953
| 0
| 12
| 621
| 344
| 220
| 124
| 34
| 1
|
module Main (main) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.Text as T
import Options.Applicative
import System.IO.Unsafe (unsafePerformIO)
import Test.Tasty
import Test.Tasty.HUnit
import Game.LambdaHack.Client.UI.UIOptions
import Game.LambdaHack.Client.UI.UIOptionsParse
import Game.LambdaHack.Common.ClientOptions
import qualified Game.LambdaHack.Content.RuleKind as RK
import Game.LambdaHack.Server
import qualified Content.RuleKind
import TieKnot
import ActorStateUnitTests
import CommonMUnitTests
import HandleHelperMUnitTests
import HandleHumanLocalMUnitTests
import InventoryMUnitTests
import ItemDescriptionUnitTests
import ItemKindUnitTests
import ItemRevUnitTests
import LevelUnitTests
import MonadClientUIUnitTests
import ReqFailureUnitTests
import SessionUIUnitTests
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup "Tests" [ actorStateUnitTests
, commonMUnitTests
, handleHelperMUnitTests
, handleHumanLocalMUnitTests
, inventoryMUnitTests
, itemDescriptionUnitTests
, itemKindUnitTests
, itemRevUnitTests
, levelUnitTests
, reqFailureUnitTests
, macroTests
, monadClientUIUnitTests
, integrationTests
]
integrationTests :: TestTree
integrationTests = testGroup "integrationTests" $
[ testCase "Null frontend; 5 frames" $ do
let seed = "SMGen 131 141"
args = words "--dbgMsgSer --logPriority 4 --newGame 1 --noAnim --maxFps 100000 --frontendNull --benchmark --stopAfterFrames 5 --automateAll --keepAutomated --gameMode crawl"
++ [ "--setDungeonRng", seed, "--setMainRng", seed]
serverOptions <- handleParseResult $ execParserPure defaultPrefs serverOptionsPI args
tieKnot serverOptions
]
#ifndef USE_BROWSER
++
let corule = RK.makeData Content.RuleKind.standardRules
uiOptions = unsafePerformIO $ mkUIOptions corule defClientOptions
testFontset :: Int -> String -> TestTree
testFontset n fontsetName =
testCase ("SDL fronted; init only; " ++ fontsetName ++ " fontset") $ do
-- This test only works when run from the same directory that
-- the .cabal file is in. And this is what Debian needs, so OK.
-- The hacky log priority 0 tells SDL frontend to init
-- and quit at once, for testing on CIs without graphics access.
let seed = "SMGen " ++ show (13 + 2 * n) ++ " " ++ show (15 + 4 * n)
args2 = words "--dbgMsgSer --logPriority 0 --newGame 3 --maxFps 100000 --benchmark --stopAfterFrames 5 --automateAll --keepAutomated --gameMode battle"
++ [ "--setDungeonRng", seed, "--setMainRng", seed
, "--fontset", fontsetName ]
serverOptions2 <- handleParseResult $ execParserPure defaultPrefs serverOptionsPI args2
tieKnot serverOptions2
in zipWith testFontset [0..] $ map (T.unpack . fst) $ uFontsets uiOptions
#endif
|
LambdaHack/LambdaHack
|
test/Spec.hs
|
bsd-3-clause
| 3,342
| 0
| 22
| 974
| 515
| 294
| 221
| 64
| 1
|
module BrownPLT.JavaScript.Contracts
( Contract (..)
, InterfaceItem (..)
, compile
, compileFormatted
, compileRelease
, parseInterface
, getContractLibraryPath
) where
import System.FilePath
import Paths_flapjax_fixed -- created by Cabal
import BrownPLT.JavaScript.Contracts.Types
import BrownPLT.JavaScript.Contracts.Compiler
import BrownPLT.JavaScript.Contracts.Parser
getContractLibraryPath :: IO FilePath
getContractLibraryPath = do
dataDir <- getDataDir
return $ dataDir </> "contracts.js"
|
ducis/flapjax-fixed
|
JsContracts-0.5.3/src/BrownPLT/JavaScript/Contracts.hs
|
bsd-3-clause
| 520
| 0
| 8
| 72
| 99
| 62
| 37
| 17
| 1
|
{-# LANGUAGE CPP, BangPatterns, MagicHash, NondecreasingIndentation, OverloadedStrings #-}
-------------------------------------------------------------------------------
--
-- | Main API for compiling plain Haskell source code.
--
-- This module implements compilation of a Haskell source. It is
-- /not/ concerned with preprocessing of source files; this is handled
-- in "DriverPipeline".
--
-- There are various entry points depending on what mode we're in:
-- "batch" mode (@--make@), "one-shot" mode (@-c@, @-S@ etc.), and
-- "interactive" mode (GHCi). There are also entry points for
-- individual passes: parsing, typechecking/renaming, desugaring, and
-- simplification.
--
-- All the functions here take an 'HscEnv' as a parameter, but none of
-- them return a new one: 'HscEnv' is treated as an immutable value
-- from here on in (although it has mutable components, for the
-- caches).
--
-- Warning messages are dealt with consistently throughout this API:
-- during compilation warnings are collected, and before any function
-- in @HscMain@ returns, the warnings are either printed, or turned
-- into a real compilation error if the @-Werror@ flag is enabled.
--
-- (c) The GRASP/AQUA Project, Glasgow University, 1993-2000
--
-------------------------------------------------------------------------------
module ETA.Main.HscMain
(
-- * Making an HscEnv
newHscEnv
-- * Compiling complete source files
, Messager, batchMsg
, HscStatus (..)
, hscCompileOneShot
, hscCompileCore
, genericHscCompileGetFrontendResult
, genModDetails
, hscSimpleIface
, hscWriteIface
, hscNormalIface
, hscGenHardCode
, hscInteractive
-- * Running passes separately
, hscParse
, hscTypecheckRename
, hscDesugar
, makeSimpleIface
, makeSimpleDetails
, hscSimplify -- ToDo, shouldn't really export this
-- * Support for interactive evaluation
, hscParseIdentifier
, hscTcRcLookupName
, hscTcRnGetInfo
, hscCheckSafe
, hscGetSafe
#ifdef GHCI
, hscIsGHCiMonad
, hscGetModuleInterface
, hscRnImportDecls
, hscTcRnLookupRdrName
, hscStmt, hscStmtWithLocation
, hscDecls, hscDeclsWithLocation
, hscTcExpr, hscImport, hscKcType
, hscCompileCoreExpr
-- * Low-level exports for hooks
, hscCompileCoreExpr'
#endif
-- We want to make sure that we export enough to be able to redefine
-- hscFileFrontEnd in client code
, hscParse', hscSimplify', hscDesugar', tcRnModule'
, getHscEnv
, hscSimpleIface', hscNormalIface'
, oneShotMsg
, hscFileFrontEnd, genericHscFrontend, dumpIfaceStats
, renderRtsConfig
) where
#ifdef GHCI
import ETA.BasicTypes.Id
import ETA.BasicTypes.BasicTypes ( HValue )
import ETA.Interactive.ByteCodeGen ( byteCodeGen, coreExprToBCOs )
import ETA.Interactive.Linker
import ETA.Core.CoreTidy ( tidyExpr )
import ETA.Types.Type ( Type )
import ETA.Prelude.PrelNames
import {- Kind parts of -} ETA.Types.Type ( Kind )
import ETA.Core.CoreLint ( lintInteractiveExpr )
import ETA.DeSugar.DsMeta ( templateHaskellNames )
import ETA.BasicTypes.VarEnv ( emptyTidyEnv )
import ETA.Utils.Panic
import ETA.BasicTypes.ConLike
import GHC.Exts
#endif
import ETA.BasicTypes.Module
import ETA.BasicTypes.RdrName
import ETA.HsSyn.HsSyn
import ETA.Core.CoreSyn
import ETA.Utils.StringBuffer
import ETA.Parser.Parser
import qualified ETA.Parser.Lexer as Lexer
import ETA.Parser.Lexer
import ETA.BasicTypes.SrcLoc
import ETA.TypeCheck.TcRnDriver
import ETA.Iface.TcIface ( typecheckIface )
import ETA.TypeCheck.TcRnMonad
import ETA.Iface.IfaceEnv ( initNameCache )
import ETA.Iface.LoadIface ( ifaceStats, initExternalPackageState )
import ETA.Prelude.PrelInfo
import ETA.Iface.MkIface
import ETA.DeSugar.DeSugar
import ETA.SimplCore.SimplCore
import ETA.Main.TidyPgm
import ETA.Core.CorePrep
import ETA.StgSyn.CoreToStg ( coreToStg )
--import qualified StgCmm ( codeGen )
import ETA.StgSyn.StgSyn
import ETA.Profiling.CostCentre
--import ProfInit
import ETA.Types.TyCon
import ETA.BasicTypes.Name
import ETA.SimplStg.SimplStg ( stg2stg )
import ETA.BasicTypes.NameEnv ( emptyNameEnv )
import ETA.BasicTypes.NameSet ( emptyNameSet )
import ETA.Types.InstEnv
import ETA.Types.FamInstEnv
import ETA.Utils.Fingerprint ( Fingerprint )
import ETA.Main.Hooks
import ETA.Main.DynFlags
import ETA.Main.ErrUtils
import ETA.Utils.Outputable
import ETA.Main.HscStats ( ppSourceStats )
import ETA.Main.HscTypes
import ETA.Utils.FastString
import ETA.Utils.UniqFM ( emptyUFM )
import ETA.BasicTypes.UniqSupply
import ETA.Utils.Bag
import ETA.Utils.Exception
-- import qualified ETA.Utils.Stream as Stream
-- import ETA.Utils.Stream (Stream)
import ETA.Utils.Util
import ETA.CodeGen.Main
import ETA.CodeGen.Name
-- import ETA.Debug
import ETA.CodeGen.Rts
import ETA.Utils.JAR
import ETA.Main.Packages
-- import ETA.Util
import Codec.JVM
-- import Debug.Trace(traceShow)
import Data.List
import Control.Monad hiding (void)
import Data.Maybe
import Data.IORef
import System.FilePath as FilePath
import System.Directory
import qualified Data.Map as M
import qualified Data.Text as T
import Control.Arrow((&&&))
import Data.Foldable(fold)
import qualified Data.Monoid as Mon
#include "HsVersions.h"
{- **********************************************************************
%* *
Initialisation
%* *
%********************************************************************* -}
newHscEnv :: DynFlags -> IO HscEnv
newHscEnv dflags = do
eps_var <- newIORef initExternalPackageState
us <- mkSplitUniqSupply 'r'
nc_var <- newIORef (initNameCache us knownKeyNames)
fc_var <- newIORef emptyUFM
mlc_var <- newIORef emptyModuleEnv
return HscEnv { hsc_dflags = dflags,
hsc_targets = [],
hsc_mod_graph = [],
hsc_IC = emptyInteractiveContext dflags,
hsc_HPT = emptyHomePackageTable,
hsc_EPS = eps_var,
hsc_NC = nc_var,
hsc_FC = fc_var,
hsc_MLC = mlc_var,
hsc_type_env_var = Nothing }
knownKeyNames :: [Name] -- Put here to avoid loops involving DsMeta,
knownKeyNames = -- where templateHaskellNames are defined
map getName wiredInThings
++ basicKnownKeyNames
#ifdef GHCI
++ templateHaskellNames
#endif
-- -----------------------------------------------------------------------------
getWarnings :: Hsc WarningMessages
getWarnings = Hsc $ \_ w -> return (w, w)
clearWarnings :: Hsc ()
clearWarnings = Hsc $ \_ _ -> return ((), emptyBag)
logWarnings :: WarningMessages -> Hsc ()
logWarnings w = Hsc $ \_ w0 -> return ((), w0 `unionBags` w)
getHscEnv :: Hsc HscEnv
getHscEnv = Hsc $ \e w -> return (e, w)
handleWarnings :: Hsc ()
handleWarnings = do
dflags <- getDynFlags
w <- getWarnings
liftIO $ printOrThrowWarnings dflags w
clearWarnings
-- | log warning in the monad, and if there are errors then
-- throw a SourceError exception.
logWarningsReportErrors :: Messages -> Hsc ()
logWarningsReportErrors (warns,errs) = do
logWarnings warns
when (not $ isEmptyBag errs) $ throwErrors errs
-- | Throw some errors.
throwErrors :: ErrorMessages -> Hsc a
throwErrors = liftIO . throwIO . mkSrcErr
-- | Deal with errors and warnings returned by a compilation step
--
-- In order to reduce dependencies on other parts of the compiler, functions
-- outside the "main" parts of GHC return warnings and errors as a parameter
-- and signal success by wrapping the result in a 'Maybe' type. This
-- function logs the returned warnings and propagates errors as exceptions
-- (of type 'SourceError').
--
-- This function assumes the following invariants:
--
-- 1. If the second result indicates success (is of the form 'Just x'),
-- there must be no error messages in the first result.
--
-- 2. If there are no error messages, but the second result indicates failure
-- there should be warnings in the first result. That is, if the action
-- failed, it must have been due to the warnings (i.e., @-Werror@).
ioMsgMaybe :: IO (Messages, Maybe a) -> Hsc a
ioMsgMaybe ioA = do
((warns,errs), mb_r) <- liftIO ioA
logWarnings warns
case mb_r of
Nothing -> throwErrors errs
Just r -> ASSERT( isEmptyBag errs ) return r
-- | like ioMsgMaybe, except that we ignore error messages and return
-- 'Nothing' instead.
ioMsgMaybe' :: IO (Messages, Maybe a) -> Hsc (Maybe a)
ioMsgMaybe' ioA = do
((warns,_errs), mb_r) <- liftIO $ ioA
logWarnings warns
return mb_r
-- -----------------------------------------------------------------------------
-- | Lookup things in the compiler's environment
#ifdef GHCI
hscTcRnLookupRdrName :: HscEnv -> Located RdrName -> IO [Name]
hscTcRnLookupRdrName hsc_env0 rdr_name
= runInteractiveHsc hsc_env0 $
do { hsc_env <- getHscEnv
; ioMsgMaybe $ tcRnLookupRdrName hsc_env rdr_name }
#endif
hscTcRcLookupName :: HscEnv -> Name -> IO (Maybe TyThing)
hscTcRcLookupName hsc_env0 name = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ioMsgMaybe' $ tcRnLookupName hsc_env name
-- ignore errors: the only error we're likely to get is
-- "name not found", and the Maybe in the return type
-- is used to indicate that.
hscTcRnGetInfo :: HscEnv -> Name -> IO (Maybe (TyThing, Fixity, [ClsInst], [FamInst]))
hscTcRnGetInfo hsc_env0 name
= runInteractiveHsc hsc_env0 $
do { hsc_env <- getHscEnv
; ioMsgMaybe' $ tcRnGetInfo hsc_env name }
#ifdef GHCI
hscIsGHCiMonad :: HscEnv -> String -> IO Name
hscIsGHCiMonad hsc_env name
= runHsc hsc_env $ ioMsgMaybe $ isGHCiMonad hsc_env name
hscGetModuleInterface :: HscEnv -> Module -> IO ModIface
hscGetModuleInterface hsc_env0 mod = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ioMsgMaybe $ getModuleInterface hsc_env mod
-- -----------------------------------------------------------------------------
-- | Rename some import declarations
hscRnImportDecls :: HscEnv -> [LImportDecl RdrName] -> IO GlobalRdrEnv
hscRnImportDecls hsc_env0 import_decls = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ioMsgMaybe $ tcRnImportDecls hsc_env import_decls
#endif
-- -----------------------------------------------------------------------------
-- | parse a file, returning the abstract syntax
hscParse :: HscEnv -> ModSummary -> IO HsParsedModule
hscParse hsc_env mod_summary = runHsc hsc_env $ hscParse' mod_summary
-- internal version, that doesn't fail due to -Werror
hscParse' :: ModSummary -> Hsc HsParsedModule
hscParse' mod_summary = do
dflags <- getDynFlags
let src_filename = ms_hspp_file mod_summary
maybe_src_buf = ms_hspp_buf mod_summary
-------------------------- Parser ----------------
liftIO $ showPass dflags "Parser"
{-# SCC "Parser" #-} do
-- sometimes we already have the buffer in memory, perhaps
-- because we needed to parse the imports out of it, or get the
-- module name.
buf <- case maybe_src_buf of
Just b -> return b
Nothing -> liftIO $ hGetStringBuffer src_filename
let loc = mkRealSrcLoc (mkFastString src_filename) 1 1
case unP parseModule (mkPState dflags buf loc) of
PFailed span err ->
liftIO $ throwOneError (mkPlainErrMsg dflags span err)
POk pst rdr_module -> do
logWarningsReportErrors (getMessages pst)
liftIO $ dumpIfSet_dyn dflags Opt_D_dump_parsed "Parser" $
ppr rdr_module
liftIO $ dumpIfSet_dyn dflags Opt_D_source_stats "Source Statistics" $
ppSourceStats False rdr_module
-- To get the list of extra source files, we take the list
-- that the parser gave us,
-- - eliminate files beginning with '<'. gcc likes to use
-- pseudo-filenames like "<built-in>" and "<command-line>"
            --   - normalise them (eliminate differences between ./f and f)
-- - filter out the preprocessed source file
-- - filter out anything beginning with tmpdir
-- - remove duplicates
-- - filter out the .hs/.lhs source filename if we have one
--
let n_hspp = FilePath.normalise src_filename
srcs0 = nub $ filter (not . (tmpDir dflags `isPrefixOf`))
$ filter (not . (== n_hspp))
$ map FilePath.normalise
$ filter (not . (isPrefixOf "<"))
$ map unpackFS
$ srcfiles pst
srcs1 = case ml_hs_file (ms_location mod_summary) of
Just f -> filter (/= FilePath.normalise f) srcs0
Nothing -> srcs0
-- sometimes we see source files from earlier
-- preprocessing stages that cannot be found, so just
-- filter them out:
srcs2 <- liftIO $ filterM doesFileExist srcs1
return HsParsedModule {
hpm_module = rdr_module,
hpm_src_files = srcs2,
hpm_annotations
= (M.fromListWith (++) $ annotations pst,
M.fromList $ ((noSrcSpan,comment_q pst)
:(annotations_comments pst)))
}
-- XXX: should this really be a Maybe X? Check under which circumstances this
-- can become a Nothing and decide whether this should instead throw an
-- exception/signal an error.
type RenamedStuff =
(Maybe (HsGroup Name, [LImportDecl Name], Maybe [LIE Name],
Maybe LHsDocString))
-- | Rename and typecheck a module, additionally returning the renamed syntax
hscTypecheckRename :: HscEnv -> ModSummary -> HsParsedModule
-> IO (TcGblEnv, RenamedStuff)
hscTypecheckRename hsc_env mod_summary rdr_module = runHsc hsc_env $ do
tc_result <- tcRnModule' hsc_env mod_summary True rdr_module
-- This 'do' is in the Maybe monad!
let rn_info = do decl <- tcg_rn_decls tc_result
let imports = tcg_rn_imports tc_result
exports = tcg_rn_exports tc_result
doc_hdr = tcg_doc_hdr tc_result
return (decl,imports,exports,doc_hdr)
return (tc_result, rn_info)
-- wrapper around tcRnModule to handle safe haskell extras
tcRnModule' :: HscEnv -> ModSummary -> Bool -> HsParsedModule
-> Hsc TcGblEnv
tcRnModule' hsc_env sum save_rn_syntax mod = do
tcg_res <- {-# SCC "Typecheck-Rename" #-}
ioMsgMaybe $
tcRnModule hsc_env (ms_hsc_src sum) save_rn_syntax mod
tcSafeOK <- liftIO $ readIORef (tcg_safeInfer tcg_res)
dflags <- getDynFlags
let allSafeOK = safeInferred dflags && tcSafeOK
-- end of the safe haskell line, how to respond to user?
if not (safeHaskellOn dflags) || (safeInferOn dflags && not allSafeOK)
-- if safe Haskell off or safe infer failed, mark unsafe
then markUnsafeInfer tcg_res emptyBag
-- module (could be) safe, throw warning if needed
else do
tcg_res' <- hscCheckSafeImports tcg_res
safe <- liftIO $ readIORef (tcg_safeInfer tcg_res')
when safe $ do
case wopt Opt_WarnSafe dflags of
True -> (logWarnings $ unitBag $ mkPlainWarnMsg dflags
(warnSafeOnLoc dflags) $ errSafe tcg_res')
False | safeHaskell dflags == Sf_Trustworthy &&
wopt Opt_WarnTrustworthySafe dflags ->
(logWarnings $ unitBag $ mkPlainWarnMsg dflags
(trustworthyOnLoc dflags) $ errTwthySafe tcg_res')
False -> return ()
return tcg_res'
where
pprMod t = ppr $ moduleName $ tcg_mod t
errSafe t = quotes (pprMod t) <+> text "has been inferred as safe!"
errTwthySafe t = quotes (pprMod t)
<+> text "is marked as Trustworthy but has been inferred as safe!"
-- | Convert a typechecked module to Core
hscDesugar :: HscEnv -> ModSummary -> TcGblEnv -> IO ModGuts
hscDesugar hsc_env mod_summary tc_result =
runHsc hsc_env $ hscDesugar' (ms_location mod_summary) tc_result
hscDesugar' :: ModLocation -> TcGblEnv -> Hsc ModGuts
hscDesugar' mod_location tc_result = do
hsc_env <- getHscEnv
r <- ioMsgMaybe $
{-# SCC "deSugar" #-}
deSugar hsc_env mod_location tc_result
-- always check -Werror after desugaring, this is the last opportunity for
-- warnings to arise before the backend.
handleWarnings
return r
-- | Make a 'ModIface' from the results of typechecking. Used when
-- not optimising, and the interface doesn't need to contain any
-- unfoldings or other cross-module optimisation info.
-- ToDo: the old interface is only needed to get the version numbers,
-- we should use fingerprint versions instead.
makeSimpleIface :: HscEnv -> Maybe ModIface -> TcGblEnv -> ModDetails
-> IO (ModIface,Bool)
makeSimpleIface hsc_env maybe_old_iface tc_result details = runHsc hsc_env $ do
safe_mode <- hscGetSafeMode tc_result
ioMsgMaybe $ do
mkIfaceTc hsc_env (fmap mi_iface_hash maybe_old_iface) safe_mode
details tc_result
-- | Make a 'ModDetails' from the results of typechecking. Used when
-- typechecking only, as opposed to full compilation.
makeSimpleDetails :: HscEnv -> TcGblEnv -> IO ModDetails
makeSimpleDetails hsc_env tc_result = mkBootModDetailsTc hsc_env tc_result
{- **********************************************************************
%* *
The main compiler pipeline
%* *
%********************************************************************* -}
{-
--------------------------------
The compilation proper
--------------------------------
It's the task of the compilation proper to compile Haskell, hs-boot and core
files to either byte-code, hard-code (C, asm, LLVM, etc.) or to nothing at all
(the module is still parsed and type-checked; this feature is mostly used by
IDEs and the like). Compilation can happen in either 'one-shot', 'batch',
'nothing', or 'interactive' mode. 'One-shot' mode targets hard-code, 'batch'
mode targets hard-code, 'nothing' mode targets nothing and 'interactive' mode
targets byte-code.
The modes are kept separate because of their different types and meanings:
* In 'one-shot' mode, we're only compiling a single file and can therefore
discard the new ModIface and ModDetails. This is also the reason it only
targets hard-code; compiling to byte-code or nothing doesn't make sense when
we discard the result.
* 'Batch' mode is like 'one-shot' except that we keep the resulting ModIface
and ModDetails. 'Batch' mode doesn't target byte-code since that would require us to
return the newly compiled byte-code.
* 'Nothing' mode has exactly the same type as 'batch' mode but they're still
kept separate. This is because compiling to nothing is fairly special: We
don't output any interface files, we don't run the simplifier and we don't
generate any code.
* 'Interactive' mode is similar to 'batch' mode except that we return the
compiled byte-code together with the ModIface and ModDetails.
Trying to compile a hs-boot file to byte-code will result in a run-time error.
This is the only thing that isn't caught by the type-system.
-}
type Messager = HscEnv -> (Int,Int) -> RecompileRequired -> ModSummary -> IO ()
genericHscCompileGetFrontendResult ::
Bool -- always do basic recompilation check?
-> Maybe TcGblEnv
-> Maybe Messager
-> HscEnv
-> ModSummary
-> SourceModified
-> Maybe ModIface -- Old interface, if available
-> (Int,Int) -- (i,n) = module i of n (for msgs)
-> IO (Either ModIface (TcGblEnv, Maybe Fingerprint))
genericHscCompileGetFrontendResult
always_do_basic_recompilation_check m_tc_result
mHscMessage hsc_env mod_summary source_modified mb_old_iface mod_index
= do
let msg what = case mHscMessage of
Just hscMessage -> hscMessage hsc_env mod_index what mod_summary
Nothing -> return ()
skip iface = do
msg UpToDate
return $ Left iface
compile mb_old_hash reason = do
msg reason
tc_result <- runHsc hsc_env $ genericHscFrontend mod_summary
return $ Right (tc_result, mb_old_hash)
stable = case source_modified of
SourceUnmodifiedAndStable -> True
_ -> False
case m_tc_result of
Just tc_result
| not always_do_basic_recompilation_check ->
return $ Right (tc_result, Nothing)
_ -> do
(recomp_reqd, mb_checked_iface)
<- {-# SCC "checkOldIface" #-}
checkOldIface hsc_env mod_summary
source_modified mb_old_iface
-- save the interface that comes back from checkOldIface.
-- In one-shot mode we don't have the old iface until this
-- point, when checkOldIface reads it from the disk.
let mb_old_hash = fmap mi_iface_hash mb_checked_iface
case mb_checked_iface of
Just iface | not (recompileRequired recomp_reqd) ->
-- If the module used TH splices when it was last
-- compiled, then the recompilation check is not
-- accurate enough (#481) and we must ignore
-- it. However, if the module is stable (none of
-- the modules it depends on, directly or
-- indirectly, changed), then we *can* skip
-- recompilation. This is why the SourceModified
-- type contains SourceUnmodifiedAndStable, and
-- it's pretty important: otherwise ghc --make
-- would always recompile TH modules, even if
-- nothing at all has changed. Stability is just
-- the same check that make is doing for us in
-- one-shot mode.
case m_tc_result of
Nothing
| mi_used_th iface && not stable ->
compile mb_old_hash (RecompBecause "TH")
_ ->
skip iface
_ ->
case m_tc_result of
Nothing -> compile mb_old_hash recomp_reqd
Just tc_result ->
return $ Right (tc_result, mb_old_hash)
genericHscFrontend :: ModSummary -> Hsc TcGblEnv
genericHscFrontend mod_summary =
getHooked hscFrontendHook genericHscFrontend' >>= ($ mod_summary)
genericHscFrontend' :: ModSummary -> Hsc TcGblEnv
genericHscFrontend' mod_summary = hscFileFrontEnd mod_summary
--------------------------------------------------------------
-- Compilers
--------------------------------------------------------------
hscCompileOneShot :: HscEnv
-> ModSummary
-> SourceModified
-> IO HscStatus
hscCompileOneShot env =
lookupHook hscCompileOneShotHook hscCompileOneShot' (hsc_dflags env) env
-- Compile Haskell/boot in OneShot mode.
hscCompileOneShot' :: HscEnv
-> ModSummary
-> SourceModified
-> IO HscStatus
hscCompileOneShot' hsc_env mod_summary src_changed
= do
-- One-shot mode needs a knot-tying mutable variable for interface
-- files. See TcRnTypes.TcGblEnv.tcg_type_env_var.
type_env_var <- newIORef emptyNameEnv
let mod = ms_mod mod_summary
hsc_env' = hsc_env{ hsc_type_env_var = Just (mod, type_env_var) }
msg what = oneShotMsg hsc_env' what
skip = do msg UpToDate
dumpIfaceStats hsc_env'
return HscUpToDate
compile mb_old_hash reason = runHsc hsc_env' $ do
liftIO $ msg reason
tc_result <- genericHscFrontend mod_summary
guts0 <- hscDesugar' (ms_location mod_summary) tc_result
dflags <- getDynFlags
case hscTarget dflags of
HscNothing -> do
when (gopt Opt_WriteInterface dflags) $ liftIO $ do
(iface, changed, _details) <- hscSimpleIface hsc_env tc_result mb_old_hash
hscWriteIface dflags iface changed mod_summary
return HscNotGeneratingCode
_ ->
case ms_hsc_src mod_summary of
t | isHsBootOrSig t ->
do (iface, changed, _) <- hscSimpleIface' tc_result mb_old_hash
liftIO $ hscWriteIface dflags iface changed mod_summary
return (case t of
HsBootFile -> HscUpdateBoot
HsigFile -> HscUpdateSig
HsSrcFile -> panic "hscCompileOneShot Src")
_ ->
do guts <- hscSimplify' guts0
(iface, changed, _details, cgguts) <- hscNormalIface' guts mb_old_hash
liftIO $ hscWriteIface dflags iface changed mod_summary
return $ HscRecomp cgguts mod_summary
-- XXX This is always False, because in one-shot mode the
-- concept of stability does not exist. The driver never
-- passes SourceUnmodifiedAndStable in here.
stable = case src_changed of
SourceUnmodifiedAndStable -> True
_ -> False
(recomp_reqd, mb_checked_iface)
<- {-# SCC "checkOldIface" #-}
checkOldIface hsc_env' mod_summary src_changed Nothing
-- save the interface that comes back from checkOldIface.
-- In one-shot mode we don't have the old iface until this
-- point, when checkOldIface reads it from the disk.
let mb_old_hash = fmap mi_iface_hash mb_checked_iface
case mb_checked_iface of
Just iface | not (recompileRequired recomp_reqd) ->
-- If the module used TH splices when it was last compiled,
-- then the recompilation check is not accurate enough (#481)
-- and we must ignore it. However, if the module is stable
-- (none of the modules it depends on, directly or indirectly,
-- changed), then we *can* skip recompilation. This is why
-- the SourceModified type contains SourceUnmodifiedAndStable,
-- and it's pretty important: otherwise ghc --make would
-- always recompile TH modules, even if nothing at all has
-- changed. Stability is just the same check that make is
-- doing for us in one-shot mode.
if mi_used_th iface && not stable
then compile mb_old_hash (RecompBecause "TH")
else skip
_ ->
compile mb_old_hash recomp_reqd
--------------------------------------------------------------
-- NoRecomp handlers
--------------------------------------------------------------
genModDetails :: HscEnv -> ModIface -> IO ModDetails
genModDetails hsc_env old_iface
= do
new_details <- {-# SCC "tcRnIface" #-}
initIfaceCheck hsc_env (typecheckIface old_iface)
dumpIfaceStats hsc_env
return new_details
--------------------------------------------------------------
-- Progress displayers.
--------------------------------------------------------------
oneShotMsg :: HscEnv -> RecompileRequired -> IO ()
oneShotMsg hsc_env recomp =
case recomp of
UpToDate ->
compilationProgressMsg (hsc_dflags hsc_env) $
"compilation IS NOT required"
_ ->
return ()
batchMsg :: Messager
batchMsg hsc_env mod_index recomp mod_summary =
case recomp of
MustCompile -> showMsg "Compiling " ""
UpToDate
| verbosity (hsc_dflags hsc_env) >= 2 -> showMsg "Skipping " ""
| otherwise -> return ()
RecompBecause reason -> showMsg "Compiling " (" [" ++ reason ++ "]")
where
dflags = hsc_dflags hsc_env
showMsg msg reason =
compilationProgressMsg dflags $
(showModuleIndex mod_index ++
msg ++ showModMsg dflags (hscTarget dflags)
(recompileRequired recomp) mod_summary)
++ reason
--------------------------------------------------------------
-- FrontEnds
--------------------------------------------------------------
hscFileFrontEnd :: ModSummary -> Hsc TcGblEnv
hscFileFrontEnd mod_summary = do
hpm <- hscParse' mod_summary
hsc_env <- getHscEnv
tcg_env <- tcRnModule' hsc_env mod_summary False hpm
return tcg_env
--------------------------------------------------------------
-- Safe Haskell
--------------------------------------------------------------
-- Note [Safe Haskell Trust Check]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Safe Haskell checks that an import is trusted according to the following
-- rules for an import of module M that resides in Package P:
--
-- * If M is recorded as Safe and all its trust dependencies are OK
-- then M is considered safe.
-- * If M is recorded as Trustworthy and P is considered trusted and
-- all M's trust dependencies are OK then M is considered safe.
--
-- By trust dependencies we mean that the check is transitive. So if
-- a module M that is Safe relies on a module N that is trustworthy,
-- importing module M will first check (according to the second case)
-- that N is trusted before checking M is trusted.
--
-- This is a minimal description, so please refer to the user guide
-- for more details. The user guide is also considered the authoritative
-- source in this matter, not the comments or code.
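-- A small illustration of the rules above (a sketch; the module and package
-- names below are hypothetical and not taken from this code base):
--
-- > {-# LANGUAGE Trustworthy #-}
-- > module M where    -- M lives in package P
-- > import N          -- N is Safe and lives in package Q
--
-- Importing M from Safe code is accepted only if package P is trusted (because
-- M is Trustworthy) and N's own trust dependencies hold; since N is Safe,
-- package Q itself does not need to be trusted.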
-- Note [Safe Haskell Inference]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Safe Haskell does Safe inference on modules that don't have any specific
-- safe haskell mode flag. The basic approach to this is:
-- * When deciding if we need to do a Safe language check, treat
-- an unmarked module as having -XSafe mode specified.
-- * For checks, don't throw errors but return them to the caller.
-- * Caller checks if there are errors:
-- * For modules explicitly marked -XSafe, we throw the errors.
-- * For unmarked modules (inference mode), we drop the errors
-- and mark the module as being Unsafe.
--
-- It used to be that we only did safe inference on modules that had no Safe
-- Haskell flags, but now we perform safe inference on all modules as we want
-- to allow users to set the `-fwarn-safe`, `-fwarn-unsafe` and
-- `-fwarn-trustworthy-safe` flags on Trustworthy and Unsafe modules so that a
-- user can ensure their assumptions are correct and see reasons for why a
-- module is safe or unsafe.
--
-- This is tricky, as we must be careful about when to throw an error as
-- opposed to just a warning. For checking safe imports we manage it in two
-- steps: first we check any imports that are required to be safe, then we
-- check all other imports to see if we can infer them to be safe.
-- | Check that the safe imports of the module being compiled are valid.
-- If not we either issue a compilation error if the module is explicitly
-- using Safe Haskell, or mark the module as unsafe if we're in safe
-- inference mode.
hscCheckSafeImports :: TcGblEnv -> Hsc TcGblEnv
hscCheckSafeImports tcg_env = do
dflags <- getDynFlags
tcg_env' <- checkSafeImports dflags tcg_env
checkRULES dflags tcg_env'
where
checkRULES dflags tcg_env' = do
case safeLanguageOn dflags of
True -> do
-- XSafe: we nuke user written RULES
logWarnings $ warns dflags (tcg_rules tcg_env')
return tcg_env' { tcg_rules = [] }
False
-- SafeInferred: user defined RULES, so not safe
| safeInferOn dflags && not (null $ tcg_rules tcg_env')
-> markUnsafeInfer tcg_env' $ warns dflags (tcg_rules tcg_env')
-- Trustworthy OR SafeInferred: with no RULES
| otherwise
-> return tcg_env'
warns dflags rules = listToBag $ map (warnRules dflags) rules
warnRules dflags (L loc (HsRule n _ _ _ _ _ _)) =
mkPlainWarnMsg dflags loc $
text "Rule \"" <> ftext (unLoc n) <> text "\" ignored" $+$
text "User defined rules are disabled under Safe Haskell"
-- | Validate that safe imported modules are actually safe. For modules in the
-- HomePackage (the package the module we are compiling in resides) this just
-- involves checking its trust type is 'Safe' or 'Trustworthy'. For modules
-- that reside in another package we also must check that the external package
-- is trusted. See the Note [Safe Haskell Trust Check] above for more
-- information.
--
-- The code for this is quite tricky as the whole algorithm is done in a few
-- distinct phases in different parts of the code base. See
-- RnNames.rnImportDecl for where package trust dependencies for a module are
-- collected and unioned. Specifically see the Note [RnNames . Tracking Trust
-- Transitively] and the Note [RnNames . Trust Own Package].
checkSafeImports :: DynFlags -> TcGblEnv -> Hsc TcGblEnv
checkSafeImports dflags tcg_env
= do
imps <- mapM condense imports'
let (safeImps, regImps) = partition (\(_,_,s) -> s) imps
-- We want to use the warning state specifically for detecting if safe
-- inference has failed, so store and clear any existing warnings.
oldErrs <- getWarnings
clearWarnings
-- Check safe imports are correct
safePkgs <- mapM checkSafe safeImps
safeErrs <- getWarnings
clearWarnings
-- Check non-safe imports are correct if inferring safety
-- See the Note [Safe Haskell Inference]
(infErrs, infPkgs) <- case (safeInferOn dflags) of
False -> return (emptyBag, [])
True -> do infPkgs <- mapM checkSafe regImps
infErrs <- getWarnings
clearWarnings
return (infErrs, infPkgs)
-- restore old errors
logWarnings oldErrs
case (isEmptyBag safeErrs) of
-- Failed safe check
False -> liftIO . throwIO . mkSrcErr $ safeErrs
-- Passed safe check
True -> do
let infPassed = isEmptyBag infErrs
tcg_env' <- case (not infPassed) of
True -> markUnsafeInfer tcg_env infErrs
False -> return tcg_env
when (packageTrustOn dflags) $ checkPkgTrust dflags pkgReqs
let newTrust = pkgTrustReqs safePkgs infPkgs infPassed
return tcg_env' { tcg_imports = impInfo `plusImportAvails` newTrust }
where
impInfo = tcg_imports tcg_env -- ImportAvails
imports = imp_mods impInfo -- ImportedMods
imports' = moduleEnvToList imports -- (Module, [ImportedModsVal])
pkgReqs = imp_trust_pkgs impInfo -- [InstalledUnitId]
condense :: (Module, [ImportedModsVal]) -> Hsc (Module, SrcSpan, IsSafeImport)
condense (_, []) = panic "HscMain.condense: Pattern match failure!"
condense (m, x:xs) = do (_,_,l,s) <- foldlM cond' x xs
return (m, l, s)
-- ImportedModsVal = (ModuleName, Bool, SrcSpan, IsSafeImport)
cond' :: ImportedModsVal -> ImportedModsVal -> Hsc ImportedModsVal
cond' v1@(m1,_,l1,s1) (_,_,_,s2)
| s1 /= s2
= throwErrors $ unitBag $ mkPlainErrMsg dflags l1
(text "Module" <+> ppr m1 <+>
(text $ "is imported both as a safe and unsafe import!"))
| otherwise
= return v1
-- easier interface to work with
checkSafe (m, l, _) = fst `fmap` hscCheckSafe' dflags m l
-- what pkg's to add to our trust requirements
pkgTrustReqs req inf infPassed | safeInferOn dflags
&& safeHaskell dflags == Sf_None && infPassed
= emptyImportAvails {
imp_trust_pkgs = catMaybes req ++ catMaybes inf
}
pkgTrustReqs _ _ _ | safeHaskell dflags == Sf_Unsafe
= emptyImportAvails
pkgTrustReqs req _ _ = emptyImportAvails { imp_trust_pkgs = catMaybes req }
-- | Check that a module is safe to import.
--
-- We return True to indicate the import is safe and False otherwise
-- although in the False case an exception may be thrown first.
hscCheckSafe :: HscEnv -> Module -> SrcSpan -> IO Bool
hscCheckSafe hsc_env m l = runHsc hsc_env $ do
dflags <- getDynFlags
pkgs <- snd `fmap` hscCheckSafe' dflags m l
when (packageTrustOn dflags) $ checkPkgTrust dflags pkgs
errs <- getWarnings
return $ isEmptyBag errs
-- | Return if a module is trusted and the pkgs it depends on to be trusted.
hscGetSafe :: HscEnv -> Module -> SrcSpan -> IO (Bool, [InstalledUnitId])
hscGetSafe hsc_env m l = runHsc hsc_env $ do
dflags <- getDynFlags
(self, pkgs) <- hscCheckSafe' dflags m l
good <- isEmptyBag `fmap` getWarnings
clearWarnings -- don't want them printed...
let pkgs' | Just p <- self = p:pkgs
| otherwise = pkgs
return (good, pkgs')
-- | Is a module trusted? If not, throw or log errors depending on the type.
-- Regardless of whether the module is trusted, return whether the trust type
-- requires the module's own package to be trusted, and a list of other packages
-- that are required to be trusted (these latter ones haven't been checked yet,
-- but the own-package trust has been).
hscCheckSafe' :: DynFlags -> Module -> SrcSpan -> Hsc (Maybe InstalledUnitId, [InstalledUnitId])
hscCheckSafe' dflags m l = do
(tw, pkgs) <- isModSafe m l
case tw of
False -> return (Nothing, pkgs)
True | isHomePkg m -> return (Nothing, pkgs)
| otherwise -> return (Just $ toInstalledUnitId (moduleUnitId m), pkgs)
where
isModSafe :: Module -> SrcSpan -> Hsc (Bool, [InstalledUnitId])
isModSafe m l = do
iface <- lookup' m
case iface of
-- can't load iface to check trust!
Nothing -> throwErrors $ unitBag $ mkPlainErrMsg dflags l
$ text "Can't load the interface file for" <+> ppr m
<> text ", to check that it can be safely imported"
-- got iface, check trust
Just iface' ->
let trust = getSafeMode $ mi_trust iface'
trust_own_pkg = mi_trust_pkg iface'
-- check module is trusted
safeM = trust `elem` [Sf_Safe, Sf_Trustworthy]
-- check package is trusted
safeP = packageTrusted trust trust_own_pkg m
-- pkg trust reqs
pkgRs = map fst $ filter snd $ dep_pkgs $ mi_deps iface'
-- General errors we throw but Safe errors we log
errs = case (safeM, safeP) of
(True, True ) -> emptyBag
(True, False) -> pkgTrustErr
(False, _ ) -> modTrustErr
in do
logWarnings errs
return (trust == Sf_Trustworthy, pkgRs)
where
pkgTrustErr = unitBag $ mkErrMsg dflags l (pkgQual dflags) $
sep [ ppr (moduleName m)
<> text ": Can't be safely imported!"
, text "The package (" <> ppr (moduleUnitId m)
<> text ") the module resides in isn't trusted."
]
modTrustErr = unitBag $ mkErrMsg dflags l (pkgQual dflags) $
sep [ ppr (moduleName m)
<> text ": Can't be safely imported!"
, text "The module itself isn't safe." ]
-- | Check the package a module resides in is trusted. Safe compiled
-- modules are trusted without requiring that their package is trusted. For
-- trustworthy modules, modules in the home package are trusted but
-- otherwise we check the package trust flag.
packageTrusted :: SafeHaskellMode -> Bool -> Module -> Bool
packageTrusted Sf_None _ _ = False -- shouldn't hit these cases
packageTrusted Sf_Unsafe _ _ = False -- prefer for completeness.
packageTrusted _ _ _
| not (packageTrustOn dflags) = True
packageTrusted Sf_Safe False _ = True
packageTrusted _ _ m
| isHomePkg m = True
| otherwise = trusted $ getPackageDetails dflags (moduleUnitId m)
lookup' :: Module -> Hsc (Maybe ModIface)
lookup' m = do
hsc_env <- getHscEnv
hsc_eps <- liftIO $ hscEPS hsc_env
let pkgIfaceT = eps_PIT hsc_eps
homePkgT = hsc_HPT hsc_env
iface = lookupIfaceByModule dflags homePkgT pkgIfaceT m
#ifdef GHCI
-- the 'lookupIfaceByModule' method will always fail when calling from GHCi
-- as the compiler hasn't filled in the various module tables
-- so we need to call 'getModuleInterface' to load from disk
iface' <- case iface of
Just _ -> return iface
Nothing -> snd `fmap` (liftIO $ getModuleInterface hsc_env m)
return iface'
#else
return iface
#endif
isHomePkg :: Module -> Bool
isHomePkg m
| thisPackage dflags == moduleUnitId m = True
| otherwise = False
-- | Check that the given list of packages is trusted.
checkPkgTrust :: DynFlags -> [InstalledUnitId] -> Hsc ()
checkPkgTrust dflags pkgs =
case errors of
[] -> return ()
_ -> (liftIO . throwIO . mkSrcErr . listToBag) errors
where
errors = catMaybes $ map go pkgs
go pkg
| trusted $ getInstalledPackageDetails dflags pkg
= Nothing
| otherwise
= Just $ mkErrMsg dflags noSrcSpan (pkgQual dflags)
$ text "The package (" <> ppr pkg <> text ") is required" <>
text " to be trusted but it isn't!"
-- | Set module to unsafe and (potentially) wipe trust information.
--
-- Make sure to call this method to set a module to inferred unsafe; it should
-- be the single, central failure method. We only wipe the trust information
-- when we aren't in a specific Safe Haskell mode.
--
-- While we only use this for recording that a module was inferred unsafe, we
-- may call it on modules using Trustworthy or Unsafe flags so as to allow
-- warning flags for safety to function correctly. See Note [Safe Haskell
-- Inference].
markUnsafeInfer :: TcGblEnv -> WarningMessages -> Hsc TcGblEnv
markUnsafeInfer tcg_env whyUnsafe = do
dflags <- getDynFlags
when (wopt Opt_WarnUnsafe dflags)
(logWarnings $ unitBag $
mkPlainWarnMsg dflags (warnUnsafeOnLoc dflags) (whyUnsafe' dflags))
liftIO $ writeIORef (tcg_safeInfer tcg_env) False
    -- NOTE: Only wipe trust when not in an explicitly safe haskell mode. Other
-- times inference may be on but we are in Trustworthy mode -- so we want
-- to record safe-inference failed but not wipe the trust dependencies.
case safeHaskell dflags == Sf_None of
True -> return $ tcg_env { tcg_imports = wiped_trust }
False -> return tcg_env
where
wiped_trust = (tcg_imports tcg_env) { imp_trust_pkgs = [] }
pprMod = ppr $ moduleName $ tcg_mod tcg_env
whyUnsafe' df = vcat [ quotes pprMod <+> text "has been inferred as unsafe!"
, text "Reason:"
, nest 4 $ (vcat $ badFlags df) $+$
(vcat $ pprErrMsgBagWithLoc whyUnsafe) $+$
(vcat $ badInsts $ tcg_insts tcg_env)
]
badFlags df = concat $ map (badFlag df) unsafeFlagsForInfer
badFlag df (str,loc,on,_)
| on df = [mkLocMessage SevOutput (loc df) $
text str <+> text "is not allowed in Safe Haskell"]
| otherwise = []
badInsts insts = concat $ map badInst insts
checkOverlap (NoOverlap _) = False
checkOverlap _ = True
badInst ins | checkOverlap (overlapMode (is_flag ins))
= [mkLocMessage SevOutput (nameSrcSpan $ getName $ is_dfun ins) $
ppr (overlapMode $ is_flag ins) <+>
text "overlap mode isn't allowed in Safe Haskell"]
| otherwise = []
-- | Figure out the final correct safe haskell mode
hscGetSafeMode :: TcGblEnv -> Hsc SafeHaskellMode
hscGetSafeMode tcg_env = do
dflags <- getDynFlags
liftIO $ finalSafeMode dflags tcg_env
--------------------------------------------------------------
-- Simplifiers
--------------------------------------------------------------
hscSimplify :: HscEnv -> ModGuts -> IO ModGuts
hscSimplify hsc_env modguts = runHsc hsc_env $ hscSimplify' modguts
hscSimplify' :: ModGuts -> Hsc ModGuts
hscSimplify' ds_result = do
hsc_env <- getHscEnv
{-# SCC "Core2Core" #-}
liftIO $ core2core hsc_env ds_result
--------------------------------------------------------------
-- Interface generators
--------------------------------------------------------------
hscSimpleIface :: HscEnv
-> TcGblEnv
-> Maybe Fingerprint
-> IO (ModIface, Bool, ModDetails)
hscSimpleIface hsc_env tc_result mb_old_iface
= runHsc hsc_env $ hscSimpleIface' tc_result mb_old_iface
hscSimpleIface' :: TcGblEnv
-> Maybe Fingerprint
-> Hsc (ModIface, Bool, ModDetails)
hscSimpleIface' tc_result mb_old_iface = do
hsc_env <- getHscEnv
details <- liftIO $ mkBootModDetailsTc hsc_env tc_result
safe_mode <- hscGetSafeMode tc_result
(new_iface, no_change)
<- {-# SCC "MkFinalIface" #-}
ioMsgMaybe $
mkIfaceTc hsc_env mb_old_iface safe_mode details tc_result
-- And the answer is ...
liftIO $ dumpIfaceStats hsc_env
return (new_iface, no_change, details)
hscNormalIface :: HscEnv
-> ModGuts
-> Maybe Fingerprint
-> IO (ModIface, Bool, ModDetails, CgGuts)
hscNormalIface hsc_env simpl_result mb_old_iface =
runHsc hsc_env $ hscNormalIface' simpl_result mb_old_iface
hscNormalIface' :: ModGuts
-> Maybe Fingerprint
-> Hsc (ModIface, Bool, ModDetails, CgGuts)
hscNormalIface' simpl_result mb_old_iface = do
hsc_env <- getHscEnv
(cg_guts, details) <- {-# SCC "CoreTidy" #-}
liftIO $ tidyProgram hsc_env simpl_result
-- BUILD THE NEW ModIface and ModDetails
-- and emit external core if necessary
-- This has to happen *after* code gen so that the back-end
-- info has been set. Not yet clear if it matters waiting
-- until after code output
(new_iface, no_change)
<- {-# SCC "MkFinalIface" #-}
ioMsgMaybe $
mkIface hsc_env mb_old_iface details simpl_result
liftIO $ dumpIfaceStats hsc_env
-- Return the prepared code.
return (new_iface, no_change, details, cg_guts)
--------------------------------------------------------------
-- BackEnd combinators
--------------------------------------------------------------
hscWriteIface :: DynFlags -> ModIface -> Bool -> ModSummary -> IO ()
hscWriteIface dflags iface no_change mod_summary = do
let ifaceFile = ml_hi_file (ms_location mod_summary)
unless no_change $
{-# SCC "writeIface" #-}
writeIfaceFile dflags ifaceFile iface
whenGeneratingDynamicToo dflags $ do
-- TODO: We should do a no_change check for the dynamic
-- interface file too
-- TODO: Should handle the dynamic hi filename properly
let dynIfaceFile = replaceExtension ifaceFile (dynHiSuf dflags)
dynIfaceFile' = addBootSuffix_maybe (mi_boot iface) dynIfaceFile
dynDflags = dynamicTooMkDynamicDynFlags dflags
writeIfaceFile dynDflags dynIfaceFile' iface
-- | Compile to hard-code.
hscGenHardCode :: HscEnv -> CgGuts -> ModSummary -> FilePath
-> IO (FilePath, Maybe FilePath) -- ^ @Just f@ <=> _stub.c is f
hscGenHardCode hsc_env cgguts mod_summary output_filename = do
let CgGuts{ -- This is the last use of the ModGuts in a compilation.
-- From now on, we just use the bits we need.
cg_module = this_mod,
cg_binds = core_binds,
cg_tycons = tycons,
cg_foreign = foreign_stubs,
cg_dep_pkgs = _dependencies,
cg_hpc_info = hpc_info } = cgguts
dflags = hsc_dflags hsc_env
location = ms_location mod_summary
data_tycons = filter isDataTyCon tycons
-- cg_tycons includes newtypes, for the benefit of External Core,
-- but we don't generate any code for newtypes
-------------------
-- PREPARE FOR CODE GENERATION
-- Do saturation and convert to A-normal form
prepd_binds <- {-# SCC "CorePrep" #-}
corePrepPgm hsc_env location core_binds data_tycons ;
----------------- Convert to STG ------------------
(stg_binds, _cost_centre_info)
<- {-# SCC "CoreToStg" #-}
myCoreToStg dflags this_mod prepd_binds
modClasses <- codeGen hsc_env this_mod data_tycons stg_binds hpc_info
let stubClasses = outputForeignStubs dflags foreign_stubs
classes = stubClasses ++ modClasses
jarContents' = map (classFilePath &&& classFileBS) classes
jarContents <- forM jarContents' $ \(a,b) -> do
a' <- mkPath a
return (a', b)
-- createEmptyJar output_filename
addMultiByteStringsToJar' output_filename (compressionMethod dflags) jarContents
return (output_filename, Nothing)
outputForeignStubs :: DynFlags -> ForeignStubs -> [ClassFile]
outputForeignStubs _dflags NoStubs = []
outputForeignStubs dflags (ForeignStubs _ _ classExports) =
map f $ foreignExportsList classExports
where f (classSpec, (methodDefs, fieldDefs)) =
mkClassFile java7 [Public, Super] (jvmify className) (Just superClass)
interfaces fieldDefs methodDefs''
where className':specs = T.words classSpec
className = jvmify className'
methodDefs' = genClInit className : methodDefs
methodDefs'' = if hasConstructor
then methodDefs'
else mkDefaultConstructor className superClass
: methodDefs'
hasConstructor = any (\(MethodDef _ (UName n) _ _) ->
n == "<init>") methodDefs
(superClass, interfaces) = parseSpecs specs jobjectC []
parseSpecs ("extends":superClass:xs) _ is = parseSpecs xs (jvmify superClass) is
parseSpecs ("implements":interface:xs) sc is = parseSpecs xs sc (jvmify interface:is)
parseSpecs [] sc is = (sc, reverse is)
parseSpecs _ _ _ = error $ "Invalid foreign export spec."
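              -- A worked example (illustrative only): for a class spec of
              -- "Foo extends java.lang.Thread implements java.lang.Runnable",
              -- the words after the class name are
              -- ["extends","java.lang.Thread","implements","java.lang.Runnable"],
              -- and parseSpecs applied to them (with jobjectC as the default
              -- superclass) yields ("java/lang/Thread", ["java/lang/Runnable"]).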
jvmify = T.map (\c -> if c == '.' then '/' else c)
genClInit cls = mkMethodDef cls [Public, Static] "<clinit>" [] void $ fold
[ iconst jint 0
, new (jarray jstring)
, renderRtsConfig dflags False
, invokestatic (mkMethodRef "eta/runtime/Rts" "hsInit"
[jarray jstring, rtsConfigType] void)
, vreturn ]
hscInteractive :: HscEnv
-> CgGuts
-> ModSummary
-> IO (Maybe FilePath, CompiledByteCode, ModBreaks)
#ifdef GHCI
hscInteractive hsc_env cgguts mod_summary = do
let dflags = hsc_dflags hsc_env
let CgGuts{ -- This is the last use of the ModGuts in a compilation.
-- From now on, we just use the bits we need.
cg_module = this_mod,
cg_binds = core_binds,
cg_tycons = tycons,
cg_foreign = foreign_stubs,
cg_modBreaks = mod_breaks } = cgguts
location = ms_location mod_summary
data_tycons = filter isDataTyCon tycons
-- cg_tycons includes newtypes, for the benefit of External Core,
-- but we don't generate any code for newtypes
-------------------
-- PREPARE FOR CODE GENERATION
-- Do saturation and convert to A-normal form
prepd_binds <- {-# SCC "CorePrep" #-}
corePrepPgm hsc_env location core_binds data_tycons
----------------- Generate byte code ------------------
comp_bc <- byteCodeGen dflags this_mod prepd_binds data_tycons mod_breaks
------------------ Create f-x-dynamic C-side stuff ---
(_istub_h_exists, istub_c_exists)
<- outputForeignStubs dflags this_mod location foreign_stubs
return (istub_c_exists, comp_bc, mod_breaks)
#else
hscInteractive _ _ = panic "GHC not compiled with interpreter"
#endif
myCoreToStg :: DynFlags -> Module -> CoreProgram
-> IO ( [StgBinding] -- output program
, CollectedCCs) -- cost centre info (declared and used)
myCoreToStg dflags this_mod prepd_binds = do
stg_binds
<- {-# SCC "Core2Stg" #-}
coreToStg dflags this_mod prepd_binds
(stg_binds2, cost_centre_info)
<- {-# SCC "Stg2Stg" #-}
stg2stg dflags this_mod stg_binds
return (stg_binds2, cost_centre_info)
{- **********************************************************************
%* *
\subsection{Compiling a do-statement}
%* *
%********************************************************************* -}
{-
When the UnlinkedBCOExpr is linked you get an HValue of type *IO [HValue]* When
you run it you get a list of HValues that should be the same length as the list
of names; add them to the ClosureEnv.
A naked expression returns a singleton Name [it]. The stmt is lifted into the
IO monad as explained in Note [Interactively-bound Ids in GHCi] in HscTypes
-}
#ifdef GHCI
-- | Compile a stmt all the way to an HValue, but don't run it
--
-- We return Nothing to indicate an empty statement (or comment only), not a
-- parse error.
hscStmt :: HscEnv -> String -> IO (Maybe ([Id], IO [HValue], FixityEnv))
hscStmt hsc_env stmt = hscStmtWithLocation hsc_env stmt "<interactive>" 1
-- | Compile a stmt all the way to an HValue, but don't run it
--
-- We return Nothing to indicate an empty statement (or comment only), not a
-- parse error.
hscStmtWithLocation :: HscEnv
-> String -- ^ The statement
-> String -- ^ The source
-> Int -- ^ Starting line
-> IO (Maybe ([Id], IO [HValue], FixityEnv))
hscStmtWithLocation hsc_env0 stmt source linenumber =
runInteractiveHsc hsc_env0 $ do
maybe_stmt <- hscParseStmtWithLocation source linenumber stmt
case maybe_stmt of
Nothing -> return Nothing
Just parsed_stmt -> do
-- Rename and typecheck it
hsc_env <- getHscEnv
(ids, tc_expr, fix_env) <- ioMsgMaybe $ tcRnStmt hsc_env parsed_stmt
-- Desugar it
ds_expr <- ioMsgMaybe $ deSugarExpr hsc_env tc_expr
liftIO (lintInteractiveExpr "desugar expression" hsc_env ds_expr)
handleWarnings
-- Then code-gen, and link it
-- It's important NOT to have package 'interactive' as thisUnitId
-- for linking, else we try to link 'main' and can't find it.
-- Whereas the linker already knows to ignore 'interactive'
let src_span = srcLocSpan interactiveSrcLoc
hval <- liftIO $ hscCompileCoreExpr hsc_env src_span ds_expr
let hval_io = unsafeCoerce# hval :: IO [HValue]
return $ Just (ids, hval_io, fix_env)
-- | Compile a list of declarations.
hscDecls :: HscEnv
-> String -- ^ The statement
-> IO ([TyThing], InteractiveContext)
hscDecls hsc_env str = hscDeclsWithLocation hsc_env str "<interactive>" 1
-- | Compile a list of declarations.
hscDeclsWithLocation :: HscEnv
-> String -- ^ The statement
-> String -- ^ The source
-> Int -- ^ Starting line
-> IO ([TyThing], InteractiveContext)
hscDeclsWithLocation hsc_env0 str source linenumber =
runInteractiveHsc hsc_env0 $ do
L _ (HsModule{ hsmodDecls = decls }) <-
hscParseThingWithLocation source linenumber parseModule str
{- Rename and typecheck it -}
hsc_env <- getHscEnv
tc_gblenv <- ioMsgMaybe $ tcRnDeclsi hsc_env decls
{- Grab the new instances -}
-- We grab the whole environment because of the overlapping that may have
-- been done. See the notes at the definition of InteractiveContext
-- (ic_instances) for more details.
let defaults = tcg_default tc_gblenv
{- Desugar it -}
-- We use a basically null location for iNTERACTIVE
let iNTERACTIVELoc = ModLocation{ ml_hs_file = Nothing,
ml_hi_file = panic "hsDeclsWithLocation:ml_hi_file",
                                    ml_obj_file = panic "hsDeclsWithLocation:ml_obj_file"}
ds_result <- hscDesugar' iNTERACTIVELoc tc_gblenv
{- Simplify -}
simpl_mg <- liftIO $ hscSimplify hsc_env ds_result
{- Tidy -}
(tidy_cg, mod_details) <- liftIO $ tidyProgram hsc_env simpl_mg
let dflags = hsc_dflags hsc_env
!CgGuts{ cg_module = this_mod,
cg_binds = core_binds,
cg_tycons = tycons,
cg_modBreaks = mod_breaks } = tidy_cg
!ModDetails { md_insts = cls_insts
, md_fam_insts = fam_insts } = mod_details
-- Get the *tidied* cls_insts and fam_insts
data_tycons = filter isDataTyCon tycons
{- Prepare For Code Generation -}
-- Do saturation and convert to A-normal form
prepd_binds <- {-# SCC "CorePrep" #-}
liftIO $ corePrepPgm hsc_env iNTERACTIVELoc core_binds data_tycons
{- Generate byte code -}
cbc <- liftIO $ byteCodeGen dflags this_mod
prepd_binds data_tycons mod_breaks
let src_span = srcLocSpan interactiveSrcLoc
liftIO $ linkDecls hsc_env src_span cbc
let tcs = filterOut isImplicitTyCon (mg_tcs simpl_mg)
patsyns = mg_patsyns simpl_mg
ext_ids = [ id | id <- bindersOfBinds core_binds
, isExternalName (idName id)
, not (isDFunId id || isImplicitId id) ]
-- We only need to keep around the external bindings
-- (as decided by TidyPgm), since those are the only ones
-- that might be referenced elsewhere.
          -- The DFunIds are in 'cls_insts' (see Note [ic_tythings] in HscTypes)
-- Implicit Ids are implicit in tcs
tythings = map AnId ext_ids ++ map ATyCon tcs ++ map (AConLike . PatSynCon) patsyns
let icontext = hsc_IC hsc_env
ictxt = extendInteractiveContext icontext ext_ids tcs
cls_insts fam_insts defaults patsyns
return (tythings, ictxt)
hscImport :: HscEnv -> String -> IO (ImportDecl RdrName)
hscImport hsc_env str = runInteractiveHsc hsc_env $ do
(L _ (HsModule{hsmodImports=is})) <-
hscParseThing parseModule str
case is of
[L _ i] -> return i
_ -> liftIO $ throwOneError $
mkPlainErrMsg (hsc_dflags hsc_env) noSrcSpan $
ptext (sLit "parse error in import declaration")
-- | Typecheck an expression (but don't run it)
-- Returns its most general type
hscTcExpr :: HscEnv
-> String -- ^ The expression
-> IO Type
hscTcExpr hsc_env0 expr = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
maybe_stmt <- hscParseStmt expr
case maybe_stmt of
Just (L _ (BodyStmt expr _ _ _)) ->
ioMsgMaybe $ tcRnExpr hsc_env expr
_ ->
throwErrors $ unitBag $ mkPlainErrMsg (hsc_dflags hsc_env) noSrcSpan
(text "not an expression:" <+> quotes (text expr))
-- | Find the kind of a type
-- Currently this does *not* generalise the kinds of the type
hscKcType
:: HscEnv
-> Bool -- ^ Normalise the type
-> String -- ^ The type as a string
-> IO (Type, Kind) -- ^ Resulting type (possibly normalised) and kind
hscKcType hsc_env0 normalise str = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ty <- hscParseType str
ioMsgMaybe $ tcRnType hsc_env normalise ty
hscParseStmt :: String -> Hsc (Maybe (GhciLStmt RdrName))
hscParseStmt = hscParseThing parseStmt
hscParseStmtWithLocation :: String -> Int -> String
-> Hsc (Maybe (GhciLStmt RdrName))
hscParseStmtWithLocation source linenumber stmt =
hscParseThingWithLocation source linenumber parseStmt stmt
hscParseType :: String -> Hsc (LHsType RdrName)
hscParseType = hscParseThing parseType
#endif
hscParseIdentifier :: HscEnv -> String -> IO (Located RdrName)
hscParseIdentifier hsc_env str =
runInteractiveHsc hsc_env $ hscParseThing parseIdentifier str
hscParseThing :: (Outputable thing) => Lexer.P thing -> String -> Hsc thing
hscParseThing = hscParseThingWithLocation "<interactive>" 1
hscParseThingWithLocation :: (Outputable thing) => String -> Int
-> Lexer.P thing -> String -> Hsc thing
hscParseThingWithLocation source linenumber parser str
= {-# SCC "Parser" #-} do
dflags <- getDynFlags
liftIO $ showPass dflags "Parser"
let buf = stringToStringBuffer str
loc = mkRealSrcLoc (fsLit source) linenumber 1
case unP parser (mkPState dflags buf loc) of
PFailed span err -> do
let msg = mkPlainErrMsg dflags span err
throwErrors $ unitBag msg
POk pst thing -> do
logWarningsReportErrors (getMessages pst)
liftIO $ dumpIfSet_dyn dflags Opt_D_dump_parsed "Parser" (ppr thing)
return thing
hscCompileCore :: HscEnv -> Bool -> SafeHaskellMode -> ModSummary
-> CoreProgram -> FilePath -> IO ()
hscCompileCore hsc_env simplify safe_mode mod_summary binds output_filename
= runHsc hsc_env $ do
guts <- maybe_simplify (mkModGuts (ms_mod mod_summary) safe_mode binds)
(iface, changed, _details, cgguts) <- hscNormalIface' guts Nothing
liftIO $ hscWriteIface (hsc_dflags hsc_env) iface changed mod_summary
_ <- liftIO $ hscGenHardCode hsc_env cgguts mod_summary output_filename
return ()
where
maybe_simplify mod_guts | simplify = hscSimplify' mod_guts
| otherwise = return mod_guts
-- Makes a "vanilla" ModGuts.
mkModGuts :: Module -> SafeHaskellMode -> CoreProgram -> ModGuts
mkModGuts mod safe binds =
ModGuts {
mg_module = mod,
mg_boot = False,
mg_exports = [],
mg_deps = noDependencies,
mg_dir_imps = emptyModuleEnv,
mg_used_names = emptyNameSet,
mg_used_th = False,
mg_rdr_env = emptyGlobalRdrEnv,
mg_fix_env = emptyFixityEnv,
mg_tcs = [],
mg_insts = [],
mg_fam_insts = [],
mg_patsyns = [],
mg_rules = [],
mg_vect_decls = [],
mg_binds = binds,
mg_foreign = NoStubs,
mg_warns = NoWarnings,
mg_anns = [],
mg_hpc_info = emptyHpcInfo False,
mg_modBreaks = emptyModBreaks,
mg_vect_info = noVectInfo,
mg_inst_env = emptyInstEnv,
mg_fam_inst_env = emptyFamInstEnv,
mg_safe_haskell = safe,
mg_trust_pkg = False,
mg_dependent_files = []
}
{- **********************************************************************
%* *
Desugar, simplify, convert to bytecode, and link an expression
%* *
%********************************************************************* -}
#ifdef GHCI
hscCompileCoreExpr :: HscEnv -> SrcSpan -> CoreExpr -> IO HValue
hscCompileCoreExpr hsc_env =
lookupHook hscCompileCoreExprHook hscCompileCoreExpr' (hsc_dflags hsc_env) hsc_env
hscCompileCoreExpr' :: HscEnv -> SrcSpan -> CoreExpr -> IO HValue
hscCompileCoreExpr' hsc_env srcspan ds_expr
| rtsIsProfiled
= throwIO (InstallationError "You can't call hscCompileCoreExpr in a profiled compiler")
-- Otherwise you get a seg-fault when you run it
| otherwise
= do { let dflags = hsc_dflags hsc_env
{- Simplify it -}
; simpl_expr <- simplifyExpr dflags ds_expr
{- Tidy it (temporary, until coreSat does cloning) -}
; let tidy_expr = tidyExpr emptyTidyEnv simpl_expr
{- Prepare for codegen -}
; prepd_expr <- corePrepExpr dflags hsc_env tidy_expr
{- Lint if necessary -}
; lintInteractiveExpr "hscCompileExpr" hsc_env prepd_expr
{- Convert to BCOs -}
; bcos <- coreExprToBCOs dflags (icInteractiveModule (hsc_IC hsc_env)) prepd_expr
{- link it -}
; hval <- linkExpr hsc_env srcspan bcos
; return hval }
#endif
{- **********************************************************************
%* *
Statistics on reading interfaces
%* *
%********************************************************************* -}
dumpIfaceStats :: HscEnv -> IO ()
dumpIfaceStats hsc_env = do
eps <- readIORef (hsc_EPS hsc_env)
dumpIfSet dflags (dump_if_trace || dump_rn_stats)
"Interface statistics"
(ifaceStats eps)
where
dflags = hsc_dflags hsc_env
dump_rn_stats = dopt Opt_D_dump_rn_stats dflags
dump_if_trace = dopt Opt_D_dump_if_trace dflags
{- **********************************************************************
%* *
Progress Messages: Module i of n
%* *
%********************************************************************* -}
showModuleIndex :: (Int, Int) -> String
showModuleIndex (i,n) = "[" ++ padded ++ " of " ++ n_str ++ "] "
where
n_str = show n
i_str = show i
padded = replicate (length n_str - length i_str) ' ' ++ i_str
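-- A quick illustration of the padding behaviour (informal examples):
--
-- > showModuleIndex (2, 10)  == "[ 2 of 10] "
-- > showModuleIndex (10, 10) == "[10 of 10] "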
-- Render RTS Config
renderRtsConfig :: DynFlags -> Bool -> Code
renderRtsConfig dflags isHsMain
= invokestatic (mkMethodRef rtsConfig "getDefault" [] (ret rtsConfigType))
<> putRtsHsMain
<> putRtsOptsEnabled
<> putRtsOpts
where (<>) = (Mon.<>)
putRtsHsMain
| isHsMain = dup rtsConfigType
<> iconst jbool 1
<> putfield (mkFieldRef rtsConfig "rtsHsMain" jbool)
| otherwise = mempty
rtsOptsEnabledText = T.pack . show . rtsOptsEnabled $ dflags
putRtsOptsEnabled
| rtsOptsEnabledText == "RtsOptsSafeOnly" = mempty
| otherwise = dup rtsConfigType
<> getstatic (mkFieldRef rtsOptsEnbled rtsOptsEnabledText
rtsOptsEnbledType)
<> putfield (mkFieldRef rtsConfig "rtsOptsEnabled"
rtsOptsEnbledType)
putRtsOpts = case rtsOpts dflags of
Nothing -> mempty
Just s -> dup rtsConfigType
<> sconst (T.pack s)
<> putfield (mkFieldRef rtsConfig "rtsOpts" jstring)
|
AlexeyRaga/eta
|
compiler/ETA/Main/HscMain.hs
|
bsd-3-clause
| 69,463
| 0
| 27
| 20,579
| 11,935
| 6,158
| 5,777
| 825
| 9
|
{-# LANGUAGE FlexibleContexts, ScopedTypeVariables, TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{- |
Module : Data.Graph.Inductive.Arbitrary
Description : Arbitrary definition for fgl graphs
Copyright : (c) Ivan Lazar Miljenovic
License : BSD3
Maintainer : Ivan.Miljenovic@gmail.com
This module provides default definitions for use with QuickCheck's
'Arbitrary' class.
Both "Data.Graph.Inductive.Tree"- and
"Data.Graph.Inductive.PatriciaTree"-based graph implementations have
'Arbitrary' instances. In most cases, this is all you will need.
If, however, you want to create arbitrary custom graph-like data
structures, then you will probably want to do some custom processing
from an arbitrary 'GraphNodesEdges' value, either directly or with a
custom 'ArbGraph' instance.
-}
module Data.Graph.Inductive.Arbitrary
( -- * Explicit graph creation
-- $explicit
arbitraryGraph
, arbitraryGraphWith
, shrinkGraph
, shrinkGraphWith
-- * Types of graphs
, ArbGraph(..)
, GrProxy(..)
, shrinkF
, arbitraryGraphBy
-- ** Specific graph structures
, NoMultipleEdges(..)
, NoLoops(..)
, SimpleGraph
, Undirected(..)
-- ** Connected graphs
, Connected(..)
, connGraph
-- * Node and edge lists
, arbitraryNodes
, arbitraryEdges
, GraphNodesEdges(..)
) where
import Data.Graph.Inductive.Graph (DynGraph, Graph, LEdge,
LNode, Node, delNode,
insEdges, insNode, mkGraph,
newNodes, nodes, toEdge)
import qualified Data.Graph.Inductive.PatriciaTree as P
import qualified Data.Graph.Inductive.Tree as T
import Test.QuickCheck (Arbitrary (..), Gen, elements, listOf)
import Control.Applicative (liftA3, (<*>))
import Control.Arrow (second)
import Data.Function (on)
import Data.Functor ((<$>))
import Data.List (deleteBy, groupBy, sortBy)
import Data.Maybe (mapMaybe)
-- -----------------------------------------------------------------------------
-- | Generate a list of labelled nodes.
arbitraryNodes :: (Arbitrary a) => Gen [LNode a]
arbitraryNodes = arbitrary >>= mapM ((<$> arbitrary) . (,)) . uniq
-- | Given a specified list of nodes, generate a list of edges.
arbitraryEdges :: (Arbitrary b) => [LNode a] -> Gen [LEdge b]
arbitraryEdges lns
| null lns = return []
| otherwise = listOf (liftA3 (,,) nGen nGen arbitrary)
where
nGen = elements (map fst lns)
-- | Defined so as to be able to generate valid 'arbitrary' node and
-- edge lists.
--
-- If any specific structure (no multiple edges, no loops, etc.) is
-- required then you will need to post-process this after generating
-- it, or else create a new instance of 'ArbGraph'.
data GraphNodesEdges a b = GNEs { graphNodes :: [LNode a]
, graphEdges :: [LEdge b]
}
deriving (Eq, Ord, Show, Read)
instance (Arbitrary a, Arbitrary b) => Arbitrary (GraphNodesEdges a b) where
arbitrary = do ns <- arbitraryNodes
GNEs ns <$> arbitraryEdges ns
shrink (GNEs ns es) = case ns of
_:_:_ -> map delN ns
_ -> []
where
delN ln@(n,_) = GNEs ns' es'
where
ns' = deleteBy ((==)`on`fst) ln ns
es' = filter (not . hasN) es
hasN (v,w,_) = v == n || w == n
-- -----------------------------------------------------------------------------
-- | Representation of generating arbitrary graph structures.
--
-- Typically, you would only use this for the 'toBaseGraph' function
-- or if you wanted to make a custom graph wrapper.
--
-- The intent of this class is to simplify defining and using
-- different wrappers on top of graphs (e.g. you may wish to have an
-- 'Undirected' graph, or one with 'NoLoops', or possibly both!).
class (DynGraph (BaseGraph ag)) => ArbGraph ag where
type BaseGraph ag :: * -> * -> *
toBaseGraph :: ag a b -> BaseGraph ag a b
fromBaseGraph :: BaseGraph ag a b -> ag a b
-- | Any manipulation of edges that should be done to satisfy the
-- requirements of the specified wrapper.
edgeF :: GrProxy ag -> [LEdge b] -> [LEdge b]
-- | Shrinking function (assuming only one node is removed at a
-- time) which also returns the node that is removed.
shrinkFWith :: ag a b -> [(Node, ag a b)]
-- | In most cases, for an instance of 'ArbGraph' the 'Arbitrary'
-- instance definition will\/can have @shrink = shrinkF@.
shrinkF :: (ArbGraph ag) => ag a b -> [ag a b]
shrinkF = map snd . shrinkFWith
instance ArbGraph T.Gr where
type BaseGraph T.Gr = T.Gr
toBaseGraph = id
fromBaseGraph = id
edgeF _ = id
shrinkFWith = shrinkGraphWith
instance ArbGraph P.Gr where
type BaseGraph P.Gr = P.Gr
toBaseGraph = id
fromBaseGraph = id
edgeF _ = id
shrinkFWith = shrinkGraphWith
-- | A simple graph-specific proxy type.
data GrProxy (gr :: * -> * -> *) = GrProxy
deriving (Eq, Ord, Show, Read)
-- -----------------------------------------------------------------------------
{- $explicit
If you wish to explicitly create a generated graph value (rather than
using the 'Arbitrary' class) then you will want to use these
functions.
-}
-- | Generate an arbitrary graph. Multiple edges are allowed.
arbitraryGraph :: (Graph gr, Arbitrary a, Arbitrary b) => Gen (gr a b)
arbitraryGraph = arbitraryGraphWith id
-- | Generate an arbitrary graph, using the specified function to
-- manipulate the generated list of edges (e.g. remove multiple
-- edges).
arbitraryGraphWith :: (Graph gr, Arbitrary a, Arbitrary b)
=> ([LEdge b] -> [LEdge b]) -> Gen (gr a b)
arbitraryGraphWith f = do GNEs ns es <- arbitrary
let es' = f es
return (mkGraph ns es')
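-- A usage sketch (illustrative only; it assumes QuickCheck's 'generate' from
-- "Test.QuickCheck", which is not imported by this module):
--
-- > import Test.QuickCheck (generate)
-- >
-- > example :: IO (P.Gr Char Int)
-- > example = generate arbitraryGraph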
-- | Generate an instance of 'ArbGraph' using the class methods.
arbitraryGraphBy :: forall ag a b. (ArbGraph ag, Arbitrary a, Arbitrary b)
=> Gen (ag a b)
arbitraryGraphBy = fromBaseGraph
<$> arbitraryGraphWith (edgeF (GrProxy :: GrProxy ag))
-- Ensure we have a list of unique Node values; this will also sort
-- the list, but that shouldn't matter.
uniq :: [Node] -> [Node]
uniq = uniqBy id
uniqBy :: (Ord b) => (a -> b) -> [a] -> [a]
uniqBy f = map head . groupBy ((==) `on` f) . sortBy (compare `on` f)
-- | For a graph with at least two nodes, return every possible way of
-- deleting a single node (i.e. will never shrink to an empty
-- graph).
shrinkGraph :: (Graph gr) => gr a b -> [gr a b]
shrinkGraph = map snd . shrinkGraphWith
-- | As with 'shrinkGraph', but also return the node that was deleted.
shrinkGraphWith :: (Graph gr) => gr a b -> [(Node, gr a b)]
shrinkGraphWith gr = case nodes gr of
-- Need to have at least 2 nodes before we delete one!
ns@(_:_:_) -> map ((,) <*> (`delNode` gr)) ns
_ -> []
instance (Arbitrary a, Arbitrary b) => Arbitrary (T.Gr a b) where
arbitrary = arbitraryGraph
shrink = shrinkGraph
instance (Arbitrary a, Arbitrary b) => Arbitrary (P.Gr a b) where
arbitrary = arbitraryGraph
shrink = shrinkGraph
-- | A newtype wrapper to generate a graph without multiple edges
-- (loops allowed).
newtype NoMultipleEdges gr a b = NME { nmeGraph :: gr a b }
deriving (Eq, Show, Read)
instance (ArbGraph gr) => ArbGraph (NoMultipleEdges gr) where
type BaseGraph (NoMultipleEdges gr) = BaseGraph gr
  toBaseGraph   = toBaseGraph . nmeGraph
fromBaseGraph = NME . fromBaseGraph
edgeF _ = uniqBy toEdge . edgeF (GrProxy :: GrProxy gr)
shrinkFWith = map (second NME) . shrinkFWith . nmeGraph
instance (ArbGraph gr, Arbitrary a, Arbitrary b) => Arbitrary (NoMultipleEdges gr a b) where
arbitrary = arbitraryGraphBy
shrink = shrinkF
-- | A newtype wrapper to generate a graph without loops (multiple
-- edges allowed).
newtype NoLoops gr a b = NL { looplessGraph :: gr a b }
deriving (Eq, Show, Read)
instance (ArbGraph gr) => ArbGraph (NoLoops gr) where
type BaseGraph (NoLoops gr) = BaseGraph gr
toBaseGraph = toBaseGraph . looplessGraph
fromBaseGraph = NL . fromBaseGraph
edgeF _ = filter notLoop . edgeF (GrProxy :: GrProxy gr)
shrinkFWith = map (second NL) . shrinkFWith . looplessGraph
notLoop :: LEdge b -> Bool
notLoop (v,w,_) = v /= w
instance (ArbGraph gr, Arbitrary a, Arbitrary b) => Arbitrary (NoLoops gr a b) where
arbitrary = arbitraryGraphBy
shrink = shrinkF
-- | A wrapper to generate a graph without multiple edges and
-- no loops.
type SimpleGraph gr = NoLoops (NoMultipleEdges gr)
-- | A newtype wrapper such that each (non-loop) edge also has its
-- reverse in the graph.
--
-- Note that there is no way to guarantee this after any additional
-- edges are added or removed.
--
-- You should also apply this wrapper /after/ 'NoMultipleEdges' or
-- else the wrong reverse edge might be removed.
newtype Undirected gr a b = UG { undirGraph :: gr a b }
deriving (Eq, Show, Read)
instance (ArbGraph gr) => ArbGraph (Undirected gr) where
type BaseGraph (Undirected gr) = BaseGraph gr
toBaseGraph = toBaseGraph . undirGraph
fromBaseGraph = UG . fromBaseGraph
edgeF _ = undirect . edgeF (GrProxy :: GrProxy gr)
shrinkFWith = map (second UG) . shrinkFWith . undirGraph
undirect :: [LEdge b] -> [LEdge b]
undirect = concatMap undir
where
undir le@(v,w,b)
| notLoop le = [le, (w,v,b)]
| otherwise = [le]
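-- For instance (informal example):
--
-- > undirect [(1,2,'a'), (3,3,'b')] == [(1,2,'a'), (2,1,'a'), (3,3,'b')]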
instance (ArbGraph gr, Arbitrary a, Arbitrary b) => Arbitrary (Undirected gr a b) where
arbitrary = arbitraryGraphBy
shrink = shrinkF
-- -----------------------------------------------------------------------------
-- | A brute-force approach to generating connected graphs.
--
-- The resultant graph (obtained with 'connGraph') will /never/ be
-- empty: it will, at the very least, contain an additional
-- connected node (obtained with 'connNode').
--
-- Note that this is /not/ an instance of 'ArbGraph' as it is not
-- possible to arbitrarily layer a transformer on top of this.
data Connected ag a b = CG { connNode :: Node
, connArbGraph :: ag a b
}
deriving (Eq, Show, Read)
instance (ArbGraph ag, Arbitrary a, Arbitrary b) => Arbitrary (Connected ag a b) where
arbitrary = arbitraryGraphBy >>= toConnGraph
shrink = shrinkConnGraph
toConnGraph :: forall ag a b. (ArbGraph ag, Arbitrary a, Arbitrary b)
=> ag a b -> Gen (Connected ag a b)
toConnGraph ag = do a <- arbitrary
ces <- concat <$> mapM mkE ws
return $ CG { connNode = v
, connArbGraph = fromBaseGraph
. insEdges ces
. insNode (v,a)
$ g
}
where
g = toBaseGraph ag
[v] = newNodes 1 g
ws = nodes g
mkE w = do b <- arbitrary
return (edgeF p [(v,w,b)])
p :: GrProxy ag
p = GrProxy
shrinkConnGraph :: (ArbGraph ag) => Connected ag a b -> [Connected ag a b]
shrinkConnGraph cg = mapMaybe keepConn . shrinkFWith $ g
where
v = connNode cg
g = connArbGraph cg
keepConn (w,sgs) | v == w = Nothing
| otherwise = Just (cg { connArbGraph = sgs })
-- | The underlying graph represented by this 'Connected' value.
connGraph :: (ArbGraph ag) => Connected ag a b -> BaseGraph ag a b
connGraph = toBaseGraph . connArbGraph
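-- A usage sketch (illustrative only; again assumes QuickCheck's 'generate'):
--
-- > do cg <- generate (arbitrary :: Gen (Connected P.Gr () ()))
-- >    print (nodes (connGraph cg)) -- never prints an empty node list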
-- -----------------------------------------------------------------------------
|
scolobb/fgl
|
fgl-arbitrary/Data/Graph/Inductive/Arbitrary.hs
|
bsd-3-clause
| 12,050
| 0
| 13
| 3,316
| 2,783
| 1,540
| 1,243
| 174
| 2
|
module Main where
import Prelude hiding (Either(..), id, (.))
import Game
import UI
import LevelBuilder
import GameState
gameState :: GameState
gameState = mkLevel
main :: IO ()
main = do
display <- initDisplay
gameLoop display gameState
endDisplay display
|
fros1y/umbral
|
app/Main.hs
|
bsd-3-clause
| 273
| 0
| 7
| 54
| 83
| 48
| 35
| 13
| 1
|
module Conversation (
startConversation
, historyMaxLength
) where
import Control.Monad (when)
import Data.Maybe (isNothing, fromJust)
import Data.Time.Clock (getCurrentTime)
import qualified Data.ByteString.Char8 as B8 (unpack)
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TVar (readTVar, writeTVar)
import qualified Data.Map.Strict as Map (lookup, insert)
import Types (ClientState(..))
import Network.Simple.TCP (withSocketsDo, connect, recv, Socket, SockAddr)
import Tools (contactLookup)
-- NOTE: UserName, Conversation(..), Notice, clientPort, pushFront and the
-- qualified Q module are assumed to come from the project's own modules
-- (Types / Tools), which are not shown here. getCurrentTime is assumed to be
-- the one from Data.Time.Clock; its UTCTime result is rendered with 'show'.
-- | Number of messages to store in the history
historyMaxLength :: Int
historyMaxLength = 100
{- | Start a new conversation with a contact:
    - Establishes a connection with the contact
    - Updates the state with the new Conversation
    - Forks a thread that listens for incoming messages and puts them in the Conversation's history
-}
startConversation :: ClientState -> UserName -> IO ()
startConversation state username = do
    -- Establish the connection (to be replaced with an SSL/TLS version!)
    contacts <- atomically $ readTVar (csContactList state)
    let mContact = contactLookup username contacts
    when (not (isNothing mContact)) $ do
        let contact = fromJust mContact
        connect (contactIpAddr contact) clientPort (receiveThread state username)
-- | Initiate a conversation and handle the exchange of messages
receiveThread :: ClientState -> UserName -> (Socket, SockAddr) -> IO ()
receiveThread state username (socket, _) = do
    -- Add the new Conversation to the state
    now <- getCurrentTime
    let conversation = Conversation {
            convContext = socket  -- assumption: the context stores the live socket
          , convHistory = Q.fromList [Notice ("Conversation started on " ++ show now)]
          , convNbNew   = 1
          }
    atomically $ do
        conversations <- readTVar (csConversations state)
        writeTVar (csConversations state) (Map.insert username conversation conversations)
    -- Loop, listening for incoming messages
    loop socket
  where
    loop sock = do
        mbs <- recv sock 1024
        case mbs of
            Nothing -> putStrLn ("Connection closed with " ++ username)
            Just bs -> do
                conversations <- atomically $ readTVar (csConversations state)
                let (Just conversation) = Map.lookup username conversations
                let newHistory = pushFront (convHistory conversation) (B8.unpack bs) -- don't parse, only Message for now (TEMPORARY)
                let newConversation = conversation { convHistory = newHistory, convNbNew = convNbNew conversation + 1 }
                atomically $ writeTVar (csConversations state) (Map.insert username newConversation conversations)
                loop sock
|
nschoe/hpt
|
src/Client/Conversation.hs
|
bsd-3-clause
| 2,753
| 2
| 15
| 756
| 567
| 303
| 264
| 42
| 2
|
module Language.Java.Paragon.TypeChecker
( -- * Type checking phase
typeCheck
) where
import Control.Monad (when)
import Language.Java.Paragon.Interaction
import Language.Java.Paragon.Monad.Base
import Language.Java.Paragon.Monad.PiReader
import Language.Java.Paragon.Monad.TypeCheckM
import Language.Java.Paragon.Syntax
thisModule :: String
thisModule = "Language.Java.Paragon.TypeChecker"
-- | Type checking phase. Returns an AST with type annotation for nodes that
-- have a type. The base name of the file containing the class of this AST is
-- required to check that the right type is defined in this .para file.
typeCheck :: PiPath -- ^ Directories where .pi files can be found.
-> String -- ^ Base name of the file.
-> AST -- ^ AST from previous phase.
-> BaseM AST -- ^ AST after type checking phase.
typeCheck piPath baseName ast = do
  when (length (cuTypeDecls ast) /= 1) $
    panic (thisModule ++ ".typeCheck") $ "Encountered multiple / zero type " ++
      "declarations in one file. This should not occur in this phase."
let [typeDecl] = cuTypeDecls ast
-- 1. Create skolem types for the type parameters (generics).
let typeParamSubst = createSkolemSubst typeDecl
liftToBaseM piPath $ runTypeCheckM typeParamSubst typeDecl $ do
-- 2. Get the package name.
let maybePkgDecl = fmap pdName (cuPkgDecl ast)
-- 3. Type check type declaration.
tcTypeDecl <- typeCheckTypeDecl baseName maybePkgDecl typeDecl
-- 4. Packages and import declarations have no type.
let tcPkgDecl = fmap (\x -> x { pdAnn = (pdAnn x) { annType = Nothing } } ) (cuPkgDecl ast)
let tcImpDecls = map (\x -> x { impdAnn = (impdAnn x) { annType = Nothing } } ) (cuImportDecls ast)
-- 5. Return updated AST.
return $ ast { cuPkgDecl = tcPkgDecl
, cuImportDecls = tcImpDecls
, cuTypeDecls = [tcTypeDecl]
}
-- | Create a mapping from type parameter to skolemised type to be used in type
-- checking.
createSkolemSubst :: TypeDecl -> [(TypeParam, TcType)]
createSkolemSubst (ClassTypeDecl classDecl) =
let typeParams = cdTypeParams classDecl
in zip typeParams (map skolemiseParam typeParams)
createSkolemSubst (InterfaceTypeDecl interfaceDecl) =
let typeParams = intdTypeParams interfaceDecl
in zip typeParams (map skolemiseParam typeParams)
-- | Create a skolem type for the provided type parameter.
skolemiseParam :: TypeParam -> TcType
skolemiseParam _ = panic (thisModule ++ ".skolemiseParam") $
"This function is not implemented yet."
|
bvdelft/paragon
|
src/Language/Java/Paragon/TypeChecker.hs
|
bsd-3-clause
| 2,583
| 3
| 21
| 542
| 475
| 262
| 213
| 39
| 1
|
{-# LANGUAGE CPP, BangPatterns, ViewPatterns, FlexibleInstances #-}
#if __GLASGOW_HASKELL__ >= 700
{-# OPTIONS -fllvm #-}
#endif
module Data.TrieMap.RadixTrie.Subset () where
import Control.Monad
import Control.Monad.Option
import Data.TrieMap.RadixTrie.Base
import Prelude hiding (lookup)
#define V(f) f (VVector) (k)
#define U(f) f (PVector) (Word)
#define EDGE(args) (!(eView -> Edge args))
instance TrieKey k => Subset (TrieMap (VVector k)) where
Radix m1 <=? Radix m2 = m1 <<=? m2
instance Subset (TrieMap (PVector Word)) where
WRadix m1 <=? WRadix m2 = m1 <<=? m2
instance (Eq k, Label v k) => Subset (Edge v k) where
{-# SPECIALIZE instance (Eq k, TrieKey k) => Subset (V(Edge)) #-}
{-# SPECIALIZE instance Subset (U(Edge)) #-}
eK@EDGE(_ ks0 vK tsK) <=? EDGE(_ ls0 vL tsL) = matchSlice matcher matches ks0 ls0 where
matcher k l z = k == l && z
matches kLen lLen = case compare kLen lLen of
LT -> False
EQ -> vK <=? vL && tsK <<=? tsL
GT -> let k = ks0 !$ lLen in isSome (mfilter (dropEdge (lLen + 1) eK <=?) (lookup k tsL))
|
lowasser/TrieMap
|
Data/TrieMap/RadixTrie/Subset.hs
|
bsd-3-clause
| 1,074
| 9
| 19
| 218
| 333
| 175
| 158
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
import Text.Printf
import Text.Regex.Posix
import Data.Function (on)
import Data.List (sortBy, nubBy)
import Data.Text (intercalate)
import Network.HTTP.Conduit (simpleHttp)
import Codec.Text.IConv as IConv
import qualified Data.Text as T
import Text.HTML.DOM (parseLBS)
import Text.XML.Cursor (Cursor, attributeIs, content, element, child, attribute, fromDocument,
($//), (&|), (&//), (>=>))
data Teacher = Teacher { name :: String
, jokesCount :: Int
} deriving (Show)
teachersList, baseUrl :: String
teachersList = "http://www.mephist.ru/mephist/prepods.nsf/teachers"
baseUrl = "http://www.mephist.ru%s"
-- Placeholder text shown on the site when a teacher has no jokes yet
-- (Russian for "Please send ...").
noJokes :: String
noJokes = "Пришлите"
findHrefsAndTitles :: Cursor -> [Cursor]
findHrefsAndTitles = element "TD" >=> (attributeIs "class" "menu1") &// element "a"
findJokesCount :: Cursor -> [Cursor]
findJokesCount = element "table" >=> (attributeIs "class" "MphDataTable") &// element "a" >=> child
isJokeRow :: Cursor -> Bool
isJokeRow c = (T.unpack $ head $ attribute "href" c :: String) =~ (".*bash?Open.*" :: String) :: Bool
extractTitles = T.concat . attribute "title"
extractHrefs = T.concat . attribute "href"
extractData = T.concat . content
processData :: [T.Text] -> IO ()
processData = putStrLn . T.unpack . T.concat
cursorFor :: String -> IO Cursor
cursorFor u = do
page <- simpleHttp u
let rus = IConv.convert "CP1251" "UTF-8" page
return $ fromDocument $ parseLBS rus
generateTuples :: [String] -> [String] -> [(String, String)]
generateTuples _ [] = []
generateTuples [] _ = []
generateTuples (x:xs) (y:ys) = (x, y): generateTuples xs ys
{- parseHtmlWithJokesCount :: [Char] -> IO String
parseHtmlWithJokesCount url_part = do
c <- cursorFor $ (baseUrl ++ url_part)
return $ last $ map T.unpack $ c $// findJokesCount &| extractData -}
getJokesCount :: [(String, String)] -> [(String, Int)]
getJokesCount [] = []
getJokesCount (teacher:teachers) = (fst teacher, real_jokes $ "1"):getJokesCount teachers
real_jokes :: [Char] -> Int
real_jokes j | j == noJokes = 0
real_jokes j = read j
sortOnJokesCount :: Ord b => [(a, b)] -> [(a, b)]
sortOnJokesCount arr = reverse $ sortBy (compare `on` snd) arr
tuplesToTeachers :: [(String, Int)] -> [Teacher]
tuplesToTeachers [] = []
tuplesToTeachers ((name, jokes):xs) = Teacher name jokes : tuplesToTeachers xs
symEq :: Eq a => (a, a) -> (a, a) -> Bool
symEq (x,y) (u,v) = (x == u && y == v)
removeDuplTuples :: Eq a => [(a, a)] -> [(a, a)]
removeDuplTuples = nubBy symEq
perls_on_russian :: Int -> String
perls_on_russian jokes
| elem last_digit [5,6,7,8,9,0] = "перлов"
| elem last_digit [2,3,4] = "перла"
| otherwise = "перл"
where
last_digit = (jokes `mod` 10)
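-- For readers unfamiliar with Russian plural forms (editorial note): the
-- function only inspects the last digit of the count, e.g.
-- perls_on_russian 1 == "перл", perls_on_russian 3 == "перла",
-- perls_on_russian 7 == "перлов" ("perl"/"gem", i.e. a joke).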
outputTeachers :: Teacher -> [Char]
outputTeachers (Teacher name jokes) = (printf "%3d %6s %6s %-40s" jokes (perls :: String) ("--->" :: String) (name :: String) :: String)
where
perls = perls_on_russian jokes
counter :: PrintfType t => Int -> Int -> t
counter done all = printf ((replicate 16 '\8') ++ "%*d out of %*d" :: String) (length $ show done :: Int) (done :: Int) (length $ show all :: Int) (all :: Int)
main :: IO ()
main = do
cursor1 <- cursorFor teachersList
putStrLn "Collecting data ..."
let
only = 1019
titles = map T.unpack $ cursor1 $// findHrefsAndTitles &| extractTitles
hrefs = map T.unpack $ cursor1 $// findHrefsAndTitles &| extractHrefs
tuples = take only $ zip [1..] $ removeDuplTuples $ filter ((=~ ("/mephist/prepods.nsf/id/.*" :: String)).snd) $ generateTuples titles hrefs
tup_len = length tuples
jokesIO =
mapM
(\(index, tuple) -> do
c <- cursorFor $ printf baseUrl $ snd tuple
putStr $ counter index tup_len
return $ ((fst tuple), real_jokes $ last $ map T.unpack $ c $// findJokesCount &| extractData)
)
tuples
jokes <- jokesIO
putStrLn "\nAll data collected. Processing ...\n"
putStrLn . unlines .
map
(\(index, value) -> (printf "%3s) " $ show index) ++ value )
$ zip [1..]
$ map outputTeachers
$ tuplesToTeachers
$ sortOnJokesCount
$ filter ((/=0).snd) jokes
|
MrBadge/hs2s-lab
|
maltsev/main.hs
|
mit
| 4,106
| 14
| 22
| 808
| 1,510
| 817
| 693
| 90
| 1
|
{-# OPTIONS_GHC -F -pgmF hspec-discover -optF --module-name=AllSpec #-}
|
zsedem/haskell-python
|
test/AllSpec.hs
|
mit
| 72
| 0
| 2
| 8
| 3
| 2
| 1
| 1
| 0
|
module Data.MemoUgly(memoIO, memo) where
import Control.Concurrent.MVar
import qualified Data.Map as M
import System.IO.Unsafe(unsafePerformIO)
-- | Memoize the given function by allocating a memo table,
-- and then updating the memo table on each function call.
memoIO :: (Ord a)
=> (a -> b) -- ^Function to memoize
-> IO (a -> IO b)
memoIO f = do
    v <- newMVar M.empty
    let f' x = do
          m <- readMVar v
          case M.lookup x m of
            Nothing -> do
              let r = f x
              modifyMVar_ v (return . M.insert x r)
              return r
            Just r  -> return r
    return f'
-- | The pure version of 'memoIO'.
memo :: (Ord a)
=> (a -> b) -- ^Function to memoize
-> (a -> b)
memo f = let f' = unsafePerformIO (memoIO f) in \ x -> unsafePerformIO (f' x)
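-- Example usage (editorial addition, not part of the original module): the
-- classic memoised Fibonacci. Because 'fib' is a top-level constant
-- applicative form, the hidden memo table is shared across all calls.
--
-- > fib :: Integer -> Integer
-- > fib = memo fib'
-- >   where
-- >     fib' 0 = 0
-- >     fib' 1 = 1
-- >     fib' n = fib (n - 1) + fib (n - 2)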
|
Kiwi-Labs/kwirc
|
kwick/Data/MemoUgly.hs
|
mit
| 821
| 0
| 21
| 255
| 280
| 144
| 136
| 19
| 2
|
module Calendar where
type Year = Int
type Month = Int
type Day = Int
type Date = (Day,Month,Year)
monthNames :: [String]
monthNames = [ "January" , "February" , "March" , "April"
, "May" , "June" , "July" , "August"
, "September" , "October" , "November" , "December"
]
isLeapYear :: Year -> Bool
isLeapYear year = year `mod` 4 == 0
&& not ( year `mod` 100 == 0 && year `mod` 400 /= 0 )
daysInMonth :: Month -> Year -> Int
daysInMonth month year = list !! month
where
list :: [Int]
list = [31,february,31,30,31,30,31,31,30,31,30,31]
february :: Int
february = if isLeapYear year then 29 else 28
calenderMonth :: Month -> Year -> [String]
calenderMonth month year = title : body
where
title :: String
title = cjustify 22 (monthNames !! month ++ " " ++ showInt year)
body :: [String]
body = take 7 . map (concat . separateBy " ") . makeGroupsOf 7
$ ( ["su","mo","tu","we","th","fr","sa"]
++ replicate firstDayOfMonth " "
++ map (rjustify 2 . showInt) [1..daysInMonth month year]
++ repeat " "
)
firstDayOfMonth :: Int
firstDayOfMonth = sum ( year -- 365 `mod` 7 == 1
: nrOfLeapYears
: [ daysInMonth m year | m <- [0..month-1] ]
) `mod` 7
nrOfLeapYears :: Int
nrOfLeapYears = (year - 1) `div` 4
- (year - 1) `div` 100
+ (year - 1) `div` 400
showCalenderForYear :: Year -> String
showCalenderForYear year = unlines
. concat
. separateBy horizontal
. map besides
. makeGroupsOf 3
$ map (\month -> calenderMonth month year) [0..11]
where
besides :: [[String]] -> [String]
besides xxs = foldr1 (zipWith (++)) $ separateBy vertical xxs
horizontal :: [String]
horizontal = [concat (separateBy "+" (replicate 3 (replicate 22 '-')))]
vertical :: [String]
vertical = repeat "|"
separateBy :: a -> [a] -> [a]
separateBy sep xs = sep : concatMap (\x -> [x,sep]) xs
makeGroupsOf :: Int -> [a] -> [[a]]
makeGroupsOf _ [] = []
makeGroupsOf i xs = take i xs : makeGroupsOf i (drop i xs)
rjustify :: Int -> String -> String
rjustify i s = replicate (i - length s) ' ' ++ s
cjustify :: Int -> String -> String
cjustify i s = let sp :: String
sp = replicate ((i - length s) `div` 2) ' '
in take i (sp ++ s ++ repeat ' ')
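-- Worked examples of the layout helpers above (editorial addition):
--
-- > makeGroupsOf 3 [1..7]  == [[1,2,3],[4,5,6],[7]]
-- > separateBy 0 [1,2,3]   == [0,1,0,2,0,3,0]
-- > rjustify 4 "7"         == "   7"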
main :: IO ()
main = do putStr "See calendar for which year? "
input <- getLine
let year :: Int
year = readUnsigned input
if year > 1752
then putStrLn (showCalenderForYear year)
else putStrLn "invalid year (should be >1752)"
|
roberth/uu-helium
|
demo/Calendar.hs
|
gpl-3.0
| 3,133
| 0
| 15
| 1,241
| 1,029
| 561
| 468
| 69
| 2
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.DynamoDB.DeleteItem
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a single item in a table by primary key. You can perform a
-- conditional delete operation that deletes the item if it exists, or if
-- it has an expected attribute value.
--
-- In addition to deleting an item, you can also return the item\'s
-- attribute values in the same operation, using the /ReturnValues/
-- parameter.
--
-- Unless you specify conditions, the /DeleteItem/ is an idempotent
-- operation; running it multiple times on the same item or attribute does
-- /not/ result in an error response.
--
-- Conditional deletes are useful for deleting items only if specific
-- conditions are met. If those conditions are met, DynamoDB performs the
-- delete. Otherwise, the item is not deleted.
--
-- /See:/ <http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DeleteItem.html AWS API Reference> for DeleteItem.
module Network.AWS.DynamoDB.DeleteItem
(
-- * Creating a Request
deleteItem
, DeleteItem
-- * Request Lenses
, diExpressionAttributeNames
, diReturnValues
, diExpressionAttributeValues
, diReturnConsumedCapacity
, diReturnItemCollectionMetrics
, diConditionExpression
, diConditionalOperator
, diExpected
, diTableName
, diKey
-- * Destructuring the Response
, deleteItemResponse
, DeleteItemResponse
-- * Response Lenses
, dirsItemCollectionMetrics
, dirsConsumedCapacity
, dirsAttributes
, dirsResponseStatus
) where
import Network.AWS.DynamoDB.Types
import Network.AWS.DynamoDB.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Represents the input of a /DeleteItem/ operation.
--
-- /See:/ 'deleteItem' smart constructor.
data DeleteItem = DeleteItem'
{ _diExpressionAttributeNames :: !(Maybe (Map Text Text))
, _diReturnValues :: !(Maybe ReturnValue)
, _diExpressionAttributeValues :: !(Maybe (Map Text AttributeValue))
, _diReturnConsumedCapacity :: !(Maybe ReturnConsumedCapacity)
, _diReturnItemCollectionMetrics :: !(Maybe ReturnItemCollectionMetrics)
, _diConditionExpression :: !(Maybe Text)
, _diConditionalOperator :: !(Maybe ConditionalOperator)
, _diExpected :: !(Maybe (Map Text ExpectedAttributeValue))
, _diTableName :: !Text
, _diKey :: !(Map Text AttributeValue)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'diExpressionAttributeNames'
--
-- * 'diReturnValues'
--
-- * 'diExpressionAttributeValues'
--
-- * 'diReturnConsumedCapacity'
--
-- * 'diReturnItemCollectionMetrics'
--
-- * 'diConditionExpression'
--
-- * 'diConditionalOperator'
--
-- * 'diExpected'
--
-- * 'diTableName'
--
-- * 'diKey'
deleteItem
:: Text -- ^ 'diTableName'
-> DeleteItem
deleteItem pTableName_ =
DeleteItem'
{ _diExpressionAttributeNames = Nothing
, _diReturnValues = Nothing
, _diExpressionAttributeValues = Nothing
, _diReturnConsumedCapacity = Nothing
, _diReturnItemCollectionMetrics = Nothing
, _diConditionExpression = Nothing
, _diConditionalOperator = Nothing
, _diExpected = Nothing
, _diTableName = pTableName_
, _diKey = mempty
}
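-- Example request (editorial sketch, not generated code). It assumes the lens
-- operators from Control.Lens, the OverloadedStrings extension, and the
-- 'attributeValue' / 'avS' helpers exported by "Network.AWS.DynamoDB.Types";
-- the table and attribute names are made up for illustration.
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- > import Control.Lens ((&), (.~), (?~))
-- > import qualified Data.HashMap.Strict as HashMap
-- >
-- > exampleDelete :: DeleteItem
-- > exampleDelete = deleteItem "Music"
-- >     & diKey .~ HashMap.fromList
-- >         [ ("Artist",    attributeValue & avS ?~ "No One You Know")
-- >         , ("SongTitle", attributeValue & avS ?~ "Scared of My Shadow")
-- >         ]
-- >     & diConditionExpression ?~ "attribute_exists(Artist)"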
-- | One or more substitution tokens for attribute names in an expression.
-- The following are some use cases for using /ExpressionAttributeNames/:
--
-- - To access an attribute whose name conflicts with a DynamoDB reserved
-- word.
--
-- - To create a placeholder for repeating occurrences of an attribute
-- name in an expression.
--
-- - To prevent special characters in an attribute name from being
-- misinterpreted in an expression.
--
-- Use the __#__ character in an expression to dereference an attribute
-- name. For example, consider the following attribute name:
--
-- - 'Percentile'
--
-- The name of this attribute conflicts with a reserved word, so it cannot
-- be used directly in an expression. (For the complete list of reserved
-- words, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ReservedWords.html Reserved Words>
-- in the /Amazon DynamoDB Developer Guide/). To work around this, you
-- could specify the following for /ExpressionAttributeNames/:
--
-- - '{\"#P\":\"Percentile\"}'
--
-- You could then use this substitution in an expression, as in this
-- example:
--
-- - '#P = :val'
--
-- Tokens that begin with the __:__ character are /expression attribute
-- values/, which are placeholders for the actual value at runtime.
--
-- For more information on expression attribute names, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.AccessingItemAttributes.html Accessing Item Attributes>
-- in the /Amazon DynamoDB Developer Guide/.
diExpressionAttributeNames :: Lens' DeleteItem (HashMap Text Text)
diExpressionAttributeNames = lens _diExpressionAttributeNames (\ s a -> s{_diExpressionAttributeNames = a}) . _Default . _Map;
-- | Use /ReturnValues/ if you want to get the item attributes as they
-- appeared before they were deleted. For /DeleteItem/, the valid values
-- are:
--
-- - 'NONE' - If /ReturnValues/ is not specified, or if its value is
-- 'NONE', then nothing is returned. (This setting is the default for
-- /ReturnValues/.)
--
-- - 'ALL_OLD' - The content of the old item is returned.
--
diReturnValues :: Lens' DeleteItem (Maybe ReturnValue)
diReturnValues = lens _diReturnValues (\ s a -> s{_diReturnValues = a});
-- | One or more values that can be substituted in an expression.
--
-- Use the __:__ (colon) character in an expression to dereference an
-- attribute value. For example, suppose that you wanted to check whether
-- the value of the /ProductStatus/ attribute was one of the following:
--
-- 'Available | Backordered | Discontinued'
--
-- You would first need to specify /ExpressionAttributeValues/ as follows:
--
-- '{ \":avail\":{\"S\":\"Available\"}, \":back\":{\"S\":\"Backordered\"}, \":disc\":{\"S\":\"Discontinued\"} }'
--
-- You could then use these values in an expression, such as this:
--
-- 'ProductStatus IN (:avail, :back, :disc)'
--
-- For more information on expression attribute values, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.SpecifyingConditions.html Specifying Conditions>
-- in the /Amazon DynamoDB Developer Guide/.
diExpressionAttributeValues :: Lens' DeleteItem (HashMap Text AttributeValue)
diExpressionAttributeValues = lens _diExpressionAttributeValues (\ s a -> s{_diExpressionAttributeValues = a}) . _Default . _Map;
-- | Undocumented member.
diReturnConsumedCapacity :: Lens' DeleteItem (Maybe ReturnConsumedCapacity)
diReturnConsumedCapacity = lens _diReturnConsumedCapacity (\ s a -> s{_diReturnConsumedCapacity = a});
-- | Determines whether item collection metrics are returned. If set to
-- 'SIZE', the response includes statistics about item collections, if any,
-- that were modified during the operation are returned in the response. If
-- set to 'NONE' (the default), no statistics are returned.
diReturnItemCollectionMetrics :: Lens' DeleteItem (Maybe ReturnItemCollectionMetrics)
diReturnItemCollectionMetrics = lens _diReturnItemCollectionMetrics (\ s a -> s{_diReturnItemCollectionMetrics = a});
-- | A condition that must be satisfied in order for a conditional
-- /DeleteItem/ to succeed.
--
-- An expression can contain any of the following:
--
-- - Functions:
-- 'attribute_exists | attribute_not_exists | attribute_type | contains | begins_with | size'
--
-- These function names are case-sensitive.
--
-- - Comparison operators: ' = | \<> | \< | > | \<= | >= | BETWEEN | IN'
--
-- - Logical operators: 'AND | OR | NOT'
--
-- For more information on condition expressions, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.SpecifyingConditions.html Specifying Conditions>
-- in the /Amazon DynamoDB Developer Guide/.
--
-- /ConditionExpression/ replaces the legacy /ConditionalOperator/ and
-- /Expected/ parameters.
diConditionExpression :: Lens' DeleteItem (Maybe Text)
diConditionExpression = lens _diConditionExpression (\ s a -> s{_diConditionExpression = a});
-- | This is a legacy parameter, for backward compatibility. New applications
-- should use /ConditionExpression/ instead. Do not combine legacy
-- parameters and expression parameters in a single API call; otherwise,
-- DynamoDB will return a /ValidationException/ exception.
--
-- A logical operator to apply to the conditions in the /Expected/ map:
--
-- - 'AND' - If all of the conditions evaluate to true, then the entire
-- map evaluates to true.
--
-- - 'OR' - If at least one of the conditions evaluate to true, then the
-- entire map evaluates to true.
--
-- If you omit /ConditionalOperator/, then 'AND' is the default.
--
-- The operation will succeed only if the entire map evaluates to true.
--
-- This parameter does not support attributes of type List or Map.
diConditionalOperator :: Lens' DeleteItem (Maybe ConditionalOperator)
diConditionalOperator = lens _diConditionalOperator (\ s a -> s{_diConditionalOperator = a});
-- | This is a legacy parameter, for backward compatibility. New applications
-- should use /ConditionExpression/ instead. Do not combine legacy
-- parameters and expression parameters in a single API call; otherwise,
-- DynamoDB will return a /ValidationException/ exception.
--
-- A map of attribute\/condition pairs. /Expected/ provides a conditional
-- block for the /DeleteItem/ operation.
--
-- Each element of /Expected/ consists of an attribute name, a comparison
-- operator, and one or more values. DynamoDB compares the attribute with
-- the value(s) you supplied, using the comparison operator. For each
-- /Expected/ element, the result of the evaluation is either true or
-- false.
--
-- If you specify more than one element in the /Expected/ map, then by
-- default all of the conditions must evaluate to true. In other words, the
-- conditions are ANDed together. (You can use the /ConditionalOperator/
-- parameter to OR the conditions instead. If you do this, then at least
-- one of the conditions must evaluate to true, rather than all of them.)
--
-- If the /Expected/ map evaluates to true, then the conditional operation
-- succeeds; otherwise, it fails.
--
-- /Expected/ contains the following:
--
-- - /AttributeValueList/ - One or more values to evaluate against the
-- supplied attribute. The number of values in the list depends on the
-- /ComparisonOperator/ being used.
--
-- For type Number, value comparisons are numeric.
--
-- String value comparisons for greater than, equals, or less than are
-- based on ASCII character code values. For example, 'a' is greater
-- than 'A', and 'a' is greater than 'B'. For a list of code values,
-- see <http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters>.
--
-- For type Binary, DynamoDB treats each byte of the binary data as
-- unsigned when it compares binary values.
--
-- - /ComparisonOperator/ - A comparator for evaluating attributes in the
-- /AttributeValueList/. When performing the comparison, DynamoDB uses
-- strongly consistent reads.
--
-- The following comparison operators are available:
--
-- 'EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN | BETWEEN'
--
-- The following are descriptions of each comparison operator.
--
-- - 'EQ' : Equal. 'EQ' is supported for all datatypes, including
-- lists and maps.
--
-- /AttributeValueList/ can contain only one /AttributeValue/
-- element of type String, Number, Binary, String Set, Number Set,
-- or Binary Set. If an item contains an /AttributeValue/ element
-- of a different type than the one provided in the request, the
-- value does not match. For example, '{\"S\":\"6\"}' does not
-- equal '{\"N\":\"6\"}'. Also, '{\"N\":\"6\"}' does not equal
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'NE' : Not equal. 'NE' is supported for all datatypes, including
-- lists and maps.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of
-- type String, Number, Binary, String Set, Number Set, or Binary
-- Set. If an item contains an /AttributeValue/ of a different type
-- than the one provided in the request, the value does not match.
-- For example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'.
-- Also, '{\"N\":\"6\"}' does not equal
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'LE' : Less than or equal.
--
-- /AttributeValueList/ can contain only one /AttributeValue/
-- element of type String, Number, or Binary (not a set type). If
-- an item contains an /AttributeValue/ element of a different type
-- than the one provided in the request, the value does not match.
-- For example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'.
-- Also, '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'LT' : Less than.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of
-- type String, Number, or Binary (not a set type). If an item
-- contains an /AttributeValue/ element of a different type than
-- the one provided in the request, the value does not match. For
-- example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'GE' : Greater than or equal.
--
-- /AttributeValueList/ can contain only one /AttributeValue/
-- element of type String, Number, or Binary (not a set type). If
-- an item contains an /AttributeValue/ element of a different type
-- than the one provided in the request, the value does not match.
-- For example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'.
-- Also, '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'GT' : Greater than.
--
-- /AttributeValueList/ can contain only one /AttributeValue/
-- element of type String, Number, or Binary (not a set type). If
-- an item contains an /AttributeValue/ element of a different type
-- than the one provided in the request, the value does not match.
-- For example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'.
-- Also, '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'NOT_NULL' : The attribute exists. 'NOT_NULL' is supported for
-- all datatypes, including lists and maps.
--
-- This operator tests for the existence of an attribute, not its
-- data type. If the data type of attribute \"'a'\" is null, and
-- you evaluate it using 'NOT_NULL', the result is a Boolean
-- /true/. This result is because the attribute \"'a'\" exists; its
-- data type is not relevant to the 'NOT_NULL' comparison operator.
--
-- - 'NULL' : The attribute does not exist. 'NULL' is supported for
-- all datatypes, including lists and maps.
--
-- This operator tests for the nonexistence of an attribute, not
-- its data type. If the data type of attribute \"'a'\" is null,
-- and you evaluate it using 'NULL', the result is a Boolean
-- /false/. This is because the attribute \"'a'\" exists; its data
-- type is not relevant to the 'NULL' comparison operator.
--
-- - 'CONTAINS' : Checks for a subsequence, or value in a set.
--
-- /AttributeValueList/ can contain only one /AttributeValue/
-- element of type String, Number, or Binary (not a set type). If
-- the target attribute of the comparison is of type String, then
-- the operator checks for a substring match. If the target
-- attribute of the comparison is of type Binary, then the operator
-- looks for a subsequence of the target that matches the input. If
-- the target attribute of the comparison is a set (\"'SS'\",
-- \"'NS'\", or \"'BS'\"), then the operator evaluates to true if
-- it finds an exact match with any member of the set.
--
-- CONTAINS is supported for lists: When evaluating
-- \"'a CONTAINS b'\", \"'a'\" can be a list; however, \"'b'\"
-- cannot be a set, a map, or a list.
--
-- - 'NOT_CONTAINS' : Checks for absence of a subsequence, or absence
-- of a value in a set.
--
-- /AttributeValueList/ can contain only one /AttributeValue/
-- element of type String, Number, or Binary (not a set type). If
-- the target attribute of the comparison is a String, then the
-- operator checks for the absence of a substring match. If the
-- target attribute of the comparison is Binary, then the operator
-- checks for the absence of a subsequence of the target that
-- matches the input. If the target attribute of the comparison is
-- a set (\"'SS'\", \"'NS'\", or \"'BS'\"), then the operator
-- evaluates to true if it /does not/ find an exact match with any
-- member of the set.
--
-- NOT_CONTAINS is supported for lists: When evaluating
-- \"'a NOT CONTAINS b'\", \"'a'\" can be a list; however, \"'b'\"
-- cannot be a set, a map, or a list.
--
-- - 'BEGINS_WITH' : Checks for a prefix.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of
-- type String or Binary (not a Number or a set type). The target
-- attribute of the comparison must be of type String or Binary
-- (not a Number or a set type).
--
-- - 'IN' : Checks for matching elements within two sets.
--
-- /AttributeValueList/ can contain one or more /AttributeValue/
-- elements of type String, Number, or Binary (not a set type).
-- These attributes are compared against an existing set type
-- attribute of an item. If any elements of the input set are
-- present in the item attribute, the expression evaluates to true.
--
-- - 'BETWEEN' : Greater than or equal to the first value, and less
-- than or equal to the second value.
--
-- /AttributeValueList/ must contain two /AttributeValue/ elements
-- of the same type, either String, Number, or Binary (not a set
-- type). A target attribute matches if the target value is greater
-- than, or equal to, the first element and less than, or equal to,
-- the second element. If an item contains an /AttributeValue/
-- element of a different type than the one provided in the
-- request, the value does not match. For example, '{\"S\":\"6\"}'
-- does not compare to '{\"N\":\"6\"}'. Also, '{\"N\":\"6\"}' does
-- not compare to '{\"NS\":[\"6\", \"2\", \"1\"]}'
--
-- For usage examples of /AttributeValueList/ and /ComparisonOperator/, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.html Legacy Conditional Parameters>
-- in the /Amazon DynamoDB Developer Guide/.
--
-- For backward compatibility with previous DynamoDB releases, the
-- following parameters can be used instead of /AttributeValueList/ and
-- /ComparisonOperator/:
--
-- - /Value/ - A value for DynamoDB to compare with an attribute.
--
-- - /Exists/ - A Boolean value that causes DynamoDB to evaluate the
-- value before attempting the conditional operation:
--
-- - If /Exists/ is 'true', DynamoDB will check to see if that
-- attribute value already exists in the table. If it is found,
-- then the condition evaluates to true; otherwise the condition
-- evaluate to false.
--
-- - If /Exists/ is 'false', DynamoDB assumes that the attribute
-- value does /not/ exist in the table. If in fact the value does
-- not exist, then the assumption is valid and the condition
-- evaluates to true. If the value is found, despite the assumption
-- that it does not exist, the condition evaluates to false.
--
-- Note that the default value for /Exists/ is 'true'.
--
-- The /Value/ and /Exists/ parameters are incompatible with
-- /AttributeValueList/ and /ComparisonOperator/. Note that if you use both
-- sets of parameters at once, DynamoDB will return a /ValidationException/
-- exception.
--
-- This parameter does not support attributes of type List or Map.
diExpected :: Lens' DeleteItem (HashMap Text ExpectedAttributeValue)
diExpected = lens _diExpected (\ s a -> s{_diExpected = a}) . _Default . _Map;
-- | The name of the table from which to delete the item.
diTableName :: Lens' DeleteItem Text
diTableName = lens _diTableName (\ s a -> s{_diTableName = a});
-- | A map of attribute names to /AttributeValue/ objects, representing the
-- primary key of the item to delete.
--
-- For the primary key, you must provide all of the attributes. For
-- example, with a hash type primary key, you only need to provide the hash
-- attribute. For a hash-and-range type primary key, you must provide both
-- the hash attribute and the range attribute.
diKey :: Lens' DeleteItem (HashMap Text AttributeValue)
diKey = lens _diKey (\ s a -> s{_diKey = a}) . _Map;
instance AWSRequest DeleteItem where
type Rs DeleteItem = DeleteItemResponse
request = postJSON dynamoDB
response
= receiveJSON
(\ s h x ->
DeleteItemResponse' <$>
(x .?> "ItemCollectionMetrics") <*>
(x .?> "ConsumedCapacity")
<*> (x .?> "Attributes" .!@ mempty)
<*> (pure (fromEnum s)))
instance ToHeaders DeleteItem where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("DynamoDB_20120810.DeleteItem" :: ByteString),
"Content-Type" =#
("application/x-amz-json-1.0" :: ByteString)])
instance ToJSON DeleteItem where
toJSON DeleteItem'{..}
= object
(catMaybes
[("ExpressionAttributeNames" .=) <$>
_diExpressionAttributeNames,
("ReturnValues" .=) <$> _diReturnValues,
("ExpressionAttributeValues" .=) <$>
_diExpressionAttributeValues,
("ReturnConsumedCapacity" .=) <$>
_diReturnConsumedCapacity,
("ReturnItemCollectionMetrics" .=) <$>
_diReturnItemCollectionMetrics,
("ConditionExpression" .=) <$>
_diConditionExpression,
("ConditionalOperator" .=) <$>
_diConditionalOperator,
("Expected" .=) <$> _diExpected,
Just ("TableName" .= _diTableName),
Just ("Key" .= _diKey)])
instance ToPath DeleteItem where
toPath = const "/"
instance ToQuery DeleteItem where
toQuery = const mempty
-- | Represents the output of a /DeleteItem/ operation.
--
-- /See:/ 'deleteItemResponse' smart constructor.
data DeleteItemResponse = DeleteItemResponse'
{ _dirsItemCollectionMetrics :: !(Maybe ItemCollectionMetrics)
, _dirsConsumedCapacity :: !(Maybe ConsumedCapacity)
, _dirsAttributes :: !(Maybe (Map Text AttributeValue))
, _dirsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteItemResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dirsItemCollectionMetrics'
--
-- * 'dirsConsumedCapacity'
--
-- * 'dirsAttributes'
--
-- * 'dirsResponseStatus'
deleteItemResponse
:: Int -- ^ 'dirsResponseStatus'
-> DeleteItemResponse
deleteItemResponse pResponseStatus_ =
DeleteItemResponse'
{ _dirsItemCollectionMetrics = Nothing
, _dirsConsumedCapacity = Nothing
, _dirsAttributes = Nothing
, _dirsResponseStatus = pResponseStatus_
}
-- | Information about item collections, if any, that were affected by the
-- operation. /ItemCollectionMetrics/ is only returned if the request asked
-- for it. If the table does not have any local secondary indexes, this
-- information is not returned in the response.
--
-- Each /ItemCollectionMetrics/ element consists of:
--
-- - /ItemCollectionKey/ - The hash key value of the item collection.
-- This is the same as the hash key of the item.
--
-- - /SizeEstimateRange/ - An estimate of item collection size, in
-- gigabytes. This value is a two-element array containing a lower
-- bound and an upper bound for the estimate. The estimate includes the
-- size of all the items in the table, plus the size of all attributes
-- projected into all of the local secondary indexes on that table. Use
-- this estimate to measure whether a local secondary index is
-- approaching its size limit.
--
-- The estimate is subject to change over time; therefore, do not rely
-- on the precision or accuracy of the estimate.
--
dirsItemCollectionMetrics :: Lens' DeleteItemResponse (Maybe ItemCollectionMetrics)
dirsItemCollectionMetrics = lens _dirsItemCollectionMetrics (\ s a -> s{_dirsItemCollectionMetrics = a});
-- | Undocumented member.
dirsConsumedCapacity :: Lens' DeleteItemResponse (Maybe ConsumedCapacity)
dirsConsumedCapacity = lens _dirsConsumedCapacity (\ s a -> s{_dirsConsumedCapacity = a});
-- | A map of attribute names to /AttributeValue/ objects, representing the
-- item as it appeared before the /DeleteItem/ operation. This map appears
-- in the response only if /ReturnValues/ was specified as 'ALL_OLD' in the
-- request.
dirsAttributes :: Lens' DeleteItemResponse (HashMap Text AttributeValue)
dirsAttributes = lens _dirsAttributes (\ s a -> s{_dirsAttributes = a}) . _Default . _Map;
-- | The response status code.
dirsResponseStatus :: Lens' DeleteItemResponse Int
dirsResponseStatus = lens _dirsResponseStatus (\ s a -> s{_dirsResponseStatus = a});
|
olorin/amazonka
|
amazonka-dynamodb/gen/Network/AWS/DynamoDB/DeleteItem.hs
|
mpl-2.0
| 27,191
| 0
| 14
| 5,973
| 1,929
| 1,283
| 646
| 174
| 1
|
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Main
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Main (main) where
import Test.Tasty
import Test.AWS.CloudWatch
import Test.AWS.CloudWatch.Internal
main :: IO ()
main = defaultMain $ testGroup "CloudWatch"
[ testGroup "tests" tests
, testGroup "fixtures" fixtures
]
|
fmapfmapfmap/amazonka
|
amazonka-cloudwatch/test/Main.hs
|
mpl-2.0
| 543
| 0
| 8
| 103
| 76
| 47
| 29
| 9
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
module Main (main) where
import Control.Exception
(catch)
import Prelude
import System.Exit
(ExitCode(..))
import System.Process
(StdStream(CreatePipe), proc, std_out,
waitForProcess, withCreateProcess)
import Test.DocTest
(doctest)
import Test.Tasty
(TestTree, defaultMain, testGroup, withResource)
import Test.Tasty.HUnit
(testCase)
import Test.Tasty.QuickCheck
(expectFailure, testProperty, withMaxSuccess)
import Bookstore as Store
import CircularBuffer
import Cleanup
import qualified CrudWebserverDb as WS
import DieHard
import Echo
import ErrorEncountered
import Hanoi
import IORefs
import MemoryReference
import Mock
import Overflow
import ProcessRegistry
import RQlite
import qualified ShrinkingProps
import SQLite
import Test.StateMachine.Markov
(PropertyName, StatsDb, fileStatsDb)
import TicketDispenser
import qualified UnionFind
------------------------------------------------------------------------
tests :: Bool -> TestTree
tests docker0 = testGroup "Tests"
[ testCase "Doctest"
(doctest [ "src/Test/StateMachine/Z.hs"
, "src/Test/StateMachine/Logic.hs"
])
, ShrinkingProps.tests
, testProperty "TowersOfHanoi"
(expectFailure (prop_hanoi 3))
, testProperty "DieHard"
(expectFailure (withMaxSuccess 2000 prop_dieHard))
, testGroup "MemoryReference"
[testProperty "NoBugSeq" (prop_sequential MemoryReference.None)
, testProperty "LogicBug" (expectFailure (prop_sequential Logic))
, testProperty "RaceBugSequential" (prop_sequential Race)
, testProperty "NoBugParallel" (prop_parallel MemoryReference.None)
, testProperty "RaceBugParallel" (expectFailure (prop_parallel Race))
, testProperty "CrashBugParallel" (prop_parallel' Crash)
, testProperty "CrashAndLogicBugParallel"
(expectFailure (withMaxSuccess 10000 (prop_parallel' CrashAndLogic)))
, testProperty "PreconditionFailed" prop_precondition
, testProperty "ExistsCommands" prop_existsCommands
, testProperty "NoBug 1 thread" (prop_nparallel MemoryReference.None 1)
, testProperty "NoBug 2 threads" (prop_nparallel MemoryReference.None 2)
, testProperty "NoBug 3 threads" (withMaxSuccess 80 $ prop_nparallel MemoryReference.None 3)
, testProperty "NoBug 4 threads" (withMaxSuccess 40 $ prop_nparallel MemoryReference.None 4)
    , testProperty "RaceBugParallel 1 thread" (prop_nparallel Race 1)
    , testProperty "RaceBugParallel 2 threads" (expectFailure (prop_nparallel Race 2))
    , testProperty "RaceBugParallel 3 threads" (expectFailure (prop_nparallel Race 3))
    , testProperty "RaceBugParallel 4 threads" (expectFailure (prop_nparallel Race 4))
, testProperty "ShrinkParallelEquivalence" prop_pairs_shrink_parallel_equivalence
, testProperty "ShrinkAndValidateParallelEquivalence" prop_pairs_shrinkAndValidate_equivalence
    , testProperty "ShrinkPairsEquivalence" prop_pairs_shrink_parallel
]
, testGroup "Overflow"
[ testProperty "2-threads" prop_parallel_overflow
, testProperty "3-threads" $ prop_nparallel_overflow 3
, testProperty "4-threads" $ expectFailure
$ withMaxSuccess 500
$ prop_nparallel_overflow 4
]
, testGroup "Cleanup"
[ testProperty "seqRegularNoOp" $ prop_sequential_clean Regular Cleanup.NoBug NoOp
, testProperty "seqRegular" $ prop_sequential_clean Regular Cleanup.NoBug ReDo
, testProperty "seqRegularExcNoOp"
$ expectFailure $ prop_sequential_clean Regular Cleanup.Exception NoOp
, testProperty "seqRegularExc"
$ expectFailure $ prop_sequential_clean Regular Cleanup.Exception ReDo
, testProperty "seqFilesNoOp" $ prop_sequential_clean Files Cleanup.NoBug NoOp
, testProperty "seqFiles" $ prop_sequential_clean Files Cleanup.NoBug ReDo
, testProperty "seqFilesExcNoOp" $ prop_sequential_clean Files Cleanup.Exception NoOp
, testProperty "seqFilesExc" $ prop_sequential_clean Files Cleanup.Exception ReDo
, testProperty "seqFilesExcAfterNoOp" $ prop_sequential_clean Files Cleanup.ExcAfter NoOp
, testProperty "seqFilesExcAfterReDo" $ prop_sequential_clean Files Cleanup.ExcAfter ReDo
, testProperty "seqEquivNoOp" $ prop_sequential_clean (Eq False) Cleanup.NoBug NoOp
, testProperty "2-threadsRegularExc"
$ expectFailure $ prop_parallel_clean Regular Cleanup.Exception NoOp
, testProperty "2-threadsRegularExc"
$ expectFailure $ prop_parallel_clean Regular Cleanup.Exception ReDo
, testProperty "2-threadsFilesExc"
$ expectFailure $ withMaxSuccess 1000 $ prop_parallel_clean Files Cleanup.Exception ReDo
, testProperty "2-threadsEquivFailingNoOp"
$ expectFailure $ withMaxSuccess 1000 $ prop_parallel_clean (Eq True) Cleanup.NoBug NoOp
, testProperty "3-threadsRegularNoOp" $ prop_nparallel_clean 3 Regular Cleanup.NoBug NoOp
, testProperty "3-threadsRegular" $ prop_nparallel_clean 3 Regular Cleanup.NoBug ReDo
, testProperty "3-threadsRegularExc" $ expectFailure
$ prop_nparallel_clean 3 Regular Cleanup.Exception NoOp
, testProperty "3-threadsRegularExc"
$ expectFailure $ prop_nparallel_clean 3 Regular Cleanup.Exception ReDo
, testProperty "3-threadsFilesNoOp" $ prop_nparallel_clean 3 Files Cleanup.NoBug NoOp
, testProperty "3-threadsFiles" $ prop_nparallel_clean 3 Files Cleanup.NoBug ReDo
, testProperty "3-threadsFilesExcNoOp" $ prop_nparallel_clean 3 Files Cleanup.Exception NoOp
, testProperty "3-threadsFilesExc"
$ expectFailure $ withMaxSuccess 1000 $ prop_nparallel_clean 3 Files Cleanup.Exception ReDo
, testProperty "3-threadsFilesExcAfter" $ prop_nparallel_clean 3 Files Cleanup.ExcAfter NoOp
, testProperty "3-threadsEquivNoOp" $ prop_nparallel_clean 3 (Eq False) Cleanup.NoBug NoOp
, testProperty "3-threadsEquivFailingNoOp"
$ expectFailure $ withMaxSuccess 1000 $ prop_nparallel_clean 3 (Eq True) Cleanup.NoBug NoOp
]
, testGroup "SQLite"
[ testProperty "Parallel" prop_parallel_sqlite
]
, testGroup "Rqlite"
[ whenDocker docker0 "rqlite" $ testProperty "parallel" $ withMaxSuccess 10 $ prop_parallel_rqlite (Just Weak)
-- we currently don't add other properties, because they interfere (Tasty runs tests on parallel)
-- , testProperty "sequential" $ withMaxSuccess 10 $ prop_sequential_rqlite (Just Weak)
-- , testProperty "sequential-stale" $ expectFailure $ prop_sequential_rqlite (Just RQlite.None)
]
, testGroup "ErrorEncountered"
[ testProperty "Sequential" prop_error_sequential
, testProperty "Parallel" prop_error_parallel
, testProperty "2-Parallel" $ prop_error_nparallel 2
, testProperty "3-Parallel" $ prop_error_nparallel 3
, testProperty "4-Parallel" $ prop_error_nparallel 4
]
, testGroup "CrudWebserver"
[ webServer docker0 WS.None 8800 "NoBug" WS.prop_crudWebserverDb
, webServer docker0 WS.Logic 8801 "LogicBug" (expectFailure . WS.prop_crudWebserverDb)
, webServer docker0 WS.Race 8802 "NoRaceBug" WS.prop_crudWebserverDb
, webServer docker0 WS.Race 8803 "RaceBug" (expectFailure . WS.prop_crudWebserverDbParallel)
]
, testGroup "Bookstore"
[ dataBase docker0 "NoBug" (Store.prop_bookstore Store.NoBug)
, dataBase docker0 "SqlStatementBug"
$ expectFailure
. withMaxSuccess 500
. Store.prop_bookstore Bug
, dataBase docker0 "InputValidationBug"
$ expectFailure
. withMaxSuccess 500
. Store.prop_bookstore Injection
]
, testGroup "TicketDispenser"
[ testProperty "Sequential" prop_ticketDispenser
, testProperty "ParallelWithExclusiveLock" (withMaxSuccess 30
prop_ticketDispenserParallelOK)
, testProperty "ParallelWithSharedLock" (expectFailure
prop_ticketDispenserParallelBad)
, testProperty "2-ParallelWithExclusiveLock" (prop_ticketDispenserNParallelOK 2)
, testProperty "3-ParallelWithExclusiveLock" (prop_ticketDispenserNParallelOK 3)
, testProperty "4-ParallelWithExclusiveLock" (prop_ticketDispenserNParallelOK 4)
, testProperty "3-ParallelWithSharedLock" (expectFailure $
prop_ticketDispenserNParallelBad 3)
]
, testGroup "Mock"
[ testProperty "sequential" prop_sequential_mock
, testProperty "parallel" prop_parallel_mock
, testProperty "nparallel" prop_nparallel_mock
]
, testGroup "CircularBuffer"
[ testProperty "unpropNoSizeCheck"
(expectFailure (withMaxSuccess 1000 unpropNoSizeCheck))
, testProperty "unpropFullIsEmpty"
(expectFailure (withMaxSuccess 1000 unpropFullIsEmpty))
, testProperty "unpropBadRem"
(expectFailure (withMaxSuccess 1000 unpropBadRem))
, testProperty "unpropStillBadRem"
(expectFailure (withMaxSuccess 1000 unpropStillBadRem))
, testProperty "prop_circularBuffer"
prop_circularBuffer
]
, testGroup "Echo"
[ testProperty "Sequential" prop_echoOK
, testProperty "ParallelOk" (prop_echoParallelOK False)
, testProperty "ParallelBad" -- See issue #218.
(expectFailure (prop_echoParallelOK True))
, testProperty "2-Parallel" (prop_echoNParallelOK 2 False)
, testProperty "3-Parallel" (prop_echoNParallelOK 3 False)
, testProperty "Parallel bad, 2 threads, see issue #218"
(expectFailure (prop_echoNParallelOK 2 True))
, testProperty "Parallel bad, 3 threads, see issue #218"
(expectFailure (prop_echoNParallelOK 3 True))
]
, testGroup "ProcessRegistry"
[ testProperty "Sequential" (prop_processRegistry (statsDb "processRegistry"))
]
, testGroup "UnionFind"
[ testProperty "Sequential" UnionFind.prop_unionFindSequential ]
, testGroup "Lockstep"
[ testProperty "IORefs_Sequential" prop_IORefs_sequential
]
]
where
statsDb :: PropertyName -> StatsDb IO
statsDb = fileStatsDb "/tmp/stats-db"
webServer docker bug port test prop
| docker = withResource (WS.setup bug WS.connectionString port) WS.cleanup
(const (testProperty test (prop port)))
| otherwise = testCase ("No docker or running on CI, skipping: " ++ test) (return ())
dataBase docker test prop
| docker = withResource Store.setup
Store.cleanup
(\io -> testProperty test (prop (snd <$> io)))
| otherwise = testCase ("No docker, skipping: " ++ test) (return ())
whenDocker docker test prop
| docker = prop
| otherwise = testCase ("No docker, skipping: " ++ test) (return ())
------------------------------------------------------------------------
main :: IO ()
main = do
  -- Check if docker is available.
ec <- rawSystemNoStdout "docker" ["version"]
`catch` (\(_ :: IOError) -> return (ExitFailure 127))
let docker = case ec of
ExitSuccess -> True
ExitFailure _ -> False
defaultMain (tests docker)
where
rawSystemNoStdout cmd args =
withCreateProcess
(proc cmd args) { std_out = CreatePipe }
(\_ _ _ -> waitForProcess)
|
advancedtelematic/quickcheck-state-machine-model
|
test/Spec.hs
|
bsd-3-clause
| 12,617
| 0
| 15
| 3,585
| 2,429
| 1,227
| 1,202
| 207
| 2
|
{-# LANGUAGE FlexibleContexts #-}
import AI.Layer
import AI.Neuron
import AI.Network
import AI.Network.FeedForwardNetwork
import AI.Trainer
import AI.Trainer.BackpropTrainer
--import Network.Visualizations
import Numeric.LinearAlgebra
import System.IO
import System.Random
main :: IO ()
main = do
-- Convolutional Settings
let field = 3
let stride = 0
let padding = 0
let filters = 2
let dimensions = 1
let widthIn = 4
let heightIn = 4
let widthOut = 2
let heightOut = 2
let connectivity = connectLocally field stride padding filters dimensions widthIn heightIn widthOut heightOut
let randomization = randomizeLocally field stride padding filters dimensions widthIn heightIn widthOut heightOut
g <- newStdGen
let l = LayerDefinition sigmoidNeuron (16 * dimensions) connectFully randomizeFully
let l' = LayerDefinition sigmoidNeuron (4 * filters) connectivity randomization
let n = createNetwork uniforms g [l, l'] :: FeedForwardNetwork
print $ predict (fromList [0..15]) n
|
jbarrow/LambdaNet
|
examples/Convolutional.hs
|
mit
| 1,106
| 0
| 12
| 268
| 308
| 153
| 155
| 28
| 1
|
--------------------------------------------------------------------------------
{-|
Module : Variable
Copyright : (c) Daan Leijen 2003
License : wxWindows
Maintainer : wxhaskell-devel@lists.sourceforge.net
Stability : provisional
Portability : portable
Mutable variables.
-}
--------------------------------------------------------------------------------
module Graphics.UI.WX.Variable
( variable
) where
import Control.Concurrent.STM.TVar
import Graphics.UI.WX.Types( Var, varGet, varSet, varCreate, varUpdate )
import Graphics.UI.WX.Attributes
import Graphics.UI.WX.Classes
{--------------------------------------------------------------------
--------------------------------------------------------------------}
instance Valued TVar where
value = makeAttr "value" varGet varSet varUpdate
-- | Create a mutable variable. Change the value using the |value| attribute.
variable :: [Prop (Var a)] -> IO (Var a)
variable props
= do v <- varCreate (error "Graphics.UI.WX.Variable: uninitialized variable, use the 'value' attribute at creation")
set v props
return v
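-- Example (editorial addition): create a counter variable, bump it via the
-- 'value' attribute defined above, and read it back. A sketch using the
-- standard get/set combinators from Graphics.UI.WX.
--
-- > do v <- variable [value := (0 :: Int)]
-- >    set v [value :~ (+1)]
-- >    n <- get v value
-- >    print n  -- prints 1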
|
sherwoodwang/wxHaskell
|
wx/src/Graphics/UI/WX/Variable.hs
|
lgpl-2.1
| 1,134
| 0
| 10
| 172
| 153
| 87
| 66
| 13
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fa-IR">
<title>Active Scan Rules - Alpha | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>نمایه</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>جستجو</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/ascanrulesAlpha/resources/help_fa_IR/helpset_fa_IR.hs
|
apache-2.0
| 996
| 84
| 65
| 163
| 418
| 211
| 207
| -1
| -1
|
module UtilSpec where
import System.Directory
import Test.Hspec
import Test.Mockery.Directory
import Util
spec :: Spec
spec = do
describe "listDirectories" $ do
it "lists directories" $ do
inTempDirectory $ do
createDirectory "foo"
createDirectory "bar"
writeFile "baz" ""
listDirectories "." `shouldReturn` ["./bar", "./foo"]
describe "listFilesRecursively" $ do
it "lists files recursively" $ do
inTempDirectory $ do
touch "foo"
touch "bar/baz"
listFilesRecursively "." >>= (`shouldMatchList` ["./foo", "./bar/baz"])
describe "fingerprint" $ do
it "returns a fingerprint for files in specified directory" $ do
hash1 <- inTempDirectory $ do
writeFile "foo" "some content"
touch "bar/baz"
writeFile "bar/baz" "some other content"
fingerprint "."
hash2 <- inTempDirectory $ do
writeFile "foo" "some content"
touch "bar/baz"
writeFile "bar/baz" "some other content"
fingerprint "."
hash1 `shouldBe` hash2
it "takes file contents into account" $ do
hash1 <- inTempDirectory $ do
writeFile "foo" "some content"
fingerprint "."
hash2 <- inTempDirectory $ do
writeFile "foo" "some other content"
fingerprint "."
hash1 `shouldSatisfy` (/= hash2)
it "takes filenames into account" $ do
hash1 <- inTempDirectory $ do
writeFile "foo" "some content"
writeFile "bar" "some other content"
fingerprint "."
hash2 <- inTempDirectory $ do
writeFile "bar" "some content"
writeFile "foo" "some other content"
fingerprint "."
hash1 `shouldSatisfy` (/= hash2)
it "ignores base path of filenames" $ do
hash1 <- inTempDirectory $ do
touch "foo/bar"
writeFile "foo/bar" "some content"
fingerprint "foo"
hash2 <- inTempDirectory $ do
touch "baz/bar"
writeFile "baz/bar" "some content"
fingerprint "baz"
hash1 `shouldBe` hash2
describe "cachedIO" $ do
it "runs given action" $ do
inTempDirectory $ do
cachedIO "foo" (return "bar") `shouldReturn` "bar"
it "caches the result of the given action" $ do
inTempDirectory $ do
_ <- cachedIO "foo" (return "bar")
readFile "foo" `shouldReturn` "bar"
it "reuses cached result" $ do
inTempDirectory $ do
writeFile "foo" "bar"
cachedIO "foo" undefined `shouldReturn` "bar"
describe "getCabalFiles" $ around_ inTempDirectory $ do
it "returns all cabal files in the current directory" $ do
touch "foo.cabal"
touch "bar.cabal"
getCabalFiles "." >>= (`shouldMatchList` ["bar.cabal", "foo.cabal"])
it "ignores dot files" $ do
touch ".foo.cabal"
getCabalFiles "." `shouldReturn` []
|
sol/tinc
|
test/UtilSpec.hs
|
mit
| 2,924
| 0
| 19
| 867
| 733
| 323
| 410
| 81
| 1
|
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
-- |
-- Module : Data.Text.Internal.Fusion.Internal
-- Copyright : (c) Roman Leshchinskiy 2008,
-- (c) Bryan O'Sullivan 2009
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : portable
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- Size hints.
module Data.Text.Internal.Fusion.Size
(
Size
, exactly
, exactSize
, maxSize
, unknownSize
, smaller
, larger
, upperBound
, isEmpty
) where
#if defined(ASSERTS)
import Control.Exception (assert)
#endif
data Size = Exact {-# UNPACK #-} !Int -- ^ Exact size.
| Max {-# UNPACK #-} !Int -- ^ Upper bound on size.
| Unknown -- ^ Unknown size.
deriving (Eq, Show)
exactly :: Size -> Maybe Int
exactly (Exact n) = Just n
exactly _ = Nothing
{-# INLINE exactly #-}
exactSize :: Int -> Size
exactSize n =
#if defined(ASSERTS)
assert (n >= 0)
#endif
Exact n
{-# INLINE exactSize #-}
maxSize :: Int -> Size
maxSize n =
#if defined(ASSERTS)
assert (n >= 0)
#endif
Max n
{-# INLINE maxSize #-}
unknownSize :: Size
unknownSize = Unknown
{-# INLINE unknownSize #-}
instance Num Size where
(+) = addSize
(-) = subtractSize
(*) = mulSize
fromInteger = f where f = Exact . fromInteger
{-# INLINE f #-}
add :: Int -> Int -> Int
add m n | mn >= 0 = mn
| otherwise = overflowError
where mn = m + n
{-# INLINE add #-}
addSize :: Size -> Size -> Size
addSize (Exact m) (Exact n) = Exact (add m n)
addSize (Exact m) (Max n) = Max (add m n)
addSize (Max m) (Exact n) = Max (add m n)
addSize (Max m) (Max n) = Max (add m n)
addSize _ _ = Unknown
{-# INLINE addSize #-}
subtractSize :: Size -> Size -> Size
subtractSize (Exact m) (Exact n) = Exact (max (m-n) 0)
subtractSize (Exact m) (Max _) = Max m
subtractSize (Max m) (Exact n) = Max (max (m-n) 0)
subtractSize a@(Max _) (Max _) = a
subtractSize a@(Max _) Unknown = a
subtractSize _ _ = Unknown
{-# INLINE subtractSize #-}
mul :: Int -> Int -> Int
mul m n
| m <= maxBound `quot` n = m * n
| otherwise = overflowError
{-# INLINE mul #-}
mulSize :: Size -> Size -> Size
mulSize (Exact m) (Exact n) = Exact (mul m n)
mulSize (Exact m) (Max n) = Max (mul m n)
mulSize (Max m) (Exact n) = Max (mul m n)
mulSize (Max m) (Max n) = Max (mul m n)
mulSize _ _ = Unknown
{-# INLINE mulSize #-}
-- | Minimum of two size hints.
smaller :: Size -> Size -> Size
smaller (Exact m) (Exact n) = Exact (m `min` n)
smaller (Exact m) (Max n) = Max (m `min` n)
smaller (Exact m) Unknown = Max m
smaller (Max m) (Exact n) = Max (m `min` n)
smaller (Max m) (Max n) = Max (m `min` n)
smaller a@(Max _) Unknown = a
smaller Unknown (Exact n) = Max n
smaller Unknown (Max n) = Max n
smaller Unknown Unknown = Unknown
{-# INLINE smaller #-}
-- | Maximum of two size hints.
larger :: Size -> Size -> Size
larger (Exact m) (Exact n) = Exact (m `max` n)
larger a@(Exact m) b@(Max n) | m >= n = a
| otherwise = b
larger a@(Max m) b@(Exact n) | n >= m = b
| otherwise = a
larger (Max m) (Max n) = Max (m `max` n)
larger _ _ = Unknown
{-# INLINE larger #-}
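-- Illustrative values (comment added for clarity; they follow directly from
-- the definitions above, since Size derives Eq and Show):
--   exactSize 3 + maxSize 4     == Max 7     -- addSize via the Num instance
--   smaller (Exact 5) Unknown   == Max 5
--   larger  (Max 2) (Exact 6)   == Exact 6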
-- | Compute the maximum size from a size hint, if possible.
upperBound :: Int -> Size -> Int
upperBound _ (Exact n) = n
upperBound _ (Max n) = n
upperBound k _ = k
{-# INLINE upperBound #-}
isEmpty :: Size -> Bool
isEmpty (Exact n) = n <= 0
isEmpty (Max n) = n <= 0
isEmpty _ = False
{-# INLINE isEmpty #-}
overflowError :: Int
overflowError = error "Data.Text.Internal.Fusion.Size: size overflow"
|
fpco/text-stream-decode
|
text/Data/Text/Internal/Fusion/Size.hs
|
mit
| 4,136
| 0
| 9
| 1,267
| 1,405
| 745
| 660
| 102
| 1
|
{-@ LIQUID "--no-termination" @-}
module Eval (eval) where
import Language.Haskell.Liquid.Prelude (liquidError)
import Prelude hiding (lookup)
import Data.Set (Set (..))
{-@ embed Set as Set_Set @-}
type Val = Int
type Bndr = String
data Expr = Const Int
| Var Bndr
| Plus Expr Expr
| Let Bndr Expr Expr
type Env = [(Bndr, Val)]
------------------------------------------------------------------
{-@ lookup :: x:Bndr -> {v:Env | Set_mem x (vars v)} -> Val @-}
lookup :: Bndr -> Env -> Val
--------------------- -------------------------------------------
lookup x ((y,v):env)
| x == y = v
| otherwise = lookup x env
lookup x [] = liquidError "Unbound Variable"
------------------------------------------------------------------
{-@ eval :: g:Env -> CExpr g -> Val @-}
------------------------------------------------------------------
eval env (Const i) = i
eval env (Var x) = lookup x env
eval env (Plus e1 e2) = eval env e1 + eval env e2
eval env (Let x e1 e2) = eval env' e2
where
env' = (x, eval env e1) : env
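-- Illustrative use (comment only, not in the original file): with
-- env = [("x", 1)] the refinement on 'lookup' is satisfied, and
-- eval env (Plus (Var "x") (Const 2)) evaluates to 3.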
{-@ type CExpr G = {v:Expr | Set_sub (free v) (vars G)} @-}
{-@ measure vars :: Env -> (Set Bndr)
vars ([]) = {v | Set_emp v }
vars (b:env) = {v | v = Set_cup (Set_sng (fst b)) (vars env)}
@-}
{-@ measure free :: Expr -> (Set Bndr)
free (Const i) = {v | Set_emp v}
free (Var x) = {v | v = Set_sng x}
free (Plus e1 e2) = {v | v = Set_cup (free e1) (free e2)}
free (Let x e1 e2) = {v | v = Set_cup (free e1) (Set_dif (free e2) (Set_sng x))}
@-}
|
ssaavedra/liquidhaskell
|
tests/pos/Eval.hs
|
bsd-3-clause
| 1,632
| 0
| 9
| 436
| 303
| 169
| 134
| 21
| 1
|
module TypeClassConstraintsInformedHoles where
-- Make sure that constraints imposed by the call site
-- are handled. See trac #14273
pleaseShow :: Show a => Bool -> a -> Maybe String
pleaseShow False _ = Nothing
pleaseShow True a = Just (show _a)
k :: String
k = "I fit into the hole in foo!"
foo :: [a] -> String
foo xs = show (_h ++ [])
|
shlevy/ghc
|
testsuite/tests/typecheck/should_compile/T14273.hs
|
bsd-3-clause
| 343
| 0
| 8
| 69
| 100
| 53
| 47
| 8
| 1
|
{-# LANGUAGE KindSignatures, PatternGuards #-}
-- Adapted from the LogicT code at http://okmij.org/ftp/Haskell/LogicT.tar.gz
-- The TicTacToe problem for the NxN board
-- M consecutive 'X' or 'O' marks in each column, row, or diagonal
-- is a win for the corresponding player
-- This code is heavily based on the code posted by Andrew Bromage on
-- the Haskell mailing list on June 22, 2005:
-- http://www.haskell.org/pipermail/haskell/2005-June/016037.html
-- To compile this code
-- ghc --make -O2 -main-is a12a1 TicTacToe.hs
-- To run this code
-- GHCRTS="-tstderr" /usr/bin/time ./TicTacToe
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Logic.Class
import qualified Data.Map as Map
import Data.List
import System.Environment
import System.IO
-- Implementations of Logic:
import Fixed.Logic -- our new implementation
--import Control.Monad.Logic -- two continuation implementation
--import LogicBenchmarks.LogicCC -- delimited continuations implementation
bagofN :: MonadLogic m => Maybe Int -> m a -> m [a]
bagofN (Just n) _ | n <= 0 = return []
bagofN n m = msplit m >>= bagofN'
where bagofN' Nothing = return []
bagofN' (Just (a,m')) = bagofN (fmap (-1 +) n) m' >>= (return . (a:))
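-- For intuition (illustrative comment; assumes a plain list-style MonadLogic
-- instance): bagofN (Just 2) (msum (map return [1,2,3])) keeps only the first
-- two answers, while bagofN Nothing collects all of them.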
n = 5 -- Dimension of the board
m = 4 -- Number of consecutive marks needed for win
-- ----------------------------------------------------------------------
-- Representation of the board
type PlayerProc t (m :: * -> *) = Mark -> Game -> t m (Int,Game)
-- We also use Mark to mark the players
data Mark = X | O deriving (Ord,Eq,Show)
-- Location on the board: 0..n-1
type Loc = (Int,Int)
-- The current position: What Mark is at Loc. Initially empty
type Board = Map.Map Loc Mark
-- Determining the number of consecutive marks around the location loc
-- (in a particular or all directions)
-- movement functions: Loc -> Loc from one location to the neighboring one.
-- For each direction (row, column, diagonal) we provide two
-- functions: one moves into the positive direction, and
-- the other in the negative direction
type MoveFn = Loc -> Loc
-- The current position of the game then
data Game = Game {
-- The location and the mark of the
-- player who first achieved the goal
winner :: Maybe (Loc,Mark),
-- The list of empty locations
moves :: [Loc],
board :: Board
}
playGameAI :: Int -> Int -> Int -> Int -> IO ()
playGameAI n m dlim blim = observeT $ game (X,ai) (O,ai) where
move'loc'fn :: [(MoveFn,MoveFn)]
move'loc'fn =
[(\ (x,y) -> (x-1,y) , \ (x,y) -> (x+1,y)), -- up/down the column y
(\ (x,y) -> (x,y-1) , \ (x,y) -> (x,y+1)), -- left/right the row x
(\ (x,y) -> (x-1,y-1), \ (x,y) -> (x+1,y+1)),
(\ (x,y) -> (x-1,y+1), \ (x,y) -> (x+1,y-1))]
good'loc (x,y) = x>=0 && y>=0 && x<n && y<n
-- move as far as possible from the location loc into the direction specified
-- by mfn so long as new location is still marked by 'm'. Return the
-- last location marked by 'm' and the number of the moves performed.
extend'loc :: Board -> MoveFn -> Mark -> Loc -> (Int,Loc)
extend'loc board mfn m loc = loop 0 loc (mfn loc)
where loop n loc loc' | good'loc loc',
Just m' <- Map.lookup loc' board,
m' == m
= loop (n+1) loc' (mfn loc')
loop n loc _ = (n,loc)
max'cluster :: Board -> Mark -> Loc -> (Int,Loc)
max'cluster board m loc = maximumBy (\ (n1,_) (n2,_) -> compare n1 n2) $
(map cluster'dir move'loc'fn)
where cluster'dir (mfn1,mfn2) =
let (n1,end1) = extend'loc board mfn1 m loc
(n2,end2) = extend'loc board mfn2 m loc
in (n1+n2+1,end1)
new'game :: Game
new'game = Game { winner = Nothing,
moves = map (\[x,y] ->(x,y)) $ sequence [[0..n-1],[0..n-1]],
board = Map.empty}
show'board fm = concatMap showrow [0..n-1]
where showrow i = concatMap (showmark i) [0..n-1] ++ "\n"
showmark i j = maybe " ." ((' ':) . show) $ Map.lookup (i,j) fm
-- Account for the move into location the 'loc' by the player 'p'
take'move :: Mark -> Loc -> Game -> Game
take'move p loc g =
Game { moves = delete loc (moves g),
board = board',
winner = let (n,l) = max'cluster board' p loc
in if n >= m then Just (l,p) else Nothing
}
where
board' = Map.insert loc p (board g)
-- The main game-playing function
game :: (MonadLogic (t m), MonadIO (t m)) =>
(Mark,PlayerProc t m) -> (Mark,PlayerProc t m) -> t m ()
game player1 player2
= game' player1 player2 new'game
where
game' player@(p,proc) other'player g
| Game{winner=Just k} <- g
= liftIO (putStrLn $ (show k) ++ " wins!")
| Game{moves=[]} <- g
= liftIO (putStrLn "Draw!")
| otherwise
= do
(_,g') <- once (proc p g)
-- liftIO (putStrLn $ show'board (board g'))
game' other'player player g'
-- Play as a human
human'player :: (MonadIO (t m), MonadTrans t) => PlayerProc t m
human'player p g = do
liftIO $ (putStrLn $ "Your (i,j) move as " ++ (show p))
let loop = liftIO getLine >>=
\s -> case (reads s) of
[(l,"")] -> return l
_ -> (liftIO $ putStrLn "Parse Error") >> loop
l@(i,j) <- loop
if elem l (moves g) then return (1,(take'move p l g))
else (liftIO $ putStrLn "Bad Move") >> human'player p g
-- ----------------------------------------------------------------------
-- Heuristics
{-
Andrew Bromage wrote:
This is a simple problem in AI. Basically, you're trying to do a minimax
search. If this is a "goal state" (i.e. an ACTUAL win, lose or draw), then
we're done. If not, we examine all of the successor states, assuming that
our opponent will make their best move, and we pick the one that's best
for us.
-}
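-- In outline (summary comment for the code below): 'ai' scores every legal
-- move by recursively scoring the opponent's best reply and negating it, then
-- keeps the maximum (the usual negamax formulation of minimax).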
ai :: (MonadLogic (t m)) => PlayerProc t m
ai p g
| Game{winner=Just _} <- g
= return (estimate'state p g,g)
| Game{moves=[]} <- g
= return (estimate'state p g,g)
| otherwise
= do
wbs <- bagofN Nothing (do
m <- choose (moves g)
let g' = take'move p m g
(w,_) <- ai (other'player p) g'
return (-w,g'))
let (w,g') = maximumBy (\ (x,_) (y,_) -> compare x y) wbs
return (w,g')
-- Utility "axiom of choice" function:
-- A more sophisticated choice functions are possible
choose :: (MonadPlus m) => [a] -> m a
choose = msum . map return
other'player X = O
other'player O = X
-- the more the better
estimate'state :: Mark -> Game -> Int
estimate'state p g
| Game{winner=Just (_,p')} <- g
= if p == p' then score'win else score'lose
| Game{moves=[]} <- g
= 0 -- draw
| otherwise = 10
score'win = maxBound
score'lose = - maxBound
{-
Andrew Bromage wrote:
Unfortunately, this is too slow for interactive use. Certainly, I ran
out of patience after a minute. However, thankfully there are a couple
of safe heuristics which work just fine with tic-tac-toe.
The first is that if you can win in this move, you should do so.
The second is that if the first heuristic doesn't work, then you should
see if there is any move that your opponent could make where they could
win on the next move. If so, you should move to block it.
-}
first'move'wins p g =
do
m <- choose (moves g)
let g' = take'move p m g
guard (maybe False (\ (_,p') -> p' == p) (winner g'))
return (m,(score'win,g'))
minmax :: (Monad m, MonadLogic (t m)) =>
(Int->Int->PlayerProc t m) -> (Int->Int->PlayerProc t m)
minmax self dlim blim p g =
do
wbs <- bagofN (Just blim)
(do
m <- choose (moves g)
let g' = take'move p m g
if dlim <= 0 then return (estimate'state p g',g')
else do (w,_) <- self (dlim-1) blim
(other'player p) g'
return (-w,g'))
let (w,g') = maximumBy (\ (x,_) (y,_) -> compare x y) wbs
return (w,g')
ai' :: (MonadLogic (t m), Monad m) => PlayerProc t m
ai' p g = ai'lim dlim blim p g
where
ai'lim dlim blim p g
| Game{winner=Just _} <- g
= return (estimate'state p g,g)
| Game{moves=[]} <- g
= return (estimate'state p g,g)
| otherwise
= ifte (once (first'move'wins p g))
(return . snd)
(ifte (once (first'move'wins (other'player p) g))
(\ (m,_) -> do
let g' = take'move p m g
(w,_) <- ai'lim dlim blim (other'player p) g'
return (-w,g'))
(minmax ai'lim dlim blim p g))
main = do args <- getArgs
let n = read (head args)
let m = read (head (tail args))
let dlim = read (head (tail $ tail args))
let blim = read (head (tail $ tail $ tail args))
playGameAI n m dlim blim
|
atzeus/reflectionwithoutremorse
|
TicTacToe.hs
|
mit
| 8,882
| 45
| 23
| 2,414
| 3,029
| 1,607
| 1,422
| 155
| 8
|
{-# LANGUAGE DoAndIfThenElse, MultiParamTypeClasses, OverloadedStrings, RecordWildCards, ScopedTypeVariables #-}
module Main where
import Control.Exception (catch, throw)
import Control.Monad.IO.Class (MonadIO (..))
import Data.Maybe
import Data.Monoid ((<>))
import Network.DO
import Prelude as P hiding (error)
import System.Console.GetOpt
import System.Environment
import System.IO
import System.IO.Error (isDoesNotExistError)
generalOptions :: [OptDescr (ToolConfiguration -> ToolConfiguration)]
generalOptions = [ Option ['t'] ["auth-token"]
(ReqArg ( \ t config -> config { authToken = Just t}) "STRING")
"Authentication token used for communicating with server (default: <extracted from $AUTH_TOKEN environment)"
, Option ['q'] ["quiet"]
(NoArg ( \ config -> config { quiet = True}))
"Don't send notifications of operations to Slack (default: False)"
]
createDropletOptions :: [OptDescr (BoxConfiguration -> BoxConfiguration)]
createDropletOptions = [ Option ['n'] ["name"]
(ReqArg ( \ n config -> config { configName = n }) "STRING")
"name of the box to create (default: <randomly generated name>)"
, Option ['r'] ["region"]
(ReqArg ( \ r config -> config { boxRegion = RegionSlug r}) "REGION")
"region where the instance is to be deployed (default: 'ams2')"
, Option ['b'] ["background"]
(NoArg ( \ config -> config { backgroundCreate = True}))
"create droplet in the background, returning immediately (default: 'false')"
, Option ['s'] ["size"]
(ReqArg ( \ s config -> config { size = read s}) "SIZE")
"size of instance to deploy (default: '4gb')"
, Option ['i'] ["image-slug"]
(ReqArg ( \ i config -> config { configImageSlug = i}) "IMAGE")
"slug of image to deploy (default: 'ubuntu-14-04-x64')"
, Option ['k'] ["key"]
(ReqArg ( \ k config -> config { keys = read k ++ keys config}) "[KEY1,..]")
"add a key to access box (default: '[]')"
]
getSlackUriFromEnv :: IO (Maybe URI)
getSlackUriFromEnv = (Just `fmap` getEnv "SLACK_URI") `catch` (\ (e :: IOError) -> if isDoesNotExistError e then return Nothing else throw e)
defaultBox :: IO BoxConfiguration
defaultBox = do
name <- generateName
return $ BoxConfiguration name (RegionSlug "ams2") G4 defaultImage [] False
defaultTool :: IO ToolConfiguration
defaultTool = do
uri <- getSlackUriFromEnv
tok <- getAuthFromEnv
return $ Tool uri tok False
usage :: String
usage = usageInfo (banner ++ "\n" ++ usageInfo "General options:" generalOptions ++ "\nCommands options:") createDropletOptions
where
banner = "Usage: toolbox [OPTIONS..] COMMAND [CMD OPTIONS...]"
parseOptions :: [String] -> IO (ToolConfiguration, [String])
parseOptions args = do
d <- defaultTool
case getOpt RequireOrder generalOptions args of
(opts, coms, []) -> return ((foldl (flip P.id) d opts), coms)
(_,_,errs) -> ioError(userError (concat errs ++ usage))
main :: IO ()
main = do
hSetBuffering stdin NoBuffering
args <- getArgs
(_, cmds) <- parseOptions args
runDOEnv (parseCommandOptions cmds)
parseCommandOptions :: (MonadIO m) => [String] -> Command m ()
parseCommandOptions ("droplets":"create":args) = do
b <- liftIO defaultBox
case getOpt Permute createDropletOptions args of
(c,[],[]) -> createDroplet (foldl (flip P.id) b c) >>= outputResult
(_,_,errs) -> liftIO $ ioError (userError (concat errs ++ usage))
parseCommandOptions ("droplets":"destroy":dropletId:[]) = destroyDroplet (P.read dropletId) >>= outputResult
parseCommandOptions ("droplets":"list":_) = listDroplets >>= outputResult
parseCommandOptions ("droplets":"power_off":dropletId:[])
= dropletAction (P.read dropletId) DoPowerOff >>= outputResult
parseCommandOptions ("droplets":"power_on":dropletId:[])
= dropletAction (P.read dropletId) DoPowerOn >>= outputResult
parseCommandOptions ("droplets":"snapshot":dropletId:snapshotName:[])
= dropletAction (P.read dropletId) (CreateSnapshot snapshotName) >>= outputResult
parseCommandOptions ("droplets":"action":dropletId:actionId:[])
= getAction (P.read dropletId) (P.read actionId) >>= outputResult
parseCommandOptions ("droplets":dropletId:"snapshots":[])
= listDropletSnapshots (P.read dropletId) >>= outputResult
parseCommandOptions ("droplets":dropletId:[])
= showDroplet (P.read dropletId) >>= outputResult
parseCommandOptions ("droplets":"ssh":dropletIdOrName:[])
= (do
droplets <- findByIdOrName dropletIdOrName <$> listDroplets
case droplets of
(did:_) -> dropletConsole did
[] -> return (error $ "no droplet with id or name " <> dropletIdOrName)
) >>= outputResult
parseCommandOptions ("images":"list":_) = listImages >>= outputResult
parseCommandOptions ("regions":"list":_) = listRegions >>= outputResult
parseCommandOptions ("keys":"list":_) = listKeys >>= outputResult
parseCommandOptions ("sizes":"list":_) = listSizes >>= outputResult
parseCommandOptions ("ips":"list":_) = listFloatingIPs >>= outputResult
parseCommandOptions ("ips":"create":dropletOrRegion:[]) = do
regions <- listRegions
outputResult =<< if dropletOrRegion `elem` map regionSlug regions
then createFloatingIP (TargetRegion dropletOrRegion)
else createFloatingIP (TargetDroplet $ read dropletOrRegion)
parseCommandOptions ("ips":"delete":ip:[]) = deleteFloatingIP (P.read ip) >>= outputResult
parseCommandOptions ("ips":ip:"assign":did:[]) = assignFloatingIP (P.read ip) (P.read did) >>= outputResult
parseCommandOptions ("ips":ip:"unassign": []) = unassignFloatingIP (P.read ip) >>= outputResult
parseCommandOptions ("dns":"list":_) = listDomains >>= outputResult
parseCommandOptions ("dns":"create":name:ip:[]) = createDomain (P.read name) (P.read ip) >>= outputResult
parseCommandOptions ("dns":"delete":name:[]) = deleteDomain (P.read name) >>= outputResult
parseCommandOptions ("dns":name:"list":_) = listRecords (P.read name) >>= outputResult
parseCommandOptions ("dns":name:"create":rest) =
case (parseRecord $ unwords rest) of
Left (Error e) -> fail e
Right r -> createRecord (P.read name) r >>= outputResult
parseCommandOptions ("dns":name:"delete":rid:[]) = deleteRecord (P.read name) (P.read rid) >>= outputResult
parseCommandOptions e = fail $ "I don't know how to interpret commands " ++ unwords e ++ "\n" ++ usage
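-- Example invocations matched by the patterns above (illustrative; names and
-- ids are placeholders, and the binary name follows the usage banner):
--   toolbox droplets list
--   toolbox droplets create -n mybox -r ams2
--   toolbox ips 1.2.3.4 assign 12345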
|
capital-match/hdo
|
main/hdo.hs
|
mit
| 7,731
| 0
| 16
| 2,394
| 2,111
| 1,101
| 1,010
| 114
| 5
|
module Args
where
import Options
data ProgramOptions = ProgramOptions
{
inputPath :: String,
clusters :: Int,
precision :: Double,
demension :: Int,
separator :: String,
ignoreFirstCol :: Bool,
ignoreLastCol :: Bool,
ignoreFirstLine :: Bool,
distance :: Int,
startWithRandomVector :: Bool,
outPath :: String
}
instance Options ProgramOptions where
defineOptions = pure ProgramOptions
<*> simpleOption "inputPath" ""
"A path to input file."
<*> simpleOption "clusters" 2
"A number of clusters."
<*> simpleOption "precision" 1
"Precision for FCM algorithm."
<*> simpleOption "demension" 2
"A demension of the feature vector"
<*> simpleOption "separator" ","
"A separator of the csv file"
<*> simpleOption "ignoreFirstCol" False
"The csv parser should ignore the first column."
<*> simpleOption "ignoreLastCol" False
"The csv parser should ignore the last column."
<*> simpleOption "ignoreFirstLine" False
"The csv parser should ignore the first line."
<*> simpleOption "distance" 0
"A distance type for FCM algorithm. 0 - Euclidean. 1 - Hamming."
<*> simpleOption "startWithRandomVector" False
"A initial action for FCM algorithm. False - random matrix initialization. True - random choice of centers."
<*> simpleOption "outPath" ""
"A path to output file. Writes to the console if is empty."
|
xixi4Den/BSUIR-Haskell-lw1
|
src/Args.hs
|
mit
| 1,615
| 0
| 17
| 510
| 219
| 117
| 102
| 38
| 0
|
-- Which string is worth more?
-- https://www.codewars.com/kata/5840586b5225616069000001
module Kata where
import Data.Char (ord)
highestValue :: String -> String -> String
highestValue a b = if f a >= f b then a else b
where f = sum . map ord
|
gafiatulin/codewars
|
src/7 kyu/StringWorth.hs
|
mit
| 249
| 0
| 8
| 47
| 71
| 39
| 32
| 5
| 2
|
module Main (main) where
import FullBatteries (projectName)
main :: IO ()
main = putStrLn ("Executable for " ++ projectName)
|
vrom911/hs-init
|
summoner-cli/examples/full-batteries/app/Main.hs
|
mit
| 128
| 0
| 7
| 22
| 42
| 24
| 18
| 4
| 1
|
{-# LANGUAGE RecordWildCards, FlexibleContexts #-}
module Utils.Error where
import Control.Monad.Error
import Text.Printf
data Stage = Parser | Rewrite | CPS | ClosConv | CodeGen
type Loc = String
type Info = String
data Failure = Failure { state :: Stage
, location :: Loc
, summary :: Info }
presentError :: Failure -> String
presentError Failure{..} =
printf "Error! failure while %s at %s. \n %s" stateS location summary
where stateS = case state of
CPS -> "transforming to CPS"
ClosConv -> "converting closures and lambda lifting"
CodeGen -> "generating C code"
Parser -> "parsing"
Rewrite -> "rewriting"
failRW :: MonadError Failure m => Loc -> Info -> m a
failRW loc info = throwError $ Failure Rewrite loc info
failCPS :: MonadError Failure m => Loc -> Info -> m a
failCPS loc info = throwError $ Failure CPS loc info
failClos :: MonadError Failure m => Loc -> Info -> m a
failClos loc info = throwError $ Failure ClosConv loc info
failGen :: MonadError Failure m => Loc -> Info -> m a
failGen loc info = throwError $ Failure CodeGen loc info
|
jozefg/c_of_scheme
|
src/Utils/Error.hs
|
mit
| 1,176
| 0
| 9
| 311
| 333
| 174
| 159
| 27
| 5
|
module Language.Jass.Codegen.Statement(
genBodyBlocks
) where
import Language.Jass.Parser.AST
import Language.Jass.Codegen.Context
import Language.Jass.Codegen.Expression
import Language.Jass.Codegen.Type
import LLVM.General.AST as LLVM
import LLVM.General.AST.CallingConvention as LLVM
import LLVM.General.AST.Type as LLVM
import LLVM.General.AST.Constant as Const
import LLVM.General.AST.Instruction as Instr
import Control.Monad.Except
import Control.Arrow
import Data.Maybe
genBodyBlocks :: Name -> [Statement] -> Codegen [BasicBlock]
genBodyBlocks entryName stmts = do
epilogueName <- generateName "epilogue"
(start, blocks) <- genBlocks stmts epilogueName
let entryBlock = BasicBlock entryName [] (Do $ Br start [])
pushNewBlock epilogueName
retType <- getFunctionReturnType =<< getCurrentFunction
let endTerm = case retType of
VoidType -> Ret Nothing []
_ -> Unreachable []
appendCurrentBlock =<< getEpilogueInstructions
finishCurrentBlock endTerm
[endBlock] <- purgeBlocks
finalBlocks <- redirectToEpilogue retType epilogueName endBlock blocks
return $ entryBlock:finalBlocks
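-- Resulting layout (descriptive comment): an entry block that branches to the
-- first body block, the generated body blocks with their returns redirected to
-- a shared epilogue, and the epilogue itself, which either returns void
-- directly or returns a phi over the collected return values.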
-- | Replaces all returns in blocks to jumps into epilogue
redirectToEpilogue :: LLVM.Type -> Name -> BasicBlock -> [BasicBlock] -> Codegen [BasicBlock]
redirectToEpilogue VoidType epilogueName epilogueBlock blocks =
return $ (replaceRet <$> blocks) ++ [epilogueBlock]
where
replaceRet (BasicBlock n is (rn := Ret _ _)) = BasicBlock n is $ rn := Br epilogueName []
replaceRet (BasicBlock n is (Do (Ret _ _))) = BasicBlock n is $ Do $ Br epilogueName []
replaceRet b = b
redirectToEpilogue retType epilogueName epilogueBlock blocks = do
let (blocks', phiVals) = second catMaybes $ unzip (modifyBlocks <$> blocks)
epilogue' <- modifyEpilogue epilogueBlock phiVals
return $ blocks' ++ [epilogue']
where
modifyBlocks (BasicBlock n is (rn := Ret mop _)) = (BasicBlock n is (rn := Br epilogueName []), Just (fromJust mop, n))
modifyBlocks (BasicBlock n is (Do (Ret mop _))) = (BasicBlock n is (Do $ Br epilogueName []), Just (fromJust mop, n))
modifyBlocks b
| isJumpToEpilogue (getTerm b) = (b, Just (ConstantOperand $ Const.Undef retType, getName b))
| otherwise = (b, Nothing)
getTerm (BasicBlock _ _ (_ := term)) = term
getTerm (BasicBlock _ _ (Do term)) = term
getName (BasicBlock n _ _) = n
isJumpToEpilogue (Br e _) = e == epilogueName
isJumpToEpilogue (CondBr _ e1 e2 _) = e1 == epilogueName || e2 == epilogueName
isJumpToEpilogue _ = False
modifyEpilogue (BasicBlock n is _) phiVals = do
tempName <- generateName "return"
return $ BasicBlock n
((tempName := Phi retType phiVals []) : is)
(Do $ Ret (Just $ LocalReference retType tempName) [])
genBlocks :: [Statement] -> Name -> Codegen (Name, [BasicBlock])
genBlocks stmts nextBlock = catchBlocks $ do
startName <- generateName "block"
pushNewBlock startName
mapM_ genLLVMStatement stmts
finishCurrentBlock $ Br nextBlock []
blocks <- purgeBlocks
return (startName, reverse blocks)
catchBlocks :: Codegen a -> Codegen a
catchBlocks action = do
savedBlocks <- purgeBlocks
res <- action
purgeBlocks >> pushBlocks savedBlocks
return res
genLLVMStatement :: Statement -> Codegen ()
genLLVMStatement (SetStatement _ _ name expr) = do
(exprName, exprInstr) <- genLLVMExpression expr
(varType, varRef) <- getReference name
let saveInstr = Do $ Store False
varRef
(LocalReference varType exprName)
Nothing 0 []
appendCurrentBlock $ exprInstr ++ [saveInstr]
genLLVMStatement (SetArrayStatement _ _ name indExpr valExpr) = do
(indExprName, indExprInstr) <- genLLVMExpression indExpr
(valExprName, valExprInstr) <- genLLVMExpression valExpr
(elemType, varRef) <- getReference name
valType <- toLLVMType =<< inferType valExpr
indType <- toLLVMType JInteger
indexPtrName <- generateName "index"
let indexInstr = indexPtrName := Instr.GetElementPtr True
varRef
[ConstantOperand $ Const.Int 32 0, LocalReference indType indExprName] []
storeInstr = Do $ Store False
(LocalReference elemType indexPtrName)
(LocalReference valType valExprName) Nothing 0 []
appendCurrentBlock $ indExprInstr ++ valExprInstr ++ [indexInstr, storeInstr]
genLLVMStatement (IfThenElseStatement _ _ condExpr thenStmts elseifs) = do
(condExprName, condExprInstr) <- genLLVMExpression condExpr
afterBlockName <- generateName "block_afterif"
(thenStart, thenBlocks) <- genBlocks thenStmts afterBlockName
elseifTriples <- mapM (genElseIf afterBlockName) elseifs
let firstBranchName = getFirstElseBlock afterBlockName elseifTriples
let condJump = CondBr
(LocalReference i1 condExprName)
thenStart -- true
firstBranchName -- false
[]
appendCurrentBlock condExprInstr
finishCurrentBlock condJump
pushBlocks thenBlocks
(_, blocks) <- catchBlocks $ foldM linkElses (afterBlockName, []) $ reverse elseifTriples
pushBlocks $ reverse blocks
pushNewBlock afterBlockName
where
getFirstElseBlock :: Name -> [(Maybe (Name, Name, [Named Instruction]), Name, [BasicBlock])] -> Name
getFirstElseBlock defName [] = defName
getFirstElseBlock _ ((Nothing, elseBlock, _):_) = elseBlock
getFirstElseBlock _ ((Just (_, condBlock, _), _, _):_) = condBlock
genElseIf ::
Name -- ^ Next block after if name
-> (Maybe Expression, [Statement]) -- ^ Elseif clause
-> Codegen (Maybe (Name, Name, [Named Instruction]), Name, [BasicBlock]) -- ^ Maybe expression for condition, name of start block and blocks
genElseIf afterName (Nothing, stmts) = do
(elseStart, elseBlocks) <- genBlocks stmts afterName
return (Nothing, elseStart, elseBlocks)
genElseIf afterName (Just cond, stmts) = do
(condName, condInstr) <- genLLVMExpression cond
(elseStart, elseBlocks) <- genBlocks stmts afterName
futureCondBlock <- generateName "block_elseifcond"
return (Just (condName, futureCondBlock, condInstr), elseStart, elseBlocks)
linkElses :: (Name, [BasicBlock]) -> (Maybe (Name, Name, [Named Instruction]), Name, [BasicBlock]) -> Codegen (Name, [BasicBlock])
linkElses _ (Nothing, elseName, elseBlocks) = return (elseName, elseBlocks)
linkElses (nextBlock, accBlocks) (Just (condName, condBlockName, condInstr), branchName, branchBlocks) = do
pushNewBlock condBlockName
appendCurrentBlock condInstr
finishCurrentBlock condJump
condBlock <- purgeBlocks
return (condBlockName, condBlock ++ branchBlocks ++ accBlocks)
where condJump = CondBr (LocalReference i1 condName) branchName nextBlock []
genLLVMStatement (CallStatement _ _ name args) = do
funcType <- ptr <$> getFunctionType name
argsTypes <- getFunctionArgumentsTypes name
(argsNames, argsInstrs) <- unzip <$> mapM genLLVMExpression args
let argsRefs = uncurry LocalReference <$> zip argsTypes argsNames
let callInstr = [Do $ Call False C []
(Right $ ConstantOperand $ GlobalReference funcType (Name name))
(zip argsRefs (repeat [])) [] []]
appendCurrentBlock $ concat argsInstrs ++ callInstr
genLLVMStatement (LoopStatement _ _ stmts) = do
preBlock <- generateName "block_loop_ptr"
finishCurrentBlock (Br preBlock [])
pushNewBlock preBlock
afterBlock <- generateName "block_loop_after"
savedRet <- getLoopReturnMaybe
saveLoopReturn afterBlock
(loopStart, loopBlocks) <- genBlocks stmts preBlock
finishCurrentBlock (Br loopStart [])
pushBlocks loopBlocks
pushNewBlock afterBlock
case savedRet of
Nothing -> return ()
Just ret -> saveLoopReturn ret
genLLVMStatement (ExitWhenStatement _ cond) = do
(condName, condInstr) <- genLLVMExpression cond
retName <- getLoopReturn
afterBlock <- generateName "block_exitwhen_after"
appendCurrentBlock condInstr
finishCurrentBlock $ CondBr (LocalReference i1 condName) retName afterBlock []
pushNewBlock afterBlock
genLLVMStatement (ReturnStatement _ Nothing) = finishCurrentBlock $ Ret Nothing []
genLLVMStatement (ReturnStatement _ (Just expr)) = do
(exprName, epxrInstr) <- genLLVMExpression expr
retType <- getFunctionReturnType =<< getCurrentFunction
appendCurrentBlock epxrInstr
finishCurrentBlock $ Ret (Just $ LocalReference retType exprName) []
|
NCrashed/hjass
|
src/library/Language/Jass/Codegen/Statement.hs
|
mit
| 8,551
| 0
| 17
| 1,741
| 2,701
| 1,348
| 1,353
| 170
| 8
|
-- We need 'FlexibleInstances' to instance 'ArgVal' for 'Maybe Exp' and
-- '( String, Exp )'.
{-# LANGUAGE FlexibleInstances #-}
module Arith where
import Prelude hiding ( exp )
import System.Console.CmdTheLine hiding ( eval )
import Control.Applicative hiding ( (<|>) )
import Control.Monad ( guard )
import Data.Char ( isAlpha )
import Data.Function ( on )
import Text.Parsec
import qualified Text.PrettyPrint as PP
import qualified Data.Map as M
import System.IO
type Parser a = Parsec String () a
data Bin = Pow | Mul | Div | Add | Sub
prec :: Bin -> Int
prec b = case b of
{ Pow -> 3 ; Mul -> 2 ; Div -> 2 ; Add -> 1 ; Sub -> 1 }
assoc :: Bin -> Assoc
assoc b = case b of
Pow -> R
_ -> L
toFunc :: Bin -> (Int -> Int -> Int)
toFunc b = case b of
{ Pow -> (^) ; Mul -> (*) ; Div -> div ; Add -> (+) ; Sub -> (-) }
data Exp = IntExp Int
| VarExp String
| BinExp Bin Exp Exp
instance ArgVal Exp where
converter = ( parser, pretty 0 )
where
parser = fromParsec onErr exp
onErr str = PP.text "invalid expression" PP.<+> PP.quotes (PP.text str)
instance ArgVal (Maybe Exp) where
converter = just
instance ArgVal ( String, Exp ) where
converter = pair '='
data Assoc = L | R
type Env = M.Map String Exp
catParsers :: [Parser String] -> Parser String
catParsers = foldl (liftA2 (++)) (return "")
integer :: Parser Int
integer = read <$> catParsers [ option "" $ string "-", many1 digit ]
tok p = p <* spaces
parens = between op cp
where
op = tok $ char '('
cp = tok $ char ')'
-- Parse a terminal expression.
term :: Parser Exp
term = parens exp <|> int <|> var
where
int = tok $ IntExp <$> try integer -- Try so '-<not-digits>' won't fail.
var = tok $ VarExp <$> many1 (satisfy isAlpha)
-- Parse a binary operator.
bin :: Parser Bin
bin = choice [ pow, mul, div, add, sub ]
where
pow = tok $ Pow <$ char '^'
mul = tok $ Mul <$ char '*'
div = tok $ Div <$ char '/'
add = tok $ Add <$ char '+'
sub = tok $ Sub <$ char '-'
exp :: Parser Exp
exp = e 0
-- Precedence climbing expressions. See
-- <www.engr.mun.ca/~theo/Misc/exp_parsing.htm> for further information.
e :: Int -> Parser Exp
e p = do
t <- term
try (go t) <|> return t
where
go e1 = do
b <- bin
guard $ prec b >= p
let q = case assoc b of
R -> prec b
L -> prec b + 1
e2 <- e q
let expr = BinExp b e1 e2
try (go expr) <|> return expr
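-- For example (illustrative): with the precedences above, "1+2*3^2" parses as
-- BinExp Add (IntExp 1) (BinExp Mul (IntExp 2) (BinExp Pow (IntExp 3) (IntExp 2))),
-- since '^' (prec 3, right-assoc) binds tighter than '*' (2), which binds
-- tighter than '+' (1).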
-- Beta reduce by replacing variables in 'e' with values in 'env'.
beta :: Env -> Exp -> Maybe Exp
beta env e = case e of
VarExp str -> M.lookup str env
int@(IntExp _) -> return int
BinExp b e1 e2 -> (liftA2 (BinExp b) `on` beta env) e1 e2
eval :: Exp -> Int
eval e = case e of
VarExp str -> error $ "saw VarExp " ++ str ++ " while evaluating"
IntExp i -> i
BinExp b e1 e2 -> (toFunc b `on` eval) e1 e2
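-- Continuing the example (illustrative): once 'beta' has substituted away any
-- variables, eval of the expression above gives 1 + 2 * 3^2 = 19.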
pretty :: Int -> Exp -> PP.Doc
pretty p e = case e of
VarExp str -> PP.text str
IntExp i -> PP.int i
BinExp b e1 e2 -> let q = prec b
in parensOrNot q $ PP.cat [ pretty q e1, ppBin b, pretty q e2 ]
where
parensOrNot q = if q < p then PP.parens else id
ppBin :: Bin -> PP.Doc
ppBin b = case b of
Pow -> PP.char '^'
Mul -> PP.char '*'
Div -> PP.char '/'
Add -> PP.char '+'
Sub -> PP.char '-'
arith :: Bool -> [( String, Exp )] -> Exp -> IO ()
arith pp assoc = maybe badEnv method . beta (M.fromList assoc)
where
method = if pp then print . pretty 0 else print . eval
badEnv = hPutStrLn stderr "arith: bad environment"
arithTerm = arith <$> pp <*> env <*> e
where
pp = value $ flag (optInfo [ "pretty", "p" ])
{ optName = "PP"
, optDoc = "If present, pretty print instead of evaluating EXP."
}
env = nonEmpty $ posRight 0 [] posInfo
{ posName = "ENV"
, posDoc = "One or more assignments of the form '<name>=<exp>' to be "
++ "substituted in the input expression."
}
e = required $ pos 0 Nothing posInfo
{ posName = "EXP"
, posDoc = "An arithmetic expression to be evaluated."
}
termInfo = defTI
{ termName = "arith"
, version = "0.3"
, termDoc = "Evaluate mathematical functions demonstrating precedence "
++ "climbing and instantiating 'ArgVal' for tuples and Parsec "
++ "parsers."
, man = [ S "BUGS"
, P "Email bug reports to <fitsCarolDo@example.com>"
]
}
|
MerelyAPseudonym/cmdtheline
|
test/Arith.hs
|
mit
| 4,438
| 0
| 16
| 1,244
| 1,614
| 847
| 767
| 118
| 5
|
module Language.Camle.Print.ThreeAddressCode where
import Text.PrettyPrint
import Language.Camle.Data.ThreeAddressCode
class PrettyPrintable a where
pprint :: a -> Doc
instance PrettyPrintable TAC where
pprint = text . show
printIr :: Program -> Doc
printIr = vcat . (map pprint)
|
willprice/camle-compiler
|
src/Language/Camle/Print/ThreeAddressCode.hs
|
mit
| 300
| 0
| 7
| 56
| 79
| 45
| 34
| 9
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module PathSpec (
PathSpec,
PathSpecString,
matches,
parse,
testGroup,
) where
import Data.List
import System.FilePath
import qualified System.FilePath.Glob as Glob
import Test.Tasty.HUnit
import Test.Tasty.TH
type PathSpecString = String
data PathSpec = PathSpec { directoryPrefix :: FilePath, glob :: Glob.Pattern }
deriving Show
parse :: PathSpecString -> PathSpec
parse = PathSpec "" . Glob.compile
matches :: PathSpec -> FilePath -> Bool
matches spec path =
maybe False (Glob.match (glob spec)) $ stripPrefix (directoryPrefix spec) (normalise path)
case_matches_exact_path = assert (matches (parse "foo/bar") "foo/bar")
case_star_matches_only_in_directory = do
assert (matches (parse "foo/*") "foo/bar")
assert $ not (matches (parse "foo/*") "baz/bar")
case_double_star_crosses_directory_boundaries = assert (matches (parse "**/*") "foo/bar/baz")
case_matches_fails_for_negated_path = assert $ not (matches (parse "~foo/bar") "foo/bar")
testGroup = $(testGroupGenerator)
|
danstiner/dupes
|
src/PathSpec.hs
|
mit
| 1,093
| 0
| 12
| 203
| 300
| 163
| 137
| 27
| 1
|
-- Copyright (C) 2013 Jorge Aparicio
main :: IO()
main = print $ iterate (scanl1 (+)) (repeat 1 :: [Int]) !! size !! size
where size = 20 :: Int
|
japaric/eulermark
|
problems/0/1/5/015.hs
|
mit
| 149
| 0
| 10
| 34
| 64
| 35
| 29
| 3
| 1
|
module FeatureModel.Parsers.OldGenericParser (
parseFeatureModel,
parseInstanceModel,
FmFormat ( FMPlugin, FMIde, FMGrammar, SXFM, SPLOT )
)
where
import FeatureModel.Types
import qualified BasicTypes as Core
-- modules related to the FMPlugin parser
import FeatureModel.Parsers.FMPlugin.XmlFeatureParser
import FeatureModel.Parsers.FMPlugin.XmlFeatureModel (xmlFeature2FeatureTree, xml2FeatureConfiguration)
-- modules related to the FMIde parser
import FeatureModel.Parsers.FMIde.FMIde2FeatureModel
import FeatureModel.Parsers.FMIde.AbsFMIde
import FeatureModel.Parsers.FMIde.SkelFMIde
import FeatureModel.Parsers.FMIde.ErrM
import FeatureModel.Parsers.FMIde.LexFMIde
import FeatureModel.Parsers.FMIde.ParFMIde
-- modules related to the FMGrammar parser
import qualified FeatureModel.Parsers.FMGrammar.Grammar2FeatureModel as GFMG
import qualified FeatureModel.Parsers.FMGrammar.LexFMGrammar as LFMG
import qualified FeatureModel.Parsers.FMGrammar.SkelFMGrammar as SFMG
import qualified FeatureModel.Parsers.FMGrammar.AbsFMGrammar as AFMG
import qualified FeatureModel.Parsers.FMGrammar.ParFMGrammar as PFMG
import qualified FeatureModel.Parsers.FMGrammar.ErrM as EFMG
--modules related to the SPLOT parser
import FeatureModel.Parsers.SPLOT.SPLOT2FeatureModel
--import FeatureModel.Parsers.SPLOT.NewSPLOT2FeatureModel
import qualified FeatureModel.Parsers.SPLOT.LexSPLOT as LexSPLOT
import FeatureModel.Parsers.SPLOT.SkelSPLOT
import qualified FeatureModel.Parsers.SPLOT.AbsSPLOT as AbsSPLOT
import qualified FeatureModel.Parsers.SPLOT.ParSPLOT as ParSPLOT
import qualified FeatureModel.Parsers.SPLOT.ErrM as ErrSPLOT
-- modules related to the SXFM parser
import qualified FeatureModel.Parsers.SXFM.ParsecSXFM as ParsecSXFM
import Text.ParserCombinators.Parsec
import qualified Text.ParserCombinators.Parsec.Token as P
import Text.ParserCombinators.Parsec.Language( haskellStyle )
import Text.XML.HXT.Core
import Text.XML.HXT.RelaxNG
data FmFormat = FMPlugin | FMIde | FMGrammar | SXFM | SPLOT
-- The top most function for parsing feature models
-- in different formats.
--
parseFeatureModel (schema, fileName) format = do
x <- readFile (fileName)
case (format) of
-- FMPlugin -> do
-- fm <- translateFMPToFm schema fileName
-- return fm
--
-- FMIde -> do
-- let fm = translateFMIdeToFm (pGrammar (myLexer x))
-- return fm
--
-- FMGrammar -> do
-- let fm = translateFMGrammarToFm (PFMG.pFMGrammar (PFMG.myLexer x))
-- return fm
SPLOT -> do
let fm = translateFMSPLOTToFm (ParSPLOT.pSPLOTModel (ParSPLOT.myLexer x))
return fm
-- SXFM -> do
-- r <- parseFromFile ParsecSXFM.parseFeatureModel fileName ;
-- case (r) of
-- Left err -> return $ Core.Fail (show err)
-- Right f -> do let fm = f
-- return $ Core.Success fm
--
-- |
-- Parse a feature configuration. This parser
-- is based on the exported instance models from
-- FMPlugin
--
parseInstanceModel schema fileName =
do
errs <- checkXMLFile schema fileName
case errs of
[] -> do
instanceModel <- parseInstanceModel' fileName
return $ instanceModel
otherwise -> do
let errs' = concat $ map show errs
return $ Core.Fail errs'
parseInstanceModel' fileName =
do
i <- runX ( xunpickleDocument xpFeatureConfiguration [ withValidate yes
, withTrace 1
, withRemoveWS yes
, withPreserveComment yes
] (Core.createURI fileName) )
case i of
[x] -> do return $ Core.Success (xml2FeatureConfiguration x)
otherwise -> return $ Core.Fail "Error parsing instance configuration. Try to check it before parsing."
translateFMIdeToFm (Ok g) = Core.Success (grammarToFeatureModel g)
translateFMIdeToFm (Bad s) = Core.Fail s
translateFMGrammarToFm (EFMG.Ok g) = Core.Success (GFMG.grammarToFeatureModel g)
translateFMGrammarToFm (EFMG.Bad s) = Core.Fail s
translateFMSPLOTToFm (ErrSPLOT.Ok g) = Core.Success (splotToFeatureModel g)
translateFMSPLOTToFm (ErrSPLOT.Bad s) = Core.Fail s
translateFMPToFm schema fileName =
do
errs <- checkXMLFile schema fileName
case errs of
[] ->
do
u <- runX ( xunpickleDocument xpFeature [ withValidate yes
, withTrace 1
, withRemoveWS yes
, withPreserveComment yes
] (Core.createURI fileName));
case u of
[x] -> return $ Core.Success (FeatureModel { fmTree = (xmlFeature2FeatureTree x), fmConstraints = [] })
otherwise -> return $ Core.Fail "Error parsing feature model. Try to check it before parsing."
-- errors found after checking the FMPlugin file
otherwise -> return $ Core.Fail ("Error parsing feature model. " ++ (concat [show e | e <- errs]))
checkXMLFile schema fileName =
do
errs <- runX ( errorMsgCollect
>>>
readDocument [ withValidate yes
, withRelaxNG (Core.createURI schema)
, withErrors yes
] (Core.createURI fileName)
>>>
getErrorMessages
) ;
return errs
|
hephaestus-pl/hephaestus
|
alexandre/feature-modeling/src/FeatureModel/Parsers/OldGenericParser.hs
|
mit
| 5,381
| 2
| 21
| 1,303
| 1,019
| 583
| 436
| 93
| 3
|
module ExercisesHeavyLifting where
e :: IO Integer
e = let ioi = readIO "1" :: IO Integer
changed = read <$> ("123"++) <$> show <$> ioi
in (*3) <$> changed
|
NickAger/LearningHaskell
|
HaskellProgrammingFromFirstPrinciples/Chapter16.hsproj/ExercisesHeavyLifting.hs
|
mit
| 171
| 0
| 12
| 45
| 66
| 36
| 30
| 5
| 1
|
module Data.SimpleHtmlTag (Tag (Tag) ) where
data Tag = Tag
{ name :: String
, content :: String
, children :: [Tag]
} deriving (Show, Eq)
|
taiki45/ex-simple-html-parser
|
src/Data/SimpleHtmlTag.hs
|
mit
| 188
| 0
| 9
| 75
| 56
| 35
| 21
| 8
| 0
|
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
import Prelude hiding (catch,(.))
import Data.Monoid
import Data.Foldable
import Data.Traversable
import Control.Category
import Control.Monad
import Control.Applicative
import Control.Error
import qualified Data.Text as T
newtype Nullipotent m a = Nullipotent { runNullipotent:: m a }
deriving (Eq, Ord, Read, Show, Functor, Foldable, Traversable)
instance (Monad m) => Monad (Nullipotent m) where
(Nullipotent m) >>= k = Nullipotent $ m >>= (runNullipotent . k)
return = Nullipotent . return
data Sealed m = Sealed {
unseal:: m (),
tags:: [T.Text]
}
|
danidiaz/haskell-sandbox
|
Pianola.hs
|
mit
| 825
| 0
| 10
| 147
| 210
| 124
| 86
| 24
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-launchtemplate-tagspecification.html
module Stratosphere.ResourceProperties.EC2LaunchTemplateTagSpecification where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.Tag
-- | Full data type definition for EC2LaunchTemplateTagSpecification. See
-- 'ec2LaunchTemplateTagSpecification' for a more convenient constructor.
data EC2LaunchTemplateTagSpecification =
EC2LaunchTemplateTagSpecification
{ _eC2LaunchTemplateTagSpecificationResourceType :: Maybe (Val Text)
, _eC2LaunchTemplateTagSpecificationTags :: Maybe [Tag]
} deriving (Show, Eq)
instance ToJSON EC2LaunchTemplateTagSpecification where
toJSON EC2LaunchTemplateTagSpecification{..} =
object $
catMaybes
[ fmap (("ResourceType",) . toJSON) _eC2LaunchTemplateTagSpecificationResourceType
, fmap (("Tags",) . toJSON) _eC2LaunchTemplateTagSpecificationTags
]
-- | Constructor for 'EC2LaunchTemplateTagSpecification' containing required
-- fields as arguments.
ec2LaunchTemplateTagSpecification
:: EC2LaunchTemplateTagSpecification
ec2LaunchTemplateTagSpecification =
EC2LaunchTemplateTagSpecification
{ _eC2LaunchTemplateTagSpecificationResourceType = Nothing
, _eC2LaunchTemplateTagSpecificationTags = Nothing
}
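-- Illustrative use of the lenses below (comment only; assumes the usual lens
-- operators (&) and (?~) are in scope, and that 'Val Text' accepts a string
-- literal via OverloadedStrings):
--   ec2LaunchTemplateTagSpecification & eclttsResourceType ?~ "instance"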
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-launchtemplate-tagspecification.html#cfn-ec2-launchtemplate-tagspecification-resourcetype
eclttsResourceType :: Lens' EC2LaunchTemplateTagSpecification (Maybe (Val Text))
eclttsResourceType = lens _eC2LaunchTemplateTagSpecificationResourceType (\s a -> s { _eC2LaunchTemplateTagSpecificationResourceType = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-launchtemplate-tagspecification.html#cfn-ec2-launchtemplate-tagspecification-tags
eclttsTags :: Lens' EC2LaunchTemplateTagSpecification (Maybe [Tag])
eclttsTags = lens _eC2LaunchTemplateTagSpecificationTags (\s a -> s { _eC2LaunchTemplateTagSpecificationTags = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/EC2LaunchTemplateTagSpecification.hs
|
mit
| 2,204
| 0
| 12
| 204
| 264
| 153
| 111
| 28
| 1
|
module Kopia.ParserSpec where
import Test.Hspec
import Kopia.Parser
import Kopia.Model.Command
import Kopia.Model.Bridge
import System.Exit (ExitCode(..))
spec :: Spec
spec = do
describe "Parser" $ do
describe "Bridge" $ do
it "should parse a bridge" $ do
command <- parse ["abc", "def"]
command `shouldBe` Command (Bridge "abc" "def") Test
it "should throw on lack of a bridge" $ do
parse ["acb"] `shouldThrow` (== ExitFailure 1)
parse [] `shouldThrow` (== ExitFailure 1)
describe "Action" $ do
describe "test" $ do
it "should parse a test command" $ do
command <- parse ["a", "b", "test"]
command `shouldBe` Command (Bridge "a" "b") Test
it "should throw on too many arguments" $ do
parse ["a", "b", "test", "extra"]
`shouldThrow` (== ExitFailure 1)
describe "take" $ do
it "should parse a take command" $ do
command <- parse ["a", "b", "take", "name"]
command `shouldBe` Command (Bridge "a" "b") (Take "name")
it "should throw on lack of arguments" $ do
parse ["a", "b", "take"] `shouldThrow` (== ExitFailure 1)
it "should throw on too many arguments" $ do
parse ["a", "b", "name", "extra"]
`shouldThrow` (== ExitFailure 1)
describe "record" $ do
it "should parse a record command" $ do
a <- parse ["a", "b", "record", "name", "123"]
a `shouldBe` Command (Bridge "a" "b") (Record "name" 123 1)
b <- parse ["a", "b", "record", "name", "123", "--max=456"]
b `shouldBe` Command (Bridge "a" "b") (Record "name" 123 456)
it "should throw on lack of arguments" $ do
parse ["a", "b", "record"] `shouldThrow` (== ExitFailure 1)
parse ["a", "b", "record", "name"]
`shouldThrow` (== ExitFailure 1)
parse ["a", "b", "record", "--max=456"]
`shouldThrow` (== ExitFailure 1)
it "should throw on too many arguments" $ do
parse ["a", "b", "record", "123", "extra"]
`shouldThrow` (== ExitFailure 1)
it "should throw if MINUTES is not a number" $ do
parse ["a", "b", "record", "abc"]
`shouldThrow` (== ExitFailure 1)
it "should throw if MAXIMUM is not a number" $ do
parse ["a", "b", "record", "123", "--max=abc"]
`shouldThrow` (== ExitFailure 1)
describe "list" $ do
it "should parse a list command" $ do
a <- parse ["a", "b", "list", "name"]
a `shouldBe` Command (Bridge "a" "b") (List "name" 100 Newest)
b <- parse ["a", "b", "list", "name", "--max=123"]
b `shouldBe` Command (Bridge "a" "b") (List "name" 123 Newest)
c <- parse ["a", "b", "list", "name", "--order=oldest"]
c `shouldBe` Command (Bridge "a" "b") (List "name" 100 Oldest)
it "should throw if MAXIMUM is not a number" $ do
parse ["a", "b", "list", "name", "--max=abc"]
`shouldThrow` (== ExitFailure 1)
it "should throw if ORDER is not valid" $ do
parse ["a", "b", "list", "name", "--order=abc"]
`shouldThrow` (== ExitFailure 1)
describe "clear" $ do
it "should parse a clear command" $ do
a <- parse ["a", "b", "clear", "name"]
a `shouldBe` Command (Bridge "a" "b") (Clear "name")
it "should throw on lack of arguments" $ do
parse ["a", "b", "clear"]
`shouldThrow` (== ExitFailure 1)
it "should throw on too many arguments" $ do
parse ["a", "b", "clear", "name", "extra"]
`shouldThrow` (== ExitFailure 1)
describe "remove" $ do
it "should parse a remove command" $ do
a <- parse ["a", "b", "remove", "name", "123"]
a `shouldBe` Command (Bridge "a" "b") (Remove "name" 123 1)
b <- parse ["a", "b", "remove", "name", "123", "--max=456"]
b `shouldBe` Command (Bridge "a" "b") (Remove "name" 123 456)
it "should enforce integral arguments" $ do
parse ["a", "b", "remove", "name", "abc"]
`shouldThrow` (== ExitFailure 1)
parse ["a", "b", "remove", "name", "123", "--max=abc"]
`shouldThrow` (== ExitFailure 1)
it "should throw on lack of arguments" $ do
parse ["a", "b", "remove"]
`shouldThrow` (== ExitFailure 1)
parse ["a", "b", "remove", "name"]
`shouldThrow` (== ExitFailure 1)
describe "restore" $ do
it "should parse a restore command" $ do
a <- parse ["a", "b", "restore", "name", "123"]
a `shouldBe` Command (Bridge "a" "b") (Restore "name" 123)
it "should enforce integral arguments" $ do
parse ["a", "b", "restore", "name", "abc"]
`shouldThrow` (== ExitFailure 1)
|
Jefffrey/Kopia
|
test/Kopia/ParserSpec.hs
|
gpl-3.0
| 5,759
| 0
| 22
| 2,379
| 1,634
| 855
| 779
| 101
| 1
|
{- |
Module : $Header$
   Description :  Module for handling the basics of finite elliptic curves
Copyright : (c) Michal Parusinski
License : GPLv3
Maintainer : mparusinski@gmail.com
Stability : experimental
Portability : portable
<module description starting at first column>
-}
module EllipticCurves.FiniteEllipticCurves where
-- Assuming F is not a field of characteristic 2 or 3
data
|
mparusinski/Haskell-number-theory-library
|
EllipticCurves/FiniteEllipticCurves.hs
|
gpl-3.0
| 406
| 1
| 3
| 81
| 10
| 7
| 3
| -1
| -1
|
module DL3047 (tests) where
import Data.Text as Text
import Helpers
import Test.Hspec
tests :: SpecWith ()
tests = do
let ?rulesConfig = mempty
describe "DL3047 - `wget` without flag `--progress` will result in excessively bloated build logs when downloading larger files." $ do
it "warns when using wget without --progress option" $
let dockerFile =
[ "FROM node as foo",
"RUN wget my.xyz"
]
in ruleCatches "DL3047" $ Text.unlines dockerFile
it "does not warn when running with --progress option" $
let dockerFile =
[ "FROM node as foo",
"RUN wget --progress=dot:giga my.xyz"
]
in ruleCatchesNot "DL3047" $ Text.unlines dockerFile
it "does not warn when running with -q (quiet short option) and without --progress option" $
let dockerFile =
[ "FROM node as foo",
"RUN wget -q my.xyz"
]
in ruleCatchesNot "DL3047" $ Text.unlines dockerFile
it "does not warn when running with --quiet (quiet long option) and without --progress option" $
let dockerFile =
[ "FROM node as foo",
"RUN wget --quiet my.xyz"
]
in ruleCatchesNot "DL3047" $ Text.unlines dockerFile
it "does not warn when running with -nv (no-verbose short option) and without --progress option" $
let dockerFile =
[ "FROM node as foo",
"RUN wget -nv my.xyz"
]
in ruleCatchesNot "DL3047" $ Text.unlines dockerFile
it "does not warn when running with --no-verbose (no-verbose long option) and without --progress option" $
let dockerFile =
[ "FROM node as foo",
"RUN wget --no-verbose my.xyz"
]
in ruleCatchesNot "DL3047" $ Text.unlines dockerFile
it "does not warn when running with --output-file (output-file long option) and without --progress option" $
let dockerFile =
[ "FROM node as foo",
"RUN wget --output-file=/tmp/wget.log my.xyz"
]
in ruleCatchesNot "DL3047" $ Text.unlines dockerFile
it "does not warn when running with -o (output-file long option) and without --progress option" $
let dockerFile =
[ "FROM node as foo",
"RUN wget -o /tmp/wget.log my.xyz"
]
in ruleCatchesNot "DL3047" $ Text.unlines dockerFile
it "does not warn when running with --append-output (append-output long option) and without --progress option" $
let dockerFile =
[ "FROM node as foo",
"RUN wget --append-output=/tmp/wget.log my.xyz"
]
in ruleCatchesNot "DL3047" $ Text.unlines dockerFile
it "does not warn when running with -a (append-output long option) and without --progress option" $
let dockerFile =
[ "FROM node as foo",
"RUN wget -a /tmp/wget.log my.xyz"
]
in ruleCatchesNot "DL3047" $ Text.unlines dockerFile
|
lukasmartinelli/hadolint
|
test/DL3047.hs
|
gpl-3.0
| 3,021
| 0
| 15
| 938
| 448
| 212
| 236
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidPublisher.InAppProducts.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all in-app products - both managed products and subscriptions.
--
-- /See:/ <https://developers.google.com/android-publisher Google Play Android Developer API Reference> for @androidpublisher.inappproducts.list@.
module Network.Google.Resource.AndroidPublisher.InAppProducts.List
(
-- * REST Resource
InAppProductsListResource
-- * Creating a Request
, inAppProductsList
, InAppProductsList
-- * Request Lenses
, iaplXgafv
, iaplUploadProtocol
, iaplPackageName
, iaplAccessToken
, iaplToken
, iaplUploadType
, iaplStartIndex
, iaplMaxResults
, iaplCallback
) where
import Network.Google.AndroidPublisher.Types
import Network.Google.Prelude
-- | A resource alias for @androidpublisher.inappproducts.list@ method which the
-- 'InAppProductsList' request conforms to.
type InAppProductsListResource =
"androidpublisher" :>
"v3" :>
"applications" :>
Capture "packageName" Text :>
"inappproducts" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "token" Text :>
QueryParam "uploadType" Text :>
QueryParam "startIndex" (Textual Word32) :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] InAppProductsListResponse
-- | Lists all in-app products - both managed products and subscriptions.
--
-- /See:/ 'inAppProductsList' smart constructor.
data InAppProductsList =
InAppProductsList'
{ _iaplXgafv :: !(Maybe Xgafv)
, _iaplUploadProtocol :: !(Maybe Text)
, _iaplPackageName :: !Text
, _iaplAccessToken :: !(Maybe Text)
, _iaplToken :: !(Maybe Text)
, _iaplUploadType :: !(Maybe Text)
, _iaplStartIndex :: !(Maybe (Textual Word32))
, _iaplMaxResults :: !(Maybe (Textual Word32))
, _iaplCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'InAppProductsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'iaplXgafv'
--
-- * 'iaplUploadProtocol'
--
-- * 'iaplPackageName'
--
-- * 'iaplAccessToken'
--
-- * 'iaplToken'
--
-- * 'iaplUploadType'
--
-- * 'iaplStartIndex'
--
-- * 'iaplMaxResults'
--
-- * 'iaplCallback'
inAppProductsList
:: Text -- ^ 'iaplPackageName'
-> InAppProductsList
inAppProductsList pIaplPackageName_ =
InAppProductsList'
{ _iaplXgafv = Nothing
, _iaplUploadProtocol = Nothing
, _iaplPackageName = pIaplPackageName_
, _iaplAccessToken = Nothing
, _iaplToken = Nothing
, _iaplUploadType = Nothing
, _iaplStartIndex = Nothing
, _iaplMaxResults = Nothing
, _iaplCallback = Nothing
}
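-- Illustrative request value (comment only; the package name is a placeholder
-- and the usual lens operators are assumed to be in scope):
--   inAppProductsList "com.example.app" & iaplMaxResults ?~ 50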
-- | V1 error format.
iaplXgafv :: Lens' InAppProductsList (Maybe Xgafv)
iaplXgafv
= lens _iaplXgafv (\ s a -> s{_iaplXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
iaplUploadProtocol :: Lens' InAppProductsList (Maybe Text)
iaplUploadProtocol
= lens _iaplUploadProtocol
(\ s a -> s{_iaplUploadProtocol = a})
-- | Package name of the app.
iaplPackageName :: Lens' InAppProductsList Text
iaplPackageName
= lens _iaplPackageName
(\ s a -> s{_iaplPackageName = a})
-- | OAuth access token.
iaplAccessToken :: Lens' InAppProductsList (Maybe Text)
iaplAccessToken
= lens _iaplAccessToken
(\ s a -> s{_iaplAccessToken = a})
-- | Pagination token. If empty, list starts at the first product.
iaplToken :: Lens' InAppProductsList (Maybe Text)
iaplToken
= lens _iaplToken (\ s a -> s{_iaplToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
iaplUploadType :: Lens' InAppProductsList (Maybe Text)
iaplUploadType
= lens _iaplUploadType
(\ s a -> s{_iaplUploadType = a})
-- | The index of the first element to return.
iaplStartIndex :: Lens' InAppProductsList (Maybe Word32)
iaplStartIndex
= lens _iaplStartIndex
(\ s a -> s{_iaplStartIndex = a})
. mapping _Coerce
-- | How many results the list operation should return.
iaplMaxResults :: Lens' InAppProductsList (Maybe Word32)
iaplMaxResults
= lens _iaplMaxResults
(\ s a -> s{_iaplMaxResults = a})
. mapping _Coerce
-- | JSONP
iaplCallback :: Lens' InAppProductsList (Maybe Text)
iaplCallback
= lens _iaplCallback (\ s a -> s{_iaplCallback = a})
instance GoogleRequest InAppProductsList where
type Rs InAppProductsList = InAppProductsListResponse
type Scopes InAppProductsList =
'["https://www.googleapis.com/auth/androidpublisher"]
requestClient InAppProductsList'{..}
= go _iaplPackageName _iaplXgafv _iaplUploadProtocol
_iaplAccessToken
_iaplToken
_iaplUploadType
_iaplStartIndex
_iaplMaxResults
_iaplCallback
(Just AltJSON)
androidPublisherService
where go
= buildClient
(Proxy :: Proxy InAppProductsListResource)
mempty
|
brendanhay/gogol
|
gogol-android-publisher/gen/Network/Google/Resource/AndroidPublisher/InAppProducts/List.hs
|
mpl-2.0
| 6,132
| 0
| 21
| 1,502
| 984
| 564
| 420
| 141
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Blogger.Posts.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Patches a post.
--
-- /See:/ <https://developers.google.com/blogger/docs/3.0/getting_started Blogger API v3 Reference> for @blogger.posts.patch@.
module Network.Google.Resource.Blogger.Posts.Patch
(
-- * REST Resource
PostsPatchResource
-- * Creating a Request
, postsPatch
, PostsPatch
-- * Request Lenses
, posoFetchBody
, posoXgafv
, posoUploadProtocol
, posoAccessToken
, posoFetchImages
, posoUploadType
, posoBlogId
, posoPayload
, posoMaxComments
, posoRevert
, posoPostId
, posoPublish
, posoCallback
) where
import Network.Google.Blogger.Types
import Network.Google.Prelude
-- | A resource alias for @blogger.posts.patch@ method which the
-- 'PostsPatch' request conforms to.
type PostsPatchResource =
"v3" :>
"blogs" :>
Capture "blogId" Text :>
"posts" :>
Capture "postId" Text :>
QueryParam "fetchBody" Bool :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "fetchImages" Bool :>
QueryParam "uploadType" Text :>
QueryParam "maxComments" (Textual Word32) :>
QueryParam "revert" Bool :>
QueryParam "publish" Bool :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Post' :>
Patch '[JSON] Post'
-- | Patches a post.
--
-- /See:/ 'postsPatch' smart constructor.
data PostsPatch =
PostsPatch'
{ _posoFetchBody :: !Bool
, _posoXgafv :: !(Maybe Xgafv)
, _posoUploadProtocol :: !(Maybe Text)
, _posoAccessToken :: !(Maybe Text)
, _posoFetchImages :: !(Maybe Bool)
, _posoUploadType :: !(Maybe Text)
, _posoBlogId :: !Text
, _posoPayload :: !Post'
, _posoMaxComments :: !(Maybe (Textual Word32))
, _posoRevert :: !(Maybe Bool)
, _posoPostId :: !Text
, _posoPublish :: !(Maybe Bool)
, _posoCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PostsPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'posoFetchBody'
--
-- * 'posoXgafv'
--
-- * 'posoUploadProtocol'
--
-- * 'posoAccessToken'
--
-- * 'posoFetchImages'
--
-- * 'posoUploadType'
--
-- * 'posoBlogId'
--
-- * 'posoPayload'
--
-- * 'posoMaxComments'
--
-- * 'posoRevert'
--
-- * 'posoPostId'
--
-- * 'posoPublish'
--
-- * 'posoCallback'
postsPatch
:: Text -- ^ 'posoBlogId'
-> Post' -- ^ 'posoPayload'
-> Text -- ^ 'posoPostId'
-> PostsPatch
postsPatch pPosoBlogId_ pPosoPayload_ pPosoPostId_ =
PostsPatch'
{ _posoFetchBody = True
, _posoXgafv = Nothing
, _posoUploadProtocol = Nothing
, _posoAccessToken = Nothing
, _posoFetchImages = Nothing
, _posoUploadType = Nothing
, _posoBlogId = pPosoBlogId_
, _posoPayload = pPosoPayload_
, _posoMaxComments = Nothing
, _posoRevert = Nothing
, _posoPostId = pPosoPostId_
, _posoPublish = Nothing
, _posoCallback = Nothing
}
-- | Whether the body content of the post is included with the result.
posoFetchBody :: Lens' PostsPatch Bool
posoFetchBody
= lens _posoFetchBody
(\ s a -> s{_posoFetchBody = a})
-- | V1 error format.
posoXgafv :: Lens' PostsPatch (Maybe Xgafv)
posoXgafv
= lens _posoXgafv (\ s a -> s{_posoXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
posoUploadProtocol :: Lens' PostsPatch (Maybe Text)
posoUploadProtocol
= lens _posoUploadProtocol
(\ s a -> s{_posoUploadProtocol = a})
-- | OAuth access token.
posoAccessToken :: Lens' PostsPatch (Maybe Text)
posoAccessToken
= lens _posoAccessToken
(\ s a -> s{_posoAccessToken = a})
-- | Whether image URL metadata for each post is included in the result.
posoFetchImages :: Lens' PostsPatch (Maybe Bool)
posoFetchImages
= lens _posoFetchImages
(\ s a -> s{_posoFetchImages = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
posoUploadType :: Lens' PostsPatch (Maybe Text)
posoUploadType
= lens _posoUploadType
(\ s a -> s{_posoUploadType = a})
-- | The ID of the blog containing the post.
posoBlogId :: Lens' PostsPatch Text
posoBlogId
= lens _posoBlogId (\ s a -> s{_posoBlogId = a})
-- | Multipart request metadata.
posoPayload :: Lens' PostsPatch Post'
posoPayload
= lens _posoPayload (\ s a -> s{_posoPayload = a})
-- | Maximum number of comments to retrieve with the returned post.
posoMaxComments :: Lens' PostsPatch (Maybe Word32)
posoMaxComments
= lens _posoMaxComments
(\ s a -> s{_posoMaxComments = a})
. mapping _Coerce
-- | Whether a revert action should be performed when the post is updated.
posoRevert :: Lens' PostsPatch (Maybe Bool)
posoRevert
= lens _posoRevert (\ s a -> s{_posoRevert = a})
-- | The ID of the post to patch.
posoPostId :: Lens' PostsPatch Text
posoPostId
= lens _posoPostId (\ s a -> s{_posoPostId = a})
-- | Whether a publish action should be performed when the post is updated.
posoPublish :: Lens' PostsPatch (Maybe Bool)
posoPublish
= lens _posoPublish (\ s a -> s{_posoPublish = a})
-- | JSONP
posoCallback :: Lens' PostsPatch (Maybe Text)
posoCallback
= lens _posoCallback (\ s a -> s{_posoCallback = a})
instance GoogleRequest PostsPatch where
type Rs PostsPatch = Post'
type Scopes PostsPatch =
'["https://www.googleapis.com/auth/blogger"]
requestClient PostsPatch'{..}
= go _posoBlogId _posoPostId (Just _posoFetchBody)
_posoXgafv
_posoUploadProtocol
_posoAccessToken
_posoFetchImages
_posoUploadType
_posoMaxComments
_posoRevert
_posoPublish
_posoCallback
(Just AltJSON)
_posoPayload
bloggerService
where go
= buildClient (Proxy :: Proxy PostsPatchResource)
mempty
| brendanhay/gogol | gogol-blogger/gen/Network/Google/Resource/Blogger/Posts/Patch.hs | mpl-2.0 | 6,653 | 0 | 24 | 1,807 | 1,263 | 722 | 541 | 180 | 1 |
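A minimal usage sketch for the generated PostsPatch module above (not part of the source record). It reuses the postsPatch smart constructor declared above; the scope proxy name bloggerScope, the sketch module name, and the blog/post ids are assumptions, and the partial Post' payload is taken as an argument so no field lenses of Post' need to be guessed.

{-# LANGUAGE OverloadedStrings #-}
module PostsPatchSketch where

import Control.Lens ((&), (.~), (<&>), (?~))
import Network.Google
import Network.Google.Blogger
import System.IO (stdout)

-- Patch an existing post with the supplied partial payload and publish
-- the change; the blog id and post id below are placeholders.
patchAndPublish :: Post' -> IO Post'
patchAndPublish payload = do
  lgr <- newLogger Debug stdout
  env <- newEnv <&> (envLogger .~ lgr) . (envScopes .~ bloggerScope)
  runResourceT . runGoogle env $
    send (postsPatch "1234567890" payload "9876543210" & posoPublish ?~ True)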
{- ORMOLU_DISABLE -}
-- Implicit CAD. Copyright (C) 2011, Christopher Olah (chris@colah.ca)
-- Copyright (C) 2016, Julia Longtin (julial@turinglace.com)
-- Released under the GNU AGPLV3+, see LICENSE
module Graphics.Implicit.Export.Render.HandleSquares (mergedSquareTris) where
import Prelude((+), foldMap, (<>), ($), fmap, concat, (.), (==), compare, error, otherwise, concatMap)
import Graphics.Implicit.Definitions (TriangleMesh(TriangleMesh, getTriangles), Triangle(Triangle))
import Graphics.Implicit.Export.Render.Definitions (TriSquare(Tris, Sq))
import Linear ( V2(V2), (*^), (^*) )
import GHC.Exts (groupWith)
import Data.List (sortBy)
-- We want small meshes, and getting rid of redundant triangles is essential
-- to that. Quads are specifically marked during tessellation (see
-- Graphics.Implicit.Export.Render.Definitions and
-- Graphics.Implicit.Export.Render.TesselateLoops) so that we can try to
-- merge them together here.
{- Core idea of mergedSquareTris:
Many Quads on Plane
____________
| | | |
|____|____| |
|____|____|__|
| joinXaligned
v
____________
| | |
|_________|__|
|_________|__|
| joinYaligned
v
____________
| | |
| | |
|_________|__|
| joinXaligned
v
____________
| |
| |
|____________|
| squareToTri
v
____________
|\ |
| ---------- |
|___________\|
-}
mergedSquareTris :: [TriSquare] -> TriangleMesh
mergedSquareTris sqTris =
let
-- We don't need to do any work on triangles. They'll just be part of
-- the list of triangles we give back. So, the triangles coming from
-- triangles...
triTriangles :: [Triangle]
triTriangles = [tri | Tris tris <- sqTris, tri <- getTriangles tris ]
-- We actually want to work on the quads, so we find those
squaresFromTris :: [TriSquare]
squaresFromTris = [ Sq x y z q | Sq x y z q <- sqTris ]
-- Collect squares that are on the same plane.
planeAligned = groupWith (\(Sq basis z _ _) -> (basis,z)) squaresFromTris
-- For each plane:
-- Select for being the same range on X and then merge them on Y
-- Then vice versa.
joined :: [[TriSquare]]
joined = fmap
( concatMap joinXaligned . groupWith (\(Sq _ _ xS _) -> xS)
. concatMap joinYaligned . groupWith (\(Sq _ _ _ yS) -> yS)
. concatMap joinXaligned . groupWith (\(Sq _ _ xS _) -> xS))
planeAligned
    -- Merge them back together, and we have the desired result!
finishedSquares = concat joined
in
-- merge them to triangles, and combine with the original triangles.
TriangleMesh $ triTriangles <> foldMap squareToTri finishedSquares
-- And now for the helper functions that do the heavy lifting...
joinXaligned :: [TriSquare] -> [TriSquare]
joinXaligned quads@((Sq b z xS _):_) =
let
orderedQuads = sortBy
(\(Sq _ _ _ (V2 ya _)) (Sq _ _ _ (V2 yb _)) -> compare ya yb)
quads
mergeAdjacent (pres@(Sq _ _ _ (V2 y1a y2a)) : next@(Sq _ _ _ (V2 y1b y2b)) : others)
| y2a == y1b = mergeAdjacent (Sq b z xS (V2 y1a y2b) : others)
| y1a == y2b = mergeAdjacent (Sq b z xS (V2 y1b y2a) : others)
| otherwise = pres : mergeAdjacent (next : others)
mergeAdjacent a = a
in
mergeAdjacent orderedQuads
joinXaligned (Tris _:_) = error "Tried to join x aligned triangles."
joinXaligned [] = []
joinYaligned :: [TriSquare] -> [TriSquare]
joinYaligned quads@((Sq b z _ yS):_) =
let
orderedQuads = sortBy
(\(Sq _ _ (V2 xa _) _) (Sq _ _ (V2 xb _) _) -> compare xa xb)
quads
mergeAdjacent (pres@(Sq _ _ (V2 x1a x2a) _) : next@(Sq _ _ (V2 x1b x2b) _) : others)
| x2a == x1b = mergeAdjacent (Sq b z (V2 x1a x2b) yS : others)
| x1a == x2b = mergeAdjacent (Sq b z (V2 x1b x2a) yS : others)
| otherwise = pres : mergeAdjacent (next : others)
mergeAdjacent a = a
in
mergeAdjacent orderedQuads
joinYaligned (Tris _:_) = error "Tried to join y aligned triangles."
joinYaligned [] = []
-- Deconstruct a square into two triangles.
squareToTri :: TriSquare -> [Triangle]
squareToTri (Sq (b1,b2,b3) z (V2 x1 x2) (V2 y1 y2)) =
let
zV = b3 ^* z
(x1V, x2V) = (x1 *^ b1, x2 *^ b1)
(y1V, y2V) = (y1 *^ b2, y2 *^ b2)
a = zV + x1V + y1V
b = zV + x2V + y1V
c = zV + x1V + y2V
d = zV + x2V + y2V
in
[Triangle (a,b,c), Triangle (c,b,d)]
squareToTri (Tris t) = getTriangles t
| colah/ImplicitCAD | Graphics/Implicit/Export/Render/HandleSquares.hs | agpl-3.0 | 4,654 | 0 | 20 | 1,322 | 1,329 | 721 | 608 | 63 | 2 |
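A minimal sketch of how mergedSquareTris from the module above collapses coplanar, edge-adjacent quads (not part of the source record). It assumes the ImplicitCAD conventions that the basis vectors are V3 Double and the ranges V2 Double, and that the Render modules are exposed by the package; the sketch module name is hypothetical.

module HandleSquaresSketch (main) where

import Graphics.Implicit.Definitions (TriangleMesh(getTriangles))
import Graphics.Implicit.Export.Render.Definitions (TriSquare(Sq))
import Graphics.Implicit.Export.Render.HandleSquares (mergedSquareTris)
import Linear (V2(V2), V3(V3))

-- Two unit quads on the z = 0 plane sharing the edge y = 1: joinXaligned
-- merges them into one 1x2 quad, which squareToTri then splits into two
-- triangles, so the merged mesh has 2 triangles instead of 4.
mergedCount :: Int
mergedCount =
  length . getTriangles . mergedSquareTris $
    [ Sq (V3 1 0 0, V3 0 1 0, V3 0 0 1) 0 (V2 0 1) (V2 0 1)
    , Sq (V3 1 0 0, V3 0 1 0, V3 0 0 1) 0 (V2 0 1) (V2 1 2)
    ]

main :: IO ()
main = print mergedCount -- expected output: 2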
{-# LANGUAGE OverloadedStrings #-}
module Store.ConfigTest
where
import Data.Aeson hiding (Value)
import qualified Data.ByteString as BS
import Data.Monoid ((<>))
import qualified Data.Text as T
-- import Test.Tasty
import Test.Tasty.HUnit
import Store.Config
-- example usage; TODO: replace guts with data.yaml
unit_Config_examples :: Assertion
unit_Config_examples = do
pathKey (Path ["k1", "k2"]) @?= "k1.k2"
encode ((mempty :: Config) <> mempty) @?= encode (mempty :: Config)
mempty ! "k1.k2" @?= (Nothing :: Maybe Value)
mempty ! "k1.k2" @?= (Nothing :: Maybe ConfigMap)
encode (mempty ! "k1.k2" :: Maybe Config) @?= encode (Nothing :: Maybe Config)
mempty ! "k1.k2" @?= (Nothing :: Maybe (Maybe Bool))
mempty ! "k1.k2" @?= (Nothing :: Maybe Integer)
mempty ! "k1.k2" @?= (Nothing :: Maybe BS.ByteString)
mempty ! "k1.k2" @?= (Nothing :: Maybe [Integer])
mempty ! "k1.k2" @?= (Nothing :: Maybe T.Text)
mempty ! "k1.k2" @?= (Nothing :: Maybe String)
mempty ! "k1.k2" @?= (Nothing :: Maybe Int)
| databrary/databrary | test/Store/ConfigTest.hs | agpl-3.0 | 1,049 | 0 | 11 | 199 | 351 | 189 | 162 | 22 | 1 |