code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Main where
import Test.HUnit hiding (path)
import TestUtil
import Database.TokyoCabinet.BDB
import qualified Database.TokyoCabinet.BDB.Cursor as C
import Data.Maybe (catMaybes, fromJust)
import Data.List (sort)
import Control.Monad
-- | File name of the test database created by most test cases below.
dbname :: String
dbname = "foo.tcb"
-- | Run @action@ against a freshly opened B+ tree database and close the
-- handle afterwards.  The database is opened read/write and created when
-- the file does not exist.  The action's result is discarded.
--
-- NOTE(review): open/close failures are intentionally ignored here, and
-- the handle is not closed when @action@ throws — acceptable for these
-- tests, but not exception safe (a bracket would be).
withOpenedBDB :: String -> (BDB -> IO a) -> IO ()
withOpenedBDB name action = do
  h <- new
  _ <- open h name [OREADER, OWRITER, OCREAT]
  _ <- action h                -- result deliberately unused
  _ <- close h
  return ()
-- | Opening a non-existent file read-only must set the error code to
-- ENOFILE.  ('withoutFile' comes from TestUtil; presumably it guarantees
-- the file is absent around the action — TODO confirm.)
test_ecode =
  withoutFile dbname $ \fn -> do
    h <- new
    open h fn [OREADER]
    ecode h >>= (ENOFILE @=?)

-- | A freshly created handle can be deleted without ever being opened.
test_new_delete = do
  bdb <- new
  delete bdb

-- | Open/close life cycle: a read-only open of a missing file fails,
-- a creating open succeeds, close succeeds once and fails a second time.
test_open_close =
  withoutFile dbname $ \fn -> do
    bdb <- new
    not `fmap` open bdb fn [OREADER] @? "file does not exist"
    open bdb fn [OREADER, OWRITER, OCREAT] @? "open"
    close bdb @? "close"
    not `fmap` close bdb @? "cannot close closed file"
-- | Exercises the put variants: 'put' overwrites, 'putkeep' keeps the
-- existing value, 'putcat' concatenates onto it, and 'putdup'/'putlist'
-- store duplicate values under one key.
test_putxx =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      put bdb "foo" "bar"
      get bdb "foo" >>= (Just "bar" @=?)
      putkeep bdb "foo" "baz"
      get bdb "foo" >>= (Just "bar" @=?)
      putcat bdb "foo" "baz"
      get bdb "foo" >>= (Just "barbaz" @=?)
      putdup bdb "foo" "bar2" @? "putdup"
      getlist bdb "foo" >>= (["barbaz", "bar2"] @=?)
      putlist bdb "bar" ["hoge", "fuga", "abc"] @? "putlist"
      getlist bdb "bar" >>= (["hoge", "fuga", "abc"] @=?)

-- | 'out' removes a single record (the first duplicate of a key),
-- while 'outlist' removes every record stored under the key.
test_out =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      put bdb "foo" "bar"
      get bdb "foo" >>= (Just "bar" @=?)
      out bdb "foo" @? "out succeeded"
      get bdb "foo" >>= ((Nothing :: Maybe String) @=?)
      putlist bdb "bar" ([1, 2, 3] :: [Int])
      out bdb "bar" -- first one is removed
      get bdb "bar" >>= ((Just 2 :: Maybe Int) @=?)
      outlist bdb "bar"
      get bdb "bar" >>= ((Nothing :: Maybe Int) @=?)
-- | Basic store/retrieve round trips, duplicate handling via
-- 'putdup'/'getlist', and lookups of missing keys.
test_put_get =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      put bdb "1" "foo"
      put bdb "2" "bar"
      put bdb "3" "baz"
      get bdb "1" >>= (Just "foo" @=?)
      get bdb "2" >>= (Just "bar" @=?)
      get bdb "3" >>= (Just "baz" @=?)
      putdup bdb "1" "foo2"
      get bdb "4" >>= ((Nothing :: Maybe String) @=?)
      getlist bdb "1" >>= (["foo", "foo2"] @=?)
      getlist bdb "4" >>= (([] :: [String]) @=?)

-- | 'vnum' reports the number of values stored for a key,
-- or Nothing for a missing key.
test_vnum =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      putlist bdb "foo" ["bar", "baz", "hoge", "fuga"]
      vnum bdb "foo" >>= (Just 4 @=?)
      vnum bdb "bar" >>= (Nothing @=?)

-- | 'vsiz' reports the byte size of the stored value,
-- or Nothing for a missing key.
test_vsiz =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      put bdb "foo" "bar"
      vsiz bdb "foo" >>= (Just 3 @=?)
      vsiz bdb "bar" >>= ((Nothing :: Maybe Int) @=?)
-- | Walks the B+ tree with a cursor: first/next/prev/jump/last
-- positioning, reading keys and values, removing the record under the
-- cursor and inserting after it.  The expectations use the sorted
-- key/value pairs because a B+ tree iterates in key order.
test_iterate =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      let keys = ["foo", "bar", "baz", "jkl"]
          vals = [100, 200 ..] :: [Int]
          kvs = sort $ zip keys vals
          destkey = "baz"
          destval = fromJust $ lookup destkey kvs
      zipWithM (put bdb) keys vals
      cur <- C.new bdb
      C.first cur
      C.key cur >>= (Just (fst . head $ kvs) @=?)
      C.val cur >>= (Just (snd . head $ kvs) @=?)
      -- removing the current record leaves the cursor on the next one
      C.out cur @? "cursor out"
      get bdb (fst . head $ kvs) >>= ((Nothing :: Maybe String) @=?)
      C.key cur >>= (Just (fst . (!! 1) $ kvs) @=?)
      C.val cur >>= (Just (snd . (!! 1) $ kvs) @=?)
      C.next cur @? "cursor next"
      C.key cur >>= (Just (fst . (!! 2) $ kvs) @=?)
      C.val cur >>= (Just (snd . (!! 2) $ kvs) @=?)
      C.prev cur @? "cursor prev"
      C.key cur >>= (Just (fst . (!! 1) $ kvs) @=?)
      C.val cur >>= (Just (snd . (!! 1) $ kvs) @=?)
      -- jump "b" lands on "baz" here, since "bar" was removed above
      C.jump cur "b" @? "cursor jump"
      C.key cur >>= (Just destkey @=?)
      C.put cur (100 :: Int) C.CPAFTER @? "cursor put"
      getlist bdb destkey >>= (([destval, 100] :: [Int]) @=?)
      C.last cur @? "cursor last"
      C.key cur >>= (Just (fst . last $ kvs) @=?)
      C.delete cur
-- | Range queries with inclusive/exclusive bounds and a result limit;
-- Nothing bounds with limit -1 return every key.
test_range =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      let keys = ["abc", "abd", "bcd", "bcz", "fgh", "ghjc", "ziji"]
      zipWithM (put bdb) keys ([1..] :: [Int])
      range bdb (Just "a") True (Just "abz") True 10
        >>= (["abc", "abd"] @=?)
      range bdb (Just "a") True (Just "abd") False 10
        >>= (["abc"] @=?)
      range bdb (Just "abc") False (Just "fgh") False 3
        >>= (["abd", "bcd", "bcz"] @=?)
      range bdb (Just "a") False (Just "ab") False 10
        >>= (([] :: [String]) @=?)
      range bdb Nothing False Nothing False (-1) >>= (keys @=?)

-- | Forward-matching key search: keys sharing a prefix, with an
-- optional maximum number of results.
test_fwmkeys =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      mapM_ (uncurry (put bdb)) ([ ("foo", 100)
                                 , ("bar", 200)
                                 , ("baz", 201)
                                 , ("jkl", 300)] :: [(String, Int)])
      fwmkeys bdb "ba" 10 >>= (["bar", "baz"] @=?) . sort
      fwmkeys bdb "ba" 1 >>= (["bar"] @=?)
      fwmkeys bdb "" 10 >>= (["bar", "baz", "foo", "jkl"] @=?) . sort
-- | 'addint' increments a stored int; on a missing key it starts from
-- the added value, and on a non-numeric record it returns Nothing.
test_addint =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      let ini = 32 :: Int
      put bdb "foo" ini
      get bdb "foo" >>= (Just ini @=?)
      addint bdb "foo" 3
      get bdb "foo" >>= (Just (ini+3) @=?)
      addint bdb "bar" 1 >>= (Just 1 @=?)
      put bdb "bar" "foo"
      addint bdb "bar" 1 >>= (Nothing @=?)
-- | 'adddouble' increments a stored double; on a missing key it starts
-- from the added value, and on a non-numeric record it returns Nothing.
-- Floating-point results are compared within a tiny margin.
test_adddouble =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      let ini = 0.003 :: Double
      put bdb "foo" ini
      get bdb "foo" >>= (Just ini @=?)
      adddouble bdb "foo" 0.3
      (get bdb "foo" >>= (isIn (ini+0.3))) @? "isIn"
      adddouble bdb "bar" 0.5 >>= (Just 0.5 @=?)
      put bdb "bar" "foo"
      adddouble bdb "bar" 1.2 >>= (Nothing @=?)
  where
    margin = 1e-30
    -- Approximate equality within 'margin'.  A missing value now fails
    -- the assertion instead of crashing with a pattern-match error
    -- (the original had no clause for Nothing).
    isIn :: Double -> (Maybe Double) -> IO Bool
    isIn expected (Just actual) =
      let diff = expected - actual
      in return $ abs diff <= margin
    isIn _ Nothing = return False
-- | 'vanish' removes every record from the database.
test_vanish =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      put bdb "foo" "111"
      put bdb "bar" "222"
      put bdb "baz" "333"
      rnum bdb >>= (3 @=?)
      vanish bdb
      rnum bdb >>= (0 @=?)

-- | 'copy' duplicates the database file; the copy can be opened and
-- contains the original records.
test_copy =
  withoutFile dbname $ \fns ->
    withoutFile "bar.tcb" $ \fnd ->
      withOpenedBDB fns $ \bdb -> do
        put bdb "foo" "bar"
        copy bdb fnd
        close bdb
        open bdb fnd [OREADER]
        get bdb "foo" >>= (Just "bar" @=?)

-- | Transactions: 'tranabort' rolls back the changes made since
-- 'tranbegin', 'trancommit' makes them permanent.
test_txn =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb -> do
      tranbegin bdb
      put bdb "foo" "bar"
      get bdb "foo" >>= (Just "bar" @=?)
      tranabort bdb
      get bdb "foo" >>= ((Nothing :: Maybe String) @=?)
      tranbegin bdb
      put bdb "foo" "baz"
      get bdb "foo" >>= (Just "baz" @=?)
      trancommit bdb
      get bdb "foo" >>= (Just "baz" @=?)

-- | 'path' reports the file name of an opened database.
test_path =
  withoutFile dbname $ \fn ->
    withOpenedBDB fn $ \bdb ->
      path bdb >>= (Just dbname @=?)
-- | Tuning utilities.  'setcache', 'setxmsiz' and 'tune' are invoked
-- before 'open' (their contract presumably requires it — TODO confirm);
-- 'sync' and 'optimize' operate on the open handle.
test_util =
  withoutFile dbname $ \fn -> do
    bdb <- new
    setcache bdb 1000000 0 @? "setcache"
    setxmsiz bdb 1000000 @? "setxmsiz"
    tune bdb 0 0 0 (-1) (-1) [TLARGE, TBZIP] @? "tune"
    open bdb fn [OREADER, OWRITER, OCREAT]
    path bdb >>= (Just fn @=?)
    rnum bdb >>= (0 @=?)
    ((> 0) `fmap` fsiz bdb) @? "fsiz"
    sync bdb @? "sync"
    optimize bdb 0 0 0 (-1) (-1) [] @? "optimize"
    close bdb
-- | All HUnit test cases of this module with their labels.
tests = test [
    "new delete" ~: test_new_delete
  , "ecode" ~: test_ecode
  , "open close" ~: test_open_close
  , "put get" ~: test_put_get
  , "vnum" ~: test_vnum
  , "out" ~: test_out
  , "putxx" ~: test_putxx
  , "copy" ~: test_copy
  , "transaction" ~: test_txn
  , "range" ~: test_range
  , "fwmkeys" ~: test_fwmkeys
  , "path" ~: test_path
  , "addint" ~: test_addint
  , "adddouble" ~: test_adddouble
  , "util" ~: test_util
  , "vsiz" ~: test_vsiz
  , "vanish" ~: test_vanish
  , "iterate" ~: test_iterate
  ]

-- | Runs the whole suite in text mode.
main = runTestTT tests
| tom-lpsd/tokyocabinet-haskell | tests/BDBTest.hs | bsd-3-clause | 8,667 | 0 | 17 | 3,168 | 3,298 | 1,665 | 1,633 | 231 | 1 |
-- | A minimal replacement for the Prelude's @Show@ class, using the
-- project's own 'String' type.
module Sexy.Classes.Show (Show(..)) where

import Sexy.Data.String (String)

-- | Types that can be rendered as a 'String'.
class Show a where
  show :: a -> String
| DanBurton/sexy | src/Sexy/Classes/Show.hs | bsd-3-clause | 119 | 0 | 7 | 21 | 45 | 27 | 18 | 4 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Bead.View.Content.SubmissionTable (
AdministratedCourses
, AdministratedGroups
, CourseTestScriptInfos
, SubmissionTableContext(..)
, submissionTable
, submissionTableContext
, sortUserLines
, resultCell
) where
import Control.Monad
import Data.Char (isAlphaNum)
import Data.Function
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import Data.String
import Data.Time
import Numeric
import qualified Bead.Domain.Entities as E
import qualified Bead.Domain.Entity.Assignment as Assignment
import Bead.Domain.Evaluation
import Bead.Domain.Relationships
import qualified Bead.Controller.Pages as Pages
import Bead.Controller.UserStories (UserStory)
import qualified Bead.Controller.UserStories as S
import Bead.View.Content
import qualified Bead.View.Content.Bootstrap as Bootstrap
import qualified Bead.View.DataBridge as Param
import Text.Blaze.Html5 ((!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
-- | Courses administrated by the current user, keyed by course key.
type AdministratedCourses = Map CourseKey E.Course

-- | Groups administrated by the current user.  The String is presumably
-- the name of the group's course — TODO confirm at the producer.
type AdministratedGroups = Map GroupKey (E.Group, String)

-- | Test scripts available per course.
type CourseTestScriptInfos = Map CourseKey [(TestScriptKey, TestScriptInfo)]

-- | Everything the submission-table view needs about the current user's
-- administrated courses, groups and test scripts.
data SubmissionTableContext = SubmissionTableContext {
    stcAdminCourses :: AdministratedCourses
  , stcAdminGroups :: AdministratedGroups
  , stcCourseTestScriptInfos :: CourseTestScriptInfos
  }

-- | Case analysis (catamorphism) for 'SubmissionTableContext'.
submissionTableContextCata f (SubmissionTableContext courses groups testscripts)
  = f courses groups testscripts
-- | Collects the administrated courses, groups and per-course test
-- script infos from the user story monad.
submissionTableContext :: UserStory SubmissionTableContext
submissionTableContext = do
  ac <- S.administratedCourses
  ag <- S.administratedGroups
  ts <- Map.fromList <$> mapM (testScriptForCourse . fst) ac
  return $! SubmissionTableContext {
      stcAdminCourses = adminCourseMap ac
    , stcAdminGroups = adminGroupMap ag
    , stcCourseTestScriptInfos = ts
    }
  where
    -- Pair a course key with its test script infos.
    testScriptForCourse ck = do
      infos <- S.testScriptInfos ck
      return (ck, infos)

    adminCourseMap = Map.fromList
    -- Reshape (key, group, course-name) triples into map entries.
    adminGroupMap = Map.fromList . map (\(k,g,c) -> (k,(g,c)))
-- | Renders the complete submission table page fragment for a course or
-- group: header, assignment creation menu, the table itself, and the
-- course test script table.
submissionTable :: String -> UTCTime -> SubmissionTableContext -> SubmissionTableInfo -> IHtml
submissionTable tableId now stb table = submissionTableContextCata html stb where
  html courses groups testscripts = do
    msg <- getI18N
    return $ do
      H.h4 . H.b $ fromString $ stiCourse table
      i18n msg $ assignmentCreationMenu courses groups table
      i18n msg $ submissionTablePart tableId now stb table
      i18n msg $ courseTestScriptTable testscripts table
-- Produces the HTML table from the submission table information.
-- If no users are registered and no submissions were posted to the
-- group or course, an informational text is shown instead.
-- Supposes that the given table id is a unique name on the page.
submissionTablePart :: String -> UTCTime -> SubmissionTableContext -> SubmissionTableInfo -> IHtml
-- Empty table: no assignments and no students yet, render an
-- informational message instead of an empty grid.
submissionTablePart _tableId _now _ctx s
  | and [null $ submissionTableInfoAssignments s, null $ stiUsers s] = do
      msg <- getI18N
      return $ do
        Bootstrap.rowColMd12 $ Bootstrap.table $ do
          H.td (fromString $ msg $ msg_Home_SubmissionTable_NoCoursesOrStudents "There are no assignments or students yet.")
-- Non empty table
submissionTablePart tableId now ctx s = do
  msg <- getI18N
  return $ do
    courseForm $ Bootstrap.rowColMd12 $ do
      Bootstrap.table $ do
        checkedUserScript
        assignmentLine msg
        mapM_ (userLine msg s) (stiUserLines s)
  where
    -- JavaScript identifiers derived from the table id; the id is
    -- filtered to alphanumeric characters so it is a valid JS name.
    tableIdJSName = filter isAlphaNum tableId
    noOfUsers = tableIdJSName ++ "NoOfUsers"
    onCheck = tableIdJSName ++ "OnCheck"
    onUncheck = tableIdJSName ++ "OnUncheck"
    removeButton = tableIdJSName ++ "Button"
    onClick = tableIdJSName ++ "OnClick"

    -- Inline script that counts checked user checkboxes and enables the
    -- "Remove" button only while at least one is checked.
    checkedUserScript = H.script $ fromString $ unlines
      [ concat ["var ", noOfUsers, " = 0;"]
      , concat ["function ", onCheck, "(){"]
      , noOfUsers ++ "++;"
      , concat ["if(", noOfUsers, " > 0) {"]
      , concat ["document.getElementById(\"",removeButton,"\").disabled = false;"]
      , "}"
      , "}"
      , concat ["function ", onUncheck, "(){"]
      , noOfUsers ++ "--;"
      , concat ["if(", noOfUsers, " < 1) {"]
      , concat ["document.getElementById(\"",removeButton,"\").disabled = true;"]
      , noOfUsers ++ " = 0;"
      , "}"
      , "}"
      , concat ["function ", onClick, "(checkbox){"]
      , "if(checkbox.checked) {"
      , onCheck ++ "();"
      , "} else {"
      , onUncheck ++ "();"
      , "}"
      , "}"
      ]

    -- HTML: the surrounding form posts to the delete-users page of the
    -- course or group, respectively.
    courseForm = submissionTableInfoCata course group s where
      course _n _us _as _uls _ns ck = postForm (routeOf $ Pages.deleteUsersFromCourse ck ())
      group _n _us _cgas _uls _ns _ck gk = postForm (routeOf $ Pages.deleteUsersFromGroup gk ())

    headerCell = H.th

    -- Table header row: student name, username, one column per
    -- assignment, and the delete button column.
    assignmentLine msg = H.tr $ do
      headerCell $ fromString $ msg $ msg_Home_SubmissionTable_StudentName "Name"
      headerCell $ fromString $ msg $ msg_Home_SubmissionTable_Username "Username"
      assignmentLinks
      deleteHeaderCell msg
      where
        assignmentLinks = submissionTableInfoCata course group s

        course _name _users as _ulines _anames _key =
          mapM_ (modifyAssignmentLink courseButtonStyle "") $ zip [1..] as

        -- Group tables interleave course and group assignments; the two
        -- kinds are numbered independently (C1, C2, ... and G1, G2, ...).
        group _name _users cgas _ulines _anames ckey _gkey = do
          let as = reverse . snd $ foldl numbering ((1,1),[]) cgas
          mapM_ header as
          where
            numbering ((c,g),as) = cgInfoCata
              (\ak -> ((c+1,g),(CourseInfo (c,ak):as)))
              (\ak -> ((c,g+1),(GroupInfo (g,ak):as)))
            header = cgInfoCata
              (viewAssignmentLink courseButtonStyle ckey (msg $ msg_Home_CourseAssignmentIDPreffix "C"))
              (modifyAssignmentLink groupButtonStyle (msg $ msg_Home_GroupAssignmentIDPreffix "G"))

    assignmentName ak = maybe "" Assignment.name . Map.lookup ak $ stiAssignmentInfos s

    isActiveAssignment ak =
      maybe False (flip Assignment.isActive now) . Map.lookup ak $ stiAssignmentInfos s

    -- (active, closed) button CSS classes for course and group assignments.
    courseButtonStyle = ("btn-hcao", "btn-hcac")
    groupButtonStyle = ("btn-hgao", "btn-hgac")

    modifyAssignmentLink _buttonStyle@(active, passive) pfx (i,ak) =
      -- If the assignment is active we render with the active assignment
      -- button style, otherwise with the closed button style.
      H.td $ Bootstrap.customButtonLink
        [if (isActiveAssignment ak) then active else passive]
        (routeOf $ Pages.modifyAssignment ak ())
        (assignmentName ak)
        (concat [pfx, show i])

    viewAssignmentLink _buttonStyle@(active, passive) ck pfx (i,ak) =
      H.td $ Bootstrap.customButtonLink
        [if (isActiveAssignment ak) then active else passive]
        (viewOrModifyAssignmentLink ck ak)
        (assignmentName ak)
        (concat [pfx, show i])
      where
        -- Course admins get a modify link, everyone else a view link.
        viewOrModifyAssignmentLink ck ak =
          case Map.lookup ck (stcAdminCourses ctx) of
            Nothing -> routeOf $ Pages.viewAssignment ak ()
            Just _ -> routeOf $ Pages.modifyAssignment ak ()

    -- One table row per student: full name, user id, one cell per
    -- assignment, and the delete checkbox.
    userLine msg s (u,_p,submissionInfoMap) = do
      H.tr $ do
        let username = ud_username u
        H.td . fromString $ ud_fullname u
        H.td . fromString $ uid id $ ud_uid u
        submissionCells msg username s
        deleteUserCheckbox u
      where
        -- NOTE(review): 'submissionInfos' appears unused in this
        -- function — candidate for removal.
        submissionInfos = submissionTableInfoCata course group where
          course _n _users as _ulines _anames _key =
            catMaybes $ map (\ak -> Map.lookup ak submissionInfoMap) as
          group _n _users as _ulines _anames _ckey _gkey =
            catMaybes $ map lookup as
            where
              lookup = cgInfoCata (const Nothing) (flip Map.lookup submissionInfoMap)

        submissionCells msg username = submissionTableInfoCata course group where
          course _n _users as _ulines _anames _key = mapM_ (submissionInfoCell msg username) as
          group _n _users as _ulines _anames _ck _gk =
            mapM_ (cgInfoCata (submissionInfoCell msg username) (submissionInfoCell msg username)) as

        -- Empty cell when the student has no submission info for the
        -- assignment.
        submissionInfoCell msg u ak = case Map.lookup ak submissionInfoMap of
          Nothing -> H.td $ mempty
          Just si -> submissionCell msg u (ak,si)

        -- Cell icon per submission state; the cell links to the user's
        -- submissions page.
        submissionCell msg u (ak,si) =
          resultCell
            (linkWithHtml (routeWithParams (Pages.userSubmissions ()) [requestParam u, requestParam ak]))
            mempty -- not found
            (H.i ! A.class_ "glyphicon glyphicon-stop" ! A.style "color:#AAAAAA; font-size: xx-large"
                 ! tooltip (msg_Home_SubmissionCell_NonEvaluated "Non evaluated") $ mempty) -- non-evaluated
            (bool (H.i ! A.class_ "glyphicon glyphicon-ok-circle" ! A.style "color:#AAAAAA; font-size: xx-large"
                       ! tooltip (msg_Home_SubmissionCell_Tests_Passed "Tests are passed") $ mempty) -- tested accepted
                  (H.i ! A.class_ "glyphicon glyphicon-remove-circle" ! A.style "color:#AAAAAA; font-size: xx-large"
                       ! tooltip (msg_Home_SubmissionCell_Tests_Failed "Tests are failed") $ mempty)) -- tested rejected
            (H.i ! A.class_ "glyphicon glyphicon-thumbs-up" ! A.style "color:#00FF00; font-size: xx-large"
                 ! tooltip (msg_Home_SubmissionCell_Accepted "Accepted") $ mempty) -- accepted
            (H.i ! A.class_ "glyphicon glyphicon-thumbs-down" ! A.style "color:#FF0000; font-size: xx-large"
                 ! tooltip (msg_Home_SubmissionCell_Rejected "Rejected") $ mempty) -- rejected
            si -- of percent
          where
            tooltip m = A.title (fromString $ msg m)

    -- "Remove" submit button in the header; starts disabled and is
    -- enabled by the checkbox script above.
    deleteHeaderCell msg = submissionTableInfoCata deleteForCourseButton deleteForGroupButton s where
      deleteForCourseButton _n _us _as _uls _ans _ck =
        headerCell $ submitButtonDanger
          removeButton
          (msg $ msg_Home_DeleteUsersFromCourse "Remove") ! A.disabled ""
      deleteForGroupButton _n _us _as _uls _ans _ck _gk =
        headerCell $ submitButtonDanger
          removeButton
          (msg $ msg_Home_DeleteUsersFromGroup "Remove") ! A.disabled ""

    -- Per-row checkbox selecting the user for removal; wired to the
    -- counting script via the onclick handler.
    deleteUserCheckbox u = submissionTableInfoCata deleteCourseCheckbox deleteGroupCheckbox s where
      deleteCourseCheckbox _n _us _as _uls _ans _ck =
        H.td $ checkBox
          (Param.name delUserFromCoursePrm)
          (encode delUserFromCoursePrm $ ud_username u)
          False ! A.onclick (fromString (onClick ++ "(this)"))
      deleteGroupCheckbox _n _us _as _uls _ans _ck _gk =
        H.td $ checkBox
          (Param.name delUserFromGroupPrm)
          (encode delUserFromGroupPrm $ ud_username u)
          False ! A.onclick (fromString (onClick ++ "(this)"))
-- | Renders one submission cell.  The six continuation arguments select
-- the markup for each submission state; the chosen markup is passed to
-- @contentWrapper@ and wrapped in a table cell.
resultCell contentWrapper notFound unevaluated tested passed failed s =
  H.td $ contentWrapper (sc s)
  where
    sc = submissionInfoCata
           notFound
           unevaluated
           tested
           (\_key result -> val result) -- evaluated

    val (EvResult (BinEval (Binary Passed))) = passed
    val (EvResult (BinEval (Binary Failed))) = failed
    -- Percentage results are shown as a label; only single-score
    -- percentages are supported, anything else is a programming error.
    val (EvResult (PctEval (Percentage (Scores [p])))) = H.span ! A.class_ "label label-primary" $ fromString $ percent p
    val (EvResult (PctEval (Percentage _))) = error "SubmissionTable.coloredSubmissionCell percentage is not defined"

    percent x = join [show . round $ (100 * x), "%"]
-- | The test script table is only rendered for courses; group tables
-- get an empty page fragment.
courseTestScriptTable :: CourseTestScriptInfos -> SubmissionTableInfo -> IHtml
courseTestScriptTable cti = submissionTableInfoCata course group where
  course _n _us _as _uls _ans ck = testScriptTable cti ck
  group _n _us _as _uls _ans _ck _gk = (return (return ()))

-- Renders the course test script modification table if information is
-- found for the course, otherwise an empty fragment.  If the course is
-- found but has no test scripts, a message indicating that is rendered
-- instead of the modification table.
testScriptTable :: CourseTestScriptInfos -> CourseKey -> IHtml
testScriptTable cti ck = maybe (return "") courseFound $ Map.lookup ck cti where
  courseFound ts = do
    msg <- getI18N
    return $ do
      Bootstrap.rowColMd12 $ do
        H.h3 $ fromString $ msg $ msg_Home_ModifyTestScriptTable "Testers"
        case ts of
          [] -> H.p $ fromString $ msg $ msg_Home_NoTestScriptsWereDefined "There are no testers for the course."
          ts' -> Bootstrap.unorderedListGroup $ forM_ ts' $ \(tsk, tsi) ->
            Bootstrap.listGroupLinkItem
              (routeOf (Pages.modifyTestScript tsk ()))
              (fromString $ tsiName tsi)
-- Renders a menu for the creation of a course or group assignment if
-- the user administrates the given group or course.
assignmentCreationMenu
  :: AdministratedCourses
  -> AdministratedGroups
  -> SubmissionTableInfo
  -> IHtml
assignmentCreationMenu courses groups = submissionTableInfoCata courseMenu groupMenu
  where
    -- Group admins may create a group assignment; if they also
    -- administrate the course, a course assignment as well.
    groupMenu _n _us _as _uls _ans ck gk = maybe
      (return (return ()))
      (const $ do
        msg <- getI18N
        return . navigationWithRoute msg $
          case Map.lookup ck courses of
            Nothing -> [Pages.newGroupAssignment gk ()]
            Just _ -> [Pages.newGroupAssignment gk (), Pages.newCourseAssignment ck ()] )
      (Map.lookup gk groups)

    -- Course admins may create a course assignment only.
    courseMenu _n _us _as _uls _ans ck = maybe
      (return (return ()))
      (const $ do
        msg <- getI18N
        return (navigationWithRoute msg [Pages.newCourseAssignment ck ()]))
      (Map.lookup ck courses)

    -- The links laid out as a Bootstrap button group.
    navigationWithRoute msg links =
      H.div ! A.class_ "row" $ H.div ! A.class_ "col-md-6" $ H.div ! A.class_ "btn-group" $ mapM_ elem links
      where
        elem page = H.a ! A.href (routeOf page) ! A.class_ "btn btn-default" $ (fromString . msg $ linkText page)
-- * CSS Section

-- NOTE(review): these style values appear unused in the visible part of
-- this module — candidates for removal after checking other call sites.
openCourseAssignmentStyle = backgroundColor "#52B017"
openGroupAssignmentStyle = backgroundColor "#00FF00"
closedCourseAssignmentStyle = backgroundColor "#736F6E"
closedGroupAssignmentStyle = backgroundColor "#A3AFAE"
-- * Colors

newtype RGB = RGB (Int, Int, Int)

-- | Linear red-to-green gradient for a fraction in @[0,1]@:
-- 0 maps to pure red, 1 to pure green; blue is always zero.
pctCellColor :: Double -> RGB
pctCellColor pct = RGB (redPart, greenPart, 0)
  where
    redPart   = round ((1 - pct) * 255)
    greenPart = round (pct * 255)

-- | Render an RGB triple as a CSS @background-color@ declaration with a
-- two-hex-digit, zero-padded channel encoding.
colorStyle :: RGB -> String
colorStyle (RGB (r, g, b)) = concat ("background-color:#" : map channel [r, g, b])
  where
    channel c = pad (showHex c "")
    pad [d] = ['0', d]
    pad ds  = ds
-- * Tools

-- | Sorts the user lines of a submission table by the first component
-- of each user line triple, using the Hungarian collation 'compareHun';
-- everything else in the table info is kept intact.
sortUserLines = submissionTableInfoCata course group where
  course name users assignments userlines names key =
    CourseSubmissionTableInfo name users assignments (sort userlines) names key

  group name users assignments userlines names ckey gkey =
    GroupSubmissionTableInfo name users assignments (sort userlines) names ckey gkey

  -- NOTE: locally shadows Data.List.sort on purpose.
  sort = sortBy (compareHun `on` fst3)
-- | Projects the first component out of a three-tuple.
fst3 :: (a,b,c) -> a
fst3 (first, _, _) = first
-- | All assignment keys of a submission table; for group tables the
-- course/group distinction is dropped.
submissionTableInfoAssignments = submissionTableInfoCata course group where
  course _n _us as _uls _ans _ck = as
  group _n _us cgas _uls _ans _ck _gk = map (cgInfoCata id id) cgas

-- NOTE(review): 'headLine' appears unused in the visible part of this
-- module.
headLine = H.tr . H.th . fromString
| pgj/bead | src/Bead/View/Content/SubmissionTable.hs | bsd-3-clause | 15,319 | 0 | 25 | 3,713 | 4,168 | 2,154 | 2,014 | 284 | 5 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnBinds]{Renaming and dependency analysis of bindings}
This module does renaming and dependency analysis on value bindings in
the abstract syntax. It does {\em not} do cycle-checks on class or
type-synonym declarations; those cannot be done at this stage because
they may be affected by renaming (which isn't fully worked out yet).
-}
module RnBinds (
-- Renaming top-level bindings
rnTopBindsLHS, rnTopBindsBoot, rnValBindsRHS,
-- Renaming local bindings
rnLocalBindsAndThen, rnLocalValBindsLHS, rnLocalValBindsRHS,
-- Other bindings
rnMethodBinds, renameSigs,
rnMatchGroup, rnGRHSs, rnGRHS,
makeMiniFixityEnv, MiniFixityEnv,
HsSigCtxt(..)
) where
import {-# SOURCE #-} RnExpr( rnLExpr, rnStmts )
import HsSyn
import TcRnMonad
import TcEvidence ( emptyTcEvBinds )
import RnTypes
import RnPat
import RnNames
import RnEnv
import DynFlags
import Module
import Name
import NameEnv
import NameSet
import RdrName ( RdrName, rdrNameOcc )
import SrcLoc
import ListSetOps ( findDupsEq )
import BasicTypes ( RecFlag(..) )
import Digraph ( SCC(..) )
import Bag
import Util
import Outputable
import FastString
import UniqFM
import Maybes ( orElse )
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
import Data.List ( partition, sort )
{-
-- ToDo: Put the annotations into the monad, so that they arrive in the proper
-- place and can be used when complaining.
The code tree received by the function @rnBinds@ contains definitions
in where-clauses which are all apparently mutually recursive, but which may
not really depend upon each other. For example, in the top level program
\begin{verbatim}
f x = y where a = x
y = x
\end{verbatim}
the definitions of @a@ and @y@ do not depend on each other at all.
Unfortunately, the typechecker cannot always check such definitions.
\footnote{Mycroft, A. 1984. Polymorphic type schemes and recursive
definitions. In Proceedings of the International Symposium on Programming,
Toulouse, pp. 217-39. LNCS 167. Springer Verlag.}
However, the typechecker usually can check definitions in which only the
strongly connected components have been collected into recursive bindings.
This is precisely what the function @rnBinds@ does.
ToDo: deal with case where a single monobinds binds the same variable
twice.
The vertag tag is a unique @Int@; the tags only need to be unique
within one @MonoBinds@, so that unique-Int plumbing is done explicitly
(heavy monad machinery not needed).
************************************************************************
* *
* naming conventions *
* *
************************************************************************
\subsection[name-conventions]{Name conventions}
The basic algorithm involves walking over the tree and returning a tuple
containing the new tree plus its free variables. Some functions, such
as those walking polymorphic bindings (HsBinds) and qualifier lists in
list comprehensions (@Quals@), return the variables bound in local
environments. These are then used to calculate the free variables of the
expression evaluated in these environments.
Conventions for variable names are as follows:
\begin{itemize}
\item
new code is given a prime to distinguish it from the old.
\item
a set of variables defined in @Exp@ is written @dvExp@
\item
a set of variables free in @Exp@ is written @fvExp@
\end{itemize}
************************************************************************
* *
* analysing polymorphic bindings (HsBindGroup, HsBind)
* *
************************************************************************
\subsubsection[dep-HsBinds]{Polymorphic bindings}
Non-recursive expressions are reconstructed without any changes at top
level, although their component expressions may have to be altered.
However, non-recursive expressions are currently not expected as
\Haskell{} programs, and this code should not be executed.
Monomorphic bindings contain information that is returned in a tuple
(a @FlatMonoBinds@) containing:
\begin{enumerate}
\item
a unique @Int@ that serves as the ``vertex tag'' for this binding.
\item
the name of a function or the names in a pattern. These are a set
referred to as @dvLhs@, the defined variables of the left hand side.
\item
the free variables of the body. These are referred to as @fvBody@.
\item
the definition's actual code. This is referred to as just @code@.
\end{enumerate}
The function @nonRecDvFv@ returns two sets of variables. The first is
the set of variables defined in the set of monomorphic bindings, while the
second is the set of free variables in those bindings.
The set of variables defined in a non-recursive binding is just the
union of all of them, as @union@ removes duplicates. However, the
free variables in each successive set of cumulative bindings is the
union of those in the previous set plus those of the newest binding after
the defined variables of the previous set have been removed.
@rnMethodBinds@ deals only with the declarations in class and
instance declarations. It expects only to see @FunMonoBind@s, and
it expects the global environment to contain bindings for the binders
(which are all class operations).
************************************************************************
* *
\subsubsection{ Top-level bindings}
* *
************************************************************************
-}
-- for top-level bindings, we need to make top-level names,
-- so we have a different entry point than for local bindings
rnTopBindsLHS :: MiniFixityEnv
              -> HsValBinds RdrName
              -> RnM (HsValBindsLR Name RdrName)
-- Rename the left-hand sides of a top-level binding group: just
-- 'rnValBindsLHS' with a top-level 'NameMaker' built from the
-- fixity environment.
rnTopBindsLHS fix_env = rnValBindsLHS (topRecNameMaker fix_env)
rnTopBindsBoot :: NameSet -> HsValBindsLR Name RdrName -> RnM (HsValBinds Name, DefUses)
-- A hs-boot file has no bindings.
-- Return a single HsBindGroup with empty binds and renamed signatures
rnTopBindsBoot bound_names (ValBindsIn mbinds sigs)
  = do { checkErr (isEmptyLHsBinds mbinds) (bindsInHsBootFile mbinds)
         -- Real bindings are illegal in an hs-boot file; report them
       ; (sigs', fvs) <- renameSigs (HsBootCtxt bound_names) sigs
       ; return (ValBindsOut [] sigs', usesOnly fvs) }
-- Only ValBindsIn should reach this function; anything else is a compiler bug
rnTopBindsBoot _ b = pprPanic "rnTopBindsBoot" (ppr b)
{-
*********************************************************
* *
HsLocalBinds
* *
*********************************************************
-}
rnLocalBindsAndThen :: HsLocalBinds RdrName
                    -> (HsLocalBinds Name -> FreeVars -> RnM (result, FreeVars))
                    -> RnM (result, FreeVars)
-- Rename a group of local bindings and run a continuation in their scope,
-- passing the continuation the renamed binds and their free variables.
-- This version (a) assumes that the binding vars are *not* already in scope
-- (b) removes the binders from the free vars of the thing inside
-- The parser doesn't produce ThenBinds
rnLocalBindsAndThen EmptyLocalBinds thing_inside =
  -- Nothing to bind: run the continuation with an empty free-var set
  thing_inside EmptyLocalBinds emptyNameSet
rnLocalBindsAndThen (HsValBinds val_binds) thing_inside
  = rnLocalValBindsAndThen val_binds $ \ val_binds' ->
    thing_inside (HsValBinds val_binds')
rnLocalBindsAndThen (HsIPBinds binds) thing_inside = do
    (binds',fv_binds) <- rnIPBinds binds
    (thing, fvs_thing) <- thing_inside (HsIPBinds binds') fv_binds
    return (thing, fvs_thing `plusFV` fv_binds)
rnIPBinds :: HsIPBinds RdrName -> RnM (HsIPBinds Name, FreeVars)
-- Rename a group of implicit-parameter bindings, collecting the free
-- variables of all the right-hand sides.
rnIPBinds (IPBinds ip_binds _no_dict_binds) = do
    (ip_binds', fvs_s) <- mapAndUnzipM (wrapLocFstM rnIPBind) ip_binds
    -- The evidence-binds slot is reset to empty here; it is filled in
    -- later (the incoming _no_dict_binds is deliberately discarded)
    return (IPBinds ip_binds' emptyTcEvBinds, plusFVs fvs_s)
rnIPBind :: IPBind RdrName -> RnM (IPBind Name, FreeVars)
-- Rename one implicit-parameter binding: only the RHS expression is
-- renamed; the binder itself is rebuilt unchanged.
-- NOTE(review): the lazy ~(Left n) pattern assumes the binder is always
-- in Left form at this stage (the Right form presumably appears only
-- after type checking) -- confirm against the producer of these binds.
rnIPBind (IPBind ~(Left n) expr) = do
    (expr',fvExpr) <- rnLExpr expr
    return (IPBind (Left n) expr', fvExpr)
{-
************************************************************************
* *
ValBinds
* *
************************************************************************
-}
-- Renaming local binding groups
-- Does duplicate/shadow check
rnLocalValBindsLHS :: MiniFixityEnv
                   -> HsValBinds RdrName
                   -> RnM ([Name], HsValBindsLR Name RdrName)
-- Rename the LHSes of a local binding group with a local 'NameMaker',
-- then check the resulting binders for duplicates and shadowing.
-- Returns the bound names together with the part-renamed binds.
rnLocalValBindsLHS fix_env binds
  = do { binds' <- rnValBindsLHS (localRecNameMaker fix_env) binds
         -- Check for duplicates and shadowing
         -- Must do this *after* renaming the patterns
         -- See Note [Collect binders only after renaming] in HsUtils
         -- We need to check for dups here because we
         -- don't bind all of the variables from the ValBinds at once
         -- with bindLocatedLocals any more.
         --
         -- Note that we don't want to do this at the top level, since
         -- sorting out duplicates and shadowing there happens elsewhere.
         -- The behavior is even different. For example,
         --   import A(f)
         --   f = ...
         -- should not produce a shadowing warning (but it will produce
         -- an ambiguity warning if you use f), but
         --   import A(f)
         --   g = let f = ... in f
         -- should.
       ; let bound_names = collectHsValBinders binds'
             -- There should be only Ids, but if there are any bogus
             -- pattern synonyms, we'll collect them anyway, so that
             -- we don't generate subsequent out-of-scope messages
       ; envs <- getRdrEnvs
       ; checkDupAndShadowedNames envs bound_names
       ; return (bound_names, binds') }
-- renames the left-hand sides
-- generic version used both at the top level and for local binds
-- does some error checking, but not what gets done elsewhere at the top level
rnValBindsLHS :: NameMaker
              -> HsValBinds RdrName
              -> RnM (HsValBindsLR Name RdrName)
-- Rename every LHS in the bag of binds; the signatures are passed
-- through untouched (they are renamed later, in rnValBindsRHS).
rnValBindsLHS topP (ValBindsIn mbinds sigs)
  = do { mbinds' <- mapBagM (wrapLocM (rnBindLHS topP doc)) mbinds
       ; return $ ValBindsIn mbinds' sigs }
  where
    bndrs = collectHsBindsBinders mbinds
    doc   = text "In the binding group for:" <+> pprWithCommas ppr bndrs
-- Fix: the panic tag used to say "rnValBindsLHSFromDoc", a stale name
-- from an earlier version of this function; use the real name so the
-- panic message points at the right definition.
rnValBindsLHS _ b = pprPanic "rnValBindsLHS" (ppr b)
-- General version used both from the top-level and for local things
-- Assumes the LHS vars are in scope
--
-- Does not bind the local fixity declarations
rnValBindsRHS :: HsSigCtxt
              -> HsValBindsLR Name RdrName
              -> RnM (HsValBinds Name, DefUses)
-- Rename the signatures and all the right-hand sides, then run
-- dependency analysis to split the binds into SCC groups.
rnValBindsRHS ctxt (ValBindsIn mbinds sigs)
  = do { (sigs', sig_fvs) <- renameSigs ctxt sigs
       ; binds_w_dus <- mapBagM (rnLBind (mkSigTvFn sigs')) mbinds
       ; case depAnalBinds binds_w_dus of
           (anal_binds, anal_dus) -> return (valbind', valbind'_dus)
             where
               valbind' = ValBindsOut anal_binds sigs'
               valbind'_dus = anal_dus `plusDU` usesOnly sig_fvs
               -- Put the sig uses *after* the bindings
               -- so that the binders are removed from
               -- the uses in the sigs
       }
rnValBindsRHS _ b = pprPanic "rnValBindsRHS" (ppr b)
-- Wrapper for local binds
--
-- The *client* of this function is responsible for checking for unused binders;
-- it doesn't (and can't: we don't have the thing inside the binds) happen here
--
-- The client is also responsible for bringing the fixities into scope
rnLocalValBindsRHS :: NameSet  -- names bound by the LHSes
                   -> HsValBindsLR Name RdrName
                   -> RnM (HsValBinds Name, DefUses)
-- Just 'rnValBindsRHS' run in a 'LocalBindCtxt' built from the LHS binders.
rnLocalValBindsRHS bound_names = rnValBindsRHS (LocalBindCtxt bound_names)
-- for local binds
-- wrapper that does both the left- and right-hand sides
--
-- here there are no local fixity decls passed in;
-- the local fixity decls come from the ValBinds sigs
rnLocalValBindsAndThen
  :: HsValBinds RdrName
  -> (HsValBinds Name -> FreeVars -> RnM (result, FreeVars))
  -> RnM (result, FreeVars)
-- Rename a local binding group end-to-end (LHSes, then RHSes inside the
-- extended scope), run the continuation, and report unused local binders.
rnLocalValBindsAndThen binds@(ValBindsIn _ sigs) thing_inside
  = do { -- (A) Create the local fixity environment
         new_fixities <- makeMiniFixityEnv [L loc sig
                                           | L loc (FixSig sig) <- sigs]
         -- (B) Rename the LHSes
       ; (bound_names, new_lhs) <- rnLocalValBindsLHS new_fixities binds
         -- ...and bring them (and their fixities) into scope
       ; bindLocalNamesFV bound_names $
         addLocalFixities new_fixities bound_names $ do
       { -- (C) Do the RHS and thing inside
         (binds', dus) <- rnLocalValBindsRHS (mkNameSet bound_names) new_lhs
       ; (result, result_fvs) <- thing_inside binds' (allUses dus)
         -- Report unused bindings based on the (accurate)
         -- findUses.  E.g.
         --     let x = x in 3
         -- should report 'x' unused
       ; let real_uses = findUses dus result_fvs
             -- Insert fake uses for variables introduced implicitly by
             -- wildcards (#4404)
             implicit_uses = hsValBindsImplicits binds'
       ; warnUnusedLocalBinds bound_names
                              (real_uses `unionNameSet` implicit_uses)
       ; let
            -- The variables "used" in the val binds are:
            --   (1) the uses of the binds (allUses)
            --   (2) the FVs of the thing-inside
            all_uses = allUses dus `plusFV` result_fvs
            -- Note [Unused binding hack]
            -- ~~~~~~~~~~~~~~~~~~~~~~~~~~
            -- Note that *in contrast* to the above reporting of
            -- unused bindings, (1) above uses duUses to return *all*
            -- the uses, even if the binding is unused.  Otherwise consider:
            --     x = 3
            --     y = let p = x in 'x'    -- NB: p not used
            -- If we don't "see" the dependency of 'y' on 'x', we may put the
            -- bindings in the wrong order, and the type checker will complain
            -- that x isn't in scope
            --
            -- But note that this means we won't report 'x' as unused,
            -- whereas we would if we had { x = 3; p = x; y = 'x' }
       ; return (result, all_uses) }}
         -- The bound names are pruned out of all_uses
         -- by the bindLocalNamesFV call above
rnLocalValBindsAndThen bs _ = pprPanic "rnLocalValBindsAndThen" (ppr bs)
---------------------
-- renaming a single bind
rnBindLHS :: NameMaker
          -> SDoc
          -> HsBind RdrName
          -- returns the renamed left-hand side,
          -- and the FreeVars *of the LHS*
          -- (i.e., any free variables of the pattern)
          -> RnM (HsBindLR Name RdrName)
rnBindLHS name_maker _ bind@(PatBind { pat_lhs = pat })
  = do
      -- we don't actually use the FV processing of rnPatsAndThen here
      (pat',pat'_fvs) <- rnBindPat name_maker pat
      return (bind { pat_lhs = pat', bind_fvs = pat'_fvs })
        -- We temporarily store the pat's FVs in bind_fvs;
        -- gets updated to the FVs of the whole bind
        -- when doing the RHS below
rnBindLHS name_maker _ bind@(FunBind { fun_id = rdr_name })
  = do { name <- applyNameMaker name_maker rdr_name
       ; return (bind { fun_id   = name
                      , bind_fvs = placeHolderNamesTc }) }
rnBindLHS name_maker _ (PatSynBind psb@PSB{ psb_id = rdrname })
  | isTopRecNameMaker name_maker
  = do { addLocM checkConName rdrname
       ; name <- lookupLocatedTopBndrRn rdrname -- Should be in scope already
       ; return (PatSynBind psb{ psb_id = name }) }
  | otherwise  -- Pattern synonym, not at top level
  = do { addErr localPatternSynonymErr -- Complain, but make up a fake
                                       -- name so that we can carry on
       ; name <- applyNameMaker name_maker rdrname
       ; return (PatSynBind psb{ psb_id = name }) }
  where
    localPatternSynonymErr :: SDoc
    localPatternSynonymErr
      = hang (text "Illegal pattern synonym declaration for" <+> quotes (ppr rdrname))
           2 (text "Pattern synonym declarations are only valid at top level")
-- Fix: the panic tag was misspelled "rnBindHS"; name the function
-- correctly so grepping the panic message finds this definition.
rnBindLHS _ _ b = pprPanic "rnBindLHS" (ppr b)
rnLBind :: (Name -> [Name])          -- Signature tyvar function
        -> LHsBindLR Name RdrName
        -> RnM (LHsBind Name, [Name], Uses)
-- Rename a located binding: point error messages at the bind's own
-- source span, rename the payload, and re-attach the location.
rnLBind sig_fn (L loc bind) = setSrcSpan loc $ do
    (rn_bind, binders, uses) <- rnBind sig_fn bind
    return (L loc rn_bind, binders, uses)
-- assumes the left-hands-side vars are in scope
rnBind :: (Name -> [Name])        -- Signature tyvar function
       -> HsBindLR Name RdrName
       -> RnM (HsBind Name, [Name], Uses)
-- Rename the RHS of one binding whose LHS has already been renamed.
-- Returns the fully-renamed bind, the names it binds, and its uses
-- (for dependency analysis).
rnBind _ bind@(PatBind { pat_lhs = pat
                       , pat_rhs = grhss
                         -- pat fvs were stored in bind_fvs
                         -- after processing the LHS
                       , bind_fvs = pat_fvs })
  = do { mod <- getModule
       ; (grhss', rhs_fvs) <- rnGRHSs PatBindRhs rnLExpr grhss
         -- No scoped type variables for pattern bindings
       ; let all_fvs = pat_fvs `plusFV` rhs_fvs
             fvs'    = filterNameSet (nameIsLocalOrFrom mod) all_fvs
             -- Keep locally-defined Names
             -- As well as dependency analysis, we need these for the
             -- MonoLocalBinds test in TcBinds.decideGeneralisationPlan
             bndrs = collectPatBinders pat
             bind' = bind { pat_rhs = grhss',
                            pat_rhs_ty = placeHolderType, bind_fvs = fvs' }
             is_wild_pat = case pat of
                             L _ (WildPat {})                 -> True
                             L _ (BangPat (L _ (WildPat {}))) -> True -- #9127
                             _                                -> False
         -- Warn if the pattern binds no variables, except for the
         -- entirely-explicit idiom    _ = rhs
         -- which (a) is not that different from  _v = rhs
         --       (b) is sometimes used to give a type sig for,
         --           or an occurrence of, a variable on the RHS
       ; whenWOptM Opt_WarnUnusedPatternBinds $
         when (null bndrs && not is_wild_pat) $
         addWarn (Reason Opt_WarnUnusedPatternBinds) $ unusedPatBindWarn bind'
       ; fvs' `seq` -- See Note [Free-variable space leak]
         return (bind', bndrs, all_fvs) }
rnBind sig_fn bind@(FunBind { fun_id = name
                            , fun_matches = matches })
  -- invariant: no free vars here when it's a FunBind
  = do { let plain_name = unLoc name
       ; (matches', rhs_fvs) <- bindSigTyVarsFV (sig_fn plain_name) $
                                -- bindSigTyVars tests for LangExt.ScopedTyVars
                                rnMatchGroup (FunRhs name Prefix)
                                             rnLExpr matches
       ; let is_infix = isInfixFunBind bind
       ; when is_infix $ checkPrecMatch plain_name matches'
         -- Infix definitions get their operator precedences checked
       ; mod <- getModule
       ; let fvs' = filterNameSet (nameIsLocalOrFrom mod) rhs_fvs
             -- Keep locally-defined Names
             -- As well as dependency analysis, we need these for the
             -- MonoLocalBinds test in TcBinds.decideGeneralisationPlan
       ; fvs' `seq` -- See Note [Free-variable space leak]
         return (bind { fun_matches = matches'
                      , bind_fvs    = fvs' },
                 [plain_name], rhs_fvs)
       }
rnBind sig_fn (PatSynBind bind)
  = do { (bind', name, fvs) <- rnPatSynBind sig_fn bind
       ; return (PatSynBind bind', name, fvs) }
rnBind _ b = pprPanic "rnBind" (ppr b)
{-
Note [Free-variable space leak]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We have
fvs' = trim fvs
and we seq fvs' before returning it as part of a record.
The reason is that trim is sometimes something like
\xs -> intersectNameSet (mkNameSet bound_names) xs
and we don't want to retain the list bound_names. This showed up in
trac ticket #1136.
-}
{- *********************************************************************
* *
Dependency analysis and other support functions
* *
********************************************************************* -}
depAnalBinds :: Bag (LHsBind Name, [Name], Uses)
             -> ([(RecFlag, LHsBinds Name)], DefUses)
-- Dependency analysis; this is important so that
-- unused-binding reporting is accurate
-- Each SCC of the def/use graph becomes one group: acyclic SCCs are
-- NonRecursive singletons, cyclic SCCs are Recursive groups.
depAnalBinds binds_w_dus
  = (map get_binds sccs, map get_du sccs)
  where
    sccs = depAnal (\(_, defs, _) -> defs)
                   (\(_, _, uses) -> nonDetEltsUFM uses)
                   -- It's OK to use nonDetEltsUFM here as explained in
                   -- Note [depAnal determinism] in NameEnv.
                   (bagToList binds_w_dus)

    get_binds (AcyclicSCC (bind, _, _)) = (NonRecursive, unitBag bind)
    get_binds (CyclicSCC  binds_w_dus)  = (Recursive, listToBag [b | (b,_,_) <- binds_w_dus])

    get_du (AcyclicSCC (_, bndrs, uses)) = (Just (mkNameSet bndrs), uses)
    get_du (CyclicSCC  binds_w_dus)      = (Just defs, uses)
        where
          defs = mkNameSet [b | (_,bs,_) <- binds_w_dus, b <- bs]
          uses = unionNameSets [u | (_,_,u) <- binds_w_dus]
---------------------
-- Bind the top-level forall'd type variables in the sigs.
-- E.g  f :: a -> a
--      f = rhs
--      The 'a' scopes over the rhs
--
-- NB: there'll usually be just one (for a function binding)
--     but if there are many, one may shadow the rest; too bad!
--     e.g  x :: [a] -> [a]
--          y :: [(a,a)] -> a
--          (x,y) = e
--      In e, 'a' will be in scope, and it'll be the one from 'y'!
mkSigTvFn :: [LSig Name] -> (Name -> [Name])
-- Return a lookup function that maps an Id Name to the names
-- of the type variables that should scope over its body.
mkSigTvFn sigs
  = \n -> lookupNameEnv env n `orElse` []
  where
    env :: NameEnv [Name]
    -- Built once; the returned closure only does lookups
    env = foldr add_scoped_sig emptyNameEnv sigs

    -- Only type-like signatures contribute scoped tyvars;
    -- everything else (fixity, inline, ...) is skipped
    add_scoped_sig :: LSig Name -> NameEnv [Name] -> NameEnv [Name]
    add_scoped_sig (L _ (ClassOpSig _ names sig_ty)) env
      = add_scoped_tvs names (hsScopedTvs sig_ty) env
    add_scoped_sig (L _ (TypeSig names sig_ty)) env
      = add_scoped_tvs names (hsWcScopedTvs sig_ty) env
    add_scoped_sig (L _ (PatSynSig names sig_ty)) env
      = add_scoped_tvs names (hsScopedTvs sig_ty) env
    add_scoped_sig _ env = env

    -- Record the same tyvar list against every binder the sig mentions
    add_scoped_tvs :: [Located Name] -> [Name] -> NameEnv [Name] -> NameEnv [Name]
    add_scoped_tvs id_names tv_names env
      = foldr (\(L _ id_n) env -> extendNameEnv env id_n tv_names) env id_names
-- Process the fixity declarations, making a FastString -> (Located Fixity) map
-- (We keep the location around for reporting duplicate fixity declarations.)
--
-- Checks for duplicates, but not that only locally defined things are fixed.
-- Note: for local fixity declarations, duplicates would also be checked in
-- check_sigs below.  But we also use this function at the top level.
makeMiniFixityEnv :: [LFixitySig RdrName] -> RnM MiniFixityEnv
makeMiniFixityEnv decls = foldlM add_one_sig emptyFsEnv decls
  where
    -- A single FixitySig may fix several names; expand it
    add_one_sig env (L loc (FixitySig names fixity)) =
      foldlM add_one env [ (loc,name_loc,name,fixity)
                         | L name_loc name <- names ]

    add_one env (loc, name_loc, name,fixity) = do
      { -- this fixity decl is a duplicate iff
        -- the ReaderName's OccName's FastString is already in the env
        -- (we only need to check the local fix_env because
        --  definitions of non-local will be caught elsewhere)
        let { fs = occNameFS (rdrNameOcc name)
            ; fix_item = L loc fixity };

        case lookupFsEnv env fs of
          Nothing -> return $ extendFsEnv env fs fix_item
          Just (L loc' _) -> do
            { -- Duplicate: report at the later occurrence, keep the first
              setSrcSpan loc $
              addErrAt name_loc (dupFixityDecl loc' name)
            ; return env}
      }
dupFixityDecl :: SrcSpan -> RdrName -> SDoc
-- Error message for two fixity declarations for the same name; the
-- 'SrcSpan' is where the other declaration lives.
dupFixityDecl loc rdr_name = vcat [headline, other_site]
  where
    headline   = text "Multiple fixity declarations for" <+> quotes (ppr rdr_name)
    other_site = text "also at " <+> ppr loc
{- *********************************************************************
* *
Pattern synonym bindings
* *
********************************************************************* -}
rnPatSynBind :: (Name -> [Name])           -- Signature tyvar function
             -> PatSynBind Name RdrName
             -> RnM (PatSynBind Name Name, [Name], Uses)
-- Rename a pattern synonym binding: the pattern, the argument details
-- (prefix/infix/record), and the optional explicit builder.
rnPatSynBind sig_fn bind@(PSB { psb_id = L l name
                              , psb_args = details
                              , psb_def = pat
                              , psb_dir = dir })
  -- invariant: no free vars here when it's a FunBind
  = do { pattern_synonym_ok <- xoptM LangExt.PatternSynonyms
       ; unless pattern_synonym_ok (addErr patternSynonymErr)
       ; let sig_tvs = sig_fn name

       ; ((pat', details'), fvs1) <- bindSigTyVarsFV sig_tvs $
                                     rnPat PatSyn pat $ \pat' ->
         -- We check the 'RdrName's instead of the 'Name's
         -- so that the binding locations are reported
         -- from the left-hand side
         case details of
           PrefixPatSyn vars ->
             do { checkDupRdrNames vars
                ; names <- mapM lookupVar vars
                ; return ( (pat', PrefixPatSyn names)
                         , mkFVs (map unLoc names)) }
           InfixPatSyn var1 var2 ->
             do { checkDupRdrNames [var1, var2]
                ; name1 <- lookupVar var1
                ; name2 <- lookupVar var2
                -- ; checkPrecMatch -- TODO
                ; return ( (pat', InfixPatSyn name1 name2)
                         , mkFVs (map unLoc [name1, name2])) }
           RecordPatSyn vars ->
             do { checkDupRdrNames (map recordPatSynSelectorId vars)
                  -- Selector names are top-level binders; the hidden
                  -- pattern variables are ordinary occurrences
                ; let rnRecordPatSynField
                        (RecordPatSynField { recordPatSynSelectorId = visible
                                           , recordPatSynPatVar = hidden })
                        = do { visible' <- lookupLocatedTopBndrRn visible
                             ; hidden'  <- lookupVar hidden
                             ; return $ RecordPatSynField { recordPatSynSelectorId = visible'
                                                          , recordPatSynPatVar = hidden' } }
                ; names <- mapM rnRecordPatSynField vars
                ; return ( (pat', RecordPatSyn names)
                         , mkFVs (map (unLoc . recordPatSynPatVar) names)) }

       ; (dir', fvs2) <- case dir of
           Unidirectional -> return (Unidirectional, emptyFVs)
           ImplicitBidirectional -> return (ImplicitBidirectional, emptyFVs)
           ExplicitBidirectional mg ->
             do { (mg', fvs) <- bindSigTyVarsFV sig_tvs $
                                rnMatchGroup (FunRhs (L l name) Prefix)
                                             rnLExpr mg
                ; return (ExplicitBidirectional mg', fvs) }

       ; mod <- getModule
       ; let fvs = fvs1 `plusFV` fvs2
             fvs' = filterNameSet (nameIsLocalOrFrom mod) fvs
             -- Keep locally-defined Names
             -- As well as dependency analysis, we need these for the
             -- MonoLocalBinds test in TcBinds.decideGeneralisationPlan

       ; let bind' = bind{ psb_args = details'
                         , psb_def = pat'
                         , psb_dir = dir'
                         , psb_fvs = fvs' }
       ; let selector_names = case details' of
               RecordPatSyn names ->
                 map (unLoc . recordPatSynSelectorId) names
               _ -> []

       ; fvs' `seq` -- See Note [Free-variable space leak]
         return (bind', name : selector_names , fvs1)
         -- Only fvs1 is returned as the Uses:
         -- See Note [Pattern synonym builders don't yield dependencies]
       }
  where
    lookupVar = wrapLocM lookupOccRn

    patternSynonymErr :: SDoc
    patternSynonymErr
      = hang (text "Illegal pattern synonym declaration")
           2 (text "Use -XPatternSynonyms to enable this extension")
{-
Note [Pattern synonym builders don't yield dependencies]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When renaming a pattern synonym that has an explicit builder,
references in the builder definition should not be used when
calculating dependencies. For example, consider the following pattern
synonym definition:
pattern P x <- C1 x where
P x = f (C1 x)
f (P x) = C2 x
In this case, 'P' needs to be typechecked in two passes:
1. Typecheck the pattern definition of 'P', which fully determines the
type of 'P'. This step doesn't require knowing anything about 'f',
since the builder definition is not looked at.
2. Typecheck the builder definition, which needs the typechecked
definition of 'f' to be in scope.
This behaviour is implemented in 'tcValBinds', but it crucially
depends on 'P' not being put in a recursive group with 'f' (which
would make it look like a recursive pattern synonym a la 'pattern P =
P' which is unsound and rejected).
-}
{- *********************************************************************
* *
Class/instance method bindings
* *
********************************************************************* -}
{- @rnMethodBinds@ is used for the method bindings of a class and an instance
declaration. Like @rnBinds@ but without dependency analysis.
NOTA BENE: we record each {\em binder} of a method-bind group as a free variable.
That's crucial when dealing with an instance decl:
\begin{verbatim}
instance Foo (T a) where
op x = ...
\end{verbatim}
This might be the {\em sole} occurrence of @op@ for an imported class @Foo@,
and unless @op@ occurs we won't treat the type signature of @op@ in the class
decl for @Foo@ as a source of instance-decl gates. But we should! Indeed,
in many ways the @op@ in an instance decl is just like an occurrence, not
a binder.
-}
rnMethodBinds :: Bool                   -- True <=> is a class declaration
              -> Name                   -- Class name
              -> [Name]                 -- Type variables from the class/instance header
              -> LHsBinds RdrName       -- Binds
              -> [LSig RdrName]         -- and signatures/pragmas
              -> RnM (LHsBinds Name, [LSig Name], FreeVars)
-- Used for
--   * the default method bindings in a class decl
--   * the method bindings in an instance decl
rnMethodBinds is_cls_decl cls ktv_names binds sigs
  = do { checkDupRdrNames (collectMethodBinders binds)
             -- Check that the same method is not given twice in the
             -- same instance decl      instance C T where
             --                       f x = ...
             --                       g y = ...
             --                       f x = ...
             -- We must use checkDupRdrNames because the Name of the
             -- method is the Name of the class selector, whose SrcSpan
             -- points to the class declaration; and we use rnMethodBinds
             -- for instance decls too

       -- Rename the bindings LHSs
       ; binds' <- foldrBagM (rnMethodBindLHS is_cls_decl cls) emptyBag binds

       -- Rename the pragmas and signatures
       -- Annoyingly the type variables /are/ in scope for signatures, but
       -- /are not/ in scope in the SPECIALISE instance pragmas; e.g.
       --    instance Eq a => Eq (T a) where
       --       (==) :: a -> a -> a
       --       {-# SPECIALISE instance Eq a => Eq (T [a]) #-}
       ; let (spec_inst_prags, other_sigs) = partition isSpecInstLSig sigs
             bound_nms = mkNameSet (collectHsBindsBinders binds')
             sig_ctxt | is_cls_decl = ClsDeclCtxt cls
                      | otherwise   = InstDeclCtxt bound_nms
       ; (spec_inst_prags', sip_fvs) <- renameSigs sig_ctxt spec_inst_prags
       ; (other_sigs', sig_fvs) <- extendTyVarEnvFVRn ktv_names $
                                   renameSigs sig_ctxt other_sigs

       -- Rename the bindings RHSs.  Again there's an issue about whether the
       -- type variables from the class/instance head are in scope.
       -- Answer no in Haskell 2010, but yes if you have -XScopedTypeVariables
       ; scoped_tvs <- xoptM LangExt.ScopedTypeVariables
       ; (binds'', bind_fvs) <- maybe_extend_tyvar_env scoped_tvs $
              do { binds_w_dus <- mapBagM (rnLBind (mkSigTvFn other_sigs')) binds'
                 ; let bind_fvs = foldrBag (\(_,_,fv1) fv2 -> fv1 `plusFV` fv2)
                                           emptyFVs binds_w_dus
                 ; return (mapBag fstOf3 binds_w_dus, bind_fvs) }

       ; return ( binds'', spec_inst_prags' ++ other_sigs'
                , sig_fvs `plusFV` sip_fvs `plusFV` bind_fvs) }
  where
    -- For the method bindings in class and instance decls, we extend
    -- the type variable environment iff -XScopedTypeVariables
    maybe_extend_tyvar_env scoped_tvs thing_inside
      | scoped_tvs = extendTyVarEnvFVRn ktv_names thing_inside
      | otherwise  = thing_inside
rnMethodBindLHS :: Bool -> Name
                -> LHsBindLR RdrName RdrName
                -> LHsBindsLR Name RdrName
                -> RnM (LHsBindsLR Name RdrName)
-- Rename the LHS of one method binding and cons it onto the accumulator.
-- Only FunBinds are legal here; anything else is reported and dropped
-- (which is why the caller folds rather than maps).
rnMethodBindLHS _ cls (L loc bind@(FunBind { fun_id = name })) rest
  = setSrcSpan loc $
    -- (Cleanup: removed a redundant nested 'do' that wrapped this block)
    do { sel_name <- wrapLocM (lookupInstDeclBndr cls (text "method")) name
       -- We use the selector name as the binder
       ; let bind' = bind { fun_id   = sel_name
                          , bind_fvs = placeHolderNamesTc }
       ; return (L loc bind' `consBag` rest ) }

-- Report error for all other forms of bindings
-- This is why we use a fold rather than map
rnMethodBindLHS is_cls_decl _ (L loc bind) rest
  = do { addErrAt loc $
         vcat [ what <+> text "not allowed in" <+> decl_sort
              , nest 2 (ppr bind) ]
       ; return rest }
  where
    decl_sort | is_cls_decl = text "class declaration:"
              | otherwise   = text "instance declaration:"
    what = case bind of
             PatBind {}    -> text "Pattern bindings (except simple variables)"
             PatSynBind {} -> text "Pattern synonyms"
                              -- Associated pattern synonyms are not implemented yet
             _ -> pprPanic "rnMethodBind" (ppr bind)
{-
************************************************************************
* *
\subsubsection[dep-Sigs]{Signatures (and user-pragmas for values)}
* *
************************************************************************
@renameSigs@ checks for:
\begin{enumerate}
\item more than one sig for one thing;
\item signatures given for things not bound here;
\end{enumerate}
At the moment we don't gather free-var info from the types in
signatures. We'd only need this if we wanted to report unused tyvars.
-}
renameSigs :: HsSigCtxt
           -> [LSig RdrName]
           -> RnM ([LSig Name], FreeVars)
-- Renames the signatures and performs error checks:
--   * duplicate signatures for the same binder
--   * multiple MINIMAL pragmas
--   * signatures that are not allowed in this context (misplaced)
renameSigs ctxt sigs
  = do { mapM_ dupSigDeclErr (findDupSigs sigs)

       ; checkDupMinimalSigs sigs

       ; (sigs', sig_fvs) <- mapFvRn (wrapLocFstM (renameSig ctxt)) sigs

       ; let (good_sigs, bad_sigs) = partition (okHsSig ctxt) sigs'
       ; mapM_ misplacedSigErr bad_sigs                 -- Misplaced

       ; return (good_sigs, sig_fvs) }
----------------------
-- We use lookupSigOccRn in the signatures, which is a little bit unsatisfactory
-- because this won't work for:
--   instance Foo T where
--      {-# INLINE op #-}
--      Baz.op = ...
-- We'll just rename the INLINE prag to refer to whatever other 'op'
-- is in scope.  (I'm assuming that Baz.op isn't in scope unqualified.)
-- Doesn't seem worth much trouble to sort this.

renameSig :: HsSigCtxt -> Sig RdrName -> RnM (Sig Name, FreeVars)
-- Rename one signature/pragma; one equation per signature form.
-- FixitySig is renamed elsewhere.
renameSig _ (IdSig x)
  = return (IdSig x, emptyFVs)    -- Actually this never occurs

renameSig ctxt sig@(TypeSig vs ty)
  = do { new_vs <- mapM (lookupSigOccRn ctxt sig) vs
       ; let doc = TypeSigCtx (ppr_sig_bndrs vs)
       ; (new_ty, fvs) <- rnHsSigWcType doc ty
       ; return (TypeSig new_vs new_ty, fvs) }

renameSig ctxt sig@(ClassOpSig is_deflt vs ty)
  = do { defaultSigs_on <- xoptM LangExt.DefaultSignatures
       ; when (is_deflt && not defaultSigs_on) $
         addErr (defaultSigErr sig)
       ; new_v <- mapM (lookupSigOccRn ctxt sig) vs
       ; (new_ty, fvs) <- rnHsSigType ty_ctxt ty
       ; return (ClassOpSig is_deflt new_v new_ty, fvs) }
  where
    -- NOTE(review): partial pattern -- relies on class-op sigs always
    -- naming at least one binder (the parser presumably guarantees this)
    (v1:_) = vs
    ty_ctxt = GenericCtx (text "a class method signature for"
                          <+> quotes (ppr v1))

renameSig _ (SpecInstSig src ty)
  = do { (new_ty, fvs) <- rnHsSigType SpecInstSigCtx ty
       ; return (SpecInstSig src new_ty,fvs) }

-- {-# SPECIALISE #-} pragmas can refer to imported Ids
-- so, in the top-level case (when mb_names is Nothing)
-- we use lookupOccRn.  If there's both an imported and a local 'f'
-- then the SPECIALISE pragma is ambiguous, unlike all other signatures
renameSig ctxt sig@(SpecSig v tys inl)
  = do { new_v <- case ctxt of
                    TopSigCtxt {} -> lookupLocatedOccRn v
                    _             -> lookupSigOccRn ctxt sig v
       ; (new_ty, fvs) <- foldM do_one ([],emptyFVs) tys
       ; return (SpecSig new_v new_ty inl, fvs) }
  where
    ty_ctxt = GenericCtx (text "a SPECIALISE signature for"
                          <+> quotes (ppr v))
    -- Rename each specialisation type, accumulating free variables
    do_one (tys,fvs) ty
      = do { (new_ty, fvs_ty) <- rnHsSigType ty_ctxt ty
           ; return ( new_ty:tys, fvs_ty `plusFV` fvs) }

renameSig ctxt sig@(InlineSig v s)
  = do { new_v <- lookupSigOccRn ctxt sig v
       ; return (InlineSig new_v s, emptyFVs) }

renameSig ctxt sig@(FixSig (FixitySig vs f))
  = do { new_vs <- mapM (lookupSigOccRn ctxt sig) vs
       ; return (FixSig (FixitySig new_vs f), emptyFVs) }

renameSig ctxt sig@(MinimalSig s (L l bf))
  = do new_bf <- traverse (lookupSigOccRn ctxt sig) bf
       return (MinimalSig s (L l new_bf), emptyFVs)

renameSig ctxt sig@(PatSynSig vs ty)
  = do { new_vs <- mapM (lookupSigOccRn ctxt sig) vs
       ; (ty', fvs) <- rnHsSigType ty_ctxt ty
       ; return (PatSynSig new_vs ty', fvs) }
  where
    ty_ctxt = GenericCtx (text "a pattern synonym signature for"
                          <+> ppr_sig_bndrs vs)
ppr_sig_bndrs :: [Located RdrName] -> SDoc
-- Comma-separate the binders of a signature and quote the lot.
ppr_sig_bndrs = quotes . pprWithCommas ppr
okHsSig :: HsSigCtxt -> LSig a -> Bool
-- Is this kind of signature allowed in this context?
-- NB: the case alternatives match top-to-bottom, so the catch-all
-- rows (e.g. "(ClassOpSig {}, _)") must stay after the specific ones.
okHsSig ctxt (L _ sig)
  = case (sig, ctxt) of
     (ClassOpSig {}, ClsDeclCtxt {})  -> True
     (ClassOpSig {}, InstDeclCtxt {}) -> True
     (ClassOpSig {}, _)               -> False

     (TypeSig {}, ClsDeclCtxt {})  -> False
     (TypeSig {}, InstDeclCtxt {}) -> False
     (TypeSig {}, _)               -> True

     (PatSynSig {}, TopSigCtxt{}) -> True
     (PatSynSig {}, _)            -> False

     (FixSig {}, InstDeclCtxt {}) -> False
     (FixSig {}, _)               -> True

     (IdSig {}, TopSigCtxt {})   -> True
     (IdSig {}, InstDeclCtxt {}) -> True
     (IdSig {}, _)               -> False

     (InlineSig {}, HsBootCtxt {}) -> False
     (InlineSig {}, _)             -> True

     (SpecSig {}, TopSigCtxt {})    -> True
     (SpecSig {}, LocalBindCtxt {}) -> True
     (SpecSig {}, InstDeclCtxt {})  -> True
     (SpecSig {}, _)                -> False

     (SpecInstSig {}, InstDeclCtxt {}) -> True
     (SpecInstSig {}, _)               -> False

     (MinimalSig {}, ClsDeclCtxt {}) -> True
     (MinimalSig {}, _)              -> False
-------------------
findDupSigs :: [LSig RdrName] -> [[(Located RdrName, Sig RdrName)]]
-- Check for duplicates on RdrName version,
-- because renamed version has unboundName for
-- not-in-scope binders, which gives bogus dup-sig errors
-- NB: in a class decl, a 'generic' sig is not considered
--     equal to an ordinary sig, so we allow, say
--           class C a where
--             op :: a -> a
--             default op :: Eq a => a -> a
findDupSigs sigs
  = findDupsEq matching_sig (concatMap (expand_sig . unLoc) sigs)
  where
    -- One (binder, sig) pair per name a signature mentions
    expand_sig sig@(FixSig (FixitySig ns _)) = zip ns (repeat sig)
    expand_sig sig@(InlineSig n _)           = [(n,sig)]
    expand_sig sig@(TypeSig ns _)            = [(n,sig) | n <- ns]
    expand_sig sig@(ClassOpSig _ ns _)       = [(n,sig) | n <- ns]
    expand_sig sig@(PatSynSig ns _ )         = [(n,sig) | n <- ns]
    expand_sig _ = []

    -- Two entries clash only when both names and sig kinds agree
    matching_sig (L _ n1,sig1) (L _ n2,sig2) = n1 == n2 && mtch sig1 sig2
    mtch (FixSig {})         (FixSig {})         = True
    mtch (InlineSig {})      (InlineSig {})      = True
    mtch (TypeSig {})        (TypeSig {})        = True
    mtch (ClassOpSig d1 _ _) (ClassOpSig d2 _ _) = d1 == d2
    mtch (PatSynSig _ _)     (PatSynSig _ _)     = True
    mtch _ _ = False
-- Warn about multiple MINIMAL signatures
checkDupMinimalSigs :: [LSig RdrName] -> RnM ()
-- Report an error when a signature list carries two or more MINIMAL
-- pragmas; zero or one is fine.
checkDupMinimalSigs sigs =
  case filter isMinimalLSig sigs of
    dups@(_ : _ : _) -> dupMinimalSigErr dups
    _                -> return ()
{-
************************************************************************
* *
\subsection{Match}
* *
************************************************************************
-}
rnMatchGroup :: Outputable (body RdrName) => HsMatchContext Name
             -> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
             -> MatchGroup RdrName (Located (body RdrName))
             -> RnM (MatchGroup Name (Located (body Name)), FreeVars)
-- Rename every match in a group, threading the supplied body-renamer;
-- an empty group is an error unless -XEmptyCase is on.
rnMatchGroup ctxt rnBody (MG { mg_alts = L _ ms, mg_origin = origin })
  = do { empty_case_ok <- xoptM LangExt.EmptyCase
       ; when (null ms && not empty_case_ok) (addErr (emptyCaseErr ctxt))
       ; (new_ms, ms_fvs) <- mapFvRn (rnMatch ctxt rnBody) ms
       ; return (mkMatchGroupName origin new_ms, ms_fvs) }
rnMatch :: Outputable (body RdrName) => HsMatchContext Name
        -> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
        -> LMatch RdrName (Located (body RdrName))
        -> RnM (LMatch Name (Located (body Name)), FreeVars)
-- Located wrapper around rnMatch'
rnMatch ctxt rnBody = wrapLocFstM (rnMatch' ctxt rnBody)
rnMatch' :: Outputable (body RdrName) => HsMatchContext Name
         -> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
         -> Match RdrName (Located (body RdrName))
         -> RnM (Match Name (Located (body Name)), FreeVars)
-- Rename one match: reject obsolete result-type signatures, rename the
-- patterns, then rename the guarded RHSs in the patterns' scope.
rnMatch' ctxt rnBody match@(Match { m_ctxt = mf, m_pats = pats
                                  , m_type = maybe_rhs_sig, m_grhss = grhss })
  = do { -- Result type signatures are no longer supported
         case maybe_rhs_sig of
           Nothing         -> return ()
           Just (L loc ty) -> addErrAt loc (resSigErr match ty)

       ; let fixity = if isInfixMatch match then Infix else Prefix
         -- Now the main event
         -- Note that there are no local fixity decls for matches
       ; rnPats ctxt pats      $ \ pats' -> do
       { (grhss', grhss_fvs) <- rnGRHSs ctxt rnBody grhss
         -- Carry the renamed function name over into the match context,
         -- keeping the location of the occurrence on this equation
       ; let mf' = case (ctxt,mf) of
                     (FunRhs (L _ funid) _,FunRhs (L lf _) _)
                                            -> FunRhs (L lf funid) fixity
                     _                      -> ctxt
       ; return (Match { m_ctxt = mf', m_pats = pats'
                       , m_type = Nothing, m_grhss = grhss'}, grhss_fvs ) }}
emptyCaseErr :: HsMatchContext Name -> SDoc
-- Error message for a match group with no alternatives when
-- -XEmptyCase is not enabled.
emptyCaseErr ctxt = hang (text "Empty list of alternatives in" <+> pp_ctxt)
                       2 (text "Use EmptyCase to allow this")
  where
    pp_ctxt = case ctxt of
                CaseAlt    -> text "case expression"
                LambdaExpr -> text "\\case expression"
                _          -> text "(unexpected)" <+> pprMatchContextNoun ctxt
resSigErr :: Outputable body
          => Match RdrName body -> HsType RdrName -> SDoc
-- Error message for the obsolete result-type-signature syntax on a match
resSigErr match ty
   = vcat [ text "Illegal result type signature" <+> quotes (ppr ty)
          , nest 2 $ ptext (sLit
                 "Result signatures are no longer supported in pattern matches")
          , pprMatchInCtxt match ]
{-
************************************************************************
* *
\subsubsection{Guarded right-hand sides (GRHSs)}
* *
************************************************************************
-}
-- | Rename the guarded right-hand sides of a match together with their
-- local (@where@) bindings; the renamed local binds scope over every GRHS.
rnGRHSs :: HsMatchContext Name
        -> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
        -> GRHSs RdrName (Located (body RdrName))
        -> RnM (GRHSs Name (Located (body Name)), FreeVars)
rnGRHSs ctxt rnBody (GRHSs grhss (L l binds))
  = rnLocalBindsAndThen binds $ \ binds' _ -> do
      (grhss', fvGRHSs) <- mapFvRn (rnGRHS ctxt rnBody) grhss
      return (GRHSs grhss' (L l binds'), fvGRHSs)
-- | Rename one located guarded right-hand side, preserving its location.
rnGRHS :: HsMatchContext Name
       -> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
       -> LGRHS RdrName (Located (body RdrName))
       -> RnM (LGRHS Name (Located (body Name)), FreeVars)
rnGRHS ctxt = wrapLocFstM . rnGRHS' ctxt
-- | Rename a single GRHS: guards bind variables that scope over the body,
-- so the body is renamed inside the guards' extended environment.  Warns
-- when non-standard (multi-qualifier) guards are used without the
-- PatternGuards extension.
rnGRHS' :: HsMatchContext Name
        -> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
        -> GRHS RdrName (Located (body RdrName))
        -> RnM (GRHS Name (Located (body Name)), FreeVars)
rnGRHS' ctxt rnBody (GRHS guards rhs)
  = do { pattern_guards_allowed <- xoptM LangExt.PatternGuards
       ; ((guards', rhs'), fvs) <- rnStmts (PatGuard ctxt) rnLExpr guards $ \ _ ->
                                   rnBody rhs
       ; unless (pattern_guards_allowed || is_standard_guard guards')
                (addWarn NoReason (nonStdGuardErr guards'))
       ; return (GRHS guards' rhs', fvs) }
  where
        -- Standard Haskell 1.4 guards are just a single boolean
        -- expression, rather than a list of qualifiers as in the
        -- Glasgow extension
    is_standard_guard []                       = True
    is_standard_guard [L _ (BodyStmt _ _ _ _)] = True
    is_standard_guard _                        = False
{-
************************************************************************
* *
\subsection{Error messages}
* *
************************************************************************
-}
-- | Report duplicate signatures for the same name; the error is attached
-- to the first occurrence and lists the locations of all of them.
dupSigDeclErr :: [(Located RdrName, Sig RdrName)] -> RnM ()
dupSigDeclErr [] = panic "dupSigDeclErr"
dupSigDeclErr pairs@((L loc name, sig) : _) = addErrAt loc msg
  where
    msg = vcat [ text "Duplicate" <+> what_it_is
                 <> text "s for" <+> quotes (ppr name)
               , text "at" <+> vcat (map ppr $ sort $ map (getLoc . fst) pairs) ]
    what_it_is = hsSigDoc sig
-- | Report a signature that occurs somewhere signatures are not allowed.
misplacedSigErr :: LSig Name -> RnM ()
misplacedSigErr (L loc sig) = addErrAt loc msg
  where
    msg = sep [ text "Misplaced" <+> hsSigDoc sig <> colon
              , ppr sig ]
-- | Error for a default method signature outside a class declaration (or
-- without the DefaultSignatures extension).
defaultSigErr :: Sig RdrName -> SDoc
defaultSigErr sig = vcat [ header, hint ]
  where
    header = hang (text "Unexpected default signature:") 2 (ppr sig)
    hint   = text "Use DefaultSignatures to enable default signatures"
-- | Error for value bindings appearing in an hs-boot file, which may only
-- contain declarations.
bindsInHsBootFile :: LHsBindsLR Name RdrName -> SDoc
bindsInHsBootFile mbinds = hang herald 2 (ppr mbinds)
  where
    herald = text "Bindings in hs-boot files are not allowed"
-- | Warning emitted when pattern guards are used without the
-- PatternGuards extension enabled.
nonStdGuardErr :: Outputable body => [LStmtLR Name Name body] -> SDoc
nonStdGuardErr guards = hang herald 4 (interpp'SP guards)
  where
    herald = text "accepting non-standard pattern guards (use PatternGuards to suppress this message)"
-- | Warning for a pattern binding that binds no variables at all.
unusedPatBindWarn :: HsBind Name -> SDoc
unusedPatBindWarn bind = hang herald 2 (ppr bind)
  where
    herald = text "This pattern-binding binds no variables:"
-- | Report multiple MINIMAL pragmas in one class; the error points at the
-- first pragma and lists the locations of all of them.
dupMinimalSigErr :: [LSig RdrName] -> RnM ()
dupMinimalSigErr [] = panic "dupMinimalSigErr"
dupMinimalSigErr sigs@(L loc _ : _) = addErrAt loc msg
  where
    msg = vcat [ text "Multiple minimal complete definitions"
               , text "at" <+> vcat (map ppr $ sort $ map getLoc sigs)
               , text "Combine alternative minimal complete definitions with `|'" ]
| vTurbine/ghc | compiler/rename/RnBinds.hs | bsd-3-clause | 49,610 | 0 | 22 | 15,412 | 9,278 | 4,895 | 4,383 | 565 | 23 |
----------------------------------------------------------------------------
-- |
-- Module : Data.Condition
-- Copyright : (c) Sergey Vinokurov 2016
-- License : BSD3-style (see LICENSE)
-- Maintainer : serg.foo@gmail.com
-- Created : Monday, 12 September 2016
----------------------------------------------------------------------------
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -Wredundant-constraints #-}
{-# OPTIONS_GHC -Wsimplifiable-class-constraints #-}
module Data.Condition
( Condition
, newUnsetCondition
, setCondition
, waitForCondition
) where
import Control.Concurrent.MVar
import Control.Monad.Base
-- | Concurrent condition that can be awaited to become true.
-- Represented as an 'MVar': empty while the condition is unset, holding
-- @()@ once it has been set.
newtype Condition = Condition (MVar ())
  deriving (Eq)
{-# INLINE newUnsetCondition #-}
-- | Create a fresh condition in the unset state (an empty 'MVar').
newUnsetCondition :: MonadBase IO m => m Condition
newUnsetCondition = liftBase (fmap Condition newEmptyMVar)
{-# INLINE setCondition #-}
-- | Mark the condition as set, releasing every 'waitForCondition' caller.
-- Uses 'tryPutMVar' so that setting an already-set condition is a no-op;
-- the previous 'putMVar' would block forever on a second call.
setCondition :: MonadBase IO m => Condition -> m ()
setCondition (Condition v) = liftBase $ do
  _ <- tryPutMVar v ()
  return ()
{-# INLINE waitForCondition #-}
-- | Block until the condition is set; returns immediately if it already
-- is.  'readMVar' leaves the value in place, so the condition stays set
-- for all other waiters.
waitForCondition :: MonadBase IO m => Condition -> m ()
waitForCondition (Condition var) = liftBase (readMVar var)
| sergv/tags-server | src/Data/Condition.hs | bsd-3-clause | 1,207 | 0 | 8 | 193 | 192 | 108 | 84 | 21 | 1 |
-- Copyright (C) 2015-2016 Moritz Schulte <mtesseract@silverratio.net>
module Wosa where
import Data.Map
import Control.Exception
import Data.Typeable
import Data.IORef
import Nebelfiller.Datatypes
-- | These are the possible "actions". These allow for transforming a
-- given state to some other state.
data WosaAction =
    ActionInit                   -- ^ Initialize the application state.
  | ActionNop                    -- ^ Do nothing.
  | ActionSuggestWordset         -- ^ Backend shall produce a new
                                 -- quadruple suggestion.
  | ActionAcceptWordset          -- ^ The user accepts the current
                                 -- quadruple proposal.
  | ActionSuggestOrAcceptWordset -- ^ Suggest a wordset if none is
                                 -- pending, otherwise accept the
                                 -- current one.
  | ActionRejectWordset          -- ^ The user rejects the current wordset
                                 -- suggestions and wants to work on the
                                 -- current quadruple manually.
  | ActionLoadWordset Int Card   -- ^ Load a quadruple into a
                                 -- card.
  | ActionSaveWordset Int Card   -- ^ The user wants to save
                                 -- quadruple on the specified
                                 -- card.
  | ActionQuit                   -- ^ Program shall quit.
  deriving (Eq, Show)
-- | Wordsets keyed by their index number.
type WordsetMap = Map Integer Wordset

-- | Application-level exception, optionally carrying a message.
data WosaException = ExceptionString String | ExceptionNone
  deriving (Show, Typeable)

instance Exception WosaException
-- | These are the possible state types.
data State = StateNothing  -- ^ Dummy state.
           | StateManually -- ^ User is free to modify and edit wordsets.
           | StateAsk      -- ^ User is presented a new wordset
                           -- suggestion and asked if that is a good
                           -- word set.
  deriving (Eq, Show)
-- | The global state of this application is stored in the Ctx
-- datatype.
data Ctx = Ctx { ctxState    :: State      -- ^ Current UI/interaction state.
               , ctxDebug    :: Bool       -- ^ Debug output enabled?
               , ctxBackend  :: BackendCtx -- ^ Backend-private context.
               , ctxWordsets :: WordsetMap -- ^ All wordsets, keyed by number.
               }
-- | Actions implemented by the backend.  Each alias names one callback
-- slot of 'BackendSpec'; most receive the application 'Ctx' (or an
-- 'IORef' to it when the backend must mutate it asynchronously).
type BackendActionInitialize = [String] -> (WosaAction -> IO ())
                               -> IO (Either String (WordsetMap, BackendCtx))
type BackendActionLoop = Ctx -> IO ()
type BackendActionQuit = Ctx -> IO ()
type BackendActionPrintWordset = Wordset -> String
type BackendActionPhaseManually = Ctx -> IO ()
type BackendActionPhaseQuery = Ctx -> IO ()
type BackendActionUpdateStats = Ctx -> IO ()
type BackendActionSuggestWordset = Ctx -> Maybe Wordset
type BackendActionPresentWordset = IORef Ctx -> Wordset -> Integer -> Card -> IO ()
type BackendActionReplaceWordset = Ctx -> Integer -> Maybe Wordset -> Wordset -> IO BackendCtx
type BackendActionRetrieveWordsetNo = Ctx -> Card -> IO (Maybe Integer)
type BackendActionRetrieveWordset = Ctx -> Card -> IO (Maybe Wordset)
type BackendActionInfo = Ctx -> String -> IO ()
type BackendActionSetup = IORef Ctx -> (WosaAction -> IO ()) -> IO ()
type BackendActionDisplayWordsets = IORef Ctx -> IO ()
type BackendActionDebugCtx = BackendCtx -> IO ()
-- | Record of callbacks a backend must provide; acts as the interface
-- between the generic driver and a concrete (e.g. GUI) implementation.
data BackendSpec =
  BackendSpec { backendWordsetsN         :: Integer -- ^ Number of wordsets handled.
              , backendInitialize        :: BackendActionInitialize
              , backendLoop              :: BackendActionLoop
              , backendQuit              :: BackendActionQuit
              , backendPrintWordset      :: BackendActionPrintWordset
              , backendPhaseManually     :: BackendActionPhaseManually
              , backendPhaseQuery        :: BackendActionPhaseQuery
              , backendUpdateStats       :: BackendActionUpdateStats
              , backendSuggestWordset    :: BackendActionSuggestWordset
              , backendPresentWordset    :: BackendActionPresentWordset
              , backendReplaceWordset    :: BackendActionReplaceWordset
              , backendRetrieveWordsetNo :: BackendActionRetrieveWordsetNo
              , backendRetrieveWordset   :: BackendActionRetrieveWordset
              , backendInfo              :: BackendActionInfo
              , backendSetup             :: BackendActionSetup
              , backendDisplayWordsets   :: BackendActionDisplayWordsets
              , backendDebugCtx          :: BackendActionDebugCtx
              }
| mtesseract/wosa | src/Wosa.hs | bsd-3-clause | 4,301 | 0 | 10 | 1,416 | 647 | 378 | 269 | 64 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE CPP #-}
module Facebook.Object.Marketing.AdImage where
import Facebook.Records hiding (get)
import qualified Facebook.Records as Rec
import Facebook.Types hiding (Id)
import Facebook.Pager
import Facebook.Monad
import Facebook.Graph
import Facebook.Base (FacebookException(..))
import qualified Data.Aeson as A
import Data.Time.Format
import Data.Aeson hiding (Value)
import Control.Applicative
import Data.Text (Text)
import Data.Text.Read (decimal)
import Data.Scientific (toBoundedInteger)
import qualified Data.Text.Encoding as TE
import GHC.Generics (Generic)
import qualified Data.Map.Strict as Map
import Data.Vector (Vector)
import qualified Data.Vector as V
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Builder as BSB
import qualified Data.ByteString.Lazy as BSL
import qualified Control.Monad.Trans.Resource as R
import Control.Monad.Trans.Control (MonadBaseControl)
#if MIN_VERSION_time(1,5,0)
import System.Locale hiding (defaultTimeLocale, rfc822DateFormat)
import Data.Time.Clock
#else
import System.Locale
import Data.Time.Clock hiding (defaultTimeLocale, rfc822DateFormat)
#endif
import Facebook.Object.Marketing.Types
-- Generated field descriptors.  Each AdImage field follows the same
-- pattern: a phantom label type (for the 'Field' class), a newtype wrapper
-- around the payload, and an unwrapping accessor.

data Filename = Filename
newtype Filename_ = Filename_ Text deriving (Show, Generic)
instance Field Filename where
  type FieldValue Filename = Filename_
  fieldName _ = "filename"
  fieldLabel = Filename
-- | Unwrap a 'Filename_' payload.
unFilename_ :: Filename_ -> Text
unFilename_ (Filename_ x) = x

data Creatives = Creatives
newtype Creatives_ = Creatives_ (Vector Text) deriving (Show, Generic)
instance Field Creatives where
  type FieldValue Creatives = Creatives_
  fieldName _ = "creatives"
  fieldLabel = Creatives
-- | Unwrap a 'Creatives_' payload.
unCreatives_ :: Creatives_ -> Vector Text
unCreatives_ (Creatives_ x) = x

data Height = Height
newtype Height_ = Height_ Int deriving (Show, Generic)
instance Field Height where
  type FieldValue Height = Height_
  fieldName _ = "height"
  fieldLabel = Height
-- | Unwrap a 'Height_' payload.
unHeight_ :: Height_ -> Int
unHeight_ (Height_ x) = x

data PermalinkUrl = PermalinkUrl
newtype PermalinkUrl_ = PermalinkUrl_ Text deriving (Show, Generic)
instance Field PermalinkUrl where
  type FieldValue PermalinkUrl = PermalinkUrl_
  fieldName _ = "permalink_url"
  fieldLabel = PermalinkUrl
-- | Unwrap a 'PermalinkUrl_' payload.
unPermalinkUrl_ :: PermalinkUrl_ -> Text
unPermalinkUrl_ (PermalinkUrl_ x) = x

data Url128 = Url128
newtype Url128_ = Url128_ Text deriving (Show, Generic)
instance Field Url128 where
  type FieldValue Url128 = Url128_
  fieldName _ = "url_128"
  fieldLabel = Url128
-- | Unwrap a 'Url128_' payload.
unUrl128_ :: Url128_ -> Text
unUrl128_ (Url128_ x) = x
-- More generated field descriptors (same label/newtype/unwrap pattern).

data OriginalHeight = OriginalHeight
newtype OriginalHeight_ = OriginalHeight_ Int deriving (Show, Generic)
instance Field OriginalHeight where
  type FieldValue OriginalHeight = OriginalHeight_
  fieldName _ = "original_height"
  fieldLabel = OriginalHeight
-- | Unwrap an 'OriginalHeight_' payload.
unOriginalHeight_ :: OriginalHeight_ -> Int
unOriginalHeight_ (OriginalHeight_ x) = x

data Url = Url
newtype Url_ = Url_ Text deriving (Show, Generic)
instance Field Url where
  type FieldValue Url = Url_
  fieldName _ = "url"
  fieldLabel = Url
-- | Unwrap a 'Url_' payload.
unUrl_ :: Url_ -> Text
unUrl_ (Url_ x) = x

data Status = Status
newtype Status_ = Status_ Bool deriving (Show, Generic)
instance Field Status where
  type FieldValue Status = Status_
  fieldName _ = "status"
  fieldLabel = Status
-- | Unwrap a 'Status_' payload.
unStatus_ :: Status_ -> Bool
unStatus_ (Status_ x) = x

data OriginalWidth = OriginalWidth
newtype OriginalWidth_ = OriginalWidth_ Int deriving (Show, Generic)
instance Field OriginalWidth where
  type FieldValue OriginalWidth = OriginalWidth_
  fieldName _ = "original_width"
  fieldLabel = OriginalWidth
-- | Unwrap an 'OriginalWidth_' payload.
unOriginalWidth_ :: OriginalWidth_ -> Int
unOriginalWidth_ (OriginalWidth_ x) = x

data Width = Width
newtype Width_ = Width_ Int deriving (Show, Generic)
instance Field Width where
  type FieldValue Width = Width_
  fieldName _ = "width"
  fieldLabel = Width
-- | Unwrap a 'Width_' payload.
unWidth_ :: Width_ -> Int
unWidth_ (Width_ x) = x
-- Generic-derived JSON instances for every field newtype.
instance A.FromJSON Filename_
instance A.ToJSON Filename_
instance A.FromJSON Creatives_
instance A.ToJSON Creatives_
instance A.FromJSON Height_
instance A.ToJSON Height_
instance A.FromJSON PermalinkUrl_
instance A.ToJSON PermalinkUrl_
instance A.FromJSON Url128_
instance A.ToJSON Url128_
instance A.FromJSON OriginalHeight_
instance A.ToJSON OriginalHeight_
instance A.FromJSON Url_
instance A.ToJSON Url_
instance A.FromJSON Status_
instance A.ToJSON Status_
instance A.FromJSON OriginalWidth_
instance A.ToJSON OriginalWidth_
instance A.FromJSON Width_
instance A.ToJSON Width_
-- ToBS instances simply delegate to the wrapped payload's serialization.
instance ToBS Filename_ where
  toBS (Filename_ a) = toBS a
instance ToBS Creatives_ where
  toBS (Creatives_ a) = toBS a
instance ToBS Height_ where
  toBS (Height_ a) = toBS a
instance ToBS PermalinkUrl_ where
  toBS (PermalinkUrl_ a) = toBS a
instance ToBS Url128_ where
  toBS (Url128_ a) = toBS a
instance ToBS OriginalHeight_ where
  toBS (OriginalHeight_ a) = toBS a
instance ToBS Url_ where
  toBS (Url_ a) = toBS a
instance ToBS Status_ where
  toBS (Status_ a) = toBS a
instance ToBS OriginalWidth_ where
  toBS (OriginalWidth_ a) = toBS a
instance ToBS Width_ where
  toBS (Width_ a) = toBS a
-- Record-style accessors: project one field out of an extensible record
-- via its label.
filename r = r `Rec.get` Filename
creatives r = r `Rec.get` Creatives
height r = r `Rec.get` Height
permalink_url r = r `Rec.get` PermalinkUrl
url_128 r = r `Rec.get` Url128
original_height r = r `Rec.get` OriginalHeight
url r = r `Rec.get` Url
status r = r `Rec.get` Status
original_width r = r `Rec.get` OriginalWidth
width r = r `Rec.get` Width
-- Entity:AdImage, mode:Reading
-- | Fields that may be requested when reading an AdImage.
class IsAdImageGetField r
instance (IsAdImageGetField h, IsAdImageGetField t) => IsAdImageGetField (h :*: t)
instance IsAdImageGetField Nil
instance IsAdImageGetField AccountId
instance IsAdImageGetField Creatives
instance IsAdImageGetField Hash
instance IsAdImageGetField Height
instance IsAdImageGetField PermalinkUrl
instance IsAdImageGetField CreatedTime
instance IsAdImageGetField Url128
instance IsAdImageGetField UpdatedTime
instance IsAdImageGetField Id
instance IsAdImageGetField OriginalHeight
instance IsAdImageGetField Url
instance IsAdImageGetField Status
instance IsAdImageGetField Name
instance IsAdImageGetField OriginalWidth
instance IsAdImageGetField Width

type AdImageGet fl r = (A.FromJSON r, IsAdImageGetField r, FieldListToRec fl r)
type AdImageGetRet r = Hash :*: r -- Default fields

-- | Fetch the ad images of an ad account (@GET \/v2.7\/{id}\/adimages@).
-- The 'Hash' field is always requested in addition to the caller's fields.
getAdImage :: (R.MonadResource m, MonadBaseControl IO m, AdImageGet fl r) =>
  Id_    -- ^ Ad Account Id
  -> fl  -- ^ Arguments to be passed to Facebook.
  -> UserAccessToken -- ^ User access token (required by this call).
  -> FacebookT anyAuth m (Pager (AdImageGetRet r))
getAdImage (Id_ id) fl mtoken = getObject ("/v2.7/" <> id <> "/adimages") [("fields", textListToBS $ fieldNameList $ Hash ::: fl)] $ Just mtoken
-- Entity:AdImage, mode:Creating
-- | Fields that may be supplied when creating an AdImage.
class IsAdImageSetField r
instance (IsAdImageSetField h, IsAdImageSetField t) => IsAdImageSetField (h :*: t)
instance IsAdImageSetField Nil
instance IsAdImageSetField Filename

-- | Upload response: map from filename to the stored image's metadata.
data SetImgs = SetImgs { -- as seen when using curl
  images :: Map.Map Text SetImg
  } deriving (Show, Generic)
instance FromJSON SetImgs
-- | One uploaded image as reported by Facebook: its content hash and the
-- URL it is served from.
data SetImg = SetImg {
  hash, url_ :: Text
  } deriving Show

instance FromJSON SetImg where
  parseJSON (Object v) =
    SetImg <$> v .: "hash"
           <*> v .: "url"
  -- Fail cleanly on non-object payloads instead of crashing with an
  -- irrefutable-pattern error (the original instance was partial).
  parseJSON _ = fail "SetImg: expected a JSON object"
type AdImageSet r = (Has Filename r, A.FromJSON r, IsAdImageSetField r, ToForm r)

-- | Upload an ad image (@POST \/v2.7\/{id}\/adimages@); requires a
-- 'Filename' field in the form arguments.
setAdImage :: (R.MonadResource m, MonadBaseControl IO m, AdImageSet r) =>
  Id_    -- ^ Ad Account Id
  -> r   -- ^ Arguments to be passed to Facebook.
  -> UserAccessToken -- ^ User access token (required by this call).
  -> FacebookT Auth m (Either FacebookException SetImgs)
setAdImage (Id_ id) r mtoken = postForm ("/v2.7/" <> id <> "/adimages") (toForm r) mtoken
-- Entity:AdImage, mode:Deleting
-- | Fields that may be supplied when deleting an AdImage.
class IsAdImageDelField r
instance (IsAdImageDelField h, IsAdImageDelField t) => IsAdImageDelField (h :*: t)
instance IsAdImageDelField Nil
instance IsAdImageDelField Hash

type AdImageDel r = (Has Hash r, A.FromJSON r, IsAdImageDelField r, ToForm r)

-- | Delete an ad image identified by its 'Hash'
-- (@DELETE \/v2.7\/{id}@ with form arguments).
delAdImage :: (R.MonadResource m, MonadBaseControl IO m, AdImageDel r) =>
  Id_    -- ^ Ad Account Id
  -> r   -- ^ Arguments to be passed to Facebook.
  -> UserAccessToken -- ^ User access token (required by this call).
  -> FacebookT Auth m (Either FacebookException Success)
delAdImage (Id_ id) r mtoken = deleteForm ("/v2.7/" <> id <> "") (toForm r) mtoken
| BeautifulDestinations/fb | src/Facebook/Object/Marketing/AdImage.hs | bsd-3-clause | 8,508 | 0 | 13 | 1,308 | 2,450 | 1,316 | 1,134 | -1 | -1 |
{-# LANGUAGE CPP
, DataKinds
, InstanceSigs
, GADTs
, KindSignatures
, Rank2Types
, TypeOperators
#-}
module Language.Hakaru.Syntax.Reducer where
import Language.Hakaru.Types.DataKind
import Language.Hakaru.Types.HClasses
import Language.Hakaru.Syntax.IClasses
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
import Data.Monoid (Monoid(..))
#endif
-- | Loop-body reducers: a description of how one pass over an index
-- range accumulates results.  @abt@ is the term representation, @xs@ the
-- types of variables bound by enclosing binders, and @a@ the type of the
-- accumulated result.
data Reducer (abt :: [Hakaru] -> Hakaru -> *)
             (xs :: [Hakaru])
             (a :: Hakaru) where
    -- | Run two reducers in lockstep, pairing their results.
    Red_Fanout
        :: Reducer abt xs a
        -> Reducer abt xs b
        -> Reducer abt xs (HPair a b)
    -- | Build an array: for each output cell, reduce the iterations
    -- mapped onto it.
    Red_Index
        :: abt xs 'HNat                 -- size of resulting array
        -> abt ( 'HNat ': xs) 'HNat     -- index into array (bound i)
        -> Reducer abt ( 'HNat ': xs) a -- reduction body (bound b)
        -> Reducer abt xs ('HArray a)
    -- | Partition iterations by a predicate and reduce each side.
    Red_Split
        :: abt ( 'HNat ': xs) HBool     -- (bound i)
        -> Reducer abt xs a
        -> Reducer abt xs b
        -> Reducer abt xs (HPair a b)
    -- | The trivial reducer, producing unit.
    Red_Nop
        :: Reducer abt xs HUnit
    -- | Sum the body expression over all iterations, in the given
    -- semiring.
    Red_Add
        :: HSemiring a
        -> abt ( 'HNat ': xs) a         -- (bound i)
        -> Reducer abt xs a
-- | Apply a transformation to every @abt@ term stored in a 'Reducer',
-- recursing through nested reducers.
instance Functor31 Reducer where
    fmap31 nat (Red_Fanout l r)   = Red_Fanout (fmap31 nat l) (fmap31 nat r)
    fmap31 nat (Red_Index n ix r) = Red_Index (nat n) (nat ix) (fmap31 nat r)
    fmap31 nat (Red_Split b l r)  = Red_Split (nat b) (fmap31 nat l) (fmap31 nat r)
    fmap31 _   Red_Nop            = Red_Nop
    fmap31 nat (Red_Add h e)      = Red_Add h (nat e)

-- | Fold every @abt@ term of a 'Reducer' into a monoid, left to right.
instance Foldable31 Reducer where
    foldMap31 acc (Red_Fanout l r)   = foldMap31 acc l `mappend` foldMap31 acc r
    foldMap31 acc (Red_Index n ix r) = acc n `mappend` acc ix `mappend` foldMap31 acc r
    foldMap31 acc (Red_Split b l r)  = acc b `mappend` foldMap31 acc l `mappend` foldMap31 acc r
    foldMap31 _   Red_Nop            = mempty
    foldMap31 acc (Red_Add _ e)      = acc e
-- | Effectful traversal of every @abt@ term in a 'Reducer'.
instance Traversable31 Reducer where
    traverse31 f (Red_Fanout r1 r2)  = Red_Fanout <$> traverse31 f r1 <*> traverse31 f r2
    traverse31 f (Red_Index n ix r)  = Red_Index <$> f n <*> f ix <*> traverse31 f r
    traverse31 f (Red_Split b r1 r2) = Red_Split <$> f b <*> traverse31 f r1 <*> traverse31 f r2
    -- Wildcard (not a named binder) for the unused function, matching
    -- the Functor31/Foldable31 instances and avoiding an unused-match
    -- warning.
    traverse31 _ Red_Nop             = pure Red_Nop
    traverse31 f (Red_Add h e)       = Red_Add h <$> f e
-- | Structural equality of reducers; the final catch-all makes distinct
-- constructors unequal, so it must stay last.
instance Eq2 abt => Eq1 (Reducer abt xs) where
    eq1 (Red_Fanout r1 r2)  (Red_Fanout r1' r2')  = eq1 r1 r1' && eq1 r2 r2'
    eq1 (Red_Index n ix r)  (Red_Index n' ix' r') = eq2 n n' && eq2 ix ix' && eq1 r r'
    eq1 (Red_Split b r1 r2) (Red_Split b' r1' r2') = eq2 b b' && eq1 r1 r1' && eq1 r2 r2'
    eq1 Red_Nop             Red_Nop               = True
    eq1 (Red_Add _ e)       (Red_Add _ e')        = eq2 e e'
    eq1 _                   _                     = False

instance JmEq2 abt => JmEq1 (Reducer abt xs) where
    jmEq1 = jmEqReducer

-- | Heterogeneous equality: if two reducers are structurally equal,
-- produce a proof that their result types coincide.  Runs in the Maybe
-- monad, short-circuiting on the first mismatch.
jmEqReducer
    :: (JmEq2 abt)
    => Reducer abt xs a
    -> Reducer abt xs b
    -> Maybe (TypeEq a b)
jmEqReducer (Red_Fanout a b) (Red_Fanout a' b') = do
    Refl <- jmEqReducer a a'
    Refl <- jmEqReducer b b'
    return Refl
jmEqReducer (Red_Index s i r) (Red_Index s' i' r') = do
    (Refl, Refl) <- jmEq2 s s'
    (Refl, Refl) <- jmEq2 i i'
    Refl <- jmEqReducer r r'
    return Refl
jmEqReducer (Red_Split b r s) (Red_Split b' r' s') = do
    (Refl, Refl) <- jmEq2 b b'
    Refl <- jmEqReducer r r'
    Refl <- jmEqReducer s s'
    return Refl
jmEqReducer Red_Nop Red_Nop = return Refl
jmEqReducer (Red_Add _ x) (Red_Add _ x') = do
    (Refl, Refl) <- jmEq2 x x'
    return Refl
jmEqReducer _ _ = Nothing
| zaxtax/hakaru | haskell/Language/Hakaru/Syntax/Reducer.hs | bsd-3-clause | 3,652 | 0 | 12 | 1,172 | 1,427 | 708 | 719 | 87 | 1 |
module NIB
( Switch (..)
, Endpoint (..)
, FlowTbl (..)
, FlowTblEntry
, PortCfg (..)
, Msg (..)
, Queue (..)
, SwitchType (..)
, newQueue
, switchWithNPorts
, newEmptyNIB
, addSwitch
, addPort
, addEndpoint
, linkPorts
, getPath
, endpointPort
, getEthFromIP
, snapshot
, NIB
, Snapshot
, emptySwitch
) where
import Debug.Trace
import qualified Nettle.OpenFlow as OF
import qualified Nettle.Ethernet.AddressResolutionProtocol as OFARP
import qualified Nettle.Servers.Server as OFS
import HFT (MatchTable (..))
import Base
import qualified Nettle.OpenFlow as OF
import qualified Nettle.OpenFlow.StrictPut as OFBS
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Word (Word16)
import Data.Int (Int32)
import Data.IORef
import Data.HashTable (HashTable)
import qualified Data.HashTable as Ht
import Data.Maybe (isJust, fromJust, catMaybes)
import System.IO.Unsafe (unsafePerformIO)
import qualified Data.HList as HL
import System.Log.Logger.TH (deriveLoggers)
import qualified System.Log.Logger as Logger
$(deriveLoggers "Logger" [Logger.DEBUG, Logger.INFO, Logger.WARNING,
Logger.ERROR])
-- | One flow-table rule: (priority, match, actions, expiry limit).
type FlowTblEntry = (Word16, OF.Match, [OF.Action], Limit)
-- | A switch's flow table, as a set of rules.
type FlowTbl = Set FlowTblEntry
-- | Immutable view of the NIB, keyed by switch (datapath) ID.
type Snapshot = Map OF.SwitchID Switch

-- | Configuration of one egress queue on a switch port.
data Queue = Queue {
  queueMinRate :: OF.QueueRate, -- ^ guaranteed minimum rate
  queueMaxRate :: OF.QueueRate, -- ^ maximum (cap) rate
  queueExpiry :: Limit          -- ^ when the queue should expire
} deriving (Show,Eq)

-- | Mutable Network Information Base: all switches and endpoints
-- discovered so far, with endpoints indexed both by MAC and by IP.
data NIB = NIB {
  nibSwitches :: HashTable OF.SwitchID SwitchData,
  nibEndpoints :: HashTable OF.EthernetAddress EndpointData,
  nibEndpointsByIP :: HashTable OF.IPAddress EndpointData
}
-- TODO(adf): There is no guarantee that nibEndpoints and nibEndpointsByIP be
-- consistent. It might be better to have a single nibEndpoints HashTable, and
-- then a separate HashTable which does IP -> EthernetAddress. In the "Real
-- World" we can expect to see multiple EthernetAddresses attached to the same
-- switch port, and multiple IP addresses assocated with the same EthernetAddress.
-- Multiple ethernet address for a single IP is also possible (load-balancing),
-- but those will ultimately have separate endpoints.
-- | A discovered host: its MAC, IP, and the (virtual) port that models
-- its attachment point.
data EndpointData = EndpointData {
  endpointEthAddr :: OF.EthernetAddress,
  endpointIP :: OF.IPAddress,
  endpointPort :: PortData
}

-- Debug-friendly rendering of an endpoint.
instance Show EndpointData where
  show (EndpointData ea ip port) =
    "\nEthernet Addr: " ++ show(ea) ++
    "\n IP: " ++ show(ip) ++
    "\n Port: " ++ show(port)

-- | Mutable state for one switch: its flow table, its ports, and a
-- listener invoked when the flow table is updated.
data SwitchData = SwitchData {
  switchSwitchID :: OF.SwitchID,
  switchType :: SwitchType,
  switchFlowTable :: IORef FlowTbl,
  switchPorts :: HashTable OF.PortID PortData,
  switchFlowTableUpdateListener :: IORef (FlowTbl -> IO ())
}
-- Debug rendering: only the switch ID and type are shown, so the unused
-- fields are matched with wildcards (avoids unused-binding warnings).
instance Show SwitchData where
  show (SwitchData sid stype _ _ _) =
    "\nSwitchID: " ++ show(sid) ++
    "\n Type: " ++ show(stype)
-- | Hardware/software flavor of a switch, learned from its OpenFlow
-- description reply.
data SwitchType
  = ReferenceSwitch
  | OpenVSwitch
  | ProntoSwitch
  | OtherSwitch String
  | UnknownSwitch
  deriving Eq

instance Show SwitchType where
  show ReferenceSwitch = "Reference Switch"
  show OpenVSwitch = "Open vSwitch"
  show ProntoSwitch = "Pronto Switch"
  show (OtherSwitch t) = show t
  show UnknownSwitch = "(Unknown)"

-- | One switch port: its queues, the device it belongs to, what the
-- other end of the wire connects to, and a queue-update listener.
data PortData = PortData {
  portPortID :: OF.PortID,
  portQueues :: HashTable OF.QueueID Queue,
  portDevice :: Element,          -- ^ local device (always a switch)
  portConnectedTo :: IORef Element, -- ^ other end of the wire
  portQueueUpdateListener :: IORef ([(OF.QueueID, Queue)] -> IO ())
}
-- | What a port can be wired to: nothing, a host, or another switch's
-- port.
data Element
  = ToNone
  | ToEndpoint EndpointData
  | ToSwitch SwitchData PortData

instance Show Element where
  show ToNone = "<nothing>"
  show (ToEndpoint ep) = show (endpointIP ep)
  show (ToSwitch sw pd) = show (switchSwitchID sw) ++
                          ":" ++ show(pd |> portPortID)
-- | Create an empty NIB and fork a worker thread that applies every
-- 'Msg' arriving on the channel to it (see 'nibMutator').  All NIB
-- mutation happens on that single thread.
newEmptyNIB :: Chan Msg -> IO NIB
newEmptyNIB msg = do
  -- Each hash table hashes its key after squeezing it into an Int.
  s <- Ht.new (==) ((Ht.hashInt).fromIntegral)
  e <- Ht.new (==) ((Ht.hashInt).fromIntegral.(OF.unpack64))
  i <- Ht.new (==) ((Ht.hashInt).fromIntegral.(OF.ipAddressToWord32))
  let nib = NIB s e i
  forkIO (forever (readChan msg >>= nibMutator nib))
  return nib
-- | Apply one message to the NIB.  This is the single mutation point for
-- the NIB; it runs on the thread forked by 'newEmptyNIB'.
nibMutator :: NIB -> Msg -> IO ()
-- A switch connected: record it and all of its ports, and probe each
-- port with a discovery packet ('sendDP') so inter-switch links get
-- learned from the resulting PacketIns.
nibMutator nib (NewSwitch handle features) = do
  let swID = OF.switchID features
  maybe <- addSwitch swID nib
  case maybe of
    Nothing -> warningM $ "nibMutator: switch already exists " ++ OF.showSwID swID
    Just sw -> do
      infoM$ "NIB added switch " ++ OF.showSwID swID ++ "."
      let addPort' p = do
            maybe <- addPort (OF.portID p) sw
            case maybe of
              Nothing -> warningM $ "nibMutator: port already exists"
              Just _ -> do
                debugM $ "NIB added port " ++ show (OF.portID p) ++
                         " on switch " ++ show swID
                sendDP handle (OF.portID p)
                return ()
      ignoreExns ("sending PaneDP on switch " ++ OF.showSwID swID) $
        mapM_ addPort' (OF.ports features)
-- A statistics reply: a description reply tells us the switch flavor.
nibMutator nib (StatsReply swid reply) = case reply of
  OF.DescriptionReply desc -> case OF.hardwareDesc desc of
    "Reference Userspace Switch" -> setSwitchType swid ReferenceSwitch nib
    "Open vSwitch" -> setSwitchType swid OpenVSwitch nib
    "Pronto 3290" -> setSwitchType swid ProntoSwitch nib
    otherwise -> setSwitchType swid (OtherSwitch (OF.hardwareDesc desc)) nib
  otherwise -> infoM $ "unhandled statistics reply from switch " ++
                       (OF.showSwID swid) ++ "\n" ++ show reply
-- Dump the whole NIB through the supplied putter (debugging aid).
nibMutator nib (DisplayNIB putter) = do
  sw <- Ht.toList (nibSwitches nib)
  e <- Ht.toList (nibEndpoints nib)
  eip <- Ht.toList (nibEndpointsByIP nib)
  let sw' = map (\(k,v) -> v) sw
      e' = map (\(k,v) -> v) e
      eip' = map (\(k,v) -> v) eip
      str = "Displaying the NIB...\n" ++
            "Switches:\n" ++ show sw' ++
            "\n-------------------------------------\n" ++
            "Endpoints:\n" ++ show e' ++
            "\n-------------------------------------\n" ++
            "EndpointsByIP:\n" ++ show eip' ++
            "\n-------------------------------------\n"
  putter $ str
-- TODO: the code below should be broken-up somehow
-- A packet-in: three interesting frame shapes.
nibMutator nib (PacketIn tS pkt) = case OF.enclosedFrame pkt of
  -- (1) Our own discovery frame came back: it encodes the sending switch
  -- fS / port fP, and arrived on switch tS / port tP, so those two ports
  -- are wired together.  An endpoint previously learned on the sending
  -- port was stale and is evicted first.
  Right (HL.HCons _ (HL.HCons (OF.PaneDPInEthernet fS fP) HL.HNil)) -> do
    let tP = OF.receivedOnPort pkt
    yFromSwitch <- Ht.lookup (nibSwitches nib) fS
    yToSwitch <- Ht.lookup (nibSwitches nib) tS
    case (yFromSwitch, yToSwitch) of
      (Just fromSwitch, Just toSwitch) -> do
        yFromPort <- Ht.lookup (switchPorts fromSwitch) fP
        yToPort <- Ht.lookup (switchPorts toSwitch) tP
        case (yFromPort, yToPort) of
          (Just fromPort, Just toPort) -> do
            toDevice <- readIORef (portConnectedTo fromPort)
            case toDevice of
              ToNone -> do
                linkPorts fromPort toPort
                return ()
              ToEndpoint ep -> do
                -- Stale endpoint on this port: remove it from both
                -- indices, unwire both ends, then record the real link.
                Ht.delete (nibEndpoints nib) (endpointEthAddr ep)
                Ht.delete (nibEndpointsByIP nib) (endpointIP ep)
                writeIORef (portConnectedTo fromPort) ToNone
                writeIORef (portConnectedTo toPort) ToNone
                linkPorts fromPort toPort
                return ()
              ToSwitch _ _ -> do
                errorM $ "NIB already linked to a switch"
          otherwise -> errorM $ "NIB failed to find port(s)"
      otherwise -> errorM $ "NIB failed to find switch(s)"
  -- (2) An ARP frame: learn the sender (MAC, IP) as an endpoint on the
  -- receiving port, if that port is not already wired to something.
  Right (HL.HCons hdr (HL.HCons (OF.ARPInEthernet arp) HL.HNil)) -> do
    let srcEth = OF.sourceMACAddress hdr
    let srcIP = case arp of
                  OFARP.ARPQuery qp -> OFARP.querySenderIPAddress qp
                  OFARP.ARPReply rp -> OFARP.replySenderIPAddress rp
    let srcPort = OF.receivedOnPort pkt
    ySwitch <- Ht.lookup (nibSwitches nib) tS
    let hostStr = show (srcEth, srcIP)
    case ySwitch of
      Nothing -> do
        errorM $ "NIB cannot find switch for " ++ hostStr
        return ()
      Just switch -> do
        maybe <- Ht.lookup (switchPorts switch) srcPort
        case maybe of
          Nothing -> do
            errorM $ "NIB cannot find port for " ++ hostStr
            return ()
          Just port -> do
            connectedTo <- readIORef (portConnectedTo port)
            case connectedTo of
              ToNone -> do
                maybe <- addEndpoint srcEth srcIP nib
                case maybe of
                  Nothing -> do
                    infoM $ "NIB already knows " ++ hostStr
                    return ()
                  Just endpoint -> do
                    b <- linkPorts port (endpointPort endpoint)
                    infoM $ "NIB discovered host " ++ (show (srcEth, srcIP)) ++ " " ++ show b
                    return ()
              conn -> do
                warningM $ "NIB already connects " ++ hostStr ++ " to " ++
                           show conn
                return ()
  -- (3) An IP frame: same endpoint learning as ARP, but quietly (no
  -- logging when the host or connection is already known).
  Right (HL.HCons hdr (HL.HCons (OF.IPInEthernet (HL.HCons ipHdr (HL.HCons _ HL.HNil))) HL.HNil)) -> do
    let srcEth = OF.sourceMACAddress hdr
    let srcIP = OF.ipSrcAddress ipHdr
    let srcPort = OF.receivedOnPort pkt
    ySwitch <- Ht.lookup (nibSwitches nib) tS
    let hostStr = show (srcEth, srcIP)
    case ySwitch of
      Nothing -> do
        errorM $ "NIB cannot find switch for " ++ hostStr
        return ()
      Just switch -> do
        maybe <- Ht.lookup (switchPorts switch) srcPort
        case maybe of
          Nothing -> do
            errorM $ "NIB cannot find port for " ++ hostStr
            return ()
          Just port -> do
            connectedTo <- readIORef (portConnectedTo port)
            case connectedTo of
              ToNone -> do
                maybe <- addEndpoint srcEth srcIP nib
                case maybe of
                  Nothing -> do
                    return ()
                  Just endpoint -> do
                    b <- linkPorts port (endpointPort endpoint)
                    infoM $ "NIB discovered host " ++ (show (srcEth, srcIP)) ++ " " ++ show b
                    return ()
              conn -> do
                return ()
  -- Any other frame type is ignored.
  otherwise -> return ()
-- | Send a PANE discovery frame out of the given port.  The frame embeds
-- the sending switch ID and port ID; when it reappears as a PacketIn on
-- a neighbor, 'nibMutator' learns the link between the two ports.
sendDP :: OFS.SwitchHandle -> OF.PortID -> IO ()
sendDP handle portID = do
  -- Brief pause between probes.  NOTE(review): presumably paces the
  -- probes so the switch is not flooded -- confirm.
  threadDelay 1000
  -- All-zero MAC for src and dst; the frame is identified by its
  -- PANE-specific ethertype, not its addresses.
  let ethAddr = OF.ethernetAddress64 0
  let hdr = OF.EthernetHeader ethAddr ethAddr OF.ethTypePaneDP
  let body = OF.PaneDPInEthernet (OFS.handle2SwitchID handle) portID
  let frm = HL.HCons hdr (HL.HCons body HL.HNil)
  -- 200 bytes is the serialization buffer size for this small frame.
  let bs = OFBS.runPutToByteString 200 (OF.putEthFrame frm)
  let out = OF.PacketOutRecord (Right bs) Nothing (OF.sendOnPort portID)
  -- 0xbe is an arbitrary transaction ID used for these discovery sends.
  OFS.sendToSwitch handle (0xbe, OF.PacketOut out)
-- | Register a new switch in the NIB.  Returns Nothing if a switch with
-- this ID is already present (mirroring 'addPort' and 'addEndpoint');
-- the previous version ignored its own lookup and silently overwrote the
-- existing entry, so callers' duplicate-switch handling never fired.
addSwitch :: OF.SwitchID -> NIB -> IO (Maybe SwitchData)
addSwitch newSwitchID nib = do
  existing <- Ht.lookup (nibSwitches nib) newSwitchID
  case existing of
    Just _ -> return Nothing
    Nothing -> do
      flowTbl <- newIORef Set.empty
      ports <- Ht.new (==) ((Ht.hashInt).fromIntegral)
      updListener <- newIORef (\_ -> return ())
      let sw = SwitchData newSwitchID UnknownSwitch flowTbl ports updListener
      Ht.insert (nibSwitches nib) newSwitchID sw
      return (Just sw)
-- | Record the flavor of an already-known switch (from its description
-- reply).  Logs an error if the switch is not in the NIB yet.
setSwitchType :: OF.SwitchID -> SwitchType -> NIB -> IO ()
setSwitchType swid stype nib = do
  maybe <- Ht.lookup (nibSwitches nib) swid
  case maybe of
    Nothing -> do
      errorM $ "switch " ++ OF.showSwID swid ++ " not yet in NIB."
                 ++ " cannot add its type."
      return()
    Just sd ->
      let sd' = sd { switchType = stype }
      in do Ht.update (nibSwitches nib) swid sd'
            debugM $ "set switch " ++ OF.showSwID swid ++ " to have type: "
                       ++ show stype
            return()

-- | Register a new, unwired port on a switch; Nothing if the port ID is
-- already taken.
addPort :: OF.PortID -> SwitchData -> IO (Maybe PortData)
addPort newPortID switch = do
  maybe <- Ht.lookup (switchPorts switch) newPortID
  case maybe of
    Just _ -> return Nothing
    Nothing -> do
      queues <- Ht.new (==) ((Ht.hashInt).fromIntegral)
      connectedTo <- newIORef ToNone
      updListener <- newIORef (\_ -> return ())
      -- Knot-tying: the port's 'portDevice' refers back to the port
      -- itself (lazily), so the Element always points at this very port.
      let port = PortData newPortID queues (ToSwitch switch port)
                          connectedTo updListener
      Ht.insert (switchPorts switch) newPortID port
      return (Just port)

-- | Register a discovered host under both its MAC and its IP; Nothing if
-- the MAC is already known.  The host is modeled with a synthetic port
-- (ID 0) that gets wired to the switch port it was seen on.
addEndpoint :: OF.EthernetAddress -> OF.IPAddress -> NIB
            -> IO (Maybe EndpointData)
addEndpoint newEthAddr ipAddr nib = do
  maybe <- Ht.lookup (nibEndpoints nib) newEthAddr
  case maybe of
    Just _ -> return Nothing
    Nothing -> do
      connectedTo <- newIORef ToNone
      queues <- Ht.new (==) ((Ht.hashInt).fromIntegral)
      updListener <- newIORef (\_ -> return ())
      -- Knot-tying: the endpoint's port points back at the endpoint.
      let ep = EndpointData newEthAddr ipAddr
                            (PortData 0 queues (ToEndpoint ep)
                                      connectedTo updListener)
      Ht.insert (nibEndpoints nib) newEthAddr ep
      Ht.insert (nibEndpointsByIP nib) ipAddr ep
      return (Just ep)
-- | Look up an endpoint by its MAC address.
getEndpoint :: OF.EthernetAddress -> NIB -> IO (Maybe EndpointData)
getEndpoint mac nib = Ht.lookup (nibEndpoints nib) mac
-- | All ports currently registered on a switch.
getPorts :: SwitchData -> IO [PortData]
getPorts sw = fmap (map snd) (Ht.toList (switchPorts sw))
-- | Follow a port's wire: the port at the other end (an endpoint's
-- synthetic port or a neighbor switch's port), or Nothing if unwired.
followLink :: PortData -> IO (Maybe PortData)
followLink port = do
  other <- readIORef (portConnectedTo port)
  return $ case other of
    ToNone         -> Nothing
    ToEndpoint ep  -> Just (endpointPort ep)
    ToSwitch _ pd  -> Just pd
-- | Wire two ports together, pointing each at the other's device.
-- Succeeds (True) only if both ports are currently unwired.
linkPorts :: PortData -> PortData -> IO Bool
linkPorts port1 port2 = do
  conn1 <- readIORef (portConnectedTo port1)
  conn2 <- readIORef (portConnectedTo port2)
  case (conn1, conn2) of
    (ToNone, ToNone) -> do
      writeIORef (portConnectedTo port1) (portDevice port2)
      writeIORef (portConnectedTo port2) (portDevice port1)
      return True
    otherwise -> return False

-- "Neighborhood"
-- | The hops taken to reach a node, most recent first:
-- (ingress port, switch, egress port)
type NbhWalk = [(OF.PortID,OF.SwitchID, OF.PortID)]

-- | Lazily unfolded topology tree rooted at some node, each node carrying
-- the walk that reached it.
data Nbh
  = EpNbh NbhWalk OF.EthernetAddress (Maybe Nbh)
  | SwNbh NbhWalk OF.SwitchID [Nbh]
-- | Lazily unfold the neighborhood reachable from an endpoint's port.
-- 'walk' is the path already taken to reach this endpoint.
getEndpointNbh :: NbhWalk -> EndpointData -> IO Nbh
getEndpointNbh walk endpoint = do
  otherEnd <- readIORef (portConnectedTo (endpointPort endpoint))
  case otherEnd of
    ToNone -> do
      return (EpNbh walk (endpointEthAddr endpoint) Nothing)
    ToEndpoint otherEndpoint -> do
      -- NOTE(review): 'unsafePerformIO' defers the recursive traversal
      -- until the sub-'Nbh' is demanded -- presumably so cyclic
      -- topologies do not make construction loop forever; confirm
      -- before restructuring.
      let nbh = unsafePerformIO $ getEndpointNbh walk otherEndpoint
      return (EpNbh walk (endpointEthAddr endpoint) (Just nbh))
    ToSwitch switch otherPort -> do
      -- NOTE(review): the walk restarts at [] here rather than
      -- continuing 'walk' -- TODO confirm that is intended.
      let nbh = unsafePerformIO $ getSwitchNbh (portPortID otherPort) [] switch
      return (EpNbh walk (endpointEthAddr endpoint) (Just nbh))
-- | Lazily unfold the neighborhood reachable from a switch, entered at
-- 'inPort'.  Each out-port extends the walk with an
-- (ingress, this switch, egress) hop.
getSwitchNbh :: OF.PortID -> NbhWalk -> SwitchData -> IO Nbh
getSwitchNbh inPort walk switch = do
  let continueWalk outPort = do
        otherEnd <- readIORef (portConnectedTo outPort)
        let walk' = (inPort, switchSwitchID switch, portPortID outPort):walk
        case otherEnd of
          ToNone -> return Nothing
          ToEndpoint ep -> do
            -- NOTE(review): 'unsafePerformIO' keeps the recursion lazy;
            -- see the matching note on 'getEndpointNbh'.
            let nbh = unsafePerformIO $ getEndpointNbh walk' ep
            return (Just nbh)
          ToSwitch switch' inPort' -> do
            let nbh = unsafePerformIO $
                        getSwitchNbh (portPortID inPort') walk' switch'
            return (Just nbh)
  outPorts <- getPorts switch
  nbhs <- mapM continueWalk outPorts
  -- Unconnected ports yield Nothing and are dropped here.
  return (SwNbh walk (switchSwitchID switch) (catMaybes nbhs))
-- | Reverse lookup: the MAC address registered for an IP, if any.
getEthFromIP :: OF.IPAddress -> NIB -> IO (Maybe OF.EthernetAddress)
getEthFromIP ip nib =
  -- fmap over IO, then over Maybe; avoids the original binding named
  -- 'maybe', which shadowed the Prelude function.
  fmap (fmap endpointEthAddr) (Ht.lookup (nibEndpointsByIP nib) ip)
-- | Search the lazily built 'Nbh' graph for a walk from 'srcEth' to
-- 'dstEth'.  Visited switch IDs are tracked to cut cycles.  Returns []
-- when the source is unknown, unconnected, or no path reaches 'dstEth'.
getPath :: OF.EthernetAddress -> OF.EthernetAddress -> NIB
        -> IO NbhWalk
getPath srcEth dstEth nib = do
  -- 'loop' pops the next neighborhood off the fringe: endpoint nodes are
  -- compared against the destination; switch nodes enqueue their
  -- unvisited switch neighbors.  The walk is accumulated most-recent
  -- hop first, hence the 'reverse' on success.
  let loop fringe visited = case fringe of
        [] -> []
        ((EpNbh walk eth nbh):rest) -> case eth == dstEth of
          True -> reverse walk
          False -> loop rest visited
        ((SwNbh walk swID neighbors):fringe') ->
          let isVisited (EpNbh _ _ _) = False
              isVisited (SwNbh _ swID' _) = swID' `Set.member` visited
              visited' = Set.insert swID visited
            in loop (fringe' ++ (filter (not.isVisited) neighbors)) visited'
  -- NOTE(review): this binding shadows Prelude's 'maybe'.
  maybe <- getEndpoint srcEth nib
  case maybe of
    Nothing -> return []
    Just srcEp -> do
      nbh <- getEndpointNbh [] srcEp
      case nbh of
        EpNbh _ _ (Just nbh) -> return (loop [nbh] Set.empty)
        otherwise -> return []  -- wildcard pattern: source is unconnected
-- | Freeze one port's mutable queue table into a pure 'PortCfg'.
snapshotPortData :: (OF.PortID, PortData) -> IO (OF.PortID, PortCfg)
snapshotPortData (portID, port) = do
  queuePairs <- Ht.toList (portQueues port)
  let cfg = PortCfg (Map.fromList queuePairs)
  return (portID, cfg)
-- | Freeze one switch's mutable state (flow table, ports, queues) into a
-- pure 'Switch' value.
snapshotSwitchData :: (OF.SwitchID, SwitchData) -> IO (OF.SwitchID, Switch)
snapshotSwitchData (sid, switch) = do
  ft       <- readIORef (switchFlowTable switch)
  rawPorts <- Ht.toList (switchPorts switch)
  -- Distinct name instead of rebinding 'ports', which shadowed the
  -- previous binding and triggers -Wall name-shadowing warnings.
  portCfgs <- mapM snapshotPortData rawPorts
  return (sid, Switch (Map.fromList portCfgs) ft (switchType switch))
-- | Take a pure snapshot of every switch currently in the NIB.
snapshot :: NIB -> IO Snapshot
snapshot nib = do
  -- Distinct names instead of rebinding 'lst' (name shadowing).
  switchEntries <- Ht.toList (nibSwitches nib)
  frozen        <- mapM snapshotSwitchData switchEntries
  return (Map.fromList frozen)
-- | Immutable snapshot of a port's queue configuration.
data PortCfg = PortCfg (Map OF.QueueID Queue) deriving (Show, Eq)
-- | Immutable snapshot of a switch: its ports, flow table and type.
data Switch = Switch {
  switchPortMap :: Map OF.PortID PortCfg,
  switchTbl :: FlowTbl,
  switchTypeSnap :: SwitchType
} deriving (Show, Eq)
-- | Immutable snapshot of a host (IP plus MAC address).
data Endpoint = Endpoint OF.IPAddress OF.EthernetAddress deriving (Show, Eq)
-- | A link: either switch-to-switch ('Inner') or host-to-switch ('Leaf').
data Edge
  = Inner OF.SwitchID OF.PortID OF.SwitchID OF.PortID
  | Leaf OF.IPAddress OF.SwitchID OF.PortID
  deriving (Show, Eq)
-- | A whole-network snapshot: switches, hosts and links.
type Network = (Map OF.SwitchID Switch, [Endpoint], [Edge])
emptySwitch = Switch Map.empty Set.empty UnknownSwitch
-- | @unusedNumWithFloor flr lst@ yields the smallest number @>= flr@
-- that does not occur in @lst@.  @lst@ must be in ascending order.
unusedNumWithFloor :: (Num a, Ord a) => a -> [a] -> a
unusedNumWithFloor candidate [] = candidate
unusedNumWithFloor candidate (x:xs)
  | candidate < x  = candidate
  | candidate == x = unusedNumWithFloor (candidate + 1) xs
  | otherwise      = error "unusedNum : lst not ascending"
-- | Allocate a fresh queue on the given port, choosing the smallest
-- unused queue ID (starting at 1), and return the ID together with the
-- updated port map.  Calls 'error' when 'portID' is not in 'ports'.
newQueue :: Map OF.PortID PortCfg -- ^ports
         -> OF.PortID -- ^port to adjust
         -> OF.QueueRate -- ^queue GMB
         -> OF.QueueRate -- ^queue Rlimit
         -> Limit -- ^queue ending time
         -> (OF.QueueID, Map OF.PortID PortCfg) -- ^new configuration
newQueue ports portID gmb rlimit end = (queueID, ports')
  -- Queue IDs start with 1 for Open vSwitch and go up, so let's follow that
  where queueID = unusedNumWithFloor 1 (Map.keys queues)
        queues = case Map.lookup portID ports of
                   Just (PortCfg q) -> q
                   Nothing -> error "newQueue: bad portID"
        queues' = Map.insert queueID (Queue gmb rlimit end) queues
        -- NOTE(review): the lambda ignores its 'PortCfg' argument (its
        -- 'queues' shadows the outer binding); 'adjust' here only swaps
        -- in the pre-computed 'queues''.
        ports' = Map.adjust (\(PortCfg queues) -> PortCfg queues') portID ports
-- | A snapshot 'Switch' with @n@ empty ports (IDs @0 .. n-1@), an empty
-- flow table, and an unknown switch type.
switchWithNPorts :: Word16 -> Switch
switchWithNPorts n =
  -- 'take (fromIntegral n) [0 ..]' avoids the Word16 wrap-around of
  -- '[0 .. n-1]': for n == 0 that range underflows to [0 .. 65535] and
  -- would create 65536 ports instead of none.
  Switch (Map.fromList [ (k, PortCfg Map.empty)
                       | k <- take (fromIntegral n) [0 ..] ])
         Set.empty
         UnknownSwitch
| brownsys/pane | src/NIB.hs | bsd-3-clause | 19,672 | 2 | 35 | 5,359 | 6,250 | 3,092 | 3,158 | -1 | -1 |
module Quiz.Config ( getConfig ) where
import Data.Yaml
import Quiz.Prelude
import System.Environment
import System.Exit
import Quiz.Types
-- | Deserialize command-line arguments and quiz file: yields the port to
-- serve on and the quizzes loaded from the YAML quiz file.  Exits with a
-- message when the quiz file cannot be decoded.
getConfig :: IO (Int, Map Text Quiz)
getConfig = do
  args <- getArgs
  (port, quizFile) <- getSettings args
  decoded <- decodeFileEither quizFile
  case decoded of
    Left err -> do
      putStrLn $ "ERROR: Decoding quiz file failed: " ++ show err
      exitFailure
    Right qf -> return (port, unQuizFile qf)
-- | Determine settings from a list of command-line arguments.
-- Recognizes @-q QUIZ-FILE@ and @-p PORT@; the first occurrence of a
-- flag wins.  Defaults: port 3000, file \"quiz.yaml\".
getSettings :: [String] -> IO (Int, FilePath)
getSettings ("-q":quiz:xs) = do
  (port, _) <- getSettings xs
  return (port, quiz)
getSettings ("-p":port:xs) = do
  (_, quiz) <- getSettings xs
  -- 'reads' instead of the partial 'read': a malformed port number is
  -- reported via the usage message instead of crashing with
  -- "Prelude.read: no parse".
  case reads port of
    [(portNum, "")] -> return (portNum, quiz)
    _ -> do
      putStrLn ("ERROR: Invalid port number: " ++ port)
      putStrLn ""
      printUsage
      exitFailure
getSettings [] = return (3000, "quiz.yaml")
getSettings _ = do
  putStrLn "ERROR: Invalid command-line arguments."
  putStrLn ""
  printUsage
  exitFailure
-- | Print a program usage message to stdout.
printUsage :: IO ()
printUsage = mapM_ putStrLn
  [ "USAGE: quick-quiz [-q QUIZ-FILE] [-p PORT]"
  , ""
  , "Default QUIZ-FILE = 'quiz.yaml'"
  , "Default PORT = 3000"
  ]
| michael-swan/quick-quiz | src/Quiz/Config.hs | bsd-3-clause | 1,379 | 0 | 10 | 311 | 352 | 178 | 174 | 34 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Trombone.Db.Reflection
( probeTemplate
, uscToCamel
) where
import Data.Text ( Text, pack, unpack )
import Database.HsSqlPpp.Ast
import Database.HsSqlPpp.Parser
import Trombone.Db.Parse
import Trombone.Db.Template
import qualified Data.Text as Text
-- | Derive the table name and column names (when determinable) from a
-- DB template by instantiating it with placeholder values ('arbitrary')
-- and parsing the resulting SQL.
probeTemplate :: DbTemplate -> (Maybe Text, Maybe [Text])
{-# INLINE probeTemplate #-}
probeTemplate = probe . arbitrary
-- | Parse a single SQL statement and extract table/column information.
-- Only single INSERTs and SELECT query statements yield results.
-- NOTE(review): a parse failure is fatal ('error'); callers cannot
-- recover from malformed SQL here.
probe :: String -- ^ A "raw" SQL SELECT or INSERT statement
      -> (Maybe Text, Maybe [Text]) -- ^ Table name and list of columns
probe x = case parseStatements "" x of
    Right [i@Insert{}] -> (statmTable i, cc $ statmCols i)
    Right [QueryStatement _ s] -> (queryTable s, cc $ queryCols s)
    Right _ -> (Nothing, Nothing)
    Left e -> error $ show e
  where cc Nothing = Nothing
        cc (Just cols) = Just $ map uscToCamel cols -- CamelCase field names
-- | Probe and extract the table name from a standard SELECT query.
-- The fourth field of 'Select' is its FROM clause (list of table refs).
queryTable :: QueryExpr -> Maybe Text
queryTable ( Select _ _ _ t _ _ _ _ _ _ ) = extractTref t
queryTable _ = Nothing
-- | Probe and extract a list of column names from a standard SELECT query.
-- The third field of 'Select' is its select list.
queryCols :: QueryExpr -> Maybe [Text]
queryCols ( Select _ _ s _ _ _ _ _ _ _ ) = Just $ extractFromList s
queryCols _ = Nothing
-- | Name of a single, simple (unqualified, single-component) table
-- reference; anything else yields 'Nothing'.
extractTref :: [TableRef] -> Maybe Text
extractTref [Tref _ (Name _ [Nmc n]) _] = Just $ pack n
extractTref _ = Nothing
-- | All column names appearing in a SELECT list.
extractFromList :: SelectList -> [Text]
extractFromList (SelectList _ items) = items >>= extract
-- | Extract the name components from a SELECT item.
-- A bare @*@ yields ["*"]; qualified identifiers are joined with "_";
-- aliased items ('SelectItem') yield their alias name.
extract :: SelectItem -> [Text]
extract ( SelExp _ (Star _) ) = ["*"]
extract ( SelExp _ s ) = f s
  where f (Identifier _ (Nmc n) ) = [pack n]
        f (QIdentifier _ xs ) = [Text.intercalate "_" $ map (pack . ncStr) xs]
        f _ = []  -- non-identifier expressions contribute no column name
extract ( SelectItem _ _ (Nmc a) ) = [pack a]
extract ( SelectItem _ _ (QNmc a) ) = [pack a]
-- | Probe and extract the table name from an INSERT statement.
-- Only single-component (unqualified) table names are recognized.
statmTable :: Statement -> Maybe Text
statmTable ( Insert _ (Name _ [Nmc n]) _ _ _ ) = Just $ pack n
statmTable _ = Nothing
-- | Extract a list of column names from an INSERT statement.
statmCols :: Statement -> Maybe [Text]
statmCols ( Insert _ _ xs _ _ ) = Just $ map (pack . ncStr) xs
statmCols _ = Nothing
-- | Translate underscore_formatted_text to camelCaseFormatting.
-- Thin wrapper around 'toCamelCase' with a fixed "_" separator.
uscToCamel :: Text -> Text
{-# INLINE uscToCamel #-}
uscToCamel = toCamelCase "_"
-- | Split text on a separator and camel-case it: the first piece is
-- kept as-is and every subsequent piece has its first character
-- upper-cased.  Empty input yields empty output.
toCamelCase :: Text -> Text -> Text
toCamelCase sep txt
  | Text.null txt = ""
  | otherwise =
      -- Pattern match replaces the original partial 'head'/'tail'
      -- ('splitOn' never returns [], but totality costs nothing here).
      case Text.splitOn sep txt of
        []             -> ""
        (piece:pieces) -> Text.concat (piece : map capitalize pieces)
  where
    capitalize piece
      | Text.null piece = piece
      | otherwise = let (initial, rest) = Text.splitAt 1 piece
                    in Text.concat [Text.toUpper initial, rest]
| johanneshilden/trombone | Trombone/Db/Reflection.hs | bsd-3-clause | 3,056 | 0 | 12 | 939 | 958 | 497 | 461 | 56 | 5 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeApplications #-}
-- | Display mode (resolution, refresh rate, etc.)
module Haskus.System.Linux.Graphics.Mode
( Mode(..)
, ModeType(..)
, ModeTypes
, ModeFlag(..)
, ModeFlags
-- * Low level
, fromStructMode
, toStructMode
)
where
import Haskus.Format.Binary.BitField
import Haskus.Format.Binary.Enum
import Haskus.Format.Binary.Word
import Haskus.Format.Binary.Ptr (castPtr)
import Haskus.Format.Binary.Storable
import Haskus.Format.String
import Haskus.System.Linux.Internals.Graphics
-- | Display mode
-- High-level mirror of the kernel's mode-info struct; see
-- 'fromStructMode' / 'toStructMode' for the conversions.
data Mode = Mode
   { modeClock               :: !Word32      -- ^ pixel clock (units per the DRM ABI)

   , modeHorizontalDisplay   :: !Word16      -- ^ horizontal timings
   , modeHorizontalSyncStart :: !Word16
   , modeHorizontalSyncEnd   :: !Word16
   , modeHorizontalTotal     :: !Word16
   , modeHorizontalSkew      :: !Word16

   , modeVerticalDisplay     :: !Word16      -- ^ vertical timings
   , modeVerticalSyncStart   :: !Word16
   , modeVerticalSyncEnd     :: !Word16
   , modeVerticalTotal       :: !Word16
   , modeVerticalScan        :: !Word16

   , modeVerticalRefresh     :: !Word32      -- ^ refresh rate
   , modeFlags               :: !ModeFlags   -- ^ mode flags (split off the packed field)
   , modeStereo3D            :: !Stereo3D    -- ^ stereo-3D layout (split off the packed field)
   , modeType                :: !ModeTypes
   , modeName                :: !String      -- ^ human-readable mode name
   } deriving (Show)
-- Marshal 'Mode' through its C-layout twin 'StructMode': same size and
-- alignment, converting on peek/poke.
instance Storable Mode where
   sizeOf _    = sizeOfT    @StructMode
   alignment _ = alignmentT @StructMode
   peekIO v    = fromStructMode <$> peekIO (castPtr v)
   pokeIO p v  = pokeIO (castPtr p) (toStructMode v)
-- | Convert the raw C struct into the high-level 'Mode', splitting the
-- packed @miFlags@ bit-field into its \"flags\" and \"stereo3d\" parts
-- and decoding the fixed-size name buffer.
fromStructMode :: StructMode -> Mode
fromStructMode StructMode {..} =
   let
      flgs  = extractField @"flags" miFlags
      flg3d = fromEnumField $ extractField @"stereo3d" miFlags
   in Mode
      { modeClock               = miClock
      , modeHorizontalDisplay   = miHDisplay
      , modeHorizontalSyncStart = miHSyncStart
      , modeHorizontalSyncEnd   = miHSyncEnd
      , modeHorizontalTotal     = miHTotal
      , modeHorizontalSkew      = miHSkew
      , modeVerticalDisplay     = miVDisplay
      , modeVerticalSyncStart   = miVSyncStart
      , modeVerticalSyncEnd     = miVSyncEnd
      , modeVerticalTotal       = miVTotal
      , modeVerticalScan        = miVScan
      , modeVerticalRefresh     = miVRefresh
      , modeFlags               = flgs
      , modeStereo3D            = flg3d
      , modeType                = miType
      , modeName                = fromCStringBuffer miName
      }
-- | Inverse of 'fromStructMode': re-pack flags and stereo-3D layout into
-- the single @miFlags@ bit-field and encode the name into the fixed-size
-- C string buffer.
toStructMode :: Mode -> StructMode
toStructMode Mode {..} =
   let
      flgs = updateField @"flags"    modeFlags
           $ updateField @"stereo3d" (toEnumField modeStereo3D)
           $ BitFields 0
   in StructMode
      { miClock      = modeClock
      , miHDisplay   = modeHorizontalDisplay
      , miHSyncStart = modeHorizontalSyncStart
      , miHSyncEnd   = modeHorizontalSyncEnd
      , miHTotal     = modeHorizontalTotal
      , miHSkew      = modeHorizontalSkew
      , miVDisplay   = modeVerticalDisplay
      , miVSyncStart = modeVerticalSyncStart
      , miVSyncEnd   = modeVerticalSyncEnd
      , miVTotal     = modeVerticalTotal
      , miVScan      = modeVerticalScan
      , miVRefresh   = modeVerticalRefresh
      , miFlags      = flgs
      , miType       = modeType
      , miName       = toCStringBuffer modeName
      }
| hsyl20/ViperVM | haskus-system/src/lib/Haskus/System/Linux/Graphics/Mode.hs | bsd-3-clause | 3,279 | 0 | 13 | 972 | 632 | 374 | 258 | 117 | 1 |
{-
PlayFile.hs (adapted from playfile.c in freealut)
Copyright (c) Sven Panne 2005 <sven.panne@aedion.de>
This file is part of the ALUT package & distributed under a BSD-style license
See the file libraries/ALUT/LICENSE
-}
import Control.Monad ( when, unless )
import Data.List ( intersperse )
import Sound.ALUT
import System.Exit ( exitFailure )
import System.IO ( hPutStrLn, stderr )
-- This program loads and plays a variety of files.
-- | Load the given sound file into an AL buffer, play it on a fresh
-- source, and block until playback finishes (polling every 0.1 s).
-- Exits the program if OpenAL reports any error after starting playback.
playFile :: FilePath -> IO ()
playFile fileName = do
   -- Create an AL buffer from the given sound file.
   buf <- createBuffer (File fileName)

   -- Generate a single source, attach the buffer to it and start playing.
   [source] <- genObjectNames 1
   buffer source $= Just buf
   play [source]

   -- Normally nothing should go wrong above, but one never knows...
   errs <- get alErrors
   unless (null errs) $ do
      hPutStrLn stderr (concat (intersperse "," [ d | ALError _ d <- errs ]))
      exitFailure

   -- Check every 0.1 seconds if the sound is still playing.
   let waitWhilePlaying = do
          sleep 0.1
          state <- get (sourceState source)
          when (state == Playing) $
             waitWhilePlaying
   waitWhilePlaying
-- | Entry point: initialise ALUT, expect exactly one file-name argument,
-- and play that file to completion.
main :: IO ()
main = do
   -- Initialise ALUT and eat any ALUT-specific commandline flags.
   withProgNameAndArgs runALUT $ \progName args -> do

      -- Check for correct usage.
      unless (length args == 1) $ do
         hPutStrLn stderr ("usage: " ++ progName ++ " <fileName>")
         exitFailure

      -- If everything is OK, play the sound file and exit when finished.
      playFile (head args)
| FranklinChen/hugs98-plus-Sep2006 | packages/ALUT/examples/Basic/PlayFile.hs | bsd-3-clause | 1,618 | 0 | 18 | 400 | 338 | 165 | 173 | 28 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | SIG page controller.
module HL.Controller.SIG where
import HL.Controller
import HL.Controller.Markdown
import HL.View
-- | SIG controller.
-- Renders the @sig.md@ markdown file as a page titled
-- \"Commercial Haskell Special Interest Group\" with no breadcrumbs.
getSIGR :: C (Html ())
getSIGR =
  markdownPage [] "Commercial Haskell Special Interest Group" "sig.md"
| commercialhaskell/commercialhaskell.com | src/HL/Controller/SIG.hs | bsd-3-clause | 287 | 0 | 8 | 43 | 56 | 33 | 23 | 8 | 1 |
{-# LANGUAGE PackageImports #-}
import "UCD" Application (develMain)
import Prelude (IO)
main :: IO ()
main = develMain
| mithrandi/ucd-api | app/devel.hs | mit | 121 | 0 | 6 | 19 | 34 | 20 | 14 | 5 | 1 |
module MothAST where
import Control.Monad.Reader
-- | Variable names.
type VName = String
-- | Object field labels.
type Label = String
-- | Operator names.
type OpName = String
-- | Types of the Moth language.
-- 'Eq' is derived because the checker compares argument type lists
-- ('checkExp' on 'MApp').  Note the derived equality on 'TObj' is
-- field-order sensitive.
data Typ = TVar VName
         | TBool
         | TFun [Typ] Typ   -- ^ argument types and result type
         | TNum
         | TUndefined
         | TObj [(Label,Typ)]
         | TString
         deriving (Eq, Show)
-- | Expressions of the Moth language.
data MothExp = MVar VName
             | MVarDecl VName Typ MothExp
             | MApp MothExp [MothExp]
             | MLam [(VName,Typ)] Typ [MothStmt] MothExp
               -- last MothExp is the return of the function
               -- we're going to be a bit strict and make you
             | MTypeOf MothExp
             | MBinOp OpName
             | MUnOp OpName
             | MThis
             | MObjLit [(Label, MothExp)]
             | MDot MothExp Label
             | MPrint MothExp
             | MRead
             | MStringLit String
             | MNumLit Float
             | MUndefined
-- I'm thinking of leaving out 'new' syntax for making objects
-- we can just avoid prototyping and all of that in a first teaching language
-- | Statements: expression statements, for/while loops and
-- if/else-if/else chains.
data MothStmt = MExp MothExp
              | MFor MothExp MothExp MothExp [MothStmt]
              | MWhile MothExp [MothStmt]
              | MIf MothExp [MothStmt] [(MothExp,[MothStmt])] [MothStmt]
-- | A program is a sequence of statements.
type MothProg = [MothStmt]
-- | Richer checker environment (variables plus an object-context stack).
-- NOTE(review): not referenced by the checker below, which uses the
-- plain association-list environment of 'Check' -- presumably a
-- work-in-progress replacement; confirm before removing.
data CheckData = CD {vars :: [(VName,Typ)],
                     ctxt :: [[(Label,Typ)]]}
-- | The type-checking monad: a reader over the typing environment.
type Check = Reader [(VName,Typ)]
-- | Look up a variable's declared type in the typing environment.
-- BUG FIX: 'MonadReader' takes the environment type first and the monad
-- second; the original constraint had the two arguments swapped and
-- does not kind-check.
lookupM :: MonadReader [(VName,Typ)] m => VName -> m (Maybe Typ)
lookupM = asks . lookup
-- | Compute the type of an expression, or abort with 'error' on a type
-- error.  Still partial: 'MVarDecl', 'MLam', 'MBinOp', 'MUnOp', 'MThis'
-- and 'MUndefined' are not yet handled (a dangling, incomplete equation
-- was removed from the end of this definition).
checkExp :: MothExp -> Check Typ
checkExp (MVar v) = do
  mt <- lookupM v
  case mt of
    Nothing -> error "variable not defined"
    -- obviously we want better errors later, I'm thinking something that will print out in a nice format
    -- all the variables that exist in scope at the time
    Just t -> return t
checkExp (MApp f es) = do
  ft <- checkExp f
  ts <- mapM checkExp es
  case ft of
    TFun ts' tr -> if ts == ts'
                   then return tr   -- BUG FIX: was 'return r' ('r' unbound)
                   else error "type mismatch in function"
    _ -> error "tried to apply not a function"
checkExp (MTypeOf m) = do
  checkExp m
  return TString
checkExp (MObjLit ls) = do
  ts <- mapM (\ (l,e) -> do
                t <- checkExp e
                return (l,t)) ls
  return $ TObj ts
checkExp (MStringLit _) = return TString
checkExp (MNumLit _) = return TNum
checkExp (MPrint me) = checkExp me >> return TUndefined
checkExp MRead = return TString
checkExp (MDot me l) = do
  t <- checkExp me
  case t of
    TObj ts -> case lookup l ts of  -- BUG FIX: was 'lookup l t' (a Typ, not a list)
      Nothing -> error "label doesn't exist"
      Just t' -> return t'
    _ -> error "using dot notation at non-object type"
-- | Check a statement for type errors (the result is discarded).
checkStmt :: MothStmt -> Check ()
checkStmt (MExp m) = checkExp m >> return ()
checkStmt (MFor m1 m2 m3 ms) = do
  t <- checkExp m1
  case t of
    TNum -> do
      checkExp m2
      checkExp m3
      mapM_ checkStmt ms
    _ -> error "not using a number as the index in a for loop"
  -- this isn't quite right but it's a start
checkStmt (MWhile me ms) = do
  t <- checkExp me
  case t of
    TBool -> mapM_ checkStmt ms
    _ -> error "not using a boolean for the condition in while"
checkStmt (MIf me mthens mifelses melses) = do
  -- BUG FIX: the original compared the *type* returned by 'checkExp'
  -- as if it were a Bool (a type error) and left the branch checking
  -- unfinished.  Require a TBool condition, then check every branch:
  -- then-block, each else-if (condition and block), and the else-block.
  t <- checkExp me
  case t of
    TBool -> do
      mapM_ checkStmt mthens
      mapM_ (\(cond, ss) -> do
               ct <- checkExp cond
               case ct of
                 TBool -> mapM_ checkStmt ss
                 _ -> error "not using a boolean for the condition in if")
            mifelses
      mapM_ checkStmt melses
    _ -> error "not using a boolean for the condition in if"
-- (A dangling, incomplete 'checkAux' type-signature fragment that ended
-- the file mid-line was removed.)
| clarissalittler/moth-lang | MothAST.hs | mit | 3,315 | 1 | 14 | 1,098 | 986 | 513 | 473 | -1 | -1 |
-- -- $Id$
module Pump.Positiv
( positiv )
where
import Pump.Type
import Language.Type
import Autolib.Util.Seed
import Autolib.ToDoc
import Autolib.Size
import Control.Monad ( guard )
import Autolib.Reporter
import Autolib.FiniteMap
import Autolib.Set
import Data.List ( sort, nub )
import Data.Maybe ( isNothing )
-- | Verify the "positive" direction of a pumping-lemma exercise: the
-- student claims language @l@ has the pumping property witnessed by the
-- decompositions in @p@, checked against the challenge words @ws@.
-- Returns the size of the submission as the score.
positiv :: Pumping z
        => Language -> Pump z -> [ String ]
        -> Reporter Int
-- NOTE(review): partial -- only the 'Ja' case is matched; presumably the
-- caller guarantees a positive ('Ja') pump here.  Confirm.
positiv l ( p @ Ja {} :: Pump z ) ws = do
    -- 'fodder' exists only at the type level, to select the 'Pumping'
    -- instance used by 'tag' / 'tag_show' / 'inflate_show_i'.
    let fodder = undefined :: z
    inform $ vcat $ map text
       [ "Sie möchten nachweisen, daß die Sprache L = "
       , show l
       , "die " ++ tag fodder ++ " erfüllt."
       , ""
       , "Sie behaupten, JEDES Wort p in L mit |p| >= " ++ show (n p)
       , "besitzt eine Zerlegung p = " ++ tag_show fodder ++ ","
       , "so daß für alle i: " ++ inflate_show_i fodder ++ " in L."
       ]
    newline
    when ( n p < 1 ) $ reject $ text "Es soll aber n >= 1 sein."
    inform $ vcat
        [ text "Ich prüfe jetzt, ob die von Ihnen angegebenen Zerlegungen für die Wörter"
        , nest 4 $ toDoc ws
        , text "tatsächlich die geforderten Eigenschaften besitzen."
        ]
    newline
    -- Each challenge word is checked individually; 'report' rejects on
    -- the first bad decomposition.
    mapM_ ( report l p ) ws
    return $ size p
----------------------------------------------------------------------------
-- | Check the student's decomposition of a single word @w@: it must
-- exist, be admissible for the bound @n p@, actually decompose @w@, and
-- every pumped variant must stay inside the language.
report :: Pumping z
       => Language -> Pump z -> String
       -> Reporter ()
report l p w = do
    inform $ text $ "Ich wähle p = " ++ show w
    -- Lazy (irrefutable) pattern: 'z' is only demanded after the
    -- 'isNothing' guard below has ruled out the 'Nothing' case.
    let mz @ ~ (Just z) = lookupFM (zerlege p) w
    when ( isNothing mz )
         $ reject $ text "Sie haben gar keine Zerlegung angegeben."
    inform $ text "Sie wählen" <+> toDoc z
    admissable (n p) z
    when ( w /= inflate 1 z )
         $ reject $ text "Das ist gar keine Zerlegung von p."
    let check i = do
          let w' = inflate i z
          when ( not $ contains l w' ) $ reject $ text
               $ "aber " ++ inflate_show i z ++ " = " ++ show w'
                 ++ " ist nicht in " ++ show l
    -- Only pump counts 0..100 are sampled, not "all i".
    mapM_ check [ 0 .. 100 ] -- FIXME
    inform $ text "OK"
    newline
| florianpilz/autotool | src/Pump/Positiv.hs | gpl-2.0 | 1,979 | 69 | 9 | 574 | 598 | 319 | 279 | -1 | -1 |
module Rasa.Internal.AsyncSpec where
import Test.Hspec
-- | Placeholder spec: no async tests are defined yet.
spec :: Spec
spec = pure ()
| samcal/rasa | rasa/test/Rasa/Internal/AsyncSpec.hs | gpl-3.0 | 87 | 0 | 6 | 14 | 27 | 16 | 11 | 4 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Support.DescribeAttachment
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns the attachment that has the specified ID. Attachment IDs are
-- generated by the case management system when you add an attachment to a case
-- or case communication. Attachment IDs are returned in the 'AttachmentDetails'
-- objects that are returned by the 'DescribeCommunications' operation.
--
-- <http://docs.aws.amazon.com/awssupport/latest/APIReference/API_DescribeAttachment.html>
module Network.AWS.Support.DescribeAttachment
(
-- * Request
DescribeAttachment
-- ** Request constructor
, describeAttachment
-- ** Request lenses
, daAttachmentId
-- * Response
, DescribeAttachmentResponse
-- ** Response constructor
, describeAttachmentResponse
-- ** Response lenses
, darAttachment
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.Support.Types
import qualified GHC.Exts
-- | Request wrapper: just the ID of the attachment to fetch.
newtype DescribeAttachment = DescribeAttachment
    { _daAttachmentId :: Text
    } deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DescribeAttachment' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'daAttachmentId' @::@ 'Text'
--
describeAttachment :: Text -- ^ 'daAttachmentId'
                   -> DescribeAttachment
describeAttachment = DescribeAttachment
-- | The ID of the attachment to return. Attachment IDs are returned by the 'DescribeCommunications' operation.
-- Lens over the request's only field.
daAttachmentId :: Lens' DescribeAttachment Text
daAttachmentId = lens _daAttachmentId (\s a -> s { _daAttachmentId = a })
-- | Response wrapper: the attachment, when the service returned one.
newtype DescribeAttachmentResponse = DescribeAttachmentResponse
    { _darAttachment :: Maybe Attachment
    } deriving (Eq, Read, Show)
-- | 'DescribeAttachmentResponse' constructor; the attachment field
-- starts out as 'Nothing'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'darAttachment' @::@ 'Maybe' 'Attachment'
--
describeAttachmentResponse :: DescribeAttachmentResponse
describeAttachmentResponse = DescribeAttachmentResponse Nothing
-- | The attachment content and file name.
-- Lens over the response's only field.
darAttachment :: Lens' DescribeAttachmentResponse (Maybe Attachment)
darAttachment = lens _darAttachment (\s a -> s { _darAttachment = a })
-- Wire-format boilerplate: the request body carries everything, so the
-- path is "/", the query string is empty, and no extra headers are set.
instance ToPath DescribeAttachment where
    toPath = const "/"

instance ToQuery DescribeAttachment where
    toQuery = const mempty

instance ToHeaders DescribeAttachment
-- Serialize the request as @{"attachmentId": ...}@.
instance ToJSON DescribeAttachment where
    toJSON DescribeAttachment{..} = object
        [ "attachmentId" .= _daAttachmentId
        ]
-- POST to the Support service; the response is decoded from JSON.
instance AWSRequest DescribeAttachment where
    type Sv DescribeAttachment = Support
    type Rs DescribeAttachment = DescribeAttachmentResponse

    request  = post "DescribeAttachment"
    response = jsonResponse
-- Decode the response; "attachment" is optional (.:?).
instance FromJSON DescribeAttachmentResponse where
    parseJSON = withObject "DescribeAttachmentResponse" $ \o -> DescribeAttachmentResponse
        <$> o .:? "attachment"
| kim/amazonka | amazonka-support/gen/Network/AWS/Support/DescribeAttachment.hs | mpl-2.0 | 3,977 | 3 | 9 | 798 | 457 | 276 | 181 | 56 | 1 |
module Lamb where
-- | Add one to odd arguments; return even arguments unchanged.
-- (Adds the missing top-level signature and drops the where-bound
-- lambda whose parameter shadowed 'n'.)
addOneIfOdd :: Integral a => a -> a
addOneIfOdd n
  | odd n     = n + 1
  | otherwise = n
-- p. 355 pattern matching
f :: (a, b, c) -> (d, e, f) -> ((a, d), (c, f))
f (a, _, c) (d, _, f) = ((a, d), (c, f))
-- p. 360 case expressions
-- | The larger of two values (adds the missing top-level signature).
functionC :: Ord a => a -> a -> a
functionC x y = if x > y then x else y
-- | The larger of two values, written with guards instead of a case.
funcC :: Ord a => a -> a -> a
funcC x y
  | x > y     = x
  | otherwise = y
ifEvenAdd2 n = if even n then (n+2) else n
-- | Add two to even arguments; return odd arguments unchanged.
-- Guards instead of the original case-on-Bool.
ifEA2 :: Integral a => a -> a
ifEA2 n
  | even n    = n + 2
  | otherwise = n
-- | Sign of a number: -1, 0 or 1, by three-way comparison against zero.
nums :: (Num a, Ord a) => a -> a
nums x =
  case compare x 0 of
    EQ -> 0
    LT -> -1
    GT -> 1
-- p. 372 higher order functions
-- | Add ten times the second argument to the first.
dodgy :: Num a => a -> a -> a
dodgy a b = a + b * 10
-- | 'dodgy' with its first argument fixed to 1: @oneIsOne n = 1 + n * 10@.
oneIsOne :: Num a => a -> a
oneIsOne n = dodgy 1 n
-- | 'dodgy' with its *second* argument fixed to 2: @oneIsTwo n = n + 20@.
oneIsTwo :: Num a => a -> a
oneIsTwo n = dodgy n 2
-- p. 381 Guard Duty
-- | Letter grade for a score out of 100: >=90 'A', >=80 'B', >=70 'C',
-- >=60 'D', >=50 'E', below that 'F'.
avgGrade :: (Fractional a, Ord a) => a -> Char
avgGrade x
  | y >= 0.9  = 'A'
  | y >= 0.8  = 'B'
  | y >= 0.7  = 'C'
  | y >= 0.6  = 'D'
  | y >= 0.5  = 'E'
  -- 'otherwise' replaces the original 'y < 0.5' so the guards are
  -- exhaustive (-Wincomplete-patterns clean, no runtime pattern error).
  | otherwise = 'F'
  where y = x / 100
-- | Is the list a palindrome?  The guard chain returning literal
-- True/False is replaced by the boolean expression itself.
pal :: Eq a => [a] -> Bool
pal xs = xs == reverse xs
-- | Sign of a number: -1, 0 or 1 (like 'signum' for integral types).
numbers :: (Ord a, Num a) => a -> a
numbers x
  | x < 0     = -1
  | x == 0    = 0
  -- final guard was 'x > 0'; 'otherwise' makes the guards exhaustive
  | otherwise = 1
| m3mitsuppe/haskell | exercises/ch_07_01_lambda.hs | unlicense | 1,332 | 0 | 9 | 533 | 693 | 361 | 332 | 49 | 3 |
module Foundation
( App (..)
, Route (..)
, AppMessage (..)
, resourcesApp
, Handler
, Widget
, Form
, maybeAuth
, requireAuth
, module Settings
, module Model
) where
import Prelude
import Yesod
import Yesod.Static
import Yesod.Auth
import Yesod.Auth.BrowserId
import Yesod.Auth.GoogleEmail
import Yesod.Default.Config
import Yesod.Default.Util (addStaticContentExternal)
import Yesod.Logger (Logger, logMsg, formatLogText)
import Network.HTTP.Conduit (Manager)
import qualified Settings
import qualified Database.Persist.Store
import Settings.StaticFiles
import Database.Persist.GenericSql
import Settings (widgetFile, Extra (..))
import Model
import Text.Jasmine (minifym)
import Web.ClientSession (getKey)
import Text.Hamlet (hamletFile)
-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
    { settings      :: AppConfig DefaultEnv Extra -- ^ Runtime configuration (approot, extras, ...).
    , getLogger     :: Logger                     -- ^ Destination for log messages.
    , getStatic     :: Static -- ^ Settings for static file serving.
    , connPool      :: Database.Persist.Store.PersistConfigPool Settings.PersistConfig -- ^ Database connection pool.
    , httpManager   :: Manager                    -- ^ Shared HTTP client manager (used e.g. by auth plugins).
    , persistConfig :: Settings.PersistConfig     -- ^ Persistent backend configuration.
    }
-- Set up i18n messages. See the message folder.
mkMessage "App" "messages" "en"
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/handler
--
-- This function does three things:
--
-- * Creates the route datatype AppRoute. Every valid URL in your
-- application can be represented as a value of this type.
-- * Creates the associated type:
-- type instance Route App = AppRoute
-- * Creates the value resourcesApp which contains information on the
-- resources declared below. This is used in Handler.hs by the call to
-- mkYesodDispatch
--
-- What this function does *not* do is create a YesodSite instance for
-- App. Creating that instance requires all of the handler functions
-- for our application to be in scope. However, the handler functions
-- usually require access to the AppRoute datatype. Therefore, we
-- split these actions into two functions and place them in separate files.
mkYesodData "App" $(parseRoutesFile "config/routes")
type Form x = Html -> MForm App App (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
    approot = ApprootMaster $ appRoot . settings

    -- Store session data on the client in encrypted cookies,
    -- default session idle timeout is 120 minutes
    makeSessionBackend _ = do
        key <- getKey "config/client_session_key.aes"
        return . Just $ clientSessionBackend key 120

    -- NOTE(review): 'widget' is not referenced directly here; it is
    -- spliced in by the default-layout template below.
    defaultLayout widget = do
        master <- getYesod
        mmsg <- getMessage

        -- We break up the default layout into two components:
        -- default-layout is the contents of the body tag, and
        -- default-layout-wrapper is the entire page. Since the final
        -- value passed to hamletToRepHtml cannot be a widget, this allows
        -- you to use normal widget features in default-layout.

        pc <- widgetToPageContent $ do
            $(widgetFile "normalize")
            addStylesheet $ StaticR css_bootstrap_css
            $(widgetFile "default-layout")
        hamletToRepHtml $(hamletFile "templates/default-layout-wrapper.hamlet")

    -- This is done to provide an optimization for serving static files from
    -- a separate domain. Please see the staticRoot setting in Settings.hs
    urlRenderOverride y (StaticR s) =
        Just $ uncurry (joinPath y (Settings.staticRoot $ settings y)) $ renderRoute s
    urlRenderOverride _ _ = Nothing

    -- The page to be redirected to when authentication is required.
    authRoute _ = Just $ AuthR LoginR

    -- Route all framework log messages through the app's 'Logger'.
    messageLogger y loc level msg =
      formatLogText (getLogger y) loc level msg >>= logMsg (getLogger y)

    -- This function creates static content files in the static folder
    -- and names them based on a hash of their content. This allows
    -- expiration dates to be set far in the future without worry of
    -- users receiving stale content.
    addStaticContent = addStaticContentExternal minifym base64md5 Settings.staticDir (StaticR . flip StaticRoute [])

    -- Place Javascript at bottom of the body tag so the rest of the page loads first
    jsLoader _ = BottomOfBody
-- How to run database actions.
-- Each 'runDB' action borrows a connection from the app's pool.
instance YesodPersist App where
    type YesodPersistBackend App = SqlPersist
    runDB f = do
        master <- getYesod
        Database.Persist.Store.runPool
            (persistConfig master)
            f
            (connPool master)
instance YesodAuth App where
    type AuthId App = UserId

    -- Where to send a user after successful login
    loginDest _ = HomeR
    -- Where to send a user after logout
    logoutDest _ = HomeR

    -- Look the user up by the credentials' identifier; create a fresh
    -- 'User' row (no name, no email) on first login.
    getAuthId creds = runDB $ do
        x <- getBy $ UniqueUser $ credsIdent creds
        case x of
            Just (Entity uid _) -> return $ Just uid
            Nothing -> do
                fmap Just $ insert $ User (credsIdent creds) Nothing Nothing

    -- You can add other plugins like BrowserID, email or OAuth here
    authPlugins _ = [authBrowserId, authGoogleEmail]

    authHttpManager = httpManager
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
-- Form validation messages use Yesod's (English) defaults.
instance RenderMessage App FormMessage where
    renderMessage _ _ = defaultFormMessage
-- Note: previous versions of the scaffolding included a deliver function to
-- send emails. Unfortunately, there are too many different options for us to
-- give a reasonable default. Instead, the information is available on the
-- wiki:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
| sordina/RedditFollow | Foundation.hs | bsd-2-clause | 6,098 | 0 | 17 | 1,325 | 873 | 488 | 385 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Redshift.EnableSnapshotCopy
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Enables the automatic copy of snapshots from one region to another region for
-- a specified cluster.
--
-- <http://docs.aws.amazon.com/redshift/latest/APIReference/API_EnableSnapshotCopy.html>
module Network.AWS.Redshift.EnableSnapshotCopy
(
-- * Request
EnableSnapshotCopy
-- ** Request constructor
, enableSnapshotCopy
-- ** Request lenses
, escClusterIdentifier
, escDestinationRegion
, escRetentionPeriod
-- * Response
, EnableSnapshotCopyResponse
-- ** Response constructor
, enableSnapshotCopyResponse
-- ** Response lenses
, escrCluster
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.Redshift.Types
import qualified GHC.Exts
-- Request payload.  Field names carry the '_esc' prefix so the lens
-- names exported from this module line up with them.
data EnableSnapshotCopy = EnableSnapshotCopy
    { _escClusterIdentifier :: Text
    , _escDestinationRegion :: Text
    , _escRetentionPeriod   :: Maybe Int
    } deriving (Eq, Ord, Read, Show)
-- | 'EnableSnapshotCopy' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'escClusterIdentifier' @::@ 'Text'
--
-- * 'escDestinationRegion' @::@ 'Text'
--
-- * 'escRetentionPeriod' @::@ 'Maybe' 'Int'
--
enableSnapshotCopy :: Text -- ^ 'escClusterIdentifier'
                   -> Text -- ^ 'escDestinationRegion'
                   -> EnableSnapshotCopy
enableSnapshotCopy p1 p2 = EnableSnapshotCopy
    { _escClusterIdentifier = p1
    , _escDestinationRegion = p2
    , _escRetentionPeriod   = Nothing
    }
-- | The unique identifier of the source cluster to copy snapshots from.
--
-- Constraints: Must be the valid name of an existing cluster that does not
-- already have cross-region snapshot copy enabled.
escClusterIdentifier :: Lens' EnableSnapshotCopy Text
escClusterIdentifier =
    lens _escClusterIdentifier (\s a -> s { _escClusterIdentifier = a })
-- | The destination region that you want to copy snapshots to.
--
-- Constraints: Must be the name of a valid region. For more information, see <http://docs.aws.amazon.com/general/latest/gr/rande.html#redshift_region Regions and Endpoints> in the Amazon Web Services General Reference.
escDestinationRegion :: Lens' EnableSnapshotCopy Text
escDestinationRegion =
    lens _escDestinationRegion (\s a -> s { _escDestinationRegion = a })
-- | The number of days to retain automated snapshots in the destination region
-- after they are copied from the source region.
--
-- Default: 7.
--
-- Constraints: Must be at least 1 and no more than 35.
escRetentionPeriod :: Lens' EnableSnapshotCopy (Maybe Int)
escRetentionPeriod =
    lens _escRetentionPeriod (\s a -> s { _escRetentionPeriod = a })
-- Response payload: the cluster description returned by AWS, if any.
newtype EnableSnapshotCopyResponse = EnableSnapshotCopyResponse
    { _escrCluster :: Maybe Cluster
    } deriving (Eq, Read, Show)
-- | 'EnableSnapshotCopyResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'escrCluster' @::@ 'Maybe' 'Cluster'
--
enableSnapshotCopyResponse :: EnableSnapshotCopyResponse
enableSnapshotCopyResponse = EnableSnapshotCopyResponse
    { _escrCluster = Nothing
    }
escrCluster :: Lens' EnableSnapshotCopyResponse (Maybe Cluster)
escrCluster = lens _escrCluster (\s a -> s { _escrCluster = a })
-- The request is sent to the service root; all parameters travel in
-- the query string (standard AWS Query protocol encoding).
instance ToPath EnableSnapshotCopy where
    toPath = const "/"
instance ToQuery EnableSnapshotCopy where
    toQuery EnableSnapshotCopy{..} = mconcat
        [ "ClusterIdentifier" =? _escClusterIdentifier
        , "DestinationRegion" =? _escDestinationRegion
        , "RetentionPeriod"   =? _escRetentionPeriod
        ]
instance ToHeaders EnableSnapshotCopy
-- Ties the request to the Redshift service and its response type.
instance AWSRequest EnableSnapshotCopy where
    type Sv EnableSnapshotCopy = Redshift
    type Rs EnableSnapshotCopy = EnableSnapshotCopyResponse
    request  = post "EnableSnapshotCopy"
    response = xmlResponse
-- Parse the XML wrapper element; the Cluster child is optional.
instance FromXML EnableSnapshotCopyResponse where
    parseXML = withElement "EnableSnapshotCopyResult" $ \x -> EnableSnapshotCopyResponse
        <$> x .@? "Cluster"
| kim/amazonka | amazonka-redshift/gen/Network/AWS/Redshift/EnableSnapshotCopy.hs | mpl-2.0 | 4,996 | 0 | 9 | 1,003 | 568 | 348 | 220 | 69 | 1 |
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE Arrows, PatternGuards #-}
module OVF.Parse
( runParser
) where
import OVF.Model
import OVF.ModelXCI
import Data.Maybe
import Data.Char
import Data.Ord
import Data.List
import Data.String
import Text.XML.HXT.Core
--import Tools.Text
import qualified Data.Text as T
import OVF.AllocationUnit
import Core.Types
encryptionKeySizeDefault = 512
-- | Remove leading and trailing whitespace from a 'String'.
--
-- Implemented directly with 'isSpace', avoiding the previous
-- String -> Text -> String round-trip ('Data.Text.strip' uses the same
-- whitespace predicate, so behaviour is unchanged).
strip :: String -> String
strip = dropWhile isSpace . dropWhileEnd isSpace
splitOnSpace = map T.unpack . filter (not . T.null) . T.split (T.pack " ") . T.pack
-- | Prefix/URI pairs for every XML namespace this parser recognises;
-- fed to cleanupNamespaces in 'runParser' so element matching by
-- "prefix:name" below is stable regardless of the document's prefixes.
namespaces :: [(String,String)]
namespaces =
    [ ("ovf", "http://schemas.dmtf.org/ovf/envelope/1")
    , ("vssd", "http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_VirtualSystemSettingData")
    , ("rasd", "http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_ResourceAllocationSettingData")
    , ("sasd", "http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_StorageAllocationSettingData")
    , ("epasd", "http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_EthernetPortAllocationSettingData")
    , ("xci", "http://www.citrix.com/xenclient/ovf/1")
    ]
-- HXT read options: no DTD validation, no namespace checking, no DTD
-- entity substitution.
xmlParseOpts
  = [ withValidate no
    , withCheckNamespaces no
    , withSubstDTDEntities no
    ]
-- | Turn a failing arrow into one that always succeeds, yielding
-- 'Nothing' when the wrapped arrow produced no result.
maybeA :: ArrowIf a => a b c -> a b (Maybe c)
maybeA f = (f >>> arr Just) `orElse` arr (const Nothing)
-- | 'withDefault' with the default first, the arrow second.
withDefault' = flip withDefault
-- | Parse the input string with 'reads'; the arrow fails (produces no
-- result) on a parse error instead of raising an exception.
readA :: (ArrowIf a, Read c) => a String c
readA = f $< this where
    f str = case reads str of
              [] -> none
              ((v,_):_) -> constA v
-- | Like 'readA' but escalates a parse failure to a fatal HXT error
-- with the given message.
readOrFatalA :: (Read a) => String -> IOSArrow String a
readOrFatalA msg = readA `orElse` (issueFatal msg >>> none)
-- | Parse the whole ovf:Envelope: file references, disk sections,
-- optional network section, licence texts and the content tree.
envelopeA :: IOSArrow XmlTree Envelope
envelopeA = proc x -> do
  references_ <- withDefault' [] (getChildren /> (hasName "ovf:References") >>> listA fileReferenceA) -< x
  diskSections_ <- listA diskSectionA -< x
  networkSection_ <- maybeA networkSectionA -< x
  eulas_ <- listA eulaA -< x
  content_ <- getChildren >>> contentA -< x
  returnA -<
    Envelope { references = references_, diskSections = diskSections_, networkSection = networkSection_
             , eulas = eulas_, content = content_
             }
-- | Licence text of every ovf:EulaSection in the document.
eulaA :: IOSArrow XmlTree String
eulaA = deep (hasName "ovf:EulaSection" /> hasName "ovf:License" /> getText)
-- | A content node is either a single virtual system or a (possibly
-- nested) virtual-system collection.
contentA :: IOSArrow XmlTree Content
contentA = collections <+> systems where
  systems = getChildren >>> virtualSystemA >>> arr ContentVirtualSystem
  collections = getChildren >>> hasName "ovf:VirtualSystemCollection" >>> proc x -> do
    id <- getAttrValue "ovf:id" -< x
    info <- infoA -< x
    name <- (getChildren >>> hasName "ovf:Name" /> getText) `orElse` (constA "") -< x
    productSections_ <- listA productSectionA -< x
    items_ <- listA contentA -< x
    returnA -< ContentVirtualSystemCollection { collectionID = id, collectionInfo = info, collectionName = name,
                                                collectionProductSections = productSections_, collectionItems = items_ }
-- | One ovf:File reference; a missing or unparsable ovf:size is 0.
fileReferenceA :: IOSArrow XmlTree FileRef
fileReferenceA = deep (hasName "ovf:File") >>> proc x -> do
  id <- getAttrValue "ovf:id" -< x
  href <- getAttrValue "ovf:href" -< x
  size <- withDefault' 0 (getAttrValue0 "ovf:size" >>> readOrFatalA "bad ovf:size attribute") -< x
  returnA -< FileRef (FileID id) href size
-- | Text of a child ovf:Info element, or "" when absent.
infoA :: IOSArrow XmlTree String
infoA = withDefault' "" (getChildren >>> hasName "ovf:Info" /> getText)
-- | Parse both plain and shared disk sections; the Bool flag records
-- which kind the contained disks came from.
diskSectionA :: IOSArrow XmlTree DiskSection
diskSectionA =
    (deep (hasName "ovf:DiskSection") >>> contentsA False)
    <+>
    (deep (hasName "ovf:SharedDiskSection") >>> contentsA True)
    where
      contentsA shared = proc x -> do
        info <- infoA -< x
        disks <- listA (diskA shared) -< x
        returnA -< DiskSection info disks
-- | One ovf:Disk element; capacity units default to bytes and a
-- missing populated size stays 'Nothing'.
diskA :: Bool -> IOSArrow XmlTree Disk
diskA shared = deep (hasName "ovf:Disk") >>> proc x -> do
  id <- getAttrValue "ovf:diskId" -< x
  fileRef <- maybeA (getAttrValue0 "ovf:fileRef") -< x
  capacity <- getAttrValue "ovf:capacity" >>> readOrFatalA "bad ovf:capacity attribute" -< x
  capacityUnits <- withDefault' auByte (getAttrValue0 "ovf:capacityAllocationUnits" >>> allocationUnitA) -< x
  popsz <- (getAttrValue0 "ovf:populatedSize" >>> readOrFatalA "bad ovf:populatedSize attribute" >>> arr Just) `orElse` constA Nothing -< x
  f <- getAttrValue "ovf:format" -< x
  returnA -< Disk { diskID = DiskID id, diskFileRef = FileID `fmap` fileRef,
                    diskCapacity = capacity, diskCapacityAllocationUnits = capacityUnits,
                    diskPopulatedSize = popsz, diskShared = shared, diskFormat = f }
-- | The document's ovf:NetworkSection with its logical networks.
networkSectionA :: IOSArrow XmlTree NetworkSection
networkSectionA = deep (hasName "ovf:NetworkSection") >>> proc x -> do
  info <- infoA -< x
  networks_ <- listA networkA -< x
  returnA -< NetworkSection info networks_
-- | One logical network: its name attribute plus description text.
networkA :: IOSArrow XmlTree Network
networkA = deep (hasName "ovf:Network") >>> proc x -> do
  name <- getAttrValue "ovf:name" -< x
  descr <- deep (hasName "ovf:Description") /> getText -< x
  returnA -< Network name descr
-- | Parse one ovf:VirtualSystem.  Hardware items are sorted by
-- instance id and then split into plain resources, storage devices and
-- ethernet ports (see 'partitionItems').  The ovf:transport attribute
-- is a space-separated list.
virtualSystemA :: IOSArrow XmlTree VirtualSystem
virtualSystemA = hasName "ovf:VirtualSystem" >>> proc x -> do
  id <- getAttrValue "ovf:id" -< x
  info <- infoA -< x
  name <- (getChildren >>> hasName "ovf:Name" /> getText) `orElse` (constA "") -< x
  envfiles <- envFilesSectionA -< x
  productSections <- listA productSectionA -< x
  (install, installDelay) <- installSectionA -< x
  items <- listA itemA >>> arr sortItems -< x
  let (resourceItems, storageItems, ethernetPortItems) = partitionItems items
  transport <- withDefault' [] (getAttrValue0 "ovf:transport" >>> arr splitOnSpace) -< x
  returnA -< VirtualSystem {
      systemID = SystemID id, systemInfo = info, systemName = name, systemProductSections = productSections
    , systemOSSection = []
    , systemEnvFiles = envfiles
    , systemResourceItems = resourceItems
    , systemEthernetPortItems = ethernetPortItems
    , systemStorageItems = storageItems
    , systemInstall = install
    , systemInstallDelay = installDelay
    , systemTransport = transport
    }
-- | To ease later processing we split the item list three ways,
-- promoting a plain ResourceItem to a StorageItem or EthernetPortItem
-- when its resource type says it is one (CD/DVD/hard disk, or an
-- ethernet adapter).  'foldr' preserves the input order within each of
-- the three result lists.
--
-- (The previous version also defined an unused local @rev@ helper;
-- it has been removed.)
partitionItems :: [Item] -> ( [ResourceItem], [StorageItem], [EthernetPortItem] )
partitionItems = foldr part ( [], [], [] ) where
  part (SRI i) (rs, ss, eps) = (rs, i:ss, eps)
  part (EPI i) (rs, ss, eps) = (rs, ss , i:eps)
  part (RI i) (rs, ss, eps)
      | isStorage = (rs , mkStorage i : ss, eps)
      | isEthernetPort = (rs , ss , mkEthernetPort i : eps)
      | otherwise = (i:rs, ss , eps)
      where
        t = getResourceType (resTypeID i)
        isStorage | Just x <- t = x `elem` [RT_CDDrive, RT_DVDDrive, RT_HardDisk]
                  | otherwise = False
        isEthernetPort | Just x <- t = x `elem` [RT_EthernetAdapter]
                       | otherwise = False
  -- Promote to a StorageItem with neutral defaults for the SASD-only
  -- fields the plain rasd item could not have carried.
  mkStorage res = StorageItem {
      srResourceItem = res
    , srAccess = 0
    , srHostExtentName = ""
    , srHostExtentNameFormat = 0
    , srHostExtentNameNamespace = 0
    , srHostExtentStartingAddress = 0
    , srHostResourceBlockSize = 0
    , srLimit = 0
    , srOtherHostExtentNameFormat = ""
    , srOtherHostExtentNameNamespace = ""
    , srVirtualResourceBlockSize = 0
    }
  -- Likewise for EPASD-only fields of an ethernet port.
  mkEthernetPort res = EthernetPortItem {
      ethResourceItem = res
    , ethDefaultPortVID = Nothing
    , ethDefaultPriority = Nothing
    , ethDesiredVLANEndpointMode = Nothing
    , ethGroupID = Nothing
    , ethManagerID = Nothing
    , ethNetworkPortProfileID = Nothing
    , ethOtherEndpointMode = Nothing
    , ethOtherNetworkPortProfileIDTypeInfo = Nothing
    , ethPortCorrelationID = Nothing
    , ethPortVID = Nothing
    , ethPromiscuous = False
    , ethReceiveBandwidthLimit = 0
    , ethReceiveBandwidthReservation = 0
    , ethSourceMACFilteringEnabled = False
    , ethAllowedPriorities = []
    , ethAllowedToReceiveMACAddresses = []
    , ethAllowedToReceiveVLANs = []
    , ethAllowedToTransmitMACAddresses = []
    , ethAllowedToTransmitVLANs = []
    }
-- | One ovf:ProductSection: class/instance qualifiers, name, version
-- and the contained property definitions.
productSectionA :: IOSArrow XmlTree ProductSection
productSectionA = deep (hasName "ovf:ProductSection") >>> proc x -> do
  class_ <- maybeA (getAttrValue0 "ovf:class") -< x
  instance_ <- maybeA (getAttrValue0 "ovf:instance") -< x
  info <- infoA -< x
  name <- withDefault' "" (getChildren >>> hasName "ovf:Name" /> getText) -< x
  version <- withDefault' "" (getChildren >>> hasName "ovf:Version" /> getText) -< x
  properties <- listA productPropertyA -< x
  returnA -< ProductSection {
    productClass = class_, productInstance = instance_, productInfo = info, productName = name, productVersion = version, productProperties = properties }
-- | One ovf:Property; userConfigurable/password default to "false".
productPropertyA :: IOSArrow XmlTree ProductProperty
productPropertyA = deep (hasName "ovf:Property") >>> proc x -> do
  descr <- withDefault' "" (getChildren >>> hasName "ovf:Description" /> getText) -< x
  key <- getAttrValue "ovf:key" -< x
  typeStr <- getAttrValue "ovf:type" -< x
  value <- getAttrValue "ovf:value" -< x
  userConfigurableStr <- withDefault' "false" (getAttrValue0 "ovf:userConfigurable") -< x
  passwordStr <- withDefault' "false" (getAttrValue0 "ovf:password") -< x
  let userConfigurable = boolStr userConfigurableStr
      password = boolStr passwordStr
      typ = ovfTypeFromStr typeStr
  returnA -< ProductProperty {
    propertyKey = key, propertyType = typ, propertyValue = value, propertyUserConfigurable = userConfigurable
    , propertyDescription = descr, propertyPassword = password }
-- | XenClient extension: files from the package to drop into the VM
-- environment, keyed by file reference.
envFilesSectionA :: IOSArrow XmlTree [(FileID, FilePath)]
envFilesSectionA = withDefault' [] $ getChildren >>> hasName "ovf:EnvironmentFilesSection" >>> listA envFile where
  envFile = getChildren >>> hasName "ovf:File" >>> proc x -> do
    ref <- getAttrValue0 "ovf:fileRef" -< x
    path <- getAttrValue0 "ovf:path" -< x
    returnA -< (FileID ref,path)
-- | Whether an ovf:InstallSection is present, plus its boot-stop
-- delay; (False, 0) when the section is absent.
installSectionA :: IOSArrow XmlTree (Bool, Int)
installSectionA = (getChildren >>> sectionData) `orElse` constA (False, 0) where
  sectionData = hasName "ovf:InstallSection" >>> proc x -> do
    delay <- withDefault' 0 (getAttrValue0 "ovf:initialBootStopDelay" >>> readOrFatalA "bad ovf:initialBootStopDelay") -< x
    returnA -< (True, delay)
-- | Any of the three hardware item flavours, tagged into 'Item'.
itemA :: IOSArrow XmlTree Item
itemA = (resourceItemA >>> arr RI) <+> (storageItemA >>> arr SRI) <+> (ethernetPortItemA >>> arr EPI)
-- | A plain ovf:Item uses the rasd: namespace for its sub-elements.
resourceItemA = deep (hasName "ovf:Item") >>> resourceItemBodyA "rasd"
-- | Shared body of all item flavours; the namespace prefix of the
-- sub-elements differs per flavour (rasd/sasd/epasd).  Most fields are
-- optional and fall back to "", 0, bytes or 'Nothing' respectively;
-- InstanceID and ResourceType are mandatory (fatal when unparsable).
resourceItemBodyA :: String -> IOSArrow XmlTree ResourceItem
resourceItemBodyA nsPrefix = proc x -> do
  bound <- withDefault' "normal" (getAttrValue0 "ovf:bound") -< x
  --FIXME: currently skipping all but 'normal' bounds, implement support for bounds later
  none `whenP` (/= "normal") -< bound
  address <- withDefault' "" (subitemA "Address") -< x
  addressOnParent <- withDefault' "" (subitemA "AddressOnParent") -< x
  unit <- withDefault' auByte (subitemA "AllocationUnits" >>> allocationUnitA) -< x
  autoalloc <- maybeA (subitemA "AutomaticAllocation") >>> arr (maybe True boolStr) -< x
  descr <- withDefault' "" (subitemA "Description") -< x
  connection <- maybeA (subitemA "Connection") -< x
  hostresource <- maybeA (subitemA "HostResource") -< x
  name <- withDefault' "" (subitemA "ElementName") -< x
  instID <- readSubitemA "InstanceID" >>> arr ResInstanceID -< x
  parent <- maybeA (readSubitemA "Parent" >>> arr ResInstanceID) -< x
  typeID <- readSubitemA "ResourceType" -< x
  subtype <- withDefault' "" (subitemA "ResourceSubType") -< x
  quantity <- withDefault' 0 (readSubitemA "VirtualQuantity") -< x
  quantityUnits <- withDefault' auByte (subitemA "VirtualQuantityUnits" >>> allocationUnitA) -< x
  reservation <- withDefault' 0 (readSubitemA "Reservation") -< x
  returnA -< ResourceItem {
      resAddress = address
    , resAddressOnParent = addressOnParent
    , resAllocationUnits = unit
    , resAutomaticAllocation = autoalloc
    , resDescription = descr
    , resConnection = connection
    , resHostResource = hostresource
    , resName = name
    , resInstanceID = instID
    , resParent = parent
    , resTypeID = typeID
    , resSubType = subtype
    , resVirtualQuantity = quantity
    , resVirtualQuantityUnits = quantityUnits
    , resReservation = reservation
    }
  where
    subitemA n = getChildren >>> hasName (nsPrefix ++ ":" ++ n) /> getText
    readSubitemA n = subitemA n >>> readOrFatalA ("Item: bad " ++ n)
-- | One ovf:EthernetPortItem: the shared resource body (epasd:
-- namespace) plus EPASD-specific optional fields.
--
-- NOTE(review): the element names "ReceiveBandwithLimit" and
-- "ReceiveBandwithReservation" are missing the 'd' of "Bandwidth";
-- confirm against the documents actually being parsed before changing
-- these strings.
ethernetPortItemA :: IOSArrow XmlTree EthernetPortItem
ethernetPortItemA = deep (hasName "ovf:EthernetPortItem") >>> proc x -> do
  res <- resourceItemBodyA "epasd" -< x
  defaultPortVID <- maybeA (readSubitemA "DefaultPortVID") -< x
  defaultPriority <- maybeA (readSubitemA "DefaultPriority") -< x
  desiredVLANEndpointMode <- maybeA (readSubitemA "DesiredVLANEndpointMode") -< x
  groupID <- maybeA (readSubitemA "GroupID") -< x
  managerID <- maybeA (readSubitemA "ManagerID") -< x
  networkPortProfileID <- maybeA (subitemA "NetworkPortProfileID") -< x
  otherEndpointMode <- maybeA (subitemA "OtherEndpointMode") -< x
  otherNetworkPortProfileIDTypeInfo <- maybeA (subitemA "OtherNetworkPortProfileIDTypeInfo") -< x
  portCorrelationID <- maybeA (subitemA "PortCorrelationID") -< x
  portVID <- maybeA (readSubitemA "PortVID") -< x
  promiscuous <- withDefault' False (subitemA "Promiscuous" >>> boolA) -< x
  receiveBandwidthLimit <- withDefault' 0 (readSubitemA "ReceiveBandwithLimit") -< x
  receiveBandwidthReservation <- withDefault' 0 (readSubitemA "ReceiveBandwithReservation") -< x
  sourceMACFilteringEnabled <- withDefault' False (subitemA "SourceMACFilteringEnabled" >>> boolA) -< x
  returnA -< EthernetPortItem {
      ethResourceItem = res, ethDefaultPortVID = defaultPortVID, ethDefaultPriority = defaultPriority
    , ethDesiredVLANEndpointMode = desiredVLANEndpointMode, ethGroupID = groupID, ethManagerID = managerID
    , ethNetworkPortProfileID = networkPortProfileID, ethOtherEndpointMode = otherEndpointMode
    , ethOtherNetworkPortProfileIDTypeInfo = otherNetworkPortProfileIDTypeInfo
    , ethPortCorrelationID = portCorrelationID, ethPortVID = portVID, ethPromiscuous = promiscuous
    , ethReceiveBandwidthLimit = receiveBandwidthLimit, ethReceiveBandwidthReservation = receiveBandwidthReservation
    , ethSourceMACFilteringEnabled = sourceMACFilteringEnabled
    , ethAllowedPriorities = [], ethAllowedToReceiveMACAddresses = [], ethAllowedToReceiveVLANs = []
    , ethAllowedToTransmitMACAddresses = [], ethAllowedToTransmitVLANs = [] }
  where
    subitemA n = getChildren >>> hasName ("epasd:" ++ n) /> getText
    readSubitemA n = subitemA n >>> readOrFatalA ("EthernetPortItem: bad " ++ n)
-- | One ovf:StorageItem: the shared resource body (sasd: namespace)
-- plus SASD-specific optional fields, defaulting to 0 / "".
storageItemA :: IOSArrow XmlTree StorageItem
storageItemA = deep (hasName "ovf:StorageItem") >>> proc x -> do
  res <- resourceItemBodyA "sasd" -< x
  access <- withDefault' 0 (readSubitemA "Access") -< x
  hostExtentName <- withDefault' "" (subitemA "HostExtentName") -< x
  hostExtentNameFormat <- withDefault' 0 (readSubitemA "HostExtentNameFormat") -< x
  hostExtentNameNamespace <- withDefault' 0 (readSubitemA "HostExtentNameNamespace") -< x
  hostExtentStartingAddress <- withDefault' 0 (readSubitemA "HostExtentStartingAddress") -< x
  hostResourceBlockSize <- withDefault' 0 (readSubitemA "HostResourceBlockSize") -< x
  limit <- withDefault' 0 (readSubitemA "Limit") -< x
  otherHostExtentNameFormat <- withDefault' "" (subitemA "OtherHostExtentNameFormat") -< x
  otherHostExtentNameNamespace <- withDefault' "" (subitemA "OtherHostExtentNameNamespace") -< x
  virtualResourceBlockSize <- withDefault' 0 (readSubitemA "VirtualResourceBlockSize") -< x
  returnA -< StorageItem {
      srResourceItem = res, srAccess = access, srHostExtentName = hostExtentName
    , srHostExtentNameFormat = hostExtentNameFormat, srHostExtentNameNamespace = hostExtentNameNamespace
    , srHostExtentStartingAddress = hostExtentStartingAddress, srHostResourceBlockSize = hostResourceBlockSize
    , srLimit = limit, srOtherHostExtentNameFormat = otherHostExtentNameFormat
    , srOtherHostExtentNameNamespace = otherHostExtentNameNamespace
    , srVirtualResourceBlockSize = virtualResourceBlockSize }
  where
    subitemA n = getChildren >>> hasName ("sasd:" ++ n) /> getText
    readSubitemA n = subitemA n >>> readOrFatalA ("StorageItem: bad " ++ n)
-- | Parse an allocation-unit string (e.g. "byte * 2^20"); a malformed
-- string is a fatal parse error.
allocationUnitA :: IOSArrow String AllocationUnit
allocationUnitA = f $< this where
    f str = case allocationUnitParse str of
      Just unit -> constA unit
      Nothing -> issueFatal ("malformed allocation units string: '" ++ show str ++ "'") >>> none
-- Empty appliance description, used when the document carries no
-- xci:ApplianceSection at all.
defaultXciApp =
    XCIAppliance { xciAppDisks = [], xciAppNetworks = [], xciAppVms = [], xciAppID = Nothing, xciAppVersion = Nothing }
-- | The XenClient appliance extension section; falls back to
-- 'defaultXciApp' when the section is absent.
xciAppA :: IOSArrow XmlTree XCIAppliance
xciAppA = xciAppA' `orElse` constA defaultXciApp
-- | Parse an xci:ApplianceSection that is actually present.
xciAppA' :: IOSArrow XmlTree XCIAppliance
xciAppA' = deep (hasName "xci:ApplianceSection") >>> proc x -> do
  appid <- maybeA (getAttrValue0 "xci:applianceId") -< x
  appv <- maybeA (getAttrValue0 "xci:version" >>> readOrFatalA "bad appliance version") -< x
  disks <- listA (getChildren >>> xciDiskA) -< x
  networks <- listA (getChildren >>> xciNetworkA) -< x
  vms <- listA (getChildren >>> xciVmA) -< x
  returnA -< XCIAppliance { xciAppDisks = disks, xciAppNetworks = networks, xciAppVms = vms, xciAppID = appid, xciAppVersion = appv }
-- | Parse an xci:Disk extension element: the referenced OVF disk id,
-- the requested encryption (none, generate a fresh key, or import a
-- key from a referenced file) and an optional filesystem to create.
xciDiskA :: IOSArrow XmlTree XCIDisk
xciDiskA = hasName "xci:Disk" >>> proc x -> do
  did <- getAttrValue0 "xci:ovfId" >>> arr DiskID -< x
  enc <- encryptionA -< x
  filesys <- maybeA (filesystemA $< getAttrValue0 "xci:filesystem") -< x
  returnA -< XCIDisk { xciDiskId = did, xciDiskEncryption = enc, xciDiskFilesystem = filesys }
  where
    -- Map the xci:filesystem attribute to a known filesystem; an
    -- unknown name is a fatal parse error.
    filesystemA str = f (filesystemFromStr str) where
        f (Just fs) = constA fs
        f _ = issueFatal ("unknown xci:filesystem '" ++ show str ++ "'") >>> none
    -- No encryption element at all means no encryption.
    encryptionA = encryptionGenerateA `orElse` encryptionImportA `orElse` constA NoEncryption
    -- Use the module-wide default instead of a magic 512 literal.
    encryptionGenerateA = getChildren >>> hasName "xci:GenerateEncryptionKey" >>>
                          withDefault' encryptionKeySizeDefault (getAttrValue0 "xci:keySize" >>> readOrFatalA "bad encryption key size") >>> arr GenerateCryptoKey
    encryptionImportA = getChildren >>> hasName "xci:ImportEncryptionKey" >>> getAttrValue0 "xci:fileRef" >>> arr (UseCryptoKey . FileID)
-- | XenClient network extension: network name plus the optional
-- client-side network id to map it to.
xciNetworkA :: IOSArrow XmlTree XCINetwork
xciNetworkA = hasName "xci:Network" >>> proc x -> do
  name <- getAttrValue0 "xci:name" -< x
  netid <- maybeA (getAttrValue0 "xci:clientNetworkId") -< x
  returnA -< XCINetwork { xciNetworkName = name, xciNetworkClientId = netid }
-- | Per-VM XenClient extensions: template/uuid selection, device
-- property overrides, firewall/passthrough rules, DB entries and
-- domstore files.
xciVmA :: IOSArrow XmlTree XCIVm
xciVmA = hasName "xci:VirtualMachine" >>> proc x -> do
  id <- getAttrValue0 "xci:ovfId" >>> arr SystemID -< x
  template <- maybeA (getAttrValue0 "xci:templateId") -< x
  uuid <- maybeA (getAttrValue0 "xci:uuid" >>> arr fromString) -< x
  netdevs <- listA (getChildren >>> xciNetworkAdapterA) -< x
  storagedevs <- listA (getChildren >>> xciStorageDeviceA) -< x
  v4v <- xciV4VRulesA -< x
  rpc <- xciRpcRulesA -< x
  pci <- xciPtRulesA -< x
  dbentries <- withDefault' [] xciDBEntriesA -< x
  dsfiles <- withDefault' [] xciDomStoreFilesA -< x
  props <- xciPropertyOverridesA -< x
  returnA -< XCIVm { xciVmId = id, xciVmUuid = uuid, xciVmTemplate = template,
                     xciVmPropertyOverride = props, xciVmV4VRules = v4v, xciVmRpcRules = rpc, xciVmPtRules = pci,
                     xciVmDB = dbentries, xciVmDomStoreFiles = dsfiles,
                     xciVmNetworkAdapters = netdevs, xciVmStorageDevices = storagedevs
                   }
-- | Property overrides for one network adapter, keyed by the adapter's
-- OVF instance id.
xciNetworkAdapterA :: IOSArrow XmlTree XCINetworkAdapter
xciNetworkAdapterA = hasName "xci:NetworkAdapter" >>> proc x -> do
  id <- (getAttrValue0 "xci:ovfInstanceId" >>> readOrFatalA "bad instance id" >>> arr ResInstanceID) -< x
  props <- xciPropertyOverridesA -< x
  returnA -< XCINetworkAdapter { xciNetworkAdapterId = id, xciNetworkAdapterPropertyOverride = props }
-- | Property overrides for one storage device, keyed the same way.
xciStorageDeviceA :: IOSArrow XmlTree XCIStorageDevice
xciStorageDeviceA = hasName "xci:StorageDevice" >>> proc x -> do
  id <- (getAttrValue0 "xci:ovfInstanceId" >>> readOrFatalA "bad instance id" >>> arr ResInstanceID) -< x
  props <- xciPropertyOverridesA -< x
  returnA -< XCIStorageDevice { xciStorageDeviceId = id, xciStorageDevicePropertyOverride = props }
-- | All name/value overrides under an xci:PropertyOverride wrapper;
-- empty list when the wrapper is absent.
xciPropertyOverridesA :: IOSArrow XmlTree [XCIPropertyOverride]
xciPropertyOverridesA =
    withDefault' [] ( getChildren >>> hasName "xci:PropertyOverride" >>> listA xciPropertyOverrideA )
xciPropertyOverrideA :: IOSArrow XmlTree XCIPropertyOverride
xciPropertyOverrideA = deep (hasName "xci:Property") >>> proc x -> do
  key <- getAttrValue "xci:name" -< x
  value <- getAttrValue "xci:value" -< x
  returnA -< XCIPropertyOverride key value
-- | V4V firewall rules, one whitespace-stripped string per rule.
xciV4VRulesA :: IOSArrow XmlTree [String]
xciV4VRulesA = withDefault' [] ( deep (hasName "xci:V4VFirewall") >>> listA xciV4VRuleA )
xciV4VRuleA :: IOSArrow XmlTree String
xciV4VRuleA = deep (hasName "xci:V4VRule") /> getText >>> arr strip
-- | RPC firewall rules, same representation as V4V rules.
xciRpcRulesA :: IOSArrow XmlTree [String]
xciRpcRulesA = withDefault' [] ( deep (hasName "xci:RpcFirewall") >>> listA xciRpcRuleA )
xciRpcRuleA :: IOSArrow XmlTree String
xciRpcRuleA = deep (hasName "xci:RpcRule") /> getText >>> arr strip
-- | PCI passthrough rules: match by class/vendor/device id or by BDF.
xciPtRulesA :: IOSArrow XmlTree [PtRule]
xciPtRulesA = withDefault' [] ( deep (hasName "xci:PCIPassthrough") >>> listA xciPtRuleA )
xciPtRuleA :: IOSArrow XmlTree PtRule
xciPtRuleA = byID <+> byBDF where
  byID = deep (hasName "xci:MatchID") >>> proc x -> do
    cls <- maybeA (getAttrValue0 "xci:class" >>> readOrFatalA "bad pci class") -< x
    vendor <- maybeA (getAttrValue0 "xci:vendor" >>> readOrFatalA "bad pci vendor") -< x
    dev <- maybeA (getAttrValue0 "xci:device" >>> readOrFatalA "bad pci device") -< x
    returnA -< PtMatchID cls vendor dev
  byBDF = deep (hasName "xci:MatchBDF") >>> proc x -> do
    bdf <- getAttrValue "xci:bdf" -< x
    returnA -< PtMatchBDF bdf
-- | Optional xci:SystemTemplate text, stripped of whitespace.
xciSystemTemplateIDA :: IOSArrow XmlTree (Maybe String)
xciSystemTemplateIDA = maybeA (getChildren >>> hasName "xci:SystemTemplate" /> getText >>> arr strip)
-- | File references to copy into the VM's domstore.
xciDomStoreFilesA :: IOSArrow XmlTree [FileID]
xciDomStoreFilesA = listA domStoreFileA where
  domStoreFileA = deep (hasName "xci:DomStoreFile") >>> getAttrValue0 "xci:fileRef" >>> arr FileID
-- | Database entries to seed for the VM; section defaults to the VM
-- section, and only "vm" / "vm-domstore" are accepted section names.
xciDBEntriesA :: IOSArrow XmlTree [DBEntry]
xciDBEntriesA = listA xciDBEntryA
xciDBEntryA :: IOSArrow XmlTree DBEntry
xciDBEntryA = deep (hasName "xci:DBEntry") >>> proc x -> do
  section <- withDefault' VmSection (mkDBSection $< getAttrValue0 "xci:section") -< x
  key <- getAttrValue0 "xci:key" -< x
  v <- getAttrValue0 "xci:value" -< x
  returnA -< DBEntry section key v
  where
    mkDBSection "vm" = constA VmSection
    mkDBSection "vm-domstore" = constA DomStoreSection
    mkDBSection x = issueFatal ("bad xci:section value '" ++ x ++ "'") >>> none
-- | Arrow wrapper around 'boolStr'.
boolA :: IOSArrow String Bool
boolA = arr boolStr
-- | Case-insensitive parse of an OVF boolean attribute: exactly the
-- word "true" (in any letter case) yields 'True'; anything else,
-- including the empty string, yields 'False'.
boolStr :: String -> Bool
boolStr s = map toLower s == "true"
-- | Map an OVF property @ovf:type@ attribute to our 'PPType'.  The
-- comparison is case-insensitive, and any unrecognised type name falls
-- back to 'PPT_String' (same contract as before, table-driven).
ovfTypeFromStr :: String -> PPType
ovfTypeFromStr x =
    fromMaybe PPT_String (lookup (map toLower x) table)
  where
    table =
      [ ("uint8",  PPT_Uint8),  ("sint8",  PPT_Sint8)
      , ("uint16", PPT_Uint16), ("sint16", PPT_Sint16)
      , ("uint32", PPT_Uint32), ("sint32", PPT_Sint32)
      , ("uint64", PPT_Uint64), ("sint64", PPT_Sint64)
      , ("string", PPT_String), ("boolean", PPT_Bool)
      , ("real32", PPT_Real32), ("real64", PPT_Real64)
      ]
-- | Order hardware items by their (flavour-independent) instance id.
sortItems :: [Item] -> [Item]
sortItems = sortBy (comparing instID) where
  u (ResInstanceID x) = x
  instID (RI i) = u $ resInstanceID i
  instID (SRI i) = u $ resInstanceID $ srResourceItem i
  instID (EPI i) = u $ resInstanceID $ ethResourceItem i
-- | Read and parse an OVF file: normalise namespaces to the prefixes
-- in 'namespaces', run the envelope and XenClient-extension parsers,
-- and return 'Nothing' when parsing produced no result or any error
-- was collected (non-zero HXT error status).
runParser :: FilePath -> IO (Maybe (Envelope, XCIAppliance))
runParser path
    = rv =<< runX ( errorMsgStderrAndCollect
                    >>> readDocument xmlParseOpts path
                    >>> propagateNamespaces
                    >>> normaliseNamespaces
                    >>> uniqueNamespacesFromDeclAndQNames
                    >>> (envelopeA &&& xciAppA) &&& getErrStatus
                  )
    where
      normaliseNamespaces = fromLA $ cleanupNamespaces (constA namespaces >>> unlistA)
      rv [] = return Nothing
      rv (((env,xci),status):_)
          | status == 0 = return $ Just (env,xci)
          | otherwise = return $ Nothing
| crogers1/manager | apptool/OVF/Parse.hs | gpl-2.0 | 25,107 | 25 | 18 | 4,790 | 7,024 | 3,567 | 3,457 | 441 | 13 |
{-# LANGUAGE TupleSections #-}
-- | haste-install-his; install all .hi files in a directory.
module Main where
import Haste.Environment
import System.Environment
import Control.Applicative
import Control.Monad
import Data.List
import Data.Char
import Control.Shell
-- | Entry point: expects exactly a package name and a source
-- directory; anything else prints usage.  The result of 'shell' is
-- deliberately discarded via the trailing @return ()@.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [package, dir] -> shell $ installFromDir (pkgSysLibDir </> package) dir
    _ -> shell $ echo "Usage: haste-install-his pkgname dir"
  return ()
-- | All entries in @dir@ whose name ends in @.hi@.
getHiFiles :: FilePath -> Shell [FilePath]
getHiFiles dir = do
  entries <- ls dir
  return [f | f <- entries, ".hi" `isSuffixOf` f]
-- | List the immediate subdirectories of @dir@ that look like Haskell
-- module components: non-hidden names starting with an upper-case
-- letter.
--
-- Unlike the previous version this does not call the partial 'head';
-- an empty entry name is simply skipped (the @(c:_)@ pattern in the
-- comprehension generator filters it out) instead of crashing.
getSubdirs :: FilePath -> Shell [FilePath]
getSubdirs dir = do
  contents <- ls dir
  someDirs <- mapM (\d -> fmap ((,) d) (isDirectory (dir </> d))) contents
  return [ path | (path@(c:_), isDir) <- someDirs
                , isDir
                , c /= '.'
                , isUpper c ]
-- | Recursively install every .hi file under @path@ into @base@,
-- mirroring the module directory structure.  The target directory is
-- only created when there is at least one .hi file to install.
--
-- NOTE(review): 'main' already passes @base = pkgSysLibDir </> pkg@,
-- yet 'mkdir' here prepends 'pkgSysLibDir' a second time while the
-- copy target in 'installHiFile' uses @base@ as-is — verify the
-- semantics of '</>' for this case / whether this is intended.
installFromDir :: FilePath -> FilePath -> Shell ()
installFromDir base path = do
  hiFiles <- getHiFiles path
  when (not $ null hiFiles) $ do
    mkdir True (pkgSysLibDir </> base)
    mapM_ (installHiFile base path) hiFiles
  getSubdirs path >>= mapM_ (\d -> installFromDir (base </> d) (path </> d))
-- | Copy a single interface file from @from@ to @to@, logging it.
installHiFile :: FilePath -> FilePath -> FilePath -> Shell ()
installHiFile to from file = do
  echo $ "Installing " ++ from </> file ++ "..."
  cp (from </> file) (to </> file)
| joelburget/haste-compiler | src/haste-install-his.hs | bsd-3-clause | 1,360 | 0 | 14 | 305 | 484 | 244 | 240 | 38 | 2 |
module CaseIn2 where
-- NOTE(review): this file appears to be a refactoring-tool test input
-- (introduce-new-definition); the redundant lambdas around addThree
-- and plusOne look deliberate, so the code shape should be preserved.
foo :: Int -> Int
-- The case only covers 1 and 0: any other argument is a runtime
-- pattern-match failure.
foo x
  = case x of
      1 -> foo 0
      0 -> ((\ a b c -> addThree a b c) 1 2 3) +
               ((\ y -> plusOne y) 2)
  where
      addThree a b c = (a + b) + c
      plusOne y = y + 1
-- NOTE(review): 10 matches neither case alternative, so evaluating
-- main fails at runtime — presumably intentional for this fixture.
main = foo 10
| kmate/HaRe | old/testing/introNewDef/CaseIn2AST.hs | bsd-3-clause | 313 | 0 | 13 | 172 | 135 | 70 | 65 | 10 | 2 |
{-| Metadata daemon types.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Metad.Types where
import Data.Map (Map)
import Text.JSON
type InstanceParams = Map String JSValue
| apyrgio/ganeti | src/Ganeti/Metad/Types.hs | bsd-2-clause | 1,443 | 0 | 5 | 224 | 35 | 22 | 13 | 4 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
module T14955a where
import Prelude (Bool(..), (||), (&&))
-- Implementation 1
-- A minimal boolean-algebra vocabulary.  Implementation 1 (per the
-- numbered comments in this file) passes the operations through an
-- ordinary type class.
class Prop r where
  or :: r -> r -> r
  and :: r -> r -> r
  true :: r
  false :: r
instance Prop Bool where
  or = (||)
  and = (&&)
  true = True
  false = False
-- Implementation 2
-- Implementation 2: the same vocabulary as an explicit record of
-- functions ("dictionary passing" by hand).
data PropDict r = PropDict {
    dor :: r -> r -> r
  , dand :: r -> r -> r
  , dtrue :: r
  , dfalse :: r
  }
-- The Bool dictionary, mirroring the Prop Bool instance above.
boolDict = PropDict {
    dor = (||)
  , dand = (&&)
  , dtrue = True
  , dfalse = False }
-- Implementation 3
-- A single-method class whose only payload is the explicit dictionary.
class PropProxy r where
  propDict :: PropDict r
instance PropProxy Bool where
  propDict = boolDict
-- Implementation 4
-- Like PropProxy but with a second (dummy) method, so the class
-- dictionary is not a newtype of PropDict.
class PropProxy2 r where
  propDict2 :: PropDict r
  dummy :: ()
instance PropProxy2 Bool where
  propDict2 = boolDict
  dummy = ()
-- Fold a list with `or`, via the type class (Implementation 1).
-- NOTE(review): this looks like a compiler test comparing the four
-- dictionary-passing styles; keep the four definitions structurally
-- parallel (including the INLINABLE pragmas) when touching them.
ors :: Prop r => [r] -> r
ors [] = true
ors (o:os) = o `or` ors os
{-# INLINABLE ors #-}
-- Same fold, with the dictionary passed explicitly (Implementation 2).
dors :: PropDict r -> [r] -> r
dors pd [] = dtrue pd
dors pd (o:os) = dor pd o (dors pd os)
-- Same fold, dictionary obtained from the proxy class (Implementation 3).
pors :: PropProxy r => [r] -> r
pors [] = dtrue propDict
pors (o:os) = dor propDict o (pors os)
{-# INLINABLE pors #-}
-- Same fold, via the two-method proxy class (Implementation 4).
porsProxy :: PropProxy2 r => [r] -> r
porsProxy [] = dtrue propDict2
porsProxy (o:os) = dor propDict2 o (porsProxy os)
{-# INLINABLE porsProxy #-}
| sdiehl/ghc | testsuite/tests/perf/should_run/T14955a.hs | bsd-3-clause | 1,272 | 0 | 10 | 315 | 510 | 285 | 225 | 46 | 1 |
-- !! fromRational woes
import Data.Ratio -- 1.3
-- Print, one per line: the smallest/largest Float and Double, the
-- results of round-tripping each through toRational/fromRationalX,
-- and a tiny literal converted via fromRationalX vs. read directly —
-- exercising fromRational behaviour at the extremes of the range.
main = putStr (
    shows tinyFloat ( '\n'
    : shows t_f ( '\n'
    : shows hugeFloat ( '\n'
    : shows h_f ( '\n'
    : shows tinyDouble ( '\n'
    : shows t_d ( '\n'
    : shows hugeDouble ( '\n'
    : shows h_d ( '\n'
    : shows x_f ( '\n'
    : shows x_d ( '\n'
    : shows y_f ( '\n'
    : shows y_d ( "\n"
    )))))))))))))
  where
    t_f :: Float
    t_d :: Double
    h_f :: Float
    h_d :: Double
    x_f :: Float
    x_d :: Double
    y_f :: Float
    y_d :: Double
    -- Round-trips of the extreme values.
    t_f = fromRationalX (toRational tinyFloat)
    t_d = fromRationalX (toRational tinyDouble)
    h_f = fromRationalX (toRational hugeFloat)
    h_d = fromRationalX (toRational hugeDouble)
    -- A sub-normal-range literal, via fromRationalX...
    x_f = fromRationalX (1.82173691287639817263897126389712638972163e-300 :: Rational)
    x_d = fromRationalX (1.82173691287639817263897126389712638972163e-300 :: Rational)
    -- ...and as a plain fractional literal for comparison.
    y_f = 1.82173691287639817263897126389712638972163e-300
    y_d = 1.82173691287639817263897126389712638972163e-300
-- Convert a Rational to a RealFloat by repeatedly scaling numerator or
-- denominator into the representable range (tracked in the exponent
-- e0), then converting the in-range remainder and applying scaleFloat.
-- NOTE(review): 'rationalToRealFloat' is not a standard-Prelude name;
-- this file presumably targets a compiler/Prelude that exposes it.
fromRationalX :: (RealFloat a) => Rational -> a
fromRationalX r =
    let
        h = ceiling (huge `asTypeOf` x)
        b = toInteger (floatRadix x)
        x = fromRat 0 r
        fromRat e0 r' =
            let d = denominator r'
                n = numerator r'
            in if d > h then
                 let e = integerLogBase b (d `div` h) + 1
                 in fromRat (e0-e) (n % (d `div` (b^e)))
              else if abs n > h then
                 let e = integerLogBase b (abs n `div` h) + 1
                 in fromRat (e0+e) ((n `div` (b^e)) % d)
              else
                 scaleFloat e0 (rationalToRealFloat {-fromRational-} r')
            in x
{-
fromRationalX r =
rationalToRealFloat r
{- Hmmm...
let
h = ceiling (huge `asTypeOf` x)
b = toInteger (floatRadix x)
x = fromRat 0 r
fromRat e0 r' =
{--} trace (shows e0 ('/' : shows r' ('/' : shows h "\n"))) (
let d = denominator r'
n = numerator r'
in if d > h then
let e = integerLogBase b (d `div` h) + 1
in fromRat (e0-e) (n % (d `div` (b^e)))
else if abs n > h then
let e = integerLogBase b (abs n `div` h) + 1
in fromRat (e0+e) ((n `div` (b^e)) % d)
else
scaleFloat e0 (rationalToRealFloat r')
-- now that we know things are in-bounds,
-- we use the "old" Prelude code.
{--} )
in x
-}
-}
-- Compute the discrete log of i in base b.
-- Simplest way would be just divide i by b until it's smaller then b, but that would
-- be very slow! We are just slightly more clever.
-- | Discrete logarithm: the largest @e@ with @b^e <= i@ (and @0@ when
-- @i < b@).  Rather than dividing by @b@ one step at a time, recurse
-- on the squared base to cut the number of divisions roughly in half.
integerLogBase :: Integer -> Integer -> Int
integerLogBase b i
  | i < b     = 0
  | otherwise = countDivs (i `div` (b ^ l)) l
  where
    -- log in base b*b accounts for 2*l divisions by b
    l = 2 * integerLogBase (b * b) i
    -- finish off with plain repeated division
    countDivs :: Integer -> Int -> Int
    countDivs n acc
      | n < b     = acc
      | otherwise = countDivs (n `div` b) (acc + 1)
------------
-- Compute smallest and largest floating point values.
-- | Smallest positive value of a RealFloat type, built by tying the
-- knot: the exponent range is queried on the result itself.
tiny :: (RealFloat a) => a
tiny =
    let (l, _) = floatRange x
        x = encodeFloat 1 (l-1)
    in x
-- | Largest finite value of a RealFloat type: a full-width significand
-- (radix^digits - 1) at the maximum exponent, again via knot-tying.
huge :: (RealFloat a) => a
huge =
    let (_, u) = floatRange x
        d = floatDigits x
        x = encodeFloat (floatRadix x ^ d - 1) (u - d)
    in x
-- Monomorphic instantiations of the extreme values above.
tinyDouble = tiny :: Double
tinyFloat = tiny :: Float
hugeDouble = huge :: Double
hugeFloat = huge :: Float
{-
[In response to a request by simonpj, Joe Fasel writes:]
A quite reasonable request! This code was added to the Prelude just
before the 1.2 release, when Lennart, working with an early version
of hbi, noticed that (read . show) was not the identity for
floating-point numbers. (There was a one-bit error about half the time.)
The original version of the conversion function was in fact simply
a floating-point divide, as you suggest above. The new version is,
I grant you, somewhat denser.
How's this?
--Joe
-}
-- | Convert a Rational to a floating-point value by iterating on the
-- exponent until it is a fixed point (Joe Fasel's algorithm; see the
-- comment block above for its history).
rationalToRealFloat :: (RealFloat a) => Rational -> a
rationalToRealFloat x = x'
    where x' = f e
          -- If the exponent of the nearest floating-point number to x
          -- is e, then the significand is the integer nearest xb^(-e),
          -- where b is the floating-point radix.  We start with a good
          -- guess for e, and if it is correct, the exponent of the
          -- floating-point number we construct will again be e.  If
          -- not, one more iteration is needed.
          f e = if e' == e then y else f e'
              where y = encodeFloat (round (x * (1%b)^^e)) e
                    (_,e') = decodeFloat y
          b = floatRadix x'
          -- We obtain a trial exponent by doing a floating-point
          -- division of x's numerator by its denominator.  The
          -- result of this division may not itself be the ultimate
          -- result, because of an accumulation of three rounding
          -- errors.
          (s,e) = decodeFloat (fromInteger (numerator x) `asTypeOf` x'
                               / fromInteger (denominator x))
| urbanslug/ghc | testsuite/tests/codeGen/should_run/cgrun034.hs | bsd-3-clause | 4,739 | 44 | 41 | 1,297 | 1,104 | 594 | 510 | 79 | 3 |
-- There was a bug in 6.12 that meant that the binding
-- for 'rght' was initially determined (correctly) to be
-- strictly demanded, but the FloatOut pass made it lazy
--
-- The test compiles the program and greps for the
-- binding of 'rght' to check that it is marked strict
-- something like this:
-- rght [Dmd=Just S] :: EvalTest.AList a
module EvalTest where
import GHC.Conc
import Control.Applicative (Applicative(..))
import Control.Monad (liftM, ap)
-- | A minimal evaluation-order monad with a single constructor.
-- The pattern match on 'Done' in '>>=' is what makes bind strict,
-- which the enclosing strictness-analysis test depends on.
data Eval a = Done a
instance Functor Eval where
  fmap = liftM
instance Applicative Eval where
  pure = return
  (<*>) = ap
instance Monad Eval where
  return x = Done x
  Done x >>= k = k x -- Note: pattern 'Done x' makes '>>=' strict
-- | Spark @x@ for possible parallel evaluation and return it.
rpar :: a -> Eval a
rpar x = x `par` return x
-- | Evaluate @x@ to WHNF (via 'pseq') before returning it.
rseq :: a -> Eval a
rseq x = x `pseq` return x
-- | Unwrap the result of an 'Eval' computation.
runEval :: Eval a -> a
runEval (Done x) = x
-- | An append-friendly list: empty, singleton, binary append node, or
-- an embedded plain list.
data AList a = ANil | ASing a | Append (AList a) (AList a) | AList [a]
-- | Append that collapses 'ANil' on either side.  The constructor
-- patterns force both arguments to WHNF; the '-- **' equation is the
-- one relevant to the strictness of 'rght' below -- do not reorder.
append ANil r = r
append l ANil = l -- **
append l r = Append l r
-- | Build an 'AList' of @fn@ applied to @min..max@, divide and
-- conquer, sparking the right half ('rght') in parallel.  The test
-- compiles this and greps for the demand on the binding of 'rght'
-- (see the header comment) -- keep the shape of this code as-is.
parListTreeLike :: Integer -> Integer -> (Integer -> a) -> AList a
parListTreeLike min max fn
 | max - min <= threshold = ASing (fn max)
 | otherwise =
    runEval $ do
      rpar rght
      rseq left
      return (left `append` rght)
  where
    mid = min + ((max - min) `quot` 2)
    left = parListTreeLike min mid fn
    rght = parListTreeLike (mid+1) max fn
threshold = 1
| siddhanathan/ghc | testsuite/tests/simplCore/should_compile/EvalTest.hs | bsd-3-clause | 1,397 | 0 | 11 | 360 | 449 | 240 | 209 | 35 | 1 |
-- | Athena.Translation.Utils module.
{-# LANGUAGE UnicodeSyntax #-}
module Athena.Translation.Utils
( stdName
, subIndex
) where
------------------------------------------------------------------------------
import Data.List.Split ( splitOn )
------------------------------------------------------------------------------
-- | Normalise a name: delete every @\'-\'@ and replace ASCII digits
-- with their Unicode subscript forms via 'subIndex'.
-- (Splitting on @\"-\"@ and concatenating the pieces is exactly
-- deleting the dashes, so do that directly with 'filter' and avoid
-- the @split@ package round-trip.)
stdName :: String -> String
stdName = map subIndex . filter ('-' /=)
-- | Map an ASCII digit to its Unicode subscript form; every other
-- character is returned unchanged.
subIndex :: Char -> Char
subIndex c =
  case lookup c digitTable of
    Just sub -> sub
    Nothing  -> c
  where
    -- pairs each ASCII digit with its subscript counterpart
    digitTable = zip "0123456789" "₀₁₂₃₄₅₆₇₈₉"
| jonaprieto/athena | src/Athena/Translation/Utils.hs | mit | 676 | 0 | 7 | 103 | 160 | 86 | 74 | 19 | 1 |
-- file: ch03/NestedLets.hs
-- From chapter 3, http://book.realworldhaskell.org/read/defining-types-streamlining-functions.html
-- | Nested lets: both @a@ and @b@ are in scope in the innermost body,
-- so @foo@ evaluates to 3.
foo = let a = 1
      in let b = 2
         in a + b
-- | Shadowing: the inner @x@ (a String) hides the outer @x@ only
-- inside its own let-body, so @bar@ is @("foo", 1)@.
bar = let x = 1
      in ((let x = "foo" in x), x)
-- | The let-bound @a@ shadows the parameter @a@, so the argument is
-- ignored and @quux@ always returns @"fooeek!"@.
quux a = let a = "foo"
         in a ++ "eek!"
| Sgoettschkes/learning | haskell/RealWorldHaskell/ch03/NestedLets.hs | mit | 272 | 0 | 12 | 71 | 93 | 47 | 46 | 7 | 1 |
module TeX.Count
( Count(CountOverflow)
)
where
-- | A TeX count register value: either an in-range integer or an
-- overflowed value that absorbs all further arithmetic.
data Count = Count Integer | CountOverflow
  deriving (Eq, Show)

-- | Clamp into TeX's legal count range @[-2147483647 .. 2147483647]@;
-- anything outside becomes 'CountOverflow'.
checkOverflowed :: Count -> Count
checkOverflowed CountOverflow = CountOverflow
checkOverflowed c@(Count n)
  | abs n > 2147483647 = CountOverflow
  | otherwise          = c

-- | Arithmetic on counts: each binary operation range-checks its
-- result, and 'CountOverflow' is contagious.  'negate', 'abs' and
-- 'signum' need no check since the legal range is symmetric.
instance Num Count where
  Count a + Count b = checkOverflowed (Count (a + b))
  _ + _ = CountOverflow
  Count a - Count b = checkOverflowed (Count (a - b))
  _ - _ = CountOverflow
  Count a * Count b = checkOverflowed (Count (a * b))
  _ * _ = CountOverflow
  negate CountOverflow = CountOverflow
  negate (Count a) = Count (negate a)
  abs CountOverflow = CountOverflow
  abs (Count a) = Count (abs a)
  signum CountOverflow = CountOverflow
  signum (Count a) = Count (signum a)
  fromInteger = checkOverflowed . Count
| spicyj/tex-parser | src/TeX/Count.hs | mit | 846 | 0 | 9 | 186 | 369 | 191 | 178 | 26 | 1 |
module UnitB.Expr.Parser where
import Logic.Expr
import Logic.Expr.Parser.Internal.Setting
import UnitB.UnitB
import Control.Lens hiding ( indices )
import Data.Map hiding ( map )
import qualified Data.Map as M
-- | Parser setting for a machine: the theory's setting extended with
-- the machine's variables, plus a primed copy of each variable.
-- (Assumes 'addPrime'\/'prime' build the primed name and variable --
-- defined elsewhere in the project; confirm.)
machine_setting :: Machine -> ParserSetting
machine_setting m = setting
        & decls %~ (view' variables m `union`)
        & primed_vars .~ M.mapKeys addPrime (M.map prime $ m!.variables)
    where
        setting = theory_setting (getExpr <$> m!.theory)
-- | 'machine_setting' further extended with the event's indices,
-- for parsing the event's schedules.
schedule_setting :: Machine -> Event -> ParserSetting
schedule_setting m evt = setting & decls %~ ((evt^.indices) `union`)
    where
        setting = machine_setting m
-- | 'schedule_setting' further extended with the event's parameters,
-- for parsing the event's guards and actions.
event_setting :: Machine -> Event -> ParserSetting
event_setting m evt = setting & decls %~ ((evt^.params) `union`)
    where
        setting = schedule_setting m evt
| literate-unitb/literate-unitb | src/UnitB/Expr/Parser.hs | mit | 833 | 0 | 11 | 182 | 253 | 144 | 109 | 18 | 1 |
-- Implemntation of a merge sort.
-- [] and [a] are already sorted,
-- and any other list is sorted by merging together
-- the two lists that result from sorting the two halves of the list separately.
-- | Split a list into two halves; when the length is odd the second
-- half gets the extra element.
halve :: [a] -> ([a],[a])
halve xs = (take half xs, drop half xs)
  where half = length xs `div` 2
-- | Merge two sorted lists into one sorted list.  On ties the element
-- from the first list is taken first (stable).
merge :: Ord a => [a] -> [a] -> [a]
merge [] ys = ys
merge xs [] = xs
merge left@(x:xs) right@(y:ys)
  | y < x     = y : merge left ys
  | otherwise = x : merge xs right
-- | Merge sort: lists of length at most one are already sorted; any
-- longer list is halved, each half sorted recursively, and the two
-- sorted halves merged.
msort :: Ord a => [a] -> [a]
msort xs
  | length xs <= 1 = xs
  | otherwise      = merge (msort front) (msort back)
  where (front, back) = halve xs
-- Smoke test: sorting a scramble of 1..10 should give [1..10].
test = msort [10,2,5,1,9,4,3,6,8,7]
| calebgregory/fp101x | wk3/msort.hs | mit | 604 | 0 | 9 | 147 | 310 | 168 | 142 | 14 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Main where
-- import Options.Applicative
import Control.Monad (filterM, forM)
import Data.List (find, isSuffixOf)
import Data.String.Utils (join)
import Development.FileModules
import Distribution.ModuleName (ModuleName, fromString)
import Distribution.PackageDescription
import Distribution.PackageDescription.Parse (ParseResult (..),
parsePackageDescription)
import System.Directory
import System.Environment
import System.FilePath
import Text.Printf
-- | Entry point: expects the target package directory as the first
-- command-line argument (extra arguments are ignored); dies with
-- 'error' when no argument is given.
main :: IO ()
main = do
  args <- getArgs
  case args of
    (dirName:_) -> do
      pkg <- packageAtDirectory dirName
      splitPackage pkg
    _ -> error "Invalid arguments"
-- | A cabal package: the path of its @.cabal@ file and the parsed
-- generic package description.
data Package = Package { packagePath :: FilePath
                       , packageGenericDescription :: GenericPackageDescription
                       }
-- | A Haskell module: its source file path and its dotted module name.
data Module = Module { moduleFPath :: FilePath
                     , moduleName :: ModuleName
                     }
  deriving(Show)
-- | Run 'splitModule' over every library module of the package.
splitPackage :: Package -> IO ()
splitPackage pkg = packageModulesIO pkg >>= mapM_ (splitModule pkg)
-- | "Split" a single module -- currently just prints the modules its
-- source file references (recursively); the package is unused so far.
splitModule :: Package -> Module -> IO ()
splitModule _pkg m = do
  ms <- fileModulesRecur (moduleFPath m)
  print ms
-- TODO: refactor this together with getDirectoryModules
-- | All modules of the package's library, gathered from each of its
-- hs-source-dirs (resolved relative to the .cabal file's directory);
-- returns @[]@ when the package has no library section.
packageModulesIO :: Package -> IO [Module]
packageModulesIO Package{..} = packageLibModules
  where
    clib = condTreeData <$> condLibrary packageGenericDescription
    packageLibModules | Just lib <- clib =
                        concat <$> mapM (getDirectoryModules . (takeDirectory packagePath </>))
                                        (hsSourceDirs (libBuildInfo lib))
                      | otherwise = return []
-- |
-- Recursively lists Haskell modules under a directory.
--
-- Fix: the previous version threaded an accumulator of the current
-- directory's modules into the recursive call for EVERY subdirectory
-- and concatenated the results, so a directory with @k@ subdirectories
-- reported its own modules @k@ times.  Each directory's modules are
-- now collected exactly once and concatenated with the sub-results.
getDirectoryModules :: FilePath -> IO [Module]
getDirectoryModules fp = print fp >> go [] fp
  where
    isHaskellFile = (== ".hs") . takeExtension
    -- modPrefix is the module-name path accumulated from the
    -- directories walked so far (the starting directory contributes
    -- no component, as before)
    go modPrefix dir = do
      entries <- map (dir </>) .
                 filter (\f -> f /= "." && f /= "..") <$>
                 getDirectoryContents dir
      dirs <- filterM doesDirectoryExist entries
      let mkModule mf = Module
            { moduleFPath = mf
            , moduleName = fromString $
                           join "." (modPrefix ++ [takeBaseName mf])
            }
          -- modules directly in this directory (counted once)
          here = map mkModule (filter isHaskellFile entries)
      below <- forM dirs $ \d -> go (modPrefix ++ [takeBaseName d]) d
      return (here ++ concat below)
-- | Locate and parse the first @.cabal@ file directly under @fp@;
-- dies with 'error' when none is found or parsing fails.
packageAtDirectory :: FilePath -> IO Package
packageAtDirectory fp = do
  printf "Looking for .cabal file in %s\n" fp
  fs <- getDirectoryContents fp
  case find (".cabal" `isSuffixOf`) fs of
    Just cabalFile -> do
      let pkgPath = fp </> cabalFile
      printf "Using %s\n" pkgPath
      result <- parsePackageDescription <$> readFile pkgPath
      case result of
        ParseOk _ pkgDesc -> return $ Package pkgPath pkgDesc
        _ -> error (printf "Failed to parse %s" pkgPath)
    Nothing -> error (printf "Couldn't find a cabal file in %s" fp)
| haskellbr/missingh | package-splitter/src/Main.hs | mit | 3,526 | 1 | 20 | 1,260 | 845 | 431 | 414 | 70 | 3 |
import Control.Error
import Control.Monad.Trans
-- | Ways the interactive loop can fail: a non-positive number, or
-- input that does not parse (the offending string is kept).
data Failure
  = NonPositive Int
  | ReadError String
  deriving Show
-- | Read a number from stdin; succeed with its successor when it is
-- positive, otherwise short-circuit with a 'Failure' carried in
-- 'EitherT', and report the outcome either way.
main :: IO ()
main = do
  e <- runEitherT $ do
    liftIO $ putStrLn "Enter a positive number."
    s <- liftIO getLine
    n <- tryRead (ReadError s) s
    if n > 0
      then return $ n + 1
      else throwT $ NonPositive n
  case e of
    Left n -> putStrLn $ "Failed with: " ++ show n
    Right s -> putStrLn $ "Succeeded with: " ++ show s
| riwsky/wiwinwlh | src/eithert.hs | mit | 482 | 0 | 14 | 149 | 170 | 82 | 88 | 18 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-jobcommand.html
module Stratosphere.ResourceProperties.GlueJobJobCommand where
import Stratosphere.ResourceImports
-- | Full data type definition for GlueJobJobCommand. See 'glueJobJobCommand'
-- for a more convenient constructor.  Both CloudFormation properties
-- are optional.
data GlueJobJobCommand =
  GlueJobJobCommand
  { _glueJobJobCommandName :: Maybe (Val Text)
  , _glueJobJobCommandScriptLocation :: Maybe (Val Text)
  } deriving (Show, Eq)
-- | Serialise to CloudFormation JSON, omitting unset (Nothing) fields.
instance ToJSON GlueJobJobCommand where
  toJSON GlueJobJobCommand{..} =
    object $
    catMaybes
    [ fmap (("Name",) . toJSON) _glueJobJobCommandName
    , fmap (("ScriptLocation",) . toJSON) _glueJobJobCommandScriptLocation
    ]
-- | Constructor for 'GlueJobJobCommand' containing required fields as
-- arguments.  (This resource has no required fields, so every field
-- starts out as 'Nothing'.)
glueJobJobCommand
  :: GlueJobJobCommand
glueJobJobCommand =
  GlueJobJobCommand
  { _glueJobJobCommandName = Nothing
  , _glueJobJobCommandScriptLocation = Nothing
  }
-- | Lens onto the optional command name.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-jobcommand.html#cfn-glue-job-jobcommand-name
gjjcName :: Lens' GlueJobJobCommand (Maybe (Val Text))
gjjcName = lens _glueJobJobCommandName (\s a -> s { _glueJobJobCommandName = a })
-- | Lens onto the optional S3 location of the job script.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-glue-job-jobcommand.html#cfn-glue-job-jobcommand-scriptlocation
gjjcScriptLocation :: Lens' GlueJobJobCommand (Maybe (Val Text))
gjjcScriptLocation = lens _glueJobJobCommandScriptLocation (\s a -> s { _glueJobJobCommandScriptLocation = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/GlueJobJobCommand.hs | mit | 1,718 | 0 | 12 | 205 | 264 | 151 | 113 | 27 | 1 |
{-|
Module : BreadU.Pages.Markup.Common.Footer
Description : HTML markup for pages' top area.
Stability : experimental
Portability : POSIX
HTML markup for pages' top area.
Please don't confuse it with <head>-tag, it's defined in another module.
-}
module BreadU.Pages.Markup.Common.Footer
( commonFooter
) where
import BreadU.Types ( LangCode(..) )
import BreadU.Pages.Types ( FooterContent(..) )
import BreadU.Pages.CSS.Names ( ClassName(..) )
import BreadU.Pages.Markup.Common.Utils
import BreadU.Pages.JS.SocialButtons ( facebookSDK )
import Prelude hiding ( div, span )
import Text.Blaze.Html5
import qualified Text.Blaze.Html5.Attributes as A
import TextShow ( showt )
import Data.Monoid ( (<>) )
-- | Footer for all pages: the author info line followed by the
-- Facebook/Twitter share buttons, localised by @langCode@.
commonFooter :: FooterContent -> LangCode -> Html
commonFooter FooterContent{..} langCode = footer $ do
    authorInfo
    socialButtons
  where
    -- Author name, a separator, and a mailto icon link.
    authorInfo = div ! A.class_ (toValue AuthorInfo) $ do
        span $ toHtml authorName
        span ! A.class_ (toValue AuthorInfoMailToSeparator) $ mempty
        a ! A.href "mailto:me@dshevchenko.biz"
          ! A.class_ (toValue MailToIcon)
          ! A.title (toValue emailToAuthor) $ fa "fa-envelope"

    -- Two-column row: Facebook share on the left, Twitter on the right.
    socialButtons = div ! A.class_ (toValue SocialButtons) $
        row_ $ do
            div ! A.class_ "col-6 text-right" $ facebook
            div ! A.class_ "col-6" $ twitter

    -- Markup obtained from the Facebook SDK documentation; the SDK
    -- script is injected per-language via 'facebookSDK'.
    facebook = do
        div ! A.id "fb-root" $ mempty
        script $ toHtml $ facebookSDK langCode
        div ! A.class_ "fb-share-button"
            ! dataAttribute "href" (toValue $ "https://breadu.info/" <> showt langCode)
            ! dataAttribute "layout" "button"
            ! dataAttribute "size" "large"
            ! dataAttribute "mobile-iframe" "true" $
            a ! A.class_ "fb-xfbml-parse-ignore"
              ! customAttribute "target" "_blank"
              ! A.href (toValue $ "https://www.facebook.com/sharer/sharer.php?u=https%3A%2F%2Fbreadu.info%2F"
                                  <> showt langCode
                                  <> "&src=sdkpreparse") $ mempty

    -- <a>-code obtained from the Twitter Developer Documentation.
    twitter =
        a ! A.class_ "twitter-share-button"
          ! A.href "https://twitter.com/intent/tweet?hashtags=BreadUCalculator"
          ! dataAttribute "size" "large" $ mempty
| denisshevchenko/breadu.info | src/lib/BreadU/Pages/Markup/Common/Footer.hs | mit | 2,635 | 0 | 21 | 835 | 523 | 273 | 250 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-- | Controlling biegunka interpreters and their composition
module Control.Biegunka.Settings
( -- * Settings common for all interpreters
Settings
, HasSettings(..)
, HasRunRoot(..)
, defaultSettings
, logger
, Templates(..)
-- ** Biegunka mode
, Mode(..)
, defaultMode
, _Online
, _Offline
) where
import Control.Lens
import Control.Biegunka.Logger (Logger, HasLogger(..))
import Control.Biegunka.Script (HasRunRoot(..))
import Control.Biegunka.Templates
import Control.Biegunka.Templates.HStringTemplate
-- | Settings common for all interpreters and also specific for this one
data Settings = Settings
  { _runRoot :: FilePath -- ^ Root path for 'Source' layer
  , _biegunkaRoot :: FilePath -- ^ Absolute path of the Biegunka data files root
  , __logger :: Maybe Logger -- ^ 'Logger' handle, if logging is enabled
  , _templates :: Templates -- ^ Templates mapping
  , _mode :: Mode -- ^ Biegunka mode (online/offline)
  }
-- | Classy access to 'Settings'.  Only 'settings' needs implementing;
-- the remaining lenses are derived from it by default.
class HasSettings t where
  settings :: Lens' t Settings
  -- | Optional logger handle.
  _logger :: Lens' t (Maybe Logger)
  _logger = settings . \f x -> f (__logger x) <&> \y -> x { __logger = y }
  -- | Templates mapping.
  templates :: Lens' t Templates
  templates = settings . \f x -> f (_templates x) <&> \y -> x { _templates = y }
  -- | Online/offline mode.
  mode :: Lens' t Mode
  mode = settings . \f x -> f (_mode x) <&> \y -> x { _mode = y }
  -- | Biegunka data files root.
  biegunkaRoot :: Lens' t FilePath
  biegunkaRoot = settings . \f x -> f (_biegunkaRoot x) <&> \y -> x { _biegunkaRoot = y }
-- 'Settings' trivially has itself.
instance HasSettings Settings where
  settings = id
  {-# INLINE settings #-}

instance HasRunRoot Settings where
  runRoot f x = f (_runRoot x) <&> \y -> x { _runRoot = y }

-- Traverses the logger only when one is present.
instance HasLogger Applicative Settings where
  logger = _logger.traverse
-- | Defaults: run from the home directory, keep data in @~/.biegunka@,
-- no logger, empty HStringTemplate templates, online mode.
defaultSettings :: Settings
defaultSettings = Settings
  { _runRoot = "~"
  , _biegunkaRoot = "~/.biegunka"
  , __logger = Nothing
  , _templates = hStringTemplate ()
  , _mode = defaultMode
  }
-- | Whether sources may be fetched from the network.
data Mode = Offline | Online
    deriving (Show, Eq)

-- | Prism matching the 'Offline' mode.
-- (The @\_ ->@ lambdas are deliberate; see the HLint ANN pragmas.)
_Offline :: Prism' Mode ()
_Offline = prism' (\_ -> Offline) (\case Offline -> Just (); Online -> Nothing)
{-# ANN _Offline "HLint: ignore Use const" #-}

-- | Prism matching the 'Online' mode.
_Online :: Prism' Mode ()
_Online = prism' (\_ -> Online) (\case Online -> Just (); Offline -> Nothing)
{-# ANN _Online "HLint: ignore Use const" #-}

-- | Biegunka runs 'Online' unless told otherwise.
defaultMode :: Mode
defaultMode = Online
| biegunka/biegunka | src/Control/Biegunka/Settings.hs | mit | 2,392 | 0 | 12 | 535 | 653 | 378 | 275 | 59 | 2 |
{-# LANGUAGE MultiParamTypeClasses, DeriveDataTypeable, FlexibleInstances #-}
module Program.Array.Instance where
import Program.Array.Statement
import Program.Array.Value
import Program.Array.Semantics
import qualified Program.Array.Roll as R
import qualified Program.Array.Config as F
import Program.General.Class
import Program.General.Central
import Program.General.Environment
import Program.General.Program
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Reporter
import qualified Challenger as C
import Inter.Types
import Inter.Quiz
import Autolib.Size
import Autolib.Util.Zufall ( repeat_until )
import Data.Typeable
import Data.Maybe ( isNothing, isJust )
-- | Marker type identifying the array-program exercise.
data Program_Array = Program_Array deriving ( Eq, Ord, Show, Read, Typeable )
instance OrderScore Program_Array where
    scoringOrder _ = None -- ?
-- Wires the array statement/value types into the generic
-- program-checking framework, with a one-statement example program.
instance Class Program_Array Statement Program.Array.Value.Value where
    execute p = Program.Array.Semantics.execute
    example p = ( Program [ Program.Array.Statement.s0 ]
                , Program.General.Environment.make
                  [ ( read "x", Program.Array.Value.example ) ]
                )
-- | Quiz generator entry point, using the example configuration.
make_quiz :: Make
make_quiz = quiz Program_Array F.example
-- Instance generation: roll random (start, program, final) triples
-- until one is nontrivial.
instance Generator
    Program_Array
    F.Config
    ( Environment Program.Array.Value.Value
    , Program Statement
    , Environment Program.Array.Value.Value
    ) where
    generator p conf key =
        R.roll conf `repeat_until` nontrivial conf
-- | Reject instances solvable by prepending at most one "patch"
-- statement to the generated program (too easy to guess).
nontrivial conf (_, Program sts , final) = not $ or $ do
    let bnd = ( 0 , fromIntegral $ F.max_data_size conf )
    ps <- [] : map return ( patches final bnd )
    return $ matches ( final , Program $ ps ++ sts , final )
-- | Does the candidate triple pass the total-correctness check?
matches ( start, prog, final ) =
    isJust $ result $ C.total Program_Array ( prog, final ) start
-- The task shown to the student keeps only the program and the final
-- environment; the start environment is what must be reconstructed.
instance Project
    Program_Array
    ( Environment Program.Array.Value.Value
    , Program Statement
    , Environment Program.Array.Value.Value
    )
    ( Program Statement
    , Environment Program.Array.Value.Value
    ) where
    project _ ( start, p, final ) = ( p, final )
| Erdwolf/autotool-bonn | src/Program/Array/Instance.hs | gpl-2.0 | 2,171 | 12 | 14 | 518 | 576 | 329 | 247 | 53 | 1 |
{-# LANGUAGE FlexibleInstances, FlexibleContexts #-}
{- |
Module : $Header$
Description : CASL signatures colimits
Copyright : (c) Mihai Codescu, and Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : mcodescu@informatik.uni-bremen.de
Stability : provisional
Portability : non-portable
CASL signature colimits, computed component-wise.
Supposed to be working for CASL extensions as well.
based on
<http://www.informatik.uni-bremen.de/~till/papers/colimits.ps>
-}
module CASL.ColimSign(signColimit, extCASLColimit) where
import CASL.Sign
import CASL.Morphism
import CASL.Overload
import CASL.AS_Basic_CASL
import Common.Id
import Common.SetColimit
import Common.Utils (number, nubOrd)
import Common.Lib.Graph
import qualified Common.Lib.Rel as Rel
import qualified Common.Lib.MapSet as MapSet
import Data.Graph.Inductive.Graph as Graph
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.List
import Logic.Logic
-- | Colimit for the trivial CASL extension: the unit extension,
-- together with a unit map for every node of the diagram.
extCASLColimit :: Gr () (Int, ()) ->
                  Map.Map Int CASLMor ->
                  ((),Map.Map Int ())
extCASLColimit graph _ = ((),Map.fromList $ zip (nodes graph) (repeat ()))
-- | Central function for computing CASL signature colimits.  The
-- colimit is built component-wise: sorts, then the subsort relation,
-- then operation and predicate symbols, then associativity info, and
-- finally the extension part via the supplied @extColimit@ callback.
signColimit :: (Category (Sign f e) (Morphism f e m)) =>
               Gr (Sign f e)(Int,Morphism f e m) ->
               ( Gr e (Int, m) ->
                 Map.Map Int (Morphism f e m)
                 -> (e, Map.Map Int m)
               )
               ->
               (Sign f e, Map.Map Int (Morphism f e m))
signColimit graph extColimit =
 case labNodes graph of
  [] -> error "empty graph"
  -- single node: the colimit is the node itself with the identity
  (n,sig):[] -> (sig, Map.fromAscList[(n, ide sig)])
  _ -> let
    -- 1. colimit of the sort sets
    getSortMap (x, phi) = (x,sort_map phi)
    sortGraph = emap getSortMap $ nmap sortSet graph
    (setSort0, funSort0) = computeColimitSet sortGraph
    (setSort, funSort) = addIntToSymbols (setSort0, funSort0)
    sigmaSort = (emptySign $ error "err")
      { sortRel = Rel.fromKeysSet setSort }
    phiSort = Map.fromList
      $ map (\ (node, s)-> (node, (embedMorphism (error "err") s sigmaSort)
          {sort_map = Map.findWithDefault (error "sort_map") node funSort}))
      $ labNodes graph
    -- 2. subsort relation in the colimit
    relS = computeSubsorts graph funSort
    sigmaRel = sigmaSort{sortRel = relS}
    phiRel = Map.map (\ phi -> phi{mtarget = sigmaRel}) phiSort
    -- 3./4. operation and predicate symbols, then associativity
    (sigmaOp, phiOp) = computeColimitOp graph sigmaRel phiRel
    (sigmaPred, phiPred) = computeColimitPred graph sigmaOp phiOp
    (sigAssoc, phiAssoc) = colimitAssoc graph sigmaPred phiPred
    -- 5. the extension component, via the caller-supplied colimit
    extGraph = emap (\(i, phi) -> (i, extended_map phi)) $
               nmap extendedInfo graph
    (extInfo, extMaps) = extColimit extGraph phiAssoc
    sigmaExt = sigAssoc{extendedInfo = extInfo}
    phiExt = Map.mapWithKey
      (\ node phi -> phi{mtarget = sigmaExt,
                         -- drop identity entries from the sort map
                         sort_map = Map.filterWithKey (/=) $ sort_map phi,
                         extended_map = Map.findWithDefault (error "ext_map")
                                        node extMaps})
      phiAssoc
    in (sigmaExt, phiExt)
-- computing subsorts in the colimit
-- the subsort relation in the colimit is the transitive closure
-- of the subsort relations in the diagram
-- mapped along the structural morphisms of the colimit
computeSubsorts :: Gr (Sign f e)(Int,Morphism f e m) ->
                   Map.Map Node (EndoMap Id) -> Rel.Rel Id
computeSubsorts graph funSort = let
   -- project the diagram onto sort sets and sort maps
   getPhiSort (x, phi) = (x,sort_map phi)
   graph1 = nmap sortSet $ emap getPhiSort $ graph
   -- each node's own subsort relation
   rels = Map.fromList $ map (\(node, sign) -> (node, sortRel sign)) $
          labNodes graph
  in subsorts (nodes graph1) graph1 rels funSort Rel.empty
-- rels is a function assigning to each node
-- the subsort relation of its label's elements
-- Worker for 'computeSubsorts': walk the node list, mapping each
-- node's subsort pairs into the colimit via colimF and accumulating
-- the transitive closure.
subsorts :: [Node] -> Gr (Set.Set SORT)(Int,Sort_map) ->
 Map.Map Node (Rel.Rel SORT) -> Map.Map Node (EndoMap Id) -> Rel.Rel SORT ->
 Rel.Rel SORT
subsorts listNode graph rels colimF rel =
 case listNode of
  [] -> rel
  x:xs -> case lab graph x of
    Nothing -> subsorts xs graph rels colimF rel
    Just set -> let
        f = Map.findWithDefault (error "subsorts") x colimF
      in subsorts xs graph rels colimF (Rel.transClosure $
           Rel.union rel (Rel.fromList [ (
                            Map.findWithDefault (error "f(m)") m f,
                            Map.findWithDefault (error "f(n)") n f
                                         )
            | m <- Set.elems set, n <- Set.elems set,
              Rel.member m n (Map.findWithDefault (error "rels(x)") x rels)]))
-- CASL signatures colimit on operation symbols
--algorithm description:
-- 1. project the graph on operation symbols
-- i.e. set of (Id, OpType)s in nodes and corresponding maps on edges
-- 2. compute colimit in Set of the graph => a set of ((Id, OpType), Node)
-- 3. build the overloading relation in colimit
-- two symbols are overloaded in the colimit
-- if there is some node and two opsymbols there
-- that are mapped in them and are overloaded
-- collect the names entering each symbol (try to keep names)
-- collect information about totality: a symbol must be total in the colimit
-- if we have a total symbol in the graph which is mapped to it
-- 4. assign names to each partition, in order of size
-- (i.e. the equivalence class with most symbols
-- will be prefered to keep name):
-- if there is available a name of a symbol entering the class,
-- then assign that name to the class, otherwise generate a name
-- also the morphisms have to be built
-- Colimit on operation symbols; see the algorithm description above.
computeColimitOp :: Gr (Sign f e)(Int,Morphism f e m) ->
 Sign f e -> Map.Map Node (Morphism f e m) ->
 (Sign f e, Map.Map Node (Morphism f e m))
computeColimitOp graph sigmaRel phiSRel = let
    graph' = buildOpGraph graph
    (colim, morMap') = computeColimitSet graph'
    (ovrl, names, totalOps) = buildColimOvrl graph graph' colim morMap'
    (colim1, morMap1) = nameSymbols graph' morMap' phiSRel names ovrl totalOps
    -- strip the per-node index, keep only name and kind
    morMap2 = Map.map (\f -> Map.map (\((i,o),_) -> (i, opKind o)) f) morMap1
    -- op_map keys are stored with partial profiles
    morMap3 = Map.map (\f -> Map.fromAscList $
                             map (\((i,o),y) -> ((i, mkPartial o), y)) $
                             Map.toList f) morMap2
    sigmaOps = sigmaRel{opMap = colim1}
    phiOps = Map.mapWithKey
      (\n phi -> phi{op_map =
                       Map.findWithDefault (error "op_map") n morMap3})
      phiSRel
   in (sigmaOps, phiOps)
-- Project the diagram onto operation symbols: nodes become sets of
-- (Id, OpType), edges become the (non-identity part of the) symbol
-- maps induced by each morphism.
buildOpGraph :: Gr (Sign f e) (Int, Morphism f e m) ->
                Gr (Set.Set (Id, OpType))
                   (Int, Map.Map (Id, OpType) (Id, OpType))
buildOpGraph graph = let
    getOps = mapSetToList . opMap
    getOpFun mor = let
        ssign = msource mor
        smap = sort_map mor
        omap = op_map mor
        -- keep only symbols actually changed by the morphism
      in foldl (\f x -> let y = mapOpSym smap omap x
                        in if x == y then f else Map.insert x y f)
         Map.empty $ getOps ssign
  in nmap (Set.fromList . getOps) $ emap (\ (i, m) -> (i, getOpFun m)) graph
-- Build, for the colimit symbols: the overloading relation (as a
-- transitive closure), the frequency of each candidate name, and the
-- totality flags.
buildColimOvrl :: Gr (Sign f e) (Int, Morphism f e m) ->
   Gr (Set.Set (Id, OpType))(Int, EndoMap (Id, OpType)) ->
   Set.Set ((Id, OpType), Int) ->
   Map.Map Int (Map.Map (Id, OpType) ((Id, OpType), Int)) ->
   (Rel.Rel ((Id, OpType), Int),
    Map.Map ((Id, OpType), Int)
            (Map.Map Id Int),
    Map.Map ((Id, OpType), Int) Bool)
buildColimOvrl graph graph' colim morMap = let
     (ovrl, names) = (Rel.empty, Map.fromList $ zip (Set.toList colim) $
                                 repeat Map.empty )
     (ovrl', names', totalF') = buildOvrlAtNode graph' colim morMap
                                  ovrl names Map.empty $ labNodes graph
   in (Rel.transClosure ovrl', names', totalF')
-- Per-node worker: for each node, count the names entering every
-- colimit symbol, group the node's symbols into overloading classes
-- (leqF), relate their images in the colimit, and record totality
-- (a colimit symbol is total as soon as one total symbol maps to it).
buildOvrlAtNode :: Gr (Set.Set (Id, OpType))(Int, EndoMap (Id, OpType)) ->
   Set.Set ((Id, OpType), Int) ->
   Map.Map Int (Map.Map (Id, OpType) ((Id, OpType), Int)) ->
   Rel.Rel ((Id, OpType), Int) ->
   Map.Map ((Id, OpType), Int) (Map.Map Id Int) ->
   Map.Map ((Id, OpType), Int) Bool ->
   [(Int, Sign f e)] ->
   (Rel.Rel ((Id, OpType), Int),
    Map.Map ((Id, OpType), Int)(Map.Map Id Int),
    Map.Map ((Id, OpType), Int) Bool )
buildOvrlAtNode graph' colim morMap ovrl names totalF nodeList =
 case nodeList of
   [] -> (ovrl, names, totalF)
   (n, sig):lists -> let
       Just oSet = lab graph' n
       -- bump the count of each symbol name at its colimit image
       names' = foldl (\g x@(idN,_) -> let
                          y = Map.findWithDefault (x,n) x $
                              Map.findWithDefault (error $ show n)
                              n morMap
                          altF v = case v of
                            Nothing -> Just 1
                            Just m -> Just $ m+1
                        in Map.adjust (\gy -> Map.alter altF idN gy)
                           y g)
                names $ Set.toList oSet
       -- overloading classes at this node
       equivF (id1, ot1) (id2, ot2) = (id1 == id2) && leqF sig ot1 ot2
       parts = Rel.leqClasses equivF oSet
       -- link the colimit images of each class in a cycle, and mark
       -- images of total symbols
       addParts rel equivList =
         foldl (\(r, f) l -> let l1 = map (\x -> Map.findWithDefault (x,n) x $
                                            Map.findWithDefault
                                             (error "morMap(n)") n morMap) l
                             in case l1 of
                                 [] -> error "addParts"
                                 x:xs -> let
                                     (r', ly) = foldl
                                       (\(rl,lx) y -> (Rel.insertPair lx y rl, y))
                                       (r,x) xs
                                     f' = foldl (\g ((_i,o),((i',o'),n')) ->
                                            if isTotal o then
                                              Map.insert ((i', mkPartial o'), n')
                                               True g
                                             else g ) f $ zip l l1
                                   in (Rel.insertPair ly x r', f')
               )
         (rel, totalF) equivList
       (ovrl', totalF') = addParts ovrl parts
     in buildOvrlAtNode graph' colim morMap ovrl' names' totalF' lists
-- Pick a name for an overloading class: prefer an original name not
-- yet given away; otherwise generate one from an existing name and
-- the class index.
assignName :: (Set.Set ((Id, OpType), Int), Int) -> [Id] ->
              Map.Map ((Id, OpType), Int) (Map.Map Id Int) ->
              (Id, [Id])
assignName (opSet,idx) givenNames namesFun =
  let opSetNames = Set.fold (\x f -> Map.unionWith (\a b -> a + b) f
                              ( Map.findWithDefault
                                (error "namesFun") x
                                namesFun))
                   Map.empty opSet
      availNames = filter (\x -> not $ x `elem` givenNames) $
                   Map.keys opSetNames
  in case availNames of
      [] -> let
         -- must generate name with the most frequent name idx and an origin
         -- NOTE(review): sortBy sorts ascending, so 'head avail''
         -- actually picks the LEAST frequent name here, unlike the
         -- other branch which reverses first -- confirm intent.
              sndOrd x y= compare
                (Map.findWithDefault (error "assignName") x opSetNames)
                (Map.findWithDefault (error "assignName") y opSetNames)
              avail' = sortBy sndOrd $ Map.keys opSetNames
              idN = head avail'
            in (appendNumber idN idx, givenNames)
      _ -> -- must take the most frequent available name and give it to the class
           -- and this name becomes given
        let
              sndOrd x y = compare
                (Map.findWithDefault (error "assignName") x opSetNames)
                (Map.findWithDefault (error "assignName") y opSetNames)
              avail' = sortBy sndOrd availNames
              idN = head $ reverse avail'
            in (idN, idN:givenNames)
-- Assign names to the overloading classes of the colimit, biggest
-- classes first (so the largest class gets first pick of names), and
-- rebuild the per-node symbol maps accordingly.
nameSymbols :: Gr (Set.Set (Id, OpType))
                  (Int, Map.Map (Id,OpType)(Id, OpType)) ->
   Map.Map Int (Map.Map (Id, OpType) ((Id, OpType), Int)) ->
   Map.Map Int (Morphism f e m) ->
   Map.Map ((Id, OpType), Int) (Map.Map Id Int) ->
   Rel.Rel ((Id, OpType), Int) ->
   Map.Map ((Id, OpType), Int) Bool ->
   (OpMap, Map.Map Int (Map.Map (Id, OpType) ((Id, OpType),Int)))
nameSymbols graph morMap phi names ovrl totalOps = let
    colimOvrl = Rel.sccOfClosure $ ovrl
    nameClass opFun gNames (set, idx) morFun = let
        (newName, gNames') = assignName (set, idx) gNames names
        -- profiles of the class, mapped to colimit sorts, with the
        -- totality recorded in totalOps
        opTypes = Set.map (\((oldId,ot),i) -> let
                     oKind' = if Map.findWithDefault False
                                  ((oldId, mkPartial ot), i)
                                  totalOps
                              then Total else Partial
                     imor = Map.findWithDefault (error "imor") i phi
                   in mapOpType (sort_map imor) $ setOpKind oKind' ot) set
        renameSymbols n f = let
            Just opSyms = lab graph n
            setKeys = filter (\x -> let y = Map.findWithDefault (x, n) x f
                                    in Set.member y set) $ Set.toList opSyms
            updateAtKey (i,o) ((i', o'), n') = let
                nmor = Map.findWithDefault (error "nmor") n phi
                o'' = mapOpType (sort_map nmor) o'
                oKind = if Map.findWithDefault False
                             ((i', mkPartial o'), n')
                             totalOps
                        then Total else Partial
                z = (newName, setOpKind oKind o'')
              in if (i,o) == z then
                    Nothing
                  else
                    Just (z,n')
              in foldl (\g x -> Map.update (updateAtKey x) x g)
                 f setKeys
        -- -- i have to map symbols entering set
        -- -- to (newName, their otype mapped)
        morFun' = Map.mapWithKey renameSymbols morFun
      in (MapSet.update (const opTypes) newName opFun, gNames', morFun')
    colimOvrl' = reverse $
      sortBy (\ s1 s2 -> compare (Set.size s1)(Set.size s2)) colimOvrl
    (opFuns, _, renMap) = foldl (\(oF,gN, mM) x -> nameClass oF gN x mM)
                          (MapSet.empty,[], morMap)
                          $ number colimOvrl'
  in (opFuns , renMap)
{--CASL signatures colimit on predicate symbols
almost identical with operation symbols,
only minor changes because of different types
--}
-- Colimit on predicate symbols; mirrors 'computeColimitOp' without
-- the totality bookkeeping.
computeColimitPred :: Gr (Sign f e)(Int,Morphism f e m) -> Sign f e ->
  Map.Map Node (Morphism f e m) -> (Sign f e, Map.Map Node (Morphism f e m))
computeColimitPred graph sigmaOp phiOp = let
    graph' = buildPredGraph graph
    (colim, morMap') = computeColimitSet graph'
    (ovrl, names) = buildPColimOvrl graph graph' colim morMap'
    (colim1, morMap1) = namePSymbols graph' morMap' phiOp names ovrl
    -- keep only the target name in the pred_map
    morMap2 = Map.map (\f -> Map.map (\((i,_p),_) -> i) f) morMap1
    sigmaPreds = sigmaOp{predMap = colim1}
    phiPreds = Map.mapWithKey
      (\n phi -> phi{pred_map =
                       Map.findWithDefault (error "pred_map") n morMap2})
      phiOp
   in (sigmaPreds, phiPreds)
-- Project the diagram onto predicate symbols (analogue of
-- 'buildOpGraph').
buildPredGraph :: Gr (Sign f e) (Int, Morphism f e m) ->
                  Gr (Set.Set (Id, PredType))
                     (Int, Map.Map (Id, PredType) (Id, PredType))
buildPredGraph graph = let
    getPreds = mapSetToList . predMap
    getPredFun mor = let
        ssign = msource mor
        smap = sort_map mor
        pmap = pred_map mor
        -- keep only symbols actually changed by the morphism
      in foldl (\f x -> let y = mapPredSym smap pmap x
                        in if x == y then f else Map.insert x y f)
         Map.empty $ getPreds ssign
  in nmap (Set.fromList . getPreds) $ emap (\ (i, m) -> (i,getPredFun m)) graph
-- Overloading relation and name frequencies for predicate symbols
-- (analogue of 'buildColimOvrl'; no totality component).
buildPColimOvrl :: Gr (Sign f e) (Int, Morphism f e m) ->
   Gr (Set.Set (Id, PredType))(Int, EndoMap (Id, PredType)) ->
   Set.Set ((Id, PredType), Int) ->
   Map.Map Int (Map.Map (Id, PredType) ((Id, PredType), Int)) ->
   (Rel.Rel ((Id, PredType), Int),
    Map.Map ((Id, PredType), Int) (Map.Map Id Int))
buildPColimOvrl graph graph' colim morMap = let
    (ovrl, names) = (Rel.empty, Map.fromList $ zip (Set.toList colim) $
                                repeat Map.empty )
    (ovrl', names') = buildPOvrlAtNode graph' colim morMap
                        ovrl names $ labNodes graph
  in (Rel.transClosure ovrl', names')
-- Per-node worker for predicates: count candidate names and relate
-- the colimit images of each node's overloading classes (leqP).
buildPOvrlAtNode :: Gr (Set.Set (Id, PredType))(Int, EndoMap (Id, PredType)) ->
   Set.Set ((Id, PredType), Int) ->
   Map.Map Int (Map.Map (Id, PredType) ((Id, PredType), Int)) ->
   Rel.Rel ((Id, PredType), Int) ->
   Map.Map ((Id, PredType), Int) (Map.Map Id Int) ->
   [(Int, Sign f e)] ->
   (Rel.Rel ((Id, PredType), Int),
    Map.Map ((Id, PredType), Int)(Map.Map Id Int))
buildPOvrlAtNode graph' colim morMap ovrl names nodeList =
 case nodeList of
   [] -> (ovrl, names)
   (n, sig):lists -> let
       Just pSet = lab graph' n
       -- bump the count of each symbol name at its colimit image
       names' = foldl (\g x@(idN,_) -> let
                          y = Map.findWithDefault (x,n) x $
                              Map.findWithDefault (error $ show n)
                              n morMap
                          altF v = case v of
                            Nothing -> Just 1
                            Just m -> Just $ m+1
                        in Map.adjust (\gy -> Map.alter altF idN gy)
                           y g)
                names $ Set.toList pSet
       equivP (id1, pt1) (id2, pt2) = (id1 == id2) && leqP sig pt1 pt2
       parts = Rel.leqClasses equivP pSet
       nmor = Map.findWithDefault (error "buildAtNode") n morMap
       -- link the colimit images of each class in a cycle
       addParts rel equivList =
         foldl (\r l -> let l1 = map (\x ->
                                  Map.findWithDefault (x,n) x nmor) l
                        in case l1 of
                            [] -> error "addParts"
                            x:xs -> let
                                (r', ly) = foldl
                                  (\(rl,lx) y -> (Rel.insertPair lx y rl, y))
                                  (r,x) xs
                              in Rel.insertPair ly x r'
               )
         rel equivList
       ovrl' = addParts ovrl parts
     in buildPOvrlAtNode graph' colim morMap ovrl' names' lists
-- Pick a name for a predicate overloading class (analogue of
-- 'assignName').
assignPName :: (Set.Set ((Id, PredType), Int), Int) -> [Id] ->
               Map.Map ((Id, PredType), Int) (Map.Map Id Int) ->
               (Id, [Id])
assignPName (pSet,idx) givenNames namesFun =
  let pSetNames = Set.fold (\x f -> Map.unionWith (\a b -> a + b) f
                             (Map.findWithDefault (error "pname") x namesFun))
                  Map.empty pSet
      availNames = filter (\x -> not $ x `elem` givenNames) $
                   Map.keys pSetNames
  in case availNames of
      [] -> let
         -- must generate name with the most frequent name idx and an origin
         -- NOTE(review): as in 'assignName', 'head avail'' here picks
         -- the LEAST frequent name (ascending sort, no reverse) --
         -- confirm intent.
              sndOrd x y= compare (pSetNames Map.! x) (pSetNames Map.! y)
              avail' = sortBy sndOrd $ Map.keys pSetNames
              idN = head avail'
            in (appendNumber idN idx, givenNames)
      _ -> -- must take the most frequent available name and give it to the class
           -- and this name becomes given
        let
              sndOrd x y= compare (pSetNames Map.! x) (pSetNames Map.! y)
              avail' = sortBy sndOrd availNames
              idN = head $ reverse avail'
            in (idN, idN:givenNames)
-- | Name every equivalence class of overloaded predicate symbols
-- (largest classes are named first) and rewrite the per-node symbol maps
-- so that they target the freshly chosen names.
namePSymbols :: Gr (Set.Set (Id, PredType))
                   (Int, Map.Map (Id,PredType)(Id, PredType)) ->
   Map.Map Int (Map.Map (Id, PredType) ((Id, PredType), Int)) ->
   Map.Map Int (Morphism f e m) ->
   Map.Map ((Id, PredType), Int) (Map.Map Id Int) ->
   Rel.Rel ((Id, PredType), Int) ->
   (PredMap, Map.Map Int (Map.Map (Id, PredType) ((Id, PredType),Int)))
namePSymbols graph morMap phi names ovrl = let
   colimOvrl = Rel.sccOfClosure $ ovrl
   -- name one class and thread the accumulators through
   nameClass pFun gNames (set, idx) morFun = let
      (newName, gNames') = assignPName (set, idx) gNames names
      -- NOTE(review): the empty 'let in' below is vestigial (no bindings)
      pTypes = Set.map (\((_oldId,pt), i) -> let
                 in mapPredType (sort_map $ phi Map.! i) pt) $
               set
      -- redirect the node-local map entries that land in 'set' to the
      -- new name (dropping entries that become identities)
      renameSymbols n f = let
          Just pSyms = lab graph n
          setKeys = filter (\x -> let y = Map.findWithDefault (x, n) x f
                                  in Set.member y set) $ Set.toList pSyms
          updateAtKey (i,p) ((_i', p'), n') = let
              p'' = mapPredType (sort_map $ phi Map.! n) p'
              z = (newName, p'')
            in if (i,p) == z then
                 Nothing
               else
                 Just (z,n')
        in foldl (\g x -> Map.update (updateAtKey x) x g)
           f setKeys
      -- -- i have to map symbols entering set
      -- -- to (newName, their predtype mapped)
      morFun' = Map.mapWithKey renameSymbols morFun
     in (MapSet.update (const pTypes) newName pFun, gNames', morFun')
   -- biggest equivalence classes get first pick of the names
   colimOvrl' = reverse $
     sortBy (\ s1 s2 -> compare (Set.size s1)(Set.size s2)) colimOvrl
   (pFuns, _, renMap) = foldl (\(pF,gN, mM) x -> nameClass pF gN x mM)
                        (MapSet.empty,[], morMap)
                        $ number colimOvrl'
 in (pFuns , renMap)
-- | Translate an operation symbol along a signature morphism, applying
-- both the sort map and the operation-symbol map.
applyMor :: Morphism f e m -> (Id, OpType) -> (Id, OpType)
applyMor phi = mapOpSym (sort_map phi) (op_map phi)
-- associative operations
-- | All operation symbols of a signature that are marked associative.
assocSymbols :: Sign f e -> [(Id, OpType)]
assocSymbols sig = mapSetToList (assocOps sig)
-- | Propagate associativity annotations into the colimit signature: every
-- associative symbol of any node is mapped into the colimit and recorded
-- there, and all morphisms are retargeted at the enriched signature.
-- Note: the grouping below is quadratic in the number of associative
-- symbols, which is fine for the small sets that occur in practice.
colimitAssoc :: Gr (Sign f e) (Int,Morphism f e m) -> Sign f e ->
 Map.Map Int (Morphism f e m) -> (Sign f e, Map.Map Int (Morphism f e m))
colimitAssoc graph sig morMap = let
  assocOpList = nubOrd $ concatMap
    (\ (node, sigma) -> map (applyMor ((Map.!)morMap node)) $
    assocSymbols sigma ) $ labNodes graph
  idList = nubOrd $ map fst assocOpList
  sig1 = sig{assocOps = MapSet.fromList $
              map (\sb -> (sb, map snd $ filter (\(i,_) -> i==sb)
                                         assocOpList )) idList}
  morMap1 = Map.map (\ phi -> phi{mtarget = sig1}) morMap
 in (sig1, morMap1)
| nevrenato/Hets_Fork | CASL/ColimSign.hs | gpl-2.0 | 22,130 | 4 | 32 | 7,884 | 7,505 | 4,003 | 3,502 | 394 | 5 |
module Language.Dockerfile.EDSL.PluginsSpec
where
-- import Language.Dockerfile.EDSL
-- import Language.Dockerfile.EDSL.Plugins
import Test.Hspec
-- | Hspec entry point; currently a single 'pending' placeholder for the
-- plugin-discovery behaviour sketched in the commented-out draft below.
-- NOTE(review): a top-level signature (@spec :: Spec@) is missing; add it
-- when the commented-out imports above are restored.
spec =
  describe "listPlugins" $
    it "lists docker images matching language-dockerfile-*" pending
    -- str <- toDockerFileStrIO $ do
    -- ds <- liftIO (glob "./test/*.hs")
    -- from "ubuntu"
    -- mapM_ add ds
    -- str `shouldBe` unlines [ "FROM ubuntu"
    -- , "ADD Spec.hs"
    -- , "ADD SanitySpec.hs"
    -- , "ADD Test.hs"
    -- ]
| beijaflor-io/haskell-language-dockerfile | test/Language/Dockerfile/EDSL/PluginsSpec.hs | gpl-3.0 | 713 | 0 | 6 | 321 | 41 | 28 | 13 | 5 | 1 |
module HEP.Automation.MadGraph.Dataset.Set20110302set3 where
import HEP.Automation.MadGraph.Model
import HEP.Automation.MadGraph.Machine
import HEP.Automation.MadGraph.UserCut
import HEP.Automation.MadGraph.Cluster
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Dataset.Common
-- | Filesystem locations used by the MadGraph driver (user-specific;
-- edit to match the local installation).
my_ssetup :: ScriptSetup
my_ssetup = SS {
    scriptbase = "/home/wavewave/nfs/workspace/ttbar/mc_script/"
  , mg5base = "/home/wavewave/nfs/montecarlo/MG_ME_V4.4.44/MadGraph5_v0_6_1/"
  , workbase = "/home/wavewave/nfs/workspace/ttbar/mc/"
  }

-- | Kinematic user cuts: missing-ET, lepton eta/ET and jet eta/ET
-- thresholds (energies presumably in GeV — TODO confirm against the
-- analysis code).
ucut :: UserCut
ucut = UserCut {
    uc_metcut = 15.0
  , uc_etacutlep = 1.2
  , uc_etcutlep = 18.0
  , uc_etacutjet = 2.5
  , uc_etcutjet = 15.0
  }
-- | MadGraph process-card text: t-tbar production, plus the same process
-- with one extra jet.  Typed as the idiomatic 'String' alias rather than
-- the equivalent @[Char]@.
processTTBar0or1jet :: String
processTTBar0or1jet =
  "\ngenerate P P > t t~ QED=99 @1 \nadd process P P > t t~ J QED=99 @2 \n"
-- | Process setup: axigluon model, ttbar + 0/1 jet, MadGraph 4 backend.
psetup_axi_ttbar01j :: ProcessSetup
psetup_axi_ttbar01j = PS {
    mversion = MadGraph4
  , model = AxiGluon
  , process = processTTBar0or1jet
  , processBrief = "ttbar01j"
  , workname = "302Axi1J"
  }

-- | Run six jobs in parallel on the cluster.
my_csetup :: ClusterSetup
my_csetup = CS { cluster = Parallel 6 }

-- | Scan grid: axigluon mass 1600..2400 in steps of 200, coupling
-- 0.8..4.0 in steps of 0.4 (same coupling used for both arguments).
axiparamset :: [Param]
axiparamset = [ AxiGluonParam mass 0.0 0.0 ga ga
              | mass <- [1600.0, 1800.0 .. 2400.0 ]
              , ga <- [0.8, 1.2 .. 4.0 ] ]

-- | Only the single process above is run.
psetuplist :: [ProcessSetup]
psetuplist = [ psetup_axi_ttbar01j ]

-- | One event-set index per parameter point.
sets :: [Int]
sets = [1]
-- | One work item per (parameter point, set index): 20000 events with MLM
-- matching, no user cuts, no PGS detector simulation.
axitasklist :: [WorkSetup]
axitasklist = [ WS my_ssetup (psetup_axi_ttbar01j)
                (rsetupGen p MLM NoUserCutDef NoPGS 20000 num)
                my_csetup
              | p <- axiparamset
              , num <- sets ]

-- | All tasks of this dataset (currently just the axigluon scan).
totaltasklist :: [WorkSetup]
totaltasklist = axitasklist
| wavewave/madgraph-auto-dataset | src/HEP/Automation/MadGraph/Dataset/Set20110302set3.hs | gpl-3.0 | 1,747 | 0 | 8 | 435 | 366 | 226 | 140 | 47 | 1 |
module UnitConversions where
import Data.Maybe (mapMaybe)
import Data.Text (pack, replace, unpack)
import Text.Read (readMaybe)
import Text.Regex.TDFA ((=~))
import Types
-- NOTE: Here, "imperial" means "U.S. Customary". Conversion to British,
-- Australian, Canadian, etc. imperial units is not yet implemented.
-- | Target measurement system for a conversion, or 'None' for identity.
data Conversion = Metric | Imperial | None deriving (Show, Read, Eq)

-- | Convert every ingredient quantity and every temperature mentioned in
-- the directions of a recipe to the requested system.  'None' returns
-- the recipe unchanged.  (The parameter is named @unit@ even though it is
-- a 'Conversion', shadowing the record field of the same name.)
convertRecipeUnits :: Conversion -> Recipe -> Recipe
convertRecipeUnits unit recp =
  case unit of
    None -> recp
    Metric ->
      recp
        { ingredients = map convertIngredientToMetric (ingredients recp),
          directions = map convertTemperatureToMetric (directions recp)
        }
    Imperial ->
      recp
        { ingredients = map convertIngredientToImperial (ingredients recp),
          directions = map convertTemperatureToImperial (directions recp)
        }
-- | Convert one ingredient's quantity from U.S. customary units to
-- metric: weight units become grams, volume units millilitres/litres.
--
-- Fixed: the 'Oz' (weight ounce) and 'FlOz' (fluid ounce) cases were
-- swapped — ounces were converted to millilitres and fluid ounces to
-- grams.  An avoirdupois ounce is ~28 g and a U.S. fluid ounce is ~30 ml,
-- matching the inverse @G -> Oz@ (/28) rule in
-- 'convertIngredientToImperial'.
convertIngredientToMetric :: Ingredient -> Ingredient
convertIngredientToMetric ingr =
  case unit ingr of
    Tsp -> ingr {quantity = quantity ingr * 5, unit = Ml}
    Tbsp -> ingr {quantity = quantity ingr * 15, unit = Ml}
    Oz -> ingr {quantity = quantity ingr * 28, unit = G}
    FlOz -> ingr {quantity = quantity ingr * 30, unit = Ml}
    Cup -> ingr {quantity = quantity ingr * 237, unit = Ml}
    Lb -> ingr {quantity = quantity ingr * 454, unit = G}
    Pint -> ingr {quantity = quantity ingr * 473, unit = Ml}
    Quart -> ingr {quantity = quantity ingr * 946, unit = Ml}
    Gallon -> ingr {quantity = quantity ingr * 3.785, unit = L}
    -- These cases are here so that if we add other units, the compiler will force us
    -- to add appropriate cases here.
    Ml -> ingr
    L -> ingr
    G -> ingr
    Other _ -> ingr
-- | Convert one ingredient's quantity from metric to U.S. customary,
-- choosing a human-sized unit by magnitude (tsp/tbsp/cup for small
-- volumes, cup/gallon for litres, ounces for grams).  Note that this
-- direction never produces 'FlOz', 'Pint' or 'Quart'.
convertIngredientToImperial :: Ingredient -> Ingredient
convertIngredientToImperial ingr =
  case unit ingr of
    Ml
      | quantity ingr < 15 -> ingr {quantity = quantity ingr / 5, unit = Tsp}
      | quantity ingr < 250 -> ingr {quantity = quantity ingr / 15, unit = Tbsp}
      | otherwise -> ingr {quantity = quantity ingr / 250, unit = Cup}
    L
      | quantity ingr < 4 -> ingr {quantity = quantity ingr * 4.23, unit = Cup}
      | otherwise -> ingr {quantity = quantity ingr * 0.26, unit = Gallon}
    G -> ingr {quantity = quantity ingr / 28, unit = Oz}
    -- These cases are here so that if we add other units, the compiler will force us
    -- to add appropriate cases here.
    Tsp -> ingr
    Tbsp -> ingr
    Cup -> ingr
    Oz -> ingr
    FlOz -> ingr
    Lb -> ingr
    Pint -> ingr
    Quart -> ingr
    Gallon -> ingr
    Other _ -> ingr
-- | Rewrite every Fahrenheit temperature found in a direction string as
-- Celsius.  (Type signatures added; they were missing.)
convertTemperatureToMetric :: String -> String
convertTemperatureToMetric = convertTemperature C

-- | Rewrite every Celsius temperature found in a direction string as
-- Fahrenheit.
convertTemperatureToImperial :: String -> String
convertTemperatureToImperial = convertTemperature F
-- | Replace every temperature mention in @s@ with its value converted to
-- unit @u@ (temperatures already in @u@ are left numerically unchanged by
-- 'toTempUnit').
-- NOTE(review): the regex match consumed by the replacement includes the
-- trailing boundary character of group 3 (space or punctuation), and the
-- rendered replacement does not re-append it, so e.g. a trailing "."
-- appears to be dropped — confirm and fix upstream if so.  Also, 'foldl'
-- here would conventionally be the strict @foldl'@.
convertTemperature :: TempUnit -> String -> String
convertTemperature u s =
  unpack $ foldl replaceTemperature (pack s) (packText . convertReplacement <$> findReplacements s)
  where
    packText (s1, s2) = (pack s1, pack s2)
    replaceTemperature text (old, new) = replace old new text
    -- fmap over the pair converts only the Temperature component
    convertReplacement = fmap $ show . toTempUnit u

-- | All temperature mentions in the string, paired with the exact
-- substring that matched (used as the replacement key).
findReplacements :: String -> [(String, Temperature)]
findReplacements = mapMaybe parseRegexResult . findTemperatures
  where
    parseRegexResult r = to3Tuple r >>= parseTemperature
    -- keep full match, value group and unit group; group 3 is dropped
    to3Tuple :: [a] -> Maybe (a, a, a)
    to3Tuple (a : b : c : _) = Just (a, b, c)
    to3Tuple _ = Nothing

-- | Turn (full match, value text, unit letter) into a 'Temperature',
-- failing if either the number or the unit does not parse.
parseTemperature :: (String, String, String) -> Maybe (String, Temperature)
parseTemperature (s, v, u) = case (readMaybe v, parseTempUnit u) of
  (Just value, Just unit) -> Just (s, Temperature value unit)
  _ -> Nothing

-- returns a list of matches, where every match is a list of the regex groups
-- (index 0 is the full match); up to 3 digits, optional sign and degree mark
findTemperatures :: String -> [[String]]
findTemperatures s = s =~ "(-?[0-9]{1,3}) ?°?(C|F)([ .!?]|$)"
-- | Parse the unit letter captured by the temperature regex.
parseTempUnit :: String -> Maybe TempUnit
parseTempUnit letter =
  case letter of
    "C" -> Just C
    "F" -> Just F
    _ -> Nothing
-- | A whole-degree temperature with its unit.
data Temperature = Temperature Int TempUnit deriving (Eq)

-- | Rendered as e.g. @100°C@ (value immediately followed by the unit).
instance Show Temperature where
  show (Temperature value unit) = show value ++ show unit

-- | Celsius or Fahrenheit.
data TempUnit = C | F deriving (Eq)

instance Show TempUnit where
  show C = "°C"
  show F = "°F"

-- | Convert a temperature to the requested unit; a temperature already in
-- that unit (or any other unmatched combination) falls through unchanged.
toTempUnit :: TempUnit -> Temperature -> Temperature
toTempUnit C (Temperature x F) = Temperature (fahrenheitToCelsius x) C
toTempUnit F (Temperature x C) = Temperature (celsiusToFahrenheit x) F
toTempUnit _ t = t
-- | Whole-degree Fahrenheit to Celsius, rounded to the nearest degree:
-- @round ((f - 32) / 1.8)@.
fahrenheitToCelsius :: Int -> Int
fahrenheitToCelsius f = round ((fromIntegral f - 32) / 1.8)
-- | Whole-degree Celsius to Fahrenheit, rounded to the nearest degree:
-- @round (c * 1.8 + 32)@.
celsiusToFahrenheit :: Int -> Int
celsiusToFahrenheit c = round (fromIntegral c * 1.8 + 32)
| JackKiefer/herms | src/UnitConversions.hs | gpl-3.0 | 4,486 | 0 | 12 | 987 | 1,458 | 780 | 678 | 95 | 13 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
-- |
-- Module : Aura.Core
-- Copyright : (c) Colin Woodbury, 2012 - 2020
-- License : GPL3
-- Maintainer: Colin Woodbury <colin@fosskers.ca>
--
-- Core types and functions which belong nowhere else.
module Aura.Core
( -- * Types
Env(..)
, Repository(..)
, liftMaybeM
-- * User Privileges
, sudo, trueRoot
-- * Querying the Package Database
, foreignPackages, orphans
, develPkgs, isDevelPkg
, Unsatisfied(..), Satisfied(..)
, areSatisfied, isInstalled
, checkDBLock
-- * Misc. Package Handling
, removePkgs, partitionPkgs
-- * Content Diffing
, diff
-- * IO
, notify, warn, scold, report
) where
import Aura.Colour
import Aura.IO
import Aura.Languages
import Aura.Pacman
import Aura.Settings
import Aura.Shell
import Aura.Types
import Aura.Utils
import Control.Monad.Trans.Maybe
import Data.Bifunctor (bimap)
import Data.Text.Prettyprint.Doc
import Data.Text.Prettyprint.Doc.Render.Terminal
import RIO hiding ((<>))
import qualified RIO.ByteString as B
import RIO.Directory (doesFileExist)
import qualified RIO.List as L
import qualified RIO.NonEmpty as NEL
import qualified RIO.Set as S
import qualified RIO.Text as T
import System.Process.Typed (proc, runProcess)
---
--------
-- TYPES
--------
-- | The complete Aura runtime environment. `Repository` has internal caches
-- instantiated in `IO`, while `Settings` is mostly static and derived from
-- command-line arguments.
data Env = Env { repository :: !Repository, settings :: !Settings }
  deriving stock (Generic)

-- | Hand-written lens onto the 'settings' field of 'Env'.
settingsL :: Lens' Env Settings
settingsL f e = (\ss -> e { settings = ss }) <$> f (settings e)

-- | Logging goes through the 'LogFunc' stored inside 'Settings'.
instance HasLogFunc Env where
  logFuncL = settingsL . logFuncOfL
-- | A `Repository` is a place where packages may be fetched from. Multiple
-- repositories can be combined with the `Semigroup` instance. Checks packages
-- in batches for efficiency.
data Repository = Repository
  { repoCache :: !(TVar (Map PkgName Package))
  , repoLookup :: Settings -> NonEmpty PkgName -> IO (Maybe (Set PkgName, Set Package)) }

-- NOTE The `repoCache` value passed to the combined `Repository` constructor is
-- irrelevant, and only sits there for typechecking purposes. Each `Repository`
-- is expected to leverage its own cache within its `repoLookup` function.
instance Semigroup Repository where
  a <> b = Repository (repoCache a) $ \ss ps -> runMaybeT $ do
    -- ask repo 'a' first; 'bads' are the names it could not resolve
    items@(bads, goods) <- MaybeT $ repoLookup a ss ps
    case nes bads of
      Nothing -> pure items
      -- retry only the unresolved names against 'b', merging results
      Just bads' -> second (goods <>) <$> MaybeT (repoLookup b ss bads')
---------------------------------
-- Functions common to `Package`s
---------------------------------
-- | Partition a list of packages into pacman and buildable groups. Yes, this is
-- the correct signature. As far as this function (in isolation) is concerned,
-- there is no way to guarantee that the list of `NonEmpty`s will itself be
-- non-empty.
partitionPkgs :: NonEmpty (NonEmpty Package) -> ([Prebuilt], [NonEmpty Buildable])
partitionPkgs = bimap fold f . L.unzip . map g . NEL.toList
  where
    -- split one group into its prebuilt and buildable members
    g :: NonEmpty Package -> ([Prebuilt], [Buildable])
    g = fmapEither toEither . NEL.toList
    -- drop groups that became empty after partitioning
    f :: [[a]] -> [NonEmpty a]
    f = mapMaybe NEL.nonEmpty
    toEither :: Package -> Either Prebuilt Buildable
    toEither (FromAUR b) = Right b
    toEither (FromRepo b) = Left b
-----------
-- THE WORK
-----------
-- | Run a monadic computation that may yield nothing, throwing the given
-- exception when it does.
liftMaybeM :: (MonadThrow m, Exception e) => e -> m (Maybe a) -> m a
liftMaybeM err action = maybe (throwM err) pure =<< action
-- | Action won't be allowed unless user is root, or using sudo.
sudo :: RIO Env a -> RIO Env a
sudo act = asks (hasRootPriv . envOf . settings) >>= bool (throwM $ Failure mustBeRoot_1) act

-- | Stop the user if they are the true root. Building as root isn't allowed
-- since makepkg v4.2.
trueRoot :: RIO Env a -> RIO Env a
trueRoot action = asks settings >>= \ss ->
  -- run only when we are NOT the true root AND the build user isn't root
  if not (isTrueRoot $ envOf ss) && buildUserOf (buildConfigOf ss) /= Just (User "root")
    then action else throwM $ Failure trueRoot_3
-- | A list of non-prebuilt packages installed on the system.
-- @-Qm@ yields a list of sorted values.
foreignPackages :: IO (Set SimplePkg)
foreignPackages = S.fromList . mapMaybe simplepkg' <$> pacmanLines ["-Qm"]

-- | Packages marked as a dependency, yet are required by no other package.
-- (@pacman -Qqdt@ lists exactly these.)
orphans :: IO (Set PkgName)
orphans = S.fromList . map PkgName <$> pacmanLines ["-Qqdt"]

-- | Any installed package whose name is suffixed by git, hg, svn, darcs, cvs,
-- or bzr.
develPkgs :: IO (Set PkgName)
develPkgs = S.filter isDevelPkg . S.map spName <$> foreignPackages

-- | Is a package suffixed by git, hg, svn, darcs, cvs, or bzr?
isDevelPkg :: PkgName -> Bool
isDevelPkg (PkgName pkg) = any (`T.isSuffixOf` pkg) suffixes
  where
    suffixes :: [Text]
    suffixes = ["-git", "-hg", "-svn", "-darcs", "-cvs", "-bzr"]
-- | Returns what it was given if the package is already installed.
-- Reasoning: Using raw bools can be less expressive.
isInstalled :: PkgName -> IO (Maybe PkgName)
isInstalled pkg = bool Nothing (Just pkg) <$> pacmanSuccess ["-Qq", pnName pkg]

-- | An @-Rsu@ call: remove the packages together with their now-unneeded
-- dependencies, forwarding the user's common pacman flags.
removePkgs :: NonEmpty PkgName -> RIO Env ()
removePkgs pkgs = do
  pacOpts <- asks (commonConfigOf . settings)
  liftIO . pacman $ ["-Rsu"] <> asFlag pkgs <> asFlag pacOpts
-- | Depedencies which are not installed, or otherwise provided by some
-- installed package.
newtype Unsatisfied = Unsatisfied (NonEmpty Dep)

-- | The opposite of `Unsatisfied`.
newtype Satisfied = Satisfied (NonEmpty Dep)

-- | Similar to `isSatisfied`, but dependencies are checked in a batch, since
-- @-T@ can accept multiple inputs.
areSatisfied :: NonEmpty Dep -> IO (These Unsatisfied Satisfied)
areSatisfied ds = do
  -- pacman -T prints the deps that are NOT satisfied
  unsats <- S.fromList . mapMaybe parseDep <$> unsat
  pure . bimap Unsatisfied Satisfied $ partNonEmpty (f unsats) ds
  where
    unsat :: IO [Text]
    unsat = pacmanLines $ "-T" : map renderedDep (toList ds)
    f :: Set Dep -> Dep -> These Dep Dep
    f unsats d | S.member d unsats = This d
               | otherwise = That d
-- | Block further action until the database is free.
checkDBLock :: Settings -> IO ()
checkDBLock ss = do
  locked <- doesFileExist lockFile
  -- B.getLine blocks until the user presses Enter, then we re-check
  when locked $ warn ss checkDBLock_1 *> B.getLine *> checkDBLock ss
----------
-- DIFFING
----------
-- | Given two filepaths, output the diff of the two files.
-- Output will be coloured unless colour is deactivated by
-- `--color never` or by detection of a non-terminal output
-- target.
diff :: MonadIO m => Settings -> FilePath -> FilePath -> m ()
diff ss f1 f2 = void . runProcess . proc "diff" $ c <> ["-u", f1, f2]
  where
    -- pass --color unless colour output has been disabled in Settings
    c :: [FilePath]
    c = bool ["--color"] [] $ shared ss (Colour Never)
-------
-- MISC -- Too specific for `Utilities.hs` or `Aura.Utils`
-------
-- | Print some message in green with Aura flair.
notify :: MonadIO m => Settings -> (Language -> Doc AnsiStyle) -> m ()
notify ss msg = putStrLnA ss $ green (msg $ langOf ss)

-- | Print some message in yellow with Aura flair.
warn :: MonadIO m => Settings -> (Language -> Doc AnsiStyle) -> m ()
warn ss msg = putStrLnA ss $ yellow (msg $ langOf ss)

-- | Print some message in red with Aura flair.
scold :: MonadIO m => Settings -> (Language -> Doc AnsiStyle) -> m ()
scold ss msg = putStrLnA ss $ red (msg $ langOf ss)

-- | Report a message with multiple associated items. Usually a list of
-- naughty packages.  The header is styled with @c@; the items are listed
-- below it in cyan, one per line.
report :: (Doc AnsiStyle -> Doc AnsiStyle) -> (Language -> Doc AnsiStyle) -> NonEmpty PkgName -> RIO Env ()
report c msg pkgs = do
  ss <- asks settings
  putStrLnA ss . c . msg $ langOf ss
  putTextLn . dtot . colourCheck ss . vsep . map (cyan . pretty . pnName) $ toList pkgs
| bb010g/aura | aura/lib/Aura/Core.hs | gpl-3.0 | 7,885 | 0 | 17 | 1,645 | 2,002 | 1,070 | 932 | 120 | 2 |
{-# LANGUAGE DeriveDataTypeable, FlexibleContexts, FlexibleInstances, DeriveGeneric #-}
module Tile where
{-module Tile (Tile (), Floor (..), Wall (..),
makeWall, makeFloor, emptySpace,
tileColor, renderTile, describeTile, tileWallType,
tileBlocksVision, tileBlocksMovement, tileHasFloor) where
-}
import Data.Bits
import CursesWrap (ColorName (..), StyledChar (..), Style (..))
import GHC.Generics (Generic)
import Data.Typeable
-- bit allocation for now: last 4 bits for Floor, last 4 bits before them for Wall
-- I'm assuming Liberally that the Int type will have enough bits for my needs so I don't have to bother with casting :>
-- (in fact this file is pretty lazily written in general...)
-- | A map tile packed into an 'Int': bits 0-3 hold the 'Floor' value and
-- bits 4-7 hold the 'Wall' value.
newtype Tile = Tile {unTile :: Int} deriving (Show, Read, Typeable, Generic)

-- | Floor surface, stored in the low 4 bits of a 'Tile'.
data Floor = NoFloor | Concrete | Asphalt | Grass | Sett | Floor | Carpeting | Sand | Water deriving (Show, Eq, Enum)

-- | Wall kind, stored in bits 4-7 of a 'Tile'.
data Wall = NoWall | PlainWall | GlassWall deriving (Eq, Enum, Show, Typeable, Generic)

-- | Extract the wall component of a tile.
tileWallType :: Tile -> Wall
tileWallType (Tile t) = toEnum $ shiftR t 4

-- | Build a tile consisting of a wall only (no floor).
makeWall :: Wall -> Tile
makeWall w = Tile (shiftL (fromEnum w) 4)

-- | Build a tile consisting of a floor only (no wall).
makeFloor :: Floor -> Tile
makeFloor f = Tile (fromEnum f)

-- | A tile with neither wall nor floor.
emptySpace :: Tile
emptySpace = Tile 0

-- | Whether a wall kind is opaque.  (Type signature added; it was
-- missing from the original.)
wallBlocksVision :: Wall -> Bool
wallBlocksVision NoWall = False
wallBlocksVision PlainWall = True
wallBlocksVision GlassWall = False

-- | Display colour of a tile: the wall's colour when a wall is present,
-- otherwise the floor's colour.  Hits the 'error' case for 'emptySpace',
-- so callers should check 'tileBlocksMovement' / 'tileHasFloor' first.
tileColor :: Tile -> ColorName
tileColor (Tile t) = go where
    go | PlainWall == (toEnum $ shiftR t 4) = Grey
       | GlassWall == (toEnum $ shiftR t 4) = Cyan
       | Concrete == toEnum t = Grey
       | Asphalt == toEnum t = Black
       | Grass == toEnum t = Green
       | Sett == toEnum t = Red
       | Floor == toEnum t = Black
       | Carpeting == toEnum t = Green
       | Sand == toEnum t = Yellow
       | Water == toEnum t = Blue
       | otherwise = error "tileColor: No colour defined for tile"

-- | A tile blocks vision iff its wall kind is opaque.
tileBlocksVision :: Tile -> Bool
tileBlocksVision (Tile t) = wallBlocksVision (toEnum $ shiftR t 4)

-- | Any wall at all (non-zero wall bits) blocks movement.
tileBlocksMovement :: Tile -> Bool
tileBlocksMovement (Tile t) = (shiftR t 4) > 0

-- | True when the packed value is non-zero, i.e. the tile has a floor
-- (or, technically, any wall bits, since those also make it non-zero).
tileHasFloor :: Tile -> Bool
tileHasFloor (Tile t) = t > 0

-- | Render a tile as a styled character: @#@ for walls, @.@ for floors,
-- a blank for empty space.
renderTile :: Tile -> StyledChar
renderTile t | tileBlocksMovement t =
                 StyledChar (Style False False (tileColor t) Black) '#'
             | tileHasFloor t =
                 StyledChar (Style False False (tileColor t) Black) '.'
             | otherwise =
                 StyledChar (Style False False Grey Black) ' '
-- | Human-readable name of a tile's dominant component: the wall name
-- when a wall is present, otherwise the floor name.
describeTile :: Tile -> String
describeTile t | tileBlocksMovement t = (show :: Wall -> String) (toEnum $ shiftR (unTile t) 4)
               | tileHasFloor t = (show :: Floor -> String) (toEnum $ unTile t)
| otherwise = "Empty space" | arirahikkala/straylight-divergence | src/Tile.hs | gpl-3.0 | 2,718 | 0 | 13 | 664 | 826 | 420 | 406 | 50 | 1 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0">
<title>Remote Execution Processor Help</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view mergetype="javax.help.UniteAppendMerge">
<name>TOC</name>
<label>Contents</label>
<type>javax.help.TOCView</type>
<data>toc.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">JavaHelpSearch</data>
</view>
</helpset>
| senbox-org/snap-desktop | snap-remote-execution-ui/src/main/resources/org/esa/snap/remote/execution/docs/help.hs | gpl-3.0 | 787 | 54 | 44 | 166 | 285 | 144 | 141 | -1 | -1 |
-- | Print a few sample expressions: string concatenation, integer and
-- floating-point arithmetic, and the boolean operators.
-- (Type signature added; it was missing.)
main :: IO ()
main = do
  putStrLn $ "haskell" ++ "lang"
  putStrLn $ "1 + 1 = " ++ show (1 + 1)
  putStrLn $ "7.0/3.0 = " ++ show (7.0 / 3.0)
  print $ True && False
  print $ True || False
  print $ not True
| daewon/til | haskell/haskell_by_example/values.hs | mpl-2.0 | 197 | 0 | 10 | 57 | 91 | 42 | 49 | 7 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceConsumerManagement.Services.TenancyUnits.AttachProject
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Attach an existing project to the tenancy unit as a new tenant resource.
-- The project could either be the tenant project reserved by calling
-- \`AddTenantProject\` under a tenancy unit of a service producer\'s
-- project of a managed service, or from a separate project. The caller is
-- checked against a set of permissions as if calling \`AddTenantProject\`
-- on the same service consumer. To trigger the attachment, the targeted
-- tenant project must be in a folder. Make sure the
-- ServiceConsumerManagement service account is the owner of that project.
-- These two requirements are already met if the project is reserved by
-- calling \`AddTenantProject\`. Operation.
--
-- /See:/ <https://cloud.google.com/service-consumer-management/docs/overview Service Consumer Management API Reference> for @serviceconsumermanagement.services.tenancyUnits.attachProject@.
module Network.Google.Resource.ServiceConsumerManagement.Services.TenancyUnits.AttachProject
(
-- * REST Resource
ServicesTenancyUnitsAttachProjectResource
-- * Creating a Request
, servicesTenancyUnitsAttachProject
, ServicesTenancyUnitsAttachProject
-- * Request Lenses
, stuapXgafv
, stuapUploadProtocol
, stuapAccessToken
, stuapUploadType
, stuapPayload
, stuapName
, stuapCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceConsumerManagement.Types
-- | A resource alias for @serviceconsumermanagement.services.tenancyUnits.attachProject@ method which the
-- 'ServicesTenancyUnitsAttachProject' request conforms to.
type ServicesTenancyUnitsAttachProjectResource =
     "v1" :>
       CaptureMode "name" "attachProject" Text :>
         -- standard Google API query parameters follow
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :>
                     -- JSON request body, JSON 'Operation' response
                     ReqBody '[JSON] AttachTenantProjectRequest :>
                       Post '[JSON] Operation
-- | Attach an existing project to the tenancy unit as a new tenant resource.
-- The project could either be the tenant project reserved by calling
-- \`AddTenantProject\` under a tenancy unit of a service producer\'s
-- project of a managed service, or from a separate project. The caller is
-- checked against a set of permissions as if calling \`AddTenantProject\`
-- on the same service consumer. To trigger the attachment, the targeted
-- tenant project must be in a folder. Make sure the
-- ServiceConsumerManagement service account is the owner of that project.
-- These two requirements are already met if the project is reserved by
-- calling \`AddTenantProject\`. Operation.
--
-- /See:/ 'servicesTenancyUnitsAttachProject' smart constructor.
data ServicesTenancyUnitsAttachProject =
  ServicesTenancyUnitsAttachProject'
    { _stuapXgafv :: !(Maybe Xgafv) -- ^ V1 error format
    , _stuapUploadProtocol :: !(Maybe Text) -- ^ upload protocol for media
    , _stuapAccessToken :: !(Maybe Text) -- ^ OAuth access token
    , _stuapUploadType :: !(Maybe Text) -- ^ legacy upload protocol
    , _stuapPayload :: !AttachTenantProjectRequest -- ^ request body
    , _stuapName :: !Text -- ^ tenancy-unit resource name
    , _stuapCallback :: !(Maybe Text) -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ServicesTenancyUnitsAttachProject' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'stuapXgafv'
--
-- * 'stuapUploadProtocol'
--
-- * 'stuapAccessToken'
--
-- * 'stuapUploadType'
--
-- * 'stuapPayload'
--
-- * 'stuapName'
--
-- * 'stuapCallback'
servicesTenancyUnitsAttachProject
    :: AttachTenantProjectRequest -- ^ 'stuapPayload'
    -> Text -- ^ 'stuapName'
    -> ServicesTenancyUnitsAttachProject
-- Only the required payload and resource name are taken here; all
-- optional query parameters default to 'Nothing' and can be set through
-- the lenses below.
servicesTenancyUnitsAttachProject pStuapPayload_ pStuapName_ =
  ServicesTenancyUnitsAttachProject'
    { _stuapXgafv = Nothing
    , _stuapUploadProtocol = Nothing
    , _stuapAccessToken = Nothing
    , _stuapUploadType = Nothing
    , _stuapPayload = pStuapPayload_
    , _stuapName = pStuapName_
    , _stuapCallback = Nothing
    }
-- | V1 error format.
stuapXgafv :: Lens' ServicesTenancyUnitsAttachProject (Maybe Xgafv)
stuapXgafv
  = lens _stuapXgafv (\ s a -> s{_stuapXgafv = a})

-- The accessors below are generated van-Laarhoven lenses over
-- 'ServicesTenancyUnitsAttachProject'; each simply reads/writes the
-- corresponding record field.

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
stuapUploadProtocol :: Lens' ServicesTenancyUnitsAttachProject (Maybe Text)
stuapUploadProtocol
  = lens _stuapUploadProtocol
      (\ s a -> s{_stuapUploadProtocol = a})

-- | OAuth access token.
stuapAccessToken :: Lens' ServicesTenancyUnitsAttachProject (Maybe Text)
stuapAccessToken
  = lens _stuapAccessToken
      (\ s a -> s{_stuapAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
stuapUploadType :: Lens' ServicesTenancyUnitsAttachProject (Maybe Text)
stuapUploadType
  = lens _stuapUploadType
      (\ s a -> s{_stuapUploadType = a})

-- | Multipart request metadata.
stuapPayload :: Lens' ServicesTenancyUnitsAttachProject AttachTenantProjectRequest
stuapPayload
  = lens _stuapPayload (\ s a -> s{_stuapPayload = a})

-- | Required. Name of the tenancy unit that the project will be attached to.
-- Such as
-- \'services\/service.googleapis.com\/projects\/12345\/tenancyUnits\/abcd\'.
stuapName :: Lens' ServicesTenancyUnitsAttachProject Text
stuapName
  = lens _stuapName (\ s a -> s{_stuapName = a})

-- | JSONP
stuapCallback :: Lens' ServicesTenancyUnitsAttachProject (Maybe Text)
stuapCallback
  = lens _stuapCallback
      (\ s a -> s{_stuapCallback = a})
-- Wires the request record to its servant-style route: response type is
-- 'Operation', and the cloud-platform OAuth scope is required.
instance GoogleRequest
           ServicesTenancyUnitsAttachProject
         where
        type Rs ServicesTenancyUnitsAttachProject = Operation
        type Scopes ServicesTenancyUnitsAttachProject =
             '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient ServicesTenancyUnitsAttachProject'{..}
          = go _stuapName _stuapXgafv _stuapUploadProtocol
              _stuapAccessToken
              _stuapUploadType
              _stuapCallback
              (Just AltJSON)
              _stuapPayload
              serviceConsumerManagementService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ServicesTenancyUnitsAttachProjectResource)
                      mempty
| brendanhay/gogol | gogol-serviceconsumermanagement/gen/Network/Google/Resource/ServiceConsumerManagement/Services/TenancyUnits/AttachProject.hs | mpl-2.0 | 7,107 | 0 | 16 | 1,424 | 797 | 473 | 324 | 117 | 1 |
{- |
    Shutdown can be used to execute functions when exiting the program.
    This can be very useful when threads need to be stopped on exit.
    'addEnd' will add a function to the list of functions to be executed;
    'shutdown' will block while the functions added by 'addEnd' are executed.
-}
module CanvasHs.Shutdown
(
addEnd,
shutdown
)
where
import System.IO.Unsafe (unsafePerformIO)
import Data.IORef (IORef, newIORef, atomicModifyIORef, readIORef)
import Debug.Trace (traceShow)
-- | unsafePerformIO-hack function to manage thread completion
-- | unsafePerformIO-hack global holding the registered cleanup actions.
-- The most recently registered action sits at the head of the list.
ends :: IORef ([IO ()])
{-# NOINLINE ends #-}
ends = unsafePerformIO (newIORef [])

-- | Adds a function which should be executed when 'shutdown' is called.
addEnd :: IO () -> IO ()
addEnd a = atomicModifyIORef ends (\es -> (a:es, ()))

-- | Runs all functions added by 'addEnd' sequentially in the current
-- thread (most recently added first) and blocks until all have finished.
--
-- Fixed: the actions were previously chained with 'seq', which only
-- evaluates an 'IO' action to WHNF and never executes it, so no cleanup
-- action ever ran.  They are now sequenced with '(>>)'.  The 'traceShow'
-- debug countdown on stderr is kept as before.
shutdown :: IO ()
shutdown = readIORef ends >>= shutdown'
  where
    shutdown' :: [IO ()] -> IO ()
    shutdown' [] = return ()
    shutdown' (e:es) = traceShow (length es + 1) (e >> shutdown' es)
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module X01 where
------------------------------------------------------------------------------
import Gauge
------------------------------------------------------------------------------
--import qualified Data.Text as T
--import qualified Data.Text.Prettyprint.Doc as PP
import Data.Typeable
import qualified Prelude
import Protolude
import Refined
------------------------------------------------------------------------------
{-# ANN module ("HLint: ignore Reduce duplication" :: Prelude.String) #-}
------------------------------------------------------------------------------
-- | Create a gauge starting at 0.0 that steps by @incGauge@ up to @mx@,
-- then hand it to the supplied read loop together with the increment the
-- loop should use for its flow checks.
x01a
  :: Double -> Double -> Double
  -> (Double -> Double -> [Double] -> Gauge Double -> IO Double)
  -> IO Double
x01a incGauge incCheck mx lop =
  Gauge.new137 0.0 incGauge mx >>=
  lop incCheck mx [0.0]

-- | Read loop that only checks the maximum flow rate between readings,
-- accumulating the raw reading history as a plain list.
l1 :: Double -> Double -> [Double] -> Gauge Double -> IO Double
l1 = fix $ \loop inc mx rs g -> do
  r <- readGauge g
  print r
  case doCheck inc (r:rs) of
    Left e -> panic (show e)
    Right rs' ->
      if r < mx
      then loop inc mx rs' g
      else pure r
  where
    doCheck = checkMaxFlow

x01al1Good, x01al1Bad :: IO Double
x01al1Good = x01a 1.0 1.0 10.0 l1 -- increases at the correct rate
x01al1Bad = x01a 2.0 1.0 10.0 l1 -- detects increases faster than possible
------------------------------------------------------------------------------
-- | Read loop that deliberately negates readings above 3.0 before
-- checking, but still only runs the max-flow check — so the injected
-- reverse flow is never detected.
l2 :: Double -> Double -> [Double] -> Gauge Double -> IO Double
l2 = fix $ \loop inc mx rs g -> do
  r <- readGauge g
  print r
  let rsx = if r > 3.0 then -r:rs else r:rs
  print rsx
  case doCheck inc rsx of
    Left e -> panic (show e)
    Right rs' ->
      if r < mx
      then loop inc mx rs' g
      else pure r
  where
    doCheck = checkMaxFlow

x01al2Bad :: IO Double
x01al2Bad = x01a 1.0 1.0 10.0 l2 -- does NOT detect reverse flow (causing infinite loop)
------------------------------------------------------------------------------
-- | Like 'l2', but the check also verifies that readings never decrease
-- ('checkDecr' before 'checkMaxFlow'), so the injected negative readings
-- are caught and reported.
l3 :: Double -> Double -> [Double] -> Gauge Double -> IO Double
l3 = fix $ \loop inc mx rs g -> do
  r <- readGauge g
  print r
  let rsx = if r > 3.0 then -r:rs else r:rs
  print rsx
  case doCheck inc rsx of
    Left e -> panic (show e)
    Right rs' ->
      if r < mx
      then loop inc mx rs' g
      else pure r
  where
    doCheck inc rs' = checkDecr 0.0 rs' >>= checkMaxFlow inc

x01al3Good :: IO Double
x01al3Good = x01a 1.0 1.0 10.0 l3 -- detects negative flow
------------------------------------------------------------------------------
-- | Newtype tags recording which validations a reading list has passed:
-- non-decreasing, and within the flow-rate bound, respectively.
newtype PositiveFlowNT = PositiveFlowNT [Double]
newtype FlowOkNT = FlowOkNT [Double]
-- | Validate that the readings never decrease, tagging the proof with the
-- 'PositiveFlowNT' newtype.  (Rewrote @m >>= pure . f@ as @f <$> m@.)
checkDecrNT :: [Double] -> Either GaugeException PositiveFlowNT
checkDecrNT xs = PositiveFlowNT <$> checkDecr 0.0 xs

-- | Validate the flow-rate bound on readings already known to be
-- non-decreasing, upgrading the tag to 'FlowOkNT'.
checkMaxFlowNT :: Double -> PositiveFlowNT -> Either GaugeException FlowOkNT
checkMaxFlowNT mx (PositiveFlowNT xs) = FlowOkNT <$> checkMaxFlow mx xs
-- | Newtype-tagged variant of 'l3': the readings list is carried as
-- 'FlowOkNT' so only fully-checked lists can be looped on.
lNT :: Double -> Double -> FlowOkNT -> Gauge Double -> IO Double
lNT inc mx = go
  where
    check :: [Double] -> Either GaugeException FlowOkNT
    check readings = checkDecrNT readings >>= checkMaxFlowNT inc
    go (FlowOkNT readings) g = do
      r <- readGauge g
      print r
      let extended = if r > 3.0 then negate r : readings else r : readings
      print extended
      case check extended of
        Left err -> panic (show err)
        Right readings'
          | r < mx    -> go readings' g
          | otherwise -> pure r
-- | Build a gauge and hand it to the newtype-tagged loop @lop@,
-- seeding the readings with a single 0.0.
x01b
  :: Double -> Double -> Double
  -> (Double -> Double -> FlowOkNT -> Gauge Double -> IO Double)
  -> IO Double
x01b incGauge incCheck mx lop = do
  g <- Gauge.new137 0.0 incGauge mx
  lop incCheck mx (FlowOkNT [0.0]) g
x01blNTGood :: IO Double
x01blNTGood = x01b 1.0 1.0 10.0 lNT -- detects negative flow
------------------------------------------------------------------------------
-- Phantom-type version of the NT wrappers above: a single newtype
-- indexed by an uninhabited tag recording which checks have passed.
data PositiveFlow
data FlowOk
newtype GaugeReadingsPT p = GaugeReadingsPT [Double]
-- | No-decrease check, tagging the result with the 'PositiveFlow'
-- phantom. (@m >>= pure . f@ is just @f <$> m@.)
checkDecrPT :: [Double] -> Either GaugeException (GaugeReadingsPT PositiveFlow)
checkDecrPT xs = GaugeReadingsPT <$> checkDecr 0.0 xs
-- | Max-flow check; consumes 'PositiveFlow'-tagged readings and
-- retags them as 'FlowOk' on success.
checkMaxFlowPT
  :: Double -> GaugeReadingsPT PositiveFlow -> Either GaugeException (GaugeReadingsPT FlowOk)
checkMaxFlowPT mx (GaugeReadingsPT xs) = GaugeReadingsPT <$> checkMaxFlow mx xs
-- | Phantom-tagged variant of 'lNT': the loop only accepts readings
-- tagged 'FlowOk', so both checks must have run before recursing.
lPT :: Double -> Double -> GaugeReadingsPT FlowOk -> Gauge Double -> IO Double
lPT inc mx = go
  where
    check :: [Double] -> Either GaugeException (GaugeReadingsPT FlowOk)
    check readings = checkDecrPT readings >>= checkMaxFlowPT inc
    go (GaugeReadingsPT readings) g = do
      r <- readGauge g
      print r
      let extended = if r > 3.0 then negate r : readings else r : readings
      print extended
      case check extended of
        Left err -> panic (show err)
        Right readings'
          | r < mx    -> go readings' g
          | otherwise -> pure r
-- | Build a gauge and hand it to the phantom-tagged loop @lop@,
-- seeding the readings with a single 0.0.
x01c
  :: Double -> Double -> Double
  -> (Double -> Double -> GaugeReadingsPT FlowOk -> Gauge Double -> IO Double)
  -> IO Double
x01c incGauge incCheck mx lop = do
  g <- Gauge.new137 0.0 incGauge mx
  lop incCheck mx (GaugeReadingsPT [0.0]) g
x01cl5NTGood :: IO Double
x01cl5NTGood = x01c 1.0 1.0 10.0 lPT -- detects negative flow
------------------------------------------------------------------------------
-- Refinement-type version: both checks are folded into a 'Predicate'
-- instance so 'refine' performs them. NOTE(review): the predicate
-- hard-codes a max-flow increment of 1.0 rather than taking it as a
-- parameter — confirm that is intended.
type PositiveFlowOk = Refined (And PositiveFlow FlowOk) [Double]
instance Predicate PositiveFlowOk [Double] where
  validate p v = case checkDecr 0.0 v of
    Left e -> throwRefineSomeException (typeOf p) (toException e)
    Right v' -> case checkMaxFlow 1.0 v' of
      Left e -> throwRefineSomeException (typeOf p) (toException e)
      Right _ -> pure ()
-- | Refined variant of the loop: the readings list carries a proof of
-- both checks, and each iteration re-refines the extended list,
-- translating refinement failures back into gauge-specific panics.
lRT :: Double -> Refined PositiveFlowOk [Double] -> Gauge Double -> IO Double
lRT = fix $ \loop mx rs g -> do
  r <- readGauge g
  print r
  -- readings above 3.0 are negated to simulate reverse flow
  let rsx = if r > 3.0 then -r:unrefine rs else r:unrefine rs
  print rsx
  case refine rsx of
    Left e ->
      if | isDecrException e -> panic (show NotDecr)
         | isExceedsMaxFlowException e -> panic (show ExceedsMaxFlow)
         | otherwise -> panic (show e)
    Right rs' ->
      if r < mx
        then loop mx rs' g
        else pure r
-- | Driver for 'lRT': refine the empty readings list (which should
-- always succeed), build the gauge, then run the loop.
x01d
  :: Double -> Double
  -> (Double -> Refined PositiveFlowOk [Double] -> Gauge Double -> IO Double)
  -> IO Double
x01d incGauge mx lop =
  case refine [] of
    Left e -> panic (show e)
    Right rs -> do
      g <- Gauge.new137 0.0 incGauge mx
      lop mx rs g
x01dlRTGood :: IO Double
x01dlRTGood = x01d 1.0 10.0 lRT -- detects negative flow
------------------------------------------------------------------------------
-- | Does this refinement failure wrap a 'NotDecr' gauge exception?
isDecrException :: RefineException -> Bool
isDecrException = isGaugeException NotDecr
-- | Does this refinement failure wrap an 'ExceedsMaxFlow' gauge exception?
isExceedsMaxFlowException :: RefineException -> Bool
isExceedsMaxFlowException = isGaugeException ExceedsMaxFlow
-- | Unwrap a 'RefineSomeException' and compare its payload against the
-- given gauge exception; anything else is not a match.
isGaugeException :: GaugeException -> RefineException -> Bool
isGaugeException ge = \case
  RefineSomeException _ e | fromException e == Just ge -> True
  _ -> False
| haroldcarr/learn-haskell-coq-ml-etc | haskell/playpen/2020-07-07-harold-carr-phantom-existential-scratchpad/src/X01.hs | unlicense | 7,084 | 0 | 16 | 1,601 | 2,216 | 1,089 | 1,127 | -1 | -1 |
{-
Each new term in the Fibonacci sequence is generated by adding the previous two terms.
By starting with 1 and 2, the first 10 terms will be:
1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
By considering the terms in the Fibonacci sequence whose values do not exceed four million,
find the sum of the even-valued terms.
-}
-- | n-th term (1-indexed) of the Fibonacci sequence that starts 1, 2.
-- Memoised through a lazily shared list, so each call is O(n) instead
-- of the exponential cost of the naive double recursion.
-- Still partial for n < 1, matching the original definition.
fib :: Int -> Int
fib n = fibSeq !! (n - 1)
  where
    -- infinite stream 1, 2, 3, 5, 8, 13, ...
    fibSeq = 1 : 2 : zipWith (+) fibSeq (tail fibSeq)
-- | First @x@ terms of the sequence, via 'fib'.
fibs x = map fib [1..x]
-- sumEvenFibs max = sum [fib x | x <- [1..], fib x < max, even (fib x)]
-- | Sum of the even-valued Fibonacci terms whose value is below @maxi@.
-- Fix: the original where-binding had lost its left-hand side
-- (@= fib x < maxi@), which does not parse; it is restored as @check x@.
sumEvenFibs :: Int -> Int
sumEvenFibs maxi = sum [fib x | x <- takeWhile check [1..], even (fib x)]
  where
    check x = fib x < maxi
check2 :: Int -> Int -> Bool
check2 x maxi = fib x < maxi && even (fib x) | m3mitsuppe/haskell | projecteuler/pr002.hs | unlicense | 660 | 0 | 10 | 156 | 181 | 90 | 91 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Sync.Retrieve.GitHub.GitHub where
import Control.Exception
import Data.Issue
import Data.Maybe
import Data.Char
import Data.List (sort)
import Debug.Trace
import Network.HTTP.Conduit (HttpException(..))
import Network.HTTP.Types (statusCode, statusMessage)
import qualified Github.Auth as GA
import qualified Github.Issues as GI
import qualified Github.Issues.Events as GIE
import qualified Github.Issues.Comments as GIC
import qualified Github.Data.Definitions as GD
-- | Strip trailing space characters (only ' '; tabs/newlines are kept).
rstrip :: String -> String
rstrip = reverse . lstrip . reverse

-- | Strip leading space characters.
lstrip :: String -> String
lstrip = dropWhile (== ' ')

-- | Strip leading and trailing space characters.
strip :: String -> String
strip = lstrip . rstrip
-- | Convert a GitHub API issue into our internal 'Issue'.
-- @origin@ is the "user/repo" string used both as the issue origin and
-- to build the issue URL. The assignee is used as the issue's user,
-- falling back to the creator when unassigned. Label names are
-- sanitised to alphanumerics-plus-underscore.
convertIssue :: String -> GD.Issue -> Issue
convertIssue origin iss =
  let user = case GD.issueAssignee iss of
        Nothing -> GD.issueUser iss
        Just us -> us
      userName = GD.githubOwnerLogin user
      tags = map GD.labelName $ GD.issueLabels iss
      -- closed wins over the "T:Active" label; otherwise plain Open
      isClosed = isJust $ GD.issueClosedAt iss
      isActive = any (== "T:Active") tags
      status = if isClosed
                 then Closed
                 else if isActive
                        then Active
                        else Open
      cleanChar c
        | isAlphaNum c = c
        | otherwise = '_'
      cleanTag tag = map cleanChar tag
      cleanTags = map cleanTag tags
      nr = GD.issueNumber iss
      url = "https://www.github.com/" ++ origin ++ "/issues/" ++
            (show nr)
  in (Issue origin nr userName status cleanTags
        (strip $ GD.issueTitle iss) "github" url [])
-- | Wrap event details into an 'IssueEvent' stamped with the GitHub
-- event's creation time and actor login.
wrapEvent :: GD.Event -> IssueEventDetails -> IssueEvent
wrapEvent event details =
  IssueEvent (GD.fromGithubDate $ GD.eventCreatedAt event) (
    GD.githubOwnerLogin $ GD.eventActor event) details
-- | Fallback conversion: render the raw event as a comment.
convertEvent :: GD.Event -> IssueEventDetails
convertEvent evt = IssueComment (show evt)
-- | Map each GitHub event type onto our internal event vocabulary.
-- Assignment and open/close become structured changes; most other
-- event types degrade to plain 'IssueComment's. Always returns a
-- singleton list (the list type leaves room for dropping events).
convertIssueEvent :: GD.Event -> [IssueEvent]
convertIssueEvent event
  -- status change
  | (GD.eventType event) == GD.Assigned = [
      wrapEvent event $ IssueOwnerChange (
        GD.githubOwnerLogin $ GD.eventActor event)]
  | (GD.eventType event) == GD.Closed = [
      wrapEvent event $ IssueStatusChange Closed]
  | (GD.eventType event) == GD.ActorUnassigned = [
      wrapEvent event $ IssueComment "Unassigned owner"]
  | (GD.eventType event) == GD.Reopened = [
      wrapEvent event $ IssueStatusChange Open]
  | (GD.eventType event) == GD.Renamed = [
      wrapEvent event $ IssueComment ("Changed title")]
  -- label change
  | (GD.eventType event) == GD.Labeled = [
      wrapEvent event $ IssueComment ("Added a label")]
  | (GD.eventType event) == GD.Unlabeled = [
      wrapEvent event $ IssueComment ("Removed a label")]
  -- milestone change
  | (GD.eventType event) == GD.Milestoned =
      -- pull the milestone title out of the attached issue, if present
      let mstone =
            case GD.eventIssue event of
              Just evt ->
                case GD.issueMilestone evt of
                  Just ms -> " " ++ GD.milestoneTitle ms
                  Nothing -> ""
              Nothing -> ""
      in [wrapEvent event $ (IssueComment ("Added milestone" ++ mstone))]
  | (GD.eventType event) == GD.Demilestoned = [
      wrapEvent event $ IssueComment "Removed a milestone"]
  | (GD.eventType event) == GD.Subscribed = [
      wrapEvent event $ IssueComment "Subscribed"]
  | (GD.eventType event) == GD.Mentioned = [
      wrapEvent event $ IssueComment "Mentioned"]
  -- ignored, make into comment
  | otherwise = [wrapEvent event $ (IssueComment (show $ GD.eventType event))]
-- | Convert a GitHub issue comment into a singleton 'IssueComment'
-- event stamped with its creation time and author login.
convertIssueComment :: GD.IssueComment -> [IssueEvent]
convertIssueComment comment =
  [IssueEvent (GD.fromGithubDate $ GD.issueCommentCreatedAt comment) (
    GD.githubOwnerLogin $ GD.issueCommentUser comment) (
      IssueComment (GD.issueCommentBody comment))]
-- | Fetch all comments for one issue. On API failure the error is
-- printed and an empty list is returned (best-effort, non-fatal).
loadIssueComments :: Maybe GA.GithubAuth -> String -> String -> Int -> IO [IssueEvent]
loadIssueComments oauth user repo num = do
  res <- GIC.comments' oauth user repo num
  case res of
    Left err -> do
      putStrLn (user ++ "/" ++ repo ++ ": issue " ++ (
        show num) ++ ": " ++ show err)
      return []
    Right comments ->
      return $ concatMap convertIssueComment comments
-- | Fetch all events for one issue. HTTP status errors are rendered
-- with their code/message; any failure is printed and swallowed,
-- returning an empty list (best-effort, non-fatal).
loadIssueEvents :: Maybe GA.GithubAuth -> String -> String -> Int -> IO [IssueEvent]
loadIssueEvents oauth user repo issnum = do
  let classifyError (GD.HTTPConnectionError ex) =
        case (fromException ex) of
          Just (StatusCodeException st _ _) -> "HTTP Connection Error " ++
                                               show (statusCode st) ++ ": " ++
                                               show (statusMessage st)
          _ -> "HTTP Connection Error (unknown status code): " ++ show ex
      classifyError err = show err
  res <- GIE.eventsForIssue' oauth user repo issnum
  case res of
    Left err -> do
      putStrLn (user ++ "/" ++ repo ++ ": issue " ++ (
        show issnum) ++ ": " ++ classifyError err)
      return []
    Right events ->
      return $ concatMap convertIssueEvent events
-- | Synthesize the issue's opening body as an 'IssueComment' event,
-- attributed to the assignee (or creator when unassigned) at the
-- issue's creation time. An absent body becomes the empty string.
makeIssueComment :: GD.Issue -> IssueEvent
makeIssueComment issue =
  let user = case GD.issueAssignee issue of
        Nothing -> GD.issueUser issue
        Just us -> us
      userName = GD.githubOwnerLogin user
      createDate = GD.fromGithubDate $ GD.issueCreatedAt issue
  in IssueEvent createDate userName (IssueComment (maybe "" id (GD.issueBody issue)))
-- | Fetch a single issue and convert it to an internal 'Issue'.
-- On API failure the error is printed and 'Nothing' is returned.
fetchIssue :: Maybe String -> String -> String -> Int -> IO (Maybe Issue)
fetchIssue tok user repo issuenum = do
  let auth = fmap GA.GithubOAuth tok
  res <- GI.issue' auth user repo issuenum
  case res of
    Left err -> do
      print err -- idiomatic replacement for @putStrLn $ show err@
      return Nothing
    Right issue -> return $ Just $ convertIssue (user ++ "/" ++ repo) issue
-- | Augment an already-converted issue with its event and comment
-- history fetched from GitHub. Events are sorted; comments are
-- appended after them.
fetchDetails :: Maybe String -> String -> String -> Issue -> IO (Issue)
fetchDetails tok user repo issue = do
  let auth = fmap GA.GithubOAuth tok
      issuenum = number issue
  eventList <- loadIssueEvents auth user repo issuenum
  commentList <- loadIssueComments auth user repo issuenum
  -- assume that the issue already has the initial comment.
  return $ issue { events = (events issue) ++ (sort eventList ++ commentList) }
-- | Fetch all issues for @user/repo@, optionally filtered by status
-- and labels, converted to internal 'Issue's with each issue's opening
-- body attached as its single initial event.
-- On API failure the error is printed and an empty list is returned.
fetch :: Maybe String -> String -> String -> Maybe IssueStatus -> [String] -> IO [Issue]
fetch tok user repo stat tags = do
  let auth = fmap GA.GithubOAuth tok
      statusLim = case stat of
        Just Open -> [GI.Open]
        Just Closed -> [GI.OnlyClosed]
        _ -> []
      -- only restrict by labels when some were actually supplied
      -- (null test instead of the O(n) @length tags > 0@)
      tagLim = if not (null tags)
                 then [GI.Labels tags]
                 else []
  res <- GI.issuesForRepo' auth user repo (statusLim ++ tagLim)
  case res of
    Left err -> do
      print err
      return []
    Right issues -> do
      let convertedIssues = map (convertIssue (user ++ "/" ++ repo)) issues
          comments = map makeIssueComment issues
          conversions = zip convertedIssues comments
      return $
        map (\(i, comm) -> i { events = [comm] }) conversions
| lally/org-issue-sync | src/Sync/Retrieve/GitHub/GitHub.hs | apache-2.0 | 7,005 | 0 | 20 | 1,769 | 2,246 | 1,113 | 1,133 | 156 | 5 |
----
-- Copyright (c) 2013 Andrea Bernardini.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
----
module Handler.UpdateFeed where
import Data.Aeson.Types
import Data.Time
import System.Locale
import Data.Text hiding (tail, head)
import Prelude (head, tail)
import Import hiding (catch)
import Network.HTTP.Types.Status
import GHC.Generics
-- | One feed update posted by the client: the feed URL and the time
-- it was last read.
data JSONUpdate = JSONUpdate {
  url :: Text
  , time :: UTCTime
  } deriving (Show, Generic)
-- | Wire wrapper: the request body is @{"feeds": [...]}@.
data UpdateArray = UpdateArray {
  feeds :: [JSONUpdate]
  } deriving (Show, Generic)
-- Parsers derived via GHC.Generics.
instance FromJSON JSONUpdate
instance FromJSON UpdateArray
-- | Accept a JSON object of feed updates for the session identified by
-- @token@; verifies the session, then persists each update.
-- Responds 400 (via 'sendFail') on any malformed body.
--
-- Fixes: the catch-all branch used @otherwise@ as a *pattern*, which
-- binds a variable shadowing 'Prelude.otherwise' (it matched everything
-- only by accident of being a wildcard); the @jsup <- return (...)@
-- anti-pattern and unused error binders are also removed.
postUpdateFeedR :: Text -> Handler ()
postUpdateFeedR token = do
  (result :: Result Value) <- parseJsonBody
  lift $ putStrLn $ "####################UPDATE FEED HANDLER#####################"
  case result of
    Success v@(Object _) -> do
      -- verify the session before inspecting the parsed payload,
      -- matching the original evaluation order
      user <- verifySession token
      case (fromJSON v :: Result UpdateArray) of
        Success (UpdateArray f) -> tryUpdate f user
        Error _ -> sendFail
    _ -> sendFail
-- | Look up the session by token and return its user, short-circuiting
-- with a 400 response when the token is unknown or the session has
-- expired. NOTE(review): the local binding @exp@ shadows 'Prelude.exp'.
verifySession :: Text -> Handler Text
verifySession token = do
  session <- runDB $ selectFirst [SessionToken ==. token] []
  case session of
    Nothing -> sendResponseStatus status400 ("Not a valid session" :: Text)
    Just (Entity _ (Session exp user _))
      -> do now <- lift getNow
            if exp > now then
              return user
            else do
              sendResponseStatus status400 ("Session expired" :: Text)
-- | Persist the updates for @user@, then acknowledge with an empty
-- 200 response.
tryUpdate :: [JSONUpdate] -> Text -> Handler ()
tryUpdate feeds user = updateFeeds feeds user >> sendResponse ()
-- | Persist each update, setting the matching feed's last-read time
-- for @user@, echoing each processed update to stdout.
--
-- Fixes: the base case's redundant @do return ()@ and the unreachable
-- @_ -> return ()@ branch (the @JSONUpdate url date@ pattern is total
-- for this type) are removed; the unused @user@ binder is discarded.
updateFeeds :: [JSONUpdate] -> Text -> Handler ()
updateFeeds [] _ = return ()
updateFeeds (x@(JSONUpdate url date) : xs) user = do
  _ <- runDB $ updateWhere [FeedUrl ==. url, FeedUser ==. user] [FeedRead =. date]
  lift $ putStrLn $ show x
  updateFeeds xs user
-- | Current UTC time. (@do x <- m; return x@ is just @m@.)
getNow :: IO UTCTime
getNow = getCurrentTime
sendFail :: Handler ()
sendFail = sendResponseStatus status400 () | andrebask/newsprint | src/AccountManagerWS/Handler/UpdateFeed.hs | apache-2.0 | 2,907 | 0 | 20 | 817 | 708 | 363 | 345 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# OPTIONS_HADDOCK not-home #-}
--------------------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett 2015
-- License : BSD-style
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Portability : non-portable
--
-- This module suppose a Word64-based array-mapped PATRICIA Trie.
--
-- The most significant nybble is isolated by using techniques based on
-- <https://www.fpcomplete.com/user/edwardk/revisiting-matrix-multiplication/part-4>
-- but modified to work nybble-by-nybble rather than bit-by-bit.
--
-- This structure secretly maintains a finger to the previous mutation to
-- speed access and repeated operations.
--
--------------------------------------------------------------------------------
module Data.Transient.WordMap.Internal where
import Control.Applicative hiding (empty)
import Control.DeepSeq
import Control.Lens hiding (index, deep)
import Control.Monad
import Control.Monad.ST hiding (runST)
import Control.Monad.Primitive
import Data.Bits
import Data.Foldable (fold)
import Data.Primitive.MutVar
import Data.Transient.Primitive.SmallArray
import Data.Transient.Primitive.Unsafe
import Data.Word
import qualified GHC.Exts as Exts
import Prelude hiding (lookup, length, foldr)
import GHC.Exts as Exts
import GHC.ST
import GHC.Word
import Unsafe.Coerce
type Mask = Word16
type Offset = Int
--------------------------------------------------------------------------------
-- * Utilities
--------------------------------------------------------------------------------
-- | Pointer equality via GHC's primop. Only usable as a fast path:
-- 'False' does not imply structural inequality.
ptrEq :: a -> a -> Bool
ptrEq x y = isTrue# (Exts.reallyUnsafePtrEquality# x y)
{-# INLINEABLE ptrEq #-}
-- | Pointer inequality; the @xorI# 1#@ inverts the primop's 0/1 result.
ptrNeq :: a -> a -> Bool
ptrNeq x y = isTrue# (Exts.reallyUnsafePtrEquality# x y `xorI#` 1#)
{-# INLINEABLE ptrNeq #-}
-- | Position of bit @b@ inside the popcount-compressed child array
-- described by mask @m@ (number of set bits below @b@).
index :: Word16 -> Word16 -> Int
index m b = popCount (m .&. (b-1))
{-# INLINE index #-}
-- | Note: @level 0@ will return a negative shift, so don't use it
level :: Word64 -> Int
level okk = 60 - (countLeadingZeros okk .&. 0x7c)
{-# INLINE level #-}
-- | The nybble of key @k@ at bit offset @o@, in [0..15].
maskBit :: Word64 -> Offset -> Int
maskBit k o = fromIntegral (unsafeShiftR k o .&. 0xf)
{-# INLINE maskBit #-}
-- | Single-bit 'Mask' selecting @k@'s nybble at offset @o@.
mask :: Word64 -> Offset -> Word16
mask k o = unsafeShiftL 1 (maskBit k o)
{-# INLINE mask #-}
-- | Given a pair of keys, they agree down to this height in the display, don't use this when they are the same
--
-- @
-- apogeeBit k ok = unsafeShiftR (level (xor k ok)) 2
-- level (xor k ok) = unsafeShiftL (apogeeBit k ok) 2
-- @
apogeeBit :: Word64 -> Word64 -> Int
apogeeBit k ok = 15 - unsafeShiftR (countLeadingZeros (xor k ok)) 2
-- | Single-bit mask at 'apogeeBit' of the two keys.
apogee :: Word64 -> Word64 -> Mask
apogee k ok = unsafeShiftL 1 (apogeeBit k ok)
--------------------------------------------------------------------------------
-- * WordMaps
--------------------------------------------------------------------------------
-- | Immutable trie node: a 'Full' node has all 16 children, a 'Node'
-- has the children selected by its 'Mask', a 'Tip' is a single entry.
data Node a
  = Full {-# UNPACK #-} !Word64 {-# UNPACK #-} !Offset !(SmallArray (Node a))
  | Node {-# UNPACK #-} !Word64 {-# UNPACK #-} !Offset {-# UNPACK #-} !Mask !(SmallArray (Node a))
  | Tip {-# UNPACK #-} !Word64 a
  deriving (Functor,Foldable,Show)
-- | Immutable map: a finger (focused key, its value if present) plus
-- the path of unplugged spine nodes above it, selected by 'fingerMask'.
data WordMap a = WordMap
  { fingerKey :: {-# UNPACK #-} !Word64
  , fingerMask :: {-# UNPACK #-} !Mask
  , fingerValue :: !(Maybe a)
  , fingerPath :: {-# UNPACK #-} !(SmallArray (Node a))
  } deriving (Functor,Show)
-- | Mutable counterpart of 'Node'; same layout over mutable arrays.
data TNode s a
  = TFull {-# UNPACK #-} !Word64 {-# UNPACK #-} !Offset !(SmallMutableArray s (TNode s a))
  | TNode {-# UNPACK #-} !Word64 {-# UNPACK #-} !Offset {-# UNPACK #-} !Mask !(SmallMutableArray s (TNode s a))
  | TTip {-# UNPACK #-} !Word64 a
-- | This is a transient WordMap with a clojure-like API
data TWordMap s a = TWordMap
  { transientFingerKey :: {-# UNPACK #-} !Word64
  , transientFingerMask :: {-# UNPACK #-} !Mask
  , transientFingerValue :: !(Maybe a)
  , transientFingerPath :: {-# UNPACK #-} !(SmallMutableArray s (TNode s a))
  }
-- | A fully-polymorphic-in-@s@ transient is safe to reinterpret as
-- persistent (the arrays can never be mutated again).
persisted :: (forall s. TWordMap s a) -> WordMap a
persisted = unsafeCoerce
-- Unchecked reinterpretations; callers must guarantee the mutable side
-- is already frozen (same heap layout is assumed).
unsafePersistentTNode :: TNode s a -> Node a
unsafePersistentTNode = unsafeCoerce
unsafePersistent :: TWordMap s a -> WordMap a
unsafePersistent = unsafeCoerce
{-# INLINE unsafePersistent #-}
-- | This is a mutable WordMap with a classic Haskell mutable container-style API
--
-- On the plus side, this API means you don't need to carefully avoid reusing a transient
-- On the minus side, you have an extra reference to track.
newtype MWordMap s a = MWordMap { runMWordMap :: MutVar s (TWordMap s a) }
-- | Wrap an immutable map into the mutable-reference API.
thaw :: PrimMonad m => WordMap a -> m (MWordMap (PrimState m) a)
thaw m = MWordMap <$> newMutVar (transient m)
-- | Read the current transient out of the reference and persist it.
freeze :: PrimMonad m => MWordMap (PrimState m) a -> m (WordMap a)
freeze (MWordMap r) = do
  x <- readMutVar r
  persistent x
--------------------------------------------------------------------------------
-- * Transient WordMaps
--------------------------------------------------------------------------------
-- | O(1) worst-case conversion from an immutable structure to a mutable one
transient :: WordMap a -> TWordMap s a
transient = unsafeCoerce
{-# INLINE transient #-}
-- | O(1) amortized conversion from a mutable structure to an immutable one
persistent :: PrimMonad m => TWordMap (PrimState m) a -> m (WordMap a)
persistent r@(TWordMap _ _ _ ns0) = primToPrim $ do
    go ns0
    return (unsafePersistent r)
  where
    -- freeze an array (and everything under it) only if it is still
    -- mutable; an already-frozen array implies its subtree is frozen
    go :: SmallMutableArray s (TNode s a) -> ST s ()
    go ns = unsafeCheckSmallMutableArray ns >>= \case
      True -> walk ns (sizeOfSmallMutableArray ns - 1)
      False -> return ()
    walk :: SmallMutableArray s (TNode s a) -> Int -> ST s ()
    walk ns !i
      | i >= 0 = readSmallArray ns i >>= \case
          TNode _ _ _ as -> do go as; walk ns (i - 1)
          TFull _ _ as -> do go as; walk ns (i - 1)
          _ -> return ()
      | otherwise = return ()
{-# NOINLINE persistent #-}
-- | The empty immutable map.
empty :: WordMap a
empty = persisted emptyT
{-# NOINLINE empty #-}
-- | A shared zero-length array usable at any element type.
emptySmallMutableArray :: SmallMutableArray s a
emptySmallMutableArray = runST $ unsafeCoerce <$> newSmallArray 0 undefined
{-# NOINLINE emptySmallMutableArray #-}
-- | The empty transient map.
emptyT :: TWordMap s a
emptyT = TWordMap 0 0 Nothing emptySmallMutableArray
{-# INLINE emptyT #-}
-- | A fresh empty mutable map.
emptyM :: PrimMonad m => m (MWordMap (PrimState m) a)
emptyM = thaw empty
{-# INLINE emptyM #-}
-- | Build a singleton WordMap
singleton :: Word64 -> a -> WordMap a
singleton k v = WordMap k 0 (Just v) mempty
{-# INLINE singleton #-}
-- | Transient singleton.
singletonT :: Word64 -> a -> TWordMap s a
singletonT k v = TWordMap k 0 (Just v) emptySmallMutableArray
{-# INLINE singletonT #-}
-- | Mutable singleton.
singletonM :: PrimMonad m => Word64 -> a -> m (MWordMap (PrimState m) a)
singletonM k v = thaw (singleton k v)
-- | Descend a mutable node looking for key @k@. At each level the
-- keys' xor, shifted by the node offset, must fit in a nybble or the
-- key lies outside this subtree.
lookupTNode :: Word64 -> TNode s a -> ST s (Maybe a)
lookupTNode !k (TFull ok o a)
  | z > 0xf = return Nothing
  | otherwise = do
      x <- readSmallArray a (fromIntegral z)
      lookupTNode k x
  where
    z = unsafeShiftR (xor k ok) o
lookupTNode k (TNode ok o m a)
  | z > 0xf = return Nothing
  | m .&. b == 0 = return Nothing
  | otherwise = do
      x <- readSmallArray a (index m b)
      lookupTNode k x
  where
    z = unsafeShiftR (xor k ok) o
    b = unsafeShiftL 1 (fromIntegral z)
lookupTNode k (TTip ok ov)
  | k == ok = return (Just ov)
  | otherwise = return (Nothing)
-- | Lookup in a transient: hit the finger directly when the key
-- matches, otherwise descend from the appropriate spine entry.
lookupT :: PrimMonad m => Word64 -> TWordMap (PrimState m) a -> m (Maybe a)
lookupT k0 (TWordMap ok m mv mns)
  | k0 == ok = return mv
  | b <- apogee k0 ok = if
      | m .&. b == 0 -> return Nothing
      | otherwise -> do
          x <- readSmallArray mns (index m b)
          primToPrim (lookupTNode k0 x)
{-# INLINE lookupT #-}
-- | Lookup through the mutable-reference API.
lookupM :: PrimMonad m => Word64 -> MWordMap (PrimState m) a -> m (Maybe a)
lookupM k0 (MWordMap m) = do
  x <- readMutVar m
  lookupT k0 x
{-# INLINE lookupM #-}
-- implementation of lookup using the transient operations
lookup0 :: Word64 -> WordMap a -> Maybe a
lookup0 k m = runST (lookupT k (transient m))
{-# INLINE lookup0 #-}
-- | Pure analogue of 'lookupTNode' over immutable nodes.
lookupNode :: Word64 -> Node a -> Maybe a
lookupNode !k (Full ok o a)
  | z > 0xf = Nothing
  | otherwise = lookupNode k (indexSmallArray a (fromIntegral z))
  where
    z = unsafeShiftR (xor k ok) o
lookupNode k (Node ok o m a)
  | z > 0xf = Nothing
  | m .&. b == 0 = Nothing
  | otherwise = lookupNode k (indexSmallArray a (index m b))
  where
    z = unsafeShiftR (xor k ok) o
    b = unsafeShiftL 1 (fromIntegral z)
lookupNode k (Tip ok ov)
  | k == ok = Just ov
  | otherwise = Nothing
-- | Pure lookup: finger hit first, spine descent otherwise.
lookup :: Word64 -> WordMap a -> Maybe a
lookup k0 (WordMap ok m mv mns)
  | k0 == ok = mv
  | b <- apogee k0 ok = if
      | m .&. b == 0 -> Nothing
      | otherwise -> lookupNode k0 (indexSmallArray mns (index m b))
{-# INLINE lookup #-}
-- | Modify an immutable structure with mutable operations.
--
-- @modify f wm@ passed @f@ a "persisted" transient that may be reused.
modify :: (forall s. TWordMap s a -> ST s (TWordMap s b)) -> WordMap a -> WordMap b
modify f wm = runST $ do
  mwm <- f (transient wm)
  persistent mwm
{-# INLINE modify #-}
-- | Modify a mutable wordmap with a transient operation.
modifyM :: PrimMonad m => (TWordMap (PrimState m) a -> m (TWordMap (PrimState m) a)) -> MWordMap (PrimState m) a -> m ()
modifyM f (MWordMap r) = do
  t <- readMutVar r
  t' <- f t
  writeMutVar r t'
{-# INLINE modifyM #-}
{-# RULES "persistent/transient" forall m. persistent (unsafeCoerce m) = return m #-}
-- | Query a transient structure with queries designed for an immutable structure.
--
-- This does _not_ destroy the transient, although, it does mean subsequent actions need to copy-on-write from scratch.
--
-- After @query f wm@, @wm@ is considered persisted and may be reused.
query :: PrimMonad m => (WordMap a -> r) -> TWordMap (PrimState m) a -> m r
query f t = stToPrim $ f <$> persistent t
{-# INLINE query #-}
-- | Query a mutable structure with queries designed for an immutable structure.
queryM :: PrimMonad m => (WordMap a -> r) -> MWordMap (PrimState m) a -> m r
queryM f (MWordMap m) = stToPrim $ do
  t <- readMutVar m
  query f t
{-# INLINE queryM #-}
--------------------------------------------------------------------------------
-- * Construction
--------------------------------------------------------------------------------
-- @
-- unsafeFreezeSmallArray# :: Hint s
-- warm :: Hint s
-- @
--
-- invariant: everything below a given position in a tree must be at least this persisted
type Hint s = forall x. SmallMutableArray# s x -> State# s -> State# s
-- | Hint that leaves arrays mutable.
warm :: Hint s
warm _ s = s
{-# INLINE warm #-}
-- | Hint that freezes each array as it is produced.
cold :: Hint s
cold m s = case unsafeFreezeSmallArray# m s of (# s', _ #) -> s'
{-# NOINLINE cold #-}
-- | Run a hint against a boxed array wrapper.
apply :: PrimMonad m => Hint (PrimState m) -> SmallMutableArray (PrimState m) a -> m ()
apply hint (SmallMutableArray m) = primitive_ (hint m)
{-# INLINE apply #-}
-- | Copy @i@ into a new array one slot larger, inserting @a@ at
-- position @k@, then apply the hint to the result.
insertSmallMutableArray :: Hint s -> SmallMutableArray s a -> Int -> a -> ST s (SmallMutableArray s a)
insertSmallMutableArray hint i k a = do
  let n = sizeOfSmallMutableArray i
  o <- newSmallArray (n + 1) a
  copySmallMutableArray o 0 i 0 k -- backwards `primitive` convention
  copySmallMutableArray o (k+1) i k (n-k) -- backwards `primitive` convention
  apply hint o
  return o
{-# INLINEABLE insertSmallMutableArray #-}
-- | Copy @i@ into a new array one slot smaller, dropping position @k@,
-- then apply the hint to the result.
deleteSmallMutableArray :: Hint s -> SmallMutableArray s a -> Int -> ST s (SmallMutableArray s a)
deleteSmallMutableArray hint i k = do
  let n = sizeOfSmallMutableArray i
  o <- newSmallArray (n - 1) undefined
  copySmallMutableArray o 0 i 0 k -- backwards `primitive` convention
  copySmallMutableArray o k i (k+1) (n-k-1) -- backwards `primitive` convention
  apply hint o
  return o
{-# INLINEABLE deleteSmallMutableArray #-}
-- | Smart constructor: a node whose mask is saturated becomes 'TFull'.
nodeT :: Word64 -> Offset -> Mask -> SmallMutableArray s (TNode s a) -> TNode s a
nodeT k o 0xffff a = TFull k o a
nodeT k o m a = TNode k o m a
{-# INLINE nodeT #-}
-- | Build a two-child parent over nodes @n@ (key @k@) and @on@
-- (key @ok@) at the level where the two keys diverge; the children
-- are stored in key order.
forkT :: Hint s -> Word64 -> TNode s a -> Word64 -> TNode s a -> ST s (TNode s a)
forkT hint k n ok on = do
  arr <- newSmallArray 2 n
  writeSmallArray arr (fromEnum (k < ok)) on
  let !o = level (xor k ok)
  apply hint arr
  return $! TNode (k .&. unsafeShiftL 0xfffffffffffffff0 o) o (mask k o .|. mask ok o) arr
-- O(1) remove the _entire_ branch containing a given node from this tree, in situ
unplugT :: Hint s -> Word64 -> TNode s a -> ST s (TNode s a)
unplugT hint k on@(TFull ok n as)
  | wd >= 0xf = return on
  | d <- fromIntegral wd = TNode ok n (complement (unsafeShiftL 1 d)) <$> deleteSmallMutableArray hint as d
  where !wd = unsafeShiftR (xor k ok) n
unplugT hint !k on@(TNode ok n m as)
  | wd >= 0xf = return on
  | !b <- unsafeShiftL 1 (fromIntegral wd), m .&. b /= 0, p <- index m b =
      if sizeOfSmallMutableArray as == 2
        then readSmallArray as (1-p) -- keep the other node
        else TNode ok n (m .&. complement b) <$> deleteSmallMutableArray hint as p
  | otherwise = return on
  where !wd = unsafeShiftR (xor k ok) n
unplugT _ _ on = return on
-- | Rebuild the canonical (finger-free) root of a map, if non-empty,
-- by replugging the finger value back down the stored path.
canonical :: WordMap a -> Maybe (Node a)
canonical wm = runST $ case transient wm of
  TWordMap _ 0 Nothing _ -> return Nothing
  TWordMap k _ mv ns -> Just . unsafePersistentTNode <$> replugPathT cold k 0 (sizeOfSmallMutableArray ns) mv ns
-- O(1) plug a child node directly into an open parent node
-- carefully retains identity in case we plug what is already there back in
--
-- Fix: the TNode copy-on-write branch cloned only @odm@ elements
-- (@cloneSmallMutableArray as 0 odm@) and then wrote at index @odm@ —
-- out of bounds, and it dropped the later children. It now clones the
-- whole child array, mirroring the TFull branch's @cloneSmallMutableArray as 0 16@.
plugT :: Hint s -> Word64 -> TNode s a -> TNode s a -> ST s (TNode s a)
plugT hint k z on@(TNode ok n m as)
  | wd > 0xf = forkT hint k z ok on
  | otherwise = do
      let d = fromIntegral wd
          b = unsafeShiftL 1 d
          odm = index m b
      if m .&. b == 0
        then nodeT ok n (m .|. b) <$> insertSmallMutableArray hint as odm z
        else unsafeCheckSmallMutableArray as >>= \case
          True -> do -- really mutable, mutate in place
            writeSmallArray as odm z
            apply hint as -- we may be freezing as we go, apply the hint
            return on
          False -> do -- this is a persisted node
            !oz <- readSmallArray as odm
            if ptrEq oz z
              then return on -- but we arent changing it
              else do -- copy on write: clone ALL children, then overwrite slot odm
                bs <- cloneSmallMutableArray as 0 (sizeOfSmallMutableArray as)
                writeSmallArray bs odm z
                apply hint bs
                return (TNode ok n m bs)
  where wd = unsafeShiftR (xor k ok) n
plugT hint k z on@(TFull ok n as)
  | wd > 0xf = forkT hint k z ok on
  | otherwise = do
      let d = fromIntegral wd
      unsafeCheckSmallMutableArray as >>= \case
        True -> do
          writeSmallArray as d z
          apply hint as
          return on
        False -> do
          !oz <- readSmallArray as d
          if ptrEq oz z
            then return on
            else do
              bs <- cloneSmallMutableArray as 0 16
              writeSmallArray bs d z
              apply hint bs
              return (TFull ok n bs)
  where wd = unsafeShiftR (xor k ok) n
plugT hint k z on@(TTip ok _) = forkT hint k z ok on
-- | Given @k@ located under @acc@, @plugPathT k i t acc ns@ plugs acc recursively into each of the nodes
-- of @ns@ from @[i..t-1]@ from the bottom up
plugPathT :: Hint s -> Word64 -> Int -> Int -> TNode s a -> SmallMutableArray s (TNode s a) -> ST s (TNode s a)
plugPathT hint !k !i !t !acc !ns
  | i < t = do
      x <- readSmallArray ns i
      y <- plugT hint k acc x
      plugPathT hint k (i+1) t y ns
  | otherwise = return acc
-- this recurses into @plugPathT@ deliberately.
unplugPathT :: Hint s -> Word64 -> Int -> Int -> SmallMutableArray s (TNode s a) -> ST s (TNode s a)
unplugPathT hint k i t ns = do
  x <- readSmallArray ns i
  y <- unplugT hint k x
  plugPathT hint k (i+1) t y ns
-- | Replug the path with either a fresh tip for @k@ (when a finger
-- value is present) or with the branch for @k@ removed (when absent).
replugPathT :: PrimMonad m => Hint (PrimState m) -> Word64 -> Int -> Int -> Maybe v -> SmallMutableArray (PrimState m) (TNode (PrimState m) v) -> m (TNode (PrimState m) v)
replugPathT hint k i t (Just v) ns = primToPrim $ plugPathT hint k i t (TTip k v) ns
replugPathT hint k i t Nothing ns = primToPrim $ unplugPathT hint k i t ns
-- | Unbox an 'Int'.
unI# :: Int -> Int#
unI# (I# i) = i
-- | O(1) This function enables us to GC the items that lie on the path to the finger.
--
-- Normally we only do this lazily as the finger moves out of a given area, but if you have
-- particularly burdensome items for the garbage collector it may be worth paying this price
-- to explicitly allow them to go free.
trimWithHint :: PrimMonad m => Hint (PrimState m) -> TWordMap (PrimState m) a -> m (TWordMap (PrimState m) a)
trimWithHint _ wm@(TWordMap _ 0 _ _) = return wm
trimWithHint hint0 wm0@(TWordMap k0 m mv ns) = primToPrim $ unsafeCheckSmallMutableArray ns >>= \case
    -- still mutable: unplug in place; frozen: copy into a fresh array
    True -> go hint0 k0 ns ns (n-1) wm0
    False -> do
      ns' <- newSmallArray n undefined
      go hint0 k0 ns' ns (n-1) (TWordMap k0 m mv ns')
  where
    n = sizeOfSmallMutableArray ns
    -- walk the spine top-down, storing each node with the finger's
    -- branch removed
    go :: Hint s -> Word64 -> SmallMutableArray s (TNode s a) -> SmallMutableArray s (TNode s a) -> Int -> TWordMap s a -> ST s (TWordMap s a)
    go hint k dst src i wm
      | i >= 0 = do
          x <- readSmallArray src i
          y <- unplugT hint k x
          writeSmallArray dst i y
          go hint k dst src (i - 1) wm
      | otherwise = do
          apply hint dst
          return wm
-- | Trim a transient, leaving arrays mutable.
trimT :: PrimMonad m => TWordMap (PrimState m) a -> m (TWordMap (PrimState m) a)
trimT = trimWithHint warm
{-# INLINE trimT #-}
-- | O(1) This function enables us to GC the items that lie on the path to the finger.
--
-- Normally we only do this lazily as the finger moves out of a given area, but if you
-- have particularly burdensome items for the garbage collector it may be worth paying this price.
-- to explicitly allow them to go free.
trimM :: PrimMonad m => MWordMap (PrimState m) a -> m ()
trimM = modifyM (trimWithHint warm)
{-# INLINE trimM #-}
-- | O(1) This function enables us to GC the items that lie on the path to the finger.
--
-- Normally we only do this lazily as the finger moves out of a given area, but if you
-- have particularly burdensome items for the garbage collector it may be worth paying this price.
-- to explicitly allow them to go free.
trim :: WordMap a -> WordMap a
trim = modify trimT
{-# INLINE trim #-}
-- | Move the focus (finger) of a transient map to key @k0@, restructuring the
-- spine arrays so that subsequent operations near @k0@ are cheap.  The three
-- top-level cases are: key already focused; empty spine; and the general case,
-- which keeps the nodes above the divergence level and rebuilds the rest.
focusWithHint :: PrimMonad m => Hint (PrimState m) -> Word64 -> TWordMap (PrimState m) a -> m (TWordMap (PrimState m) a)
focusWithHint hint k0 wm0@(TWordMap ok0 m0 mv0 ns0@(SmallMutableArray ns0#))
  | k0 == ok0 = return wm0 -- keys match, easy money
  | m0 == 0 = case mv0 of
    Nothing -> return (TWordMap k0 0 Nothing emptySmallMutableArray)
    Just v -> do
      -- singleton map: rebuild a one-element spine containing the old tip
      ns <- newSmallArray 1 (TTip ok0 v)
      apply hint ns
      return $! TWordMap k0 (unsafeShiftL 1 (unsafeShiftR (level (xor ok0 k0)) 2)) Nothing ns
  -- kept: mask of spine entries above the level where k0 and ok0 diverge
  | kept <- m0 .&. unsafeShiftL 0xfffe (unsafeShiftR (level (xor ok0 k0)) 2)
  , nkept@(I# nkept#) <- popCount kept
  , top@(I# top#) <- sizeOfSmallMutableArray ns0 - nkept
  = do
    root <- replugPathT hint ok0 0 top mv0 ns0
    primitive $ \s -> case go k0 nkept# root s of
      (# s', ms, m#, omv #) -> case copySmallMutableArray# ns0# top# ms (sizeofSmallMutableArray# ms -# nkept#) nkept# s' of -- we're copying nkept
        s'' -> case hint ms s'' of
          s''' -> (# s''', TWordMap k0 (kept .|. W16# m#) omv (SmallMutableArray ms) #)
  where
    -- Descend one level into a child node, then write the current node back
    -- into its slot in the freshly allocated spine array.
    deep :: Word64 -> Int# -> SmallMutableArray# s (TNode s a) -> Int# -> TNode s a -> Int# -> State# s ->
      (# State# s, SmallMutableArray# s (TNode s a), Word#, Maybe a #)
    deep k h# as d# on n# s = case readSmallArray# as d# s of
      (# s', on' #) -> case go k (h# +# 1#) on' s' of
        (# s'', ms, m#, mv #) -> case writeSmallArray# ms (sizeofSmallMutableArray# ms -# h# -# 1#) on s'' of
          s''' -> case unsafeShiftL 1 (unsafeShiftR (I# n#) 2) .|. W16# m# of
            W16# m'# -> (# s''', ms, m'#, mv #)
    -- Allocate a new spine of height h#+1 whose bottom entry is this node.
    shallow :: Int# -> TNode s a -> Int# -> Maybe a -> State# s ->
      (# State# s, SmallMutableArray# s (TNode s a), Word#, Maybe a #)
    shallow h# on n# mv s = case newSmallArray# (h# +# 1#) on s of
      (# s', ms #) -> case unsafeShiftL 1 (unsafeShiftR (I# n#) 2) of
        W16# m# -> (# s', ms, m#, mv #)
    -- Walk down from the root towards k, accumulating the new spine.
    go :: Word64 -> Int# -> TNode s a -> State# s -> (# State# s, SmallMutableArray# s (TNode s a), Word#, Maybe a #)
    go k h# on@(TFull ok n@(I# n#) (SmallMutableArray as)) s
      | wd > 0xf = shallow h# on (unI# (level okk)) Nothing s -- we're a sibling of what we recursed into -- [Displaced TFull]
      | otherwise = deep k h# as (unI# (fromIntegral wd)) on n# s -- Parent TFull : ..
      where !okk = xor k ok
            !wd = unsafeShiftR okk n
    go k h# on@(TNode ok n@(I# n#) m (SmallMutableArray as)) s
      | wd > 0xf = shallow h# on (unI# (level okk)) Nothing s -- [Displaced TNode]
      | !b <- unsafeShiftL 1 (fromIntegral wd), m .&. b /= 0 = deep k h# as (unI# (index m b)) on n# s -- Parent TNode : ..
      | otherwise = shallow h# on n# Nothing s -- [TNode]
      where !okk = xor k ok
            !wd = unsafeShiftR okk n
    go k h# on@(TTip ok v) s
      | k == ok = case newSmallArray# h# undefined s of (# s', ms #) -> (# s', ms, int2Word# 0#, Just v #)
      | otherwise = shallow h# on (unI# (level (xor k ok))) Nothing s -- [Displaced TTip]
-- end focusWithHint
-- | This changes the location of the focus in a transient map. Operations near the focus are considerably cheaper.
--
-- @focusT k wm@ invalidates any unpersisted transient @wm@ it is passed
focusT :: PrimMonad m => Word64 -> TWordMap (PrimState m) a -> m (TWordMap (PrimState m) a)
focusT = focusWithHint warm
{-# INLINE focusT #-}

-- | This changes the location of the focus in a mutable map. Operations near the focus are considerably cheaper.
focusM :: PrimMonad m => Word64 -> MWordMap (PrimState m) a -> m ()
focusM k = modifyM (focusT k)
{-# INLINE focusM #-}

-- | This changes the location of the focus in an immutable map. Operations near the focus are considerably cheaper.
-- Uses the 'cold' hint (contrast 'focusT', which uses 'warm').
focus :: Word64 -> WordMap a -> WordMap a
focus k wm = modify (focusWithHint cold k) wm
{-# INLINE focus #-}
-- | Shared implementation of insertion: move the focus to @k@ and replace the
-- fingered value.  If @k@ is already focused and the new value is pointer-equal
-- to the stored one, the map is returned unchanged.
insertWithHint :: PrimMonad m => Hint (PrimState m) -> Word64 -> a -> TWordMap (PrimState m) a -> m (TWordMap (PrimState m) a)
insertWithHint hint k v wm@(TWordMap ok _ mv _)
  | k == ok, Just ov <- mv, ptrEq v ov = return wm -- no-op: same key, physically identical value
  | otherwise = do
    wm' <- focusWithHint hint k wm
    return $! wm' { transientFingerValue = Just v }
{-# INLINE insertWithHint #-}

-- | Transient insert.
--
-- @insertT k v wm@ invalidates any unpersisted transient @wm@ it is passed
insertT :: PrimMonad m => Word64 -> a -> TWordMap (PrimState m) a -> m (TWordMap (PrimState m) a)
insertT k v wm = insertWithHint warm k v wm
{-# INLINE insertT #-}

-- | Mutable insert.
insertM :: PrimMonad m => Word64 -> a -> MWordMap (PrimState m) a -> m ()
insertM k v mwm = modifyM (insertT k v) mwm
{-# INLINE insertM #-}

-- | Immutable insert (uses the 'cold' hint).
insert :: Word64 -> a -> WordMap a -> WordMap a
insert k v wm = modify (insertWithHint cold k v) wm
{-# INLINE insert #-}
-- | Shared implementation of deletion: move the focus to @k@ and clear the
-- fingered value.
deleteWithHint :: PrimMonad m => Hint (PrimState m) -> Word64 -> TWordMap (PrimState m) a -> m (TWordMap (PrimState m) a)
deleteWithHint hint k wm = do
  wm' <- focusWithHint hint k wm
  return $! wm' { transientFingerValue = Nothing }
{-# INLINE deleteWithHint #-}

-- | Transient delete. @deleteT k v wm@ invalidates any unpersisted transient it is passed.
deleteT :: PrimMonad m => Word64 -> TWordMap (PrimState m) a -> m (TWordMap (PrimState m) a)
deleteT k wm = deleteWithHint warm k wm
{-# INLINE deleteT #-}

-- | Mutable delete.
deleteM :: PrimMonad m => Word64 -> MWordMap (PrimState m) a -> m ()
deleteM k wm = modifyM (deleteT k) wm
{-# INLINE deleteM #-}

-- | Immutable delete, routed through the hint machinery ('deleteWithHint' with 'cold').
delete0 :: Word64 -> WordMap a -> WordMap a
delete0 k wm = modify (deleteWithHint cold k) wm
{-# INLINE delete0 #-}

-- | Immutable delete: focus on the key, then drop the fingered value directly.
delete :: Word64 -> WordMap a -> WordMap a
delete k wm = (focus k wm) { fingerValue = Nothing }
{-# INLINE delete #-}
--------------------------------------------------------------------------------
-- * Instances
--------------------------------------------------------------------------------
-- | Conversion to and from @(key, value)@ association lists
-- (enables @OverloadedLists@).
instance IsList (WordMap a) where
  type Item (WordMap a) = (Word64, a)
  toList = ifoldr (\i a r -> (i, a): r) []
  {-# INLINE toList #-}
  -- Build transiently in ST, then freeze once at the end.
  fromList xs = runST $ do
    o <- fromListT xs
    persistent o
  {-# INLINE fromList #-}
  fromListN _ = fromList
  {-# INLINE fromListN #-}
-- stuff to eventually clean up and reintroduce

-- Lens machinery: keys are 'Word64', values are @a@.
type instance Index (WordMap a) = Word64
type instance IxValue (WordMap a) = a
instance At (WordMap a) where
  -- Focus on the key first so that writing the result back is cheap.
  at k f wm = let c = focus k wm in f (lookup k wm) <&> \mv' -> c { fingerValue = mv' }
  {-# INLINE at #-}
instance Ixed (WordMap a) where
  -- Only traverses an existing entry; absent keys leave the map untouched.
  ix k f wm = case lookup k wm of
    Nothing -> pure wm
    Just v -> let c = focus k wm in f v <&> \v' -> c { fingerValue = Just v' }
  {-# INLINE ix #-}
instance NFData a => NFData (Node a) where
  rnf (Full _ _ a) = rnf a
  rnf (Node _ _ _ a) = rnf a
  rnf (Tip _ v) = rnf v
instance NFData a => NFData (WordMap a) where
  rnf (WordMap _ _ mv as) = rnf mv `seq` rnf as
-- An empty map has a zero mask and no fingered value.
instance AsEmpty (WordMap a) where
  _Empty = prism (const empty) $ \s -> case s of
    WordMap _ 0 Nothing _ -> Right ()
    t -> Left t
instance AsEmpty (TWordMap s a) where
  _Empty = prism (const emptyT) $ \s -> case s of
    TWordMap _ 0 Nothing _ -> Right ()
    t -> Left t
-- Mutable maps are equal when they wrap the same mutable reference.
instance Eq (MWordMap s a) where
  MWordMap m == MWordMap n = m == n
  {-# INLINE (==) #-}
instance FunctorWithIndex Word64 WordMap where
  imap f (WordMap k n mv as) = WordMap k n (fmap (f k) mv) (fmap (imap f) as)
instance FunctorWithIndex Word64 Node where
  imap f (Node k n m as) = Node k n m (fmap (imap f) as)
  imap f (Tip k v) = Tip k (f k v)
  imap f (Full k n as) = Full k n (fmap (imap f) as)
-- Folds go through 'canonical' so the fingered value is merged back first.
instance Foldable WordMap where
  fold wm = foldMap fold (canonical wm)
  foldMap f wm = foldMap (foldMap f) (canonical wm)
  null (WordMap _ 0 Nothing _) = True
  null _ = False
instance FoldableWithIndex Word64 WordMap where
  ifoldMap f wm = foldMap (ifoldMap f) (canonical wm)
instance FoldableWithIndex Word64 Node where
  ifoldMap f (Node _ _ _ as) = foldMap (ifoldMap f) as
  ifoldMap f (Tip k v) = f k v
  ifoldMap f (Full _ _ as) = foldMap (ifoldMap f) as
-- Equality/ordering is by the (sorted) association-list representation.
instance Eq v => Eq (WordMap v) where
  as == bs = Exts.toList as == Exts.toList bs
  {-# INLINE (==) #-}
instance Ord v => Ord (WordMap v) where
  compare as bs = compare (Exts.toList as) (Exts.toList bs)
  {-# INLINE compare #-}
-- TODO: Traversable, TraversableWithIndex Word64 WordMap
-- stToPrim :: PrimMonad m => ST (PrimState m) a -> m a
--stToPrim = primToPrim
--{-# INLINE stToPrim #-}
-- | Build a transient map from an association list (later keys win).
fromListT :: PrimMonad m => [(Word64, a)] -> m (TWordMap (PrimState m) a)
fromListT xs = stToPrim $ foldM (\r (k,v) -> insertT k v r) emptyT xs
{-# INLINE fromListT #-}

-- | Read a transient map out as an association list.
toListT :: PrimMonad m => TWordMap (PrimState m) a -> m [(Word64, a)]
toListT = query Exts.toList
{-# INLINE toListT #-}

-- | Build a mutable map from an association list.
fromListM :: PrimMonad m => [(Word64, a)] -> m (MWordMap (PrimState m) a)
fromListM xs = stToPrim $ do
  o <- fromListT xs
  MWordMap <$> newMutVar o
{-# INLINE fromListM #-}

-- | Read a mutable map out as an association list.
toListM :: PrimMonad m => MWordMap (PrimState m) a -> m [(Word64, a)]
toListM = queryM Exts.toList
{-# INLINE toListM #-}
| ekmett/transients | src/Data/Transient/WordMap/Internal.hs | bsd-2-clause | 27,551 | 2 | 23 | 6,315 | 9,341 | 4,609 | 4,732 | 521 | 6 |
-- Module: ModuleWithCommentsResembilngModuleHeader
--
-- Please note the module ... where part
module ModuleWithCommentsResemblingModuleHeader
  ( foo )
  where
import EmptyModule
-- NOTE(review): test fixture -- the comments surrounding this module
-- deliberately mimic a module header; 'foo' is just a trivial export.
foo :: a -> a
foo x = x
-- where
| sergv/tags-server | test-data/0003module_header_detection/ModuleWithCommentsResemblingModuleHeader.hs | bsd-3-clause | 216 | 0 | 5 | 37 | 33 | 21 | 12 | 5 | 1 |
module Main where
import Control.Monad.Random
import System.Random
import Data.Ord
import Data.VPTree
import Data.VPTree.MetricSpace
-- | A tiny hand-built tree for experimentation.
-- NOTE(review): the invariants of 'Split'/'Leaf' come from 'Data.VPTree'
-- and are not visible here -- confirm this value satisfies them.
example :: VPTree Int
example = Split 0 10 (Leaf [0, 7]) (Leaf [14])
-- | A point in the plane.
data Point = Point Double Double deriving (Show)

-- | Euclidean distance between two points.
--
-- Fixed: the previous definition, @sqrt (x0 * x1 + y0 * y1)@, is not a
-- metric (in general @distance p p /= 0@ and the triangle inequality
-- fails), which breaks the VP-tree invariants that 'MetricSpace' exists
-- to guarantee.
instance MetricSpace Point where
  distance (Point x0 y0) (Point x1 y1) =
    sqrt ((x0 - x1) ^ (2 :: Int) + (y0 - y1) ^ (2 :: Int))
-- | Uniform generation of points; coordinates are drawn independently.
instance Random Point where
  random = runRand $ do
    x <- liftRand random
    y <- liftRand random
    return (Point x y)
  -- Ranges are taken per-axis from the (left,bottom) and (right,top) corners.
  randomR ((Point l b), (Point r t)) = runRand $ do
    x <- liftRand $ randomR (l, r)
    y <- liftRand $ randomR (b, t)
    return (Point x y)
-- | True when the list is in non-decreasing order.
--
-- Fixed: the original recursed on @as@ after comparing @x@ and @y@,
-- skipping every other adjacent pair, so e.g. @[1,3,2,4]@ was reported
-- as sorted.  We must recurse on @(y : as)@ instead.
isSorted :: Ord a => [a] -> Bool
isSorted (x : rest@(y : _))
  | x <= y = isSorted rest
  | otherwise = False
isSorted _ = True
-- | Placeholder entry point; the library is exercised via 'example'.
main :: IO ()
main = putStrLn "Hello, world!"
| tixxit/hillside | app/Main.hs | bsd-3-clause | 831 | 0 | 12 | 222 | 369 | 188 | 181 | 27 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Request
( getFileRequests
, forEachFileRequestIn
, FileRequest
, FileID
, FileSize
, RequestType(..)
) where
import qualified Data.ByteString as B (ByteString, hGetContents)
import qualified Data.ByteString.Char8 as B (lines, readInt, split)
import System.IO
-- | The kind of operation a trace line requests.
-- NOTE(review): no deriving clauses; add Show/Eq here if callers need them.
data RequestType = Read | Write | Remove
-- | One parsed request: operation, file identifier, and size
-- (units assumed from the trace format -- confirm against its spec).
type FileRequest = (RequestType, B.ByteString, FileSize)
type FileID = B.ByteString
type FileSize = Int
-- | Run @function@ over every request parsed from the named trace file.
-- The strict 'Data.ByteString.hGetContents' reads the whole file before the
-- handle is closed, and @$!@ forces the result to WHNF inside 'withFile'.
forEachFileRequestIn :: ([FileRequest] -> a) -> String -> IO a
forEachFileRequestIn function fileName =
  withFile fileName ReadMode (\handle -> do
    operations <- getFileRequests handle
    return $! function operations
    )
-- | Read the whole handle (strict ByteString) and decode each line as a
-- 'FileRequest'.
getFileRequests :: Handle -> IO [FileRequest]
getFileRequests handle =
  fmap (map convert . B.lines) (B.hGetContents handle)
-- | Decode one trace line of the shape @_ _ <operation> <fileID> <fileSize>@
-- (five space-separated fields; the first two are ignored).
--
-- NOTE(review): the let-pattern is partial -- a line with any other number
-- of fields aborts with an irrefutable-pattern failure.
convert :: B.ByteString -> FileRequest
convert line =
  let [_, _, operation, fileID, fileSize] = B.split ' ' line
      accessType = textToAccessType operation
      size = toInt fileSize
  in (accessType, fileID, size)
-- | Map the textual operation field onto a 'RequestType'; anything that is
-- neither @"write"@ nor @"read"@ is treated as a removal.
textToAccessType :: B.ByteString -> RequestType
textToAccessType op
  | op == "write" = Write
  | op == "read"  = Read
  | otherwise     = Remove
-- | Decode an ASCII decimal field into a size.
--
-- Fixed: the original bound the result with an irrefutable @Just@ pattern,
-- so a non-numeric field died with an opaque pattern-match failure; now the
-- error names the function and the offending input.
toInt :: B.ByteString -> FileSize
toInt asByteString =
  case B.readInt asByteString of
    Just (asInt, _) -> asInt
    Nothing -> error ("toInt: not a decimal integer: " ++ show asByteString)
| wochinge/CacheSimulator | src/Request.hs | bsd-3-clause | 1,423 | 0 | 11 | 303 | 404 | 221 | 183 | 39 | 1 |
module Musica.Render.Keyboard (
renderKeyboard
) where
import Data.List (genericLength)
import Graphics.Gloss
-- Nominal key proportions (units are arbitrary; only ratios matter for
-- rendering).  Explicit 'Float' signatures added: gloss works in Float
-- coordinates, and without them these bindings would be defaulted.
whiteHeight, whiteWidth, blackHeight, blackWidth :: Float
whiteHeight = 14
whiteWidth = 2.3
blackHeight = 9
blackWidth = 1.5

-- | Black-key width expressed in the white-key coordinate system.
bww :: Float
bww = 2 * blackWidth / whiteWidth

-- | Black-key height as a fraction of white-key height.
bwh :: Float
bwh = blackHeight / whiteHeight

-- | Aspect ratio of a white key (height / width).
keyRatio :: Float
keyRatio = whiteHeight / whiteWidth
-- Output Picture will be 1 fixed height and width proportional to keys size.
-- | Draw the keyboard over background @bg@; @xs@ marks which keys are
-- currently pressed.  The scale flips the y-axis ('scale ... (-1)') and
-- centres the keyboard horizontally.
renderKeyboard :: Picture ->[Bool] ->Picture
renderKeyboard bg xs = pictures [bg, translate (-0.5 * nteclas / keyRatio) 0 $ scale (0.5 / keyRatio) (-1) $ translate 1 0 $ pictures (blancas ++ negras)]
  where blancas = renderKeyStream odd white 2 1 xs -- white keys sit at odd positions
        negras = renderKeyStream even black bww bwh xs -- black keys at even positions
        nteclas = genericLength blancas -- number of white keys drawn
-- | Render every key whose position satisfies @f@ (odd/even selects white or
-- black at the call sites); a 'True' in @xs@ draws that key in green.
-- @keysPos@ skips positions that are 0 or 6 mod 14 -- presumably the two
-- gaps per octave where no black key exists (TODO confirm).
renderKeyStream :: (Int ->Bool) ->Color ->Float ->Float ->[Bool] ->[Picture]
renderKeyStream f c w h xs = [render k b | (k, b) <-zip keysPos xs, f k]
  where render k b = translate (fromIntegral k - 1) (h / 2)
                   $ pictures [ color (if b then green else c) $ rectangleSolid w h
                              , color black $ rectangleWire w h ]
        keysPos = [i | i <-[1..], let z = i `mod` 14, z /= 0 && z /= 6]
| josejuan/midi-keyboard-player | app/Musica/Render/Keyboard.hs | bsd-3-clause | 1,177 | 0 | 14 | 308 | 439 | 233 | 206 | 22 | 2 |
{-# Language BangPatterns #-}
{- |
Copyright : 2010-2016 Erlend Hamberg
License : BSD3
Stability : experimental
Portability : portable
A framework for simple evolutionary algorithms. Provided with a function for
evaluating a genome's fitness, a function for probabilistic selection among a
pool of genomes, and recombination and mutation operators, 'runEA' will run an
EA that lazily produces an infinite list of generations.
'AI.SimpleEA.Utils' contains utility functions that makes it easier to write
the genetic operators.
-}
module AI.SimpleEA (
runEA
, FitnessFunc
, SelectionFunction
, RecombinationOp
, MutationOp
, Fitness
, Genome
-- * Example Program
-- $SimpleEAExample
) where
import Control.Monad.Random
import System.Random.Mersenne.Pure64
-- | An individual's fitness is simply a number.
type Fitness = Double
-- | A genome is a list (e.g. a 'String').
type Genome a = [a]
-- | A fitness function assigns a fitness score to a genome. The rest of the
-- individuals of that generation is also provided in case the fitness is
-- in proportion to its neighbours.
type FitnessFunc a = Genome a -> [Genome a] -> Fitness
-- | A selection function takes pairs of genomes and their fitness scores and
-- picks one or more individuals to act as parents of the next generation.
type SelectionFunction a = [(Genome a, Fitness)] -> Rand PureMT [Genome a]
-- | A recombination operator takes two /parent/ genomes and returns two
-- /children/.
type RecombinationOp a = (Genome a, Genome a) -> Rand PureMT (Genome a, Genome a)
-- | A mutation operator takes a genome and returns (a possibly altered) copy
-- of it.
type MutationOp a = Genome a -> Rand PureMT (Genome a)
-- | Runs the evolutionary algorithm with the given start population. This will
-- produce an infinite list of generations and 'take' or 'takeWhile' should be
-- used to decide how many generations should be computed. To run a specific
-- number of generations, use 'take':
--
-- > let generations = take 50 $ runEA myFF mySF myROp myMOp myStdGen
--
-- To run until a criterion is met, e.g. that an individual with a fitness of at
-- least 19 is found, 'takeWhile' can be used:
--
-- > let criterion = any id . map (\i -> snd i >= 19.0)
-- > let generations = takeWhile (not . criterion) $ runEA myFF mySF myROp myMOp myStdGen
runEA ::
  [Genome a] ->
  FitnessFunc a ->
  SelectionFunction a ->
  RecombinationOp a ->
  MutationOp a ->
  PureMT ->
  [[(Genome a,Fitness)]]
runEA startPop fitFun selFun recOp mutOp g =
  -- score the initial population, then unfold the lazy generation stream
  let p = zip startPop (map (`fitFun` startPop) startPop)
  in evalRand (generations p selFun fitFun recOp mutOp) g
-- | Produce the (lazy, infinite) stream of generations starting from the
-- already-scored population @pop@: select parents, recombine them pairwise,
-- mutate the offspring, re-score, and recurse.
generations ::
  [(Genome a, Fitness)] ->
  SelectionFunction a ->
  FitnessFunc a ->
  RecombinationOp a ->
  MutationOp a ->
  Rand PureMT [[(Genome a, Fitness)]]
generations !pop selFun fitFun recOp mutOp = do
  parents  <- selFun pop                      -- select parents for the new generation
  children <- doRecombinations parents recOp  -- create offspring via recombination
  mutated  <- mapM mutOp children             -- apply the mutation operator
  let scored = zip mutated (map (`fitFun` mutated) mutated)
  rest <- generations scored selFun fitFun recOp mutOp
  return (pop : rest)
-- | Apply the recombination operator to consecutive pairs of parents.
-- The parent list must have even length; an odd count is a programmer error.
doRecombinations :: [Genome a] -> RecombinationOp a -> Rand PureMT [Genome a]
doRecombinations genomes op = case genomes of
  []  -> return []
  [_] -> error "odd number of parents"
  (p1 : p2 : more) -> do
    (c1, c2) <- op (p1, p2)
    siblings <- doRecombinations more op
    return (c1 : c2 : siblings)
{- $SimpleEAExample
The aim of this /OneMax/ EA is to maximize the number of @1@'s in a bitstring.
The fitness of a bitstring is defined to be the number of @1@'s it contains.
>import AI.SimpleEA
>import AI.SimpleEA.Utils
>
>import System.Random.Mersenne.Pure64
>import Control.Monad.Random
>import Data.List
>import System.Environment (getArgs)
>import Control.Monad (unless)
The @numOnes@ function will function as our 'FitnessFunc' and simply returns
the number of @1@'s in the string. It ignores the rest of the population (the
second parameter) since the fitness is not relative to the other individuals in
the generation.
>numOnes :: FitnessFunc Char
>numOnes g _ = (fromIntegral . length . filter (=='1')) g
The @select@ function is our 'SelectionFunction'. It uses sigma-scaled,
fitness-proportionate selection. 'sigmaScale' is defined in
'AI.SimpleEA.Utils'. By first taking the four best genomes (by using the
@elite@ function) we make sure that maximum fitness never decreases
('elitism').
>select :: SelectionFunction Char
>select gs = select' (take 4 $ elite gs)
> where scaled = zip (map fst gs) (sigmaScale (map snd gs))
> select' gs' =
> if length gs' >= length gs
> then return gs'
> else do
> p1 <- fitPropSelect scaled
> p2 <- fitPropSelect scaled
> let newPop = p1:p2:gs'
> select' newPop
Crossover is done by finding a crossover point along the length of the genomes
and swapping what comes after that point between the two genomes. The parameter
@p@ determines the likelihood of crossover taking place.
>crossOver :: Double -> RecombinationOp Char
>crossOver p (g1,g2) = do
> t <- getRandomR (0.0, 1.0)
> if t < p
> then do
> r <- getRandomR (0, length g1-1)
> return (take r g1 ++ drop r g2, take r g2 ++ drop r g1)
> else return (g1,g2)
The mutation operator @mutate@ flips a random bit along the length of the
genome with probability @p@.
>mutate :: Double -> MutationOp Char
>mutate p g = do
> t <- getRandomR (0.0, 1.0)
> if t < p
> then do
> r <- getRandomR (0, length g-1)
> return (take r g ++ flipBit (g !! r) : drop (r+1) g)
> else return g
> where
> flipBit '0' = '1'
> flipBit '1' = '0'
The @main@ function creates a list of 100 random genomes (bit-strings) of length
20 and then runs the EA for 100 generations (101 generations including the
random starting population). Average and maximum fitness values and standard
deviation is then calculated for each generation and written to a file if a file
name was provided as a parameter. This data can then be plotted with, e.g.
gnuplot (<http://www.gnuplot.info/>).
>main = do
> args <- getArgs
> g <- newPureMT
> let (p,g') = runRand (randomGenomes 100 20 '0' '1') g
> let gs = take 101 $ runEA p numOnes select (crossOver 0.75) (mutate 0.01) g'
> let fs = avgFitnesses gs
> let ms = maxFitnesses gs
> let ds = stdDeviations gs
> mapM_ print $ zip5 gs [1..] fs ms ds
> unless (null args) $ writeFile (head args) $ getPlottingData gs
-}
| ehamberg/simpleea | AI/SimpleEA.hs | bsd-3-clause | 6,826 | 0 | 14 | 1,536 | 655 | 354 | 301 | 49 | 1 |
-- | Main compiler executable.
module Main where
import Fay
import Paths_fay (version)
import Control.Monad
import Data.List.Split (wordsBy)
import Data.Maybe
import Data.Version (showVersion)
import Options.Applicative
import Options.Applicative.Types
import qualified Control.Exception as E
-- | Options and help.
--
-- NOTE: field order must match the applicative chain in 'options'.
data FayCompilerOptions = FayCompilerOptions
  { optLibrary :: Bool -- ^ don't auto-call @main@ in the generated JS
  , optFlattenApps :: Bool
  , optHTMLWrapper :: Bool -- ^ emit an HTML page that loads the JS
  , optHTMLJSLibs :: [String]
  , optInclude :: [String] -- ^ extra include directories
  , optPackages :: [String]
  , optWall :: Bool -- ^ typecheck with -Wall
  , optNoGHC :: Bool -- ^ skip GHC typechecking
  , optStdout :: Bool
  , optVersion :: Bool
  , optOutput :: Maybe String -- ^ explicit output file (-o)
  , optPretty :: Bool
  , optOptimize :: Bool
  , optGClosure :: Bool
  , optPackageConf :: Maybe String
  , optNoRTS :: Bool
  , optNoStdlib :: Bool
  , optPrintRuntime :: Bool
  , optStdlibOnly :: Bool
  , optBasePath :: Maybe FilePath
  , optStrict :: [String]
  , optTypecheckOnly :: Bool
  , optRuntimePath :: Maybe FilePath
  , optSourceMap :: Bool
  , optFiles :: [String] -- ^ the .hs files to compile
  , optNoOptimizeNewtypes :: Bool
  }
-- | Main entry point.
main :: IO ()
main = do
  config' <- defaultConfigWithSandbox
  opts <- execParser parser
  -- Fold the parsed CLI flags into the sandbox-aware default configuration.
  let config = addConfigDirectoryIncludePaths ("." : optInclude opts) $
        addConfigPackages (optPackages opts) $ config'
        { configOptimize = optOptimize opts
        , configFlattenApps = optFlattenApps opts
        , configPrettyPrint = optPretty opts
        , configLibrary = optLibrary opts
        , configHtmlWrapper = optHTMLWrapper opts
        , configHtmlJSLibs = optHTMLJSLibs opts
        , configTypecheck = not $ optNoGHC opts
        , configWall = optWall opts
        , configGClosure = optGClosure opts
        , configPackageConf = optPackageConf opts <|> configPackageConf config'
        , configExportRuntime = not (optNoRTS opts)
        , configExportStdlib = not (optNoStdlib opts)
        , configExportStdlibOnly = optStdlibOnly opts
        , configBasePath = optBasePath opts
        , configStrict = optStrict opts
        , configTypecheckOnly = optTypecheckOnly opts
        , configRuntimePath = optRuntimePath opts
        , configSourceMap = optSourceMap opts
        , configOptimizeNewtypes = not $ optNoOptimizeNewtypes opts
        }
  -- --version and --print-runtime short-circuit normal compilation.
  if optVersion opts
    then runCommandVersion
    else if optPrintRuntime opts
      then getConfigRuntime config >>= readFile >>= putStr
      else do
        void $ incompatible htmlAndStdout opts "Html wrapping and stdout are incompatible"
        case optFiles opts of
          [] -> putStrLn $ helpTxt ++ "\n  More information: fay --help"
          files -> forM_ files $ \file ->
            compileFromTo config file (if optStdout opts then Nothing else Just (outPutFile opts file))
  where
    parser = info (helper <*> options) (fullDesc <> header helpTxt)

-- | Output path for a source file: the explicit -o flag if given,
-- otherwise the input name with a .js extension.
outPutFile :: FayCompilerOptions -> String -> FilePath
outPutFile opts file = fromMaybe (toJsName file) $ optOutput opts
-- | All Fay's command-line options.
--
-- The order of the applicative chain must match the field order of
-- 'FayCompilerOptions'.
options :: Parser FayCompilerOptions
options = FayCompilerOptions
  <$> switch (long "library" <> help "Don't automatically call main in generated JavaScript")
  <*> switch (long "flatten-apps" <> help "flatten function applicaton")
  <*> switch (long "html-wrapper" <> help "Create an html file that loads the javascript")
  <*> strsOption (long "html-js-lib" <> metavar "file1[, ..]"
      <> help "javascript files to add to <head> if using option html-wrapper")
  <*> strsOption (long "include" <> metavar "dir1[, ..]"
      <> help "additional directories for include")
  <*> strsOption (long "package" <> metavar "package[, ..]"
      <> help "packages to use for compilation")
  <*> switch (long "Wall" <> help "Typecheck with -Wall")
  <*> switch (long "no-ghc" <> help "Don't typecheck, specify when not working with files")
  <*> switch (long "stdout" <> short 's' <> help "Output to stdout")
  <*> switch (long "version" <> help "Output version number")
  <*> optional (strOption (long "output" <> short 'o' <> metavar "file" <> help "Output to specified file"))
  <*> switch (long "pretty" <> short 'p' <> help "Pretty print the output")
  <*> switch (long "optimize" <> short 'O' <> help "Apply optimizations to generated code")
  <*> switch (long "closure" <> help "Provide help with Google Closure")
  <*> optional (strOption (long "package-conf" <> help "Specify the Cabal package config file"))
  <*> switch (long "no-rts" <> short 'r' <> help "Don't export the RTS")
  <*> switch (long "no-stdlib" <> help "Don't generate code for the Prelude/FFI")
  <*> switch (long "print-runtime" <> help "Print the runtime JS source to stdout")
  <*> switch (long "stdlib" <> help "Only output the stdlib")
  <*> optional (strOption $ long "base-path" <> help "If fay can't find the sources of fay-base you can use this to provide the path. Use --base-path ~/example instead of --base-path=~/example to make sure ~ is expanded properly")
  <*> strsOption (long "strict" <> metavar "modulename[, ..]"
      <> help "Generate strict and uncurried exports for the supplied modules. Simplifies calling Fay from JS")
  <*> switch (long "typecheck-only" <> help "Only invoke GHC for typechecking, don't produce any output")
  <*> optional (strOption $ long "runtime-path" <> help "Custom path to the runtime so you don't have to reinstall fay when modifying it")
  <*> switch (long "sourcemap" <> help "Produce a source map in <outfile>.map")
  <*> many (argument Just (metavar "<hs-file>..."))
  <*> switch (long "no-optimized-newtypes" <> help "Remove optimizations for newtypes, treating them as normal data types")
  where
    -- Comma-separated list arguments, defaulting to [].
    strsOption :: Mod OptionFields [String] -> Parser [String]
    strsOption m = option (ReadM . Right . wordsBy (== ',')) (m <> value [])
-- | Make incompatible options.
--
-- NOTE(review): this uses 'E.throw' (an imprecise exception) from a pure
-- context rather than a monad-specific failure, so the error only surfaces
-- when the returned action is forced/run -- confirm this is intended for
-- monads other than IO.
incompatible :: Monad m
  => (FayCompilerOptions -> Bool)
  -> FayCompilerOptions -> String -> m Bool
incompatible test opts message = if test opts
  then E.throw $ userError message
  else return True
-- | The basic help text, also used as the optparse-applicative header.
helpTxt :: String
helpTxt = concat
  ["fay -- The fay compiler from (a proper subset of) Haskell to Javascript\n\n"
  ,"  fay <hs-file>... processes each .hs file"
  ]
-- | Print the command version (taken from the cabal-generated Paths_fay).
runCommandVersion :: IO ()
runCommandVersion = putStrLn $ "fay " ++ showVersion version
-- | True when the user asked for both an HTML wrapper and stdout output,
-- a combination 'main' rejects.
htmlAndStdout :: FayCompilerOptions -> Bool
htmlAndStdout opts
  | optHTMLWrapper opts = optStdout opts
  | otherwise           = False
| fpco/fay | src/main/Main.hs | bsd-3-clause | 6,933 | 0 | 34 | 1,810 | 1,563 | 798 | 765 | 123 | 5 |
{-# LANGUAGE GADTs, ExistentialQuantification, StandaloneDeriving, BangPatterns #-}
module Data.MatFile where
import Data.Binary
import Data.Binary.Get
import Data.Binary.IEEE754
import Data.Text.Encoding
import Data.Text hiding (map, zipWith)
import Control.Monad (replicateM)
import Data.Int
import Data.Word
import Codec.Compression.GZip (decompress)
import Data.Bits (testBit, (.&.))
import qualified Data.Map (fromList, Map(..))
import Data.List (elem)
import Data.Complex
import GHC.Float (float2Double)
import Foreign.Storable (Storable)
import qualified Data.ByteString.Lazy as LBS (takeWhile, length, pack, toStrict)
import qualified Data.ByteString as BS (takeWhile, length, pack)
import Data.Typeable (Typeable(..))
import Debug.Trace
-- | Parsing in either little-endian or big-endian mode
data Endian = LE
            | BE
            deriving (Show, Eq)
-- | One decoded MAT-file data element; constructor names mirror the
-- file format's @mi*@ type tags.
data DataType = MiInt8 [Int8]
              | MiUInt8 [Word8]
              | MiInt16 [Int16]
              | MiUInt16 [Word16]
              | MiInt32 [Int32]
              | MiUInt32 [Word32]
              | MiInt64 [Int64]
              | MiUInt64 [Word64]
              | MiSingle [Float]
              | MiDouble [Double]
              | MiMatrix ArrayType
              | MiUtf8 Text
              | MiUtf16 Text
              | MiUtf32 Text
              | MiComplex [Complex Double]
              deriving (Show)
-- | A decoded MATLAB array (the payload of an @miMATRIX@ element).
-- Sparse variants existentially hide the element type behind class
-- constraints; sparse maps are keyed by 1-based (row, column).
data ArrayType = NumericArray Text [Int] DataType -- Name, dimensions and values
               | forall a. (Integral a, Storable a, Show a, Eq a, Typeable a) => SparseIntArray Text [Int] (Data.Map.Map (Word32, Word32) a)-- Name, dimensions
               | forall a. (RealFrac a, Storable a, Show a, Eq a, Typeable a) => SparseFloatArray Text [Int] (Data.Map.Map (Word32, Word32) a)
               | SparseComplexArray Text [Int] (Data.Map.Map (Word32, Word32) (Complex Double))
               | CellArray Text [Int] [ArrayType]
               | StructureArray Text [Int] [ArrayType]
               | ObjectArray Text Text [Int] [ArrayType]
-- | Hand-written Show: existential fields rule out deriving.  Each variant
-- renders as its constructor name followed by the shown fields, separated
-- by single spaces.
instance Show ArrayType where
  show arr = case arr of
    NumericArray t dim dt       -> render "NumericArray" [show t, show dim, show dt]
    SparseIntArray t dim dt     -> render "SparseIntArray" [show t, show dim, show dt]
    SparseFloatArray t dim dt   -> render "SparseFloatArray" [show t, show dim, show dt]
    SparseComplexArray t dim dt -> render "SparseComplexArray" [show t, show dim, show dt]
    CellArray t dim dt          -> render "CellArray" [show t, show dim, show dt]
    StructureArray t dim dt     -> render "StructureArray" [show t, show dim, show dt]
    ObjectArray t cname dim dt  -> render "ObjectArray" [show t, show cname, show dim, show dt]
    -- comprehension avoids unqualified names (concat, unwords, ...) that
    -- would clash with the Data.Text import in this module
    where render tag parts = tag ++ [c | p <- parts, c <- ' ' : p]
-- | Widen any list-of-numbers element to @[Double]@ (added the missing
-- type signature).
--
-- Intentionally partial: 'MiMatrix', the @MiUtf*@ and 'MiComplex'
-- constructors are not numeric vectors and fall through to a
-- pattern-match failure if passed here.
toDoubles :: DataType -> [Double]
toDoubles (MiInt8 x) = map fromIntegral x
toDoubles (MiUInt8 x) = map fromIntegral x
toDoubles (MiInt16 x) = map fromIntegral x
toDoubles (MiUInt16 x) = map fromIntegral x
toDoubles (MiInt32 x) = map fromIntegral x
toDoubles (MiUInt32 x) = map fromIntegral x
toDoubles (MiInt64 x) = map fromIntegral x
toDoubles (MiUInt64 x) = map fromIntegral x
toDoubles (MiSingle x) = map float2Double x
toDoubles (MiDouble x) = x
-- | Parse an entire .mat file: the 128-byte header followed by all
-- top-level data elements.
getMatFile = do
  endian <- getHeader
  case endian of
    LE -> bodyLe
    -- Fixed: big-endian parsing is unimplemented; fail with a clear parser
    -- error instead of crashing on 'undefined'.
    BE -> fail "getMatFile: big-endian .mat files are not supported"
-- | Parse little-endian data elements until end of input, realigning to an
-- 8-byte boundary before each one.  (Replaced the case-on-Bool with the
-- idiomatic @if@.)
bodyLe = do
  align
  emp <- isEmpty
  if emp
    then return []
    else do
      field <- leDataField
      fmap (field :) bodyLe
-- | Skip forward to the next 8-byte boundary; MAT data elements are
-- 64-bit aligned.
align = do
  bytes <- bytesRead
  skip $ (8 - (fromIntegral bytes `mod` 8)) `mod` 8
-- | Read the 128-byte MAT header: 124 bytes of descriptive text are
-- skipped, then the version word and the endianness indicator decide how
-- the rest of the file is parsed.
getHeader = do
  skip 124
  version <- getWord16le
  endian <- getWord16le
  case (version, endian) of
    (0x0100, 0x4d49) -> return LE -- indicator reads "MI" little-endian
    (0x0001, 0x494d) -> return BE -- byte-swapped: the file is big-endian
    _ -> fail "Not a .mat file"

-- | Like 'getHeader' but only accepts big-endian files.
beHeader = do
  skip 124
  version <- getWord16be
  endian <- getWord16be
  case (version, endian) of
    (0x0100, 0x4d49) -> return ()
    _ -> fail "Not a big-endian .mat file"
-- | Parses a data field from the file. In general a data field of the numeric types will be an array (list in Haskell)
--
-- A "small data element" packs the type and length into a single 32-bit
-- word (nonzero upper 16 bits); otherwise the tag is two full words.
-- The numeric codes dispatched below are the MAT-file @mi*@ type tags.
leDataField = do
  align
  smallDataElementCheck <- fmap (.&. 0xffff0000) $ lookAhead getWord32le
  (dataType, length) <- case smallDataElementCheck of
    0 -> normalDataElement
    _ -> smallDataElement
  res <- case dataType of
    1 -> getMiInt8 length
    2 -> getMiUInt8 length
    3 -> getMiInt16le length
    4 -> getMiUInt16le length
    5 -> getMiInt32le length
    6 -> getMiUInt32le length
    7 -> getMiSingleLe length
    --8
    9 -> getMiDoubleLe length
    --10
    --11
    12 -> getMiInt64le length
    13 -> getMiUInt64le length
    14 -> getMatrixLe length
    15 -> getCompressedLe length
    16 -> getUtf8 length
    17 -> getUtf16le length
    18 -> getUtf32le length
  return res
  where
    -- type in the low 16 bits, byte count in the high 16 bits of one word
    smallDataElement = do
      dataType <- getWord16le
      length <- getWord16le
      return (fromIntegral dataType, fromIntegral length)
    -- full 8-byte tag: a type word followed by a length word
    normalDataElement = do
      dataType <- getWord32le
      length <- getWord32le
      return (fromIntegral dataType, fromIntegral length)
-- | Parse an @miMATRIX@ element.  Peeks at the array-flags subelement to
-- decide which concrete decoder to use; the low byte of the flags word is
-- the array class.  The length argument is unused (subelements carry their
-- own lengths).
getMatrixLe _ = do
  align
  flagsArray <- lookAhead leDataField
  case flagsArray of
    MiUInt32 (flags : _) -> do
      let arrayClass = flags .&. 0xFF
      case arrayClass of
        -- classes 4,6-15 are all plain numeric/char arrays
        a | elem a [4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] -> getNumericMatrixLe a
          | a == 1 -> getCellArrayLe
          | a == 2 -> getStructureLe
          | a == 3 -> getObjectLe
          | a == 5 -> getSparseArrayLe a
    _ -> fail "Invalid matrix flags"
-- | Parse a dense numeric array: flags, dimensions, name, then the real
-- part, and for complex arrays an imaginary part as well.
getNumericMatrixLe arrayType = do
  MiUInt32 (flags:_) <- leDataField
  let (complex, global, logical) = extractFlags flags
  MiInt32 dimensions <- leDataField
  name <- getArrayName
  real <- fmap (promoteArrayValues arrayType) leDataField
  case complex of
    False -> return $ MiMatrix $ NumericArray name (Prelude.map fromIntegral dimensions) real
    True -> do
      c <- fmap toDoubles leDataField -- Since Haskell only has a complex type for doubles, we automatically convert to doubles
      let r = toDoubles real
          -- NB: this 'complex' shadows the Bool bound from the flags above
          complex = MiComplex $ zipWith (:+) r c
      return $ MiMatrix $ NumericArray name (map fromIntegral dimensions) complex
-- | Parse a sparse array (compressed-column storage): flags, dimensions,
-- name, row indices, column start indices, then the values.  Indices are
-- converted to 1-based (row, column) map keys.
getSparseArrayLe arrayType = do
  MiUInt32 (flags:_) <- leDataField
  let (complex, global, logical) = extractFlags flags
  MiInt32 dimensions <- leDataField
  name <- getArrayName
  MiInt32 rowIndices <- leDataField
  MiInt32 colIndices <- leDataField
  real <- fmap (promoteArrayValues arrayType) leDataField
  case complex of
    False -> do
      return $ MiMatrix $ makeArrayType real name dimensions (map (+ 1) rowIndices) $ processCol 1 colIndices
    True -> do
      c <- fmap toDoubles leDataField
      let r = toDoubles real
          -- NB: shadows the Bool 'complex' bound above
          complex = zipWith (:+) r c
      return $ MiMatrix $ SparseComplexArray name (map fromIntegral dimensions) $ buildMap rowIndices colIndices complex
  where
    combineIndices row col real = ((fromIntegral row, fromIntegral col), real)
    buildMap row col val = Data.Map.fromList (zipWith3 combineIndices row col val)
    makeIntArrayType ints name dimensions rowIndices colIndices =
      SparseIntArray name (map fromIntegral dimensions) $ buildMap rowIndices colIndices ints
    makeFloatArrayType floats name dimensions rowIndices colIndices =
      SparseFloatArray name (map fromIntegral dimensions) $ buildMap rowIndices colIndices floats
    -- dispatch on the element type: integral vs floating sparse arrays
    makeArrayType (MiInt8 x) = makeIntArrayType x
    makeArrayType (MiUInt8 x) = makeIntArrayType x
    makeArrayType (MiInt16 x) = makeIntArrayType x
    makeArrayType (MiUInt16 x) = makeIntArrayType x
    makeArrayType (MiInt32 x) = makeIntArrayType x
    makeArrayType (MiUInt32 x) = makeIntArrayType x
    makeArrayType (MiInt64 x) = makeIntArrayType x
    makeArrayType (MiUInt64 x) = makeIntArrayType x
    makeArrayType (MiSingle x) = makeFloatArrayType x
    makeArrayType (MiDouble x) = makeFloatArrayType x
    -- processCol replicates column entries because Matlab only includes unique column entries. The last item doesn't matter
    processCol j (x:y:xs) | x < y - 1 = Prelude.replicate (fromIntegral (y - x)) j ++ processCol (j+1) (y:xs)
    processCol j (x:y:xs) | x == y = processCol (j+1) (y:xs)
    processCol j (x:[]) = []
    processCol j (x:xs) = j : processCol (j+1) xs
-- | Parse a cell array: one nested miMATRIX element per cell, with
-- @product dimensions@ cells in total.  The 8-byte skip drops each nested
-- element's tag before re-entering 'getMatrixLe'.
getCellArrayLe = do
  MiUInt32 (flags:_) <- leDataField
  let (complex, global, logical) = extractFlags flags
  MiInt32 dimensions <- leDataField
  let entries = fromIntegral $ product dimensions
  name <- getArrayName
  matrices <- replicateM entries (fmap removeMiMatrix $ skip 8 >> getMatrixLe undefined)
  return $ MiMatrix $ CellArray name (map fromIntegral dimensions) matrices
  where
    -- partial by construction: getMatrixLe always returns MiMatrix
    removeMiMatrix (MiMatrix arrayType) = arrayType
-- | Decode the field-name subelement of a structure: a flat byte buffer of
-- fixed-width, NUL-padded names, each @fieldNameLength@ bytes wide.
getStructureFieldNames fieldNameLength = do
  MiInt8 nameData <- leDataField
  let nameDataBytes = LBS.pack $ (map fromIntegral nameData)
  let names = flip runGet nameDataBytes $
        replicateM (fromIntegral (LBS.length nameDataBytes) `div` fromIntegral fieldNameLength) getName
  return names
  where
    -- read one fixed-width slot and strip the NUL padding
    getName = do
      bytes <- getByteString $ fromIntegral fieldNameLength
      let nameBytes = BS.takeWhile (/= 0) bytes
      return $ decodeUtf8 nameBytes
-- | Shared parser for structure and object arrays.  'classAction' parses
-- the class name (for objects) or is a dummy action (for structures).
-- Returns (array name, class name, dimensions, fields relabelled with
-- their structure field names).
getStructureHelperLe classAction = do
  MiUInt32 (flags:_) <- leDataField
  let (complex, global, logical) = extractFlags flags  -- NOTE(review): flags unused here
  MiInt32 dimensions <- leDataField
  name <- getArrayName
  align
  className <- classAction
  align
  loc <- bytesRead  -- NOTE(review): 'loc' is never used; looks like leftover debugging
  temp <- leDataField
  let MiInt32 (fieldNameLength:_) = temp
  names <- getStructureFieldNames fieldNameLength
  -- One embedded data field per field name, in the same order as the names.
  fields <- replicateM (Prelude.length names) leDataField
  return $ (name, className, map fromIntegral dimensions, zipWith renameField names fields)
  where
    -- Replace the embedded array's own name with the structure field name.
    renameField name (MiMatrix (NumericArray _ dim dt)) = NumericArray name dim dt
    renameField name (MiMatrix (SparseIntArray _ dim dt)) = SparseIntArray name dim dt
    renameField name (MiMatrix (SparseFloatArray _ dim dt)) = SparseFloatArray name dim dt
    renameField name (MiMatrix (SparseComplexArray _ dim dt)) = SparseComplexArray name dim dt
    renameField name (MiMatrix (CellArray _ dim dt)) = CellArray name dim dt
    renameField name (MiMatrix (StructureArray _ dim dt)) = StructureArray name dim dt
    renameField name (MiMatrix (ObjectArray _ className dim dt)) = ObjectArray name className dim dt
-- | Parse a structure array.  Structures carry no class name, so the
-- shared helper is given a dummy class action.
getStructureLe =
  fmap wrap (getStructureHelperLe (return undefined))
  where
    wrap (name, _, dim, fields) = MiMatrix (StructureArray name dim fields)
-- | Parse an object array: like a structure, but with a class name read
-- via 'getArrayName'.
getObjectLe =
  fmap wrap (getStructureHelperLe getArrayName)
  where
    wrap (name, className, dim, fields) = MiMatrix (ObjectArray name className dim fields)
-- | Decode the Matlab array-flags word into (complex, global, logical),
-- taken from bits 11, 10 and 9 respectively.
extractFlags word = (flag 11, flag 10, flag 9)
  where
    flag = testBit word
-- | Read the array-name data field and turn its Int8 payload into 'Text'.
-- NOTE(review): conversion is per-byte via toEnum/fromEnum (Latin-1), not
-- UTF-8 decoding -- confirm Matlab array names are always ASCII.
getArrayName :: Get Text
getArrayName = do
  MiInt8 name <- leDataField
  return $ pack $ Prelude.map (toEnum . fromEnum) name
-- | Parse a compressed element: take 'bytes' bytes, decompress them, and
-- run the ordinary little-endian element parser over the result.
getCompressedLe bytes = do
  element <- fmap decompress $ getLazyByteString $ fromIntegral bytes
  -- Run a nested parse over the decompressed payload; an inner failure is
  -- re-raised in the outer Get via 'fail'.
  let result = runGetOrFail leDataField element
  case result of
    Left (_, _, msg) -> fail msg
    Right (_, _, a) -> return a
-- | Parsers for the fixed-width numeric payload types.  Each takes the
-- payload size in bytes and reads size-divided-by-element-width values.
-- (Cleanup: the original bound a local called @length@, shadowing
-- 'Prelude.length'; the element counts are now inlined, and the
-- inconsistent backtick spacing is normalised.)
getMiInt8 bytes =
  fmap MiInt8 $ replicateM bytes (fmap fromIntegral getWord8)
getMiUInt8 bytes =
  fmap MiUInt8 $ replicateM bytes getWord8
getMiInt16le bytes =
  fmap MiInt16 $ replicateM (bytes `div` 2) (fmap fromIntegral getWord16le)
getMiUInt16le bytes =
  fmap MiUInt16 $ replicateM (bytes `div` 2) getWord16le
getMiInt32le bytes =
  fmap MiInt32 $ replicateM (bytes `div` 4) (fmap fromIntegral getWord32le)
getMiUInt32le bytes =
  fmap MiUInt32 $ replicateM (bytes `div` 4) getWord32le
getMiInt64le bytes =
  fmap MiInt64 $ replicateM (bytes `div` 8) (fmap fromIntegral getWord64le)
getMiUInt64le bytes =
  fmap MiUInt64 $ replicateM (bytes `div` 8) getWord64le
getMiSingleLe bytes =
  fmap MiSingle $ replicateM (bytes `div` 4) getFloat32le
getMiDoubleLe bytes =
  fmap MiDouble $ replicateM (bytes `div` 8) getFloat64le
-- | Text payload parsers: read exactly 'bytes' bytes and decode with the
-- matching encoding.
getUtf8 bytes =
  fmap (MiUtf8 . decodeUtf8) $ getByteString bytes
getUtf16le bytes =
  fmap (MiUtf16 . decodeUtf16LE) $ getByteString bytes
getUtf32le bytes =
  fmap (MiUtf32 . decodeUtf32LE) $ getByteString bytes
-- Array types can be different from the stored value due to compression.
-- This promotes to the correct type.
-- NOTE(review): the first argument is presumably the Matlab array-class
-- code (mxClassID); confirm the individual mappings against the MAT-file
-- format specification.
promoteArrayValues 4 dataType = promoteTo16UInt dataType
promoteArrayValues 6 dataType = promoteToDouble dataType
promoteArrayValues 7 dataType = promoteToSingle dataType
promoteArrayValues 10 dataType = promoteTo16Int dataType
promoteArrayValues 11 dataType = promoteTo16UInt dataType
promoteArrayValues 12 dataType = promoteTo32Int dataType
promoteArrayValues 13 dataType = promoteTo32UInt dataType
promoteArrayValues 14 dataType = promoteTo64Int dataType
promoteArrayValues 15 dataType = promoteTo64UInt dataType
promoteArrayValues _ dataType = dataType  -- any other class code passes through unchanged
-- NOTE(review): 'promoteFloat' below has no MiSingle/MiDouble cases, so
-- 'promoteToSingle' would pattern-match-fail on data already stored as a
-- float type; 'promoteToDouble' guards those cases explicitly.  Confirm
-- single-stored-as-single never reaches 'promoteToSingle'.
promoteToSingle dataType = MiSingle $ promoteFloat dataType
promoteToDouble (MiSingle v) = MiDouble $ map float2Double v
promoteToDouble v@(MiDouble _) = v
promoteToDouble dataType = MiDouble $ promoteFloat dataType
-- Wrap integer payloads in the width the array class demands.
promoteTo16Int dataType = MiInt16 $ promoteInt dataType
promoteTo16UInt dataType = MiUInt16 $ promoteInt dataType
promoteTo32Int dataType = MiInt32 $ promoteInt dataType
promoteTo32UInt dataType = MiUInt32 $ promoteInt dataType
promoteTo64Int dataType = MiInt64 $ promoteInt dataType
promoteTo64UInt dataType = MiUInt64 $ promoteInt dataType
-- Convert any stored integer payload to the target integral element type.
promoteInt (MiInt8 v) = map fromIntegral v
promoteInt (MiUInt8 v) = map fromIntegral v
promoteInt (MiInt16 v) = map fromIntegral v
promoteInt (MiUInt16 v) = map fromIntegral v
promoteInt (MiInt32 v) = map fromIntegral v
promoteInt (MiUInt32 v) = map fromIntegral v
promoteInt (MiInt64 v) = map fromIntegral v
promoteInt (MiUInt64 v) = map fromIntegral v
-- Convert any stored integer payload to a fractional element type.
promoteFloat (MiInt8 v) = map fromIntegral v
promoteFloat (MiUInt8 v) = map fromIntegral v
promoteFloat (MiInt16 v) = map fromIntegral v
promoteFloat (MiUInt16 v) = map fromIntegral v
promoteFloat (MiInt32 v) = map fromIntegral v
promoteFloat (MiUInt32 v) = map fromIntegral v
promoteFloat (MiInt64 v) = map fromIntegral v
promoteFloat (MiUInt64 v) = map fromIntegral v
| BJTerry/matfile | Data/MatFile.hs | bsd-3-clause | 14,138 | 5 | 22 | 2,954 | 4,811 | 2,365 | 2,446 | 317 | 16 |
import Data.Conduit
import Data.Conduit.Binary
import Data.Conduit.List as C
import Data.Conduit.BZlib
import System.Environment
-- | Benchmark driver: bzip2-compress the file named in the sole
-- command-line argument and discard the output.  The pattern match on
-- 'getArgs' fails unless exactly one argument is supplied.
main :: IO ()
main = do
  [file] <- getArgs
  runResourceT $ sourceFile file =$= bzip2 $$ sinkNull
-- runResourceT $ sourceFile file =$= bunzip2 $$ sinkNull
| tanakh/bzlib-conduit | bench/bench.hs | bsd-3-clause | 289 | 0 | 10 | 46 | 75 | 42 | 33 | 9 | 1 |
module Main where
import Ivory.Tower.Config
import Ivory.OS.FreeRTOS.Tower.STM32
import LDrive.Platforms
import LDrive.Tests.SPI (app)
-- | Build the SPI test Tower application for the configured STM32
-- platform, wiring in the SPI, UART and LED descriptions from the test
-- platform configuration.
main :: IO ()
main = compileTowerSTM32FreeRTOS testplatform_stm32 p $
  app (stm32config_clock . testplatform_stm32)
      testplatform_spi
      testplatform_uart
      testplatform_leds
  where
    -- Read the test platform description from the Tower config options.
    p topts = getConfig topts testPlatformParser
| sorki/odrive | test/SPITest.hs | bsd-3-clause | 406 | 0 | 8 | 88 | 89 | 50 | 39 | 12 | 1 |
{- Test the unit parser
Copyright (c) 2014 Richard Eisenberg
-}
{-# LANGUAGE TemplateHaskell, TypeOperators, CPP #-}
module Tests.Parser where
import Prelude hiding ( lex, exp )
import Text.Parse.Units
import Control.Monad.Reader
import qualified Data.Map.Strict as Map
import Text.Parsec
import Data.Generics
import Language.Haskell.TH
import Test.Tasty
import Test.Tasty.HUnit
-- | Project the 'Left' component of an 'Either', discarding any 'Right'.
leftOnly :: Either a b -> Maybe a
leftOnly = either Just (const Nothing)
----------------------------------------------------------------------
-- TH functions
----------------------------------------------------------------------
-- | Drop module qualifiers from every TH 'Name' in an AST, so that
-- pretty-printing yields unqualified identifiers.
stripModules :: Data a => a -> a
stripModules = everywhere (mkT (mkName . nameBase))
-- | Pretty-print a TH AST with all names unqualified.
pprintUnqualified :: (Ppr a, Data a) => a -> String
pprintUnqualified = pprint . stripModules
----------------------------------------------------------------------
-- Lexer
----------------------------------------------------------------------
-- | Run the unit lexer and render the token stream, or return "error" on
-- a lexical failure (keeps the test table purely string-based).
lexTest :: String -> String
lexTest s =
  case lex s of
    Left _ -> "error"
    Right toks -> show toks
-- | Pairs of lexer input and the expected rendered token stream.
lexTestCases :: [(String, String)]
lexTestCases = [ ( "m", "[m]" )
               , ( "", "[]" )
               , ( "m s", "[m,s]" )
               , ( " m s ", "[m,s]" )
               , ( "m ", "[m]" )
               , ( " m", "[m]" )
               , ( "( m /s", "[(,m,/,s]" )
               , ( "!", "error" )
               , ( "1 2 3", "[1,2,3]" )
               , ( " ", "[]" )
               ]
-- | One test case per lexer input above.
lexTests :: TestTree
lexTests = testGroup "Lexer" $
  map (\(str, out) -> testCase ("`" ++ str ++ "'") $ lexTest str @?= out) lexTestCases
----------------------------------------------------------------------
-- Unit strings
----------------------------------------------------------------------
-- | Unit-string inputs and their expected parses (prefix :@ unit),
-- with "error" for inputs that must be rejected.
unitStringTestCases :: [(String, String)]
unitStringTestCases = [ ("m", "Meter")
                      , ("s", "Second")
                      , ("min", "Minute")
                      , ("km", "Kilo :@ Meter")
                      , ("mm", "Milli :@ Meter")
                      , ("kmin", "Kilo :@ Minute")
                      , ("dam", "error") -- ambiguous!
                      , ("damin", "Deca :@ Minute")
                      , ("ms", "Milli :@ Second")
                      , ("mmin", "Milli :@ Minute")
                      , ("mmm", "error")
                      , ("mmmin", "error")
                      , ("sm", "error")
                      , ("", "error")
                      , ("dak", "error")
                      , ("das", "Deca :@ Second")
                      , ("ds", "Deci :@ Second")
                      , ("daam", "Deca :@ Ampere")
                      , ("kam", "Kilo :@ Ampere")
                      , ("dm", "Deci :@ Meter")
                      ]
-- | Parse a single unit string against 'testSymbolTable' and render the
-- result (or "error").
parseUnitStringTest :: String -> String
parseUnitStringTest s =
  case flip runReader testSymbolTable $ runParserT unitStringParser () "" s of
    Left _ -> "error"
    Right exp -> show exp
-- | One test case per unit string above.
unitStringTests :: TestTree
unitStringTests = testGroup "UnitStrings" $
  map (\(str, out) -> testCase ("`" ++ str ++ "'") $ parseUnitStringTest str @?= out)
      unitStringTestCases
----------------------------------------------------------------------
-- Symbol tables
----------------------------------------------------------------------
-- | 'mkSymbolTable' checks: unambiguous tables build; ambiguous prefix or
-- unit assignments are rejected with a descriptive message.
mkSymbolTableTests :: TestTree
mkSymbolTableTests = testGroup "mkSymbolTable"
  [ testCase "Unambiguous1" (Map.keys (prefixTable testSymbolTable) @?= ["d","da","k","m"])
--  , testCase "Unambiguous2" (Map.keys (unitTable testSymbolTable) @?= ["am","m","min","s"])
  , testCase "AmbigPrefix" (leftOnly (mkSymbolTable [("a",''Milli),("a",''Centi)] ([] :: [(String,Name)])) @?= Just "The label `a' is assigned to the following meanings:\n[\"Tests.Parser.Milli\",\"Tests.Parser.Centi\"]\nThis is ambiguous. Please fix before building a unit parser.")
  , testCase "AmbigUnit" (leftOnly (mkSymbolTable ([] :: [(String,Name)]) [("m",''Meter),("m",''Minute)]) @?= Just "The label `m' is assigned to the following meanings:\n[\"Tests.Parser.Meter\",\"Tests.Parser.Minute\"]\nThis is ambiguous. Please fix before building a unit parser.")
  , testCase "MultiAmbig" (leftOnly (mkSymbolTable [("a",''Milli),("b",''Centi),("b",''Deci),("b",''Kilo),("c",''Atto),("c",''Deca)] [("m",''Meter),("m",''Minute),("s",''Second)]) @?= Just "The label `b' is assigned to the following meanings:\n[\"Tests.Parser.Centi\",\"Tests.Parser.Deci\",\"Tests.Parser.Kilo\"]\nThe label `c' is assigned to the following meanings:\n[\"Tests.Parser.Atto\",\"Tests.Parser.Deca\"]\nThis is ambiguous. Please fix before building a unit parser.")
  ]
-- | The symbol table shared by the parser tests.
-- NOTE: the top-level @Right@ pattern binding is partial; the suite fails
-- at load time if the table were ambiguous.
testSymbolTable :: SymbolTable Name Name
Right testSymbolTable =
  mkSymbolTable (stripModules [ ("k", ''Kilo)
                              , ("da", ''Deca)
                              , ("m", ''Milli)
                              , ("d", ''Deci) ])
                (stripModules [ ("m", ''Meter)
                              , ("s", ''Second)
                              , ("min", ''Minute)
                              , ("am", ''Ampere) ])
-- Dummy prefix and unit types; the tests only ever use their TH 'Name's
-- and pretty-printed spellings.
data Kilo = Kilo
data Deca = Deca
data Centi = Centi
data Milli = Milli
data Deci = Deci
data Atto = Atto
data Meter = Meter
data Second = Second
data Minute = Minute
data Ampere = Ampere
----------------------------------------------------------------------
-- TH conversions, taken from the `units` package
----------------------------------------------------------------------
-- This is silly, but better than rewriting the tests.
-- Note that we can't depend on `units` package, because we want
-- `units` to depend on `units-parser`. Urgh.
-- Stand-ins for the unit-expression constructors of `units`.
data Number = Number
data a :@ b = a :@ b
data a :* b = a :* b
data a :/ b = a :/ b
data a :^ b = a :^ b
data Succ a
data Z = Zero
-- Stand-ins for the singleton builders; only their names get printed.
sPred, sSucc, sZero :: ()
sPred = ()
sSucc = ()
sZero = ()
-- | Parse a unit string and convert the result into a TH expression in
-- the vocabulary of the stand-in types above (Number, :@, :*, :/, :^,
-- sSucc/sPred/sZero for integer exponents).
parseUnitExp :: SymbolTable Name Name -> String -> Either String Exp
parseUnitExp tbl s = to_exp `liftM` parseUnit tbl s -- the Either monad
  where
    to_exp Unity = ConE 'Number
    to_exp (Unit (Just pre) unit) = ConE '(:@) `AppE` of_type pre `AppE` of_type unit
    to_exp (Unit Nothing unit) = of_type unit
    to_exp (Mult e1 e2) = ConE '(:*) `AppE` to_exp e1 `AppE` to_exp e2
    to_exp (Div e1 e2) = ConE '(:/) `AppE` to_exp e1 `AppE` to_exp e2
    to_exp (Pow e i) = ConE '(:^) `AppE` to_exp e `AppE` mk_sing i
    -- Render a unit/prefix name as (undefined :: T).
    of_type :: Name -> Exp
    of_type n = (VarE 'undefined) `SigE` (ConT n)
    -- Peano-encode the exponent with sSucc/sPred applications.
    mk_sing :: Integer -> Exp
    mk_sing n
      | n < 0 = VarE 'sPred `AppE` mk_sing (n + 1)
      | n > 0 = VarE 'sSucc `AppE` mk_sing (n - 1)
      | otherwise = VarE 'sZero
-- | As 'parseUnitExp', but producing a TH type instead of an expression.
parseUnitType :: SymbolTable Name Name -> String -> Either String Type
parseUnitType tbl s = to_type `liftM` parseUnit tbl s -- the Either monad
  where
    to_type Unity = ConT ''Number
    to_type (Unit (Just pre) unit) = ConT ''(:@) `AppT` ConT pre `AppT` ConT unit
    to_type (Unit Nothing unit) = ConT unit
    to_type (Mult e1 e2) = ConT ''(:*) `AppT` to_type e1 `AppT` to_type e2
    to_type (Div e1 e2) = ConT ''(:/) `AppT` to_type e1 `AppT` to_type e2
    to_type (Pow e i) = ConT ''(:^) `AppT` to_type e `AppT` mk_z i
    -- Peano-encode the exponent at the type level.
    mk_z :: Integer -> Type
    mk_z n
      -- NOTE(review): ''Pred has no declaration in this module (only Succ
      -- and Z appear above) -- presumably resolved elsewhere; confirm.
      | n < 0 = ConT ''Pred `AppT` mk_z (n + 1)
      | n > 0 = ConT ''Succ `AppT` mk_z (n - 1)
      | otherwise = ConT 'Zero -- single quote as it's a data constructor!
----------------------------------------------------------------------
-- Overall parser
----------------------------------------------------------------------
-- | Parse a unit string to a TH expression and pretty-print it
-- unqualified (or "error").
parseUnitTest :: String -> String
parseUnitTest s =
  case parseUnitExp testSymbolTable s of
    Left _ -> "error"
    Right exp -> pprintUnqualified exp
-- | Inputs paired with the expected pretty-printed TH expression.
parseTestCases :: [(String, String)]
parseTestCases =
  [ ("m", "undefined :: Meter")
  , ("s", "undefined :: Second")
  , ("ms", "(:@) (undefined :: Milli) (undefined :: Second)")
  , ("mm", "(:@) (undefined :: Milli) (undefined :: Meter)")
  , ("mmm", "error")
  , ("km", "(:@) (undefined :: Kilo) (undefined :: Meter)")
  , ("m s", "(:*) (undefined :: Meter) (undefined :: Second)")
  , ("m/s", "(:/) (undefined :: Meter) (undefined :: Second)")
  , ("m/s^2", "(:/) (undefined :: Meter) ((:^) (undefined :: Second) (sSucc (sSucc sZero)))")
  , ("s/m m", "(:/) (undefined :: Second) ((:*) (undefined :: Meter) (undefined :: Meter))")
  , ("s s/m m", "(:/) ((:*) (undefined :: Second) (undefined :: Second)) ((:*) (undefined :: Meter) (undefined :: Meter))")
  , ("s*s/m*m", "(:*) ((:/) ((:*) (undefined :: Second) (undefined :: Second)) (undefined :: Meter)) (undefined :: Meter)")
  , ("s*s/(m*m)", "(:/) ((:*) (undefined :: Second) (undefined :: Second)) ((:*) (undefined :: Meter) (undefined :: Meter))")
  , ("m^-1", "(:^) (undefined :: Meter) (sPred sZero)")
  , ("m^(-1)", "(:^) (undefined :: Meter) (sPred sZero)")
  , ("m^(-(1))", "(:^) (undefined :: Meter) (sPred sZero)")
  , ("1", "Number")
  , ("1/s", "(:/) Number (undefined :: Second)")
  , ("m 1 m", "(:*) ((:*) (undefined :: Meter) Number) (undefined :: Meter)")
  , (" ", "Number")
  , ("", "Number")
  ]
-- | One test case per expression-level parse above.
parseUnitTests :: TestTree
parseUnitTests = testGroup "ParseUnit" $
  map (\(str, out) -> testCase ("`" ++ str ++ "'") $ parseUnitTest str @?= out)
      parseTestCases
-- | As 'parseUnitTest', but via the type-level conversion.
parseUnitTestT :: String -> String
parseUnitTestT s =
  case parseUnitType testSymbolTable s of
    Left _ -> "error"
    Right exp -> pprintUnqualified exp
-- | Wrap an operator name in parentheses on GHC > 8.2, whose TH pretty
-- printer renders operators parenthesised; identity on older compilers.
op :: String -> String
#if __GLASGOW_HASKELL__ > 802
op s = "(" ++ s ++ ")"
#else
op = id
#endif
-- Pre-rendered operator spellings used in the expected outputs below.
opm, opd, ope, opa :: String
opm = op ":*"
opd = op ":/"
ope = op ":^"
opa = op ":@"
-- | Inputs paired with the expected pretty-printed TH type.
-- (Cleanup: the original listed the ("1/s", ...) case twice; the
-- duplicate ran an identical test and has been removed.)
parseTestCasesT :: [(String, String)]
parseTestCasesT =
  [ ("m", "Meter")
  , ("s", "Second")
  , ("ms", opa ++ " Milli Second")
  , ("mm", opa ++ " Milli Meter")
  , ("mmm", "error")
  , ("km", opa ++ " Kilo Meter")
  , ("m s", opm ++ " Meter Second")
  , ("m/s", opd ++ " Meter Second")
  , ("m/s^2", opd ++ " Meter (" ++ ope ++ " Second (Succ (Succ Zero)))")
  , ("s/m m", opd ++ " Second (" ++ opm ++ " Meter Meter)")
  , ("s s/m m", opd ++ " (" ++ opm ++ " Second Second) (" ++ opm ++ " Meter Meter)")
  , ("s*s/m*m", opm ++ " (" ++ opd ++ " (" ++ opm ++ " Second Second) Meter) Meter")
  , ("s*s/(m*m)", opd ++ " (" ++ opm ++ " Second Second) (" ++ opm ++ " Meter Meter)")
  , ("m^-1", ope ++ " Meter (Pred Zero)")
  , ("m^(-1)", ope ++ " Meter (Pred Zero)")
  , ("m^(-(1))", ope ++ " Meter (Pred Zero)")
  , ("1", "Number")
  , ("1/s", opd ++ " Number Second")
  , ("m 1 m", opm ++ " (" ++ opm ++ " Meter Number) Meter")
  , (" ", "Number")
  , ("", "Number")
  ]
-- | One test case per type-level parse above.
parseUnitTestsT :: TestTree
parseUnitTestsT = testGroup "ParseUnitType" $
  map (\(str, out) -> testCase ("`" ++ str ++ "'") $ parseUnitTestT str @?= out)
      parseTestCasesT
----------------------------------------------------------------------
-- Conclusion
----------------------------------------------------------------------
-- | All parser test groups, bundled for the test runner.
tests :: TestTree
tests = testGroup "Parser"
  [ lexTests
  , mkSymbolTableTests
  , unitStringTests
  , parseUnitTests
  , parseUnitTestsT
  ]
main :: IO ()
main = defaultMain tests
| adamgundry/units-parser | Tests/Parser.hs | bsd-3-clause | 11,335 | 0 | 15 | 2,896 | 2,980 | 1,711 | 1,269 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Test.HUnit.SafeCopy
( testSafeCopy
, MissingFilesPolicy(..)
) where
-------------------------------------------------------------------------------
import Control.Monad (unless)
import qualified Data.ByteString as BS
import Data.List (intercalate, stripPrefix, (\\))
import qualified Data.SafeCopy as SC
import qualified Data.SafeCopy.Internal as SCI
import qualified Data.Serialize as S
import qualified Path as P
import qualified Path.Internal as PI
import qualified Path.IO as PIO
import qualified System.IO as SIO
import qualified Test.HUnit as HU
import Text.Read (readMaybe)
-------------------------------------------------------------------------------
-- | Safecopy provides a list of all supported versions. If we are
-- unable to find a corresponding file for a version, what should we
-- do? A missing file typically means you forgot to run tests when you
-- were at one of the versions of @a@.
data MissingFilesPolicy = IgnoreMissingFiles -- ^ Silently skip versions with no golden file.
                        | WarnMissingFiles   -- ^ Report missing versions on stderr, keep going.
                        | FailMissingFiles   -- ^ Fail the assertion on any missing version.
                        deriving (Show, Eq)
-------------------------------------------------------------------------------
testSafeCopy
  :: forall a b. (SC.SafeCopy a, Eq a, Show a)
  => MissingFilesPolicy
  -> P.Path b P.File
  -- ^ Base filename, e.g. @test/data/MyType.golden@. Will be
  -- postfixed with each version, e.g. @MyType.safecopy.1@,
  -- @MyType.safecopy.2@, etc. If its a primitive value, versioning
  -- is not supported and thus no extension will be added.
  -> a
  -- ^ The current value that all past versions of this file must upgrade to.
  -> HU.Assertion
testSafeCopy missingFilesPolicy baseFile a = do
  case SC.objectProfile :: SC.Profile a of
    SC.PrimitiveProfile -> do
      -- Primitives are unversioned: one golden file, no extension.
      assertLatest baseFile
    SC.InvalidProfile e -> HU.assertFailure e
    SC.Profile currentVersion supportedVersions -> do
      let versions = (SCI.Version <$> supportedVersions) :: [SC.Version a]
      let currentFile = mkVersionPath (SCI.Version currentVersion) baseFile
      -- Always ensure a golden file exists for the current version.
      dumpVersionUnlessExists currentFile a
      files <- discoverSafeCopyFiles baseFile
      let missingVersions = versions \\ (scfVersion <$> files)
      -- Supported versions with no golden file on disk are handled per
      -- the caller's 'MissingFilesPolicy'.
      unless (null missingVersions) $ do
        let msg = ("Missing files for the following versions: " ++ intercalate "," (show . SCI.unVersion <$> missingVersions))
        case missingFilesPolicy of
          IgnoreMissingFiles -> return ()
          WarnMissingFiles -> SIO.hPutStrLn SIO.stderr msg
          FailMissingFiles -> HU.assertFailure msg
      -- TODO: check versions
      -- Every discovered golden file must decode (migrating if old) to 'a'.
      mapM_ (\f -> assertFile (scfPath f) a) files
  where
    -- Write the golden file if absent, then assert it decodes to 'a'.
    assertLatest f = do
      dumpVersionUnlessExists f a
      assertFile f a
-------------------------------------------------------------------------------
-- | A golden file on disk paired with the safecopy version parsed from its
-- filename extension.
data SafeCopyFile rel a = SafeCopyFile
  { scfPath :: P.Path rel P.File
  , scfVersion :: SC.Version a
  } deriving (Show)
-------------------------------------------------------------------------------
-- | Append @.\<version\>@ to a path, e.g. @MyType.golden@ -> @MyType.golden.2@.
mkVersionPath :: SC.Version a -> P.Path rel P.File -> P.Path rel P.File
mkVersionPath (SCI.Version v) (PI.Path fp) = PI.Path (fp ++ "." ++ show v)
-------------------------------------------------------------------------------
-- | List the files next to the base file whose names are the base name
-- followed by @.\<integer\>@, pairing each with its parsed version.
discoverSafeCopyFiles
  :: P.Path rel P.File
  -> IO [SafeCopyFile P.Abs a]
discoverSafeCopyFiles baseFile = do
  dir <- P.parent <$> PIO.makeAbsolute baseFile
  (_, files) <- PIO.listDir dir
  -- Keep only the files that 'check' accepts.
  return [ SafeCopyFile f v | Just (f, v) <- check <$> files]
  where
    fname = P.toFilePath . P.filename
    -- Accept files named "<base>.<n>" with numeric n; reject everything else.
    check f = do
      ext <- stripPrefix (fname baseFile) (fname f)
      v <- case ext of
             '.':rawVersion -> SCI.Version <$> readMaybe rawVersion
             _ -> Nothing
      return (f, v)
-------------------------------------------------------------------------------
-- | Read a golden file, decode it with safecopy (which migrates older
-- versions), and assert the result equals the expected value.
assertFile
  :: ( SC.SafeCopy a
     , Eq a
     , Show a
     )
  => P.Path b P.File
  -> a
  -> HU.Assertion
assertFile f expected = do
  raw <- BS.readFile rawFile
  case S.runGet SC.safeGet raw of
    Left e -> HU.assertFailure ("SafeCopy error in " ++ rawFile ++ ": " ++ e)
    Right actual -> HU.assertEqual "" expected actual
  where
    rawFile = P.toFilePath f
-------------------------------------------------------------------------------
-- | Serialize the value with safecopy to the given path, creating parent
-- directories as needed, unless the file already exists -- existing golden
-- files are never overwritten.
dumpVersionUnlessExists
  :: (SC.SafeCopy a)
  => P.Path rel P.File
  -> a
  -> IO ()
dumpVersionUnlessExists f a = do
  fabs <- PIO.makeAbsolute f
  PIO.ensureDir (P.parent fabs)
  exists <- PIO.doesFileExist f
  unless exists $
    BS.writeFile (P.toFilePath f) (S.runPut (SC.safePut a))
| Soostone/safecopy-hunit | src/Test/HUnit/SafeCopy.hs | bsd-3-clause | 4,947 | 0 | 24 | 1,213 | 1,141 | 591 | 550 | 91 | 5 |
module Rubik.Relation where
data Relation a b = Relation { apply :: a -> b, coapply :: b -> a }
| andygill/rubik-solver | src/Rubik/Relation.hs | bsd-3-clause | 98 | 0 | 9 | 23 | 37 | 23 | 14 | 2 | 0 |
module Module3.Task2 where
-- | Produce a list of @n@ copies of @x@.  Non-positive counts yield the
-- empty list.  (Replaces a hand-rolled accumulator recursion with the
-- standard 'replicate'; the original also looped forever on negative
-- counts, which now simply return @[]@.)
nTimes :: a -> Int -> [a]
nTimes x n = replicate n x
| dstarcev/stepic-haskell | src/Module3/Task2.hs | bsd-3-clause | 145 | 0 | 9 | 48 | 88 | 46 | 42 | 5 | 2 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module AuthorInfo
( AuthorInfo(..)
) where
import Data.Aeson
import Data.Text
import Data.Typeable
import GHC.Generics
import Author (AuthorId)
-- | Display name of an author.
type AuthorName = Text
-- | An author identifier paired with the author's name.  JSON instances
-- are derived generically, so the record field names become JSON keys.
data AuthorInfo = AuthorInfo { authorId :: AuthorId
                             , name :: AuthorName
                             }
                deriving (Show, FromJSON, ToJSON, Generic, Typeable)
| cutsea110/servant-sample-book | src/AuthorInfo.hs | bsd-3-clause | 453 | 0 | 8 | 143 | 93 | 57 | 36 | 13 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Crypto.Hash.Tsuraan.Blake2
( hash
, hash_key
) where
import Data.ByteString ( ByteString, length )
import Prelude hiding ( length )
import qualified Crypto.Hash.Tsuraan.Blake2.Parallel as Par
import qualified Crypto.Hash.Tsuraan.Blake2.Serial as Ser
-- | Keyed Blake2 hash of a strict 'ByteString'.  Inputs shorter than
-- 'cutoff' are hashed serially; longer inputs use the parallel variant.
hash_key :: ByteString -- ^The key to use when hashing
         -> Int        -- ^The digest size to generate; must be 1-64
         -> ByteString -- ^The 'ByteString' to hash
         -> ByteString
hash_key key hashlen bytes
  | length bytes < cutoff = Ser.hash_key key hashlen bytes
  | otherwise             = Par.hash_key key hashlen bytes
-- | Unkeyed Blake2 hash of a strict 'ByteString'.  Inputs shorter than
-- 'cutoff' are hashed serially; longer inputs use the parallel variant.
hash :: Int        -- ^The digest size to generate; must be 1-64
     -> ByteString -- ^The 'ByteString' to hash
     -> ByteString
hash hashlen bytes
  | length bytes < cutoff = Ser.hash hashlen bytes
  | otherwise             = Par.hash hashlen bytes
-- This is a fairly sane cross-over point for when a hash is faster to
-- calculate in parallel than serially. This was found through experimentation,
-- so there's probably a smarter way to deal with it.
-- NOTE(review): measured value; re-benchmark if the target hardware changes.
cutoff :: Int
cutoff = 5000
| tsuraan/hs-blake2 | src/Crypto/Hash/Tsuraan/Blake2.hs | bsd-3-clause | 1,451 | 0 | 7 | 303 | 199 | 121 | 78 | 25 | 2 |
{-
Types.hs, odds and ends
Joel Svensson
Todo:
-}
module Obsidian.ArrowObsidian.Types where
type Name = String
| svenssonjoel/ArrowObsidian | Obsidian/ArrowObsidian/Types.hs | bsd-3-clause | 131 | 0 | 4 | 35 | 14 | 10 | 4 | 2 | 0 |
module Zero.GHCJS where
import Control.Monad
import Control.Monad.Trans.Maybe (MaybeT(..), runMaybeT)
import qualified JavaScript.Array.Internal as AI
import qualified JavaScript.Object.Internal as OI
import qualified Data.HashMap.Strict as H
import qualified Data.Aeson as AE
import qualified Data.Vector as V
import qualified Data.JSString.Text as JSS
import Data.Scientific (Scientific, scientific, fromFloatDigits)
import GHCJS.Foreign
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..))
import GHCJS.Types (JSVal)
------------------------------------------------------------------------------
-- | Convert a raw JavaScript value to an aeson 'AE.Value' by dispatching
-- on its runtime JSON type; returns 'Nothing' if any component fails to
-- convert (objects convert property-by-property inside 'MaybeT').
instance FromJSVal AE.Value where
  fromJSVal r = case jsonTypeOf r of
    JSONNull -> return (Just AE.Null)
    -- Integers are widened to Integer and wrapped as a Scientific with
    -- exponent 0, avoiding a floating-point round trip.
    JSONInteger -> liftM (AE.Number . flip scientific 0 . (toInteger :: Int -> Integer))
                   <$> fromJSVal r
    JSONFloat -> liftM (AE.Number . (fromFloatDigits :: Double -> Scientific))
                 <$> fromJSVal r
    JSONBool -> liftM AE.Bool <$> fromJSVal r
    JSONString -> liftM AE.String <$> fromJSVal r
    JSONArray -> liftM (AE.Array . V.fromList) <$> fromJSVal r
    JSONObject -> do
      props <- OI.listProps (OI.Object r)
      runMaybeT $ do
        propVals <- forM props $ \p -> do
          v <- MaybeT (fromJSVal =<< OI.getProp p (OI.Object r))
          return (JSS.textFromJSString p, v)
        return (AE.Object (H.fromList propVals))
  {-# INLINE fromJSVal #-}
-- | aeson values marshal to raw JavaScript via 'toJSVal_aeson'.
instance ToJSVal AE.Value where
  toJSVal = toJSVal_aeson
  {-# INLINE toJSVal #-}
-- | Encode any 'AE.ToJSON' value as a raw 'JSVal' by going through its
-- aeson 'AE.Value' representation.  (The original routed through a
-- pointless local alias @cv = convertValue@; composed directly instead.)
toJSVal_aeson :: AE.ToJSON a => a -> IO JSVal
toJSVal_aeson = convertValue . AE.toJSON
-- | Recursively build a raw JavaScript value from an aeson 'AE.Value'.
convertValue :: AE.Value -> IO JSVal
convertValue AE.Null = return jsNull
convertValue (AE.String t) = return (pToJSVal t)
-- Arrays: convert each element, build a JS array, unwrap its JSVal.
convertValue (AE.Array a) = (\(AI.SomeJSArray x) -> x) <$>
                            (AI.fromListIO =<< mapM convertValue (V.toList a))
-- Numbers go through Double, matching JavaScript number semantics.
convertValue (AE.Number n) = toJSVal (realToFrac n :: Double)
convertValue (AE.Bool b) = return (toJSBool b)
-- Objects: create an empty JS object and set one property per key.
convertValue (AE.Object o) = do
  obj@(OI.Object obj') <- OI.create
  mapM_ (\(k,v) -> convertValue v >>= \v' -> OI.setProp (JSS.textToJSString k) v' obj) (H.toList o)
  return obj'
| et4te/zero | src/Zero/GHCJS.hs | bsd-3-clause | 2,474 | 0 | 26 | 706 | 785 | 414 | 371 | 49 | 6 |
{-# LANGUAGE TypeFamilies,GeneralizedNewtypeDeriving #-}
module Fragnix.ModuleDeclarations
( parse
, moduleDeclarationsWithEnvironment
, moduleSymbols
) where
import Fragnix.Declaration (
Declaration(Declaration),Genre(..))
import Language.Haskell.Exts (
Module,ModuleName,QName(Qual,UnQual),Decl(..),
parseFileContentsWithMode,defaultParseMode,ParseMode(..),baseFixities,
ParseResult(ParseOk,ParseFailed),
SrcSpan,srcInfoSpan,SrcLoc(SrcLoc),
prettyPrint,
readExtensions,Extension(EnableExtension,UnknownExtension),KnownExtension(..))
import Language.Haskell.Names (
resolve,annotate,
Environment,Symbol,Scoped(Scoped),
NameInfo(GlobalSymbol,RecPatWildcard))
import Language.Haskell.Names.SyntaxUtils (
getModuleDecls,getModuleName,getModuleExtensions,dropAnn)
import Language.Haskell.Names.ModuleSymbols (
getTopDeclSymbols)
import qualified Language.Haskell.Names.GlobalSymbolTable as GlobalTable (
empty)
import qualified Data.Map.Strict as Map (
(!),fromList)
import Data.Maybe (mapMaybe)
import Data.Text (pack)
import Data.Foldable (toList)
-- -- | Given a list of filepaths to valid Haskell modules produces a list of all
-- -- declarations in those modules. The default environment loaded and used.
-- moduleDeclarations :: [FilePath] -> IO [Declaration]
-- moduleDeclarations modulepaths = do
-- builtinEnvironment <- loadEnvironment builtinEnvironmentPath
-- environment <- loadEnvironment environmentPath
-- modules <- forM modulepaths parse
-- return (moduleDeclarationsWithEnvironment (Map.union builtinEnvironment environment) modules)
-- | Use the given environment to produce a list of all declarations from the given list
-- of modules.
moduleDeclarationsWithEnvironment :: Environment -> [Module SrcSpan] -> [Declaration]
moduleDeclarationsWithEnvironment environment modules = declarations where
declarations = do
annotatedModule <- annotatedModules
let (_,moduleExtensions) = getModuleExtensions annotatedModule
allExtensions = moduleExtensions ++ globalExtensions ++ perhapsTemplateHaskell moduleExtensions
extractDeclarations allExtensions annotatedModule
environment' = resolve modules environment
annotatedModules = map (annotate environment') modules
-- moduleNameErrors :: Environment -> [Module SrcSpan] -> [Error SrcSpan]
-- moduleNameErrors environment modules = errors where
-- errors = do
-- Scoped (ScopeError errorInfo) _ <- concatMap toList annotatedModules
-- return errorInfo
-- annotatedModules = map (annotate environment') modules
-- environment' = resolve modules environment
-- | Get the exports of the given modules resolved against the given environment.
moduleSymbols :: Environment -> [Module SrcSpan] -> Environment
moduleSymbols environment modules = Map.fromList (do
let environment' = resolve modules environment
moduleName <- map (dropAnn . getModuleName) modules
return (moduleName,environment' Map.! moduleName))
parse :: FilePath -> IO (Module SrcSpan)
parse path = do
fileContents <- readFile path
let parseMode = defaultParseMode {
parseFilename = path,
extensions = globalExtensions ++ perhapsTemplateHaskell moduleExtensions,
fixities = Just baseFixities}
parseresult = parseFileContentsWithMode parseMode fileContents
moduleExtensions = maybe [] snd (readExtensions fileContents)
case parseresult of
ParseOk ast -> return (fmap srcInfoSpan ast)
ParseFailed (SrcLoc filename line column) message -> error (unlines [
"failed to parse module.",
"filename: " ++ filename,
"line: " ++ show line,
"column: " ++ show column,
"error: " ++ message])
globalExtensions :: [Extension]
globalExtensions = [
EnableExtension MultiParamTypeClasses,
EnableExtension NondecreasingIndentation,
EnableExtension ExplicitForAll,
EnableExtension PatternGuards]
-- | Because haskell-src-exts cannot handle TemplateHaskellQuotes we enable
-- TemplateHaskell when we encounter it.
-- See https://github.com/haskell-suite/haskell-src-exts/issues/357
perhapsTemplateHaskell :: [Extension] -> [Extension]
perhapsTemplateHaskell exts =
if any (== UnknownExtension "TemplateHaskellQuotes") exts
then [EnableExtension TemplateHaskell]
else []
extractDeclarations :: [Extension] -> Module (Scoped SrcSpan) -> [Declaration]
extractDeclarations declarationExtensions annotatedast =
mapMaybe (declToDeclaration declarationExtensions modulnameast) (getModuleDecls annotatedast) where
modulnameast = getModuleName annotatedast
-- | Make a 'Declaration' from a 'haskell-src-exts' 'Decl'.
declToDeclaration ::
[Extension] ->
ModuleName (Scoped SrcSpan) ->
Decl (Scoped SrcSpan) ->
Maybe Declaration
declToDeclaration declarationExtensions modulnameast annotatedast = do
let genre = declGenre annotatedast
case genre of
Other -> Nothing
_ -> return (Declaration
genre
declarationExtensions
(pack (prettyPrint annotatedast))
(declaredSymbols modulnameast annotatedast)
(mentionedSymbols annotatedast))
-- | The genre of a declaration, for example Type, Value, TypeSignature, ...
declGenre :: Decl (Scoped SrcSpan) -> Genre
declGenre (TypeDecl _ _ _) = Type
declGenre (TypeFamDecl _ _ _ _) = Type
declGenre (DataDecl _ _ _ _ _ _) = Type
declGenre (GDataDecl _ _ _ _ _ _ _) = Type
declGenre (DataFamDecl _ _ _ _) = Type
declGenre (TypeInsDecl _ _ _) = FamilyInstance
declGenre (DataInsDecl _ _ _ _ _) = FamilyInstance
declGenre (GDataInsDecl _ _ _ _ _ _) = FamilyInstance
declGenre (ClassDecl _ _ _ _ _) = TypeClass
declGenre (InstDecl _ _ _ _) = TypeClassInstance
declGenre (DerivDecl _ _ _ _) = DerivingInstance
declGenre (TypeSig _ _ _) = TypeSignature
declGenre (FunBind _ _) = Value
declGenre (PatBind _ _ _ _) = Value
declGenre (ForImp _ _ _ _ _ _) = ForeignImport
declGenre (InfixDecl _ _ _ _) = InfixFixity
declGenre _ = Other
-- | All symbols the given declaration in a module with the given name binds.
declaredSymbols :: ModuleName (Scoped SrcSpan) -> Decl (Scoped SrcSpan) -> [Symbol]
declaredSymbols modulnameast annotatedast =
getTopDeclSymbols GlobalTable.empty modulnameast annotatedast
-- | All symbols the given declaration mentions together with a qualifiaction
-- if they are used qualified.
mentionedSymbols :: Decl (Scoped SrcSpan) -> [(Symbol,Maybe (ModuleName ()))]
mentionedSymbols decl = concatMap scopeSymbol (toList decl)
-- | Get all references to global symbols from the given scope annotation.
scopeSymbol :: Scoped SrcSpan -> [(Symbol,Maybe (ModuleName ()))]
scopeSymbol (Scoped (GlobalSymbol symbol (Qual _ modulname _)) _) =
[(symbol,Just (dropAnn modulname))]
scopeSymbol (Scoped (GlobalSymbol symbol (UnQual _ _)) _) =
[(symbol,Nothing)]
scopeSymbol (Scoped (RecPatWildcard symbols) _) =
map (\symbol -> (symbol,Nothing)) symbols
scopeSymbol _ =
[]
| phischu/fragnix | src/Fragnix/ModuleDeclarations.hs | bsd-3-clause | 7,077 | 0 | 16 | 1,265 | 1,617 | 876 | 741 | 122 | 2 |
{-# LANGUAGE ForeignFunctionInterface, JavaScriptFFI, EmptyDataDecls,
DeriveDataTypeable, GHCForeignImportPrim, DataKinds, KindSignatures,
PolyKinds, MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances,
UnboxedTuples, MagicHash, UnliftedFFITypes
#-}
-- Fix: the export list exported 'MutableValue' and 'MutableValue'' twice
-- (once on the lines introducing them, once on a stray extra line);
-- duplicate exports trigger GHC's -Wduplicate-exports warning.  The
-- duplicate line is removed; the exported interface is unchanged.
module JavaScript.JSON.Types.Internal
    ( -- * Core JSON types
      SomeValue(..), Value, MutableValue
    , SomeValue'(..), Value', MutableValue'
    , emptyArray, isEmptyArray
    , Pair
    , Object, MutableObject
    , objectProperties, objectPropertiesIO
    , objectAssocs, objectAssocsIO
    , Lookup(..), IOLookup(..)
    , emptyObject
    , match
    , arrayValue, stringValue, doubleValue, nullValue, boolValue, objectValue
    , arrayValueList, indexV
{- fixme implement freezing / thawing
    , freeze, unsafeFreeze
    , thaw, unsafeThaw
-}
      -- * Type conversion
    , Parser
    , Result(..)
    , parse
    , parseEither
    , parseMaybe
    , modifyFailure
    , encode
      -- * Constructors and accessors
    , object
      -- * Generic and TH encoding configuration
    , Options(..)
    , SumEncoding(..)
    , defaultOptions
    , defaultTaggedObject
      -- * Used for changing CamelCase names into something else.
    , camelTo
      -- * Other types
    , DotNetTime(..)
    ) where
import Data.Aeson.Types
( Parser, Result(..)
, parse, parseEither, parseMaybe, modifyFailure
, Options(..), SumEncoding(..), defaultOptions, defaultTaggedObject
, camelTo
, DotNetTime(..)
)
import Prelude hiding (lookup)
import Control.DeepSeq
import Control.Exception
import Data.Coerce
import Data.Data
import qualified Data.JSString as JSS
import Data.JSString.Internal.Type (JSString(..))
import Data.Maybe (fromMaybe)
import Data.Typeable
import qualified GHC.Exts as Exts
import GHC.Types (IO(..))
import qualified GHCJS.Foreign as F
import GHCJS.Internal.Types
import GHCJS.Types
import qualified GHCJS.Prim.Internal.Build as IB
import qualified JavaScript.Array as A
import qualified JavaScript.Array.Internal as AI
import Unsafe.Coerce
-- | Thrown by the partial lookup operators ('!', '^!') when the requested
-- key or index does not hold a JSON value.
data JSONException = UnknownKey
                     deriving (Show, Typeable)

instance Exception JSONException
-- any JSON value, tagged at the type level with its mutability
newtype SomeValue (m :: MutabilityType s) =
  SomeValue JSVal deriving (Typeable)

type Value        = SomeValue Immutable
type MutableValue = SomeValue Mutable

instance NFData (SomeValue (m :: MutabilityType s)) where
  rnf (SomeValue v) = rnf v

-- a dictionary (object), tagged with its mutability
newtype SomeObject (m :: MutabilityType s) =
  SomeObject JSVal deriving (Typeable)

type Object        = SomeObject Immutable
type MutableObject = SomeObject Mutable

instance NFData (SomeObject (m :: MutabilityType s)) where
  rnf (SomeObject v) = rnf v
{-
objectFromAssocs :: [(JSString, Value)] -> Object
objectFromAssocs xs = rnf xs `seq` js_objectFromAssocs (unsafeCoerce xs)
{-# INLINE objectFromAssocs #-}
-}
-- | The property names of an immutable object, as a JavaScript array.
objectProperties :: Object -> AI.JSArray
objectProperties = js_objectPropertiesPure
{-# INLINE objectProperties #-}

-- | The property names of an object of any mutability, read in IO.
objectPropertiesIO :: SomeObject o -> IO AI.JSArray
objectPropertiesIO = js_objectProperties
{-# INLINE objectPropertiesIO #-}
-- | The key/value pairs of an immutable object.  The runtime helper
-- returns the list as an untyped 'Exts.Any'; 'unsafeCoerce' restores
-- the Haskell list type.
objectAssocs :: Object -> [(JSString, Value)]
objectAssocs o = unsafeCoerce (js_listAssocsPure o)
{-# INLINE objectAssocs #-}

-- | The key/value pairs of an object of any mutability, read in IO.
-- Unwraps the IO constructor to thread the state token through the
-- primitive call directly, coercing the 'Exts.Any' result back to a list.
objectAssocsIO :: SomeObject m -> IO [(JSString, Value)]
objectAssocsIO o = IO $ \s -> case js_listAssocs o s of
                                (# s', r #) -> (# s', unsafeCoerce r #)
{-# INLINE objectAssocsIO #-}
-- | An immutable key/value pair.
type Pair        = (JSString, Value)
-- | A mutable key/value pair.
type MutablePair = (JSString, MutableValue)

-- | A JSON value unpacked one level: the top-level structure is exposed
-- as a Haskell sum; children remain JSVal-backed.  See 'match'.
data SomeValue' (m :: MutabilityType s)
  = Object !(SomeObject m)
  | Array  !(AI.SomeJSArray m)
  | String !JSString
  | Number !Double
  | Bool   !Bool
  | Null
  deriving (Typeable)

type Value'        = SomeValue' Immutable
type MutableValue' = SomeValue' Mutable
-- -----------------------------------------------------------------------------
-- immutable lookup

-- | Pure lookup of a JSON value by key (in objects) or index (in arrays).
class Lookup k a where
  (!)    :: k -> a -> Value       -- ^ throws when result is not a JSON value
  lookup :: k -> a -> Maybe Value -- ^ returns Nothing when result is not a JSON value
  -- fixme more optimized matching
  -- lookup' :: k -> a -> Maybe Value' -- ^ returns Nothing when result is not a JSON value

instance Lookup JSString Object where
  p ! d      = fromMaybe (throw UnknownKey) (lookup p d)
  lookup p d = let v = js_lookupDictPure p d
               in if isUndefined v then Nothing else Just (SomeValue v)

-- Looking a key up in an arbitrary Value first checks that it is an
-- object (the "Safe" FFI variant); non-objects yield Nothing.
instance Lookup JSString Value where
  p ! d      = fromMaybe (throw UnknownKey) (lookup p d)
  lookup p d = let v = js_lookupDictPureSafe p d
               in if isUndefined v then Nothing else Just (SomeValue v)

instance Lookup Int A.JSArray where
  i ! a      = fromMaybe (throw UnknownKey) (lookup i a)
  lookup i a = let v = js_lookupArrayPure i a
               in if isUndefined v then Nothing else Just (SomeValue v)

-- Indexing an arbitrary Value first checks that it is an array.
instance Lookup Int Value where
  i ! a      = fromMaybe (throw UnknownKey) (lookup i a)
  lookup i a = let v = js_lookupArrayPureSafe i a
               in if isUndefined v then Nothing else Just (SomeValue v)
-- -----------------------------------------------------------------------------
-- mutable lookup

-- | IO-based lookup of a (possibly mutable) JSON value by key or index.
-- No instances are defined in this module.
class IOLookup k a where
  (^!)      :: k -> a -> IO MutableValue          -- ^ throws when result is not a JSON value
  lookupIO  :: k -> a -> IO (Maybe MutableValue)  -- ^ returns Nothing when result is not a JSON value
  lookupIO' :: k -> a -> IO (Maybe MutableValue') -- ^ returns Nothing when result is not a JSON value
-- -----------------------------------------------------------------------------

-- | Expose the top-level structure of a JSON value as a 'SomeValue''
-- sum, dispatching on the runtime JSON type tag of the underlying JSVal.
-- Integer and float tags both map to 'Number' ('Double'-valued).
match :: SomeValue m -> SomeValue' m
match (SomeValue v) =
  case F.jsonTypeOf v of
    F.JSONNull    -> Null
    F.JSONBool    -> Bool (js_jsvalToBool v)
    F.JSONInteger -> Number (js_jsvalToDouble v)
    F.JSONFloat   -> Number (js_jsvalToDouble v)
    F.JSONString  -> String (JSString v)
    F.JSONArray   -> Array (AI.SomeJSArray v)
    F.JSONObject  -> Object (SomeObject v)
{-# INLINE match #-}
-- | A fresh, empty JSON array value.
emptyArray :: Value
emptyArray = js_emptyArray
{-# INLINE emptyArray #-}

-- | True when the value is an array of length zero.
isEmptyArray :: Value -> Bool
isEmptyArray = js_isEmptyArray
{-# INLINE isEmptyArray #-}

-- | A fresh, empty JSON object.
emptyObject :: Object
emptyObject = js_emptyObject
{-# INLINE emptyObject #-}
-- | Build an immutable object from key/value pairs; an empty pair list
-- produces a fresh empty object.
object :: [Pair] -> Object
object pairs = case pairs of
  [] -> js_emptyObject
  _  -> SomeObject (IB.buildObjectI (coerce pairs))
{-# INLINE object #-}
-- | Deep-copy a mutable JSON value into an immutable one.
freeze :: MutableValue -> IO Value
freeze v = js_clone v
{-# INLINE freeze #-}

-- | Re-tag the underlying JSVal as immutable without copying.
-- The result shares storage with the argument, so this is only safe if
-- the original is not mutated afterwards.
unsafeFreeze :: MutableValue -> IO Value
unsafeFreeze (SomeValue v) = pure (SomeValue v)
{-# INLINE unsafeFreeze #-}

-- | Deep-copy an immutable JSON value into a mutable one.
thaw :: Value -> IO MutableValue
thaw v = js_clone v
{-# INLINE thaw #-}

-- | Re-tag the underlying JSVal as mutable without copying; shares
-- storage with the (supposedly immutable) argument.
unsafeThaw :: Value -> IO MutableValue
unsafeThaw (SomeValue v) = pure (SomeValue v)
{-# INLINE unsafeThaw #-}
-- -----------------------------------------------------------------------------
-- smart constructors: wrap raw JSVals as immutable JSON values

arrayValue :: AI.JSArray -> Value
arrayValue (AI.SomeJSArray a) = SomeValue a
{-# INLINE arrayValue #-}

stringValue :: JSString -> Value
stringValue (JSString x) = SomeValue x
{-# INLINE stringValue #-}

doubleValue :: Double -> Value
doubleValue = SomeValue . js_doubleToJSVal
{-# INLINE doubleValue #-}

boolValue :: Bool -> Value
boolValue b = if b then js_trueValue else js_falseValue
{-# INLINE boolValue #-}

nullValue :: Value
nullValue = SomeValue F.jsNull

-- | Pack a list of JSON values into a JavaScript array (zero-cost
-- element conversion via 'coerce').
arrayValueList :: [Value] -> AI.JSArray
arrayValueList = A.fromList . coerce
{-# INLINE arrayValueList #-}

-- | Read the element at the given index as a JSON value.
indexV :: AI.JSArray -> Int -> Value
indexV a i = SomeValue (AI.index i a)
{-# INLINE indexV #-}

objectValue :: Object -> Value
objectValue (SomeObject o) = SomeValue o
{-# INLINE objectValue #-}

-- | Serialize a JSON value to its textual representation.
encode :: Value -> JSString
encode = js_encode
{-# INLINE encode #-}
-- -----------------------------------------------------------------------------
-- primitive constructors and predicates
foreign import javascript unsafe
  "$r = [];" js_emptyArray :: Value
foreign import javascript unsafe
  "$r = {};" js_emptyObject :: Object
foreign import javascript unsafe
  "$1.length === 0" js_isEmptyArray :: Value -> Bool
foreign import javascript unsafe
  "$r = true;" js_trueValue :: Value
foreign import javascript unsafe
  "$r = false;" js_falseValue :: Value
-- -----------------------------------------------------------------------------
-- types must be checked before using these conversions
-- (they are identity casts on the JavaScript side)
foreign import javascript unsafe
  "$r = $1;" js_jsvalToDouble :: JSVal -> Double
foreign import javascript unsafe
  "$r = $1;" js_jsvalToBool :: JSVal -> Bool
-- -----------------------------------------------------------------------------
-- various lookups; the "Safe" variants guard against the receiver not
-- being an object/array and yield undefined in that case
foreign import javascript unsafe
  "$2[$1]"
  js_lookupDictPure :: JSString -> Object -> JSVal
foreign import javascript unsafe
  "typeof($2)==='object'?$2[$1]:undefined"
  js_lookupDictPureSafe :: JSString -> Value -> JSVal
foreign import javascript unsafe
  "$2[$1]" js_lookupArrayPure :: Int -> A.JSArray -> JSVal
foreign import javascript unsafe
  "h$isArray($2) ? $2[$1] : undefined"
  js_lookupArrayPureSafe :: Int -> Value -> JSVal
foreign import javascript unsafe
  "$r = $1;"
  js_doubleToJSVal :: Double -> JSVal
-- Deep-copies a JSON value via a stringify/parse round trip.
-- Fix: the original JavaScript used JSON.decode/JSON.encode, which do
-- not exist in standard JavaScript; JSON.parse/JSON.stringify are the
-- standard pair (and JSON.stringify is already used by js_encode below).
foreign import javascript unsafe
  "JSON.parse(JSON.stringify($1))"
  js_clone :: SomeValue m0 -> IO (SomeValue m1)
-- -----------------------------------------------------------------------------
-- object introspection, implemented by GHCJS runtime helpers
foreign import javascript unsafe
  "h$allProps"
  js_objectPropertiesPure :: Object -> AI.JSArray
foreign import javascript unsafe
  "h$allProps"
  js_objectProperties :: SomeObject m -> IO AI.JSArray
foreign import javascript unsafe
  "h$listAssocs"
  js_listAssocsPure :: Object -> Exts.Any -- [(JSString, Value)]
foreign import javascript unsafe
  "h$listAssocs"
  js_listAssocs :: SomeObject m -> Exts.State# s -> (# Exts.State# s, Exts.Any {- [(JSString, Value)] -} #)
foreign import javascript unsafe
  "JSON.stringify($1)"
  js_encode :: Value -> JSString
| ghcjs/ghcjs-base | JavaScript/JSON/Types/Internal.hs | mit | 10,200 | 39 | 12 | 2,153 | 2,334 | 1,271 | 1,063 | 229 | 7 |
{-
SockeyeParserAST.hs: AST for the Sockeye parser
Part of Sockeye
Copyright (c) 2018, ETH Zurich.
All rights reserved.
This file is distributed under the terms in the attached LICENSE file.
If you do not find this file, copies can be found by writing to:
ETH Zurich D-INFK, CAB F.78, Universitaetstrasse 6, CH-8092 Zurich,
Attn: Systems Group.
-}
module SockeyeParserAST
( module SockeyeParserAST
, module SockeyeSymbolTable
, module SockeyeAST
) where
import Data.Map (Map)
import SockeyeASTMeta
import SockeyeSymbolTable
( NodeType(NodeType)
, nodeTypeMeta, originDomain, originType, targetDomain, targetType
, Domain(Memory, Interrupt, Power, Clock)
, EdgeType(TypeLiteral, TypeName)
, edgeTypeMeta, typeLiteral, typeRef
, AddressType(AddressType)
, ArraySize(ArraySize)
)
import SockeyeAST
( UnqualifiedRef(UnqualifiedRef)
, refMeta, refName, refIndex
, NodeReference(InternalNodeRef, InputPortRef)
, nodeRefMeta, instRef, nodeRef
, ArrayIndex(ArrayIndex)
, Address(Address)
, AddressBlock(AddressBlock)
, WildcardSet(ExplicitSet, Wildcard)
, NaturalSet(NaturalSet)
, NaturalRange(SingletonRange, LimitRange, BitsRange)
, natRangeMeta, base, limit, bits
, NaturalExpr(Addition, Subtraction, Multiplication, Slice, Concat, Variable, Literal)
, natExprMeta, natExprOp1, natExprOp2, bitRange, varName, natural
, PropertyExpr(And, Or, Not, Property, True, False)
, propExprMeta, propExprOp1, propExprOp2, property
)
-- | A whole Sockeye specification: the entry file plus every parsed
-- file, keyed by path.
data Sockeye = Sockeye
    { entryPoint :: FilePath
    , files      :: Map FilePath SockeyeFile
    }
    deriving (Show)

-- | One parsed Sockeye source file.
data SockeyeFile = SockeyeFile
    { sockeyeFileMeta :: ASTMeta
    , imports         :: [Import]
    , modules         :: [Module]
    , types           :: [NamedType]
    }
    deriving (Show)

instance MetaAST SockeyeFile where
    meta = sockeyeFileMeta

-- | An import statement.  'explImports' is @Nothing@ when no explicit
-- import list was given.
data Import = Import
    { importMeta  :: ASTMeta
    , importFile  :: !FilePath
    , explImports :: Maybe [ImportAlias]
    }
    deriving (Show)

instance MetaAST Import where
    meta = importMeta

-- | A single entry of an explicit import list, possibly renamed.
data ImportAlias = ImportAlias
    { importAliasMeta :: ASTMeta
    , originalName    :: !String
    , importAlias     :: !String
    }
    deriving (Show)

instance MetaAST ImportAlias where
    meta = importAliasMeta
-- | A Sockeye module: its header (name, extern flag, parameters) and
-- its body (constants, declarations, definitions).
data Module = Module
    { moduleMeta   :: ASTMeta
    , moduleExtern :: Bool
    , moduleName   :: !String
    , parameters   :: [ModuleParameter]
    , constants    :: [NamedConstant]
    , instDecls    :: [InstanceDeclaration]
    , nodeDecls    :: [NodeDeclaration]
    , definitions  :: [Definition]
    }
    deriving (Show)

instance MetaAST Module where
    meta = moduleMeta

-- | A formal module parameter, restricted to the given natural range.
data ModuleParameter = ModuleParameter
    { paramMeta  :: ASTMeta
    , paramName  :: !String
    , paramRange :: NaturalSet
    }
    deriving (Show)

instance MetaAST ModuleParameter where
    meta = paramMeta

-- | Declaration of a (possibly arrayed) instance of another module.
data InstanceDeclaration = InstanceDeclaration
    { instDeclMeta :: ASTMeta
    , instName     :: !String
    , instModName  :: !String
    , instArrSize  :: Maybe ArraySize
    }
    deriving (Show)

instance MetaAST InstanceDeclaration where
    meta = instDeclMeta

-- | Declaration of a (possibly arrayed) node of the given kind and type.
data NodeDeclaration = NodeDeclaration
    { nodeDeclMeta :: ASTMeta
    , nodeKind     :: !NodeKind
    , nodeType     :: NodeType
    , nodeName     :: !String
    , nodeArrSize  :: Maybe ArraySize
    }
    deriving (Show)

instance MetaAST NodeDeclaration where
    meta = nodeDeclMeta

-- | Whether a node is an input port, an output port, or internal.
data NodeKind
    = InputPort
    | OutputPort
    | InternalNode
    deriving (Eq, Show)
-- | One definition in a module body.  Record fields 'defMeta', 'node'
-- and 'inst' are shared between the constructors that carry them.
data Definition
    -- | A node accepts the listed address blocks.
    = Accepts
        { defMeta :: ASTMeta
        , node    :: UnqualifiedRef
        , accepts :: [AddressBlock]
        }
    -- | A node maps address blocks to targets.
    | Maps
        { defMeta :: ASTMeta
        , node    :: UnqualifiedRef
        , maps    :: [MapSpec]
        }
    -- | A node converts address blocks (same shape as mapping).
    | Converts
        { defMeta  :: ASTMeta
        , node     :: UnqualifiedRef
        , converts :: [ConvertSpec]
        }
    -- | A node overlays another node.
    | Overlays
        { defMeta  :: ASTMeta
        , node     :: UnqualifiedRef
        , overlays :: NodeReference
        }
    -- | An overlay restricted to the given block sizes.
    | BlockOverlays
        { defMeta    :: ASTMeta
        , node       :: UnqualifiedRef
        , overlays   :: NodeReference
        , blocksizes :: [Integer]
        }
    -- | Instantiation of a module with the given arguments.
    | Instantiates
        { defMeta    :: ASTMeta
        , inst       :: UnqualifiedRef
        , instModule :: !String
        , arguments  :: [NaturalExpr]
        }
    -- | Port bindings for an instance.
    | Binds
        { defMeta :: ASTMeta
        , inst    :: UnqualifiedRef
        , binds   :: [PortBinding]
        }
    -- | Universally quantified block of definitions over a range.
    | Forall
        { defMeta        :: ASTMeta
        , boundVarName   :: !String
        , varRange       :: NaturalSet
        , quantifierBody :: [Definition]
        }
    deriving (Show)

instance MetaAST Definition where
    meta = defMeta
-- | One map entry: a source address block and its targets.
data MapSpec = MapSpec
    { mapSpecMeta :: ASTMeta
    , mapAddr     :: AddressBlock
    , mapTargets  :: [MapTarget]
    }
    deriving (Show)

instance MetaAST MapSpec where
    meta = mapSpecMeta

-- | A target node together with the address block mapped onto it.
data MapTarget = MapTarget
    { mapTargetMeta :: ASTMeta
    , targetNode    :: NodeReference
    , targetAddr    :: AddressBlock
    }
    deriving (Show)

instance MetaAST MapTarget where
    meta = mapTargetMeta

-- | Conversions have the same shape as mappings.
type ConvertSpec = MapSpec

-- | Binding of an instance port to a node.
data PortBinding = PortBinding
    { portBindMeta :: ASTMeta
    , boundPort    :: UnqualifiedRef
    , boundNode    :: NodeReference
    }
    deriving (Show)

instance MetaAST PortBinding where
    meta = portBindMeta

-- | A named address type.
data NamedType = NamedType
    { namedTypeMeta :: ASTMeta
    , typeName      :: !String
    , namedType     :: AddressType
    }
    deriving (Show)

instance MetaAST NamedType where
    meta = namedTypeMeta

-- | A named integer constant.
data NamedConstant = NamedConstant
    { namedConstMeta :: ASTMeta
    , constName      :: !String
    , namedConst     :: !Integer
    }
    deriving (Show)

instance MetaAST NamedConstant where
    meta = namedConstMeta
| kishoredbn/barrelfish | tools/sockeye/SockeyeParserAST.hs | mit | 5,941 | 0 | 10 | 1,707 | 1,250 | 788 | 462 | 200 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CloudTrail.StartLogging
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Starts the recording of AWS API calls and log file delivery for a trail.
--
-- /See:/ <http://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_StartLogging.html AWS API Reference> for StartLogging.
module Network.AWS.CloudTrail.StartLogging
(
-- * Creating a Request
startLogging
, StartLogging
-- * Request Lenses
, sName
-- * Destructuring the Response
, startLoggingResponse
, StartLoggingResponse
-- * Response Lenses
, srsResponseStatus
) where
import Network.AWS.CloudTrail.Types
import Network.AWS.CloudTrail.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | The request to CloudTrail to start logging AWS API calls for an account.
--
-- /See:/ 'startLogging' smart constructor.
newtype StartLogging = StartLogging'
    { _sName :: Text -- ^ name of the trail to start logging for
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'StartLogging' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sName'
startLogging
    :: Text -- ^ 'sName'
    -> StartLogging
startLogging = StartLogging'
-- | The name of the trail for which CloudTrail logs AWS API calls.
sName :: Lens' StartLogging Text
sName = lens _sName (\ s a -> s{_sName = a});
-- The response body is empty; only the HTTP status code is kept.
instance AWSRequest StartLogging where
        type Rs StartLogging = StartLoggingResponse
        request = postJSON cloudTrail
        response
          = receiveEmpty
              (\ s h x ->
                 StartLoggingResponse' <$> (pure (fromEnum s)))

-- Standard AWS JSON-1.1 target header for this operation.
instance ToHeaders StartLogging where
        toHeaders
          = const
              (mconcat
                 ["X-Amz-Target" =#
                    ("com.amazonaws.cloudtrail.v20131101.CloudTrail_20131101.StartLogging"
                       :: ByteString),
                  "Content-Type" =#
                    ("application/x-amz-json-1.1" :: ByteString)])

instance ToJSON StartLogging where
        toJSON StartLogging'{..}
          = object (catMaybes [Just ("Name" .= _sName)])

instance ToPath StartLogging where
        toPath = const "/"

instance ToQuery StartLogging where
        toQuery = const mempty
-- | Returns the objects or data listed below if successful. Otherwise,
-- returns an error.
--
-- /See:/ 'startLoggingResponse' smart constructor.
newtype StartLoggingResponse = StartLoggingResponse'
    { _srsResponseStatus :: Int -- ^ HTTP status code of the response
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'StartLoggingResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srsResponseStatus'
startLoggingResponse
    :: Int -- ^ 'srsResponseStatus'
    -> StartLoggingResponse
startLoggingResponse = StartLoggingResponse'
-- | The response status code.
srsResponseStatus :: Lens' StartLoggingResponse Int
srsResponseStatus = lens _srsResponseStatus (\ s a -> s{_srsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-cloudtrail/gen/Network/AWS/CloudTrail/StartLogging.hs | mpl-2.0 | 3,831 | 0 | 13 | 872 | 499 | 302 | 197 | 66 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1998
\section[TyCoRep]{Type and Coercion - friends' interface}
Note [The Type-related module hierarchy]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Class
CoAxiom
TyCon imports Class, CoAxiom
TyCoRep imports Class, CoAxiom, TyCon
TysPrim imports TyCoRep ( including mkTyConTy )
Kind imports TysPrim ( mainly for primitive kinds )
Type imports Kind
Coercion imports Type
-}
-- We expose the relevant stuff from this module via the Type module
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE CPP, DeriveDataTypeable, DeriveFunctor, DeriveFoldable,
DeriveTraversable, MultiWayIf #-}
{-# LANGUAGE ImplicitParams #-}
module TyCoRep (
TyThing(..), tyThingCategory, pprTyThingCategory, pprShortTyThing,
-- * Types
Type(..),
TyLit(..),
KindOrType, Kind,
PredType, ThetaType, -- Synonyms
ArgFlag(..),
-- * Coercions
Coercion(..),
UnivCoProvenance(..), CoercionHole(..),
CoercionN, CoercionR, CoercionP, KindCoercion,
-- * Functions over types
mkTyConTy, mkTyVarTy, mkTyVarTys,
mkFunTy, mkFunTys, mkForAllTy, mkForAllTys,
mkPiTy, mkPiTys,
isLiftedTypeKind, isUnliftedTypeKind,
isCoercionType, isRuntimeRepTy, isRuntimeRepVar,
isRuntimeRepKindedTy, dropRuntimeRepArgs,
sameVis,
-- * Functions over binders
TyBinder(..), TyVarBinder,
binderVar, binderVars, binderKind, binderArgFlag,
delBinderVar,
isInvisibleArgFlag, isVisibleArgFlag,
isInvisibleBinder, isVisibleBinder,
-- * Functions over coercions
pickLR,
-- * Pretty-printing
pprType, pprParendType, pprTypeApp, pprTvBndr, pprTvBndrs,
pprSigmaType,
pprTheta, pprForAll, pprUserForAll,
pprTyVar, pprTyVars,
pprThetaArrowTy, pprClassPred,
pprKind, pprParendKind, pprTyLit,
TyPrec(..), maybeParen, pprTcAppCo, pprTcAppTy,
pprPrefixApp, pprArrowChain,
pprDataCons, ppSuggestExplicitKinds,
-- * Free variables
tyCoVarsOfType, tyCoVarsOfTypeDSet, tyCoVarsOfTypes, tyCoVarsOfTypesDSet,
tyCoFVsBndr, tyCoFVsOfType, tyCoVarsOfTypeList,
tyCoFVsOfTypes, tyCoVarsOfTypesList,
closeOverKindsDSet, closeOverKindsFV, closeOverKindsList,
coVarsOfType, coVarsOfTypes,
coVarsOfCo, coVarsOfCos,
tyCoVarsOfCo, tyCoVarsOfCos,
tyCoVarsOfCoDSet,
tyCoFVsOfCo, tyCoFVsOfCos,
tyCoVarsOfCoList, tyCoVarsOfProv,
closeOverKinds,
-- * Substitutions
TCvSubst(..), TvSubstEnv, CvSubstEnv,
emptyTvSubstEnv, emptyCvSubstEnv, composeTCvSubstEnv, composeTCvSubst,
emptyTCvSubst, mkEmptyTCvSubst, isEmptyTCvSubst,
mkTCvSubst, mkTvSubst,
getTvSubstEnv,
getCvSubstEnv, getTCvInScope, getTCvSubstRangeFVs,
isInScope, notElemTCvSubst,
setTvSubstEnv, setCvSubstEnv, zapTCvSubst,
extendTCvInScope, extendTCvInScopeList, extendTCvInScopeSet,
extendTCvSubst,
extendCvSubst, extendCvSubstWithClone,
extendTvSubst, extendTvSubstBinder, extendTvSubstWithClone,
extendTvSubstList, extendTvSubstAndInScope,
unionTCvSubst, zipTyEnv, zipCoEnv, mkTyCoInScopeSet,
zipTvSubst, zipCvSubst,
mkTvSubstPrs,
substTyWith, substTyWithCoVars, substTysWith, substTysWithCoVars,
substCoWith,
substTy, substTyAddInScope,
substTyUnchecked, substTysUnchecked, substThetaUnchecked,
substTyWithUnchecked,
substCoUnchecked, substCoWithUnchecked,
substTyWithInScope,
substTys, substTheta,
lookupTyVar, substTyVarBndr,
substCo, substCos, substCoVar, substCoVars, lookupCoVar,
substCoVarBndr, cloneTyVarBndr, cloneTyVarBndrs,
substTyVar, substTyVars,
substForAllCoBndr,
substTyVarBndrCallback, substForAllCoBndrCallback,
substCoVarBndrCallback,
-- * Tidying type related things up for printing
tidyType, tidyTypes,
tidyOpenType, tidyOpenTypes,
tidyOpenKind,
tidyTyCoVarBndr, tidyTyCoVarBndrs, tidyFreeTyCoVars,
tidyOpenTyCoVar, tidyOpenTyCoVars,
tidyTyVarOcc,
tidyTopType,
tidyKind,
tidyCo, tidyCos,
tidyTyVarBinder, tidyTyVarBinders,
-- * Sizes
typeSize, coercionSize, provSize
) where
#include "HsVersions.h"
import {-# SOURCE #-} DataCon( dataConFullSig
, dataConUnivTyVarBinders, dataConExTyVarBinders
, DataCon, filterEqSpec )
import {-# SOURCE #-} Type( isPredTy, isCoercionTy, mkAppTy
, tyCoVarsOfTypesWellScoped
, coreView, typeKind )
-- Transitively pulls in a LOT of stuff, better to break the loop
import {-# SOURCE #-} Coercion
import {-# SOURCE #-} ConLike ( ConLike(..), conLikeName )
import {-# SOURCE #-} ToIface
-- friends:
import IfaceType
import Var
import VarEnv
import VarSet
import Name hiding ( varName )
import TyCon
import Class
import CoAxiom
import FV
-- others
import BasicTypes ( LeftOrRight(..), TyPrec(..), maybeParen, pickLR )
import PrelNames
import Outputable
import DynFlags
import FastString
import Pair
import UniqSupply
import Util
import UniqFM
-- libraries
import qualified Data.Data as Data hiding ( TyCon )
import Data.List
import Data.IORef ( IORef ) -- for CoercionHole
{-
%************************************************************************
%* *
TyThing
%* *
%************************************************************************
Despite the fact that DataCon has to be imported via a hi-boot route,
this module seems the right place for TyThing, because it's needed for
funTyCon and all the types in TysPrim.
It is also SOURCE-imported into Name.hs
Note [ATyCon for classes]
~~~~~~~~~~~~~~~~~~~~~~~~~
Both classes and type constructors are represented in the type environment
as ATyCon. You can tell the difference, and get to the class, with
isClassTyCon :: TyCon -> Bool
tyConClass_maybe :: TyCon -> Maybe Class
The Class and its associated TyCon have the same Name.
-}
-- | A global typecheckable-thing, essentially anything that has a name.
-- Not to be confused with a 'TcTyThing', which is also a typecheckable
-- thing but in the *local* context. See 'TcEnv' for how to retrieve
-- a 'TyThing' given a 'Name'.
data TyThing
  = AnId     Id
  | AConLike ConLike
  | ATyCon   TyCon       -- TyCons and classes; see Note [ATyCon for classes]
  | ACoAxiom (CoAxiom Branched)

instance Outputable TyThing where
  ppr = pprShortTyThing

instance NamedThing TyThing where       -- Can't put this with the type
  getName (AnId id)     = getName id    -- decl, because the DataCon instance
  getName (ATyCon tc)   = getName tc    -- isn't visible there
  getName (ACoAxiom cc) = getName cc
  getName (AConLike cl) = conLikeName cl
-- | One-line description of a 'TyThing': its category followed by its
-- quoted name.
pprShortTyThing :: TyThing -> SDoc
-- c.f. PprTyThing.pprTyThing, which prints all the details
pprShortTyThing thing
  = pprTyThingCategory thing <+> quotes (ppr (getName thing))

-- | Capitalised category of a 'TyThing', as an 'SDoc'.
pprTyThingCategory :: TyThing -> SDoc
pprTyThingCategory = text . capitalise . tyThingCategory
-- | Lower-case English category of a 'TyThing', for use in messages.
tyThingCategory :: TyThing -> String
tyThingCategory thing = case thing of
  ATyCon tc
    | isClassTyCon tc        -> "class"
    | otherwise              -> "type constructor"
  ACoAxiom _                 -> "coercion axiom"
  AnId _                     -> "identifier"
  AConLike (RealDataCon _)   -> "data constructor"
  AConLike (PatSynCon _)     -> "pattern synonym"
{- **********************************************************************
*                                                                       *
                        Type
*                                                                       *
********************************************************************** -}

-- | The key representation of types within the compiler
type KindOrType = Type -- See Note [Arguments to type constructors]

-- | The key type representing kinds in the compiler.
type Kind = Type

-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
data Type
  -- See Note [Non-trivial definitional equality]
  = TyVarTy Var -- ^ Vanilla type or kind variable (*never* a coercion variable)

  | AppTy
        Type
        Type            -- ^ Type application to something other than a 'TyCon'. Parameters:
                        --
                        --  1) Function: must /not/ be a 'TyConApp',
                        --     must be another 'AppTy', or 'TyVarTy'
                        --
                        --  2) Argument type

  | TyConApp
        TyCon
        [KindOrType]    -- ^ Application of a 'TyCon', including newtypes /and/ synonyms.
                        -- Invariant: saturated applications of 'FunTyCon' must
                        -- use 'FunTy' and saturated synonyms must use their own
                        -- constructors. However, /unsaturated/ 'FunTyCon's
                        -- do appear as 'TyConApp's.
                        -- Parameters:
                        --
                        -- 1) Type constructor being applied to.
                        --
                        -- 2) Type arguments. Might not have enough type arguments
                        --    here to saturate the constructor.
                        --    Even type synonyms are not necessarily saturated;
                        --    for example unsaturated type synonyms
                        --    can appear as the right hand side of a type synonym.

  | ForAllTy
        {-# UNPACK #-} !TyVarBinder
        Type            -- ^ A Π type.

  | FunTy Type Type     -- ^ t1 -> t2   Very common, so an important special case

  | LitTy TyLit         -- ^ Type literals are similar to type constructors.

  | CastTy
        Type
        KindCoercion    -- ^ A kind cast. The coercion is always nominal.
                        -- INVARIANT: The cast is never refl.
                        -- INVARIANT: The cast is "pushed down" as far as it
                        -- can go. See Note [Pushing down casts]

  | CoercionTy
        Coercion        -- ^ Injection of a Coercion into a type
                        -- This should only ever be used in the RHS of an AppTy,
                        -- in the list of a TyConApp, when applying a promoted
                        -- GADT data constructor

  deriving Data.Data

-- NOTE: Other parts of the code assume that type literals do not contain
-- types or type variables.
data TyLit
  = NumTyLit Integer
  | StrTyLit FastString
  deriving (Eq, Ord, Data.Data)
{- Note [Arguments to type constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because of kind polymorphism, in addition to type application we now
have kind instantiation. We reuse the same notations to do so.
For example:
Just (* -> *) Maybe
Right * Nat Zero
are represented by:
TyConApp (PromotedDataCon Just) [* -> *, Maybe]
TyConApp (PromotedDataCon Right) [*, Nat, (PromotedDataCon Zero)]
Important note: Nat is used as a *kind* and not as a type. This can be
confusing, since type-level Nat and kind-level Nat are identical. We
use the kind of (PromotedDataCon Right) to know if its arguments are
kinds or types.
This kind instantiation only happens in TyConApp currently.
Note [Pushing down casts]
~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have (a :: k1 -> *), (b :: k1), and (co :: * ~ q).
The type (a b |> co) is `eqType` to ((a |> co') b), where
co' = (->) <k1> co. Thus, to make this visible to functions
that inspect types, we always push down coercions, preferring
the second form. Note that this also applies to TyConApps!
Note [Non-trivial definitional equality]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Is Int |> <*> the same as Int? YES! In order to reduce headaches,
we decide that any reflexive casts in types are just ignored. More
generally, the `eqType` function, which defines Core's type equality
relation, ignores casts and coercion arguments, as long as the
two types have the same kind. This allows us to be a little sloppier
in keeping track of coercions, which is a good thing. It also means
that eqType does not depend on eqCoercion, which is also a good thing.
Why is this sensible? That is, why is something different than α-equivalence
appropriate for the implementation of eqType?
Anything smaller than ~ and homogeneous is an appropriate definition for
equality. The type safety of FC depends only on ~. Let's say η : τ ~ σ. Any
expression of type τ can be transmuted to one of type σ at any point by
casting. The same is true of types of type τ. So in some sense, τ and σ are
interchangeable.
But let's be more precise. If we examine the typing rules of FC (say, those in
http://www.cis.upenn.edu/~eir/papers/2015/equalities/equalities-extended.pdf)
there are several places where the same metavariable is used in two different
premises to a rule. (For example, see Ty_App.) There is an implicit equality
check here. What definition of equality should we use? By convention, we use
α-equivalence. Take any rule with one (or more) of these implicit equality
checks. Then there is an admissible rule that uses ~ instead of the implicit
check, adding in casts as appropriate.
The only problem here is that ~ is heterogeneous. To make the kinds work out
in the admissible rule that uses ~, it is necessary to homogenize the
coercions. That is, if we have η : (τ : κ1) ~ (σ : κ2), then we don't use η;
we use η |> kind η, which is homogeneous.
The effect of this all is that eqType, the implementation of the implicit
equality check, can use any homogeneous relation that is smaller than ~, as
those rules must also be admissible.
What would go wrong if we insisted on the casts matching? See the beginning of
Section 8 in the unpublished paper above. Theoretically, nothing at all goes
wrong. But in practical terms, getting the coercions right proved to be
nightmarish. And types would explode: during kind-checking, we often produce
reflexive kind coercions. When we try to cast by these, mkCastTy just discards
them. But if we used an eqType that distinguished between Int and Int |> <*>,
then we couldn't discard -- the output of kind-checking would be enormous,
and we would need enormous casts with lots of CoherenceCo's to straighten
them out.
Would anything go wrong if eqType respected type families? No, not at all. But
that makes eqType rather hard to implement.
Thus, the guideline for eqType is that it should be the largest
easy-to-implement relation that is still smaller than ~ and homogeneous. The
precise choice of relation is somewhat incidental, as long as the smart
constructors and destructors in Type respect whatever relation is chosen.
Another helpful principle with eqType is this:
** If (t1 eqType t2) then I can replace t1 by t2 anywhere. **
This principle also tells us that eqType must relate only types with the
same kinds.
-}
{- **********************************************************************
* *
TyBinder and ArgFlag
* *
********************************************************************** -}
-- | A 'TyBinder' represents an argument to a function. TyBinders can be dependent
-- ('Named') or nondependent ('Anon'). They may also be visible or not.
-- See Note [TyBinders]
data TyBinder
  = Named TyVarBinder   -- ^ A dependent binder; its visibility is carried
                        --   inside the 'TyVarBinder' itself
  | Anon Type           -- Visibility is determined by the type (Constraint vs. *)
  deriving Data.Data
-- | Remove the variable bound by the given binder from the set.
delBinderVar :: VarSet -> TyVarBinder -> VarSet
delBinderVar vs (TvBndr tv _) = delVarSet vs tv
-- | Does this binder bind an invisible argument?
-- A 'Named' binder is invisible iff its 'ArgFlag' says so; an 'Anon'
-- binder is invisible iff its type is a predicate (class constraint).
isInvisibleBinder :: TyBinder -> Bool
isInvisibleBinder bndr = case bndr of
  Named (TvBndr _ vis) -> isInvisibleArgFlag vis
  Anon ty              -> isPredTy ty
-- | Does this binder bind a visible argument?
-- Defined as the negation of 'isInvisibleBinder'.
isVisibleBinder :: TyBinder -> Bool
isVisibleBinder bndr = not (isInvisibleBinder bndr)
{- Note [TyBinders]
~~~~~~~~~~~~~~~~~~~
A ForAllTy contains a TyVarBinder. But a type can be decomposed
to a telescope consisting of a [TyBinder]
A TyBinder represents the type of binders -- that is, the type of an
argument to a Pi-type. GHC Core currently supports two different
Pi-types:
* A non-dependent function,
written with ->, e.g. ty1 -> ty2
represented as FunTy ty1 ty2
* A dependent compile-time-only polytype,
written with forall, e.g. forall (a:*). ty
represented as ForAllTy (TvBndr a v) ty
Both Pi-types classify terms/types that take an argument. In other
words, if `x` is either a function or a polytype, `x arg` makes sense
(for an appropriate `arg`). It is thus often convenient to group
Pi-types together. This is ForAllTy.
The two constructors for TyBinder sort out the two different possibilities.
`Named` builds a polytype, while `Anon` builds an ordinary function.
(ForAllTy (Anon arg) res used to be called FunTy arg res.)
Note [TyBinders and ArgFlags]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A ForAllTy contains a TyVarBinder. Each TyVarBinder is equipped
with a ArgFlag, which says whether or not arguments for this
binder should be visible (explicit) in source Haskell.
-----------------------------------------------------------------------
Occurrences look like this
TyBinder GHC displays type as in Haskell source code
-----------------------------------------------------------------------
In the type of a term
Anon: f :: type -> type Arg required: f x
Named Inferred: f :: forall {a}. type Arg not allowed: f
Named Specified: f :: forall a. type Arg optional: f or f @Int
Named Required: Illegal: See Note [No Required TyBinder in terms]
In the kind of a type
Anon: T :: kind -> kind Required: T *
Named Inferred: T :: forall {k}. kind Arg not allowed: T
Named Specified: T :: forall k. kind Arg not allowed[1]: T
Named Required: T :: forall k -> kind Required: T *
------------------------------------------------------------------------
[1] In types, in the Specified case, it would make sense to allow
optional kind applications, thus (T @*), but we have not
yet implemented that
---- Examples of where the different visibilities come from -----
In term declarations:
* Inferred. Function defn, with no signature: f1 x = x
We infer f1 :: forall {a}. a -> a, with 'a' Inferred
It's Inferred because it doesn't appear in any
user-written signature for f1
* Specified. Function defn, with signature (implicit forall):
f2 :: a -> a; f2 x = x
So f2 gets the type f2 :: forall a. a->a, with 'a' Specified
even though 'a' is not bound in the source code by an explicit forall
* Specified. Function defn, with signature (explicit forall):
f3 :: forall a. a -> a; f3 x = x
So f3 gets the type f3 :: forall a. a->a, with 'a' Specified
* Inferred/Specified. Function signature with inferred kind polymorphism.
f4 :: a b -> Int
So 'f4' gets the type f4 :: forall {k} (a:k->*) (b:k). a b -> Int
Here 'k' is Inferred (it's not mentioned in the type),
but 'a' and 'b' are Specified.
* Specified. Function signature with explicit kind polymorphism
f5 :: a (b :: k) -> Int
This time 'k' is Specified, because it is mentioned explicitly,
so we get f5 :: forall (k:*) (a:k->*) (b:k). a b -> Int
* Similarly pattern synonyms:
Inferred - from inferred types (e.g. no pattern type signature)
- or from inferred kind polymorphism
In type declarations:
* Inferred (k)
data T1 a b = MkT1 (a b)
Here T1's kind is T1 :: forall {k:*}. (k->*) -> k -> *
The kind variable 'k' is Inferred, since it is not mentioned
Note that 'a' and 'b' correspond to /Anon/ TyBinders in T1's kind,
and Anon binders don't have a visibility flag. (Or you could think
of Anon having an implicit Required flag.)
* Specified (k)
data T2 (a::k->*) b = MkT (a b)
Here T's kind is T :: forall (k:*). (k->*) -> k -> *
The kind variable 'k' is Specified, since it is mentioned in
the signature.
* Required (k)
data T k (a::k->*) b = MkT (a b)
Here T's kind is T :: forall k:* -> (k->*) -> k -> *
The kind is Required, since it is bound in a positional way in T's declaration
Every use of T must be explicitly applied to a kind
* Inferred (k1), Specified (k)
data T a b (c :: k) = MkT (a b) (Proxy c)
Here T's kind is T :: forall {k1:*} (k:*). (k1->*) -> k1 -> k -> *
So 'k' is Specified, because it appears explicitly,
but 'k1' is Inferred, because it does not
---- Printing -----
We print forall types with enough syntax to tell you their visibility
flag. But this is not source Haskell, and these types may not all
be parsable.
Specified: a list of Specified binders is written between `forall` and `.`:
const :: forall a b. a -> b -> a
Inferred: with -fprint-explicit-foralls, Inferred binders are written
in braces:
f :: forall {k} (a:k). S k a -> Int
Otherwise, they are printed like Specified binders.
Required: binders are put between `forall` and `->`:
T :: forall k -> *
---- Other points -----
* In classic Haskell, all named binders (that is, the type variables in
a polymorphic function type f :: forall a. a -> a) have been Inferred.
* Inferred variables correspond to "generalized" variables from the
Visible Type Applications paper (ESOP'16).
Note [No Required TyBinder in terms]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't allow Required foralls for term variables, including pattern
synonyms and data constructors. Why? Because then an application
would need a /compulsory/ type argument (possibly without an "@"?),
thus (f Int); and we don't have concrete syntax for that.
We could change this decision, but Required, Named TyBinders are rare
anyway. (Most are Anons.)
-}
{- **********************************************************************
* *
PredType
* *
********************************************************************** -}
-- | A type of the form @p@ of kind @Constraint@ represents a value whose type is
-- the Haskell predicate @p@, where a predicate is what occurs before
-- the @=>@ in a Haskell type.
--
-- We use 'PredType' as documentation to mark those types that we guarantee to have
-- this kind.
--
-- It can be expanded into its representation, but:
--
-- * The type checker must treat it as opaque
--
-- * The rest of the compiler treats it as transparent
--
-- Consider these examples:
--
-- > f :: (Eq a) => a -> Int
-- > g :: (?x :: Int -> Int) => a -> Int
-- > h :: (r\l) => {r} => {l::Int | r}
--
-- Here the @Eq a@ and @?x :: Int -> Int@ and @r\l@ are all called \"predicates\"
type PredType = Type   -- Documentation-only synonym: a 'Type' of kind Constraint
                       -- (see the Haddock comment preceding this declaration)
-- | A collection of 'PredType's
type ThetaType = [PredType]
{-
(We don't support TREX records yet, but the setup is designed
to expand to allow them.)
A Haskell qualified type, such as that for f,g,h above, is
represented using
* a FunTy for the double arrow
* with a type of kind Constraint as the function argument
The predicate really does turn into a real extra argument to the
function. If the argument has type (p :: Constraint) then the predicate p is
represented by evidence of type p.
%************************************************************************
%* *
Simple constructors
%* *
%************************************************************************
These functions are here so that they can be used by TysPrim,
which in turn is imported by Type
-}
-- named with "Only" to prevent naive use of mkTyVarTy
-- | Wrap a 'TyVar' as a 'Type'.
-- The assertion (a DEBUG-only CPP macro) checks that the variable
-- satisfies 'isTyVar', i.e. that it is a type variable and not a
-- coercion variable.
mkTyVarTy :: TyVar -> Type
mkTyVarTy v = ASSERT2( isTyVar v, ppr v <+> dcolon <+> ppr (tyVarKind v) )
              TyVarTy v
-- | Wrap a list of 'TyVar's as 'Type's; see 'mkTyVarTy'.
mkTyVarTys :: [TyVar] -> [Type]
mkTyVarTys tvs = map mkTyVarTy tvs
infixr 3 `mkFunTy`      -- Associates to the right
-- | Make an arrow type: @mkFunTy t1 t2@ is @t1 -> t2@.
mkFunTy :: Type -> Type -> Type
mkFunTy = FunTy
-- | Make nested arrow types: @mkFunTys [t1,t2] r@ is @t1 -> t2 -> r@.
mkFunTys :: [Type] -> Type -> Type
mkFunTys []       res = res
mkFunTys (t : ts) res = t `mkFunTy` mkFunTys ts res
-- | Build a 'ForAllTy' from the binder's variable and visibility flag.
mkForAllTy :: TyVar -> ArgFlag -> Type -> Type
mkForAllTy tv vis body = ForAllTy (TvBndr tv vis) body
-- | Wraps foralls over the type using the provided 'TyVar's from left to right
mkForAllTys :: [TyVarBinder] -> Type -> Type
mkForAllTys bndrs body = go bndrs
  where
    -- Wrap each binder in turn, innermost last
    go []       = body
    go (b : bs) = ForAllTy b (go bs)
-- | Build a Pi-type from a 'TyBinder': 'Anon' yields an arrow ('FunTy'),
-- 'Named' yields a 'ForAllTy'.
mkPiTy :: TyBinder -> Type -> Type
mkPiTy bndr body = case bndr of
  Anon arg  -> FunTy arg body
  Named tvb -> ForAllTy tvb body
-- | Wrap a telescope of 'TyBinder's around a type, left to right.
mkPiTys :: [TyBinder] -> Type -> Type
mkPiTys []       body = body
mkPiTys (b : bs) body = mkPiTy b (mkPiTys bs body)
-- | Does this type classify a core (unlifted) Coercion?
-- At either role nominal or representational
--    (t1 ~# t2) or (t1 ~R# t2)
isCoercionType :: Type -> Bool
isCoercionType ty = case ty of
  TyConApp tc args
    | tc `hasKey` eqPrimTyConKey || tc `hasKey` eqReprPrimTyConKey
    , length args == 4     -- the heterogeneous-equality tycons take 4 args
    -> True
  _ -> False
-- | Create the plain type constructor type which has been applied to no type arguments at all.
mkTyConTy :: TyCon -> Type
mkTyConTy tc = TyConApp tc []
{-
Some basic functions, put here to break loops eg with the pretty printer
-}
-- | This version considers Constraint to be distinct from *.
isLiftedTypeKind :: Kind -> Bool
-- NB: the clause order matters. Synonyms are expanded first so that a
-- synonym for the lifted kind is still recognised.
isLiftedTypeKind ki | Just ki' <- coreView ki = isLiftedTypeKind ki'
-- The lifted kind is exactly (TYPE 'PtrRepLifted) in this representation.
isLiftedTypeKind (TyConApp tc [TyConApp ptr_rep []])
  =  tc `hasKey` tYPETyConKey
  && ptr_rep `hasKey` ptrRepLiftedDataConKey
isLiftedTypeKind _ = False
-- | Is this the kind of an /unlifted/ type, i.e. (TYPE rep) for some
-- rep other than 'PtrRepLifted?
isUnliftedTypeKind :: Kind -> Bool
-- NB: clause order matters. Expand synonyms first.
isUnliftedTypeKind ki | Just ki' <- coreView ki = isUnliftedTypeKind ki'
-- (TYPE 'PtrRepLifted) is the lifted kind, so it is NOT unlifted.
isUnliftedTypeKind (TyConApp tc [TyConApp ptr_rep []])
  | tc `hasKey` tYPETyConKey
  , ptr_rep `hasKey` ptrRepLiftedDataConKey
  = False
-- Only a /closed/ rep argument counts as unlifted; presumably a rep with
-- free variables might still instantiate to 'PtrRepLifted -- TODO confirm.
isUnliftedTypeKind (TyConApp tc [arg])
  = tc `hasKey` tYPETyConKey && isEmptyVarSet (tyCoVarsOfType arg)
    -- all other possibilities are unlifted
isUnliftedTypeKind _ = False
-- | Is this the type 'RuntimeRep'?
isRuntimeRepTy :: Type -> Bool
-- Expand synonyms first (clause order matters), then check for a
-- nullary application of the RuntimeRep tycon.
isRuntimeRepTy ty | Just ty' <- coreView ty = isRuntimeRepTy ty'
isRuntimeRepTy (TyConApp tc []) = tc `hasKey` runtimeRepTyConKey
isRuntimeRepTy _ = False
-- | Is this a type of kind RuntimeRep? (e.g. PtrRep)
isRuntimeRepKindedTy :: Type -> Bool
isRuntimeRepKindedTy ty = isRuntimeRepTy (typeKind ty)
-- | Is a tyvar of type 'RuntimeRep'?
isRuntimeRepVar :: TyVar -> Bool
isRuntimeRepVar tv = isRuntimeRepTy (tyVarKind tv)
-- | Drops the leading RuntimeRep-kinded arguments of a 'TyConApp'
-- argument list. Useful for e.g. dropping 'PtrRep arguments of unboxed
-- tuple TyCon applications:
--
--   dropRuntimeRepArgs [ 'PtrRepLifted, 'PtrRepUnlifted
--                      , String, Int# ] == [String, Int#]
--
dropRuntimeRepArgs :: [Type] -> [Type]
dropRuntimeRepArgs tys = dropWhile isRuntimeRepKindedTy tys
{-
%************************************************************************
%* *
Coercions
%* *
%************************************************************************
-}
-- | A 'Coercion' is concrete evidence of the equality/convertibility
-- of two types.
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
data Coercion
  -- Each constructor has a "role signature", indicating the way roles are
  -- propagated through coercions.
  --    -  P, N, and R stand for coercions of the given role
  --    -  e stands for a coercion of a specific unknown role
  --           (think "role polymorphism")
  --    -  "e" stands for an explicit role parameter indicating role e.
  --    -   _ stands for a parameter that is not a Role or Coercion.

  -- These ones mirror the shape of types
  = -- Refl :: "e" -> _ -> e
    Refl Role Type  -- See Note [Refl invariant]
      -- Invariant: applications of (Refl T) to a bunch of identity coercions
      --            always show up as Refl.
      -- For example  (Refl T) (Refl a) (Refl b) shows up as (Refl (T a b)).

      -- Applications of (Refl T) to some coercions, at least one of
      -- which is NOT the identity, show up as TyConAppCo.
      -- (They may not be fully saturated however.)
      -- ConAppCo coercions (like all coercions other than Refl)
      -- are NEVER the identity.

      -- Use (Refl Representational _), not (SubCo (Refl Nominal _))

  -- These ones simply lift the correspondingly-named
  -- Type constructors into Coercions

  -- TyConAppCo :: "e" -> _ -> ?? -> e
  -- See Note [TyConAppCo roles]
  | TyConAppCo Role TyCon [Coercion]    -- lift TyConApp
               -- The TyCon is never a synonym;
               -- we expand synonyms eagerly
               -- But it can be a type function

  | AppCo Coercion CoercionN             -- lift AppTy
          -- AppCo :: e -> N -> e

  -- See Note [Forall coercions]
  | ForAllCo TyVar KindCoercion Coercion
         -- ForAllCo :: _ -> N -> e -> e

  -- These are special
  | CoVarCo CoVar      -- :: _ -> (N or R)
                       -- result role depends on the tycon of the variable's type

    -- AxiomInstCo :: e -> _ -> [N] -> e
  | AxiomInstCo (CoAxiom Branched) BranchIndex [Coercion]
     -- See also [CoAxiom index]
     -- The coercion arguments always *precisely* saturate
     -- arity of (that branch of) the CoAxiom. If there are
     -- any left over, we use AppCo.
     -- See [Coercion axioms applied to coercions]

  | UnivCo UnivCoProvenance Role Type Type
      -- :: _ -> "e" -> _ -> _ -> e

  | SymCo Coercion             -- :: e -> e
  | TransCo Coercion Coercion  -- :: e -> e -> e

    -- The number coercions should match exactly the expectations
    -- of the CoAxiomRule (i.e., the rule is fully saturated).
  | AxiomRuleCo CoAxiomRule [Coercion]

  | NthCo Int Coercion     -- Zero-indexed; decomposes (T t0 ... tn)
    -- :: _ -> e -> ?? (inverse of TyConAppCo, see Note [TyConAppCo roles])
    -- Using NthCo on a ForAllCo gives an N coercion always
    -- See Note [NthCo and newtypes]

  | LRCo   LeftOrRight CoercionN     -- Decomposes (t_left t_right)
    -- :: _ -> N -> N

  | InstCo Coercion CoercionN
    -- :: e -> N -> e
    -- See Note [InstCo roles]

  -- Coherence applies a coercion to the left-hand type of another coercion
  -- See Note [Coherence]
  | CoherenceCo Coercion KindCoercion
     -- :: e -> N -> e

  -- Extract a kind coercion from a (heterogeneous) type coercion
  -- NB: all kind coercions are Nominal
  | KindCo Coercion
     -- :: e -> N

  | SubCo CoercionN                  -- Turns a ~N into a ~R
    -- :: N -> R

  deriving Data.Data
-- Role-advertising synonyms for 'Coercion'. These are plain (transparent)
-- type synonyms, so the advertised role is documentation only and is not
-- enforced by the type checker.
type CoercionN = Coercion       -- always nominal
type CoercionR = Coercion       -- always representational
type CoercionP = Coercion       -- always phantom
type KindCoercion = CoercionN   -- always nominal
{-
Note [Refl invariant]
~~~~~~~~~~~~~~~~~~~~~
Invariant 1:
Coercions have the following invariant
Refl is always lifted as far as possible.
You might think that a consequence is:
Every identity coercion has Refl at the root
But that's not quite true because of coercion variables. Consider
g where g :: Int~Int
Left h where h :: Maybe Int ~ Maybe Int
etc. So the consequence is only true of coercions that
have no coercion variables.
Note [Coercion axioms applied to coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The reason coercion axioms can be applied to coercions and not just
types is to allow for better optimization. There are some cases where
we need to be able to "push transitivity inside" an axiom in order to
expose further opportunities for optimization.
For example, suppose we have
C a : t[a] ~ F a
g : b ~ c
and we want to optimize
sym (C b) ; t[g] ; C c
which has the kind
F b ~ F c
(stopping through t[b] and t[c] along the way).
We'd like to optimize this to just F g -- but how? The key is
that we need to allow axioms to be instantiated by *coercions*,
not just by types. Then we can (in certain cases) push
transitivity inside the axiom instantiations, and then react
opposite-polarity instantiations of the same axiom. In this
case, e.g., we match t[g] against the LHS of (C c)'s kind, to
obtain the substitution a |-> g (note this operation is sort
of the dual of lifting!) and hence end up with
C g : t[b] ~ F c
which indeed has the same kind as t[g] ; C c.
Now we have
sym (C b) ; C g
which can be optimized to F g.
Note [CoAxiom index]
~~~~~~~~~~~~~~~~~~~~
A CoAxiom has 1 or more branches. Each branch contains a list
of the free type variables in that branch, the LHS type patterns,
and the RHS type for that branch. When we apply an axiom to a list
of coercions, we must choose which branch of the axiom we wish to
use, as the different branches may have different numbers of free
type variables. (The number of type patterns is always the same
among branches, but that doesn't quite concern us here.)
The Int in the AxiomInstCo constructor is the 0-indexed number
of the chosen branch.
Note [Forall coercions]
~~~~~~~~~~~~~~~~~~~~~~~
Constructing coercions between forall-types can be a bit tricky,
because the kinds of the bound tyvars can be different.
The typing rule is:
kind_co : k1 ~ k2
tv1:k1 |- co : t1 ~ t2
-------------------------------------------------------------------
ForAllCo tv1 kind_co co : all tv1:k1. t1 ~
all tv1:k2. (t2[tv1 |-> tv1 |> sym kind_co])
First, the TyVar stored in a ForAllCo is really an optimisation: this field
should be a Name, as its kind is redundant. Thinking of the field as a Name
is helpful in understanding what a ForAllCo means.
The idea is that kind_co gives the two kinds of the tyvar. See how, in the
conclusion, tv1 is assigned kind k1 on the left but kind k2 on the right.
Of course, a type variable can't have different kinds at the same time. So,
we arbitrarily prefer the first kind when using tv1 in the inner coercion
co, which shows that t1 equals t2.
The last wrinkle is that we need to fix the kinds in the conclusion. In
t2, tv1 is assumed to have kind k1, but it has kind k2 in the conclusion of
the rule. So we do a kind-fixing substitution, replacing (tv1:k1) with
(tv1:k2) |> sym kind_co. This substitution is slightly bizarre, because it
mentions the same name with different kinds, but it *is* well-kinded, noting
that `(tv1:k2) |> sym kind_co` has kind k1.
This all really would work storing just a Name in the ForAllCo. But we can't
add Names to, e.g., VarSets, and there generally is just an impedance mismatch
in a bunch of places. So we use tv1. When we need tv2, we can use
setTyVarKind.
Note [Coherence]
~~~~~~~~~~~~~~~~
The Coherence typing rule is thus:
g1 : s ~ t s : k1 g2 : k1 ~ k2
------------------------------------
CoherenceCo g1 g2 : (s |> g2) ~ t
While this looks (and is) unsymmetric, a combination of other coercion
combinators can make the symmetric version.
For role information, see Note [Roles and kind coercions].
Note [Predicate coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
g :: a~b
How can we coerce between types
([c]~a) => [a] -> c
and
([c]~b) => [b] -> c
where the equality predicate *itself* differs?
Answer: we simply treat (~) as an ordinary type constructor, so these
types really look like
((~) [c] a) -> [a] -> c
((~) [c] b) -> [b] -> c
So the coercion between the two is obviously
((~) [c] g) -> [g] -> c
Another way to see this to say that we simply collapse predicates to
their representation type (see Type.coreView and Type.predTypeRep).
This collapse is done by mkPredCo; there is no PredCo constructor
in Coercion. This is important because we need Nth to work on
predicates too:
Nth 1 ((~) [c] g) = g
See Simplify.simplCoercionF, which generates such selections.
Note [Roles]
~~~~~~~~~~~~
Roles are a solution to the GeneralizedNewtypeDeriving problem, articulated
in Trac #1496. The full story is in docs/core-spec/core-spec.pdf. Also, see
http://ghc.haskell.org/trac/ghc/wiki/RolesImplementation
Here is one way to phrase the problem:
Given:
newtype Age = MkAge Int
type family F x
type instance F Age = Bool
type instance F Int = Char
This compiles down to:
axAge :: Age ~ Int
axF1 :: F Age ~ Bool
axF2 :: F Int ~ Char
Then, we can make:
(sym (axF1) ; F axAge ; axF2) :: Bool ~ Char
Yikes!
The solution is _roles_, as articulated in "Generative Type Abstraction and
Type-level Computation" (POPL 2010), available at
http://www.seas.upenn.edu/~sweirich/papers/popl163af-weirich.pdf
The specification for roles has evolved somewhat since that paper. For the
current full details, see the documentation in docs/core-spec. Here are some
highlights.
We label every equality with a notion of type equivalence, of which there are
three options: Nominal, Representational, and Phantom. A ground type is
nominally equivalent only with itself. A newtype (which is considered a ground
type in Haskell) is representationally equivalent to its representation.
Anything is "phantomly" equivalent to anything else. We use "N", "R", and "P"
to denote the equivalences.
The axioms above would be:
axAge :: Age ~R Int
axF1 :: F Age ~N Bool
axF2 :: F Age ~N Char
Then, because transitivity applies only to coercions proving the same notion
of equivalence, the above construction is impossible.
However, there is still an escape hatch: we know that any two types that are
nominally equivalent are representationally equivalent as well. This is what
the form SubCo proves -- it "demotes" a nominal equivalence into a
representational equivalence. So, it would seem the following is possible:
sub (sym axF1) ; F axAge ; sub axF2 :: Bool ~R Char -- WRONG
What saves us here is that the arguments to a type function F, lifted into a
coercion, *must* prove nominal equivalence. So, (F axAge) is ill-formed, and
we are safe.
Roles are attached to parameters to TyCons. When lifting a TyCon into a
coercion (through TyConAppCo), we need to ensure that the arguments to the
TyCon respect their roles. For example:
data T a b = MkT a (F b)
If we know that a1 ~R a2, then we know (T a1 b) ~R (T a2 b). But, if we know
that b1 ~R b2, we know nothing about (T a b1) and (T a b2)! This is because
the type function F branches on b's *name*, not representation. So, we say
that 'a' has role Representational and 'b' has role Nominal. The third role,
Phantom, is for parameters not used in the type's definition. Given the
following definition
data Q a = MkQ Int
the Phantom role allows us to say that (Q Bool) ~R (Q Char), because we
can construct the coercion Bool ~P Char (using UnivCo).
See the paper cited above for more examples and information.
Note [TyConAppCo roles]
~~~~~~~~~~~~~~~~~~~~~~~
The TyConAppCo constructor has a role parameter, indicating the role at
which the coercion proves equality. The choice of this parameter affects
the required roles of the arguments of the TyConAppCo. To help explain
it, assume the following definition:
type instance F Int = Bool -- Axiom axF : F Int ~N Bool
newtype Age = MkAge Int -- Axiom axAge : Age ~R Int
data Foo a = MkFoo a -- Role on Foo's parameter is Representational
TyConAppCo Nominal Foo axF : Foo (F Int) ~N Foo Bool
For (TyConAppCo Nominal) all arguments must have role Nominal. Why?
So that Foo Age ~N Foo Int does *not* hold.
TyConAppCo Representational Foo (SubCo axF) : Foo (F Int) ~R Foo Bool
TyConAppCo Representational Foo axAge : Foo Age ~R Foo Int
For (TyConAppCo Representational), all arguments must have the roles
corresponding to the result of tyConRoles on the TyCon. This is the
whole point of having roles on the TyCon to begin with. So, we can
have Foo Age ~R Foo Int, if Foo's parameter has role R.
If a Representational TyConAppCo is over-saturated (which is otherwise fine),
the spill-over arguments must all be at Nominal. This corresponds to the
behavior for AppCo.
TyConAppCo Phantom Foo (UnivCo Phantom Int Bool) : Foo Int ~P Foo Bool
All arguments must have role Phantom. This one isn't strictly
necessary for soundness, but this choice removes ambiguity.
The rules here dictate the roles of the parameters to mkTyConAppCo
(should be checked by Lint).
Note [NthCo and newtypes]
~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
newtype N a = MkN Int
type role N representational
This yields axiom
NTCo:N :: forall a. N a ~R Int
We can then build
co :: forall a b. N a ~R N b
co = NTCo:N a ; sym (NTCo:N b)
for any `a` and `b`. Because of the role annotation on N, if we use
NthCo, we'll get out a representational coercion. That is:
NthCo 0 co :: forall a b. a ~R b
Yikes! Clearly, this is terrible. The solution is simple: forbid
NthCo to be used on newtypes if the internal coercion is representational.
This is not just some corner case discovered by a segfault somewhere;
it was discovered in the proof of soundness of roles and described
in the "Safe Coercions" paper (ICFP '14).
Note [InstCo roles]
~~~~~~~~~~~~~~~~~~~
Here is (essentially) the typing rule for InstCo:
g :: (forall a. t1) ~r (forall a. t2)
w :: s1 ~N s2
------------------------------- InstCo
InstCo g w :: (t1 [a |-> s1]) ~r (t2 [a |-> s2])
Note that the Coercion w *must* be nominal. This is necessary
because the variable a might be used in a "nominal position"
(that is, a place where role inference would require a nominal
role) in t1 or t2. If we allowed w to be representational, we
could get bogus equalities.
A more nuanced treatment might be able to relax this condition
somewhat, by checking if t1 and/or t2 use their bound variables
in nominal ways. If not, having w be representational is OK.
%************************************************************************
%* *
UnivCoProvenance
%* *
%************************************************************************
A UnivCo is a coercion whose proof does not directly express its role
and kind (indeed for some UnivCos, like UnsafeCoerceProv, there /is/
no proof).
The different kinds of UnivCo are described by UnivCoProvenance. Really
each is entirely separate, but they all share the need to represent their
role and kind, which is done in the UnivCo constructor.
-}
-- | For simplicity, we have just one UnivCo that represents a coercion from
-- some type to some other type, with (in general) no restrictions on the
-- type. The UnivCoProvenance specifies more exactly what the coercion really
-- is and why a program should (or shouldn't!) trust the coercion.
-- It is reasonable to consider each constructor of 'UnivCoProvenance'
-- as a totally independent coercion form; their only commonality is
-- that they don't tell you what types they coerce between. (That info
-- is in the 'UnivCo' constructor of 'Coercion'.)
data UnivCoProvenance
  = UnsafeCoerceProv   -- ^ From @unsafeCoerce#@. These are unsound.

  | PhantomProv KindCoercion -- ^ See Note [Phantom coercions]. Only in Phantom
                             -- roled coercions

  | ProofIrrelProv KindCoercion  -- ^ From the fact that any two coercions are
                                 -- considered equivalent. See Note [ProofIrrelProv].
                                 -- Can be used in Nominal or Representational coercions

  | PluginProv String  -- ^ From a plugin, which asserts that this coercion
                       -- is sound. The string is for the use of the plugin.

  | HoleProv CoercionHole  -- ^ See Note [Coercion holes]
  deriving Data.Data
instance Outputable UnivCoProvenance where
  -- Render each provenance as a short parenthesised tag
  ppr prov = case prov of
    UnsafeCoerceProv -> text "(unsafeCoerce#)"
    PhantomProv _    -> text "(phantom)"
    ProofIrrelProv _ -> text "(proof irrel.)"
    PluginProv str   -> parens (text "plugin" <+> brackets (text str))
    HoleProv hole    -> parens (text "hole" <> ppr hole)
-- | A coercion to be filled in by the type-checker. See Note [Coercion holes]
data CoercionHole
  = CoercionHole { chUnique   :: Unique -- ^ used only for debugging
                 , chCoercion :: IORef (Maybe Coercion)
                     -- ^ 'Nothing' until the constraint solver fills in the
                     -- hole by side effect; see Note [Coercion holes]
                 }
instance Data.Data CoercionHole where
  -- don't traverse?
  -- Treated as an abstract, non-representable leaf (it holds a mutable
  -- 'IORef'), so generic traversals cannot look inside or rebuild one.
  toConstr _   = abstractConstr "CoercionHole"
  gunfold _ _  = error "gunfold"
  dataTypeOf _ = mkNoRepType "CoercionHole"
instance Outputable CoercionHole where
  -- Show only the debugging unique, in braces
  ppr (CoercionHole { chUnique = u }) = braces (ppr u)
{- Note [Phantom coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a = T1 | T2
Then we have
T s ~R T t
for any old s,t. The witness for this is (TyConAppCo T Rep co),
where (co :: s ~P t) is a phantom coercion built with PhantomProv.
The role of the UnivCo is always Phantom. The Coercion stored is the
(nominal) kind coercion between the types
kind(s) ~N kind (t)
Note [Coercion holes]
~~~~~~~~~~~~~~~~~~~~~~~~
During typechecking, constraint solving for type classes works by
- Generate an evidence Id, d7 :: Num a
- Wrap it in a Wanted constraint, [W] d7 :: Num a
- Use the evidence Id where the evidence is needed
- Solve the constraint later
- When solved, add an enclosing let-binding let d7 = .... in ....
which actually binds d7 to the (Num a) evidence
For equality constraints we use a different strategy. See Note [The
equality types story] in TysPrim for background on equality constraints.
- For boxed equality constraints, (t1 ~N t2) and (t1 ~R t2), it's just
like type classes above. (Indeed, boxed equality constraints *are* classes.)
- But for /unboxed/ equality constraints (t1 ~R# t2) and (t1 ~N# t2)
we use a different plan
For unboxed equalities:
- Generate a CoercionHole, a mutable variable just like a unification
variable
- Wrap the CoercionHole in a Wanted constraint; see TcRnTypes.TcEvDest
- Use the CoercionHole in a Coercion, via HoleProv
- Solve the constraint later
- When solved, fill in the CoercionHole by side effect, instead of
doing the let-binding thing
The main reason for all this is that there may be no good place to let-bind
the evidence for unboxed equalities:
- We emit constraints for kind coercions, to be used
to cast a type's kind. These coercions then must be used in types. Because
they might appear in a top-level type, there is no place to bind these
(unlifted) coercions in the usual way.
- A coercion for (forall a. t1) ~ (forall a. t2) will look like
forall a. (coercion for t1~t2)
But the coercion for (t1~t2) may mention 'a', and we don't have let-bindings
within coercions. We could add them, but coercion holes are easier.
Other notes about HoleCo:
* INVARIANT: CoercionHole and HoleProv are used only during type checking,
and should never appear in Core. Just like unification variables; a Type
can contain a TcTyVar, but only during type checking. If, one day, we
use type-level information to separate out forms that can appear during
type-checking vs forms that can appear in core proper, holes in Core will
be ruled out.
* The Unique carried with a coercion hole is used solely for debugging.
* Coercion holes can be compared for equality only like other coercions:
only by looking at the types coerced.
* We don't use holes for other evidence because other evidence wants to
be /shared/. But coercions are entirely erased, so there's little
benefit to sharing.
Note [ProofIrrelProv]
~~~~~~~~~~~~~~~~~~~~~
A ProofIrrelProv is a coercion between coercions. For example:
data G a where
MkG :: G Bool
In core, we get
G :: * -> *
MkG :: forall (a :: *). (a ~ Bool) -> G a
Now, consider 'MkG -- that is, MkG used in a type -- and suppose we want
a proof that ('MkG co1 a1) ~ ('MkG co2 a2). This will have to be
TyConAppCo Nominal MkG [co3, co4]
where
co3 :: co1 ~ co2
co4 :: a1 ~ a2
Note that
co1 :: a1 ~ Bool
co2 :: a2 ~ Bool
Here,
co3 = UnivCo (ProofIrrelProv co5) Nominal (CoercionTy co1) (CoercionTy co2)
where
co5 :: (a1 ~ Bool) ~ (a2 ~ Bool)
co5 = TyConAppCo Nominal (~) [<*>, <*>, co4, <Bool>]
%************************************************************************
%* *
Free variables of types and coercions
%* *
%************************************************************************
-}
{- Note [Free variables of types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The family of functions tyCoVarsOfType, tyCoVarsOfTypes etc, returns
a VarSet that is closed over the types of its variables. More precisely,
if S = tyCoVarsOfType( t )
and (a:k) is in S
then tyCoVarsOfType( k ) is a subset of S
Example: The tyCoVars of this ((a:* -> k) Int) is {a, k}.
We could /not/ close over the kinds of the variable occurrences, and
instead do so at call sites, but it seems that we always want to do
so, so it's easiest to do it here.
-}
-- | Returns free variables of a type, including kind variables as
-- a non-deterministic set. For type synonyms it does /not/ expand the
-- synonym.
tyCoVarsOfType :: Type -> TyCoVarSet
-- See Note [Free variables of types]
tyCoVarsOfType ty = fvVarSet $ tyCoFVsOfType ty

-- | `tyCoVarsOfType` that returns free variables of a type in a deterministic
-- set. For explanation of why using `VarSet` is not deterministic see
-- Note [Deterministic FV] in FV.
tyCoVarsOfTypeDSet :: Type -> DTyCoVarSet
-- See Note [Free variables of types]
tyCoVarsOfTypeDSet ty = fvDVarSet $ tyCoFVsOfType ty

-- | `tyCoVarsOfType` that returns free variables of a type in deterministic
-- order. For explanation of why using `VarSet` is not deterministic see
-- Note [Deterministic FV] in FV.
tyCoVarsOfTypeList :: Type -> [TyCoVar]
-- See Note [Free variables of types]
tyCoVarsOfTypeList ty = fvVarList $ tyCoFVsOfType ty
-- | The worker for `tyCoVarsOfType` and `tyCoVarsOfTypeList`.
-- The previous implementation used `unionVarSet` which is O(n+m) and can
-- make the function quadratic.
-- It's exported, so that it can be composed with
-- other functions that compute free variables.
-- See Note [FV naming conventions] in FV.
--
-- Eta-expanded because that makes it run faster (apparently)
-- See Note [FV eta expansion] in FV for explanation.
tyCoFVsOfType :: Type -> FV
-- See Note [Free variables of types]
-- A tyvar occurrence also contributes the free vars of its kind, keeping
-- the result closed over kinds (see Note [Free variables of types]).
tyCoFVsOfType (TyVarTy v)        a b c = (unitFV v `unionFV` tyCoFVsOfType (tyVarKind v)) a b c
tyCoFVsOfType (TyConApp _ tys)   a b c = tyCoFVsOfTypes tys a b c
tyCoFVsOfType (LitTy {})         a b c = emptyFV a b c
tyCoFVsOfType (AppTy fun arg)    a b c = (tyCoFVsOfType fun `unionFV` tyCoFVsOfType arg) a b c
tyCoFVsOfType (FunTy arg res)    a b c = (tyCoFVsOfType arg `unionFV` tyCoFVsOfType res) a b c
tyCoFVsOfType (ForAllTy bndr ty) a b c = tyCoFVsBndr bndr (tyCoFVsOfType ty) a b c
tyCoFVsOfType (CastTy ty co)     a b c = (tyCoFVsOfType ty `unionFV` tyCoFVsOfCo co) a b c
tyCoFVsOfType (CoercionTy co)    a b c = tyCoFVsOfCo co a b c
tyCoFVsBndr :: TyVarBinder -> FV -> FV
-- Free vars of (forall b. <thing with fvs>)
-- The bound variable is deleted from the body's free vars, but the free
-- vars of its kind are always added: the kind is outside the binder's scope.
tyCoFVsBndr (TvBndr tv _) fvs = (delFV tv fvs)
                                `unionFV` tyCoFVsOfType (tyVarKind tv)
-- | Returns free variables of types, including kind variables as
-- a non-deterministic set. For type synonyms it does /not/ expand the
-- synonym.
tyCoVarsOfTypes :: [Type] -> TyCoVarSet
-- See Note [Free variables of types]
tyCoVarsOfTypes tys = fvVarSet $ tyCoFVsOfTypes tys

-- | Returns free variables of the range of a tyvar environment, including
-- kind variables, as a non-deterministic set. For type synonyms it does
-- /not/ expand the synonym.
tyCoVarsOfTypesSet :: TyVarEnv Type -> TyCoVarSet
-- See Note [Free variables of types]
tyCoVarsOfTypesSet tys = fvVarSet $ tyCoFVsOfTypes $ nonDetEltsUFM tys
  -- It's OK to use nonDetEltsUFM here because we immediately forget the
  -- ordering by returning a set

-- | Returns free variables of types, including kind variables as
-- a deterministic set. For type synonyms it does /not/ expand the
-- synonym.
tyCoVarsOfTypesDSet :: [Type] -> DTyCoVarSet
-- See Note [Free variables of types]
tyCoVarsOfTypesDSet tys = fvDVarSet $ tyCoFVsOfTypes tys

-- | Returns free variables of types, including kind variables as
-- a deterministically ordered list. For type synonyms it does /not/ expand the
-- synonym.
tyCoVarsOfTypesList :: [Type] -> [TyCoVar]
-- See Note [Free variables of types]
tyCoVarsOfTypesList tys = fvVarList $ tyCoFVsOfTypes tys

-- The FV worker for a list of types; unions the per-type computations.
-- Eta-expanded over the FV arguments; see Note [FV eta expansion] in FV.
tyCoFVsOfTypes :: [Type] -> FV
-- See Note [Free variables of types]
tyCoFVsOfTypes (ty:tys) fv_cand in_scope acc = (tyCoFVsOfType ty `unionFV` tyCoFVsOfTypes tys) fv_cand in_scope acc
tyCoFVsOfTypes []       fv_cand in_scope acc = emptyFV fv_cand in_scope acc
-- | Returns the type and coercion variables free in a coercion as a
-- non-deterministic set.
tyCoVarsOfCo :: Coercion -> TyCoVarSet
-- See Note [Free variables of types]
tyCoVarsOfCo co = fvVarSet $ tyCoFVsOfCo co

-- | Get a deterministic set of the vars free in a coercion
tyCoVarsOfCoDSet :: Coercion -> DTyCoVarSet
-- See Note [Free variables of types]
tyCoVarsOfCoDSet co = fvDVarSet $ tyCoFVsOfCo co

-- | Get a deterministically ordered list of the vars free in a coercion.
tyCoVarsOfCoList :: Coercion -> [TyCoVar]
-- See Note [Free variables of types]
tyCoVarsOfCoList co = fvVarList $ tyCoFVsOfCo co
tyCoFVsOfCo :: Coercion -> FV
-- Extracts type and coercion variables from a coercion
-- See Note [Free variables of types]
-- Eta-expanded over the FV arguments; see Note [FV eta expansion] in FV.
tyCoFVsOfCo (Refl _ ty)          fv_cand in_scope acc = tyCoFVsOfType ty fv_cand in_scope acc
tyCoFVsOfCo (TyConAppCo _ _ cos) fv_cand in_scope acc = tyCoFVsOfCos cos fv_cand in_scope acc
tyCoFVsOfCo (AppCo co arg) fv_cand in_scope acc
  = (tyCoFVsOfCo co `unionFV` tyCoFVsOfCo arg) fv_cand in_scope acc
tyCoFVsOfCo (ForAllCo tv kind_co co) fv_cand in_scope acc
  -- 'tv' scopes only over the body 'co'; it does not scope over 'kind_co'
  = (delFV tv (tyCoFVsOfCo co) `unionFV` tyCoFVsOfCo kind_co) fv_cand in_scope acc
tyCoFVsOfCo (CoVarCo v) fv_cand in_scope acc
  -- A covar occurrence also contributes the free vars of its type
  = (unitFV v `unionFV` tyCoFVsOfType (varType v)) fv_cand in_scope acc
tyCoFVsOfCo (AxiomInstCo _ _ cos) fv_cand in_scope acc = tyCoFVsOfCos cos fv_cand in_scope acc
tyCoFVsOfCo (UnivCo p _ t1 t2) fv_cand in_scope acc
  = (tyCoFVsOfProv p `unionFV` tyCoFVsOfType t1
                     `unionFV` tyCoFVsOfType t2) fv_cand in_scope acc
tyCoFVsOfCo (SymCo co)          fv_cand in_scope acc = tyCoFVsOfCo co fv_cand in_scope acc
tyCoFVsOfCo (TransCo co1 co2)   fv_cand in_scope acc = (tyCoFVsOfCo co1 `unionFV` tyCoFVsOfCo co2) fv_cand in_scope acc
tyCoFVsOfCo (NthCo _ co)        fv_cand in_scope acc = tyCoFVsOfCo co fv_cand in_scope acc
tyCoFVsOfCo (LRCo _ co)         fv_cand in_scope acc = tyCoFVsOfCo co fv_cand in_scope acc
tyCoFVsOfCo (InstCo co arg)     fv_cand in_scope acc = (tyCoFVsOfCo co `unionFV` tyCoFVsOfCo arg) fv_cand in_scope acc
tyCoFVsOfCo (CoherenceCo c1 c2) fv_cand in_scope acc = (tyCoFVsOfCo c1 `unionFV` tyCoFVsOfCo c2) fv_cand in_scope acc
tyCoFVsOfCo (KindCo co)         fv_cand in_scope acc = tyCoFVsOfCo co fv_cand in_scope acc
tyCoFVsOfCo (SubCo co)          fv_cand in_scope acc = tyCoFVsOfCo co fv_cand in_scope acc
tyCoFVsOfCo (AxiomRuleCo _ cs)  fv_cand in_scope acc = tyCoFVsOfCos cs fv_cand in_scope acc
-- | Free vars of a 'UnivCo' provenance, as a non-deterministic set.
tyCoVarsOfProv :: UnivCoProvenance -> TyCoVarSet
tyCoVarsOfProv prov = fvVarSet $ tyCoFVsOfProv prov

-- FV worker for provenances. Only PhantomProv and ProofIrrelProv carry a
-- coercion; the other provenances contribute no free variables.
tyCoFVsOfProv :: UnivCoProvenance -> FV
tyCoFVsOfProv UnsafeCoerceProv    fv_cand in_scope acc = emptyFV fv_cand in_scope acc
tyCoFVsOfProv (PhantomProv co)    fv_cand in_scope acc = tyCoFVsOfCo co fv_cand in_scope acc
tyCoFVsOfProv (ProofIrrelProv co) fv_cand in_scope acc = tyCoFVsOfCo co fv_cand in_scope acc
tyCoFVsOfProv (PluginProv _)      fv_cand in_scope acc = emptyFV fv_cand in_scope acc
tyCoFVsOfProv (HoleProv _)        fv_cand in_scope acc = emptyFV fv_cand in_scope acc

-- | Free vars of a list of coercions, as a non-deterministic set.
tyCoVarsOfCos :: [Coercion] -> TyCoVarSet
tyCoVarsOfCos cos = fvVarSet $ tyCoFVsOfCos cos

-- | Free vars of the range of a covar environment, as a non-deterministic set.
tyCoVarsOfCosSet :: CoVarEnv Coercion -> TyCoVarSet
tyCoVarsOfCosSet cos = fvVarSet $ tyCoFVsOfCos $ nonDetEltsUFM cos
  -- It's OK to use nonDetEltsUFM here because we immediately forget the
  -- ordering by returning a set

-- FV worker for a list of coercions; eta-expanded, see Note [FV eta expansion] in FV.
tyCoFVsOfCos :: [Coercion] -> FV
tyCoFVsOfCos []       fv_cand in_scope acc = emptyFV fv_cand in_scope acc
tyCoFVsOfCos (co:cos) fv_cand in_scope acc = (tyCoFVsOfCo co `unionFV` tyCoFVsOfCos cos) fv_cand in_scope acc
-- Extract only the *coercion* variables free in a type.
-- Note that a tyvar occurrence recurses into the tyvar's kind, and a
-- forall binder unions in the covars of the bound variable's kind.
coVarsOfType :: Type -> CoVarSet
coVarsOfType (TyVarTy v)         = coVarsOfType (tyVarKind v)
coVarsOfType (TyConApp _ tys)    = coVarsOfTypes tys
coVarsOfType (LitTy {})          = emptyVarSet
coVarsOfType (AppTy fun arg)     = coVarsOfType fun `unionVarSet` coVarsOfType arg
coVarsOfType (FunTy arg res)     = coVarsOfType arg `unionVarSet` coVarsOfType res
coVarsOfType (ForAllTy (TvBndr tv _) ty)
  = (coVarsOfType ty `delVarSet` tv)
    `unionVarSet` coVarsOfType (tyVarKind tv)
coVarsOfType (CastTy ty co)      = coVarsOfType ty `unionVarSet` coVarsOfCo co
coVarsOfType (CoercionTy co)     = coVarsOfCo co

coVarsOfTypes :: [Type] -> TyCoVarSet
-- NOTE(review): only covars are collected, so CoVarSet (the same
-- underlying VarSet) would be the more precise result type — confirm.
coVarsOfTypes tys = mapUnionVarSet coVarsOfType tys
coVarsOfCo :: Coercion -> CoVarSet
-- Extract *coercion* variables only. Tiresome to repeat the code, but easy.
coVarsOfCo (Refl _ ty)            = coVarsOfType ty
coVarsOfCo (TyConAppCo _ _ args)  = coVarsOfCos args
coVarsOfCo (AppCo co arg)         = coVarsOfCo co `unionVarSet` coVarsOfCo arg
coVarsOfCo (ForAllCo tv kind_co co)
  -- 'tv' scopes only over the body 'co', not over 'kind_co'
  = coVarsOfCo co `delVarSet` tv `unionVarSet` coVarsOfCo kind_co
coVarsOfCo (CoVarCo v)            = unitVarSet v `unionVarSet` coVarsOfType (varType v)
coVarsOfCo (AxiomInstCo _ _ args) = coVarsOfCos args
coVarsOfCo (UnivCo p _ t1 t2)     = coVarsOfProv p `unionVarSet` coVarsOfTypes [t1, t2]
coVarsOfCo (SymCo co)             = coVarsOfCo co
coVarsOfCo (TransCo co1 co2)      = coVarsOfCo co1 `unionVarSet` coVarsOfCo co2
coVarsOfCo (NthCo _ co)           = coVarsOfCo co
coVarsOfCo (LRCo _ co)            = coVarsOfCo co
coVarsOfCo (InstCo co arg)        = coVarsOfCo co `unionVarSet` coVarsOfCo arg
coVarsOfCo (CoherenceCo c1 c2)    = coVarsOfCos [c1, c2]
coVarsOfCo (KindCo co)            = coVarsOfCo co
coVarsOfCo (SubCo co)             = coVarsOfCo co
coVarsOfCo (AxiomRuleCo _ cs)     = coVarsOfCos cs

-- Covars of a provenance; only Phantom/ProofIrrel carry a coercion.
coVarsOfProv :: UnivCoProvenance -> CoVarSet
coVarsOfProv UnsafeCoerceProv    = emptyVarSet
coVarsOfProv (PhantomProv co)    = coVarsOfCo co
coVarsOfProv (ProofIrrelProv co) = coVarsOfCo co
coVarsOfProv (PluginProv _)      = emptyVarSet
coVarsOfProv (HoleProv _)        = emptyVarSet

coVarsOfCos :: [Coercion] -> CoVarSet
coVarsOfCos cos = mapUnionVarSet coVarsOfCo cos
-- | Add the kind variables free in the kinds of the tyvars in the given set.
-- Returns a non-deterministic set.
closeOverKinds :: TyVarSet -> TyVarSet
closeOverKinds = fvVarSet . closeOverKindsFV . nonDetEltsUFM
  -- It's OK to use nonDetEltsUFM here because we immediately forget
  -- about the ordering by returning a set.

-- | Given a list of tyvars returns a deterministic FV computation that
-- returns the given tyvars with the kind variables free in the kinds of the
-- given tyvars.
-- One pass suffices: tyCoFVsOfType is itself closed over kinds
-- (see Note [Free variables of types]).
closeOverKindsFV :: [TyVar] -> FV
closeOverKindsFV tvs =
  mapUnionFV (tyCoFVsOfType . tyVarKind) tvs `unionFV` mkFVs tvs

-- | Add the kind variables free in the kinds of the tyvars in the given set.
-- Returns a deterministically ordered list.
closeOverKindsList :: [TyVar] -> [TyVar]
closeOverKindsList tvs = fvVarList $ closeOverKindsFV tvs

-- | Add the kind variables free in the kinds of the tyvars in the given set.
-- Returns a deterministic set.
closeOverKindsDSet :: DTyVarSet -> DTyVarSet
closeOverKindsDSet = fvDVarSet . closeOverKindsFV . dVarSetElems
{-
%************************************************************************
%* *
Substitutions
Data type defined here to avoid unnecessary mutual recursion
%* *
%************************************************************************
-}
-- | Type & coercion substitution
--
-- #tcvsubst_invariant#
-- The following invariants must hold of a 'TCvSubst':
--
-- 1. The in-scope set is needed /only/ to
-- guide the generation of fresh uniques
--
-- 2. In particular, the /kind/ of the type variables in
-- the in-scope set is not relevant
--
-- 3. The substitution is only applied ONCE! This is because
-- in general such application will not reach a fixed point.
data TCvSubst
  = TCvSubst InScopeSet -- The in-scope type and kind variables
             TvSubstEnv -- Substitutes both type and kind variables
             CvSubstEnv -- Substitutes coercion variables
       -- See Note [Apply Once]
       -- and Note [Extending the TvSubstEnv]
       -- and Note [Substituting types and coercions]
       -- and Note [The substitution invariant]

-- | A substitution of 'Type's for 'TyVar's
-- and 'Kind's for 'KindVar's
type TvSubstEnv = TyVarEnv Type
  -- A TvSubstEnv is used both inside a TCvSubst (with the apply-once
  -- invariant discussed in Note [Apply Once]), and also independently
  -- in the middle of matching, and unification (see Types.Unify)
  -- So you have to look at the context to know if it's idempotent or
  -- apply-once or whatever

-- | A substitution of 'Coercion's for 'CoVar's
type CvSubstEnv = CoVarEnv Coercion
{-
Note [Apply Once]
~~~~~~~~~~~~~~~~~
We use TCvSubsts to instantiate things, and we might instantiate
forall a b. ty
with the types
[a, b], or [b, a].
So the substitution might go [a->b, b->a]. A similar situation arises in Core
when we find a beta redex like
(/\ a /\ b -> e) b a
Then we also end up with a substitution that permutes type variables. Other
variations happen too; for example [a -> (a, b)].
****************************************************
*** So a TCvSubst must be applied precisely once ***
****************************************************
A TCvSubst is not idempotent, but, unlike the non-idempotent substitution
we use during unifications, it must not be repeatedly applied.
Note [Extending the TvSubstEnv]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See #tcvsubst_invariant# for the invariants that must hold.
This invariant allows a short-cut when the subst envs are empty:
if the TvSubstEnv and CvSubstEnv are empty --- i.e. (isEmptyTCvSubst subst)
holds --- then (substTy subst ty) does nothing.
For example, consider:
(/\a. /\b:(a~Int). ...b..) Int
We substitute Int for 'a'. The Unique of 'b' does not change, but
nevertheless we add 'b' to the TvSubstEnv, because b's kind does change
This invariant has several crucial consequences:
* In substTyVarBndr, we need to extend the TvSubstEnv
- if the unique has changed
- or if the kind has changed
* In substTyVar, we do not need to consult the in-scope set;
the TvSubstEnv is enough
* In substTy, substTheta, we can short-circuit when the TvSubstEnv is empty
Note [Substituting types and coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Types and coercions are mutually recursive, and either may have variables
"belonging" to the other. Thus, every time we wish to substitute in a
type, we may also need to substitute in a coercion, and vice versa.
However, the constructor used to create type variables is distinct from
that of coercion variables, so we carry two VarEnvs in a TCvSubst. Note
that it would be possible to use the CoercionTy constructor to combine
these environments, but that seems like a false economy.
Note that the TvSubstEnv should *never* map a CoVar (built with the Id
constructor) and the CvSubstEnv should *never* map a TyVar. Furthermore,
the range of the TvSubstEnv should *never* include a type headed with
CoercionTy.
Note [The substitution invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When calling (substTy subst ty) it should be the case that
the in-scope set in the substitution is a superset of both:
* The free vars of the range of the substitution
* The free vars of ty minus the domain of the substitution
If we want to substitute [a -> ty1, b -> ty2] I used to
think it was enough to generate an in-scope set that includes
fv(ty1,ty2). But that's not enough; we really should also take the
free vars of the type we are substituting into! Example:
(forall b. (a,b,x)) [a -> List b]
Then if we use the in-scope set {b}, there is a danger we will rename
the forall'd variable to 'x' by mistake, getting this:
(forall x. (List b, x, x))
Breaking this invariant caused the bug from #11371.
-}
-- | The empty tyvar substitution environment.
emptyTvSubstEnv :: TvSubstEnv
emptyTvSubstEnv = emptyVarEnv

-- | The empty covar substitution environment.
emptyCvSubstEnv :: CvSubstEnv
emptyCvSubstEnv = emptyVarEnv
composeTCvSubstEnv :: InScopeSet
                   -> (TvSubstEnv, CvSubstEnv)
                   -> (TvSubstEnv, CvSubstEnv)
                   -> (TvSubstEnv, CvSubstEnv)
-- ^ @(compose env1 env2)(x)@ is @env1(env2(x))@; i.e. apply @env2@ then @env1@.
-- It assumes that both are idempotent.
-- Typically, @env1@ is the refinement to a base substitution @env2@
composeTCvSubstEnv in_scope (tenv1, cenv1) (tenv2, cenv2)
  = ( tenv1 `plusVarEnv` mapVarEnv (substTy subst1) tenv2
    , cenv1 `plusVarEnv` mapVarEnv (substCo subst1) cenv2 )
        -- First apply env1 to the range of env2
        -- Then combine the two, making sure that env1 loses if
        -- both bind the same variable; that's why env1 is the
        -- *left* argument to plusVarEnv, because the right arg wins
  where
    subst1 = TCvSubst in_scope tenv1 cenv1

-- | Composes two substitutions, applying the second one provided first,
-- like in function composition.
composeTCvSubst :: TCvSubst -> TCvSubst -> TCvSubst
composeTCvSubst (TCvSubst is1 tenv1 cenv1) (TCvSubst is2 tenv2 cenv2)
  = TCvSubst is3 tenv3 cenv3
  where
    is3 = is1 `unionInScope` is2  -- union both in-scope sets before composing the envs
    (tenv3, cenv3) = composeTCvSubstEnv is3 (tenv1, cenv1) (tenv2, cenv2)
-- | The empty substitution: no in-scope variables, no mappings.
emptyTCvSubst :: TCvSubst
emptyTCvSubst = TCvSubst emptyInScopeSet emptyTvSubstEnv emptyCvSubstEnv

-- | An otherwise-empty substitution carrying the given in-scope set.
mkEmptyTCvSubst :: InScopeSet -> TCvSubst
mkEmptyTCvSubst is = TCvSubst is emptyTvSubstEnv emptyCvSubstEnv

-- | True when both mapping environments are empty (the in-scope set is
-- irrelevant for this test).
isEmptyTCvSubst :: TCvSubst -> Bool
-- See Note [Extending the TvSubstEnv]
isEmptyTCvSubst (TCvSubst _ tenv cenv) = isEmptyVarEnv tenv && isEmptyVarEnv cenv

-- | Build a 'TCvSubst' from its three components.
mkTCvSubst :: InScopeSet -> (TvSubstEnv, CvSubstEnv) -> TCvSubst
mkTCvSubst in_scope (tenv, cenv) = TCvSubst in_scope tenv cenv

mkTvSubst :: InScopeSet -> TvSubstEnv -> TCvSubst
-- ^ Make a TCvSubst with specified tyvar subst and empty covar subst
mkTvSubst in_scope tenv = TCvSubst in_scope tenv emptyCvSubstEnv

-- | Project out the tyvar environment.
getTvSubstEnv :: TCvSubst -> TvSubstEnv
getTvSubstEnv (TCvSubst _ env _) = env

-- | Project out the covar environment.
getCvSubstEnv :: TCvSubst -> CvSubstEnv
getCvSubstEnv (TCvSubst _ _ env) = env

-- | Project out the in-scope set.
getTCvInScope :: TCvSubst -> InScopeSet
getTCvInScope (TCvSubst in_scope _ _) = in_scope

-- | Returns the free variables of the types in the range of a substitution as
-- a non-deterministic set.
getTCvSubstRangeFVs :: TCvSubst -> VarSet
getTCvSubstRangeFVs (TCvSubst _ tenv cenv)
  = unionVarSet tenvFVs cenvFVs
  where
    tenvFVs = tyCoVarsOfTypesSet tenv
    cenvFVs = tyCoVarsOfCosSet cenv

-- | Is the variable in the substitution's in-scope set?
isInScope :: Var -> TCvSubst -> Bool
isInScope v (TCvSubst in_scope _ _) = v `elemInScopeSet` in_scope

-- | True if the variable is not mapped; tyvars are looked up in the
-- tyvar env, all other vars in the covar env.
notElemTCvSubst :: Var -> TCvSubst -> Bool
notElemTCvSubst v (TCvSubst _ tenv cenv)
  | isTyVar v
  = not (v `elemVarEnv` tenv)
  | otherwise
  = not (v `elemVarEnv` cenv)

-- | Replace the tyvar env, keeping the in-scope set and covar env.
setTvSubstEnv :: TCvSubst -> TvSubstEnv -> TCvSubst
setTvSubstEnv (TCvSubst in_scope _ cenv) tenv = TCvSubst in_scope tenv cenv

-- | Replace the covar env, keeping the in-scope set and tyvar env.
setCvSubstEnv :: TCvSubst -> CvSubstEnv -> TCvSubst
setCvSubstEnv (TCvSubst in_scope tenv _) cenv = TCvSubst in_scope tenv cenv

-- | Drop both mapping environments but keep the in-scope set.
zapTCvSubst :: TCvSubst -> TCvSubst
zapTCvSubst (TCvSubst in_scope _ _) = TCvSubst in_scope emptyVarEnv emptyVarEnv
-- | Add a variable to the in-scope set (no mapping is added).
extendTCvInScope :: TCvSubst -> Var -> TCvSubst
extendTCvInScope (TCvSubst in_scope tenv cenv) var
  = TCvSubst (extendInScopeSet in_scope var) tenv cenv

-- | Add a list of variables to the in-scope set.
extendTCvInScopeList :: TCvSubst -> [Var] -> TCvSubst
extendTCvInScopeList (TCvSubst in_scope tenv cenv) vars
  = TCvSubst (extendInScopeSetList in_scope vars) tenv cenv

-- | Add a set of variables to the in-scope set.
extendTCvInScopeSet :: TCvSubst -> VarSet -> TCvSubst
extendTCvInScopeSet (TCvSubst in_scope tenv cenv) vars
  = TCvSubst (extendInScopeSetSet in_scope vars) tenv cenv

-- | Add a mapping for a tyvar or covar. A covar's range must be a
-- 'CoercionTy'; anything else is a panic.
extendTCvSubst :: TCvSubst -> TyCoVar -> Type -> TCvSubst
extendTCvSubst subst v ty
  | isTyVar v
  = extendTvSubst subst v ty
  | CoercionTy co <- ty
  = extendCvSubst subst v co
  | otherwise
  = pprPanic "extendTCvSubst" (ppr v <+> text "|->" <+> ppr ty)

-- | Add a tv |-> ty mapping. Does /not/ extend the in-scope set.
extendTvSubst :: TCvSubst -> TyVar -> Type -> TCvSubst
extendTvSubst (TCvSubst in_scope tenv cenv) tv ty
  = TCvSubst in_scope (extendVarEnv tenv tv ty) cenv

-- | Extend with a binder's variable; anonymous binders add nothing.
extendTvSubstBinder :: TCvSubst -> TyBinder -> Type -> TCvSubst
extendTvSubstBinder subst (Named bndr) ty
  = extendTvSubst subst (binderVar bndr) ty
extendTvSubstBinder subst (Anon _) _
  = subst

extendTvSubstWithClone :: TCvSubst -> TyVar -> TyVar -> TCvSubst
-- Adds a new tv -> tv mapping, /and/ extends the in-scope set
-- with the clone and the free vars of the clone's kind
extendTvSubstWithClone (TCvSubst in_scope tenv cenv) tv tv'
  = TCvSubst (extendInScopeSetSet in_scope new_in_scope)
             (extendVarEnv tenv tv (mkTyVarTy tv'))
             cenv
  where
    new_in_scope = tyCoVarsOfType (tyVarKind tv') `extendVarSet` tv'

-- | Add a cv |-> co mapping. Does /not/ extend the in-scope set.
extendCvSubst :: TCvSubst -> CoVar -> Coercion -> TCvSubst
extendCvSubst (TCvSubst in_scope tenv cenv) v co
  = TCvSubst in_scope tenv (extendVarEnv cenv v co)

-- Adds a new cv -> cv' mapping, /and/ extends the in-scope set
-- with the clone and the free vars of the clone's type
extendCvSubstWithClone :: TCvSubst -> CoVar -> CoVar -> TCvSubst
extendCvSubstWithClone (TCvSubst in_scope tenv cenv) cv cv'
  = TCvSubst (extendInScopeSetSet in_scope new_in_scope)
             tenv
             (extendVarEnv cenv cv (mkCoVarCo cv'))
  where
    new_in_scope = tyCoVarsOfType (varType cv') `extendVarSet` cv'

extendTvSubstAndInScope :: TCvSubst -> TyVar -> Type -> TCvSubst
-- Also extends the in-scope set
extendTvSubstAndInScope (TCvSubst in_scope tenv cenv) tv ty
  = TCvSubst (in_scope `extendInScopeSetSet` tyCoVarsOfType ty)
             (extendVarEnv tenv tv ty)
             cenv

-- | Add several tv |-> ty mappings, pairwise.
extendTvSubstList :: TCvSubst -> [Var] -> [Type] -> TCvSubst
extendTvSubstList subst tvs tys
  = foldl2 extendTvSubst subst tvs tys
unionTCvSubst :: TCvSubst -> TCvSubst -> TCvSubst
-- Works when the ranges are disjoint
-- (the ASSERT checks the domains do not overlap, in a debug compiler)
unionTCvSubst (TCvSubst in_scope1 tenv1 cenv1) (TCvSubst in_scope2 tenv2 cenv2)
  = ASSERT( not (tenv1 `intersectsVarEnv` tenv2)
         && not (cenv1 `intersectsVarEnv` cenv2) )
    TCvSubst (in_scope1 `unionInScope` in_scope2)
             (tenv1 `plusVarEnv` tenv2)
             (cenv1 `plusVarEnv` cenv2)

-- mkTvSubstPrs and zipTvSubst generate the in-scope set from
-- the types given; but it's just a thunk so with a bit of luck
-- it'll never be evaluated

-- | Generates an in-scope set from the free variables in a list of types
-- and a list of coercions
mkTyCoInScopeSet :: [Type] -> [Coercion] -> InScopeSet
mkTyCoInScopeSet tys cos
  = mkInScopeSet (tyCoVarsOfTypes tys `unionVarSet` tyCoVarsOfCos cos)
-- | Generates the in-scope set for the 'TCvSubst' from the types in the incoming
-- environment. No CoVars, please!
-- In a debug compiler, mismatched lengths or non-tyvars trace a warning
-- and fall back to the empty substitution.
zipTvSubst :: [TyVar] -> [Type] -> TCvSubst
zipTvSubst tvs tys
  | debugIsOn
  , not (all isTyVar tvs) || length tvs /= length tys
  = pprTrace "zipTvSubst" (ppr tvs $$ ppr tys) emptyTCvSubst
  | otherwise
  = mkTvSubst (mkInScopeSet (tyCoVarsOfTypes tys)) tenv
  where
    tenv = zipTyEnv tvs tys

-- | Generates the in-scope set for the 'TCvSubst' from the types in the incoming
-- environment. No TyVars, please!
zipCvSubst :: [CoVar] -> [Coercion] -> TCvSubst
zipCvSubst cvs cos
  | debugIsOn
  , not (all isCoVar cvs) || length cvs /= length cos
  = pprTrace "zipCvSubst" (ppr cvs $$ ppr cos) emptyTCvSubst
  | otherwise
  = TCvSubst (mkInScopeSet (tyCoVarsOfCos cos)) emptyTvSubstEnv cenv
  where
    cenv = zipCoEnv cvs cos
-- | Generates the in-scope set for the 'TCvSubst' from the types in the
-- incoming environment. No CoVars, please!
mkTvSubstPrs :: [(TyVar, Type)] -> TCvSubst
mkTvSubstPrs prs =
    ASSERT2( onlyTyVarsAndNoCoercionTy, text "prs" <+> ppr prs )
    mkTvSubst in_scope tenv
  where tenv = mkVarEnv prs
        in_scope = mkInScopeSet $ tyCoVarsOfTypes $ map snd prs
        onlyTyVarsAndNoCoercionTy =
          and [ isTyVar tv && not (isCoercionTy ty)
              | (tv, ty) <- prs ]

-- | Zip tyvars and types into a 'TvSubstEnv'; the lists must have
-- equal length (checked by zipEqual) and the range must contain no
-- 'CoercionTy' (checked by the ASSERT).
zipTyEnv :: [TyVar] -> [Type] -> TvSubstEnv
zipTyEnv tyvars tys
  = ASSERT( all (not . isCoercionTy) tys )
    mkVarEnv (zipEqual "zipTyEnv" tyvars tys)
        -- There used to be a special case for when
        --      ty == TyVarTy tv
        -- (a not-uncommon case) in which case the substitution was dropped.
        -- But the type-tidier changes the print-name of a type variable without
        -- changing the unique, and that led to a bug. Why? Pre-tidying, we had
        -- a type {Foo t}, where Foo is a one-method class. So Foo is really a newtype.
        -- And it happened that t was the type variable of the class. Post-tidying,
        -- it got turned into {Foo t2}. The ext-core printer expanded this using
        -- sourceTypeRep, but that said "Oh, t == t2" because they have the same unique,
        -- and so generated a rep type mentioning t not t2.
        --
        -- Simplest fix is to nuke the "optimisation"

-- | Zip covars and coercions into a 'CvSubstEnv' (equal lengths required).
zipCoEnv :: [CoVar] -> [Coercion] -> CvSubstEnv
zipCoEnv cvs cos = mkVarEnv (zipEqual "zipCoEnv" cvs cos)
-- Pretty-print all three components of the substitution.
instance Outputable TCvSubst where
  ppr (TCvSubst ins tenv cenv)
    = brackets $
      sep [ text "TCvSubst"
          , nest 2 (text "In scope:" <+> ppr ins)
          , nest 2 (text "Type env:" <+> ppr tenv)
          , nest 2 (text "Co env:" <+> ppr cenv) ]
{-
%************************************************************************
%* *
Performing type or kind substitutions
%* *
%************************************************************************
Note [Sym and ForAllCo]
~~~~~~~~~~~~~~~~~~~~~~~
In OptCoercion, we try to push "sym" out to the leaves of a coercion. But,
how do we push sym into a ForAllCo? It's a little ugly.
Here is the typing rule:
h : k1 ~# k2
(tv : k1) |- g : ty1 ~# ty2
----------------------------
ForAllCo tv h g : (ForAllTy (tv : k1) ty1) ~#
(ForAllTy (tv : k2) (ty2[tv |-> tv |> sym h]))
Here is what we want:
ForAllCo tv h' g' : (ForAllTy (tv : k2) (ty2[tv |-> tv |> sym h])) ~#
(ForAllTy (tv : k1) ty1)
Because the kinds of the type variables to the right of the colon are the kinds
coerced by h', we know (h' : k2 ~# k1). Thus, (h' = sym h).
Now, we can rewrite ty1 to be (ty1[tv |-> tv |> sym h' |> h']). We thus want
ForAllCo tv h' g' :
(ForAllTy (tv : k2) (ty2[tv |-> tv |> h'])) ~#
(ForAllTy (tv : k1) (ty1[tv |-> tv |> h'][tv |-> tv |> sym h']))
We thus see that we want
g' : ty2[tv |-> tv |> h'] ~# ty1[tv |-> tv |> h']
and thus g' = sym (g[tv |-> tv |> h']).
Putting it all together, we get this:
sym (ForAllCo tv h g)
==>
ForAllCo tv (sym h) (sym g[tv |-> tv |> sym h])
-}
-- | Type substitution, see 'zipTvSubst'
substTyWith :: HasCallStack => [TyVar] -> [Type] -> Type -> Type
-- Works only if the domain of the substitution is a
-- superset of the type being substituted into
substTyWith tvs tys = ASSERT( length tvs == length tys )
                      substTy (zipTvSubst tvs tys)

-- | Type substitution, see 'zipTvSubst'. Disables sanity checks.
-- The problems that the sanity checks in substTy catch are described in
-- Note [The substitution invariant].
-- The goal of #11371 is to migrate all the calls of substTyUnchecked to
-- substTy and remove this function. Please don't use in new code.
substTyWithUnchecked :: [TyVar] -> [Type] -> Type -> Type
substTyWithUnchecked tvs tys
  = ASSERT( length tvs == length tys )
    substTyUnchecked (zipTvSubst tvs tys)

-- | Substitute tyvars within a type using a known 'InScopeSet'.
-- Pre-condition: the 'in_scope' set should satisfy Note [The substitution
-- invariant]; specifically it should include the free vars of 'tys',
-- and of 'ty' minus the domain of the subst.
substTyWithInScope :: InScopeSet -> [TyVar] -> [Type] -> Type -> Type
substTyWithInScope in_scope tvs tys ty =
  ASSERT( length tvs == length tys )
  substTy (mkTvSubst in_scope tenv) ty
  where tenv = zipTyEnv tvs tys

-- | Coercion substitution, see 'zipTvSubst'
substCoWith :: HasCallStack => [TyVar] -> [Type] -> Coercion -> Coercion
substCoWith tvs tys = ASSERT( length tvs == length tys )
                      substCo (zipTvSubst tvs tys)

-- | Coercion substitution, see 'zipTvSubst'. Disables sanity checks.
-- The problems that the sanity checks in substCo catch are described in
-- Note [The substitution invariant].
-- The goal of #11371 is to migrate all the calls of substCoUnchecked to
-- substCo and remove this function. Please don't use in new code.
substCoWithUnchecked :: [TyVar] -> [Type] -> Coercion -> Coercion
substCoWithUnchecked tvs tys
  = ASSERT( length tvs == length tys )
    substCoUnchecked (zipTvSubst tvs tys)

-- | Substitute covars within a type
substTyWithCoVars :: [CoVar] -> [Coercion] -> Type -> Type
substTyWithCoVars cvs cos = substTy (zipCvSubst cvs cos)

-- | Type substitution, see 'zipTvSubst'
substTysWith :: [TyVar] -> [Type] -> [Type] -> [Type]
substTysWith tvs tys = ASSERT( length tvs == length tys )
                       substTys (zipTvSubst tvs tys)

-- | Type substitution, see 'zipTvSubst'
substTysWithCoVars :: [CoVar] -> [Coercion] -> [Type] -> [Type]
substTysWithCoVars cvs cos = ASSERT( length cvs == length cos )
                             substTys (zipCvSubst cvs cos)
-- | Substitute within a 'Type' after adding the free variables of the type
-- to the in-scope set. This is useful for the case when the free variables
-- aren't already in the in-scope set or easily available.
-- See also Note [The substitution invariant].
substTyAddInScope :: TCvSubst -> Type -> Type
substTyAddInScope subst ty =
  substTy (extendTCvInScopeSet subst $ tyCoVarsOfType ty) ty

-- | When calling `substTy` it should be the case that the in-scope set in
-- the substitution is a superset of the free vars of the range of the
-- substitution.
-- See also Note [The substitution invariant].
isValidTCvSubst :: TCvSubst -> Bool
isValidTCvSubst (TCvSubst in_scope tenv cenv) =
  (tenvFVs `varSetInScope` in_scope) &&
  (cenvFVs `varSetInScope` in_scope)
  where
    tenvFVs = tyCoVarsOfTypesSet tenv
    cenvFVs = tyCoVarsOfCosSet cenv
-- | This checks if the substitution satisfies the invariant from
-- Note [The substitution invariant].
-- It is the identity on its final argument; in a debug compiler the two
-- ASSERT2s fire with diagnostics when the invariant is broken.
checkValidSubst :: HasCallStack => TCvSubst -> [Type] -> [Coercion] -> a -> a
checkValidSubst subst@(TCvSubst in_scope tenv cenv) tys cos a
  = ASSERT2( isValidTCvSubst subst,
             text "in_scope" <+> ppr in_scope $$
             text "tenv" <+> ppr tenv $$
             text "tenvFVs"
               <+> ppr (tyCoVarsOfTypesSet tenv) $$
             text "cenv" <+> ppr cenv $$
             text "cenvFVs"
               <+> ppr (tyCoVarsOfCosSet cenv) $$
             text "tys" <+> ppr tys $$
             text "cos" <+> ppr cos )
    ASSERT2( tysCosFVsInScope,
             text "in_scope" <+> ppr in_scope $$
             text "tenv" <+> ppr tenv $$
             text "cenv" <+> ppr cenv $$
             text "tys" <+> ppr tys $$
             text "cos" <+> ppr cos $$
             text "needInScope" <+> ppr needInScope )
    a
  where
    substDomain = nonDetKeysUFM tenv ++ nonDetKeysUFM cenv
      -- It's OK to use nonDetKeysUFM here, because we only use this list to
      -- remove some elements from a set
    needInScope = (tyCoVarsOfTypes tys `unionVarSet` tyCoVarsOfCos cos)
                  `delListFromUFM_Directly` substDomain
    tysCosFVsInScope = needInScope `varSetInScope` in_scope
-- | Substitute within a 'Type'
-- The substitution has to satisfy the invariants described in
-- Note [The substitution invariant].
substTy :: HasCallStack => TCvSubst -> Type -> Type
substTy subst ty
| isEmptyTCvSubst subst = ty
| otherwise = checkValidSubst subst [ty] [] $ subst_ty subst ty
-- | Substitute within a 'Type' disabling the sanity checks.
-- The problems that the sanity checks in substTy catch are described in
-- Note [The substitution invariant].
-- The goal of #11371 is to migrate all the calls of substTyUnchecked to
-- substTy and remove this function. Please don't use in new code.
substTyUnchecked :: TCvSubst -> Type -> Type
substTyUnchecked subst ty
| isEmptyTCvSubst subst = ty
| otherwise = subst_ty subst ty
-- | Substitute within several 'Type's
-- The substitution has to satisfy the invariants described in
-- Note [The substitution invariant].
substTys :: HasCallStack => TCvSubst -> [Type] -> [Type]
substTys subst tys =
  if isEmptyTCvSubst subst
    then tys
    else checkValidSubst subst tys [] (map (subst_ty subst) tys)

-- | Substitute within several 'Type's disabling the sanity checks.
-- The problems that the sanity checks in substTys catch are described in
-- Note [The substitution invariant].
-- The goal of #11371 is to migrate all the calls of substTysUnchecked to
-- substTys and remove this function. Please don't use in new code.
substTysUnchecked :: TCvSubst -> [Type] -> [Type]
substTysUnchecked subst tys =
  if isEmptyTCvSubst subst
    then tys
    else map (subst_ty subst) tys
-- | Substitute within a 'ThetaType'
-- The substitution has to satisfy the invariants described in
-- Note [The substitution invariant].
substTheta :: HasCallStack => TCvSubst -> ThetaType -> ThetaType
substTheta subst theta = substTys subst theta

-- | Substitute within a 'ThetaType' disabling the sanity checks.
-- The problems that the sanity checks in substTys catch are described in
-- Note [The substitution invariant].
-- The goal of #11371 is to migrate all the calls of substThetaUnchecked to
-- substTheta and remove this function. Please don't use in new code.
substThetaUnchecked :: TCvSubst -> ThetaType -> ThetaType
substThetaUnchecked subst theta = substTysUnchecked subst theta
subst_ty :: TCvSubst -> Type -> Type
-- subst_ty is the main workhorse for type substitution
--
-- Note that the in_scope set is poked only if we hit a forall
-- so it may often never be fully computed
subst_ty subst ty
  = go ty
  where
    -- The pervasive ($!) and `seqList` force each substituted subterm
    -- eagerly; do not weaken them, they prevent thunk build-up over the
    -- whole result type.
    go (TyVarTy tv)      = substTyVar subst tv
    go (AppTy fun arg)   = mkAppTy (go fun) $! (go arg)
                -- The mkAppTy smart constructor is important
                -- we might be replacing (a Int), represented with App
                -- by [Int], represented with TyConApp
    go (TyConApp tc tys) = let args = map go tys
                           in  args `seqList` TyConApp tc args
    go (FunTy arg res)   = (FunTy $! go arg) $! go res
    go (ForAllTy (TvBndr tv vis) ty)
                         = case substTyVarBndrUnchecked subst tv of
                             (subst', tv') ->
                               (ForAllTy $! ((TvBndr $! tv') vis)) $!
                               (subst_ty subst' ty)
                         -- The binder case extends the substitution, so the
                         -- body is substituted with subst', not subst
    go (LitTy n)         = LitTy $! n
    go (CastTy ty co)    = (CastTy $! (go ty)) $! (subst_co subst co)
    go (CoercionTy co)   = CoercionTy $! (subst_co subst co)
-- | Look a type variable up in the substitution; an unmapped variable
-- is returned unchanged (wrapped back into a 'TyVarTy').
-- See Note [Extending the TCvSubst].
substTyVar :: TCvSubst -> TyVar -> Type
substTyVar (TCvSubst _ tenv _) tv
  = ASSERT( isTyVar tv )
    case lookupVarEnv tenv tv of
      Just ty -> ty
      Nothing -> TyVarTy tv

-- | Apply 'substTyVar' to each variable in the list.
substTyVars :: TCvSubst -> [TyVar] -> [Type]
substTyVars subst = map $ substTyVar subst

-- | Like 'substTyVar', but returns 'Nothing' for an unmapped variable
-- instead of rebuilding a 'TyVarTy'.
lookupTyVar :: TCvSubst -> TyVar -> Maybe Type
-- See Note [Extending the TCvSubst]
lookupTyVar (TCvSubst _ tenv _) tv
  = ASSERT( isTyVar tv )
    lookupVarEnv tenv tv
-- | Substitute within a 'Coercion'
-- The substitution has to satisfy the invariants described in
-- Note [The substitution invariant].
substCo :: HasCallStack => TCvSubst -> Coercion -> Coercion
substCo subst co =
  if isEmptyTCvSubst subst
    then co
    else checkValidSubst subst [] [co] (subst_co subst co)

-- | Substitute within a 'Coercion' disabling sanity checks.
-- The problems that the sanity checks in substCo catch are described in
-- Note [The substitution invariant].
-- The goal of #11371 is to migrate all the calls of substCoUnchecked to
-- substCo and remove this function. Please don't use in new code.
substCoUnchecked :: TCvSubst -> Coercion -> Coercion
substCoUnchecked subst co =
  if isEmptyTCvSubst subst
    then co
    else subst_co subst co
-- | Substitute within several 'Coercion's
-- The substitution has to satisfy the invariants described in
-- Note [The substitution invariant].
substCos :: HasCallStack => TCvSubst -> [Coercion] -> [Coercion]
substCos subst cos =
  if isEmptyTCvSubst subst
    then cos
    else checkValidSubst subst [] cos (map (subst_co subst) cos)
-- | Worker for 'substCo'/'substCoUnchecked': structural substitution over a
-- coercion.  The ($!)/`seqList` forcing discipline mirrors 'subst_ty'.
subst_co :: TCvSubst -> Coercion -> Coercion
subst_co subst co
  = go co
  where
    go_ty :: Type -> Type
    go_ty = subst_ty subst

    go :: Coercion -> Coercion
    go (Refl r ty)           = mkReflCo r $! go_ty ty
    go (TyConAppCo r tc args)= let args' = map go args
                               in  args' `seqList` mkTyConAppCo r tc args'
    go (AppCo co arg)        = (mkAppCo $! go co) $! go arg
    go (ForAllCo tv kind_co co)
      -- Binder case: the body is substituted with the extended subst'
      = case substForAllCoBndrUnchecked subst tv kind_co of { (subst', tv', kind_co') ->
          ((mkForAllCo $! tv') $! kind_co') $! subst_co subst' co }
    go (CoVarCo cv)          = substCoVar subst cv
    go (AxiomInstCo con ind cos) = mkAxiomInstCo con ind $! map go cos
    go (UnivCo p r t1 t2)    = (((mkUnivCo $! go_prov p) $! r) $!
                                (go_ty t1)) $! (go_ty t2)
    go (SymCo co)            = mkSymCo $! (go co)
    go (TransCo co1 co2)     = (mkTransCo $! (go co1)) $! (go co2)
    go (NthCo d co)          = mkNthCo d $! (go co)
    go (LRCo lr co)          = mkLRCo lr $! (go co)
    go (InstCo co arg)       = (mkInstCo $! (go co)) $! go arg
    go (CoherenceCo co1 co2) = (mkCoherenceCo $! (go co1)) $! (go co2)
    go (KindCo co)           = mkKindCo $! (go co)
    go (SubCo co)            = mkSubCo $! (go co)
    go (AxiomRuleCo c cs)    = let cs1 = map go cs
                               in cs1 `seqList` AxiomRuleCo c cs1

    -- Provenance of a UnivCo: substitute inside any wrapped coercion,
    -- leave the other provenances untouched.
    go_prov UnsafeCoerceProv     = UnsafeCoerceProv
    go_prov (PhantomProv kco)    = PhantomProv (go kco)
    go_prov (ProofIrrelProv kco) = ProofIrrelProv (go kco)
    go_prov p@(PluginProv _)     = p
    go_prov p@(HoleProv _)       = p
      -- NB: this last case is a little suspicious, but we need it. Originally,
      -- there was a panic here, but it triggered from deeplySkolemise. Because
      -- we only skolemise tyvars that are manually bound, this operation makes
      -- sense, even over a coercion with holes.
-- | Substitute a forall-coercion binder, using the checked 'substCo'
-- for the kind coercion.
substForAllCoBndr :: TCvSubst -> TyVar -> Coercion -> (TCvSubst, TyVar, Coercion)
substForAllCoBndr subst tv kind_co
  = substForAllCoBndrCallback False (substCo subst) subst tv kind_co

-- | Like 'substForAllCoBndr', but disables sanity checks.
-- The problems that the sanity checks in substCo catch are described in
-- Note [The substitution invariant].
-- The goal of #11371 is to migrate all the calls of substCoUnchecked to
-- substCo and remove this function. Please don't use in new code.
substForAllCoBndrUnchecked :: TCvSubst -> TyVar -> Coercion -> (TCvSubst, TyVar, Coercion)
substForAllCoBndrUnchecked subst tv kind_co
  = substForAllCoBndrCallback False (substCoUnchecked subst) subst tv kind_co
-- See Note [Sym and ForAllCo]
substForAllCoBndrCallback :: Bool -- apply sym to binder?
                          -> (Coercion -> Coercion) -- transformation to kind co
                          -> TCvSubst -> TyVar -> Coercion
                          -> (TCvSubst, TyVar, Coercion)
substForAllCoBndrCallback sym sco (TCvSubst in_scope tenv cenv)
                          old_var old_kind_co
  = ( TCvSubst (in_scope `extendInScopeSet` new_var) new_env cenv
    , new_var, new_kind_co )
  where
    -- If nothing changed (and we are not applying sym) we zap any existing
    -- mapping; under sym the binder is mapped to a cast of the new variable.
    new_env | no_change && not sym = delVarEnv tenv old_var
            | sym       = extendVarEnv tenv old_var $
                          TyVarTy new_var `CastTy` new_kind_co
            | otherwise = extendVarEnv tenv old_var (TyVarTy new_var)

    no_kind_change = isEmptyVarSet (tyCoVarsOfCo old_kind_co)
    no_change = no_kind_change && (new_var == old_var)

    -- Only transform the kind coercion if it has free variables at all
    new_kind_co | no_kind_change = old_kind_co
                | otherwise      = sco old_kind_co

    Pair new_ki1 _ = coercionKind new_kind_co

    -- uniqAway ensures the new binder is fresh w.r.t. the in-scope set
    new_var = uniqAway in_scope (setTyVarKind old_var new_ki1)
-- | Look a coercion variable up in the substitution; an unmapped variable
-- is returned unchanged (wrapped back into a 'CoVarCo').
substCoVar :: TCvSubst -> CoVar -> Coercion
substCoVar (TCvSubst _ _ cenv) cv
  = case lookupVarEnv cenv cv of
      Just co -> co
      Nothing -> CoVarCo cv

-- | Apply 'substCoVar' to each variable in the list.
substCoVars :: TCvSubst -> [CoVar] -> [Coercion]
substCoVars subst cvs = map (substCoVar subst) cvs

-- | Like 'substCoVar', but returns 'Nothing' for an unmapped variable.
lookupCoVar :: TCvSubst -> Var -> Maybe Coercion
lookupCoVar (TCvSubst _ _ cenv) v = lookupVarEnv cenv v
-- | Substitute a tyvar in a binding position, with sanity checks;
-- see 'substTyVarBndrCallback'.
substTyVarBndr :: HasCallStack => TCvSubst -> TyVar -> (TCvSubst, TyVar)
substTyVarBndr = substTyVarBndrCallback substTy

-- | Like 'substTyVarBndr' but disables sanity checks.
-- The problems that the sanity checks in substTy catch are described in
-- Note [The substitution invariant].
-- The goal of #11371 is to migrate all the calls of substTyUnchecked to
-- substTy and remove this function. Please don't use in new code.
substTyVarBndrUnchecked :: TCvSubst -> TyVar -> (TCvSubst, TyVar)
substTyVarBndrUnchecked = substTyVarBndrCallback substTyUnchecked
-- | Substitute a tyvar in a binding position, returning an
-- extended subst and a new tyvar.
substTyVarBndrCallback :: (TCvSubst -> Type -> Type)  -- ^ the subst function
                       -> TCvSubst -> TyVar -> (TCvSubst, TyVar)
substTyVarBndrCallback subst_fn subst@(TCvSubst in_scope tenv cenv) old_var
  = ASSERT2( _no_capture, pprTyVar old_var $$ pprTyVar new_var $$ ppr subst )
    ASSERT( isTyVar old_var )
    (TCvSubst (in_scope `extendInScopeSet` new_var) new_env cenv, new_var)
  where
    new_env | no_change = delVarEnv tenv old_var
            | otherwise = extendVarEnv tenv old_var (TyVarTy new_var)

    _no_capture = not (new_var `elemVarSet` tyCoVarsOfTypesSet tenv)
    -- Assertion check that we are not capturing something in the substitution

    old_ki = tyVarKind old_var
    no_kind_change = isEmptyVarSet (tyCoVarsOfType old_ki) -- verify that kind is closed
    no_change = no_kind_change && (new_var == old_var)
        -- no_change means that the new_var is identical in
        -- all respects to the old_var (same unique, same kind)
        -- See Note [Extending the TCvSubst]
        --
        -- In that case we don't need to extend the substitution
        -- to map old to new. But instead we must zap any
        -- current substitution for the variable. For example:
        --      (\x.e) with id_subst = [x |-> e']
        -- Here we must simply zap the substitution for x

    -- A kind that is closed needs no substitution; otherwise substitute
    -- the kind with the caller-supplied subst_fn (checked or unchecked)
    new_var | no_kind_change = uniqAway in_scope old_var
            | otherwise = uniqAway in_scope $
                          setTyVarKind old_var (subst_fn subst old_ki)
        -- The uniqAway part makes sure the new variable is not already in scope
-- | Substitute a covar in a binding position; no sym applied.
-- See 'substCoVarBndrCallback'.
substCoVarBndr :: TCvSubst -> CoVar -> (TCvSubst, CoVar)
substCoVarBndr = substCoVarBndrCallback False substTy
substCoVarBndrCallback :: Bool            -- apply "sym" to the covar?
                       -> (TCvSubst -> Type -> Type)
                       -> TCvSubst -> CoVar -> (TCvSubst, CoVar)
substCoVarBndrCallback sym subst_fun subst@(TCvSubst in_scope tenv cenv) old_var
  = ASSERT( isCoVar old_var )
    (TCvSubst (in_scope `extendInScopeSet` new_var) tenv new_cenv, new_var)
  where
    -- When we substitute (co :: t1 ~ t2) we may get the identity (co :: t ~ t)
    -- In that case, mkCoVarCo will return a ReflCoercion, and
    -- we want to substitute that (not new_var) for old_var
    new_co = (if sym then mkSymCo else id) $ mkCoVarCo new_var
    no_kind_change = isEmptyVarSet (tyCoVarsOfTypes [t1, t2])
    -- Note: if new_co is Refl we must NOT treat this as "no change",
    -- hence the `not (isReflCo new_co)` conjunct
    no_change = new_var == old_var && not (isReflCo new_co) && no_kind_change

    new_cenv | no_change = delVarEnv cenv old_var
             | otherwise = extendVarEnv cenv old_var new_co

    new_var = uniqAway in_scope subst_old_var
    subst_old_var = mkCoVar (varName old_var) new_var_type

    (_, _, t1, t2, role) = coVarKindsTypesRole old_var
    t1' = subst_fun subst t1
    t2' = subst_fun subst t2
    -- Under sym the two sides of the coercion kind are swapped
    new_var_type = uncurry (mkCoercionType role) (if sym then (t2', t1') else (t1', t2'))
                  -- It's important to do the substitution for coercions,
                  -- because they can have free type variables
-- | Clone a tyvar binder: give it the supplied fresh 'Unique',
-- substitute its kind if the kind is not closed, extend the
-- substitution and in-scope set accordingly.
cloneTyVarBndr :: TCvSubst -> TyVar -> Unique -> (TCvSubst, TyVar)
cloneTyVarBndr subst@(TCvSubst in_scope tv_env cv_env) tv uniq
  = ASSERT2( isTyVar tv, ppr tv ) -- I think it's only called on TyVars
    (TCvSubst (extendInScopeSet in_scope tv')
              (extendVarEnv tv_env tv (mkTyVarTy tv')) cv_env, tv')
  where
    old_ki = tyVarKind tv
    no_kind_change = isEmptyVarSet (tyCoVarsOfType old_ki) -- verify that kind is closed

    tv1 | no_kind_change = tv
        | otherwise = setTyVarKind tv (substTy subst old_ki)

    tv' = setVarUnique tv1 uniq
-- | Clone a list of tyvar binders left-to-right, threading both the
-- substitution and the unique supply through the list.
cloneTyVarBndrs :: TCvSubst -> [TyVar] -> UniqSupply -> (TCvSubst, [TyVar])
cloneTyVarBndrs subst0 tvs0 usupply0 = go subst0 tvs0 usupply0
  where
    go s []     _us = (s, [])
    go s (t:ts) us  =
      let (uniq, us') = takeUniqFromSupply us
          (s' , tv )  = cloneTyVarBndr s t uniq
          (s'', tvs)  = go s' ts us'
      in (s'', tv:tvs)
{-
%************************************************************************
%* *
Pretty-printing types
Defined very early because of debug printing in assertions
%* *
%************************************************************************
@pprType@ is the standard @Type@ printer; the overloaded @ppr@ function is
defined to use this. @pprParendType@ is the same, except it puts
parens around the type, except for the atomic cases. @pprParendType@
works just by setting the initial context precedence very high.
Note [Precedence in types]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't keep the fixity of type operators in the operator. So the pretty printer
follows the following precedence order:
Type constructor application binds more tightly than
Operator applications which bind more tightly than
Function arrow
So we might see a :+: T b -> c
meaning (a :+: (T b)) -> c
Maybe operator applications should bind a bit less tightly?
Anyway, that's the current story; it is used consistently for Type and HsType.
-}
------------------
-- | Pretty-print a type (parenthesised variant puts parens around
-- non-atomic types).  Both go via 'tidyToIfaceType'.
pprType, pprParendType :: Type -> SDoc
pprType       = pprIfaceType . tidyToIfaceType
pprParendType = pprParendIfaceType . tidyToIfaceType

pprTyLit :: TyLit -> SDoc
pprTyLit = pprIfaceTyLit . toIfaceTyLit

-- Kinds are Types, so kind printing is just type printing
pprKind, pprParendKind :: Kind -> SDoc
pprKind       = pprType
pprParendKind = pprParendType

tidyToIfaceType :: Type -> IfaceType
-- It's vital to tidy before converting to an IfaceType
-- or nested binders will become indistinguishable!
tidyToIfaceType = toIfaceType . tidyTopType
------------
-- | Pretty-print a class applied to its argument types.
pprClassPred :: Class -> [Type] -> SDoc
pprClassPred clas tys = pprTypeApp (classTyCon clas) tys

------------
-- | Pretty-print a context, with and without a trailing arrow.
pprTheta :: ThetaType -> SDoc
pprTheta = pprIfaceContext . map tidyToIfaceType

pprThetaArrowTy :: ThetaType -> SDoc
pprThetaArrowTy = pprIfaceContextArr . map tidyToIfaceType

------------------
instance Outputable Type where
    ppr ty = pprType ty

instance Outputable TyLit where
   ppr = pprTyLit
------------------
pprSigmaType :: Type -> SDoc
pprSigmaType = pprIfaceSigmaType . tidyToIfaceType

pprForAll :: [TyVarBinder] -> SDoc
pprForAll tvs = pprIfaceForAll (map toIfaceForAllBndr tvs)

-- | Print a user-level forall; see Note [When to print foralls]
pprUserForAll :: [TyVarBinder] -> SDoc
pprUserForAll = pprUserIfaceForAll . map toIfaceForAllBndr

pprTvBndrs :: [TyVarBinder] -> SDoc
pprTvBndrs tvs = sep (map pprTvBndr tvs)

pprTvBndr :: TyVarBinder -> SDoc
pprTvBndr = pprTyVar . binderVar

pprTyVars :: [TyVar] -> SDoc
pprTyVars tvs = sep (map pprTyVar tvs)

pprTyVar :: TyVar -> SDoc
-- Print a type variable binder with its kind (but not if *)
-- Here we do not go via IfaceType, because the duplication with
-- pprIfaceTvBndr is minimal, and the loss of uniques etc in
-- debug printing is disastrous
pprTyVar tv
  | isLiftedTypeKind kind = ppr tv
  | otherwise             = parens (ppr tv <+> dcolon <+> ppr kind)
  where
    kind = tyVarKind tv
-- Named binders are rendered by visibility: Required plain,
-- Specified with a leading @, Inferred in braces.
instance Outputable TyBinder where
  ppr (Anon ty) = text "[anon]" <+> ppr ty
  ppr (Named (TvBndr v Required))  = ppr v
  ppr (Named (TvBndr v Specified)) = char '@' <> ppr v
  ppr (Named (TvBndr v Inferred))  = braces (ppr v)

-----------------
instance Outputable Coercion where -- defined here to avoid orphans
  ppr = pprCo
{-
Note [When to print foralls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Mostly we want to print top-level foralls when (and only when) the user specifies
-fprint-explicit-foralls. But when kind polymorphism is at work, that suppresses
too much information; see Trac #9018.
So I'm trying out this rule: print explicit foralls if
a) User specifies -fprint-explicit-foralls, or
b) Any of the quantified type variables has a kind
that mentions a kind variable
This catches common situations, such as a type signature
f :: m a
which means
f :: forall k. forall (m :: k->*) (a :: k). m a
We really want to see both the "forall k" and the kind signatures
on m and a. The latter comes from pprTvBndr.
Note [Infix type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
With TypeOperators you can say
f :: (a ~> b) -> b
and the (~>) is considered a type variable. However, the type
pretty-printer in this module will just see (a ~> b) as
App (App (TyVarTy "~>") (TyVarTy "a")) (TyVarTy "b")
So it'll print the type in prefix form. To avoid confusion we must
remember to parenthesise the operator, thus
(~>) a b -> b
See Trac #2766.
-}
-- | Pretty-print all data constructors of a tycon, separated by vertical bars.
pprDataCons :: TyCon -> SDoc
pprDataCons = sepWithVBars . fmap pprDataConWithArgs . tyConDataCons
  where
    sepWithVBars [] = empty
    sepWithVBars docs = sep (punctuate (space <> vbar) docs)
-- | Pretty-print a data constructor with its forall, context and
-- argument types, drawing the pieces from 'dataConFullSig'.
pprDataConWithArgs :: DataCon -> SDoc
pprDataConWithArgs dc = sep [forAllDoc, thetaDoc, ppr dc <+> argsDoc]
  where
    (_univ_tvs, _ex_tvs, eq_spec, theta, arg_tys, _res_ty) = dataConFullSig dc
    univ_bndrs = dataConUnivTyVarBinders dc
    ex_bndrs   = dataConExTyVarBinders dc
    -- Universals fixed by the eq_spec are filtered out of the forall
    forAllDoc  = pprUserForAll $ (filterEqSpec eq_spec univ_bndrs ++ ex_bndrs)
    thetaDoc   = pprThetaArrowTy theta
    argsDoc    = hsep (fmap pprParendType arg_tys)
-- | Pretty-print a tycon applied to its arguments, at top precedence.
pprTypeApp :: TyCon -> [Type] -> SDoc
pprTypeApp = pprTcAppTy TopPrec

pprTcAppTy :: TyPrec -> TyCon -> [Type] -> SDoc
pprTcAppTy p tc tys
  -- TODO: toIfaceTcArgs seems rather wasteful here
  = pprIfaceTypeApp p (toIfaceTyCon tc) (toIfaceTcArgs tc tys)

-- | As 'pprTcAppTy', but for a tycon applied to coercions; the printer
-- argument is unused here (printing goes via IfaceCoercion).
pprTcAppCo :: TyPrec -> (TyPrec -> Coercion -> SDoc)
           -> TyCon -> [Coercion] -> SDoc
pprTcAppCo p _pp tc cos
  = pprIfaceCoTcApp p (toIfaceTyCon tc) (map toIfaceCoercion cos)

------------------
pprPrefixApp :: TyPrec -> SDoc -> [SDoc] -> SDoc
pprPrefixApp = pprIfacePrefixApp
----------------
pprArrowChain :: TyPrec -> [SDoc] -> SDoc
-- pprArrowChain p [a,b,c] generates   a -> b -> c
pprArrowChain _ []         = empty
pprArrowChain p (arg:args) = maybeParen p FunPrec $
                             sep [arg, sep (map (arrow <+>) args)]

ppSuggestExplicitKinds :: SDoc
-- Print a helpful suggestion about -fprint-explicit-kinds,
-- if it is not already on
ppSuggestExplicitKinds
  = sdocWithDynFlags $ \ dflags ->
    ppUnless (gopt Opt_PrintExplicitKinds dflags) $
    text "Use -fprint-explicit-kinds to see the kind arguments"
{-
%************************************************************************
%* *
\subsection{TidyType}
%* *
%************************************************************************
-}
-- | This tidies up a type for printing in an error message, or in
-- an interface file.
--
-- It doesn't change the uniques at all, just the print names.
tidyTyCoVarBndrs :: TidyEnv -> [TyCoVar] -> (TidyEnv, [TyCoVar])
tidyTyCoVarBndrs (occ_env, subst) tvs
    = mapAccumL tidyTyCoVarBndr tidy_env' tvs
  where
    -- Seed the occ_env with clashes among the names, see
    -- Note [Tidying multiple names at once] in OccName
    -- We still go through tidyTyCoVarBndr so that each kind variable is tidied
    -- with the correct tidy_env
    occs = map getHelpfulOccName tvs
    tidy_env' = (avoidClashesOccEnv occ_env occs, subst)
-- | Tidy one binder: pick a fresh OccName via the occ env, rename the
-- variable, tidy its kind, and record the renaming in the var subst.
tidyTyCoVarBndr :: TidyEnv -> TyCoVar -> (TidyEnv, TyCoVar)
tidyTyCoVarBndr tidy_env@(occ_env, subst) tyvar
  = case tidyOccName occ_env (getHelpfulOccName tyvar) of
      (occ_env', occ') -> ((occ_env', subst'), tyvar')
        where
          subst' = extendVarEnv subst tyvar tyvar'
          tyvar' = setTyVarKind (setTyVarName tyvar name') kind'
          -- NB: the kind is tidied with the incoming tidy_env,
          -- not the extended one
          kind'  = tidyKind tidy_env (tyVarKind tyvar)
          name'  = tidyNameOcc name occ'
          name   = tyVarName tyvar
-- | The OccName to use when tidying a variable; system names (unification
-- variables) get a trailing "0" so they read distinctly.
getHelpfulOccName :: TyCoVar -> OccName
getHelpfulOccName tyvar = occ1
  where
    name = tyVarName tyvar
    occ  = getOccName name
    -- System Names are for unification variables;
    -- when we tidy them we give them a trailing "0" (or 1 etc)
    -- so that they don't take precedence for the un-modified name
    -- Plus, indicating a unification variable in this way is a
    -- helpful clue for users
    occ1 | isSystemName name
         = if isTyVar tyvar
           then mkTyVarOcc (occNameString occ ++ "0")
           else mkVarOcc   (occNameString occ ++ "0")
         | otherwise = occ
-- | Tidy a 'TyVarBndr', preserving its visibility flag.
tidyTyVarBinder :: TidyEnv -> TyVarBndr TyVar vis
                -> (TidyEnv, TyVarBndr TyVar vis)
tidyTyVarBinder tidy_env (TvBndr tv vis)
  = (tidy_env', TvBndr tv' vis)
  where
    (tidy_env', tv') = tidyTyCoVarBndr tidy_env tv

-- | Tidy several binders left-to-right, threading the env.
tidyTyVarBinders :: TidyEnv -> [TyVarBndr TyVar vis]
                 -> (TidyEnv, [TyVarBndr TyVar vis])
tidyTyVarBinders = mapAccumL tidyTyVarBinder
---------------
tidyFreeTyCoVars :: TidyEnv -> [TyCoVar] -> TidyEnv
-- ^ Add the free 'TyVar's to the env in tidy form,
-- so that we can tidy the type they are free in
tidyFreeTyCoVars (full_occ_env, var_env) tyvars
  -- Same as tidyOpenTyCoVars, but the tidied vars themselves are discarded
  = fst (tidyOpenTyCoVars (full_occ_env, var_env) tyvars)
---------------
-- | Tidy a sequence of free variables left-to-right, threading the env.
tidyOpenTyCoVars :: TidyEnv -> [TyCoVar] -> (TidyEnv, [TyCoVar])
tidyOpenTyCoVars = mapAccumL tidyOpenTyCoVar
---------------
tidyOpenTyCoVar :: TidyEnv -> TyCoVar -> (TidyEnv, TyCoVar)
-- ^ Treat a new 'TyCoVar' as a binder, and give it a fresh tidy name
-- using the environment if one has not already been allocated. See
-- also 'tidyTyCoVarBndr'
tidyOpenTyCoVar env@(_, subst) tyvar
  = case lookupVarEnv subst tyvar of
      Just tyvar' -> (env, tyvar')              -- Already substituted
      Nothing     ->
        -- Tidy the free variables of the variable's kind first,
        -- then the variable itself
        let env' = tidyFreeTyCoVars env (tyCoVarsOfTypeList (tyVarKind tyvar))
        in tidyTyCoVarBndr env' tyvar           -- Treat it as a binder
---------------
-- | Tidy an occurrence of a type variable: use the recorded renaming if
-- there is one, otherwise just tidy its kind in place.
tidyTyVarOcc :: TidyEnv -> TyVar -> TyVar
tidyTyVarOcc env@(_, subst) tv
  = case lookupVarEnv subst tv of
      Nothing  -> updateTyVarKind (tidyType env) tv
      Just tv' -> tv'
---------------
-- | Tidy each type in the list with the same environment.
tidyTypes :: TidyEnv -> [Type] -> [Type]
tidyTypes = map . tidyType
---------------
-- | Tidy a type using the given environment.  The ($!)/`seqList`
-- discipline forces each tidied subterm eagerly.
tidyType :: TidyEnv -> Type -> Type
tidyType _   (LitTy n)            = LitTy n
tidyType env (TyVarTy tv)         = TyVarTy (tidyTyVarOcc env tv)
tidyType env (TyConApp tycon tys) = let args = tidyTypes env tys
                                    in args `seqList` TyConApp tycon args
tidyType env (AppTy fun arg)      = (AppTy $! (tidyType env fun)) $! (tidyType env arg)
tidyType env (FunTy fun arg)      = (FunTy $! (tidyType env fun)) $! (tidyType env arg)
tidyType env (ty@(ForAllTy{}))    = mkForAllTys' (zip tvs' vis) $! tidyType env' body_ty
  where
    -- Gather the whole forall telescope so all binders are tidied together
    (tvs, vis, body_ty) = splitForAllTys' ty
    (env', tvs') = tidyTyCoVarBndrs env tvs
tidyType env (CastTy ty co)       = (CastTy $! tidyType env ty) $! (tidyCo env co)
tidyType env (CoercionTy co)      = CoercionTy $! (tidyCo env co)
-- The following two functions differ from mkForAllTys and splitForAllTys in that
-- they expect/preserve the ArgFlag argument. These belong to types/Type.hs, but
-- how should they be named?
-- | Rebuild a forall telescope from (tyvar, visibility) pairs, forcing
-- each binder as it is attached.
mkForAllTys' :: [(TyVar, ArgFlag)] -> Type -> Type
mkForAllTys' tvvs ty = foldr strictMkForAllTy ty tvvs
  where
    strictMkForAllTy (tv,vis) ty = (ForAllTy $! ((TvBndr $! tv) $! vis)) $! ty

-- | Split off all leading foralls, keeping tyvars and visibilities in
-- separate (parallel) lists.
splitForAllTys' :: Type -> ([TyVar], [ArgFlag], Type)
splitForAllTys' ty = go ty [] []
  where
    go (ForAllTy (TvBndr tv vis) ty) tvs viss = go ty (tv:tvs) (vis:viss)
    go ty                            tvs viss = (reverse tvs, reverse viss, ty)
---------------
-- | Grabs the free type variables, tidies them
-- and then uses 'tidyType' to work over the type itself
tidyOpenTypes :: TidyEnv -> [Type] -> (TidyEnv, [Type])
tidyOpenTypes env tys
  = (env', tidyTypes (trimmed_occ_env, var_env) tys)
  where
    (env'@(_, var_env), tvs') = tidyOpenTyCoVars env $
                                tyCoVarsOfTypesWellScoped tys
    trimmed_occ_env = initTidyOccEnv (map getOccName tvs')
      -- The idea here was that we restrict the new TidyEnv to the
      -- _free_ vars of the types, so that we don't gratuitously rename
      -- the _bound_ variables of the types.
---------------
-- | Singleton version of 'tidyOpenTypes'.
tidyOpenType :: TidyEnv -> Type -> (TidyEnv, Type)
tidyOpenType env ty = let (env', [ty']) = tidyOpenTypes env [ty] in
                      (env', ty')
---------------
-- | Calls 'tidyType' on a top-level type (i.e. with an empty tidying environment)
tidyTopType :: Type -> Type
tidyTopType = tidyType emptyTidyEnv
---------------
-- Kinds are Types, so kind tidying is just type tidying
tidyOpenKind :: TidyEnv -> Kind -> (TidyEnv, Kind)
tidyOpenKind = tidyOpenType

tidyKind :: TidyEnv -> Kind -> Kind
tidyKind = tidyType
----------------
-- | Tidy a coercion using the given environment; structure mirrors
-- 'tidyType' (same ($!)/`seqList` forcing discipline).
tidyCo :: TidyEnv -> Coercion -> Coercion
tidyCo env@(_, subst) co
  = go co
  where
    go (Refl r ty)           = Refl r (tidyType env ty)
    go (TyConAppCo r tc cos) = let args = map go cos
                               in args `seqList` TyConAppCo r tc args
    go (AppCo co1 co2)       = (AppCo $! go co1) $! go co2
    go (ForAllCo tv h co)    = ((ForAllCo $! tvp) $! (go h)) $! (tidyCo envp co)
                               where (envp, tvp) = tidyTyCoVarBndr env tv
            -- the case above duplicates a bit of work in tidying h and the kind
            -- of tv. But the alternative is to use coercionKind, which seems worse.
    go (CoVarCo cv)          = case lookupVarEnv subst cv of
                                 Nothing  -> CoVarCo cv
                                 Just cv' -> CoVarCo cv'
    go (AxiomInstCo con ind cos) = let args = map go cos
                                   in args `seqList` AxiomInstCo con ind args
    go (UnivCo p r t1 t2)    = (((UnivCo $! (go_prov p)) $! r) $!
                                tidyType env t1) $! tidyType env t2
    go (SymCo co)            = SymCo $! go co
    go (TransCo co1 co2)     = (TransCo $! go co1) $! go co2
    go (NthCo d co)          = NthCo d $! go co
    go (LRCo lr co)          = LRCo lr $! go co
    go (InstCo co ty)        = (InstCo $! go co) $! go ty
    go (CoherenceCo co1 co2) = (CoherenceCo $! go co1) $! go co2
    go (KindCo co)           = KindCo $! go co
    go (SubCo co)            = SubCo $! go co
    go (AxiomRuleCo ax cos)  = let cos1 = tidyCos env cos
                               in cos1 `seqList` AxiomRuleCo ax cos1

    -- Provenances: tidy inside any wrapped coercion, leave the rest alone
    go_prov UnsafeCoerceProv    = UnsafeCoerceProv
    go_prov (PhantomProv co)    = PhantomProv (go co)
    go_prov (ProofIrrelProv co) = ProofIrrelProv (go co)
    go_prov p@(PluginProv _)    = p
    go_prov p@(HoleProv _)      = p
-- | Tidy each coercion in the list with the same environment.
tidyCos :: TidyEnv -> [Coercion] -> [Coercion]
tidyCos env cos = map (tidyCo env) cos
{- *********************************************************************
* *
typeSize, coercionSize
* *
********************************************************************* -}
-- NB: We put typeSize/coercionSize here because they are mutually
-- recursive, and have the CPR property. If we have mutual
-- recursion across a hi-boot file, we don't get the CPR property
-- and these functions allocate a tremendous amount of rubbish.
-- It's not critical (because typeSize is really only used in
-- debug mode), but I tripped over an example (T5642) in which
-- typeSize was one of the biggest single allocators in all of GHC.
-- And it's easy to fix, so I did.
-- NB: typeSize does not respect `eqType`, in that two types that
-- are `eqType` may return different sizes. This is OK, because this
-- function is used only in reporting, not decision-making.
-- | Size of a type: leaves count 1, constructors add up their pieces.
-- Used only for reporting; see the NB comments above about not
-- respecting eqType.
typeSize :: Type -> Int
typeSize (LitTy {})                 = 1
typeSize (TyVarTy {})               = 1
typeSize (AppTy t1 t2)              = typeSize t1 + typeSize t2
typeSize (FunTy t1 t2)              = typeSize t1 + typeSize t2
typeSize (ForAllTy (TvBndr tv _) t) = typeSize (tyVarKind tv) + typeSize t
typeSize (TyConApp _ ts)            = 1 + sum (map typeSize ts)
typeSize (CastTy ty co)             = typeSize ty + coercionSize co
typeSize (CoercionTy co)            = coercionSize co
-- | Size of a coercion, companion to 'typeSize'; see the NB comments
-- above these definitions.
coercionSize :: Coercion -> Int
coercionSize (Refl _ ty)           = typeSize ty
coercionSize (TyConAppCo _ _ args) = 1 + sum (map coercionSize args)
coercionSize (AppCo co arg)        = coercionSize co + coercionSize arg
coercionSize (ForAllCo _ h co)     = 1 + coercionSize co + coercionSize h
coercionSize (CoVarCo _)           = 1
coercionSize (AxiomInstCo _ _ args) = 1 + sum (map coercionSize args)
coercionSize (UnivCo p _ t1 t2)    = 1 + provSize p + typeSize t1 + typeSize t2
coercionSize (SymCo co)            = 1 + coercionSize co
coercionSize (TransCo co1 co2)     = 1 + coercionSize co1 + coercionSize co2
coercionSize (NthCo _ co)          = 1 + coercionSize co
coercionSize (LRCo  _ co)          = 1 + coercionSize co
coercionSize (InstCo co arg)       = 1 + coercionSize co + coercionSize arg
coercionSize (CoherenceCo c1 c2)   = 1 + coercionSize c1 + coercionSize c2
coercionSize (KindCo co)           = 1 + coercionSize co
coercionSize (SubCo co)            = 1 + coercionSize co
coercionSize (AxiomRuleCo _ cs)    = 1 + sum (map coercionSize cs)

-- | Size contribution of a UnivCo provenance.  A HoleProv should not
-- survive to the point where sizes are taken, hence the panic.
provSize :: UnivCoProvenance -> Int
provSize UnsafeCoerceProv    = 1
provSize (PhantomProv co)    = 1 + coercionSize co
provSize (ProofIrrelProv co) = 1 + coercionSize co
provSize (PluginProv _)      = 1
provSize (HoleProv h)        = pprPanic "provSize hits a hole" (ppr h)
| olsner/ghc | compiler/types/TyCoRep.hs | bsd-3-clause | 111,889 | 0 | 20 | 26,476 | 15,833 | 8,440 | 7,393 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
-- | All types.
module HIndent.Types
(Printer(..)
,PrintState(..)
,Extender(..)
,Style(..)
,Config(..)
,defaultConfig
,NodeInfo(..)
,ComInfo(..)
,ComInfoLocation(..)
) where
import Control.Applicative
import Control.Monad
import Control.Monad.State.Strict (MonadState(..),StateT)
import Control.Monad.Trans.Maybe
import Data.Data
import Data.Default
import Data.Functor.Identity
import Data.Int (Int64)
import Data.Text (Text)
import Data.Text.Lazy.Builder (Builder)
import Language.Haskell.Exts.Comments
import Language.Haskell.Exts.Parser
import Language.Haskell.Exts.SrcLoc
-- | A pretty printing monad.
--
-- Wraps state (the 'PrintState') over 'MaybeT', so a printing attempt
-- can fail and be retried with a different layout.
newtype Printer s a =
  Printer {runPrinter :: StateT (PrintState s) (MaybeT Identity) a}
  deriving (Applicative,Monad,Functor,MonadState (PrintState s),MonadPlus,Alternative)
-- | The state of the pretty printer.
data PrintState s =
  PrintState {psIndentLevel :: !Int64 -- ^ Current indentation level.
             ,psOutput :: !Builder -- ^ The current output.
             ,psNewline :: !Bool -- ^ Just outputted a newline?
             ,psColumn :: !Int64 -- ^ Current column.
             ,psLine :: !Int64 -- ^ Current line number.
             ,psUserState :: !s -- ^ User state.
             ,psExtenders :: ![Extender s] -- ^ Extenders.
             ,psConfig :: !Config -- ^ Config which styles may or may not pay attention to.
             ,psEolComment :: !Bool -- ^ An end of line comment has just been outputted.
             ,psInsideCase :: !Bool -- ^ Whether we're in a case statement, used for Rhs printing.
             ,psParseMode :: !ParseMode -- ^ Mode used to parse the original AST.
             ,psCommentPreprocessor :: forall m. MonadState (PrintState s) m => [Comment] -> m [Comment] -- ^ Preprocessor applied to comments on an AST before printing.
             }
-- Equality compares only the positional/output fields; user state,
-- extenders, config, parse mode and the comment preprocessor are ignored
-- (they are matched with wildcards below).
instance Eq (PrintState s) where
  PrintState ilevel out newline col line _ _ _ eolc inc _pm _ == PrintState ilevel' out' newline' col' line' _ _ _ eolc' inc' _pm' _ =
    (ilevel,out,newline,col,line,eolc, inc) == (ilevel',out',newline',col',line',eolc', inc')
-- | A printer extender. Takes as argument the user state that the
-- printer was run with, and the current node to print. Use
-- 'prettyNoExt' to fallback to the built-in printer.
data Extender s where
  -- | Print a specific node type.
  Extender :: forall s a. (Typeable a) => (a -> Printer s ()) -> Extender s
  -- | Given the user state, optionally print any node type.
  CatchAll :: forall s. (forall a. Typeable a => s -> a -> Maybe (Printer s ())) -> Extender s
-- | A printer style.  Existential in the user-state type @s@.
data Style =
  forall s. Style {styleName :: !Text -- ^ Name of the style, used in the commandline interface.
                  ,styleAuthor :: !Text -- ^ Author of the printer (as opposed to the author of the style).
                  ,styleDescription :: !Text -- ^ Description of the style.
                  ,styleInitialState :: !s -- ^ User state, if needed.
                  ,styleExtenders :: ![Extender s] -- ^ Extenders to the printer.
                  ,styleDefConfig :: !Config -- ^ Default config to use for this style.
                  ,styleCommentPreprocessor :: forall s' m. MonadState (PrintState s') m => [Comment] -> m [Comment] -- ^ Preprocessor to use for comments.
                  }
-- | Configurations shared among the different styles. Styles may pay
-- attention to or completely disregard this configuration.
data Config =
  Config {configMaxColumns :: !Int64 -- ^ Maximum columns to fit code into ideally.
         ,configIndentSpaces :: !Int64 -- ^ How many spaces to indent?
         ,configClearEmptyLines :: !Bool -- ^ Remove spaces on lines that are otherwise empty?
         }

-- Defaults: 80 columns, 2-space indent, keep empty-line whitespace.
instance Default Config where
  def =
    Config {configMaxColumns = 80
           ,configIndentSpaces = 2
           ,configClearEmptyLines = False}

-- | Default style configuration.
defaultConfig :: Config
defaultConfig = def
-- | Information for each node in the AST.
data NodeInfo =
  NodeInfo {nodeInfoSpan :: !SrcSpanInfo -- ^ Location info from the parser.
           ,nodeInfoComments :: ![ComInfo] -- ^ Comments which are attached to this node.
           }
  deriving (Typeable,Show,Data)
-- | Comment relative locations.
data ComInfoLocation = Before | After
  deriving (Show,Typeable,Data,Eq)

-- | Comment with some more info.
data ComInfo =
  ComInfo {comInfoComment :: !Comment -- ^ The normal comment type.
          ,comInfoLocation :: !(Maybe ComInfoLocation) -- ^ Where the comment lies relative to the node.
          }
  deriving (Show,Typeable,Data)
| gittywithexcitement/hindent | src/HIndent/Types.hs | bsd-3-clause | 4,694 | 0 | 15 | 1,091 | 927 | 555 | 372 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
module Main where
import Control.Monad.ST
import Criterion.Main
import Data.Bits
import Data.ByteString (ByteString)
import Data.ByteString.Lazy (toStrict)
import Data.ByteString.Lazy.Builder
import Data.Monoid
import Data.Word
import qualified Vaultaire.ReaderAlgorithms as A
-- | Serialise a list of point values into a strict ByteString of
-- concatenated simple points.
simplePoints :: [Word64] -> ByteString
simplePoints = toStrict . toLazyByteString . mconcat . map makeSimplePoint

-- | Encode one simple point as three little-endian Word64s.
-- The address is @n@ reduced modulo 16 with its low bit cleared
-- (so addresses are always even).
makeSimplePoint :: Word64 -> Builder
makeSimplePoint n =
    word64LE ((n `mod` uniqueAddresses) `clearBit` 0) -- address
    <> word64LE n                                     -- time
    <> word64LE n                                     -- payload
  where
    uniqueAddresses = 8 * 2
-- | Run the reader algorithm over a bucket with address 4 and the
-- full time range, inside ST.
runTest :: ByteString -> ByteString
runTest bs = runST $ A.processBucket bs 4 minBound maxBound

main :: IO ()
main = do
  -- Bang patterns force the fixtures before benchmarking starts,
  -- so construction cost is not measured.
  let !points = simplePoints [0..174763] -- 4MB
  let !double_points = simplePoints [0..349526]
  defaultMain
    [ bench "simple points" $ nf runTest points
    , bench "simple points (double)" $ nf runTest double_points
    ]
| afcowie/vaultaire | bench/ReaderAlgorithms.hs | bsd-3-clause | 1,091 | 0 | 11 | 275 | 287 | 155 | 132 | 28 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.Read.Lex
-- Copyright : (c) The University of Glasgow 2002
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : non-portable (uses Text.ParserCombinators.ReadP)
--
-- The cut-down Haskell lexer, used by Text.Read
--
-----------------------------------------------------------------------------
module Text.Read.Lex
-- lexing types
( Lexeme(..), Number
, numberToInteger, numberToFixed, numberToRational, numberToRangedRational
-- lexer
, lex, expect
, hsLex
, lexChar
, readIntP
, readOctP
, readDecP
, readHexP
, isSymbolChar
)
where
import Text.ParserCombinators.ReadP
import GHC.Base
import GHC.Char
import GHC.Num( Num(..), Integer )
import GHC.Show( Show(..) )
import GHC.Unicode
( GeneralCategory(..), generalCategory, isSpace, isAlpha, isAlphaNum )
import GHC.Real( Rational, (%), fromIntegral, Integral,
toInteger, (^), quot, even )
import GHC.List
import GHC.Enum( minBound, maxBound )
import Data.Maybe
-- local copy to break import-cycle
-- | @'guard' b@ succeeds trivially (@'return' ()@) when @b@ is 'True'
-- and fails (@'mzero'@) when @b@ is 'False'.
guard :: (MonadPlus m) => Bool -> m ()
guard b = if b then return () else mzero
-- -----------------------------------------------------------------------------
-- Lexing types
-- ^ Haskell lexemes.
data Lexeme
= Char Char -- ^ Character literal
| String String -- ^ String literal, with escapes interpreted
| Punc String -- ^ Punctuation or reserved symbol, e.g. @(@, @::@
| Ident String -- ^ Haskell identifier, e.g. @foo@, @Baz@
| Symbol String -- ^ Haskell symbol, e.g. @>>@, @:%@
| Number Number -- ^ @since 4.6.0.0
| EOF
deriving (Eq, Show)
-- | @since 4.7.0.0
data Number = MkNumber Int -- Base
Digits -- Integral part
| MkDecimal Digits -- Integral part
(Maybe Digits) -- Fractional part
(Maybe Integer) -- Exponent
deriving (Eq, Show)
-- | @since 4.5.1.0
--
-- Extract an 'Integer' from a 'Number', provided it is integral:
-- either a plain numeral in some base, or a decimal with neither a
-- fractional part nor an exponent.  Any other shape yields 'Nothing'.
numberToInteger :: Number -> Maybe Integer
numberToInteger (MkNumber base iPart) = Just (val (fromIntegral base) iPart)
numberToInteger (MkDecimal iPart Nothing Nothing) = Just (val 10 iPart)
numberToInteger _ = Nothing
-- | @since 4.7.0.0
--
-- Interpret a 'Number' as a fixed-point value with @p@ fractional
-- decimal digits, returning @(integral part, first p fractional
-- digits)@.  The fractional digits are right-padded with zeroes
-- (via @repeat 0@) when fewer than @p@ are present.  Numbers with an
-- exponent yield 'Nothing'.
numberToFixed :: Integer -> Number -> Maybe (Integer, Integer)
numberToFixed _ (MkNumber base iPart) = Just (val (fromIntegral base) iPart, 0)
numberToFixed _ (MkDecimal iPart Nothing Nothing) = Just (val 10 iPart, 0)
numberToFixed p (MkDecimal iPart (Just fPart) Nothing)
    = let i = val 10 iPart
          f = val 10 (integerTake p (fPart ++ repeat 0))
          -- Sigh, we really want genericTake, but that's above us in
          -- the hierarchy, so we define our own version here (actually
          -- specialised to Integer)
          integerTake :: Integer -> [a] -> [a]
          integerTake n _ | n <= 0 = []
          integerTake _ [] = []
          integerTake n (x:xs) = x : integerTake (n-1) xs
      in Just (i, f)
numberToFixed _ _ = Nothing
-- This takes a floatRange, and if the Rational would be outside of
-- the floatRange then it may return Nothing. Not that it will not
-- /necessarily/ return Nothing, but it is good enough to fix the
-- space problems in #5688
-- Ways this is conservative:
-- * the floatRange is in base 2, but we pretend it is in base 10
-- * we pad the floateRange a bit, just in case it is very small
-- and we would otherwise hit an edge case
-- * We only worry about numbers that have an exponent. If they don't
-- have an exponent then the Rational won't be much larger than the
-- Number, so there is no problem
-- | @since 4.5.1.0
numberToRangedRational :: (Int, Int) -> Number
-> Maybe Rational -- Nothing = Inf
numberToRangedRational (neg, pos) n@(MkDecimal iPart mFPart (Just exp))
-- if exp is out of integer bounds,
-- then the number is definitely out of range
| exp > fromIntegral (maxBound :: Int) ||
exp < fromIntegral (minBound :: Int)
= Nothing
| otherwise
= let mFirstDigit = case dropWhile (0 ==) iPart of
iPart'@(_ : _) -> Just (length iPart')
[] -> case mFPart of
Nothing -> Nothing
Just fPart ->
case span (0 ==) fPart of
(_, []) -> Nothing
(zeroes, _) ->
Just (negate (length zeroes))
in case mFirstDigit of
Nothing -> Just 0
Just firstDigit ->
let firstDigit' = firstDigit + fromInteger exp
in if firstDigit' > (pos + 3)
then Nothing
else if firstDigit' < (neg - 3)
then Just 0
else Just (numberToRational n)
numberToRangedRational _ n = Just (numberToRational n)
-- | @since 4.6.0.0
--
-- Convert any 'Number' to the exact 'Rational' it denotes.  Plain
-- numerals and exponent-free decimals are handled directly; a decimal
-- with a fractional part delegates to 'fracExp', which folds the
-- fractional digits and the exponent together in a single pass.
numberToRational :: Number -> Rational
numberToRational (MkNumber base iPart) = val (fromIntegral base) iPart % 1
numberToRational (MkDecimal iPart mFPart mExp)
 = let i = val 10 iPart
   in case (mFPart, mExp) of
        (Nothing, Nothing) -> i % 1
        (Nothing, Just exp)
         | exp >= 0 -> (i * (10 ^ exp)) % 1
         | otherwise -> i % (10 ^ (- exp))
        (Just fPart, Nothing) -> fracExp 0 i fPart
        (Just fPart, Just exp) -> fracExp exp i fPart
-- fracExp is a bit more efficient in calculating the Rational.
-- Instead of calculating the fractional part alone, then
-- adding the integral part and finally multiplying with
-- 10 ^ exp if an exponent was given, do it all at once.
-- -----------------------------------------------------------------------------
-- Lexing
-- | Lex a single 'Lexeme', skipping any leading white space first.
lex :: ReadP Lexeme
lex = skipSpaces >> lexToken
-- | @since 4.7.0.0
--
-- Skip white space and lex one token, succeeding (and consuming it)
-- only when it equals the expected lexeme; otherwise the parse fails.
expect :: Lexeme -> ReadP ()
expect lexeme = do { skipSpaces
                   ; thing <- lexToken
                   ; if thing == lexeme then return () else pfail }
hsLex :: ReadP String
-- ^ Haskell lexer: returns the lexed string, rather than the lexeme
hsLex = do skipSpaces
           -- 'gather' pairs the parse result with the exact input text
           -- it consumed; we keep the text and drop the Lexeme.
           (s,_) <- gather lexToken
           return s
lexToken :: ReadP Lexeme
lexToken = lexEOF +++
lexLitChar +++
lexString +++
lexPunc +++
lexSymbol +++
lexId +++
lexNumber
-- ----------------------------------------------------------------------
-- End of file
lexEOF :: ReadP Lexeme
lexEOF = do s <- look
guard (null s)
return EOF
-- ---------------------------------------------------------------------------
-- Single character lexemes
lexPunc :: ReadP Lexeme
lexPunc =
do c <- satisfy isPuncChar
return (Punc [c])
-- | The @special@ character class as defined in the Haskell Report,
-- i.e. one of the characters @,;()[]{}`@.
isPuncChar :: Char -> Bool
isPuncChar = (`elem` specials)
  where
    specials = ",;()[]{}`"
-- ----------------------------------------------------------------------
-- Symbols
lexSymbol :: ReadP Lexeme
lexSymbol =
do s <- munch1 isSymbolChar
if s `elem` reserved_ops then
return (Punc s) -- Reserved-ops count as punctuation
else
return (Symbol s)
where
reserved_ops = ["..", "::", "=", "\\", "|", "<-", "->", "@", "~", "=>"]
-- | Is this a Haskell symbol character?  A symbol character is any
-- non-special character (see 'isPuncChar') whose Unicode general
-- category is a symbol category or one of the listed punctuation
-- categories, with two carve-outs: the quote characters @'@ and @"@
-- are never symbols, and @_@ (connector punctuation) belongs to
-- identifiers instead.
isSymbolChar :: Char -> Bool
isSymbolChar c
  | isPuncChar c = False
  | otherwise =
      case generalCategory c of
        MathSymbol           -> True
        CurrencySymbol       -> True
        ModifierSymbol       -> True
        OtherSymbol          -> True
        DashPunctuation      -> True
        OtherPunctuation     -> c /= '\'' && c /= '"'
        ConnectorPunctuation -> c /= '_'
        _                    -> False
-- ----------------------------------------------------------------------
-- identifiers
lexId :: ReadP Lexeme
lexId = do c <- satisfy isIdsChar
s <- munch isIdfChar
return (Ident (c:s))
where
-- Identifiers can start with a '_'
isIdsChar c = isAlpha c || c == '_'
isIdfChar c = isAlphaNum c || c `elem` "_'"
-- ---------------------------------------------------------------------------
-- Lexing character literals
lexLitChar :: ReadP Lexeme
lexLitChar =
do _ <- char '\''
(c,esc) <- lexCharE
guard (esc || c /= '\'') -- Eliminate '' possibility
_ <- char '\''
return (Char c)
lexChar :: ReadP Char
lexChar = do { (c,_) <- lexCharE; consumeEmpties; return c }
where
-- Consumes the string "\&" repeatedly and greedily (will only produce one match)
consumeEmpties :: ReadP ()
consumeEmpties = do
rest <- look
case rest of
('\\':'&':_) -> string "\\&" >> consumeEmpties
_ -> return ()
lexCharE :: ReadP (Char, Bool) -- "escaped or not"?
lexCharE =
do c1 <- get
if c1 == '\\'
then do c2 <- lexEsc; return (c2, True)
else do return (c1, False)
where
lexEsc =
lexEscChar
+++ lexNumeric
+++ lexCntrlChar
+++ lexAscii
lexEscChar =
do c <- get
case c of
'a' -> return '\a'
'b' -> return '\b'
'f' -> return '\f'
'n' -> return '\n'
'r' -> return '\r'
't' -> return '\t'
'v' -> return '\v'
'\\' -> return '\\'
'\"' -> return '\"'
'\'' -> return '\''
_ -> pfail
lexNumeric =
do base <- lexBaseChar <++ return 10
n <- lexInteger base
guard (n <= toInteger (ord maxBound))
return (chr (fromInteger n))
lexCntrlChar =
do _ <- char '^'
c <- get
case c of
'@' -> return '\^@'
'A' -> return '\^A'
'B' -> return '\^B'
'C' -> return '\^C'
'D' -> return '\^D'
'E' -> return '\^E'
'F' -> return '\^F'
'G' -> return '\^G'
'H' -> return '\^H'
'I' -> return '\^I'
'J' -> return '\^J'
'K' -> return '\^K'
'L' -> return '\^L'
'M' -> return '\^M'
'N' -> return '\^N'
'O' -> return '\^O'
'P' -> return '\^P'
'Q' -> return '\^Q'
'R' -> return '\^R'
'S' -> return '\^S'
'T' -> return '\^T'
'U' -> return '\^U'
'V' -> return '\^V'
'W' -> return '\^W'
'X' -> return '\^X'
'Y' -> return '\^Y'
'Z' -> return '\^Z'
'[' -> return '\^['
'\\' -> return '\^\'
']' -> return '\^]'
'^' -> return '\^^'
'_' -> return '\^_'
_ -> pfail
lexAscii =
do choice
[ (string "SOH" >> return '\SOH') <++
(string "SO" >> return '\SO')
-- \SO and \SOH need maximal-munch treatment
-- See the Haskell report Sect 2.6
, string "NUL" >> return '\NUL'
, string "STX" >> return '\STX'
, string "ETX" >> return '\ETX'
, string "EOT" >> return '\EOT'
, string "ENQ" >> return '\ENQ'
, string "ACK" >> return '\ACK'
, string "BEL" >> return '\BEL'
, string "BS" >> return '\BS'
, string "HT" >> return '\HT'
, string "LF" >> return '\LF'
, string "VT" >> return '\VT'
, string "FF" >> return '\FF'
, string "CR" >> return '\CR'
, string "SI" >> return '\SI'
, string "DLE" >> return '\DLE'
, string "DC1" >> return '\DC1'
, string "DC2" >> return '\DC2'
, string "DC3" >> return '\DC3'
, string "DC4" >> return '\DC4'
, string "NAK" >> return '\NAK'
, string "SYN" >> return '\SYN'
, string "ETB" >> return '\ETB'
, string "CAN" >> return '\CAN'
, string "EM" >> return '\EM'
, string "SUB" >> return '\SUB'
, string "ESC" >> return '\ESC'
, string "FS" >> return '\FS'
, string "GS" >> return '\GS'
, string "RS" >> return '\RS'
, string "US" >> return '\US'
, string "SP" >> return '\SP'
, string "DEL" >> return '\DEL'
]
-- ---------------------------------------------------------------------------
-- string literal
lexString :: ReadP Lexeme
lexString =
do _ <- char '"'
body id
where
body f =
do (c,esc) <- lexStrItem
if c /= '"' || esc
then body (f.(c:))
else let s = f "" in
return (String s)
lexStrItem = (lexEmpty >> lexStrItem)
+++ lexCharE
lexEmpty =
do _ <- char '\\'
c <- get
case c of
'&' -> do return ()
_ | isSpace c -> do skipSpaces; _ <- char '\\'; return ()
_ -> do pfail
-- ---------------------------------------------------------------------------
-- Lexing numbers
type Base = Int
type Digits = [Int]
lexNumber :: ReadP Lexeme
lexNumber
= lexHexOct <++ -- First try for hex or octal 0x, 0o etc
-- If that fails, try for a decimal number
lexDecNumber -- Start with ordinary digits
lexHexOct :: ReadP Lexeme
lexHexOct
= do _ <- char '0'
base <- lexBaseChar
digits <- lexDigits base
return (Number (MkNumber base digits))
lexBaseChar :: ReadP Int
-- Lex a single base-indicator character (octal or hexadecimal);
-- fail unless the next character is one of o, O, x, X.
lexBaseChar = do
  c <- get
  case lookup c bases of
    Just b  -> return b
    Nothing -> pfail
  where
    bases = [('o', 8), ('O', 8), ('x', 16), ('X', 16)]
lexDecNumber :: ReadP Lexeme
lexDecNumber =
do xs <- lexDigits 10
mFrac <- lexFrac <++ return Nothing
mExp <- lexExp <++ return Nothing
return (Number (MkDecimal xs mFrac mExp))
lexFrac :: ReadP (Maybe Digits)
-- Read the fractional part; fail if it doesn't
-- start ".d" where d is a digit
lexFrac = do _ <- char '.'
fraction <- lexDigits 10
return (Just fraction)
lexExp :: ReadP (Maybe Integer)
lexExp = do _ <- char 'e' +++ char 'E'
exp <- signedExp +++ lexInteger 10
return (Just exp)
where
signedExp
= do c <- char '-' +++ char '+'
n <- lexInteger 10
return (if c == '-' then -n else n)
lexDigits :: Int -> ReadP Digits
-- Lex a non-empty sequence of digits in specified base
lexDigits base =
  do s <- look
     xs <- scan s id
     guard (not (null xs))
     return xs
 where
  -- 'look' peeks at the remaining input without consuming anything;
  -- each digit accepted by 'valDig' is then actually consumed with
  -- 'get'.  The accumulator is a difference list to keep appends O(1).
  scan (c:cs) f = case valDig base c of
                    Just n -> do _ <- get; scan cs (f.(n:))
                    Nothing -> do return (f [])
  scan [] f = do return (f [])
-- | Lex a non-empty digit string in the given base and evaluate it
-- to an 'Integer' with 'val'.
lexInteger :: Base -> ReadP Integer
lexInteger base =
  do xs <- lexDigits base
     return (val (fromIntegral base) xs)
val :: Num a => a -> Digits -> a
val = valSimple
{-# RULES
"val/Integer" val = valInteger
#-}
{-# INLINE [1] val #-}
-- The following algorithm is only linear for types whose Num operations
-- are in constant time.
valSimple :: (Num a, Integral d) => a -> [d] -> a
valSimple base = loop 0
  where
    -- Strict left fold over the digits: the accumulator is forced at
    -- every step so no thunk chain builds up over a long digit list.
    loop acc []       = acc
    loop acc (d : ds) = acc' `seq` loop acc' ds
      where
        acc' = acc * base + fromIntegral d
{-# INLINE valSimple #-}
-- A sub-quadratic algorithm for Integer. Pairs of adjacent radix b
-- digits are combined into a single radix b^2 digit. This process is
-- repeated until we are left with a single digit. This algorithm
-- performs well only on large inputs, so we use the simple algorithm
-- for smaller inputs.
valInteger :: Integer -> Digits -> Integer
valInteger b0 ds0 = go b0 (length ds0) $ map fromIntegral ds0
  where
    go _ _ [] = 0
    go _ _ [d] = d
    go b l ds
        -- 40-digit cutoff: below it the simple strict fold is faster.
        | l > 40 = b' `seq` go b' l' (combine b ds')
        | otherwise = valSimple b ds
      where
        -- ensure that we have an even number of digits
        -- before we call combine:
        ds' = if even l then ds else 0 : ds
        b' = b * b
        l' = (l + 1) `quot` 2
    -- Pair up adjacent digits: d1*b + d2 becomes one radix-b^2 digit,
    -- halving the list length on every pass.
    combine b (d1 : d2 : ds) = d `seq` (d : combine b ds)
      where
        d = d1 * b + d2
    combine _ [] = []
    combine _ [_] = errorWithoutStackTrace "this should not happen"
-- Calculate a Rational from the exponent [of 10 to multiply with],
-- the integral part of the mantissa and the digits of the fractional
-- part. Leaving the calculation of the power of 10 until the end,
-- when we know the effective exponent, saves multiplications.
-- More importantly, this way we need at most one gcd instead of three.
--
-- frac was never used with anything but Integer and base 10, so
-- those are hardcoded now (trivial to change if necessary).
fracExp :: Integer -> Integer -> Digits -> Rational
fracExp = go
  where
    -- Each fractional digit shifts the mantissa one decimal place left
    -- and lowers the effective exponent by one; both are forced at each
    -- step to avoid building thunks.
    go e m (d : ds) = e' `seq` m' `seq` go e' m' ds
      where
        e' = e - 1
        m' = m * 10 + fromIntegral d
    go e m []
      | e < 0     = m % (10 ^ negate e)
      | otherwise = fromInteger (m * 10 ^ e)
-- | Value of a digit character in the given base (8, 10 or 16).
-- Yields 'Nothing' for characters that are not digits in that base;
-- calling with any other base is a programmer error.
valDig :: (Eq a, Num a) => a -> Char -> Maybe Int
valDig 8 c
  | c >= '0' && c <= '7' = Just (ord c - ord '0')
  | otherwise            = Nothing
valDig 10 c = valDecDig c
valDig 16 c
  | c >= '0' && c <= '9' = Just (ord c - ord '0')
  | c >= 'a' && c <= 'f' = Just (ord c - ord 'a' + 10)
  | c >= 'A' && c <= 'F' = Just (ord c - ord 'A' + 10)
  | otherwise            = Nothing
valDig _ _ = errorWithoutStackTrace "valDig: Bad base"

-- | Value of a decimal digit character, or 'Nothing' if it is not one.
valDecDig :: Char -> Maybe Int
valDecDig c
  | c >= '0' && c <= '9' = Just (ord c - ord '0')
  | otherwise            = Nothing
-- ----------------------------------------------------------------------
-- other numeric lexing functions
-- | Read an unsigned integral value in an arbitrary base, given a
-- predicate recognising its digit characters and a function giving
-- each digit's value.  Consumes a non-empty run of digit characters.
readIntP :: Num a => a -> (Char -> Bool) -> (Char -> Int) -> ReadP a
readIntP base isDigit valDigit =
  do s <- munch1 isDigit
     return (val base (map valDigit s))
{-# SPECIALISE readIntP
        :: Integer -> (Char -> Bool) -> (Char -> Int) -> ReadP Integer #-}
-- | Read an unsigned integral value in base @base@ (8, 10 or 16),
-- using 'valDig' both to recognise and to evaluate the digits.
readIntP' :: (Eq a, Num a) => a -> ReadP a
readIntP' base = readIntP base isDigit valDigit
 where
  isDigit c = maybe False (const True) (valDig base c)
  valDigit c = maybe 0 id (valDig base c)
{-# SPECIALISE readIntP' :: Integer -> ReadP Integer #-}
-- | Octal, decimal and hexadecimal readers: base-specific
-- specialisations of 'readIntP''.
readOctP, readDecP, readHexP :: (Eq a, Num a) => ReadP a
readOctP = readIntP' 8
readDecP = readIntP' 10
readHexP = readIntP' 16
{-# SPECIALISE readOctP :: ReadP Integer #-}
{-# SPECIALISE readDecP :: ReadP Integer #-}
{-# SPECIALISE readHexP :: ReadP Integer #-}
| rahulmutt/ghcvm | libraries/base/Text/Read/Lex.hs | bsd-3-clause | 18,738 | 164 | 23 | 5,908 | 4,894 | 2,536 | 2,358 | 401 | 44 |
{-# LANGUAGE Haskell98, MultiParamTypeClasses, FunctionalDependencies, TypeSynonymInstances, FlexibleInstances, FlexibleContexts #-}
{-# LINE 1 "Text/Regex/Base/RegexLike.hs" #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.Regex.Base.RegexLike
-- Copyright : (c) Chris Kuklewicz 2006
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : libraries@haskell.org, textregexlazy@personal.mightyreason.com
-- Stability : experimental
-- Portability : non-portable (MPTC+FD)
--
-- Classes and instances for Regex matching.
--
--
-- All the classes are declared here, and some common type aliases, and
-- the MatchResult data type.
--
-- The only instances here are for Extract String and Extract ByteString.
-- There are no data values. The 'RegexContext' instances are in
-- "Text.Regex.Base.Context", except for ones which run afoul of a
-- repeated variable (RegexContext regex a a), which are defined in each
-- modules' String and ByteString modules.
-----------------------------------------------------------------------------
module Text.Regex.Base.RegexLike (
-- ** Type aliases
MatchOffset,
MatchLength,
MatchArray,
MatchText,
-- ** Data types
MatchResult(..),
-- ** Classes
RegexOptions(..),
RegexMaker(..),
RegexLike(..),
RegexContext(..),
Extract(..),
AllSubmatches(..),AllTextSubmatches(..),AllMatches(..),AllTextMatches(..)
) where
import Data.Array(Array,(!))
import Data.Maybe(isJust)
import qualified Data.ByteString as B (take,drop,empty,ByteString)
import qualified Data.ByteString.Lazy as L (take,drop,empty,ByteString)
import qualified Data.Sequence as S(take,drop,empty,Seq)
-- | 0 based index from start of source, or (-1) for unused
type MatchOffset = Int
-- | non-negative length of a match
type MatchLength = Int
-- | 0 based array, with 0th index indicating the full match. If the
-- full match location is not available, represent as (0,0).
type MatchArray = Array Int (MatchOffset,MatchLength)
type MatchText source = Array Int (source,(MatchOffset,MatchLength))
-- | This is the same as the type from JRegex.
data MatchResult a = MR {
mrBefore :: a,
mrMatch :: a,
mrAfter :: a,
mrSubList :: [a],
mrSubs :: Array Int a
}
----------------
-- | Rather than carry them around spearately, the options for how to
-- execute a regex are kept as part of the regex. There are two types
-- of options. Those that can only be specified at compilation time
-- and never changed are CompOpt. Those that can be changed later and
-- affect how matching is performed are ExecOpt. The actually types
-- for these depend on the backend.
class RegexOptions regex compOpt execOpt
| regex->compOpt execOpt, compOpt->regex execOpt, execOpt->regex compOpt where
blankCompOpt :: compOpt -- ^ no options set at all in the backend
blankExecOpt :: execOpt -- ^ no options set at all in the backend
defaultCompOpt :: compOpt -- ^ reasonable options (extended,caseSensitive,multiline regex)
defaultExecOpt :: execOpt -- ^ reasonable options (extended,caseSensitive,multiline regex)
setExecOpts :: execOpt -> regex -> regex
-- ^ forget old flags and use new ones
getExecOpts :: regex -> execOpt
-- ^ retrieve the current flags
----------------
-- | RegexMaker captures the creation of the compiled regular
-- expression from a source type and an option type. 'makeRegexM' and
-- 'makeRegexM' report parse error using 'MonadError', usually (Either
-- String regex).
--
-- The 'makeRegex' function has a default implementation that depends
-- on makeRegexOpts and used 'defaultCompOpt' and 'defaultExecOpt'.
-- Similarly for 'makeRegexM' and 'makeRegexOptsM'.
--
-- There are also default implementaions for 'makeRegexOpts' and
-- 'makeRegexOptsM' in terms of each other. So a minimal instance
-- definition needs to only define one of these, hopefully
-- 'makeRegexOptsM'.
class (RegexOptions regex compOpt execOpt) => RegexMaker regex compOpt execOpt source
| regex -> compOpt execOpt, compOpt -> regex execOpt, execOpt -> regex compOpt where
-- | make using the defaultCompOpt and defaultExecOpt
makeRegex :: source -> regex
-- | Specify your own options
makeRegexOpts :: compOpt -> execOpt -> source -> regex
-- | make using the defaultCompOpt and defaultExecOpt, reporting errors with fail
makeRegexM :: (Monad m) => source -> m regex
-- | Specify your own options, reporting errors with fail
makeRegexOptsM :: (Monad m) => compOpt -> execOpt -> source -> m regex
makeRegex = makeRegexOpts defaultCompOpt defaultExecOpt
makeRegexM = makeRegexOptsM defaultCompOpt defaultExecOpt
makeRegexOpts c e s = maybe (error "makeRegexOpts failed") id (makeRegexOptsM c e s)
makeRegexOptsM c e s = return (makeRegexOpts c e s)
----------------
-- | RegexLike is parametrized on a regular expression type and a
-- source type to run the matching on.
--
-- There are default implementations: matchTest and matchOnceText use
-- matchOnce; matchCount and matchAllText use matchAll. matchOnce uses
-- matchOnceText and matchAll uses matchAllText. So a minimal complete
-- instance need to provide at least (matchOnce or matchOnceText) and
-- (matchAll or matchAllText). Additional definitions are often
-- provided where they will increase efficiency.
--
-- > [ c | let notVowel = makeRegex "[^aeiou]" :: Regex, c <- ['a'..'z'], matchTest notVowel [c] ]
-- >
-- > "bcdfghjklmnpqrstvwxyz"
--
-- The strictness of these functions is instance dependent.
class (Extract source)=> RegexLike regex source where
-- | This returns the first match in the source (it checks the whole
-- source, not just at the start). This returns an array of
-- (offset,length) index pairs for the match and captured
-- substrings. The offset is 0-based. A (-1) for an offset means a
-- failure to match. The lower bound of the array is 0, and the 0th
-- element is the (offset,length) for the whole match.
matchOnce :: regex -> source-> Maybe MatchArray
-- | matchAll returns a list of matches. The matches are in order
-- and do not overlap. If any match succeeds but has 0 length then
-- this will be the last match in the list.
matchAll :: regex -> source-> [MatchArray]
-- | matchCount returns the number of non-overlapping matches
-- returned by matchAll.
matchCount :: regex -> source-> Int
-- | matchTest return True if there is a match somewhere in the
-- source (it checks the whole source not just at the start).
matchTest :: regex -> source-> Bool
-- | This is matchAll with the actual subsections of the source
-- instead of just the (offset,length) information.
matchAllText :: regex -> source-> [MatchText source]
-- | This can return a tuple of three items: the source before the
-- match, an array of the match and captured substrings (with their
-- indices), and the source after the match.
matchOnceText :: regex -> source-> Maybe (source,MatchText source,source)
matchAll regex source = map (fmap snd) (matchAllText regex source)
matchOnce regex source = fmap (\(_,mt,_) -> fmap snd mt) (matchOnceText regex source)
matchTest regex source = isJust (matchOnce regex source)
matchCount regex source = length (matchAll regex source)
matchOnceText regex source =
fmap (\ma -> let (o,l) = ma!0
in (before o source
,fmap (\ol -> (extract ol source,ol)) ma
,after (o+l) source))
(matchOnce regex source)
matchAllText regex source =
map (fmap (\ol -> (extract ol source,ol)))
(matchAll regex source)
----------------
-- | RegexContext is the polymorphic interface to do matching. Since
-- 'target' is polymorphic you may need to suply the type explicitly
-- in contexts where it cannot be inferred.
--
-- The monadic 'matchM' version uses 'fail' to report when the 'regex'
-- has no match in 'source'. Two examples:
--
-- Here the contest 'Bool' is inferred:
--
-- > [ c | let notVowel = makeRegex "[^aeiou]" :: Regex, c <- ['a'..'z'], match notVowel [c] ]
-- >
-- > "bcdfghjklmnpqrstvwxyz"
--
-- Here the context '[String]' must be supplied:
--
-- > let notVowel = (makeRegex "[^aeiou]" :: Regex )
-- > in do { c <- ['a'..'z'] ; matchM notVowel [c] } :: [String]
-- >
-- > ["b","c","d","f","g","h","j","k","l","m","n","p","q","r","s","t","v","w","x","y","z"]
class (RegexLike regex source) => RegexContext regex source target where
match :: regex -> source -> target
matchM :: (Monad m) => regex -> source -> m target
----------------
-- | Extract allows for indexing operations on String or ByteString.
class Extract source where
-- | before is a renamed "take"
before :: Int -> source -> source
-- | after is a renamed "drop"
after :: Int -> source -> source
-- | For when there is no match, this can construct an empty data value
empty :: source
-- | extract takes an offset and length and has a default
-- implementation of @extract (off,len) source = before len (after
-- off source)@
extract :: (Int,Int) -> source -> source
extract (off,len) source = before len (after off source)
instance Extract String where
before = take; after = drop; empty = []
instance Extract B.ByteString where
before = B.take; after = B.drop; empty = B.empty
instance Extract L.ByteString where
before = L.take . toEnum; after = L.drop . toEnum; empty = L.empty
instance Extract (S.Seq a) where
before = S.take; after = S.drop; empty = S.empty
-- | Used in results of RegexContext instances
newtype AllSubmatches f b = AllSubmatches {getAllSubmatches :: (f b)}
-- | Used in results of RegexContext instances
newtype AllTextSubmatches f b = AllTextSubmatches {getAllTextSubmatches :: (f b)}
-- | Used in results of RegexContext instances
newtype AllMatches f b = AllMatches {getAllMatches :: (f b)}
-- | Used in results of RegexContext instances
newtype AllTextMatches f b = AllTextMatches {getAllTextMatches :: (f b) }
| phischu/fragnix | tests/packages/scotty/Text.Regex.Base.RegexLike.hs | bsd-3-clause | 10,050 | 0 | 17 | 1,839 | 1,458 | 891 | 567 | 89 | 0 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0">
<title>Binning Operator Help</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view mergetype="javax.help.UniteAppendMerge">
<name>TOC</name>
<label>Contents</label>
<type>javax.help.TOCView</type>
<data>toc.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">JavaHelpSearch</data>
</view>
</helpset>
| arraydev/snap-desktop | snap-binning-ui/src/main/resources/org/esa/snap/binning/docs/help.hs | gpl-3.0 | 777 | 54 | 44 | 165 | 283 | 143 | 140 | -1 | -1 |
{-# LANGUAGE CPP #-}
module TestImport (
module TestImport,
module Export
) where
import Test.Hspec as Export hiding (Selector)
import Database.MongoDB as Export
import Control.Monad.Trans as Export (MonadIO, liftIO)
import Data.Time (ParseTime, UTCTime)
import qualified Data.Time as Time
-- We support the old version of time because it's easier than trying to use
-- only the new version and test older GHC versions.
#if MIN_VERSION_time(1,5,0)
import Data.Time.Format (defaultTimeLocale, iso8601DateFormat)
#else
import System.Locale (defaultTimeLocale, iso8601DateFormat)
import Data.Maybe (fromJust)
#endif
-- | Parse a time value with the given format string.  Partial: on a
-- mismatch it errors ('parseTimeOrError' on time >= 1.5) or throws via
-- 'fromJust' (older time).  Test-only convenience.
parseTime :: ParseTime t => String -> String -> t
#if MIN_VERSION_time(1,5,0)
parseTime = Time.parseTimeOrError True defaultTimeLocale
#else
parseTime fmt = fromJust . Time.parseTime defaultTimeLocale fmt
#endif
-- | Parse an ISO-8601 date (@YYYY-MM-DD@); partial like 'parseTime'.
parseDate :: String -> UTCTime
parseDate = parseTime (iso8601DateFormat Nothing)
-- | Parse an ISO-8601 date-time (@YYYY-MM-DDTHH:MM:SS@); partial like
-- 'parseTime'.
parseDateTime :: String -> UTCTime
parseDateTime = parseTime (iso8601DateFormat (Just "%H:%M:%S"))
mongodbHostEnvVariable :: String
mongodbHostEnvVariable = "HASKELL_MONGODB_TEST_HOST"
| VictorDenisov/mongodb | test/TestImport.hs | apache-2.0 | 1,103 | 0 | 9 | 144 | 188 | 116 | 72 | 19 | 1 |
--------------------------------------------------------------------
-- |
-- Module : MediaWiki.API.Query.LangLinks
-- Description : Representing 'langlinks' requests.
-- Copyright : (c) Sigbjorn Finne, 2008
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <sof@forkIO.com>
-- Stability : provisional
-- Portability: portable
--
-- Representing 'langlinks' requests.
--
--------------------------------------------------------------------
module MediaWiki.API.Query.LangLinks where
import MediaWiki.API.Types
import MediaWiki.API.Utils
data LangLinksRequest
= LangLinksRequest
{ llLimit :: Maybe Int
, llContinueFrom :: Maybe String
}
instance APIRequest LangLinksRequest where
queryKind _ = QProp "langlinks"
showReq r
= [ mbOpt "lllimit" show (llLimit r)
, mbOpt "llcontinue" id (llContinueFrom r)
]
emptyLangLinksRequest :: LangLinksRequest
emptyLangLinksRequest = LangLinksRequest
{ llLimit = Nothing
, llContinueFrom = Nothing
}
data LangLinksResponse
= LangLinksResponse
{ llPages :: [(PageTitle,[LangPageInfo])]
, llContinue :: Maybe String
}
emptyLangLinksResponse :: LangLinksResponse
emptyLangLinksResponse = LangLinksResponse
{ llPages = []
, llContinue = Nothing
}
data LangPageInfo
= LangPageInfo
{ langName :: LangName
, langTitle :: Maybe String
}
emptyLangPageInfo :: LangPageInfo
emptyLangPageInfo = LangPageInfo
{ langName = "en"
, langTitle = Nothing
}
| neobrain/neobot | mediawiki/MediaWiki/API/Query/LangLinks.hs | bsd-3-clause | 1,514 | 0 | 11 | 304 | 254 | 154 | 100 | 32 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>Regras de varredura ativa - Alfa | Extensão ZAP</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Conteúdo</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Índice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Busca</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoritos</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/ascanrulesAlpha/src/main/javahelp/org/zaproxy/zap/extension/ascanrulesAlpha/resources/help_pt_BR/helpset_pt_BR.hs | apache-2.0 | 995 | 78 | 67 | 163 | 427 | 215 | 212 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
module Haddock.Backends.Hyperlinker.Ast (enrich) where
import Haddock.Syb
import Haddock.Backends.Hyperlinker.Types
import qualified GHC
import Control.Applicative
import Data.Data
import Data.Maybe
-- | Add more detailed information to token stream using GHC API.
enrich :: GHC.RenamedSource -> [Token] -> [RichToken]
enrich src =
map $ \token -> RichToken
{ rtkToken = token
, rtkDetails = enrichToken token detailsMap
}
where
detailsMap = concatMap ($ src)
[ variables
, types
, decls
, binds
, imports
]
-- | A map containing association between source locations and "details" of
-- this location.
--
-- For the time being, it is just a list of pairs. However, looking up things
-- in such structure has linear complexity. We cannot use any hashmap-like
-- stuff because source locations are not ordered. In the future, this should
-- be replaced with interval tree data structure.
type DetailsMap = [(GHC.SrcSpan, TokenDetails)]
-- | Details of the first map entry whose span matches the given token
-- span, if any.  Linear scan -- see the note above about eventually
-- replacing the list with an interval tree.
lookupBySpan :: Span -> DetailsMap -> Maybe TokenDetails
lookupBySpan tspan = listToMaybe . map snd . filter (matches tspan . fst)
-- | Attach details to a single token.  Only identifiers and operators
-- can carry details; every other token type yields 'Nothing'.
enrichToken :: Token -> DetailsMap -> Maybe TokenDetails
enrichToken (Token typ _ spn) dm
  | typ `elem` [TkIdentifier, TkOperator] = lookupBySpan spn dm
enrichToken _ _ = Nothing
-- | Obtain details map for variables ("normally" used identifiers).
--
-- Covers plain variable uses ('HsVar'), record-construction heads
-- ('RecordCon') and record field names in expression context.
variables :: GHC.RenamedSource -> DetailsMap
variables =
    everything (<|>) (var `combine` rec)
  where
    var term = case cast term of
        (Just (GHC.L sspan (GHC.HsVar name))) ->
            pure (sspan, RtkVar (GHC.unLoc name))
        (Just (GHC.L _ (GHC.RecordCon (GHC.L sspan name) _ _ _))) ->
            pure (sspan, RtkVar name)
        _ -> empty
    -- Record fields in expressions; the LHsExpr annotation on the wildcard
    -- keeps this from also matching fields inside patterns.
    rec term = case cast term of
        Just (GHC.HsRecField (GHC.L sspan name) (_ :: GHC.LHsExpr GHC.Name) _) ->
            pure (sspan, RtkVar name)
        _ -> empty
-- | Obtain details map for types.
types :: GHC.RenamedSource -> DetailsMap
types =
    everything (<|>) ty
  where
    -- Only type-variable/type-constructor occurrences are recorded here.
    ty term = case cast term of
        (Just (GHC.L sspan (GHC.HsTyVar name))) ->
            pure (sspan, RtkType (GHC.unLoc name))
        _ -> empty
-- | Obtain details map for identifier bindings.
--
-- That includes both identifiers bound by pattern matching or declared using
-- ordinary assignment (in top-level declarations, let-expressions and where
-- clauses).
binds :: GHC.RenamedSource -> DetailsMap
binds =
    everything (<|>) (fun `combine` pat `combine` tvar)
  where
    -- Function bindings (f x = ...).
    fun term = case cast term of
        (Just (GHC.FunBind (GHC.L sspan name) _ _ _ _ :: GHC.HsBind GHC.Name)) ->
            pure (sspan, RtkBind name)
        _ -> empty
    -- Pattern bindings: plain variables, constructor patterns (whose record
    -- fields are traversed via 'rec'), and as-patterns.
    pat term = case cast term of
        (Just (GHC.L sspan (GHC.VarPat name))) ->
            pure (sspan, RtkBind (GHC.unLoc name))
        (Just (GHC.L _ (GHC.ConPatIn (GHC.L sspan name) recs))) ->
            [(sspan, RtkVar name)] ++ everything (<|>) rec recs
        (Just (GHC.L _ (GHC.AsPat (GHC.L sspan name) _))) ->
            pure (sspan, RtkBind name)
        _ -> empty
    -- Record fields in pattern context (LPat annotation disambiguates).
    rec term = case cast term of
        (Just (GHC.HsRecField (GHC.L sspan name) (_ :: GHC.LPat GHC.Name) _)) ->
            pure (sspan, RtkVar name)
        _ -> empty
    -- Type-variable binders, plain and kind-annotated.
    tvar term = case cast term of
        (Just (GHC.L sspan (GHC.UserTyVar name))) ->
            pure (sspan, RtkBind (GHC.unLoc name))
        (Just (GHC.L _ (GHC.KindedTyVar (GHC.L sspan name) _))) ->
            pure (sspan, RtkBind name)
        _ -> empty
-- | Obtain details map for top-level declarations.
decls :: GHC.RenamedSource -> DetailsMap
decls (group, _, _, _) = concatMap ($ group)
    [ concat . map typ . concat . map GHC.group_tyclds . GHC.hs_tyclds
    , everything (<|>) fun . GHC.hs_valds
    , everything (<|>) (con `combine` ins)
    ]
  where
    -- Type/class declarations: data, type synonym, family, class (whose
    -- method signatures are also declarations).
    typ (GHC.L _ t) = case t of
        GHC.DataDecl { tcdLName = name } -> pure . decl $ name
        GHC.SynDecl name _ _ _ -> pure . decl $ name
        GHC.FamDecl fam -> pure . decl $ GHC.fdLName fam
        GHC.ClassDecl{..} -> [decl tcdLName] ++ concatMap sig tcdSigs
    -- Top-level value bindings; only externally visible names count.
    fun term = case cast term of
        (Just (GHC.FunBind (GHC.L sspan name) _ _ _ _ :: GHC.HsBind GHC.Name))
            | GHC.isExternalName name -> pure (sspan, RtkDecl name)
        _ -> empty
    -- Data constructors together with their record fields.
    con term = case cast term of
        (Just cdcl) ->
            map decl (GHC.getConNames cdcl) ++ everything (<|>) fld cdcl
        Nothing -> empty
    -- Family instance heads are recorded as type references, not new decls.
    ins term = case cast term of
        (Just (GHC.DataFamInstD inst)) -> pure . tyref $ GHC.dfid_tycon inst
        (Just (GHC.TyFamInstD (GHC.TyFamInstDecl (GHC.L _ eqn) _))) ->
            pure . tyref $ GHC.tfe_tycon eqn
        _ -> empty
    fld term = case cast term of
        Just (field :: GHC.ConDeclField GHC.Name)
            -> map (decl . fmap GHC.selectorFieldOcc) $ GHC.cd_fld_names field
        Nothing -> empty
    sig (GHC.L _ (GHC.TypeSig names _)) = map decl names
    sig _ = []
    decl (GHC.L sspan name) = (sspan, RtkDecl name)
    tyref (GHC.L sspan name) = (sspan, RtkType name)
-- | Obtain details map for import declarations.
--
-- This map also includes type and variable details for items in export and
-- import lists.
imports :: GHC.RenamedSource -> DetailsMap
imports src@(_, imps, _, _) =
    everything (<|>) ie src ++ mapMaybe (imp . GHC.unLoc) imps
  where
    -- Entries of import/export lists ("IE" = import/export in GHC).
    ie term = case cast term of
        (Just (GHC.IEVar v)) -> pure $ var v
        (Just (GHC.IEThingAbs t)) -> pure $ typ t
        (Just (GHC.IEThingAll t)) -> pure $ typ t
        (Just (GHC.IEThingWith t _ vs _fls)) ->
            [typ t] ++ map var vs
        _ -> empty
    typ (GHC.L sspan name) = (sspan, RtkType name)
    var (GHC.L sspan name) = (sspan, RtkVar name)
    -- Implicit (compiler-inserted) imports are skipped.
    imp idecl | not . GHC.ideclImplicit $ idecl =
        let (GHC.L sspan name) = GHC.ideclName idecl
        in Just (sspan, RtkModule name)
    imp _ = Nothing
-- | Check whether token stream span matches GHC source span.
--
-- Currently, it is implemented as checking whether "our" span is contained
-- in GHC span. The reason for that is because GHC span are generally wider
-- and may spread across couple tokens. For example, @(>>=)@ consists of three
-- tokens: @(@, @>>=@, @)@, but GHC source span associated with @>>=@ variable
-- contains @(@ and @)@. Similarly, qualified identifiers like @Foo.Bar.quux@
-- are tokenized as @Foo@, @.@, @Bar@, @.@, @quux@ but GHC source span
-- associated with @quux@ contains all five elements.
matches :: Span -> GHC.SrcSpan -> Bool
matches tspan (GHC.RealSrcSpan aspan) =
    ghcStart <= tokStart && tokEnd <= ghcEnd
  where
    -- Compare (line, column) pairs: the GHC span must contain the token span.
    tokStart = (posRow . spStart $ tspan, posCol . spStart $ tspan)
    tokEnd = (posRow . spEnd $ tspan, posCol . spEnd $ tspan)
    ghcStart = (GHC.srcSpanStartLine aspan, GHC.srcSpanStartCol aspan)
    ghcEnd = (GHC.srcSpanEndLine aspan, GHC.srcSpanEndCol aspan)
matches _ _ = False
| Helkafen/haddock | haddock-api/src/Haddock/Backends/Hyperlinker/Ast.hs | bsd-2-clause | 7,039 | 0 | 18 | 1,788 | 2,301 | 1,190 | 1,111 | 129 | 10 |
module DetMachine where
import NondetMachine
import FSM
import DFA(DFA(..))
import qualified OrdMap as OM
-- Representation of Deterministic Finite State Machines:
type DState = [State]
-- Translation of nondeterministic to deterministic state machines:
-- a subset construction where each deterministic state is a set of NFA
-- states ('ss'); transitions are tabled in an OrdMap keyed by that set.
deterministicMachine ((start,final),nfa) =
    ((states inited,finalstates),DFA detm)
  where
    inited = initM nfa start
    detm = determine inited (OM.empty)
    -- Depth-first exploration; a state set already present in the table
    -- is not expanded again, which terminates the recursion.
    determine state@(N g eg ss) detm =
      case OM.lookup ss detm of
        Just _ -> detm
        Nothing ->
            foldr determine detm' next
          where
            detm' = OM.add (ss,(ithis,othis)) detm
            next = map snd onext++map snd inext
            inext = getnext canAccept
            onext = getnext canOutput
            ithis = this inext
            othis = this onext
            this next = [(t,states ss)|(ts,ss)<-next,t<-ts]
            getnext f = [(ts,goto state ss)|(ts,ss)<-f state]
    -- A deterministic state is final when it contains the NFA final state.
    finalstates = [ss | ss<-OM.indices detm,final `elem` ss]
    states (N _ _ ss) = ss
| forste/haReFork | tools/base/parse2/LexerGen/DetMachine.hs | bsd-3-clause | 956 | 28 | 11 | 219 | 385 | 210 | 175 | 25 | 2 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Handle.FD
-- Copyright : (c) The University of Glasgow, 1994-2008
-- License : see libraries/base/LICENSE
--
-- Maintainer : libraries@haskell.org
-- Stability : internal
-- Portability : non-portable
--
-- Handle operations implemented by file descriptors (FDs)
--
-----------------------------------------------------------------------------
module GHC.IO.Handle.FD (
stdin, stdout, stderr,
openFile, openBinaryFile, openFileBlocking,
mkHandleFromFD, fdToHandle, fdToHandle',
isEOF
) where
import GHC.Base
import GHC.Show
import Data.Maybe
import Foreign.C.Types
import GHC.MVar
import GHC.IO
import GHC.IO.Encoding
import GHC.IO.Device as IODevice
import GHC.IO.Exception
import GHC.IO.IOMode
import GHC.IO.Handle
import GHC.IO.Handle.Types
import GHC.IO.Handle.Internals
import qualified GHC.IO.FD as FD
import qualified System.Posix.Internals as Posix
import qualified Haste.Handle
-- ---------------------------------------------------------------------------
-- Standard Handles
-- Three handles are allocated during program initialisation. The first
-- two manage input or output from the Haskell program's standard input
-- or output channel respectively. The third manages output to the
-- standard error channel. These handles are initially open.
-- | A handle managing input from the Haskell program's standard input channel.
stdin :: Handle
stdin = Haste.Handle.stdin   -- delegated to Haste's handle implementation
-- | A handle managing output to the Haskell program's standard output channel.
stdout :: Handle
stdout = Haste.Handle.stdout
-- | A handle managing output to the Haskell program's standard error channel.
stderr :: Handle
stderr = Haste.Handle.stderr
-- | Finalizer for a standard handle: flush pending writes, close the text
-- codecs unless the handle is already closed, and replace the handle state
-- with the "finalized" marker so later use raises a sensible error.
stdHandleFinalizer :: FilePath -> MVar Handle__ -> IO ()
stdHandleFinalizer fp m = do
  h_ <- takeMVar m
  flushWriteBuffer h_
  case haType h_ of
      ClosedHandle -> return ()
      _other -> closeTextCodecs h_
  putMVar m (ioe_finalizedHandle fp)
-- We have to put the FDs into binary mode on Windows to avoid the newline
-- translation that the CRT IO library does.
-- Windows-only: switch the CRT FD to binary mode; a no-op elsewhere.
setBinaryMode :: FD.FD -> IO ()
#ifdef mingw32_HOST_OS
setBinaryMode fd = do _ <- setmode (FD.fdFD fd) True
                      return ()
#else
setBinaryMode _ = return ()
#endif
#ifdef mingw32_HOST_OS
foreign import ccall unsafe "__hscore_setmode"
  setmode :: CInt -> Bool -> IO CInt
#endif
-- ---------------------------------------------------------------------------
-- isEOF
-- | The computation 'isEOF' is identical to 'hIsEOF',
-- except that it works only on 'stdin'.
isEOF :: IO Bool
isEOF = hIsEOF stdin
-- ---------------------------------------------------------------------------
-- Opening and Closing Files
-- Attach the calling function's name and the file path to an IOException,
-- so errors report which operation and file failed.
addFilePathToIOError :: String -> FilePath -> IOException -> IOException
addFilePathToIOError fun fp ioe
  = ioe{ ioe_location = fun, ioe_filename = Just fp }
-- | Computation 'openFile' @file mode@ allocates and returns a new, open
-- handle to manage the file @file@. It manages input if @mode@
-- is 'ReadMode', output if @mode@ is 'WriteMode' or 'AppendMode',
-- and both input and output if mode is 'ReadWriteMode'.
--
-- If the file does not exist and it is opened for output, it should be
-- created as a new file. If @mode@ is 'WriteMode' and the file
-- already exists, then it should be truncated to zero length.
-- Some operating systems delete empty files, so there is no guarantee
-- that the file will exist following an 'openFile' with @mode@
-- 'WriteMode' unless it is subsequently written to successfully.
-- The handle is positioned at the end of the file if @mode@ is
-- 'AppendMode', and otherwise at the beginning (in which case its
-- internal position is 0).
-- The initial buffer mode is implementation-dependent.
--
-- This operation may fail with:
--
-- * 'isAlreadyInUseError' if the file is already open and cannot be reopened;
--
-- * 'isDoesNotExistError' if the file does not exist; or
--
-- * 'isPermissionError' if the user does not have permission to open the file.
--
-- Note: if you will be working with files containing binary data, you'll want to
-- be using 'openBinaryFile'.
openFile :: FilePath -> IOMode -> IO Handle
openFile fp im = open `catchException` annotate
  where
    -- Final True: open the FD in non-blocking mode.
    open = openFile' fp im dEFAULT_OPEN_IN_BINARY_MODE True
    -- Re-throw with the operation name and file path attached.
    annotate e = ioError (addFilePathToIOError "openFile" fp e)
-- | Like 'openFile', but opens the file in ordinary blocking mode.
-- This can be useful for opening a FIFO for reading: if we open in
-- non-blocking mode then the open will fail if there are no writers,
-- whereas a blocking open will block until a writer appears.
--
-- @since 4.4.0.0
openFileBlocking :: FilePath -> IOMode -> IO Handle
openFileBlocking fp im = open `catchException` annotate
  where
    -- Final False: ask openFile' for an ordinary blocking FD.
    open = openFile' fp im dEFAULT_OPEN_IN_BINARY_MODE False
    -- NOTE(review): the reported location is "openFile" in the original
    -- as well; kept byte-identical here.
    annotate e = ioError (addFilePathToIOError "openFile" fp e)
-- | Like 'openFile', but open the file in binary mode.
-- On Windows, reading a file in text mode (which is the default)
-- will translate CRLF to LF, and writing will translate LF to CRLF.
-- This is usually what you want with text files. With binary files
-- this is undesirable; also, as usual under Microsoft operating systems,
-- text mode treats control-Z as EOF. Binary mode turns off all special
-- treatment of end-of-line and end-of-file characters.
-- (See also 'hSetBinaryMode'.)
openBinaryFile :: FilePath -> IOMode -> IO Handle
openBinaryFile fp m = open `catchException` annotate
  where
    -- True/True: binary (no text codec) and non-blocking.
    open = openFile' fp m True True
    annotate e = ioError (addFilePathToIOError "openBinaryFile" fp e)
-- binary: skip the locale text encoding; non_blocking: open the FD in
-- non-blocking mode (False gives the blocking variant).
openFile' :: String -> IOMode -> Bool -> Bool -> IO Handle
openFile' filepath iomode binary non_blocking = do
  -- first open the file to get an FD
  (fd, fd_type) <- FD.openFile filepath iomode non_blocking
  mb_codec <- if binary then return Nothing else fmap Just getLocaleEncoding
  -- then use it to make a Handle
  mkHandleFromFD fd fd_type filepath iomode
                   False {- do not *set* non-blocking mode -}
                   mb_codec
            `onException` IODevice.close fd
        -- NB. don't forget to close the FD if mkHandleFromFD fails, otherwise
        -- this FD leaks.
        -- ASSERT: if we just created the file, then fdToHandle' won't fail
        -- (so we don't need to worry about removing the newly created file
        -- in the event of an error).
-- ---------------------------------------------------------------------------
-- Converting file descriptors to Handles
mkHandleFromFD
   :: FD.FD
   -> IODeviceType
   -> FilePath  -- a string describing this file descriptor (e.g. the filename)
   -> IOMode
   -> Bool      -- *set* non-blocking mode on the FD
   -> Maybe TextEncoding
   -> IO Handle
mkHandleFromFD fd0 fd_type filepath iomode set_non_blocking mb_codec
  = do
#ifndef mingw32_HOST_OS
    -- turn on non-blocking mode
    fd <- if set_non_blocking
             then FD.setNonBlockingMode fd0 True
             else return fd0
#else
    let _ = set_non_blocking -- warning suppression
    fd <- return fd0
#endif
    -- Newline translation only applies when a text encoding is in play.
    let nl | isJust mb_codec = nativeNewlineMode
           | otherwise = noNewlineTranslation
    case fd_type of
        Directory ->
           ioException (IOError Nothing InappropriateType "openFile"
                           "is a directory" Nothing Nothing)
        Stream
           -- only *Streams* can be DuplexHandles. Other read/write
           -- Handles must share a buffer.
           | ReadWriteMode <- iomode ->
                mkDuplexHandle fd filepath mb_codec nl
        _other ->
           mkFileHandle fd filepath iomode mb_codec nl
-- | Old API kept to avoid breaking clients
fdToHandle' :: CInt
            -> Maybe IODeviceType
            -> Bool -- is_socket on Win, non-blocking on Unix
            -> FilePath
            -> IOMode
            -> Bool -- binary
            -> IO Handle
fdToHandle' fdint mb_type is_socket filepath iomode binary = do
  let mb_stat = case mb_type of
        Nothing -> Nothing
        -- mkFD will do the stat:
        Just RegularFile -> Nothing
        -- no stat required for streams etc.:
        Just other -> Just (other,0,0)
  -- The single is_socket flag serves both roles mkFD expects (socket-ness
  -- on Windows, non-blocking on Unix), hence it is passed twice.
  (fd,fd_type) <- FD.mkFD fdint iomode mb_stat
                      is_socket
                      is_socket
  enc <- if binary then return Nothing else fmap Just getLocaleEncoding
  mkHandleFromFD fd fd_type filepath iomode is_socket enc
-- | Turn an existing file descriptor into a Handle. This is used by
-- various external libraries to make Handles.
--
-- Makes a binary Handle. This is for historical reasons; it should
-- probably be a text Handle with the default encoding and newline
-- translation instead.
fdToHandle :: Posix.FD -> IO Handle
fdToHandle fdint = do
   -- The IOMode is recovered from the FD's own open flags.
   iomode <- Posix.fdGetMode fdint
   (fd,fd_type) <- FD.mkFD fdint iomode Nothing
            False{-is_socket-}
              -- NB. the is_socket flag is False, meaning that:
              -- on Windows we're guessing this is not a socket (XXX)
            False{-is_nonblock-}
              -- file descriptors that we get from external sources are
              -- not put into non-blocking mode, because that would affect
              -- other users of the file descriptor
   let fd_str = "<file descriptor: " ++ show fd ++ ">"
   mkHandleFromFD fd fd_type fd_str iomode False{-non-block-}
                  Nothing -- bin mode
-- ---------------------------------------------------------------------------
-- Are files opened by default in text or binary mode, if the user doesn't
-- specify?
dEFAULT_OPEN_IN_BINARY_MODE :: Bool
dEFAULT_OPEN_IN_BINARY_MODE = False
| beni55/haste-compiler | libraries/ghc-7.10/base/GHC/IO/Handle/FD.hs | bsd-3-clause | 9,828 | 0 | 14 | 2,236 | 1,284 | 714 | 570 | 119 | 4 |
{-# LANGUAGE Rank2Types #-}
module Distribution.Solver.Modular.RetryLog
( RetryLog
, toProgress
, fromProgress
, mapFailure
, retry
, failWith
, succeedWith
, continueWith
, tryWith
) where
import Distribution.Solver.Modular.Message
import Distribution.Solver.Types.Progress
-- | 'Progress' as a difference list that allows efficient appends at failures.
-- The continuation receives the failure value and decides how the log
-- continues; this is what makes appending at a failure ('retry') O(1).
newtype RetryLog step fail done = RetryLog {
    unRetryLog :: forall fail2 . (fail -> Progress step fail2 done)
               -> Progress step fail2 done
  }
-- | /O(1)/. Convert a 'RetryLog' to a 'Progress' by plugging in 'Fail'
-- as the failure continuation.
toProgress :: RetryLog step fail done -> Progress step fail done
toProgress l = unRetryLog l Fail
-- | /O(N)/. Convert a 'Progress' to a 'RetryLog' by rebuilding the log
-- with the supplied failure continuation spliced in at the end.
fromProgress :: Progress step fail done -> RetryLog step fail done
fromProgress p = RetryLog $ \handler -> convert handler p
  where
    convert :: (fail1 -> Progress step fail2 done)
            -> Progress step fail1 done
            -> Progress step fail2 done
    convert handler progress = case progress of
      Done d -> Done d
      Fail failure -> handler failure
      Step m rest -> Step m (convert handler rest)
-- | /O(1)/. Apply a function to the failure value in a log.
mapFailure :: (fail1 -> fail2)
           -> RetryLog step fail1 done
           -> RetryLog step fail2 done
mapFailure f (RetryLog g) = RetryLog $ \handler -> g (handler . f)
-- | /O(1)/. If the first log leads to failure, continue with the second.
retry :: RetryLog step fail1 done
      -> (fail1 -> RetryLog step fail2 done)
      -> RetryLog step fail2 done
retry act handler = RetryLog $ \extend ->
    unRetryLog act (\failure -> unRetryLog (handler failure) extend)
-- | /O(1)/. Create a log with one message before a failure.
failWith :: step -> fail -> RetryLog step fail done
failWith m failure = fromProgress (Step m (Fail failure))

-- | /O(1)/. Create a log with one message before a success.
succeedWith :: step -> done -> RetryLog step fail done
succeedWith m d = fromProgress (Step m (Done d))

-- | /O(1)/. Prepend a message to a log.
continueWith :: step
             -> RetryLog step fail done
             -> RetryLog step fail done
continueWith m l = RetryLog $ \k -> Step m (unRetryLog l k)

-- | /O(1)/. Prepend the given message and 'Enter' to the log, and insert
-- 'Leave' before the failure if the log fails.
tryWith :: Message -> RetryLog Message fail done -> RetryLog Message fail done
tryWith m f = continueWith m (continueWith Enter (retry f (failWith Leave)))
| sopvop/cabal | cabal-install/Distribution/Solver/Modular/RetryLog.hs | bsd-3-clause | 2,449 | 0 | 11 | 584 | 690 | 357 | 333 | 46 | 3 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE KindSignatures #-}
module T10604_no_PolyKinds where
import GHC.Generics
-- 'F' stores nothing; its parameter @f@ (of kind @* -> *@) is phantom,
-- so this exercises 'Generic1' deriving without PolyKinds (see module name).
data F (f :: * -> *) = F deriving Generic1
| ezyang/ghc | testsuite/tests/generics/T10604/T10604_no_PolyKinds.hs | bsd-3-clause | 161 | 0 | 6 | 26 | 32 | 21 | 11 | 5 | 0 |
module IRTS.DumpBC where
import IRTS.Lang
import IRTS.Simplified
import Idris.Core.TT
import IRTS.Bytecode
import Data.List
-- | Apply @f@ to every element and join the results with the separator @y@.
-- Expressed with the standard 'intercalate', which is by definition
-- @concat . intersperse@ -- the original hand-rolled pipeline.
interMap :: [a] -> [b] -> (a -> [b]) -> [b]
interMap xs y f = intercalate y (map f xs)
-- | Four spaces of indentation per nesting level.
indent :: Int -> String
indent level = concat (replicate level "    ")
-- | Render a register; local (L) and temporary (T) registers get short
-- prefixed names, everything else falls back to 'show'.
serializeReg :: Reg -> String
serializeReg reg = case reg of
    L n -> "L" ++ show n
    T n -> "T" ++ show n
    other -> show other
-- | Render one case alternative: the scrutinee value, then the body one
-- indentation level deeper.
serializeCase :: Show a => Int -> (a, [BC]) -> String
serializeCase depth (tag, body) = concat
    [indent depth, show tag, ":\n", interMap body "\n" (serializeBC (depth + 1))]

-- | Render the default alternative of a case.
serializeDefault :: Int -> [BC] -> String
serializeDefault depth body = concat
    [indent depth, "default:\n", interMap body "\n" (serializeBC (depth + 1))]
-- | Render a single bytecode instruction at the given indentation depth.
-- Nested CASE/CONSTCASE bodies are rendered one level deeper.
serializeBC :: Int -> BC -> String
serializeBC n bc = indent n ++
  case bc of
    ASSIGN a b ->
      "ASSIGN " ++ serializeReg a ++ " " ++ serializeReg b
    ASSIGNCONST a b ->
      "ASSIGNCONST " ++ serializeReg a ++ " " ++ show b
    UPDATE a b ->
      "UPDATE " ++ serializeReg a ++ " " ++ serializeReg b
    MKCON a Nothing b xs ->
      "MKCON " ++ serializeReg a ++ " " ++ show b ++ " [" ++ (interMap xs ", " serializeReg) ++ "]"
    MKCON a (Just r) b xs ->
      "MKCON@" ++ serializeReg r ++ " " ++ serializeReg a ++ " " ++ show b ++ " [" ++ (interMap xs ", " serializeReg) ++ "]"
    CASE safe r cases def ->
      "CASE " ++ serializeReg r ++ ":\n" ++ interMap cases "\n" (serializeCase (n + 1)) ++
      maybe "" (\def' -> "\n" ++ serializeDefault (n + 1) def') def
    PROJECT a b c ->
      "PROJECT " ++ serializeReg a ++ " " ++ show b ++ " " ++ show c
    PROJECTINTO a b c ->
      "PROJECTINTO " ++ serializeReg a ++ " " ++ serializeReg b ++ " " ++ show c
    CONSTCASE r cases def ->
      "CONSTCASE " ++ serializeReg r ++ ":\n" ++ interMap cases "\n" (serializeCase (n + 1)) ++
      maybe "" (\def' -> "\n" ++ serializeDefault (n + 1) def') def
    CALL x -> "CALL " ++ show x
    TAILCALL x -> "TAILCALL " ++ show x
    FOREIGNCALL r ret name args ->
      "FOREIGNCALL " ++ serializeReg r ++ " \"" ++ show name ++ "\" " ++ show ret ++
      " [" ++ interMap args ", " (\(ty, r) -> serializeReg r ++ " : " ++ show ty) ++ "]"
    SLIDE n -> "SLIDE " ++ show n
    REBASE -> "REBASE"
    RESERVE n -> "RESERVE " ++ show n
    ADDTOP n -> "ADDTOP " ++ show n
    TOPBASE n -> "TOPBASE " ++ show n
    BASETOP n -> "BASETOP " ++ show n
    STOREOLD -> "STOREOLD"
    OP a b c ->
      "OP " ++ serializeReg a ++ " " ++ show b ++ " [" ++ interMap c ", " serializeReg ++ "]"
    NULL r -> "NULL " ++ serializeReg r
    -- 'show' quotes AND escapes embedded quotes/backslashes, fixing the
    -- previously flagged FIXME (Issue #1596).
    ERROR s -> "ERROR " ++ show s
-- Issue #1596
serialize :: [(Name, [BC])] -> String
serialize decls =
interMap decls "\n\n" serializeDecl
where
serializeDecl :: (Name, [BC]) -> String
serializeDecl (name, bcs) =
show name ++ ":\n" ++ interMap bcs "\n" (serializeBC 1)
-- | Compile the simplified declarations to bytecode, serialize them to
-- text, and write the result to the given output file.
dumpBC :: [(Name, SDecl)] -> String -> IO ()
dumpBC c output = writeFile output $ serialize $ map toBC c
| mrmonday/Idris-dev | src/IRTS/DumpBC.hs | bsd-3-clause | 3,082 | 0 | 17 | 899 | 1,277 | 627 | 650 | 67 | 22 |
{-# LANGUAGE PatternSynonyms, RecordWildCards #-}
module T11283 where
data P = MkP Bool
-- A record pattern synonym with a single field 'x' viewing MkP's payload.
pattern S{x} = MkP x
-- Construction via record syntax, record wildcards, and field-pattern
-- matching, all going through the synonym.
d = S{x = True}
e = S{..}
f S{x=x} = x
| olsner/ghc | testsuite/tests/patsyn/should_compile/T11283.hs | bsd-3-clause | 148 | 0 | 8 | 30 | 71 | 40 | 31 | 7 | 1 |
-- | Module responsible to parse a String into a Command
module Shaker.Parser(
parseCommand
)
where
import Data.Char
import Text.Parsec.Combinator
import Text.Parsec
import Text.Parsec.ByteString
import Shaker.Type
import qualified Data.Map as M
import qualified Data.ByteString.Char8 as B
-- | Parse the given string to a 'Command', using the command table taken
-- from the 'ShakerInput'.
parseCommand :: String -> ShakerInput -> Either ParseError Command
parseCommand str shIn =
  parse (typeCommand (shakerCommandMap shIn)) "parseCommand" (B.pack str)
-- | Parse a Command: an empty (whitespace-only) command is tried first
-- with backtracking, otherwise a duration followed by actions.
typeCommand :: CommandMap -> GenParser Char st Command
typeCommand cmMap = choice [try typeEmpty, typeCommandNonEmpty cmMap]
-- | A non-empty command: an optional duration marker, then one or more
-- actions.
typeCommandNonEmpty :: CommandMap -> GenParser Char st Command
typeCommandNonEmpty cmMap = do
  dur <- typeDuration
  acts <- typeMultipleAction cmMap
  return (Command dur acts)
-- | Succeeds only on whitespace-only input, yielding the empty command.
typeEmpty :: GenParser Char st Command
typeEmpty = do
  spaces
  notFollowedBy anyChar
  return emptyCommand

-- | One or more actions.
typeMultipleAction :: CommandMap -> GenParser Char st [Action]
typeMultipleAction cmMap = many1 (typeAction cmMap)
-- | Parse one action: a known command word, optionally followed by
-- arguments. No arguments (or an empty list) gives a plain 'Action'.
typeAction :: CommandMap -> GenParser Char st Action
typeAction cmMap = do
  skipMany (char ' ')
  shAct <- typeShakerAction cmMap
  args <- optionMaybe (many (parseArgument cmMap))
  skipMany (char ' ')
  return $ case args of
    Just list@(_:_) -> ActionWithArg shAct list
    _ -> Action shAct
-- | One argument word: anything that is not itself a known command name.
parseArgument :: CommandMap -> GenParser Char st String
parseArgument cmMap = do
  skipMany (char ' ')
  -- Reject positions where a command keyword would match instead.
  mapM_ notFollowedBy (parseMapAction cmMap)
  word <- many1 (noneOf " \n")
  skipMany (char ' ')
  return word
-- | Parse a ShakerAction: the first command keyword that matches as a
-- complete word (no trailing word characters).
typeShakerAction :: CommandMap -> GenParser Char st ShakerAction
typeShakerAction cmMap = do
  skipMany (char ' ')
  res <- choice (parseMapAction cmMap)
  notFollowedBy (noneOf " \n")
  skipMany (char ' ')
  return res
-- One parser per command table entry; each matches the key word
-- case-insensitively as a complete word and yields its action.
parseMapAction :: CommandMap -> [GenParser Char st ShakerAction]
parseMapAction cmMap = map check_key key_list
  where key_list = M.toList cmMap
        check_key (key,value) = try (walk key >> notFollowedBy (noneOf " \n" ) ) >> return value
-- Match a string one character at a time, letters case-insensitively.
walk :: String -> GenParser Char st ()
walk [] = return ()
walk (x:xs) = caseChar x >> walk xs
  where caseChar c | isAlpha c = char (toLower c) <|> char (toUpper c)
                   | otherwise = char c
-- | Parse the optional continuous marker (@~@); its absence means one-shot.
typeDuration :: GenParser Char st Duration
typeDuration = do
  skipMany (char ' ')
  option OneShot (char '~' >> return Continuous)
| bonnefoa/Shaker | src/Shaker/Parser.hs | isc | 2,578 | 0 | 14 | 501 | 835 | 415 | 420 | 57 | 3 |
import Data.List (foldl')
import qualified Data.Map as DataMap
import qualified Data.Maybe as Maybe
import qualified System.Directory as Directory
import qualified System.FilePath.Posix as Posix
import qualified System.Random as Random
-- import qualified Text.Groom as Groom
-- import qualified Debug.Trace as Trace
------------------------
-- Getting Training Text
------------------------
data Token = TextStart | Word String deriving (Show, Eq, Ord)
data TrainingText = TrainingText [Token] deriving Show

-- | Tokenize raw training text on whitespace, prefixing the TextStart
-- marker so generated text can begin at a sentence-ish boundary.
getTrainingText :: String -> TrainingText
getTrainingText raw = TrainingText (TextStart : map Word (words raw))
-- | A training text is usable only if it holds at least one full state's
-- worth of tokens plus the leading TextStart token.
isSufficientTrainingText :: Int -> TrainingText -> Bool
isSufficientTrainingText stateLength (TrainingText tokens) =
  length tokens > stateLength
-- | Paths of all @.txt@ files in the local @training/@ directory.
getTrainingFilenames :: IO [String]
getTrainingFilenames = do
  entries <- Directory.getDirectoryContents "training"
  let isTxt name = Posix.takeExtension name == ".txt"
  return ["training/" ++ name | name <- entries, isTxt name]

-- | Contents of every training file.
getTrainingStrings :: IO [String]
getTrainingStrings = mapM readFile =<< getTrainingFilenames
---------------------
-- Creating the Chain
---------------------
{-
The way this is set up is that "States" in the chain (Markov Chain,
assuming I'm doing it right) are of length `stateLength`. The Chain
holds a StateTree, which contains a space efficient combination of all
states. The branches of the tree are Maps from Token to the next node.
Putting the Tokens of a state into the statetree in order will lead you
to the state's corresponding leaf, which is again a Map, but from Token
to integer. Each token chosen at a leaf, along with the last
`stateLength - 1` tokens in the given state, identifies a next state.
The corresponding integer is a count of how many times it is a next
state from the given state.
-}
-- A State is a fixed-length window of tokens (length = stateLength).
data State = State {getTokens :: [Token]} deriving Show
type StateTreeNode = DataMap.Map Token StateTree
-- Leaves count how often each token followed the state ending here.
type NextTokenCounts = DataMap.Map Token Int
data StateTree = StateLeaf NextTokenCounts | StateBranch StateTreeNode deriving (Show, Eq)
data Chain = Chain {getStateTree :: StateTree} deriving Show
emptyStateBranch :: StateTree
emptyStateBranch = StateBranch $ DataMap.fromList []
emptyStateLeaf :: StateTree
emptyStateLeaf = StateLeaf $ DataMap.fromList []
emptyChain :: Chain
emptyChain = Chain emptyStateBranch
-- Record one observed state (and optionally the token that followed it)
-- in the chain's state tree.
addState :: Chain -> Maybe Token -> State -> Chain
addState (Chain stateTree) mbNextStateToken (State stateTokens) =
  Chain (addState' stateTree stateTokens mbNextStateToken)
-- Walk/extend the tree along the state's tokens; at the leaf, bump the
-- count for the token that followed this state (if any).
addState' :: StateTree -> [Token] -> Maybe Token -> StateTree
addState' (StateLeaf nextTokenCounts) [] mbNextStateToken =
  StateLeaf (newNextTokenCounts mbNextStateToken)
  where
    newNextTokenCounts Nothing = nextTokenCounts
    newNextTokenCounts (Just nextStateToken) =
      DataMap.insert nextStateToken (oldCount + 1) nextTokenCounts
      where
        oldCount = Maybe.fromMaybe 0 $ DataMap.lookup nextStateToken nextTokenCounts
addState' (StateLeaf _) _ _ = error $ "StateLeaf reached with extra tokens - "
  ++ " Make sure token lists are all of the same length"
addState' (StateBranch stateTreeNode) stateTokens mbNextStateToken = newStateTree where
  newStateTree :: StateTree
  newStateTree = StateBranch (DataMap.insert stateHead newInnerStateTree stateTreeNode)
  stateHead :: Token
  stateHead = head stateTokens
  stateRest :: [Token]
  stateRest = tail stateTokens
  -- A fresh subtree is a leaf when this is the last token, else a branch.
  defaultInnerStateTree :: StateTree
  defaultInnerStateTree
    | stateRest == [] = emptyStateLeaf
    | otherwise = emptyStateBranch
  oldInnerStateTree :: StateTree
  oldInnerStateTree =
    Maybe.fromMaybe defaultInnerStateTree $ DataMap.lookup stateHead stateTreeNode
  newInnerStateTree :: StateTree
  newInnerStateTree = addState' oldInnerStateTree stateRest mbNextStateToken
-- | Build a state from the first @desiredLength@ tokens of the text, or
-- Nothing if the text is too short to fill a whole state.
getState :: TrainingText -> Int -> Maybe State
getState (TrainingText str) desiredLength
  | length prefix == desiredLength = Just (State prefix)
  | otherwise = Nothing
  where
    prefix = take desiredLength str
-- Slide a stateLength-wide window over the text: every suffix contributes
-- one state (if long enough) plus the token that follows it.
addTrainingTextToChain :: Int -> Chain -> TrainingText -> Chain
addTrainingTextToChain stateLength chain trainingText =
  foldl getNextChain chain (getSubTrainingTexts trainingText)
  where
    -- All suffixes of the token list, longest first.
    getSubTrainingTexts :: TrainingText -> [TrainingText]
    getSubTrainingTexts (TrainingText []) = []
    getSubTrainingTexts (TrainingText tTokens) =
      (TrainingText tTokens):(getSubTrainingTexts (TrainingText (tail tTokens)))
    -- Given a training text, which could be a subset of another training text,
    -- assemble the next chain
    getNextChain :: Chain -> TrainingText -> Chain
    getNextChain chain' tText = maybe chain' (addState chain' $ mbNextStateToken tText) state where
      -- The token immediately after the window, if any.
      mbNextStateToken (TrainingText tokens) =
        Maybe.listToMaybe . (take 1) . (drop stateLength) $ tokens
      state = (getState tText stateLength)
------------------------
-- Generating Random Stuff
------------------------
{-
Generating random text involves randomly finding a starting state, and
randomly finding subsequent states. We kept a count of how many
transitions from one state to another we made, and we can bias our
randomly picked subsequent states accordingly. However we cannot use a
bias in creating a starting state (even though both processes involve
picking a series of tokens).
Picking a starting state involves traversing the stateTree picking
(unbiased) random paths. Once we reach a leaf, we can pick a (biased)
random next token. We take the current state, cut off the beginning, put
on the new token, and we have a new state. We use that to traverse the
stateTree again, and we're at a new leaf. We can grab a new (biased)
random token from here. So we can see that once we have our first state,
subsequent tokens are all grabbed with a bias.
Also, if we reach the end state of a training text, we might get stuck;
chances are, there's not a state transition that we can make from
there. If that happens we just start the process over.
This whole thing loops infinitely, and it's up to the caller to cut it
off.
-}
-- Produces an infinite string; callers must truncate (see 'main').
genText :: Chain -> Random.StdGen -> String
genText chain initStdGen = renderTokens $ genTokens initStdGen where
  genTokens stdGen = firstTokens ++ nextTokens ++ nextRunTokens where
    -- Split the generator so each phase gets an independent source.
    (firstStateStdGen, stdGen') = Random.split stdGen
    (nextTokensStdGen, nextRunStdGen) = Random.split stdGen'
    -- first state
    firstState = genFirstState chain firstStateStdGen
    State firstTokens = firstState
    -- subsequent states
    nextTokens = genNextTokens chain nextTokensStdGen firstState
    -- next run, in case we hit an end
    nextRunTokens = genTokens nextRunStdGen
-- Our first so many tokens can't be due to a transition from an old State,
-- since a State requires a full roster of tokens
-- A state is terminal when its leaf recorded no successor tokens.
isEndState :: StateTree -> State -> Bool
isEndState bStateTree (State tokens) = walkStateTree bStateTree tokens == emptyStateLeaf
-- Pick a starting state by taking unbiased random paths down the tree,
-- anchored at TextStart.
genFirstState :: Chain -> Random.StdGen -> State
genFirstState (Chain baseSTree) initStdGen = State $ genFirstStateFull stdGens baseSTree where
  stdGens = genStdGens initStdGen
  -- Descend until a leaf is reached, choosing one branch key per level.
  genFirstStateRest :: [Random.StdGen] -> StateTree -> [Token]
  genFirstStateRest [] _ = error "genFirstStateRest: stdGen list should be infinite"
  genFirstStateRest _ (StateLeaf _) = []
  genFirstStateRest (stdGen:nextStdGens) stateTree = thisToken:nextTokens where
    thisToken = (genTokenFromTree stdGen stateTree)
    nextTokens = genFirstStateRest nextStdGens (walkStateTree stateTree [thisToken])
  -- Force it to start with a TextStart token, so generated text will make more sense.
  genFirstStateFull stdGens' stateTree = TextStart:nextTokens where
    nextTokens = genFirstStateRest stdGens' (walkStateTree stateTree [TextStart])
-- Given a chain, and a first state out of the chain, get tokens that
-- come *after* that state
genNextTokens :: Chain -> Random.StdGen -> State -> [Token]
genNextTokens (Chain baseSTree) initStdGen firstState = nextTokens where
  stdGens = genStdGens initStdGen
  -- Generate successor states until a terminal (end) state is reached.
  genNextStates :: [Random.StdGen] -> State -> [State]
  genNextStates [] _ = error "genNextStates: stdGen list should be infinite"
  genNextStates (stdGen:nextStdGens) thisState
    | isEndState baseSTree nextState = [nextState]
    | otherwise = nextState:genNextStates nextStdGens nextState
    where
      -- Drop the oldest token, append a count-biased new one from the leaf.
      nextState = State $ nextStateStartTokens ++ [nextStateLastToken]
      nextStateStartTokens = tail $ getTokens thisState
      lastStateBranch = walkStateTree baseSTree nextStateStartTokens
      nextStateLastToken = (genTokenFromTree stdGen lastStateBranch)
  nextTokens :: [Token]
  nextTokens = lastTokenEachState where
    states = genNextStates stdGens firstState
    lastTokenEachState = map (last . getTokens) states
-- | Lazily derive an endless stream of independent random generators
-- from a single seed generator.
genStdGens :: Random.StdGen -> [Random.StdGen]
genStdGens gen = g1 : g2 : genStdGens g3
  where
    (g1, rest) = Random.split gen
    (g2, g3) = Random.split rest
-- Follow the token path down the tree to the node it identifies.
-- NOTE(review): 'fromJust' is partial -- it crashes if a token was never
-- recorded at this branch, so callers must only walk tokens drawn from
-- the tree itself.
walkStateTree :: StateTree -> [Token] -> StateTree
walkStateTree stateTree [] = stateTree -- We want this for either
walkStateTree (StateBranch stateTreeNode) (headToken:tailTokens) =
  walkStateTree (Maybe.fromJust (DataMap.lookup headToken stateTreeNode)) tailTokens
walkStateTree (StateLeaf _) _ = error "StateLeaf reached with extra tokens"
-- | Uniformly pick one element of a non-empty list.
randomPick :: Random.StdGen -> [k] -> k
randomPick stdGen xs = xs !! index
  where
    (index, _) = Random.randomR (0, length xs - 1) stdGen
-- At a branch: pick a key uniformly (unbiased). At a leaf: pick a key
-- with probability proportional to its recorded count, by replicating
-- each key count-many times before the uniform pick.
genTokenFromTree :: Random.StdGen -> StateTree -> Token
genTokenFromTree stdGen (StateBranch stateTreeNode) = randomKey stdGen stateTreeNode where
  randomKey :: Random.StdGen -> DataMap.Map k v -> k
  randomKey rkStdGen map' = randomPick rkStdGen $ DataMap.keys map'
genTokenFromTree stdGen (StateLeaf nextTokenCounts) = weightedRandomKey stdGen nextTokenCounts where
  weightedRandomKey :: Random.StdGen -> DataMap.Map k Int -> k
  weightedRandomKey krkStdGen map' = randomPick krkStdGen $ concatMap replicateTuple kvPairs where
    replicateTuple = (uncurry (flip replicate))
    kvPairs = (DataMap.assocs map')
-- | Render a token stream as text; words get a trailing space, the
-- TextStart marker renders as nothing.  Uses 'concatMap' instead of the
-- original @concat $ map ...@.
renderTokens :: [Token] -> String
renderTokens = concatMap renderToken

renderToken :: Token -> String
renderToken (Word str) = str ++ " "
renderToken TextStart = ""
------------------------
-- Main
------------------------
-- | Build a chain from all training texts and print 10000 characters of
-- generated text, or a notice when no usable training data was found.
main :: IO ()
main = do
  trainingStrings <- getTrainingStrings
  let stateLength = 3
  let trainingTexts = filter (isSufficientTrainingText stateLength) $ map getTrainingText trainingStrings
  -- foldl' keeps the accumulating chain strict, avoiding a thunk chain
  -- over many training texts (the lazy foldl was a space leak risk).
  let chain = foldl' (addTrainingTextToChain stateLength) emptyChain trainingTexts
  if getStateTree chain == emptyStateBranch
    then putStrLn "Insufficient training texts found"
    else do
      -- putStrLn $ Groom.groom chain
      stdGen <- Random.getStdGen
      putStrLn $ take 10000 $ genText chain stdGen
  return ()
| orblivion/random-chain | Main.hs | mit | 11,514 | 0 | 13 | 2,286 | 2,290 | 1,189 | 1,101 | 151 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Web.Pocket.Get
( GetReq(..)
, makeGetReq
, GetRsp(..)
)
where
-- aeson
import Data.Aeson
-- text
import Data.Text (Text)
-- | Request body for Pocket's retrieve endpoint.  The consumer key and
-- access token are mandatory; every remaining field is an optional
-- filter whose accepted values are defined by the remote API, not
-- here.  See 'makeGetReq' for a minimal value.
data GetReq =
  GetReq
    { getReqConsumerKey :: Text        -- ^ application consumer key
    , getReqAccessToken :: Text        -- ^ user access token
    , getReqState :: Maybe Text        -- ^ optional item-state filter
    , getReqFavorite :: Maybe Integer  -- ^ optional favourite filter
    , getReqTag :: Maybe Text          -- ^ optional tag filter
    , getReqContentType :: Maybe Text  -- ^ optional content-type filter
    , getReqSort :: Maybe Text         -- ^ optional sort order
    , getReqDetailType :: Maybe Text   -- ^ optional response detail level
    , getReqSearch :: Maybe Text       -- ^ optional search term
    , getReqDomain :: Maybe Text       -- ^ optional domain filter
    , getReqSince :: Maybe Text        -- ^ optional since-timestamp filter
    , getReqCount :: Maybe Integer     -- ^ optional result count limit
    , getReqOffset :: Maybe Integer    -- ^ optional result offset
    }
-- | Encode a request for the wire.  Unset 'Maybe' fields serialise as
-- JSON @null@ (aeson's default 'Maybe' encoding) rather than being
-- omitted.
-- NOTE(review): the key style is mixed ("consumer_key" vs
-- "contentType"); presumably this mirrors the remote API exactly --
-- confirm against the API docs before normalising.
instance ToJSON GetReq where
  toJSON getReq =
    object
      [ "consumer_key" .= getReqConsumerKey getReq
      , "access_token" .= getReqAccessToken getReq
      , "state" .= getReqState getReq
      , "favorite" .= getReqFavorite getReq
      , "tag" .= getReqTag getReq
      , "contentType" .= getReqContentType getReq
      , "sort" .= getReqSort getReq
      , "detailType" .= getReqDetailType getReq
      , "search" .= getReqSearch getReq
      , "domain" .= getReqDomain getReq
      , "since" .= getReqSince getReq
      , "count" .= getReqCount getReq
      , "offset" .= getReqOffset getReq
      ]
-- | Smart constructor: a 'GetReq' carrying only the two mandatory
-- credentials with every optional filter left unset.  Callers refine
-- individual fields via record update syntax.
makeGetReq
  :: Text    -- ^ consumer key
  -> Text    -- ^ access token
  -> GetReq
makeGetReq consumerKey accessToken =
  GetReq
    { getReqConsumerKey = consumerKey
    , getReqAccessToken = accessToken
    , getReqState = Nothing
    , getReqFavorite = Nothing
    , getReqTag = Nothing
    , getReqContentType = Nothing
    , getReqSort = Nothing
    , getReqDetailType = Nothing
    , getReqSearch = Nothing
    , getReqDomain = Nothing
    , getReqSince = Nothing
    , getReqCount = Nothing
    , getReqOffset = Nothing
    }
-- | Response payload from the retrieve endpoint.
data GetRsp =
  GetRsp
    { getRespList :: Object     -- ^ raw JSON object of items (schema left to callers)
    , getRespStatus :: Integer  -- ^ numeric status field from the response body
    }
  deriving (Show)
-- | Decode a response: requires the top level to be a JSON object with
-- "list" and "status" keys.
instance FromJSON GetRsp where
  parseJSON = withObject "" parseRsp
    where
      parseRsp o =
        GetRsp
          <$> o .: "list"
          <*> o .: "status"
| jpvillaisaza/pocket-haskell | src/Web/Pocket/Get.hs | mit | 2,010 | 0 | 11 | 553 | 485 | 275 | 210 | 69 | 1 |
module ASTParser where
import AST
import Control.Monad (void)
import Text.Megaparsec
import Text.Megaparsec.Expr
import Text.Megaparsec.String
import qualified Text.Megaparsec.Lexer as L
import Text.Megaparsec
import Lexer
testParser = miniJavaParser
-- | A MiniJava program: the main class followed by any number of
-- ordinary classes.
miniJavaParser :: Parser MiniJava
miniJavaParser = MiniJava <$> mainClassP <*> many usualClassP
-- | The main class: consumes leading whitespace, then
-- @class Name { fields... mainMethod methods... }@.  Its superclass is
-- always "Object" and the main method is placed first in the method
-- list.
mainClassP :: Parser Class
mainClassP = do
  sc
  symbol "class"
  className <- identifier
  (fields, mainM, otherMethods) <-
    braces ((,,) <$> many varDeclarationP <*> mainMethodP <*> many methodP)
  return (Class className "Object" fields (mainM : otherMethods))
-- | An ordinary class: @class Name [extends Super] { fields methods }@.
-- Without an @extends@ clause the superclass defaults to "Object".
usualClassP :: Parser Class
usualClassP = do
  symbol "class"
  className <- identifier
  superName <- option "Object" (symbol "extends" *> identifier)
  (fields, methods) <- braces ((,) <$> many varDeclarationP <*> many methodP)
  return (Class className superName fields methods)
-- | Method Parser
--
-- | The program entry point:
-- @public static void main(params) { statements }@.
mainMethodP :: Parser Method
mainMethodP = do
  symbol "public"
  symbol "static"
  symbol "void"
  name <- symbol "main"
  params <- parens (variableP `sepBy` comma)
  body <- braces (many statementP)
  return (Method name VoidT params body)
-- | An ordinary method: @public Type name(params) { statements }@.
methodP :: Parser Method
methodP = do
  symbol "public"
  retType <- typeP
  name <- identifier
  params <- parens (variableP `sepBy` comma)
  body <- braces (many statementP)
  return (Method name retType params body)
-- | Statement Parser
--
-- | A single statement.  'try' guards alternatives whose leading
-- tokens overlap with a later alternative (e.g. "System.out.println"
-- vs "System.out.print") so a failed attempt backtracks.
statementP :: Parser Statement
statementP = try ifP <|> whileP <|> try printLnP <|> printP <|> stmExpP <|> blockStmP
-- | @if (cond) ...@ with an optional @else@ branch.  Each branch is
-- either a braced block or a single bare statement, which is wrapped
-- into a singleton list so both forms produce a [Statement].
ifP :: Parser Statement
ifP = do
  symbol "if"
  ifexp <- parens expressionP
  stms <- (braces $ many statementP) <|> (:[]) <$> statementP
  elseexp <-
    optional $
    symbol "else" *> (braces (many statementP) <|> ((:[]) <$> statementP))
  return $ If ifexp stms elseexp
-- | @while (cond) { statements }@ -- unlike 'ifP' the body must be a
-- braced block.
whileP :: Parser Statement
whileP = do
  symbol "while"
  cond <- parens expressionP
  body <- braces (many statementP)
  return (While cond body)
-- | @System.out.println(e);@
printLnP :: Parser Statement
printLnP = do
  symbol "System" *> dot *> symbol "out" *> dot <* symbol "println"
  PrintLn <$> parens expressionP <* semi

-- | @System.out.print((char) e);@ -- the @(char)@ cast is required by
-- this grammar and then discarded.
printP :: Parser Statement
printP = do
  symbol "System" *> dot *> symbol "out" *> dot <* symbol "print"
  Print <$> parens ((parens $ symbol "char") *> expressionP) <* semi

-- | A bare expression used as a statement, terminated by @;@.
stmExpP :: Parser Statement
stmExpP = StmExp <$> expressionP <* semi

-- | A braced block of statements used as a single statement.
blockStmP :: Parser Statement
blockStmP = BlockStm <$> (braces $ many statementP)
-- | Expression Parser
--
-- | Full expression grammar: atomic expressions ('primaryP') combined
-- according to the precedence/associativity table 'opTable'.
expressionP :: Parser Expression
expressionP = makeExprParser primaryP opTable
-- | Atomic expressions.  Alternatives whose prefixes overlap are
-- wrapped in 'try' so a failed attempt backtracks: a typed variable
-- ('litVarP') shares a prefix with a bare identifier, and
-- "new int[..]"/"new String[..]" share the "new" keyword with
-- 'newObjP' further down the chain.
primaryP :: Parser Expression
primaryP =
  litBoolP <|> try litVarP <|> litIntP <|> litIdentP <|> thisP <|> try intArrP <|>
  try strArrP <|>
  blockExpP <|>
  returnP <|>
  newObjP <|>
  litStrP
-- | @return e@.
returnP :: Parser Expression
returnP = Return <$> (symbol "return" *> expressionP)

-- | One or more expressions in parentheses.
blockExpP :: Parser Expression
blockExpP = BlockExp <$> parens (some expressionP)

-- | A typed variable occurrence: type followed by identifier.
litVarP :: Parser Expression
litVarP = LitVar <$> variableP

-- | Boolean literal.
litBoolP :: Parser Expression
litBoolP = LitBool <$> boolP

-- | Integer literal.
litIntP :: Parser Expression
litIntP = LitInt <$> integer

-- | String literal.
litStrP :: Parser Expression
litStrP = LitStr <$> stringLiteral

-- | The @this@ keyword.
thisP :: Parser Expression
thisP = symbol "this" *> return This

-- | A bare identifier.
litIdentP :: Parser Expression
litIdentP = LitIdent <$> identifier

-- | @new int[e]@ array allocation.
intArrP :: Parser Expression
intArrP = IntArr <$> (symbol "new" *> symbol "int" *> brackets expressionP)

-- | @new String[e]@ array allocation.
strArrP :: Parser Expression
strArrP = StrArr <$> (symbol "new" *> symbol "String" *> brackets expressionP)

-- | @new Ident(args)@ object construction.
newObjP :: Parser Expression
newObjP = do
  symbol "new"
  id <- identifier
  args <- parens (expressionP `sepBy` comma)
  return $ NewObject id args

-- | @true@ / @false@ keywords.
boolP :: Parser Bool
boolP = symbol "true" *> return True <|> symbol "false" *> return False
-- | Operator table for 'makeExprParser', tightest binding first:
-- indexing/member access, unary not, multiplicative, additive,
-- comparisons, (in)equality, conjunction, disjunction, and
-- right-associative assignment.
-- (Fix: top-level binding was missing its type signature.)
opTable :: [[Operator Parser Expression]]
opTable =
  [ [ Postfix (flip IndexGet <$> brackets expressionP)
      -- Chained accesses like a.b().c fold left-to-right.
    , Postfix $ foldr1 (flip (.)) <$> some (methodGetP <|> memberGetP)
    ]
  , [Prefix (unaryOps [("!", NOT)])]
  , [InfixL (binaryOps [("*", MUL), ("/", DIV), ("%", MOD)])]
  , [InfixL (binaryOps [("+", PLUS), ("-", MINUS)])]
  , [InfixL (binaryOps [("<=", LEQ), ("<", LE), (">=", GEQ), (">", GE)])]
  , [InfixL (binaryOps [("==", EQS), ("!=", NEQS)])]
  , [InfixL (binaryOps [("&&", AND)])]
  , [InfixL (binaryOps [("||", OR)])]
  , [InfixR (Assign <$ symbol "=")]
  ]
-- | Postfix method call: @.name(args)@, yielding a function that
-- attaches the call to its receiver.  'try' lets a lone "." followed
-- by a member access backtrack to 'memberGetP'.
methodGetP :: Parser (Expression -> Expression)
methodGetP = try $ do
  dot
  name <- identifier
  args <- parens (expressionP `sepBy` comma)
  return (\receiver -> MethodGet receiver name args)
-- | Postfix member access: @.name@, yielding a function that attaches
-- the access to its receiver.
memberGetP :: Parser (Expression -> Expression)
memberGetP = do
  dot
  name <- identifier
  return (`MemberGet` name)
-- | Build a prefix-operator parser from (lexeme, constructor) pairs.
-- (Fix: 'choice' replaces 'foldr1 (<|>)', which crashed on an empty
-- operator list; for non-empty lists the behaviour is identical.)
unaryOps :: [(String, UnaryOp)] -> Parser (Expression -> Expression)
unaryOps ops = choice [UnOp op <$ try (symbol s) | (s, op) <- ops]
-- | Build an infix-operator parser from (lexeme, constructor) pairs.
-- (Fix: 'choice' replaces 'foldr1 (<|>)', which crashed on an empty
-- operator list; for non-empty lists the behaviour is identical.)
binaryOps :: [(String, BinaryOp)]
          -> Parser (Expression -> Expression -> Expression)
binaryOps ops = choice [(\l r -> BinOp l op r) <$ try (symbol s) | (s, op) <- ops]
-- | Variable Parser
--
-- | A formal parameter or declaration head: type followed by name.
variableP :: Parser Variable
variableP = Variable <$> typeP <*> identifier

-- | A field/local declaration: a variable followed by @;@.
varDeclarationP :: Parser Variable
varDeclarationP = variableP <* semi
-- | Built-in and user-defined types.  The keyword symbols carry a
-- trailing space (e.g. "int ") so identifiers that merely start with a
-- keyword are not mis-lexed; 'try' is needed where two alternatives
-- share a prefix ("int[]" vs "int []").
-- NOTE(review): the trailing-space trick depends on how the Lexer's
-- 'symbol' consumes whitespace -- confirm against Lexer.hs.
typeP :: Parser Type
typeP =
  try (symbol "int[] " *> return IntArrT) <|>
  try (symbol "int [] " *> return IntArrT) <|>
  symbol "String[] " *> return StringArrT <|>
  symbol "String [] " *> return StringArrT <|>
  symbol "String " *> return StringT <|>
  symbol "int " *> return IntT <|>
  symbol "boolean " *> return BoolT <|>
  symbol "void " *> return VoidT <|>
  IdT <$> identifier
| cirquit/hjc | src/ASTParser.hs | mit | 5,626 | 0 | 23 | 1,230 | 2,056 | 1,044 | 1,012 | 156 | 1 |
module Feature.SingularSpec where
import Text.Heredoc
import Test.Hspec
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Network.HTTP.Types
import Network.Wai.Test (SResponse(..))
import Network.Wai (Application)
import SpecHelper
import Protolude hiding (get)
-- | Specs for the singular-object media type
-- (@application/vnd.pgrst.object+json@): a response must contain
-- exactly one row, and any request that would return or affect zero or
-- several rows must be rejected with 406 Not Acceptable.
spec :: SpecWith Application
spec =
  describe "Requesting singular json object" $ do
    let pgrstObj = "application/vnd.pgrst.object+json"
        singular = ("Accept", pgrstObj)
    context "with GET request" $ do
      it "fails for zero rows" $
        request methodGet "/items?id=gt.0&id=lt.0" [singular] ""
          `shouldRespondWith` 406
      it "will select an existing object" $ do
        request methodGet "/items?id=eq.5" [singular] ""
          `shouldRespondWith` [str|{"id":5}|]
        -- also test without the +json suffix
        request methodGet "/items?id=eq.5"
          [("Accept", "application/vnd.pgrst.object")] ""
          `shouldRespondWith` [str|{"id":5}|]
      it "can combine multiple prefer values" $
        request methodGet "/items?id=eq.5" [singular, ("Prefer","count=none")] ""
          `shouldRespondWith` [str|{"id":5}|]
      it "can shape plurality singular object routes" $
        request methodGet "/projects_view?id=eq.1&select=id,name,clients(*),tasks(id,name)" [singular] ""
          `shouldRespondWith`
            [json|{"id":1,"name":"Windows 7","clients":{"id":1,"name":"Microsoft"},"tasks":[{"id":1,"name":"Design w7"},{"id":2,"name":"Code w7"}]}|]
            { matchHeaders = ["Content-Type" <:> "application/vnd.pgrst.object+json; charset=utf-8"] }
    context "when updating rows" $ do
      it "works for one row" $ do
        _ <- post "/addresses" [json| { id: 97, address: "A Street" } |]
        request methodPatch
          "/addresses?id=eq.97"
          [("Prefer", "return=representation"), singular]
          [json| { address: "B Street" } |]
          `shouldRespondWith`
            [str|{"id":97,"address":"B Street"}|]
      it "raises an error for multiple rows" $ do
        _ <- post "/addresses" [json| { id: 98, address: "xxx" } |]
        _ <- post "/addresses" [json| { id: 99, address: "yyy" } |]
        p <- request methodPatch
          "/addresses?id=gt.0"
          [("Prefer", "return=representation"), singular]
          [json| { address: "zzz" } |]
        liftIO $ do
          simpleStatus p `shouldBe` notAcceptable406
          isErrorFormat (simpleBody p) `shouldBe` True
        -- the rows should not be updated, either
        get "/addresses?id=eq.98" `shouldRespondWith` [str|[{"id":98,"address":"xxx"}]|]
      it "raises an error for zero rows" $ do
        p <- request methodPatch "/items?id=gt.0&id=lt.0"
          [("Prefer", "return=representation"), singular] [json|{"id":1}|]
        liftIO $ do
          simpleStatus p `shouldBe` notAcceptable406
          isErrorFormat (simpleBody p) `shouldBe` True
    context "when creating rows" $ do
      it "works for one row" $ do
        p <- request methodPost
          "/addresses"
          [("Prefer", "return=representation"), singular]
          [json| [ { id: 100, address: "xxx" } ] |]
        liftIO $ simpleBody p `shouldBe` [str|{"id":100,"address":"xxx"}|]
      it "works for one row even with return=minimal" $ do
        request methodPost "/addresses"
          [("Prefer", "return=minimal"), singular]
          [json| [ { id: 101, address: "xxx" } ] |]
          `shouldRespondWith` ""
            { matchStatus = 201
            , matchHeaders = ["Content-Range" <:> "*/*"]
            }
        -- and the element should exist
        get "/addresses?id=eq.101"
          `shouldRespondWith` [str|[{"id":101,"address":"xxx"}]|]
            { matchStatus = 200
            , matchHeaders = []
            }
      it "raises an error when attempting to create multiple entities" $ do
        p <- request methodPost
          "/addresses"
          [("Prefer", "return=representation"), singular]
          [json| [ { id: 200, address: "xxx" }, { id: 201, address: "yyy" } ] |]
        liftIO $ simpleStatus p `shouldBe` notAcceptable406
        -- the rows should not exist, either
        get "/addresses?id=eq.200" `shouldRespondWith` "[]"
      it "return=minimal allows request to create multiple elements" $
        request methodPost "/addresses"
          [("Prefer", "return=minimal"), singular]
          [json| [ { id: 200, address: "xxx" }, { id: 201, address: "yyy" } ] |]
          `shouldRespondWith` ""
            { matchStatus = 201
            , matchHeaders = ["Content-Range" <:> "*/*"]
            }
      it "raises an error when creating zero entities" $ do
        p <- request methodPost
          "/addresses"
          [("Prefer", "return=representation"), singular]
          [json| [ ] |]
        liftIO $ do
          simpleStatus p `shouldBe` notAcceptable406
          isErrorFormat (simpleBody p) `shouldBe` True
    context "when deleting rows" $ do
      it "works for one row" $ do
        p <- request methodDelete
          "/items?id=eq.11"
          [("Prefer", "return=representation"), singular] ""
        liftIO $ simpleBody p `shouldBe` [str|{"id":11}|]
      it "raises an error when attempting to delete multiple entities" $ do
        let firstItems = "/items?id=gt.0&id=lt.11"
        request methodDelete firstItems
          [("Prefer", "return=representation"), singular] ""
          `shouldRespondWith` 406
        get firstItems
          `shouldRespondWith` [json| [{"id":1},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10}] |]
            { matchStatus = 200
            , matchHeaders = ["Content-Range" <:> "0-9/*"]
            }
      it "raises an error when deleting zero entities" $ do
        p <- request methodDelete "/items?id=lt.0"
          [("Prefer", "return=representation"), singular] ""
        liftIO $ do
          simpleStatus p `shouldBe` notAcceptable406
          isErrorFormat (simpleBody p) `shouldBe` True
    context "when calling a stored proc" $ do
      it "fails for zero rows" $ do
        p <- request methodPost "/rpc/getproject"
          [singular] [json|{ "id": 9999999}|]
        liftIO $ do
          simpleStatus p `shouldBe` notAcceptable406
          isErrorFormat (simpleBody p) `shouldBe` True
      -- this one may be controversial, should vnd.pgrst.object include
      -- the likes of 2 and "hello?"
      it "succeeds for scalar result" $
        request methodPost "/rpc/sayhello"
          [singular] [json|{ "name": "world"}|]
          `shouldRespondWith` 200
      it "returns a single object for json proc" $
        request methodPost "/rpc/getproject"
          [singular] [json|{ "id": 1}|] `shouldRespondWith`
          [str|{"id":1,"name":"Windows 7","client_id":1}|]
      it "fails for multiple rows" $ do
        p <- request methodPost "/rpc/getallprojects" [singular] "{}"
        liftIO $ do
          simpleStatus p `shouldBe` notAcceptable406
          isErrorFormat (simpleBody p) `shouldBe` True
      it "executes the proc exactly once per request" $ do
        request methodPost "/rpc/getproject?select=id,name" [] [json| {"id": 1} |]
          `shouldRespondWith` [str|[{"id":1,"name":"Windows 7"}]|]
        p <- request methodPost "/rpc/setprojects" [singular]
          [json| {"id_l": 1, "id_h": 2, "name": "changed"} |]
        liftIO $ do
          simpleStatus p `shouldBe` notAcceptable406
          isErrorFormat (simpleBody p) `shouldBe` True
        -- should not actually have executed the function
        request methodPost "/rpc/getproject?select=id,name" [] [json| {"id": 1} |]
          `shouldRespondWith` [str|[{"id":1,"name":"Windows 7"}]|]
| begriffs/postgrest | test/Feature/SingularSpec.hs | mit | 7,690 | 0 | 20 | 2,093 | 1,540 | 847 | 693 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
import Data.Foldable (for_)
import GHC.Exts (fromList, toList)
import Test.Hspec (Spec, describe, it, shouldMatchList)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import Anagram (anagramsFor)
-- | Run the suite; 'configFastFail' aborts at the first failure.
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = describe "anagramsFor" $ for_ cases test
  where
    -- Run one table-driven case; result order is irrelevant, hence
    -- 'shouldMatchList'.
    test Case{..} = it description $ expression `shouldMatchList` expected
      where
        -- 'fromList'/'toList' (GHC.Exts) convert between String and
        -- whatever list-like type the solution's 'anagramsFor' uses.
        expression = map toList
                   . toList
                   . anagramsFor (fromList subject)
                   . fromList
                   . map fromList
                   $ candidates
-- | One table-driven test case.
data Case = Case { description :: String  -- ^ label shown by hspec
                 , subject :: String      -- ^ word to find anagrams of
                 , candidates :: [String] -- ^ words to check
                 , expected :: [String]   -- ^ candidates that are anagrams
                 }
-- | Test table (from the exercism anagram exercise data).  The
-- expectations encode the rules: comparison is case-insensitive, every
-- letter must be used exactly once, and a word never counts as its own
-- anagram.
cases :: [Case]
cases = [ Case { description = "no matches"
               , subject = "diaper"
               , candidates = [ "hello", "world", "zombies", "pants"]
               , expected = []
               }
        , Case { description = "detects simple anagram"
               , subject = "ant"
               , candidates = ["tan", "stand", "at"]
               , expected = ["tan"]
               }
        , Case { description = "does not detect false positives"
               , subject = "galea"
               , candidates = ["eagle"]
               , expected = []
               }
        , Case { description = "detects two anagrams"
               , subject = "master"
               , candidates = ["stream", "pigeon", "maters"]
               , expected = ["stream", "maters"]
               }
        , Case { description = "does not detect anagram subsets"
               , subject = "good"
               , candidates = ["dog", "goody"]
               , expected = []
               }
        , Case { description = "detects anagram"
               , subject = "listen"
               , candidates = ["enlists", "google", "inlets", "banana"]
               , expected = ["inlets"]
               }
        , Case { description = "detects three anagrams"
               , subject = "allergy"
               , candidates = ["gallery", "ballerina", "regally", "clergy", "largely", "leading"]
               , expected = ["gallery", "regally", "largely"]
               }
        , Case { description = "does not detect identical words"
               , subject = "corn"
               , candidates = ["corn", "dark", "Corn", "rank", "CORN", "cron", "park"]
               , expected = ["cron"]
               }
        , Case { description = "does not detect non-anagrams with identical checksum"
               , subject = "mass"
               , candidates = ["last"]
               , expected = []
               }
        , Case { description = "detects anagrams case-insensitively"
               , subject = "Orchestra"
               , candidates = ["cashregister", "Carthorse", "radishes"]
               , expected = ["Carthorse"]
               }
        , Case { description = "detects anagrams using case-insensitive subject"
               , subject = "Orchestra"
               , candidates = ["cashregister", "carthorse", "radishes"]
               , expected = ["carthorse"]
               }
        , Case { description = "detects anagrams using case-insensitive possible matches"
               , subject = "orchestra"
               , candidates = ["cashregister", "Carthorse", "radishes"]
               , expected = ["Carthorse"]
               }
        , Case { description = "does not detect a word as its own anagram"
               , subject = "banana"
               , candidates = ["Banana"]
               , expected = []
               }
        , Case { description = "does not detect a anagram if the original word is repeated"
               , subject = "go"
               , candidates = ["go Go GO"]
               , expected = []
               }
        , Case { description = "anagrams must use all letters exactly once"
               , subject = "tapper"
               , candidates = ["patter"]
               , expected = []
               }
        , Case { description = "capital word is not own anagram"
               , subject = "BANANA"
               , candidates = ["Banana"]
               , expected = []
               }
        ]
| genos/online_problems | exercism/haskell/anagram/test/Tests.hs | mit | 4,538 | 0 | 15 | 1,943 | 887 | 559 | 328 | 86 | 1 |
module Main where
import Data.HEPEVT
-- | Parse the sample HEPEVT file and print the number of events in it.
-- (Fixes: added the missing top-level type signature; 'print' replaces
-- the equivalent 'putStrLn $ show'.)
main :: IO ()
main = do
  events <- parseEventFile "events.dat"
  print (length events)
| bytbox/hepevt.hs | Main.hs | mit | 124 | 0 | 8 | 25 | 38 | 19 | 19 | 5 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-|
Module : Graphics.Heatmap
Description : Heatmap generation utilities.
Copyright : (c) Dustin Sallings, 2021
License : BSD3
Maintainer : dustin@spy.net
Stability : experimental
Tools for generating heatmap images.
-}
module Graphics.Heatmap (Point, mkDot, genHeatmap, mkTranslator) where
import Codec.Picture (Image (..), PixelRGBA8 (..), generateImage)
import Control.Lens
import qualified Data.Map.Strict as Map
import Graphics.Heatmap.Types (Colorizer, Depth (..))
import Linear.V2 (V2 (..))
import Linear.Vector (zero)
import Numeric.Natural
-- | A Point as used throughout this heatmap codebase is currently
-- limited to two dimensional vectors of natural numbers.
type Point = V2 Natural

-- | Things that can enumerate a square neighbourhood of coordinates
-- around an origin value of the same type.
class Projector a b | a -> b where
  project :: a -> b -> [a]

-- | An integral 2-D origin projects to the (size+1) x (size+1) square
-- of points at offsets 0..size (both bounds inclusive) from it.
instance (Enum a, Integral a) => Projector (V2 a) a where
  project o size = [ V2 x y + o | x <- [ 0 .. size ], y <- [ 0 .. size ]]
-- mkDot can be more generic, but we don't have a great use case for
-- things other than V2 right now.
--
-- mkDot :: (Each a a Int Int, Projector a Int, Num a) => a -> Int -> [(a, Depth)]
-- | Make a dot of a given size indicating presence that should begin
-- heating the map.
mkDot :: Point -> Natural -> [(Point, Depth)]
mkDot o size = [ d | p2 <- project zero size,
                 d <- depthAt (o + p2) (distance p2) ]
  where
    -- 'o' with both components replaced by 'size': the far corner of
    -- the dot's bounding square in local coordinates.
    edge = o & each .~ size
    -- Dot radius: half the centre-to-corner distance of the square.
    middle = distance edge / 2
    -- Euclidean distance of a local point from the square's centre.
    distance = sqrt . sumOf (each . to f)
      where f off = (fromIntegral off - (fromIntegral size / 2)) ^^ 2
    -- Inside the radius, depth fades linearly with distance from the
    -- centre (scaled by the 200/50 constants below); outside it, the
    -- point contributes nothing (empty list).
    depthAt p d = [(p, maxBound - round (200 * d/middle + 50)) | d < middle]
-- | Generate a heatmap image using a Colorizer and a collection of points and depths.
genHeatmap :: (Natural, Natural) -> Colorizer -> [(Point, Depth)] -> Image PixelRGBA8
genHeatmap (w,h) colorizer points = generateImage pf (fromIntegral w) (fromIntegral h)
  where
    -- Depths of coinciding points are combined with (<>); a pixel with
    -- no point falls back to minBound (coldest).
    m = Map.fromListWith (<>) points
    pf x y = colorizer $ Map.findWithDefault minBound (V2 (fromIntegral x) (fromIntegral y)) m
-- | Make a scale translator mapping points from an arbitrarily bounded
-- two dimensional space onto the Natural-bounded 2D space used when
-- applying points to a heatmap.
mkTranslator :: RealFrac a => (V2 a, V2 a) -> (V2 Natural, V2 Natural) -> (V2 a -> V2 Natural)
mkTranslator (V2 ixLo iyLo, V2 ixHi iyHi) (V2 oxLo oyLo, V2 oxHi oyHi) =
  \(V2 x y) -> V2 (scale x ixLo ixHi oxLo oxHi) (scale y iyLo iyHi oyLo oyHi)
  where
    -- Linear interpolation of v from [il, ih] onto [ol, oh], rounded.
    scale v il ih ol oh = ol + round ((v - il) * fromIntegral (oh - ol) / (ih - il))
| dustin/heatmap | src/Graphics/Heatmap.hs | mit | 2,780 | 0 | 14 | 666 | 789 | 431 | 358 | -1 | -1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.