code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
module Main where
import ClassyPrelude hiding (Element, FilePath, elem,
getCurrentTime, readFile, writeFile)
import Control.Concurrent.Async (mapConcurrently)
import Control.Lens (Getting, filtered, from, has, ix,
only, to, view, (&), (.~), (<>~),
(^.), (^..), (^?!))
import Control.Monad (foldM_)
import Data.Maybe (fromJust)
import Data.Text (splitOn,breakOn)
import Data.Text.Lens (packed)
import qualified Network.Wreq as Wreq
import qualified Data.CaseInsensitive as CI
import Reviewer.Database
import Reviewer.Entity
import Reviewer.EntityType
import Reviewer.LinkRange
import Reviewer.PageContent
import Reviewer.PageNumber
import Reviewer.RelevantLink
import Reviewer.Settings
import Reviewer.Time
import Reviewer.DdosPrefix
import Reviewer.Url
import qualified Shelly
import qualified System.Console.Haskeline as HL
import Text.Taggy.Lens (Element, allAttributed, allNamed,
attr, contents, html)
-- | 'HL.outputStrLn' lifted to accept 'Text' instead of 'String'.
outputStrLn :: MonadIO m => Text -> HL.InputT m ()
outputStrLn = HL.outputStrLn . unpack
-- | Build the URL of one forum index page from the configured base URL,
-- sub-forum id, and page number.  The query string targets what appears
-- to be a vBulletin-style @forumdisplay.php@ endpoint, newest first.
makePageUrl :: Settings -> PageNumber -> Url
makePageUrl settings page = (pack (settings ^. settingsBaseUrl) <> "/forumdisplay.php?f=" <> pack (settings ^. settingsSubForum) <> "&order=desc&page=" <> pack (show (extractPageNumber page))) ^. from urlAsText
-- | Download the given URL, attaching the anti-DDoS cookie carried by
-- the 'DdosPrefix' (sent as @name=value@ in a @Cookie@ header).
-- Progress is reported on stdout before the request is issued.
retrieveUrl :: MonadIO m => DdosPrefix -> Url -> m PageContent
retrieveUrl t u = do
  {-
  let opts = defaults & manager .~ Left (defaultManagerSettings { managerResponseTimeout = Just 10000 } )
  in undefined
  -}
  -- NOTE(review): the commented block above is an unfinished sketch of a
  -- manual response-timeout configuration; consider finishing or removing it.
  putStrLn $ "retrieving " <> pack (show u)
  response <- liftIO (Wreq.getWith (Wreq.defaults & Wreq.headers <>~ [(CI.mk "Cookie",encodeUtf8 (t ^. ddosName <> "=" <> t ^. ddosValue))]) (u ^. urlAsString))
  -- The lazy response body is forced strict before decoding as UTF-8.
  return $ decodeUtf8 (toStrict (response ^. Wreq.responseBody)) ^. from pageContentAsText
-- | Extract every thread link from a page: all @\<a\>@ elements whose
-- @id@ attribute starts with @thread_title@.
-- NOTE(review): @l ^?! attr "href"@ is already partial (crashes when the
-- traversal is empty) and 'fromJust' then unwraps the resulting Maybe —
-- two partial steps in a row; a total formulation would be safer.
extractLinks :: PageContent -> [RelevantLink]
extractLinks page = page ^.. pageContentAsStrictText . html . allNamed (only "a") . allAttributed (ix "id" . filtered ("thread_title" `isPrefixOf`)) . to relevantLink
  where relevantLink :: Element -> RelevantLink
        relevantLink l = RelevantLink {
            _rlText = l ^. contents
          , _rlUrl = fromJust (l ^?! attr "href") ^. from urlAsText
          }
-- | Look up the first known entity whose text occurs inside the link's
-- title; 'Nothing' means the link is unclassified.
classifyLink :: Database -> RelevantLink -> Maybe Entity
classifyLink db link = find matches db
  where matches entity = (entity ^. entityText) `isInfixOf` (link ^. rlText)
-- | Compare two values for equality after projecting both through the
-- given getter.
eqL :: Eq a => Getting a t a -> t -> t -> Bool
eqL l x y = project x == project y
  where project = view l
-- | Overwrite every stored entity whose 'entityText' equals that of the
-- given entity with the given entity (used to persist added encounters
-- or a changed classification).
updateDatabase :: Database -> Entity -> Database
updateDatabase db entity = db & traverse . filtered (eqL entityText entity) .~ entity
-- | Launch the configured browser on @baseUrl/url@.  The browser binary
-- setting is split on spaces: first word is the executable, the rest are
-- extra arguments.
-- NOTE(review): the @(command:args)@ pattern is partial; it is safe only
-- because 'splitOn' always returns at least one element — confirm if the
-- setting can ever be empty.
openBrowser :: MonadIO m => Settings -> Url -> m ()
openBrowser settings url =
  let (command:args) = splitOn " " (settings ^. settingsBrowserBin . packed)
  in Shelly.shelly $ Shelly.run_ (Shelly.fromText command) (args <> [pack (settings ^. settingsBaseUrl) <> "/" <> (url ^. urlAsText)])
-- | Prompt with @s@ and read single key presses until @f@ accepts one,
-- returning the accepted value.  Returns 'Nothing' only on end of input
-- ('HL.getInputChar' yielding 'Nothing'); rejected characters simply
-- re-prompt.
readCharConditional :: (MonadIO m,HL.MonadException m) => String -> (Char -> Maybe a) -> HL.InputT m (Maybe a)
readCharConditional s f = do
  c' <- HL.getInputChar s
  case c' of
    Nothing -> return Nothing
    Just c ->
      case f c of
        Nothing -> readCharConditional s f
        Just r -> return (Just r)
-- | Map a key press to an entity classification:
-- @g@ = good, @b@ = bad, @i@ = indeterminate; any other key is rejected.
readEntityState :: Char -> Maybe EntityType
readEntityState c = case c of
  'g' -> Just EntityGood
  'b' -> Just EntityBad
  'i' -> Just EntityIndet
  _   -> Nothing
-- The implicit contract for splitOn guarantees that it returns at
-- least one element, which we make use of here to construct
-- MinLen (Succ Zero)
-- | 'splitOn' with the non-emptiness of its result reflected in the
-- type via 'MinLen'.  The 'unsafeToMinLen' is justified by the contract
-- described above.
splitOnSafe :: Text -> Text -> MinLen (Succ Zero) [Text]
splitOnSafe a b = unsafeToMinLen (splitOn a b)
-- | For a title of the form @\"a / b / c\"@, pick the longest
-- alternative.  'maximumBy' requires a non-empty input, which
-- 'splitOnSafe' guarantees at the type level.
longestName :: Text -> Text
longestName = maximumBy (compare `on` length) . splitOnSafe " / "
-- | Interactively process one thread link: re-read the database, show
-- progress, and either (a) ask the user to classify an unknown entity
-- and store it, or (b) act on the stored classification (open good /
-- indeterminate links, skip bad ones), recording the new encounter time.
-- Returns the entity list extended with the entity seen for this link,
-- so the caller can detect duplicates within one run.
processLink :: forall m.(MonadIO m,HL.MonadException m) => Settings -> [Entity] -> LinkRange -> RelevantLink -> HL.InputT m [Entity]
processLink settings previousEntities linkRange link = do
  -- The database is re-read per link so external edits are picked up.
  db <- readDatabase (settings ^. settingsDbFile)
  currentTime <- getCurrentTime
  outputStrLn (visualizeLinkRange linkRange)
  case classifyLink db link of
    Nothing -> do
      -- Unknown entity: open it and ask the user for a verdict.
      outputStrLn $ "Entity \"" <> link ^. rlText <> "\" is unknown, opening"
      openBrowser settings (link ^. rlUrl)
      c' <- readCharConditional "(g)ood | (b)ad | (i)ndet: " readEntityState
      case c' of
        Nothing -> return previousEntities
        Just c -> do
          -- Suggest the longest "a / b / c" alternative as default name.
          let lonName = longestName (link ^. rlText)
          name <- HL.getInputLine (unpack ("Which name? ["<> lonName <>"] "))
          case name of
            Nothing -> return previousEntities
            Just newName -> do
              -- Empty input accepts the suggested default.
              let newEntity = Entity{_entityType = c,_entityText = if null newName then lonName else pack newName,_entityEncounters = [currentTime]}
              writeDatabase (settings ^. settingsDbFile) (newEntity:db)
              outputStrLn "Updated database!"
              return (newEntity : previousEntities)
    Just entity ->
      -- Skip entities already handled in this run.
      if has (traverse . entityText . only (entity ^. entityText)) previousEntities
        then do
          outputStrLn "Entity already encountered, ignoring"
          return previousEntities
        else do
          let
            -- Record this sighting; persisted in the branches below.
            editedEnt = entity & entityEncounters <>~ [currentTime]
          outputStrLn "Previous encounters:"
          mapM_ (outputStrLn . pack .show) (entity ^. entityEncounters)
          outputStrLn ""
          case entity ^. entityType of
            EntityBad -> do
              outputStrLn $ "Entity \"" <> entity ^. entityText <> "\" is bad, ignoring"
              outputStrLn $ "Original link: " <> link ^. rlText
              writeDatabase (settings ^. settingsDbFile) (updateDatabase db editedEnt)
              return (entity : previousEntities)
            EntityGood -> do
              outputStrLn $ "Entity \"" <> entity ^. entityText <> "\" is good, opening"
              writeDatabase (settings ^. settingsDbFile) (updateDatabase db editedEnt)
              openBrowser settings (link ^. rlUrl)
              return (entity : previousEntities)
            EntityIndet -> do
              -- Indeterminate: open and offer to re-classify.
              outputStrLn $ "Entity \"" <> entity ^. entityText <> "\" is indeterminate, opening"
              openBrowser settings (link ^. rlUrl)
              c' <- readCharConditional "(g)ood | (b)ad | (i)ndet: " readEntityState
              case c' of
                Nothing ->
                  -- NOTE(review): on EOF the new encounter is NOT persisted,
                  -- unlike the other branches — confirm this is intended.
                  return (entity : previousEntities)
                Just c -> do
                  writeDatabase (settings ^. settingsDbFile) (updateDatabase db (editedEnt & entityType .~ c))
                  outputStrLn "Updated data base!"
                  return (entity : previousEntities)
-- | Return the span of @text@ that starts at the first occurrence of
-- @prefix@ (inclusive!) and ends just before the next occurrence of
-- @suffix@.  If the prefix is absent the result is empty; if the suffix
-- is absent the result runs to the end of the input.
extractBetween :: Text -> Text -> Text -> Text
extractBetween prefix suffix text = fst (breakOn suffix fromPrefix)
  where fromPrefix = snd (breakOn prefix text)
-- | Fetch the site's landing page and scrape the anti-DDoS cookie out of
-- the raw response body.  'extractBetween' returns the span starting at
-- the prefix itself, so the subsequent @breakOn "="@ splits it into the
-- cookie name (before) and its value (after, with the \'=\' dropped).
-- Returns 'Nothing' when the marker is not present in the page.
extractDdosPrefix :: Settings -> IO (Maybe DdosPrefix)
extractDdosPrefix settings = do
  getResult <- Wreq.get (settings ^. settingsBaseUrl)
  let
    resultText = getResult ^. Wreq.responseBody . to (decodeUtf8 . toStrict)
    between = extractBetween (settings ^. settingsDdosPrefix . packed <> "=") ";" resultText
    (before,equalsAndAfter) = breakOn "=" between
  return (if null between then Nothing else Just (DdosPrefix before (drop 1 equalsAndAfter)))
-- | 'maybe' with the scrutinee first, which reads better when the
-- continuation is a long lambda.
maybeFlipped :: Maybe a -> b -> (a -> b) -> b
maybeFlipped Nothing  d _ = d
maybeFlipped (Just x) _ f = f x
-- | Entry point: parse settings, scrape the anti-DDoS cookie, download
-- all configured forum pages concurrently, then interactively classify
-- every thread link found.
main :: IO ()
main = do
  --videosPage <- TIO.readFile "/tmp/videos.html"
  --print (extractLinks (videosPage ^. from pageContentAsText))
  --ctime <- getCurrentTime
  --putStrLn (toStrict (toLazyText (encodeToTextBuilder (toJSON ([Entity{_entityType = EntityGood,_entityText = "awesome",_entityEncounters = [ctime]}])))))
  --let settings = (Settings{_settingsDbFile="/tmp/db.json",_settingsBrowserBin="/usr/bin/google-chrome --incognito"})
  --HL.runInputT HL.defaultSettings (processLink settings (RelevantLink{_rlText="anderertext",_rlUrl="http://php-tech.de" ^. from urlAsText}))
  --db <-readDatabase settings
  --putStrLn . toStrict . toLazyText . encodeToTextBuilder $ (toJSON db)
  settings <- parseSettings
  putStrLn "Extracting prefix..."
  ddosPrefix' <- extractDdosPrefix settings
  -- Bail out early when the cookie cannot be extracted; requests without
  -- it are presumably rejected by the site.
  maybeFlipped ddosPrefix' (putStrLn "Prefix not found, please check") $ \ddosPrefix -> do
    let
      pages = [1..settings ^. settingsPages]
      pageUrls = (makePageUrl settings . pageNumber) <$> pages
    pageContents <- mapConcurrently (retrieveUrl ddosPrefix) pageUrls
    let
      relevantLinks = concatMap extractLinks pageContents
    -- Thread the growing entity list through each link so duplicates
    -- within a single run are recognised by 'processLink'.
    HL.runInputT HL.defaultSettings $
      foldM_ (\previousEntities (i,l) -> processLink settings previousEntities (LinkRange i (length relevantLinks)) l) [] (zip [1..] relevantLinks)
| pmiddend/reviewer | src/Reviewer/Main.hs | gpl-3.0 | 9,278 | 1 | 28 | 2,394 | 2,492 | 1,270 | 1,222 | -1 | -1 |
module Dep.Printing.Schematics where
import Data.Bits
import Data.Word
import Dep.Structures(CombElem(..))
import Dep.Utils (safeTail,safeMapHeads,findDefault,succR,predR,nSuccR)
-- | The four compass directions used to orient wire glyphs; 'Enum'
-- order (U=0, R=1, D=2, L=3) doubles as the bit position in wire masks.
data Direction = U | R | D | L deriving (Enum,Show,Eq,Bounded)
-- | Box-drawing wire shapes: empty space, a dangling end, straight
-- lines, a four-way cross, a three-way split and a corner.
data Wire = Space | End Direction | VLine | HLine | Cross | TSplit Direction | Turn Direction deriving (Eq)
--Turn contains the highest direction
-- | One step of a SWAR bit-count: mask out the low halves of each group,
-- shift the high halves down, mask, and add the two.
shiftAdd :: (Bits b,Num b) => Int -> b -> b -> b
shiftAdd s m x = low + high
  where low  = x .&. m
        high = shiftR x s .&. m
-- | Number of set bits in a byte.  The standard 'popCount' from
-- "Data.Bits" (already imported wholesale by this module) replaces the
-- previous hand-rolled SWAR pipeline of 'shiftAdd' steps; results are
-- identical for every 'Word8'.
countSet :: Word8 -> Word8
countSet = fromIntegral . popCount
-- | Render a logic gate as box-drawing text: @li@ marks which inputs are
-- inverted, @lo@ whether the output is inverted, and @gt@ is the
-- character filling the gate body.  The first line is the top border;
-- 'printGate'' emits the input rows and bottom border.
printGate :: [Bool] -> Bool -> Char -> [String]
printGate li lo gt = (' ':'\x250c': replicate ll '\x2500' ++ "\x256e") : printGate' l2 ll li lo gt
  where ll = length li
        -- The output pin sits at the vertical midpoint of the inputs.
        l2 = div ll 2
-- | Worker for 'printGate': one row per input, then the bottom border.
-- The first counter ticks down so that the row where it reaches 1 is the
-- one that carries the output pin (see 'printGateLine').  The last input
-- row deliberately does not decrement it.
printGate' :: Int -> Int -> [Bool] -> Bool -> Char -> [String]
printGate' _ g [] _ _ = [' ':'\x2514': replicate g '\x2500' ++ "\x256f" ]
printGate' l2 g [i] o s = printGateLine l2 g i o s : printGate' l2 g [] o s
printGate' l2 g (i:is) o s = printGateLine l2 g i o s : printGate' (l2-1) g is o s
-- | Render one gate row: input marker, left wall, @g@ copies of the body
-- character, right wall.  When the countdown equals 1 this row also
-- carries the output pin and its inversion marker.
printGateLine :: Int -> Int -> Bool -> Bool -> Char -> String
printGateLine 1 g i o s = printIW i : '\x2524' : replicate g s ++ ['\x251c',printIW o]
printGateLine _ g i _ s = printIW i : '\x2524' : replicate g s ++ "\x2502"
-- | Marker for an (inverted) pin: @o@ when inverted, a plain horizontal
-- line otherwise.
printIW :: Bool -> Char
printIW inverted = if inverted then 'o' else '\x2500'
--{-
-- | Render each wire shape as its box-drawing glyph; 'End' and 'Space'
-- fall back to a dot and a blank respectively.
instance Show Wire where
    show (End _) = "."
    show HLine = "\x2500"
    show VLine = "\x2502"
    show Cross = "\x253c"
    show (TSplit U) = "\x2534"
    show (TSplit R) = "\x251c"
    show (TSplit D) = "\x2530"
    show (TSplit _) = "\x2524"
    show (Turn U) = "\x2518" --TODO: decide how to turn
    show (Turn R) = "\x2514"
    show (Turn D) = "\x250c"
    show (Turn _) = "\x2510"
    show _ = " "
--}
-- | One-hot encode a direction; the set bit's position is the
-- direction's 'fromEnum' value.
dirMask :: Direction -> Word8
dirMask d = bit (fromEnum d)
-- | Decode a one-hot direction mask; anything other than the masks for
-- 'U', 'R', 'D' falls through to 'L'.
maskDir :: Word8 -> Direction
maskDir w = case w of
  1 -> U
  2 -> R
  4 -> D
  _ -> L
-- | Encode a wire shape as a 4-bit connectivity mask, one bit per
-- connected 'Direction' (bit position = 'fromEnum').  A turn connects
-- its direction and the previous one; a T-split additionally connects
-- the next one ('predR'/'succR' are wrapping enum steps from Dep.Utils).
wireMask :: Wire -> Word8
wireMask (End d) = dirMask d
wireMask (Turn d) = dirMask d .|. dirMask (predR d)
wireMask (TSplit d) = wireMask (Turn d) .|. dirMask (succR d)
wireMask VLine = dirMask U .|. dirMask D
wireMask HLine = dirMask L .|. dirMask R
wireMask Cross = 15
wireMask _ = 0
-- | Decode a 4-bit connectivity mask back into a wire shape; the number
-- of set bits selects the shape family (see 'maskWire'').
maskWire :: Word8 -> Wire
maskWire x = maskWire' s x
  where s = countSet x
-- | Worker for 'maskWire': the first argument is the popcount of the
-- mask.  Two bits yield lines or turns, three a T-split (oriented via
-- the single missing direction), four a cross, one a dangling end, and
-- zero bits empty space.
maskWire' :: Word8 -> Word8 -> Wire
maskWire' 2 x | x == 5 = VLine
              | x == 10 = HLine
              | x == 3 = Turn R
              | x == 6 = Turn D
              | x == 12 = Turn L
              | otherwise = Turn U--9
-- The T-split's direction is derived from the one absent bit, rotated
-- half-way round ('nSuccR 2') to point at the stem.
maskWire' 3 x = TSplit $ nSuccR 2 $ maskDir $ 15 .&. complement x
maskWire' 4 _ = Cross
maskWire' 1 x = End $ maskDir x
maskWire' _ _ = Space
--instance Bits Wire where
-- | Overlay several text layers, treating the space character as
-- transparent.
stackedPrint :: [[String]] -> [String]
stackedPrint layers = genericStackedPrint ' ' (' ' /=) layers
-- | Overlay layered grids: for each cell position, pick an element
-- satisfying @flld@ from across the layers, falling back to @dflt@.
-- NOTE(review): exact pick order depends on 'findDefault' from
-- Dep.Utils — presumably first match wins; confirm.
genericStackedPrint :: a -> (a -> Bool) -> [[[a]]] -> [[a]]
genericStackedPrint dflt flld lyrs = map (map (findDefault dflt flld)) st
  where st = stackTranspose lyrs
--Permutates the dimensions such that the element for (i,j,k) is mapped to (j,k,i)
-- Ragged inputs are tolerated: exhausted sub-lists are skipped via the
-- safe head/tail helpers from Dep.Utils.
stackTranspose :: [[[a]]] -> [[[a]]]
stackTranspose [] = []
stackTranspose ([]:xs) = stackTranspose xs
stackTranspose xs = stackTransposeRow (safeMapHeads xs) : stackTranspose (map safeTail xs)
-- | One level of 'stackTranspose': transpose a list of rows, tolerating
-- ragged (unequal-length) rows.
stackTransposeRow :: [[a]] -> [[a]]
stackTransposeRow [] = []
stackTransposeRow ([]:xs) = stackTransposeRow xs
stackTransposeRow xs = safeMapHeads xs : stackTransposeRow (map safeTail xs)
-- | Render a combinational element.  Currently a stub: every case yields
-- the empty list (note the 'MinT' clause is redundant with the
-- catch-all and presumably marks where real rendering will go).
printCombElem :: CombElem -> [String]
printCombElem (MinT _) = []
printCombElem _ = []
-- | MaxT [Int] | SOP [[Int]] | POS [[Int]]
| KommuSoft/dep-software | Dep.Printing.Schematics.hs | gpl-3.0 | 3,562 | 0 | 10 | 819 | 1,533 | 789 | 744 | 84 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeSynonymInstances #-}
--------------------------------------------------------------------------------
-- |
-- Module : Tct.Processor.Args.Instances
-- Copyright : (c) Martin Avanzini <martin.avanzini@uibk.ac.at>,
-- Georg Moser <georg.moser@uibk.ac.at>,
-- Andreas Schnabl <andreas.schnabl@uibk.ac.at>,
-- License : LGPL (see COPYING)
--
-- Maintainer : Martin Avanzini <martin.avanzini@uibk.ac.at>
-- Stability : unstable
-- Portability : unportable
--
-- This module defines various instances of processor arguments.
--------------------------------------------------------------------------------
module Tct.Processor.Args.Instances
( Proc (..)
, Processor
, Assoc (..)
, AssocArgument (..)
, EnumOf (..)
, Nat (..)
, nat
, natToInt
-- * Constructors for Arguments
, naturalArg
, boolArg
, maybeArg
, listArg
, processorArg
, EnumArg
, AssocArg
)
where
import Data.Typeable
import Control.Monad (liftM)
import Text.Parsec.Combinator (choice)
import Text.Parsec.Char (string)
import Data.List (intersperse, sortBy)
import Text.Parsec.Prim (many, try, (<|>))
import Tct.Processor.Parse hiding (natural, bool)
import qualified Tct.Processor.Parse as Parse
import Tct.Processor.Args
import Termlib.Utils (PrettyPrintable (..))
import Text.PrettyPrint.HughesPJ
import qualified Tct.Processor as P
import qualified Data.List as L
-- * Primitives
-- | Natural numbers for argument parsing.  'Num'/'Enum' are inherited
-- from 'Int', so negative values are representable; construct through
-- 'nat' to enforce non-negativity.
newtype Nat = Nat Int deriving (Typeable, Eq, Ord, Show, Num, Enum)
-- | Smart constructor: rejects negative input with 'error'.
nat :: Int -> Nat
nat i | i < 0 = error "nat received negative integer"
      | otherwise = Nat i
-- | Project the underlying 'Int'.
natToInt :: Nat -> Int
natToInt (Nat i) = i
-- | Naturals are their own argument domain, rendered as @\<nat\>@.
instance Argument Nat where
    type Domain Nat = Nat
    domainName Phantom = "<nat>"
    showArg _ (Nat i) = show i
instance PrettyPrintable Nat where
    pprint (Nat i) = text (show i)
instance ParsableArgument Nat where
    parseArg Phantom = Nat `liftM` Parse.natural
-- | Booleans are written and parsed as @On@ / @Off@.
instance Argument Bool where
    type Domain Bool = Bool
    domainName Phantom = "On|Off"
    showArg _ True = "On"
    showArg _ False = "Off"
instance ParsableArgument Bool where
    parseArg Phantom = Parse.bool
    interactiveParser = defaultIP ["On", "Off"]
-- * Compound
-- | Lists of arguments: domain name gains a @...@ suffix, values render
-- bracketed and comma-separated.
instance Argument a => Argument [a] where
    type Domain [a] = [Domain a]
    domainName Phantom = domainName (Phantom :: Phantom a) ++ "..."
    showArg _ as = "[" ++ concat (L.intersperse ", " [showArg (Phantom :: Phantom a) a | a <- as]) ++ "]"
-- | Optional arguments: the literal @none@ denotes 'Nothing'.
instance Argument a => Argument (Maybe a) where
    type Domain (Maybe a) = Maybe (Domain a)
    domainName Phantom = domainName (Phantom :: Phantom a) ++ "|none"
    showArg _ (Just a) = showArg (Phantom :: Phantom a) a
    showArg _ Nothing = "none"
instance ParsableArgument a => ParsableArgument (Maybe a) where
    parseArg Phantom = try (string "none" >> return Nothing)
                       <|> Just `liftM` parseArg (Phantom :: Phantom a)
    -- The interactive parser extends the inner parser's completions and
    -- synopsis with the extra "none" alternative.
    interactiveParser (_ :: Phantom (Maybe a)) procs =
        IP { ipCompletions = "none" : ipCompletions ip
           , ipSynopsis = text "none|" <> ipSynopsis ip
           , ipParse = prs }
        where pa = Phantom :: Phantom a
              ip = interactiveParser pa procs
              prs "none" = return $ Right Nothing
              prs str = do
                res <- ipParse ip str
                return $ case res of
                  Left err -> Left err
                  Right a -> Right (Just a)
-- | Lists parse as whitespace-separated repetitions of the element
-- parser.
instance ParsableArgument a => ParsableArgument [a] where
    parseArg Phantom = many p
        where p :: P.ProcessorParser (Domain a)
              p = do r <- parseArg (Phantom :: Phantom a)
                     try whiteSpace <|> return ()
                     return r
-- | Phantom wrapper lifting a 'Bounded' 'Enum' type to an argument; see
-- the 'Argument (EnumOf a)' instance and 'EnumArg'.
newtype EnumOf a = EnumOf a
-- | Join the alternative names of a domain with @|@, e.g.
-- @["On","Off"] -> "On|Off"@.  The previous identity comprehension
-- @[ e | e <- l ]@ was redundant and has been dropped.
domainNameList :: [String] -> String
domainNameList = concat . intersperse "|"
-- | Parser that recognises any of the given names (first match wins,
-- each tried with backtracking) and yields the associated value.
parseArgAssoc :: [(String,e)] -> P.ProcessorParser e
parseArgAssoc l = choice [ try $ pa n e | (n,e) <- l]
    where pa n e = do _ <- string n
                      return e
-- | Every constructor of a bounded enum becomes an alternative, named by
-- its 'Show' representation.
instance (Typeable a, Show a, Enum a, Bounded a) => Argument (EnumOf a) where
    type Domain (EnumOf a) = a
    domainName Phantom = domainNameList [show e | e <- [(minBound :: a) .. maxBound]]
    showArg _ a = show a
instance (Typeable a, Show a, Enum a, Bounded a) => ParsableArgument (EnumOf a) where
    parseArg Phantom = parseArgAssoc [(show e, e) | e <- [(minBound :: a) .. maxBound]]
    interactiveParser = defaultIP options
        where options = [show e | e <- [(minBound :: a) .. maxBound]]
-- | Instances of this class can be parsed by means of the
-- defined method 'assoc'.
class AssocArgument a where
    -- | The resulting list associates names to elements, and should be finite.
    -- An element is parsed by parsing its name.
    assoc :: Phantom a -> [(String, a)]
-- | Phantom wrapper selecting the association-list based 'Argument' and
-- 'ParsableArgument' instances below; see also 'AssocArg'.
newtype Assoc a = Assoc a
-- | The domain is the set of names declared by 'assoc'.
instance (Show a, AssocArgument a) => Argument (Assoc a) where
    type Domain (Assoc a) = a
    domainName _ = domainNameList [ s | (s,_) <- assoc (Phantom :: Phantom a)]
    showArg _ a = show a
instance (Show a, AssocArgument a) => ParsableArgument (Assoc a) where
    parseArg _ = parseArgAssoc $ assoc (Phantom :: Phantom a)
-- | Phantom wrapper marking a processor-instance argument.
data Proc a = Proc a
instance (P.Processor a) => Argument (Proc a) where
    type Domain (Proc a) = P.InstanceOf a
    domainName _ = "<processor>"
    showArg _ a = "<processor " ++ P.instanceName a ++ ">"
-- | The common case: an argument accepting any registered processor.
type Processor = Proc P.AnyProcessor
-- | Interactive parsing of a processor by name: completions come from
-- the registered processor list (sorted by name); on a hit the
-- processor's own arguments are then queried interactively.
instance ParsableArgument Processor where
    parseArg _ = P.parseAnyProcessor
    interactiveParser _ procs =
        IP { ipCompletions = [ n | (n, _) <- procLst]
           , ipSynopsis = text "<processor>"
           , ipParse = prs }
        where
          procLst = [(P.name p, p) | p <- sortBy compareName $ P.toProcessorList procs]
              where compareName p1 p2 = P.name p1 `compare` P.name p2
          prs n =
              case lookup n procLst of
                Nothing -> return $ Left $ text $ "Processor '" ++ n ++ "' not known"
                Just proc -> do
                  parsed <- P.parseFromArgsInteractive proc procs
                  return $ Right $ P.liftOOI parsed
-- argument types
-- | Natural argument
naturalArg :: Arg Nat
naturalArg = arg
-- | Boolean argument, which is parsed as either /On/ or /Off/.
boolArg :: Arg Bool
boolArg = arg
-- | Argument, that additionally parses as 'none'.
-- Only the default value is adapted; all other fields of the
-- description are kept.
maybeArg :: Arg a -> Arg (Maybe a)
maybeArg a = a {defaultValue = Just $ defaultValue a}
-- | A list of arguments.  The inner default becomes a singleton default
-- list.
listArg :: Arg a -> Arg [a]
listArg a = a {defaultValue = [defaultValue a]}
-- | Construct an argument from an associated list, by declaring
-- a datatype an instance of 'AssocArgument'.
-- Use as follows:
--
-- >>> instance AssocArgument MyType where
--    assoc _ = [("nameA", valueA), ("nameB", valueB)...]
--
-- Then one can use a declaration
--
-- >>> arg :: AssocArg MyType
--
-- which will parse /valueA/ as /nameA/, /valueB/ as /nameB/, and so on.
type AssocArg a = Arg (Assoc a)
-- | Processor argument; parses the name of any registered processor.
processorArg :: Arg Processor
processorArg = arg
-- | This can be used to lift instances of 'Typeable', 'Show', 'Enum' and 'Bounded' to arguments.
-- Suppose you have a datatype like the following.
--
-- >>> data MyType = A | B | C deriving (Typeable, Show, Enum, Bounded)
--
-- An argument description for an element of type @MyType@ is then given by
--
-- >>> arg :: EnumArg MyType
--
type EnumArg a = Arg (EnumOf a)
| mzini/TcT | source/Tct/Processor/Args/Instances.hs | gpl-3.0 | 7,797 | 0 | 16 | 2,040 | 2,115 | 1,141 | 974 | 144 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidDeviceProvisioning.Partners.Devices.Metadata
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates reseller metadata associated with the device.
--
-- /See:/ <https://developers.google.com/zero-touch/ Android Device Provisioning Partner API Reference> for @androiddeviceprovisioning.partners.devices.metadata@.
module Network.Google.Resource.AndroidDeviceProvisioning.Partners.Devices.Metadata
(
-- * REST Resource
PartnersDevicesMetadataResource
-- * Creating a Request
, partnersDevicesMetadata
, PartnersDevicesMetadata
-- * Request Lenses
, pdmXgafv
, pdmUploadProtocol
, pdmAccessToken
, pdmUploadType
, pdmPayload
, pdmDeviceId
, pdmCallback
, pdmMetadataOwnerId
) where
import Network.Google.AndroidDeviceProvisioning.Types
import Network.Google.Prelude
-- | A resource alias for @androiddeviceprovisioning.partners.devices.metadata@ method which the
-- 'PartnersDevicesMetadata' request conforms to.
-- Servant-style route for
-- @POST /v1/partners/{metadataOwnerId}/devices/{deviceId}/metadata@
-- (auto-generated; do not edit by hand).
type PartnersDevicesMetadataResource =
     "v1" :>
       "partners" :>
         Capture "metadataOwnerId" (Textual Int64) :>
           "devices" :>
             Capture "deviceId" (Textual Int64) :>
               "metadata" :>
                 QueryParam "$.xgafv" Xgafv :>
                   QueryParam "upload_protocol" Text :>
                     QueryParam "access_token" Text :>
                       QueryParam "uploadType" Text :>
                         QueryParam "callback" Text :>
                           QueryParam "alt" AltJSON :>
                             ReqBody '[JSON] UpdateDeviceMetadataRequest :>
                               Post '[JSON] DeviceMetadata
-- | Updates reseller metadata associated with the device.
-- Numeric path fields are stored as 'Textual Int64' (the wire format);
-- the lenses expose plain 'Int64'.
--
-- /See:/ 'partnersDevicesMetadata' smart constructor.
data PartnersDevicesMetadata =
  PartnersDevicesMetadata'
    { _pdmXgafv :: !(Maybe Xgafv)
    , _pdmUploadProtocol :: !(Maybe Text)
    , _pdmAccessToken :: !(Maybe Text)
    , _pdmUploadType :: !(Maybe Text)
    , _pdmPayload :: !UpdateDeviceMetadataRequest
    , _pdmDeviceId :: !(Textual Int64)
    , _pdmCallback :: !(Maybe Text)
    , _pdmMetadataOwnerId :: !(Textual Int64)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PartnersDevicesMetadata' with the minimum fields required to make a request.
-- All optional query fields start as 'Nothing'.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pdmXgafv'
--
-- * 'pdmUploadProtocol'
--
-- * 'pdmAccessToken'
--
-- * 'pdmUploadType'
--
-- * 'pdmPayload'
--
-- * 'pdmDeviceId'
--
-- * 'pdmCallback'
--
-- * 'pdmMetadataOwnerId'
partnersDevicesMetadata
    :: UpdateDeviceMetadataRequest -- ^ 'pdmPayload'
    -> Int64 -- ^ 'pdmDeviceId'
    -> Int64 -- ^ 'pdmMetadataOwnerId'
    -> PartnersDevicesMetadata
partnersDevicesMetadata pPdmPayload_ pPdmDeviceId_ pPdmMetadataOwnerId_ =
  PartnersDevicesMetadata'
    { _pdmXgafv = Nothing
    , _pdmUploadProtocol = Nothing
    , _pdmAccessToken = Nothing
    , _pdmUploadType = Nothing
    , _pdmPayload = pPdmPayload_
    , _pdmDeviceId = _Coerce # pPdmDeviceId_
    , _pdmCallback = Nothing
    , _pdmMetadataOwnerId = _Coerce # pPdmMetadataOwnerId_
    }
-- Auto-generated lenses over 'PartnersDevicesMetadata' fields.
-- | V1 error format.
pdmXgafv :: Lens' PartnersDevicesMetadata (Maybe Xgafv)
pdmXgafv = lens _pdmXgafv (\ s a -> s{_pdmXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pdmUploadProtocol :: Lens' PartnersDevicesMetadata (Maybe Text)
pdmUploadProtocol
  = lens _pdmUploadProtocol
      (\ s a -> s{_pdmUploadProtocol = a})
-- | OAuth access token.
pdmAccessToken :: Lens' PartnersDevicesMetadata (Maybe Text)
pdmAccessToken
  = lens _pdmAccessToken
      (\ s a -> s{_pdmAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pdmUploadType :: Lens' PartnersDevicesMetadata (Maybe Text)
pdmUploadType
  = lens _pdmUploadType
      (\ s a -> s{_pdmUploadType = a})
-- | Multipart request metadata.
pdmPayload :: Lens' PartnersDevicesMetadata UpdateDeviceMetadataRequest
pdmPayload
  = lens _pdmPayload (\ s a -> s{_pdmPayload = a})
-- | Required. The ID of the device.  The '_Coerce' iso bridges the
-- wire-format 'Textual Int64' and the plain 'Int64' seen by callers.
pdmDeviceId :: Lens' PartnersDevicesMetadata Int64
pdmDeviceId
  = lens _pdmDeviceId (\ s a -> s{_pdmDeviceId = a}) .
      _Coerce
-- | JSONP
pdmCallback :: Lens' PartnersDevicesMetadata (Maybe Text)
pdmCallback
  = lens _pdmCallback (\ s a -> s{_pdmCallback = a})
-- | Required. The owner of the newly set metadata. Set this to the partner
-- ID.
pdmMetadataOwnerId :: Lens' PartnersDevicesMetadata Int64
pdmMetadataOwnerId
  = lens _pdmMetadataOwnerId
      (\ s a -> s{_pdmMetadataOwnerId = a})
      . _Coerce
-- | Wire the request record onto the generated route; the method needs
-- no OAuth scopes and returns the updated 'DeviceMetadata'.
instance GoogleRequest PartnersDevicesMetadata where
    type Rs PartnersDevicesMetadata = DeviceMetadata
    type Scopes PartnersDevicesMetadata = '[]
    requestClient PartnersDevicesMetadata'{..}
      = go _pdmMetadataOwnerId _pdmDeviceId _pdmXgafv
          _pdmUploadProtocol
          _pdmAccessToken
          _pdmUploadType
          _pdmCallback
          (Just AltJSON)
          _pdmPayload
          androidDeviceProvisioningService
      where go
              = buildClient
                  (Proxy :: Proxy PartnersDevicesMetadataResource)
                  mempty
| brendanhay/gogol | gogol-androiddeviceprovisioning/gen/Network/Google/Resource/AndroidDeviceProvisioning/Partners/Devices/Metadata.hs | mpl-2.0 | 5,963 | 0 | 20 | 1,379 | 902 | 521 | 381 | 129 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Accounts.Labels.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the labels assigned to an account.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.accounts.labels.list@.
module Network.Google.Resource.Content.Accounts.Labels.List
(
-- * REST Resource
AccountsLabelsListResource
-- * Creating a Request
, accountsLabelsList
, AccountsLabelsList
-- * Request Lenses
, alllXgafv
, alllUploadProtocol
, alllAccessToken
, alllUploadType
, alllAccountId
, alllPageToken
, alllPageSize
, alllCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.accounts.labels.list@ method which the
-- 'AccountsLabelsList' request conforms to.
-- Servant-style route for
-- @GET /content/v2.1/accounts/{accountId}/labels@
-- (auto-generated; do not edit by hand).
type AccountsLabelsListResource =
     "content" :>
       "v2.1" :>
         "accounts" :>
           Capture "accountId" (Textual Int64) :>
             "labels" :>
               QueryParam "$.xgafv" Xgafv :>
                 QueryParam "upload_protocol" Text :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParam "pageToken" Text :>
                         QueryParam "pageSize" (Textual Int32) :>
                           QueryParam "callback" Text :>
                             QueryParam "alt" AltJSON :>
                               Get '[JSON] ListAccountLabelsResponse
-- | Lists the labels assigned to an account.
-- Numeric fields are stored in their 'Textual' wire form; the lenses
-- expose plain integers.
--
-- /See:/ 'accountsLabelsList' smart constructor.
data AccountsLabelsList =
  AccountsLabelsList'
    { _alllXgafv :: !(Maybe Xgafv)
    , _alllUploadProtocol :: !(Maybe Text)
    , _alllAccessToken :: !(Maybe Text)
    , _alllUploadType :: !(Maybe Text)
    , _alllAccountId :: !(Textual Int64)
    , _alllPageToken :: !(Maybe Text)
    , _alllPageSize :: !(Maybe (Textual Int32))
    , _alllCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsLabelsList' with the minimum fields required to make a request.
-- All optional query fields start as 'Nothing'.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'alllXgafv'
--
-- * 'alllUploadProtocol'
--
-- * 'alllAccessToken'
--
-- * 'alllUploadType'
--
-- * 'alllAccountId'
--
-- * 'alllPageToken'
--
-- * 'alllPageSize'
--
-- * 'alllCallback'
accountsLabelsList
    :: Int64 -- ^ 'alllAccountId'
    -> AccountsLabelsList
accountsLabelsList pAlllAccountId_ =
  AccountsLabelsList'
    { _alllXgafv = Nothing
    , _alllUploadProtocol = Nothing
    , _alllAccessToken = Nothing
    , _alllUploadType = Nothing
    , _alllAccountId = _Coerce # pAlllAccountId_
    , _alllPageToken = Nothing
    , _alllPageSize = Nothing
    , _alllCallback = Nothing
    }
-- Auto-generated lenses over 'AccountsLabelsList' fields.
-- | V1 error format.
alllXgafv :: Lens' AccountsLabelsList (Maybe Xgafv)
alllXgafv
  = lens _alllXgafv (\ s a -> s{_alllXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
alllUploadProtocol :: Lens' AccountsLabelsList (Maybe Text)
alllUploadProtocol
  = lens _alllUploadProtocol
      (\ s a -> s{_alllUploadProtocol = a})
-- | OAuth access token.
alllAccessToken :: Lens' AccountsLabelsList (Maybe Text)
alllAccessToken
  = lens _alllAccessToken
      (\ s a -> s{_alllAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
alllUploadType :: Lens' AccountsLabelsList (Maybe Text)
alllUploadType
  = lens _alllUploadType
      (\ s a -> s{_alllUploadType = a})
-- | Required. The account id for whose labels are to be listed.
-- The '_Coerce' iso bridges the 'Textual Int64' wire form.
alllAccountId :: Lens' AccountsLabelsList Int64
alllAccountId
  = lens _alllAccountId
      (\ s a -> s{_alllAccountId = a})
      . _Coerce
-- | A page token, received from a previous \`ListAccountLabels\` call.
-- Provide this to retrieve the subsequent page. When paginating, all other
-- parameters provided to \`ListAccountLabels\` must match the call that
-- provided the page token.
alllPageToken :: Lens' AccountsLabelsList (Maybe Text)
alllPageToken
  = lens _alllPageToken
      (\ s a -> s{_alllPageToken = a})
-- | The maximum number of labels to return. The service may return fewer
-- than this value. If unspecified, at most 50 labels will be returned. The
-- maximum value is 1000; values above 1000 will be coerced to 1000.
alllPageSize :: Lens' AccountsLabelsList (Maybe Int32)
alllPageSize
  = lens _alllPageSize (\ s a -> s{_alllPageSize = a})
      . mapping _Coerce
-- | JSONP
alllCallback :: Lens' AccountsLabelsList (Maybe Text)
alllCallback
  = lens _alllCallback (\ s a -> s{_alllCallback = a})
-- | Wire the request record onto the generated route; requires the
-- Content API scope and returns the label page response.
instance GoogleRequest AccountsLabelsList where
    type Rs AccountsLabelsList =
         ListAccountLabelsResponse
    type Scopes AccountsLabelsList =
         '["https://www.googleapis.com/auth/content"]
    requestClient AccountsLabelsList'{..}
      = go _alllAccountId _alllXgafv _alllUploadProtocol
          _alllAccessToken
          _alllUploadType
          _alllPageToken
          _alllPageSize
          _alllCallback
          (Just AltJSON)
          shoppingContentService
      where go
              = buildClient
                  (Proxy :: Proxy AccountsLabelsListResource)
                  mempty
| brendanhay/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/Accounts/Labels/List.hs | mpl-2.0 | 6,028 | 0 | 20 | 1,457 | 911 | 526 | 385 | 131 | 1 |
--
-- Copyright 2017-2018 Azad Bolour
-- Licensed under GNU Affero General Public License v3.0 -
-- https://github.com/azadbolour/boardgame/blob/master/LICENSE.md
--
module Bolour.Util.DataFileSpec where
import Test.Hspec
import Bolour.Util.FileUtil
-- | Sanity check on the bundled word-list data file: it must contain
-- more than ten lines.
spec :: Spec
spec = do
  describe "read word list" $ do
    it "read" $ do
      -- NOTE(review): 'words' shadows Prelude's 'words'; consider renaming.
      words <- readDataFileAsLines "data/test-words.txt"
      -- (10 <) reads as "strictly more than ten lines".
      length words `shouldSatisfy` (10 <)
| azadbolour/boardgame | haskell-server/test/Bolour/Util/DataFileSpec.hs | agpl-3.0 | 430 | 0 | 14 | 75 | 79 | 44 | 35 | 9 | 1 |
{-# OPTIONS -XMultiParamTypeClasses #-}
--------------------------------------------------------------------------------
-- $Id: ClassRestrictionRule.hs,v 1.15 2004/01/07 19:49:12 graham Exp $
--
-- Copyright (c) 2003, G. KLYNE. All rights reserved.
-- See end of this file for licence information.
--------------------------------------------------------------------------------
-- |
-- Module : ClassRestrictionRule
-- Copyright : (c) 2003, Graham Klyne
-- License : GPL V2
--
-- Maintainer : Graham Klyne
-- Stability : provisional
-- Portability : H98
--
-- This module implements an inference rule based on a restruction on class
-- membership of one or more values.
--
--------------------------------------------------------------------------------
module Swish.HaskellRDF.ClassRestrictionRule
( ClassRestriction(..), ClassRestrictionFn
, makeDatatypeRestriction, makeDatatypeRestrictionFn
, makeRDFClassRestrictionRules
, makeRDFDatatypeRestrictionRules
, falseGraph, falseGraphStr
)
where
import Swish.HaskellRDF.RDFGraph
( RDFLabel(..)
, getScopedName
, RDFGraph
, getArcs
, merge
, toRDFGraph, emptyRDFGraph
, Arc(..)
, res_rdf_type
, res_rdfd_maxCardinality
)
import Swish.HaskellRDF.RDFRuleset
( RDFRule
, makeRDFGraphFromN3String
)
import Swish.HaskellRDF.RDFDatatype
( RDFDatatypeVal
, fromRDFLabel, toRDFLabel
)
import Swish.HaskellRDF.RDFQuery
( rdfQueryFind
, rdfFindValSubj, rdfFindPredVal, rdfFindPredInt
, rdfFindList
)
import Swish.HaskellRDF.RDFVarBinding
( RDFVarBinding )
import Swish.HaskellRDF.Datatype
( DatatypeVal(..)
, DatatypeRel(..), DatatypeRelFn
)
import Swish.HaskellRDF.Rule
( Rule(..)
, bwdCheckInference
)
import Swish.HaskellRDF.VarBinding
( VarBinding(..)
)
import Swish.HaskellUtils.Namespace
( Namespace(..)
, ScopedName(..)
)
import Swish.HaskellRDF.Vocabulary
( namespaceRDFD
)
import Swish.HaskellUtils.PartOrderedCollection
( minima, maxima
, partCompareEq, partComparePair
, partCompareListMaybe
, partCompareListSubset
)
import Swish.HaskellUtils.LookupMap
( LookupEntryClass(..), LookupMap(..)
, mapFindMaybe
)
import Swish.HaskellUtils.ListHelpers
( powerSet )
import Data.Maybe
( Maybe(..)
, isJust, fromJust, fromMaybe, catMaybes )
import Data.List
( delete, nub, (\\) )
import Control.Monad
( liftM )
import Swish.HaskellUtils.TraceHelpers
( trace )
------------------------------------------------------------
--  Class restriction data type
------------------------------------------------------------

-- |Type of function that evaluates missing node values in a
-- restriction from those supplied.
--
-- Given a tuple of property values ('Nothing' for an unknown value),
-- the result is 'Nothing' if the supplied values are inconsistent
-- with the restriction, or 'Just' a list of complete tuples
-- consistent with the supplied values (an empty list if the inputs
-- underdetermine the result).
type ClassRestrictionFn = [Maybe RDFLabel] -> Maybe [[RDFLabel]]

-- |Datatype for named class restriction
data ClassRestriction = ClassRestriction
    { crName :: ScopedName          -- ^ name by which the restriction is referenced
    , crFunc :: ClassRestrictionFn  -- ^ function evaluating the restriction
    }

-- Restrictions are compared by name only; the function component
-- cannot be compared.
instance Eq ClassRestriction where
    cr1 == cr2 = crName cr1 == crName cr2

instance Show ClassRestriction where
    show cr = "ClassRestriction:"++(show $ crName cr)

-- Allows a list of restrictions to be used as a 'LookupMap' keyed by
-- restriction name.
instance LookupEntryClass ClassRestriction ScopedName ClassRestriction
    where
    newEntry (_,fn) = fn
    keyVal cr = (crName cr, cr)
------------------------------------------------------------
-- Instantiate a class restriction from a datatype relation
------------------------------------------------------------
-- |Make a class restriction from a datatype relation.
--
-- This "lifts" application of the datatype relation to operate
-- on RDFLabel values, which are presumed to contain appropriately
-- datatyped values.
--
-- The resulting restriction takes its name from the relation.
makeDatatypeRestriction ::
    RDFDatatypeVal vt -> DatatypeRel vt -> ClassRestriction
makeDatatypeRestriction dtv dtrel = ClassRestriction
    { crName = dtRelName dtrel
    , crFunc = makeDatatypeRestrictionFn dtv (dtRelFunc dtrel)
    }
-- |Make a class restriction function from a datatype relation function.
--
-- The core logic is something like (map toLabels . dtrelfn . map frLabel)
-- but the extra lifting and catMaybes are needed to get the final result
-- type in the right form.
--
-- ClassRestrictionFn = [Maybe RDFLabel] -> Maybe [[RDFLabel]]
--
-- | Lift a datatype relation function into a 'ClassRestrictionFn'
-- operating on (possibly unknown) RDF label values.
makeDatatypeRestrictionFn ::
    RDFDatatypeVal vt -> DatatypeRelFn vt -> ClassRestrictionFn
makeDatatypeRestrictionFn dtv dtrelfn mlabs =
    fmap (catMaybes . map toLabels) (dtrelfn (map frLabel mlabs))
    where
        -- Map an optional RDF label to an optional datatype value
        frLabel mlab = mlab >>= fromRDFLabel dtv
        -- Map a tuple of values back to labels; Nothing if any value
        -- has no label representation (dropped by catMaybes above)
        toLabels = mapM toLabel
        toLabel  = toRDFLabel dtv
------------------------------------------------------------
--  Make rules from supplied class restrictions and graph
------------------------------------------------------------

-- | Query graph used to locate class restriction definitions:
-- matches nodes typed as rdfd:GeneralRestriction, binding the
-- restriction node (?c), the head of its property list (?p) and its
-- constraint name (?r).
ruleQuery :: RDFGraph
ruleQuery = makeRDFGraphFromN3String $
    "@prefix rdfd: <" ++ nsURI namespaceRDFD ++ "> . \n" ++
    " ?c a rdfd:GeneralRestriction ; " ++
    " rdfd:onProperties ?p ; " ++
    " rdfd:constraint ?r . "

-- Placeholder false graph for now.
-- Returned by rule application when a contradiction is detected.
falseGraph :: RDFGraph
falseGraph = makeRDFGraphFromN3String $
    "@prefix rdfd: <" ++ nsURI namespaceRDFD ++ "> . \n" ++
    falseGraphStr

-- | N3 statement body used for 'falseGraph'.
falseGraphStr :: String
falseGraphStr = "_:a rdfd:false _:b . "
-- |Make a list of class restriction rules given a list of class restriction
-- values and a graph containing one or more class restriction definitions.
--
-- Query matches that do not resolve to a usable rule (e.g. an unknown
-- restriction name) are silently dropped.
makeRDFClassRestrictionRules :: [ClassRestriction] -> RDFGraph -> [RDFRule]
makeRDFClassRestrictionRules crs gr =
    catMaybes [ makeRestrictionRule1 crs gr vb | vb <- rdfQueryFind ruleQuery gr ]
-- | Construct a restriction rule from a single query match.
--
-- The variable binding supplies ?c (the restriction node), ?p (the
-- head of its rdfd:onProperties list) and ?r (the rdfd:constraint
-- name); the named constraint is looked up in the supplied list of
-- class restrictions.
makeRestrictionRule1 ::
    [ClassRestriction] -> RDFGraph -> RDFVarBinding -> Maybe RDFRule
makeRestrictionRule1 crs gr vb =
    {-
    trace "\nmakeRestrictionRule1:" $
    -- seq (traceShow "\ngr:" gr)
    seq (traceShow "\nvb:" vb) $
    seq (traceShow "\nc:" c) $
    seq (traceShow "\np:" p) $
    seq (traceShow "\nr:" r) $
    seq (traceShow "\nps:" ps) $
    -- seq (traceShow "\nrn:" rn) $
    -}
    makeRestrictionRule2 rn c ps cs
    where
        -- Bound query variables (NoNode if unbound)
        c = fromMaybe NoNode $ vbMap vb (Var "c")
        p = fromMaybe NoNode $ vbMap vb (Var "p")
        r = fromMaybe NoNode $ vbMap vb (Var "r")
        -- Positive rdfd:maxCardinality values asserted for the restriction
        cs = filter (>0) $ map fromInteger $
             rdfFindPredInt c res_rdfd_maxCardinality gr
        -- The restricted properties, read as an RDF collection rooted at p
        ps = rdfFindList gr p
        -- The named restriction, if present in the supplied list
        rn = mapFindMaybe (getScopedName r) (LookupMap crs)
-- | Construct the rule from the resolved restriction, class node,
-- property list and cardinality constraints.
--
-- Yields Nothing (with a trace message) if the restriction was not
-- found or the class node is not a resource.
makeRestrictionRule2 ::
    Maybe ClassRestriction -> RDFLabel -> [RDFLabel] -> [Int]
    -> Maybe RDFRule
makeRestrictionRule2 (Just restriction) cls@(Res cname) props cs =
    {-
    trace "\nmakeRestrictionRule2:" $
    seq (traceShow "\ncls:" cls)
    seq (traceShow "\nprops:" props) $
    -}
    Just restrictionRule
    where
        -- The rule is named after the restricted class; checkInference
        -- is tied back to the rule's own bwdApply via bwdCheckInference.
        restrictionRule = Rule
            { ruleName = cname
            -- fwdApply :: [ex] -> [ex]
            , fwdApply = fwdApplyRestriction restriction cls props cs
            -- bwdApply :: ex -> [[ex]]
            , bwdApply = bwdApplyRestriction restriction cls props cs
            , checkInference = bwdCheckInference restrictionRule
            }
makeRestrictionRule2 _ _ _ _ =
    trace "\nmakeRestrictionRule: missing class restriction" $
    Nothing
-- Forward apply class restriction.
--
-- Yields the graphs inferred by applying the restriction to every
-- instance of the restricted class found in the (merged) antecedent
-- graphs, or @[falseGraph]@ if any instance is inconsistent with the
-- restriction.
fwdApplyRestriction ::
    ClassRestriction -> RDFLabel -> [RDFLabel] -> [Int] -> [RDFGraph]
    -> [RDFGraph]
fwdApplyRestriction restriction cls props cs antgrs =
    maybe [falseGraph] concat newgrs
    where
        -- Merge antecedent graphs into one (with bnode renaming).
        -- The null check avoids merging in the common single-graph case.
        antgr = if null antgrs then emptyRDFGraph else foldl1 merge antgrs
        -- Instances of the named class in the merged graph
        ris = nub $ rdfFindValSubj res_rdf_type cls antgr
        -- Apply class restriction to a single instance of the class
        newgr :: RDFLabel -> Maybe [RDFGraph]
        newgr ri = fwdApplyRestriction1 restriction ri props cs antgr
        -- Nothing overall if any single instance is inconsistent
        newgrs :: Maybe [[RDFGraph]]
        newgrs = mapM newgr ris
-- Forward apply class restriction to single class instance (ci).
-- Return single set of inferred results, for each combination of
-- property values, or an empty list, or Nothing if the supplied values
-- are inconsistent with the restriction.
fwdApplyRestriction1 ::
    ClassRestriction -> RDFLabel -> [RDFLabel] -> [Int] -> RDFGraph
    -> Maybe [RDFGraph]
fwdApplyRestriction1 restriction ci props cs antgr =
    {-
    trace "\nfwdApplyRestriction1:" $
    seq (traceShow "\nci:" ci)
    seq (traceShow "\nprops:" props)
    seq (traceShow "\nantgr:" antgr) $
    -}
    if grConsistent then Just newgrs else Nothing
    where
        -- Apply restriction to graph
        (grConsistent,_,_,sts) = applyRestriction restriction ci props cs antgr
        -- Select results, eliminate those with unknowns
        -- (sequence yields Nothing for any tuple containing Nothing)
        nts :: [[RDFLabel]]
        nts = catMaybes $ map sequence sts
        -- Make new graph from results, including only newly generated arcs:
        -- arcs already present in the antecedent graph are removed by (\\)
        newarcs = nub [Arc ci p v | vs <- nts, (p,v) <- zip props vs ]
                  \\ getArcs antgr
        newgrs = if null newarcs then [] else [toRDFGraph newarcs]
-- Backward apply class restriction.
--
-- Returns a list of alternatives, any one of which is sufficient to
-- satisfy the given consequent; @[[falseGraph]]@ indicates that the
-- consequent is inconsistent with the restriction.
bwdApplyRestriction ::
    ClassRestriction -> RDFLabel -> [RDFLabel] -> [Int] -> RDFGraph
    -> [[RDFGraph]]
bwdApplyRestriction restriction cls props cs congr =
    fromMaybe [[falseGraph]] newgrs
    where
        -- Instances of the named class in the consequent graph
        ris = rdfFindValSubj res_rdf_type cls congr
        -- Antecedent alternatives for a single class instance
        newgr :: RDFLabel -> Maybe [[RDFGraph]]
        newgr ri = bwdApplyRestriction1 restriction cls ri props cs congr
        -- 'mapM newgr ris' is a conjunction of disjunctions, where
        -- each disjunction is itself a conjunction of conjunctions.
        -- 'sequence' distributes the conjunction over the disjunction,
        -- yielding an equivalent disjunction of conjunctions, and
        -- 'map concat' flattens the conjunctions of conjunctions.
        newgrs :: Maybe [[RDFGraph]]
        newgrs = fmap (map concat . sequence) (mapM newgr ris)
-- Backward apply a class restriction to single class instance (ci).
-- Return one or more sets of antecedent results from which the consequence
-- can be derived in the defined relation, an empty list if the supplied
-- consequence cannot be inferred, or Nothing if the consequence is
-- inconsistent with the restriction.
bwdApplyRestriction1 ::
    ClassRestriction -> RDFLabel -> RDFLabel -> [RDFLabel] -> [Int] -> RDFGraph
    -> Maybe [[RDFGraph]]
bwdApplyRestriction1 restriction cls ci props cs congr =
    if grConsistent then Just grss else Nothing
    {-
    trace "\nfwdApplyRestriction1:" $
    seq (traceShow "\nci:" ci)
    seq (traceShow "\nprops:" props)
    seq (traceShow "\ncongr:" congr) $
    -}
    where
        -- Apply restriction to graph
        (grConsistent,pvs,cts,_) =
            applyRestriction restriction ci props cs congr
        -- Build list of all full tuples consistent with the values supplied
        fts :: [[RDFLabel]]
        fts = concatMap snd cts
        -- Construct partial tuples from members of fts from which at least
        -- one of the supplied values can be derived
        pts :: [([Maybe RDFLabel],[RDFLabel])]
        pts = concatMap (deriveTuple restriction) fts
        -- Select combinations of members of pts from which all the
        -- supplied values can be derived
        dtss :: [[[Maybe RDFLabel]]]
        dtss = coverSets pvs pts
        -- Filter members of dtss that fully cover the values
        -- obtained from the consequence graph.
        -- NOTE(review): the predicate keeps alternatives that do NOT
        -- fully cover pvs (note the 'not'), despite the comment above --
        -- presumably to discard trivial alternatives that merely restate
        -- the consequence (cf. revision 1.8 log); confirm against the
        -- backward-chaining test cases before changing.
        ftss :: [[[Maybe RDFLabel]]]
        ftss = filter (not . (\t -> coversVals deleteMaybe t pvs)) dtss
        -- Make new graphs for all alternatives
        grss :: [[RDFGraph]]
        grss = map ( makeGraphs . newArcs ) ftss
        -- Collect arcs for one alternative (unknown values are skipped)
        newArcs dts =
            [ Arc ci p v | mvs <- dts, (p,Just v) <- zip props mvs ]
        -- Make graphs for one alternative, adding the class-membership arc
        makeGraphs = map (toRDFGraph . (:[])) . ((Arc ci res_rdf_type cls):)
-- Helper function to select sub-tuples from which some of a set of
-- values can be derived using a class restriction.
--
--  restriction  is the restriction being evaluated.
--  ft           is a full tuple of values known to be consistent with
--               the restriction.
--
-- The result returned is a list of pairs: the first member is a
-- minimal partial tuple from which the full tuple supplied can be
-- derived, and the second is the supplied tuple calculated from that
-- input.
deriveTuple ::
    ClassRestriction -> [RDFLabel]
    -> [([Maybe RDFLabel],[RDFLabel])]
deriveTuple restriction ft =
    [ (pt, ft) | pt <- minima partCompareListMaybe (filter derives partials) ]
    where
        -- All ways of replacing some elements of the tuple by "unknown"
        partials = mapM (\x -> [Nothing, Just x]) ft
        -- Does the partial tuple determine exactly the original tuple?
        derives pt = [ft] == fromJust (crFunc restriction pt)
-- Helper function to apply a restriction to selected information from
-- a supplied graph, and returns a tuple containing:
-- (a) an indication of whether the graph is consistent with the
--     restriction
-- (b) a list of values specified in the graph for each property
-- (c) a complete list of tuples that use combinations of values from
--     the graph and are consistent with the restriction.
--     Each member is a pair consisting of some combination of input
--     values, and a list of complete tuple values that can be
--     calculated from those inputs, or an empty list if there is
--     insufficient information.
-- (d) a set of tuples that are consistent with the restriction and use
--     as much information from the graph as possible.  This set is
--     minimal in the sense that they must all correspond to different
--     complete input tuples satisfying the restriction.
--
-- This function factors out logic that is common to forward and
-- backward chaining of a class restriction.
--
--  restriction  is the class restriction being applied
--  ci           is the identifier of a graph node to be tested
--  props        is a list of properties of the graph noode whose values
--               are constrained by the class restriction.
--  cs           is a list of max cardinality constraints on the restriction,
--               the minimum of which is used as the cardinality constraint
--               on the restriction.  If the list is null, no cardinality
--               constraint is applied.
--  gr           is the graph from which property values are extracted.
--
applyRestriction ::
    ClassRestriction -> RDFLabel -> [RDFLabel] -> [Int] -> RDFGraph
    -> ( Bool
       , [[RDFLabel]]
       , [([Maybe RDFLabel],[[RDFLabel]])]
       , [[Maybe RDFLabel]]
       )
applyRestriction restriction ci props cs gr =
    ( (coversVals deleteMaybe sts pvs) && cardinalityOK, pvs, cts, sts )
    where
        -- Extract from the antecedent graph all specified values of the
        -- restricted properties (constructs inner list for each property)
        pvs :: [[RDFLabel]]
        pvs = [ rdfFindPredVal ci p gr | p <- props ]
        -- Convert tuple of alternatives to list of alternative tuples
        -- (Each tuple is an inner list; Nothing marks an unknown value)
        pts :: [[Maybe RDFLabel]]
        pts = sequence $ map allJustAndNothing pvs
        -- Try class restriction calculation for each tuple
        -- For each, result may be:
        --   Nothing   (inconsistent)
        --   Just []   (underspecified)
        --   Just [t]  (single tuple of values derived from given values)
        --   Just ts   (alternative tuples derived from given values)
        rts :: [Maybe [[RDFLabel]]]
        rts = map (crFunc restriction) pts
        -- Extract list of consistent tuples of given values
        -- (sndFromJust is safe here: inconsistent Nothing results are
        -- filtered out first)
        cts :: [([Maybe RDFLabel],[[RDFLabel]])]
        cts = map sndFromJust $ filter (isJust . snd) (zip pts rts)
        -- Build list of consistent tuples with maximum information
        -- based on that supplied and available
        -- mts = concatMap mostValues cts
        mts = map mostOneValue cts
        -- Eliminate consistent results subsumed by others.
        -- This results in a mimimal possible set of consistent inputs,
        -- because if any pair could be consistently unified then their
        -- common subsumer would still be in the list, and both would be
        -- thereby eliminated.
        sts :: [[Maybe RDFLabel]]
        sts = maxima partCompareListMaybe mts
        -- Check the cardinality constraint
        cardinalityOK = null cs || length sts <= minimum cs
-- | Remove the 'Maybe' wrapper from the second component of a pair.
--
-- Callers must ensure the second component is a 'Just' value (call
-- sites filter with @isJust . snd@ first).  A 'Nothing' here is a
-- program error, reported with an explicit message instead of an
-- anonymous pattern-match failure.
sndFromJust :: (a,Maybe b) -> (a,b)
sndFromJust (a,Just b) = (a,b)
sndFromJust _          = error "ClassRestrictionRule.sndFromJust: Nothing"
-- Map a list of values to a list of Just values, preceded by a single
-- Nothing element.
--
-- Nothing stands for an unknown value:  the result enumerates the
-- alternatives "value unknown" followed by each known value, and is
-- used when building alternative tuples of known data values (either
-- supplied or calculated from the class restriction).
--
allJustAndNothing :: [a] -> [Maybe a]
allJustAndNothing as = Nothing : [ Just a | a <- as ]
-- Get maximum information about possible tuple values from a
-- given pair of input tuple, which is known to be consistent with
-- the restriction, and calculated result tuples.  Where no result
-- tuples were calculated, return the input tuple unchanged.
--
--  imvs   tuple of Maybe element values, with Nothing for
--         unspecified values
--  movss  list of possible fully-specified result tuples; empty if
--         no result tuples could be computed from the input tuple
--
mostValues :: ([Maybe a],[[a]]) -> [[Maybe a]]
mostValues (imvs, [])    = [imvs]
mostValues (_,    movss) = [ map Just movs | movs <- movss ]
-- Get maximum information about possible tuple values from a
-- given pair of input and possible result tuples, which is
-- known to be consistent with the restriction.  If the result
-- is not exactly one calculated tuple, return the input tuple.
--
-- This is a variant of mostValues that returns a single vector:
-- zero or multiple possible results are both treated as "unknown",
-- falling back to the input tuple.
--
--  imvs   tuple of Maybe element values, with Nothing for
--         unspecified values
--  movss  list of possible fully-specified result tuples
--
mostOneValue :: ([Maybe a],[[a]]) -> [Maybe a]
mostOneValue (imvs, movss) =
    case movss of
        [movs] -> map Just movs
        _      -> imvs
-- Helper function that returns subsets of dts that "cover" the indicated
-- values;  i.e. from which all of the supplied values can be deduced
-- by the enumerated function results.  The minima of all such subsets is
-- returned, as each of these corresponds to some minimum information needed
-- to deduce all of the given values.
--
--  pvs  is a list of lists of values to be covered.  The inner list
--       contains multiple values for each member of a tuple.
--  dts  is an enumerated list of function values from some subset of
--       the tuple space to complete tuples.  Each member is a pair
--       containing the partial tuple (using Nothing for unspecified
--       values) and the full tuple calculated from it.
--
-- The return value is a disjunction of conjunctions of partial tuples
-- that cover the indicated parameter values.
--
-- NOTE:
-- The result minimization is not perfect (cf. test2 below), but I believe
-- it is adequate for the practical situations I envisage, and in any
-- case will not result in incorrect values.  It's significance is for
-- search-tree pruning.  A perfect minimization might be achieved by
-- using a more subtle partial ordering that takes account of both subsets
-- and the partial ordering of set members in place of 'partCompareListSubset'.
--
coverSets :: (Eq a) => [[a]] -> [([Maybe a],[a])] -> [[[Maybe a]]]
coverSets pvs dts =
    minima partCompareListSubset $ map (map fst) ctss
    where
        -- Candidate subsets: all subsets of the minimal dts entries
        -- that cover pvs (powerSet is exponential -- dts is expected
        -- to be small after minimization)
        ctss = filter (coverspvs) $ powerSet cts
        -- Minimal members of dts under (partial-tuple, full-tuple) ordering
        cts = minima (partComparePair partCompareListMaybe partCompareEq) dts
        -- Does a candidate subset cover all of pvs?  (local cts shadows
        -- the outer binding deliberately)
        coverspvs cts = coversVals delete (map snd cts) pvs
-- Does a supplied list of tuples cover a list of possible alternative
-- values for each tuple member?
--
-- dropVal removes a single tuple element from the candidate values at
-- the corresponding position.  The values are covered if, after
-- folding in some suffix of the tuple list (scanr enumerates every
-- suffix), no candidate values remain at any position.
--
coversVals :: (a->[b]->[b]) -> [[a]] -> [[b]] -> Bool
coversVals dropVal ts vss =
    any (all null) (scanr dropUsed vss ts)
    where
        -- Remove one tuple's values from the per-position value lists:
        dropUsed [] [] = []
        dropUsed (t1:ts1) (vs1:vss1) = dropVal t1 vs1:dropUsed ts1 vss1
        dropUsed _ _ = error "coversVals.dropUsed: list length mismatch"
{-
-- Does a supplied list of possible alternative values for each
-- element of a tuple cover every tuple in a supplied list?
--
-- (See module spike-coverVals.hs for test cases)
--
coversAll :: ([a]->b->Bool) -> [[a]] -> [[b]] -> Bool
coversAll matchElem vss ts = all (invss vss) ts
where
-- Test if a given tuple is covered by vss
invss vss t = and $ zipWith matchElem vss t
-- Test if the value in a Maybe is contained in a list.
maybeElem :: (Eq a) => Maybe a -> [a] -> Bool
maybeElem Nothing = const True
maybeElem (Just t) = elem t
-}
-- |Delete a Maybe value from a list:  'Nothing' leaves the list
-- unchanged, @'Just' a@ removes the first occurrence of @a@.
deleteMaybe :: (Eq a) => Maybe a -> [a] -> [a]
deleteMaybe mv as = maybe as (`delete` as) mv
------------------------------------------------------------
--  Make restriction rules from supplied datatype and graph
------------------------------------------------------------

-- | Make class restriction rules for all relations of the given
-- datatype, using restriction definitions found in the graph.
makeRDFDatatypeRestrictionRules :: RDFDatatypeVal vt -> RDFGraph -> [RDFRule]
makeRDFDatatypeRestrictionRules dtval gr =
    makeRDFClassRestrictionRules dcrs gr
    where
        -- One class restriction per relation of the datatype
        dcrs = map (makeDatatypeRestriction dtval) (tvalRel dtval)
--------------------------------------------------------------------------------
--
-- Copyright (c) 2003, G. KLYNE. All rights reserved.
--
-- This file is part of Swish.
--
-- Swish is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- Swish is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Swish; if not, write to:
-- The Free Software Foundation, Inc.,
-- 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
--------------------------------------------------------------------------------
-- $Source: /file/cvsdev/HaskellRDF/ClassRestrictionRule.hs,v $
-- $Author: graham $
-- $Revision: 1.15 $
-- $Log: ClassRestrictionRule.hs,v $
-- Revision 1.15 2004/01/07 19:49:12 graham
-- Reorganized RDFLabel details to eliminate separate language field,
-- and to use ScopedName rather than QName.
-- Removed some duplicated functions from module Namespace.
--
-- Revision 1.14 2003/12/20 12:53:39 graham
-- Fix up code to compile and test with GHC 5.04.3
--
-- Revision 1.13 2003/12/20 12:00:14 graham
-- Introduced new TraceHelpers module for Hugs-2003 compatibility.
--
-- Revision 1.12 2003/12/19 21:01:25 graham
-- Change Debug.Trace import (from Hugs.Trace)
--
-- Revision 1.11 2003/12/08 23:55:36 graham
-- Various enhancements to variable bindings and proof structure.
-- New module BuiltInMap coded and tested.
-- Script processor is yet to be completed.
--
-- Revision 1.10 2003/12/04 02:53:27 graham
-- More changes to LookupMap functions.
-- SwishScript logic part complete, type-checks OK.
--
-- Revision 1.9 2003/11/28 00:17:55 graham
-- Datatype constraint test cases all passed.
--
-- Revision 1.8 2003/11/27 11:35:49 graham
-- Variable modifier tests all run.
-- Initial class constraint reasoning tests pass.
-- Fixed bug in class constraint backward-chained reasoning that returned
-- multiple instances of some statements, and did not filter out all occurrences
-- of the original statements.
--
-- Revision 1.7 2003/11/24 22:13:09 graham
-- Working on reworking datatype variable modifiers to work with
-- revised datatype framework.
--
-- Revision 1.6 2003/11/24 17:20:35 graham
-- Separate module Vocabulary from module Namespace.
--
-- Revision 1.5 2003/11/20 17:58:09 graham
-- Class-constraint backward chaining: all test cases passed.
--
-- Revision 1.4 2003/11/19 22:13:03 graham
-- Some backward chaining tests passed
--
-- Revision 1.3 2003/11/17 21:53:30 graham
-- Datatype inference forward chaining updated to allow inconsistent
-- partial inputs to be detected. All forward chaining test cases passed.
-- Need to develop backward chaining test cases.
--
-- Revision 1.2 2003/11/14 21:48:35 graham
-- First cut cardinality-checked datatype-constraint rules to pass test cases.
-- Backward chaining is still to do.
--
-- Revision 1.1 2003/11/13 01:15:23 graham
-- Working on ClassRestrictionRule.
-- Code almost complete, some test cases missing.
--
| amccausl/Swish | Swish/HaskellRDF/ClassRestrictionRule.hs | lgpl-2.1 | 26,626 | 0 | 14 | 6,576 | 3,462 | 2,029 | 1,433 | -1 | -1 |
module ESpec where
import Test.Hspec
------------------------------------------------------------------------------
import E
-- | Test specification; delegates to 't01' defined in module E.
spec :: Spec
spec =
  t01
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/existentials/2017-03-arnaud-bailly-understanding-existentials/test/ESpec.hs | unlicense | 174 | 0 | 4 | 39 | 23 | 15 | 8 | 6 | 1 |
{-# LANGUAGE DataKinds, TypeFamilies, TemplateHaskell, QuasiQuotes #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE UndecidableInstances #-}
--import Data.Singletons -- .TH
import Data.Singletons.TH hiding (Min, Compare, CompareSym0)
-- Generate the term-level 'Nat' type together with its singleton
-- ('SNat', constructors 'SZero'/'SSucc') and promoted kind.
$(singletons [d|
  data Nat = Zero | Succ Nat
    deriving (Show, Eq)
  |])
-- Promote 'plus' and 'min' on 'Nat' to type-level functions
-- ('Plus', 'Min').
$(promote [d|
  plus :: Nat -> Nat -> Nat
  plus Zero y = y
  plus (Succ x) y = Succ (plus x y)

  min :: Nat -> Nat -> Nat
  min Zero _ = Zero
  min _ Zero = Zero
  min (Succ x) (Succ y) = Succ (min x y)
  |])
-- A date range:  'Empty' (no valid dates), 'Open' (from a date
-- onwards) or 'Closed' (between two dates); promoted to the type level.
$(promote [d|
  data Range = Empty | Open Nat | Closed Nat Nat

  infinite :: Range
  infinite = Open Zero
  |])
-- Promote comparison on 'Nat' and the range-restriction operations
-- used to narrow an offer's validity range from either end.
$(promote [d|
  data Comparison = Less | Equal | Greater

  -- conflicts with Data.Singleton.TH.Compare & CompareSym0
  -- hiding when importing Data.Singleton.TH module
  compare :: Nat -> Nat -> Comparison
  compare Zero Zero = Equal
  compare Zero (Succ _) = Less
  compare (Succ _) Zero = Greater
  compare (Succ x) (Succ y) = compare x y

  -- Narrow a range so it starts no earlier than n.
  restrictFrom :: Nat -> Range -> Range
  restrictFrom _ Empty = Empty
  restrictFrom n (Open f) = restrictFrom1 n f (compare n f)
  restrictFrom n (Closed f t) = restrictFrom2 n f t (compare n f) (compare n t)

  restrictFrom1 :: Nat -> Nat -> Comparison -> Range
  restrictFrom1 n _ Greater = Open n
  restrictFrom1 _ f Equal = Open f
  restrictFrom1 _ f Less = Open f

  restrictFrom2 :: Nat -> Nat -> Nat -> Comparison -> Comparison -> Range
  restrictFrom2 _ _ _ Greater Greater = Empty
  restrictFrom2 _ _ _ Greater Equal = Empty
  restrictFrom2 n _ t Greater Less = Closed n t
  restrictFrom2 _ f t Equal _ = Closed f t
  restrictFrom2 _ f t Less _ = Closed f t

  -- Narrow a range so it ends no later than n.
  restrictUntil :: Nat -> Range -> Range
  restrictUntil _ Empty = Empty
  restrictUntil n (Open f) = restrictUntil1 n f (compare n f)
  restrictUntil n (Closed f t) = restrictUntil2 n f t (compare n f) (compare n t)

  restrictUntil1 :: Nat -> Nat -> Comparison -> Range
  restrictUntil1 n f Greater = Closed f n
  restrictUntil1 _ _ Equal = Empty
  restrictUntil1 _ _ Less = Empty

  restrictUntil2 :: Nat -> Nat -> Nat -> Comparison -> Comparison -> Range
  restrictUntil2 _ f t _ Greater = Closed f t
  restrictUntil2 _ f t _ Equal = Closed f t
  restrictUntil2 n f _ Greater Less = Closed f n
  restrictUntil2 _ _ _ Equal Less = Empty
  restrictUntil2 _ _ _ Less Less = Empty
  |])
-- | An offer tagged at the type level with the date 'Range' during
-- which it is valid; 'From'/'Until' narrow the range of a wrapped offer.
data Offer a (r :: Range) where
  Present :: a -> Offer a Infinite
  PercentDiscount :: Float -> Offer a Infinite
  -- NOTE(review): constructor name is misspelt ("Disocunt"); kept
  -- unchanged as renaming would break callers.
  AbsoluteDisocunt :: Float -> Offer a Infinite
  From :: SNat n -> Offer a d -> Offer a (RestrictFrom n d)
  Until :: SNat n -> Offer a d -> Offer a (RestrictUntil n d)
-- | Demote a singleton natural to its term-level 'Nat' value.
toNat :: SNat n -> Nat
toNat SZero = Zero
toNat (SSucc n) = Succ (toNat n)
-- | Render the outermost date restriction (if any) of an offer.
printDateRestriction :: Offer a r -> String
printDateRestriction (From n _)  = "From " ++ show (toNat n)
-- Fixed: the original said "Until" with no trailing space, producing
-- e.g. "UntilSucc Zero", inconsistent with the "From " case.
printDateRestriction (Until n _) = "Until " ++ show (toNat n)
printDateRestriction _           = "No date restriction"
-- Singleton witnesses for small naturals; each is resolved by 'sing'
-- from its type annotation.
zero :: SNat Zero
zero = sing -- results in SZero

one :: SNat (Succ Zero)
one = sing -- results in SSucc SZero

two :: SNat (Succ (Succ Zero))
two = sing -- results in SSucc (SSucc SZero)

three :: SNat (Succ (Succ (Succ Zero)))
three = sing -- results in SSucc (SSucc (SSucc SZero))
| egaburov/funstuff | Haskell/dsl_fold/offer_dtp_singl_th.hs | apache-2.0 | 3,358 | 0 | 11 | 888 | 416 | 220 | 196 | 75 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset (Romanian locale) for the ZAP "HTTPS Info" add-on:
     declares the map file and the TOC, Index, Search and Favorites views. -->
<helpset version="2.0" xml:lang="ro-RO">
  <title>HTTPS Info Add-on</title>
  <maps>
    <homeID>httpsinfo</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
{-# LANGUAGE TemplateHaskell #-}
{-| The Ganeti WConfd core functions.
This module defines all the functions that WConfD exports for
RPC calls. They are in a separate module so that in a later
stage, TemplateHaskell can generate, e.g., the python interface
for those.
-}
{-
Copyright (C) 2013, 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.WConfd.Core where
import Control.Arrow ((&&&))
import Control.Concurrent (myThreadId)
import Control.Lens.Setter (set)
import Control.Monad (liftM, unless, when)
import qualified Data.Map as M
import qualified Data.Set as S
import Language.Haskell.TH (Name)
import System.Posix.Process (getProcessID)
import qualified System.Random as Rand
import Ganeti.BasicTypes
import qualified Ganeti.Constants as C
import qualified Ganeti.JSON as J
import qualified Ganeti.Locking.Allocation as L
import Ganeti.Logging (logDebug, logWarning)
import Ganeti.Locking.Locks ( GanetiLocks(ConfigLock, BGL)
, LockLevel(LevelConfig)
, lockLevel, LockLevel
, ClientType(ClientOther), ClientId(..) )
import qualified Ganeti.Locking.Waiting as LW
import Ganeti.Objects (ConfigData, DRBDSecret, LogicalVolume, Ip4Address)
import Ganeti.Objects.Lens (configClusterL, clusterMasterNodeL)
import Ganeti.WConfd.ConfigState (csConfigDataL)
import qualified Ganeti.WConfd.ConfigVerify as V
import Ganeti.WConfd.DeathDetection (cleanupLocks)
import Ganeti.WConfd.Language
import Ganeti.WConfd.Monad
import qualified Ganeti.WConfd.TempRes as T
import qualified Ganeti.WConfd.ConfigModifications as CM
import qualified Ganeti.WConfd.ConfigWriter as CW
-- * Functions available to the RPC module

-- | Just a test function: echoes its argument back unchanged.
echo :: String -> WConfdMonad String
echo = return
-- ** Configuration related functions

-- | Fail (with an error in the monad) unless the given client holds
-- the configuration lock in the given ownership state.
checkConfigLock :: ClientId -> L.OwnerState -> WConfdMonad ()
checkConfigLock cid state = do
  allocation <- readLockAllocation
  let holdsIt = L.holdsLock cid ConfigLock state allocation
  unless holdsIt . failError $
    "Requested lock " ++ show state ++ " on the configuration missing"
-- | Read the configuration.  Delegates to 'CW.readConfig'; no lock
-- state is checked here.
readConfig :: WConfdMonad ConfigData
readConfig = CW.readConfig
-- | Write the configuration, checking that an exclusive lock is held.
-- If not, the call fails.
writeConfig :: ClientId -> ConfigData -> WConfdMonad ()
writeConfig ident cdata = do
  checkConfigLock ident L.OwnExclusive
  -- NOTE(review): verification of the incoming configuration is
  -- deliberately disabled here (see the commented-out call below);
  -- 'verifyConfig' provides an explicit check instead.
  -- V.verifyConfigErr cdata
  CW.writeConfig cdata
-- | Explicitly run verification of the configuration.
-- The caller doesn't need to hold the configuration lock.
verifyConfig :: WConfdMonad ()
verifyConfig = do
  cfg <- CW.readConfig
  V.verifyConfigErr cfg
-- *** Locks on the configuration (only transitional, will be removed later)

-- | Tries to acquire 'ConfigLock' for the client.
-- If the second parameter is set to 'True', the lock is acquired in
-- shared mode.
--
-- If the lock was successfully acquired, returns the current configuration
-- state; returns JSON null (Nothing) if the request was queued instead.
lockConfig
    :: ClientId
    -> Bool -- ^ set to 'True' if the lock should be shared
    -> WConfdMonad (J.MaybeForJSON ConfigData)
lockConfig cid shared = do
  let reqtype = if shared then ReqShared else ReqExclusive
  -- warn if we already have the lock, this shouldn't happen
  -- NOTE(review): only shared ownership is checked here, not
  -- exclusive -- confirm this matches the intended re-request
  -- semantics of the lock allocation.
  la <- readLockAllocation
  when (L.holdsLock cid ConfigLock L.OwnShared la)
    . failError $ "Client " ++ show cid ++
                  " already holds a config lock"
  waiting <- tryUpdateLocks cid [(ConfigLock, reqtype)]
  -- empty waiting list means the lock was granted immediately
  liftM J.MaybeForJSON $ case waiting of
    [] -> liftM Just CW.readConfig
    _  -> return Nothing
-- | Release the config lock, if the client currently holds it.
unlockConfig
  :: ClientId -> WConfdMonad ()
unlockConfig cid = freeLocksLevel cid LevelConfig

-- | Write the configuration, if the config lock is held exclusively,
-- and release the config lock. If the caller does not have the config
-- lock, return False.
writeConfigAndUnlock :: ClientId -> ConfigData -> WConfdMonad Bool
writeConfigAndUnlock cid cdata = do
  la <- readLockAllocation
  if L.holdsLock cid ConfigLock L.OwnExclusive la
    then do
      CW.writeConfigWithImmediate cdata $ unlockConfig cid
      return True
    else do
      logWarning $ show cid ++ " tried writeConfigAndUnlock without owning"
                   ++ " the config lock"
      return False

-- | Force the distribution of configuration without actually modifying it.
-- It is not necessary to hold a lock for this operation.
flushConfig :: WConfdMonad ()
flushConfig = forceConfigStateDistribution
-- ** Temporary reservations related functions

-- | Drop all temporary resource reservations held by the given client.
dropAllReservations :: ClientId -> WConfdMonad ()
dropAllReservations cid =
  modifyTempResState (const $ T.dropAllReservations cid)

-- *** DRBD

-- | Compute the map of used DRBD minors from the configuration and the
-- current temporary reservations.
computeDRBDMap :: WConfdMonad T.DRBDMap
computeDRBDMap = uncurry T.computeDRBDMap =<< readTempResState

-- Allocate a drbd minor.
--
-- The free minor will be automatically computed from the existing devices.
-- A node can not be given multiple times.
-- The result is the list of minors, in the same order as the passed nodes.
allocateDRBDMinor
  :: T.DiskUUID -> [T.NodeUUID] -> WConfdMonad [T.DRBDMinor]
allocateDRBDMinor disk nodes =
  modifyTempResStateErr (\cfg -> T.allocateDRBDMinor cfg disk nodes)

-- Release temporary drbd minors allocated for a given disk using
-- 'allocateDRBDMinor'.
--
-- This should be called on the error paths, on the success paths
-- it's automatically called by the ConfigWriter add and update
-- functions.
releaseDRBDMinors
  :: T.DiskUUID -> WConfdMonad ()
releaseDRBDMinors disk = modifyTempResState (const $ T.releaseDRBDMinors disk)
-- *** MACs

-- | Randomly generate a MAC for an instance and reserve it for
-- a given client.
generateMAC
  :: ClientId -> J.MaybeForJSON T.NetworkUUID -> WConfdMonad T.MAC
generateMAC cid (J.MaybeForJSON netId) = do
  gen <- liftIO Rand.newStdGen
  modifyTempResStateErr $ T.generateMAC gen cid netId

-- | Reserve a MAC for an instance in the list of temporary reservations.
reserveMAC :: ClientId -> T.MAC -> WConfdMonad ()
reserveMAC cid mac = modifyTempResStateErr $ T.reserveMAC cid mac

-- *** DRBDSecrets

-- | Randomly generate a DRBDSecret for an instance and reserve it for
-- a given client.
generateDRBDSecret :: ClientId -> WConfdMonad DRBDSecret
generateDRBDSecret cid = do
  gen <- liftIO Rand.newStdGen
  modifyTempResStateErr $ T.generateDRBDSecret gen cid

-- *** LVs

-- | Reserve a logical volume for the given client.
reserveLV :: ClientId -> LogicalVolume -> WConfdMonad ()
reserveLV cid lv = modifyTempResStateErr $ T.reserveLV cid lv

-- *** IPv4s

-- | Reserve a given IPv4 address for use by an instance.
reserveIp :: ClientId -> T.NetworkUUID -> Ip4Address -> Bool -> WConfdMonad ()
reserveIp cid net addr external =
  modifyTempResStateErr $ T.reserveIp cid net addr external

-- | Give a specific IP address back to an IP pool.
-- The IP address is returned to the IP pool designated by network id
-- and marked as reserved.
releaseIp :: ClientId -> T.NetworkUUID -> Ip4Address -> WConfdMonad ()
releaseIp cid net addr =
  modifyTempResStateErr . const $ T.releaseIp cid net addr

-- | Find a free IPv4 address for an instance and reserve it.
generateIp :: ClientId -> T.NetworkUUID -> WConfdMonad Ip4Address
generateIp cid net = modifyTempResStateErr $ T.generateIp cid net

-- | Commit all reserved/released IP address to an IP pool.
-- The IP addresses are taken from the network's IP pool and marked as
-- reserved/free for instances.
--
-- Note that the reservations are kept, they are supposed to be cleaned
-- when a job finishes.
commitTemporaryIps :: ClientId -> WConfdMonad ()
commitTemporaryIps cid = modifyConfigDataErr_ $ T.commitReservedIps cid

-- | Immediately release an IP address, without using the reservations pool.
commitReleaseTemporaryIp
  :: T.NetworkUUID -> Ip4Address -> WConfdMonad ()
commitReleaseTemporaryIp net_uuid addr =
  modifyConfigDataErr_ . const $ T.commitReleaseIp net_uuid addr

-- | List all IP reservations for the current client.
--
-- This function won't be needed once the corresponding calls are moved to
-- WConfd.
listReservedIps :: ClientId -> WConfdMonad [T.IPv4Reservation]
listReservedIps jobId = do
  (_, tempRes) <- readTempResState
  return . S.toList $ T.listReservedIps jobId tempRes
-- ** Locking related functions

-- | List the locks of a given owner (i.e., a job-id lockfile pair).
listLocks :: ClientId -> WConfdMonad [(GanetiLocks, L.OwnerState)]
listLocks cid = liftM (M.toList . L.listLocks cid) readLockAllocation

-- | List all active locks.
listAllLocks :: WConfdMonad [GanetiLocks]
listAllLocks = liftM L.listAllLocks readLockAllocation

-- | List all active locks with their owners.
listAllLocksOwners :: WConfdMonad [(GanetiLocks, [(ClientId, L.OwnerState)])]
listAllLocksOwners = liftM L.listAllLocksOwners readLockAllocation

-- | Get full information of the lock waiting status, i.e., provide
-- the information about all locks owners and all pending requests.
listLocksWaitingStatus :: WConfdMonad
                            ( [(GanetiLocks, [(ClientId, L.OwnerState)])]
                            , [(Integer, ClientId, [L.LockRequest GanetiLocks])]
                            )
listLocksWaitingStatus = liftM ( (L.listAllLocksOwners . LW.getAllocation)
                                 &&& (S.toList . LW.getPendingRequests) )
                           readLockWaiting

-- | Try to update the locks of a given owner (i.e., a job-id lockfile pair).
-- This function always returns immediately. If the lock update was possible,
-- the empty list is returned; otherwise, the lock status is left completely
-- unchanged, and the return value is the list of jobs which need to release
-- some locks before this request can succeed.
tryUpdateLocks :: ClientId -> GanetiLockRequest -> WConfdMonad [ClientId]
tryUpdateLocks cid req =
  liftM S.toList
  . (>>= toErrorStr)
  $ modifyLockWaiting (LW.updateLocks cid (fromGanetiLockRequest req))

-- | Try to update the locks of a given owner and make that a pending
-- request if not immediately possible.
updateLocksWaiting :: ClientId -> Integer
                      -> GanetiLockRequest -> WConfdMonad [ClientId]
updateLocksWaiting cid prio req =
  liftM S.toList
  . (>>= toErrorStr)
  . modifyLockWaiting
  $ LW.updateLocksWaiting prio cid (fromGanetiLockRequest req)

-- | Tell whether a given owner has pending requests.
hasPendingRequest :: ClientId -> WConfdMonad Bool
hasPendingRequest cid = liftM (LW.hasPendingRequest cid) readLockWaiting

-- | Free all locks of a given owner (i.e., a job-id lockfile pair).
freeLocks :: ClientId -> WConfdMonad ()
freeLocks cid =
  modifyLockWaiting_ $ LW.releaseResources cid

-- | Free all locks of a given owner (i.e., a job-id lockfile pair)
-- of a given level in the Ganeti sense (e.g., "cluster", "node").
freeLocksLevel :: ClientId -> LockLevel -> WConfdMonad ()
freeLocksLevel cid level =
  modifyLockWaiting_ $ LW.freeLocksPredicate ((==) level . lockLevel) cid
-- | Downgrade all locks of the given level to shared.
downGradeLocksLevel :: ClientId -> LockLevel -> WConfdMonad ()
downGradeLocksLevel cid level =
  modifyLockWaiting_ $ LW.downGradeLocksPredicate ((==) level . lockLevel) cid

-- | Intersect the possessed locks of an owner with a given set.
intersectLocks :: ClientId -> [GanetiLocks] -> WConfdMonad ()
intersectLocks cid locks = modifyLockWaiting_ $ LW.intersectLocks locks cid

-- | Opportunistically allocate locks for a given owner.
opportunisticLockUnion :: ClientId
                          -> [(GanetiLocks, L.OwnerState)]
                          -> WConfdMonad [GanetiLocks]
opportunisticLockUnion cid req =
  modifyLockWaiting $ LW.opportunisticLockUnion cid req

-- | Opportunistically allocate locks for a given owner, requesting a
-- certain minimum of success.
guardedOpportunisticLockUnion :: Int
                                 -> ClientId
                                 -> [(GanetiLocks, L.OwnerState)]
                                 -> WConfdMonad [GanetiLocks]
guardedOpportunisticLockUnion count cid req =
  modifyLockWaiting $ LW.guardedOpportunisticLockUnion count cid req
-- * Preparation for cluster destruction

-- | Prepare daemon for cluster destruction. This consists of
-- verifying that the requester owns the BGL exclusively, transferring the BGL
-- to WConfD itself, and modifying the configuration so that no
-- node is the master any more. Note that, since we own the BGL exclusively,
-- we can safely modify the configuration, as no other process can request
-- changes.
prepareClusterDestruction :: ClientId -> WConfdMonad ()
prepareClusterDestruction cid = do
  la <- readLockAllocation
  unless (L.holdsLock cid BGL L.OwnExclusive la)
    . failError $ "Cluster destruction requested without owning BGL exclusively"
  logDebug $ "preparing cluster destruction as requested by " ++ show cid
  -- Transfer the BGL to ourselves. To do this, we add a super-priority
  -- waiting request and then release the BGL of the requestor.
  dh <- daemonHandle
  pid <- liftIO getProcessID
  tid <- liftIO myThreadId
  let mycid = ClientId { ciIdentifier = ClientOther $ "wconfd-" ++ show tid
                       , ciLockFile = dhLivelock dh
                       , ciPid = pid
                       }
  _ <- modifyLockWaiting $ LW.updateLocksWaiting
                             (fromIntegral C.opPrioHighest - 1) mycid
                             [L.requestExclusive BGL]
  _ <- modifyLockWaiting $ LW.updateLocks cid [L.requestRelease BGL]
  -- To avoid being restarted we change the configuration to a no-master
  -- state.
  modifyConfigState $ (,) ()
    . set (csConfigDataL . configClusterL . clusterMasterNodeL) ""
-- * The list of all functions exported to RPC.

-- | Template-Haskell names of every function exposed over the WConfd RPC
-- interface, extended by the config-modification calls from 'CM'.
exportedFunctions :: [Name]
exportedFunctions = [ 'echo
                    , 'cleanupLocks
                    , 'prepareClusterDestruction
                    -- config
                    , 'readConfig
                    , 'writeConfig
                    , 'verifyConfig
                    , 'lockConfig
                    , 'unlockConfig
                    , 'writeConfigAndUnlock
                    , 'flushConfig
                    -- temporary reservations (common)
                    , 'dropAllReservations
                    -- DRBD
                    , 'computeDRBDMap
                    , 'allocateDRBDMinor
                    , 'releaseDRBDMinors
                    -- MACs
                    , 'reserveMAC
                    , 'generateMAC
                    -- DRBD secrets
                    , 'generateDRBDSecret
                    -- LVs
                    , 'reserveLV
                    -- IPv4s
                    , 'reserveIp
                    , 'releaseIp
                    , 'generateIp
                    , 'commitTemporaryIps
                    , 'commitReleaseTemporaryIp
                    , 'listReservedIps
                    -- locking
                    , 'listLocks
                    , 'listAllLocks
                    , 'listAllLocksOwners
                    , 'listLocksWaitingStatus
                    , 'tryUpdateLocks
                    , 'updateLocksWaiting
                    , 'freeLocks
                    , 'freeLocksLevel
                    , 'downGradeLocksLevel
                    , 'intersectLocks
                    , 'opportunisticLockUnion
                    , 'guardedOpportunisticLockUnion
                    , 'hasPendingRequest
                    ]
                    ++ CM.exportedFunctions
| dimara/ganeti | src/Ganeti/WConfd/Core.hs | bsd-2-clause | 16,598 | 0 | 14 | 3,844 | 2,563 | 1,434 | 1,129 | 222 | 3 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FunctionalDependencies #-}
module Data.Geodetic.EllipsoidReaderT(
EllipsoidReaderT(..)
, EllipsoidReader
, runEllipsoidReader
, toEllipsoidReaderT
, hoistEllipsoidReader
, arrEllipsoidReader
, readEllipsoid
, readSemiMajor
, readFlattening
, readFlatteningReciprocal
, semiMinor
, eccentricitySquared
, eccentricitySquared'
, distributeNormal
, normal
, wgs84'
, wgs84''
, earthGeo
) where
import Control.Applicative(Alternative((<|>), empty), liftA2)
import qualified Control.Monad as Monad(return, (>>=))
import Control.Monad(MonadPlus(mzero, mplus))
import Control.Monad.Fix(MonadFix(mfix))
import Control.Monad.IO.Class(MonadIO(liftIO))
import Control.Monad.Trans.Class(MonadTrans(lift))
import Control.Monad.Trans.Reader(ReaderT)
import Control.Monad.Zip(MonadZip(mzip))
import Data.Functor.Identity(Identity(Identity, runIdentity))
import Data.Geodetic.ECEF(ECEF(..), HasECEF(z))
import Data.Geodetic.Ellipsoid(Ellipsoid, HasEllipsoid(semiMajor, flattening), flatteningReciprocal, wgs84)
import Data.Geodetic.LL(LL(LL), HasLL(lat, lon))
import Data.Geodetic.LLH(LLH(LLH), HasLLH(height))
import Data.Geodetic.XY(XY(XY), HasXY(x, y))
import Papa
-- | A computation of @f a@ that depends on a choice of 'Ellipsoid'.
newtype EllipsoidReaderT f a =
  EllipsoidReaderT (Ellipsoid -> f a)

makeWrapped ''EllipsoidReaderT

-- | The non-transformer version, over 'Identity'.
type EllipsoidReader a =
  EllipsoidReaderT Identity a

-- | Iso between an 'EllipsoidReader' and its underlying function.
runEllipsoidReader ::
  Iso'
    (EllipsoidReader a)
    (Ellipsoid -> a)
runEllipsoidReader =
  iso
    (\(EllipsoidReaderT k) -> runIdentity . k)
    (\k -> EllipsoidReaderT (Identity . k))

-- | Iso between 'EllipsoidReaderT' and the equivalent 'ReaderT'.
toEllipsoidReaderT ::
  Iso'
    (EllipsoidReaderT f a)
    (ReaderT Ellipsoid f a)
toEllipsoidReaderT =
  from (_Wrapped' . from _Wrapped')

-- | Lift a pure reader into any 'Applicative' context.
hoistEllipsoidReader ::
  Applicative f =>
  EllipsoidReader a
  -> EllipsoidReaderT f a
hoistEllipsoidReader (EllipsoidReaderT k) =
  EllipsoidReaderT (pure . runIdentity . k)

-- | Lift a plain function on ellipsoids into 'EllipsoidReaderT'.
arrEllipsoidReader ::
  Applicative f =>
  (Ellipsoid -> a)
  -> EllipsoidReaderT f a
arrEllipsoidReader k =
  EllipsoidReaderT (pure . k)
-- | Map over the result inside the underlying functor.
instance Functor f => Functor (EllipsoidReaderT f) where
  fmap f (EllipsoidReaderT k) =
    EllipsoidReaderT (fmap f . k)

-- | Combine computations that read the same ellipsoid.
instance Applicative f => Applicative (EllipsoidReaderT f) where
  pure =
    EllipsoidReaderT . pure . pure
  EllipsoidReaderT f <*> EllipsoidReaderT a =
    EllipsoidReaderT (liftA2 (<*>) f a)

-- | Sequence computations, threading the same ellipsoid through both.
instance Monad f => Monad (EllipsoidReaderT f) where
  return =
    EllipsoidReaderT . Monad.return . Monad.return
  EllipsoidReaderT k >>= f =
    EllipsoidReaderT (\e -> k e Monad.>>= \q -> e & f q ^. _Wrapped')

instance Alternative f => Alternative (EllipsoidReaderT f) where
  empty =
    EllipsoidReaderT (\_ -> empty)
  EllipsoidReaderT a <|> EllipsoidReaderT b =
    EllipsoidReaderT (liftA2 (<|>) a b)

instance MonadPlus f => MonadPlus (EllipsoidReaderT f) where
  mzero =
    EllipsoidReaderT (\_ -> mzero)
  EllipsoidReaderT a `mplus` EllipsoidReaderT b =
    EllipsoidReaderT (liftA2 mplus a b)

-- | Lift an effect that ignores the ellipsoid.
instance MonadTrans EllipsoidReaderT where
  lift =
    EllipsoidReaderT . pure

instance MonadIO f => MonadIO (EllipsoidReaderT f) where
  liftIO =
    EllipsoidReaderT . pure . liftIO

instance MonadFix f => MonadFix (EllipsoidReaderT f) where
  mfix f =
    EllipsoidReaderT (\e -> mfix (\q -> e & f q ^. _Wrapped'))

instance MonadZip f => MonadZip (EllipsoidReaderT f) where
  EllipsoidReaderT a `mzip` EllipsoidReaderT b =
    EllipsoidReaderT (liftA2 mzip a b)
-- | Return the ellipsoid under consideration.
readEllipsoid ::
  Applicative f =>
  EllipsoidReaderT f Ellipsoid
readEllipsoid =
  EllipsoidReaderT pure

-- | The semi-major axis of the ellipsoid.
readSemiMajor ::
  Applicative f =>
  EllipsoidReaderT f Double
readSemiMajor =
  (^. semiMajor) <$> readEllipsoid

-- | The stored flattening value of the ellipsoid.
readFlattening ::
  Applicative f =>
  EllipsoidReaderT f Double
readFlattening =
  (^. flattening) <$> readEllipsoid

-- | The reciprocal of the stored flattening value.
readFlatteningReciprocal ::
  Applicative f =>
  EllipsoidReaderT f Double
readFlatteningReciprocal =
  (^. flatteningReciprocal) <$> readEllipsoid

-- | The semi-minor axis, @b = a * (1 - f)@.
--
-- NOTE(review): this (and the eccentricity functions below) obtains the
-- flattening from 'readFlatteningReciprocal', which suggests the
-- 'flattening' field stores the reciprocal (e.g. 298.257...) — confirm
-- against 'Data.Geodetic.Ellipsoid'.
semiMinor ::
  Applicative f =>
  EllipsoidReaderT f Double
semiMinor =
  (\f m -> m * (1 - f)) <$>
  readFlatteningReciprocal <*>
  readSemiMajor

-- | First eccentricity squared, @e² = 2f - f²@.
eccentricitySquared ::
  Applicative f =>
  EllipsoidReaderT f Double
eccentricitySquared =
  (\f -> 2 * f - (f * f)) <$> readFlatteningReciprocal
-- | Second eccentricity squared,
-- @e'² = e² / (1 - e²) = f(2 - f) / (1 - f)²@.
--
-- The previous denominator @(1 - f * f)@ expands to @(1 - f)(1 + f)@,
-- which is not the @(1 - f)²@ required by the second-eccentricity
-- identity (and matches no standard ellipsoid quantity).
eccentricitySquared' ::
  Applicative f =>
  EllipsoidReaderT f Double
eccentricitySquared' =
  (\f -> (f * (2 - f)) / ((1 - f) * (1 - f))) <$> readFlatteningReciprocal
-- | The prime-vertical radius of curvature at a given latitude.
distributeNormal ::
  Applicative f =>
  Double
  -> EllipsoidReaderT f Double
distributeNormal t =
  (\k -> k t) <$> normal

-- | The prime-vertical radius of curvature as a function of latitude:
-- @N(phi) = a / sqrt(1 - e² sin² phi)@.
normal ::
  Applicative f =>
  EllipsoidReaderT f (Double -> Double)
normal =
  (\s m t -> m / sqrt (1 - s * sin t ^ (2 :: Int))) <$> eccentricitySquared <*> readSemiMajor
-- | Run a pure reader against the WGS84 ellipsoid.
wgs84' ::
  EllipsoidReader a
  -> a
wgs84' r =
  (r ^. runEllipsoidReader) wgs84

-- | Run a transformer reader against the WGS84 ellipsoid.
wgs84'' ::
  EllipsoidReaderT f a
  -> f a
wgs84'' r =
  (_Unwrapped # r) wgs84
-- | Iso between earth-centred earth-fixed (ECEF) coordinates and
-- geodetic latitude\/longitude\/height, for the ellipsoid in the reader.
--
-- The forward direction (ECEF -> LLH) is a closed-form, non-iterative
-- conversion; the reverse is the standard geodetic-to-Cartesian formula.
-- The intermediate names (@zeta@, @rho@, @kappa@, ...) follow the
-- closed-form derivation and are not meaningful in isolation.
earthGeo ::
  Applicative f =>
  EllipsoidReaderT f (ReifiedIso' ECEF LLH)
earthGeo =
  let f e2 a nt =
        Iso (
          iso
            (\ecef ->
              let
                x_ = ecef ^. x
                y_ = ecef ^. y
                h_ = ecef ^. z
                sq q = q ^ (2 :: Int)
                p2 = sq x_ + sq y_
                a2 = sq a
                e4 = sq e2
                zeta = (1 - e2) * (sq h_ / a2)
                rho = (p2 / a2 + zeta - e4) / 6
                rho2 = sq rho
                rho3 = rho * rho2
                s = e4 * zeta * p2 / (4 * a2)
                cbrt q = q ** (1 / 3)
                t = cbrt (s + rho3 + sqrt (s * (s + 2 * rho3)))
                u = rho + t + rho2 / t
                v = sqrt (sq u + e4 * zeta)
                w = e2 * (u + v - zeta) / (2 * v)
                kappa = 1 + e2 * (sqrt (u + v + sq w) + w) / (u + v)
                phi = atan (kappa * h_ / sqrt p2)
                norm = nt phi
                l = h_ + e2 * norm * sin phi
              in LLH (LL phi (atan2 y_ x_)) (sqrt (l ^ (2 :: Int) + p2) - norm))
            (\llh ->
              let t_ = llh ^. lat
                  n_ = llh ^. lon
                  h_ = llh ^. height
                  n = nt t_
                  -- shared horizontal factor, applied to cos/sin of longitude
                  cs k = (n + h_) * cos t_ * k n_
                  z_ = (n * (1 - e2) + h_) * sin t_
              in ECEF (XY (cs cos) (cs sin)) z_)
          )
  in f <$>
     eccentricitySquared <*>
     readSemiMajor <*>
     normal
| NICTA/coordinate | src/Data/Geodetic/EllipsoidReaderT.hs | bsd-3-clause | 6,472 | 0 | 26 | 1,722 | 2,227 | 1,198 | 1,029 | 206 | 1 |
{-# OPTIONS -Wall #-}
-- The pec embedded compiler
-- Copyright 2011-2012, Brett Letner
module Pec.C (cModules) where
import Control.Concurrent
import Control.Monad
import Data.Char
import Data.Generics.Uniplate.Data
import Data.List
import Data.Maybe
import Development.Shake.FilePath
import Grm.Prims
import Language.C.Abs
import Pec.IUtil
import qualified Language.Pir.Abs as I
-- | Emit the @.c@ and @.h@ files for one compiled module into @outdir@.
-- The header receives the accumulated type declarations (read from the
-- 'gTyDecls' MVar) plus extern declarations for free variables not
-- covered by 'cBuiltins'.
cModules :: FilePath -> Bool -> I.Module -> IO ()
cModules outdir is_readable (I.Module a _bs cs) = do
  let defs = map cDefine cs
  writeFileBinary (joinPath [outdir, n ++ ".c"]) $ ppShow $
    cleanup $ optimize $ dModule is_readable $
    CModule [Import hnfn] defs
  xs <- liftM (nub . map cTypeD) $ readMVar gTyDecls
  let ifvs = nub [ ifv | ifv@(I.TVar v _) <- concatMap fvsIDefine cs
                 , v `notElem` cBuiltins ]
  writeFileBinary (joinPath [outdir, hnfn]) $ ppShow $
    cleanup $
    transformBi tArrayArgTy $
    HModule hn hn imps $ xs ++ map (Declare . cFunDecl) ifvs
  where
    hn = n ++ "_H"    -- include-guard symbol
    hnfn = n ++ ".h"  -- header file name
    n = case a of
      "" -> error "unused:cModules"
      _ -> init a
    imps = map GImport
      [ "stdio.h"
      , "stdint.h"
      , "stdlib.h"
      , "string.h"
      ]

-- | Functions supplied by the C standard library, so no extern
-- declaration is emitted for them.
cBuiltins :: [String]
cBuiltins = [ "puts", "putchar", "strlen", "strncpy", "strcmp"] -- BAL: figure out how to handle this generically
-- | Translate a saved type declaration into a C typedef.
cTypeD :: (I.Type, I.TyDecl) -> Declare
cTypeD (x@(I.Type s _),y) = Typedef $ Decl t (tyName x)
  where
    t = case y of
      I.TyEnum bs -> TyEnum [ EnumC b | I.EnumC b <- bs ]
      I.TyRecord bs -> TyStruct [ cDecl $ I.TVar c d | I.FieldT c d <- bs ]
      -- tagged unions become @struct { tag; union data; }@; constructors
      -- carrying void contribute no union member
      I.TyTagged bs -> TyStruct
        [ Decl (TyName $ s ++ "tag") "tag"
        , Decl (TyUnion [ cDecl $ I.TVar c d | I.ConC c d <- bs
                        , d /= tyVoid ]) "data"
        ]

-- | Typed variable to C declaration.
cDecl :: I.TVar -> Decl
cDecl (I.TVar a b) = Decl (cType b) a

-- | Translate an IR type into a C type.
cType :: I.Type -> Type
cType x@(I.Type a bs) = case a of
  "Fun_" -> TyFun (cType $ last bs) (map cType $ init bs)
  "Ptr_" -> case bs of
    [t] -> TyPtr (cType t)
    _ -> error "cType:TyPtr"
  "Array_" -> case bs of
    [c,d] -> TyArray (cType d) (nCnt [c])
    _ -> error "cType:TyArray"
  _ -> cTyName x

-- | Reference an IR type by its C type name.
cTyName :: I.Type -> Type
cTyName = TyName . tyName

-- | Render an IR type as a C type name.
tyName :: I.Type -> String
tyName (I.Type a bs) = case (a,bs) of
  ("Void_",[]) -> "void"
  ("W_",_) -> "uint" ++ (promote $ nCnt bs) ++ "_t"
  ("I_",_) -> "int" ++ (promote $ nCnt bs) ++ "_t"
  ("Double_",[]) -> "double"
  ("Float_",[]) -> "float"
  ("Char_",[]) -> "char"
  -- encode the arity into the name so function types of different
  -- arities get distinct typedef names
  ("Fun_",_) -> tyName (I.Type (a ++ show (length bs)) bs)
  _ -> mkTyConstr (a : map tyName bs)
-- | Join name components with underscores (stripping each component's
-- trailing underscore) to form a C type-constructor name.
mkTyConstr :: [String] -> String
mkTyConstr = intercalate "_" . map strip_underscore
-- | Translate an IR definition into a C function definition; all local
-- variable declarations are hoisted to the top of the body.
cDefine :: I.Define -> Define
cDefine x@(I.Define a b cs d) =
  Define (funDecl a b cs) $ map (DeclS . cDecl) (lvsIDefine x) ++ cBlock d

-- | Build a C function declarator; functions returning functions become
-- function-pointer-returning declarators.
funDecl :: I.Type -> Lident -> [I.TVar] -> FunDecl
funDecl a b cs = case cType a of
  TyFun t ts -> RetFunFD t b vs ts
  t -> FunFD t b vs
  where vs = map cDecl cs

-- | Extern declaration for an imported function variable.
cFunDecl :: I.TVar -> FunDecl
cFunDecl tv@(I.TVar a b) = case b of
  I.Type "Fun_" xs -> funDecl (last xs) a (map (I.TVar "") $ init xs)
  _ -> error $ "unused:cFunDecl:not TyFun:" ++ ppShow tv

-- | Translate an IR expression, recognising the special primitives:
-- field access (@*_fld@), tag read (@tagv@), union projection (@un@),
-- indexing (@idx@), and binary operators.
cExp :: I.Exp -> Exp
cExp x = case x of
  I.CallE f [b] | has_suffix "_fld" $ vtvar f ->
    AddrE $ ArrowE (cAtom b) $ VarE $ drop_suffix "fld" (vtvar f)
  I.CallE f [b] | vtvar f == "tagv" -> ArrowE (cAtom b) (enum "tag")
  I.CallE f [b,c] | vtvar f == "un" ->
    AddrE $ ArrowE (cAtom c) $ DotE (VarE "data") (cTag b)
  I.CallE f [b,c] | vtvar f == "idx" ->
    IdxE (cAtom b) (cAtom c)
  I.CallE a [b,c] | isBinOp a -> BinOpE (cAtom b) (cBinOp a) (cAtom c)
  I.CallE a bs -> CallE (cVarE a) $ map cAtom bs
  I.AtomE a -> cAtom a
  I.CastE a@(I.TVar _ t) b
    | isTypeEquiv t b -> cVarE a   -- cast between equivalent types is a no-op
    | otherwise -> CastE (cType b) (cVarE a)
  I.AllocaE a -> AllocaE $ cType a
  I.LoadE a -> LoadE $ cVarE a

-- | Tag of a tagged-union value, as a C enum constant.
cTag :: I.Atom -> Exp
cTag x = case x of
  I.LitA (I.TLit (I.EnumL a) _) -> enum a
  I.LitA (I.TLit (I.StringL a) _) -> enum a -- BAL: should be EnumL...
  _ -> error $ "cTag:" ++ ppShow x

-- | Enum-constant expression.
enum :: Uident -> Exp
enum = LitE . EnumL

-- | Variable reference (the type is dropped).
cVarE :: I.TVar -> Exp
cVarE (I.TVar a _) = VarE a
-- | Translate an IR statement into a list of C statements.
cStmt :: I.Stmt -> [Stmt]
cStmt x = case x of
  I.LetS a b -> case b of
    -- nullary tagged-union constructor: just set the tag
    I.CallE f [c] | vtvar f == "mk" ->
      [ AssignS (DotE (cVarE a) (enum "tag")) $ cTag c ]
    -- unary constructor: set the tag, then the union payload
    I.CallE f [c,d] | vtvar f == "mk" ->
      [ AssignS (DotE (cVarE a) (enum "tag")) $ cTag c
      , AssignS (DotE (DotE (cVarE a) (enum "data")) (cTag c)) $
        cAtom d
      ]
    -- NOTE(review): this alternative is unreachable — the first
    -- single-argument "mk" case above matches the same pattern first;
    -- confirm which translation was intended.
    I.CallE f [c] | vtvar f == "mk" ->
      [AssignS (DotE (cVarE a) (enum "tag")) (cAtom c)]
    _ -> [AssignS (cVarE a) (cExp b)]
  I.StoreS a b -> [AssignS (LoadE $ cVarE a) (cAtom b)]
  I.CallS a bs -> [CallS (cVarE a) $ map cAtom bs]
  -- the default alternative goes first and gets an explicit break
  I.SwitchS a bs cs ->
    [SwitchS (cAtom a) $ map cSwitchAlt cs ++
       [DefaultAlt $ cBlock bs ++ [BreakS]]]
  I.IfS a b c -> [IfS (cAtom a) (cBlock b) (cBlock c)]
  I.WhenS a b -> [WhenS (cAtom a) (cBlock b)]
  -- the pre-condition block runs once before the loop and again at the
  -- end of each iteration
  I.WhileS a b c -> ss ++ [WhileS (cAtom b) (cBlock c ++ ss)]
    where ss = cBlock a
  I.ReturnS (I.LitA (I.TLit I.VoidL _)) -> [RetVoidS]
  I.ReturnS a -> [ReturnS (cAtom a)]
  I.NoOpS -> error "unused:cStmt:NoOpS"
-- | Translate an atom (variable or literal) into a C expression.
cAtom :: I.Atom -> Exp
cAtom x = case x of
  I.VarA a -> cVarE a
  I.LitA (I.TLit I.VoidL _) -> error "void atom not removed"
  I.LitA a -> cTLitE a

-- | Typed-literal expression.
cTLitE :: I.TLit -> Exp
cTLitE = LitE . cTLit

-- | Translate a statement list.
cBlock :: I.StmtList -> [Stmt]
cBlock = concatMap cStmt

-- | Translate a switch alternative, appending the mandatory break.
cSwitchAlt :: I.SwitchAlt -> SwitchAlt
cSwitchAlt (I.SwitchAlt a b) =
  SwitchAlt (cTLit a) (cBlock b ++ [BreakS])

-- | Translate a typed literal, rendering numbers according to the
-- target C type.
cTLit :: I.TLit -> Lit
cTLit (I.TLit x y) = case x of
  I.StringL a -> StringL a
  I.CharL a -> CharL a
  -- BAL add int and float suffixes
  I.NmbrL a -> NmbrL $ case y of
    I.Type "Float_" []
      | isFloat a -> a
      | otherwise -> show (readNumber a :: Double) ++ "f"
    I.Type "Double_" []
      | isFloat a -> a
      | otherwise -> show (readNumber a :: Double)
    _ | isFloat a -> error $ "integral type with float syntax:" ++ a ++
                             ":" ++ ppShow y
      | isBinary a -> show (readBinary a)
      | isOctal a -> '0' : drop 2 a  -- "0o..." becomes C's leading-zero octal
      | otherwise -> a
  I.EnumL a -> EnumL a
  I.VoidL -> error "unused:cLTit:VoidL"
-- | Final naming passes run on both the .c and .h modules.
cleanup :: Module -> Module
cleanup x =
  rewriteBi tTypeD $
  transformBi tVarName $
  transformBi tName $
  x

-- | Peephole: @x = x + 1@ / @x = x - 1@ become @x++@ / @x--@.
-- NOTE(review): 'readNumber' is forced to Integer here, which assumes
-- the literal is integral — confirm float literals cannot reach this
-- pass (the author's BAL comments raise the same question).
tMath :: Stmt -> Maybe Stmt
tMath (AssignS a (BinOpE b "+" (LitE (NmbrL s)))) -- BAL: Don't do if float/double?
  | a == b && (readNumber s :: Integer) == 1 = Just $ IncS a
tMath (AssignS a (BinOpE b "-" (LitE (NmbrL s)))) -- BAL: Don't do if float/double?
  | a == b && (readNumber s :: Integer) == 1 = Just $ DecS a
tMath _ = Nothing
-- | Give local variables short readable names: strip trailing digits
-- and underscores, then re-number only the names that collide after
-- stripping.
tVarName :: Define -> Define
tVarName x = transformBi k $ transformBi h x
  where
    -- old-name -> new-name table, built from all declarations
    tbl = concatMap g $ groupBy (\a b -> f a == f b) $
          sort [ v | Decl _ v <- universeBi x ]
    -- the "stem" of a name, with trailing digits/underscores removed
    f s =
      case reverse $ dropWhile (\c -> isDigit c || c == '_') $ reverse s of
        "" -> "_"
        s1 -> s1
    -- a unique stem keeps its stem; colliding stems get numbered
    g ss = case ss of
      [s] -> [(s, f s)]
      _ -> [ (s, f s ++ show i) | (s,i) <- zip ss [0 :: Int .. ]]
    h (VarE v) = case lookup v tbl of
      Nothing -> VarE v
      Just v1 -> VarE v1
    h a = a
    k (Decl t v) = case lookup v tbl of
      Nothing -> error "unused:tVarName"
      Just v1 -> Decl t v1
    k a = a
-- | Semantic simplification passes: fold constant conditions, drop dead
-- branches, canonicalise and flatten.
optimize :: Module -> Module
optimize x =
  rewriteBi canon $
  rewriteBi tNoOpS $
  rewriteBi canonSS $
  rewriteBi opt $
  x

-- | Simplify statements guarded by constant booleans.
opt :: Stmt -> Maybe Stmt
opt (IfS e a _) | isTrue e = Just $ BlockS a
opt (IfS e _ b) | isFalse e = Just $ BlockS b
opt (WhenS e a) | isTrue e = Just $ BlockS a
opt (WhenS e _) | isFalse e = Just NoOpS
opt (WhileS e _) | isFalse e = Just NoOpS
opt _ = Nothing

-- | Canonicalise one-armed ifs into 'WhenS'.
canon :: Stmt -> Maybe Stmt
canon (IfS e a []) = Just $ WhenS e a
canon (IfS e [] b) = Just $ WhenS (NotE e) b
canon _ = Nothing

-- | Flatten nested statement blocks into their parent list.
canonSS :: [Stmt] -> Maybe [Stmt]
canonSS xs | any isBlockS xs = Just $ concatMap f xs
  where f (BlockS ss) = ss
        f s = [s]
canonSS _ = Nothing

-- | Whether a statement is a block.
isBlockS :: Stmt -> Bool
isBlockS BlockS{} = True
isBlockS _ = False

-- | Constant-true expression test.
isTrue :: Exp -> Bool
isTrue (LitE (EnumL "True_")) = True
isTrue _ = False

-- | Constant-false expression test.
isFalse :: Exp -> Bool
isFalse (LitE (EnumL "False_")) = True
isFalse _ = False
-- | The main cleanup pipeline run on a generated C module; passes are
-- applied bottom-to-top.  The variable-reuse pass 'tLive' only runs
-- when readable output was requested.
dModule :: Bool -> Module -> Module
dModule is_readable x =
  rewriteBi canon $
  rewriteBi tNoOpS $
  rewriteBi canonSS $
  rewriteBi tMath $
  transformBi tSort $
  transformBi reParen $
  transformBi tArray $
  transformBi tArrayArgTy $
  transformBi tUnused $
  (if is_readable then rewriteBi tLive else id) $
  transformBi tOnlyAssigned $
  transformBi tBlock $
  rewriteBi tNoOpS $
  rewriteBi tNoOpE $
  transformBi tPtr $
  rewriteBi tNoOpS $
  rewriteBi tNoOpE $
  transformBi tRHS $
  rewriteBi tNoOpS $
  rewriteBi tNoOpE $
  transformBi tLHS $
  transformBi tAlloca $
  transformBi tLit $
  x

-- | Render array, pointer, and function-pointer declarators with C
-- declarator syntax folded into the declared name.
tTypeD :: Decl -> Maybe Decl
tTypeD (Decl (TyArray a b) c) = Just $ Decl a $ c ++ "[" ++ b ++ "]"
tTypeD (Decl (TyPtr a) b) = Just $ Decl a $ "(*" ++ b ++ ")"
tTypeD (Decl (TyFun t ts) x) = Just $ FunD t ("(*" ++ x ++ ")") ts
tTypeD _ = Nothing
-- | Adapt to C array semantics: arrays are passed by reference
-- implicitly, and array assignment becomes memcpy.
tArray :: Define -> Define
tArray x = transformBi i $ transformBi h $ transformBi g x
  where
    vs = [ v | Decl (TyArray{}) v <- universeBi x ]
    f (AddrE (VarE v)) | v `elem` vs = VarE v -- C passes arrays by reference
    f a = a
    g (CallE a bs) = CallE a $ map f bs
    g a = a
    h (CallS a bs) = CallS a $ map f bs
    h a = a
    i (AssignS a@(VarE v) b) | v `elem` vs =
      CallS (VarE "memcpy") [a, b, CallE (VarE "sizeof") [a]]
    i a = a

-- | Hoist declarations (sorted by name) to the top of a function body.
tSort :: Define -> Define
tSort (Define x ys) = Define x $ sortBy f cs ++ ds
  where
    (cs,ds) = partition isDeclS ys
    f (DeclS a) (DeclS b) = compare (declNm a) (declNm b)
    f _ _ = error "unused:tSort"

-- | Whether a statement is a declaration.
isDeclS :: Stmt -> Bool
isDeclS DeclS{} = True
isDeclS _ = False
-- | Apply 'rName' to every identifier-bearing position in the module
-- (declarations, variables, function declarators, enum constants,
-- enum literals, type names).
tName :: Module -> Module
tName x = transformBi j $ transformBi i $ transformBi h $ transformBi g $
          transformBi k $ transformBi f x
  where
    f (Decl a b) = Decl a $ rName b
    f _ = error "unused:tName"
    g (VarE a) = VarE $ rName a
    g a = a
    h (FunFD a b cs) = FunFD a (rName b) cs
    h (RetFunFD a b cs ds) = RetFunFD a (rName b) cs ds
    i (EnumC a) = EnumC $ rName a
    j (EnumL a) = EnumL $ rName a
    j a = a
    k (TyName a) = TyName $ rName a
    k a = a

-- | Make a source identifier legal in C: drop @~@, turn @.@ into @_@,
-- and strip any trailing underscore.
rName :: String -> String
rName x = strip_underscore $ map f $ filter ((/=) '~') x
  where
    f '.' = '_'
    f c = c
-- | Inline single-use fresh temporaries into the one statement that
-- consumes them, turning the defining assignment into a no-op.
tBlock :: Define -> Define
tBlock x = transformBi f x
  where
    -- for each fresh temporary, the single statement that reads it
    tbl :: [(String,Stmt)]
    tbl = catMaybes $ map h $ universeBi x
    f :: Stmt -> Stmt
    f s0@(AssignS (VarE v) e) = case lookup v tbl of
      Nothing -> s0
      Just s -> transformBi g s
        where
          g (VarE v1) | v1 == v = e
          g a = a
    f s | s `elem` map snd tbl = NoOpS
    f s = s
    -- a statement that reads exactly one fresh temporary (not counting
    -- the variable it itself assigns/returns through)
    h :: Stmt -> Maybe (String,Stmt)
    h s = case [ v | VarE v <- universeBi s, isFreshVar v ] \\ vs of
      [v] -> Just (v,s)
      _ -> Nothing
      where
        vs = case s of
          AssignS e _ -> [basename e]
          ReturnS e -> [basename e]
          _ -> []

-- make sure arithmetic expressions are fully parenthesized

-- | Normalise address/dereference combinations into @.@ and @->@
-- access, and parenthesise dereferenced callees.
reParen :: Exp -> Exp
reParen (ArrowE (AddrE a) b) = DotE a b
reParen (DotE (LoadE a) b) = ArrowE a b
reParen (CallE a@LoadE{} bs) = CallE (ParenE a) bs
reParen x = x

-- | Drop declarations of variables that are never referenced.
tUnused :: Define -> Define
tUnused (Define a bs) = Define a (filter f bs)
  where
    f (DeclS x) = declNm x `elem` vs
    f _ = True
    vs = nub [ v | VarE v <- universeBi bs ]
-- | A variable only ever assigned from calls and never read can have
-- its assignments replaced by bare call statements.
tOnlyAssigned :: Define -> Define
tOnlyAssigned x = rewriteBi f x
  where
    -- each call-assignment contributes the variable twice; subtracting
    -- every occurrence leaves one copy exactly when it is never read
    vs = concat [ [v,v] | AssignS (VarE v) (CallE{}) <- universeBi x ] \\
         [ v | VarE v <- universeBi x ]
    f (AssignS (VarE v) (CallE a bs)) | v `elem` vs = Just $ CallS a bs
    f _ = Nothing

-- | Remove @*(&e)@ and related no-op address arithmetic.
tNoOpE :: Exp -> Maybe Exp
tNoOpE (LoadE (AddrE e)) = Just e
tNoOpE (ArrowE (AddrE (ArrowE a b)) c) = Just $ DotE (ArrowE a b) c
tNoOpE (IdxE (AddrE a) b) = Just $ AddrE (IdxE a b)
tNoOpE _ = Nothing

-- | Drop no-op statements from a statement list.
tNoOpS :: [Stmt] -> Maybe [Stmt]
tNoOpS xs
  | any isNoOpS xs = Just $ filter (not . isNoOpS) xs
  | otherwise = Nothing

-- | No-ops: explicit 'NoOpS', self-assignments, and self-memcpys.
isNoOpS :: Stmt -> Bool
isNoOpS NoOpS = True
isNoOpS (AssignS a b) = a == b
isNoOpS (CallS (VarE "memcpy") [a, b, _]) = a == b
isNoOpS _ = False

-- | Name of a declared variable (plain declarations only).
declNm :: Decl -> String
declNm (Decl _ v) = v
declNm _ = error "unused:declNm"

-- | Propagate literal constants into their uses and drop the defining
-- assignments.
tLit :: Define -> Define
tLit x = rewriteBi g $ rewriteBi f x
  where
    tbl = [ (v,e) | AssignS (VarE v) e@LitE{} <- universeBi x ]
    f (AssignS (VarE v) _) | isJust (lookup v tbl) = Just NoOpS
    f _ = Nothing
    g (VarE v) = lookup v tbl
    g _ = Nothing
-- | Turn alloca'd pointers into plain stack variables, rewriting their
-- uses to take the address explicitly and dropping the alloca itself.
tAlloca :: Define -> Define
tAlloca x@(Define fd _) =
  transformBi g $ transformBi f $ rewriteBi h x
  where
    -- variables bound by alloca, plus pointer-to-array parameters
    vs = [ v | AssignS (VarE v) AllocaE{} <- universeBi x ] ++
         [ v | Decl (TyPtr TyArray{}) v <- universeBi fd ]
    f :: Exp -> Exp
    f (ArrowE (AddrE (VarE v)) e) | v `elem` vs = DotE (VarE v) e
    f (VarE v) | v `elem` vs = AddrE (VarE v)
    f e = e
    -- the declaration loses one level of indirection
    g (Decl (TyPtr t) v) | v `elem` vs = Decl t v
    g a = a
    h :: Stmt -> Maybe Stmt
    h s | isAllocaS s = Just NoOpS
        | otherwise = Nothing

-- | In C signatures, pointer-to-array parameters decay to plain arrays.
tArrayArgTy :: FunDecl -> FunDecl
tArrayArgTy = transformBi f
  where
    f :: Type -> Type
    f (TyPtr t@TyArray{}) = t
    f x = x

-- | Whether a statement is an alloca assignment.
isAllocaS :: Stmt -> Bool
isAllocaS (AssignS VarE{} AllocaE{}) = True
isAllocaS _ = False

-- | Collapse pointer temporaries introduced by address-taking: for
-- @a = &b@, both @a@ and @b@ are redirected through one fresh variable
-- whose declaration drops the pointer.
tPtr :: Define -> Define
tPtr x = transformBi h $ transformBi g $ rewriteBi f x
  where
    -- '$' temporarily marks the merged variable so 'f' terminates
    tbl = concat [ let e = VarE ('$':a) in [(a, AddrE e) ,(b, e)]
                 | AssignS (VarE a) (AddrE (VarE b)) <- universeBi x ]
    f (VarE v) = lookup v tbl
    f _ = Nothing
    g (VarE ('$':v)) = VarE v
    g e = e
    h (Decl (TyPtr t) v) | v `elem` map fst tbl = Decl t v
    h p = p
-- | Copy propagation, right-hand sides: where a fresh temporary was
-- copied into an lvalue, replace reads of the temporary with that
-- lvalue expression.
tRHS :: Define -> Define
tRHS x = rewriteBi f x
  where
    tbl = [ (v,e) | AssignS e (VarE v) <- universeBi x, isFreshVar v ]
    f (VarE v) = lookup v tbl
    f _ = Nothing

-- | Copy propagation, left-hand sides: where a fresh temporary was
-- assigned an expression, replace reads of the temporary with it.
tLHS :: Define -> Define
tLHS x = rewriteBi f x
  where
    tbl = [ (v,e) | AssignS (VarE v) e <- universeBi x, isFreshVar v ]
    f (VarE v) = lookup v tbl
    f _ = Nothing
-- | Whether a name is a compiler-generated temporary: non-empty,
-- contains no underscore, and ends in a digit.
--
-- The empty-string guard is required: 'last' would otherwise crash
-- on @""@ (which passes the @notElem@ check vacuously).
isFreshVar :: String -> Bool
isFreshVar v = not (null v) && '_' `notElem` v && isDigit (last v)
-- Liveness

-- | Rename one variable to a dead variable of the same type (variable
-- reuse).  Applied via 'rewriteBi' so it repeats until no renaming is
-- proposed.
tLive :: Define -> Maybe Define -- this is hacky and inefficient, but seems to work
tLive x = case live_tbl x of
  [] -> Nothing
  (y:_) -> Just $ transformBi (f y) x
  where
    f (a,b) (VarE v) | v == a = VarE b
    f _ a = a

-- | A genuine renaming (not the identity).
is_reuse :: (String,String) -> Bool
is_reuse (a,b) = a /= b

-- | Run the backwards liveness scan over a function body and return
-- the proposed renamings.
live_tbl :: Define -> [(Nm, Nm)]
live_tbl (Define _ ss) =
  filter is_reuse $ reuse $ liveSS [ d | DeclS d <- ss ] initSt $ sStmts ss

-- | Liveness events for a statement list, scanned back-to-front.
sStmts :: [Stmt] -> [S]
sStmts = concatMap sStmt . reverse

-- | Root variable of an lvalue-ish expression.
-- NOTE(review): partial — errors on literals and binops; callers only
-- pass assignment/return targets, where a root variable exists.
basename :: Exp -> String
basename x = case x of
  VarE a -> a
  DotE a _ -> basename a
  AddrE a -> basename a
  ArrowE a _ -> basename a
  IdxE a _ -> basename a
  CastE _ b -> basename b
  LoadE a -> basename a
  ParenE a -> basename a
  _ -> error $ "unused:basename:" ++ ppShow x

-- | Liveness events of a single statement (uses before the init, since
-- the scan runs backwards).
sStmt :: Stmt -> [S]
sStmt x = case x of
  AssignS a b -> sExp b ++ [Init $ basename a]
  SwitchS a bs -> [Branch $
                   [ sStmts cs | DefaultAlt cs <- bs ] ++
                   [ sStmts cs | SwitchAlt _ cs <- bs ]
                  ] ++ sExp a
  IfS a bs cs -> [Branch [sStmts bs, sStmts cs]] ++ sExp a
  WhenS a bs -> [Branch [sStmts bs]] ++ sExp a
  WhileS a bs -> [Loop $ sStmts bs ++ sExp a]
  CallS a bs -> sExp a ++ concatMap sExp bs
  ReturnS a -> sExp a
  DeclS{} -> []
  BreakS -> []
  RetVoidS -> []
  NoOpS -> []
  BlockS ss -> sStmts ss
  DecS{} -> error $ "unused:sStmt:DecS"
  IncS{} -> error $ "unused:sStmt:IncS"

-- | Every variable read by an expression, as use events.
sExp :: Exp -> [S]
sExp x = [ Use v | VarE v <- universeBi x ]
-- | Variable name.
type Nm = String

-- | Liveness events: a use, an initialisation, a branch (alternative
-- futures), or a loop (whose body may repeat).
data S
  = Use Nm
  | Init Nm
  | Branch [[S]]
  | Loop [S]
  deriving Show

-- | Scan state: the names currently live, and the renamings chosen.
data St = St
  { in_use :: [Nm]
  , reuse :: [(Nm, Nm)]
  } deriving Show

-- | Empty scan state.
initSt :: St
initSt = St [] []

-- | The backwards liveness scan: a 'Use' makes a name live, an 'Init'
-- kills it and picks a reuse target, a 'Branch' merges the states of
-- all alternatives, and a 'Loop' is rescanned with its inits removed
-- so body variables stay live across iterations.
liveSS :: [Decl] -> St -> [S] -> St
liveSS vs = loop
  where
    loop st [] = st
    loop st (x:xs) = case x of
      Use a -> loop st1 xs
        where st1 = st{ in_use = nub $ union [a] $ in_use st }
      Init a | isJust $ lookup a $ reuse st -> loop st xs
      Init a -> loop st1 xs
        where
          st1 = St{ in_use = in_use st \\ [a]
                  , reuse = nub ((a, a1) : reuse st)
                  }
          a1 = reuse_nm a vs $ in_use st
      Branch bs -> loop st1 xs
        where
          sts = map (loop st) bs
          st1 = St{ in_use = nub $ foldr1 union $ map in_use sts
                  , reuse = nub (concatMap reuse sts)
                  }
      Loop bs -> loop st $ concatMap no_inits bs ++ bs ++ xs

-- | Pick a replacement name for @a@: the alphabetically smallest
-- declared name of the same type that is not live (possibly @a@).
-- NOTE(review): uses 'head' twice — relies on @a@ appearing in the
-- declaration list @bs@; confirm callers guarantee this.
reuse_nm :: Nm -> [Decl] -> [Nm] -> Nm
reuse_nm a bs cs =
  head $ sort $ a : ((map declNm $ filter (equiv_decl b0) bs) \\ cs)
  where
    b0 = head $ filter ((==) a . declNm) bs

-- | Whether two declarations share the same type.
equiv_decl :: Decl -> Decl -> Bool
equiv_decl (Decl a _) (Decl b _) = a == b
equiv_decl _ _ = error "unused:equiv_decl"

-- | Events of a subtree with initialisations removed (used when
-- re-scanning loop bodies).
no_inits :: S -> [S]
no_inits x = case x of
  Init{} -> []
  Use{} -> [x]
  Branch bs -> concatMap (concatMap no_inits) bs
  Loop bs -> concatMap no_inits bs
| stevezhee/pec | Pec/C.hs | bsd-3-clause | 16,915 | 0 | 28 | 4,731 | 8,633 | 4,258 | 4,375 | 488 | 14 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Test.Framework (defaultMain, testGroup)
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck
import Test.HUnit
import NLP.Semiring
import NLP.Semiring.Boolean
import NLP.Semiring.Prob
import NLP.Semiring.Viterbi
import NLP.Semiring.ViterbiNBest
import NLP.Semiring.Counting
import NLP.Semiring.Derivation
import NLP.Semiring.ViterbiNBestDerivation
import qualified Data.Set as S
import Data.List
import Control.Monad (liftM)
-- | Run the whole semiring-law suite under test-framework.
main = defaultMain tests

-- | One QuickCheck property per semiring instance; each one checks the
-- complete set of semiring laws via 'semiRingProps'.
tests = [
    testGroup "Semiring Props" [
       testProperty "semiProb bool" prop_boolRing,
       testProperty "semiProb prob" prop_probRing,
       testProperty "semiProb viterbi" prop_viterbiRing,
       testProperty "semiProb counting" prop_counting,
       testProperty "semiProb viterbi n-best" prop_viterbiNBest,
       testProperty "semiProb derivation" prop_derivation,
       testProperty "semiProb multi-derivation" prop_multiDerivation,
       testProperty "semiProb nbest derivation" prop_nbestMultiDerivation
      ]
   ]
-- | Probabilities are drawn uniformly from [0, 1].
instance Arbitrary Prob where
    arbitrary = Prob `liftM` choose (0.0, 1.0)

-- | An n-best list is truncated to n elements and kept sorted descending,
-- matching the invariant the semiring operations maintain.
instance (N n, Ord a, Arbitrary a) => Arbitrary (ViterbiNBest n a) where
    arbitrary = do
      v <- arbitrary
      return $ ViterbiNBest $ reverse $ sort $ take (n $ (mkN::n)) $ v

instance Arbitrary Boolean where
    arbitrary = Boolean `liftM` choose (True, False)

-- | Counts are kept non-negative via 'abs'.
instance Arbitrary Counting where
    arbitrary = Counting `liftM` abs `liftM` arbitrary

instance (Arbitrary a) => Arbitrary (Derivation a) where
    arbitrary = Derivation `liftM` arbitrary

-- | At most ten elements, to keep set operations cheap in tests.
instance (Arbitrary a, Ord a) => Arbitrary (MultiDerivation a) where
    arbitrary = (MultiDerivation . S.fromList . take 10) `liftM` arbitrary

instance (Arbitrary a, Arbitrary b) => Arbitrary (Weighted a b) where
    arbitrary = Weighted `liftM` arbitrary
-- | An equality predicate over a semiring's carrier, so laws can be
-- checked exactly ('==') or approximately ('doubEq') as appropriate.
type Eql s = (s -> s -> Bool)

-- (a * b) * c = a * (b * c)
associativeTimes :: (Semiring s) => (s,s,s) -> Eql s -> Bool
associativeTimes (s1, s2, s3) eq =
    ((s1 `times` s2) `times` s3) `eq`
    (s1 `times` (s2 `times` s3))

-- (a + b) + c = a + (b + c)
associativePlus :: (Semiring s) => (s,s,s) -> Eql s -> Bool
associativePlus (s1, s2, s3) eq =
    ((s1 `mappend` s2) `mappend` s3) `eq`
    (s1 `mappend` (s2 `mappend` s3))

-- a + b = b + a
commutativePlus :: (Semiring s) => (s,s,s) -> Eql s -> Bool
commutativePlus (a, b, _) eq =
    (a `mappend` b) `eq`
    (b `mappend` a)

-- a * (b + c) = (a * b) + (a * c)
distribution :: (Semiring s) => (s,s,s) -> Eql s -> Bool
distribution (s1, s2, s3) eq =
    (s1 `times` (s2 `mappend` s3)) `eq`
    ((s1 `times` s2) `mappend` (s1 `times` s3))

-- a + 0 = 0 + a = a  (mempty is the additive identity)
zeroAdd :: (Semiring s) => (s,s,s) -> Eql s -> Bool
zeroAdd (a, _, _) eq =
    (mempty `mappend` a) `eq` a &&
    (a `mappend` mempty) `eq` a

-- a * 0 = 0  (mempty annihilates under times)
zeroMult :: (Semiring s) => (s,s,s) -> Eql s -> Bool
zeroMult (a, _, _) eq =
    (mempty `times` a) `eq` mempty &&
    (a `times` mempty) `eq` mempty

-- 1 * a = a * 1 = a  (one is the multiplicative identity)
oneMult :: (Semiring s) => (s,s,s) -> Eql s -> Bool
oneMult (a, _, _) eq =
    (one `times` a) `eq` a &&
    (a `times` one) `eq` a
-- | Check every semiring law for the given triple of sample values,
-- using the supplied equality predicate.
semiRingProps :: (Semiring s) => (s,s,s) -> Eql s -> Bool
semiRingProps samples eq = all checkLaw laws
  where
    checkLaw law = law samples eq
    laws = [ distribution
           , associativePlus
           , zeroAdd
           , zeroMult
           , oneMult
           , commutativePlus
           , associativeTimes
           ]
-- | Approximate equality for floating-point-backed semirings: true when
-- the operands differ by less than 1e-6.
doubEq x y = abs (x - y) < tolerance
  where tolerance = 0.000001
-- Each property below checks all semiring laws for one instance; the
-- unused 'types' binding only pins the monomorphic type under test.
prop_probRing s1 s2 s3 = semiRingProps (s1, s2, s3) doubEq
    where types = ((s1,s2,s3 ):: (Prob, Prob, Prob))

prop_boolRing s1 s2 s3 = semiRingProps (s1, s2, s3) (==)
    where types = ((s1,s2,s3 ):: (Boolean, Boolean, Boolean))

-- NOTE(review): the comparator pattern-matches 'ViterbiNBest' although
-- the pinned type is 'Viterbi Prob' -- presumably 'Viterbi' is an alias
-- for a 1-best 'ViterbiNBest'; confirm in NLP.Semiring.Viterbi.
prop_viterbiRing s1 s2 s3 = semiRingProps (s1, s2, s3)
                            (\(ViterbiNBest a) (ViterbiNBest b) -> and $ zipWith doubEq a b)
    where types = ((s1,s2,s3 ):: (Viterbi Prob, Viterbi Prob, Viterbi Prob))

prop_counting s1 s2 s3 = semiRingProps (s1, s2, s3) (==)
    where types = ((s1,s2,s3 ):: (Counting, Counting, Counting))

prop_viterbiNBest s1 s2 s3 = semiRingProps (s1, s2, s3) (==)
    where types = ((s1,s2,s3 ):: (Viterbi10Best Counting, Viterbi10Best Counting, Viterbi10Best Counting))

prop_derivation s1 s2 s3 = semiRingProps (s1, s2, s3) (==)
    where types = ((s1,s2,s3):: (Derivation String, Derivation String, Derivation String))

prop_multiDerivation s1 s2 s3 = semiRingProps (s1, s2, s3) (==)
    where types = ((s1,s2,s3):: (MultiDerivation String, MultiDerivation String, MultiDerivation String))

-- N-best weighted derivations: weights compared approximately,
-- derivations compared exactly, element-wise.
prop_nbestMultiDerivation s1 s2 s3 =
    semiRingProps (s1, s2, s3)
       (\(ViterbiNBest a) (ViterbiNBest b) ->
            and $ zipWith
                  (\(Weighted (a,b)) (Weighted (a',b')) ->
                       doubEq a a' && b == b') a b)
    where types = ((s1,s2,s3):: (ViterbiNBestDerivation Ten String, ViterbiNBestDerivation Ten String, ViterbiNBestDerivation Ten String))
where types = ((s1,s2,s3):: (ViterbiNBestDerivation Ten String, ViterbiNBestDerivation Ten String, ViterbiNBestDerivation Ten String)) | srush/SemiRings | tests/Tests.hs | bsd-3-clause | 5,326 | 0 | 14 | 1,458 | 1,916 | 1,108 | 808 | 105 | 1 |
-- |An 'RdfParser' implementation for the Turtle format
-- <http://www.w3.org/TeamSubmission/turtle/>.
module Text.RDF.RDF4H.TurtleParser(
TurtleParser(TurtleParser)
)
where
import Data.Char (toLower,toUpper)
import Data.RDF.Types
import Data.RDF.Namespace
import Text.RDF.RDF4H.ParserUtils
import Text.Parsec
import Text.Parsec.Text
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Data.Sequence(Seq, (|>))
import qualified Data.Sequence as Seq
import qualified Data.Foldable as F
import Data.Char (isDigit)
import Control.Monad
import Data.Maybe (fromMaybe)
-- |An 'RdfParser' implementation for parsing RDF in the
-- Turtle format. It is an implementation of W3C Turtle grammar rules at
-- http://www.w3.org/TR/turtle/#sec-grammar-grammar .
-- It takes optional arguments representing the base URL to use
-- for resolving relative URLs in the document (may be overridden in the document
-- itself using the \@base directive), and the URL to use for the document itself
-- for resolving references to <> in the document.
-- To use this parser, pass a 'TurtleParser' value as the first argument to any of
-- the 'parseString', 'parseFile', or 'parseURL' methods of the 'RdfParser' type
-- class.
-- | A Turtle parser configuration: the optional base URL (for resolving
-- relative IRIs) and the optional document URL (for resolving @<>@).
data TurtleParser = TurtleParser (Maybe BaseUrl) (Maybe T.Text)

-- |'TurtleParser' is an instance of 'RdfParser': each method unpacks the
-- two URLs and delegates to the corresponding worker function.
instance RdfParser TurtleParser where
  parseString (TurtleParser bUrl dUrl) = parseString' bUrl dUrl
  parseFile (TurtleParser bUrl dUrl) = parseFile' bUrl dUrl
  parseURL (TurtleParser bUrl dUrl) = parseURL' bUrl dUrl
-- | The octuple of mutable parser state threaded through every production.
type ParseState =
  (Maybe BaseUrl,  -- the current BaseUrl, may be Nothing initially, but not after it is once set
   Maybe T.Text,   -- the docUrl, which never changes and is used to resolve <> in the document.
   Int,            -- the id counter, containing the value of the next id to be used
   PrefixMappings, -- the mappings from prefix to URI that are encountered while parsing
   [Subject],      -- stack of current subject nodes, if we have parsed a subject but not finished the triple
   [Predicate],    -- stack of current predicate nodes, if we've parsed a predicate but not finished the triple
   [Bool],         -- a stack of values to indicate that we're processing a (possibly nested) collection; top True indicates just started (on first element)
   Seq Triple)     -- the triples encountered while parsing; always added to on the right side

-- grammar rule: [1] turtleDoc
-- | Parse a whole document: zero or more statements followed by EOF,
-- then return the triples and prefix mappings accumulated in the state.
t_turtleDoc :: GenParser ParseState (Seq Triple, PrefixMappings)
t_turtleDoc =
  many t_statement >> (eof <?> "eof") >> getState >>= \(_, _, _, pms, _, _, _, ts) -> return (ts, pms)
-- grammar rule: [2] statement
-- | A statement is a directive, a '.'-terminated triples production, or
-- blank space (whitespace and comments).
t_statement :: GenParser ParseState ()
t_statement = d <|> t <|> void (many1 t_ws <?> "blankline-whitespace")
  where
    d = void
          (try t_directive >>
           (many t_ws <?> "directive-whitespace2"))
    t = void
          (t_triples >> (many t_ws <?> "triple-whitespace1") >>
           (char '.' <?> "end-of-triple-period") >>
           (many t_ws <?> "triple-whitespace2"))

-- grammar rule: [6] triples
-- | One subject with its predicate-object list; afterwards the
-- subject/predicate stacks are cleared for the next production.
t_triples :: GenParser ParseState ()
t_triples = t_subject >> (many1 t_ws <?> "subject-predicate-whitespace") >> t_predicateObjectList >> resetSubjectPredicate

-- grammar rule: [3] directive
-- | Any of the four prefix/base directive forms (Turtle or SPARQL style).
t_directive :: GenParser ParseState ()
t_directive = t_prefixID <|> t_base <|> t_sparql_prefix <|> t_sparql_base

-- grammar rule: [135s] iri
-- | An IRI is either an IRIREF (@<...>@) or a prefixed name.
t_iri :: GenParser ParseState T.Text
t_iri = try t_iriref <|> t_prefixedName
-- grammar rule: [136s] PrefixedName
-- | Parse a prefixed name: a full @prefix:local@ name, or a bare
-- @prefix:@ with an empty local part.
-- (The previous version bound the result in a @do@ block only to
-- immediately @return@ it; the redundant wrapper is removed.)
t_prefixedName :: GenParser ParseState T.Text
t_prefixedName = try t_pname_ln <|> try t_pname_ns
-- grammar rule: [4] prefixID
-- | Parse an @\@prefix@ directive (terminated by '.'), registering the
-- prefix -> URI mapping, absolutized against the current base/doc URL.
t_prefixID :: GenParser ParseState ()
t_prefixID =
  do try (string "@prefix" <?> "@prefix-directive")
     pre <- (many1 t_ws <?> "whitespace-after-@prefix") >> option T.empty t_pn_prefix
     char ':' >> (many1 t_ws <?> "whitespace-after-@prefix-colon")
     uriFrag <- t_iriref
     (many t_ws <?> "prefixID-whitespace")
     (char '.' <?> "end-of-prefixID-period")
     (bUrl, dUrl, _, PrefixMappings pms, _, _, _, _) <- getState
     updatePMs $ Just (PrefixMappings $ Map.insert pre (absolutizeUrl bUrl dUrl uriFrag) pms)
     return ()
-- grammar rule: [6s] sparqlPrefix
-- | SPARQL-style @PREFIX@ directive: case-insensitive keyword and,
-- unlike '@prefix', no terminating period. Registers the mapping exactly
-- like 't_prefixID'.
t_sparql_prefix :: GenParser ParseState ()
t_sparql_prefix =
  do try (caseInsensitiveString "PREFIX" <?> "@prefix-directive")
     pre <- (many1 t_ws <?> "whitespace-after-@prefix") >> option T.empty t_pn_prefix
     char ':' >> (many1 t_ws <?> "whitespace-after-@prefix-colon")
     uriFrag <- t_iriref
     (bUrl, dUrl, _, PrefixMappings pms, _, _, _, _) <- getState
     updatePMs $ Just (PrefixMappings $ Map.insert pre (absolutizeUrl bUrl dUrl uriFrag) pms)
     return ()
-- grammar rule: [5] base
-- | Parse an @\@base@ directive (terminated by '.'), updating the
-- current base URL in the parser state.
t_base :: GenParser ParseState ()
t_base =
  do try (string "@base" <?> "@base-directive")
     many1 t_ws <?> "whitespace-after-@base"
     urlFrag <- t_iriref
     (many t_ws <?> "base-whitespace")
     (char '.' <?> "end-of-base-period")
     bUrl <- currBaseUrl
     dUrl <- currDocUrl
     updateBaseUrl (Just $ Just $ newBaseUrl bUrl (absolutizeUrl bUrl dUrl urlFrag))

-- grammar rule: [5s] sparqlBase
-- | SPARQL-style @BASE@ directive: case-insensitive keyword and, unlike
-- '@base', no terminating period.
t_sparql_base :: GenParser ParseState ()
t_sparql_base =
  do try (caseInsensitiveString "BASE" <?> "@sparql-base-directive")
     many1 t_ws <?> "whitespace-after-BASE"
     urlFrag <- t_iriref
     bUrl <- currBaseUrl
     dUrl <- currDocUrl
     updateBaseUrl (Just $ Just $ newBaseUrl bUrl (absolutizeUrl bUrl dUrl urlFrag))
-- | Parse a verb (predicate position): either an IRI predicate or the
-- keyword @a@ (shorthand for rdf:type); the node is pushed onto the
-- predicate stack.
t_verb :: GenParser ParseState ()
t_verb = (try t_predicate <|> (char 'a' >> return rdfTypeNode)) >>= pushPred

-- grammar rule: [11] predicate
-- | A predicate is an IRI wrapped as a 'UNode'.
t_predicate :: GenParser ParseState Node
t_predicate = liftM UNode (t_iri <?> "resource")

-- | Parse a blank-node label @_:name@, keeping the @_:@ prefix in the
-- returned text.
t_nodeID :: GenParser ParseState T.Text
t_nodeID = do { try (string "_:"); cs <- t_name; return $! "_:" `T.append` cs }
-- grammar rules: [139s] PNAME_NS
t_pname_ns :: GenParser ParseState T.Text
t_pname_ns =do
pre <- option T.empty (try t_pn_prefix)
char ':'
return pre
-- grammar rules: [168s] PN_LOCAL
-- | Parse the local part of a prefixed name: a leading name char, ':',
-- digit, or escape, followed by any number of name chars, '.', ':', or
-- escapes.
-- NOTE(review): this accepts a trailing '.', which grammar rule [168s]
-- disallows -- confirm whether callers rely on the looser behavior.
t_pn_local :: GenParser ParseState T.Text
t_pn_local = do
  x <- t_pn_chars_u_str <|> string ":" <|> satisfy_str <|> t_plx
  xs <- option "" $ do
          ys <- many ( t_pn_chars_str <|> string "." <|> string ":" <|> t_plx )
          return (concat ys)
  return (T.pack (x ++ xs))
  where
    -- single-char parsers lifted to one-element strings
    satisfy_str = satisfy (flip in_range [('0', '9')]) >>= \c -> return [c]
    t_pn_chars_str = t_pn_chars >>= \c -> return [c]
    t_pn_chars_u_str = t_pn_chars_u >>= \c -> return [c]
    -- PERCENT | PN_LOCAL_ESC
    -- grammar rules: [169s] PLX
    t_plx = t_percent <|> t_pn_local_esc_str
      where
        t_pn_local_esc_str = do
          c <- t_pn_local_esc
          return ([c])
        -- '%' HEX HEX
        -- grammar rules: [170s] PERCENT
        -- NOTE(review): the leading '%' is consumed but NOT included in
        -- the returned string, so "%AB" becomes "AB" in the local name;
        -- this looks like a bug against rule [170s] -- confirm.
        t_percent = do
          char '%'
          h1 <- t_hex
          h2 <- t_hex
          return ([h1,h2])
    -- grammar rules: [172s] PN_LOCAL_ESC
    t_pn_local_esc = char '\\' >> oneOf "_~.-!$&'()*+,;=/?#@%"
-- grammar rules: [140s] PNAME_LN
-- | Parse @prefix:local@ and expand the prefix against the current
-- prefix mappings; raises 'error' on an unresolvable prefix.
t_pname_ln :: GenParser ParseState T.Text
t_pname_ln =
  do pre <- t_pname_ns
     name <- t_pn_local
     (bUrl, _, _, pms, _, _, _, _) <- getState
     case resolveQName bUrl pre pms of
       Just n -> return $ n `T.append` name
       Nothing -> error ("Cannot resolve QName prefix: " ++ T.unpack pre)
-- grammar rule: [10] subject
-- | Parse a triple's subject and push it on the subject stack: an IRI,
-- @[]@ (fresh blank node), a labeled blank node, or a blank node with an
-- embedded predicate-object list.
t_subject :: GenParser ParseState ()
t_subject =
  iri <|>
  simpleBNode <|>
  nodeId <|>
  between (char '[') (char ']') poList
  where
    iri = liftM UNode (try t_iri <?> "subject resource") >>= pushSubj
    nodeId = liftM BNode (try t_nodeID <?> "subject nodeID") >>= pushSubj
    simpleBNode = try (string "[]") >> nextIdCounter >>= pushSubj . BNodeGen
    -- [ po-list ]: a generated blank node becomes the subject of the
    -- embedded predicate-object list (and stays on the stack)
    poList = void
               (nextIdCounter >>= pushSubj . BNodeGen >> many t_ws >>
                t_predicateObjectList >>
                many t_ws)
-- verb objectList (';' (verb objectList)?)*
--
-- verb ws+ objectList ( ws* ';' ws* verb ws+ objectList )* (ws* ';')?
-- grammar rule: [7] predicateObjectlist
-- | Parse @verb objectList (';' (verb objectList)?)*@: one or more
-- verb/object-list groups, separated (and optionally terminated) by ';'.
-- Each group pushes its verb, parses the objects (which emit triples),
-- then pops the verb again.
-- (The previous @do sepEndBy1 ...; return ()@ is replaced by 'void'.)
t_predicateObjectList :: GenParser ParseState ()
t_predicateObjectList =
  void (sepEndBy1 (try (t_verb >> many1 t_ws >> t_objectList >> popPred))
                  (try (many t_ws >> char ';' >> many t_ws)))
-- grammar rule: [8] objectlist
-- grammar rule: [8] objectlist
t_objectList :: GenParser ParseState ()
t_objectList = -- t_object actually adds the triples
  void
    ((t_object <?> "object") >>
     many (try (many t_ws >> char ',' >> many t_ws >> t_object)))

-- grammar rule: [12] object
-- | Parse one object and emit its triple. Inside a collection the
-- rdf:first/rdf:rest chaining is woven in around the object itself.
t_object :: GenParser ParseState ()
t_object =
  do inColl <- isInColl          -- whether this object is in a collection
     onFirstItem <- onCollFirstItem -- whether we're on the first item of the collection
     let processObject = (t_literal >>= addTripleForObject) <|>
                         (liftM UNode t_iri >>= addTripleForObject) <|>
                         blank_as_obj <|> t_collection
     case (inColl, onFirstItem) of
       -- not in a collection: just the object triple
       (False, _) -> processObject
       -- first collection item: start the cons cell chain
       (True, True) -> liftM BNodeGen nextIdCounter >>= \bSubj -> addTripleForObject bSubj >>
                       pushSubj bSubj >> pushPred rdfFirstNode >> processObject >> collFirstItemProcessed
       -- subsequent item: link previous cell via rdf:rest, then recurse
       (True, False) -> liftM BNodeGen nextIdCounter >>= \bSubj -> pushPred rdfRestNode >>
                        addTripleForObject bSubj >> popPred >> popSubj >>
                        pushSubj bSubj >> processObject
-- collection: '(' ws* itemList? ws* ')'
-- itemList: object (ws+ object)*
-- grammar rule: [15] collection
-- | Parse a collection @( o1 o2 ... )@, desugared into the
-- rdf:first / rdf:rest / rdf:nil list encoding; @()@ is just rdf:nil.
t_collection:: GenParser ParseState ()
t_collection =
  -- ( object1 object2 ) is short for:
  -- [ rdf:first object1; rdf:rest [ rdf:first object2; rdf:rest rdf:nil ] ]
  -- ( ) is short for the resource: rdf:nil
  between (char '(') (char ')') $
    do beginColl
       many t_ws
       emptyColl <- option True (try t_object >> many t_ws >> return False)
       if emptyColl then void (addTripleForObject rdfNilNode) else
         void
           (many (many t_ws >> try t_object >> many t_ws) >> popPred >>
            pushPred rdfRestNode >>
            addTripleForObject rdfNilNode >>
            popPred >> popSubj)
       finishColl
       return ()

-- | Parse a blank node in object position: @_:label@, @[]@, or a
-- @[ pred obj; ... ]@ form, emitting the triple(s) it implies.
blank_as_obj :: GenParser ParseState ()
blank_as_obj =
  -- if a node id, like _:a1, then create a BNode and add the triple
  (liftM BNode t_nodeID >>= addTripleForObject) <|>
  -- if a simple blank like [], do likewise
  (genBlank >>= addTripleForObject) <|>
  -- if a blank containing a predicateObjectList, like [ :b :c; :b :d ]
  poList
  where
    genBlank = liftM BNodeGen (try (string "[]") >> nextIdCounter)
    poList = between (char '[') (char ']') $
             liftM BNodeGen nextIdCounter >>= \bSubj -> -- generate new bnode
             void
               (addTripleForObject bSubj >>    -- add triple with bnode as object
                many t_ws >> pushSubj bSubj >> -- push bnode as new subject
                t_predicateObjectList >> popSubj >> many t_ws) -- process polist, which uses bnode as subj, then pop bnode
-- | Well-known RDF vocabulary nodes used when expanding the @a@ keyword
-- and collection syntax.
rdfTypeNode, rdfNilNode, rdfFirstNode, rdfRestNode :: Node
rdfTypeNode = UNode $ mkUri rdf "type"
rdfNilNode = UNode $ mkUri rdf "nil"
rdfFirstNode = UNode $ mkUri rdf "first"
rdfRestNode = UNode $ mkUri rdf "rest"

-- | XSD datatype URIs attached to numeric and boolean literals.
xsdIntUri, xsdDoubleUri, xsdDecimalUri, xsdBooleanUri :: T.Text
xsdIntUri = mkUri xsd "integer"
xsdDoubleUri = mkUri xsd "double"
xsdDecimalUri = mkUri xsd "decimal"
xsdBooleanUri = mkUri xsd "boolean"
-- | Parse any Turtle literal: a (possibly typed or language-tagged)
-- string, or a numeric/boolean literal typed with the matching XSD URI.
t_literal :: GenParser ParseState Node
t_literal =
  try str_literal <|>
  liftM (`mkLNode` xsdIntUri) (try t_integer) <|>
  liftM (`mkLNode` xsdDoubleUri) (try t_double) <|>
  liftM (`mkLNode` xsdDecimalUri) (try t_decimal) <|>
  liftM (`mkLNode` xsdBooleanUri) t_boolean
  where
    -- first argument is the lexical form, second the datatype URI
    mkLNode :: T.Text -> T.Text -> Node
    mkLNode bsType bs' = LNode (typedL bsType bs')

-- | A quoted string, optionally followed by @^^type@ or an @\@lang@ tag;
-- otherwise it becomes a plain literal.
str_literal :: GenParser ParseState Node
str_literal =
  do str <- t_quotedString <?> "quotedString"
     liftM (LNode . typedL str)
           (try (count 2 (char '^')) >> t_iri) <|>
       liftM (lnode . plainLL str) (char '@' >> t_language) <|>
       return (lnode $ plainL str)
-- | A string literal: long (triple-quoted) or plain.
t_quotedString :: GenParser ParseState T.Text
t_quotedString = try t_longString <|> t_string

-- a non-long string: any number of scharacters (echaracter without ") inside doublequotes or singlequotes.
t_string :: GenParser ParseState T.Text
t_string = liftM T.concat (between (char '"') (char '"') (many t_scharacter_in_dquot) <|> between (char '\'') (char '\'') (many t_scharacter_in_squot))

-- | A long string: characters between a pair of @"""@ or @'''@ delimiters.
-- NOTE(review): 'longString_char' only has special handling for embedded
-- double quotes, so @'''@-delimited strings containing a single @'@ may
-- not parse -- confirm against the test suite.
t_longString :: GenParser ParseState T.Text
t_longString =
  try ( do { tripleQuoteDbl;
             strVal <- liftM T.concat (many longString_char);
             tripleQuoteDbl;
             return strVal }) <|>
  try ( do { tripleQuoteSingle;
             strVal <- liftM T.concat (many longString_char);
             tripleQuoteSingle;
             return strVal })
  where
    tripleQuoteDbl = count 3 (char '"')
    tripleQuoteSingle = count 3 (char '\'')
-- | An integer literal: optional sign, digits, not followed by '.'.
t_integer :: GenParser ParseState T.Text
t_integer =
  do sign <- sign_parser <?> "+-"
     ds <- many1 digit <?> "digit"
     notFollowedBy (char '.')
     -- integer must be in canonical format, with no leading plus sign or leading zero
     return $! ( T.pack sign `T.append` T.pack ds)

-- grammar rule: [21] DOUBLE
-- | A double: @digits '.' digits exp@, @'.' digits exp@, or @digits exp@;
-- the exponent is mandatory in all three forms.
t_double :: GenParser ParseState T.Text
t_double =
  do sign <- sign_parser <?> "+-"
     rest <- try (do { ds <- many1 digit <?> "digit";
                       char '.';
                       ds' <- many digit <?> "digit";
                       e <- t_exponent <?> "exponent";
                       return ( T.pack ds `T.snoc` '.' `T.append` T.pack ds' `T.append` e) }) <|>
             try (do { char '.';
                       ds <- many1 digit <?> "digit";
                       e <- t_exponent <?> "exponent";
                       return ('.' `T.cons` T.pack ds `T.append` e) }) <|>
             try (do { ds <- many1 digit <?> "digit";
                       e <- t_exponent <?> "exponent";
                       return ( T.pack ds `T.append` e) })
     return $! T.pack sign `T.append` rest
-- | An optional leading sign, returned as a (possibly empty) string.
sign_parser :: GenParser ParseState String
sign_parser = option "" (fmap (: []) (oneOf "-+"))
-- | A decimal literal: optional sign, then digits with a '.' (either
-- side possibly empty) or plain digits.
t_decimal :: GenParser ParseState T.Text
t_decimal =
  do sign <- sign_parser
     rest <- try (do ds <- many digit <?> "digit"; char '.'; ds' <- option "" (many digit); return (ds ++ ('.':ds')))
             <|> try (do { char '.'; ds <- many1 digit <?> "digit"; return ('.':ds) })
             <|> many1 digit <?> "digit"
     return $ T.pack sign `T.append` T.pack rest

-- | An exponent part: @e@/@E@, optional sign, one or more digits.
t_exponent :: GenParser ParseState T.Text
t_exponent = do e <- oneOf "eE"
                s <- option "" (oneOf "-+" >>= \c -> return [c])
                ds <- many1 digit;
                return $! (e `T.cons` ( T.pack s `T.append` T.pack ds))

-- | The keyword @true@ or @false@.
t_boolean :: GenParser ParseState T.Text
t_boolean =
  try (liftM T.pack (string "true") <|>
       liftM T.pack (string "false"))
-- | A line comment: '#' through to (but not including) the end of line.
t_comment :: GenParser ParseState ()
t_comment = do
  _ <- char '#'
  _ <- many (satisfy (\ch -> ch /= '\n' && ch /= '\r'))
  return ()
-- | One unit of whitespace: a single whitespace character or a comment.
t_ws :: GenParser ParseState ()
t_ws =
    (void (try (char '\t' <|> char '\n' <|> char '\r' <|> char ' '))
    <|> try t_comment)
    <?> "whitespace-or-comment"

-- | A language tag: lowercase letters, then '-'-separated alphanumeric
-- subtags (e.g. @en@, @en-us@).
t_language :: GenParser ParseState T.Text
t_language =
  do initial <- many1 lower;
     rest <- many (do {char '-'; cs <- many1 (lower <|> digit); return ( T.pack ('-':cs))})
     return $! ( T.pack initial `T.append` T.concat rest)

-- | Generic identifier parser: one @initial@ char then many @rest@ chars.
identifier :: GenParser ParseState Char -> GenParser ParseState Char -> GenParser ParseState T.Text
identifier initial rest = initial >>= \i -> many rest >>= \r -> return ( T.pack (i:r))
-- grammar rule: [167s] PN_PREFIX
-- | A namespace prefix: a PN_CHARS_BASE char followed by name chars or
-- dots.
-- NOTE(review): the 'option' is redundant ('many' never fails), and a
-- trailing '.' is accepted although grammar rule [167s] forbids one --
-- confirm before tightening.
t_pn_prefix :: GenParser ParseState T.Text
t_pn_prefix = do
  i <- try t_pn_chars_base
  r <- option "" (many (try t_pn_chars <|> char '.'))
  return (T.pack (i:r))

-- | A blank-node name: PN_CHARS_U head, PN_CHARS tail.
t_name :: GenParser ParseState T.Text
t_name = identifier t_pn_chars_u t_pn_chars

-- | An IRIREF: a (possibly relative) URI between angle brackets.
t_iriref :: GenParser ParseState T.Text
t_iriref = between (char '<') (char '>') t_relativeURI
-- | Parse URI characters and resolve the fragment against the current
-- base/document URL.
t_relativeURI :: GenParser ParseState T.Text
t_relativeURI =
  do frag <- liftM (T.pack . concat) (many t_ucharacter)
     bUrl <- currBaseUrl
     dUrl <- currDocUrl
     return $ absolutizeUrl bUrl dUrl frag

-- We make this String rather than T.Text because we want
-- t_relativeURI (the only place it's used) to have chars so that
-- when it creates a T.Text it can all be in one chunk.
t_ucharacter :: GenParser ParseState String
t_ucharacter =
  try (liftM T.unpack unicode_escape) <|>
  try (string "\\>") <|>
  liftM T.unpack (non_ctrl_char_except ">")

-- | PN_CHARS: PN_CHARS_U plus '-', middle dot, digits, and the combining
-- character ranges of the grammar.
t_pn_chars :: GenParser ParseState Char
t_pn_chars = t_pn_chars_u <|> char '-' <|> char '\x00B7' <|> satisfy f
  where
    f = flip in_range [('0', '9'), ('\x0300', '\x036F'), ('\x203F', '\x2040')]
-- | One logical character inside a long (triple-quoted) string.
longString_char :: GenParser ParseState T.Text
longString_char =
  specialChar <|>        -- \r|\n|\t as single char
  try escapedChar <|>    -- an backslash-escaped tab, newline, linefeed, backslash or doublequote
  try twoDoubleQuote <|> -- two doublequotes not followed by a doublequote
  try oneDoubleQuote <|> -- a single doublequote
  safeNonCtrlChar <|>    -- anything but a single backslash or doublequote
  try unicode_escape     -- a unicode escape sequence (\uxxxx or \Uxxxxxxxx)
  where
    specialChar = oneOf "\t\n\r" >>= bs1
    escapedChar =
      do char '\\'
         (char 't' >> bs1 '\t') <|> (char 'n' >> bs1 '\n') <|> (char 'r' >> bs1 '\r') <|>
           (char '\\' >> bs1 '\\') <|> (char '"' >> bs1 '"')
    twoDoubleQuote = string "\"\"" >> notFollowedBy (char '"') >> bs "\"\""
    oneDoubleQuote = char '"' >> notFollowedBy (char '"') >> bs1 '"'
    safeNonCtrlChar = non_ctrl_char_except "\\\""

-- | Lift a single char into a parser result.
bs1 :: Char -> GenParser ParseState T.Text
bs1 = return . T.singleton

-- | Lift a 'String' into a parser result.
bs :: String -> GenParser ParseState T.Text
bs = return . T.pack
-- grammar rule: [163s] PN_CHARS_BASE
-- | PN_CHARS_BASE: ASCII letters plus the Unicode ranges permitted by
-- the Turtle grammar.
t_pn_chars_base :: GenParser ParseState Char
t_pn_chars_base = try $ satisfy $ flip in_range blocks
  where
    blocks = [('A', 'Z'), ('a', 'z'), ('\x00C0', '\x00D6'),
              ('\x00D8', '\x00F6'), ('\x00F8', '\x02FF'),
              ('\x0370', '\x037D'), ('\x037F', '\x1FFF'),
              ('\x200C', '\x200D'), ('\x2070', '\x218F'),
              ('\x2C00', '\x2FEF'), ('\x3001', '\xD7FF'),
              ('\xF900', '\xFDCF'), ('\xFDF0', '\xFFFD'),
              ('\x10000', '\xEFFFF')]

-- grammar rule: [164s] PN_CHARS_U
-- | PN_CHARS_BASE or underscore.
t_pn_chars_u :: GenParser ParseState Char
t_pn_chars_u = t_pn_chars_base <|> char '_'
-- grammar rules: [171s] HEX
-- | Parse one hexadecimal digit. Per Turtle grammar rule [171s],
-- HEX ::= [0-9] | [A-F] | [a-f], so lowercase digits are accepted too
-- (the previous version rejected a-f).
t_hex :: GenParser ParseState Char
t_hex = satisfy (\c -> isDigit c || (c >= 'A' && c <= 'F') || (c >= 'a' && c <= 'f')) <?> "hexadecimal digit"
-- characters used in (non-long) strings; any echaracters except ", or an escaped \"
-- echaracter - #x22 ) | '\"'
-- | One character of a double-quoted (non-long) string: @\t \n \r \" \'@
-- are decoded, unicode escapes pass through unexpanded, and any other
-- non-control character except backslash/doublequote is literal.
t_scharacter_in_dquot :: GenParser ParseState T.Text
t_scharacter_in_dquot =
  (try (string "\\\"") >> return (T.singleton '"'))
  <|> (try (string "\\'") >> return (T.singleton '\''))
  <|> try (do {char '\\';
               (char 't' >> return (T.singleton '\t')) <|>
                 (char 'n' >> return (T.singleton '\n')) <|>
                 (char 'r' >> return (T.singleton '\r'))}) -- echaracter part 1
  <|> unicode_escape
  <|> (non_ctrl_char_except "\\\"" >>= \s -> return $! s) -- echaracter part 2 minus "

-- characters used in (non-long) strings; any echaracters except ', or an escaped \'
-- echaracter - #x22 ) | '\''
-- | Single-quoted counterpart of 't_scharacter_in_dquot'.
t_scharacter_in_squot :: GenParser ParseState T.Text
t_scharacter_in_squot =
  (try (string "\\'") >> return (T.singleton '\''))
  <|> (try (string "\\\"") >> return (T.singleton '"'))
  <|> try (do {char '\\';
               (char 't' >> return (T.singleton '\t')) <|>
                 (char 'n' >> return (T.singleton '\n')) <|>
                 (char 'r' >> return (T.singleton '\r'))}) -- echaracter part 1
  <|> unicode_escape
  <|> (non_ctrl_char_except "\\'" >>= \s -> return $! s) -- echaracter part 2 minus '
-- | Parse a backslash escape that is passed through verbatim: an escaped
-- backslash (@\\\\@) or a unicode escape (@\\uXXXX@ / @\\UXXXXXXXX@),
-- returned unexpanded with the leading backslash kept.
-- (The previous version built @T.singleton '\\'@ only to discard it via
-- '>>'; that dead value is removed, behavior is unchanged.)
unicode_escape :: GenParser ParseState T.Text
unicode_escape =
  char '\\' >>
  ((char '\\' >> return "\\\\") <|>
   (char 'u' >> count 4 t_hex >>= \cs -> return $! "\\u" `T.append` T.pack cs) <|>
   (char 'U' >> count 8 t_hex >>= \cs -> return $! "\\U" `T.append` T.pack cs))
-- | Any single character except ASCII control characters and the
-- characters in @cs@, as a singleton 'T.Text'. The upper bound
-- @'\1114111'@ is 'maxBound' for 'Char' and is always true; it is kept
-- for documentation value only.
non_ctrl_char_except :: String -> GenParser ParseState T.Text
non_ctrl_char_except cs =
  liftM T.singleton
        (satisfy (\ c -> c <= '\1114111' && (c >= ' ' && c `notElem` cs)))
{-# INLINE in_range #-}
-- | Is the character inside any of the given inclusive ranges?
in_range :: Char -> [(Char, Char)] -> Bool
in_range ch = any within
  where within (lo, hi) = lo <= ch && ch <= hi
-- | Compute the new base URL: with no existing base the new URL is taken
-- as-is; otherwise it is resolved against the old base.
newBaseUrl :: Maybe BaseUrl -> T.Text -> BaseUrl
newBaseUrl Nothing url = BaseUrl url
newBaseUrl (Just (BaseUrl bUrl)) url = BaseUrl $! mkAbsoluteUrl bUrl url

-- | Read the current base URL from the parser state.
currBaseUrl :: GenParser ParseState (Maybe BaseUrl)
currBaseUrl = getState >>= \(bUrl, _, _, _, _, _, _, _) -> return bUrl

-- | Read the (fixed) document URL from the parser state.
currDocUrl :: GenParser ParseState (Maybe T.Text)
currDocUrl = getState >>= \(_, dUrl, _, _, _, _, _, _) -> return dUrl

-- | Push a subject onto the subject stack.
pushSubj :: Subject -> GenParser ParseState ()
pushSubj s = getState >>= \(bUrl, dUrl, i, pms, ss, ps, cs, ts) ->
             setState (bUrl, dUrl, i, pms, s:ss, ps, cs, ts)
popSubj :: GenParser ParseState Subject
popSubj = getState >>= \(bUrl, dUrl, i, pms, ss, ps, cs, ts) ->
setState (bUrl, dUrl, i, pms, tail ss, ps, cs, ts) >>
when (null ss) (error "Cannot pop subject off empty stack.") >>
return (head ss)
-- | Push a predicate onto the predicate stack.
pushPred :: Predicate -> GenParser ParseState ()
pushPred p = getState >>= \(bUrl, dUrl, i, pms, ss, ps, cs, ts) ->
             setState (bUrl, dUrl, i, pms, ss, p:ps, cs, ts)
popPred :: GenParser ParseState Predicate
popPred = getState >>= \(bUrl, dUrl, i, pms, ss, ps, cs, ts) ->
setState (bUrl, dUrl, i, pms, ss, tail ps, cs, ts) >>
when (null ps) (error "Cannot pop predicate off empty stack.") >>
return (head ps)
-- | Are we currently inside (at any nesting depth of) a collection?
isInColl :: GenParser ParseState Bool
isInColl = getState >>= \(_, _, _, _, _, _, cs, _) -> return . not . null $ cs

-- | Overwrite the base URL when given @Just@; @Nothing@ leaves it alone.
updateBaseUrl :: Maybe (Maybe BaseUrl) -> GenParser ParseState ()
updateBaseUrl val = _modifyState val no no no no no

-- combines get_current and increment into a single function
nextIdCounter :: GenParser ParseState Int
nextIdCounter = getState >>= \(bUrl, dUrl, i, pms, s, p, cs, ts) ->
                setState (bUrl, dUrl, i+1, pms, s, p, cs, ts) >> return i

-- | Overwrite the prefix mappings when given @Just@.
updatePMs :: Maybe PrefixMappings -> GenParser ParseState ()
updatePMs val = _modifyState no no val no no no

-- Register that we have begun processing a collection
beginColl :: GenParser ParseState ()
beginColl = getState >>= \(bUrl, dUrl, i, pms, s, p, cs, ts) ->
            setState (bUrl, dUrl, i, pms, s, p, True:cs, ts)

-- | Are we positioned on the first item of the innermost collection?
onCollFirstItem :: GenParser ParseState Bool
onCollFirstItem = getState >>= \(_, _, _, _, _, _, cs, _) -> return (not (null cs) && head cs)

-- | Mark the innermost collection's first item as handled.
-- NOTE(review): the @_:cs@ pattern is partial; callers must guarantee a
-- collection is in progress (i.e. 'beginColl' was called).
collFirstItemProcessed :: GenParser ParseState ()
collFirstItemProcessed = getState >>= \(bUrl, dUrl, i, pms, s, p, _:cs, ts) ->
                         setState (bUrl, dUrl, i, pms, s, p, False:cs, ts)
-- Register that a collection is finished being processed; the bool value
-- in the monad is *not* the value that was popped from the stack, but whether
-- we are still processing a parent collection or have finished processing
-- all collections and are no longer in a collection at all.
-- | Pop the collection stack; the returned 'Bool' says whether we are
-- still inside some (parent) collection afterwards.
finishColl :: GenParser ParseState Bool
finishColl = getState >>= \(bUrl, dUrl, i, pms, s, p, cs, ts) ->
             let cs' = drop 1 cs
             in setState (bUrl, dUrl, i, pms, s, p, cs', ts) >> return (not $ null cs')

-- Alias for Nothing for use with _modifyState calls, which can get very long with
-- many Nothing values.
no :: Maybe a
no = Nothing

-- Clear the subject and predicate stacks of the ParseState (done at the
-- end of each triples production).
resetSubjectPredicate :: GenParser ParseState ()
resetSubjectPredicate =
  getState >>= \(bUrl, dUrl, n, pms, _, _, cs, ts) ->
  setState (bUrl, dUrl, n, pms, [], [], cs, ts)
-- Modifies the current parser state by updating any state values among the parameters
-- that have non-Nothing values.
-- | Update every state component for which a @Just@ value was supplied;
-- @Nothing@ components are left unchanged.
_modifyState :: Maybe (Maybe BaseUrl) -> Maybe (Int -> Int) -> Maybe PrefixMappings ->
                Maybe Subject -> Maybe Predicate -> Maybe (Seq Triple) ->
                GenParser ParseState ()
_modifyState mb_bUrl mb_n mb_pms mb_subj mb_pred mb_trps =
  do (_bUrl, _dUrl, _n, _pms, _s, _p, _cs, _ts) <- getState
     setState (fromMaybe _bUrl mb_bUrl,
               _dUrl,
               -- BUG FIX: apply the supplied counter-update function.
               -- Previously this read @maybe _n (const _n) mb_n@, which
               -- silently ignored the function and left the counter
               -- unchanged. (No caller in this file passes a @Just@
               -- function today, so existing behavior is unaffected.)
               maybe _n ($ _n) mb_n,
               fromMaybe _pms mb_pms,
               maybe _s (: _s) mb_subj,
               maybe _p (: _p) mb_pred,
               _cs,
               fromMaybe _ts mb_trps)
-- | Emit a triple built from the tops of the subject and predicate
-- stacks plus the given object, appending it to the triple sequence;
-- raises an 'error' if either stack is empty.
addTripleForObject :: Object -> GenParser ParseState ()
addTripleForObject obj =
  do (bUrl, dUrl, i, pms, ss, ps, cs, ts) <- getState
     when (null ss) $
       error $ "No Subject with which to create triple for: " ++ show obj
     when (null ps) $
       error $ "No Predicate with which to create triple for: " ++ show obj
     setState (bUrl, dUrl, i, pms, ss, ps, cs, ts |> Triple (head ss) (head ps) obj)
-- |Parse the document at the given location URL as a Turtle document, using an optional @BaseUrl@
-- as the base URI, and using the given document URL as the URI of the Turtle document itself.
--
-- The @BaseUrl@ is used as the base URI within the document for resolving any relative URI references.
-- It may be changed within the document using the @\@base@ directive. At any given point, the current
-- base URI is the most recent @\@base@ directive, or if none, the @BaseUrl@ given to @parseURL@, or
-- if none given, the document URL given to @parseURL@. For example, if the @BaseUrl@ were
-- @http:\/\/example.org\/@ and a relative URI of @\<b>@ were encountered (with no preceding @\@base@
-- directive), then the relative URI would expand to @http:\/\/example.org\/b@.
--
-- The document URL is for the purpose of resolving references to 'this document' within the document,
-- and may be different than the actual location URL from which the document is retrieved. Any reference
-- to @\<>@ within the document is expanded to the value given here. Additionally, if no @BaseUrl@ is
-- given and no @\@base@ directive has appeared before a relative URI occurs, this value is used as the
-- base URI against which the relative URI is resolved.
--
-- Returns either a @ParseFailure@ or a new RDF containing the parsed triples.
parseURL' :: forall rdf. (RDF rdf) =>
             Maybe BaseUrl   -- ^ The optional base URI of the document.
             -> Maybe T.Text -- ^ The document URI (i.e., the URI of the document itself); if Nothing, use location URI.
             -> String       -- ^ The location URI from which to retrieve the Turtle document.
             -> IO (Either ParseFailure rdf)
                -- ^ The parse result, which is either a @ParseFailure@ or the RDF
                --   corresponding to the Turtle document.
parseURL' bUrl docUrl = _parseURL (parseString' bUrl docUrl)
-- |Parse the given file as a Turtle document. The arguments and return type have the same semantics
-- as 'parseURL', except that the last @String@ argument corresponds to a filesystem location rather
-- than a location URI.
--
-- Returns either a @ParseFailure@ or a new RDF containing the parsed triples.
-- | Parse the file at the given path as Turtle: the file is read in full
-- (strict 'TIO.readFile') and handed to the document parser with a fresh
-- initial state.
parseFile' :: forall rdf. (RDF rdf) => Maybe BaseUrl -> Maybe T.Text -> String -> IO (Either ParseFailure rdf)
parseFile' bUrl docUrl fpath =
  TIO.readFile fpath >>= \bs' -> return $ handleResult bUrl (runParser t_turtleDoc initialState (maybe "" T.unpack docUrl) bs')
  where initialState = (bUrl, docUrl, 1, PrefixMappings Map.empty, [], [], [], Seq.empty)
-- |Parse the given string as a Turtle document. The arguments and return type have the same semantics
-- as <parseURL>, except that the last @String@ argument corresponds to the Turtle document itself as
-- a string rather than a location URI.
-- | Run the document parser over an in-memory Turtle string with a fresh
-- initial state (blank-node ids start at 1, no prefixes, empty stacks).
parseString' :: forall rdf. (RDF rdf) => Maybe BaseUrl -> Maybe T.Text -> T.Text -> Either ParseFailure rdf
parseString' bUrl docUrl ttlStr = handleResult bUrl (runParser t_turtleDoc initialState "" ttlStr)
  where initialState = (bUrl, docUrl, 1, PrefixMappings Map.empty, [], [], [], Seq.empty)

-- | Convert a parsec result into either a 'ParseFailure' (stringified
-- parse error) or an RDF value built from the triples and mappings.
handleResult :: RDF rdf => Maybe BaseUrl -> Either ParseError (Seq Triple, PrefixMappings) -> Either ParseFailure rdf
handleResult bUrl result =
  case result of
    (Left err) -> Left (ParseFailure $ show err)
    (Right (ts, pms)) -> Right $! mkRdf (F.toList ts) bUrl pms
--------------
-- auxiliary parsing functions
-- Match the lowercase or uppercase form of 'c'
-- Non-alphabetic characters match themselves, since toLower/toUpper
-- leave them unchanged.
caseInsensitiveChar :: Char -> GenParser ParseState Char
caseInsensitiveChar c = char (toLower c) <|> char (toUpper c)

-- Match the string 's', accepting either lowercase or uppercase form of each character
caseInsensitiveString :: String -> GenParser ParseState String
caseInsensitiveString s = try (mapM caseInsensitiveChar s) <?> "\"" ++ s ++ "\""
| LeifW/rdf4h | src/Text/RDF/RDF4H/TurtleParser.hs | bsd-3-clause | 29,131 | 0 | 20 | 6,734 | 8,125 | 4,257 | 3,868 | -1 | -1 |
{-# LANGUAGE GADTs #-}
-- | Provide a notion of fanout wherein a single input is passed to
-- several consumers.
module Data.Machine.Fanout (fanout, fanoutSteps) where
import Control.Applicative
import Control.Arrow
import Control.Monad (foldM)
import Data.Machine
import Data.Maybe (catMaybes)
import Data.Monoid
import Data.Profunctor.Unsafe ((#.))
import Data.Semigroup (Semigroup(sconcat))
import Data.List.NonEmpty (NonEmpty((:|)))
import Prelude
-- | Feed a value to a 'ProcessT' at an 'Await' 'Step'. If the
-- 'ProcessT' is awaiting a value, then its next step is
-- returned. Otherwise, the original process is returned.
feed :: Monad m => a -> ProcessT m a b -> m (Step (Is a) b (ProcessT m a b))
feed x m = do
  step <- runMachineT m
  case step of
    Await f Refl _ -> runMachineT (f x)
    other          -> return other
-- | Like 'Data.List.mapAccumL' but with a monadic accumulating
-- function.
--
-- Results are returned in input order, matching 'Data.List.mapAccumL'.
-- (The previous step function @(. ys) . (:)@ prepended each new element
-- to the difference list, so the result list came back reversed.)
mapAccumLM :: (Functor m, Monad m)
           => (acc -> x -> m (acc, y)) -> acc -> [x] -> m (acc, [y])
mapAccumLM f z = fmap (second ($ [])) . foldM aux (z,id)
  where aux (acc,ys) x = second ((ys .) . (:)) <$> f acc x
-- | Exhaust a sequence of all successive 'Yield' steps taken by a
-- 'MachineT'. Returns the list of yielded values and the next
-- (non-Yield) step of the machine.
--
-- NOTE(review): the accumulator is extended with @(o:) . rs@, which
-- prepends each new output, so the returned list is in *reverse*
-- yield order — confirm callers do not depend on yield order before
-- relying on this.
flushYields :: Monad m
            => Step k o (MachineT m k o) -> m ([o], Maybe (MachineT m k o))
flushYields = go id
  where go rs (Yield o s) = runMachineT s >>= go ((o:) . rs)
        -- A stopped machine yields no successor.
        go rs Stop = return (rs [], Nothing)
        -- Any other step ends the flush; re-wrap it as a machine.
        go rs s = return (rs [], Just $ encased s)
-- | Share inputs with each of a list of processes in lockstep. Any
-- values yielded by the processes are combined into a single yield
-- from the composite process.
--
-- Each awaited input is fed to every process; processes that 'Stop'
-- are dropped, and all values yielded in response to one input are
-- merged into a single output with 'sconcat'.
fanout :: (Functor m, Monad m, Semigroup r)
       => [ProcessT m a r] -> ProcessT m a r
fanout xs = encased $ Await (MachineT #. aux) Refl (fanout xs)
  where aux y = do (rs,xs') <- mapM (feed y) xs >>= mapAccumLM yields []
                   -- Continue with only the still-alive processes.
                   let nxt = fanout $ catMaybes xs'
                   case rs of
                     -- Nothing was yielded: step the successor directly.
                     [] -> runMachineT nxt
                     (r:rs') -> return $ Yield (sconcat $ r :| rs') nxt
        -- Collect one process's outputs; 'Nothing' drops the process.
        yields rs Stop = return (rs,Nothing)
        yields rs y@Yield{} = first (++ rs) <$> flushYields y
        yields rs a@Await{} = return (rs, Just $ encased a)
-- | Share inputs with each of a list of processes in lockstep. If
-- none of the processes yields a value, the composite process will
-- itself yield 'mempty'. The idea is to provide a handle on steps
-- only executed for their side effects. For instance, if you want to
-- run a collection of 'ProcessT's that await but don't yield some
-- number of times, you can use 'fanOutSteps . map (fmap (const ()))'
-- followed by a 'taking' process.
fanoutSteps :: (Functor m, Monad m, Monoid r)
            => [ProcessT m a r] -> ProcessT m a r
fanoutSteps xs = encased $ Await (MachineT #. aux) Refl (fanoutSteps xs)
  where aux y = do (rs,xs') <- mapM (feed y) xs >>= mapAccumLM yields []
                   -- Continue with only the still-alive processes.
                   let nxt = fanoutSteps $ catMaybes xs'
                   -- 'mconcat [] == mempty', so one Yield covers both the
                   -- "no output" and "some outputs" cases (previously an
                   -- explicit 'if null rs' with identical behaviour).
                   return $ Yield (mconcat rs) nxt
        -- Collect one process's outputs; 'Nothing' drops the process.
        yields rs Stop = return (rs,Nothing)
        yields rs y@Yield{} = first (++rs) <$> flushYields y
        yields rs a@Await{} = return (rs, Just $ encased a)
| treeowl/machines | src/Data/Machine/Fanout.hs | bsd-3-clause | 3,386 | 0 | 16 | 893 | 1,083 | 571 | 512 | 49 | 4 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.NV.VertexProgram3
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.NV.VertexProgram3 (
-- * Extension Support
glGetNVVertexProgram3,
gl_NV_vertex_program3,
-- * Enums
pattern GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS_ARB
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/NV/VertexProgram3.hs | bsd-3-clause | 668 | 0 | 5 | 91 | 47 | 36 | 11 | 7 | 0 |
module Control.Concurrent.AsCtrl
( module Control.Concurrent.AsCtrl
) where
-- generated by https://github.com/rvion/ride/tree/master/jetpack-gen
import qualified Control.Concurrent as I
-- Re-exports of "Control.Concurrent" under @ctrl_@ names, each preceded
-- by its type as a comment (signatures are kept as comments by the
-- generator). Grouped below: threads, Chan, MVar, QSem/QSemN.
-- ctrl_forkFinally :: forall a. IO a -> (Either SomeException a -> IO ()) -> IO ThreadId
ctrl_forkFinally = I.forkFinally
-- ctrl_forkOS :: IO () -> IO ThreadId
ctrl_forkOS = I.forkOS
-- ctrl_isCurrentThreadBound :: IO Bool
ctrl_isCurrentThreadBound = I.isCurrentThreadBound
-- ctrl_rtsSupportsBoundThreads :: Bool
ctrl_rtsSupportsBoundThreads = I.rtsSupportsBoundThreads
-- ctrl_runInBoundThread :: forall a. IO a -> IO a
ctrl_runInBoundThread = I.runInBoundThread
-- ctrl_runInUnboundThread :: forall a. IO a -> IO a
ctrl_runInUnboundThread = I.runInUnboundThread
-- ctrl_threadWaitRead :: Fd -> IO ()
ctrl_threadWaitRead = I.threadWaitRead
-- ctrl_threadWaitReadSTM :: Fd -> IO (STM (), IO ())
ctrl_threadWaitReadSTM = I.threadWaitReadSTM
-- ctrl_threadWaitWrite :: Fd -> IO ()
ctrl_threadWaitWrite = I.threadWaitWrite
-- ctrl_threadWaitWriteSTM :: Fd -> IO (STM (), IO ())
ctrl_threadWaitWriteSTM = I.threadWaitWriteSTM
-- Unbounded channels ('Chan').
-- ctrl_dupChan :: forall a. Chan a -> IO (Chan a)
ctrl_dupChan = I.dupChan
-- ctrl_getChanContents :: forall a. Chan a -> IO [a]
ctrl_getChanContents = I.getChanContents
-- ctrl_isEmptyChan :: forall a. Chan a -> IO Bool
ctrl_isEmptyChan = I.isEmptyChan
-- ctrl_newChan :: forall a. IO (Chan a)
ctrl_newChan = I.newChan
-- ctrl_readChan :: forall a. Chan a -> IO a
ctrl_readChan = I.readChan
-- ctrl_unGetChan :: forall a. Chan a -> a -> IO ()
ctrl_unGetChan = I.unGetChan
-- ctrl_writeChan :: forall a. Chan a -> a -> IO ()
ctrl_writeChan = I.writeChan
-- ctrl_writeList2Chan :: forall a. Chan a -> [a] -> IO ()
ctrl_writeList2Chan = I.writeList2Chan
-- Mutable variables ('MVar').
-- ctrl_addMVarFinalizer :: forall a. MVar a -> IO () -> IO ()
ctrl_addMVarFinalizer = I.addMVarFinalizer
-- ctrl_mkWeakMVar :: forall a. MVar a -> IO () -> IO (Weak (MVar a))
ctrl_mkWeakMVar = I.mkWeakMVar
-- ctrl_modifyMVar :: forall a b. MVar a -> (a -> IO (a, b)) -> IO b
ctrl_modifyMVar = I.modifyMVar
-- ctrl_modifyMVarMasked :: forall a b. MVar a -> (a -> IO (a, b)) -> IO b
ctrl_modifyMVarMasked = I.modifyMVarMasked
-- ctrl_modifyMVarMasked_ :: forall a. MVar a -> (a -> IO a) -> IO ()
ctrl_modifyMVarMasked_ = I.modifyMVarMasked_
-- ctrl_modifyMVar_ :: forall a. MVar a -> (a -> IO a) -> IO ()
ctrl_modifyMVar_ = I.modifyMVar_
-- ctrl_swapMVar :: forall a. MVar a -> a -> IO a
ctrl_swapMVar = I.swapMVar
-- ctrl_withMVar :: forall a b. MVar a -> (a -> IO b) -> IO b
ctrl_withMVar = I.withMVar
-- ctrl_withMVarMasked :: forall a b. MVar a -> (a -> IO b) -> IO b
ctrl_withMVarMasked = I.withMVarMasked
-- Quantity semaphores ('QSem' / 'QSemN').
-- ctrl_newQSem :: Int -> IO QSem
ctrl_newQSem = I.newQSem
-- ctrl_signalQSem :: QSem -> IO ()
ctrl_signalQSem = I.signalQSem
-- ctrl_waitQSem :: QSem -> IO ()
ctrl_waitQSem = I.waitQSem
-- ctrl_newQSemN :: Int -> IO QSemN
ctrl_newQSemN = I.newQSemN
-- ctrl_signalQSemN :: QSemN -> Int -> IO ()
ctrl_signalQSemN = I.signalQSemN
-- ctrl_waitQSemN :: QSemN -> Int -> IO ()
ctrl_waitQSemN = I.waitQSemN
-- ctrl_threadDelay :: Int -> IO ()
ctrl_threadDelay = I.threadDelay
-- ctrl_forkIO :: IO () -> IO ThreadId
ctrl_forkIO = I.forkIO
-- ctrl_forkIOWithUnmask :: ((forall a. IO a -> IO a) -> IO ()) -> IO ThreadId
ctrl_forkIOWithUnmask = I.forkIOWithUnmask
-- ctrl_forkOn :: Int -> IO () -> IO ThreadId
ctrl_forkOn = I.forkOn
-- ctrl_forkOnWithUnmask :: Int -> ((forall a. IO a -> IO a) -> IO ()) -> IO ThreadId
ctrl_forkOnWithUnmask = I.forkOnWithUnmask
-- ctrl_getNumCapabilities :: IO Int
ctrl_getNumCapabilities = I.getNumCapabilities
-- ctrl_killThread :: ThreadId -> IO ()
ctrl_killThread = I.killThread
-- ctrl_mkWeakThreadId :: ThreadId -> IO (Weak ThreadId)
ctrl_mkWeakThreadId = I.mkWeakThreadId
-- ctrl_myThreadId :: IO ThreadId
ctrl_myThreadId = I.myThreadId
-- ctrl_setNumCapabilities :: Int -> IO ()
ctrl_setNumCapabilities = I.setNumCapabilities
-- ctrl_threadCapability :: ThreadId -> IO (Int, Bool)
ctrl_threadCapability = I.threadCapability
-- ctrl_throwTo :: forall e. Exception e => ThreadId -> e -> IO ()
ctrl_throwTo = I.throwTo
-- ctrl_yield :: IO ()
ctrl_yield = I.yield
-- ctrl_isEmptyMVar :: forall a. MVar a -> IO Bool
ctrl_isEmptyMVar = I.isEmptyMVar
-- ctrl_newEmptyMVar :: forall a. IO (MVar a)
ctrl_newEmptyMVar = I.newEmptyMVar
-- ctrl_newMVar :: forall a. a -> IO (MVar a)
ctrl_newMVar = I.newMVar
-- ctrl_putMVar :: forall a. MVar a -> a -> IO ()
ctrl_putMVar = I.putMVar
-- ctrl_readMVar :: forall a. MVar a -> IO a
ctrl_readMVar = I.readMVar
-- ctrl_takeMVar :: forall a. MVar a -> IO a
ctrl_takeMVar = I.takeMVar
-- ctrl_tryPutMVar :: forall a. MVar a -> a -> IO Bool
ctrl_tryPutMVar = I.tryPutMVar
-- ctrl_tryReadMVar :: forall a. MVar a -> IO (Maybe a)
ctrl_tryReadMVar = I.tryReadMVar
-- ctrl_tryTakeMVar :: forall a. MVar a -> IO (Maybe a)
ctrl_tryTakeMVar = I.tryTakeMVar
-- Type aliases mirroring the concurrency types under @Ctrl@ names.
type CtrlChan a = I.Chan a
type CtrlQSem = I.QSem
type CtrlQSemN = I.QSemN
type CtrlThreadId = I.ThreadId
type CtrlMVar a = I.MVar a
| rvion/ride | jetpack/src/Control/Concurrent/AsCtrl.hs | bsd-3-clause | 5,087 | 0 | 6 | 841 | 513 | 322 | 191 | 63 | 1 |
module BrownPLT.JavaScript.Contracts.Compiler
( compile
, compile'
, compileFormatted
, compileRelease
) where
import Control.Monad
import qualified Data.Map as M
import Text.PrettyPrint.HughesPJ ( render, vcat )
import Text.ParserCombinators.Parsec.Pos (SourcePos)
import Paths_JsContracts -- created by Cabal
import System.FilePath ((</>))
import BrownPLT.JavaScript.Parser (ParsedExpression, ParsedStatement,
parseJavaScriptFromFile, parseScriptFromString )
import BrownPLT.JavaScript.Environment (env)
import BrownPLT.JavaScript.Syntax
import BrownPLT.JavaScript.PrettyPrint (renderStatements)
import BrownPLT.JavaScript.Contracts.Types
import BrownPLT.JavaScript.Contracts.Parser
import BrownPLT.JavaScript.Contracts.Template
-- Given the name foo, we get
--
-- try {
--   // allow the implementation to export to this directly
--   if (this.foo !== undefined) {
--     this.foo = foo;
--   }
-- }
-- catch (_) {
--  // in case the local variable foo is undefined
-- }
-- | For each exported name, build the JavaScript statement sketched
-- above: copy the local binding onto @this@, guarded by a strict
-- inequality against @undefined@ and wrapped in an empty try/catch so
-- an unbound name is silently skipped.
exposeImplementation :: [String]
                     -> [ParsedStatement]
exposeImplementation names = map export names where
  -- a reference to the variable named n
  var n = VarRef noPos (Id noPos n)
  undef = VarRef noPos (Id noPos "undefined")
  export n = TryStmt noPos
    (IfSingleStmt noPos (InfixExpr noPos OpStrictNEq (var n) undef)
      (ExprStmt noPos $ AssignExpr noPos OpAssign
        (LDot noPos (ThisRef noPos) n)
        (var n)))
    [CatchClause noPos (Id noPos "_") (EmptyStmt noPos)]
    Nothing
-- Given a namespace, e.g. flapjax, we get
--
-- window.flapjax = { };
-- for (var ix in impl) {
--   window.flapjax[ix] = impl[ix]; }
-- | Build the two statements sketched above: create an empty object on
-- @window@ under the given namespace, then copy every property of the
-- local @impl@ object into it.
exportNamespace :: String
                -> [ParsedStatement]
exportNamespace namespace = [decl,loop] where
  ix = VarRef noPos (Id noPos "ix")
  window_namespace =
    (DotRef noPos (VarRef noPos (Id noPos "window"))
            (Id noPos namespace))
  -- window.<namespace> = { };
  decl = ExprStmt noPos $ AssignExpr noPos OpAssign
           (LDot noPos (VarRef noPos (Id noPos "window")) namespace)
           (ObjectLit noPos [])
  -- for (var ix in impl) window.<namespace>[ix] = impl[ix];
  loop = ForInStmt noPos (ForInVar (Id noPos "ix"))
           (VarRef noPos (Id noPos "impl")) $ ExprStmt noPos $
             AssignExpr noPos OpAssign (LBracket noPos window_namespace ix)
               (BracketRef noPos (VarRef noPos (Id noPos "impl")) ix)
-- | Wrap the implementation statements so their definitions land on a
-- fresh @impl@ object: emits @var impl = { };@ followed by a call of
-- @(function (impl) { <impl>; <expose exports> }).apply(impl)@, i.e.
-- the implementation runs with @this@ bound to @impl@ and the listed
-- names are copied onto it via 'exposeImplementation'.
wrapImplementation :: [ParsedStatement] -> [String] -> [ParsedStatement]
wrapImplementation impl names =
  [VarDeclStmt noPos [implDecl], ExprStmt noPos callThunkedImpl] where
    implDecl = VarDecl noPos (Id noPos "impl") (Just $ ObjectLit noPos [])
    implExport = exposeImplementation names
    callThunkedImpl = CallExpr noPos
      (DotRef noPos
        (ParenExpr noPos $ FuncExpr noPos [Id noPos "impl"]
          (BlockStmt noPos (impl ++ implExport)))
        (Id noPos "apply"))
      [VarRef noPos (Id noPos "impl")]
-- | Declare (without initialising) every free variable of the
-- implementation, so accidental globals become locals of the enclosing
-- wrapper instead of leaking into the page. The second argument is
-- currently unused (kept for signature compatibility with callers),
-- hence the underscore binder.
escapeGlobals :: [ParsedStatement] -> [String] -> [ParsedStatement]
escapeGlobals impl _exportNames =
  [VarDeclStmt noPos [VarDecl noPos (Id noPos s) Nothing] | s <- allGlobals]
  where globalMap = snd (env M.empty impl)
        allGlobals = M.keys globalMap
-- | Statements exporting one interface item onto @this@. Exports are
-- wrapped in their contract via 'compileContract'; instances are
-- re-exported unwrapped so external code can use @instanceof@; all
-- other interface items produce no statements.
makeExportStatements :: InterfaceItem -> [ParsedStatement]
makeExportStatements (InterfaceExport id pos contract) =
  [ ExprStmt noPos $ AssignExpr noPos OpAssign
      (LDot noPos (ThisRef noPos) id)
      (compileContract id contract pos $
        DotRef noPos (VarRef noPos (Id noPos "impl")) (Id noPos id))
  ]
-- allows external code to use "instanceof id"
makeExportStatements (InterfaceInstance id _ _) =
  [ ExprStmt noPos $ AssignExpr noPos OpAssign
      (LDot noPos (ThisRef noPos) id)
      (DotRef noPos (VarRef noPos (Id noPos "impl")) (Id noPos id))
  ]
makeExportStatements _ = [ ]
-- | Release-mode export: @this.<id> = impl.<id>;@ with no contract
-- wrapping. Exports and instances are treated identically; any other
-- interface item is a caller error.
exportRelease :: InterfaceItem -> ParsedStatement
exportRelease (InterfaceExport id _ contract) =
  ExprStmt noPos $ AssignExpr noPos OpAssign
    (LDot noPos (ThisRef noPos) id)
    (DotRef noPos (VarRef noPos (Id noPos "impl")) (Id noPos id))
exportRelease (InterfaceInstance id _ _) =
  ExprStmt noPos $ AssignExpr noPos OpAssign
    (LDot noPos (ThisRef noPos) id)
    (DotRef noPos (VarRef noPos (Id noPos "impl")) (Id noPos id))
exportRelease _ = error "exportRelease: expected InterfaceExport / Instance"
-- Given source that reads:
--
-- foo = contract;
-- ...
--
-- Transform it to:
--
-- var foo = { }; ...
-- (function() {
--   var tmp = contract;
--   foo.client = tmp.client;
--   foo.server = tmp.server;
-- })();
--
-- The names are first initialized to empty object to permit mutually-recursive
-- contract definitions.
-- | Compile alias and instance declarations per the scheme above:
-- first a declaration pass creating empty objects, then a definition
-- pass filling in the client/flat/server fields.
compileAliases :: [InterfaceItem] -> [ParsedStatement]
compileAliases aliases = concatMap init aliases ++ concatMap def aliases where
  -- declaration pass: var alias = { };
  init (InterfaceAlias id _) = templateStatements
    $ renameVar "alias" id (stmtTemplate "var alias = { };")
  init (InterfaceInstance id _ _) = templateStatements -- same as above
    $ renameVar "alias" id (stmtTemplate "var alias = { };")
  init _ = []
  -- definition pass: copy the compiled contract's fields onto the alias
  def (InterfaceAlias id contract) = templateStatements
    $ renameVar "alias" id
    $ substVar "contract" (cc contract)
      (stmtTemplate "(function() { var tmp = contract;\n \
                    \ alias.client = tmp.client;\n \
                    \ alias.flat = tmp.flat; \
                    \ alias.server = tmp.server; })(); \n")
  def (InterfaceInstance id loc contract) = templateStatements
    $ renameVar "alias" id
    $ substVar "contract" (cc contract)
    $ substVar "name" (StringLit noPos id)
    $ substVar "constr" (DotRef noPos (VarRef noPos (Id noPos "impl"))
                                (Id noPos id))
      (stmtTemplate "(function() { \
                    \ var tmp = contracts.instance(name)(constr,contract); \
                    \ alias.client = tmp.client; \
                    \ alias.flat = tmp.flat; \
                    \ alias.server = tmp.server; })(); ")
  def _ = []
-- | Assemble the full checked module: the wrapped implementation,
-- the contract-library boilerplate, raw interface statements, alias
-- definitions and contract-guarded exports, all inside an immediately
-- invoked anonymous function.
-- NOTE(review): the local binding 'aliases' is never used
-- ('compileAliases' is applied to the whole interface instead).
compile :: [ParsedStatement] -- ^implementation
        -> [InterfaceItem] -- ^the interface
        -> [ParsedStatement] -- ^contract library
        -> ParsedStatement -- ^encapsulated implementation
compile impl interface boilerplateStmts =
  let exportStmts = concatMap makeExportStatements interface
      exportNames = [n | InterfaceExport n _ _ <- interfaceExports]
      instanceNames = [ n | InterfaceInstance n _ _
                              <- filter isInterfaceInstance interface ]
      aliases = filter isInterfaceAlias interface
      aliasStmts = compileAliases interface
      wrappedImpl = wrapImplementation (escapeGlobals impl exportNames ++ impl)
                                       (exportNames ++ instanceNames)
      interfaceStmts = map interfaceStatement $
        filter isInterfaceStatement interface
      interfaceExports = filter isInterfaceExport interface
      outerWrapper = ParenExpr noPos $ FuncExpr noPos [] $ BlockStmt noPos $
        wrappedImpl ++ boilerplateStmts ++ interfaceStmts ++ aliasStmts ++
        exportStmts
    in ExprStmt noPos $ CallExpr noPos outerWrapper []
-- Opening text of the generated library wrapper used by the textual
-- compilers below; the matching "\n}).apply(impl,[]);\n" and "\n})();"
-- fragments are appended in 'compileRelease' / 'compileFormatted'.
libraryHeader =
  "(function () {\n \
  \ var impl = { };\n \
  \ (function() {\n"
-- | Textual release build: splice the raw implementation between the
-- library header and closing fragments, expose exported names, and
-- optionally re-export them (without contracts) and into a namespace.
-- Calls 'error' if the implementation fails to parse.
-- NOTE(review): the 'boilerplate' (contract library) argument is not
-- used in release mode — confirm this is intentional.
compileRelease :: String -- ^implementation
               -> String -- ^implementation source
               -> String -- ^contract library
               -> Bool -- ^export?
               -> [InterfaceItem] -- ^the interface
               -> Maybe String -- ^the namespace name
               -> String -- ^encapsulated implementation
compileRelease rawImpl implSource boilerplate isExport interface namespace =
  libraryHeader ++ (renderStatements $ escapeGlobals impl exportNames)
  ++ rawImpl ++ exposeStatements ++ "\n}).apply(impl,[]);\n"
  ++ exportStatements ++ namespaceStatements ++ "\n})();" where
    impl = case parseScriptFromString implSource rawImpl of
      Left err -> error (show err)
      Right (Script _ stmts) -> stmts
    exports = filter isInterfaceExport interface
    instances = filter isInterfaceInstance interface
    exportStatements = case isExport of
      True -> renderStatements (map exportRelease $ exports ++ instances)
      False -> ""
    exportNames = [n | InterfaceExport n _ _ <- exports ]
    instanceNames = [n | InterfaceInstance n _ _ <- instances]
    exposeStatements = renderStatements
      (exposeImplementation (exportNames ++ instanceNames))
    namespaceStatements = case namespace of
      Nothing -> ""
      Just s -> renderStatements (exportNamespace s)
-- | Textual checked build: like 'compileRelease' but with the contract
-- boilerplate, interface statements, alias definitions and
-- contract-guarded exports included. Calls 'error' if the
-- implementation fails to parse.
-- NOTE(review): the local binding 'aliases' is never used
-- ('compileAliases' is applied to the whole interface instead).
compileFormatted :: String -- ^implementation
                 -> String -- ^implementation source
                 -> String -- ^contract library
                 -> Bool -- ^export?
                 -> [InterfaceItem] -- ^the interface
                 -> String -- ^encapsulated implementation
compileFormatted rawImpl implSource boilerplate isExport interface =
  libraryHeader ++ (renderStatements $ escapeGlobals impl exportNames)
  ++ rawImpl
  ++ exposeStatements ++ "\n}).apply(impl,[]);\n" ++ boilerplate
  ++ interfaceStatements
  ++ aliasStatements ++ exportStatements
  ++ "\n})();" where
    impl = case parseScriptFromString implSource rawImpl of
      Left err -> error (show err)
      Right (Script _ stmts) -> stmts
    exports =filter isInterfaceExport interface
    instances = filter isInterfaceInstance interface
    exportStatements = case isExport of
      True -> renderStatements (concatMap makeExportStatements interface)
      False -> ""
    exportNames = [n | InterfaceExport n _ _ <- exports ]
    aliases = filter isInterfaceAlias interface
    aliasStatements = renderStatements (compileAliases interface)
    instanceNames =
      [n | InterfaceInstance n _ _ <- filter isInterfaceInstance interface]
    exposeStatements = renderStatements $
      exposeImplementation (exportNames ++ instanceNames)
    interfaceStatements = renderStatements $ map interfaceStatement $
      filter isInterfaceStatement interface
-- | Like 'compile', but loads the contract-library boilerplate shipped
-- with the package (data file @contracts.js@).
compile' :: [ParsedStatement] -> [InterfaceItem] -> IO ParsedStatement
compile' impl iface = do
  dataDir <- getDataDir
  boilerplate <- parseJavaScriptFromFile (dataDir </> "contracts.js")
  return (compile impl iface boilerplate)
-- | Wrap a guarded expression in a runtime contract check:
-- @contracts.guard(<contract>, <expr>, <exportId>, "client", <loc>)@.
-- NOTE(review): both the 'pos' argument and the local 'loc' binding
-- are unused in the generated call — the position baked into the
-- contract ('contractPos') is used instead.
compileContract :: String -- ^export name
                -> Contract -- ^ contract
                -> SourcePos -- ^ location of export
                -> ParsedExpression
                -> ParsedExpression
compileContract exportId contract pos guardExpr =
  CallExpr noPos (DotRef noPos (VarRef noPos (Id noPos "contracts"))
                         (Id noPos "guard"))
    [cc contract, guardExpr, StringLit noPos exportId,
     StringLit noPos "client", StringLit noPos (show (contractPos contract))]
  where loc = "on " ++ exportId ++ ", defined at " ++ show pos
-- |contract name compiler
--
-- Human-readable description of a contract, used in blame messages.
-- Previously only 'FlatContract' was handled, so any other contract
-- crashed with a pattern-match failure; a generic fallback makes the
-- function total without changing the flat-contract message.
nc :: Contract
   -> String
nc (FlatContract pos _predExpr) =
  "value that satisfies the predicate at " ++ show pos
nc _ = "value that satisfies a contract"
-- TODO: hygiene. Use an extended annotation (Either SourcePos ...) to
-- determine whether or not to substitute into a template.
-- |contract compiler
--
-- Translate a parsed contract into the JavaScript expression that
-- builds the corresponding runtime contract, by filling the
-- @contracts.*@ templates below. Recursive cases compile their
-- sub-contracts with 'cc'.
cc :: Contract -- ^parsed contract
   -> ParsedExpression -- ^contract in JavaScript
cc ctc = case ctc of
  -- flat predicate contract: contracts.flat(name)(pred)
  FlatContract _ predExpr -> templateExpression
    $ substVar "pred" predExpr
    $ substVar "name" (StringLit noPos (nc ctc))
    $ exprTemplate "contracts.flat(name)(pred)"
  -- function contract with an explicit rest-argument contract
  FunctionContract pos domainContracts (Just restContract) rangeContract ->
    templateExpression
      $ substVar "restArg" (cc restContract)
      $ substVar "result" (cc rangeContract)
      $ substVarList "fixedArgs"
          (map cc domainContracts)
      $ substVar "name" (StringLit noPos $ "function at " ++ show pos)
      $ exprTemplate
          "contracts.varArityFunc(name)([fixedArgs],restArg,result)"
  -- fixed-arity function contract: extra arguments must be undefined
  FunctionContract pos domainContracts Nothing rangeContract ->
    let isUndefined = DotRef noPos (VarRef noPos (Id noPos "contracts"))
                             (Id noPos "isUndefined")
    in templateExpression
       $ substVar "restArg" isUndefined
       $ substVar "result" (cc rangeContract)
       $ substVarList "fixedArgs" (map cc domainContracts)
       $ substVar "name" (StringLit noPos $ "function at " ++ show pos)
       $ exprTemplate
           "contracts.varArityFunc(name)([fixedArgs],restArg,result)"
  -- User-defined contract constructor
  ConstructorContract pos constrName args -> CallExpr noPos
    (CallExpr noPos
      (VarRef noPos (Id noPos constrName))
      [StringLit noPos constrName])
    (map cc args)
  -- array with one contract per element position
  FixedArrayContract pos elts -> templateExpression
    $ substVarList "contracts" (map cc elts)
    $ substVar "name" (StringLit noPos "fixed-array")
    $ exprTemplate "contracts.fixedArray(name)(contracts)"
  -- homogeneous array of arbitrary length
  ArrayContract _ elt -> templateExpression
    $ substVar "contract" (cc elt)
    $ substVar "name" (StringLit noPos "array")
    $ exprTemplate "contracts.unsizedArray(name)(contract)"
  -- object contract: one sub-contract per field
  -- NOTE(review): 'getField' and 'mkProp' are unused local bindings.
  ObjectContract pos fields ->
    let getField id = DotRef noPos (VarRef noPos $ Id noPos "val") (Id noPos id)
        mkProp id = PropId noPos (Id noPos id)
        fieldContract (id,contract) = (id, cc contract)
    in templateExpression
       $ substFieldList "fieldNames" (map fieldContract fields)
       $ substVar "name" (StringLit noPos "name")
       $ exprTemplate "contracts.obj(name)({ fieldNames: 42 })"
  -- reference to a previously declared alias
  NamedContract _ nameRef ->
    VarRef noPos (Id noPos nameRef)
| brownplt/javascript-contracts | src/BrownPLT/JavaScript/Contracts/Compiler.hs | bsd-3-clause | 13,488 | 0 | 17 | 3,311 | 3,317 | 1,694 | 1,623 | 248 | 8 |
module Network.EmailSend.SMTP(SMTPBackend(..)) where
----------------------------------------
---- STDLIB
----------------------------------------
----------------------------------------
---- SITE-PACKAGES
----------------------------------------
import Data.ByteString (ByteString)
import qualified Network.HaskellNet.SMTP as SMTP
import Network.EmailSend (MailBackend(..), SendingReceipt)
----------------------------------------
---- LOCAL
----------------------------------------
-- | SMTP mail backend: server host and the client host name announced
-- in EHLO (in that order).
data SMTPBackend = SMTPBackend String String
instance MailBackend SMTPBackend where
    sendMessage = smtpSendMessage
-- | Deliver one message over a fresh SMTP connection.
-- 'Nothing' appears to signal success, with failures rendered into
-- 'Just' via 'show' — confirm against the 'SendingReceipt' definition.
-- NOTE(review): uses the old Prelude-style 'catch'; responses of the
-- individual SMTP commands are not checked.
smtpSendMessage :: SMTPBackend -> String -> String -> ByteString ->
                   IO SendingReceipt
smtpSendMessage (SMTPBackend server hostname) from to message = (do
    c <- SMTP.connectSMTP server
    SMTP.sendCommand c $ SMTP.EHLO hostname
    SMTP.sendCommand c $ SMTP.MAIL from
    SMTP.sendCommand c $ SMTP.RCPT to
    SMTP.sendCommand c $ SMTP.DATA message
    SMTP.sendCommand c SMTP.NOOP
    SMTP.sendCommand c SMTP.QUIT
    return Nothing) `catch` (\m -> return $ Just $ show m)
| weissi/haskell-email | Network/EmailSend/SMTP.hs | bsd-3-clause | 1,105 | 0 | 11 | 163 | 269 | 141 | 128 | 18 | 1 |
{-# OPTIONS -Wall -Werror #-}
-- #hide
module Data.Time.Clock.UTCDiff where
import Data.Time.Clock.POSIX
import Data.Time.Clock.UTC
-- | addUTCTime a b = a + b
addUTCTime :: NominalDiffTime -> UTCTime -> UTCTime
addUTCTime offset t = posixSecondsToUTCTime (utcTimeToPOSIXSeconds t + offset)
-- | diffUTCTime a b = a - b
diffUTCTime :: UTCTime -> UTCTime -> NominalDiffTime
diffUTCTime end start = utcTimeToPOSIXSeconds end - utcTimeToPOSIXSeconds start
| FranklinChen/hugs98-plus-Sep2006 | packages/time/Data/Time/Clock/UTCDiff.hs | bsd-3-clause | 441 | 0 | 9 | 68 | 104 | 60 | 44 | 8 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE RecordWildCards #-}
module Lib where
import Control.Monad (join)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Reader (ReaderT, ask)
import qualified Control.Monad.State.Strict as State
import Control.Monad.Trans.Class (lift)
import Data.Hashable (Hashable)
import qualified Data.HashMap.Strict as HM
import Data.List (foldl', intersperse, sortOn)
import Data.Maybe (fromMaybe, mapMaybe)
import Data.Monoid ((<>))
import qualified Data.Serialize as S
import qualified Data.Serialize.Get as SG
import qualified Data.Serialize.Put as SP
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Time.Calendar as Cal
import qualified Data.Time.Format as Cal
import qualified Data.Time.LocalTime as Time
import GHC.Generics (Generic (..))
import qualified Options.Applicative as OA
import qualified Options.Applicative.Builder.Internal as OA
import qualified Pipes as P
import qualified Pipes.ByteString as PBS
import qualified Pipes.Prelude as P
import qualified Pipes.Safe as P
import qualified System.IO as IO
import Text.Printf (printf)
-- | Top-level CLI commands, one per sub-command of the parser.
data Command =
  Add AddOptions
  | Modify ModifyOptions
  | Delete DeleteOptions
  | Show ShowOptions
  deriving (Show)
-- | A free-form label attached to an expense.
data Tag = Tag
  { _tagName :: SerializableText
  } deriving (Show, Eq, Generic)
-- | Any string parses as a tag; the whole input is consumed.
instance Read Tag where
  readsPrec _ t = [(Tag . SerializableText $ T.pack t, "")]
-- | A monetary amount, stored exactly as a 'Rational'.
newtype Amount = Amount
  { unAmount :: Rational
  } deriving (Show, Eq, Enum, Fractional, Num, Ord, Real, RealFrac, Generic)
-- | Parse using 'Double' syntax (e.g. @"12.5"@). The previous
-- definition forced a partial @read :: String -> Double@ and crashed
-- on malformed input; delegating to the 'Double' parser makes bad
-- input fail the parse so optparse-applicative reports a clean error.
instance Read Amount where
  readsPrec d s =
    [ (Amount (toRational x), rest)
    | (x, rest) <- readsPrec d s :: [(Double, String)] ]
-- | Options for @add@: optional date (defaults to today), amount, tags.
data AddOptions = AddOptions
  { _addOptDate :: Maybe Cal.Day
  , _addOptAmount :: Amount
  , _addOptTags :: [Tag]
  } deriving (Show)
-- | Options for @modify@: target id plus the fields to overwrite
-- ('Nothing' keeps the existing value).
data ModifyOptions = ModifyOptions
  { _modOptId :: ExpenseId
  , _modOptDate :: Maybe Cal.Day
  , _modOptAmount :: Maybe Amount
  , _modOptTags :: Maybe [Tag]
  } deriving (Show)
-- | Options for @delete@: the id of the expense to remove.
data DeleteOptions = DeleteOptions
  { _delOptId :: ExpenseId
  } deriving (Show)
-- | Options for @show@ (currently none).
data ShowOptions = ShowOptions
  {
  } deriving (Show)
-- | Options shared by all commands: path of the event-log database.
data GlobalOptions = GlobalOptions
  { _globOptDb :: FilePath
  } deriving (Show)
defaultGlobalOptions :: GlobalOptions
defaultGlobalOptions = GlobalOptions { _globOptDb = "./db.bin" }
-- | Program description shown by optparse-applicative's help output.
programInfo :: OA.InfoMod a
programInfo = OA.fullDesc <>
  OA.progDesc "Pfennig CLI v0.1.0.0" <>
  OA.header "Pfennig CLI -- Manage your expenses"
-- | Top-level command parser: dispatches to the four sub-commands.
programOpts :: OA.Parser Command
programOpts = OA.hsubparser
  (OA.command "add"
     (OA.info addOptions $ OA.progDesc "Add an expense") <>
   OA.command "modify"
     (OA.info modifyOptions $ OA.progDesc "Modify an expense") <>
   OA.command "delete"
     (OA.info deleteOptions $ OA.progDesc "Delete an expense") <>
   OA.command "show"
     (OA.info showOptions $ OA.progDesc "Show recorded expenses"))
-- | @add [-d DATE] AMOUNT TAGS...@
addOptions :: OA.Parser Command
addOptions = Add <$>
  (AddOptions <$>
   dayOption <*>
   amountArgument <*>
   tagArguments)
-- | @modify ID [-d DATE] [-a AMOUNT] [-t TAGS]...@
modifyOptions :: OA.Parser Command
modifyOptions = Modify <$>
  (ModifyOptions <$>
   expenseIdArgument <*>
   dayOption <*>
   amountOption <*>
   tagOptions)
-- | @delete ID@
deleteOptions :: OA.Parser Command
deleteOptions = Delete <$>
  (DeleteOptions <$>
   expenseIdArgument)
-- | @show@ takes no options.
showOptions :: OA.Parser Command
showOptions = pure $ Show ShowOptions
-- | Positional expense id, parsed via the 'Read' instance.
expenseIdArgument :: OA.Parser ExpenseId
expenseIdArgument = OA.argument OA.auto expenseIdMods
-- | Optional @--date/-d@ flag.
-- NOTE(review): an unparsable date collapses to 'Nothing' here
-- ('parseDay =<<'), i.e. it is silently treated as "no date given"
-- rather than rejected — confirm this is intended.
dayOption :: OA.Parser (Maybe Cal.Day)
dayOption = (parseDay =<<) <$> OA.optional
  (OA.strOption
    (dayMods <>
     OA.long "date" <>
     OA.short 'd'))
-- | Optional @--amount/-a@ flag.
amountOption :: OA.Parser (Maybe Amount)
amountOption = OA.optional $ OA.option OA.auto
  (amountMods <>
   OA.long "amount" <>
   OA.short 'a')
-- | Positional amount.
amountArgument :: OA.Parser Amount
amountArgument = OA.argument OA.auto amountMods
-- | Optional, repeatable @--tags/-t@ flags (at least one if present).
tagOptions :: OA.Parser (Maybe [Tag])
tagOptions = OA.optional $ OA.some $ OA.option OA.auto
  (tagsMods <>
   OA.long "tags" <>
   OA.short 't')
-- | One or more positional tags.
tagArguments :: OA.Parser [Tag]
tagArguments = OA.some $ OA.argument OA.auto tagsMods
-- Metavar/help modifiers shared between the option and argument parsers.
expenseIdMods :: OA.HasMetavar f => OA.Mod f ExpenseId
expenseIdMods =
  OA.metavar "ID" <>
  OA.help "ID of the expense to modify"
dayMods :: OA.HasMetavar f => OA.Mod f String
dayMods =
  OA.metavar "DATE" <>
  OA.help "Date of the expense"
amountMods :: OA.HasMetavar f => OA.Mod f Amount
amountMods =
  OA.metavar "AMOUNT" <>
  OA.help "Amount of the expense"
tagsMods :: OA.HasMetavar f => OA.Mod f Tag
tagsMods =
  OA.metavar "TAGS" <>
  OA.help "Tags for the given expense"
-- | Parse a date written as @DD.MM.YYYY@; 'Nothing' on mismatch.
parseDay :: String -> Maybe Cal.Day
parseDay str = Cal.parseTimeM True Cal.defaultTimeLocale "%d.%m.%Y" str
-- | Identifier of an expense; ids are handed out sequentially from 0.
newtype ExpenseId = ExpenseId
  { unExpenseId :: Integer
  } deriving (Show, Eq, Generic, Hashable)
-- | Parse a bare integer. Delegates to the 'Integer' parser instead of
-- forcing a partial 'read', so malformed command-line input yields no
-- parse (a clean optparse error) rather than a runtime crash.
instance Read ExpenseId where
  readsPrec d s = [ (ExpenseId n, rest) | (n, rest) <- readsPrec d s ]
-- | One entry of the append-only event log; the current expense set is
-- rebuilt by replaying these (see 'computeExpenses').
data Event =
  CreateExpense ExpenseCreation
  | ModifyExpense ExpenseModification
  | DeleteExpense ExpenseDeletion
  deriving (Show, Generic)
-- Generic-derived cereal codecs for the log entries and their fields.
instance S.Serialize Event
instance S.Serialize ExpenseId
instance S.Serialize Amount
instance S.Serialize Tag
-- | 'Cal.Day' wrapper with a cereal codec (as its Modified Julian Day).
newtype SerializableDay = SerializableDay { unSerializableDay :: Cal.Day }
  deriving (Show, Eq)
instance S.Serialize SerializableDay where
  put = S.put . Cal.toModifiedJulianDay . unSerializableDay
  get = SerializableDay . Cal.ModifiedJulianDay <$> S.get
-- | 'T.Text' wrapper with a cereal codec: UTF-8 bytes framed by a
-- little-endian 64-bit length prefix.
-- NOTE(review): decoding uses 'TE.decodeUtf8', which errors on invalid
-- UTF-8 — acceptable only if the file is always written by this codec.
newtype SerializableText = SerializableText { unSerializableText :: T.Text }
  deriving (Show, Eq)
instance S.Serialize SerializableText where
  put t = SP.putNested (SP.putWord64le . fromIntegral)
            (S.put . TE.encodeUtf8 $ unSerializableText t)
  get = SerializableText <$>
    SG.getNested (fromIntegral <$> SG.getWord64le) (TE.decodeUtf8 <$> S.get)
-- | Payload of a creation event: the id assigned at log time plus the
-- initial field values.
data ExpenseCreation = ExpenseCreation
  { _createId :: ExpenseId
  , _createDate :: SerializableDay
  , _createAmount :: Amount
  , _createTags :: [Tag]
  } deriving (Show, Generic)
instance S.Serialize ExpenseCreation
-- | Payload of a modification event; 'Nothing' fields keep the
-- previous value when replayed.
data ExpenseModification = ExpenseModification
  { _modifyId :: ExpenseId
  , _modifyDate :: Maybe SerializableDay
  , _modifyAmount :: Maybe Amount
  , _modifyTags :: Maybe [Tag]
  } deriving (Show, Generic)
instance S.Serialize ExpenseModification
-- | Payload of a deletion event.
data ExpenseDeletion = ExpenseDeletion
  { _deleteId :: ExpenseId
  } deriving (Show, Generic)
instance S.Serialize ExpenseDeletion
-- | Materialised expense, the result of replaying the event log.
data Expense = Expense
  { _expenseId :: ExpenseId
  , _expenseDate :: Cal.Day
  , _expenseAmount :: Amount
  , _expenseTags :: [Tag]
  } deriving (Show)
-- | One tab-separated display line: id, date, amount in euros, tags.
formatExpense :: Expense -> String
formatExpense e = concat
  [ "#", formatId (_expenseId e), "\t"
  , formatDate (_expenseDate e), "\t€ "
  , formatAmount (_expenseAmount e), "\t\t"
  , formatTags (_expenseTags e)
  ]
-- | Render an expense id as its bare integer.
formatId :: ExpenseId -> String
formatId eid = show (unExpenseId eid)
-- | Render a day as @DD.MM.YYYY@ (the format 'parseDay' accepts).
formatDate :: Cal.Day -> String
formatDate d = Cal.formatTime Cal.defaultTimeLocale "%d.%m.%Y" d
-- | Render an amount with two decimals, right-aligned to width 5.
formatAmount :: Amount -> String
formatAmount amt = printf "%5.2f" (fromRational (unAmount amt) :: Double)
-- | Comma-separate the tag names.
formatTags :: [Tag] -> String
formatTags = concat . intersperse ", " . map tagText
  where tagText = T.unpack . unSerializableText . _tagName
-- | Wrapper whose cereal codec frames the payload with a little-endian
-- 64-bit byte-length prefix.
-- NOTE(review): not referenced by any visible definition — possibly
-- dead code or used elsewhere in the project.
newtype SizeTagged a = SizeTagged { unSizeTagged :: a } deriving (Show)
instance S.Serialize a => S.Serialize (SizeTagged a) where
  put s = SP.putNested (SP.putWord64le . fromIntegral) (S.put $ unSizeTagged s)
  get = SizeTagged <$> SG.getNested (fromIntegral <$> SG.getWord64le) S.get
{-|
File handling
-}
-- | Id handed to the very first expense.
defaultExpenseId :: ExpenseId
defaultExpenseId = ExpenseId 0

-- | Successor id.
incExpenseId :: ExpenseId -> ExpenseId
incExpenseId (ExpenseId n) = ExpenseId (n + 1)

-- | Next free id: one past the id of the latest creation event in the
-- log, or 'defaultExpenseId' when nothing was ever created.
nextExpenseId :: [Event] -> ExpenseId
nextExpenseId events =
  case reverse (mapMaybe createId events) of
    []           -> defaultExpenseId
    (latest : _) -> incExpenseId latest
  where
    createId (CreateExpense opts) = Just (_createId opts)
    createId _                    = Nothing
-- | Read and decode the whole event log from the given file.
getEvents :: (P.MonadIO m, P.MonadMask m) => FilePath -> m [Event]
getEvents file = P.runSafeT . P.toListM $ fileReader file P.>-> decodeEvents
-- | Serialize the given events and append them to the log file.
appendEvents :: (P.MonadIO m, P.MonadMask m) => FilePath -> [Event] -> m ()
appendEvents file events = P.runSafeT . P.runEffect $
  P.each events P.>-> encodeEvents P.>-> fileAppender file
-- | Stream the file's bytes; the handle is closed by 'P.bracket' even
-- on exceptions.
-- NOTE(review): opened in 'IO.ReadWriteMode', presumably so a missing
-- database file is created instead of erroring — confirm intent.
fileReader :: (P.MonadIO m, P.MonadMask m) =>
              FilePath ->
              P.Producer PBS.ByteString (P.SafeT m) ()
fileReader file = P.bracket
  (liftIO $ IO.openBinaryFile file IO.ReadWriteMode)
  (liftIO . IO.hClose)
  PBS.fromHandle
-- | Append incoming bytes to the file; the handle is closed by
-- 'P.bracket' even on exceptions.
fileAppender :: (P.MonadIO m, P.MonadMask m) =>
                FilePath ->
                P.Consumer PBS.ByteString (P.SafeT m) ()
fileAppender file = P.bracket
  (liftIO $ IO.openBinaryFile file IO.AppendMode)
  (liftIO . IO.hClose)
  PBS.toHandle
-- | Execute one CLI command against the event-log database named in
-- the 'GlobalOptions' environment. Add/Modify/Delete append events;
-- Show replays the log and prints the result.
-- NOTE(review): the Modify and Delete clauses share the
-- read-log/check-id/append structure almost verbatim — candidates for
-- a common helper.
interpret :: (P.MonadIO m, P.MonadMask m) =>
             Command -> ReaderT GlobalOptions m ()
interpret (Add opts) = do
  file <- _globOptDb <$> ask
  events <- getEvents file
  let nextId = nextExpenseId events
  -- default the expense date to today (local time zone)
  zonedTime <- liftIO Time.getZonedTime
  let today = Time.localDay . Time.zonedTimeToLocalTime $ zonedTime
  let expenseCreation = ExpenseCreation
        { _createId = nextId
        , _createDate = SerializableDay $ fromMaybe today $ _addOptDate opts
        , _createAmount = _addOptAmount opts
        , _createTags = _addOptTags opts }
  appendEvents file [CreateExpense expenseCreation]
interpret (Modify mo) = do
  file <- _globOptDb <$> ask
  events <- getEvents file
  let expenses = computeExpenses events
  let eid = _modOptId mo
  let idExists = any (\e -> _expenseId e == eid) expenses
  if idExists -- only log a modification for an existing expense
    then let expenseModification = ExpenseModification
               { _modifyId = eid
               , _modifyDate = SerializableDay <$> _modOptDate mo
               , _modifyAmount = _modOptAmount mo
               , _modifyTags = _modOptTags mo }
         in appendEvents file [ModifyExpense expenseModification]
    else do
      liftIO $ putStrLn $ "Couldn't find expense with ID #" ++ formatId eid
      return ()
interpret (Delete delOpt) = do
  file <- _globOptDb <$> ask
  events <- getEvents file
  let expenses = computeExpenses events
  let eid = _delOptId delOpt
  let idExists = any (\e -> _expenseId e == eid) expenses
  if idExists
    then let expenseDeletion = ExpenseDeletion { _deleteId = eid }
         in appendEvents file [DeleteExpense expenseDeletion]
    else do
      liftIO $ putStrLn $ "Couldn't find expense with ID #" ++ formatId eid
      return ()
interpret (Show _) = do
  cfg <- ask
  events <- getEvents $ _globOptDb cfg
  let expenses = computeExpenses events
  liftIO $ mapM_ (putStrLn . formatExpense) expenses
-- | Fold the full event log into the current set of expenses,
-- sorted by date.
computeExpenses :: [Event] -> [Expense]
computeExpenses events =
  sortOn _expenseDate (HM.elems currentExpenses)
  where (currentExpenses, _nextId) =
          foldl applyEvent (HM.empty, defaultExpenseId) events
-- | Apply one event to the accumulated expense map, threading the next
-- fresh 'ExpenseId' alongside. Modification/deletion of an unknown ID
-- leaves the accumulator untouched.
applyEvent :: (HM.HashMap ExpenseId Expense, ExpenseId) ->
              Event ->
              (HM.HashMap ExpenseId Expense, ExpenseId)
applyEvent (hm, nextId) (CreateExpense ec) =
  (HM.insert nextId created hm, incExpenseId nextId)
  where created = Expense { _expenseId = nextId
                          , _expenseDate = unSerializableDay $ _createDate ec
                          , _expenseAmount = _createAmount ec
                          , _expenseTags = _createTags ec }
applyEvent acc@(hm, nextId) (ModifyExpense me) =
  case HM.lookup targetId hm of
    Nothing -> acc  -- unknown ID: the event is a no-op
    Just old ->
      -- Each field falls back to the existing value when the
      -- modification leaves it unspecified.
      let updated = Expense
            { _expenseId = _expenseId old
            , _expenseDate = fromMaybe (_expenseDate old)
                                       (unSerializableDay <$> _modifyDate me)
            , _expenseAmount = fromMaybe (_expenseAmount old) (_modifyAmount me)
            , _expenseTags = fromMaybe (_expenseTags old) (_modifyTags me)
            }
      in (HM.insert targetId updated hm, nextId)
  where targetId = _modifyId me
applyEvent acc@(hm, nextId) (DeleteExpense de) =
  if HM.member targetId hm
    then (HM.delete targetId hm, nextId)
    else acc
  where targetId = _deleteId de
-- | Serialize events
-- Emits one cereal-encoded 'PBS.ByteString' chunk per incoming 'Event';
-- this is the inverse of 'decodeEvents'.
encodeEvents :: Monad m => P.Pipe Event PBS.ByteString m ()
encodeEvents = P.map S.encode
-- | Deserialize events incrementally; abort on any parse error
-- The 'StateT' layer holds the cereal decoder state between chunks:
-- 'Nothing' means no decode is in progress, @Just r@ carries a partial
-- (or finished/failed) decode result. Each fully decoded 'Event' is
-- yielded downstream and decoding restarts on the leftover bytes.
-- NOTE(review): when upstream ends mid-event, the trailing partial
-- decode is silently discarded -- confirm that is acceptable.
decodeEvents :: Monad m => P.Pipe PBS.ByteString Event m ()
decodeEvents = State.evalStateT go Nothing
  where go = do
          s <- State.get
          case s of
            Nothing -> do -- No previous state available, start a fresh decode
              bs <- lift P.await
              State.put . Just . SG.runGetPartial S.get $ bs
              go
            Just r -> case r of -- Previous state available
              SG.Fail _ _ -> return () -- Abort, parsing failed
              SG.Partial cont -> do -- Request more input
                bs <- lift P.await
                State.put . Just . cont $ bs
                go
              SG.Done evt rest -> do -- Parsed an event successfully
                lift $ P.yield evt
                -- Restart decoding on the bytes left over from this event.
                State.put . Just . SG.runGetPartial S.get $ rest
                go
| muhbaasu/pfennig-cli | src/Lib.hs | bsd-3-clause | 13,889 | 1 | 22 | 3,623 | 3,999 | 2,106 | 1,893 | 343 | 4 |
module Sexy.Instances.Monoid.Either () where
import Sexy.Data (Either)
import Sexy.Classes (Monoid(..), Nil)
import Sexy.Instances.Nil.Either ()
import Sexy.Instances.Plus.Either ()
instance (Nil e, Monoid a) => Monoid (Either e a) where
| DanBurton/sexy | src/Sexy/Instances/Monoid/Either.hs | bsd-3-clause | 240 | 0 | 7 | 30 | 89 | 55 | 34 | 6 | 0 |
module Ellipsoid where
-- | To represent a reference ellipsoid.
-- The two axes are lengths (presumably metres, matching the predefined
-- ellipsoids below -- confirm against the data source); the last two
-- fields are dimensionless quantities derived from the axes by
-- 'mkEllipsoid'.
data Ellipsoid =
  Ellipsoid { semiMajorAxis :: Double -- ^ Semi major axis.
            , semiMinorAxis :: Double -- ^ Semi minor axis.
            , eccentricitySquared :: Double -- ^ Eccentricity squared.
            , flattening :: Double -- ^ Flattening.
            } deriving (Eq)
-- | Parameters to build Ellipsoid.
-- Either pair is sufficient: 'mkEllipsoid' derives the remaining
-- 'Ellipsoid' fields from whichever constructor is supplied.
data EllipsoidParams = SemiMajMinAxises Double Double -- ^ Semi major and semi minor axises.
                     | SemiMajAxisEccentricitySquared Double Double -- ^ Semi major axis and eccentricity squared.
-- | Show only the two axes; the derived fields are omitted.
instance Show Ellipsoid where
  show (Ellipsoid sMajAx sMinAx _ _) =
    concat [ "[semi-major axis = ", show sMajAx
           , ", semi-minor axis = ", show sMinAx, "]" ]
-- | Create an Ellipsoid with the given parameters, deriving the fields
-- that were not supplied directly.
mkEllipsoid :: EllipsoidParams -- ^ Ellipsoid parameters.
            -> Ellipsoid -- ^ The created Ellipsoid.
mkEllipsoid (SemiMajMinAxises sMajAx sMinAx) =
  Ellipsoid { semiMajorAxis = sMajAx
            , semiMinorAxis = sMinAx
            , eccentricitySquared = 1 - axisRatio ** 2 :: Double
            , flattening = 1 - axisRatio
            }
  where axisRatio = sMinAx / sMajAx
mkEllipsoid (SemiMajAxisEccentricitySquared sMajAx eccSq) =
  Ellipsoid { semiMajorAxis = sMajAx
            , semiMinorAxis = minorAxis
            , eccentricitySquared = eccSq
            , flattening = 1 - minorAxis / sMajAx
            }
  where minorAxis = sqrt (sMajAx * sMajAx * (1 - eccSq))
-- | Pre-determined ellipsoids:
-- Each constant builds a well-known reference ellipsoid from its
-- published axis parameters (same length unit as 'Ellipsoid''s axes).
-- All but 'modifiedAiryEllipsoid' are specified by their two axes;
-- Modified Airy is specified by semi-major axis and eccentricity squared.
airy1830Ellipsoid :: Ellipsoid
airy1830Ellipsoid = mkEllipsoid (SemiMajMinAxises 6377563.396 6356256.909)
australianNational1966Ellipsoid :: Ellipsoid
australianNational1966Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378160.0 6356774.719)
bessel1841Ellipsoid :: Ellipsoid
bessel1841Ellipsoid = mkEllipsoid (SemiMajMinAxises 6377397.155 6356078.9629)
clarke1866Ellipsoid :: Ellipsoid
clarke1866Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378206.4 6356583.8)
clarke1880Ellipsoid :: Ellipsoid
clarke1880Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378249.145 6356514.8696)
everest1830Ellipsoid :: Ellipsoid
everest1830Ellipsoid = mkEllipsoid (SemiMajMinAxises 6377276.34518 6356075.41511)
fischer1960Ellipsoid :: Ellipsoid
fischer1960Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378166.0 6356784.284)
fischer1968Ellipsoid :: Ellipsoid
fischer1968Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378150.0 6356768.337)
grs67Ellipsoid :: Ellipsoid
grs67Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378160.0 6356774.51609)
grs75Ellipsoid :: Ellipsoid
grs75Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378140.0 6356755.288)
grs80Ellipsoid :: Ellipsoid
grs80Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378137 6356752.3141)
hayford1910Ellipsoid :: Ellipsoid
hayford1910Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378388.0 6356911.946)
helmert1906Ellipsoid :: Ellipsoid
helmert1906Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378200.0 6356818.17)
hough1956Ellipsoid :: Ellipsoid
hough1956Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378270.0 6356794.34)
iers1989Ellipsoid :: Ellipsoid
iers1989Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378136.0 6356751.302)
internationalEllipsoid :: Ellipsoid
internationalEllipsoid = mkEllipsoid (SemiMajMinAxises 6378388 6356911.9462)
krassovsky1940Ellipsoid :: Ellipsoid
krassovsky1940Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378245.0 6356863.019)
modifiedAiryEllipsoid :: Ellipsoid
modifiedAiryEllipsoid = mkEllipsoid (SemiMajAxisEccentricitySquared 6377340.189 0.00667054015)
modifiedEverestEllipsoid :: Ellipsoid
modifiedEverestEllipsoid = mkEllipsoid (SemiMajMinAxises 6377304.063 6356103.039)
newInternational1967Ellipsoid :: Ellipsoid
newInternational1967Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378157.5 6356772.2)
southAmerican1969Ellipsoid :: Ellipsoid
southAmerican1969Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378160.0 6356774.7192)
wgs60Ellipsoid :: Ellipsoid
wgs60Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378165.0 6356783.287)
wgs66Ellipsoid :: Ellipsoid
wgs66Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378145.0 6356759.770)
wgs72Ellipsoid :: Ellipsoid
wgs72Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378135 6356750.5)
wgs84Ellipsoid :: Ellipsoid
wgs84Ellipsoid = mkEllipsoid (SemiMajMinAxises 6378137 6356752.3142)
| danfran/hcoord | src/Ellipsoid.hs | bsd-3-clause | 4,268 | 0 | 11 | 663 | 804 | 437 | 367 | 75 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.Upgrade
( upgrade
, UpgradeOpts
, upgradeOpts
) where
import Control.Exception.Safe (catchAny)
import Control.Monad (unless, when)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Data.Foldable (forM_)
import qualified Data.Map as Map
import Data.Maybe (isNothing)
import Data.Monoid.Extra
import qualified Data.Text as T
import Lens.Micro (set)
import Options.Applicative
import Path
import Path.IO
import qualified Paths_stack as Paths
import Stack.Build
import Stack.Config
import Stack.Fetch
import Stack.PackageIndex
import Stack.Setup
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Resolver
import Stack.Types.StackT
import Stack.Types.StringError
import System.Exit (ExitCode (ExitSuccess))
import System.Process (rawSystem, readProcess)
import System.Process.Run
-- | Command line parser for @stack upgrade@ options.
-- @--binary-only@ disables the source path and @--source-only@ disables
-- the binary path; with neither flag both upgrade paths stay available.
upgradeOpts :: Parser UpgradeOpts
upgradeOpts = UpgradeOpts
    <$> (sourceOnly <|> optional binaryOpts)
    <*> (binaryOnly <|> optional sourceOpts)
  where
    -- Each "only" flag produces Nothing for the *other* path.
    binaryOnly = flag' Nothing (long "binary-only" <> help "Do not use a source upgrade path")
    sourceOnly = flag' Nothing (long "source-only" <> help "Do not use a binary upgrade path")
    binaryOpts = BinaryOpts
        <$> optional (strOption
              ( long "binary-platform"
             <> help "Platform type for archive to download"
             <> showDefault))
        <*> switch
            (long "force-download" <>
             help "Download a stack executable, even if the version number is older than what we have")
        <*> optional (strOption
            (long "binary-version" <>
             help "Download a specific version, even if it's out of date"))
        <*> optional (strOption
            (long "github-org" <>
             help "Github organization name"))
        <*> optional (strOption
            (long "github-repo" <>
             help "Github repository name"))
    -- The git repository is only carried along when --git was passed.
    sourceOpts = SourceOpts
        <$> ((\fromGit repo -> if fromGit then Just repo else Nothing)
                <$> switch
                    ( long "git"
                   <> help "Clone from Git instead of downloading from Hackage (more dangerous)" )
                <*> strOption
                    ( long "git-repo"
                   <> help "Clone from specified git repository"
                   <> value "https://github.com/commercialhaskell/stack"
                   <> showDefault ))
-- | Options for a binary (pre-built executable) upgrade.
data BinaryOpts = BinaryOpts
    { _boPlatform :: !(Maybe String)
    -- ^ platform string used to pick the release archive; when 'Nothing'
    -- the platform is autodetected (see 'preferredPlatforms' in
    -- 'binaryUpgrade')
    , _boForce :: !Bool
    -- ^ force a download, even if the downloaded version is older
    -- than what we are
    , _boVersion :: !(Maybe String)
    -- ^ specific version to download
    , _boGithubOrg :: !(Maybe String)
    -- ^ GitHub organization to fetch release info from; passed through to
    -- 'downloadStackReleaseInfo'
    , _boGithubRepo :: !(Maybe String)
    -- ^ GitHub repository to fetch release info from; passed through to
    -- 'downloadStackReleaseInfo'
    }
    deriving Show
-- | Options for a source (build-from-source) upgrade.
newtype SourceOpts = SourceOpts
    { _soRepo :: Maybe String
    -- ^ @Just repo@: clone and build from this git repository;
    -- @Nothing@: build the latest release from the package indices
    }
    deriving Show
-- | Combined upgrade options; a 'Nothing' component disables that
-- upgrade path entirely (see 'upgrade' for how the paths interact).
data UpgradeOpts = UpgradeOpts
    { _uoBinary :: !(Maybe BinaryOpts)
    , _uoSource :: !(Maybe SourceOpts)
    }
    deriving Show
-- | Perform a stack upgrade along the allowed path(s). When both paths
-- are allowed, binary upgrade is attempted first with a fallback to
-- source upgrade on any exception -- unless an explicit git repo was
-- requested, in which case source upgrade is used directly.
upgrade :: (StackM env m, HasConfig env)
        => ConfigMonoid
        -> Maybe AbstractResolver
        -> Maybe String -- ^ git hash at time of building, if known
        -> UpgradeOpts
        -> m ()
upgrade gConfigMonoid mresolver builtHash (UpgradeOpts mbo mso) =
    case (mbo, mso) of
        -- FIXME It would be far nicer to capture this case in the
        -- options parser itself so we get better error messages, but
        -- I can't think of a way to make it happen.
        (Nothing, Nothing) -> throwString "You must allow either binary or source upgrade paths"
        (Just bo, Nothing) -> binary bo
        (Nothing, Just so) -> source so
        -- See #2977 - if --git or --git-repo is specified, do source upgrade.
        (_, Just so@(SourceOpts (Just _))) -> source so
        (Just bo, Just so) -> binary bo `catchAny` \e -> do
            $logWarn "Exception occured when trying to perform binary upgrade:"
            $logWarn $ T.pack $ show e
            $logWarn "Falling back to source upgrade"
            source so
  where
    binary bo = binaryUpgrade bo
    source so = sourceUpgrade gConfigMonoid mresolver builtHash so
-- | Download a pre-built stack executable from a GitHub release into the
-- local bin directory, replacing the current one. Skips the download
-- when the available version is not newer, unless forced (explicitly or
-- implicitly by requesting a specific version). The downloaded binary is
-- sanity-checked by running it with @--version@ before installation.
binaryUpgrade
    :: (StackM env m, HasConfig env)
    => BinaryOpts
    -> m ()
binaryUpgrade (BinaryOpts mplatform force' mver morg mrepo) = do
    platforms0 <-
      case mplatform of
        Nothing -> preferredPlatforms
        Just p -> return [("windows" `T.isInfixOf` T.pack p, p)]
    archiveInfo <- downloadStackReleaseInfo morg mrepo mver

    let mdownloadVersion = getDownloadVersion archiveInfo
        force =
          case mver of
            Nothing -> force'
            Just _ -> True -- specifying a version implies we're forcing things
    isNewer <-
        case mdownloadVersion of
            Nothing -> do
                $logError "Unable to determine upstream version from Github metadata"
                unless force $
                    $logError "Rerun with --force-download to force an upgrade"
                return False
            Just downloadVersion -> do
                $logInfo $ T.concat
                    [ "Current Stack version: "
                    , versionText stackVersion
                    , ", available download version: "
                    , versionText downloadVersion
                    ]
                return $ downloadVersion > stackVersion

    -- Decide whether to proceed based on forcing and version comparison.
    toUpgrade <- case (force, isNewer) of
        (False, False) -> do
            $logInfo "Skipping binary upgrade, your version is already more recent"
            return False
        (True, False) -> do
            $logInfo "Forcing binary upgrade"
            return True
        (_, True) -> do
            $logInfo "Newer version detected, downloading"
            return True
    when toUpgrade $ do
        config <- view configL
        downloadStackExe platforms0 archiveInfo (configLocalBin config) $ \tmpFile -> do
            -- Sanity check!
            ec <- rawSystem (toFilePath tmpFile) ["--version"]
            unless (ec == ExitSuccess)
                    $ throwString "Non-success exit code from running newly downloaded executable"
-- | Perform a source upgrade: obtain the latest stack sources (cloning
-- the given git repository, or unpacking the newest release from the
-- package indices), then build and install the @stack@ executable with
-- the resolved project configuration. Does nothing when we can prove we
-- are already up to date.
sourceUpgrade
  :: (StackM env m, HasConfig env)
  => ConfigMonoid
  -> Maybe AbstractResolver
  -> Maybe String   -- ^ git hash at time of building, if known
  -> SourceOpts
  -> m ()
sourceUpgrade gConfigMonoid mresolver builtHash (SourceOpts gitRepo) =
  withSystemTempDir "stack-upgrade" $ \tmp -> do
    menv <- getMinimalEnvOverride
    -- 'mdir' is Just the freshly obtained source directory, or Nothing
    -- when no upgrade is needed.
    mdir <- case gitRepo of
      Just repo -> do
        remote <- liftIO $ readProcess "git" ["ls-remote", repo, "master"] []
        -- Total extraction of the commit hash: the previous
        -- 'head . words' crashed with an unhelpful "Prelude.head: empty
        -- list" error when ls-remote produced no output.
        let mLatestCommit =
              case words remote of
                (commit:_) -> Just commit
                []         -> Nothing
        when (isNothing builtHash) $
          $logWarn $ "Information about the commit this version of stack was "
                  <> "built from is not available due to how it was built. "
                  <> "Will continue by assuming an upgrade is needed "
                  <> "because we have no information to the contrary."
        -- Only skip the upgrade when both hashes are known and agree;
        -- with missing information we assume an upgrade is needed.
        if not (isNothing builtHash) && builtHash == mLatestCommit
            then do
                $logInfo "Already up-to-date, no upgrade required"
                return Nothing
            else do
                $logInfo "Cloning stack"
                -- NOTE: "--recursive" was added after v1.0.0 (and before the
                -- next release). This means that we can't use submodules in
                -- the stack repo until we're comfortable with "stack upgrade
                -- --git" not working for earlier versions.
                let args = [ "clone", repo , "stack", "--depth", "1", "--recursive"]
                runCmd (Cmd (Just tmp) "git" menv args) Nothing
                return $ Just $ tmp </> $(mkRelDir "stack")
      Nothing -> do
        updateAllIndices
        (caches, _gitShaCaches) <- getPackageCaches
        let latest = Map.fromListWith max
                   $ map toTuple
                   $ Map.keys
                   -- Mistaken upload to Hackage, just ignore it
                   $ Map.delete (PackageIdentifier
                                    $(mkPackageName "stack")
                                    $(mkVersion "9.9.9"))
                     caches
        case Map.lookup $(mkPackageName "stack") latest of
            Nothing -> throwString "No stack found in package indices"
            Just version | version <= fromCabalVersion Paths.version -> do
                $logInfo "Already at latest version, no upgrade required"
                return Nothing
            Just version -> do
                let ident = PackageIdentifier $(mkPackageName "stack") version
                paths <- unpackPackageIdents tmp Nothing
                    -- accept latest cabal revision by not supplying a Git SHA
                    $ Map.singleton ident Nothing
                case Map.lookup ident paths of
                    Nothing -> error "Stack.Upgrade.upgrade: invariant violated, unpacked directory not found"
                    Just path -> return $ Just path

    -- Build and install only when an upgrade source directory was produced.
    forM_ mdir $ \dir -> do
        lc <- loadConfig
            gConfigMonoid
            mresolver
            (SYLOverride $ dir </> $(mkRelFile "stack.yaml"))
        bconfig <- lcLoadBuildConfig lc Nothing
        envConfig1 <- runInnerStackT bconfig $ setupEnv $ Just $
            "Try rerunning with --install-ghc to install the correct GHC into " <>
            T.pack (toFilePath (configLocalPrograms (view configL bconfig)))
        runInnerStackT (set (buildOptsL.buildOptsInstallExesL) True envConfig1) $
            build (const $ return ()) Nothing defaultBuildOptsCLI
                { boptsCLITargets = ["stack"]
                }
| Fuuzetsu/stack | src/Stack/Upgrade.hs | bsd-3-clause | 10,265 | 0 | 26 | 3,506 | 2,079 | 1,042 | 1,037 | 227 | 6 |
{-# LANGUAGE TupleSections, FlexibleInstances, MultiParamTypeClasses #-}
module Data.Parser.Grempa.Parser.SLR
( Item(..)
) where
import Data.Set(Set)
import qualified Data.Set as S
import Data.Parser.Grempa.Auxiliary.Auxiliary
import Data.Parser.Grempa.Parser.Item
import Data.Parser.Grempa.Grammar.Token
import Data.Parser.Grempa.Grammar.Untyped
-- | An LR(0) item: a production of a grammar rule together with a dot
-- position marking how much of the production has been recognised.
data Item s =
    Item { itemRId :: RId s -- ^ Rule this item belongs to.
         , itemProd :: Int -- ^ Index of the production within the rule.
         , itemPos :: Int -- ^ Position of the dot in the production.
         }
  deriving (Eq, Ord)
-- | Compact rendering of an item as @It(rule,production,position)@.
instance Show (Item s) where
    show (Item r pr po) =
        concat ["It(", show r, ",", show pr, ",", show po, ")"]
-- | LR(0) items are a minimal 'It' instance: the start item has the dot
-- at the beginning of the first production, and closure is the plain
-- LR(0) closure (no lookahead).
instance Token s => It Item s where
    itRId = itemRId
    itProd = itemProd
    getItPos = itemPos
    setItPos i p = i {itemPos = p}
    closure = closureLR0
    startItem rid = Item rid 0 0
-- | Determine what items may be valid productions from an item
-- Computes the LR(0) closure: whenever the symbol after the dot is a
-- rule reference, that rule's initial items are added; 'recTraverseG'
-- iterates this until a fixpoint is reached.
closureLR0 :: Token s => Set (Item s) -> Set (Item s)
closureLR0 = recTraverseG closure'
  where
    -- Returns (accumulated set, newly added items) for the traversal.
    closure' is = (is `S.union` res, res)
      where res = S.unions $ map closureI (S.toList is)
    closureI i = case nextSymbol i of
        Tok (SRule rid) -> firstItems rid
        _ -> S.empty
-- | Get the items with the dot at the beginning from a rule -- one item
-- per production of the rule.
firstItems :: RId s -> Set (Item s)
firstItems rid@(RId _ prods) =
    S.fromList [Item rid p 0 | p <- [0 .. length prods - 1]]
| ollef/Grempa | Data/Parser/Grempa/Parser/SLR.hs | bsd-3-clause | 1,486 | 0 | 13 | 461 | 459 | 250 | 209 | 35 | 2 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnNames]{Extracting imported and top-level names in scope}
-}
{-# LANGUAGE CPP, NondecreasingIndentation #-}
module RnNames (
rnImports, getLocalNonValBinders, newRecordSelector,
rnExports, extendGlobalRdrEnvRn,
gresFromAvails,
calculateAvails,
reportUnusedNames,
checkConName
) where
#include "HsVersions.h"
import DynFlags
import HsSyn
import TcEnv
import RnEnv
import RnHsDoc ( rnHsDoc )
import LoadIface ( loadSrcInterface )
import TcRnMonad
import PrelNames
import Module
import Name
import NameEnv
import NameSet
import Avail
import FieldLabel
import HscTypes
import RdrName
import RdrHsSyn ( setRdrNameSpace )
import Outputable
import Maybes
import SrcLoc
import BasicTypes ( TopLevelFlag(..), StringLiteral(..) )
import ErrUtils
import Util
import FastString
import FastStringEnv
import ListSetOps
import Id
import Type
import PatSyn
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
import Data.Either ( partitionEithers, isRight, rights )
-- import qualified Data.Foldable as Foldable
import Data.Map ( Map )
import qualified Data.Map as Map
import Data.Ord ( comparing )
import Data.List ( partition, (\\), find, sortBy )
-- import qualified Data.Set as Set
import System.FilePath ((</>))
import System.IO
{-
************************************************************************
* *
\subsection{rnImports}
* *
************************************************************************
Note [Tracking Trust Transitively]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we import a package as well as checking that the direct imports are safe
according to the rules outlined in the Note [HscMain . Safe Haskell Trust Check]
we must also check that these rules hold transitively for all dependent modules
and packages. Doing this without caching any trust information would be very
slow as we would need to touch all packages and interface files a module depends
on. To avoid this we make use of the property that if a modules Safe Haskell
mode changes, this triggers a recompilation from that module in the dependcy
graph. So we can just worry mostly about direct imports.
There is one trust property that can change for a package though without
recompliation being triggered: package trust. So we must check that all
packages a module tranitively depends on to be trusted are still trusted when
we are compiling this module (as due to recompilation avoidance some modules
below may not be considered trusted any more without recompilation being
triggered).
We handle this by augmenting the existing transitive list of packages a module M
depends on with a bool for each package that says if it must be trusted when the
module M is being checked for trust. This list of trust required packages for a
single import is gathered in the rnImportDecl function and stored in an
ImportAvails data structure. The union of these trust required packages for all
imports is done by the rnImports function using the combine function which calls
the plusImportAvails function that is a union operation for the ImportAvails
type. This gives us in an ImportAvails structure all packages required to be
trusted for the module we are currently compiling. Checking that these packages
are still trusted (and that direct imports are trusted) is done in
HscMain.checkSafeImports.
See the note below, [Trust Own Package] for a corner case in this method and
how its handled.
Note [Trust Own Package]
~~~~~~~~~~~~~~~~~~~~~~~~
There is a corner case of package trust checking that the usual transitive check
doesn't cover. (For how the usual check operates see the Note [Tracking Trust
Transitively] below). The case is when you import a -XSafe module M and M
imports a -XTrustworthy module N. If N resides in a different package than M,
then the usual check works as M will record a package dependency on N's package
and mark it as required to be trusted. If N resides in the same package as M
though, then importing M should require its own package be trusted due to N
(since M is -XSafe so doesn't create this requirement by itself). The usual
check fails as a module doesn't record a package dependency of its own package.
So instead we now have a bool field in a modules interface file that simply
states if the module requires its own package to be trusted. This field avoids
us having to load all interface files that the module depends on to see if one
is trustworthy.
Note [Trust Transitive Property]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
So there is an interesting design question in regards to transitive trust
checking. Say I have a module B compiled with -XSafe. B is dependent on a bunch
of modules and packages, some packages it requires to be trusted as its using
-XTrustworthy modules from them. Now if I have a module A that doesn't use safe
haskell at all and simply imports B, should A inherit all the the trust
requirements from B? Should A now also require that a package p is trusted since
B required it?
We currently say no but saying yes also makes sense. The difference is, if a
module M that doesn't use Safe Haskell imports a module N that does, should all
the trusted package requirements be dropped since M didn't declare that it cares
about Safe Haskell (so -XSafe is more strongly associated with the module doing
the importing) or should it be done still since the author of the module N that
uses Safe Haskell said they cared (so -XSafe is more strongly associated with
the module that was compiled that used it).
Going with yes is a simpler semantics we think and harder for the user to stuff
up but it does mean that Safe Haskell will affect users who don't care about
Safe Haskell as they might grab a package from Cabal which uses safe haskell (say
network) and that packages imports -XTrustworthy modules from another package
(say bytestring), so requires that package is trusted. The user may now get
compilation errors in code that doesn't do anything with Safe Haskell simply
because they are using the network package. They will have to call 'ghc-pkg
trust network' to get everything working. Due to this invasive nature of going
with yes we have gone with no for now.
-}
-- | Process Import Decls. See 'rnImportDecl' for a description of what
-- the return types represent.
-- Note: Do the non SOURCE ones first, so that we get a helpful warning
-- for SOURCE ones that are unnecessary
rnImports :: [LImportDecl RdrName]
          -> RnM ([LImportDecl Name], GlobalRdrEnv, ImportAvails, AnyHpcUsage)
rnImports imports = do
    this_mod <- getModule
    let (source, ordinary) = partition is_source_import imports
        is_source_import d = ideclSource (unLoc d)
    stuff1 <- mapAndReportM (rnImportDecl this_mod) ordinary
    stuff2 <- mapAndReportM (rnImportDecl this_mod) source
    -- Safe Haskell: See Note [Tracking Trust Transitively]
    let (decls, rdr_env, imp_avails, hpc_usage) = combine (stuff1 ++ stuff2)
    return (decls, rdr_env, imp_avails, hpc_usage)
  where
    -- Fold all the per-declaration results into one: environments and
    -- availability info are unioned, HPC usage is OR-ed.
    combine :: [(LImportDecl Name, GlobalRdrEnv, ImportAvails, AnyHpcUsage)]
            -> ([LImportDecl Name], GlobalRdrEnv, ImportAvails, AnyHpcUsage)
    combine = foldr plus ([], emptyGlobalRdrEnv, emptyImportAvails, False)
    plus (decl, gbl_env1, imp_avails1,hpc_usage1)
         (decls, gbl_env2, imp_avails2,hpc_usage2)
      = ( decl:decls,
          gbl_env1 `plusGlobalRdrEnv` gbl_env2,
          imp_avails1 `plusImportAvails` imp_avails2,
          hpc_usage1 || hpc_usage2 )
-- | Given a located import declaration @decl@ from @this_mod@,
-- calculate the following pieces of information:
--
-- 1. An updated 'LImportDecl', where all unresolved 'RdrName' in
--    the entity lists have been resolved into 'Name's,
--
-- 2. A 'GlobalRdrEnv' representing the new identifiers that were
--    brought into scope (taking into account module qualification
--    and hiding),
--
-- 3. 'ImportAvails' summarizing the identifiers that were imported
--    by this declaration, and
--
-- 4. A boolean 'AnyHpcUsage' which is true if the imported module
--    used HPC.
rnImportDecl :: Module -> LImportDecl RdrName
             -> RnM (LImportDecl Name, GlobalRdrEnv, ImportAvails, AnyHpcUsage)
rnImportDecl this_mod
             (L loc decl@(ImportDecl { ideclName = loc_imp_mod_name, ideclPkgQual = mb_pkg
                                     , ideclSource = want_boot, ideclSafe = mod_safe
                                     , ideclQualified = qual_only, ideclImplicit = implicit
                                     , ideclAs = as_mod, ideclHiding = imp_details }))
  = setSrcSpan loc $ do

    -- A package-qualified import requires -XPackageImports.
    when (isJust mb_pkg) $ do
        pkg_imports <- xoptM LangExt.PackageImports
        when (not pkg_imports) $ addErr packageImportErr

    -- If there's an error in loadInterface, (e.g. interface
    -- file not found) we get lots of spurious errors from 'filterImports'
    let imp_mod_name = unLoc loc_imp_mod_name
        doc = ppr imp_mod_name <+> text "is directly imported"

    -- Check for self-import, which confuses the typechecker (Trac #9032)
    -- ghc --make rejects self-import cycles already, but batch-mode may not
    -- at least not until TcIface.tcHiBootIface, which is too late to avoid
    -- typechecker crashes. (Indirect self imports are not caught until
    -- TcIface, see #10337 tracking how to make this error better.)
    --
    -- Originally, we also allowed 'import {-# SOURCE #-} M', but this
    -- caused bug #10182: in one-shot mode, we should never load an hs-boot
    -- file for the module we are compiling into the EPS. In principle,
    -- it should be possible to support this mode of use, but we would have to
    -- extend Provenance to support a local definition in a qualified location.
    -- For now, we don't support it, but see #10336
    when (imp_mod_name == moduleName this_mod &&
          (case mb_pkg of -- If we have import "<pkg>" M, then we should
                          -- check that "<pkg>" is "this" (which is magic)
                          -- or the name of this_mod's package.  Yurgh!
                          -- c.f. GHC.findModule, and Trac #9997
             Nothing -> True
             Just (StringLiteral _ pkg_fs) -> pkg_fs == fsLit "this" ||
                            fsToUnitId pkg_fs == moduleUnitId this_mod))
         (addErr (text "A module cannot import itself:" <+> ppr imp_mod_name))

    -- Check for a missing import list (Opt_WarnMissingImportList also
    -- checks for T(..) items but that is done in checkDodgyImport below)
    case imp_details of
        Just (False, _) -> return () -- Explicit import list
        _ | implicit   -> return () -- Do not bleat for implicit imports
          | qual_only  -> return ()
          | otherwise  -> whenWOptM Opt_WarnMissingImportList $
                          addWarn (Reason Opt_WarnMissingImportList)
                                  (missingImportListWarn imp_mod_name)

    iface <- loadSrcInterface doc imp_mod_name want_boot (fmap sl_fs mb_pkg)

    -- Compiler sanity check: if the import didn't say
    -- {-# SOURCE #-} we should not get a hi-boot file
    WARN( not want_boot && mi_boot iface, ppr imp_mod_name ) do

    -- Issue a user warning for a redundant {- SOURCE -} import
    -- NB that we arrange to read all the ordinary imports before
    -- any of the {- SOURCE -} imports.
    --
    -- in --make and GHCi, the compilation manager checks for this,
    -- and indeed we shouldn't do it here because the existence of
    -- the non-boot module depends on the compilation order, which
    -- is not deterministic.  The hs-boot test can show this up.
    dflags <- getDynFlags
    warnIf NoReason
           (want_boot && not (mi_boot iface) && isOneShot (ghcMode dflags))
           (warnRedundantSourceImport imp_mod_name)
    when (mod_safe && not (safeImportsOn dflags)) $
        addErr (text "safe import can't be used as Safe Haskell isn't on!"
                $+$ ptext (sLit $ "please enable Safe Haskell through either "
                                   ++ "Safe, Trustworthy or Unsafe"))

    let
        qual_mod_name = as_mod `orElse` imp_mod_name
        imp_spec = ImpDeclSpec { is_mod = imp_mod_name, is_qual = qual_only,
                                 is_dloc = loc, is_as = qual_mod_name }

    -- filter the imports according to the import declaration
    (new_imp_details, gres) <- filterImports iface imp_spec imp_details

    -- for certain error messages, we’d like to know what could be imported
    -- here, if everything were imported
    potential_gres <- mkGlobalRdrEnv . snd <$> filterImports iface imp_spec Nothing

    -- The global env entries actually brought into scope by this import.
    let gbl_env = mkGlobalRdrEnv gres

        is_hiding | Just (True,_) <- imp_details = True
                  | otherwise                    = False

        -- should the import be safe?
        mod_safe' = mod_safe
                 || (not implicit && safeDirectImpsReq dflags)
                 || (implicit && safeImplicitImpsReq dflags)

    let imv = ImportedModsVal
            { imv_name = qual_mod_name
            , imv_span = loc
            , imv_is_safe = mod_safe'
            , imv_is_hiding = is_hiding
            , imv_all_exports = potential_gres
            , imv_qualified = qual_only
            }
    -- imp_mods is filled in here; everything else by 'calculateAvails'.
    let imports
          = (calculateAvails dflags iface mod_safe' want_boot)
                { imp_mods = unitModuleEnv (mi_module iface) [imv] }

    -- Complain if we import a deprecated module
    whenWOptM Opt_WarnWarningsDeprecations (
       case (mi_warns iface) of
          WarnAll txt -> addWarn (Reason Opt_WarnWarningsDeprecations)
                                 (moduleWarn imp_mod_name txt)
          _           -> return ()
     )

    let new_imp_decl = L loc (decl { ideclSafe = mod_safe'
                                   , ideclHiding = new_imp_details })

    return (new_imp_decl, gbl_env, imports, mi_hpc iface)
-- | Calculate the 'ImportAvails' induced by an import of a particular
-- interface, but without 'imp_mods' (which the caller fills in, since it
-- depends on per-declaration information not available here).
calculateAvails :: DynFlags
                -> ModIface
                -> IsSafeImport
                -> IsBootInterface
                -> ImportAvails
calculateAvails dflags iface mod_safe' want_boot =
  let imp_mod = mi_module iface
      orph_iface = mi_orphan iface
      has_finsts = mi_finsts iface
      deps = mi_deps iface
      trust = getSafeMode $ mi_trust iface
      trust_pkg = mi_trust_pkg iface

      -- If the module exports anything defined in this module, just
      -- ignore it.  Reason: otherwise it looks as if there are two
      -- local definition sites for the thing, and an error gets
      -- reported.  Easiest thing is just to filter them out up
      -- front. This situation only arises if a module imports
      -- itself, or another module that imported it.  (Necessarily,
      -- this invoves a loop.)
      --
      -- We do this *after* filterImports, so that if you say
      --      module A where
      --         import B( AType )
      --         type AType = ...
      --
      --      module B( AType ) where
      --         import {-# SOURCE #-} A( AType )
      --
      -- then you won't get a 'B does not export AType' message.

      -- Compute new transitive dependencies: add imp_mod itself to the
      -- orphan/family-instance module lists when its interface says so.
      orphans | orph_iface = ASSERT( not (imp_mod `elem` dep_orphs deps) )
                             imp_mod : dep_orphs deps
              | otherwise  = dep_orphs deps

      finsts | has_finsts = ASSERT( not (imp_mod `elem` dep_finsts deps) )
                            imp_mod : dep_finsts deps
             | otherwise  = dep_finsts deps

      pkg = moduleUnitId (mi_module iface)

      -- Does this import mean we now require our own pkg
      -- to be trusted? See Note [Trust Own Package]
      ptrust = trust == Sf_Trustworthy || trust_pkg

      (dependent_mods, dependent_pkgs, pkg_trust_req)
         | pkg == thisPackage dflags =
            -- Imported module is from the home package
            -- Take its dependent modules and add imp_mod itself
            -- Take its dependent packages unchanged
            --
            -- NB: (dep_mods deps) might include a hi-boot file
            -- for the module being compiled, CM. Do *not* filter
            -- this out (as we used to), because when we've
            -- finished dealing with the direct imports we want to
            -- know if any of them depended on CM.hi-boot, in
            -- which case we should do the hi-boot consistency
            -- check.  See LoadIface.loadHiBootInterface
            ((moduleName imp_mod,want_boot):dep_mods deps,dep_pkgs deps,ptrust)

         | otherwise =
            -- Imported module is from another package
            -- Dump the dependent modules
            -- Add the package imp_mod comes from to the dependent packages
            ASSERT2( not (pkg `elem` (map fst $ dep_pkgs deps))
                   , ppr pkg <+> ppr (dep_pkgs deps) )
            ([], (pkg, False) : dep_pkgs deps, False)

  in ImportAvails {
          imp_mods       = emptyModuleEnv, -- this gets filled in later
          imp_orphs      = orphans,
          imp_finsts     = finsts,
          imp_dep_mods   = mkModDeps dependent_mods,
          imp_dep_pkgs   = map fst $ dependent_pkgs,
          -- Add in the imported modules trusted package
          -- requirements. ONLY do this though if we import the
          -- module as a safe import.
          -- See Note [Tracking Trust Transitively]
          -- and Note [Trust Transitive Property]
          imp_trust_pkgs = if mod_safe'
                               then map fst $ filter snd dependent_pkgs
                               else [],
          -- Do we require our own pkg to be trusted?
          -- See Note [Trust Own Package]
          imp_trust_own_pkg = pkg_trust_req
     }
-- | Warning text for a @{-# SOURCE #-}@ import whose target has an
-- ordinary (non-boot) interface available.
warnRedundantSourceImport :: ModuleName -> SDoc
warnRedundantSourceImport mod_name =
    hsep [ text "Unnecessary {-# SOURCE #-} in the import of module"
         , quotes (ppr mod_name) ]
{-
************************************************************************
* *
\subsection{importsFromLocalDecls}
* *
************************************************************************
From the top-level declarations of this module produce
* the lexical environment
* the ImportAvails
created by its bindings.
Note [Top-level Names in Template Haskell decl quotes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See also: Note [Interactively-bound Ids in GHCi] in HscTypes
Note [Looking up Exact RdrNames] in RnEnv
Consider a Template Haskell declaration quotation like this:
module M where
f x = h [d| f = 3 |]
When renaming the declarations inside [d| ...|], we treat the
top level binders specially in three ways
1. We give them an Internal Name, not (as usual) an External one.
This is done by RnEnv.newTopSrcBinder.
2. We make them *shadow* the outer bindings.
See Note [GlobalRdrEnv shadowing]
3. We find out whether we are inside a [d| ... |] by testing the TH
stage. This is a slight hack, because the stage field was really
meant for the type checker, and here we are not interested in the
fields of Brack, hence the error thunks in thRnBrack.
-}
-- | Extend the 'GlobalRdrEnv' and the 'FixityEnv' of the typechecker
-- global environment with the bindings described by the given
-- 'AvailInfo's, reporting an error for duplicate local definitions.
extendGlobalRdrEnvRn :: [AvailInfo]
-> MiniFixityEnv
-> RnM (TcGblEnv, TcLclEnv)
-- Updates both the GlobalRdrEnv and the FixityEnv
-- We return a new TcLclEnv only because we might have to
-- delete some bindings from it;
-- see Note [Top-level Names in Template Haskell decl quotes]
extendGlobalRdrEnvRn avails new_fixities
= do { (gbl_env, lcl_env) <- getEnvs
; stage <- getStage
; isGHCi <- getIsGHCi
; let rdr_env = tcg_rdr_env gbl_env
fix_env = tcg_fix_env gbl_env
th_bndrs = tcl_th_bndrs lcl_env
th_lvl = thLevel stage
-- Delete new_occs from global and local envs
-- If we are in a TemplateHaskell decl bracket,
-- we are going to shadow them
-- See Note [GlobalRdrEnv shadowing]
inBracket = isBrackStage stage
lcl_env_TH = lcl_env { tcl_rdr = delLocalRdrEnvList (tcl_rdr lcl_env) new_occs }
-- See Note [GlobalRdrEnv shadowing]
lcl_env2 | inBracket = lcl_env_TH
| otherwise = lcl_env
-- Deal with shadowing: see Note [GlobalRdrEnv shadowing]
want_shadowing = isGHCi || inBracket
rdr_env1 | want_shadowing = shadowNames rdr_env new_names
| otherwise = rdr_env
-- Record the TH binding level of every new name, so that
-- cross-stage references can be checked later
lcl_env3 = lcl_env2 { tcl_th_bndrs = extendNameEnvList th_bndrs
[ (n, (TopLevel, th_lvl))
| n <- new_names ] }
-- add_gre is monadic (it can report duplicate-decl errors),
-- hence foldlM rather than a pure fold
; rdr_env2 <- foldlM add_gre rdr_env1 new_gres
-- NOTE(review): lazy foldl here; presumably fine because new_gres
-- is small per declaration group -- confirm before changing
; let fix_env' = foldl extend_fix_env fix_env new_gres
gbl_env' = gbl_env { tcg_rdr_env = rdr_env2, tcg_fix_env = fix_env' }
; traceRn (text "extendGlobalRdrEnvRn 2" <+> (pprGlobalRdrEnv True rdr_env2))
; return (gbl_env', lcl_env3) }
where
new_names = concatMap availNames avails
new_occs = map nameOccName new_names
-- If there is a fixity decl for the gre, add it to the fixity env
extend_fix_env fix_env gre
| Just (L _ fi) <- lookupFsEnv new_fixities (occNameFS occ)
= extendNameEnv fix_env name (FixItem occ fi)
| otherwise
= fix_env
where
name = gre_name gre
occ = greOccName gre
new_gres :: [GlobalRdrElt] -- New LocalDef GREs, derived from avails
new_gres = concatMap localGREsFromAvail avails
add_gre :: GlobalRdrEnv -> GlobalRdrElt -> RnM GlobalRdrEnv
-- Extend the GlobalRdrEnv with a LocalDef GRE
-- If there is already a LocalDef GRE with the same OccName,
-- report an error and discard the new GRE
-- This establishes INVARIANT 1 of GlobalRdrEnvs
add_gre env gre
| not (null dups) -- Same OccName defined twice
= do { addDupDeclErr (gre : dups); return env }
| otherwise
= return (extendGlobalRdrEnv env gre)
where
name = gre_name gre
occ = nameOccName name
dups = filter isLocalGRE (lookupGlobalRdrEnv env occ)
{- *********************************************************************
* *
@getLocalDeclBinders@ returns the names for an HsDecl
It's used for source code.
*** See Note [The Naming story] in HsDecls ****
* *
********************************************************************* -}
-- | Collect the top-level binders of a declaration group, excluding
-- value bindings, extend the renamer environments with them, and
-- return the extended environments plus the set of new binders.
getLocalNonValBinders :: MiniFixityEnv -> HsGroup RdrName
-> RnM ((TcGblEnv, TcLclEnv), NameSet)
-- Get all the top-level binders bound in the group *except*
-- for value bindings, which are treated separately
-- Specifically we return AvailInfo for
-- * type decls (incl constructors and record selectors)
-- * class decls (including class ops)
-- * associated types
-- * foreign imports
-- * value signatures (in hs-boot files only)
getLocalNonValBinders fixity_env
(HsGroup { hs_valds = binds,
hs_tyclds = tycl_decls,
hs_fords = foreign_decls })
= do { -- Process all type/class decls *except* family instances
; let inst_decls = tycl_decls >>= group_instds
; overload_ok <- xoptM LangExt.DuplicateRecordFields
; (tc_avails, tc_fldss)
<- fmap unzip $ mapM (new_tc overload_ok)
(tyClGroupTyClDecls tycl_decls)
; traceRn (text "getLocalNonValBinders 1" <+> ppr tc_avails)
; envs <- extendGlobalRdrEnvRn tc_avails fixity_env
; setEnvs envs $ do {
-- Bring these things into scope first
-- See Note [Looking up family names in family instances]
-- Process all family instances
-- to bring new data constructors into scope
; (nti_availss, nti_fldss) <- mapAndUnzipM (new_assoc overload_ok)
inst_decls
-- Finish off with value binders:
-- foreign decls and pattern synonyms for an ordinary module
-- type sigs in case of a hs-boot file only
; is_boot <- tcIsHsBootOrSig
; let val_bndrs | is_boot = hs_boot_sig_bndrs
| otherwise = for_hs_bndrs
; val_avails <- mapM new_simple val_bndrs
; let avails = concat nti_availss ++ val_avails
new_bndrs = availsToNameSetWithSelectors avails `unionNameSet`
availsToNameSetWithSelectors tc_avails
flds = concat nti_fldss ++ concat tc_fldss
; traceRn (text "getLocalNonValBinders 2" <+> ppr avails)
; (tcg_env, tcl_env) <- extendGlobalRdrEnvRn avails fixity_env
-- Extend tcg_field_env with new fields (this used to be the
-- work of extendRecordFieldEnv)
; let field_env = extendNameEnvList (tcg_field_env tcg_env) flds
envs = (tcg_env { tcg_field_env = field_env }, tcl_env)
; traceRn (text "getLocalNonValBinders 3" <+> vcat [ppr flds, ppr field_env])
; return (envs, new_bndrs) } }
where
-- NOTE(review): irrefutable-looking pattern; presumably renamed
-- source groups are always ValBindsIn at this point -- confirm
ValBindsIn _val_binds val_sigs = binds
for_hs_bndrs :: [Located RdrName]
for_hs_bndrs = hsForeignDeclsBinders foreign_decls
-- In a hs-boot file, the value binders come from the
-- *signatures*, and there should be no foreign binders
hs_boot_sig_bndrs = [ L decl_loc (unLoc n)
| L decl_loc (TypeSig ns _) <- val_sigs, n <- ns]
-- the SrcSpan attached to the input should be the span of the
-- declaration, not just the name
new_simple :: Located RdrName -> RnM AvailInfo
new_simple rdr_name = do{ nm <- newTopSrcBinder rdr_name
; return (avail nm) }
-- One type/class declaration: bind its name, sub-names and
-- record selectors, and build its AvailTC
new_tc :: Bool -> LTyClDecl RdrName
-> RnM (AvailInfo, [(Name, [FieldLabel])])
new_tc overload_ok tc_decl -- NOT for type/data instances
= do { let (bndrs, flds) = hsLTyClDeclBinders tc_decl
-- NOTE(review): partial pattern -- assumes hsLTyClDeclBinders
-- always returns a non-empty list headed by the main binder
; names@(main_name : sub_names) <- mapM newTopSrcBinder bndrs
; flds' <- mapM (newRecordSelector overload_ok sub_names) flds
; let fld_env = case unLoc tc_decl of
DataDecl { tcdDataDefn = d } -> mk_fld_env d names flds'
_ -> []
; return (AvailTC main_name names flds', fld_env) }
-- Calculate the mapping from constructor names to fields, which
-- will go in tcg_field_env. It's convenient to do this here where
-- we are working with a single datatype definition.
mk_fld_env :: HsDataDefn RdrName -> [Name] -> [FieldLabel] -> [(Name, [FieldLabel])]
mk_fld_env d names flds = concatMap find_con_flds (dd_cons d)
where
find_con_flds (L _ (ConDeclH98 { con_name = L _ rdr
, con_details = RecCon cdflds }))
= [( find_con_name rdr
, concatMap find_con_decl_flds (unLoc cdflds) )]
find_con_flds (L _ (ConDeclGADT
{ con_names = rdrs
, con_type = (HsIB { hsib_body = res_ty})}))
= map (\ (L _ rdr) -> ( find_con_name rdr
, concatMap find_con_decl_flds cdflds))
rdrs
where
(_tvs, _cxt, tau) = splitLHsSigmaTy res_ty
-- Dig the record fields out of the GADT constructor's
-- argument type, if it syntactically is a record arrow
cdflds = case tau of
L _ (HsFunTy
(L _ (HsAppsTy
[L _ (HsAppPrefix (L _ (HsRecTy flds)))])) _) -> flds
L _ (HsFunTy (L _ (HsRecTy flds)) _) -> flds
_ -> []
find_con_flds _ = []
find_con_name rdr
= expectJust "getLocalNonValBinders/find_con_name" $
find (\ n -> nameOccName n == rdrNameOcc rdr) names
find_con_decl_flds (L _ x)
= map find_con_decl_fld (cd_fld_names x)
find_con_decl_fld (L _ (FieldOcc (L _ rdr) _))
= expectJust "getLocalNonValBinders/find_con_decl_fld" $
find (\ fl -> flLabel fl == lbl) flds
where lbl = occNameFS (rdrNameOcc rdr)
-- One instance declaration: type instances bind nothing;
-- data-family and class instances may bind constructors/fields
new_assoc :: Bool -> LInstDecl RdrName
-> RnM ([AvailInfo], [(Name, [FieldLabel])])
new_assoc _ (L _ (TyFamInstD {})) = return ([], [])
-- type instances don't bind new names
new_assoc overload_ok (L _ (DataFamInstD d))
= do { (avail, flds) <- new_di overload_ok Nothing d
; return ([avail], flds) }
new_assoc overload_ok (L _ (ClsInstD (ClsInstDecl { cid_poly_ty = inst_ty
, cid_datafam_insts = adts })))
| Just (L loc cls_rdr) <- getLHsInstDeclClass_maybe inst_ty
= do { cls_nm <- setSrcSpan loc $ lookupGlobalOccRn cls_rdr
; (avails, fldss)
<- mapAndUnzipM (new_loc_di overload_ok (Just cls_nm)) adts
; return (avails, concat fldss) }
| otherwise
= return ([], []) -- Do not crash on ill-formed instances
-- Eg instance !Show Int Trac #3811c
new_di :: Bool -> Maybe Name -> DataFamInstDecl RdrName
-> RnM (AvailInfo, [(Name, [FieldLabel])])
new_di overload_ok mb_cls ti_decl
= do { main_name <- lookupFamInstName mb_cls (dfid_tycon ti_decl)
; let (bndrs, flds) = hsDataFamInstBinders ti_decl
; sub_names <- mapM newTopSrcBinder bndrs
; flds' <- mapM (newRecordSelector overload_ok sub_names) flds
; let avail = AvailTC (unLoc main_name) sub_names flds'
-- main_name is not bound here!
fld_env = mk_fld_env (dfid_defn ti_decl) sub_names flds'
; return (avail, fld_env) }
new_loc_di :: Bool -> Maybe Name -> LDataFamInstDecl RdrName
-> RnM (AvailInfo, [(Name, [FieldLabel])])
new_loc_di overload_ok mb_cls (L _ d) = new_di overload_ok mb_cls d
-- | Make a 'FieldLabel' (with a freshly bound selector 'Name') for one
-- record field, mangling the selector's OccName when
-- DuplicateRecordFields is on.  The @[Name]@ argument is the
-- datatype's constructor names; only the first is used.
newRecordSelector :: Bool -> [Name] -> LFieldOcc RdrName -> RnM FieldLabel
newRecordSelector _ [] _ = error "newRecordSelector: datatype has no constructors!"
newRecordSelector overload_ok (dc:_) (L loc (FieldOcc (L _ fld) _))
= do { selName <- newTopSrcBinder $ L loc $ field
; return $ qualFieldLbl { flSelector = selName } }
where
fieldOccName = occNameFS $ rdrNameOcc fld
qualFieldLbl = mkFieldLabelOccs fieldOccName (nameOccName dc) overload_ok
field | isExact fld = fld
-- use an Exact RdrName as is to preserve the bindings
-- of an already renamer-resolved field and its use
-- sites. This is needed to correctly support record
-- selectors in Template Haskell. See Note [Binders in
-- Template Haskell] in Convert.hs and Note [Looking up
-- Exact RdrNames] in RnEnv.hs.
| otherwise = mkRdrUnqual (flSelector qualFieldLbl)
{-
Note [Looking up family names in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
module M where
type family T a :: *
type instance M.T Int = Bool
We might think that we can simply use 'lookupOccRn' when processing the type
instance to look up 'M.T'. Alas, we can't! The type family declaration is in
the *same* HsGroup as the type instance declaration. Hence, as we are
currently collecting the binders declared in that HsGroup, these binders will
not have been added to the global environment yet.
Solution is simple: process the type family declarations first, extend
the environment, and then process the type instances.
************************************************************************
* *
\subsection{Filtering imports}
* *
************************************************************************
@filterImports@ takes the @ExportEnv@ telling what the imported module makes
available, and filters it through the import spec (if any).
Note [Dealing with imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
For import M( ies ), we take the mi_exports of M, and make
imp_occ_env :: OccEnv (Name, AvailInfo, Maybe Name)
One entry for each Name that M exports; the AvailInfo describes just
that Name.
The situation is made more complicated by associated types. E.g.
module M where
class C a where { data T a }
instance C Int where { data T Int = T1 | T2 }
instance C Bool where { data T Bool = T3 }
Then M's export_avails are (recall the AvailTC invariant from Avails.hs)
C(C,T), T(T,T1,T2,T3)
Notice that T appears *twice*, once as a child and once as a parent.
From this we construct the imp_occ_env
C -> (C, C(C,T), Nothing)
T -> (T, T(T,T1,T2,T3), Just C)
T1 -> (T1, T(T1,T2,T3), Nothing) -- similarly T2,T3
If we say
import M( T(T1,T2) )
then we get *two* Avails: C(T), T(T1,T2)
Note that the imp_occ_env will have entries for data constructors too,
although we never look up data constructors.
-}
-- | Filter a module's exports through an import declaration's import
-- spec (if any), renaming the items and producing the 'GlobalRdrElt's
-- that the import brings into scope.
-- See Note [Dealing with imports]
filterImports
:: ModIface
-> ImpDeclSpec -- The span for the entire import decl
-> Maybe (Bool, Located [LIE RdrName]) -- Import spec; True => hiding
-> RnM (Maybe (Bool, Located [LIE Name]), -- Import spec w/ Names
[GlobalRdrElt]) -- Same again, but in GRE form
filterImports iface decl_spec Nothing
= return (Nothing, gresFromAvails (Just imp_spec) (mi_exports iface))
where
imp_spec = ImpSpec { is_decl = decl_spec, is_item = ImpAll }
filterImports iface decl_spec (Just (want_hiding, L l import_items))
= do -- check for errors, convert RdrNames to Names
items1 <- mapM lookup_lie import_items
let items2 :: [(LIE Name, AvailInfo)]
items2 = concat items1
-- NB the AvailInfo may have duplicates, and several items
-- for the same parent; e.g N(x) and N(y)
names = availsToNameSet (map snd items2)
keep n = not (n `elemNameSet` names)
pruned_avails = filterAvails keep all_avails
hiding_spec = ImpSpec { is_decl = decl_spec, is_item = ImpAll }
-- For a hiding import we keep everything *not* mentioned;
-- for a normal import we keep exactly the mentioned items
gres | want_hiding = gresFromAvails (Just hiding_spec) pruned_avails
| otherwise = concatMap (gresFromIE decl_spec) items2
return (Just (want_hiding, L l (map fst items2)), gres)
where
all_avails = mi_exports iface
-- See Note [Dealing with imports]
imp_occ_env :: OccEnv (Name, -- the name
AvailInfo, -- the export item providing the name
Maybe Name) -- the parent of associated types
imp_occ_env = mkOccEnv_C combine [ (nameOccName n, (n, a, Nothing))
| a <- all_avails, n <- availNames a]
where
-- See example in Note [Dealing with imports]
-- 'combine' is only called for associated types which appear twice
-- in the all_avails. In the example, we combine
-- T(T,T1,T2,T3) and C(C,T) to give (T, T(T,T1,T2,T3), Just C)
combine (name1, a1@(AvailTC p1 _ []), mp1)
(name2, a2@(AvailTC p2 _ []), mp2)
= ASSERT( name1 == name2 && isNothing mp1 && isNothing mp2 )
if p1 == name1 then (name1, a1, Just p2)
else (name1, a2, Just p1)
combine x y = pprPanic "filterImports/combine" (ppr x $$ ppr y)
-- Look up one RdrName in the import environment; qualified
-- names are rejected outright
lookup_name :: RdrName -> IELookupM (Name, AvailInfo, Maybe Name)
lookup_name rdr | isQual rdr = failLookupWith (QualImportError rdr)
| Just succ <- mb_success = return succ
| otherwise = failLookupWith BadImport
where
mb_success = lookupOccEnv imp_occ_env (rdrNameOcc rdr)
lookup_lie :: LIE RdrName -> TcRn [(LIE Name, AvailInfo)]
lookup_lie (L loc ieRdr)
= do (stuff, warns) <- setSrcSpan loc $
liftM (fromMaybe ([],[])) $
run_lookup (lookup_ie ieRdr)
mapM_ emit_warning warns
return [ (L loc ie, avail) | (ie,avail) <- stuff ]
where
-- Warn when importing T(..) if T was exported abstractly
emit_warning (DodgyImport n) = whenWOptM Opt_WarnDodgyImports $
addWarn (Reason Opt_WarnDodgyImports) (dodgyImportWarn n)
emit_warning MissingImportList = whenWOptM Opt_WarnMissingImportList $
addWarn (Reason Opt_WarnMissingImportList) (missingImportListItem ieRdr)
emit_warning BadImportW = whenWOptM Opt_WarnDodgyImports $
addWarn (Reason Opt_WarnDodgyImports) (lookup_err_msg BadImport)
run_lookup :: IELookupM a -> TcRn (Maybe a)
run_lookup m = case m of
Failed err -> addErr (lookup_err_msg err) >> return Nothing
Succeeded a -> return (Just a)
lookup_err_msg err = case err of
BadImport -> badImportItemErr iface decl_spec ieRdr all_avails
IllegalImport -> illegalImportItemErr
QualImportError rdr -> qualImportItemErr rdr
-- For each import item, we convert its RdrNames to Names,
-- and at the same time construct an AvailInfo corresponding
-- to what is actually imported by this item.
-- Returns Nothing on error.
-- We return a list here, because in the case of an import
-- item like C, if we are hiding, then C refers to *both* a
-- type/class and a data constructor. Moreover, when we import
-- data constructors of an associated family, we need separate
-- AvailInfos for the data constructors and the family (as they have
-- different parents). See Note [Dealing with imports]
lookup_ie :: IE RdrName -> IELookupM ([(IE Name, AvailInfo)], [IELookupWarning])
lookup_ie ie = handle_bad_import $ do
case ie of
IEVar (L l n) -> do
(name, avail, _) <- lookup_name n
return ([(IEVar (L l name), trimAvail avail name)], [])
IEThingAll (L l tc) -> do
(name, avail, mb_parent) <- lookup_name tc
let warns = case avail of
Avail {} -- e.g. f(..)
-> [DodgyImport tc]
AvailTC _ subs fs
| null (drop 1 subs) && null fs -- e.g. T(..) where T is a synonym
-> [DodgyImport tc]
| not (is_qual decl_spec) -- e.g. import M( T(..) )
-> [MissingImportList]
| otherwise
-> []
renamed_ie = IEThingAll (L l name)
sub_avails = case avail of
Avail {} -> []
AvailTC name2 subs fs -> [(renamed_ie, AvailTC name2 (subs \\ [name]) fs)]
case mb_parent of
Nothing -> return ([(renamed_ie, avail)], warns)
-- non-associated ty/cls
Just parent -> return ((renamed_ie, AvailTC parent [name] []) : sub_avails, warns)
-- associated type
IEThingAbs (L l tc)
| want_hiding -- hiding ( C )
-- Here the 'C' can be a data constructor
-- *or* a type/class, or even both
-> let tc_name = lookup_name tc
dc_name = lookup_name (setRdrNameSpace tc srcDataName)
in
case catIELookupM [ tc_name, dc_name ] of
[] -> failLookupWith BadImport
names -> return ([mkIEThingAbs l name | name <- names], [])
| otherwise
-> do nameAvail <- lookup_name tc
return ([mkIEThingAbs l nameAvail], [])
IEThingWith (L l rdr_tc) wc rdr_ns rdr_fs ->
ASSERT2(null rdr_fs, ppr rdr_fs) do
(name, AvailTC _ ns subflds, mb_parent) <- lookup_name rdr_tc
-- Look up the children in the sub-names of the parent
let subnames = case ns of -- The tc is first in ns,
[] -> [] -- if it is there at all
-- See the AvailTC Invariant in Avail.hs
(n1:ns1) | n1 == name -> ns1
| otherwise -> ns
case lookupChildren (map Left subnames ++ map Right subflds) rdr_ns of
Nothing -> failLookupWith BadImport
Just (childnames, childflds) ->
case mb_parent of
-- non-associated ty/cls
Nothing
-> return ([(IEThingWith (L l name) wc childnames childflds,
AvailTC name (name:map unLoc childnames) (map unLoc childflds))],
[])
-- associated ty
Just parent
-> return ([(IEThingWith (L l name) wc childnames childflds,
AvailTC name (map unLoc childnames) (map unLoc childflds)),
(IEThingWith (L l name) wc childnames childflds,
AvailTC parent [name] [])],
[])
_other -> failLookupWith IllegalImport
-- could be IEModuleContents, IEGroup, IEDoc, IEDocNamed
-- all errors.
where
mkIEThingAbs l (n, av, Nothing ) = (IEThingAbs (L l n),
trimAvail av n)
mkIEThingAbs l (n, _, Just parent) = (IEThingAbs (L l n),
AvailTC parent [n] [])
-- A BadImport inside "hiding" is only a warning, not an error
handle_bad_import m = catchIELookup m $ \err -> case err of
BadImport | want_hiding -> return ([], [BadImportW])
_ -> failLookupWith err
-- | Lookup monad for import items: either a result or an 'IELookupError'.
type IELookupM = MaybeErr IELookupError
-- | Non-fatal conditions noticed while resolving an import item.
data IELookupWarning
= BadImportW
| MissingImportList
| DodgyImport RdrName
-- NB. use the RdrName for reporting a "dodgy" import
-- | Fatal conditions while resolving an import item.
data IELookupError
= QualImportError RdrName
| BadImport
| IllegalImport
-- | Abort an import-item lookup with the given error.
failLookupWith :: IELookupError -> IELookupM a
failLookupWith = Failed
-- | Run a lookup; on failure, hand the error to the given handler.
catchIELookup :: IELookupM a -> (IELookupError -> IELookupM a) -> IELookupM a
catchIELookup (Succeeded r) _       = Succeeded r
catchIELookup (Failed err)  handler = handler err
-- | Keep the successful lookup results, discarding the failures
-- (order preserved).
catIELookupM :: [IELookupM a] -> [a]
catIELookupM = foldr keep []
  where
    keep (Succeeded a) acc = a : acc
    keep (Failed _)    acc = acc
{-
************************************************************************
* *
\subsection{Import/Export Utils}
* *
************************************************************************
-}
-- | Union two 'AvailInfo's for the same 'availName'.
-- NB: the order of the equations matters -- later equations assume
-- the earlier ones did not match (e.g. the (s1:ss1)/(s2:ss2) case
-- only runs when neither sub-name list is empty).
plusAvail :: AvailInfo -> AvailInfo -> AvailInfo
plusAvail a1 a2
| debugIsOn && availName a1 /= availName a2
= pprPanic "RnEnv.plusAvail names differ" (hsep [ppr a1,ppr a2])
plusAvail a1@(Avail {}) (Avail {}) = a1
plusAvail (AvailTC _ [] []) a2@(AvailTC {}) = a2
plusAvail a1@(AvailTC {}) (AvailTC _ [] []) = a1
plusAvail (AvailTC n1 (s1:ss1) fs1) (AvailTC n2 (s2:ss2) fs2)
= case (n1==s1, n2==s2) of -- Maintain invariant the parent is first
(True,True) -> AvailTC n1 (s1 : (ss1 `unionLists` ss2))
(fs1 `unionLists` fs2)
(True,False) -> AvailTC n1 (s1 : (ss1 `unionLists` (s2:ss2)))
(fs1 `unionLists` fs2)
(False,True) -> AvailTC n1 (s2 : ((s1:ss1) `unionLists` ss2))
(fs1 `unionLists` fs2)
(False,False) -> AvailTC n1 ((s1:ss1) `unionLists` (s2:ss2))
(fs1 `unionLists` fs2)
plusAvail (AvailTC n1 ss1 fs1) (AvailTC _ [] fs2)
= AvailTC n1 ss1 (fs1 `unionLists` fs2)
plusAvail (AvailTC n1 [] fs1) (AvailTC _ ss2 fs2)
= AvailTC n1 ss2 (fs1 `unionLists` fs2)
plusAvail a1 a2 = pprPanic "RnEnv.plusAvail" (hsep [ppr a1,ppr a2])
-- | trims an 'AvailInfo' to keep only a single name
-- (either a sub-name or, via the field-label branch, a single
-- record selector)
trimAvail :: AvailInfo -> Name -> AvailInfo
trimAvail (Avail b n) _ = Avail b n
trimAvail (AvailTC n ns fs) m = case find ((== m) . flSelector) fs of
Just x -> AvailTC n [] [x]
Nothing -> ASSERT( m `elem` ns ) AvailTC n [m] []
-- | Keep only those parts of each 'AvailInfo' whose 'Name' satisfies
-- the predicate; items that become empty are dropped entirely.
filterAvails :: (Name -> Bool) -> [AvailInfo] -> [AvailInfo]
filterAvails keep = foldr (filterAvail keep) []
-- | Filter one 'AvailInfo' by the predicate, consing the surviving
-- item (if any) onto the accumulator.
filterAvail :: (Name -> Bool) -> AvailInfo -> [AvailInfo] -> [AvailInfo]
filterAvail keep avail@(Avail _ n) rest
  | keep n    = avail : rest
  | otherwise = rest
filterAvail keep (AvailTC tc ns fs) rest
  | null kept_ns && null kept_fs = rest
  | otherwise                    = AvailTC tc kept_ns kept_fs : rest
  where
    kept_ns = filter keep ns
    kept_fs = filter (keep . flSelector) fs
-- | Given an import\/export spec, construct the appropriate 'GlobalRdrElt's.
gresFromIE :: ImpDeclSpec -> (LIE Name, AvailInfo) -> [GlobalRdrElt]
gresFromIE decl_spec (L loc ie, avail)
= gresFromAvail prov_fn avail
where
-- Only for T(..) imports are the children considered *implicitly*
-- mentioned; every other item form mentions its names explicitly
is_explicit = case ie of
IEThingAll (L _ name) -> \n -> n == name
_ -> \_ -> True
prov_fn name
= Just (ImpSpec { is_decl = decl_spec, is_item = item_spec })
where
item_spec = ImpSome { is_explicit = is_explicit name, is_iloc = loc }
{-
Note [Children for duplicate record fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the module
{-# LANGUAGE DuplicateRecordFields #-}
module M (F(foo, MkFInt, MkFBool)) where
data family F a
data instance F Int = MkFInt { foo :: Int }
data instance F Bool = MkFBool { foo :: Bool }
The `foo` in the export list refers to *both* selectors! For this
reason, lookupChildren builds an environment that maps the FastString
to a list of items, rather than a single item.
-}
-- | Build a map from each parent 'Name' to the in-scope elements
-- (class methods, constructors, record fields, ...) that name it as
-- their parent.  Parentless elements and pattern synonyms are skipped.
mkChildEnv :: [GlobalRdrElt] -> NameEnv [GlobalRdrElt]
mkChildEnv = foldr insert_child emptyNameEnv
  where
    insert_child gre env =
      case gre_par gre of
        FldParent p _  -> extendNameEnv_Acc (:) singleton env p gre
        ParentIs p     -> extendNameEnv_Acc (:) singleton env p gre
        NoParent       -> env
        PatternSynonym -> env
-- | Select the pattern-synonym elements, preserving their order.
findPatSyns :: [GlobalRdrElt] -> [GlobalRdrElt]
findPatSyns = filter is_patsyn
  where
    is_patsyn (GRE { gre_par = PatternSynonym }) = True
    is_patsyn _                                  = False
-- | Look up a parent's children, returning the empty list when the
-- parent has no entry in the environment.
findChildren :: NameEnv [a] -> Name -> [a]
findChildren env n = fromMaybe [] (lookupNameEnv env n)
lookupChildren :: [Either Name FieldLabel] -> [Located RdrName]
-> Maybe ([Located Name], [Located FieldLabel])
-- (lookupChildren all_kids rdr_items) maps each rdr_item to its
-- corresponding Name all_kids, if the former exists
-- The matching is done by FastString, not OccName, so that
-- Cls( meth, AssocTy )
-- will correctly find AssocTy among the all_kids of Cls, even though
-- the RdrName for AssocTy may have a (bogus) DataName namespace
-- (Really the rdr_items should be FastStrings in the first place.)
lookupChildren all_kids rdr_items
= do xs <- mapM doOne rdr_items
return (fmap concat (partitionEithers xs))
where
-- A single Name match, or a list of all-field matches, succeeds;
-- no match or a mixed Name/field match makes the whole lookup fail
doOne (L l r) = case (lookupFsEnv kid_env . occNameFS . rdrNameOcc) r of
Just [Left n] -> Just (Left (L l n))
Just rs | all isRight rs -> Just (Right (map (L l) (rights rs)))
_ -> Nothing
-- See Note [Children for duplicate record fields]
kid_env = extendFsEnvList_C (++) emptyFsEnv
[(either (occNameFS . nameOccName) flLabel x, [x]) | x <- all_kids]
-- | Split a list of elements into ordinary names and record fields.
classifyGREs :: [GlobalRdrElt] -> ([Name], [FieldLabel])
classifyGREs gres = partitionEithers (map classifyGRE gres)
-- | Classify one element: a record-field child becomes a 'FieldLabel'
-- (overloaded iff it carries a stored label); anything else is a
-- plain 'Name'.
classifyGRE :: GlobalRdrElt -> Either Name FieldLabel
classifyGRE gre =
  case gre_par gre of
    FldParent _ Nothing    -> Right (FieldLabel (occNameFS (nameOccName child)) False child)
    FldParent _ (Just lbl) -> Right (FieldLabel lbl True child)
    _                      -> Left child
  where
    child = gre_name gre
-- | Combines 'AvailInfo's from the same family.
-- The input may contain several items with the same availName, e.g.
--   import Ix( Ix(..), index )
-- gives Ix(Ix,index,range) and Ix(index); we merge such items with
-- 'plusAvail', keyed on their availName.
nubAvails :: [AvailInfo] -> [AvailInfo]
nubAvails avails = nameEnvElts merged
  where
    merged = foldl ins emptyNameEnv avails
    ins env a = extendNameEnv_C plusAvail env (availName a) a
{-
************************************************************************
* *
\subsection{Export list processing}
* *
************************************************************************
Processing the export list.
You might think that we should record things that appear in the export
list as ``occurrences'' (using @addOccurrenceName@), but you'd be
wrong. We do check (here) that they are in scope, but there is no
need to slurp in their actual declaration (which is what
@addOccurrenceName@ forces).
Indeed, doing so would cause big trouble when compiling @PrelBase@, because
it re-exports @GHC@, which includes @takeMVar#@, whose type includes
@ConcBase.StateAndSynchVar#@, and so on...
Note [Exports of data families]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose you see (Trac #5306)
module M where
import X( F )
data instance F Int = FInt
What does M export? AvailTC F [FInt]
or AvailTC F [F,FInt]?
The former is strictly right because F isn't defined in this module.
But then you can never do an explicit import of M, thus
import M( F( FInt ) )
because F isn't exported by M. Nor can you import FInt alone from here
import M( FInt )
because we don't have syntax to support that. (It looks like an import of
the type FInt.)
At one point I implemented a compromise:
* When constructing exports with no export list, or with module M(
module M ), we add the parent to the exports as well.
* But not when you see module M( f ), even if f is a
class method with a parent.
* Nor when you see module M( module N ), with N /= M.
But the compromise seemed too much of a hack, so we backed it out.
You just have to use an explicit export list:
module M( F(..) ) where ...
-}
type ExportAccum -- The type of the accumulating parameter of
-- the main worker function in rnExports
= ([LIE Name], -- Export items with Names
ExportOccMap, -- Tracks exported occurrence names
[AvailInfo]) -- The accumulated exported stuff
-- Not nub'd!
-- | Starting accumulator: no items, no occurrences, no avails.
emptyExportAccum :: ExportAccum
emptyExportAccum = ([], emptyOccEnv, [])
type ExportOccMap = OccEnv (Name, IE RdrName)
-- Tracks what a particular exported OccName
-- in an export list refers to, and which item
-- it came from. It's illegal to export two distinct things
-- that have the same occurrence name
-- | Rename the export list (or synthesise one when the module header
-- is implicit) and record the resulting exports in the 'TcGblEnv'.
rnExports :: Bool -- False => no 'module M(..) where' header at all
-> Maybe (Located [LIE RdrName]) -- Nothing => no explicit export list
-> TcGblEnv
-> RnM (Maybe [LIE Name], TcGblEnv)
-- Complains if two distinct exports have same OccName
-- Warns about identical exports.
-- Complains about exports items not in scope
rnExports explicit_mod exports
tcg_env@(TcGblEnv { tcg_mod = this_mod,
tcg_rdr_env = rdr_env,
tcg_imports = imports })
= unsetWOptM Opt_WarnWarningsDeprecations $
-- Do not report deprecations arising from the export
-- list, to avoid bleating about re-exporting a deprecated
-- thing (especially via 'module Foo' export item)
do {
-- If the module header is omitted altogether, then behave
-- as if the user had written "module Main(main) where..."
-- EXCEPT in interactive mode, when we behave as if he had
-- written "module Main where ..."
-- Reason: don't want to complain about 'main' not in scope
-- in interactive mode
; dflags <- getDynFlags
; let real_exports
| explicit_mod = exports
| ghcLink dflags == LinkInMemory = Nothing
| otherwise
= Just (noLoc [noLoc (IEVar (noLoc main_RDR_Unqual))])
-- ToDo: the 'noLoc' here is unhelpful if 'main'
-- turns out to be out of scope
; (rn_exports, avails) <- exports_from_avail real_exports rdr_env imports this_mod
; traceRn (ppr avails)
; let final_avails = nubAvails avails -- Combine families
final_ns = availsToNameSetWithSelectors final_avails
; traceRn (text "rnExports: Exports:" <+> ppr final_avails)
-- Keep renamed exports only if the typechecker asked for them
-- (tcg_rn_exports was initialised to Just)
; let new_tcg_env =
(tcg_env { tcg_exports = final_avails,
tcg_rn_exports = case tcg_rn_exports tcg_env of
Nothing -> Nothing
Just _ -> rn_exports,
tcg_dus = tcg_dus tcg_env `plusDU`
usesOnly final_ns })
; return (rn_exports, new_tcg_env) }
exports_from_avail :: Maybe (Located [LIE RdrName])
-- Nothing => no explicit export list
-> GlobalRdrEnv
-> ImportAvails
-> Module
-> RnM (Maybe [LIE Name], [AvailInfo])
exports_from_avail Nothing rdr_env _imports _this_mod
-- The same as (module M) where M is the current module name,
-- so that's how we handle it, except we also export the data family
-- when a data instance is exported.
= let avails = [ fix_faminst $ availFromGRE gre
| gre <- globalRdrEnvElts rdr_env
, isLocalGRE gre ]
in return (Nothing, avails)
where
-- #11164: when we define a data instance
-- but not data family, re-export the family
-- Even though we don't check whether this is actually a data family
-- only data families can locally define subordinate things (`ns` here)
-- without locally defining (and instead importing) the parent (`n`)
fix_faminst (AvailTC n ns flds)
| not (n `elem` ns)
= AvailTC n (n:ns) flds
fix_faminst avail = avail
exports_from_avail (Just (L _ rdr_items)) rdr_env imports this_mod
= do (ie_names, _, exports) <- foldlM do_litem emptyExportAccum rdr_items
return (Just ie_names, exports)
where
do_litem :: ExportAccum -> LIE RdrName -> RnM ExportAccum
do_litem acc lie = setSrcSpan (getLoc lie) (exports_from_item acc lie)
-- Maps a parent to its in-scope children
kids_env :: NameEnv [GlobalRdrElt]
kids_env = mkChildEnv (globalRdrEnvElts rdr_env)
pat_syns :: [GlobalRdrElt]
pat_syns = findPatSyns (globalRdrEnvElts rdr_env)
imported_modules = [ imv_name imv
| xs <- moduleEnvElts $ imp_mods imports, imv <- xs ]
exports_from_item :: ExportAccum -> LIE RdrName -> RnM ExportAccum
exports_from_item acc@(ie_names, occs, exports)
(L loc (IEModuleContents (L lm mod)))
| let earlier_mods = [ mod
| (L _ (IEModuleContents (L _ mod))) <- ie_names ]
, mod `elem` earlier_mods -- Duplicate export of M
= do { warn_dup_exports <- woptM Opt_WarnDuplicateExports ;
warnIf (Reason Opt_WarnDuplicateExports) warn_dup_exports
(dupModuleExport mod) ;
return acc }
| otherwise
= do { warnDodgyExports <- woptM Opt_WarnDodgyExports
; let { exportValid = (mod `elem` imported_modules)
|| (moduleName this_mod == mod)
; gre_prs = pickGREsModExp mod (globalRdrEnvElts rdr_env)
; new_exports = map (availFromGRE . fst) gre_prs
; names = map (gre_name . fst) gre_prs
; all_gres = foldr (\(gre1,gre2) gres -> gre1 : gre2 : gres) [] gre_prs
}
; checkErr exportValid (moduleNotImported mod)
; warnIf (Reason Opt_WarnDodgyExports)
(warnDodgyExports && exportValid && null gre_prs)
(nullModuleExport mod)
; traceRn (text "efa" <+> (ppr mod $$ ppr all_gres))
; addUsedGREs all_gres
; occs' <- check_occs (IEModuleContents (noLoc mod)) occs names
-- This check_occs not only finds conflicts
-- between this item and others, but also
-- internally within this item. That is, if
-- 'M.x' is in scope in several ways, we'll have
-- several members of mod_avails with the same
-- OccName.
; traceRn (vcat [ text "export mod" <+> ppr mod
, ppr new_exports ])
; return (L loc (IEModuleContents (L lm mod)) : ie_names,
occs', new_exports ++ exports) }
exports_from_item acc@(lie_names, occs, exports) (L loc ie)
| isDoc ie
= do new_ie <- lookup_doc_ie ie
return (L loc new_ie : lie_names, occs, exports)
| otherwise
= do (new_ie, avail) <- lookup_ie ie
if isUnboundName (ieName new_ie)
then return acc -- Avoid error cascade
else do
occs' <- check_occs ie occs (availNames avail)
return (L loc new_ie : lie_names, occs', avail : exports)
-------------
lookup_ie :: IE RdrName -> RnM (IE Name, AvailInfo)
lookup_ie (IEVar (L l rdr))
= do (name, avail) <- lookupGreAvailRn rdr
return (IEVar (L l name), avail)
lookup_ie (IEThingAbs (L l rdr))
= do (name, avail) <- lookupGreAvailRn rdr
return (IEThingAbs (L l name), avail)
lookup_ie ie@(IEThingAll n)
= do
(n, avail, flds) <- lookup_ie_all ie n
let name = unLoc n
return (IEThingAll n, AvailTC name (name:avail) flds)
lookup_ie ie@(IEThingWith l wc sub_rdrs _)
= do
(lname, subs, avails, flds) <- lookup_ie_with ie l sub_rdrs
(_, all_avail, all_flds) <-
case wc of
NoIEWildcard -> return (lname, [], [])
IEWildcard _ -> lookup_ie_all ie l
let name = unLoc lname
return (IEThingWith lname wc subs [],
AvailTC name (name : avails ++ all_avail)
(flds ++ all_flds))
lookup_ie _ = panic "lookup_ie" -- Other cases covered earlier
lookup_ie_with :: IE RdrName -> Located RdrName -> [Located RdrName]
-> RnM (Located Name, [Located Name], [Name], [FieldLabel])
lookup_ie_with ie (L l rdr) sub_rdrs
= do name <- lookupGlobalOccRnExport rdr
let gres = findChildren kids_env name
mchildren =
lookupChildren (map classifyGRE (gres ++ pat_syns)) sub_rdrs
addUsedKids rdr gres
if isUnboundName name
then return (L l name, [], [name], [])
else
case mchildren of
Nothing -> do
addErr (exportItemErr ie)
return (L l name, [], [name], [])
Just (non_flds, flds) -> do
addUsedKids rdr gres
return (L l name, non_flds
, map unLoc non_flds
, map unLoc flds)
lookup_ie_all :: IE RdrName -> Located RdrName
-> RnM (Located Name, [Name], [FieldLabel])
lookup_ie_all ie (L l rdr) =
do name <- lookupGlobalOccRnExport rdr
let gres = findChildren kids_env name
(non_flds, flds) = classifyGREs gres
addUsedKids rdr gres
warnDodgyExports <- woptM Opt_WarnDodgyExports
when (null gres) $
if isTyConName name
then when warnDodgyExports $
addWarn (Reason Opt_WarnDodgyExports)
(dodgyExportWarn name)
else -- This occurs when you export T(..), but
-- only import T abstractly, or T is a synonym.
addErr (exportItemErr ie)
return (L l name, non_flds, flds)
-------------
lookup_doc_ie :: IE RdrName -> RnM (IE Name)
lookup_doc_ie (IEGroup lev doc) = do rn_doc <- rnHsDoc doc
return (IEGroup lev rn_doc)
lookup_doc_ie (IEDoc doc) = do rn_doc <- rnHsDoc doc
return (IEDoc rn_doc)
lookup_doc_ie (IEDocNamed str) = return (IEDocNamed str)
lookup_doc_ie _ = panic "lookup_doc_ie" -- Other cases covered earlier
-- In an export item M.T(A,B,C), we want to treat the uses of
-- A,B,C as if they were M.A, M.B, M.C
-- Happily pickGREs does just the right thing
addUsedKids :: RdrName -> [GlobalRdrElt] -> RnM ()
addUsedKids parent_rdr kid_gres = addUsedGREs (pickGREs parent_rdr kid_gres)
-- | Is this export item a piece of documentation (a doc comment,
-- a named doc chunk, or a section heading) rather than a real entity?
isDoc :: IE RdrName -> Bool
isDoc ie = case ie of
  IEDoc _      -> True
  IEDocNamed _ -> True
  IEGroup _ _  -> True
  _            -> False
-------------------------------
check_occs :: IE RdrName -> ExportOccMap -> [Name] -> RnM ExportOccMap
check_occs ie occs names -- 'names' are the entities specifed by 'ie'
= foldlM check occs names
where
check occs name
= case lookupOccEnv occs name_occ of
Nothing -> return (extendOccEnv occs name_occ (name, ie))
Just (name', ie')
| name == name' -- Duplicate export
-- But we don't want to warn if the same thing is exported
-- by two different module exports. See ticket #4478.
-> do unless (dupExport_ok name ie ie') $ do
warn_dup_exports <- woptM Opt_WarnDuplicateExports
warnIf (Reason Opt_WarnDuplicateExports) warn_dup_exports
(dupExportWarn name_occ ie ie')
return occs
| otherwise -- Same occ name but different names: an error
-> do { global_env <- getGlobalRdrEnv ;
addErr (exportClashErr global_env name' name ie' ie) ;
return occs }
where
name_occ = nameOccName name
dupExport_ok :: Name -> IE RdrName -> IE RdrName -> Bool
-- The Name is exported by both IEs. Is that ok?
-- "No" iff the name is mentioned explicitly in both IEs
-- or one of the IEs mentions the name *alone*
-- "Yes" otherwise
--
-- Examples of "no": module M( f, f )
-- module M( fmap, Functor(..) )
-- module M( module Data.List, head )
--
-- Example of "yes"
-- module M( module A, module B ) where
-- import A( f )
-- import B( f )
--
-- Example of "yes" (Trac #2436)
-- module M( C(..), T(..) ) where
-- class C a where { data T a }
-- instance C Int where { data T Int = TInt }
--
-- Example of "yes" (Trac #2436)
-- module Foo ( T ) where
-- data family T a
-- module Bar ( T(..), module Foo ) where
-- import Foo
-- data instance T Int = TInt
dupExport_ok n ie1 ie2
= not ( single ie1 || single ie2
|| (explicit_in ie1 && explicit_in ie2) )
where
explicit_in (IEModuleContents _) = False -- module M
explicit_in (IEThingAll r) = nameOccName n == rdrNameOcc (unLoc r) -- T(..)
explicit_in _ = True
single (IEVar {}) = True
single (IEThingAbs {}) = True
single _ = False
{-
*********************************************************
* *
\subsection{Unused names}
* *
*********************************************************
-}
reportUnusedNames :: Maybe (Located [LIE RdrName]) -- Export list
-> TcGblEnv -> RnM ()
reportUnusedNames _export_decls gbl_env
= do { traceRn ((text "RUN") <+> (ppr (tcg_dus gbl_env)))
; warnUnusedImportDecls gbl_env
; warnUnusedTopBinds unused_locals
; warnMissingSignatures gbl_env }
where
used_names :: NameSet
used_names = findUses (tcg_dus gbl_env) emptyNameSet
-- NB: currently, if f x = g, we only treat 'g' as used if 'f' is used
-- Hence findUses
-- Collect the defined names from the in-scope environment
defined_names :: [GlobalRdrElt]
defined_names = globalRdrEnvElts (tcg_rdr_env gbl_env)
-- Note that defined_and_used, defined_but_not_used
-- are both [GRE]; that's why we need defined_and_used
-- rather than just used_names
_defined_and_used, defined_but_not_used :: [GlobalRdrElt]
(_defined_and_used, defined_but_not_used)
= partition (gre_is_used used_names) defined_names
kids_env = mkChildEnv defined_names
-- This is done in mkExports too; duplicated work
gre_is_used :: NameSet -> GlobalRdrElt -> Bool
gre_is_used used_names (GRE {gre_name = name})
= name `elemNameSet` used_names
|| any (\ gre -> gre_name gre `elemNameSet` used_names) (findChildren kids_env name)
-- A use of C implies a use of T,
-- if C was brought into scope by T(..) or T(C)
-- Filter out the ones that are
-- (a) defined in this module, and
-- (b) not defined by a 'deriving' clause
-- The latter have an Internal Name, so we can filter them out easily
unused_locals :: [GlobalRdrElt]
unused_locals = filter is_unused_local defined_but_not_used
is_unused_local :: GlobalRdrElt -> Bool
is_unused_local gre = isLocalGRE gre && isExternalName (gre_name gre)
{-
*********************************************************
* *
\subsection{Unused imports}
* *
*********************************************************
This code finds which import declarations are unused. The
specification and implementation notes are here:
http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/UnusedImports
-}
type ImportDeclUsage
= ( LImportDecl Name -- The import declaration
, [AvailInfo] -- What *is* used (normalised)
, [Name] ) -- What is imported but *not* used
warnUnusedImportDecls :: TcGblEnv -> RnM ()
warnUnusedImportDecls gbl_env
= do { uses <- readMutVar (tcg_used_gres gbl_env)
; let user_imports = filterOut (ideclImplicit . unLoc) (tcg_rn_imports gbl_env)
-- This whole function deals only with *user* imports
-- both for warning about unnecessary ones, and for
-- deciding the minimal ones
rdr_env = tcg_rdr_env gbl_env
fld_env = mkFieldEnv rdr_env
; let usage :: [ImportDeclUsage]
usage = findImportUsage user_imports uses
; traceRn (vcat [ text "Uses:" <+> ppr uses
, text "Import usage" <+> ppr usage])
; whenWOptM Opt_WarnUnusedImports $
mapM_ (warnUnusedImport Opt_WarnUnusedImports fld_env) usage
; whenGOptM Opt_D_dump_minimal_imports $
printMinimalImports usage }
-- | Warn the user about top level binders that lack type signatures.
warnMissingSignatures :: TcGblEnv -> RnM ()
warnMissingSignatures gbl_env
= do { let exports = availsToNameSet (tcg_exports gbl_env)
sig_ns = tcg_sigs gbl_env
-- We use sig_ns to exclude top-level bindings that are generated by GHC
binds = collectHsBindsBinders $ tcg_binds gbl_env
pat_syns = tcg_patsyns gbl_env
-- Warn about missing signatures
       -- Do this only when we have a type to offer
; warn_missing_sigs <- woptM Opt_WarnMissingSignatures
; warn_only_exported <- woptM Opt_WarnMissingExportedSignatures
; warn_pat_syns <- woptM Opt_WarnMissingPatternSynonymSignatures
; let add_sig_warns
| warn_only_exported = add_warns Opt_WarnMissingExportedSignatures
| warn_missing_sigs = add_warns Opt_WarnMissingSignatures
| warn_pat_syns = add_warns Opt_WarnMissingPatternSynonymSignatures
| otherwise = return ()
add_warns flag
= when warn_pat_syns
(mapM_ add_pat_syn_warn pat_syns) >>
when (warn_missing_sigs || warn_only_exported)
(mapM_ add_bind_warn binds)
where
add_pat_syn_warn p
= add_warn (patSynName p) (pprPatSynType p)
add_bind_warn id
= do { env <- tcInitTidyEnv -- Why not use emptyTidyEnv?
; let name = idName id
(_, ty) = tidyOpenType env (idType id)
ty_msg = ppr ty
; add_warn name ty_msg }
add_warn name ty_msg
= when (name `elemNameSet` sig_ns && export_check name)
(addWarnAt (Reason flag) (getSrcSpan name)
(get_msg name ty_msg))
export_check name
= not warn_only_exported || name `elemNameSet` exports
get_msg name ty_msg
= sep [ text "Top-level binding with no type signature:",
nest 2 $ pprPrefixName name <+> dcolon <+> ty_msg ]
; add_sig_warns }
{-
Note [The ImportMap]
~~~~~~~~~~~~~~~~~~~~
The ImportMap is a short-lived intermediate data structure that records, for
each import declaration, what stuff brought into scope by that
declaration is actually used in the module.
The SrcLoc is the location of the END of a particular 'import'
declaration. Why *END*? Because we don't want to get confused
by the implicit Prelude import. Consider (Trac #7476) the module
import Foo( foo )
main = print foo
There is an implicit 'import Prelude(print)', and it gets a SrcSpan
of line 1:1 (just the point, not a span). If we use the *START* of
the SrcSpan to identify the import decl, we'll confuse the implicit
import Prelude with the explicit 'import Foo'. So we use the END.
It's just a cheap hack; we could equally well use the Span too.
The AvailInfos are the things imported from that decl (just a list,
not normalised).
-}
type ImportMap = Map SrcLoc [AvailInfo] -- See [The ImportMap]
findImportUsage :: [LImportDecl Name]
-> [GlobalRdrElt]
-> [ImportDeclUsage]
findImportUsage imports used_gres
= map unused_decl imports
where
import_usage :: ImportMap
import_usage
= foldr extendImportMap Map.empty used_gres
unused_decl decl@(L loc (ImportDecl { ideclHiding = imps }))
= (decl, nubAvails used_avails, nameSetElems unused_imps)
where
used_avails = Map.lookup (srcSpanEnd loc) import_usage `orElse` []
-- srcSpanEnd: see Note [The ImportMap]
used_names = availsToNameSetWithSelectors used_avails
used_parents = mkNameSet [n | AvailTC n _ _ <- used_avails]
unused_imps -- Not trivial; see eg Trac #7454
= case imps of
Just (False, L _ imp_ies) ->
foldr (add_unused . unLoc) emptyNameSet imp_ies
_other -> emptyNameSet -- No explicit import list => no unused-name list
add_unused :: IE Name -> NameSet -> NameSet
add_unused (IEVar (L _ n)) acc = add_unused_name n acc
add_unused (IEThingAbs (L _ n)) acc = add_unused_name n acc
add_unused (IEThingAll (L _ n)) acc = add_unused_all n acc
add_unused (IEThingWith (L _ p) wc ns fs) acc =
add_wc_all (add_unused_with p xs acc)
where xs = map unLoc ns ++ map (flSelector . unLoc) fs
add_wc_all = case wc of
NoIEWildcard -> id
IEWildcard _ -> add_unused_all p
add_unused _ acc = acc
add_unused_name n acc
| n `elemNameSet` used_names = acc
| otherwise = acc `extendNameSet` n
add_unused_all n acc
| n `elemNameSet` used_names = acc
| n `elemNameSet` used_parents = acc
| otherwise = acc `extendNameSet` n
add_unused_with p ns acc
| all (`elemNameSet` acc1) ns = add_unused_name p acc1
| otherwise = acc1
where
acc1 = foldr add_unused_name acc ns
-- If you use 'signum' from Num, then the user may well have
-- imported Num(signum). We don't want to complain that
-- Num is not itself mentioned. Hence the two cases in add_unused_with.
extendImportMap :: GlobalRdrElt -> ImportMap -> ImportMap
-- For each of a list of used GREs, find all the import decls that brought
-- it into scope; choose one of them (bestImport), and record
-- the RdrName in that import decl's entry in the ImportMap
extendImportMap gre imp_map
= add_imp gre (bestImport (gre_imp gre)) imp_map
where
add_imp :: GlobalRdrElt -> ImportSpec -> ImportMap -> ImportMap
add_imp gre (ImpSpec { is_decl = imp_decl_spec }) imp_map
= Map.insertWith add decl_loc [avail] imp_map
where
add _ avails = avail : avails -- add is really just a specialised (++)
decl_loc = srcSpanEnd (is_dloc imp_decl_spec)
-- For srcSpanEnd see Note [The ImportMap]
avail = availFromGRE gre
warnUnusedImport :: WarningFlag -> NameEnv (FieldLabelString, Name)
-> ImportDeclUsage -> RnM ()
warnUnusedImport flag fld_env (L loc decl, used, unused)
| Just (False,L _ []) <- ideclHiding decl
= return () -- Do not warn for 'import M()'
| Just (True, L _ hides) <- ideclHiding decl
, not (null hides)
, pRELUDE_NAME == unLoc (ideclName decl)
= return () -- Note [Do not warn about Prelude hiding]
| null used = addWarnAt (Reason flag) loc msg1 -- Nothing used; drop entire decl
| null unused = return () -- Everything imported is used; nop
| otherwise = addWarnAt (Reason flag) loc msg2 -- Some imports are unused
where
msg1 = vcat [pp_herald <+> quotes pp_mod <+> pp_not_used,
nest 2 (text "except perhaps to import instances from"
<+> quotes pp_mod),
text "To import instances alone, use:"
<+> text "import" <+> pp_mod <> parens Outputable.empty ]
msg2 = sep [pp_herald <+> quotes sort_unused,
text "from module" <+> quotes pp_mod <+> pp_not_used]
pp_herald = text "The" <+> pp_qual <+> text "import of"
pp_qual
| ideclQualified decl = text "qualified"
| otherwise = Outputable.empty
pp_mod = ppr (unLoc (ideclName decl))
pp_not_used = text "is redundant"
ppr_possible_field n = case lookupNameEnv fld_env n of
Just (fld, p) -> ppr p <> parens (ppr fld)
Nothing -> ppr n
-- Print unused names in a deterministic (lexicographic) order
sort_unused = pprWithCommas ppr_possible_field $
sortBy (comparing nameOccName) unused
{-
Note [Do not warn about Prelude hiding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not warn about
import Prelude hiding( x, y )
because even if nothing else from Prelude is used, it may be essential to hide
x,y to avoid name-shadowing warnings. Example (Trac #9061)
import Prelude hiding( log )
f x = log where log = ()
Note [Printing minimal imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To print the minimal imports we walk over the user-supplied import
decls, and simply trim their import lists. NB that
* We do *not* change the 'qualified' or 'as' parts!
 * We do not discard a decl altogether; we might need instances
from it. Instead we just trim to an empty import list
-}
printMinimalImports :: [ImportDeclUsage] -> RnM ()
-- See Note [Printing minimal imports]
printMinimalImports imports_w_usage
= do { imports' <- mapM mk_minimal imports_w_usage
; this_mod <- getModule
; dflags <- getDynFlags
; liftIO $
do { h <- openFile (mkFilename dflags this_mod) WriteMode
; printForUser dflags h neverQualify (vcat (map ppr imports')) }
-- The neverQualify is important. We are printing Names
-- but they are in the context of an 'import' decl, and
-- we never qualify things inside there
-- E.g. import Blag( f, b )
-- not import Blag( Blag.f, Blag.g )!
}
where
mkFilename dflags this_mod
| Just d <- dumpDir dflags = d </> basefn
| otherwise = basefn
where
basefn = moduleNameString (moduleName this_mod) ++ ".imports"
mk_minimal (L l decl, used, unused)
| null unused
, Just (False, _) <- ideclHiding decl
= return (L l decl)
| otherwise
= do { let ImportDecl { ideclName = L _ mod_name
, ideclSource = is_boot
, ideclPkgQual = mb_pkg } = decl
; iface <- loadSrcInterface doc mod_name is_boot (fmap sl_fs mb_pkg)
; let lies = map (L l) (concatMap (to_ie iface) used)
; return (L l (decl { ideclHiding = Just (False, L l lies) })) }
where
doc = text "Compute minimal imports for" <+> ppr decl
to_ie :: ModIface -> AvailInfo -> [IE Name]
-- The main trick here is that if we're importing all the constructors
-- we want to say "T(..)", but if we're importing only a subset we want
-- to say "T(A,B,C)". So we have to find out what the module exports.
to_ie _ (Avail _ n)
= [IEVar (noLoc n)]
to_ie _ (AvailTC n [m] [])
| n==m = [IEThingAbs (noLoc n)]
to_ie iface (AvailTC n ns fs)
= case [(xs,gs) | AvailTC x xs gs <- mi_exports iface
, x == n
, x `elem` xs -- Note [Partial export]
] of
[xs] | all_used xs -> [IEThingAll (noLoc n)]
| otherwise -> [IEThingWith (noLoc n) NoIEWildcard
(map noLoc (filter (/= n) ns))
(map noLoc fs)]
-- Note [Overloaded field import]
_other | all_non_overloaded fs
-> map (IEVar . noLoc) $ ns ++ map flSelector fs
| otherwise -> [IEThingWith (noLoc n) NoIEWildcard
(map noLoc (filter (/= n) ns)) (map noLoc fs)]
where
fld_lbls = map flLabel fs
all_used (avail_occs, avail_flds)
= all (`elem` ns) avail_occs
&& all (`elem` fld_lbls) (map flLabel avail_flds)
all_non_overloaded = all (not . flIsOverloaded)
{-
Note [Partial export]
~~~~~~~~~~~~~~~~~~~~~
Suppose we have
module A( op ) where
class C a where
op :: a -> a
module B where
import A
f = ..op...
Then the minimal import for module B is
import A( op )
not
import A( C( op ) )
which we would usually generate if C was exported from B. Hence
the (x `elem` xs) test when deciding what to generate.
Note [Overloaded field import]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
On the other hand, if we have
{-# LANGUAGE DuplicateRecordFields #-}
module A where
data T = MkT { foo :: Int }
module B where
import A
f = ...foo...
then the minimal import for module B must be
import A ( T(foo) )
because when DuplicateRecordFields is enabled, field selectors are
not in scope without their enclosing datatype.
************************************************************************
* *
\subsection{Errors}
* *
************************************************************************
-}
-- | Error for an import item written with a module qualifier,
-- e.g. @import M ( N.f )@, which is not legal Haskell.
qualImportItemErr :: RdrName -> SDoc
qualImportItemErr rdr
  = hang (text "Illegal qualified name in import item:")
       2 (ppr rdr)
badImportItemErrStd :: ModIface -> ImpDeclSpec -> IE RdrName -> SDoc
badImportItemErrStd iface decl_spec ie
= sep [text "Module", quotes (ppr (is_mod decl_spec)), source_import,
text "does not export", quotes (ppr ie)]
where
source_import | mi_boot iface = text "(hi-boot interface)"
| otherwise = Outputable.empty
badImportItemErrDataCon :: OccName -> ModIface -> ImpDeclSpec -> IE RdrName -> SDoc
badImportItemErrDataCon dataType_occ iface decl_spec ie
= vcat [ text "In module"
<+> quotes (ppr (is_mod decl_spec))
<+> source_import <> colon
, nest 2 $ quotes datacon
<+> text "is a data constructor of"
<+> quotes dataType
, text "To import it use"
, nest 2 $ quotes (text "import")
<+> ppr (is_mod decl_spec)
<> parens_sp (dataType <> parens_sp datacon)
, text "or"
, nest 2 $ quotes (text "import")
<+> ppr (is_mod decl_spec)
<> parens_sp (dataType <> text "(..)")
]
where
datacon_occ = rdrNameOcc $ ieName ie
datacon = parenSymOcc datacon_occ (ppr datacon_occ)
dataType = parenSymOcc dataType_occ (ppr dataType_occ)
source_import | mi_boot iface = text "(hi-boot interface)"
| otherwise = Outputable.empty
parens_sp d = parens (space <> d <> space) -- T( f,g )
badImportItemErr :: ModIface -> ImpDeclSpec -> IE RdrName -> [AvailInfo] -> SDoc
badImportItemErr iface decl_spec ie avails
= case find checkIfDataCon avails of
Just con -> badImportItemErrDataCon (availOccName con) iface decl_spec ie
Nothing -> badImportItemErrStd iface decl_spec ie
where
checkIfDataCon (AvailTC _ ns _) =
case find (\n -> importedFS == nameOccNameFS n) ns of
Just n -> isDataConName n
Nothing -> False
checkIfDataCon _ = False
availOccName = nameOccName . availName
nameOccNameFS = occNameFS . nameOccName
importedFS = occNameFS . rdrNameOcc $ ieName ie
-- | Generic error for an import item that is syntactically ill-formed.
illegalImportItemErr :: SDoc
illegalImportItemErr = text "Illegal import item"
-- | Warning for an import item @T(..)@ where @T@ has nothing to expand to;
-- see 'dodgyMsg' for the shared message text.
dodgyImportWarn :: RdrName -> SDoc
dodgyImportWarn item = dodgyMsg (text "import") item
-- | Warning for an export item @T(..)@ where @T@ has nothing to expand to;
-- see 'dodgyMsg' for the shared message text.
dodgyExportWarn :: Name -> SDoc
dodgyExportWarn item = dodgyMsg (text "export") item
-- | Shared message for the dodgy-import\/dodgy-export warnings: a @T(..)@
-- item where @T@ has no constructors or class methods in scope.
-- The @kind@ argument is either @\"import\"@ or @\"export\"@.
dodgyMsg :: (OutputableBndr n, HasOccName n) => SDoc -> n -> SDoc
dodgyMsg kind tc
  = sep [ text "The" <+> kind <+> text "item"
          -- 'text' rather than 'ptext (sLit ...)', for consistency with
          -- the rest of this module (the two are equivalent).
          <+> quotes (ppr (IEThingAll (noLoc tc)))
          <+> text "suggests that",
          quotes (ppr tc) <+> text "has (in-scope) constructors or class methods,",
          text "but it has none" ]
-- | Error for an export item that names children (constructors, class
-- methods, fields) which are not in scope at the export site.
exportItemErr :: IE RdrName -> SDoc
exportItemErr export_item
  = sep [ text "The export item" <+> quotes (ppr export_item),
          text "attempts to export constructors or class methods that are not visible here" ]
exportClashErr :: GlobalRdrEnv -> Name -> Name -> IE RdrName -> IE RdrName
-> MsgDoc
exportClashErr global_env name1 name2 ie1 ie2
= vcat [ text "Conflicting exports for" <+> quotes (ppr occ) <> colon
, ppr_export ie1' name1'
, ppr_export ie2' name2' ]
where
occ = nameOccName name1
ppr_export ie name = nest 3 (hang (quotes (ppr ie) <+> text "exports" <+>
quotes (ppr name))
2 (pprNameProvenance (get_gre name)))
-- get_gre finds a GRE for the Name, so that we can show its provenance
get_gre name
= case lookupGRE_Name global_env name of
(gre:_) -> gre
[] -> pprPanic "exportClashErr" (ppr name)
get_loc name = greSrcSpan (get_gre name)
(name1', ie1', name2', ie2') = if get_loc name1 < get_loc name2
then (name1, ie1, name2, ie2)
else (name2, ie2, name1, ie1)
addDupDeclErr :: [GlobalRdrElt] -> TcRn ()
addDupDeclErr [] = panic "addDupDeclErr: empty list"
addDupDeclErr gres@(gre : _)
= addErrAt (getSrcSpan (last sorted_names)) $
-- Report the error at the later location
vcat [text "Multiple declarations of" <+>
quotes (ppr (nameOccName name)),
-- NB. print the OccName, not the Name, because the
-- latter might not be in scope in the RdrEnv and so will
-- be printed qualified.
text "Declared at:" <+>
vcat (map (ppr . nameSrcLoc) sorted_names)]
where
name = gre_name gre
sorted_names = sortWith nameSrcLoc (map gre_name gres)
-- | Warning: the same entity is exported by two distinct export items.
dupExportWarn :: OccName -> IE RdrName -> IE RdrName -> SDoc
dupExportWarn occ_name ie1 ie2
  = hsep [quotes (ppr occ_name),
          text "is exported by", quotes (ppr ie1),
          text "and", quotes (ppr ie2)]
-- | Warning: the export list contains @module M@ twice.
dupModuleExport :: ModuleName -> SDoc
dupModuleExport mod
  = hsep [text "Duplicate",
          quotes (text "Module" <+> ppr mod),
          text "in export list"]
-- | Error for an export item @module M@ where @M@ is neither imported
-- nor the module currently being compiled.
moduleNotImported :: ModuleName -> SDoc
moduleNotImported mod
  = text "The export item `module" <+> ppr mod <>
    text "' is not imported"
-- | Warning for an export item @module M@ that exports nothing
-- (no entity from @M@ is in scope here).
nullModuleExport :: ModuleName -> SDoc
nullModuleExport mod
  -- 'text' rather than 'ptext (sLit ...)': equivalent, and consistent
  -- with the surrounding error-message functions in this module.
  = text "The export item `module" <+> ppr mod <> text "' exports nothing"
-- | Warning (for -fwarn-missing-import-lists): an import of @mod@ with
-- no explicit import list.
missingImportListWarn :: ModuleName -> SDoc
missingImportListWarn mod
  -- 'text' rather than 'ptext (sLit ...)': equivalent, and consistent
  -- with the surrounding error-message functions in this module.
  = text "The module" <+> quotes (ppr mod) <+> text "does not have an explicit import list"
-- | Warning: an import item (e.g. @T(..)@) that does not spell out
-- exactly which children it brings into scope.
missingImportListItem :: IE RdrName -> SDoc
missingImportListItem ie
  -- 'text' rather than 'ptext (sLit ...)': equivalent, and consistent
  -- with the surrounding error-message functions in this module.
  = text "The import item" <+> quotes (ppr ie) <+> text "does not have an explicit import list"
-- | Render a WARNING or DEPRECATED pragma attached to an imported module.
moduleWarn :: ModuleName -> WarningTxt -> SDoc
moduleWarn mod (WarningTxt _ txt)
  -- 'colon' rather than 'ptext (sLit ":")': equivalent, and consistent
  -- with the uses of 'colon' elsewhere in this module.
  = sep [ text "Module" <+> quotes (ppr mod) <> colon,
          nest 2 (vcat (map (ppr . sl_fs . unLoc) txt)) ]
moduleWarn mod (DeprecatedTxt _ txt)
  = sep [ text "Module" <+> quotes (ppr mod)
          <+> text "is deprecated:",
          nest 2 (vcat (map (ppr . sl_fs . unLoc) txt)) ]
-- | Error for a package-qualified import (e.g. @import \"base\" Data.List@)
-- when the PackageImports extension is not enabled.
packageImportErr :: SDoc
packageImportErr
  = text "Package-qualified imports are not enabled; use PackageImports"
-- This data decl will parse OK
-- data T = a Int
-- treating "a" as the constructor.
-- It is really hard to make the parser spot this malformation.
-- So the renamer has to check that the constructor is legal
--
-- We can get an operator as the constructor, even in the prefix form:
-- data T = :% Int Int
-- from interface files, which always print in prefix form
-- | Check that a data constructor name is syntactically legal; the parser
-- accepts malformed declarations like @data T = a Int@ (see the comment
-- above), so the renamer must reject lower-case "constructors" here.
checkConName :: RdrName -> TcRn ()
checkConName name = checkErr (isRdrDataCon name) (badDataCon name)
-- | Error message emitted by 'checkConName' for an illegal constructor name.
badDataCon :: RdrName -> SDoc
badDataCon name
   = hsep [text "Illegal data constructor name", quotes (ppr name)]
| tjakway/ghcjvm | compiler/rename/RnNames.hs | bsd-3-clause | 88,862 | 3 | 29 | 28,036 | 16,684 | 8,616 | 8,068 | -1 | -1 |
module HSync.Server.Import.NoFoundation
( module Import
) where
import ClassyPrelude.Yesod as Import hiding (Update,Query,get,delete,update
, host, port
)
-- import HSync.Server.Model as Import
import HSync.Server.Settings as Import
import HSync.Server.Settings.StaticFiles as Import
import Yesod.Auth as Import
import Yesod.Core.Types as Import (loggerSet)
import Yesod.Default.Config2 as Import
import HSync.Common.Types as Import
import HSync.Common.Notification as Import
import HSync.Common.FileVersion as Import
import HSync.Common.AcidState as Import
import HSync.Common.AccessPolicy as Import
import HSync.Common.DateTime as Import
import HSync.Common.API as Import
import HSync.Common.Util as Import
| noinia/hsync-server | src/HSync/Server/Import/NoFoundation.hs | bsd-3-clause | 985 | 0 | 5 | 351 | 155 | 113 | 42 | 17 | 0 |
{-- snippet ternary --}
-- | Three-valued ("ternary") logic: a proposition may be definitely
-- true, definitely false, or not yet determined.
data Ternary
    = Yes       -- ^ definitely true
    | No        -- ^ definitely false
    | Unknown   -- ^ truth value undetermined
    deriving (Eq,Show)
{-- /snippet ternary --}
| binesiyu/ifl | examples/ch11/Ternary.hs | mit | 119 | 0 | 6 | 34 | 27 | 16 | 11 | 5 | 0 |
-- https://www.codewars.com/kata/can-you-get-the-loop
module CanYouGetTheLoop where
import CanYouGetTheLoop.Types
{-
data Node a
instance Eq a => Eq (Node a)
next :: Node a -> Node a
-}
-- | Length of the cycle reachable from the given node, using Floyd's
-- tortoise-and-hare algorithm: advance a slow and a fast pointer until
-- they meet (the meeting point is guaranteed to lie on the cycle), then
-- walk once around the cycle counting steps back to the meeting point.
loopSize :: Eq a => Node a -> Int
loopSize start = countLoop 1 (next meet)
  where
    -- Some node on the cycle, found by the two-speed chase.
    meet = chase start (next start)
    chase slow fast
      | slow == fast = slow
      | otherwise    = chase (next slow) (next (next fast))
    -- Steps taken to get from (next meet) back around to meet.
    countLoop n node
      | node == meet = n
      | otherwise    = countLoop (n + 1) (next node)
| airtial/Codegames | codewars/can-you-get-the-loop.hs | gpl-2.0 | 492 | 0 | 11 | 132 | 169 | 82 | 87 | 11 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="de-DE">
<title>Forced Browse Add-On</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Suche</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/bruteforce/src/main/javahelp/org/zaproxy/zap/extension/bruteforce/resources/help_de_DE/helpset_de_DE.hs | apache-2.0 | 965 | 79 | 67 | 158 | 415 | 210 | 205 | -1 | -1 |
module ParsecChar
{-# DEPRECATED "This module has moved to Text.ParserCombinators.Parsec.Char" #-}
(module Text.ParserCombinators.Parsec.Char) where
import Text.ParserCombinators.Parsec.Char
| alekar/hugs | fptools/hslibs/text/parsec/ParsecChar.hs | bsd-3-clause | 191 | 0 | 5 | 16 | 22 | 16 | 6 | 4 | 0 |
{-|
Module : Qux.Command.Dependencies
Description : Options and handler for the dependencies subcommand.
Copyright : (c) Henry J. Wylde, 2015
License : BSD3
Maintainer : hjwylde@gmail.com
Options and handler for the dependencies subcommand.
-}
module Qux.Command.Dependencies (
-- * Options
Options(..),
-- * Handle
handle,
) where
import Control.Monad.IO.Class
import Data.List.Extra (nubOrd, sort)
import Language.Qux.Annotated.Parser
import Language.Qux.Annotated.Syntax
import Prelude hiding (log)
import qualified Qux.BuildSteps as BuildSteps
import Qux.Worker
-- | Dependencies options.
data Options = Options
{ argFilePaths :: [FilePath] -- ^ The files to read the dependencies from.
} deriving (Eq, Show)
-- | Prints out the file dependencies according to the options.
handle :: Options -> WorkerT IO ()
handle options = do
log Debug "Parsing ..."
BuildSteps.parseAll (argFilePaths options) >>= dependencies
-- | Print the (simplified, qualified) module ids imported by the given
-- programs, sorted and with duplicates removed, one per line.
dependencies :: [Program SourcePos] -> WorkerT IO ()
dependencies programs = liftIO (mapM_ putStrLn names)
  where
    names = nubOrd . sort $
        [ simp (qualify importId)
        | Program _ _ decls       <- programs
        , ImportDecl _ importId   <- decls
        ]
| qux-lang/qux | src/Qux/Command/Dependencies.hs | bsd-3-clause | 1,202 | 0 | 14 | 245 | 255 | 145 | 110 | 22 | 1 |
module Eta.CodeGen.Env where
import Eta.BasicTypes.Id
import Eta.StgSyn.StgSyn
import Eta.Types.TyCon
import Codec.JVM
import Eta.Utils.Util
import Eta.CodeGen.Types
import Eta.CodeGen.Closure
import Eta.CodeGen.Monad
import Eta.CodeGen.Utils
import Eta.CodeGen.ArgRep
import Eta.CodeGen.Name
import Control.Monad (liftM)
-- | Code that pushes the value of a (non-void) STG argument: a load from
-- the variable's recorded location, or the literal's constant code.
getArgLoadCode :: NonVoid StgArg -> CodeGen Code
getArgLoadCode (NonVoid arg) =
  case arg of
    StgVarArg var     -> fmap idInfoLoadCode (getCgIdInfo var)
    StgLitArg literal -> return (snd (cgLit literal))
-- | Load codes for every argument with a non-void representation,
-- in the original argument order; void arguments are dropped.
getNonVoidArgCodes :: [StgArg] -> CodeGen [Code]
getNonVoidArgCodes args =
  mapM (getArgLoadCode . NonVoid)
       (filter (not . isVoidRep . argPrimRep) args)
-- | Like 'getNonVoidArgCodes', but pairs each load instruction with the
-- JVM field type of the argument's primitive representation.
getNonVoidArgFtCodes :: [StgArg] -> CodeGen [(FieldType, Code)]
getNonVoidArgFtCodes args = mapM loadWithFt nonVoids
  where
    nonVoids = filter (not . isVoidRep . argPrimRep) args
    loadWithFt arg = do
      code <- getArgLoadCode (NonVoid arg)
      let ft = expectJust "getNonVoidArgFtCodes"
                 (primRepFieldType_maybe (argPrimRep arg))
      return (ft, code)
-- | Like 'getNonVoidArgCodes', but pairs each load instruction with the
-- argument's 'PrimRep'; void-rep arguments are skipped.
getNonVoidArgRepCodes :: [StgArg] -> CodeGen [(PrimRep, Code)]
getNonVoidArgRepCodes [] = return []
getNonVoidArgRepCodes (arg:args)
  | isVoidRep rep = getNonVoidArgRepCodes args
  | otherwise = do
      code <- getArgLoadCode (NonVoid arg)
      repCodes <- getNonVoidArgRepCodes args
      return ((rep, code) : repCodes)
  where rep = argPrimRep arg
-- | Code that loads the value of an identifier from the location
-- recorded in its code-gen info.
idInfoLoadCode :: CgIdInfo -> Code
idInfoLoadCode CgIdInfo { cgLocation } = loadLoc cgLocation
-- | Rebind an already-bound id to a new location, preserving the
-- lambda-form info from its existing binding.
rebindId :: NonVoid Id -> CgLoc -> CodeGen ()
rebindId nvId@(NonVoid id) cgLoc = do
  info <- getCgIdInfo id
  bindId nvId (cgLambdaForm info) cgLoc
-- | Record a binding for an id: its lambda-form info plus the location
-- where its value lives.
bindId :: NonVoid Id -> LambdaFormInfo -> CgLoc -> CodeGen ()
bindId (NonVoid id) lfInfo cgLoc =
  addBinding (mkCgIdInfoWithLoc id lfInfo cgLoc)
-- | Bind a function argument at the given location, using the standard
-- argument lambda-form info for the id.
bindArg :: NonVoid Id -> CgLoc -> CodeGen ()
bindArg nvid@(NonVoid id) = bindId nvid (mkLFArgument id)
-- | Bind a list of (argument, location) pairs via 'bindArg'.
bindArgs :: [(NonVoid Id, CgLoc)] -> CodeGen ()
bindArgs = mapM_ (uncurry bindArg)
-- | Code-gen info and a fresh temporary location for a closure RHS bound
-- to @id@; the location's field type is the closure class named after the
-- id, qualified by the current module class.
rhsIdInfo :: Id -> LambdaFormInfo -> CodeGen (CgIdInfo, CgLoc)
rhsIdInfo id lfInfo = do
  dflags <- getDynFlags
  modClass <- getModClass
  let qualifiedClass = qualifiedName modClass (idNameText dflags id)
  rhsGenIdInfo id lfInfo (obj qualifiedClass)
-- TODO: getJavaInfo generalize to unify rhsIdInfo and rhsConIdInfo
-- | As 'rhsIdInfo', but for a data-constructor RHS: the location's field
-- type is the constructor's data class. Only defined for 'LFCon'
-- lambda-form info; any other constructor is a caller error.
rhsConIdInfo :: Id -> LambdaFormInfo -> CodeGen (CgIdInfo, CgLoc)
rhsConIdInfo id lfInfo@(LFCon dataCon) = do
  dflags <- getDynFlags
  let dataClass = dataConClass dflags dataCon
  rhsGenIdInfo id lfInfo (obj dataClass)
rhsConIdInfo _ _ = error "rhsConIdInfo: bad arguments"
-- | Common worker for 'rhsIdInfo'/'rhsConIdInfo': allocate a fresh
-- temporary of the given field type and pair it with the id's code-gen
-- info bound to that location.
rhsGenIdInfo :: Id -> LambdaFormInfo -> FieldType -> CodeGen (CgIdInfo, CgLoc)
rhsGenIdInfo id lfInfo ft = do
  cgLoc <- newTemp True ft
  return (mkCgIdInfoWithLoc id lfInfo cgLoc, cgLoc)
-- | Initialise an RHS location: store the given code's result into it.
-- (Currently just 'storeLoc'; named for readability at call sites.)
mkRhsInit :: CgLoc -> Code -> Code
mkRhsInit = storeLoc
| rahulmutt/ghcvm | compiler/Eta/CodeGen/Env.hs | bsd-3-clause | 3,085 | 0 | 12 | 517 | 1,067 | 533 | 534 | -1 | -1 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "Data/Aeson/Encoding.hs" #-}
-- |
--
-- Functions in this module return well-formed 'Encoding''.
-- Polymorphic variants, which return @'Encoding' a@, return a textual JSON
-- value, so it can be used as both @'Encoding'' 'Text'@ and @'Encoding' = 'Encoding'' 'Value'@.
module Data.Aeson.Encoding
(
-- * Encoding
Encoding
, Encoding'
, encodingToLazyByteString
, fromEncoding
, unsafeToEncoding
, Series
, pairs
, pair
, pairStr
, pair'
-- * Predicates
, nullEncoding
-- * Encoding constructors
, emptyArray_
, emptyObject_
, text
, lazyText
, string
, list
, dict
, null_
, bool
-- ** Decimal numbers
, int8, int16, int32, int64, int
, word8, word16, word32, word64, word
, integer, float, double, scientific
-- ** Decimal numbers as Text
, int8Text, int16Text, int32Text, int64Text, intText
, word8Text, word16Text, word32Text, word64Text, wordText
, integerText, floatText, doubleText, scientificText
-- ** Time
, day
, localTime
, utcTime
, timeOfDay
, zonedTime
-- ** value
, value
) where
import Prelude ()
import Data.Aeson.Encoding.Internal
| phischu/fragnix | tests/packages/scotty/Data.Aeson.Encoding.hs | bsd-3-clause | 1,260 | 0 | 4 | 339 | 198 | 137 | 61 | 38 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Program.Builtin
-- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- The module defines all the known built-in 'Program's.
--
-- Where possible we try to find their version numbers.
--
module Distribution.Simple.Program.Builtin (
-- * The collection of unconfigured and configured programs
builtinPrograms,
-- * Programs that Cabal knows about
ghcProgram,
ghcPkgProgram,
ghcjsProgram,
ghcjsPkgProgram,
lhcProgram,
lhcPkgProgram,
hmakeProgram,
jhcProgram,
haskellSuiteProgram,
haskellSuitePkgProgram,
uhcProgram,
gccProgram,
arProgram,
stripProgram,
happyProgram,
alexProgram,
hsc2hsProgram,
c2hsProgram,
cpphsProgram,
hscolourProgram,
haddockProgram,
greencardProgram,
ldProgram,
tarProgram,
cppProgram,
pkgConfigProgram,
hpcProgram,
) where
import Distribution.Simple.Program.Find
( findProgramOnSearchPath )
import Distribution.Simple.Program.Run
( getProgramInvocationOutput, programInvocation )
import Distribution.Simple.Program.Types
( Program(..), ConfiguredProgram(..), simpleProgram )
import Distribution.Simple.Utils
( findProgramVersion )
import Distribution.Compat.Exception
( catchIO )
import Distribution.Verbosity
( lessVerbose )
import Distribution.Version
( Version(..), withinRange, earlierVersion, laterVersion
, intersectVersionRanges )
import Data.Char
( isDigit )
import Data.List
( isInfixOf )
import qualified Data.Map as Map
-- ------------------------------------------------------------
-- * Known programs
-- ------------------------------------------------------------
-- | The default list of programs.
-- These programs are typically used internally to Cabal.
--
-- NOTE(review): the list order is preserved as-is; confirm whether any
-- consumer relies on lookup precedence before reordering.
builtinPrograms :: [Program]
builtinPrograms =
    [
    -- compilers and related progs
      ghcProgram
    , ghcPkgProgram
    , ghcjsProgram
    , ghcjsPkgProgram
    , haskellSuiteProgram
    , haskellSuitePkgProgram
    , hmakeProgram
    , jhcProgram
    , lhcProgram
    , lhcPkgProgram
    , uhcProgram
    , hpcProgram
    -- preprocessors
    , hscolourProgram
    , haddockProgram
    , happyProgram
    , alexProgram
    , hsc2hsProgram
    , c2hsProgram
    , cpphsProgram
    , greencardProgram
    -- platform toolchain
    , gccProgram
    , arProgram
    , stripProgram
    , ldProgram
    , tarProgram
    -- configuration tools
    , pkgConfigProgram
    ]
-- | GHC itself; version probed with @--numeric-version@.  Post-configure,
-- the environment is adjusted to dodge a locale-warning bug.
ghcProgram :: Program
ghcProgram = (simpleProgram "ghc") {
    programFindVersion = findProgramVersion "--numeric-version" id,

    -- Workaround for https://ghc.haskell.org/trac/ghc/ticket/8825
    -- (spurious warning on non-english locales)
    programPostConf = \_verbosity ghcProg ->
      do let ghcProg' = ghcProg {
               programOverrideEnv = ("LANGUAGE", Just "en")
                                    : programOverrideEnv ghcProg
             }
             -- Only the 7.8 branch seems to be affected. Fixed in 7.8.4.
             affectedVersionRange = intersectVersionRanges
                                      (laterVersion   $ Version [7,8,0] [])
                                      (earlierVersion $ Version [7,8,4] [])
         -- Only override LANGUAGE when the detected version falls inside
         -- the affected range; otherwise return the program untouched.
         return $ maybe ghcProg
           (\v -> if withinRange v affectedVersionRange
                  then ghcProg' else ghcProg)
           (programVersion ghcProg)
  }
-- | The ghc-pkg tool.  Its @--version@ banner looks like
-- "GHC package manager version 6.4.1", so the version is the fifth word.
ghcPkgProgram :: Program
ghcPkgProgram = (simpleProgram "ghc-pkg") {
    programFindVersion = findProgramVersion "--version" $ \str ->
      case drop 4 (words str) of
        (ver:_) -> ver
        _       -> ""
  }
-- | The GHCJS compiler; versioned via @--numeric-ghcjs-version@.
ghcjsProgram :: Program
ghcjsProgram = (simpleProgram "ghcjs") {
    programFindVersion = findProgramVersion "--numeric-ghcjs-version" id
  }

-- | The GHCJS package manager; the version is the fifth word of its
-- @--ghcjs-version@ banner.
ghcjsPkgProgram :: Program
ghcjsPkgProgram = (simpleProgram "ghcjs-pkg") {
    programFindVersion = findProgramVersion "--ghcjs-version" $ \str ->
      -- Invoking "ghcjs-pkg --version" gives a string like
      -- "GHCJS package manager version 6.4.1"
      case words str of
        (_:_:_:_:ver:_) -> ver
        _               -> ""
  }
-- | The LHC compiler; versioned via @--numeric-version@.
lhcProgram :: Program
lhcProgram = (simpleProgram "lhc") {
    programFindVersion = findProgramVersion "--numeric-version" id
  }

-- | The LHC package manager; version is the fifth word of its banner.
lhcPkgProgram :: Program
lhcPkgProgram = (simpleProgram "lhc-pkg") {
    programFindVersion = findProgramVersion "--version" $ \str ->
      -- Invoking "lhc-pkg --version" gives a string like
      -- "LHC package manager version 0.7"
      case words str of
        (_:_:_:_:ver:_) -> ver
        _               -> ""
  }

-- | The hmake build tool; version is the second word of its banner.
hmakeProgram :: Program
hmakeProgram = (simpleProgram "hmake") {
    programFindVersion = findProgramVersion "--version" $ \str ->
      -- Invoking "hmake --version" gives a string line
      -- "/usr/local/bin/hmake: 3.13 (2006-11-01)"
      case words str of
        (_:ver:_) -> ver
        _         -> ""
  }

-- | The JHC compiler; version is the second word of its banner.
jhcProgram :: Program
jhcProgram = (simpleProgram "jhc") {
    programFindVersion = findProgramVersion "--version" $ \str ->
      -- invoking "jhc --version" gives a string like
      -- "jhc 0.3.20080208 (wubgipkamcep-2)
      -- compiled by ghc-6.8 on a x86_64 running linux"
      case words str of
        (_:ver:_) -> ver
        _         -> ""
  }

-- | The UHC compiler; versioned via @--version-dotted@.
uhcProgram :: Program
uhcProgram = (simpleProgram "uhc") {
    programFindVersion = findProgramVersion "--version-dotted" id
  }

-- | The hpc coverage tool; note the version subcommand is a bare
-- @version@ argument, not a @--@ flag, and the version is word four.
hpcProgram :: Program
hpcProgram = (simpleProgram "hpc")
  {
    programFindVersion = findProgramVersion "version" $ \str ->
      case words str of
        (_ : _ : _ : ver : _) -> ver
        _                     -> ""
  }
-- This represents a haskell-suite compiler. Of course, the compiler
-- itself probably is not called "haskell-suite", so this is not a real
-- program. (But we don't know statically the name of the actual compiler,
-- so this is the best we can do.)
--
-- Having this Program value serves two purposes:
--
-- 1. We can accept options for the compiler in the form of
--
--    --haskell-suite-option(s)=...
--
-- 2. We can find a program later using this static id (with
--    requireProgram).
--
-- The path to the real compiler is found and recorded in the ProgramDb
-- during the configure phase.
haskellSuiteProgram :: Program
haskellSuiteProgram = (simpleProgram "haskell-suite") {
    -- pretend that the program exists, otherwise it won't be in the
    -- "configured" state
    programFindLocation =
      \_verbosity _searchPath -> return $ Just "haskell-suite-dummy-location"
  }

-- This represent a haskell-suite package manager. See the comments for
-- haskellSuiteProgram.
haskellSuitePkgProgram :: Program
haskellSuitePkgProgram = (simpleProgram "haskell-suite-pkg") {
    programFindLocation =
      \_verbosity _searchPath -> return $ Just "haskell-suite-pkg-dummy-location"
  }
-- | The happy parser generator.  Its banner reads
-- "Happy Version 1.16 Copyright (c) ....", so the version is word three.
happyProgram :: Program
happyProgram = (simpleProgram "happy") {
    programFindVersion = findProgramVersion "--version" $ \str ->
      case drop 2 (words str) of
        (ver:_) -> ver
        _       -> ""
  }
-- | The alex lexer generator.  Its banner reads
-- "Alex version 2.1.0, (c) 2003 Chris Dornan and Simon Marlow";
-- the version is word three, trimmed to its leading digits and dots
-- (dropping the trailing comma).
alexProgram :: Program
alexProgram = (simpleProgram "alex") {
    programFindVersion = findProgramVersion "--version" $ \str ->
      case drop 2 (words str) of
        (ver:_) -> takeWhile (\c -> isDigit c || c == '.') ver
        _       -> ""
  }
-- | The C compiler; versioned via @-dumpversion@.
gccProgram :: Program
gccProgram = (simpleProgram "gcc") {
    programFindVersion = findProgramVersion "-dumpversion" id
  }

-- | The archiver; no version probing.
arProgram :: Program
arProgram = simpleProgram "ar"

-- | The strip tool; its version output is too inconsistent for a simple
-- word pick, so 'selectVersion' scans for the first numeric-looking word.
stripProgram :: Program
stripProgram = (simpleProgram "strip") {
    programFindVersion = \verbosity ->
      findProgramVersion "--version" selectVersion (lessVerbose verbosity)
  }
  where
    selectVersion str =
      -- Invoking "strip --version" gives very inconsistent
      -- results. We look for the first word that starts with a
      -- number, and try parsing out the first two components of
      -- it. Non-GNU 'strip' doesn't appear to have a version flag.
      let numeric ""    = False
          numeric (x:_) = isDigit x
      in case dropWhile (not . numeric) (words str) of
           (ver:_) ->
             -- take the first two version components
             let isDot         = (== '.')
                 (major, rest) = break isDot ver
                 minor         = takeWhile (not . isDot) (dropWhile isDot rest)
             in major ++ "." ++ minor
           _ -> ""
-- | hsc2hs; banner "hsc2hs version 0.66" — version is word three.
hsc2hsProgram :: Program
hsc2hsProgram = (simpleProgram "hsc2hs") {
    programFindVersion =
      findProgramVersion "--version" $ \str ->
        -- Invoking "hsc2hs --version" gives a string like "hsc2hs version 0.66"
        case words str of
          (_:_:ver:_) -> ver
          _           -> ""
  }

-- | c2hs; versioned via @--numeric-version@.
c2hsProgram :: Program
c2hsProgram = (simpleProgram "c2hs") {
    programFindVersion = findProgramVersion "--numeric-version" id
  }

-- | cpphs; banner "cpphs 1.3" — version is word two.
cpphsProgram :: Program
cpphsProgram = (simpleProgram "cpphs") {
    programFindVersion = findProgramVersion "--version" $ \str ->
      -- Invoking "cpphs --version" gives a string like "cpphs 1.3"
      case words str of
        (_:ver:_) -> ver
        _         -> ""
  }

-- | HsColour; note the binary on disk is called "HsColour", and the
-- version flag is single-dash @-version@.
hscolourProgram :: Program
hscolourProgram = (simpleProgram "hscolour") {
    programFindLocation = \v p -> findProgramOnSearchPath v p "HsColour",
    programFindVersion = findProgramVersion "-version" $ \str ->
      -- Invoking "HsColour -version" gives a string like "HsColour 1.7"
      case words str of
        (_:ver:_) -> ver
        _         -> ""
  }

-- | Haddock; the third word of the banner is trimmed to digits/dots to
-- drop the trailing comma.
haddockProgram :: Program
haddockProgram = (simpleProgram "haddock") {
    programFindVersion = findProgramVersion "--version" $ \str ->
      -- Invoking "haddock --version" gives a string like
      -- "Haddock version 0.8, (c) Simon Marlow 2006"
      case words str of
        (_:_:ver:_) -> takeWhile (`elem` ('.':['0'..'9'])) ver
        _           -> ""
  }

-- | greencard; no version probing.
greencardProgram :: Program
greencardProgram = simpleProgram "greencard"

-- | The linker; no version probing.
ldProgram :: Program
ldProgram = simpleProgram "ld"
tarProgram = (simpleProgram "tar") {
-- See #1901. Some versions of 'tar' (OpenBSD, NetBSD, ...) don't support the
-- '--format' option.
programPostConf = \verbosity tarProg -> do
tarHelpOutput <- getProgramInvocationOutput
verbosity (programInvocation tarProg ["--help"])
-- Some versions of tar don't support '--help'.
`catchIO` (\_ -> return "")
let k = "Supports --format"
v = if ("--format" `isInfixOf` tarHelpOutput) then "YES" else "NO"
m = Map.insert k v (programProperties tarProg)
return $ tarProg { programProperties = m }
}
-- | The C preprocessor; no version probing.
cppProgram :: Program
cppProgram = simpleProgram "cpp"

-- | pkg-config; @--version@ prints the bare version string.
pkgConfigProgram :: Program
pkgConfigProgram = (simpleProgram "pkg-config") {
    programFindVersion = findProgramVersion "--version" id
  }
| DavidAlphaFox/ghc | libraries/Cabal/Cabal/Distribution/Simple/Program/Builtin.hs | bsd-3-clause | 11,280 | 0 | 18 | 2,867 | 2,119 | 1,208 | 911 | 224 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.Version
-- Copyright : (c) Simon Marlow 2003
-- License : BSD-like
--
-- Maintainer : haddock@projects.haskell.org
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
module Haddock.Version (
projectName, projectVersion, projectUrl
) where
import Paths_haddock ( version )
import Data.Version ( showVersion )
-- | Human-readable project name and homepage used in generated output.
projectName, projectUrl :: String
projectName = "Haddock"
projectUrl  = "http://www.haskell.org/haddock/"

-- | Version string derived from the Cabal-generated Paths_haddock module.
projectVersion :: String
projectVersion = showVersion version
| jwiegley/ghc-release | utils/haddock/src/Haddock/Version.hs | gpl-3.0 | 687 | 0 | 5 | 95 | 75 | 50 | 25 | 9 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeInType #-}
module T13780b where
-- | Open data family of singletons indexed by a kind-polymorphic type.
data family Sing (a :: k)

-- Boolean singleton: each constructor carries an equality constraint
-- pinning the index.  NOTE(review): this lives in a compiler test suite;
-- the encoding is deliberately unusual — do not "clean it up".
data instance Sing (z :: Bool) =
    z ~ False => SFalse
  | z ~ True  => STrue
| ezyang/ghc | testsuite/tests/dependent/should_fail/T13780b.hs | bsd-3-clause | 210 | 0 | 8 | 48 | 57 | 34 | 23 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="da-DK">
<title>Automation Framework</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/automation/src/main/javahelp/org/zaproxy/addon/automation/resources/help_da_DK/helpset_da_DK.hs | apache-2.0 | 965 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
{-# LANGUAGE CPP, UnboxedTuples, MagicHash #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
--
-- (c) The University of Glasgow 2002-2006
--
-- ---------------------------------------------------------------------------
-- The dynamic linker for object code (.o .so .dll files)
-- ---------------------------------------------------------------------------
-- | Primarily, this module consists of an interface to the C-land
-- dynamic linker.
module GHCi.ObjLink
( initObjLinker, ShouldRetainCAFs(..)
, loadDLL
, loadArchive
, loadObj
, unloadObj
, purgeObj
, lookupSymbol
, lookupClosure
, resolveObjs
, addLibrarySearchPath
, removeLibrarySearchPath
, findSystemLibrary
) where
import Prelude -- See note [Why do we import Prelude here?]
import GHCi.RemoteTypes
import Control.Exception (throwIO, ErrorCall(..))
import Control.Monad ( when )
import Foreign.C
import Foreign.Marshal.Alloc ( free )
import Foreign ( nullPtr )
import GHC.Exts
import System.Posix.Internals ( CFilePath, withFilePath, peekFilePath )
import System.FilePath ( dropExtension, normalise )
-- ---------------------------------------------------------------------------
-- RTS Linker Interface
-- ---------------------------------------------------------------------------
-- | Policy for whether the RTS linker keeps CAFs in loaded code alive.
data ShouldRetainCAFs
  = RetainCAFs
    -- ^ Retain CAFs unconditionally in linked Haskell code.
    -- Note that this prevents any code from being unloaded.
    -- It should not be necessary unless you are GHCi or
    -- hs-plugins, which needs to be able call any function
    -- in the compiled code.
  | DontRetainCAFs
    -- ^ Do not retain CAFs. Everything reachable from foreign
    -- exports will be retained, due to the StablePtrs
    -- created by the module initialisation code. unloadObj
    -- frees these StablePtrs, which will allow the CAFs to
    -- be GC'd and the code to be removed.
-- | Initialise the RTS linker, translating the CAF-retention policy
-- into the flag expected by @initLinker_@.
initObjLinker :: ShouldRetainCAFs -> IO ()
initObjLinker mode = c_initLinker_ retain
  where
    retain = case mode of
               RetainCAFs -> 1
               _          -> 0
-- | Look up a symbol in the RTS linker's table, applying the platform's
-- leading-underscore convention first.  A NULL result maps to 'Nothing'.
lookupSymbol :: String -> IO (Maybe (Ptr a))
lookupSymbol name =
  withCAString (prefixUnderscore name) $ \c_name -> do
    addr <- c_lookupSymbol c_name
    return $ if addr == nullPtr then Nothing else Just addr
-- | Look up a closure by symbol name and, if found, wrap it as a remote
-- reference: the raw address is converted with 'addrToAny#' (the unboxed
-- tuple match forces the conversion) before 'mkRemoteRef' pins it.
lookupClosure :: String -> IO (Maybe HValueRef)
lookupClosure str = do
  m <- lookupSymbol str
  case m of
    Nothing -> return Nothing
    Just (Ptr addr) -> case addrToAny# addr of
      (# a #) -> Just <$> mkRemoteRef (HValue a)
-- | Prepend @'_'@ to symbol names on platforms whose C toolchain uses a
-- leading underscore ('cLeadingUnderscore', set from ghcautoconf.h below).
prefixUnderscore :: String -> String
prefixUnderscore
  | cLeadingUnderscore = ('_':)
  | otherwise          = id
-- | loadDLL loads a dynamic library using the OS's native linker
-- (i.e. dlopen() on Unix, LoadLibrary() on Windows). It takes either
-- an absolute pathname to the file, or a relative filename
-- (e.g. "libfoo.so" or "foo.dll"). In the latter case, loadDLL
-- searches the standard locations for the appropriate library.
--
loadDLL :: String -> IO (Maybe String)
-- Nothing => success
-- Just err_msg => failure
loadDLL str0 = do
  let
      -- On Windows, addDLL takes a filename without an extension, because
      -- it tries adding both .dll and .drv. To keep things uniform in the
      -- layers above, loadDLL always takes a filename with an extension, and
      -- we drop it here on Windows only.
      path | isWindowsHost = dropExtension str0
           | otherwise     = str0
  maybe_errmsg <- withFilePath (normalise path) $ \dll -> c_addDLL dll
  if maybe_errmsg == nullPtr
    then return Nothing
    else do
      -- Copy the C error string into Haskell before freeing the C side.
      -- (Renamed from 'str', which shadowed the outer binding.)
      errmsg <- peekCString maybe_errmsg
      free maybe_errmsg
      return (Just errmsg)
-- | Load a static archive (.a) into the RTS linker; throws an
-- 'ErrorCall' when the linker reports failure (a zero return).
loadArchive :: String -> IO ()
loadArchive path =
  withFilePath path $ \c_path -> do
    r <- c_loadArchive c_path
    when (r == 0) $
      throwIO (ErrorCall ("loadArchive " ++ show path ++ ": failed"))
-- | Load a single object file (.o) into the RTS linker; throws an
-- 'ErrorCall' when the linker reports failure (a zero return).
loadObj :: String -> IO ()
loadObj path =
  withFilePath path $ \c_path -> do
    r <- c_loadObj c_path
    when (r == 0) $
      throwIO (ErrorCall ("loadObj " ++ show path ++ ": failed"))
-- | @unloadObj@ drops the given dynamic library from the symbol table
-- as well as enables the library to be removed from memory during
-- a future major GC.
unloadObj :: String -> IO ()
unloadObj str =
  withFilePath str $ \c_str -> do
    r <- c_unloadObj c_str
    -- A zero return from the RTS linker signals failure.
    when (r == 0) (throwIO (ErrorCall ("unloadObj " ++ show str ++ ": failed")))
-- | @purgeObj@ drops the symbols for the dynamic library from the symbol
-- table. Unlike 'unloadObj', the library will not be dropped memory during
-- a future major GC.
purgeObj :: String -> IO ()
purgeObj str =
  withFilePath str $ \c_str -> do
    r <- c_purgeObj c_str
    -- A zero return from the RTS linker signals failure.
    when (r == 0) (throwIO (ErrorCall ("purgeObj " ++ show str ++ ": failed")))
-- | Add a directory to the RTS linker's library search path; the opaque
-- handle can later be passed to 'removeLibrarySearchPath'.
addLibrarySearchPath :: String -> IO (Ptr ())
addLibrarySearchPath str =
  withFilePath str c_addLibrarySearchPath

-- | Remove an entry previously added with 'addLibrarySearchPath'.
-- NOTE(review): the Bool presumably indicates success — confirm against
-- the RTS linker's removeLibrarySearchPath.
removeLibrarySearchPath :: Ptr () -> IO Bool
removeLibrarySearchPath = c_removeLibrarySearchPath
-- | Ask the RTS linker to locate a system library.  Returns the
-- discovered path, or 'Nothing' when the linker returns NULL.  The C
-- result string is freed after being copied into a Haskell String.
-- (Rewrote the unidiomatic @case bool of True/False@ as an if-expression.)
findSystemLibrary :: String -> IO (Maybe String)
findSystemLibrary str = do
    result <- withFilePath str c_findSystemLibrary
    if result == nullPtr
      then return Nothing
      else do path <- peekFilePath result
              free result
              return (Just path)
-- | Resolve all outstanding symbols in loaded objects; the C call's
-- nonzero return maps to 'True'.
resolveObjs :: IO Bool
resolveObjs = fmap (/= 0) c_resolveObjs
-- ---------------------------------------------------------------------------
-- Foreign declarations to RTS entry points which does the real work;
-- ---------------------------------------------------------------------------
foreign import ccall unsafe "addDLL" c_addDLL :: CFilePath -> IO CString
foreign import ccall unsafe "initLinker_" c_initLinker_ :: CInt -> IO ()
foreign import ccall unsafe "lookupSymbol" c_lookupSymbol :: CString -> IO (Ptr a)
foreign import ccall unsafe "loadArchive" c_loadArchive :: CFilePath -> IO Int
foreign import ccall unsafe "loadObj" c_loadObj :: CFilePath -> IO Int
foreign import ccall unsafe "purgeObj" c_purgeObj :: CFilePath -> IO Int
foreign import ccall unsafe "unloadObj" c_unloadObj :: CFilePath -> IO Int
foreign import ccall unsafe "resolveObjs" c_resolveObjs :: IO Int
foreign import ccall unsafe "addLibrarySearchPath" c_addLibrarySearchPath :: CFilePath -> IO (Ptr ())
foreign import ccall unsafe "findSystemLibrary" c_findSystemLibrary :: CFilePath -> IO CFilePath
foreign import ccall unsafe "removeLibrarySearchPath" c_removeLibrarySearchPath :: Ptr() -> IO Bool
-- -----------------------------------------------------------------------------
-- Configuration
#include "ghcautoconf.h"
cLeadingUnderscore :: Bool
#if defined(LEADING_UNDERSCORE)
cLeadingUnderscore = True
#else
cLeadingUnderscore = False
#endif
isWindowsHost :: Bool
#if defined(mingw32_HOST_OS)
isWindowsHost = True
#else
isWindowsHost = False
#endif
| sdiehl/ghc | libraries/ghci/GHCi/ObjLink.hs | bsd-3-clause | 7,099 | 0 | 19 | 1,580 | 1,389 | 724 | 665 | 113 | 2 |
{-# TYPE nat = {x | x > 0} #-}
{-# TYPE notNull = {xs | not (null xs)} #-}

-- | Identity on Ints; the CONTRACT refines both argument and result to
-- the 'nat' synonym (strictly positive).
{-# CONTRACT f :: nat -> nat #-}
f :: Int -> Int
f x = x

-- | Head of an Int list.  The pattern match is deliberately partial:
-- totality is promised by the 'notNull' contract, which this ESC test
-- exercises.
{-# CONTRACT g :: notNull -> any #-}
g :: [Int] -> Int
g (x:xs) = x
{-# LANGUAGE PolyKinds, FunctionalDependencies, MultiParamTypeClasses #-}
module T9201 where
-- | Multi-parameter class with a functional dependency under PolyKinds:
-- the (kind-polymorphic) functor @f@ determines the hom @d@.
-- NOTE(review): part of a typechecker test suite — keep as written.
class MonoidalCCC (f :: x -> y) (d :: y -> y -> *) | f -> d where
  ret :: d a (f a)
| urbanslug/ghc | testsuite/tests/typecheck/should_fail/T9201.hs | bsd-3-clause | 180 | 0 | 9 | 38 | 62 | 35 | 27 | 4 | 0 |
module Main (main) where
import System.Environment
import LrcPrelude
import Funcs_Parser_Lazy
import Data_Lazy
import Visfun_Lazy
-- runSemantics :: String -> [BibEntry]
-- Parse the input and evaluate the lazy circular semantics with the
-- given Integer parameter, yielding (code, errors, type-env, pretty-print).
runSemantics inp pw = lrcEval (runParser inp) pw

-- Read the source file, run the semantics, and print the pretty-printed
-- input, the generated MSP code, the detected semantic errors, and the
-- resulting type environment.
runEval fn pw
  = do s <- readFile fn
       let (code,errors,te,pp) = runSemantics s pw
       putStrLn "Pretty Printed Input:"
       putStrLn pp
       putStrLn "MSP Generated Code:"
       putStrLn (showCode code)
       putStrLn "Detected Semantic Errors:"
       putStrLn (show errors)
       putStrLn (show te)
       -- putStrLn pp_code
       return ()

-- Entry point: first argument is the source file, second an Integer
-- parameter passed through to the semantics.
-- NOTE(review): 'head'/'tail' crash on fewer than two arguments.
main :: IO ()
main = do args <- getArgs
          putStrLn (show args)
          let fn = head args
          let pw = mytoint (head . tail $ args)
          runEval fn pw
-- | Parse a decimal Integer from a String (crashes on malformed input,
-- like the original 'read'-based definition).
mytoint :: String -> Integer
mytoint = read
-- Render a code listing: one instruction per line, with a trailing
-- blank line (exactly as the original recursive definition produced).
showCode xs = unlines (map showInstr xs) ++ "\n"
-- Render one abstract-machine instruction in assembly-like syntax.
showInstr (C_ALabel_1 n)   = (show n) ++ ":"
showInstr C_Add_1          = "ADD"
showInstr C_And_1          = "AND"
showInstr (C_Call_1 n)     = "CALL " ++ (showName n)
showInstr C_Cod_1          = "CODIGO"
showInstr C_Data_1         = "MEMORIA DE DADOS"
showInstr C_Div_1          = "DIV"
showInstr C_Eq_1           = "EQ"
showInstr C_Gt_1           = "GT"
showInstr C_Halt_1         = "HALT"
showInstr C_IIn_1          = "IN"
showInstr C_IOut_1         = "OUT"
showInstr (C_Jump_1 n)     = "JMP " ++ (showName n)
showInstr (C_Jumpf_1 n)    = "JMPF " ++ (showName n)
showInstr C_Load_1         = "LOAD"
showInstr C_Lt_1           = "LT"
showInstr C_Minus_1        = "MIN"
showInstr C_Mul_1          = "MUL"
showInstr C_Neq_1          = "NEQ"
showInstr C_Not_1          = "NOT"
showInstr C_Or_1           = "OR"
-- PUSHa carries a name plus an offset; the other PUSH variants carry a
-- single literal of the matching type.
showInstr (C_Pusha_1 n i)  = "PUSHa " ++ (showName n) ++ " " ++ (show i)
showInstr (C_Pushb_1 b)    = "PUSHb " ++ (show b)
showInstr (C_Pushi_1 i)    = "PUSHi " ++ (show i)
showInstr (C_Pushr_1 r)    = "PUSHr " ++ (show r)
showInstr C_Ret_1          = "RET"
showInstr C_Store_1        = "STORE"
showInstr C_Sub_1          = "SUB"
showInstr (C_Var_1 n i t)  = (showName n) ++ " TAM " ++ (show i)

-- Render an identifier name.
showName (C_Ident_1 n) = show n
| urbanslug/ghc | testsuite/tests/programs/joao-circular/Main.hs | bsd-3-clause | 2,099 | 0 | 13 | 602 | 729 | 357 | 372 | 58 | 1 |
{-# LANGUAGE MagicHash #-}
-- !! pattern-matching failure on functions that return Int#
--import PrelBase --ghc1.3
import GHC.Base
-- Build two Bar1 values and print the boxed result of adding their
-- unboxed Int# fields (40 + 2 = 42).  'resps' is a bottom that is never
-- forced by 'foo'.
main = putStr (shows (I# (foo bar1 bar2)) "\n")
  where
    bar1  = Bar1 40 (39,38) resps
    bar2  = Bar1  2 ( 1, 0) resps
    resps = error "1.2 responses"

data Response = Response -- stub

data Bar
  = Bar1 Int (Int,Int) [Response]
  | Bar2 Int Int#
  | Bar3 Int

-- | Add the Int fields of two Bar1 values, returning an unboxed Int#.
-- Deliberately partial: any other constructor triggers the
-- pattern-match failure this test exists to exercise.
foo :: Bar -> Bar -> Int#
foo (Bar1 (I# i) _ _) (Bar1 (I# j) _ _) = i +# j
| urbanslug/ghc | testsuite/tests/codeGen/should_run/cgrun032.hs | bsd-3-clause | 475 | 0 | 11 | 116 | 186 | 101 | 85 | 13 | 1 |
-- |
-- Module: src-test/Main.hs
-- There are three property tests.
-- propPrime : verifies the prime number generator generates prime numbers.
-- propTabulator : verifies the tabulator is producing a multiplication table.
-- propFormatter : verifies the formatted data is accurately produced from
-- tabulator.
--
-- propPrime checks against known-to-be-accurate isPrime function.
-- both propTabulator and propFormatter reconstructs input from output.
--
-- There is one unit test
-- unitInput verifies error handling works correctly.
module Main (main) where
import BasicPrelude
import Test.Hspec (hspec)
--
import PrimeTest
import TabulatorTest
import FormatTest
import ErrorTest
-- | Run the unit test first, then the three property suites, each under
-- its own hspec invocation (as before).
main :: IO ()
main = do
  hspec unitInput
  mapM_ hspec
    [ propPrime pUpperBound
    , propTabulator tUpperBound
    , propFormatter fUpperBound
    ]
-- Upper bounds handed to the three property suites; the prime-generator
-- check explores the largest space.
pUpperBound :: Int
pUpperBound = 100000

tUpperBound :: Int
tUpperBound = 1000

fUpperBound :: Int
fUpperBound = 100
| mlitchard/primenator | src-test/Main.hs | isc | 956 | 0 | 8 | 153 | 129 | 76 | 53 | 19 | 1 |
{- Copyright (C) 2014 Calvin Beck
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
{-# LANGUAGE OverloadedStrings #-}
import Control.Concurrent
import Control.Monad
import Data.ByteString as B
import System.Environment
import System.IO
import System.Hardware.Serialport
-- Entry point: expects exactly one argument, the serial port path.
-- NOTE(review): the [port] match crashes on any other argument count.
main :: IO ()
main = do [port] <- getArgs
          time <- withSerial port defaultSerialSettings (timeSerial)
          print time
          print (B.length time)
-- | Count the number of times we get an 'a' or something.
--
-- Waits 10 s for the line to settle, flushes pending input, sends the
-- single-character command "s", gives the device 300 ms to respond, and
-- reads up to 1000000 bytes.
timeSerial :: SerialPort -> IO ByteString
timeSerial port = do threadDelay 10000000
                     flush port
                     -- 'send' returns the byte count written; it was
                     -- previously bound to an unused 'sent' variable,
                     -- which provoked a warning.  Discard it explicitly.
                     _ <- send port "s"
                     threadDelay 300000
                     recv port 1000000
| Chobbes/SerialTimer | Main.hs | mit | 1,765 | 0 | 10 | 418 | 157 | 77 | 80 | 18 | 1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module ListAp where
import Data.Semigroup
import GHC.Generics
import Test.QuickCheck (Arbitrary, CoArbitrary, arbitrary,
oneof)
import Test.QuickCheck.Checkers (EqProp, eq, (=-=))
-- | A hand-rolled cons list for exercising the class hierarchy;
-- CoArbitrary comes for free via Generic.
data List a = Nil | Cons a (List a) deriving (Eq, Show, Generic, CoArbitrary)

-- | '<>' is list concatenation.
instance Semigroup (List a) where
  (<>) (Cons a as) bs = Cons a (as <> bs)
  (<>) Nil a = a

instance Monoid (List a) where
  mempty = Nil
  mappend = (<>)

instance Functor List where
  fmap _ Nil = Nil
  fmap f (Cons y ys) = Cons (f y) (fmap f ys)
-- | Lawful right fold: @foldr f z (Cons a as) = f a (foldr f z as)@.
-- (The previous version threaded the head into the accumulator,
-- @foldr f (f y b) ys@, which reversed the element order for
-- non-commutative combining functions and violated the Foldable laws.)
instance Foldable List where
  foldr _ z Nil         = z
  foldr f z (Cons y ys) = f y (foldr f z ys)

-- | Apply every function to every argument; results for the first
-- function come first, matching the list Applicative.  Written against
-- the lawful foldr above: the old definition combined as
-- @b <> fmap f xs@, which only produced the right order because of the
-- old foldr bug — the two fixes cancel, so observable behavior
-- (e.g. 'apTestOk') is unchanged.
instance Applicative List where
  pure a = Cons a Nil
  (<*>) fs xs = foldr (\f acc -> fmap f xs <> acc) Nil fs
-- | Generates either a cons onto an arbitrary tail or a singleton.
-- Note: bare Nil is only ever produced as a tail, never at top level,
-- so generated lists are always non-empty.
instance Arbitrary a => Arbitrary (List a) where
  arbitrary = do
    a <- arbitrary
    b <- arbitrary
    oneof [return $ Cons a b, return $ Cons a Nil]

-- | Structural equality suffices for property comparison.
instance Eq a => EqProp (List a) where
  (=-=) = eq
-- | Take the first @n@ elements.  Matches the original equation order:
-- an exhausted list wins over a zero count, and a negative count keeps
-- consuming until Nil (deliberately preserved).
take' :: Int -> List a -> List a
take' n list =
  case (n, list) of
    (_, Nil)       -> Nil
    (0, _)         -> Nil
    (_, Cons a as) -> Cons a (take' (n - 1) as)
-- | Number of elements, by direct structural recursion.
length' :: List a -> Int
length' Nil           = 0
length' (Cons _ rest) = 1 + length' rest
-- Hand-checked fixtures for poking the instances in GHCi.
x :: List Int
x = Cons 1 (Cons 2 (Cons 3 (Cons 4 Nil)))

x' :: List Int
x' = (+10) <$> x

x'' :: List Int
x'' = Cons (+1) (Cons (*5) Nil) <*> x

fns :: List (Int -> Int)
fns = Cons (+1) (Cons (*2) Nil)

-- Infinite ascending list starting at n; do not force fully.
infList' :: Int -> List (Int)
infList' n = Cons n (infList' (n + 1))

infList :: List (Int)
infList = infList' 0

y :: List Int
y = Cons 1 (Cons 2 Nil)

apTest :: List Int
apTest = fns <*> y

-- Expected: (+1) mapped over y first, then (*2) mapped over y.
apTestOk :: Bool
apTestOk = apTest == (Cons 2 (Cons 3 (Cons 2 (Cons 4 Nil))))
| JoshuaGross/haskell-learning-log | Code/Haskellbook/Applicatives/src/ListAp.hs | mit | 1,917 | 0 | 13 | 542 | 852 | 452 | 400 | 55 | 1 |
--------------------------------------------------------------------
-- Simple Grammar Parser
-- Created: April 2015, Samy Narrainen
--------------------------------------------------------------------
module Grammar where
import Char
-- Symbols are encoded as tagged strings: a leading 'n' marks a
-- non-terminal ("nS") and a leading 't' a terminal ("ta").
type NonTerminal = String
type Terminal = String
type Rule = [String] --Encoded as [ta,nR,tb]
type ProductionRule = (NonTerminal, [Rule]) --Encoded as ("S", [[ta,nR,tb], ["tb","nQ"]])
type Grammar = ([NonTerminal], [Terminal], [ProductionRule], NonTerminal)

-- | The empty-production marker terminal.
lambda :: Terminal
lambda = "tLAMBDA"
-- | Prepend a production rule to a rule set.
addPRule :: ProductionRule -> [ProductionRule] -> [ProductionRule]
addPRule = (:)
{- Verification Functions -}
--------------------------------------------------------------------
--The non/terms need to be present in the grammar to be a valid rule
-- | Check that every symbol of a rule is declared in the grammar:
-- 'n'-tagged names must be among the non-terminals, 't'-tagged names
-- among the terminals.  An empty rule is trivially valid; an untagged
-- or empty symbol makes the rule invalid (the previous version crashed
-- on "" via the partial 'head'/'tail').
verifyRule :: Rule -> Grammar -> Bool
verifyRule [] _ = True
verifyRule (sym:rest) g@(nTerms, terms, _, _) =
  case sym of
    ('n':name) -> contains name nTerms && verifyRule rest g
    ('t':name) -> contains name terms  && verifyRule rest g
    _          -> False
--verifyRule ["ta","nS","tb"] g1
-- | A symbol is a non-terminal iff it carries the 'n' tag (e.g. "nS").
-- Total: the empty string is simply not a non-terminal, where the
-- previous @head s@ crashed; the old second equation was also
-- unreachable and the @if ... then True else False@ redundant.
isNonTerm :: String -> Bool
isNonTerm ('n':_) = True
isNonTerm _       = False
-- | A symbol is a terminal iff it carries the 't' tag (e.g. "ta").
-- Total: the empty string is simply not a terminal, where the previous
-- @head s@ crashed; the old second equation was also unreachable.
isTerm :: String -> Bool
isTerm ('t':_) = True
isTerm _       = False
{- Data Retrieval Functions -}
--------------------------------------------------------------------
-- | Project the rule alternatives out of a production rule.
getRules :: ProductionRule -> [Rule]
getRules = snd
-- | Find the production rule for a non-terminal; calls 'error' when the
-- non-terminal has no rule (same failure mode as before).
getPRule :: NonTerminal -> [ProductionRule] -> ProductionRule
getPRule nTerm [] = error "Undefined: no rule associated with nonTerminal"
getPRule nTerm (rule@(n, _):rest)
  | n == nTerm = rule
  | otherwise  = getPRule nTerm rest
{- Show Functions -}
--------------------------------------------------------------------
-- | Render a production rule as "N -> alt1 | alt2 | ...".
showPRule :: ProductionRule -> String
showPRule (nTerm, [r]) = nTerm ++ " -> " ++ showRule r
showPRule (nTerm, rs)  = nTerm ++ " -> " ++ showRules rs
--showPRule ("P", [["ta","nS","tb"], [lambda])
--map showPRule [("P", [["ta","nS","tb"]]), ("Q", [["ta","nS","tb"]])]

-- | Render each production rule on its own tab-indented line.
showPRules :: [ProductionRule] -> String
showPRules [r]    = "\t" ++ showPRule r
showPRules (r:rs) = "\t" ++ showPRule r ++ "\n" ++ showPRules rs
showPRules _      = ""
-- | Render a rule by stripping each symbol's one-character tag and
-- concatenating the names (empty rule renders as "").
showRule :: Rule -> String
showRule = concatMap tail
-- | Render rule alternatives separated by " | ".
showRules :: [Rule] -> String
showRules [r]    = showRule r
showRules (r:rs) = showRule r ++ " | " ++ showRules rs
showRules _      = ""

-- | Render the four grammar components in N/T/P/S form.
showGrammar :: Grammar -> String
showGrammar (nTerm, term, p, s) =
  "N = " ++ show nTerm ++ "\n" ++ "T = " ++ show term ++ "\n"
    ++ "P = \n" ++ showPRules p ++ "\n" ++ "S = " ++ s
{- Test Input -}
--------------------------------------------------------------------
-- Example grammar over terminals {a,b,c} with start symbol nS.
g1 :: Grammar
--g1 = (["S", "P", "Q", "R"], ["a","b","c"], pRules1, "nS")
g1 = (["nS", "nP", "nQ", "nR"], ["ta","tb","tc"], pRules1, "nS")

-- Productions: S -> aP | bQ ; P -> a | c ; Q -> aS | c.
-- NOTE(review): nR is declared in g1 but has no production here.
pRules1 :: [ProductionRule]
pRules1 = [("nS", [["ta","nP"],["tb","nQ"]]), ("nP", [["ta"],["tc"]]), ("nQ", [["ta","nS"],["tc"]])]
{- Indirect Functionality -}
--------------------------------------------------------------------
-- | Linear membership test (same contract as Prelude 'elem').
contains :: Eq a => a -> [a] -> Bool
contains _ []     = False
contains a (x:xs)
  | a == x    = True
  | otherwise = contains a xs
| SamyNarrainen/SimpleGrammarParser | Haskell/Grammar.hs | mit | 3,388 | 0 | 15 | 665 | 1,000 | 560 | 440 | 53 | 2 |
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.Optimizer.MagicDo
-- Copyright : (c) Phil Freeman 2013-14
-- License : MIT
--
-- Maintainer : Phil Freeman <paf31@cantab.net>
-- Stability : experimental
-- Portability :
--
-- |
-- This module implements the "Magic Do" optimization, which inlines calls to return
-- and bind for the Eff monad, as well as some of its actions.
--
-----------------------------------------------------------------------------
module Language.PureScript.Optimizer.MagicDo (
magicDo
) where
import Data.List (nub)
import Data.Maybe (fromJust, isJust)
import Language.PureScript.Options
import Language.PureScript.CodeGen.JS.AST
import Language.PureScript.CodeGen.Common (identToJs)
import Language.PureScript.Names
import qualified Language.PureScript.Constants as C
-- | Run the Eff-monad inlining passes unless disabled by options:
-- first desugar bind/return for Eff ('magicDo''), then inline ST
-- ('inlineST').
magicDo :: Options -> JS -> JS
magicDo opts | optionsNoMagicDo opts = id
             | otherwise             = inlineST . magicDo'
-- |
-- Inline type class dictionaries for >>= and return for the Eff monad
--
-- E.g.
--
-- Prelude[">>="](dict)(m1)(function(x) {
-- return ...;
-- })
--
-- becomes
--
-- function __do {
-- var x = m1();
-- ...
-- }
--
-- | Inline Eff's bind/return dictionaries, rewriting do-blocks into
-- plain JavaScript statement sequences wrapped in a marker function
-- named @__do@, which a second bottom-up pass ('undo') strips again.
magicDo' :: JS -> JS
magicDo' = everywhereOnJS undo . everywhereOnJSTopDown convert
  where
  -- The name of the function block which is added to denote a do block
  fnName = "__do"
  -- Desugar monomorphic calls to >>= and return for the Eff monad
  convert :: JS -> JS
  -- Desugar return
  convert (JSApp (JSApp ret [val]) []) | isReturn ret = val
  -- Desugar >>
  -- NOTE(review): 'last js' is partial; the guard only runs it when the
  -- function body is non-empty in practice — presumably codegen always
  -- emits at least one statement here (TODO confirm).
  convert (JSApp (JSApp bind [m]) [JSFunction Nothing ["_"] (JSBlock js)]) | isBind bind && isJSReturn (last js) =
    let JSReturn ret = last js in
    JSFunction (Just fnName) [] $ JSBlock (JSApp m [] : init js ++ [JSReturn (JSApp ret [])] )
  -- Desugar >>=
  convert (JSApp (JSApp bind [m]) [JSFunction Nothing [arg] (JSBlock js)]) | isBind bind && isJSReturn (last js) =
    let JSReturn ret = last js in
    JSFunction (Just fnName) [] $ JSBlock (JSVariableIntroduction arg (Just (JSApp m [])) : init js ++ [JSReturn (JSApp ret [])] )
  -- Desugar untilE
  convert (JSApp (JSApp f [arg]) []) | isEffFunc C.untilE f =
    JSApp (JSFunction Nothing [] (JSBlock [ JSWhile (JSUnary Not (JSApp arg [])) (JSBlock []), JSReturn (JSObjectLiteral []) ])) []
  -- Desugar whileE
  convert (JSApp (JSApp (JSApp f [arg1]) [arg2]) []) | isEffFunc C.whileE f =
    JSApp (JSFunction Nothing [] (JSBlock [ JSWhile (JSApp arg1 []) (JSBlock [ JSApp arg2 [] ]), JSReturn (JSObjectLiteral []) ])) []
  convert other = other
  -- Check if an expression represents a monomorphic call to >>= for the Eff monad
  isBind (JSApp bindPoly [effDict]) | isBindPoly bindPoly && isEffDict C.bindEffDictionary effDict = True
  isBind _ = False
  -- Check if an expression represents a monomorphic call to return for the Eff monad
  isReturn (JSApp retPoly [effDict]) | isRetPoly retPoly && isEffDict C.monadEffDictionary effDict = True
  isReturn _ = False
  -- Check if an expression represents the polymorphic >>= function
  -- (either as a property access or a string-indexed lookup on Prelude)
  isBindPoly (JSAccessor prop (JSVar prelude)) = prelude == C.prelude && prop == identToJs (Op (C.>>=))
  isBindPoly (JSIndexer (JSStringLiteral bind) (JSVar prelude)) = prelude == C.prelude && bind == (C.>>=)
  isBindPoly _ = False
  -- Check if an expression represents the polymorphic return function
  isRetPoly (JSAccessor returnEscaped (JSVar prelude)) = prelude == C.prelude && returnEscaped == C.returnEscaped
  isRetPoly (JSIndexer (JSStringLiteral return') (JSVar prelude)) = prelude == C.prelude && return' == C.return
  isRetPoly _ = False
  -- Check if an expression represents a function in the Ef module
  isEffFunc name (JSAccessor name' (JSVar eff)) = eff == C.eff && name == name'
  isEffFunc _ _ = False
  -- Check if an expression represents the Monad Eff dictionary
  isEffDict name (JSApp (JSVar ident) [JSObjectLiteral []]) | ident == name = True
  isEffDict name (JSApp (JSAccessor prop (JSVar eff)) [JSObjectLiteral []]) = eff == C.eff && prop == name
  isEffDict _ _ = False
  -- Remove __do function applications which remain after desugaring
  undo :: JS -> JS
  undo (JSReturn (JSApp (JSFunction (Just ident) [] body) [])) | ident == fnName = body
  undo other = other
  -- Predicate used by the >>/>>= cases: is the statement a return?
  isJSReturn (JSReturn _) = True
  isJSReturn _ = False
-- |
-- Inline functions in the ST module
--
inlineST :: JS -> JS
inlineST = everywhereOnJS convertBlock
  where
  -- Look for runST blocks and inline the STRefs there.
  -- If all STRefs are used in the scope of the same runST, only using { read, write, modify }STRef then
  -- we can be more aggressive about inlining, and actually turn STRefs into local variables.
  convertBlock (JSApp f [arg]) | isSTFunc C.runST f || isSTFunc C.runSTArray f =
    let refs = nub . findSTRefsIn $ arg
        usages = findAllSTUsagesIn arg
        allUsagesAreLocalVars = all (\u -> let v = toVar u in isJust v && fromJust v `elem` refs) usages
        localVarsDoNotEscape = all (\r -> length (r `appearingIn` arg) == length (filter (\u -> let v = toVar u in v == Just r) usages)) refs
    in everywhereOnJS (convert (allUsagesAreLocalVars && localVarsDoNotEscape)) arg
  convertBlock other = other
  -- Convert a block in a safe way, preserving object wrappers of references,
  -- or in a more aggressive way, turning wrappers into local variables depending on the
  -- agg(ressive) parameter.
  convert agg (JSApp f [arg]) | isSTFunc C.newSTRef f =
    JSFunction Nothing [] (JSBlock [JSReturn $ if agg then arg else JSObjectLiteral [(C.stRefValue, arg)]])
  convert agg (JSApp (JSApp f [ref]) []) | isSTFunc C.readSTRef f =
    if agg then ref else JSAccessor C.stRefValue ref
  convert agg (JSApp (JSApp (JSApp f [ref]) [arg]) []) | isSTFunc C.writeSTRef f =
    if agg then JSAssignment ref arg else JSAssignment (JSAccessor C.stRefValue ref) arg
  convert agg (JSApp (JSApp (JSApp f [ref]) [func]) []) | isSTFunc C.modifySTRef f =
    if agg then JSAssignment ref (JSApp func [ref]) else JSAssignment (JSAccessor C.stRefValue ref) (JSApp func [JSAccessor C.stRefValue ref])
  -- Array peek/poke become plain JavaScript index reads/writes.
  convert _ (JSApp (JSApp (JSApp f [arr]) [i]) []) | isSTFunc C.peekSTArray f =
    JSIndexer i arr
  convert _ (JSApp (JSApp (JSApp (JSApp f [arr]) [i]) [val]) []) | isSTFunc C.pokeSTArray f =
    JSAssignment (JSIndexer i arr) val
  convert _ other = other
  -- Check if an expression represents a function in the ST module
  isSTFunc name (JSAccessor name' (JSVar st)) = st == C.st && name == name'
  isSTFunc _ _ = False
  -- Find all ST Refs initialized in this block
  findSTRefsIn = everythingOnJS (++) isSTRef
    where
    isSTRef (JSVariableIntroduction ident (Just (JSApp (JSApp f [_]) []))) | isSTFunc C.newSTRef f = [ident]
    isSTRef _ = []
  -- Find all STRefs used as arguments to readSTRef, writeSTRef, modifySTRef
  findAllSTUsagesIn = everythingOnJS (++) isSTUsage
    where
    isSTUsage (JSApp (JSApp f [ref]) []) | isSTFunc C.readSTRef f = [ref]
    isSTUsage (JSApp (JSApp (JSApp f [ref]) [_]) []) | isSTFunc C.writeSTRef f || isSTFunc C.modifySTRef f = [ref]
    isSTUsage _ = []
  -- Find all uses of a variable
  appearingIn ref = everythingOnJS (++) isVar
    where
    isVar e@(JSVar v) | v == ref = [e]
    isVar _ = []
  -- Convert a JS value to a String if it is a JSVar
  toVar (JSVar v) = Just v
  toVar _ = Nothing
| bergmark/purescript | src/Language/PureScript/Optimizer/MagicDo.hs | mit | 7,383 | 0 | 25 | 1,458 | 2,405 | 1,228 | 1,177 | 84 | 18 |
import Data.Char (toUpper)
import Data.Time.Calendar (fromGregorian)
import System.Exit (ExitCode (..), exitWith)
import Test.HUnit (Assertion, Counts (..), Test (..),
runTestTT, (@=?))
import Person (Address (..), Born (..),
Name (..), Person (..), bornStreet,
renameStreets, setBirthMonth,
setCurrentStreet)
-- | Wrap an assertion into a labelled HUnit 'Test'.
testCase :: String -> Assertion -> Test
testCase label = TestLabel label . TestCase
-- | Run a test action and exit nonzero when any failure or error occurred.
exitProperly :: IO Counts -> IO ()
exitProperly m = do
  counts <- m
  let anyBad = failures counts /= 0 || errors counts /= 0
  exitWith (if anyBad then ExitFailure 1 else ExitSuccess)
-- | Run the whole suite and propagate the result as the process exit code.
main :: IO ()
main = exitProperly (runTestTT (TestList [TestList personTests]))
-- | Fixture: a fully populated 'Person' exercised by every test below.
testPerson :: Person
testPerson = Person {
  _name = Name {
    _foreNames = "Jane Joanna",
    _surName = "Doe"
  },
  _born = Born {
    _bornAt = Address {
      _street = "Longway",
      _houseNumber = 1024,
      _place = "Springfield",
      _country = "United States"
    },
    _bornOn = fromGregorian 1984 4 12
  },
  _address = Address {
    _street = "Shortlane",
    _houseNumber = 2,
    _place = "Fallmeadow",
    _country = "Canada"
  }
}
-- | One test per exported lens helper; expected value on the left of @=?.
personTests :: [Test]
personTests =
  [ testCase "bornStreet" $
    "Longway" @=? bornStreet (_born testPerson),
    testCase "setCurrentStreet" $
    "Middleroad" @=? (_street . _address) (setCurrentStreet "Middleroad" testPerson),
    testCase "setBirthMonth" $
    fromGregorian 1984 9 12 @=? (_bornOn . _born) (setBirthMonth 9 testPerson),
    testCase "renameStreets birth" $
    "LONGWAY" @=? (_street . _bornAt . _born) (renameStreets (map toUpper) testPerson),
    testCase "renameStreets current" $
    "SHORTLANE" @=? (_street . _address) (renameStreets (map toUpper) testPerson)
  ]
| stevejb71/xhaskell | lens-person/lens-person_test.hs | mit | 2,347 | 0 | 12 | 983 | 554 | 310 | 244 | 47 | 2 |
module Network.Skype.Parser.ChatMember where
import Control.Applicative
import Data.Attoparsec.ByteString.Lazy
import Network.Skype.Parser.Chat
import Network.Skype.Parser.Types
import Network.Skype.Protocol.ChatMember
-- | Parse one chat-member property notification: a property keyword
-- followed by its space-separated value.  The alternatives are tried in
-- order; each keyword is distinct so no backtracking subtleties arise.
chatMemberProperty :: Parser ChatMemberProperty
chatMemberProperty = choice
  [ ChatMemberChatName <$> (property "CHATNAME" *> chatID)
  , ChatMemberIdentity <$> (property "IDENTITY" *> userID)
  , ChatMemberRole <$> (property "ROLE" *> chatRole)
  , ChatMemberIsActive <$> (property "IS_ACTIVE" *> boolean)
  ]
  where
    -- keyword followed by the separating whitespace before the value
    property prop = string prop *> spaces
| emonkak/skype4hs | src/Network/Skype/Parser/ChatMember.hs | mit | 587 | 0 | 10 | 79 | 142 | 81 | 61 | 13 | 1 |
{- |
Module : Main
Description : Evaluate a BrainFuck program
Copyright : (c) Sebastian Galkin, 2014
License : MIT
Maintainer : paraseba@gmail.com
Stability : experimental
Usage:
> brainfuck path/to/program.bf
It will parse the program and evaluate it by doing I/O to the console. In case
of parsing or execution errors it reports them to stderr
-}
module Main(main) where
import HaskBF.Eval
( evalBS, EvalResult ( EvalSuccess, EvalExecError, EvalParseError )
, defaultIOMachine, BFExError, Tape ( Tape ), BFTape, errMsg, errTape, rTape )
import System.Exit
( ExitCode (..), exitWith )
import System.Environment
( getArgs, getProgName )
import qualified Data.ByteString.Lazy as BS
-- | Read the program path from the command line, evaluate the BrainFuck
-- source against the console I/O machine, report the outcome, and exit
-- with the corresponding status code.
main :: IO ExitCode
main = do
  path <- getProgram
  source <- BS.readFile path
  result <- evalBS defaultIOMachine source
  status <- reportResults result
  exitWith status
{- | Read command line and obtain the path to the program. Display error message
- if missing argument -}
-- | Obtain the program path from the command line; abort with a usage
-- message unless exactly one argument was given.
getProgram :: IO FilePath
getProgram = do
  args <- getArgs
  case args of
    [path] -> return path
    _ -> do
      exe <- getProgName
      error $ "Usage: " ++ exe ++ " filepath"
-- | Report result of program parsing and evaluation
-- | Map an evaluation result to an exit code, printing diagnostics for
-- the failure cases (parse error -> 1, execution error -> 2).
-- NOTE(review): the module header promises errors on stderr, but these
-- go to stdout via 'putStrLn'/'print' — confirm which is intended.
reportResults :: EvalResult -> IO ExitCode
reportResults (EvalSuccess _) = return ExitSuccess
reportResults (EvalParseError parseError) = do
  putStrLn "Error parsing program:"
  print parseError
  return $ ExitFailure 1
reportResults (EvalExecError err) = do
  putStrLn $ "Error evaluating program: " ++ errMsg err
  putStrLn $ "Current tape value: " ++ (show . rTape . errTape) err
  putStrLn $ "Consumed tape: " ++ (showConsumed . errTape) err
  return $ ExitFailure 2
{- | Use heuristic to display tape state. It estimates consumed right tape by
- calling 'consumed' -}
showConsumed :: BFTape -> String
showConsumed (Tape _ _ r) =
  -- padded with three trailing zeros and an ellipsis to suggest the
  -- (conceptually infinite) remainder of the tape
  "[" ++ concatMap ( (++ ",") . show ) (consumed r ++ [0, 0, 0]) ++ "..."
-- | Return consumed tape by assuming that it is unused after 10 zeros.
-- | Return the consumed prefix of the tape, assuming it is unused after
-- a run of 10 zeros.  Fix: the original had no base case for the empty
-- list, so a finite tape without such a run crashed with a pattern-match
-- failure; we now return the whole list in that case.
consumed :: (Eq a, Num a) => [a] -> [a]
consumed (0 : 0 : 0 : 0 : 0 : 0 : 0 : 0 : 0 : 0 : _) = []
consumed (x : xs) = x : consumed xs
consumed [] = []
| paraseba/haskell-brainfuck | Main/Main.hs | mit | 2,167 | 0 | 16 | 496 | 524 | 279 | 245 | 41 | 2 |
{-# LANGUAGE RecordWildCards #-}
module Database.Mongodb.Internal
( StrictByteString
, LazyByteString
, RequestIdCounter(..)
, ObjectIdCounter(..)
, newRequestIdCounter
, newObjectIdCounter
, newRequestId
, newObjectId
) where
import Data.Int (Int32)
import Data.IORef (IORef, newIORef, atomicModifyIORef')
import Data.Time.Clock.POSIX (getPOSIXTime)
import System.Posix.Process (getProcessID)
import System.Random (Random(..), randomIO)
import qualified Data.ByteString.Lazy as LazyByteString
import qualified Data.ByteString.Char8 as StrictByteString
import Data.Bson (ObjectId(..))
import Data.Word.Word24 (Word24)
type StrictByteString = StrictByteString.ByteString
type LazyByteString = LazyByteString.ByteString
newtype RequestIdCounter = RequestIdCounter { unRequestIdCounter :: IORef Int32 }
newtype ObjectIdCounter = ObjectIdCounter { unObjectIdCounter :: IORef Word24 }
-- Orphan instance: 'Word24' lives in the word24 package, 'Random' in
-- random.  Routed through 'fromEnum'/'toEnum' on Int, which is safe
-- because Word24 fits comfortably in Int.
instance Random Word24 where
  randomR (a, b) g = case randomR (fromEnum a, fromEnum b) g of
    (x, g') -> (toEnum x, g')
  random g = randomR (minBound, maxBound) g
-- | Fresh request-id counter starting at zero.
newRequestIdCounter :: IO RequestIdCounter
newRequestIdCounter = do
  ref <- newIORef 0
  return (RequestIdCounter ref)

-- | Fresh object-id counter seeded with a random 24-bit value.
newObjectIdCounter :: IO ObjectIdCounter
newObjectIdCounter = do
  seed <- randomIO
  ref <- newIORef seed
  return (ObjectIdCounter ref)

-- | Atomically post-increment the counter, returning the old value.
newRequestId :: RequestIdCounter -> IO Int32
newRequestId (RequestIdCounter ref) = atomicModifyIORef' ref bump
  where bump n = (n + 1, n)
-- | Build a BSON ObjectId: post-incremented counter, current POSIX time,
-- current process id, and a zero machine identifier.  Field names match
-- the 'ObjectId' record so RecordWildCards fills them in.
newObjectId :: ObjectIdCounter -> IO ObjectId
newObjectId (ObjectIdCounter ref) = do
  objectIdInc <- atomicModifyIORef' ref (\n -> (n + 1, n))
  objectIdTime <- fmap truncate getPOSIXTime
  objectIdPid <- fmap fromIntegral getProcessID
  let objectIdMachine = 0
  return $! ObjectId { .. }
| lambda-llama/mnogo | src/Database/Mongodb/Internal.hs | mit | 1,766 | 0 | 11 | 282 | 480 | 272 | 208 | 40 | 1 |
module Main where
import System.Console.GetOpt
import System.IO
import System.Exit
import System.Environment
import Data.List
import qualified Data.Map as M
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as BL
import Tokens
import Payload
import APNS
-- | Command-line flags accepted by apnspush; Ord/Enum/Bounded are used
-- for the Map keyed by flag in 'main'.
data Flag
        = Message                -- -m
        | Tokens                 -- -t
        | Cert                   -- -c
        | Key                    -- -k
        | Sandbox                -- -s
        | Help                   -- --help
        deriving (Eq, Ord, Enum, Show, Bounded)
-- | GetOpt descriptors.  All flags are NoArg: their *values* are taken
-- positionally from the non-option arguments in 'parse'/'main'.
flags :: [OptDescr Flag]
flags =
       [Option ['m'] [] (NoArg Message)
            "Message to send"
       ,Option ['t'] [] (NoArg Tokens)
            "File containing tokens of devices"
       ,Option ['s'] [] (NoArg Sandbox)
            "Use sandbox APNS"
       ,Option ['c'] [] (NoArg Cert)
            "Path to cert file"
       ,Option ['k'] [] (NoArg Key)
            "Path to key file"
       ,Option [] ["help"] (NoArg Help)
            "Print this help"
       ]
-- | Run GetOpt over argv.  Prints usage and exits on --help or on any
-- option error; otherwise returns the de-duplicated flags and the
-- positional file arguments (defaulting to stdin, "-").
parse :: [String] -> IO ([Flag], [String])
parse argv =
    case getOpt Permute flags argv of
        (args, fs, []) ->
            do
                let files = if null fs then ["-"] else fs
                if Help `elem` args
                    then
                        do
                            hPutStrLn stderr (usageInfo header flags)
                            exitWith ExitSuccess
                    else return (nub (concatMap set args), files)
        (_, _, errs) ->
            do
                hPutStrLn stderr (concat errs ++ usageInfo header flags)
                exitWith (ExitFailure 1)
    where
        header = "Usage: apnspush [-tmcks] [tokens file, message, cert file, key file, sandbox]"
        -- wrap a single flag into a list for concatMap
        set f = [f]
-- | Pair each flag with its positional argument, load tokens, build the
-- payload and push it via the sandbox or live APNS gateway.
-- NOTE(review): 'M.!' crashes with an unhelpful error when a required
-- flag (-t/-m/-c/-k) is missing — consider validating and printing usage.
main :: IO ()
main =
    do
        (args, files) <- getArgs >>= parse
        -- zip relies on flags and positional args appearing in matching
        -- order; Sandbox is removed because it takes no argument
        let m = M.fromList $ zip (delete Sandbox args) files
        tokens <- getTokens $ m M.! Tokens
        let payload = Payload (m M.! Message) 0 SoundTypeDefault
        let apns_ssl_certificate_file = m M.! Cert
        let apns_ssl_private_key_file = m M.! Key
        putStrLn "Send payload"
        putStrLn (show payload)
        putStrLn "to registered devices"
        print tokens
        case (elem Sandbox args) of
            True ->
                pushMessTest apns_ssl_private_key_file apns_ssl_certificate_file (BL.pack (show payload)) (map (B.pack) tokens)
            False ->
                pushMessLive apns_ssl_private_key_file apns_ssl_certificate_file (BL.pack (show payload)) (map (B.pack) tokens)
        return ()
| asukharev/apnpush | src/Main.hs | mit | 2,382 | 0 | 15 | 738 | 753 | 396 | 357 | 70 | 4 |
-- Copyright 2015 Mitchell Kember. Subject to the MIT License.
-- | Implements the read-eval-print loop in the 'repl' function.
module Lam.Repl (loadFile, startRepl) where
import Control.Error.Util (maybeT)
import Control.Monad (foldM)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Trans.Maybe (MaybeT(..))
import Data.Char (isSpace)
import Data.List (dropWhileEnd)
import Data.List.Split (splitWhen)
import Data.Maybe (fromMaybe, listToMaybe)
import System.Directory (doesFileExist)
import System.IO (hPutStrLn, stderr)
import qualified System.Console.Haskeline as H
import Lam.Eval
import Lam.Parse
-- | Sets up "Haskeline" and starts the read-eval-print loop.
-- | Configure Haskeline (with our custom completion) and enter the
-- read-eval-print loop in the given environment.
startRepl :: Environment -> IO ()
startRepl env =
  H.runInputT (H.setComplete completion H.defaultSettings) (repl env)
-- | Performs the read-eval-print loop forever: prompts the user, reads input,
-- parses it, evaluates it, prints the result, and repeats. The environment is
-- updated on each iteration with a new binding if the user enters an assignment
-- (and if they only enter an expression, it will still be bound to the percent
-- variable). Also handles special commands for loading files and exiting.
repl :: Environment -> H.InputT IO ()
repl env = maybe end handle =<< H.getInputLine "lam> "
  where
    end = return ()
    again = repl env
    -- True for an empty string, so a bare "load" also matches below
    nullOrSpace = maybe True isSpace . listToMaybe
    handle line = case stripC line of
      "" -> again
      "exit" -> end
      "quit" -> end
      'l':'o':'a':'d':str | nullOrSpace str ->
        let env' = loadFile (strip str) env
        in repl =<< liftIO env'
      str -> case parse str of
        Left msg -> liftIO (putErrLn msg) >> again
        Right (Assignment mx expr) -> do
          let evaluated = process $ eval env expr
          H.outputStrLn $ show evaluated
          -- binds the result to '%' and, when given, to the named token
          repl $ assign mx evaluated env
-- | Reads the indicated file and loads its lines using 'load'. Fails with an
-- error message if the file doesn't exist or if the path string is empty.
loadFile :: String -> Environment -> IO Environment
loadFile "" env = putErrLn "missing file path" >> return env
loadFile path env = doesFileExist path' >>= go
  where
    -- strip whitespace plus any quoting/escape characters from the path
    path' = filter (not . flip elem "\\'\"") . strip $ path
    failure msg = putErrLn (path' ++ ": " ++ msg) >> return env
    env' = liftIO (readFile path') >>= flip load env . splitLines
    go True = maybeT (failure "load failed") return env'
    go False = failure "file does not exist"
-- | Like `lines`, but combines multiple lines into one if the lines following
-- the first one are indented. Removes newlines in both cases.
splitLines :: String -> [String]
splitLines "" = []
splitLines str = map unpair . splitWhen eos . zip str $ offset
  where
    -- each character paired with its successor (lookahead of one)
    offset = tail str ++ "\n"
    -- end-of-statement: a newline NOT followed by indentation
    eos (x, next) = x == '\n' && notElem next whiteChars
    unpair = filter (/= '\n') . map fst
-- | Processes an already evaluated expression. This is used to implement some
-- special evaluation rules that are impossible to implement in Lam itself.
process :: Expression -> Expression
process app@(Application s a)
  | s == symStr "#" = case a of
    -- Special case for the eta-reduction of 1 to the identity function.
    Function f (Symbol g) | f == g -> symStr "1"
    Function f (Function x body) -> fromMaybe app $ decodeNum f x body
    _ -> app
  | s == symStr "?" = case a of
    Function x (Function y (Symbol z)) -> fromMaybe app $ decodeBool x y z
    _ -> app
process expr = expr
-- | Tries to decode a Church numeral given the parameters and function body.
-- Counts nested applications of f around the bound variable x.
decodeNum :: Token -> Token -> Expression -> Maybe Expression
decodeNum f x body = symStr . show <$> go body 0
  where
    go (Symbol y) n | y == x = Just n :: Maybe Int
    go (Application (Symbol g) expr) n | g == f = go expr (succ n)
    go _ _ = Nothing
-- | Tries to decode a Church Boolean value given the parameters and the
-- variable in the function body (which is just a symbol).
-- | Try to decode a Church Boolean from its two parameters and the
-- symbol in the body: @\x.\y.x@ is True and @\x.\y.y@ is False, provided
-- the parameters are distinct.
decodeBool :: Token -> Token -> Token -> Maybe Expression
decodeBool x y z
  | x == y    = Nothing
  | z == x    = Just (symStr "True")
  | z == y    = Just (symStr "False")
  | otherwise = Nothing
-- | Creates a symbol from a string.
-- | Creates a symbol expression from a string.
symStr :: String -> Expression
symStr name = Symbol (LongVar name)
-- | Evaluates each string as a line of input without printing the results.
-- Returns the augmented environment. It is assumed that the input consists of
-- assignments, since nothing will be done with lone expressions).
load :: [String] -> Environment -> MaybeT IO Environment
load input env = foldM accumLine env nonBlanks
  -- comments and blank lines are dropped before evaluation
  where nonBlanks = filter (not . null) . map stripC $ input
-- | Parses and evaluates one line of input, returning the new environment. The
-- 'IO' monad is necessary for printing error messages, and the 'MaybeT' monad
-- transformer is used to terminate the accumulation early as soon as the first
-- error is detected.
accumLine :: Environment -> String -> MaybeT IO Environment
accumLine env str = case parse str of
  Left err -> failure str err
  Right (Assignment mx expr) -> case mx of
    Nothing -> failure str "expected assignment"
    Just x -> return $ bind x (eval env expr) env
  where
    -- print the offending line plus the message, then short-circuit
    failure s = MaybeT . (>> return Nothing) . putErrLn . quote s
    quote s msg = "\"" ++ s ++ "\"\n" ++ msg
-- | Binds an already evaluated expression to the percentage variable (previous
-- result) and optionally to another token. Returns the updated environment.
-- | Bind the evaluated expression to the previous-result variable '%'
-- and, when a token is supplied, to that token as well.
assign :: Maybe Token -> Expression -> Environment -> Environment
assign mx expr env = maybe withPrev (\x -> bind x expr withPrev) mx
  where withPrev = bind (LongVar "%") expr env
-- | Removes a comment beginning with a semicolon, if any, and strips whitespace
-- from the string using 'strip'.
-- | Drop a trailing semicolon comment, then trim surrounding whitespace.
stripC :: String -> String
stripC = strip . fst . break (== ';')
-- | Removes leading and trailing whitespace from the string.
-- | Trim leading and trailing whitespace.
strip :: String -> String
strip s = trimLead (dropWhileEnd isSpace s)
  where trimLead = dropWhile isSpace
-- | Prints a string to standard error.
-- | Print a line to standard error.
putErrLn :: String -> IO ()
putErrLn msg = hPutStrLn stderr msg
-- | Completion function which acts like 'H.completeFilename' only when the
-- input line begins with "load". Otherwise, no completion is offered.
completion :: (Functor m, MonadIO m) => H.CompletionFunc m
completion = H.completeWordWithPrev Nothing whiteChars $ \rev word ->
  -- Haskeline hands us the text BEFORE the word reversed, hence the
  -- 'reverse' calls on both the prefix and the word being completed.
  if strip (reverse rev) == "load"
    then snd <$> H.completeFilename (reverse word, "")
    else return []
| mk12/lam | src/Lam/Repl.hs | mit | 6,543 | 0 | 19 | 1,440 | 1,692 | 857 | 835 | 96 | 6 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGClipPathElement
(js_getClipPathUnits, getClipPathUnits, SVGClipPathElement,
castToSVGClipPathElement, gTypeSVGClipPathElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
-- Machine-generated FFI binding: reads the element's clipPathUnits
-- attribute as a nullable animated enumeration.
foreign import javascript unsafe "$1[\"clipPathUnits\"]"
        js_getClipPathUnits ::
      SVGClipPathElement -> IO (Nullable SVGAnimatedEnumeration)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGClipPathElement.clipPathUnits Mozilla SVGClipPathElement.clipPathUnits documentation>
getClipPathUnits ::
                 (MonadIO m) =>
                   SVGClipPathElement -> m (Maybe SVGAnimatedEnumeration)
getClipPathUnits self
  = liftIO (nullableToMaybe <$> (js_getClipPathUnits (self))) | manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGClipPathElement.hs | mit | 1525 | 6 | 10 | 197 | 371 | 236 | 135 | 26 | 1 |
-- | Reverse a list (accumulator version; equivalent to 'reverse').
inverter :: [a] -> [a]
inverter = foldl (flip (:)) []
pali s = if inverter s == s then True else False
-- | Membership test.  The recursive if-then-else was just 'elem'.
pertence :: Eq a => a -> [a] -> Bool
pertence = elem
-- | True when the list contains a repeated element.  The
-- @if c then True else repete xs@ form is simply a disjunction.
repete :: Eq a => [a] -> Bool
repete [] = False
repete (x:xs) = x `elem` xs || repete xs
--1
-- | Set union: elements of the first list not already in the second,
-- followed by the second list.  Guards replace the nested if/not.
uniao :: Eq a => [a] -> [a] -> [a]
uniao [] ys = ys
uniao (x:xs) ys
  | x `elem` ys = uniao xs ys
  | otherwise   = x : uniao xs ys
--2
-- | Set intersection: keep elements of the first list present in the second.
inter :: Eq a => [a] -> [a] -> [a]
inter [] _ = []
inter (x:xs) ys
  | x `elem` ys = x : inter xs ys
  | otherwise   = inter xs ys
--3
ultimos n xs = take n (inverter xs)
--4
-- | Exponentiation via the standard (^) (fast binary exponentiation).
-- Also fails fast on a negative exponent instead of recursing forever.
expo :: (Num a, Integral b) => a -> b -> a
expo b e
  | e < 0     = error "expo: negative exponent"
  | otherwise = b ^ e
--5
-- | Sum the place values of the '1' bits, where e is the power of two of
-- the current (least-significant-first) position.
converteBin :: String -> Int -> Int
converteBin [] _ = 0
converteBin (x:xs) e
  | x == '1'  = 2 ^ e + converteBin xs (e + 1)
  | otherwise = converteBin xs (e + 1)

-- | Convert a binary string (most significant bit first) to an Int.
binToint :: String -> Int
binToint xs = converteBin (reverse xs) 0
--6
-- | Smallest element of a non-empty list.
menor :: Ord a => [a] -> a
menor [x]    = x
menor (x:xs) = let rest = menor xs
               in if x <= rest then x else rest
--7
-- | Remove the first occurrence of y (no-op when absent).  Fixes the
-- original, whose misplaced @where x = menor xs@ shadowed the pattern
-- variable and which crashed on the empty list.
retirarElemento :: Eq a => a -> [a] -> [a]
retirarElemento _ [] = []
retirarElemento y (x:xs)
  | x == y    = xs
  | otherwise = x : retirarElemento y xs

-- | Selection sort: repeatedly extract the minimum.  Fixes the original,
-- which referenced an unbound @x@ (its helper binding was attached to
-- retirarElemento by mistake).
ordemCres :: Ord a => [a] -> [a]
ordemCres [] = []
ordemCres xs = m : ordemCres (retirarElemento m xs)
  where m = minimum xs
--8
-- | Insert into a sorted list, keeping it sorted; a no-op when the
-- element is already present.  Uses stdlib 'elem' instead of the
-- hand-rolled membership helper.
insereOrd :: Ord a => a -> [a] -> [a]
insereOrd x [] = [x]
insereOrd x (y:ys)
  | x `elem` (y:ys) = y : ys
  | x < y           = x : y : ys
  | otherwise       = y : insereOrd x ys
--9
-- | True for even numbers — this is exactly the standard 'even'.
paridade :: Integral a => a -> Bool
paridade = even
--10
-- | Re-implementation of Prelude 'filter', guard style.
_filter :: (a -> Bool) -> [a] -> [a]
_filter _ [] = []
_filter keep (x:rest)
  | keep x    = x : _filter keep rest
  | otherwise = _filter keep rest
--11
impares xs = _filter (not.paridade) xs
--12
primeiroTupla xs map fst xs
--13
--14
somatorio xs = fold (+) 0 xs
--16
-- | Sieve of Eratosthenes over an already-sorted candidate list: keep the
-- head and drop its multiples from the tail.
-- Fix: the original lambda was missing its backslash and did not parse.
eratosthenes :: [Int] -> [Int]
eratosthenes [] = []
eratosthenes (h:t) = h : eratosthenes (filter (\x -> x `mod` h /= 0) t)
-- 17
-- | All primes up to and including the bound, via the sieve above.
primes :: Int -> [Int]
primes x = eratosthenes [2..x]
--18
-- | Naive Fibonacci.  Fixes the original, which used '==' instead of '='
-- (so it did not parse), matched the recursive case on [] with an
-- unbound n, and left a stray block-comment terminator behind.
fib :: (Eq a, Num a) => a -> a
fib 0 = 0
fib 1 = 1
fib n = fib (n - 1) + fib (n - 2)
--19
| AndressaUmetsu/PapaEhPop | exercicios.hs | mit | 1,637 | 2 | 12 | 431 | 951 | 486 | 465 | -1 | -1 |
module Parser.Parser where
import Control.Monad (liftM)
import Numeric (readFloat)
import Text.ParserCombinators.Parsec hiding (spaces)
import Parser.Types.LispVal
-- | Top-level expression parser; alternatives are tried in order.
-- NOTE(review): 'parseFloat' and 'parseCharacter' are defined below but
-- never listed here, so floats parse as two Numbers around a '.' —
-- confirm whether that is intended.
parseExpr :: Parser LispVal
parseExpr = parseAtom
        <|> parseString
        <|> parseNumber
        <|> parseQuasiQuoted
        <|> parseQuoted
        <|> parseUnquoted
        <|> parseLists
-- | Symbols and the #t/#f boolean literals.
parseAtom :: Parser LispVal
parseAtom = do
  first <- letter <|> symbol
  rest <- many (letter <|> digit <|> symbol)
  return $ case first:rest of
    "#t" -> Bool True
    "#f" -> Bool False
    atom -> Atom atom
-- | A single letter as a character literal (currently unused by parseExpr).
parseCharacter :: Parser LispVal
parseCharacter = do
  c <- letter
  return $ Character c
-- | Scheme dotted pair: elements, then '.', then the tail expression.
parseDottedList :: Parser LispVal
parseDottedList = do
  head' <- endBy parseExpr spaces
  tail' <- char '.' >> spaces >> parseExpr
  return $ DottedList head' tail'
-- | digits '.' digits, decoded with Numeric.readFloat.
parseFloat :: Parser LispVal
parseFloat = do
  integral <- many1 digit
  _ <- char '.'
  fractional <- many1 digit
  return $ (Float . fst . head . readFloat) $ integral ++ "." ++ fractional
-- | Space-separated proper list (without the surrounding parens).
parseList :: Parser LispVal
parseList = liftM List $ sepBy parseExpr spaces
-- | Parenthesised list, proper or dotted.
parseLists :: Parser LispVal
parseLists = do
  _ <- char '('
  x <- try parseList <|> parseDottedList
  _ <- char ')'
  return x
-- | One or more digits, read as a decimal integer.
parseNumber :: Parser LispVal
parseNumber = do
  digits <- many1 digit
  return (Number (read digits))
-- | Backtick sugar: `x becomes (quasiquote x).
parseQuasiQuoted :: Parser LispVal
parseQuasiQuoted = do
  _ <- char '`'
  x <- parseExpr
  return $ List [Atom "quasiquote", x]
-- | Quote sugar: 'x becomes (quote x).
parseQuoted :: Parser LispVal
parseQuoted = do
  _ <- char '\''
  x <- parseExpr
  return $ List [Atom "quote", x]
-- | Double-quoted string.  NOTE(review): the escape sub-parsers below
-- return the second character, so "\n" is stored as the letter 'n', not
-- a newline — confirm whether escapes should be translated.
parseString :: Parser LispVal
parseString = do
  _ <- char '"'
  x <- many (letter
         <|> space
         <|> quotedString
         <|> carriageReturn
         <|> lineFeed
         <|> tabStop
         <|> backslash)
  _ <- char '"'
  return $ String x
-- | Comma sugar: ,x becomes (unquote x).
parseUnquoted :: Parser LispVal
parseUnquoted = do
  _ <- char ','
  x <- parseExpr
  return $ List [Atom "unquote", x]
-- Escape-sequence parsers: each consumes a backslash plus one letter and
-- yields that letter (the escape is not translated to a control char).
carriageReturn :: Parser Char
carriageReturn = char '\\' >> char 'r'
lineFeed :: Parser Char
lineFeed = char '\\' >> char 'n'
tabStop :: Parser Char
tabStop = char '\\' >> char 't'
backslash :: Parser Char
backslash = char '\\' >> char '\\'
-- | Characters allowed in Scheme identifiers besides letters/digits.
symbol :: Parser Char
symbol = oneOf "!#$%&|*+-/:<=>?@^_~"
-- | One or more whitespace characters (shadows Parsec's 'spaces').
spaces :: Parser ()
spaces = skipMany1 space
-- | An escaped double quote inside a string literal.
quotedString :: Parser Char
quotedString = char '\\' >> char '"'
| slogsdon/haskell-exercises | write-a-scheme/evaluation-part1/src/Parser/Parser.hs | mit | 2,372 | 0 | 15 | 558 | 813 | 391 | 422 | 87 | 3 |
import Control.Applicative
import Control.Monad
import qualified Data.ByteString.Char8 as BS
import Data.Maybe
-- | Minimum number of operations (add 1, 2 or 5 to all-but-one element,
-- modelled as subtracting from one element) to equalize the array: try
-- every target base from min-5 to min and take the cheapest.
solve :: [Int] -> Int
solve arr = minimum [totalOps base | base <- [lowest - 5 .. lowest]]
  where
    lowest = minimum arr
    totalOps base = sum (map (opsFor base) arr)
    -- greedy coin count for one element: 5s, then 2s, then 1s
    opsFor base v = d `div` 5 + r `div` 2 + r `mod` 2
      where
        d = v - base
        r = d `mod` 5
-- | Parse the leading integer of a ByteString.
-- NOTE(review): 'fromJust' crashes on non-numeric input — fine for this
-- judge's well-formed input, unsafe in general.
readInt' :: BS.ByteString -> Int
readInt' = fst . fromJust . BS.readInt
-- | Read t test cases; each case is a (discarded) length line followed by
-- the space-separated array, and prints the answer for that case.
main :: IO ()
main = do
  t <- readLn :: IO Int
  forM_ [1..t] $ \_ -> do
    _ <- BS.getLine
    arr <- map readInt' . BS.words <$> BS.getLine
    putStrLn $ show $ solve arr
| m00nlight/hackerrank | algorithm/Dynamic-Programming/Equal/main.hs | gpl-2.0 | 687 | 0 | 15 | 236 | 308 | 163 | 145 | 20 | 1 |
-- | Factorial.  Uses 'product' over an enumeration instead of explicit
-- recursion, and fails fast on negative input (the original recursed
-- forever for n < 0).
fac :: (Integral a) => a -> a
fac n
  | n < 0     = error "fac: negative argument"
  | otherwise = product [1 .. n]
module PanRNA where
import qualified Text.Parsec as P
import qualified Text.Parsec.Token as T
import Text.Parsec ((<?>), (<|>))
import Data.Maybe
import Data.List
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import Debug.Trace
-- Core record types: an RNA is a tag (header line), its nucleotide
-- sequence, a secondary structure as base-pair index pairs, and an
-- optional free-energy string.  Pair indices are 1-based throughout
-- (see the ct and dot-bracket parsers/writers).
data Tag = Tag String deriving Show
data Sequence = Sequence String deriving Show
data Structure = Structure [(Int,Int)] deriving Show
data Energy = Energy String deriving Show
data RNA = RNA Tag Sequence Structure Energy deriving Show
-- One ct-file pairing entry: unpaired, or paired with the given index.
data Pair = Unpaired | Pair Int deriving (Show, Eq)
data Index = Index Int deriving (Show, Eq)
-- | Run a parser over a string with an empty source name.
parse rule text = P.parse rule "" text
-- | Try a parser, falling back to a default value on failure.
opt parser defaultVal = P.try parser <|> (return defaultVal)
-- Lexical helpers shared by all file-format parsers below.
nucLine :: P.Parsec String () String
nucLine = P.many $ oneNucleotide
oneNucleotide = P.oneOf "AUGCTNXaugctnx"
whitespace = P.oneOf " \t"
whitespaces = P.many whitespace
whitespaces1 = P.many1 whitespace
eol = whitespaces >> P.newline
int = P.many1 P.digit
-- a sequence ends at a newline, a space, or a literal '1' (dot-seq style)
endSequence = eol <|> whitespace <|> P.char '1'
-- Neutral defaults used when a format lacks the corresponding field.
emptyStructure = Structure []
emptyTag = Tag ""
noEnergy = Energy ""
-- | Nucleotide lines up to a sequence terminator, concatenated.
nucleotides :: P.Parsec String () Sequence
nucleotides = do s <- P.sepEndBy nucLine endSequence
                 return . Sequence $ concat s
-- | One RNA per raw nucleotide line, with no tag/structure/energy.
plain = do ns <- P.many1 nucLine
           return $ map toRNA ns
  where toRNA s = RNA emptyTag (Sequence s) emptyStructure noEnergy
-- | FASTA header: '>' then the rest of the line.
faTag :: P.Parsec String () Tag
faTag = do P.char '>' >> P.spaces
           t <- P.manyTill P.anyChar eol
           return $ Tag t
-- | Dot-seq header: a comment line ending in ';', then the tag line.
dotSeqTag :: P.Parsec String () Tag
dotSeqTag = do P.manyTill P.anyChar (P.char ';') >> eol
               t <- P.manyTill P.anyChar eol
               return $ Tag t
-- | A tag (per the given header parser) followed by its sequence.
parseSequence :: P.Parsec String () Tag -> P.Parsec String () RNA
parseSequence tagParser = do t <- tagParser
                             s <- nucleotides
                             return $ RNA t s emptyStructure noEnergy
-- | One ct body line: index, base, and partner index ("0" = unpaired);
-- the three other integer columns are consumed and discarded.
ctLine :: P.Parsec String st (Index, Char, Pair)
ctLine = do P.spaces
            i <- int
            n <- whitespaces1 >> oneNucleotide
            whitespaces1 >> int >> whitespaces1 >> int
            j <- whitespaces1 >> int
            whitespaces1 >> int >> eol
            return (Index $ read i,
                    n,
                    if j=="0" then Unpaired else Pair $ read j)
-- | ct header line: a leading length integer, then the tag text.
ctTag :: P.Parsec String st Tag
ctTag = do P.try $ P.spaces >> int >> P.spaces
           t <- P.manyTill P.anyChar (P.try eol)
           return (Tag t)
-- Whole-file parsers for the FASTA and dot-seq formats.
fasta = P.many1 $ parseSequence faTag
dotSeq = P.many1 $ parseSequence dotSeqTag
-- | Whole-file ct parser: header plus body lines per record; the
-- sequence and the 1-based pair list are reassembled from the rows.
ct = P.many1 $
     do t <- ctTag
        c <- P.many1 (P.try ctLine)
        return $ RNA t (toSeq c) (toStructure c) noEnergy
  where toPair (Index i, _, Pair j) = Just (i,j)
        toPair (Index i, _, Unpaired) = Nothing
        toSeq = Sequence . map (\(_, n, _) -> n)
        toStructure = Structure . mapMaybe toPair
-- Character classes of dot-bracket notation.
dbOpen = "(>{["
dbClose = ")<}]"
dbOther = ".,_-"
dbChar = P.oneOf (dbOpen ++ dbClose ++ dbOther)
-- | Parse a dot-bracket string into 1-based pair indices using an
-- explicit stack of open positions.
-- NOTE(review): closers are matched purely by position, not by bracket
-- kind, so "(>" pairs '(' with '>' — confirm that mixing is intended.
db :: P.Parsec String st Structure
db = do l <- P.manyTill dbChar $ P.try whitespaces1
        return $ toStructure l
  where
    toStructure = Structure . getF . foldl' step ([], 1, [])
    getF (a, _, _) = a
    -- state: (pairs so far, 1-based position, stack of open positions)
    step (pair, pos, stk) c
      | c `elem` dbClose, null stk = error "unbalanced parens in db"
      | c `elem` dbOpen = (pair, pos+1, pos:stk)
      | c `elem` dbOther = (pair, pos+1, stk)
      | c `elem` dbClose = ( (head stk, pos):pair, pos+1, tail stk)
      | otherwise = error $ "unrecognized character " ++ [c] ++ " in dot-bracket string"
--parens :: P.Parsec String st String
-- | Wrap a parser in literal parentheses.
parens :: P.Parsec String st String -> P.Parsec String st String
parens = P.between (P.char '(') (P.char ')')
-- | Optional parenthesised free-energy number, e.g. "(-12.30)".
viennaEnergy = (P.optionMaybe . parens) (P.many1 $ P.oneOf "1234567890-.")
-- | RNAfold-style output: optional tag, sequence, dot-bracket structure,
-- and an optional trailing energy line (kept as a raw string).
viennaOutput = P.many1 $ do
  t <- opt faTag emptyTag
  n <- nucleotides
  s <- db
  e <- opt (P.manyTill (P.oneOf "1234567890-.() ") P.newline) ""
  return $ RNA t n s (Energy e)
-- | Prefix '>' to a tag, first removing any '>' already present.
toFaTag = (">"++) . filter (\x-> not $ x=='>')
-- Writers: render an RNA back out in each supported format.
writePlain (RNA _ (Sequence s) _ _) = s
writeDotSeq (RNA (Tag t) (Sequence s) _ _) = unlines [";", t, s++"1"]
writeFaSeq (RNA (Tag t) (Sequence s) _ _) = unlines [toFaTag t, s]
-- | ct format: length+tag header, then one tab-delimited row per base
-- (1-based index, base, prev, next, partner or 0, index again).
writeCt (RNA (Tag t) (Sequence s) (Structure c) _) = (unlines . concat) text
  where text = [[firstline],ctlines]
        len = length s
        firstline = (show len)++" "++t
        ctlines = map toCtLine [1..len]
        pairMap = HM.fromList c
        partner i = HM.lookupDefault 0 i pairMap
        toCtLine i = tabDelimited (i, s!!(i-1), i-1, i+1, partner i, i)
        tabDelimited (a, b, c, d, e, f) =
          (concat . intersperse "\t") $
          [show a, [b], show c, show d, show e, show f]
-- | FASTA-style dot-bracket: tag, sequence, then '(' / ')' / '.' per
-- base, with direction chosen by comparing the two 1-based indices.
writeDb (RNA (Tag t) (Sequence s) (Structure c) _) = unlines [toFaTag t, s, toDb]
  where toDb = map toDbChar [1..(length s)]
        pairMap = HM.fromList c
        partner i = HM.lookup i pairMap
        toDbChar i | Nothing <- partner i = '.'
                   | Just j <- partner i = if i>j then ')' else '('
-- | Drop everything from the comment marker to the end of each line.
removeComments :: Char -> String -> String
removeComments marker text = unlines strippedLines
  where
    strippedLines = [takeWhile (/= marker) ln | ln <- lines text]
-- | Index a pair list by its first component for O(1) partner lookup.
pairs :: [(Int,Int)] -> HM.HashMap Int Int
pairs = HM.fromList
-- | Select the element at the given 1-based index (as a zero- or
-- one-element list); rejects non-positive indices.
filterByIndex :: Integral i => i -> [a] -> [a]
filterByIndex ind lst
  | ind < 1   = error "index for selection must be positive"
  | otherwise = [x | (x, pos) <- zip lst [0 ..], pos == ind - 1]
-- | Drop base pairs that are not Watson-Crick or GU wobble pairs.
-- Fix: pair indices are 1-based everywhere in this module (ct and
-- dot-bracket parsers/writers), but the original indexed the sequence
-- 0-based with (!!), checking the wrong bases and going out of range
-- when a pair involved the last base.  We subtract 1 before indexing.
removeNoncanonical :: RNA -> RNA
removeNoncanonical (RNA t (Sequence s) (Structure ps) e) =
    RNA t (Sequence s) (Structure (filter canonical ps)) e
  where
    canonical (i, j) = [s !! (i - 1), s !! (j - 1)] `elem` canonicalPairs
    canonicalPairs = ["AU","UA","GC","CG","GU","UG"]
-- | Replace every ambiguous IUPAC nucleotide code in the sequence with
-- the given character; tag, structure and energy are untouched.
convertAmbiguous :: Char -> RNA -> RNA
convertAmbiguous replacement (RNA t (Sequence s) p e) =
    RNA t (Sequence (map substitute s)) p e
  where
    ambiguous = "NXYRSWKMBDHV"
    substitute base
      | base `elem` ambiguous = replacement
      | otherwise             = base
| michael-sloma/panRNA | src/PanRNA.hs | gpl-2.0 | 6,256 | 0 | 14 | 1,904 | 2,549 | 1,316 | 1,233 | 135 | 2 |
----------------------------------------------------------------------------
-- |
-- Module : Dates
-- Copyright : (c) Simon Foster 2004
-- License : GPL version 2 (see COPYING)
--
-- Maintainer : aca01sdf@shef.ac.uk
-- Stability : experimental
-- Portability : non-portable (ghc >= 6 only)
--
-- An ISO 8601 Compatible Date library for Haskell, specifically for use in HAIFA SOAP.
--
-- @This file is part of HAIFA.@
--
-- @HAIFA is free software; you can redistribute it and\/or modify it under the terms of the
-- GNU General Public License as published by the Free Software Foundation; either version 2
-- of the License, or (at your option) any later version.@
--
-- @HAIFA is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
-- even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.@
--
-- @You should have received a copy of the GNU General Public License along with HAIFA; if not,
-- write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA@
----------------------------------------------------------------------------
module Dates(Date, parseDate) where
import Text.Regex
import Maybe
-- | A parsed date\/time: numeric components plus a textual time-zone name.
-- Fields that a given input format does not supply are left at 0.
data Date = Date{year::Int, month::Int, day::Int, hours::Int, minutes::Int, seconds::Int, msecs::Int, timeZone::String} deriving (Eq, Show)
-- From a Regular Expression to a Date made from a list of ints for the date/time parameters and a timezone string
type DateConverter = (String, ([Int]->String->Date))
-- | Time zone assumed when the input carries no zone suffix.
defaultTimeZone :: String
defaultTimeZone = "GMT"
-- | Pivot for two-digit years: values at or below this map to 20xx,
-- larger two-digit values map to 19xx (see 'normalizeYear').
yearCutPoint :: Int
yearCutPoint = 30
-- Convert a two digit year to a four digit year
-- | Years above 99 are assumed to already be four digits and pass through
-- unchanged. Two-digit years above 'yearCutPoint' map into 19xx, the rest
-- into 20xx. (The original had an unreachable @otherwise = 0@ branch after
-- guards that already covered every 'Int'; that dead branch is removed.)
normalizeYear :: Int -> Int
normalizeYear d
    | d > 99           = d
    | d > yearCutPoint = d + 1900
    | otherwise        = d + 2000
-- RE & Function for dates of format (CC)YY-MM-DD(TZ)
-- NOTE(review): the day group @[1-3]*[0-9]@ also admits out-of-range values
-- such as \"39\"; presumably calendar validation is out of scope -- confirm.
dateRE :: String
dateRE = "^([0-9]{2,4})-([0-1][0-9])-([1-3]*[0-9])([A-Z]*)$"
-- | Build a 'Date' from the numeric capture groups of 'dateRE'
-- (year, month, day); time fields are zeroed. Only indices 0..2 of @dl@
-- are ever forced (see 'findDateRE').
dateConv :: [Int] -> String -> Date
dateConv dl tz = Date{year=(normalizeYear (dl!!0)), month=(dl!!1), day=(dl!!2), hours=0, minutes=0, seconds=0, msecs=0, timeZone=tz}
-- For times of format hh:mm:ss
-- NOTE(review): @[0-2][0-9]@ and @[0-6][0-9]@ admit values such as 29 hours
-- or 69 minutes -- confirm this leniency is intended.
timeRE :: String
timeRE = "^([0-2][0-9]):([0-6][0-9]):([0-6][0-9])([A-Z]*)$"
-- | Build a 'Date' from the numeric capture groups of 'timeRE'
-- (hours, minutes, seconds); date fields are zeroed.
timeConv :: [Int] -> String -> Date
timeConv dl tz = Date{year=0, month=0, day=0, hours=(dl!!0), minutes=(dl!!1), seconds=(dl!!2), msecs=0, timeZone=tz}
-- List which associates regular expressions with conversion functions
dateFunctions :: [DateConverter]
dateFunctions = [(dateRE, dateConv), (timeRE, timeConv)]
-- | Try each known format in order; 'Nothing' when none matches.
parseDate :: String -> Maybe Date
parseDate date = findDateRE date dateFunctions
-- Go through the list of Date Regular Expressions, and upon finding a matching one run the appropriate function to create a date
-- | CAUTION: @mapToInt@ maps 'read' over /all/ capture groups, including the
-- trailing non-numeric time-zone group. This does not crash only because the
-- converters index just the numeric groups and laziness never forces the
-- rest; forcing the whole list would raise a 'read' parse error.
findDateRE :: String->[DateConverter] -> Maybe Date
findDateRE _ [] = Nothing
findDateRE ds (dconv:t)
    | (checkDate /= Nothing) = Just ((snd dconv) (mapToInt (fromJust checkDate)) timezone)
    | otherwise = findDateRE ds t
    where
      -- Capture groups of this converter's regex, or Nothing on no match.
      checkDate = (matchRegex (mkRegex (fst dconv)) ds)
      mapToInt sl = map (\a -> (read a)::Int) sl
      -- The time zone is taken from the final capture group.
      timezone = getTimeZone (fromJust checkDate)
-- | The time zone lives in the last capture group: return the final element
-- of the group list, falling back to 'defaultTimeZone' when the list is
-- empty or the group matched the empty string.
getTimeZone :: [String] -> String
getTimeZone [] = defaultTimeZone
getTimeZone groups
    | null zone = defaultTimeZone
    | otherwise = zone
  where
    -- Safe: the empty list is handled by the first equation.
    zone = last groups
| twopoint718/haifa | src/Dates.hs | gpl-2.0 | 3,364 | 0 | 12 | 553 | 774 | 447 | 327 | 41 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Run.Output.Fancy.SoundRun
(
soundRunIterationBegin,
) where
import MyPrelude
import Game
import Game.Run.RunData
import OpenAL
import OpenAL.Helpers
-- | Restart the iteration-begin sound: stop the source, attach the
-- iteration-begin buffer, and play it from the start.
soundRunIterationBegin :: SoundRun -> IO ()
soundRunIterationBegin sound = do
    let source = soundRunSrc sound
    alSourceStop source
    alSourcei source al_BUFFER (fI (soundRunIterationBeginBuf sound))
    alSourcePlay source
| karamellpelle/grid | source/Game/Run/Output/Fancy/SoundRun.hs | gpl-3.0 | 1,150 | 0 | 10 | 205 | 122 | 73 | 49 | 13 | 1 |
import Src.Model.Employee
import Src.Model.Store
import qualified Src.Model.SecureEmployee as SE
import Src.Model.Credential
import Src.IO.JSONHandler
import Src.IO.TestIOFunctions
import Security.SecureFlow
import qualified Security.SecureComputation as SC
import Security.ThreeLevels
import Security.Unsecure
import Data.List
-- | Declassification policy for login purposes: it simply checks whether the
-- given (email, password) pair occurs in the secret credential list.
login :: String -> String -> Hatch High [Credential] Bool
login e p = pure (Credential e p `elem`)
-- | Repeatedly prompt for an email and password until the pair matches a
-- stored credential. The check goes through the 'login' declassification
-- policy, whose Bool result is opened with a Medium ticket; on success the
-- accepted email is returned.
askForLogin :: SecureFlow High [Credential] -> IO String
askForLogin cs = do
    putStr "Email: "
    email <- getLine
    putStr "Password: "
    password <- getLine
    let attempt = declassifyWith (login email password) cs :: SecureFlow Medium Bool
    if open medium attempt
        then return email
        else do
            putStr "Incorrect credentials, please try again\n"
            askForLogin cs
-- | Test function for showing the salary. It uses the second Hatch version and
-- specifies in the type signature the target security level. Note `makeHatch`.
-- The policy itself is the identity function: it releases the salary value
-- unchanged from the High level down to Medium.
showSalary :: Hatch' High Medium Int Int
showSalary = makeHatch id
-- | Look up the employee whose (Medium-opened) email equals @name@ and
-- return their salary, declassified from High to Medium via 'showSalary'.
-- Returns 0 when no employee matches. Uses the second Hatch version, so the
-- target security level is constrained by 'showSalary''s signature.
showEmployeeSalary :: String -> [SE.SEmployee] -> IO Int
showEmployeeSalary _ [] = return 0
showEmployeeSalary name (emp : rest)
    | name == open medium (SE.email emp) =
        return (open medium (declassifyWith' showSalary (SE.salary emp) :: SecureFlow Medium Int))
    | otherwise = showEmployeeSalary name rest
-- | Increases the salary of the employee whose email matches the target,
-- leaving every other employee in place.
--
-- Bug fix: the original recursive case returned @incSalary i t ses@ without
-- re-consing the current element, so every employee stored before the
-- matching one was silently dropped from the resulting list.
incSalary :: Int -> String -> [SE.SEmployee] -> [SE.SEmployee]
incSalary _ _ [] = []
incSalary amount target (se : ses)
    | target == open medium (SE.email se) = SE.increaseSalary amount se : ses
    | otherwise                           = se : incSalary amount target ses
-- | A function requiring pure values for modifying Store data. Applies @f@,
-- parameterised by the amount parsed from the secure string @m@, to the
-- first store whose product name equals @n@; all other stores are untouched.
--
-- Bug fix: the original inner walk had no equation for the empty list, so a
-- product name matching no store crashed with a pattern-match failure; it now
-- leaves the store list unchanged. The walk also threaded an unused copy of
-- the secure amount, which has been dropped.
storesOperation :: Read a => SC.SecureComputation SC.P String -> String -> (a -> Store -> Store)
                          -> SC.SecureComputation SC.P [Store] -> SC.SecureComputation SC.P [Store]
storesOperation m n f sc = SC.sapp (SC.spure (walk n)) sc
  where
    -- Lazily parsed, exactly as the original did inside the match branch.
    amount = read (SC.open m)
    walk _ [] = []
    walk name (s : ss)
        | name == productName s = f amount s : ss
        | otherwise             = s : walk name ss
-- Main menu
-- | Interactive top-level loop: prints the available actions, reads the
-- user's textual choice and recurses with the (possibly updated) employee
-- list and secure store computation. Entering \"0\" exits the loop.
menu :: [SE.SEmployee] -> SC.SecureComputation SC.P [Store] -> IO ()
menu se ss = do putStr $ "\n\n0) Exit \n"
                putStr $ "1) See employees' public details \n"
                putStr $ "2) Increase an employee's salary \n"
                putStr $ "3) See stores status \n"
                putStr $ "4) Increase price \n"
                putStr $ "5) Increase stocks \n"
                putStr $ "What do you want to do? "
                c <- getLine
                -- Every branch except "0" eventually re-enters 'menu'.
                case c of "0" -> return ()
                          -- Public details only need a Medium ticket to view.
                          "1" -> do putStr $ intercalate "\n\n" $ map (SE.viewPublicDetails medium) se
                                    putStr "\n\n"
                                    menu se ss
                          -- Salary increase: the entered amount is validated
                          -- before being applied.
                          "2" -> do putStr $ "Enter the employee's email address: "
                                    t <- getLine
                                    putStr $ "Enter the increase: "
                                    i <- getNat
                                    case validate i of Left vi -> do let se' = incSalary (read vi) t se
                                                                     s <- showEmployeeSalary t se'
                                                                     putStr $ "The new salary is " ++ (show s)
                                                                     menu se' ss
                                                       Right e -> do print e
                                                                     menu se ss
                          "3" -> do print $ SC.open ss
                                    menu se ss
                          -- NOTE(review): the raw getLine below bypasses the
                          -- getUnpureNat validation; a non-numeric amount will
                          -- crash 'read' later. Flagged UNSAFE by the author.
                          "4" -> do putStr $ "Enter the product name: "
                                    p <- getLine
                                    putStr $ "Enter the increase: "
                                    --i <- getUnpureNat
                                    --UNSAFE!!
                                    l <- getLine
                                    let i = SC.spure l :: SC.SecureComputation SC.P String
                                    let ss' = storesOperation i p (increasePrice) ss
                                    print $ SC.open ss'
                                    menu se ss'
                          -- Same unvalidated-input caveat as branch "4".
                          "5" -> do putStr $ "Enter the product name: "
                                    p <- getLine
                                    putStr $ "Enter the increase: "
                                    --i <- getUnpureNat
                                    --UNSAFE!!
                                    l <- getLine
                                    let i = SC.spure l :: SC.SecureComputation SC.P String
                                    let ss' = storesOperation i p (increaseStocks) ss
                                    print $ SC.open ss'
                                    menu se ss'
-- | Entry point: authenticate the user against the stored credentials, load
-- the secure employee and store data, then enter the interactive menu.
main :: IO ()
main = do
    credentials <- getCredentials
    email <- askForLogin credentials
    employees <- getSecureEmployees
    stores <- getStores
    putStr ("Welcome, " ++ email ++ "\n")
    menu employees stores
| mdipirro/haskell-secure-types | app/Main.hs | gpl-3.0 | 6,168 | 0 | 22 | 2,806 | 1,322 | 645 | 677 | 89 | 7 |
module Logic.DatalogC.Fresh where
import Control.Applicative
import Control.Arrow (second)
import Control.Monad.State
import Logic.General.Entities
-- | Fresh-variable generator state: a name prefix plus the next counter.
type Fresh = (String, Integer)
type FreshState = State Fresh

-- | Build the variable named by the current generator state.
freshVar :: Fresh -> E
freshVar (prefix, counter) = Variable (prefix ++ show counter)

-- | Advance the counter, leaving the prefix unchanged.
next :: FreshState ()
next = modify (\(prefix, counter) -> (prefix, 1 + counter))

-- | Yield the variable for the current state, then advance the counter.
getFresh :: FreshState E
getFresh = do
    v <- gets freshVar
    next
    return v
| bogwonch/SecPAL | src/Logic/DatalogC/Fresh.hs | gpl-3.0 | 401 | 0 | 7 | 65 | 136 | 78 | 58 | 13 | 1 |
-- | Mean of the first @n@ elements of the stream (sum via 'sumS').
average :: Fractional a => Int -> Stream a -> a
average n stm = total / fromIntegral n
  where
    total = sumS n stm
{-# LANGUAGE FlexibleContexts #-}
-- |
-- Copyright : (c) 2010-2012 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Benedikt Schmidt <beschmi@gmail.com>
--
-- AC unification based on maude and free unification.
module Term.Unification (
-- * Unification modulo AC
unifyLTerm
, unifyLNTerm
, unifiableLNTerms
, unifyLTermFactored
, unifyLNTermFactored
-- * matching modulo AC
-- ** Constructing matching problems
, matchLVar
-- ** Solving matching problems
, solveMatchLTerm
, solveMatchLNTerm
-- * Handles to a Maude process
, MaudeHandle
, WithMaude
, startMaude
, getMaudeStats
, mhMaudeSig
, mhFilePath
-- * Maude signatures
, MaudeSig
, enableDH
, enableBP
, enableMSet
, enableDiff
, minimalMaudeSig
, enableDiffMaudeSig
, dhMaudeSig
, bpMaudeSig
, msetMaudeSig
, pairMaudeSig
, symEncMaudeSig
, asymEncMaudeSig
, signatureMaudeSig
, hashMaudeSig
, rrulesForMaudeSig
, stFunSyms
, funSyms
, stRules
, irreducibleFunSyms
, noEqFunSyms
, addFunSym
, addStRule
-- * Convenience exports
, module Term.Substitution
, module Term.Rewriting.Definitions
) where
import Control.Applicative
import Control.Monad.RWS
import Control.Monad.Error
import Control.Monad.State
import qualified Data.Map as M
import Data.Map (Map)
import System.IO.Unsafe (unsafePerformIO)
import Term.Rewriting.Definitions
import Term.Substitution
import qualified Term.Maude.Process as UM
import Term.Maude.Process
(MaudeHandle, WithMaude, startMaude, getMaudeStats, mhMaudeSig, mhFilePath)
import Term.Maude.Signature
import Debug.Trace.Ignore
-- Unification modulo AC
----------------------------------------------------------------------
-- | @unifyLTermFactored sortOf eqs@ returns a complete set of unifiers for
-- @eqs@ modulo AC, factored as a shared free substitution (from syntactic
-- pre-unification via 'unifyRaw') plus the fresh substitutions Maude computes
-- for the remaining delayed AC equations.
unifyLTermFactored :: (IsConst c , Show (Lit c LVar), Ord c)
                   => (c -> LSort)
                   -> [Equal (LTerm c)]
                   -> WithMaude (LSubst c, [SubstVFresh c LVar])
unifyLTermFactored sortOf eqs = reader $ \h -> (\res -> trace (unlines $ ["unifyLTerm: "++ show eqs, "result = "++ show res]) res) $ do
    solve h $ execRWST unif sortOf M.empty
  where
    unif = sequence [ unifyRaw t p | Equal t p <- eqs ]
    -- Free unification failed outright: no unifier exists.
    solve _ Nothing = (emptySubst, [])
    -- No delayed AC equations: the free substitution is the whole answer.
    solve _ (Just (m, [])) = (substFromMap m, [emptySubstVFresh])
    -- Delayed AC equations remain: apply the free part and hand them to
    -- Maude. NOTE(review): unsafePerformIO assumes the Maude call behaves
    -- deterministically for a given handle and problem -- confirm.
    solve h (Just (m, leqs)) =
        (subst, unsafePerformIO (UM.unifyViaMaude h sortOf $
            map (applyVTerm subst <$>) leqs))
      where subst = substFromMap m
-- | Like 'unifyLTermFactored', specialised to 'LNTerm' via 'sortOfName'.
unifyLNTermFactored :: [Equal LNTerm]
                    -> WithMaude (LNSubst, [SubstVFresh Name LVar])
unifyLNTermFactored = unifyLTermFactored sortOfName
-- | @unifyLTerm sortOf eqs@ returns a complete set of unifiers for @eqs@
-- modulo AC, with the factored result flattened into plain substitutions.
unifyLTerm :: (IsConst c , Show (Lit c LVar), Ord c)
           => (c -> LSort)
           -> [Equal (LTerm c)]
           -> WithMaude [SubstVFresh c LVar]
unifyLTerm sortOf eqs = flattenUnif <$> unifyLTermFactored sortOf eqs
-- | @unifyLNTerm eqs@ returns a complete set of unifiers for @eqs@ modulo AC.
unifyLNTerm :: [Equal LNTerm] -> WithMaude [SubstVFresh Name LVar]
unifyLNTerm = unifyLTerm sortOfName
-- | 'True' iff the terms are unifiable.
unifiableLNTerms :: LNTerm -> LNTerm -> WithMaude Bool
unifiableLNTerms t1 t2 = (not . null) <$> unifyLNTerm [Equal t1 t2]
-- | Flatten a factored substitution to a list of substitutions by composing
-- each fresh AC solution with the shared free part.
flattenUnif :: IsConst c => (LSubst c, [LSubstVFresh c]) -> [LSubstVFresh c]
flattenUnif (subst, substs) =
    (\res -> trace (show ("flattenUnif",subst, substs,res )) res) $ map (`composeVFresh` subst) substs
-- Matching modulo AC
----------------------------------------------------------------------
-- | Match an 'LVar' term to an 'LVar' pattern by lifting both to terms.
matchLVar :: LVar -> LVar -> Match (LTerm c)
matchLVar term pat = matchWith (varTerm term) (varTerm pat)
-- | @solveMatchLTerm sortOf matchProblem@ returns a complete set of matchers
-- for the problem modulo AC. Free (syntactic) matching is attempted first;
-- only problems that touch AC operators are delegated to Maude.
solveMatchLTerm :: (IsConst c , Show (Lit c LVar), Ord c)
                => (c -> LSort)
                -> Match (LTerm c)
                -> WithMaude [Subst c LVar]
solveMatchLTerm sortOf matchProblem =
    case flattenMatch matchProblem of
      Nothing -> pure []
      Just ms -> reader $ matchTerms ms
  where
    trace' res = trace
        (unlines $ ["matchLTerm: "++ show matchProblem, "result = "++ show res])
        res
    matchTerms ms hnd =
        trace' $ case runState (runErrorT match) M.empty of
            (Left NoMatcher, _) -> []
            -- Free matching hit an AC operator: fall back to Maude.
            (Left ACProblem, _) ->
                unsafePerformIO (UM.matchViaMaude hnd sortOf matchProblem)
            (Right (), mappings) -> [substFromMap mappings]
      where
        match = forM_ ms $ \(t, p) -> matchRaw sortOf t p
-- | @solveMatchLNTerm eqs@ returns a complete set of matchers for @eqs@
-- modulo AC.
solveMatchLNTerm :: Match LNTerm -> WithMaude [Subst Name LVar]
solveMatchLNTerm = solveMatchLTerm sortOfName
-- Free unification with lazy AC-equation solving.
--------------------------------------------------------------------
-- | Monad for the free pre-unification pass: reads the sort function, writes
-- delayed AC equations, threads the substitution built so far, and uses
-- 'Maybe' as base monad so 'Nothing' signals unification failure.
type UnifyRaw c = RWST (c -> LSort) [Equal (LTerm c)] (Map LVar (VTerm c LVar)) Maybe
-- | Unify two 'LTerm's with delayed AC-unification: syntactic unification is
-- done here, while equations rooted in AC or commutative symbols are only
-- recorded (via 'tell') for Maude to solve later.
unifyRaw :: IsConst c => LTerm c -> LTerm c -> UnifyRaw c ()
unifyRaw l0 r0 = do
    mappings <- get
    sortOf <- ask
    -- Apply the substitution accumulated so far before inspecting terms.
    l <- gets ((`applyVTerm` l0) . substFromMap)
    r <- gets ((`applyVTerm` r0) . substFromMap)
    guard (trace (show ("unifyRaw", mappings, l ,r)) True)
    case (viewTerm l, viewTerm r) of
      (Lit (Var vl), Lit (Var vr))
        | vl == vr -> return ()
        | otherwise -> case (lvarSort vl, lvarSort vr) of
            -- Equal sorts: eliminate the larger variable for determinism.
            (sl, sr) | sl == sr -> if vl < vr then elim vr l
                                              else elim vl r
            _ | sortGeqLTerm sortOf vl r -> elim vl r
            -- If unification can succeed here, then it must work by
            -- elimating the right-hand variable with the left-hand side.
            _ -> elim vr l
      (Lit (Var vl), _ ) -> elim vl r
      (_, Lit (Var vr) ) -> elim vr l
      (Lit (Con cl), Lit (Con cr) ) -> guard (cl == cr)
      (FApp (NoEq lfsym) largs, FApp (NoEq rfsym) rargs) ->
          guard (lfsym == rfsym && length largs == length rargs)
          >> sequence_ (zipWith unifyRaw largs rargs)
      (FApp List largs, FApp List rargs) ->
          guard (length largs == length rargs)
          >> sequence_ (zipWith unifyRaw largs rargs)
      -- NOTE: We assume here that terms of the form mult(t) never occur.
      (FApp (AC lacsym) _, FApp (AC racsym) _) ->
          guard (lacsym == racsym) >> tell [Equal l r] -- delay unification
      (FApp (C lsym) largs, FApp (C rsym) rargs) ->
          guard (lsym == rsym && length largs == length rargs)
          >> tell [Equal l r] -- delay unification
      -- all unifiable pairs of term constructors have been enumerated
      _ -> mzero -- no unifier
  where
    -- Bind variable @v@ to term @t@: fails on occurs check or sort mismatch,
    -- otherwise records the binding and folds it into all earlier bindings.
    elim v t
      | v `occurs` t = mzero -- no unifier
      | otherwise = do
          sortOf <- ask
          guard (sortGeqLTerm sortOf v t)
          modify (M.insert v t . M.map (applyVTerm (substFromList [(v,t)])))
-- | Failure modes of the free matcher: 'NoMatcher' means no matcher can
-- exist; 'ACProblem' means the problem involves AC symbols and must be
-- delegated to Maude.
data MatchFailure = NoMatcher | ACProblem
instance Error MatchFailure where
    strMsg _ = NoMatcher
-- | Ensure that the computed substitution @sigma@ satisfies
-- @t ==_AC apply sigma p@ after the delayed equations are solved.
-- Only pattern variables are bound; the term side is fixed.
matchRaw :: IsConst c
         => (c -> LSort)
         -> LTerm c -- ^ Term @t@
         -> LTerm c -- ^ Pattern @p@.
         -> ErrorT MatchFailure (State (Map LVar (VTerm c LVar))) ()
matchRaw sortOf t p = do
    mappings <- get
    guard (trace (show (mappings,t,p)) True)
    case (viewTerm t, viewTerm p) of
      (_, Lit (Var vp)) ->
          case M.lookup vp mappings of
            Nothing -> do
                -- First binding for this pattern variable: check the sorts.
                unless (sortGeqLTerm sortOf vp t) $
                    throwError NoMatcher
                modify (M.insert vp t)
            -- An earlier binding must agree exactly with this occurrence.
            Just tp | t == tp -> return ()
                    | otherwise -> throwError NoMatcher
      (Lit (Con ct), Lit (Con cp)) -> guard (ct == cp)
      (FApp (NoEq tfsym) targs, FApp (NoEq pfsym) pargs) ->
          guard (tfsym == pfsym && length targs == length pargs)
          >> sequence_ (zipWith (matchRaw sortOf) targs pargs)
      (FApp List targs, FApp List pargs) ->
          guard (length targs == length pargs)
          >> sequence_ (zipWith (matchRaw sortOf) targs pargs)
      (FApp (AC _) _, FApp (AC _) _) -> throwError ACProblem
      (FApp (C _) _, FApp (C _) _) -> throwError ACProblem
      -- all matchable pairs of term constructors have been enumerated
      _ -> throwError NoMatcher
-- | @sortGeqLTerm st v t@ is 'True' iff the sort of variable @v@ is greater
-- than or equal to the sort of term @t@ under sort function @st@.
sortGeqLTerm :: IsConst c => (c -> LSort) -> LVar -> LTerm c -> Bool
sortGeqLTerm st v t =
    case (lvarSort v, sortOfLTerm st t) of
      (s1, s2) | s1 == s2 -> True
      -- Node is incomparable to all other sorts; such input is invalid.
      (LSortNode, _) -> errNodeSort
      (_, LSortNode) -> errNodeSort
      (s1, s2) -> sortCompare s1 s2 `elem` [Just EQ, Just GT]
  where
    errNodeSort = error $
        "sortGeqLTerm: node sort misuse " ++ show v ++ " -> " ++ show t
| ekr/tamarin-prover | lib/term/src/Term/Unification.hs | gpl-3.0 | 9,746 | 0 | 19 | 2,825 | 2,811 | 1,465 | 1,346 | 181 | 12 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigQuery.RowAccessPolicies.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all row access policies on the specified table.
--
-- /See:/ <https://cloud.google.com/bigquery/ BigQuery API Reference> for @bigquery.rowAccessPolicies.list@.
module Network.Google.Resource.BigQuery.RowAccessPolicies.List
(
-- * REST Resource
RowAccessPoliciesListResource
-- * Creating a Request
, rowAccessPoliciesList
, RowAccessPoliciesList
-- * Request Lenses
, raplDataSetId
, raplPageToken
, raplProjectId
, raplTableId
, raplPageSize
) where
import Network.Google.BigQuery.Types
import Network.Google.Prelude
-- | A resource alias for @bigquery.rowAccessPolicies.list@ method which the
-- 'RowAccessPoliciesList' request conforms to. (@:>@ is right-associative,
-- so the flat chain below denotes the same servant type.)
type RowAccessPoliciesListResource =
       "bigquery" :> "v2" :>
       "projects" :> Capture "projectId" Text :>
       "datasets" :> Capture "datasetId" Text :>
       "tables" :> Capture "tableId" Text :>
       "rowAccessPolicies" :>
       QueryParam "pageToken" Text :>
       QueryParam "pageSize" (Textual Int32) :>
       QueryParam "alt" AltJSON :>
       Get '[JSON] ListRowAccessPoliciesResponse

-- | Lists all row access policies on the specified table.
--
-- /See:/ 'rowAccessPoliciesList' smart constructor.
data RowAccessPoliciesList = RowAccessPoliciesList'
    { _raplDataSetId :: !Text
    , _raplPageToken :: !(Maybe Text)
    , _raplProjectId :: !Text
    , _raplTableId :: !Text
    , _raplPageSize :: !(Maybe (Textual Int32))
    }
    deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RowAccessPoliciesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'raplDataSetId'
--
-- * 'raplPageToken'
--
-- * 'raplProjectId'
--
-- * 'raplTableId'
--
-- * 'raplPageSize'
rowAccessPoliciesList
    :: Text -- ^ 'raplDataSetId'
    -> Text -- ^ 'raplProjectId'
    -> Text -- ^ 'raplTableId'
    -> RowAccessPoliciesList
rowAccessPoliciesList dataSetId projectId tableId = RowAccessPoliciesList'
    { _raplDataSetId = dataSetId
    , _raplPageToken = Nothing
    , _raplProjectId = projectId
    , _raplTableId = tableId
    , _raplPageSize = Nothing
    }

-- | Required. Dataset ID of row access policies to list.
raplDataSetId :: Lens' RowAccessPoliciesList Text
raplDataSetId = lens _raplDataSetId (\record value -> record {_raplDataSetId = value})

-- | Page token, returned by a previous call, to request the next page of
-- results.
raplPageToken :: Lens' RowAccessPoliciesList (Maybe Text)
raplPageToken = lens _raplPageToken (\record value -> record {_raplPageToken = value})

-- | Required. Project ID of the row access policies to list.
raplProjectId :: Lens' RowAccessPoliciesList Text
raplProjectId = lens _raplProjectId (\record value -> record {_raplProjectId = value})

-- | Required. Table ID of the table to list row access policies.
raplTableId :: Lens' RowAccessPoliciesList Text
raplTableId = lens _raplTableId (\record value -> record {_raplTableId = value})

-- | The maximum number of results to return in a single response page.
-- Leverage the page tokens to iterate through the entire collection.
raplPageSize :: Lens' RowAccessPoliciesList (Maybe Int32)
raplPageSize =
    lens _raplPageSize (\record value -> record {_raplPageSize = value}) . mapping _Coerce

instance GoogleRequest RowAccessPoliciesList where
    type Rs RowAccessPoliciesList = ListRowAccessPoliciesResponse
    type Scopes RowAccessPoliciesList =
        '["https://www.googleapis.com/auth/bigquery",
          "https://www.googleapis.com/auth/cloud-platform",
          "https://www.googleapis.com/auth/cloud-platform.read-only"]
    requestClient RowAccessPoliciesList'{..} =
        call _raplProjectId _raplDataSetId _raplTableId _raplPageToken
             _raplPageSize (Just AltJSON) bigQueryService
      where
        call = buildClient (Proxy :: Proxy RowAccessPoliciesListResource) mempty
| brendanhay/gogol | gogol-bigquery/gen/Network/Google/Resource/BigQuery/RowAccessPolicies/List.hs | mpl-2.0 | 4,988 | 0 | 19 | 1,212 | 651 | 381 | 270 | 104 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Accounts.Listlinks
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the list of accounts linked to your Merchant Center account.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.accounts.listlinks@.
module Network.Google.Resource.Content.Accounts.Listlinks
(
-- * REST Resource
AccountsListlinksResource
-- * Creating a Request
, accountsListlinks
, AccountsListlinks
-- * Request Lenses
, alsXgafv
, alsMerchantId
, alsUploadProtocol
, alsAccessToken
, alsUploadType
, alsAccountId
, alsPageToken
, alsMaxResults
, alsCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.accounts.listlinks@ method which the
-- 'AccountsListlinks' request conforms to. (@:>@ is right-associative, so
-- the flat chain below denotes the same servant type.)
type AccountsListlinksResource =
       "content" :> "v2.1" :>
       Capture "merchantId" (Textual Word64) :>
       "accounts" :> Capture "accountId" (Textual Word64) :>
       "listlinks" :>
       QueryParam "$.xgafv" Xgafv :>
       QueryParam "upload_protocol" Text :>
       QueryParam "access_token" Text :>
       QueryParam "uploadType" Text :>
       QueryParam "pageToken" Text :>
       QueryParam "maxResults" (Textual Word32) :>
       QueryParam "callback" Text :>
       QueryParam "alt" AltJSON :>
       Get '[JSON] AccountsListLinksResponse

-- | Returns the list of accounts linked to your Merchant Center account.
--
-- /See:/ 'accountsListlinks' smart constructor.
data AccountsListlinks = AccountsListlinks'
    { _alsXgafv :: !(Maybe Xgafv)
    , _alsMerchantId :: !(Textual Word64)
    , _alsUploadProtocol :: !(Maybe Text)
    , _alsAccessToken :: !(Maybe Text)
    , _alsUploadType :: !(Maybe Text)
    , _alsAccountId :: !(Textual Word64)
    , _alsPageToken :: !(Maybe Text)
    , _alsMaxResults :: !(Maybe (Textual Word32))
    , _alsCallback :: !(Maybe Text)
    }
    deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsListlinks' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'alsXgafv'
--
-- * 'alsMerchantId'
--
-- * 'alsUploadProtocol'
--
-- * 'alsAccessToken'
--
-- * 'alsUploadType'
--
-- * 'alsAccountId'
--
-- * 'alsPageToken'
--
-- * 'alsMaxResults'
--
-- * 'alsCallback'
accountsListlinks
    :: Word64 -- ^ 'alsMerchantId'
    -> Word64 -- ^ 'alsAccountId'
    -> AccountsListlinks
accountsListlinks merchantId accountId = AccountsListlinks'
    { _alsXgafv = Nothing
    , _alsMerchantId = _Coerce # merchantId
    , _alsUploadProtocol = Nothing
    , _alsAccessToken = Nothing
    , _alsUploadType = Nothing
    , _alsAccountId = _Coerce # accountId
    , _alsPageToken = Nothing
    , _alsMaxResults = Nothing
    , _alsCallback = Nothing
    }

-- | V1 error format.
alsXgafv :: Lens' AccountsListlinks (Maybe Xgafv)
alsXgafv = lens _alsXgafv (\record value -> record {_alsXgafv = value})

-- | The ID of the managing account. If this parameter is not the same as
-- accountId, then this account must be a multi-client account and
-- \`accountId\` must be the ID of a sub-account of this account.
alsMerchantId :: Lens' AccountsListlinks Word64
alsMerchantId =
    lens _alsMerchantId (\record value -> record {_alsMerchantId = value}) . _Coerce

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
alsUploadProtocol :: Lens' AccountsListlinks (Maybe Text)
alsUploadProtocol =
    lens _alsUploadProtocol (\record value -> record {_alsUploadProtocol = value})

-- | OAuth access token.
alsAccessToken :: Lens' AccountsListlinks (Maybe Text)
alsAccessToken =
    lens _alsAccessToken (\record value -> record {_alsAccessToken = value})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
alsUploadType :: Lens' AccountsListlinks (Maybe Text)
alsUploadType =
    lens _alsUploadType (\record value -> record {_alsUploadType = value})

-- | The ID of the account for which to list links.
alsAccountId :: Lens' AccountsListlinks Word64
alsAccountId =
    lens _alsAccountId (\record value -> record {_alsAccountId = value}) . _Coerce

-- | The token returned by the previous request.
alsPageToken :: Lens' AccountsListlinks (Maybe Text)
alsPageToken =
    lens _alsPageToken (\record value -> record {_alsPageToken = value})

-- | The maximum number of links to return in the response, used for
-- pagination. The minimum allowed value is 5 results per page. If provided
-- value is lower than 5, it will be automatically increased to 5.
alsMaxResults :: Lens' AccountsListlinks (Maybe Word32)
alsMaxResults =
    lens _alsMaxResults (\record value -> record {_alsMaxResults = value}) . mapping _Coerce

-- | JSONP
alsCallback :: Lens' AccountsListlinks (Maybe Text)
alsCallback = lens _alsCallback (\record value -> record {_alsCallback = value})

instance GoogleRequest AccountsListlinks where
    type Rs AccountsListlinks = AccountsListLinksResponse
    type Scopes AccountsListlinks =
        '["https://www.googleapis.com/auth/content"]
    requestClient AccountsListlinks'{..} =
        call _alsMerchantId _alsAccountId _alsXgafv _alsUploadProtocol
             _alsAccessToken _alsUploadType _alsPageToken _alsMaxResults
             _alsCallback (Just AltJSON) shoppingContentService
      where
        call = buildClient (Proxy :: Proxy AccountsListlinksResource) mempty
| brendanhay/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/Accounts/Listlinks.hs | mpl-2.0 | 6,402 | 0 | 21 | 1,577 | 1,004 | 577 | 427 | 141 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Dataflow.Projects.Locations.Jobs.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates the state of an existing Cloud Dataflow job.
--
-- /See:/ <https://cloud.google.com/dataflow Google Dataflow API Reference> for @dataflow.projects.locations.jobs.update@.
module Network.Google.Resource.Dataflow.Projects.Locations.Jobs.Update
(
-- * REST Resource
ProjectsLocationsJobsUpdateResource
-- * Creating a Request
, projectsLocationsJobsUpdate
, ProjectsLocationsJobsUpdate
-- * Request Lenses
, pljuXgafv
, pljuJobId
, pljuUploadProtocol
, pljuLocation
, pljuPp
, pljuAccessToken
, pljuUploadType
, pljuPayload
, pljuBearerToken
, pljuProjectId
, pljuCallback
) where
import Network.Google.Dataflow.Types
import Network.Google.Prelude
-- | A resource alias for @dataflow.projects.locations.jobs.update@ method
-- which the 'ProjectsLocationsJobsUpdate' request conforms to. (@:>@ is
-- right-associative, so the flat chain denotes the same servant type.)
type ProjectsLocationsJobsUpdateResource =
       "v1b3" :>
       "projects" :> Capture "projectId" Text :>
       "locations" :> Capture "location" Text :>
       "jobs" :> Capture "jobId" Text :>
       QueryParam "$.xgafv" Text :>
       QueryParam "upload_protocol" Text :>
       QueryParam "pp" Bool :>
       QueryParam "access_token" Text :>
       QueryParam "uploadType" Text :>
       QueryParam "bearer_token" Text :>
       QueryParam "callback" Text :>
       QueryParam "alt" AltJSON :>
       ReqBody '[JSON] Job :> Put '[JSON] Job

-- | Updates the state of an existing Cloud Dataflow job.
--
-- /See:/ 'projectsLocationsJobsUpdate' smart constructor.
data ProjectsLocationsJobsUpdate = ProjectsLocationsJobsUpdate'
    { _pljuXgafv :: !(Maybe Text)
    , _pljuJobId :: !Text
    , _pljuUploadProtocol :: !(Maybe Text)
    , _pljuLocation :: !Text
    , _pljuPp :: !Bool
    , _pljuAccessToken :: !(Maybe Text)
    , _pljuUploadType :: !(Maybe Text)
    , _pljuPayload :: !Job
    , _pljuBearerToken :: !(Maybe Text)
    , _pljuProjectId :: !Text
    , _pljuCallback :: !(Maybe Text)
    }
    deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsJobsUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pljuXgafv'
--
-- * 'pljuJobId'
--
-- * 'pljuUploadProtocol'
--
-- * 'pljuLocation'
--
-- * 'pljuPp'
--
-- * 'pljuAccessToken'
--
-- * 'pljuUploadType'
--
-- * 'pljuPayload'
--
-- * 'pljuBearerToken'
--
-- * 'pljuProjectId'
--
-- * 'pljuCallback'
projectsLocationsJobsUpdate
    :: Text -- ^ 'pljuJobId'
    -> Text -- ^ 'pljuLocation'
    -> Job -- ^ 'pljuPayload'
    -> Text -- ^ 'pljuProjectId'
    -> ProjectsLocationsJobsUpdate
projectsLocationsJobsUpdate jobId location payload projectId =
    ProjectsLocationsJobsUpdate'
        { _pljuXgafv = Nothing
        , _pljuJobId = jobId
        , _pljuUploadProtocol = Nothing
        , _pljuLocation = location
        , _pljuPp = True
        , _pljuAccessToken = Nothing
        , _pljuUploadType = Nothing
        , _pljuPayload = payload
        , _pljuBearerToken = Nothing
        , _pljuProjectId = projectId
        , _pljuCallback = Nothing
        }

-- | V1 error format.
pljuXgafv :: Lens' ProjectsLocationsJobsUpdate (Maybe Text)
pljuXgafv = lens _pljuXgafv (\record value -> record {_pljuXgafv = value})

-- | The job ID.
pljuJobId :: Lens' ProjectsLocationsJobsUpdate Text
pljuJobId = lens _pljuJobId (\record value -> record {_pljuJobId = value})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pljuUploadProtocol :: Lens' ProjectsLocationsJobsUpdate (Maybe Text)
pljuUploadProtocol =
    lens _pljuUploadProtocol (\record value -> record {_pljuUploadProtocol = value})

-- | The location that contains this job.
pljuLocation :: Lens' ProjectsLocationsJobsUpdate Text
pljuLocation = lens _pljuLocation (\record value -> record {_pljuLocation = value})

-- | Pretty-print response.
pljuPp :: Lens' ProjectsLocationsJobsUpdate Bool
pljuPp = lens _pljuPp (\record value -> record {_pljuPp = value})

-- | OAuth access token.
pljuAccessToken :: Lens' ProjectsLocationsJobsUpdate (Maybe Text)
pljuAccessToken =
    lens _pljuAccessToken (\record value -> record {_pljuAccessToken = value})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pljuUploadType :: Lens' ProjectsLocationsJobsUpdate (Maybe Text)
pljuUploadType =
    lens _pljuUploadType (\record value -> record {_pljuUploadType = value})

-- | Multipart request metadata.
pljuPayload :: Lens' ProjectsLocationsJobsUpdate Job
pljuPayload = lens _pljuPayload (\record value -> record {_pljuPayload = value})

-- | OAuth bearer token.
pljuBearerToken :: Lens' ProjectsLocationsJobsUpdate (Maybe Text)
pljuBearerToken =
    lens _pljuBearerToken (\record value -> record {_pljuBearerToken = value})

-- | The ID of the Cloud Platform project that the job belongs to.
pljuProjectId :: Lens' ProjectsLocationsJobsUpdate Text
pljuProjectId =
    lens _pljuProjectId (\record value -> record {_pljuProjectId = value})

-- | JSONP
pljuCallback :: Lens' ProjectsLocationsJobsUpdate (Maybe Text)
pljuCallback = lens _pljuCallback (\record value -> record {_pljuCallback = value})

instance GoogleRequest ProjectsLocationsJobsUpdate
         where
    type Rs ProjectsLocationsJobsUpdate = Job
    type Scopes ProjectsLocationsJobsUpdate =
        '["https://www.googleapis.com/auth/cloud-platform",
          "https://www.googleapis.com/auth/userinfo.email"]
    requestClient ProjectsLocationsJobsUpdate'{..} =
        call _pljuProjectId _pljuLocation _pljuJobId _pljuXgafv
             _pljuUploadProtocol (Just _pljuPp) _pljuAccessToken
             _pljuUploadType _pljuBearerToken _pljuCallback
             (Just AltJSON) _pljuPayload dataflowService
      where
        call = buildClient (Proxy :: Proxy ProjectsLocationsJobsUpdateResource) mempty
| rueshyna/gogol | gogol-dataflow/gen/Network/Google/Resource/Dataflow/Projects/Locations/Jobs/Update.hs | mpl-2.0 | 6,932 | 0 | 23 | 1,761 | 1,095 | 634 | 461 | 159 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetTCPProxies.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the specified TargetTcpProxy resource. Gets a list of available
-- target TCP proxies by making a list() request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetTcpProxies.get@.
module Network.Google.Resource.Compute.TargetTCPProxies.Get
(
-- * REST Resource
TargetTCPProxiesGetResource
-- * Creating a Request
, targetTCPProxiesGet
, TargetTCPProxiesGet
-- * Request Lenses
, ttpgProject
, ttpgTargetTCPProxy
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- NOTE(review): auto-generated gogol API binding; regenerate rather than
-- hand-editing.
-- | A resource alias for @compute.targetTcpProxies.get@ method which the
-- 'TargetTCPProxiesGet' request conforms to.
type TargetTCPProxiesGetResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "global" :>
               "targetTcpProxies" :>
                 Capture "targetTcpProxy" Text :>
                   QueryParam "alt" AltJSON :>
                     Get '[JSON] TargetTCPProxy
-- | Returns the specified TargetTcpProxy resource. Gets a list of available
-- target TCP proxies by making a list() request.
--
-- /See:/ 'targetTCPProxiesGet' smart constructor.
data TargetTCPProxiesGet =
  TargetTCPProxiesGet'
    { _ttpgProject :: !Text
    , _ttpgTargetTCPProxy :: !Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TargetTCPProxiesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ttpgProject'
--
-- * 'ttpgTargetTCPProxy'
targetTCPProxiesGet
    :: Text -- ^ 'ttpgProject'
    -> Text -- ^ 'ttpgTargetTCPProxy'
    -> TargetTCPProxiesGet
targetTCPProxiesGet pTtpgProject_ pTtpgTargetTCPProxy_ =
  TargetTCPProxiesGet'
    {_ttpgProject = pTtpgProject_, _ttpgTargetTCPProxy = pTtpgTargetTCPProxy_}
-- | Project ID for this request.
ttpgProject :: Lens' TargetTCPProxiesGet Text
ttpgProject
  = lens _ttpgProject (\ s a -> s{_ttpgProject = a})
-- | Name of the TargetTcpProxy resource to return.
ttpgTargetTCPProxy :: Lens' TargetTCPProxiesGet Text
ttpgTargetTCPProxy
  = lens _ttpgTargetTCPProxy
      (\ s a -> s{_ttpgTargetTCPProxy = a})
instance GoogleRequest TargetTCPProxiesGet where
        type Rs TargetTCPProxiesGet = TargetTCPProxy
        type Scopes TargetTCPProxiesGet =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute",
               "https://www.googleapis.com/auth/compute.readonly"]
        requestClient TargetTCPProxiesGet'{..}
          = go _ttpgProject _ttpgTargetTCPProxy (Just AltJSON)
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy TargetTCPProxiesGetResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/TargetTCPProxies/Get.hs | mpl-2.0 | 3,660 | 0 | 15 | 812 | 393 | 237 | 156 | 67 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DNS.DNSKeys.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Enumerate DnsKeys to a ResourceRecordSet collection.
--
-- /See:/ <https://developers.google.com/cloud-dns Google Cloud DNS API Reference> for @dns.dnsKeys.list@.
module Network.Google.Resource.DNS.DNSKeys.List
(
-- * REST Resource
DNSKeysListResource
-- * Creating a Request
, dnsKeysList
, DNSKeysList
-- * Request Lenses
, dklProject
, dklDigestType
, dklPageToken
, dklManagedZone
, dklMaxResults
) where
import Network.Google.DNS.Types
import Network.Google.Prelude
-- NOTE(review): auto-generated gogol API binding; regenerate rather than
-- hand-editing.
-- | A resource alias for @dns.dnsKeys.list@ method which the
-- 'DNSKeysList' request conforms to.
type DNSKeysListResource =
     "dns" :>
       "v2beta1" :>
         "projects" :>
           Capture "project" Text :>
             "managedZones" :>
               Capture "managedZone" Text :>
                 "dnsKeys" :>
                   QueryParam "digestType" Text :>
                     QueryParam "pageToken" Text :>
                       QueryParam "maxResults" (Textual Int32) :>
                         QueryParam "alt" AltJSON :>
                           Get '[JSON] DNSKeysListResponse
-- | Enumerate DnsKeys to a ResourceRecordSet collection.
--
-- /See:/ 'dnsKeysList' smart constructor.
data DNSKeysList = DNSKeysList'
    { _dklProject :: !Text
    , _dklDigestType :: !(Maybe Text)
    , _dklPageToken :: !(Maybe Text)
    , _dklManagedZone :: !Text
    , _dklMaxResults :: !(Maybe (Textual Int32))
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'DNSKeysList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dklProject'
--
-- * 'dklDigestType'
--
-- * 'dklPageToken'
--
-- * 'dklManagedZone'
--
-- * 'dklMaxResults'
dnsKeysList
    :: Text -- ^ 'dklProject'
    -> Text -- ^ 'dklManagedZone'
    -> DNSKeysList
dnsKeysList pDklProject_ pDklManagedZone_ =
    DNSKeysList'
    { _dklProject = pDklProject_
    , _dklDigestType = Nothing
    , _dklPageToken = Nothing
    , _dklManagedZone = pDklManagedZone_
    , _dklMaxResults = Nothing
    }
-- | Identifies the project addressed by this request.
dklProject :: Lens' DNSKeysList Text
dklProject
  = lens _dklProject (\ s a -> s{_dklProject = a})
-- | An optional comma-separated list of digest types to compute and display
-- for key signing keys. If omitted, the recommended digest type will be
-- computed and displayed.
dklDigestType :: Lens' DNSKeysList (Maybe Text)
dklDigestType
  = lens _dklDigestType
      (\ s a -> s{_dklDigestType = a})
-- | Optional. A tag returned by a previous list request that was truncated.
-- Use this parameter to continue a previous list request.
dklPageToken :: Lens' DNSKeysList (Maybe Text)
dklPageToken
  = lens _dklPageToken (\ s a -> s{_dklPageToken = a})
-- | Identifies the managed zone addressed by this request. Can be the
-- managed zone name or id.
dklManagedZone :: Lens' DNSKeysList Text
dklManagedZone
  = lens _dklManagedZone
      (\ s a -> s{_dklManagedZone = a})
-- | Optional. Maximum number of results to be returned. If unspecified, the
-- server will decide how many results to return.
-- (The 'mapping _Coerce' exposes the wire 'Textual Int32' as a plain 'Int32'.)
dklMaxResults :: Lens' DNSKeysList (Maybe Int32)
dklMaxResults
  = lens _dklMaxResults
      (\ s a -> s{_dklMaxResults = a})
      . mapping _Coerce
instance GoogleRequest DNSKeysList where
        type Rs DNSKeysList = DNSKeysListResponse
        type Scopes DNSKeysList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloud-platform.read-only",
               "https://www.googleapis.com/auth/ndev.clouddns.readonly",
               "https://www.googleapis.com/auth/ndev.clouddns.readwrite"]
        requestClient DNSKeysList'{..}
          = go _dklProject _dklManagedZone _dklDigestType
              _dklPageToken
              _dklMaxResults
              (Just AltJSON)
              dNSService
          where go
                  = buildClient (Proxy :: Proxy DNSKeysListResource)
                      mempty
| rueshyna/gogol | gogol-dns/gen/Network/Google/Resource/DNS/DNSKeys/List.hs | mpl-2.0 | 4,852 | 0 | 18 | 1,196 | 658 | 386 | 272 | 100 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Tasks.TaskLists.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the authenticated user\'s specified task list.
--
-- /See:/ <https://developers.google.com/google-apps/tasks/firstapp Tasks API Reference> for @tasks.tasklists.delete@.
module Network.Google.Resource.Tasks.TaskLists.Delete
(
-- * REST Resource
TaskListsDeleteResource
-- * Creating a Request
, taskListsDelete
, TaskListsDelete
-- * Request Lenses
, tldTaskList
) where
import Network.Google.AppsTasks.Types
import Network.Google.Prelude
-- NOTE(review): auto-generated gogol API binding; regenerate rather than
-- hand-editing.
-- | A resource alias for @tasks.tasklists.delete@ method which the
-- 'TaskListsDelete' request conforms to.
type TaskListsDeleteResource =
     "tasks" :>
       "v1" :>
         "users" :>
           "@me" :>
             "lists" :>
               Capture "tasklist" Text :>
                 QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes the authenticated user\'s specified task list.
--
-- /See:/ 'taskListsDelete' smart constructor.
newtype TaskListsDelete = TaskListsDelete'
    { _tldTaskList :: Text
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TaskListsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tldTaskList'
taskListsDelete
    :: Text -- ^ 'tldTaskList'
    -> TaskListsDelete
taskListsDelete pTldTaskList_ =
    TaskListsDelete'
    { _tldTaskList = pTldTaskList_
    }
-- | Task list identifier.
tldTaskList :: Lens' TaskListsDelete Text
tldTaskList
  = lens _tldTaskList (\ s a -> s{_tldTaskList = a})
instance GoogleRequest TaskListsDelete where
        type Rs TaskListsDelete = ()
        type Scopes TaskListsDelete =
             '["https://www.googleapis.com/auth/tasks"]
        requestClient TaskListsDelete'{..}
          = go _tldTaskList (Just AltJSON) appsTasksService
          where go
                  = buildClient
                      (Proxy :: Proxy TaskListsDeleteResource)
                      mempty
| rueshyna/gogol | gogol-apps-tasks/gen/Network/Google/Resource/Tasks/TaskLists/Delete.hs | mpl-2.0 | 2,760 | 0 | 14 | 647 | 311 | 190 | 121 | 50 | 1 |
import Data.List
dropEvery n x = [x | x <- x, x `mod` n /= 0 ]
| nstarke/icc13-introduction-to-haskell | ex16.hs | lgpl-3.0 | 63 | 0 | 8 | 17 | 41 | 22 | 19 | 2 | 1 |
module Game where
import qualified Data.Map.Strict as M
import qualified Data.Aeson as Ae
import qualified Data.ByteString.Lazy.Char8 as BS8
import Data.List (isPrefixOf, union)
import Data.List.Split (splitOn)
import Data.Maybe (listToMaybe, fromMaybe, mapMaybe)
import Data.String (fromString)
import qualified Server as S
import Common
import JSON
import Debug.Trace
-- | Top-level in-game event dispatcher. Connect/disconnect events are
-- expected to be consumed before the game phase, so reaching them here is a
-- programming error; everything else is handled by 'gameLogic''.
gameLogic :: S.Event -> GameData -> S.Logic ServerData
gameLogic evt gd
  | S.eventType evt == S.EventConnect =
      error "We should not receive EventConnect while in game"
  | S.eventType evt == S.EventDisconnect =
      error "We should not receive EventDisconnect while in game"
  | otherwise = gameLogic' evt gd
  -- checkPlayersReady gd >>
startsWith = flip isPrefixOf
-- | Parse and dispatch an in-game client message. The wire format is
-- @command;arg;arg;...@ — the command token is stripped before the handler
-- is called. A message matching no guard falls through to the catch-all
-- equation below, which logs and ignores it.
gameLogic' :: S.Event -> GameData -> S.Logic ServerData
gameLogic' (S.Event (S.EventMessage msg) cid) gd
  | msg `startsWith` "move" =
      moveCommand (tail . splitOn ";" $ msg) cid gd
  | msg `startsWith` "train" =
      trainCommand (tail . splitOn ";" $ msg) cid gd
  | msg `startsWith` "end" = checkPlayers $ readyPlayer cid gd
gameLogic' e _ =
  S.logCon $ "[INFO] gameLogic': unexpected event type: " ++ show e
          ++ " - ignoring"
-- | Flag the given client's player as having finished their turn.
-- Unknown client ids are a no-op ('M.adjust' semantics).
readyPlayer :: S.ClientID -> GameData -> GameData
readyPlayer cid gd =
  gd { gameDataPlayers = M.adjust markReady cid (gameDataPlayers gd) }
  where
    markReady pd = pd { playerDataReady = True }
-- | If every player has ended their turn: advance the world one tick, push
-- per-client updates, and commit the new state. Otherwise just commit the
-- updated ready flags. ('unreadyPlayersGD' is defined outside this module —
-- presumably it clears the ready flags for the next turn; verify there.)
checkPlayers :: GameData -> S.Logic ServerData
checkPlayers gd =
  if isEveryoneReady gd
    then do
      let newGd = advanceGameData $ unreadyPlayersGD gd
      sendUpdates newGd
      S.changeBecause "Everyone is ready, sending updates" $ Right newGd
    else S.changeBecause "Player finished sending commands" $ Right gd
-- | True when every player in the game has flagged ready.
isEveryoneReady :: GameData -> Bool
isEveryoneReady = all playerDataReady . gameDataPlayers
-- | Send updates to all clients.
-- Each client receives its own update string ('compileUpdate' filters units
-- by that client's visible coordinates).
sendUpdates :: GameData -> S.Logic ServerData
sendUpdates gd = do
  mapM_ (\cid -> sendUpdate cid (compileUpdate gd cid))
        (M.keys $ gameDataPlayers gd)
  S.nop
-- | Deliver one raw update string to a single client.
sendUpdate :: S.ClientID -> String -> S.Logic ServerData
sendUpdate cid str =
  S.sendMessage (S.mkTargetedMessage S.MessageCustom [cid] str)
-- | Assemble an update for a given client
-- Note that we send every town update, with no respect to visibility.
-- Units, in contrast, ARE filtered to the client's visible coordinates.
-- The result is the 'GameUpdate' serialised to a JSON 'String'.
compileUpdate :: GameData -> S.ClientID -> String
compileUpdate gd cid =
  let towns = gameLevelTowns . gameDataLevel $ gd
      units = M.filter ((`elem` vCoords) . waypointToCoord . unitCoordinates)
            . gameLevelUnits
            . gameDataLevel
            $ gd
      vCoords = visibleCoords gd cid
      update = GameUpdate (M.map unitToUnitUpdate units) towns vCoords
      -- Strip server-only unit fields before sending to the client.
      unitToUnitUpdate :: UnitData -> UnitUpdate
      unitToUnitUpdate (UnitData t o _ _ f c) =
        UnitUpdate t o f (waypointToCoords c)
  in BS8.unpack $ Ae.encode update
-- | Get all coords where towns and units can see
-- Sight sources are the client's own units (radius 'unitSight') and its own
-- towns (fixed radius 4); the per-source results are unioned together.
visibleCoords :: GameData -> S.ClientID -> [HexCoord]
visibleCoords gd cid =
  let coords = towns ++ units
      player = fromMaybe (error $ "visibleCoords: no clientID: " ++ show cid)
                         (cid `M.lookup` gameDataPlayers gd)
      side = playerDataPlayerSide player
      units =
        M.elems $ M.map (\u -> ( waypointToCoord (unitCoordinates u)
                               , unitSight u))
                        (M.filter ((== side) . unitOwner)
                                  (gameLevelUnits $ gameDataLevel gd))
      towns = zip (M.keys $ M.filter ((== side) . townOwner)
                                     (gameLevelTowns $ gameDataLevel gd))
                  (repeat 4)
  in foldl union [] (map (uncurry (visibleCoordsFrom gd)) coords)
  where
    -- | Given a map, a coordinate and a distance, it returns the coordinates that
    -- can be seen from the original coordinate, including the original coord.
    visibleCoordsFrom :: GameData -> HexCoord -> Int -> [HexCoord]
    visibleCoordsFrom gd hc d =
      let tile =
            fromMaybe (error $ "visibleCoordsFrom: no clientID: " ++ show cid)
                      (hc `M.lookup` (gameLevelTiles . gameDataLevel $ gd))
          st = tileDataSprite tile
          vis = tileDataVisibility tile
      in visibleNeighbours gd (st, vis) d hc
    -- Breadth-limited flood fill: recurse into neighbours that are visible
    -- from the ORIGIN tile's sprite type, decrementing the remaining range.
    visibleNeighbours :: GameData -> (SpriteType, Visibility) -> Int -> HexCoord
                      -> [HexCoord]
    visibleNeighbours gd (st, vis) 0 hc = [hc]
    visibleNeighbours gd (st, vis) d hc =
      let allNeighbourTiles = mapMaybe tileType (neighbourHexCoords hc)
          -- NOTE(review): the lambda parameter shadows the outer 'hc' here.
          tileType nhc =
            fmap (\hc -> (nhc, hc))
                 (nhc `M.lookup` (gameLevelTiles . gameDataLevel $ gd))
          thisTile =
            fromMaybe (error $ "visibleneighbours: no hexcoord: " ++ show hc)
                      (hc `M.lookup` (gameLevelTiles . gameDataLevel $ gd))
          visibleNeighbourTiles =
            map fst $ filter (isVisibleFrom st vis thisTile . snd)
                             allNeighbourTiles
      in hc : foldl union [] (map (visibleNeighbours gd (st, vis) (d - 1))
                                  visibleNeighbourTiles)
    -- NOTE(review): only the TARGET tile's visibility (vis2) is consulted;
    -- the 'vis' argument (origin visibility) is unused — confirm intended.
    isVisibleFrom :: SpriteType -> Visibility -> TileData -> TileData -> Bool
    isVisibleFrom st vis td1 td2 =
      let st1 = tileDataSprite td1
          st2 = tileDataSprite td2
          vis2 = tileDataVisibility td2
      in case vis2 of
           AllVisible -> True
           SameTypeVisible -> st == st2
           ConnectedVisible -> st == st2 && st == st1
           NonVisible -> False
-- TODO generate 2 food for towns, decrement unit's food with 1, destroy units
-- with 0 food, resolve battles between units (barb beats archer beats knight,
-- all fighter beats spy, everyone beats kart)
-- | Advance the world one turn by running the phase pipeline top-to-bottom
-- ('|>' is left-to-right composition, so 'townsGenerateFood' runs first).
advanceGameData :: GameData -> GameData
advanceGameData = townsGenerateFood
               |> unitsEatFood
               |> unitsMove
               |> unitsCaptureTown
               |> resolveBattles
               |> unitsGetFood
               |> unitsStarve
  where
    (|>) = flip (.)
-- NOTE(review): every phase below is an unimplemented stub (the identity
-- function); see the TODO above 'advanceGameData' for the intended rules.
-- | Each !!occupied!! town will have +2 food
townsGenerateFood :: GameData -> GameData
townsGenerateFood = id
-- | If a unit is in a town, the town's food will be transferred to the unit.
-- Then if the unit has food, eats one, else it dies.
unitsEatFood :: GameData -> GameData
unitsEatFood = id
-- | Advance each unit along its waypoint list.
unitsMove :: GameData -> GameData
unitsMove = id
-- | Transfer ownership of a town to a unit standing on it.
unitsCaptureTown :: GameData -> GameData
unitsCaptureTown = id
-- | If there is a battle, kills the defeated unit, and gives its food to the
-- survivor.
resolveBattles :: GameData -> GameData
resolveBattles = id
-- | Units pick up food from their tile.
unitsGetFood :: GameData -> GameData
unitsGetFood = id
-- | Remove units whose food reached zero.
unitsStarve :: GameData -> GameData
unitsStarve = id
-- check if the src coordinate is a unit, is our unit, and there is a path to
-- the target coordinate
-- NOTE(review): despite the comment above, unit OWNERSHIP is not actually
-- verified — 'player' below is never forced — confirm whether that check
-- should be added.
moveCommand :: [String] -> S.ClientID -> GameData -> S.Logic ServerData
moveCommand (from:to:_) cid gd =
  let player = fromMaybe (error $ "moveCommand: no clientID: " ++ show cid)
                         (cid `M.lookup` gameDataPlayers gd)
      fromHc = readHexCoord from
      toHc = readHexCoord to
  in if areNeighbours fromHc toHc
       then S.changeBecause "Unit is moved" $ Right $ moveUnit fromHc toHc gd
       else S.sendMessage (S.mkTargetedMessage S.MessageWarning [cid]
                                               ("The hexcoords "
                                                ++ show fromHc ++ " and "
                                                ++ show toHc
                                                -- fixed missing leading space
                                                ++ " are not neighbours"))
  where
    -- | Append the destination to the waypoint list of the unit standing on
    -- @hcfrom@; if no unit is there the game state is returned unchanged.
    moveUnit :: HexCoord -> HexCoord -> GameData -> GameData
    moveUnit hcfrom hcto gd =
      let units = M.filter ((== hcfrom) . waypointToCoord . unitCoordinates)
                           (gameLevelUnits . gameDataLevel $ gd)
          unit = M.elemAt 0 units
          wp = addWaypoint (unitCoordinates (snd unit)) hcto
          newUnit = (snd unit) {unitCoordinates = wp}
      in case M.size units of
           0 -> gd
           -- BUG FIX: previously this wrote back the UNMODIFIED unit
           -- ('uncurry changeUnit unit gd'), discarding 'newUnit', so the
           -- move never took effect.
           _ -> changeUnit (fst unit) newUnit gd
moveCommand _ cid _ = S.sendMessage (S.mkTargetedMessage S.MessageWarning [cid]
                                                         "Wrong command syntax")
-- Check if the coordinate is a town, is our town, and is unoccupied
-- NOTE(review): the "is unoccupied" check claimed above is not implemented —
-- only town ownership is verified before training. The town's 'food' is
-- also bound but unused here.
trainCommand :: [String] -> S.ClientID -> GameData -> S.Logic ServerData
trainCommand (at:uType:_) cid gd =
  let coord = readHexCoord at
      player = fromMaybe (error $ "trainCommand: no clientID: " ++ show cid)
                         (cid `M.lookup` gameDataPlayers gd)
      side = playerDataPlayerSide player
      getTownAt hc = M.lookup hc . gameLevelTowns . gameDataLevel $ gd
      unitType = readUnitTypeCh $ head uType
  in case getTownAt coord of
       Nothing -> S.nop
       Just (TownData owner food) ->
         if owner == side
           then S.changeBecause "Unit is trained" $ Right $
                  addUnit (mkUnit unitType side coord) gd
           else S.nop
trainCommand _ cid _ =
  S.sendMessage (S.mkTargetedMessage S.MessageWarning [cid]
                                     "Wrong command syntax")
| gizmo-mk0/kartofwar-server | src/Game.hs | unlicense | 9,209 | 0 | 16 | 2,677 | 2,402 | 1,250 | 1,152 | 184 | 5 |
module X_2012_01_29_unicode
where
{-
Created : 2014 Oct 07 (Tue) 08:53:35 by Harold Carr.
Last Modified : 2015 Sep 01 (Tue) 14:48:43 by Harold Carr.
2012-01-29
http://www.haskellforall.com/2012/01/haskell-for-engineers-unicode.html
UNICODE
{-# LANGUAGE UnicodeSyntax #-}
http://en.wikipedia.org/wiki/Unicode_input
-}
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/general/haskellforall/src/X_2012_01_29_unicode.hs | unlicense | 326 | 0 | 2 | 43 | 5 | 4 | 1 | 1 | 0 |
-- Symbiotic - math library including linear algebra package, graphs package, and virtual database.
-- @author Jiangcheng Oliver Chu
module Symbiotic
( rref
, rrefi
, transpose
, countRows
, countCols
, mul
, mul'
, wellFormed
, add
, elemMul
, elementWise
, sub
, divide
, scale
, newMatrix
, newVector
, cross
, dot
) where
import Data.List
import Data.Maybe
-- | Total version of 'tail': the empty list maps to itself.
follower :: [a] -> [a]
follower = drop 1
data MatrixHead = MatrixHead { pivotElems :: [Double], leftColumn :: [Double] }
matTail ms = map follower (follower ms)
matHead ms = MatrixHead (head ms) (map head (follower ms))
-- | Prepend one column to a matrix, pairing the i-th value with the i-th
-- row; truncates to the shorter of the two arguments.
leftAugment :: (Fractional a) => [a] -> [[a]] -> [[a]]
leftAugment = zipWith (:)
(MatrixHead p c) `matCons` mt = p : leftAugment c mt
maxIndex xs = (length xs) - 1
-- | Convert an integral matrix to a Double matrix, element by element.
toDoubleMatrix :: (Integral a) => [[a]] -> [[Double]]
toDoubleMatrix = map (map fromIntegral)
-- | First row whose leading entry is non-zero; [] if no row qualifies.
-- (Partial on rows that are themselves empty, as before.)
pivotRow :: (Fractional a, Eq a) => [[a]] -> [a]
pivotRow = fromMaybe [] . find ((/= 0) . head)
-- | Index of the first row with a non-zero leading entry. When no row
-- qualifies this yields @length ms - 1@ (and hence -1 for an empty matrix),
-- matching the historical recursive definition.
whereIsPivotRow :: (Fractional a, Eq a) => [[a]] -> Int
whereIsPivotRow ms = fromMaybe (length ms - 1) (findIndex ((/= 0) . head) ms)
-- | First non-zero entry of the list, or -1 when every entry is zero
-- (or the list is empty).
firstNonZero :: [Double] -> Double
firstNonZero = fromMaybe (-1) . find (/= 0.0)
-- | True iff the first non-zero entry of the row is exactly 1
-- (an all-zero or empty row is not a pivot row).
isPivot :: (Fractional a, Eq a) => [a] -> Bool
isPivot xs =
  case dropWhile (== 0.0) xs of
    (lead : _) -> lead == 1.0
    []         -> False
-- | Greatest row index @i <= edge@ whose row satisfies 'isPivot';
-- -1 when no such row exists. Rows above @edge@ are discarded first, then
-- the search walks backwards from the end.
whereIsPivotRow' :: [[Double]] -> Int -> Int
whereIsPivotRow' [] _ = -1
whereIsPivotRow' ms edge
  | edge < (length ms) - 1 = whereIsPivotRow' (init ms) edge
  | otherwise = let m = last ms
                in if isPivot m then (length ms) - 1 else whereIsPivotRow' (init ms) edge
-- | Element @k@ of @xs@ as it would appear after swapping positions @i@
-- and @j@ (used by 'swap').
inverted :: (Eq a) => [a] -> Int -> Int -> Int -> a
inverted xs i j k =
  if k == i
    then xs !! j
    else if k == j
           then xs !! i
           else xs !! k
-- | Return @xs@ with the elements at positions @i@ and @j@ exchanged.
swap :: (Eq a) => [a] -> Int -> Int -> [a]
swap xs i j = map pick [0 .. length xs - 1]
  where
    pick k
      | k == i = xs !! j
      | k == j = xs !! i
      | otherwise = xs !! k
-- | Normalise a row so its leading entry becomes 1
-- (partial: crashes on an empty row, as before).
scalePivot :: (Fractional a) => [a] -> [a]
scalePivot (lead : rest) = 1.0 : map (s *) rest
  where s = 1.0 / lead
-- | Normalise only the FIRST row of the matrix so its leading entry is 1;
-- all other rows pass through unchanged.
scaleHead :: (Fractional a) => [[a]] -> [[a]]
scaleHead [] = []
scaleHead ((p : ps) : rest) = (1.0 : map ((1.0 / p) *) ps) : rest
-- | Multiply every entry of a row by the scalar @s@.
scaleBy :: (Fractional a) => a -> [a] -> [a]
scaleBy s xs = [s * x | x <- xs]
-- | Element-wise subtraction, @upper - lower@ (truncates to the shorter row).
subtractElements :: (Num a) => [a] -> [a] -> [a]
subtractElements = zipWith (-)
subtractPivot pivot other = let h = head other in subtractElements other (map (h*) pivot)
-- | Eliminate the leading column of every row below the pivot (first) row.
addPivotMultiples :: (Num a) => [[a]] -> [[a]]
addPivotMultiples [] = []
addPivotMultiples (pivot : rest) = pivot : map (subtractPivot pivot) rest
-- | One elimination step: swap a pivot row to the top, scale its leading
-- entry to 1, then clear the leading column of the rows below it.
refOnce :: (Fractional a, Eq a) => [[a]] -> [[a]]
refOnce ms = addPivotMultiples (scaleHead (swap ms 0 (whereIsPivotRow ms)))
-- | Row-echelon form.
-- NOTE(review): only two 'refOnce' passes plus a final row scaling are
-- performed (no full recursion), which appears to assume a small
-- (roughly 3-row) matrix — confirm intended scope.
ref :: [[Double]] -> [[Double]]
ref ms = let r = refOnce ms
             ms' = (matHead r) `matCons` refOnce (matTail r)
             lastScale = (1.0 / firstNonZero (last ms'))
         in init ms' ++ [scaleBy lastScale (last ms')]
-- | Back-substitution step: eliminate the pivot column of row @i@ (the
-- highest pivot row at or below @edge@) from every row above it.
-- NOTE(review): the irrefutable @Just p = elemIndex 1.0 pivot@ crashes if
-- the selected row contains no exact 1.0.
rrefAt :: [[Double]] -> Int -> [[Double]]
rrefAt ms edge
  | edge >= length ms = ms
  | otherwise = let refm = ref ms
                    i = whereIsPivotRow' refm edge
                    pivot = refm !! i
                    Just p = elemIndex 1.0 pivot
                in [subtractElements (refm !! j) (scaleBy ((refm !! j) !! p) pivot) | j <- [0..(i-1)]] ++
                   [refm !! k | k <- [i..(length ms) - 1]]
-- | Reduced row-echelon form: back-substitute from the bottom row upwards,
-- then normalise negative zeros for display.
rref :: [[Double]] -> [[Double]]
rref ms = let maxedge = (length ms) - 1
          in prettyZeros $ foldl rrefAt ms [maxedge, (maxedge - 1) .. 0]
-- | Integral-matrix convenience variants: convert to Double, then reduce.
refi = ref . toDoubleMatrix
rrefi = rref . toDoubleMatrix
-- | Replace negative zeros with positive zeros for nicer display.
-- (IEEE-754 has @0.0 == -0.0@, so both compare equal and both map to 0.0.)
prettyZeros :: [[Double]] -> [[Double]]
prettyZeros = map (map clean)
  where
    clean v
      | v == -0.0 = 0.0
      | otherwise = v
-- transpose is re-exported from Data.List
-- | Number of rows of a matrix.
countRows ms = length ms
-- | Number of columns (taken from the first row; 0 for an empty matrix).
countCols [] = 0
countCols ms = length (head ms)
-- | A matrix is well-formed when all rows share the (non-zero) length of
-- the first row; the empty matrix is well-formed.
wellFormed :: [[Double]] -> Bool
wellFormed ms
  | null ms = True
  | null (head ms) = False
  | otherwise = all (\m -> (length m) == h) ms
  where h = length $ head $ ms
-- | Element at row @i@, column @j@ (partial: unchecked indexing).
get ms i j = (ms !! i) !! j
-- | @sum [f j | j <- [i..n]]@.
summation f i n = sum [f j | j <- [i..n]]
-- | Entry (i, j) of the product a*b: dot product of row i of a with
-- column j of b (k ranges over the inner dimension, countCols a).
mulEntry a b i j = summation (\k -> (get a i k) * (get b k j)) 0 ((countCols a) - 1)
-- | Matrix product. For an (m x n) times (n x p) multiplication the inner
-- dimensions must agree; otherwise Nothing.
--
-- BUG FIX: the conformability check previously compared @countRows a@ with
-- @countCols b@, and the result ranges were swapped (rows indexed by
-- @countCols b@, columns by @countRows a@) — correct only for square
-- matrices. The result is now (countRows a) x (countCols b).
mul :: [[Double]] -> [[Double]] -> Maybe [[Double]]
a `mul` b
  | countCols a /= countRows b = Nothing
  | otherwise =
      Just [[mulEntry a b i j | j <- [0 .. countCols b - 1]]
           | i <- [0 .. countRows a - 1]]
-- | Collapse a Maybe-returning binary matrix operation into one that
-- yields the empty matrix on failure.
unjustify :: ([[Double]] -> [[Double]] -> Maybe [[Double]]) -> ([[Double]] -> [[Double]] -> [[Double]])
unjustify f a b = fromMaybe [] (f a b)
-- | Like 'mul', but a dimension mismatch yields [] instead of Nothing.
mul' :: [[Double]] -> [[Double]] -> [[Double]]
a `mul'` b = unjustify mul a b
-- | Combine two same-shaped matrices entry by entry with @f@;
-- Nothing when the row or column counts (per the first rows) differ.
elementWise :: (Double -> Double -> Double) -> [[Double]] -> [[Double]] -> Maybe [[Double]]
elementWise f a b
  | length a /= length b || width a /= width b = Nothing
  | otherwise =
      Just [ [ f (a !! i !! j) (b !! i !! j) | j <- [0 .. width a - 1] ]
           | i <- [0 .. length a - 1] ]
  where
    width [] = 0
    width (row : _) = length row
-- | Element-wise matrix addition.
add :: [[Double]] -> [[Double]] -> Maybe [[Double]]
a `add` b = elementWise (+) a b
-- | Element-wise (Hadamard) product — not the matrix product 'mul'.
elemMul :: [[Double]] -> [[Double]] -> Maybe [[Double]]
a `elemMul` b = elementWise (*) a b
-- | Element-wise matrix subtraction.
sub :: [[Double]] -> [[Double]] -> Maybe [[Double]]
a `sub` b = elementWise (-) a b
-- | Element-wise matrix division (no zero-divisor check; IEEE semantics).
divide :: [[Double]] -> [[Double]] -> Maybe [[Double]]
a `divide` b = elementWise (/) a b
-- | A @rows x cols@ matrix with every entry equal to @value@.
newMatrix :: Double -> Int -> Int -> [[Double]]
newMatrix value rows cols = replicate rows (replicate cols value)
-- | A 1 x @size@ row vector filled with @value@ (vectors here are
-- single-row matrices).
newVector :: Double -> Int -> [[Double]]
newVector value size = [replicate size value]
-- | Multiply every entry of the matrix by the scalar @c@
-- (implemented as an 'elemMul' against a constant matrix of the same shape).
scale :: Double -> [[Double]] -> [[Double]]
scale c ms = fromMaybe [] $ (newMatrix c (countRows ms) (countCols ms)) `elemMul` ms
-- | Cross product of two 3-component row vectors (extra components are
-- ignored; fewer than 3 crashes, as before).
cross :: [[Double]] -> [[Double]] -> [[Double]]
g `cross` h = [[v * z - w * y, w * x - u * z, u * y - v * x]]
  where
    row1 = head g
    row2 = head h
    (u, v, w) = (row1 !! 0, row1 !! 1, row1 !! 2)
    (x, y, z) = (row2 !! 0, row2 !! 1, row2 !! 2)
-- | Dot product of the first three components of two row vectors
-- (extra components are ignored; fewer than 3 crashes, as before).
dot :: [[Double]] -> [[Double]] -> Double
g `dot` h = sum [(row1 !! k) * (row2 !! k) | k <- [0, 1, 2]]
  where
    row1 = head g
    row2 = head h
-- | Euclidean length of a row vector (first row of the argument).
magnitude :: [[Double]] -> Double
magnitude b = sqrt (sum [x * x | x <- head b])
-- | Scale a row vector to unit length.
-- NOTE(review): a zero vector divides by zero, producing NaN entries.
normalize :: [[Double]] -> [[Double]]
normalize b = scale (1 / (magnitude b)) b
-- http://en.wikipedia.org/wiki/Eigenvalue_algorithm
-- http://en.wikipedia.org/wiki/QR_algorithm
-- http://en.wikipedia.org/wiki/Jacobi_eigenvalue_algorithm
-- http://en.wikipedia.org/wiki/Divide-and-conquer_eigenvalue_algorithm
-- http://en.wikipedia.org/wiki/Power_iteration
-- | Deterministic pseudo-random starting vector for power iteration,
-- one component per row of @a@.
initVector :: [[Double]] -> [[Double]]
initVector a = [[fromIntegral ((17 * i) `mod` 61) | i <- [1 .. length a]]]
-- | One power-iteration step: multiply by the matrix, then renormalise.
-- NOTE(review): 'fromMaybe []' silently swallows a dimension mismatch
-- from 'mul'.
powerIterationStep :: [[Double]] -> [[Double]] -> [[Double]]
powerIterationStep a b = normalize (fromMaybe [] (a `mul` b))
-- | Compose a list of functions, applying them left-to-right
-- (head of the list first).
compose :: [a -> a] -> a -> a
compose fs v = go v fs
  where
    go x [] = x
    go x (f : rest) = go (f x) rest
maxAbsEigenvalue a = (compose (replicate 1000 (powerIterationStep a))) (initVector a)
| Carrotlord/matrix-library | symbolic.hs | apache-2.0 | 7,285 | 0 | 16 | 1,890 | 3,689 | 1,974 | 1,715 | 171 | 2 |
-- http://www.codewars.com/kata/529e2e1f16cb0fcccb000a6b
module Split where
splitInteger :: Int -> Int -> [Int]
splitInteger n p = replicate (p-r) q ++ replicate r (q+1) where
(q, r) = quotRem n p | Bodigrim/katas | src/haskell/6-Almost-Even.hs | bsd-2-clause | 199 | 0 | 8 | 32 | 78 | 42 | 36 | 4 | 1 |
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable, KindSignatures,
NoMonomorphismRestriction, TupleSections, OverloadedStrings,
ScopedTypeVariables, FlexibleContexts, GeneralizedNewtypeDeriving,
Rank2Types, GADTs, LambdaCase, ViewPatterns, OverloadedStrings #-}
{-# OPTIONS_GHC -Wall #-}
import Bound
import Bound.Name
import Bound.Scope
import Bound.Var
import Control.Comonad
import Control.Monad
import Control.Monad.Except
import Control.Monad.Trans.Maybe
import Data.Bifunctor
import Data.List
import Data.String
-- import Data.Functor.Invariant
import Prelude.Extras
import qualified Data.Set
import Debug.Trace
newtype Eval a = Eval { runEval :: (Except String a) }
deriving (Functor, Applicative, Monad, MonadError String)
doEval :: Show a => Eval a -> IO ()
doEval t = case runExcept $ runEval t of
Left s -> putStrLn s
Right a -> print a
data Term n a
= Var !a
| Type
| Pi (Name n (Term n a)) (Scope (Name n ()) (Term n) a)
| Lam (Name n (Maybe (Term n a))) (Scope (Name n ()) (Term n) a)
| Let !Int (Prog n a) (Scope (Name n Int) (Term n) a)
| App (Term n a) (Term n a)
| Sigma (Name n (Term n a)) (Scope (Name n ()) (Term n) a)
| Pair (Term n a) (Term n a)
| Split (Term n a) (n,n) (Scope (Name n Tup) (Term n) a)
| Enum [n]
| Label n
| Case (Term n a) [(n,Term n a)]
| Lift (Term n a)
| Box (Term n a)
| Force (Term n a)
| Rec (Term n a)
| Fold (Term n a)
| Unfold (Name n (Term n a)) (Scope (Name n ()) (Term n) a)
| BeleiveMe
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
data Tup = Fst | Snd
deriving (Eq,Ord,Show)
instance Eq n => Eq1 (Term n)
instance Ord n => Ord1 (Term n)
instance Show n => Show1 (Term n)
type Type n = Term n
type Prog n a = [Name n ( Scope (Name n Int) (Type n) a
, Scope (Name n Int) (Term n) a)]
instance Applicative (Term n) where
pure = Var
(<*>) = ap
instance Monad (Term n) where
return = Var
(>>=) = bindTerm
bindTerm :: Term n a -> (a -> Term n b) -> Term n b
bindTerm (Var x) f = f x
bindTerm Type _ = Type
bindTerm (Pi tm s) f = Pi (fmap (`bindTerm` f) tm) (s >>>= f)
bindTerm (Lam tmM s) f = Lam ((fmap.fmap) (`bindTerm` f) tmM) (s >>>= f)
bindTerm (Let n p s) f = Let n (bindProg p f) (s >>>= f)
bindTerm (App e u) f = App (bindTerm e f) (bindTerm u f)
bindTerm (Sigma tm s) f = Sigma (fmap (`bindTerm` f) tm) (s >>>= f)
bindTerm (Pair l r) f = Pair (bindTerm l f) (bindTerm r f)
bindTerm (Split t xy s) f = Split (bindTerm t f) xy (s >>>= f)
bindTerm (Enum ls) _ = Enum ls
bindTerm (Label l) _ = Label l
bindTerm (Case t as) f = Case (bindTerm t f) (map (second (`bindTerm` f)) as)
bindTerm (Lift t) f = Lift (bindTerm t f)
bindTerm (Box t) f = Box (bindTerm t f)
bindTerm (Force t) f = Force (bindTerm t f)
bindTerm (Rec t) f = Rec (bindTerm t f)
bindTerm (Fold t) f = Fold (bindTerm t f)
bindTerm (Unfold t s) f = Unfold (fmap (`bindTerm` f) t) (s >>>= f)
bindTerm BeleiveMe _ = BeleiveMe
bindProg :: Prog n a -> (a -> Term n b) -> Prog n b
bindProg ps f = map (fmap (bimap (>>>= f) (>>>= f))) ps
instance IsString a => IsString (Term n a) where
fromString = Var . fromString
data Value n a
= Neutral (Neutral n a)
| VType
| VPi (Name n (Term n a)) (Scope (Name n ()) (Term n) a)
| VLam (Name n (Term n a)) (Scope (Name n ()) (Term n) a)
| VSigma (Name n (Term n a)) (Scope (Name n ()) (Term n) a)
| VPair (Term n a) (Term n a)
| VEnum [n]
| VLabel n
| VLift (Type n a)
| VBox (Boxed n a)
| VRec (Type n a)
| VFold (Term n a)
| VBeleiveMe
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
newtype Boxed n a = Boxed { unBoxed :: Term n a }
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
data Neutral n a
= NVar a
| NApp (Neutral n a) (Term n a)
| NSplit (Neutral n a) (n,n) (Scope (Name n Tup) (Term n) a)
| NCase (Neutral n a) [(n,Term n a)]
| NForce (Neutral n a)
| NUnfold (Name n (Neutral n a)) (Scope (Name n ()) (Term n) a)
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
instance Eq n => Eq1 (Value n)
instance Ord n => Ord1 (Value n)
instance Show n => Show1 (Value n)
data Env f g a
= Env
{ ctx :: a -> f a
, def :: a -> g a
}
data EnvEntry f a
= Cloj (f a)
| Id a
deriving Functor
type EnvEntry' n = EnvEntry (Term n)
type Env' n = Env (Term n) (EnvEntry' n)
emptyEnv :: Show a => Env f g a
emptyEnv = Env (\x -> error ("No declaration for: " ++ show x))
(\x -> error ("No definition for: " ++ show x))
-- | Infer the type of a term.  Only syntax-directed / annotated heads
-- can be inferred; un-annotated introduction forms ('Lam' without an
-- annotation, 'Pair', 'Split', 'Label', 'Case', 'Unfold', 'BeleiveMe')
-- must be checked against an expected type and are rejected here.
--
-- Fix: error message typo "duplicate lables" -> "duplicate labels".
infer :: (Eq a, Eq n, Ord n, Show n, Show a)
      => Env (Term n) (EnvEntry' n) a
      -> Term n a
      -> Eval (Type n a)
infer env (Var x) = return (ctx env x)
-- Type : Type (type-in-type; there is no universe hierarchy here).
infer _ Type = return Type
infer env (Pi tm s) = do
  check env (extract tm) Type
  let env' = extendEnvQ env (extract tm)
  check env' (fromScope s) Type
  return Type
-- Annotated lambda: check the domain, then infer the body under it.
infer env (Lam (Name n (Just tm)) s) = do
  check env tm Type
  let env' = extendEnvQ env tm
  s' <- infer env' (fromScope s)
  return (Pi (Name n tm) (toScope s'))
infer env (Let n p s) = do
  env' <- checkProg env p
  s' <- infer env' (fromScope s)
  -- Re-wrap the inferred body type in the same let so its bindings
  -- remain in scope.
  return (Let n p (toScope s'))
infer env (App t u) = do
  infer' env t >>= \case
    VPi v s -> do check env u (extract v)
                  return (instantiate1Name u s)
    _ -> throwError "infer: expected pi"
infer env (Sigma tm s) = do
  check env (extract tm) Type
  let env' = extendEnvQ env (extract tm)
  check env' (fromScope s) Type
  return Type
-- Enum labels must be pairwise distinct.
infer _ (Enum ls) = if nub ls /= ls
                       then throwError "infer: duplicate labels"
                       else return Type
infer env (Box t) = Lift <$> infer env t
infer env (Fold t) = Rec . Box <$> infer env t
infer env (Force t) = do
  a <- infer' env t
  case a of
    VLift b -> return b
    _ -> throwError "infer: expected Lifted type"
infer env (Lift a) = check env a Type >> return Type
infer env (Rec a) = check env a (Lift Type) >> return Type
infer _ (Lam _ _) = throwError "infer: cannot infer un-annotated Lambda"
infer _ (Pair _ _) = throwError "infer: cannot infer un-annotated Pair"
infer _ (Split _ _ _) = throwError "infer: cannot infer un-annotated Split"
infer _ (Label _) = throwError "infer: cannot infer un-annotated Label"
infer _ (Case _ _) = throwError "infer: cannot infer un-annotated Case"
infer _ (Unfold _ _) = throwError "infer: cannot infer un-annotated Unfold"
infer _ BeleiveMe = throwError "infer: cannot infer un-annotated BeleiveMe"
-- | Infer a term's type and immediately evaluate it to weak-head normal
-- form, for callers that need to inspect the type's head constructor.
infer' :: (Eq a, Eq n, Ord n, Show n, Show a)
       => Env' n a
       -> Term n a
       -> Eval (Value n a)
infer' env tm = infer env tm >>= eval env
-- | Extend an environment with one quantifier-bound variable of type
-- @tm@.  The bound variable has no definition (it maps to itself via
-- 'Id'); every pre-existing entry is weakened with 'F'.
extendEnvQ :: Env' n a
           -> Term n a
           -> Env' n (Var (Name n ()) a)
extendEnvQ (Env ctxOld defOld) tm = Env ctx' def'
  where
    ctx' (B _) = F <$> tm
    ctx' (F tm') = F <$> ctxOld tm'
    def' x@(B _) = Id x
    def' (F tm') = F <$> defOld tm'
-- | Extend an environment with the (mutually recursive) bindings of a
-- let program: bound variable @i@ takes its type and its body from the
-- @i@-th entry of @ps@.
extendEnvLet :: Env' n a
             -> Prog n a
             -> Env' n (Var (Name n Int) a)
extendEnvLet (Env ctxOld defOld) ps = Env ctx' def'
  where
    ctx' (B x) = fromScope . fst . extract . (ps!!) $ extract x
    ctx' (F tm) = F <$> ctxOld tm
    def' (B x) = Cloj . fromScope . snd . extract . (ps!!) $ extract x
    def' (F tm) = F <$> defOld tm
-- | Extend an environment for the body of a @split@: the 'Fst'-bound
-- variable has type @tyA@, the 'Snd'-bound one has @tyB@ with its bound
-- name redirected at the first component.  When the scrutinee was a
-- plain variable (@mA@), its definition is refined to the pair of the
-- two fresh variables so later equalities can see through it.
extendEnvSplit :: Eq a
               => Env' n a
               -> Type n a
               -> Scope (Name n ()) (Type n) a
               -> n -> n -> Maybe a
               -> Env' n (Var (Name n Tup) a)
extendEnvSplit (Env ctxOld defOld) tyA tyB x y mA = Env ctx' def'
  where
    ctx' (B (Name _ Fst)) = F <$> tyA
    ctx' (B (Name _ Snd)) = fromScope (mapBound (const (Name x Fst)) tyB)
    ctx' (F tm') = F <$> ctxOld tm'
    def' b@(B _) = Id b
    def' (F tm')
      | Just tm' == mA = Cloj $ Pair (Var (B (Name x Fst))) (Var (B (Name y Snd)))
      | otherwise = F <$> defOld tm'
-- | Inside one branch of a case whose scrutinee was the variable @i@,
-- refine that variable's definition to the branch's label @l@.  The
-- typing context is left unchanged.
extendEnvCase :: Eq a
              => Env' n a
              -> a -> n
              -> Env' n a
extendEnvCase env i l = env {def = def'}
  where
    def' a | a == i = Cloj (Label l)
           | otherwise = def env a
-- | Extend an environment for the body of an @unfold@: the new variable
-- has the forced underlying type @Force tyA@, and when the scrutinee
-- was a plain variable (@tM@) its definition is refined to 'Fold' of
-- the new variable.
extendEnvUnfold :: Eq a
                => Env' n a
                -> Type n a
                -> n
                -> Maybe a
                -> Env' n (Var (Name n ()) a)
extendEnvUnfold (Env ctxOld defOld) tyA tName tM = Env ctx' def'
  where
    ctx' (B _) = F <$> Force tyA
    ctx' (F tm') = F <$> ctxOld tm'
    def' b@(B _) = Id b
    def' (F tm')
      | Just tm' == tM = Cloj (Fold (Var (B (Name tName ()))))
      | otherwise = F <$> defOld tm'
-- | Check a whole (mutually recursive) let program: first extend the
-- environment with all bindings, then check every definition under that
-- extended environment.  Returns the extended environment for checking
-- the let body.
checkProg :: (Eq a, Eq n, Ord n, Show n, Show a)
          => Env' n a -> Prog n a
          -> Eval (Env' n (Var (Name n Int) a))
checkProg env p = do
  let env' = extendEnvLet env p
  mapM_ (checkProg' env' . bimap fromScope fromScope . extract) p
  return env'
-- | Check one definition of a program: its declared type must itself be
-- a well-formed type, and its body must inhabit that type.
checkProg' :: (Eq a, Eq n, Ord n, Show n, Show a)
           => Env' n a
           -> (Type n a, Term n a)
           -> Eval ()
checkProg' env (declaredTy, body) =
  check env declaredTy Type >> check env body declaredTy
-- | Bidirectional checking: verify that a term inhabits the given type.
-- Eliminator forms ('Let', 'Split', 'Case', 'Unfold', 'Force') are
-- handled structurally so the expected type can be propagated into
-- their branches; everything else is evaluated and delegated to
-- 'check''.
check :: (Eq a, Eq n, Ord n, Show n, Show a)
      => Env' n a -> Term n a -> Term n a -> Eval ()
check env (Let _ p s) c = do
  env' <- checkProg env p
  -- The expected type is weakened with 'F' into the let-extended scope.
  check env' (fromScope s) (F <$> c)
check env (Split t (x,y) u) c = do
  sigmab <- infer' env t
  case sigmab of
    VSigma tyA tyB -> do
      t' <- eval env t
      -- If the scrutinee is a variable, remember it so the branch can
      -- refine its definition to the pair of the bound components.
      let tM = case t' of
                 Neutral (NVar i) -> Just i
                 _ -> Nothing
          env' = extendEnvSplit env (extract tyA) tyB x y tM
      check env' (fromScope u) (F <$> c)
    _ -> throwError ("check: Split: expected sigma: " ++ show sigmab)
check env (Case t as) c = do
  enum <- infer' env t
  case enum of
    VEnum ls ->
      -- Branch labels must cover exactly the enum's labels (as sets).
      let ls' = map fst as
      in if (Data.Set.fromList ls) /= (Data.Set.fromList ls')
            then throwError ("check: Labels don't match: " ++ show (ls,ls'))
            else do
              t' <- eval env t
              case t' of
                -- Variable scrutinee: check each branch with the
                -- variable refined to that branch's label.
                Neutral (NVar i) ->
                  mapM_ (\(l,u) -> let env' = extendEnvCase env i l
                                   in check env' u c) as
                _ -> mapM_ (\(_,u) -> check env u c) as
    _ -> throwError ("check: Case: expected Enum: " ++ show enum)
check env (Unfold t s) c = do
  vrec <- infer' env (extract t)
  case vrec of
    VRec a -> do
      t' <- eval env (extract t)
      let tM = case t' of
                 Neutral (NVar i) -> Just i
                 _ -> Nothing
          env' = extendEnvUnfold env a (name t) tM
      check env' (fromScope s) (F <$> c)
    _ -> throwError ("check: Unfold: expected Rec: " ++ show vrec)
-- @Force t : c@ iff @t : Lift c@.
check env (Force t) c = check env t (Lift c)
-- Fallback: evaluate the expected type and compare in 'check''.
check env t a = check' env t =<< eval env a
-- | Check a term against an evaluated (weak-head normal) expected type.
check' :: (Eq a, Eq n, Ord n, Show n, Show a) => Env' n a -> Term n a -> Value n a -> Eval ()
check' env (Lam v s) (VPi ty s') = do
  -- If the lambda carries an annotation it must equal the Pi's domain.
  maybe (return ()) (eq env (extract ty)) (extract v)
  let env' = extendEnvQ env (extract ty)
  check env' (fromScope s) (fromScope s')
check' _ (Lam _ _) v = throwError ("check': expected pi: " ++ show v)
check' env (Pair l r) (VSigma ty s) = do
  check env l (extract ty)
  -- The second component's type depends on the first component.
  let s' = instantiate1Name l s
  check env r s'
check' _ (Pair _ _) v = throwError ("check': expected sigma: " ++ show v)
check' _ (Label l) (VEnum ls) | l `elem` ls = return ()
check' _ (Label _) _ = throwError "check': Label"
check' env (Box t) (VLift a) = check env t a
check' env (Fold t) (VRec a) = check' env t =<< eval env (Force a)
-- BeleiveMe is accepted at any type (an escape hatch); it only logs via
-- 'trace' instead of failing.
check' _ BeleiveMe v = trace ("BeleiveMe: " ++ show v) (return ()) -- throwError ("BeleiveMe: " ++ show v)
-- Fallback: infer the term's type and require it equal to the expected
-- one, enriching any mismatch error with both sides.
check' env t a = do b <- infer' env t
                    t' <- infer env t
                    catchError (eq env a b)
                      (\s -> throwError (s ++ "\nt: " ++ show t' ++ "\na: " ++ show a))
-- | Evaluate a term to weak-head normal form.  Variables resolve via
-- the environment: a 'Cloj' definition is evaluated, a plain 'Id'
-- becomes a neutral variable.  Eliminators dispatch to their helpers
-- ('evalApp', 'evalSplit', 'evalCase', 'force', 'unfold'), which block
-- as neutrals when the scrutinee is neutral.
eval :: (MonadError String m, Eq n)
     => Env' n a -> Term n a -> m (Value n a)
eval env (Var x) = case def env x of
  Cloj tm -> eval env tm
  Id v -> return (Neutral (NVar v))
eval _ Type = return VType
eval _ (Pi v s) = return (VPi v s)
-- A lambda must carry its annotation to become a value.
eval _ (Lam v s) = do v' <- sequence $
                            fmap (maybe
                                   (throwError "eval: un-annotated Lam")
                                   return)
                                 v
                      return (VLam v' s)
-- Let: instantiate the body with the (recursively instantiated)
-- definitions and keep evaluating.
eval env (Let _ p s) = let inst = instantiateName es
                           es = inst . defs p
                       in eval env (inst s)
eval env (App t u) = flip (evalApp env) u =<< eval env t
eval _ (Sigma v s) = return (VSigma v s)
eval _ (Pair l r) = return (VPair l r)
eval env (Split t xy s) = flip (evalSplit env xy) s =<< eval env t
eval _ (Enum ls) = return (VEnum ls)
eval _ (Label l) = return (VLabel l)
eval env (Case t as) = flip (evalCase env) as =<< eval env t
eval _ (Lift t) = return (VLift t)
eval _ (Box t) = return (VBox (Boxed t))
eval env (Force t) = force env =<< eval env t
eval _ (Rec t) = return (VRec t)
eval _ (Fold t) = return (VFold t)
eval env (Unfold t s) = flip (unfold env (name t)) s =<< eval env (extract t)
eval _ BeleiveMe = return VBeleiveMe
-- | Apply an evaluated function value to an (unevaluated) argument
-- term; a neutral head blocks as 'NApp'.
evalApp :: (MonadError String m, Eq n)
        => Env' n a -> Value n a -> Term n a -> m (Value n a)
evalApp env fun arg = case fun of
  VLam _ s  -> eval env (instantiate1Name arg s)
  Neutral t -> return (Neutral (NApp t arg))
  _         -> throwError ("evalApp: function expected")
-- | Eliminate an evaluated pair with a split, substituting the two
-- components into the body; a neutral scrutinee blocks as 'NSplit'.
evalSplit :: (MonadError String m, Eq n)
          => Env' n a
          -> (n,n)
          -> Value n a
          -> Scope (Name n Tup) (Term n) a
          -> m (Value n a)
evalSplit env _ (VPair l r) s = do
  eval env (instantiateName (\case {Fst -> l;Snd -> r}) s)
evalSplit _ xy (Neutral n) s = return (Neutral (NSplit n xy s))
evalSplit _ _ _ _ = throwError "evalSplit: Pair expected"
-- | Dispatch an evaluated scrutinee to the matching case branch; a
-- neutral scrutinee blocks as 'NCase'.
evalCase :: (MonadError String m, Eq n)
         => Env' n a
         -> Value n a
         -> [(n,Term n a)]
         -> m (Value n a)
evalCase env scrut branches = case scrut of
  VLabel l ->
    maybe (throwError "evalCase: case not matched")
          (eval env)
          (lookup l branches)
  Neutral n -> return (Neutral (NCase n branches))
  _ -> throwError "evalCase: Label expected"
-- | Force a boxed value, evaluating its suspended contents; a neutral
-- blocks as 'NForce'.
force :: (MonadError String m, Eq n)
      => Env' n a
      -> Value n a
      -> m (Value n a)
force env (VBox (Boxed c)) = eval env c
force _ (Neutral n) = return (Neutral (NForce n))
force _ _ = throwError "force: Box expected"
-- | Unfold an evaluated 'VFold', substituting the folded contents into
-- the continuation scope @b@; a neutral blocks as 'NUnfold'.
unfold :: (MonadError String m, Eq n)
       => Env' n a
       -> n
       -> Value n a
       -> Scope (Name n ()) (Term n) a
       -> m (Value n a)
unfold env _ (VFold c) b = eval env (instantiate1Name c b)
unfold _ n (Neutral n') b = return (Neutral (NUnfold (Name n n') b))
unfold _ _ _ _ = throwError "unfold: Fold expected"
-- | The body (definition) of the @i@-th binding of a let program.
defs :: Prog n a -> Int -> Scope (Name n Int) (Term n) a
defs ps i = snd (extract (ps !! i))
-- | Definitional equality, parameterised over the syntactic category
-- being compared (terms, values, neutrals, boxed terms).
class Equal f where
  eq :: (MonadError String m, Eq a, Eq n, Show n, Show a) => Env' n a -> f n a -> f n a -> m ()
-- | Terms are compared by first evaluating both sides to values.
instance Equal Term where
  eq env t1 t2 = do
    e1 <- eval env t1
    e2 <- eval env t2
    eq env e1 e2
-- | Structural equality on weak-head values, descending under binders
-- by extending the environment; the final clause falls back to plain
-- syntactic equality (covers 'VType', 'VEnum', 'VLabel', etc.).
instance Equal Value where
  eq env (Neutral n1) (Neutral n2) = eq env n1 n2
  eq env (VPi t0 s0) (VPi t1 s1) = do
    eq env (extract t0) (extract t1)
    let env' = extendEnvQ env (extract t0)
    eq env' (fromScope s0) (fromScope s1)
  -- Lambdas: domains are assumed equal (checked elsewhere); compare
  -- bodies under a fresh variable of the first lambda's domain.
  eq env (VLam v s0) (VLam _ s1) = do
    let env' = extendEnvQ env (extract v)
    eq env' (fromScope s0) (fromScope s1)
  eq env (VSigma t0 s0) (VSigma t1 s1) = do
    eq env (extract t0) (extract t1)
    let env' = extendEnvQ env (extract t0)
    eq env' (fromScope s0) (fromScope s1)
  eq env (VPair u0 t0) (VPair u1 t1) =
    eq env u0 u1 >>
    eq env t0 t1
  eq env (VLift u0) (VLift u1) = eq env u0 u1
  eq env (VBox u0) (VBox u1) = eq env u0 u1
  eq env (VRec u0) (VRec u1) = eq env u0 u1
  eq env (VFold u0) (VFold u1) = eq env u0 u1
  eq _ v0 v1 | v0 == v1 = return ()
             | otherwise = throwError ("eq: Different values:" ++
                                       "\nv0: " ++ show v0 ++
                                       "\nv1: " ++ show v1)
-- | Equality of blocked (neutral) spines: heads and every eliminator in
-- the spine must match pairwise.
instance Equal Neutral where
  eq _ (NVar i0) (NVar i1)
    | i0 == i1 = return ()
    | otherwise = throwError ("eq: Different variables: " ++ show (i0,i1))
  eq env (NApp t0 u0) (NApp t1 u1) =
    eq env t0 t1 >>
    eq env u0 u1
  eq env (NSplit t0 _ u0) (NSplit t1 _ u1) = do
    eq env t0 t1
    -- Compare branch bodies under fresh bound variables whose types are
    -- never consulted (the context errors if they are).
    let env' = Env (unvar (\n -> error (show (name n) ++ " undefined")) (fmap F . ctx env))
                   (unvar (Id . B) (fmap F . def env))
    eq env' (fromScope u0) (fromScope u1)
  eq env (NCase t0 as0) (NCase t1 as1) = do
    eq env t0 t1
    -- Branches must agree pairwise, label for label, in order.
    let eqBranches [] [] = return ()
        eqBranches ((l0,u0):lsu0) ((l1,u1):lsu1)
          | l0 == l1 = eq env u0 u1 >> eqBranches lsu0 lsu1
        eqBranches _ _ = throwError "eq: Case: branches differ"
    eqBranches as0 as1
  eq env (NForce t0) (NForce t1) = eq env t0 t1
  eq env (NUnfold t0 u0) (NUnfold t1 u1) = do
    eq env (extract t0) (extract t1)
    let env' = Env (unvar (\n -> error ("eq Neutral: " ++ show (name n) ++ " undefined")) (fmap F . ctx env))
                   (unvar (Id . B) (fmap F . def env))
    eq env' (fromScope u0) (fromScope u1)
  eq _ n0 n1 = throwError ("eq: Different Neutrals: \nn0\n" ++ show n0 ++ "\nn1:\n" ++ show n1)
-- | Boxed (suspended) terms are compared syntactically via 'eqBox'
-- rather than by evaluation.
instance Equal Boxed where
  eq env (Boxed t0) (Boxed t1) = eqBox env t0 t1
-- | Syntactic equality of suspended (boxed) terms: instead of fully
-- evaluating, compare structurally, unfolding variable definitions and
-- lets on demand; bodies under binders are compared as boxes again.
eqBox :: (MonadError String m, Eq a, Eq n, Show n, Show a)
      => Env' n a
      -> Term n a -> Term n a
      -> m ()
eqBox env (Var i0) (Var i1)
  | i0 == i1 = return ()
  | otherwise = do
      -- Different variables may still be definitionally equal through
      -- their definitions.
      let ei0 = def env i0
          ei1 = def env i1
      case (ei0,ei1) of
        (Id j0, Id j1) -> unless (j0 == j1) (throwError "eqBox: Different variables")
        (Cloj t0, Cloj t1) -> eq env t0 t1
        _ -> throwError "eqBox: Variable vs Neutral"
-- Lets are expanded on either side before comparing.
eqBox env (Let _ p t) c =
  let inst = instantiateName es
      es = inst . defs p
  in eq env (inst t) c
eqBox env c c'@(Let _ _ _) = eqBox env c' c
eqBox env (Pi t0 u0) (Pi t1 u1) = do
  eqBox env (extract t0) (extract t1)
  let env' = extendEnvQ env (extract t0)
  eq env' (Boxed (fromScope u0)) (Boxed (fromScope u1))
eqBox env (Sigma t0 u0) (Sigma t1 u1) = do
  eqBox env (extract t0) (extract t1)
  let env' = extendEnvQ env (extract t0)
  eq env' (Boxed (fromScope u0)) (Boxed (fromScope u1))
eqBox env (Lam t0 u0) (Lam t1 u1) = do
  -- Both lambdas must be annotated and the annotations must agree.
  v' <- runMaybeT $ do t0' <- MaybeT (return (extract t0))
                       t1' <- MaybeT (return (extract t1))
                       lift $ eq env t0' t1'
                       return t0'
  case v' of
    Just v'' -> let env' = extendEnvQ env v'' in eq env' (fromScope u0) (fromScope u1)
    _ -> throwError "eqBox: un-annotated Lambda"
eqBox env (App t0 u0) (App t1 u1) = eqBox env t0 t1 >> eqBox env u0 u1
eqBox env (Pair t0 u0) (Pair t1 u1) = eqBox env t0 t1 >> eqBox env u0 u1
eqBox env (Split t0 _ s0) (Split t1 _ s1) = do
  eqBox env t0 t1
  let env' = Env (unvar (\n -> error ("eqBox Split: " ++ show (name n) ++ " undefined")) (fmap F . ctx env))
                 (unvar (Id . B) (fmap F . def env))
  eq env' (Boxed (fromScope s0)) (Boxed (fromScope s1))
-- NOTE(review): branches are compared pairwise in order; 'zipWithM_'
-- silently ignores any length mismatch between the branch lists.
eqBox env (Case t0 as0) (Case t1 as1) =
  eqBox env t0 t1 >>
  zipWithM_ (\(l0,t0') (l1,t1') ->
              if l0 == l1 then eqBox env t0' t1'
                          else throwError "eqBox Case"
            ) as0 as1
eqBox env (Lift t0) (Lift t1) = eqBox env t0 t1
eqBox env (Box t0) (Box t1) = eqBox env t0 t1
eqBox env (Force t0) (Force t1) = eqBox env t0 t1
eqBox env (Rec t0) (Rec t1) = eqBox env t0 t1
eqBox env (Fold t0) (Fold t1) = eqBox env t0 t1
eqBox env (Unfold t0 u0) (Unfold t1 u1) = do
  eqBox env (extract t0) (extract t1)
  let env' = Env (unvar (\n -> error ("eqBox Unfold: " ++ show (name n) ++ " undefined")) (fmap F . ctx env))
                 (unvar (Id . B) (fmap F . def env))
  eq env' (Boxed (fromScope u0)) (Boxed (fromScope u1))
eqBox _ t0 t1 | t0 == t1 = return () -- Type, Label, Enum
              | otherwise = throwError "eqBox: Different terms"
-- | Smart constructor for a (mutually recursive) let from a list of
-- (name, type, body) triples, abstracting occurrences of the bound
-- names in types, bodies and the final body @b@.
let_ :: Eq a => [(a,Type a a,Type a a)] -> Term a a -> Term a a
let_ [] b = b
let_ bs b = Let (length bs) (map mkP bs) (abstr b)
  where
    as = map (\(x,_,_) -> x) bs
    abstr = abstractName (`elemIndex` as)
    mkP (nm,ty,tm) = Name nm (abstr ty,abstr tm)
-- | Un-annotated lambda binding @v@ in @e@.
lam :: Eq a => a -> Term a a -> Term a a
lam v e = Lam (Name v Nothing) (abstract1Name v e)
-- | Lambda annotated with its domain type @t@.
lam' :: Eq a => a -> Type a a -> Term a a -> Term a a
lam' v t e = Lam (Name v (Just t)) (abstract1Name v e)
-- | Dependent function type binding @v : b@ in @e@.
pi_ :: Eq a => (a,Type a a) -> Term a a -> Term a a
pi_ (v,b) e = Pi (Name v b) (abstract1Name v e)
-- | Dependent pair type binding @v : b@ in @e@.
sigma :: Eq a => (a,Type a a) -> Term a a -> Term a a
sigma (v,b) e = Sigma (Name v b) (abstract1Name v e)
-- | Left-associated application of a function term to a list of
-- argument terms.
app :: Term n a -> [Term n a] -> Term n a
app = foldl App
-- | Pair eliminator binding the components as @x@ and @y@ in @u@.
split :: Eq a => Term a a -> (a,a) -> Term a a -> Term a a
split t (x,y) u = Split t (x,y) (abstractName (\z -> if z == x then Just Fst else if z == y then Just Snd else Nothing) u)
-- | Unfold eliminator binding the unfolded value as @v@ in @u@.
unfold_ :: Eq a => Term a a -> a -> Term a a -> Term a a
unfold_ t v u = Unfold (Name v t) (abstract1Name v u)
-- | Shorthand: unfold a term and return the unfolded value itself.
unfold' :: (Eq a, IsString a) => Term a a -> Term a a
unfold' t = unfold_ t "x_unfold" "x_unfold"
-- | Non-dependent function arrow (a Pi with an unused, empty-named
-- binder).
(->-) :: Term String String -> Term String String -> Term String String
(->-) t = pi_ ("",t)
infixr 5 ->-
-- | Non-dependent product (a Sigma with an unused, empty-named binder).
(-*-) :: Term String String -> Term String String -> Term String String
(-*-) t = sigma ("",t)
infixr 4 -*-
-- | Example program: Leibniz equality ('Eq' and 'refl'), instantiated
-- at an abstract type, with the final reference @t0@ as the checked
-- body.
test :: Term String String
test = let_ [("Eq"
             ,pi_ ("a",Type) (pi_ ("","a") (pi_ ("","a") Type))
             ,lam' "a" Type (lam' "x" "a" (lam' "y" "a" (pi_ ("P",pi_ ("","a") Type) (pi_ ("",App "P" "x") (App "P" "y"))))))
            ,("refl"
             ,pi_ ("a",Type) (pi_ ("x","a") (app "Eq" ["a","x","x"]))
             ,lam' "a" Type (lam' "x" "a" (lam' "P" (pi_ ("","a") Type) (lam' "px" (App "P" "x") "px")))
             )
            ,("A"
             ,Type
             ,Type
             )
            ,("a"
             ,"a"
             ,Type
             )
            ,("b"
             ,"A"
             ,"a"
             )
            ,("t0"
             ,app "Eq" ["A","a","b"]
             ,app "refl" ["A","a"]
             )
            ]
            (Var "t0")
-- | The empty type, encoded as an enum with no labels.
empty :: [(String,Type String String,Term String String)]
empty = [("Empty",Type,Enum [])]
-- | The unit type, encoded as a one-label enum.
unit :: [(String,Type String String,Term String String)]
unit = [("Unit",Type,Enum ["unit"])]
-- | Booleans as a two-label enum, plus @T@ reflecting a boolean into a
-- type (Unit for true, Empty for false).
bool :: [(String,Type String String,Term String String)]
bool = empty ++ unit ++
  [("Bool",Type,Enum ["true","false"])
  ,("T"
   ,"Bool" ->- Type
   ,lam' "b" ("Bool")
         (Case (Var "b") [("true","Unit")
                         ,("false","Empty")])
   )
  ]
-- | Natural numbers as a recursive labelled sum (z | s Nat), with
-- decidable equality 'eqNat', its propositional reflection 'EqNat',
-- and proofs that it is reflexive, substitutive, symmetric and
-- transitive.
nat :: [(String,Type String String,Term String String)]
nat = bool ++
  [("Nat"
   ,Type
   ,sigma ("l",Enum ["z","s"]) (Case "l" [("z","Unit")
                                         ,("s",Rec (Box "Nat"))])
   )
  ,("zero"
   ,"Nat"
   ,Pair (Label "z") (Label "unit"))
  ,("suc"
   ,"Nat" ->- "Nat"
   ,lam' "n" "Nat" $ Pair (Label "s") (Fold "n")
   )
  ,("eqNat"
   ,"Nat" ->- "Nat" ->- "Bool"
   ,lam' "m" "Nat" $
    lam' "n" "Nat" $
    split "m" ("lm","m'") $
    split "n" ("ln","n'") $
    Force (Case "lm" [("z",Case "ln" [("z",Box $ Label "true")
                                     ,("s",Box $ Label "false")])
                     ,("s",Case "ln" [("z",Box $ Label "false")
                                     ,("s",Box $ app "eqNat" [unfold' "m'",unfold' "n'"])
                                     ])
                     ])
   )
  ,("EqNat"
   ,"Nat" ->- "Nat" ->- Type
   ,lam' "m" "Nat" $ lam' "n" "Nat" $ App "T" (app "eqNat" ["m","n"])
   )
  ,("reflNat"
   ,pi_ ("n","Nat") (app "EqNat" ["n","n"])
   ,lam' "n" "Nat" $ split "n" ("nl","n'") $
    Force (Case "nl" [("z",Box (Label "unit"))
                     ,("s",Box (App "reflNat" (unfold' "n'")))
                     ])
   )
  ,("substNat"
   ,pi_ ("P","Nat" ->- Type) $ pi_ ("m","Nat") $ pi_ ("n","Nat") $
    app "EqNat" ["m","n"] ->- App "P" "m" ->- App "P" "n"
   ,lam' "P" ("Nat" ->- Type) $ lam' "m" "Nat" $ lam' "n" "Nat" $
    lam' "q" (app "EqNat" ["m","n"]) $ lam' "x" (App "P" "m") $
    split "m" ("lm","m'") $
    split "n" ("ln","n'") $ Force $
    Case "lm" [("z",Case "ln" [("z",Case "m'" [("unit",Case "n'" [("unit",Box "x")])])
                              ,("s",Case "q" [])
                              ])
              ,("s",Case "ln" [("z",Case "q" [])
                              ,("s",Box $
                                    unfold_ "m'" "m'" $
                                    unfold_ "n'" "n'" $
                                    app "substNat" [lam' "i" "Nat" (App "P" (App "suc" "i"))
                                                   ,"m'","n'","q","x"])])]
   )
  ,("symNat"
   ,pi_ ("m","Nat") $ pi_ ("n","Nat") $ app "EqNat" ["m","n"] ->-
    app "EqNat" ["n","m"]
   ,lam' "m" "Nat" $ lam' "n" "Nat" $ lam' "p" (app "EqNat" ["m","n"]) $
    app "substNat" [lam' "i" "Nat" $ app "EqNat" ["i","m"]
                   ,"m","n","p",App "reflNat" "m"]
   )
  ,("transNat"
   ,pi_ ("i","Nat") $ pi_ ("j","Nat") $ pi_ ("k","Nat") $
    app "EqNat" ["i","j"] ->- app "EqNat" ["j","k"] ->- app "EqNat" ["i","k"]
   ,lam' "i" "Nat" $ lam' "j" "Nat" $ lam' "k" "Nat" $
    lam' "p" (app "EqNat" ["i","j"]) $ lam' "q" (app "EqNat" ["j","k"]) $
    app "substNat" [lam' "x" "Nat" $ app "EqNat" ["i","x"]
                   ,"j","k","q","p"
                   ]
   )
  ]
-- | Finite sets: @Fin n@ has exactly @n@ inhabitants, by structural
-- recursion on @n@.
fin :: [(String,Type String String,Term String String)]
fin = nat ++
  [("Fin"
   ,"Nat" ->- Type
   ,lam' "n" "Nat" $ split "n" ("ln","n'") $ Force $
    Case "ln" [("z",Box "Empty")
              ,("s",Box (sigma ("l",Enum ["z","s"]) $
                         Case "l" [("z","Unit")
                                  ,("s",App "Fin" (unfold' "n'"))]))
              ]
   )
  ]
-- | Length-indexed vectors by recursion on the index, with a total
-- lookup 'nth' indexed by 'Fin'.
vec :: [(String,Type String String,Term String String)]
vec = fin ++
  [("Vec"
   ,"Nat" ->- Type ->- Type
   ,lam' "m" "Nat" $ lam' "a" Type $ split "m" ("lm","m'") $ Force $
    Case "lm" [("z",Box "Unit")
              ,("s",Box ("a" -*- app "Vec" [unfold' "m'","a"]))
              ]
   )
  ,("nth"
   ,pi_ ("a",Type) $ pi_ ("n","Nat") $ app "Vec" ["n","a"] ->- App "Fin" "n" ->- "a"
   ,lam' "a" Type $ lam' "n" "Nat" $ lam' "xs" (app "Vec" ["n","a"]) $ lam' "i" (App "Fin" "n") $
    split "n" ("ln","n'") $ Force $
    Case "ln" [("z",Case "i" [])
              ,("s",split "xs" ("x","xs'") $ split "i" ("li","i'") $
                    Case "li" [("z",Box "x")
                              ,("s",Box (app "nth" ["a",unfold' "n'", "xs'", "i'"]))
                              ]
               )
              ]
   )
  ]
-- | Alternative 'Fin' encoding that carries explicit 'EqNat' proofs
-- instead of recursing structurally on the index.
fin' :: [(String,Type String String,Term String String)]
fin' = nat ++
  [("Fin"
   ,"Nat" ->- Type
   ,lam' "n" "Nat" $ sigma ("l",Enum ["z","s"]) $
    Case "l" [("z",sigma ("n'","Nat") (app "EqNat" [App "suc" "n'","n"]))
             ,("s",sigma ("n'","Nat") (Rec (Box (App "Fin" "n'")) -*- app "EqNat" [App "suc" "n'","n"]))
             ]
   )
  ]
-- | Proof-indexed vectors: constructors carry 'EqNat' evidence for
-- their length, with 'nil', 'cons', and total 'lookup' and 'tail'
-- written by transporting along those proofs with 'substNat'.
vec' :: [(String,Type String String,Term String String)]
vec' = fin' ++
  [("Vec"
   ,"Nat" ->- Type ->- Type
   ,lam' "n" "Nat" $ lam' "A" Type $ sigma ("l",Enum ["nil","cons"]) $
    Case "l" [("nil",app "EqNat" ["zero","n"] )
             ,("cons",sigma ("n'","Nat") ("A" -*- Rec (Box (app "Vec" ["n'","A"])) -*- app "EqNat" [App "suc" "n'","n"]))
             ]
   )
  ,("nil"
   ,pi_ ("A",Type) $ app "Vec" ["zero","A"]
   ,lam' "A" Type $ Pair (Label "nil") (App "reflNat" "zero")
   )
  ,("cons"
   ,pi_ ("n","Nat") $ pi_ ("A",Type) $ "A" ->- app "Vec" ["n","A"] ->- app "Vec" [App "suc" "n","A"]
   ,lam' "n" "Nat" $ lam' "A" Type $ lam' "a" "A" $ lam' "v" (app "Vec" ["n","A"]) $
    Pair (Label "cons") (Pair "n" (Pair "a" (Pair (Fold "v") (App "reflNat" (App "suc" "n")))))
   )
  ,("lookup"
   ,pi_ ("A",Type) $ pi_ ("n","Nat") $ (App "Fin" "n") ->- (app "Vec" ["n","A"]) ->- "A"
   ,lam' "A" Type $ lam' "n" "Nat" $ lam' "i" (App "Fin" "n") $ lam' "xs" (app "Vec" ["n","A"]) $
    split "i" ("li","i'") $ Force $
    Case "li" [("s",split "i'" ("fn","i''") $ split "i''" ("fr","feq") $
                    split "xs" ("xc","xs'") $
                    Case "xc" [("cons",split "xs'" ("xn","xs''") $ split "xs''" ("xa","xs3") $
                                       split "xs3" ("xr","xseq") $ Box $
                                       app "lookup" ["A","fn",(unfold' "fr"),
                                                     app "substNat" [lam' "k" "Nat" (app "Vec" ["k","A"])
                                                                    ,"xn","fn"
                                                                    ,app "transNat" [App "suc" "xn","n",App "suc" "fn"
                                                                                    ,"xseq"
                                                                                    ,app "symNat" [App "suc" "fn","n","feq"]
                                                                                    ]
                                                                    ,unfold' "xr"]])
                              ,("nil",Case (app "substNat" [lam' "k" "Nat" (app "EqNat" ["zero","k"]),"n",App "suc" "fn"
                                                           ,app "symNat" [App "suc" "fn","n","feq"]
                                                           ,"xs'"]) [])])
              ,("z",split "i'" ("fn","feq") $ split "xs" ("xc","xs'") $
                    Case "xc" [("cons",split "xs'" ("xn","xbs") $ split "xbs" ("xa","xb") $ Box "xa")
                              ,("nil" ,Case (app "substNat" [lam' "k" "Nat" (app "EqNat" ["zero","k"]),"n",App "suc" "fn"
                                                            ,app "symNat" [App "suc" "fn","n","feq"]
                                                            ,"xs'"]) [])
                              ])
              ]
   )
  ,("tail"
   ,pi_ ("n","Nat") $ pi_ ("a",Type) $ app "Vec" [App "suc" "n","a"] ->- app "Vec" ["n","a"]
   ,lam' "n" "Nat" $ lam' "a" Type $ lam' "as" (app "Vec" [App "suc" "n","a"]) $
    split "as" ("l","as'") $ Force $
    Case "l" [("cons",split "as'" ("m","abs") $ split "abs" ("a'","bsn") $
                      split "bsn" ("bs","n'") $
                      Box (app "substNat" [lam' "x" "Nat" (app "Vec" ["x","a"])
                                          ,"m"
                                          ,"n"
                                          ,"n'"
                                          ,unfold' "bs"]
                          ))
             ,("nil",Case "as'" [])
             ]
   )
  ]
-- | Type check 'tail' for the proof-indexed vectors of 'vec''.
test1 :: Term String String
test1 = let_ vec' "tail"
| christiaanb/DepCore | Test.hs | bsd-2-clause | 31,990 | 0 | 25 | 11,652 | 15,096 | 7,598 | 7,498 | 702 | 8 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett and Dan Doel 2013-2014
-- License : BSD3
-- Maintainer: Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Ermine.Syntax.Id
( Id(..)
, AsId(..)
) where
import Control.Applicative
import Control.Lens
import qualified Data.Binary as Binary
import Data.Binary (Binary)
import Data.Bytes.Get
import Data.Bytes.Put
import Data.Bytes.Serial
import Data.Data
import Data.Hashable
import Data.Serialize (Serialize)
import qualified Data.Serialize as Serialize
import Ermine.Syntax.Head
import Ermine.Syntax.Global
import GHC.Generics
------------------------------------------------------------------------------
-- Id
------------------------------------------------------------------------------
-- | An identifier: either a reference to a 'Global' name or to a
-- typeclass instance 'Head'.  Both fields are strict.
data Id
  = GlobalId !Global
  | InstanceId !Head
  deriving (Show,Read,Eq,Ord,Typeable,Data,Generic)
-- _Global = _GlobalId
-- | Types that contain an 'Id'.  Minimal complete definition: '_Id';
-- '_InstanceId' defaults to composing through it.
class AsGlobal t => AsId t where
  _Id :: Prism' t Id
  _InstanceId :: Prism' t Head
  _InstanceId = _Id._InstanceId
-- | '_Global' focuses on the 'GlobalId' constructor.
instance AsGlobal Id where
  _Global = prism GlobalId $ \xs -> case xs of
    GlobalId x -> Right x
    _ -> Left xs
-- | The identity instance: '_Id' is the identity prism, and
-- '_InstanceId' focuses on the 'InstanceId' constructor directly.
instance AsId Id where
  _Id = id
  _InstanceId = prism InstanceId $ \xs -> case xs of
    InstanceId x -> Right x
    _ -> Left xs
-- | Hashing via the derived 'Generic' representation (default method).
instance Hashable Id
-- | Wire format: a one-byte constructor tag (0 for 'GlobalId', 1 for
-- 'InstanceId') followed by the payload's own serialisation.
instance Serial Id where
  serialize i = case i of
    GlobalId g   -> putWord8 0 >> serialize g
    InstanceId h -> putWord8 1 >> serialize h
  deserialize = do
    b <- getWord8
    case b of
      0 -> GlobalId <$> deserialize
      1 -> InstanceId <$> deserialize
      _ -> fail $ "get Id: Unexpected constructor code: " ++ show b
-- | Delegates to the 'Serial' instance so all formats share one layout.
instance Binary Id where
  get = deserialize
  put = serialize
-- | Delegates to the 'Serial' instance so all formats share one layout.
instance Serialize Id where
  get = deserialize
  put = serialize
| ekmett/ermine | src/Ermine/Syntax/Id.hs | bsd-2-clause | 2,050 | 0 | 12 | 362 | 480 | 265 | 215 | 56 | 0 |
module Traffic where
import Data.Map (Map)
import qualified Data.Map as M
import Data.List (sortBy)
import Data.Maybe (fromJust)
import Data.Ord (comparing)
import System.Random
import Text.Printf
import Test.QuickCheck
import Debug.Trace
-- | 2-D world coordinates.
type Position = (Double,Double)
-- | Scalar speed, in distance units per simulation step.
type Speed = Double
-- | Map from (origin, destination) pairs to the route's speed limit.
type Route = Map (Location,Location) Speed
-- | A named point in the world.
data Location = Location {
      position :: Position
    , name :: String
    } deriving (Eq,Ord,Show)
-- | A car travelling along a single route segment, tracked by its
-- remaining distance to the segment's destination.
data Car = Car {
      distanceToDestination :: Double
    , speed :: Speed
    , route :: (Location,Location)
    } deriving (Eq,Show)
-- | The whole simulation state.
data Environment = Environment {
      locations :: [Location]
    , routes :: Route
    , cars :: [Car]
    , noise :: [Double] -- infinite list of randomness
    } deriving (Show)
-- | Points on a circle of radius 100 centred at (128,128), one every
-- pi/15 radians from 0 to 2*pi, all named \"X\".
createLocations :: [Location]
createLocations =
  [ Location (100 * cos theta + 128, 100 * sin theta + 128) "X"
  | theta <- [0,(pi/15) .. (2*pi)] ]
-- | Connect each location to its successor (cyclically), giving every
-- one-way segment a speed limit of 5.
-- NOTE(review): 'tail' makes this partial for an empty location list.
makeRoutes :: [Location] -> Route
makeRoutes locations = M.fromList (zip (zip locations (cycle $ tail locations)) (repeat 5))
-- | One car per route: placed 1.0 unit from its destination, at speed
-- 1.0.
makeCars :: Route -> [Car]
makeCars routeMap =
  [ Car 1.0 1.0 endpoints | (endpoints, _) <- M.toList routeMap ]
-- | Build a bidirectional route map: every supplied segment is entered
-- in both directions with the same speed limit.
createRoutes :: [((Location,Location), Speed)] -> Route
createRoutes r = M.fromList $ concatMap (\((x,y),s) -> [((x,y),s), ((y,x),s)]) r
-- | The initial simulation environment: a ring of locations joined into
-- a cycle of routes, one car per route, and an infinite stream of
-- pseudo-random values from a fixed seed (so runs are reproducible).
createEnvironment :: Environment
createEnvironment = Environment {
      locations = ls
    , routes = rs
    , cars = makeCars rs
    , noise = randoms (mkStdGen 100)
    }
  where
    -- Share the location ring and route map; the original recomputed
    -- 'createLocations' three times and 'makeRoutes' twice.
    ls = createLocations
    rs = makeRoutes ls
{- Actual Logic of simulation -}
-- | Advance the simulation one step.  Each car consumes one noise value
-- in 'updateCars'; the shared noise stream is advanced past the
-- consumed prefix so the next step sees fresh values.
update :: Environment -> Environment
update env = env' { cars = updateCars env (cars env) }
  where
    env' = env { noise = drop (length (cars env)) (noise env) }
-- | All other cars travelling on the same route segment as @car@.
carsOnRoute :: Car -> [Car] -> [Car]
carsOnRoute car others =
  [ c | c <- others, route c == route car, c /= car ]
-- | Step every car once, pairing each car positionally with its own
-- sample from the environment's noise stream.
updateCars :: Environment -> [Car] -> [Car]
updateCars env cs = zipWith (\c n -> updateCar env n c) cs (noise env)
-- | Step one car: move it along its route, then adjust its speed; @d@
-- is this car's noise sample for the step.
updateCar :: Environment -> Double -> Car -> Car
updateCar env d car = updateCarSpeed env d (updateCarPosition env d car)
-- | Adjust a car's speed relative to the nearest car it is following,
-- nudged by the noise sample @d@ and capped by the route's speed limit.
-- NOTE(review): 'nearestCars' selects cars with *greater* remaining
-- distance (i.e. behind this car on the segment) yet is used as
-- "carAhead", and the car speeds up when the gap is small (< 3) and
-- slows when it is large (> 3) — confirm whether the comparison or the
-- naming is inverted.  'fromJust' assumes the car's route exists in the
-- route map.
updateCarSpeed :: Environment -> Double -> Car -> Car
updateCarSpeed env d car | null nearestCars = car
                         | distanceBetween < 3 = car { speed = min maxSpeed (speed car * (1 + d*0.01)) }
                         | distanceBetween > 3 = car { speed = max 0.1 (speed car * (1 - d*0.01)) }
                         | otherwise = car
  where
    maxSpeed = min maximumAhead (fromJust $ M.lookup (route car) (routes env))
    nearestCars = filter (\x -> distanceToDestination x > (distanceToDestination car))
                         $ sortBy (comparing distanceToDestination) (carsOnRoute car (cars env))
    carAhead = head nearestCars
    maximumAhead = ((speed carAhead + distanceToDestination carAhead) - distanceToDestination car)
    distanceBetween = distanceToDestination (head nearestCars) - distanceToDestination car
-- | Advance a car by its speed.  When it reaches (or overshoots) the
-- destination, hand over to 'updateLocation' to pick the next segment;
-- the overshoot amount is discarded there.
updateCarPosition :: Environment -> Double -> Car -> Car
updateCarPosition env choice car | distanceToGo <= 0 = updateLocation env choice car
                                 | otherwise = car { distanceToDestination = distanceToGo }
  where
    distanceToGo = distanceToDestination car - speed car
-- | Move an arrived car onto a new segment: choose a destination
-- reachable from the current 'finish' and reset the remaining distance
-- to the full length of the new segment (any overshoot from the
-- previous step is dropped).
updateLocation :: Environment -> Double -> Car -> Car
updateLocation env choice car = car {
      distanceToDestination = distanceToGo
    , route = (finish,newDestination)
    }
  where
    (start,finish) = route car  -- 'start' is unused
    newDestination = chooseNewDestination env choice finish
    distanceToGo = distanceBetween (position finish) (position newDestination)
-- | Pick a pseudo-random successor of location @s@, using @choice@ to
-- index the outgoing routes.
-- NOTE(review): '!!' fails if @s@ has no outgoing routes, and a choice
-- of exactly 1.0 would index one past the end; callers feed values from
-- 'randoms', which lie in [0,1).
chooseNewDestination :: Environment -> Double -> Location -> Location
chooseNewDestination env choice s = snd $ fst (choices !! truncate (choice * realToFrac (length choices)))
  where
    choices = filter (\((x,_),_) -> x == s) (M.toList (routes env))
-- | World position of a car, linearly interpolated along its current
-- segment from the fraction of the segment already covered.
carPosition :: Car -> Position
carPosition (Car d _ (start,finish)) = (x1+p*(x2-x1), y1+p*(y2-y1))
  where
    s@(x1,y1) = position start
    e@(x2,y2) = position finish
    p = 1 - (d / distanceBetween s e)  -- fraction travelled so far
-- | Euclidean distance between two points.
distanceBetween :: Position -> Position -> Double
distanceBetween (ax, ay) (bx, by) = sqrt (dx ^ 2 + dy ^ 2)
  where
    dx = ax - bx
    dy = ay - by
{- Functions for manipulating the environment -}
-- | Apply an adjustment function to the speed limit of every route.
changeSpeedLimit :: (Speed -> Speed) -> Environment -> Environment
changeSpeedLimit adjust env = env { routes = M.map adjust (routes env) }
-- | Insert a fresh car on the first route in the map, 1.0 unit from its
-- destination at speed 1.0.
-- NOTE(review): 'head' makes this partial when there are no routes.
addCar :: Environment -> Environment
addCar e = e { cars = cars' }
  where
    cars' = Car 1.0 1.0 (s,f) : (cars e)
    ((s,f),_) = head (M.toList (routes e))
-- | Remove one car (the first in the list); a no-op when there are no
-- cars.
removeCar :: Environment -> Environment
removeCar e =
  case cars e of
    []         -> e { cars = [] }
    (_ : rest) -> e { cars = rest }
-- | Human-readable summary: the mean speed of all cars to 3 decimals.
-- NOTE(review): yields NaN for an empty car list (0/0).
stats :: Environment -> String
stats e = "Average speed: " ++ (printf "%.3f" avgSpeed)
  where
    c = cars e
    avgSpeed = sum (map speed c) / realToFrac (length c)
{- Testing code. -}
-- | Test helper: position of a car @d@ units away from the end of the
-- segment running from @s@ to @e@.
getCarLocation :: Double -> Position -> Position -> Position
getCarLocation d s e = carPosition (Car d 0 (Location s "Start",Location e "End"))
-- | Placing a car @d@ units from its destination and measuring the
-- distance again recovers @d@ (within floating-point tolerance).
-- Degenerate cases — coincident endpoints, or @d@ longer than the
-- segment — are vacuously true.
prop_distanceCorrect :: NonNegative Double -> Position -> Position -> Bool
prop_distanceCorrect (NonNegative d) s e | s == e = True -- prefer different positions!
                                         | abs d > dis = True
                                         | otherwise = abs (db - d) < 0.0001
  where
    dis = distanceBetween s e
    pos = getCarLocation d s e
    db = distanceBetween pos e
| fffej/haskellprojects | traffic/Traffic.hs | bsd-2-clause | 5,666 | 0 | 14 | 1,509 | 2,080 | 1,117 | 963 | 107 | 1 |
-- Copyright (c) 2017, Travis Bemann
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- o Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
--
-- o Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
--
-- o Neither the name of the copyright holder nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-- ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-- POSSIBILITY OF SUCH DAMAGE.
{-# LANGUAGE OverloadedStrings, OverloadedLists, RecordWildCards #-}
module Network.IRC.Client.Amphibian.IRCConnection
(Response(..),
Error(..),
IRCConnection,
IRCConnectionState(..),
IRCConnectionEvent(..),
IRCConnectionEventSub,
IRCMessage(..),
newIRCConnection,
startIRCConnection,
stopIRCConnection,
connectIRC,
disconnectIRC,
sendIRCMessage,
subscribeIRCConnection,
recvIRCConnection,
tryRecvIRCConnection,
getIRCConnectionState,
getIRCConnectionHostname,
getIRCConnectionAddress,
getIRCConnectionPort,
isIRCConnectionStateActive)
where
import Network.IRC.Client.Amphibian.Types
import Network.IRC.Client.Amphibian.Utility
import Network.IRC.Client.Amphibian.Connection
import qualified Data.ByteString as B
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Data.Sequence as S
import qualified Network.Socket as NS
import Data.Text.Encoding (encodeUtf8)
import Data.Monoid (mempty,
mappend)
import Data.List (init,
last)
import Data.Functor (fmap,
(<$>))
import Data.Foldable (foldl')
import Control.Monad ((=<<),
forM_)
import Data.Sequence ((|>))
import Data.Word (Word8)
import Control.Concurrent.Async (Async,
async,
cancel)
import Control.Exception (catch,
IOException,
SomeException)
import Control.Concurrent.STM (STM,
atomically,
orElse,
retry,
TVar,
newTVar,
readTVar,
writeTVar)
import Control.Concurrent.STM.TChan (TChan,
newBroadcastTChan,
dupTChan,
writeTChan,
tryReadTChan,
readTChan)
import Control.Concurrent.STM.TQueue (TQueue,
newTQueue,
writeTQueue,
readTQueue,
tryReadTQueue,
isEmptyTQueue)
import Control.Concurrent.STM.TMVar (TMVar,
newEmptyTMVar,
putTMVar,
tryTakeTMVar,
tryReadTMVar,
takeTMVar,
readTMVar)
import Text.Printf (printf)
-- | Internal state threaded through the IRC connection's worker thread
-- (the public handle is 'IRCConnection'): the underlying transport
-- connection, the protocol state, the remote endpoint once known, the
-- subscription to transport events, and a byte buffer (presumably input
-- carried between reads; the parser is not in this module's view).
data IRCConnectionData = IRCConnectionData
  { ircConnectionConnection :: Connection,
    ircConnectionState :: IRCConnectionState,
    ircConnectionHostname :: Maybe NS.HostName,
    ircConnectionAddress :: Maybe NS.AddrInfo,
    ircConnectionPort :: Maybe NS.PortNumber,
    ircConnectionEventSub :: ConnectionEventSub,
    ircConnectionBuffer :: B.ByteString }
-- | Create a new IRC connection handle (not yet started): a running
-- flag, an action queue for requests, and a broadcast channel for
-- events.
newIRCConnection :: STM IRCConnection
newIRCConnection = do
  running <- newTVar False
  actionQueue <- newTQueue
  eventQueue <- newBroadcastTChan
  return IRCConnection { ircConnectionRunning = running,
                         ircConnectionActionQueue = actionQueue,
                         ircConnectionEventQueue = eventQueue }
-- | Start an IRC connection's worker.  Atomically test-and-sets the
-- running flag (so at most one worker is ever started), then creates
-- and starts the underlying transport 'Connection' and spawns the state
-- machine in @runUnconnected@.
startIRCConnection :: IRCConnection -> IO (Either Error ())
startIRCConnection ircConnection = do
  -- Flag flip happens in a single STM transaction; a concurrent second
  -- caller observes True and is rejected below.
  alreadyRunning <- atomically $ do
    running <- readTVar $ ircConnectionRunning ircConnection
    if not running
      then do writeTVar (ircConnectionRunning ircConnection) True
              return False
      else return True
  if not alreadyRunning
    then do
      connection <- atomically newConnection
      eventSub <- atomically $ subscribeConnection connection
      result <- startConnection connection
      case result of
        Right () -> do
          let state = IRCConnectionData { ircConnectionConnection = connection,
                                          ircConnectionState =
                                            IRCConnectionStarted,
                                          ircConnectionHostname = Nothing,
                                          ircConnectionAddress = Nothing,
                                          ircConnectionPort = Nothing,
                                          ircConnectionEventSub = eventSub,
                                          ircConnectionBuffer = B.empty }
          -- NOTE(review): the 'Async' handle is discarded, so the
          -- worker can only be stopped via 'stopIRCConnection'.
          async $ runUnconnected ircConnection state
          return $ Right ()
        failure -> return failure
    else return . Left $ Error "connection already started"
-- | Ask a running IRC connection to stop.  The returned 'Response' is
-- fulfilled once the worker handles the request; if the connection is not
-- running it is answered immediately with an error.
stopIRCConnection :: IRCConnection -> STM (Response ())
stopIRCConnection conn = do
  active <- readTVar (ircConnectionRunning conn)
  var <- newEmptyTMVar
  let reply = Response var
  if active
    then writeTQueue (ircConnectionActionQueue conn) (StopIRCConnection reply)
    else putTMVar var (Left (Error "connection not started"))
  return reply

-- | Ask a running IRC connection to connect to the given host and port.
connectIRC :: IRCConnection -> NS.HostName -> NS.PortNumber -> STM (Response ())
connectIRC conn hostname port = do
  active <- readTVar (ircConnectionRunning conn)
  var <- newEmptyTMVar
  let reply = Response var
  if active
    then writeTQueue (ircConnectionActionQueue conn)
           (ConnectIRC hostname port reply)
    else putTMVar var (Left (Error "connection not started"))
  return reply

-- | Ask a running IRC connection to disconnect from its peer.
disconnectIRC :: IRCConnection -> STM (Response ())
disconnectIRC conn = do
  active <- readTVar (ircConnectionRunning conn)
  var <- newEmptyTMVar
  let reply = Response var
  if active
    then writeTQueue (ircConnectionActionQueue conn) (DisconnectIRC reply)
    else putTMVar var (Left (Error "connection not started"))
  return reply

-- | Queue a message to be sent on a running IRC connection.
sendIRCMessage :: IRCConnection -> IRCMessage -> STM (Response ())
sendIRCMessage conn message = do
  active <- readTVar (ircConnectionRunning conn)
  var <- newEmptyTMVar
  let reply = Response var
  if active
    then writeTQueue (ircConnectionActionQueue conn)
           (SendIRCMessage message reply)
    else putTMVar var (Left (Error "connection not started"))
  return reply
-- | Create a new subscription to an IRC connection's broadcast event channel.
subscribeIRCConnection :: IRCConnection -> STM IRCConnectionEventSub
subscribeIRCConnection conn =
  fmap IRCConnectionEventSub (dupTChan (ircConnectionEventQueue conn))

-- | Block until the next event arrives on a subscription.
recvIRCConnection :: IRCConnectionEventSub -> STM IRCConnectionEvent
recvIRCConnection (IRCConnectionEventSub chan) = readTChan chan

-- | Non-blocking receive: 'Nothing' when no event is pending.
tryRecvIRCConnection :: IRCConnectionEventSub -> STM (Maybe IRCConnectionEvent)
tryRecvIRCConnection (IRCConnectionEventSub chan) = tryReadTChan chan
-- | Query the state of an IRC connection.  A connection that was never
-- started is answered directly with 'IRCConnectionNotStarted' (not an
-- error), unlike the other getters below.
getIRCConnectionState :: IRCConnection -> STM (Response IRCConnectionState)
getIRCConnectionState conn = do
  active <- readTVar (ircConnectionRunning conn)
  var <- newEmptyTMVar
  let reply = Response var
  if active
    then writeTQueue (ircConnectionActionQueue conn)
           (GetIRCConnectionState reply)
    else putTMVar var (Right IRCConnectionNotStarted)
  return reply

-- | Query the hostname of a running IRC connection.
getIRCConnectionHostname :: IRCConnection -> STM (Response (Maybe NS.HostName))
getIRCConnectionHostname conn = do
  active <- readTVar (ircConnectionRunning conn)
  var <- newEmptyTMVar
  let reply = Response var
  if active
    then writeTQueue (ircConnectionActionQueue conn)
           (GetIRCConnectionHostname reply)
    else putTMVar var (Left (Error "connection not started"))
  return reply

-- | Query the resolved address of a running IRC connection.
getIRCConnectionAddress :: IRCConnection -> STM (Response (Maybe NS.AddrInfo))
getIRCConnectionAddress conn = do
  active <- readTVar (ircConnectionRunning conn)
  var <- newEmptyTMVar
  let reply = Response var
  if active
    then writeTQueue (ircConnectionActionQueue conn)
           (GetIRCConnectionAddress reply)
    else putTMVar var (Left (Error "connection not started"))
  return reply

-- | Query the port of a running IRC connection.
getIRCConnectionPort :: IRCConnection -> STM (Response (Maybe NS.PortNumber))
getIRCConnectionPort conn = do
  active <- readTVar (ircConnectionRunning conn)
  var <- newEmptyTMVar
  let reply = Response var
  if active
    then writeTQueue (ircConnectionActionQueue conn)
           (GetIRCConnectionPort reply)
    else putTMVar var (Left (Error "connection not started"))
  return reply
-- | Whether an IRC connection state describes a lookup/connect attempt or an
-- established connection that is still in progress.
isIRCConnectionStateActive :: IRCConnectionState -> Bool
isIRCConnectionStateActive state =
  case state of
    IRCConnectionNotStarted -> False
    IRCConnectionStarted -> False
    IRCConnectionFindingAddr -> True
    IRCConnectionNoAddrFound _ -> False
    IRCConnectionLookupCanceled -> False
    IRCConnectionFindingName -> True
    -- note: unlike the other failure states, a failed reverse lookup still
    -- counts as active
    IRCConnectionNoNameFound _ -> True
    IRCConnectionReverseLookupCanceled -> False
    IRCConnectionConnecting -> True
    IRCConnectionConnected -> True
    IRCConnectionConnectingFailed _ -> False
    IRCConnectionConnectingCanceled -> False
    IRCConnectionDisconnected -> False
    IRCConnectionDisconnectError _ -> False
    IRCConnectionDisconnectedByPeer -> False
    IRCConnectionRecvError _ -> False
    IRCConnectionSendError _ -> False
-- | Run an unconnected IRC connection.
--
-- Worker loop for the "no connection in progress" state: blocks (STM) until
-- either a queued client action (Right) or an event from the underlying
-- connection (Left) arrives, handles it, and tail-calls into the next state
-- loop.
runUnconnected :: IRCConnection -> IRCConnectionData -> IO ()
runUnconnected outer ircConnection = do
  let connection = ircConnectionConnection ircConnection
  actionOrEvent <- atomically $ do
    (Right <$> (readTQueue $ ircConnectionActionQueue outer)) `orElse`
      (Left <$> (recvConnection $ ircConnectionEventSub ircConnection))
  -- Refresh the cached connection state (shadows the previous binding).
  ircConnection <- updateState ircConnection
  case actionOrEvent of
    Right (ConnectIRC hostname port response) -> do
      -- Reset per-connection fields and move to the connecting loop; the
      -- connect request's Response is fulfilled there once the attempt ends.
      let ircConnection' =
            ircConnection { ircConnectionBuffer = B.empty,
                            ircConnectionHostname = Just hostname,
                            ircConnectionPort = Just port,
                            ircConnectionAddress = Nothing,
                            ircConnectionState = IRCConnectionFindingAddr }
      -- NOTE(review): the Response from 'connect' is discarded here;
      -- progress is instead tracked via connection events in runConnecting.
      (atomically $ connect connection hostname port) >> return ()
      runConnecting outer ircConnection' response
    Right (DisconnectIRC (Response response)) -> do
      atomically . putTMVar response . Left $ Error "not active"
      runUnconnected outer ircConnection
    Right (SendIRCMessage _ (Response response)) -> do
      atomically . putTMVar response . Left $ Error "not connected"
      runUnconnected outer ircConnection
    Right (StopIRCConnection (Response response)) -> do
      -- Clear the running flag, ack the stop, then drain pending actions.
      atomically $ do
        writeTVar (ircConnectionRunning outer) False
        putTMVar response $ Right ()
      clearActions outer ircConnection
    Right (GetIRCConnectionState (Response response)) -> do
      atomically . putTMVar response . Right $ ircConnectionState ircConnection
      runUnconnected outer ircConnection
    Right (GetIRCConnectionHostname (Response response)) -> do
      atomically . putTMVar response . Right $
        ircConnectionHostname ircConnection
      runUnconnected outer ircConnection
    Right (GetIRCConnectionAddress (Response response)) -> do
      atomically . putTMVar response . Right $
        ircConnectionAddress ircConnection
      runUnconnected outer ircConnection
    Right (GetIRCConnectionPort (Response response)) -> do
      atomically . putTMVar response . Right $ ircConnectionPort ircConnection
      runUnconnected outer ircConnection
    -- Stray data while unconnected is dropped.
    Left (RecvData _) -> runUnconnected outer ircConnection
    -- Any other connection event is forwarded to subscribers.
    Left event -> do
      atomically . writeTChan (ircConnectionEventQueue outer) $
        ircConnectionEventOfConnectionEvent event
      runUnconnected outer ircConnection
-- | Run a connecting IRC connection.
--
-- Worker loop while a connect attempt is in flight.  @connectResponse'@ is
-- the pending Response handed over by the ConnectIRC action; it is fulfilled
-- when the attempt resolves (Connected / NoAddrFound / ConnectingFailed).
runConnecting :: IRCConnection -> IRCConnectionData -> Response () -> IO ()
runConnecting outer ircConnection
  connectResponse'@(Response connectResponse) = do
  let connection = ircConnectionConnection ircConnection
  actionOrEvent <- atomically $ do
    (Right <$> (readTQueue $ ircConnectionActionQueue outer)) `orElse`
      (Left <$> (recvConnection $ ircConnectionEventSub ircConnection))
  -- Refresh the cached connection state (shadows the previous binding).
  ircConnection <- updateState ircConnection
  case actionOrEvent of
    Right (ConnectIRC hostname port (Response response)) -> do
      atomically . putTMVar response . Left $ Error "already connecting"
      runConnecting outer ircConnection connectResponse'
    Right (DisconnectIRC (Response response)) -> do
      -- Abort the attempt and fall back to the unconnected loop.
      -- NOTE(review): connectResponse is left unfulfilled on this path and
      -- the ones like it below -- confirm callers are not blocked forever.
      response' <- atomically $ disconnect connection
      result <- atomically $ getResponse response'
      ircConnection <- updateState ircConnection
      atomically $ putTMVar response result
      runUnconnected outer ircConnection
    Right (SendIRCMessage _ (Response response)) -> do
      atomically . putTMVar response . Left $ Error "not connected"
      runConnecting outer ircConnection connectResponse'
    Right (StopIRCConnection (Response response)) -> do
      response' <- atomically . stopConnection $
        ircConnectionConnection ircConnection
      result <- atomically $ getResponse response'
      case result of
        Right () -> return ()
        Left (Error errorText) -> displayError errorText
      atomically $ do
        writeTVar (ircConnectionRunning outer) False
        putTMVar response $ Right ()
      clearActions outer ircConnection
    Right (GetIRCConnectionState (Response response)) -> do
      atomically . putTMVar response . Right $ ircConnectionState ircConnection
      runConnecting outer ircConnection connectResponse'
    Right (GetIRCConnectionHostname (Response response)) -> do
      atomically . putTMVar response . Right $
        ircConnectionHostname ircConnection
      runConnecting outer ircConnection connectResponse'
    Right (GetIRCConnectionAddress (Response response)) -> do
      atomically . putTMVar response . Right $
        ircConnectionAddress ircConnection
      runConnecting outer ircConnection connectResponse'
    Right (GetIRCConnectionPort (Response response)) -> do
      atomically . putTMVar response . Right $ ircConnectionPort ircConnection
      runConnecting outer ircConnection connectResponse'
    -- NOTE(review): receiving data while still connecting drops to the
    -- unconnected loop and never fulfills connectResponse, whereas
    -- runUnconnected simply ignores RecvData -- this looks like a bug;
    -- confirm whether it should loop via runConnecting instead.
    Left (RecvData _) -> runUnconnected outer ircConnection
    Left event -> do
      -- Forward the event, then resolve or continue the attempt.
      atomically . writeTChan (ircConnectionEventQueue outer) $
        ircConnectionEventOfConnectionEvent event
      case event of
        NoAddrFound failure -> do
          atomically . putTMVar connectResponse $ Left failure
          runUnconnected outer ircConnection
        FoundAddr address -> do
          let ircConnection' =
                ircConnection { ircConnectionAddress = Just address }
          runConnecting outer ircConnection' connectResponse'
        FoundName hostname -> do
          let ircConnection' =
                ircConnection { ircConnectionHostname = Just hostname }
          runConnecting outer ircConnection' connectResponse'
        ConnectingFailed failure -> do
          atomically . putTMVar connectResponse $ Left failure
          runUnconnected outer ircConnection
        Connected -> do
          atomically . putTMVar connectResponse $ Right ()
          runConnected outer ircConnection
        _ -> runConnecting outer ircConnection connectResponse'
-- | Run a connected IRC connection.
--
-- Worker loop for an established connection: services client actions,
-- accumulates received bytes into the buffer and parses them into IRC
-- messages, and drops back to 'runUnconnected' when the link goes away.
runConnected :: IRCConnection -> IRCConnectionData -> IO ()
runConnected outer ircConnection = do
  let connection = ircConnectionConnection ircConnection
  actionOrEvent <- atomically $ do
    (Right <$> (readTQueue $ ircConnectionActionQueue outer)) `orElse`
      (Left <$> (recvConnection $ ircConnectionEventSub ircConnection))
  -- Refresh the cached connection state (shadows the previous binding).
  ircConnection <- updateState ircConnection
  case actionOrEvent of
    Right (ConnectIRC hostname port (Response response)) -> do
      atomically . putTMVar response . Left $ Error "already connected"
      runConnected outer ircConnection
    Right (DisconnectIRC (Response response)) -> do
      response' <- atomically $ disconnect connection
      result <- atomically $ getResponse response'
      ircConnection <- updateState ircConnection
      atomically $ putTMVar response result
      runUnconnected outer ircConnection
    Right (SendIRCMessage message (Response response)) -> do
      -- Serialize and hand off to the underlying connection, relaying its
      -- success/failure to the requester.
      response' <- atomically . sendData connection $ formatIRCMessage message
      result <- atomically $ getResponse response'
      ircConnection <- updateState ircConnection
      atomically $ putTMVar response result
      runConnected outer ircConnection
    Right (StopIRCConnection (Response response)) -> do
      response' <- atomically . stopConnection $
        ircConnectionConnection ircConnection
      result <- atomically $ getResponse response'
      case result of
        Right () -> return ()
        Left (Error errorText) -> displayError errorText
      atomically $ do
        writeTVar (ircConnectionRunning outer) False
        putTMVar response $ Right ()
      clearActions outer ircConnection
    Right (GetIRCConnectionState (Response response)) -> do
      atomically . putTMVar response . Right $ ircConnectionState ircConnection
      runConnected outer ircConnection
    Right (GetIRCConnectionHostname (Response response)) -> do
      atomically . putTMVar response . Right $
        ircConnectionHostname ircConnection
      runConnected outer ircConnection
    Right (GetIRCConnectionAddress (Response response)) -> do
      atomically . putTMVar response . Right $
        ircConnectionAddress ircConnection
      runConnected outer ircConnection
    Right (GetIRCConnectionPort (Response response)) -> do
      atomically . putTMVar response . Right $ ircConnectionPort ircConnection
      runConnected outer ircConnection
    Left (RecvData bytes) -> do
      -- Append to the parse buffer and emit any complete messages.
      let oldBuffer = ircConnectionBuffer ircConnection
          ircConnection' =
            ircConnection { ircConnectionBuffer = B.append oldBuffer bytes }
      ircConnection'' <- atomically $ parseIRCMessages outer ircConnection'
      runConnected outer ircConnection''
    Left event -> do
      atomically . writeTChan (ircConnectionEventQueue outer) $
        ircConnectionEventOfConnectionEvent event
      case event of
        -- Any of these ends the session; fall back to the unconnected loop.
        DisconnectedByPeer -> runUnconnected outer ircConnection
        SendError _ -> runUnconnected outer ircConnection
        RecvError _ -> runUnconnected outer ircConnection
        _ -> runConnected outer ircConnection
-- | Refresh the cached state field from the underlying connection.  If the
-- query fails, the error is displayed and the previous state is kept.
updateState :: IRCConnectionData -> IO IRCConnectionData
updateState ircConnection = do
  stateResponse <- atomically . getConnectionState $
    ircConnectionConnection ircConnection
  outcome <- atomically $ getResponse stateResponse
  case outcome of
    Right connState ->
      return ircConnection
        { ircConnectionState = ircConnectionStateOfConnectionState connState }
    Left (Error errorText) -> do
      displayError errorText
      return ircConnection
-- | Convert a connection state to an IRC connection state.  Purely a
-- one-to-one constructor mapping.
ircConnectionStateOfConnectionState :: ConnectionState -> IRCConnectionState
ircConnectionStateOfConnectionState connState =
  case connState of
    ConnectionNotStarted -> IRCConnectionNotStarted
    ConnectionStarted -> IRCConnectionStarted
    ConnectionFindingAddr -> IRCConnectionFindingAddr
    ConnectionNoAddrFound failure -> IRCConnectionNoAddrFound failure
    ConnectionLookupCanceled -> IRCConnectionLookupCanceled
    ConnectionFindingName -> IRCConnectionFindingName
    ConnectionNoNameFound failure -> IRCConnectionNoNameFound failure
    ConnectionReverseLookupCanceled -> IRCConnectionReverseLookupCanceled
    ConnectionConnecting -> IRCConnectionConnecting
    ConnectionConnected -> IRCConnectionConnected
    ConnectionConnectingFailed failure -> IRCConnectionConnectingFailed failure
    ConnectionConnectingCanceled -> IRCConnectionConnectingCanceled
    ConnectionDisconnected -> IRCConnectionDisconnected
    ConnectionDisconnectError failure -> IRCConnectionDisconnectError failure
    ConnectionDisconnectedByPeer -> IRCConnectionDisconnectedByPeer
    ConnectionRecvError failure -> IRCConnectionRecvError failure
    ConnectionSendError failure -> IRCConnectionSendError failure

-- | Convert a connection event to an IRC connection event.  Purely a
-- one-to-one constructor mapping.
ircConnectionEventOfConnectionEvent :: ConnectionEvent -> IRCConnectionEvent
ircConnectionEventOfConnectionEvent event =
  case event of
    FoundAddr addr -> IRCFoundAddr addr
    NoAddrFound failure -> IRCNoAddrFound failure
    LookupCanceled -> IRCLookupCanceled
    FoundName hostname -> IRCFoundName hostname
    NoNameFound failure -> IRCNoNameFound failure
    ReverseLookupCanceled -> IRCReverseLookupCanceled
    ConnectingFailed failure -> IRCConnectingFailed failure
    Connected -> IRCConnected
    ConnectingCanceled -> IRCConnectingCanceled
    Disconnected -> IRCDisconnected
    DisconnectError failure -> IRCDisconnectError failure
    DisconnectedByPeer -> IRCDisconnectedByPeer
    SendError failure -> IRCSendError failure
    RecvError failure -> IRCRecvError failure
    ConnectionStopped -> IRCConnectionStopped
-- | Parse IRC messages.
--
-- Splits the buffer on newlines, parses each complete line into an
-- 'IRCMessage' (optional ":"-prefix, command, space-separated params, and
-- optional ":"-introduced coda) and broadcasts it as 'IRCRecvMessage'.
-- The final, possibly incomplete, fragment is kept as the new buffer.
parseIRCMessages :: IRCConnection -> IRCConnectionData -> STM IRCConnectionData
parseIRCMessages outer ircConnection = do
  let parts = B.split (byteOfChar '\n') $ ircConnectionBuffer ircConnection
  -- 'init'/'last' are safe: B.split always yields at least one element.
  forM_ (init parts) $ \part -> do
    -- Strip an optional trailing "\r" (CRLF line endings).
    let part' = case B.stripSuffix (encodeUtf8 "\r") part of
                  Just part -> part
                  Nothing -> part
        -- A leading ':' introduces the message prefix.
        (prefix, rest) = case B.stripPrefix (encodeUtf8 ":") part' of
                           Just part ->
                             let (prefix, rest) = splitOnSpaces part
                             in (Just prefix, rest)
                           Nothing -> (Nothing, Just part')
    case rest of
      Just rest ->
        let (command, rest') = splitOnSpaces rest
            (params, coda) =
              case rest' of
                Just rest' -> parseIRCParams rest' S.empty
                Nothing -> (S.empty, Nothing)
            ircMessage = IRCMessage { ircMessagePrefix = prefix,
                                      ircMessageCommand = command,
                                      ircMessageParams = params,
                                      ircMessageCoda = coda }
        in writeTChan (ircConnectionEventQueue outer) $
             IRCRecvMessage ircMessage
      -- A line consisting only of a prefix carries no message.
      Nothing -> return ()
  return $ ircConnection { ircConnectionBuffer = last parts }
  -- Accumulate space-separated parameters; a parameter starting with ':'
  -- is the coda and consumes the rest of the line.
  where parseIRCParams bytes params =
          if B.length bytes > 0
          then if B.head bytes == byteOfChar ':'
               then (params, Just $ B.tail bytes)
               else
                 let (param, rest) = splitOnSpaces bytes
                 in case rest of
                      Just rest -> parseIRCParams rest (params |> param)
                      Nothing -> (params |> param, Nothing)
          else (params, Nothing)
-- | Format an IRC message in wire form:
-- @[":"prefix " "] command (" " param)* [" :" coda] "\r\n"@.
formatIRCMessage :: IRCMessage -> B.ByteString
formatIRCMessage message =
  BL.toStrict . BB.toLazyByteString $
    prefixPart
      `mappend` BB.byteString (ircMessageCommand message)
      `mappend` paramsPart
      `mappend` codaPart
      `mappend` BB.stringUtf8 "\r\n"
  where
    prefixPart =
      case ircMessagePrefix message of
        Just p ->
          (BB.charUtf8 ':' `mappend` BB.byteString p) `mappend` BB.charUtf8 ' '
        Nothing -> mempty
    paramsPart =
      foldl' (\acc p -> acc `mappend` (BB.charUtf8 ' ' `mappend` BB.byteString p))
             mempty
             (ircMessageParams message)
    codaPart =
      case ircMessageCoda message of
        Just c -> BB.stringUtf8 " :" `mappend` BB.byteString c
        Nothing -> mempty
-- | Drain the action queue after shutdown, answering every pending request
-- so that no caller stays blocked on an unfulfilled 'Response'.
clearActions :: IRCConnection -> IRCConnectionData -> STM ()
clearActions outer ircConnection = do
  pending <- tryReadTQueue $ ircConnectionActionQueue outer
  case pending of
    Nothing -> return ()
    Just action -> do
      answer action
      clearActions outer ircConnection
  where
    -- Mutating requests are refused as canceled; pure queries are still
    -- answered from the last known connection data.
    answer (ConnectIRC _ _ (Response response)) =
      putTMVar response . Left $ Error "canceled"
    answer (DisconnectIRC (Response response)) =
      putTMVar response . Left $ Error "canceled"
    answer (SendIRCMessage _ (Response response)) =
      putTMVar response . Left $ Error "canceled"
    answer (StopIRCConnection (Response response)) =
      putTMVar response . Left $ Error "canceled"
    answer (GetIRCConnectionState (Response response)) =
      putTMVar response . Right $ ircConnectionState ircConnection
    answer (GetIRCConnectionHostname (Response response)) =
      putTMVar response . Right $ ircConnectionHostname ircConnection
    answer (GetIRCConnectionPort (Response response)) =
      putTMVar response . Right $ ircConnectionPort ircConnection
    answer (GetIRCConnectionAddress (Response response)) =
      putTMVar response . Right $ ircConnectionAddress ircConnection
| tabemann/amphibian | src/Network/IRC/Client/Amphibian/IRCConnection.hs | bsd-3-clause | 28,915 | 0 | 22 | 6,612 | 6,095 | 2,957 | 3,138 | 555 | 16 |
-- | Specialized versions of the general functions in
-- Text.PrettyPrint.Annotated.HughesPJ. Normally you shouldn't need these, but
-- they might help if ambiguities arise.
module Text.PrettyPrint.Annotated.String (
-- * The document type
Doc, TextDetails(..), AnnotDetails(..),
-- * Constructing documents
-- ** Converting values into documents
char, text, ptext, sizedText, zeroWidthText,
int, integer, float, double, rational,
-- ** Simple derived documents
semi, comma, colon, space, equals,
lparen, rparen, lbrack, rbrack, lbrace, rbrace,
-- ** Wrapping documents in delimiters
parens, brackets, braces, quotes, doubleQuotes,
maybeParens, maybeBrackets, maybeBraces, maybeQuotes, maybeDoubleQuotes,
-- ** Combining documents
empty,
(<>), (<+>), hcat, hsep,
($$), ($+$), vcat,
sep, cat,
fsep, fcat,
nest,
hang, punctuate,
-- ** Annotating documents
annotate,
-- * Predicates on documents
isEmpty,
-- * Utility functions for documents
first, reduceDoc,
-- * Rendering documents
-- ** Default rendering
render,
-- ** Annotation rendering
renderSpans, Span(..),
renderDecorated,
renderDecoratedM,
-- ** Rendering with a particular style
Style(..),
style,
renderStyle,
Mode(..),
-- ** General rendering
fullRender,
) where
import Text.PrettyPrint.Annotated.HughesPJ hiding
(text, ptext, sizedText, zeroWidthText,
render, renderStyle, renderSpans, renderDecorated, renderDecoratedM)
import qualified Text.PrettyPrint.Annotated.HughesPJ as LL
-- | Specialized version of 'LL.text'
text :: String -> Doc a
text = LL.text
{-# INLINE text #-}
-- | Specialized version of 'LL.ptext'
ptext :: String -> Doc a
ptext = LL.ptext
{-# INLINE ptext #-}
-- | Specialized version of 'LL.sizedText'
sizedText :: Int -> String -> Doc a
sizedText = LL.sizedText
{-# INLINE sizedText #-}
-- | Specialized version of 'LL.zeroWidthText'
--
-- NOTE(review): the body delegates to 'LL.sizedText' and the signature takes
-- an extra 'Int', so this is indistinguishable from 'sizedText' rather than
-- a zero-width variant.  It looks like a copy-paste slip for
-- @zeroWidthText = LL.zeroWidthText@ (type @String -> Doc a@), but fixing it
-- would change this function's arity -- confirm before changing.
zeroWidthText :: Int -> String -> Doc a
zeroWidthText = LL.sizedText
{-# INLINE zeroWidthText #-}
-- | Specialized version of 'LL.render'
render :: Doc a -> String
render = LL.render
{-# INLINE render #-}
-- | Specialized version of 'LL.renderStyle'
renderStyle :: Style -> Doc a -> String
renderStyle = LL.renderStyle
{-# INLINE renderStyle #-}
-- | Specialized version of 'LL.renderSpans'
renderSpans :: Doc ann -> (String,[Span ann])
renderSpans = LL.renderSpans
{-# INLINE renderSpans #-}
-- | Specialized version of 'LL.renderDecorated'
renderDecorated :: (ann -> String) -- ^ Starting an annotation.
                -> (ann -> String) -- ^ Ending an annotation.
                -> Doc ann -> String
renderDecorated = LL.renderDecorated
{-# INLINE renderDecorated #-}
-- | Specialized version of 'LL.renderDecoratedM'
renderDecoratedM :: (Monad m)
                 => (ann -> m r) -- ^ Starting an annotation.
                 -> (ann -> m r) -- ^ Ending an annotation.
                 -> (String -> m r) -- ^ Text formatting.
                 -> m r -- ^ Document end.
                 -> Doc ann -> m r
renderDecoratedM = LL.renderDecoratedM
{-# INLINE renderDecoratedM #-}
| ddssff/pretty-listlike | src/Text/PrettyPrint/Annotated/String.hs | bsd-3-clause | 3,377 | 0 | 11 | 908 | 594 | 376 | 218 | 65 | 1 |
module HLearn.Optimization.Amoeba
where
import Control.Monad
import Control.Monad.ST
import Data.List
import Debug.Trace
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as VM
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Storable.Mutable as VSM
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Generic.Mutable as VGM
import qualified Data.Vector.Algorithms.Intro as Intro
import Numeric.LinearAlgebra hiding ((<>))
import qualified Numeric.LinearAlgebra as LA
-- | Minimize @f@ by the Nelder-Mead (amoeba) method starting from @x0@.
-- The initial simplex is @x0@ plus @x0 + e_i@ for each unit basis vector;
-- the step is iterated a fixed 1000 times and the best vertex is returned.
findMinAmoeba f x0 = runST $ do
    -- initialize simplex: one vertex per dimension plus the start point
    vec <- VM.new (VG.length x0+1)
    VGM.write vec 0 (f x0,x0)
    -- forM_ rather than forM: the per-index results are not used
    forM_ [1..VGM.length vec-1] $ \i -> do
        e_i <- VGM.replicate (VG.length x0) 0
        VGM.write e_i (i-1) 1
        e_i' <- VG.freeze e_i
        let x_i = x0 `LA.add` e_i'
        VGM.write vec i (f x_i,x_i)
    -- iterate (the step mutates the simplex in place and returns it)
    vec' <- itrM 1000 (stepAmoeba f) vec
    -- sort before reading: the last step replaces the worst vertex without
    -- re-sorting, so index 0 is otherwise not guaranteed to be the minimum
    Intro.sortBy (\a b -> compare (fst a) (fst b)) vec'
    (_,ret) <- VGM.read vec' 0
    return ret
-- | One Nelder-Mead step with the standard coefficients:
-- reflection 1, expansion 2, contraction -1/2, shrink 1/2.
stepAmoeba f vec = stepAmoebaRaw 1 2 (-1/2) (1/2) f vec
-- | One raw Nelder-Mead step: sort the simplex by function value, then try
-- reflection, expansion and contraction of the worst vertex through the
-- centroid of the rest, shrinking the whole simplex toward the best vertex
-- if none of those is accepted.  The vector is mutated in place.
stepAmoebaRaw ::
    ( Fractional b
    , Ord b
    , VGM.MVector vec (b,a)
--    , a ~ LA.Matrix b
    , a ~ LA.Vector b
    , Field b
    , vec ~ VM.MVector
    , b ~ Double
    ) => b
    -> b
    -> b
    -> b
    -> (a -> b)
    -> vec s (b,a)
    -> ST s (vec s (b,a))
stepAmoebaRaw alpha gamma ro sigma f vec = do
    Intro.sortBy (\a b -> compare (fst a) (fst b)) vec
    (f_1,x_1) <- VGM.read vec 0
    -- NOTE(review): (f_2,x_2) is bound but never used below -- confirm
    -- whether the reflection test was meant to use a second-worst value.
    (f_2,x_2) <- VGM.read vec 1
    (f_n1,x_n1) <- VGM.read vec $ VGM.length vec -1
    -- centroid of all vertices except the worst
    x_0 <- liftM ( scale (1/fromIntegral (VGM.length vec-1))
                 . foldl1' (LA.add)
                 . init
                 . map snd
                 . V.toList
                 ) $ VG.unsafeFreeze vec
    let x_r = x_0 `LA.add` (scale alpha $ x_0 `LA.sub` x_n1)
        f_r = f x_r
        x_e = x_0 `LA.add` (scale gamma $ x_0 `LA.sub` x_n1)
        f_e = f x_e
        x_c = x_0 `LA.add` (scale ro $ x_0 `LA.sub` x_n1)
        f_c = f x_c
    -- check reflection
    -- NOTE(review): 'f_1 <= f_r && f_r < f_1' is unsatisfiable, so this
    -- branch is dead.  The textbook condition compares f_r against the
    -- second-worst value (f_1 <= f_r < f(x_n)) -- confirm and fix.
    if f_1 <= f_r && f_r < f_1
        then VGM.write vec (VGM.length vec-1) (f_r,x_r)
        -- check expansion
        else if f_r < f_1
            then if f_e < f_r
                then VGM.write vec (VGM.length vec-1) (f_e,x_e)
                else VGM.write vec (VGM.length vec-1) (f_r,x_r)
            -- check contraction
            else if f_c < f_n1
                then VGM.write vec (VGM.length vec-1) (f_c,x_c)
                -- reduction
                else forM_ [1..VGM.length vec-1] $ \i -> do
                    (f_i,x_i) <- VGM.read vec i
                    let x_i' = x_1 `LA.add` (scale sigma $ x_i `LA.sub` x_1)
                        f_i' = f x_i'
                    VGM.write vec i (f_i',x_i')
    return vec
--     refMinVal <- newSTRef (-infinity)
--     refMinIndex <- newSTRef 0
--     forM [0..VGM.length vec-1] $ \i -> do
--         ival <- VGM.read vec i
--         minVal <- readSTRef refMinVal
--         if minVal < fst ival
--             then return ()
--             else do
--                 writeSTRef refMinVal $ fst ival
--                 writeSTRef refMinIndex i
--     undefined
-- | Apply the monadic function @f@ to @a@, @i@ times, threading the result
-- through the monad.  A count of zero or less performs no applications and
-- returns @a@ unchanged (the previous pattern-match version recursed on
-- @i - 1@ and so never terminated for negative counts).
itrM :: Monad m => Int -> (a -> m a) -> a -> m a
itrM i f a
  | i <= 0 = return a
  | otherwise = do
      a' <- f a
      itrM (i-1) f a'
| ehlemur/HLearn | src/HLearn/Optimization/Amoeba.hs | bsd-3-clause | 3,542 | 0 | 22 | 1,234 | 1,228 | 656 | 572 | -1 | -1 |
{-# LANGUAGE MagicHash #-}
module System.Random.Mersenne.Pure64.Internal
( PureMT(..)
, blockLen
, blockSize
, MTBlock(..)
) where
import GHC.Exts
import System.Random.Mersenne.Pure64.Base
-- | 'PureMT', a pure mersenne twister pseudo-random number generator
--
-- The first two fields are strict and unpacked; the third is left lazy
-- (presumably the successor state block, computed on demand -- its use
-- sites are outside this module, so confirm there).
data PureMT = PureMT {-# UNPACK #-} !MTBlock {-# UNPACK #-} !Int MTBlock

-- | Opaque rendering: the generator state itself is not shown.
instance Show PureMT where
    -- Fixed: was 'show _ = show "<PureMT>"', which applied 'show' to the
    -- String and so rendered literal quote characters ("\"<PureMT>\"").
    show _ = "<PureMT>"
data MTBlock = MTBlock ByteArray# | bgamari/mersenne-random-pure64 | System/Random/Mersenne/Pure64/Internal.hs | bsd-3-clause | 440 | 0 | 7 | 77 | 92 | 57 | 35 | 12 | 0 |
{-|
Module : Database.Relational.As
Description : Definition of AS.
Copyright : (c) Alexander Vieth, 2015
Licence : BSD3
Maintainer : aovieth@gmail.com
Stability : experimental
Portability : non-portable (GHC only)
-}
{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
module Database.Relational.As (
AS(..)
, TABLE_ALIAS(..)
) where
import Data.Proxy
-- | An @AS@ clause: pairs a term with the alias under which it is known.
data AS term alias where
    AS :: term -> alias -> AS term alias
-- | Good for giving a complete table alias: a name for the table and a name
-- for each of its columns.
data TABLE_ALIAS tableName columnNames where
    TABLE_ALIAS :: TABLE_ALIAS tableName columnNames
| avieth/Relational | Database/Relational/As.hs | bsd-3-clause | 720 | 0 | 8 | 148 | 75 | 50 | 25 | 12 | 0 |
-- powerseries.hs
module MPS.Math.PowerSeries (PowerSeries (..), t, coeffsPS, coeffsPS', leadingPS, (~=), (*/),
diagonalSumPS, inversePS,
binomial1, (^%),
binomialPS, expPS, log1PS,
fromOGF, fibonacciOGF, catalanOGF, ramanujanTauOGF,
fromEGF, bernoulliEGF, bellEGF) where
import List (intersperse)
import MPS.Math.MathsPrimitives (FunctionRep (..), partialProducts, factorials, ($+), ($.) )
import MPS.Math.QQ
-- Sources
-- http://en.wikipedia.org - "trigonometric functions", "hyperbolic functions"
-- Doug McIlroy, "Power Series, Power Serious"
-- Dan Piponi, code and email
infixr 8 ^%
-- | A formal power series over 'QQ': @PS [a0,a1,a2,..]@ represents
-- a0 + a1 t + a2 t^2 + ...; the coefficient list may be finite or infinite.
newtype PowerSeries = PS [QQ]
-- | Coefficient list padded with an infinite tail of zeros.
coeffsPS f = coeffsPS' f ++ repeat 0
-- | The raw (possibly finite) coefficient list.
coeffsPS' (PS as) = as
-- These are formal power series - we're not interested in questions of convergence
-- | Approximate equality: agreement on the first 10 coefficients only.
PS as ~= PS bs = take 10 as == take 10 bs
-- | Deliberately unusable: equality of formal power series is not decidable,
-- so '==' raises an error directing callers to '~=' instead.
instance Eq PowerSeries where
    f == g = error "PowerSeries.== : not defined, use ~= instead"
-- SHOW INSTANCE
-- | Shows up to the first 10 non-zero terms followed by "+...".
instance Show PowerSeries where
    show f = showLeading 10 f ++ "+..."
-- tries to show the first n non-zero terms, by looking at the first 2n terms
showLeading n (PS as) =
    let powers = take n (filter (\(a,i) -> a /= 0) (zip as [0..2*n]))
    in
    if null powers
    then "0"
    else foldl1 linkTerms (map showTerm powers) -- safe: powers is non-null here
    where linkTerms t u = if head u == '-' then t ++ u else t ++ "+" ++ u
          showTerm (a,0) = show a
          showTerm (a,i) = showCoeff a ++ showPower i
-- | Coefficient rendering: 1 is suppressed, -1 becomes a bare sign.
showCoeff a
    | a == 1 = ""
    | a == -1 = "-"
    | otherwise = show a
-- | Power rendering: t^0 is omitted and t^1 is shown as "t".
showPower i
    | i == 0 = ""
    | i == 1 = "t"
    | otherwise = "t^" ++ show i
-- | Print up to the first n non-zero terms of a power series.
leadingPS n f = print $ showLeading n f
-- NUM INSTANCE
-- | Ring structure on formal power series.  Note that 'abs' and 'signum'
-- are not defined, so using them hits the default "no method" error.
instance Num PowerSeries where
    PS as + PS bs = PS (as $+ bs)
    negate (PS as) = PS (map negate as)
    PS as * PS bs = PS (as $* bs)
    fromInteger n = PS [fromInteger n]
-- | Cauchy product on coefficient lists, arranged so that each output
-- coefficient needs only finitely many inputs:
-- (a + t*as)(b + t*bs) = ab + t*a*bs + t*b*as + t^2*(as*bs).
(a:as) $* (b:bs) = [a*b] $+ (0 : map (a*) bs) $+ (0: map (*b) as) $+ (0 : 0 : (as $* bs))
_ $* _ = []
-- | The formal indeterminate t.
t = PS [0,1]
-- sum an infinite sequence of power series, on the assumption that each has at least one more leading zero than the previous
diagonalSumPS fs = PS (dsum 0 [] fs)
    -- i counts coefficients already emitted; as holds pending partial sums
    where dsum i as (PS bs : fs) = let bs' = drop i bs
                                       j = length $ takeWhile (==0) bs'
                                   in take j (as ++ repeat 0) ++ dsum (i+j) (drop j (as $+ bs')) fs
          dsum i as [] = as
-- FRACTIONAL INSTANCE
-- | Division of formal power series; common factors of t are cancelled
-- before taking the reciprocal of the denominator.
instance Fractional PowerSeries where
    recip (PS as) = PS (recipPS as)
    PS (0:as) / PS (0:bs) = PS as / PS bs
    f / g = f * recip g
-- denominator must not be divisible by t, unless numerator also is, in which case we can cancel
-- NOTE(review): the empty-list case of recipPS is not handled -- a finite,
-- empty coefficient list would be a pattern-match failure; confirm inputs.
recipPS (0:_) = error "PowerSeries.recip: a0 == 0"
recipPS (a:as) = (1/a) : doRecip [1/a]
    -- bs holds the reciprocal's coefficients so far, most recent first
    where doRecip bs = let b = - (as $. bs) / a in b : doRecip (b:bs)
-- [a0,a1,a2,...] * [b0,b1,b2,...] = [a0b0, a0b1+a1b0, a0b2+a1b1+a2b0, ...]
-- For bs == recip as, we want as * bs = [1,0,0,...]
-- So a0b0 == 1, a0b1+a1b0 == 0, a0b2+a1b1+a2b0 == 0, ...
-- So b0 == 1/a0, b1 == (-1/a0) a1b0, b2 == (-1/a0) (a1b1+a2b0), ...
-- | Scale every coefficient by a constant.
c */ as = map (c*) as
-- | Alternative self-referential reciprocal (same result as recipPS).
recipPS' (0:_) = error "PowerSeries.recip: a0 == 0"
recipPS' (a:as) = 1/a : bs
    where bs = (-1/a) */ ( ((1/a) */ as) $+ (0 : (as $* bs)) )
-- (a + t as)(b + t bs) == 1
-- => ab + t (a bs + b as) + t^2 (as bs) == 1
-- => ab == 1 and t a bs == - t b as - t^2 as bs
-- => the recursion bootstraps because to calculate the nth coeff of bs is dependent only on the first n-1 (or put another way, because of the 0: in the expression)
-- The running times of recipPS and recipPS' are about the same
-- COMPOSITION OF POWER SERIES
-- | Composition f(g(t)) of formal power series.  The inner series must have a
-- zero constant term: otherwise each coefficient of the composite would be an
-- infinite sum, which is not valid for *formal* power series.  (We could do it
-- for power series over R, say, but the code would be significantly more
-- complicated.)
composePS (PS as) (PS (0:bs)) = PS (doCompose as [1] [])
  where
    -- power : the current power of the inner series
    -- sum   : accumulated coefficients not yet emitted
    doCompose (a:as) power sum =
      let s:sum' = sum $+ map (a*) power
      in s : doCompose as (bs $* power) sum'
    doCompose [] _ sum = sum
composePS _ _ = error "compose: second argument has non-zero constant term"
-- | Composition adapted from McIlroy's version - empirically, this appears to
-- be less efficient than my version ('composePS').
-- NOTE(review): only the (a:as)/(0:bs) case is matched, so a finite outer
-- series or an inner series with non-zero constant term hits a pattern-match
-- failure rather than a descriptive error - confirm this is intended.
composePS' (PS as) (PS bs) = PS (doCompose' as bs)
  where
    doCompose' (a:as) (0:bs) = a : bs $* doCompose' as (0:bs)
-- adapted from McIlroy
-- | The functional inverse - given f, find g such that f(g(t)) == t == g(f(t)).
-- Only defined for series with zero constant term.  The definition is lazily
-- self-referential (bs appears on both sides) and bootstraps via 'composePS'.
inversePS (PS (0:as)) = PS (0:bs)
  where PS bs = recip (composePS (PS as) (PS (0:bs)))
-- CALCULUS
-- | Formal derivative: d/dt (sum a_n t^n) = sum n*a_n t^(n-1).
-- This exactly parallels the definition of derivUP.
derivPS (PS []) = PS []
derivPS (PS (_:as)) = PS (zipWith (*) (map fromInteger [1..]) as)
-- | Formal integral, with zero constant of integration.
integPS (PS as) = PS (0 : zipWith (/) as (map fromInteger [1..]))
-- | Hook PowerSeries into the generic function-representation interface.
instance FunctionRep PowerSeries where
  compose = composePS
  deriv = derivPS
  integ = integPS
-- CALCULATING POWER SERIES FOR TRANSCENDENTAL FUNCTIONS
-- If we didn't already know the power series expansions for the elementary
-- transcendental functions, we could now calculate them from their known
-- integro-differential properties (technique from McIlroy).  Each recursive
-- definition is productive because 'integ' prepends a coefficient first.
exp_t :: PowerSeries
exp_t = 1 + integ exp_t
log1_t :: PowerSeries
log1_t = integ (1/(1+t))
sin_t :: PowerSeries
sin_t = integ cos_t
cos_t :: PowerSeries
cos_t = 1 - integ sin_t
tan_t = sin_t / cos_t
arcsin_t :: PowerSeries
arcsin_t = integ (sqrt (1/(1-t^2)))
arctan_t :: PowerSeries
arctan_t = integ (1 / (1+t^2))
arctanh_t :: PowerSeries
arctanh_t = integ (1 / (1-t^2))
-- EXPLICIT POWER SERIES FOR TRANSCENDENTAL FUNCTIONS
-- We can construct power series for transcendental functions explicitly.
-- (This is useful for testing, and is usually the fastest way to get these
-- power series if we want to use them in composition.)
-- | Koblitz 81-2:
-- (1+t)^a == sum a(a-1)...(a-n+1)/n! x^n -- valid for all a <- QQ
binomialPS :: QQ -> PowerSeries
binomialPS a =
  let numerators = partialProducts (map (\m -> a - fromInteger m) [0..])
      denominators = partialProducts (map fromInteger [1..])
  in PS (1 : zipWith (/) numerators denominators)
-- | (1+f)^a for a series f with zero constant term, by composing into the
-- binomial series.
binomial1 a f@(PS(0:_)) = composePS (binomialPS a) f
binomial1 _ _ = error "binomial1: only defined when a0 == 0"
-- | f ^% a raises f to an arbitrary rational power a (f must have constant
-- term 1 so that f-1 has constant term 0).  Equivalently: power a f.
-- Note that for whole numbers f ^ n is faster than f ^% n.
f ^% a = binomial1 a (f-1)
-- | exp t = sum t^n / n!.
expPS = PS (map (Q 1) factorials)
-- | log(1+t) = t - t^2/2 + t^3/3 - ...
-- Note: this is the power series for log(1+t), not log t.
log1PS = PS (0 : zipWith Q alternating [1..])
  where alternating = 1 : (-1) : alternating
-- | sin t: numerators cycle 0,1,0,-1 over the factorials.
sinPS = PS (zipWith toQ numerators factorials)
  where numerators = 0 : 1 : 0 : (-1) : numerators
-- | cos t: numerators cycle 1,0,-1,0 over the factorials.
cosPS = PS (zipWith toQ numerators factorials)
  where numerators = 1 : 0 : (-1) : 0 : numerators
tanPS = sinPS / cosPS
-- | arcsin t = t + t^3/6 + 3t^5/40 + ...: odd powers only, built from the
-- double-factorial partial products.
arcsinPS =
  let numerators = 0 : intersperse 0 (1 : partialProducts [1,3..])
      denominators = zipWith (*) (1 : intersperse 1 (1 : partialProducts [2,4..])) (1:[1..])
  in PS (zipWith toQ numerators denominators)
-- | arctan t = t - t^3/3 + t^5/5 - ...
arctanPS = PS (zipWith toQ numerators (1 : [1..]) )
  where numerators = 0 : 1 : 0 : (-1) : numerators
-- | sinh t: odd powers over the factorials, all positive.
sinhPS = PS (zipWith toQ numerators factorials)
  where numerators = 0 : 1 : numerators
-- | cosh t: even powers over the factorials, all positive.
coshPS = PS (zipWith toQ numerators factorials)
  where numerators = 1 : 0 : numerators
tanhPS = sinhPS / coshPS
-- | arcsinh t: like arcsin, but with alternating signs on the odd terms.
arcsinhPS =
  let signs = 0 : 1 : 0 : (-1) : signs
      numerators = zipWith (*) signs (0 : intersperse 0 (1 : partialProducts [1,3..]))
      denominators = zipWith (*) (1 : intersperse 1 (1 : partialProducts [2,4..])) (1:[1..])
  in PS (zipWith toQ numerators denominators)
-- | arctanh t = t + t^3/3 + t^5/5 + ...
arctanhPS = PS (zipWith toQ numerators (1 : [1..]))
  where numerators = 0 : 1 : numerators
-- FLOATING INSTANCE
-- There are several different ways that each function could be defined.
-- We have chosen the fastest.
-- !! Warning - some of the following are only valid for first term == 0 or == 1
-- We need to work out which these are, and put appropriate guards in
instance Floating PowerSeries where
  pi = error "PowerSeries.pi : not defined"
  exp f = compose expPS f
  -- from Dan - follows from d/dx (log (f(x)) = f'(x) / f(x)
  log f = integ (deriv f / f)
  -- Even leading zeros: factor t^2 out of the radicand and pull one t out of
  -- the root.
  sqrt (PS (0:0:as)) = PS (0:bs) where PS bs = sqrt (PS as)
  -- Constant term 1: lazy self-referential definition g = 1 + integ (f'/(2g)),
  -- productive because integ prepends a coefficient before recursing.
  sqrt f@(PS (1:_)) = g
    where g = 1 + integ (deriv f / (2 * g))
  sqrt f = error ("PowerSeries.sqrt not defined for " ++ show f)
  -- from McIlroy
  sin f = compose sinPS f -- doSin f (deriv f)
  cos f = compose cosPS f -- doCos f (deriv f)
  sinh f = compose sinhPS f -- doSinh f (deriv f)
  cosh f = compose coshPS f -- doCosh f (deriv f)
  asin f = integ (deriv f / sqrt(1-f*f))
  -- from Dan. Justification:
  -- f == sin g
  -- => f' == g' cos g (differentiation with chain rule)
  --       == g' sqrt (1 - (sin g)^2)
  --       == g' sqrt (1 - f^2)
  -- => g' = f' / sqrt (1-f^2)
  atan f = integ (deriv f / (1+f*f))
  acos _ = error "PowerSeries.acos : not defined" -- acos f == pi/2 - asin f, so could be defined as PowerSeries RR, but not as PowerSeries QQ
  asinh f = integ (deriv f / sqrt(1+f*f))
  atanh f = integ (deriv f / (1-f*f))
  acosh _ = error "PowerSeries.acosh : not defined"
-- Alternative mutually recursive sin/cos via the derivative identities
-- (sin f)' = f' cos f and (cos f)' = -f' sin f; kept for reference.
sin' f = integ (f' * cos' f) where f' = deriv f
cos' f = 1 - integ (f' * sin' f) where f' = deriv f
-- GENERATING FUNCTIONS
-- | Recover the counted sequence from an exponential generating function
-- (multiply the coefficient of t^n by n!).
fromEGF (PS as) = zipWith (*) as (map fromInteger factorials)
-- | Recover the counted sequence from an ordinary generating function.
fromOGF (PS as) = as
fibonacciOGF = 1 / (1-t-t^2)
-- | The Catalan numbers count the number of binary trees, which satisfy
-- T = 1 + x T^2, giving a self-referential coefficient list.
catalanOGF = PS ts
  where ts = 1 : ts $* ts
-- | The ogf for the Catalan numbers comes from solving x T^2 - T + 1 == 0
-- => sum c_k t^k = (1 - sqrt (1-4t)) / 2t
catalanOGF' = (1 - sqrt (1 - 4*t)) / (2*t)
-- | egf for the Bernoulli numbers is sum (b_k t^k / k!) = t/(e^t-1)
-- (== t / (expPS - 1)).  If we interpret this as a function of D, then we find
-- that D/(e^D-1) (x^n) = B_n(x), the nth Bernoulli poly; the egf for the
-- Bernoulli polys is t e^xt / (e^t-1).
bernoulliEGF = t/(exp t - 1)
-- | egf for Bell numbers is sum (b_k t^k / k!) = e^(e^t-1) (== exp (expPS - 1)).
bellEGF = exp (exp t - 1)
-- | Koblitz p122: the Ramanujan tau function is defined by
-- sum [tau(n) * q^n | n <- [1..] ] == q * product [(1-q^n)^24 | n <- [1..] ]
ramanujanTauOGF = t * firstProduct ^ 24
  where
    -- firstProduct == product [1-q^n | n <- [1..] ].  Once we have taken the
    -- partial product up to (1-t^n), the first n coefficients are already
    -- correct, so the nth coefficient is read off the nth partial product.
    firstProduct =
      let iterates = partialProducts [(1-t^n) | n <- [1..] ]
      in PS (1 : zipWith (\f i -> coeffsPS f !! i) iterates [1..])
| nfjinjing/mps | src/MPS/Math/PowerSeries.hs | bsd-3-clause | 10,782 | 30 | 16 | 2,708 | 3,409 | 1,792 | 1,617 | 157 | 3 |
module Chap04 where
-- | Ex 4.1
--
-- X \subseteq R \cap (S \cap T)
-- = {- 交わりの普遍性 -}
-- X \subseteq R /\ X \subseteq (S \cap T)
-- = {- 交わりの普遍性 -}
-- X \subseteq R /\ (X \subseteq S /\ X \subseteq T)
-- = {- 連言は結合的 -}
-- (X \subseteq R /\ X \subseteq S) /\ X \subseteq T
-- = {- 交わりの普遍性 -}
-- X \subseteq (R \cap S) /\ X \subseteq T
-- = {- 交わりの普遍性 -}
-- X \subseteq (R \cap S) \cap T
-- | Ex 4.2
--
-- 左の図式
-- S . R \subseteq T
--
-- 右の図式
-- S . R \subseteq T /\ T . V \subseteq U
--
-- | Ex 4.3
--
-- (R . S) \cap (R . T) \subseteq R . (S \cap T)
--
-- R = {(0, 0), (0, 1)}
-- S = {(0, 0)}
-- T = {(1, 0)}
-- とすると,
-- R . S = {(0, 0)}
-- R . T = {(0, 0)}
-- なので R . S \cap R . T = {(0, 0)} = S
-- 一方,
-- S \cap T = {}
-- なので, R . (S \cap T) = {}
--
-- | Ex 4.4
--
-- 前順序の圏で考えると,射 A <- B は A >= B.
-- すると積はminに当たる.
-- | Ex 4.5
--
-- R \cap (S . T) = R \cap (S . ((S^op . R) \cap T)) を示す
--
-- モジュラ則を利用する.
-- (R . S) \cap T \subseteq R . (S \cap (R^op . T))
--
-- R \cap (S . T) \subseteq R \cap (S . ((S^op . R) \cap T))
-- = {- 交わりの普遍性 -}
-- R \cap (S . T) \subseteq R /\ R \cap (S . T) \subseteq S . ((S^op . R) \cap T)
-- = {- 交わりの下界 R \cap X \subseteq R -}
-- true /\ R \cap (S . T) \subseteq S . ((S^op . R) \cap T)
-- = {- 連言 -}
-- (S . T) \cap R \subseteq S . (T \cap (S^op . R))
-- = {- モジュラ則 -}
-- true
--
-- 逆方向の証明
--
-- R \cap (S . ((S^op . R) \cap T)) \subseteq R \cap (S . T)
-- = {- 交わりの普遍性 -}
-- R \cap (S . ((S^op . R) \cap T)) \subseteq R /\ R \cap (S . ((S^op . R) \cap T)) \subseteq S . T
-- = {- 交わりの下界 R \cap X \subseteq R -}
-- true /\ R \cap (S . ((S^op . R) \cap T)) \subseteq S . T
-- = {- 連言 -}
-- R \cap (S . ((S^op . R) \cap T)) \subseteq S . T
-- <= {- 交わりの下界 X \cap T \subseteq T -}
-- R \cap (S . T) \subseteq S . T
-- = {- 交わりの下界 X \cap S . T \subseteq S . T -}
-- true
--
-- | Ex 4.6
--
-- モジュラ則
-- (R . S) \cap T \subseteq R . (S \cap (R^op . T))
--
-- R
-- = {- 交わりの冪等則 -}
-- R \cap R
-- = {- id -}
-- (R . id) \cap R
-- \subseteq {- モジュラ則 -}
-- R . (id \cap (R^op . R))
-- = {- 単調性の公理 -}
-- R . id \cap R . (R^op . R)
-- \subseteq {- 交わりの下限 X \cap R \subseteq R -}
-- R . (R^op . R)
-- = {- 合成の結合則 -}
-- R . R^op . R
--
-- | Ex 4.7
--
-- A と B とが寓なら A x B も寓.
-- 対象を (A, B) とし射をポイントワイズに定義する.
-- (R, S) (a, b) = (c, d) は (cRa, dSb) とすれば良い.
-- 圏としてはそれで成立するとして寓に追加の演算について検討する.
--
-- 包含は R \subseteq R' == aRb => aR'b かつ S \subseteq S' == aSb => aS'b とすると,
-- (R, S) \subseteq (R', S') == (c, d) (R, S) (a, b) => (c, d) (R', S') (a, b) である.
-- なぜなら左辺は R \subseteq R' かつ S \subseteq S' であり,
-- 右辺は cRa => cR'a かつ dSb => dS'b であるから.
--
-- 交わりの普遍性は任意の (X, Y) について,
-- (X, Y) \subseteq ((R, R') \cap (S, S')) == (X, Y) \subseteq (R, R') /\ (X, Y) \subseteq (S, S') とポイントワイズに定義すれば良い.
--
-- 逆は (R, S)^op = (R^op, S^op) であり,
-- ((R, S)^op)^op = (R^op, S^op)^op = ((R^op)^op, (S^op)^op) = (R, S) となり,
-- 対合が成り立つ
-- また, (R, S) \subseteq (R', S') なら (R, S)^op \subseteq (R', S')^op も言える.
-- なぜなら (R^op, S^op) \subseteq (R'^op, S'^op) であり, ポイントワイズに
-- R^op \subseteq R'^op と S^op \subseteq S'^op も言えるからだ.
-- 反変については ((R, R') . (S, S'))^op = (S, S')^op . (R, R')^op もそのまま成り立つ.
--
-- | Ex 4.8
--
-- C : A <- A を余反射とする.
-- つまり C \subseteq id_A とする.
--
-- C . C
-- \subseteq {- 余反射 -}
-- id_A . C
-- = {- 恒等射は単位元 -}
-- C
--
-- | Ex 4.9
--
-- A と B が余反射で A . B と合成できたり A \cap B で交わりが取れるためには同じ型である必要がある.
-- つまり A, B : C <- C であるとする.
-- A \subseteq id_C かつ B \subseteq id_C である.
--
-- 左から右
-- A . B
-- = {- 交わりの冪等 -}
-- A . B \cap A . B
-- \subseteq {- A B 余反射 -}
-- A . id_C \cap id_C . B
-- = {- 恒等射 -}
-- A \cap B
--
-- 右から左
-- A \cap B
-- = {- 恒等射 -}
-- (A . id_C) \cap B
-- \subseteq {- モジュラ則 -}
-- A . (id_C \cap (A^op . B))
-- \subseteq {- 単調性 -}
-- A . id_C \cap A . A^op . B
-- \subseteq {- 交わりの下界 X \cap R \subseteq R -}
-- A . A^op . B
-- \subseteq {- 後述 -}
-- A . (id_C)^op . B
-- = {- (id_C)^op = id_C -}
-- A . id_C . B
-- = {- 恒等射は単位元 -}
-- A . B
--
-- 上記の後述.
-- A が余反射なら A^op も余反射である.以下にそれを論証する.
--
-- Aが余反射
-- = {- 余反射の定義 -}
-- A \subseteq id
-- = {- 逆の順序保存 (4.3) -}
-- A^op \subseteq id^op
-- = {- 恒等射の逆は恒等射 -}
-- A^op \subseteq id
-- = {- 余反射の定義 -}
-- A^op は余反射
--
-- | Ex 4.10
--
-- C を余反射とする.
-- (C . R) \cap S = C . (R \cap S) を示す.
--
-- (C . R) \cap S
-- \subseteq {- モジュラ則 -}
-- C . (R \cap (C^op . S))
-- \subseteq {- C^op も余反射 C^op \subseteq id -}
-- C . (R \cap S)
--
-- C . (R \cap S)
-- \subseteq {- 単調性 -}
-- (C . R) \cap (C . S)
-- \subseteq {- C は余反射 -}
-- (C . R) \cap S
--
-- | Ex 4.11
--
-- Ex 4.9 A, B が余反射なら A . B = A \cap B
-- Ex 4.10 の双対により (X \cap id) . C = (X . C) \cap id
--
-- 下から順に
--
-- (X \cap id) . C -- 下から一番目
-- = {- Ex 4.9 (X \cap id) は余反射 -}
-- (X \cap id) \cap C
-- = {- 交わりの交換則 -}
-- C \cap (X \cap id)
-- = {- Ex 4.9 -}
-- C . (X \cap id) -- 下から二番目
-- = {- Ex 4.9 -}
-- C \cap (X \cap id) -- ★途中
-- = {- 交わりの冪等則 -}
-- C \cap (X \cap id) \cap C
-- = {- Ex 4.10 の双対 -}
-- C \cap (X . C \cap id)
-- = {- Ex 4.9 -}
-- C . (X . C \cap id)
-- = {- Ex 4.10 -}
-- (C . X . C) \cap id -- 下から三番目
--
-- (C . X) \cap id -- 一番上の左辺
-- = {- Ex 4.10 -}
-- C . (X \cap id) -- 下から二番目
-- = {- Ex 4.9 (X \cap id) は余反射 -}
-- C \cap (X \cap id)
-- = {- 交わりの交換則 -}
-- (X \cap id) \cap C
-- = {- Ex 4.10 の双対 -}
-- (X . C) \cap id -- 一番上の右辺
--
-- | Ex 4.12
--
-- C が余反射なら ran (C . R) = C . ran R を示す.
--
-- ran (C . R)
-- = {- ran の定義 (4.12) ran R = (R . R^op) \cap id -}
-- id \cap (C . R . (C . R)^op)
-- = {- 逆 -}
-- id \cap (C . R . R^op . C^op)
-- = {- 余反射は対称的 -}
-- id \cap (C . R . R^op . C)
-- = {- Ex 4.11 (C . X . C) \cap id = C . (X \cap id) -}
-- C . (id \cap (R . R^op))
-- = {- ran の定義 (4.12) ran R = (R . R^op) \cap id -}
-- C . ran R
--
-- | Ex 4.13
--
-- R . R = R なら冪等.
-- 対称的かつ推移的なら冪等であることを示す.
--
-- 対称的 : R \subseteq R^op これは R = R^op でもある.
-- 推移的 : R . R \subseteq R
--
--
-- R
-- \subseteq {- Ex 4.6 -}
-- R . R^op . R
-- = {- R が対称的 -}
-- R . R . R
-- \subseteq {- R が推移的 -}
-- R . R
--
-- | Ex 4.14
--
-- R = R . R^op <=> R が対称的かつ推移的
--
-- (<=)
-- Ex 4.13 より対称的かつ推移的なら冪等である.
-- R . R = R だが 対称的なら R = R^op でもあるので R . R^op = R が言える.
--
-- (=>)
-- R = R . R^op
-- = {- 逆 -}
-- R^op = (R . R^op)^op
-- = {- 逆 -}
-- R^op = R^op^op . R^op
-- = {- 逆 -}
-- R^op = R . R^op
-- = {- 前提 R = R . R^op -}
-- R^op = R
-- よって対称的である.
-- また対称なので
-- R = R^op
-- = {- 合成 -}
-- R . R = R . R^op
-- = {- 前提 R = R . R^op -}
-- R . R = R
-- したがって推移的である.
--
-- | Ex 4.15
--
-- S が単一 => S = S . S^op . S
-- 逆は成り立つか? という設問
--
-- まず S がなんであれ S \subseteq S . S^op . S (Ex 4.6)
-- S が単一つまり S . S^op \subseteq id なので S . S^op . S \subseteq id . S = S でもある.
-- よって S = S . S^op . S
--
-- 逆は成り立たない.
-- S : {0, 1} <- {0, 1} で, S = {(0, 0), (1, 0)} の場合を考える.
-- S . S^op . S = {(0, 0), (1, 0)} となり S と等しい.
-- だが S . S^op = {(0,0),(0,1),(1,0),(1,1)} なので id より大きく, この S は単一ではない.
--
-- | Ex 4.16
--
-- ran (R \cap (S . T)) = ran ((R . T^op) \cap S)
--
-- 普遍性による定義 (4.11)
-- 任意の余反射 X について ran R \subseteq X == R \subseteq X . R
--
-- 直接的な定義 (4.12)
-- ran R = (R . R^op) \cap id
--
-- ran (R \cap (S . T))
-- = {- (4.15) ran (X \cap Y) = id \cap (X . Y^op) -}
-- id \cap (R . (S . T)^op)
-- = {- 逆 -}
-- id \cap (R . (T^op . S^op))
-- = {- 合成は結合的 -}
-- id \cap ((R . T^op) . S^op)
-- = {- (4.15) ran (X \cap Y) = id \cap (X . Y^op) -}
-- ran ((R . T^op) \cap S)
--
-- | Ex 4.17
--
-- dom R . f = f . dom (R . f) を示す
--
-- まず dom R = ran R^op と ran の直接的な定義 (4.12) ran R = (R . R^op) \cap id とから
-- dom R = ran R^op = (R^op . R) \cap id である.
--
-- dom R . f
-- = {- dom R = id \cap (R^op . R) -}
-- (id \cap (R^op . R)) . f
-- = {- 関数を交わりに分配 -}
-- id . f \cap (R^op . R . f)
-- = {- id は左右の単位元 -}
-- f . id \cap (R^op . R . f)
-- = {- 関数は単一, (4.16): S が単一 => モジュラ則は等式 (S . R) \cap T = S . (R \cap (S^op . T)) -}
-- f . (id \cap (f^op . R^op . R . f))
-- = {- 逆 -}
-- f . (id \cap ((R . f)^op . R . f))
-- = {- dom R = id \cap (R^op . R) -}
-- f . dom (R . f)
--
-- | Ex 4.18
--
-- 以下の通り定義すればよい
--
-- (R . S) (a, b) = \sqcup (R (a, c) \sqcap S (c, b))
-- R^op (a, b) = R (b, a)
-- (R \cap S) (a, b) = R (a, b) \sqcap S (a, b)
--
-- ???
--
-- | Ex 4.19
--
-- m^op . m = id <=> m がモノ を示す
--
-- モノ: f = g <=> m . f = m . g
--
-- (=>)
--
-- m^op . m = id なら m はモノである,を示す.
-- (=>)
-- f = g
-- = {- ライプニッツ -}
-- m . f = m . g
-- (<=)
-- m . f = m . g
-- = {- ライプニッツ -}
-- m^op . m . f = m^op . m . g
-- = {- 前提: m^op . m = id -}
-- id . f = id . g
-- = {- 恒等射は左単位元 -}
-- f = g
-- よって m がモノであることが示せた.
--
-- (<=)
--
-- (f, g) を m^op . m の表とする.
-- m^op . m = f . g^op
-- = {- 入れ換え -}
-- m^op . m . g = f
-- = {- 入れ換え -}
-- m . g = m . f
-- = {- m はモノ -}
-- g = f
-- => {- (f, g) が m^op . m の表 -}
-- m^op . m = f . g^op = f . f^op = id
--
-- | Ex 4.20
--
-- 任意の射 f について f = m . c で c . c^op = id , m^op . m = id なる関数 c, m が存在することを示す.
--
-- (m, n) を f . f^op の表とすると,
-- f . f^op = m . n^op で m^op . m \cap n^op . n = id
--
-- f は関数で関数は単一なので f . f^op \subseteq id.
-- よって m . n^op \subseteq id である.
--
-- m^op . m \cap n^op . n = id なので,
-- m . n^op \subseteq id であり,よって m \subseteq n が得られるが,
-- 関数なので m = n と等式にできる.
-- よって m^op . m = id である.
-- 前問により m がモノであることが分かる.
-- f . f^op = m . m^op なので f = m . m^op . f となり
-- f = m . c となるような c が存在する.(c = m^op . f)
--
-- m^op . m = id なので m^op . m . c = c であり, よって m^op . f = c が得られる.
-- よって,
-- c . c^op
-- = {- m^op . f = c -}
-- m^op . f . f^op . m
-- = {- (m, m) は f . f^op の表 -}
-- m^op . m . m^op . m
-- = {- m^op . m = id -}
-- id
--
-- 任意の m は同型を除いて一意であることを示す.
-- ???
-- | Ex 4.21
--
-- (f, g) が R の表なので,
-- R = f . g^op で f^op . f \cap g^op . g = id
--
-- R が単一なので
-- R . R^op \subseteq id
-- = {- R = f . g^op -}
-- f . g^op . g . f^op \subseteq id
-- = {- 入れ替え -}
-- g^op . g \subseteq f^op . f
-- = {- 交わり: R \subseteq S は R \cap S = R の省略形として定義 -}
-- g^op . g \cap f^op . f = g^op . g
-- = {- (f, g) は表 g^op . g \cap f^op . f = id なので -}
-- g^op . g = id
-- = {- Ex 2.4 -}
-- g はモノ
--
-- | Ex 4.22
--
-- R = f . g^op で R が全面なら g . g^op = id を示す
-- (memo: Sが単一とは S . S^op \subseteq id のこと)
--
-- R が全面なので
--
-- id \subseteq R^op . R
-- = {- R = f . g^op -}
-- id \subseteq g . f^op . f . g^op
-- = {- 交わり: R \subseteq S は R \cap S = R の省略形として定義 -}
-- id \cap g . f^op . f . g^op = id
-- ~~ ~~ ~~~~~~~~~~~~~~~
-- T S R
-- = {- g は関数、関数は単一なのでモジュラ則は (S . R) \cap T = S . (R \cap (S^op . T)) -}
-- g . (f^op . f . g^op \cap (g^op . id)) = id
-- = {- id は単位元 -}
-- g . (f^op . f . g^op \cap g^op) = id
-- = {- 交わりを使った単調性の公理 -}
-- id \subseteq ((g . f^op . f . g^op) \cap (g . g^op))
-- = {- 交わり: R \subseteq S は R \cap S = R の省略形として定義 -}
-- id \cap (g . f^op . f . g^op) \cap (g . g^op) = id
-- = {- 交わりの結合性 -}
-- (id \cap (g . f^op . f . g^op)) \cap g . g^op = id
-- = {- 上の通り id \cap g . f^op . f . g^op = id -}
-- id \cap g . g^op = id
-- = {- 交わり: R \subseteq S は R \cap S = R の省略形として定義 -}
-- id \subseteq g . g^op
--
-- なお, g は関数なので単一つまり g . g^op \subseteq id なので,
-- id \subseteq g . g^op \subseteq id だから g . g^op = id
--
-- | Ex 4.23
--
-- Ex 4.21 から (f, g) が R の表で, R が単一なら g^op . g = id
-- Ex 4.22 から (f, g) が R の表で, R が全面なら g . g^op = id
-- よって R が関数なら単一かつ全面なので g^op . g = g . g^op = id なので同型射である.
--
-- | Ex 4.24
--
-- h . Q^op \subseteq R かつ Q . k^op \subseteq S
-- となる全面な関係 Q が存在する場合に限り h . k^op \subseteq R . S を示す.
--
-- 単調性から
-- h . Q^op . Q . k^op \subseteq R . S
-- Qが全面なので, id \subseteq Q^op . Q だから
-- h . k^op \subseteq R . S となる.
--
-- 逆に, h . k^op \subseteq R . S とする.
-- Q = (R^op . h) \cap (S . k) と定義する.
--
-- Q が全面
-- = {- (4.18)により -}
-- id \subseteq h^op . R . S . k
-- = {- 入れ換え -}
-- h . k^op \subseteq R . S
-- = {- 仮定から -}
-- true
--
-- h . Q^op \subseteq R と Q . k^op \subseteq S を示すには,
--
-- h . Q^op
-- \subseteq {- Q の定義 -}
-- h . (R^op . h)^op
-- = {- 逆 -}
-- h . h^op . R
-- \subseteq {- h は単一 -}
-- R
--
-- Q . k^op \subseteq S は両辺相補演算をとり k . Q \subseteq S^op となるので,
-- k => h , S^op => R ととれば上の証明と同じ.
--
-- | Ex 4.25
--
-- skip
--
-- | Ex 4.26
--
-- 任意の X, Y : A <- 1 について (X \subseteq Y) == (ran X \subseteq ran Y) を示す.
--
-- A <- 1 が A の要素を指定しているとみなせば自明に思える.
--
-- ran X . !^op = X を示せれば良い.
--
-- | Ex 4.27
--
-- dom S = id \cap \Pi . S を示す
--
-- まず C を余反射とする,
-- dom S が余反射であるため, C \subseteq dom S となる任意の C も余反射である.
-- よって C を余反射と仮定しても一般性は損なわれない.
-- すると,
--
-- C \subseteq id \cap \Pi . S
-- = {- 交わり X \cap Y \subseteq Y -}
-- C \subseteq \Pi . S
-- = {- \Pi = !^op . ! -}
-- C \subseteq !^op . ! . S
-- = {- 入れ換え -}
-- ! . C \subseteq ! . S
-- = {- Ex 4.26 の双対 dom C = ! . C -}
-- dom C \subseteq dom S
-- = {- C は余反射なので dom C = C -}
-- C \subseteq dom S
--
-- | Ex 4.28
--
-- 結びの普遍性
-- \Cup H \subseteq X == forall R <- H: R \subseteq X
--
-- \Cap H = \Cup {S | R <- H: S \subseteq R} を示したい.
--
-- ???
--
-- | Ex 4.29
--
-- ran (\Cup H) = \Cup {ran X | X <- H} を示す.
--
-- ???
--
-- | Ex 4.30
--
-- R - S \subseteq X == R \subseteq S \cup X
--
-- まず(-)演算の存在を示す.
--
-- R - S = \Cap {X | R \subseteq S \cup X} と定義すればよい.
--
-- ???
--
| cutsea110/aop | src/Chap04.hs | bsd-3-clause | 16,358 | 0 | 2 | 3,777 | 564 | 563 | 1 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Test.Tasty
import Test.Tasty.HUnit
import Text.Hastache
import qualified Trurl as T
import qualified SimpleParams as S
-- | Entry point: run the full tasty test tree.
main :: IO ()
main = defaultMain tests
-- | Top-level tree combining the Trurl and SimpleParams suites.
tests :: TestTree
tests = testGroup "Tests" [trurlTests, simplParamsTests]
-- | Unit tests for the core Trurl helpers: template file-name resolution and
-- the hastache rendering contexts built from JSON parameter strings.
-- NOTE(review): the two "getFileName" cases share one label - consider
-- distinct names so failures are distinguishable in the report.
trurlTests :: TestTree
trurlTests = testGroup "Trurl unit tests"
  [ testCase "getFullFileName" $
      assertEqual "Checking full template path" "a/b.hs" (T.getFullFileName "a/" "b")
  , testCase "getFileName" $
      assertEqual "Checking file name" "abc.hs" (T.getFileName "abc")
  , testCase "getFileName" $
      assertEqual "Checking file name" "abc.html" (T.getFileName "abc.html")
  -- An empty template renders to empty output regardless of context.
  , testCase "mkJsonContext empty" $ do
      generated <- hastacheStr defaultConfig "" (T.mkJsonContext "{\"a\":11}")
      assertEqual "Checking generated text" "" generated
  -- A variable absent from the context renders as the empty string.
  , testCase "mkJsonContext absent variable" $ do
      generated <- hastacheStr defaultConfig "{{b}}" (T.mkJsonContext "{\"a\":11}")
      assertEqual "Checking generated text" "" generated
  , testCase "mkJsonContext simple object" $ do
      generated <- hastacheStr defaultConfig "{{a}}" (T.mkJsonContext "{\"a\":11}")
      assertEqual "Checking generated text" "11" generated
  , testCase "mkJsonContext complex object" $ do
      generated <- hastacheStr defaultConfig "{{a}}-{{b}}" (T.mkJsonContext "{\"a\":11,\"b\":\"abc\"}")
      assertEqual "Checking generated text" "11-abc" generated
  -- Arrays expand mustache sections once per element.
  , testCase "mkJsonContext complex array" $ do
      generated <- hastacheStr defaultConfig "{{#abc}}{{name}}{{/abc}}" (T.mkJsonContext "{\"abc\":[{\"name\":\"1\"},{\"name\":\"2\"},{\"name\":\"3\"}]}")
      assertEqual "Checking generated text" "123" generated
  -- A nested object behaves like a one-element section.
  , testCase "mkJsonContext nested object" $ do
      generated <- hastacheStr defaultConfig "{{#abc}}{{name}}{{/abc}}" (T.mkJsonContext "{\"abc\":{\"name\":\"1\"}}")
      assertEqual "Checking generated text" "1" generated
  -- The project context always provides ProjectName, even with empty params.
  , testCase "mkProjContext for empty params" $ do
      generated <- hastacheStr defaultConfig "{{ProjectName}}" (T.mkProjContext "abc" "{}")
      assertEqual "Checking generated text" "abc" generated
  ]
-- | Unit tests for the SimpleParams -> JSON conversion.
-- Fixed: this group was copy-pasted with the label "Trurl unit tests", which
-- made the two suites indistinguishable in the test report; the two delimiter
-- cases also shared a single label.
simplParamsTests :: TestTree
simplParamsTests = testGroup "SimpleParams unit tests"
  [ testCase "parseEmbedded without delimiter" $
      assertEqual "Checking parseEmbedded" "" (S.parseEmbedded "abc")
  , testCase "parseEmbedded with delimiter" $
      assertEqual "Checking parseEmbedded" "{\"name\":\"abc\",\"type\":\"efg\"}" (S.parseEmbedded "abc#efg")
  -- '@' marks the last element of an embedded list.
  , testCase "parseEmbedded with delimiter and last marker" $
      assertEqual "Checking parseEmbedded" "{\"name\":\"abc\",\"type\":\"efg\",\"last\":true}" (S.parseEmbedded "abc#efg@")
  , testCase "simpleParamsToJson" $
      assertEqual "Checking simpleParamsToJson"
        "{\"abc\":123,\"efg\":456,\"zxc\":[1,2,3],\"ttt\":[{\"name\":\"abc\",\"type\":\"efg\"},{\"name\":\"hck\",\"type\":\"qwe\"},{\"name\":\"zxc\",\"type\":\"vbn\",\"last\":true}]}"
        (S.simpleParamsToJson "abc:123,efg:456,zxc:[1,2,3],ttt:[abc#efg,hck#qwe,zxc#vbn@]")
  , testCase "simple params with a space" $
      assertEqual "Checking simple params with a space"
        "{\"props\":[{\"name\":\"cover\",\"type\":\"Text\"}, {\"name\":\"year\",\"type\":\"Integer\",\"last\":true}]}"
        (S.simpleParamsToJson "props:[cover#Text, year#Integer@]")
  ]
| DNNX/trurl | tests/Main.hs | bsd-3-clause | 3,384 | 0 | 14 | 562 | 600 | 292 | 308 | 56 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Kmip10Spec where
import Ttlv.Data
import Ttlv.Parser.Binary
import Ttlv.Tag
import Test.Hspec
import Data.Either (isRight)
import qualified Data.ByteString as B
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Lazy as L
import Ttlv.Validator.Message
import Ttlv.Validator.Objects
import Ttlv.Validator.Structures
import Kmip10Data
-- | KMIP 1.0 use-case tests over the captured byte vectors in "Kmip10Data":
-- every request/response must survive a decode/encode round trip, and must
-- validate against the request/response message grammars.
spec :: Spec
spec = do
  describe "Use Cases" $ do
    describe "1.0" $ do
      describe "3.1 Basic functionality" $ do
        -- Round trip: decode the raw bytes, then re-encoding the parsed
        -- TTLV must reproduce them exactly.
        let runIt x = do
              let ttlv = decodeTtlv x
              ttlv `shouldSatisfy` isRight
              let Right ttlv' = ttlv
              encodeTtlv ttlv' `shouldBe` x
        it "3.1.1 Create / Destroy" $ do
          runIt kmip_1_0__3_1_1_create_request
          runIt kmip_1_0__3_1_1_create_response
          runIt kmip_1_0__3_1_1_destroy_request
          runIt kmip_1_0__3_1_1_destroy_response
        it "3.1.2 Register / Create / Get Attributes / Destroy" $ do
          runIt kmip_1_0__3_1_2_register_request
          runIt kmip_1_0__3_1_2_register_response
          runIt kmip_1_0__3_1_2_create_request
          runIt kmip_1_0__3_1_2_create_response
          runIt kmip_1_0__3_1_2_get_attributes_request
          runIt kmip_1_0__3_1_2_get_attributes_response
          runIt kmip_1_0__3_1_2_destroy_request1
          runIt kmip_1_0__3_1_2_destroy_response1
        it "3.1.3 Create / Locate / Get / Destroy" $ do
          runIt kmip_1_0__3_1_3_create_request
          runIt kmip_1_0__3_1_3_create_response
          runIt kmip_1_0__3_1_3_locate_request1
          runIt kmip_1_0__3_1_3_locate_response1
          runIt kmip_1_0__3_1_3_get_request
          runIt kmip_1_0__3_1_3_get_response
          runIt kmip_1_0__3_1_3_destroy_request
          runIt kmip_1_0__3_1_3_destroy_response
          runIt kmip_1_0__3_1_3_locate_request2
          runIt kmip_1_0__3_1_3_locate_response2
        it "3.1.4 Dual-client use case, ID Placeholder linked Locate & Get batch" $ do
          runIt kmip_1_0__3_1_4_a_register_request
          runIt kmip_1_0__3_1_4_a_register_response
          runIt kmip_1_0__3_1_4_a_create_request
          runIt kmip_1_0__3_1_4_a_create_response
          runIt kmip_1_0__3_1_4_b_locate_request
          runIt kmip_1_0__3_1_4_b_locate_response
          runIt kmip_1_0__3_1_4_b_get_attribute_list_request
          runIt kmip_1_0__3_1_4_b_get_attribute_list_response
          runIt kmip_1_0__3_1_4_b_get_attributes_request
          runIt kmip_1_0__3_1_4_b_get_attributes_response
          runIt kmip_1_0__3_1_4_b_add_attribute_request
          runIt kmip_1_0__3_1_4_b_add_attribute_response
          runIt kmip_1_0__3_1_4_b_modify_attribute_request
          runIt kmip_1_0__3_1_4_b_modify_attribute_response
          runIt kmip_1_0__3_1_4_b_delete_attribute_request
          runIt kmip_1_0__3_1_4_b_delete_attribute_response
          runIt kmip_1_0__3_1_4_a_destroy_request1
          runIt kmip_1_0__3_1_4_a_destroy_response1
          runIt kmip_1_0__3_1_4_a_get_request1
          runIt kmip_1_0__3_1_4_a_get_response1
          runIt kmip_1_0__3_1_4_a_destroy_request2
          runIt kmip_1_0__3_1_4_a_destroy_response2
          runIt kmip_1_0__3_1_4_a_get_request2
          runIt kmip_1_0__3_1_4_a_get_response2
        it "3.1.5 Register / Destroy Secret Data" $ do
          runIt kmip_1_0__3_1_5_register_request
          runIt kmip_1_0__3_1_5_register_response
          runIt kmip_1_0__3_1_5_destroy_request
          runIt kmip_1_0__3_1_5_destroy_response
  -- NOTE(review): original indentation was lost; this section is assumed to
  -- be a sibling of "Use Cases" - only report grouping depends on it.
  describe "1.0 Validation" $ do
    -- Validation: the decoded TTLV must be accepted unchanged by the given
    -- message parser (requestMessage or responseMessage).
    let runIt x y = do
          let j = decodeTtlv y
          j `shouldSatisfy` isRight
          let Right ttlv = j
          runTtlvParser x ttlv `shouldBe` Right ttlv
    it "3.1.1" $ do
      runIt requestMessage kmip_1_0__3_1_1_create_request
      runIt responseMessage kmip_1_0__3_1_1_create_response
      runIt requestMessage kmip_1_0__3_1_1_destroy_request
      runIt responseMessage kmip_1_0__3_1_1_destroy_response
    it "3.1.2" $ do
      runIt requestMessage kmip_1_0__3_1_2_register_request
      runIt responseMessage kmip_1_0__3_1_2_register_response
      runIt requestMessage kmip_1_0__3_1_2_create_request
      runIt responseMessage kmip_1_0__3_1_2_create_response
      runIt requestMessage kmip_1_0__3_1_2_get_attributes_request
      runIt responseMessage kmip_1_0__3_1_2_get_attributes_response
      runIt requestMessage kmip_1_0__3_1_2_destroy_request1
      runIt responseMessage kmip_1_0__3_1_2_destroy_response1
      runIt requestMessage kmip_1_0__3_1_2_destroy_request2
      runIt responseMessage kmip_1_0__3_1_2_destroy_response2
    it "3.1.3" $ do
      runIt requestMessage kmip_1_0__3_1_3_create_request
      runIt responseMessage kmip_1_0__3_1_3_create_response
      runIt requestMessage kmip_1_0__3_1_3_locate_request1
      runIt responseMessage kmip_1_0__3_1_3_locate_response1
      runIt requestMessage kmip_1_0__3_1_3_get_request
      runIt responseMessage kmip_1_0__3_1_3_get_response
      runIt requestMessage kmip_1_0__3_1_3_destroy_request
      runIt responseMessage kmip_1_0__3_1_3_destroy_response
      runIt requestMessage kmip_1_0__3_1_3_locate_request2
      runIt responseMessage kmip_1_0__3_1_3_locate_response2
    it "3.1.4" $ do
      runIt requestMessage kmip_1_0__3_1_4_a_register_request
      runIt responseMessage kmip_1_0__3_1_4_a_register_response
      runIt requestMessage kmip_1_0__3_1_4_a_create_request
      runIt responseMessage kmip_1_0__3_1_4_a_create_response
      runIt requestMessage kmip_1_0__3_1_4_b_locate_request
      runIt responseMessage kmip_1_0__3_1_4_b_locate_response
      runIt requestMessage kmip_1_0__3_1_4_b_get_attribute_list_request
      runIt responseMessage kmip_1_0__3_1_4_b_get_attribute_list_response
      runIt requestMessage kmip_1_0__3_1_4_b_get_attributes_request
      runIt responseMessage kmip_1_0__3_1_4_b_get_attributes_response
      runIt requestMessage kmip_1_0__3_1_4_b_add_attribute_request
      runIt responseMessage kmip_1_0__3_1_4_b_add_attribute_response
      runIt requestMessage kmip_1_0__3_1_4_b_modify_attribute_request
      runIt responseMessage kmip_1_0__3_1_4_b_modify_attribute_response
      runIt requestMessage kmip_1_0__3_1_4_b_delete_attribute_request
      runIt responseMessage kmip_1_0__3_1_4_b_delete_attribute_response
      runIt requestMessage kmip_1_0__3_1_4_a_destroy_request1
      runIt responseMessage kmip_1_0__3_1_4_a_destroy_response1
      runIt requestMessage kmip_1_0__3_1_4_a_get_request1
      runIt responseMessage kmip_1_0__3_1_4_a_get_response1
      runIt requestMessage kmip_1_0__3_1_4_a_destroy_request2
      runIt responseMessage kmip_1_0__3_1_4_a_destroy_response2
      runIt requestMessage kmip_1_0__3_1_4_a_get_request2
      runIt responseMessage kmip_1_0__3_1_4_a_get_response2
    it "3.1.5" $ do
      runIt requestMessage kmip_1_0__3_1_5_register_request
      runIt responseMessage kmip_1_0__3_1_5_register_response
      runIt requestMessage kmip_1_0__3_1_5_destroy_request
      runIt responseMessage kmip_1_0__3_1_5_destroy_response
    it "4.1" $ do
      runIt requestMessage kmip_1_0__4_1_a_create_request
      runIt responseMessage kmip_1_0__4_1_a_create_response
      runIt requestMessage kmip_1_0__4_1_a_get_attribute_request
      runIt responseMessage kmip_1_0__4_1_a_get_attribute_response
      runIt requestMessage kmip_1_0__4_1_a_activate_request
      runIt responseMessage kmip_1_0__4_1_a_activate_response
      runIt requestMessage kmip_1_0__4_1_a_get_attribute_request2
      runIt responseMessage kmip_1_0__4_1_a_get_attribute_response2
      runIt requestMessage kmip_1_0__4_1_b_locate_request
      runIt responseMessage kmip_1_0__4_1_b_locate_response
      runIt requestMessage kmip_1_0__4_1_b_get_request
      runIt responseMessage kmip_1_0__4_1_b_get_response
      runIt requestMessage kmip_1_0__4_1_b_revoke_request
      runIt responseMessage kmip_1_0__4_1_b_revoke_response
      runIt requestMessage kmip_1_0__4_1_b_get_attribute_request
      runIt responseMessage kmip_1_0__4_1_b_get_attribute_response
      runIt requestMessage kmip_1_0__4_1_a_get_attribute_list_request
      runIt responseMessage kmip_1_0__4_1_a_get_attribute_list_response
      runIt requestMessage kmip_1_0__4_1_a_get_attributes_request
      runIt responseMessage kmip_1_0__4_1_a_get_attributes_response
      runIt requestMessage kmip_1_0__4_1_a_add_attribute_request
      runIt responseMessage kmip_1_0__4_1_a_add_attribute_response
      runIt requestMessage kmip_1_0__4_1_a_modify_attribute_request
      runIt responseMessage kmip_1_0__4_1_a_modify_attribute_response
      runIt requestMessage kmip_1_0__4_1_a_delete_attribute_request
      runIt responseMessage kmip_1_0__4_1_a_delete_attribute_response
      runIt requestMessage kmip_1_0__4_1_a_get_request
      runIt responseMessage kmip_1_0__4_1_a_get_response
      runIt requestMessage kmip_1_0__4_1_a_destroy_request
      runIt responseMessage kmip_1_0__4_1_a_destroy_response
| nymacro/hs-kmip | tests/Kmip10Spec.hs | bsd-3-clause | 9,360 | 0 | 24 | 2,027 | 1,322 | 524 | 798 | 173 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
module Maxhbr.Types
where
import Text.Blaze.Html5 (Html)
import Data.Text (Text)
-- | A single entry of the site navigation tree.
data NavEntry
  = NavEntry
  { navEntryTitle    :: String     -- ^ text shown for the entry
  , navEntryTarget   :: String     -- ^ link target
  , navEntryChildren :: [NavEntry] -- ^ nested sub-entries
  , navOrderingIndex :: Int        -- ^ primary sort key
  } deriving (Eq)
-- | Entries are ordered by their ordering index first and, for equal indices,
-- alphabetically by title (lexicographic comparison of the key pair).
-- NOTE(review): 'compare' ignores target and children while the derived 'Eq'
-- looks at every field, so @compare a b == EQ@ does not imply @a == b@ --
-- confirm this asymmetry is intended.
instance Ord NavEntry where
  compare x y =
    compare (navOrderingIndex x, navEntryTitle x)
            (navOrderingIndex y, navEntryTitle y)
-- | The whole navigation tree is just the list of top-level entries.
type Navigation = [NavEntry]
-- | Everything needed to emit one rendered page.
data Page
  = Page
  { pageTitle :: String            -- ^ page title
  , pageNavigation :: Navigation   -- ^ navigation tree shown on this page
  , pageContent :: Html            -- ^ rendered body content
  , pageBreadcrump :: Html         -- ^ rendered breadcrumb trail
  , pageOutputPathes :: [FilePath] -- ^ every path the page is written to
  }
-- | One build artefact: rendered HTML or a plain file copy, each written to
-- one or more destination paths.
data Output
  = HtmlOutput Html [FilePath]
  -- | TextOutput Text [FilePath]
  -- | BSOutput ByteString [FilePath]
  | CopyOutput FilePath [FilePath]
-- | Things that can be turned into a complete 'Page'.
class Pageable a where
  mkToPage :: a -> Page
-- | CSS/style identifier; plain string for now.
type Style = String
-- | Existential wrapper so heterogeneous 'Renderable's can share a list.
data RenderableA
  = forall a.
    Renderable a => RenderableA a
-- | Things that can be rendered into page fragments.
class Renderable a where
  getTitle :: a -> Text
  getBreadCrump :: a -> [Text] -- ^ breadcrumb segments (historical spelling)
  getNavigation :: Navigation -> a -> Navigation -- ^ adapt the global navigation for this item
  getStyle :: a -> Style
  getContent :: a -> Html
-- | A producer maps one input to any number of outputs.
newtype Producer a b
  = Producer
    { produce :: a -> [b]
    }
-- | Accumulated site-generation state.
data State
  = State
    { sNavigation :: Navigation -- ^ navigation built so far
    , sOutputs :: [Output] -- ^ outputs collected so far
    , sRenderables :: [RenderableA] -- ^ renderables still to be processed
    }
| maximilianhuber/maximilian-huber.de | src/Maxhbr/Types.hs | bsd-3-clause | 1,372 | 0 | 9 | 319 | 384 | 231 | 153 | 47 | 0 |
{-# LANGUAGE JavaScriptFFI #-}
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module : Program.Controllers.GeoJSONFileImport
-- Copyright : (c) Artem Chirkin
-- License : MIT
--
-- Maintainer : Artem Chirkin <chirkin@arch.ethz.ch>
-- Stability : experimental
--
--
--
-----------------------------------------------------------------------------
module Program.Controllers.GeoJSONFileImport
( registerJSONFileImports
, loadGeoJSONFromLink
, registerClearGeometry
) where
import Data.Geometry.Structure.Feature (SomeJSONInput)
import JsHs.LikeJS.Class
import JsHs.Types
import JsHs.Callback
import JsHs.Useful
import Program.Controllers.GUI (registerLoadingFile, registerClearGeometry)
-- | Load GeoJSON file from local system: wire the GUI file-loading hook
-- so that parse failures are logged and successful parses are passed on.
registerJSONFileImports :: (SomeJSONInput -> IO ()) -> IO ()
registerJSONFileImports fire = registerLoadingFile handler
  where
    handler (Left errMsg) = logText' errMsg
    handler (Right input) = fire input
-- | Load GeoJSON file by url: wrap the user callback in an async JS
-- callback and hand it to the XHR shim.
loadGeoJSONFromLink :: JSString -> (SomeJSONInput -> IO ()) -> IO ()
loadGeoJSONFromLink url callback =
    asyncCallback1 (\jsval -> callback (asLikeJS jsval)) >>= getUrlJSON url
-- | JS shim: fetch the URL ($1) with XMLHttpRequest (falling back to
-- XDomainRequest for old IE), parse the response body as JSON, and pass
-- the parsed value to the callback ($2). Parse and transport errors are
-- reported through logText rather than thrown back into Haskell.
foreign import javascript unsafe "var xmlHttp = new XMLHttpRequest(); \
    \ var json = null; \
    \ var loadjson = function() { \
    \ try { \
    \ json = JSON.parse(xmlHttp.responseText);$2(json); \
    \ } catch (err) { logText('Your browser does not like JSON file you have chosen: ' + err); } \
    \ }; \
    \ var errjson = function() {logText('Your browser cannot execute http request on ' + $1); }; \
    \ try { \
    \ if ('withCredentials' in xmlHttp) { \
    \ xmlHttp.onload = loadjson; \
    \ xmlHttp.onerror = errjson; \
    \ xmlHttp.open( 'GET', $1, true ); \
    \ } else if (typeof XDomainRequest != 'undefined') { \
    \ xmlHttp = new XDomainRequest(); \
    \ xmlHttp.onload = loadjson; \
    \ xmlHttp.onerror = errjson; \
    \ xmlHttp.open( 'GET', $1); \
    \ } else { \
    \ xmlHttp.onload = loadjson; \
    \ xmlHttp.onerror = errjson; \
    \ xmlHttp.open( 'GET', $1, true ); \
    \ } \
    \ xmlHttp.send( ); \
    \ } catch (err) { logText(err); } "
    getUrlJSON :: JSString -> Callback (JSVal -> IO ()) -> IO ()
| achirkin/ghcjs-modeler | src/Program/Controllers/GeoJSONFileImport.hs | bsd-3-clause | 2,364 | 5 | 11 | 554 | 261 | 146 | 115 | 22 | 2 |
{-|
Module : Operations.WithExternalSymbols
Description : Contains all FA operations. Each operation uses the implicit
alphabet.
-}
module Operations.Regular
( charsToSymbols
, isMacrostateAccepting
, run
, module Operations.WithExternalSymbols
, postForEachSymbol
, union
, productUnion
, intersect
, determinize
, complement
, isEmpty
, isSubsetOf
, isUniversal
) where
import Types.Fa
import Data.Set (Set)
import qualified Data.Set as Set
import Data.List ((\\), nub)
import Operations.WithExternalSymbols (post)
import qualified Operations.WithExternalSymbols as ExternalSymbols
-- | Converts a 'String' to a list of symbols, one symbol per character.
charsToSymbols :: String -> [Symbol]
charsToSymbols = map (\ch -> [ch])
-- | Determines whether a macro-state is accepting, i.e. whether it shares
-- at least one state with the automaton's final states.
isMacrostateAccepting :: Ord sta => Fa sym sta -> Set sta -> Bool
isMacrostateAccepting fa macrostate =
  not (Set.null (macrostate `Set.intersection` finalStates fa))
-- |Returns the post states for each symbol of the alphabet.
-- Delegates to the explicit-alphabet variant, using the automaton's own
-- symbol set as the alphabet.
postForEachSymbol :: (Ord sym, Ord sta) => Fa sym sta -> Set sta -> Set (Set sta)
postForEachSymbol fa state =
  ExternalSymbols.postForEachSymbol fa state (symbols fa)
-- | Checks whether a FA accepts a string: step the macro-state over every
-- input symbol starting from the initial states, then test acceptance.
run :: (Ord sym, Ord sta) => Fa sym sta -> [sym] -> Bool
run fa input =
  isMacrostateAccepting fa (foldl (post fa) (initialStates fa) input)
-- | Creates a union of two FAs by uniting initial states, final states
-- and transitions component-wise.
union :: (Ord sym, Ord sta) => Fa sym sta -> Fa sym sta -> Fa sym sta
union (Fa is1 fs1 ts1) (Fa is2 fs2 ts2) =
  Fa (is1 `Set.union` is2) (fs1 `Set.union` fs2) (ts1 `Set.union` ts2)
-- | Creates a union of two FAs with product state ('sta1', 'sta2').
-- Both automata are first completed over the combined alphabet so the
-- product construction is total.
productUnion :: (Ord sym, Ord sta1, Ord sta2) => Fa sym sta1 -> Fa sym sta2 -> Fa sym (Set sta1, Set sta2)
productUnion fa1 fa2 =
  let combined = symbols fa1 `Set.union` symbols fa2
  in ExternalSymbols.productUnion combined
       (ExternalSymbols.complete combined fa1)
       (ExternalSymbols.complete combined fa2)
-- | Creates an intersection of two FAs over their combined alphabet.
intersect :: (Ord sym, Ord sta1, Ord sta2) => Fa sym sta1 -> Fa sym sta2 -> Fa sym (sta1, sta2)
intersect fa1 fa2 = ExternalSymbols.intersect combined fa1 fa2
  where combined = symbols fa1 `Set.union` symbols fa2
-- | Converts a FA to an equivalent deterministic FA, using the
-- automaton's own alphabet.
determinize :: (Ord sym, Ord sta) => Fa sym sta -> Fa sym (Set sta)
determinize fa = ExternalSymbols.determinize alphabet fa
  where alphabet = symbols fa
-- | Creates a complement of a FA over the automaton's own alphabet.
complement :: (Ord sym, Ord sta) => Fa sym sta -> Fa sym (Set sta)
complement fa = ExternalSymbols.complement alphabet fa
  where alphabet = symbols fa
-- | Checks whether a FA accepts the empty language.
isEmpty :: (Ord sym, Ord sta) => Fa sym sta -> Bool
isEmpty fa = ExternalSymbols.isEmpty alphabet fa
  where alphabet = symbols fa
-- | Checks whether the language of the first FA is a subset of the
-- second FA's language, using the naive algorithm.
isSubsetOf :: (Ord sym, Ord sta) => Fa sym sta -> Fa sym sta -> Bool
isSubsetOf fa1 fa2 = ExternalSymbols.isSubsetOf syms1 syms2 fa1 fa2
  where
    syms1 = symbols fa1
    syms2 = symbols fa2
-- | Checks whether a FA accepts every string over its alphabet, using
-- the naive algorithm.
isUniversal :: (Ord sym, Ord sta) => Fa sym sta -> Bool
isUniversal fa = ExternalSymbols.isUniversal alphabet fa
  where alphabet = symbols fa
| jakubriha/automata | src/Operations/Regular.hs | bsd-3-clause | 3,464 | 0 | 10 | 666 | 1,000 | 526 | 474 | 65 | 2 |
{-# LANGUAGE PackageImports #-}
-- | Shim module for the no-Prelude package: re-exports the whole of
-- "base"'s 'Foreign.Storable' unchanged, so dependents see the standard API.
module Foreign.Storable (module M) where
import "base" Foreign.Storable as M
| silkapp/base-noprelude | src/Foreign/Storable.hs | bsd-3-clause | 114 | 0 | 4 | 18 | 21 | 15 | 6 | 3 | 0 |
module Module4.Task1 where

-- system code
data Color = Red | Green | Blue

-- solution code
-- | Render every color as its constructor name.
instance Show Color where
  show color = case color of
    Red   -> "Red"
    Green -> "Green"
    Blue  -> "Blue"
| dstarcev/stepic-haskell | src/Module4/Task1.hs | bsd-3-clause | 192 | 0 | 6 | 51 | 53 | 30 | 23 | 6 | 0 |
---------------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Bridge.Boolector
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Interface to the Boolector SMT solver. Import this module if you want to use the
-- Boolector SMT prover as your backend solver. Also see:
--
-- - "Data.SBV.Bridge.ABC"
--
-- - "Data.SBV.Bridge.CVC4"
--
-- - "Data.SBV.Bridge.MathSAT"
--
-- - "Data.SBV.Bridge.Yices"
--
-- - "Data.SBV.Bridge.Z3"
--
---------------------------------------------------------------------------------
module Data.SBV.Bridge.Boolector (
-- * Boolector specific interface
sbvCurrentSolver
-- ** Proving, checking satisfiability, and safety
, prove, sat, safe, allSat, isVacuous, isTheorem, isSatisfiable
-- ** Optimization routines
, optimize, minimize, maximize
-- * Non-Boolector specific SBV interface
-- $moduleExportIntro
, module Data.SBV
) where
import Data.SBV hiding (prove, sat, safe, allSat, isVacuous, isTheorem, isSatisfiable, optimize, minimize, maximize, sbvCurrentSolver)
-- | Current solver instance, pointing to Boolector.
sbvCurrentSolver :: SMTConfig
sbvCurrentSolver = boolector
-- | Prove theorems, using the Boolector SMT solver
prove :: Provable a
      => a              -- ^ Property to check
      -> IO ThmResult   -- ^ Response from the SMT solver, containing the counter-example if found
prove = proveWith sbvCurrentSolver

-- | Find satisfying solutions, using the Boolector SMT solver
sat :: Provable a
    => a              -- ^ Property to check
    -> IO SatResult   -- ^ Response of the SMT Solver, containing the model if found
sat = satWith sbvCurrentSolver

-- | Check safety, i.e., prove that all 'sAssert' conditions are statically true in all paths
safe :: SExecutable a
     => a              -- ^ Program to check the safety of
     -> IO SafeResult  -- ^ Response of the SMT solver, containing the unsafe model if found
safe = safeWith sbvCurrentSolver

-- | Find all satisfying solutions, using the Boolector SMT solver
allSat :: Provable a
       => a                -- ^ Property to check
       -> IO AllSatResult  -- ^ List of all satisfying models
allSat = allSatWith sbvCurrentSolver

-- | Check vacuity of the explicit constraints introduced by calls to the 'constrain' function, using the Boolector SMT solver
isVacuous :: Provable a
          => a        -- ^ Property to check
          -> IO Bool  -- ^ True if the constraints are unsatisfiable
isVacuous = isVacuousWith sbvCurrentSolver

-- | Check if the statement is a theorem, with an optional time-out in seconds, using the Boolector SMT solver
isTheorem :: Provable a
          => Maybe Int       -- ^ Optional time-out, specify in seconds
          -> a               -- ^ Property to check
          -> IO (Maybe Bool) -- ^ Returns Nothing if time-out expires
isTheorem = isTheoremWith sbvCurrentSolver

-- | Check if the statement is satisfiable, with an optional time-out in seconds, using the Boolector SMT solver
isSatisfiable :: Provable a
              => Maybe Int       -- ^ Optional time-out, specify in seconds
              -> a               -- ^ Property to check
              -> IO (Maybe Bool) -- ^ Returns Nothing if time-out expires
isSatisfiable = isSatisfiableWith sbvCurrentSolver
-- | Optimize cost functions, using the Boolector SMT solver
optimize :: (SatModel a, SymWord a, Show a, SymWord c, Show c)
         => OptimizeOpts               -- ^ Parameters to optimization (Iterative, Quantified, etc.)
         -> (SBV c -> SBV c -> SBool)  -- ^ Betterness check: This is the comparison predicate for optimization
         -> ([SBV a] -> SBV c)         -- ^ Cost function
         -> Int                        -- ^ Number of inputs
         -> ([SBV a] -> SBool)         -- ^ Validity function
         -> IO (Maybe [a])             -- ^ Returns Nothing if there is no valid solution, otherwise an optimal solution
optimize = optimizeWith sbvCurrentSolver

-- | Minimize cost functions, using the Boolector SMT solver
minimize :: (SatModel a, SymWord a, Show a, SymWord c, Show c)
         => OptimizeOpts        -- ^ Parameters to optimization (Iterative, Quantified, etc.)
         -> ([SBV a] -> SBV c)  -- ^ Cost function to minimize
         -> Int                 -- ^ Number of inputs
         -> ([SBV a] -> SBool)  -- ^ Validity function
         -> IO (Maybe [a])      -- ^ Returns Nothing if there is no valid solution, otherwise an optimal solution
minimize = minimizeWith sbvCurrentSolver

-- | Maximize cost functions, using the Boolector SMT solver
maximize :: (SatModel a, SymWord a, Show a, SymWord c, Show c)
         => OptimizeOpts        -- ^ Parameters to optimization (Iterative, Quantified, etc.)
         -> ([SBV a] -> SBV c)  -- ^ Cost function to maximize
         -> Int                 -- ^ Number of inputs
         -> ([SBV a] -> SBool)  -- ^ Validity function
         -> IO (Maybe [a])      -- ^ Returns Nothing if there is no valid solution, otherwise an optimal solution
maximize = maximizeWith sbvCurrentSolver
{- $moduleExportIntro
The remainder of the SBV library that is common to all back-end SMT solvers, directly coming from the "Data.SBV" module.
-}
| Copilot-Language/sbv-for-copilot | Data/SBV/Bridge/Boolector.hs | bsd-3-clause | 5,395 | 0 | 14 | 1,467 | 716 | 409 | 307 | 60 | 1 |
module Exp.Test where
-- $Id$
import Exp.Property
import Autolib.Exp
import Autolib.Exp.Einfach
import Autolib.Exp.Sanity
import Autolib.Reporter
import Autolib.Reporter.Set
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Size
import Autolib.Symbol
tests ps exp = sequence_ $ do p <- ps ; return $ test p exp
{-
test :: ( Symbol c, Reader [c], ToDoc [c] )
=> Property c
-> RX c
-> Reporter ()
-}
-- Checker for a single property of a regular expression. Each clause
-- reports through the 'Reporter' monad; user-facing messages are German.
-- The expression's size must not exceed the configured maximum.
test (Max_Size s) exp = do
    let g = size exp
    assert ( g <= s )
        $ text "Größe des Ausdrucks" <+> parens ( toDoc g ) <+> text "ist höchstens" <+> toDoc s <+> text "?"
-- The expression may only use symbols from the given alphabet.
test (Alphabet a) exp = do
    sanity_alpha a exp
-- The expression must be a "simple" expression.
test (Simple) exp = do
    ist_einfach exp
-- The expression may use the extended operator set.
test (Extended) exp = do
    ist_erweitert exp
-- Only the listed operator keys are allowed.
test (AllowedKeys ks) exp = do
    sanity_keys ks exp
-- The star height of the expression must not exceed the bound.
test (Max_Star_Height h) exp = do
    let sh = star_height exp
    inform $ vcat
        [ text "Ausdruck" <+> parens ( toDoc exp )
          </> text "hat Sternhöhe" <+> toDoc sh
        , text "höchste zulässige Höhe ist"
          </> toDoc h
        ]
    when ( sh > h ) $ reject $ text "Das ist zu hoch."
-- Catch-all: any property without a checker is rejected explicitly.
test prop exp = do
    reject $ fsep [ text "test für", toDoc prop
                  , text "noch nicht implementiert"
                  ]
| Erdwolf/autotool-bonn | src/Exp/Test.hs | gpl-2.0 | 1,253 | 4 | 15 | 349 | 412 | 200 | 212 | 35 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.DescribeNetworkAcls
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes one or more of your network ACLs.
--
-- For more information about network ACLs, see <http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_ACLs.html Network ACLs> in the /AmazonVirtual Private Cloud User Guide/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeNetworkAcls.html>
module Network.AWS.EC2.DescribeNetworkAcls
(
-- * Request
DescribeNetworkAcls
-- ** Request constructor
, describeNetworkAcls
-- ** Request lenses
, dna1DryRun
, dna1Filters
, dna1NetworkAclIds
-- * Response
, DescribeNetworkAclsResponse
-- ** Response constructor
, describeNetworkAclsResponse
-- ** Response lenses
, dnarNetworkAcls
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- | Request parameters for the EC2 @DescribeNetworkAcls@ query call.
data DescribeNetworkAcls = DescribeNetworkAcls
    { _dna1DryRun        :: Maybe Bool -- ^ dry-run flag
    , _dna1Filters       :: List "Filter" Filter -- ^ result filters
    , _dna1NetworkAclIds :: List "item" Text -- ^ ACL IDs to describe (empty = all)
    } deriving (Eq, Read, Show)
-- | 'DescribeNetworkAcls' constructor; every field starts out empty.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dna1DryRun' @::@ 'Maybe' 'Bool'
--
-- * 'dna1Filters' @::@ ['Filter']
--
-- * 'dna1NetworkAclIds' @::@ ['Text']
--
describeNetworkAcls :: DescribeNetworkAcls
describeNetworkAcls = DescribeNetworkAcls
    { _dna1DryRun        = Nothing
    , _dna1Filters       = mempty
    , _dna1NetworkAclIds = mempty
    }
-- | Lens onto the dry-run flag of the request.
dna1DryRun :: Lens' DescribeNetworkAcls (Maybe Bool)
dna1DryRun = lens _dna1DryRun (\rq v -> rq { _dna1DryRun = v })
-- | One or more filters.
--
-- 'association.association-id' - The ID of an association ID for the ACL.
--
-- 'association.network-acl-id' - The ID of the network ACL involved in the
-- association.
--
-- 'association.subnet-id' - The ID of the subnet involved in the association.
--
-- 'default' - Indicates whether the ACL is the default network ACL for the VPC.
--
-- 'entry.cidr' - The CIDR range specified in the entry.
--
-- 'entry.egress' - Indicates whether the entry applies to egress traffic.
--
-- 'entry.icmp.code' - The ICMP code specified in the entry, if any.
--
-- 'entry.icmp.type' - The ICMP type specified in the entry, if any.
--
-- 'entry.port-range.from' - The start of the port range specified in the
-- entry.
--
-- 'entry.port-range.to' - The end of the port range specified in the entry.
--
-- 'entry.protocol' - The protocol specified in the entry ('tcp' | 'udp' | 'icmp' or
-- a protocol number).
--
-- 'entry.rule-action' - Allows or denies the matching traffic ('allow' | 'deny').
--
-- 'entry.rule-number' - The number of an entry (in other words, rule) in the
-- ACL's set of entries.
--
-- 'network-acl-id' - The ID of the network ACL.
--
-- 'tag':/key/=/value/ - The key/value combination of a tag assigned to the
-- resource.
--
-- 'tag-key' - The key of a tag assigned to the resource. This filter is
-- independent of the 'tag-value' filter. For example, if you use both the filter
-- "tag-key=Purpose" and the filter "tag-value=X", you get any resources
-- assigned both the tag key Purpose (regardless of what the tag's value is),
-- and the tag value X (regardless of what the tag's key is). If you want to
-- list only resources where Purpose is X, see the 'tag':/key/=/value/ filter.
--
-- 'tag-value' - The value of a tag assigned to the resource. This filter is
-- independent of the 'tag-key' filter.
--
-- 'vpc-id' - The ID of the VPC for the network ACL.
--
--
dna1Filters :: Lens' DescribeNetworkAcls [Filter]
dna1Filters = lens _dna1Filters (\rq v -> rq { _dna1Filters = v }) . _List
-- | One or more network ACL IDs.
--
-- Default: Describes all your network ACLs.
dna1NetworkAclIds :: Lens' DescribeNetworkAcls [Text]
dna1NetworkAclIds =
    lens _dna1NetworkAclIds (\rq v -> rq { _dna1NetworkAclIds = v }) . _List
-- | Response payload of @DescribeNetworkAcls@: just the list of ACLs.
newtype DescribeNetworkAclsResponse = DescribeNetworkAclsResponse
    { _dnarNetworkAcls :: List "item" NetworkAcl
    } deriving (Eq, Read, Show, Monoid, Semigroup)
-- | 'DescribeNetworkAclsResponse' constructor. Starts with an empty ACL list.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dnarNetworkAcls' @::@ ['NetworkAcl']
--
describeNetworkAclsResponse :: DescribeNetworkAclsResponse
describeNetworkAclsResponse = DescribeNetworkAclsResponse
    { _dnarNetworkAcls = mempty
    }
-- | Information about one or more network ACLs.
dnarNetworkAcls :: Lens' DescribeNetworkAclsResponse [NetworkAcl]
dnarNetworkAcls =
    lens _dnarNetworkAcls (\rs v -> rs { _dnarNetworkAcls = v }) . _List
-- The request is always issued against the service root path.
instance ToPath DescribeNetworkAcls where
    toPath = const "/"

instance ToQuery DescribeNetworkAcls where
    toQuery DescribeNetworkAcls{..} = mconcat
        [ "DryRun" =? _dna1DryRun
        , "Filter" `toQueryList` _dna1Filters
        , "NetworkAclId" `toQueryList` _dna1NetworkAclIds
        ]

instance ToHeaders DescribeNetworkAcls

-- Query-style POST against the EC2 service; response is parsed from XML.
instance AWSRequest DescribeNetworkAcls where
    type Sv DescribeNetworkAcls = EC2
    type Rs DescribeNetworkAcls = DescribeNetworkAclsResponse

    request  = post "DescribeNetworkAcls"
    response = xmlResponse

-- The "networkAclSet" element may be absent; default to an empty list.
instance FromXML DescribeNetworkAclsResponse where
    parseXML x = DescribeNetworkAclsResponse
        <$> x .@? "networkAclSet" .!@ mempty
| kim/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeNetworkAcls.hs | mpl-2.0 | 6,320 | 0 | 10 | 1,254 | 625 | 399 | 226 | 66 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset for the ZAP OpenAPI add-on (locale bs-BA):
     declares the TOC, index, full-text search, and favorites views. -->
<helpset version="2.0" xml:lang="bs-BA">
  <title>Support for the Open API Specification | ZAP Extension</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
    JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Handler.HomeSpec (spec) where
import TestImport
-- | Integration tests for the scaffolded homepage handler.
spec :: Spec
spec = withApp $ do

    describe "Homepage" $ do
        it "loads the index and checks it looks right" $ do
            get HomeR
            statusIs 200
            htmlAnyContain "h1" "a modern framework for blazing fast websites"

            -- POST the sample file back through the homepage form,
            -- including the CSRF token the form requires.
            request $ do
                setMethod "POST"
                setUrl HomeR
                addToken

                fileByLabel "Choose a file" "test/Spec.hs" "text/plain" -- talk about self-reference
                byLabel "What's on the file?" "Some Content"

            statusIs 200
            -- more debugging printBody
            htmlAllContain ".upload-response" "text/plain"
            htmlAllContain ".upload-response" "Some Content"

        -- This is a simple example of using a database access in a test. The
        -- test will succeed for a fresh scaffolded site with an empty database,
        -- but will fail on an existing database with a non-empty user table.
        it "leaves the user table empty" $ do
            get HomeR
            statusIs 200
            users <- runDB $ selectList ([] :: [Filter User]) []
            assertEq "user table empty" 0 $ length users
| SaintOlga/FruitHools | test/Handler/HomeSpec.hs | apache-2.0 | 1,257 | 0 | 19 | 400 | 206 | 92 | 114 | 25 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.