code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MonoLocalBinds #-}
module DataTypes (module DataTypes) where
import Categories
import Groups
import Order
import VectorSpaces
import GHC.Base (Eq (..), Bool (..), (&&), error, flip)
import GHC.Show (Show (..))
-- | First component of a pair (local stand-in; Prelude is not imported).
fst (x, _) = x
-- | Second component of a pair.
snd (_, y) = y
-- Componentwise algebraic structure on pairs: every operation acts on the
-- two components independently.
-- NOTE(review): Semigroup/Monoid/etc. here are the project's own classes
-- from Groups (NoImplicitPrelude), where (*) is the semigroup operation.
instance (Semigroup a, Semigroup b) => Semigroup (a, b) where
  (a,b) * (c,d) = (a*c, b*d)
instance (Monoid a, Monoid b) => Monoid (a, b) where
  one = (one, one)
instance (AbelianMonoid a, AbelianMonoid b) => AbelianMonoid (a, b) where
  (a,b) + (c,d) = (a+c, b+d)
  zero = (zero, zero)
instance (AbelianGroup a, AbelianGroup b) => AbelianGroup (a, b) where
  neg (a, b) = (neg a, neg b)
-- The Ring instance relies entirely on the class's default methods.
instance (Ring a, Ring b) => Ring (a, b)
--(a, b) does not form a field because, for example, (1, 0)^-1 does not exist.
-- Functor over the second component of a pair, and over the result of a
-- function (postcomposition).
-- NOTE(review): Functor and (.) presumably come from Categories — confirm.
instance Functor ((,) b) where
  fmap f (a, b) = (a, f b)
instance Functor ((->) s) where
  fmap f g = f . g
-- | The state monad presented as the functor composition s -> (s, a).
type State s a = Composition ((->) s) ((,) s) a
-- "A monad is a monoid in the category of endofunctors": 'mult' is join
-- (run the outer computation, then feed the resulting state to the inner
-- one) and 'unit' is return (pair the value with the unchanged state).
instance CMonoid NaturalTransformation Functor Composition Id (Composition ((->) s) ((,) s)) where --state monad
  mult = NaturalTransformation (Compose . liftComp flattenState) where
    flattenState :: State s (State s a) -> s -> (s, a)
    flattenState f = \state -> let (st, a) = unCompose f state in unCompose a st
  unit = NaturalTransformation (Compose . makeState . unId) where
    makeState a = \s -> (s, a)
-- Monad presumably derives its methods from the CMonoid instance above.
instance Monad (Composition ((->) s) ((,) s))
-- Endofunctions of a field: pointwise addition, composition as
-- multiplication (a non-commutative ring), and a vector space structure
-- where scaling multiplies each output value by the scalar.
instance (Field f) => AbelianMonoid (f->f) where
  f + g = \x -> f x + g x
  zero = \x -> zero
instance (Field f) => AbelianGroup (f->f) where
  neg f = neg . f
instance (Field f) => Semigroup (f->f) where
  f * g = f . g
instance (Field f) => Monoid (f->f) where
  one = id
instance (Field f) => Ring (f->f)
instance (Field f) => VectorSpace (f->f) f where
  (*^) c f = (*c) . f
-- | A strict pair of same-typed values (a 2-vector).
data Vector2 a = V2 !a !a
instance (Eq a) => Eq (Vector2 a) where
  (V2 a b) == (V2 c d) = (a == c) && (b == d)
-- Componentwise addition/negation give the abelian monoid/group structure.
instance (AbelianMonoid m) => AbelianMonoid (Vector2 m) where
  (V2 a b) + (V2 c d) = V2 (a+c) (b+d)
  zero = V2 zero zero
instance (AbelianGroup g) => AbelianGroup (Vector2 g) where
  neg (V2 a b) = V2 (neg a) (neg b)
instance Functor Vector2 where
  fmap f (V2 a b) = V2 (f a) (f b)
-- | A strict triple of same-typed values (a 3-vector).
data Vector3 a = V3 !a !a !a
| Crazycolorz5/AlgebraicPrelude | DataTypes.hs | mit | 2,500 | 0 | 13 | 628 | 1,194 | 648 | 546 | 70 | 1 |
module Toolkit
( Builder
, none
, (<>)
, build
, encodeJSON
, decodeDouble
, decodePoint
, decodePolyline
) where
import Data.Aeson (ToJSON)
import Data.ByteString (ByteString)
import Data.ByteString.Builder (Builder)
import Data.Text (Text)
import qualified Data.Aeson as J
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.Monoid as M
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Read as T
-- | The empty builder.
none :: Builder
none =
  mempty
-- | Append a strict ByteString to a builder.
-- NOTE: this module defines its own (<>); it is NOT the Semigroup operator
-- (that one is only used qualified as M.<> below).
(<>) :: Builder -> ByteString -> Builder
parts <> part =
  parts M.<> B.byteString part
-- | Run a builder and decode the accumulated bytes as UTF-8 text.
build :: Builder -> Text
build =
  T.decodeUtf8 . L.toStrict . B.toLazyByteString
-- | Encode a value to a strict JSON ByteString.
encodeJSON :: (ToJSON a) => a -> ByteString
encodeJSON =
  L.toStrict . J.encode
-- | Parse a decimal double from text; calls 'error' on malformed input.
decodeDouble :: Text -> Double
decodeDouble t =
  case T.double t of
    Right (d, "") ->
      d
    -- A successful parse with trailing garbage is rejected too.
    Right _ ->
      error ("decodeDouble: invalid double: " ++ show t)
    Left msg ->
      error ("decodeDouble: " ++ msg ++ ": " ++ show t)
-- | Parse an @"x,y"@ pair into its two coordinates.
-- Calls 'error' unless exactly two comma-separated fields are present.
decodePoint :: Text -> [Double]
decodePoint t =
  case T.splitOn "," t of
    [xText, yText] ->
      map decodeDouble [xText, yText]
    _ ->
      error ("decodePoint: invalid point: " ++ show t)
-- | Parse a space-separated list of @"x,y"@ points (at least two of them)
-- into a flat coordinate list.  Calls 'error' on fewer than two points.
decodePolyline :: Text -> [Double]
decodePolyline t =
  case T.splitOn " " t of
    points | length points >= 2 -> concatMap decodePoint points
    _ -> error ("decodePolyline: invalid polyline: " ++ show t)
| mietek/gml-explorer | src/Toolkit.hs | mit | 1,527 | 0 | 12 | 379 | 486 | 272 | 214 | 56 | 3 |
module Routes (Request(..), Response(..)) where
import BasePrelude
import Data.Text (Text)
import qualified Data.Text.Lazy as Lazy
import Types
-- | A user's display name.
type Name = Text
-- | Requests the routing layer can dispatch.
data Request = GetPost ID
             | ListPosts PostQuery
             | CreatePost (Maybe ID) Token Text
             | CreateCode EmailAddress
             | CreateToken Code
             | CreateUser EmailAddress Name
-- | All possible responses, including the failure variants.
data Response = NewPost ResolvedPost
              | ExistingPost ResolvedPost
              | PostList [ResolvedPost]
              | NewToken ResolvedToken
              | NewUser ResolvedCode
              | NewCode ResolvedCode
              | BadToken
              | BadCode
              | UnknownEmail
              | InvalidUsername
              | ExistingNameOrEmail
              | BadRequest Lazy.Text
              | PostNotFound ID
| jackbowman/basilica | Routes.hs | mit | 866 | 0 | 8 | 348 | 161 | 100 | 61 | 25 | 0 |
apply func arg = func arg
| scravy/nodash | doc/Function/apply.hs | mit | 26 | 0 | 5 | 6 | 14 | 6 | 8 | 1 | 1 |
module Network.Skype.Command.ChatMember where
import Control.Monad.Trans
import Control.Monad.Trans.Control
import Data.Monoid ((<>))
import Network.Skype.Command.Utils
import Network.Skype.Core
import Network.Skype.Protocol
import qualified Data.ByteString.Char8 as BC
-- | List the member-object IDs of a chat
-- (issues @GET CHAT <id> MEMBEROBJECTS@).
getAllMembers :: (MonadBaseControl IO m, MonadIO m, MonadSkype m)
              => ChatID
              -> SkypeT m [ChatMemberID]
getAllMembers chatID = executeCommandWithID command $ \response ->
  case response of
    Chat _ (ChatMemberObjects chatMemberIDs) -> return $ Just chatMemberIDs
    -- Unrelated notifications are ignored; keep waiting for the reply.
    _ -> return Nothing
  where
    command = "GET CHAT " <> chatID <> " MEMBEROBJECTS"
-- | Fetch the user identity behind a chat member
-- (issues @GET CHATMEMBER <id> IDENTITY@).
getUserID :: (MonadBaseControl IO m, MonadIO m, MonadSkype m)
          => ChatMemberID
          -> SkypeT m UserID
getUserID chatMemberID = executeCommandWithID command $ \response ->
  case response of
    ChatMember _ (ChatMemberIdentity userID) -> return $ Just userID
    _ -> return Nothing
  where
    command = "GET CHATMEMBER " <> (BC.pack $ show chatMemberID) <> " IDENTITY"
-- | Fetch the chat a member belongs to
-- (issues @GET CHATMEMBER <id> CHATNAME@).
getChatID :: (MonadBaseControl IO m, MonadIO m, MonadSkype m)
          => ChatMemberID
          -> SkypeT m ChatID
getChatID chatMemberID = executeCommandWithID command $ \response ->
  case response of
    ChatMember _ (ChatMemberChatName chatID) -> return $ Just chatID
    _ -> return Nothing
  where
    command = "GET CHATMEMBER " <> (BC.pack $ show chatMemberID) <> " CHATNAME"
-- | Fetch a chat member's role
-- (issues @GET CHATMEMBER <id> ROLE@).
getRole :: (MonadBaseControl IO m, MonadIO m, MonadSkype m)
        => ChatMemberID
        -> SkypeT m ChatRole
getRole chatMemberID = executeCommandWithID command $ \response ->
  case response of
    ChatMember _ (ChatMemberRole role) -> return $ Just role
    _ -> return Nothing
  where
    command = "GET CHATMEMBER " <> (BC.pack $ show chatMemberID) <> " ROLE"
-- | Ask whether a chat member is currently active
-- (issues @GET CHATMEMBER <id> IS_ACTIVE@).
isActive :: (MonadBaseControl IO m, MonadIO m, MonadSkype m)
         => ChatMemberID
         -> SkypeT m Bool
isActive chatMemberID = executeCommandWithID command $ \response ->
  case response of
    ChatMember _ (ChatMemberIsActive active) -> return $ Just active
    _ -> return Nothing
  where
    command = "GET CHATMEMBER " <> (BC.pack $ show chatMemberID) <> " IS_ACTIVE"
| emonkak/skype4hs | src/Network/Skype/Command/ChatMember.hs | mit | 2,372 | 0 | 12 | 665 | 657 | 339 | 318 | -1 | -1 |
module Language.STL.Lex.Normalize (normalize) where
import Data.Function
import Data.List
import Language.STL.Lex
type TokenTrans = TokenStream -> TokenStream
-- | Canonicalise a token stream: remove comment tokens, drop any leading
-- separators, and squash runs of separators to a single one.
normalize :: TokenTrans
normalize = collapseSeps . dropLeadingSeps . stripComments
  where
    dropLeadingSeps = dropWhile ((== Separator) . tTok)
    stripComments = filter (keep . tTok)
    keep (Comment _) = False
    keep _ = True
-- | Collapse each run of equal tokens that are separators down to its
-- first element; other runs pass through unchanged.
collapseSeps :: TokenTrans
collapseSeps = concatMap squashGroup . groupBy ((==) `on` tTok)
  where
    -- groupBy only yields non-empty groups, so matching (t:_) covers every
    -- real case; the [] equation replaces the partial 'head' of the old code.
    squashGroup grp@(t:_)
      | tTok t == Separator = take 1 grp
      | otherwise           = grp
    squashGroup [] = []
| pikajude/stl | src/Language/STL/Lex/Normalize.hs | mit | 481 | 0 | 13 | 105 | 172 | 98 | 74 | 12 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Web.Common where
import Prelude
import Text.Blaze.Html5
import qualified Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes as A
--instance ToMarkup S3Error where
-- toMarkup err = H.div ! class_ "row" $ do
-- H.h3 "S3 Error"
-- H.pre $ H.toHtml $ show err
-- | Wrap markup in a POST form targeting @loc@, with a submit button
-- appended after the caller's fields.
formWrapper :: ToValue v => Html -> v -> Html
formWrapper inner loc =
  H.form ! method "POST" ! action (toValue loc) $ do
    inner
    submitButton
  where
    submitButton = button ! type_ "submit" ! class_ "btn btn-default" $ "Submit"
-- | Assemble a full page from a <head>, a nav bar and the main content;
-- the content is placed inside a Bootstrap container/row/column fieldset.
headNavContentHtml :: Html -> Html -> Html -> Html
headNavContentHtml h n f =
  docTypeHtml ! lang "en" $ do
    h
    body $ do
      n
      H.div ! class_ "container" $ H.div ! class_ "row" $
        H.div ! class_ "col-md-12" $ fieldset $ f
-- | Shared <head>: responsive viewport plus CDN-hosted Font Awesome,
-- Bootstrap (CSS and JS) and jQuery.
standardHead :: Html
standardHead = H.head $ do
  meta ! name "viewport" ! content "width=device-width, initial-scale=1"
  link ! href "//maxcdn.bootstrapcdn.com/font-awesome/4.3.0/css/font-awesome.min.css"
       ! rel "stylesheet"
  link ! href "//maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap.min.css"
       ! rel "stylesheet"
  script ! src "//code.jquery.com/jquery-2.1.1.min.js" $ mempty
  script ! src "//maxcdn.bootstrapcdn.com/bootstrap/3.3.1/js/bootstrap.min.js" $ mempty
-- | Page layout for an authenticated user: standard head + user nav bar.
userContainer :: Html -> Html
userContainer = headNavContentHtml standardHead loggedInHeader
-- | Page layout for a guest: standard head + logged-out nav bar.
guestContainer :: Html -> Html
guestContainer = headNavContentHtml standardHead loggedOutHeader
--val :: Url -> AttributeValue
--val = toValue
-- | Nav bar shown to authenticated users.  Currently only the brand mark;
-- the menu items are kept commented out below for later restoration.
loggedInHeader :: Html
loggedInHeader = nav ! class_ "navbar navbar-default" $
  H.div ! class_ "container-fluid" $ do
    H.div ! class_ "navbar-header" $
      p ! class_ "navbar-brand" $ ":)"
-- i ! class_ "fa fa-home" $ mempty
-- ul ! class_ "nav navbar-nav" $ do
-- liDropdown (faIcon "file" $ do
-- void $ " file"
-- H.span ! class_ "caret" $ mempty)
-- (do liIcon (val UrlUploadFile) "upload" " upload a file"
-- liIcon (val UrlUploadTarball) "file-archive-o" " upload a tarball"
-- liIcon (val UrlCopyFile) "file-o" " copy a file"
-- liIcon (val UrlCopyFolder) "files-o" " copy a folder")
-- liDropdown (faIcon "cloud" $ do
-- void $ " bucket"
-- H.span ! class_ "caret" $ mempty)
-- (do liIcon (val UrlBucketList) "list-ol" " list a bucket"
-- liIcon (val UrlBucketAdd) "plus" " add a bucket"
-- liIcon (val UrlBucketLinkCF) "cloud" " link cloudfront")
-- liDropdown (faIcon "user" $ do
-- void $ " user"
-- H.span ! class_ "caret" $ mempty)
-- (do liIcon (val UrlUserSettings) "cogs" " user settings"
-- liIcon (val UrlUserPassword) "key" " update password"
-- liIcon (val UrlUserAdd) "user-plus" " add user")
-- H.ul ! class_ "nav navbar-nav navbar-right" $ do
-- liIcon (val UrlLogView) "file-text" " view log"
-- liIcon (val UrlUserLogout) "sign-out" " logout"
-- | Nav bar shown to guests; intentionally empty for now (the previous
-- contents are kept commented out below).
loggedOutHeader :: Html
loggedOutHeader = nav ! class_ "navbar navbar-default" $
  H.div ! class_ "container-fluid" $ mempty --do
-- H.div ! class_ "navbar-header" $
-- a ! class_ "navbar-brand" ! href (val UrlHome) $
-- i ! class_ "fa fa-home" $ mempty
-- H.ul ! class_ "nav navbar-nav navbar-right" $ do
-- li $ a ! href (val UrlUserLogin) $ do
-- i ! class_ "fa fa-sign-in" $ mempty
-- " login"
--selectBucket :: AttributeValue -> [Bucket] -> Html
--selectBucket idName bs = do
-- select ! A.id idName ! name idName $ forM_ bs $ \bucket ->
-- option $ toHtml bucket
| schell/caltrops | src/Web/Common.hs | mit | 4,058 | 0 | 18 | 1,294 | 494 | 263 | 231 | 47 | 1 |
{-# LANGUAGE MagicHash #-}
module EmptyStageApp where
import Prelude hiding (show)
import Java
-- import javafx.stage.Stage;
import JavaFX.Types
-- import javafx.application.Application;
import JavaFX.Methods
-- public class Empty extends Application {
-- | Eta FFI wrapper type: compiles to a JVM class extending
-- javafx.application.Application.
data {-# CLASS "org.eta.EmptyStageApp extends javafx.application.Application" #-}
  EmptyStageApp = EmptyStageApp (Object# EmptyStageApp)
-- | The Application.start override: set the primary stage's title, then
-- show it.  (<.>) runs a Java action of another class against the given
-- object -- here the Stage argument.
start :: Stage -> Java EmptyStageApp ()
start = (<.> (setTitle "Empty Eta-JavaFX Stage" >> show))
-- Export 'start' to the JVM under the name "start" so JavaFX can call it.
foreign export java "start" start :: Stage -> Java EmptyStageApp ()
-- What is <.> ?
-- Execute a Java action in the Java monad of another class
-- with respect to the given object.
-- (<.>) :: (Class c) => c -> Java c a -> Java b a
| filippovitale/eta-playground | javafx-empty-stage/src/EmptyStageApp.hs | mit | 910 | 1 | 8 | 162 | 106 | 67 | 39 | -1 | -1 |
import Geometry
import Drawing
-- | Render the line-circle intersection picture.
main = drawPicture myPicture
-- | Given a supply of points, draw a circle through (a,b), a line through
-- (c,d), label all four points, and mark the intersection points X and Y.
myPicture points =
  drawCircle (a,b) &
  drawLine (c,d) &
  drawPoints [a,b,c,d] &
  drawLabels [a,b,c,d] ["A","B","C","D"] &
  drawLabels intersects ["X","Y"] &
  drawPoints intersects &
  message "Line-Circle Intersection"
  -- Only the first four supplied points are used.
  where [a,b,c,d] = take 4 points
        intersects = line_circle (c,d) (a,b)
| alphalambda/k12math | contrib/MHills/GeometryLessons/code/student/lesson2c.hs | mit | 396 | 0 | 12 | 100 | 173 | 96 | 77 | 13 | 1 |
-- awesome.hs
module Awesome where
-- | Append an exclamation mark.
addBang :: String -> String
addBang = (++ "!")

-- | Element at index 4 (partial: fails on lists shorter than 5).
takeFour :: [a] -> a
takeFour = (!! 4)

-- | Drop the first nine elements.
dropNine :: [a] -> [a]
dropNine = drop 9

-- | Character at index 2 (partial: fails on strings shorter than 3).
returnThirdChar :: String -> Char
returnThirdChar s = s !! 2

myString :: String
myString = "Curry is awesome!"

-- | Character of 'myString' at the given index.
letterIndex :: Int -> Char
letterIndex = (myString !!)

-- | Rearrange 'myString' into "awesome! is Curry".
rvrs :: String
rvrs = drop 9 myString ++ " " ++ [myString !! 6] ++ [myString !! 7] ++ " " ++ take 5 myString
| doylew/practice | haskell/awesome.hs | mit | 419 | 0 | 11 | 81 | 164 | 89 | 75 | 11 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.UIRequestEvent
(js_getReceiver, getReceiver, UIRequestEvent, castToUIRequestEvent,
gTypeUIRequestEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"receiver\"]" js_getReceiver
:: UIRequestEvent -> IO (Nullable EventTarget)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/UIRequestEvent.receiver Mozilla UIRequestEvent.receiver documentation>
-- | Read the event's @receiver@ property; 'Nothing' when it is null on
-- the JavaScript side.
getReceiver ::
            (MonadIO m) => UIRequestEvent -> m (Maybe EventTarget)
getReceiver self
  = liftIO (nullableToMaybe <$> (js_getReceiver (self)))
module Ohua.ALang.Refs where
import Ohua.Types
-- Qualified names of the ohua.lang builtins, centralised here so compiler
-- passes don't scatter string literals.
id :: QualifiedBinding
id = "ohua.lang/id"
-- | Transforms into `ifFun` and `select`.
ifThenElse :: QualifiedBinding
ifThenElse = "ohua.lang/if"
-- TODO: maybe these functions belong into the concrete passes
-- | Semantically a different function from `ifThenElse`.
ifFun :: QualifiedBinding
ifFun = "ohua.lang/ifFun"
select :: QualifiedBinding
select = "ohua.lang/select"
-- | Transforms into `smapFun` and `collect`.
smap :: QualifiedBinding
smap = "ohua.lang/smap"
-- TODO: maybe these functions belong into the concrete passes
-- | Semantically a different function from `smap`.
smapFun :: QualifiedBinding
smapFun = "ohua.lang/smapFun"
collect :: QualifiedBinding
collect = "ohua.lang/collect"
-- | Transforms into `seqFun`.
seq :: QualifiedBinding
seq = "ohua.lang/seq"
-- TODO: maybe these functions belong into the concrete passes
-- | Semantically a different function from `seq`.
seqFun :: QualifiedBinding
seqFun = "ohua.lang/seqFun"
recur :: QualifiedBinding
recur = "ohua.lang/recur"
nth :: QualifiedBinding
nth = "ohua.lang/nth"
ctrl :: QualifiedBinding
ctrl = "ohua.lang/ctrl"
| ohua-dev/ohua-core | core/src/Ohua/ALang/Refs.hs | epl-1.0 | 1,134 | 0 | 4 | 157 | 141 | 89 | 52 | 26 | 1 |
module Main
where
import Control.Exception (SomeException)
import Control.Monad (void, when)
import Control.Monad.Catch (catchAll)
import Data.Conduit (($$), ($=), Conduit, awaitForever, yield)
import Data.List (isPrefixOf)
import System.Environment (getEnv)
import Web.HZulip
-- | Connect with credentials from the environment, subscribe to every
-- stream, then run the echo loop; Zulip errors are reported, not rethrown.
main :: IO ()
main = withZulipEnv $ do
  lift $ putStrLn "Subscribing to all streams..."
  void addAllSubscriptions
  lift $ putStrLn "Echoing..."
  catchAll startEchoer onZulipError
-- | Pipe incoming messages through the echo transformer and send the
-- produced replies back to Zulip.
-- NOTE(review): 30 is presumably a polling timeout for the message source
-- -- confirm against the hzulip docs.
startEchoer :: ZulipM ()
startEchoer = sourceZulipMessages 30 $= echoConduit $$ sinkZulipMessages
-- | Turn messages of the form @echo <text>@ into reply tuples of
-- (message type, recipients, topic, text), skipping our own messages.
echoConduit :: Conduit Message ZulipM (String, [String], String, String)
echoConduit = loop
  -- NOTE(review): awaitForever already handles every upstream message; the
  -- trailing `>> loop` restarts it after upstream closes -- confirm this
  -- is intentional and cannot busy-loop.
  where loop = awaitForever processMessage >> loop
        processMessage msg = do
          -- Skip messages this client sent, so an echo never echoes itself.
          nr <- lift $ nonRecursive msg
          let c = messageContent msg
          lift $ lift $ putStr "here" -- NOTE(review): leftover debug output?
          when (nr && "echo " `isPrefixOf` c) $
            -- Strip the "echo " prefix and reply to the original recipients.
            let c' = drop 5 c in case messageType msg of
              "stream" ->
                let Left stream = messageDisplayRecipient msg
                    topic = messageSubject msg
                in yield ("stream", [stream], topic, c')
              "private" ->
                let Right users = messageDisplayRecipient msg
                    recipients = map userEmail users
                -- NOTE(review): another leftover debug print.
                in lift (lift $ putStr "here") >>
                   yield ("private", recipients, "", c')
              _ -> return ()
-- | Report a Zulip client failure on stdout instead of crashing.
onZulipError :: SomeException -> ZulipM ()
onZulipError ex = lift $ do
  putStrLn "Zulip Client errored:"
  print ex
-- | True when the message was written by someone other than this client,
-- so replying to it cannot create a feedback loop.
nonRecursive :: Message -> ZulipM Bool
nonRecursive msg = do
  ownEmail <- fmap clientEmail ask
  return (ownEmail /= userEmail (messageSender msg))
-- | Run a Zulip action with credentials taken from the ZULIP_USER and
-- ZULIP_KEY environment variables (throws if either is unset).
withZulipEnv :: ZulipM a -> IO a
withZulipEnv action = do
  email  <- getEnv "ZULIP_USER"
  apiKey <- getEnv "ZULIP_KEY"
  withZulipCreds email apiKey action
| yamadapc/hzulip | examples/src/ZulipConduitBot.hs | gpl-2.0 | 1,937 | 0 | 21 | 600 | 574 | 287 | 287 | 46 | 3 |
{-
Haskell implementation of yes tool.
http://linux.die.net/man/1/yes
-}
module Main where
import System.Environment(getArgs)
-- | Haskell version of yes(1): print the first argument (default "y")
-- forever.  The case expression replaces the old partial 'head' and the
-- Eq-based `as == []` test.
main :: IO ()
main = do
  args <- getArgs
  let line = case args of
        []        -> "y"
        (arg : _) -> arg
  mapM_ putStrLn (repeat line)
| huseyinyilmaz/hs-gnu-core-utils | src/yes.hs | gpl-2.0 | 311 | 0 | 10 | 70 | 73 | 40 | 33 | 5 | 2 |
{-# LANGUAGE ScopedTypeVariables, OverloadedStrings #-}
{-
Copyright (C) 2006-2014 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Readers.LaTeX
Copyright : Copyright (C) 2006-2014 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <jgm@berkeley.edu>
Stability : alpha
Portability : portable
Conversion of LaTeX to 'Pandoc' document.
-}
module Text.Pandoc.Readers.LaTeX ( readLaTeX,
rawLaTeXInline,
rawLaTeXBlock,
inlineCommand,
handleIncludes
) where
import Text.Pandoc.Definition
import Text.Pandoc.Walk
import Text.Pandoc.Shared
import Text.Pandoc.Options
import Text.Pandoc.Parsing hiding ((<|>), many, optional, space,
mathDisplay, mathInline)
import qualified Text.Pandoc.UTF8 as UTF8
import Data.Char ( chr, ord )
import Control.Monad.Trans (lift)
import Control.Monad
import Text.Pandoc.Builder
import Data.Char (isLetter, isAlphaNum)
import Control.Applicative
import Data.Monoid
import Data.Maybe (fromMaybe)
import System.Environment (getEnv)
import System.FilePath (replaceExtension, (</>))
import Data.List (intercalate, intersperse)
import qualified Data.Map as M
import qualified Control.Exception as E
import System.FilePath (takeExtension, addExtension)
import Text.Pandoc.Highlighting (fromListingsLanguage)
-- | Parse LaTeX from string and return 'Pandoc' document.
readLaTeX :: ReaderOptions -- ^ Reader options
          -> String        -- ^ String to parse (assumes @'\n'@ line endings)
          -> Pandoc
readLaTeX opts = readWith parseLaTeX def{ stateOptions = opts }
-- | Top-level parser: consume blocks to end of input, then attach the
-- metadata that side-effecting commands accumulated in the parser state.
parseLaTeX :: LP Pandoc
parseLaTeX = do
  bs <- blocks
  eof
  st <- getState
  let meta = stateMeta st
  let (Pandoc _ bs') = doc bs
  return $ Pandoc meta bs'
type LP = Parser [Char] ParserState
-- | Parse any control sequence and return its name: a backslash followed
-- by a run of letters (trailing space skipped), a single non-letter
-- character, or "" when the backslash sits at end of line/input.
anyControlSeq :: LP String
anyControlSeq = do
  char '\\'
  next <- option '\n' anyChar
  name <- case next of
               '\n'           -> return ""
               c | isLetter c -> (c:) <$> (many letter <* optional sp)
                 | otherwise  -> return [c]
  return name
-- | Parse exactly the control sequence with the given name.  A letter
-- name must not be a prefix of a longer sequence (notFollowedBy letter);
-- the empty name never matches.
controlSeq :: String -> LP String
controlSeq name = try $ do
  char '\\'
  case name of
        ""  -> mzero
        [c] | not (isLetter c) -> string [c]
        cs  -> string cs <* notFollowedBy letter <* optional sp
  return name
dimenarg :: LP String
dimenarg = try $ do
ch <- option "" $ string "="
num <- many1 digit
dim <- oneOfStrings ["pt","pc","in","bp","cm","mm","dd","cc","sp"]
return $ ch ++ num ++ dim
-- | Skip intra-paragraph whitespace: runs of spaces/tabs, or a single
-- newline that is not followed by a blank line (which would end the
-- paragraph).
sp :: LP ()
sp = skipMany1 $ satisfy (\c -> c == ' ' || c == '\t')
     <|> (try $ newline <* lookAhead anyChar <* notFollowedBy blankline)
-- | True for characters of lowercase hexadecimal: '0'..'9' and 'a'..'f'.
isLowerHex :: Char -> Bool
isLowerHex c = c `elem` (['0' .. '9'] ++ ['a' .. 'f'])
-- | Parse TeX's ^^ escapes.  ^^ followed by a lowercase hex digit pair is
-- read as that character code; otherwise ^^c yields c with 64 added or
-- subtracted (flipping into/out of the control range, per the TeX rules).
tildeEscape :: LP Char
tildeEscape = try $ do
  string "^^"
  c <- satisfy (\x -> x >= '\0' && x <= '\128')
  -- Only one extra hex digit is consumed here; together with c that forms
  -- the two-digit hex code below.
  d <- if isLowerHex c
          then option "" $ count 1 (satisfy isLowerHex)
          else return ""
  if null d
     then case ord c of
           x | x >= 64 && x <= 127 -> return $ chr (x - 64)
             | otherwise -> return $ chr (x + 64)
     else return $ chr $ read ('0':'x':c:d)
-- | Skip a TeX comment: '%' through the end of the line (the newline, if
-- present, is consumed too).
comment :: LP ()
comment = () <$ (char '%' *> skipMany (satisfy (/= '\n')) *> optional newline)
bgroup :: LP ()
bgroup = () <$ char '{'
<|> () <$ controlSeq "bgroup"
<|> () <$ controlSeq "begingroup"
egroup :: LP ()
egroup = () <$ char '}'
<|> () <$ controlSeq "egroup"
<|> () <$ controlSeq "endgroup"
-- | Run a parser repeatedly inside a TeX group ({...} or
-- \bgroup...\egroup), concatenating the results.
grouped :: Monoid a => LP a -> LP a
grouped parser = try $ bgroup *> (mconcat <$> manyTill parser egroup)
-- | The raw string contents of a group, preserving escaped braces and
-- re-wrapping nested groups in literal braces.
braced :: LP String
braced = bgroup *> (concat <$> manyTill
         ( many1 (satisfy (\c -> c /= '\\' && c /= '}' && c /= '{'))
       <|> try (string "\\}")
       <|> try (string "\\{")
       <|> try (string "\\\\")
       <|> ((\x -> "{" ++ x ++ "}") <$> braced)
       <|> count 1 anyChar
         ) egroup)
-- | Run a parser repeatedly inside [...] brackets.
bracketed :: Monoid a => LP a -> LP a
bracketed parser = try $ char '[' *> (mconcat <$> manyTill parser (char ']'))
mathDisplay :: LP String -> LP Inlines
mathDisplay p = displayMath <$> (try p >>= applyMacros' . trim)
mathInline :: LP String -> LP Inlines
mathInline p = math <$> (try p >>= applyMacros')
mathChars :: LP String
mathChars = concat <$>
many ( many1 (satisfy (\c -> c /= '$' && c /='\\'))
<|> (\c -> ['\\',c]) <$> (try $ char '\\' *> anyChar)
)
quoted' :: (Inlines -> Inlines) -> LP String -> LP () -> LP Inlines
quoted' f starter ender = do
startchs <- starter
try ((f . mconcat) <$> manyTill inline ender) <|> lit startchs
double_quote :: LP Inlines
double_quote =
( quoted' doubleQuoted (try $ string "``") (void $ try $ string "''")
<|> quoted' doubleQuoted (string "“") (void $ char '”')
-- the following is used by babel for localized quotes:
<|> quoted' doubleQuoted (try $ string "\"`") (void $ try $ string "\"'")
<|> quoted' doubleQuoted (string "\"") (void $ char '"')
)
single_quote :: LP Inlines
single_quote =
( quoted' singleQuoted (string "`") (try $ char '\'' >> notFollowedBy letter)
<|> quoted' singleQuoted (string "‘") (try $ char '’' >> notFollowedBy letter)
)
inline :: LP Inlines
inline = (mempty <$ comment)
<|> (space <$ sp)
<|> inlineText
<|> inlineCommand
<|> inlineGroup
<|> (char '-' *> option (str "-")
((char '-') *> option (str "–") (str "—" <$ char '-')))
<|> double_quote
<|> single_quote
<|> (str "”" <$ try (string "''"))
<|> (str "”" <$ char '”')
<|> (str "’" <$ char '\'')
<|> (str "’" <$ char '’')
<|> (str "\160" <$ char '~')
<|> (mathDisplay $ string "$$" *> mathChars <* string "$$")
<|> (mathInline $ char '$' *> mathChars <* char '$')
<|> (superscript <$> (char '^' *> tok))
<|> (subscript <$> (char '_' *> tok))
<|> (guardEnabled Ext_literate_haskell *> char '|' *> doLHSverb)
<|> (str . (:[]) <$> tildeEscape)
<|> (str . (:[]) <$> oneOf "[]")
<|> (str . (:[]) <$> oneOf "#&") -- TODO print warning?
-- <|> (str <$> count 1 (satisfy (\c -> c /= '\\' && c /='\n' && c /='}' && c /='{'))) -- eat random leftover characters
inlines :: LP Inlines
inlines = mconcat <$> many (notFollowedBy (char '}') *> inline)
-- | Parse a braced group of inlines.  Non-empty contents are wrapped in a
-- no-attribute span so that {C}apitalized words in bibtex entries can be
-- recognised (and detitlecased) later.
inlineGroup :: LP Inlines
inlineGroup = do
  contents <- grouped inline
  return $ if isNull contents
              then mempty
              else spanWith nullAttr contents
block :: LP Blocks
block = (mempty <$ comment)
<|> (mempty <$ ((spaceChar <|> newline) *> spaces))
<|> environment
<|> macro
<|> blockCommand
<|> paragraph
<|> grouped block
<|> (mempty <$ char '&') -- loose & in table environment
blocks :: LP Blocks
blocks = mconcat <$> many block
blockCommand :: LP Blocks
blockCommand = try $ do
name <- anyControlSeq
guard $ name /= "begin" && name /= "end"
star <- option "" (string "*" <* optional sp)
let name' = name ++ star
case M.lookup name' blockCommands of
Just p -> p
Nothing -> case M.lookup name blockCommands of
Just p -> p
Nothing -> mzero
-- | Surround inlines with literal square brackets.
inBrackets :: Inlines -> Inlines
inBrackets contents = str "[" <> contents <> str "]"
-- eat an optional argument and one or more arguments in braces
ignoreInlines :: String -> (String, LP Inlines)
ignoreInlines name = (name, doraw <|> (mempty <$ optargs))
where optargs = skipopts *> skipMany (try $ optional sp *> braced)
contseq = '\\':name
doraw = (rawInline "latex" . (contseq ++) . snd) <$>
(getOption readerParseRaw >>= guard >> (withRaw optargs))
ignoreBlocks :: String -> (String, LP Blocks)
ignoreBlocks name = (name, doraw <|> (mempty <$ optargs))
where optargs = skipopts *> skipMany (try $ optional sp *> braced)
contseq = '\\':name
doraw = (rawBlock "latex" . (contseq ++) . snd) <$>
(getOption readerParseRaw >>= guard >> (withRaw optargs))
blockCommands :: M.Map String (LP Blocks)
blockCommands = M.fromList $
[ ("par", mempty <$ skipopts)
, ("title", mempty <$ (skipopts *> tok >>= addMeta "title"))
, ("subtitle", mempty <$ (skipopts *> tok >>= addMeta "subtitle"))
, ("author", mempty <$ (skipopts *> authors))
-- -- in letter class, temp. store address & sig as title, author
, ("address", mempty <$ (skipopts *> tok >>= addMeta "address"))
, ("signature", mempty <$ (skipopts *> authors))
, ("date", mempty <$ (skipopts *> tok >>= addMeta "date"))
-- sectioning
, ("chapter", updateState (\s -> s{ stateHasChapters = True })
*> section nullAttr 0)
, ("chapter*", updateState (\s -> s{ stateHasChapters = True })
*> section ("",["unnumbered"],[]) 0)
, ("section", section nullAttr 1)
, ("section*", section ("",["unnumbered"],[]) 1)
, ("subsection", section nullAttr 2)
, ("subsection*", section ("",["unnumbered"],[]) 2)
, ("subsubsection", section nullAttr 3)
, ("subsubsection*", section ("",["unnumbered"],[]) 3)
, ("paragraph", section nullAttr 4)
, ("paragraph*", section ("",["unnumbered"],[]) 4)
, ("subparagraph", section nullAttr 5)
, ("subparagraph*", section ("",["unnumbered"],[]) 5)
-- beamer slides
, ("frametitle", section nullAttr 3)
, ("framesubtitle", section nullAttr 4)
-- letters
, ("opening", (para . trimInlines) <$> (skipopts *> tok))
, ("closing", skipopts *> closing)
--
, ("hrule", pure horizontalRule)
, ("rule", skipopts *> tok *> tok *> pure horizontalRule)
, ("item", skipopts *> loose_item)
, ("documentclass", skipopts *> braced *> preamble)
, ("centerline", (para . trimInlines) <$> (skipopts *> tok))
, ("caption", skipopts *> tok >>= setCaption)
, ("PandocStartInclude", startInclude)
, ("PandocEndInclude", endInclude)
, ("bibliography", mempty <$ (skipopts *> braced >>=
addMeta "bibliography" . splitBibs))
, ("addbibresource", mempty <$ (skipopts *> braced >>=
addMeta "bibliography" . splitBibs))
] ++ map ignoreBlocks
-- these commands will be ignored unless --parse-raw is specified,
-- in which case they will appear as raw latex blocks
[ "newcommand", "renewcommand", "newenvironment", "renewenvironment"
-- newcommand, etc. should be parsed by macro, but we need this
-- here so these aren't parsed as inline commands to ignore
, "special", "pdfannot", "pdfstringdef"
, "bibliographystyle"
, "maketitle", "makeindex", "makeglossary"
, "addcontentsline", "addtocontents", "addtocounter"
-- \ignore{} is used conventionally in literate haskell for definitions
-- that are to be processed by the compiler but not printed.
, "ignore"
, "hyperdef"
, "markboth", "markright", "markleft"
, "hspace", "vspace"
]
addMeta :: ToMetaValue a => String -> a -> LP ()
addMeta field val = updateState $ \st ->
st{ stateMeta = addMetaField field val $ stateMeta st }
splitBibs :: String -> [Inlines]
splitBibs = map (str . flip replaceExtension "bib" . trim) . splitBy (==',')
-- | Stash the caption in parser state for the enclosing figure/table and
-- produce no blocks of its own.
setCaption :: Inlines -> LP Blocks
setCaption ils = mempty <$ updateState (\st -> st{ stateCaption = Just ils })

-- | Clear any pending caption.
resetCaption :: LP ()
resetCaption = updateState (\st -> st{ stateCaption = Nothing })
authors :: LP ()
authors = try $ do
char '{'
let oneAuthor = mconcat <$>
many1 (notFollowedBy' (controlSeq "and") >>
(inline <|> mempty <$ blockCommand))
-- skip e.g. \vspace{10pt}
auths <- sepBy oneAuthor (controlSeq "and")
char '}'
addMeta "author" (map trimInlines auths)
section :: Attr -> Int -> LP Blocks
section (ident, classes, kvs) lvl = do
hasChapters <- stateHasChapters `fmap` getState
let lvl' = if hasChapters then lvl + 1 else lvl
skipopts
contents <- grouped inline
lab <- option ident $ try (spaces >> controlSeq "label" >> spaces >> braced)
attr' <- registerHeader (lab, classes, kvs) contents
return $ headerWith attr' lvl' contents
-- | Parse an arbitrary inline control sequence.  Looks the name up in
-- 'inlineCommands' (first including a trailing star, then without);
-- if no handler succeeds, the raw command is macro-expanded and either
-- reparsed (if expansion changed it), kept as raw latex (when
-- readerParseRaw is set), or dropped.
inlineCommand :: LP Inlines
inlineCommand = try $ do
  name <- anyControlSeq
  guard $ name /= "begin" && name /= "end"
  guard $ not $ isBlockCommand name
  parseRaw <- getOption readerParseRaw
  star <- option "" (string "*")
  let name' = name ++ star
  -- fallback when no specific handler applies (or the handler fails)
  let raw = do
        rawargs <- withRaw (skipopts *> option "" dimenarg *> many braced)
        let rawcommand = '\\' : name ++ star ++ snd rawargs
        transformed <- applyMacros' rawcommand
        if transformed /= rawcommand
           then parseFromString inlines transformed
           else if parseRaw
                then return $ rawInline "latex" rawcommand
                else return mempty
  case M.lookup name' inlineCommands of
       Just p      -> p <|> raw
       Nothing -> case M.lookup name inlineCommands of
                       Just p    -> p <|> raw
                       Nothing -> raw
-- | Succeed only when the reader is NOT in --parse-raw mode.
unlessParseRaw :: LP ()
unlessParseRaw = getOption readerParseRaw >>= guard . not
-- | True when the control-sequence name has an entry in 'blockCommands'.
-- (Uses 'M.member' rather than the roundabout
-- @maybe False (const True) . M.lookup@.)
isBlockCommand :: String -> Bool
isBlockCommand s = M.member s blockCommands
-- | Dispatch table from inline LaTeX command names to parsers building
-- the corresponding pandoc inlines.  Consulted by 'inlineCommand',
-- first with any trailing star attached to the name, then without.
inlineCommands :: M.Map String (LP Inlines)
inlineCommands = M.fromList $
  [ ("emph", extractSpaces emph <$> tok)
  , ("textit", extractSpaces emph <$> tok)
  , ("textsl", extractSpaces emph <$> tok)
  , ("textsc", extractSpaces smallcaps <$> tok)
  , ("sout", extractSpaces strikeout <$> tok)
  , ("textsuperscript", extractSpaces superscript <$> tok)
  , ("textsubscript", extractSpaces subscript <$> tok)
  , ("textbackslash", lit "\\")
  , ("backslash", lit "\\")
  , ("slash", lit "/")
  , ("textbf", extractSpaces strong <$> tok)
  , ("textnormal", extractSpaces (spanWith ("",["nodecor"],[])) <$> tok)
  , ("ldots", lit "…")
  , ("dots", lit "…")
  , ("mdots", lit "…") -- NOTE(review): "mdots" is an unusual name — confirm intended
  , ("sim", lit "~")
  , ("label", unlessParseRaw >> (inBrackets <$> tok))
  , ("ref", unlessParseRaw >> (inBrackets <$> tok))
  , ("(", mathInline $ manyTill anyChar (try $ string "\\)"))
  , ("[", mathDisplay $ manyTill anyChar (try $ string "\\]"))
  , ("ensuremath", mathInline $ braced)
  , ("P", lit "¶")
  , ("S", lit "§")
  , ("$", lit "$")
  , ("%", lit "%")
  , ("&", lit "&")
  , ("#", lit "#")
  , ("_", lit "_")
  , ("{", lit "{")
  , ("}", lit "}")
  -- old TeX commands
  , ("em", extractSpaces emph <$> inlines)
  , ("it", extractSpaces emph <$> inlines)
  , ("sl", extractSpaces emph <$> inlines)
  , ("bf", extractSpaces strong <$> inlines)
  , ("rm", inlines)
  , ("itshape", extractSpaces emph <$> inlines)
  , ("slshape", extractSpaces emph <$> inlines)
  , ("scshape", extractSpaces smallcaps <$> inlines)
  , ("bfseries", extractSpaces strong <$> inlines)
  , ("/", pure mempty) -- italic correction
  , ("aa", lit "å")
  , ("AA", lit "Å")
  , ("ss", lit "ß")
  , ("o", lit "ø")
  , ("O", lit "Ø")
  , ("L", lit "Ł")
  , ("l", lit "ł")
  , ("ae", lit "æ")
  , ("AE", lit "Æ")
  , ("oe", lit "œ")
  , ("OE", lit "Œ")
  , ("pounds", lit "£")
  , ("euro", lit "€")
  , ("copyright", lit "©")
  , ("textasciicircum", lit "^")
  , ("textasciitilde", lit "~")
  -- accent commands: apply the accent to the first character of the
  -- argument; fall back to the literal punctuation when no argument follows
  , ("`", option (str "`") $ try $ tok >>= accent grave)
  , ("'", option (str "'") $ try $ tok >>= accent acute)
  , ("^", option (str "^") $ try $ tok >>= accent circ)
  , ("~", option (str "~") $ try $ tok >>= accent tilde)
  , ("\"", option (str "\"") $ try $ tok >>= accent umlaut)
  , (".", option (str ".") $ try $ tok >>= accent dot)
  , ("=", option (str "=") $ try $ tok >>= accent macron)
  , ("c", option (str "c") $ try $ tok >>= accent cedilla)
  , ("v", option (str "v") $ try $ tok >>= accent hacek)
  , ("u", option (str "u") $ try $ tok >>= accent breve)
  , ("i", lit "i")
  , ("\\", linebreak <$ (optional (bracketed inline) *> optional sp))
  , (",", pure mempty)
  , ("@", pure mempty)
  , (" ", lit "\160")
  , ("ps", pure $ str "PS." <> space)
  , ("TeX", lit "TeX")
  , ("LaTeX", lit "LaTeX")
  , ("bar", lit "|")
  , ("textless", lit "<")
  , ("textgreater", lit ">")
  , ("thanks", (note . mconcat) <$> (char '{' *> manyTill block (char '}')))
  , ("footnote", (note . mconcat) <$> (char '{' *> manyTill block (char '}')))
  , ("verb", doverb)
  , ("lstinline", doverb)
  , ("Verb", doverb)
  , ("texttt", (code . stringify . toList) <$> tok)
  , ("url", (unescapeURL <$> braced) >>= \url ->
       pure (link url "" (str url)))
  , ("href", (unescapeURL <$> braced <* optional sp) >>= \url ->
       tok >>= \lab ->
         pure (link url "" lab))
  , ("includegraphics", skipopts *> (unescapeURL <$> braced) >>= mkImage)
  , ("enquote", enquote)
  -- citation commands (natbib and biblatex)
  , ("cite", citation "cite" AuthorInText False)
  , ("citep", citation "citep" NormalCitation False)
  , ("citep*", citation "citep*" NormalCitation False)
  , ("citeal", citation "citeal" NormalCitation False)
  , ("citealp", citation "citealp" NormalCitation False)
  , ("citealp*", citation "citealp*" NormalCitation False)
  , ("autocite", citation "autocite" NormalCitation False)
  , ("footcite", inNote <$> citation "footcite" NormalCitation False)
  , ("parencite", citation "parencite" NormalCitation False)
  , ("supercite", citation "supercite" NormalCitation False)
  , ("footcitetext", inNote <$> citation "footcitetext" NormalCitation False)
  , ("citeyearpar", citation "citeyearpar" SuppressAuthor False)
  , ("citeyear", citation "citeyear" SuppressAuthor False)
  , ("autocite*", citation "autocite*" SuppressAuthor False)
  , ("cite*", citation "cite*" SuppressAuthor False)
  , ("parencite*", citation "parencite*" SuppressAuthor False)
  , ("textcite", citation "textcite" AuthorInText False)
  , ("citet", citation "citet" AuthorInText False)
  , ("citet*", citation "citet*" AuthorInText False)
  , ("citealt", citation "citealt" AuthorInText False)
  , ("citealt*", citation "citealt*" AuthorInText False)
  , ("textcites", citation "textcites" AuthorInText True)
  , ("cites", citation "cites" NormalCitation True)
  , ("autocites", citation "autocites" NormalCitation True)
  , ("footcites", inNote <$> citation "footcites" NormalCitation True)
  , ("parencites", citation "parencites" NormalCitation True)
  , ("supercites", citation "supercites" NormalCitation True)
  , ("footcitetexts", inNote <$> citation "footcitetexts" NormalCitation True)
  , ("Autocite", citation "Autocite" NormalCitation False)
  , ("Footcite", citation "Footcite" NormalCitation False)
  , ("Parencite", citation "Parencite" NormalCitation False)
  , ("Supercite", citation "Supercite" NormalCitation False)
  , ("Footcitetext", inNote <$> citation "Footcitetext" NormalCitation False)
  , ("Citeyearpar", citation "Citeyearpar" SuppressAuthor False)
  , ("Citeyear", citation "Citeyear" SuppressAuthor False)
  , ("Autocite*", citation "Autocite*" SuppressAuthor False)
  , ("Cite*", citation "Cite*" SuppressAuthor False)
  , ("Parencite*", citation "Parencite*" SuppressAuthor False)
  , ("Textcite", citation "Textcite" AuthorInText False)
  , ("Textcites", citation "Textcites" AuthorInText True)
  , ("Cites", citation "Cites" NormalCitation True)
  , ("Autocites", citation "Autocites" NormalCitation True)
  , ("Footcites", citation "Footcites" NormalCitation True)
  , ("Parencites", citation "Parencites" NormalCitation True)
  , ("Supercites", citation "Supercites" NormalCitation True)
  , ("Footcitetexts", inNote <$> citation "Footcitetexts" NormalCitation True)
  , ("citetext", complexNatbibCitation NormalCitation)
  , ("citeauthor", (try (tok *> optional sp *> controlSeq "citetext") *>
                        complexNatbibCitation AuthorInText)
                   <|> citation "citeauthor" AuthorInText False)
  , ("nocite", mempty <$ (citation "nocite" NormalCitation False >>=
                          addMeta "nocite"))
  ] ++ map ignoreInlines
  -- these commands will be ignored unless --parse-raw is specified,
  -- in which case they will appear as raw latex blocks:
  [ "noindent", "index" ]
-- | Build an image inline from an \includegraphics path.  When the
-- path has no extension, the reader's default image extension is added.
mkImage :: String -> LP Inlines
mkImage src = do
   let alt = str "image"
   case takeExtension src of
        "" -> do
              defaultExt <- getOption readerDefaultImageExtension
              return $ image (addExtension src defaultExt) "" alt
        _  -> return $ image src "" alt
-- | Wrap inlines in a footnote, adding a terminal period.
inNote :: Inlines -> Inlines
inNote ils =
  note $ para $ ils <> str "."
-- | Undo TeX escaping in a URL: a backslash followed by one of the
-- characters @#$%&~_^\\{}@ is replaced by the bare character; any other
-- backslash (and all other characters) pass through unchanged.
unescapeURL :: String -> String
unescapeURL = go
  where
    escapable = "#$%&~_^\\{}"
    go ('\\' : c : rest)
      | c `elem` escapable = c : go rest
    go (c : rest)          = c : go rest
    go []                  = []
-- | Parse \enquote{..}, nesting quote styles: single quotes when we
-- are already inside double quotes, double quotes otherwise.
enquote :: LP Inlines
enquote = do
  skipopts
  context <- stateQuoteContext <$> getState
  if context == InDoubleQuote
     then singleQuoted <$> withQuoteContext InSingleQuote tok
     else doubleQuoted <$> withQuoteContext InDoubleQuote tok
-- | Parse \verb-style verbatim text: the first character is the
-- delimiter, and everything up to its next occurrence (on the same
-- line) becomes inline code.
doverb :: LP Inlines
doverb = do
  marker <- anyChar
  code <$> manyTill (satisfy (/='\n')) (char marker)
-- | Literate-Haskell inline code, delimited by @|@.
doLHSverb :: LP Inlines
doLHSverb = codeWith ("",["haskell"],[]) <$> manyTill (satisfy (/='\n')) (char '|')
-- | Lift a literal string into an inline parser.
lit :: String -> LP Inlines
lit = pure . str
-- | Apply an accent function to the first character of the first Str
-- in the inlines; fails on empty input, and returns the inlines
-- unchanged when they do not start with a Str.
accent :: (Char -> String) -> Inlines -> LP Inlines
accent f ils =
  case toList ils of
       (Str (x:xs) : ys) -> return $ fromList $ (Str (f x ++ xs) : ys)
       []                -> mzero
       _                 -> return ils
-- | Precomposed grave-accented form of a character; characters with no
-- precomposed form are returned unchanged.
grave :: Char -> String
grave ch = fromMaybe [ch] (lookup ch table)
  where table = zip "AEIOUaeiou"
                    ["À","È","Ì","Ò","Ù","à","è","ì","ò","ù"]
-- | Precomposed acute-accented form of a character; characters with no
-- precomposed form are returned unchanged.
acute :: Char -> String
acute ch = fromMaybe [ch] (lookup ch table)
  where table = zip "AEIOUYaeiouyCcLlNnRrSsZz"
                    [ "Á","É","Í","Ó","Ú","Ý"
                    , "á","é","í","ó","ú","ý"
                    , "Ć","ć","Ĺ","ĺ","Ń","ń"
                    , "Ŕ","ŕ","Ś","ś","Ź","ź" ]
-- | Precomposed circumflex form of a character; characters with no
-- precomposed form are returned unchanged.
circ :: Char -> String
circ ch = fromMaybe [ch] (lookup ch table)
  where table = zip "AEIOUaeiouCcGgHhJjSsWwYy"
                    [ "Â","Ê","Î","Ô","Û"
                    , "â","ê","î","ô","û"
                    , "Ĉ","ĉ","Ĝ","ĝ","Ĥ","ĥ"
                    , "Ĵ","ĵ","Ŝ","ŝ","Ŵ","ŵ","Ŷ","ŷ" ]
-- | Precomposed tilde form of a character; characters with no
-- precomposed form are returned unchanged.
tilde :: Char -> String
tilde ch = fromMaybe [ch] (lookup ch table)
  where table = zip "AaOoIiUuNn"
                    ["Ã","ã","Õ","õ","Ĩ","ĩ","Ũ","ũ","Ñ","ñ"]
-- | Precomposed umlaut/diaeresis form of a character; characters with
-- no precomposed form are returned unchanged.
umlaut :: Char -> String
umlaut ch = fromMaybe [ch] (lookup ch table)
  where table = zip "AEIOUaeiou"
                    ["Ä","Ë","Ï","Ö","Ü","ä","ë","ï","ö","ü"]
-- | Precomposed dot-above form of a character; characters with no
-- precomposed form are returned unchanged.  (Only capital I has a
-- dotted form; lowercase i is already dotted.)
dot :: Char -> String
dot ch = fromMaybe [ch] (lookup ch table)
  where table = zip "CcEeGgIZz"
                    ["Ċ","ċ","Ė","ė","Ġ","ġ","İ","Ż","ż"]
-- | Precomposed macron form of a character; characters with no
-- precomposed form are returned unchanged.
macron :: Char -> String
macron ch = fromMaybe [ch] (lookup ch table)
  where table = zip "AEIOUaeiou"
                    ["Ā","Ē","Ī","Ō","Ū","ā","ē","ī","ō","ū"]
-- | Cedilla form of a character; characters with no mapping are
-- returned unchanged.  For o/O there is no precomposed codepoint, so a
-- combining cedilla is used.
cedilla :: Char -> String
cedilla ch = fromMaybe [ch] (lookup ch table)
  where table = zip "cCsStTeEhHoO"
                    [ "ç","Ç","ş","Ş","ţ","Ţ"
                    , "ȩ","Ȩ","ḩ","Ḩ","o̧","O̧" ]
-- | Precomposed caron (háček) form of a character; characters with no
-- precomposed form are returned unchanged.  (Only lowercase j has a
-- caron form.)
hacek :: Char -> String
hacek ch = fromMaybe [ch] (lookup ch table)
  where table = zip "AaCcDdEeGgHhIijKkLlNnOoRrSsTtUuZz"
                    [ "Ǎ","ǎ","Č","č","Ď","ď","Ě","ě"
                    , "Ǧ","ǧ","Ȟ","ȟ","Ǐ","ǐ","ǰ"
                    , "Ǩ","ǩ","Ľ","ľ","Ň","ň","Ǒ","ǒ"
                    , "Ř","ř","Š","š","Ť","ť","Ǔ","ǔ"
                    , "Ž","ž" ]
-- | Precomposed breve form of a character; characters with no
-- precomposed form are returned unchanged.
breve :: Char -> String
breve ch = fromMaybe [ch] (lookup ch table)
  where table = zip "AaEeGgIiOoUu"
                    [ "Ă","ă","Ĕ","ĕ","Ğ","ğ"
                    , "Ĭ","ĭ","Ŏ","ŏ","Ŭ","ŭ" ]
-- | Parse one "token" argument: a braced group, an inline command, or
-- a single plain character.
tok :: LP Inlines
tok = try $ grouped inline <|> inlineCommand <|> str <$> (count 1 $ inlineChar)
-- | Parse one bracketed optional argument, then skip trailing space.
opt :: LP Inlines
opt = bracketed inline <* optional sp
-- | Skip any number of bracketed optional arguments.
skipopts :: LP ()
skipopts = skipMany opt
-- | Parse a run of ordinary characters as a Str.
inlineText :: LP Inlines
inlineText = str <$> many1 inlineChar
-- | A character with no special meaning in inline context.
inlineChar :: LP Char
inlineChar = noneOf "\\$%^_&~#{}^'`\"‘’“”-[] \t\n"
-- | Parse \begin{name}..: dispatch on the environment name via
-- 'environments', falling back to a raw-latex rendering.
environment :: LP Blocks
environment = do
  controlSeq "begin"
  name <- braced
  case M.lookup name environments of
       Just p      -> p <|> rawEnv name
       Nothing     -> rawEnv name
-- | Handle an unrecognized environment: emit it as a raw latex block
-- (after macro expansion) when --parse-raw is set, otherwise parse its
-- contents as ordinary blocks.
rawEnv :: String -> LP Blocks
rawEnv name = do
  let addBegin x = "\\begin{" ++ name ++ "}" ++ x
  parseRaw <- getOption readerParseRaw
  if parseRaw
     then (rawBlock "latex" . addBegin) <$>
            (withRaw (env name blocks) >>= applyMacros' . snd)
     else env name blocks
----
-- Parser used for include processing; the state is a stack of the
-- files currently being included (for loop detection).
type IncludeParser = ParserT [Char] [String] IO String
-- | Replace "include" commands with file contents.
-- NOTE: calls 'error' (rather than returning a Left) if the
-- include-preprocessing parse itself fails.
handleIncludes :: String -> IO String
handleIncludes s = do
  res <- runParserT includeParser' [] "input" s
  case res of
       Right s' -> return s'
       Left e   -> error $ show e
-- | One pass over the source, leaving everything intact except
-- \include-style commands, which are replaced by file contents
-- bracketed with \PandocStartInclude/\PandocEndInclude markers.
includeParser' :: IncludeParser
includeParser' =
  concat <$> many (comment' <|> escaped' <|> blob' <|> include'
                   <|> startMarker' <|> endMarker'
                   <|> verbCmd' <|> verbatimEnv' <|> backslash')
-- | Pass a TeX comment through unchanged (so \input inside a comment
-- is not expanded).
comment' :: IncludeParser
comment' = do
  char '%'
  xs <- manyTill anyChar newline
  return ('%':xs ++ "\n")
-- | Pass an escaped % or \\ through, so they can't confuse the lexer.
escaped' :: IncludeParser
escaped' = try $ string "\\%" <|> string "\\\\"
-- | Pass a \verb construct through verbatim.
verbCmd' :: IncludeParser
verbCmd' = fmap snd <$>
  withRaw $ try $ do
             string "\\verb"
             c <- anyChar
             manyTill anyChar (char c)
-- | Pass a verbatim-like environment through untouched.
verbatimEnv' :: IncludeParser
verbatimEnv' = fmap snd <$>
  withRaw $ try $ do
             string "\\begin"
             name <- braced'
             guard $ name `elem` ["verbatim", "Verbatim", "lstlisting",
                                  "minted", "alltt"]
             manyTill anyChar (try $ string $ "\\end{" ++ name ++ "}")
-- | A run of characters containing no backslash or percent sign.
blob' :: IncludeParser
blob' = try $ many1 (noneOf "\\%")
-- | A lone backslash (fallback when nothing else matched).
backslash' :: IncludeParser
backslash' = string "\\"
-- | A braced group, returned without the braces.
braced' :: IncludeParser
braced' = try $ char '{' *> manyTill (satisfy (/='}')) (char '}')
-- | Handle \include, \input and \usepackage: read each referenced file
-- (".sty" extension for usepackage, ".tex" otherwise) and splice its
-- contents into the input stream, bracketed by
-- \PandocStartInclude/\PandocEndInclude markers so positions can be
-- restored afterwards.  Fails on include cycles.
include' :: IncludeParser
include' = do
  fs' <- try $ do
              char '\\'
              name <- try (string "include")
                  <|> try (string "input")
                  <|> string "usepackage"
              -- skip options
              skipMany $ try $ char '[' *> (manyTill anyChar (char ']'))
              fs <- (map trim . splitBy (==',')) <$> braced'
              return $ if name == "usepackage"
                          then map (flip replaceExtension ".sty") fs
                          else map (flip replaceExtension ".tex") fs
  pos <- getPosition
  containers <- getState
  let fn = case containers of
                (f':_) -> f'
                []     -> "input"
  -- now process each include file in order...
  rest <- getInput
  results' <- forM fs' (\f -> do
    when (f `elem` containers) $
      fail "Include file loop!"
    contents <- lift $ readTeXFile f
    return $ "\\PandocStartInclude{" ++ f ++ "}" ++
             contents ++ "\\PandocEndInclude{" ++
             fn ++ "}{" ++ show (sourceLine pos) ++ "}{"
             ++ show (sourceColumn pos) ++ "}")
  setInput $ concat results' ++ rest
  return ""
-- | On \PandocStartInclude{fn}: push fn on the include stack and move
-- the reported source position into that file.
startMarker' :: IncludeParser
startMarker' = try $ do
  string "\\PandocStartInclude"
  fn <- braced'
  updateState (fn:)
  setPosition $ newPos fn 1 1
  return $ "\\PandocStartInclude{" ++ fn ++ "}"
-- | On \PandocEndInclude{fn}{ln}{co}: pop the include stack and
-- restore the position saved when the include began.
endMarker' :: IncludeParser
endMarker' = try $ do
  string "\\PandocEndInclude"
  fn <- braced'
  ln <- braced'
  co <- braced'
  updateState tail
  setPosition $ newPos fn (fromMaybe 1 $ safeRead ln) (fromMaybe 1 $ safeRead co)
  return $ "\\PandocEndInclude{" ++ fn ++ "}{" ++ ln ++ "}{" ++
             co ++ "}"
-- | Read a TeX file, searching the colon-separated TEXINPUTS
-- directories (defaulting to the current directory).
readTeXFile :: FilePath -> IO String
readTeXFile f = do
  texinputs <- E.catch (getEnv "TEXINPUTS") $ \(_ :: E.SomeException) ->
                   return "."
  let ds = splitBy (==':') texinputs
  readFileFromDirs ds f
-- | Try each directory in turn; NOTE: silently returns "" when the
-- file is found in none of them (includes of missing files vanish).
readFileFromDirs :: [FilePath] -> FilePath -> IO String
readFileFromDirs [] _ = return ""
readFileFromDirs (d:ds) f =
  E.catch (UTF8.readFile $ d </> f) $ \(_ :: E.SomeException) ->
    readFileFromDirs ds f
----
-- | Parse one key[=value] pair (alphanumeric key and value only),
-- consuming any trailing comma and spaces.  A missing value yields "".
keyval :: LP (String, String)
keyval = try $ do
  key <- many1 alphaNum
  val <- option "" $ char '=' >> many1 alphaNum
  skipMany spaceChar
  optional (char ',')
  skipMany spaceChar
  return (key, val)
-- | Parse a bracketed list of key=value options.
keyvals :: LP [(String, String)]
keyvals = try $ char '[' *> manyTill keyval (char ']')
-- | Parse the contents of an alltt environment: commands are honored,
-- but spaces, %, and line breaks are made literal before reparsing,
-- and resulting Str elements become Code.
alltt :: String -> LP Blocks
alltt t = walk strToCode <$> parseFromString blocks
   (substitute " " "\\ " $ substitute "%" "\\%" $
    concat $ intersperse "\\\\\n" $ lines t)
  where strToCode (Str s) = Code nullAttr s
        strToCode x       = x
-- | Consume a block-level latex construct and return its raw text
-- (used by other readers to pass latex through).
rawLaTeXBlock :: Parser [Char] ParserState String
rawLaTeXBlock = snd <$> try (withRaw (environment <|> blockCommand))
-- | Consume an inline (or block) latex command and return it as a
-- RawInline, after macro expansion.
rawLaTeXInline :: Parser [Char] ParserState Inline
rawLaTeXInline = do
  raw <- (snd <$> withRaw inlineCommand) <|> (snd <$> withRaw blockCommand)
  RawInline "latex" <$> applyMacros' raw
-- | Attach the caption stored in parser state (if any) to every image
-- in the blocks, marking the title "fig:" so writers treat it as a
-- captioned figure.
addImageCaption :: Blocks -> LP Blocks
addImageCaption = walkM go
  where go (Image alt (src,tit)) = do
          mbcapt <- stateCaption <$> getState
          case mbcapt of
               Just ils -> return (Image (toList ils) (src, "fig:"))
               Nothing  -> return (Image alt (src,tit))
        go x = return x
-- | Attach the caption stored in parser state (if any) to every table
-- in the blocks.
addTableCaption :: Blocks -> LP Blocks
addTableCaption = walkM go
  where go (Table c als ws hs rs) = do
          mbcapt <- stateCaption <$> getState
          case mbcapt of
               Just ils -> return (Table (toList ils) als ws hs rs)
               Nothing  -> return (Table c als ws hs rs)
        go x = return x
-- | Dispatch table from LaTeX environment names to block parsers.
-- Unlisted environments are handled by 'rawEnv'.
environments :: M.Map String (LP Blocks)
environments = M.fromList
  [ ("document", env "document" blocks <* skipMany anyChar)
  , ("letter", env "letter" letter_contents)
  , ("figure", env "figure" $
         resetCaption *> skipopts *> blocks >>= addImageCaption)
  , ("center", env "center" blocks)
  , ("table",  env "table" $
         resetCaption *> skipopts *> blocks >>= addTableCaption)
  , ("tabular", env "tabular" simpTable)
  , ("quote", blockQuote <$> env "quote" blocks)
  , ("quotation", blockQuote <$> env "quotation" blocks)
  , ("verse", blockQuote <$> env "verse" blocks)
  , ("itemize", bulletList <$> listenv "itemize" (many item))
  , ("description", definitionList <$> listenv "description" (many descItem))
  , ("enumerate", ordered_list)
  , ("alltt", alltt =<< verbEnv "alltt")
  , ("code", guardEnabled Ext_literate_haskell *>
      (codeBlockWith ("",["sourceCode","literate","haskell"],[]) <$>
        verbEnv "code"))
  , ("verbatim", codeBlock <$> (verbEnv "verbatim"))
  -- fancyvrb: the "firstnumber" option maps to pandoc's "startFrom",
  -- and numbers=left turns on the numberLines class
  , ("Verbatim",   do options <- option [] keyvals
                      let kvs = [ (if k == "firstnumber"
                                      then "startFrom"
                                      else k, v) | (k,v) <- options ]
                      let classes = [ "numberLines" |
                                      lookup "numbers" options == Just "left" ]
                      let attr = ("",classes,kvs)
                      codeBlockWith attr <$> (verbEnv "Verbatim"))
  , ("lstlisting", do options <- option [] keyvals
                      let kvs = [ (if k == "firstnumber"
                                      then "startFrom"
                                      else k, v) | (k,v) <- options ]
                      let classes = [ "numberLines" |
                                      lookup "numbers" options == Just "left" ]
                                 ++ maybe [] (:[]) (lookup "language" options
                                         >>= fromListingsLanguage)
                      let attr = (fromMaybe "" (lookup "label" options),classes,kvs)
                      codeBlockWith attr <$> (verbEnv "lstlisting"))
  , ("minted",     do options <- option [] keyvals
                      lang <- grouped (many1 $ satisfy (/='}'))
                      let kvs = [ (if k == "firstnumber"
                                      then "startFrom"
                                      else k, v) | (k,v) <- options ]
                      let classes = [ lang | not (null lang) ] ++
                                    [ "numberLines" |
                                      lookup "linenos" options == Just "true" ]
                      let attr = ("",classes,kvs)
                      codeBlockWith attr <$> (verbEnv "minted"))
  , ("obeylines", parseFromString
                  (para . trimInlines . mconcat <$> many inline) =<<
                  intercalate "\\\\\n" . lines <$> verbEnv "obeylines")
  -- math environments: multi-line ones are wrapped in an inner
  -- gathered/aligned environment so MathJax-style renderers accept them
  , ("displaymath", mathEnv Nothing "displaymath")
  , ("equation", mathEnv Nothing "equation")
  , ("equation*", mathEnv Nothing "equation*")
  , ("gather", mathEnv (Just "gathered") "gather")
  , ("gather*", mathEnv (Just "gathered") "gather*")
  , ("multline", mathEnv (Just "gathered") "multline")
  , ("multline*", mathEnv (Just "gathered") "multline*")
  , ("eqnarray", mathEnv (Just "aligned") "eqnarray")
  , ("eqnarray*", mathEnv (Just "aligned") "eqnarray*")
  , ("align", mathEnv (Just "aligned") "align")
  , ("align*", mathEnv (Just "aligned") "align*")
  , ("alignat", mathEnv (Just "aligned") "alignat")
  , ("alignat*", mathEnv (Just "aligned") "alignat*")
  ]
-- | Parse the body of a letter environment, prepending the address
-- (from the "address" metadata field) as an opening paragraph.
letter_contents :: LP Blocks
letter_contents = do
  bs <- blocks
  st <- getState
  -- add signature (author) and address (title)
  let addr = case lookupMeta "address" (stateMeta st) of
                  Just (MetaBlocks [Plain xs]) ->
                     para $ trimInlines $ fromList xs
                  _ -> mempty
  return $ addr <> bs -- sig added by \closing
-- | Parse \closing{..}: the closing phrase followed by the authors'
-- names (from "author" metadata) as a signature, one per line.
closing :: LP Blocks
closing = do
  contents <- tok
  st <- getState
  let extractInlines (MetaBlocks [Plain ys]) = ys
      extractInlines (MetaBlocks [Para ys ]) = ys
      extractInlines _          = []
  let sigs = case lookupMeta "author" (stateMeta st) of
                  Just (MetaList xs) ->
                    para $ trimInlines $ fromList $
                      intercalate [LineBreak] $ map extractInlines xs
                  _ -> mempty
  return $ para (trimInlines contents) <> sigs
-- | Parse one \item in a list environment: skip any stray blocks
-- before \item, then its optional argument, then its content.
item :: LP Blocks
item = blocks *> controlSeq "item" *> skipopts *> blocks
-- | Succeed (with no output) only when NOT already inside a list item;
-- used to keep nested-list parsing from looping.
loose_item :: LP Blocks
loose_item = do
  ctx <- stateParserContext `fmap` getState
  if ctx == ListItemState
     then mzero
     else return mempty
-- | Parse one \item[term] of a description environment, returning the
-- term and its definition blocks.
descItem :: LP (Inlines, [Blocks])
descItem = do
  blocks -- skip blocks before item
  controlSeq "item"
  optional sp
  ils <- opt
  bs <- blocks
  return (ils, [bs])
-- | Run a parser for an environment's contents, requiring a matching
-- \end{name} afterwards.
env :: String -> LP a -> LP a
env name p = p <*
  (try (controlSeq "end" *> braced >>= guard . (== name))
   <?> ("\\end{" ++ name ++ "}"))
-- | Like 'env', but runs the body with parser context set to
-- ListItemState (restoring the previous context afterwards).
listenv :: String -> LP a -> LP a
listenv name p = try $ do
  oldCtx <- stateParserContext `fmap` getState
  updateState $ \st -> st{ stateParserContext = ListItemState }
  res <- env name p
  updateState $ \st -> st{ stateParserContext = oldCtx }
  return res
-- | Parse a math environment as display math, optionally wrapping the
-- body in an inner environment (e.g. "aligned") for renderers.
mathEnv :: Maybe String -> String -> LP Blocks
mathEnv innerEnv name = para <$> mathDisplay (inner <$> verbEnv name)
   where inner x = case innerEnv of
                        Nothing -> x
                        Just y  -> "\\begin{" ++ y ++ "}\n" ++ x ++
                                      "\\end{" ++ y ++ "}"
-- | Collect the raw contents of an environment verbatim, up to its
-- \end{name}, stripping trailing newlines.
verbEnv :: String -> LP String
verbEnv name = do
  skipopts
  optional blankline
  let endEnv = try $ controlSeq "end" *> braced >>= guard . (== name)
  res <- manyTill anyChar endEnv
  return $ stripTrailingNewlines res
-- | Parse an enumerate environment.  An optional bracketed marker sets
-- style/delimiter, and a \setcounter{enum..} before the items sets the
-- start number (counter value + 1).
ordered_list :: LP Blocks
ordered_list = do
  optional sp
  (_, style, delim) <- option (1, DefaultStyle, DefaultDelim) $
                              try $ char '[' *> anyOrderedListMarker <* char ']'
  spaces
  optional $ try $ controlSeq "setlength" *> grouped (controlSeq "itemindent") *> braced
  spaces
  start <- option 1 $ try $ do controlSeq "setcounter"
                               grouped (string "enum" *> many1 (oneOf "iv"))
                               optional sp
                               num <- grouped (many1 digit)
                               spaces
                               return $ (read num + 1 :: Int)
  bs <- listenv "enumerate" (many item)
  return $ orderedListWith (start, style, delim) bs
-- | Parse a paragraph of inlines; a paragraph that trims to nothing
-- produces no block at all.
paragraph :: LP Blocks
paragraph = do
  x <- trimInlines . mconcat <$> many1 inline
  if x == mempty
     then return mempty
     else return $ para x
-- | Consume the document preamble (everything before \begin{document})
-- for its side effects (macros, metadata), producing no output.
preamble :: LP Blocks
preamble =  mempty <$> manyTill preambleBlock beginDoc
  where beginDoc = lookAhead $ try $ controlSeq "begin" *> string "{document}"
        preambleBlock =  (void comment)
                     <|> (void sp)
                     <|> (void blanklines)
                     <|> (void macro)
                     <|> (void blockCommand)
                     <|> (void anyControlSeq)
                     <|> (void braced)
                     <|> (void anyChar)
-------
-- citations
-- | Prepend inlines to the prefix of the FIRST citation in the list.
addPrefix :: [Inline] -> [Citation] -> [Citation]
addPrefix p (k:ks)   = k {citationPrefix = p ++ citationPrefix k} : ks
addPrefix _ _        = []
-- | Append inlines to the suffix of the LAST citation in the list.
addSuffix :: [Inline] -> [Citation] -> [Citation]
addSuffix s ks@(_:_) =
  let k = last ks
  in  init ks ++ [k {citationSuffix = citationSuffix k ++ s}]
addSuffix _ _        = []
-- | Parse the arguments of a simple citation command: up to two
-- bracketed arguments (one = suffix; two = prefix then suffix) and a
-- braced, comma-separated list of keys.
simpleCiteArgs :: LP [Citation]
simpleCiteArgs = try $ do
  first  <- optionMaybe $ toList <$> opt
  second <- optionMaybe $ toList <$> opt
  char '{'
  optional sp
  keys <- manyTill citationLabel (char '}')
  let (pre, suf) = case (first  , second ) of
        (Just s , Nothing) -> (mempty, s )
        (Just s , Just t ) -> (s , t )
        _                  -> (mempty, mempty)
      conv k = Citation { citationId      = k
                        , citationPrefix  = []
                        , citationSuffix  = []
                        , citationMode    = NormalCitation
                        , citationHash    = 0
                        , citationNoteNum = 0
                        }
  return $ addPrefix pre $ addSuffix suf $ map conv keys
-- | Parse one bibtex citation key, consuming surrounding spaces and a
-- trailing comma.
citationLabel :: LP String
citationLabel  = optional sp *>
                 (many1 (satisfy isBibtexKeyChar)
                 <* optional sp
                 <* optional (char ',')
                 <* optional sp)
  where isBibtexKeyChar c = isAlphaNum c || c `elem` ".:;?!`'()/*@_+=-[]*"
-- | Parse one (or, for multi-cite commands, several) citation argument
-- groups and set the citation mode; for AuthorInText only the first
-- citation carries the mode.
cites :: CitationMode -> Bool -> LP [Citation]
cites mode multi = try $ do
  cits <- if multi
             then many1 simpleCiteArgs
             else count 1 simpleCiteArgs
  let cs = concat cits
  return $ case mode of
        AuthorInText -> case cs of
                             (c:rest) -> c {citationMode = mode} : rest
                             []       -> []
        _            -> map (\a -> a {citationMode = mode}) cs
-- | Build a Cite inline for the named command, keeping the raw latex
-- as fallback content.
citation :: String -> CitationMode -> Bool -> LP Inlines
citation name mode multi = do
  (c,raw) <- withRaw $ cites mode multi
  return $ cite c (rawInline "latex" $ "\\" ++ name ++ raw)
-- | Parse a natbib \citetext{..} body: semicolon-separated segments of
-- prefix, citation, suffix.  The partial pattern on the final bind is
-- intentional: match failure in the LP monad becomes parse failure.
complexNatbibCitation :: CitationMode -> LP Inlines
complexNatbibCitation mode = try $ do
  let ils = (toList . trimInlines . mconcat) <$>
              many (notFollowedBy (oneOf "\\};") >> inline)
  let parseOne = try $ do
                   skipSpaces
                   pref  <- ils
                   cit' <- inline -- expect a citation
                   let citlist = toList cit'
                   cits' <- case citlist of
                                 [Cite cs _] -> return cs
                                 _           -> mzero
                   suff  <- ils
                   skipSpaces
                   optional $ char ';'
                   return $ addPrefix pref $ addSuffix suff $ cits'
  (c:cits, raw) <- withRaw $ grouped parseOne
  return $ cite (c{ citationMode = mode }:cits)
           (rawInline "latex" $ "\\citetext" ++ raw)
-- tables
-- | Parse a tabular alignment spec like {|l|c|r|p{..}|}: c/l/r map to
-- the obvious alignments, p{..} columns are treated as left-aligned;
-- rules (|) and @{} separators are ignored.
parseAligns :: LP [Alignment]
parseAligns = try $ do
  char '{'
  let maybeBar = skipMany $ sp <|> () <$ char '|' <|> () <$ try (string "@{}")
  maybeBar
  let cAlign = AlignCenter <$ char 'c'
  let lAlign = AlignLeft <$ char 'l'
  let rAlign = AlignRight <$ char 'r'
  let parAlign = AlignLeft <$ (char 'p' >> braced)
  let alignChar = cAlign <|> lAlign <|> rAlign <|> parAlign
  aligns' <- sepEndBy alignChar maybeBar
  spaces
  char '}'
  spaces
  return aligns'
-- | Parse an \hline row separator.
hline :: LP ()
hline = () <$ (try $ spaces >> controlSeq "hline")
-- | Parse a \\ row terminator.
lbreak :: LP ()
lbreak = () <$ (try $ spaces *> controlSeq "\\")
-- | Parse a & cell separator.
amp :: LP ()
amp = () <$ (try $ spaces *> char '&')
-- | Parse one table row with at most @cols@ cells, padding short rows
-- with empty cells on the right.  Rejects rows that are a single
-- empty cell (likely stray whitespace, not a data row).
parseTableRow :: Int  -- ^ number of columns
              -> LP [Blocks]
parseTableRow cols = try $ do
  let tableCellInline = notFollowedBy (amp <|> lbreak) >> inline
  let tableCell = (plain . trimInlines . mconcat) <$> many tableCellInline
  cells' <- sepBy1 tableCell amp
  let numcells = length cells'
  guard $ numcells <= cols && numcells >= 1
  guard $ cells' /= [mempty]
  -- note:  a & b in a three-column table leaves an empty 3rd cell:
  let cells'' = cells' ++ replicate (cols - numcells) mempty
  spaces
  return cells''
-- | Parse a simple tabular environment body: an alignment spec,
-- an optional header row (terminated by \hline), then data rows.
-- Column widths are all 0 (auto).
simpTable :: LP Blocks
simpTable = try $ do
  spaces
  aligns <- parseAligns
  let cols = length aligns
  optional hline
  header' <- option [] $ try (parseTableRow cols <* lbreak <* hline)
  rows <- sepEndBy (parseTableRow cols) (lbreak <* optional hline)
  spaces
  skipMany (comment *> spaces)
  let header'' = if null header'
                    then replicate cols mempty
                    else header'
  lookAhead $ controlSeq "end" -- make sure we're at end
  return $ table mempty (zip aligns (repeat 0)) header'' rows
-- | Handle a \PandocStartInclude marker (inserted by 'handleIncludes'):
-- switch the reported source position to the included file.
startInclude :: LP Blocks
startInclude = do
  fn <- braced
  setPosition $ newPos fn 1 1
  return mempty
-- | Handle a \PandocEndInclude marker: restore the position saved when
-- the include began (defaulting to line/column 1 if unparseable).
endInclude :: LP Blocks
endInclude = do
  fn <- braced
  ln <- braced
  co <- braced
  setPosition $ newPos fn (fromMaybe 1 $ safeRead ln) (fromMaybe 1 $ safeRead co)
  return mempty
| bgamari/pandoc | src/Text/Pandoc/Readers/LaTeX.hs | gpl-2.0 | 43,124 | 0 | 29 | 11,599 | 14,992 | 7,704 | 7,288 | 1,103 | 5 |
{-# LANGUAGE ScopedTypeVariables #-}
{-
Copyright (C) 2009 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- | Useful functions for defining wiki handlers.
-}
module Network.Gitit.Framework (
-- * Combinators for dealing with users
withUserFromSession
, withUserFromHTTPAuth
, authenticateUserThat
, authenticate
, getLoggedInUser
-- * Combinators to exclude certain actions
, unlessNoEdit
, unlessNoDelete
-- * Guards for routing
, guardCommand
, guardPath
, guardIndex
, guardBareBase
-- * Functions to get info from the request
, getPath
, getPage
, getReferer
, getWikiBase
, uriPath
-- * Useful predicates
, isPage
, isPageFile
, isDiscussPage
, isDiscussPageFile
, isSourceCode
-- * Combinators that change the request locally
, withMessages
-- * Miscellaneous
, urlForPage
, pathForPage
, getMimeTypeForExtension
, validate
, filestoreFromConfig
)
where
import Safe
import Network.Gitit.Server
import Network.Gitit.State
import Network.Gitit.Types
import Data.FileStore
import Data.Char (toLower)
import Control.Monad (mzero, liftM, unless, MonadPlus)
import qualified Data.Map as M
import qualified Data.ByteString.UTF8 as UTF8
import qualified Data.ByteString.Lazy.UTF8 as LazyUTF8
import Data.Maybe (fromJust, fromMaybe)
import Data.List (intercalate, isPrefixOf, isInfixOf)
import System.FilePath ((<.>), takeExtension, takeFileName)
import Text.Highlighting.Kate
import Text.ParserCombinators.Parsec
import Network.URL (decString, encString)
import Network.URI (isUnescapedInURI)
import Data.ByteString.Base64 (decodeLenient)
import Network.HTTP (urlEncodeVars)
-- | Require a logged in user if the authentication level demands it.
-- Run the handler if a user is logged in, otherwise redirect
-- to login page.
authenticate :: AuthenticationLevel -> Handler -> Handler
authenticate = authenticateUserThat (const True)
-- | Like 'authenticate', but with a predicate that the user must satisfy.
-- Unauthenticated requests are redirected to @/_login@ with the original
-- URL as the @destination@ parameter; an authenticated user failing the
-- predicate raises an error ("Not authorized.").
authenticateUserThat :: (User -> Bool) -> AuthenticationLevel -> Handler -> Handler
authenticateUserThat predicate level handler = do
  cfg <- getConfig
  if level <= requireAuthentication cfg
     then do
       mbUser <- getLoggedInUser
       rq <- askRq
       let url = rqUri rq ++ rqQuery rq
       case mbUser of
            Nothing -> tempRedirect ("/_login?" ++ urlEncodeVars [("destination", url)]) $ toResponse ()
            Just u  -> if predicate u
                          then handler
                          else error "Not authorized."
     else handler
-- | Run the handler after setting @REMOTE_USER@ with the user from
-- the session.  Also refreshes the session cookie's timeout.  When no
-- session (or no user) is found, @REMOTE_USER@ is set to "".
withUserFromSession :: Handler -> Handler
withUserFromSession handler = withData $ \(sk :: Maybe SessionKey) -> do
  mbSd <- maybe (return Nothing) getSession sk
  cfg <- getConfig
  mbUser <- case mbSd of
            Nothing -> return Nothing
            Just sd -> do
              -- fromJust is safe here: mbSd is Just only when sk was Just
              addCookie (MaxAge $ sessionTimeout cfg) (mkCookie "sid" (show $ fromJust sk))  -- refresh timeout
              getUser $! sessionUser sd
  let user = maybe "" uUsername mbUser
  localRq (setHeader "REMOTE_USER" user) handler
-- | Run the handler after setting @REMOTE_USER@ from the "authorization"
-- header.  Works with simple HTTP authentication or digest authentication.
-- An absent or unparseable header yields "".
withUserFromHTTPAuth :: Handler -> Handler
withUserFromHTTPAuth handler = do
  req <- askRq
  let user = case getHeader "authorization" req of
              Nothing         -> ""
              Just authHeader -> case parse pAuthorizationHeader "" (UTF8.toString authHeader) of
                                  Left _  -> ""
                                  Right u -> u
  localRq (setHeader "REMOTE_USER" user) handler
-- | Returns @Just@ logged in user or @Nothing@, based on the
-- @REMOTE_USER@ header.  A user name that is not in the user store
-- (e.g. one supplied by HTTP auth) is returned with empty email and an
-- undefined password; the password field must not be forced.
getLoggedInUser :: GititServerPart (Maybe User)
getLoggedInUser = do
  req <- askRq
  case maybe "" UTF8.toString (getHeader "REMOTE_USER" req) of
        "" -> return Nothing
        u  -> do
          mbUser <- getUser u
          case mbUser of
               Just user -> return $ Just user
               Nothing   -> return $ Just User{uUsername = u, uEmail = "", uPassword = undefined}
-- | Parse the value of an HTTP Authorization header, trying Basic
-- before Digest.
pAuthorizationHeader :: GenParser Char st String
pAuthorizationHeader = try pBasicHeader <|> pDigestHeader
-- | Extract the username from a Digest authorization header.
pDigestHeader :: GenParser Char st String
pDigestHeader = do
  _ <- string "Digest username=\""
  result' <- many (noneOf "\"")
  _ <- char '"'
  return result'
-- | Extract the username from a Basic authorization header: decode the
-- base64 payload and take everything before the first colon.
pBasicHeader :: GenParser Char st String
pBasicHeader = do
  _ <- string "Basic "
  result' <- many (noneOf " \t\n")
  return $ takeWhile (/=':') $ UTF8.toString
         $ decodeLenient $ UTF8.fromString result'
-- | @unlessNoEdit responder fallback@ runs @responder@ unless the
-- page has been designated not editable in configuration; in that
-- case, runs @fallback@ with a "Page is locked." message added.
unlessNoEdit :: Handler
             -> Handler
             -> Handler
unlessNoEdit responder fallback = withData $ \(params :: Params) -> do
  cfg <- getConfig
  page <- getPage
  if page `elem` noEdit cfg
     then withMessages ("Page is locked." : pMessages params) fallback
     else responder
-- | @unlessNoDelete responder fallback@ runs @responder@ unless the
-- page has been designated not deletable in configuration; in that
-- case, runs @fallback@ with a "Page cannot be deleted." message added.
unlessNoDelete :: Handler
               -> Handler
               -> Handler
unlessNoDelete responder fallback = withData $ \(params :: Params) -> do
  cfg <- getConfig
  page <- getPage
  if page `elem` noDelete cfg
     then withMessages ("Page cannot be deleted." : pMessages params) fallback
     else responder
-- | Returns the current path (subtracting initial commands like @\/_edit@),
-- joined with slashes.
getPath :: ServerMonad m => m String
getPath = liftM (intercalate "/" . rqPaths) askRq
-- | Returns the current page name (derived from the path): the
-- configured front page for an empty path; otherwise the path when it
-- is a valid page name, failing (mzero) when it is not.
getPage :: GititServerPart String
getPage = do
  conf <- getConfig
  path' <- getPath
  if null path'
     then return (frontPage conf)
     else if isPage path'
             then return path'
             else mzero  -- fail if not valid page name
-- | Returns the contents of the "referer" header, falling back to the
-- wiki base URL when the header is absent or empty.
getReferer :: ServerMonad m => m String
getReferer = do
  req <- askRq
  base' <- getWikiBase
  return $ case getHeader "referer" req of
                 Just r  -> case UTF8.toString r of
                                 ""  -> base'
                                 s   -> s
                 Nothing -> base'
-- | Returns the base URL of the wiki in the happstack server.
-- So, if the wiki handlers are behind a @dir 'foo'@, getWikiBase will
-- return @\/foo/@.  getWikiBase doesn't know anything about HTTP
-- proxies, so if you use proxies to map a gitit wiki to @\/foo/@,
-- you'll still need to follow the instructions in README.
-- Calls 'error' when the request path is not a suffix of the URI.
getWikiBase :: ServerMonad m => m String
getWikiBase = do
  path' <- getPath
  uri' <- liftM (fromJust . decString True . rqUri) askRq
  case calculateWikiBase path' uri' of
       Just b    -> return b
       Nothing   -> error $ "Could not getWikiBase: (path, uri) = " ++ show (path',uri')
-- | The pure core of 'getWikiBase': strip the page path off the end of
-- the URI; what remains (minus a trailing command segment such as
-- @_feed@) is the base.  Nothing when the path is not a suffix of the URI.
calculateWikiBase :: String -> String -> Maybe String
calculateWikiBase path' uri' =
  let revpaths = reverse . filter (not . null) $ splitOn '/' path'
      revuris  = reverse . filter (not . null) $ splitOn '/' uri'
  in  if revpaths `isPrefixOf` revuris
         then let revbase = drop (length revpaths) revuris
                  -- a path like _feed is not part of the base...
                  revbase' = case revbase of
                                  (x:xs) | startsWithUnderscore x -> xs
                                  xs -> xs
                  base'    = intercalate "/" $ reverse revbase'
              in  Just $ if null base' then "" else '/' : base'
         else Nothing
-- | True when the string's first character is an underscore.
startsWithUnderscore :: String -> Bool
startsWithUnderscore s = take 1 s == "_"
-- | Split a list on a separator element.  The separator is dropped;
-- adjacent separators yield empty chunks, and the result is never empty.
splitOn :: Eq a => a -> [a] -> [[a]]
splitOn c cs =
  case break (== c) cs of
    (chunk, [])       -> [chunk]
    (chunk, _ : more) -> chunk : splitOn c more
-- | Returns path portion of URI, without initial @\/@.
-- Consecutive spaces are collapsed. We don't want to distinguish
-- @Hi There@ and @Hi  There@.
uriPath :: String -> String
uriPath u = unwords (words withoutSlash)
  where
    beforeQuery = takeWhile (/= '?') u
    withoutSlash = drop 1 beforeQuery
-- | A valid page name is non-empty, does not start with an underscore,
-- contains no @*@ or @?@, no @..@, and no @/_@ (hidden directory) part.
isPage :: String -> Bool
isPage "" = False
isPage ('_':_) = False
isPage s = okChars && not hasDotDot && not hasHiddenDir
  where
    okChars = all (`notElem` "*?") s
    hasDotDot = ".." `isInfixOf` s
    hasHiddenDir = "/_" `isInfixOf` s
-- for now, we disallow @*@ and @?@ in page names, because git filestore
-- does not deal with them properly, and darcs filestore disallows them.

-- | A page file is any file with the @.page@ extension.
isPageFile :: FilePath -> Bool
isPageFile = (== ".page") . takeExtension
-- | Discussion pages are valid page names prefixed with @\@@.
isDiscussPage :: String -> Bool
isDiscussPage s =
  case s of
    '@' : rest -> isPage rest
    _          -> False
-- | Discussion page files are page files prefixed with @\@@.
isDiscussPageFile :: FilePath -> Bool
isDiscussPageFile f =
  case f of
    '@' : rest -> isPageFile rest
    _          -> False
-- | A path is treated as source code when a highlighting language is
-- known for its filename — except @.svg@, which is served as an image.
isSourceCode :: String -> Bool
isSourceCode path' = hasLanguage && not isSvg
  where
    hasLanguage = not (null (languagesByFilename (takeFileName path')))
    isSvg = takeExtension path' == ".svg"
-- | Returns encoded URL path for the page with the given name, relative to
-- the wiki base.
urlForPage :: String -> String
-- NOTE(review): 'encString'/'isUnescapedInURI' come from the URL/URI
-- libraries; assumes they percent-encode every character unsafe in a URI.
urlForPage page = '/' : encString False isUnescapedInURI page
-- | Returns the filestore path of the file containing the page's source:
-- the page name with a @.page@ extension appended.
pathForPage :: String -> FilePath
pathForPage = (<.> "page")
-- | Retrieves a mime type based on file extension.  The extension is
-- lower-cased and stripped of leading dots before lookup; unknown
-- extensions map to @application/octet-stream@.
getMimeTypeForExtension :: String -> GititServerPart String
getMimeTypeForExtension ext = do
  cfg <- getConfig
  let normalized = dropWhile (== '.') (map toLower ext)
  return $ fromMaybe "application/octet-stream"
                     (M.lookup normalized (mimeMap cfg))
-- | Simple helper for validation of forms.  Each @True@ condition adds
-- its message; messages accumulate in reverse input order.
validate :: [(Bool, String)] -- ^ list of conditions and error messages
         -> [String] -- ^ list of error messages
validate checks = foldl collect [] checks
  where
    collect errs (failed, msg)
      | failed    = msg : errs
      | otherwise = errs
-- | Succeeds only when the request carries the given command.
guardCommand :: String -> GititServerPart ()
guardCommand command = withData check
  where
    check :: Command -> GititServerPart ()
    check (Command (Just c)) | c == command = return ()
    check _ = mzero
-- | Succeeds only when the raw request URI satisfies the predicate.
guardPath :: (String -> Bool) -> GititServerPart ()
guardPath test = guardRq (\rq -> test (rqUri rq))
-- | Succeeds if path is an index path: e.g. @\/foo\/bar/@.
guardIndex :: GititServerPart ()
guardIndex = do
  base <- getWikiBase
  uri' <- liftM rqUri askRq
  -- everything after the wiki base, e.g. "/foo/bar/" when base is "/wiki"
  let localpath = drop (length base) uri'
  -- index requests must have a non-trivial local path AND end in '/'.
  -- NOTE(review): the trailing-slash test inspects the full uri', not
  -- localpath -- equivalent here since localpath is a suffix of uri'.
  unless (length localpath > 1 && lastNote "guardIndex" uri' == '/')
    mzero
-- Guard against a path like @\/wiki@ when the wiki is being
-- served at @\/wiki@: succeed only when the URI is exactly the
-- (non-empty) wiki base.
guardBareBase :: GititServerPart ()
guardBareBase = do
  base' <- getWikiBase
  req <- askRq
  let isBareBase = not (null base') && rqUri req == base'
  unless isBareBase mzero
-- | Runs a server monad in a local context after setting
-- the "message" request header.
withMessages :: ServerMonad m => [String] -> m a -> m a
withMessages messages handler = do
  req <- askRq
  -- drop any pre-existing "message" inputs so ours fully replace them
  let inps = filter (\(n,_) -> n /= "message") $ rqInputsQuery req
  -- wrap each message as a synthetic text/plain query input named "message"
  let newInp msg = ("message", Input {
            inputValue = Right
                       $ LazyUTF8.fromString msg
          , inputFilename = Nothing
          , inputContentType = ContentType {
                  ctType = "text"
                , ctSubtype = "plain"
                , ctParameters = [] }
          })
  -- run the handler with the rewritten request; the change is local only
  localRq (\rq -> rq{ rqInputsQuery = map newInp messages ++ inps }) handler
-- | Returns a filestore object derived from the
-- repository path and filestore type specified in configuration.
filestoreFromConfig :: Config -> FileStore
filestoreFromConfig conf = mkStore (repositoryPath conf)
  where
    mkStore = case repositoryType conf of
                Git       -> gitFileStore
                Darcs     -> darcsFileStore
                Mercurial -> mercurialFileStore
| thielema/gitit | Network/Gitit/Framework.hs | gpl-2.0 | 13,760 | 0 | 20 | 4,259 | 2,846 | 1,480 | 1,366 | 244 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : HEP.Physics.Analysis.Common.Prospino
-- Copyright : (c) 2013 Ian-Woo Kim
--
-- License : GPL-3
-- Maintainer : Ian-Woo Kim <ianwookim@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
-- Get cross section and total number from ME/PS matched events
--
-----------------------------------------------------------------------------
module HEP.Physics.Analysis.Common.Prospino where
import qualified Data.Aeson.Generic as G
import Data.Aeson.Types
import Data.Data
data CrossSectionResult = CrossSectionResult { xsecRefLO :: Double -- ^ reference LO value
                                             , xsecRefNLO :: Double -- ^ reference NLO value
                                             , xsecRefMultiSquarkLO :: Double -- ^ multi-squark LO value
                                             , xsecRefMultiSquarkNLO :: Double -- ^ multi-squark NLO value
                                             , xsecKFactor :: Double -- ^ NLO/LO K-factor, presumably -- TODO confirm
                                             }
                          deriving (Show, Eq, Data, Typeable)
-- JSON serialisation delegated to the generic encoder in Data.Aeson.Generic.
instance ToJSON CrossSectionResult where
  toJSON = G.toJSON
| wavewave/lhc-analysis-collection | lib/HEP/Physics/Analysis/Common/Prospino.hs | gpl-3.0 | 1,170 | 0 | 8 | 387 | 116 | 78 | 38 | 13 | 0 |
module Network.AMQP.Utils.Options where
import qualified Data.ByteString.Char8 as BS
import Data.Default.Class
import Data.Int (Int64)
import qualified Data.Map as M
import Data.Maybe
import Data.Text (Text, pack)
import Data.Version (showVersion)
import Data.Word (Word16)
import Network.AMQP
import Network.AMQP.Types
import Network.Socket (PortNumber)
import Paths_amqp_utils (version)
import System.Console.GetOpt
-- | Effective server port: an explicitly configured port wins;
-- otherwise pick the protocol default (5671 for TLS, 5672 for plain).
--
-- Rewritten as a total pattern match: the original compared with
-- @== Nothing@ and then extracted via the partial 'fromJust'.
portnumber :: Args -> PortNumber
portnumber a =
  case port a of
    Just p  -> p
    Nothing -> if tls a then 5671 else 5672
-- | A data type for our options
--
-- NOTE: the 'Default' instance constructs this record positionally, so
-- any change to the field order here must be mirrored there.
data Args =
  Args
    { server :: String -- ^ AMQP server host
    , port :: Maybe PortNumber -- ^ Nothing = choose by TLS, see 'portnumber'
    , tls :: Bool
    , vHost :: String
    , currentExchange :: String
    , bindings :: [(String, String)] -- ^ (exchange, binding key) pairs
    , rKey :: String -- ^ routing key for publishing
    , anRiss :: Maybe Int64 -- ^ limit of shown body chars (--charlimit)
    , fileProcess :: Maybe String -- ^ callback script for -X
    , qName :: Maybe String
    , cert :: Maybe String
    , key :: Maybe String
    , user :: String
    , pass :: String
    , preFetch :: Word16
    , heartBeat :: Maybe Word16
    , tempDir :: Maybe String
    , additionalArgs :: [String]
    , connectionName :: Maybe String
    , tmpQName :: String -- ^ name for the temporary exclusive queue
    , inputFile :: String
    , outputFile :: String
    , lineMode :: Bool
    , confirm :: Bool
    , msgid :: Maybe Text
    , msgtype :: Maybe Text
    , userid :: Maybe Text
    , appid :: Maybe Text
    , clusterid :: Maybe Text
    , contenttype :: Maybe Text
    , contentencoding :: Maybe Text
    , replyto :: Maybe Text
    , prio :: Maybe Octet
    , corrid :: Maybe Text
    , msgexp :: Maybe Text
    , msgheader :: Maybe FieldTable
    , fnheader :: [String] -- ^ header names to receive the file name
    , suffix :: [String] -- ^ allowed file suffixes in hotfolder mode
    , magic :: Bool -- ^ detect content-type/-encoding from file contents
    , persistent :: Maybe DeliveryMode
    , ack :: Bool
    , requeuenack :: Bool
    , rpc_timeout :: Double -- ^ seconds to wait for an RPC reply
    , connect_timeout :: Int -- ^ seconds for establishing the connection
    , simple :: Bool
    , cleanupTmpFile :: Bool
    , removeSentFile :: Bool
    , initialScan :: Bool
    }
instance Default Args where
  -- Positional construction: each value below is annotated with the
  -- field it initialises; keep in sync with the field order of 'Args'.
  def =
    Args
      "localhost" -- server
      Nothing -- port
      False -- tls
      "/" -- vHost
      "" -- currentExchange
      [] -- bindings
      "" -- rKey
      Nothing -- anRiss
      Nothing -- fileProcess
      Nothing -- qName
      Nothing -- cert
      Nothing -- key
      "guest" -- user
      "guest" -- pass
      1 -- preFetch
      Nothing -- heartBeat
      Nothing -- tempDir
      [] -- additionalArgs
      Nothing -- connectionName
      "" -- tmpQName
      "-" -- inputFile
      "-" -- outputFile
      False -- lineMode
      False -- confirm
      Nothing -- msgid
      Nothing -- msgtype
      Nothing -- userid
      Nothing -- appid
      Nothing -- clusterid
      Nothing -- contenttype
      Nothing -- contentencoding
      Nothing -- replyto
      Nothing -- prio
      Nothing -- corrid
      Nothing -- msgexp
      Nothing -- msgheader
      [] -- fnheader
      [] -- suffix
      False -- magic
      Nothing -- persistent
      True -- ack
      True -- requeuenack
      5 -- rpc_timeout
      600 -- connect_timeout
      False -- simple
      False -- cleanupTmpFile
      False -- removeSentFile
      False -- initialScan
-- | all options
--
-- Each entry is tagged with the set of executables that accept it:
-- \'a\' = agitprop, \'k\' = konsum, \'r\' = arbeite, \'p\' = plane
-- (see 'options' and 'longname').  Note that the same short letter may
-- be reused with different meanings by disjoint executable sets.
allOptions :: [(String, OptDescr (Args -> Args))]
allOptions =
  [ ( "k"
    , Option
        ['r']
        ["bindingkey"]
        (ReqArg
           (\s o -> o {bindings = (currentExchange o, s) : (bindings o)})
           "BINDINGKEY")
        ("AMQP binding key (default: #)"))
  , ( "kr"
    , Option
        ['X']
        ["execute"]
        (OptArg
           (\s o ->
              o
                { fileProcess = Just (fromMaybe callback s)
                , tempDir = Just (fromMaybe "/tmp" (tempDir o))
                })
           "EXE")
        ("Callback Script File (implies -t) (-X without arg: " ++
         callback ++ ")"))
  , ( "kr"
    , Option
        ['a']
        ["args"]
        (ReqArg (\s o -> o {additionalArgs = s : (additionalArgs o)}) "ARG")
        "additional argument for -X callback")
  , ( "kr"
    , Option
        ['t']
        ["tempdir", "target"]
        (OptArg (\s o -> o {tempDir = Just (fromMaybe "/tmp" s)}) "DIR")
        "tempdir (default: no file creation, -t without arg: /tmp)")
  , ( "k"
    , Option
        ['f']
        ["prefetch"]
        (ReqArg (\s o -> o {preFetch = read s}) "INT")
        ("Prefetch count. (0=unlimited, 1=off, default: " ++
         show (preFetch def) ++ ")"))
  , ( "kr"
    , Option
        ['A']
        ["ack"]
        (NoArg (\o -> o {ack = not (ack o)}))
        ("Toggle ack messages (default: " ++ show (ack def) ++ ")"))
  , ( "kr"
    , Option
        ['R']
        ["requeuenack"]
        (NoArg (\o -> o {requeuenack = not (requeuenack o)}))
        ("Toggle requeue when rejected (default: " ++
         show (requeuenack def) ++ ")"))
  , ( "a"
    , Option
        ['r']
        ["routingkey"]
        (ReqArg (\s o -> o {rKey = s}) "ROUTINGKEY")
        "AMQP routing key")
  , ( "ap"
    , Option
        ['f']
        ["inputfile"]
        (ReqArg (\s o -> o {inputFile = s}) "INPUTFILE")
        ("Message input file (default: " ++ (inputFile def) ++ ")"))
  , ( "p"
    , Option
        ['O']
        ["outputfile"]
        (ReqArg (\s o -> o {outputFile = s}) "OUTPUTFILE")
        ("Message output file (default: " ++ (outputFile def) ++ ")"))
  , ( "a"
    , Option
        ['l']
        ["linemode"]
        (NoArg (\o -> o {lineMode = not (lineMode o)}))
        ("Toggle line-by-line mode (default: " ++ show (lineMode def) ++ ")"))
  , ( "a"
    , Option
        ['C']
        ["confirm"]
        (NoArg (\o -> o {confirm = not (confirm o)}))
        ("Toggle confirms (default: " ++ show (confirm def) ++ ")"))
  , ( "a"
    , Option
        []
        ["msgid"]
        (ReqArg (\s o -> o {msgid = Just $ pack s}) "ID")
        "Message ID")
  , ( "a"
    , Option
        []
        ["type"]
        (ReqArg (\s o -> o {msgtype = Just $ pack s}) "TYPE")
        "Message Type")
  , ( "a"
    , Option
        []
        ["userid"]
        (ReqArg (\s o -> o {userid = Just $ pack s}) "USERID")
        "Message User-ID")
  , ( "a"
    , Option
        []
        ["appid"]
        (ReqArg (\s o -> o {appid = Just $ pack s}) "APPID")
        "Message App-ID")
  , ( "a"
    , Option
        []
        ["clusterid"]
        (ReqArg (\s o -> o {clusterid = Just $ pack s}) "CLUSTERID")
        "Message Cluster-ID")
  , ( "a"
    , Option
        []
        ["contenttype"]
        (ReqArg (\s o -> o {contenttype = Just $ pack s}) "CONTENTTYPE")
        "Message Content-Type")
  , ( "a"
    , Option
        []
        ["contentencoding"]
        (ReqArg (\s o -> o {contentencoding = Just $ pack s}) "CONTENTENCODING")
        "Message Content-Encoding")
  , ( "a"
    , Option
        []
        ["replyto"]
        (ReqArg (\s o -> o {replyto = Just $ pack s}) "REPLYTO")
        "Message Reply-To")
  , ( "p"
    , Option
        ['t']
        ["rpc_timeout"]
        (ReqArg (\s o -> o {rpc_timeout = read s}) "SECONDS")
        ("How long to wait for reply (default: " ++
         show (rpc_timeout def) ++ ")"))
  , ( "a"
    , Option
        []
        ["prio"]
        (ReqArg (\s o -> o {prio = Just $ read s}) "PRIO")
        "Message Priority")
  , ( "ap"
    , Option
        []
        ["corrid"]
        (ReqArg (\s o -> o {corrid = Just $ pack s}) "CORRID")
        "Message CorrelationID")
  , ( "ap"
    , Option
        []
        ["exp"]
        (ReqArg (\s o -> o {msgexp = Just $ pack s}) "EXP")
        "Message Expiration")
  , ( "ap"
    , Option
        ['h']
        ["header"]
        (ReqArg
           (\s o -> o {msgheader = addheader (msgheader o) s})
           "HEADER=VALUE")
        "Message Headers")
  , ( "a"
    , Option
        ['F']
        ["fnheader"]
        (ReqArg (\s o -> o {fnheader = s : (fnheader o)}) "HEADERNAME")
        "Put filename into this header")
  , ( "a"
    , Option
        ['S']
        ["suffix"]
        (ReqArg (\s o -> o {suffix = s : (suffix o)}) "SUFFIX")
        "Allowed file suffixes in hotfolder mode")
  , ( "a"
    , Option
        ['u']
        ["remove"]
        (NoArg (\o -> o {removeSentFile = not (removeSentFile o)}))
        ("Toggle removal of sent file in hotfolder mode (default: " ++ show (removeSentFile def) ++ ")"))
  , ( "a"
    , Option
        ['d']
        ["dirscan"]
        (NoArg (\o -> o {initialScan = not (initialScan o)}))
        ("Toggle initial directory scan in hotfolder mode (default: " ++ show (initialScan def) ++ ")"))
  , ( "a"
    , Option
        ['m']
        ["magic"]
        (NoArg (\o -> o {magic = not (magic o)}))
        ("Toggle setting content-type and -encoding from file contents (default: " ++
         show (magic def) ++ ")"))
  , ( "a"
    , Option
        ['e']
        ["persistent"]
        (NoArg (\o -> o {persistent = Just Persistent}))
        "Set persistent delivery")
  , ( "a"
    , Option
        ['E']
        ["nonpersistent"]
        (NoArg (\o -> o {persistent = Just NonPersistent}))
        "Set nonpersistent delivery")
  , ( "krp"
    , Option
        ['l']
        ["charlimit"]
        (ReqArg (\s o -> o {anRiss = Just (read s)}) "INT")
        "limit number of shown body chars (default: unlimited)")
  , ( "kr"
    , Option
        ['q']
        ["queue"]
        (ReqArg (\s o -> o {qName = Just s}) "QUEUENAME")
        "Ignore Exchange and bind to existing Queue")
  , ( "kr"
    , Option
        ['i']
        ["simple"]
        (NoArg
           (\o -> o {simple = True, cleanupTmpFile = not (cleanupTmpFile o)}))
        "call callback with one arg (filename) only")
  , ( "kr"
    , Option
        ['j']
        ["cleanup"]
        (NoArg (\o -> o {cleanupTmpFile = not (cleanupTmpFile o)}))
        "Toggle remove tempfile after script call. Default False, but default True if --simple (-i)")
  , ( "krp"
    , Option
        ['Q']
        ["qname"]
        (ReqArg (\s o -> o {tmpQName = s}) "TEMPQNAME")
        "Name for temporary exclusive Queue")
  , ( "akrp"
    , Option
        ['x']
        ["exchange"]
        (ReqArg (\s o -> o {currentExchange = s}) "EXCHANGE")
        ("AMQP Exchange (default: \"\")"))
  , ( "akrp"
    , Option
        ['o']
        ["server"]
        (ReqArg (\s o -> o {server = s}) "SERVER")
        ("AMQP Server (default: " ++ server def ++ ")"))
  , ( "akrp"
    , Option
        ['y']
        ["vhost"]
        (ReqArg (\s o -> o {vHost = s}) "VHOST")
        ("AMQP Virtual Host (default: " ++ vHost def ++ ")"))
  , ( "akrp"
    , Option
        ['p']
        ["port"]
        (ReqArg (\s o -> o {port = Just (read s)}) "PORT")
        ("Server Port Number (default: " ++ show (portnumber def) ++ ")"))
  , ( "akrp"
    , Option
        ['T']
        ["tls"]
        (NoArg (\o -> o {tls = not (tls o)}))
        ("Toggle TLS (default: " ++ show (tls def) ++ ")"))
  , ( "akrp"
    , Option
        ['c']
        ["cert"]
        (ReqArg (\s o -> o {cert = Just s}) "CERTFILE")
        ("TLS Client Certificate File"))
  , ( "akrp"
    , Option
        ['k']
        ["key"]
        (ReqArg (\s o -> o {key = Just s}) "KEYFILE")
        ("TLS Client Private Key File"))
  , ( "akrp"
    , Option
        ['U']
        ["user"]
        (ReqArg (\s o -> o {user = s}) "USERNAME")
        ("Username for Auth"))
  , ( "akrp"
    , Option
        ['P']
        ["pass"]
        (ReqArg (\s o -> o {pass = s}) "PASSWORD")
        ("Password for Auth"))
  , ( "akrp"
    , Option
        ['s']
        ["heartbeats"]
        (ReqArg (\s o -> o {heartBeat = (Just (read s))}) "INT")
        "heartbeat interval (0=disable, default: set by server)")
  , ( "akrp"
    , Option
        ['n']
        ["name"]
        (ReqArg (\s o -> o {connectionName = Just s}) "NAME")
        "connection name, will be shown in RabbitMQ web interface")
  , ( "akrp"
    , Option
        ['w']
        ["connect_timeout"]
        (ReqArg (\s o -> o {connect_timeout = read s}) "SECONDS")
        ("timeout for establishing initial connection (default: " ++
         show (connect_timeout def) ++ ")"))
  ]
-- | Options for the executables: keep exactly those entries of
-- 'allOptions' whose tag string mentions the executable's letter.
options :: Char -> [OptDescr (Args -> Args)]
options exename = [descr | (exes, descr) <- allOptions, exename `elem` exes]
-- | Add a header with a String value, creating the 'FieldTable' when
-- none exists yet.  The input string has the form @key=value@.
addheader :: Maybe FieldTable -> String -> Maybe FieldTable
addheader existing string = Just (FieldTable updated)
  where
    k = getkey string
    v = getval string
    updated = case existing of
      Nothing                -> M.singleton k v
      Just (FieldTable old)  -> M.insert k v old
-- | The part of a @key=value@ string before the first \'=\'.
getkey :: String -> Text
getkey = pack . takeWhile ('=' /=)
-- | The part of a @key=value@ string after the first \'=\', as an AMQP
-- string value.  Uses the total 'drop' 1 instead of the partial 'tail':
-- input without any \'=\' now yields an empty value instead of crashing.
getval :: String -> FieldValue
getval s = FVString $ BS.pack $ drop 1 $ dropWhile (/= '=') s
-- | 'parseargs' exename argstring
-- applies options onto argstring.  Fails with a usage message when any
-- unparsed arguments or option errors remain.
parseargs :: Char -> [String] -> IO Args
parseargs exename argstring =
  case getOpt Permute opts argstring of
    (fs, [], []) -> return (applyAll fs)
    (_, _, errs) ->
      ioError (userError (concat errs ++ usageInfo (usage exename) opts))
  where
    opts = options exename
    applyAll = foldl (flip id) def
-- | the default callback for the -X option
-- (used when @-X@ is given without an argument; see 'allOptions').
callback :: String
callback = "/usr/lib/haskell-amqp-utils/callback"
-- | Header for the GetOpt usage message, including program name and version.
usage :: Char -> String
usage exename =
  concat
    [ "\namqp-utils "
    , showVersion version
    , "\n\nUsage:\n"
    , longname exename
    , " [options]\n\nOptions:"
    ]
-- | Map an executable tag letter to its program name.
longname :: Char -> String
longname c =
  case c of
    'a' -> "agitprop"
    'k' -> "konsum"
    'r' -> "arbeite"
    'p' -> "plane"
    _   -> "command"
| woffs/haskell-amqp-utils | Network/AMQP/Utils/Options.hs | gpl-3.0 | 13,445 | 0 | 18 | 4,937 | 4,252 | 2,372 | 1,880 | 467 | 2 |
{- ============================================================================
| Copyright 2011 Matthew D. Steele <mdsteele@alum.mit.edu> |
| |
| This file is part of Fallback. |
| |
| Fallback is free software: you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation, either version 3 of the License, or (at your option) |
| any later version. |
| |
| Fallback is distributed in the hope that it will be useful, but WITHOUT |
| ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for |
| more details. |
| |
| You should have received a copy of the GNU General Public License along |
| with Fallback. If not, see <http://www.gnu.org/licenses/>. |
============================================================================ -}
module Fallback.Mode.SaveGame
(newSaveGameMode, newSaveBeforeQuittingMode, newSaveBeforeLeavingMode)
where
import Control.Applicative ((<$>))
import Control.Monad (when)
import Fallback.Draw (Sprite, handleScreen, paintScreen)
import Fallback.Event
import Fallback.Mode.Base
import Fallback.Mode.Dialog
import Fallback.Mode.Error (popupIfErrors)
import Fallback.Scenario.Save
import Fallback.State.Resources (Resources)
import Fallback.View (View, fromAction, viewHandler, viewPaint)
import Fallback.View.SaveGame
-------------------------------------------------------------------------------
-- | Build the save-game screen mode.  @onSave@ decides what happens after
-- a successful save.  Note the knot-tying: @mode@ refers to itself so the
-- quit-confirmation and error popups can return to this same mode.
newSaveGameMode :: Resources -> (SavedGameSummary -> IO NextMode) -> Sprite
                -> SavedGame -> Mode -> View a b -> a -> IO Mode
newSaveGameMode resources onSave screenshot savedGame
                prevMode bgView bgInput = do
  view <- do
    summaries <- loadSavedGameSummaries
    newSaveGameView resources bgView bgInput screenshot
                    (savedGameLocation savedGame) summaries
  -- EvQuit opens a confirmation dialog instead of quitting outright.
  let mode EvQuit =
        ChangeMode <$> newQuitWithoutSavingMode resources mode view ()
      mode event = do
        action <- handleScreen $ viewHandler view () event
        -- repaint once per tick
        when (event == EvTick) $ paintScreen (viewPaint view ())
        case fromAction action of
          Nothing -> return SameMode
          Just CancelSaveGame -> return (ChangeMode prevMode)
          Just (DoSaveGame name) -> do
            -- on save failure, a popup is shown and we stay in this mode
            popupIfErrors resources view () (return mode)
              (saveGame name screenshot savedGame) $ \summary -> do
              onSave summary
  return mode
-------------------------------------------------------------------------------
-- | The user's choice in the save-before-quitting/leaving dialogs.
data SaveBeforeQuittingResponse = CancelQuit | QuitWithoutSave | SaveAndQuit
-- | Dialog offering to save before quitting the application.
-- Cancel returns to @prevMode@; Don't Save quits immediately; Save goes
-- through the save-game screen and quits once a save succeeds.
newSaveBeforeQuittingMode :: Resources -> Sprite -> SavedGame -> Mode
                          -> View a b -> a -> IO Mode
newSaveBeforeQuittingMode resources screenshot savedGame
                          prevMode bgView bgInput =
  newHorizontalDialogMode resources text buttons nextMode bgView bgInput where
  text = "Would you like to save your game before quitting?"
  buttons = [("Save", [KeyReturn], SaveAndQuit),
             ("Don't Save", [KeyD], QuitWithoutSave),
             ("Cancel", [KeyEscape], CancelQuit)]
  nextMode CancelQuit = return (ChangeMode prevMode)
  nextMode QuitWithoutSave = return DoQuit
  nextMode SaveAndQuit =
    ChangeMode <$> newSaveGameMode resources onSave screenshot savedGame
                                   prevMode bgView bgInput
  onSave _ = return DoQuit
-- | Dialog offering to save before leaving to the title screen.
-- Same shape as 'newSaveBeforeQuittingMode', but "quit" means returning
-- to the main menu rather than exiting the program.
newSaveBeforeLeavingMode :: Resources -> Modes -> Sprite -> SavedGame -> Mode
                         -> View a b -> a -> IO Mode
newSaveBeforeLeavingMode resources modes screenshot savedGame
                         prevMode bgView bgInput =
  newHorizontalDialogMode resources text buttons nextMode bgView bgInput where
  text = "Would you like to save your game before returning to the title\
        \ screen?"
  buttons = [("Save", [KeyReturn], SaveAndQuit),
             ("Don't Save", [KeyD], QuitWithoutSave),
             ("Cancel", [KeyEscape], CancelQuit)]
  nextMode CancelQuit = return (ChangeMode prevMode)
  nextMode QuitWithoutSave = leave
  nextMode SaveAndQuit =
    ChangeMode <$> newSaveGameMode resources (const leave) screenshot
                                   savedGame prevMode bgView bgInput
  leave = ChangeMode <$> newMainMenuMode' modes
-------------------------------------------------------------------------------
| mdsteele/fallback | src/Fallback/Mode/SaveGame.hs | gpl-3.0 | 5,000 | 0 | 21 | 1,532 | 832 | 438 | 394 | 65 | 4 |
import System.Process
import System.Directory
-- | Locate @ls@ on the PATH and run it, waiting for it to terminate.
--
-- The original discarded the 'ProcessHandle' returned by
-- 'createProcess', so the program could exit before @ls@ finished and
-- the child was never reaped; we now wait for it.
main :: IO ()
main = do
  path <- findExecutable "ls"
  case path of
    Nothing -> error "ls doesn't exist"
    Just _ -> do
      (_, _, _, ph) <- createProcess (proc "ls" ["-a", "-l", "-h"])
      _ <- waitForProcess ph
      return ()
| daewon/til | haskell/haskell_by_example/exec.hs | mpl-2.0 | 214 | 0 | 13 | 55 | 71 | 35 | 36 | 7 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Genomics.References.Bases.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the bases in a reference, optionally restricted to a range. For
-- the definitions of references and other genomics resources, see
-- [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
-- Implements
-- [GlobalAllianceApi.getReferenceBases](https:\/\/github.com\/ga4gh\/schemas\/blob\/v0.5.1\/src\/main\/resources\/avro\/referencemethods.avdl#L221).
--
-- /See:/ <https://cloud.google.com/genomics Genomics API Reference> for @genomics.references.bases.list@.
module Network.Google.Resource.Genomics.References.Bases.List
(
-- * REST Resource
ReferencesBasesListResource
-- * Creating a Request
, referencesBasesList
, ReferencesBasesList
-- * Request Lenses
, rblXgafv
, rblUploadProtocol
, rblPp
, rblAccessToken
, rblStart
, rblUploadType
, rblReferenceId
, rblBearerToken
, rblEnd
, rblPageToken
, rblPageSize
, rblCallback
) where
import Network.Google.Genomics.Types
import Network.Google.Prelude
-- | A resource alias for @genomics.references.bases.list@ method which the
-- 'ReferencesBasesList' request conforms to.
-- Servant-style route: @GET /v1/references/{referenceId}/bases@ with the
-- query parameters below (auto-generated; do not edit by hand).
type ReferencesBasesListResource =
     "v1" :>
       "references" :>
         Capture "referenceId" Text :>
           "bases" :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "pp" Bool :>
                   QueryParam "access_token" Text :>
                     QueryParam "start" (Textual Int64) :>
                       QueryParam "uploadType" Text :>
                         QueryParam "bearer_token" Text :>
                           QueryParam "end" (Textual Int64) :>
                             QueryParam "pageToken" Text :>
                               QueryParam "pageSize" (Textual Int32) :>
                                 QueryParam "callback" Text :>
                                   QueryParam "alt" AltJSON :>
                                     Get '[JSON] ListBasesResponse
-- | Lists the bases in a reference, optionally restricted to a range. For
-- the definitions of references and other genomics resources, see
-- [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
-- Implements
-- [GlobalAllianceApi.getReferenceBases](https:\/\/github.com\/ga4gh\/schemas\/blob\/v0.5.1\/src\/main\/resources\/avro\/referencemethods.avdl#L221).
--
-- /See:/ 'referencesBasesList' smart constructor.
data ReferencesBasesList = ReferencesBasesList'
    { _rblXgafv :: !(Maybe Xgafv)
    , _rblUploadProtocol :: !(Maybe Text)
    , _rblPp :: !Bool -- defaults to True in 'referencesBasesList'
    , _rblAccessToken :: !(Maybe Text)
    , _rblStart :: !(Maybe (Textual Int64))
    , _rblUploadType :: !(Maybe Text)
    , _rblReferenceId :: !Text -- the only required field
    , _rblBearerToken :: !(Maybe Text)
    , _rblEnd :: !(Maybe (Textual Int64))
    , _rblPageToken :: !(Maybe Text)
    , _rblPageSize :: !(Maybe (Textual Int32))
    , _rblCallback :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReferencesBasesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rblXgafv'
--
-- * 'rblUploadProtocol'
--
-- * 'rblPp'
--
-- * 'rblAccessToken'
--
-- * 'rblStart'
--
-- * 'rblUploadType'
--
-- * 'rblReferenceId'
--
-- * 'rblBearerToken'
--
-- * 'rblEnd'
--
-- * 'rblPageToken'
--
-- * 'rblPageSize'
--
-- * 'rblCallback'
referencesBasesList
    :: Text -- ^ 'rblReferenceId'
    -> ReferencesBasesList
referencesBasesList pRblReferenceId_ =
    -- All optional query parameters start out unset except pretty-printing.
    ReferencesBasesList'
    { _rblXgafv = Nothing
    , _rblUploadProtocol = Nothing
    , _rblPp = True -- pretty-print responses by default
    , _rblAccessToken = Nothing
    , _rblStart = Nothing
    , _rblUploadType = Nothing
    , _rblReferenceId = pRblReferenceId_
    , _rblBearerToken = Nothing
    , _rblEnd = Nothing
    , _rblPageToken = Nothing
    , _rblPageSize = Nothing
    , _rblCallback = Nothing
    }
-- | V1 error format.
rblXgafv :: Lens' ReferencesBasesList (Maybe Xgafv)
rblXgafv = lens _rblXgafv set'
  where set' s a = s {_rblXgafv = a}

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
rblUploadProtocol :: Lens' ReferencesBasesList (Maybe Text)
rblUploadProtocol = lens _rblUploadProtocol set'
  where set' s a = s {_rblUploadProtocol = a}

-- | Pretty-print response.
rblPp :: Lens' ReferencesBasesList Bool
rblPp = lens _rblPp set'
  where set' s a = s {_rblPp = a}

-- | OAuth access token.
rblAccessToken :: Lens' ReferencesBasesList (Maybe Text)
rblAccessToken = lens _rblAccessToken set'
  where set' s a = s {_rblAccessToken = a}

-- | The start position (0-based) of this query. Defaults to 0.
rblStart :: Lens' ReferencesBasesList (Maybe Int64)
rblStart = lens _rblStart set' . mapping _Coerce
  where set' s a = s {_rblStart = a}

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
rblUploadType :: Lens' ReferencesBasesList (Maybe Text)
rblUploadType = lens _rblUploadType set'
  where set' s a = s {_rblUploadType = a}

-- | The ID of the reference.
rblReferenceId :: Lens' ReferencesBasesList Text
rblReferenceId = lens _rblReferenceId set'
  where set' s a = s {_rblReferenceId = a}

-- | OAuth bearer token.
rblBearerToken :: Lens' ReferencesBasesList (Maybe Text)
rblBearerToken = lens _rblBearerToken set'
  where set' s a = s {_rblBearerToken = a}

-- | The end position (0-based, exclusive) of this query. Defaults to the
-- length of this reference.
rblEnd :: Lens' ReferencesBasesList (Maybe Int64)
rblEnd = lens _rblEnd set' . mapping _Coerce
  where set' s a = s {_rblEnd = a}

-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \`nextPageToken\` from the previous response.
rblPageToken :: Lens' ReferencesBasesList (Maybe Text)
rblPageToken = lens _rblPageToken set'
  where set' s a = s {_rblPageToken = a}

-- | The maximum number of bases to return in a single page. If unspecified,
-- defaults to 200Kbp (kilo base pairs). The maximum value is 10Mbp (mega
-- base pairs).
rblPageSize :: Lens' ReferencesBasesList (Maybe Int32)
rblPageSize = lens _rblPageSize set' . mapping _Coerce
  where set' s a = s {_rblPageSize = a}

-- | JSONP
rblCallback :: Lens' ReferencesBasesList (Maybe Text)
rblCallback = lens _rblCallback set'
  where set' s a = s {_rblCallback = a}
instance GoogleRequest ReferencesBasesList where
        type Rs ReferencesBasesList = ListBasesResponse
        type Scopes ReferencesBasesList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/genomics",
               "https://www.googleapis.com/auth/genomics.readonly"]
        -- Arguments are positional and must follow the order of
        -- 'ReferencesBasesListResource': capture first, then query params,
        -- with a fixed JSON alt format.
        requestClient ReferencesBasesList'{..}
          = go _rblReferenceId _rblXgafv _rblUploadProtocol
              (Just _rblPp)
              _rblAccessToken
              _rblStart
              _rblUploadType
              _rblBearerToken
              _rblEnd
              _rblPageToken
              _rblPageSize
              _rblCallback
              (Just AltJSON)
              genomicsService
          where go
                  = buildClient
                      (Proxy :: Proxy ReferencesBasesListResource)
                      mempty
| rueshyna/gogol | gogol-genomics/gen/Network/Google/Resource/Genomics/References/Bases/List.hs | mpl-2.0 | 8,120 | 0 | 23 | 1,999 | 1,255 | 723 | 532 | 169 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CloudFormation.SignalResource
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Sends a signal to the specified resource with a success or failure status.
-- You can use the SignalResource API in conjunction with a creation policy or
-- update policy. AWS CloudFormation doesn't proceed with a stack creation or
-- update until resources receive the required number of signals or the timeout
-- period is exceeded. The SignalResource API is useful in cases where you want
-- to send signals from anywhere other than an Amazon EC2 instance.
--
-- <http://docs.aws.amazon.com/AWSCloudFormation/latest/APIReference/API_SignalResource.html>
module Network.AWS.CloudFormation.SignalResource
(
-- * Request
SignalResource
-- ** Request constructor
, signalResource
-- ** Request lenses
, srLogicalResourceId
, srStackName
, srStatus
, srUniqueId
-- * Response
, SignalResourceResponse
-- ** Response constructor
, signalResourceResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.CloudFormation.Types
import qualified GHC.Exts
data SignalResource = SignalResource
    { _srLogicalResourceId :: Text -- ^ template logical ID of the resource
    , _srStackName         :: Text -- ^ stack name or unique stack ID
    , _srStatus            :: ResourceSignalStatus -- ^ success or failure
    , _srUniqueId          :: Text -- ^ unique ID for this signal
    } deriving (Eq, Read, Show)
-- | 'SignalResource' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'srLogicalResourceId' @::@ 'Text'
--
-- * 'srStackName' @::@ 'Text'
--
-- * 'srStatus' @::@ 'ResourceSignalStatus'
--
-- * 'srUniqueId' @::@ 'Text'
--
signalResource :: Text -- ^ 'srStackName'
               -> Text -- ^ 'srLogicalResourceId'
               -> Text -- ^ 'srUniqueId'
               -> ResourceSignalStatus -- ^ 'srStatus'
               -> SignalResource
-- Note: the argument order (stack name first) differs from the record's
-- field order (logical ID first); the per-argument haddocks above are
-- authoritative.
signalResource p1 p2 p3 p4 = SignalResource
    { _srStackName         = p1
    , _srLogicalResourceId = p2
    , _srUniqueId          = p3
    , _srStatus            = p4
    }
-- | The logical ID of the resource that you want to signal. The logical ID
-- is the name given to the resource in the template.
srLogicalResourceId :: Lens' SignalResource Text
srLogicalResourceId = lens _srLogicalResourceId set'
  where set' s a = s { _srLogicalResourceId = a }

-- | The stack name or unique stack ID that includes the resource that you
-- want to signal.
srStackName :: Lens' SignalResource Text
srStackName = lens _srStackName set'
  where set' s a = s { _srStackName = a }

-- | The status of the signal, which is either success or failure. A failure
-- signal causes AWS CloudFormation to immediately fail the stack creation
-- or update.
srStatus :: Lens' SignalResource ResourceSignalStatus
srStatus = lens _srStatus set'
  where set' s a = s { _srStatus = a }

-- | A unique ID of the signal. When you signal Amazon EC2 instances or Auto
-- Scaling groups, specify the instance ID that you are signaling as the
-- unique ID. If you send multiple signals to a single resource (such as
-- signaling a wait condition), each signal requires a different unique ID.
srUniqueId :: Lens' SignalResource Text
srUniqueId = lens _srUniqueId set'
  where set' s a = s { _srUniqueId = a }
-- | Empty response: a successful SignalResource call returns no data.
data SignalResourceResponse = SignalResourceResponse
    deriving (Eq, Ord, Read, Show, Generic)

-- | 'SignalResourceResponse' constructor.
signalResourceResponse :: SignalResourceResponse
signalResourceResponse = SignalResourceResponse
-- Requests are always posted to the service root.
instance ToPath SignalResource where
    toPath = const "/"

-- Serialise the request fields as query parameters, in fixed order.
instance ToQuery SignalResource where
    toQuery SignalResource{..} = mconcat
        [ "LogicalResourceId" =? _srLogicalResourceId
        , "StackName" =? _srStackName
        , "Status" =? _srStatus
        , "UniqueId" =? _srUniqueId
        ]

-- No extra headers beyond the defaults.
instance ToHeaders SignalResource

instance AWSRequest SignalResource where
    type Sv SignalResource = CloudFormation
    type Rs SignalResource = SignalResourceResponse

    request = post "SignalResource"
    -- The API returns no body, so the response parser is a no-op.
    response = nullResponse SignalResourceResponse
| romanb/amazonka | amazonka-cloudformation/gen/Network/AWS/CloudFormation/SignalResource.hs | mpl-2.0 | 4,976 | 0 | 9 | 1,141 | 541 | 334 | 207 | 67 | 1 |
{-# LANGUAGE OverloadedStrings, TypeOperators #-}
module Controller.Paths
( pathId
, PartyTarget(..)
, pathPartyTarget
, AuthorizeTarget(..)
, pathAuthorizeTarget
, VolumeAccessTarget(..)
, pathVolumeAccessTarget
, pathSegment
, pathSlotId
, TagId(..)
, pathTagId
) where
import qualified Data.Invertible as I
import Data.String (fromString)
import qualified Web.Route.Invertible as R
import Web.Route.Invertible (Parameter, PathString)
-- import Servant
import Model.Kind
import Model.Id.Types
import Model.Party.Types
import Model.Container.Types
import Model.Segment
import Model.Slot.Types
import Model.Tag.Types
-- import HTTP.Path
import HTTP.Path.Parser
type PathParameter = Parameter PathString

-- | Wrap\/unwrap between a raw 'IdType' value and its tagged 'Id'.
-- Hand-expanded equivalent of the @[I.biCase|a <-> Id a|]@ splice.
idIso :: IdType a I.<-> Id a
idIso = Id R.:<->: (\(Id a) -> a)
-- | Parse @\<kind\>/\<id\>@ where the literal prefix is the 'Kinded' name of
-- @a@ and the id component is handled by the supplied parser.
pathIdWith :: forall a . (Kinded a) => PathParser (IdType a) -> PathParser (Id a)
pathIdWith p = fromString (kindOf (undefined :: a)) >/> idIso >$< p
-- | 'pathIdWith' specialised to the standard 'R.parameter' id parser.
pathId :: forall a . (PathParameter (IdType a), Kinded a) => PathParser (Id a)
pathId = pathIdWith R.parameter
-- | The target party for some action.
data PartyTarget
  = TargetProfile -- ^ Actor's own party
  | TargetParty (Id Party) -- ^ Someone else's party
-- | Typical examples of pathPartyTarget:
--   @/profile@ becomes 'TargetProfile';
--   @/party/10@ becomes @TargetParty (Id 10)@.
pathPartyTarget :: R.Path PartyTarget
pathPartyTarget = iso >$< ("profile" |/| pathId)
  where
    -- Hand-expanded [I.biCase|Left () <-> TargetProfile; Right i <-> TargetParty i|]
    iso = fwd R.:<->: bwd
    fwd (Left ()) = TargetProfile
    fwd (Right i) = TargetParty i
    bwd TargetProfile = Left ()
    bwd (TargetParty i) = Right i
-- | This is a trailing part of a connection between two parties. For a given
-- party, the second party mentioned as the target here is either the parent
-- that the child is applying to, such as
-- ((TargetParty currentUserAsChildId), (AuthorizeTarget True parentId)),
-- or the child that the parent has authorized,
-- ((TargetParty currentUserAsParentId), (AuthorizeTarget False childId)).
data AuthorizeTarget = AuthorizeTarget
  { authorizeApply :: Bool -- ^ Whether this authorize action is referring to applying from a child to a parent
  , authorizeTarget :: Id Party
  }
-- | Parse @authorize/\<id\>@ or @apply/\<id\>@; 'authorizeApply' records
-- which branch matched (True for the @apply@ branch).
pathAuthorizeTarget :: PathParser AuthorizeTarget
pathAuthorizeTarget = iso >$< parts
  where
    -- Hand-expanded [I.biCase|(a, t) <-> AuthorizeTarget a t|]
    iso = uncurry AuthorizeTarget R.:<->: (\(AuthorizeTarget a t) -> (a, t))
    parts = I.isRight >$< ("authorize" |/| "apply")
            </> idIso >$< R.parameter
-- | The party whose access to a volume is being granted or revoked.
newtype VolumeAccessTarget = VolumeAccessTarget
  { volumeAccessTarget :: Id Party
  }
-- | Parse the path suffix @access/\<party-id\>@.
pathVolumeAccessTarget :: PathParser VolumeAccessTarget
pathVolumeAccessTarget = "access" >/> iso >$< R.parameter
  where
    -- Hand-expanded [I.biCase|i <-> VolumeAccessTarget (Id i)|]
    iso = (VolumeAccessTarget . Id) R.:<->: (\(VolumeAccessTarget (Id i)) -> i)
-- | Pair a container id with a segment to form a 'SlotId' and back.
-- Hand-expanded [I.biCase|(c, s) <-> SlotId c s|]
slotIdIso :: (Id Container, Segment) I.<-> SlotId
slotIdIso = uncurry SlotId R.:<->: (\(SlotId c s) -> (c, s))
-- | Parse a 'Segment' path component; an absent parameter means the full
-- segment ('fullSegment' is the default of '=/=').
pathSegment :: PathParser Segment
pathSegment = fullSegment =/= R.parameter
-- | Parse a slot as a container id followed by a segment.
pathSlot :: PathParser SlotId
pathSlot = slotIdIso >$< (idIso >$< R.parameter </> pathSegment)
-- | Parse a slot id prefixed with the 'Slot' kind name.
pathSlotId :: PathParser (Id Slot)
pathSlotId = pathIdWith pathSlot
-- | A tag reference: its name plus whether it was addressed via the
-- @keyword@ route rather than the @tag@ route.
data TagId = TagId
  { tagIdKeyword :: Bool
  , tagIdName :: TagName
  }
-- | Parse @tag/\<name\>@ or @keyword/\<name\>@; 'tagIdKeyword' is True when
-- the @keyword@ branch matched.
pathTagId :: PathParser TagId
pathTagId = iso >$< (I.isRight >$< ("tag" |/| "keyword") </> R.parameter)
  where
    -- Hand-expanded [I.biCase|(b, t) <-> TagId b t|]
    iso = uncurry TagId R.:<->: (\(TagId b t) -> (b, t))
| databrary/databrary | src/Controller/Paths.hs | agpl-3.0 | 3,723 | 0 | 14 | 782 | 936 | 538 | 398 | -1 | -1 |
module Main where
import System.Environment
import Control.Applicative
-- | Entry point: parse the first command-line argument as an 'Integer'
-- and print its largest prime factor.
main :: IO ()
main = do
  args <- getArgs
  print (largestPrimeFactorV1 (getFirstArg args))
-- | Parse a decimal string as an arbitrary-precision 'Integer'.
-- Previously this had no top-level signature and relied on an inline
-- annotation. Still partial: 'read' calls 'error' on malformed input.
toInt :: String -> Integer
toInt n = read n
-- | Parse the first command-line argument as an 'Integer'.
-- Replaces @head $ map toInt args@: no pointless map over the whole list,
-- and an empty argument list now fails with a descriptive message instead
-- of the bare "Prelude.head: empty list".
getFirstArg :: [String] -> Integer
getFirstArg (arg : _) = toInt arg
getFirstArg []        = error "getFirstArg: expected at least one argument"
-- | Integer square root via 'Double', using round-to-nearest (not floor);
-- e.g. 24 maps to 5. The explicit annotation pins the same 'Double'
-- intermediate the old point-free version got from defaulting.
squareRootRounded :: Integer -> Integer
squareRootRounded n = round (sqrt (fromIntegral n :: Double))
-- | Divisors of @n@ no larger than (roughly) its square root, in
-- descending order. The square root is now computed once instead of
-- twice as in the original range expression.
factors :: Integer -> [Integer]
factors n = filter ((== 0) . mod n) [r, r - 1 .. 1]
  where
    r = squareRootRounded n
-- | Trial divisors used by the primality checks: 2 up to roughly sqrt n.
-- BUG FIX: the previous enumeration @[3,5..squareRootRounded n]@ skipped
-- the divisor 2 entirely, so every even composite (4, 32, ...) was
-- vacuously classified as prime by 'isPrime'.
candidates :: Integer -> [Integer]
candidates n = [2 .. squareRootRounded n]
--candidates n = [2..n-1]
-- | @n@ is considered prime iff no trial divisor from 'candidates'
-- divides it evenly; vacuously 'True' when the candidate list is empty.
isPrime :: Integer -> Bool
isPrime n = all ((/=0) . rem n) (candidates n)
-- | Complement of 'isPrime' (De Morgan of the original @any (== 0)@ form).
isNotPrime :: Integer -> Bool
isNotPrime n = not (isPrime n)
-- | First prime in the descending divisor list of @n@.
-- NOTE(review): 'head' cannot fail here because 1 terminates 'factors'
-- and @isPrime 1@ is vacuously True — which also means prime inputs
-- answer 1 rather than themselves; confirm that is the intended contract.
largestPrimeFactor :: Integer -> Integer
largestPrimeFactor = head . dropWhile (isNotPrime) . factors

-- | Same as 'largestPrimeFactor', phrased via @not . isPrime@.
largestPrimeFactorV1 :: Integer -> Integer
largestPrimeFactorV1 = head . dropWhile (not . isPrime) . factors
| karun012/projecteuler-haskell | src/PrimeFactors.hs | unlicense | 1,025 | 9 | 14 | 178 | 400 | 197 | 203 | 26 | 1 |
-- | Product of the row sums. The signature makes explicit the type the
-- original pointed definition was inferred to have.
ans :: Num a => [[a]] -> a
ans = product . map sum
-- | Read and discard the first input line, then treat every remaining
-- line as a whitespace-separated row of integers and print the product
-- of the row sums.
main :: IO ()
main = do
  _ <- getLine
  body <- getContents
  let rows = map (map read . words) (lines body) :: [[Int]]
  print (ans rows)
| a143753/AOJ | 2922.hs | apache-2.0 | 157 | 0 | 14 | 52 | 92 | 43 | 49 | 7 | 1 |
module Example.LocalCounters where
import Control.Concurrent
import Control.Concurrent.Chan
import Control.Monad
import Control.Monad.Crdt
import Control.Monad.Reader
import Data.Crdt.Counter
import Data.Crdt.Transport.Local
-- | A single CRDT update that increments the counter by one.
action :: Crdt Counter ()
action = update $ CounterUpdate 1
-- | Worker thread: duplicate the shared CRDT var, announce our thread id
-- on the log channel, start the background updater, then loop forever
-- logging a snapshot, applying one increment, and sleeping.
-- 'threadDelay' takes microseconds, so the delay argument is in ms.
thread :: CrdtVar Counter -> Int -> Chan String -> IO ()
thread sharedVar delayMs logChan = do
  tid <- myThreadId
  localVar <- dupCrdtVar sharedVar
  writeChan logChan (show tid)
  forkUpdater localVar
  forever $ do
    snapshot <- readCrdtVar localVar
    writeChan logChan (show (tid, snapshot))
    runCrdtOnVar action localVar
    threadDelay (delayMs * 1000)
-- | Spin up three counter workers with different delays plus a once-a-second
-- blank-line ticker, then echo everything written to the shared log channel.
main = do
  counter <- newCrdtVar $ Counter 0
  logChan <- newChan
  mapM_ (\d -> forkIO (thread counter d logChan)) [1000, 2000, 3000]
  forkIO $ forever $ do
    threadDelay 1000000
    writeChan logChan ""
  forever (readChan logChan >>= putStrLn)
| edofic/crdt | src/Example/LocalCounters.hs | apache-2.0 | 885 | 0 | 12 | 191 | 318 | 149 | 169 | 33 | 1 |
{-
We slightly instrument the OpenDatatype version. The example touches
upon some feature interaction between multiparameter type classes,
overlapping instances and existential quantification. The code has
been tested with GHC 6.10.4, and the current behavior of GHC has been
like this for a while now.
See this discussion on the GHC mailing list for some background:
http://www.haskell.org/pipermail/glasgow-haskell-bugs/2006-July/005712.html
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverlappingInstances #-}
-- Some kinds of shape as different datatypes. Coordinates/dimensions are
-- plain Ints; none of the demo instances below ever inspect them.
data Rectangle = Rectangle Int Int Int Int
data Circle = Circle Int Int Int
-- A type bound to explicitly collect all kinds of shape
class Shape x
  where
    draw :: x -> IO ()

instance Shape Rectangle
  where
    draw _ = putStrLn "Drawing a rectangle."

instance Shape Circle
  where
    draw _ = putStrLn "Drawing a circle."

-- Catch-all instance (relies on OverlappingInstances): any type counts
-- as a shape; the specific instances above win when they apply.
instance Shape x
  where
    draw _ = putStrLn "Not sure what I am drawing."
-- An envelope for shapes: existentially hides the concrete shape type,
-- leaving only the 'Shape' constraint visible.
data AnyShape = forall x. Shape x => AnyShape x
-- Intersection with overlapping instances; the String result reports
-- which instance was selected.
class (Shape x, Shape y) => Intersect x y
  where
    intersect :: x -> y -> String
-- Generic instances for intersection
instance (Shape x, Shape y) => Intersect x y
  where
    intersect s1 s2 = "(Shape x, Shape y) => Intersect x y"

-- Intersection of any shape with a rectangle
instance Shape x => Intersect x Rectangle
  where
    intersect x (Rectangle x1 x2 y1 y2) = "Shape x => Intersect x Rectangle (or v.v.)"

-- A variation on intersect with one opaque shape. Unpacking the
-- existential exposes only a 'Shape' constraint on y, so instance
-- selection here always resolves to the generic instance.
intersect' :: Shape x => x -> AnyShape -> String
intersect' x (AnyShape y) = intersect x y
{-
-- Intersection of a rectangle with any shape
-- This instance is only accepted by type checking with -XIncoherentInstances.
instance Intersect x Rectangle => Intersect Rectangle x
where
intersect r x = intersect x r
-}
-- Intersection of two rectangles
instance Intersect Rectangle Rectangle
  where
    intersect (Rectangle x1 x2 y1 y2) (Rectangle a1 a2 b1 b2) = "Intersect Rectangle Rectangle"

-- Intersection of two circles
instance Intersect Circle Circle
  where
    intersect (Circle x y r) (Circle x2 y2 r2) = "Intersect Circle Circle"

-- Intersection of a circle and a rectangle
instance Intersect Circle Rectangle
  where
    intersect (Circle x y r) (Rectangle x1 x2 y1 y2) = "Intersect Circle Rectangle (or v.v.)"

-- The flipped pairing delegates to the instance above.
instance Intersect Rectangle Circle
  where
    intersect r c = intersect c r
{-
-- Boolean-based code would not reveal instance selection.
intersectMany :: [AnyShape] -> Bool
intersectMany [] = False
intersectMany (_:[]) = False
intersectMany ((AnyShape x):(AnyShape y):z) =
intersect x y
|| intersectMany (AnyShape x:z)
|| intersectMany (AnyShape y:z)
-}
-- | Intersection report for every unordered pair drawn from the list.
-- Because the shapes come out of 'AnyShape' envelopes, every 'intersect'
-- call here resolves to the generic instance.
intersectMany :: [AnyShape] -> String
intersectMany [] = ""
intersectMany [_] = ""
intersectMany (a@(AnyShape x) : b@(AnyShape y) : rest)
  | null rest = intersect x y
  | otherwise =
      intersect x y
        ++ ", " ++ intersectMany (a : rest)
        ++ ", " ++ intersectMany (b : rest)
-- Test cases; the comments show the expected instance-selection result.
r1 = Rectangle 1 2 3 4
r2 = Rectangle 5 6 7 8
c1 = Circle 1 2 3
c2 = Circle 3 4 5
shapes = [AnyShape r1, AnyShape r2, AnyShape c1, AnyShape c2]
test1 = intersect r1 r2 -- "Intersect Rectangle Rectangle"
test2 = intersect r1 c1 -- "Intersect Circle Rectangle (or v.v.)"
test3 = intersect c1 c2 -- "Intersect Circle Circle"
test4 = intersectMany shapes -- always uses the generic instance
test5 = mapM_ (\(AnyShape s) -> draw s) shapes -- draws with the precise instance of Shape
-- | Run every test case in order: the four instance-selection strings,
-- then the drawing actions.
main :: IO ()
main = do
  mapM_ print [test1, test2, test3, test4]
  test5
| egaburov/funstuff | Haskell/tytag/xproblem_src/samples/shapes/Haskell/FeatureInteraction/Main.hs | apache-2.0 | 3,928 | 0 | 13 | 829 | 773 | 399 | 374 | 62 | 2 |
{-# LANGUAGE FlexibleInstances #-}
module Auth0.Types where
--------------------------------------------------------------------------------
import Data.Aeson
import Data.ByteString (ByteString)
import Servant.API hiding (Verb)
import Data.Text
import Data.Tagged
--------------------------------------------------------------------------------
-- Phantom-tagged wrappers so the different Auth0 credential strings
-- cannot be interchanged at call sites.

data TenantTag
type Tenant = Tagged TenantTag ByteString

mkTenant :: ByteString -> Tenant
mkTenant = Tagged

data AccessTokenTag
type AccessToken = Tagged AccessTokenTag Text

mkAccessToken :: Text -> AccessToken
mkAccessToken = Tagged

-- Tokens and client ids may appear in request URLs; rendering simply
-- drops the phantom tag.
instance ToHttpApiData AccessToken where
  toUrlPiece = untag

data ClientIdTag
type ClientId = Tagged ClientIdTag Text

mkClientId :: Text -> ClientId
mkClientId = Tagged

instance ToHttpApiData ClientId where
  toUrlPiece = untag

data ClientSecretTag
type ClientSecret = Tagged ClientSecretTag Text

mkClientSecret :: Text -> ClientSecret
mkClientSecret = Tagged
-- | OAuth response types accepted by the authorize endpoint.
data ResponseType
  = Code
  | Token
  deriving (Show)

instance ToHttpApiData ResponseType where
  toUrlPiece Code = "code"
  toUrlPiece Token = "token"

instance ToJSON ResponseType where
  toJSON Code = "code"
  toJSON Token = "token"

-- | OAuth grant types, serialised to the exact strings Auth0 expects
-- (note the full URI for the MFA/OTP grant).
data GrantType
  = Password
  | AuthorizationCode
  | ClientCredentials
  | OTP
  deriving (Show)

instance ToJSON GrantType where
  toJSON Password = "password"
  toJSON AuthorizationCode = "authorization_code"
  toJSON ClientCredentials = "client_credentials"
  toJSON OTP = "http://auth0.com/oauth/grant-type/mfa-otp"
| alasconnect/auth0 | src/Auth0/Types.hs | apache-2.0 | 1,540 | 0 | 6 | 244 | 311 | 175 | 136 | -1 | -1 |
module Ovid.Run where
import Control.Monad.State.Strict
import qualified Data.Map as M
import Data.Maybe (fromJust)
import Ovid.Environment (extendedStaticEnvironment,Ann,AdditionalAnnotation(..))
import Framework (evalCounter)
import Ovid.Constraints
import WebBits.JavaScript.JavaScript
import CFA
import CFA.Labels (Label (..), unsafeLabelIx)
import Ovid.Abstraction
import Ovid.Prelude (JsCFAState)
import Ovid.ConstraintUtils (emptyJsCFAState)
import Ovid.DOM (topLevelPreprocessing,topLevelIds)
import WebBits.JavaScript.Environment hiding (Ann)
-- | Analysis monad: CFA over abstract 'Value's with 'JsCFAState' threaded
-- underneath via 'StateT' over some base monad @m@.
type AnalysisT m a = CfaT Value (StateT (JsCFAState Contour) m) a
-- | Extend the inferred environment with the built-in identifiers from
-- 'topLevelIds', allocating a fresh 'IxLabel' for each built-in the
-- program did not already bind.
makeTopLevelEnv :: M.Map String Label -- ^inferred top-level environment of
                                      -- the program. This environment excludes
                                      -- top-level identifiers that the
                                      -- program does not use.
                -> Label -- ^next available label
                -> (M.Map String Label,Label)
makeTopLevelEnv inferredEnv nextLabel =
  insertIds topLevelIds (inferredEnv,nextLabel) where
    insertIds [] (env,lbl) = (env,lbl)
    -- Only IxLabel is expected here; the catch-all below guards that.
    insertIds (id:ids) (env,lbl@(IxLabel ix))= case M.lookup id env of
      Just _ -> insertIds ids (env,lbl) -- used by program, no need to insert
      Nothing -> insertIds ids (M.insert id (IxLabel (ix+1)) env,IxLabel (ix+1))
    insertIds _ _ = error "Run.hs : expected IxLabel"
-- | Push the top-level bindings into a function annotation's environment;
-- non-function annotations ('NA') pass through unchanged.
mergeTopLevelEnv :: M.Map String Label
                 -> Ann
                 -> Ann
mergeTopLevelEnv topLevelEnv (pos,lbl,extra) = (pos,lbl,extra') where
  extra' = case extra of
    NA -> NA
    -- M.union is left-biased for duplicates. This preserves shadowing.
    FnA env locals this -> FnA (M.union env topLevelEnv) locals this
-- | Run the JavaScript control-flow analysis over a parsed program and
-- return the resulting value cache.
runCFA :: Int -- ^ length of the contour
       -> [ParsedStatement]
       -> IO (Cache Value)
runCFA contourLength parsedStmts = do
  let (labelledStmts,env,nextLabel) = extendedStaticEnvironment 0 parsedStmts
  let (topLevelEnv,nextLabel') = makeTopLevelEnv env nextLabel
  let labelledStmts' = map (fmap $ mergeTopLevelEnv topLevelEnv) labelledStmts
  -- Every top-level label starts out in the top contour.
  let ce = M.fromList $ map (\lbl -> (lbl,topContour))
                            (map snd $ M.toList topLevelEnv)
  let run = runCfa contourLength (fromJust $ unsafeLabelIx nextLabel') $ do
        -- Preprocess/augment the built-ins and record them in the state.
        topLevelEnv <- topLevelPreprocessing topLevelEnv
        st <- lift get
        liftIO $ putStrLn "Basic env:\n"
        liftIO $ mapM_ (putStrLn.show) (M.toList topLevelEnv)
        lift $ put st { jscfasBuiltins = topLevelEnv }
        -- Resolve the three special global bindings; missing ones abort.
        window <- case M.lookup "window" topLevelEnv of
          Just lbl -> return (lbl,topContour)
          Nothing -> fail "runCFA: could not find window"
        windowSet <- case M.lookup "window-props" topLevelEnv of
          Just lbl -> return (lbl,topContour)
          Nothing -> fail "runCFA: could not find window-props"
        topThis <- case M.lookup "this" topLevelEnv of
          Just lbl -> return (lbl,topContour)
          Nothing -> fail "runCFA: could not find this (global)"
        -- Register every global (except "this") as a property of window.
        let props = [(s,(l,topContour)) | (s,l) <- M.toList topLevelEnv,
                     s /= "this"]
        mapM_ (\(id,set) -> newValue (AProperty id (ValueSet set)) windowSet)
              props
        subsetOf window topThis
        -- Finally analyse the program statements themselves.
        mapM_ (stmt ce topContour) labelledStmts'
  (_,cache,_) <- evalStateT run emptyJsCFAState
  return cache
| brownplt/ovid | src/Ovid/Run.hs | bsd-2-clause | 3,478 | 0 | 20 | 906 | 1,012 | 531 | 481 | 65 | 4 |
module Test where
import Types
-- | Placeholder constant; without a signature it defaults to
-- the polymorphic @Num a => a@.
f = 1
| chip2n/clementine | Test.hs | bsd-3-clause | 42 | 0 | 4 | 10 | 14 | 9 | 5 | 3 | 1 |
module Text.FastAleck.ByteString.Lazy
( module Text.FastAleck
, fastAleck
) where
--------------------------------------------------------------------------------
import qualified Data.ByteString.Lazy as BL
--------------------------------------------------------------------------------
import Text.FastAleck
import qualified Text.FastAleck.Internal as I
--------------------------------------------------------------------------------
-- | Run Fast Aleck over a lazy 'BL.ByteString' by mapping the strict-chunk
-- implementation over each chunk and reassembling the results lazily.
-- NOTE(review): processes chunks independently — assumes the typographic
-- transformations never need to look across a chunk boundary; confirm
-- against 'Text.FastAleck.Internal'.
fastAleck :: FastAleckConfig -> BL.ByteString -> BL.ByteString
fastAleck config = BL.fromChunks . map (I.fastAleck config) . BL.toChunks
{-# INLINE fastAleck #-}
| jaspervdj/fast-aleck-hs | src/Text/FastAleck/ByteString/Lazy.hs | bsd-3-clause | 630 | 0 | 10 | 76 | 94 | 58 | 36 | 9 | 1 |
import Language.MiniStg.Parser (parseStg)
-- | Read one line of STG source from stdin, parse it and print the result.
main = getLine >>= print . parseStg
| Neuromancer42/ministgwasm | app/stgParse.hs | bsd-3-clause | 129 | 0 | 10 | 44 | 47 | 23 | 24 | 5 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE ViewPatterns #-}
{-# language DuplicateRecordFields #-}
{-# language OverloadedStrings #-}
module Main where
import Control.Arrow ((&&&))
import Control.Monad (join)
import Data.List (sortBy)
import qualified Data.Map as M
import Data.Maybe (isJust)
import Data.Monoid ((<>))
import Data.Ord (comparing, Down (..))
import Data.Set (insert)
import Data.Text (pack, unpack)
import Data.Text.Lens (_Text)
import Data.Time.Format (formatTime, defaultTimeLocale)
import Data.Time.Parse (strptime)
import Data.Traversable (for)
import Erdos
import GHC.Exts (fromList)
import SitePipe hiding (getTags)
import SitePipe.Readers
import Text.Pandoc
import Text.Pandoc.Highlighting
import Text.Pandoc.Options
import Utils
-- | Extract the payload from an Aeson 'Result', crashing with the error
-- message on failure.
fromResult :: Result a -> a
fromResult r =
  case r of
    Success a -> a
    Error msg -> error msg
-- | Regex fragment matching the @/YYYY-MM-DD-@ prefix of post filenames.
postFormat :: String
postFormat = "/[0-9]{4}-[0-9]{2}-[0-9]{2}-"
-- | Derive a URL slug from a source path: strip the @/posts/@-plus-date
-- prefix, the @/misc/@ prefix, and the @.html@ extension.
toSlug :: String -> String
toSlug = replaceAll ("/posts" <> postFormat) (const "")
       . replaceAll ("/misc/") (const "")
       . replaceAll "\\.html" (const "")
-- | Build the whole site: load misc pages and blog posts, thread
-- slug/prev/next/related metadata through their JSON values, then render
-- templates, the archive, the Erdos page and the feeds, and finally copy
-- static assets.
main :: IO ()
main = site $ do
  -- Traversal to the textual "url" field of a resource's JSON object.
  let l = _Object . at "url" . _Just . _String . _Text
  rawMisc <- resourceLoader sandyReader ["misc/*.markdown"]
  -- Misc pages: rewrite URLs to slug directories and blank out the
  -- post-only metadata (dates, tags, prev/next/related links).
  let misc =
        flip fmap rawMisc $
          \x ->
            let url = x ^?! l
                slug = toSlug url
            in x & l .~ slug <> "/index.html"
                 & _Object . at "page_title" .~ x ^?! _Object . at "title"
                 & _Object . at "canonical_url" ?~ _String . _Text # slug
                 & _Object . at "slug" ?~ _String . _Text # slug
                 & _Object . at "has_prev" ?~ _Bool # False
                 & _Object . at "has_next" ?~ _Bool # False
                 & _Object . at "has_related" ?~ _Bool # False
                 & _Object . at "html_tags" .~ Nothing
                 & _Object . at "zulu" ?~ _String . _Text # ""
                 & _Object . at "date" ?~ _String . _Text # ""
  rawPosts <- sortBy (comparing (^?! l))
    <$> resourceLoader sandyReader ["posts/*.markdown"]
  let urls = fmap (^?! l) rawPosts
      getEm' = getNextAndPrev urls
      -- Posts: compute slug, prev/next neighbours, tags, related slugs and
      -- both human-readable and RFC3339 ("zulu") dates.
      posts' =
        flip fmap rawPosts $
          \x ->
            let url = x ^?! l
                (fmap toSlug -> prev, fmap toSlug -> next) = getEm' url
                tagsOf = x ^? _Object . at "tags" . _Just . _String . _Text
                related = x ^? _Object . at "related" . _Just
                date = fst . (^?! _Just) . strptime "%Y-%m-%d %H:%M"
                     $ x ^?! _Object . at "date" . _Just . _String . _Text
                slug = toSlug url
            in x & l .~ "blog/" <> slug <> "/index.html"
                 & _Object . at "page_title" .~ x ^?! _Object . at "title"
                 & _Object . at "canonical_url" ?~ _String . _Text # ("blog/" <> slug)
                 & _Object . at "slug" ?~ _String . _Text # slug
                 & _Object . at "has_prev" ?~ _Bool # isJust prev
                 & _Object . at "has_next" ?~ _Bool # isJust next
                 & _Object . at "has_related" ?~ _Bool # isJust related
                 & _Object . at "related" ?~ (maybe (Array $ fromList []) id related :: Value)
                 & _Object . at "html_tags" .~
                     fmap (\y -> _String . _Text # makeTags y) tagsOf
                 & _Object . at "prev" .~
                     fmap (review $ _String . _Text) prev
                 & _Object . at "next" .~
                     fmap (review $ _String . _Text) next
                 & _Object . at "zulu" ?~ _String . _Text #
                     formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%SZ" date
                 & _Object . at "date" ?~ _String . _Text #
                     formatTime defaultTimeLocale "%B %e, %Y" date
      -- Index from slug to post, used to resolve the "related" lists.
      slugList = M.fromList
        $ fmap ((^?! _Object . at "slug" . _Just . _String) &&& id) posts
      posts = flip fmap posts' $ \post ->
        post & _Object . at "related" . _Just . _Array
             %~ fmap (\x -> maybe (error $ "bad related slug: " <> x ^?! _String . _Text) id
                            $ M.lookup (x ^?! _String) slugList)
  let tags = getTags makeTagUrl $ reverse posts
      newest = last posts
      -- Feed payload shared by the RSS and Atom templates.
      feed :: String -> Value
      feed url = object
        [ "posts" .= take 10 (reverse posts)
        , "domain" .= ("http://reasonablypolymorphic.com" :: String)
        , "url" .= url
        , "last_updated" .= (newest ^?! _Object . at "zulu" . _Just . _String)
        ]
  -- The newest post doubles as the home page.
  writeTemplate' "post.html" . pure
    $ newest
    & _Object . at "url" ?~ _String # "/index.html"
    & _Object . at "page_title" ?~ _String # "Home"
  -- Group posts by the trailing four characters of their pretty date
  -- (the year), newest year first.
  let byYear = reverse
             . flip groupOnKey (reverse posts)
             $ \x -> reverse
                   . take 4
                   . reverse
                   $ x ^?! _Object . at "date" . _Just . _String . _Text
  writeTemplate' "archive.html" . pure
    $ object
        [ "url" .= ("/blog/archives/index.html" :: String)
        , "page_title" .= ("Archives" :: String)
        , "years" .= (flip fmap byYear $ \(year, ps) ->
            object
              [ "posts" .= ps
              , "year" .= year
              ]
          )
        ]
  -- Erdos entries are sorted by the numeric prefix of their URL, descending.
  erdos <- sortBy (comparing $ Down
                     . read @Int . takeWhile (/= '-')
                     . drop (length $ id @String "/erdos/")
                     . emUrl)
           . fmap (fromResult . fromJSON)
         <$> resourceLoader markdownReader ["erdos/*.markdown"]
  writeTemplate' "erdos.html" . pure
    $ object
        [ "url" .= ("/erdos/index.html" :: String)
        , "page_title" .= ("Erdos Project" :: String)
        , "spans" .= buildCitySpan erdos
        , "slug" .= ("erdos" :: String)
        ]
  writeTemplate' "post.html" $ posts ++ misc
  writeTemplate' "tag.html" tags
  writeTemplate' "rss.xml" . pure $ feed "feed.rss"
  writeTemplate' "atom.xml" . pure $ feed "atom.xml"
  copyFiles
    [ "css"
    , "js"
    , "images"
    , "data"
    ]
  -- drop 7 strips the leading "static/" from copied paths.
  copyFilesWith (drop 7) [ "static/*" ]
  pure ()
-- | Like 'writeTemplate', but resolves the template name inside @templates/@.
writeTemplate' :: ToJSON a => String -> [a] -> SiteM ()
writeTemplate' name = writeTemplate ("templates/" <> name)
-- | Content reader: pandoc markdown with the extra 'extensions' enabled,
-- rendered to HTML5 with the math-aware writer options.
sandyReader :: String -> IO String
sandyReader =
  mkPandocReaderWith
    (\ro -> readMarkdown ro { readerExtensions = foldr enableExtension
                                                       pandocExtensions
                                                       extensions
                            } . pack)
    pure
    (fmap unpack . writeHtml5String pandocMathCompiler)
-- | Markdown extensions enabled on top of pandoc's defaults.
extensions :: [Extension]
extensions =
  [ Ext_tex_math_dollars
  -- , Ext_latex_macros
  , Ext_footnotes
  ]
-- | HTML writer options: math rendered with MathJax, haddock-style
-- syntax highlighting, and 'extensions' enabled on top of pandoc's
-- default extension set.
pandocMathCompiler :: WriterOptions
pandocMathCompiler = def
  { writerExtensions     = foldr enableExtension pandocExtensions extensions
  , writerHTMLMathMethod = MathJax ""
  , writerHighlightStyle = Just haddock
  }
| isovector/we-can-really-solve-this | src/Main.hs | bsd-3-clause | 7,593 | 0 | 67 | 2,874 | 2,133 | 1,089 | 1,044 | -1 | -1 |
module Lib where
import Types
-- | Placeholder constant; without a signature it defaults to
-- the polymorphic @Num a => a@.
f = 1
| theor/zorkell | src/Lib.hs | bsd-3-clause | 37 | 0 | 4 | 9 | 12 | 8 | 4 | 3 | 1 |
-- | This module exports the Filter type used to create read and facet queries.
module Data.Factual.Shared.Filter
(
-- * Filter type
Field
, Filter(..)
-- * Helper functions
, filtersPair
) where
import Data.List.Utils (join, replace)
-- | A Field is a String representation of the field name.
type Field = String

-- | The Filter type is used to represent various filters in a read or facets
-- query. Rendering to the Factual query syntax is done by the 'Show' instance.
data Filter = EqualNum Field Double -- ^ A numeric field has to match a number exactly.
            | EqualStr Field String -- ^ A string field has to match a string exactly.
            | NotEqualNum Field Double -- ^ A numeric field must not equal a specific number.
            | NotEqualStr Field String -- ^ A string field must not equal a specific string.
            | InNumList Field [Double] -- ^ A numeric field must be equal to any of the numbers in a list.
            | InStrList Field [String] -- ^ A string field must be equal to any of the strings in a list.
            | NotInNumList Field [Double] -- ^ A numeric field must not be equal to any of the numbers in a list.
            | NotInStrList Field [String] -- ^ A string field must not be equal to any of the strings in a list.
            | BeginsWith Field String -- ^ A string field must begin with a specific string.
            | NotBeginsWith Field String -- ^ A string field must not begin with a specific string.
            | BeginsWithAny Field [String] -- ^ A string field must begin with any of the strings in a list.
            | NotBeginsWithAny Field [String] -- ^ A string field must not begin with any of the strings in a list.
            | IsBlank Field -- ^ A field must be blank.
            | IsNotBlank Field -- ^ A field must not be blank.
            | GreaterThan Field Double -- ^ A field must be greater than the given value.
            | GreaterThanOrEqualTo Field Double -- ^ A field must be greater than or equal to the given value.
            | LessThan Field Double -- ^ A field must be less than the given value.
            | LessThanOrEqualTo Field Double -- ^ A field must be less than or equal to the given value.
            | SearchFilter Field String -- ^ A field must match of full text search with the given string.
            | And [Filter] -- ^ Form an AND condition with the filters in the list.
            | Or [Filter] -- ^ Form an OR condition with the filters in the list.
            deriving Eq
-- Filter is a member of Show to help generate query strings.
-- Refactored: the twenty hand-duplicated @"field":{"$op":value}@ renderings
-- now share three private helpers; every produced string is byte-identical
-- to the previous version.
instance Show Filter where
  show (EqualNum field num)             = showStr field ++ ":" ++ show num
  show (EqualStr field str)             = showStr field ++ ":" ++ showStr str
  show (NotEqualNum field num)          = showOp field "$neq" (show num)
  show (NotEqualStr field str)          = showOp field "$neq" (showStr str)
  show (InNumList field nums)           = showListOp field "$in" (map show nums)
  show (InStrList field strs)           = showListOp field "$in" (map showStr strs)
  show (NotInNumList field nums)        = showListOp field "$nin" (map show nums)
  show (NotInStrList field strs)        = showListOp field "$nin" (map showStr strs)
  show (BeginsWith field str)           = showOp field "$bw" (showStr str)
  show (NotBeginsWith field str)        = showOp field "$nbw" (showStr str)
  show (BeginsWithAny field strs)       = showListOp field "$bwin" (map showStr strs)
  show (NotBeginsWithAny field strs)    = showListOp field "$nbwin" (map showStr strs)
  show (IsBlank field)                  = showStr field ++ ":{\"$blank\":true}"
  show (IsNotBlank field)               = showStr field ++ ":{\"$blank\":false}"
  show (GreaterThan field num)          = showOp field "$gt" (show num)
  show (GreaterThanOrEqualTo field num) = showOp field "$gte" (show num)
  show (LessThan field num)             = showOp field "$lt" (show num)
  show (LessThanOrEqualTo field num)    = showOp field "$lte" (show num)
  show (SearchFilter field str)         = showOp field "$search" (showStr str)
  show (And filters)                    = showLogical "$and" filters
  show (Or filters)                     = showLogical "$or" filters

-- | Render @"field":{"$op":value}@ (value is already rendered).
showOp :: Field -> String -> String -> String
showOp field op value = showStr field ++ ":{" ++ show op ++ ":" ++ value ++ "}"

-- | Render @"field":{"$op":[v1,v2,...]}@ (values already rendered).
showListOp :: Field -> String -> [String] -> String
showListOp field op values =
  showStr field ++ ":{" ++ show op ++ ":[" ++ join "," values ++ "]}"

-- | Render @"$and"@\/@"$or"@ combinations of brace-wrapped filters.
showLogical :: String -> [Filter] -> String
showLogical op filters = show op ++ ":[" ++ join "," (map showFilter filters) ++ "]"
-- The following helper functions are used in generating query params.

-- | Wrap a rendered filter in braces: @{...}@. (The binder is renamed to
-- avoid shadowing Prelude's 'filter'.)
showFilter :: Filter -> String
showFilter f = "{" ++ show f ++ "}"
-- | Quote a string for query output, escaping embedded double quotes.
-- NOTE(review): only @"@ is escaped; backslashes and control characters
-- pass through unescaped — confirm the API tolerates this.
showStr :: String -> String
showStr str = "\"" ++ replace "\"" "\\\"" str ++ "\""
-- | Build the @filters@ query parameter; an empty filter list yields an
-- empty value rather than @{}@.
filtersPair :: [Filter] -> (String, String)
filtersPair fs = ("filters", body)
  where
    body | null fs   = ""
         | otherwise = "{" ++ (join "," $ map show fs) ++ "}"
| rudyl313/factual-haskell-driver | Data/Factual/Shared/Filter.hs | bsd-3-clause | 5,050 | 0 | 12 | 1,205 | 1,461 | 767 | 694 | 58 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances, UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- |
-- Module : Text.Syntax.Poly.Parser.ReadP
-- Copyright : 2012 Kei Hibino
-- License : BSD3
--
-- Maintainer : ex8k.hibino@gmail.com
-- Stability : experimental
-- Portability : unknown
--
-- This module includes 'Syntax' instance implementation for 'ReadP'.
module Text.Syntax.Parser.ReadP (runAsReadP) where
import Data.List (find)
import Text.Syntax.Parser.Instances ()
import Text.Syntax.Poly.Class
(TryAlternative, Syntax(token))
import Text.Syntax.Poly.Type (RunAsParser, ErrorString, errorString)
import Text.ParserCombinators.ReadP (ReadP, get, readP_to_S)
-- | 'TryAlternative' instance of 'ReadP'; all method definitions are the
-- class defaults.
instance TryAlternative ReadP

-- | 'Syntax' instance over 'Char' tokens for 'ReadP': a token is a single
-- character consumed from the input.
instance Syntax Char ReadP where
  token = get
-- | Run syntax as 'ReadP': succeed with the first parse that consumed the
-- entire input, otherwise report a parse error.
runAsReadP :: RunAsParser Char String a ErrorString
runAsReadP parser s =
  case [ a | (a, rest) <- readP_to_S parser s, null rest ] of
    (a : _) -> Right a
    []      -> Left $ errorString "parse error"
| schernichkin/haskell-invertible-syntax-poly | src/Text/Syntax/Parser/ReadP.hs | bsd-3-clause | 1,156 | 0 | 11 | 185 | 209 | 127 | 82 | 18 | 2 |
{-# language CPP #-}
-- | = Name
--
-- VK_ANDROID_external_memory_android_hardware_buffer - device extension
--
-- == VK_ANDROID_external_memory_android_hardware_buffer
--
-- [__Name String__]
-- @VK_ANDROID_external_memory_android_hardware_buffer@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 130
--
-- [__Revision__]
-- 4
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_sampler_ycbcr_conversion@
--
-- - Requires @VK_KHR_external_memory@
--
-- - Requires @VK_EXT_queue_family_foreign@
--
-- - Requires @VK_KHR_dedicated_allocation@
--
-- [__Contact__]
--
-- - Jesse Hall
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_ANDROID_external_memory_android_hardware_buffer] @critsec%0A<<Here describe the issue or question you have about the VK_ANDROID_external_memory_android_hardware_buffer extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2021-09-30
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Ray Smith, ARM
--
-- - Chad Versace, Google
--
-- - Jesse Hall, Google
--
-- - Tobias Hector, Imagination
--
-- - James Jones, NVIDIA
--
-- - Tony Zlatinski, NVIDIA
--
-- - Matthew Netsch, Qualcomm
--
-- - Andrew Garrard, Samsung
--
-- == Description
--
-- This extension enables an application to import Android
-- 'AHardwareBuffer' objects created outside of the Vulkan device into
-- Vulkan memory objects, where they /can/ be bound to images and buffers.
-- It also allows exporting an 'AHardwareBuffer' from a Vulkan memory
-- object for symmetry with other operating systems. But since not all
-- 'AHardwareBuffer' usages and formats have Vulkan equivalents, exporting
-- from Vulkan provides strictly less functionality than creating the
-- 'AHardwareBuffer' externally and importing it.
--
-- Some 'AHardwareBuffer' images have implementation-defined /external
-- formats/ that /may/ not correspond to Vulkan formats. Sampler Y′CBCR
-- conversion /can/ be used to sample from these images and convert them to
-- a known color space.
--
-- == New Base Types
--
-- - 'AHardwareBuffer'
--
-- == New Commands
--
-- - 'getAndroidHardwareBufferPropertiesANDROID'
--
-- - 'getMemoryAndroidHardwareBufferANDROID'
--
-- == New Structures
--
-- - 'AndroidHardwareBufferPropertiesANDROID'
--
-- - 'MemoryGetAndroidHardwareBufferInfoANDROID'
--
-- - Extending 'AndroidHardwareBufferPropertiesANDROID':
--
-- - 'AndroidHardwareBufferFormatPropertiesANDROID'
--
-- - Extending 'Vulkan.Core10.Image.ImageCreateInfo',
-- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo':
--
-- - 'ExternalFormatANDROID'
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.ImageFormatProperties2':
--
-- - 'AndroidHardwareBufferUsageANDROID'
--
-- - Extending 'Vulkan.Core10.Memory.MemoryAllocateInfo':
--
-- - 'ImportAndroidHardwareBufferInfoANDROID'
--
-- If
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_format_feature_flags2 VK_KHR_format_feature_flags2>
-- is supported:
--
-- - Extending 'AndroidHardwareBufferPropertiesANDROID':
--
-- - 'AndroidHardwareBufferFormatProperties2ANDROID'
--
-- == New Enum Constants
--
-- - 'ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME'
--
-- - 'ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Core11.Enums.ExternalMemoryHandleTypeFlagBits.ExternalMemoryHandleTypeFlagBits':
--
-- - 'Vulkan.Core11.Enums.ExternalMemoryHandleTypeFlagBits.EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID'
--
-- If
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_format_feature_flags2 VK_KHR_format_feature_flags2>
-- is supported:
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID'
--
-- == Issues
--
-- 1) Other external memory objects are represented as weakly-typed handles
-- (e.g. Win32 'Vulkan.Extensions.VK_NV_external_memory_win32.HANDLE' or
-- POSIX file descriptor), and require a handle type parameter along with
-- handles. 'AHardwareBuffer' is strongly typed, so naming the handle type
-- is redundant. Does symmetry justify adding handle type
-- parameters\/fields anyway?
--
-- __RESOLVED__: No. The handle type is already provided in places that
-- treat external memory objects generically. In the places we would add
-- it, the application code that would have to provide the handle type
-- value is already dealing with 'AHardwareBuffer'-specific
-- commands\/structures; the extra symmetry would not be enough to make
-- that code generic.
--
-- 2) The internal layout and therefore size of a 'AHardwareBuffer' image
-- may depend on native usage flags that do not have corresponding Vulkan
-- counterparts. Do we provide this information to
-- 'Vulkan.Core10.Image.createImage' somehow, or allow the allocation size
-- reported by 'Vulkan.Core10.MemoryManagement.getImageMemoryRequirements'
-- to be approximate?
--
-- __RESOLVED__: Allow the allocation size to be unspecified when
-- allocating the memory. It has to work this way for exported image memory
-- anyway, since 'AHardwareBuffer' allocation happens in
-- 'Vulkan.Core10.Memory.allocateMemory', and internally is performed by a
-- separate HAL, not the Vulkan implementation itself. There is a similar
-- issue with 'Vulkan.Core10.Image.getImageSubresourceLayout': the layout
-- is determined by the allocator HAL, so it is not known until the image
-- is bound to memory.
--
-- 3) Should the result of sampling an external-format image with the
-- suggested Y′CBCR conversion parameters yield the same results as using a
-- @samplerExternalOES@ in OpenGL ES?
--
-- __RESOLVED__: This would be desirable, so that apps converting from
-- OpenGL ES to Vulkan could get the same output given the same input. But
-- since sampling and conversion from Y′CBCR images is so loosely defined
-- in OpenGL ES, multiple implementations do it in a way that does not
-- conform to Vulkan’s requirements. Modifying the OpenGL ES implementation
-- would be difficult, and would change the output of existing unmodified
-- applications. Changing the output only for applications that are being
-- modified gives developers the chance to notice and mitigate any
-- problems. Implementations are encouraged to minimize differences as much
-- as possible without causing compatibility problems for existing OpenGL
-- ES applications or violating Vulkan requirements.
--
-- 4) Should an 'AHardwareBuffer' with @AHARDWAREBUFFER_USAGE_CPU_*@ usage
-- be mappable in Vulkan? Should it be possible to export an
-- @AHardwareBuffers@ with such usage?
--
-- __RESOLVED__: Optional, and mapping in Vulkan is not the same as
-- @AHardwareBuffer_lock@. The semantics of these are different: mapping in
-- memory is persistent, just gives a raw view of the memory contents, and
-- does not involve ownership. @AHardwareBuffer_lock@ gives the host
-- exclusive access to the buffer, is temporary, and allows for
-- reformatting copy-in\/copy-out. Implementations are not required to
-- support host-visible memory types for imported Android hardware buffers
-- or resources backed by them. If a host-visible memory type is supported
-- and used, the memory can be mapped in Vulkan, but doing so follows
-- Vulkan semantics: it is just a raw view of the data and does not imply
-- ownership (this means implementations must not internally call
-- @AHardwareBuffer_lock@ to implement 'Vulkan.Core10.Memory.mapMemory', or
-- assume the application has done so). Implementations are not required to
-- support linear-tiled images backed by Android hardware buffers, even if
-- the 'AHardwareBuffer' has CPU usage. There is no reliable way to
-- allocate memory in Vulkan that can be exported to a 'AHardwareBuffer'
-- with CPU usage.
--
-- 5) Android may add new 'AHardwareBuffer' formats and usage flags over
-- time. Can references to them be added to this extension, or do they need
-- a new extension?
--
-- __RESOLVED__: This extension can document the interaction between the
-- new AHB formats\/usages and existing Vulkan features. No new Vulkan
-- features or implementation requirements can be added. The extension
-- version number will be incremented when this additional documentation is
-- added, but the version number does not indicate that an implementation
-- supports Vulkan memory or resources that map to the new
-- 'AHardwareBuffer' features: support for that must be queried with
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceImageFormatProperties2'
-- or is implied by successfully allocating a 'AHardwareBuffer' outside of
-- Vulkan that uses the new feature and has a GPU usage flag.
--
-- In essence, these are new features added to a new Android API level,
-- rather than new Vulkan features. The extension will only document how
-- existing Vulkan features map to that new Android feature.
--
-- == Version History
--
-- - Revision 4, 2021-09-30 (Jon Leech)
--
-- - Add interaction with @VK_KHR_format_feature_flags2@ to @vk.xml@
--
-- - Revision 3, 2019-08-27 (Jon Leech)
--
-- - Update revision history to correspond to XML version number
--
-- - Revision 2, 2018-04-09 (Petr Kraus)
--
-- - Markup fixes and remove incorrect Draft status
--
-- - Revision 1, 2018-03-04 (Jesse Hall)
--
-- - Initial version
--
-- == See Also
--
-- 'AHardwareBuffer', 'AndroidHardwareBufferFormatPropertiesANDROID',
-- 'AndroidHardwareBufferPropertiesANDROID',
-- 'AndroidHardwareBufferUsageANDROID', 'ExternalFormatANDROID',
-- 'ImportAndroidHardwareBufferInfoANDROID',
-- 'MemoryGetAndroidHardwareBufferInfoANDROID',
-- 'getAndroidHardwareBufferPropertiesANDROID',
-- 'getMemoryAndroidHardwareBufferANDROID'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_ANDROID_external_memory_android_hardware_buffer Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_ANDROID_external_memory_android_hardware_buffer ( getAndroidHardwareBufferPropertiesANDROID
, getMemoryAndroidHardwareBufferANDROID
, ImportAndroidHardwareBufferInfoANDROID(..)
, AndroidHardwareBufferUsageANDROID(..)
, AndroidHardwareBufferPropertiesANDROID(..)
, MemoryGetAndroidHardwareBufferInfoANDROID(..)
, AndroidHardwareBufferFormatPropertiesANDROID(..)
, ExternalFormatANDROID(..)
, AndroidHardwareBufferFormatProperties2ANDROID(..)
, ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION
, pattern ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION
, ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME
, pattern ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME
, AHardwareBuffer
) where
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Data.Typeable (eqT)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (castPtr)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Type.Equality ((:~:)(Refl))
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import Data.Word (Word32)
import Data.Word (Word64)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Vulkan.CStruct.Extends (forgetExtensions)
import Vulkan.CStruct.Extends (Chain)
import Vulkan.Core11.Enums.ChromaLocation (ChromaLocation)
import Vulkan.Core10.ImageView (ComponentMapping)
import Vulkan.Core10.Handles (Device)
import Vulkan.Core10.Handles (Device(..))
import Vulkan.Core10.Handles (Device(Device))
import Vulkan.Dynamic (DeviceCmds(pVkGetAndroidHardwareBufferPropertiesANDROID))
import Vulkan.Dynamic (DeviceCmds(pVkGetMemoryAndroidHardwareBufferANDROID))
import Vulkan.Core10.Handles (DeviceMemory)
import Vulkan.Core10.FundamentalTypes (DeviceSize)
import Vulkan.Core10.Handles (Device_T)
import Vulkan.CStruct.Extends (Extends)
import Vulkan.CStruct.Extends (Extendss)
import Vulkan.CStruct.Extends (Extensible(..))
import Vulkan.Core10.Enums.Format (Format)
import Vulkan.Core10.Enums.FormatFeatureFlagBits (FormatFeatureFlags)
import Vulkan.Core13.Enums.FormatFeatureFlags2 (FormatFeatureFlags2)
import Vulkan.CStruct.Extends (PeekChain)
import Vulkan.CStruct.Extends (PeekChain(..))
import Vulkan.CStruct.Extends (PokeChain)
import Vulkan.CStruct.Extends (PokeChain(..))
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.Core11.Enums.SamplerYcbcrModelConversion (SamplerYcbcrModelConversion)
import Vulkan.Core11.Enums.SamplerYcbcrRange (SamplerYcbcrRange)
import Vulkan.CStruct.Extends (SomeStruct)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
-- Wrap the dynamically-loaded @vkGetAndroidHardwareBufferPropertiesANDROID@
-- function pointer (fetched from the device command table) as a Haskell
-- function.  Compiled as an @unsafe@ call unless SAFE_FOREIGN_CALLS is
-- defined; unsafe calls are cheaper but must not block or re-enter Haskell.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetAndroidHardwareBufferPropertiesANDROID
:: FunPtr (Ptr Device_T -> Ptr AHardwareBuffer -> Ptr (SomeStruct AndroidHardwareBufferPropertiesANDROID) -> IO Result) -> Ptr Device_T -> Ptr AHardwareBuffer -> Ptr (SomeStruct AndroidHardwareBufferPropertiesANDROID) -> IO Result
-- | vkGetAndroidHardwareBufferPropertiesANDROID - Get Properties of External
-- Memory Android Hardware Buffers
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
--     - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
--     - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
--     - 'Vulkan.Extensions.VK_KHR_external_memory.ERROR_INVALID_EXTERNAL_HANDLE_KHR'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_ANDROID_external_memory_android_hardware_buffer VK_ANDROID_external_memory_android_hardware_buffer>,
-- 'AndroidHardwareBufferPropertiesANDROID', 'Vulkan.Core10.Handles.Device'
getAndroidHardwareBufferPropertiesANDROID :: forall a io
                                           . (Extendss AndroidHardwareBufferPropertiesANDROID a, PokeChain a, PeekChain a, MonadIO io)
                                          => -- | @device@ is the logical device that will be importing @buffer@.
                                             --
                                             -- #VUID-vkGetAndroidHardwareBufferPropertiesANDROID-device-parameter#
                                             -- @device@ /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
                                             Device
                                          -> -- | @buffer@ is the Android hardware buffer which will be imported.
                                             --
                                             -- #VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884# @buffer@
                                             -- /must/ be a valid Android hardware buffer object with at least one of
                                             -- the @AHARDWAREBUFFER_USAGE_GPU_*@ flags in its
                                             -- @AHardwareBuffer_Desc@::@usage@
                                             --
                                             -- #VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-parameter#
                                             -- @buffer@ /must/ be a valid pointer to a valid 'AHardwareBuffer' value
                                             (Ptr AHardwareBuffer)
                                          -> io (AndroidHardwareBufferPropertiesANDROID a)
getAndroidHardwareBufferPropertiesANDROID device buffer = liftIO . evalContT $ do
  -- Fetch the dynamically-loaded function pointer from the device's
  -- command table; it is looked up once per call.
  let vkGetAndroidHardwareBufferPropertiesANDROIDPtr = pVkGetAndroidHardwareBufferPropertiesANDROID (case device of Device{deviceCmds} -> deviceCmds)
  -- A null function pointer means the extension is not enabled for this
  -- device; fail eagerly with a descriptive IOError instead of crashing.
  lift $ unless (vkGetAndroidHardwareBufferPropertiesANDROIDPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetAndroidHardwareBufferPropertiesANDROID is null" Nothing Nothing
  let vkGetAndroidHardwareBufferPropertiesANDROID' = mkVkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROIDPtr
  -- Allocate a zero-initialised output struct (including its @pNext@
  -- extension chain) for the implementation to fill in.
  pPProperties <- ContT (withZeroCStruct @(AndroidHardwareBufferPropertiesANDROID _))
  r <- lift $ traceAroundEvent "vkGetAndroidHardwareBufferPropertiesANDROID" (vkGetAndroidHardwareBufferPropertiesANDROID' (deviceHandle (device)) (buffer) (forgetExtensions (pPProperties)))
  -- Negative result codes are Vulkan errors; surface them as exceptions.
  lift $ when (r < SUCCESS) (throwIO (VulkanException r))
  -- Unmarshal the filled-in struct (and its chain) back into Haskell.
  pProperties <- lift $ peekCStruct @(AndroidHardwareBufferPropertiesANDROID _) pPProperties
  pure $ (pProperties)
-- Wrap the dynamically-loaded @vkGetMemoryAndroidHardwareBufferANDROID@
-- function pointer (fetched from the device command table) as a Haskell
-- function.  Compiled as an @unsafe@ call unless SAFE_FOREIGN_CALLS is
-- defined; unsafe calls are cheaper but must not block or re-enter Haskell.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetMemoryAndroidHardwareBufferANDROID
:: FunPtr (Ptr Device_T -> Ptr MemoryGetAndroidHardwareBufferInfoANDROID -> Ptr (Ptr AHardwareBuffer) -> IO Result) -> Ptr Device_T -> Ptr MemoryGetAndroidHardwareBufferInfoANDROID -> Ptr (Ptr AHardwareBuffer) -> IO Result
-- | vkGetMemoryAndroidHardwareBufferANDROID - Get an Android hardware buffer
-- for a memory object
--
-- = Description
--
-- Each call to 'getMemoryAndroidHardwareBufferANDROID' /must/ return an
-- Android hardware buffer with a new reference acquired in addition to the
-- reference held by the 'Vulkan.Core10.Handles.DeviceMemory'. To avoid
-- leaking resources, the application /must/ release the reference by
-- calling @AHardwareBuffer_release@ when it is no longer needed. When
-- called with the same handle in
-- 'MemoryGetAndroidHardwareBufferInfoANDROID'::@memory@,
-- 'getMemoryAndroidHardwareBufferANDROID' /must/ return the same Android
-- hardware buffer object. If the device memory was created by importing an
-- Android hardware buffer, 'getMemoryAndroidHardwareBufferANDROID' /must/
-- return that same Android hardware buffer object.
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
--     - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
--     - 'Vulkan.Core10.Enums.Result.ERROR_TOO_MANY_OBJECTS'
--
--     - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_ANDROID_external_memory_android_hardware_buffer VK_ANDROID_external_memory_android_hardware_buffer>,
-- 'Vulkan.Core10.Handles.Device',
-- 'MemoryGetAndroidHardwareBufferInfoANDROID'
getMemoryAndroidHardwareBufferANDROID :: forall io
                                       . (MonadIO io)
                                      => -- | @device@ is the logical device that created the device memory being
                                         -- exported.
                                         --
                                         -- #VUID-vkGetMemoryAndroidHardwareBufferANDROID-device-parameter# @device@
                                         -- /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
                                         Device
                                      -> -- | @pInfo@ is a pointer to a 'MemoryGetAndroidHardwareBufferInfoANDROID'
                                         -- structure containing parameters of the export operation.
                                         --
                                         -- #VUID-vkGetMemoryAndroidHardwareBufferANDROID-pInfo-parameter# @pInfo@
                                         -- /must/ be a valid pointer to a valid
                                         -- 'MemoryGetAndroidHardwareBufferInfoANDROID' structure
                                         MemoryGetAndroidHardwareBufferInfoANDROID
                                      -> io (Ptr AHardwareBuffer)
getMemoryAndroidHardwareBufferANDROID device info = liftIO . evalContT $ do
  -- Fetch the dynamically-loaded function pointer from the device's
  -- command table; it is looked up once per call.
  let vkGetMemoryAndroidHardwareBufferANDROIDPtr = pVkGetMemoryAndroidHardwareBufferANDROID (case device of Device{deviceCmds} -> deviceCmds)
  -- A null function pointer means the extension is not enabled for this
  -- device; fail eagerly with a descriptive IOError instead of crashing.
  lift $ unless (vkGetMemoryAndroidHardwareBufferANDROIDPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetMemoryAndroidHardwareBufferANDROID is null" Nothing Nothing
  let vkGetMemoryAndroidHardwareBufferANDROID' = mkVkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROIDPtr
  -- Marshal the info struct into C memory for the duration of the call.
  pInfo <- ContT $ withCStruct (info)
  -- 8 bytes: room for the single pointer-sized result, zeroed up front and
  -- freed by 'bracket' even if the call throws.
  pPBuffer <- ContT $ bracket (callocBytes @(Ptr AHardwareBuffer) 8) free
  r <- lift $ traceAroundEvent "vkGetMemoryAndroidHardwareBufferANDROID" (vkGetMemoryAndroidHardwareBufferANDROID' (deviceHandle (device)) pInfo (pPBuffer))
  -- Negative result codes are Vulkan errors; surface them as exceptions.
  lift $ when (r < SUCCESS) (throwIO (VulkanException r))
  -- Read the returned AHardwareBuffer pointer out of the result slot.
  pBuffer <- lift $ peek @(Ptr AHardwareBuffer) pPBuffer
  pure $ (pBuffer)
-- | VkImportAndroidHardwareBufferInfoANDROID - Import memory from an Android
-- hardware buffer
--
-- = Description
--
-- When the 'Vulkan.Core10.Memory.allocateMemory' command succeeds, the
-- implementation /must/ acquire a reference to the imported hardware
-- buffer and release it again when the device memory object is freed.
-- When the command fails, the implementation /must/ not retain a
-- reference.
--
-- == Valid Usage
--
-- - #VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880# If
--   @buffer@ is not @NULL@, Android hardware buffers /must/ be supported
--   for import, as reported by
--   'Vulkan.Core11.Promoted_From_VK_KHR_external_memory_capabilities.ExternalImageFormatProperties'
--   or
--   'Vulkan.Core11.Promoted_From_VK_KHR_external_memory_capabilities.ExternalBufferProperties'
--
-- - #VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881# If
--   @buffer@ is not @NULL@, it /must/ be a valid Android hardware buffer
--   object with @AHardwareBuffer_Desc@::@usage@ compatible with Vulkan
--   as described in
--   <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#memory-external-android-hardware-buffer Android Hardware Buffers>
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkImportAndroidHardwareBufferInfoANDROID-sType-sType# @sType@
--   /must/ be
--   'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID'
--
-- - #VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter#
--   @buffer@ /must/ be a valid pointer to an 'AHardwareBuffer' value
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_ANDROID_external_memory_android_hardware_buffer VK_ANDROID_external_memory_android_hardware_buffer>,
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data ImportAndroidHardwareBufferInfoANDROID = ImportAndroidHardwareBufferInfoANDROID
  { -- | @buffer@ is the Android hardware buffer to import.
    buffer :: Ptr AHardwareBuffer }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (ImportAndroidHardwareBufferInfoANDROID)
#endif
deriving instance Show ImportAndroidHardwareBufferInfoANDROID

-- C layout (24 bytes): sType at offset 0, pNext at 8, buffer at 16.
instance ToCStruct ImportAndroidHardwareBufferInfoANDROID where
  withCStruct x cont = allocaBytes 24 $ \ptr -> pokeCStruct ptr x (cont ptr)
  pokeCStruct ptr (ImportAndroidHardwareBufferInfoANDROID buf) cont = do
    poke (ptr `plusPtr` 0 :: Ptr StructureType) STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID
    poke (ptr `plusPtr` 8 :: Ptr (Ptr ())) nullPtr
    poke (ptr `plusPtr` 16 :: Ptr (Ptr AHardwareBuffer)) buf
    cont
  cStructSize = 24
  cStructAlignment = 8
  pokeZeroCStruct ptr cont = do
    poke (ptr `plusPtr` 0 :: Ptr StructureType) STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID
    poke (ptr `plusPtr` 8 :: Ptr (Ptr ())) nullPtr
    poke (ptr `plusPtr` 16 :: Ptr (Ptr AHardwareBuffer)) zero
    cont

-- Only the payload field is read back; sType/pNext are fixed by this type.
instance FromCStruct ImportAndroidHardwareBufferInfoANDROID where
  peekCStruct ptr =
    ImportAndroidHardwareBufferInfoANDROID
      <$> peek @(Ptr AHardwareBuffer) (ptr `plusPtr` 16 :: Ptr (Ptr AHardwareBuffer))

instance Storable ImportAndroidHardwareBufferInfoANDROID where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

instance Zero ImportAndroidHardwareBufferInfoANDROID where
  zero = ImportAndroidHardwareBufferInfoANDROID zero
-- | VkAndroidHardwareBufferUsageANDROID - Struct containing Android hardware
-- buffer usage flags
--
-- = Description
--
-- The @androidHardwareBufferUsage@ field /must/ include the Android
-- hardware buffer usage flags listed in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#memory-external-android-hardware-buffer-usage AHardwareBuffer Usage Equivalence>
-- table when the corresponding Vulkan image usage or image creation flags
-- are included in the @usage@ or @flags@ fields of
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceImageFormatInfo2'.
-- It /must/ include at least one GPU usage flag
-- (@AHARDWAREBUFFER_USAGE_GPU_*@), even if none of the corresponding
-- Vulkan usages or flags are requested.
--
-- Note
--
-- Requiring at least one GPU usage flag ensures the buffer is allocated
-- from a memory pool the Vulkan implementation can access, and that any
-- usage-based specialisation of the memory layout stays compatible with
-- Vulkan.  Implementations /may/ avoid unnecessary restrictions caused by
-- this requirement by using vendor usage flags to indicate that only the
-- Vulkan uses indicated in
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.ImageFormatProperties2'
-- are required.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_ANDROID_external_memory_android_hardware_buffer VK_ANDROID_external_memory_android_hardware_buffer>,
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data AndroidHardwareBufferUsageANDROID = AndroidHardwareBufferUsageANDROID
  { -- | @androidHardwareBufferUsage@ returns the Android hardware buffer usage
    -- flags.
    androidHardwareBufferUsage :: Word64 }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (AndroidHardwareBufferUsageANDROID)
#endif
deriving instance Show AndroidHardwareBufferUsageANDROID

-- C layout (24 bytes): sType at offset 0, pNext at 8, usage (Word64) at 16.
instance ToCStruct AndroidHardwareBufferUsageANDROID where
  withCStruct x cont = allocaBytes 24 $ \ptr -> pokeCStruct ptr x (cont ptr)
  pokeCStruct ptr (AndroidHardwareBufferUsageANDROID usage) cont = do
    poke (ptr `plusPtr` 0 :: Ptr StructureType) STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID
    poke (ptr `plusPtr` 8 :: Ptr (Ptr ())) nullPtr
    poke (ptr `plusPtr` 16 :: Ptr Word64) usage
    cont
  cStructSize = 24
  cStructAlignment = 8
  pokeZeroCStruct ptr cont = do
    poke (ptr `plusPtr` 0 :: Ptr StructureType) STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID
    poke (ptr `plusPtr` 8 :: Ptr (Ptr ())) nullPtr
    poke (ptr `plusPtr` 16 :: Ptr Word64) zero
    cont

-- Only the payload field is read back; sType/pNext are fixed by this type.
instance FromCStruct AndroidHardwareBufferUsageANDROID where
  peekCStruct ptr =
    AndroidHardwareBufferUsageANDROID
      <$> peek @Word64 (ptr `plusPtr` 16 :: Ptr Word64)

instance Storable AndroidHardwareBufferUsageANDROID where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

instance Zero AndroidHardwareBufferUsageANDROID where
  zero = AndroidHardwareBufferUsageANDROID zero
-- | VkAndroidHardwareBufferPropertiesANDROID - Properties of External Memory
-- Android Hardware Buffers
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkAndroidHardwareBufferPropertiesANDROID-sType-sType# @sType@
--   /must/ be
--   'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID'
--
-- - #VUID-VkAndroidHardwareBufferPropertiesANDROID-pNext-pNext# Each
--   @pNext@ member of any structure (including this one) in the @pNext@
--   chain /must/ be either @NULL@ or a pointer to a valid instance of
--   'AndroidHardwareBufferFormatProperties2ANDROID' or
--   'AndroidHardwareBufferFormatPropertiesANDROID'
--
-- - #VUID-VkAndroidHardwareBufferPropertiesANDROID-sType-unique# The
--   @sType@ value of each struct in the @pNext@ chain /must/ be unique
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_ANDROID_external_memory_android_hardware_buffer VK_ANDROID_external_memory_android_hardware_buffer>,
-- 'Vulkan.Core10.FundamentalTypes.DeviceSize',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'getAndroidHardwareBufferPropertiesANDROID'
data AndroidHardwareBufferPropertiesANDROID (es :: [Type]) = AndroidHardwareBufferPropertiesANDROID
  { -- | @pNext@ is @NULL@ or a pointer to a structure extending this structure.
    next :: Chain es
  , -- | @allocationSize@ is the size of the external memory
    allocationSize :: DeviceSize
  , -- | @memoryTypeBits@ is a bitmask containing one bit set for every memory
    -- type which the specified Android hardware buffer /can/ be imported as.
    memoryTypeBits :: Word32
  }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (AndroidHardwareBufferPropertiesANDROID (es :: [Type]))
#endif
deriving instance Show (Chain es) => Show (AndroidHardwareBufferPropertiesANDROID es)

-- Declares which structure types are allowed in this struct's @pNext@
-- extension chain.
instance Extensible AndroidHardwareBufferPropertiesANDROID where
  extensibleTypeName = "AndroidHardwareBufferPropertiesANDROID"
  setNext AndroidHardwareBufferPropertiesANDROID{..} next' = AndroidHardwareBufferPropertiesANDROID{next = next', ..}
  getNext AndroidHardwareBufferPropertiesANDROID{..} = next
  extends :: forall e b proxy. Typeable e => proxy e -> (Extends AndroidHardwareBufferPropertiesANDROID e => b) -> Maybe b
  extends _ f
    -- Runtime type equality (eqT) decides whether @e@ may extend this struct.
    | Just Refl <- eqT @e @AndroidHardwareBufferFormatProperties2ANDROID = Just f
    | Just Refl <- eqT @e @AndroidHardwareBufferFormatPropertiesANDROID = Just f
    | otherwise = Nothing

-- C layout (32 bytes): sType at offset 0, pNext at 8, allocationSize at 16,
-- memoryTypeBits at 24.  The extension chain is marshalled via ContT so its
-- C memory stays alive for the duration of the continuation.
instance (Extendss AndroidHardwareBufferPropertiesANDROID es, PokeChain es) => ToCStruct (AndroidHardwareBufferPropertiesANDROID es) where
  withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p AndroidHardwareBufferPropertiesANDROID{..} f = evalContT $ do
    lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID)
    -- Marshal the extension chain and link it in as pNext.
    pNext'' <- fmap castPtr . ContT $ withChain (next)
    lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) pNext''
    lift $ poke ((p `plusPtr` 16 :: Ptr DeviceSize)) (allocationSize)
    lift $ poke ((p `plusPtr` 24 :: Ptr Word32)) (memoryTypeBits)
    lift $ f
  cStructSize = 32
  cStructAlignment = 8
  pokeZeroCStruct p f = evalContT $ do
    lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID)
    -- Even a zeroed struct needs a well-formed (zeroed) extension chain.
    pNext' <- fmap castPtr . ContT $ withZeroChain @es
    lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) pNext'
    lift $ poke ((p `plusPtr` 16 :: Ptr DeviceSize)) (zero)
    lift $ poke ((p `plusPtr` 24 :: Ptr Word32)) (zero)
    lift $ f

instance (Extendss AndroidHardwareBufferPropertiesANDROID es, PeekChain es) => FromCStruct (AndroidHardwareBufferPropertiesANDROID es) where
  peekCStruct p = do
    -- Follow the pNext pointer and unmarshal the whole extension chain.
    pNext <- peek @(Ptr ()) ((p `plusPtr` 8 :: Ptr (Ptr ())))
    next <- peekChain (castPtr pNext)
    allocationSize <- peek @DeviceSize ((p `plusPtr` 16 :: Ptr DeviceSize))
    memoryTypeBits <- peek @Word32 ((p `plusPtr` 24 :: Ptr Word32))
    pure $ AndroidHardwareBufferPropertiesANDROID
             next allocationSize memoryTypeBits

-- Zero is only definable for an empty extension chain.
instance es ~ '[] => Zero (AndroidHardwareBufferPropertiesANDROID es) where
  zero = AndroidHardwareBufferPropertiesANDROID
           ()
           zero
           zero
-- | VkMemoryGetAndroidHardwareBufferInfoANDROID - Structure describing an
-- Android hardware buffer memory export operation
--
-- == Valid Usage
--
-- - #VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882#
--   'Vulkan.Core11.Enums.ExternalMemoryHandleTypeFlagBits.EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID'
--   /must/ have been included in
--   'Vulkan.Core11.Promoted_From_VK_KHR_external_memory.ExportMemoryAllocateInfo'::@handleTypes@
--   when @memory@ was created
--
-- - #VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883# If
--   the @pNext@ chain of the 'Vulkan.Core10.Memory.MemoryAllocateInfo'
--   used to allocate @memory@ included a
--   'Vulkan.Core11.Promoted_From_VK_KHR_dedicated_allocation.MemoryDedicatedAllocateInfo'
--   with non-@NULL@ @image@ member, then that @image@ /must/ already be
--   bound to @memory@
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-sType-sType#
--   @sType@ /must/ be
--   'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID'
--
-- - #VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-pNext#
--   @pNext@ /must/ be @NULL@
--
-- - #VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-memory-parameter#
--   @memory@ /must/ be a valid 'Vulkan.Core10.Handles.DeviceMemory'
--   handle
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_ANDROID_external_memory_android_hardware_buffer VK_ANDROID_external_memory_android_hardware_buffer>,
-- 'Vulkan.Core10.Handles.DeviceMemory',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'getMemoryAndroidHardwareBufferANDROID'
data MemoryGetAndroidHardwareBufferInfoANDROID = MemoryGetAndroidHardwareBufferInfoANDROID
  { -- | @memory@ is the memory object from which the Android hardware buffer
    -- will be exported.
    memory :: DeviceMemory }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (MemoryGetAndroidHardwareBufferInfoANDROID)
#endif
deriving instance Show MemoryGetAndroidHardwareBufferInfoANDROID

-- C layout (24 bytes): sType at offset 0, pNext at 8, memory at 16.
instance ToCStruct MemoryGetAndroidHardwareBufferInfoANDROID where
  withCStruct x cont = allocaBytes 24 $ \ptr -> pokeCStruct ptr x (cont ptr)
  pokeCStruct ptr (MemoryGetAndroidHardwareBufferInfoANDROID mem) cont = do
    poke (ptr `plusPtr` 0 :: Ptr StructureType) STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID
    poke (ptr `plusPtr` 8 :: Ptr (Ptr ())) nullPtr
    poke (ptr `plusPtr` 16 :: Ptr DeviceMemory) mem
    cont
  cStructSize = 24
  cStructAlignment = 8
  pokeZeroCStruct ptr cont = do
    poke (ptr `plusPtr` 0 :: Ptr StructureType) STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID
    poke (ptr `plusPtr` 8 :: Ptr (Ptr ())) nullPtr
    poke (ptr `plusPtr` 16 :: Ptr DeviceMemory) zero
    cont

-- Only the payload field is read back; sType/pNext are fixed by this type.
instance FromCStruct MemoryGetAndroidHardwareBufferInfoANDROID where
  peekCStruct ptr =
    MemoryGetAndroidHardwareBufferInfoANDROID
      <$> peek @DeviceMemory (ptr `plusPtr` 16 :: Ptr DeviceMemory)

instance Storable MemoryGetAndroidHardwareBufferInfoANDROID where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

instance Zero MemoryGetAndroidHardwareBufferInfoANDROID where
  zero = MemoryGetAndroidHardwareBufferInfoANDROID zero
-- | VkAndroidHardwareBufferFormatPropertiesANDROID - Structure describing
-- the image format properties of an Android hardware buffer
--
-- = Description
--
-- If the Android hardware buffer has one of the formats listed in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#memory-external-android-hardware-buffer-formats Format Equivalence table>,
-- then @format@ /must/ have the equivalent Vulkan format listed in the
-- table. Otherwise, @format@ /may/ be
-- 'Vulkan.Core10.Enums.Format.FORMAT_UNDEFINED', indicating the Android
-- hardware buffer /can/ only be used with an external format.
--
-- The @formatFeatures@ member /must/ include
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_BIT'
-- and at least one of
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT'
-- or
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT',
-- and /should/ include
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT'
-- and
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT'.
--
-- Note
--
-- The @formatFeatures@ member only indicates the features available when
-- using an
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#memory-external-android-hardware-buffer-external-formats external-format image>
-- created from the Android hardware buffer. Images from Android hardware
-- buffers with a format other than
-- 'Vulkan.Core10.Enums.Format.FORMAT_UNDEFINED' are subject to the format
-- capabilities obtained from
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFormatProperties2',
-- and
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceImageFormatProperties2'
-- with appropriate parameters. These sets of features are independent of
-- each other, e.g. the external format will support sampler Y′CBCR
-- conversion even if the non-external format does not, and writing to
-- non-external format images is possible but writing to external format
-- images is not.
--
-- Android hardware buffers with the same external format /must/ have the
-- same support for
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT',
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT',
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT',
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT',
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT',
-- and
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT'.
-- in @formatFeatures@. Other format features /may/ differ between Android
-- hardware buffers that have the same external format. This allows
-- applications to use the same
-- 'Vulkan.Core11.Handles.SamplerYcbcrConversion' object (and samplers and
-- pipelines created from them) for any Android hardware buffers that have
-- the same external format.
--
-- If @format@ is not 'Vulkan.Core10.Enums.Format.FORMAT_UNDEFINED', then
-- the value of @samplerYcbcrConversionComponents@ /must/ be valid when
-- used as the @components@ member of
-- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'
-- with that format. If @format@ is
-- 'Vulkan.Core10.Enums.Format.FORMAT_UNDEFINED', all members of
-- @samplerYcbcrConversionComponents@ /must/ be the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#resources-image-views-identity-mappings identity swizzle>.
--
-- Implementations /may/ not always be able to determine the color model,
-- numerical range, or chroma offsets of the image contents, so the values
-- in 'AndroidHardwareBufferFormatPropertiesANDROID' are only suggestions.
-- Applications /should/ treat these values as sensible defaults to use in
-- the absence of more reliable information obtained through some other
-- means. If the underlying physical device is also usable via OpenGL ES
-- with the
-- <https://www.khronos.org/registry/OpenGL/extensions/OES/OES_EGL_image_external.txt GL_OES_EGL_image_external>
-- extension, the implementation /should/ suggest values that will produce
-- similar sampled values as would be obtained by sampling the same
-- external image via @samplerExternalOES@ in OpenGL ES using equivalent
-- sampler parameters.
--
-- Note
--
-- Since
-- <https://www.khronos.org/registry/OpenGL/extensions/OES/OES_EGL_image_external.txt GL_OES_EGL_image_external>
-- does not require the same sampling and conversion calculations as Vulkan
-- does, achieving identical results between APIs /may/ not be possible on
-- some implementations.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_ANDROID_external_memory_android_hardware_buffer VK_ANDROID_external_memory_android_hardware_buffer>,
-- 'Vulkan.Core11.Enums.ChromaLocation.ChromaLocation',
-- 'Vulkan.Core10.ImageView.ComponentMapping',
-- 'Vulkan.Core10.Enums.Format.Format',
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FormatFeatureFlags',
-- 'Vulkan.Core11.Enums.SamplerYcbcrModelConversion.SamplerYcbcrModelConversion',
-- 'Vulkan.Core11.Enums.SamplerYcbcrRange.SamplerYcbcrRange',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
-- Haskell mirror of @VkAndroidHardwareBufferFormatPropertiesANDROID@.
data AndroidHardwareBufferFormatPropertiesANDROID = AndroidHardwareBufferFormatPropertiesANDROID
  { -- | @format@ is the Vulkan format corresponding to the Android hardware
    -- buffer’s format, or 'Vulkan.Core10.Enums.Format.FORMAT_UNDEFINED' if
    -- there is not an equivalent Vulkan format.
    format :: Format
  , -- | @externalFormat@ is an implementation-defined external format identifier
    -- for use with 'ExternalFormatANDROID'. It /must/ not be zero.
    externalFormat :: Word64
  , -- | @formatFeatures@ describes the capabilities of this external format when
    -- used with an image bound to memory imported from @buffer@.
    formatFeatures :: FormatFeatureFlags
  , -- | @samplerYcbcrConversionComponents@ is the component swizzle that
    -- /should/ be used in
    -- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'.
    samplerYcbcrConversionComponents :: ComponentMapping
  , -- | @suggestedYcbcrModel@ is a suggested color model to use in the
    -- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'.
    suggestedYcbcrModel :: SamplerYcbcrModelConversion
  , -- | @suggestedYcbcrRange@ is a suggested numerical value range to use in
    -- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'.
    suggestedYcbcrRange :: SamplerYcbcrRange
  , -- | @suggestedXChromaOffset@ is a suggested X chroma offset to use in
    -- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'.
    suggestedXChromaOffset :: ChromaLocation
  , -- | @suggestedYChromaOffset@ is a suggested Y chroma offset to use in
    -- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'.
    suggestedYChromaOffset :: ChromaLocation
  }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (AndroidHardwareBufferFormatPropertiesANDROID)
#endif
deriving instance Show AndroidHardwareBufferFormatPropertiesANDROID

-- C layout (72 bytes, 8-byte aligned): sType@0, pNext@8, format@16,
-- externalFormat@24, formatFeatures@32, components@36 (4 x 4 bytes),
-- suggestedYcbcrModel@52, suggestedYcbcrRange@56, X offset@60, Y offset@64.
instance ToCStruct AndroidHardwareBufferFormatPropertiesANDROID where
  withCStruct x f = allocaBytes 72 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p AndroidHardwareBufferFormatPropertiesANDROID{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Format)) (format)
    poke ((p `plusPtr` 24 :: Ptr Word64)) (externalFormat)
    poke ((p `plusPtr` 32 :: Ptr FormatFeatureFlags)) (formatFeatures)
    poke ((p `plusPtr` 36 :: Ptr ComponentMapping)) (samplerYcbcrConversionComponents)
    poke ((p `plusPtr` 52 :: Ptr SamplerYcbcrModelConversion)) (suggestedYcbcrModel)
    poke ((p `plusPtr` 56 :: Ptr SamplerYcbcrRange)) (suggestedYcbcrRange)
    poke ((p `plusPtr` 60 :: Ptr ChromaLocation)) (suggestedXChromaOffset)
    poke ((p `plusPtr` 64 :: Ptr ChromaLocation)) (suggestedYChromaOffset)
    f
  cStructSize = 72
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Format)) (zero)
    poke ((p `plusPtr` 24 :: Ptr Word64)) (zero)
    poke ((p `plusPtr` 32 :: Ptr FormatFeatureFlags)) (zero)
    poke ((p `plusPtr` 36 :: Ptr ComponentMapping)) (zero)
    poke ((p `plusPtr` 52 :: Ptr SamplerYcbcrModelConversion)) (zero)
    poke ((p `plusPtr` 56 :: Ptr SamplerYcbcrRange)) (zero)
    poke ((p `plusPtr` 60 :: Ptr ChromaLocation)) (zero)
    poke ((p `plusPtr` 64 :: Ptr ChromaLocation)) (zero)
    f

instance FromCStruct AndroidHardwareBufferFormatPropertiesANDROID where
  peekCStruct p = do
    -- Read every payload field back at the same offsets used by pokeCStruct;
    -- the nested ComponentMapping is peeked as an embedded struct.
    format <- peek @Format ((p `plusPtr` 16 :: Ptr Format))
    externalFormat <- peek @Word64 ((p `plusPtr` 24 :: Ptr Word64))
    formatFeatures <- peek @FormatFeatureFlags ((p `plusPtr` 32 :: Ptr FormatFeatureFlags))
    samplerYcbcrConversionComponents <- peekCStruct @ComponentMapping ((p `plusPtr` 36 :: Ptr ComponentMapping))
    suggestedYcbcrModel <- peek @SamplerYcbcrModelConversion ((p `plusPtr` 52 :: Ptr SamplerYcbcrModelConversion))
    suggestedYcbcrRange <- peek @SamplerYcbcrRange ((p `plusPtr` 56 :: Ptr SamplerYcbcrRange))
    suggestedXChromaOffset <- peek @ChromaLocation ((p `plusPtr` 60 :: Ptr ChromaLocation))
    suggestedYChromaOffset <- peek @ChromaLocation ((p `plusPtr` 64 :: Ptr ChromaLocation))
    pure $ AndroidHardwareBufferFormatPropertiesANDROID
             format externalFormat formatFeatures samplerYcbcrConversionComponents suggestedYcbcrModel suggestedYcbcrRange suggestedXChromaOffset suggestedYChromaOffset

instance Storable AndroidHardwareBufferFormatPropertiesANDROID where
  sizeOf ~_ = 72
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

instance Zero AndroidHardwareBufferFormatPropertiesANDROID where
  zero = AndroidHardwareBufferFormatPropertiesANDROID
           zero
           zero
           zero
           zero
           zero
           zero
           zero
           zero
-- | VkExternalFormatANDROID - Structure containing an Android hardware
-- buffer external format
--
-- = Description
--
-- If @externalFormat@ is zero, the effect is as if the
-- 'ExternalFormatANDROID' structure was not present. Otherwise, the
-- @image@ will have the specified external format.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_ANDROID_external_memory_android_hardware_buffer VK_ANDROID_external_memory_android_hardware_buffer>,
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
-- Haskell mirror of @VkExternalFormatANDROID@.
data ExternalFormatANDROID = ExternalFormatANDROID
  { -- | @externalFormat@ is an implementation-defined identifier for the
    -- external format
    --
    -- #VUID-VkExternalFormatANDROID-externalFormat-01894# @externalFormat@
    -- /must/ be @0@ or a value returned in the @externalFormat@ member of
    -- 'AndroidHardwareBufferFormatPropertiesANDROID' by an earlier call to
    -- 'getAndroidHardwareBufferPropertiesANDROID'
    externalFormat :: Word64 }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (ExternalFormatANDROID)
#endif
deriving instance Show ExternalFormatANDROID

-- C layout (24 bytes, 8-byte aligned): sType@0, pNext@8, externalFormat@16.
instance ToCStruct ExternalFormatANDROID where
  withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p ExternalFormatANDROID{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Word64)) (externalFormat)
    f
  cStructSize = 24
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Word64)) (zero)
    f

instance FromCStruct ExternalFormatANDROID where
  peekCStruct p = do
    externalFormat <- peek @Word64 ((p `plusPtr` 16 :: Ptr Word64))
    pure $ ExternalFormatANDROID
             externalFormat

instance Storable ExternalFormatANDROID where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

instance Zero ExternalFormatANDROID where
  zero = ExternalFormatANDROID
           zero
-- | VkAndroidHardwareBufferFormatProperties2ANDROID - Structure describing
-- the image format properties of an Android hardware buffer
--
-- = Description
--
-- The bits reported in @formatFeatures@ /must/ include the bits reported
-- in the corresponding fields of
-- 'AndroidHardwareBufferFormatPropertiesANDROID'::@formatFeatures@.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_ANDROID_external_memory_android_hardware_buffer VK_ANDROID_external_memory_android_hardware_buffer>,
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_format_feature_flags2 VK_KHR_format_feature_flags2>,
-- 'Vulkan.Core11.Enums.ChromaLocation.ChromaLocation',
-- 'Vulkan.Core10.ImageView.ComponentMapping',
-- 'Vulkan.Core10.Enums.Format.Format',
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FormatFeatureFlags2',
-- 'Vulkan.Core11.Enums.SamplerYcbcrModelConversion.SamplerYcbcrModelConversion',
-- 'Vulkan.Core11.Enums.SamplerYcbcrRange.SamplerYcbcrRange',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
-- Haskell mirror of @VkAndroidHardwareBufferFormatProperties2ANDROID@. Same
-- shape as 'AndroidHardwareBufferFormatPropertiesANDROID' but carries the
-- 64-bit 'FormatFeatureFlags2', which shifts the later field offsets.
data AndroidHardwareBufferFormatProperties2ANDROID = AndroidHardwareBufferFormatProperties2ANDROID
  { -- | @format@ is the Vulkan format corresponding to the Android hardware
    -- buffer’s format, or 'Vulkan.Core10.Enums.Format.FORMAT_UNDEFINED' if
    -- there is not an equivalent Vulkan format.
    format :: Format
  , -- | @externalFormat@ is an implementation-defined external format identifier
    -- for use with 'ExternalFormatANDROID'. It /must/ not be zero.
    externalFormat :: Word64
  , -- | @formatFeatures@ describes the capabilities of this external format when
    -- used with an image bound to memory imported from @buffer@.
    formatFeatures :: FormatFeatureFlags2
  , -- | @samplerYcbcrConversionComponents@ is the component swizzle that
    -- /should/ be used in
    -- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'.
    samplerYcbcrConversionComponents :: ComponentMapping
  , -- | @suggestedYcbcrModel@ is a suggested color model to use in the
    -- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'.
    suggestedYcbcrModel :: SamplerYcbcrModelConversion
  , -- | @suggestedYcbcrRange@ is a suggested numerical value range to use in
    -- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'.
    suggestedYcbcrRange :: SamplerYcbcrRange
  , -- | @suggestedXChromaOffset@ is a suggested X chroma offset to use in
    -- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'.
    suggestedXChromaOffset :: ChromaLocation
  , -- | @suggestedYChromaOffset@ is a suggested Y chroma offset to use in
    -- 'Vulkan.Core11.Promoted_From_VK_KHR_sampler_ycbcr_conversion.SamplerYcbcrConversionCreateInfo'.
    suggestedYChromaOffset :: ChromaLocation
  }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (AndroidHardwareBufferFormatProperties2ANDROID)
#endif
deriving instance Show AndroidHardwareBufferFormatProperties2ANDROID

-- C layout (72 bytes, 8-byte aligned): sType@0, pNext@8, format@16,
-- externalFormat@24, formatFeatures@32 (8 bytes), components@40,
-- suggestedYcbcrModel@56, suggestedYcbcrRange@60, X offset@64, Y offset@68.
instance ToCStruct AndroidHardwareBufferFormatProperties2ANDROID where
  withCStruct x f = allocaBytes 72 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p AndroidHardwareBufferFormatProperties2ANDROID{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Format)) (format)
    poke ((p `plusPtr` 24 :: Ptr Word64)) (externalFormat)
    poke ((p `plusPtr` 32 :: Ptr FormatFeatureFlags2)) (formatFeatures)
    poke ((p `plusPtr` 40 :: Ptr ComponentMapping)) (samplerYcbcrConversionComponents)
    poke ((p `plusPtr` 56 :: Ptr SamplerYcbcrModelConversion)) (suggestedYcbcrModel)
    poke ((p `plusPtr` 60 :: Ptr SamplerYcbcrRange)) (suggestedYcbcrRange)
    poke ((p `plusPtr` 64 :: Ptr ChromaLocation)) (suggestedXChromaOffset)
    poke ((p `plusPtr` 68 :: Ptr ChromaLocation)) (suggestedYChromaOffset)
    f
  cStructSize = 72
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Format)) (zero)
    poke ((p `plusPtr` 24 :: Ptr Word64)) (zero)
    poke ((p `plusPtr` 32 :: Ptr FormatFeatureFlags2)) (zero)
    poke ((p `plusPtr` 40 :: Ptr ComponentMapping)) (zero)
    poke ((p `plusPtr` 56 :: Ptr SamplerYcbcrModelConversion)) (zero)
    poke ((p `plusPtr` 60 :: Ptr SamplerYcbcrRange)) (zero)
    poke ((p `plusPtr` 64 :: Ptr ChromaLocation)) (zero)
    poke ((p `plusPtr` 68 :: Ptr ChromaLocation)) (zero)
    f

instance FromCStruct AndroidHardwareBufferFormatProperties2ANDROID where
  peekCStruct p = do
    format <- peek @Format ((p `plusPtr` 16 :: Ptr Format))
    externalFormat <- peek @Word64 ((p `plusPtr` 24 :: Ptr Word64))
    formatFeatures <- peek @FormatFeatureFlags2 ((p `plusPtr` 32 :: Ptr FormatFeatureFlags2))
    samplerYcbcrConversionComponents <- peekCStruct @ComponentMapping ((p `plusPtr` 40 :: Ptr ComponentMapping))
    suggestedYcbcrModel <- peek @SamplerYcbcrModelConversion ((p `plusPtr` 56 :: Ptr SamplerYcbcrModelConversion))
    suggestedYcbcrRange <- peek @SamplerYcbcrRange ((p `plusPtr` 60 :: Ptr SamplerYcbcrRange))
    suggestedXChromaOffset <- peek @ChromaLocation ((p `plusPtr` 64 :: Ptr ChromaLocation))
    suggestedYChromaOffset <- peek @ChromaLocation ((p `plusPtr` 68 :: Ptr ChromaLocation))
    pure $ AndroidHardwareBufferFormatProperties2ANDROID
             format externalFormat formatFeatures samplerYcbcrConversionComponents suggestedYcbcrModel suggestedYcbcrRange suggestedXChromaOffset suggestedYChromaOffset

instance Storable AndroidHardwareBufferFormatProperties2ANDROID where
  sizeOf ~_ = 72
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

instance Zero AndroidHardwareBufferFormatProperties2ANDROID where
  zero = AndroidHardwareBufferFormatProperties2ANDROID
           zero
           zero
           zero
           zero
           zero
           zero
           zero
           zero
-- | Type-level spec version of the extension.
type ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION = 4

-- | Value-level spec version; usable at any 'Integral' type.
pattern ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION :: forall a . Integral a => a
pattern ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION = 4

-- | Type-level extension name string.
type ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME = "VK_ANDROID_external_memory_android_hardware_buffer"

-- | Value-level extension name; usable at any string-like type.
pattern ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME = "VK_ANDROID_external_memory_android_hardware_buffer"

-- | Empty data declaration standing in for the NDK's opaque @AHardwareBuffer@;
-- only ever used behind a 'Ptr'.
data AHardwareBuffer
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_ANDROID_external_memory_android_hardware_buffer.hs | bsd-3-clause | 59,777 | 0 | 17 | 9,967 | 7,605 | 4,437 | 3,168 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TypeApplications #-}
module Biscuits where
import Control.Lens
import Data.Maybe (maybeToList)
import Data.Generics.Product
import Data.Generics.Sum
import GHC.Generics (Generic)
-- $setup
--
-- >>> :set -XTypeApplications
-- >>> :set -XDataKinds
-- >>> :set -XFlexibleContexts
-- | A purchasable item: a display name plus its price.
data Item = Item
  { name :: String
  , cost :: Cost
  } deriving (Generic, Show)

-- | Price wrapper (currency units unspecified in this example module).
newtype Cost = Cost Double deriving (Generic, Show)

-- | An order for a single 'Item'. @p@ is the priority payload; 'upgrade'
-- changes it from @Int@ to @(Int, Double)@. Note the @name@ field here is
-- the customer's name, distinct from the item's @name@
-- (DuplicateRecordFields is enabled).
data Invoice p = Invoice
  { item :: Item
  , name :: String
  , number :: Int
  , priority :: p
  } deriving (Generic, Show)

-- | Two batches of invoices with differently-typed priorities.
data Orders = Orders [ Invoice Int ] [ Invoice (Int, Double) ]
  deriving (Generic, Show)
-- |
-- >>> view (field @"name") bourbon
-- "Bourbon"
-- >>> bourbon & field @"cost" .~ Cost 110
-- Item {name = "Bourbon", cost = Cost 110.0}
--
-- >>> bourbon & field @"cost" %~ (\(Cost c) -> (Cost (c + 5)))
-- Item {name = "Bourbon", cost = Cost 105.0}
--
-- >>> Invoice bourbon "Johnny" 2 2 & field @"priority" %~ (\i -> (i, 0))
-- Invoice {item = Item {name = "Bourbon", cost = Cost 100.0}, name = "Johnny", number = 2, priority = (2,0)}
--
-- >>> view (field @"weight") bourbon
-- ...
-- ... The type Item does not contain a field named 'weight'.
-- ...
--
-- >>> bourbon & typed @Cost .~ Cost 200
-- Item {name = "Bourbon", cost = Cost 200.0}
--
-- >>> bourbon & typed %~ ("Chocolate " ++)
-- Item {name = "Chocolate Bourbon", cost = Cost 100.0}
--
-- >>> view (position @1) (42, "foo")
-- 42
-- >>> view (position @1) (42, "foo", False)
-- 42
-- >>> view (position @2) orders
-- [Invoice {item = Item {name = "Bourbon", cost = Cost 100.0}, name = "George", number = 2, priority = (0,3.0)}]
--
-- >>> view (position @2) orders
-- [Invoice {item = Item {name = "Bourbon", cost = Cost 100.0}, name = "George", number = 2, priority = (0,3.0)}]
--
-- >>> view (position @3) orders
-- ...
-- ... The type Orders does not contain a field at position 3
-- ...
--
-- >>> view (super @Item) (WItem "Bourbon" (Cost 2000) (Weight 0.03))
-- Item {name = "Bourbon", cost = Cost 2000.0}
--
-- >>> (WItem "Bourbon+" (Cost 500) (Weight 0.03)) & super @Item .~ bourbon
-- WItem {name = "Bourbon", cost = Cost 100.0, weight = Weight 3.0e-2}
--
-- >>> DInt 1 ^? _Ctor @"DInt"
-- Just 1
--
-- >>> _Typed # (False, "wurble") :: D
-- DPair False "wurble"
--
-- >>> EChar 'a' ^? _Sub @D
-- Nothing
--
-- >>> _Sub # DInt 10 :: E
-- EInt 10
-- NOTE: the doctest examples above reference these exact values; do not
-- change them without updating the expected outputs.
bourbon :: Item
bourbon = Item "Bourbon" (Cost 100)

orders :: Orders
orders = Orders [Invoice bourbon "Earl" 1 0 , Invoice bourbon "Johnny" 2 2]
                [Invoice bourbon "George" 2 (0, 3)]
-- | Name of the item an invoice refers to (not the customer's @name@ field).
nameOfItem :: Invoice p -> String
nameOfItem invoice = view (field @"name") (view (field @"item") invoice)
-- | Apply a 15% discount to every 'Cost' found anywhere inside the orders.
thankYou :: Orders -> Orders
thankYou allOrders = over (types @Cost) discount allOrders
  where
    discount (Cost c) = Cost (c * 0.85)
-- | Apply a 15% discount, but only to costs in the second batch of
-- 'Orders' (the priority invoices).
thankYouPriority :: Orders -> Orders
thankYouPriority allOrders = over (position @2 . types @Cost) discount allOrders
  where
    discount (Cost c) = Cost (c * 0.85)
-- | Promote an invoice to the priority batch by pairing its existing
-- priority with the bribe paid.
upgrade :: Double -> Invoice Int -> Invoice (Int, Double)
upgrade bribe = over (param @0) (\oldPriority -> (oldPriority, bribe))
-- | Collect every 'Item' mentioned anywhere in the orders.
audit :: Orders -> [Item]
audit allOrders = toListOf (types @Item) allOrders
-- | Weight wrapper (units unspecified in this example module).
newtype Weight = Weight Double deriving (Generic, Show)

-- | An 'Item' extended with a weight; shares the first two fields with
-- 'Item', which is what the 'super' examples exercise.
data WeighedItem = WItem
  { name :: String
  , cost :: Cost
  , weight :: Weight
  } deriving (Generic, Show)

-- | Small sum type used by the constructor-prism examples.
data D = DInt Int | DPair Bool String
  deriving (Generic, Show)

-- | Superset of 'D' with one extra constructor, used by the '_Sub' examples.
data E = EInt Int | EPair Bool String | EChar Char
  deriving (Generic, Show)
-- | Add a flat 5 to every 'Cost' inside any structure that contains one.
costInc :: HasTypes t Cost => t -> t
costInc thing = over (types @Cost) bump thing
  where
    bump (Cost c) = Cost (c + 5)
-- | Map over every 'Int' inside an @'Invoice' Int@. Note that this is
-- type-directed, so it touches the @number@ field as well as @priority@.
modifyPriority :: (Int -> Int) -> Invoice Int -> Invoice Int
modifyPriority f invoice = over (types @Int) f invoice
-- | Increment every occurrence of the structure's first type parameter
-- (instantiated at 'Int').
treeIncParam :: HasParam 0 s s Int Int => s -> s
treeIncParam structure = over (param @0) (+ 1) structure
-- | Map over the priority payload, implemented via the generic 'param' lens.
instance Functor Invoice where
  fmap f = over (param @0) f
| kcsongor/generic-lens | generic-lens/examples/Biscuits.hs | bsd-3-clause | 3,926 | 0 | 10 | 819 | 849 | 493 | 356 | 58 | 1 |
module Main ( main ) where
-- import System.Exit
-- import TinyASM.Instruction
import TinyASM.Parser
import TinyASM.Compiler
import TinyASM.VM
import System.Directory
import System.FilePath.Posix (pathSeparator)
import Data.List (isInfixOf)
-- | Locate every single-instruction unit program (files carrying an opcode
-- marker such as @add_0x0A.asm@) under the unit-test directory and run
-- each one through the assembler pipeline.
main :: IO ()
main = do
  -- Build the directory path portably from the platform path separator.
  let path = "test-suite" ++ [pathSeparator] ++ "programs" ++ [pathSeparator] ++ "unit" ++ [pathSeparator]
  allFiles <- getDirectoryContents path
  -- Keep only files whose names contain "_0x", qualify them with the
  -- directory, and reverse so they run in descending name order.
  let allUnitProgramFiles = reverse $ map (path ++) $ filter (isInfixOf "_0x") allFiles
  print allUnitProgramFiles
  mapM_ runFile allUnitProgramFiles
-- | Parse one assembly file, show its instructions and compiled byte code,
-- then execute it on a fresh VM and show the final machine state.
runFile :: String -> IO ()
runFile file = do
  putStrLn $ "- - - - - - - - - - - - - - - -\nParsing: " ++ file
  contents <- readFile file
  putStrLn $ "Contents:\n" ++ contents ++ "\n"
  let inst = parse contents
  -- Compile once and reuse the result for both display and execution
  -- (the original compiled the instruction list twice).
  let byteCode = compileInstructions inst
  print inst
  print byteCode
  let vm = run VM
        { stack = byteCode
          -- 256 bytes of zeroed memory; the screen starts empty.
        , memory = replicate 256 0x00
        , screen = []
        }
  print vm
| ocus/TinyASM_Haskell | test-suite/TinyASM.Parser.hs | bsd-3-clause | 1,724 | 0 | 17 | 461 | 357 | 189 | 168 | 28 | 1 |
{-# LANGUAGE FlexibleInstances #-}
module Hslogic.Types where
import Text.PrettyPrint((<>), hcat, text, Doc, char,punctuate)
import Data.Hashable
import qualified Data.HashMap.Lazy as H
-- | A logic variable, identified purely by its name.
data VarName = VarName String deriving (Eq, Read)

-- Shown as the bare name, without any constructor noise.
instance Show VarName where
  show (VarName v) = v

-- Hashing delegates to the underlying 'String'.
instance Hashable VarName where
  hash (VarName s) = hash s
  hashWithSalt i (VarName s) = hashWithSalt i s

-- | Construct a variable name from a 'String'.
mk_var :: String -> VarName
mk_var = VarName
-- | First-order terms: a variable, or a function symbol applied to zero or
-- more argument terms (a nullary 'Fn' is a constant).
data Term
  = Var VarName
  | Fn String [Term]
  deriving (Eq,Read)

-- | A clause: the head holds whenever all premises hold; an empty premise
-- list makes it a fact.
data Clause
  = Clause {
      clauseHead :: Term,
      clausePremises :: [Term]
    } deriving (Eq,Read)

-- | The set of valid goals
--
-- Formulas are (currently) distinct from clauses but they should probably be one and the same
data Formula = T Term
             | Term :-> Formula -- ^Intuitionistic implication, hypothesis maybe used zero or more times to prove consequence
             | Term :-@ Formula -- ^Linear implication, hypothesis must be used one and only one time to prove consequence
             | Term :* Formula  -- ^Multiplicative conjunction (in linear context) or more simply conjunction (in intuitionistic context)
             deriving (Eq,Read)

-- | A substitution: a finite map from variable names to the terms they
-- are bound to.
newtype Subst = Subst { substMap :: (H.HashMap VarName Term) } deriving Eq
-- | Things that can be rendered as a pretty-printing 'Doc'.
class PrettyPrintable a where
  pp :: a -> Doc

instance PrettyPrintable VarName where
  pp = text . show

-- Terms render Prolog-style: @f(a,b,c)@, with no parentheses for
-- constants or variables.
instance PrettyPrintable Term where
  pp (Var v)     = pp v
  pp (Fn n [])   = text n
  pp (Fn n args) = text n <> char '(' <> hcat (punctuate (char ',') (map pp args)) <> char ')'

instance Show Term where
  show = show . pp

-- Clauses render as @head <= p1, p2, ... .@ or just @head.@ for facts.
instance PrettyPrintable Clause where
  pp (Clause h []) = pp h <> char '.'
  pp (Clause h ps) = pp h <> text " <= " <> hcat (punctuate (text ", ") (map pp ps)) <> char '.'

instance Show Clause where
  show = show . pp
-- |Pretty print a term
--
-- >>> pretty (Fn "install" [ Var (VarName "X") ])
-- install(X)
-- >>> pretty (Fn "copy" [])
-- copy
pretty :: Term -> Doc
pretty = pp

-- A single binding renders as @k -> v@.
instance PrettyPrintable (VarName,Term) where
  pp (var, term) = pp var <> text " -> " <> pp term

-- A substitution renders as a comma-separated list of bindings in brackets.
instance PrettyPrintable Subst where
  pp s = char '['
      <> hcat (punctuate (char ',') (map pp (H.toList (substMap s))))
      <> char ']'

instance Show Subst where
  show = show . pp

instance PrettyPrintable Formula where
  pp (T t)      = pp t
  pp (t :-> t') = pp t <> text " => " <> pp t'
  pp (t :-@ t') = pp t <> text " -o " <> pp t'
  pp (t :* t')  = pp t <> text " , " <> pp t'

instance Show Formula where
  show = show . pp
| abailly/hslogic | src/Hslogic/Types.hs | bsd-3-clause | 2,635 | 0 | 15 | 723 | 888 | 461 | 427 | 64 | 1 |
-- |
-- Module : Data.ByteString.UTF8.Normalize
-- Copyright : (c) 2016 Harendra Kumar
--
-- License : BSD-3-Clause
-- Maintainer : harendra.kumar@gmail.com
-- Stability : experimental
-- Portability : GHC
--
-- Unicode normalization for @ByteString@ data type.
--
module Data.ByteString.UTF8.Normalize
{-# DEPRECATED "Convert ByteString to Text and then normalize" #-}
(
-- * Normalization Modes
NormalizationMode(..)
-- * Normalization API
, normalize
) where
import Data.ByteString (ByteString)
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Data.Unicode.Types (NormalizationMode(..))
import qualified Data.Text.Normalize as T
-- This is now simply a wrapper over Text normalization

-- | Perform Unicode normalization on a UTF8 encoded @ByteString@ according to
-- the specified normalization mode.
--
-- NOTE(review): 'decodeUtf8' throws on invalid UTF-8 input — presumably
-- callers guarantee well-formed input; confirm before relying on this.
normalize :: NormalizationMode -> ByteString -> ByteString
normalize mode bytes = encodeUtf8 (T.normalize mode (decodeUtf8 bytes))
| harendra-kumar/unicode-transforms | Data/ByteString/UTF8/Normalize.hs | bsd-3-clause | 981 | 0 | 9 | 170 | 122 | 82 | 40 | 11 | 1 |
{-
Based on [1]. The calculation of planetary and luni-solar terms are
kept separate as a optimization. Probably premature since I'm not
sure how much it buys us but is should save some 7000 multiplications
and additions per evaluation of the nutation parameters.
Any reference to chapters, sections, or equations are implicitly
referring to [1] unless otherwise specified.
[1] http://aa.usno.navy.mil/publications/docs/Circular_179.pdf
-}
module IAU2000.Nutation where
import Astro.Time
import Numeric.Units.Dimensional.Prelude
import IAU2000.Table53
import IAU2000.FundamentalArguments (fundamentalArguments)
import Control.Monad.Reader
import qualified Prelude
-- Full Series
-- ===========
-- | Pairs @(sin Phi_i, cos Phi_i)@ for the 678 luni-solar and 687 planetary
-- @Phi_i@ terms from [1]. Each @Phi_i@ is the linear combination, per
-- eq (5.16), of the fundamental arguments with one row of 'multipliers'.
trigTerms :: Floating a => E TT a -> [(Dimensionless a, Dimensionless a)]
trigTerms tt = fmap sinAndCos multipliers
  where
    -- Fundamental arguments are evaluated once and shared across all rows.
    args = fundamentalArguments tt
    sinAndCos ms = let phi = sum (zipWith (*) args ms) in (sin phi, cos phi)
-- | Returns the nutation angles @(DeltaPhi, DeltaEps)@ at the given epoch.
-- @DeltaPhi@ is the nutation in longitude and @DeltaEps@ is the nutation
-- in obliquity measured in the ecliptic system of date as described in
-- chapter 5.4.2 of [1].
-- The @Int@ argument is the number of terms to use in the nutation series.
nutationAngles :: Floating a => Int -> E TT a -> (Angle a, Angle a)
nutationAngles n tt = (sum (take n deltaPhiTerms), sum (take n deltaEpsTerms))
  where
    -- Evaluate the sine/cosine series once and share it between the two
    -- term lists (the original evaluated 'trigTerms tt' separately for
    -- each, and GHC does not guarantee that common subexpression is shared).
    trigs = trigTerms tt
    deltaPhiTerms = zipWith phiTerm phiCoeffs trigs
    deltaEpsTerms = zipWith epsTerm epsCoeffs trigs
    phiTerm (s, s_dot, c') (sinPhi, cosPhi) = (s + s_dot * t) * sinPhi + c' * cosPhi
    epsTerm (c, c_dot, s') (sinPhi, cosPhi) = (c + c_dot * t) * cosPhi + s' * sinPhi
    t = sinceJ2000 tt
-- Variations of the series
-- ========================

-- | The full IAU 2000A nutation series. Calculates the direction of the
-- celestial pole in the GCRS with an accuracy of 0.2 mas ([Kaplan2005]
-- p.47). Uses all 1365 terms of the table.
nutationAngles2000A :: Floating a => E TT a -> (Angle a, Angle a)
nutationAngles2000A = nutationAngles 1365 -- Could use 'maxBound'.

-- | The truncated IAU 2000B nutation series (77 terms). Duplicates the full
-- series ('nutationAngles2000A') to within a milliarcsecond for input epochs
-- between 1995 and 2050 ([Kaplan2005] p.47).
nutationAngles2000B :: Floating a => E TT a -> (Angle a, Angle a)
nutationAngles2000B = nutationAngles 77

-- | A truncated nutation series with 488 terms. Duplicates the full series'
-- ('nutationAngles2000A') to within 0.1 milliarcsecond accuracy between
-- 1700 and 2300. This emulates a subroutine provided by NOVAS
-- ([Kaplan2005] p.47).
nutationAngles488 :: Floating a => E TT a -> (Angle a, Angle a)
nutationAngles488 = nutationAngles 488
| bjornbm/astro | src/IAU2000/Nutation.hs | bsd-3-clause | 2,820 | 0 | 15 | 474 | 500 | 275 | 225 | 22 | 1 |
{-# OPTIONS_GHC -Wno-orphans #-}
module ArbitraryTypes where
import System.Random
import Test.QuickCheck
import Model
-- | Generate characters with strictly positive hit points and armour
-- class; the piece is built from an arbitrary underlying value.
instance Arbitrary Character where
  arbitrary = do
    hp <- getPositive <$> arbitrary
    ac <- getPositive <$> arbitrary
    c  <- arbitrary
    pure Character
      { hitPoints   = hp
      , armourClass = ac
      , piece       = Piece c
      }
-- | Build a 'StdGen' from an arbitrary 'Int' seed.
instance Arbitrary StdGen where
  arbitrary = mkStdGen <$> arbitrary
| camelpunch/rhascal | test/ArbitraryTypes.hs | bsd-3-clause | 453 | 0 | 11 | 135 | 105 | 55 | 50 | 16 | 0 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Type subsumption and unification
-}
{-# LANGUAGE CPP, MultiWayIf, TupleSections #-}
module TcUnify (
-- Full-blown subsumption
tcWrapResult, tcWrapResultO, tcSkolemise,
tcSubTypeHR, tcSubType, tcSubType_NC, tcSubTypeDS, tcSubTypeDS_O,
tcSubTypeDS_NC, tcSubTypeDS_NC_O,
checkConstraints, buildImplication, buildImplicationFor,
-- Various unifications
unifyType_, unifyType, unifyTheta, unifyKind, noThing,
uType,
--------------------------------
-- Holes
tcInfer,
matchExpectedListTy,
matchExpectedPArrTy,
matchExpectedTyConApp,
matchExpectedAppTy,
matchExpectedFunTys,
matchActualFunTys, matchActualFunTysPart,
matchExpectedFunKind,
wrapFunResCoercion
) where
#include "HsVersions.h"
import HsSyn
import TyCoRep
import TcMType
import TcRnMonad
import TcType
import Type
import Coercion
import TcEvidence
import Name ( isSystemName )
import Inst
import TyCon
import TysWiredIn
import Var
import VarEnv
import VarSet
import ErrUtils
import DynFlags
import BasicTypes
import Name ( Name )
import Bag
import Util
import Outputable
import FastString
import Control.Monad
{-
************************************************************************
* *
matchExpected functions
* *
************************************************************************
Note [Herald for matchExpectedFunTys]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The 'herald' always looks like:
"The equation(s) for 'f' have"
"The abstraction (\x.e) takes"
"The section (+ x) expects"
"The function 'f' is applied to"
This is used to construct a message of form
The abstraction `\Just 1 -> ...' takes two arguments
but its type `Maybe a -> a' has only one
The equation(s) for `f' have two arguments
but its type `Maybe a -> a' has only one
The section `(f 3)' requires 'f' to take two arguments
but its type `Int -> Int' has only one
The function 'f' is applied to two arguments
but its type `Int -> Int' has only one
Note [matchExpectedFunTys]
~~~~~~~~~~~~~~~~~~~~~~~~~~
matchExpectedFunTys checks that a sigma has the form
of an n-ary function. It passes the decomposed type to the
thing_inside, and returns a wrapper to coerce between the two types
It's used wherever a language construct must have a functional type,
namely:
A lambda expression
A function definition
An operator section
-}
-- Use this one when you have an "expected" type.
matchExpectedFunTys :: SDoc         -- See Note [Herald for matchExpectedFunTys]
                    -> Arity
                    -> TcSigmaType  -- deeply skolemised
                    -> TcM (HsWrapper, [TcSigmaType], TcSigmaType)
-- If matchExpectedFunTys n ty = (wrap, [t1,..,tn], ty_r)
-- then wrap : (t1 -> ... -> tn -> ty_r) "->" ty

-- This function is always called with a deeply skolemised expected result
-- type. This means that matchActualFunTys will never actually instantiate,
-- and the returned HsWrapper will be reversible (that is, just a coercion).

-- So we just piggyback on matchActualFunTys. This is just a bit dodgy, but
-- it's much better than duplicating all the logic in matchActualFunTys.

-- To keep expected/actual working out properly, we tell matchActualFunTys
-- to swap the arguments to unifyType.
matchExpectedFunTys herald arity ty
  = ASSERT( is_deeply_skolemised ty )
    do { (wrap, arg_tys, res_ty)
           <- match_fun_tys True herald
                            (Shouldn'tHappenOrigin "matchExpectedFunTys")
                            arity ty [] arity
         -- match_fun_tys returns wrap :: ty "->" (t1 -> .. -> ty_r);
         -- here we need the reverse direction, hence symWrapper_maybe.
         -- It should always succeed given the precondition (see above);
         -- failing is a compiler bug, hence the panic.
       ; return $
         case symWrapper_maybe wrap of
           Just wrap' -> (wrap', arg_tys, res_ty)
           Nothing    -> pprPanic "matchExpectedFunTys" (ppr wrap $$ ppr ty) }
  where
    -- Debug-only sanity check of the precondition: a deeply skolemised
    -- type has no named (invisible) foralls to the right of any arrows.
    is_deeply_skolemised (TyVarTy {})    = True
    is_deeply_skolemised (AppTy {})      = True
    is_deeply_skolemised (TyConApp {})   = True
    is_deeply_skolemised (LitTy {})      = True
    is_deeply_skolemised (CastTy ty _)   = is_deeply_skolemised ty
    is_deeply_skolemised (CoercionTy {}) = True
    is_deeply_skolemised (ForAllTy (Anon _) res)  = is_deeply_skolemised res
    is_deeply_skolemised (ForAllTy (Named {}) _)  = False
-- | Decompose an /actual/ (inferred) type into the given number of
-- argument types plus a result type, instantiating as necessary.
matchActualFunTys :: SDoc  -- See Note [Herald for matchExpectedFunTys]
                  -> CtOrigin
                  -> Arity
                  -> TcSigmaType
                  -> TcM (HsWrapper, [TcSigmaType], TcSigmaType)
matchActualFunTys herald ct_orig arity ty
  = matchActualFunTysPart herald ct_orig arity ty no_prior_args arity
  where
    -- No arguments were consumed before this call, and the overall arity
    -- for error messages is just the requested arity.
    no_prior_args = []
-- | Variant of 'matchActualFunTys' that works when supplied only part
-- (that is, to the right of some arrows) of the full function type
matchActualFunTysPart :: SDoc  -- See Note [Herald for matchExpectedFunTys]
                      -> CtOrigin
                      -> Arity
                      -> TcSigmaType
                      -> [TcSigmaType]  -- reversed args. See (*) below.
                      -> Arity          -- overall arity of the function, for errs
                      -> TcM (HsWrapper, [TcSigmaType], TcSigmaType)
matchActualFunTysPart herald ct_orig arity ty old_args full_arity
  = match_fun_tys False herald ct_orig arity ty old_args full_arity
    -- False <=> do not swap unification arguments; we are matching
    -- an actual type, so expected/actual are already the right way round.
match_fun_tys :: Bool    -- True <=> swap the args when unifying,
                         -- for better expected/actual in error messages;
                         -- see comments with matchExpectedFunTys
              -> SDoc
              -> CtOrigin
              -> Arity
              -> TcSigmaType
              -> [TcSigmaType]
              -> Arity
              -> TcM (HsWrapper, [TcSigmaType], TcSigmaType)
match_fun_tys swap_tys herald ct_orig arity orig_ty orig_old_args full_arity
  = go arity orig_old_args orig_ty
    -- If matchActualFunTys n ty = (wrap, [t1,..,tn], ty_r)
    -- then wrap : ty "->" (t1 -> ... -> tn -> ty_r)
    --
    -- Does not allocate unnecessary meta variables: if the input already is
    -- a function, we just take it apart. Not only is this efficient,
    -- it's important for higher rank: the argument might be of form
    --              (forall a. ty) -> other
    -- If allocated (fresh-meta-var1 -> fresh-meta-var2) and unified, we'd
    -- hide the forall inside a meta-variable

    -- (*) Sometimes it's necessary to call matchActualFunTys with only part
    -- (that is, to the right of some arrows) of the type of the function in
    -- question. (See TcExpr.tcArgs.) This argument is the reversed list of
    -- arguments already seen (that is, not part of the TcSigmaType passed
    -- in elsewhere).
  where
    -- This function has a bizarre mechanic: it accumulates arguments on
    -- the way down and also builds an argument list on the way up. Why:
    -- 1. The returns args list and the accumulated args list might be different.
    --    The accumulated args include all the arg types for the function,
    --    including those from before this function was called. The returned
    --    list should include only those arguments produced by this call of
    --    matchActualFunTys
    --
    -- 2. The HsWrapper can be built only on the way up. It seems (more)
    --    bizarre to build the HsWrapper but not the arg_tys.
    --
    -- Refactoring is welcome.
    go :: Arity
       -> [TcSigmaType] -- accumulator of arguments (reversed)
       -> TcSigmaType   -- the remainder of the type as we're processing
       -> TcM (HsWrapper, [TcSigmaType], TcSigmaType)
    go 0 _ ty = return (idHsWrapper, [], ty)

    -- Leading foralls/contexts: instantiate first, then keep going.
    go n acc_args ty
      | not (null tvs && null theta)
      = do { (wrap1, rho) <- topInstantiate ct_orig ty
           ; (wrap2, arg_tys, res_ty) <- go n acc_args rho
           ; return (wrap2 <.> wrap1, arg_tys, res_ty) }
      where
        (tvs, theta, _) = tcSplitSigmaTy ty

    -- Look through type synonyms.
    go n acc_args ty
      | Just ty' <- coreView ty = go n acc_args ty'

    -- A visible arrow: peel off one argument and recurse on the result.
    go n acc_args (ForAllTy (Anon arg_ty) res_ty)
      = ASSERT( not (isPredTy arg_ty) )
        do { (wrap_res, tys, ty_r) <- go (n-1) (arg_ty : acc_args) res_ty
           ; return ( mkWpFun idHsWrapper wrap_res arg_ty (mkFunTys tys ty_r)
                    , arg_ty:tys, ty_r ) }

    -- A filled meta-tyvar: follow the indirection; a flexi one: defer to
    -- unification against a fresh (t1 -> .. -> tn -> r) template.
    go n acc_args ty@(TyVarTy tv)
      | ASSERT( isTcTyVar tv) isMetaTyVar tv
      = do { cts <- readMetaTyVar tv
           ; case cts of
               Indirect ty' -> go n acc_args ty'
               Flexi        -> defer n ty (isReturnTyVar tv) }

    -- In all other cases we bale out into ordinary unification
    -- However unlike the meta-tyvar case, we are sure that the
    -- number of arguments doesn't match arity of the original
    -- type, so we can add a bit more context to the error message
    -- (cf Trac #7869).
    --
    -- It is not always an error, because specialized type may have
    -- different arity, for example:
    --
    -- > f1 = f2 'a'
    -- > f2 :: Monad m => m Bool
    -- > f2 = undefined
    --
    -- But in that case we add specialized type into error context
    -- anyway, because it may be useful. See also Trac #9605.
    go n acc_args ty = addErrCtxtM (mk_ctxt (reverse acc_args) ty) $
                       defer n ty False

    ------------
    -- If we decide that a ReturnTv (see Note [ReturnTv] in TcType) should
    -- really be a function type, then we need to allow the
    -- result types also to be a ReturnTv.
    defer n fun_ty is_return
      = do { arg_tys <- replicateM n new_flexi
           ; res_ty  <- new_flexi
           ; let unif_fun_ty = mkFunTys arg_tys res_ty
           ; co <- if swap_tys
                   then mkTcSymCo <$> unifyType noThing unif_fun_ty fun_ty
                   else unifyType noThing fun_ty unif_fun_ty
           ; return (mkWpCastN co, arg_tys, res_ty) }
      where
        -- preserve ReturnTv-ness
        new_flexi :: TcM TcType
        new_flexi | is_return = (mkTyVarTy . fst) <$> newOpenReturnTyVar
                  | otherwise = newOpenFlexiTyVarTy

    ------------
    -- Build the arity-mismatch error context, tidying and zonking the
    -- reconstructed function type for display.
    mk_ctxt :: [TcSigmaType] -> TcSigmaType -> TidyEnv -> TcM (TidyEnv, MsgDoc)
    mk_ctxt arg_tys res_ty env
      = do { let ty = mkFunTys arg_tys res_ty
           ; (env1, zonked) <- zonkTidyTcType env ty
             -- zonking might change # of args
           ; let (zonked_args, _) = tcSplitFunTys zonked
                 n_actual         = length zonked_args
                 (env2, unzonked) = tidyOpenType env1 ty
           ; return (env2, mk_msg unzonked zonked n_actual) }

    mk_msg full_ty ty n_args
      = herald <+> speakNOf full_arity (text "argument") <> comma $$
        if n_args == full_arity
          then text "its type is" <+> quotes (pprType full_ty) <>
               comma $$
               text "it is specialized to" <+> quotes (pprType ty)
          else sep [text "but its type" <+> quotes (pprType ty),
                    if n_args == 0 then text "has none"
                    else text "has only" <+> speakN n_args]
----------------------
-- | Decompose a type that should be a list, giving back the element type
-- together with a coercion witnessing @[elt] ~N exp_ty@.
matchExpectedListTy :: TcRhoType -> TcM (TcCoercionN, TcRhoType)
-- Special case for lists
matchExpectedListTy exp_ty = do
  { (co, [elem_ty]) <- matchExpectedTyConApp listTyCon exp_ty
    -- listTyCon takes exactly one argument, so this pattern is total
  ; return (co, elem_ty) }
----------------------
-- | Decompose a type that should be a parallel array, giving back the
-- element type together with a coercion to the original type.
matchExpectedPArrTy :: TcRhoType -> TcM (TcCoercionN, TcRhoType)
-- Special case for parrs
matchExpectedPArrTy exp_ty = do
  { (co, [elem_ty]) <- matchExpectedTyConApp parrTyCon exp_ty
    -- parrTyCon takes exactly one argument, so this pattern is total
  ; return (co, elem_ty) }
---------------------
matchExpectedTyConApp :: TyCon                -- T :: forall kv1 ... kvm. k1 -> ... -> kn -> *
                      -> TcRhoType            -- orig_ty
                      -> TcM (TcCoercionN,    -- T k1 k2 k3 a b c ~N orig_ty
                              [TcSigmaType])  -- Element types, k1 k2 k3 a b c

-- It's used for wired-in tycons, so we call checkWiredInTyCon
-- Precondition: never called with FunTyCon
-- Precondition: input type :: *
-- Postcondition: (T k1 k2 k3 a b c) is well-kinded
matchExpectedTyConApp tc orig_ty
  = go orig_ty
  where
    -- Look through synonyms
    go ty
      | Just ty' <- coreView ty
      = go ty'

    go ty@(TyConApp tycon args)
      | tc == tycon  -- Common case
      = return (mkTcNomReflCo ty, args)

    -- Filled meta-tyvars are followed; flexi ones fall to 'defer'
    go (TyVarTy tv)
      | ASSERT( isTcTyVar tv) isMetaTyVar tv
      = do { cts <- readMetaTyVar tv
           ; case cts of
               Indirect ty -> go ty
               Flexi       -> defer }

    go _ = defer

    -- If the common case does not occur, instantiate a template
    -- T k1 .. kn t1 .. tm, and unify with the original type
    -- Doing it this way ensures that the types we return are
    -- kind-compatible with T. For example, suppose we have
    --       matchExpectedTyConApp T (f Maybe)
    -- where data T a = MkT a
    -- Then we don't want to instantiate T's data constructors with
    --    (a::*) ~ Maybe
    -- because that'll make types that are utterly ill-kinded.
    -- This happened in Trac #7368
    defer
      = ASSERT2( classifiesTypeWithValues res_kind, ppr tc )
        do { (k_subst, kvs') <- newMetaTyVars kvs
           ; let arg_kinds' = substTys k_subst arg_kinds
                 kappa_tys  = mkTyVarTys kvs'
           ; tau_tys <- mapM newFlexiTyVarTy arg_kinds'
           ; co <- unifyType noThing (mkTyConApp tc (kappa_tys ++ tau_tys)) orig_ty
           ; return (co, kappa_tys ++ tau_tys) }

    -- Split T's kind into its binders (kind vars + visible arg kinds)
    -- and result kind, used by 'defer' above.
    (bndrs, res_kind) = splitPiTys (tyConKind tc)
    (kvs, arg_kinds)  = partitionBinders bndrs
----------------------
matchExpectedAppTy :: TcRhoType                         -- orig_ty
                   -> TcM (TcCoercion,                  -- m a ~N orig_ty
                           (TcSigmaType, TcSigmaType))  -- Returns m, a
-- If the incoming type is a mutable type variable of kind k, then
-- matchExpectedAppTy returns a new type variable (m: * -> k); note the *.
matchExpectedAppTy orig_ty
  = go orig_ty
  where
    -- Look through synonyms; split directly if it's already an application
    go ty
      | Just ty' <- coreView ty = go ty'

      | Just (fun_ty, arg_ty) <- tcSplitAppTy_maybe ty
      = return (mkTcNomReflCo orig_ty, (fun_ty, arg_ty))

    -- Filled meta-tyvars are followed; flexi ones fall to 'defer'
    go (TyVarTy tv)
      | ASSERT( isTcTyVar tv) isMetaTyVar tv
      = do { cts <- readMetaTyVar tv
           ; case cts of
               Indirect ty -> go ty
               Flexi       -> defer }

    go _ = defer

    -- Defer splitting by generating an equality constraint
    defer
      = do { ty1 <- newFlexiTyVarTy kind1
           ; ty2 <- newFlexiTyVarTy kind2
           ; co <- unifyType noThing (mkAppTy ty1 ty2) orig_ty
           ; return (co, (ty1, ty2)) }

    orig_kind = typeKind orig_ty
    kind1 = mkFunTy liftedTypeKind orig_kind
    kind2 = liftedTypeKind    -- m :: * -> k
                              -- arg type :: *
{-
************************************************************************
* *
Subsumption checking
* *
************************************************************************
Note [Subsumption checking: tcSubType]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
All the tcSubType calls have the form
tcSubType actual_ty expected_ty
which checks
actual_ty <= expected_ty
That is, that a value of type actual_ty is acceptable in
a place expecting a value of type expected_ty. I.e. that
actual ty is more polymorphic than expected_ty
It returns a coercion function
co_fn :: actual_ty ~ expected_ty
which takes an HsExpr of type actual_ty into one of type
expected_ty.
These functions do not actually check for subsumption. They check if
expected_ty is an appropriate annotation to use for something of type
actual_ty. This difference matters when thinking about visible type
application. For example,
forall a. a -> forall b. b -> b
DOES NOT SUBSUME
forall a b. a -> b -> b
because the type arguments appear in a different order. (Neither does
it work the other way around.) BUT, these types are appropriate annotations
for one another. Because the user directs annotations, it's OK if some
arguments shuffle around -- after all, it's what the user wants.
Bottom line: none of this changes with visible type application.
There are a number of wrinkles (below).
Notice that Wrinkle 1 and 2 both require eta-expansion, which technically
may increase termination. We just put up with this, in exchange for getting
more predictable type inference.
Wrinkle 1: Note [Deep skolemisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want (forall a. Int -> a -> a) <= (Int -> forall a. a->a)
(see section 4.6 of "Practical type inference for higher rank types")
So we must deeply-skolemise the RHS before we instantiate the LHS.
That is why tc_sub_type starts with a call to tcSkolemise (which does the
deep skolemisation), and then calls the DS variant (which assumes
that expected_ty is deeply skolemised)
Wrinkle 2: Note [Co/contra-variance of subsumption checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider g :: (Int -> Int) -> Int
f1 :: (forall a. a -> a) -> Int
f1 = g
f2 :: (forall a. a -> a) -> Int
f2 x = g x
f2 will typecheck, and it would be odd/fragile if f1 did not.
But f1 will only typecheck if we have that
(Int->Int) -> Int <= (forall a. a->a) -> Int
And that is only true if we do the full co/contravariant thing
in the subsumption check. That happens in the FunTy case of
tcSubTypeDS_NC_O, and is the sole reason for the WpFun form of
HsWrapper.
Another powerful reason for doing this co/contra stuff is visible
in Trac #9569, involving instantiation of constraint variables,
and again involving eta-expansion.
Wrinkle 3: Note [Higher rank types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider tc150:
f y = \ (x::forall a. a->a). blah
The following happens:
* We will infer the type of the RHS, ie with a res_ty = alpha.
* Then the lambda will split alpha := beta -> gamma.
* And then we'll check tcSubType IsSwapped beta (forall a. a->a)
So it's important that we unify beta := forall a. a->a, rather than
skolemising the type.
-}
-- | Call this variant when you are in a higher-rank situation and
-- you know the right-hand type is deeply skolemised.
tcSubTypeHR :: Outputable a
            => CtOrigin    -- ^ of the actual type
            -> Maybe a     -- ^ If present, it has type ty_actual
            -> TcSigmaType -> TcRhoType -> TcM HsWrapper
tcSubTypeHR orig mb_thing ty_actual ty_expected
  = tcSubTypeDS_NC_O orig GenSigCtxt mb_thing ty_actual ty_expected
tcSubType :: Outputable a
          => UserTypeCtxt -> Maybe a  -- ^ If present, it has type ty_actual
          -> TcSigmaType -> TcSigmaType -> TcM HsWrapper
-- Checks that actual <= expected
-- Returns HsWrapper :: actual ~ expected
tcSubType ctxt mb_thing act_ty exp_ty
  = addSubTypeCtxt act_ty exp_ty $ do
      { traceTc "tcSubType" $
          vcat [ pprUserTypeCtxt ctxt
               , ppr mb_thing
               , ppr act_ty
               , ppr exp_ty ]
      ; tc_sub_type origin origin ctxt act_ty exp_ty }
  where
    -- The same origin serves for both unification and instantiation.
    origin = TypeEqOrigin { uo_actual   = act_ty
                          , uo_expected = exp_ty
                          , uo_thing    = mkErrorThing <$> mb_thing }
tcSubTypeDS :: Outputable a => UserTypeCtxt -> Maybe a  -- ^ has type ty_actual
            -> TcSigmaType -> TcRhoType -> TcM HsWrapper
-- Just like tcSubType, but with the additional precondition that
-- ty_expected is deeply skolemised (hence "DS")
tcSubTypeDS ctxt m_expr act_ty exp_ty
  = addSubTypeCtxt act_ty exp_ty (tcSubTypeDS_NC ctxt m_expr act_ty exp_ty)
-- | Like 'tcSubTypeDS', but takes a 'CtOrigin' to use when instantiating
-- the "actual" type
tcSubTypeDS_O :: Outputable a
              => CtOrigin -> UserTypeCtxt
              -> Maybe a -> TcSigmaType -> TcRhoType
              -> TcM HsWrapper
tcSubTypeDS_O orig ctxt mb_thing act_ty exp_ty
  = addSubTypeCtxt act_ty exp_ty $ do
      { traceTc "tcSubTypeDS_O" $
          vcat [ pprCtOrigin orig
               , pprUserTypeCtxt ctxt
               , ppr act_ty
               , ppr exp_ty ]
      ; tcSubTypeDS_NC_O orig ctxt mb_thing act_ty exp_ty }
addSubTypeCtxt :: TcType -> TcType -> TcM a -> TcM a
addSubTypeCtxt ty_actual ty_expected thing_inside
  | isRhoTy ty_actual      -- If there is no polymorphism involved, the
  , isRhoTy ty_expected    -- TypeEqOrigin stuff (added by the _NC functions)
  = thing_inside           -- gives enough context by itself
  | otherwise
  = addErrCtxtM mk_msg thing_inside
  where
    -- Thread the tidy-env through both zonks explicitly (the second zonk
    -- must use the environment produced by the first).
    mk_msg tidy_env
      = do { (env1, act') <- zonkTidyTcType tidy_env ty_actual
           ; (env2, exp') <- zonkTidyTcType env1 ty_expected
           ; let msg = vcat [ hang (text "When checking that:")
                                 4 (ppr act')
                            , nest 2 (hang (text "is more polymorphic than:")
                                         2 (ppr exp')) ]
           ; return (env2, msg) }
---------------
-- The "_NC" variants do not add a typechecker-error context;
-- the caller is assumed to do that
tcSubType_NC :: UserTypeCtxt -> TcSigmaType -> TcSigmaType -> TcM HsWrapper
tcSubType_NC ctxt act_ty exp_ty = do
  { traceTc "tcSubType_NC" (vcat [pprUserTypeCtxt ctxt, ppr act_ty, ppr exp_ty])
  ; tc_sub_type origin origin ctxt act_ty exp_ty }
  where
    -- No "thing" available here, so uo_thing is Nothing.
    origin = TypeEqOrigin { uo_actual   = act_ty
                          , uo_expected = exp_ty
                          , uo_thing    = Nothing }
tcSubTypeDS_NC :: Outputable a
               => UserTypeCtxt
               -> Maybe a  -- ^ If present, this has type ty_actual
               -> TcSigmaType -> TcRhoType -> TcM HsWrapper
tcSubTypeDS_NC ctxt mb_thing act_ty exp_ty = do
  { traceTc "tcSubTypeDS_NC" (vcat [pprUserTypeCtxt ctxt, ppr act_ty, ppr exp_ty])
  ; tcSubTypeDS_NC_O origin ctxt mb_thing act_ty exp_ty }
  where
    origin = TypeEqOrigin { uo_actual   = act_ty
                          , uo_expected = exp_ty
                          , uo_thing    = mkErrorThing <$> mb_thing }
tcSubTypeDS_NC_O :: Outputable a
                 => CtOrigin  -- origin used for instantiation only
                 -> UserTypeCtxt
                 -> Maybe a
                 -> TcSigmaType -> TcRhoType -> TcM HsWrapper
-- Just like tcSubType, but with the additional precondition that
-- ty_expected is deeply skolemised
tcSubTypeDS_NC_O inst_orig ctxt m_thing act_ty exp_ty
  = tc_sub_type_ds eq_orig inst_orig ctxt act_ty exp_ty
  where
    -- A fresh equality origin; instantiation uses the caller's origin.
    eq_orig = TypeEqOrigin { uo_actual   = act_ty
                           , uo_expected = exp_ty
                           , uo_thing    = mkErrorThing <$> m_thing }
---------------
tc_sub_type :: CtOrigin   -- origin used when calling uType
            -> CtOrigin   -- origin used when instantiating
            -> UserTypeCtxt -> TcSigmaType -> TcSigmaType -> TcM HsWrapper
tc_sub_type eq_orig inst_orig ctxt ty_actual ty_expected
  -- An actual type that is a bare tyvar: follow it if filled, otherwise
  -- go straight to unification rather than skolemising.
  | Just tv_actual <- tcGetTyVar_maybe ty_actual -- See Note [Higher rank types]
  = do { lookup_res <- lookupTcTyVar tv_actual
       ; case lookup_res of
           Filled ty_actual' -> tc_sub_type eq_orig inst_orig
                                            ctxt ty_actual' ty_expected

             -- It's tempting to see if tv_actual can unify with a polytype
             -- and, if so, call uType; otherwise, skolemise first. But this
             -- is wrong, because skolemising will bump the TcLevel and the
             -- unification will fail anyway.
             -- It's also tempting to call uUnfilledVar directly, but calling
             -- uType seems safer in the presence of possible refactoring
             -- later.
           Unfilled _        -> mkWpCastN <$>
                                uType eq_orig TypeLevel ty_actual ty_expected }

  | otherwise  -- See Note [Deep skolemisation]
  = do { (sk_wrap, inner_wrap) <- tcSkolemise ctxt ty_expected $
                                  \ _ sk_rho ->
                                  tc_sub_type_ds eq_orig inst_orig ctxt
                                                 ty_actual sk_rho
       ; return (sk_wrap <.> inner_wrap) }
---------------
tc_sub_type_ds :: CtOrigin   -- used when calling uType
               -> CtOrigin   -- used when instantiating
               -> UserTypeCtxt -> TcSigmaType -> TcRhoType -> TcM HsWrapper
-- Just like tcSubType, but with the additional precondition that
-- ty_expected is deeply skolemised
tc_sub_type_ds eq_orig inst_orig ctxt ty_actual ty_expected
  = go ty_actual ty_expected
  where
    -- Look through synonyms on either side first.
    go ty_a ty_e | Just ty_a' <- coreView ty_a = go ty_a' ty_e
                 | Just ty_e' <- coreView ty_e = go ty_a ty_e'

    -- Actual side is a tyvar: follow if filled, else unify.
    go (TyVarTy tv_a) ty_e
      = do { lookup_res <- lookupTcTyVar tv_a
           ; case lookup_res of
               Filled ty_a' ->
                 do { traceTc "tcSubTypeDS_NC_O following filled act meta-tyvar:"
                        (ppr tv_a <+> text "-->" <+> ppr ty_a')
                    ; tc_sub_type_ds eq_orig inst_orig ctxt ty_a' ty_e }
               Unfilled _   -> unify }

    -- Expected side is a tyvar: follow if filled; if unfilled, either
    -- unify directly (touchable, may hold a polytype) or instantiate
    -- the actual type first.
    go ty_a (TyVarTy tv_e)
      = do { dflags <- getDynFlags
           ; tclvl  <- getTcLevel
           ; lookup_res <- lookupTcTyVar tv_e
           ; case lookup_res of
               Filled ty_e' ->
                 do { traceTc "tcSubTypeDS_NC_O following filled exp meta-tyvar:"
                        (ppr tv_e <+> text "-->" <+> ppr ty_e')
                    ; tc_sub_type eq_orig inst_orig ctxt ty_a ty_e' }
               Unfilled details
                 |  canUnifyWithPolyType dflags details
                    && isTouchableMetaTyVar tclvl tv_e  -- don't want skolems here
                 -> unify

                 -- We've avoided instantiating ty_actual just in case ty_expected is
                 -- polymorphic. But we've now assiduously determined that it is *not*
                 -- polymorphic. So instantiate away. This is needed for e.g. test
                 -- typecheck/should_compile/T4284.
                 | otherwise
                 -> inst_and_unify }

    -- Arrow on both sides: co/contra-variant recursion.
    go (ForAllTy (Anon act_arg) act_res) (ForAllTy (Anon exp_arg) exp_res)
      | not (isPredTy act_arg)
      , not (isPredTy exp_arg)
      = -- See Note [Co/contra-variance of subsumption checking]
        do { res_wrap <- tc_sub_type_ds eq_orig inst_orig ctxt act_res exp_res
           ; arg_wrap
               <- tc_sub_type eq_orig (GivenOrigin (SigSkol GenSigCtxt exp_arg))
                              ctxt exp_arg act_arg
           ; return (mkWpFun arg_wrap res_wrap exp_arg exp_res) }
             -- arg_wrap :: exp_arg ~ act_arg
             -- res_wrap :: act-res ~ exp_res

    -- Actual side is a sigma-type: instantiate its top-level quantifiers,
    -- then continue; otherwise fall back to plain unification.
    go ty_a ty_e
      | let (tvs, theta, _) = tcSplitSigmaTy ty_a
      , not (null tvs && null theta)
      = do { (in_wrap, in_rho) <- topInstantiate inst_orig ty_a
           ; body_wrap <- tcSubTypeDS_NC_O inst_orig ctxt noThing in_rho ty_e
           ; return (body_wrap <.> in_wrap) }

      | otherwise   -- Revert to unification
      = inst_and_unify
        -- It's still possible that ty_actual has nested foralls. Instantiate
        -- these, as there's no way unification will succeed with them in.
        -- See typecheck/should_compile/T11305 for an example of when this
        -- is important. The problem is that we're checking something like
        --  a -> forall b. b -> b     <=   alpha beta gamma
        -- where we end up with alpha := (->)

    inst_and_unify = do { (wrap, rho_a) <- deeplyInstantiate inst_orig ty_actual

                          -- if we haven't recurred through an arrow, then
                          -- the eq_orig will list ty_actual. In this case,
                          -- we want to update the origin to reflect the
                          -- instantiation. If we *have* recurred through
                          -- an arrow, it's better not to update.
                        ; let eq_orig' = case eq_orig of
                                TypeEqOrigin { uo_actual = orig_ty_actual }
                                  |  orig_ty_actual `tcEqType` ty_actual
                                  ,  not (isIdHsWrapper wrap)
                                  -> eq_orig { uo_actual = rho_a }
                                _ -> eq_orig

                        ; cow <- uType eq_orig' TypeLevel rho_a ty_expected
                        ; return (mkWpCastN cow <.> wrap) }

    -- use versions without synonyms expanded
    unify = mkWpCastN <$> uType eq_orig TypeLevel ty_actual ty_expected
-----------------
-- needs both un-type-checked (for origins) and type-checked (for wrapping)
-- expressions
tcWrapResult :: HsExpr Name -> HsExpr TcId -> TcSigmaType -> TcRhoType
             -> TcM (HsExpr TcId)
tcWrapResult rn_expr tc_expr actual_ty res_ty
  = tcWrapResultO (exprCtOrigin rn_expr) tc_expr actual_ty res_ty
-- | Sometimes we don't have a @HsExpr Name@ to hand, and this is more
-- convenient.
tcWrapResultO :: CtOrigin -> HsExpr TcId -> TcSigmaType -> TcRhoType
              -> TcM (HsExpr TcId)
tcWrapResultO orig expr actual_ty res_ty
  = do { traceTc "tcWrapResult" $
           vcat [ text "Actual: " <+> ppr actual_ty
                , text "Expected:" <+> ppr res_ty ]
       ; wrap <- tcSubTypeDS_NC_O orig GenSigCtxt (Just expr) actual_ty res_ty
       ; return (mkHsWrap wrap expr) }
-----------------------------------
-- | Lift a wrapper over a result type to a wrapper over the whole
-- function type, by lambda-binding the arguments and re-applying them.
wrapFunResCoercion
  :: [TcType]    -- Type of args
  -> HsWrapper   -- HsExpr a -> HsExpr b
  -> TcM HsWrapper -- HsExpr (arg_tys -> a) -> HsExpr (arg_tys -> b)
wrapFunResCoercion arg_tys co_fn_res
  | isIdHsWrapper co_fn_res = return idHsWrapper  -- identity lifts to identity
  | null arg_tys            = return co_fn_res    -- nothing to wrap over
  | otherwise
  = do { arg_ids <- newSysLocalIds (fsLit "sub") arg_tys
       ; return (mkWpLams arg_ids <.> co_fn_res <.> mkWpEvVarApps arg_ids) }
-----------------------------------
-- | Infer a type using a type "checking" function by passing in a ReturnTv,
-- which can unify with *anything*. See also Note [ReturnTv] in TcType
tcInfer :: (TcType -> TcM a) -> TcM (a, TcType)
tcInfer tc_check = do
  { (ret_tv, ret_kind) <- newOpenReturnTyVar
  ; res     <- tc_check (mkTyVarTy ret_tv)
  ; details <- readMetaTyVar ret_tv
  ; res_ty  <- inferred_ty ret_tv ret_kind details
  ; return (res, res_ty) }
  where
    -- Checking filled in the ReturnTv: just read it off.
    inferred_ty _ _ (Indirect ty) = return ty
    -- Checking was uninformative: default the ReturnTv to a fresh TauTv.
    inferred_ty ret_tv ret_kind Flexi
      = do { traceTc "Defaulting un-filled ReturnTv to a TauTv" (ppr ret_tv)
           ; tau_ty <- newFlexiTyVarTy ret_kind
           ; writeMetaTyVar ret_tv tau_ty
           ; return tau_ty }
{-
************************************************************************
* *
\subsection{Generalisation}
* *
************************************************************************
-}
-- | Take an "expected type" and strip off quantifiers to expose the
-- type underneath, binding the new skolems for the @thing_inside@.
-- The returned 'HsWrapper' has type @specific_ty -> expected_ty@.
tcSkolemise :: UserTypeCtxt -> TcSigmaType
            -> ([TcTyVar] -> TcType -> TcM result)
               -- ^ thing_inside is passed only the *type* variables, not
               -- *coercion* variables. They are only ever used for scoped type
               -- variables.
            -> TcM (HsWrapper, result)
               -- ^ The expression has type: spec_ty -> expected_ty
tcSkolemise ctxt expected_ty thing_inside
  -- We expect expected_ty to be a forall-type
  -- If not, the call is a no-op
  = do { traceTc "tcSkolemise" Outputable.empty
       ; (wrap, tvs', given, rho') <- deeplySkolemise expected_ty

       ; lvl <- getTcLevel
       ; when debugIsOn $
         traceTc "tcSkolemise" $ vcat [
           ppr lvl,
           text "expected_ty" <+> ppr expected_ty,
           text "inst tyvars" <+> ppr tvs',
           text "given"       <+> ppr given,
           text "inst type"   <+> ppr rho' ]

       -- Generally we must check that the "forall_tvs" haven't been constrained
       -- The interesting bit here is that we must include the free variables
       -- of the expected_ty.  Here's an example:
       --       runST (newVar True)
       -- Here, if we don't make a check, we'll get a type (ST s (MutVar s Bool))
       -- for (newVar True), with s fresh.  Then we unify with the runST's arg type
       -- forall s'. ST s' a. That unifies s' with s, and a with MutVar s Bool.
       -- So now s' isn't unconstrained because it's linked to a.
       --
       -- However [Oct 10] now that the untouchables are a range of
       -- TcTyVars, all this is handled automatically with no need for
       -- extra faffing around

       -- Use the *instantiated* type in the SkolemInfo
       -- so that the names of displayed type variables line up
       ; let skol_info = SigSkol ctxt (mkFunTys (map varType given) rho')

       ; (ev_binds, result) <- checkConstraints skol_info tvs' given $
                               thing_inside tvs' rho'

       ; return (wrap <.> mkWpLet ev_binds, result) }
         -- The ev_binds returned by checkConstraints is very
         -- often empty, in which case mkWpLet is a no-op
-- | Run @thing_inside@, capture its constraints in an implication built
-- from the given skolems and givens, and emit that implication.
checkConstraints :: SkolemInfo
                 -> [TcTyVar]  -- Skolems
                 -> [EvVar]    -- Given
                 -> TcM result
                 -> TcM (TcEvBinds, result)
checkConstraints skol_info skol_tvs given thing_inside = do
  { (implics, binds, res) <- buildImplication skol_info skol_tvs given thing_inside
  ; emitImplications implics
  ; return (binds, res) }
buildImplication :: SkolemInfo
                 -> [TcTyVar]  -- Skolems
                 -> [EvVar]    -- Given
                 -> TcM result
                 -> TcM (Bag Implication, TcEvBinds, result)
buildImplication skol_info skol_tvs given thing_inside
  = do { tc_lvl <- getTcLevel
       ; deferred_type_errors <- goptM Opt_DeferTypeErrors <||>
                                 goptM Opt_DeferTypedHoles
         -- Fast path only when there is nothing to quantify over AND we
         -- are sure no constraint needs an EvBindsVar (no deferral, or
         -- not at top level); see the comment below.
       ; if null skol_tvs && null given && (not deferred_type_errors ||
                                            not (isTopTcLevel tc_lvl))
         then do { res <- thing_inside
                 ; return (emptyBag, emptyTcEvBinds, res) }
            -- Fast path. We check every function argument with
            -- tcPolyExpr, which uses tcSkolemise and hence checkConstraints.
            -- But with the solver producing unlifted equalities, we need
            -- to have an EvBindsVar for them when they might be deferred to
            -- runtime. Otherwise, they end up as top-level unlifted bindings,
            -- which are verboten. See also Note [Deferred errors for coercion holes]
            -- in TcErrors.
         else
           do { (tclvl, wanted, result) <- pushLevelAndCaptureConstraints thing_inside
              ; (implics, ev_binds) <- buildImplicationFor tclvl skol_info skol_tvs given wanted
              ; return (implics, ev_binds, result) }}
buildImplicationFor :: TcLevel -> SkolemInfo -> [TcTyVar]
                    -> [EvVar] -> WantedConstraints
                    -> TcM (Bag Implication, TcEvBinds)
buildImplicationFor tclvl skol_info skol_tvs given wanted
  | isEmptyWC wanted && null given
    -- Optimisation : if there are no wanteds, and no givens
    -- don't generate an implication at all.
    -- Reason for the (null given): we don't want to lose
    -- the "inaccessible alternative" error check
  = return (emptyBag, emptyTcEvBinds)

  | otherwise
  = ASSERT2( all isTcTyVar skol_tvs, ppr skol_tvs )
    ASSERT2( all isSkolemTyVar skol_tvs, ppr skol_tvs )
    do { ev_binds_var <- newTcEvBinds
       ; env <- getLclEnv
         -- Build a single as-yet-unsolved implication wrapping the
         -- wanteds under the skolems and givens.
       ; let implic = Implic { ic_tclvl  = tclvl
                             , ic_skols  = skol_tvs
                             , ic_no_eqs = False
                             , ic_given  = given
                             , ic_wanted = wanted
                             , ic_status = IC_Unsolved
                             , ic_binds  = Just ev_binds_var
                             , ic_env    = env
                             , ic_info   = skol_info }

       ; return (unitBag implic, TcEvBinds ev_binds_var) }
{-
************************************************************************
* *
Boxy unification
* *
************************************************************************
The exported functions are all defined as versions of some
non-exported generic functions.
-}
-- | Unify two types, discarding a resultant coercion. Any constraints
-- generated will still need to be solved, however.
unifyType_ :: Outputable a => Maybe a  -- ^ If present, has type 'ty1'
           -> TcTauType -> TcTauType -> TcM ()
unifyType_ mb_thing ty1 ty2
  = do { _ <- unifyType mb_thing ty1 ty2
       ; return () }
unifyType :: Outputable a => Maybe a  -- ^ If present, has type 'ty1'
          -> TcTauType -> TcTauType -> TcM TcCoercion
-- Actual and expected types
-- Returns a coercion : ty1 ~ ty2
unifyType mb_thing ty1 ty2 = uType origin TypeLevel ty1 ty2
  where
    origin = TypeEqOrigin
      { uo_actual   = ty1
      , uo_expected = ty2
      , uo_thing    = mkErrorThing <$> mb_thing }
-- | Use this instead of 'Nothing' when calling 'unifyType' without
-- a good "thing" (where the "thing" has the "actual" type passed in)
-- This has an 'Outputable' instance, avoiding ambiguity problems.
noThing :: Maybe (HsExpr Name)
noThing = Nothing
-- | Kind-level analogue of 'unifyType'; returns a coercion @k1 ~ k2@.
unifyKind :: Outputable a => Maybe a -> TcKind -> TcKind -> TcM Coercion
unifyKind mb_thing k1 k2 = uType origin KindLevel k1 k2
  where
    origin = TypeEqOrigin
      { uo_actual   = k1
      , uo_expected = k2
      , uo_thing    = mkErrorThing <$> mb_thing }
---------------
unifyPred :: PredType -> PredType -> TcM TcCoercion
-- Actual and expected types
unifyPred p1 p2 = unifyType noThing p1 p2
---------------
unifyTheta :: TcThetaType -> TcThetaType -> TcM [TcCoercion]
-- Actual and expected types
-- Unify two contexts pointwise; it is a checked error for the two
-- contexts to have different lengths.
unifyTheta theta1 theta2
  = do { checkTc (equalLength theta1 theta2)
                 (vcat [text "Contexts differ in length",
                        nest 2 $ parens $ text "Use RelaxedPolyRec to allow this"])
       ; zipWithM unifyPred theta1 theta2 }
{-
%************************************************************************
%* *
uType and friends
%* *
%************************************************************************
uType is the heart of the unifier.
-}
------------
uType, uType_defer
  :: CtOrigin
  -> TypeOrKind
  -> TcType          -- ty1 is the *actual* type
  -> TcType          -- ty2 is the *expected* type
  -> TcM Coercion
--------------
-- It is always safe to defer unification to the main constraint solver
-- See Note [Deferred unification]
uType_defer origin t_or_k ty1 ty2
  = do { hole <- newCoercionHole
       ; loc <- getCtLocM origin (Just t_or_k)
         -- Emit a wanted (ty1 ~# ty2); the solver will fill the fresh
         -- coercion hole with the evidence once it solves the constraint.
       ; emitSimple $ mkNonCanonical $
             CtWanted { ctev_dest = HoleDest hole
                      , ctev_pred = mkPrimEqPred ty1 ty2
                      , ctev_loc = loc }
       -- Error trace only
       -- NB. do *not* call mkErrInfo unless tracing is on, because
       -- it is hugely expensive (#5631)
       ; whenDOptM Opt_D_dump_tc_trace $ do
           { ctxt <- getErrCtxt
           ; doc <- mkErrInfo emptyTidyEnv ctxt
           ; traceTc "utype_defer" (vcat [ppr hole, ppr ty1,
                                          ppr ty2, pprCtOrigin origin, doc])
           }
       ; return (mkHoleCo hole Nominal ty1 ty2) }
--------------
uType origin t_or_k orig_ty1 orig_ty2
  = do { tclvl <- getTcLevel
       ; traceTc "u_tys " $ vcat
             [ text "tclvl" <+> ppr tclvl
             , sep [ ppr orig_ty1, text "~", ppr orig_ty2]
             , pprCtOrigin origin]
       ; co <- go orig_ty1 orig_ty2
       ; if isReflCo co
            then traceTc "u_tys yields no coercion" Outputable.empty
            else traceTc "u_tys yields coercion:" (ppr co)
       ; return co }
  where
    go :: TcType -> TcType -> TcM Coercion
        -- The arguments to 'go' are always semantically identical
        -- to orig_ty{1,2} except for looking through type synonyms
        -- NB: equation order below is significant; each case relies on
        -- the earlier ones having not matched.

    -- Variables; go for uVar
    -- Note that we pass in *original* (before synonym expansion),
    -- so that type variables tend to get filled in with
    -- the most informative version of the type
    go (TyVarTy tv1) ty2
      = do { lookup_res <- lookupTcTyVar tv1
           ; case lookup_res of
               Filled ty1   -> do { traceTc "found filled tyvar" (ppr tv1 <+> text ":->" <+> ppr ty1)
                                  ; go ty1 ty2 }
               Unfilled ds1 -> uUnfilledVar origin t_or_k NotSwapped tv1 ds1 ty2 }
    go ty1 (TyVarTy tv2)
      = do { lookup_res <- lookupTcTyVar tv2
           ; case lookup_res of
               Filled ty2   -> do { traceTc "found filled tyvar" (ppr tv2 <+> text ":->" <+> ppr ty2)
                                  ; go ty1 ty2 }
               Unfilled ds2 -> uUnfilledVar origin t_or_k IsSwapped tv2 ds2 ty1 }

    -- Nullary tycons: checked *before* synonym expansion.
    -- See Note [Expanding synonyms during unification]
    go ty1@(TyConApp tc1 []) (TyConApp tc2 [])
      | tc1 == tc2
      = return $ mkReflCo Nominal ty1

    -- See Note [Expanding synonyms during unification]
    --
    -- Also NB that we recurse to 'go' so that we don't push a
    -- new item on the origin stack. As a result if we have
    --   type Foo = Int
    -- and we try to unify Foo ~ Bool
    -- we'll end up saying "can't match Foo with Bool"
    -- rather than "can't match "Int with Bool". See Trac #4535.
    go ty1 ty2
      | Just ty1' <- coreView ty1 = go ty1' ty2
      | Just ty2' <- coreView ty2 = go ty1 ty2'

    -- Strip casts, fixing up the resulting coercion with coherence
    go (CastTy t1 co1) t2
      = do { co_tys <- go t1 t2
           ; return (mkCoherenceLeftCo co_tys co1) }
    go t1 (CastTy t2 co2)
      = do { co_tys <- go t1 t2
           ; return (mkCoherenceRightCo co_tys co2) }

    -- Functions (or predicate functions) just check the two parts
    go (ForAllTy (Anon fun1) arg1) (ForAllTy (Anon fun2) arg2)
      = do { co_l <- uType origin t_or_k fun1 fun2
           ; co_r <- uType origin t_or_k arg1 arg2
           ; return $ mkFunCo Nominal co_l co_r }

    -- Always defer if a type synonym family (type function)
    -- is involved. (Data families behave rigidly.)
    go ty1@(TyConApp tc1 _) ty2
      | isTypeFamilyTyCon tc1 = defer ty1 ty2
    go ty1 ty2@(TyConApp tc2 _)
      | isTypeFamilyTyCon tc2 = defer ty1 ty2

    go (TyConApp tc1 tys1) (TyConApp tc2 tys2)
      -- See Note [Mismatched type lists and application decomposition]
      | tc1 == tc2, length tys1 == length tys2
      = ASSERT2( isGenerativeTyCon tc1 Nominal, ppr tc1 )
        do { cos <- zipWith3M (uType origin) t_or_ks tys1 tys2
           ; return $ mkTyConAppCo Nominal tc1 cos }
      where
        (bndrs, _) = splitPiTys (tyConKind tc1)
        -- Arguments in named (dependent) binder positions are kinds
        t_or_ks = case t_or_k of
                    KindLevel -> repeat KindLevel
                    TypeLevel -> map (\bndr -> if isNamedBinder bndr
                                               then KindLevel
                                               else TypeLevel)
                                     bndrs

    go (LitTy m) ty@(LitTy n)
      | m == n
      = return $ mkNomReflCo ty

    -- See Note [Care with type applications]
    -- Do not decompose FunTy against App;
    -- it's often a type error, so leave it for the constraint solver
    go (AppTy s1 t1) (AppTy s2 t2)
      = go_app s1 t1 s2 t2
    go (AppTy s1 t1) (TyConApp tc2 ts2)
      | Just (ts2', t2') <- snocView ts2
      = ASSERT( mightBeUnsaturatedTyCon tc2 )
        go_app s1 t1 (TyConApp tc2 ts2') t2'
    go (TyConApp tc1 ts1) (AppTy s2 t2)
      | Just (ts1', t1') <- snocView ts1
      = ASSERT( mightBeUnsaturatedTyCon tc1 )
        go_app (TyConApp tc1 ts1') t1' s2 t2

    -- Coercions are compared only up to the types they prove equal
    go (CoercionTy co1) (CoercionTy co2)
      = do { let ty1 = coercionType co1
                 ty2 = coercionType co2
           ; kco <- uType (KindEqOrigin orig_ty1 orig_ty2 origin (Just t_or_k))
                          KindLevel
                          ty1 ty2
           ; return $ mkProofIrrelCo Nominal kco co1 co2 }

    -- Anything else fails
    -- E.g. unifying for-all types, which is relatively unusual
    go ty1 ty2 = defer ty1 ty2

    ------------------
    defer ty1 ty2   -- See Note [Check for equality before deferring]
      | ty1 `tcEqType` ty2 = return (mkNomReflCo ty1)
      | otherwise          = uType_defer origin t_or_k ty1 ty2

    ------------------
    -- Decompose a type application, unifying function and argument parts
    go_app s1 t1 s2 t2
      = do { co_s <- uType origin t_or_k s1 s2
           ; co_t <- uType origin t_or_k t1 t2
           ; return $ mkAppCo co_s co_t }
{- Note [Check for equality before deferring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Particularly in ambiguity checks we can get equalities like (ty ~ ty).
If ty involves a type function we may defer, which isn't very sensible.
An egregious example of this was in test T9872a, which has a type signature
Proxy :: Proxy (Solutions Cubes)
Doing the ambiguity check on this signature generates the equality
Solutions Cubes ~ Solutions Cubes
and currently the constraint solver normalises both sides at vast cost.
This little short-cut in 'defer' helps quite a bit.
Note [Care with type applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note: type applications need a bit of care!
They can match FunTy and TyConApp, so use splitAppTy_maybe
NB: we've already dealt with type variables and Notes,
so if one type is an App the other one jolly well better be too
Note [Mismatched type lists and application decomposition]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we find two TyConApps, you might think that the argument lists
are guaranteed equal length. But they aren't. Consider matching
w (T x) ~ Foo (T x y)
We do match (w ~ Foo) first, but in some circumstances we simply create
a deferred constraint; and then go ahead and match (T x ~ T x y).
This came up in Trac #3950.
So either
(a) we must check for identical argument kinds
when decomposing applications,
(b) or we must be prepared for ill-kinded unification sub-problems
Currently we adopt (b) since it seems more robust -- no need to maintain
a global invariant.
Note [Expanding synonyms during unification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We expand synonyms during unification, but:
* We expand *after* the variable case so that we tend to unify
variables with un-expanded type synonym. This just makes it
more likely that the inferred types will mention type synonyms
understandable to the user
* We expand *before* the TyConApp case. For example, if we have
type Phantom a = Int
and are unifying
Phantom Int ~ Phantom Char
it is *wrong* to unify Int and Char.
* The problem case immediately above can happen only with arguments
to the tycon. So we check for nullary tycons *before* expanding.
This is particularly helpful when checking (* ~ *), because * is
now a type synonym.
Note [Deferred Unification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We may encounter a unification ty1 ~ ty2 that cannot be performed syntactically,
and yet its consistency is undetermined. Previously, there was no way to still
make it consistent. So a mismatch error was issued.
Now these unifications are deferred until constraint simplification, where type
family instances and given equations may (or may not) establish the consistency.
Deferred unifications are of the form
F ... ~ ...
or x ~ ...
where F is a type function and x is a type variable.
E.g.
id :: x ~ y => x -> y
id e = e
involves the unification x = y. It is deferred until we bring into account the
context x ~ y to establish that it holds.
If available, we defer original types (rather than those where closed type
synonyms have already been expanded via tcCoreView). This is, as usual, to
improve error messages.
************************************************************************
* *
uVar and friends
* *
************************************************************************
@uVar@ is called when at least one of the types being unified is a
variable. It does {\em not} assume that the variable is a fixed point
of the substitution; rather, notice that @uVar@ (defined below) nips
back into @uTys@ if it turns out that the variable is already bound.
-}
uUnfilledVar :: CtOrigin
             -> TypeOrKind
             -> SwapFlag
             -> TcTyVar -> TcTyVarDetails  -- Tyvar 1
             -> TcTauType                  -- Type 2
             -> TcM Coercion
-- "Unfilled" means that the variable is definitely not a filled-in meta tyvar
--            It might be a skolem, or untouchable, or meta
uUnfilledVar origin t_or_k swapped tv1 details1 (TyVarTy tv2)
  | tv1 == tv2  -- Same type variable => no-op
  = return (mkNomReflCo (mkTyVarTy tv1))
  | otherwise   -- Distinct type variables
  = do { lookup2 <- lookupTcTyVar tv2
       ; case lookup2 of
           Filled ty2'
             -> uUnfilledVar origin t_or_k swapped tv1 details1 ty2'
           Unfilled details2
             -> uUnfilledVars origin t_or_k swapped tv1 details1 tv2 details2
       }
uUnfilledVar origin t_or_k swapped tv1 details1 non_var_ty2
  -- ty2 is not a type variable
  = case details1 of
      MetaTv { mtv_ref = ref1 }
        -- A meta tyvar: update in place if the occurs/kind checks pass
        -> do { dflags <- getDynFlags
              ; mb_ty2' <- checkTauTvUpdate dflags origin t_or_k tv1 non_var_ty2
              ; case mb_ty2' of
                  Just (ty2', co_k) -> maybe_sym swapped <$>
                                       updateMeta tv1 ref1 ty2' co_k
                  Nothing -> do { traceTc "Occ/type-family defer"
                                          (ppr tv1 <+> dcolon <+> ppr (tyVarKind tv1)
                                           $$ ppr non_var_ty2 $$ ppr (typeKind non_var_ty2))
                                ; defer }
              }
      _other -> do { traceTc "Skolem defer" (ppr tv1); defer }  -- Skolems of all sorts
  where
    defer = unSwap swapped (uType_defer origin t_or_k) (mkTyVarTy tv1) non_var_ty2
      -- Occurs check or an untouchable: just defer
      -- NB: occurs check isn't necessarily fatal:
      --     eg tv1 occurred in type family parameter
----------------
uUnfilledVars :: CtOrigin
              -> TypeOrKind
              -> SwapFlag
              -> TcTyVar -> TcTyVarDetails  -- Tyvar 1
              -> TcTyVar -> TcTyVarDetails  -- Tyvar 2
              -> TcM Coercion
-- Invariant: The type variables are distinct,
--            Neither is filled in yet
uUnfilledVars origin t_or_k swapped tv1 details1 tv2 details2
  = do { traceTc "uUnfilledVars for" (ppr tv1 <+> text "and" <+> ppr tv2)
       ; traceTc "uUnfilledVars" ( text "trying to unify" <+> ppr k1
                                   <+> text "with" <+> ppr k2)
         -- First unify the two variables' kinds; the resulting coercion
         -- is used to cast whichever side we decide to write into.
       ; co_k <- uType kind_origin KindLevel k1 k2
       ; let no_swap ref = maybe_sym swapped <$>
                           updateMeta tv1 ref ty2 (mkSymCo co_k)
             do_swap ref = maybe_sym (flipSwap swapped) <$>
                           updateMeta tv2 ref ty1 co_k
       ; case (details1, details2) of
           { ( MetaTv { mtv_info = i1, mtv_ref = ref1 }
             , MetaTv { mtv_info = i2, mtv_ref = ref2 } )
               -- Both meta: pick the "nicer" one to update
               | nicer_to_update_tv1 tv1 i1 i2 -> no_swap ref1
               | otherwise                     -> do_swap ref2
           ; (MetaTv { mtv_ref = ref1 }, _) -> no_swap ref1
           ; (_, MetaTv { mtv_ref = ref2 }) -> do_swap ref2
             -- Can't do it in-place, so defer
             -- This happens for skolems of all sorts
           ; _ -> do { traceTc "deferring because I can't find a meta-tyvar:"
                               (pprTcTyVarDetails details1 <+> pprTcTyVarDetails details2)
                     ; unSwap swapped (uType_defer origin t_or_k) ty1 ty2 } } }
  where
    k1  = tyVarKind tv1
    k2  = tyVarKind tv2
    ty1 = mkTyVarTy tv1
    ty2 = mkTyVarTy tv2
    kind_origin = KindEqOrigin ty1 ty2 origin (Just t_or_k)
-- | Apply 'mkSymCo' to the coercion iff the argument pair was swapped.
maybe_sym :: SwapFlag -> Coercion -> Coercion
maybe_sym swapped co = case swapped of
                         IsSwapped  -> mkSymCo co
                         NotSwapped -> co
-- | Given two unfilled meta tyvars, decide whether the *first* is the
-- better one to fill in. NB: equation order matters; earlier rules win.
nicer_to_update_tv1 :: TcTyVar -> MetaInfo -> MetaInfo -> Bool
nicer_to_update_tv1 _ _ SigTv = True
nicer_to_update_tv1 _ SigTv _ = False
  -- Try not to update SigTvs; and try to update sys-y type
  -- variables in preference to ones gotten (say) by
  -- instantiating a polymorphic function with a user-written
  -- type sig
nicer_to_update_tv1 _ ReturnTv _ = True
nicer_to_update_tv1 _ _ ReturnTv = False
  -- ReturnTvs are really holes just begging to be filled in.
  -- Let's oblige.
nicer_to_update_tv1 tv1 _ _ = isSystemName (Var.varName tv1)
----------------
checkTauTvUpdate :: DynFlags
                 -> CtOrigin
                 -> TypeOrKind
                 -> TcTyVar              -- tv :: k1
                 -> TcType               -- ty :: k2
                 -> TcM (Maybe ( TcType     -- possibly-expanded ty
                               , Coercion )) -- :: k2 ~N k1
--    (checkTauTvUpdate tv ty)
-- We are about to update the TauTv/ReturnTv tv with ty.
-- Check (a) that tv doesn't occur in ty (occurs check)
--       (b) that kind(ty) is a sub-kind of kind(tv)
--
-- We have two possible outcomes:
-- (1) Return the type to update the type variable with,
--        [we know the update is ok]
-- (2) Return Nothing,
--        [the update might be dodgy]
--
-- Note that "Nothing" does not mean "definite error".  For example
--   type family F a
--   type instance F Int = Int
-- consider
--   a ~ F a
-- This is perfectly reasonable, if we later get a ~ Int.  For now, though,
-- we return Nothing, leaving it to the later constraint simplifier to
-- sort matters out.
checkTauTvUpdate dflags origin t_or_k tv ty
  | SigTv <- info
  = ASSERT( not (isTyVarTy ty) )
    return Nothing
  | otherwise
  = do { ty   <- zonkTcType ty
         -- Unify the kinds first; co_k witnesses kind(ty) ~N kind(tv)
       ; co_k <- uType kind_origin KindLevel (typeKind ty) (tyVarKind tv)
       ; if | is_return_tv ->  -- ReturnTv: a simple occurs-check is all that we need
                               -- See Note [ReturnTv] in TcType
              if tv `elemVarSet` tyCoVarsOfType ty
              then return Nothing
              else return (Just (ty, co_k))
            | defer_me ty ->   -- Quick test
              -- Failed quick test so try harder: expanding synonyms may
              -- make a spurious occurs-check failure go away
              case occurCheckExpand dflags tv ty of
                OC_OK ty2 | defer_me ty2 -> return Nothing
                          | otherwise    -> return (Just (ty2, co_k))
                _ -> return Nothing
            | otherwise -> return (Just (ty, co_k)) }
  where
    kind_origin   = KindEqOrigin (mkTyVarTy tv) ty origin (Just t_or_k)
    details       = tcTyVarDetails tv
    info          = mtv_info details
    is_return_tv  = isReturnTyVar tv
    impredicative = canUnifyWithPolyType dflags details

    defer_me :: TcType -> Bool
    -- Checks for (a) occurrence of tv
    --            (b) type family applications
    --            (c) foralls
    -- See Note [Conservative unification check]
    defer_me (LitTy {})        = False
    defer_me (TyVarTy tv')     = tv == tv'
    defer_me (TyConApp tc tys) = isTypeFamilyTyCon tc || any defer_me tys
                                 || not (impredicative || isTauTyCon tc)
    defer_me (ForAllTy bndr t) = defer_me (binderType bndr) || defer_me t
                                 || (isNamedBinder bndr && not impredicative)
    defer_me (AppTy fun arg)   = defer_me fun || defer_me arg
    defer_me (CastTy ty co)    = defer_me ty || defer_me_co co
    defer_me (CoercionTy co)   = defer_me_co co

    -- We don't really care if there are type families in a coercion,
    -- but we still can't have an occurs-check failure
    defer_me_co co = tv `elemVarSet` tyCoVarsOfCo co
{-
Note [Conservative unification check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When unifying (tv ~ rhs), we try to avoid creating deferred constraints
only for efficiency. However, we do not unify (the defer_me check) if
a) There's an occurs check (tv is in fvs(rhs))
b) There's a type-function call in 'rhs'
If we fail defer_me we use occurCheckExpand to try to make it pass,
(see Note [Type synonyms and the occur check]) and then use defer_me
again to check. Example: Trac #4917)
a ~ Const a b
where type Const a b = a. We can solve this immediately, even when
'a' is a skolem, just by expanding the synonym.
We always defer type-function calls, even if it would be perfectly safe to
unify, eg (a ~ F [b]). Reason: this ensures that the constraint
solver gets to see, and hence simplify the type-function call, which
in turn might simplify the type of an inferred function. Test ghci046
is a case in point.
More mysteriously, test T7010 gave a horrible error
T7010.hs:29:21:
Couldn't match type `Serial (ValueTuple Float)' with `IO Float'
Expected type: (ValueTuple Vector, ValueTuple Vector)
Actual type: (ValueTuple Vector, ValueTuple Vector)
because an insoluble type function constraint got mixed up with
a soluble one when flattening. I never fully understood this, but
deferring type-function applications made it go away :-(.
T5853 also got a less-good error message with more aggressive
unification of type functions.
Moreover the Note [Type family sharing] gives another reason, but
again I'm not sure if it's really valid.
Note [Type synonyms and the occur check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Generally speaking we try to update a variable with type synonyms not
expanded, which improves later error messages, unless looking
inside a type synonym may help resolve a spurious occurs check
error. Consider:
type A a = ()
f :: (A a -> a -> ()) -> ()
f = \ _ -> ()
x :: ()
x = f (\ x p -> p x)
We will eventually get a constraint of the form t ~ A t. The ok function above will
properly expand the type (A t) to just (), which is ok to be unified with t. If we had
unified with the original type A t, we would lead the type checker into an infinite loop.
Hence, if the occurs check fails for a type synonym application, then (and *only* then),
the ok function expands the synonym to detect opportunities for occurs check success using
the underlying definition of the type synonym.
The same applies later on in the constraint interaction code; see TcInteract,
function @occ_check_ok@.
Note [Type family sharing]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We must avoid eagerly unifying type variables to types that contain function symbols,
because this may lead to loss of sharing, and in turn, in very poor performance of the
constraint simplifier. Assume that we have a wanted constraint:
{
m1 ~ [F m2],
m2 ~ [F m3],
m3 ~ [F m4],
D m1,
D m2,
D m3
}
where D is some type class. If we eagerly unify m1 := [F m2], m2 := [F m3], m3 := [F m4],
then, after zonking, our constraint simplifier will be faced with the following wanted
constraint:
{
D [F [F [F m4]]],
D [F [F m4]],
D [F m4]
}
which has to be flattened by the constraint solver. In the absence of
a flat-cache, this may generate a polynomially larger number of
flatten skolems and the constraint sets we are working with will be
polynomially larger.
Instead, if we defer the unifications m1 := [F m2], etc. we will only
be generating three flatten skolems, which is the maximum possible
sharing arising from the original constraint. That's why we used to
use a local "ok" function, a variant of TcType.occurCheckExpand.
HOWEVER, we *do* now have a flat-cache, which effectively recovers the
sharing, so there's no great harm in losing it -- and it's generally
more efficient to do the unification up-front.
Note [Non-TcTyVars in TcUnify]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because the same code is now shared between unifying types and unifying
kinds, we sometimes will see proper TyVars floating around the unifier.
Example (from test case polykinds/PolyKinds12):
type family Apply (f :: k1 -> k2) (x :: k1) :: k2
type instance Apply g y = g y
When checking the instance declaration, we first *kind-check* the LHS
and RHS, discovering that the instance really should be
type instance Apply k3 k4 (g :: k3 -> k4) (y :: k3) = g y
During this kind-checking, all the tyvars will be TcTyVars. Then, however,
as a second pass, we desugar the RHS (which is done in functions prefixed
with "tc" in TcTyClsDecls"). By this time, all the kind-vars are proper
TyVars, not TcTyVars, yet some kind unification must happen.
Thus, we always check if a TyVar is a TcTyVar before asking if it's a
meta-tyvar.
This used to not be necessary for type-checking (that is, before * :: *)
because expressions get desugared via an algorithm separate from
type-checking (with wrappers, etc.). Types get desugared very differently,
causing this wibble in behavior seen here.
-}
data LookupTyVarResult    -- The result of a lookupTcTyVar call
  = Unfilled TcTyVarDetails  -- SkolemTv or virgin MetaTv
  | Filled TcType            -- A meta tyvar whose ref already holds a type
-- | Look through a type variable: a filled-in meta tyvar yields its
-- contents ('Filled'); anything else yields its details ('Unfilled').
-- An untouchable meta tyvar is reported as a vanilla skolem, so that
-- the caller will refuse to unify it; see Note [Unifying untouchables].
lookupTcTyVar :: TcTyVar -> TcM LookupTyVarResult
lookupTcTyVar tyvar =
  case tcTyVarDetails tyvar of
    details@(MetaTv { mtv_ref = ref }) ->
      do { meta_details <- readMutVar ref
         ; case meta_details of
             Indirect ty -> return (Filled ty)
             Flexi ->
               do { is_touchable <- isTouchableTcM tyvar
                  ; return (Unfilled (if is_touchable
                                      then details
                                      else vanillaSkolemTv)) } }
    details -> return (Unfilled details)
-- | Fill in a meta-tyvar
updateMeta :: TcTyVar            -- ^ tv to fill in, tv :: k1
           -> TcRef MetaDetails  -- ^ ref to tv's metadetails
           -> TcType             -- ^ ty2 :: k2
           -> Coercion           -- ^ kind_co :: k2 ~N k1
           -> TcM Coercion       -- ^ :: tv ~N ty2 (= ty2 |> kind_co ~N ty2)
updateMeta tv1 ref1 ty2 kind_co
  = do { let ty2_refl = mkNomReflCo ty2
             -- Cast ty2 so it has the tyvar's kind k1; 'co' witnesses
             -- (ty2 |> kind_co) ~N ty2, which is what we return.
             (ty2', co) = ( ty2 `mkCastTy` kind_co
                          , mkCoherenceLeftCo ty2_refl kind_co )
       ; writeMetaTyVarRef tv1 ref1 ty2'
       ; return co }
{-
Note [Unifying untouchables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We treat an untouchable type variable as if it was a skolem. That
ensures it won't unify with anything. It's a slight hack, because
we return a made-up TcTyVarDetails, but I think it works smoothly.
-}
-- | Breaks apart a function kind into its pieces.
matchExpectedFunKind :: Arity   -- ^ # of args remaining, only for errors
                     -> TcType  -- ^ type, only for errors
                     -> TcKind  -- ^ function kind
                     -> TcM (Coercion, TcKind, TcKind)
                                -- ^ co :: old_kind ~ arg -> res
matchExpectedFunKind num_args_remaining ty = go
  where
    -- Look through kind synonyms first
    go k | Just k' <- coreView k = go k'
    go k@(TyVarTy kvar)
      | isTcTyVar kvar, isMetaTyVar kvar
      = do { maybe_kind <- readMetaTyVar kvar
           ; case maybe_kind of
               Indirect fun_kind -> go fun_kind
               Flexi             -> defer (isReturnTyVar kvar) k }
    go k@(ForAllTy (Anon arg) res)
      = return (mkNomReflCo k, arg, res)
    go other = defer False other

    -- Not (yet) an arrow kind: equate it with a fresh (arg -> res)
    defer is_return k
      = do { arg_kind <- new_flexi
           ; res_kind <- new_flexi
           ; let new_fun = mkFunTy arg_kind res_kind
                 thing   = mkTypeErrorThingArgs ty num_args_remaining
                 origin  = TypeEqOrigin { uo_actual   = k
                                        , uo_expected = new_fun
                                        , uo_thing    = Just thing
                                        }
           ; co <- uType origin KindLevel k new_fun
           ; return (co, arg_kind, res_kind) }
      where
        new_flexi | is_return = newReturnTyVarTy liftedTypeKind
                  | otherwise = newMetaKindVar
| gridaphobe/ghc | compiler/typecheck/TcUnify.hs | bsd-3-clause | 65,150 | 240 | 42 | 20,041 | 8,790 | 4,774 | 4,016 | -1 | -1 |
{-# LANGUAGE CPP,TemplateHaskell,DeriveDataTypeable #-}
{- | This module implements Windows Codepage number 1257 which encodes the estonian, latvian and lithuanian language.
See <http://en.wikipedia.org/wiki/CP1257> for more information.
-}
module Data.Encoding.CP1257
(CP1257(..)) where
import Data.Array ((!),Array)
import Data.Word (Word8)
import Data.ByteString (all)
import Data.Map (Map,lookup,member)
import Data.Encoding.Base
import Prelude hiding (lookup,all)
import Control.OldException (throwDyn)
import Data.Typeable
data CP1257 = CP1257 deriving (Eq,Show,Typeable)
-- Single-byte codec: encoding consults 'encodeMap' (throwing
-- 'HasNoRepresentation' for unmapped characters), decoding indexes
-- 'decodeArr'; bytes decoding to U+FFFD are considered undecodable.
instance Encoding CP1257 where
        encode _ = encodeSinglebyte toByte
            where toByte c = maybe (throwDyn (HasNoRepresentation c)) id
                                   (lookup c encodeMap)
        encodable _ c = c `member` encodeMap
        decode _ = decodeSinglebyte (\w -> decodeArr ! w)
        decodable _ = all (\w -> (decodeArr ! w) /= '\xFFFD')
-- | Byte-to-character table, built at compile time from the mapping
-- file; bytes decoding to '\xFFFD' are treated as unmapped (cf. 'decodable').
decodeArr :: Array Word8 Char
#ifndef __HADDOCK__
decodeArr = $(decodingArray "CP1257.TXT")
#endif
-- | Character-to-byte table, built at compile time from the mapping file.
encodeMap :: Map Char Word8
#ifndef __HADDOCK__
encodeMap = $(encodingMap "CP1257.TXT")
#endif
| abuiles/turbinado-blog | tmp/dependencies/encoding-0.4.1/Data/Encoding/CP1257.hs | bsd-3-clause | 1,069 | 2 | 14 | 150 | 291 | 163 | 128 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module GlobWorks ( script ) where
import Prelude
import Data.Monoid
import Shell
-- This test ensures that globs actually work: the echoed argument
-- carries an embedded glob.
script :: ShellM ()
script =
  run (command "echo" ["README" <> anyChars]) >> return ()
| travitch/shellDSL | tests/inputs/GlobWorks.hs | bsd-3-clause | 280 | 0 | 10 | 51 | 65 | 36 | 29 | 9 | 1 |
{-# LANGUAGE FlexibleInstances #-}
module Color where
import Control.Applicative ((<$>), (<*>))
import Data.Monoid (mappend)
import Data.Semigroup (Semigroup, (<>))
import Data.Vector (Vector)
import Data.Word (Word8)
import Test.QuickCheck.Arbitrary (Arbitrary(..))
-- | an RGB value
data RGB a = RGB
  { r :: a -- ^ red channel
  , g :: a -- ^ green channel
  , b :: a -- ^ blue channel
  } deriving (Eq, Ord, Read, Show)
-- Colors combine by channel-wise addition. NB: for @RGB Word8@ the
-- '+' wraps on overflow (Word8 arithmetic is modular).
instance (Num a) => Semigroup (RGB a) where
  (RGB r0 g0 b0) <> (RGB r1 g1 b1) =
    RGB (r0 + r1) (g0 + g1) (b0 + b1)
-- Independent arbitrary bytes for each channel.
instance Arbitrary (RGB Word8) where
  arbitrary = RGB <$> arbitrary <*> arbitrary <*> arbitrary
-- Doubles are generated in [0,1] by converting a random byte triple.
instance Arbitrary (RGB Double) where
  arbitrary = rgb_w2d <$> arbitrary
-- | Convert an 8-bit color to the unit-interval representation,
-- mapping each channel from [0,255] onto [0,1].
rgb_w2d :: RGB Word8 -> RGB Double
rgb_w2d (RGB red green blue) = RGB (toUnit red) (toUnit green) (toUnit blue)
  where
    toUnit channel = fromIntegral channel / 255
-- | Convert a unit-interval color back to 8 bits per channel,
-- rounding each channel of [0,1] onto [0,255].
rgb_d2w :: RGB Double -> RGB Word8
rgb_d2w (RGB red green blue) = RGB (toByte red) (toByte green) (toByte blue)
  where
    toByte channel = round (channel * 255)
-- | Multiply every channel by a common scale factor.
scaleRGB :: (Num a) => RGB a -> a -> RGB a
scaleRGB (RGB red green blue) factor =
  RGB (red * factor) (green * factor) (blue * factor)
-- | standard colors

blackRGB :: RGB Word8
blackRGB = RGB 0x00 0x00 0x00

redRGB :: RGB Word8
redRGB = RGB 0xff 0x00 0x00

greenRGB :: RGB Word8
greenRGB = RGB 0x00 0xff 0x00

blueRGB :: RGB Word8
blueRGB = RGB 0x00 0x00 0xff
-- | an HSL value
data HSL a = HSL
  { h :: a -- ^ hue, in degrees (divided by 360 in 'hsl2rgb')
  , s :: a -- ^ saturation, in [0,1]
  , l :: a -- ^ lightness, in [0,1]
  } deriving (Eq, Ord, Read, Show)
-- | Convert HSL to RGB (hue in degrees; s, l and the result channels
-- in [0,1]); standard HSL-to-RGB algorithm.
hsl2rgb :: (Ord a, Fractional a) =>
           HSL a
        -> RGB a
-- Zero saturation is the achromatic case: every channel equals lightness
hsl2rgb (HSL _ 0 l) = RGB l l l
hsl2rgb (HSL h s l) =
  let t2 | l < 0.5   = l * (1 + s)
         | otherwise = (l + s) - (l * s)
      t1 = (2.0 * l) - t2
      -- normalise hue from degrees to [0,1)
      hk = h / 360.0
      -- per-channel hue offsets, wrapped back into [0,1]
      t3r = case hk + (1/3) of
              n | n > 1.0   -> n - 1.0
                | otherwise -> n
      t3g = hk
      t3b = case hk - (1/3) of
              n | n < 0     -> n + 1.0
                | otherwise -> n
  in RGB { r = colorComponent t1 t2 t3r
         , g = colorComponent t1 t2 t3g
         , b = colorComponent t1 t2 t3b
         }
  where
    -- piecewise interpolation between t1 and t2, by hue sector
    colorComponent t1 t2 t3
      | t3 < 1/6              = t1 + ((t2 - t1) * 6.0 * t3)
      | t3 >= 1/6 && t3 < 1/2 = t2
      | t3 >= 1/2 && t3 < 2/3 = (t1 + ((t2 - t1) * (2/3 - t3) * 6.0))
      | otherwise             = t1
-- | Convert RGB (channels in [0,1]) to HSL (hue in degrees).
-- NB: the local 'max'/'min' shadow the Prelude functions.
rgb2hsl :: (Ord a, Eq a, Fractional a) =>
           RGB a
        -> HSL a
rgb2hsl (RGB r g b) =
  let max = maximum [r, g, b]
      min = minimum [r, g, b]
      -- lightness is the midpoint of the channel extremes
      l = (min + max) / 2
      s | min == max = 0.0
        | l <= 0.5   = (max - min) / (max + min)
        | otherwise  = (max - min) / (2 - (max + min))
      -- the hue formula depends on which channel is maximal
      h | min == max         = 0.0
        | max == r && g >= b = 60 * ((g - b) / (max - min))
        | max == r           = 360 + (60 * ((g - b) / (max - min)))
        | max == g           = 120 + (60 * ((b - r) / (max - min)))
        | otherwise          = 240 + (60 * ((r - g) / (max - min)))
  in HSL h s l
-- Checks that Word8 -> Double -> Word8 conversion round-trips exactly.
prop_1 :: RGB Word8 -> Bool
prop_1 rgb = rgb_d2w (rgb_w2d rgb) == rgb

-- Checks that converting to HSL and back preserves the 8-bit color.
prop_2 :: RGB Word8 -> Bool
prop_2 rgb = rgb_d2w (hsl2rgb (rgb2hsl (rgb_w2d rgb))) == rgb
| n-heptane-lab/blinkomatic | Color.hs | bsd-3-clause | 2,996 | 0 | 16 | 1,049 | 1,548 | 808 | 740 | 85 | 1 |
module Main where
import System.Environment
import Data.Maybe (fromMaybe)
import Control.Monad (when)
import Control.Monad.Except
import System.IO
import System.Exit
import System.Directory
import Control.Exception (try, SomeException)
import VersionManager as VM
import App
import Server
import Config
-- | Handle an invocation with no command: honour --help / --version
-- (each exits successfully), otherwise fail with a usage message.
empty :: App -> IO ()
empty app = do
  let fs = flags app
  when (help fs) $ do
    putStrLn usageApp
    exitSuccess
  when (version fs) $ do
    putStrLn "Det 0.1.0"
    exitSuccess
  failWithUsage ["Missing Command\n"]
-- | Dispatch the parsed command-line action to its handler.
run :: App -> IO ()
run app = case action app of
  Config   -> Config.config app
  Cluster  -> Config.cluster app
  Start    -> Server.run app pull
  Init     -> VM.init app
  Commit   -> VM.commit app
  Diff     -> VM.diff app
  History  -> VM.history app
  Rollback -> VM.rollback app
  Empty    -> empty app
  _        -> print app  -- fallback: dump the App for unhandled actions
main :: IO ()
main =
     -- Default the state directory when DET_PATH is unset, then re-export
     -- it so child code can read it from the environment.
     -- NOTE(review): "~" is not expanded by the runtime here; presumably
     -- downstream code resolves it -- TODO confirm (System.Directory's
     -- getHomeDirectory is already imported if expansion is wanted).
  do path <- fromMaybe ("~/.det") <$> lookupEnv "DET_PATH"
     setEnv "DET_PATH" path
     -- Run the app, catching any exception and reporting it on stderr.
     err <- try (compileApp >>= Main.run)
            :: IO (Either SomeException ())
     case err of
       Left e ->
         do hPutStrLn stderr "Error:"
            hPutStrLn stderr (show e)
       _ -> return ()
| lambda-zorn/det | src/Main.hs | bsd-3-clause | 1,236 | 0 | 14 | 349 | 430 | 213 | 217 | 41 | 10 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : cryptol@galois.com
-- Stability : provisional
-- Portability : portable
module Cryptol.TypeCheck.Solver.Selector (tryHasGoal) where
import Cryptol.TypeCheck.AST
import Cryptol.TypeCheck.InferTypes
import Cryptol.TypeCheck.Monad( InferM, unify, newGoals, lookupNewtype
, newType, applySubst, addHasGoal, solveHasGoal
)
import Cryptol.TypeCheck.Subst(listSubst,apSubst)
import Cryptol.Utils.PP(text,pp,ordinal,(<+>))
import Cryptol.Utils.Panic(panic)
import Control.Monad(forM,guard)
-- | Make a fresh record type with the given field labels, using a new
-- unification variable for each field's type.
recordType :: [Name] -> InferM Type
recordType labels =
  do fields <- mapM freshField labels
     return (TRec fields)
  where
    freshField lab =
      do ty <- newType (text "record field" <+> pp lab) KType
         return (lab, ty)
-- | Make a fresh @n@-tuple type, with a new unification variable for
-- each component.
tupleType :: Int -> InferM Type
tupleType n =
  do fields <- forM [ 0 .. n - 1 ] $ \i ->
                 newType (ordinal i <+> text "tuple field") KType
     return (tTuple fields)
-- | Make the type of a sequence of @n@ elements, with a fresh
-- unification variable for the element type.
listType :: Int -> InferM Type
listType n =
  do elemTy <- newType (text "sequence element type") KType
     return (tSeq (tNum n) elemTy)
-- From the shape of the selector, guess the shape of the selected-from
-- type and unify; the result is a cast to wrap around the expression.
improveSelector :: Selector -> Type -> InferM (Expr -> Expr)
improveSelector sel outerT =
  case sel of
    RecordSel _ mb -> cvt recordType mb
    TupleSel  _ mb -> cvt tupleType mb
    ListSel   _ mb -> cvt listType mb
  where
  -- No shape information in the selector: nothing to improve
  cvt _ Nothing  = return id
  -- Unify the guessed shape with the actual type; when extra constraints
  -- arise, record them and cast the expression to the guessed type.
  cvt f (Just a) = do ty <- f a
                      cs <- unify ty outerT
                      case cs of
                        [] -> return id
                        _  -> do newGoals CtExactType cs
                                 return (`ECast` ty)
{- | Compute the type of a field based on the selector.
The given type should be "zonked" (i.e., substitution was applied to it),
and (outermost) type synonyms have been expanded.
-}
solveSelector :: Selector -> Type -> InferM (Maybe Type)
solveSelector sel outerT =
  case (sel, outerT) of

    (RecordSel l _, ty) ->
      case ty of
        TRec fs                  -> return (lookup l fs)
        TCon (TC TCSeq) [len,el] -> liftSeq len el
        TCon (TC TCFun) [t1,t2]  -> liftFun t1 t2
        -- Selecting through a newtype: look up the field in its
        -- definition, instantiating parameters and emitting the
        -- newtype's (substituted) constraints as new goals.
        TCon (TC (TCNewtype (UserTC x _))) ts ->
          do mb <- lookupNewtype x
             case mb of
               Nothing -> return Nothing
               Just nt ->
                 case lookup l (ntFields nt) of
                   Nothing -> return Nothing
                   Just t  ->
                     do let su = listSubst (zip (map tpVar (ntParams nt)) ts)
                        newGoals (CtPartialTypeFun $ UserTyFun x)
                          $ apSubst su $ ntConstraints nt
                        return $ Just $ apSubst su t
        _ -> return Nothing

    (TupleSel n _, ty) ->
      case ty of
        -- In-range tuple projection; 'guard' yields Nothing otherwise
        TCon (TC (TCTuple m)) ts ->
          return $ do guard (0 <= n && n < m)
                      return $ ts !! n
        TCon (TC TCSeq) [len,el] -> liftSeq len el
        TCon (TC TCFun) [t1,t2]  -> liftFun t1 t2
        _                        -> return Nothing

    (ListSel n _, TCon (TC TCSeq) [l,t]) ->
      -- Emit a numeric side-condition relating the sequence length
      -- to the selected index: l + 1 >= n
      do newGoals CtSelector [ (l .+. tNum (1::Int)) >== tNum n ]
         return (Just t)

    _ -> return Nothing

  where
  -- Selectors lift pointwise over sequence elements...
  liftSeq len el =
    do mb <- solveSelector sel el
       return $ do el' <- mb
                   return (TCon (TC TCSeq) [len,el'])
  -- ...and over function results
  liftFun t1 t2 =
    do mb <- solveSelector sel t2
       return $ do t2' <- mb
                   return (TCon (TC TCFun) [t1,t2'])
-- | Solve has-constraints.
tryHasGoal :: HasGoal -> InferM ()
tryHasGoal has
  | TCon (PC (PHas sel)) [ th, ft ] <- goal (hasGoal has) =
    do outerCast <- improveSelector sel th
       outerT    <- tNoUser `fmap` applySubst th
       mbInnerT  <- solveSelector sel outerT
       case mbInnerT of
         -- No progress possible yet: put the goal back for later
         Nothing -> addHasGoal has
         Just innerT ->
           do cs <- unify innerT ft
              innerCast <- case cs of
                             [] -> return id
                             _  -> do newGoals CtExactType cs
                                      return (`ECast` ft)
              -- Record the evidence: cast, select, cast
              solveHasGoal (hasName has) (innerCast . (`ESel` sel) . outerCast)

  | otherwise = panic "hasGoalSolved"
                  [ "Unexpected selector proposition:"
                  , show (hasGoal has)
                  ]
| TomMD/cryptol | src/Cryptol/TypeCheck/Solver/Selector.hs | bsd-3-clause | 4,366 | 0 | 30 | 1,585 | 1,476 | 730 | 746 | 96 | 13 |
module Test.Category.Properties.Monad where
import Test.QuickCheck
import Test.QuickCheck.Function
import qualified Control.Category.Hask.Laws.Monad as Laws
-- | QuickCheck wrapper for 'Laws.etaNaturalityWith': checks naturality of
-- the unit (eta) for a generated function and a generated argument, using
-- the supplied equality test @eq@.
etaNaturalityWith gen_fab gen_a eq =
  forAll gen_fab $ \(Fun _ fab) ->
    forAll gen_a $ \a ->
      Laws.etaNaturalityWith eq fab a

-- | QuickCheck wrapper for 'Laws.muNaturalityWith': checks naturality of
-- the multiplication (mu) for a generated function and a generated
-- doubly-wrapped value.
muNaturalityWith gen_fab gen_mma eq =
  forAll gen_fab $ \(Fun _ fab) ->
    forAll gen_mma $ \mma ->
      Laws.muNaturalityWith eq fab mma

-- | First monad law as defined by 'Laws.law1With', over generated
-- triply-wrapped values.
law1With gen_mmma eq =
  forAll gen_mmma $ Laws.law1With eq

-- | Second monad law as defined by 'Laws.law2With'.
law2With gen_ma eq =
  forAll gen_ma $ Laws.law2With eq

-- | Third monad law as defined by 'Laws.law3With'.
law3With gen_ma eq =
  forAll gen_ma $ Laws.law3With eq

-- Specialisations of the *With variants that use (==) as equality test.

muNaturality gen_fab gen_mma =
  muNaturalityWith gen_fab gen_mma (==)

law1 gen_mmma =
  law1With gen_mmma (==)

law2 gen_ma =
  law2With gen_ma (==)

law3 gen_ma =
  law3With gen_ma (==)
| andorp/category-test-laws | src/Test/Category/Properties/Monad.hs | bsd-3-clause | 791 | 0 | 10 | 145 | 266 | 138 | 128 | 26 | 1 |
{-# LANGUAGE OverloadedStrings #-}
--------------------------------------------------------------------
-- |
-- Module : Data.Minecraft.Types
-- Copyright : (c) 2016 Michael Carpenter
-- License : BSD3
-- Maintainer : Michael Carpenter <oldmanmike.dev@gmail.com>
-- Stability : experimental
-- Portability : portable
--
--------------------------------------------------------------------
module Data.Minecraft.Types
( Array
, Buffer
, mkBuffer
, Container
, CompressedNBT
, Count (..)
, EntityMetadata
, Position
, PositionIBI (..)
, PositionIII (..)
, PositionISI (..)
, RestBuffer
, Slot
, VarInt
, putVarInt
, getVarInt
, putWord16
, getWord16
, putInt8
, putInt16
, putInt32
, putInt64
, getInt16
, getInt32
, getInt64
, putFloat
, getFloat
, putDouble
, getDouble
, putBool
, getBool
, putText
, getText
, putByteString
, getByteString
, putPosition
, getPosition
, putUUID
, getUUID
, putSlot
, getSlot
, putArray
, getArray
, putBuffer
, getBuffer
, putRestBuffer
, getRestBuffer
, putEntityMetadata
, getEntityMetadata
, putNBT
, getNBT
) where
import Data.Bits
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import Data.Int
import Data.NBT
import qualified Data.Serialize as S
import qualified Data.Text as T
import Data.Word
import Data.UUID
-- Protocol-level types.  Several aliases are placeholders ("Undefined")
-- that merely stand in for 'B.ByteString' until a real representation is
-- chosen.

type Array = B.ByteString -- Undefined

-- | A byte buffer together with its length field (any 'Enum' type).
data Buffer a = Buf a B.ByteString
  deriving (Show,Eq)

-- | Build a 'Buffer' whose length field is computed from the payload.
mkBuffer :: Enum a => B.ByteString -> Buffer a
mkBuffer b = Buf (toEnum . B.length $ b) b

type Container = B.ByteString -- Undefined

type CompressedNBT = B.ByteString -- Undefined

-- | Wrapper marking a value as a count.
data Count a = Count a
  deriving (Show,Eq)

type EntityMetadata = B.ByteString -- Undefined

type Position = Word64 -- Undefined

-- | Position with Int32 x, Word8 y, Int32 z components.
data PositionIBI = PositionIBI
  { ibi_x :: Int32
  , ibi_y :: Word8
  , ibi_z :: Int32
  } deriving (Show,Eq)

-- | Position with three Int32 components.
data PositionIII = PositionIII
  { iii_x :: Int32
  , iii_y :: Int32
  , iii_z :: Int32
  } deriving (Show,Eq)

-- | Position with Int32 x, Int16 y, Int32 z components.
data PositionISI = PositionISI
  { isi_x :: Int32
  , isi_y :: Int16
  , isi_z :: Int32
  } deriving (Show,Eq)

type RestBuffer = B.ByteString -- Undefined

type Slot = NBT

type VarInt = Int
-- Adapted from the protocol-buffers library, but only for Serialize and Ints

-- | Encode a 'VarInt' in base-128 chunks: 7 payload bits per byte, with
-- the high bit set on every byte except the last.
-- NOTE(review): for a negative input @i < 0x80@ is 'True', so a negative
-- value is written as one truncated byte rather than a sign-extended
-- multi-byte sequence -- confirm callers never pass negative values.
putVarInt :: VarInt -> S.Put
putVarInt i | i < 0x80 = S.putWord8 (fromIntegral i)
            | otherwise = S.putWord8 (fromIntegral (i .&. 0x7F) .|. 0x80) >> putVarInt (i `shiftR` 7)
{-# INLINE putVarInt #-}

-- | Decode a base-128 encoded 'VarInt': keep consuming bytes while the
-- continuation bit (bit 7) is set, accumulating 7 bits per byte,
-- least-significant group first.
getVarInt :: S.Get VarInt
getVarInt = do
  w <- S.getWord8
  if testBit w 7 then go 7 (fromIntegral (w .&. 0x7F))
    else return (fromIntegral w)
  where
    -- @n@ is the current shift amount, @val@ the bits read so far
    go n val = do
      w' <- S.getWord8
      if testBit w' 7 then go (n+7) (val .|. ((fromIntegral (w' .&. 0x7F)) `shiftL` n))
        else return (val .|. ((fromIntegral w') `shiftL` n))
{-# INLINE getVarInt #-}
-------------------------------------------------------------------------------

-- | Write a 'Word16'.  Not implemented yet; forcing it crashes.
putWord16 :: Word16 -> S.PutM ()
putWord16 w = undefined

-- | Read a 'Word16'.  Not implemented yet; forcing it crashes.
getWord16 :: S.Get Word16
getWord16 = undefined

-- Signed integer writers, delegating to the 'S.Serialize' instances.
-- The matching readers below use the big-endian getters, so the
-- instances are presumably big-endian as well -- TODO confirm.

putInt8 :: Int8 -> S.PutM ()
putInt8 i = S.put i
{-# INLINE putInt8 #-}

putInt16 :: Int16 -> S.PutM ()
putInt16 i = S.put i
{-# INLINE putInt16 #-}

putInt32 :: Int32 -> S.PutM ()
putInt32 i = S.put i
{-# INLINE putInt32 #-}

putInt64 :: Int64 -> S.PutM ()
putInt64 i = S.put i
{-# INLINE putInt64 #-}

-- Signed integer readers (explicitly big-endian).

getInt16 :: S.Get Int16
getInt16 = S.getInt16be
{-# INLINE getInt16 #-}

getInt32 :: S.Get Int32
getInt32 = S.getInt32be
{-# INLINE getInt32 #-}

getInt64 :: S.Get Int64
getInt64 = S.getInt64be
{-# INLINE getInt64 #-}

-- Remaining scalar codecs are unimplemented stubs.

putFloat :: Float -> S.PutM ()
putFloat f = undefined

getFloat :: S.Get Float
getFloat = undefined

putDouble :: Double -> S.PutM ()
putDouble d = undefined

getDouble :: S.Get Double
getDouble = undefined

putBool :: Bool -> S.PutM ()
putBool b = undefined

getBool :: S.Get Bool
getBool = undefined
-- | Write text prefixed by a VarInt length.
-- NOTE(review): the prefix is the /character/ count ('T.length') while the
-- payload goes through 'BC.pack', which truncates each character to 8 bits;
-- for non-ASCII input the prefix and payload byte count disagree --
-- confirm inputs are ASCII-only.
putText :: T.Text -> S.PutM ()
putText t = do
  putVarInt . T.length $ t
  S.putByteString . BC.pack . T.unpack $ t
{-# INLINE putText #-}

-- | Read a VarInt length prefix followed by that many bytes of text;
-- a zero length yields the empty text without consuming further input.
getText :: S.Get T.Text
getText = do
  len <- getVarInt
  if len /= 0
    then fmap (T.pack . BC.unpack) (S.getByteString len)
    else return ""
{-# INLINE getText #-}

-- | Write a ByteString prefixed by its length as a VarInt.
putByteString :: B.ByteString -> S.PutM ()
putByteString b = do
  putVarInt . B.length $ b
  S.putByteString b
{-# INLINE putByteString #-}

-- | Read a VarInt length prefix followed by that many raw bytes.
getByteString :: S.Get B.ByteString
getByteString = getVarInt >>= S.getByteString
{-# INLINE getByteString #-}
-- Serialisation stubs for the remaining protocol types.  All of these are
-- still 'undefined' and will crash when forced.

putPosition :: Position -> S.PutM ()
putPosition p = undefined

getPosition :: S.Get Position
getPosition = undefined

putUUID :: UUID -> S.PutM ()
putUUID u = undefined

getUUID :: S.Get UUID
getUUID = undefined

putSlot :: Slot -> S.PutM ()
putSlot s = undefined

getSlot :: S.Get Slot
getSlot = undefined

putArray :: Array -> S.PutM ()
putArray a = undefined

getArray :: S.Get Array
getArray = undefined

putBuffer :: Buffer a -> S.PutM ()
putBuffer = undefined

getBuffer :: S.Get (Buffer a)
getBuffer = undefined

putRestBuffer :: RestBuffer -> S.PutM ()
putRestBuffer r = undefined

getRestBuffer :: S.Get RestBuffer
getRestBuffer = undefined

putEntityMetadata :: EntityMetadata -> S.PutM ()
putEntityMetadata e = undefined

getEntityMetadata :: S.Get EntityMetadata
getEntityMetadata = undefined

putNBT :: NBT -> S.PutM ()
putNBT n = undefined

getNBT :: S.Get NBT
getNBT = undefined
| oldmanmike/hs-minecraft-protocol | src/Data/Minecraft/Types.hs | bsd-3-clause | 5,454 | 0 | 18 | 1,160 | 1,627 | 900 | 727 | 194 | 3 |
module Scurry.Comm.Util (
readLength,
DestAddr(..),
-- debugFrame,
bsToEthHdr,
) where
import Data.Binary
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString as BSS
import Scurry.Types.Network
-- | Read chunk size in bytes; 1560 is slightly above a standard
-- Ethernet frame -- presumably chosen as frame size plus headroom,
-- TODO confirm against the reader that uses it.
readLength :: Int
readLength = 1560

-- | Destination of a packet: a single endpoint or a list of endpoints.
data DestAddr = DestSingle EndPoint
              | DestList [EndPoint]
  deriving (Show)

-- | Decode an Ethernet header from a strict ByteString chunk
-- (wrapped into a lazy ByteString for 'decode').
bsToEthHdr :: BSS.ByteString -> EthernetHeader
bsToEthHdr d = decode (BS.fromChunks [d])
{-
-- |Takes an ethernet frame pair and prints some debug
-- information about it.
debugFrame :: (EthernetHeader,BSS.ByteString) -> IO ()
debugFrame (h,f) = putStrLn $ concat [(show h)," => Length: ",(show $ BSS.length f)]
-}
| dmagyar/scurry | src/Scurry/Comm/Util.hs | bsd-3-clause | 685 | 0 | 8 | 129 | 120 | 75 | 45 | 15 | 1 |
--
--
--
-----------------
-- Exercise 11.6.
-----------------
--
--
--
module E'11''6 where
import Prelude hiding ( id )
-- | The identity function: returns its argument unchanged.
-- (Defined locally; the Prelude version is hidden in this module.)
id :: a -> a
id value = value
-- | Example predicate on 'Int': 0 maps to 'False', 1 maps to 'True';
-- every other input is deliberately left 'undefined'.
f :: Int -> Bool
f n
  | n == 0    = False
  | n == 1    = True
  | otherwise = undefined
-- Behaviour and type instance of "(id $ f)" :
------------------------------------------------
--
-- "(id $ f)" means 'apply "id" to "f"'.
--
--
-- Proposition: (id $ f) = f
--
-- Proof: (id $ f)
-- | $
-- = (id f)
-- = id f
-- | id
-- = f
--
-- "id" is used with the most general type instance "(Int -> Bool) -> (Int -> Bool)",
-- since the application of ($) simplifies to normal function application.
-- Behaviour and type instance of "(f $ id)" :
------------------------------------------------
--
-- Proposition: (f $ id) will result in a compiler error.
--
-- Proof: (f $ id)
-- | $
-- = (f id)
-- = f id | ☠ ; contradiction
-- | "f" declares integer input, but
-- | "id" declares something polymorphic.
--
-- There is no instance at all.
{- GHCi>
f id
-}
--
-- <interactive>:2:3:
-- Couldn't match expected type `Int' with actual type `a0 -> a0'
-- Probable cause: `id' is applied to too few arguments
-- In the first argument of `f', namely `id'
-- In the expression: f id
-- Behaviour and type instance of "id ($)" :
----------------------------------------------
--
-- It is just the function application.
--
-- Proposition: id ($) = ($)
--
-- Proof: id ($)
-- | id
-- = ($)
--
-- The instance is polymorphic as well:
-- "((a -> b) -> a -> b) -> ((a -> b) -> a -> b)".
{- GHCi>
:t (id ($))
-}
-- (id ($)) :: (a -> b) -> a -> b
-- What type does "f" have if "f $ id" is properly typed?
---------------------------------------------------------
--
-- The type of "f" is "(a -> b) -> b".
--
-- Note: With my current knowledge about the type system
-- I would argue the same way I did in exercise 11.2.
| pascal-knodel/haskell-craft | _/links/E'11''6.hs | mit | 2,206 | 0 | 5 | 741 | 132 | 104 | 28 | 8 | 1 |
{-# LANGUAGE PackageImports #-}
import "HackVote" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, setPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
-- | Start the development server: obtain the application and its port,
-- serve it with Warp on a background thread, then poll for the
-- terminate marker file via 'loop'.
main :: IO ()
main = do
  putStrLn "Starting devel application"
  (port, app) <- getApplicationDev
  -- bind the ThreadId explicitly: it is intentionally unused, and the
  -- bare statement would trigger GHC's unused-do-bind warning
  _ <- forkIO $ runSettings (setPort port defaultSettings) app
  loop
-- | Poll every 0.1 seconds for the @yesod-devel/devel-terminate@ marker
-- file and shut down via 'terminateDevel' once it appears.
loop :: IO ()
loop = do
  threadDelay 100000
  stopRequested <- doesFileExist "yesod-devel/devel-terminate"
  if stopRequested
    then terminateDevel
    else loop
-- | Shut the devel process down with a successful exit code.
terminateDevel :: IO ()
terminateDevel = exitSuccess
| MaxGabriel/hackvote-yesod | devel.hs | cc0-1.0 | 679 | 0 | 10 | 104 | 186 | 100 | 86 | 21 | 2 |
{- |
Module : ./Common/LaTeX_funs.hs
Description : auxiliary functions for LaTeX printing
Copyright : (c) Klaus Luettich, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
Auxiliary functions for LaTeX printing
Functions to calculate the length of a given word as it would be
printed with LaTeX according to one of four categories of words
useful for CASL:
* keywords -- all the things that were printed in boldface
* structid -- all the names used in the structured context of CASL
* annotation -- all the comments and annotations of CASL in a smaller font
* axiom -- identifiers in math mode for CASL Basic specs
-}
module Common.LaTeX_funs
( calcLineLen
, axiom_width
, latex_macro
, flushright
, casl_comment_latex
, casl_normal_latex
, hc_sty_small_keyword
, hc_sty_plain_keyword
, hc_sty_casl_keyword
, hc_sty_axiom
, hc_sty_structid
, hc_sty_structid_indexed
, hc_sty_id
, startTab, endTab, setTab
, setTabWSp
, startAnno
, endAnno
, escapeSpecial
, escapeLatex
) where
import qualified Data.Map as Map
import Data.Char
import Data.List (isPrefixOf)
import Common.LaTeX_maps
import Common.Lib.Pretty as Pretty
import Common.Parsec
import Text.ParserCombinators.Parsec as Parsec
-- | a constant String for starting a LaTeX indentation with tab stop
startTab :: String
startTab = "\\@begT@"

-- | a constant String for releasing a LaTeX indentation with tab stop
endTab :: String
endTab = "\\@endT@"

-- | a constant String to set a tab stop and enable it
setTab :: String
setTab = "\\="

-- | a constant String indicating the start of a space based indentation
-- (note the open brace: the matching @}@ must be emitted afterwards)
setTabWSp :: String
setTabWSp = "\\@setTS@{"
{- | functions for calculating an integer value according to a given
length in LaTeX points.
-}
calcLineLen :: Int -> Int
calcLineLen len = scaleDown (len * 351)

-- Units per mm found in: Karsten Guenther, "Einfuehrung in LaTeX2e" (p.376)

-- | Round a point value down to column units (divide by 44 after adding
-- 15 for rounding).
scaleDown :: Int -> Int
scaleDown pts = (pts + 15) `div` 44
{- functions to calculate a word-width in integer with a given word
type or purpose
-}

-- | The font category a word is rendered in; each category has its own
-- width tables in "Common.LaTeX_maps".
data Word_type =
  Keyword | StructId | Normal | Comment | Annotation | AnnotationBold | Axiom
  deriving (Show, Eq)

-- | Width of a word in the given category, scaled to column units.
calc_word_width :: Word_type -> String -> Int
calc_word_width wt s = scaleDown $ calc_word_widthAux wt s

-- | Unscaled width: look the whole word up in the category's word map,
-- falling back to summing per-character widths (minus an italic
-- correction for axioms).
calc_word_widthAux :: Word_type -> String -> Int
calc_word_widthAux wt s = Map.findWithDefault
  (sum_char_width_deb (showString "In map \"" . shows wt . showString "\" \'")
     wFM k_wFM s - correction) s wFM
  where
    -- select the word map and its key map for the category
    (wFM, k_wFM) = case wt of
      Keyword -> (keyword_map, key_keyword_map)
      StructId -> (structid_map, key_structid_map)
      Comment -> (comment_map, key_comment_map)
      Annotation -> (annotation_map, key_annotation_map)
      AnnotationBold -> (annotationbf_map,
                         key_annotationbf_map)
      Axiom -> (axiom_map, key_axiom_map)
      Normal -> (normal_map, key_normal_map)
    -- only axiom (math/italic) text gets an italic correction
    correction = case wt of
      Axiom -> itCorrection s
      _ -> 0

-- | Italic correction summed over adjacent alphanumeric character pairs;
-- words starting with a backslash (macros) or shorter than two
-- characters need none.
itCorrection :: String -> Int
itCorrection [] = 0
itCorrection s
  | length s < 2 || head s == '\\' = 0
  | otherwise = itCorrection' 0 s
  where
    itCorrection' :: Int -> String -> Int
    itCorrection' _ [] = error "itCorrection' applied to empty List"
    itCorrection' r ys@[y1, y2]
      | not (isAlphaNum y1) = r
      | not (isAlphaNum y2) = r
      | otherwise = r + lookupCorrection ys
    itCorrection' r (y1 : ys@(y2 : _))
      | not (isAlphaNum y1) = itCorrection' r ys
      | otherwise =
          itCorrection'
            (r + lookupCorrection [y1, y2])
            ys
    itCorrection' _ _ = error ("itCorrection' doesn't work with " ++ s)
    -- per-pair correction with a default of 610
    lookupCorrection str = Map.findWithDefault def_cor str
                             italiccorrection_map
    def_cor = 610
-- | Sum the widths of the characters of a word, recognising ligatures,
-- escaped spaces and multi-character keys from the key map.  The first
-- argument is only a (currently unused) debug prefix.
sum_char_width_deb :: (String -> String) -- only used for an hackie debug thing
                   -> Map.Map String Int
                   -> Map.Map Char [String] -> String -> Int
sum_char_width_deb _pref_fun cFM key_cFM s = sum_char_width' s 0
  where
    sum_char_width' [] r = r
    -- last character: braces are zero-width, a space counts as "~"
    sum_char_width' [c] r = r + case c of
      '}' -> 0
      '{' -> 0
      ' ' -> lookupWithDefault_cFM "~"
      _ -> lookupWithDefault_cFM [c]
    sum_char_width' full@(c1 : rest@(c2 : cs)) r
      -- a known ligature pair is looked up as a unit
      | isLigature [c1, c2] = case Map.lookup [c1, c2] cFM of
          Just l -> sum_char_width' cs (r + l)
          Nothing -> sum_char_width' rest nl
      -- escaped space "\\ " and plain space both count as "~"
      | [c1, c2] == "\\ " =
          sum_char_width' cs (r + lookupWithDefault_cFM "~")
      | c1 == ' ' =
          sum_char_width' rest (r + lookupWithDefault_cFM "~")
      -- otherwise: try a multi-character key, then a TeX macro, then
      -- fall back to the single character's width
      | otherwise = case prefixIsKey full key_cFM of
          Just key -> sum_char_width'
            (drop (length key) full)
            $ r + (cFM Map.! key)
          Nothing -> if c1 == '\\' then
                       sum_char_width'
                         (dropWhile isAlpha rest)
                         $ r + lookupWithDefault_cFM "~"
                     else sum_char_width' rest nl
      where nl = r + lookupWithDefault_cFM [c1]
    lookupWithDefault_cFM s' = Map.findWithDefault 2200 s' cFM
    -- 2200 may not be optimal
-- | Find the first key (indexed by its leading character) that is a
-- prefix of the given string; 'Nothing' for the empty string or when no
-- candidate matches.
prefixIsKey :: String -> Map.Map Char [String] -> Maybe String
prefixIsKey str key_cFM = case str of
  [] -> Nothing
  c : _ ->
    let candidates = Map.findWithDefault [] c key_cFM
    in case [ key | key <- candidates, key `isPrefixOf` str ] of
         [] -> Nothing
         key : _ -> Just key
-- | Is the given two-character string a known ligature?  Any string of a
-- different length is never a ligature.
isLigature :: String -> Bool
isLigature str@[_, _] = Map.findWithDefault False str ligatures
isLigature _ = False
-- Per-category width functions; axiom text is first split into atoms by
-- 'parseAxiomString' and the atom widths are summed.
keyword_width, structid_width, axiom_width, annotationbf_width,
  comment_width, normal_width :: String -> Int
annotationbf_width = calc_word_width AnnotationBold
keyword_width = calc_word_width Keyword
structid_width = calc_word_width StructId
comment_width = calc_word_width Comment
normal_width = calc_word_width Normal
axiom_width = sum . map (calc_word_width Axiom) . parseAxiomString

{- |
latex_macro creates a document ('Doc') containing String
that has a zero width.
So it can be used for LaTeX-macros not needing any space, i.e.
@\textit{@ or @}@ -}
latex_macro :: String -> Doc
latex_macro = sizedText 0

-- Wrap a string into a 'Doc' sized according to its category's width.
casl_keyword_latex, casl_annotationbf_latex,
  casl_axiom_latex,
  casl_comment_latex, casl_structid_latex,
  casl_normal_latex :: String -> Doc
casl_annotationbf_latex s = sizedText (annotationbf_width s) s
casl_structid_latex s = sizedText (structid_width s) s
casl_comment_latex s = sizedText (comment_width s) s
casl_keyword_latex s = sizedText (keyword_width s) s
casl_normal_latex s = sizedText (normal_width s) s
casl_axiom_latex s = sizedText (axiom_width s) s
-- | sort, op, pred, type and its plurals
-- (rendered as the LaTeX macro @\\SORTS@ etc., sized like "preds")
hc_sty_casl_keyword :: String -> Doc
hc_sty_casl_keyword str =
  sizedText (keyword_width "preds") $ '\\' : map toUpper str

-- | Render a keyword in the @\\KW{...}@ macro (keyword font).
hc_sty_plain_keyword :: String -> Doc
hc_sty_plain_keyword kw =
  latex_macro "\\KW{" <> casl_keyword_latex (escapeSpecial kw)
  <> latex_macro "}"

-- | Like 'hc_sty_plain_keyword' but in the smaller annotation font.
hc_sty_small_keyword :: String -> Doc
hc_sty_small_keyword kw =
  latex_macro "\\KW{" <> casl_annotationbf_latex (escapeSpecial kw)
  <> latex_macro "}"

-- Renderers for axioms, structured identifiers and identifiers, each
-- wrapped in its corresponding LaTeX macro.
hc_sty_axiom, hc_sty_structid, hc_sty_id, hc_sty_structid_indexed
  :: String -> Doc
hc_sty_structid sid = latex_macro "\\SId{" <> sid_doc <> latex_macro "}"
  where sid_doc = casl_structid_latex (escapeSpecial sid)
hc_sty_structid_indexed sid =
  latex_macro "\\SIdIndex{" <> sid_doc <> latex_macro "}"
  where sid_doc = casl_structid_latex (escapeSpecial sid)
hc_sty_id i = latex_macro "\\Id{" <> id_doc <> latex_macro "}"
  where id_doc = casl_axiom_latex i
hc_sty_axiom ax = latex_macro "\\Ax{" <> ax_doc <> latex_macro "}"
  where ax_doc = casl_axiom_latex ax
-- | flush argument doc to the right
flushright :: Doc -> Doc
flushright = (latex_macro "\\`" <>)

-- | a constant String for the start of annotations
startAnno :: String
startAnno = "{\\small{}"

-- | a constant string ending an annotation
endAnno :: String
endAnno = "%@%small@}"

-- | Backslash-escape LaTeX special characters and translate the
-- remaining characters via 'escapeMap' (identity for unmapped ones).
escapeSpecial :: String -> String
escapeSpecial = concatMap $ \ c -> if elem c "_%$&{}#" then '\\' : [c] else
  Map.findWithDefault [c] c escapeMap
{- http://dhelta.net/hprojects/HaTeX/code/HaTeX-3.1.0/Text/LaTeX/Base/Syntax.hs
changes _ to \_{} -}

-- | Escape a string for use inside axiom text: LaTeX specials are put
-- into math mode with a backslash, common punctuation is wrapped in
-- @\\Ax{...}@, everything else goes through 'escapeMap'.
escapeLatex :: String -> String
escapeLatex = concatMap $ \ c -> case () of
  ()
    | elem c "_%$&{}#" -> "\\Ax{\\" ++ c : "}"
    | elem c "<|>=-!()[]?:;,./*+@" -> "\\Ax{" ++ c : "}"
    | otherwise -> Map.findWithDefault [c] c escapeMap
-- | Split an axiom string into width-relevant atoms; if parsing fails the
-- whole input is treated as one atom.
parseAxiomString :: String -> [String]
parseAxiomString s = case parse axiomString "" s of
  Left _ -> [s]
  Right l -> l

-- | Parse a complete axiom string as a sequence of atoms.
axiomString :: CharParser st [String]
axiomString = do
  l <- many parseAtom
  eof
  return $ concat l

-- | One atom: a braced group (possibly introduced by @\\Ax{@ or @\\Id{@)
-- whose contents are flattened, a backslash escape (single special
-- character or macro name), a run of letters, or any single character
-- except a closing brace.
parseAtom :: CharParser st [String]
parseAtom = fmap concat
    ((tryString "\\Ax{" <|> tryString "\\Id{" <|> string "{")
     >> many parseAtom << Parsec.char '}')
  <|> do
    b <- Parsec.char '\\'
    s <- fmap (: []) (satisfy (\ c -> isSpace c
                                      || elem c "_~^|\'\",;:.`\\{}[]%$&#()"))
         <|> many1 letter
    return [b : s]
  <|> do
    s <- many1 letter
    return [s]
  <|> do
    c <- satisfy (/= '}')
    return [[c]]
-- | a character map for special latex characters
-- (keys are Latin-1 code points; values are the LaTeX replacements)
escapeMap :: Map.Map Char String
escapeMap = Map.fromList
  [('\\', "\\Ax{\\setminus}"),
   ('^', "\\Ax{\\hat{\\ }}"),
   ('"', "''"),
   ('~', "\\Ax{\\sim}"),
   ('\160', "\\ "),
   ('\162', "\\Id{\\textcent}"),
   ('\164', "\\Id{\\textcurrency}"),
   ('\165', "\\Id{\\textyen}"),
   ('\166', "\\Id{\\textbrokenbar}"),
   ('\170', "\\Id{\\textordfeminine}"),
   ('\171', "\\Id{\\guillemotleft}"),
   ('\172', "\\Ax{\\neg}"),
   ('\173', "-"),
   ('\174', "\\Id{\\textregistered}"),
   ('\175', "\\Ax{\\bar{\\ }}"),
   ('\176', "\\Id{\\textdegree}"),
   ('\177', "\\Ax{\\pm}"),
   ('\178', "\\Ax{^2}"),
   ('\179', "\\Ax{^3}"),
   ('\180', "\\Ax{\\acute{\\ }}"),
   ('\181', "\\Ax{\\mu}"),
   ('\185', "\\Ax{^1}"),
   ('\186', "\\Id{\\textordmasculine}"),
   ('\187', "\\Id{\\guillemotright}"),
   ('\192', "\\Ax{\\grave{A}}"),
   ('\193', "\\Ax{\\acute{A}}"),
   ('\200', "\\Ax{\\grave{E}}"),
   ('\201', "\\Ax{\\acute{E}}"),
   ('\204', "\\Ax{\\grave{I}}"),
   ('\205', "\\Ax{\\acute{I}}"),
   ('\208', "\\Id{\\DH}"),
   ('\210', "\\Ax{\\grave{O}}"),
   ('\211', "\\Ax{\\acute{O}}"),
   ('\215', "\\Ax{\\times}"),
   ('\217', "\\Ax{\\grave{U}}"),
   ('\218', "\\Ax{\\acute{U}}"),
   ('\221', "\\Ax{\\acute{Y}}"),
   ('\222', "\\Id{\\TH}"),
   ('\224', "\\Ax{\\grave{a}}"),
   ('\225', "\\Ax{\\acute{a}}"),
   ('\232', "\\Ax{\\grave{e}}"),
   ('\233', "\\Ax{\\acute{e}}"),
   ('\236', "\\Ax{\\grave{\\Id{\\i}}}"),
   ('\237', "\\Ax{\\acute{\\Id{\\i}}}"),
   ('\240', "\\Id{\\dh}"),
   ('\242', "\\Ax{\\grave{o}}"),
   ('\243', "\\Ax{\\acute{o}}"),
   ('\247', "\\Ax{\\div}"),
   ('\249', "\\Ax{\\grave{u}}"),
   ('\250', "\\Ax{\\acute{u}}"),
   ('\253', "\\Ax{\\acute{y}}"),
   ('\254', "\\Id{\\th}")]

{- acute and grave characters don't work in a tabbing environment
\textcent upto textbrokenbar requires \usepackage{textcomp}
whereas \guillemot, eth, and thorn \usepackage[T1]{fontenc}
-}
| spechub/Hets | Common/LaTeX_funs.hs | gpl-2.0 | 11,851 | 0 | 18 | 3,111 | 2,836 | 1,544 | 1,292 | 256 | 9 |
{- |
Module : ./Static/FromXmlUtils.hs
Description : theory datastructure for development graphs
Copyright : (c) Christian Maeder, Simon Ulbricht, Uni Bremen 20011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : maeder@dfki.de
Stability : provisional
Portability : non-portable(Logic)
theory datastructure for development graphs
-}
module Static.FromXmlUtils where
import Static.GTheory
import Logic.Prover
import Logic.Logic
import Logic.Grothendieck
import Common.AnnoState
import Common.Doc
import Common.ExtSign
import Common.GlobalAnnotations
import Common.Parsec
import Common.Result
import Common.Utils
import Text.ParserCombinators.Parsec
import qualified Data.Set as Set
import qualified Data.Map as Map
-- | Result of extending a theory by a basic spec: either a failure (the
-- 'Bool' is 'True' for fatal errors where the caller should give up) or
-- the new theory together with the number of new sentences, the set of
-- newly introduced symbols, and whether the signature stayed unchanged.
data BasicExtResponse = Failure Bool -- True means fatal (give up)
                      | Success G_theory Int (Set.Set G_symbol) Bool
-- | Parse and analyse a basic-spec string against a theory and extend the
-- theory by the result.  Returns the response together with a string
-- rendering of the new sentences (empty unless the signature is
-- unchanged).  A blank input trivially succeeds with no changes.
extendByBasicSpec :: GlobalAnnos -> String -> G_theory
                  -> (BasicExtResponse, String)
extendByBasicSpec ga str
  gt@(G_theory lid syn eSig@(ExtSign sign syms) si sens _)
  = let tstr = trimLeft str in
    if null tstr then (Success gt 0 Set.empty True, "") else
    case basicSpecParser Nothing lid of
      Nothing -> (Failure True, "missing basic spec parser")
      Just p -> case basic_analysis lid of
        Nothing -> (Failure True, "missing basic analysis")
        Just f -> case runParser (p Map.empty << eof) (emptyAnnos ()) "" tstr of
          Left err -> (Failure False, show err)
          Right bs -> let
            Result ds res = f (bs, sign, ga)
            in case res of
            Just (_, ExtSign sign2 syms2, sens2) | not (hasErrors ds) ->
              let sameSig = sign2 == sign
                  finExtSign = ExtSign sign2 $ Set.union syms syms2
              in
              -- keep the old signature id when nothing changed, otherwise
              -- start fresh; sentences are always joined in
              (Success (G_theory lid syn (if sameSig then eSig else finExtSign)
                         (if sameSig then si else startSigId)
                         (joinSens (toThSens sens2) sens) startThId)
                 (length sens2)
                 (Set.map (G_symbol lid) $ Set.difference syms2 syms)
                 sameSig
              , if sameSig then
                  if null sens2 then "" else
                    show (vcat $ map (print_named lid) sens2)
                else "")
            _ -> (Failure False, showRelDiags 1 ds)
-- | Remove the symbols named in the (comma-separated) string from the
-- signature by taking the cogenerated sub-signature.  A blank string
-- leaves the signature untouched.
deleteHiddenSymbols :: String -> G_sign -> Result G_sign
deleteHiddenSymbols syms gs@(G_sign lid (ExtSign sig _) _) = let
  str = trimLeft syms in if null str then return gs else
  case parse_symb_items lid of
    Nothing -> fail $ "no symbol parser for " ++ language_name lid
    Just sbpa -> case runParser (sepBy1 sbpa anComma << eof)
                   (emptyAnnos ()) "" str of
      Left err -> fail $ show err
      Right sms -> do
        rm <- stat_symb_items lid sig sms
        -- keep only the symbols matched by the parsed items, then hide them
        let sym1 = symset_of lid sig
            sym2 = Set.filter (\ s -> any (matches lid s) rm) sym1
        sig2 <- fmap dom $ cogenerated_sign lid sym2 sig
        return $ G_sign lid (mkExtSign sig2) startSigId
-- | reconstruct the morphism from symbols maps
-- (a blank mapping string yields the identity morphism)
getMorphism :: G_sign
            -> String -- ^ the symbol mappings
            -> Result G_morphism
getMorphism (G_sign lid (ExtSign sig _) _) syms =
  let str = trimLeft syms in
  if null str then return $ mkG_morphism lid $ ide sig else
  case parse_symb_map_items lid of
    Nothing -> fail $ "no symbol map parser for " ++ language_name lid
    Just smpa -> case runParser (sepBy1 smpa anComma << eof)
                   (emptyAnnos ()) "" str of
      Left err -> fail $ show err
      Right sms -> do
        rm <- stat_symb_map_items lid sig Nothing sms
        fmap (mkG_morphism lid) $ induced_from_morphism lid rm sig
-- | get the gmorphism for a gmorphism name
-- (a blank name yields the inclusion of the signature into itself)
translateByGName :: LogicGraph -> G_sign
                 -> String -- ^ the name of the morphism
                 -> Result GMorphism
translateByGName lg gsig gname =
  let str = trim gname in
  if null str then ginclusion lg gsig gsig else do
    cmor <- lookupComorphism str lg
    gEmbedComorphism cmor gsig

-- | get the gmorphism for a gmorphism name with symbols maps
-- (translate by name first, then compose with the induced symbol map)
getGMorphism :: LogicGraph -> G_sign
             -> String -- ^ the name of the gmorphism
             -> String -- ^ the symbol mappings
             -> Result GMorphism
getGMorphism lg gsig gname syms = do
  gmor1 <- translateByGName lg gsig gname
  gmor2 <- fmap gEmbed $ getMorphism (cod gmor1) syms
  composeMorphisms gmor1 gmor2
| spechub/Hets | Static/FromXmlUtils.hs | gpl-2.0 | 4,409 | 0 | 33 | 1,218 | 1,267 | 630 | 637 | 90 | 10 |
{- |
Module : $Header$
Description : Gtk GUI for the consistency checker
Copyright : (c) Thiemo Wiedemeyer, Uni Bremen 2009
License : GPLv2 or higher, see LICENSE.txt
Maintainer : raider@informatik.uni-bremen.de
Stability : provisional
Portability : portable
This module provides a GUI for the consistency checker.
-}
module GUI.GtkConsistencyChecker where
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Glade
import GUI.GtkUtils
import qualified GUI.Glade.NodeChecker as ConsistencyChecker
import GUI.GraphTypes
import Static.DevGraph
import Static.DgUtils
import Static.PrintDevGraph ()
import Static.GTheory
import Static.History
import Interfaces.GenericATPState (guiDefaultTimeLimit)
import Logic.Grothendieck
import Logic.Comorphism (AnyComorphism (..))
import Comorphisms.LogicGraph (logicGraph)
import Common.LibName (LibName)
import Common.Result
import Control.Concurrent (forkIO, killThread)
import Control.Concurrent.MVar
import Control.Monad (foldM_, join, when)
import Proofs.AbstractState
import Proofs.ConsistencyCheck
import Data.Graph.Inductive.Graph (LNode)
import qualified Data.Map as Map
import Data.List
import Data.Maybe
-- | A consistency checker together with the comorphisms it can be used
-- through and the index of the currently selected comorphism.
data Finder = Finder { fName :: String
                     , finder :: G_cons_checker
                     , comorphism :: [AnyComorphism]
                     , selected :: Int }

-- Two finders are equal when name and comorphism list agree; the
-- selected index is deliberately ignored.
instance Eq Finder where
  (==) (Finder { fName = n1, comorphism = c1 })
       (Finder { fName = n2, comorphism = c2 }) = n1 == n2 && c1 == c2

-- | A development-graph node decorated with its sublogic and its current
-- consistency status, for display in the node list.
data FNode = FNode { name :: String
                   , node :: LNode DGNodeLab
                   , sublogic :: G_sublogics
                   , cStatus :: ConsistencyStatus }

-- | Get a markup string containing name and color
instance Show FNode where
  show FNode { name = n, cStatus = s } =
    "<span color=\"" ++ cStatusToColor s ++ "\">" ++ cStatusToPrefix s ++ n ++
    "</span>"

-- Equality is derived from the ordering below.
instance Eq FNode where
  (==) f1 f2 = compare f1 f2 == EQ

-- Order primarily by consistency status, then by name.
instance Ord FNode where
  compare (FNode { name = n1, cStatus = s1 })
          (FNode { name = n2, cStatus = s2 }) = case compare s1 s2 of
    EQ -> compare n1 n2
    c -> c
-- | Displays the consistency checker window
-- When a specific node is given and it has no sentences, only an info
-- dialog is shown (such a node is trivially consistent).
showConsistencyChecker :: Maybe Int -> GInfo -> LibEnv -> IO (Result LibEnv)
showConsistencyChecker mn gi@(GInfo { libName = ln }) le =
  case mn of
    Nothing -> showConsistencyCheckerMain mn gi le
    Just n -> let
      dg = lookupDGraph ln le
      lbl = labDG dg n
      in if case globalTheory lbl of
             Just (G_theory _ _ _ _ sens _) -> Map.null sens
             Nothing -> True
         then do
           infoDialogExt "No sentences" $ "Node " ++
             getDGNodeName lbl
             ++ " has no sentences and is thus trivially consistent"
           return $ return le
         else showConsistencyCheckerMain mn gi le

-- | Displays the consistency checker window
-- Blocks on an 'MVar' until the GUI hands back the (possibly updated)
-- library environment.
showConsistencyCheckerMain :: Maybe Int -> GInfo -> LibEnv -> IO (Result LibEnv)
showConsistencyCheckerMain mn (GInfo { libName = ln }) le = do
  wait <- newEmptyMVar
  showConsistencyCheckerAux wait mn ln le
  le' <- takeMVar wait
  return $ Result [] $ Just le'
-- | Displays the consistency checker window
-- Builds the Glade-based GUI, wires up all button/selection handlers,
-- runs the check on a background thread, and on window destruction marks
-- the checked nodes in the development graph and writes the updated
-- 'LibEnv' into the result MVar.
showConsistencyCheckerAux
  :: MVar LibEnv -> Maybe Int -> LibName -> LibEnv -> IO ()
showConsistencyCheckerAux res mn ln le = postGUIAsync $ do
  xml <- getGladeXML ConsistencyChecker.get
  -- get objects
  window <- xmlGetWidget xml castToWindow "NodeChecker"
  btnClose <- xmlGetWidget xml castToButton "btnClose"
  btnResults <- xmlGetWidget xml castToButton "btnResults"
  -- get nodes view and buttons
  trvNodes <- xmlGetWidget xml castToTreeView "trvNodes"
  btnNodesAll <- xmlGetWidget xml castToButton "btnNodesAll"
  btnNodesNone <- xmlGetWidget xml castToButton "btnNodesNone"
  btnNodesInvert <- xmlGetWidget xml castToButton "btnNodesInvert"
  btnNodesUnchecked <- xmlGetWidget xml castToButton "btnNodesUnchecked"
  btnNodesTimeout <- xmlGetWidget xml castToButton "btnNodesTimeout"
  cbInclThms <- xmlGetWidget xml castToCheckButton "cbInclThms"
  -- get checker view and buttons
  cbComorphism <- xmlGetWidget xml castToComboBox "cbComorphism"
  lblSublogic <- xmlGetWidget xml castToLabel "lblSublogic"
  sbTimeout <- xmlGetWidget xml castToSpinButton "sbTimeout"
  btnCheck <- xmlGetWidget xml castToButton "btnCheck"
  btnStop <- xmlGetWidget xml castToButton "btnStop"
  trvFinder <- xmlGetWidget xml castToTreeView "trvFinder"

  windowSetTitle window "Consistency Checker"
  spinButtonSetValue sbTimeout $ fromIntegral guiDefaultTimeLimit

  let widgets = [ toWidget sbTimeout
                , toWidget cbComorphism
                , toWidget lblSublogic ]
      checkWidgets = widgets ++ [ toWidget btnClose
                                , toWidget btnNodesAll
                                , toWidget btnNodesNone
                                , toWidget btnNodesInvert
                                , toWidget btnNodesUnchecked
                                , toWidget btnNodesTimeout
                                , toWidget btnResults ]
      -- toggle between "ready to check" and "check running"
      switch b = do
        widgetSetSensitive btnStop $ not b
        widgetSetSensitive btnCheck b

  widgetSetSensitive btnStop False
  widgetSetSensitive btnCheck False

  threadId <- newEmptyMVar
  wait <- newEmptyMVar
  mView <- newEmptyMVar

  -- split the graph's nodes into those without sentences (trivially
  -- consistent, only shown in the model view) and the checkable rest
  let dg = lookupDGraph ln le
      nodes = labNodesDG dg
      selNodes = partition (\ (FNode { node = (_, l)}) -> case globalTheory l of
        Just (G_theory _ _ _ _ sens _) -> Map.null sens
        Nothing -> True)
      sls = map sublogicOfTh $ mapMaybe (globalTheory . snd) nodes
      (emptyNodes, others) = selNodes
        $ map (\ (n@(_, l), s) -> FNode (getDGNodeName l) n s
                 $ getConsistencyOf l)
        $ zip nodes sls

  -- setup data
  listNodes <- setListData trvNodes show $ sort others
  listFinder <- setListData trvFinder fName []

  -- setup comorphism combobox
  comboBoxSetModelText cbComorphism
  shC <- after cbComorphism changed
    $ setSelectedComorphism trvFinder listFinder cbComorphism

  -- setup view selection actions
  let update = do
        mf <- getSelectedSingle trvFinder listFinder
        updateComorphism trvFinder listFinder cbComorphism shC
        widgetSetSensitive btnCheck $ isJust mf
  setListSelectorSingle trvFinder update

  -- refresh sublogic label and finder list whenever the node selection
  -- changes; lock the widgets when no common sublogic exists
  let upd = updateNodes trvNodes listNodes
        (\ b s -> do
           labelSetLabel lblSublogic $ show s
           updateFinder trvFinder listFinder b s)
        (do
           labelSetLabel lblSublogic "No sublogic"
           listStoreClear listFinder
           activate widgets False
           widgetSetSensitive btnCheck False)
        (activate widgets True >> widgetSetSensitive btnCheck True)

  shN <- setListSelectorMultiple trvNodes btnNodesAll btnNodesNone
    btnNodesInvert upd

  -- bindings
  -- select exactly those rows satisfying the predicate, with the
  -- selection-changed handler blocked while rewriting the selection
  let selectWithAux f u = do
        signalBlock shN
        sel <- treeViewGetSelection trvNodes
        treeSelectionSelectAll sel
        rs <- treeSelectionGetSelectedRows sel
        mapM_ ( \ ~p@(row : []) -> do
          fn <- listStoreGetValue listNodes row
          (if f fn then treeSelectionSelectPath else treeSelectionUnselectPath)
            sel p) rs
        signalUnblock shN
        u
      selectWith f = selectWithAux $ f . cStatus

  onClicked btnNodesUnchecked
    $ selectWith (== ConsistencyStatus CSUnchecked "") upd
  onClicked btnNodesTimeout $ selectWith (== ConsistencyStatus CSTimeout "") upd
  onClicked btnResults $ showModelView mView "Models" listNodes emptyNodes
  onClicked btnClose $ widgetDestroy window
  onClicked btnStop $ takeMVar threadId >>= killThread >>= putMVar wait

  onClicked btnCheck $ do
    activate checkWidgets False
    timeout <- spinButtonGetValueAsInt sbTimeout
    inclThms <- toggleButtonGetActive cbInclThms
    (updat, pexit) <- progressBar "Checking consistency" "please wait..."
    nodes' <- getSelectedMultiple trvNodes listNodes
    mf <- getSelectedSingle trvFinder listFinder
    f <- case mf of
      Nothing -> error "Consistency checker: internal error"
      Just (_, f) -> return f
    switch False
    -- run the actual check in the background; 'wait' is signalled when
    -- it finishes (or when the stop button kills the thread)
    tid <- forkIO $ do
      check False inclThms ln le dg f timeout listNodes updat nodes'
      putMVar wait ()
    putMVar threadId tid
    forkIO_ $ do
      takeMVar wait
      postGUIAsync $ do
        switch True
        tryTakeMVar threadId
        showModelView mView "Results of consistency check" listNodes emptyNodes
        signalBlock shN
        sortNodes trvNodes listNodes
        signalUnblock shN
        upd
        activate checkWidgets True
        pexit

  onDestroy window $ do
    nodes' <- listStoreToList listNodes
    -- record the final (in)consistency verdicts in the development graph
    let changes = foldl (\ cs (FNode { node = (i, l), cStatus = s }) ->
          if (\ st -> st /= CSConsistent && st /= CSInconsistent)
             $ sType s then cs
          else
            let n = (i, if sType s == CSInconsistent then
                          markNodeInconsistent "" l
                        else markNodeConsistent "" l)
            in SetNodeLab l n : cs
          ) [] nodes'
        dg' = changesDGH dg changes
    putMVar res $ Map.insert ln (groupHistory dg (DGRule "Consistency") dg') le

  -- preselect either the given node or all still unchecked nodes
  selectWithAux (maybe ((== ConsistencyStatus CSUnchecked "") . cStatus)
                   (\ n -> (== n) . fst . node) mn) upd
  widgetShow window
-- | Re-sort the node list (status first, then name) while preserving the
-- current row selection by re-selecting the moved rows by name.
sortNodes :: TreeView -> ListStore FNode -> IO ()
sortNodes trvNodes listNodes = do
  sel <- getSelectedMultiple trvNodes listNodes
  nodes <- listStoreToList listNodes
  let sn = sort nodes
  updateListData listNodes sn
  selector <- treeViewGetSelection trvNodes
  mapM_ (\ (_, FNode { name = n }) -> treeSelectionSelectPath selector
          [fromMaybe (error "Node not found!") $ findIndex ((n ==) . name) sn]
        ) sel
-- | Called when node selection is changed. Updates finder list.
-- Runs @lock@ when nothing is selected or the selected nodes have no
-- common sublogic; otherwise runs @unlock@ and passes the joined
-- sublogic (and whether exactly one node is selected) to @update@.
updateNodes :: TreeView -> ListStore FNode -> (Bool -> G_sublogics -> IO ())
  -> IO () -> IO () -> IO ()
updateNodes view listNodes update lock unlock = do
  nodes <- getSelectedMultiple view listNodes
  -- head/tail below are safe: the null case is handled first
  if null nodes then lock
    else let sls = map (sublogic . snd) nodes in
      -- fold all sublogics into their join; Nothing means incompatible
      maybe lock (\ sl -> unlock >> update (length nodes == 1) sl)
      $ foldl (\ ma b -> case ma of
          Just a -> joinSublogics b a
          Nothing -> Nothing) (Just $ head sls) $ tail sls
-- | Update the list of finder.  Collects the consistency checkers
-- applicable to the given sublogic, groups them by name (accumulating
-- every comorphism path per checker), and refreshes the list only when
-- it actually changed, trying to keep the previous selection.
updateFinder :: TreeView -> ListStore Finder -> Bool -> G_sublogics -> IO ()
updateFinder view list useNonBatch sl = do
  old <- listStoreToList list
  cs <- getConsCheckers $ findComorphismPaths logicGraph sl
  -- group checkers by name; non-batch checkers are dropped unless
  -- explicitly requested via useNonBatch
  let new = Map.elems $ foldr (\ (cc, c) m ->
        let n = getCcName cc
            f = Map.findWithDefault (Finder n cc [] 0) n m
        in Map.insert n (f { comorphism = c : comorphism f}) m) Map.empty
        $ (if useNonBatch then id else filter (getCcBatch . fst))
        cs
  when (old /= new) $ do
    -- update list and try to select previous finder
    selected' <- getSelectedSingle view list
    sel <- treeViewGetSelection view
    listStoreClear list
    mapM_ (listStoreAppend list) $ mergeFinder old new
    maybe (selectFirst view)
      (\ (_, f) -> let i = findIndex ((fName f ==) . fName) new in
        maybe (selectFirst view) (treeSelectionSelectPath sel . (: [])) i
      ) selected'
-- | Try to select previous selected comorphism if possible.
-- For every old finder still present in the new list, carry over its
-- comorphism selection by locating the previously chosen comorphism in
-- the new finder's list (falling back to index 0 when it is gone).
mergeFinder :: [Finder] -> [Finder] -> [Finder]
mergeFinder old new = let m' = Map.fromList $ map (\ f -> (fName f, f)) new in
  Map.elems $ foldl (\ m (Finder { fName = n, comorphism = cc, selected = i}) ->
    case Map.lookup n m of
      Nothing -> m
      -- NOTE(review): cc !! i assumes the stored 'selected' index is in
      -- range for the old finder's comorphism list -- confirm invariant
      Just f@(Finder { comorphism = cc' }) -> let c = cc !! i in
        Map.insert n (f { selected = fromMaybe 0 $ elemIndex c cc' }) m
    ) m' old
-- | Run the consistency checker over the given nodes, updating the
-- progress callback and writing each result back into the list store.
-- When the first flag is set, each result is inverted with 'cInvert'
-- (disproving instead of proving).
check :: Bool -> Bool -> LibName -> LibEnv -> DGraph -> Finder -> Int
  -> ListStore FNode -> (Double -> String -> IO ()) -> [(Int, FNode)]
  -> IO ()
check dispr inclThms ln le dg (Finder _ cc cs i) timeout listNodes update
  nodes = let
  count' = fromIntegral $ length nodes
  -- NOTE(review): cs !! i assumes the selected comorphism index is in
  -- range -- confirm against the GUI invariants
  c = cs !! i in
  foldM_ (\ count (row, fn@(FNode { name = n', node = n })) -> do
    -- GUI updates must happen on the GTK thread, hence postGUISync
    postGUISync $ update (count / count') n'
    res <- consistencyCheck inclThms cc c ln le dg n timeout
    let res' = if dispr then cInvert res else res
    postGUISync $ listStoreSetValue listNodes row fn { cStatus = res' }
    return $ count + 1) 0 nodes
-- | Refill the comorphism combo box from the currently selected finder
-- and restore that finder's stored selection.
updateComorphism :: TreeView -> ListStore Finder -> ComboBox
                 -> ConnectId ComboBox -> IO ()
updateComorphism view list cbComorphism sh = do
  -- block the change handler while the combo box is repopulated
  signalBlock sh
  model <- comboBoxGetModelText cbComorphism
  listStoreClear model
  mfinder <- getSelectedSingle view list
  case mfinder of
    Just (_, f) -> do
      mapM_ (comboBoxAppendText cbComorphism) $ expand f
      comboBoxSetActive cbComorphism $ selected f
    Nothing -> return ()
  signalUnblock sh
-- | Render every comorphism path of a finder for display.
expand :: Finder -> [String]
expand finder = [ show c | c <- comorphism finder ]
-- | Store the comorphism index chosen in the combo box into the
-- currently selected finder's 'selected' field.
setSelectedComorphism :: TreeView -> ListStore Finder -> ComboBox -> IO ()
setSelectedComorphism view list cbComorphism = do
  mfinder <- getSelectedSingle view list
  case mfinder of
    Just (i, f) -> do
      sel <- comboBoxGetActive cbComorphism
      listStoreSetValue list i f { selected = sel }
    Nothing -> return ()
-- | Displays the model view window.
-- Builds the window from the Glade description, shows all nodes that
-- have been checked (status other than CSUnchecked) and stores a
-- refresh action in the lock 'MVar'; 'showModelView' runs that action
-- instead of opening a second window.
showModelViewAux :: MVar (IO ()) -> String -> ListStore FNode -> [FNode]
  -> IO ()
showModelViewAux lock title list other = do
  xml <- getGladeXML ConsistencyChecker.get
  -- get objects
  window <- xmlGetWidget xml castToWindow "ModelView"
  btnClose <- xmlGetWidget xml castToButton "btnResClose"
  frNodes <- xmlGetWidget xml castToFrame "frResNodes"
  trvNodes <- xmlGetWidget xml castToTreeView "trvResNodes"
  tvModel <- xmlGetWidget xml castToTextView "tvResModel"
  windowSetTitle window title
  -- setup text view: monospace font over the whole buffer
  buffer <- textViewGetBuffer tvModel
  textBufferInsertAtCursor buffer ""
  tagTable <- textBufferGetTagTable buffer
  font <- textTagNew Nothing
  set font [ textTagFont := "FreeMono" ]
  textTagTableAdd tagTable font
  start <- textBufferGetStartIter buffer
  end <- textBufferGetEndIter buffer
  textBufferApplyTag buffer font start end
  -- setup list view: only checked nodes are listed
  let filterNodes = filter ((/= ConsistencyStatus CSUnchecked "") . cStatus)
  nodes <- listStoreToList list
  listNodes <- setListData trvNodes show $ sort $ filterNodes $ other ++ nodes
  -- selecting a node shows its consistency status in the text view
  setListSelectorSingle trvNodes $ do
    mn <- getSelectedSingle trvNodes listNodes
    case mn of
      Nothing -> textBufferSetText buffer ""
      Just (_, n) -> textBufferSetText buffer $ show $ cStatus n
  -- setup actions
  onClicked btnClose $ widgetDestroy window
  -- emptying the lock marks the window as closed again
  onDestroy window $ takeMVar lock >>= const (return ())
  -- the stored action refreshes the list and tries to keep the selection
  putMVar lock $ do
    sel' <- getSelectedSingle trvNodes listNodes
    sel <- treeViewGetSelection trvNodes
    nodes'' <- listStoreToList list
    let nodes' = sort $ filterNodes nodes''
    updateListData listNodes $ sort (other ++ nodes')
    maybe (selectFirst trvNodes) (treeSelectionSelectPath sel . (: []))
      $ maybe Nothing (\ (_, n) -> findIndex ((name n ==) . name) nodes') sel'
  selectFirst trvNodes
  widgetSetSizeRequest window 800 600
  widgetSetSizeRequest frNodes 250 (-1)
  widgetShow window
-- | Display the model view window.  If the window is already open (the
-- lock 'MVar' is full) the stored refresh action is executed instead of
-- opening a second window.
showModelView :: MVar (IO ()) -> String -> ListStore FNode -> [FNode] -> IO ()
showModelView lock title list other =
  isEmptyMVar lock >>= \ notOpen ->
    if notOpen
      then showModelViewAux lock title list other
      else readMVar lock >>= id
| mariefarrell/Hets | GUI/GtkConsistencyChecker.hs | gpl-2.0 | 15,640 | 307 | 20 | 4,036 | 4,019 | 2,155 | 1,864 | 331 | 6 |
-- | Initialize a database, populating it with "freshdb.json" if needed
module Lamdu.Data.Db.Init
( initDb
) where
import qualified Data.Property as Property
import qualified GUI.Momentu.State as M
import qualified Lamdu.Data.Anchors as Anchors
import Lamdu.Data.Db.Layout (DbM)
import qualified Lamdu.Data.Db.Layout as DbLayout
import qualified Lamdu.GUI.WidgetIdIRef as WidgetIdIRef
import qualified Lamdu.GUI.WidgetIds as WidgetIds
import Revision.Deltum.Rev.Branch (Branch)
import qualified Revision.Deltum.Rev.Branch as Branch
import Revision.Deltum.Rev.Version (Version)
import qualified Revision.Deltum.Rev.Version as Version
import qualified Revision.Deltum.Rev.View as View
import Revision.Deltum.Transaction (Transaction)
import qualified Revision.Deltum.Transaction as Transaction
import Lamdu.Prelude
-- Shorthand for 'Transaction' used throughout this module.
type T = Transaction
-- | Associate a human-readable name with a branch.
setName :: Monad m => Branch m -> Text -> T m ()
setName branch name = Property.setP (Anchors.assocBranchNameRef branch) name
-- | Create a new branch at the given version and give it a name.
newBranch :: Monad m => Text -> Version m -> T m (Branch m)
newBranch name ver =
    Branch.new ver >>= \ branch ->
    setName branch name >> pure branch
-- | Initialize a fresh database: create the initial empty version, a
-- "master" branch and view, write all code/GUI anchors, run the given
-- import action, and finally forbid undoing into the empty revision.
initDb :: Transaction.Store DbM -> T DbLayout.ViewM () -> IO ()
initDb db importAct =
    DbLayout.runDbTransaction db $
    do
        emptyVersion <- Version.makeInitialVersion []
        master <- newBranch "master" emptyVersion
        view <- View.new master
        Transaction.writeIRef DbLayout.guiState (M.GUIState WidgetIds.defaultCursor mempty)
        Transaction.writeIRef DbLayout.dbSchemaVersion DbLayout.curDbSchemaVersion
        -- revision-level anchors (branch bookkeeping)
        let writeRevAnchor f = Transaction.writeIRef (f DbLayout.revisionIRefs)
        writeRevAnchor DbLayout.view view
        writeRevAnchor DbLayout.branches [master]
        writeRevAnchor DbLayout.currentBranch master
        writeRevAnchor DbLayout.redos []
        let paneWId = WidgetIdIRef.fromIRef $ DbLayout.panes DbLayout.codeIRefs
        let initGuiState = M.GUIState paneWId mempty
        DbLayout.runViewTransaction view $
            do
                -- code- and GUI-level anchors start out empty
                let writeCodeAnchor f = Transaction.writeIRef (f DbLayout.codeIRefs)
                let writeGuiAnchor f = Transaction.writeIRef (f DbLayout.guiIRefs)
                writeCodeAnchor DbLayout.globals mempty
                writeCodeAnchor DbLayout.panes mempty
                writeCodeAnchor DbLayout.tids mempty
                writeCodeAnchor DbLayout.tags mempty
                writeGuiAnchor DbLayout.preJumps []
                writeGuiAnchor DbLayout.preGuiState initGuiState
                writeGuiAnchor DbLayout.postGuiState initGuiState
        importAct
        -- Prevent undo into the invalid empty revision
        newVer <- Branch.curVersion master
        Version.preventUndo newVer
| lamdu/lamdu | src/Lamdu/Data/Db/Init.hs | gpl-3.0 | 2,792 | 0 | 17 | 657 | 671 | 343 | 328 | 56 | 1 |
module Main where
import Song
import Grammar
import Util
import System.Random.MWC.Monad
import System.IO
import Data.List.Split(splitEvery)
import Data.Time
import System.Locale
import System.Process
import System.Directory
import System.FilePath
import Control.Monad
import Haskore.Interface.MIDI.Render as R
import Haskore.Music.GeneralMIDI as MIDI hiding (drum)
import Haskore.Music as M
import Haskore.Basic.Duration as D
import Haskore.Melody
-- Temporary MIDI file rendered before synthesis.
midiFile = "input.mid"
-- SoundFont used by fluidsynth for audio rendering.
soundfont = "/usr/share/soundfonts/FluidR3_GM2-2.sf2"
-- Extension / format of the rendered audio files.
fileFormat = "wav"
-- General-MIDI instruments for which sample tones are generated.
instruments = [Piccolo, Flute, AcousticGuitarNylon, SynthVoice, DistortionGuitar, ElectricPiano1,
               BlownBottle, Celesta, ChurchOrgan, Clarinet, Glockenspiel, Ocarina, OverdrivenGuitar, PizzicatoStrings]
-- instrument =
-- | Build a melody playing every combination of note, octave and
-- duration on the given instrument, in octave-major order.
samplePiece allNotes allOct allDur instrument = line $ map (MIDI.fromMelodyNullAttr instrument) notes -- (Prelude.take 10 notes)
  where notes = [ n oct dur () | oct <- allOct, n <- allNotes, dur <- allDur]
-- allNotes =
-- [c,
-- d,
-- e,
-- f,
-- g,
-- a,
-- b]
-- allNotes = [
-- cf, c, cs,
-- df, d, ds,
-- ef, e, es,
-- ff, f, fs,
-- gf, g, gs,
-- af, a, as,
-- bf, b, bs]
-- | Apply the note constructor to the duration only; the octave and
-- attribute arguments are ignored.
res noteCon _oct dur _attr = noteCon dur
-- allOct = [0..4]
-- allDur = [qn] -- wn,wn,wn,hn,hn,hn,hn,qn,qn,qn,en,en,en]
-- | For every instrument, render each note of the C-major scale in
-- octaves 0..4 to its own audio file, then render one file containing
-- the complete scale.  Rendering goes through a temporary MIDI file
-- that fluidsynth converts to audio.
main :: IO ()
main = do
  forM_ instruments $ \ instrument -> do
    let allNotes =
          [c,
           d,
           e,
           f,
           g,
           a,
           b]
        dur = qn/6
    forM_ [0..4] $ \ octave -> do
      forM_ (zip allNotes [0..]) $ \ (note,noteName) -> do
        let outputFile = outputDirectory </>
              (show instrument) ++ "_oct_" ++ (show octave) ++ "_note_" ++ (show noteName) ++ "_dur_" ++ (show dur) <.> fileFormat
            outputDirectory = "output" </> (show instrument)
            piece :: MusicPiece
            piece = samplePiece [note] [octave] [dur] instrument
        R.fileFromGeneralMIDIMusic midiFile piece
        createDirectoryIfMissing True outputDirectory
        -- NOTE(review): rawSystem's exit code is ignored; a fluidsynth
        -- failure goes unnoticed
        rawSystem "fluidsynth" ["-g", "5", soundfont, midiFile, ("-F"++outputFile)]
        return ()
    do
      -- one file with all notes over all octaves for this instrument
      let outputFile = "output" </> (show instrument) ++ "_complete" <.> fileFormat
      R.fileFromGeneralMIDIMusic midiFile (samplePiece allNotes [0..4] [dur] instrument)
      rawSystem "fluidsynth" ["-g", "0.5", soundfont, midiFile, ("-F"++outputFile)]
      return ()
| Tener/ProCoGraM | src/GenerateSampleTones.hs | bsd-3-clause | 2,610 | 0 | 31 | 777 | 665 | 375 | 290 | 54 | 1 |
module GHC.Primitives where
import Utilities
-- | Primitive arithmetic and comparison operators.
data PrimOp = Add | Subtract | Multiply | Divide | Modulo | Equal | LessThan | LessThanEqual
            deriving (Eq, Ord, Show)
-- | Literal values; currently only integers.
data Literal = Int Integer
             deriving (Eq, Show)
-- | Render primitive operators in Haskell-source notation.
instance Pretty PrimOp where
  pPrint op = text $ case op of
    Add           -> "(+)"
    Subtract      -> "(-)"
    Multiply      -> "(*)"
    Divide        -> "div"
    Modulo        -> "mod"
    Equal         -> "(==)"
    LessThan      -> "(<)"
    LessThanEqual -> "(<=)"
-- At the Haskell output level an integer literal is annotated with its
-- type (":: Int") and parenthesised in application contexts; at other
-- levels it is printed plainly.
instance Pretty Literal where
  pPrintPrec level prec (Int i) | level == haskellLevel = prettyParen (prec >= appPrec) $ pPrintPrec level appPrec i <+> text ":: Int"
                                | otherwise = pPrintPrec level prec i
| beni55/cps-core | GHC/Primitives.hs | bsd-3-clause | 827 | 0 | 11 | 273 | 248 | 124 | 124 | 18 | 0 |
{-# LANGUAGE ParallelArrays #-}
{-# OPTIONS -fvectorise #-}
module Vectorised (sumSq) where
import Data.Array.Parallel
import Data.Array.Parallel.Prelude.Int as I
import Data.Array.Parallel.Prelude.Double as D
import qualified Prelude
-- | Sum of squares 1^2 + 2^2 + ... + n^2 over parallel arrays.
-- NOINLINE keeps the vectorised (DPH) code from being inlined away.
sumSq :: Int -> Double
{-# NOINLINE sumSq #-}
sumSq n
        = D.sumP
        (mapP (\x -> x D.* x)
        (mapP D.fromInt
        (enumFromToP 1 n)))
module Main (main) where
import Text.ParserCombinators.Parsec ( parse )
import Text.ParserCombinators.Parsec.Rfc2821
-- Read an SMTP command from standard input, parse it,
-- return the result, and loop until EOF.
-- | Read SMTP commands from standard input, parse each line (with the
-- CRLF terminator restored), and print every parse result until EOF.
main :: IO ()
main =
  getContents >>=
    mapM_ (print . parse smtpCmd "" . (++ "\r\n")) . lines
-- |
-- Module : Crypto.Number.Prime
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : Good
{-# LANGUAGE BangPatterns #-}
module Crypto.Number.Prime
(
generatePrime
, generateSafePrime
, isProbablyPrime
, findPrimeFrom
, findPrimeFromWith
, primalityTestMillerRabin
, primalityTestNaive
, primalityTestFermat
, isCoprime
) where
import Crypto.Number.Compat
import Crypto.Number.Generate
import Crypto.Number.Basic (sqrti, gcde)
import Crypto.Number.ModArithmetic (expSafe)
import Crypto.Random.Types
import Crypto.Random.Probabilistic
import Crypto.Error
import Data.Bits
-- | Returns if the number is probably prime.
-- First a list of small primes are implicitely tested for divisibility,
-- then a fermat primality test is used with arbitrary numbers and
-- then the Miller Rabin algorithm is used with an accuracy of 30 recursions.
isProbablyPrime :: Integer -> Bool
isProbablyPrime !n
    -- divisible by a small prime strictly below n => composite
    | any (\p -> p `divides` n) (filter (< n) firstPrimes) = False
    -- within the range of 'firstPrimes' (up to 2903) and not divisible
    -- by any smaller one => n is itself one of the small primes
    | n >= 2 && n <= 2903 = True
    -- cheap Fermat rejection first, then Miller-Rabin for confidence
    | primalityTestFermat 50 (n `div` 2) n = primalityTestMillerRabin 30 n
    | otherwise = False
-- | Generate a prime number of the required bitsize (i.e. in the range
-- [2^(b-1)+2^(b-2), 2^b)).
--
-- May throw a 'CryptoError_PrimeSizeInvalid' if the requested size is less
-- than 5 bits, as the smallest prime meeting these conditions is 29.
-- This function requires that the two highest bits are set, so that when
-- multiplied with another prime to create a key, it is guaranteed to be of
-- the proper size.
generatePrime :: MonadRandom m => Int -> m Integer
generatePrime bits = do
    if bits < 5 then
        throwCryptoError $ CryptoFailed $ CryptoError_PrimeSizeInvalid
      else do
        sp <- generateParams bits (Just SetTwoHighest) True
        let prime = findPrimeFrom sp
        -- the prime search may overshoot the requested bit size;
        -- retry with a fresh random starting point if it did
        if prime < 1 `shiftL` bits then
            return $ prime
          else generatePrime bits
-- | Generate a prime number of the form 2p+1 where p is also prime.
-- it is also knowed as a Sophie Germaine prime or safe prime.
--
-- The number of safe prime is significantly smaller to the number of prime,
-- as such it shouldn't be used if this number is supposed to be kept safe.
--
-- May throw a 'CryptoError_PrimeSizeInvalid' if the requested size is less than
-- 6 bits, as the smallest safe prime with the two highest bits set is 59.
generateSafePrime :: MonadRandom m => Int -> m Integer
generateSafePrime bits = do
    if bits < 6 then
        throwCryptoError $ CryptoFailed $ CryptoError_PrimeSizeInvalid
      else do
        sp <- generateParams bits (Just SetTwoHighest) True
        -- find p such that both p and 2p+1 are (probably) prime
        let p = findPrimeFromWith (\i -> isProbablyPrime (2*i+1)) (sp `div` 2)
        let val = 2 * p + 1
        -- retry with fresh randomness if the safe prime overshoots
        if val < 1 `shiftL` bits then
            return $ val
          else generateSafePrime bits
-- | Find the first number @>= n@ that is probably prime and satisfies
-- the given property.  Even starting points are bumped to the next odd
-- number, after which the search steps through odd candidates only.
findPrimeFromWith :: (Integer -> Bool) -> Integer -> Integer
findPrimeFromWith prop !n
    | even n                      = findPrimeFromWith prop (n + 1)
    | isProbablyPrime n && prop n = n
    | otherwise                   = findPrimeFromWith prop (n + 2)
-- | Find a prime from a starting point with no specific property.
-- Uses GMP's next-prime primitive when available, otherwise falls back
-- to the portable search with a trivially true property.
findPrimeFrom :: Integer -> Integer
findPrimeFrom n =
    case gmpNextPrime n of
        GmpSupported p -> p
        GmpUnsupported -> findPrimeFromWith (\_ -> True) n
-- | Miller Rabin algorithm return if the number is probably prime or composite.
-- the tries parameter is the number of recursion, that determines the accuracy of the test.
-- Delegates to GMP when available; otherwise runs the probabilistic
-- algorithm with 'tries' random witnesses in [2, n-2].
primalityTestMillerRabin :: Int -> Integer -> Bool
primalityTestMillerRabin tries !n =
    case gmpTestPrimeMillerRabin tries n of
        GmpSupported b -> b
        GmpUnsupported -> probabilistic run
  where
    run
        | n <= 3 = error "Miller-Rabin requires tested value to be > 3"
        | even n = return False
        | tries <= 0 = error "Miller-Rabin tries need to be > 0"
        | otherwise = loop <$> generateTries tries
    !nm1 = n-1
    !nm2 = n-2
    (!s,!d) = (factorise 0 nm1)
    -- draw 'tries' independent random witnesses in [2, n-2]
    generateTries 0 = return []
    generateTries t = do
        v <- generateBetween 2 nm2
        vs <- generateTries (t-1)
        return (v:vs)
    -- factorise n-1 into the form 2^s*d
    factorise :: Integer -> Integer -> (Integer, Integer)
    factorise !si !vi
        | vi `testBit` 0 = (si, vi)
        | otherwise = factorise (si+1) (vi `shiftR` 1) -- probably faster to not shift v continuously, but just once.
    expmod = expSafe
    -- when iteration reach zero, we have a probable prime
    loop [] = True
    loop (w:ws) = let x = expmod w d n
                  in if x == (1 :: Integer) || x == nm1
                     then loop ws
                     else loop' ws ((x*x) `mod` n) 1
    -- loop from 1 to s-1. if we reach the end then it's composite
    loop' ws !x2 !r
        | r == s = False
        | x2 == 1 = False
        | x2 /= nm1 = loop' ws ((x2*x2) `mod` n) (r+1)
        | otherwise = loop ws
{-
n < z -> witness to test
1373653 [2,3]
9080191 [31,73]
4759123141 [2,7,61]
2152302898747 [2,3,5,7,11]
3474749660383 [2,3,5,7,11,13]
341550071728321 [2,3,5,7,11,13,17]
-}
-- | Probabilistic test using the Fermat primality test.
--
-- Beware of Carmichael numbers, which are Fermat liars: this test alone
-- is useless for them.  Always combine it with another test, as
-- 'isProbablyPrime' does with Miller-Rabin.
primalityTestFermat :: Int     -- ^ number of iterations of the algorithm
                    -> Integer -- ^ starting a
                    -> Integer -- ^ number to test for primality
                    -> Bool
-- NOTE(review): the enumeration [a .. a+n] tests n+1 witnesses, one
-- more than "iterations" suggests; kept as-is for exact behavioural
-- compatibility.
primalityTestFermat n a p = all expTest [a .. a + fromIntegral n]
  where !pm1 = p-1
        -- Fermat's little theorem: i^(p-1) == 1 (mod p) for prime p
        expTest i = expSafe i pm1 p == 1
-- | Naive trial-division primality test.  Even numbers are rejected up
-- front, and candidate divisors step through odd values only, stopping
-- once the divisor exceeds sqrt n.
primalityTestNaive :: Integer -> Bool
primalityTestNaive n
    | n <= 1    = False
    | n == 2    = True
    | even n    = False
    | otherwise = go 3
  where !limit = snd $ sqrti n
        go !i
            | i > limit     = True
            | i `divides` n = False
            | otherwise     = go (i+2)
-- | Check whether two integers are coprime, i.e. their gcd is 1.
isCoprime :: Integer -> Integer -> Bool
isCoprime m n = let (_, _, d) = gcde m n in d == 1
-- | List of the first primes till 2903.
-- NOTE: 'isProbablyPrime' hard-codes the upper bound 2903; keep the
-- two in sync if this table is ever extended.
firstPrimes :: [Integer]
firstPrimes =
    [ 2 , 3 , 5 , 7 , 11 , 13 , 17 , 19 , 23 , 29
    , 31 , 37 , 41 , 43 , 47 , 53 , 59 , 61 , 67 , 71
    , 73 , 79 , 83 , 89 , 97 , 101 , 103 , 107 , 109 , 113
    , 127 , 131 , 137 , 139 , 149 , 151 , 157 , 163 , 167 , 173
    , 179 , 181 , 191 , 193 , 197 , 199 , 211 , 223 , 227 , 229
    , 233 , 239 , 241 , 251 , 257 , 263 , 269 , 271 , 277 , 281
    , 283 , 293 , 307 , 311 , 313 , 317 , 331 , 337 , 347 , 349
    , 353 , 359 , 367 , 373 , 379 , 383 , 389 , 397 , 401 , 409
    , 419 , 421 , 431 , 433 , 439 , 443 , 449 , 457 , 461 , 463
    , 467 , 479 , 487 , 491 , 499 , 503 , 509 , 521 , 523 , 541
    , 547 , 557 , 563 , 569 , 571 , 577 , 587 , 593 , 599 , 601
    , 607 , 613 , 617 , 619 , 631 , 641 , 643 , 647 , 653 , 659
    , 661 , 673 , 677 , 683 , 691 , 701 , 709 , 719 , 727 , 733
    , 739 , 743 , 751 , 757 , 761 , 769 , 773 , 787 , 797 , 809
    , 811 , 821 , 823 , 827 , 829 , 839 , 853 , 857 , 859 , 863
    , 877 , 881 , 883 , 887 , 907 , 911 , 919 , 929 , 937 , 941
    , 947 , 953 , 967 , 971 , 977 , 983 , 991 , 997 , 1009 , 1013
    , 1019 , 1021 , 1031 , 1033 , 1039 , 1049 , 1051 , 1061 , 1063 , 1069
    , 1087 , 1091 , 1093 , 1097 , 1103 , 1109 , 1117 , 1123 , 1129 , 1151
    , 1153 , 1163 , 1171 , 1181 , 1187 , 1193 , 1201 , 1213 , 1217 , 1223
    , 1229 , 1231 , 1237 , 1249 , 1259 , 1277 , 1279 , 1283 , 1289 , 1291
    , 1297 , 1301 , 1303 , 1307 , 1319 , 1321 , 1327 , 1361 , 1367 , 1373
    , 1381 , 1399 , 1409 , 1423 , 1427 , 1429 , 1433 , 1439 , 1447 , 1451
    , 1453 , 1459 , 1471 , 1481 , 1483 , 1487 , 1489 , 1493 , 1499 , 1511
    , 1523 , 1531 , 1543 , 1549 , 1553 , 1559 , 1567 , 1571 , 1579 , 1583
    , 1597 , 1601 , 1607 , 1609 , 1613 , 1619 , 1621 , 1627 , 1637 , 1657
    , 1663 , 1667 , 1669 , 1693 , 1697 , 1699 , 1709 , 1721 , 1723 , 1733
    , 1741 , 1747 , 1753 , 1759 , 1777 , 1783 , 1787 , 1789 , 1801 , 1811
    , 1823 , 1831 , 1847 , 1861 , 1867 , 1871 , 1873 , 1877 , 1879 , 1889
    , 1901 , 1907 , 1913 , 1931 , 1933 , 1949 , 1951 , 1973 , 1979 , 1987
    , 1993 , 1997 , 1999 , 2003 , 2011 , 2017 , 2027 , 2029 , 2039 , 2053
    , 2063 , 2069 , 2081 , 2083 , 2087 , 2089 , 2099 , 2111 , 2113 , 2129
    , 2131 , 2137 , 2141 , 2143 , 2153 , 2161 , 2179 , 2203 , 2207 , 2213
    , 2221 , 2237 , 2239 , 2243 , 2251 , 2267 , 2269 , 2273 , 2281 , 2287
    , 2293 , 2297 , 2309 , 2311 , 2333 , 2339 , 2341 , 2347 , 2351 , 2357
    , 2371 , 2377 , 2381 , 2383 , 2389 , 2393 , 2399 , 2411 , 2417 , 2423
    , 2437 , 2441 , 2447 , 2459 , 2467 , 2473 , 2477 , 2503 , 2521 , 2531
    , 2539 , 2543 , 2549 , 2551 , 2557 , 2579 , 2591 , 2593 , 2609 , 2617
    , 2621 , 2633 , 2647 , 2657 , 2659 , 2663 , 2671 , 2677 , 2683 , 2687
    , 2689 , 2693 , 2699 , 2707 , 2711 , 2713 , 2719 , 2729 , 2731 , 2741
    , 2749 , 2753 , 2767 , 2777 , 2789 , 2791 , 2797 , 2801 , 2803 , 2819
    , 2833 , 2837 , 2843 , 2851 , 2857 , 2861 , 2879 , 2887 , 2897 , 2903
    ]
-- | @divides d x@ is 'True' when @d@ evenly divides @x@.
{-# INLINE divides #-}
divides :: Integer -> Integer -> Bool
divides d x = x `mod` d == 0
| vincenthz/cryptonite | Crypto/Number/Prime.hs | bsd-3-clause | 9,982 | 0 | 19 | 3,223 | 2,763 | 1,622 | 1,141 | 161 | 5 |
-- |
-- Copyright : Anders Claesson 2017
-- Maintainer : Anders Claesson <anders.claesson@gmail.com>
-- License : BSD-3
--
module HOPS.GF.Hankel
( hankelMatrix
, hankel
, hankel1
) where
import Data.Vector (Vector)
import qualified Data.Vector as V
import HOPS.GF.Rat
import HOPS.Utils.Matrix (det)
-- | Build the Hankel matrix of a coefficient vector: row k is the
-- input shifted left by k positions, padded on the right with 'Indet'.
hankelMatrix :: Vector Rat -> Vector (Vector Rat)
hankelMatrix v = V.iterateN (V.length v) (\u -> V.snoc (V.tail u) Indet) v
-- | Sequence of Hankel determinants: entry k (after the final reverse)
-- is the determinant of the leading minor of order k+1.
hankel :: Vector Rat -> Vector Rat
hankel v = V.reverse $ V.map det subMatrices
  where
    n = V.length v
    -- successively strip the last row and column of the Hankel matrix
    subMatrices = V.iterateN n (V.init . V.map V.init) (hankelMatrix v)
-- | Hankel determinants of the shifted series: drop the first
-- coefficient, compute 'hankel', and pad with 'Indet' to keep length.
hankel1 :: Vector Rat -> Vector Rat
hankel1 v = V.snoc (hankel (V.tail v)) Indet
| akc/gfscript | HOPS/GF/Hankel.hs | bsd-3-clause | 712 | 0 | 11 | 146 | 249 | 136 | 113 | 16 | 1 |
module Adjrn.Crypto where
import Crypto.Cipher.AES (AES256)
import Crypto.Cipher.Types
import Crypto.Data.Padding
import Crypto.Error
import Crypto.Hash (hashWith, SHA256(..))
import Crypto.Random.Types
import Data.ByteArray
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
-- Jrnl files are encrypted with AES256 in CBC mode and PKCS7
-- padding. The initialisation vector (IV) is stored at the START of
-- these files. These functions take this into account.
decrypt :: ByteString -> ByteString -> Either String Text
decrypt pw txt = do
  -- the AES key is the SHA-256 hash of the password
  let hashed = hashWith SHA256 pw
      (ivRaw, jrnl) = BS.splitAt 16 txt
  aes <- onCryptoFailure (Left . show) Right
    $ cipherInit (convert hashed :: ByteString)
    :: Either String AES256
  iv <- maybe (Left "Encrypted data doesn't start with valid IV") Right
    $ makeIV ivRaw
  decd <- unpad' (blockSize aes) $ cbcDecrypt aes iv jrnl
  -- NOTE(review): decodeUtf8 throws on invalid UTF-8 (e.g. wrong
  -- password); decodeUtf8' would keep this function total -- confirm
  return $ decodeUtf8 decd
-- | Strip PKCS#7 padding of the given block size, failing with a
-- descriptive message when the padding is malformed.
unpad' :: Int -> ByteString -> Either String ByteString
unpad' k ctxt =
  case unpad (PKCS7 k) ctxt of
    Nothing   -> Left "Couldn't unpad decrypted journal"
    Just body -> Right body
-- | Generate 16 fresh random bytes to serve as an AES-CBC IV.
randomIV :: IO ByteString
randomIV = getRandomBytes 16
--This function is currently only used for testing purposes.
-- Encrypts plaintext with AES256-CBC, keyed by the SHA-256 hash of the
-- password, and prepends the raw IV to the ciphertext (mirroring
-- 'decrypt').
encrypt :: Text -> Text -> ByteString -> ByteString
encrypt pw plaintext ivRaw = let
  key :: ByteString
  key = convert $ hashWith SHA256 $ encodeUtf8 pw
  aes :: AES256
  -- NOTE(review): partial -- 'error' on cipherInit failure; acceptable
  -- for a test-only helper
  aes = either (error . show) id $ eitherCryptoError $ cipherInit key
  -- NOTE(review): partial -- 'error' when ivRaw is not a valid IV
  iv = maybe (error "invalid iv") id $ makeIV ivRaw
  plain = encodeUtf8 plaintext
  padded = pad (PKCS7 (blockSize aes)) plain
  in ivRaw `BS.append` cbcEncrypt aes iv padded
| timds/adjourn | src/Adjrn/Crypto.hs | bsd-3-clause | 1,828 | 0 | 13 | 431 | 504 | 263 | 241 | 38 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Build
-- Copyright : Isaac Jones 2003-2005,
-- Ross Paterson 2006,
-- Duncan Coutts 2007-2008, 2012
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This is the entry point to actually building the modules in a package. It
-- doesn't actually do much itself, most of the work is delegated to
-- compiler-specific actions. It does do some non-compiler specific bits like
-- running pre-processors.
--
module Distribution.Simple.Build (
build, repl,
startInterpreter,
initialBuildSteps,
writeAutogenFiles,
) where
import Distribution.Package
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.GHCJS as GHCJS
import qualified Distribution.Simple.JHC as JHC
import qualified Distribution.Simple.LHC as LHC
import qualified Distribution.Simple.UHC as UHC
import qualified Distribution.Simple.HaskellSuite as HaskellSuite
import qualified Distribution.Simple.Build.Macros as Build.Macros
import qualified Distribution.Simple.Build.PathsModule as Build.PathsModule
import qualified Distribution.Simple.Program.HcPkg as HcPkg
import Distribution.Simple.Compiler hiding (Flag)
import Distribution.PackageDescription hiding (Flag)
import qualified Distribution.InstalledPackageInfo as IPI
import qualified Distribution.ModuleName as ModuleName
import Distribution.Simple.Setup
import Distribution.Simple.BuildTarget
import Distribution.Simple.PreProcess
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Program.Types
import Distribution.Simple.Program.Db
import Distribution.Simple.BuildPaths
import Distribution.Simple.Configure
import Distribution.Simple.Register
import Distribution.Simple.Test.LibV09
import Distribution.Simple.Utils
import Distribution.System
import Distribution.Text
import Distribution.Verbosity
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.List
( intersect )
import Control.Monad
( when, unless )
import System.FilePath
( (</>), (<.>) )
import System.Directory
( getCurrentDirectory )
-- -----------------------------------------------------------------------------
-- |Build the libraries and executables in this package.
build    :: PackageDescription  -- ^ Mostly information from the .cabal file
         -> LocalBuildInfo      -- ^ Configuration information
         -> BuildFlags          -- ^ Flags that the user passed to build
         -> [ PPSuffixHandler ] -- ^ preprocessors to run before compiling
         -> IO ()
build pkg_descr lbi flags suffixes
 | fromFlag (buildAssumeDepsUpToDate flags) = do
  -- TODO: if checkBuildTargets ignores a target we may accept
  -- a --assume-deps-up-to-date with multiple arguments. Arguably, we should
  -- error early in this case.
  targets <- readBuildTargets pkg_descr (buildArgs flags)
  (cname, _) <- checkBuildTargets verbosity pkg_descr targets >>= \r -> case r of
      [] -> die "In --assume-deps-up-to-date mode you must specify a target"
      [target'] -> return target'
      _ -> die "In --assume-deps-up-to-date mode you can only build a single target"
  -- NB: do NOT 'createInternalPackageDB'; we don't want to delete it.
  -- But this means we have to be careful about unregistering
  -- ourselves.
  let dbPath = internalPackageDBPath lbi distPref
      internalPackageDB = SpecificPackageDB dbPath
      clbi = getComponentLocalBuildInfo lbi cname
      comp = getComponent pkg_descr cname
  -- TODO: do we need to unregister libraries? In any case, this would
  -- need to be done in the buildLib functionality.
  -- Do the build
  initialBuildSteps distPref pkg_descr lbi clbi verbosity
  let bi = componentBuildInfo comp
      progs' = addInternalBuildTools pkg_descr lbi bi (withPrograms lbi)
      lbi' = lbi {
               withPrograms = progs',
               withPackageDB = withPackageDB lbi ++ [internalPackageDB]
             }
  buildComponent verbosity (buildNumJobs flags) pkg_descr
                 lbi' suffixes comp clbi distPref
 | otherwise = do
  targets <- readBuildTargets pkg_descr (buildArgs flags)
  targets' <- checkBuildTargets verbosity pkg_descr targets
  let componentsToBuild = componentsInBuildOrder lbi (map fst targets')
  info verbosity $ "Component build order: "
                ++ intercalate ", " (map (showComponentName . componentLocalName) componentsToBuild)
  when (null targets) $
    -- Only bother with this message if we're building the whole package
    setupMessage verbosity "Building" (packageId pkg_descr)
  -- the internal package DB lets executables depend on internal libraries
  internalPackageDB <- createInternalPackageDB verbosity lbi distPref
  -- TODO: we're computing this twice, do it once!
  withComponentsInBuildOrder pkg_descr lbi (map fst targets') $ \comp clbi -> do
    initialBuildSteps distPref pkg_descr lbi clbi verbosity
    let bi = componentBuildInfo comp
        progs' = addInternalBuildTools pkg_descr lbi bi (withPrograms lbi)
        lbi' = lbi {
                 withPrograms = progs',
                 withPackageDB = withPackageDB lbi ++ [internalPackageDB]
               }
    buildComponent verbosity (buildNumJobs flags) pkg_descr
                   lbi' suffixes comp clbi distPref
 where
  distPref  = fromFlag (buildDistPref flags)
  verbosity = fromFlag (buildVerbosity flags)
-- | Open an interpreter session for the requested target component,
-- building all components it depends on first.
repl :: PackageDescription -- ^ Mostly information from the .cabal file
     -> LocalBuildInfo     -- ^ Configuration information
     -> ReplFlags          -- ^ Flags that the user passed to build
     -> [ PPSuffixHandler ] -- ^ preprocessors to run before compiling
     -> [String]
     -> IO ()
repl pkg_descr lbi flags suffixes args = do
  let distPref = fromFlag (replDistPref flags)
      verbosity = fromFlag (replVerbosity flags)
  targets <- readBuildTargets pkg_descr args
  targets' <- case targets of
    -- with no argument, default to the first enabled component
    [] -> return $ take 1 [ componentName c
                          | c <- pkgEnabledComponents pkg_descr ]
    [target] -> fmap (map fst) (checkBuildTargets verbosity pkg_descr [target])
    _ -> die $ "The 'repl' command does not support multiple targets at once."
  -- NOTE(review): 'last'/'init' below assume componentsToBuild is
  -- non-empty -- confirm a package always has an enabled component here
  let componentsToBuild = componentsInBuildOrder lbi targets'
      componentForRepl = last componentsToBuild
  debug verbosity $ "Component build order: "
                 ++ intercalate ", "
                    [ showComponentName (componentLocalName clbi) | clbi <- componentsToBuild ]
  internalPackageDB <- createInternalPackageDB verbosity lbi distPref
  let lbiForComponent comp lbi' =
        lbi' {
          withPackageDB = withPackageDB lbi ++ [internalPackageDB],
          withPrograms = addInternalBuildTools pkg_descr lbi'
                           (componentBuildInfo comp) (withPrograms lbi')
        }
  -- build any dependent components
  sequence_
    [ do let cname = componentLocalName clbi
             comp = getComponent pkg_descr cname
             lbi' = lbiForComponent comp lbi
         initialBuildSteps distPref pkg_descr lbi clbi verbosity
         buildComponent verbosity NoFlag
                        pkg_descr lbi' suffixes comp clbi distPref
    | clbi <- init componentsToBuild ]
  -- REPL for target components
  let clbi = componentForRepl
      cname = componentLocalName clbi
      comp = getComponent pkg_descr cname
      lbi' = lbiForComponent comp lbi
  initialBuildSteps distPref pkg_descr lbi clbi verbosity
  replComponent verbosity pkg_descr lbi' suffixes comp clbi distPref
-- | Start an interpreter without loading any package files.
-- Only GHC-family compilers provide a REPL; anything else is an error.
startInterpreter :: Verbosity -> ProgramDb -> Compiler -> Platform
                 -> PackageDBStack -> IO ()
startInterpreter verbosity programDb comp platform packageDBs =
  case compilerFlavor comp of
    GHC   -> GHC.startInterpreter verbosity programDb comp platform packageDBs
    GHCJS -> GHCJS.startInterpreter verbosity programDb comp platform packageDBs
    _     -> die "A REPL is not supported with this compiler."
-- | Build one component (library, executable, test suite or benchmark) of
-- the package.  Every clause first runs the preprocessors so that generated
-- sources exist, then hands off to the compiler-specific build entry point.
buildComponent :: Verbosity
               -> Flag (Maybe Int)          -- ^ number of parallel build jobs
               -> PackageDescription
               -> LocalBuildInfo
               -> [PPSuffixHandler]         -- ^ preprocessors to run first
               -> Component                 -- ^ the component to build
               -> ComponentLocalBuildInfo
               -> FilePath                  -- ^ \"dist\" prefix
               -> IO ()
-- Library: build, then register in-place so other components can depend on it.
buildComponent verbosity numJobs pkg_descr lbi suffixes
               comp@(CLib lib) clbi distPref = do
    preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
    extras <- preprocessExtras comp lbi
    info verbosity $ "Building library " ++ libName lib ++ "..."
    let libbi = libBuildInfo lib
        lib' = lib { libBuildInfo = addExtraCSources libbi extras }
    buildLib verbosity numJobs pkg_descr lbi lib' clbi
    -- Register the library in-place, so exes can depend
    -- on internally defined libraries.
    pwd <- getCurrentDirectory
    let -- The in place registration uses the "-inplace" suffix, not an ABI hash
        installedPkgInfo = inplaceInstalledPackageInfo pwd distPref pkg_descr
                             (AbiHash "") lib' lbi clbi
    registerPackage verbosity (compiler lbi) (withPrograms lbi) HcPkg.MultiInstance
                    (withPackageDB lbi) installedPkgInfo
-- Executable: preprocess, merge any generated C sources, build.
buildComponent verbosity numJobs pkg_descr lbi suffixes
               comp@(CExe exe) clbi _ = do
    preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
    extras <- preprocessExtras comp lbi
    info verbosity $ "Building executable " ++ exeName exe ++ "..."
    let ebi = buildInfo exe
        exe' = exe { buildInfo = addExtraCSources ebi extras }
    buildExe verbosity numJobs pkg_descr lbi exe' clbi
-- exitcode-stdio-1.0 test suite: built exactly like an executable.
buildComponent verbosity numJobs pkg_descr lbi suffixes
               comp@(CTest test@TestSuite { testInterface = TestSuiteExeV10{} })
               clbi _distPref = do
    let exe = testSuiteExeV10AsExe test
    preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
    extras <- preprocessExtras comp lbi
    info verbosity $ "Building test suite " ++ testName test ++ "..."
    let ebi = buildInfo exe
        exe' = exe { buildInfo = addExtraCSources ebi extras }
    buildExe verbosity numJobs pkg_descr lbi exe' clbi
-- detailed-0.9 test suite: build as a library plus a stub executable, and
-- register the library so the stub can link against it.
buildComponent verbosity numJobs pkg_descr lbi0 suffixes
               comp@(CTest
                     test@TestSuite { testInterface = TestSuiteLibV09{} })
               clbi -- This ComponentLocalBuildInfo corresponds to a detailed
                    -- test suite and not a real component.  It should not
                    -- be used, except to construct the CLBIs for the
                    -- library and stub executable that will actually be
                    -- built.
               distPref = do
    pwd <- getCurrentDirectory
    let (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) =
          testSuiteLibV09AsLibAndExe pkg_descr test clbi lbi0 distPref pwd
    preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
    extras <- preprocessExtras comp lbi
    info verbosity $ "Building test suite " ++ testName test ++ "..."
    buildLib verbosity numJobs pkg lbi lib libClbi
    -- NB: need to enable multiple instances here, because on 7.10+
    -- the package name is the same as the library, and we still
    -- want the registration to go through.
    registerPackage verbosity (compiler lbi) (withPrograms lbi) HcPkg.MultiInstance
                    (withPackageDB lbi) ipi
    let ebi = buildInfo exe
        exe' = exe { buildInfo = addExtraCSources ebi extras }
    buildExe verbosity numJobs pkg_descr lbi exe' exeClbi
buildComponent _ _ _ _ _
               (CTest TestSuite { testInterface = TestSuiteUnsupported tt })
               _ _ =
    die $ "No support for building test suite type " ++ display tt
-- Benchmark (exitcode-stdio-1.0): translated to an executable and built.
buildComponent verbosity numJobs pkg_descr lbi suffixes
               comp@(CBench bm@Benchmark { benchmarkInterface = BenchmarkExeV10 {} })
               clbi _ = do
    let (exe, exeClbi) = benchmarkExeV10asExe bm clbi
    preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
    extras <- preprocessExtras comp lbi
    info verbosity $ "Building benchmark " ++ benchmarkName bm ++ "..."
    let ebi = buildInfo exe
        exe' = exe { buildInfo = addExtraCSources ebi extras }
    buildExe verbosity numJobs pkg_descr lbi exe' exeClbi
buildComponent _ _ _ _ _
               (CBench Benchmark { benchmarkInterface = BenchmarkUnsupported tt })
               _ _ =
    die $ "No support for building benchmark type " ++ display tt
-- | Merge C sources produced by preprocessing into a 'BuildInfo',
-- de-duplicating against the sources already listed.  The resulting
-- 'cSources' list comes back in 'Set' (ascending) order.
addExtraCSources :: BuildInfo -> [FilePath] -> BuildInfo
addExtraCSources bi extras =
    bi { cSources = merged }
  where
    merged = Set.toList (Set.union (Set.fromList (cSources bi))
                                   (Set.fromList extras))
-- | Start a REPL for one component.  Mirrors 'buildComponent': preprocessors
-- run first, generated C sources are appended, then the compiler-specific
-- REPL entry point is invoked.  (Note: unlike 'buildComponent', extras are
-- appended directly rather than de-duplicated via 'addExtraCSources'.)
replComponent :: Verbosity
              -> PackageDescription
              -> LocalBuildInfo
              -> [PPSuffixHandler]         -- ^ preprocessors to run first
              -> Component
              -> ComponentLocalBuildInfo
              -> FilePath                  -- ^ \"dist\" prefix
              -> IO ()
replComponent verbosity pkg_descr lbi suffixes
              comp@(CLib lib) clbi _ = do
    preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
    extras <- preprocessExtras comp lbi
    let libbi = libBuildInfo lib
        lib' = lib { libBuildInfo = libbi { cSources = cSources libbi ++ extras } }
    replLib verbosity pkg_descr lbi lib' clbi
replComponent verbosity pkg_descr lbi suffixes
              comp@(CExe exe) clbi _ = do
    preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
    extras <- preprocessExtras comp lbi
    let ebi = buildInfo exe
        exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } }
    replExe verbosity pkg_descr lbi exe' clbi
-- exitcode-stdio-1.0 test suite behaves like an executable.
replComponent verbosity pkg_descr lbi suffixes
              comp@(CTest test@TestSuite { testInterface = TestSuiteExeV10{} })
              clbi _distPref = do
    let exe = testSuiteExeV10AsExe test
    preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
    extras <- preprocessExtras comp lbi
    let ebi = buildInfo exe
        exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } }
    replExe verbosity pkg_descr lbi exe' clbi
-- detailed-0.9 test suite: only the library part is loaded into the REPL;
-- the stub executable and registration info are discarded here.
replComponent verbosity pkg_descr lbi0 suffixes
              comp@(CTest
                    test@TestSuite { testInterface = TestSuiteLibV09{} })
              clbi distPref = do
    pwd <- getCurrentDirectory
    let (pkg, lib, libClbi, lbi, _, _, _) =
          testSuiteLibV09AsLibAndExe pkg_descr test clbi lbi0 distPref pwd
    preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
    extras <- preprocessExtras comp lbi
    let libbi = libBuildInfo lib
        lib' = lib { libBuildInfo = libbi { cSources = cSources libbi ++ extras } }
    replLib verbosity pkg lbi lib' libClbi
replComponent _ _ _ _
              (CTest TestSuite { testInterface = TestSuiteUnsupported tt })
              _ _ =
    die $ "No support for building test suite type " ++ display tt
replComponent verbosity pkg_descr lbi suffixes
              comp@(CBench bm@Benchmark { benchmarkInterface = BenchmarkExeV10 {} })
              clbi _ = do
    let (exe, exeClbi) = benchmarkExeV10asExe bm clbi
    preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
    extras <- preprocessExtras comp lbi
    let ebi = buildInfo exe
        exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } }
    replExe verbosity pkg_descr lbi exe' exeClbi
replComponent _ _ _ _
              (CBench Benchmark { benchmarkInterface = BenchmarkUnsupported tt })
              _ _ =
    die $ "No support for building benchmark type " ++ display tt
----------------------------------------------------
-- Shared code for buildComponent and replComponent
--
-- | Translate an exe-style (exitcode-stdio-1.0) 'TestSuite' component into
-- an 'Executable' so it can be built with the normal executable machinery.
-- Calling this on any other test-suite interface is a programming error.
testSuiteExeV10AsExe :: TestSuite -> Executable
testSuiteExeV10AsExe test@TestSuite { testInterface = TestSuiteExeV10 _ mainFile } =
    stubExe
  where
    -- The test suite's own build info is reused verbatim.
    stubExe = Executable
      { exeName    = testName test
      , modulePath = mainFile
      , buildInfo  = testBuildInfo test
      }
testSuiteExeV10AsExe TestSuite{} = error "testSuiteExeV10AsExe: wrong kind"
-- | Translate a lib-style 'TestSuite' component into a lib + exe for building.
-- The detailed-0.9 interface needs: a library exposing the test module, a
-- stub executable that drives it, a rewritten package description that hides
-- everything else, and in-place registration info so the stub can link
-- against the freshly built library.
testSuiteLibV09AsLibAndExe :: PackageDescription
                           -> TestSuite
                           -> ComponentLocalBuildInfo
                           -> LocalBuildInfo
                           -> FilePath        -- ^ \"dist\" prefix
                           -> FilePath        -- ^ current working directory
                           -> (PackageDescription,
                               Library, ComponentLocalBuildInfo,
                               LocalBuildInfo,
                               IPI.InstalledPackageInfo,
                               Executable, ComponentLocalBuildInfo)
testSuiteLibV09AsLibAndExe pkg_descr
                     test@TestSuite { testInterface = TestSuiteLibV09 _ m }
                     clbi lbi distPref pwd =
    (pkg, lib, libClbi, lbi, ipi, exe, exeClbi)
  where
    bi = testBuildInfo test
    -- The library exposes exactly the one test module.
    lib = Library {
            libName = testName test,
            exposedModules = [ m ],
            reexportedModules = [],
            requiredSignatures = [],
            exposedSignatures = [],
            libExposed = True,
            libBuildInfo = bi
          }
    -- This is, like, the one place where we use a CTestName for a library.
    -- Should NOT use library name, since that could conflict!
    PackageIdentifier pkg_name pkg_ver = package pkg_descr
    compat_name = computeCompatPackageName pkg_name (CTestName (testName test))
    compat_key = computeCompatPackageKey (compiler lbi) compat_name pkg_ver (componentUnitId clbi)
    -- CLBI for the synthetic library; reuses the test component's unit id,
    -- deps and includes, but is never exposed publicly.
    libClbi = LibComponentLocalBuildInfo
                { componentPackageDeps = componentPackageDeps clbi
                , componentLocalName = CLibName (testName test)
                , componentIsPublic = False
                , componentIncludes = componentIncludes clbi
                , componentUnitId = componentUnitId clbi
                , componentCompatPackageName = compat_name
                , componentCompatPackageKey = compat_key
                , componentExposedModules = [IPI.ExposedModule m Nothing]
                }
    -- A package description containing only the synthetic library, renamed
    -- to the compat name so registration does not clash with the real package.
    pkg = pkg_descr {
            package = (package pkg_descr) { pkgName = compat_name }
          , buildDepends = targetBuildDepends $ testBuildInfo test
          , executables = []
          , testSuites = []
          , libraries = [lib]
          }
    ipi = inplaceInstalledPackageInfo pwd distPref pkg (AbiHash "") lib lbi libClbi
    testDir = buildDir lbi </> stubName test
          </> stubName test ++ "-tmp"
    testLibDep = thisPackageVersion $ package pkg
    -- The stub executable: its source lives in the generated testDir and it
    -- depends on the synthetic library built above.
    exe = Executable {
            exeName = stubName test,
            modulePath = stubFilePath test,
            buildInfo = (testBuildInfo test) {
                          hsSourceDirs = [ testDir ],
                          targetBuildDepends = testLibDep
                            : (targetBuildDepends $ testBuildInfo test),
                          targetBuildRenaming = Map.empty
                        }
          }
    -- | The stub executable needs a new 'ComponentLocalBuildInfo'
    -- that exposes the relevant test suite library.
    deps = (IPI.installedUnitId ipi, packageId ipi)
         : (filter (\(_, x) -> let PackageName name = pkgName x
                               in name == "Cabal" || name == "base")
                   (componentPackageDeps clbi))
    exeClbi = ExeComponentLocalBuildInfo {
                -- TODO: this is a hack, but as long as this is unique
                -- (doesn't clobber something) we won't run into trouble
                componentUnitId = mkUnitId (stubName test),
                componentLocalName = CExeName (stubName test),
                componentPackageDeps = deps,
                componentIncludes = zip (map fst deps) (repeat defaultRenaming)
              }
testSuiteLibV09AsLibAndExe _ TestSuite{} _ _ _ _ = error "testSuiteLibV09AsLibAndExe: wrong kind"
-- | Translate an exe-style (exitcode-stdio-1.0) 'Benchmark' component into
-- an executable plus the component build info needed to build it.  Calling
-- this on any other benchmark interface is a programming error.
benchmarkExeV10asExe :: Benchmark -> ComponentLocalBuildInfo
                     -> (Executable, ComponentLocalBuildInfo)
benchmarkExeV10asExe bm@Benchmark { benchmarkInterface = BenchmarkExeV10 _ f }
                     clbi =
    (benchExe, benchClbi)
  where
    -- The benchmark's own build info is reused verbatim for the executable.
    benchExe = Executable
      { exeName    = benchmarkName bm
      , modulePath = f
      , buildInfo  = benchmarkBuildInfo bm
      }
    -- Reuse the benchmark component's unit id, deps and includes.
    benchClbi = ExeComponentLocalBuildInfo
      { componentUnitId      = componentUnitId clbi
      , componentLocalName   = CExeName (benchmarkName bm)
      , componentPackageDeps = componentPackageDeps clbi
      , componentIncludes    = componentIncludes clbi
      }
benchmarkExeV10asExe Benchmark{} _ = error "benchmarkExeV10asExe: wrong kind"
-- | Register package-internal executables as configured build tools, so that
-- a component's build-tools dependencies can be satisfied by executables
-- built by this very package (located under the build dir).
addInternalBuildTools :: PackageDescription -> LocalBuildInfo -> BuildInfo
                      -> ProgramDb -> ProgramDb
addInternalBuildTools pkg lbi bi progs =
    foldr updateProgram progs (map mkProgram matchingTools)
  where
    -- A tool is internal when it is both requested in build-tools and
    -- provided as an executable of this package.
    matchingTools = requestedTools `intersect` packageExes
    requestedTools = [ name | Dependency (PackageName name) _ <- buildTools bi ]
    packageExes = map exeName (executables pkg)
    -- Point the program db at the executable inside our own build dir.
    mkProgram tool =
      simpleConfiguredProgram tool
        (FoundOnSystem (buildDir lbi </> tool </> tool <.> exeExtension))
-- TODO: build separate libs in separate dirs so that we can build
-- multiple libs, e.g. for 'LibTest' library-style test suites
-- | Build a library, dispatching on the configured compiler flavor.
buildLib :: Verbosity -> Flag (Maybe Int)
         -> PackageDescription -> LocalBuildInfo
         -> Library -> ComponentLocalBuildInfo -> IO ()
buildLib verbosity numJobs pkg_descr lbi lib clbi =
    dispatch (compilerFlavor (compiler lbi))
  where
    dispatch GHC             = GHC.buildLib   verbosity numJobs pkg_descr lbi lib clbi
    dispatch GHCJS           = GHCJS.buildLib verbosity numJobs pkg_descr lbi lib clbi
    dispatch JHC             = JHC.buildLib   verbosity pkg_descr lbi lib clbi
    dispatch LHC             = LHC.buildLib   verbosity pkg_descr lbi lib clbi
    dispatch UHC             = UHC.buildLib   verbosity pkg_descr lbi lib clbi
    dispatch HaskellSuite {} = HaskellSuite.buildLib verbosity pkg_descr lbi lib clbi
    dispatch _               = die "Building is not supported with this compiler."
-- | Build an executable, dispatching on the configured compiler flavor.
buildExe :: Verbosity -> Flag (Maybe Int)
         -> PackageDescription -> LocalBuildInfo
         -> Executable -> ComponentLocalBuildInfo -> IO ()
buildExe verbosity numJobs pkg_descr lbi exe clbi =
    dispatch (compilerFlavor (compiler lbi))
  where
    dispatch GHC   = GHC.buildExe   verbosity numJobs pkg_descr lbi exe clbi
    dispatch GHCJS = GHCJS.buildExe verbosity numJobs pkg_descr lbi exe clbi
    dispatch JHC   = JHC.buildExe   verbosity pkg_descr lbi exe clbi
    dispatch LHC   = LHC.buildExe   verbosity pkg_descr lbi exe clbi
    dispatch UHC   = UHC.buildExe   verbosity pkg_descr lbi exe clbi
    dispatch _     = die "Building is not supported with this compiler."
-- | Open a REPL on a library; only GHC and GHCJS support this.
replLib :: Verbosity -> PackageDescription -> LocalBuildInfo
        -> Library -> ComponentLocalBuildInfo -> IO ()
replLib verbosity pkg_descr lbi lib clbi =
    dispatch (compilerFlavor (compiler lbi))
  where
    -- 'cabal repl' doesn't need to support 'ghc --make -j', so we just pass
    -- NoFlag as the numJobs parameter.
    dispatch GHC   = GHC.replLib   verbosity NoFlag pkg_descr lbi lib clbi
    dispatch GHCJS = GHCJS.replLib verbosity NoFlag pkg_descr lbi lib clbi
    dispatch _     = die "A REPL is not supported for this compiler."
-- | Open a REPL on an executable; only GHC and GHCJS support this.
replExe :: Verbosity -> PackageDescription -> LocalBuildInfo
        -> Executable -> ComponentLocalBuildInfo -> IO ()
replExe verbosity pkg_descr lbi exe clbi =
    dispatch (compilerFlavor (compiler lbi))
  where
    -- NoFlag: the REPL never builds in parallel, so no job count is needed.
    dispatch GHC   = GHC.replExe   verbosity NoFlag pkg_descr lbi exe clbi
    dispatch GHCJS = GHCJS.replExe verbosity NoFlag pkg_descr lbi exe clbi
    dispatch _     = die "A REPL is not supported for this compiler."
-- | Steps that must run before any component is built: sanity-check that the
-- package has something buildable, create the component's build directory,
-- and generate the autogen files (Paths_<pkg>.hs, cabal_macros.h).
initialBuildSteps :: FilePath -- ^"dist" prefix
                  -> PackageDescription  -- ^mostly information from the .cabal file
                  -> LocalBuildInfo -- ^Configuration information
                  -> ComponentLocalBuildInfo
                  -> Verbosity -- ^The verbosity to use
                  -> IO ()
initialBuildSteps _distPref pkg_descr lbi clbi verbosity = do
  -- Refuse to proceed when the package has nothing buildable at all.
  case allBuildInfo pkg_descr of
    [] -> die $ "No libraries, executables, tests, or benchmarks "
             ++ "are enabled for package "
             ++ display (packageId pkg_descr) ++ "."
    _  -> return ()
  createDirectoryIfMissingVerbose verbosity True (componentBuildDir lbi clbi)
  writeAutogenFiles verbosity pkg_descr lbi clbi
-- | Generate and write out the Paths_<pkg>.hs and cabal_macros.h files
-- into the component's autogen directory, creating it first if needed.
writeAutogenFiles :: Verbosity
                  -> PackageDescription
                  -> LocalBuildInfo
                  -> ComponentLocalBuildInfo
                  -> IO ()
writeAutogenFiles verbosity pkg lbi clbi = do
  createDirectoryIfMissingVerbose verbosity True autogenDir
  -- Paths_<pkg>.hs
  rewriteFile (autogenDir </> ModuleName.toFilePath (autogenModuleName pkg) <.> "hs")
              (Build.PathsModule.generate pkg lbi clbi)
  -- cabal_macros.h
  rewriteFile (autogenDir </> cppHeaderName)
              (Build.Macros.generate pkg lbi clbi)
  where
    autogenDir = autogenModulesDir lbi clbi
| bennofs/cabal | Cabal/Distribution/Simple/Build.hs | bsd-3-clause | 25,910 | 0 | 18 | 7,107 | 5,494 | 2,821 | 2,673 | 435 | 7 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Slovak locale, sk-SK) for the "Reveal" help content. -->
<helpset version="2.0" xml:lang="sk-SK">
  <title>Reveal | ZAP Extension</title>
  <!-- Map of help IDs to targets; "top" is the landing page, defined in map.jhm. -->
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <!-- Table-of-contents view, rendered by ZAP's custom TOC view class. -->
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <!-- Keyword index view backed by index.xml. -->
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <!-- Full-text search view; the search database lives in JavaHelpSearch. -->
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
    JavaHelpSearch
    </data>
  </view>
  <!-- User bookmarks view (stores favorites locally). -->
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset> | thc202/zap-extensions | addOns/reveal/src/main/javahelp/org/zaproxy/zap/extension/reveal/resources/help_sk_SK/helpset_sk_SK.hs | apache-2.0 | 967 | 83 | 52 | 158 | 394 | 208 | 186 | -1 | -1 |
module ListWatched where
import qualified Github.Repos.Watching as Github
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
-- Fetch the repos watched by user "mike-burns" and print either a formatted
-- listing (repos separated by blank lines) or an error message.
main =
  Github.reposWatchedBy "mike-burns" >>= \possibleRepos ->
    putStrLn (render possibleRepos)
  where
    render (Left err)    = "Error: " ++ show err
    render (Right repos) = intercalate "\n\n" (map formatRepo repos)
-- Render one repository as a multi-line summary: name/description, URLs,
-- last-update date, language, and watcher/fork counts.
formatRepo repo = concat
  [ Github.repoName repo, "\t"
  , fromMaybe "" (Github.repoDescription repo), "\n"
  , Github.repoHtmlUrl repo, "\n"
  , fromMaybe "" (Github.repoCloneUrl repo), "\t"
  , formatDate (Github.repoUpdatedAt repo), "\n"
  , formatLanguage (Github.repoLanguage repo)
  , "watchers: ", show (Github.repoWatchers repo), "\t"
  , "forks: ", show (Github.repoForks repo)
  ]
-- Render an optional Github date; absent dates become the empty string.
formatDate = maybe "" (show . Github.fromGithubDate)
-- Render an optional language tag; absent languages become the empty string.
formatLanguage = maybe "" (\language -> "language: " ++ language ++ "\t")
| olorin/github | samples/Repos/Watching/ListWatched.hs | bsd-3-clause | 948 | 0 | 22 | 197 | 311 | 159 | 152 | 22 | 1 |
{-# LANGUAGE OverloadedStrings, GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
module Lamdu.Data.DbLayout
( DbM, runDbTransaction
, ViewM, runViewTransaction
, CodeProps, codeProps, codeIRefs
, RevisionProps, revisionProps, revisionIRefs
, module Lamdu.Data.Anchors
) where
import Control.Applicative (Applicative)
import Control.Monad.IO.Class (MonadIO)
import Data.ByteString.Char8 ()
import Data.Store.Db (Db)
import Data.Store.IRef (IRef, Tag)
import Data.Store.Rev.View (View)
import Data.Store.Transaction (Transaction)
import Data.Typeable (Typeable)
import Lamdu.Data.Anchors (Code(..), Revision(..), assocNameRef, SpecialFunctions(..))
import qualified Data.Store.Db as Db
import qualified Data.Store.IRef as IRef
import qualified Data.Store.Rev.View as View
import qualified Data.Store.Transaction as Transaction
import qualified Lamdu.Data.Anchors as Anchors
-- Local shorthand for the store 'Transaction' monad transformer.
type T = Transaction

-- | Base monad for transactions against the whole database: a thin
-- newtype over 'IO'.
newtype DbM a = DbM { dbM :: IO a }
  deriving (Functor, Applicative, Monad, MonadIO, Typeable)

-- | Monad for transactions against a revision-controlled view, layered on
-- top of database-level transactions.
newtype ViewM a = ViewM { viewM :: T DbM a }
  deriving (Functor, Applicative, Monad, Typeable)
-- | Run a database-level transaction against the given 'Db' store.
runDbTransaction :: Db -> T DbM a -> IO a
runDbTransaction db transaction =
  dbM (Transaction.run (Transaction.onStoreM DbM (Db.store db)) transaction)
-- | Run a view-level transaction inside a database-level one, using the
-- given revision-controlled 'View' as the backing store.
runViewTransaction :: View (Tag DbM) -> T ViewM a -> T DbM a
runViewTransaction v transaction =
  viewM (Transaction.run (Transaction.onStoreM ViewM (View.store v)) transaction)
-- | Well-known anchors for all code-level state stored in a view:
-- each field is a named 'IRef' looked up by its anchor string.
codeIRefs :: Code (IRef (Tag ViewM)) (Tag ViewM)
codeIRefs = Code
  { panes = IRef.anchor "panes"
  , clipboards = IRef.anchor "clipboards"
  , globals = IRef.anchor "globals"
  , specialFunctions = IRef.anchor "specialFuncs"
  , ffiEnv = IRef.anchor "ffiEnv"
  , preJumps = IRef.anchor "prejumps"
  , preCursor = IRef.anchor "precursor"
  , postCursor = IRef.anchor "postcursor"
  , tags = IRef.anchor "tags"
  }
-- | Well-known anchors for revision-control state (branches, undo/redo,
-- cursor position) stored at the database level.
revisionIRefs :: Revision (IRef t) t
revisionIRefs = Revision
  { branches = IRef.anchor "branches"
  , currentBranch = IRef.anchor "currentBranch"
  , cursor = IRef.anchor "cursor"
  , redos = IRef.anchor "redos"
  , view = IRef.anchor "view"
  }
-- Property-based accessors over the anchors above, specialized to the
-- transaction monads defined in this module.
type CodeProps = Anchors.CodeProps ViewM
type RevisionProps = Anchors.RevisionProps DbM

-- | Code-state properties, backed by 'codeIRefs' (view-level).
codeProps :: CodeProps
codeProps = Anchors.onCode Transaction.mkPropertyFromIRef codeIRefs

-- | Revision-state properties, backed by 'revisionIRefs' (db-level).
revisionProps :: RevisionProps
revisionProps = Anchors.onRevision Transaction.mkPropertyFromIRef revisionIRefs
| BrennonTWilliams/lamdu | Lamdu/Data/DbLayout.hs | gpl-3.0 | 2,344 | 0 | 11 | 344 | 693 | 401 | 292 | 54 | 1 |
import Test.Hspec
import BTree
-- Hspec test suite for the BTree module: covers insert/member, balancing,
-- structural equality, fromList (which should balance), and values
-- (in-order traversal).
main :: IO ()
main = hspec $ do
  describe "insert" $ do
    it "inserts a value into a tree" $ do
      let value = (1::Int)
          tree = insert value empty in
        member value tree `shouldBe` True
  describe "balance" $ do
    it "balances a tree" $ do
      -- Inserting 1,2,3 in order yields a right-leaning tree; balancing it
      -- should give the same tree as inserting 2 first.
      let tree = foldl (flip insert) empty ([1,2,3]::[Int]) in
        (balance tree) `shouldBe` (foldl (flip insert) empty [2,1,3])
  describe "==" $ do
    it "returns True if two trees ar equal" $ do
      foldl (flip insert) empty ([1,2,3,4,5]::[Int])
        == foldl (flip insert) empty ([1,2,3,4,5]::[Int])
        `shouldBe` True
    it "returns False if two trees are different" $ do
      -- Same elements, different insertion order => different structure.
      foldl (flip insert) empty ([1,2,3,4,5]::[Int])
        == foldl (flip insert) empty ([1,2,4,3,5]::[Int])
        `shouldBe` False
  describe "fromList" $ do
    it "returns a balanced tree" $ do
      fromList [1,2,3,4,5] `shouldBe` foldl (flip insert) empty ([1,2,3,4,5]::[Int])
  describe "values" $ do
    it "returns a list of the tree's values " $ do
      ((values . fromList) [1,2,3,4,5]::[Int]) `shouldBe` [1,2,3,4,5]
| tsujigiri/btree-hs | spec/Spec.hs | mit | 1,238 | 0 | 20 | 405 | 557 | 301 | 256 | 28 | 1 |
module Data.Map.Extensions where
import Data.List as List
import Data.List.Extensions as ListExt
import Data.Map as Map
import Data.Maybe as Maybe
import Debug.Trace as Trace
import Prelude.Extensions as PreludeExt
-- | True when the map has at least one entry.
notNull :: Map a b -> Bool
notNull m = not (Map.null m)
-- | Lookup returning a (found, value) pair instead of a 'Maybe';
-- the value component is only meaningful when the flag is True.
lookupIf :: Ord a => a -> (Map a b) -> (Bool, b)
lookupIf key table = splitMaybe (Map.lookup key table)
-- | Insert only when the key is not already present.  Returns any
-- pre-existing value alongside the resulting map; the map is unchanged
-- when the key was already bound.  (Idiomatic case-match replacing the
-- project-local isJust/ifElse combination.)
insertIfAbsent :: Ord a => a -> b -> (Map a b) -> (Maybe b, Map a b)
insertIfAbsent key value table = case Map.lookup key table of
    Just existing -> (Just existing, table)
    Nothing -> (Nothing, Map.insert key value table)
-- | Invert a map: each value maps to the list of keys that carried it.
-- Because keys are visited in ascending order and combined front-first,
-- each list ends up in descending key order.
inverse :: Ord a => Ord b => (Map a b) -> (Map b [a])
inverse table = Map.fromListWith (++) [ (v, [k]) | (k, v) <- Map.toList table ]
-- | Invert a map, keeping a single key per value (the head of the key
-- list produced by 'inverse', i.e. the largest key for that value).
injectiveInverse :: Ord a => Ord b => (Map a b) -> (Map b a)
injectiveInverse = Map.map List.head . inverse
-- | Delete the entry picked out by the selector (e.g. 'Map.findMin').
-- The selector must succeed; it is applied before the deletion.
deleteWithSelector :: Ord k => ((Map k a) -> (k, a)) -> (Map k a) -> (Map k a)
deleteWithSelector pick table = Map.delete victim table
  where (victim, _) = pick table
-- | Delete the smallest key.  Note: unlike 'Map.deleteMin', this errors on
-- an empty map (via 'Map.findMin').
deleteMin :: Ord k => (Map k a) -> (Map k a)
deleteMin table = deleteWithSelector Map.findMin table

-- | Delete the largest key; errors on an empty map (via 'Map.findMax').
deleteMax :: Ord k => (Map k a) -> (Map k a)
deleteMax table = deleteWithSelector Map.findMax table
-- | Split a map at a key: entries strictly less than the key on the left,
-- everything else (including the key itself, when present) on the right.
-- Rewritten as a direct pattern match, avoiding the partial 'fromJust'.
splitLess :: Ord k => k -> (Map k a) -> (Map k a, Map k a)
splitLess key table = case Map.splitLookup key table of
    (less, Just v, greater) -> (less, Map.insert key v greater)
    (less, Nothing, greater) -> (less, greater)
-- | Split a map at a key: entries less than or equal to the key on the
-- left, strictly greater entries on the right.  Rewritten as a direct
-- pattern match, avoiding the partial 'fromJust'.
splitLessEqual :: Ord k => k -> (Map k a) -> (Map k a, Map k a)
splitLessEqual key table = case Map.splitLookup key table of
    (less, Just v, greater) -> (Map.insert key v less, greater)
    (less, Nothing, greater) -> (less, greater)
-- | Transform each (key, value) pair wholesale.  When the transform maps
-- two keys to the same new key, the later (larger original key) wins.
mapKeysAndValues :: Ord k1 => Ord k2 => ((k1, v1) -> (k2, v2)) -> (Map.Map k1 v1) -> (Map.Map k2 v2)
mapKeysAndValues transform = Map.fromList . List.map transform . Map.toList
-- | Transform only the keys, keeping the values.  On key collisions the
-- entry with the larger original key wins (fromList keeps the last).
mapKeys :: Ord k1 => Ord k2 => (k1 -> k2) -> (Map.Map k1 v) -> (Map.Map k2 v)
mapKeys transform table = Map.fromList [ (transform k, v) | (k, v) <- Map.toList table ]
-- | Wrap a pure function with an explicit cache.  The returned function
-- takes the input plus the current cache and yields the result together
-- with the (possibly extended) cache.  Rewritten with a direct case-match
-- instead of the project-local lookupIf/ifElse helpers; the function is
-- only evaluated on a cache miss (laziness already guaranteed this).
memoize :: Ord a => (a -> b) -> (a -> Map a b -> (b, Map a b))
memoize function = memoized
  where
    memoized input cache = case Map.lookup input cache of
        Just cached -> (cached, cache)
        Nothing -> let output = function input
                   in (output, Map.insert input output cache)
-- | Apply an extremum selector (e.g. 'Map.findMin') unless the map is
-- empty, in which case return the fallback pair.  Uses a plain
-- if-then-else instead of the project-local 'ifElse' helper.
findExtremeWithDefault :: Ord k => (Map k a -> (k, a)) -> (k, a) -> Map k a -> (k, a)
findExtremeWithDefault selector fallback table =
    if Map.null table then fallback else selector table

-- | Smallest entry, or the fallback when the map is empty.
findMinWithDefault :: Ord k => (k, a) -> Map k a -> (k, a)
findMinWithDefault = (findExtremeWithDefault Map.findMin)

-- | Largest entry, or the fallback when the map is empty.
findMaxWithDefault :: Ord k => (k, a) -> Map k a -> (k, a)
findMaxWithDefault = (findExtremeWithDefault Map.findMax)
-- | Keys present in both maps whose values disagree, in ascending order.
-- Rewritten with a direct (/=) intersection instead of building singleton
-- lists and testing them with the project-local allEqual helper.
conflicts :: Ord k => Eq a => (Map k a) -> (Map k a) -> [k]
conflicts a b = Map.keys (Map.filter id (Map.intersectionWith (/=) a b))

-- | True when the two maps disagree on at least one shared key.
hasConflicts :: Ord k => Eq a => (Map k a) -> (Map k a) -> Bool
hasConflicts a b = not (List.null (conflicts a b))
-- | Build a map from a list of keys, computing each value with @f@.
fromKeyList :: Ord k => (k -> a) -> [k] -> (Map k a)
fromKeyList f keys = Map.fromList [ (k, f k) | k <- keys ]
-- | Render a map one (key, value) pair per line, for debug output.
showLines :: (Show k, Show a) => (Map k a) -> String
showLines table = unlines (List.map show (Map.toList table))
-- | Like '(!)' but, when the key is missing, traces the key and the full
-- map contents before the lookup fails, to aid debugging.
--
-- Fix: the original always called 'Trace.trace', passing "" on a hit,
-- which emitted a stray blank line to stderr for every successful lookup.
-- Now tracing only happens on the failure path.
debugLookup :: (Ord k, Show k, Show a) => (Map k a) -> k -> a
debugLookup table key
    | Map.member key table = (!) table key
    | otherwise = Trace.trace error_output ((!) table key)
  where
    error_output = (concat ["\nfailed to find key:\n", show key, "\n\nin map:\n", showLines table, "\n\n"])
| stevedonnelly/haskell | code/Data/Map/Extensions.hs | mit | 3,835 | 1 | 17 | 792 | 1,929 | 1,024 | 905 | -1 | -1 |
{- Author: Jeff Newbern
Maintainer: Jeff Newbern <jnewbern@nomaware.com>
Time-stamp: <Fri Jul 25 10:05:53 2003>
License: GPL
-}
{- DESCRIPTION
Example 12 - Using the MonadError class
Usage: Compile the code and execute the resulting program
with various arguments. For each argument that is
a valid hexadecimal number, the program will print
the decimal representation of the number. For each
argument that is not a valid hexadecimal number, the
program will print an error message describing the
location of the first invalid character.
Try: ./ex12 7f beef 10 able f00d
-}
import Monad
import System
import Control.Monad.Error
import Char
-- This is the type of our parse error representation: the 0-based-from-1
-- index of the offending character and a human-readable description.
data ParseError = Err {location::Int, reason::String}

-- We make it an instance of the Error class so it can be used with
-- 'throwError' / 'catchError' (noMsg/strMsg give generic defaults).
instance Error ParseError where
  noMsg = Err 0 "Parse Error"
  strMsg s = Err 0 s

-- For our monad type constructor, we use Either ParseError
-- which represents failure using Left ParseError or a
-- successful result of type a using Right a.
type ParseMonad = Either ParseError
-- Convert a single hex digit to its Integer value inside ParseMonad,
-- throwing a positioned ParseError for any non-hex character.
parseHexDigit :: Char -> Int -> ParseMonad Integer
parseHexDigit c idx
  | isHexDigit c = return (toInteger (digitToInt c))
  | otherwise    = throwError (Err idx ("Invalid character '" ++ [c] ++ "'"))
-- Parse a whole hexadecimal string into an Integer inside ParseMonad.
-- An invalid digit short-circuits the fold with that digit's error.
parseHex :: String -> ParseMonad Integer
parseHex s = go s 0 1
  where
    -- go remaining accumulator 1-based-index
    go [] acc _ = return acc
    go (c:cs) acc idx =
      parseHexDigit c idx >>= \d -> go cs ((acc * 16) + d) (idx + 1)
-- Render an Integer as its decimal String inside ParseMonad
-- (always succeeds).
toString :: Integer -> ParseMonad String
toString = return . show
-- convert takes a String containing a hexadecimal representation of
-- a number to a String containing a decimal representation of that
-- number. A parse error on the input String will generate a
-- descriptive error message as the output String.
convert :: String -> String
-- NOTE: the irrefutable (Right str) pattern is safe only because
-- printError always returns via 'return' (i.e. Right); every Left from
-- parseHex is caught by catchError before it can reach the match.
convert s = let (Right str) = do { n <- parseHex s; toString n } `catchError` printError
            in str
  where printError e = return $ "At index " ++ (show (location e)) ++ ":" ++ (reason e)
-- Convert each command-line argument from hex to decimal (or an error
-- description) and print one result per line.
main :: IO ()
main = getArgs >>= mapM_ (putStrLn . convert)
-- END OF FILE
| maurotrb/hs-exercises | AllAboutMonads/examples/example12.hs | mit | 2,752 | 4 | 13 | 649 | 453 | 240 | 213 | 27 | 2 |
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.Sugar.Names.Exports
-- License : MIT (http://opensource.org/licenses/MIT)
--
-- Maintainer : Phil Freeman <paf31@cantab.net>, Gary Burgess <gary.burgess@gmail.com>
-- Stability : experimental
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
module Language.PureScript.Sugar.Names.Exports
( findExportable
, resolveExports
) where
import Prelude ()
import Prelude.Compat
import Data.List (find, intersect)
import Data.Maybe (fromMaybe, mapMaybe)
import Data.Foldable (traverse_)
import Control.Monad
import Control.Monad.Error.Class (MonadError(..))
import qualified Data.Map as M
import Language.PureScript.Crash
import Language.PureScript.AST
import Language.PureScript.Names
import Language.PureScript.Errors
import Language.PureScript.Sugar.Names.Env
-- |
-- Finds all exportable members of a module, disregarding any explicit exports.
--
findExportable :: forall m. (Applicative m, MonadError MultipleErrors m) => Module -> m Exports
findExportable (Module _ _ mn ds _) =
  rethrow (addHint (ErrorInModule mn)) $ foldM updateExports nullExports ds
  where
  -- Fold one declaration into the accumulated exports.
  updateExports :: Exports -> Declaration -> m Exports
  -- A type class exports the class itself plus a value for each member.
  updateExports exps (TypeClassDeclaration tcn _ _ ds') = do
    exps' <- exportTypeClass exps tcn mn
    foldM go exps' ds'
    where
    go exps'' (TypeDeclaration name _) = exportValue exps'' name mn
    go exps'' (PositionedDeclaration pos _ d) = rethrowWithPosition pos $ go exps'' d
    go _ _ = internalError "Invalid declaration in TypeClassDeclaration"
  -- Data declarations export the type with all of its constructors;
  -- synonyms and FFI types export the type with no constructors.
  updateExports exps (DataDeclaration _ tn _ dcs) = exportType exps tn (map fst dcs) mn
  updateExports exps (TypeSynonymDeclaration tn _ _) = exportType exps tn [] mn
  updateExports exps (ExternDataDeclaration tn _) = exportType exps tn [] mn
  updateExports exps (ValueDeclaration name _ _ _) = exportValue exps name mn
  updateExports exps (ExternDeclaration name _) = exportValue exps name mn
  -- Positioned declarations recurse with position info for error reporting.
  updateExports exps (PositionedDeclaration pos _ d) = rethrowWithPosition pos $ updateExports exps d
  -- All other declaration forms contribute nothing exportable.
  updateExports exps _ = return exps
-- |
-- Resolves the exports for a module, filtering out members that have not been
-- exported and elaborating re-exports of other modules.
--
resolveExports :: forall m. (Applicative m, MonadError MultipleErrors m) => Env -> ModuleName -> Imports -> Exports -> [DeclarationRef] -> m Exports
resolveExports env mn imps exps refs =
  rethrow (addHint (ErrorInModule mn)) $ do
    filtered <- filterModule mn exps refs
    foldM elaborateModuleExports filtered refs

  where

  -- Takes the current module's imports, the accumulated list of exports, and a
  -- `DeclarationRef` for an explicit export. When the ref refers to another
  -- module, export anything from the imports that matches for that module.
  elaborateModuleExports :: Exports -> DeclarationRef -> m Exports
  elaborateModuleExports result (PositionedDeclarationRef pos _ r) =
    rethrowWithPosition pos $ elaborateModuleExports result r
  -- A module re-exporting itself: append everything it could export.
  elaborateModuleExports result (ModuleRef name) | name == mn = do
    let types' = exportedTypes result ++ exportedTypes exps
    let classes' = exportedTypeClasses result ++ exportedTypeClasses exps
    let values' = exportedValues result ++ exportedValues exps
    return result { exportedTypes = types'
                  , exportedTypeClasses = classes'
                  , exportedValues = values' }
  -- Re-export of another (possibly qualified pseudo-) module: pull the
  -- matching imported names, resolve them back to their defining modules,
  -- and add them to the exports.
  elaborateModuleExports result (ModuleRef name) = do
    let isPseudo = isPseudoModule name
    when (not isPseudo && not (isImportedModule name)) $
      throwError . errorMessage . UnknownExportModule $ name
    let reTypes = extract isPseudo name (importedTypes imps)
    let reDctors = extract isPseudo name (importedDataConstructors imps)
    let reClasses = extract isPseudo name (importedTypeClasses imps)
    let reValues = extract isPseudo name (importedValues imps)
    result' <- foldM (\exps' ((tctor, dctors), mn') -> exportType exps' tctor dctors mn') result (resolveTypeExports reTypes reDctors)
    result'' <- foldM (uncurry . exportTypeClass) result' (map resolveClass reClasses)
    foldM (uncurry . exportValue) result'' (map resolveValue reValues)
  elaborateModuleExports result _ = return result

  -- Extracts a list of values for a module based on a lookup table. If the
  -- boolean is true the values are filtered by the qualification of the
  -- import keys; otherwise they are filtered by the module of the
  -- underlying (unqualified) name.
  extract :: Bool -> ModuleName -> M.Map (Qualified a) (Qualified a, ModuleName) -> [Qualified a]
  extract True name = map fst . M.elems . M.filterWithKey (\k _ -> eqQual name k)
  extract False name = map fst . M.elems . M.filter (eqQual name . fst)

  -- Check whether a module name refers to a "pseudo module" that came into
  -- existence in an import scope due to importing one or more modules as
  -- qualified.
  isPseudoModule :: ModuleName -> Bool
  isPseudoModule = testQuals M.keys
    where
    -- Test for the presence of a `ModuleName` in a set of imports, using a
    -- function to either extract the keys or values. We test the keys to see if a
    -- value being re-exported belongs to a qualified module, and we test the
    -- values if that fails to see whether the value has been imported at all.
    testQuals :: (forall a. M.Map (Qualified a) (Qualified a, ModuleName) -> [Qualified a]) -> ModuleName -> Bool
    testQuals f mn' = any (eqQual mn') (f (importedTypes imps))
                   || any (eqQual mn') (f (importedDataConstructors imps))
                   || any (eqQual mn') (f (importedTypeClasses imps))
                   || any (eqQual mn') (f (importedValues imps))

  -- Check whether a module name refers to a module that has been imported
  -- without qualification into an import scope.
  isImportedModule :: ModuleName -> Bool
  isImportedModule = flip elem (importedModules imps)

  -- Check whether a module name matches that of a qualified value.
  eqQual :: ModuleName -> Qualified a -> Bool
  eqQual mn'' (Qualified (Just mn''') _) = mn'' == mn'''
  eqQual _ _ = False

  -- Constructs a list of types with their data constructors and the original
  -- module they were defined in from a list of type and data constructor names.
  resolveTypeExports :: [Qualified ProperName] -> [Qualified ProperName] -> [((ProperName, [ProperName]), ModuleName)]
  resolveTypeExports tctors dctors = map go tctors
    where
    go :: Qualified ProperName -> ((ProperName, [ProperName]), ModuleName)
    go (Qualified (Just mn'') name) = fromMaybe (internalError "Missing value in resolveTypeExports") $ do
      exps' <- envModuleExports <$> mn'' `M.lookup` env
      ((_, dctors'), mnOrig) <- find (\((name', _), _) -> name == name') (exportedTypes exps')
      -- Only re-export constructors that actually come from the type's
      -- original module and are exported there.
      let relevantDctors = mapMaybe (\(Qualified mn''' dctor) -> if mn''' == Just mnOrig then Just dctor else Nothing) dctors
      return ((name, intersect relevantDctors dctors'), mnOrig)
    go (Qualified Nothing _) = internalError "Unqualified value in resolveTypeExports"

  -- Looks up an imported class and re-qualifies it with the original module it
  -- came from.
  resolveClass :: Qualified ProperName -> (ProperName, ModuleName)
  resolveClass className = splitQual $ fromMaybe (internalError "Missing value in resolveClass") $
    resolve exportedTypeClasses className

  -- Looks up an imported value and re-qualifies it with the original module it
  -- came from.
  resolveValue :: Qualified Ident -> (Ident, ModuleName)
  resolveValue ident = splitQual $ fromMaybe (internalError "Missing value in resolveValue") $
    resolve exportedValues ident

  -- Shared lookup: follow a qualified name into the environment and
  -- requalify it with the module that originally exported it.
  resolve :: (Eq a) => (Exports -> [(a, ModuleName)]) -> Qualified a -> Maybe (Qualified a)
  resolve f (Qualified (Just mn'') a) = do
    exps' <- envModuleExports <$> mn'' `M.lookup` env
    mn''' <- snd <$> find ((== a) . fst) (f exps')
    return $ Qualified (Just mn''') a
  resolve _ _ = internalError "Unqualified value in resolve"

  -- A partial function that takes a qualified value and extracts the value and
  -- qualified module components.
  splitQual :: Qualified a -> (a, ModuleName)
  splitQual (Qualified (Just mn'') a) = (a, mn'')
  splitQual _ = internalError "Unqualified value in splitQual"
-- |
-- Filters the full list of exportable values, types, and classes for a module
-- based on a list of export declaration references.
--
-- Each fold below accumulates matching exports in reverse ref order; only
-- refs naming things exported from this module (mn) are accepted.
filterModule :: forall m. (Applicative m, MonadError MultipleErrors m) => ModuleName -> Exports -> [DeclarationRef] -> m Exports
filterModule mn exps refs = do
  types <- foldM (filterTypes $ exportedTypes exps) [] refs
  values <- foldM (filterValues $ exportedValues exps) [] refs
  classes <- foldM (filterClasses $ exportedTypeClasses exps) [] refs
  return exps { exportedTypes = types , exportedTypeClasses = classes , exportedValues = values }
  where
  -- Takes a list of all the exportable types with their data constructors, the
  -- accumulated list of filtered exports, and a `DeclarationRef` for an
  -- explicit export. When the ref refers to a type in the list of exportable
  -- values, the type and specified data constructors are included in the
  -- result.
  filterTypes :: [((ProperName, [ProperName]), ModuleName)] -> [((ProperName, [ProperName]), ModuleName)] -> DeclarationRef -> m [((ProperName, [ProperName]), ModuleName)]
  filterTypes exps' result (PositionedDeclarationRef pos _ r) =
    rethrowWithPosition pos $ filterTypes exps' result r
  filterTypes exps' result (TypeRef name expDcons) =
    case (\((name', _), mn') -> name == name' && mn == mn') `find` exps' of
      Nothing -> throwError . errorMessage . UnknownExportType $ name
      Just ((_, dcons), _) -> do
        -- An omitted constructor list (Nothing) means "export them all".
        let expDcons' = fromMaybe dcons expDcons
        traverse_ (checkDcon name dcons) expDcons'
        return $ ((name, expDcons'), mn) : result
  filterTypes _ result _ = return result
  -- Ensures a data constructor is exportable for a given type. Takes a type
  -- name, a list of exportable data constructors for the type, and the name of
  -- the data constructor to check.
  checkDcon :: ProperName -> [ProperName] -> ProperName -> m ()
  checkDcon tcon exps' name =
    unless (name `elem` exps') $
      throwError . errorMessage $ UnknownExportDataConstructor tcon name
  -- Takes a list of all the exportable classes, the accumulated list of
  -- filtered exports, and a `DeclarationRef` for an explicit export. When the
  -- ref refers to a class in the list of exportable classes, the class is
  -- included in the result.
  filterClasses :: [(ProperName, ModuleName)] -> [(ProperName, ModuleName)] -> DeclarationRef -> m [(ProperName, ModuleName)]
  filterClasses exps' result (PositionedDeclarationRef pos _ r) =
    rethrowWithPosition pos $ filterClasses exps' result r
  filterClasses exps' result (TypeClassRef name) =
    if (name, mn) `elem` exps'
    then return $ (name, mn) : result
    else throwError . errorMessage . UnknownExportTypeClass $ name
  filterClasses _ result _ = return result
  -- Takes a list of all the exportable values, the accumulated list of filtered
  -- exports, and a `DeclarationRef` for an explicit export. When the ref refers
  -- to a value in the list of exportable values, the value is included in the
  -- result.
  filterValues :: [(Ident, ModuleName)] -> [(Ident, ModuleName)] -> DeclarationRef -> m [(Ident, ModuleName)]
  filterValues exps' result (PositionedDeclarationRef pos _ r) =
    rethrowWithPosition pos $ filterValues exps' result r
  filterValues exps' result (ValueRef name) =
    if (name, mn) `elem` exps'
    then return $ (name, mn) : result
    else throwError . errorMessage . UnknownExportValue $ name
  filterValues _ result _ = return result
| michaelficarra/purescript | src/Language/PureScript/Sugar/Names/Exports.hs | mit | 11,875 | 0 | 20 | 2,258 | 2,975 | 1,561 | 1,414 | 140 | 10 |
{-
-}
import Control.Monad
import Data.Either
import Data.Foldable as D
import Data.List ()
import qualified Data.Sequence as SEQ
import Text.Printf
import Shuffle
-- | The four card suits.
data Suit = Clubs | Diamonds | Hearts | Spades
  deriving (Eq)

-- | Single-letter label used when rendering cards.
instance Show Suit where
  show suit = case suit of
    Clubs    -> "C"
    Diamonds -> "D"
    Hearts   -> "H"
    Spades   -> "S"
-- | Card ranks, ordered Ace-low through King.
data Rank = Ace
          | R2
          | R3
          | R4
          | R5
          | R6
          | R7
          | R8
          | R9
          | R10
          | Jack
          | Queen
          | King
          deriving (Eq, Ord, Bounded)

-- | Compact rank label: \"A\", \"2\"..\"10\", \"J\", \"Q\", \"K\".
instance Show Rank where
  show rank = case rank of
    Ace   -> "A"
    R2    -> "2"
    R3    -> "3"
    R4    -> "4"
    R5    -> "5"
    R6    -> "6"
    R7    -> "7"
    R8    -> "8"
    R9    -> "9"
    R10   -> "10"
    Jack  -> "J"
    Queen -> "Q"
    King  -> "K"
-- | Hand-written Enum instance: ranks are numbered 1..13 (Ace = 1),
-- which a derived instance (0-based) would not give us.
instance Enum Rank where
  fromEnum r =
    case lookup r (zip rankOrder [1 ..]) of
      Just n  -> n
      Nothing -> error "Prelude.Enum.Rank.fromEnum: bad argument"
    where
      rankOrder = [Ace, R2, R3, R4, R5, R6, R7, R8, R9, R10, Jack, Queen, King]
  toEnum n
    | n >= 1 && n <= 13 = rankOrder !! (n - 1)
    | otherwise = error "Prelude.Enum.Rank.toEnum: bad argument"
    where
      rankOrder = [Ace, R2, R3, R4, R5, R6, R7, R8, R9, R10, Jack, Queen, King]
-- | the size of a complete run of a suit.
-- | There should be some way to get this from the definition of Rank
-- (e.g. fromEnum maxBound), but it is simply hard-coded here.
runSize :: Int
runSize = 13
-- | A playing card: a suit paired with a rank.
data Card = Card Suit Rank

-- | Rendered as suit letter followed by rank label, e.g. \"S10\".
instance Show Card where
  show (Card suit rank) = concat [show suit, show rank]
-- | A deck is just an ordered list of cards.
type Deck = [Card]

-- | One complete 52-card deck, suits in declaration order, ranks Ace..King.
newdeck :: Deck
newdeck =
  [ Card suit rank
  | suit <- [Clubs, Diamonds, Hearts, Spades]
  , rank <- [Ace .. King]
  ]
-- | The full set of cards in play: two complete decks.
playdecks :: [Card]
playdecks = newdeck ++ newdeck
-- | One column of cards, with an index of the row where the cards start
-- | being visible. Rows before 'visible' are face-down.
data Stack = Stack { cards :: SEQ.Seq Card
                   , visible :: Int
                   } deriving (Show)
-- | The array of card stacks that forms the playing area
type Tableau = SEQ.Seq Stack
-- | Command to append part (or all) of one Stack to another Stack.
-- All indices are 0-based.
data MoveCommand = MoveCommand { sourceStack :: Int
                               , sourceIndex :: Int
                               , destStack :: Int
                               }

-- | Rendered as \"(source, cutRow) -> dest\".
instance Show MoveCommand where
  show m =
    concat [ "(", show (sourceStack m)
           , ", ", show (sourceIndex m)
           , ") -> ", show (destStack m)
           ]
-- | main entry point: run the interactive game loop until the player quits.
main :: IO ()
main =
  mainloop
-- | Start a fresh game and hand the shuffled deck and tableau to the loop.
mainloop :: IO ()
mainloop = newgame >>= uncurry playloop
-- | One iteration of the interactive loop: draw the board, read one line,
-- dispatch the command, then recurse with the (possibly) updated state.
-- NOTE(review): the status string "cards in left in deck" contains a typo,
-- but it is runtime output and is left untouched here.
playloop :: SEQ.Seq Card -> Tableau -> IO ()
playloop s t = do displayTableau t
                  putStrLn ""
                  putStrLn (show (SEQ.length s) ++ " cards in left in deck")
                  putStrLn ""
                  line <- getLine
                  case words line of
                    ["quit"] -> return ()
                    ["new"] -> mainloop
                    ["deal"] -> case deal s t of
                                  Left err -> do
                                    putStrLn err
                                    playloop s t
                                  Right (s', t') ->
                                    playloop s' t'
                    "move":xs -> case move xs t of
                                   Left err -> do
                                     putStrLn err
                                     playloop s t
                                   Right t' ->
                                     playloop s t'
                    "eat":[xs] -> case eat xs t of
                                    Left err -> do
                                      putStrLn err
                                      playloop s t
                                    Right t' ->
                                      playloop s t'
                    -- "scan" currently computes the move list and discards it;
                    -- printing is commented out below (scan is unfinished).
                    ["scan"] -> let _ = scan t in
                                  -- fmap print ms
                                  playloop s t
                    _ -> do
                      putStrLn ("unknown input: '" ++ line ++ "'")
                      playloop s t
-- | start a new game: shuffle two decks, deal 54 cards into ten columns
-- (sizes 6/5/5 repeating), and return the remaining draw pile.
newgame :: IO (SEQ.Seq Card, Tableau)
newgame = do
  deck <- SEQ.fromList <$> shuffle playdecks
  return (SEQ.drop startcount deck, newtableau startsizes (SEQ.take startcount deck))
  where
    startsizes = [6, 5, 5, 6, 5, 5, 6, 5, 5, 6]
    startcount = sum startsizes
-- | Lay out the opening tableau: one column per requested size, taking
-- cards off the front of the deck, with only each column's last card
-- face-up (visible = n - 1).
newtableau :: [Int] -> SEQ.Seq Card -> Tableau
newtableau ns deck = newtableau' ns deck SEQ.empty

newtableau' :: [Int] -> SEQ.Seq Card -> Tableau -> Tableau
newtableau' [] _ acc = acc
newtableau' (n:rest) remaining acc =
  newtableau' rest (SEQ.drop n remaining)
              (acc SEQ.|> Stack {cards = SEQ.take n remaining, visible = n - 1})
-- | Print the tableau as a grid: a column-number header, then one numbered
-- row per card position, each cell 3 characters wide. The tableau is
-- reversed because the row is built with a right fold.
displayTableau :: Tableau -> IO ()
displayTableau t =
  let count = D.foldr max 0 (fmap (SEQ.length . cards) t)
      revt = SEQ.reverse t
  in do
    putStrLn "      1   2   3   4   5   6   7   8   9  10"
    putStrLn "   ================================================"
    for_ [0..count-1] (\i -> putStrLn (printf "%3d|" (i+1) ++ D.foldr (f i) "" revt))
  where
    f i s a = a ++ showStackRow i s ++ " "
-- | display one entry from the Stack cards item:
-- \"...\" for a face-down card, blanks past the end of the column,
-- otherwise the card right-aligned in 3 columns.
showStackRow :: Int -> Stack -> String
showStackRow i s | i < visible s = "..."
                 | i >= SEQ.length (cards s) = "   "
                 | otherwise = printf "%3s" $ show (SEQ.index (cards s) i)
-- | Deal out a new Card to each stack. Fails if the deck is empty or any
-- column is empty (the classic spider rule). Dealt cards keep each
-- column's visibility row unchanged; the fold rebuilds the tableau in order.
deal :: SEQ.Seq Card -> Tableau -> Either String (SEQ.Seq Card, Tableau)
deal cs t
  | SEQ.null cs = Left "empty deck"
  | any (SEQ.null . cards) t = Left "empty column"
  | otherwise =
    let w = SEQ.length t
        ss = SEQ.zip (SEQ.take w cs) t
        cs' = SEQ.drop w cs
        f (c, s) t' = Stack {cards=cards s SEQ.|> c, visible=visible s} SEQ.<| t'
    in Right (cs', foldr f SEQ.empty ss)
-- | Parse, validate and apply a @move@ command against the tableau.
move :: [String] -> Tableau -> Either String Tableau
move args tableau = do
  cmd <- parseMove args
  moveIsValid tableau cmd
  performMove tableau cmd
-- | Parse a @move@ command's arguments: source stack, cut row, destination
-- stack, all 1-based on input and converted to 0-based here.
-- Fix: the previous version indexed into the parsed-integer list whenever
-- three *tokens* were present, so a non-numeric argument (e.g. "move a 1 2")
-- crashed via 'head'/'!!' instead of returning 'Left'. We now require that
-- exactly three tokens were given and all three parsed.
parseMove :: [String] -> Either String MoveCommand
parseMove xs =
  case rights (map parseInt xs) of
    [src, row, dst] | length xs == 3 ->
      Right MoveCommand { sourceStack = src - 1
                        , sourceIndex = row - 1
                        , destStack   = dst - 1
                        }
    _ -> Left "unparseable move command"
-- | Check that a move command is legal: the cut row must be face-up and in
-- range; moving onto a non-empty column additionally requires the moved
-- cards to form a valid run whose first card is one rank below the
-- destination's last card.
moveIsValid :: Tableau -> MoveCommand -> Either String ()
moveIsValid t mc = do
  s <- getStack t (sourceStack mc)
  d <- getStack t (destStack mc)
  when (sourceIndex mc < visible s) $
    Left ("cut point " ++ show (sourceIndex mc) ++ " < visible " ++ show (visible s))
  when (sourceIndex mc >= SEQ.length (cards s)) $
    Left ("cut point " ++ show (sourceIndex mc) ++ " >= length " ++ show (SEQ.length (cards s)))
  if SEQ.null (cards d) then
    Right ()
  else
    let cs = cards s
        cutSeq = SEQ.drop (sourceIndex mc) cs
        Card _ cfr = SEQ.index cutSeq 0
        cd = cards d
        Card _ dlr = SEQ.index cd (SEQ.length cd - 1)
    in do
      unless (dlr == succ cfr) $
        Left ("dest rank " ++ show dlr ++ " not successor of " ++ show cfr)
      unless (validRun cutSeq) $
        Left "cards to be moved not a valid run"
      Right ()
-- | Apply a validated move: cut the cards off the source stack and paste
-- them onto the destination, updating both columns.
-- NOTE(review): if sourceStack == destStack, the second 'SEQ.update' pastes
-- onto the pre-cut stack and wins, duplicating cards — presumably such
-- moves never occur, but 'moveIsValid' does not reject them; confirm.
performMove :: Tableau -> MoveCommand -> Either String Tableau
performMove t mc = do
  s <- getStack t (sourceStack mc)
  d <- getStack t (destStack mc)
  (s', c) <- cut s (sourceIndex mc)
  let d' = paste d c in
    return (SEQ.update (sourceStack mc) s' $ SEQ.update (destStack mc) d' t)
-- | cut a Sequence of cards from the source stack at index i.
-- When the cut removes everything from the visibility row onward, the row
-- above it is flipped face-up (visible decremented); cutting the whole
-- stack resets visibility to 0.
cut :: Stack -> Int -> Either String (Stack, SEQ.Seq Card)
cut s i
  | i < visible s = Left ("cut point " ++ show i ++ " < visible " ++ show (visible s))
  | i == visible s && visible s == 0 =
    return (Stack {cards=SEQ.empty, visible=0}, cards s)
  | i == visible s =
    return (Stack {cards=SEQ.take i (cards s), visible=visible s-1}, SEQ.drop i (cards s))
  | otherwise =
    return (Stack {cards=SEQ.take i (cards s), visible=visible s}, SEQ.drop i (cards s))
-- | Append a sequence of cards to the end of a stack, leaving the
-- visibility row unchanged.
paste :: Stack -> SEQ.Seq Card -> Stack
paste stack extra = stack { cards = cards stack SEQ.>< extra }
-- | Parse, validate and apply an @eat@ command: remove a complete 13-card
-- run from the end of the given (1-based) column.
eat :: String -> Tableau -> Either String Tableau
eat input tableau = do
  columnNumber <- parseInt input
  let columnIndex = columnNumber - 1
  eatIsValid tableau columnIndex
  performEat tableau columnIndex
-- | Check that the column holds at least a full run's worth of cards and
-- that its last 'runSize' cards form a valid run.
eatIsValid :: Tableau -> Int -> Either String ()
eatIsValid t stackIndex = do
  s <- getStack t stackIndex
  let
    cs = cards s
    len = SEQ.length cs
    dropLen = len - runSize in do
    when (len < runSize) $
      Left ("too few cards to eat " ++ show len)
    unless (validRun (SEQ.drop dropLen cs)) $
      Left "not a valid run"
-- | return true if a sequence of cards is in order and all of a suit:
-- every card is the same suit as, and one rank above, its successor.
validRun :: SEQ.Seq Card -> Bool
validRun cs
  | SEQ.length cs <= 1 = True
  | otherwise = suitOk && rankOk && validRun (SEQ.take lastIx cs)
  where
    lastIx = SEQ.length cs - 1
    Card lastSuit lastRank = SEQ.index cs lastIx
    Card prevSuit prevRank = SEQ.index cs (lastIx - 1)
    suitOk = prevSuit == lastSuit
    rankOk = prevRank == succ lastRank
-- | Drop the last 'runSize' cards from the column. If the visibility row
-- ends up exactly at the new length, flip one more card face-up.
performEat :: Tableau -> Int -> Either String Tableau
performEat t stackIndex = do
  s <- getStack t stackIndex
  let cs = cards s
      i = SEQ.length cs - runSize
      v = visible s
      v' = if v == i then v - 1 else v
      s' = Stack {cards=SEQ.take i cs, visible=v'} in
    return (SEQ.update stackIndex s' t)
-- | find all valid moves in the tableau
-- NOTE(review): this function is unfinished — 'inner' has a type signature
-- but no binding (a compile error as written), and 'outer' ignores its
-- Int/Stack arguments. Left byte-identical; needs a real implementation.
scan :: Tableau -> SEQ.Seq MoveCommand
scan t = SEQ.foldrWithIndex outer SEQ.empty t
-- SEQ.foldrWithIndex (Int -> a -> b -> b) -> b -> Seq a -> b Source #
  where
    outer :: Int -> Stack -> SEQ.Seq MoveCommand -> SEQ.Seq MoveCommand
    outer i s acc = SEQ.foldrWithIndex inner acc t
    inner :: Int -> Stack -> SEQ.Seq MoveCommand -> SEQ.Seq MoveCommand
-- | Parse a whole string as an Int; any unconsumed trailing input makes
-- the parse fail.
parseInt :: String -> Either String Int
parseInt input
  | [(value, "")] <- parses = Right value
  | otherwise = Left ("unparseable int '" ++ input ++ "'")
  where
    parses = reads input :: [(Int, String)]
-- | Safe positional lookup of a column, with descriptive range errors.
getStack :: Tableau -> Int -> Either String Stack
getStack tableau idx
  | idx < 0 = Left ("index too small " ++ show idx)
  | idx >= SEQ.length tableau = Left ("index too large " ++ show idx)
  | otherwise = Right (SEQ.index tableau idx)
| dougfort/fog-cards | cards.hs | mit | 10,715 | 0 | 18 | 4,050 | 3,946 | 1,940 | 2,006 | 283 | 10 |
{-# LANGUAGE CPP #-}
module Melchior.Dom.Internal.Fragments (
Html
, Fragment
#ifdef __UHC_TARGET_JS__
, Text
, JDiv
, JSpan
, Attribute
#endif
, addClassTo
, addAttribute
) where
import Melchior.Data.String (JSString, stringToJSString, jsStringToString)
-- | Rendered HTML text lives in the JS string type.
type Html = JSString
-- | A tiny HTML fragment tree: text leaves, div/span nodes, and an
-- attribute wrapper carrying key -> values pairs for the wrapped node.
data Fragment = Text Html | JDiv [Fragment] | JSpan [Fragment] | Attribute [(String, [String])] Fragment
-- | Render a fragment to its HTML string.
-- Fix: the JSpan case previously emitted a mismatched closing tag
-- (\"</div>\" instead of \"</span>\"), producing invalid HTML.
instance Show Fragment where
  show (Text h) = jsStringToString h
  show (JDiv f) = "<div>" ++ (concatMap show f) ++ "</div>"
  show (JSpan f) = "<span>" ++ (concatMap show f) ++ "</span>"
  show (Attribute t f) = "<"++(tag f)++" "++(collapse t)++">"++(concatMap show $ fragments f)++"</"++(tag f)++">"
-- | The child fragments of a node; a text leaf has none, and an attribute
-- wrapper defers to the fragment it wraps.
fragments :: Fragment -> [Fragment]
fragments frag = case frag of
  Text _          -> []
  JDiv children   -> children
  JSpan children  -> children
  Attribute _ f   -> fragments f
-- | The tag text for a fragment; an attribute wrapper appends its rendered
-- key/value pairs after the wrapped fragment's tag name.
tag :: Fragment -> String
tag frag = case frag of
  Text _          -> ""
  JDiv _          -> "div"
  JSpan _         -> "span"
  Attribute att f -> tag f ++ " " ++ collapse att
-- | Render attribute pairs as @key='values '@ segments. Values are folded
-- left with prepending, so they come out reversed with a trailing space —
-- preserved exactly as the original behaved.
collapse :: [(String, [String])] -> String
collapse = concatMap render
  where
    render (key, vals) =
      key ++ "='" ++ foldl (\acc v -> v ++ " " ++ acc) "" vals ++ "' "
-- | Attach the given value as a CSS class on the fragment.
addClassTo :: String -> Fragment -> Fragment
addClassTo = addAttribute "class"
-- | Add a key/value attribute to a fragment. Text leaves take no
-- attributes; an existing attribute wrapper is merged into.
addAttribute :: String -> String -> Fragment -> Fragment
addAttribute key val frag = case frag of
  Text h            -> Text h
  JDiv kids         -> Attribute [(key, [val])] (JDiv kids)
  JSpan kids        -> Attribute [(key, [val])] (JSpan kids)
  Attribute attrs f -> Attribute (addToOrCreateKey key val attrs) f
-- | Prepend @v@ to the value list stored under key @k@, creating the entry
-- if the key is absent.
-- Fix: the original consed the merged entry onto the list of *matching*
-- entries, so the stale entry survived as a duplicate key
-- (e.g. [("k",["v2","v1"]),("k",["v1"])]). The stale entry is now removed.
addToOrCreateKey :: String -> String -> [(String, [String])] -> [(String, [String])]
addToOrCreateKey k v a = case matching of
    []        -> (k, [v]) : a
    [(_, vs)] -> (k, v : vs) : filter (\x -> fst x /= k) a
    _         -> [] -- fail? should never be more than one key... invariant
  where
    matching = filter (\x -> fst x == k) a
| kjgorman/melchior | Melchior/Dom/Internal/Fragments.hs | mit | 1,836 | 0 | 15 | 363 | 877 | 470 | 407 | 40 | 3 |
module Tools (AppConfig(..), appConfig
, InMemStorage, newStorage, writeToStorage, readFromStorage
, logIt, errorToString
) where
-- This module helps us keep the rest of the code beginner-friendly
import Control.Monad.Trans
import Control.Monad.Trans.Either
import Control.Error (note)
import Text.Read
import System.Environment
import Control.Concurrent.STM
-- AppConfig stuff

-- | Static application configuration, read from environment variables
-- (see 'appConfig' for the variable names).
data AppConfig = AppConfig
  { cfgToken :: String -- ^ GitLab API token (GITLAB_TOKEN)
  , cfgServerHost :: String -- ^ HTTP server host (SERVER_HOST)
  , cfgServerPort :: Int -- ^ HTTP server port (SERVER_PORT)
  , cfgProjectId :: Int -- ^ GitLab project id (PROJECT_ID)
  , cfgEntityUrl :: String -- ^ base URL for linked entities (ENTITY_URL)
  }
-- | Read the full configuration from the environment, failing with a
-- descriptive message on the first missing or unparseable variable.
appConfig :: EitherT String IO AppConfig
appConfig = do
  token <- getEnvString "GITLAB_TOKEN"
  serverHost <- getEnvString "SERVER_HOST"
  serverPort <- getEnvInt "SERVER_PORT"
  projectId <- getEnvInt "PROJECT_ID"
  entityUrl <- getEnvString "ENTITY_URL"
  return $ AppConfig token serverHost serverPort projectId entityUrl
-- | Look up an environment variable, turning absence into a Left with a
-- descriptive message.
getEnvString :: String -> EitherT String IO String
getEnvString name =
  liftIO (lookupEnv name) >>= hoistEither . note errorNote
  where
    errorNote = "Error: " ++ name ++ " not found in the environment vars."
-- | Look up an environment variable and parse it as an Int, failing with a
-- descriptive message when missing or non-numeric.
getEnvInt :: String -> EitherT String IO Int
getEnvInt name =
  getEnvString name >>= hoistEither . note errorNote . readMaybe
  where
    errorNote = "Error: couldn't parse int value of env var " ++ name ++ "."
-- InMemStorage stuff

-- | Mutable in-memory store: a transactional variable holding a list.
type InMemStorage a = TVar [a]

-- | Create an empty store.
newStorage :: IO (InMemStorage a)
newStorage = newTVarIO []

-- | Atomically replace the stored list.
writeToStorage :: InMemStorage a -> [a] -> IO ()
writeToStorage storage = atomically . writeTVar storage

-- | Snapshot the stored list.
readFromStorage :: InMemStorage a -> IO [a]
readFromStorage = readTVarIO
-- Various
logIt :: Show a => IO a -> IO a
logIt toLog = do
result <- toLog
print result
return result
-- | Convert any showable error in an EitherT computation to its String
-- rendering, leaving successes untouched.
errorToString :: Show e => EitherT e IO a -> EitherT String IO a
errorToString action = do
  outcome <- liftIO (runEitherT action)
  either (left . show) right outcome
| miciek/mr-stats-haskell-servant | src/Tools.hs | mit | 1,986 | 0 | 11 | 406 | 577 | 293 | 284 | 51 | 2 |
module Y2018.M03.D14.Exercise where
{--
Today's Haskell exercise we pull from the community:
http://www.glc.us.es/~jalonso/exercitium/suma-de-las-sumas-de-los-cuadrados-de-los-divisores/
The sums of the sums of the squares of the divisors of n, or:
--}
-- | The sum, over the first @n@ positive integers, of the sum of the
-- squares of each integer's divisors: sum_{k=1..n} sigma_2(k).
-- (Previously an 'undefined' stub.) Plain trial division — fine for small
-- n; see the discussion at the URL above for approaches to large n.
sumaSumasCuadradosDivisores :: Integer -> Integer
sumaSumasCuadradosDivisores n =
  sum [ d * d | k <- [1 .. n], d <- [1 .. k], k `mod` d == 0 ]
| geophf/1HaskellADay | exercises/HAD/Y2018/M03/D14/Exercise.hs | mit | 514 | 0 | 5 | 74 | 29 | 19 | 10 | 3 | 1 |
-- | True when a list reads the same forwards and backwards.
isPalindrome :: (Eq a) => [a] -> Bool
isPalindrome xs = and (zipWith (==) xs (reverse xs))

-- | Largest palindromic product of two n-digit numbers; the inner range
-- starts at x to avoid checking each unordered pair twice.
euler004 :: Int -> Integer
euler004 n = maximum candidates
  where
    upper = (10 ^ n) - 1
    lower = 10 ^ (n - 1)
    candidates =
      [ p
      | x <- [upper, upper - 1 .. lower]
      , y <- [upper, upper - 1 .. x]
      , let p = x * y
      , isPalindrome (show p)
      ]
-- | Print the largest palindromic product of two 3-digit numbers.
main :: IO ()
main = print $ euler004 3
| marknsikora/euler | euler004/euler004.hs | mit | 399 | 0 | 11 | 160 | 181 | 95 | 86 | 11 | 1 |
-- building-functions-5.hs
module Rvrs where
-- | Rearrange the fixed-layout phrase \"Curry is awesome\" into
-- \"awesome is Curry\" by slicing at hard-coded offsets.
rvrs :: String -> String
rvrs phrase = concat [lastPart, " ", midPart, " ", firstPart]
  where
    firstPart = take 5 phrase
    midPart   = take 2 (drop 6 phrase)
    lastPart  = drop 9 phrase

main :: IO ()
main = print (rvrs "Curry is awesome")
{-# LANGUAGE BangPatterns #-}
import qualified BigTable.Blaze as Blaze
import qualified BigTable.Lucid as Lucid
import qualified BigTable.Nice as Nice
import Control.Monad (forM_)
import qualified Data.Text.Lazy.IO as T
import qualified Weigh as Mem
-- | A table of the requested number of identical ten-element rows.
-- NOINLINE keeps the benchmark from sharing/constant-folding the input.
{-# NOINLINE rows #-}
rows :: Int -> [[Int]]
rows count = replicate count [1 .. 10]
-- | Weigh the memory allocation of each HTML renderer on tables of
-- 10, 100 and 1000 rows. The block comment below is a disabled sanity
-- check that the three renderers produce identical output.
main :: IO ()
main = do
  {-
  -- Sanity checks
  let
    check l f g =
      if f' == g'
        then putStrLn ("OK: " ++ l)
        else do
          putStrLn ("FAILED: " ++ l)
          putStrLn "\n### f:"
          T.putStrLn f'
          putStrLn "\n### g:"
          T.putStrLn g'
      where
        f' = f (rows 10)
        g' = g (rows 10)
  check "nice = blaze" Nice.bigTable Blaze.bigTable
  check "nice = lucid" Nice.bigTable Lucid.bigTable
  check "lucid = blaze" Lucid.bigTable Blaze.bigTable
  -}
  Mem.mainWith $ forM_ [10, 100, 1000] $ \i -> do
    let table = rows i
    Blaze.weight table
    Nice.weight table
    Lucid.weight table
| TransportEngineering/nice-html | benchmarks/Mem.hs | mit | 1,012 | 0 | 14 | 307 | 168 | 96 | 72 | 17 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE LambdaCase, OverloadedStrings #-}
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy.Char8 as BLC
import qualified Data.Text as T
import Control.Monad (liftM2)
import Data.Attoparsec.ByteString.Lazy (eitherResult, parse)
import Data.Binary.Put (Put, runPut)
import Data.Int (Int64)
import Data.List (uncons)
import System.IO (IOMode(WriteMode), withFile)
import System.Environment (getArgs)
import CSVChunk (foldMapChunks, mapConcatChunks)
import Data.ProtoBlob (lenMessagePutM)
import ProtoDB.Parser ( tallyRowHeuristic', tallyRows
, CellBlock(..), attemptDecode, finalGuess, parseCSVBody
)
import ProtoDB.Types (ProtoCellType(ProtoStringType))
import ProtoDB.Writer (
WritableField(..), putProtoCell, mkProtoDB, expandTypes, toProtoField
)
-- | Raise an error prefixed with this file's name for easier attribution.
errSelf :: String -> a
errSelf msg = error ("csv-type-profile.hs." ++ msg)
-- | Default chunk size for streaming reads: 10 MiB.
defChunkSize :: Int64
defChunkSize = 10*1024*1024
-- | Suffix for the written type-profile file (a 'show'n [WritableField]).
profileSuffix :: String
profileSuffix = ".PDB.profile"
-- | Suffix for the rewritten protoblob output file.
rewriteSuffix :: String
rewriteSuffix = ".PDB.protoblob"
-- | Dispatch on the first CLI argument. Note the patterns match by prefix:
-- any flag beginning "--p" selects profiling, "--n" sampling, "--r" rewrite.
main :: IO ()
main = getArgs >>= \case
    (('-':'-':'p':_):paths) -> mapM_ (\ path
     -> writeFile (path++profileSuffix) . show
        =<< profileChunks defChunkSize path
     ) paths
    (('-':'-':'n':_):n:paths) -> mapM_ (\ path
     -> writeFile (path++profileSuffix) . show . profileN (read n)
        =<< BLC.readFile path
     ) paths
    (('-':'-':'r':_):paths) -> mapM_ reWriteFile paths
    _ -> putStrLn $ "USEAGE: \n"
         ++" --profile list of input CSV files \n"
         ++" --n #linesSample list of input CSV files \n"
         ++" --rewrite datablockName.csv \n"
         ++"(rewrite mode must be done on a file which has already had --profile run on it)"
-- | Rewrite a CSV file as a protoblob using the previously written
-- profile: emit the DB header, then the field descriptors, then each data
-- chunk re-encoded. NOTE(review): 'read' on the profile file is partial
-- and will crash on a malformed profile — confirm that is acceptable here.
reWriteFile :: FilePath -> IO ()
reWriteFile csv = withFile (csv++rewriteSuffix) WriteMode $ \ out -> do
    flds <- read <$> readFile (csv++profileSuffix)
    BLC.hPut out $ runPut $ lenMessagePutM $ mkProtoDB (T.pack csv) flds
    BLC.hPut out $ runPut $ mapM_ (lenMessagePutM . toProtoField) flds
    mapConcatChunks defChunkSize csv
        (const $ BC.empty)
        (const $ reWriteChunk' $ expandTypes flds)
        (BC.hPut out)
-- | Strict-ByteString wrapper around 'reWriteChunk': converts in, runs the
-- Put, and converts the lazy result back out.
reWriteChunk' :: [ProtoCellType] -> BC.ByteString -> BC.ByteString
reWriteChunk' profile chunk =
  BLC.toStrict (runPut (reWriteChunk profile (BLC.fromStrict chunk)))
-- | Parse one chunk of CSV body against the column-type profile and
-- serialize every cell. A parse failure aborts with 'error' (the chunk
-- boundary is assumed to fall on a row boundary upstream).
reWriteChunk :: [ProtoCellType] -> BLC.ByteString -> Put
reWriteChunk profile = mapM_ putProtoCell
    . either (error . ("csv-type-profile.hs.reWriteChunk: parse error: \n"++)) id
    . eitherResult . parse (parseCSVBody profile)
-- | Profile a CSV file chunk-wise: pair each header-row field name with
-- the column type guessed by tallying every chunk's cells; columns with no
-- confident guess default to string.
profileChunks :: Int64 -> FilePath -> IO [WritableField]
profileChunks chunkSize path = liftM2 (zipWith $ \ hdrFld fldType
        -> WritableField (attemptDecode hdrFld) fldType []
    )
    -- Field titles come from the first line of the file.
    (BLC.split ',' . BLC.copy . fst
        . maybe (errSelf $ "onFileChunks: file "++path++" is empty") id
        . uncons . BLC.lines <$> BLC.readFile path
    )
    -- Column types come from folding a tally over every chunk.
    (fmap (map (maybe ProtoStringType id . finalGuess) . openCellBlock)
        $ foldMapChunks chunkSize path $ \ _
            ->CellBlock . tallyRows . map (BLC.split ',') . BLC.lines . BLC.fromStrict
    )
-- | Guess column types from at most the first @sampleLines@ rows of the
-- input (a non-positive count means: use every row).
profileN :: Int -> BLC.ByteString -> [Maybe ProtoCellType]
profileN sampleLines contents =
  tallyRowHeuristic' (map (BLC.split ',') sampled)
  where
    allRows = BLC.lines contents
    sampled
      | sampleLines > 0 = take sampleLines allRows
      | otherwise       = allRows
| MadSciGuys/protodb | csv-to-proto.hs | mit | 3,247 | 0 | 17 | 558 | 1,049 | 563 | 486 | 71 | 4 |
-- Copyright (c) 2016-present, SoundCloud Ltd.
-- All rights reserved.
--
-- This source code is distributed under the terms of a MIT license,
-- found in the LICENSE file.
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module Kubernetes.Model.V1.EventList
( EventList (..)
, kind
, apiVersion
, metadata
, items
, mkEventList
) where
import Control.Lens.TH (makeLenses)
import Data.Aeson.TH (defaultOptions,
deriveJSON,
fieldLabelModifier)
import Data.Text (Text)
import GHC.Generics (Generic)
import Kubernetes.Model.Unversioned.ListMeta (ListMeta)
import Kubernetes.Model.V1.Event (Event)
import Prelude hiding (drop, error, max,
min)
import qualified Prelude as P
import Test.QuickCheck (Arbitrary, arbitrary)
import Test.QuickCheck.Instances ()
-- | EventList is a list of events.
-- Fields mirror the Kubernetes wire format; JSON names are the record
-- names with the leading underscore stripped (see the deriveJSON splice).
data EventList = EventList
    { _kind :: !(Maybe Text)
    , _apiVersion :: !(Maybe Text)
    , _metadata :: !(Maybe ListMeta)
    , _items :: !([Event])
    } deriving (Show, Eq, Generic)

makeLenses ''EventList

$(deriveJSON defaultOptions{fieldLabelModifier = (\n -> if n == "_type_" then "type" else P.drop 1 n)} ''EventList)
-- | Generate each field independently for property testing.
instance Arbitrary EventList where
    arbitrary = EventList <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
-- | Use this method to build a EventList: only the item list is required;
-- the optional metadata fields start empty.
mkEventList :: [Event] -> EventList
mkEventList eventItems =
  EventList { _kind = Nothing
            , _apiVersion = Nothing
            , _metadata = Nothing
            , _items = eventItems
            }
| soundcloud/haskell-kubernetes | lib/Kubernetes/Model/V1/EventList.hs | mit | 1,971 | 0 | 14 | 749 | 351 | 210 | 141 | 43 | 1 |
{- |
module: $Header$
description: Primitive natural number functions
license: MIT
maintainer: Joe Leslie-Hurd <joe@gilith.com>
stability: provisional
portability: portable
-}
module OpenTheory.Primitive.Natural
( Natural,
shiftLeft,
shiftRight )
where
import Data.Bits
import qualified Data.Maybe as Maybe
import qualified Test.QuickCheck
-- | Natural numbers represented as a non-negative Integer; the smart
-- instances below reject operations that would produce a negative value.
newtype Natural =
    Natural { unNatural :: Integer }
  deriving (Eq, Ord)
-- | Shift left by the given number of bit positions.
shiftLeft :: Natural -> Natural -> Natural
shiftLeft (Natural value) amount = Natural (shiftL value (fromIntegral amount))

-- | Shift right by the given number of bit positions.
shiftRight :: Natural -> Natural -> Natural
shiftRight (Natural value) amount = Natural (shiftR value (fromIntegral amount))
-- | Shown exactly like the underlying Integer.
instance Show Natural where
  show = show . unNatural
-- | Arithmetic on the underlying Integer, except that subtraction below
-- zero and conversion from a negative Integer raise errors rather than
-- wrapping, keeping the non-negative invariant.
instance Num Natural where
  x + y = Natural (unNatural x + unNatural y)
  x - y =
      if x < y
        then error "OpenTheory.Primitive.Natural.-"
        else Natural (unNatural x - unNatural y)
  x * y = Natural (unNatural x * unNatural y)
  abs x = x
  signum x = if unNatural x == 0 then x else Natural 1
  fromInteger x =
      if x < 0
        then error "OpenTheory.Primitive.Natural.fromInteger"
        else Natural x
-- | Delegate to the underlying Integer.
instance Real Natural where
  toRational = toRational . unNatural
-- | Delegate to Integer, rejecting negative enumeration indices.
instance Enum Natural where
  toEnum n
    | n < 0 = error "OpenTheory.Primitive.Natural.toEnum"
    | otherwise = Natural (toEnum n)
  fromEnum = fromEnum . unNatural
-- | Division on the underlying Integer, with an explicit error on a zero
-- divisor (instead of the default divide-by-zero exception).
instance Integral Natural where
  divMod x y =
      if y == 0
        then error "OpenTheory.Primitive.Natural.divMod"
        else
          let (d,m) = divMod (unNatural x) (unNatural y)
          in (Natural d, Natural m)
  quotRem x y =
      if y == 0
        then error "OpenTheory.Primitive.Natural.quotRem"
        else
          let (q,r) = quotRem (unNatural x) (unNatural y)
          in (Natural q, Natural r)
  toInteger = unNatural
-- | Parse like Integer but silently drop parses that yield a negative
-- value, so "read" on a negative literal fails rather than errors.
instance Read Natural where
  readsPrec =
      \p -> Maybe.mapMaybe f . readsPrec p
    where
      f (n,s) = if n < 0 then Nothing else Just (Natural n, s)
-- | Bit operations on the underlying Integer. Operations with no sensible
-- meaning on unbounded naturals (complement, rotate, bitSize) raise
-- explicit errors rather than returning a wrong answer.
instance Data.Bits.Bits Natural where
  x .&. y = Natural (unNatural x .&. unNatural y)
  x .|. y = Natural (unNatural x .|. unNatural y)
  xor x y = Natural (xor (unNatural x) (unNatural y))
  complement _ = error "OpenTheory.Primitive.Natural.complement"
  shift x k = Natural (shift (unNatural x) k)
  shiftL x k = Natural (shiftL (unNatural x) k)
  shiftR x k = Natural (shiftR (unNatural x) k)
  rotate _ _ = error "OpenTheory.Primitive.Natural.rotate"
  bitSize _ = error "OpenTheory.Primitive.Natural.bitSize"
  isSigned _ = False
  testBit x k = testBit (unNatural x) k
  bit k = Natural (bit k)
  popCount x = popCount (unNatural x)
-- | Generate from a random Integer, folding negatives onto the naturals
-- (x maps to -(x+1), so -1 -> 0, -2 -> 1, ...).
instance Test.QuickCheck.Arbitrary Natural where
  arbitrary = fmap clamp Test.QuickCheck.arbitrary
    where
      clamp x
        | x < 0 = Natural (negate (x + 1))
        | otherwise = Natural x
| gilith/opentheory | data/haskell/opentheory-primitive/src/OpenTheory/Primitive/Natural.hs | mit | 2,855 | 0 | 13 | 697 | 990 | 497 | 493 | 71 | 1 |
module Main where
import Test.Framework as TF (defaultMain, testGroup, Test)
import qualified Light.Geometry.VectorTest as VT
import qualified Light.Geometry.PointTest as PT
import qualified Light.Geometry.MatrixTest as MT
-- | All geometry test groups, one per module under test.
tests :: [TF.Test]
tests = [ testGroup "Vector" VT.tests
        , testGroup "Point" PT.tests
        , testGroup "Matrix" MT.tests
        ]
-- | Run the whole suite with test-framework's default runner.
main :: IO ()
main = defaultMain tests
| jtdubs/Light | testsuite/Tests.hs | mit | 411 | 0 | 7 | 82 | 112 | 69 | 43 | 11 | 1 |
{-# LANGUAGE RecordWildCards #-}
{- | All the concrete options.
Notes:
* The term \"option\" refers to a flag or combination of flags that
together form a part of a command's configuration. Ideally, options
should be orthogonal to each other, so we can freely combine them.
* A primitive (indivisible) option has an associate value type.
* An option named \"xyzActions\" represents a set of flags that act as
mutually exclusive sub-commands. They typically have a dedicated value
type named \"XyzAction\".
* This module is probably best imported qualified. This is in contrast to
the current practice of using subtly differing names to avoid name
clashes for closely related items. For instance, the data constructors
for an option's value type and the corresponding data constructors in
'F.DarcsFlag' may coincide. This is also why we import "Darcs.UI.Flags"
qualified here.
* When the new options system is finally in place, no code other than the
one for constructing options should directly refer to 'F.DarcsFlag'
constructors.
-}
module Darcs.UI.Options.All
( DarcsOption
-- root
, RootAction (..)
, rootActions
-- all commands
, StdCmdAction (..)
, stdCmdActions
, debug
, Verbosity (..) -- re-export
, verbosity
, timings
, anyVerbosity
, preHook
, postHook
, hooks
, UseCache (..) -- re-export
, useCache
-- interactivity
, XmlOutput (..)
, xmloutput
, DryRun (..) -- re-export
, dryRun
, dryRunXml
, interactive
, pipe
, WantGuiPause (..) -- re-export
, pauseForGui
, askdeps
-- patch selection
, module Darcs.UI.Options.Matching -- re-export
, SelectDeps (..)
, selectDeps
, changesReverse
, matchMaxcount
-- local or remote repo(s)
, WorkRepo (..) -- re-export
, workRepo
, workingRepoDir
, RemoteRepos (..) -- re-export
, remoteRepos
, possiblyRemoteRepo
, reponame
, notInRemote
, notInRemoteFlagName
, RepoCombinator (..)
, repoCombinator
, allowUnrelatedRepos
, justThisRepo
, WithWorkingDir (..) -- re-export
, useWorkingDir
, SetDefault (..) -- re-export
, setDefault
-- patch meta-data
, patchname
, author
, AskLongComment (..)
, askLongComment
, keepDate
, Logfile (..)
, logfile
-- looking for changes
, LookFor (..)
, LookForAdds (..) -- re-export
, LookForMoves (..) -- re-export
, LookForReplaces (..) -- re-export
, lookfor
-- files to consider
, UseIndex (..) -- re-export
, ScanKnown (..) -- re-export
, diffing
, includeBoring
, allowProblematicFilenames
, allowCaseDifferingFilenames
, allowWindowsReservedFilenames
, onlyToFiles
, useIndex
, recursive
-- differences
, DiffAlgorithm (..) -- re-export
, diffAlgorithm
, WithContext (..)
, withContext
, unidiff
, ExternalDiff (..)
, extDiff
-- tests
, TestChanges (..)
, testChanges
, RunTest (..) -- re-export
, test
, LeaveTestDir (..) -- re-export
, leaveTestDir
-- mail related
, HeaderFields (..)
, headerFields
, sendToContext
, sendmail
, sendmailCmd
, charset
, editDescription
, ccApply
, reply
, happyForwarding
-- patch bundles
, applyAs
, Sign (..)
, sign
, Verify (..)
, verify
-- merging patches
, AllowConflicts (..) -- re-export
, conflicts
, ExternalMerge (..) -- re-export
, useExternalMerge
-- optimizations
, Compression (..) -- re-export
, compress
, usePacks
, WithPatchIndex (..) -- re-export
, patchIndex
, patchIndexYes
, Reorder (..) -- re-export
, reorder
, minimize
, storeInMemory
-- miscellaneous
, Output (..)
, output
, Summary (..)
, summary
, RemoteDarcs (..) -- re-export
, NetworkOptions (..)
, network
, UMask (..) -- re-export
, umask
, SetScriptsExecutable (..) -- re-export
, setScriptsExecutable
, restrictPaths
-- command specific
-- amend
, amendUnrecord
, selectAuthor
-- annotate
, humanReadable
, machineReadable
-- clone
, CloneKind (..)
, partial
-- dist
, distname
, distzip
-- convert import/export, init
, marks
, readMarks
, writeMarks
, PatchFormat (..)
, patchFormat
, hashed
-- log
, ChangesFormat (..)
, changesFormat
-- replace
, tokens
, forceReplace
-- test
, TestStrategy (..)
, testStrategy
-- show files/index
, files
, directories
, pending
, nullFlag
-- gzcrcs
, GzcrcsAction (..)
, gzcrcsActions
-- optimize
, siblings
, reorderPatches
, optimizePatchIndex
) where
import Prelude hiding ( (^) )
import Data.Char ( isDigit )
import Data.List ( intercalate )
import Data.Maybe ( listToMaybe )
import Darcs.Repository.Flags
( Compression (..)
, RemoteDarcs (..)
, Reorder (..)
, Verbosity (..)
, UseCache (..)
, UMask (..)
, DryRun (..)
, LookForAdds (..)
, LookForMoves (..)
, LookForReplaces (..)
, DiffAlgorithm (..)
, RunTest (..)
, SetScriptsExecutable (..)
, LeaveTestDir (..)
, RemoteRepos (..)
, SetDefault (..)
, UseIndex (..)
, ScanKnown (..)
, CloneKind (..)
, ExternalMerge (..)
, WorkRepo (..)
, AllowConflicts (..)
, WantGuiPause (..)
, WithPatchIndex (..)
, WithWorkingDir (..)
)
import qualified Darcs.UI.Options.Flags as F ( DarcsFlag(..) )
import Darcs.UI.Options.Core
import Darcs.UI.Options.Iso
import Darcs.UI.Options.Util
import Darcs.UI.Options.Matching
-- * Type instantiations
-- | 'DarcsOption' instantiates the first two type parameters of 'OptSpec' to
-- what we need in darcs. The first parameter (the option descriptor type) is
-- instantiated to 'DarcsOptDescr'; the flag type is instantiated to 'Flag'.
type DarcsOption = OptSpec DarcsOptDescr Flag
type RawDarcsOption = forall v. v -> RawOptSpec Flag v
-- * Root command
-- | Options for darcs itself that act like sub-commands.
-- | Top-level actions that stand in for real sub-commands
-- (@darcs --help@, @darcs --version@, etc.).
data RootAction = RootHelp | Version | ExactVersion | ListCommands deriving (Eq, Show)
-- | Select at most one 'RootAction'; 'Nothing' means a normal command follows.
rootActions :: PrimDarcsOption (Maybe RootAction)
rootActions = withDefault Nothing
[ RawNoArg ['h'] ["help", "overview"] F.Help (Just RootHelp)
"show a brief description of all darcs commands and top-level options"
, RawNoArg ['v'] ["version"] F.Version (Just Version) "show the darcs version"
, RawNoArg [] ["exact-version"] F.ExactVersion (Just ExactVersion)
"show the exact darcs version"
-- the switch --commands is here for compatibility only
, RawNoArg [] ["commands","list-options"] F.ListCommands (Just ListCommands)
"show plain list of available options and commands, for auto-completion"
]
-- * Common to all commands
-- ** Standard command actions
-- | Standard actions that every darcs command supports.
data StdCmdAction = Help | ListOptions | Disable deriving (Eq, Show)
-- | Select at most one 'StdCmdAction'; 'Nothing' means run the command itself.
stdCmdActions :: PrimDarcsOption (Maybe StdCmdAction)
stdCmdActions = withDefault Nothing
[ RawNoArg [] ["help"] F.Help (Just Help)
"show a brief description of the command and its options"
, RawNoArg [] ["list-options"] F.ListOptions (Just ListOptions)
"show plain list of available options and commands, for auto-completion"
, RawNoArg [] ["disable"] F.Disable (Just Disable) "disable this command" ]
-- ** Verbosity related
-- | @--debug@: debug-only output.
debug :: PrimDarcsOption Bool
debug = singleNoArg [] ["debug"] F.Debug "give only debug output"
-- | @--debug-http@: libcurl debug output.
debugHttp :: PrimDarcsOption Bool
debugHttp = singleNoArg [] ["debug-http"] F.DebugHTTP "debug output from libcurl"
-- | @--quiet@ \/ @--standard-verbosity@ \/ @--verbose@; defaults to normal.
verbosity :: PrimDarcsOption Verbosity
verbosity = withDefault NormalVerbosity
[ RawNoArg ['q'] ["quiet"] F.Quiet Quiet "suppress informational output"
, RawNoArg [] ["standard-verbosity"] F.NormalVerbosity NormalVerbosity
"neither verbose nor quiet output"
, RawNoArg ['v'] ["verbose"] F.Verbose Verbose "give verbose output" ]
-- | @--timings@: emit timing information for debugging.
timings :: PrimDarcsOption Bool
timings = singleNoArg [] ["timings"] F.Timings "provide debugging timings information"
-- | All verbosity-related options combined: 'debug', 'debugHttp',
-- 'verbosity' and 'timings', in that order.
anyVerbosity :: DarcsOption a (Bool -> Bool -> Verbosity -> Bool -> a)
anyVerbosity = debug ^ debugHttp ^ verbosity ^ timings
-- NOTE: removed a dangling empty 'where' clause that served no purpose.
-- ** Hooks
-- | All hook-related options: prehook command and prompt,
-- then posthook command and prompt.
hooks :: DarcsOption a (Maybe String -> Bool -> Maybe String -> Bool -> a)
hooks = preHook ^ postHook
-- | Prehook command plus its prompt/run switch.
preHook :: DarcsOption a (Maybe String -> Bool -> a)
preHook = prehookCmd ^ hookPrompt "prehook" F.AskPrehook F.RunPrehook
-- | Posthook command plus its prompt/run switch.
postHook :: DarcsOption a (Maybe String -> Bool -> a)
postHook = posthookCmd ^ hookPrompt "posthook" F.AskPosthook F.RunPosthook
-- | @--prehook COMMAND@ \/ @--no-prehook@; defaults to no prehook.
prehookCmd :: PrimDarcsOption (Maybe String)
prehookCmd = withDefault Nothing
[ RawStrArg [] ["prehook"] F.PrehookCmd unF Just unV
"COMMAND" "specify command to run before this darcs command"
, RawNoArg [] ["no-prehook"] F.NoPrehook Nothing
"don't run prehook command" ]
where unF f = [ s | F.PrehookCmd s <- [f] ]
unV v = [ s | Just s <- [v] ]
-- | @--posthook COMMAND@ \/ @--no-posthook@; defaults to no posthook.
posthookCmd :: PrimDarcsOption (Maybe String)
posthookCmd = withDefault Nothing
[ RawStrArg [] ["posthook"] F.PosthookCmd unF Just unV "COMMAND"
"specify command to run after this darcs command"
, RawNoArg [] ["no-posthook"] F.NoPosthook Nothing
"don't run posthook command" ]
where unF f = [ s | F.PosthookCmd s <- [f] ]
unV v = [ s | Just s <- [v] ]
-- | Build the @--prompt-NAME@ \/ @--run-NAME@ switch pair for a named hook.
-- The default ('False') means the hook runs without prompting.
hookPrompt :: String -> Flag -> Flag -> PrimDarcsOption Bool
hookPrompt hookName promptFlag runFlag =
  withDefault False
    [ RawNoArg [] ["prompt-" ++ hookName] promptFlag True
        ("prompt before running " ++ hookName)
    , RawNoArg [] ["run-" ++ hookName] runFlag False
        ("run " ++ hookName ++ " command without prompting")
    ]
-- ** Misc
-- | @--no-cache@ mapped onto 'UseCache'; caching is on by default.
useCache :: PrimDarcsOption UseCache
useCache = (imap . cps) (Iso fw bw) $ singleNoArg [] ["no-cache"] F.NoCache "don't use patch caches"
where
-- True means the flag was given, i.e. do NOT use the cache
fw True = NoUseCache
fw False = YesUseCache
bw NoUseCache = True
bw YesUseCache = False
-- * Interactivity related
{- TODO: these options interact (no pun intended) in complex ways that are
very hard to figure out for users as well as maintainers. I think the only
solution here is a more radical (and probably incompatible) re-design
involving all interactivity related options. That is beyond the goals of
this sub-project (which is already large enough).
-}
-- | Whether to produce XML-formatted output.
data XmlOutput = NoXml | YesXml deriving (Eq, Show)
-- | @--xml-output@; defaults to 'NoXml'.
xmloutput :: PrimDarcsOption XmlOutput
xmloutput = withDefault NoXml [__xmloutput YesXml]
__xmloutput :: RawDarcsOption
__xmloutput val = RawNoArg [] ["xml-output"] F.XMLOutput val "generate XML formatted output"
-- | NOTE: I'd rather work to have no uses of dryRunNoxml, so that any time
-- --dry-run is a possibility, automated users can examine the results more
-- easily with --xml.
--
-- See also issue2397.
dryRun :: PrimDarcsOption DryRun
dryRun = (imap . cps) (Iso fw bw) $ singleNoArg [] ["dry-run"] F.DryRun "don't actually take the action"
where
fw True = YesDryRun
fw False = NoDryRun
bw YesDryRun = True
bw NoDryRun = False
-- | 'dryRun' and 'xmloutput' combined.
dryRunXml :: DarcsOption a (DryRun -> XmlOutput -> a)
dryRunXml = dryRun ^ xmloutput
__dryrun :: RawDarcsOption
__dryrun val = RawNoArg [] ["dry-run"] F.DryRun val "don't actually take the action"
-- | @--pipe@: read patch metadata interactively from the user.
pipe :: PrimDarcsOption Bool
pipe = singleNoArg [] ["pipe"] F.Pipe "ask user interactively for the patch metadata"
-- | @--all@ \/ @--interactive@; 'Nothing' leaves the command's default.
interactive :: PrimDarcsOption (Maybe Bool)
interactive = withDefault Nothing
[ RawNoArg ['a'] ["all","no-interactive"] F.All (Just False) "answer yes to all patches"
, RawNoArg ['i'] ["interactive"] F.Interactive (Just True) "prompt user interactively" ]
-- | @--pause-for-gui@ \/ @--no-pause-for-gui@; defaults to pausing.
pauseForGui :: PrimDarcsOption WantGuiPause
pauseForGui = withDefault YesWantGuiPause
[ RawNoArg [] ["pause-for-gui"] F.PauseForGui YesWantGuiPause
"pause for an external diff or merge command to finish"
, RawNoArg [] ["no-pause-for-gui"] F.NoPauseForGui NoWantGuiPause
"return immediately after external diff or merge command finishes" ]
-- | @--ask-deps@ \/ @--no-ask-deps@; defaults to automatic selection.
askdeps :: PrimDarcsOption Bool
askdeps = withDefault False
[ RawNoArg [] ["ask-deps"] F.AskDeps True "manually select dependencies"
, RawNoArg [] ["no-ask-deps"] F.NoAskDeps False "automatically select dependencies" ]
-- * Patch selection related
-- | How to treat patches that matched patches depend on.
data SelectDeps = NoDeps | AutoDeps | PromptDeps deriving (Eq, Show)
-- | @--no-deps@ \/ @--auto-deps@ \/ @--prompt-deps@; defaults to prompting.
selectDeps :: PrimDarcsOption SelectDeps
selectDeps = withDefault PromptDeps
[ RawNoArg [] ["no-deps"] F.DontGrabDeps NoDeps
"don't automatically fulfill dependencies"
, RawNoArg [] ["auto-deps","dont-prompt-for-dependencies"] F.DontPromptForDependencies AutoDeps
"don't ask about patches that are depended on by matched patches (with --match or --patch)"
, RawNoArg [] ["prompt-deps","prompt-for-dependencies"] F.PromptForDependencies PromptDeps
"prompt about patches that are depended on by matched patches" ]
-- | @--reverse@ \/ @--no-reverse@; defaults to the usual order.
changesReverse :: PrimDarcsOption Bool
changesReverse = withDefault False
[ RawNoArg [] ["reverse"] F.Reverse True "show/consider changes in reverse order"
, RawNoArg [] ["no-reverse"] F.Forward False "show/consider changes in the usual order" ]
-- | TODO: Returning @-1@ if the argument cannot be parsed as an integer is
-- not something I expected to find in a Haskell program. Instead, the flag
-- should take either a plain 'String' argument (leaving it to a later stage
-- to parse the 'String' to an 'Int'), or else a @'Maybe' 'Int'@, taking
-- the possibility of a failed parse into account.
-- | @--max-count NUMBER@; hand-rolled 'OptSpec' so that conflicting
-- occurrences of the flag can be reported. See the TODO above about
-- the @-1@ fallback for unparsable arguments.
matchMaxcount :: PrimDarcsOption (Maybe Int)
matchMaxcount = OptSpec {..} where
ounparse k (Just n) = k [ F.MaxCount n ]
ounparse k Nothing = k []
-- take the first MaxCount flag, if any
oparse k fs = k $ listToMaybe [ s | F.MaxCount s <- fs ]
-- two or more MaxCount flags is an error
ocheck fs = case [ "--max-count="++show n | F.MaxCount n <- fs ] of
cfs@(_:_:_) -> ["conflicting flags: " ++ intercalate ", " cfs]
_ -> []
odesc = [ strArg [] ["max-count"] (F.MaxCount . toInt) "NUMBER"
"return only NUMBER results" ]
-- non-numeric arguments silently become -1 (see TODO above)
toInt s = if not (null s) && all isDigit s then read s else (-1)
-- * Local or remote repo
-- | @--repodir@ and @--repo@ combined into a single 'WorkRepo' value;
-- @--repodir@ takes precedence when both are given.
workRepo :: PrimDarcsOption WorkRepo
workRepo = imap (Iso fw bw) $ workingRepoDir ^ possiblyRemoteRepo where
fw k (WorkRepoDir s) = k (Just s) Nothing
fw k (WorkRepoPossibleURL s) = k Nothing (Just s)
fw k WorkRepoCurrentDir = k Nothing Nothing
bw k (Just s) _ = k (WorkRepoDir s)
bw k Nothing (Just s) = k (WorkRepoPossibleURL s)
bw k Nothing Nothing = k WorkRepoCurrentDir
-- | @--repodir DIRECTORY@: local repository directory to operate in.
workingRepoDir :: PrimDarcsOption (Maybe String)
workingRepoDir = singleStrArg [] ["repodir"] F.WorkRepoDir arg "DIRECTORY"
"specify the repository directory in which to run"
where arg (F.WorkRepoDir s) = Just s
arg _ = Nothing
-- | @--repodir@ is there for compatibility, should be removed eventually
--
-- IMHO the whole option can disappear; it overlaps with using an extra (non-option)
-- argument, which is how e.g. @darcs get@ is usually invoked.
reponame :: PrimDarcsOption (Maybe String)
reponame = singleStrArg [] ["repo-name","repodir"] F.NewRepo arg "DIRECTORY" "path of output directory"
where arg (F.NewRepo s) = Just s; arg _ = Nothing
-- | @--repo URL@: repository to work on, possibly remote.
possiblyRemoteRepo :: PrimDarcsOption (Maybe String)
possiblyRemoteRepo = singleStrArg [] ["repo"] F.WorkRepoUrl arg "URL"
"specify the repository URL"
where arg (F.WorkRepoUrl s) = Just s
arg _ = Nothing
-- | @--remote-repo URL@ (repeatable), collected into 'RemoteRepos'.
remoteRepos :: PrimDarcsOption RemoteRepos
remoteRepos = (imap . cps) (Iso fw bw) $ multiStrArg [] ["remote-repo"] F.RemoteRepo mkV "URL"
"specify the remote repository URL to work with"
where mkV fs = [ s | F.RemoteRepo s <- fs ]
fw ss = RemoteRepos ss
bw (RemoteRepos ss) = ss
-- | Shared long-option name for 'notInRemote'.
notInRemoteFlagName :: String
notInRemoteFlagName = "not-in-remote"
-- | @--not-in-remote [URL/PATH]@ (repeatable, argument optional).
notInRemote :: PrimDarcsOption [Maybe String]
notInRemote =
multiOptStrArg [] [notInRemoteFlagName] F.NotInRemote args "URL/PATH" $
"select all patches not in the default push/pull repository or at "
++ "location URL/PATH"
where
args fs = [s | F.NotInRemote s <- fs]
-- | How to combine several repositories' patch sets.
data RepoCombinator = Intersection | Union | Complement deriving (Eq, Show)
-- | @--intersection@ \/ @--union@ \/ @--complement@; defaults to 'Union'.
repoCombinator :: PrimDarcsOption RepoCombinator
repoCombinator = withDefault Union
[ RawNoArg [] ["intersection"] F.Intersection Intersection
"take intersection of all repositories"
, RawNoArg [] ["union"] F.Union Union
"take union of all repositories"
, RawNoArg [] ["complement"] F.Complement Complement
"take complement of repositories (in order listed)" ]
-- | @--ignore-unrelated-repos@: skip the unrelated-repository check.
allowUnrelatedRepos :: PrimDarcsOption Bool
allowUnrelatedRepos = singleNoArg [] ["ignore-unrelated-repos"] F.AllowUnrelatedRepos
"do not check if repositories are unrelated"
-- | @--just-this-repo@: restrict check/repair to the current repo.
justThisRepo :: PrimDarcsOption Bool
justThisRepo = singleNoArg [] ["just-this-repo"] F.JustThisRepo
"Limit the check or repair to the current repo"
-- | convert, clone, init
useWorkingDir :: PrimDarcsOption WithWorkingDir
useWorkingDir = withDefault WithWorkingDir
[ RawNoArg [] ["with-working-dir"] F.UseWorkingDir WithWorkingDir
"Create a working directory (normal repository)"
, RawNoArg [] ["no-working-dir"] F.UseNoWorkingDir NoWorkingDir
"Do not create a working directory (bare repository)" ]
-- | @--set-default@ \/ @--no-set-default@; 'Nothing' leaves the command default.
setDefault :: PrimDarcsOption (Maybe Bool)
setDefault = withDefault Nothing
[ RawNoArg [] ["set-default"] F.SetDefault (Just True) "set default repository"
, RawNoArg [] ["no-set-default"] F.NoSetDefault (Just False) "don't set default repository" ]
-- * Specifying patch meta-data
-- | @-m@ \/ @--name PATCHNAME@: name of the patch being recorded.
patchname :: PrimDarcsOption (Maybe String)
patchname = singleStrArg ['m'] ["name"] F.PatchName arg "PATCHNAME"
"name of patch"
where arg (F.PatchName s) = Just s
arg _ = Nothing
-- | @-A@ \/ @--author EMAIL@: author id for the patch.
author :: PrimDarcsOption (Maybe String)
author = singleStrArg ['A'] ["author"] F.Author arg
"EMAIL" "specify author id"
where arg (F.Author s) = Just s
arg _ = Nothing
-- | Whether/how to edit the long comment of a patch.
data AskLongComment = NoEditLongComment | YesEditLongComment | PromptLongComment
deriving (Eq, Show)
-- TODO: fix non-default behavior
askLongComment :: PrimDarcsOption (Maybe AskLongComment)
askLongComment = withDefault Nothing
[ RawNoArg [] ["edit-long-comment"] F.EditLongComment (Just YesEditLongComment)
"edit the long comment by default"
, RawNoArg [] ["skip-long-comment"] F.NoEditLongComment (Just NoEditLongComment)
"don't give a long comment"
, RawNoArg [] ["prompt-long-comment"] F.PromptLongComment (Just PromptLongComment)
"prompt for whether to edit the long comment" ]
-- | @--keep-date@ \/ @--no-keep-date@ (for amend); defaults to current date.
keepDate :: PrimDarcsOption Bool
keepDate = withDefault False
[ RawNoArg [] ["keep-date"] F.KeepDate True
"keep the date of the original patch"
, RawNoArg [] ["no-keep-date"] F.NoKeepDate False
"use the current date for the amended patch" ]
-- record, send
-- | Logfile settings for record/send: an optional file holding the
-- patch name/comment, and whether to delete it afterwards.
data Logfile = Logfile
{ _logfile :: Maybe AbsolutePath
, _rmlogfile :: Bool
}
-- | @--logfile FILE@ and @--delete-logfile@ combined into 'Logfile'.
logfile :: PrimDarcsOption Logfile
logfile = imap (Iso fw bw) (__logfile ^ __rmlogfile) where
fw k (Logfile x y) = k x y
bw k x y = k (Logfile x y)
__logfile :: PrimDarcsOption (Maybe AbsolutePath)
__logfile = singleAbsPathArg [] ["logfile"] F.LogFile arg "FILE"
"give patch name and comment in file"
where arg (F.LogFile s) = Just s
arg _ = Nothing
__rmlogfile :: PrimDarcsOption Bool
__rmlogfile = withDefault False
[ RawNoArg [] ["delete-logfile"] F.RmLogFile True
"delete the logfile when done"
, RawNoArg [] ["no-delete-logfile"] F.DontRmLogFile False
"keep the logfile when done" ]
-- * Looking for changes
-- | What kinds of working-tree changes to look for automatically.
data LookFor = LookFor
{ adds :: LookForAdds
, replaces :: LookForReplaces
, moves :: LookForMoves
}
-- | 'lookforadds', 'lookforreplaces' and 'lookformoves' combined.
lookfor :: PrimDarcsOption LookFor
lookfor = imap (Iso fw bw) (lookforadds ^ lookforreplaces ^ lookformoves) where
fw k (LookFor a r m) = k a r m
bw k a r m = k (LookFor a r m)
-- | @-l@ \/ @--look-for-adds@; off by default.
lookforadds :: PrimDarcsOption LookForAdds
lookforadds = withDefault NoLookForAdds
[ RawNoArg ['l'] ["look-for-adds"] F.LookForAdds YesLookForAdds
"look for (non-boring) files that could be added"
, RawNoArg [] ["dont-look-for-adds","no-look-for-adds"] F.NoLookForAdds NoLookForAdds
"don't look for any files that could be added" ]
-- | @--look-for-replaces@; off by default.
lookforreplaces :: PrimDarcsOption LookForReplaces
lookforreplaces = withDefault NoLookForReplaces
[ RawNoArg [] ["look-for-replaces"] F.LookForReplaces YesLookForReplaces
"look for replaces that could be marked"
, RawNoArg [] ["dont-look-for-replaces","no-look-for-replaces"]
F.NoLookForReplaces NoLookForReplaces
"don't look for any replaces" ]
-- | @--look-for-moves@; off by default.
lookformoves :: PrimDarcsOption LookForMoves
lookformoves = withDefault NoLookForMoves
[ RawNoArg [] ["look-for-moves"] F.LookForMoves YesLookForMoves
"look for files that may be moved/renamed"
, RawNoArg [] ["dont-look-for-moves","no-look-for-moves"]
F.NoLookForMoves NoLookForMoves
"don't look for any files that could be moved/renamed" ]
-- * Files to consider
-- | 'useIndex', 'scanKnown' and 'diffAlgorithm' bundled as a triple.
diffing :: PrimDarcsOption (UseIndex, ScanKnown, DiffAlgorithm)
diffing = imap (Iso curry3 uncurry3) $ useIndex ^ scanKnown ^ diffAlgorithm
where
uncurry3 k x y z = k (x,y,z)
curry3 k (x,y,z) = k x y z
-- | 'ignoreTimes' mapped onto 'UseIndex' (the flag inverts the sense).
useIndex :: PrimDarcsOption UseIndex
useIndex = (imap . cps) (Iso fw bw) ignoreTimes where
fw False = UseIndex
fw True = IgnoreIndex
bw UseIndex = False
bw IgnoreIndex = True
-- | 'lookforadds' and 'includeBoring' mapped onto 'ScanKnown'.
scanKnown :: PrimDarcsOption ScanKnown
scanKnown = imap (Iso fw bw) $ lookforadds ^ includeBoring where
fw k ScanKnown = k NoLookForAdds False
fw k ScanAll = k YesLookForAdds False
fw k ScanBoring = k YesLookForAdds True
bw k NoLookForAdds _ = k ScanKnown
bw k YesLookForAdds False = k ScanAll
bw k YesLookForAdds True = k ScanBoring
-- | @--boring@ \/ @--no-boring@; boring files are skipped by default.
includeBoring :: PrimDarcsOption Bool
includeBoring = withDefault False
[ RawNoArg [] ["boring"] F.Boring True "don't skip boring files"
, RawNoArg [] ["no-boring"] F.SkipBoring False "skip boring files" ]
-- | 'allowCaseDifferingFilenames' and 'allowWindowsReservedFilenames' combined.
allowProblematicFilenames :: DarcsOption a (Bool -> Bool -> a)
allowProblematicFilenames = allowCaseDifferingFilenames ^ allowWindowsReservedFilenames
-- | @--case-ok@ \/ @--no-case-ok@; refuses case-only differences by default.
allowCaseDifferingFilenames :: PrimDarcsOption Bool
allowCaseDifferingFilenames = withDefault False
[ RawNoArg [] ["case-ok"] F.AllowCaseOnly True
"don't refuse to add files differing only in case"
, RawNoArg [] ["no-case-ok"] F.DontAllowCaseOnly False
"refuse to add files whose name differ only in case" ]
-- | @--reserved-ok@ \/ @--no-reserved-ok@; refuses reserved names by default.
allowWindowsReservedFilenames :: PrimDarcsOption Bool
allowWindowsReservedFilenames = withDefault False
[ RawNoArg [] ["reserved-ok"] F.AllowWindowsReserved True
"don't refuse to add files with Windows-reserved names"
, RawNoArg [] ["no-reserved-ok"] F.DontAllowWindowsReserved False
"refuse to add files with Windows-reserved names" ]
-- | TODO: see issue2395
onlyToFiles :: PrimDarcsOption Bool
onlyToFiles = withDefault False
[ RawNoArg [] ["only-to-files"] F.OnlyChangesToFiles True
"show only changes to specified files"
, RawNoArg [] ["no-only-to-files"] F.ChangesToAllFiles False
"show changes to all files" ]
-- | @--ignore-times@ \/ @--no-ignore-times@; trusts mtimes by default.
ignoreTimes :: PrimDarcsOption Bool
ignoreTimes = withDefault False
[ RawNoArg [] ["ignore-times"] F.IgnoreTimes True
"don't trust the file modification times"
, RawNoArg [] ["no-ignore-times"] F.DontIgnoreTimes False
"trust modification times to find modified files" ]
-- | @-r@ \/ @--recursive@ \/ @--no-recursive@; not recursive by default.
recursive :: PrimDarcsOption Bool
recursive =
  withDefault False
    [ RawNoArg ['r'] ["recursive"] F.Recursive True "recurse into subdirectories"
    , RawNoArg [] ["not-recursive", "no-recursive"] F.NoRecursive False
        "don't recurse into subdirectories"
    ]
-- * Differences
-- | @--myers@ \/ @--patience@; defaults to patience diff.
diffAlgorithm :: PrimDarcsOption DiffAlgorithm
diffAlgorithm = withDefault PatienceDiff
[ RawNoArg [] ["myers"] F.UseMyersDiff MyersDiff
"use myers diff algorithm"
, RawNoArg [] ["patience"] F.UsePatienceDiff PatienceDiff
"use patience diff algorithm" ]
-- | Whether to show changes with surrounding context.
data WithContext = NoContext | YesContext deriving (Eq, Show)
-- | @-u@ \/ @--unified@ mapped onto 'WithContext'; no context by default.
withContext :: PrimDarcsOption WithContext
withContext = (imap . cps) (Iso fw bw) $ withDefault False
[ RawNoArg ['u'] ["unified"] F.Unified True
"output changes in a darcs-specific format similar to diff -u"
, RawNoArg [] ["no-unified"] F.NonUnified False
"output changes in darcs' usual format" ]
where fw False = NoContext
fw True = YesContext
bw NoContext = False
bw YesContext = True
-- | @-u@ \/ @--unified@ for the external diff command; on by default.
unidiff :: PrimDarcsOption Bool
unidiff = withDefault True
[ RawNoArg ['u'] ["unified"] F.Unified True "pass -u option to diff"
, RawNoArg [] ["no-unified"] F.NonUnified False "output patch in diff's dumb format" ]
-- | External diff command and the options to pass to it.
data ExternalDiff = ExternalDiff { _diffCmd :: Maybe String, _diffOpts :: [String] } deriving (Eq, Show)
-- | 'extDiffCmd' and 'extDiffOpts' bundled into 'ExternalDiff'.
extDiff :: PrimDarcsOption ExternalDiff
extDiff = imap (Iso fw bw) $ extDiffCmd ^ extDiffOpts where
fw k (ExternalDiff cmd opts) = k cmd opts
bw k cmd opts = k (ExternalDiff cmd opts)
-- | @--diff-command COMMAND@.
extDiffCmd :: PrimDarcsOption (Maybe String)
extDiffCmd = singleStrArg [] ["diff-command"] F.DiffCmd arg "COMMAND"
"specify diff command (ignores --diff-opts)"
where arg (F.DiffCmd s) = Just s
arg _ = Nothing
-- | @--diff-opts OPTIONS@ (repeatable).
extDiffOpts :: PrimDarcsOption [String]
extDiffOpts = multiStrArg [] ["diff-opts"] F.DiffFlags mkV "OPTIONS"
"options to pass to diff"
where mkV fs = [ s | F.DiffFlags s <- fs ]
-- * Running tests
-- | Whether to run the test suite, and if so whether to keep the test dir.
data TestChanges = NoTestChanges | YesTestChanges LeaveTestDir deriving (Eq)
-- | 'test' and 'leaveTestDir' mapped onto 'TestChanges'.
testChanges :: PrimDarcsOption TestChanges
testChanges = imap (Iso fw bw) $ test ^ leaveTestDir where
-- the LeaveTestDir value is irrelevant when no test runs
fw k NoTestChanges = k NoRunTest {- undefined -} YesLeaveTestDir
fw k (YesTestChanges ltd) = k YesRunTest ltd
bw k NoRunTest _ = k NoTestChanges
bw k YesRunTest ltd = k (YesTestChanges ltd)
-- | @--test@ \/ @--no-test@; no test by default.
test :: PrimDarcsOption RunTest
test = withDefault NoRunTest
[ RawNoArg [] ["test"] F.Test YesRunTest "run the test script"
, RawNoArg [] ["no-test"] F.NoTest NoRunTest "don't run the test script" ]
-- | @--leave-test-directory@ \/ @--remove-test-directory@; kept by default.
leaveTestDir :: PrimDarcsOption LeaveTestDir
leaveTestDir = withDefault YesLeaveTestDir
[ RawNoArg [] ["leave-test-directory"]
F.LeaveTestDir YesLeaveTestDir "don't remove the test directory"
, RawNoArg [] ["remove-test-directory"]
F.NoLeaveTestDir NoLeaveTestDir "remove the test directory" ]
-- * Mail related
-- | Email header fields for patch-sending commands.
data HeaderFields = HeaderFields
  { _to, _cc :: [String]
  , _from, _subject, _inReplyTo :: Maybe String
  }
-- | All mail header options combined, in positional field order:
-- to, cc, from, subject, in-reply-to.
headerFields :: PrimDarcsOption HeaderFields
headerFields = imap (Iso unpack pack) (to ^ cc ^ from ^ subject ^ inReplyTo)
  where
    unpack k (HeaderFields hTo hCc hFrom hSubj hIrt) = k hTo hCc hFrom hSubj hIrt
    pack k hTo hCc hFrom hSubj hIrt = k (HeaderFields hTo hCc hFrom hSubj hIrt)
-- | @--from EMAIL@: sender address (reuses the 'F.Author' flag).
from :: PrimDarcsOption (Maybe String)
from = singleStrArg [] ["from"] F.Author arg
"EMAIL" "specify email address"
where arg (F.Author s) = Just s
arg _ = Nothing
-- | @--to EMAIL@ (repeatable).
to :: PrimDarcsOption [String]
to = multiStrArg [] ["to"] F.Target mkV "EMAIL" "specify destination email"
where mkV fs = [ s | F.Target s <- fs ]
-- | @--cc EMAIL@ (repeatable).
cc :: PrimDarcsOption [String]
cc = multiStrArg [] ["cc"] F.Cc mkV "EMAIL" "mail results to additional EMAIL(s)"
where mkV fs = [ s | F.Cc s <- fs ]
-- | @--subject SUBJECT@.
subject :: PrimDarcsOption (Maybe String)
subject = singleStrArg [] ["subject"] F.Subject arg
"SUBJECT" "specify mail subject"
where arg (F.Subject s) = Just s
arg _ = Nothing
-- | @--in-reply-to EMAIL@.
inReplyTo :: PrimDarcsOption (Maybe String)
inReplyTo = singleStrArg [] ["in-reply-to"] F.InReplyTo arg
"EMAIL" "specify in-reply-to header"
where arg (F.InReplyTo s) = Just s
arg _ = Nothing
-- | @--context FILENAME@: send against a stored context file.
sendToContext :: PrimDarcsOption (Maybe AbsolutePath)
sendToContext = singleAbsPathArg [] ["context"] F.Context arg "FILENAME"
"send to context stored in FILENAME"
where arg (F.Context s) = Just s
arg _ = Nothing
-- TODO: do something about the nonsensical case (False, Just s)
--
-- Some of the tests actually do this (pass --sendmail-command without
-- passing --mail) and it's unclear if it's deliberate or just a historical
-- accident after the issue2204 changes. We should untangle that and
-- perhaps turn this into a single option with an optional argument.
-- The other question to resolve is the interaction with the 'output'
-- options to darcs send.
-- | Technically not an isomorphism: (False, Just cmd) is representable
-- but nonsensical (see the TODO above).
sendmailIso :: Iso (Bool -> Maybe String -> a) ((Bool, Maybe String) -> a)
sendmailIso = Iso uncurry curry
-- | 'mail' and 'sendmailCmd' paired up.
sendmail :: PrimDarcsOption (Bool, Maybe String)
sendmail = imap sendmailIso $ mail ^ sendmailCmd
-- | @--mail@: send the patch via sendmail.
mail :: PrimDarcsOption Bool
mail = singleNoArg [] ["mail"] F.Mail "send patch using sendmail"
-- | @--sendmail-command COMMAND@.
sendmailCmd :: PrimDarcsOption (Maybe String)
sendmailCmd = singleStrArg [] ["sendmail-command"] F.SendmailCmd arg "COMMAND"
"specify sendmail command"
where arg (F.SendmailCmd s) = Just s
arg _ = Nothing
-- | @--minimize@ \/ @--no-minimize@; minimizes bundle context by default.
minimize :: PrimDarcsOption Bool
minimize =
  withDefault True
    [ RawNoArg [] ["minimize"] F.Minimize True "minimize context of patch bundle"
    , RawNoArg [] ["no-minimize"] F.NoMinimize False
        "don't minimize context of patch bundle"
    ]
-- | @--charset CHARSET@ for outgoing mail.
charset :: PrimDarcsOption (Maybe String)
charset = singleStrArg [] ["charset"] F.Charset arg
"CHARSET" "specify mail charset"
where arg (F.Charset s) = Just s
arg _ = Nothing
-- | @--edit-description@ \/ @--no-edit-description@; editing is the default.
editDescription :: PrimDarcsOption Bool
editDescription = withDefault True
[ RawNoArg [] ["edit-description"] F.EditDescription True
"edit the patch bundle description"
, RawNoArg [] ["dont-edit-description","no-edit-description"] F.NoEditDescription False
"don't edit the patch bundle description" ]
-- TODO: turn these two into a combined option
-- | @--cc EMAIL@ for apply; requires @--reply@.
ccApply :: PrimDarcsOption (Maybe String)
ccApply = singleStrArg [] ["cc"] F.Cc arg
"EMAIL" "mail results to additional EMAIL(s). Requires --reply"
where arg (F.Cc s) = Just s
arg _ = Nothing
-- | @--reply FROM@.
reply :: PrimDarcsOption (Maybe String)
reply = singleStrArg [] ["reply"] F.Reply arg "FROM"
"reply to email-based patch using FROM address"
where arg (F.Reply s) = Just s
arg _ = Nothing
-- | @--happy-forwarding@ \/ @--no-happy-forwarding@; off by default.
happyForwarding :: PrimDarcsOption Bool
happyForwarding = withDefault False
[ RawNoArg [] ["happy-forwarding"] F.HappyForwarding True
"forward unsigned messages without extra header"
, RawNoArg [] ["no-happy-forwarding"] F.NoHappyForwarding False
"don't forward unsigned messages without extra header" ]
-- * Patch bundle related
-- | @--apply-as USERNAME@ \/ @--no-apply-as@; defaults to no sudo.
applyAs :: PrimDarcsOption (Maybe String)
applyAs = withDefault Nothing
[ RawStrArg [] ["apply-as"] F.ApplyAs unF Just unV "USERNAME"
"apply patch as another user using sudo"
, RawNoArg [] ["no-apply-as"] F.NonApply Nothing
"don't use sudo to apply as another user" ]
where
unF f = [ s | F.ApplyAs s <- [f] ]
unV x = [ s | Just s <- [x] ]
-- | How (and whether) to sign an outgoing patch.
data Sign = NoSign | Sign | SignAs String | SignSSL String deriving (Eq, Show)
-- | @--sign@ \/ @--sign-as KEYID@ \/ @--sign-ssl IDFILE@ \/ @--no-sign@.
sign :: PrimDarcsOption Sign
sign = withDefault NoSign
[ RawNoArg [] ["sign"] F.Sign Sign "sign the patch with your gpg key"
, RawStrArg [] ["sign-as"] F.SignAs unFSignAs SignAs unSignAs "KEYID"
"sign the patch with a given keyid"
, RawStrArg [] ["sign-ssl"] F.SignSSL unFSignSSL SignSSL unSignSSL "IDFILE"
"sign the patch using openssl with a given private key"
, RawNoArg [] ["dont-sign","no-sign"] F.NoSign NoSign "don't sign the patch" ]
where unFSignAs f = [ s | F.SignAs s <- [f] ]
unSignAs v = [ s | SignAs s <- [v] ]
unFSignSSL f = [ s | F.SignSSL s <- [f] ]
unSignSSL v = [ s | SignSSL s <- [v] ]
-- | How (and whether) to verify an incoming patch signature.
data Verify = NoVerify | VerifyKeyring AbsolutePath | VerifySSL AbsolutePath deriving (Eq, Show)
-- | @--verify PUBRING@ \/ @--verify-ssl KEYS@ \/ @--no-verify@.
verify :: PrimDarcsOption Verify
verify = withDefault NoVerify
[ RawAbsPathArg [] ["verify"] F.Verify unFKeyring VerifyKeyring unVKeyring "PUBRING"
"verify that the patch was signed by a key in PUBRING"
, RawAbsPathArg [] ["verify-ssl"] F.VerifySSL unFSSL VerifySSL unVSSL "KEYS"
"verify using openSSL with authorized keys from file KEYS"
, RawNoArg [] ["no-verify"] F.NonVerify NoVerify
"don't verify patch signature" ]
where
unFKeyring f = [ s | F.Verify s <- [f] ]
unVKeyring x = [ s | VerifyKeyring s <- [x] ]
unFSSL f = [ s | F.VerifySSL s <- [f] ]
unVSSL x = [ s | VerifySSL s <- [x] ]
-- * Merging patches
-- applyConflictOptions = conflicts NoAllowConflicts
-- pullConflictOptions = conflicts YesAllowConflictsAndMark
-- | Conflict handling options, parameterized by the command's default
-- ('Nothing' result means skip conflicting patches entirely).
conflicts :: AllowConflicts -> PrimDarcsOption (Maybe AllowConflicts)
conflicts def = withDefault (Just def)
[ RawNoArg [] ["mark-conflicts"]
F.MarkConflicts (Just YesAllowConflictsAndMark) "mark conflicts"
, RawNoArg [] ["allow-conflicts"]
F.AllowConflicts (Just YesAllowConflicts) "allow conflicts, but don't mark them"
-- , RawNoArg [] ["no-resolve-conflicts"]
-- NoAllowConflicts "equivalent to --dont-allow-conflicts, for backwards compatibility"
, RawNoArg [] ["dont-allow-conflicts","no-allow-conflicts","no-resolve-conflicts"]
F.NoAllowConflicts (Just NoAllowConflicts) "fail if there are patches that would create conflicts"
, RawNoArg [] ["skip-conflicts"]
F.SkipConflicts Nothing "filter out any patches that would create conflicts" ]
-- Technically not an isomorphism, see 'sendmailIso'.
-- | @--external-merge COMMAND@ mapped onto 'ExternalMerge'.
useExternalMerge :: PrimDarcsOption ExternalMerge
useExternalMerge = imap (Iso fw bw) $ singleStrArg [] ["external-merge"] F.ExternalMerge arg
"COMMAND" "use external tool to merge conflicts"
where
arg (F.ExternalMerge s) = Just s
arg _ = Nothing
bw k (Just s) = k (YesExternalMerge s)
bw k Nothing = k NoExternalMerge
fw k (YesExternalMerge s) = k (Just s)
fw k NoExternalMerge = k Nothing
-- * Optimizations
-- | @--compress@ \/ @--no-compress@; gzip compression by default.
compress :: PrimDarcsOption Compression
compress = withDefault GzipCompression
[ RawNoArg [] ["compress"] F.Compress GzipCompression "compress patch data"
, RawNoArg [] ["dont-compress","no-compress"] F.NoCompress NoCompression "don't compress patch data" ]
-- | @--packs@ \/ @--no-packs@; packs are used by default.
usePacks :: PrimDarcsOption Bool
usePacks = withDefault True
[ RawNoArg [] ["packs"] F.Packs True "use repository packs"
, RawNoArg [] ["no-packs"] F.NoPacks False "don't use repository packs" ]
-- for init, clone and convert: patch index disabled by default
patchIndex :: PrimDarcsOption WithPatchIndex
patchIndex = withDefault NoPatchIndex [__patchIndex YesPatchIndex, __noPatchIndex NoPatchIndex]
-- for log and annotate: patch index enabled by default
patchIndexYes :: PrimDarcsOption WithPatchIndex
patchIndexYes = withDefault YesPatchIndex [__patchIndex YesPatchIndex, __noPatchIndex NoPatchIndex]
__patchIndex, __noPatchIndex :: RawDarcsOption
__patchIndex val = RawNoArg [] ["with-patch-index"] F.PatchIndexFlag val "build patch index"
__noPatchIndex val = RawNoArg [] ["no-patch-index"] F.NoPatchIndexFlag val "don't build patch index"
-- diff, dist
-- | @--store-in-memory@ \/ @--no-store-in-memory@; on-disk by default.
storeInMemory :: PrimDarcsOption Bool
storeInMemory = withDefault False
[ RawNoArg [] ["store-in-memory"] F.StoreInMemory True
"do patch application in memory rather than on disk"
, RawNoArg [] ["no-store-in-memory"] F.ApplyOnDisk False
"do patch application on disk" ]
-- * Output
-- | Where to write command output: an explicit file (or stdout),
-- or an automatically named file in a directory.
data Output = Output AbsolutePathOrStd
| OutputAutoName AbsolutePath
deriving (Eq, Show)
-- | @-o FILE@ \/ @-O [DIRECTORY]@; 'Nothing' means the command's default.
output :: PrimDarcsOption (Maybe Output)
output = withDefault Nothing
[ RawAbsPathOrStdArg ['o'] ["output"]
F.Output unOutputF (Just . Output) unOutput
"FILE" "specify output filename"
, RawOptAbsPathArg ['O'] ["output-auto-name"]
F.OutputAutoName unOutputAutoNameF (Just . OutputAutoName) unOutputAutoName
"." "DIRECTORY"
"output to automatically named file in DIRECTORY, default: current directory"
]
where
unOutputF f = [ p | F.Output p <- [f] ]
unOutput (Just (Output p)) = [p]
unOutput _ = []
unOutputAutoNameF f = [ p | F.OutputAutoName p <- [f] ]
unOutputAutoName (Just (OutputAutoName p)) = [p]
unOutputAutoName _ = []
-- * Miscellaneous
-- | Whether to summarize changes instead of showing them in full.
data Summary = NoSummary | YesSummary deriving (Eq, Show)
-- | @-s@ \/ @--summary@ \/ @--no-summary@; 'Nothing' leaves the command default.
summary :: PrimDarcsOption (Maybe Summary)
summary = withDefault Nothing
[ RawNoArg ['s'] ["summary"] F.Summary (Just YesSummary) "summarize changes"
, RawNoArg [] ["no-summary"] F.NoSummary (Just NoSummary) "don't summarize changes" ]
-- | TODO: reconsider this grouping of options
data NetworkOptions = NetworkOptions
{ noHttpPipelining :: Bool
, remoteDarcs :: RemoteDarcs }
-- | Maps the pipelining flag and optional remote darcs command
-- onto 'NetworkOptions'.
networkIso :: Iso (Bool -> Maybe String -> a) (NetworkOptions -> a)
networkIso = Iso fw bw where
fw k (NetworkOptions x (RemoteDarcs y)) = k x (Just y)
fw k (NetworkOptions x DefaultRemoteDarcs) = k x Nothing
bw k x (Just y) = k (NetworkOptions x (RemoteDarcs y))
bw k x Nothing = k (NetworkOptions x DefaultRemoteDarcs)
-- | @--no-http-pipelining@ and @--remote-darcs COMMAND@ combined.
network :: PrimDarcsOption NetworkOptions
network = imap networkIso
$ singleNoArg [] ["no-http-pipelining"] F.NoHTTPPipelining "disable HTTP pipelining"
^ singleStrArg [] ["remote-darcs"] F.RemoteDarcsOpt arg "COMMAND"
"name of the darcs executable on the remote server"
where arg (F.RemoteDarcsOpt s) = Just s
arg _ = Nothing
-- | @--umask UMASK@ mapped onto 'UMask'; absent flag means 'NoUMask'.
umask :: PrimDarcsOption UMask
umask = (imap . cps) (Iso fw bw) $ singleStrArg [] ["umask"] F.UMask arg "UMASK"
"specify umask to use when writing"
where
arg (F.UMask s) = Just s
arg _ = Nothing
fw (Just s) = YesUMask s
fw Nothing = NoUMask
bw (YesUMask s) = Just s
bw NoUMask = Nothing
-- | @--set-scripts-executable@ \/ @--dont-set-scripts-executable@; off by default.
setScriptsExecutable :: PrimDarcsOption SetScriptsExecutable
setScriptsExecutable = withDefault NoSetScriptsExecutable
[ RawNoArg [] ["set-scripts-executable"] F.SetScriptsExecutable YesSetScriptsExecutable
"make scripts executable"
, RawNoArg [] ["dont-set-scripts-executable","no-set-scripts-executable"]
F.DontSetScriptsExecutable NoSetScriptsExecutable "don't make scripts executable" ]
-- | @--restrict-paths@ \/ @--dont-restrict-paths@; restricted by default.
restrictPaths :: PrimDarcsOption Bool
restrictPaths = withDefault True
[ RawNoArg [] ["restrict-paths"] F.RestrictPaths True
"don't allow darcs to touch external files or repo metadata"
, RawNoArg [] ["dont-restrict-paths","no-restrict-paths"]
F.DontRestrictPaths False
"allow darcs to modify any file or directory (unsafe)" ]
-- * Specific to a single command
-- ** amend
amendUnrecord :: PrimDarcsOption Bool
amendUnrecord = withDefault False
[ RawNoArg [] ["unrecord"] F.AmendUnrecord True "remove changes from the patch"
, RawNoArg [] ["record"] F.NoAmendUnrecord False "add more changes to the patch" ]
selectAuthor :: PrimDarcsOption Bool
selectAuthor = singleNoArg [] ["select-author"] F.SelectAuthor
"select author id from a menu"
-- ** annotate
-- | TODO: These should be mutually exclusive, but are they? The code is almost inscrutable.
humanReadable :: PrimDarcsOption Bool
humanReadable = withDefault False [__humanReadable True]
__humanReadable :: RawDarcsOption
__humanReadable val = RawNoArg [] ["human-readable"] F.HumanReadable val "give human-readable output"
-- | See above.
machineReadable :: PrimDarcsOption Bool
machineReadable = singleNoArg [] ["machine-readable"] F.MachineReadable "give machine-readable output"
-- ** clone
-- | @--lazy@ / @--complete@ (default: 'NormalClone'): how much of the
-- remote repository a clone fetches up front.
partial :: PrimDarcsOption CloneKind
partial = withDefault NormalClone
  [ RawNoArg [] ["lazy"] F.Lazy LazyClone "get patch files only as needed"
  , RawNoArg [] ["complete"] F.Complete CompleteClone "get a complete copy of the repository" ]

-- ** convert import/export

-- | Combined @--read-marks@/@--write-marks@ option pair.
marks :: DarcsOption a (Maybe String -> Maybe String -> a)
marks = readMarks ^ writeMarks

-- | @--read-marks FILE@: resume a conversion from a checkpoint file.
readMarks :: PrimDarcsOption (Maybe String)
readMarks = singleStrArg [] ["read-marks"] F.ReadMarks arg
    "FILE" "continue conversion, previously checkpointed by --write-marks"
  where
    arg (F.ReadMarks s) = Just s
    arg _ = Nothing

-- | @--write-marks FILE@: checkpoint a conversion so it can be resumed.
writeMarks :: PrimDarcsOption (Maybe String)
writeMarks = singleStrArg [] ["write-marks"] F.WriteMarks arg
    "FILE" "checkpoint conversion to continue it later"
  where
    arg (F.WriteMarks s) = Just s
    arg _ = Nothing

-- | Deprecated flag, still present to output an error message.
hashed :: PrimDarcsOption ()
hashed = deprecated
  [ "All repositories are now \"hashed\", so this option was removed."
  , "Use --darcs-1 to get the effect that --hashed had previously." ] $
  [ RawNoArg [] ["hashed"] F.Hashed () "deprecated, use --darcs-1 instead" ]
-- | On-disk patch format of a repository.
data PatchFormat = PatchFormat1 | PatchFormat2 deriving (Eq, Show)

-- | @--darcs-2@ / @--darcs-1@ (default: 'PatchFormat2').
patchFormat :: PrimDarcsOption PatchFormat
patchFormat = withDefault PatchFormat2
  [ RawNoArg [] ["darcs-2"] F.UseFormat2 PatchFormat2
      "Standard darcs patch format"
  , RawNoArg [] ["darcs-1"] F.UseFormat1 PatchFormat1
      "Older patch format (for compatibility)"]

-- ** dist

-- | @-d@/@--dist-name DISTNAME@: name of the version being distributed.
distname :: PrimDarcsOption (Maybe String)
distname = singleStrArg ['d'] ["dist-name"] F.DistName arg "DISTNAME" "name of version"
  where
    arg (F.DistName s) = Just s
    arg _ = Nothing

-- | @--zip@: produce a zip archive instead of a gzip'ed tar.
distzip :: PrimDarcsOption Bool
distzip = singleNoArg [] ["zip"] F.DistZip "generate zip archive instead of gzip'ed tar"
-- ** log
-- | Output format of the @log@ command.
data ChangesFormat = HumanReadable | GenContext | GenXml | NumberPatches | CountPatches deriving (Eq, Show)

-- | Mutually exclusive @log@ output selectors; 'Nothing' means the default
-- output format.
changesFormat :: PrimDarcsOption (Maybe ChangesFormat)
changesFormat = withDefault Nothing
  [ RawNoArg [] ["context"] F.GenContext (Just GenContext) "give output suitable for get --context"
  , __xmloutput (Just GenXml)
  , __humanReadable (Just HumanReadable)
  , RawNoArg [] ["number"] F.NumberPatches (Just NumberPatches) "number the changes"
  , RawNoArg [] ["count"] F.Count (Just CountPatches) "output count of changes" ]

-- ** replace

-- | @--token-chars \"[CHARS]\"@: characters that make up a token for @replace@.
tokens :: PrimDarcsOption (Maybe String)
tokens = singleStrArg [] ["token-chars"] F.Toks arg "\"[CHARS]\""
    "define token to contain these characters"
  where arg (F.Toks s) = Just s; arg _ = Nothing
-- | @-f@/@--force@ / @--no-force@ (default: off): proceed with a replace
-- even if the 'new' token already exists.
forceReplace :: PrimDarcsOption Bool
forceReplace = withDefault False
  [ RawNoArg ['f'] ["force"] F.ForceReplace True
      "proceed with replace even if 'new' token already exists"
  , RawNoArg [] ["no-force"] F.NonForce False
      "don't force the replace if it looks scary" ]

-- ** test

-- | Search strategy used by @darcs test@ to locate a failing version.
data TestStrategy = Once | Linear | Backoff | Bisect deriving (Eq, Show)

-- | @--once@ / @--linear@ / @--backoff@ / @--bisect@ (default: 'Once').
testStrategy :: PrimDarcsOption TestStrategy
testStrategy = withDefault Once
  [ RawNoArg [] ["once"] F.Once Once "run test on current version only"
  , RawNoArg [] ["linear"] F.Linear Linear "locate the most recent version lacking an error"
  , RawNoArg [] ["backoff"] F.Backoff Backoff "exponential backoff search"
  , RawNoArg [] ["bisect"] F.Bisect Bisect "binary instead of linear search" ]
-- ** show files/index
-- | @--files@ / @--no-files@ (default: on): include files in the output.
files :: PrimDarcsOption Bool
files = withDefault True
  [ RawNoArg [] ["files"] F.Files True "include files in output"
  , RawNoArg [] ["no-files"] F.NoFiles False "don't include files in output" ]

-- | @--directories@ / @--no-directories@ (default: on).
directories :: PrimDarcsOption Bool
directories = withDefault True
  [ RawNoArg [] ["directories"] F.Directories True "include directories in output"
  , RawNoArg [] ["no-directories"] F.NoDirectories False "don't include directories in output" ]
-- | @--pending@ / @--no-pending@ (default: on): whether the output also
-- reflects pending (unrecorded) changes.
pending :: PrimDarcsOption Bool
pending = withDefault True
  [ RawNoArg [] ["pending"] F.Pending True "reflect pending patches in output"
    -- fixed help-string grammar: was "only included recorded patches"
  , RawNoArg [] ["no-pending"] F.NoPending False "only include recorded patches in output" ]
-- | @-0@/@--null@: separate file names by NUL characters.
-- (Named 'nullFlag' because \"null\" is already taken.)
nullFlag :: PrimDarcsOption Bool
nullFlag = singleNoArg ['0'] ["null"] F.NullFlag "separate file names by NUL characters"

-- ** gzcrcs

-- | Mode of the @gzcrcs@ command.
data GzcrcsAction = GzcrcsCheck | GzcrcsRepair deriving (Eq, Show)

-- | @--check@ / @--repair@; 'Nothing' when neither flag is given.
gzcrcsActions :: PrimDarcsOption (Maybe GzcrcsAction)
gzcrcsActions = withDefault Nothing
  [ RawNoArg [] ["check"] F.Check (Just GzcrcsCheck) "Specify checking mode"
  , RawNoArg [] ["repair"] F.Repair (Just GzcrcsRepair) "Specify repair mode" ]
-- ** optimize
-- | @--sibling URL@ (repeatable): collect all given sibling directories.
siblings :: PrimDarcsOption [AbsolutePath]
siblings = multiAbsPathArg [] ["sibling"] F.Sibling mkV "URL" "specify a sibling directory"
  where mkV fs = [ s | F.Sibling s <- fs ]

-- | Plain @--reorder-patches@ flag without a negative form; cf. 'reorder'.
reorderPatches :: PrimDarcsOption Bool
reorderPatches = singleNoArg [] ["reorder-patches"] F.Reorder "reorder the patches in the repository"

-- | @--reorder-patches@ / @--no-reorder-patches@ (default: 'NoReorder').
reorder :: PrimDarcsOption Reorder
reorder = withDefault NoReorder
  [ RawNoArg [] ["reorder-patches"] F.Reorder Reorder
      "reorder the patches in the repository"
  , RawNoArg [] ["no-reorder-patches"] F.NoReorder NoReorder
      "don't reorder the patches in the repository" ]

-- | @--patch-index@ / @--no-patch-index@; 'Nothing' when neither is given.
optimizePatchIndex :: PrimDarcsOption (Maybe WithPatchIndex)
optimizePatchIndex = withDefault Nothing
  [ __patchIndex (Just YesPatchIndex)
  , __noPatchIndex (Just NoPatchIndex) ]
| DavidAlphaFox/darcs | src/Darcs/UI/Options/All.hs | gpl-2.0 | 43,966 | 0 | 13 | 8,501 | 10,982 | 5,891 | 5,091 | -1 | -1 |
module Chess.Internal.FEN where
import Chess.Internal.Move
import Chess.Internal.Board
import Chess.Internal.Piece
import Data.List
import Data.Char
-- | Serialize a board as the FEN piece-placement field: in the compact
-- board printout, runs of blanks are replaced by their length as a digit
-- and the resulting rank lines are joined with \'/\'.
writeBoard :: Board -> String
writeBoard = intercalate "/" . lines . concatMap emptyToNum . group . printBoardCompact
  where emptyToNum str@(' ':_) = show $ length str
        emptyToNum str = str
-- | Render the side to move as its FEN field (\"w\" or \"b\").
writePlayer :: Color -> String
writePlayer color = case color of
  White -> "w"
  Black -> "b"
-- | Render castling availability as in FEN: \"-\" when neither side may
-- castle, otherwise a sorted string of KQkq letters (uppercase for White,
-- lowercase for Black; Q\/q = long, K\/k = short).
writeCastlings :: [CastlingType] -> [CastlingType] -> String
writeCastlings [] [] = "-"
writeCastlings white black = sort (map (castlingToChar White) white ++ map (castlingToChar Black) black)
  where castlingToChar White Long = 'Q'
        castlingToChar White Short = 'K'
        castlingToChar Black Long = 'q'
        castlingToChar Black Short = 'k'
-- | Render the en-passant target square, or \"-\" when there is none.
writeEnPassant :: Maybe Coordinates -> String
writeEnPassant = maybe "-" printCoordinate
-- | Parse the FEN piece-placement field: digits are expanded back into
-- runs of blanks, the string is split on \'/\' and must yield exactly
-- 8 ranks, which are handed to 'parseBoardCompact'.
readBoard :: String -> Maybe Board
readBoard str | length parts /= 8 = Nothing
              | otherwise = parseBoardCompact $ unlines parts
  where numToEmpty x | isNumber x = replicate (digitToInt x) ' '
                     | otherwise = [x]
        parts = split (== '/') $ concatMap numToEmpty str
-- | Parse the FEN side-to-move field; anything but \"w\" or \"b\" fails.
readPlayer :: String -> Maybe Color
readPlayer str = lookup str [("w", White), ("b", Black)]
-- | Parse the FEN castling field into (white castlings, black castlings).
-- \"-\" means no castling rights at all; the parse fails when a side
-- lists the same castling type more than once.
readCastlings :: String -> Maybe ([CastlingType], [CastlingType])
readCastlings "-" = Just ([], [])
readCastlings str = case readCastlings' str of
    Nothing -> Nothing
    Just (whites, blacks) -> if castlingCountValid whites && castlingCountValid blacks
        then Just (whites, blacks)
        else Nothing
  -- duplicate-free check: removing duplicates must not change the list
  where castlingCountValid castlings = sort (nub castlings) == sort castlings
-- | Parse castling characters without duplicate checking: uppercase
-- letters belong to White, lowercase to Black; K\/k is short castling,
-- Q\/q is long. Any other letter fails the parse.
readCastlings' :: String -> Maybe ([CastlingType], [CastlingType])
readCastlings' str = do whiteCastlings <- mapM toCastling whites
                        blackCastlings <- mapM toCastling blacks
                        return (whiteCastlings, blackCastlings)
  where whites = filter isUpper str
        blacks = filter isLower str
        toCastling 'q' = Just Long
        toCastling 'k' = Just Short
        toCastling 'Q' = Just Long
        toCastling 'K' = Just Short
        toCastling _ = Nothing
-- | Parse the en-passant field: \"-\" means no target square; otherwise
-- the coordinate must parse, and a failed parse fails the whole field.
readEnPassant :: String -> Maybe (Maybe Coordinates)
readEnPassant "-" = Just Nothing
readEnPassant str = Just <$> parseCoordinate str
-- | Parse a number and require it to be at least @limit@ (inclusive
-- lower bound); parse failure or a too-small value yields 'Nothing'.
readNumberWithLimit :: (Ord a, Read a) => a -> String -> Maybe a
readNumberWithLimit limit str = do
  number <- readMaybe str
  if number >= limit then Just number else Nothing
-- | Safe variant of 'read': succeeds only when the whole input is
-- consumed by a single parse; returns the first such parse.
readMaybe :: Read a => String -> Maybe a
readMaybe s = case [x | (x, "") <- reads s] of
  (x:_) -> Just x
  _     -> Nothing
-- | Split a string on characters matching the predicate, discarding the
-- separators and dropping empty fields (like 'words' generalised).
split :: (Char -> Bool) -> String -> [String]
split p str =
  case break p (dropWhile p str) of
    ("", _)   -> []
    (w, rest) -> w : split p rest
| nablaa/hchesslib | src/Chess/Internal/FEN.hs | gpl-2.0 | 3,387 | 0 | 10 | 1,145 | 981 | 491 | 490 | 69 | 5 |
{-# LANGUAGE ExistentialQuantification, DeriveDataTypeable
, GeneralizedNewtypeDeriving #-}
{- |
Module : ./Static/GTheory.hs
Description : theory datastructure for development graphs
Copyright : (c) Till Mossakowski, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : till@informatik.uni-bremen.de
Stability : provisional
Portability : non-portable(Logic)
theory datastructure for development graphs
-}
module Static.GTheory where
import Logic.Prover
import Logic.Logic
import Logic.ExtSign
import Logic.Grothendieck
import Logic.Comorphism
import Logic.Coerce
import qualified Common.OrderedMap as OMap
import ATerm.Lib
import Common.Lib.Graph as Tree
import Common.Amalgamate -- for now
import Common.Keywords
import Common.AS_Annotation
import Common.Doc
import Common.DocUtils
import Common.ExtSign
import Common.IRI
import Common.Result
import Data.Graph.Inductive.Graph as Graph
import Data.List
import qualified Data.Map as Map
import Data.Typeable
import Control.Monad (foldM)
import Control.Exception
-- | A theory index describing a set of sentences; used to share
-- 'G_theory' values by reference in a theory map.
newtype ThId = ThId Int
  deriving (Typeable, Show, Eq, Ord, Enum, ShATermConvertible)

-- | The index used before a theory has been entered into the theory map.
startThId :: ThId
startThId = ThId 0

-- | Grothendieck theories with lookup indices
data G_theory = forall lid sublogics
    basic_spec sentence symb_items symb_map_items
    sign morphism symbol raw_symbol proof_tree .
  Logic lid sublogics
    basic_spec sentence symb_items symb_map_items
    sign morphism symbol raw_symbol proof_tree => G_theory
  { gTheoryLogic :: lid                 -- ^ the logic this theory lives in
  , gTheorySyntax :: Maybe IRI          -- ^ optional syntax IRI (passed to the pretty printer)
  , gTheorySign :: ExtSign sign symbol  -- ^ the (extended) signature
  , gTheorySignIdx :: SigId -- ^ index to lookup 'G_sign' (using 'signOf')
  , gTheorySens :: ThSens sentence (AnyComorphism, BasicProof)
    -- ^ the sentences, each with its recorded proof attempts
  , gTheorySelfIdx :: ThId -- ^ index to lookup this 'G_theory' in theory map
  } deriving Typeable

-- | Copy a theory while dropping its sentences and replacing the
-- signature and theory indices.
createGThWith :: G_theory -> SigId -> ThId -> G_theory
createGThWith (G_theory gtl gsub gts _ _ _) si =
  G_theory gtl gsub gts si noSens

-- | Coerce theory sentences from one logic into another
-- (a dynamic cast via 'primCoerce').
coerceThSens ::
  ( Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
      sign1 morphism1 symbol1 raw_symbol1 proof_tree1
  , Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
      sign2 morphism2 symbol2 raw_symbol2 proof_tree2
  , Monad m, Typeable b)
  => lid1 -> lid2 -> String -> ThSens sentence1 b -> m (ThSens sentence2 b)
coerceThSens = primCoerce
-- | Equality compares syntax and signature; sentence comparison is
-- short-circuited when both theory indices are valid (> 'startThId')
-- and equal.
instance Eq G_theory where
  G_theory l1 ser1 sig1 ind1 sens1 ind1'
    == G_theory l2 ser2 sig2 ind2 sens2 ind2' = ser1 == ser2
    && G_sign l1 sig1 ind1 == G_sign l2 sig2 ind2
    && (ind1' > startThId && ind2' > startThId && ind1' == ind2'
        || coerceThSens l1 l2 "" sens1 == Just sens2)

instance Show G_theory where
  show (G_theory _ _ sign _ sens _) =
    shows sign $ '\n' : show sens

instance Pretty G_theory where
  pretty g = prettyFullGTheory (gTheorySyntax g) g

-- | Pretty print the sublogic header line followed by the theory itself.
prettyFullGTheory :: Maybe IRI -> G_theory -> Doc
prettyFullGTheory sm g = prettyGTheorySL g $++$ prettyGTheory sm g

-- | Pretty print only the @logic@ header with the theory's sublogic.
prettyGTheorySL :: G_theory -> Doc
prettyGTheorySL g = keyword logicS <+> structId (show $ sublogicOfTh g)

-- | Pretty print the (simplified) theory; an empty theory over the empty
-- signature prints as empty braces.
prettyGTheory :: Maybe IRI -> G_theory -> Doc
prettyGTheory sm g = case simplifyTh g of
  G_theory lid _ sign@(ExtSign s _) _ sens _ -> let l = toNamedList sens in
    if null l && ext_is_subsig lid sign (ext_empty_signature lid) then
      specBraces Common.Doc.empty else printTheory sm lid (s, l)

-- | compute sublogic of a theory: least upper bound of the signature's
-- minimal sublogic and the minimal sublogics of all sentences
sublogicOfTh :: G_theory -> G_sublogics
sublogicOfTh (G_theory lid _ (ExtSign sigma _) _ sens _) =
  let sub = foldl lub
        (minSublogic sigma)
        (map snd $ OMap.toList $
         OMap.map (minSublogic . sentence)
         sens)
  in G_sublogics lid sub
-- | get theorem names with their best proof results
getThGoals :: G_theory -> [(String, Maybe BasicProof)]
getThGoals (G_theory _ _ _ _ sens _) = map toGoal . OMap.toList
    $ OMap.filter (not . isAxiom) sens
  where toGoal (n, st) = let ts = thmStatus st in
          -- 'Nothing' if the goal was never attempted, otherwise the
          -- best ('maximum') of the recorded proof results
          (n, if null ts then Nothing else Just $ maximum $ map snd ts)

-- | get axiom names plus True for former theorem
getThAxioms :: G_theory -> [(String, Bool)]
getThAxioms (G_theory _ _ _ _ sens _) = map
  (\ (k, s) -> (k, wasTheorem s))
  $ OMap.toList $ OMap.filter isAxiom sens

-- | get sentence names
getThSens :: G_theory -> [String]
getThSens (G_theory _ _ _ _ sens _) = map fst $ OMap.toList sens

-- | simplify a theory (throw away qualifications)
simplifyTh :: G_theory -> G_theory
simplifyTh (G_theory lid gsyn sigma@(ExtSign s _) ind1 sens ind2) =
  G_theory lid gsyn sigma ind1
    (OMap.map (mapValue $ simplify_sen lid s) sens) ind2
-- | apply a comorphism to a theory
mapG_theory :: AnyComorphism -> G_theory -> Result G_theory
mapG_theory (Comorphism cid) (G_theory lid _ (ExtSign sign _) ind1 sens ind2) =
  do
    -- cast the theory into the comorphism's source logic first
    bTh <- coerceBasicTheory lid (sourceLogic cid)
      ("unapplicable comorphism '" ++ language_name cid ++ "'\n")
      (sign, toNamedList sens)
    (sign', sens') <- wrapMapTheory cid bTh
    return $ G_theory (targetLogic cid) Nothing (mkExtSign sign')
      ind1 (toThSens sens') ind2

-- | Translation of a G_theory along a GMorphism
translateG_theory :: GMorphism -> G_theory -> Result G_theory
translateG_theory (GMorphism cid _ _ morphism2 _)
                  (G_theory lid _ (ExtSign sign _) _ sens _) = do
  let tlid = targetLogic cid
  bTh <- coerceBasicTheory lid (sourceLogic cid)
    "translateG_theory" (sign, toNamedList sens)
  (_, sens'') <- wrapMapTheory cid bTh
  -- additionally translate every sentence along the signature morphism
  sens''' <- mapM (mapNamedM $ map_sen tlid morphism2) sens''
  return $ G_theory tlid Nothing (mkExtSign $ cod morphism2)
    startSigId (toThSens sens''') startThId

-- | Join the sentences of two G_theories
joinG_sentences :: Monad m => G_theory -> G_theory -> m G_theory
joinG_sentences (G_theory lid1 syn sig1 ind sens1 _)
                (G_theory lid2 _ sig2 _ sens2 _) = do
  sens2' <- coerceThSens lid2 lid1 "joinG_sentences" sens2
  sig2' <- coerceSign lid2 lid1 "joinG_sentences" sig2
  -- both theories must be over the same plain signature
  return $ assert (plainSign sig1 == plainSign sig2')
         $ G_theory lid1 syn sig1 ind (joinSens sens1 sens2') startThId

-- | Intersect the sentences of two G_theories, G_sign is the intersection of their signatures
intersectG_sentences :: Monad m => G_sign -> G_theory -> G_theory -> m G_theory
intersectG_sentences gsig@(G_sign lidS signS indS)
                     (G_theory lid1 syn sig1 ind sens1 _)
                     (G_theory lid2 _ sig2 _ sens2 _) = do
  sens1' <- coerceThSens lid1 lidS "intersectG_sentences1" sens1
  sens2' <- coerceThSens lid2 lidS "intersectG_sentences2" sens2
  return $ G_theory lidS Nothing signS indS (intersectSens sens1' sens2') startThId

-- | flattening the sentences form a list of G_theories
flatG_sentences :: Monad m => G_theory -> [G_theory] -> m G_theory
flatG_sentences = foldM joinG_sentences

-- | Get signature of a theory
signOf :: G_theory -> G_sign
signOf (G_theory lid _ sign ind _ _) = G_sign lid sign ind
-- | create theory without sentences
noSensGTheory :: Logic lid sublogics basic_spec sentence symb_items
    symb_map_items sign morphism symbol raw_symbol proof_tree
    => lid -> ExtSign sign symbol -> SigId -> G_theory
noSensGTheory lid sig si = G_theory lid Nothing sig si noSens startThId

-- | A proof of a single sentence: either a prover's proof status in some
-- logic, or one of the unproven markers.
data BasicProof =
  forall lid sublogics
    basic_spec sentence symb_items symb_map_items
    sign morphism symbol raw_symbol proof_tree .
  Logic lid sublogics
    basic_spec sentence symb_items symb_map_items
    sign morphism symbol raw_symbol proof_tree =>
      BasicProof lid (ProofStatus proof_tree)
  | Guessed
  | Conjectured
  | Handwritten
  deriving Typeable

instance Eq BasicProof where
  Guessed == Guessed = True
  Conjectured == Conjectured = True
  Handwritten == Handwritten = True
  BasicProof lid1 p1 == BasicProof lid2 p2 =
    -- proof statuses only compare equal within the same logic
    coerceProofStatus lid1 lid2 "Eq BasicProof" p1 == Just p2
  _ == _ = False

-- | Ordering by confidence: 'Guessed' is least; among 'BasicProof's a
-- proved status ranks above an unproved one.
instance Ord BasicProof where
  Guessed <= _ = True
  Conjectured <= x = case x of
    Guessed -> False
    _ -> True
  Handwritten <= x = case x of
    Guessed -> False
    Conjectured -> False
    _ -> True
  BasicProof lid1 pst1 <= x =
    case x of
      BasicProof lid2 pst2
        | isProvedStat pst1 && not (isProvedStat pst2) -> False
        | not (isProvedStat pst1) && isProvedStat pst2 -> True
        | otherwise -> case primCoerce lid1 lid2 "" pst1 of
            Nothing -> False
            Just pst1' -> pst1' <= pst2
      _ -> False

instance Show BasicProof where
  show (BasicProof _ p1) = show p1
  show Guessed = "Guessed"
  show Conjectured = "Conjectured"
  show Handwritten = "Handwritten"
-- | test a theory sentence: is there any proved result among its attempts?
isProvenSenStatus :: SenStatus a (AnyComorphism, BasicProof) -> Bool
isProvenSenStatus = any (isProvedBasically . snd) . thmStatus

-- | test whether a 'BasicProof' is an actual proved prover result
isProvedBasically :: BasicProof -> Bool
isProvedBasically b = case b of
  BasicProof _ pst -> isProvedStat pst
  _ -> False

getValidAxioms
  :: G_theory -- ^ old global theory
  -> G_theory -- ^ new global theory
  -> [String] -- ^ unchanged axioms
getValidAxioms
  (G_theory lid1 _ _ _ sens1 _)
  (G_theory lid2 _ _ _ sens2 _) =
  case coerceThSens lid1 lid2 "" sens1 of
    Nothing -> []
    Just sens -> OMap.keys $ OMap.filterWithKey (\ k s ->
      -- an axiom stays valid iff it occurs under the same name with the
      -- same sentence in both theories
      case OMap.lookup k sens of
        Just s2 -> isAxiom s && isAxiom s2 && sentence s == sentence s2
        _ -> False) sens2
invalidateProofs
  :: G_theory -- ^ old global theory
  -> G_theory -- ^ new global theory
  -> G_theory -- ^ local theory with proven goals
  -> (Bool, G_theory) -- ^ no changes and new local theory with deleted proofs
invalidateProofs oTh nTh (G_theory lid syn sig si sens _) =
  let vAxs = getValidAxioms oTh nTh
      oAxs = map fst $ getThAxioms oTh
      {- the old axioms that are no longer valid in the new theory.
         BUG FIX: this was @vAxs \\ oAxs@, which is always empty since
         'getValidAxioms' only returns (a subset of) the old axioms, so
         no proof was ever invalidated. -}
      iValAxs = oAxs \\ vAxs
      -- a proof stays valid iff it uses no invalidated axiom
      validProofs (_, bp) = case bp of
        BasicProof _ pst -> not . any (`elem` iValAxs) $ usedAxioms pst
        _ -> True
      -- keep axioms unchanged; for theorems drop invalidated proofs and
      -- record (per sentence) whether nothing was dropped
      newSens = OMap.map
        (\ s -> if isAxiom s then (True, s) else
           let (ps, ups) = partition validProofs $ thmStatus s
           in (null ups, s { senAttr = ThmStatus ps })) sens
  in ( all fst $ OMap.elems newSens
     , G_theory lid syn sig si (OMap.map snd newSens) startThId)
{- | mark sentences as proven if an identical axiom or other proven sentence
is part of the same theory. -}
proveSens :: Logic lid sublogics basic_spec sentence symb_items
    symb_map_items sign morphism symbol raw_symbol proof_tree
    => lid -> ThSens sentence (AnyComorphism, BasicProof)
    -> ThSens sentence (AnyComorphism, BasicProof)
proveSens lid sens = let
  -- split into already-established sentences (axioms or proven ones)
  -- and open goals, then prove the goals against the established part
  (axs, ths) = OMap.partition (\ s -> isAxiom s || isProvenSenStatus s) sens
  in Map.union axs $ proveSensAux lid axs ths
-- | Mark local sentences as proven using the axioms and proven sentences
-- of a given global theory; returns the local theory unchanged when the
-- sentence coercion between the two logics fails.
proveLocalSens :: G_theory -> G_theory -> G_theory
proveLocalSens (G_theory glid _ _ _ gsens _)
               lth@(G_theory lid syn sig ind sens _) =
  case coerceThSens glid lid "proveLocalSens" gsens of
    Just lsens -> G_theory lid syn sig ind
      (proveSensAux lid (OMap.filter (\ s -> isAxiom s || isProvenSenStatus s)
        lsens) sens) startThId
    Nothing -> lth

{- | mark sentences as proven if an identical axiom or other proven sentence
is part of a given global theory. -}
proveSensAux :: Logic lid sublogics basic_spec sentence symb_items
    symb_map_items sign morphism symbol raw_symbol proof_tree
    => lid -> ThSens sentence (AnyComorphism, BasicProof)
    -> ThSens sentence (AnyComorphism, BasicProof)
    -> ThSens sentence (AnyComorphism, BasicProof)
proveSensAux lid axs ths = let
  -- index the established sentences by their (semantic) sentence value
  axSet = Map.fromList $ map (\ (n, s) -> (sentence s, n)) $ OMap.toList axs
  in Map.mapWithKey (\ i e -> let sen = OMap.ele e in
     case Map.lookup (sentence sen) axSet of
       Just ax ->
         -- identical sentence found: record a trivial proof via the
         -- identity comorphism, citing the found sentence as used axiom
         e { OMap.ele = sen { senAttr = ThmStatus $
               ( Comorphism $ mkIdComorphism lid $ top_sublogic lid
               , BasicProof lid
                   (openProofStatus i "hets" $ empty_proof_tree lid)
                   { usedAxioms = [ax]
                   , goalStatus = Proved True }) : thmStatus sen } }
       _ -> e) ths

{- | mark all sentences of a local theory that have been proven via a prover
over a global theory (with the same signature) as proven. Also mark
duplicates of proven sentences as proven. Assume that the sentence names of
the local theory are identical to the global theory. -}
propagateProofs :: G_theory -> G_theory -> G_theory
propagateProofs locTh@(G_theory lid1 syn sig ind lsens _)
                (G_theory lid2 _ _ _ gsens _) =
  case coerceThSens lid2 lid1 "" gsens of
    Just ps ->
      if Map.null ps then locTh else
        G_theory lid1 syn sig ind
          (proveSens lid1 $ Map.union (Map.intersection ps lsens) lsens)
          startThId
    Nothing -> error "propagateProofs"
-- | Grothendieck diagrams
type GDiagram = Gr G_theory (Int, GMorphism)

-- | checks whether a connected GDiagram is homogeneous
isHomogeneousGDiagram :: GDiagram -> Bool
isHomogeneousGDiagram = all (\ (_, _, (_, phi)) -> isHomogeneous phi) . labEdges

-- | homogenise a GDiagram to a targeted logic
homogeniseGDiagram :: Logic lid sublogics
    basic_spec sentence symb_items symb_map_items
    sign morphism symbol raw_symbol proof_tree
  => lid     -- ^ the target logic to be coerced to
  -> GDiagram -- ^ the GDiagram to be homogenised
  -> Result (Gr sign (Int, morphism))
homogeniseGDiagram targetLid diag = do
  let -- coerce one node's signature into the target logic
      convertNode (n, gth) = do
        G_sign srcLid extSig _ <- return $ signOf gth
        extSig' <- coerceSign srcLid targetLid "" extSig
        return (n, plainSign extSig')
      -- coerce one edge's morphism; only identity comorphisms are
      -- supported, i.e. the morphism must stay within a single logic
      convertEdge (n1, n2, (nr, GMorphism cid _ _ mor _ ))
        = if isIdComorphism (Comorphism cid) then
            do mor' <- coerceMorphism (targetLogic cid) targetLid "" mor
               return (n1, n2, (nr, mor'))
          else fail $
            "Trying to coerce a morphism between different logics.\n" ++
            "Heterogeneous specifications are not fully supported yet."
      convertNodes cDiag [] = return cDiag
      convertNodes cDiag (lNode : lNodes) = do
        convNode <- convertNode lNode
        let cDiag' = insNode convNode cDiag
        convertNodes cDiag' lNodes
      convertEdges cDiag [] = return cDiag
      convertEdges cDiag (lEdge : lEdges) = do
        convEdge <- convertEdge lEdge
        let cDiag' = insEdge convEdge cDiag
        convertEdges cDiag' lEdges
      dNodes = labNodes diag
      dEdges = labEdges diag
  -- insert converted nodes to an empty diagram
  cDiag <- convertNodes Graph.empty dNodes
  -- insert converted edges to the diagram containing only nodes
  convertEdges cDiag dEdges
{- | Coerce GMorphisms in the list of (diagram node, GMorphism) pairs
to morphisms in given logic -}
homogeniseSink :: Logic lid sublogics
    basic_spec sentence symb_items symb_map_items
    sign morphism symbol raw_symbol proof_tree
  => lid                 -- ^ the target logic to which morphisms will be coerced
  -> [(Node, GMorphism)] -- ^ the list of edges to be homogenised
  -> Result [(Node, morphism)]
-- 'mapM' replaces the hand-rolled sequential recursion of the original;
-- the 'Result' monad still fails on the first non-coercible morphism.
homogeniseSink targetLid = mapM convertMorphism
  where
    -- coerce a single sink morphism; only identity comorphisms are
    -- supported, i.e. the morphism must stay within a single logic
    convertMorphism (n, GMorphism cid _ _ mor _) =
      if isIdComorphism (Comorphism cid) then
        do mor' <- coerceMorphism (targetLogic cid) targetLid "" mor
           return (n, mor')
      else fail $
        "Trying to coerce a morphism between different logics.\n" ++
        "Heterogeneous specifications are not fully supported yet."
{- amalgamabilty check for heterogeneous diagrams
currently only checks whether the diagram is
homogeneous and if so, calls amalgamability check
for the specific logic -}
gEnsuresAmalgamability :: [CASLAmalgOpt]  -- ^ the options
    -> GDiagram           -- ^ the diagram
    -> [(Int, GMorphism)] -- ^ the sink
    -> Result Amalgamates
gEnsuresAmalgamability options gd sink =
  if isHomogeneousGDiagram gd && all (isHomogeneous . snd) sink then
    case labNodes gd of
      -- pick the logic of the first node as the common logic
      (_, G_theory lid _ _ _ _ _) : _ -> do
        diag <- homogeniseGDiagram lid gd
        sink' <- homogeniseSink lid sink
        ensures_amalgamability lid (options, diag, sink', Graph.empty)
      _ -> error "heterogeneous amalgability check: no nodes"
  else error "heterogeneous amalgability check not yet implemented"
| gnn/Hets | Static/GTheory.hs | gpl-2.0 | 17,192 | 0 | 26 | 4,499 | 4,611 | 2,341 | 2,270 | 327 | 4 |
-- |
-- Copyright : (c) 2011,2012 Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Portability : GHC only
--
-- Big-step proofs using case distinctions on the possible sources of a fact.
module Theory.Constraint.Solver.CaseDistinctions (
-- * Precomputed case distinctions
-- ** Queries
unsolvedChainConstraints
-- ** Construction
, precomputeCaseDistinctions
, refineWithTypingAsms
-- ** Application
, solveWithCaseDistinction
-- ** Redundant cases
, removeRedundantCases
) where
import Prelude hiding (id, (.))
import Safe
import Data.Foldable (asum)
import qualified Data.Map as M
import qualified Data.Set as S
import Control.Basics
import Control.Category
import Control.Monad.Disj
import Control.Monad.Reader
import Control.Monad.State (gets)
import Control.Parallel.Strategies
import System.IO.Error
import System.Environment
import System.IO.Unsafe
import Text.PrettyPrint.Highlight
import Extension.Data.Label
import Extension.Prelude
import Theory.Constraint.Solver.Contradictions (contradictorySystem)
import Theory.Constraint.Solver.Goals
import Theory.Constraint.Solver.Reduction
import Theory.Constraint.Solver.Simplify
-- import Theory.Constraint.Solver.Types
import Theory.Constraint.System
import Theory.Model
import Control.Monad.Bind
import Debug.Trace
------------------------------------------------------------------------------
-- Precomputing case distinctions
------------------------------------------------------------------------------
-- | The number of remaining chain constraints of each case of the
-- disjunction stored in a 'CaseDistinction'.
unsolvedChainConstraints :: CaseDistinction -> [Int]
unsolvedChainConstraints =
    map (length . unsolvedChains . snd) . getDisj . get cdCases
-- Construction
---------------
-- | The initial case distinction if the given goal is required and the
-- given typing assumptions are justified.
initialCaseDistinction
    :: ProofContext
    -> [LNGuarded] -- ^ Axioms.
    -> Goal
    -> CaseDistinction
initialCaseDistinction ctxt axioms goal =
    CaseDistinction goal cases
  where
    -- keep only the case name and the constraint system, dropping the
    -- reduction's auxiliary result
    polish ((name, se), _) = ([name], se)
    -- start from an empty (untyped) system with the axioms inserted
    se0 = insertLemmas axioms $ emptySystem UntypedCaseDist $ get pcDiffContext ctxt
    cases = fmap polish $
        runReduction instantiate ctxt se0 (avoid (goal, se0))
    -- insert and immediately solve the goal to obtain the cases
    instantiate = do
        insertGoal goal False
        solveGoal goal
-- | Refine a source case distinction by applying the additional proof step.
refineCaseDistinction
    :: ProofContext
    -> Reduction (a, [String]) -- proof step with result and path extension
    -> CaseDistinction
    -> ([a], CaseDistinction)
refineCaseDistinction ctxt proofStep th =
    ( map fst $ getDisj refinement
    , set cdCases newCases th )
  where
    -- refined cases: redundant ones removed and substitutions restricted
    -- to the externally visible (stable) variables
    newCases = Disj . removeRedundantCases ctxt stableVars snd
             . map (second (modify sSubst (restrict stableVars)))
             . getDisj $ snd <$> refinement
    -- variables referenced from outside that must not be renamed
    stableVars = frees (get cdGoal th)
    fs = avoid th
    refinement = do
        (names, se) <- get cdCases th
        ((x, names'), se') <- fst <$> runReduction proofStep ctxt se fs
        return (x, (combine names names', se'))
    -- Combine names such that the coerce rule is blended out.
    combine [] ns' = ns'
    combine ("coerce":ns) ns' = combine ns ns'
    combine (n :_) _ = [n]
-- | Solves all chain and splitting goals as well as all premise goals solvable
-- with one of the given precomputed requires case distinction theorems, while
-- repeatedly simplifying the proof state.
--
-- Returns the names of the steps applied.
solveAllSafeGoals :: [CaseDistinction] -> Reduction [String]
solveAllSafeGoals ths' =
    solve ths' []
  where
    -- NOTE(review): reads the environment once via 'unsafePerformIO';
    -- currently only referenced from the commented-out alternative below.
    extensiveSplitting = unsafePerformIO $
      (getEnv "TAMARIN_EXTENSIVE_SPLIT" >> return True) `catchIOError` \_ -> return False
    -- goals that are safe to solve eagerly; loop breakers never are
    safeGoal _ (_, (_, LoopBreaker)) = False
    safeGoal doSplit (goal, _ ) =
      case goal of
        ChainG _ _ -> True
        ActionG _ fa -> not (isKUFact fa)
        PremiseG _ fa -> not (isKUFact fa)
        DisjG _ -> doSplit
        -- Uncomment to get more extensive case splitting
        SplitG _ -> doSplit --extensiveSplitting &&
        -- SplitG _ -> False
    usefulGoal (_, (_, Useful)) = True
    usefulGoal _ = False
    isKDPrem (PremiseG _ fa,_) = isKDFact fa
    isKDPrem _ = False
    isChainPrem1 (ChainG _ (_,PremIdx 1),_) = True
    isChainPrem1 _ = False
    solve :: [CaseDistinction] -> [String] -> Reduction [String]
    solve ths caseNames = do
        simplifySystem
        ctxt <- ask
        contradictoryIf =<< gets (contradictorySystem ctxt)
        goals <- gets openGoals
        chains <- gets unsolvedChains
        -- try to either solve a safe goal or use one of the precomputed case
        -- distinctions
        let noChainGoals = null [ () | (ChainG _ _, _) <- goals ]
            -- we perform equation splits, if there is a chain goal starting
            -- from a message variable; i.e., a chain constraint that is no
            -- open goal.
            splitAllowed = noChainGoals && not (null chains)
            safeGoals = fst <$> filter (safeGoal splitAllowed) goals
            kdPremGoals = fst <$> filter (\g -> isKDPrem g || isChainPrem1 g) goals
            usefulGoals = fst <$> filter usefulGoal goals
            -- priority: KD/chain premises, then other safe goals, then
            -- precomputed case distinctions applied to useful goals
            nextStep :: Maybe (Reduction [String], Maybe CaseDistinction)
            nextStep =
                ((\x -> (fmap return (solveGoal x), Nothing)) <$> headMay kdPremGoals) <|>
                ((\x -> (fmap return (solveGoal x), Nothing)) <$> headMay safeGoals) <|>
                (asum $ map (solveWithCaseDistinctionAndReturn ctxt ths) usefulGoals)
        case nextStep of
          Nothing -> return caseNames
          Just (step, Nothing) -> (\x -> solve ths (caseNames ++ x)) =<< step
          Just (step, Just usedCase) -> (\x -> solve (filterCases usedCase ths) (caseNames ++ x)) =<< step
    -- once applied, a case distinction is removed from the candidate list
    filterCases :: CaseDistinction -> [CaseDistinction] -> [CaseDistinction]
    filterCases usedCase cds = filter (\x -> usedCase /= x) cds
------------------------------------------------------------------------------
-- Redundant Case Distinctions --
------------------------------------------------------------------------------
-- | Given a list of stable variables (that are referenced from outside and cannot be simply
-- renamed) and a list containing systems, this function returns a subsequence of the list
-- such that for all removed systems, there is a remaining system that is equal modulo
-- renaming of non-stable variables.
removeRedundantCases :: ProofContext -> [LVar] -> (a -> System) -> [a] -> [a]
removeRedundantCases ctxt stableVars getSys cases0 =
    -- usually, redundant cases only occur with the multiset and bilinear pairing theories
    if enableBP msig || enableMSet msig then cases else cases0
  where
    -- decorate with index and normed version of the system
    decoratedCases = map (second addNormSys) $ zip [(0::Int)..] cases0
    -- drop cases where the normed systems coincide; the index keeps the
    -- surviving cases in their original order
    cases = map (fst . snd) . sortOn fst . sortednubOn (snd . snd) $ decoratedCases
    addNormSys = id &&& ((modify sEqStore dropNameHintsBound) . renameDropNameHints . getSys)
    -- this is an ordering that works well in the cases we tried
    orderedVars sys =
      filter ((/= LSortNode) . lvarSort) $ map fst . sortOn snd . varOccurences $ sys
    -- rename except for stable variables, drop name hints, and import ordered vars first
    renameDropNameHints sys =
      (`evalFresh` avoid stableVars) . (`evalBindT` stableVarBindings) $ do
        _ <- renameDropNamehint (orderedVars sys)
        renameDropNamehint sys
      where
        stableVarBindings = M.fromList (map (\v -> (v, v)) stableVars)
    -- Maude signature of the proof context (tells which theories are active)
    msig = mhMaudeSig . get pcMaudeHandle $ ctxt
------------------------------------------------------------------------------
-- Applying precomputed case distinctions
------------------------------------------------------------------------------
-- | Match a precomputed 'CaseDistinction' to a goal. Returns the instantiated
-- 'CaseDistinction' with the given goal if possible
matchToGoal
    :: ProofContext    -- ^ Proof context used for refining the case distinction.
    -> CaseDistinction -- ^ Case distinction to use.
    -> Goal            -- ^ Goal to match
    -> Maybe CaseDistinction
    -- ^ An adapted version of the case distinction with the given goal
matchToGoal ctxt th0 goalTerm =
  if not $ maybeMatcher (goalTerm, get cdGoal th0) then Nothing else
  case (goalTerm, get cdGoal th) of
    ( PremiseG (iTerm, premIdxTerm) faTerm
     ,PremiseG pPat@(iPat, _ ) faPat ) ->
      case doMatch (faTerm `matchFact` faPat <> iTerm `matchLVar` iPat) of
        [] -> Nothing
        subst:_ ->
          let refine = do
                -- redirect edges of the precomputed premise to the
                -- premise index of the actual goal
                modM sEdges (substNodePrem pPat (iPat, premIdxTerm))
                refineSubst subst
          in Just $ snd $ refineCaseDistinction ctxt refine (set cdGoal goalTerm th)
    (ActionG iTerm faTerm, ActionG iPat faPat) ->
      case doMatch (faTerm `matchFact` faPat <> iTerm `matchLVar` iPat) of
        [] -> Nothing
        subst:_ -> Just $ snd $ refineCaseDistinction ctxt
                     (refineSubst subst) (set cdGoal goalTerm th)
    -- No other matches possible, as we only precompute case distinctions for
    -- premises and KU-actions.
    _ -> Nothing
  where
    -- this code reflects the precomputed cases in 'precomputeCaseDistinctions'
    maybeMatcher (PremiseG _ faTerm, PremiseG _ faPat) = factTag faTerm == factTag faPat
    maybeMatcher ( ActionG _ (Fact KUFact [tTerm])
                 , ActionG _ (Fact KUFact [tPat])) =
      case (viewTerm tPat, viewTerm tTerm) of
        (Lit (Var v),_) | lvarSort v == LSortFresh -> sortOfLNTerm tPat == LSortFresh
        (FApp o _, FApp o' _) -> o == o'
        _ -> True
    maybeMatcher _ = False
    -- rename the case distinction apart from the goal's variables
    th = (`evalFresh` avoid goalTerm) . rename $ th0
    substNodePrem from to = S.map
      (\ e@(Edge c p) -> if p == from then Edge c to else e)
    doMatch match = runReader (solveMatchLNTerm match) (get pcMaudeHandle ctxt)
    refineSubst subst = do
        void (solveSubstEqs SplitNow subst)
        void substSystem
        return ((), [])
-- | Try to solve a premise goal or 'KU' action using the first precomputed
-- case distinction with a matching premise. Also returns the used case distinction.
solveWithCaseDistinctionAndReturn :: ProofContext
                                  -> [CaseDistinction]
                                  -> Goal
                                  -> Maybe (Reduction [String], Maybe CaseDistinction)
solveWithCaseDistinctionAndReturn hnd ths goal =
    -- 'asum' over Maybe yields the first successful application, if any.
    asum (map tryOne ths)
  where
    tryOne th = applyCaseDistinction hnd th goal
-- | Try to solve a premise goal or 'KU' action using the first precomputed
-- case distinction with a matching premise.
solveWithCaseDistinction :: ProofContext
                         -> [CaseDistinction]
                         -> Goal
                         -> Maybe (Reduction [String])
solveWithCaseDistinction hnd ths goal =
    -- Discard the used case distinction: mapping 'fst' over the 'Maybe'
    -- replaces the manual Nothing/Just case analysis.
    fmap fst (solveWithCaseDistinctionAndReturn hnd ths goal)
-- | Apply a precomputed case distinction theorem to a required fact.
applyCaseDistinction :: ProofContext
                     -> CaseDistinction    -- ^ Case distinction theorem.
                     -> Goal               -- ^ Required goal
                     -> Maybe (Reduction [String], Maybe CaseDistinction)
applyCaseDistinction ctxt th0 goal = case matchToGoal ctxt th0 goal of
    Just th -> Just ((do
        markGoalAsSolved "precomputed" goal
        -- Every case of the distinction becomes one branch of the proof.
        (names, sysTh0) <- disjunctionOfList $ getDisj $ get cdCases th
        -- Instantiate the case's system freshly, but keep the goal's own
        -- variables bound to themselves so they are not renamed away.
        sysTh <- (`evalBindT` keepVarBindings) . someInst $ sysTh0
        conjoinSystem sysTh
        return names), Just th0)
    Nothing -> Nothing
  where
    -- Identity bindings for the free variables of the goal; see above.
    keepVarBindings = M.fromList (map (\v -> (v, v)) (frees goal))
-- | Saturate the case distinctions with respect to each other such that no
-- additional splitting is introduced; i.e., only rules with a single or no
-- conclusion are used for the saturation.
saturateCaseDistinctions
    :: ProofContext -> [CaseDistinction] -> [CaseDistinction]
saturateCaseDistinctions ctxt thsInit =
    (go thsInit 1)
  where
    go :: [CaseDistinction] -> Integer -> [CaseDistinction]
    go ths n =
      -- Iterate towards a fixpoint, refining all case distinctions in
      -- parallel ('parList rdeepseq'); give up after 3 rounds to guarantee
      -- termination.
      if (any or (changes `using` parList rdeepseq)) && (n <= 3)
        then go ths' (n + 1)
        else if (n > 3)
               then trace "saturateCaseDistinctions: Saturation aborted, more than 3 iterations." ths'
               else ths'
      where
        (changes, ths') = unzip $ map (refineCaseDistinction ctxt solver) ths
        -- A case distinction is safe for saturation only if it does not
        -- split, i.e. it has at most one case.
        goodTh th = length (getDisj (get cdCases th)) <= 1
        solver    = do names <- solveAllSafeGoals (filter goodTh ths)
                       return (not $ null names, names)
-- | Precompute a saturated set of case distinctions.
precomputeCaseDistinctions
    :: ProofContext
    -> [LNGuarded]       -- ^ Axioms.
    -> [CaseDistinction]
precomputeCaseDistinctions ctxt axioms =
    map cleanupCaseNames $ saturateCaseDistinctions ctxt rawCaseDists
  where
    -- Keep only alphanumeric characters and '_' in case names; drop names
    -- that become empty.
    cleanupCaseNames = modify cdCases $ fmap $ first $
        filter (not . null)
      . map (filter (`elem` '_' : ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9']))

    rawCaseDists =
        initialCaseDistinction ctxt axioms <$> (protoGoals ++ msgGoals)

    -- construct case distinction starting from facts from non-special rules
    protoGoals = someProtoGoal <$> absProtoFacts
    msgGoals   = someKUGoal <$> absMsgFacts

    -- KU- and KD-facts are excluded here; knowledge is covered by msgGoals.
    getProtoFact (Fact KUFact _ ) = mzero
    getProtoFact (Fact KDFact _ ) = mzero
    getProtoFact fa               = return fa

    -- Abstract a fact to its tag and arity.
    absFact (Fact tag ts) = (tag, length ts)

    -- Fresh message variables t1..tn for building a generic fact instance.
    nMsgVars n = [ varTerm (LVar "t" LSortMsg i) | i <- [1..fromIntegral n] ]

    someProtoGoal :: (FactTag, Int) -> Goal
    someProtoGoal (tag, arity) =
        PremiseG (someNodeId, PremIdx 0) (Fact tag (nMsgVars arity))

    someKUGoal :: LNTerm -> Goal
    someKUGoal m = ActionG someNodeId (kuFact m)

    someNodeId = LVar "i" LSortNode 0

    -- FIXME: Also use facts from proof context.
    rules = get pcRules ctxt
    absProtoFacts = sortednub $ do
        ru <- joinAllRules rules
        fa <- absFact <$> (getProtoFact =<< (get rConcs ru ++ get rPrems ru))
        -- exclude facts handled specially by the prover
        guard (not $ fst fa `elem` [OutFact, InFact, FreshFact])
        return fa

    -- Abstract message shapes: a fresh variable, a bilinear-pairing e-map
    -- (if enabled), and one application per user-defined function symbol.
    absMsgFacts :: [LNTerm]
    absMsgFacts = asum $ sortednub $
      [ return $ varTerm (LVar "t" LSortFresh 1)
      , if enableBP msig then return $ fAppC EMap $ nMsgVars (2::Int) else []
      , [ fAppNoEq o $ nMsgVars k
        | o@(_,(k,priv)) <- S.toList . noEqFunSyms  $ msig
        , NoEq o `S.notMember` implicitFunSig, k > 0 || priv==Private]
      ]

    msig = mhMaudeSig . get pcMaudeHandle $ ctxt
-- | Refine a set of case distinction by exploiting additional typing
-- assumptions.
refineWithTypingAsms
    :: [LNGuarded]        -- ^ Typing assumptions to use.
    -> ProofContext       -- ^ Proof context to use.
    -> [CaseDistinction]  -- ^ Original, untyped case distinctions.
    -> [CaseDistinction]  -- ^ Refined, typed case distinctions.
refineWithTypingAsms [] _ cases0 =
    -- No assumptions: nothing to solve, just relabel the systems as typed.
    fmap ((modify cdCases . fmap . second) (set sCaseDistKind TypedCaseDist)) $ cases0
refineWithTypingAsms assumptions ctxt cases0 =
    fmap (modifySystems removeFormulas) $
    saturateCaseDistinctions ctxt $
    modifySystems updateSystem <$> cases0
  where
    modifySystems = modify cdCases . fmap . second
    -- Inject the typing assumptions into each case and mark it as typed.
    updateSystem se =
        modify sFormulas (S.union (S.fromList assumptions)) $
        set sCaseDistKind TypedCaseDist $ se
    -- After saturation the injected formulas are dropped again; disjunction
    -- goals stemming from them must be removed as well.
    removeFormulas =
        modify sGoals (M.filterWithKey isNoDisjGoal)
      . set sFormulas S.empty
      . set sSolvedFormulas S.empty
    isNoDisjGoal (DisjG _) _ = False
    isNoDisjGoal _         _ = True
| samscott89/tamarin-prover | lib/theory/src/Theory/Constraint/Solver/CaseDistinctions.hs | gpl-3.0 | 16,809 | 0 | 21 | 4,745 | 3,938 | 2,092 | 1,846 | 264 | 11 |
-- | Return the element at the given 1-based position of a list
-- (problem 3 of the Ninety-Nine Haskell Problems).
elementAt :: [a] -> Int -> a
elementAt (x:_)  1 = x
elementAt (_:xs) k = elementAt xs (k - 1)
elementAt []     _ = error "elementAt: index out of range"
| medik/lang-hack | Haskell/Ninety-Nine_Haskell_Problems/3.hs | gpl-3.0 | 58 | 0 | 7 | 14 | 38 | 20 | 18 | 2 | 1 |
module Colors (green, red, normal)
where

-- | The ASCII escape character that starts every ANSI control sequence.
esc :: String
esc = "\x1b"

-- | Build an ANSI SGR (Select Graphic Rendition) sequence from a code.
sgr :: String -> String
sgr code = esc ++ "[" ++ code ++ "m"

-- | Reset all terminal attributes.
normal :: String
normal = sgr "0"

-- | Switch the terminal foreground colour to green.
green :: String
green = sgr "32"

-- | Switch the terminal foreground colour to red.
red :: String
red = sgr "31"
| seppeljordan/haskell-calendar | src/Colors.hs | gpl-3.0 | 187 | 0 | 5 | 44 | 67 | 40 | 27 | 9 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-
Problem: The file farm.csv contains data on the US farm population
(millions of persons) from 1935 to 1980. Make a scatterplot of these
data and include the least-squares regression line of farm population
on year. (Moore, David S. The Basic Practice of Statistics. 4th
ed. New York: W. H. Freeman, 2007, p. 133, exercise 5.9.)
Here we use a template to generate a gnuplot script that generates the
plot, which will then be included in a LaTeX document. (I tried the
Haskell gnuplot wrapper, but it doesn't support the epslatex terminal,
and in any case this approach is actually much simpler and clearer.)
Cf. the "plot", "lm" and "abline" functions in R:
<http://www.statmethods.net/graphs/scatterplot.html>.
-}
module Main where
import System.Console.CmdArgs.Implicit
import Control.Monad (unless)
import Text.CSV.ByteString (CSV, parseCSV)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lex.Double as BD
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import qualified Data.Packed.Vector as Vector
import qualified Numeric.GSL.Fitting.Linear as Linear
import qualified Text.StringTemplate as Tpl
-- | Read the farm-population CSV, fit a least-squares regression line and
-- instantiate the gnuplot script template with the fitted parameters.
main = do
  -- Get the command line args
  args <- cmdArgs farm
  let errs = checkArgs args in
    unless (null errs) $ error (unlines errs)
  -- Read the data file
  csvStr <- B.readFile (dataFile args)
  let (xs, ys) = case parseCSV csvStr of
                   Nothing  -> error $ "Couldn't parse CSV file " ++ dataFile args
                   Just csv -> csvToDoubles csv
  -- Do the calculation
  let (intercept, slope) = linearRegression xs ys
  -- Generate a gnuplot script
  -- The template placeholders are filled with the fitted line parameters
  -- and the in/out file names.
  let attrs = [ ("outputFile", outputFile args),
                ("intercept", show intercept),
                ("slope", show slope),
                ("dataFile", dataFile args) ]
  tplStr <- B.readFile (templateFile args)
  let tpl = Tpl.newSTMP (T.unpack (decodeUtf8 tplStr))
  B.writeFile (scriptFile args)
              (encodeUtf8 (T.pack (Tpl.render (Tpl.setManyAttrib attrs tpl))))
-- Command-line option processing

-- | The four file paths taken from the command line.
data Farm = Farm { dataFile :: String,      -- ^ input CSV with (year, population) rows
                   templateFile :: String,  -- ^ gnuplot script template
                   scriptFile :: String,    -- ^ gnuplot script to generate
                   outputFile :: String }   -- ^ LaTeX file the script will produce
            deriving (Data, Typeable, Show, Eq)
-- | cmdargs specification: each field gets an explicit short and long flag,
-- a FILE metavariable and a help string.
farm = Farm {
  dataFile = def &= explicit &= name "d" &= name "data" &= typFile
             &= help "data file in CSV format",
  templateFile = def &= explicit &= name "t" &= name "template" &= typFile
                 &= help "template file",
  scriptFile = def &= explicit &= name "s" &= name "script" &= typFile
               &= help "gnuplot script file to generate",
  outputFile = def &= explicit &= name "o" &= name "output" &= typFile
               &= help "LaTeX output file to generate" }
-- | Validate the parsed options: every file path must be non-empty.
-- Returns one error message per missing argument; an empty list means
-- all required arguments were supplied.
checkArgs :: Farm -> [String]
checkArgs args =
  [ msg | (field, msg) <- required, null (field args) ]
  where
    -- Checked in the order the options are listed, so the messages come
    -- out in a stable, predictable order.
    required = [ (dataFile,     "data filename required")
               , (templateFile, "template filename required")
               , (scriptFile,   "script filename required")
               , (outputFile,   "output filename required")
               ]
-- Parse the numbers in the data file

-- | Convert a parsed CSV into two columns of doubles (xs, ys).
-- Calls 'error' on rows that do not have exactly two fields, or on fields
-- that do not parse as doubles.
csvToDoubles :: CSV -> ([Double], [Double])
csvToDoubles csv =
  foldr convRow ([], []) csv where
    convRow row (xAcc, yAcc) = case row of
      x:y:[] -> (fieldToDouble x : xAcc,
                 fieldToDouble y : yAcc)
      _      -> error "Expected two columns per row"
      where fieldToDouble field =
              case BD.readDouble field of
                Nothing -> error $ "Couldn't parse field " ++
                             T.unpack (decodeUtf8 field)
                Just (value, _) -> value
-- Fit the regression line

-- | Least-squares fit of @y = intercept + slope * x@ through the given
-- points, using the GSL linear-regression binding.
linearRegression :: [Double] -> [Double] -> (Double, Double)
linearRegression xs ys =
  -- The covariances and chi-square of the fit are not used here; the
  -- underscore-prefixed names silence the unused-binding warnings that the
  -- original plain tuple pattern produced.
  let (intercept, slope, _cov00, _cov01, _cov11, _chiSq) =
        Linear.linear (Vector.fromList xs) (Vector.fromList ys)
  in (intercept, slope)
| benjamingeer/StatsRecipes | haskell/simple-linear-regression/Farm.hs | gpl-3.0 | 3,893 | 0 | 16 | 932 | 968 | 517 | 451 | 66 | 3 |
{-# OPTIONS -fno-warn-orphans #-}
{-# LANGUAGE TemplateHaskell #-}
module Lamdu.Config (Layers(..), Config(..), delKeys) where
import Data.Aeson (ToJSON(..), FromJSON(..))
import Data.Aeson.TH (deriveJSON)
import Data.Vector.Vector2 (Vector2(..))
import Foreign.C.Types (CDouble)
import Graphics.DrawingCombinators.Utils () -- Read instance for Color
import qualified Graphics.DrawingCombinators as Draw
import qualified Graphics.UI.Bottle.EventMap as E
-- | Z-order layers for widget backgrounds and decorations; all fields share
-- the single 'Int' annotation of the group binding. 'layerMax' is the
-- highest layer in use.
data Layers = Layers
  { layerInactiveHole
  , layerCursorBG
  , layerTypes
  , layerCollapsedCompactBG
  , layerCollapsedExpandedBG
  , layerChoiceBG
  , layerActiveHoleBG
  , layerNameCollisionBG
  , layerLabeledApplyBG
  , layerParensHighlightBG
  , layerActivePane
  , layerMax :: Int
  } deriving (Eq)
-- | Full visual and keyboard configuration of the editor, loaded from JSON
-- (see the 'deriveJSON' splices below). Roughly grouped into layers,
-- base/help styling, key bindings, hole presentation and per-widget tweaks.
data Config = Config
  { layers :: Layers
  -- Base and help-overlay styling:
  , baseColor :: Draw.Color
  , baseTextSize :: Int
  , helpTextColor :: Draw.Color
  , helpTextSize :: Int
  , helpInputDocColor :: Draw.Color
  , helpBGColor :: Draw.Color
  , invalidCursorBGColor :: Draw.Color
  -- Global key bindings:
  , quitKeys :: [E.ModKey]
  , undoKeys :: [E.ModKey]
  , redoKeys :: [E.ModKey]
  , makeBranchKeys :: [E.ModKey]
  , jumpToBranchesKeys :: [E.ModKey]
  , overlayDocKeys :: [E.ModKey]
  , addNextParamKeys :: [E.ModKey]
  , delBranchKeys :: [E.ModKey]
  , closePaneKeys :: [E.ModKey]
  , movePaneDownKeys :: [E.ModKey]
  , movePaneUpKeys :: [E.ModKey]
  , replaceKeys :: [E.ModKey]
  , pickResultKeys :: [E.ModKey]
  , pickAndMoveToNextHoleKeys :: [E.ModKey]
  , jumpToNextHoleKeys :: [E.ModKey]
  , jumpToPrevHoleKeys :: [E.ModKey]
  , jumpToDefinitionKeys :: [E.ModKey]
  , delForwardKeys :: [E.ModKey]
  , delBackwardKeys :: [E.ModKey]
  , wrapKeys :: [E.ModKey]
  , debugModeKeys :: [E.ModKey]
  , newDefinitionKeys :: [E.ModKey]
  -- Syntax-element colors:
  , definitionColor :: Draw.Color
  , atomColor :: Draw.Color
  , parameterColor :: Draw.Color
  , paramOriginColor :: Draw.Color
  , literalIntColor :: Draw.Color
  , previousCursorKeys :: [E.ModKey]
  -- Hole presentation:
  , holeResultCount :: Int
  , holeResultScaleFactor :: Vector2 Double
  , holeResultPadding :: Vector2 Double
  , holeResultInjectedScaleExponent :: Double
  , holeSearchTermScaleFactor :: Vector2 Double
  , holeNumLabelScaleFactor :: Vector2 Double
  , holeNumLabelColor :: Draw.Color
  , holeInactiveExtraSymbolColor :: Draw.Color
  , typeErrorHoleWrapBackgroundColor :: Draw.Color
  , deletableHoleBackgroundColor :: Draw.Color
  , activeHoleBackgroundColor :: Draw.Color
  , inactiveHoleBackgroundColor :: Draw.Color
  -- Tags, inferred values and parenthesis highlighting:
  , tagScaleFactor :: Vector2 Double
  , fieldTagScaleFactor :: Vector2 Double
  , fieldTint :: Draw.Color
  , inferredValueScaleFactor :: Vector2 Double
  , inferredValueTint :: Draw.Color
  , parenHighlightColor :: Draw.Color
  , addWhereItemKeys :: [E.ModKey]
  -- Lambda / arrow / where rendering:
  , lambdaColor :: Draw.Color
  , lambdaTextSize :: Int
  , rightArrowColor :: Draw.Color
  , rightArrowTextSize :: Int
  , whereColor :: Draw.Color
  , whereScaleFactor :: Vector2 Double
  , whereLabelScaleFactor :: Vector2 Double
  , typeScaleFactor :: Vector2 Double
  , squareParensScaleFactor :: Vector2 Double
  , foreignModuleColor :: Draw.Color
  , foreignVarColor :: Draw.Color
  , cutKeys :: [E.ModKey]
  , pasteKeys :: [E.ModKey]
  , inactiveTintColor :: Draw.Color
  , activeDefBGColor :: Draw.Color
  , inferredTypeTint :: Draw.Color
  , inferredTypeErrorBGColor :: Draw.Color
  , inferredTypeBGColor :: Draw.Color
  -- For definitions
  , collapsedForegroundColor :: Draw.Color
  -- For parameters
  , collapsedCompactBGColor :: Draw.Color
  , collapsedExpandedBGColor :: Draw.Color
  , collapsedExpandKeys :: [E.ModKey]
  , collapsedCollapseKeys :: [E.ModKey]
  , monomorphicDefOriginForegroundColor :: Draw.Color
  , polymorphicDefOriginForegroundColor :: Draw.Color
  , builtinOriginNameColor :: Draw.Color
  , cursorBGColor :: Draw.Color
  -- List rendering:
  , listBracketTextSize :: Int
  , listBracketColor :: Draw.Color
  , listCommaTextSize :: Int
  , listCommaColor :: Draw.Color
  , listAddItemKeys :: [E.ModKey]
  , selectedBranchColor :: Draw.Color
  , jumpLHStoRHSKeys :: [E.ModKey]
  , jumpRHStoLHSKeys :: [E.ModKey]
  -- Font scaling:
  , shrinkBaseFontKeys :: [E.ModKey]
  , enlargeBaseFontKeys :: [E.ModKey]
  , enlargeFactor :: Double
  , shrinkFactor :: Double
  , defTypeLabelTextSize :: Int
  , defTypeLabelColor :: Draw.Color
  , defTypeBoxScaleFactor :: Vector2 Double
  , acceptKeys :: [E.ModKey]
  -- Name-collision presentation:
  , autoGeneratedNameTint :: Draw.Color
  , collisionSuffixTextColor :: Draw.Color
  , collisionSuffixBGColor :: Draw.Color
  , collisionSuffixScaleFactor :: Vector2 Double
  , paramDefSuffixScaleFactor :: Vector2 Double
  , enterSubexpressionKeys :: [E.ModKey]
  , leaveSubexpressionKeys :: [E.ModKey]
  , replaceInferredValueKeys :: [E.ModKey]
  , keepInferredValueKeys :: [E.ModKey]
  , acceptInferredValueKeys :: [E.ModKey]
  , nextInfoModeKeys :: [E.ModKey]
  -- Records and presentation-mode choice:
  , recordTypeParensColor :: Draw.Color
  , recordValParensColor :: Draw.Color
  , recordAddFieldKeys :: [E.ModKey]
  , presentationChoiceScaleFactor :: Vector2 Double
  , presentationChoiceColor :: Draw.Color
  , labeledApplyBGColor :: Draw.Color
  } deriving (Eq)
-- | All keys that trigger a delete: the forward and backward variants
-- combined into one list.
delKeys :: Config -> [E.ModKey]
delKeys config = concat [delForwardKeys config, delBackwardKeys config]
-- Template Haskell splices deriving the JSON (de)serialization for all
-- config types; 'id' keeps the record field names as the JSON keys.
deriveJSON id ''Vector2
deriveJSON id ''Draw.Color
deriveJSON id ''E.ModState
deriveJSON id ''E.ModKey
deriveJSON id ''E.Key
deriveJSON id ''Layers
deriveJSON id ''Config
-- CDouble has no aeson instances upstream; round-trip through Double.
-- These are orphan instances — hence the -fno-warn-orphans pragma above.
instance FromJSON CDouble where
  parseJSON = fmap (realToFrac :: Double -> CDouble) . parseJSON
instance ToJSON CDouble where
  toJSON = toJSON . (realToFrac :: CDouble -> Double)
| sinelaw/lamdu | Lamdu/Config.hs | gpl-3.0 | 5,566 | 0 | 10 | 949 | 1,376 | 848 | 528 | 154 | 1 |
{-# LANGUAGE ScopedTypeVariables, OverloadedStrings #-}
module Database.Design.Ampersand.FSpec.ToFSpec.Calc
( deriveProofs
, showProof, showPrf, assembleECAs, conjuncts, genPAclause
, commaEngPandoc, commaNLPandoc, commaEngPandoc', commaNLPandoc', commaPandocAnd --TODO: this shouldt be here!
, quadsOfRules
-- , testInterface
) where
import Database.Design.Ampersand.Basics
import Data.List hiding (head)
import Data.Monoid
import GHC.Exts (sortWith)
--import Data.ByteString.Char8
--import Data.ByteString.Lazy.Char8
import Database.Design.Ampersand.Core.AbstractSyntaxTree hiding (sortWith)
import Database.Design.Ampersand.ADL1
import Database.Design.Ampersand.ADL1.Expression
import Database.Design.Ampersand.Classes
import Database.Design.Ampersand.FSpec.FSpec
import Database.Design.Ampersand.FSpec.ShowADL (ShowADL(..), showREL)
import Database.Design.Ampersand.FSpec.ShowECA (showECA)
import Database.Design.Ampersand.FSpec.ToFSpec.NormalForms
import Database.Design.Ampersand.Misc (Lang(..),Options(..),PandocFormat(ReST),string2Blocks)
import Text.Pandoc.Builder
import Prelude hiding (head)
-- | Report an internal error, tagged with this module's name and a line
-- number for locating the failing call site.
fatal :: Int -> String -> a
fatal = fatalMsg "FSpec.ToFSpec.Calc"
-- | Local replacement for 'Prelude.head' (which is hidden by the import
-- list): aborts with a module-tagged fatal error instead of an anonymous
-- pattern-match failure when given an empty list.
head :: [a] -> a
head (x : _) = x
head []      = fatal 30 "head must not be used on an empty list!"
-- testInterface :: FSpec -> Interface -> String
-- Deze functie is bedoeld om te bedenken hoe interfaces moeten worden afgeleid uit een vers vertaalde ObjectDef.
-- Nadat deze goed werkt kunnen de bewijsgenerator en de codegenerator worden gemaakt.
-- testInterface :: FSpec -> Interface -> String
-- testInterface fSpec ifc
-- = "\nInterface "++ name ifc++"("++intercalate ", " [showADL r++":"++name (target r) | r<-rels]++")\n"++
-- " - The parameters correspond to editable fields in a user interface.\n "++
-- showADL ifc++"\n"++
-- " - Invariants:\n "++intercalate "\n " [showADL rule | rule<-invs]++"\n"++
-- " - Derivation of clauses for ECA-rules:" ++
-- concat [showClause fSpec (makeCjcts (getOpts fSpec) rule) | rule<-invs]++"\n"++
--{-
-- " - ECA rules:"++concat [ "\n\n "++showECA "\n " (eca{ecaAction=normPA (getOpts fSpec) (ecaAction eca)})
-- ++"\n------ Derivation ----->"++showProof (codeBlock . ("\n "++) . showECA "\n ") (proofPA (getOpts fSpec) (ecaAction eca))++"\n<------End Derivation --"
-- | eca<-ecaRs]++"\n\n"++
---}
-- " - Visible relations:\n "++intercalate "\n " (spread 80 ", " [showADL r | r<-vis])++"\n"
-- where
---- showQ i (rel, shs,conj,r)
---- = "\nQuad "++show i++":\nrelation: "++showADL rel++":\nshifts: "++concat ["\n"++showADLe s |s<-shs]++"\nconjunct: "++showADL conj++"\nrule: "++showADL r++""
----TODO: Deze code komt ook voor in ADL2FSpec.hs. Dat lijkt dubbelop, en derhalve niet goed.
-- rels = nub (recur (ifcObj ifc))
-- where recur obj = [editMph (objctx o) | o<-attributes obj, editable (objctx o)]++[r | o<-attributes obj, r<-recur o]
-- vis = nub (rels++map (I . target) rels)
-- -- visible r = r `elem` vis
-- invs = [rule | rule<-invariants fSpec, (not.null) (map makeDeclaration (relsUsedIn rule) `isc` vis)]
-- -- qs = vquads fSpec
-- -- (ecaRs, _) = assembleECAs fSpec (allDecls fSpec)
---- editable (ERel Rel{} _) = True --WHY?? Stef, welke functie is de juiste?? TODO deze functie staat ook in ADL2FSpec.hs, maar is daar ANDERS(!)...
---- editable _ = False
---- editMph (ERel r@Rel{} _) = r --WHY?? Stef, welke functie is de juiste?? TODO deze functie staat ook in ADL2FSpec.hs, maar is daar ANDERS(!)...
---- editMph e = fatal 64 $ "cannot determine an editable declaration in a composite expression: "++show e
-- -- De functie spread verspreidt strings over kolommen met een breedte van n.
-- -- Deze functie garandeert dat alle strings worden afgedrukt in de aangegeven volgorde.
-- -- Hij probeert daarbij zo weinig mogelijk regels te gebruiken,
-- -- en alleen de grens van n te overschrijden als een string zelf langer is dan n.
-- spread :: Int -> String -> [String] -> [String]
-- spread n str = f ""
-- where f stored [] = [stored | not (null stored)]
-- f [] (cs:css) = f cs css
-- f stored (cs:css) | length stored > n = stored: f cs css
-- | length new <= n = f new css
-- | otherwise = stored: f cs css
-- where new = stored++str++cs
-- | Render a confluence analysis for a context: report statistics and list
-- every expression whose derivation ('dfProofs') yields more than one
-- proof, i.e. is not confluent.
testConfluence :: A_Context -> Blocks
testConfluence context
 = let tcss = [(expr,tcs) | expr<-expressionsIn context, let tcs=dfProofs expr, length tcs>1]
       sumt = sum (map (length.snd) tcss)
   in
   para ("Confluence analysis statistics from "<>(str.show.length.expressionsIn) context<>" expressions."<>linebreak)<>
   para ("This script contains "<>linebreak<>(str.show.length) tcss<> " non-confluent expressions "<>linebreak)<>
   para (linebreak<>"Total number of derived expressions: "<>(str.show) sumt<>linebreak)<>
   para ("Confluence analysis for "<>(str.name) context)<>
   mconcat
     [ para (linebreak<>"expression:   "<>(str . showADL) expr<>linebreak)<>
       bulletList [ showProof (para.str.showADL) prf | (_,prf)<-tcs ]
     | (expr,tcs)<-tcss]
-- | Render the full derivation report for a context: the confluence test,
-- each rule with its conjuncts, the quads (step 1), the assembled dnf
-- clauses (step 2) and the resulting ECA rules (step 3, with delta
-- derivations when --verbose is on).
deriveProofs :: Options -> A_Context -> Blocks
deriveProofs opts context
 = testConfluence context<>
   para (linebreak<>"--------------"<>linebreak)<>
   para ("Rules and their conjuncts for "<>(str.name) context)<>
   bulletList [ para ("rule r:   "<>str (showADL r)<>linebreak<>
                      "rrexp r:  "<>str (showADL (rrexp r))<>linebreak<>
                      "conjNF:   "<>str (showADL (conjNF opts (rrexp r)))<>linebreak<>
                      interText linebreak [ "     conj: "<>str (showADL conj) | conj<-conjuncts opts r ]
                     )
              | r<-allRules context]<>
   para ("Transformation of user specified rules into ECA rules for "<>(str.name) context)<>
   para (linebreak<>"--------------"<>linebreak<>"First step: determine the "<>(str.show.length) quads<>" quads:")<>
   bulletList [ para ( "-- quad ------------"<>linebreak<>"When relation "<>(str . showADL . qDcl) q<>" is changed,"
                       <>linebreak<>(str . showADL . qRule) q
                       <>(if (length . qConjuncts) q<=1 then space else " ("<>(str . show . length . qConjuncts) q<>" conjuncts)")
                       <>" must be restored."<>linebreak<>"This quad has conjunct: "<>(str . showADL . rc_conjunct) x
                       <>" and "<>(str.show.length.rc_dnfClauses) x<>" dnf clauses."
                     ) <>
                bulletList [ para (linebreak<>"Dnf clause "<>str (showADL dc)) | dc<-rc_dnfClauses x]
              | q<-quads, x<-qConjuncts q ] <>
   para (linebreak<>linebreak<>"Second step: assemble dnf clauses.") <>
   bulletList [ para ( "Dnf clause "<>str (showADL dc)
                       <>linebreak<>"is derived from rule "<>str (showADL r)
                       <>linebreak
                       <>case ms of
                           []    -> "No relations affect this clause."
                           [rel] -> "It can be called when relation " <>str (showADL rel)<>" is affected."
                           _     -> "It can be called when relations "<>str (commaEng "or" [showADL rel | rel<-ms])<>" are affected."
                     )
              | (ms,dc,r)<-
                  [ (nub [ dcl |(dcl,_,_)<-cl],dc,r)
                  | cl<-eqCl (\(_,_,dc)->dc) [(qDcl q,dc,qRule q) |q<-quads, x<-qConjuncts q, dc<-rc_dnfClauses x]
                  , let (_,dc,r) = head cl
                  ]
              ]<>
   para (linebreak<>"Third step: determine "<>(str.show.length.udefrules) context<>" ECA rules"<>
         if verboseP opts
         then " (Turn --verbose off if you want to see ECA rules only)"
         else " (Turn on --verbose if you want to see more detail)"
        )<>
   ( if verboseP opts then para ( "--------------"<>linebreak)<>bulletList derivations else fromList [] )<>
   bulletList [ para ( "-- ECA Rule "<>(str.show.ecaNum) ecarule<>" ---------")<>
                codeBlock ("\n     "++showECA "\n     " ecarule{ecaAction=normPA opts (ecaAction ecarule)})<>
                bulletList [ para (linebreak<>"delta expression"<>linebreak<>space<>str (showADL d)
                                   <>linebreak<>"derivation:"
                                  )<>
                             (showProof (para.str.showADL).dfProof opts) d<> -- Produces its result in disjunctive normal form
                             para ("disjunctly normalized delta expression"<>linebreak<>(str.showADL.disjNF opts) d)
                           | verboseP opts, e@Do{}<-[ecaAction ecarule], let d = paDelta e ]
              | ecarule <- ecaRs]
{-
   ++
   [ linebreak<>"--------------", linebreak, linebreak<>"Fourth step: cascade blocking rules"
   , linebreak
   ]++
   interText []
   [ [linebreak<>"-- Raw ECA rule "<>(str.show.ecaNum) er<>"------------"<>linebreak<>str (showECA "\n  " er)]
   | er<- ecaRs]
   ++
   [ linebreak<>"--------------", linebreak, linebreak<>"Fifth step: preEmpt the rules (= optimize)"
   , linebreak
   ]++
{- TODO: readdress preEmpt. It is wrong
   interText []
   [ [linebreak<>"-- Preempted ECA rule "<>(str.show.ecaNum) er<>"------------"<>linebreak<>str (showECA "\n  " er)]
   | er<- preEmpt opts ecaRs]
   ++ -}
{-
   [ linebreak<>"--------------", linebreak]
   ++ -- TODO: make an ontological analysis, which explains the delete behaviour.
   [ Str "Ontological analysis: ", linebreak<>"  "]
   ++
   interText [linebreak, linebreak<>"  "]
   [ [Str (name ifc)<>"("]
     ++ interText ", "
        [str (name a)<>"["<>(str.name.target.ctx) a<>"]"
        |a<-attributes (ifcObj ifc)]
     ++ [Str "):", linebreak<>"  "]
   | ifc<-interfaceS fSpec]
   ++
   [ linebreak<>"--------------", linebreak
   <>"Analyzing interfaces:", linebreak<>"     "]
   ++
   interText [linebreak<>"     "]
   [[Str (testInterface fSpec ifc)]
   | ifc<-take 1 (interfaceG fSpec)]
   ++
   [ linebreak<>"--------------", linebreak]
-}
-}
   where
    -- visible _ = True -- We take all quads into account.
    quads = quadsOfRules opts (allRules context) -- the quads that are derived for this fSpec specify dnf clauses, meant to maintain rule r, to be called when relation rel is affected (rel is in r).
    -- Join a list of document fragments with a separator, like intercalate.
    -- interText :: (Data.String.IsString a, Data.Monoid.Monoid a) => a -> [a] -> a
    interText _ [] = ""
    interText inbetween (xs:xss) = xs<>inbetween<>interText inbetween xss
    derivations :: [Blocks]
    ecaRs :: [ECArule]
    (ecaRs, derivations) = assembleECAs opts context (relsDefdIn context)
-- Suppose we perform an action a, which is one (1) of the following two:
--  {r} INS rel INTO expr {r'}   or
--  {r} DEL rel FROM expr {r'}
-- Then checkMono tests whether r |- r' holds, based on the derivation produced by derivMono.
-- If that holds, invariant r remains true whenever action a is performed.
checkMono :: Options
          -> Expression
          -> InsDel
          -> Declaration
          -> Bool
checkMono opts expr ev dcl
  = case ruleType conclusion of
     Truth -> fatal 247 "derivMono came up with a Truth!"
     _     -> simplify expr == simplify (antecedent conclusion) &&
              simplify (subst (dcl, actSem opts ev (EDcD dcl) (delta (sign dcl))) expr) ==
              simplify (consequent conclusion)
    -- The conclusion is the final step of the monotonicity derivation.
    where (conclusion,_,_) = last (derivMono expr ev dcl)
-- | A proof is a sequence of steps; each step carries the expression reached,
--   the motivations ("reasons") for the step, and the relator ("==>", "<=>",
--   "<--", ...) connecting it to the next step.
type Proof expr = [(expr,[String],String)]
-- | Turn a proof transcript around: the steps appear in reverse order and
--   every directional relator is mirrored ("==>" becomes "<==" etc.).
--   The annotations of each step travel along with the flipped relator.
reversePrf :: [(e,[String],String)] -> [(e,[String],String)]
reversePrf proof =
  case proof of
    []       -> []
    [single] -> [single]
    (x, notes, rel) : rest@(_:_) ->
      let reversedRest = reversePrf rest
          (y, _, _)    = head rest
      in init reversedRest ++ [(y, notes, flipRel rel), (x, [], "")]
  where
    -- Mirror a directional relator; symmetric relators pass through unchanged.
    flipRel "==>" = "<=="
    flipRel "<==" = "==>"
    flipRel "-->" = "<--"
    flipRel "<--" = "-->"
    flipRel other = other
-- | Render a proof as Pandoc blocks: each expression is rendered by the given
--   function, interleaved with the relator and motivation of every step.
showProof :: (expr->Blocks) -> Proof expr -> Blocks
showProof render proof =
  case proof of
    []              -> fromList []
    [(e, notes, _)] -> render e <> para (str (" { " ++ intercalate " and " notes ++ " }"))
    (e, notes, rel) : rest ->
      render e <> para (stepLine notes rel) <> showProof render rest
  where
    -- One line describing a proof step: the relator, the motivation, or both.
    stepLine notes rel
      | null notes = str rel
      | null rel   = str (unwords notes)
      | otherwise  = str rel <> str (" { " ++ intercalate " and " notes ++ " }")
-- | Render a proof as plain text lines: each expression on its own (indented)
--   line, with relator/motivation lines in between.
showPrf :: (expr->String) -> [(expr,[String],String)] -> [String]
showPrf render proof =
  case proof of
    []          -> []
    [(e, _, _)] -> [" " ++ render e]
    (e, notes, rel) : rest ->
      (" " ++ render e) : stepLines notes rel ++ showPrf render rest
  where
    -- The line between two expressions: relator, motivation, or both combined.
    stepLines notes rel
      | null notes = [rel]
      | null rel   = [unwords notes]
      | otherwise  = [rel ++ " { " ++ intercalate " and " notes ++ " }"]
-- derivMono provides a derivation to prove that (precondition) r is a subset of (postcondition) r'.
-- This is useful in proving that an action {expr} a {expr'} maintains its invariant, i.e. that expr|-expr' holds (proven by monotony properties)
-- Derivmono gives a derivation only.
derivMono :: Expression -> InsDel -> Declaration -> [(Rule, [String], String)]
derivMono expr -- precondition of action a
          tOp  -- the action (Ins or Del)
          dcl' -- the relation, such that the action consists of INSERT rel' INTO expr or DELETE rel' FROM expr
 = f (head (lambda tOp (EDcD dcl') expr++[[]])) (start tOp)
 where
  -- f folds the lambda-derivation into proof steps: each step pairs a Rule,
  -- built from the current (negative, positive) substitution pair, with its
  -- motivation and the relator towards the next step.
  f :: [(Expression, [String], whatever)]
    -> (Expression, Expression)
    -> [(Rule, [String], String)]
  f [] (_,_) = []
  f [(e',_,_)] (neg',pos')
    = [(rule (subst (dcl',neg') e') (subst (dcl',pos') e'),[],"")]
  -- An "invert" step flips direction: continue with the substitution pair swapped.
  f ((e',["invert"],_): prf@((_,_,_):_)) (neg',pos')
    = (rule (subst (dcl',neg') e') (subst (dcl',pos') e'),["r |- s <=> s- |- r-"],"<=>"):
      f prf (pos',neg')
  -- Any other step is justified by monotony of the next expression's operator.
  f ((e1,_,_): prf@((e2,_,_):_)) (neg',pos')
    = (rule (subst (dcl',neg') e1) (subst (dcl',pos') e1),["Monotony of "++showOp e2],"==>"):
      f prf (neg',pos')
  -- Initial (negative, positive) pair for the action: inserting delta grows
  -- the relation, deleting delta shrinks it.
  start Ins = (EDcD dcl',EDcD dcl' .\/. delta (sign dcl'))
  start Del = (EDcD dcl' ./\. notCpl (delta (sign dcl')),EDcD dcl')
  -- rule builds an ad-hoc Rule "neg' |- pos'" (or just pos' if neg' is a tautology).
  rule :: Expression -> Expression -> Rule
  rule neg' pos' | isTrue neg' = Ru { rrnm = ""
                                    , rrfps = Origin "rule generated for isTrue neg' by Calc"
                                    , rrexp = pos'
                                    , rrmean = AMeaning
                                               [A_Markup Dutch ReST (string2Blocks ReST "Waarom wordt deze regel hier aangemaakt? (In Calc.hs, regel 402)")
                                               ,A_Markup English ReST (string2Blocks ReST "Why is this rule created? (In Calc.hs, line 403)")] --TODO Stef, please complete or remove these explanations. Thanks! Han.
                                    , rrmsg = []
                                    , rrviol = Nothing
                                    , rrtyp = sign neg' {- (neg `meet` pos) -}
                                    , rrdcl = Nothing
                                    , r_env = ""
                                    , r_usr = Multiplicity
                                    , isSignal = fatal 336 $ "erroneous reference to isSignal in rule ("++showADL neg'++") |- ("++showADL pos'++")"
                                    }
                 | otherwise   = Ru { rrnm = ""
                                    , rrfps = Origin "rule generated for not(isTrue neg') by Calc"
                                    , rrexp = neg' .|-. pos'
                                    , rrmean = AMeaning
                                               [A_Markup Dutch ReST (string2Blocks ReST "Waarom wordt deze regel hier aangemaakt? (In Calc.hs, regel 332)")
                                               ,A_Markup English ReST (string2Blocks ReST "Why is this rule created? (In Calc.hs, line 333)")] --TODO Stef, please complete or remove these explanations. Thanks! Han.
                                    , rrmsg = []
                                    , rrviol = Nothing
                                    , rrtyp = sign neg' {- (neg `meet` pos) -}
                                    , rrdcl = Nothing
                                    , r_env = ""
                                    , r_usr = Multiplicity
                                    , isSignal = fatal 352 $ "illegal reference to isSignal in rule ("++showADL neg'++") |- ("++showADL pos'++")"
                                    }
  -- showOp renders the top-level operator of an expression, used in the
  -- "Monotony of <op>" motivations above.
  showOp expr' = case expr' of
                  EEqu{} -> "="
                  EImp{} -> "|-"
                  EIsc{} -> "/\\"
                  EUni{} -> "\\/"
                  EDif{} -> "-"
                  ELrs{} -> "/"
                  ERrs{} -> "\\"
                  EDia{} -> "<>"
                  ECps{} -> ";"
                  ERad{} -> "!"
                  EPrd{} -> "*"
                  EKl0{} -> "*"
                  EKl1{} -> "+"
                  EFlp{} -> "~"
                  ECpl{} -> "-"
                  _      -> ""
{- The purpose of function lambda is to generate a derivation.
Rewrite rules:
-r;-s -> -(r!s)
-}
lambda :: InsDel -> Expression
                 -> Expression
                 -> [Proof Expression]
-- Each raw derivation step carries a rebuild function (dropped here); lambda
-- keeps only (expression, motivation, relator) and reverses the transcript.
lambda tOp' e' expr' = [reversePrf [(e'',txt,op)
                                   | (e'',_,txt,op)<-prf]
                       | prf<-lam tOp' e' expr' ]
 where
  -- lam produces raw derivations; each step also carries a function that
  -- rebuilds the surrounding expression (used e.g. when applying De Morgan).
  lam :: InsDel -> Expression -> Expression ->
         [[(Expression,Expression -> Expression,[String],String)]]
  lam tOp e3 expr =
    case expr of
      -- Intersections/unions: either the target e3 is reached, or constant
      -- parts are stripped (mono), or De Morgan is applied when all operands
      -- are negative, or we recurse into the variable parts.
      EIsc{} | e3==expr -> [[(e3,id,[],"")]]
             | length (const' expr)>0 -> [(expr,\_->expr, [derivtext tOp "mono" (inter' expr) expr],"<--") :prf
                                         | prf<-lam tOp e3 (inter' expr)
                                         ]
             | and [isNeg f |f<-exprIsc2list expr]
                         -> let deMrg = deMorganEIsc expr in
                            [(expr, deMorganEIsc, [derivtext tOp "equal" deMrg expr],"==") :prf | prf<-lam tOp e3 deMrg]
             | or [null p |p<-fPrfs] -> []
             | otherwise -> [(expr,\_->expr, [derivtext tOp "mono" (first lc) expr],"<--") : lc]
      EUni{} | e3==expr -> [[(e3,id,[],"")]]
             | length (const' expr)>0 -> [(expr,\_->expr, [derivtext tOp "mono" (inter' expr) expr],"<--") :prf
                                         | prf<-lam tOp e3 (inter' expr)
                                         ]
             | and [isNeg f |f<-exprUni2list expr]
                         -> let deMrg = deMorganEUni expr in
                            [(expr, deMorganEUni, [derivtext tOp "equal" deMrg expr],"==") :prf | prf<-lam tOp e3 deMrg]
             | or [null p |p<-fPrfs] -> []
             | otherwise -> [(expr,\_->expr, [derivtext tOp "mono" (first lc) expr],"<--") : lc]
      ECps{} | e3==expr -> [[(e3,id,[],"")]]
             | and [isNeg f |f<-exprCps2list expr]
                         -> let deMrg = deMorganECps expr in
                            [(expr, deMorganECps, [derivtext tOp "equal" deMrg expr],"==")
                             :prf
                            | prf<-lam tOp e3 deMrg
                            ] -- isNeg is not yet entirely correct.
             | or [null p|p<-fPrfs] -> []
             | otherwise -> [(expr,\_->expr, [derivtext tOp "mono" (first lc) expr],"<--"): lc]
      ERad{} | e3==expr -> [[(e3,id,[],"")]]
             | and [isNeg f |f<-exprRad2list expr]
                         -> let deMrg = deMorganERad expr in
                            [(expr, deMorganERad, [derivtext tOp "equal" deMrg expr],"==") :prf | prf<-lam tOp e3 deMrg] -- isNeg is not yet entirely correct.
             | or [null p |p<-fPrfs] -> []
             | otherwise -> [(expr,\_->expr, [derivtext tOp "mono" (first lc) expr],"<--"): lc]
      -- Closures are monotonous: recurse into the operand.
      EKl0 x -> [(expr,\e->EKl0 e,[derivtext tOp "mono" x expr],"<--") :prf | prf<-lam tOp e3 x]
      EKl1 x -> [(expr,\e->EKl1 e,[derivtext tOp "mono" x expr],"<--") :prf | prf<-lam tOp e3 x]
      -- Complement inverts the action (Ins <-> Del).
      ECpl x -> [(expr,\e->ECpl e,["invert"],"<--") :prf | prf<-lam (inv tOp) e3 x]
      EBrk x -> lam tOp e3 x
      _ -> [[(e3,id,[],"")]]
     where
       sgn = sign expr
       -- Sub-derivations for each operand that actually mentions e3.
       fPrfs = case expr of
                 EUni{} -> [lam tOp e3 f |f<-exprUni2list expr, isVar f e3]
                 EIsc{} -> [lam tOp e3 f |f<-exprIsc2list expr, isVar f e3]
                 ECps{} -> [lam tOp e3 f |f<-exprCps2list expr, isVar f e3]
                 ERad{} -> [lam tOp e3 f |f<-exprRad2list expr, isVar f e3]
                 _      -> fatal 428 ("fPrfs is not defined.Consult your dealer!")
       -- lc: the longest common prefix of the sub-derivations, extended with
       -- the longest of the remainders.
       lc = longstcomn vars++concat (drop (length rc-1) (sortWith length rc))
       rc = remainders vars vars
       vars = map head fPrfs
       -- const': the operands that do NOT mention e3; inter': those that do,
       -- recombined with the appropriate neutral element.
       const' e@EUni{} = [f |f<-exprUni2list e, isConst f e3]
       const' e@EIsc{} = [f |f<-exprIsc2list e, isConst f e3]
       const' expr'' = fatal 440 $ "'const'("++ show expr''++")' is not defined.Consult your dealer!"
       inter' e@EUni{} = foldr (.\/.) (notCpl (EDcV sgn)) [f |f<-exprUni2list e, isVar f e3]
       inter' e@EIsc{} = if and [sgn==sign f | f<-exprIsc2list e, isVar f e3]
                         then foldr (./\.) (EDcV sgn) [f | f<-exprIsc2list e, isVar f e3]
                         else fatal 532 ("signature error in inter' "++show [(showADL f,showSign (sign f)) | f<-exprIsc2list e, isVar f e3])
       inter' expr'' = fatal 443 $ "'inter'("++ show expr''++")' is not defined.Consult your dealer!"
--     lam tOp e f = []
-- longstcomn determines the longest prefix common to all xs in xss.
-- "Common" compares only the first tuple component (via eqCl on first).
longstcomn :: (Eq a) => [[(a, b, c, d)]] -> [(a, b, c, d)]
longstcomn proofs
  | any null proofs                 = []
  | length (eqCl first proofs) == 1 = head (map head proofs) : longstcomn (map tail proofs)
  | otherwise                       = []
-- remainders determines the remainders: what is left of each derivation once
-- the common prefix (per longstcomn) has been stripped.
remainders :: (Eq a) => [[(a, b, c, d)]] -> [[(a, b, c, d)]] -> [[(a, b, c, d)]]
remainders _ proofs
  | any null proofs                 = proofs
  | length (eqCl first proofs) == 1 = remainders proofs (map tail proofs)
  | otherwise                       = proofs
-- isConst: the two structures share no relations, so the first is "constant"
-- with respect to the second.
isConst :: (ConceptStructure a, ConceptStructure b) => a->b->Bool
isConst x y = null (relsUsedIn x `isc` relsUsedIn y)

-- isVar: the first structure mentions at least one relation of the second.
isVar :: (ConceptStructure a, ConceptStructure b) => a->b->Bool
isVar x y = not (isConst x y)
-- Human-readable justification text for one derivation step, keyed by a tag.
derivtext :: InsDel -> String -> Expression -> Expression -> String
derivtext tOp tag sub whole =
  case tag of
    "invert" -> concat [sh tOp, showADL sub, " means ", sh (inv tOp), showADL whole, "."]
    "mono"   -> concat [ "(", showADL sub, "->", showADL whole
                       , ") is monotonous, so ", sh tOp, showADL sub
                       , " means ", sh tOp, showADL whole, "." ]
    other    -> other
-- sh renders the action as an English phrase for use inside derivation texts.
sh :: InsDel -> String
sh action = case action of
              Ins -> "insert into "
              Del -> "delete from "

-- inv gives the opposite action (complement flips Ins and Del).
inv :: InsDel -> InsDel
inv action = case action of
               Ins -> Del
               Del -> Ins
-- first projects the expression out of the head of a raw derivation.
first :: [(a,b,c,d)] -> a
first steps =
  case steps of
    (e, _, _, _) : _ -> e
    _                -> fatal 472 "wrong pattern in first"
-- Classify a rule by the top-level operator of its expression.
ruleType :: Rule -> RuleType
ruleType r = classify (rrexp r)
  where
    classify EEqu{} = Equivalence
    classify EImp{} = Implication
    classify _      = Truth
-- | Action semantics for inserting a delta into a relation dcl.
--   Inserting takes the union with the delta (disjunctive normal form);
--   deleting intersects with the delta's complement (conjunctive normal
--   form).  A signature mismatch between relation and delta is fatal.
actSem :: Options -> InsDel -> Expression -> Expression -> Expression
actSem opts Ins rel dlt
  | sign rel /= sign dlt = fatal 598 "Type error in actSem Ins"
  | rel == dlt           = rel
  | otherwise            = disjNF opts (rel .\/. dlt)
actSem opts Del rel dlt
  | sign rel /= sign dlt = fatal 598 "Type error in actSem Del"
  | rel == dlt           = notCpl (EDcV (sign rel))
  | otherwise            = conjNF opts (rel ./\. notCpl dlt)
-- | assembleECAs assembles larger chunks of code, because it combines acts that are triggered by the same event.
assembleECAs :: Options -> A_Context -> [Declaration] -> ([ECArule],[Blocks])
assembleECAs options context editables
 = unzip [eca i | (eca,i) <- zip ecas [(1::Int)..]]
 where
  -- Each element of ecas maps a rule number to the finished ECA rule plus the
  -- Pandoc blocks that document its derivation.
  ecas :: [Int->(ECArule,Blocks)]
  ecas
   = [ (\ruleNr->( ECA ecaEvt delt normEcaAct ruleNr
                 , para ("Let us analyse what happens "<>str (show (On ev rel))<>".")<>
                   bulletList [ txt | (_,_,_,txt)<-acts]<>
                   ( if length ecaProof>1
                     then para ("The resulting action is:\n ")<>
                          showProof (codeBlock . ("\n "++) . showECA "\n ") ecaProof
                     else fromList []
                   )<>
                   para ("These results lead to the following ECA-rule:\n ")<>
                   (codeBlock . ("\n "++) . showECA "\n ".ecaRule) ruleNr
                 )
       )
     -- One candidate ECA rule per editable relation and per event kind.
     | rel <- editables -- allDecls fSpec ++ [ Isn c | c<-allConcepts fSpec, c/=ONE] -- This is the relation in which a delta is being inserted or deleted.
     -- , let relEq = [ q | q<-vquads fSpec, qDcl q==rel] -- Gather the quads with the same declaration (qDcl). A quad has a declaration (qDcl), a rule (qRule) and clauses qConjuncts
     , let EDcD delt = delta (sign rel) -- delt is a placeholder for the pairs that have been inserted or deleted in rel.
     , ev<-[Ins,Del]
     , let acts = [ -- go through all the events that affect that clause:
                    ( normPA options act -- a normalized action for this event-conjunct combination
                    , conjunct -- the conjunct
                    , map snd conjEqClass -- the rule-expression of which conjunct is a part
                    , para ("Let us analyse clause "<>str (showADL expr)<>" from rule "<>commaEngPandoc' "and" (map (singleQuoted.str.name.snd) conjEqClass)<>".")<>
                      para ("event = "<>str (show ev)<>space<>str (showREL rel)<>" means doing the following substitution")<>
                      para (str (showADL clause<>"["<>showREL rel<>":="<>showADL (actSem options ev (EDcD rel) (delta (sign rel)))<>"] = clause'"))<>
                      para ("clause' = "<>str (showADL ex')<>
                            if clause'==ex'
                            then ", which is already in conjunctive normal form."<>linebreak
                            else ", which has conjunctive normal form: "<>linebreak<>str (showADL clause')
                           )<>
                      para ("Let us compute the violations to see whether invariance is maintained."<>linebreak<>
                            "This means to negate the result (notClau = notCpl clause'): ")<>
                      (showProof (para.str.showADL). cfProof options) notClau<>
                      para ("So, notClau has CNF: "<>str (showADL viols )<>linebreak<>
                            ( if viols==viols'
                              then "This expression is in disjunctive normal form as well."
                              else str ("In DNF, notClau is: "<>showADL viols'<>".")))<>
                      ( if isTrue clause'
                        then para ("This result proves the absence of violations, so a reaction of doing nothing is appropriate."<>linebreak
                                   <>"Just for fun, let us try to derive whether clause |- clause' is true... ")<>
                             (showProof (para.str.showADL). cfProof options) (expr .|-. clause')
                        else para ("This result does not prove the absence of violations, so we cannot conclude that invariance is maintained."<>linebreak<>
                                   "We must compute a reaction to compensate for violations..."<>linebreak<>
                                   "That would be to reinsert violations that originate from "<>
                                   ( if ev==Ins
                                     then str (showADL (conjNF options negs))<>" into "<> str (showADL (disjNF options poss))<>"."
                                     else str (showADL (disjNF options poss))<>" into "<> str (showADL (conjNF options negs))<>"."
                                   )<>linebreak<>"deltFr: ")<>
                             (showProof (para.str.showADL). dfProof options) deltFr<>
                             ( let pr=proofPA options act in
                               if length pr>1
                               then para "Now let us remove redundancy from the ECA action:\n "<>
                                    showProof (codeBlock . ("\n "++) . showECA "\n ") (proofPA options act)
                               else fromList []
                             )
                        {- <> "To finish the analysis of case "<>str (show ev)<>space<>str (showADL rel)
                           <>", let us compute the contents of "<>str (showADL toExpr)<>" after insertion of viols."<>linebreak
                           <>
                           ( if length (nub [sign viols, sign viols', sign toExpr])>1
                             then fatal 248 ("viols"<>showSign (sign viols) <>" "<>showADL viols <>"\n"<>
                                             "viols'"<>showSign (sign viols')<>" "<>showADL viols'<>"\n"<>
                                             "toExpr"<>showSign (sign toExpr)<>" "<>showADL toExpr)
                             else if ev==Ins
                                  then (showProof (para.str.showADL). cfProof options) (viols'.\/.toExpr)<>linebreak
                                  else (showProof (para.str.showADL). dfProof options) (notCpl viols./\.toExpr)<>linebreak
                           ) -}
                      )
                    )
                  | conjEqClass <- [] -- TODO: implement this once we can test it (note: computing eq. class is no longer necessary)
                                 -- conjEqClass <- eqCl fst [ (qConjuncts q, qRule q) | q<-relEq ]
                  , conjunct <- (fst.head) conjEqClass -- get conjuncts from the clauses
                  , clause <- rc_dnfClauses conjunct -- the DNF form of each clause
                  , let expr = dnf2expr clause -- Note that this differs from: rc_conjunct conjunct, because the type may be different.
                  , let vee = EDcV (sign expr)
                  , let ex' = subst (rel, actSem options ev (EDcD rel) (delta (sign rel))) expr -- the clause after the edit action
                  , let clause' = conjNF options ex' -- its CNF
                  , not (isTrue clause')
                  , let notClau = notCpl clause' -- the violations after the edit action
                  , let viols = conjNF options notClau -- the violations after the edit action
                  , let viols' = disjNF options notClau -- the violations after the edit action
                  -- negs/poss: antecedents conjoined resp. consequents disjoined,
                  -- each sanity-checked for a single shared signature.
                  , let negs = if (length.nub.map sign) (vee:antcs clause)>1
                               then fatal 265 ("type inconsistencies in antcs: "++show (map showADL (vee:antcs clause)))
                               else foldr (./\.) vee (antcs clause)
                  , let poss = if (length.nub.map sign) (vee:conss clause)>1
                               then fatal 265 ("type inconsistencies in conss: "++show (map showADL (vee:conss clause)))
                               else foldr (.\/.) (notCpl vee) (conss clause)
                  , let frExpr = case ev of
                                  Ins -> disjNF options (notCpl negs)
                                  Del -> disjNF options poss
                  -- deltFr: the pairs that must be (re)inserted to compensate.
                  , let deltFr = if sign poss/=sign negs
                                 then fatal 274 ("type inconsistencies in deltFr: "++showADL clause)
                                 else if ev==Ins
                                      then (subst (rel, actSem options ev (EDcD rel) (delta (sign rel)))) negs ./\. notCpl poss
                                      else (notCpl . subst (rel, actSem options ev (EDcD rel) (delta (sign rel)))) poss ./\. negs
                  , let deltFr' = disjNF options deltFr
                  , rel `elem` relsMentionedIn frExpr
                  , let toExpr = if ev==Ins
                                 then disjNF options poss
                                 else disjNF options (notCpl negs)
                  , let visible r = r `elem` editables
                  , if length (nub (map sign [toExpr, deltFr', expr]))>1
                    then fatal 285 "type problem"
                    else True
                  , let act = genPAclause visible Ins toExpr deltFr' [(expr, map snd conjEqClass)]
                  ]
     -- The combined action: all conjunct-restoring acts plus the implicit
     -- typing-rule acts for this relation and event.
     , let ecaAct = ALL (map fst4 acts
                         -- The following acts add the implicit rules, which allows the user to add and delete atoms from concepts in a safe way.
                         ++ [act' | (ev',rel',act')<-rulesDecls++rulesGens rel, ev==ev', rel==rel' ]
                        )
                        [ (rc_conjunct conj,ruls) | (_,conj,ruls,_)<-acts] --motivation is of type [(Expression,[Rule])]
     , let normEcaAct = normPA options ecaAct
     , let ecaProof = proofPA options ecaAct
     , let ecaEvt = On ev rel
     , let ecaRule = ECA ecaEvt delt normEcaAct
     ]
  -- the following eca-rules are derived from the typing rules, rather than explicit rules specified by users.
  -- This concerns the following rules:
  -- Each declaration "RELATION r[A*B]" represents the rule "RULE r[A*B] |- V[A*B]"
  -- Besides, for every A, B: "RULE I[A]*I[B] = V[A*B]"
  -- For every "CLASSIFY A ISA B" we get "RULE I[A] |- I[B]"
  -- For every "CLASSIFY A IS B /\ C" we get "RULE I[A] |- I[B]" and "RULE I[A] |- I[C]"
  -- The results of both classify statements are available through vgens.
  -- Finally, for every Atom c a there is a rule "RULE 'a' |- I[c]"
  -- The eca-rules that can be derived from these rules are produced by ecasFromTypes
  {- V[A*B] is implicit, so we don't generate ECA (On Ins rel) delt (Do Ins (Vs (Sign a b)) delt motive).
     Neither do we generate ECA (On Del (Vs (Sign a b))) delt (Do Del rel delt motive).
     However, we do generate: ECA (On Ins rel) delt (Do Ins (Isn a) ((dlt.<>.flp dlt).-.EDcI a) motive)
     and: ECA (On Ins rel) delt (Do Ins (Isn b) ((flp dlt.<>.dlt).-.EDcI b) motive),
     because the delta to be inserted might contain new atoms.
     Similarly, upon deletion of an atom from a concept we must delete the appropriate links from relations that share this concept.
     So, we generate ECA (On Del (Isn a)) delt (Do Del rel (delt.:.V[A*B]) motive)
     and: ECA (On Del (Isn b)) delt (Do Del rel (V[A*B].:.delt) motive)
     ECA-rules that are derivable from relation declarations are generated by rulesDecls
  -}
  rulesDecls :: [(InsDel, Declaration, PAclause)]
  rulesDecls
   = concat
     [ [ (Ins, rel, Do Ins (Isn a) ((dlt.:.flp dlt ./\. EDcI a).-.EDcI a) [])
       , (Ins, rel, Do Ins (Isn b) ((flp dlt.:.dlt ./\. EDcI b).-.EDcI b) [])
       , (Del, Isn a, Do Del rel (delta (Sign a a).:.vee) [])
       , (Del, Isn b, Do Del rel (vee.:.delta (Sign b b)) [])
       ]
     | rel <- relsDefdIn context
     , let dlt = delta (sign rel)
     , let a=source rel, let b=target rel
     , let vee = (EDcV . sign) rel
     ]
  {- We generate:
     for every "RULE I[A] |- I[B]": ECA (On Ins (Isn a)) delt (Do Ins (Isn b) (EDcD delt) motive)
     and: ECA (On Del (Isn b)) delt (Do Del (Isn a) (EDcD delt) motive)
     for every "RULE 'a' |- I[c]": ECA (On Del (Isn c)) ('a','a') (Blk motive)
  -}
  rulesGens :: Declaration -> [(InsDel, Declaration, PAclause)]
  rulesGens rel
   = concat
     [ [ (Ins, Isn s, Do Ins (Isn g) dlt [])
       , (Del, Isn g, Do Del (Isn s) dlt [])
       ]
     | let dlt = delta (sign rel), (s,g) <- concatMap genericAndSpecifics (gens context)
     ]
  -- Project the normalized action out of an act quadruple.
  fst4 (x,_,_,_) = x
-- | The function genPAclause describes the principal ways to process an expression delta' into expr (with tOp'==Ins or tOp==Del)
-- TODO: Find a scientific paper in which the transformation described here is treated exhaustively.
-- TODO: This code is incomplete and maybe even wrong....
genPAclause :: (Declaration->Bool) -- ^True if a relation may be changed (i.e. is editable)
               -> InsDel           -- ^the type of action: Insert or Delete
               -> Expression       -- ^the expression in which a delete or insert takes place
               -> Expression       -- ^the delta to be inserted or deleted
               -> [(Expression,[Rule])] -- ^the motivation, consisting of the conjuncts (traced back to their rules) that are being restored by this code fragment.
               -> PAclause
genPAclause editAble tOp' expr1 delta1 motive = genPAcl delta1 tOp' expr1
 where
  -- Sanity check on signatures before composing deltas: returns id when the
  -- types line up, aborts with a fatal error (tagged with line number i) otherwise.
  testPA i l r ex
   = if (source l,target r)/=(source ex,target ex)
     then fatal i ("test with sign deltaX = ["++show (source l)++"*"++show (target r)++"], and sign expr = "++show (sign ex)++":\ndeltaX = "++showADL (l.:.r)++"\nexpr = "++show ex)
     else if source r/=target l
          then fatal i ("test with source r = "++show (source r)++", and target l = "++show (target l)++":\nl"++showSign (sign l)++" = "++showADL l++"\nr"++showSign (sign r)++" = "++showADL r++"\nexpr = "++show ex)
          else id
  -- genPAcl recursively decomposes expr, pushing the delta down to editable
  -- relations; each case handles one relation-algebra operator.  CHC offers a
  -- choice of sub-plans, ALL requires all of them, GCH is a guarded choice,
  -- Blk blocks the event, Nop does nothing.
  genPAcl deltaX tOp expr =
   case (tOp, expr) of
     (_ , EEqu{}) -> Blk [(expr, nub [r |(_,rs)<-motive, r<-rs])]
     (_ , EImp{}) -> Blk [(expr, nub [r |(_,rs)<-motive, r<-rs])]
     (_ , EFlp x) -> genPAcl (flp deltaX) tOp x
     (_ , EBrk x) -> genPAcl deltaX tOp x
     -- Complement inverts the action.
     (Ins, ECpl x) -> genPAcl deltaX Del x
     (Del, ECpl x) -> genPAcl deltaX Ins x
     (Ins, EUni{}) -> CHC [ genPAcl deltaX Ins f | f<-exprUni2list expr{-, not (f==expr1 && Ins/=tOp') -}] motive -- the filter prevents self compensating PA-clauses.
     (Ins, EIsc{}) -> ALL [ genPAcl deltaX Ins f | f<-exprIsc2list expr ] motive
     (Del, EUni{}) -> ALL [ genPAcl deltaX Del f | f<-exprUni2list expr {-, not (f==expr1 && Del/=tOp') -}] motive -- the filter prevents self compensating PA-clauses.
     (Del, EIsc{}) -> CHC [ genPAcl deltaX Del f | f<-exprIsc2list expr ] motive
     (Ins, EDif (l,r)) -> CHC [ genPAcl deltaX Ins l, genPAcl deltaX Del r ] motive
     (Del, EDif (l,r)) -> CHC [ genPAcl deltaX Del l, genPAcl deltaX Ins r ] motive
     (Ins, EDia (l,r)) -> CHC [ ALL [ genPAcl (testPA 986 (deltaX) (flp r) l $ deltaX.:.flp r ) Ins l
                                    , genPAcl (testPA 987 (flp l) (deltaX) r $ flp l.:.deltaX ) Ins r] motive
                              , ALL [ genPAcl (testPA 988 (deltaX) (notCpl (flp r)) l $ deltaX.:.notCpl (flp r)) Del l
                                    , genPAcl (testPA 989 (deltaX) (flp r) l $ deltaX.:.flp r ) Ins l] motive
                              , ALL [ genPAcl (testPA 990 (notCpl (flp l)) (deltaX) r $ notCpl (flp l).:.deltaX) Del r
                                    , genPAcl (testPA 991 (flp l) (deltaX) r $ flp l.:.deltaX ) Ins r] motive
                              , ALL [ genPAcl (testPA 992 (deltaX) (notCpl (flp r)) l $ deltaX.:.notCpl (flp r)) Del l
                                    , genPAcl (testPA 993 (notCpl (flp l)) (deltaX) r $ notCpl (flp l).:.deltaX) Del r] motive
                              ] motive
     (Del, EDia (l,r)) -> GCH [ (Del, (testPA 995 (deltaX) (flp r) l $ deltaX.:.flp r), genPAcl (EMp1 "a" (source l).*.EMp1 "b" (target l)) tOp l)
                              , (Ins, (testPA 996 (deltaX) (flp (notCpl r)) l $ deltaX.:.flp (notCpl r)), genPAcl (EMp1 "a" (source l).*.EMp1 "b" (target l)) tOp l)
                              , (Del, (testPA 997 (flp l) (deltaX) r $ flp l.:.deltaX), genPAcl (EMp1 "a" (source r).*.EMp1 "b" (target r)) tOp r)
                              , (Ins, (testPA 998 (notCpl (flp l)) (deltaX) r $ notCpl (flp l).:.deltaX), genPAcl (EMp1 "a" (source r).*.EMp1 "b" (target r)) tOp r)
                              ] motive
     (Ins, ERrs (l,r)) -> CHC [ genPAcl (testPA 1000 (notCpl r) (flp deltaX) l $ notCpl r.:.flp deltaX) Del l
                              , genPAcl (testPA 1001 (l) (deltaX) r $ l.:.deltaX) Ins r
                              ] motive
     (Del, ERrs (l,r)) -> GCH [ (Ins, (testPA 1003 (notCpl r) (flp deltaX) l $ notCpl r.:.flp deltaX), genPAcl (EMp1 "a" (source l).*.EMp1 "b" (target l)) tOp l)
                              , (Del, (testPA 1004 (l) (deltaX) r $ l.:.deltaX), genPAcl (EMp1 "a" (source r).*.EMp1 "b" (target r)) tOp r)
                              ] motive
     (Ins, ELrs (l,r)) -> CHC [ genPAcl (testPA 1006 (flp deltaX) (notCpl l) r $ flp deltaX.:.notCpl l) Del r
                              , genPAcl (testPA 1007 (deltaX) (r) l $ deltaX.:.r ) Ins l
                              ] motive
     (Del, ELrs (l,r)) -> GCH [ (Ins, (testPA 1009 (flp deltaX) (notCpl l) r $ flp deltaX.:.notCpl l), genPAcl (EMp1 "a" (source r).*.EMp1 "b" (target r)) tOp r)
                              , (Del, (testPA 1010 (deltaX) (r) l $ deltaX.:.r), genPAcl (EMp1 "a" (source l).*.EMp1 "b" (target l)) tOp l)
                              ] motive
     -- Inserting into a composition: either link existing atoms on both sides,
     -- or create a fresh intermediate atom (New) and link through it.
     (Ins, ECps (l,r)) -> CHC [ GCH [ (Ins, (testPA 1012 (deltaX) (flp r) l $ deltaX.:.flp r), genPAcl (EMp1 "a" (source l).*.EMp1 "b" (target l)) tOp l)
                                    , (Ins, (testPA 1013 (flp l) (deltaX) r $ flp l.:.deltaX), genPAcl (EMp1 "a" (source r).*.EMp1 "b" (target r)) tOp r)
                                    ] motive
                              , New (source r) (\x->ALL [ genPAcl (deltaX.*.EMp1 x (target l)) Ins l
                                                        , genPAcl (EMp1 x (source r).*.deltaX) Ins r] motive) motive
                              ] motive
     (Del, ECps (l,r)) -> CHC [ genPAcl (testPA 1018 (deltaX) (flp r) l $ deltaX.:.flp r) Del l
                              , genPAcl (testPA 1019 (flp l) (deltaX) r $ flp l.:.deltaX) Del r
                              ] motive
     (Ins, ERad (l,r)) -> CHC [ genPAcl (testPA 1021 (deltaX) (notCpl (flp r)) l $ deltaX.:.notCpl (flp r)) Ins l
                              , genPAcl (testPA 1022 (notCpl (flp l)) (deltaX) r $ notCpl (flp l).:.deltaX) Ins r
                              ] motive
     (Del, ERad (l,r)) -> CHC [ GCH [ (Del, (testPA 1024 (deltaX) (flp r) l $ deltaX.:.flp r), genPAcl (EMp1 "a" (source l).*.EMp1 "b" (target l)) tOp l)
                                    , (Del, (testPA 1025 (flp l) (deltaX) r $ flp l.:.deltaX), genPAcl (EMp1 "a" (source r).*.EMp1 "b" (target r)) tOp r)
                                    ] motive
                              , New (source r) (\_->Nop motive) motive
                              ] motive
     (Ins, EPrd (l,r)) -> ALL [ genPAcl (EDcV (Sign ONE (source deltaX)).:.deltaX) Ins (EDcV (Sign ONE (source r)).:.r)
                              , genPAcl (deltaX.:.EDcV (Sign (target deltaX) ONE)) Ins (l.:.EDcV (Sign (target l) ONE))
                              ] motive
     (Del, EPrd (l,r)) -> ALL [ genPAcl (EDcV (Sign ONE (source deltaX)).:.deltaX) Del (EDcV (Sign ONE (source r)).:.r)
                              , genPAcl (deltaX.:.EDcV (Sign (target deltaX) ONE)) Del (l.:.EDcV (Sign (target l) ONE))
                              ] motive
     -- Kleene closures: the delta is adapted by (placeholder) deltaK0/deltaK1.
     (_ , EKl0 x ) -> genPAcl (deltaK0 deltaX tOp x) tOp x
     (_ , EKl1 x ) -> genPAcl (deltaK1 deltaX tOp x) tOp x
     -- Base cases: editable relations/identities get a Do action, everything
     -- else blocks (or is a no-op for Eps).
     (_ , EDcD d) -> if editAble d then Do tOp d deltaX motive else Blk [(expr, nub [r |(_,rs)<-motive, r<-rs])]
     (_ , EDcI c) -> if editAble (Isn c) then Do tOp (Isn c) deltaX motive else Blk [(expr, nub [r |(_,rs)<-motive, r<-rs])]
     (_ , EDcV{}) -> Blk [(expr, nub [r |(_,rs)<-motive, r<-rs])]
     (_ , EMp1{}) -> Blk [(expr, nub [r |(_,rs)<-motive, r<-rs])]
     (_ , EEps{}) -> Nop [(expr, nub [r |(_,rs)<-motive, r<-rs])]
{- (_ , _) -> fatal 767 ( "(Stef?) Non-exhaustive patterns in the recursive call\n"
                          ++"doCod ("++showADL deltaX++") -- deltaX\n "++show tOp++" -- tOp\n ("++showADL expr++") -- expr\n"++
                          "within function\ndoCode "++show tOp'++" -- tOp'\n ("++showADL expr1++") -- expr1\n ("++showADL delta1++") -- delta1\n"++
                          concat
                          [ "while trying to maintain conjunct "++showADL conjunct++
                            "\nfrom rule "++intercalate "\n " [show r | r<-rs]
                          | (conjunct,rs)<-motive ] ++
                          if null motive then "null motive" else ""
                        )
-}
-- deltaK0/deltaK1 compute the delta to push through a Kleene star/plus.
-- Both are placeholders: for now the delta is passed through unchanged.
deltaK0 :: t -> InsDel -> t1 -> t
deltaK0 delta' action _ =
  case action of
    Ins -> delta' -- error! (temporary... should compute which pairs must go into x so that delta |- x*)
    Del -> delta' -- error! (temporary... should compute which pairs must leave x so that delta/\x* is empty)

deltaK1 :: t -> InsDel -> t1 -> t
deltaK1 delta' action _ =
  case action of
    Ins -> delta' -- error! (temporary... should compute which pairs must go into x so that delta |- x+)
    Del -> delta' -- error! (temporary... should compute which pairs must leave x so that delta/\x+ is empty)
-- | Join inlines English-style with an Oxford comma before the connective,
--   e.g. \"a, b, and c\".
commaEngPandoc' :: Inlines -> [Inlines] -> Inlines
commaEngPandoc' conn items =
  case items of
    [x, y, z] -> x <> ", " <> y <> ", " <> conn <> space <> z
    [x, y]    -> x <> space <> conn <> space <> y
    [x]       -> x
    (x:rest)  -> x <> ", " <> commaEngPandoc' conn rest
    []        -> mempty
-- | Like 'commaEngPandoc'', but over plain @Inline@ lists (Oxford comma).
commaEngPandoc :: Inline -> [Inline] -> [Inline]
commaEngPandoc conn items =
  case items of
    [x, y, z] -> [x, Str ", ", y, Str ", ", conn, Str " ", z]
    [x, y]    -> [x, Str " ", conn, Str " ", y]
    [x]       -> [x]
    (x:rest)  -> [x, Str ", "] ++ commaEngPandoc conn rest
    []        -> []
-- | Join inlines Dutch-style: commas between items, the connective only
--   before the last item, and no Oxford comma.
commaNLPandoc' :: Inlines -> [Inlines] -> Inlines
commaNLPandoc' conn items =
  case items of
    [x, y]   -> x <> space <> conn <> space <> y
    [x]      -> x
    (x:rest) -> x <> ", " <> commaNLPandoc' conn rest
    []       -> mempty
-- | Like 'commaNLPandoc'', but over plain @Inline@ lists (no Oxford comma).
commaNLPandoc :: Inline -> [Inline] -> [Inline]
commaNLPandoc conn items =
  case items of
    [x, y]   -> [x, Str " ", conn, Str " ", y]
    [x]      -> [x]
    (x:rest) -> [x, Str ", "] ++ commaNLPandoc conn rest
    []       -> []
-- | Pick the list-joining style and the word for "and" for the given
--   language.
--
-- BUG FIX: the original wired Dutch to commaEngPandoc' and English to
-- commaNLPandoc', i.e. the joiners were swapped.  For three or more items
-- this is observable: Dutch lists got the English Oxford-comma layout and
-- English lists lost it.  'commaNLPandoc'' is the Dutch joiner and carries
-- "en"; 'commaEngPandoc'' is the English joiner and carries "and".
commaPandocAnd :: Lang -> [Inlines] -> Inlines
commaPandocAnd Dutch   = commaNLPandoc' "en"
commaPandocAnd English = commaEngPandoc' "and"
-- | Build the quads ("switchboard" entries) for a set of rules.
quadsOfRules :: Options -> [Rule] -> [Quad]
quadsOfRules opts rules = makeAllQuads rulesWithConjs
  where
    -- Conjuncts are computed over the whole rule set and regrouped per rule.
    rulesWithConjs = converse [ (conj, rc_orgRules conj) | conj <- makeAllConjs opts rules ]
-- Quads embody the "switchboard" of rules. A quad represents a "proto-rule" with the following meaning:
-- whenever relation r is affected (i.e. tuples in r are inserted or deleted),
-- the rule may have to be restored using functionality from one of the clauses.
makeAllQuads :: [(Rule, [Conjunct])] -> [Quad]
makeAllQuads = concatMap quadsForRule
  where
    -- One quad per relation used in the rule.
    quadsForRule (rul, conjs) =
      [ Quad { qDcl = d
             , qRule = rul
             , qConjuncts = conjs
             }
      | d <- relsUsedIn rul
      ]
{-
-- If one rule r blocks upon an event, e.g. e@(ON Ins rel), while another ECA rule r'
-- maintains something else with that same event e, we can save r' the trouble.
-- After all, event e will block anyway.
-- preEmpt tries to simplify ECArules by predicting whether a rule will block.
preEmpt :: Options -> [ECArule] -> [ECArule]
preEmpt opts ers = pr [length ers] (10::Int)
where
pr :: [Int] -> Int -> [ECArule]
pr ls n
| n == 0 = fatal 633 $ "too many cascading levels in preEmpt "++show ls
| (not.null) cascaded = pr (length cascaded:ls)
-- ([er{ecaAction=normPA opts (ecaAction er)} | er<-cascaded] ++uncasced)
(n-1)
| otherwise = [er{ecaAction=normPA opts (ecaAction er)} | er<-uncasced]
where
-- preEmpt divides all ECA rules in uncascaded rules and cascaded rules.
-- cascaded rules are those rules that have a Do component with event e, where e is known to block (for some other reason)
new = [er{ecaAction=normPA opts (ecaAction er)} | er<-ers]
cascaded = [er{ecaAction=action'} | er<-new, let (c,action') = cascade (eDcl (ecaTriggr er)) (ecaAction er), c]
uncasced = [er | er<-new, let (c,_) = cascade (eDcl (ecaTriggr er)) (ecaAction er), not c]
-- cascade inserts a block on the place where a Do component exists that matches the blocking event.
-- cascade :: Relation -> PAclause -> (Bool, PAclause)
cascade dcl (Do srt to _ _) | (not.null) blkErs = (True, ecaAction (head blkErs))
where blkErs = [er | er<-ers
, Blk _<-[ecaAction er]
, let t = ecaTriggr er
, eSrt t == srt
, eDcl t == to
, not (dcl ==to)
]
cascade _ c@Do{} = (False, c)
cascade rel (New c clause m) = ((fst.cascade rel.clause) "dummystr", New c (snd.cascade rel.clause) m)
cascade rel (Rmv c clause m) = ((fst.cascade rel.clause) "dummystr", Rmv c (snd.cascade rel.clause) m)
--cascade rel (Sel c e cl m) = ((fst.cascade rel.cl) "dummystr", Sel c e (snd.cascade rel.cl) m)
cascade rel (CHC ds m) = (any (fst.cascade rel) ds, CHC (map (snd.cascade rel) ds) m)
cascade rel (ALL ds m) = (any (fst.cascade rel) ds, ALL (map (snd.cascade rel) ds) m)
cascade _ (Nop m) = (False, Nop m)
cascade _ (Blk m) = (False, Blk m)
cascade _ (Let _ _ _) = fatal 611 "Deze constructor is niet gedefinieerd" -- HJO, 20131205:Toegevoegd om warning te verwijderen
cascade _ (Ref _) = fatal 612 "Deze constructor is niet gedefinieerd" -- HJO, 20131205:Toegevoegd om warning te verwijderen
cascade _ (GCH{}) = fatal 655 "Deze constructor is niet gedefinieerd" -- SJO, 20140428:Toegevoegd om warning te verwijderen
-}
| DanielSchiavini/ampersand | src/Database/Design/Ampersand/FSpec/ToFSpec/Calc.hs | gpl-3.0 | 56,338 | 0 | 33 | 20,488 | 13,447 | 7,033 | 6,414 | 531 | 37 |
-- -*-haskell-*-
-- Vision (for the Voice): an XMMS2 client.
--
-- Author: Oleg Belozeorov
-- Created: 28 Jun. 2010
--
-- Copyright (C) 2010 Oleg Belozeorov
--
-- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License as
-- published by the Free Software Foundation; either version 3 of
-- the License, or (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
--
{-# LANGUAGE Rank2Types #-}
module Location.View
( withView
, locationView
, locationSel
, locationEntry
, locationComp
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import Data.List
import Data.Char
import Data.Maybe
import Graphics.UI.Gtk
import Builder
import Environment
import Location.Model
import Location.PathComp
-- | The widgets that together make up the location browser.
data View
  = View { _view  :: TreeView      -- ^ tree view listing the entries of the current location
         , _sel   :: TreeSelection -- ^ selection of that tree view (multiple selection)
         , _entry :: Entry         -- ^ URL entry
         , _comp  :: PathComp      -- ^ path completion attached to the entry
         }
-- Accessors for the ambient 'View' carried in the implicit parameter
-- @?_Location_View@ (brought into scope by 'withView').
locationView  = _view  ?_Location_View
locationSel   = _sel   ?_Location_View
locationEntry = _entry ?_Location_View
locationComp  = _comp  ?_Location_View
-- | Wrap delays a computation so it can later be run with the implicit
--   @?_Location_View@ parameter supplied by 'withView''.
newtype Wrap a = Wrap { unWrap :: (?_Location_View :: View) => a }

-- | Set up the location view and run the given action with it in scope.
withView = withView' . Wrap

-- withView' builds the View, wires up all widget behaviour, and finally runs
-- the wrapped continuation with the view bound to the implicit parameter.
withView' w = do
  v <- mkView
  let ?_Location_View = v

  treeViewSetModel locationView sortModel
  treeSelectionSetMode locationSel SelectionMultiple

  -- A single column showing an icon (directory/file) and the entry name.
  column <- treeViewColumnNew
  treeViewAppendColumn locationView column
  treeViewColumnSetTitle column "Name"
  -- Clicking the column header toggles the sort order.
  treeViewColumnSetSortOrder column =<< getSortOrder
  treeViewColumnSetSortIndicator column True
  treeViewColumnSetClickable column True
  column `onColClicked` do
    order <- treeViewColumnGetSortOrder column
    let order' = case order of
          SortAscending  -> SortDescending
          SortDescending -> SortAscending
    treeViewColumnSetSortOrder column order'
    setSortOrder order'

  -- Icon renderer: a stock directory or file icon depending on the item.
  cell <- cellRendererPixbufNew
  treeViewColumnPackStart column cell False
  cellLayoutSetAttributeFunc column cell sortModel $ \iter -> do
    item <- itemByIter iter
    cell `set` [ cellPixbufStockId :=
                 if iIsDir item
                 then stockDirectory
                 else stockFile ]

  -- Text renderer: the item name.  (This 'cell' shadows the pixbuf renderer.)
  cell <- cellRendererTextNew
  treeViewColumnPackStart column cell True
  cellLayoutSetAttributeFunc column cell sortModel $ \iter -> do
    item <- itemByIter iter
    cell `set` [ cellText := iName item ]

  -- Interactive search matches case-insensitively anywhere in the name.
  treeViewSetEnableSearch locationView True
  treeViewSetSearchEqualFunc locationView $ Just $ \str iter -> do
    item <- itemByIter iter
    return $ isInfixOf (map toLower str) (map toLower $ iName item)

  -- Path completion for the URL entry, refreshed on every edit.
  entrySetCompletion locationEntry $ pathComp locationComp
  locationEntry `onEditableChanged` do
    url <- entryGetText locationEntry
    updatePathComp locationComp url

  -- Plain Tab (no modifiers) completes the current input, expanding the URL
  -- via makeURL (e.g. "~/" to the home directory) and adjusting the cursor.
  locationEntry `on` keyPressEvent $ tryEvent $ do
    []    <- eventModifier
    "Tab" <- eventKeyName
    liftIO $ do
      (url, modify, ofs) <- makeURL <$> entryGetText locationEntry
      when modify $ do
        pos <- editableGetPosition locationEntry
        entrySetText locationEntry url
        editableSetPosition locationEntry $ pos + ofs
      updatePathComp locationComp url
      entryCompletionInsertPrefix $ pathComp locationComp
      entryCompletionComplete $ pathComp locationComp

  unWrap w
-- | Fetch the widgets from the UI builder and assemble them into a
-- 'View' record (fields in declaration order: view, sel, entry, comp).
mkView = do
  tree <- getObject castToTreeView "location-view"
  selection <- treeViewGetSelection tree
  urlEntry <- getObject castToEntry "location-entry"
  completion <- makePathComp
  return $ View tree selection urlEntry completion
-- | Normalize entry text into a URL.  Returns the (possibly rewritten)
-- text, whether the entry should be replaced with it, and the cursor
-- offset to apply after the rewrite.
makeURL url
  | "://" `isInfixOf` url = (url, False, 0)
  | "~/" `isPrefixOf` url, Just home <- homeDir =
      let prefix = "file://" ++ home
      in (prefix ++ drop 1 url, True, length prefix + length url - 1)
  | otherwise = ("file://" ++ url, True, 7)
| upwawet/vision | src/Location/View.hs | gpl-3.0 | 4,131 | 0 | 18 | 961 | 932 | 466 | 466 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.Users.SetAvailableProductSet
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Modifies the set of products a user is entitled to access.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.users.setAvailableProductSet@.
module Network.Google.Resource.AndroidEnterprise.Users.SetAvailableProductSet
(
-- * REST Resource
UsersSetAvailableProductSetResource
-- * Creating a Request
, usersSetAvailableProductSet
, UsersSetAvailableProductSet
-- * Request Lenses
, usapsEnterpriseId
, usapsPayload
, usapsUserId
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.users.setAvailableProductSet@ method which the
-- 'UsersSetAvailableProductSet' request conforms to.
-- Servant-style route:
-- PUT androidenterprise/v1/enterprises/{enterpriseId}/users/{userId}/availableProductSet
type UsersSetAvailableProductSetResource =
     "androidenterprise" :>
       "v1" :>
         "enterprises" :>
           Capture "enterpriseId" Text :>
             "users" :>
               Capture "userId" Text :>
                 "availableProductSet" :>
                   QueryParam "alt" AltJSON :>
                     ReqBody '[JSON] ProductSet :> Put '[JSON] ProductSet
-- | Modifies the set of products a user is entitled to access.
--
-- /See:/ 'usersSetAvailableProductSet' smart constructor.
data UsersSetAvailableProductSet = UsersSetAvailableProductSet'
    { _usapsEnterpriseId :: !Text       -- ^ enterprise being configured
    , _usapsPayload :: !ProductSet      -- ^ request body (the product set)
    , _usapsUserId :: !Text             -- ^ user whose entitlements change
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UsersSetAvailableProductSet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'usapsEnterpriseId'
--
-- * 'usapsPayload'
--
-- * 'usapsUserId'
-- | Build a 'UsersSetAvailableProductSet' request from its three
-- mandatory pieces; use the lenses to tweak anything afterwards.
usersSetAvailableProductSet
    :: Text -- ^ 'usapsEnterpriseId'
    -> ProductSet -- ^ 'usapsPayload'
    -> Text -- ^ 'usapsUserId'
    -> UsersSetAvailableProductSet
usersSetAvailableProductSet enterpriseId payload userId =
    UsersSetAvailableProductSet'
    { _usapsEnterpriseId = enterpriseId
    , _usapsPayload = payload
    , _usapsUserId = userId
    }
-- Van Laarhoven lenses over the request record, one per field.
-- | The ID of the enterprise.
usapsEnterpriseId :: Lens' UsersSetAvailableProductSet Text
usapsEnterpriseId
  = lens _usapsEnterpriseId
      (\ s a -> s{_usapsEnterpriseId = a})
-- | Multipart request metadata.
usapsPayload :: Lens' UsersSetAvailableProductSet ProductSet
usapsPayload
  = lens _usapsPayload (\ s a -> s{_usapsPayload = a})
-- | The ID of the user.
usapsUserId :: Lens' UsersSetAvailableProductSet Text
usapsUserId
  = lens _usapsUserId (\ s a -> s{_usapsUserId = a})
-- The response type is the (updated) 'ProductSet'; the request needs
-- the androidenterprise OAuth scope.
instance GoogleRequest UsersSetAvailableProductSet
         where
        type Rs UsersSetAvailableProductSet = ProductSet
        type Scopes UsersSetAvailableProductSet =
             '["https://www.googleapis.com/auth/androidenterprise"]
        requestClient UsersSetAvailableProductSet'{..}
          = go _usapsEnterpriseId _usapsUserId (Just AltJSON)
              _usapsPayload
              androidEnterpriseService
          where go
                  = buildClient
                      (Proxy :: Proxy UsersSetAvailableProductSetResource)
                      mempty
| rueshyna/gogol | gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/Users/SetAvailableProductSet.hs | mpl-2.0 | 4,053 | 0 | 16 | 885 | 465 | 277 | 188 | 75 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.PubSub.Projects.Topics.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the topic with the given name. Returns \`NOT_FOUND\` if the
-- topic does not exist. After a topic is deleted, a new topic may be
-- created with the same name; this is an entirely new topic with none of
-- the old configuration or subscriptions. Existing subscriptions to this
-- topic are not deleted, but their \`topic\` field is set to
-- \`_deleted-topic_\`.
--
-- /See:/ <https://cloud.google.com/pubsub/docs Google Cloud Pub/Sub API Reference> for @pubsub.projects.topics.delete@.
module Network.Google.Resource.PubSub.Projects.Topics.Delete
(
-- * REST Resource
ProjectsTopicsDeleteResource
-- * Creating a Request
, projectsTopicsDelete
, ProjectsTopicsDelete
-- * Request Lenses
, ptdXgafv
, ptdUploadProtocol
, ptdPp
, ptdAccessToken
, ptdUploadType
, ptdTopic
, ptdBearerToken
, ptdCallback
) where
import Network.Google.Prelude
import Network.Google.PubSub.Types
-- | A resource alias for @pubsub.projects.topics.delete@ method which the
-- 'ProjectsTopicsDelete' request conforms to.
-- Servant-style route: DELETE v1/{topic} plus the standard Google API
-- query parameters; responds with the empty message type.
type ProjectsTopicsDeleteResource =
     "v1" :>
       Capture "topic" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "pp" Bool :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "bearer_token" Text :>
                     QueryParam "callback" Text :>
                       QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Deletes the topic with the given name. Returns \`NOT_FOUND\` if the
-- topic does not exist. After a topic is deleted, a new topic may be
-- created with the same name; this is an entirely new topic with none of
-- the old configuration or subscriptions. Existing subscriptions to this
-- topic are not deleted, but their \`topic\` field is set to
-- \`_deleted-topic_\`.
--
-- /See:/ 'projectsTopicsDelete' smart constructor.
data ProjectsTopicsDelete = ProjectsTopicsDelete'
    { _ptdXgafv :: !(Maybe Xgafv)            -- ^ error format version
    , _ptdUploadProtocol :: !(Maybe Text)    -- ^ e.g. "raw", "multipart"
    , _ptdPp :: !Bool                        -- ^ pretty-print (defaults True)
    , _ptdAccessToken :: !(Maybe Text)       -- ^ OAuth access token
    , _ptdUploadType :: !(Maybe Text)        -- ^ legacy upload protocol
    , _ptdTopic :: !Text                     -- ^ fully-qualified topic name
    , _ptdBearerToken :: !(Maybe Text)       -- ^ OAuth bearer token
    , _ptdCallback :: !(Maybe Text)          -- ^ JSONP callback
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProjectsTopicsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ptdXgafv'
--
-- * 'ptdUploadProtocol'
--
-- * 'ptdPp'
--
-- * 'ptdAccessToken'
--
-- * 'ptdUploadType'
--
-- * 'ptdTopic'
--
-- * 'ptdBearerToken'
--
-- * 'ptdCallback'
-- | Build a 'ProjectsTopicsDelete' request for the given topic; all
-- optional query parameters start unset and pretty-printing enabled.
projectsTopicsDelete
    :: Text -- ^ 'ptdTopic'
    -> ProjectsTopicsDelete
projectsTopicsDelete topicName =
    ProjectsTopicsDelete'
    { _ptdXgafv = Nothing
    , _ptdUploadProtocol = Nothing
    , _ptdPp = True
    , _ptdAccessToken = Nothing
    , _ptdUploadType = Nothing
    , _ptdTopic = topicName
    , _ptdBearerToken = Nothing
    , _ptdCallback = Nothing
    }
-- Van Laarhoven lenses over the request record, one per field.
-- | V1 error format.
ptdXgafv :: Lens' ProjectsTopicsDelete (Maybe Xgafv)
ptdXgafv = lens _ptdXgafv (\ s a -> s{_ptdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ptdUploadProtocol :: Lens' ProjectsTopicsDelete (Maybe Text)
ptdUploadProtocol
  = lens _ptdUploadProtocol
      (\ s a -> s{_ptdUploadProtocol = a})
-- | Pretty-print response.
ptdPp :: Lens' ProjectsTopicsDelete Bool
ptdPp = lens _ptdPp (\ s a -> s{_ptdPp = a})
-- | OAuth access token.
ptdAccessToken :: Lens' ProjectsTopicsDelete (Maybe Text)
ptdAccessToken
  = lens _ptdAccessToken
      (\ s a -> s{_ptdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ptdUploadType :: Lens' ProjectsTopicsDelete (Maybe Text)
ptdUploadType
  = lens _ptdUploadType
      (\ s a -> s{_ptdUploadType = a})
-- | Name of the topic to delete. Format is
-- \`projects\/{project}\/topics\/{topic}\`.
ptdTopic :: Lens' ProjectsTopicsDelete Text
ptdTopic = lens _ptdTopic (\ s a -> s{_ptdTopic = a})
-- | OAuth bearer token.
ptdBearerToken :: Lens' ProjectsTopicsDelete (Maybe Text)
ptdBearerToken
  = lens _ptdBearerToken
      (\ s a -> s{_ptdBearerToken = a})
-- | JSONP
ptdCallback :: Lens' ProjectsTopicsDelete (Maybe Text)
ptdCallback
  = lens _ptdCallback (\ s a -> s{_ptdCallback = a})
-- Response is 'Empty'; either the cloud-platform or pubsub scope works.
instance GoogleRequest ProjectsTopicsDelete where
        type Rs ProjectsTopicsDelete = Empty
        type Scopes ProjectsTopicsDelete =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/pubsub"]
        requestClient ProjectsTopicsDelete'{..}
          = go _ptdTopic _ptdXgafv _ptdUploadProtocol
              (Just _ptdPp)
              _ptdAccessToken
              _ptdUploadType
              _ptdBearerToken
              _ptdCallback
              (Just AltJSON)
              pubSubService
          where go
                  = buildClient
                      (Proxy :: Proxy ProjectsTopicsDeleteResource)
                      mempty
| rueshyna/gogol | gogol-pubsub/gen/Network/Google/Resource/PubSub/Projects/Topics/Delete.hs | mpl-2.0 | 5,955 | 0 | 17 | 1,409 | 863 | 506 | 357 | 120 | 1 |
{- |
Module : LoadCallbacks
Description : Loads callbacks from file (needed due to TH stage restriction).
License : Apache
Stability : experimental
Portability : unportable
-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
module LoadCallbacks where
import Bio.Motions.Callback.Parser.TH
-- Template Haskell splice: parse @config/callbacks@ at compile time and
-- generate the callback definitions here.  Kept in its own module so
-- the splice satisfies GHC's TH stage restriction (see module header).
[callbacksFile|config/callbacks|]
| Motions/motions | LoadCallbacks.hs | apache-2.0 | 652 | 0 | 4 | 87 | 30 | 25 | 5 | 13 | 0 |
module KthDifferences.A327460Spec (main, spec) where
import Test.Hspec
import KthDifferences.A327460 (a327460, a327460_list)
-- | Test-suite entry point: run 'spec' with Hspec's default runner.
main :: IO ()
main = hspec spec
-- | Spot checks for OEIS sequence A327460: the documented initial
-- terms, plus the first index (56) where it diverges from A327762.
spec :: Spec
spec = describe "A327460" $ do
  it "correctly computes the first 10 elements" $
    [a327460 n | n <- [1..10]] `shouldBe` [1, 3, 9, 5, 12, 10, 23, 8, 22, 17]
  it "knows that the 56th term differs from A327762" $
    a327460 56 `shouldBe` 101
| peterokagey/haskellOEIS | test/KthDifferences/A327460Spec.hs | apache-2.0 | 410 | 0 | 11 | 79 | 142 | 80 | 62 | 11 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QFont.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:20
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QFont (
QqFont(..)
,QqFont_nf(..)
,qFontCacheStatistics
,qFontCleanup
,defaultFamily
,fromString
,qFontInitialize
,qFontInsertSubstitution
,qFontInsertSubstitutions
,kerning
,lastResortFamily
,lastResortFont
,rawName
,qFontRemoveSubstitution
,setBold
,setFamily
,setFixedPitch
,setItalic
,setKerning
,setOverline
,setPixelSize
,setPointSize
,setPointSizeF
,setRawMode
,setRawName
,setStretch
,setStrikeOut
,QsetStyleHint(..)
,setStyleStrategy
,setUnderline
,setWeight
,stretch
,styleStrategy
,qFontSubstitute
,qFontSubstitutes
,qFontSubstitutions
,qFont_delete
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Gui.QFont
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
-- | Overloaded QFont constructors.  Each instance marshals its Haskell
-- arguments to a C wrapper (@qtc_QFont*@) and wraps the returned raw
-- pointer via 'withQFontResult' (which registers a finalizer —
-- machine-generated binding, do not hand-edit the marshalling).
class QqFont x1 where
 qFont :: x1 -> IO (QFont ())
-- Default-constructed font.
instance QqFont (()) where
 qFont ()
  = withQFontResult $
    qtc_QFont
foreign import ccall "qtc_QFont" qtc_QFont :: IO (Ptr (TQFont ()))
-- Copy constructor.
instance QqFont ((QFont t1)) where
 qFont (x1)
  = withQFontResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QFont1 cobj_x1
foreign import ccall "qtc_QFont1" qtc_QFont1 :: Ptr (TQFont t1) -> IO (Ptr (TQFont ()))
-- From family name.
instance QqFont ((String)) where
 qFont (x1)
  = withQFontResult $
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont2 cstr_x1
foreign import ccall "qtc_QFont2" qtc_QFont2 :: CWString -> IO (Ptr (TQFont ()))
-- From family name and point size.
instance QqFont ((String, Int)) where
 qFont (x1, x2)
  = withQFontResult $
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont3 cstr_x1 (toCInt x2)
foreign import ccall "qtc_QFont3" qtc_QFont3 :: CWString -> CInt -> IO (Ptr (TQFont ()))
-- Copy, resolved against a paint device.
instance QqFont ((QFont t1, QPaintDevice t2)) where
 qFont (x1, x2)
  = withQFontResult $
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QFont4 cobj_x1 cobj_x2
foreign import ccall "qtc_QFont4" qtc_QFont4 :: Ptr (TQFont t1) -> Ptr (TQPaintDevice t2) -> IO (Ptr (TQFont ()))
-- Copy, resolved against a widget (separate C entry point).
instance QqFont ((QFont t1, QWidget t2)) where
 qFont (x1, x2)
  = withQFontResult $
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QFont4_widget cobj_x1 cobj_x2
foreign import ccall "qtc_QFont4_widget" qtc_QFont4_widget :: Ptr (TQFont t1) -> Ptr (TQWidget t2) -> IO (Ptr (TQFont ()))
-- Family, point size and weight.
instance QqFont ((String, Int, Int)) where
 qFont (x1, x2, x3)
  = withQFontResult $
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont5 cstr_x1 (toCInt x2) (toCInt x3)
foreign import ccall "qtc_QFont5" qtc_QFont5 :: CWString -> CInt -> CInt -> IO (Ptr (TQFont ()))
-- Family, point size, weight and italic flag.
instance QqFont ((String, Int, Int, Bool)) where
 qFont (x1, x2, x3, x4)
  = withQFontResult $
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont6 cstr_x1 (toCInt x2) (toCInt x3) (toCBool x4)
foreign import ccall "qtc_QFont6" qtc_QFont6 :: CWString -> CInt -> CInt -> CBool -> IO (Ptr (TQFont ()))
-- | Same overloads as 'QqFont' (and the same C entry points) but using
-- 'withObjectRefResult', i.e. returning a reference with no finalizer
-- ("_nf") — the caller owns the object and must delete it.
class QqFont_nf x1 where
 qFont_nf :: x1 -> IO (QFont ())
instance QqFont_nf (()) where
 qFont_nf ()
  = withObjectRefResult $
    qtc_QFont
instance QqFont_nf ((QFont t1)) where
 qFont_nf (x1)
  = withObjectRefResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QFont1 cobj_x1
instance QqFont_nf ((String)) where
 qFont_nf (x1)
  = withObjectRefResult $
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont2 cstr_x1
instance QqFont_nf ((String, Int)) where
 qFont_nf (x1, x2)
  = withObjectRefResult $
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont3 cstr_x1 (toCInt x2)
instance QqFont_nf ((QFont t1, QPaintDevice t2)) where
 qFont_nf (x1, x2)
  = withObjectRefResult $
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QFont4 cobj_x1 cobj_x2
instance QqFont_nf ((QFont t1, QWidget t2)) where
 qFont_nf (x1, x2)
  = withObjectRefResult $
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QFont4_widget cobj_x1 cobj_x2
instance QqFont_nf ((String, Int, Int)) where
 qFont_nf (x1, x2, x3)
  = withObjectRefResult $
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont5 cstr_x1 (toCInt x2) (toCInt x3)
instance QqFont_nf ((String, Int, Int, Bool)) where
 qFont_nf (x1, x2, x3, x4)
  = withObjectRefResult $
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont6 cstr_x1 (toCInt x2) (toCInt x3) (toCBool x4)
-- Property getters and static (class-level) QFont functions.  Every
-- binding follows the same generated shape: unwrap the Haskell object
-- pointer(s), call the C wrapper, convert the C result back.
instance Qbold (QFont a) (()) where
 bold x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_bold cobj_x0
foreign import ccall "qtc_QFont_bold" qtc_QFont_bold :: Ptr (TQFont a) -> IO CBool
-- Static: dump font-cache statistics (QFont::cacheStatistics).
qFontCacheStatistics :: (()) -> IO ()
qFontCacheStatistics ()
  = qtc_QFont_cacheStatistics
foreign import ccall "qtc_QFont_cacheStatistics" qtc_QFont_cacheStatistics :: IO ()
-- Static: release font resources (QFont::cleanup).
qFontCleanup :: (()) -> IO ()
qFontCleanup ()
  = qtc_QFont_cleanup
foreign import ccall "qtc_QFont_cleanup" qtc_QFont_cleanup :: IO ()
defaultFamily :: QFont a -> (()) -> IO (String)
defaultFamily x0 ()
  = withStringResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_defaultFamily cobj_x0
foreign import ccall "qtc_QFont_defaultFamily" qtc_QFont_defaultFamily :: Ptr (TQFont a) -> IO (Ptr (TQString ()))
instance QexactMatch (QFont a) (()) where
 exactMatch x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_exactMatch cobj_x0
foreign import ccall "qtc_QFont_exactMatch" qtc_QFont_exactMatch :: Ptr (TQFont a) -> IO CBool
instance Qfamily (QFont a) (()) where
 family x0 ()
  = withStringResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_family cobj_x0
foreign import ccall "qtc_QFont_family" qtc_QFont_family :: Ptr (TQFont a) -> IO (Ptr (TQString ()))
instance QfixedPitch (QFont a) (()) where
 fixedPitch x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_fixedPitch cobj_x0
foreign import ccall "qtc_QFont_fixedPitch" qtc_QFont_fixedPitch :: Ptr (TQFont a) -> IO CBool
-- Parse a font description; returns whether it was understood.
fromString :: QFont a -> ((String)) -> IO (Bool)
fromString x0 (x1)
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont_fromString cobj_x0 cstr_x1
foreign import ccall "qtc_QFont_fromString" qtc_QFont_fromString :: Ptr (TQFont a) -> CWString -> IO CBool
-- Static: initialize font subsystem.
qFontInitialize :: (()) -> IO ()
qFontInitialize ()
  = qtc_QFont_initialize
foreign import ccall "qtc_QFont_initialize" qtc_QFont_initialize :: IO ()
-- Static: register one family-name substitution.
qFontInsertSubstitution :: ((String, String)) -> IO ()
qFontInsertSubstitution (x1, x2)
  = withCWString x1 $ \cstr_x1 ->
    withCWString x2 $ \cstr_x2 ->
    qtc_QFont_insertSubstitution cstr_x1 cstr_x2
foreign import ccall "qtc_QFont_insertSubstitution" qtc_QFont_insertSubstitution :: CWString -> CWString -> IO ()
-- Static: register several substitutes for one family.
qFontInsertSubstitutions :: ((String, [String])) -> IO ()
qFontInsertSubstitutions (x1, x2)
  = withCWString x1 $ \cstr_x1 ->
    withQListString x2 $ \cqlistlen_x2 cqliststr_x2 ->
    qtc_QFont_insertSubstitutions cstr_x1 cqlistlen_x2 cqliststr_x2
foreign import ccall "qtc_QFont_insertSubstitutions" qtc_QFont_insertSubstitutions :: CWString -> CInt -> Ptr (Ptr CWchar) -> IO ()
instance QisCopyOf (QFont a) ((QFont t1)) where
 isCopyOf x0 (x1)
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QFont_isCopyOf cobj_x0 cobj_x1
foreign import ccall "qtc_QFont_isCopyOf" qtc_QFont_isCopyOf :: Ptr (TQFont a) -> Ptr (TQFont t1) -> IO CBool
instance Qitalic (QFont a) (()) where
 italic x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_italic cobj_x0
foreign import ccall "qtc_QFont_italic" qtc_QFont_italic :: Ptr (TQFont a) -> IO CBool
kerning :: QFont a -> (()) -> IO (Bool)
kerning x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_kerning cobj_x0
foreign import ccall "qtc_QFont_kerning" qtc_QFont_kerning :: Ptr (TQFont a) -> IO CBool
instance Qkey (QFont a) (()) (IO (String)) where
 key x0 ()
  = withStringResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_key cobj_x0
foreign import ccall "qtc_QFont_key" qtc_QFont_key :: Ptr (TQFont a) -> IO (Ptr (TQString ()))
lastResortFamily :: QFont a -> (()) -> IO (String)
lastResortFamily x0 ()
  = withStringResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_lastResortFamily cobj_x0
foreign import ccall "qtc_QFont_lastResortFamily" qtc_QFont_lastResortFamily :: Ptr (TQFont a) -> IO (Ptr (TQString ()))
lastResortFont :: QFont a -> (()) -> IO (String)
lastResortFont x0 ()
  = withStringResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_lastResortFont cobj_x0
foreign import ccall "qtc_QFont_lastResortFont" qtc_QFont_lastResortFont :: Ptr (TQFont a) -> IO (Ptr (TQString ()))
instance Qoverline (QFont a) (()) where
 overline x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_overline cobj_x0
foreign import ccall "qtc_QFont_overline" qtc_QFont_overline :: Ptr (TQFont a) -> IO CBool
instance QpixelSize (QFont a) (()) where
 pixelSize x0 ()
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_pixelSize cobj_x0
foreign import ccall "qtc_QFont_pixelSize" qtc_QFont_pixelSize :: Ptr (TQFont a) -> IO CInt
instance QpointSize (QFont a) (()) where
 pointSize x0 ()
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_pointSize cobj_x0
foreign import ccall "qtc_QFont_pointSize" qtc_QFont_pointSize :: Ptr (TQFont a) -> IO CInt
instance QpointSizeF (QFont a) (()) where
 pointSizeF x0 ()
  = withDoubleResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_pointSizeF cobj_x0
foreign import ccall "qtc_QFont_pointSizeF" qtc_QFont_pointSizeF :: Ptr (TQFont a) -> IO CDouble
instance QrawMode (QFont a) (()) where
 rawMode x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_rawMode cobj_x0
foreign import ccall "qtc_QFont_rawMode" qtc_QFont_rawMode :: Ptr (TQFont a) -> IO CBool
rawName :: QFont a -> (()) -> IO (String)
rawName x0 ()
  = withStringResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_rawName cobj_x0
foreign import ccall "qtc_QFont_rawName" qtc_QFont_rawName :: Ptr (TQFont a) -> IO (Ptr (TQString ()))
-- Static: remove a previously registered substitution.
qFontRemoveSubstitution :: ((String)) -> IO ()
qFontRemoveSubstitution (x1)
  = withCWString x1 $ \cstr_x1 ->
    qtc_QFont_removeSubstitution cstr_x1
foreign import ccall "qtc_QFont_removeSubstitution" qtc_QFont_removeSubstitution :: CWString -> IO ()
-- 'resolve' overloads and the property setters.  Setters take the
-- object plus the new value and return unit; enum arguments are
-- converted through 'qEnum_toInt'.
instance Qresolve (QFont a) ((Int)) (IO ()) where
 resolve x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_resolve1 cobj_x0 (toCUInt x1)
foreign import ccall "qtc_QFont_resolve1" qtc_QFont_resolve1 :: Ptr (TQFont a) -> CUInt -> IO ()
instance Qresolve (QFont a) (()) (IO (Int)) where
 resolve x0 ()
  = withUnsignedIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_resolve cobj_x0
foreign import ccall "qtc_QFont_resolve" qtc_QFont_resolve :: Ptr (TQFont a) -> IO CUInt
instance Qresolve (QFont a) ((QFont t1)) (IO (QFont ())) where
 resolve x0 (x1)
  = withQFontResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QFont_resolve2 cobj_x0 cobj_x1
foreign import ccall "qtc_QFont_resolve2" qtc_QFont_resolve2 :: Ptr (TQFont a) -> Ptr (TQFont t1) -> IO (Ptr (TQFont ()))
setBold :: QFont a -> ((Bool)) -> IO ()
setBold x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setBold cobj_x0 (toCBool x1)
foreign import ccall "qtc_QFont_setBold" qtc_QFont_setBold :: Ptr (TQFont a) -> CBool -> IO ()
setFamily :: QFont a -> ((String)) -> IO ()
setFamily x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont_setFamily cobj_x0 cstr_x1
foreign import ccall "qtc_QFont_setFamily" qtc_QFont_setFamily :: Ptr (TQFont a) -> CWString -> IO ()
setFixedPitch :: QFont a -> ((Bool)) -> IO ()
setFixedPitch x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setFixedPitch cobj_x0 (toCBool x1)
foreign import ccall "qtc_QFont_setFixedPitch" qtc_QFont_setFixedPitch :: Ptr (TQFont a) -> CBool -> IO ()
setItalic :: QFont a -> ((Bool)) -> IO ()
setItalic x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setItalic cobj_x0 (toCBool x1)
foreign import ccall "qtc_QFont_setItalic" qtc_QFont_setItalic :: Ptr (TQFont a) -> CBool -> IO ()
setKerning :: QFont a -> ((Bool)) -> IO ()
setKerning x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setKerning cobj_x0 (toCBool x1)
foreign import ccall "qtc_QFont_setKerning" qtc_QFont_setKerning :: Ptr (TQFont a) -> CBool -> IO ()
setOverline :: QFont a -> ((Bool)) -> IO ()
setOverline x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setOverline cobj_x0 (toCBool x1)
foreign import ccall "qtc_QFont_setOverline" qtc_QFont_setOverline :: Ptr (TQFont a) -> CBool -> IO ()
setPixelSize :: QFont a -> ((Int)) -> IO ()
setPixelSize x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setPixelSize cobj_x0 (toCInt x1)
foreign import ccall "qtc_QFont_setPixelSize" qtc_QFont_setPixelSize :: Ptr (TQFont a) -> CInt -> IO ()
setPointSize :: QFont a -> ((Int)) -> IO ()
setPointSize x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setPointSize cobj_x0 (toCInt x1)
foreign import ccall "qtc_QFont_setPointSize" qtc_QFont_setPointSize :: Ptr (TQFont a) -> CInt -> IO ()
setPointSizeF :: QFont a -> ((Double)) -> IO ()
setPointSizeF x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setPointSizeF cobj_x0 (toCDouble x1)
foreign import ccall "qtc_QFont_setPointSizeF" qtc_QFont_setPointSizeF :: Ptr (TQFont a) -> CDouble -> IO ()
setRawMode :: QFont a -> ((Bool)) -> IO ()
setRawMode x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setRawMode cobj_x0 (toCBool x1)
foreign import ccall "qtc_QFont_setRawMode" qtc_QFont_setRawMode :: Ptr (TQFont a) -> CBool -> IO ()
setRawName :: QFont a -> ((String)) -> IO ()
setRawName x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont_setRawName cobj_x0 cstr_x1
foreign import ccall "qtc_QFont_setRawName" qtc_QFont_setRawName :: Ptr (TQFont a) -> CWString -> IO ()
setStretch :: QFont a -> ((Int)) -> IO ()
setStretch x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setStretch cobj_x0 (toCInt x1)
foreign import ccall "qtc_QFont_setStretch" qtc_QFont_setStretch :: Ptr (TQFont a) -> CInt -> IO ()
setStrikeOut :: QFont a -> ((Bool)) -> IO ()
setStrikeOut x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setStrikeOut cobj_x0 (toCBool x1)
foreign import ccall "qtc_QFont_setStrikeOut" qtc_QFont_setStrikeOut :: Ptr (TQFont a) -> CBool -> IO ()
instance QsetStyle (QFont a) ((QFontStyle)) where
 setStyle x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setStyle cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QFont_setStyle" qtc_QFont_setStyle :: Ptr (TQFont a) -> CLong -> IO ()
-- Overloaded style-hint setter: with or without a strategy.
class QsetStyleHint x1 where
 setStyleHint :: QFont a -> x1 -> IO ()
instance QsetStyleHint ((QFontStyleHint)) where
 setStyleHint x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setStyleHint cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QFont_setStyleHint" qtc_QFont_setStyleHint :: Ptr (TQFont a) -> CLong -> IO ()
instance QsetStyleHint ((QFontStyleHint, StyleStrategy)) where
 setStyleHint x0 (x1, x2)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setStyleHint1 cobj_x0 (toCLong $ qEnum_toInt x1) (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QFont_setStyleHint1" qtc_QFont_setStyleHint1 :: Ptr (TQFont a) -> CLong -> CLong -> IO ()
setStyleStrategy :: QFont a -> ((StyleStrategy)) -> IO ()
setStyleStrategy x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setStyleStrategy cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QFont_setStyleStrategy" qtc_QFont_setStyleStrategy :: Ptr (TQFont a) -> CLong -> IO ()
setUnderline :: QFont a -> ((Bool)) -> IO ()
setUnderline x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setUnderline cobj_x0 (toCBool x1)
foreign import ccall "qtc_QFont_setUnderline" qtc_QFont_setUnderline :: Ptr (TQFont a) -> CBool -> IO ()
setWeight :: QFont a -> ((Int)) -> IO ()
setWeight x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_setWeight cobj_x0 (toCInt x1)
foreign import ccall "qtc_QFont_setWeight" qtc_QFont_setWeight :: Ptr (TQFont a) -> CInt -> IO ()
-- Remaining query bindings, string conversion, and explicit deletion.
stretch :: QFont a -> (()) -> IO (Int)
stretch x0 ()
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_stretch cobj_x0
foreign import ccall "qtc_QFont_stretch" qtc_QFont_stretch :: Ptr (TQFont a) -> IO CInt
instance QstrikeOut (QFont a) (()) where
 strikeOut x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_strikeOut cobj_x0
foreign import ccall "qtc_QFont_strikeOut" qtc_QFont_strikeOut :: Ptr (TQFont a) -> IO CBool
instance Qstyle (QFont a) (()) (IO (QFontStyle)) where
 style x0 ()
  = withQEnumResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_style cobj_x0
foreign import ccall "qtc_QFont_style" qtc_QFont_style :: Ptr (TQFont a) -> IO CLong
instance QstyleHint (QFont a) (()) (IO (QFontStyleHint)) where
 styleHint x0 ()
  = withQEnumResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_styleHint cobj_x0
foreign import ccall "qtc_QFont_styleHint" qtc_QFont_styleHint :: Ptr (TQFont a) -> IO CLong
styleStrategy :: QFont a -> (()) -> IO (StyleStrategy)
styleStrategy x0 ()
  = withQEnumResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_styleStrategy cobj_x0
foreign import ccall "qtc_QFont_styleStrategy" qtc_QFont_styleStrategy :: Ptr (TQFont a) -> IO CLong
-- Static: look up the first registered substitute family.
qFontSubstitute :: ((String)) -> IO (String)
qFontSubstitute (x1)
  = withStringResult $
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont_substitute cstr_x1
foreign import ccall "qtc_QFont_substitute" qtc_QFont_substitute :: CWString -> IO (Ptr (TQString ()))
-- Static: all substitutes registered for a family.
qFontSubstitutes :: ((String)) -> IO ([String])
qFontSubstitutes (x1)
  = withQListStringResult $ \arr ->
    withCWString x1 $ \cstr_x1 ->
    qtc_QFont_substitutes cstr_x1 arr
foreign import ccall "qtc_QFont_substitutes" qtc_QFont_substitutes :: CWString -> Ptr (Ptr (TQString ())) -> IO CInt
-- Static: every family that has substitutes registered.
qFontSubstitutions :: (()) -> IO ([String])
qFontSubstitutions ()
  = withQListStringResult $ \arr ->
    qtc_QFont_substitutions arr
foreign import ccall "qtc_QFont_substitutions" qtc_QFont_substitutions :: Ptr (Ptr (TQString ())) -> IO CInt
instance QtoString (QFont a) (()) where
 toString x0 ()
  = withStringResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_toString cobj_x0
foreign import ccall "qtc_QFont_toString" qtc_QFont_toString :: Ptr (TQFont a) -> IO (Ptr (TQString ()))
instance Qunderline (QFont a) (()) where
 underline x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_underline cobj_x0
foreign import ccall "qtc_QFont_underline" qtc_QFont_underline :: Ptr (TQFont a) -> IO CBool
instance Qweight (QFont a) (()) where
 weight x0 ()
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_weight cobj_x0
foreign import ccall "qtc_QFont_weight" qtc_QFont_weight :: Ptr (TQFont a) -> IO CInt
-- Explicitly destroy the underlying C++ QFont object; the Haskell
-- reference must not be used afterwards.
qFont_delete :: QFont a -> IO ()
qFont_delete x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QFont_delete cobj_x0
foreign import ccall "qtc_QFont_delete" qtc_QFont_delete :: Ptr (TQFont a) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QFont.hs | bsd-2-clause | 19,563 | 0 | 15 | 3,508 | 6,827 | 3,506 | 3,321 | -1 | -1 |
{-# LANGUAGE DataKinds, EmptyDataDecls, TypeOperators, UndecidableInstances #-}
module HaskHOL.Lib.TypeQuant.Context
( TypeQuantType
, TypeQuantThry
, TypeQuantCtxt
, ctxtTypeQuant
) where
import HaskHOL.Core
import HaskHOL.Lib.Simp
import HaskHOL.Lib.Trivia.Context
-- New Theory Type and Constraint
-- | Empty tag type identifying the TypeQuant theory extension; it only
-- exists at the type level.
data TypeQuantThry
-- A theory tag equals only itself.
type instance TypeQuantThry == TypeQuantThry = 'True
instance CtxtName TypeQuantThry where
    ctxtName _ = "TypeQuantCtxt"
-- | The polymorphic-theory constraint for this extension.
type instance PolyTheory TypeQuantType b = TypeQuantCtxt b
-- | Constraint satisfied by any theory context that includes Trivia and
-- has the TypeQuant extension in its hierarchy.
type family TypeQuantCtxt a :: Constraint where
    TypeQuantCtxt a = (Typeable a, TriviaCtxt a, TypeQuantContext a ~ 'True)
-- Assert Theory Hierarchy
-- | This theory type: Trivia extended with 'TypeQuantThry'.
type TypeQuantType = ExtThry TypeQuantThry TriviaType
-- | Type-level search: does the theory stack contain 'TypeQuantThry'?
type family TypeQuantContext a :: Bool where
    TypeQuantContext UnsafeThry = 'True
    TypeQuantContext BaseThry = 'False
    TypeQuantContext (ExtThry a b) = TypeQuantContext b || (a == TypeQuantThry)
-- | Theory path for the TypeQuant extension: extends the Trivia theory,
-- registering the type-beta conversion as a basic conversion.  (The
-- quasiquoted term is HOL syntax consumed at compile time via TH.)
ctxtTypeQuant :: TheoryPath TypeQuantType
ctxtTypeQuant = extendTheory ctxtTrivia $(thisModule') $
    extendBasicConvs
      [("convTYBETA",
        ([txt| ((\\ 'B. t):(% 'B. C)) [: 'A] |], "HaskHOL.Lib.TypeQuant"))]
| ecaustin/haskhol-deductive | src/HaskHOL/Lib/TypeQuant/Context.hs | bsd-2-clause | 1,165 | 1 | 9 | 204 | 252 | 146 | 106 | -1 | -1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.