code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE TypeOperators, OverloadedStrings, ScopedTypeVariables #-}
module Main where
import Control.Monad
import Control.Exception
import Data.Maybe (isJust, isNothing, fromMaybe)
import Data.List (isInfixOf, intercalate)
import Data.List.Split (splitOn)
import Data.Char (toLower)
import Data.Monoid
import System.IO (hIsTerminalDevice, stdout)
import System.Directory
import System.Environment (lookupEnv)
import Data.Traversable (traverse)
import Network.RTorrent
import Network.RTorrent.Action (pureAction, simpleAction)
import Options.Applicative
import Render
-- | Command-line options collected by the argument parser 'parse'.
data Opts = Opts {
    showFiles :: Bool       -- ^ Show the files of each torrent ("-f").
  , showChunks :: Bool      -- ^ Show chunk information ("-u").
  , nameFilt :: String      -- ^ Substring filter on torrent names ("-t").
  , idFilt :: Int -> Bool   -- ^ Predicate filter on list ids ("-i").
  , quiet :: Bool           -- ^ Suppress output; overrides 'verbose' ("-q").
  , verbose :: Bool         -- ^ Print torrents even when running a command ("-v").
  , forceColor :: Bool      -- ^ Force colored output even without a tty ("-c").
  , cmd :: Maybe RCommand   -- ^ Optional subcommand to execute.
  }
-- | Subcommands understood by the program (see the subparser in 'parse').
data RCommand =
    CCD                    -- ^ "cd": write a cd command for a matching torrent.
  | CStop                  -- ^ "stop": stop matching torrents.
  | CStart                 -- ^ "start": start matching torrents.
  | CDelete                -- ^ "delete": delete matching torrents.
  | CExec String [String]  -- ^ "exec": run a program with arguments.
  | CLoad String           -- ^ "load": load a torrent from a url.

-- Chunk data for a torrent: the optional chunk bitmap and the chunk size.
type ChunkInfo = Maybe [Bool] :*: Int

-- Everything fetched per torrent: info, files and chunk data.
type TInfo = TorrentInfo :*: [FileInfo] :*: ChunkInfo
-- | Parse a comma-separated list of id selectors into a predicate.
-- Each piece is either an exact id ("3"), an open range ("-5" or
-- "7-"), or a closed range ("3-5"); the resulting predicate accepts an
-- id when any piece matches it.
parseIdList :: String -> ReadM (Int -> Bool)
parseIdList spec = do
  preds <- traverse piecePred (splitOn "," spec)
  pure (\i -> any (\p -> p i) preds)
  where
    parseInt :: String -> ReadM Int
    parseInt txt = case reads txt of
      [(n, "")] -> pure n
      _ -> readerError "Trash found in ID"
    -- One selector between commas, split on "-" to find the range form.
    piecePred chunk = case splitOn "-" chunk of
      ["", hi] -> (\e i -> i <= e) <$> parseInt hi
      [lo, ""] -> (\s i -> s <= i) <$> parseInt lo
      [lo, hi] -> (\s e i -> s <= i && i <= e)
                    <$> parseInt lo
                    <*> parseInt hi
      [exact]  -> (==) <$> parseInt exact
      _ -> readerError "Trash found in ID"
-- | Parser for all command-line flags and subcommands; field order
-- matches the 'Opts' constructor.
parse :: Parser Opts
parse = Opts
  <$> switch (
        short 'f'
        <> help "Show files")
  <*> switch (
        short 'u'
        <> help "Show chunks")
  <*> strOption (
        short 't'
        <> value ""
        <> metavar "torrent"
        <> help "Filter by torrent name (checks substrings)")
  <*> option (str >>= parseIdList) (
        short 'i'
        <> value (const True)
        <> metavar "ID"
        <> help "Filter by id, where -2,3-5,6,7- matches every id")
  <*> switch (
        short 'q'
        <> help "Be quiet, overrides verbose")
  <*> switch (
        short 'v'
        <> help "Be verbose")
  <*> switch (
        short 'c'
        <> long "color"
        <> help "Force colors on")
  <*> optional (
        subparser (
          command "load" load
          <> command "cd" cd
          <> command "exec" exec
          <> command "start" start
          <> command "stop" stop
          <> command "delete" delete
        ))
  where
    -- Each subcommand parser carries its own --help and description.
    load = info (helper <*> arg) $
        progDesc "Load a new torrent file."
      where
        arg = CLoad <$> strArgument (metavar "url")
    cd = info (helper <*> pure CCD) $
        progDesc "Change directory to base directory of a matching torrent."
    start = info (helper <*> pure CStart) $
        progDesc "Start torrents."
    stop = info (helper <*> pure CStop) $
        progDesc "Stop torrents."
    delete = info (helper <*> pure CDelete) $
        progDesc "Delete torrents."
    exec = info (helper <*> (CExec <$> prg <*> many args)) $
        progDesc "Execute a program on the first file of the last matching torrent."
      where
        prg = strArgument (metavar "program")
        args = strArgument (metavar "args")
-- | Case-insensitive substring match against the name carried in the
-- second slot of the pair.  The needle is expected to be lowercased
-- already (the caller in 'main' lowercases the filter).
checkName :: String -> a :*: String -> Bool
checkName needle (_ :*: name) = isInfixOf needle (map toLower name)
-- | Name of the helper shell file in the home directory; 'main' clears
-- it on startup and the "cd"/"exec" commands write into it.
shFile :: FilePath
shFile = ".rc_sh"
-- | Run an RTorrent command against the host and port taken from the
-- RT_HOST and RT_PORT environment variables, defaulting to
-- localhost:5000 when they are unset or empty.
call :: Command a => a -> IO (Either String (Ret a))
call command = do
  host <- fromMaybe "localhost" . filterEmpty <$> lookupEnv "RT_HOST"
  port <- checkPort =<< fromMaybe "5000" . filterEmpty <$> lookupEnv "RT_PORT"
  callRTorrent host port command
  where
    -- Treat an empty environment variable the same as an unset one.
    filterEmpty (Just "") = Nothing
    filterEmpty a = a
    -- Require the whole string to be an integer.  The previous pattern
    -- [(i, _)] silently accepted trailing garbage such as "5000abc",
    -- contradicting the error message below.
    checkPort s = case reads s of
      [(i, "")] -> return i
      _ -> throwIO $ ErrorCall "RT_PORT is not an integer"
-- | Load and start a torrent from a local path or url.  The argument
-- is canonicalized when possible; if canonicalization throws an
-- IOException (e.g. for a remote url) the original string is used
-- unchanged.  The result of the RTorrent call is discarded.
addPath :: String -> IO ()
addPath url = do
  path <- canonicalizePath url
            `catch` (\(_ :: IOException) ->
              return url)
  _ <- call $ loadStartTorrent path
  return ()
-- | Write a "cd" command for the last matching torrent's base
-- directory into the shell helper file; do nothing when there are no
-- matches.
changeDir :: String -> [(a, TorrentInfo :*: b)] -> IO ()
changeDir shFilePath torrents =
  case reverse torrents of
    [] -> return ()
    (_, info :*: _) : _ ->
      writeFile shFilePath ("cd '" <> torrentDir info <> "'\n")
-- | Write a command line that runs @program args@ on the first file of
-- the last matching torrent into the shell helper file; do nothing
-- when no torrent with at least one file matches the pattern.
exec :: String -> [String] -> String -> [(a, TorrentInfo :*: [FileInfo] :*: b)] -> IO ()
exec program args shFilePath torrents =
  case reverse torrents of
    (_, info :*: (firstFile : _) :*: _) : _ ->
      let fullPath = torrentDir info <> "/" <> filePath firstFile
      in writeFile shFilePath
           (unwords (program : args) <> " '" <> fullPath <> "'")
    _ -> return ()
-- | Apply a torrent action to every listed torrent in one RTorrent
-- call, discarding the results.
commandOn :: (TorrentId -> TorrentAction Int) -> [(a, TorrentInfo :*: b)] -> IO ()
commandOn act torrents = void (call (map mkAction torrents))
  where
    mkAction (_, info :*: _) = act (torrentId info)
-- | Decide whether file lists must be fetched: either they will be
-- displayed, or the pending command is "exec" (which needs the first
-- file path).
filesRequired :: Bool -> Opts -> Bool
filesRequired doShow opts
  | doShow && showFiles opts = True
  | otherwise = isExec (cmd opts)
  where
    isExec (Just (CExec _ _)) = True
    isExec _ = False
-- | Unwrap an 'Either', turning a Left into an IO exception.
getRight :: Either String a -> IO a
getRight = either (throwIO . ErrorCall) return
main :: IO ()
main = do
  -- Reset the shell helper file before anything else.
  homeDir <- getHomeDirectory
  let shFilePath = homeDir ++ "/" ++ shFile
  writeFile shFilePath ""
  let parserOpts = info (helper <*> parse)
        ( fullDesc
        <> header "RC - rtorrent cli remote control"
        <> footer ("Use environment variables RT_HOST and RT_PORT to "
            ++ "set host and port, the defaults are localhost and 5000. "
            ++ "Note that the commands act on all matching torrents.")
        )
  opts <- execParser parserOpts
  -- Colors are used when forced or when stdout is a terminal.
  colorize <- (forceColor opts ||) <$> hIsTerminalDevice stdout
  -- First fetch only ids and names; filter by name before numbering
  -- and by numeric id after numbering, so ids refer to the name-filtered list.
  torrentNames <- getRight =<<
      call (allTorrents (getTorrentId <+> getTorrentName))
  let beforeFilter = checkName (map toLower $ nameFilt opts)
  let afterFilter = idFilt opts . fst
  let torrentsToGet = filter afterFilter
                    . zip [1..]
                    . filter beforeFilter
                    $ torrentNames
  -- Render output when verbose or when no command was given, but never
  -- when quiet.
  let doShow = (verbose opts || isNothing (cmd opts)) && (not $ quiet opts)
  -- Only fetch files/chunks when they will actually be used.
  let getFiles = if filesRequired doShow opts
        then getTorrentFiles
        else pureAction []
  let getChunks = if showChunks opts
        then getTorrentChunks <+> getTorrentChunkSize
        else pureAction Nothing <+> pureAction 0
  let getData = getTorrent
        <+> getFiles
        <+> getChunks
  -- Second RTorrent call: fetch the full data for the filtered ids.
  torrents <- getRight =<< call (
      map (\(k, tId :*: _) -> (\d -> (k, d)) <$> getData tId) torrentsToGet)
  let renderOpts = RenderOpts {
        colorize = colorize
      , renderFiles = showFiles opts
      , renderChunks = showChunks opts
      }
  when doShow $
    mapM_ (renderTorrent renderOpts)
      torrents
  -- Finally run the requested command, if any, on the matches.
  case cmd opts of
    Nothing -> return ()
    Just c ->
      case c of
        CLoad url -> addPath url
        CCD -> changeDir shFilePath torrents
        CExec prg args -> exec prg args shFilePath torrents
        CStart -> commandOn start torrents
        CStop -> commandOn closeStop torrents
        CDelete -> commandOn erase torrents
| megantti/rtorrent-cli | Main.hs | mit | 7,653 | 0 | 18 | 2,564 | 2,410 | 1,211 | 1,199 | 201 | 9 |
module ReverseList.ReverseRecursively where
-- | Reverse a list of Ints.
--
-- Uses an accumulator so the whole reversal is O(n); the previous
-- version appended with (++) on every step, which is O(n^2), and
-- carried a redundant single-element clause.
revR :: [Int] -> [Int]
revR = go []
  where
    -- Move the head onto the accumulator until the input is exhausted.
    go acc []     = acc
    go acc (x:xs) = go (x : acc) xs
module Text.Pandoc.CrossRef.Util.Options (Options(..)) where
import Text.Pandoc.Definition
import Text.Pandoc.CrossRef.Util.Template
-- | All pandoc-crossref settings gathered in one record.
data Options = Options { -- Feature toggles.
                         cref :: Bool
                       , chaptersDepth :: Int
                       , listings :: Bool
                       , codeBlockCaptions :: Bool
                       , autoSectionLabels :: Bool
                         -- Prefix builders: capitalization flag and index to inlines.
                       , figPrefix :: Bool -> Int -> [Inline]
                       , eqnPrefix :: Bool -> Int -> [Inline]
                       , tblPrefix :: Bool -> Int -> [Inline]
                       , lstPrefix :: Bool -> Int -> [Inline]
                       , secPrefix :: Bool -> Int -> [Inline]
                         -- Templates the prefixes are rendered from.
                       , figPrefixTemplate :: Template
                       , eqnPrefixTemplate :: Template
                       , tblPrefixTemplate :: Template
                       , lstPrefixTemplate :: Template
                       , secPrefixTemplate :: Template
                         -- Delimiters used when rendering references.
                       , chapDelim :: [Inline]
                       , rangeDelim :: [Inline]
                         -- Titles for the generated list-of-X sections.
                       , lofTitle :: [Block]
                       , lotTitle :: [Block]
                       , lolTitle :: [Block]
                       , outFormat :: Maybe Format
                         -- Templates for captioned elements.
                       , figureTemplate :: Template
                       , subfigureTemplate :: Template
                       , subfigureChildTemplate :: Template
                       , ccsTemplate :: Template
                       , tableTemplate :: Template
                       , listingTemplate :: Template
                       , customLabel :: String -> Int -> Maybe String
                       , ccsDelim :: [Inline]
                       , ccsLabelSep :: [Inline]
                       , tableEqns :: Bool
                       }
| infotroph/pandoc-crossref | lib/Text/Pandoc/CrossRef/Util/Options.hs | gpl-2.0 | 1,810 | 0 | 11 | 906 | 317 | 202 | 115 | 34 | 0 |
{- |
Module : ./GUI/GtkAddSentence.hs
Description : Gtk GUI for adding a sentence
Copyright : (c) C. Maeder DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable (imports existential types)
This module provides a GUI to add a sentence
-}
module GUI.GtkAddSentence where
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Glade
import GUI.GtkUtils
import qualified GUI.Glade.TextField as TextField
import GUI.GraphTypes
import GUI.GraphLogic
import Interfaces.Utils
import Static.DevGraph
import Static.GTheory
import Static.FromXmlUtils (BasicExtResponse (..), extendByBasicSpec)
import Common.GlobalAnnotations
import Control.Monad
import Data.IORef
-- | Open a small Gtk window letting the user type a new sentence for
-- node @n@ of the development graph.  "add" runs 'anaSentence' on the
-- input and closes the window when it reports success; "abort" just
-- closes the window.
gtkAddSentence :: GInfo -> Int -> DGraph -> IO ()
gtkAddSentence gi n dg = postGUIAsync $ do
  xml <- getGladeXML TextField.get
  -- get objects
  window <- xmlGetWidget xml castToWindow "TextField"
  btnAbort <- xmlGetWidget xml castToButton "abort"
  btnAdd <- xmlGetWidget xml castToButton "add"
  entry <- xmlGetWidget xml castToEntry "entry"
  let lbl = labDG dg n
      name = getDGNodeName lbl
  windowSetTitle window $ "Add sentence to " ++ name
  onClicked btnAbort $ widgetDestroy window
  onClicked btnAdd $ do
    sen <- entryGetText entry
    abort <- anaSentence gi (globalAnnos dg) n lbl sen
    when abort $ widgetDestroy window
  widgetShow window
-- | Show an error dialog with the given message, then return the
-- supplied abort flag unchanged.
errorFeedback :: Bool -> String -> IO Bool
errorFeedback abort msg = do
  errorDialog "Error" msg
  return abort
-- | Try to extend the node's theory with the entered sentence.
-- Exactly one new sentence with an unchanged signature is required;
-- other outcomes are reported via 'errorFeedback'.  Returns True when
-- the dialog may close (success, or a 'Failure' whose flag is True).
anaSentence :: GInfo -> GlobalAnnos -> Int -> DGNodeLab -> String -> IO Bool
anaSentence gi ga n lbl sen = case extendByBasicSpec ga sen $ dgn_theory lbl of
  (Success gTh num _ sameSig, str)
    | not sameSig -> errorFeedback False $ "signature must not change\n" ++ str
    | num < 1 -> errorFeedback False "no sentence recognized"
    | num > 1 -> errorFeedback False $ "multiple sentences recognized\n" ++ str
    | otherwise -> do
        addSentence gi n lbl gTh
        return True
  (Failure b, str) -> errorFeedback b str
-- | Store the extended theory for the node in the shared interface
-- state ('intState') and redraw the graph with the resulting history.
addSentence :: GInfo -> Int -> DGNodeLab -> G_theory -> IO ()
addSentence gi n lbl th = do
  let ln = libName gi
      iSt = intState gi
  ost <- readIORef iSt
  let (ost', hist) = updateNodeProof ln ost (n, lbl) th
  writeIORef iSt ost'
  runAndLock gi $ updateGraph gi hist
| gnn/Hets | GUI/GtkAddSentence.hs | gpl-2.0 | 2,383 | 0 | 14 | 495 | 662 | 319 | 343 | 51 | 2 |
---------------------------------------------------------------------
-- File: Parse.hs
-- Author: Drahoslav Zan
-- Date: Jun 01 2012
-- Project: Symbolic Expression Formating in ASCII (SEFA)
---------------------------------------------------------------------
-- Copyright (C) 2012 Drahoslav Zan
--
-- This file is part of SEFA.
--
-- SEFA is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- SEFA is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with SEFA. If not, see <http:--www.gnu.org/licenses/>.
---------------------------------------------------------------------
-- vim: set nowrap sw=2 ts=2
module Parse
( parseExp
) where
import Expression
import Control.Monad
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import Text.ParserCombinators.Parsec.Language
import qualified Text.ParserCombinators.Parsec.Token as Token
-- Token parsers built from a minimal language definition that only
-- reserves the arithmetic operators.
lexer = Token.makeTokenParser languageDef

languageDef = emptyDef { Token.reservedOpNames = ["+", "-", "*", "/", "^"] }

-- Convenience aliases for the generated token parsers.
identifier = Token.identifier lexer
reservedOp = Token.reservedOp lexer
parens = Token.parens lexer
integer = Token.integer lexer
whiteSpace = Token.whiteSpace lexer
-- | Expression parser over the operator table below.
expression :: Parser Exp
expression = buildExpressionParser ops term
  where
    -- Operator table, strongest binding first.  Multiplication and
    -- division share one precedence level, as do addition and
    -- subtraction: the previous table placed "*" above "/" and "+"
    -- above "-", so e.g. "1 - 2 + 3" parsed as 1 - (2 + 3) instead of
    -- the conventional (1 - 2) + 3.
    ops =
      [ [Prefix (reservedOp "-" >> return Minus)]
      , [Infix (reservedOp "^" >> return Pow) AssocLeft]
      , [ Infix (reservedOp "*" >> return Mul) AssocLeft
        , Infix (reservedOp "/" >> return Frac) AssocLeft
        ]
      , [ Infix (reservedOp "+" >> return Add) AssocLeft
        , Infix (reservedOp "-" >> return Sub) AssocLeft
        ]
      ]
    -- A term is a parenthesised expression, an identifier, or an
    -- integer literal.
    term = parens expression
       <|> liftM Id identifier
       <|> liftM Const integer
-- | Top-level parser: skip leading whitespace, then parse an expression.
expr :: Parser Exp
expr = do
  whiteSpace
  expression
-- | Parse a textual expression into an 'Exp', rendering any parse
-- error as a String.
parseExp :: String -> Either String Exp
parseExp input = either (Left . show) Right (parse expr "" input)
| drahoslavzan/PrettyExpression | Parse.hs | gpl-3.0 | 2,332 | 6 | 12 | 423 | 438 | 242 | 196 | 34 | 2 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
module System.UtilsBoxSpec.CoreTypes where
-- | Coproduct of two functors: a value comes from either the left
-- functor ('Inl') or the right one ('Inr').
data (f :+: g) e = Inl (f e) | Inr (g e)
infixr 6 :+:
-- Map over whichever side of the sum is present.
instance (Functor f, Functor g) => Functor (f :+: g) where
  fmap fn (Inl lhs) = Inl (fmap fn lhs)
  fmap fn (Inr rhs) = Inr (fmap fn rhs)
-- | Subsumption: @f :<: g@ means an @f@ value can be injected into @g@.
class (Functor f, Functor g) => (:<:) f g where
  inject :: f a -> g a

-- A functor injects into itself trivially.
instance Functor f => f :<: f where
  inject = id

-- Inject into the left component of a coproduct.
instance (Functor f, Functor g) => f :<: (f :+: g) where
  inject = Inl

-- Otherwise recurse into the right component; OVERLAPPABLE so the
-- more specific instances above are preferred.
instance {-# OVERLAPPABLE #-} (Functor f, Functor g, Functor h, f :<: g) => f :<: (h :+: g) where
  inject = Inr . inject
| changlinli/utilsbox | System/UtilsBoxSpec/CoreTypes.hs | gpl-3.0 | 699 | 4 | 11 | 167 | 272 | 146 | 126 | 18 | 0 |
{-
This file is part of TAE.
TAE is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
TAE is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TAE. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) 2015 Chad Dettmering, Katie Jurek
Authors:
Chad Dettmering chad.dettmering@gmail.com
Katie Jurek admin@katiejurek.com
-}
module IO where
import Command
import World
import Look
import Room
import Person
import qualified Data.List as L
{-
 - Given the player input as a [String], parse the input and execute
 - the corresponding command, if any.  Unrecognised input leaves the
 - world untouched.
 -}
parse :: [String] -> World -> World
parse ("go":"to":room) world = go (unwords room) world
parse ("go":room) world = go (unwords room) world
parse ("pick":"up":obj) world = pickup (unwords obj) world
parse ("pick":obj) world = pickup (unwords obj) world
parse ("look":"at":thing) world = look (unwords thing) world
parse ("look":thing) world = look (unwords thing) world
parse ("talk":"to":person) world = talk (unwords person) world
parse ("talk":person) world = talk (unwords person) world
parse _ world = world
| cdettmering/tae | IO.hs | gpl-3.0 | 1,482 | 0 | 8 | 282 | 327 | 171 | 156 | 17 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AppEngine.Apps.Services.Versions.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the specified Version resource. By default, only a BASIC_VIEW will
-- be returned. Specify the FULL_VIEW parameter to get the full resource.
--
-- /See:/ <https://cloud.google.com/appengine/docs/admin-api/ App Engine Admin API Reference> for @appengine.apps.services.versions.get@.
module Network.Google.Resource.AppEngine.Apps.Services.Versions.Get
(
-- * REST Resource
AppsServicesVersionsGetResource
-- * Creating a Request
, appsServicesVersionsGet
, AppsServicesVersionsGet
-- * Request Lenses
, asvgXgafv
, asvgUploadProtocol
, asvgAccessToken
, asvgUploadType
, asvgVersionsId
, asvgAppsId
, asvgView
, asvgServicesId
, asvgCallback
) where
import Network.Google.AppEngine.Types
import Network.Google.Prelude
-- | A resource alias for @appengine.apps.services.versions.get@ method which the
-- 'AppsServicesVersionsGet' request conforms to.
-- The path captures three ids (app, service, version); the query
-- string carries the standard Google API parameters.
type AppsServicesVersionsGetResource =
     "v1" :>
       "apps" :>
         Capture "appsId" Text :>
           "services" :>
             Capture "servicesId" Text :>
               "versions" :>
                 Capture "versionsId" Text :>
                   QueryParam "$.xgafv" Xgafv :>
                     QueryParam "upload_protocol" Text :>
                       QueryParam "access_token" Text :>
                         QueryParam "uploadType" Text :>
                           QueryParam "view" AppsServicesVersionsGetView :>
                             QueryParam "callback" Text :>
                               QueryParam "alt" AltJSON :> Get '[JSON] Version
-- | Gets the specified Version resource. By default, only a BASIC_VIEW will
-- be returned. Specify the FULL_VIEW parameter to get the full resource.
--
-- /See:/ 'appsServicesVersionsGet' smart constructor.
-- Fields are documented by the correspondingly-named lenses below.
data AppsServicesVersionsGet =
  AppsServicesVersionsGet'
    { _asvgXgafv :: !(Maybe Xgafv)
    , _asvgUploadProtocol :: !(Maybe Text)
    , _asvgAccessToken :: !(Maybe Text)
    , _asvgUploadType :: !(Maybe Text)
    , _asvgVersionsId :: !Text
    , _asvgAppsId :: !Text
    , _asvgView :: !(Maybe AppsServicesVersionsGetView)
    , _asvgServicesId :: !Text
    , _asvgCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AppsServicesVersionsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asvgXgafv'
--
-- * 'asvgUploadProtocol'
--
-- * 'asvgAccessToken'
--
-- * 'asvgUploadType'
--
-- * 'asvgVersionsId'
--
-- * 'asvgAppsId'
--
-- * 'asvgView'
--
-- * 'asvgServicesId'
--
-- * 'asvgCallback'
appsServicesVersionsGet
    :: Text -- ^ 'asvgVersionsId'
    -> Text -- ^ 'asvgAppsId'
    -> Text -- ^ 'asvgServicesId'
    -> AppsServicesVersionsGet
appsServicesVersionsGet pAsvgVersionsId_ pAsvgAppsId_ pAsvgServicesId_ =
  -- All optional query parameters start out unset.
  AppsServicesVersionsGet'
    { _asvgXgafv = Nothing
    , _asvgUploadProtocol = Nothing
    , _asvgAccessToken = Nothing
    , _asvgUploadType = Nothing
    , _asvgVersionsId = pAsvgVersionsId_
    , _asvgAppsId = pAsvgAppsId_
    , _asvgView = Nothing
    , _asvgServicesId = pAsvgServicesId_
    , _asvgCallback = Nothing
    }
-- Lenses over the request's query and path parameters.

-- | V1 error format.
asvgXgafv :: Lens' AppsServicesVersionsGet (Maybe Xgafv)
asvgXgafv
  = lens _asvgXgafv (\ s a -> s{_asvgXgafv = a})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
asvgUploadProtocol :: Lens' AppsServicesVersionsGet (Maybe Text)
asvgUploadProtocol
  = lens _asvgUploadProtocol
      (\ s a -> s{_asvgUploadProtocol = a})

-- | OAuth access token.
asvgAccessToken :: Lens' AppsServicesVersionsGet (Maybe Text)
asvgAccessToken
  = lens _asvgAccessToken
      (\ s a -> s{_asvgAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
asvgUploadType :: Lens' AppsServicesVersionsGet (Maybe Text)
asvgUploadType
  = lens _asvgUploadType
      (\ s a -> s{_asvgUploadType = a})

-- | Part of \`name\`. See documentation of \`appsId\`.
asvgVersionsId :: Lens' AppsServicesVersionsGet Text
asvgVersionsId
  = lens _asvgVersionsId
      (\ s a -> s{_asvgVersionsId = a})

-- | Part of \`name\`. Name of the resource requested. Example:
-- apps\/myapp\/services\/default\/versions\/v1.
asvgAppsId :: Lens' AppsServicesVersionsGet Text
asvgAppsId
  = lens _asvgAppsId (\ s a -> s{_asvgAppsId = a})

-- | Controls the set of fields returned in the Get response.
asvgView :: Lens' AppsServicesVersionsGet (Maybe AppsServicesVersionsGetView)
asvgView = lens _asvgView (\ s a -> s{_asvgView = a})

-- | Part of \`name\`. See documentation of \`appsId\`.
asvgServicesId :: Lens' AppsServicesVersionsGet Text
asvgServicesId
  = lens _asvgServicesId
      (\ s a -> s{_asvgServicesId = a})

-- | JSONP
asvgCallback :: Lens' AppsServicesVersionsGet (Maybe Text)
asvgCallback
  = lens _asvgCallback (\ s a -> s{_asvgCallback = a})
-- The argument order passed to 'go' must match the shape of
-- 'AppsServicesVersionsGetResource'.
instance GoogleRequest AppsServicesVersionsGet where
  type Rs AppsServicesVersionsGet = Version
  type Scopes AppsServicesVersionsGet =
    '["https://www.googleapis.com/auth/appengine.admin",
      "https://www.googleapis.com/auth/cloud-platform",
      "https://www.googleapis.com/auth/cloud-platform.read-only"]
  requestClient AppsServicesVersionsGet'{..}
    = go _asvgAppsId _asvgServicesId _asvgVersionsId
        _asvgXgafv
        _asvgUploadProtocol
        _asvgAccessToken
        _asvgUploadType
        _asvgView
        _asvgCallback
        (Just AltJSON)
        appEngineService
    where go
            = buildClient
                (Proxy :: Proxy AppsServicesVersionsGetResource)
                mempty
| brendanhay/gogol | gogol-appengine/gen/Network/Google/Resource/AppEngine/Apps/Services/Versions/Get.hs | mpl-2.0 | 6,532 | 0 | 21 | 1,516 | 947 | 551 | 396 | 140 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Method.OAuth2.TokenInfo
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- /See:/ <https://developers.google.com/identity/protocols/oauth2/ Google OAuth2 API Reference> for @oauth2.tokeninfo@.
module Network.Google.Method.OAuth2.TokenInfo
(
-- * REST Resource
TokenInfoMethod
-- * Creating a Request
, tokenInfo'
, TokenInfo'
-- * Request Lenses
, tiAccessToken
, tiIdToken
) where
import Network.Google.OAuth2.Types
import Network.Google.Prelude
-- | A resource alias for @oauth2.tokeninfo@ method which the
-- 'TokenInfo'' request conforms to.
-- Both token parameters are optional query parameters on a POST.
type TokenInfoMethod =
     "oauth2" :>
       "v2" :>
         "tokeninfo" :>
           QueryParam "access_token" Text :>
             QueryParam "id_token" Text :>
               QueryParam "alt" AltJSON :> Post '[JSON] TokenInfo
-- Request record for the tokeninfo method; both fields are optional
-- query parameters.
--
-- /See:/ 'tokenInfo'' smart constructor.
data TokenInfo' =
  TokenInfo''
    { _tiAccessToken :: !(Maybe Text)
    , _tiIdToken :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TokenInfo'' with the minimum fields required to make a request.
-- Both fields default to 'Nothing'.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tiAccessToken'
--
-- * 'tiIdToken'
tokenInfo'
    :: TokenInfo'
tokenInfo' = TokenInfo'' {_tiAccessToken = Nothing, _tiIdToken = Nothing}
-- | Lens for the @access_token@ query parameter.
tiAccessToken :: Lens' TokenInfo' (Maybe Text)
tiAccessToken
  = lens _tiAccessToken
      (\ s a -> s{_tiAccessToken = a})

-- | Lens for the @id_token@ query parameter.
tiIdToken :: Lens' TokenInfo' (Maybe Text)
tiIdToken
  = lens _tiIdToken (\ s a -> s{_tiIdToken = a})
-- The request carries no OAuth scopes; argument order matches
-- 'TokenInfoMethod'.
instance GoogleRequest TokenInfo' where
  type Rs TokenInfo' = TokenInfo
  type Scopes TokenInfo' = '[]
  requestClient TokenInfo''{..}
    = go _tiAccessToken _tiIdToken (Just AltJSON)
        oAuth2Service
    where go
            = buildClient (Proxy :: Proxy TokenInfoMethod) mempty
| brendanhay/gogol | gogol-oauth2/gen/Network/Google/Method/OAuth2/TokenInfo.hs | mpl-2.0 | 2,586 | 0 | 13 | 582 | 379 | 224 | 155 | 55 | 1 |
module Handler.Home where
import Handler.Ratings
import Import
import Forms
import Data.CountryCodes
import Data.Double.Conversion.Text
import Yesod.Form.Bootstrap3
-- | Main page.  Anonymous visitors get a login prompt; logged-in users
-- get the demographics form (until it has been answered) plus the
-- ratings box, which starts hidden while the form is pending.
getHomeR :: Handler Html
getHomeR = do
  mauth <- maybeAuth
  whatDo <- case mauth of
    Nothing -> return pleaseLoginW
    Just userEnt -> do
      -- A user is expected to have zero or one demographics rows.
      demoCount <- runDB $
        count [Filter UserDemographicsUser (Left $ entityKey userEnt) Eq]
      (formW, _) <- generateFormPost demoForm
      ratings <- getRatings
      return $ do
        loggedInW (entityVal userEnt)
        case demoCount of
          1 -> ratingsBoxW Visible ratings
          0 -> demoFormW formW >> ratingsBoxW Hidden ratings
          _ -> error
            "impossible: more than one UserDemographics for a given user"
  defaultLayout $ do
    setTitle "Home"
    $(widgetFile "homepage")
-- | Prompt shown to anonymous visitors, linking to the login route.
pleaseLoginW :: Widget
pleaseLoginW = [whamlet|
<p>
  You need to <a href=@{AuthR LoginR}>log in via Google to participate.</a>
  Don't worry, we'll keep your answers anonymous.|]
-- | Banner for logged-in users, showing their email and a logout link.
loggedInW :: User -> Widget
loggedInW u = [whamlet|
<p>
  Great, you're logged in as #{userEmail u}. Not you?
  <a href=@{AuthR LogoutR}>Log out.</a>|]
-- | Wrap the generated demographics form fields in a form element.
demoFormW :: Widget -> Widget
demoFormW innerForm = [whamlet|
<form id=demoForm>
  We need some quick demographic information before we start:
  ^{innerForm}
  <.error-container>|]
-- | One-time explanation of quadratic voting, dismissed via the button.
explainBoxW :: Widget
explainBoxW = [whamlet|
<#explainBox .container-fluid>
  <p>
    The Project uses something called "quadratic voting". The idea is to get
    more accurate expressions of your opinions by making you prioritize. For
    every program you rate, twenty-five points are added to your total rating
    budget. Giving something a rating costs as many points as the rating
    squared, so rating something one costs one point, rating something
    two costs four points and so on. Both positive and negative ratings work.
  <p>
    Feel free to play with the entries below, then hit the button to dismiss
    this box and get started.
  <button #explanationButton .btn .btn-md .btn-block .btn-default>Got it</button>|]
-- | Initial visibility of the ratings box (hidden while the
-- demographics form is still pending).
data Hidden = Hidden | Visible
-- | True exactly when the box should start hidden.
hidden :: Hidden -> Bool
hidden state = case state of
  Hidden  -> True
  Visible -> False
-- NOTE(review): this type is not referenced anywhere in the visible
-- module code — possibly used elsewhere, or dead; verify before removing.
data NeedsExplanation = NeedsExplanation | AlreadyExplained deriving Eq
-- | Placeholder ratings shown to users who have not rated anything yet.
exampleRatings :: [(Text, Int)]
exampleRatings = [("terri.bl", -6), ("Tolerable Pro 3", 1), ("Shootymans 4", 5)]
-- | The quadratic-voting ratings table.  When the user has no ratings
-- yet, the example data and explanation box are shown instead of real
-- ratings, and a blank row is always appended for new entries.
ratingsBoxW :: Hidden -> Ratings -> Widget
ratingsBoxW hide Ratings{..} =
  let noRatings = null ratingsRatings
      ratingsList =
        (map addCostAndEditable $ if noRatings then exampleRatings else ratingsRatings)
        ++
        [("", 0, 0, True)] -- blank line for new entries
      -- Cost of a rating is its square (quadratic voting).
      addCostAndEditable (name, score) = (name, score, score ^ (2 :: Int), False)
      ptsSpent = if noRatings
        then (sum $ map ((^(2::Int)) . snd) exampleRatings)
        else ratingsPtsSpent
      -- Each rated program adds 25 points to the budget.
      totalBudget = if noRatings
        then (length exampleRatings * 25)
        else ratingsTotalBudget
      averageSpent =
        toFixed 2 ((fromIntegral ptsSpent :: Double) / fromIntegral (length ratingsList))
  in [whamlet|
<#ratingsBox :hidden hide:style="display: none">
  $if noRatings
    ^{explainBoxW}
  <table .table .table-bordered #points-table>
    <thead>
      <tr>
        <td>Total budget
        <td>Points remaining
        <td>Average spent per program
    <tbody>
      <tr>
        <td #total-budget>#{totalBudget}
        <td #available-points>#{totalBudget - ptsSpent}
        <td #average-points-spent>#{averageSpent}
  <table .table #ratings-table>
    <thead>
      <tr>
        <td .rating-btn-col>
        <td .program-name-col .program-name>Program name
        <td .rating-btn-col>
        <td .score-col>Score
        <td .cost-col>Cost
        <td .rating-btn-col>
    <tbody>
      $forall (name, score, cost, editable) <- ratingsList
        <tr :not editable && noRatings:.example-data>
          <td>
            <button .btn-minus .btn-score>
          <td .program-name>
            <input class="program-name" type="text" value="#{name}" :not editable:readonly>
          <td>
            <button .btn-plus .btn-score>
          <td>
            <span .score>#{score}
          <td>
            <span .cost>#{cost}
          <td>
            <button .btn-score .btn-delete>
|]
-- | Select field over the country codes provided by 'countryList'.
countryField :: Field Handler CountryCode
countryField = selectFieldList countryList
-- | Demographics form: birth year, gender, country of residence and a
-- programmer flag, rendered with basic Bootstrap 3 styling.
demoForm :: Form (UTCTime, Text, CountryCode, Bool)
demoForm = renderBootstrap3 BootstrapBasicForm $ (,,,) <$>
  areq yearField (bfs' "Year of birth") Nothing <*>
  areq textField (bfs' "Gender") Nothing <*>
  areq countryField (bfs' "Country of residence") Nothing <*>
  areq bsBoolField (bfs' "Are you a computer programmer?") Nothing <*
  bootstrapSubmit ("Submit" :: BootstrapSubmit Text)
-- | Handle the demographics form POST.  Requires a logged-in user;
-- a second submission for the same user is rejected (insertBy returns
-- Left for an existing row).
postDemoFormR :: Handler ()
postDemoFormR = do
  ((formData, _), _) <- runFormPost $ demoForm
  mauth <- maybeAuth
  case formData of
    FormSuccess (birthYear, gender, residence, programmer) ->
      case mauth of
        Nothing -> permissionDenied "Login to add demographic information"
        Just uid -> do
          res <- runDB $ insertBy $
            UserDemographics {userDemographicsUser = entityKey uid
                             ,userDemographicsBirthYear = birthYear
                             ,userDemographicsGender = gender
                             ,userDemographicsResidence = residence
                             ,userDemographicsProgrammer = programmer}
          case res of
            Left _ -> invalidArgs ["User already has demographics"]
            Right _ -> return ()
    -- Any form failure (or missing submission) is a bad request.
    _ -> invalidArgs []
| enolan/emp-pl-site | Handler/Home.hs | agpl-3.0 | 5,719 | 0 | 20 | 1,535 | 952 | 513 | 439 | -1 | -1 |
-- | Render a LispVal as Scheme source text.
showVal :: LispVal -> String
showVal val = case val of
  String s        -> "\"" ++ s ++ "\""
  Atom name       -> name
  Number n        -> show n
  Bool True       -> "#t"
  Bool False      -> "#f"
  List xs         -> "(" ++ unwordsList xs ++ ")"
  DottedList xs x -> "(" ++ unwordsList xs ++ " . " ++ showVal x ++ ")"
-- | Space-separate the rendered elements of a list of values.
unwordsList :: [LispVal] -> String
unwordsList vals = unwords (map showVal vals)
-- Display values as Scheme forms via the custom printer.
instance Show LispVal where show = showVal
| chenchenhz/first_haskell | haskell_parser_tutorial/interpreter.hs | agpl-3.0 | 472 | 0 | 9 | 83 | 191 | 95 | 96 | 11 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -Wno-unused-top-binds #-}
-- A number of standard aggregation functions.
module Spark.Core.Internal.Groups(
GroupData,
LogicalGroupData,
-- Typed functions
groupByKey,
mapGroup,
aggKey,
groupAsDS
-- Developer
) where
import qualified Data.Text as T
import qualified Data.Vector as V
import Formatting
import Debug.Trace(trace)
import Spark.Core.Internal.DatasetStructures
import Spark.Core.Internal.ColumnStructures
import Spark.Core.Internal.ColumnFunctions(untypedCol, colType, colOp, iUntypedColData, castTypeCol, dropColReference, genColOp)
import Spark.Core.Internal.DatasetFunctions
import Spark.Core.Internal.LocalDataFunctions()
import Spark.Core.Internal.FunctionsInternals
import Spark.Core.Internal.TypesFunctions(tupleType, structTypeFromFields)
import Spark.Core.Internal.OpStructures
import Spark.Core.Internal.TypesStructures
import Spark.Core.Internal.Utilities
import Spark.Core.Internal.RowStructures(Cell)
import Spark.Core.Try
import Spark.Core.StructuresInternal
import Spark.Core.Internal.CanRename
-- import Spark.Proto.Std(Shuffle(..))
{-| A dataset that has been partitioned according to some given field.

The @key@ and @val@ type parameters are phantom: they do not appear in
any field and only track the element types at the type level.
-}
data GroupData key val = GroupData {
  -- The dataset of reference for this group
  _gdRef :: !UntypedDataset,
  -- The columns used to partition the data by keys.
  _gdKey :: !GroupColumn,
  -- The columns that contain the values.
  _gdValue :: !GroupColumn
}
-- | The (possibly failed) result of building an untyped group.
type LogicalGroupData = Try UntypedGroupData

-- A column in a group, that can be used either for key or for values.
-- It is different from the column data, because it does not include
-- broadcast data.
data GroupColumn = GroupColumn {
  _gcType :: !DataType,
  _gcOp :: !ColOp,
  _gcRefName :: !(Maybe FieldName)
} deriving (Eq, Show)

{-| (developer)
A group data type with no typing information.
-}
type UntypedGroupData = GroupData Cell Cell
{-| Performs a logical group of data based on a key.
-}
-- NOTE(review): forceRight presumably crashes on a failed cast/group
-- (note the HasCallStack constraint) — consider a Try-returning variant.
groupByKey :: (HasCallStack) => Column ref key -> Column ref val -> GroupData key val
groupByKey keys vals = forceRight $ _castGroup (colType keys) (colType vals) =<< _groupByKey (iUntypedColData keys) (iUntypedColData vals)
{-| Transforms the values in a group.
-}
-- This only allows direct transforms, so it is probably valid in all cases.
mapGroup :: GroupData key val -> (forall ref. Column ref val -> Column ref val') -> GroupData key val'
mapGroup g f =
  let c = _valueCol g
      c' = f (_unsafeCastColData c)
      -- Assume for now that there is no broadcast.
      -- TODO: deal with broadcast eventually
      gVals = forceRight $ _groupCol c'
  -- Only the value column is replaced; key and reference are kept.
  in g { _gdValue = gVals }
{-| Given a group and an aggregation function, aggregates the data.
Note: not all the reduction functions may be used in this case. The analyzer
will fail if the function is not universal.
-}
-- TODO: it should be a try, this can fail
-- NOTE(review): the trace/traceHint calls below are debugging residue
-- that fires on every invocation — consider removing before release.
aggKey :: (HasCallStack) => GroupData key val -> (forall ref. Column ref val -> LocalData val') -> Dataset (key, val')
aggKey gd f = trace "aggKey" $
  let ugd = _untypedGroup gd
      keyt = traceHint "aggKey: keyt: " $ mapGroupKeys gd colType
      valt = traceHint "aggKey: valt: " $ mapGroupValues gd colType
      -- We call the function twice: the first one to recover the type info,
      -- and the second time to perform the unrolling.
      -- TODO we should be able to do it in one pass instead.
      fOut = traceHint "aggKey: fOut: " $ f (mapGroupValues gd dropColReference)
      valt' = traceHint "aggKey: valt': " $ nodeType fOut
      t = traceHint "aggKey: t: " $ tupleType keyt valt'
      f' c = untypedLocalData . f <$> castTypeCol valt c
      tud = traceHint "aggKey: tud: " $ _aggKey ugd f'
      res = castType' t tud
  in forceRight res
{-| Returns the collapsed representation of a grouped dataset, discarding group
information.
-}
groupAsDS :: forall key val. GroupData key val -> Dataset (key, val)
groupAsDS g = pack s where
  c1 = _unsafeCastColData (_keyCol g) :: Column UnknownReference key
  c2 = _unsafeCastColData (_valueCol g) :: Column UnknownReference val
  -- Key and value are packed side by side into one struct column.
  s = struct (c1, c2) :: Column UnknownReference (key, val)

-- | Applies a (rank-2) function to the key column of a group.
mapGroupKeys :: GroupData key val -> (forall ref. Column ref key -> a) -> a
mapGroupKeys gd f =
  f (_unsafeCastColData (_keyCol gd))

-- | Applies a (rank-2) function to the value column of a group.
mapGroupValues :: GroupData key val -> (forall ref. Column ref val -> a) -> a
mapGroupValues gd f =
  f (_unsafeCastColData (_valueCol gd))

-- ******** INSTANCES ***********

instance Show (GroupData key val) where
  show gd = T.unpack s where
    s = sformat ("GroupData[key="%sh%", val="%sh%"]") (_gdKey gd) (_gdValue gd)
-- ******** PRIVATE METHODS ********

-- | Reconstructs the key as an untyped column over the reference dataset.
_keyCol :: GroupData key val -> UntypedColumnData
_keyCol gd = ColumnData {
    _cOrigin = _gdRef gd,
    _cType = _gcType (_gdKey gd),
    _cOp = genColOp . _gcOp . _gdKey $ gd,
    _cReferingPath = _gcRefName . _gdKey $ gd
  }

-- | Reconstructs the values as an untyped column over the reference dataset.
_valueCol :: GroupData key val -> UntypedColumnData
_valueCol gd = ColumnData {
    _cOrigin = _gdRef gd,
    _cType = _gcType (_gdValue gd),
    _cOp = genColOp . _gcOp . _gdValue $ gd,
    _cReferingPath = _gcRefName . _gdValue $ gd
  }
-- dt: output type of the aggregation op
-- | Builds the dataset node for a grouped reduction: packs (key, value)
-- into a struct, then attaches a 'NodeGroupedReduction' node on top of it.
_applyAggOp :: (HasCallStack) => DataType -> AggOp -> UntypedGroupData -> UntypedDataset
_applyAggOp dt ao ugd = traceHint ("_applyAggOp dt=" <> show' dt <> " ao=" <> show' ao <> " ugd=" <> show' ugd <> " res=") $
  -- Reset the names to make sure there are no collision.
  let c1 = untypedCol (_keyCol ugd) @@ T.unpack "_1"
      c2 = untypedCol (_valueCol ugd) @@ T.unpack "_2"
      s = struct' [c1, c2]
      p = pack' s
      ds = forceRight p
      -- The structure of the result dataframe
      keyDt = unSQLType (colType (_keyCol ugd))
      st' = structTypeFromFields [(unsafeFieldName "key", keyDt), (unsafeFieldName "agg", dt)]
      -- The keys are different, so we know that this operation is legit:
      st = forceRight st'
      resDt = SQLType . StrictType . Struct $ st
      ds2 = emptyDataset (NodeGroupedReduction ao) resDt `parents` [untyped ds]
  in ds2
-- TODO: this should be moved to ColumnFunctions
-- | Replaces the column operation of a column (the type is kept unchanged).
_transformCol :: ColOp -> UntypedColumnData -> UntypedColumnData
-- TODO: at this point, it should be checked for correctness (the fields
-- being extracted should exist)
_transformCol co ucd = ucd { _cOp = genColOp co }

-- Takes a column operation and chain it with another column operation.
_combineColOp :: ColOp -> ColOp -> Try ColOp
-- Broadcasts cannot be chained (not implemented).
_combineColOp _ (ColBroadcast _) = missing "_combineColOp: ColBroadcast"
-- Literals ignore the upstream operation.
_combineColOp _ (x @ (ColLit _ _)) = pure x
_combineColOp x (ColFunction fn v _) =
  (\x' -> ColFunction fn x' Nothing) <$> sequence (_combineColOp x <$> v)
_combineColOp x (ColExtraction fp) = _extractColOp x (V.toList (unFieldPath fp))
_combineColOp x (ColStruct v) =
  ColStruct <$> sequence (f <$> v) where
    f (TransformField n val) = TransformField n <$> _combineColOp x val

-- | Drills into a column operation following a path of field names.
_extractColOp :: ColOp -> [FieldName] -> Try ColOp
_extractColOp x [] = pure x
_extractColOp (ColStruct s) (fn : t) =
  case V.find (\x -> tfName x == fn) s of
    Just (TransformField _ co) ->
      _extractColOp co t
    Nothing ->
      tryError $ sformat ("Expected to find field "%sh%" in structure "%sh) fn s
_extractColOp x y =
  tryError $ sformat ("Cannot perform extraction "%sh%" on column operation "%sh) y x

_aggKey :: UntypedGroupData -> (UntypedColumnData -> Try UntypedLocalData) -> Try UntypedDataset
_aggKey = error "_aggKey: not implemented"

-- | Casts a column to arbitrary reference and payload types.
-- Unsafe: no check is performed; the record update is an identity on the data.
_unsafeCastColData :: Column ref a -> Column ref' a'
_unsafeCastColData c = c { _cType = _cType c }
{-| Checks that the group can be cast to the given key and value types.

Succeeds only when both requested types exactly match the group's current
key and value types; otherwise returns a descriptive error.
-}
_castGroup ::
  SQLType key -> SQLType val -> UntypedGroupData -> Try (GroupData key val)
_castGroup (SQLType keyType) (SQLType valType) ugd =
  let keyType' = unSQLType . colType . _keyCol $ ugd
      valType' = unSQLType . colType . _valueCol $ ugd in
  if keyType == keyType'
    then if valType == valType'
      then
        -- Identity record update: only re-tags the phantom types.
        pure ugd { _gdRef = _gdRef ugd }
      else
        tryError $ sformat ("The value column (of type "%sh%") cannot be cast to type "%sh) valType' valType
    else
      -- Bug fix: this branch reports a key-type mismatch, but the message
      -- previously said "value column".
      tryError $ sformat ("The key column (of type "%sh%") cannot be cast to type "%sh) keyType' keyType
-- | Forgets the static key/value types of a group (the payload is unchanged;
-- the record update only re-tags the phantom types).
_untypedGroup :: GroupData key val -> UntypedGroupData
_untypedGroup gd = gd { _gdRef = _gdRef gd }

_groupByKey :: UntypedColumnData -> UntypedColumnData -> LogicalGroupData
_groupByKey = undefined

-- | Builds a group column from a column, rejecting broadcast operations.
_groupCol :: Column ref a -> Try GroupColumn
_groupCol c = do
  co <- colOpNoBroadcast (colOp c)
  return GroupColumn {
      _gcType = unSQLType $ colType c,
      _gcOp = co,
      _gcRefName = Nothing
    }
| tjhunter/karps | haskell/src/Spark/Core/Internal/Groups.hs | apache-2.0 | 8,719 | 1 | 14 | 1,718 | 2,284 | 1,208 | 1,076 | 158 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
-- The communication protocol with the server
module Spark.Core.Internal.Client where
import Data.Text(Text)
import Lens.Family2((^.), (&), (.~))
import Data.Default(def)
import Spark.Core.StructuresInternal
import Spark.Core.Internal.ProtoUtils
import Spark.Core.Internal.RowUtils()
import Spark.Core.Internal.TypesStructures(DataType)
import Spark.Core.Internal.TypesFunctions()
import Spark.Core.Internal.RowStructures(Cell)
import Spark.Core.Internal.BrainFunctions()
import Spark.Core.Internal.BrainStructures(LocalSessionId, ComputeGraph)
import Spark.Core.Try
import qualified Proto.Karps.Proto.Computation as PC
import qualified Proto.Karps.Proto.Interface as PI
{-| The ID of an RDD in Spark.
-}
data RDDId = RDDId {
    unRDDId :: !Int
  } deriving (Eq, Show, Ord)

-- | A computation to be submitted to the server.
data Computation = Computation {
    cSessionId :: !LocalSessionId,
    cId :: !ComputationID,
    cNodes :: !ComputeGraph, -- TODO: check to replace with OperatorNode?
    -- Non-empty
    cTerminalNodes :: ![NodePath],
    -- The node at the top of the computation.
    -- Must be part of the terminal nodes.
    cCollectingNode :: !NodePath,
    -- This redundant information is not serialized.
    -- It is used internally to track the resulting nodes.
    cTerminalNodeIds :: ![NodeId]
  } deriving (Show)

-- | Per-node statuses for one batch computation.
data BatchComputationResult = BatchComputationResult {
    bcrTargetLocalPath :: !NodePath,
    bcrResults :: ![(NodePath, PossibleNodeStatus)]
  } deriving (Show)

-- | Debug information about one Spark RDD.
data RDDInfo = RDDInfo {
    rddiId :: !RDDId,
    rddiClassName :: !Text,
    rddiRepr :: !Text,
    rddiParents :: ![RDDId]
  } deriving (Show)

-- | Spark-side statistics attached to a successful computation item.
data SparkComputationItemStats = SparkComputationItemStats {
    scisRddInfo :: ![RDDInfo]
  } deriving (Show)

-- | Lifecycle of a node on the server.
data PossibleNodeStatus =
    NodeQueued
  | NodeRunning
  | NodeFinishedSuccess !(Maybe NodeComputationSuccess) !(Maybe SparkComputationItemStats)
  | NodeFinishedFailure NodeComputationFailure deriving (Show)

-- | Payload of a successful node computation.
data NodeComputationSuccess = NodeComputationSuccess {
    -- Because Row requires additional information to be deserialized.
    ncsData :: Cell,
    -- The data type is also available, but it is not going to be parsed for now.
    ncsDataType :: DataType
  } deriving (Show)

-- | Failure payload: the server-side error message.
data NodeComputationFailure = NodeComputationFailure {
    ncfMessage :: !Text
  } deriving (Show)
-- | Serializes a computation into the protobuf request sent to the server.
instance ToProto PI.CreateComputationRequest Computation where
  toProto c = (def :: PI.CreateComputationRequest)
      & PI.session .~ toProto (cSessionId c)
      & PI.requestedComputation .~ toProto (cId c)
      & PI.requestedPaths .~ [toProto (cCollectingNode c)]
      & PI.graph .~ toProto (cNodes c)
-- | Decodes one node status from the protobuf message, pairing it with the
-- node's path.
instance FromProto PC.ComputationResult (NodePath, PossibleNodeStatus) where
  fromProto cr = do
    np <- extractMaybe' cr PC.maybe'localPath "local_path"
    case cr ^. PC.status of
      PC.UNUSED -> tryError "FromProto PC.ComputationResult: missing status"
      PC.SCHEDULED ->
        return (np, NodeQueued)
      PC.RUNNING ->
        return (np, NodeRunning)
      PC.FINISHED_SUCCESS -> do
        cwt <- extractMaybe cr PC.maybe'finalResult "final_result"
        (c, dt) <- fromProto cwt
        let ncs = NodeComputationSuccess c dt
        -- TODO: add the spark stats
        return (np, NodeFinishedSuccess (Just ncs) Nothing)
      PC.FINISHED_FAILURE ->
        return (np, NodeFinishedFailure ncf)
    where
      txt = cr ^. PC.finalError
      ncf = NodeComputationFailure txt
-- | Decodes a whole batch: the target path plus every per-node status.
instance FromProto PC.BatchComputationResult BatchComputationResult where
  fromProto bcr = do
    np <- extractMaybe' bcr PC.maybe'targetPath "target_path"
    l <- sequence $ fromProto <$> (bcr ^. PC.results)
    return $ BatchComputationResult np l
| tjhunter/karps | haskell/src/Spark/Core/Internal/Client.hs | apache-2.0 | 3,704 | 0 | 17 | 637 | 870 | 490 | 380 | 117 | 0 |
import System.Environment (getArgs)
import Data.Binary (decodeFile)
import Data.Guesser
import SGD
-- | Loads a binary guesser model and tags the given input file.
--
-- Usage: PROGRAM GUESSER_PATH TAGSET INPUT_PATH
main = do
  args <- getArgs
  case args of
    [guesserPath, tagset, inPath] -> do
      guesser <- decodeFile guesserPath
      tagFile 10 guesser tagset inPath
    -- Fix: a malformed command line previously died with an opaque
    -- pattern-match failure; report the expected usage instead.
    _ -> error "Usage: guess GUESSER_PATH TAGSET INPUT_PATH"
| kawu/crf-guesser | examples/Guess.hs | bsd-2-clause | 231 | 0 | 8 | 41 | 72 | 38 | 34 | 8 | 1 |
{-# LANGUAGE TypeOperators, TypeFamilies, FlexibleContexts, ExplicitForAll, ScopedTypeVariables #-}
module Windows where
import CLaSH.Prelude
import qualified Data.List as L
-- | Element type for stimuli.
type Elm = Unsigned 16

-- | Temperature sample type used by the heat-flow kernel.
type Temp = Signed 32

-- | Rotates a vector one position to the left (the head moves to the back).
vrotate xs = tail xs <: head xs

-- | All @n+1@ sliding windows of size @m+1@, the window size given explicitly.
windowsI :: (KnownNat (n + 1), ((m + n) + 1) ~ ((m + 1) + n))
         => SNat (m + 1)
         -> Vec (m + n + 1) a
         -> Vec (n + 1) (Vec (m + 1) a)
windowsI m xs = withSNat (\n -> windowsV m n xs)

-- | All sliding windows, with both sizes inferred from the result type.
windowsII :: (KnownNat (n + 1), KnownNat (m + 1), ((m + n) + 1) ~ ((m + 1) + n))
          => Vec (m + n + 1) a
          -> Vec (n + 1) (Vec (m + 1) a)
windowsII xs = withSNat (\m -> withSNat (\n -> windowsV m n xs))
-- | The @n+1@ successive left-rotations of the input vector.
rotated :: ((m + n) + 1) ~ ((m + 1) + n)
        => SNat (m + 1) -> SNat (n + 1)
        -> Vec (m + n + 1) a
        -> Vec (n + 1) (Vec ((m + 1) + n) a)
rotated _ n xs = iterate n vrotate xs

-- | Sliding windows: the first @m+1@ elements of each rotation.
windowsV :: ((m + n) + 1) ~ ((m + 1) + n)
         => SNat (m + 1) -> SNat (n + 1)
         -> Vec (m + n + 1) a
         -> Vec (n + 1) (Vec (m + 1) a)
windowsV m n xs = map (take m) (rotated m n xs)

-- | Matrix transpose via per-column indexing.
transpose :: forall r c a . (KnownNat c, KnownNat r) => Vec r (Vec c a) -> Vec c (Vec r a)
transpose m = map (\i -> map (!!i) m) indices
  where
    indices = iterateI (+ 1) 0 :: Vec c (Index r)

-- | Shifts the left vector into the front of the right one, keeping length.
(+>>>) :: KnownNat n => Vec m a -> Vec (n + m) a -> Vec (m + n) a
l +>>> r = l ++ (takeI r)
-- | One step of a 1-D stencil architecture: shifts the two inputs into the
-- state window and applies the kernel to every 3-wide window of the state.
swarch1d ::(Vec 3 Temp -> Temp) -> Vec 4 Temp -> Vec 2 Temp -> (Vec 4 Temp, Vec 2 Temp)
swarch1d f xs inp = (xs', outp)
  where
    xs' = inp +>>> xs
    outp = map f $ windowsII xs

-- hfk1d :: Vec 3 Temp -> Temp
-- hfk1d (x1 :> x2 :> x3 :> Nil) = x2 + multwc (x1 - 2 * x2 + x3)
--   where
--     multwc a = shiftR (a * 410) 10

-- | Heat-flow kernel: second-difference update with a fixed-point weight
-- (multwc multiplies by 410/1024, via shift-right by 10).
hfk1d :: Vec 3 Temp -> Temp
hfk1d xs = x2 + multwc (x1 - 2 * x2 + x3)
  where
    multwc a = shiftR (a * 410) 10
    x1 = xs !! 0
    x2 = xs !! 1
    x3 = xs !! 2

-- | The synthesizable entry point: the stencil as a Mealy machine with an
-- all-zero initial state.
topEntity = (swarch1d hfk1d) `mealy` (repeat 0)

-- | Simulation of the entry point on a constant input stream of 45s.
res :: [Vec 2 Temp]
res = simulate topEntity $ L.repeat (repeat 45)
| christiaanb/clash-compiler | examples/Windows.hs | bsd-2-clause | 1,979 | 0 | 15 | 614 | 1,091 | 572 | 519 | -1 | -1 |
import Lseed.Data
import Lseed.Data.Functions
import Lseed.Grammar.Parse
import Lseed.Constants
import Lseed.Mainloop
import Control.Monad
import Debug.Trace
import System.Environment
import System.Time
import System.Random
import Lseed.Renderer.Cairo
import Data.Maybe
import Graphics.Rendering.Cairo
-- | Reads an L-seed grammar from stdin and either renders a "Syntax Error"
-- placeholder PNG or runs the garden main loop with a PNG observer.
main = do
  args <- getArgs
  -- Optional first argument: the plant name.
  let name = fromMaybe "Some Plant" $ listToMaybe args
  file <- getContents
  case parseGrammar name file of
    Left _ -> do
      -- Parse failure: write a 300x300 "Syntax Error" image to stdout.
      let (w,h) = (300,300)
      withImageSurface FormatRGB24 w h $ \sur -> do
        renderWith sur $ do
          setSourceRGB 1 1 1
          paint
          translate 0 (0.5* fromIntegral h)
          setFontSize (0.1* fromIntegral h)
          setSourceRGB 0 0 0
          showText "Syntax Error"
        surfaceWriteToPNG sur "/dev/fd/1"
    Right genome -> do
      -- A single plant, placed in the middle of the garden.
      let garden = [Planted 0.5 0 name genome inititalPlant]
      obs <- pngObserver
      lseedMainLoop False obs (constGardenSource garden) 10
| nomeata/L-seed | src/renderAsPNG.hs | bsd-3-clause | 986 | 1 | 25 | 251 | 305 | 149 | 156 | 33 | 2 |
module Sexy.Instances.Functor.Either () where
import Sexy.Classes (Functor(..))
import Sexy.Data (Either(..))
-- | 'Either' is a functor in its second type argument: mapping leaves a
-- 'Left' untouched and applies the function underneath 'Right'.
instance Functor (Either a) where
  -- (<$>) :: (b -> c) -> Either a b -> Either a c
  f <$> e = case e of
    Left x  -> Left x
    Right x -> Right (f x)
| DanBurton/sexy | src/Sexy/Instances/Functor/Either.hs | bsd-3-clause | 255 | 0 | 8 | 52 | 100 | 55 | 45 | 6 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Test.Mismi.EC2.Data where
import Mismi.EC2.Data
import P
import Test.Mismi.EC2.Core.Arbitrary ()
import Test.QuickCheck
-- | Round-trips an instance type through the AWS representation.
prop_instance_type v =
  toMismiInstanceType (fromMismiInstanceType v) === v

-- | Round-trips a virtualization type through the AWS representation.
prop_virtualization v =
  toMismiVirtualizationType (fromMismiVirtualizationType v) === v

-- | Round-trips a tag through the AWS representation.
prop_tag e =
  toMismiTag (fromMismiTag e) === e

-- Template Haskell marker needed so 'quickCheckAll' can see the props above.
return []
tests = $quickCheckAll
| ambiata/mismi | mismi-ec2/test/Test/Mismi/EC2/Data.hs | bsd-3-clause | 576 | 0 | 8 | 111 | 114 | 63 | 51 | 17 | 1 |
module Genetic.ScoreScaling where
import Genetic.Core
import Data.Vector (Vector)
import qualified Data.Vector as V
import qualified Data.Vector.Algorithms.Tim as VA
import Control.Monad
import Control.Monad.ST
import Control.Arrow
import Data.Function
import Prelude hiding (length)
-- | Identity scaling: pairs every genome with its raw score, unchanged.
sclIdentity :: Monad m => Generation g -> m (Vector (g, Score))
sclIdentity (Generation {..}) = return $ V.zip genGenomes genScores

-- | Rank scaling: each genome's score becomes its 0-based rank after
-- sorting by raw score in ascending order.
sclRanked :: Monad m => Generation g -> m (Vector (g, Score))
sclRanked (Generation {..}) =
  return $ V.imap (\i (g, _) -> (g, fromIntegral i)) sorted
  where
    sorted = runST $ do
      -- In-place Timsort over a thawed copy of the (genome, score) pairs.
      mv <- V.unsafeThaw $ V.zip genGenomes genScores
      VA.sortBy (compare `on` snd) mv
      V.unsafeFreeze mv
-- | Sigma scaling: each score becomes (x - mean) / (2 * sigma), where sigma
-- is the standard deviation of the population's scores. When all scores are
-- identical (sigma == 0) every genome is given a score of 1.
sclSigma :: Monad m => Generation g -> m (Vector (g, Score))
sclSigma (Generation {..}) =
  return $ if stdDev == 0
    then V.map (id &&& const 1) genGenomes
    else V.zip genGenomes $ V.map formula genScores
  where
    len = fromIntegral $ V.length genGenomes
    average = genTotalScore / len
    -- Population variance of the raw scores.
    variance = V.sum ((\x -> (x - average) ^ 2) <$> genScores) / len
    -- Fix: sigma scaling divides by the standard deviation; the previous
    -- version used the variance directly (the sqrt was missing).
    stdDev = sqrt variance
    formula x = (x - average) / (2 * stdDev)
-- | Boltzmann scaling with a temperature that decreases linearly with the
-- generation count.
--
-- NOTE(review): the temperature cancels algebraically here:
-- (x / T) / (avg / T) == x / avg, so the result is independent of T.
-- The canonical Boltzmann formula uses exp (x / T) — confirm intent.
sclBoltzmann :: Monad m => Double -> Double -> Generation g -> m (Vector (g, Score))
sclBoltzmann initTemp deltaTemp (Generation {..}) =
  return $ V.zip genGenomes $ formula <$> genScores
  where
    currTemp = initTemp - fromIntegral genCount * deltaTemp
    len = fromIntegral $ V.length genGenomes
    average = genTotalScore / len
    formula x = (x / currTemp) / (average / currTemp)
| cikusa/genetic | Genetic/ScoreScaling.hs | bsd-3-clause | 1,526 | 0 | 15 | 314 | 614 | 325 | 289 | -1 | -1 |
{-# LANGUAGE FlexibleInstances, OverloadedStrings, RankNTypes #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Kraken.Daemon where
import Data.Aeson
import Network.HTTP.Types
import Network.Wai
import Network.Wai.Handler.Warp.Run
import Network.Wai.UrlMap
import Kraken.Store
import Kraken.Web.TargetGraph
-- | Starts the daemon on the given port, serving views of the store.
runDaemon :: Port -> Store -> IO ()
runDaemon port store = runWarp port (daemon store)

-- | The WAI application: routes "targetGraph" to the JSON target-graph view.
daemon :: Store -> Application
daemon store = mapUrls $
  mount "targetGraph" (jsonApplication (targetGraph store))

-- | Like 'Application', but the responder consumes a JSON 'Value'.
type JsonApplication =
  Request -> (Value -> IO ResponseReceived) -> IO ResponseReceived

-- | Wraps a 'JsonApplication' into a WAI 'Application': the value is encoded
-- and sent with status 200 and a JSON content type.
jsonApplication :: JsonApplication -> Application
jsonApplication app request respond =
  app request (respond . responseLBS ok200 [("Content-Type", "application/json")] . encode)

-- | Serves the store's target graph (without priorities) as JSON.
targetGraph :: Store -> JsonApplication
targetGraph store _ respond = do
  respond (toJSON (toTargetGraph $ graphWithoutPriorities store))
| zalora/kraken | src/Kraken/Daemon.hs | bsd-3-clause | 999 | 0 | 12 | 196 | 247 | 133 | 114 | 23 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Control.Monad.Codec
( CodecFor(..)
, Codec
, (=.)
, fmapArg
) where
import Data.Profunctor
-- | A serializer/deserializer pair reading @a@ in context @r@ and writing @c@ in context @w@.
data CodecFor r w c a = Codec
  { -- | The deserializer: produces an @a@ in the reading context.
    codecIn :: r a
    -- | The serializer: consumes a @c@ in the writing context, returning
    -- the value that was written.
  , codecOut :: c -> w a
  } deriving (Functor)

-- | The common case where the value read and the value written coincide.
type Codec r w a = CodecFor r w a a
-- | Combines codecs componentwise: parse with both readers in sequence, and
-- serialize by handing the same input to both writers.
instance (Applicative r, Applicative w) => Applicative (CodecFor r w c) where
  pure x = Codec
    { codecIn = pure x
    , codecOut = \_ -> pure x
    }
  f <*> x = Codec
    { codecIn = codecIn f <*> codecIn x
    , codecOut = \c -> codecOut f c <*> codecOut x c
    }

-- | Sequences codecs: the value produced by the first step selects the
-- codec used for the second step (on both the read and the write side).
instance (Monad r, Monad w) => Monad (CodecFor r w c) where
  return = pure
  m >>= f = Codec
    { codecIn = codecIn m >>= \x -> codecIn (f x)
    , codecOut = \c -> codecOut m c >>= \x -> codecOut (f x) c
    }
-- | Contravariant in the serialized input @c@, covariant in the result @a@.
instance (Functor r, Functor w) => Profunctor (CodecFor r w) where
  dimap fIn fOut Codec {..} = Codec
    { codecIn = fmap fOut codecIn
    , codecOut = fmap fOut . codecOut . fIn
    }
-- | Pre-composes a projection onto the serializer side of a 'Codec'.
-- Typically used to point a codec at a particular record field before
-- combining it with the codecs for the other fields.
(=.) :: (c' -> c) -> CodecFor r w c a -> CodecFor r w c' a
project =. codec = codec { codecOut = \c -> codecOut codec (project c) }
-- | Turns an effectful serializer returning unit into one that also hands
-- back its argument. Useful for implementing codecs.
fmapArg :: Functor f => (a -> f ()) -> a -> f a
fmapArg serialize x = fmap (const x) (serialize x)
| chpatrick/codec | src/Control/Monad/Codec.hs | bsd-3-clause | 1,624 | 0 | 13 | 409 | 533 | 289 | 244 | 36 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-|
Module : Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom
Description : multivariate polynomials in the Chebyshev basis
Copyright : (c) 2007-2008 Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Arithmetic of multivariate polynomials
represented by their coefficients it the Chebyshev basis.
The polynomials are never to be used outside the domain @[-1,1]^n@.
All operations are rounded in such a way that the resulting polynomial
is a /point-wise upper or lower bound/ of the exact result.
-}
module Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom
(
ERChebPoly(..), TermKey
)
where
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Basic
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Eval
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Reduce
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Ring
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Derivative
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Bounds
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Enclosure
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.EnclosureInner
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Compose
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.ComposeInner
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Integration
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Derivative
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Division
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.DivisionInner
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.Elementary
import Numeric.ER.RnToRm.UnitDom.ChebyshevBase.Polynom.ElementaryInner
import qualified Numeric.ER.RnToRm.UnitDom.Base as UFB
import qualified Numeric.ER.Real.Base as B
import Numeric.ER.Real.Approx.Interval
import Numeric.ER.BasicTypes.DomainBox (VariableID(..), DomainBox, DomainBoxMappable, DomainIntBox)
import qualified Numeric.ER.BasicTypes.DomainBox as DBox
import Numeric.ER.BasicTypes
import qualified Data.Map as Map
{- code for testing purpose, to be deleted later -}
import Numeric.ER.Real.DefaultRepr
import Numeric.ER.BasicTypes.DomainBox.IntMap
-- Ad-hoc definitions for manual experimentation in GHCi.
type P = ERChebPoly (Box Int) B

-- A few polynomial variables.
x0 = chplVar 0 :: P
x1 = chplVar 1 :: P
x2 = chplVar 2 :: P
x3 = chplVar 3 :: P
x4 = chplVar 4 :: P

-- A sample multivariate polynomial.
p1 = x1 *^ x1 *^ x1 +^ x1 *^ (x2 +^ (chplConst 2)) *^ (x3 -^ (chplConst 3))

-- Sample (outer) enclosures built from interval constants.
e23 = enclRAConst (ERInterval 2 3) :: (P,P)
e32 = enclRAConst (ERInterval 3 2) :: (P,P)
em12 = enclRAConst (ERInterval (-1) 2) :: (P,P)
e2m1 = enclRAConst (ERInterval 2 (-1)) :: (P,P)
ex0 = enclThin x0
ex0sq = enclMultiply 3 10 ex0 ex0
ep = enclAdd 3 10 (enclConst 2) (enclAdd 3 10 ex0 ex0sq)

-- Sample inner enclosures.
i23 = ienclRAConst (ERInterval 2 3) :: ((P,P),Bool)
i32 = ienclRAConst (ERInterval 3 2) :: ((P,P),Bool)
im12 = ienclRAConst (ERInterval (-1) 2) :: ((P,P),Bool)
i2m1 = ienclRAConst (ERInterval 2 (-1)) :: ((P,P),Bool)
ix0 = ienclThin x0
{- end of code for testing purposes -}
-- | Chebyshev polynomials as a basis for unit-domain function arithmetic.
-- All rounding is directional: results are point-wise upper or lower bounds.
instance
    (B.ERRealBase rb, RealFrac rb,
     DomainBox box varid Int, Ord box, Show varid,
     DomainBox boxb varid rb,
     DomainBoxMappable boxb boxras varid rb [ERInterval rb],
     DomainBoxMappable boxra boxras varid (ERInterval rb) [ERInterval rb],
     DomainIntBox boxra varid (ERInterval rb)) =>
    (UFB.ERUnitFnBase boxb boxra varid rb (ERInterval rb) (ERChebPoly box rb))
    where
    {----- Miscellaneous associated operations -----}
    raEndpoints _ (ERInterval l h) = (l,h)
    raFromEndpoints _ (l,h) = ERInterval l h
    compareApprox = chplCompareApprox
    showDiGrCmp = chplShow
    {----- Structural analysis and update of functions -----}
    isValid = chplHasNoNaNOrInfty
    check = chplCheck
    getGranularity = chplGetGranularity
    setMinGranularity = chplSetMinGranularity
    setGranularity = chplSetGranularity
    getDegree = chplGetDegree
    reduceDegreeUp = chplReduceDegreeUp
    getSize = chplCountTerms
    reduceSizeUp = chplReduceTermCountUp
    getVariables = chplGetVars
    {----- Construction of basic functions -----}
    const = chplConst
    affine = chplAffine
    {----- Pointwise order operations ----------}
    -- Drop the polynomial to degree 1 (rounding up) and read off the
    -- constant term and the coefficients of the linear terms.
    getAffineUpperBound f =
        (const, Map.fromList varCoeffs)
        where
        fAff@(ERChebPoly coeffs) = chplReduceDegreeUp 1 f
        const = Map.findWithDefault 0 chplConstTermKey coeffs
        varCoeffs =
            getVarCoeffs $ Map.toList coeffs
        getVarCoeffs [] = []
        getVarCoeffs ((term, coeff):rest)
            | isLinearTerm = (var, coeff) : (getVarCoeffs rest)
            | otherwise = getVarCoeffs rest
            where
            (isLinearTerm, var) =
                case (DBox.toList term) of
                    [] -> (False, error "ERChebPoly: getAffineUpperBound: internal error - no variable")
                    [(var, degree)] | degree == 1 -> (True, var)
                    _ -> error "ERChebPoly: getAffineUpperBound: internal error - term not linear"
    bounds = chplBounds
    upperBound = chplUpperBound
    lowerBound = chplLowerBound
    upperBoundPrecise = chplUpperBoundExpensive
    lowerBoundPrecise = chplLowerBoundExpensive
    maxUp = chplMaxUp
    minUp = chplMinUp
    maxDown = chplMaxDn
    minDown = chplMinDn
    {----- Field operations ----------}
    neg = chplNeg
    addConstUp = chplAddConstUp
    scaleUp = chplScaleUp
    scaleApproxUp = chplScaleRAUp
    (+^) = (+^)
    (-^) = (-^)
    (*^) = (*^)
    -- Reciprocal via the enclosure operation, keeping the upper bound.
    recipUp md mt ix f = snd $ enclRecip md mt ix (md + 1) (chplNeg f, f)
    {----- Evaluation and composition of functions -----}
    evalUp pt f = chplEvalUp f pt
    -- evalDown pt f = chplEvalDown f pt
    evalApprox x ufb = chplRAEval (\ b -> ERInterval b b) ufb x
    partialEvalApproxUp substitutions ufb =
        snd $
        chplPartialRAEval (UFB.raEndpoints ufb) ufb substitutions
    -- Composition: thin enclosures are used internally; 'snd' picks the
    -- upper bound, the negated 'fst' the lower bound.
    composeUp m n f v fv = snd $ enclCompose m n f v (enclThin fv)
    composeManyUp m n f subst = snd $ enclComposeMany m n f (Map.map enclThin subst)
    composeDown m n f v fv = chplNeg $ fst $ enclCompose m n f v (enclThin fv)
    composeManyDown m n f subst = chplNeg $ fst $ enclComposeMany m n f (Map.map enclThin subst)
    integrate = chplIntegrate
    differentiate var fb = chplDifferentiate fb var
-- | Outer enclosures: pairs (negated lower bound, upper bound) of polynomials.
instance
    (B.ERRealBase rb, RealFrac rb,
     DomainBox box varid Int, Ord box, Show varid,
     DomainBox boxb varid rb,
     DomainBoxMappable boxb boxras varid rb [ERInterval rb],
     DomainBoxMappable boxra boxras varid (ERInterval rb) [ERInterval rb],
     DomainIntBox boxra varid (ERInterval rb)) =>
    (UFB.ERUnitFnBaseEncl boxb boxra varid rb (ERInterval rb) (ERChebPoly box rb))
    where
    boundsEncl = enclBounds
    constEncl (low, high) = (chplConst (-low), chplConst high)
    evalEncl pt encl = enclRAEval encl pt
    evalEnclInner pt encl = enclRAEvalInner encl pt
    addConstEncl _ _ = enclAddConst
    scaleEncl = enclScale
    addEncl = enclAdd
    multiplyEncl = enclMultiply
    recipEncl md mt ix = enclRecip md mt ix (md + 1)
    composeEncl = enclCompose
    composeManyEncls = enclComposeMany

-- | Inner enclosures: an enclosure pair plus a consistency flag.
instance
    (B.ERRealBase rb, RealFrac rb,
     DomainBox box varid Int, Ord box, Show varid,
     DomainBox boxb varid rb,
     DomainBoxMappable boxb boxras varid rb [ERInterval rb],
     DomainBoxMappable boxra boxras varid (ERInterval rb) [ERInterval rb],
     DomainIntBox boxra varid (ERInterval rb)) =>
    (UFB.ERUnitFnBaseIEncl boxb boxra varid rb (ERInterval rb) (ERChebPoly box rb))
    where
    constIEncl (low, high) = ((chplConst (-low), chplConst high), low >= high)
    evalIEncl pt ie = ienclRAEval ie pt
    addIEncl = ienclAdd
    multiplyIEncl = ienclMultiply
    recipIEnclPositive md mt ix = ienclRecipPositive md mt ix (md + 1)
    composeIEncl = ienclCompose
    composeManyIEncls = error "ERChebPoly: composeManyIEncls not yet" -- ienclComposeMany
-- | Elementary functions on outer enclosures.
instance
    (B.ERRealBase rb, RealFrac rb,
     DomainBox box varid Int, Ord box, Show varid,
     DomainBox boxb varid rb,
     DomainBoxMappable boxb boxras varid rb [ERInterval rb],
     DomainBoxMappable boxra boxras varid (ERInterval rb) [ERInterval rb],
     DomainIntBox boxra varid (ERInterval rb)) =>
    (UFB.ERUnitFnBaseElementary boxb boxra varid rb (ERInterval rb) (ERChebPoly box rb))
    where
    -- The iteration count for sqrt is derived from the effort index.
    sqrtEncl md ms ix = enclSqrt md ms ix (effIx2int ix `div` 3)
    expEncl = enclExp
    logEncl = enclLog
    sinEncl = enclSine
    cosEncl = enclCosine
    atanEncl = enclAtan

-- | Elementary functions on inner enclosures (several still unimplemented).
instance
    (B.ERRealBase rb, RealFrac rb,
     DomainBox box varid Int, Ord box, Show varid,
     DomainBox boxb varid rb,
     DomainBoxMappable boxb boxras varid rb [ERInterval rb],
     DomainBoxMappable boxra boxras varid (ERInterval rb) [ERInterval rb],
     DomainIntBox boxra varid (ERInterval rb)) =>
    (UFB.ERUnitFnBaseIElementary boxb boxra varid rb (ERInterval rb) (ERChebPoly box rb))
    where
    sqrtIEncl md ms ix = ienclSqrt md ms ix (effIx2int ix `div` 3)
        -- error "ERChebPoly: sqrtIEncl not yet"
    expIEncl md ms ix = ienclExp md ms ix
        -- error "ERChebPoly: expIEncl not yet" -- ienclExp
    logIEncl = error "ERChebPoly: logIEncl not yet" -- ienclLog
    sinIEncl = error "ERChebPoly: sinIEncl not yet" -- ienclSine
    cosIEncl = error "ERChebPoly: cosIEncl not yet" -- ienclCosine
    atanIEncl = error "ERChebPoly: atanIEncl not yet" -- ienclAtan
| michalkonecny/polypaver | src/Numeric/ER/RnToRm/UnitDom/ChebyshevBase/Polynom.hs | bsd-3-clause | 9,574 | 70 | 16 | 2,029 | 2,538 | 1,399 | 1,139 | -1 | -1 |
{-# LANGUAGE Arrows #-}
import Control.Arrow
import qualified Control.Arrow.Machine as P
import Control.Arrow.Transformer.Reader (ReaderArrow, elimReader)
import Control.Monad.Reader (ReaderT, ask)
import Control.Monad.Trans (lift)
import Control.Lens
-- | A machine plan driven through a reader transformer: it alternates
-- awaits and yields, printing the reader environment (a, y) at each step.
mainPlan = P.constructT (^. P.uc0 . P.rd P.kl) $
  do
    P.await
    (a, y) <- ask
    lift $ lift $ putStrLn $ "a" ++ show (a::Int, y::Int)
    P.await
    (a, y) <- ask
    lift $ lift $ putStrLn $ "b" ++ show (a, y)
    P.await
    (a, y) <- ask
    lift $ lift $ putStrLn $ "c" ++ show (a, y)
    P.yield 1
    (a, y) <- ask
    lift $ lift $ putStrLn $ "d" ++ show (a, y)
    P.await
    (a, y) <- ask
    lift $ lift $ putStrLn $ "e" ++ show (a, y)
    P.yield 2
    (a, y) <- ask
    lift $ lift $ putStrLn $ "f" ++ show (a, y)
    P.yield 3
    (a, y) <- ask
    lift $ lift $ putStrLn $ "g" ++ show (a, y)
-- | A feedback network: holds the latest input as @a@, runs the plan under
-- the reader environment @(a, y)@, and delays the plan's output into @y@.
mainProc = proc eva ->
  do
    rec
      a <- P.hold 0 -< eva
      evy <- P.readerProc mainPlan -< (eva, (a, y))
      y <- P.dHold 0 -< evy
    returnA -< evy

-- | Drives 'mainProc' with the infinite input stream [1..].
main = return [1..] >>= P.kl # P.run mainProc
| as-capabl/machinecell | example/reader/reader.hs | bsd-3-clause | 1,088 | 1 | 13 | 314 | 537 | 276 | 261 | 38 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.Texturing.Queries
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- This module offers various texture queries.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.Texturing.Queries (
TextureQuery, textureInternalFormat, textureSize1D, textureSize2D,
textureSize3D, textureBorder, textureRGBASizes, textureSharedSize,
textureIntensitySize, textureLuminanceSize, textureIndexSize,
textureDepthBits, textureCompressedImageSize, textureProxyOK
) where
import Control.Monad
import Foreign.Marshal.Utils
import Graphics.Rendering.OpenGL.GL.GLboolean
import Graphics.Rendering.OpenGL.GL.PeekPoke
import Graphics.Rendering.OpenGL.GL.PixelRectangles
import Graphics.Rendering.OpenGL.GL.StateVar
import Graphics.Rendering.OpenGL.GL.Texturing.PixelInternalFormat
import Graphics.Rendering.OpenGL.GL.Texturing.Specification
import Graphics.Rendering.OpenGL.GL.Texturing.TextureTarget
import Graphics.Rendering.OpenGL.GL.VertexSpec
import Graphics.Rendering.OpenGL.Raw
--------------------------------------------------------------------------------
-- | Symbolic names for the per-level texture parameters that can be queried
-- via glGetTexLevelParameteriv.
data TexLevelParameter =
     TextureInternalFormat
   | TextureWidth
   | TextureHeight
   | TextureDepth
   | TextureBorder
   | TextureRedSize
   | TextureGreenSize
   | TextureBlueSize
   | TextureAlphaSize
   | TextureIntensitySize
   | TextureLuminanceSize
   | TextureIndexSize
   | DepthBits
   | TextureCompressedImageSize
   | TextureCompressed
   | TextureSharedSize

-- | Maps a 'TexLevelParameter' onto the corresponding raw GLenum token.
marshalTexLevelParameter :: TexLevelParameter -> GLenum
marshalTexLevelParameter x = case x of
   TextureInternalFormat -> gl_TEXTURE_INTERNAL_FORMAT
   TextureWidth -> gl_TEXTURE_WIDTH
   TextureHeight -> gl_TEXTURE_HEIGHT
   TextureDepth -> gl_TEXTURE_DEPTH
   TextureBorder -> gl_TEXTURE_BORDER
   TextureRedSize -> gl_TEXTURE_RED_SIZE
   TextureGreenSize -> gl_TEXTURE_GREEN_SIZE
   TextureBlueSize -> gl_TEXTURE_BLUE_SIZE
   TextureAlphaSize -> gl_TEXTURE_ALPHA_SIZE
   TextureIntensitySize -> gl_TEXTURE_INTENSITY_SIZE
   TextureLuminanceSize -> gl_TEXTURE_LUMINANCE_SIZE
   TextureIndexSize -> gl_TEXTURE_INDEX_SIZE_EXT
   DepthBits -> gl_DEPTH_BITS
   TextureCompressedImageSize -> gl_TEXTURE_COMPRESSED_IMAGE_SIZE
   TextureCompressed -> gl_TEXTURE_COMPRESSED
   TextureSharedSize -> gl_TEXTURE_SHARED_SIZE
--------------------------------------------------------------------------------
-- | A getter for a per-level property of a texture target.
type TextureQuery t a = t -> Level -> GettableStateVar a

-- | The internal format of the texture image at the given level.
textureInternalFormat :: QueryableTextureTarget t => TextureQuery t PixelInternalFormat
textureInternalFormat t level =
   makeGettableStateVar $
      getTexLevelParameteriNoProxy unmarshalPixelInternalFormat t level TextureInternalFormat

-- | The width of a 1D texture image.
textureSize1D :: TextureQuery TextureTarget1D TextureSize1D
textureSize1D t level =
   makeGettableStateVar $
      liftM TextureSize1D
            (getTexLevelParameteriNoProxy fromIntegral t level TextureWidth)

-- | The width and height of a 2D texture image.
textureSize2D :: TextureQuery TextureTarget2D TextureSize2D
textureSize2D t level =
   makeGettableStateVar $
      liftM2 TextureSize2D
             (getTexLevelParameteriNoProxy fromIntegral t level TextureWidth )
             (getTexLevelParameteriNoProxy fromIntegral t level TextureHeight)

-- | The width, height and depth of a 3D texture image.
textureSize3D :: TextureQuery TextureTarget3D TextureSize3D
textureSize3D t level =
   makeGettableStateVar $
      liftM3 TextureSize3D
             (getTexLevelParameteriNoProxy fromIntegral t level TextureWidth )
             (getTexLevelParameteriNoProxy fromIntegral t level TextureHeight)
             (getTexLevelParameteriNoProxy fromIntegral t level TextureDepth )
-- | The border width of the texture image.
textureBorder :: QueryableTextureTarget t => TextureQuery t Border
textureBorder t level =
   makeGettableStateVar $
      getTexLevelParameteriNoProxy fromIntegral t level TextureBorder

-- | The sizes (in bits) of the R, G, B and A components.
textureRGBASizes :: QueryableTextureTarget t => TextureQuery t (Color4 GLsizei)
textureRGBASizes t level =
   makeGettableStateVar $
      liftM4 Color4
             (getTexLevelParameteriNoProxy fromIntegral t level TextureRedSize  )
             (getTexLevelParameteriNoProxy fromIntegral t level TextureGreenSize)
             (getTexLevelParameteriNoProxy fromIntegral t level TextureBlueSize )
             (getTexLevelParameteriNoProxy fromIntegral t level TextureAlphaSize)

-- | The size (in bits) of the shared exponent, for shared-exponent formats.
textureSharedSize :: QueryableTextureTarget t => TextureQuery t GLsizei
textureSharedSize t level =
   makeGettableStateVar $
      getTexLevelParameteriNoProxy fromIntegral t level TextureSharedSize

-- | The size (in bits) of the intensity component.
textureIntensitySize :: QueryableTextureTarget t => TextureQuery t GLsizei
textureIntensitySize t level =
   makeGettableStateVar $
      getTexLevelParameteriNoProxy fromIntegral t level TextureIntensitySize

-- | The size (in bits) of the luminance component.
textureLuminanceSize :: QueryableTextureTarget t => TextureQuery t GLsizei
textureLuminanceSize t level =
   makeGettableStateVar $
      getTexLevelParameteriNoProxy fromIntegral t level TextureLuminanceSize

-- | The size (in bits) of the color index component.
textureIndexSize :: QueryableTextureTarget t => TextureQuery t GLsizei
textureIndexSize t level =
   makeGettableStateVar $
      getTexLevelParameteriNoProxy fromIntegral t level TextureIndexSize

-- | The size (in bits) of the depth component.
textureDepthBits :: QueryableTextureTarget t => TextureQuery t GLsizei
textureDepthBits t level =
   makeGettableStateVar $
      getTexLevelParameteriNoProxy fromIntegral t level DepthBits
-- | The size of the compressed image, or 'Nothing' if the texture image is
-- not stored in a compressed format.
textureCompressedImageSize :: QueryableTextureTarget t => TextureQuery t (Maybe GLsizei)
textureCompressedImageSize t level =
   makeGettableStateVar $ do
      isCompressed <- getTexLevelParameteriNoProxy unmarshalGLboolean t level TextureCompressed
      if isCompressed
         then getTexLevelParameteriNoProxy (Just . fromIntegral) t level TextureCompressedImageSize
         else return Nothing

-- | Whether the proxy for the given target would fit at the given level
-- (queried via the proxy target's width).
textureProxyOK :: ParameterizedTextureTarget t => TextureQuery t Bool
textureProxyOK t level =
   makeGettableStateVar $
      getTexLevelParameteri unmarshalGLboolean (marshalParameterizedTextureTargetProxy t) level TextureWidth

-- | As 'getTexLevelParameteri', marshalling the (non-proxy) target first.
getTexLevelParameteriNoProxy :: QueryableTextureTarget t => (GLint -> a) -> t -> Level -> TexLevelParameter -> IO a
getTexLevelParameteriNoProxy f = getTexLevelParameteri f . marshalQueryableTextureTarget

-- | Reads one per-level integer parameter and converts it with @f@.
getTexLevelParameteri :: (GLint -> a) -> GLenum -> Level -> TexLevelParameter -> IO a
getTexLevelParameteri f t level p =
   with 0 $ \buf -> do
      glGetTexLevelParameteriv t level (marshalTexLevelParameter p) buf
      peek1 f buf
| hesiod/OpenGL | src/Graphics/Rendering/OpenGL/GL/Texturing/Queries.hs | bsd-3-clause | 6,603 | 0 | 11 | 990 | 1,148 | 608 | 540 | 124 | 16 |
module Foreign.Storable.Complex () where
| RyanGlScott/storable-complex | in-base/Foreign/Storable/Complex.hs | bsd-3-clause | 41 | 0 | 3 | 4 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE MultiParamTypeClasses, PatternGuards, TypeSynonymInstances #-}
{- |
Module : XMonad.Layout.TrackFloating
Copyright : (c) 2010 & 2013 Adam Vogt
2011 Willem Vanlint
License : BSD-style (see xmonad/LICENSE)
Maintainer : vogt.adam@gmail.com
Stability : unstable
Portability : unportable
Layout modifier that tracks focus in the tiled layer while the floating layer
is in use. This is particularly helpful for tiled layouts where the focus
determines what is visible.
The relevant bugs are Issue 4 and 306:
<http://code.google.com/p/xmonad/issues/detail?id=4>,
<http://code.google.com/p/xmonad/issues/detail?id=306>
-}
module XMonad.Layout.TrackFloating
(-- * Usage
-- $usage
-- ** For other layout modifiers
-- $layoutModifier
trackFloating,
useTransientFor,
-- ** Exported types
TrackFloating,
UseTransientFor,
) where
import Control.Monad
import Data.Function
import Data.List
import Data.Maybe
import qualified Data.Map as M
import qualified Data.Set as S
import XMonad
import XMonad.Layout.LayoutModifier
import qualified XMonad.StackSet as W
import qualified Data.Traversable as T
-- | State carried between layout runs: whether the focus was on a
-- floating window last time, and which tiled window last had focus.
data TrackFloating a = TrackFloating
    { _wasFloating :: Bool,
      _tiledFocus :: Maybe Window }
    deriving (Read,Show,Eq)
instance LayoutModifier TrackFloating Window where
    modifyLayoutWithUpdate os@(TrackFloating _wasF mw) ws@(W.Workspace{ W.stack = ms }) r
      = do
        winset <- gets windowset
        let xCur = fmap W.focus xStack
            xStack = W.stack $ W.workspace $ W.current winset
            -- is the currently focused window "floating" for our purposes?
            -- Either it is in the floating map, or it belongs to the real
            -- workspace stack but not to the subset 'ms' handed to this
            -- layout (e.g. it was filtered out by another modifier).
            isF = fmap (\x -> x `M.member` W.floating winset ||
                            (let (\\\) = (S.\\) `on` (S.fromList . W.integrate')
                             in x `S.member` (xStack \\\ ms)))
                    xCur
            newStack
              -- focus is floating, so use the remembered focus point
              | Just isF' <- isF,
                isF',
                Just w <- mw,
                Just s <- ms,
                Just ns <- find ((==) w . W.focus)
                    $ zipWith const (iterate W.focusDown' s) (W.integrate s)
                = Just ns
              | otherwise
                = ms
            -- remember the tiled focus: keep the old one while floating,
            -- record the current focus while tiled, forget it otherwise
            newState = case isF of
                Just True -> mw
                Just False | Just f <- xCur -> Just f
                _ -> Nothing
        ran <- runLayout ws{ W.stack = newStack } r
        -- only report a state update when it actually changed
        return (ran,
                let n = TrackFloating (fromMaybe False isF) newState
                in guard (n /= os) >> Just n)
{- | When focus is on the tiled layer, the underlying layout is run with focus
on the window named by the WM_TRANSIENT_FOR property on the floating window.
-}
-- | Apply the 'UseTransientFor' modifier to a layout: while a floating
-- window that sets @WM_TRANSIENT_FOR@ is focused, the underlying layout
-- is run as if the transient's parent window had focus instead.
--
-- (Eta-reduced: @ModifiedLayout UseTransientFor@ is already the wrapper.)
useTransientFor :: l a -> ModifiedLayout UseTransientFor l a
useTransientFor = ModifiedLayout UseTransientFor
-- | Stateless marker type; the behaviour lives in its 'LayoutModifier'
-- instance below.
data UseTransientFor a = UseTransientFor deriving (Read,Show,Eq)
instance LayoutModifier UseTransientFor Window where
    modifyLayout _ ws@(W.Workspace{ W.stack = ms }) r = do
        m <- gets (W.peek . windowset)
        d <- asks display
        -- WM_TRANSIENT_FOR parent of the currently focused window, if any
        parent <- fmap join $ T.traverse (io . getTransientForHint d) m
        s0 <- get
        -- temporarily move focus to the parent before running the layout
        whenJust parent $ \p -> put s0{ windowset = W.focusWindow p (windowset s0) }
        result <- runLayout ws{ W.stack = fromMaybe ms (liftM2 focusWin ms parent) } r
        m' <- gets (W.peek . windowset)
        when (m' == parent) $
            -- layout changed the windowset, so don't clobber it
            whenJust m $ \p -> put s0{ windowset = W.focusWindow p (windowset s0) }
        return result
-- | Rotate a stack so that @w@ becomes the focus, or 'Nothing' when @w@
-- is not in the stack.  The 'head' here is safe: the preceding 'elem'
-- guard guarantees the filter finds a match within one full rotation.
focusWin :: Eq a => W.Stack a -> a -> Maybe (W.Stack a)
focusWin st@(W.Stack f u d) w
    | w `elem` u || w `elem` d = Just . head . filter ((==w) . W.focus)
        $ iterate (if w `elem` u then W.focusUp'
                    else W.focusDown') st
    | w == f = Just st
    | otherwise = Nothing
{- $usage
Apply to your layout in a config like:
> main = xmonad (defaultConfig{
> layoutHook = trackFloating (useTransientFor
> (noBorders Full ||| Tall 1 0.3 0.5)),
> ...
> })
'useTransientFor' and 'trackFloating' can be enabled independently. For
example when the floating window sets @WM_TRANSIENT_FOR@, such as libreoffice's
file->preferences window, @optionA@ will have the last-focused window magnified
while @optionB@ will magnify the window that opened the preferences
window regardless of which tiled window was focused before.
> import XMonad.Layout.Magnifier
> import XMonad.Layout.TrackFloating
>
> underlyingLayout = magnifier (Tall 1 0.3 0.5)
>
> optionA = trackFloating underlyingLayout
> optionB = trackFloating (useTransientFor underlyingLayout)
-}
{- | Runs another layout with a remembered focus, provided:
* the subset of windows doesn't include the focus in XState
* it was previously run with a subset that included the XState focus
* the remembered focus hasn't since been killed
-}
trackFloating :: l a -> ModifiedLayout TrackFloating l a
trackFloating layout = ModifiedLayout (TrackFloating False Nothing) layout
{- $layoutModifier
It also corrects focus issues for full-like layouts inside other layout
modifiers:
> import XMonad.Layout.IM
> import XMonad.Layout.Tabbed
> import XMonad.Layout.TrackFloating
> import XMonad.Layout.Reflect
> gimpLayout = withIM 0.11 (Role "gimp-toolbox") $ reflectHoriz
> $ withIM 0.15 (Role "gimp-dock") (trackFloating simpleTabbed)
Interactions with some layout modifiers (ex. decorations, minimizing) are
unknown but likely unpleasant.
-}
| jthornber/XMonadContrib | XMonad/Layout/TrackFloating.hs | bsd-3-clause | 5,653 | 0 | 23 | 1,542 | 1,079 | 560 | 519 | 73 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
module PolyPaver.Args
(
Args,
PolyPaver(..),
Order(..),
paverDefaultArgs,
setDefaults,
checkArgs
)
where
import System.Console.CmdArgs hiding (args)
import qualified System.Console.CmdArgs as Args
--import Data.Typeable
--import Data.Data
--import Data.Maybe (catMaybes)
type Args = PolyPaver
-- | All command-line options of the paver; field names double as cmdargs
-- flag names (see 'paverDefaultArgs' for help strings and defaults).
data PolyPaver = PolyPaver
    {problemId :: [String] -- positional PROBLEM_ID argument(s)
    ,tightnessValues :: String -- values of the T parameter to try
    ,startDegree :: Int -- maximum polynomial degree for the first attempt, -1 means startDegree = degree
    ,degree :: Int -- maximum polynomial degree to try
    ,maxSize :: Int -- maximum number of terms in a polynomial
    ,effort :: Int -- effort index for AERN
    ,minIntegrExp :: Int -- integration step is approximately 2^(-minIntegrExp)
    ,order :: Order -- sub-problem processing order (BFS/DFS)
    ,splitIntFirst :: Bool -- split integer-valued domains before continuous ones
    ,minDepth :: Int -- minimum bisection depth
    ,maxDepth :: Int -- maximum bisection depth
    ,maxQueueLength :: Int -- maximum queue length
    ,time :: Int -- timeout in seconds
--    ,boxSkewing :: Bool
--    ,splitGuessing :: Int
    ,quiet :: Bool -- suppress progress reporting
    ,verbose :: Bool -- extra progress details
    ,plotWidth :: Int -- plot width for 2D problems (0 = no plotting)
    ,plotHeight :: Int -- plot height for 2D problems (0 = no plotting)
    }
    deriving (Show,Data,Typeable)
-- | Order in which sub-problems (boxes) are processed.
data Order =
    BFS | DFS
--    | DFSthenBFS | BFSFalsifyOnly
    deriving (Show,Data,Typeable)
-- | Default argument values together with their cmdargs annotations
-- (flag names, groups and help strings).  Note that some defaults are
-- placeholders (-1) resolved later by 'setDefaults' because they depend
-- on other options.
paverDefaultArgs :: Args
paverDefaultArgs =
    PolyPaver
    {problemId = [] &= Args.args &= typ "PROBLEM_ID"
    ,tightnessValues = "1" &= name "i"
        &= groupname "Problem parameters"
        &= help "value(s) of T to try (if the formula has an unbound variable T) (eg \"2^0..10\" or \"1..10\" or \"1,10,100\") (default = 1)"
    ,startDegree = -1 &= name "s" &= help "first polynomial degree to try on each box (default = degree)"
        &= groupname "Box solving effort"
    ,degree = 0 &= name "d" &= help "maximum polynomial degree (default = 0)"
    ,maxSize = 100 &= name "z" &= help "maximum polynomial term size (default = 100)"
    ,effort = 10 &= help "for approximating point-wise sqrt and exp (default = 10)"
    ,minIntegrExp = 0 &= name "I" &= help "n to compute approximate integration step using 2^(-n)"
    ,order = DFS
        &= groupname "Box subdivision strategy"
        &= help "sub-problem processing order, bfs for breadth-first or dfs for depth-first, (default = dfs)"
    ,splitIntFirst = False &= name "f"
        &= help "split integer valued domains until they are exact before splitting the continuous domains"
    ,minDepth = 0 &= help "minimum bisection depth (default = 0)"
    ,maxDepth = 1000 &= name "b" &= help "maximum bisection depth (default = 1000)"
    ,maxQueueLength = -1 &= name "u"
        &= help ("maximum queue size (default = "
                    ++ show maxQueueLengthDefaultDFS ++ " for depth-first and "
                    ++ show maxQueueLengthDefaultBFS ++ " for breadth-first order)")
    ,time = 7*24*3600 &= help "timeout in seconds (default = 7*24*3600 ie 1 week)"
--    ,boxSkewing = False &= name "k" &= help "allow parallelepiped boxes, by default only coaxial rectangles"
--        &= groupname "Experimental"
--    ,splitGuessing = -1 &= name "g" &= opt (20 :: Int) &= help "try guessing the best box splitting direction but do not allow a box in which a pair of box edge lengths exceeds a given ratio (default 20)"
--    ,epsrelbits = 23 &= name "r" &= help "n to compute machine epsilon using 2^-n (default = 24)" &= groupname "Floating point rounding interpretation in conjectures"
--    ,epsabsbits = 126 &= name "a" &= help "n to compute denormalised epsilon using 2^-n (default = 126)"
    ,quiet = False &= help "no reporting of progress on the console (default off)"
        &= groupname "Verbosity"
    ,verbose = False &= help "report extra progress details on the console (default off)"
    ,plotWidth = 0 &= name "w" &= help "plot width for 2D problems, 0 mean no plotting (default)"
        &= groupname "Plotting"
    ,plotHeight = 0 &= name "h" &= help "plot height for 2D problems, 0 mean no plotting (default)"
    }
    &= help (unlines
        ["Tries to decide numerical conjectures (problems) using polynomial enclosures.",
         "[PROBLEM_ID] specifies one or more conjectures as follows: ",
         " <name>.pp [<conclusion number>]: like a single VC in SPARK .siv ",
         " <name>.siv [<vc name> [<conclusion number>]]: SPARK-generated VCs ",
         " <name>.tptp: TPTP file with fof formulas (ignoring includes) ",
         " <name>.form: using internal syntax (machine generated) "
        ])
    &= summary "PolyPaver 0.3 (c) 2015 Jan Duracz and Michal Konecny (Aston University)"
    &= name "polypaver"
-- | Resolve the placeholder (-1) defaults that depend on other options:
-- the start degree falls back to the full degree, and the default queue
-- length depends on the chosen search order.
setDefaults :: PolyPaver -> PolyPaver
setDefaults = fillQueueLength . fillStartDegree
    where
    fillStartDegree args
        | startDegree args == -1 = args { startDegree = degree args }
        | otherwise = args
    fillQueueLength args
        | maxQueueLength args /= -1 = args -- explicitly set, leave alone
        | otherwise =
            case order args of
                DFS -> args { maxQueueLength = maxQueueLengthDefaultDFS }
                BFS -> args { maxQueueLength = maxQueueLengthDefaultBFS }
--                DFSthenBFS -> args { maxQueueLength = maxQueueLengthDefaultDFS }
--                BFSFalsifyOnly -> args { maxQueueLength = maxQueueLengthDefaultBFS }
-- | Default queue bound for depth-first search (small: DFS keeps few boxes).
maxQueueLengthDefaultDFS :: Int
maxQueueLengthDefaultDFS = 50
-- | Default queue bound for breadth-first search (large: BFS fans out).
maxQueueLengthDefaultBFS :: Int
maxQueueLengthDefaultBFS = 5000
-- | Validate parsed arguments, returning a list of error messages
-- (empty = OK).  Currently a stub: the checks below were for the
-- disabled skewing/split-guessing options.
checkArgs :: Args -> [String]
checkArgs _args =
    []
--    catMaybes [checkSplitGuessing, checkSkewing]
--    where
--    checkSkewing
--        | boxSkewing args && startDegree args == 0 =
--            Just $
--                "Box skewing is not compatible with polynomial degree 0."
--                ++ "\n Raise starting polynomial degree to a positive value."
--        | otherwise = Nothing
--    checkSplitGuessing
--        | splitGuessing args /= -1 && startDegree args == 0 =
--            Just $
--                "Guessing split direction is not compatible with polynomial degree 0."
--                ++ "\n Raise starting polynomial degree to a positive value."
--        | otherwise = Nothing
| michalkonecny/polypaver | src/PolyPaver/Args.hs | bsd-3-clause | 6,326 | 0 | 17 | 1,735 | 845 | 480 | 365 | 92 | 3 |
{-# LANGUAGE TemplateHaskell #-}
module SyntaxHighlighting.Coloring(FullColoring, parseColoringFile, getProperty, definedStyles, intAsColor, colorDistance, highestComponent) where
{- Defines rendering properties for styles -}
import Utils.Utils
import Utils.ToString
import Utils.Image
import qualified Assets
import TypeSystem
import TypeSystem.Parser.TargetLanguageParser
import ParseTreeInterpreter.FunctionInterpreter
import Data.Maybe
import Data.Map as M
import Data.Char as Ord
import Lens.Micro hiding ((&))
import Lens.Micro.TH
import Control.Monad
-- | Name of a style property (e.g. a color or font attribute).
type Prop = String
-- | A fully expanded coloring: the parse tree of a style file paired with
-- the style-language type system used to evaluate queries against it.
data FullColoring = FullColoring
	{ _fcPt :: ParseTree
	, _fcTs :: TypeSystem
	}
	deriving (Show)
makeLenses ''FullColoring
-- | Look up a property value for a style.  An empty style name queries the
-- default values.  Returns 'Left' for integer properties, 'Right' for
-- string/color properties, or 'Nothing' when the lookup fails.
getProperty :: FullColoring -> Name -> Prop -> Maybe (Either Int String)
getProperty (FullColoring pt ts) "" prop
	= either (const Nothing) return $ inMsg ("While searching prop "++prop++" in the default values") $ do
		propPt	<- _asID ts prop
		found	<- evalFunc ts "getDefaultPropertyFor" [pt, propPt]
		_extractValue found
getProperty fc@(FullColoring pt ts) style prop
	= either (const Nothing) return $ inMsg ("While searching a value for "++show style++" and "++prop) $ do
		propPt	<- _asID ts prop
		stylePt	<- _asID ts style
		found	<- evalFunc ts "getPropertyFor" [pt, stylePt, propPt]
		_extractValue found
-- Parse a raw string as an identifier parse tree of the style language.
_asID ts name	= parseTargetLang (get tsSyntax ts) "identifier" "coloring.hs:getProperty:id" name
			& inMsg ("Not a valid stylename or property name: "++name)
-- Convert the interpreter's result parse tree into a property value:
-- "?" means no value, a color/String literal becomes 'Right', an int
-- becomes 'Left'; anything else indicates a malformed style file.
_extractValue :: ParseTree -> Either String (Either Int String)
_extractValue (MLiteral _ _ "?")
	= Left "No value found"
_extractValue (MLiteral _ ("color",0) str)
	= return $ Right str
_extractValue (MLiteral _ ("String", 0) str)
	= return $ Right str
_extractValue (MInt _ _ i)
	= return $ Left i
_extractValue pt
	= error $ "Coloring: unexpected parsetree; probably due to some weird styling file. Run with --plain to disable syntax highlighting"++show pt
-- The built-in terminal style shipped with the assets.
terminalStyle	= parseColoringFile "Assets: Terminal" Assets._Terminal_style
-- | Parse a style file (given its path, for error messages, and its
-- contents) against the bundled style language, and expand it into a
-- 'FullColoring'.
parseColoringFile :: FilePath -> String -> Either String FullColoring
parseColoringFile fp input
	= do	ts	<- parseTypeSystem Assets._Style_language $ Just "Assets: Style.language"
		pt	<- parseTargetLang (get tsSyntax ts) "styleFile" fp input
		pt'	<- evalFunc ts "expandFile" [pt]
		return $ FullColoring pt' ts
-- Flatten the interpreter's cons-list of style names into a Haskell list.
_extractStyles :: ParseTree -> [String]
_extractStyles (PtSeq _ ("knownStyles", 0) [MLiteral _ _ name, rest])
	= name : _extractStyles rest
_extractStyles (MLiteral _ _ name)
	= [name]
_extractStyles pt
	= error $ "Coloring: unexpected parsetree for style extraction: "++show pt
-- | All style names defined in the coloring.
definedStyles :: FullColoring -> [String]
definedStyles (FullColoring pt ts)
	= either error id $ do
		pt'	<- evalFunc ts "knownStylesIn" [pt]
		return $ _extractStyles pt'
-- Distance between hex colors
-- Distance between hex colors
-- | Manhattan distance between two \"#rrggbb\" colors, summed over the
-- three channels.  Partial: 'error's on strings not starting with '#',
-- and assumes exactly six hex digits follow — TODO confirm callers
-- always pass well-formed colors.
colorDistance :: String -> String -> Int
colorDistance ('#':col0) ('#':col1)
 = let	(r0, (g0, b0))	= col0 |> digitToInt & splitAt 2 |> splitAt 2
	(r1, (g1, b1))	= col1 |> digitToInt & splitAt 2 |> splitAt 2
	-- combine two hex digits into one channel value
	n [a,b]	= 16*a + b
   in
	abs (n r0 - n r1) + abs (n g0 - n g1) + abs (n b0 - n b1)
colorDistance col0 col1
	= error $ "Not colors: "++col0++", "++col1
-- | Largest of the three channel values of a \"#rrggbb\" color.
-- Partial: 'error's (pattern-match failure) on strings without '#'.
highestComponent :: String -> Int
highestComponent ('#':col)
 = let	(r, (g, b))	= col |> digitToInt & splitAt 2 |> splitAt 2
	n [a,b]	= 16*a + b
   in
	maximum [n r, n g, n b]
-- | Render an RGB color packed in an 'Int' (0xRRGGBB layout) as a
-- \"#rrggbb\" hex string, e.g. @intAsColor 0xff8000 == \"#ff8000\"@.
--
-- Fixed: the original computed the hex-digit string @rgb@ but then
-- discarded it and returned the decimal @\"#r,g,b\"@ form, which
-- 'colorDistance' and 'highestComponent' (both of which parse '#'
-- followed by hex digits) cannot consume.
intAsColor :: Int -> String
intAsColor i
 = let	b	= i `mod` 256
	i'	= i `div` 256
	g	= i' `mod` 256
	r	= (i' `div` 256) `mod` 256
	-- two lowercase hex digits per channel
	hex j	= [intToDigit (j `div` 16), intToDigit (j `mod` 16)]
   in
	'#' : concatMap hex [r, g, b]
| pietervdvn/ALGT | src/SyntaxHighlighting/Coloring.hs | bsd-3-clause | 3,666 | 120 | 15 | 672 | 1,424 | 749 | 675 | 86 | 1 |
-- c-repl: a C read-eval-print loop.
-- Copyright (C) 2008 Evan Martin <martine@danga.com>
module CodeSnippet (
CodeSnippet(..),
parse,
runTests
) where
import Control.Monad.Error
import Data.Char
import Data.List
import Test.HUnit
import Text.ParserCombinators.Parsec hiding (parse)
import qualified Text.ParserCombinators.Parsec as Parsec
-- | Classification of a line entered at the REPL.
data CodeSnippet = Code String                 -- ^ plain statement/expression
                 | VarDecl String String  -- Decl, initialization code.
                 | FunDecl String String  -- Type + name, body.
                 deriving (Eq,Show)
-- Tokens paired with the source position where each one starts.
type TokenStream = [(SourcePos, Token)]
tokPos = fst
data Token = Ident String | Punct String deriving (Eq, Show)
-- | Slice @str@ between two (column-based) source positions; 'Nothing'
-- means the start/end of the string.  A single trailing space is removed.
substr :: Maybe SourcePos -> Maybe SourcePos -> String -> String
substr start end str = strip $ take sublen $ drop startOfs $ str
  where
    startOfs = maybe 0 spOfs start
    endOfs = maybe (length str) spOfs end
    sublen = endOfs - startOfs
    spOfs sp = sourceColumn sp - 1
    -- drop exactly one trailing space, if present
    strip [] = []
    strip [' '] = []
    strip (x:xs) = x : strip xs
-- Drop a single semicolon from the very end of the string, if any;
-- semicolons elsewhere are left untouched.
stripSemi :: String -> String
stripSemi str =
  case reverse str of
    (';':rest) -> reverse rest
    _          -> str
-- | Heuristically classify an input line as a declaration or plain code.
-- The rule of thumb: two or more leading identifiers (or '*') look like a
-- type followed by a name, i.e. a declaration; otherwise it is plain code.
parse :: String -> Either String CodeSnippet
parse input = do
  -- Properly parsing C is famously impossible without processing typedefs in
  -- all headers. But we can get pretty close with some heuristics.
  -- This code is hideous, but it sorta comes with the territory.
  case Parsec.parse p_tokenize "code" input of
    Left err -> Left (show err)
    Right tokenstream -> do
      let (idents, rest) = span (isTypeLeader . snd) tokenstream
      if length idents < 2
        then return $ Code (stripSemi input)
        else let (typ, var) = (init idents, last idents)
             in parseDecl typ var rest
  where
  -- A '(' right after the name means a function declaration: split the
  -- input at the token following the matching-ish ')' into header + body.
  parseDecl typ var ((npos, Punct "("):rest) =
    case dropWhile (\(_,tok) -> tok /= Punct ")") rest of
      (rparen:(next,_):rest) ->
        return $ FunDecl (substr Nothing (Just next) input)
                         (substr (Just next) Nothing input)
      _ -> Left $ "couldn't find rparen"
  -- Otherwise it is a variable declaration, optionally with an
  -- initializer starting at the first non-';' token.
  parseDecl typ var rest =
    let nextpos = case rest of
                    ((pos, tok):rest) | tok /= Punct ";" -> Just pos
                    _ -> Nothing
        code = case nextpos of
                 Just n -> substr (Just (tokPos var)) Nothing input
                 Nothing -> ""
    in return $ VarDecl (stripSemi $ substr Nothing nextpos input)
                        (stripSemi code)
  -- Tokens that can begin a type: identifiers and pointer stars.
  isTypeLeader (Ident _) = True
  isTypeLeader (Punct "*") = True
  isTypeLeader _ = False
-- | Split the input into identifiers and single-character punctuation,
-- recording the source position of each token; whitespace is skipped
-- after every token.
p_tokenize :: Parser TokenStream
p_tokenize = many (annotate p_ident <|> annotate p_token) where
  p_ident = liftM Ident $ withSpaces $ many1 (letter <|> digit <|> char '_')
  p_token = do l <- withSpaces $ oneOf "()*[]={};"; return $ Punct [l]
  withSpaces p = do r <- p; skipMany space; return r
  -- pair a parser's result with the position where it began
  annotate p = do
    pos <- getPosition
    p' <- p
    return (pos, p')
-- | HUnit assertion: @input@ must parse to exactly @expected@.
assertParse :: CodeSnippet -> String -> Assertion
assertParse expected input = do
  case parse input of
    Left error -> assertFailure $ show input ++ " failed to parse: " ++ error
    Right snip -> assertEqual input expected snip
testParse exp input = test $ assertParse exp input
-- | Run the snippet-classification test suite.
runTests =
  runTestTT $ test $ TestList [
      testParse (VarDecl "int x" "x = 3") "int x = 3;"
    , testParse (VarDecl "int x" "x = 3") "int x = 3"
    , testParse (VarDecl "int xx" "xx = 3") "int xx = 3;"
    , testParse (Code "x = 3") "x = 3"
    , testParse (Code "*((char*)x) = 0") "*((char*)x) = 0;"
    , testParse (VarDecl "int x" "") "int x"
    , testParse (VarDecl "const char* x" "") "const char* x;"
    , testParse (Code "x+y = 4") "x+y = 4;"
    , testParse (Code "for (;;) x") "for (;;) x;"
    , testParse (FunDecl "void f()" "{}") "void f() {}"
    ]
main = runTests
| LeifW/c-repl | CodeSnippet.hs | bsd-3-clause | 3,735 | 0 | 18 | 978 | 1,246 | 634 | 612 | 85 | 9 |
import KMC.Kleenex.Lang
import KMC.Util.Heredoc
import KMC.RangeSet
import Data.Word
import Debug.Trace
import KMC.Visualization
import KMC.SSTConstruction hiding (Var)
import KMC.FSTConstruction
-- Sample Kleenex programs (heredoc contents are the programs themselves).
s1 :: String
s1 = [strQ|
main := (~aaa | aa)*
aaa := /aaa/ "bcd"
aa := /aa/ "de"
|]
s2 :: String
s2 = [strQ|
main := ~(l r)
l := /(a|b)*/ "AB"
r := /(c|d)*/ "CD"
|]
s3 :: String
s3 = [strQ|
main := /a/ ~/b/ /c/
|]
s4 :: String
s4 = [strQ|
main := ~(/def*/?)
|]
-- Parsed mu-terms for each sample; 'head' takes the main pipeline stage
-- and 'error' reports a parse failure of the literal program above.
mu1 :: (KleenexMu a, Marked)
mu1 = either (error "mu1 - fail") head $ testKleenex s1
sm1 :: (SimpleMu, Marked)
sm1 = either (error "sm1") head $ testSimple s1
mu2 = either (error "mu2") head $ testKleenex s2
mu3 = either (error "mu3") head $ testKleenex s3
mu4 = either (error "mu4") head $ testKleenex s4
-- fN: FST built directly from the mu-term;
-- gN: FST built with the marked-subterm DFA optimisation, for comparison.
f1 :: FST Int (RangeSet Word8) (WithNull KleenexOutTerm)
f1 = fromMu (fst mu1)
g1 :: FST Int (RangeSet Word8) (WithNull KleenexOutTerm)
g1 = fromMuWithDFA (snd mu1) (fst mu1)
f2 :: FST Int (RangeSet Word8) (WithNull KleenexOutTerm)
f2 = fromMu (fst mu2)
g2 :: FST Int (RangeSet Word8) (WithNull KleenexOutTerm)
g2 = fromMuWithDFA (snd mu2) (fst mu2)
f3 :: FST Int (RangeSet Word8) (WithNull KleenexOutTerm)
f3 = fromMu (fst mu3)
g3 :: FST Int (RangeSet Word8) (WithNull KleenexOutTerm)
g3 = fromMuWithDFA (snd mu3) (fst mu3)
f4 :: FST Int (RangeSet Word8) (WithNull KleenexOutTerm)
f4 = fromMu (fst mu4)
g4 :: FST Int (RangeSet Word8) (WithNull KleenexOutTerm)
g4 = fromMuWithDFA (snd mu4) (fst mu4)
-- | Render an FST to a Graphviz file.
viz :: FST Int (RangeSet Word8) (WithNull KleenexOutTerm)
    -> FilePath -> IO ()
viz = mkVizToFile fstToDot
-- | Convert the FST to an SST first, then render that.
vizSST :: FST Int (RangeSet Word8) (WithNull KleenexOutTerm)
    -> FilePath -> IO ()
vizSST f p = let s = sstFromFST f False
             in mkVizToFile sstToDot s p
| diku-kmc/repg | test/Tests/FSTConstruction.hs | mit | 1,789 | 0 | 9 | 362 | 696 | 363 | 333 | -1 | -1 |
module Game.Board where
-- | The two participants of a turn-based game.
data Player = First
            | Second
            deriving (Eq)

-- | The player whose turn follows the given player's.
-- Rewritten with pattern matching (the idiomatic form) instead of
-- @if p == First then … else …@; behaviour is unchanged.
nextPlayer :: Player -> Player
nextPlayer First  = Second
nextPlayer Second = First
-- | Interface a game state must implement to be searchable/playable.
class Board b where
    -- these first three functions are technically
    -- all that is necessary to use MCTS:
    gameOver :: b -> Bool     -- ^ has the game ended?
    choices :: b -> [b]       -- ^ all successor positions
    scoreBoard :: b -> Int    -- ^ numeric evaluation of a finished board
    -- and these are accessory functions for gameplay:
    winner :: b -> Maybe Player  -- ^ winning player, if any
    turn :: b -> Player          -- ^ whose move it is
| rudyardrichter/MCTS | old/source/Game/Board.hs | mit | 493 | 0 | 8 | 154 | 115 | 65 | 50 | 12 | 2 |
module Code.Nonprefix where
import Code.Formal
import Challenger.Partial
import Inter.Types
import Autolib.Reporter
import Autolib.Set
import Autolib.ToDoc
import Data.Typeable
-- | Exercise tag: find a small code that is neither prefix-free nor
-- suffix-free (its mirror image is not prefix-free either).
-- NOTE: all user-facing strings below are German by design; do not
-- translate them, they are part of the exercise output.
data Nonprefix = Nonprefix deriving ( Read, Show, Typeable )
instance OrderScore Nonprefix where
    scoringOrder _ = Increasing
instance Partial Nonprefix () ( Set String ) where
    describe Nonprefix () = vcat
        [ text "Gesucht ist ein (kleiner) Code,"
        , text "der keine Präfixcode ist"
        , text "und dessen Spiegelbild kein Präfixcode ist."
        , text ""
        , parens ( text "Als Größe zählt hier die Summe der Wortlängen." )
        ]
    initial Nonprefix () = mkSet [ "10", "11" ]
    -- A partial solution must have non-empty words and must fail
    -- prefix-freeness both directly and on the reversed words.
    partial Nonprefix () ws = do
        when ( any null $ setToList ws )
             $ reject $ text "Kein Codewort darf leer sein."
        should_not_be_prefix_free
            ( text "" ) ws
        should_not_be_prefix_free
            ( text "der Spiegelbilder" ) $ smap reverse ws
    -- A total solution must additionally be a uniquely decodable code;
    -- on failure a prefix of a counter-example word is shown as a hint.
    total Nonprefix () ws = do
        inform $ text "Ist die Menge" <+> toDoc ws <+> text "ein Code?"
        case code_counter_examples $ setToList ws of
            [] -> inform $ text "Ja."
            w : _ -> reject $ vcat
                [ text "Nein."
                , nest 4 $ text "Hinweis: betrachten Sie das Wort"
                , nest 8 $ text (take (max 1 (length w `div` 2)) w ++ "..." )
                ]
-- Reject @ws@ (announced via @tag@) when it *is* prefix-free, i.e. when
-- no word is a prefix of another.
should_not_be_prefix_free tag ws = do
    inform $ vcat
        [ text "Die Menge" <+> tag
        , nest 4 $ toDoc ws
        , text "soll nicht präfixfrei sein."
        ]
    when ( null $ prefix_pairs ws ) $ reject $ text "Sie ist es aber doch."
-- Score: total length of all code words (smaller is better at first).
instance Measure Nonprefix () ( Set String ) where
    measure Nonprefix () ws =
        fromIntegral $ sum $ map length $ setToList ws
make_fixed = direct Nonprefix ()
| florianpilz/autotool | src/Code/Nonprefix.hs | gpl-2.0 | 1,722 | 10 | 24 | 450 | 548 | 266 | 282 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CognitoIdentity.GetCredentialsForIdentity
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns credentials for the provided identity ID. Any provided logins
-- will be validated against supported login providers. If the token is for
-- cognito-identity.amazonaws.com, it will be passed through to AWS Security
-- Token Service with the appropriate role for the token.
--
-- <http://docs.aws.amazon.com/cognitoidentity/latest/APIReference/API_GetCredentialsForIdentity.html>
module Network.AWS.CognitoIdentity.GetCredentialsForIdentity
(
-- * Request
GetCredentialsForIdentity
-- ** Request constructor
, getCredentialsForIdentity
-- ** Request lenses
, gcfiIdentityId
, gcfiLogins
-- * Response
, GetCredentialsForIdentityResponse
-- ** Response constructor
, getCredentialsForIdentityResponse
-- ** Response lenses
, gcfirCredentials
, gcfirIdentityId
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CognitoIdentity.Types
import qualified GHC.Exts
-- | Request payload: the identity to fetch credentials for, plus any
-- provider login tokens to validate.
data GetCredentialsForIdentity = GetCredentialsForIdentity
    { _gcfiIdentityId :: Text
    , _gcfiLogins :: Map Text Text
    } deriving (Eq, Read, Show)
-- | 'GetCredentialsForIdentity' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gcfiIdentityId' @::@ 'Text'
--
-- * 'gcfiLogins' @::@ 'HashMap' 'Text' 'Text'
--
getCredentialsForIdentity :: Text -- ^ 'gcfiIdentityId'
                          -> GetCredentialsForIdentity
getCredentialsForIdentity p1 = GetCredentialsForIdentity
    { _gcfiIdentityId = p1
    , _gcfiLogins     = mempty
    }
-- | A unique identifier in the format REGION:GUID.
gcfiIdentityId :: Lens' GetCredentialsForIdentity Text
gcfiIdentityId = lens _gcfiIdentityId (\s a -> s { _gcfiIdentityId = a })
-- | A set of optional name-value pairs that map provider names to provider tokens.
gcfiLogins :: Lens' GetCredentialsForIdentity (HashMap Text Text)
gcfiLogins = lens _gcfiLogins (\s a -> s { _gcfiLogins = a }) . _Map
-- | Response payload: the (possibly absent) credentials and the identity
-- they were issued for.
data GetCredentialsForIdentityResponse = GetCredentialsForIdentityResponse
    { _gcfirCredentials :: Maybe Credentials
    , _gcfirIdentityId  :: Maybe Text
    } deriving (Eq, Read, Show)
-- | 'GetCredentialsForIdentityResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gcfirCredentials' @::@ 'Maybe' 'Credentials'
--
-- * 'gcfirIdentityId' @::@ 'Maybe' 'Text'
--
getCredentialsForIdentityResponse :: GetCredentialsForIdentityResponse
getCredentialsForIdentityResponse = GetCredentialsForIdentityResponse
    { _gcfirIdentityId  = Nothing
    , _gcfirCredentials = Nothing
    }
-- | Credentials for the provided identity ID.
gcfirCredentials :: Lens' GetCredentialsForIdentityResponse (Maybe Credentials)
gcfirCredentials = lens _gcfirCredentials (\s a -> s { _gcfirCredentials = a })
-- | A unique identifier in the format REGION:GUID.
gcfirIdentityId :: Lens' GetCredentialsForIdentityResponse (Maybe Text)
gcfirIdentityId = lens _gcfirIdentityId (\s a -> s { _gcfirIdentityId = a })
-- Wire-format instances: the operation is a POST of a JSON body to "/",
-- with no query parameters.
instance ToPath GetCredentialsForIdentity where
    toPath = const "/"
instance ToQuery GetCredentialsForIdentity where
    toQuery = const mempty
instance ToHeaders GetCredentialsForIdentity
instance ToJSON GetCredentialsForIdentity where
    toJSON GetCredentialsForIdentity{..} = object
        [ "IdentityId" .= _gcfiIdentityId
        , "Logins"     .= _gcfiLogins
        ]
instance AWSRequest GetCredentialsForIdentity where
    type Sv GetCredentialsForIdentity = CognitoIdentity
    type Rs GetCredentialsForIdentity = GetCredentialsForIdentityResponse
    request  = post "GetCredentialsForIdentity"
    response = jsonResponse
instance FromJSON GetCredentialsForIdentityResponse where
    parseJSON = withObject "GetCredentialsForIdentityResponse" $ \o -> GetCredentialsForIdentityResponse
        <$> o .:? "Credentials"
        <*> o .:? "IdentityId"
| kim/amazonka | amazonka-cognito-identity/gen/Network/AWS/CognitoIdentity/GetCredentialsForIdentity.hs | mpl-2.0 | 4,969 | 0 | 11 | 960 | 597 | 358 | 239 | 68 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Test.AWS.DataPipeline.Internal
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Test.AWS.DataPipeline.Internal where
import Test.AWS.Prelude
| olorin/amazonka | amazonka-datapipeline/test/Test/AWS/DataPipeline/Internal.hs | mpl-2.0 | 631 | 0 | 4 | 140 | 25 | 21 | 4 | 4 | 0 |
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module Propellor.Exception where
import Propellor.Types
import Propellor.Types.Exception
import Propellor.Message
import Utility.Exception
import Control.Exception (AsyncException)
#if MIN_VERSION_base(4,7,0)
import Control.Exception (SomeAsyncException)
#endif
import Control.Monad.Catch
import Control.Monad.IO.Class (MonadIO)
import Prelude
-- | Catches all exceptions (except for `StopPropellorException` and
-- `AsyncException` and `SomeAsyncException`) and returns FailedChange.
-- | Run an action, converting any caught exception into a warning
-- message plus 'FailedChange' (see 'catchPropellor'' for what is
-- deliberately re-thrown rather than caught).
catchPropellor :: (MonadIO m, MonadCatch m) => m Result -> m Result
catchPropellor a = either err return =<< tryPropellor a
  where
	err e = warningMessage (show e) >> return FailedChange
-- | Like 'catch', but 'AsyncException', 'SomeAsyncException' (on
-- base >= 4.7) and 'StopPropellorException' are re-thrown instead of
-- being handed to the handler; only other 'SomeException's reach it.
catchPropellor' :: MonadCatch m => m a -> (SomeException -> m a) -> m a
catchPropellor' a onerr = a `catches`
	[ Handler (\ (e :: AsyncException) -> throwM e)
#if MIN_VERSION_base(4,7,0)
	, Handler (\ (e :: SomeAsyncException) -> throwM e)
#endif
	, Handler (\ (e :: StopPropellorException) -> throwM e)
	, Handler (\ (e :: SomeException) -> onerr e)
	]
-- | Catches all exceptions (except for `StopPropellorException` and
-- `AsyncException`).
tryPropellor :: MonadCatch m => m a -> m (Either SomeException a)
tryPropellor a = (return . Right =<< a) `catchPropellor'` (return . Left)
| ArchiveTeam/glowing-computing-machine | src/Propellor/Exception.hs | bsd-2-clause | 1,307 | 4 | 11 | 190 | 367 | 200 | 167 | 20 | 1 |
import Control.Concurrent
import qualified Control.Concurrent.Chan as Chan
import Control.Exception
import Control.Monad
import Data.Functor
import Data.List.Extra
import Data.Time
import Data.IORef
import Prelude hiding (catch)
import Paths
import qualified Graphics.UI.Threepenny as UI
import Graphics.UI.Threepenny.Core hiding (text)
{-----------------------------------------------------------------------------
Chat
------------------------------------------------------------------------------}
-- | Start the chat server: one global broadcast channel shared by all
-- connected browser windows; each connection gets its own duplicate.
main :: IO ()
main = do
    static <- getStaticDir
    messages <- Chan.newChan
    startGUI defaultConfig
        { tpCustomHTML = Just "chat.html"
        , tpStatic = Just static
        } $ setup messages
-- A chat message: timestamp, nickname, body text.
type Message = (UTCTime, String, String)
-- | Per-connection setup: build the page, fork a thread that streams
-- broadcast messages into this window, and announce departure (and stop
-- the thread) when the browser disconnects.
setup :: Chan Message -> Window -> UI ()
setup globalMsgs window = do
    msgs <- liftIO $ Chan.dupChan globalMsgs
    return window # set title "Chat"
    (nickRef, nickname) <- mkNickname
    messageArea <- mkMessageArea msgs nickRef
    getBody window #+
        [ UI.div #. "header" #+ [string "Threepenny Chat"]
        , UI.div #. "gradient"
        , viewSource
        , element nickname
        , element messageArea
        ]
    messageReceiver <- liftIO $ forkIO $ receiveMessages window msgs messageArea
    on UI.disconnect window $ const $ liftIO $ do
        killThread messageReceiver
        now <- getCurrentTime
        nick <- readIORef nickRef
        Chan.writeChan msgs (now,nick,"( left the conversation )")
-- Blocks forever, appending each message broadcast on the channel to
-- this window's message area (runs on its own thread per connection).
receiveMessages w msgs messageArea = do
    messages <- Chan.getChanContents msgs
    forM_ messages $ \msg -> do
        atomic w $ runUI w $ do
          -- FIXME: withWindow should include a call to atomic ?
          element messageArea #+ [mkMessage msg]
          UI.scrollToBottom messageArea
-- | The message area plus the send box; pressing send broadcasts the
-- trimmed text (if non-empty and a nickname is set) with a timestamp.
mkMessageArea :: Chan Message -> IORef String -> UI Element
mkMessageArea msgs nickname = do
    input <- UI.textarea #. "send-textarea"
    on UI.sendValue input $ (. trim) $ \content -> do
        element input # set value ""
        when (not (null content)) $ liftIO $ do
            now <- getCurrentTime
            nick <- readIORef nickname
            when (not (null nick)) $
                Chan.writeChan msgs (now,nick,content)
    UI.div #. "message-area" #+ [UI.div #. "send-area" #+ [element input]]
-- | The nickname entry field; the current (trimmed) nickname is kept in
-- the returned 'IORef', updated on every keystroke.
mkNickname :: UI (IORef String, Element)
mkNickname = do
    input <- UI.input #. "name-input"
    el <- UI.div #. "name-area" #+
        [ UI.span #. "name-label" #+ [string "Your name "]
        , element input
        ]
    UI.setFocus input
    nick <- liftIO $ newIORef ""
    on UI.keyup input $ \_ -> liftIO . writeIORef nick . trim =<< get value input
    return (nick,el)
-- | Render a single chat message as timestamp / name / content divs.
mkMessage :: Message -> UI Element
mkMessage (timestamp, nick, content) =
    UI.div #. "message" #+
        [ UI.div #. "timestamp" #+ [string $ show timestamp]
        , UI.div #. "name" #+ [string $ nick ++ " says:"]
        , UI.div #. "content" #+ [string content]
        ]
-- | Link to this sample's source code.
viewSource :: UI Element
viewSource =
    UI.anchor #. "view-source" # set UI.href url #+ [string "View source code"]
    where
    url = samplesURL ++ "Chat.hs"
| yuvallanger/threepenny-gui | samples/Chat.hs | bsd-3-clause | 3,226 | 0 | 19 | 870 | 973 | 486 | 487 | 76 | 1 |
{-# LANGUAGE MultiParamTypeClasses
, FlexibleInstances
, UndecidableInstances
, DeriveDataTypeable
#-}
module Numeric.Coalgebra.Incidence
( Interval'(..)
, zeta'
, moebius'
) where
import Data.Data
import Numeric.Algebra.Class
import Numeric.Algebra.Unital
import Numeric.Algebra.Commutative
import Numeric.Ring.Class
import Numeric.Order.LocallyFinite
-- | the dual incidence algebra basis
--
-- An @'Interval'' a@ is an ordered pair of endpoints; it indexes the
-- basis of the coalgebra dual to the incidence algebra of the order.
data Interval' a = Interval' a a deriving (Eq,Ord,Show,Read,Data,Typeable)
-- Comultiplication splices two intervals: @Interval' a b@ and
-- @Interval' b' c@ combine to @Interval' a c@ exactly when the inner
-- endpoints match; otherwise the pair contributes 'zero'.
instance (Eq a, Commutative r, Monoidal r, Semiring r) => Coalgebra r (Interval' a) where
  comult f (Interval' a b) (Interval' b' c)
    | b == b'   = f (Interval' a c)
    | otherwise = zero
-- The counit evaluates at the full interval @Interval' minBound maxBound@,
-- which is why this instance needs the extra 'Bounded' constraint.
instance (Eq a, Bounded a, Commutative r, Monoidal r, Semiring r) => CounitalCoalgebra r (Interval' a) where
  counit f = f (Interval' minBound maxBound)
-- | The zeta function of the dual incidence algebra: constantly 'one'
-- on every interval.
zeta' :: Unital r => Interval' a -> r
zeta' _ = one
-- | The Moebius function of the dual incidence algebra, computed by
-- Moebius inversion between the interval's endpoints.
moebius' :: (Ring r, LocallyFiniteOrder a) => Interval' a -> r
moebius' (Interval' lo hi) = moebiusInversion lo hi
| athanclark/algebra | src/Numeric/Coalgebra/Incidence.hs | bsd-3-clause | 1,040 | 0 | 9 | 214 | 342 | 181 | 161 | 25 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Desugaring arrow commands
-}
{-# LANGUAGE CPP #-}
module DsArrows ( dsProcExpr ) where
#include "HsVersions.h"
import Match
import DsUtils
import DsMonad
import HsSyn hiding (collectPatBinders, collectPatsBinders, collectLStmtsBinders, collectLStmtBinders, collectStmtBinders )
import TcHsSyn
import qualified HsUtils
-- NB: The desugarer, which straddles the source and Core worlds, sometimes
-- needs to see source types (newtypes etc), and sometimes not
-- So WATCH OUT; check each use of split*Ty functions.
-- Sigh. This is a pain.
import {-# SOURCE #-} DsExpr ( dsExpr, dsLExpr, dsLocalBinds )
import TcType
import TcEvidence
import CoreSyn
import CoreFVs
import CoreUtils
import MkCore
import DsBinds (dsHsWrapper)
import Name
import Var
import Id
import DataCon
import TysWiredIn
import BasicTypes
import PrelNames
import Outputable
import Bag
import VarSet
import SrcLoc
import ListSetOps( assocDefault )
import FastString
import Data.List
-- | The arrow combinators, as Core expressions, that desugared arrow
-- notation is built from.  One environment is constructed per @proc@
-- (or per command argument) from its syntax table; see 'mkCmdEnv'.
data DsCmdEnv = DsCmdEnv {
    arr_id, compose_id, first_id, app_id, choice_id, loop_id :: CoreExpr
  }
mkCmdEnv :: CmdSyntaxTable Id -> DsM ([CoreBind], DsCmdEnv)
-- See Note [CmdSyntaxTable] in HsExpr
--
-- Desugar each method expression once, bind it to a fresh local, and
-- build a 'DsCmdEnv' whose fields are plain variable references to
-- those locals.  The returned bindings must be wrapped (via 'mkLets')
-- around any expression that uses the environment.
mkCmdEnv tc_meths
  = do { (meth_binds, prs) <- mapAndUnzipM mk_bind tc_meths
       ; return (meth_binds, DsCmdEnv {
             arr_id     = Var (find_meth prs arrAName),
             compose_id = Var (find_meth prs composeAName),
             first_id   = Var (find_meth prs firstAName),
             app_id     = Var (find_meth prs appAName),
             choice_id  = Var (find_meth prs choiceAName),
             loop_id    = Var (find_meth prs loopAName)
         }) }
  where
    -- Desugar one (name, expression) pair into a non-recursive binding
    -- of a fresh system local, remembering the association.
    mk_bind (std_name, expr)
      = do { rhs <- dsExpr expr
           ; id <- newSysLocalDs (exprType rhs)
           ; return (NonRec id rhs, (std_name, id)) }

    -- Look up a method's local; a missing entry is a compiler bug.
    find_meth prs std_name
      = assocDefault (mk_panic std_name) prs std_name
    mk_panic std_name = pprPanic "mkCmdEnv" (ptext (sLit "Not found:") <+> ppr std_name)
-- Smart constructors that apply the method expressions from a
-- 'DsCmdEnv' at explicitly supplied types.  The comment above each one
-- gives the (specialised) type of the underlying arrow method.

-- arr :: forall b c. (b -> c) -> a b c
do_arr :: DsCmdEnv -> Type -> Type -> CoreExpr -> CoreExpr
do_arr ids b_ty c_ty f = mkApps (arr_id ids) [Type b_ty, Type c_ty, f]

-- (>>>) :: forall b c d. a b c -> a c d -> a b d
do_compose :: DsCmdEnv -> Type -> Type -> Type ->
              CoreExpr -> CoreExpr -> CoreExpr
do_compose ids b_ty c_ty d_ty f g
  = mkApps (compose_id ids) [Type b_ty, Type c_ty, Type d_ty, f, g]

-- first :: forall b c d. a b c -> a (b,d) (c,d)
do_first :: DsCmdEnv -> Type -> Type -> Type -> CoreExpr -> CoreExpr
do_first ids b_ty c_ty d_ty f
  = mkApps (first_id ids) [Type b_ty, Type c_ty, Type d_ty, f]

-- app :: forall b c. a (a b c, b) c
do_app :: DsCmdEnv -> Type -> Type -> CoreExpr
do_app ids b_ty c_ty = mkApps (app_id ids) [Type b_ty, Type c_ty]

-- (|||) :: forall b d c. a b d -> a c d -> a (Either b c) d
-- note the swapping of d and c
do_choice :: DsCmdEnv -> Type -> Type -> Type ->
             CoreExpr -> CoreExpr -> CoreExpr
do_choice ids b_ty c_ty d_ty f g
  = mkApps (choice_id ids) [Type b_ty, Type d_ty, Type c_ty, f, g]

-- loop :: forall b d c. a (b,d) (c,d) -> a b c
-- note the swapping of d and c
do_loop :: DsCmdEnv -> Type -> Type -> Type -> CoreExpr -> CoreExpr
do_loop ids b_ty c_ty d_ty f
  = mkApps (loop_id ids) [Type b_ty, Type d_ty, Type c_ty, f]

-- premap :: forall b c d. (b -> c) -> a c d -> a b d
-- premap f g = arr f >>> g
do_premap :: DsCmdEnv -> Type -> Type -> Type ->
             CoreExpr -> CoreExpr -> CoreExpr
do_premap ids b_ty c_ty d_ty f g
  = do_compose ids b_ty c_ty d_ty (do_arr ids b_ty c_ty f) g
-- | A Core expression raising the standard pattern-match failure error
-- for the given match context, at the given result type.
mkFailExpr :: HsMatchContext Id -> Type -> DsM CoreExpr
mkFailExpr ctxt ty
  = mkErrorAppDs pAT_ERROR_ID ty (matchContextErrString ctxt)
-- | Construct a Core expression for @\ (a :: a_ty, b :: b_ty) -> a@.
mkFstExpr :: Type -> Type -> DsM CoreExpr
mkFstExpr a_ty b_ty = do
    fst_var  <- newSysLocalDs a_ty
    snd_var  <- newSysLocalDs b_ty
    pair_var <- newSysLocalDs (mkCorePairTy a_ty b_ty)
    let select_fst = coreCasePair pair_var fst_var snd_var (Var fst_var)
    return (Lam pair_var select_fst)
-- | Construct a Core expression for @\ (a :: a_ty, b :: b_ty) -> b@.
mkSndExpr :: Type -> Type -> DsM CoreExpr
mkSndExpr a_ty b_ty = do
    fst_var  <- newSysLocalDs a_ty
    snd_var  <- newSysLocalDs b_ty
    pair_var <- newSysLocalDs (mkCorePairTy a_ty b_ty)
    let select_snd = coreCasePair pair_var fst_var snd_var (Var snd_var)
    return (Lam pair_var select_snd)
{-
Build case analysis of a tuple. This cannot be done in the DsM monad,
because the list of variables is typically not yet defined.
-}

-- coreCaseTuple [u1..] v [x1..xn] body
--      = case v of v { (x1, .., xn) -> body }
-- But the matching may be nested if the tuple is very big
coreCaseTuple :: UniqSupply -> Id -> [Id] -> CoreExpr -> CoreExpr
coreCaseTuple uniqs scrut_var vars body
  = mkTupleCase uniqs vars body scrut_var (Var scrut_var)

-- coreCasePair v x1 x2 body
--      = case v of (x1, x2) -> body
coreCasePair :: Id -> Id -> Id -> CoreExpr -> CoreExpr
coreCasePair scrut_var var1 var2 body
  = Case (Var scrut_var) scrut_var (exprType body)
         [(DataAlt (tupleDataCon Boxed 2), [var1, var2], body)]
-- | The type of a boxed pair @(t1, t2)@.
mkCorePairTy :: Type -> Type -> Type
mkCorePairTy t1 t2 = mkBoxedTupleTy [t1, t2]

-- | The Core expression @(e1, e2)@.
mkCorePairExpr :: CoreExpr -> CoreExpr -> CoreExpr
mkCorePairExpr e1 e2 = mkCoreTup [e1, e2]

-- | The Core expression @()@.
mkCoreUnitExpr :: CoreExpr
mkCoreUnitExpr = mkCoreTup []
{-
The input is divided into a local environment, which is a flat tuple
(unless it's too big), and a stack, which is a right-nested pair.
In general, the input has the form

        ((x1,...,xn), (s1,...(sk,())...))

where xi are the environment values, and si the ones on the stack,
with s1 being the "top", the first one to be matched with a lambda.
-}

-- | The type of such an environment-and-stack input:
-- the (big) tuple type of the ids, paired with the stack type.
envStackType :: [Id] -> Type -> Type
envStackType ids stack_ty = mkCorePairTy (mkBigCoreVarTupTy ids) stack_ty
-- splitTypeAt n (t1,... (tn,t)...) = ([t1, ..., tn], t)
--
-- Peel the first n components off a right-nested pair type (the stack
-- representation above).  Panics if the type is not a pair where a
-- deeper level is still required.
splitTypeAt :: Int -> Type -> ([Type], Type)
splitTypeAt n ty
  | n == 0 = ([], ty)
  | otherwise = case tcTyConAppArgs ty of
      [t, ty'] -> let (ts, ty_r) = splitTypeAt (n-1) ty' in (t:ts, ty_r)
      _ -> pprPanic "splitTypeAt" (ppr ty)
----------------------------------------------
--              buildEnvStack
--
--      ((x1,...,xn),stk)
--
-- Pack the environment variables into a (big) tuple and pair it with
-- the stack variable.
buildEnvStack :: [Id] -> Id -> CoreExpr
buildEnvStack env_ids stack_id
  = mkCorePairExpr (mkBigCoreVarTup env_ids) (Var stack_id)
----------------------------------------------
--              matchEnvStack
--
--      \ ((x1,...,xn),stk) -> body
--      =>
--      \ pair ->
--      case pair of (tup,stk) ->
--      case tup of (x1,...,xn) ->
--      body
matchEnvStack   :: [Id]         -- x1..xn
                -> Id           -- stk
                -> CoreExpr     -- e
                -> DsM CoreExpr
matchEnvStack env_ids stack_id body = do
    uniqs <- newUniqueSupply
    tup_var <- newSysLocalDs (mkBigCoreVarTupTy env_ids)
    -- Unpack the environment tuple into its component variables ...
    let match_env = coreCaseTuple uniqs tup_var env_ids body
    pair_id <- newSysLocalDs (mkCorePairTy (idType tup_var) (idType stack_id))
    -- ... after first splitting the input pair into tuple and stack.
    return (Lam pair_id (coreCasePair pair_id tup_var stack_id match_env))
----------------------------------------------
--              matchEnv
--
--      \ (x1,...,xn) -> body
--      =>
--      \ tup ->
--      case tup of (x1,...,xn) ->
--      body
--
-- Like 'matchEnvStack' but with no stack component.
matchEnv :: [Id]        -- x1..xn
         -> CoreExpr    -- e
         -> DsM CoreExpr
matchEnv env_ids body = do
    uniqs <- newUniqueSupply
    tup_id <- newSysLocalDs (mkBigCoreVarTupTy env_ids)
    return (Lam tup_id (coreCaseTuple uniqs tup_id env_ids body))
----------------------------------------------
--              matchVarStack
--
--      case (x1, ...(xn, s)...) -> e
--      =>
--      case z0 of (x1,z1) ->
--      case zn-1 of (xn,s) ->
--      e
--
-- Returns the outermost fresh variable (z0) bound to the whole nested
-- pair, together with the unpacking expression.
matchVarStack :: [Id] -> Id -> CoreExpr -> DsM (Id, CoreExpr)
matchVarStack [] stack_id body = return (stack_id, body)
matchVarStack (param_id:param_ids) stack_id body = do
    -- Unpack the tail of the stack first, then wrap one more case
    -- that splits off this parameter.
    (tail_id, tail_code) <- matchVarStack param_ids stack_id body
    pair_id <- newSysLocalDs (mkCorePairTy (idType param_id) (idType tail_id))
    return (pair_id, coreCasePair pair_id param_id tail_id tail_code)
-- | Source-level (HsExpr) counterpart of 'buildEnvStack':
-- @((x1,...,xn), stk)@.
mkHsEnvStackExpr :: [Id] -> Id -> LHsExpr Id
mkHsEnvStackExpr env_ids stack_id
  = mkLHsTupleExpr [mkLHsVarTuple env_ids, nlHsVar stack_id]
-- Translation of arrow abstraction
--
--      D; xs |-a c : () --> t'      ---> c'
--      --------------------------
--      D |- proc p -> c :: a t t'   ---> premap (\ p -> ((xs),())) c'
--
--              where (xs) is the tuple of variables bound by p
dsProcExpr
    :: LPat Id
    -> LHsCmdTop Id
    -> DsM CoreExpr
dsProcExpr pat (L _ (HsCmdTop cmd _unitTy cmd_ty ids)) = do
    (meth_binds, meth_ids) <- mkCmdEnv ids
    let locals = mkVarSet (collectPatBinders pat)
    -- Desugar the body command, feeding back the list of environment
    -- variables it actually uses.
    (core_cmd, _free_vars, env_ids) <- dsfixCmd meth_ids locals unitTy cmd_ty cmd
    let env_ty = mkBigCoreVarTupTy env_ids
    let env_stk_ty = mkCorePairTy env_ty unitTy
    let env_stk_expr = mkCorePairExpr (mkBigCoreVarTup env_ids) mkCoreUnitExpr
    -- If the proc pattern fails to match, raise the standard failure.
    fail_expr <- mkFailExpr ProcExpr env_stk_ty
    var <- selectSimpleMatchVarL pat
    match_code <- matchSimply (Var var) ProcExpr pat env_stk_expr fail_expr
    let pat_ty = hsLPatType pat
        proc_code = do_premap meth_ids pat_ty env_stk_ty cmd_ty
                    (Lam var match_code)
                    core_cmd
    -- Wrap the desugared method bindings around the whole expression.
    return (mkLets meth_binds proc_code)
{-
Translation of a command judgement of the form

        D; xs |-a c : stk --> t

to an expression e such that

        D |- e :: a (xs, stk) t
-}

-- | Desugar a located command by dropping the source location.
dsLCmd :: DsCmdEnv -> IdSet -> Type -> Type -> LHsCmd Id -> [Id]
       -> DsM (CoreExpr, IdSet)
dsLCmd ids local_vars stk_ty res_ty cmd env_ids
  = dsCmd ids local_vars stk_ty res_ty (unLoc cmd) env_ids
dsCmd :: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this command
-> Type -- type of the stack (right-nested tuple)
-> Type -- return type of the command
-> HsCmd Id -- command to desugar
-> [Id] -- list of vars in the input to this command
-- This is typically fed back,
-- so don't pull on it too early
-> DsM (CoreExpr, -- desugared expression
IdSet) -- subset of local vars that occur free
-- D |- fun :: a t1 t2
-- D, xs |- arg :: t1
-- -----------------------------
-- D; xs |-a fun -< arg : stk --> t2
--
-- ---> premap (\ ((xs), _stk) -> arg) fun
dsCmd ids local_vars stack_ty res_ty
(HsCmdArrApp arrow arg arrow_ty HsFirstOrderApp _)
env_ids = do
let
(a_arg_ty, _res_ty') = tcSplitAppTy arrow_ty
(_a_ty, arg_ty) = tcSplitAppTy a_arg_ty
core_arrow <- dsLExpr arrow
core_arg <- dsLExpr arg
stack_id <- newSysLocalDs stack_ty
core_make_arg <- matchEnvStack env_ids stack_id core_arg
return (do_premap ids
(envStackType env_ids stack_ty)
arg_ty
res_ty
core_make_arg
core_arrow,
exprFreeIds core_arg `intersectVarSet` local_vars)
-- D, xs |- fun :: a t1 t2
-- D, xs |- arg :: t1
-- ------------------------------
-- D; xs |-a fun -<< arg : stk --> t2
--
-- ---> premap (\ ((xs), _stk) -> (fun, arg)) app
dsCmd ids local_vars stack_ty res_ty
(HsCmdArrApp arrow arg arrow_ty HsHigherOrderApp _)
env_ids = do
let
(a_arg_ty, _res_ty') = tcSplitAppTy arrow_ty
(_a_ty, arg_ty) = tcSplitAppTy a_arg_ty
core_arrow <- dsLExpr arrow
core_arg <- dsLExpr arg
stack_id <- newSysLocalDs stack_ty
core_make_pair <- matchEnvStack env_ids stack_id
(mkCorePairExpr core_arrow core_arg)
return (do_premap ids
(envStackType env_ids stack_ty)
(mkCorePairTy arrow_ty arg_ty)
res_ty
core_make_pair
(do_app ids arg_ty res_ty),
(exprFreeIds core_arrow `unionVarSet` exprFreeIds core_arg)
`intersectVarSet` local_vars)
-- D; ys |-a cmd : (t,stk) --> t'
-- D, xs |- exp :: t
-- ------------------------
-- D; xs |-a cmd exp : stk --> t'
--
-- ---> premap (\ ((xs),stk) -> ((ys),(e,stk))) cmd
dsCmd ids local_vars stack_ty res_ty (HsCmdApp cmd arg) env_ids = do
core_arg <- dsLExpr arg
let
arg_ty = exprType core_arg
stack_ty' = mkCorePairTy arg_ty stack_ty
(core_cmd, free_vars, env_ids')
<- dsfixCmd ids local_vars stack_ty' res_ty cmd
stack_id <- newSysLocalDs stack_ty
arg_id <- newSysLocalDs arg_ty
-- push the argument expression onto the stack
let
stack' = mkCorePairExpr (Var arg_id) (Var stack_id)
core_body = bindNonRec arg_id core_arg
(mkCorePairExpr (mkBigCoreVarTup env_ids') stack')
-- match the environment and stack against the input
core_map <- matchEnvStack env_ids stack_id core_body
return (do_premap ids
(envStackType env_ids stack_ty)
(envStackType env_ids' stack_ty')
res_ty
core_map
core_cmd,
free_vars `unionVarSet`
(exprFreeIds core_arg `intersectVarSet` local_vars))
-- D; ys |-a cmd : stk t'
-- -----------------------------------------------
-- D; xs |-a \ p1 ... pk -> cmd : (t1,...(tk,stk)...) t'
--
-- ---> premap (\ ((xs), (p1, ... (pk,stk)...)) -> ((ys),stk)) cmd
dsCmd ids local_vars stack_ty res_ty
(HsCmdLam (MG { mg_alts = L _ [L _ (Match _ pats _
(GRHSs [L _ (GRHS [] body)] _ ))] }))
env_ids = do
let
pat_vars = mkVarSet (collectPatsBinders pats)
local_vars' = pat_vars `unionVarSet` local_vars
(pat_tys, stack_ty') = splitTypeAt (length pats) stack_ty
(core_body, free_vars, env_ids') <- dsfixCmd ids local_vars' stack_ty' res_ty body
param_ids <- mapM newSysLocalDs pat_tys
stack_id' <- newSysLocalDs stack_ty'
-- the expression is built from the inside out, so the actions
-- are presented in reverse order
let
-- build a new environment, plus what's left of the stack
core_expr = buildEnvStack env_ids' stack_id'
in_ty = envStackType env_ids stack_ty
in_ty' = envStackType env_ids' stack_ty'
fail_expr <- mkFailExpr LambdaExpr in_ty'
-- match the patterns against the parameters
match_code <- matchSimplys (map Var param_ids) LambdaExpr pats core_expr fail_expr
-- match the parameters against the top of the old stack
(stack_id, param_code) <- matchVarStack param_ids stack_id' match_code
-- match the old environment and stack against the input
select_code <- matchEnvStack env_ids stack_id param_code
return (do_premap ids in_ty in_ty' res_ty select_code core_body,
free_vars `minusVarSet` pat_vars)
dsCmd ids local_vars stack_ty res_ty (HsCmdPar cmd) env_ids
= dsLCmd ids local_vars stack_ty res_ty cmd env_ids
-- D, xs |- e :: Bool
-- D; xs1 |-a c1 : stk --> t
-- D; xs2 |-a c2 : stk --> t
-- ----------------------------------------
-- D; xs |-a if e then c1 else c2 : stk --> t
--
-- ---> premap (\ ((xs),stk) ->
-- if e then Left ((xs1),stk) else Right ((xs2),stk))
-- (c1 ||| c2)
dsCmd ids local_vars stack_ty res_ty (HsCmdIf mb_fun cond then_cmd else_cmd)
env_ids = do
core_cond <- dsLExpr cond
(core_then, fvs_then, then_ids) <- dsfixCmd ids local_vars stack_ty res_ty then_cmd
(core_else, fvs_else, else_ids) <- dsfixCmd ids local_vars stack_ty res_ty else_cmd
stack_id <- newSysLocalDs stack_ty
either_con <- dsLookupTyCon eitherTyConName
left_con <- dsLookupDataCon leftDataConName
right_con <- dsLookupDataCon rightDataConName
let mk_left_expr ty1 ty2 e = mkCoreConApps left_con [Type ty1, Type ty2, e]
mk_right_expr ty1 ty2 e = mkCoreConApps right_con [Type ty1, Type ty2, e]
in_ty = envStackType env_ids stack_ty
then_ty = envStackType then_ids stack_ty
else_ty = envStackType else_ids stack_ty
sum_ty = mkTyConApp either_con [then_ty, else_ty]
fvs_cond = exprFreeIds core_cond `intersectVarSet` local_vars
core_left = mk_left_expr then_ty else_ty (buildEnvStack then_ids stack_id)
core_right = mk_right_expr then_ty else_ty (buildEnvStack else_ids stack_id)
core_if <- case mb_fun of
Just fun -> do { core_fun <- dsExpr fun
; matchEnvStack env_ids stack_id $
mkCoreApps core_fun [core_cond, core_left, core_right] }
Nothing -> matchEnvStack env_ids stack_id $
mkIfThenElse core_cond core_left core_right
return (do_premap ids in_ty sum_ty res_ty
core_if
(do_choice ids then_ty else_ty res_ty core_then core_else),
fvs_cond `unionVarSet` fvs_then `unionVarSet` fvs_else)
{-
Case commands are treated in much the same way as if commands
(see above) except that there are more alternatives. For example
case e of { p1 -> c1; p2 -> c2; p3 -> c3 }
is translated to
premap (\ ((xs)*ts) -> case e of
p1 -> (Left (Left (xs1)*ts))
p2 -> Left ((Right (xs2)*ts))
p3 -> Right ((xs3)*ts))
((c1 ||| c2) ||| c3)
The idea is to extract the commands from the case, build a balanced tree
of choices, and replace the commands with expressions that build tagged
tuples, obtaining a case expression that can be desugared normally.
To build all this, we use triples describing segments of the list of
case bodies, containing the following fields:
* a list of expressions of the form (Left|Right)* ((xs)*ts), to be put
into the case replacing the commands
* a sum type that is the common type of these expressions, and also the
input type of the arrow
* a CoreExpr for an arrow built by combining the translated command
bodies with |||.
-}
dsCmd ids local_vars stack_ty res_ty
(HsCmdCase exp (MG { mg_alts = L l matches, mg_arg_tys = arg_tys
, mg_origin = origin }))
env_ids = do
stack_id <- newSysLocalDs stack_ty
-- Extract and desugar the leaf commands in the case, building tuple
-- expressions that will (after tagging) replace these leaves
let
leaves = concatMap leavesMatch matches
make_branch (leaf, bound_vars) = do
(core_leaf, _fvs, leaf_ids) <-
dsfixCmd ids (bound_vars `unionVarSet` local_vars) stack_ty res_ty leaf
return ([mkHsEnvStackExpr leaf_ids stack_id],
envStackType leaf_ids stack_ty,
core_leaf)
branches <- mapM make_branch leaves
either_con <- dsLookupTyCon eitherTyConName
left_con <- dsLookupDataCon leftDataConName
right_con <- dsLookupDataCon rightDataConName
let
left_id = HsVar (dataConWrapId left_con)
right_id = HsVar (dataConWrapId right_con)
left_expr ty1 ty2 e = noLoc $ HsApp (noLoc $ HsWrap (mkWpTyApps [ty1, ty2]) left_id ) e
right_expr ty1 ty2 e = noLoc $ HsApp (noLoc $ HsWrap (mkWpTyApps [ty1, ty2]) right_id) e
-- Prefix each tuple with a distinct series of Left's and Right's,
-- in a balanced way, keeping track of the types.
merge_branches (builds1, in_ty1, core_exp1)
(builds2, in_ty2, core_exp2)
= (map (left_expr in_ty1 in_ty2) builds1 ++
map (right_expr in_ty1 in_ty2) builds2,
mkTyConApp either_con [in_ty1, in_ty2],
do_choice ids in_ty1 in_ty2 res_ty core_exp1 core_exp2)
(leaves', sum_ty, core_choices) = foldb merge_branches branches
-- Replace the commands in the case with these tagged tuples,
-- yielding a HsExpr Id we can feed to dsExpr.
(_, matches') = mapAccumL (replaceLeavesMatch res_ty) leaves' matches
in_ty = envStackType env_ids stack_ty
core_body <- dsExpr (HsCase exp (MG { mg_alts = L l matches'
, mg_arg_tys = arg_tys
, mg_res_ty = sum_ty, mg_origin = origin }))
-- Note that we replace the HsCase result type by sum_ty,
-- which is the type of matches'
core_matches <- matchEnvStack env_ids stack_id core_body
return (do_premap ids in_ty sum_ty res_ty core_matches core_choices,
exprFreeIds core_body `intersectVarSet` local_vars)
-- D; ys |-a cmd : stk --> t
-- ----------------------------------
-- D; xs |-a let binds in cmd : stk --> t
--
-- ---> premap (\ ((xs),stk) -> let binds in ((ys),stk)) c
dsCmd ids local_vars stack_ty res_ty (HsCmdLet (L _ binds) body) env_ids = do
let
defined_vars = mkVarSet (collectLocalBinders binds)
local_vars' = defined_vars `unionVarSet` local_vars
(core_body, _free_vars, env_ids') <- dsfixCmd ids local_vars' stack_ty res_ty body
stack_id <- newSysLocalDs stack_ty
-- build a new environment, plus the stack, using the let bindings
core_binds <- dsLocalBinds binds (buildEnvStack env_ids' stack_id)
-- match the old environment and stack against the input
core_map <- matchEnvStack env_ids stack_id core_binds
return (do_premap ids
(envStackType env_ids stack_ty)
(envStackType env_ids' stack_ty)
res_ty
core_map
core_body,
exprFreeIds core_binds `intersectVarSet` local_vars)
-- D; xs |-a ss : t
-- ----------------------------------
-- D; xs |-a do { ss } : () --> t
--
-- ---> premap (\ (env,stk) -> env) c
dsCmd ids local_vars stack_ty res_ty (HsCmdDo (L _ stmts) _) env_ids = do
(core_stmts, env_ids') <- dsCmdDo ids local_vars res_ty stmts env_ids
let env_ty = mkBigCoreVarTupTy env_ids
core_fst <- mkFstExpr env_ty stack_ty
return (do_premap ids
(mkCorePairTy env_ty stack_ty)
env_ty
res_ty
core_fst
core_stmts,
env_ids')
-- D |- e :: forall e. a1 (e,stk1) t1 -> ... an (e,stkn) tn -> a (e,stk) t
-- D; xs |-a ci :: stki --> ti
-- -----------------------------------
-- D; xs |-a (|e c1 ... cn|) :: stk --> t ---> e [t_xs] c1 ... cn
dsCmd _ids local_vars _stack_ty _res_ty (HsCmdArrForm op _ args) env_ids = do
let env_ty = mkBigCoreVarTupTy env_ids
core_op <- dsLExpr op
(core_args, fv_sets) <- mapAndUnzipM (dsTrimCmdArg local_vars env_ids) args
return (mkApps (App core_op (Type env_ty)) core_args,
unionVarSets fv_sets)
dsCmd ids local_vars stack_ty res_ty (HsCmdCast coercion cmd) env_ids = do
(core_cmd, env_ids') <- dsCmd ids local_vars stack_ty res_ty cmd env_ids
wrapped_cmd <- dsHsWrapper (mkWpCast coercion) core_cmd
return (wrapped_cmd, env_ids')
dsCmd _ _ _ _ _ c = pprPanic "dsCmd" (ppr c)
-- D; ys |-a c : stk --> t      (ys <= xs)
-- ---------------------
-- D; xs |-a c : stk --> t      --->  premap (\ ((xs),stk) -> ((ys),stk)) c
dsTrimCmdArg
    :: IdSet            -- set of local vars available to this command
    -> [Id]             -- list of vars in the input to this command
    -> LHsCmdTop Id     -- command argument to desugar
    -> DsM (CoreExpr,   -- desugared expression
            IdSet)      -- subset of local vars that occur free
dsTrimCmdArg local_vars env_ids (L _ (HsCmdTop cmd stack_ty cmd_ty ids)) = do
    (meth_binds, meth_ids) <- mkCmdEnv ids
    (core_cmd, free_vars, env_ids') <- dsfixCmd meth_ids local_vars stack_ty cmd_ty cmd
    stack_id <- newSysLocalDs stack_ty
    -- Narrow the incoming environment (xs) to the one the command
    -- actually uses (ys), leaving the stack untouched.
    trim_code <- matchEnvStack env_ids stack_id (buildEnvStack env_ids' stack_id)
    let
        in_ty = envStackType env_ids stack_ty
        in_ty' = envStackType env_ids' stack_ty
        -- Skip the identity premap when no trimming is needed.
        arg_code = if env_ids' == env_ids then core_cmd else
                   do_premap meth_ids in_ty in_ty' cmd_ty trim_code core_cmd
    return (mkLets meth_binds arg_code, free_vars)
-- Given D; xs |-a c : stk --> t, builds c with xs fed back.
-- Typically needs to be prefixed with arr (\(p, stk) -> ((xs),stk))
dsfixCmd
    :: DsCmdEnv         -- arrow combinators
    -> IdSet            -- set of local vars available to this command
    -> Type             -- type of the stack (right-nested tuple)
    -> Type             -- return type of the command
    -> LHsCmd Id        -- command to desugar
    -> DsM (CoreExpr,   -- desugared expression
            IdSet,      -- subset of local vars that occur free
            [Id])       -- the same local vars as a list, fed back
dsfixCmd ids local_vars stk_ty cmd_ty cmd
  = trimInput (dsLCmd ids local_vars stk_ty cmd_ty cmd)
-- Feed back the list of local variables actually used a command,
-- for use as the input tuple of the generated arrow.
trimInput
    :: ([Id] -> DsM (CoreExpr, IdSet))
    -> DsM (CoreExpr,   -- desugared expression
            IdSet,      -- subset of local vars that occur free
            [Id])       -- same local vars as a list, fed back to
                        -- the inner function to form the tuple of
                        -- inputs to the arrow.
trimInput build_arrow
  = fixDs (\ ~(_,_,env_ids) -> do
        -- The lazy pattern is essential: env_ids is the knot being
        -- tied, so it must not be forced before build_arrow returns.
        (core_cmd, free_vars) <- build_arrow env_ids
        return (core_cmd, free_vars, varSetElems free_vars))
{-
Translation of command judgements of the form

        D |-a do { ss } : t
-}

dsCmdDo :: DsCmdEnv             -- arrow combinators
        -> IdSet                -- set of local vars available to this statement
        -> Type                 -- return type of the statement
        -> [CmdLStmt Id]        -- statements to desugar
        -> [Id]                 -- list of vars in the input to this statement
                                -- This is typically fed back,
                                -- so don't pull on it too early
        -> DsM (CoreExpr,       -- desugared expression
                IdSet)          -- subset of local vars that occur free

-- The renamer guarantees a do-block ends in a LastStmt, so an empty
-- statement list is a compiler bug.
dsCmdDo _ _ _ [] _ = panic "dsCmdDo"

--      D; xs |-a c : () --> t
--      --------------------------
--      D; xs |-a do { c } : t
--
--              ---> premap (\ (xs) -> ((xs), ())) c
dsCmdDo ids local_vars res_ty [L _ (LastStmt body _ _)] env_ids = do
    (core_body, env_ids') <- dsLCmd ids local_vars unitTy res_ty body env_ids
    let env_ty = mkBigCoreVarTupTy env_ids
    env_var <- newSysLocalDs env_ty
    -- Pair the environment with an empty stack before the final command.
    let core_map = Lam env_var (mkCorePairExpr (Var env_var) mkCoreUnitExpr)
    return (do_premap ids
                env_ty
                (mkCorePairTy env_ty unitTy)
                res_ty
                core_map
                core_body,
            env_ids')

dsCmdDo ids local_vars res_ty (stmt:stmts) env_ids = do
    let
        bound_vars = mkVarSet (collectLStmtBinders stmt)
        local_vars' = bound_vars `unionVarSet` local_vars
    -- Desugar the rest of the block first, feeding back the variables
    -- it actually needs as this statement's output environment.
    (core_stmts, _, env_ids') <- trimInput (dsCmdDo ids local_vars' res_ty stmts)
    (core_stmt, fv_stmt) <- dsCmdLStmt ids local_vars env_ids' stmt env_ids
    return (do_compose ids
                (mkBigCoreVarTupTy env_ids)
                (mkBigCoreVarTupTy env_ids')
                res_ty
                core_stmt
                core_stmts,
            fv_stmt)
{-
A statement maps one local environment to another, and is represented
as an arrow from one tuple type to another. A statement sequence is
translated to a composition of such arrows.
-}

-- | Desugar a located command statement by dropping the location.
dsCmdLStmt :: DsCmdEnv -> IdSet -> [Id] -> CmdLStmt Id -> [Id]
           -> DsM (CoreExpr, IdSet)
dsCmdLStmt ids local_vars out_ids cmd env_ids
  = dsCmdStmt ids local_vars out_ids (unLoc cmd) env_ids
dsCmdStmt
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> [Id] -- list of vars in the output of this statement
-> CmdStmt Id -- statement to desugar
-> [Id] -- list of vars in the input to this statement
-- This is typically fed back,
-- so don't pull on it too early
-> DsM (CoreExpr, -- desugared expression
IdSet) -- subset of local vars that occur free
-- D; xs1 |-a c : () --> t
-- D; xs' |-a do { ss } : t'
-- ------------------------------
-- D; xs |-a do { c; ss } : t'
--
-- ---> premap (\ ((xs)) -> (((xs1),()),(xs')))
-- (first c >>> arr snd) >>> ss
dsCmdStmt ids local_vars out_ids (BodyStmt cmd _ _ c_ty) env_ids = do
(core_cmd, fv_cmd, env_ids1) <- dsfixCmd ids local_vars unitTy c_ty cmd
core_mux <- matchEnv env_ids
(mkCorePairExpr
(mkCorePairExpr (mkBigCoreVarTup env_ids1) mkCoreUnitExpr)
(mkBigCoreVarTup out_ids))
let
in_ty = mkBigCoreVarTupTy env_ids
in_ty1 = mkCorePairTy (mkBigCoreVarTupTy env_ids1) unitTy
out_ty = mkBigCoreVarTupTy out_ids
before_c_ty = mkCorePairTy in_ty1 out_ty
after_c_ty = mkCorePairTy c_ty out_ty
snd_fn <- mkSndExpr c_ty out_ty
return (do_premap ids in_ty before_c_ty out_ty core_mux $
do_compose ids before_c_ty after_c_ty out_ty
(do_first ids in_ty1 c_ty out_ty core_cmd) $
do_arr ids after_c_ty out_ty snd_fn,
extendVarSetList fv_cmd out_ids)
-- D; xs1 |-a c : () --> t
-- D; xs' |-a do { ss } : t' xs2 = xs' - defs(p)
-- -----------------------------------
-- D; xs |-a do { p <- c; ss } : t'
--
-- ---> premap (\ (xs) -> (((xs1),()),(xs2)))
-- (first c >>> arr (\ (p, (xs2)) -> (xs'))) >>> ss
--
-- It would be simpler and more consistent to do this using second,
-- but that's likely to be defined in terms of first.
dsCmdStmt ids local_vars out_ids (BindStmt pat cmd _ _) env_ids = do
(core_cmd, fv_cmd, env_ids1) <- dsfixCmd ids local_vars unitTy (hsLPatType pat) cmd
let
pat_ty = hsLPatType pat
pat_vars = mkVarSet (collectPatBinders pat)
env_ids2 = varSetElems (mkVarSet out_ids `minusVarSet` pat_vars)
env_ty2 = mkBigCoreVarTupTy env_ids2
-- multiplexing function
-- \ (xs) -> (((xs1),()),(xs2))
core_mux <- matchEnv env_ids
(mkCorePairExpr
(mkCorePairExpr (mkBigCoreVarTup env_ids1) mkCoreUnitExpr)
(mkBigCoreVarTup env_ids2))
-- projection function
-- \ (p, (xs2)) -> (zs)
env_id <- newSysLocalDs env_ty2
uniqs <- newUniqueSupply
let
after_c_ty = mkCorePairTy pat_ty env_ty2
out_ty = mkBigCoreVarTupTy out_ids
body_expr = coreCaseTuple uniqs env_id env_ids2 (mkBigCoreVarTup out_ids)
fail_expr <- mkFailExpr (StmtCtxt DoExpr) out_ty
pat_id <- selectSimpleMatchVarL pat
match_code <- matchSimply (Var pat_id) (StmtCtxt DoExpr) pat body_expr fail_expr
pair_id <- newSysLocalDs after_c_ty
let
proj_expr = Lam pair_id (coreCasePair pair_id pat_id env_id match_code)
-- put it all together
let
in_ty = mkBigCoreVarTupTy env_ids
in_ty1 = mkCorePairTy (mkBigCoreVarTupTy env_ids1) unitTy
in_ty2 = mkBigCoreVarTupTy env_ids2
before_c_ty = mkCorePairTy in_ty1 in_ty2
return (do_premap ids in_ty before_c_ty out_ty core_mux $
do_compose ids before_c_ty after_c_ty out_ty
(do_first ids in_ty1 pat_ty in_ty2 core_cmd) $
do_arr ids after_c_ty out_ty proj_expr,
fv_cmd `unionVarSet` (mkVarSet out_ids `minusVarSet` pat_vars))
-- D; xs' |-a do { ss } : t
-- --------------------------------------
-- D; xs |-a do { let binds; ss } : t
--
-- ---> arr (\ (xs) -> let binds in (xs')) >>> ss
dsCmdStmt ids local_vars out_ids (LetStmt (L _ binds)) env_ids = do
-- build a new environment using the let bindings
core_binds <- dsLocalBinds binds (mkBigCoreVarTup out_ids)
-- match the old environment against the input
core_map <- matchEnv env_ids core_binds
return (do_arr ids
(mkBigCoreVarTupTy env_ids)
(mkBigCoreVarTupTy out_ids)
core_map,
exprFreeIds core_binds `intersectVarSet` local_vars)
-- D; ys |-a do { ss; returnA -< ((xs1), (ys2)) } : ...
-- D; xs' |-a do { ss' } : t
-- ------------------------------------
-- D; xs |-a do { rec ss; ss' } : t
--
-- xs1 = xs' /\ defs(ss)
-- xs2 = xs' - defs(ss)
-- ys1 = ys - defs(ss)
-- ys2 = ys /\ defs(ss)
--
-- ---> arr (\(xs) -> ((ys1),(xs2))) >>>
-- first (loop (arr (\((ys1),~(ys2)) -> (ys)) >>> ss)) >>>
-- arr (\((xs1),(xs2)) -> (xs')) >>> ss'
dsCmdStmt ids local_vars out_ids
(RecStmt { recS_stmts = stmts
, recS_later_ids = later_ids, recS_rec_ids = rec_ids
, recS_later_rets = later_rets, recS_rec_rets = rec_rets })
env_ids = do
let
env2_id_set = mkVarSet out_ids `minusVarSet` mkVarSet later_ids
env2_ids = varSetElems env2_id_set
env2_ty = mkBigCoreVarTupTy env2_ids
-- post_loop_fn = \((later_ids),(env2_ids)) -> (out_ids)
uniqs <- newUniqueSupply
env2_id <- newSysLocalDs env2_ty
let
later_ty = mkBigCoreVarTupTy later_ids
post_pair_ty = mkCorePairTy later_ty env2_ty
post_loop_body = coreCaseTuple uniqs env2_id env2_ids (mkBigCoreVarTup out_ids)
post_loop_fn <- matchEnvStack later_ids env2_id post_loop_body
--- loop (...)
(core_loop, env1_id_set, env1_ids)
<- dsRecCmd ids local_vars stmts later_ids later_rets rec_ids rec_rets
-- pre_loop_fn = \(env_ids) -> ((env1_ids),(env2_ids))
let
env1_ty = mkBigCoreVarTupTy env1_ids
pre_pair_ty = mkCorePairTy env1_ty env2_ty
pre_loop_body = mkCorePairExpr (mkBigCoreVarTup env1_ids)
(mkBigCoreVarTup env2_ids)
pre_loop_fn <- matchEnv env_ids pre_loop_body
-- arr pre_loop_fn >>> first (loop (...)) >>> arr post_loop_fn
let
env_ty = mkBigCoreVarTupTy env_ids
out_ty = mkBigCoreVarTupTy out_ids
core_body = do_premap ids env_ty pre_pair_ty out_ty
pre_loop_fn
(do_compose ids pre_pair_ty post_pair_ty out_ty
(do_first ids env1_ty later_ty env2_ty
core_loop)
(do_arr ids post_pair_ty out_ty
post_loop_fn))
return (core_body, env1_id_set `unionVarSet` env2_id_set)
dsCmdStmt _ _ _ _ s = pprPanic "dsCmdStmt" (ppr s)
-- loop (premap (\ ((env1_ids), ~(rec_ids)) -> (env_ids))
--       (ss >>> arr (\ (out_ids) -> ((later_rets),(rec_rets))))) >>>

-- | Desugar the body of a @rec@ command.  The recursive variables
-- (rec_ids) are threaded back through a 'loop' combinator, while the
-- later_ids flow onward to subsequent statements.
dsRecCmd
    :: DsCmdEnv             -- arrow combinators
    -> IdSet                -- set of local vars available to this statement
    -> [CmdLStmt Id]        -- list of statements inside the RecCmd
    -> [Id]                 -- list of vars defined here and used later
    -> [HsExpr Id]          -- expressions corresponding to later_ids
    -> [Id]                 -- list of vars fed back through the loop
    -> [HsExpr Id]          -- expressions corresponding to rec_ids
    -> DsM (CoreExpr,       -- desugared statement
            IdSet,          -- subset of local vars that occur free
            [Id])           -- same local vars as a list
dsRecCmd ids local_vars stmts later_ids later_rets rec_ids rec_rets = do
    let
        later_id_set = mkVarSet later_ids
        rec_id_set = mkVarSet rec_ids
        -- Both the fed-back and forwarded variables are in scope
        -- inside the rec body.
        local_vars' = rec_id_set `unionVarSet` later_id_set `unionVarSet` local_vars

    -- mk_pair_fn = \ (out_ids) -> ((later_rets),(rec_rets))
    core_later_rets <- mapM dsExpr later_rets
    core_rec_rets <- mapM dsExpr rec_rets
    let
        -- possibly polymorphic version of vars of later_ids and rec_ids
        out_ids = varSetElems (unionVarSets (map exprFreeIds (core_later_rets ++ core_rec_rets)))
        out_ty = mkBigCoreVarTupTy out_ids

        later_tuple = mkBigCoreTup core_later_rets
        later_ty = mkBigCoreVarTupTy later_ids

        rec_tuple = mkBigCoreTup core_rec_rets
        rec_ty = mkBigCoreVarTupTy rec_ids

        out_pair = mkCorePairExpr later_tuple rec_tuple
        out_pair_ty = mkCorePairTy later_ty rec_ty

    mk_pair_fn <- matchEnv out_ids out_pair

    -- ss: the statements themselves, desugared against a fixpoint of
    -- their own free-variable environment.
    (core_stmts, fv_stmts, env_ids) <- dsfixCmdStmts ids local_vars' out_ids stmts

    -- squash_pair_fn = \ ((env1_ids), ~(rec_ids)) -> (env_ids)
    rec_id <- newSysLocalDs rec_ty
    let
        env1_id_set = fv_stmts `minusVarSet` rec_id_set
        env1_ids = varSetElems env1_id_set
        env1_ty = mkBigCoreVarTupTy env1_ids
        in_pair_ty = mkCorePairTy env1_ty rec_ty
        -- Recursive variables are projected out of the fed-back tuple;
        -- everything else comes straight from the environment.
        core_body = mkBigCoreTup (map selectVar env_ids)
          where
            selectVar v
                | v `elemVarSet` rec_id_set
                = mkTupleSelector rec_ids v rec_id (Var rec_id)
                | otherwise = Var v

    squash_pair_fn <- matchEnvStack env1_ids rec_id core_body

    -- loop (premap squash_pair_fn (ss >>> arr mk_pair_fn))
    let
        env_ty = mkBigCoreVarTupTy env_ids
        core_loop = do_loop ids env1_ty later_ty rec_ty
            (do_premap ids in_pair_ty env_ty out_pair_ty
                squash_pair_fn
                (do_compose ids env_ty out_ty out_pair_ty
                    core_stmts
                    (do_arr ids out_ty out_pair_ty mk_pair_fn)))

    return (core_loop, env1_id_set, env1_ids)
{-
A sequence of statements (as in a rec) is desugared to an arrow between
two environments (no stack)
-}

dsfixCmdStmts
    :: DsCmdEnv         -- arrow combinators
    -> IdSet            -- set of local vars available to this statement
    -> [Id]             -- output vars of these statements
    -> [CmdLStmt Id]    -- statements to desugar
    -> DsM (CoreExpr,   -- desugared expression
            IdSet,      -- subset of local vars that occur free
            [Id])       -- same local vars as a list
-- Fixpoint wrapper: 'trimInput' feeds the computed free-variable list
-- back in as the input environment of the statements.
dsfixCmdStmts ids local_vars out_ids stmts
  = trimInput (dsCmdStmts ids local_vars out_ids stmts)

dsCmdStmts
    :: DsCmdEnv         -- arrow combinators
    -> IdSet            -- set of local vars available to this statement
    -> [Id]             -- output vars of these statements
    -> [CmdLStmt Id]    -- statements to desugar
    -> [Id]             -- list of vars in the input to these statements
    -> DsM (CoreExpr,   -- desugared expression
            IdSet)      -- subset of local vars that occur free

-- A single statement is desugared directly.
dsCmdStmts ids local_vars out_ids [stmt] env_ids
  = dsCmdLStmt ids local_vars out_ids stmt env_ids

-- A longer sequence composes the head statement with the rest; the
-- environment produced by the head (env_ids') is exactly the input
-- environment that the tail was desugared against.
dsCmdStmts ids local_vars out_ids (stmt:stmts) env_ids = do
    let
        bound_vars = mkVarSet (collectLStmtBinders stmt)
        local_vars' = bound_vars `unionVarSet` local_vars
    (core_stmts, _fv_stmts, env_ids') <- dsfixCmdStmts ids local_vars' out_ids stmts
    (core_stmt, fv_stmt) <- dsCmdLStmt ids local_vars env_ids' stmt env_ids
    return (do_compose ids
                (mkBigCoreVarTupTy env_ids)
                (mkBigCoreVarTupTy env_ids')
                (mkBigCoreVarTupTy out_ids)
                core_stmt
                core_stmts,
            fv_stmt)

-- The caller guarantees at least one statement.
dsCmdStmts _ _ _ [] _ = panic "dsCmdStmts []"
-- Match a list of expressions against a list of patterns, left-to-right.
matchSimplys :: [CoreExpr]              -- Scrutinees
             -> HsMatchContext Name     -- Match kind
             -> [LPat Id]               -- Patterns they should match
             -> CoreExpr                -- Return this if they all match
             -> CoreExpr                -- Return this if they don't
             -> DsM CoreExpr
matchSimplys [] _ctxt [] result_expr _fail_expr = return result_expr
matchSimplys (exp:exps) ctxt (pat:pats) result_expr fail_expr = do
    match_code <- matchSimplys exps ctxt pats result_expr fail_expr
    matchSimply exp ctxt pat match_code fail_expr
-- The two lists must be the same length; anything else is a compiler bug.
matchSimplys _ _ _ _ _ = panic "matchSimplys"

-- List of leaf expressions, with set of variables bound in each
leavesMatch :: LMatch Id (Located (body Id)) -> [(Located (body Id), IdSet)]
leavesMatch (L _ (Match _ pats _ (GRHSs grhss (L _ binds))))
  = let
        -- Variables bound by the patterns and by the local where-binds
        -- are in scope in every guarded right-hand side.
        defined_vars = mkVarSet (collectPatsBinders pats)
                        `unionVarSet`
                       mkVarSet (collectLocalBinders binds)
    in
    [(body,
      mkVarSet (collectLStmtsBinders stmts)
        `unionVarSet` defined_vars)
    | L _ (GRHS stmts body) <- grhss]

-- Replace the leaf commands in a match
replaceLeavesMatch
    :: Type                             -- new result type
    -> [Located (body' Id)]             -- replacement leaf expressions of that type
    -> LMatch Id (Located (body Id))    -- the matches of a case command
    -> ([Located (body' Id)],           -- remaining leaf expressions
        LMatch Id (Located (body' Id))) -- updated match
replaceLeavesMatch _res_ty leaves (L loc (Match mf pat mt (GRHSs grhss binds)))
  = let
        (leaves', grhss') = mapAccumL replaceLeavesGRHS leaves grhss
    in
    (leaves', L loc (Match mf pat mt (GRHSs grhss' binds)))

replaceLeavesGRHS
    :: [Located (body' Id)]             -- replacement leaf expressions of that type
    -> LGRHS Id (Located (body Id))     -- rhss of a case command
    -> ([Located (body' Id)],           -- remaining leaf expressions
        LGRHS Id (Located (body' Id)))  -- updated GRHS
-- Consume one replacement per GRHS, keeping the guard statements.
replaceLeavesGRHS (leaf:leaves) (L loc (GRHS stmts _))
  = (leaves, L loc (GRHS stmts leaf))
replaceLeavesGRHS [] _ = panic "replaceLeavesGRHS []"
-- | Reduce a non-empty list with a balanced tree of applications of
-- the combining function, halving the list length on each pass.
foldb :: (a -> a -> a) -> [a] -> a
foldb _ [] = error "foldb of empty list"
foldb _ [single] = single
foldb f items = foldb f (pairwise items)
  where
    -- Combine adjacent elements; an odd leftover passes through unchanged.
    pairwise (a : b : rest) = f a b : pairwise rest
    pairwise short          = short
{-
Note [Dictionary binders in ConPatOut] See also same Note in HsUtils
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The following functions to collect value variables from patterns are
copied from HsUtils, with one change: we also collect the dictionary
bindings (pat_binds) from ConPatOut. We need them for cases like
h :: Arrow a => Int -> a (Int,Int) Int
h x = proc (y,z) -> case compare x y of
GT -> returnA -< z+x
The type checker turns the case into
case compare x y of
GT { p77 = plusInt } -> returnA -< p77 z x
Here p77 is a local binding for the (+) operation.
See comments in HsUtils for why the other version does not include
these bindings.
-}
-- | Value variables bound by one pattern; unlike the HsUtils version
-- this also collects dictionary binders (see the Note above).
collectPatBinders :: LPat Id -> [Id]
collectPatBinders pat = collectl pat []

-- | Value variables bound by a list of patterns.
collectPatsBinders :: [LPat Id] -> [Id]
collectPatsBinders pats = foldr collectl [] pats

---------------------
collectl :: LPat Id -> [Id] -> [Id]
-- See Note [Dictionary binders in ConPatOut]
-- Accumulator style: binders of this pattern are consed onto bndrs.
collectl (L _ pat) bndrs
  = go pat
  where
    go (VarPat var) = var : bndrs
    go (WildPat _) = bndrs
    go (LazyPat pat) = collectl pat bndrs
    go (BangPat pat) = collectl pat bndrs
    go (AsPat (L _ a) pat) = a : collectl pat bndrs
    go (ParPat pat) = collectl pat bndrs
    go (ListPat pats _ _) = foldr collectl bndrs pats
    go (PArrPat pats _) = foldr collectl bndrs pats
    go (TuplePat pats _ _) = foldr collectl bndrs pats
    go (ConPatIn _ ps) = foldr collectl bndrs (hsConPatArgs ps)
    -- The one difference from HsUtils: pat_binds dictionaries count too.
    go (ConPatOut {pat_args=ps, pat_binds=ds}) =
        collectEvBinders ds
        ++ foldr collectl bndrs (hsConPatArgs ps)
    go (LitPat _) = bndrs
    go (NPat _ _ _) = bndrs
    go (NPlusKPat (L _ n) _ _ _) = n : bndrs
    go (SigPatIn pat _) = collectl pat bndrs
    go (SigPatOut pat _) = collectl pat bndrs
    go (CoPat _ pat _) = collectl (noLoc pat) bndrs
    go (ViewPat _ pat _) = collectl pat bndrs
    go p@(SplicePat {}) = pprPanic "collectl/go" (ppr p)

collectEvBinders :: TcEvBinds -> [Id]
collectEvBinders (EvBinds bs) = foldrBag add_ev_bndr [] bs
collectEvBinders (TcEvBinds {}) = panic "ToDo: collectEvBinders"

add_ev_bndr :: EvBind -> [Id] -> [Id]
add_ev_bndr (EvBind { eb_lhs = b }) bs | isId b = b:bs
                                       | otherwise = bs
  -- A worry: what about coercion variable binders??

collectLStmtsBinders :: [LStmt Id body] -> [Id]
collectLStmtsBinders = concatMap collectLStmtBinders

collectLStmtBinders :: LStmt Id body -> [Id]
collectLStmtBinders = collectStmtBinders . unLoc

collectStmtBinders :: Stmt Id body -> [Id]
-- For RecStmt only the later_ids escape; everything else defers to HsUtils.
collectStmtBinders (RecStmt { recS_later_ids = later_ids }) = later_ids
collectStmtBinders stmt = HsUtils.collectStmtBinders stmt
| elieux/ghc | compiler/deSugar/DsArrows.hs | bsd-3-clause | 45,720 | 0 | 23 | 13,556 | 9,522 | 4,907 | 4,615 | 700 | 19 |
-- | 'Strive.Actions.Kudos'
module Strive.Options.Kudos
( GetActivityKudoersOptions
) where
import Strive.Internal.Options (PaginationOptions)
-- | 'Strive.Actions.getActivityKudoers'
-- Kudoer listings are paginated like other collection endpoints, so the
-- options are exactly the shared pagination options.
type GetActivityKudoersOptions = PaginationOptions
| tfausak/strive | source/library/Strive/Options/Kudos.hs | mit | 241 | 0 | 5 | 25 | 31 | 21 | 10 | 4 | 0 |
module Lisp.Builtins where
import Control.Lens
import Control.Monad.Trans (liftIO)
import qualified Data.Map as M
import qualified Data.Set as S
import Lisp.Evaluator
import Lisp.Parser
import Lisp.Printer
import Lisp.Reader
import Lisp.Types
-- | Haskell-level kernel of Lisp '+': sum of all arguments.
dSum :: [Double] -> Double
dSum = sum

-- | Variadic Lisp '+' (Nothing means no fixed arity).
plus :: LispFunction
plus = liftFunction dSum Nothing "+"
-- | Lisp-style subtraction: no arguments is 0, a single argument is
-- negated, otherwise the sum of the rest is subtracted from the first.
dMinus :: [Double] -> Double
dMinus [] = 0
dMinus [only] = negate only
dMinus (first:rest) = first - sum rest
-- | Variadic Lisp '-'.
minus :: LispFunction
minus = liftFunction dMinus Nothing "-"

-- | Haskell-level kernel of Lisp '*': product of all arguments.
dProd :: [Double] -> Double
dProd = product

-- | Variadic Lisp '*'.
times :: LispFunction
times = liftFunction dProd Nothing "*"
-- | Lisp-style division: no arguments is 1, a single argument is its
-- reciprocal, otherwise the first argument is divided by the product
-- of the rest.
dDiv :: [Double] -> Double
dDiv [] = 1
dDiv [only] = recip only
dDiv (first:rest) = first / product rest
-- | Variadic Lisp '/'.
divide :: LispFunction
divide = liftFunction dDiv Nothing "/"
-- | Apply a binary numeric comparison to exactly two arguments; any
-- other argument count is an arity error at the caller.
dCmp :: (Double -> Double -> Bool) -> [Double] -> Bool
dCmp cmp args = case args of
  [lhs, rhs] -> cmp lhs rhs
  _          -> error "arity error in dCmp"
-- | Wrap a binary Double comparison as a two-argument Lisp function value.
numCmp :: (Double -> Double -> Bool) -> String -> LispValue
numCmp f str = LVFunction $ liftFunction (dCmp f) (Just 2) str

-- | Lisp 'not': only the literal false value is truthy-inverted to true;
-- every other single value maps to false.
lispNot :: LispFunction
lispNot = LFPrimitive "not" $ \vs ->
  case vs of
    [(LVBool False)] -> Right $ LVBool True
    [_] -> Right $ LVBool False
    _ -> Left $ LispError $ LVString "not requires one argument"
-- | Structural equality for Lisp values.  Lists compare element-wise;
-- values of different constructors (and functions) are never equal.
eq :: LispValue -> LispValue -> Bool
eq (LVString v1) (LVString v2) = v1 == v2
eq (LVSymbol v1) (LVSymbol v2) = v1 == v2
eq (LVNumber v1) (LVNumber v2) = v1 == v2
eq (LVBool v1) (LVBool v2) = v1 == v2
eq (LVList l1) (LVList l2) =
  -- length check first so zipWith cannot silently truncate
  (length l1 == length l2) && (and $ zipWith eq l1 l2)
eq _ _ = False

-- | Lisp 'eq' primitive: binary wrapper over 'eq'.
lispEq :: LispFunction
lispEq = LFPrimitive "eq" $ \vs ->
  case vs of
    [v1, v2] -> Right (LVBool $ eq v1 v2)
    _ -> Left (LispError $ LVString "eq: requires two arguments")

-- | An atom is anything that is not a non-empty list
-- (the empty list counts as an atom, as in classic Lisp).
atom :: LispValue -> Bool
atom (LVSymbol _) = True
atom (LVString _) = True
atom (LVNumber _) = True
atom (LVBool _) = True
atom (LVFunction _) = True
atom (LVList []) = True
atom _ = False

-- | Lisp 'atom' primitive: unary wrapper over 'atom'.
lispAtom :: LispFunction
lispAtom = LFPrimitive "atom" $ \vs ->
  case vs of
    [v] -> Right $ LVBool $ atom v
    _ -> Left $ LispError $ LVString "atom: requires one argument"
-- | Lisp 'car': head of a non-empty list.
lispCar :: LispFunction
lispCar = LFPrimitive "car" $ \vs ->
  case vs of
    [(LVList (x:_))] -> Right x
    _ -> Left $ LispError $ LVString "car: requires a non-empty list"

-- | Lisp 'cdr': tail of a non-empty list.
lispCdr :: LispFunction
lispCdr = LFPrimitive "cdr" $ \vs ->
  case vs of
    [(LVList (_:xs))] -> Right $ LVList xs
    _ -> Left $ LispError $ LVString "cdr: requires a non-empty list"

-- | Lisp 'cons': prepend a value to a list.
lispCons :: LispFunction
lispCons = LFPrimitive "cons" $ \vs ->
  case vs of
    [x, LVList xs] -> Right $ LVList (x:xs)
    _ -> Left $ LispError $ LVString "cons: requires 2 args, 2nd must be list"

-- | Terminate the interpreter by raising an error.
quit :: LispFunction
quit = LFAction "quit" $ \_ -> error "user quit"

-- | Produce a fresh symbol from the interpreter's symbol generator.
gensym :: LispFunction
gensym = LFAction "gensym" $ \_ -> fmap LVSymbol genStr
-- | Register the named symbol as a macro in the interpreter state.
setMacroAction :: [LispValue] -> Lisp LispValue
setMacroAction vals =
  case vals of
    [(LVSymbol name)] -> do
      -- record the name in the macros set (lens into interpreter state)
      macros . at name .= Just ()
      return $ LVBool True
    _ -> failWithString "set-macro! requires one symbol"

setMacro :: LispFunction
setMacro = LFAction "set-macro!" $ setMacroAction

-- | Expand a macro call one step.
macroexpand1Action :: LispFunction
macroexpand1Action = LFAction "macroexpand-1" $ \vs ->
  case vs of
    [v] -> macroexpand1 v
    _ -> failWithString "macroexpand-1 requires 1 value"

-- | Expand the outermost macro call to a fixpoint.
macroexpandAction :: LispFunction
macroexpandAction = LFAction "macroexpand" $ \vs ->
  case vs of
    [v] -> macroexpand v
    _ -> failWithString "macroexpand requires 1 value"

-- | Expand macros everywhere in the form, including subforms.
macroexpandAllAction :: LispFunction
macroexpandAllAction = LFAction "macroexpand-all" $ \vs ->
  case vs of
    [v] -> macroexpandAll v
    _ -> failWithString "macroexpand-all requires 1 value"

-- | Evaluate a single form.
evalAction :: LispFunction
evalAction = LFAction "eval" $ \vs ->
  case vs of
    [v] -> eval v
    _ -> failWithString "eval requires 1 value"
-- | Print each argument; a single argument is returned as-is, several
-- are returned wrapped in a list.
printActionCore :: [LispValue] -> Lisp LispValue
printActionCore vs = do
  mapM_ (liftIO . lispPrint) vs
  case vs of
    [v] -> return v
    _ -> return $ LVList vs

printAction :: LispFunction
printAction = LFAction "print" printActionCore

-- | Parse and evaluate every s-expression in the given file,
-- returning true on success.
execFile :: String -> Lisp LispValue
execFile filename = do
  parsedSExps <- liftIO $ loadSExps filename
  case parsedSExps of
    Left anError -> failWithString anError
    Right sexps -> do
      mapM_ (eval . readSExp) sexps
      return $ LVBool True

-- | Lisp 'load-file': wrapper over 'execFile' taking one string argument.
loadFileAction :: LispFunction
loadFileAction = LFAction "load-file" $ \vs ->
  case vs of
    [(LVString filename)] -> execFile filename
    _ -> failWithString "load-file: requires 1 string"

-- | Lisp 'error': raise a LispError carrying the argument (or the
-- whole argument list when not exactly one).
throwLispError :: LispFunction
throwLispError = LFAction "error" $ \vs ->
  case vs of
    [v] -> lispFail $ LispError v
    _ -> lispFail $ LispError $ LVList vs
-- | Global binding table installed in the initial environment:
-- arithmetic, comparisons, list primitives, macro machinery and the
-- handful of impure actions defined above.
globalBuiltins :: M.Map String LispValue
globalBuiltins = M.fromList [("+", LVFunction plus),
                             ("-", LVFunction minus),
                             ("*", LVFunction times),
                             ("/", LVFunction divide),
                             ("==", numCmp (==) "=="),
                             ("<=", numCmp (<=) "<="),
                             (">=", numCmp (>=) ">="),
                             ("/=", numCmp (/=) "/="),
                             ("<" , numCmp (<) "<"),
                             (">" , numCmp (>) ">"),
                             ("atom", LVFunction lispAtom),
                             ("car", LVFunction lispCar),
                             ("cdr", LVFunction lispCdr),
                             ("cons", LVFunction lispCons),
                             ("eq", LVFunction lispEq),
                             ("error", LVFunction throwLispError),
                             ("eval", LVFunction evalAction),
                             ("gensym", LVFunction gensym),
                             ("load-file", LVFunction loadFileAction),
                             ("macroexpand", LVFunction macroexpandAction),
                             ("macroexpand-1", LVFunction macroexpand1Action),
                             ("macroexpand-all", LVFunction macroexpandAllAction),
                             ("not", LVFunction lispNot),
                             ("pi", LVNumber pi),
                             ("print", LVFunction printAction),
                             ("quit", LVFunction quit),
                             ("set-macro!", LVFunction setMacro)]

-- | Fresh interpreter environment: builtins only, no user macros.
initEnv :: LispEnv
initEnv = LispEnv [] globalBuiltins 1 S.empty

-- | Initial environment with the prelude file already executed.
withPreludeEnv :: IO LispEnv
withPreludeEnv = fmap snd $ execFile "prelude.lisp" `runLisp` initEnv
| michaelochurch/summer-2015-haskell-class | Lisp/Builtins.hs | mit | 6,668 | 0 | 14 | 2,028 | 2,160 | 1,139 | 1,021 | -1 | -1 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "Data/Array/Storable.hs" #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Array.Storable
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (uses Data.Array.MArray)
--
-- A storable array is an IO-mutable array which stores its
-- contents in a contiguous memory block living in the C
-- heap. Elements are stored according to the class 'Storable'.
-- You can obtain the pointer to the array contents to manipulate
-- elements from languages like C.
--
-- It is similar to 'Data.Array.IO.IOUArray' but slower.
-- Its advantage is that it's compatible with C.
--
-----------------------------------------------------------------------------
module Data.Array.Storable (
-- * Arrays of 'Storable' things.
StorableArray, -- data StorableArray index element
-- + index type must be in class Ix
-- + element type must be in class Storable
-- * Overloaded mutable array interface
-- | Module "Data.Array.MArray" provides the interface of storable arrays.
-- They are instances of class 'MArray' (with the 'IO' monad).
module Data.Array.MArray,
-- * Accessing the pointer to the array contents
withStorableArray, -- :: StorableArray i e -> (Ptr e -> IO a) -> IO a
touchStorableArray, -- :: StorableArray i e -> IO ()
) where
import Data.Array.MArray
import Data.Array.Storable.Internals
| phischu/fragnix | tests/packages/scotty/Data.Array.Storable.hs | bsd-3-clause | 1,638 | 0 | 5 | 324 | 71 | 59 | 12 | 9 | 0 |
{-# LANGUAGE DeriveDataTypeable, TypeFamilies, TemplateHaskell #-}
module Distribution.Server.Features.PreferredVersions.State where
import Distribution.Server.Framework.Instances ()
import Distribution.Server.Framework.MemSize
import Distribution.Package
import Distribution.Version
import Data.Acid (Query, Update, makeAcidic)
import Data.Maybe (isJust, fromMaybe)
import Data.Typeable (Typeable)
import Control.Arrow (second)
import Control.Monad (ap)
import Control.Monad.State (put, modify)
import Control.Monad.Reader (ask, asks)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.SafeCopy (base, deriveSafeCopy)
import Data.Set (Set)
import qualified Data.Set as Set
-- | Per-package preference state: a preferred-version summary for each
-- package plus a map from deprecated packages to their replacements.
data PreferredVersions = PreferredVersions {
    preferredMap :: Map PackageName PreferredInfo,
    deprecatedMap :: Map PackageName [PackageName]
} deriving (Typeable, Show, Eq)

emptyPreferredVersions :: PreferredVersions
emptyPreferredVersions = PreferredVersions Map.empty Map.empty

-- | Preference data for one package.  'sumRange' caches the
-- intersection of 'preferredRanges' with the non-deprecated versions
-- (Nothing when the combined range is trivial either way).
data PreferredInfo = PreferredInfo {
    preferredRanges :: [VersionRange],
    deprecatedVersions :: [Version],
    sumRange :: Maybe VersionRange
} deriving (Typeable, Show, Eq)

emptyPreferredInfo :: PreferredInfo
emptyPreferredInfo = PreferredInfo [] [] Nothing
-- | Intersect all preferred ranges with the exclusion of every
-- deprecated version; a range equivalent to "anything" or "nothing"
-- collapses to Nothing so it is not stored.
consolidateRanges :: [VersionRange] -> [Version] -> Maybe VersionRange
consolidateRanges ranges depr =
    let range = simplifyVersionRange $ foldr intersectVersionRanges anyVersion (map notThisVersion depr ++ ranges)
    in if isAnyVersion range || isNoVersion range
        then Nothing
        else Just range

-- | Classification of a single version relative to a PreferredInfo.
data VersionStatus = NormalVersion | DeprecatedVersion | UnpreferredVersion deriving (Show, Typeable, Eq, Ord, Enum)
-- | Classify one version against the package's preference info.
-- Deprecation takes precedence over the preferred-range check; with no
-- cached range ('sumRange' Nothing) every non-deprecated version is
-- considered normal.  (Rewritten from case-on-Bool to guards; behavior
-- is unchanged.)
getVersionStatus :: PreferredInfo -> Version -> VersionStatus
getVersionStatus info version
  | version `elem` deprecatedVersions info = DeprecatedVersion
  | maybe True (withinRange version) (sumRange info) = NormalVersion
  | otherwise = UnpreferredVersion
-- | Pair each version with its status.  With no preferences at all we
-- can skip the per-version check entirely.
classifyVersions :: PreferredInfo -> [Version] -> [(Version, VersionStatus)]
classifyVersions (PreferredInfo [] [] _) = map (flip (,) NormalVersion)
classifyVersions info = map ((,) `ap` getVersionStatus info)

-- | Split versions into (normal, deprecated, unpreferred), preserving
-- order within each group.
partitionVersions :: PreferredInfo -> [Version] -> ([Version], [Version], [Version])
partitionVersions info versions = if (not . isJust $ sumRange info) then (versions, [], []) else go versions
  where go :: [Version] -> ([Version], [Version], [Version]) -- foldr-type approach
        -- The lazy pattern keeps each triple component producible
        -- incrementally without forcing the whole recursion.
        go (v:vs) = let ~(norm, depr, unpref) = go vs in case getVersionStatus info v of
            NormalVersion -> (v:norm, depr, unpref)
            DeprecatedVersion -> (norm, v:depr, unpref)
            UnpreferredVersion -> (norm, depr, v:unpref)
        go [] = ([], [], [])
------------------------------------------
-- SafeCopy instances for acid-state serialisation.
$(deriveSafeCopy 0 'base ''PreferredVersions)
$(deriveSafeCopy 0 'base ''PreferredInfo)
$(deriveSafeCopy 0 'base ''VersionStatus)

instance MemSize PreferredVersions where
    memSize (PreferredVersions a b) = memSize2 a b

instance MemSize PreferredInfo where
    memSize (PreferredInfo a b c) = memSize3 a b c

-- | State used when the acid store is first created.
initialPreferredVersions :: PreferredVersions
initialPreferredVersions = emptyPreferredVersions
-- | Replace the preferred ranges for a package (recomputes sumRange).
setPreferredRanges :: PackageName -> [VersionRange] -> Update PreferredVersions ()
setPreferredRanges name ranges = alterPreferredInfo name $ \p ->
    p { preferredRanges = ranges }

-- | Replace the deprecated versions for a package (recomputes sumRange).
setDeprecatedVersions :: PackageName -> [Version] -> Update PreferredVersions ()
setDeprecatedVersions name versions = alterPreferredInfo name $ \p ->
    p { deprecatedVersions = versions }

-- | Apply a function to a package's PreferredInfo, re-consolidating the
-- cached range afterwards; an entry that becomes empty is deleted.
alterPreferredInfo :: PackageName -> (PreferredInfo -> PreferredInfo) -> Update PreferredVersions ()
alterPreferredInfo name func = modify $ \p -> p { preferredMap = Map.alter (res . func . fromMaybe emptyPreferredInfo) name $ preferredMap p }
  where res (PreferredInfo [] [] _) = Nothing
        res (PreferredInfo ranges depr _) = Just (PreferredInfo ranges depr $ consolidateRanges ranges depr)

getPreferredInfo :: PackageName -> Query PreferredVersions PreferredInfo
getPreferredInfo name = asks $ Map.findWithDefault emptyPreferredInfo name . preferredMap

-- | Set (Just) or clear (Nothing) the deprecated-in-favour-of list.
setDeprecatedFor :: PackageName -> Maybe [PackageName] -> Update PreferredVersions ()
setDeprecatedFor name forName = modify $ \p -> p { deprecatedMap = Map.alter (const forName) name $ deprecatedMap p }

getDeprecatedFor :: PackageName -> Query PreferredVersions (Maybe [PackageName])
getDeprecatedFor name = asks $ Map.lookup name . deprecatedMap

isDeprecated :: PackageName -> Query PreferredVersions Bool
isDeprecated name = asks $ Map.member name . deprecatedMap

getPreferredVersions :: Query PreferredVersions PreferredVersions
getPreferredVersions = ask

replacePreferredVersions :: PreferredVersions -> Update PreferredVersions ()
replacePreferredVersions = put

-- Generate the acid-state event types for the operations above.
makeAcidic ''PreferredVersions ['setPreferredRanges
                               ,'setDeprecatedVersions
                               ,'getPreferredInfo
                               ,'setDeprecatedFor
                               ,'getDeprecatedFor
                               ,'isDeprecated
                               ,'getPreferredVersions
                               ,'replacePreferredVersions
                               ]
---------------
-- | Total wrapper for 'findBestVersion': Nothing when either the full
-- version list or the available set is empty.
maybeBestVersion :: PreferredInfo -> [Version] -> Set Version -> Maybe (Version, Maybe VersionStatus)
maybeBestVersion info allVersions versions = if null allVersions || Set.null versions then Nothing else Just $ findBestVersion info allVersions versions
{-
findBestVersion attempts to find the best version to display out of a set
of versions. The quality of a given version is encoded in a pair (VersionStatus,
Bool). If the version is a NormalVersion, then the boolean indicates whether if
it the most recently uploaded preferred version (and all higher versions are
either deprecated or unpreferred). Otherwise, if it is a DeprecatedVersion or
UnpreferredVersion, the boolean indicates that it is the maximum of all uploaded
versions.
The list of available versions is scanned from the back (most recent) to the
front (first one uploaded). If a 'better' version is found than the current
best version, it is replaced. If no better version can be found, the algorithm
finishes up. The exact ordering is defined as:
1. (NormalVersion, True) means the latest preferred version of the package is
available. This option may appear anywhere, although it is always seen before
(NormalVersion, False). In this case, the algorithm finishes up.
2. (UnpreferredVersion, True) means the latest available version of the package
is not preferred, but the latest preferred version is not available. If this
option appears anywhere, it will be the most recent version in the set,
excluding deprecated versions.
3. (NormalVersion, False) means neither the actual latest version nor the
preferred latest version are available, but there is some preferred version
that's available. It can only be scanned after (NormalVersion, True) and
(UnpreferredVersion, True), so the algorithm finishes up in this case.
4. (UnpreferredVersion, False) means no preferred versions are available, and
only an older version is available. It is still possible to see a NormalVersion
after this option, so the algorithm continues.
5. (DeprecatedVersion, True) and (DeprecatedVersion, False) mean only a
deprecated version is available. This is not so great.
This is a bit complex but I think it has the most intuitive result, and is
rather efficient in 99% of cases.
The version set and version list should both be non-empty; otherwise this
function is partial. Use maybeBestVersion for a safe check.
-}
-- | Pick the best available version; see the long comment above for the
-- exact ordering of (VersionStatus, Bool) options.  Partial on empty
-- inputs -- use 'maybeBestVersion' for the safe entry point.
findBestVersion :: PreferredInfo -> [Version] -> Set Version -> (Version, Maybe VersionStatus)
findBestVersion info allVersions versions =
    let topStatus = getVersionStatus info maxVersion
    in if maxAllVersion == maxVersion && topStatus == NormalVersion
        then (maxVersion, Just NormalVersion) -- most common case
        else second classifyOpt $ newSearch (reverse $ Set.toList versions) (maxVersion, (topStatus, True))
  where
    maxVersion = Set.findMax versions
    maxAllVersion = last allVersions
    -- Most recently uploaded version classified as preferred, if any.
    newestPreferred = case filter ((==NormalVersion) . (infoMap Map.!)) $ allVersions of
        [] -> Nothing
        prefs -> Just $ last prefs
    infoMap = Map.fromDistinctAscList $ classifyVersions info allVersions
    -- newSearch scans from the newest version down until something
    -- other than a deprecated version is seen.
    newSearch (v:vs) _ = case infoMap Map.! v of
        NormalVersion | v == maxAllVersion -> (v, (NormalVersion, True))
        NormalVersion -> oldSearch vs (v, (NormalVersion, False))
        DeprecatedVersion -> newSearch vs (v, (DeprecatedVersion, True))
        UnpreferredVersion -> oldSearch vs (v, (UnpreferredVersion, True))
    newSearch [] opt = opt
    -- oldSearch keeps looking for a better option; a NormalVersion
    -- terminates the scan (cases 1 and 3 of the ordering above).
    oldSearch (v:vs) opt = case infoMap Map.! v of
        NormalVersion -> replaceBetter opt (v, (NormalVersion, newestPreferred == Just v))
        other -> oldSearch vs $ replaceBetter opt (v, (other, False))
    oldSearch [] opt = opt
    replaceBetter keep@(_, old) replace@(_, new) = if optionPrefs new > optionPrefs old then replace else keep
    -- Numeric rank of an option; higher is better.
    optionPrefs :: (VersionStatus, Bool) -> Int
    optionPrefs opt = case opt of
        (NormalVersion, True) -> 4
        (UnpreferredVersion, True) -> 3
        (NormalVersion, False) -> 2
        (UnpreferredVersion, False) -> 1
        _ -> 0
    -- Collapse the internal option back to the externally reported status.
    classifyOpt opt = case opt of
        (NormalVersion, True) -> Just NormalVersion
        (UnpreferredVersion, True) -> Just UnpreferredVersion
        (DeprecatedVersion, _) -> Just DeprecatedVersion
        _ -> Nothing
| mpickering/hackage-server | Distribution/Server/Features/PreferredVersions/State.hs | bsd-3-clause | 9,614 | 0 | 14 | 1,832 | 2,214 | 1,196 | 1,018 | 131 | 17 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-SP">
<title>Retire.js Add-on</title>
<maps>
<homeID>retire</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/retire/src/main/javahelp/org/zaproxy/addon/retire/resources/help_sr_SP/helpset_sr_SP.hs | apache-2.0 | 964 | 77 | 67 | 156 | 413 | 209 | 204 | -1 | -1 |
{-# LANGUAGE TypeFamilies, FlexibleContexts #-}
-- Results in context reduction stack overflow
module Class1 where
import Data.Kind (Type)
-- Deliberately minimal declarations: this file is a regression test for
-- context-reduction stack overflow, not a usable library.
class C a where
  foo :: a x -> a y

class C (T a) => D a where
  type T a :: Type -> Type
  bar :: a -> T a x -> T a y

-- NOTE(review): 'foo' is intentionally partial (Nothing case only);
-- the test just needs the instance to exist.
instance C Maybe where
  foo Nothing = Nothing

instance D () where
  type T () = Maybe
  bar x t = foo t
| sdiehl/ghc | testsuite/tests/indexed-types/should_compile/Class1.hs | bsd-3-clause | 372 | 0 | 9 | 99 | 146 | 75 | 71 | -1 | -1 |
-- !!! Testing Int and Word
module Main(main) where
import Data.Int
import Data.Word
import Data.Bits
import Data.Ix -- added SOF
import Control.Exception
main :: IO ()
main = test

-- | Run the full battery over every fixed-width integral type, then
-- the unbounded Integer.
test :: IO ()
test = do
  testIntlike "Int" (0::Int)
  testIntlike "Int8" (0::Int8)
  testIntlike "Int16" (0::Int16)
  testIntlike "Int32" (0::Int32)
  testIntlike "Int64" (0::Int64)
  testIntlike "Word8" (0::Word8)
  testIntlike "Word16" (0::Word16)
  testIntlike "Word32" (0::Word32)
  testIntlike "Word64" (0::Word64)
  testInteger
-- | Exercise every class instance of a bounded integral type; the
-- second argument is only a type proxy (always zero).
testIntlike :: (Bounded a, Integral a, Ix a, Show a, Read a, Bits a) => String -> a -> IO ()
testIntlike name zero = do
  putStrLn $ "--------------------------------"
  putStrLn $ "--Testing " ++ name
  putStrLn $ "--------------------------------"
  testBounded zero
  testEnum zero
  testReadShow zero
  testEq zero
  testOrd zero
  testNum zero
  testReal zero
  testIntegral zero
  testConversions zero
  testBits zero True
-- | Run the test groups that make sense for 'Integer': it has no
-- 'Bounded' instance and no fixed bit size, so the Bounded, conversion
-- and bit-size checks are skipped.  (Added the missing top-level type
-- signature for consistency with the rest of the file.)
testInteger :: IO ()
testInteger = do
  let zero = 0 :: Integer
  putStrLn $ "--------------------------------"
  putStrLn $ "--Testing Integer"
  putStrLn $ "--------------------------------"
  testEnum zero
  testReadShow zero
  testEq zero
  testOrd zero
  testNum zero
  testReal zero
  testIntegral zero
  testBits zero False
-- In all these tests, zero is a dummy element used to get
-- the overloading to work

-- Prints values around both ends of the type's range, deliberately
-- overflowing by one on each side.
testBounded zero = do
  putStrLn "testBounded"
  print $ (minBound-1, minBound, minBound+1) `asTypeOf` (zero,zero,zero)
  print $ (maxBound-1, maxBound, maxBound+1) `asTypeOf` (zero,zero,zero)

testEnum zero = do
  putStrLn "testEnum"
  print $ take 10 [zero .. ] -- enumFrom
  print $ take 10 [zero, toEnum 2 .. ] -- enumFromThen
  print [zero .. toEnum 20] -- enumFromTo
  print [zero, toEnum 2 .. toEnum 20] -- enumFromThenTo

-- Converts the type's extreme values into every other integral type,
-- exposing the truncation/sign-extension behavior of fromIntegral.
testConversions zero = do
  putStrLn "testConversions"
  putStr "Integer : " >> print (map fromIntegral numbers :: [Integer])
  putStr "Int : " >> print (map fromIntegral numbers :: [Int])
  putStr "Int8 : " >> print (map fromIntegral numbers :: [Int8])
  putStr "Int16 : " >> print (map fromIntegral numbers :: [Int16])
  putStr "Int32 : " >> print (map fromIntegral numbers :: [Int32])
  putStr "Int64 : " >> print (map fromIntegral numbers :: [Int64])
  putStr "Word8 : " >> print (map fromIntegral numbers :: [Word8])
  putStr "Word16 : " >> print (map fromIntegral numbers :: [Word16])
  putStr "Word32 : " >> print (map fromIntegral numbers :: [Word32])
  putStr "Word64 : " >> print (map fromIntegral numbers :: [Word64])
  where numbers = [minBound, 0, maxBound] `asTypeOf` [zero]
-- | Seven small sample values around zero.  The argument is never
-- inspected; it only pins the result element type at the call site.
samples :: (Num a) => a -> [a]
samples _typeProxy = map fromInteger [-3 .. 3]
-- | Print "nm x = f x" for every sample, terminated by a '#' marker.
table1 :: (Show a, Show b) => String -> (a -> b) -> [a] -> IO ()
table1 nm f xs = do
  sequence [ f' x | x <- xs ]
  putStrLn "#"
  where
    f' x = putStrLn (nm ++ " " ++ show x ++ " = " ++ show (f x))

-- | Print the full cross product "x nm y = x `op` y"; any exception
-- raised while forcing the result (e.g. divide by zero) is caught and
-- printed in place of the value.
table2 :: (Show a, Show b, Show c) => String -> (a -> b -> c) -> [a] -> [b] -> IO ()
table2 nm op xs ys = do
  sequence [ sequence [ op' x y | y <- ys ] >> putStrLn " "
           | x <- xs
           ]
  putStrLn "#"
  where
    op' x y = do s <- Control.Exception.catch
                        (evaluate (show (op x y)))
                        (\e -> return (show (e :: SomeException)))
                 putStrLn (show x ++ " " ++ nm ++ " " ++ show y ++ " = " ++ s)
-- Round-trips each sample through show/read.
testReadShow zero = do
  putStrLn "testReadShow"
  print xs
  print (map read_show xs)
  where
    xs = samples zero
    read_show x = (read (show x) `asTypeOf` zero)

testEq zero = do
  putStrLn "testEq"
  table2 "==" (==) xs xs
  table2 "/=" (/=) xs xs
  where
    xs = samples zero

testOrd zero = do
  putStrLn "testOrd"
  table2 "<=" (<=) xs xs
  table2 "< " (<) xs xs
  table2 "> " (>) xs xs
  table2 ">=" (>=) xs xs
  table2 "`compare`" compare xs xs
  where
    xs = samples zero

testNum zero = do
  putStrLn "testNum"
  table2 "+" (+) xs xs
  table2 "-" (-) xs xs
  table2 "*" (*) xs xs
  table1 "negate" negate xs
  where
    xs = samples zero

testReal zero = do
  putStrLn "testReal"
  table1 "toRational" toRational xs
  where
    xs = samples zero

-- Division by zero in these tables is caught and printed by table2.
testIntegral zero = do
  putStrLn "testIntegral"
  table2 "`divMod` " divMod xs xs
  table2 "`div` " div xs xs
  table2 "`mod` " mod xs xs
  table2 "`quotRem`" quotRem xs xs
  table2 "`quot` " quot xs xs
  table2 "`rem` " rem xs xs
  where
    xs = samples zero
-- Shift/rotate amounts deliberately exceed the widest type's bit count
-- to exercise out-of-range behavior; do_bitsize is False for Integer,
-- whose bitSize would raise an error.
testBits zero do_bitsize = do
  putStrLn "testBits"
  table2 ".&. " (.&.) xs xs
  table2 ".|. " (.|.) xs xs
  table2 "`xor`" xor xs xs
  table1 "complement" complement xs
  table2 "`shiftL`" shiftL xs ([0..3] ++ [32,64])
  table2 "`shiftR`" shiftR xs ([0..3] ++ [32,64])
  table2 "`rotate`" rotate xs ([-3..3] ++ [-64,-32,32,64])
  table1 "bit" (\ x -> (bit x) `asTypeOf` zero) [(0::Int)..3]
  table2 "`setBit`" setBit xs ([0..3] ++ [32,64])
  table2 "`clearBit`" clearBit xs ([0..3] ++ [32,64])
  table2 "`complementBit`" complementBit xs ([0..3] ++ [32,64])
  table2 "`testBit`" testBit xs ([0..3] ++ [32,64])
  if do_bitsize then table1 "bitSize" bitSize xs else return ()
  table1 "isSigned" isSigned xs
  where
    xs = samples zero
| holzensp/ghc | testsuite/tests/numeric/should_run/arith011.hs | bsd-3-clause | 5,451 | 14 | 17 | 1,532 | 2,160 | 1,061 | 1,099 | 143 | 2 |
module Data.NGH.PSSM
( scan
) where
import qualified Data.Map as M
type Probability = Float
type AACid = Char
type Seq = String
type PSSM = [M.Map AACid Probability]

-- | Score the motif against the first @length pssm@ residues of the
-- sequence.  Nothing when the sequence is shorter than the motif;
-- residues absent from a column incur a flat -100 penalty.
score_here :: PSSM -> Seq -> Maybe Probability
score_here [] _ = Just 0.0
score_here _ [] = Nothing
score_here (p:ps) (a:as) = do
    rest <- score_here ps as
    let score_aa = M.findWithDefault (-100.0) a p
    return (score_aa + rest)

-- | Slide the motif across the sequence, scoring every alignment that
-- fits; stops once the remaining sequence is shorter than the motif.
scan :: PSSM -> Seq -> [Probability]
scan p aas = case score_here p aas of
    Nothing -> []
    Just v -> v : rest
  where
    -- Advance one residue.  The previous version used the partial
    -- 'tail', which crashed (after looping) when the PSSM was empty
    -- and the sequence ran out.
    rest = case aas of
        [] -> []
        (_:aas') -> scan p aas'
| luispedro/NGH | Data/NGH/PSSM.hs | mit | 545 | 0 | 12 | 128 | 238 | 125 | 113 | 18 | 2 |
-- PaginationHelper
-- http://www.codewars.com/kata/515bb423de843ea99400000a/
{-
Random test for pageIndex is broken:
should work for random collections and ipp values
Falsifiable (after 2 tests and 1 shrink):
expected: Just 0
but got: Nothing
[0]
Positive {getPositive = 1}
-1
-}
module Codewars.Kata.Pagination where
-- A collection is modelled as a plain list; pages are zero-indexed.
type Collection a = [a]
type ItemsPerPage = Int

-- | Total number of items in the collection.
itemCount :: Collection a -> Int
itemCount = length

-- | Number of pages needed to hold the whole collection, rounding up.
pageCount :: Collection a -> ItemsPerPage -> Int
pageCount xs n =
  ceiling (fromIntegral (itemCount xs) / fromIntegral n :: Double)

-- | How many items sit on the given zero-based page, if it exists:
-- full pages hold n items, the last page holds the remainder.
pageItemCount :: Collection a -> ItemsPerPage -> Int -> Maybe Int
pageItemCount xs n page
  | page < 0          = Nothing
  | page + 1 < pages  = Just n
  | page + 1 == pages = Just (itemCount xs - n * page)
  | otherwise         = Nothing
  where
    pages = pageCount xs n

-- | Zero-based page on which the item with the given zero-based index
-- falls; Nothing when the index is outside the collection.
pageIndex :: Collection a -> ItemsPerPage -> Int -> Maybe Int
pageIndex xs n item
  | item < 0 || item >= itemCount xs = Nothing
  | otherwise                        = Just (item `div` n)
| gafiatulin/codewars | src/5 kyu/Pagination.hs | mit | 1,100 | 0 | 13 | 321 | 293 | 149 | 144 | 16 | 3 |
module Arm.ArmType (
instructionBlockOffset
)
where
import Arm.Internal.Type
import Data.Int (Int64)
-- | Return the offset of the ARM instruction inside the section.
instructionBlockOffset :: ArmInstr -> Int64
instructionBlockOffset instr = blockOffset instr
| mathk/arm-isa | Arm/ArmType.hs | mit | 269 | 0 | 5 | 52 | 42 | 26 | 16 | 6 | 1 |
{-# LANGUAGE CPP #-}
-- |
-- Vanilla thread management in Haskell is low level and
-- it does not approach the problems related to thread deaths.
-- When it's used naively the following typical problems arise:
--
-- * When a forked thread dies due to an uncaught exception,
-- the exception does not get raised in the main thread,
-- which is why the program continues to run as if nothing happened,
-- i.e., with the presumption that the already dead thread is running normally.
-- Naturally this may very well bring your program to a chaotic state.
--
-- * Another issue is that one thread dying does not
-- affect any of the threads forked from it.
-- That's why your program may be accumulating ghost threads.
--
-- * Ever dealt with your program ignoring the \<Ctrl-C\> strikes?
--
-- This library solves all the issues above with a concept of a slave thread.
-- A slave thread has the following properties:
--
-- 1. When it dies for whatever reason (exception or finishing normally)
-- it kills all the slave threads that were forked from it.
-- This protects you from ghost threads.
--
-- 2. It waits for all slaves to die and execute their finalizers
-- before executing its own finalizer and getting released itself.
-- This gives you hierarchical releasing of resources.
--
-- 3. When a slave thread dies with an uncaught exception
-- it reraises it in the master thread.
-- This protects you from silent exceptions
-- and lets you be sure of getting informed
-- if your program gets brought to an erroneous state.
module SlaveThread
(
fork,
forkWithUnmask,
forkFinally,
forkFinallyWithUnmask,
SlaveThreadCrashed(..)
-- * Notes
-- $note-unmask
)
where
import SlaveThread.Prelude
import SlaveThread.Util.LowLevelForking
import qualified DeferredFolds.UnfoldlM as UnfoldlM
import qualified StmContainers.Multimap as Multimap
import qualified Control.Foldl as Foldl
import qualified Focus
-- |
-- A global registry of all slave threads by their masters.
--
-- NOINLINE + 'unsafePerformIO' is the standard top-level-mutable-state
-- idiom: the pragma guarantees the multimap is created exactly once.
{-# NOINLINE slaveRegistry #-}
slaveRegistry :: Multimap.Multimap ThreadId ThreadId
slaveRegistry =
  unsafePerformIO Multimap.newIO
-- |
-- Fork a slave thread to run a computation on.
-- Equivalent to 'forkFinally' with a no-op finalizer.
{-# INLINABLE fork #-}
fork :: IO a -> IO ThreadId
fork computation = forkFinally (return ()) computation
-- |
-- Like 'fork', but provides the computation a function that unmasks
-- asynchronous exceptions. See @Note [Unmask]@ at the bottom of this module.
{-# INLINABLE forkWithUnmask #-}
forkWithUnmask :: ((forall x. IO x -> IO x) -> IO a) -> IO ThreadId
forkWithUnmask computation = forkFinallyWithUnmask (return ()) computation
-- |
-- Fork a slave thread with a finalizer action to run a computation on.
-- The finalizer gets executed when the thread dies for whatever reason:
-- due to being killed or an uncaught exception, or a normal termination.
--
-- Note the order of arguments:
--
-- >forkFinally finalizer computation
{-# INLINABLE forkFinally #-}
forkFinally :: IO a -> IO b -> IO ThreadId
forkFinally finalizer computation =
  forkFinallyWithUnmask finalizer (\restore -> restore computation)
-- |
-- Like 'forkFinally', but provides the computation a function that unmasks
-- asynchronous exceptions. See @Note [Unmask]@ at the bottom of this module.
{-# INLINABLE forkFinallyWithUnmask #-}
forkFinallyWithUnmask :: IO a -> ((forall x. IO x -> IO x) -> IO b) -> IO ThreadId
forkFinallyWithUnmask finalizer computation =
  -- All bookkeeping below runs with async exceptions uninterruptibly
  -- masked; the user computation itself runs under 'unmask', which
  -- restores the caller's masking state (see the module-end note).
  uninterruptibleMask $ \unmask -> do
    masterThread <- myThreadId
    slaveThread <- forkIOWithoutHandler $ do
      slaveThread <- myThreadId
      -- Execute the main computation:
      -- ('Nothing' on success, 'Just' the exception otherwise; see the
      -- 'forM_ @Maybe' applications further down.)
      computationExceptions <- catch (computation unmask $> empty) (return . pure)
      -- Kill the slaves and wait for them to die:
      -- The loop restarts whenever an async exception interrupts the
      -- kill/wait step, accumulating every such exception.
      slavesDyingExceptions <- let
        loop !exceptions =
          catch
            (unmask $ do
              killSlaves slaveThread
              waitForSlavesToDie slaveThread
              return exceptions)
            (\ !exception -> loop (exception : exceptions))
        in loop []
      -- Finalize:
      finalizerExceptions <- catch (finalizer $> empty) (return . pure)
      -- Rethrow the exceptions:
      -- 'ThreadKilled' is how a master normally stops a slave, so it is
      -- swallowed; everything else is reported to the master as
      -- 'SlaveThreadCrashed'.
      let
        handler e = do
          case fromException e of
            Just ThreadKilled -> return ()
            _ -> throwTo masterThread (SlaveThreadCrashed slaveThread e)
        in do
          forM_ @Maybe computationExceptions handler
          forM_ slavesDyingExceptions handler
          forM_ @Maybe finalizerExceptions handler
      -- Unregister from the global state,
      -- thus informing the master of this thread's death.
      -- Whilst doing so, also ensure that the master has already registered this slave.
      atomically $ do
        result <- Multimap.focus Focus.lookupAndDelete slaveThread masterThread slaveRegistry
        case result of
          Just _ -> return ()
          _ -> retry
    atomically $ Multimap.insert slaveThread masterThread slaveRegistry
    return slaveThread
-- | Kill every slave currently registered under the given master.
-- The CPP split selects the stm-containers API name, which was renamed
-- in version 1.2 (unfoldMByKey -> unfoldlMByKey).
killSlaves :: ThreadId -> IO ()
killSlaves thread = do
#if MIN_VERSION_stm_containers(1,2,0)
  threads <- atomically (UnfoldlM.foldM (Foldl.generalize Foldl.revList) (Multimap.unfoldlMByKey thread slaveRegistry))
#else
  threads <- atomically (UnfoldlM.foldM (Foldl.generalize Foldl.revList) (Multimap.unfoldMByKey thread slaveRegistry))
#endif
  traverse_ killThread threads
-- | Block (via STM 'retry') until the given thread has no slaves left
-- in the registry; slaves remove themselves as their last act.
waitForSlavesToDie :: ThreadId -> IO ()
waitForSlavesToDie thread =
  atomically $ do
#if MIN_VERSION_stm_containers(1,2,0)
    null <- UnfoldlM.null $ Multimap.unfoldlMByKey thread slaveRegistry
#else
    null <- UnfoldlM.null $ Multimap.unfoldMByKey thread slaveRegistry
#endif
    unless null retry
-- | A slave thread crashed. This exception is classified as /asynchronous/,
-- meaning it extends from 'SomeAsyncException'.
--
-- In general,
--
-- * /Synchronous/ exceptions such as 'IOException' are thrown by IO actions
-- that are explicitly called by the thread that receives them, and may be
-- caught, inspected, and handled by resuming execution.
-- * /Asynchronous/ exceptions such as 'ThreadKilled' should normally only be
-- caught temporarily in order to run finalizers, then re-thrown.
--
-- 'SlaveThreadCrashed' being asynchronous means it should, by default, cause
-- the entire thread hierarchy to come crashing down, ultimately terminating the
-- program.
--
-- If you want more sophisticated behavior, such as a "supervisor" thread that
-- monitors and restarts worker threads when they fail, you have to program
-- that yourself.
--
-- N.B. Consider using a library like
-- @<https://hackage.haskell.org/package/safe-exceptions safe-exceptions>@ or
-- @<https://hackage.haskell.org/package/unliftio unliftio>@, which carefully
-- distinguish synchronous and asynchronous exceptions, unlike @base@.
data SlaveThreadCrashed
  = SlaveThreadCrashed !ThreadId !SomeException
    -- ^ The 'ThreadId' of the slave that crashed and the exception
    -- that killed it.
  deriving (Show)
-- Routing to/from 'SomeAsyncException' is what classifies this
-- exception as asynchronous (see the haddock above).
instance Exception SlaveThreadCrashed where
  toException = asyncExceptionToException
  fromException = asyncExceptionFromException
-- $note-unmask
--
-- == Masking
--
-- Threads forked by this library, unlike in @base@, /already/ mask asynchronous
-- exceptions internally, for bookkeeping purposes.
--
-- The @*withUnmask@ variants of 'fork' are thus different from the
-- @*withUnmask@ variants found in @base@ and @async@, in that the unmasking
-- function they provide restores the masking state /to that of the calling context/,
-- as opposed to /unmasked/.
--
-- Put another way, the @base@ code that you may have written as:
--
-- @
-- mask (\\unmask -> forkIO (initialize >> unmask computation))
-- @
--
-- using this library would be instead written as:
--
-- @
-- 'forkWithUnmask' (\\unmask -> initialize >> unmask computation)
-- @
--
-- And the @base@ code that you may have written as:
--
-- @
-- mask_ (forkIOWithUnmask (\\unmask -> initialize >> unmask computation))
-- @
--
-- will instead have to /manually/ call the low-level unmasking function called
-- 'GHC.IO.unsafeUnmask', as:
--
-- @
-- mask_ ('forkWithUnmask' (\\_ -> initialize >> unsafeUnmask computation))
-- @
--
-- Note that we used 'forkWithUnmask' (to guarantee @initialize@ is run with
-- asynchronous exceptions masked), but the unmasking function it provided does
-- not guarantee asynchronous exceptions are actually unmasked, so we toss it
-- and use 'GHC.IO.unsafeUnmask' instead.
--
-- This idiom is uncommon, but necessary when you need to fork a thread in
-- library code that is unsure if it's being called with asynchronous exceptions
-- masked (as in the "acquire" phase of a @bracket@ call).
| nikita-volkov/slave-thread | library/SlaveThread.hs | mit | 8,521 | 0 | 24 | 1,605 | 909 | 514 | 395 | -1 | -1 |
{-|
Module : Variable
Copyright : (c) Stéphane Vialette, 2015
License : MIT
Maintainer : vialette@gmail.com
Stability : experimental
-}
module PPOL.Sat.Variable
(
-- Variable data type
Variable(..)
)
where
-- | A propositional variable carrying an arbitrary label of type @a@.
data Variable a
  = Variable a
  deriving (Show, Eq, Ord)
| vialette/PPOL | src/PPOL/Sat/Variable.hs | mit | 301 | 0 | 6 | 63 | 44 | 28 | 16 | 4 | 0 |
-- Copyright (c) 2016-present, SoundCloud Ltd.
-- All rights reserved.
--
-- This source code is distributed under the terms of a MIT license,
-- found in the LICENSE file.
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module Kubernetes.Model.V1.ResourceQuotaList
( ResourceQuotaList (..)
, kind
, apiVersion
, metadata
, items
, mkResourceQuotaList
) where
import Control.Lens.TH (makeLenses)
import Data.Aeson.TH (defaultOptions,
deriveJSON,
fieldLabelModifier)
import Data.Text (Text)
import GHC.Generics (Generic)
import Kubernetes.Model.Unversioned.ListMeta (ListMeta)
import Kubernetes.Model.V1.ResourceQuota (ResourceQuota)
import Prelude hiding (drop, error, max,
min)
import qualified Prelude as P
import Test.QuickCheck (Arbitrary, arbitrary)
import Test.QuickCheck.Instances ()
-- | ResourceQuotaList is a list of ResourceQuota items.
data ResourceQuotaList = ResourceQuotaList
    { _kind :: !(Maybe Text) -- ^ NOTE(review): looks like the standard k8s kind/apiVersion/metadata list envelope -- confirm against the API spec
    , _apiVersion :: !(Maybe Text)
    , _metadata :: !(Maybe ListMeta)
    , _items :: !([ResourceQuota]) -- ^ the quota objects themselves
    } deriving (Show, Eq, Generic)
-- Template Haskell: generate lenses ('kind', 'apiVersion', 'metadata',
-- 'items') by stripping the leading underscore from each field.
makeLenses ''ResourceQuotaList
-- Derive To/FromJSON, dropping the leading underscore from field names
-- (with a special case mapping "_type_" to "type").
$(deriveJSON defaultOptions{fieldLabelModifier = (\n -> if n == "_type_" then "type" else P.drop 1 n)} ''ResourceQuotaList)
-- | Random lists for property tests: each field is generated
-- independently from its own 'Arbitrary' instance.
instance Arbitrary ResourceQuotaList where
    arbitrary = do
        k <- arbitrary
        v <- arbitrary
        m <- arbitrary
        i <- arbitrary
        return (ResourceQuotaList k v m i)
-- | Use this method to build a ResourceQuotaList: all optional
-- metadata fields start out as 'Nothing'.
mkResourceQuotaList :: [ResourceQuota] -> ResourceQuotaList
mkResourceQuotaList quotas =
    ResourceQuotaList Nothing Nothing Nothing quotas
| soundcloud/haskell-kubernetes | lib/Kubernetes/Model/V1/ResourceQuotaList.hs | mit | 2,128 | 0 | 14 | 742 | 351 | 210 | 141 | 43 | 1 |
-- This is the module where an end user writes the actual domain specific analysis.
{-# LANGUAGE OverloadedStrings #-}
module Skel.Pharma.UserAnalysis (userAnalysis) where
import qualified Data.Conduit.List as CL
import Data.Semigroup
import Data.Text (pack)
import Skel.Pharma.UserModel
import Skel.Pharma.UserParameters
-- The following is an implementation of relative strength index.
-- For more information, see: http://en.wikipedia.org/wiki/Relative_strength_index
-- Our analysis consists of a number of individual pipeline components.
-- Each component is combined together with the =$= operator, which
-- causes the output of one component to become the input of the next.
-- | The user-written pipeline stage: repackage each incoming 'Dummy'
-- sample as a three-dimensional 'DataPoint'. The min/max parameters
-- are pattern-matched but unused here.
userAnalysis :: MonadIO m => PharmaParams -> Conduit Dummy m DataPoint
userAnalysis (PharmaParams _min _max) = CL.map toPoint
  where
    toPoint (Dummy x y z) = DP3 (D3D x y z)
| teuffy/min-var-ci | src/Skel/Pharma/UserAnalysis.hs | mit | 882 | 0 | 10 | 162 | 130 | 76 | 54 | 10 | 1 |
module Language.FOmega.Derivation where
import Language.FOmega.Syntax hiding (
star, box, lam, pi, app, var, (==>)
)
import qualified Language.FOmega.Syntax as S
import Prelude hiding (pi)
import Control.Monad.Reader
import Control.Monad.Trans.Except
import Control.Applicative
import System.IO.Unsafe
-- type derivation
-- | The annotation attached to each term node: either the node's
-- derived type, or the marker for 'Box', which has no type.
data Der
  = Der (Tm Der)
  | TypeOfBox
  deriving Show

-- | Extract the derived type; applying this to 'TypeOfBox' is a
-- programming error.
unDer d =
  case d of
    Der tm    -> tm
    TypeOfBox -> error "unDer TypeOfBox"

-- | The derived type stored at a term's root node.
ty :: Tm Der -> Tm Der
ty tm = unDer (tmVal tm)

-- | Forget all annotations on a term.
view :: Tm a -> Tm ()
view t = erase t

-- | The annotation-free type of a term.
viewTy :: Tm Der -> Tm ()
viewTy tm = view (unDer (tmVal tm))
-- | The two sorts: @star@ is annotated with type @box@, while @box@
-- itself carries 'TypeOfBox' (it has no type).
star, box :: Tm Der
star = Star (Der box)
box = Box TypeOfBox
-- | Pure-type-system formation rules consulted by 'pi': for each
-- allowed (sort of domain, sort of codomain) pair, the sort of the
-- resulting Pi type.
rules = [ ((star,star),star)
        , ((box,star),star)
        , ((box,box),box)
        ]
-- | Type-checking failures: an ill-sorted 'Pi' (binder name plus the
-- two offending types), an application mismatch (function, argument,
-- expected and actual erased types), or an unbound variable.
data TyError = PiError String (Tm Der) (Tm Der)
             | AppError (Tm Der) (Tm Der) (Tm ()) (Tm ())
             | VarError String
  deriving Show
-- | A typing context: variable names paired with their derived types.
type Env = [(String,Tm Der)]
-- | The checking monad: typing context and quoter configuration over
-- 'TyError'-throwing IO.
type M a = ReaderT Env (ReaderT QCfg (ExceptT TyError IO)) a
-- | Throw a type error from anywhere in the checking monad.
raise e = lift (lift (throwE e))

-- | Read the whole quotation configuration.
askCfg = lift ask

-- | Project one field out of the quotation configuration.
asksCfg :: (QCfg -> a) -> M a
asksCfg sel = lift (asks sel)

-- | Look up a variable's type, failing with 'VarError' when unbound.
askVarTy :: String -> M (Tm Der)
askVarTy x =
  asks (lookup x) >>= maybe (raise (VarError x)) return

-- | Non-dependent function space: @a ==> b@ is @pi "" a b@.
a ==> b = pi "" a b
infixr 5 ==>
-- | Run a checking computation under the given config and context.
runM :: QCfg -> Env -> M a -> IO (Either TyError a)
runM cfg env act = runExceptT (act `runReaderT` env `runReaderT` cfg)

-- | Build a well-sorted 'Pi', consulting the PTS 'rules' for the
-- resulting sort; fails with 'PiError' for a disallowed combination.
pi :: String -> Tm Der -> Tm Der -> M (Tm Der)
pi x a b =
  case lookup (ty a, ty b) rules of
    Nothing   -> raise (PiError x a b)
    Just sort -> return (Pi (Der sort) x a b)

-- | Build a lambda; its derived type is the corresponding 'Pi'.
lam :: String -> Tm Der -> Tm Der -> M (Tm Der)
lam x a b = do
  piTy <- pi x a (ty b)
  return (Lam (Der piTy) x a b)
-- | Application rule: normalise the function's type, which must be a
-- 'Pi'; compare the (erased, normalised) domain against the argument's
-- type; on success, substitute the argument into the codomain.
-- Note: the bound @ty@ below shadows the top-level 'ty' helper from
-- the bind onwards; the uses inside the case scrutinees still refer
-- to the top-level one.
app :: Tm Der -> Tm Der -> M (Tm Der)
app l r = do
  let tyR = ty r
  ty <- case norm (ty l) of
    (Pi _ x tyR' ty) ->
      let expected = norm (erase tyR')
          actual = norm (erase tyR)
      in
        if expected == actual
          then liftIO $ subst x r ty
          else raise $ AppError l r expected actual
    -- Anything other than a Pi here is an internal invariant
    -- violation, reported with truncated term dumps.
    l' -> error $ unlines [
        "unexpected app type: "
      , "l = " ++ (take 100 $ show l)
      , "ty l = " ++ (take 100 $ show l')
      ]
  return $ App (Der ty) l r
-- | Annotate a variable occurrence with its type from the context.
var :: String -> M (Tm Der)
var x = do
  ty <- askVarTy x
  return $ Var (Der ty) x
-- | Check a telescope of bindings left to right; each successfully
-- checked binding is appended to the context used for later ones.
typecheckEnv :: QCfg -> Env -> [(String,Tm ())] -> ExceptT TyError IO Env
typecheckEnv _ env [] = return env
typecheckEnv cfg env ((x, t) : remaining) = do
  checked <- typecheck cfg env t
  typecheckEnv cfg (env ++ [(x, checked)]) remaining

-- | Check one term under the given context and configuration.
typecheck :: QCfg -> Env -> Tm () -> ExceptT TyError IO (Tm Der)
typecheck cfg env term = (tc term `runReaderT` env) `runReaderT` cfg
-- | Annotate every node of a term with its derived type, expanding
-- quotations and declarations on the way.
tc :: Tm () -> M (Tm Der)
tc tm =
  case tm of
    Var _ x -> var x
    Lam _ x t e -> do t' <- tc t
                      e' <- local ((x,t'):) $ tc e
                      lam x t' e'
    Pi _ x l r -> do l' <- tc l
                     r' <- local ((x,l'):) $ tc r
                     pi x l' r'
    App _ l r -> do l' <- tc l
                    r' <- tc r
                    app l' r'
    -- Quotation: a quoted kind-level thing is translated with
    -- 'quoteTy' and re-checked; a quoted type is additionally wrapped
    -- so its derived type is @qExp@ applied to its quoted type.
    Quote _ e -> do e' <- tc e
                    case ty e' of
                      k | isKind k -> do cfg <- askCfg
                                         q <- liftIO $ runQ (quoteTy $ S.norm e') cfg
                                         tc q
                      t | isTy t -> do _Exp <- asksCfg qExp
                                       cfg <- askCfg
                                       qt <- liftIO $ runQ (quoteTy $ S.norm t) cfg
                                       tcQt <- tc qt -- $ S.toCore qt
                                       _Exp_qt <- app _Exp tcQt
                                       return $ S.Quote (Der _Exp_qt) e'
                        | otherwise -> error "Quotation not a type or kind"
    -- Declaration: check the definition against the declared type
    -- (compared after erasure and normalisation), then substitute the
    -- definition into the body and continue checking.
    Decl _ x t d b -> do d' <- tc d
                         t' <- tc t
                         let expected = norm (erase t')
                         let got = norm (erase $ ty d')
                         if got /= expected
                           then fail $ unlines [
                               "type error at declaration of " ++ x
                             , "got: " ++ show got
                             , "expected: " ++ show expected
                             ]
                           else tc =<< liftIO (subst x d b)
    Star _ -> return star
    Box _ -> return box
-- Classification predicates, one rung of the hierarchy each:
-- kinds have type Box, types have a kind, base types have type Star,
-- and terms have a base type.  Box itself is none of these.
isKind t = case t of
  Box _ -> False
  _     -> view (ty t) == Box ()

isTy t = case t of
  Box _ -> False
  _     -> isKind (ty t)

isBaseTy t = case t of
  Box _ -> False
  _     -> view (ty t) == Star ()

isTerm t = case t of
  Box _ -> False
  _     -> isBaseTy (ty t)
-- | The quoter monad: quotation configuration over IO.
type Q a = ReaderT QCfg IO a
-- Configuration record for quotation.
-- NOTE(review): the inline comments below look slightly misplaced
-- relative to the fields they describe (q_abs..q_tapp appear to be the
-- case-function names, qAbs..qTApp their types, qRK the result-kind
-- variable) -- confirm against Syntax before relying on them.
data QCfg = QCfg {
    qU :: Tm Der
  , qUOp :: Tm ()
  -- case function variable names
  , qRK :: String
  -- types of case functions
  , qKF :: Tm ()
  -- Exp
  , q_abs :: String
  , q_app :: String
  , q_tabs :: String
  , q_tapp :: String
  , qAbs :: Tm ()
  , qApp :: Tm ()
  , qTAbs :: Tm ()
  , qTApp :: Tm ()
  , qPExp :: Tm ()
  , qExp :: Tm Der
  , qId :: Tm ()
  }
-- | Fetch one of the configured names as a term-level variable.
askVar :: (QCfg -> String) -> Q (Tm ())
askVar field = fmap S.var (asks field)

-- The four case functions, each applied to the result-kind variable.
_Abs  = liftA2 S.app (asks qAbs)  (askVar qRK)
_App  = liftA2 S.app (asks qApp)  (askVar qRK)
_TAbs = liftA2 S.app (asks qTAbs) (askVar qRK)
_TApp = liftA2 S.app (asks qTApp) (askVar qRK)

-- | Run a quotation under a configuration.
runQ quoter cfg = runReaderT quoter cfg
-- | Quote an arrow type: @K dom -> K cod@ for result-kind variable @K@.
quoteArr :: Tm () -> Tm () -> Q (Tm ())
quoteArr dom cod =
  fmap (\k -> S.app k dom S.==> S.app k cod) (askVar qRK)

-- | Body of a quoted universal: @pi x k. _R t@.
quoteAllBody :: String -> Tm Der -> Tm () -> Tm () -> Tm ()
quoteAllBody x k t _R = S.pi x (erase k) (S.app _R t)

-- | Quote a universally quantified type over the result-kind variable.
quoteAll :: String -> Tm Der -> Tm () -> Q (Tm ())
quoteAll x k t = fmap (quoteAllBody x k t) (askVar qRK)

-- | Quote a type: abstract the pre-quoted body over the result kind.
quoteTy :: Tm Der -> Q (Tm ())
quoteTy t = do
  rk <- asks qRK
  kf <- asks qKF
  body <- preQuoteTy t
  return (S.lam rk kf body)
-- | Quote the structure of a type (without the result-kind binder).
--
-- Only the type-level fragment is handled: variables, arrows
-- (non-dependent 'Pi' over a base type), quantification ('Pi' over a
-- kind), application and type-level lambda.  Fix: any other shape --
-- including a 'Pi' whose domain fails both guards -- used to die with
-- an uninformative pattern-match failure; it now reports the
-- offending term.
preQuoteTy :: Tm Der -> Q (Tm ())
preQuoteTy t =
  let go = preQuoteTy in
  case t of
    Var _ x -> pure $ S.var x
    Pi _ _ t1 t2 | isBaseTy t1 -> join $ quoteArr <$> go t1 <*> go t2
    Pi _ x k t' | isKind k -> join $ quoteAll x k <$> go t'
    App _ t1 t2 -> S.app <$> go t1 <*> go t2
    Lam _ x k t' -> S.lam x (erase k) <$> go t'
    _ -> error $ "preQuoteTy: unsupported type form: " ++ show t
-- | Term variables quote to themselves.
quoteVar :: Tm Der -> String -> Q (Tm ())
quoteVar _ x = pure (S.var x)

-- | Quote a term-level lambda through the @abs@ case function.
quoteAbs :: String -> Tm Der -> Tm Der -> Q (Tm ())
quoteAbs x t b = do
  qT <- preQuoteTy t
  k <- askVar qRK
  let tx = S.app k qT
  mkAbs <- askVar q_abs
  qBodyTy <- preQuoteTy (ty b)
  qBody <- preQuoteTm b
  return (S.app3 mkAbs qT qBodyTy (S.lam x tx qBody))

-- | Quote a term application through the @app@ case function.
quoteApp :: Tm () -> Tm () -> Tm () -> Tm () -> Q (Tm ())
quoteApp t1 t2 e1 e2 =
  fmap (\mk -> S.app4 mk t1 t2 e1 e2) (askVar q_app)
-- | Build the coercion term used when quoting a type abstraction:
-- roughly @\A. \f. \e. f (b t) (e t)@, where @t@ is a canonical
-- inhabitant of the binder kind @k@ from 'synthesizeType'.
-- NOTE(review): 'unsafePerformIO' is used only to draw fresh names
-- from 'genSym'; there is no NOINLINE pragma, so GHC could in
-- principle inline or share this and affect name freshness -- confirm.
strip :: Tm () -> Tm Der -> Tm () -> Tm ()
strip _K k b = unsafePerformIO $ do
  _A <- genSym
  _B <- genSym
  f <- genSym
  e <- genSym
  x <- genSym
  let t = synthesizeType k
  let bt = S.app b t
  return $ S.lam _A S.star $
    S.lam f (S.pi _B S.star $ S.app _K (S.var _B) S.==> (S.var _A)) $
      S.lam e (S.pi x (erase k) $ S.app _K (S.app b $ S.var x)) $
        S.app2 (S.var f) bt (S.app (S.var e) t)
-- | Quote a type-level abstraction through the @tabs@ case function,
-- pairing the quoted body with a 'strip' coercion.
quoteTAbs :: String -> Tm Der -> Tm Der -> Q (Tm ())
quoteTAbs x k b = do
  qB <- preQuoteTy (ty b)
  qT <- quoteAll x k qB
  _K <- askVar qRK
  S.app3 <$> askVar q_tabs
         <*> pure qT
         <*> pure (strip _K k (S.lam x (erase k) qB))
         <*> (S.lam x (erase k) <$> preQuoteTm b)
-- | Wrap a quoted polymorphic term so it can be instantiated at a
-- quoted type parameter.
instFun :: Tm () -> Tm () -> Tm ()
instFun qPiBody qTyParam =
  S.lam "e" qPiBody $
    S.app (S.var "e") qTyParam
-- | Quote a type application through the @tapp@ case function.
-- NOTE(review): the irrefutable @let Pi ...@ pattern crashes lazily if
-- the normalised type of @tm@ is not a 'Pi' -- confirm that callers
-- (see 'preQuoteTm') always guarantee this.
quoteTApp :: Tm Der -> Tm Der -> Tm Der -> Q (Tm ())
quoteTApp instTy tm t = do
  let Pi _ x k piBody = norm (ty tm)
  qPiBody <- quoteAll x k =<< preQuoteTy piBody
  S.app4 <$> askVar q_tapp
         <*> pure qPiBody
         <*> preQuoteTm tm
         <*> preQuoteTy instTy
         <*> (instFun qPiBody <$> preQuoteTy t)
-- | Quote a term into its case-function representation.
--
-- Handles variables, term/type abstraction and term/type application.
-- Fix: any other shape -- or an abstraction/application whose guard
-- fails -- used to fall through with a bare pattern-match failure; it
-- now reports the offending term like the first clause does.
preQuoteTm :: Tm Der -> Q (Tm ())
preQuoteTm e | not (isTerm e) = error $ "Not a term: " ++ show e
preQuoteTm e = case e of
  Var _ x -> quoteVar (ty e) x
  Lam _ x t b | isBaseTy t -> quoteAbs x t b
  Lam _ x k b | isKind k -> quoteTAbs x k b
  App _ a b | isTerm b -> join $ quoteApp <$> preQuoteTy (ty b)
                                          <*> preQuoteTy (ty e)
                                          <*> preQuoteTm a <*> preQuoteTm b
  App _ a t | isTy t -> quoteTApp (ty e) a t
  _ -> error $ "preQuoteTm: unsupported term form: " ++ show e
-- | Quote a whole term: bind the result kind, its case-function kind,
-- and the four case functions around the pre-quoted body.
quoteTm :: Tm Der -> Q (Tm ())
quoteTm e =
  S.lam <$> asks qRK <*> asks qKF <*> (
    S.lam <$> asks q_abs <*> _Abs <*> (
     S.lam <$> asks q_app <*> _App <*> (
      S.lam <$> asks q_tabs <*> _TAbs <*> (
       S.lam <$> asks q_tapp <*> _TApp <*> (
        preQuoteTm e
       )))))
-- | Replace every 'Quote' node in a term (or type) with its expansion;
-- kinds pass through unchanged.  Dispatch is on the classification of
-- the whole node (term / type / kind).
expandQuotations :: Tm Der -> Q (Tm ())
expandQuotations e | isTerm e = case e of
  Var _ x -> return $ S.var x
  Lam _ x t b -> S.lam x <$> expandQuotations t <*> expandQuotations b
  App _ a b -> S.app <$> expandQuotations a <*> expandQuotations b
  Quote _ e -> quoteTm e
expandQuotations t | isTy t = case t of
  Var _ x -> return $ S.var x
  Lam _ x k b -> S.lam x (erase k) <$> expandQuotations b
  App _ a b -> S.app <$> expandQuotations a <*> expandQuotations b
  Pi _ x a b -> S.pi x <$> expandQuotations a <*> expandQuotations b
  Quote _ t -> quoteTy t
expandQuotations k | isKind k = return $ erase k
-- | A canonical inhabitant of a kind: bottom for 'Star', and a
-- constant function for arrow kinds.
synthesizeType :: Tm Der -> Tm ()
synthesizeType k | isKind k =
  case k of
    Star _ -> S.pi "A" S.star (S.var "A") -- bottom
    Pi _ _ k1 k2 -> S.lam "" (erase k1) (synthesizeType k2)
    _ -> error "synthesizeType: input is not a kind"
| 8l/asif | src/Language/FOmega/Derivation.hs | mit | 9,751 | 0 | 22 | 3,732 | 4,560 | 2,182 | 2,378 | 256 | 10 |
module Main (main) where
import Control.Applicative
import Control.Monad
import Data.Version (showVersion)
import System.Console.CmdTheLine
import System.FilePath ((<.>))
import System.Exit (exitSuccess, exitFailure)
import Text.Parsec (ParseError)
import Language.PureScript.Lua.Options
import Language.PureScript.Lua.Compiler
import qualified Language.PureScript as P
import qualified Paths_psc_lua as Paths
import qualified System.IO.UTF8 as U
-- | Absolute path of the bundled prelude, resolved via Cabal data files.
preludeFilename :: IO FilePath
preludeFilename = Paths.getDataFileName "prelude/prelude.purs"
-- | Parse modules either from standard input ('Nothing') or from the
-- given files, tagging each parsed module with its source file.
-- NOTE(review): in the stdin case the 'FilePath' slot is filled with
-- 'undefined'; this is only safe as long as callers never force it
-- ('compile' below uses @map snd@) -- confirm before reusing.
readInput :: Maybe [FilePath] -> IO (Either ParseError [(FilePath, P.Module)])
readInput Nothing = do
  text <- getContents
  return $ map ((,) undefined) <$> P.runIndentParser "" P.parseModules text
readInput (Just input) = fmap collect $ forM input $ \inputFile -> do
  text <- U.readFile inputFile
  return $ (inputFile, P.runIndentParser inputFile P.parseModules text)
  where
  collect :: [(FilePath, Either ParseError [P.Module])] -> Either ParseError [(FilePath, P.Module)]
  collect = fmap concat . sequence . map (\(fp, e) -> fmap (map ((,) fp)) e)
-- | Parse the inputs, compile them to Lua, and write one @.lua@ file
-- per module (named after the module); exits non-zero when parsing or
-- compilation fails, printing the error first.
compile :: Options -> Maybe [FilePath] -> IO ()
compile opts input = do
  modules <- readInput input
  case modules of
    Left err -> do
      U.print err
      exitFailure
    Right ms -> do
      -- File-path tags are dropped here; only the modules are compiled.
      case compileToLua opts (map snd ms) of
        Left err -> do
          U.putStrLn err
          exitFailure
        Right luaModules -> do
          forM_ luaModules $ \(moduleName, luaCode) ->
            U.writeFile (moduleName <.> "lua") luaCode
          exitSuccess
-- | @-s@ / @--stdin@: read source from standard input instead of files.
useStdIn :: Term Bool
useStdIn = value (flag info)
  where
    info = (optInfo [ "s", "stdin" ]) { optDoc = "Read from standard input" }

-- | Positional arguments: the input @.ps@ files.
inputFiles :: Term [FilePath]
inputFiles = value (posAny [] (posInfo { posDoc = "The input .ps files" }))

-- | @-o@ / @--output@: the output @.lua@ file.
-- NOTE(review): appears unused by 'term' in this module -- confirm.
outputFile :: Term (Maybe FilePath)
outputFile = value (opt Nothing info)
  where
    info = (optInfo [ "o", "output" ]) { optDoc = "The output .lua file" }

-- | @--runtime-type-checks@: emit runtime type checks.
performRuntimeTypeChecks :: Term Bool
performRuntimeTypeChecks = value (flag info)
  where
    info = (optInfo [ "runtime-type-checks" ]) { optDoc = "Generate runtime type checks" }

-- | @--main@: module whose main should run; defaults to \"Main\".
runMain :: Term (Maybe String)
runMain = value (defaultOpt (Just "Main") Nothing info)
  where
    info = (optInfo [ "main" ]) { optDoc = "Generate code to run the main method in the specified module." }

-- | @-v@ / @--verbose-errors@: verbose error output.
verboseErrors :: Term Bool
verboseErrors = value (flag info)
  where
    info = (optInfo [ "v", "verbose-errors" ]) { optDoc = "Display verbose error messages" }

-- | Bundle the individual flags into compiler 'Options'.
options :: Term Options
options = Options <$> performRuntimeTypeChecks <*> runMain <*> verboseErrors

-- | 'Nothing' means read from stdin; otherwise the prelude is
-- prepended to the given input files.
stdInOrInputFiles :: FilePath -> Term (Maybe [FilePath])
stdInOrInputFiles prelude = combine <$> useStdIn <*> inputFiles
  where
    combine True _ = Nothing
    combine False input = Just (prelude : input)
-- | The complete command: parsed options applied to 'compile'.
term :: FilePath -> Term (IO ())
term prelude = compile <$> options <*> stdInOrInputFiles prelude

-- | Program metadata shown in @--help@.
termInfo :: TermInfo
termInfo = defTI
  { termName = "psc"
  , version = showVersion Paths.version
  , termDoc = "Compiles PureScript to Lua"
  }

-- | Locate the bundled prelude, then run the command-line program.
main :: IO ()
main = preludeFilename >>= \prelude -> run (term prelude, termInfo)
| osa1/psc-lua | psc-lua/Main.hs | mit | 3,093 | 0 | 21 | 615 | 1,013 | 536 | 477 | 75 | 3 |
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.ClientDatanodeProtocolProtos.ListReconfigurablePropertiesRequestProto
(ListReconfigurablePropertiesRequestProto(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
-- NOTE: hprotoc-generated protocol-buffers code for the field-less
-- ListReconfigurablePropertiesRequestProto message; prefer
-- regenerating from the .proto over hand-editing.
data ListReconfigurablePropertiesRequestProto = ListReconfigurablePropertiesRequestProto{}
                                              deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
-- Merging two empty messages yields the empty message.
instance P'.Mergeable ListReconfigurablePropertiesRequestProto where
  mergeAppend ListReconfigurablePropertiesRequestProto ListReconfigurablePropertiesRequestProto
   = ListReconfigurablePropertiesRequestProto
instance P'.Default ListReconfigurablePropertiesRequestProto where
  defaultValue = ListReconfigurablePropertiesRequestProto
-- Wire (de)serialisation: the message has no fields, so its payload
-- size is 0, putting emits nothing, and decoding treats every field
-- tag as "unknown".
instance P'.Wire ListReconfigurablePropertiesRequestProto where
  wireSize ft' self'@(ListReconfigurablePropertiesRequestProto)
   = case ft' of
       10 -> calc'Size
       11 -> P'.prependMessageSize calc'Size
       _ -> P'.wireSizeErr ft' self'
    where
        calc'Size = 0
  wirePut ft' self'@(ListReconfigurablePropertiesRequestProto)
   = case ft' of
       10 -> put'Fields
       11 -> do
               P'.putSize (P'.wireSize 10 self')
               put'Fields
       _ -> P'.wirePutErr ft' self'
    where
        put'Fields
         = do
             Prelude'.return ()
  wireGet ft'
   = case ft' of
       10 -> P'.getBareMessageWith update'Self
       11 -> P'.getMessageWith update'Self
       _ -> P'.wireGetErr ft'
    where
        update'Self wire'Tag old'Self
         = case wire'Tag of
             _ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> ListReconfigurablePropertiesRequestProto) ListReconfigurablePropertiesRequestProto where
  getVal m' f' = f' m'
instance P'.GPB ListReconfigurablePropertiesRequestProto
-- Runtime descriptor, parsed back from generated 'show' text; this
-- string must stay exactly in sync with the .proto definition.
instance P'.ReflectDescriptor ListReconfigurablePropertiesRequestProto where
  getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList [])
  reflectDescriptorInfo _
   = Prelude'.read
      "DescriptorInfo {descName = ProtoName {protobufName = FIName \".hadoop.hdfs.ListReconfigurablePropertiesRequestProto\", haskellPrefix = [MName \"Hadoop\",MName \"Protos\"], parentModule = [MName \"ClientDatanodeProtocolProtos\"], baseName = MName \"ListReconfigurablePropertiesRequestProto\"}, descFilePath = [\"Hadoop\",\"Protos\",\"ClientDatanodeProtocolProtos\",\"ListReconfigurablePropertiesRequestProto.hs\"], isGroup = False, fields = fromList [], descOneofs = fromList [], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False, makeLenses = False}"
-- Text-format support delegates to the sub-message combinators; with
-- no fields, textual output is empty.
instance P'.TextType ListReconfigurablePropertiesRequestProto where
  tellT = P'.tellSubMessage
  getT = P'.getSubMessage
instance P'.TextMsg ListReconfigurablePropertiesRequestProto where
  textPut msg = Prelude'.return ()
textGet = Prelude'.return P'.defaultValue | alexbiehl/hoop | hadoop-protos/src/Hadoop/Protos/ClientDatanodeProtocolProtos/ListReconfigurablePropertiesRequestProto.hs | mit | 3,308 | 1 | 16 | 560 | 554 | 291 | 263 | 55 | 0 |
module Kafka.Internal
( module Kafka.Internal.Request
, module Kafka.Internal.Response
, module Kafka.Internal.Transport
, module Kafka.Internal.Types
) where
import Kafka.Internal.Request
import Kafka.Internal.Response
import Kafka.Internal.Transport
import Kafka.Internal.Types
| abhinav/kafka-client | src/Kafka/Internal.hs | mit | 301 | 0 | 5 | 45 | 60 | 41 | 19 | 9 | 0 |
-- Problems/Problem030.hs
module Problems.Problem030 (p30) where
import Helpers.Numbers
-- | Print the answer when run directly.
main :: IO ()
main = print p30

-- | Project Euler 30: sum of all numbers (above 9) that equal the sum
-- of the fifth powers of their digits.  6 * 9^5 = 354294, so 999999 is
-- a safe upper bound for the search.
p30 :: Int
p30 = sum $ filter (\x -> x == sumOfFifthPowerDigits x) [10..999999]

-- | Sum of the fifth powers of a number's decimal digits.
--
-- Fix: use exact integer exponentiation (^) instead of the original
-- round-trip through floating point with (**) and 'floor', which was
-- slower and relied on Double rounding behaving exactly.
sumOfFifthPowerDigits :: Int -> Int
sumOfFifthPowerDigits = sum . map (^ 5) . intToDigits
| Sgoettschkes/learning | haskell/ProjectEuler/src/Problems/Problem030.hs | mit | 335 | 0 | 13 | 55 | 123 | 66 | 57 | 7 | 1 |
module Language.Plover.Generics where
import Control.Monad.Free
import Data.Monoid
import qualified Data.Foldable as F (Foldable, fold)
import Language.Plover.Types
-- | Does any node (visited bottom-up) satisfy the predicate?
visitAny :: (Functor f, F.Foldable f) => (Free f Any -> Bool) -> Free f a -> Bool
visitAny p x = getAny (visitMon replace x)
  where
    replace t
      | p t = Pure (Any True)
      | otherwise = t

-- | Fold a monoidal summary of a term: blank every annotation to
-- 'mempty', rewrite with the visitor, then fold the result.
visitMon :: (Monoid m, Functor f, F.Foldable f) => (Free f m -> Free f m) -> Free f a -> m
visitMon f = F.fold . visit f . fmap (const mempty)

-- | Apply a rewrite at every node, children before parents.
visit :: (Functor f) => (Free f a -> Free f a) -> Free f a -> Free f a
visit f node =
  case node of
    Free t -> f (Free (fmap (visit f) t))
    leaf   -> f leaf
mvisit :: Functor f => (Free f a -> Maybe t) -> (t -> Free f a) -> Free f a -> Free f a
mvisit f g x =
case f x of
Nothing -> iterM (Free . fmap (mvisit f g)) x
Just x' -> g x'
fromFix :: (Functor f) => Free f Void -> Free f a
fromFix = fmap undefined
-- | Iterate a monadic step until it reaches a fixed point, returning
-- that fixed point.
fixM :: (Eq a, Monad m) => (a -> m a) -> a -> m a
fixM step x = do
  next <- step x
  if next == x
    then return x
    else fixM step next

-- | Like 'fixM', but return every intermediate state, newest first.
scanM :: (Eq a, Monad m) => (a -> m a) -> a -> m [a]
scanM step start = loop [] start
  where
    loop acc cur = do
      next <- step cur
      let acc' = cur : acc
      if cur == next
        then return acc'
        else loop acc' next
| imh/plover | src/Language/Plover/Generics.hs | mit | 1,203 | 0 | 15 | 333 | 696 | 346 | 350 | 31 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module holding types shared by consumer and producer modules.
-----------------------------------------------------------------------------
module Kafka.Types
( BrokerId(..)
, PartitionId(..)
, Millis(..)
, ClientId(..)
, BatchSize(..)
, TopicName(..)
, BrokerAddress(..)
, Timeout(..)
, KafkaLogLevel(..)
, KafkaError(..)
, KafkaDebug(..)
, KafkaCompressionCodec(..)
, TopicType(..)
, topicType
, kafkaDebugToText
, kafkaCompressionCodecToText
)
where
import Control.Exception (Exception (..))
import Data.Int (Int64)
import Data.String (IsString)
import Data.Text (Text, isPrefixOf)
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Kafka.Internal.RdKafka (RdKafkaRespErrT, rdKafkaErr2name, rdKafkaErr2str)
-- | Kafka broker ID
newtype BrokerId = BrokerId { unBrokerId :: Int } deriving (Show, Eq, Ord, Read, Generic)
-- | Topic partition ID
-- ('Enum' allows building contiguous partition ranges with @[..]@.)
newtype PartitionId = PartitionId { unPartitionId :: Int } deriving (Show, Eq, Read, Ord, Enum, Generic)
-- | A number of milliseconds, used to represent durations and timestamps
-- ('Num' permits plain arithmetic on durations.)
newtype Millis = Millis { unMillis :: Int64 } deriving (Show, Read, Eq, Ord, Num, Generic)
-- | Client ID used by Kafka to better track requests
--
-- See <https://kafka.apache.org/documentation/#client.id Kafka documentation on client ID>
newtype ClientId = ClientId
  { unClientId :: Text
  } deriving (Show, Eq, IsString, Ord, Generic)
-- | Batch size used for polling
newtype BatchSize = BatchSize { unBatchSize :: Int } deriving (Show, Read, Eq, Ord, Num, Generic)
-- | Whether the topic is created by a user or by the system
data TopicType =
    User   -- ^ Normal topics that are created by user.
  | System -- ^ Topics starting with a double underscore "\__" (@__consumer_offsets@, @__confluent.support.metrics@, etc.) are considered "system" topics
  deriving (Show, Read, Eq, Ord, Generic)
-- | Topic name to consume/produce messages
--
-- Wildcard (regex) topics are supported by the /librdkafka/ assignor:
-- any topic name in the topics list that is prefixed with @^@ will
-- be regex-matched to the full list of topics in the cluster and matching
-- topics will be added to the subscription list.
newtype TopicName = TopicName
  { unTopicName :: Text -- ^ a simple topic name or a regex if started with @^@
  } deriving (Show, Eq, Ord, IsString, Read, Generic)
-- | Deduce the type of a topic from its name: a leading double
-- underscore "\__" marks a system topic, everything else is a user
-- topic.
topicType :: TopicName -> TopicType
topicType (TopicName name)
  | "__" `isPrefixOf` name = System
  | otherwise              = User
{-# INLINE topicType #-}
-- | Kafka broker address string (e.g. @broker1:9092@)
newtype BrokerAddress = BrokerAddress
  { unBrokerAddress :: Text
  } deriving (Show, Eq, IsString, Generic)
-- | Timeout in milliseconds
newtype Timeout = Timeout { unTimeout :: Int } deriving (Show, Eq, Read, Generic)
-- | Log levels for /librdkafka/.
-- NOTE(review): the 'Enum' derivation numbers these from 0 in the
-- order written (Emerg = 0, Alert = 1, ...), presumably mirroring the
-- syslog scale -- confirm against librdkafka before reordering.
data KafkaLogLevel =
  KafkaLogEmerg | KafkaLogAlert | KafkaLogCrit | KafkaLogErr | KafkaLogWarning |
  KafkaLogNotice | KafkaLogInfo | KafkaLogDebug
  deriving (Show, Enum, Eq)
-- | All possible Kafka errors
data KafkaError =
    KafkaError Text
  | KafkaInvalidReturnValue
  | KafkaBadSpecification Text
  | KafkaResponseError RdKafkaRespErrT
  | KafkaInvalidConfigurationValue Text
  | KafkaUnknownConfigurationKey Text
  | KafkaBadConfiguration
  deriving (Eq, Show, Typeable, Generic)
-- | 'displayException' renders a librdkafka response error as
-- @[NAME] description@; every other constructor falls back to 'Show'.
instance Exception KafkaError where
  displayException err =
    case err of
      KafkaResponseError respErr ->
        "[" ++ rdKafkaErr2name respErr ++ "] " ++ rdKafkaErr2str respErr
      other -> show other
-- | Available /librdkafka/ debug contexts
data KafkaDebug =
    DebugGeneric
  | DebugBroker
  | DebugTopic
  | DebugMetadata
  | DebugQueue
  | DebugMsg
  | DebugProtocol
  | DebugCgrp
  | DebugSecurity
  | DebugFetch
  | DebugFeature
  | DebugAll
  deriving (Eq, Show, Typeable, Generic)
-- | Convert a 'KafkaDebug' into its /librdkafka/ string equivalent.
--
-- This is used internally by the library but may be useful to some developers.
-- NOTE(review): these strings look like the exact tokens accepted by
-- the @debug@ configuration property -- confirm before changing any.
kafkaDebugToText :: KafkaDebug -> Text
kafkaDebugToText d = case d of
  DebugGeneric -> "generic"
  DebugBroker -> "broker"
  DebugTopic -> "topic"
  DebugMetadata -> "metadata"
  DebugQueue -> "queue"
  DebugMsg -> "msg"
  DebugProtocol -> "protocol"
  DebugCgrp -> "cgrp"
  DebugSecurity -> "security"
  DebugFetch -> "fetch"
  DebugFeature -> "feature"
  DebugAll -> "all"
-- | Compression codec used by a topic
--
-- See <https://kafka.apache.org/documentation/#compression.type Kafka documentation on compression codecs>
data KafkaCompressionCodec =
    NoCompression
  | Gzip
  | Snappy
  | Lz4
  deriving (Eq, Show, Typeable, Generic)
-- | Convert a 'KafkaCompressionCodec' into its /librdkafka/ string equivalent.
--
-- This is used internally by the library but may be useful to some developers.
kafkaCompressionCodecToText :: KafkaCompressionCodec -> Text
kafkaCompressionCodecToText c = case c of
  NoCompression -> "none"
  Gzip -> "gzip"
  Snappy -> "snappy"
  Lz4 -> "lz4"
| haskell-works/kafka-client | src/Kafka/Types.hs | mit | 5,371 | 0 | 9 | 1,015 | 979 | 587 | 392 | 107 | 12 |
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module Main where
import Text.XML.HXT.Core
-- | Read all of stdin and echo it back via 'print'
-- (note: 'print' renders the string with 'show', i.e. quoted).
main :: IO ()
main = getContents >>= print
| danchoi/bshxt | Main.hs | mit | 141 | 0 | 7 | 29 | 30 | 17 | 13 | 6 | 1 |
import System.Environment(getArgs)
-- Semantic aliases: every quantity is a Float, the names document intent.
type Speed = Float
type Dir = (Float, Float)
type Point = (Float, Float)
type TimeStamp = Float
type TimeIncrement = Float
type Distance = Float
-- | A ship: where it is, which way it heads, and how fast it moves.
-- 'direction' need not be a unit vector (it is normalised on use).
data Ship = Ship {
      position :: Point,
      direction :: Dir,
      speed :: Speed
    } deriving (Show)
-- | Full simulation state: the two ships plus the current time.
data World = World {
      firstShip :: Ship,
      secondShip :: Ship,
      timeStamp :: TimeStamp
    } deriving (Show)
-- | Euclidean distance between two points.
dist :: Point -> Point -> Distance
dist (ax, ay) (bx, by) = sqrt (dx * dx + dy * dy)
  where
    dx = ax - bx
    dy = ay - by
-- | Unit vector pointing from the first point towards the second.
-- Divides by the distance, so it is undefined for coincident points.
slope :: Point -> Point -> Dir
slope from@(x0, y0) to@(x1, y1) = ((x1 - x0) / len, (y1 - y0) / len)
  where
    len = dist from to
-- | Distance between the two ships' current positions.
distShip :: World -> Distance
distShip w = dist (position . firstShip $ w) (position . secondShip $ w)
-- | Advance a ship's position by @speed * t@ along its direction.
-- The direction vector is normalised here (divided by its norm @w@),
-- so 'direction' need not be a unit vector.
getNewPos :: Ship -> TimeIncrement -> Point
getNewPos (Ship (x,y) (z,u) s) t = (x+z*s*t/w, y+u*s*t/w)
    where
      w = sqrt $ z ** 2 + u ** 2
-- | Step the simulation by @dt@: both ships move along their current
-- headings, then the second ship re-aims at the first ship's NEW
-- position (pursuit), and the clock advances.
advanceWorld :: World -> TimeIncrement -> World
advanceWorld w@(World s1 s2 t) dt = w { firstShip = s1',
        secondShip = s2', timeStamp = t + dt}
    where
      p2 = getNewPos s2 dt
      p1 = getNewPos s1 dt
      s1' = s1 { position = p1}
      s2' = s2 { position = p2, direction = slope p2 p1}
-- | Initial world: the first ship starts at (42,24) with a fixed
-- heading; the second starts @initialDistance@ away on the x axis.
-- Both ships share the same speed; the clock starts at 0.
createWorld :: Distance -> Speed -> World
createWorld initialDistance sp = World (Ship (42, 24) (-5.42, -2.34) sp)
                 (Ship (initialDistance, 0) (-1, 0) sp) 0
-- | Repeatedly apply a step function, accumulating every state seen
-- (most recent first), and stop as soon as the newest state satisfies
-- the predicate.  The satisfying state is included in the result.
iterateUntil :: (a -> a) -> (a -> Bool) -> a -> [a]
iterateUntil step done start = loop [start]
  where
    loop acc@(current : _)
      | done current = acc
      | otherwise    = loop (step current : acc)
-- | All intermediate states (most recent first) obtained by stepping
-- the world by @t@ until the simulation time exceeds @tm@.
doEvolution :: World -> TimeIncrement -> TimeStamp -> [World]
doEvolution w t tm = iterateUntil (flip advanceWorld t) (\x -> timeStamp x > tm) w
-- | Append one sample to the two output files:
--   * \"dist\"  gets @\<time\>\\t\<ship distance\>\\n@
--   * \"trace\" gets @\<x1\>\\t\<y1\>\\t\<x2\>\\t\<y2\>\\n@
-- Each line is built first and written with a single 'appendFile' per
-- file, instead of re-opening the file once per field as before; the
-- bytes written are identical.
printResults :: World -> IO ()
printResults w = do
    appendFile "dist" $ show (timeStamp w) ++ "\t" ++ show (distShip w) ++ "\n"
    appendFile "trace" $ show x1 ++ "\t" ++ show y1 ++ "\t"
                      ++ show x2 ++ "\t" ++ show y2 ++ "\n"
  where
    (x1, y1) = position (firstShip w)
    (x2, y2) = position (secondShip w)
-- | Usage: ships <initial distance> <time increment> <speed> <max time>
-- Reads the four numeric arguments (extras are ignored, as before),
-- truncates the output files, then writes one sample per step.
-- Replaces the partial @args !! n@ indexing with a pattern match so a
-- missing argument yields a usage error instead of an index exception.
main :: IO ()
main = do
    args <- getArgs
    case args of
        (dStr : tStr : sStr : maxtStr : _) -> do
            let d    = read dStr
                t    = read tStr
                s    = read sStr
                maxt = read maxtStr
                w    = createWorld d s
            writeFile "dist" ""
            writeFile "trace" ""
            mapM_ printResults $ doEvolution w t maxt
        _ -> error "usage: ships <distance> <time-increment> <speed> <max-time>"
| mihaimaruseac/blog-demos | ships/ships.hs | mit | 2,338 | 0 | 12 | 558 | 1,109 | 577 | 532 | 67 | 1 |
-- Filter Coffee
-- http://www.codewars.com/kata/56069d0c4af7f633910000d3/
module Codewars.G964.FilterCoffee where
import Data.List (intercalate, sort)
-- | Prices at or under the budget, in ascending order, rendered as a
-- comma-separated string.
search :: Int -> [Int] -> String
search budget prices =
    intercalate "," [ show p | p <- sort prices, p <= budget ]
| gafiatulin/codewars | src/7 kyu/FilterCoffee.hs | mit | 259 | 0 | 8 | 36 | 71 | 40 | 31 | 4 | 1 |
{-# OPTIONS_HADDOCK show-extensions #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-|
Module : Data.FixedSize.Matrix
Description : fixed-size matrices
Copyright : (c) Lars Brünjes, 2016
License : MIT
Maintainer : brunjlar@gmail.com
Stability : experimental
Portability : portable
This module defines fixed-size /matrices/ and some basic typeclass instances and operations for them.
-}
module Data.FixedSize.Matrix
( Matrix(..)
, (<%%>)
, row
, column
, transpose
) where
import Data.MyPrelude
import Data.FixedSize.Class
import Data.FixedSize.Vector
import GHC.TypeLits
-- | @'Matrix' m n a@ is the type of /matrices/ with @m@ rows, @n@ columns and entries of type @a@.
--
-- Row-major representation: an @m@-element vector of @n@-element rows.
newtype Matrix (m :: Nat) (n :: Nat) a = Matrix (Vector m (Vector n a))
    deriving (Eq, Show, Functor, Foldable, Traversable, NFData)
-- | 'pure' fills every cell with the same value; '<*>' applies the
-- matrix of functions cell-wise to the matrix of arguments, delegating
-- to the underlying 'Vector' instances.
instance (KnownNat m, KnownNat n) => Applicative (Matrix m n) where
    pure x = Matrix $ pure (pure x)
    Matrix fs <*> Matrix xs = Matrix $ (<*>) <$> fs <*> xs
-- | Matrices are indexed by @(row, column)@ pairs; '!?' is total and
-- returns 'Nothing' for out-of-range indices.  'generate' builds the
-- matrix cell by cell from an index function.
instance (KnownNat m, KnownNat n) => FixedSize (Matrix m n) where
    type Index (Matrix m n) = (Int, Int)
    type Size (Matrix m n) = m * n
    m !? (i, j) = row m i >>= (!? j)
    generate f = Matrix $ generate (\i -> generate (\j -> f (i, j)))
-- | Multiplication of a /matrix/ by a (column-)/vector/.
--
-- >>> :set -XDataKinds
-- >>> (pure 1 :: Matrix 1 2 Int) <%%> cons 1 (cons 2 nil)
-- [3]
--
(<%%>) :: Num a => Matrix m n a -> Vector n a -> Vector m a
-- Each result component is the dot product ('<%>') of one row with @v@.
Matrix rows <%%> v = (v <%>) <$> rows
-- | Gives the matrix row with the specified index (starting at zero) if the index is valid,
-- otherwise 'Nothing'.
--
-- >>> :set -XDataKinds
-- >>> row (pure 42 :: Matrix 2 4 Int) 0
-- Just [42,42,42,42]
--
-- >>> row (pure 42 :: Matrix 2 4 Int) 2
-- Nothing
--
row :: KnownNat m => Matrix m n a -> Int -> Maybe (Vector n a)
-- Delegates to the outer vector's safe index operator '!?'.
row (Matrix rows) = (rows !?)
-- | Gives the matrix column with the specified index (starting at zero) if the index is valid,
-- otherwise 'Nothing'.
--
-- >>> :set -XDataKinds
-- >>> column (pure 42 :: Matrix 2 4 Int) 3
-- Just [42,42]
--
-- >>> column (pure 42 :: Matrix 2 4 Int) 4
-- Nothing
--
column :: KnownNat n => Matrix m n a -> Int -> Maybe (Vector m a)
-- 'sequenceA' collapses the per-row lookups: if any row lacks index
-- @j@ the whole result is 'Nothing'.
column (Matrix rows) j = sequenceA $ (!? j) <$> rows
-- | Transposes a matrix.
--
-- >>> transpose (Matrix $ cons (cons 'a' nil) (cons (cons 'b' nil) nil))
-- Matrix ["ab"]
--
transpose :: (KnownNat m, KnownNat n) => Matrix m n a -> Matrix n m a
-- 'fromJust' is safe here: @(j, i)@ is in range for the source matrix
-- exactly when @(i, j)@ is in range for the transposed one.
transpose m = generate $ \(i, j) -> fromJust $ m !? (j, i)
| brunjlar/neural | src/Data/FixedSize/Matrix.hs | mit | 2,750 | 0 | 14 | 588 | 651 | 367 | 284 | 37 | 1 |
-- |
-- Module : False.Targets.C
-- Copyright : Joe Jevnik
--
-- License : GPL-2
-- Maintainer : joejev@gmail.org
-- Stability : stable
-- Portability : GHC
--
-- PNodes to C
{-
Program structure:
C89 is the target.
The stack is stored in a structure called f_stack.
All the operators are function that mutate a single f_stack.
-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE QuasiQuotes #-}
module False.Targets.C
( cTarget -- :: Target
) where
import Control.Applicative ((<$>))
import Control.Parallel.Strategies (parMap,rseq)
import Data.Char (ord)
import Data.List (intercalate)
import False.Core (PNode(..),Target(..),Func(..),Node(..),Var(..))
import False.Targets.C.QQ (include)
type C = String
-- | The C backend: compiles via 'cCompile', default output \"a.c\".
cTarget :: Target
cTarget = Target { compile = cCompile
                 , defaultFile = "a.c"
                 }
-- | Emit the whole program: the embedded runtime (false.c), then each
-- lambda definition (rendered in parallel, in reverse declaration
-- order), then @main@, separated by blank lines.
cCompile :: [(Int,String)] -> [(Int,[PNode])] -> [PNode] -> String
cCompile _ ls ms = let ls' = parMap rseq defLambda $ reverse ls
                       ms' = defMain ms
                   in intercalate "\n\n\n"
                      $ [include|False/Targets/C/false.c|] : ls' ++ [ms']
-- | Translate one parsed FALSE node into a single C statement.
cToTarget :: PNode -> C
cToTarget (PNode (FuncNode f)) = callFunction $ funcToC f
cToTarget (PNode (ValNode n)) = stackPush $ show n
-- Variables a..z map to slots 0..25.
cToTarget (PNode (VarNode (Var v))) = stackPush $ show $ ord v - ord 'a'
cToTarget (PString _ s) = "puts(" ++ show s ++ ");"
-- A lambda is pushed as the address of its generated C function.
cToTarget (PLambda n) = stackPush $ "(size_t) &lambda_" ++ show n
-- | C statement pushing a value onto the FALSE data stack.
stackPush :: String -> C
stackPush n = callFunctionWithArgs "f_stackpush" [n]
-- | Call a runtime function that takes only the stack pointer.
callFunction :: C -> C
callFunction name = concat [name, "(stack);"]
-- | Call a runtime function with the stack pointer plus extra arguments.
callFunctionWithArgs :: C -> [C] -> C
callFunctionWithArgs name extra =
    concat [name, "(", intercalate "," ("stack" : extra), ");"]
-- | Name of the C runtime function implementing each FALSE primitive.
funcToC :: Func -> C
funcToC f = case f of
    FAdd    -> "f_add"
    FSub    -> "f_sub"
    FMul    -> "f_mul"
    FDiv    -> "f_div"
    FNeg    -> "f_neg"
    FEq     -> "f_eq"
    FGt     -> "f_gt"
    FNot    -> "f_not"
    FAnd    -> "f_and"
    FOr     -> "f_or"
    FAssign -> "f_assign"
    FRead   -> "f_read"
    FApply  -> "f_apply"
    FDup    -> "f_dup"
    FDel    -> "f_del"
    FSwap   -> "f_swap"
    FRot    -> "f_rot"
    FPick   -> "f_pick"
    FIf     -> "f_if"
    FWhile  -> "f_while"
    FPrintI -> "f_printi"
    FPrintC -> "f_printc"
    FInput  -> "f_input"
    FFlush  -> "f_flush"
-- | Define the C function @lambda_n@ whose body is the rendered node
-- list of lambda @n@.
defLambda :: (Int,[PNode]) -> C
defLambda (n,ps) = "void lambda_" ++ show n ++ "(f_stack *stack){\n    "
                   ++ writePNodes ps ++ "\n}"
-- | Define the C @main@: allocate the stack, run @f_init@, emit the
-- program body, then free the stack and return 0.
defMain :: [PNode] -> C
defMain ps = "int main(int argc,char **argv){\n\
             \    f_stack *stack = malloc(sizeof(f_stack));\n\
             \    f_init(namespace,stack,argv);\n    "
             ++ writePNodes ps
             ++ "\n    free(stack); return 0;\n}"
-- | Render the nodes (in parallel) joined by newline + 4-space indent.
writePNodes :: [PNode] -> C
writePNodes ps = intercalate "\n    " $ parMap rseq cToTarget ps
| llllllllll/false | False/Targets/C.hs | gpl-2.0 | 3,013 | 0 | 11 | 790 | 838 | 456 | 382 | 65 | 1 |
{-
Given: A string s of length at most 200 letters and four integers a, b, c and d.
Return: The slice of this string from indices a through b and c through d (with space in between), inclusively.
-}
str = "HumptyDumptysatonawallHumptyDumptyhadagreatfallAlltheKingshorsesandalltheKingsmenCouldntputHumptyDumptyinhisplaceagain."
-- Problem input: the two inclusive index ranges to slice out of 'str'.
a = 22
b = 27
c = 97
d = 102
-- | The two inclusive slices [a..b] and [c..d] of the string,
-- joined by a single space.
slice str a b c d = piece a b ++ " " ++ piece c d
  where
    piece from to = take (to - from + 1) (drop from str)
-- | Print the two requested substrings separated by a space.
-- Fix: the original final line had stray dataset metadata
-- (\"| forgit/Rosalind | ...\") fused onto it, which does not parse.
main = do
    print $ slice str a b c d
--
-- Copyright (c) 2013 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE ScopedTypeVariables #-}
module NetworkFirewall (applyFirewallRules
, cleanupFirewallRules
, initBridgedFiltering
, addForwardChain
, configGetBridgeFiltering
) where
import Data.Maybe
import Data.List
import Data.Char
import qualified Data.Map as M
import Text.Regex.Posix
import Text.Printf (printf)
import Directory
import System.FilePath.Posix
import System.Exit
import Control.Monad
import Control.Applicative
import Tools.Process
import Tools.Log
import Tools.Text
import Tools.Misc
import Tools.File
import Tools.XenStore
import App
import Utils
import Error
import NetworkUtils
import Rpc.Autogen.NetworkSlaveConst
-- | Map a xenstore domid (or stubdomid) to the owning VM's uuid.
-- Dom0 (\"0\") is special-cased to the all-zero uuid; otherwise every
-- VM under /xenmgr/vms is scanned and the first match wins.
domidToUuid :: String -> IO (Maybe String)
domidToUuid domid =
    -- can't use getDomainUuid stuff here as we can't read stubdom vm objs from xenstore
    if (domid == "0")
       then return $ Just "00000000-0000-0000-0000-000000000000"
       else do
         uuids <- xsDir "/xenmgr/vms"
         (listToMaybe.catMaybes) <$> (mapM (matchDomid domid) uuids)
  where
    xsVmDomid uuid = printf "/xenmgr/vms/%s/domid" uuid
    xsVmStubdomid uuid = printf "/xenmgr/vms/%s/stubdomid" uuid
    xsDomid uuid = xsRead $ xsVmDomid uuid
    xsStubdomid uuid = xsRead $ xsVmStubdomid uuid
    matchDomid :: String -> String -> IO (Maybe String)
    -- A VM matches if either its domid or its stubdom's domid equals
    -- the queried domid (missing xenstore nodes read as "").
    matchDomid domid uuid = do
        domidM <- fromMaybe "" <$> xsDomid uuid
        stubdomidM <- fromMaybe "" <$> xsStubdomid uuid
        case () of
          _ | (domidM == domid) -> return $ Just uuid
            | (stubdomidM == domid) -> return $ Just uuid
            | otherwise -> return Nothing
-- | Install FORWARD rules connecting @inputIf@ and @outputIf@: traffic
-- between (and within) the pair is accepted, any other traffic to/from
-- @inputIf@ is rejected.  When bridge filtering is enabled a physdev
-- jump into @FORWARD_\<outputIf\>@ is added too.  Existing rules are
-- detected via @iptables -S FORWARD@ and never duplicated.
addForwardChain :: String -> String -> String -> IO ()
addForwardChain inputIf outputIf interface = do
    (_, out, _) <- readProcessWithExitCode_closeFds "/usr/sbin/iptables" ["-S", "FORWARD"] []
    let output = lines out
    configGetBridgeFiltering >>= \x -> when (x && outputIf /= interface) $
        addIptableRulesIfMissing output (printf "-A FORWARD -i %s -o %s -m physdev --physdev-in %s -j FORWARD_%s" outputIf outputIf interface outputIf)
    mapM_ (addIptableRulesIfMissing output) filters
  where
    filters = [ printf "-A FORWARD -i %s -o %s -j ACCEPT" inputIf outputIf
              , printf "-A FORWARD -i %s -o %s -j ACCEPT" outputIf inputIf
              , printf "-A FORWARD -i %s -o %s -j ACCEPT" inputIf inputIf
              , printf "-A FORWARD -i %s -j REJECT" inputIf
              , printf "-A FORWARD -o %s -j REJECT" inputIf
              ]
-- | Tear down the per-vif filter chains installed by
-- 'applyFirewallRules'.  When no bridge name is supplied it is
-- recovered by grepping the live FORWARD chain for the vif.  No-op
-- unless bridge filtering is enabled.
cleanupFirewallRules vif bridgeI = configGetBridgeFiltering >>= \x -> when (x) $ do
    bridge <- if (null bridgeI)
                 then do
                   out <- words <$> (spawnShell $ printf "iptables -L FORWARD -v | grep %s | awk '{ print $6 }'" vif)
                   if (null out) then return "" else return $ head out
                 else return bridgeI
    cleanupVifInRules bridge
    cleanupVifOutRules bridge
    removeChain vifChainOut
    removeChain vifChainIn
  where
    vifIfaceIn="--physdev-in " ++ vif
    vifIfaceOut="--physdev-is-bridged --physdev-out " ++ vif
    vifChain="FORWARD_" ++ vif
    vifChainIn="FORWARD_" ++ vif ++ "_IN"
    vifChainOut="FORWARD_" ++ vif ++ "_OUT"
    -- Remove the FORWARD jump that routed bridge ingress through the
    -- vif's OUT chain.
    cleanupVifOutRules bridge = do
        let bridgeIn = "-i " ++ bridge
            bridgeOut="-o " ++ bridge
        unless (null bridge) $ logAndExecuteIptables $ printf "-D FORWARD %s -m physdev %s -j %s" bridgeIn vifIfaceIn vifChainOut
    -- Remove the bridge-chain jump that routed egress through the
    -- vif's IN chain.
    cleanupVifInRules bridge = do
        let bridgeOut="-o " ++ bridge
            bridgeChain="FORWARD_" ++ bridge
        unless (null bridge) $ logAndExecuteIptables $ printf "-D %s %s -m physdev %s -j %s" bridgeChain bridgeOut vifIfaceOut vifChainIn
-- | Extract @[domid, devid]@ from a vif name like @\"vif12.0\"@.
domAndDevIdFromVif vif = vif `matchG` "vif([0-9]+)\\.([0-9]+)"
-- | Install the per-vif IN/OUT filter chains for a bridged network and
-- populate them from user-defined rules stored under
-- @/firewall-rules/\<vm-uuid\>@ in the dom0 database.  No-op unless
-- bridge filtering is enabled and the vif's domid maps to a known VM.
-- NOTE(review): 'devid', the 'exitCode' binding and the 'dbNodesDom0'
-- helper are unused.
applyFirewallRules vif bridge = void $ configGetBridgeFiltering >>= \x -> when x $ do
    let [domid, devid] = domAndDevIdFromVif vif
    uuidM <- domidToUuid domid
    unless (uuidM == Nothing) $ do
      let uuid = fromJust uuidM
      when (bridge =~ "brbridged" :: Bool) $ do -- hack to detect it's a bridged network
        initChain vifChainIn
        initChain vifChainOut
        -- add packet destined to vif must be checked by vif input filters i.e, frame that'll leave the bridge and vif port on the bridge should go through
        -- input filtering rules for that vif. These rules will not get to run when the vif is on a shared/NAT'ed network as you can't filter the packets
        -- based on phys-out interface
        insertRuleIfMissing bridgeChain (printf "%s -m physdev %s -j %s" bridgeOut vifIfaceOut vifChainIn)
        -- any packet coming in from vif and out bridge must first be checked by vif chain out
        insertRuleIfMissing "FORWARD" (printf "%s -m physdev %s -j %s" bridgeIn vifIfaceIn vifChainOut)
      -- now that the framework is setup, process the firewall rules defined by the user
      (exitCode, out, _) <- readProcessWithExitCode_closeFds "db-nodes-dom0" [(fwConfRoot uuid)] []
      mapM_ (processRule uuid) (sort $ words out) -- sort the rule indices before applying them
      return ()
  where
    bridgeIn = "-i " ++ bridge
    bridgeOut = "-o " ++ bridge
    bridgeChain = "FORWARD_" ++ bridge
    vifIfaceIn = "--physdev-in " ++ vif
    vifIfaceOut = "--physdev-is-bridged --physdev-out " ++ vif
    vifChain = "FORWARD_" ++ vif
    vifChainIn = "FORWARD_" ++ vif ++ "_IN"
    vifChainOut = "FORWARD_" ++ vif ++ "_OUT"
    fwConfRoot uuid = "/firewall-rules/" ++ uuid
    -- One rule = direction (in/out), a remote IP, and optional extra
    -- iptables arguments; it becomes a DROP entry in the matching chain.
    processRule uuid index = void $ do
        direction <- (map toLower) <$> strip <$> (dbReadDom0 (printf "%s/%s/direction" (fwConfRoot uuid) index))
        remoteIp <- strip <$> dbReadDom0 (printf "%s/%s/remote-ip" (fwConfRoot uuid) index)
        extra <- strip <$> dbReadDom0 (printf "%s/%s/extra" (fwConfRoot uuid) index)
        unless (null direction || null remoteIp) $ void $ do
          case () of
            _ | (direction == "in") -> logAndExecuteIptables $ printf "-I %s %s --source %s -j DROP" vifChainIn extra remoteIp
              | (direction == "out") -> logAndExecuteIptables $ printf "-I %s %s --destination %s -j DROP" vifChainOut extra remoteIp
              | otherwise -> debug $ printf "unexpected direction - '%s'" direction
    dbNodesDom0 path = do
        (exitCode, out, _) <- readProcessWithExitCode_closeFds "db-nodes-dom0" [path] []
        return out
    dbReadDom0 path = do
        (exitCode, out, _) <- readProcessWithExitCode_closeFds "db-read-dom0" [path] []
        return out
-- | Append @ruleArgs@ to @table@ unless an identical rule is already
-- present (checked against the @iptables -S@ listing of the table).
appendRuleIfMissing :: String -> String -> IO ()
appendRuleIfMissing table ruleArgs = do
    (_, out, _) <- readProcessWithExitCode_closeFds "/usr/sbin/iptables" ["-S", table] []
    addIptableRulesIfMissing (lines out) rule
  where rule = printf " -A %s %s" table ruleArgs
-- | Like 'appendRuleIfMissing' but inserts at the head of the chain
-- (@-I@ instead of @-A@).
insertRuleIfMissing :: String -> String -> IO ()
insertRuleIfMissing table ruleArgs = do
    (_, out, _) <- readProcessWithExitCode_closeFds "/usr/sbin/iptables" ["-S", table] []
    addIptableRulesIfMissing (lines out) rule
  where rule = printf " -I %s %s" table ruleArgs
-- | Delete a chain: flush first, since iptables only deletes empty
-- chains.
removeChain chain = do
    logAndExecuteIptables $ "-F " ++ chain
    logAndExecuteIptables $ "-X " ++ chain
-- | Create a chain (the -N is a no-op if it exists) and flush it so it
-- starts from a clean state.
initChain chain = do
    logAndExecuteIptables $ "-N " ++ chain
    logAndExecuteIptables $ "-F " ++ chain
-- | Prepare the per-bridge chain: (re)create @FORWARD_\<bridge\>@ and
-- accept everything leaving the bridge through its physical uplink.
-- No-op unless bridge filtering is enabled.
initBridgedFiltering bridge interface = configGetBridgeFiltering >>= \x -> when x $ do
    let bridgeChain = "FORWARD_" ++ bridge
    initChain bridgeChain
    logAndExecuteIptables $ printf "-I %s -o %s -m physdev --physdev-is-bridged --physdev-out %s -j ACCEPT" bridgeChain bridge interface
-- | Read the global bridge-filtering switch from the dom0 database.
-- True iff the stored value, after 'strip', is exactly \"true\".
-- (Replaces the redundant @if c then return True else return False@
-- with a direct @return@ of the comparison.)
configGetBridgeFiltering :: IO (Bool)
configGetBridgeFiltering = do
    (_, enableFiltering, _) <- readProcessWithExitCode_closeFds "db-read-dom0" [path] []
    return (strip enableFiltering == "true")
  where path = "/" ++ eCONFIG_BRIDGE_FILTERING
| OpenXT/network | nws/NetworkFirewall.hs | gpl-2.0 | 9,120 | 0 | 20 | 2,457 | 1,961 | 987 | 974 | 147 | 3 |
{-# OPTIONS -Wall #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Graphics.UI.Bottle.Widgets.Spacer (
make, makeWidget, indentRight, indentRightWidget, makeHorizontal,
makeHorizontalExpanding, makeVerticalExpanding) where
import Data.Monoid(mempty)
import Data.Vector.Vector2(Vector2(..))
import Graphics.UI.Bottle.SizeRange (fixedSize, Size)
import Graphics.UI.Bottle.Sized (Sized(..))
import Graphics.UI.Bottle.Widget (Widget)
import qualified Graphics.UI.Bottle.Animation as Anim
import qualified Graphics.UI.Bottle.SizeRange as SizeRange
import qualified Graphics.UI.Bottle.Widget as Widget
import qualified Graphics.UI.Bottle.Widgets.Grid as Grid
import qualified Graphics.UI.Bottle.Widgets.GridView as GridView
-- | A view of the given fixed size with an empty ('mempty') frame.
make :: Size -> Sized Anim.Frame
make size = Sized (fixedSize size) mempty
-- | 'make' lifted into a 'Widget'.
makeWidget :: Size -> Widget a
makeWidget = Widget.liftView . make
-- | Horizontal spacer: the given width, zero height.
makeHorizontal :: Widget.R -> Sized Anim.Frame
makeHorizontal width = make (Vector2 width 0)
-- | Empty frame whose size range expands vertically.
makeVerticalExpanding :: Sized Anim.Frame
makeVerticalExpanding = Sized (SizeRange.verticallyExpanding 0 0) mempty
-- | Empty frame whose size range expands horizontally.
makeHorizontalExpanding :: Sized Anim.Frame
makeHorizontalExpanding = Sized (SizeRange.horizontallyExpanding 0 0) mempty
-- | Indent a view by placing a horizontal spacer to its left.
indentRight :: Widget.R -> Sized Anim.Frame -> Sized Anim.Frame
indentRight width img = GridView.make [[makeHorizontal width, img]]
-- | 'indentRight' for widgets.
indentRightWidget :: Widget.R -> Widget a -> Widget a
indentRightWidget width widget =
  Grid.toWidget $
  Grid.make [[Widget.liftView (makeHorizontal width), widget]]
| nimia/bottle | bottlelib/Graphics/UI/Bottle/Widgets/Spacer.hs | gpl-3.0 | 1,507 | 0 | 11 | 170 | 419 | 243 | 176 | 31 | 1 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module Pudding.Test.Utilities.DoubleFunctions
(htf_thisModulesTests) where
import Test.Framework
import Test.HUnit
import Pudding.Utilities.DoubleFunctions
import Pudding.Utilities.FloatEq
-- | The result of fmod is strictly smaller in magnitude than the divisor.
prop_fmodCutoff :: Double -> (NonZero Double) -> Bool
prop_fmodCutoff a (NonZero b) = (abs (a `fmod` b)) < (abs b)
-- | fmod is periodic: shifting the dividend by the divisor is a no-op.
prop_fmodPeriod :: Double -> (NonZero Double) -> Bool
prop_fmodPeriod a (NonZero b) = (a `fmod` b) ~= ((a + b) `fmod` b)
-- | For 0 < a < b, a `fmod` b is a itself.
prop_fmodSmallerPositive :: (Positive Double) -> Double -> Property
prop_fmodSmallerPositive (Positive a) b = (a < b) ==> (a `fmod` b) ~= a
-- | For 0 < a < b, (-a) `fmod` b wraps around to b - a.
prop_fmodSmallerNegative :: (Positive Double) -> Double -> Property
prop_fmodSmallerNegative (Positive a) b = (a < b) ==> ((-a) `fmod` b) ~= (b - a)
-- | Assert that approximate equality ('~=') holds reflexively for @d@.
equalTest :: Double -> Assertion
equalTest d = assertEqual True $ d ~= d
-- | Assert that the two doubles are not approximately equal.
differentTest :: Double -> Double -> Assertion
differentTest d1 d2 = assertEqual False $ d1 ~= d2
-- Reflexivity cases for '~=' across magnitudes, including zeros.
test_doubleEqOneOne = equalTest 1
test_doubleEqZeroZero = equalTest 0
test_doubleEqMinuszeroMinuszero = equalTest (-0)
test_doubleEqThirdThird = equalTest 0.3333
test_doubleEqSmallSmall = equalTest 1e-30
-- Inequality cases, including nearby small numbers.
test_doubleEqOneTwo = differentTest 1 2
test_doubleEqZeroOne = differentTest 0 1
test_doubleEqOneZero = differentTest 1 0
test_doubleEqSmallOthersmall = differentTest 1.23e-50 1.234e-50
-- 0 and -0 must compare approximately equal in both directions.
test_doubleEqZeroMinuszero =
    assertEqual True $ (0 :: Double) ~= ((-0) :: Double)
test_doubleEqMinuszeroZero =
    assertEqual True $ ((-0) :: Double) ~= (0 :: Double)
| jfulseca/Pudding | src/Pudding/Test/Utilities/DoubleFunctions.hs | gpl-3.0 | 1,490 | 0 | 10 | 217 | 503 | 275 | 228 | 32 | 1 |
import Wanda
import Graphics.UI.Gtk
-- | Initialise GTK, set up Wanda, then enter the GTK main loop.
main = initGUI >> wandaMain >> mainGUI
| arsenm/Wandahs | WandaMain.hs | gpl-3.0 | 78 | 0 | 6 | 14 | 23 | 13 | 10 | 3 | 1 |
module Hadolint.Rule.DL3013 (rule) where
import Data.List (isInfixOf)
import qualified Data.Text as Text
import Hadolint.Rule
import Hadolint.Shell (ParsedShell)
import qualified Hadolint.Shell as Shell
import Language.Docker.Syntax
-- | DL3013: every package installed via @pip install@ must be pinned
-- to a version, unless installation comes from a requirements file,
-- a constraint file, or a local path/artifact.
rule :: Rule ParsedShell
rule = simpleRule code severity message check
  where
    code = "DL3013"
    severity = DLWarningC
    message =
      "Pin versions in pip. Instead of `pip install <package>` use `pip install \
      \<package>==<version>` or `pip install --requirement <requirements file>`"

    check (Run (RunArgs args _)) = foldArguments (Shell.noCommands forgotToPinVersion) args
    check _ = True

    -- A command violates the rule when it is a pip install of named
    -- packages (not a requirements install), has no constraint file,
    -- and at least one package lacks a pinned version.  The previous
    -- code evaluated these same conjuncts twice (once here and once in
    -- an `isPipInstall` helper); they are now checked exactly once.
    forgotToPinVersion cmd =
      Shell.isPipInstall cmd
        && not (requirementInstall cmd)
        && not (hasBuildConstraint cmd)
        && not (all versionFixed (packages cmd))

    -- Installing from a requirements file or the local module (".") is
    -- out of scope for this rule.
    requirementInstall cmd =
      ["--requirement"] `isInfixOf` Shell.getArgs cmd
        || ["-r"] `isInfixOf` Shell.getArgs cmd
        || ["."] `isInfixOf` Shell.getArgs cmd

    -- A constraint file may pin versions externally.
    hasBuildConstraint cmd = Shell.hasFlag "constraint" cmd || Shell.hasFlag "c" cmd

    versionFixed package = hasVersionSymbol package || isVersionedGit package || isLocalPackage package

    -- Git URLs pinned to a revision (git+http...@<rev>) count as fixed.
    isVersionedGit package = "git+http" `Text.isInfixOf` package && "@" `Text.isInfixOf` package

    versionSymbols = ["==", ">=", "<=", ">", "<", "!=", "~=", "==="]
    hasVersionSymbol package = or [s `Text.isInfixOf` package | s <- versionSymbols]

    -- Wheels and source archives are already exact artifacts.
    localPackageFileExtensions = [".whl", ".tar.gz"]
    isLocalPackage package = or [s `Text.isSuffixOf` package | s <- localPackageFileExtensions]
{-# INLINEABLE rule #-}
-- | The package arguments of a pip command: drop all flags that take a
-- value (so the value is not mistaken for a package name), keep the
-- remaining non-flag arguments, and strip everything up to and
-- including the @install@ subcommand.
packages :: Shell.Command -> [Text.Text]
packages cmd =
  stripInstallPrefix $
    Shell.getArgsNoFlags $
      Shell.dropFlagArg
        [ "abi",
          "b",
          "build",
          "e",
          "editable",
          "extra-index-url",
          "f",
          "find-links",
          "i",
          "index-url",
          "implementation",
          "no-binary",
          "only-binary",
          "platform",
          "prefix",
          "progress-bar",
          "proxy",
          "python-version",
          "root",
          "src",
          "t",
          "target",
          "trusted-host",
          "upgrade-strategy"
        ]
        cmd
-- | Drop everything up to and including the leading run of @install@
-- tokens, leaving only the arguments that follow the subcommand.
stripInstallPrefix :: [Text.Text] -> [Text.Text]
stripInstallPrefix args = dropWhile isInstall (dropWhile (not . isInstall) args)
  where
    isInstall = (== Text.pack "install")
| lukasmartinelli/hadolint | src/Hadolint/Rule/DL3013.hs | gpl-3.0 | 2,811 | 0 | 14 | 743 | 651 | 361 | 290 | 67 | 2 |
module Trello.Client.Internal
( getMyBoards
) where
import Network.HTTP.Client (Manager)
import Servant.Client (BaseUrl, ClientM, client)
import Trello.API (Key, Token, api)
import Trello.API.Types (Board)
-- | Shape of the raw servant client calls: credentials, a connection
-- manager and a base url, producing a list of boards.
-- NOTE(review): the @a@ parameter is unused — the result is always
-- @ClientM [Board]@ regardless of the instantiation.
type RawClient a =
    Maybe Key -> Maybe Token -> Manager -> BaseUrl -> ClientM [Board]
-- | Fetch the boards of the authenticated member via the generated
-- servant client for 'api'.
getMyBoards :: RawClient [Board]
getMyBoards = client api
| cblp/tasknight-dashboard | trello-client/lib/Trello/Client/Internal.hs | gpl-3.0 | 365 | 0 | 10 | 62 | 120 | 70 | 50 | 10 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Accountstatuses.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the status of a Merchant Center account. No itemLevelIssues
-- are returned for multi-client accounts.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.accountstatuses.get@.
module Network.Google.Resource.Content.Accountstatuses.Get
(
-- * REST Resource
AccountstatusesGetResource
-- * Creating a Request
, accountstatusesGet
, AccountstatusesGet
-- * Request Lenses
, aaXgafv
, aaMerchantId
, aaUploadProtocol
, aaAccessToken
, aaUploadType
, aaAccountId
, aaDestinations
, aaCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.accountstatuses.get@ method which the
-- 'AccountstatusesGet' request conforms to.
-- Request path: content/v2.1/<merchantId>/accountstatuses/<accountId>
-- plus the standard Google API query parameters; responds with JSON.
type AccountstatusesGetResource =
     "content" :>
       "v2.1" :>
         Capture "merchantId" (Textual Word64) :>
           "accountstatuses" :>
             Capture "accountId" (Textual Word64) :>
               QueryParam "$.xgafv" Xgafv :>
                 QueryParam "upload_protocol" Text :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParams "destinations" Text :>
                         QueryParam "callback" Text :>
                           QueryParam "alt" AltJSON :> Get '[JSON] AccountStatus
-- | Retrieves the status of a Merchant Center account. No itemLevelIssues
-- are returned for multi-client accounts.
--
-- /See:/ 'accountstatusesGet' smart constructor.
-- Generated request record; the two 'Textual Word64' fields are the
-- required path captures, everything else is an optional query
-- parameter.  Fields are accessed through the lenses defined below.
data AccountstatusesGet =
  AccountstatusesGet'
    { _aaXgafv :: !(Maybe Xgafv)
    , _aaMerchantId :: !(Textual Word64)
    , _aaUploadProtocol :: !(Maybe Text)
    , _aaAccessToken :: !(Maybe Text)
    , _aaUploadType :: !(Maybe Text)
    , _aaAccountId :: !(Textual Word64)
    , _aaDestinations :: !(Maybe [Text])
    , _aaCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountstatusesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aaXgafv'
--
-- * 'aaMerchantId'
--
-- * 'aaUploadProtocol'
--
-- * 'aaAccessToken'
--
-- * 'aaUploadType'
--
-- * 'aaAccountId'
--
-- * 'aaDestinations'
--
-- * 'aaCallback'
accountstatusesGet
    :: Word64 -- ^ 'aaMerchantId'
    -> Word64 -- ^ 'aaAccountId'
    -> AccountstatusesGet
-- Smart constructor: the required path ids are coerced into 'Textual';
-- every optional query parameter defaults to 'Nothing'.
accountstatusesGet pAaMerchantId_ pAaAccountId_ =
  AccountstatusesGet'
    { _aaXgafv = Nothing
    , _aaMerchantId = _Coerce # pAaMerchantId_
    , _aaUploadProtocol = Nothing
    , _aaAccessToken = Nothing
    , _aaUploadType = Nothing
    , _aaAccountId = _Coerce # pAaAccountId_
    , _aaDestinations = Nothing
    , _aaCallback = Nothing
    }
-- | V1 error format.
-- Generated lens accessors, one per record field.
aaXgafv :: Lens' AccountstatusesGet (Maybe Xgafv)
aaXgafv = lens _aaXgafv (\ s a -> s{_aaXgafv = a})

-- | The ID of the managing account. If this parameter is not the same as
-- accountId, then this account must be a multi-client account and
-- \`accountId\` must be the ID of a sub-account of this account.
aaMerchantId :: Lens' AccountstatusesGet Word64
aaMerchantId
  = lens _aaMerchantId (\ s a -> s{_aaMerchantId = a})
      . _Coerce

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
aaUploadProtocol :: Lens' AccountstatusesGet (Maybe Text)
aaUploadProtocol
  = lens _aaUploadProtocol
      (\ s a -> s{_aaUploadProtocol = a})

-- | OAuth access token.
aaAccessToken :: Lens' AccountstatusesGet (Maybe Text)
aaAccessToken
  = lens _aaAccessToken
      (\ s a -> s{_aaAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
aaUploadType :: Lens' AccountstatusesGet (Maybe Text)
aaUploadType
  = lens _aaUploadType (\ s a -> s{_aaUploadType = a})

-- | The ID of the account.
aaAccountId :: Lens' AccountstatusesGet Word64
aaAccountId
  = lens _aaAccountId (\ s a -> s{_aaAccountId = a}) .
      _Coerce

-- | If set, only issues for the specified destinations are returned,
-- otherwise only issues for the Shopping destination.
aaDestinations :: Lens' AccountstatusesGet [Text]
aaDestinations
  = lens _aaDestinations
      (\ s a -> s{_aaDestinations = a})
      . _Default
      . _Coerce

-- | JSONP
aaCallback :: Lens' AccountstatusesGet (Maybe Text)
aaCallback
  = lens _aaCallback (\ s a -> s{_aaCallback = a})
-- | Wire the request record onto the servant route: the response type
-- is 'AccountStatus' and the call requires the shopping-content OAuth
-- scope.  Optional 'Nothing' fields simply omit their query parameter.
instance GoogleRequest AccountstatusesGet where
        type Rs AccountstatusesGet = AccountStatus
        type Scopes AccountstatusesGet =
             '["https://www.googleapis.com/auth/content"]
        requestClient AccountstatusesGet'{..}
          = go _aaMerchantId _aaAccountId _aaXgafv
              _aaUploadProtocol
              _aaAccessToken
              _aaUploadType
              (_aaDestinations ^. _Default)
              _aaCallback
              (Just AltJSON)
              shoppingContentService
          where go
                  = buildClient
                      (Proxy :: Proxy AccountstatusesGetResource)
                      mempty
| brendanhay/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/Accountstatuses/Get.hs | mpl-2.0 | 5,869 | 0 | 19 | 1,386 | 917 | 531 | 386 | 128 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
import CodeWorld
{-
Copyright 2020 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
import CodeWorld.Message
import Control.Concurrent
import Control.Concurrent.Async
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import qualified Data.ByteString.Char8 as BS
import Data.List
import Data.Maybe
import qualified Data.Text as T
import qualified Network.WebSockets as WS
import Options.Applicative
import System.Clock
import Text.Read
import Text.Regex
import qualified Wuss as Wuss
-- | Open a websocket to the configured game server and run the given
-- client app over it.  TLS ('Wuss') or plain ('Network.WebSockets') is
-- chosen by the @secure@ flag; the Host header is sent explicitly in
-- both cases.
connect :: Config -> WS.ClientApp a -> IO a
connect (Config {..})
  | secure =
    Wuss.runSecureClientWith
      hostname
      (fromIntegral port)
      path
      WS.defaultConnectionOptions
      [("Host", BS.pack hostname)]
  | otherwise =
    WS.runClientWith
      hostname
      port
      path
      WS.defaultConnectionOptions
      [("Host", BS.pack hostname)]
type Timestamp = Double
-- | Events travel on the wire as 'show'n @(Timestamp, Maybe Event)@ pairs.
encodeEvent :: (Timestamp, Maybe Event) -> String
encodeEvent = show
-- | Inverse of 'encodeEvent'; 'Nothing' on unparsable input.
decodeEvent :: String -> Maybe (Timestamp, Maybe Event)
decodeEvent = readMaybe
-- | Send a client message as 'show'n text; when --debug is on, echo it
-- to stdout tagged with the sending thread id.
sendClientMessage :: Config -> WS.Connection -> ClientMessage -> IO ()
sendClientMessage config conn msg = do
  when (debug config) $ do
    tid <- myThreadId
    putStrLn $ show tid ++ " → " ++ show msg
  WS.sendTextData conn (T.pack (show msg))
-- | Receive and parse the next server message; when --debug is on the
-- parsed message is echoed to stdout tagged with the thread id.
-- Fails in IO when the payload is not a readable 'ServerMessage'.
getServerMessage :: Config -> WS.Connection -> IO ServerMessage
getServerMessage config conn = do
  raw <- WS.receiveData conn
  case readMaybe (T.unpack raw) of
    Just parsed -> do
      when (debug config) $ do
        tid <- myThreadId
        putStrLn $ show tid ++ " ← " ++ show parsed
      return parsed
    Nothing -> fail "Invalid server message"
-- | One simulated client.  The first caller finds the shared game slot
-- empty and creates a new game (publishing its id); later callers find
-- the id and join that game.
run :: Config -> MVar (Maybe GameId) -> IO [ServerMessage]
run config game = do
  mgame <- takeMVar game
  case mgame of
    Just gameid -> do
      putMVar game (Just gameid)
      joinGame config gameid
    Nothing -> do
      newGame config game
-- | Create a game on the server, publish the returned id through the
-- shared MVar so the other clients can join, then wait for the start.
newGame :: Config -> MVar (Maybe GameId) -> IO [ServerMessage]
newGame config game =
  connect config $ \conn -> do
    sendClientMessage config conn (NewGame (clients config) "BOT")
    JoinedAs _ gameid <- getServerMessage config conn
    putMVar game (Just gameid)
    waitForStart config conn
-- | Join an existing game by id and wait for it to start.
joinGame :: Config -> GameId -> IO [ServerMessage]
joinGame config gameId =
  connect config $ \conn -> do
    sendClientMessage config conn (JoinGame gameId "BOT")
    JoinedAs _ _ <- getServerMessage config conn
    waitForStart config conn
-- | Discard server messages until 'Started' arrives, then play.
waitForStart :: Config -> WS.Connection -> IO [ServerMessage]
waitForStart config conn = go
  where
    go = do
      m <- getServerMessage config conn
      case m of
        Started {} -> playGame config conn
        _ -> go
-- | Main mirroring loop (runs 'forever', so the @[ServerMessage]@
-- result is never actually produced): every event from player 0 is
-- decoded, optionally inverted (--invert swaps Up/Down and Left/Right),
-- re-stamped, and sent back as this bot's own input.  With --delay the
-- remote timestamp shifted by that many ms is used; otherwise the local
-- elapsed time since the game started.
playGame :: Config -> WS.Connection -> IO [ServerMessage]
playGame config conn = do
  startTime <- getTime Monotonic
  forever $ do
    OutEvent pid eo <- getServerMessage config conn
    when (pid == 0) $
      case decodeEvent eo of
        Just (t, mbEvent) -> do
          let mbEvent' = modify <$> mbEvent
          currentTime <- getTime Monotonic
          let t'
                | Just ms <- delay config = max 0 (t + ms / 1000)
                | otherwise = timeSpecToS (currentTime - startTime)
          sendClientMessage
            config
            conn
            (InEvent (show (t', mbEvent')))
        Nothing -> putStrLn $ "Could not parse event: " ++ eo
  where
    -- The first guarded equation falls through to the specific cases
    -- only when inversion is enabled.
    modify e
      | not (invert config) = e
    modify (KeyPress d) = KeyPress (inv d)
    modify (KeyRelease d) = KeyRelease (inv d)
    modify e = e
    inv "Up" = "Down"
    inv "Down" = "Up"
    inv "Left" = "Right"
    inv "Right" = "Left"
    inv c = c
timeSpecToS ts = fromIntegral (sec ts) + fromIntegral (nsec ts) * 1E-9
-- | Command-line configuration for the bot, parsed by 'opts'.
data Config = Config
  { clients :: Int,          -- ^ number of simulated clients
    invert :: Bool,          -- ^ mirror with opposite directions
    secure :: Bool,          -- ^ use a secure (wss) connection
    delay :: Maybe Double,   -- ^ remote-timestamp offset in ms; Nothing = use local time
    hostname :: String,      -- ^ server hostname
    port :: Int,             -- ^ server port
    path :: String,          -- ^ websocket path
    gameId :: Maybe GameId,  -- ^ game to join; Nothing = create one
    debug :: Bool            -- ^ log every sent/received message
  }
-- | Command-line interface for the bot.  (Fixes: added the missing
-- type signature; corrected the "codeword-game-bot" typo in the
-- header and "milli seconds"/"time stamps" in the --delay help.)
opts :: ParserInfo Config
opts =
  info
    (helper <*> config)
    ( fullDesc <> progDesc "CodeWorld simple bot"
        <> header
             "codeworld-game-bot - a simple mirroring bot for codeworld-gameserver"
    )
  where
    -- Parser for every 'Config' field, in declaration order.
    config :: Parser Config
    config =
      Config
        <$> option
              auto
              ( long "clients" <> short 'c' <> showDefault <> metavar "N"
                  <> value 1
                  <> help "Number of clients to simulate (>=1)"
              )
        <*> switch
              (long "invert" <> showDefault <> help "Return opposite direction")
        <*> switch
              (long "secure" <> short 's' <> help "Use a secure connection")
        <*> optional
              ( option
                  auto
                  ( long "delay" <> showDefault <> metavar "ms"
                      <> help
                           "Use remote timestamp and adjust with this many milliseconds. Default is to use local timestamps. Can be negative."
                  )
              )
        <*> strOption
              ( long "hostname" <> showDefault <> value "0.0.0.0"
                  <> metavar "HOSTNAME"
                  <> help "Hostname"
              )
        <*> option
              auto
              ( long "port" <> showDefault <> metavar "PORT" <> value 9160
                  <> help "Port"
              )
        <*> strOption
              ( long "path" <> showDefault <> metavar "PATH" <> value "/gameserver"
                  <> help "Path"
              )
        <*> optional
              ( strOption
                  ( long "gameid" <> showDefault <> metavar "ID"
                      <> help "The ID of the game to join (4 letters)"
                  )
              )
        <*> switch (long "debug" <> showDefault <> help "Show debugging output")
-- | Parse the command line and run the configured number of bot
-- clients concurrently; they share one MVar carrying the game id.
-- (Fix: removed the dead binding @start <- getTime Monotonic@ —
-- the value was never used; 'playGame' samples its own start time.)
main = do
    config <- execParser opts
    -- Seeded with the game id from the command line, or Nothing, in
    -- which case the first client creates a game (see 'run').
    game <- newMVar (gameId config)
    mapConcurrently id $ replicate (clients config) (run config game)
| google/codeworld | codeworld-game-server/src/Bot.hs | apache-2.0 | 6,425 | 0 | 23 | 1,848 | 1,762 | 856 | 906 | 170 | 9 |
-- * Essentially, Haskell98!
{-# LANGUAGE NoMonomorphismRestriction #-}
-- * Typed tagless-final interpreters for simply-typed lambda-calculus
-- * de Bruijn indices
-- based on the code accompanying the paper by
-- Jacques Carette, Oleg Kiselyov, and Chung-chieh Shan
module TTFdB where
-- * Abstracting over the final interpreter in IntroHOIF.hs
-- This class defines syntax (and its instances, semantics)
-- of our language
-- I could've moved s and z into a separate ``class Var var_repr''
-- Symantics would then have a single form
-- v :: var_repr h a -> repr h a
-- | Syntax (and, via instances, semantics) of the object language:
-- simply-typed lambda calculus with integers, using de Bruijn
-- indices.  @repr@ is indexed by the environment type @h@ and the
-- expression type; the member signatures read like typing rules.
class Symantics repr where
    int :: Int -> repr h Int                -- int literal
    add :: repr h Int -> repr h Int -> repr h Int

    z   :: repr (a,h) a                     -- variables: z and s ... (s z)
    s   :: repr h a -> repr (any,h) a

    lam :: repr (a,h) b -> repr h (a->b)    -- abstraction extends the env
    app :: repr h (a->b) -> repr h a -> repr h b
-- * Like GADT-based, but in lower-case
-- * Like ExpSYM, but repr is of kind * -> * -> *
-- repr is parameterized by the environment (h) and the type
-- of the expression
-- * The types read like the rules of minimal logic
-- For example, z is the axiom stating that assuming A we can get A
-- lam is the inference rule: if assuming A we derive B,
-- then we can derive the implication A->B
-- * Type system of simply-typed lambda-calculus in Haskell98!
-- The signature of the class can be read off as the typing
-- rules for simply-typed lambda-calculus. But the code
-- is Haskell98! So, contrary to the common belief, we do not
-- need dependent types to express the type system of simply
-- typed lambda-calculus. Compare with Term.agda
-- * ``a way to write a typed fold function over a typed term.''
-- as one reviewer of our paper put it
-- * Sample terms and their inferred types
td1 = add (int 1) (int 2)
-- td1 :: (Symantics repr) => repr h Int
td2 = lam (add z z)
-- td2 :: (Symantics repr) => repr h (Int -> Int)
td2o = lam (add z (s z))
-- td2o :: (Symantics repr) => repr (Int, h) (Int -> Int)
-- the inferred type says td2o is open! Needs the env with Int
td3 = lam (add (app z (int 1)) (int 2))
-- td3 :: (Symantics repr) => repr h ((Int -> Int) -> Int)
-- Ill-typed terms are not expressible
-- * td2a = app (int 1) (int 2)
-- Couldn't match expected type `a -> b' against inferred type `Int'
-- Expected type: repr h (a -> b)
-- Inferred type: repr h Int
-- In the first argument of `app', namely `(int 1)'
-- * Embedding all and only typed terms of the object language
-- * in the typed metalanguage
-- Typed object terms are represented as typed Haskell terms
-- * //
-- * Typed and tagless evaluator
-- * object term ==> metalanguage value
-- | Evaluation carrier: a term of object type @a@ in environment @h@
-- is interpreted as the Haskell function @h -> a@.
newtype R h a = R{unR :: h -> a}

-- Meta-circular interpreter: each object construct is run by the
-- corresponding Haskell construct.  'unR' is total, so no run-time
-- tag check can ever fail.
instance Symantics R where
    int x     = R $ const x                  -- literals ignore the env
    add e1 e2 = R $ \h -> (unR e1 h) + (unR e2 h)
    z         = R $ \(x,_) -> x              -- top of the environment
    s v       = R $ \(_,h) -> unR v h        -- skip one binding
    lam e     = R $ \h -> \x -> unR e (x,h)  -- extend env with the argument
    app e1 e2 = R $ \h -> (unR e1 h) (unR e2 h)

eval e = unR e () -- Evaluate in the empty environment: closed terms only
-- * R is not a tag!
-- The expression with unR _looks_ like tag introduction and
-- elimination. But the function unR is *total*. There is no
-- run-time error is possible at all -- and this fact is fully
-- apparent to the compiler.
-- * R is a meta-circular interpreter
-- It runs each object-language operation by executing the corresponding
-- metalanguage operation.
-- * R never gets stuck: no variant types, no pattern-match failure
-- * Well-typed programs indeed don't go wrong!
-- * R is total
-- * The instance R is a constructive proof of type soundness
-- First of all, we see the type preservation (for object types)
-- for interpretations: interpretations preserve types.
-- Then we see progress: the interpreter does not get stuck.
-- So we reduced the problem of type soundness of the object language
-- (simply-typed lambda-calculus) to the type soundness of the
-- metalanguage.
-- * R _looks_ like a reader monad (but h varies)
-- R looks like a reader monad, but the type of the environment
-- varies.
-- Evaluate our test expressions
td1_eval = eval td1
-- 3
td2_eval = eval td2
-- td2_eval :: Int -> Int -- can't print
-- since td2_eval is a function taking Int, we can give it an Int
td2_eval' = eval td2 21
-- 42
-- td2o_eval = eval td2o
-- Can't evaluate the open term
td3_eval = eval td3
-- td3_eval :: (Int -> Int) -> Int
-- * //
-- Another interpreter
-- | Pretty-printing carrier: the Int environment is only the nesting
-- depth of lambdas, used to invent variable names x0, x1, ...
newtype S h a = S{unS :: Int -> String}

instance Symantics S where
    int x     = S $ const $ show x
    add e1 e2 = S $ \h ->
       "(" ++ unS e1 h ++ "+" ++ unS e2 h ++ ")"
    -- A variable prints as the name bound at the matching depth.
    z   = S $ \h -> "x" ++ show (h-1)
    s v = S $ \h -> unS v (h-1)
    lam e = S $ \h ->
       let x = "x" ++ show h
       in "(\\" ++ x ++ " -> " ++ unS e (h+1) ++ ")"
    app e1 e2 = S $ \h ->
       "(" ++ unS e1 h ++ " " ++ unS e2 h ++ ")"
-- This is almost the Reader (environment) Applicative over Int.
-- The interpretation of lam is quite different from that in R:
-- no actual function is built, only a string.

view :: S () a -> String -- Only closed terms can be viewed
view e = unS e 0
td1_view = view td1
-- "(1+2)"
td2_view = view td2
-- "(\\x0 -> (x0+x0))"
td3_view = view td3
-- "(\\x0 -> ((x0 1)+2))"
-- We now finally see our terms in a useful form
-- Clearly, de Bruijn notation is not the most perspicuous
-- We now turn to HOAS
-- Exercise: implement the following extensions
{-
-- * //
-- Extensions of the language
-- * Multiplication
class MulSYM repr where
mul :: repr r Int -> repr r Int -> repr r Int
-- * Booleans
class BoolSYM repr where
bool :: Bool -> repr r Bool -- bool literal
if_ :: repr r Bool -> repr r a -> repr r a -> repr r a
leq :: repr r Int -> repr r Int -> repr r Bool
-- * Fixpoint
class FixSYM repr where
fix :: repr (a,h) a -> repr h a
-- Logically, the signature of fix reads like nonsense
-- The extensions are independent
testpowfix () = lam {- x -} (
fix {- self -} (lam {- n -} (
let n = z; self = s z; x = s (s z)
in if_ (leq n (int 0)) (int 1)
(mul x (app self (add n (int (-1))))))))
testpowfix7 () = lam (app (app (testpowfix ()) z) (int 7))
rtestpw = mkrtest testpowfix
rtestpw7 = mkrtest testpowfix7
rtestpw72 = mkrtest (\() -> app (testpowfix7 ()) (int 2)) -- 128
-- Another interpreter: it interprets each term to give its size
-- (the number of constructors)
-}
-- | Show the sample terms: two evaluated results, then the three
-- pretty-printed views (printed with 'print', so quoted).
main :: IO ()
main = do
    print td1_eval
    print td2_eval'
    mapM_ print [td1_view, td2_view, td3_view]
| egaburov/funstuff | Haskell/tytag/codes3/TTFdB.hs | apache-2.0 | 6,690 | 0 | 14 | 1,719 | 945 | 526 | 419 | 49 | 1 |
{-# LANGUAGE OverloadedLists #-}
module BEDSpec where
import Test.Hspec
import Data.Map
import Text.Parsec.ByteString
import Bio.Motions.BED
import Data.Either(isLeft)
-- | Specs for the BED parser, driven by fixture files under
-- test/testdata.
test :: Spec
test = context "when parsing BED file" $ do
    -- Chromosome names with lengths, plus the name -> index map that
    -- 'parseBED' expects.
    let chrInfos = [("chr1", 50), ("chr2", 40), ("super_chromosome", 20), ("bajtAlina", 7)]
    let chrNums = fromList $ (fst <$> chrInfos) `zip` [0..]
    it "should parse file without errors" $ do
        ansOK <- parseFromFile (parseBED chrNums 1) "test/testdata/1.bed"
        ansOK `shouldBe` Right
            [ BindingSiteInfo {bsChain = 0, bsFrom = 1, bsTo = 9, bsType = 1}
            , BindingSiteInfo {bsChain = 1, bsFrom = 10, bsTo = 20, bsType = 1}
            , BindingSiteInfo {bsChain = 2, bsFrom = 5, bsTo = 18, bsType = 1}
            ]
    it "should not parse file with errors" $ do
        ansErr <- parseFromFile (parseBED chrNums 1) "test/testdata/wrong.bed"
        ansErr `shouldSatisfy` isLeft
    context "when creating a lists of EnergyVectors" $
        it "should calculate energy vectors correctly" $ do
            ans <- parseBEDs 10 chrInfos ["test/testdata/0.bed", "test/testdata/1.bed"]
            ans `shouldBe` [ [[0, 1], [1, 0], [2, 0], [1, 0], [1, 0]]
                           , [[0, 0], [1, 1], [1, 1], [0, 0]]
                           , [[0, 1], [1, 1]]
                           , [[0, 0]]
                           ]
    it "should throw an exception when getting an invalid file" $
        let ansErr = parseBEDs 10 chrInfos ["test/testdata/wrong.bed"] in
        ansErr `shouldThrow` anyIOException

-- | Top-level entry point for this spec module.
spec :: Spec
spec = describe "BED parser" test
| Motions/motions | test/BEDSpec.hs | apache-2.0 | 1,633 | 0 | 15 | 484 | 514 | 299 | 215 | 32 | 1 |
{- Copyright 2014 David Farrell <shokku.ra@gmail.com>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module Config where
import Data.Either.Utils
import Data.ConfigFile.Monadic
import Control.Monad
import Control.Monad.Error
-- | Built-in defaults used when the config file is missing or
-- incomplete: server identity, listen socket, client limits and an
-- empty plugin list.  'forceEither' is safe here because every
-- operation in the chain succeeds on the empty parser.
defaultCP :: ConfigParser
defaultCP = forceEither $ return emptyCP
    -- server identity
    >>= add_section "info"
    >>= set "info" "name" "lambdircd"
    >>= set "info" "network" "LambdaNet"
    >>= set "info" "description" "A lambdircd server"
    -- listening sockets
    >>= add_section "listen"
    >>= set "listen" "addresses" "* 6667"
    >>= set "listen" "queue" "5"
    >>= set "listen" "defer" "30"
    -- per-client limits (timeouts in seconds)
    >>= add_section "client"
    >>= set "client" "connect_timeout" "20"
    >>= set "client" "ping_timeout" "240"
    >>= set "client" "max_channels" "5"
    -- no plugins by default
    >>= add_section "plugins"
    >>= set "plugins" "load" ""
-- | Read the configuration file at the given path, falling back to
-- 'defaultCP' (after printing the error) when it cannot be parsed.
-- NOTE(review): built on the deprecated Control.Monad.Error/ErrorT
-- API; consider migrating to ExceptT.
loadConfig :: String -> IO ConfigParser
loadConfig path = do
    eitherCP <- runErrorT $ join $ liftIO $ readfile path defaultCP
    case eitherCP of
        Left e -> print e >> return defaultCP
        Right cp -> return cp

-- Shared lookup helper.  'forceEither' calls 'error' on a Left
-- result, so this crashes on a missing/mistyped option — presumably
-- acceptable because 'defaultCP' supplies a default for every option
-- used; confirm before adding new lookups.
getConfig' cp sec = forceEither . get cp sec

-- | Fetch a string-valued option.
getConfigString :: ConfigParser -> SectionSpec -> OptionSpec -> String
getConfigString = getConfig'

-- | Fetch a boolean-valued option.
getConfigBool :: ConfigParser -> SectionSpec -> OptionSpec -> Bool
getConfigBool = getConfig'

-- | Fetch an integer-valued option.
getConfigInt :: ConfigParser -> SectionSpec -> OptionSpec -> Int
getConfigInt = getConfig'
| shockkolate/lambdircd | src.old/Config.hs | apache-2.0 | 1,955 | 0 | 20 | 441 | 350 | 172 | 178 | 34 | 2 |
module Sodium.Memory (SodiumBytes) where
import Sodium.Error
import Sodium.FFI
import Control.Monad (foldM_, when, forM_, void)
import Data.ByteArray
import Data.List.NonEmpty (NonEmpty(..))
import Data.Semigroup (Semigroup(..))
import Data.Void
import Foreign.ForeignPtr
import Foreign.Ptr
import System.IO.Unsafe (unsafePerformIO, unsafeDupablePerformIO)
import qualified Data.List.NonEmpty as NE
-- | A byte buffer in libsodium guarded memory: explicit length plus
-- a foreign pointer whose finalizer frees the allocation.
data SodiumBytes = SodiumBytes
    { sbLength :: !Int               -- ^ size in bytes
    , sbPtr :: !(ForeignPtr Void)    -- ^ guarded allocation
    }

-- | Allocate @len@ bytes via the FFI allocator (c_malloc —
-- presumably sodium_malloc; confirm in "Sodium.FFI") and attach the
-- matching free finalizer.  Reports failure through 'sodiumFail'
-- when the allocator returns NULL.
allocSB :: Int -> IO (ForeignPtr Void)
allocSB len = do
    ptr <- c_malloc (fromIntegral len)
    when (ptr == nullPtr) $ sodiumFail "sodium_malloc"
    newForeignPtr c_free_ptr ptr
-- | Length and raw-pointer access, as required by memory consumers.
instance ByteArrayAccess SodiumBytes where
    length = sbLength
    withByteArray ba act =
        withForeignPtr (sbPtr ba) $ act . castPtr

-- | Equality: lengths first, then byte contents via c_memcmp
-- (NOTE(review): presumably sodium_memcmp, i.e. constant-time —
-- confirm in "Sodium.FFI").
instance Eq SodiumBytes where
    (SodiumBytes l1 ptr1) == (SodiumBytes l2 ptr2)
        | l1 /= l2 = False
        | otherwise = unsafeDupablePerformIO $
            withForeignPtr ptr1 $ \p1 ->
            withForeignPtr ptr2 $ \p2 ->
                pure $ c_memcmp p1 p2 (fromIntegral l1) == 0

-- | Lexicographic ordering: compare the common prefix with the
-- standard memcmp, then break ties on length (shorter sorts first).
instance Ord SodiumBytes where
    compare (SodiumBytes l1 ptr1) (SodiumBytes l2 ptr2)
        | cmp < 0 = LT
        | cmp > 0 = GT
        | otherwise = compare l1 l2
      where
        len = min l1 l2
        cmp = unsafeDupablePerformIO $
            withForeignPtr ptr1 $ \p1 ->
            withForeignPtr ptr2 $ \p2 ->
                pure $ std_memcmp p1 p2 (fromIntegral len)
-- | Concatenation: allocate one fresh guarded buffer and memcpy the
-- operands into it.  unsafePerformIO is used because each result is
-- a freshly allocated, never-shared buffer.
instance Semigroup SodiumBytes where
    (SodiumBytes l1 ptr1) <> (SodiumBytes l2 ptr2) = unsafePerformIO $ do
        dest <- allocSB (l1 + l2)
        withForeignPtr dest $ \pdest -> do
            withForeignPtr ptr1 $ \p1 ->
                void $ std_memcpy pdest p1 (fromIntegral l1)
            withForeignPtr ptr2 $ \p2 ->
                void $ std_memcpy (pdest `plusPtr` l1) p2 (fromIntegral l2)
        pure $ SodiumBytes (l1 + l2) dest
    -- One allocation for the whole chain, then a single copy pass.
    sconcat ptrs = unsafePerformIO $ do
        let totalLen = sum $ NE.map sbLength ptrs
        dest <- allocSB totalLen
        withForeignPtr dest $ \pdest ->
            foldM_ (copyBytes pdest) 0 ptrs
        pure $ SodiumBytes totalLen dest
      where
        -- Copy one source block at the running offset; return the
        -- offset for the next block.
        copyBytes pdest offset src =
            withForeignPtr (sbPtr src) $ \psrc -> do
                void $ std_memcpy (pdest `plusPtr` offset) psrc (fromIntegral $ sbLength src)
                pure $ offset + sbLength src
    -- n copies of the block back to back.  NOTE(review): 'stimes' is
    -- only required for positive n; with n <= 0 this produces a
    -- zero-length buffer instead of erroring — confirm intent.
    stimes n ptr = unsafePerformIO $ do
        let intN = fromIntegral n
        let blockSize = sbLength ptr
        let totalLen = intN * blockSize
        dest <- allocSB totalLen
        withForeignPtr dest $ \pdest ->
            withForeignPtr (sbPtr ptr) $ \psrc ->
                forM_ [0 .. intN - 1] $ \i ->
                    std_memcpy (pdest `plusPtr` (i * blockSize)) psrc (fromIntegral blockSize)
        pure $ SodiumBytes totalLen dest
instance Monoid SodiumBytes where
    -- A zero-length guarded allocation serves as the identity.
    mempty = unsafePerformIO $ do
        ptr <- allocSB 0
        pure $ SodiumBytes 0 ptr
    mconcat [] = mempty
    -- Delegate to the single-allocation 'sconcat' above.
    mconcat (x:xs) = sconcat (x :| xs)

instance ByteArray SodiumBytes where
    -- Allocate guarded memory, let the initialiser fill it through
    -- the raw pointer, and return its result with the new buffer.
    allocRet len act = do
        ptr <- allocSB len
        v <- withForeignPtr ptr $ act . castPtr
        pure (v, SodiumBytes len ptr)
| rblaze/private-cloud | src/Sodium/Memory.hs | apache-2.0 | 3,285 | 0 | 19 | 992 | 1,105 | 549 | 556 | 85 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module PrivateCloud.Cloud.Monad
( PrivateCloud
, cloudId
, conflictSuffix
, connection
, context
, dbName
, exclusions
, initCloudSettings
, rootDir
, runPrivateCloud
, runAction
) where
import Control.Exception.Safe
import Control.Monad.Extra (fromMaybeM)
import Control.Monad.IO.Class
import Control.Monad.Trans.Reader
import Database.SQLite.Simple
import Data.ByteArray
import Data.Tagged
import System.FilePath
import System.FilePath.Glob
import qualified Data.Text as T
import PrivateCloud.Cloud.Exception
import PrivateCloud.Provider.Class
dbName :: FilePath
dbName = ".privatecloud"
conflictSuffix :: FilePath
conflictSuffix = ".conflict"
uniqueIdSetting :: String
uniqueIdSetting = "uniqueid"
data CloudContext p = CloudContext
{ ccConnection :: Connection
, ccExclusions :: [Pattern]
, ccCloudId :: T.Text
, ccRoot :: FilePath
, ccContext :: Tagged p (ProviderContext p)
}
newtype PrivateCloud p a = PrivateCloud (ReaderT (CloudContext p) IO a)
deriving (Functor, Applicative, Monad, MonadIO, MonadThrow, MonadCatch)
-- | Run a 'PrivateCloud' computation rooted at @root@.  Opens the
-- SQLite state database inside the root, reads the stored cloud id,
-- obtains the matching credential through @getCreds@, loads the
-- provider context, and finally runs the reader computation.
-- Throws 'ConfigurationError' when the cloud id or the credential
-- is missing.
runPrivateCloud :: forall ba p a. (ByteArrayAccess ba, CloudProvider p) => FilePath -> [Pattern] -> (T.Text -> IO (Maybe ba)) -> PrivateCloud p a -> IO a
runPrivateCloud root excls getCreds (PrivateCloud f) =
    withConnection (root </> dbName) $ \conn -> do
        cloudid <- fromMaybeM (throw $ ConfigurationError "No saved cloudid found") $
            readSetting conn uniqueIdSetting
        creds <- fromMaybeM (throw $ ConfigurationError "No saved credential found") $
            getCreds cloudid
        ctx <- loadContext (Tagged creds :: Tagged p ba)
        runReaderT f CloudContext
            { ccConnection = conn
            , ccExclusions = excls
            , ccCloudId = cloudid
            , ccRoot = root
            , ccContext = Tagged ctx
            }
-- | Create the on-disk state database for a new cloud instance: the
-- local-file cache table, the settings table, and the stored unique
-- cloud id — all within a single transaction.
initCloudSettings :: FilePath -> T.Text -> IO ()
initCloudSettings root uniqueId =
    withConnection (root </> dbName) $ \conn ->
        withTransaction conn $ do
            execute_ conn "CREATE TABLE localFiles (file TEXT PRIMARY KEY NOT NULL, lastSyncedHash TEXT, lastSyncedSize INT, lastSyncedModTime INT)"
            execute_ conn "CREATE TABLE settings (name TEXT PRIMARY KEY NOT NULL, value TEXT)"
            execute conn "INSERT INTO settings (name, value) VALUES (?,?)" (uniqueIdSetting, uniqueId)
-- | Look up one value in the settings table.  Returns 'Nothing' when
-- the name is absent (or, defensively, when the query yields anything
-- other than exactly one row).
readSetting :: Connection -> String -> IO (Maybe T.Text)
readSetting conn name =
    extract <$> query conn "SELECT value FROM settings WHERE name = ?" (Only name)
  where
    extract [Only value] = Just value
    extract _            = Nothing
-- | The open SQLite connection for the current run.
connection :: PrivateCloud p Connection
connection = PrivateCloud (asks ccConnection)

-- | Glob patterns of files excluded from syncing.
exclusions :: PrivateCloud p [Pattern]
exclusions = PrivateCloud (asks ccExclusions)

-- | The unique id of this cloud instance.
cloudId :: PrivateCloud p T.Text
cloudId = PrivateCloud (asks ccCloudId)

-- | The root directory being synced.
rootDir :: PrivateCloud p FilePath
rootDir = PrivateCloud (asks ccRoot)

-- | The provider context, tagged with the provider type.
context :: CloudProvider p => PrivateCloud p (Tagged p (ProviderContext p))
context = PrivateCloud (asks ccContext)

-- | Run a provider action inside 'PrivateCloud' using the stored
-- provider context.
runAction :: CloudProvider p => CloudMonad p b -> PrivateCloud p b
runAction f = do
    ctx <- context
    liftIO $ runCloud ctx f
| rblaze/private-cloud | src/PrivateCloud/Cloud/Monad.hs | apache-2.0 | 3,337 | 0 | 14 | 723 | 874 | 465 | 409 | 82 | 2 |
{- This module provides type-level finite maps.
The implementation is similar to that shown in the paper.
"Embedding effect systems in Haskell" Orchard, Petricek 2014 -}
{-# LANGUAGE TypeOperators, PolyKinds, DataKinds, KindSignatures,
TypeFamilies, UndecidableInstances, MultiParamTypeClasses,
FlexibleInstances, GADTs, FlexibleContexts, ScopedTypeVariables,
ConstraintKinds, IncoherentInstances #-}
module Data.Type.Map (Mapping(..), Union, Unionable, union, append, Var(..), Map(..),
ext, empty, mapLength,
Combine, Combinable(..), Cmp,
Nubable, nub,
Lookup, Member, (:\), Split, split,
IsMember, lookp, Updatable, update,
IsMap, AsMap, asMap,
Sortable, quicksort,
Submap, submap) where
import GHC.TypeLits
import Data.Type.Bool
import Data.Type.Equality
import Data.Type.Set (Cmp, Proxy(..), Flag(..), Sort, Filter, (:++))
{- Throughout, type variables
'k' ranges over "keys"
'v' ranges over "values"
'kvp' ranges over "key-value-pairs"
'm', 'n' range over "maps" -}
-- Mappings
infixr 4 :->

{-| A key-value pair: the building block of the type-level map -}
data Mapping k v = k :-> v

{-| Union of two finite maps: append, sort by key, then merge
    duplicate keys with 'Combine' via 'Nub' -}
type Union m n = Nub (Sort (m :++ n))

{-| Merge adjacent entries with equal keys using 'Combine'
    (assumes the list is already sorted by key) -}
type family Nub t where
    Nub '[]    = '[]
    Nub '[kvp] = '[kvp]
    Nub ((k :-> v1) ': (k :-> v2) ': m) = Nub ((k :-> Combine v1 v2) ': m)
    Nub (kvp1 ': kvp2 ': s) = kvp1 ': Nub (kvp2 ': s)

{-| Open type family for combining two values stored under the same
    key; instances are supplied by users of this module -}
type family Combine (a :: v) (b :: v) :: v

{-| Delete all entries with the given key from a map -}
type family (m :: [Mapping k v]) :\ (c :: k) :: [Mapping k v] where
     '[] :\ k = '[]
     ((k :-> v) ': m) :\ k = m :\ k
     (kvp ': m) :\ k = kvp ': (m :\ k)

{-| Type-level lookup: the value under the first matching key -}
type family Lookup (m :: [Mapping k v]) (c :: k) :: Maybe v where
            Lookup '[] k             = Nothing
            Lookup ((k :-> v) ': m) k = Just v
            Lookup (kvp ': m) k      = Lookup m k

{-| Type-level membership test on keys -}
type family Member (c :: k) (m :: [Mapping k v]) :: Bool where
            Member k '[]             = False
            Member k ((k :-> v) ': m) = True
            Member k (kvp ': m)      = Member k m
-----------------------------------------------------------------
-- Value-level map with a type-level representation
{-| Pair a symbol (representing a variable) with a type -}
data Var (k :: Symbol) = Var
instance KnownSymbol k => Show (Var k) where
show = symbolVal
{-| A value-level heterogenously-typed Map (with type-level representation in terms of lists) -}
data Map (n :: [Mapping Symbol *]) where
Empty :: Map '[]
Ext :: Var k -> v -> Map m -> Map ((k :-> v) ': m)
{-| Smart constructor which normalises the representation -}
ext :: (Sortable ((k :-> v) ': m), Nubable (Sort ((k :-> v) ': m))) => Var k -> v -> Map m -> Map (AsMap ((k :-> v) ': m))
ext k v m = asMap $ Ext k v m
{-| Smart constructor to match `ext` (but doesn't do anything other than wrap Empty) -}
empty :: Map '[]
empty = Empty
{-| Length function -}
mapLength :: Map n -> Int
mapLength Empty = 0
mapLength (Ext _ _ xs) = 1 + mapLength xs
{-| Membership test a type class (predicate) -}
class IsMember v t m where
{-| Value-level lookup of elements from a map, via type class predicate -}
lookp :: Var v -> Map m -> t
instance {-# OVERLAPS #-} IsMember v t ((v ':-> t) ': m) where
lookp _ (Ext _ x _) = x
instance {-# OVERLAPPABLE #-} IsMember v t m => IsMember v t (x ': m) where
lookp v (Ext _ _ m) = lookp v m
{-| Updatability as a type class -}
class Updatable v t m n where
{-| Update a map with `m` at variable `v` with a value of type `t`
to produce a map of type `n` -}
update :: Map m -> Var v -> t -> Map n
instance {-# OVERLAPS #-} Updatable v t ((v ':-> s) ': m) ((v ':-> t) ': m) where
update (Ext v _ m) _ x = Ext v x m
instance Updatable v t m n => Updatable v t ((w ':-> y) ': m) ((w ':-> y) ': n) where
update (Ext w y m) v x = Ext w y (update m v x)
-- instance Updatable v t '[] '[v ':-> t] where
-- update Empty v x = Ext v x Empty
instance Updatable v t s ((v ':-> t) ': s) where
update xs v x = Ext v x xs
{-| Predicate to check if in normalised map form -}
type IsMap s = (s ~ Nub (Sort s))
{-| At the type level, normalise the list form to the map form -}
type AsMap s = Nub (Sort s)
{-| At the value level, noramlise the list form to the map form -}
asMap :: (Sortable s, Nubable (Sort s)) => Map s -> Map (AsMap s)
asMap x = nub (quicksort x)
instance Show (Map '[]) where
show Empty = "{}"
instance (KnownSymbol k, Show v, Show' (Map s)) => Show (Map ((k :-> v) ': s)) where
show (Ext k v s) = "{" ++ show k ++ " :-> " ++ show v ++ show' s ++ "}"
class Show' t where
show' :: t -> String
instance Show' (Map '[]) where
show' Empty = ""
instance (KnownSymbol k, Show v, Show' (Map s)) => Show' (Map ((k :-> v) ': s)) where
show' (Ext k v s) = ", " ++ show k ++ " :-> " ++ show v ++ (show' s)
instance Eq (Map '[]) where
Empty == Empty = True
instance (Eq v, Eq (Map s)) => Eq (Map ((k :-> v) ': s)) where
(Ext Var v m) == (Ext Var v' m') = v == v' && m == m'
instance Ord (Map '[]) where
compare Empty Empty = EQ
instance (Ord v, Ord (Map s)) => Ord (Map ((k :-> v) ': s)) where
compare (Ext Var v m) (Ext Var v' m') = compare v v' `mappend` compare m m'
{-| Union of two finite maps (normalising) -}
union :: (Unionable s t) => Map s -> Map t -> Map (Union s t)
union s t = nub (quicksort (append s t))
type Unionable s t = (Nubable (Sort (s :++ t)), Sortable (s :++ t))
{-| Append of two finite maps (non normalising) -}
append :: Map s -> Map t -> Map (s :++ t)
append Empty x = x
append (Ext k v xs) ys = Ext k v (append xs ys)
type instance Cmp (k :: Symbol) (k' :: Symbol) = CmpSymbol k k'
type instance Cmp (k :-> v) (k' :-> v') = CmpSymbol k k'
{-| Value-level quick sort that respects the type-level ordering -}
class Sortable xs where
quicksort :: Map xs -> Map (Sort xs)
instance Sortable '[] where
quicksort Empty = Empty
instance (Sortable (Filter FMin (k :-> v) xs)
, Sortable (Filter FMax (k :-> v) xs)
, FilterV FMin k v xs
, FilterV FMax k v xs) => Sortable ((k :-> v) ': xs) where
quicksort (Ext k v xs) =
quicksort (less k v xs) `append` Ext k v Empty `append` quicksort (more k v xs)
where
less = filterV (Proxy::(Proxy FMin))
more = filterV (Proxy::(Proxy FMax))
{- Filter out the elements less-than or greater-than-or-equal to the pivot -}
class FilterV (f::Flag) k v xs where
filterV :: Proxy f -> Var k -> v -> Map xs -> Map (Filter f (k :-> v) xs)
instance FilterV f k v '[] where
filterV _ k v Empty = Empty
instance (Conder (Cmp x (k :-> v) == LT), FilterV FMin k v xs) => FilterV FMin k v (x ': xs) where
filterV f@Proxy k v (Ext k' v' xs) =
cond (Proxy::(Proxy (Cmp x (k :-> v) == LT)))
(Ext k' v' (filterV f k v xs))
(filterV f k v xs)
instance
(Conder ((Cmp x (k :-> v) == GT) || (Cmp x (k :-> v) == EQ)), FilterV FMax k v xs)
=> FilterV FMax k v (x ': xs) where
filterV f@Proxy k v (Ext k' v' xs) =
cond (Proxy::(Proxy ((Cmp x (k :-> v) == GT) || (Cmp x (k :-> v) == EQ))))
(Ext k' v' (filterV f k v xs))
(filterV f k v xs)
class Combinable t t' where
combine :: t -> t' -> Combine t t'
class Nubable t where
nub :: Map t -> Map (Nub t)
instance Nubable '[] where
nub Empty = Empty
instance Nubable '[e] where
nub (Ext k v Empty) = Ext k v Empty
instance {-# OVERLAPPABLE #-}
(Nub (e ': f ': s) ~ (e ': Nub (f ': s)),
Nubable (f ': s)) => Nubable (e ': f ': s) where
nub (Ext k v (Ext k' v' s)) = Ext k v (nub (Ext k' v' s))
instance {-# OVERLAPS #-}
(Combinable v v', Nubable ((k :-> Combine v v') ': s))
=> Nubable ((k :-> v) ': (k :-> v') ': s) where
nub (Ext k v (Ext k' v' s)) = nub (Ext k (combine v v') s)
class Conder g where
cond :: Proxy g -> Map s -> Map t -> Map (If g s t)
instance Conder True where
cond _ s t = s
instance Conder False where
cond _ s t = t
{-| Splitting a union of maps, given the maps we want to split it into -}
class Split s t st where
-- where st ~ Union s t
split :: Map st -> (Map s, Map t)
instance Split '[] '[] '[] where
split Empty = (Empty, Empty)
instance {-# OVERLAPPABLE #-} Split s t st => Split (x ': s) (x ': t) (x ': st) where
split (Ext k v st) = let (s, t) = split st
in (Ext k v s, Ext k v t)
instance {-# OVERLAPS #-} Split s t st => Split (x ': s) t (x ': st) where
split (Ext k v st) = let (s, t) = split st
in (Ext k v s, t)
instance {-# OVERLAPS #-} (Split s t st) => Split s (x ': t) (x ': st) where
split (Ext k v st) = let (s, t) = split st
in (s, Ext k v t)
{-| Construct a submap 's' from a supermap 't' -}
class Submap s t where
submap :: Map t -> Map s
instance Submap '[] '[] where
submap xs = Empty
instance {-# OVERLAPPABLE #-} Submap s t => Submap s (x ': t) where
submap (Ext _ _ xs) = submap xs
instance {-# OVERLAPS #-} Submap s t => Submap (x ': s) (x ': t) where
submap (Ext k v xs) = Ext k v (submap xs)
| dorchard/type-level-sets | src/Data/Type/Map.hs | bsd-2-clause | 9,629 | 0 | 16 | 2,792 | 4,055 | 2,140 | 1,915 | 166 | 1 |
module Haskonf.HaskonfSpec (main, spec) where
import Control.Monad (unless)
import Haskonf (appDir, binName, build, copyReal,
doesConfigExist, rebuild, runFrom)
import Paths_haskonf
import System.Environment (getProgName)
import Test.Hspec (Spec, describe, hspec, it, shouldBe)
main :: IO ()
main = hspec spec
pname :: String
pname = "haskonf-usage"
theArgs :: [String]
theArgs =
["--rebuild"] ++ words "you see the result of running configured binary"
spec :: Spec
spec = do
describe "haskonf" $ do
it ("creates an application directory " ++
"and populates it with default config") $ do
real <- getDataFileName "haskonf-usage.hs"
doesConfigExist pname >>= (flip unless) (copyReal pname real)
_ <- mainDo theArgs
-- We don't really test the result of executing the file, we just
-- make sure that binary is compiled based on some configuration
-- and we can run it appropriately and without errors.
True `shouldBe` True
-- | Dispatch on the first argument: "--rebuild" forces a recompile
-- of the user's config before launching; otherwise build only if
-- needed, then launch with the remaining arguments.
mainDo :: [String] -> IO ()
mainDo ("--rebuild":xs) = do
    _ <- rebuild pname
    -- -^- Bad: the discarded result should be checked for build
    -- errors in real applications.
    launch xs
mainDo xs = do
    _ <- build pname
    -- -^- Bad: the discarded result should be checked for build
    -- errors in real applications.
    launch xs

-- | Run the compiled per-user binary out of the application
-- directory, forwarding the given arguments.
launch :: [String] -> IO ()
launch args = do
    dir <- appDir pname
    me <- getProgName
    runFrom me dir (binName pname) args
| manpages/haskonf | test/Haskonf/HaskonfSpec.hs | bsd-2-clause | 1,554 | 0 | 16 | 473 | 366 | 191 | 175 | 35 | 1 |
{-|
Utility functions shared by several modules of "Typechecker".
-}
module Typechecker.Util(TypecheckM
,whenM
,anyM
,allM
,unlessM
,concatMapM
,tcError
,pushError
,tcWarning
,pushWarning
,resolveType
,resolveTypeAndCheckForLoops
,findFormalRefType
,isKnownRefType
,assertSafeTypeArguments
,subtypeOf
,assertDistinctThing
,assertDistinct
,findTrait
,findField
,findMethod
,findMethodWithCalledType
,findCapability
,findVar
,propagateResultType
,unifyTypes
,uniquifyTypeVars
,checkValidUseOfBreak
,checkValidUseOfContinue
,abstractTraitFrom
,isLinearType
,isSubordinateType
,isEncapsulatedType
,isLocalType
,isPassiveType
,isActiveType
,isSharedType
,isAliasableType
,isSharableType
,checkConjunction
,includesMarkerTrait
) where
import Identifiers
import Types as Ty
import AST.AST as AST
import Data.List
import Data.Maybe
import Text.Printf (printf)
import Debug.Trace
import Control.Monad.Reader
import Control.Monad.Except
import Control.Arrow(second)
import Control.Monad.State
-- Module dependencies
import Typechecker.TypeError
import Typechecker.Environment
-- Monadic versions of common functions
-- | Monadic version of 'any'.  Short-circuits: stops running the
-- predicate as soon as one element satisfies it.  (The previous
-- fold-based version kept executing the predicate on every remaining
-- element even after the answer was determined.)
anyM :: (Monad m) => (a -> m Bool) -> [a] -> m Bool
anyM _ [] = return False
anyM p (x:xs) = do
    b <- p x
    if b then return True else anyM p xs

-- | Monadic version of 'all'.  Short-circuits on the first element
-- that fails the predicate.
allM :: (Monad m) => (a -> m Bool) -> [a] -> m Bool
allM _ [] = return True
allM p (x:xs) = do
    b <- p x
    if b then allM p xs else return False
-- | Run the action only when the monadic condition yields True.
whenM :: (Monad m) => m Bool -> m () -> m ()
whenM mcond action = do
    c <- mcond
    when c action

-- | Run the action only when the monadic condition yields False.
unlessM :: (Monad m) => m Bool -> m () -> m ()
unlessM mcond action = do
    c <- mcond
    unless c action
-- | Monadic version of 'find': the first element satisfying the
-- monadic predicate, short-circuiting once a match is found.
findM :: (Monad m) => (a -> m Bool) -> [a] -> m (Maybe a)
findM p = foldr step (return Nothing)
  where
    step x rest = do
        hit <- p x
        if hit then return (Just x) else rest
-- | A version of 'concatMap' that works with a monadic function.
-- Skips the append entirely for actions that yield an empty list.
-- Source: https://hackage.haskell.org/package/extra-1.5/docs/src/Control-Monad-Extra.html
concatMapM :: Monad m => (a -> m [b]) -> [a] -> m [b]
{-# INLINE concatMapM #-}
concatMapM op = foldr step (return [])
  where
    step a rest = do
        chunk <- op a
        if null chunk
            then rest
            else do
                more <- rest
                return (chunk ++ more)
-- | The monad in which all typechecking is performed. A function
-- of return type @TypecheckM Bar@ may read from an 'Environment',
-- accumulate 'TCWarning's in state, and returns a @Bar@ or throws a
-- 'TCError'.
type TypecheckM a =
    forall m . (MonadState [TCWarning] m,
                MonadError TCError m,
                MonadReader Environment m) => m a

-- | Throw a typechecking error decorated with the current backtrace.
tcError err =
    do bt <- asks backtrace
       throwError $ TCError err bt

-- | Push the expression @expr@ onto the backtrace and throw @err@
-- from there, so the error points at @expr@.
pushError expr err = local (pushBT expr) $ tcError err

-- | Record a warning (prepended to the warning-state list) together
-- with the current backtrace.
tcWarning wrn =
    do bt <- asks backtrace
       modify (TCWarning bt wrn:)

-- | Push @expr@ onto the backtrace and record warning @wrn@ there.
pushWarning expr wrn = local (pushBT expr) $ tcWarning wrn

-- Delegate break/continue validation to Typechecker.TypeError.
-- NOTE(review): each composes with 'bt' — presumably projecting the
-- backtrace out of the argument before validating; confirm against
-- the TypeError module.
checkValidUseOfBreak = Typechecker.TypeError.validUseOfBreak . bt
checkValidUseOfContinue = Typechecker.TypeError.validUseOfContinue . bt
-- | @matchTypeParameterLength ty1 ty2@ ensures that the type
-- parameter lists of its two arguments have the same length,
-- throwing 'WrongNumberOfTypeParametersError' otherwise.
matchTypeParameterLength :: Type -> Type -> TypecheckM ()
matchTypeParameterLength ty1 ty2 = do
  let params1 = getTypeParameters ty1
      params2 = getTypeParameters ty2
  unless (length params1 == length params2) $
    tcError $ WrongNumberOfTypeParametersError
              ty1 (length params1) ty2 (length params2)
-- | @resolveType ty@ checks all the components of @ty@, resolving
-- reference types to traits or classes and making sure that any
-- type variables are bound in the current environment.
resolveType :: Type -> TypecheckM Type
resolveType = typeMapM resolveSingleType

-- | Resolve one node of a type tree; 'resolveType' applies this to
-- every component.  One guard per kind of type:
resolveSingleType :: Type -> TypecheckM Type
resolveSingleType ty
  -- Type variables must appear among the bound type parameters;
  -- the bound occurrence is reused (keeping this node's box).
  | isTypeVar ty = do
      params <- asks typeParameters
      case find ((getId ty ==) . getId) params of
        Just ty' -> return $ ty' `withBoxOf` ty
        Nothing -> tcError $ FreeTypeVariableError ty
  -- Reference types are resolved against their declaration; type
  -- synonyms are unfolded before resolving the mode.
  | isRefAtomType ty = do
      res <- resolveRefAtomType ty
      formal <- findFormalRefType ty
      if isTypeSynonym res
      then resolveType res -- Force unfolding of type synonyms
      else resolveMode res formal
  | isCapabilityType ty =
      resolveCapa ty
  -- Strings are kept but flagged as deprecated.
  | isStringType ty = do
      tcWarning StringDeprecatedWarning
      return ty
  -- A bare type synonym may not carry a mode; unfold and re-resolve.
  | isTypeSynonym ty = do
      unless (isModeless ty) $
             tcError $ CannotHaveModeError ty
      let unfolded = unfoldTypeSynonyms ty
      resolveType unfolded
  -- Arrays may not hold stack-bound elements.
  | isArrayType ty = do
      let elementType = getResultType ty
      when (isStackboundType elementType) $
           tcError $ StackboundArrayTypeError elementType
      return ty
  | otherwise = return ty
  where
    -- A capability must be built from known traits, each occurring
    -- at most once.
    resolveCapa t = do
      let traits = typesFromCapability t
      mapM_ resolveSingleTrait traits
      assertDistinctThing "occurrence" "trait" traits
      return t
    resolveSingleTrait t
      | isRefAtomType t = do
          result <- asks $ traitLookup t
          when (isNothing result) $
             tcError $ UnknownTraitError t
      | otherwise =
          tcError $ MalformedCapabilityError t
resolveTypeAndCheckForLoops :: Type -> TypecheckM Type
resolveTypeAndCheckForLoops ty =
evalStateT (typeMapM resolveAndCheck ty) []
where
resolveAndCheck ty
| isRefAtomType ty = do
seen <- get
let tyid = getId ty
when (tyid `elem` seen) $
lift . tcError $ RecursiveTypesynonymError ty
res <- lift $ resolveRefAtomType ty
formal <- lift $ findFormalRefType ty
when (isTypeSynonym res) $ put (tyid : seen)
if isTypeSynonym res
then typeMapM resolveAndCheck res
else lift $ resolveMode res formal
| otherwise = lift $ resolveType ty
-- | Resolve a ref atom type (class type, trait type or typedef)
-- and ensure that it has the correct number type arguments.
-- Throws on arity mismatch or unsafe type arguments; otherwise
-- returns the formal type instantiated with the actual type
-- arguments, carrying over the mode and box of the use site.
resolveRefAtomType :: Type -> TypecheckM Type
resolveRefAtomType ty = do
  formal <- findFormalRefType ty
  matchTypeParameterLength formal ty
  let formalTypeParams = getTypeParameters formal
      actualTypeParams = getTypeParameters ty
  assertSafeTypeArguments formalTypeParams actualTypeParams
  -- Reuse actualTypeParams instead of recomputing getTypeParameters ty.
  let res = formal `setTypeParameters` actualTypeParams
                   `withModeOf` ty
                   `withBoxOf` ty
  return res
-- | Find the formal version of a type with any type parameters of
-- that type uninstantiated. Throws a typechecking error if a formal
-- type is not found or if several matching formal types are
-- found.
findFormalRefType :: Type -> TypecheckM Type
findFormalRefType ty
  | isRefAtomType ty = do
      result <- asks $ refTypeLookup ty
      case result of
        Just [] ->
          tcError $ UnknownRefTypeError ty
        Just [formal] ->
          -- A unique match must live in an explicitly imported namespace.
          case getRefNamespace formal of
            Just ns -> do
              unless (isExplicitNamespace ns) $
                tcError $ UnknownRefTypeError ty
              return formal
            Nothing ->
              error $ "Util.hs: No namespace after resolving type " ++ show ty
        Just l ->
          tcError $ AmbiguousTypeError ty l
        Nothing ->
          tcError $ UnknownNamespaceError (getRefNamespace ty)
  | otherwise = error $ "Util.hs: " ++ Ty.showWithKind ty ++ " isn't a ref-type"
-- | @resolveMode actual formal@ copies the formal type's mode onto
-- a modeless actual type, then checks that the resulting mode is
-- legal: classes may not override or take sharable modes; traits
-- must have a mode and may not take read or sharable modes unless
-- permitted by the formal type.
resolveMode :: Type -> Type -> TypecheckM Type
resolveMode actual formal
  | isModeless actual && not (isModeless formal) =
      resolveMode (actual `withModeOf` formal) formal
  | isClassType actual = do
      when (isModeless formal) $
        unless (isModeless actual) $
          tcError $ CannotHaveModeError actual
      unless (actual `modeSubtypeOf` formal) $
        tcError $ ModeOverrideError formal
      when (isSharableSingleType actual) $
        tcError $ CannotGiveSharableModeError actual
      return actual
  | isTraitType actual = do
      when (isModeless actual) $
        tcError $ ModelessError actual
      unless (hasMinorMode formal || actual `modeSubtypeOf` formal) $
        tcError $ ModeOverrideError formal
      when (isReadSingleType actual) $
        unless (isReadSingleType formal) $
          tcError $ CannotGiveReadModeError actual
      when (isSharableSingleType actual) $
        tcError $ CannotGiveSharableModeError actual
      return actual
  | otherwise =
      error $ "Util.hs: Cannot resolve unknown reftype: " ++ show formal
-- | @assertSafeTypeArguments formals actuals@ checks each actual
-- type argument against its formal counterpart: modeless formals
-- require aliasable arguments, moded formals require sharable
-- arguments or a mode subtype. A warning is emitted for array
-- type arguments. Extra elements of either list are ignored
-- (zipWithM_ semantics).
assertSafeTypeArguments :: [Type] -> [Type] -> TypecheckM ()
assertSafeTypeArguments = zipWithM_ assertSafeTypeArgument
  where
    -- The safety check runs first so that an unsafe argument throws
    -- before the array warning is recorded (matching previous behavior).
    assertSafeTypeArgument formal arg = do
      checkSafety formal arg
      when (isArrayType arg) $
        tcWarning ArrayTypeArgumentWarning
    -- Factored out of both branches to avoid duplicating the warning logic.
    checkSafety formal arg
      | isModeless formal =
          unlessM (isAliasableType arg) $
            tcError $ UnsafeTypeArgumentError formal arg
      | otherwise =
          unlessM (isSharableType arg) $
            unless (arg `modeSubtypeOf` formal) $
              tcError $ UnsafeTypeArgumentError formal arg
-- | Monadic subtyping check. The monad is needed because class
-- types must be expanded to their capabilities via 'findCapability'.
-- Guard order matters: earlier cases take precedence.
subtypeOf :: Type -> Type -> TypecheckM Bool
subtypeOf ty1 ty2
  | isStackboundType ty1 =
      liftM (isStackboundType ty2 &&) $ unbox ty1 `subtypeOf` unbox ty2
  | isArrowType ty1 && isArrowType ty2 = do
      -- Arrows: contravariant in arguments, covariant in result,
      -- and the modes must be compatible.
      let argTys1 = getArgTypes ty1
          argTys2 = getArgTypes ty2
          resultTy1 = getResultType ty1
          resultTy2 = getResultType ty2
      contravariance <- liftM and $ zipWithM subtypeOf argTys2 argTys1
      covariance <- resultTy1 `subtypeOf` resultTy2
      return $ length argTys1 == length argTys2 &&
               ty1 `modeSubtypeOf` ty2 &&
               contravariance && covariance
  | hasResultType ty1 && hasResultType ty2 =
      liftM (ty1 `hasSameKind` ty2 &&) $
            getResultType ty1 `subtypeOf` getResultType ty2
  | isNullType ty1 = return (isNullType ty2 || isRefType ty2)
  | isClassType ty1 && isClassType ty2 =
      return $ ty1 == ty2
  | isClassType ty1 && isCapabilityType ty2 = do
      capability <- findCapability ty1
      capability `capabilitySubtypeOf` ty2
  | isTupleType ty1 && isTupleType ty2 = do
      let argTys1 = getArgTypes ty1
          argTys2 = getArgTypes ty2
      results <- zipWithM subtypeOf argTys1 argTys2
      return $ and results && length argTys1 == length argTys2
  | isAbstractTraitType ty1 && isTraitType ty2 =
      return $ ty1 == abstractTraitFromTraitType ty2
  | isTraitType ty1 && isAbstractTraitType ty2 =
      return $ abstractTraitFromTraitType ty1 == ty2
  | isTraitType ty1 && isTraitType ty2 =
      return $ ty1 `modeSubtypeOf` ty2 &&
               ty1 == ty2
  | isTraitType ty1 && isCapabilityType ty2 = do
      -- A single trait is below a capability if it is below every member.
      let traits = typesFromCapability ty2
      allM (ty1 `subtypeOf`) traits
  | isCapabilityType ty1 && isTraitType ty2 = do
      let traits = typesFromCapability ty1
      anyM (`subtypeOf` ty2) traits
  | isCapabilityType ty1 && isCapabilityType ty2 =
      ty1 `capabilitySubtypeOf` ty2
  | isUnionType ty1 && isUnionType ty2 = do
      let members1 = unionMembers ty1
          members2 = unionMembers ty2
      allM (\ty -> anyM (ty `subtypeOf`) members2) members1
  | isUnionType ty1 = do
      let members1 = unionMembers ty1
      allM (`subtypeOf` ty2) members1
  | isUnionType ty2 = do
      let members2 = unionMembers ty2
      anyM (ty1 `subtypeOf`) members2
  | isBottomType ty1 && (not . isBottomType $ ty2) = return True
  | isNumeric ty1 && isNumeric ty2 =
      return $ ty1 `numericSubtypeOf` ty2
  | otherwise = return (ty1 == ty2)
  where
    -- cap1 <: cap2 if cap2's conjunctions and modes are preserved
    -- and every trait of cap2 is subsumed by some trait of cap1.
    capabilitySubtypeOf cap1 cap2 = do
      let traits1 = typesFromCapability cap1
          traits2 = typesFromCapability cap2
          preservesConjunctions = cap1 `preservesConjunctionsOf` cap2
          preservesModes =
            all (\t1 -> isReadSingleType t1 || isLinearSingleType t1 ||
                        any (`modeSubtypeOf` t1) traits2) traits1
      isSubsumed <- allM (\t2 -> anyM (`subtypeOf` t2) traits1) traits2
      return (preservesConjunctions && preservesModes && isSubsumed)
    preservesConjunctionsOf cap1 cap2 =
      let pairs1 = conjunctiveTypesFromCapability cap1
          pairs2 = conjunctiveTypesFromCapability cap2
      in all (`existsIn` pairs1) pairs2
    existsIn (left, right) =
      any (separates left right)
    -- A pair (l, r) separates left and right if it splits them
    -- into opposite sides (in either orientation).
    separates left right (l, r) =
      all (`elem` l) left && all (`elem` r) right ||
      all (`elem` l) right && all (`elem` r) left
    -- int <: real, and int/uint convert both ways.
    numericSubtypeOf ty1 ty2
      | isIntType ty1 && isRealType ty2 = True
      | isIntType ty1 && isUIntType ty2 = True
      | isUIntType ty1 && isIntType ty2 = True
      | otherwise = ty1 == ty2
-- | Two types are equivalent when each is a subtype of the other.
-- Both directions are always checked (no short-circuiting), matching
-- the effect order of the original implementation.
equivalentTo :: Type -> Type -> TypecheckM Bool
equivalentTo ty1 ty2 = do
  forward  <- ty1 `subtypeOf` ty2
  backward <- ty2 `subtypeOf` ty1
  return (forward && backward)
-- | Check whether @ty@ includes the marker trait @trait@, looking
-- through class capabilities and capability compositions.
includesMarkerTrait :: Type -> Type -> TypecheckM Bool
includesMarkerTrait ty trait
  | isTraitType ty = return (ty == trait)
  | isClassType ty = do
      capability <- findCapability ty
      includesMarkerTrait capability trait
  | isCapabilityType ty =
      anyM (`includesMarkerTrait` trait) (typesFromCapability ty)
  | otherwise = return False
-- | Convenience function for asserting distinctness of a list of
-- things. @assertDistinct "declaration" "field" [f : Foo, f :
-- Bar]@ will throw an error with the message "Duplicate
-- declaration of field 'f'".
assertDistinctThing :: (Eq a, Show a) =>
                       String -> String -> [a] -> TypecheckM ()
assertDistinctThing something kind l =
  let
    duplicates = l \\ nub l
    -- 'head' is safe: only forced under the non-null guard below.
    duplicate = head duplicates
  in
    unless (null duplicates) $
      tcError $ DuplicateThingError something (kind ++ " " ++ show duplicate)
-- | Convenience function for asserting distinctness of a list of
-- things that @HasMeta@ (and thus knows how to print its own
-- kind). @assertDistinct "declaration" [f : Foo, f : Bar]@ will
-- throw an error with the message "Duplicate declaration of field
-- 'f'".
assertDistinct :: (Eq a, AST.HasMeta a) =>
                  String -> [a] -> TypecheckM ()
assertDistinct something l =
  let
    duplicates = l \\ nub l
    -- 'head' is safe: only forced under the non-null guard below.
    first = head duplicates
  in
    unless (null duplicates) $
      tcError $ DuplicateThingError something (AST.showWithKind first)
-- | Look up the declaration of trait @t@, throwing on unknown,
-- ambiguous or unresolvable namespaces.
findTrait :: Type -> TypecheckM TraitDecl
findTrait t = do
  result <- asks $ traitLookup t
  case result of
    Just [] ->
      tcError $ UnknownTraitError t
    Just [tdecl] ->
      return tdecl
    Just l ->
      tcError $ AmbiguousTypeError t (map tname l)
    Nothing ->
      tcError $ UnknownNamespaceError (getRefNamespace t)
-- | @True@ iff every ref-type component of @ty@ resolves to a
-- unique declaration in an explicit namespace. Throws only on
-- ambiguity; unknown types yield @False@.
isKnownRefType :: Type -> TypecheckM Bool
isKnownRefType ty
  | isRefAtomType ty = do
      result <- asks $ refTypeLookup ty
      case result of
        Just [] -> return False
        Just [ref] -> return $ maybe False isExplicitNamespace
                                    (getRefNamespace ref)
        Just l -> tcError $ AmbiguousTypeError ty l
        Nothing -> return False
  | isCapabilityType ty = do
      let traits = typesFromCapability ty
      results <- mapM isKnownRefType traits
      return $ and results
  | isUnionType ty = do
      let members = unionMembers ty
      results <- mapM isKnownRefType members
      return $ and results
  | otherwise = return True
-- | Look up field @f@ of type @ty@, requiring @ty@ to be known.
findField :: Type -> Name -> TypecheckM FieldDecl
findField ty f = do
  isKnown <- isKnownRefType ty
  unless isKnown $
    tcError $ UnknownTypeUsageError "access field of" ty
  result <- asks $ fieldLookup ty f
  case result of
    Just fdecl -> return fdecl
    Nothing -> tcError $ FieldNotFoundError f ty
-- | Like 'findMethodWithCalledType', discarding the called type.
findMethod :: Type -> Name -> TypecheckM FunctionHeader
findMethod ty = liftM fst . findMethodWithCalledType ty
-- | Look up method @name@ on @ty@, also returning the type the
-- method is actually called on. For union types, all members must
-- agree on the called type.
findMethodWithCalledType :: Type -> Name -> TypecheckM (FunctionHeader, Type)
findMethodWithCalledType ty name
  | isUnionType ty = do
      let members = unionMembers ty
      -- NOTE(review): 'head' assumes a union has at least one member —
      -- presumably an invariant of union construction; confirm.
      results <- mapM (`findMethodWithCalledType` name) members
      let result@(_, calledType) = head results
      unless (all (==calledType) (map snd results)) $
        tcError $ UnionMethodAmbiguityError ty name
      return result
  | otherwise = do
      isKnown <- isKnownRefType ty
      unless isKnown $
        tcError $ UnknownTypeUsageError "call method on" ty
      result <- asks $ methodAndCalledTypeLookup ty name
      when (isNothing result) $
        tcError $ MethodNotFoundError name ty
      -- fromJust is safe: the Nothing case throws just above.
      return $ fromJust result
-- | Look up the capability of @ty@. A missing capability is a
-- compiler bug, not a user error.
findCapability :: Type -> TypecheckM Type
findCapability ty = do
  result <- asks $ capabilityLookup ty
  return $ fromMaybe err result
  where
    err = error $ "Util.hs: No capability in " ++ Ty.showWithKind ty
-- | Resolve variable @x@ to its qualified name and type. Returns
-- 'Nothing' when the name is simply unbound; throws on ambiguity
-- or an unknown namespace.
findVar :: QualifiedName -> TypecheckM (Maybe (QualifiedName, Type))
findVar x = do
  result <- asks $ varLookup x
  case result of
    Just [] ->
      return Nothing
    Just [qvar] ->
      return (Just qvar)
    Just l ->
      tcError $ AmbiguousNameError x l
    Nothing ->
      tcError $ UnknownNamespaceError (qnspace x)
-- | Return the traits included in the capability of class type @ty@.
-- Calling this on a non-class type is a compiler bug.
getImplementedTraits :: Type -> TypecheckM [Type]
getImplementedTraits ty
  | isClassType ty = do
      capability <- findCapability ty
      return $ typesFromCapability capability
  | otherwise =
      -- Internal error message now names this module; it previously
      -- blamed "Types.hs", unlike every other error in Util.hs.
      error $ "Util.hs: Can't get implemented traits of type " ++ show ty
-- | Push the result type @ty@ down into the result position(s) of
-- expression @e@: bodies of binders/loops, match handlers, the last
-- expression of a sequence, and both branches of a conditional.
-- Also stamps @ty@ onto @e@ itself.
propagateResultType :: Type -> Expr -> Expr
propagateResultType ty e
  | hasResultingBody e =
      let body' = propagateResultType ty (body e)
      in setType ty e{body = body'}
  | Match{clauses} <- e =
      let clauses' = map propagateMatchClause clauses
      in setType ty e{clauses = clauses'}
  | Seq{eseq} <- e =
      let result = propagateResultType ty (last eseq)
      in setType ty e{eseq = init eseq ++ [result]}
  | IfThenElse{thn, els} <- e =
      setType ty e{thn = propagateResultType ty thn
                  ,els = propagateResultType ty els}
  | otherwise = setType ty e
  where
    hasResultingBody TypedExpr{} = True
    hasResultingBody Let{} = True
    hasResultingBody While{} = True
    hasResultingBody For{} = True
    hasResultingBody _ = False
    propagateMatchClause mc@MatchClause{mchandler} =
      mc{mchandler = propagateResultType ty mchandler}
-- | Can @ty@ take part in a union? Classes must have a capability
-- that is not the incapability.
-- NOTE(review): no explicit type signature; presumably
-- @typeIsUnifiable :: Type -> TypecheckM Bool@ — consider adding one.
typeIsUnifiable ty
  | isClassType ty = do
      capability <- findCapability ty
      return $ not (isIncapability capability)
  | isCapabilityType ty = return $ not (isIncapability ty)
  | otherwise =
      return $
        isUnionType ty ||
        isNullType ty ||
        isBottomType ty
-- | Can @ty@ be unified with each member of @types@?
-- NOTE(review): no explicit type signature; presumably
-- @isUnifiableWith :: Type -> [Type] -> TypecheckM Bool@.
isUnifiableWith ty types
  | isArrowType ty = return False
  | hasResultType ty &&
    all hasResultType types &&
    all (hasSameKind ty) types =
      isUnifiableWith (getResultType ty) (map getResultType types)
  | isClassType ty = do
      capability <- findCapability ty
      if isIncapability capability
      then return $ all (==ty) types
      else allM typeIsUnifiable types
  | otherwise = do
      tyUniable <- typeIsUnifiable ty
      tysUniable <- allM typeIsUnifiable types
      return $ tyUniable && tysUniable &&
               not (isNullType ty) && not (isBottomType ty)
-- | Try to unify @tys@ into a single type: pick a unifiable seed,
-- fold the rest into a union, then shrink to a least upper bound
-- member when one exists.
unifyTypes :: [Type] -> TypecheckM (Maybe Type)
unifyTypes tys = do
  result <- findM (`isUnifiableWith` tys) tys
  case result of
    Just ty -> do
      union <- doUnifyTypes ty tys
      liftM Just $ lub union
    Nothing ->
      return Nothing
  where
    -- Prefer a single member that all members are subtypes of.
    lub union = do
      let members = unionMembers union
      bounds <- filterM (\t -> allM (`subtypeOf` t) members) members
      if null bounds
      then return union
      else return $ head bounds
-- | Fold types into an accumulating union @inter@. Null and bottom
-- types are absorbed; class types are expanded to capabilities;
-- union members are flattened.
doUnifyTypes :: Type -> [Type] -> TypecheckM Type
doUnifyTypes inter [] = return inter
doUnifyTypes inter args@(ty:tys)
  | hasResultType inter = do
      let res = getResultType inter
          args' = map getResultType args
      res' <- doUnifyTypes res args'
      return $ setResultType inter res'
  | isNullType ty =
      doUnifyTypes inter tys
  | isBottomType ty =
      doUnifyTypes inter tys
  | isClassType ty =
      if ty == inter
      then doUnifyTypes inter tys
      else do
        cap <- findCapability ty
        doUnifyTypes inter (cap:tys)
  | isClassType inter = do
      cap <- findCapability inter
      doUnifyTypes cap (ty:tys)
  | isCapabilityType ty = do
      let members = unionMembers inter
      isSubsumed <- anyM (ty `equivalentTo`) members
      if isSubsumed
      then doUnifyTypes inter tys
      else do
        unlessM (anyM (\t -> allM (`subtypeOf` t) members)
                      (typesFromCapability ty)) $
          tcError $ MalformedUnionTypeError ty inter
        doUnifyTypes (unionType inter ty) tys
  | isUnionType ty =
      doUnifyTypes inter (unionMembers ty ++ tys)
  | otherwise =
      error "Util.hs: Tried to form an union without a capability"
-- | Rename all type variables of a type that would otherwise
-- shadow variables already in scope.
uniquifyTypeVars :: [Type] -> Type -> TypecheckM Type
uniquifyTypeVars params = typeMapM (uniquifyTypeVar params)
-- | Rename a single type variable when it is one of @params@ and
-- clashes with a local or bound type variable.
uniquifyTypeVar :: [Type] -> Type -> TypecheckM Type
uniquifyTypeVar params ty
  | isTypeVar ty = do
      localTypeVars <- asks typeParameters
      boundTypeVars <- map fst <$> asks bindings
      if ty `elem` params && (ty `elem` localTypeVars || ty `elem` boundTypeVars)
      then uniquify ty
      else return ty
  | otherwise = return ty
  where
    -- fromJust is safe: 'candidates' is infinite, and the in-scope
    -- variable lists are finite, so 'find' always succeeds.
    uniquify :: Type -> TypecheckM Type
    uniquify ty = do
      localTypeVars <- asks typeParameters
      boundTypeVars <- map fst <$> asks bindings
      let candidates = map (appendToTypeVar ty) [0..]
      return $ fromJust $
             find (`notElem` localTypeVars ++ boundTypeVars) candidates
    -- Note: the local 'id' shadows Prelude.id.
    appendToTypeVar ty i =
      let id = getId ty
          id' = id ++ show i
      in typeVar id'
-- | A field is a safe val field when it is declared val and its
-- type is sharable.
isSafeValField :: FieldDecl -> TypecheckM Bool
isSafeValField field@Field{ftype} = do
  sharable <- isSharableType ftype
  return (isValField field && sharable)
-- | @abstractTraitFrom cname (t, exts)@ builds the declaration of
-- an abstract trait for class @cname@ from trait @t@ extended with
-- @exts@: the extension's fields and methods become requirements,
-- existing requirements are resolved against @t@, and the trait's
-- methods are concretized with @t@'s type arguments.
abstractTraitFrom :: Type -> (Type, [TraitExtension]) -> TypecheckM TraitDecl
abstractTraitFrom cname (t, exts) = do
  tdecl@Trait{tname, treqs, tmethods} <- findTrait t
  let bindings = zip (getTypeParameters tname) (getTypeParameters t)
      (fieldNames, methodNames) = partitionTraitExtensions exts
  fields <- mapM (findField cname) fieldNames
  checkLocalFields t fields
  fields' <- checkReadFields t fields
  methods <- mapM (findMethod cname) methodNames
  treqs' <- mapM (resolveReq t) treqs
  let newReqs = treqs' ++ map RequiredField fields' ++ map RequiredMethod methods
      tmethods' = map (concretizeMethod bindings) tmethods
  return tdecl{treqs = newReqs
              ,tname = t
              ,tmethods = tmethods'}
  where
    -- Re-resolve a requirement's field or method against the trait.
    resolveReq trait r@RequiredField{rfield = Field{fname}} = do
      rfield' <- findField trait fname
      return r{rfield = rfield'}
    resolveReq trait r@RequiredMethod{rheader} = do
      rheader' <- findMethod trait (hname rheader)
      return r{rheader = rheader'}
    -- Substitute formal type parameters by actual type arguments
    -- in a method header.
    concretizeMethod :: [(Type, Type)] -> MethodDecl -> MethodDecl
    concretizeMethod bindings m =
      let mheader' = replaceHeaderTypes bindings (mheader m)
      in m{mheader = mheader'}
    -- Read traits may only be extended with aliasable fields, which
    -- are then forced to val.
    checkReadFields t fields
      | isReadSingleType t = do
          unsafeFields <- filterM (liftM not . isAliasableType . ftype) fields
          let unsafeField = head unsafeFields
          unless (null unsafeFields) $
            tcError $ NonSafeInExtendedReadTraitError
                      t (fname unsafeField) (ftype unsafeField)
          return $ map (\f -> f{fmut = Val}) fields
      | otherwise = return fields
    -- Only local or active traits may be extended with thread-local
    -- fields.
    checkLocalFields t fields =
      unless (isLocalSingleType t || isActiveSingleType t) $ do
        localFields <- filterM (isLocalType . ftype) fields
        unless (null localFields) $
          tcError $ ThreadLocalFieldExtensionError
                    t (head localFields)
-- | @partly isKind ty@ is true when some component of @ty@
-- satisfies @isKind@ (existential traversal over capabilities,
-- unions, classes, result types and tuples).
partly :: (Type -> TypecheckM Bool) -> Type -> TypecheckM Bool
partly isKind ty
  | isCompositeType ty
  , traits <- typesFromCapability ty
  = anyM (partly isKind) traits
  | isUnionType ty
  , tys <- unionMembers ty
  = anyM (partly isKind) tys
  | isClassType ty = do
      capability <- findCapability ty
      capIsPartly <- partly isKind capability
      tyIsKind <- isKind ty
      return $ tyIsKind || capIsPartly
  | hasResultType ty &&
    not (isArrowType ty) =
      partly isKind (getResultType ty)
  | isTupleType ty =
      anyM (partly isKind) (getArgTypes ty)
  | otherwise = isKind ty
-- | @fully isKind ty@ is true when every component of @ty@
-- satisfies @isKind@ (universal counterpart of 'partly'; classes
-- are satisfied by either themselves or their capability).
fully :: (Type -> Bool) -> Type -> TypecheckM Bool
fully isKind ty
  | isCompositeType ty
  , traits <- typesFromCapability ty
  = allM (fully isKind) traits
  | isUnionType ty
  , tys <- unionMembers ty
  = allM (fully isKind) tys
  | isClassType ty = do
      capability <- findCapability ty
      liftM (isKind ty ||) (fully isKind capability)
  | hasResultType ty &&
    not (isArrowType ty) =
      fully isKind (getResultType ty)
  | isTupleType ty =
      allM (fully isKind) (getArgTypes ty)
  | otherwise = return $ isKind ty
-- | Some component is linear.
isLinearType :: Type -> TypecheckM Bool
isLinearType = partly (return . isLinearSingleType)
-- | Some component is subordinate.
isSubordinateType :: Type -> TypecheckM Bool
isSubordinateType = partly (return . isSubordinateSingleType)
-- | Every component is subordinate.
isEncapsulatedType :: Type -> TypecheckM Bool
isEncapsulatedType = fully isSubordinateSingleType
-- | Some component is thread-local, directly or through the type
-- parameters of a passive ref type. The @checked@ list breaks
-- cycles through recursive type parameters.
isLocalType :: Type -> TypecheckM Bool
isLocalType = partly (isLocalType' [])
  where
    isLocalType' :: [Type] -> Type -> TypecheckM Bool
    isLocalType' checked ty
      | ty `elem` checked = return False
      | otherwise = do
          holdsLocal <- holdsLocalData checked ty
          return $ isLocalSingleType ty || holdsLocal
    holdsLocalData :: [Type] -> Type -> TypecheckM Bool
    holdsLocalData checked ty
      | isPassiveRefType ty && isRefAtomType ty &&
        not (isUnsafeSingleType ty) && ty `notElem` checked =
          anyM (isLocalType' (ty:checked)) $ getTypeParameters ty
      | otherwise = return False
-- | Is @ty@ (fully) passive? Modeless classes defer to their
-- capability.
isPassiveType :: Type -> TypecheckM Bool
isPassiveType ty
  | isClassType ty && isModeless ty = do
      capability <- findCapability ty
      isPassiveType capability
  | isClassType ty =
      return $ isPassiveRefType ty
  | isCapabilityType ty =
      fully isPassiveRefType ty
  | isUnionType ty
  , tys <- unionMembers ty
  = allM isPassiveType tys
  | otherwise = return False
-- | Is @ty@ (fully) active? Same shape as 'isPassiveType'.
isActiveType :: Type -> TypecheckM Bool
isActiveType ty
  | isClassType ty && isModeless ty = do
      capability <- findCapability ty
      isActiveType capability
  | isClassType ty =
      return $ isActiveSingleType ty
  | isCapabilityType ty =
      fully isActiveSingleType ty
  | isUnionType ty
  , tys <- unionMembers ty
  = allM isActiveType tys
  | otherwise = return False
-- | Is @ty@ (fully) shared? Same shape as 'isPassiveType'.
isSharedType :: Type -> TypecheckM Bool
isSharedType ty
  | isClassType ty && isModeless ty = do
      capability <- findCapability ty
      isSharedType capability
  | isClassType ty =
      return $ isSharedSingleType ty
  | isCapabilityType ty =
      fully isSharedSingleType ty
  | isUnionType ty
  , tys <- unionMembers ty
  = allM isSharedType tys
  | otherwise = return False
-- | May values of @ty@ be freely shared between concurrent
-- contexts? Modeless arrows, primitives, ranges, C types and the
-- incapability are sharable; otherwise the mode decides.
isSharableType :: Type -> TypecheckM Bool
isSharableType ty
  | isArrowType ty = return $ isModeless ty
  | hasResultType ty = isSharableType $ getResultType ty
  | isTupleType ty = allM isSharableType $ getArgTypes ty
  | isCompositeType ty
  , traits <- typesFromCapability ty = allM isSharableType traits
  | isClassType ty && isModeless ty = do
      capability <- findCapability ty
      isSharableType capability
  | isModeless ty =
      return $ isPrimitive ty
            || isRangeType ty
            || isCType ty
            || isIncapability ty
  | otherwise = return $ hasSharableMode ty
-- | Does @ty@ contain an unsafe component?
isUnsafeType :: Type -> TypecheckM Bool
isUnsafeType ty
  | isClassType ty = do
      capability <- findCapability ty
      capIsUnsafe <- isUnsafeType capability
      return $ isUnsafeSingleType ty || capIsUnsafe
  | otherwise = return $
      any isUnsafeSingleType $ typeComponents ty
-- | May values of @ty@ be aliased? Non-linear arrows, sharable or
-- local types, and (suitably moded) type variables are aliasable.
isAliasableType :: Type -> TypecheckM Bool
isAliasableType ty
  | isArrowType ty = return . not $ isLinearSingleType ty
  | hasResultType ty = isAliasableType $ getResultType ty
  | isTupleType ty = allM isAliasableType $ getArgTypes ty
  | otherwise =
      anyM (\f -> f ty)
           [isSharableType
           ,isLocalType
           ,\t -> return $
                  isTypeVar t && (isModeless t || hasSharableMode t)
           ]
-- | Check that splitting @source@ into the @sinks@ respects the
-- conjunctions declared in @source@'s capability.
checkConjunction :: Type -> [Type] -> TypecheckM ()
checkConjunction source sinks
  | isCompositeType source = do
      let sourceConjunctions = conjunctiveTypesFromCapability source
      mapM_ (\ty -> wellFormedConjunction sourceConjunctions
                                          (sinks \\ [ty]) ty) sinks
  | isClassType source = do
      cap <- findCapability source
      when (isIncapability cap) $
        tcError $ CannotUnpackError source
      when (source `elem` sinks) $
        tcError $ CannotInferUnpackingError source
      checkConjunction cap sinks
  | isTraitType source =
      whenM (isLinearType source) $
        tcError $ DuplicatingSplitError source
  | otherwise =
      tcError $ UnsplittableTypeError source
  where
    -- Each sink must be separated from all of its siblings by some
    -- conjunction of the source capability. The 'head's are safe:
    -- they are only forced under the corresponding non-null guards.
    wellFormedConjunction pairs siblings ty = do
      when (null pairs) $
        tcError $ MalformedConjunctionError ty (head siblings) source
      let nonDisjoints =
            filter (\ty' -> all (not . singleConjunction ty ty') pairs) siblings
          nonDisjoint = head nonDisjoints
      unless (null nonDisjoints) $
        tcError $ MalformedConjunctionError ty nonDisjoint source
    singleConjunction ty1 ty2 (tys1, tys2) =
      ty1 `elem` tys1 && ty2 `elem` tys2 ||
      ty1 `elem` tys2 && ty2 `elem` tys1
| Paow/encore | src/types/Typechecker/Util.hs | bsd-3-clause | 31,062 | 0 | 20 | 9,090 | 9,136 | 4,324 | 4,812 | -1 | -1 |
import Test.QuickCheck
import Lib
-- | Run the QuickCheck property suite.
main :: IO ()
main = quickCheck prop_functor
-- | Functor law under test: mapping @functor@ after @f@ agrees with
-- @g@ after @functor@ pointwise (names imported from "Lib").
prop_functor :: CatA -> Bool
prop_functor x = (functor . f) x == (g . functor) x
-- | Uniform generator over the constructors of 'CatA'.
instance Arbitrary CatA where
  arbitrary = elements [AA, AB]
-- | Uniform generator over the constructors of 'CatB'.
instance Arbitrary CatB where
  arbitrary = elements [BA, BB]
| matthewfranglen/category-set-graph | test/Spec.hs | bsd-3-clause | 300 | 0 | 8 | 65 | 115 | 60 | 55 | 11 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Real.PkgAesonTH where
import Real.Types
import Data.Aeson as Aeson
import Data.Aeson.TH as Aeson
import Data.ByteString.Lazy as BS
import Data.Maybe
-- | Encode a list of package descriptions as a lazy JSON 'BS.ByteString'.
serialise :: [GenericPackageDescription] -> BS.ByteString
serialise = Aeson.encode
-- | Decode a lazy JSON 'BS.ByteString' back into package descriptions.
-- Fails with a descriptive error on malformed input; the previous
-- 'fromJust' only reported "Maybe.fromJust: Nothing".
deserialise :: BS.ByteString -> [GenericPackageDescription]
deserialise =
  fromMaybe (error "Real.PkgAesonTH.deserialise: malformed JSON input")
    . Aeson.decode'
-- Derive 'ToJSON'/'FromJSON' instances (with 'defaultOptions') for
-- the whole GenericPackageDescription type hierarchy via Template
-- Haskell.
deriveJSON defaultOptions ''Version
deriveJSON defaultOptions ''PackageName
deriveJSON defaultOptions ''PackageId
deriveJSON defaultOptions ''VersionRange
deriveJSON defaultOptions ''Dependency
deriveJSON defaultOptions ''CompilerFlavor
deriveJSON defaultOptions ''License
deriveJSON defaultOptions ''SourceRepo
deriveJSON defaultOptions ''RepoKind
deriveJSON defaultOptions ''RepoType
deriveJSON defaultOptions ''BuildType
deriveJSON defaultOptions ''Library
deriveJSON defaultOptions ''Executable
deriveJSON defaultOptions ''TestSuite
deriveJSON defaultOptions ''TestSuiteInterface
deriveJSON defaultOptions ''TestType
deriveJSON defaultOptions ''Benchmark
deriveJSON defaultOptions ''BenchmarkInterface
deriveJSON defaultOptions ''BenchmarkType
deriveJSON defaultOptions ''BuildInfo
deriveJSON defaultOptions ''ModuleName
deriveJSON defaultOptions ''Language
deriveJSON defaultOptions ''Extension
deriveJSON defaultOptions ''KnownExtension
deriveJSON defaultOptions ''PackageDescription
deriveJSON defaultOptions ''OS
deriveJSON defaultOptions ''Arch
deriveJSON defaultOptions ''Flag
deriveJSON defaultOptions ''FlagName
deriveJSON defaultOptions ''CondTree
deriveJSON defaultOptions ''ConfVar
deriveJSON defaultOptions ''Condition
deriveJSON defaultOptions ''GenericPackageDescription
| thoughtpolice/binary-serialise-cbor | bench/Real/PkgAesonTH.hs | bsd-3-clause | 1,712 | 0 | 6 | 150 | 423 | 189 | 234 | 45 | 1 |
-- A class used to keep track of file positions.
module Language.Scala.Position
( Index
, HasPosition (..)
, Position
, Positioned (..)
, noPosition
, lineNumber
, columnNumber
, startPositionInFile
, advanceLine
, advanceColumn
, nextLine
, nextColumn
, nextTab
, tabWidth
) where
------------------------------------------------------------------------
import Data.Functor
import Language.Scala.Util
------------------------------------------------------------------------
infixl 8 :@
------------------------------------------------------------------------
-- | Types that carry a source 'Position' and allow it to be mapped.
class HasPosition a where
  position :: a -> Position
  pmap :: (Position -> Position) -> a -> a
------------------------------------------------------------------------
-- Note that we store line position separately for efficiency,
-- since it changes much less often than column positions!
data Position = Pos LinePosition !Index deriving (Eq, Ord)
data LinePosition = LPos FilePath !Index deriving (Eq, Ord)
-- Renders as "file:line:column", omitting line/column when not positive.
instance Show Position where
  showsPrec _ (Pos (LPos fp ln) cn) = showString fp . showLine
    where
      showLine = if ln > 0 then showChar ':' . shows ln . showColumn else id
      showColumn = if cn > 0 then showChar ':' . shows cn else id
instance HasPosition Position where
  position = id
  pmap f = f
------------------------------------------------------------------------
-- | A value annotated with its source position.
data Positioned a = !a :@ !Position deriving (Eq, Ord, Show)
instance HasPosition (Positioned a) where
  position (_ :@ p) = p
  pmap f (x :@ p) = x :@ f p
instance HasValue Positioned where
  value (v :@ _) = v
instance Functor Positioned where
  fmap f (x :@ p) = f x :@ p
  x <$ (_ :@ p) = x :@ p
------------------------------------------------------------------------
-- | The absent position: empty file name, line and column 0.
noPosition :: Position
noPosition = Pos (LPos "" 0) 0
lineNumber :: Position -> Index
lineNumber (Pos (LPos _ ln) _) = ln
columnNumber :: Position -> Index
columnNumber (Pos (LPos _ _) cn) = cn
-- | Line 1, column 1 of the given file.
startPositionInFile :: FilePath -> Position
startPositionInFile fp = Pos (LPos fp 1) 1
-- | Advance @n@ lines, resetting the column to 1.
advanceLine :: Index -> Position -> Position
advanceLine n (Pos (LPos fp ln) _) = Pos (LPos fp (ln + n)) 1
advanceColumn :: Index -> Position -> Position
advanceColumn n (Pos lp cn) = Pos lp (cn + n)
nextLine :: Position -> Position
nextLine = advanceLine 1
nextColumn :: Position -> Position
nextColumn = advanceColumn 1
-- | Advance to the next tab stop (stops every 'tabWidth' columns,
-- starting at column 1).
nextTab :: Position -> Position
nextTab (Pos lp cn) = Pos lp ((cn - 1) `div` tabWidth * tabWidth + tabWidth + 1)
tabWidth :: Index
tabWidth = 8
| jystic/language-scala | src/Language/Scala/Position.hs | bsd-3-clause | 2,570 | 0 | 12 | 503 | 771 | 414 | 357 | 67 | 1 |
{-# LANGUAGE BangPatterns, CPP, DeriveDataTypeable, MagicHash #-}
{-# LANGUAGE ScopedTypeVariables #-}
#if __GLASGOW_HASKELL__ >= 708
{-# LANGUAGE TypeFamilies #-}
#endif
{-# OPTIONS_GHC -fno-full-laziness -funbox-strict-fields #-}
module Data.HashMap.Base
(
HashMap(..)
, Leaf(..)
-- * Construction
, empty
, singleton
-- * Basic interface
, null
, size
, member
, lookup
, lookupDefault
, (!)
, insert
, insertWith
, unsafeInsert
, delete
, adjust
-- * Combine
-- ** Union
, union
, unionWith
, unions
-- * Transformations
, map
, mapWithKey
, traverseWithKey
-- * Difference and intersection
, difference
, intersection
, intersectionWith
-- * Folds
, foldl'
, foldlWithKey'
, foldr
, foldrWithKey
-- * Filter
, filter
, filterWithKey
-- * Conversions
, keys
, elems
-- ** Lists
, toList
, fromList
, fromListWith
-- Internals used by the strict version
, Hash
, Bitmap
, bitmapIndexedOrFull
, collision
, hash
, mask
, index
, bitsPerSubkey
, fullNodeMask
, sparseIndex
, two
, unionArrayBy
, update16
, update16M
, update16With'
, updateOrConcatWith
) where
#if __GLASGOW_HASKELL__ >= 709
import Data.Functor ((<$>))
#else
import Control.Applicative ((<$>), Applicative(pure))
import Data.Monoid (Monoid(mempty, mappend))
import Data.Traversable (Traversable(..))
import Data.Word (Word)
#endif
import Control.DeepSeq (NFData(rnf))
import Control.Monad.ST (ST)
import Data.Bits ((.&.), (.|.), complement)
import Data.Data hiding (Typeable)
import qualified Data.Foldable as Foldable
import qualified Data.List as L
import GHC.Exts ((==#), build, reallyUnsafePtrEquality#)
import Prelude hiding (filter, foldr, lookup, map, null, pred)
import Text.Read hiding (step)
import qualified Data.HashMap.Array as A
import qualified Data.Hashable as H
import Data.Hashable (Hashable)
import Data.HashMap.PopCount (popCount)
import Data.HashMap.Unsafe (runST)
import Data.HashMap.UnsafeShift (unsafeShiftL, unsafeShiftR)
import Data.Typeable (Typeable)
#if __GLASGOW_HASKELL__ >= 707
import GHC.Exts (isTrue#)
#endif
#if __GLASGOW_HASKELL__ >= 708
import qualified GHC.Exts as Exts
#endif
------------------------------------------------------------------------
-- | Convenience function. Compute a hash value for the given value.
hash :: H.Hashable a => a -> Hash
hash = fromIntegral . H.hash
data Leaf k v = L !k v
deriving (Eq)
instance (NFData k, NFData v) => NFData (Leaf k v) where
rnf (L k v) = rnf k `seq` rnf v
-- Invariant: The length of the 1st argument to 'Full' is
-- 2^bitsPerSubkey
-- | A map from keys to values. A map cannot contain duplicate keys;
-- each key can map to at most one value.
data HashMap k v
= Empty
| BitmapIndexed !Bitmap !(A.Array (HashMap k v))
| Leaf !Hash !(Leaf k v)
| Full !(A.Array (HashMap k v))
| Collision !Hash !(A.Array (Leaf k v))
deriving (Typeable)
instance (NFData k, NFData v) => NFData (HashMap k v) where
rnf Empty = ()
rnf (BitmapIndexed _ ary) = rnf ary
rnf (Leaf _ l) = rnf l
rnf (Full ary) = rnf ary
rnf (Collision _ ary) = rnf ary
instance Functor (HashMap k) where
fmap = map
instance Foldable.Foldable (HashMap k) where
foldr f = foldrWithKey (const f)
instance (Eq k, Hashable k) => Monoid (HashMap k v) where
mempty = empty
{-# INLINE mempty #-}
mappend = union
{-# INLINE mappend #-}
instance (Data k, Data v, Eq k, Hashable k) => Data (HashMap k v) where
gfoldl f z m = z fromList `f` toList m
toConstr _ = fromListConstr
gunfold k z c = case constrIndex c of
1 -> k (z fromList)
_ -> error "gunfold"
dataTypeOf _ = hashMapDataType
dataCast2 f = gcast2 f
fromListConstr :: Constr
fromListConstr = mkConstr hashMapDataType "fromList" [] Prefix
hashMapDataType :: DataType
hashMapDataType = mkDataType "Data.HashMap.Base.HashMap" [fromListConstr]
type Hash   = Word  -- full hash value of a key
type Bitmap = Word  -- occupancy bitmap of a 'BitmapIndexed' node
type Shift  = Int   -- number of hash bits already consumed at a level
-- | Reads maps in the @fromList [...]@ form produced by 'show'.
instance (Eq k, Hashable k, Read k, Read e) => Read (HashMap k e) where
    readPrec = parens $ prec 10 $ do
      Ident "fromList" <- lexP
      xs <- readPrec
      return (fromList xs)

    readListPrec = readListPrecDefault

instance (Show k, Show v) => Show (HashMap k v) where
    showsPrec d m = showParen (d > 10) $
      showString "fromList " . shows (toList m)

instance Traversable (HashMap k) where
    traverse f = traverseWithKey (const f)
instance (Eq k, Eq v) => Eq (HashMap k v) where
    (==) = equal

-- | Structural equality: flatten both tries left to right and compare
-- the 'Leaf'\/'Collision' nodes encountered.
equal :: (Eq k, Eq v) => HashMap k v -> HashMap k v -> Bool
equal t1 t2 = go (toList' t1 []) (toList' t2 [])
  where
    -- If the two trees are the same, then their lists of 'Leaf's and
    -- 'Collision's read from left to right should be the same (modulo the
    -- order of elements in 'Collision').
    go (Leaf k1 l1 : tl1) (Leaf k2 l2 : tl2)
      | k1 == k2 && l1 == l2
      = go tl1 tl2
    go (Collision k1 ary1 : tl1) (Collision k2 ary2 : tl2)
      -- same length and an empty difference: the two collision arrays
      -- hold the same leaves, possibly in a different order
      | k1 == k2 && A.length ary1 == A.length ary2 &&
        L.null (A.toList ary1 L.\\ A.toList ary2)
      = go tl1 tl2
    go [] [] = True
    go _  _  = False

    -- Flatten a trie to its leaf/collision nodes, left to right.
    toList' (BitmapIndexed _ ary) a = A.foldr toList' a ary
    toList' (Full ary)            a = A.foldr toList' a ary
    toList' l@(Leaf _ _)          a = l : a
    toList' c@(Collision _ _)     a = c : a
    toList' Empty                 a = a
-- Helper predicate: is this node a 'Leaf' or a 'Collision'?
isLeafOrCollision :: HashMap k v -> Bool
isLeafOrCollision t = case t of
    Leaf _ _      -> True
    Collision _ _ -> True
    _             -> False
------------------------------------------------------------------------
-- * Construction

-- | /O(1)/ Construct an empty map.
empty :: HashMap k v
empty = Empty

-- | /O(1)/ Construct a map with a single element.
singleton :: (Hashable k) => k -> v -> HashMap k v
singleton k v = Leaf (hash k) (L k v)
------------------------------------------------------------------------
-- * Basic interface
-- | /O(1)/ Return 'True' if this map is empty, 'False' otherwise.
null :: HashMap k v -> Bool
null t = case t of
    Empty -> True
    _     -> False
-- | /O(n)/ Return the number of key-value mappings in this map.
size :: HashMap k v -> Int
size t = go t 0
  where
    -- Walk the whole trie with a strict accumulator.
    go Empty                !n = n
    go (Leaf _ _)            n = n + 1
    go (BitmapIndexed _ ary) n = A.foldl' (flip go) n ary
    go (Full ary)            n = A.foldl' (flip go) n ary
    go (Collision _ ary)     n = n + A.length ary
-- | /O(log n)/ Return 'True' if the specified key is present in the
-- map, 'False' otherwise.
member :: (Eq k, Hashable k) => k -> HashMap k a -> Bool
member k m = maybe False (const True) (lookup k m)
{-# INLINABLE member #-}
-- | /O(log n)/ Return the value to which the specified key is mapped,
-- or 'Nothing' if this map contains no mapping for the key.
lookup :: (Eq k, Hashable k) => k -> HashMap k v -> Maybe v
lookup k0 m0 = go h0 k0 0 m0
  where
    h0 = hash k0
    -- Descend the trie, consuming 'bitsPerSubkey' hash bits per level.
    go !_ !_ !_ Empty = Nothing
    go h k _ (Leaf hx (L kx x))
        | h == hx && k == kx = Just x  -- TODO: Split test in two
        | otherwise          = Nothing
    go h k s (BitmapIndexed b v)
        | b .&. m == 0 = Nothing  -- child for this hash slice is absent
        | otherwise    = go h k (s+bitsPerSubkey) (A.index v (sparseIndex b m))
      where m = mask h s
    go h k s (Full v) = go h k (s+bitsPerSubkey) (A.index v (index h s))
    go h k _ (Collision hx v)
        | h == hx   = lookupInArray k v
        | otherwise = Nothing
{-# INLINABLE lookup #-}
-- | /O(log n)/ Return the value to which the specified key is mapped,
-- or the default value if this map contains no mapping for the key.
lookupDefault :: (Eq k, Hashable k)
              => v          -- ^ Default value to return.
              -> k -> HashMap k v -> v
lookupDefault def k t = maybe def id (lookup k t)
{-# INLINABLE lookupDefault #-}
-- | /O(log n)/ Return the value to which the specified key is mapped.
-- Calls 'error' if this map contains no mapping for the key.
(!) :: (Eq k, Hashable k) => HashMap k v -> k -> v
(!) m k = maybe (error "Data.HashMap.Base.(!): key not found") id (lookup k m)
{-# INLINABLE (!) #-}

infixl 9 !
-- | Create a 'Collision' value with two 'Leaf' values.
collision :: Hash -> Leaf k v -> Leaf k v -> HashMap k v
collision h e1 e2 =
    let v = A.run $ do mary <- A.new 2 e1  -- both slots start as e1
                       A.write mary 1 e2   -- overwrite slot 1 with e2
                       return mary
    in Collision h v
{-# INLINE collision #-}
-- | Create a 'BitmapIndexed' or 'Full' node, depending on whether the
-- bitmap says every child slot is occupied.
bitmapIndexedOrFull :: Bitmap -> A.Array (HashMap k v) -> HashMap k v
bitmapIndexedOrFull b ary =
    if b == fullNodeMask then Full ary else BitmapIndexed b ary
{-# INLINE bitmapIndexedOrFull #-}
-- | /O(log n)/ Associate the specified value with the specified
-- key in this map. If this map previously contained a mapping for
-- the key, the old value is replaced.
insert :: (Eq k, Hashable k) => k -> v -> HashMap k v -> HashMap k v
insert k0 v0 m0 = go h0 k0 v0 0 m0
  where
    h0 = hash k0
    go !h !k x !_ Empty = Leaf h (L k x)
    go h k x s t@(Leaf hy l@(L ky y))
        | hy == h = if ky == k
                    then if x `ptrEq` y
                         then t                 -- physically same value: reuse node
                         else Leaf h (L k x)
                    else collision h l (L k x)  -- same hash, different key
        | otherwise = runST (two s h k x hy ky y)
    go h k x s t@(BitmapIndexed b ary)
        | b .&. m == 0 =
            -- no child for this hash slice yet: grow the array
            let !ary' = A.insert ary i $! Leaf h (L k x)
            in bitmapIndexedOrFull (b .|. m) ary'
        | otherwise =
            let !st  = A.index ary i
                !st' = go h k x (s+bitsPerSubkey) st
            in if st' `ptrEq` st
               then t                           -- subtree unchanged: reuse node
               else BitmapIndexed b (A.update ary i st')
      where m = mask h s
            i = sparseIndex b m
    go h k x s t@(Full ary) =
        let !st  = A.index ary i
            !st' = go h k x (s+bitsPerSubkey) st
        in if st' `ptrEq` st
           then t
           else Full (update16 ary i st')
      where i = index h s
    go h k x s t@(Collision hy v)
        | h == hy   = Collision h (updateOrSnocWith const k x v)
        -- different hash: push the collision node one level down
        | otherwise = go h k x s $ BitmapIndexed (mask hy s) (A.singleton t)
{-# INLINABLE insert #-}
-- | In-place update version of insert.  Mutates interior arrays, so it
-- is only safe while the map is being built and no other reference to
-- it exists (as in 'fromList').
unsafeInsert :: (Eq k, Hashable k) => k -> v -> HashMap k v -> HashMap k v
unsafeInsert k0 v0 m0 = runST (go h0 k0 v0 0 m0)
  where
    h0 = hash k0
    go !h !k x !_ Empty = return $! Leaf h (L k x)
    go h k x s t@(Leaf hy l@(L ky y))
        | hy == h = if ky == k
                    then if x `ptrEq` y
                         then return t
                         else return $! Leaf h (L k x)
                    else return $! collision h l (L k x)
        | otherwise = two s h k x hy ky y
    go h k x s t@(BitmapIndexed b ary)
        | b .&. m == 0 = do
            ary' <- A.insertM ary i $! Leaf h (L k x)
            return $! bitmapIndexedOrFull (b .|. m) ary'
        | otherwise = do
            st <- A.indexM ary i
            st' <- go h k x (s+bitsPerSubkey) st
            A.unsafeUpdateM ary i st'  -- mutate the child slot in place
            return t
      where m = mask h s
            i = sparseIndex b m
    go h k x s t@(Full ary) = do
        st <- A.indexM ary i
        st' <- go h k x (s+bitsPerSubkey) st
        A.unsafeUpdateM ary i st'
        return t
      where i = index h s
    go h k x s t@(Collision hy v)
        | h == hy   = return $! Collision h (updateOrSnocWith const k x v)
        | otherwise = go h k x s $ BitmapIndexed (mask hy s) (A.singleton t)
{-# INLINABLE unsafeInsert #-}
-- | Create a map from two key-value pairs which hashes don't collide.
two :: Shift -> Hash -> k -> v -> Hash -> k -> v -> ST s (HashMap k v)
two = go
  where
    go s h1 k1 v1 h2 k2 v2
        | bp1 == bp2 = do
            -- both hashes fall into the same slot at this level:
            -- recurse one level deeper until they diverge
            st <- go (s+bitsPerSubkey) h1 k1 v1 h2 k2 v2
            ary <- A.singletonM st
            return $! BitmapIndexed bp1 ary
        | otherwise  = do
            mary <- A.new 2 $ Leaf h1 (L k1 v1)
            A.write mary idx2 $ Leaf h2 (L k2 v2)
            ary <- A.unsafeFreeze mary
            return $! BitmapIndexed (bp1 .|. bp2) ary
      where
        bp1 = mask h1 s
        bp2 = mask h2 s
        -- children must be stored in increasing slot order
        idx2 | index h1 s < index h2 s = 1
             | otherwise               = 0
{-# INLINE two #-}
-- | /O(log n)/ Associate the value with the key in this map. If
-- this map previously contained a mapping for the key, the old value
-- is replaced by the result of applying the given function to the new
-- and old value. Example:
--
-- > insertWith f k v map
-- >   where f new old = new + old
insertWith :: (Eq k, Hashable k) => (v -> v -> v) -> k -> v -> HashMap k v
           -> HashMap k v
insertWith f k0 v0 m0 = go h0 k0 v0 0 m0
  where
    h0 = hash k0
    go !h !k x !_ Empty = Leaf h (L k x)
    go h k x s (Leaf hy l@(L ky y))
        | hy == h = if ky == k
                    then Leaf h (L k (f x y))  -- combine new and old value
                    else collision h l (L k x)
        | otherwise = runST (two s h k x hy ky y)
    go h k x s (BitmapIndexed b ary)
        | b .&. m == 0 =
            let ary' = A.insert ary i $! Leaf h (L k x)
            in bitmapIndexedOrFull (b .|. m) ary'
        | otherwise =
            let st   = A.index ary i
                st'  = go h k x (s+bitsPerSubkey) st
                ary' = A.update ary i $! st'
            in BitmapIndexed b ary'
      where m = mask h s
            i = sparseIndex b m
    go h k x s (Full ary) =
        let st   = A.index ary i
            st'  = go h k x (s+bitsPerSubkey) st
            ary' = update16 ary i $! st'
        in Full ary'
      where i = index h s
    go h k x s t@(Collision hy v)
        | h == hy   = Collision h (updateOrSnocWith f k x v)
        | otherwise = go h k x s $ BitmapIndexed (mask hy s) (A.singleton t)
{-# INLINABLE insertWith #-}
-- | In-place update version of insertWith.  Mutates interior arrays;
-- only safe during map construction ('fromListWith').
unsafeInsertWith :: forall k v. (Eq k, Hashable k)
                 => (v -> v -> v) -> k -> v -> HashMap k v
                 -> HashMap k v
unsafeInsertWith f k0 v0 m0 = runST (go h0 k0 v0 0 m0)
  where
    h0 = hash k0
    go :: (Eq k, Hashable k) => Hash -> k -> v -> Shift -> HashMap k v
       -> ST s (HashMap k v)
    go !h !k x !_ Empty = return $! Leaf h (L k x)
    go h k x s (Leaf hy l@(L ky y))
        | hy == h = if ky == k
                    then return $! Leaf h (L k (f x y))
                    else return $! collision h l (L k x)
        | otherwise = two s h k x hy ky y
    go h k x s t@(BitmapIndexed b ary)
        | b .&. m == 0 = do
            ary' <- A.insertM ary i $! Leaf h (L k x)
            return $! bitmapIndexedOrFull (b .|. m) ary'
        | otherwise = do
            st <- A.indexM ary i
            st' <- go h k x (s+bitsPerSubkey) st
            A.unsafeUpdateM ary i st'  -- in-place child update
            return t
      where m = mask h s
            i = sparseIndex b m
    go h k x s t@(Full ary) = do
        st <- A.indexM ary i
        st' <- go h k x (s+bitsPerSubkey) st
        A.unsafeUpdateM ary i st'
        return t
      where i = index h s
    go h k x s t@(Collision hy v)
        | h == hy   = return $! Collision h (updateOrSnocWith f k x v)
        | otherwise = go h k x s $ BitmapIndexed (mask hy s) (A.singleton t)
{-# INLINABLE unsafeInsertWith #-}
-- | /O(log n)/ Remove the mapping for the specified key from this map
-- if present.
delete :: (Eq k, Hashable k) => k -> HashMap k v -> HashMap k v
delete k0 m0 = go h0 k0 0 m0
  where
    h0 = hash k0
    go !_ !_ !_ Empty = Empty
    go h k _ t@(Leaf hy (L ky _))
        | hy == h && ky == k = Empty
        | otherwise          = t
    go h k s t@(BitmapIndexed b ary)
        | b .&. m == 0 = t  -- key not present
        | otherwise =
            let !st  = A.index ary i
                !st' = go h k (s+bitsPerSubkey) st
            in if st' `ptrEq` st
               then t       -- nothing was deleted below: reuse node
               else case st' of
                   -- child became empty: shrink this node, possibly
                   -- collapsing it into its single remaining child
                   Empty | A.length ary == 1 -> Empty
                         | A.length ary == 2 ->
                             case (i, A.index ary 0, A.index ary 1) of
                               (0, _, l) | isLeafOrCollision l -> l
                               (1, l, _) | isLeafOrCollision l -> l
                               _                               -> bIndexed
                         | otherwise -> bIndexed
                     where
                       bIndexed = BitmapIndexed (b .&. complement m) (A.delete ary i)
                   -- a lone leaf/collision child replaces the node
                   l | isLeafOrCollision l && A.length ary == 1 -> l
                   _ -> BitmapIndexed b (A.update ary i st')
      where m = mask h s
            i = sparseIndex b m
    go h k s t@(Full ary) =
        let !st  = A.index ary i
            !st' = go h k (s+bitsPerSubkey) st
        in if st' `ptrEq` st
           then t
           else case st' of
               Empty ->
                   -- a 'Full' node losing a child demotes to 'BitmapIndexed'
                   let ary' = A.delete ary i
                       bm   = fullNodeMask .&. complement (1 `unsafeShiftL` i)
                   in BitmapIndexed bm ary'
               _ -> Full (A.update ary i st')
      where i = index h s
    go h k _ t@(Collision hy v)
        | h == hy = case indexOf k v of
            Just i
                | A.length v == 2 ->
                    -- one leaf remains: turn the collision back into 'Leaf'
                    if i == 0
                    then Leaf h (A.index v 1)
                    else Leaf h (A.index v 0)
                | otherwise -> Collision h (A.delete v i)
            Nothing -> t
        | otherwise = t
{-# INLINABLE delete #-}
-- | /O(log n)/ Adjust the value tied to a given key in this map only
-- if it is present. Otherwise, leave the map alone.
adjust :: (Eq k, Hashable k) => (v -> v) -> k -> HashMap k v -> HashMap k v
adjust f k0 m0 = go h0 k0 0 m0
  where
    h0 = hash k0
    go !_ !_ !_ Empty = Empty
    go h k _ t@(Leaf hy (L ky y))
        | hy == h && ky == k = Leaf h (L k (f y))
        | otherwise          = t
    go h k s t@(BitmapIndexed b ary)
        | b .&. m == 0 = t  -- key not present
        | otherwise = let st   = A.index ary i
                          st'  = go h k (s+bitsPerSubkey) st
                          ary' = A.update ary i $! st'
                      in BitmapIndexed b ary'
      where m = mask h s
            i = sparseIndex b m
    go h k s (Full ary) =
        let i    = index h s
            st   = A.index ary i
            st'  = go h k (s+bitsPerSubkey) st
            ary' = update16 ary i $! st'
        in Full ary'
    go h k _ t@(Collision hy v)
        | h == hy   = Collision h (updateWith f k v)
        | otherwise = t
{-# INLINABLE adjust #-}
------------------------------------------------------------------------
-- * Combine

-- | /O(n+m)/ The union of two maps. If a key occurs in both maps, the
-- mapping from the first will be the mapping in the result.
union :: (Eq k, Hashable k) => HashMap k v -> HashMap k v -> HashMap k v
union = unionWith const
{-# INLINABLE union #-}
-- | /O(n+m)/ The union of two maps. If a key occurs in both maps,
-- the provided function (first argument) will be used to compute the
-- result.
unionWith :: (Eq k, Hashable k) => (v -> v -> v) -> HashMap k v -> HashMap k v
-> HashMap k v
unionWith f = go 0
where
-- empty vs. anything
go !_ t1 Empty = t1
go _ Empty t2 = t2
-- leaf vs. leaf
go s t1@(Leaf h1 l1@(L k1 v1)) t2@(Leaf h2 l2@(L k2 v2))
| h1 == h2 = if k1 == k2
then Leaf h1 (L k1 (f v1 v2))
else collision h1 l1 l2
| otherwise = goDifferentHash s h1 h2 t1 t2
go s t1@(Leaf h1 (L k1 v1)) t2@(Collision h2 ls2)
| h1 == h2 = Collision h1 (updateOrSnocWith f k1 v1 ls2)
| otherwise = goDifferentHash s h1 h2 t1 t2
go s t1@(Collision h1 ls1) t2@(Leaf h2 (L k2 v2))
| h1 == h2 = Collision h1 (updateOrSnocWith (flip f) k2 v2 ls1)
| otherwise = goDifferentHash s h1 h2 t1 t2
go s t1@(Collision h1 ls1) t2@(Collision h2 ls2)
| h1 == h2 = Collision h1 (updateOrConcatWith f ls1 ls2)
| otherwise = goDifferentHash s h1 h2 t1 t2
-- branch vs. branch
go s (BitmapIndexed b1 ary1) (BitmapIndexed b2 ary2) =
let b' = b1 .|. b2
ary' = unionArrayBy (go (s+bitsPerSubkey)) b1 b2 ary1 ary2
in bitmapIndexedOrFull b' ary'
go s (BitmapIndexed b1 ary1) (Full ary2) =
let ary' = unionArrayBy (go (s+bitsPerSubkey)) b1 fullNodeMask ary1 ary2
in Full ary'
go s (Full ary1) (BitmapIndexed b2 ary2) =
let ary' = unionArrayBy (go (s+bitsPerSubkey)) fullNodeMask b2 ary1 ary2
in Full ary'
go s (Full ary1) (Full ary2) =
let ary' = unionArrayBy (go (s+bitsPerSubkey)) fullNodeMask fullNodeMask
ary1 ary2
in Full ary'
-- leaf vs. branch
go s (BitmapIndexed b1 ary1) t2
| b1 .&. m2 == 0 = let ary' = A.insert ary1 i t2
b' = b1 .|. m2
in bitmapIndexedOrFull b' ary'
| otherwise = let ary' = A.updateWith' ary1 i $ \st1 ->
go (s+bitsPerSubkey) st1 t2
in BitmapIndexed b1 ary'
where
h2 = leafHashCode t2
m2 = mask h2 s
i = sparseIndex b1 m2
go s t1 (BitmapIndexed b2 ary2)
| b2 .&. m1 == 0 = let ary' = A.insert ary2 i $! t1
b' = b2 .|. m1
in bitmapIndexedOrFull b' ary'
| otherwise = let ary' = A.updateWith' ary2 i $ \st2 ->
go (s+bitsPerSubkey) t1 st2
in BitmapIndexed b2 ary'
where
h1 = leafHashCode t1
m1 = mask h1 s
i = sparseIndex b2 m1
go s (Full ary1) t2 =
let h2 = leafHashCode t2
i = index h2 s
ary' = update16With' ary1 i $ \st1 -> go (s+bitsPerSubkey) st1 t2
in Full ary'
go s t1 (Full ary2) =
let h1 = leafHashCode t1
i = index h1 s
ary' = update16With' ary2 i $ \st2 -> go (s+bitsPerSubkey) t1 st2
in Full ary'
leafHashCode (Leaf h _) = h
leafHashCode (Collision h _) = h
leafHashCode _ = error "leafHashCode"
goDifferentHash s h1 h2 t1 t2
| m1 == m2 = BitmapIndexed m1 (A.singleton $! go (s+bitsPerSubkey) t1 t2)
| m1 < m2 = BitmapIndexed (m1 .|. m2) (A.pair t1 t2)
| otherwise = BitmapIndexed (m1 .|. m2) (A.pair t2 t1)
where
m1 = mask h1 s
m2 = mask h2 s
{-# INLINE unionWith #-}
-- | Strict in the result of @f@.  Merge two child arrays guided by
-- their bitmaps, combining slots present in both with @f@.
unionArrayBy :: (a -> a -> a) -> Bitmap -> Bitmap -> A.Array a -> A.Array a
             -> A.Array a
unionArrayBy f b1 b2 ary1 ary2 = A.run $ do
    let b' = b1 .|. b2
    mary <- A.new_ (popCount b')
    -- iterate over nonzero bits of b1 .|. b2
    -- it would be nice if we could shift m by more than 1 each time
    let ba = b1 .&. b2
        go !i !i1 !i2 !m
            | m > b'        = return ()
            | b' .&. m == 0 = go i i1 i2 (m `unsafeShiftL` 1)
            | ba .&. m /= 0 = do
                -- slot present in both arrays: combine
                A.write mary i $! f (A.index ary1 i1) (A.index ary2 i2)
                go (i+1) (i1+1) (i2+1) (m `unsafeShiftL` 1)
            | b1 .&. m /= 0 = do
                A.write mary i =<< A.indexM ary1 i1
                go (i+1) (i1+1) (i2  ) (m `unsafeShiftL` 1)
            | otherwise     = do
                A.write mary i =<< A.indexM ary2 i2
                go (i+1) (i1  ) (i2+1) (m `unsafeShiftL` 1)
    go 0 0 0 (b' .&. negate b') -- XXX: b' must be non-zero
    return mary
    -- TODO: For the case where b1 .&. b2 == b1, i.e. when one is a
    -- subset of the other, we could use a slightly simpler algorithm,
    -- where we copy one array, and then update.
{-# INLINE unionArrayBy #-}
-- TODO: Figure out the time complexity of 'unions'.

-- | Construct a set containing all elements from a list of sets.
-- Left-biased, like 'union'.
unions :: (Eq k, Hashable k) => [HashMap k v] -> HashMap k v
unions = L.foldl' union empty
{-# INLINE unions #-}
------------------------------------------------------------------------
-- * Transformations

-- | /O(n)/ Transform this map by applying a function to every value.
mapWithKey :: (k -> v1 -> v2) -> HashMap k v1 -> HashMap k v2
mapWithKey f = go
  where
    go Empty                 = Empty
    go (Leaf h (L k v))      = Leaf h $ L k (f k v)
    go (BitmapIndexed b ary) = BitmapIndexed b $ A.map' go ary
    go (Full ary)            = Full $ A.map' go ary
    go (Collision h ary)     = Collision h $
                               A.map' (\ (L k v) -> L k (f k v)) ary
{-# INLINE mapWithKey #-}
-- | /O(n)/ Transform this map by applying a function to every value.
map :: (v1 -> v2) -> HashMap k v1 -> HashMap k v2
map f = mapWithKey (\_ v -> f v)
{-# INLINE map #-}
-- TODO: We should be able to use mutation to create the new
-- 'HashMap'.

-- | /O(n)/ Transform this map by accumulating an Applicative result
-- from every value.
traverseWithKey :: Applicative f => (k -> v1 -> f v2) -> HashMap k v1
                -> f (HashMap k v2)
traverseWithKey f = go
  where
    go Empty                 = pure Empty
    go (Leaf h (L k v))      = Leaf h . L k <$> f k v
    go (BitmapIndexed b ary) = BitmapIndexed b <$> A.traverse go ary
    go (Full ary)            = Full <$> A.traverse go ary
    go (Collision h ary)     =
        Collision h <$> A.traverse (\ (L k v) -> L k <$> f k v) ary
{-# INLINE traverseWithKey #-}
------------------------------------------------------------------------
-- * Difference and intersection

-- | /O(n*log m)/ Difference of two maps. Return elements of the first map
-- not existing in the second.
difference :: (Eq k, Hashable k) => HashMap k v -> HashMap k w -> HashMap k v
difference a b = foldlWithKey' step empty a
  where
    -- keep a binding only when its key is absent from @b@
    step m k v = case lookup k b of
        Just _  -> m
        Nothing -> insert k v m
{-# INLINABLE difference #-}
-- | /O(n*log m)/ Intersection of two maps. Return elements of the first
-- map for keys existing in the second.
intersection :: (Eq k, Hashable k) => HashMap k v -> HashMap k w -> HashMap k v
intersection a b = foldlWithKey' step empty a
  where
    -- keep a binding only when its key also occurs in @b@
    step m k v = case lookup k b of
        Just _  -> insert k v m
        Nothing -> m
{-# INLINABLE intersection #-}
-- | /O(n+m)/ Intersection of two maps. If a key occurs in both maps
-- the provided function is used to combine the values from the two
-- maps.
intersectionWith :: (Eq k, Hashable k) => (v1 -> v2 -> v3) -> HashMap k v1
                 -> HashMap k v2 -> HashMap k v3
intersectionWith f a b = foldlWithKey' step empty a
  where
    step m k v = maybe m (\w -> insert k (f v w) m) (lookup k b)
{-# INLINABLE intersectionWith #-}
------------------------------------------------------------------------
-- * Folds

-- | /O(n)/ Reduce this map by applying a binary operator to all
-- elements, using the given starting value (typically the
-- left-identity of the operator). Each application of the operator
-- is evaluated before using the result in the next
-- application. This function is strict in the starting value.
foldl' :: (a -> v -> a) -> a -> HashMap k v -> a
foldl' f = foldlWithKey' (\ z _ v -> f z v)
{-# INLINE foldl' #-}

-- | /O(n)/ Reduce this map by applying a binary operator to all
-- elements, using the given starting value (typically the
-- left-identity of the operator). Each application of the operator
-- is evaluated before using the result in the next
-- application. This function is strict in the starting value.
foldlWithKey' :: (a -> k -> v -> a) -> a -> HashMap k v -> a
foldlWithKey' f = go
  where
    go !z Empty                = z
    go z (Leaf _ (L k v))      = f z k v
    go z (BitmapIndexed _ ary) = A.foldl' go z ary
    go z (Full ary)            = A.foldl' go z ary
    go z (Collision _ ary)     = A.foldl' (\ z' (L k v) -> f z' k v) z ary
{-# INLINE foldlWithKey' #-}
-- | /O(n)/ Reduce this map by applying a binary operator to all
-- elements, using the given starting value (typically the
-- right-identity of the operator).
foldr :: (v -> a -> a) -> a -> HashMap k v -> a
foldr f = foldrWithKey (const f)
{-# INLINE foldr #-}

-- | /O(n)/ Reduce this map by applying a binary operator to all
-- elements, using the given starting value (typically the
-- right-identity of the operator).  Lazy in the accumulator.
foldrWithKey :: (k -> v -> a -> a) -> a -> HashMap k v -> a
foldrWithKey f = go
  where
    go z Empty                 = z
    go z (Leaf _ (L k v))      = f k v z
    go z (BitmapIndexed _ ary) = A.foldr (flip go) z ary
    go z (Full ary)            = A.foldr (flip go) z ary
    go z (Collision _ ary)     = A.foldr (\ (L k v) z' -> f k v z') z ary
{-# INLINE foldrWithKey #-}
------------------------------------------------------------------------
-- * Filter

-- | Create a new array of the @n@ first elements of @mary@.
trim :: A.MArray s a -> Int -> ST s (A.Array a)
trim mary n = do
    dst <- A.new_ n
    A.copyM mary 0 dst 0 n
    A.unsafeFreeze dst
{-# INLINE trim #-}
-- | /O(n)/ Filter this map by retaining only elements satisfying a
-- predicate.
filterWithKey :: forall k v. (k -> v -> Bool) -> HashMap k v -> HashMap k v
filterWithKey pred = go
  where
    go Empty = Empty
    go t@(Leaf _ (L k v))
        | pred k v  = t
        | otherwise = Empty
    go (BitmapIndexed b ary) = filterA ary b
    go (Full ary)            = filterA ary fullNodeMask
    go (Collision h ary)     = filterC ary h

    -- Filter the children of an interior node, compacting survivors
    -- into a fresh array and rebuilding the bitmap.
    filterA ary0 b0 =
        let !n = A.length ary0
        in runST $ do
            mary <- A.new_ n
            step ary0 mary b0 0 0 1 n
      where
        step :: A.Array (HashMap k v) -> A.MArray s (HashMap k v)
             -> Bitmap -> Int -> Int -> Bitmap -> Int
             -> ST s (HashMap k v)
        step !ary !mary !b i !j !bi n
            | i >= n = case j of
                -- no survivors / one survivor / several survivors
                0 -> return Empty
                1 -> do
                    ch <- A.read mary 0
                    case ch of
                      t | isLeafOrCollision t -> return t
                      _                       -> BitmapIndexed b <$> trim mary 1
                _ -> do
                    ary2 <- trim mary j
                    return $! if j == maxChildren
                              then Full ary2
                              else BitmapIndexed b ary2
            | bi .&. b == 0 = step ary mary b i j (bi `unsafeShiftL` 1) n
            | otherwise = case go (A.index ary i) of
                Empty -> step ary mary (b .&. complement bi) (i+1) j
                         (bi `unsafeShiftL` 1) n
                t     -> do A.write mary j t
                            step ary mary b (i+1) (j+1) (bi `unsafeShiftL` 1) n

    -- Filter the leaves of a 'Collision' node.
    filterC ary0 h =
        let !n = A.length ary0
        in runST $ do
            mary <- A.new_ n
            step ary0 mary 0 0 n
      where
        step :: A.Array (Leaf k v) -> A.MArray s (Leaf k v)
             -> Int -> Int -> Int
             -> ST s (HashMap k v)
        step !ary !mary i !j n
            | i >= n = case j of
                0 -> return Empty
                1 -> do l <- A.read mary 0
                        return $! Leaf h l
                _ | i == j    -> do ary2 <- A.unsafeFreeze mary
                                    return $! Collision h ary2
                  | otherwise -> do ary2 <- trim mary j
                                    return $! Collision h ary2
            | pred k v  = A.write mary j el >> step ary mary (i+1) (j+1) n
            | otherwise = step ary mary (i+1) j n
          where el@(L k v) = A.index ary i
{-# INLINE filterWithKey #-}
-- | /O(n)/ Filter this map by retaining only elements which values
-- satisfy a predicate.
filter :: (v -> Bool) -> HashMap k v -> HashMap k v
filter p = filterWithKey (const p)
{-# INLINE filter #-}
------------------------------------------------------------------------
-- * Conversions

-- TODO: Improve fusion rules by modelled them after the Prelude ones
-- on lists.

-- | /O(n)/ Return a list of this map's keys. The list is produced
-- lazily.
keys :: HashMap k v -> [k]
keys = L.map fst . toList
{-# INLINE keys #-}

-- | /O(n)/ Return a list of this map's values. The list is produced
-- lazily.
elems :: HashMap k v -> [v]
elems = L.map snd . toList
{-# INLINE elems #-}
------------------------------------------------------------------------
-- ** Lists

-- | /O(n)/ Return a list of this map's elements. The list is
-- produced lazily.  Written with 'build' so it can fuse with a
-- following list consumer.
toList :: HashMap k v -> [(k, v)]
toList t = build (\ c z -> foldrWithKey (curry c) z t)
{-# INLINE toList #-}

-- | /O(n)/ Construct a map with the supplied mappings. If the list
-- contains duplicate mappings, the later mappings take precedence.
fromList :: (Eq k, Hashable k) => [(k, v)] -> HashMap k v
fromList = L.foldl' (\ m (k, v) -> unsafeInsert k v m) empty
{-# INLINABLE fromList #-}

-- | /O(n*log n)/ Construct a map from a list of elements. Uses
-- the provided function to merge duplicate entries.
fromListWith :: (Eq k, Hashable k) => (v -> v -> v) -> [(k, v)] -> HashMap k v
fromListWith f = L.foldl' (\ m (k, v) -> unsafeInsertWith f k v m) empty
{-# INLINE fromListWith #-}
------------------------------------------------------------------------
-- Array operations

-- | /O(n)/ Lookup the value associated with the given key in this
-- array. Returns 'Nothing' if the key wasn't found.  Used for
-- 'Collision' nodes, which are searched linearly.
lookupInArray :: Eq k => k -> A.Array (Leaf k v) -> Maybe v
lookupInArray k0 ary0 = go k0 ary0 0 (A.length ary0)
  where
    go !k !ary !i !n
        | i >= n    = Nothing
        | otherwise = case A.index ary i of
            (L kx v)
                | k == kx   -> Just v
                | otherwise -> go k ary (i+1) n
{-# INLINABLE lookupInArray #-}
-- | /O(n)/ Lookup the position of the given key in this array.
-- Returns 'Nothing' if the key wasn't found.
indexOf :: Eq k => k -> A.Array (Leaf k v) -> Maybe Int
indexOf k0 ary0 = go k0 ary0 0 (A.length ary0)
  where
    go !k !ary !i !n
        | i >= n    = Nothing
        | otherwise = case A.index ary i of
            (L kx _)
                | k == kx   -> Just i
                | otherwise -> go k ary (i+1) n
{-# INLINABLE indexOf #-}
-- | /O(n)/ Apply @f@ to the value stored under @k0@, if present;
-- otherwise return the array unchanged.
updateWith :: Eq k => (v -> v) -> k -> A.Array (Leaf k v) -> A.Array (Leaf k v)
updateWith f k0 ary0 = go k0 ary0 0 (A.length ary0)
  where
    go !k !ary !i !n
        | i >= n    = ary
        | otherwise = case A.index ary i of
            (L kx y) | k == kx   -> A.update ary i (L k (f y))
                     | otherwise -> go k ary (i+1) n
{-# INLINABLE updateWith #-}
-- | /O(n)/ If @k@ is present, replace its value with @f new old@;
-- otherwise append a new @(k, v)@ leaf at the end.
updateOrSnocWith :: Eq k => (v -> v -> v) -> k -> v -> A.Array (Leaf k v)
                 -> A.Array (Leaf k v)
updateOrSnocWith f k0 v0 ary0 = go k0 v0 ary0 0 (A.length ary0)
  where
    go !k v !ary !i !n
        | i >= n = A.run $ do
            -- Not found, append to the end.
            mary <- A.new_ (n + 1)
            A.copy ary 0 mary 0 n
            A.write mary n (L k v)
            return mary
        | otherwise = case A.index ary i of
            (L kx y) | k == kx   -> A.update ary i (L k (f v y))
                     | otherwise -> go k v ary (i+1) n
{-# INLINABLE updateOrSnocWith #-}
-- | /O(n*m)/ Merge two collision arrays: keys present in both are
-- combined with @f@ (left value first); keys only in the second array
-- are appended.
updateOrConcatWith :: Eq k => (v -> v -> v) -> A.Array (Leaf k v) -> A.Array (Leaf k v) -> A.Array (Leaf k v)
updateOrConcatWith f ary1 ary2 = A.run $ do
    -- first: look up the position of each element of ary2 in ary1
    let indices = A.map (\(L k _) -> indexOf k ary1) ary2
    -- that tells us how large the overlap is:
    -- count number of Nothing constructors
    let nOnly2 = A.foldl' (\n -> maybe (n+1) (const n)) 0 indices
    let n1 = A.length ary1
    let n2 = A.length ary2
    -- copy over all elements from ary1
    mary <- A.new_ (n1 + nOnly2)
    A.copy ary1 0 mary 0 n1
    -- append or update all elements from ary2
    let go !iEnd !i2
          | i2 >= n2  = return ()
          | otherwise = case A.index indices i2 of
              Just i1 -> do -- key occurs in both arrays, store combination in position i1
                  L k v1 <- A.indexM ary1 i1
                  L _ v2 <- A.indexM ary2 i2
                  A.write mary i1 (L k (f v1 v2))
                  go iEnd (i2+1)
              Nothing -> do -- key is only in ary2, append to end
                  A.write mary iEnd =<< A.indexM ary2 i2
                  go (iEnd+1) (i2+1)
    go n1 0
    return mary
{-# INLINABLE updateOrConcatWith #-}
------------------------------------------------------------------------
-- Manually unrolled loops

-- | /O(n)/ Update the element at the given position in this array.
-- Specialised to 16-element arrays (the children of a 'Full' node).
update16 :: A.Array e -> Int -> e -> A.Array e
update16 ary idx b = runST (update16M ary idx b)
{-# INLINE update16 #-}

-- | /O(n)/ Update the element at the given position in this array.
update16M :: A.Array e -> Int -> e -> ST s (A.Array e)
update16M ary idx b = do
    mary <- clone16 ary
    A.write mary idx b
    A.unsafeFreeze mary
{-# INLINE update16M #-}

-- | /O(n)/ Update the element at the given position in this array, by applying a function to it.
update16With' :: A.Array e -> Int -> (e -> e) -> A.Array e
update16With' ary idx f = update16 ary idx $! f (A.index ary idx)
{-# INLINE update16With' #-}
-- | Unsafely clone an array of 16 elements. The length of the input
-- array is not checked.
clone16 :: A.Array e -> ST s (A.MArray s e)
clone16 ary =
#if __GLASGOW_HASKELL__ >= 702
    -- GHC >= 7.2 has a primitive array-thaw; use it directly.
    A.thaw ary 0 16
#else
    -- Older GHCs: copy the 16 slots by hand.
    do mary <- A.new_ 16
       A.indexM ary 0 >>= A.write mary 0
       A.indexM ary 1 >>= A.write mary 1
       A.indexM ary 2 >>= A.write mary 2
       A.indexM ary 3 >>= A.write mary 3
       A.indexM ary 4 >>= A.write mary 4
       A.indexM ary 5 >>= A.write mary 5
       A.indexM ary 6 >>= A.write mary 6
       A.indexM ary 7 >>= A.write mary 7
       A.indexM ary 8 >>= A.write mary 8
       A.indexM ary 9 >>= A.write mary 9
       A.indexM ary 10 >>= A.write mary 10
       A.indexM ary 11 >>= A.write mary 11
       A.indexM ary 12 >>= A.write mary 12
       A.indexM ary 13 >>= A.write mary 13
       A.indexM ary 14 >>= A.write mary 14
       A.indexM ary 15 >>= A.write mary 15
       return mary
#endif
------------------------------------------------------------------------
-- Bit twiddling

-- | Number of hash bits consumed per trie level.
bitsPerSubkey :: Int
bitsPerSubkey = 4

-- | Number of children of a 'Full' node: 2^'bitsPerSubkey'.
maxChildren :: Int
maxChildren = fromIntegral $ 1 `unsafeShiftL` bitsPerSubkey

-- | Mask selecting the low 'bitsPerSubkey' bits of a subkey.
-- Backtick application binds tighter than @-@, so this parses as
-- @(1 \`unsafeShiftL\` bitsPerSubkey) - 1@.
subkeyMask :: Bitmap
subkeyMask = 1 `unsafeShiftL` bitsPerSubkey - 1

-- | Position of the bit @m@ within the packed child array of a
-- 'BitmapIndexed' node with bitmap @b@.
sparseIndex :: Bitmap -> Bitmap -> Int
sparseIndex b m = popCount (b .&. (m - 1))

-- | Bitmap bit corresponding to hash @w@ at shift level @s@.
mask :: Word -> Shift -> Bitmap
mask w s = 1 `unsafeShiftL` index w s
{-# INLINE mask #-}
-- | Mask out the 'bitsPerSubkey' bits used for indexing at this level
-- of the tree.
index :: Hash -> Shift -> Int
index w s = fromIntegral $ (unsafeShiftR w s) .&. subkeyMask
{-# INLINE index #-}
-- | A bitmask with the 'maxChildren' (i.e. 2^'bitsPerSubkey') least
-- significant bits set: the bitmap of a node in which every child
-- slot is occupied.
fullNodeMask :: Bitmap
fullNodeMask = complement (complement 0 `unsafeShiftL` maxChildren)
{-# INLINE fullNodeMask #-}
-- | Check if the two arguments are the same value. N.B. This
-- function might give false negatives (due to GC moving objects),
-- so it may only be used as an optimisation, never for correctness.
ptrEq :: a -> a -> Bool
#if __GLASGOW_HASKELL__ < 707
ptrEq x y = reallyUnsafePtrEquality# x y ==# 1#
#else
-- GHC >= 7.7: (==#) returns Int#, so lift it with isTrue#.
ptrEq x y = isTrue# (reallyUnsafePtrEquality# x y ==# 1#)
#endif
{-# INLINE ptrEq #-}
#if __GLASGOW_HASKELL__ >= 708
------------------------------------------------------------------------
-- IsList instance (enables OverloadedLists on GHC >= 7.8)
instance (Eq k, Hashable k) => Exts.IsList (HashMap k v) where
    type Item (HashMap k v) = (k, v)
    fromList = fromList
    toList   = toList
#endif
| athanclark/lh-bug | deps/unordered-containers/Data/HashMap/Base.hs | bsd-3-clause | 39,869 | 0 | 21 | 13,297 | 13,515 | 6,729 | 6,786 | 788 | 17 |
module Raven.Client.Connection
( listenAtEnd
, sendReq
)where
import Network.Transport
import Control.Concurrent
import Graphics.UI.Gtk hiding (Action, backspace)
import Codec.Picture
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import Data.Vector (Vector)
import qualified Data.Vector as V
import Text.Read
import Control.Monad (void)
-- |Listens at endpoint and handles events.
-- Loops forever: connection-status events are reported in @connbuf@,
-- single-part messages update one of the work buffers (via 'modBuf'),
-- and multi-part messages are treated as image data (via 'buildImg').
-- NOTE(review): after 'EndPointClosed' the loop keeps calling
-- 'receive' on the closed endpoint — confirm this cannot busy-loop.
listenAtEnd :: EndPoint -> TextBuffer -> MVar (Vector TextBuffer) -> IO ()
listenAtEnd end connbuf workbufs =
  receive end >>=
  (\event -> case event of
      ConnectionClosed _ ->
        textBufferSetText connbuf "Connection Closed" >>
        listenAtEnd end connbuf workbufs
      EndPointClosed ->
        textBufferSetText connbuf "Error: Endpoint Closed" >>
        listenAtEnd end connbuf workbufs
      ErrorEvent (TransportError _ err) ->
        textBufferSetText connbuf
        ("Connection Error: " ++ err) >>
        listenAtEnd end connbuf workbufs
      Received _ [val] -> modBuf workbufs val >>
                          listenAtEnd end connbuf workbufs
      Received _ vals -> buildImg vals >>
                         listenAtEnd end connbuf workbufs
      _ -> listenAtEnd end connbuf workbufs)
-- |Sends a request to the server.
-- Needs the connection, the line number, and the message; the wire
-- format is @\<line number\> \<message\>@.
-- NOTE(review): 'void' discards the result of 'send', so transport
-- errors are silently dropped here (hence "error possible").
sendReq :: Connection -> Int -> ByteString -> IO ()
sendReq conn n val = void $ send conn [B.pack $ show n,B.pack " ",val] --error possible
-- |Updates the work buffer addressed by the leading line number of the
-- message. Messages have the form @\<n\> \<text\>@: the text (words
-- after the number) is appended to buffer @n@. Malformed messages
-- (empty, or no leading integer) and line numbers outside the buffer
-- vector are ignored instead of crashing the listener thread, which
-- the previous 'head'/'V.!' version did.
modBuf :: MVar (Vector TextBuffer) -> ByteString -> IO ()
modBuf bufs str =
  case B.words str of
    (w:rest) -> case (readMaybe (B.unpack w) :: Maybe Int) of
      Just n -> readMVar bufs >>=
        (\bufs' -> case bufs' V.!? n of
            -- safe index: out-of-range line numbers are dropped
            Just buf ->
              textBufferGetEndIter buf >>=
              (\iter ->
                 textBufferInsertByteString buf iter (B.unwords rest))
            Nothing -> return ())
      _ -> return ()
    [] -> return ()  -- empty message: nothing to do
-- |Decodes an image message of the form @\<name\> \<image bytes\>@ and
-- saves it as @.raven\/client\/plots\/\<name\>.png@.
-- NOTE(review): 'B.tail' errors when the message contains no space —
-- confirm senders always prefix a plot name.
buildImg :: [ByteString] -> IO ()
buildImg vals = let vals' = B.concat vals
                in case decodeImage (B.tail (B.dropWhile (/= ' ') vals')) of
                     Left str -> putStrLn str --log this
                     Right img ->
                       savePngImage (".raven/client/plots/" ++
                                     B.unpack (B.takeWhile (/= ' ') vals') ++ ".png") img
| denumerate/raven | src/Raven/Client/Connection.hs | bsd-3-clause | 2,196 | 0 | 22 | 523 | 684 | 350 | 334 | 50 | 6 |
{-# LANGUAGE NamedFieldPuns #-}
{-|
Module : Data.RangeSet
Description : A set of elments represented as a list of ranges.
Copyright : (c) 2016 Micxjo Funkcio
License : BSD3
Maintainer : micxjo@fastmail.com
Stability : experimental
-}
module Data.RangeSet
( -- * RangeSet type
RangeSet
-- * Construction
, empty
, singleton
, rangeSet
, insert
-- * Query
, member
, notMember
, size
, isEmpty
, isSingleton
, ranges
-- * Combine
, union
, unions
-- * Conversion
, toList
, toAscList
, toDescList
) where
import qualified Data.List as List
import Data.Range (Range)
import qualified Data.Range as R
import Data.Semigroup (Semigroup(..))
-- | A set of elements, represented as a (possibly empty) list of
-- disjoint ranges.
--
-- Invariant: the ranges are pairwise disjoint, non-adjacent, and kept
-- in ascending order ('append' maintains this).
newtype RangeSet a = RangeSet { _ranges :: [Range a] } deriving (Eq, Show)
-- | '(<>)' is set union.  Union is idempotent, so 'stimes' returns
-- the set unchanged for any positive multiplier; zero yields the
-- empty set and negative multipliers are an error.
instance (Ord a, Enum a, Bounded a) => Semigroup (RangeSet a) where
  (<>) = union
  stimes n rs
    | n < 0 = error "stimes: RangeSet, negative multiplier"
    | n == 0 = empty
    | otherwise = rs
-- | 'mempty' is the empty set; 'mappend' is union (via '(<>)').
instance (Ord a, Enum a, Bounded a) => Monoid (RangeSet a) where
  mempty = empty
  mappend = (<>)
-- | The set containing no elements.
empty :: RangeSet a
empty = RangeSet mempty
-- | The set containing exactly one element.
singleton :: Enum a => a -> RangeSet a
singleton = RangeSet . pure . R.singleton
-- | The set of every element from @min a b@ to @max a b@, inclusive.
-- The endpoints may be given in either order.
rangeSet :: (Ord a, Enum a) => a -> a -> RangeSet a
rangeSet a b = RangeSet [R.range (min a b) (max a b)]
-- | Is the element in the set?  True when any sub-range contains it.
member :: Ord a => a -> RangeSet a -> Bool
member a rs = any (R.member a) (ranges rs)
-- | Is the element absent from the set?
notMember :: Ord a => a -> RangeSet a -> Bool
notMember a rs = not (member a rs)
-- | The disjoint sub-ranges making up the set (ascending order, per
-- the 'RangeSet' invariant).
ranges :: RangeSet a -> [Range a]
ranges RangeSet{_ranges} = _ranges
-- | 'pred' clamped at 'minBound' instead of erroring, keeping the
-- adjacency tests in 'append' total.
safePred :: (Eq a, Enum a, Bounded a) => a -> a
safePred a = if a == minBound then a else pred a
-- | Add a range to the set, merging it with any existing ranges that
-- overlap or are immediately adjacent to it, and keeping the range
-- list in ascending order.
append :: (Ord a, Enum a, Bounded a) => RangeSet a -> Range a -> RangeSet a
append RangeSet{_ranges} range = RangeSet (lt ++ (mid : gt))
  where
    -- ranges entirely below the new range (not even adjacent to it);
    -- 'safePred' makes the adjacency test total at 'minBound'
    (lt, ranges') = List.partition (
      \r -> R.rangeMax r < safePred (R.rangeMin range)) _ranges
    -- ranges entirely above the new range (not even adjacent to it)
    (gt, overlap) = List.partition (
      \r -> safePred (R.rangeMin r) > R.rangeMax range) ranges'
    -- whatever remains overlaps or touches the new range: merge it all
    -- into one range
    mid = case overlap of
      [] -> range
      _ -> foldr (<>) range overlap
-- | The union of two sets, built by folding the second set's ranges
-- into the first with 'append'.
union :: (Ord a, Enum a, Bounded a) => RangeSet a -> RangeSet a -> RangeSet a
union rs RangeSet{_ranges} = List.foldl' append rs _ranges
-- | The union of a collection of sets: @unions == foldl union empty@.
unions :: (Ord a, Enum a, Bounded a, Foldable t) => t (RangeSet a) -> RangeSet a
unions = List.foldl' union mempty
-- | The number of elements in the set (sum of the sub-range sizes;
-- the ranges are disjoint, so nothing is counted twice).
size :: Num a => RangeSet a -> a
size RangeSet{_ranges} = sum (map R.size _ranges)
-- | Does the set contain no elements at all?
isEmpty :: RangeSet a -> Bool
isEmpty = null . ranges
-- | Does the set contain exactly one element?  True only when the
-- set is a single sub-range that is itself a singleton.
isSingleton :: Eq a => RangeSet a -> Bool
isSingleton (RangeSet [r]) = R.isSingleton r
isSingleton _ = False
-- | Insert an element into the set, merging it into an existing range
-- where possible.
insert :: (Enum a, Ord a, Bounded a) => a -> RangeSet a -> RangeSet a
insert a rs = append rs (R.singleton a)
-- | All of the elements of the set, in ascending order (alias for
-- 'toAscList').
toList :: Enum a => RangeSet a -> [a]
toList = toAscList
-- | All of the elements of the set, in ascending order (the range
-- list is already ascending, so the ranges can be walked in order).
toAscList :: Enum a => RangeSet a -> [a]
toAscList RangeSet{_ranges} = concatMap R.toList _ranges
-- | All of the elements of the set, in descending order.
--
-- The internal range list is kept ascending, so it must be reversed
-- before walking each range downwards; without the 'reverse', only
-- the elements within each range were descending, not the whole list
-- (e.g. ranges [1-2, 5-6] produced [2,1,6,5] instead of [6,5,2,1]).
toDescList :: Enum a => RangeSet a -> [a]
toDescList RangeSet{_ranges} = concatMap R.toDescList (reverse _ranges)
| micxjo/range-set | src/Data/RangeSet.hs | bsd-3-clause | 3,877 | 0 | 14 | 1,016 | 1,248 | 650 | 598 | 80 | 2 |
module Bertrand.REPL
( repl
) where
import System.IO
import Control.Monad
import Control.Monad.Extra
-- import Control.Monad.State
import Data.Either
import Data.Maybe
import Data.Monoid
import qualified Data.Map as M
import Bertrand.Shell
import Bertrand.Data
import Bertrand.System (prelude)
import Bertrand.Preprocessor
import Bertrand.Parser
import Bertrand.Interpreter
import Debug.Trace
-- | REPL state: the accumulated environment of declarations plus the
-- parse options collected so far.
data REPLST = REPLST Envir [ParseOption] deriving Show

-- | Combining two states merges their environments and concatenates
-- their parse options.
--
-- NOTE(review): on GHC >= 8.4 this needs a matching Semigroup
-- instance (Semigroup became a superclass of Monoid) -- confirm the
-- supported compiler range.
instance Monoid REPLST where
    mempty = REPLST mempty []
    REPLST ex xs `mappend` REPLST ey ys = REPLST (ex <> ey) (xs ++ ys)
-- | Shell description: the command table, the evaluator, a fixed
-- prompt and the visual style.
sdesc = shellDesc { commands = cmds
                  , evalFunc = evalF
                  , prompt = const "> "
                  , style = sstyle }

-- | Greeting and farewell texts plus the prefix character that marks
-- shell commands.
sstyle = shellStyle { startText = "Hello! Bertrand, ver.0.1 :? for help"
                    , quitText = "See you!"
                    , commandPrefix = ':' }
-- | Entry point: run the shell starting from an empty environment and
-- no parse options.
repl :: IO ()
repl = shell sdesc (REPLST mempty [])
-- | Shell command table, mapping each command name (invoked as
-- @:name@) to its action.
cmds :: [(String, CommandFunc REPLST)]
cmds = [("help", const $ outputStrLn helptext),
        ("?", const $ outputStrLn helptext),
        -- drop every declaration and parse option made so far
        ("clear", const $ do
            put mempty
            outputStrLn "cleared all declarations"),
        -- show the parse options collected during the session
        ("options", const $ do
            REPLST _ ops <- get
            outputStr $ unlines . map show $ ops),
        -- show all bindings, or only those of the given name
        -- ('head ss' is safe: guarded by the 'null ss' test)
        ("binds", \ss -> do
            REPLST env _ <- get
            outputStr $ if null ss
                then unlines . map show . M.toList $ binds env
                else unlines . map show . concat . M.lookup (head ss) $ binds env),
        -- show all constraints, or only those of the given name
        ("cstrs", \ss -> do
            REPLST env _ <- get
            outputStr $ if null ss
                then unlines . map show . M.toList $ cstrs env
                else unlines . map show . concat . M.lookup (head ss) $ cstrs env),
        ("decls", const $ do
            REPLST env _ <- get
            outputStr $ unlines . map show $ decls env),
        ("variables", const $ do
            REPLST env _ <- get
            outputStrLn $ "var: " ++ (unwords . map show . fst $ vars env)
            outputStrLn $ "cons: " ++ (unwords . map show . snd $ vars env) )
        ]
-- | Evaluate one line of user input.  The trailing character selects
-- the action: @.@ declares a statement, @?@ asks whether an
-- expression is true, and anything else evaluates the expression and
-- prints the result.  Empty input is ignored, so the 'last' below is
-- total.
--
-- NOTE(review): @init s@ drops the last character of the preprocessed
-- text -- presumably a trailing newline added by 'preprocess';
-- confirm.
evalF :: String -> Shell REPLST ()
evalF "" = return ()
evalF xs = case last xs of
    '.' -> case preprocess (init xs) of
        -- only options were produced: record them
        ("", op) -> modify (`mappend` REPLST mempty op)
        (s, _) -> do
            m <- parseS 0 $ init s
            maybe (return ())
                  (\e -> modify (`mappend` REPLST e [])) m
    '?' -> case preprocess (init xs) of
        ("", _) -> return ()
        (s, _) -> do
            -- wrap the query so it parses as a binding of "it"
            m <- parseS 0 $ "it = ternary (" ++ init s ++ ")"
            whenJust m $ \e -> do
                let a = head . fromJust . M.lookup "it" $ binds e
                REPLST env _ <- get
                outputStrLn . evalShow .
                    Env (fst preludeM){depth = -2} $ Env env{depth = -1} a
    _ -> case preprocess xs of
        ("", _) -> return ()
        (s, _) -> do
            -- wrap the expression so it parses as a binding of "it"
            m <- parseS 0 $ "it = (" ++ init s ++ ")"
            whenJust m $ \e -> do
                let a = head . fromJust . M.lookup "it" $ binds e
                REPLST env _ <- get
                outputStrLn . evalShow .
                    Env (fst preludeM){depth = -2} $ Env env{depth = -1} a
-- | Parse a line of input using the prelude's parse options plus the
-- session's, reporting any parse error to the shell and returning the
-- resulting environment on success.
parseS :: Int -> String -> Shell REPLST (Maybe Envir)
parseS i s = do
    REPLST _ ops <- get
    either
        -- the lambda's (i, j) is the reported error position and
        -- shadows the outer parameter i
        (\(i, j) -> do
            outputStrLn $ "main:" ++ show i ++ ":" ++ show j ++ " parse error"
            return Nothing)
        (return . Just) $ parse (snd preludeM ++ ops) i s
-- | The prelude, preprocessed and parsed once: its environment and
-- the parse options it declares.  Lines of the prelude that fail to
-- parse are silently dropped by 'rights'.
preludeM :: (Envir, [ParseOption])
preludeM =
    let (s, ops) = preprocess prelude
    in (mconcat $ rights $ map (parse ops (-1)) $ lines s, ops)
-- command :: String -> REPL ()
-- command s = case take 1 s of
-- "q" -> return ()
-- -- "i" -> do
-- -- info $ tail s
-- -- roop
-- "c" -> do
-- put mempty
-- io $ putStrLn "Cleared all declarations."
-- roop
-- "i" -> do
-- REPLST es _ <- get
-- io $ putStr $ unlines . map show $ es
-- roop
-- "o" -> do
-- REPLST _ ops <- get
-- io $ putStr $ unlines . map show $ ops
-- roop
-- "p" -> do
-- let (es, ops) = prelude
-- io $ putStrLn $ unlines . map show $ ops
-- io $ putStr $ unlines . map show $ es
-- roop
-- "?" -> do
-- io $ putStr helptext
-- roop
--
-- _ -> do
-- io $ putStrLn "command not find."
-- roop
-- getInput :: IO String
-- getInput = getLine
--------------------------------------------------------------------------------
-- | Help text printed by the @:help@ and @:?@ commands.
-- (Fixed user-facing typo: "declaretions" -> "declarations".)
helptext :: String
helptext =
    " Commands available from the prompt:\n\
    \\n\
    \ <expr> evaluate and display <expr>\n\
    \ <expr> ? display <expr> is true, false or undefined\n\
    \ <statement> . declare that <statement> is true\n\
    \ :binds [<name>] display bindings of <name>\n\
    \ :clear clear all declarations\n\
    \ :cstrs [<name>] display constraints of <name>\n\
    \ :decls display all declarations\n\
    \ :options display all parse options\n\
    \\n\
    \ :help, :? display this list of commands\n\
    \ :quit exit Bertrand Interpreter"
| fujiy00/bertrand | src/Bertrand/REPL.hs | bsd-3-clause | 5,493 | 0 | 23 | 2,010 | 1,476 | 771 | 705 | 97 | 6 |
{-# LANGUAGE OverloadedStrings, TupleSections, FlexibleContexts #-}
-- | This program is a convenience utility for running the Futhark
-- test suite, and its test programs.
module Main ( ProgramTest (..)
, TestRun (..)
, TestCase (..)
, main) where
import Control.Applicative
import Control.Concurrent
import Control.Monad hiding (forM_)
import Control.Exception hiding (try)
import Control.Monad.Except hiding (forM_)
import Data.List hiding (foldl')
import Data.Maybe
import Data.Monoid
import Data.Ord
import Data.Foldable (forM_)
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.HashMap.Lazy as HM
import System.Console.GetOpt
import System.Directory
import System.Process.Text (readProcessWithExitCode)
import System.Exit
import System.IO
import System.FilePath
import Text.Regex.TDFA
import Prelude
import Futhark.Util.Pretty (prettyText)
import Futhark.Representation.AST.Syntax.Core hiding (Prim)
import Futhark.Analysis.Metrics
import Futhark.Pipeline
import Futhark.Compiler
import Futhark.Test
import Futhark.Util.Options
--- Test execution
-- | The test monad: IO with textual failure via 'ExceptT'.
type TestM = ExceptT T.Text IO

-- | Run a test computation, turning 'Left' into 'Failure' and
-- 'Right' into 'Success'.
runTestM :: TestM () -> IO TestResult
runTestM = fmap (either Failure $ const Success) . runExceptT

-- | Lift an IO action into 'TestM'.
io :: IO a -> TestM a
io = liftIO

-- | Prefix any failure message produced by the action with a context
-- line.
context :: T.Text -> TestM a -> TestM a
context s = withExceptT ((s<>":\n")<>)
-- | Outcome of running one test case: success, or failure with a
-- human-readable message.
data TestResult = Success
                | Failure T.Text
                deriving (Eq, Show)
-- | Everything needed to run a single test program.
data TestCase = TestCase { testCaseProgram :: FilePath
                           -- ^ Path of the program under test.
                         , testCaseTest :: ProgramTest
                           -- ^ The parsed test specification.
                         , testCasePrograms :: ProgConfig
                           -- ^ Which compilers, interpreters and type
                           -- checkers to use.
                         , testCaseOptions :: [String]
                           -- ^ Extra options to pass to the program.
                         }
                deriving (Show)
-- | Test cases are identified solely by their program path.
instance Eq TestCase where
  x == y = testCaseProgram x == testCaseProgram y
-- | Test cases are ordered by the path of the program they test,
-- mirroring the 'Eq' instance.
instance Ord TestCase where
  compare = comparing testCaseProgram
-- | Result of executing a test program: a nonzero exit code with its
-- stderr, or the values the program printed on stdout.
data RunResult = ErrorResult Int T.Text
               | SuccessResult [Value]
-- | Error message for a missing executable (exit code 127).
progNotFound :: T.Text -> T.Text
progNotFound s = s <> ": command not found"
-- | Compile the program through the given pipeline and return the
-- AST metrics of the optimised result.  The two clauses are textually
-- identical but presumably cannot be merged because each constructor
-- carries a differently-typed pipeline -- confirm before refactoring.
optimisedProgramMetrics :: StructurePipeline -> FilePath -> TestM AstMetrics
optimisedProgramMetrics (SOACSPipeline pipeline) program = do
  res <- io $ runFutharkM (runPipelineOnProgram newFutharkConfig pipeline program) False
  case res of
    Left err ->
      throwError $ errorDesc err
    Right prog ->
      return $ progMetrics prog
optimisedProgramMetrics (KernelsPipeline pipeline) program = do
  res <- io $ runFutharkM (runPipelineOnProgram newFutharkConfig pipeline program) False
  case res of
    Left err ->
      throwError $ errorDesc err
    Right prog ->
      return $ progMetrics prog
-- | Check that the optimised program contains each named AST
-- construct exactly the expected number of times.  An expectation of
-- zero occurrences is satisfied when the metric is absent.
testMetrics :: FilePath -> StructureTest -> TestM ()
testMetrics program (StructureTest pipeline expected) = context "Checking metrics" $ do
  actual <- optimisedProgramMetrics pipeline program
  mapM_ (ok actual) $ HM.toList expected
  where ok metrics (name, expected_occurences) =
          case HM.lookup name metrics of
            Nothing
              | expected_occurences > 0 ->
                throwError $ name <> " should have occurred " <> T.pack (show expected_occurences) <>
                " times, but did not occur at all in optimised program."
            Just actual_occurences
              | expected_occurences /= actual_occurences ->
                -- fixed typo in the user-facing message:
                -- "occured" -> "occurred"
                throwError $ name <> " should have occurred " <> T.pack (show expected_occurences) <>
                " times, but occurred " <> T.pack (show actual_occurences) <> " times."
            _ -> return ()
-- | Run a single test case: check structural metrics first, then
-- either expect a compile-time failure, just compile, or run each
-- run case with the configured interpreters and compilers.
runTestCase :: TestCase -> TestM ()
runTestCase (TestCase program testcase progs extra_options) = do
  forM_ (testExpectedStructure testcase) $ testMetrics program
  case testAction testcase of
    CompileTimeFailure expected_error ->
      forM_ (configTypeCheckers progs) $ \typeChecker ->
        context ("Type-checking with " <> T.pack typeChecker) $ do
          (code, _, err) <-
            io $ readProcessWithExitCode typeChecker [program] ""
          case code of
            ExitSuccess -> throwError "Expected failure\n"
            -- 127 means the type checker itself was not found
            ExitFailure 127 -> throwError $ progNotFound $ T.pack typeChecker
            ExitFailure 1 -> throwError err
            ExitFailure _ -> checkError expected_error err
    RunCases [] ->
      -- no run cases: successful compilation is all that is required
      forM_ (configCompilers progs) $ \compiler ->
        context ("Compiling with " <> T.pack compiler) $
        justCompileTestProgram compiler program
    RunCases run_cases ->
      forM_ run_cases $ \run -> do
        unless (runMode run == CompiledOnly) $
          forM_ (configInterpreters progs) $ \interpreter ->
            context ("Interpreting with " <> T.pack interpreter) $
            interpretTestProgram interpreter program run
        unless (runMode run == InterpretedOnly) $
          forM_ (configCompilers progs) $ \compiler ->
            context ("Compiling with " <> T.pack compiler) $
            compileTestProgram extra_options compiler program run
-- | Fail unless the actual error text matches the expected regular
-- expression; an 'AnyError'-style expectation accepts anything.
checkError :: ExpectedError -> T.Text -> TestM ()
checkError (ThisError regex_s regex) err
  | not (match regex $ T.unpack err) =
     throwError $ "Expected error:\n " <> regex_s <>
     "\nGot error:\n " <> err
checkError _ _ =
  return ()
-- | Turn a finished program run into a 'RunResult'.  A successful
-- exit must have printed parseable values on stdout; otherwise the
-- raw output is dumped to an "actual" file and the test fails.  A
-- failing exit simply carries the exit code and stderr.
runResult :: FilePath -> ExitCode -> T.Text -> T.Text -> TestM RunResult
runResult program ExitSuccess stdout_s _ =
  case valuesFromText "stdout" stdout_s of
    Left e -> do
      actual <- io $ writeOutFile program "actual" stdout_s
      throwError $ T.pack (show e) <> "\n(See " <> T.pack actual <> ")"
    Right vs -> return $ SuccessResult vs
runResult _ (ExitFailure code) _ stderr_s =
  return $ ErrorResult code stderr_s
-- | Resolve the expected result of a run case, loading expected
-- values from disk when the test specifies any.
getExpectedResult :: (Functor m, MonadIO m) =>
                     FilePath -> ExpectedResult Values
                  -> m (ExpectedResult [Value])
getExpectedResult dir (Succeeds (Just vals)) = Succeeds . Just <$> getValues dir vals
getExpectedResult _ (Succeeds Nothing) = return $ Succeeds Nothing
getExpectedResult _ (RunTimeFailure err) = return $ RunTimeFailure err
-- | Run the program in the interpreter, feeding the test input on
-- stdin, and compare the outcome with the expected result.
interpretTestProgram :: String -> FilePath -> TestRun -> TestM ()
interpretTestProgram futharki program (TestRun _ inputValues expectedResult) = do
  input <- T.unlines . map prettyText <$> getValues dir inputValues
  expectedResult' <- getExpectedResult dir expectedResult
  (code, output, err) <- io $ readProcessWithExitCode futharki [program] input
  case code of
    -- 127 means the interpreter itself was not found
    ExitFailure 127 ->
      throwError $ progNotFound $ T.pack futharki
    _ ->
      compareResult program expectedResult' =<< runResult program code output err
  where dir = takeDirectory program
-- | Compile the program with the given compiler, run the produced
-- binary on the test input (plus any extra options), and compare the
-- outcome with the expected result.
compileTestProgram :: [String] -> String -> FilePath -> TestRun -> TestM ()
compileTestProgram extra_options futharkc program (TestRun _ inputValues expectedResult) = do
  input <- getValuesText dir inputValues
  expectedResult' <- getExpectedResult dir expectedResult
  (futcode, _, futerr) <-
    io $ readProcessWithExitCode futharkc
    [program, "-o", binOutputf] ""
  case futcode of
    ExitFailure 127 -> throwError $ progNotFound $ T.pack futharkc
    ExitFailure _ -> throwError futerr
    ExitSuccess -> return ()
  -- Explicitly prefixing the current directory is necessary for
  -- readProcessWithExitCode to find the binary when binOutputf has
  -- no path component.
  let binpath = "." </> binOutputf
  context ("Running " <> T.pack (unwords $ binpath : extra_options)) $ do
    (progCode, output, progerr) <-
      io $ readProcessWithExitCode binpath extra_options input
    withExceptT validating $
      compareResult program expectedResult' =<< runResult program progCode output progerr
  where binOutputf = program `replaceExtension` "bin"
        dir = takeDirectory program
        validating = ("validating test result:\n"<>)
-- | Compile the program without running it; any compiler failure
-- becomes a test failure prefixed with "compiling:".
justCompileTestProgram :: String -> FilePath -> TestM ()
justCompileTestProgram futharkc program =
  withExceptT compiling $ do
    (futcode, _, futerr) <-
      io $ readProcessWithExitCode futharkc
      [program, "-o", binOutputf] mempty
    case futcode of
      -- 127 means the compiler itself was not found
      ExitFailure 127 -> throwError $ progNotFound $ T.pack futharkc
      ExitFailure _ -> throwError futerr
      ExitSuccess -> return ()
  where binOutputf = program `replaceExtension` "bin"
        compiling = ("compiling:\n"<>)
-- | Compare an actual run result against the expected one, failing
-- with a descriptive message.  On a value mismatch, both actual and
-- expected values are dumped to files for inspection.
compareResult :: FilePath -> ExpectedResult [Value] -> RunResult
              -> TestM ()
compareResult _ (Succeeds Nothing) SuccessResult{} =
  -- any successful run is fine when no specific values were expected
  return ()
compareResult program (Succeeds (Just expectedResult)) (SuccessResult actualResult) =
  case compareValues actualResult expectedResult of
    Just mismatch -> do
      actualf <-
        io $ writeOutFile program "actual" $
        T.unlines $ map prettyText actualResult
      expectedf <-
        io $ writeOutFile program "expected" $
        T.unlines $ map prettyText expectedResult
      throwError $ T.pack actualf <> " and " <> T.pack expectedf <>
        " do not match:\n" <> T.pack (show mismatch)
    Nothing ->
      return ()
compareResult _ (RunTimeFailure expectedError) (ErrorResult _ actualError) =
  checkError expectedError actualError
compareResult _ (Succeeds _) (ErrorResult code err) =
  throwError $ "Program failed with error code " <>
  T.pack (show code) <> " and stderr:\n " <> err
compareResult _ (RunTimeFailure f) (SuccessResult _) =
  throwError $ "Program succeeded, but expected failure:\n " <> T.pack (show f)
-- | Write content to a file named after the program with the given
-- extension, appending an increasing counter until an unused filename
-- is found.  Returns the filename used.
--
-- NOTE(review): the exists/write pair is not atomic, so concurrent
-- writers could race to the same name -- confirm that is acceptable
-- here.
writeOutFile :: FilePath -> String -> T.Text -> IO FilePath
writeOutFile base ext content =
  attempt (0::Int)
  where template = base `replaceExtension` ext
        attempt i = do
          let filename = template ++ "-" ++ show i
          exists <- doesFileExist filename
          if exists
            then attempt $ i+1
            else do T.writeFile filename content
                    return filename
---
--- Test manager
---
-- | Run a test action, turning any thrown exception into a 'Failure'
-- result instead of letting it propagate and kill the worker.
catching :: IO TestResult -> IO TestResult
catching m = m `catch` save
  where save :: SomeException -> IO TestResult
        save e = return $ Failure $ T.pack $ show e
-- | Run a test case, converting both test failures and exceptions
-- into a 'TestResult'.
doTest :: TestCase -> IO TestResult
doTest = catching . runTestM . runTestCase

-- | Read the test specification embedded in a program file, restrict
-- it to the current mode, and pair it with the configuration.
makeTestCase :: TestConfig -> TestMode -> FilePath -> IO TestCase
makeTestCase config mode file = do
  spec <- applyMode mode <$> testSpecFromFile file
  return $ TestCase file spec (configPrograms config) (configExtraOptions config)
-- | Restrict a test specification according to the test mode.
applyMode :: TestMode -> ProgramTest -> ProgramTest
applyMode mode test =
  test { testAction = applyModeToAction mode $ testAction test }

-- | Restrict a test action: expected compile-time failures are
-- unaffected; type-check-only mode drops all run cases; otherwise
-- each run case is rewritten or dropped individually.
applyModeToAction :: TestMode -> TestAction -> TestAction
applyModeToAction _ a@CompileTimeFailure{} =
  a
applyModeToAction OnlyTypeCheck (RunCases _) =
  RunCases []
applyModeToAction mode (RunCases cases) =
  RunCases $ mapMaybe (applyModeToCase mode) cases

-- | Rewrite or drop a single run case for the given mode.  On Travis,
-- cases tagged 'NoTravis' are dropped entirely.
applyModeToCase :: TestMode -> TestRun -> Maybe TestRun
applyModeToCase OnlyInterpret run =
  Just run { runMode = InterpretedOnly }
applyModeToCase OnlyCompile run =
  Just run { runMode = CompiledOnly }
applyModeToCase OnTravis run | runMode run == NoTravis =
  Nothing
applyModeToCase _ run =
  Just run
-- | Worker loop: repeatedly take a test case from the input 'MVar',
-- run it, and publish the result.  Runs forever; meant to be forked.
runTest :: MVar TestCase -> MVar (TestCase, TestResult) -> IO ()
runTest testmvar resmvar = forever $ do
  test <- takeMVar testmvar
  res <- doTest test
  putMVar resmvar (test, res)
-- | Should this test be skipped because one of its tags is in the
-- configured exclusion list?
excludedTest :: TestConfig -> TestCase -> Bool
excludedTest config =
  any (`elem` configExclude config) . testTags . testCaseTest
-- | Erase the current terminal line (ANSI "erase in line" escape).
clearLine :: IO ()
clearLine = putStr "\27[2K"

-- | Progress report for interactive (TTY) use: rewrites the current
-- line in place using carriage returns.
reportInteractive :: String -> Int -> Int -> Int -> IO ()
reportInteractive first failed passed remaining = do
  clearLine
  putStr $
    "\rWaiting for " ++ first ++ " (" ++
    show failed ++ " failed, " ++
    show passed ++ " passed, " ++
    show remaining ++ " to go.)\r"
  hFlush stdout

-- | Progress report for non-interactive use: one plain line per
-- update.
reportText :: String -> Int -> Int -> Int -> IO ()
reportText first failed passed remaining =
  putStr $ "Waiting for " ++ first ++ " (" ++
  show failed ++ " failed, " ++
  show passed ++ " passed, " ++
  show remaining ++ " to go.)\n"
-- | Run all test programs concurrently (one worker per capability),
-- reporting progress as results arrive, and exit with a nonzero
-- status if any test failed.
runTests :: TestConfig -> [FilePath] -> IO ()
runTests config files = do
  let mode = configTestMode config
  testmvar <- newEmptyMVar
  resmvar <- newEmptyMVar
  concurrency <- getNumCapabilities
  -- spawn one worker per capability, all sharing the two MVars
  replicateM_ concurrency $ forkIO $ runTest testmvar resmvar
  all_tests <- mapM (makeTestCase config mode) files
  let (excluded, included) = partition (excludedTest config) all_tests
  -- feed the workers from a separate thread so we can start
  -- collecting results immediately
  _ <- forkIO $ mapM_ (putMVar testmvar) included
  isTTY <- (&& mode /= OnTravis) <$> hIsTerminalDevice stdout
  let report = if isTTY then reportInteractive else reportText
      clear = if isTTY then clearLine else putStr "\n"
      -- collect results until the set of outstanding tests is empty,
      -- tallying failures and passes
      getResults remaining failed passed =
        case S.toList remaining of
          [] -> clear >> return (failed, passed)
          first:_ -> do
            report (testCaseProgram first) failed passed $ S.size remaining
            (test, res) <- takeMVar resmvar
            let next = getResults $ test `S.delete` remaining
            case res of
              Success -> next failed (passed+1)
              Failure s -> do clear
                              T.putStrLn (T.pack (testCaseProgram test) <> ":\n" <> s)
                              next (failed+1) passed
  (failed, passed) <- getResults (S.fromList included) 0 0
  let excluded_str = if null excluded
                     then ""
                     else " (" ++ show (length excluded) ++ " excluded)"
  putStrLn $ show failed ++ " failed, " ++ show passed ++ " passed" ++ excluded_str ++ "."
  exitWith $ case failed of 0 -> ExitSuccess
                            _ -> ExitFailure 1
---
--- Configuration and command line parsing
---
-- | Global configuration for a test run.
data TestConfig = TestConfig
                  { configTestMode :: TestMode
                    -- ^ Which parts of each test to run.
                  , configPrograms :: ProgConfig
                    -- ^ Which external tools to use.
                  , configExclude :: [T.Text]
                    -- ^ Skip tests carrying any of these tags.
                  , configExtraOptions :: [String]
                  -- ^ Extra options passed to the programs being run.
                  }
-- | Defaults: run everything, skip tests tagged "disable", use the
-- standard futhark tools, and pass no extra options.
defaultConfig :: TestConfig
defaultConfig = TestConfig { configTestMode = Everything
                           , configExclude = [ "disable" ]
                           , configPrograms =
                             ProgConfig
                             { configCompiler = Left "futhark-c"
                             , configInterpreter = Left "futharki"
                             , configTypeChecker = Left "futhark"
                             }
                           , configExtraOptions = []
                           }
-- | Which external programs to use.  'Left' holds the single default
-- program; 'Right' holds an explicit, user-supplied list.
data ProgConfig = ProgConfig
                  { configCompiler :: Either FilePath [FilePath]
                  , configInterpreter :: Either FilePath [FilePath]
                  , configTypeChecker :: Either FilePath [FilePath]
                  }
                deriving (Show)
-- | Apply a transformation to the program configuration inside a
-- 'TestConfig'.
changeProgConfig :: (ProgConfig -> ProgConfig) -> TestConfig -> TestConfig
changeProgConfig f config = config { configPrograms = f $ configPrograms config }

-- | The compilers to use (the default, or the user-supplied list).
configCompilers :: ProgConfig -> [FilePath]
configCompilers = either pure id . configCompiler

-- | The interpreters to use.
configInterpreters :: ProgConfig -> [FilePath]
configInterpreters = either pure id . configInterpreter

-- | The type checkers to use.
configTypeCheckers :: ProgConfig -> [FilePath]
configTypeCheckers = either pure id . configTypeChecker

-- | Add a compiler, discarding the default if it is still in effect.
addCompiler :: FilePath -> ProgConfig -> ProgConfig
addCompiler compiler config = case configCompiler config of
  Left _ -> config { configCompiler = Right [compiler] }
  Right existing -> config { configCompiler = Right $ compiler : existing }

-- | Add an interpreter, discarding the default if still in effect.
addInterpreter :: FilePath -> ProgConfig -> ProgConfig
addInterpreter interpreter config = case configInterpreter config of
  Left _ -> config { configInterpreter = Right [interpreter] }
  Right existing -> config { configInterpreter = Right $ interpreter : existing }

-- | Add a type checker, discarding the default if still in effect.
addTypeChecker :: FilePath -> ProgConfig -> ProgConfig
addTypeChecker typeChecker config = case configTypeChecker config of
  Left _ -> config { configTypeChecker = Right [typeChecker] }
  Right existing -> config { configTypeChecker = Right $ typeChecker : existing }
-- | Which subset of each test to run.
data TestMode = OnlyTypeCheck
              | OnlyCompile
              | OnlyInterpret
              | OnTravis
                -- ^ Like 'OnlyCompile', additionally dropping run
                -- cases marked @NoTravis@.
              | Everything
              deriving (Eq)
-- | Command-line options understood by the test runner.  Each option
-- either fails with an error ('Left') or transforms the
-- configuration ('Right').
commandLineOptions :: [FunOptDescr TestConfig]
commandLineOptions = [
    Option "t" ["only-typecheck"]
    (NoArg $ Right $ \config -> config { configTestMode = OnlyTypeCheck })
    "Only perform type-checking"
  , Option "i" ["only-interpret"]
    (NoArg $ Right $ \config -> config { configTestMode = OnlyInterpret })
    "Only interpret"
  , Option "c" ["only-compile"]
    (NoArg $ Right $ \config -> config { configTestMode = OnlyCompile })
    "Only run compiled code"
  -- --travis also excludes the "notravis" tag on top of the mode
  , Option [] ["travis"]
    (NoArg $ Right $ \config -> config { configTestMode = OnTravis
                                       , configExclude = T.pack "notravis" :
                                                         configExclude config })
    "Only run compiled code not marked notravis"
  , Option [] ["typechecker"]
    (ReqArg (Right . changeProgConfig . addTypeChecker)
     "PROGRAM")
    "What to run for type-checking (defaults to 'futhark')."
  , Option [] ["compiler"]
    (ReqArg (Right . changeProgConfig . addCompiler)
     "PROGRAM")
    "What to run for code generation (defaults to 'futhark-c')."
  , Option [] ["interpreter"]
    (ReqArg (Right . changeProgConfig . addInterpreter)
     "PROGRAM")
    "What to run for interpretation (defaults to 'futharki')."
  , Option [] ["exclude"]
    (ReqArg (\tag ->
               Right $ \config ->
                 config { configExclude = T.pack tag : configExclude config })
     "TAG")
    "Exclude test programs that define this tag."
  , Option "p" ["pass-option"]
    (ReqArg (\opt ->
               Right $ \config ->
                 config { configExtraOptions = opt : configExtraOptions config })
     "OPT")
    "Pass this option to programs being run."
  ]
-- | Parse the command line and run all given test programs.
main :: IO ()
main = mainWithOptions defaultConfig commandLineOptions $ \progs config ->
  Just $ runTests config progs
| CulpaBS/wbBach | src/futhark-test.hs | bsd-3-clause | 18,074 | 0 | 26 | 4,613 | 5,068 | 2,565 | 2,503 | 389 | 7 |
module Text.Highlighter.Lexers.GoodDataCL (lexer) where
import Text.Regex.PCRE.Light
import Text.Highlighter.Types
-- | Lexer definition for the GoodData-CL language: name, aliases,
-- recognised file extensions, MIME type, and the initial state.  The
-- 'caseless' flag makes all regexes match case-insensitively.
lexer :: Lexer
lexer = Lexer
    { lName = "GoodData-CL"
    , lAliases = ["gooddata-cl"]
    , lExtensions = [".gdc"]
    , lMimetypes = ["text/x-gooddata-cl"]
    , lStart = root'
    , lFlags = [caseless]
    }
-- | Inside a parenthesised argument list: named arguments, string and
-- numeric values; the closing parenthesis pops back to the previous
-- state.
argsList' :: TokenMatcher
argsList' =
    [ tokNext "\\)" (Arbitrary "Punctuation") Pop
    , tok "," (Arbitrary "Punctuation")
    , tok "[a-zA-Z]\\w*" (Arbitrary "Name" :. Arbitrary "Variable")
    , tok "=" (Arbitrary "Operator")
    , tokNext "\"" (Arbitrary "Literal" :. Arbitrary "String") (GoTo stringLiteral')
    , tok "[0-9]+(?:\\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?" (Arbitrary "Literal" :. Arbitrary "Number")
    , tok "\\s" (Arbitrary "Text")
    ]
-- | Top-level state: comments, function names, the opening of an
-- argument list, and statement separators.
root' :: TokenMatcher
root' =
    [ tok "#.*" (Arbitrary "Comment" :. Arbitrary "Single")
    , tok "[a-zA-Z]\\w*" (Arbitrary "Name" :. Arbitrary "Function")
    , tokNext "\\(" (Arbitrary "Punctuation") (GoTo argsList')
    , tok ";" (Arbitrary "Punctuation")
    , tok "\\s+" (Arbitrary "Text")
    ]
-- | Inside a double-quoted string: escapes, the closing quote (which
-- pops the state), and runs of ordinary characters.
stringLiteral' :: TokenMatcher
stringLiteral' =
    [ tok "\\\\[tnrfbae\"\\\\]" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Escape")
    , tokNext "\"" (Arbitrary "Literal" :. Arbitrary "String") Pop
    , tok "[^\\\\\"]+" (Arbitrary "Literal" :. Arbitrary "String")
    ]
| chemist/highlighter | src/Text/Highlighter/Lexers/GoodDataCL.hs | bsd-3-clause | 1,384 | 0 | 10 | 271 | 409 | 217 | 192 | 32 | 1 |
module Text.Highlighter.Lexers.Vala (lexer) where
import Text.Regex.PCRE.Light
import Text.Highlighter.Types
-- | Lexer definition for Vala source files: name, aliases, recognised
-- file extensions, MIME type, and the initial state.  'multiline'
-- makes @^@/@$@ match at line boundaries.
lexer :: Lexer
lexer = Lexer
    { lName = "Vala"
    , lAliases = ["vala", "vapi"]
    , lExtensions = [".vala", ".vapi"]
    , lMimetypes = ["text/x-vala"]
    , lStart = root'
    , lFlags = [multiline]
    }
-- | Core statement-level tokens: string/char/number literals,
-- operators, attribute annotations, keywords, declarations, namespace
-- and class openers, built-in types and plain identifiers.
statements' :: TokenMatcher
statements' =
    [ tokNext "L?\"" (Arbitrary "Literal" :. Arbitrary "String") (GoTo string')
    , tok "L?'(\\\\.|\\\\[0-7]{1,3}|\\\\x[a-fA-F0-9]{1,2}|[^\\\\\\'\\n])'" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Char")
    , tok "(\\d+\\.\\d*|\\.\\d+|\\d+)[eE][+-]?\\d+[lL]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Float")
    , tok "(\\d+\\.\\d*|\\.\\d+|\\d+[fF])[fF]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Float")
    , tok "0x[0-9a-fA-F]+[Ll]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Hex")
    , tok "0[0-7]+[Ll]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Oct")
    , tok "\\d+[Ll]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Integer")
    , tok "[\126!%^&*+=|?:<>/-]" (Arbitrary "Operator")
    , tok "(\\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\\])" (ByGroups [(Arbitrary "Punctuation"), (Arbitrary "Name" :. Arbitrary "Decorator"), (Arbitrary "Punctuation")])
    , tok "(\\[)(CCode|(?:Integer|Floating)Type)" (ByGroups [(Arbitrary "Punctuation"), (Arbitrary "Name" :. Arbitrary "Decorator")])
    , tok "[()\\[\\],.]" (Arbitrary "Punctuation")
    , tok "(as|base|break|case|catch|construct|continue|default|delete|do|else|enum|finally|for|foreach|get|if|in|is|lock|new|out|params|return|set|sizeof|switch|this|throw|try|typeof|while|yield)\\b" (Arbitrary "Keyword")
    , tok "(abstract|const|delegate|dynamic|ensures|extern|inline|internal|override|owned|private|protected|public|ref|requires|signal|static|throws|unowned|var|virtual|volatile|weak|yields)\\b" (Arbitrary "Keyword" :. Arbitrary "Declaration")
    , tokNext "(namespace|using)(\\s+)" (ByGroups [(Arbitrary "Keyword" :. Arbitrary "Namespace"), (Arbitrary "Text")]) (GoTo namespace')
    , tokNext "(class|errordomain|interface|struct)(\\s+)" (ByGroups [(Arbitrary "Keyword" :. Arbitrary "Declaration"), (Arbitrary "Text")]) (GoTo class')
    , tok "(\\.)([a-zA-Z_][a-zA-Z0-9_]*)" (ByGroups [(Arbitrary "Operator"), (Arbitrary "Name" :. Arbitrary "Attribute")])
    , tok "(void|bool|char|double|float|int|int8|int16|int32|int64|long|short|size_t|ssize_t|string|time_t|uchar|uint|uint8|uint16|uint32|uint64|ulong|unichar|ushort)\\b" (Arbitrary "Keyword" :. Arbitrary "Type")
    , tok "(true|false|null)\\b" (Arbitrary "Name" :. Arbitrary "Builtin")
    , tok "[a-zA-Z_][a-zA-Z0-9_]*" (Arbitrary "Name")
    ]
-- | Whitespace, line continuations, single- and multi-line comments,
-- and the start of an @#if 0@ block (which switches to 'if0'').
whitespace' :: TokenMatcher
whitespace' =
    [ tokNext "^\\s*#if\\s+0" (Arbitrary "Comment" :. Arbitrary "Preproc") (GoTo if0')
    , tok "\\n" (Arbitrary "Text")
    , tok "\\s+" (Arbitrary "Text")
    , tok "\\\\\\n" (Arbitrary "Text")
    , tok "//(\\n|(.|\\n)*?[^\\\\]\\n)" (Arbitrary "Comment" :. Arbitrary "Single")
    , tok "/(\\\\\\n)?[*](.|\\n)*?[*](\\\\\\n)?/" (Arbitrary "Comment" :. Arbitrary "Multiline")
    ]
-- | A single statement: whitespace/comments, statement-level tokens,
-- braces, and the terminating semicolon (which pops the state).
statement' :: TokenMatcher
statement' =
    [ anyOf whitespace'
    , anyOf statements'
    , tok "[{}]" (Arbitrary "Punctuation")
    , tokNext ";" (Arbitrary "Punctuation") Pop
    ]

-- | Inside an @#if 0@ block: nested @#if@s push, @#else@/@#elif@ and
-- @#endif@ pop, and everything else is treated as comment text.
if0' :: TokenMatcher
if0' =
    [ tokNext "^\\s*#if.*?(?<!\\\\)\\n" (Arbitrary "Comment" :. Arbitrary "Preproc") Push
    , tokNext "^\\s*#el(?:se|if).*\\n" (Arbitrary "Comment" :. Arbitrary "Preproc") Pop
    , tokNext "^\\s*#endif.*?(?<!\\\\)\\n" (Arbitrary "Comment" :. Arbitrary "Preproc") Pop
    , tok ".*?\\n" (Arbitrary "Comment")
    ]
-- | After a @namespace@/@using@ keyword: consume the dotted name and
-- pop.
namespace' :: TokenMatcher
namespace' =
    [ tokNext "[a-zA-Z_][a-zA-Z0-9_.]*" (Arbitrary "Name" :. Arbitrary "Namespace") Pop
    ]

-- | Initial state: skip whitespace/comments, then fall through to a
-- statement (the empty match always succeeds).
root' :: TokenMatcher
root' =
    [ anyOf whitespace'
    , tokNext "" (Arbitrary "Text") (GoTo statement')
    ]

-- | After a class-like declaration keyword: consume the type name
-- and pop.
class' :: TokenMatcher
class' =
    [ tokNext "[a-zA-Z_][a-zA-Z0-9_]*" (Arbitrary "Name" :. Arbitrary "Class") Pop
    ]

-- | Inside a double-quoted string literal: the closing quote pops;
-- escapes, ordinary runs, and trailing backslashes are string tokens.
string' :: TokenMatcher
string' =
    [ tokNext "\"" (Arbitrary "Literal" :. Arbitrary "String") Pop
    , tok "\\\\([\\\\abfnrtv\"\\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Escape")
    , tok "[^\\\\\"\\n]+" (Arbitrary "Literal" :. Arbitrary "String")
    , tok "\\\\\\n" (Arbitrary "Literal" :. Arbitrary "String")
    , tok "\\\\" (Arbitrary "Literal" :. Arbitrary "String")
    ]
| chemist/highlighter | src/Text/Highlighter/Lexers/Vala.hs | bsd-3-clause | 4,507 | 0 | 12 | 671 | 1,134 | 578 | 556 | 69 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
module Bot
( bot
) where
import Data.Foldable (for_)
import Data.Monoid ((<>))
import Safe (lastMay)
import Web.Telegram.API.Bot (Chat (..), GetUpdatesRequest (..), Message (..),
Response (..), TelegramClient, Update (..),
User (..), getUpdatesM, getUpdatesRequest)
import BotCommands (BotCmd (..), addNote, readCommand, showNew, showOld)
import Const (updateIdFile)
import Tools (putLog, saveOffset, tshow)
-- | Main bot loop: long-poll Telegram for updates starting at the
-- given offset, handle every received update, and recurse with the
-- offset just past the last one (acknowledging them).  When no
-- updates arrive, poll again with the same offset.
bot :: Maybe Int -- ^ Offset (update id)
    -> TelegramClient ()
bot curOffset = do
    Response{result} <- getUpdatesM updatesRequest
    case lastMay result of
        Just Update{update_id} -> do
            -- next poll must start one past the last seen update id
            let newOffset = update_id + 1
            for_ result handleMessage
            bot $ Just newOffset
        Nothing -> bot curOffset
  where
    updatesRequest = getUpdatesRequest{updates_offset = curOffset}
-- | Dispatch a single Telegram update.
--
-- Text messages are either recognised bot commands (show new/old
-- notes, or report a wrong command) or free text, which is stored as
-- a new note.  Updates without usable text are only logged.
--
-- NOTE(review): the update id is persisted only for the text-message
-- branch, so other update kinds are re-delivered after a restart --
-- confirm this is intended.
handleMessage :: Update -> TelegramClient ()
handleMessage update =
    case mMessage of
        Just Message{text = Just text, from = Just from, chat} -> do
            let User{user_id} = from
                Chat{chat_id} = chat
            case readCommand text of
                Just command ->
                    case command of
                        ShowNew ->
                            showNew (fromIntegral chat_id) user_id
                        ShowOld ->
                            showOld (fromIntegral chat_id) user_id
                        WrongCommand wrongCmd ->
                            putLog $ cmdErr wrongCmd
                -- not a command: store the text as a note
                Nothing -> addNote user_id text
            saveOffset updateIdFile update_id
        Just msg ->
            putLog $ "unhandled " <> tshow msg
        _ ->
            putLog $ "unhandled " <> tshow update
  where
    cmdErr c = "Wrong bot command: " <> tshow c
    Update{update_id, message = mMessage} = update
| MCL1303/TaskBot | lib/Bot.hs | bsd-3-clause | 1,941 | 0 | 18 | 674 | 520 | 275 | 245 | 47 | 6 |
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
-- HACK: in GHC 7.10, Haddock complains about Control.Monad.STM and
-- System.IO.Unsafe being imported but unused. However, if we use
-- CPP to avoid including them under Haddock, then it will fail to
-- compile!
#ifdef __HADDOCK__
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
#endif
#if __GLASGOW_HASKELL__ >= 701
# ifdef __HADDOCK__
{-# LANGUAGE Trustworthy #-}
# else
{-# LANGUAGE Safe #-}
# endif
#endif
----------------------------------------------------------------
-- 2015.03.29
-- |
-- Module : Control.Concurrent.STM.TBMQueue
-- Copyright : Copyright (c) 2011--2015 wren gayle romano
-- License : BSD
-- Maintainer : wren@community.haskell.org
-- Stability : provisional
-- Portability : non-portable (GHC STM, DeriveDataTypeable)
--
-- A version of "Control.Concurrent.STM.TQueue" where the queue is
-- bounded in length and closeable. This combines the abilities of
-- "Control.Concurrent.STM.TBQueue" and "Control.Concurrent.STM.TMQueue".
--
-- /Since: 2.0.0/
----------------------------------------------------------------
module Control.Concurrent.STM.TBMQueue
(
-- * The TBMQueue type
TBMQueue()
-- ** Creating TBMQueues
, newTBMQueue
, newTBMQueueIO
-- ** Reading from TBMQueues
, readTBMQueue
, tryReadTBMQueue
, peekTBMQueue
, tryPeekTBMQueue
-- ** Writing to TBMQueues
, writeTBMQueue
, tryWriteTBMQueue
, unGetTBMQueue
-- ** Closing TBMQueues
, closeTBMQueue
-- ** Predicates
, isClosedTBMQueue
, isEmptyTBMQueue
, isFullTBMQueue
-- ** Other functionality
, estimateFreeSlotsTBMQueue
, freeSlotsTBMQueue
) where
import Prelude hiding (reads)
import Data.Typeable (Typeable)
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>))
#endif
import Control.Monad.STM (STM, retry)
import Control.Concurrent.STM.TVar
import Control.Concurrent.STM.TQueue -- N.B., GHC only
-- N.B., we need a Custom cabal build-type for this to work.
#ifdef __HADDOCK__
import Control.Monad.STM (atomically)
import System.IO.Unsafe (unsafePerformIO)
#endif
----------------------------------------------------------------
-- | @TBMQueue@ is an abstract type representing a bounded closeable
-- FIFO queue.
data TBMQueue a = TBMQueue
    {-# UNPACK #-} !(TVar Bool) -- ^ closed: set to True by 'closeTBMQueue'
    {-# UNPACK #-} !(TVar Int)  -- ^ slots: free slots we /know/ are available
    {-# UNPACK #-} !(TVar Int)  -- ^ reads: reads since the last slot-count sync
    {-# UNPACK #-} !(TQueue a)  -- ^ the underlying unbounded queue
    deriving (Typeable)
-- The components are:
-- * Whether the queue has been closed.
-- * How many free slots we /know/ we have available.
-- * How many slots have been freed up by successful reads since
-- the last time the slot count was synchronized by 'isFullTBQueue'.
-- * The underlying TQueue.
-- | Build and returns a new instance of @TBMQueue@ with the given
-- capacity. /N.B./, we do not verify the capacity is positive, but
-- if it is non-positive then 'writeTBMQueue' will always retry and
-- 'isFullTBMQueue' will always be true.
newTBMQueue :: Int -> STM (TBMQueue a)
newTBMQueue n = do
    closedVar <- newTVar False -- starts open
    slotsVar  <- newTVar n     -- the whole capacity is free
    readsVar  <- newTVar 0     -- no reads recorded yet
    tqueue    <- newTQueue
    return (TBMQueue closedVar slotsVar readsVar tqueue)
-- | @IO@ version of 'newTBMQueue'. This is useful for creating
-- top-level @TBMQueue@s using 'unsafePerformIO', because using
-- 'atomically' inside 'unsafePerformIO' isn't possible.
newTBMQueueIO :: Int -> IO (TBMQueue a)
newTBMQueueIO n = do
    closedVar <- newTVarIO False -- starts open
    slotsVar  <- newTVarIO n     -- the whole capacity is free
    readsVar  <- newTVarIO 0     -- no reads recorded yet
    tqueue    <- newTQueueIO
    return (TBMQueue closedVar slotsVar readsVar tqueue)
-- | Read the next value from the @TBMQueue@, retrying if the queue
-- is empty (and not closed). We return @Nothing@ immediately if
-- the queue is closed and empty.
readTBMQueue :: TBMQueue a -> STM (Maybe a)
readTBMQueue (TBMQueue closed _slots reads queue) = do
    isClosed <- readTVar closed
    if isClosed
        -- Closed: drain without retrying; Nothing once empty.
        then tryReadTQueue queue >>= countRead
        -- Open: block (retry) until an item arrives.
        else readTQueue queue >>= countRead . Just
  where
    -- Every successful read frees a slot; record it so writers can
    -- reclaim it later (see 'estimateFreeSlotsTBMQueue').
    countRead Nothing = return Nothing
    countRead mx      = do
        modifyTVar' reads (1 +)
        return mx
{-
-- The above is slightly optimized over the clearer:
readTBMQueue (TBMQueue closed _slots reads queue) =
b <- readTVar closed
b' <- isEmptyTQueue queue
if b && b'
then return Nothing
else do
x <- readTQueue queue
modifyTVar' reads (1 +)
return (Just x)
-- TODO: compare Core and benchmarks; is the loss of clarity worth it?
-}
-- | A version of 'readTBMQueue' which does not retry. Instead it
-- returns @Just Nothing@ if the queue is open but no value is
-- available; it still returns @Nothing@ if the queue is closed
-- and empty.
tryReadTBMQueue :: TBMQueue a -> STM (Maybe (Maybe a))
tryReadTBMQueue (TBMQueue closed _slots reads queue) = do
    isClosed <- readTVar closed
    mx       <- tryReadTQueue queue
    case mx of
        Just _item -> do
            -- A successful read frees a slot for writers.
            modifyTVar' reads (1 +)
            return (Just mx)
        Nothing
            | isClosed  -> return Nothing        -- closed and fully drained
            | otherwise -> return (Just Nothing) -- open, just momentarily empty
{-
-- The above is slightly optimized over the clearer:
tryReadTBMQueue (TBMQueue closed _slots reads queue) =
b <- readTVar closed
b' <- isEmptyTQueue queue
if b && b'
then return Nothing
else do
mx <- tryReadTBMQueue queue
case mx of
Nothing -> return (Just mx)
Just _x -> do
modifyTVar' reads (1 +)
return (Just mx)
-- TODO: compare Core and benchmarks; is the loss of clarity worth it?
-}
-- | Get the next value from the @TBMQueue@ without removing it,
-- retrying if the queue is empty.
peekTBMQueue :: TBMQueue a -> STM (Maybe a)
peekTBMQueue (TBMQueue closed _slots _reads queue) = do
    isClosed <- readTVar closed
    if not isClosed
        -- Open: 'peekTQueue' retries until an item shows up.
        then peekOne
        else do
            empty <- isEmptyTQueue queue
            if empty
                then return Nothing -- closed and drained: no retry
                else peekOne
  where
    peekOne = fmap Just (peekTQueue queue)
{-
-- The above is lazier reading from @queue@ than the clearer:
peekTBMQueue (TBMQueue closed _slots _reads queue) = do
b <- isEmptyTQueue queue
b' <- readTVar closed
if b && b'
then return Nothing
else Just <$> peekTQueue queue
-- TODO: compare Core and benchmarks; is the loss of clarity worth it?
-}
-- | A version of 'peekTBMQueue' which does not retry. Instead it
-- returns @Just Nothing@ if the queue is open but no value is
-- available; it still returns @Nothing@ if the queue is closed
-- and empty.
tryPeekTBMQueue :: TBMQueue a -> STM (Maybe (Maybe a))
tryPeekTBMQueue (TBMQueue closed _slots _reads queue) = do
    isClosed <- readTVar closed
    mx       <- tryPeekTQueue queue
    return $ case (isClosed, mx) of
        (True, Nothing) -> Nothing -- closed and drained
        _               -> Just mx -- @Just Nothing@ means open but empty
{-
-- The above is lazier reading from @queue@ (and removes an extraneous isEmptyTQueue when using the compatibility layer) than the clearer:
tryPeekTBMQueue (TBMQueue closed _slots _reads queue) = do
b <- isEmptyTQueue queue
b' <- readTVar closed
if b && b'
then return Nothing
else Just <$> tryPeekTQueue queue
-- TODO: compare Core and benchmarks; is the loss of clarity worth it?
-}
-- | Write a value to a @TBMQueue@, retrying if the queue is full.
-- If the queue is closed then the value is silently discarded.
-- Use 'isClosedTBMQueue' to determine if the queue is closed
-- before writing, as needed.
writeTBMQueue :: TBMQueue a -> a -> STM ()
writeTBMQueue self@(TBMQueue closed slots _reads queue) item = do
    isClosed <- readTVar closed
    if isClosed
        then return () -- writes to a closed queue are dropped silently
        else do
            free <- estimateFreeSlotsTBMQueue self
            if free > 0
                then do
                    writeTVar slots $! free - 1
                    writeTQueue queue item
                else retry -- full: block until a reader frees a slot
-- | A version of 'writeTBMQueue' which does not retry. Returns @Just
-- True@ if the value was successfully written, @Just False@ if it
-- could not be written (but the queue was open), and @Nothing@
-- if it was discarded (i.e., the queue was closed).
tryWriteTBMQueue :: TBMQueue a -> a -> STM (Maybe Bool)
tryWriteTBMQueue self@(TBMQueue closed slots _reads queue) item = do
    isClosed <- readTVar closed
    if isClosed
        then return Nothing -- discarded: the queue is closed
        else do
            free <- estimateFreeSlotsTBMQueue self
            if free > 0
                then do
                    writeTVar slots $! free - 1
                    writeTQueue queue item
                    return (Just True)  -- written
                else return (Just False) -- full, but still open
-- | Put a data item back onto a queue, where it will be the next
-- item read. If the queue is closed then the value is silently
-- discarded; you can use 'peekTBMQueue' to circumvent this in certain
-- circumstances. /N.B./, this could allow the queue to temporarily
-- become longer than the specified limit, which is necessary to
-- ensure that the item is indeed the next one read.
unGetTBMQueue :: TBMQueue a -> a -> STM ()
unGetTBMQueue (TBMQueue closed slots _reads queue) item = do
    isClosed <- readTVar closed
    if isClosed
        then return () -- silently dropped on a closed queue
        else do
            -- May drive the free-slot count negative; that is deliberate,
            -- so the item really is the next one read even when full.
            modifyTVar' slots (subtract 1)
            unGetTQueue queue item
-- | Closes the @TBMQueue@, preventing any further writes.
-- Note: closing does not discard items already enqueued; readers can
-- still drain them (see 'readTBMQueue').
closeTBMQueue :: TBMQueue a -> STM ()
closeTBMQueue (TBMQueue closed _slots _reads _queue) =
    writeTVar closed True
-- | Returns @True@ if the supplied @TBMQueue@ has been closed.
-- A closed queue may still contain readable items; pair this with
-- 'isEmptyTBMQueue' to detect a fully drained, closed queue.
isClosedTBMQueue :: TBMQueue a -> STM Bool
isClosedTBMQueue (TBMQueue closed _slots _reads _queue) =
    readTVar closed
{-
-- | Returns @True@ if the supplied @TBMQueue@ has been closed.
isClosedTBMQueueIO :: TBMQueue a -> IO Bool
isClosedTBMQueueIO (TBMQueue closed _slots _reads _queue) =
readTVarIO closed
-}
-- | Returns @True@ if the supplied @TBMQueue@ is empty (i.e., has
-- no elements). /N.B./, a @TBMQueue@ can be both ``empty'' and
-- ``full'' at the same time, if the initial limit was non-positive.
-- Delegates to the underlying 'TQueue'; the bound plays no part here.
isEmptyTBMQueue :: TBMQueue a -> STM Bool
isEmptyTBMQueue (TBMQueue _closed _slots _reads queue) =
    isEmptyTQueue queue
-- | Returns @True@ if the supplied @TBMQueue@ is full (i.e., is
-- over its limit). /N.B./, a @TBMQueue@ can be both ``empty'' and
-- ``full'' at the same time, if the initial limit was non-positive.
-- /N.B./, a @TBMQueue@ may still be full after reading, if
-- 'unGetTBMQueue' was used to go over the initial limit.
--
-- This is equivalent to: @liftM (<= 0) estimateFreeSlotsTBMQueue@
isFullTBMQueue :: TBMQueue a -> STM Bool
isFullTBMQueue (TBMQueue _closed slots reads _queue) = do
    known <- readTVar slots
    if known > 0
        then return False -- definitely room; no need to touch 'reads'
        else do
            -- Reclaim the slots freed by readers, then re-check.
            freed <- readTVar reads
            let total = known + freed
            writeTVar slots $! total
            writeTVar reads 0
            return $! total <= 0
-- | Estimate the number of free slots. If the result is positive,
-- then it's a minimum bound; if it's non-positive then it's exact.
-- It will only be negative if the initial limit was negative or
-- if 'unGetTBMQueue' was used to go over the initial limit.
--
-- This function always contends with writers, but only contends
-- with readers when it has to; compare against 'freeSlotsTBMQueue'.
estimateFreeSlotsTBMQueue :: TBMQueue a -> STM Int
estimateFreeSlotsTBMQueue (TBMQueue _closed slots reads _queue) = do
    known <- readTVar slots
    if known > 0
        then return known -- a positive cached count is good enough
        else do
            -- Fold in the slots freed by readers since the last sync;
            -- only now do we contend with the readers' 'reads' counter.
            freed <- readTVar reads
            let total = known + freed
            writeTVar slots $! total
            writeTVar reads 0
            return total
-- | Return the exact number of free slots. The result can be
-- negative if the initial limit was negative or if 'unGetTBMQueue'
-- was used to go over the initial limit.
--
-- This function always contends with both readers and writers;
-- compare against 'estimateFreeSlotsTBMQueue'.
freeSlotsTBMQueue :: TBMQueue a -> STM Int
freeSlotsTBMQueue (TBMQueue _closed slots reads _queue) = do
    known <- readTVar slots
    freed <- readTVar reads
    -- Unconditionally synchronize the cached slot count with the reads
    -- performed so far, giving the exact figure.
    let total = known + freed
    writeTVar slots $! total
    writeTVar reads 0
    return total
----------------------------------------------------------------
----------------------------------------------------------- fin.
| bitemyapp/stm-chans | src/Control/Concurrent/STM/TBMQueue.hs | bsd-3-clause | 12,706 | 0 | 17 | 3,369 | 1,794 | 921 | 873 | 170 | 4 |
module Data.Number.IReal.Scalable where
import Data.Bits
import Data.Ratio
import Data.Number.IReal.IntegerInterval
import Data.Number.IReal.Auxiliary
-- | Scaling. @scale x n@ computes @x * 2^n@ using bit shifts.
class Scalable a where
    -- | Multiply the value by @2^n@; @n@ may be negative (a division).
    scale :: a -> Int -> a
-- | Correctly rounded result for negative n.
-- Rounds upwards when decimal part of unrounded result is .5
-- For negative n the divisor is 2^(-n); adding @bit (-n-1)@ (half the
-- divisor) before the arithmetic right shift implements the
-- round-half-up behaviour advertised in the comment above.
instance Scalable Integer where
  scale x n
   |n >= 0 = shift x n
   |otherwise = shift (x + bit (-n-1)) n
-- Rationals scale exactly (no rounding): shift the numerator up for
-- n >= 0, or the denominator up for n < 0; '%' renormalizes.
instance (Integral a, Bits a) => Scalable (Ratio a) where
  scale x n
   |n >= 0 = shift num n % den
   |otherwise = num % shift den (-n)
   where num = numerator x
         den = denominator x
-- For n >= 0 both components scale exactly.  For n < 0 the endpoints
-- are shifted instead: 'shift' rounds the lower end downwards, and the
-- negate-shift-negate trick rounds the upper end upwards, so the result
-- presumably remains a safe enclosure of the scaled interval.
-- NOTE(review): assumes @I (m,r)@ is a midpoint/radius pair and 'upto'
-- builds an interval from endpoints -- TODO confirm in IntegerInterval.
instance Scalable IntegerInterval where
  scale i@(I (m,r)) n
    | n >= 0 = I (scale m n, scale r n)
    | otherwise = shift (lowerI i) n `upto` (-shift (-upperI i) n)
-- 'scaleFloat' already multiplies a floating-point value by a power of
-- two; only the argument order differs, hence the 'flip'.
instance Scalable Double where
  scale = flip scaleFloat
-- | Types whose values can be truncated to a requested precision.
class VarPrec a where
  -- 'prec' presumably takes the precision in decimal digits and 'precB'
  -- in bits; the default method converts via 'dec2bits' -- TODO confirm.
  prec :: Int -> a -> a
  precB :: Int -> a -> a
  prec d = precB (dec2bits d)
instance VarPrec a => VarPrec [a] where
  -- Apply the precision adjustment elementwise.
  prec d = map (prec d)
  precB b = map (precB b)
instance VarPrec Double where
prec _ = id
precB _ = id | sydow/ireal | Data/Number/IReal/Scalable.hs | bsd-3-clause | 1,194 | 2 | 14 | 303 | 482 | 243 | 239 | 33 | 0 |
module Main where
import MinFree
import Test.Hspec
-- Entry point: run the spec below with hspec's default runner.
main :: IO ()
main = hspec minfreeTest
minfreeTest :: Spec
minfreeTest = do
  -- The sample list contains every number 0..12 but not 13, so 13 is
  -- the expected answer for both implementations.
  describe "Validate minfree function" $
    it "minfree should find the smallest number not in the supplied set" $
      minfree [08, 23, 09, 00, 12, 11, 01, 10, 31, 07, 41, 04, 14, 21, 05, 17, 03, 19, 02, 06] `shouldBe` 13
  describe "Validate minfree' function" $
    it "minfree' should find the smallest number not in the supplied set" $
      minfree' [08, 23, 09, 00, 12, 11, 01, 10, 31, 07, 41, 04, 14, 21, 05, 17, 03, 19, 02, 06] `shouldBe` 13
| bdkoepke/pearls | HSpecTests.hs | bsd-3-clause | 584 | 0 | 10 | 131 | 212 | 127 | 85 | 13 | 1 |
{-# LANGUAGE TupleSections #-}
-- Parsing.
module GHC.ParMake.Parse (getModuleDeps, depsListToDeps)
where
import Control.Concurrent
import Control.Monad
import Data.Char (isAlphaNum, isSpace)
import Data.Functor ((<$>))
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (catMaybes)
import qualified Data.Set as Set
import System.Exit (ExitCode(..))
import System.FilePath ((</>))
import System.IO.Temp (withSystemTempDirectory)
import Distribution.Compat.ReadP
import GHC.ParMake.Types (Dep(..))
import GHC.ParMake.Util (Verbosity, debug', fatal,
defaultOutputHooks, runProcess)
-- TODO This random choice of characters is *insane*, this will NOT WORK when
-- some unexpected character is in the filename.
-- Worse even, `parseLine` will just return Nothing, silencing the
-- problem and making ghc-parmake exit with code 1 without reason.
--
-- This filename parsing and "careful" parsing (returning Nothing by
-- default instead of erroring) must be changed!
-- Accepts module/file-name characters: alphanumerics plus '.', '-',
-- '/' and '_'.  Any other character (e.g. a space in a path) stops the
-- munch and typically makes the surrounding parse fail; see the TODO
-- above.
parseModuleName :: ReadP r String
parseModuleName = munch1 (\c -> isAlphaNum c || c == '.'
                          || c == '-' || c == '/' || c == '_')
-- | Parse one makefile line of the form @target : dependency@.
-- Returns 'Nothing' when the line does not parse, parses ambiguously,
-- or leaves non-whitespace input unconsumed.
parseLine :: String -> Maybe (String, String)
parseLine line =
    case completeParses of
        [result] -> Just result -- exactly one full parse
        _        -> Nothing     -- none, or ambiguous
  where
    -- Keep only parses that consumed everything but trailing whitespace.
    completeParses =
        [ r | (r, leftover) <- readP_to_S parser line, all isSpace leftover ]
    parser = do
        skipSpaces
        target <- parseModuleName
        skipSpaces
        _ <- char ':'
        skipSpaces
        dependency <- parseModuleName
        skipSpaces
        return (target, dependency)
-- | Drop makefile comment lines, i.e. those whose first character is
-- @#@.  Blank lines and lines with leading whitespace are kept.
trimLines :: [String] -> [String]
trimLines = filter keep
  where
    keep ('#':_) = False
    keep _       = True
-- Interaction with the outside world.
-- Run 'ghc -M' and return dependencies for every module.
getModuleDeps :: Verbosity
              -> FilePath -- ^ path to the GHC executable
              -> [String] -- ^ extra arguments handed straight to GHC
              -> [FilePath] -- ^ files passed to @ghc -M@
              -> IO [Dep]
getModuleDeps v ghcPath ghcArgs files =
  withSystemTempDirectory "ghc-parmake" $ \tmpDir -> do
    let tmpFileInternal = tmpDir </> "depends.internal.mk"
        tmpFileExternal = tmpDir </> "depends.external.mk"
    let ghcArgsInternal = files ++ ("-M":"-dep-makefile":tmpFileInternal:ghcArgs)
        ghcArgsExternal = files ++
          ("-M":"-dep-makefile":tmpFileExternal:"-include-pkg-deps":ghcArgs)
    -- Get all internal dependencies in this package.
    let getInternalMakeDeps = do
          debug' v $ "Running compiler with -M to get internal module deps: "
            ++ ghcPath ++ " " ++ show ghcArgsInternal
          failOnError <$> runProcess defaultOutputHooks Nothing
                          ghcPath ghcArgsInternal
          parseDepsFromFile tmpFileInternal
    -- Pass -include-pkg-deps to also find out the external dependencies.
    let getAllMakeDeps = do
          debug' v $ "Running compiler with '-M -include-pkg-deps' "
            ++ "to get external module deps: "
            ++ ghcPath ++ " " ++ show ghcArgsExternal
          failOnError <$> runProcess defaultOutputHooks Nothing
                          ghcPath ghcArgsExternal
          parseDepsFromFile tmpFileExternal
    -- The two ghc -M are mainly CPU-bound. Run them in parallel.
    [internalMakeDeps, allMakeDeps] <- parallelIO [ getInternalMakeDeps
                                                  , getAllMakeDeps ]
    -- Put internal and internal + external deps together
    let depsIntAll = mergeValues (groupByTarget internalMakeDeps)
                                 (groupByTarget allMakeDeps)
    -- External deps are (all - internal) ones.
    return [ Dep target int (intExt `diff` int)
           | (target, (int, intExt)) <- Map.toList depsIntAll ]
  where
    -- Any non-zero exit from ghc -M aborts the whole run via 'fatal'.
    failOnError (ExitSuccess ) = ()
    failOnError (ExitFailure n) =
      fatal $ "ghc -M exited with status " ++ show n
    -- Extract (target, dependency) pairs from a generated makefile.
    parseDepsFromFile :: FilePath -> IO [(String, String)]
    parseDepsFromFile file = catMaybes . map parseLine . trimLines . lines
                             <$> readFile file
-- * Helpers
-- | Fast list difference. Uses `Set.difference`, but preserves order.
-- | Fast list difference: keep the elements of the first list that do
-- not occur in the second, preserving their original order (and any
-- duplicates among the survivors).
diff :: (Ord a) => [a] -> [a] -> [a]
diff keeps drops = [ x | x <- keeps, x `Set.member` survivors ]
  where
    survivors = Set.fromList keeps `Set.difference` Set.fromList drops
-- | Runs the IO actions in parallel, and waits until all are finished.
-- | Run the IO actions concurrently (one thread each) and collect their
-- results in the order the actions were given, waiting for all of them.
parallelIO :: [IO a] -> IO [a]
parallelIO actions = mapM spawn actions >>= mapM readMVar
  where
    -- Fork one worker per action; its MVar delivers the result.
    spawn action = do
        var <- newEmptyMVar
        _ <- forkIO (action >>= putMVar var)
        return var
-- | Groups a list of (targets, dependencies) by the targets.
-- | Group a list of (target, dependency) pairs by target.  Within a
-- group, later pairs in the input end up earlier in the list.
groupByTarget :: (Ord target) => [(target, dep)] -> Map target [dep]
groupByTarget pairs = Map.fromListWith (++) (map singleton pairs)
  where
    singleton (target, dep) = (target, [dep])
-- | Merges two maps that have the same keys.
-- | Merge two maps over the same key space, pairing up their value
-- lists; a key missing from one side contributes an empty list there.
mergeValues :: (Ord k) => Map k [a] -> Map k [b] -> Map k ([a], [b])
mergeValues leftMap rightMap = Map.unionWith combine paddedLeft paddedRight
  where
    -- Tag each side with an empty counterpart so the union can pair them.
    paddedLeft  = fmap (\as -> (as, [])) leftMap
    paddedRight = fmap (\bs -> ([], bs)) rightMap
    combine (as, bs) (as', bs') = (as ++ as', bs ++ bs')
-- | Converts a list of (targets, dependencies) to a `Dep` list
-- with no external dependencies.
-- | Build 'Dep' records from raw (target, dependency) pairs; no
-- external dependencies are known at this point, hence the empty list.
depsListToDeps :: [(FilePath, FilePath)] -> [Dep]
depsListToDeps pairs = map makeDep (Map.toList (groupByTarget pairs))
  where
    makeDep (target, ds) = Dep target ds []
| 23Skidoo/ghc-parmake | src/GHC/ParMake/Parse.hs | bsd-3-clause | 5,398 | 0 | 19 | 1,555 | 1,352 | 729 | 623 | 91 | 3 |
module Foundation where
import Prelude
import Yesod
import Yesod.Static
import Yesod.Auth
-- import Yesod.Auth.Email
-- import Yesod.Auth.BrowserId
import Yesod.Auth.GoogleEmail
import Yesod.Default.Config
import Yesod.Default.Util (addStaticContentExternal)
import Network.HTTP.Conduit (Manager)
import qualified Settings
import Settings.Development (development)
import qualified Database.Persist
import Settings.StaticFiles
import Database.Persist.MongoDB hiding (master)
import Settings (widgetFile, Extra (..))
import Model
import Text.Jasmine (minifym)
import Text.Hamlet (hamletFile)
import Yesod.Core.Types (Logger)
-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
    { settings :: AppConfig DefaultEnv Extra -- ^ Runtime configuration (see 'approot', 'getExtra').
    , getStatic :: Static -- ^ Settings for static file serving.
    , connPool :: Database.Persist.PersistConfigPool Settings.PersistConf -- ^ Database connection pool.
    , httpManager :: Manager -- ^ Shared HTTP manager (used by 'authHttpManager').
    , persistConfig :: Settings.PersistConf -- ^ Persistence configuration (used by 'runDB').
    , appLogger :: Logger -- ^ Application logger (returned by 'makeLogger').
    }
-- Set up i18n messages. See the message folder.
mkMessage "App" "messages" "en"
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/routing-and-handlers
--
-- Note that this is really half the story; in Application.hs, mkYesodDispatch
-- generates the rest of the code. Please see the linked documentation for an
-- explanation for this split.
mkYesodData "App" $(parseRoutesFile "config/routes")
type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
    approot = ApprootMaster $ appRoot . settings
    -- Store session data on the client in encrypted cookies,
    -- default session idle timeout is 120 minutes
    makeSessionBackend _ = fmap Just $ defaultClientSessionBackend
        (120 * 60) -- 120 minutes
        "config/client_session_key.aes"
    defaultLayout widget = do
        master <- getYesod
        mmsg <- getMessage
        -- We break up the default layout into two components:
        -- default-layout is the contents of the body tag, and
        -- default-layout-wrapper is the entire page. Since the final
        -- value passed to hamletToRepHtml cannot be a widget, this allows
        -- you to use normal widget features in default-layout.
        pc <- widgetToPageContent $ do
            $(combineStylesheets 'StaticR
                [ css_normalize_css
                , css_bootstrap_css
                ])
            $(widgetFile "default-layout")
        giveUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")
    -- This is done to provide an optimization for serving static files from
    -- a separate domain. Please see the staticRoot setting in Settings.hs
    urlRenderOverride y (StaticR s) =
        Just $ uncurry (joinPath y (Settings.staticRoot $ settings y)) $ renderRoute s
    urlRenderOverride _ _ = Nothing
    -- The page to be redirected to when authentication is required.
    authRoute _ = Just $ AuthR LoginR
    -- Only pet creation is restricted (any authenticated user passes;
    -- see 'isAdmin'); every other route is open.
    isAuthorized PetCreateR _ = isAdmin
    isAuthorized _ _ = return Authorized
    -- This function creates static content files in the static folder
    -- and names them based on a hash of their content. This allows
    -- expiration dates to be set far in the future without worry of
    -- users receiving stale content.
    addStaticContent =
        addStaticContentExternal minifym genFileName Settings.staticDir (StaticR . flip StaticRoute [])
      where
        -- Generate a unique filename based on the content itself
        genFileName lbs
            | development = "autogen-" ++ base64md5 lbs
            | otherwise = base64md5 lbs
    -- Place Javascript at bottom of the body tag so the rest of the page loads first
    jsLoader _ = BottomOfBody
    -- What messages should be logged. The following includes all messages when
    -- in development, and warnings and errors in production.
    shouldLog _ _source level =
        development || level == LevelWarn || level == LevelError
    makeLogger = return . appLogger
-- How to run database actions.
instance YesodPersist App where
    -- 'Action' presumably comes from Database.Persist.MongoDB (imported
    -- above); actions run against the shared 'connPool' -- TODO confirm.
    type YesodPersistBackend App = Action
    runDB = defaultRunDB persistConfig connPool
-- | Authorization check used by 'isAuthorized': any logged-in user is
-- accepted; anonymous visitors are asked to authenticate first.
isAdmin :: HandlerT App IO AuthResult
isAdmin = do
    mUserId <- maybeAuthId
    case mUserId of
        Just _  -> return Authorized
        Nothing -> return AuthenticationRequired
instance YesodAuth App where
    type AuthId App = UserId
    -- Where to send a user after successful login
    loginDest _ = HomeR
    -- Where to send a user after logout
    logoutDest _ = HomeR
    -- Look the user up by credential identifier; on first login, create
    -- a password-less record for them and use its fresh id.
    getAuthId creds = runDB $ do
        x <- getBy $ UniqueUser $ credsIdent creds
        case x of
            Just (Entity uid _) -> return $ Just uid
            Nothing -> do
                fmap Just $ insert User
                    { userIdent = credsIdent creds
                    , userPassword = Nothing
                    }
    -- You can add other plugins like BrowserID, email or OAuth here
    -- authPlugins _ = [authBrowserId def, authGoogleEmail]
    authPlugins _ = [authGoogleEmail]
    authHttpManager = httpManager
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
    -- Use the library's default renderings for form validation messages.
    renderMessage _ _ = defaultFormMessage
-- | Get the 'Extra' value, used to hold data from the settings.yml file.
getExtra :: Handler Extra
getExtra = do
    app <- getYesod
    return (appExtra (settings app))
-- Note: previous versions of the scaffolding included a deliver function to
-- send emails. Unfortunately, there are too many different options for us to
-- give a reasonable default. Instead, the information is available on the
-- wiki:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
| jabaraster/MetShop | Foundation.hs | bsd-3-clause | 6,235 | 0 | 18 | 1,423 | 909 | 497 | 412 | -1 | -1 |
{-# LANGUAGE ForeignFunctionInterface, CPP #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.ProvokingVertex
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- All raw functions and tokens from the ARB_provoking_vertex extension, see
-- <http://www.opengl.org/registry/specs/ARB/provoking_vertex.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.ProvokingVertex (
-- * Functions
glProvokingVertex,
-- * Tokens
gl_FIRST_VERTEX_CONVENTION,
gl_LAST_VERTEX_CONVENTION,
gl_PROVOKING_VERTEX,
gl_QUADS_FOLLOW_PROVOKING_VERTEX_CONVENTION
) where
import Foreign.C.Types
import Graphics.Rendering.OpenGL.Raw.Extensions
import Graphics.Rendering.OpenGL.Raw.Core31.Types
#include "HsOpenGLRaw.h"
-- The registered name of this extension; presumably consumed by the
-- EXTENSION_ENTRY macro (HsOpenGLRaw.h) below -- TODO confirm.
extensionNameString :: String
extensionNameString = "GL_ARB_provoking_vertex"
EXTENSION_ENTRY(dyn_glProvokingVertex,ptr_glProvokingVertex,"glProvokingVertex",glProvokingVertex,GLenum -> IO ())
-- Token values as assigned by the ARB_provoking_vertex registry spec
-- (URL in the module header).
gl_FIRST_VERTEX_CONVENTION :: GLenum
gl_FIRST_VERTEX_CONVENTION = 0x8E4D
gl_LAST_VERTEX_CONVENTION :: GLenum
gl_LAST_VERTEX_CONVENTION = 0x8E4E
gl_PROVOKING_VERTEX :: GLenum
gl_PROVOKING_VERTEX = 0x8E4F
gl_QUADS_FOLLOW_PROVOKING_VERTEX_CONVENTION :: GLenum
gl_QUADS_FOLLOW_PROVOKING_VERTEX_CONVENTION = 0x8E4C
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/ProvokingVertex.hs | bsd-3-clause | 1,512 | 0 | 10 | 162 | 150 | 100 | 50 | -1 | -1 |
{- |
Module : Data.Dequeue.Show
Description : A newtype used entirely to provide 'Read' and 'Show' instances for 'Dequeue's.
Copyright : (c) Henry Bucklow 2010
License : BSD3
Maintainer : henry@elsie.org.uk
Stability : provisional
Portability : portable
A newtype used entirely for its derived 'Read' and 'Show' instances. These are
then used by 'showDequeue' and 'readDequeue' to make writing 'Read' and 'Show'
instances for 'Dequeue's easier.
-}
module Data.Dequeue.Show (Dequeue(..)) where
newtype Dequeue a = Dequeue [a] deriving (Read, Show)
| oconnore/dequeue | src/Data/Dequeue/Show.hs | bsd-3-clause | 572 | 0 | 6 | 105 | 40 | 26 | 14 | 2 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE UnicodeSyntax #-}
{-|
[@ISO639-1@] -
[@ISO639-2@] gil
[@ISO639-3@] gil
[@Native name@] Taetae ni Kiribati
[@English name@] Gilbertese
-}
module Text.Numeral.Language.GIL.TestData (cardinals) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "base" Prelude ( Num )
import "numerals" Text.Numeral.Grammar.Reified ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
--------------------------------------------------------------------------------
-- Test data
--------------------------------------------------------------------------------
{-
Sources:
http://www.languagesandnumbers.com/how-to-count-in-gilbertese/en/gil/
-}
cardinals ∷ (Num i) ⇒ TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (0, "akea")
, (1, "teuana")
, (2, "uoua")
, (3, "tenua")
, (4, "aua")
, (5, "nimaua")
, (6, "onoua")
, (7, "itua")
, (8, "wanua")
, (9, "ruaiwa")
, (10, "tebwina")
, (11, "tebwi ma teuana")
, (12, "tebwi ma uoua")
, (13, "tebwi ma tenua")
, (14, "tebwi ma aua")
, (15, "tebwi ma nimaua")
, (16, "tebwi ma onoua")
, (17, "tebwi ma itua")
, (18, "tebwi ma wanua")
, (19, "tebwi ma ruaiwa")
, (20, "uabwi")
, (21, "uabwi ma teuana")
, (22, "uabwi ma uoua")
, (23, "uabwi ma tenua")
, (24, "uabwi ma aua")
, (25, "uabwi ma nimaua")
, (26, "uabwi ma onoua")
, (27, "uabwi ma itua")
, (28, "uabwi ma wanua")
, (29, "uabwi ma ruaiwa")
, (30, "tenibwi")
, (31, "tenibwi ma teuana")
, (32, "tenibwi ma uoua")
, (33, "tenibwi ma tenua")
, (34, "tenibwi ma aua")
, (35, "tenibwi ma nimaua")
, (36, "tenibwi ma onoua")
, (37, "tenibwi ma itua")
, (38, "tenibwi ma wanua")
, (39, "tenibwi ma ruaiwa")
, (40, "abwi")
, (41, "abwi ma teuana")
, (42, "abwi ma uoua")
, (43, "abwi ma tenua")
, (44, "abwi ma aua")
, (45, "abwi ma nimaua")
, (46, "abwi ma onoua")
, (47, "abwi ma itua")
, (48, "abwi ma wanua")
, (49, "abwi ma ruaiwa")
, (50, "nimabwi")
, (51, "nimabwi ma teuana")
, (52, "nimabwi ma uoua")
, (53, "nimabwi ma tenua")
, (54, "nimabwi ma aua")
, (55, "nimabwi ma nimaua")
, (56, "nimabwi ma onoua")
, (57, "nimabwi ma itua")
, (58, "nimabwi ma wanua")
, (59, "nimabwi ma ruaiwa")
, (60, "onobwi")
, (61, "onobwi ma teuana")
, (62, "onobwi ma uoua")
, (63, "onobwi ma tenua")
, (64, "onobwi ma aua")
, (65, "onobwi ma nimaua")
, (66, "onobwi ma onoua")
, (67, "onobwi ma itua")
, (68, "onobwi ma wanua")
, (69, "onobwi ma ruaiwa")
, (70, "itibwi")
, (71, "itibwi ma teuana")
, (72, "itibwi ma uoua")
, (73, "itibwi ma tenua")
, (74, "itibwi ma aua")
, (75, "itibwi ma nimaua")
, (76, "itibwi ma onoua")
, (77, "itibwi ma itua")
, (78, "itibwi ma wanua")
, (79, "itibwi ma ruaiwa")
, (80, "wanibwi")
, (81, "wanibwi ma teuana")
, (82, "wanibwi ma uoua")
, (83, "wanibwi ma tenua")
, (84, "wanibwi ma aua")
, (85, "wanibwi ma nimaua")
, (86, "wanibwi ma onoua")
, (87, "wanibwi ma itua")
, (88, "wanibwi ma wanua")
, (89, "wanibwi ma ruaiwa")
, (90, "ruabwi")
, (91, "ruabwi ma teuana")
, (92, "ruabwi ma uoua")
, (93, "ruabwi ma tenua")
, (94, "ruabwi ma aua")
, (95, "ruabwi ma nimaua")
, (96, "ruabwi ma onoua")
, (97, "ruabwi ma itua")
, (98, "ruabwi ma wanua")
, (99, "ruabwi ma ruaiwa")
, (100, "tebubua")
, (101, "tebubua ao teuana")
, (102, "tebubua ao uoua")
, (103, "tebubua ao tenua")
, (104, "tebubua ao aua")
, (105, "tebubua ao nimaua")
, (106, "tebubua ao onoua")
, (107, "tebubua ao itua")
, (108, "tebubua ao wanua")
, (109, "tebubua ao ruaiwa")
, (110, "tebubua tebwina")
, (123, "tebubua uabwi ma tenua")
, (200, "uabubua")
, (300, "tenibubua")
, (321, "tenibubua uabwi ma teuana")
, (400, "abubua")
, (500, "nimabubua")
, (600, "onobubua")
, (700, "itibubua")
, (800, "wanibubua")
, (900, "ruabubua")
, (909, "ruabubua ao ruaiwa")
, (990, "ruabubua ruabwi")
, (999, "ruabubua ruabwi ma ruaiwa")
, (1000, "tengaa")
, (1001, "tengaa ao teuana")
, (1008, "tengaa ao wanua")
, (1234, "tengaa uabubua tenibwi ma aua")
, (2000, "uangaa")
, (3000, "teningaa")
, (4000, "angaa")
, (4321, "angaa tenibubua uabwi ma teuana")
, (5000, "nimangaa")
, (6000, "onongaa")
, (7000, "itingaa")
, (8000, "waniengaa")
, (9000, "ruangaa")
, (10000, "tebwina tengaa")
, (12345, "tebwi ma uoua tengaa tenibubua abwi ma nimaua")
, (20000, "uabwi tengaa")
, (30000, "tenibwi tengaa")
, (40000, "abwi tengaa")
, (50000, "nimabwi tengaa")
, (54321, "nimabwi ma aua tengaa tenibubua uabwi ma teuana")
, (60000, "onobwi tengaa")
, (70000, "itibwi tengaa")
, (80000, "wanibwi tengaa")
, (90000, "ruabwi tengaa")
, (100000, "tebubua tengaa")
, (123456, "tebubua uabwi ma tenua tengaa abubua nimabwi ma onoua")
, (200000, "uabubua tengaa")
, (300000, "tenibubua tengaa")
, (400000, "abubua tengaa")
, (500000, "nimabubua tengaa")
, (600000, "onobubua tengaa")
, (654321, "onobubua nimabwi ma aua tengaa tenibubua uabwi ma teuana")
, (700000, "itibubua tengaa")
, (800000, "wanibubua tengaa")
, (900000, "ruabubua tengaa")
, (1000000, "te mirion")
, (1000001, "te mirion, teuana")
, (1234567, "te mirion, uabubua tenibwi ma aua tengaa nimabubua onobwi ma itua")
, (2000000, "uoua te mirion")
, (3000000, "tenua te mirion")
, (4000000, "aua te mirion")
, (5000000, "nimaua te mirion")
, (6000000, "onoua te mirion")
, (7000000, "itua te mirion")
, (7654321, "itua te mirion, onobubua nimabwi ma aua tengaa tenibubua uabwi ma teuana")
, (8000000, "wanua te mirion")
, (9000000, "ruaiwa te mirion")
, (1000000000, "te birian")
, (1000000001, "te birian, teuana")
, (2000000000, "uoua te birian")
, (3000000000, "tenua te birian")
, (4000000000, "aua te birian")
, (5000000000, "nimaua te birian")
, (6000000000, "onoua te birian")
, (7000000000, "itua te birian")
, (8000000000, "wanua te birian")
, (9000000000, "ruaiwa te birian")
]
)
]
| telser/numerals | src-test/Text/Numeral/Language/GIL/TestData.hs | bsd-3-clause | 7,045 | 0 | 8 | 2,091 | 1,723 | 1,151 | 572 | 193 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.